_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
e7b8e91832683efa69cf2daa05d9ec6cb255ff1b5c18ca802ad1a50481560dbe | murbard/plebeia | dumb.mli | type segment = Plebeia.Plebeia_impl.Path.side list
type value = Plebeia.Plebeia_impl.value
type context = unit
type error = string
type t
type cursor
val get_root_node : cursor -> t
val empty : context -> cursor
(** Creates a cursor to a new, empty tree. *)
val subtree : cursor -> segment -> (cursor, error) result
(** Moves the cursor down a segment, to the root of a sub-tree. Think
"cd segment/" *)
val create_subtree: cursor -> segment -> (cursor, error) result
(** Create a subtree (bud). Think "mkdir segment" *)
val parent : cursor -> (cursor, error) result
(** Moves the cursor back to the parent tree. Think "cd .." *)
val get : cursor -> segment -> (value, error) result
(** Gets a value if present in the current tree at the given
segment. *)
val insert: cursor -> segment -> value -> (cursor, error) result
(** Inserts a value at the given segment in the current tree.
Returns the new cursor if successful. *)
val upsert: cursor -> segment -> value -> (cursor, error) result
(** Upserts. This can still fail if the segment leads to a subtree. *)
val delete: cursor -> segment -> (cursor, error) result
(** Delete a leaf or subtree. *)
val of_plebeia_node : Plebeia.Plebeia_impl.context -> ('a, 'b, 'c) Plebeia.Plebeia_impl.node -> t
val dot_of_node : t -> string
val dot_of_cursor : cursor -> string
| null | https://raw.githubusercontent.com/murbard/plebeia/95a0eed6f7b8c6836d15032557467a3e93bd83b8/tests/dumb.mli | ocaml | * Creates a cursor to a new, empty tree.
* Moves the cursor down a segment, to the root of a sub-tree. Think
"cd segment/"
* Create a subtree (bud). Think "mkdir segment"
* Moves the cursor back to the parent tree. Think "cd .."
* Gets a value if present in the current tree at the given
segment.
* Inserts a value at the given segment in the current tree.
Returns the new cursor if successful.
* Upserts. This can still fail if the segment leads to a subtree.
* Delete a leaf or subtree. | type segment = Plebeia.Plebeia_impl.Path.side list
type value = Plebeia.Plebeia_impl.value
type context = unit
type error = string
type t
type cursor
val get_root_node : cursor -> t
val empty : context -> cursor
val subtree : cursor -> segment -> (cursor, error) result
val create_subtree: cursor -> segment -> (cursor, error) result
val parent : cursor -> (cursor, error) result
val get : cursor -> segment -> (value, error) result
val insert: cursor -> segment -> value -> (cursor, error) result
val upsert: cursor -> segment -> value -> (cursor, error) result
val delete: cursor -> segment -> (cursor, error) result
val of_plebeia_node : Plebeia.Plebeia_impl.context -> ('a, 'b, 'c) Plebeia.Plebeia_impl.node -> t
val dot_of_node : t -> string
val dot_of_cursor : cursor -> string
|
20a2df98c93011dac4ff7849946480fc878d4b48deaec07014854482bd7edef6 | klutometis/clrs | 6.5-1.scm | (require-extension syntax-case
check)
(require 'section)
(import section-6.5)
(require '../6.2/section)
(import* section-6.2
make-heap
heap-data)
(let* ((data '(15 13 9 5 12 8 7 4 0 6 2 1))
(heap (make-heap data (length data))))
(check (heap-extract-max heap)
=> 15)
(check (heap-data heap)
=> '(13 12 9 5 6 8 7 4 0 1 2)))
| null | https://raw.githubusercontent.com/klutometis/clrs/f85a8f0036f0946c9e64dde3259a19acc62b74a1/6.5/6.5-1.scm | scheme | (require-extension syntax-case
check)
(require 'section)
(import section-6.5)
(require '../6.2/section)
(import* section-6.2
make-heap
heap-data)
(let* ((data '(15 13 9 5 12 8 7 4 0 6 2 1))
(heap (make-heap data (length data))))
(check (heap-extract-max heap)
=> 15)
(check (heap-data heap)
=> '(13 12 9 5 6 8 7 4 0 1 2)))
| |
83256c1dd775b4b51f0ca2f454cc4da7d97e410504d424cd65f5d37f28843341 | dcSpark/fracada-il-primo | EvilEndpoints.hs | {-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveAnyClass #-}
# LANGUAGE DeriveGeneric #
{-# LANGUAGE FlexibleContexts #-}
# LANGUAGE ImportQualifiedPost #
# LANGUAGE LambdaCase #
# LANGUAGE NoImplicitPrelude #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
{-# LANGUAGE TypeOperators #-}
module Spec.EvilEndpoints where
import Control.Monad hiding (fmap)
import qualified Data.Map as Map
import Data.Text (Text)
import Data.Void (Void)
import Fracada.Minting
import Fracada.Offchain
import Fracada.Validator
import Ledger hiding (singleton)
import Ledger.Constraints as Constraints
import qualified Ledger.Typed.Scripts as Scripts
import Ledger.Value as Value
import Plutus.Contract as Contract
import qualified PlutusTx
import PlutusTx.IsData
import PlutusTx.Prelude hiding (Semigroup (..), unless)
import Prelude (Semigroup (..), String, show)
import Text.Printf (printf)
-- try to mint more than what's declared in the datum
extraFractionNFT :: FractionNFTParameters -> ToFraction -> Contract w FracNFTEvilSchema Text ()
extraFractionNFT params@FractionNFTParameters {initTokenClass, authorizedPubKeys} ToFraction {fractions, fractionTokenName} = do
pay nft to contract
-- pay minted tokens back to signer
pkh <- Contract.ownPaymentPubKeyHash
let --find the minting script instance
mintingScript = mintFractionTokensPolicy params fractionTokenName
-- define the value to mint (amount of tokens) and be paid to signer
currency = scriptCurrencySymbol mintingScript
tokensToMint = Value.singleton currency fractionTokenName fractions
moreTokensToMint = Value.singleton currency fractionTokenName (fractions + 100)
payBackTokens = mustPayToPubKey pkh tokensToMint
value of NFT
valueToScript = assetClassValue initTokenClass 1
-- keep the minted amount and asset class in the datum
fractionAsset = assetClass currency fractionTokenName
datum = Datum $ toBuiltinData FractionNFTDatum {tokensClass = fractionAsset, totalFractions = fractions, newNftClass = fractionAsset}
--build the constraints and submit the transaction
validator = fractionValidatorScript params
lookups =
Constraints.mintingPolicy mintingScript
<> Constraints.otherScript validator
tx =
Constraints.mustMintValueWithRedeemer emptyRedeemer moreTokensToMint
<> Constraints.mustPayToOtherScript (fractionNftValidatorHash params) datum valueToScript
<> payBackTokens
void $ mkTxConstraints @Void lookups tx >>= submitTxConfirmed . adjustUnbalancedTx
Contract.logInfo @String $ printf "forged %s for NFT %s" (show fractions) (show initTokenClass)
Contract.logInfo @String $ printf "pks %s" (show authorizedPubKeys)
mint fractional tokens without paying the initial NFT
mintTokensNoNFT :: FractionNFTParameters -> ToFraction -> Contract w FracNFTEvilSchema Text ()
mintTokensNoNFT params@FractionNFTParameters {initTokenClass} ToFraction {fractions, fractionTokenName} = do
pay nft to contract
-- pay minted tokens back to signer
pkh <- Contract.ownPaymentPubKeyHash
let --find the minting script instance
mintingScript = mintFractionTokensPolicy params fractionTokenName
-- define the value to mint (amount of tokens) and be paid to signer
currency = scriptCurrencySymbol mintingScript
tokensToMint = Value.singleton currency fractionTokenName fractions
payBackTokens = mustPayToPubKey pkh tokensToMint
--build the constraints and submit the transaction
validator = fractionValidatorScript params
lookups =
Constraints.mintingPolicy mintingScript
<> Constraints.otherScript validator
tx =
Constraints.mustMintValueWithRedeemer emptyRedeemer tokensToMint
<> payBackTokens
-- ledgerTx <- submitTxConstraintsWith @Void lookups tx
void $ mkTxConstraints @Void lookups tx >>= submitTxConfirmed . adjustUnbalancedTx
Contract.logInfo @String $ printf "forged %s for NFT %s" (show fractions) (show initTokenClass)
return the NFT without burning all the fractional tokens
returnNFTNoFrac :: FractionNFTParameters -> Integer -> Contract w FracNFTEvilSchema Text ()
returnNFTNoFrac params@FractionNFTParameters {initTokenClass} numberToBurn = do
pay nft to signer
-- burn tokens
pkh <- Contract.ownPaymentPubKeyHash
utxos' <- utxosAt (fractionNftValidatorAddress params)
declare the NFT value
valueToWallet = assetClassValue initTokenClass 1
find the that has the NFT we 're looking for
(nftRef, nftTx) = head $ Map.toList utxos'
-- use the auxiliary extractData function to get the datum content
FractionNFTDatum {tokensClass} <- extractData nftTx
assuming that all the fraction tokens are in the owner 's ` ownPubkey ` address . For tracing it is good enough ,
-- though for real-use-cases it is more nuanced, as the owner can have them on different
-- UTxOs.
futxos <- utxosAt (pubKeyHashAddress pkh Nothing)
let tokensAsset = AssetClass (tokensCurrency, fractionTokenName)
fracTokenUtxos = Map.filter (\v -> assetClassValueOf (_ciTxOutValue v) tokensAsset > 0) futxos
-- declare the fractional tokens to burn
(_, fractionTokenName) = unAssetClass tokensClass
tokensCurrency = curSymbol params fractionTokenName
amountToBurn = negate numberToBurn
tokensToBurn = Value.singleton tokensCurrency fractionTokenName amountToBurn
nothingRedeemer = Nothing :: Maybe AddToken
-- build the constraints and submit
validator = fractionValidatorScript params
lookups =
Constraints.mintingPolicy (mintFractionTokensPolicy params fractionTokenName)
<> Constraints.otherScript validator
<> Constraints.unspentOutputs utxos'
<> Constraints.unspentOutputs fracTokenUtxos
<> Constraints.ownPaymentPubKeyHash pkh
tx =
Constraints.mustMintValueWithRedeemer emptyRedeemer tokensToBurn
<> Constraints.mustSpendScriptOutput nftRef (Redeemer $ toBuiltinData nothingRedeemer)
<> Constraints.mustPayToPubKey pkh valueToWallet
-- ledgerTx <- submitTxConstraintsWith @Void lookups tx
void $ mkTxConstraints @Void lookups tx >>= submitTxConfirmed . adjustUnbalancedTx
Contract.logInfo @String $ printf "burnt %s" (show amountToBurn)
add extra NFT than the ones signed
addMoreNFT :: FractionNFTParameters -> AddNFT -> Contract w FracNFTEvilSchema Text ()
addMoreNFT params AddNFT {an_asset, an_sigs} = do
utxosAtValidator <- utxosAt (fractionNftValidatorAddress params)
value of NFT
valueToScript = valueOfTxs utxosAtValidator <> assetClassValue an_asset 2
nftTx = snd . head $ Map.toList utxosAtValidator
previousDatum <- extractData nftTx
let --update datum incrementing the count of nfts
updatedDatum = previousDatum {newNftClass = an_asset}
redeemer = Just $ AddToken an_sigs
validatorScript = fractionNftValidatorInstance params
tx = collectFromScript utxosAtValidator redeemer <> mustPayToTheScript updatedDatum valueToScript
void $ submitTxConstraintsSpending validatorScript utxosAtValidator tx
Contract.logInfo @String $ printf "added new NFT %s" (show an_asset)
-- mint more fractional tokens than the ones signed
mintExtraTokens :: FractionNFTParameters -> MintMore -> Contract w FracNFTEvilSchema Text ()
mintExtraTokens params MintMore {mm_count, mm_sigs} = do
pay nft to contract
-- pay minted tokens back to signer
pkh <- Contract.ownPaymentPubKeyHash
utxosAtValidator <- utxosAt (fractionNftValidatorAddress params)
currentDatum@FractionNFTDatum {tokensClass, totalFractions = currentFractions} <- extractData $ snd $ head $ Map.toList utxosAtValidator
let fractionTokenName = snd $ unAssetClass tokensClass
--find the minting script instance
mintingScript = mintFractionTokensPolicy params fractionTokenName
-- define the value to mint (amount of tokens) and be paid to signer
currency = scriptCurrencySymbol mintingScript
tokensToMint = Value.singleton currency fractionTokenName (mm_count + 1)
payBackTokens = mustPayToPubKey pkh tokensToMint
-- keep the minted amount and asset class in the datum
newDatum = currentDatum {totalFractions = currentFractions + mm_count}
-- preserve NFTs
valueToScript = valueOfTxs utxosAtValidator
redeemer = Just $ AddToken mm_sigs
--build the constraints and submit the transaction
validator = fractionNftValidatorInstance params
lookups =
Constraints.mintingPolicy mintingScript
<> Constraints.unspentOutputs utxosAtValidator
<> Constraints.typedValidatorLookups validator
tx =
Constraints.mustMintValueWithRedeemer emptyRedeemer tokensToMint
<> Constraints.mustPayToTheScript newDatum valueToScript
<> collectFromScript utxosAtValidator redeemer
<> payBackTokens
void $ mkTxConstraints @Fractioning lookups tx >>= submitTxConfirmed . adjustUnbalancedTx
Contract.logInfo @String $ printf "forged %s extra tokens, total %s " (show mm_count) (show $ currentFractions + mm_count)
try to take out an NFT while minting
mintTokensStealNft :: FractionNFTParameters -> (MintMore, AssetClass) -> Contract w FracNFTEvilSchema Text ()
mintTokensStealNft params (MintMore {mm_count, mm_sigs}, nftToSteal) = do
pay nft to contract
-- pay minted tokens back to signer
pkh <- Contract.ownPaymentPubKeyHash
utxosAtValidator <- utxosAt (fractionNftValidatorAddress params)
currentDatum@FractionNFTDatum {tokensClass, totalFractions = currentFractions} <- extractData $ snd $ head $ Map.toList utxosAtValidator
let fractionTokenName = snd $ unAssetClass tokensClass
--find the minting script instance
mintingScript = mintFractionTokensPolicy params fractionTokenName
stealValue = assetClassValue nftToSteal 1
-- define the value to mint (amount of tokens) and be paid to signer
currency = scriptCurrencySymbol mintingScript
tokensToMint = Value.singleton currency fractionTokenName (mm_count)
1 that the wallet already has + 1 in the contract
-- keep the minted amount and asset class in the datum
newDatum = currentDatum {totalFractions = currentFractions + mm_count}
-- preserve NFTs
valueToScript = unionWith (-) (valueOfTxs utxosAtValidator) (assetClassValue nftToSteal 1)
redeemer = Just $ AddToken mm_sigs
--build the constraints and submit the transaction
validator = fractionNftValidatorInstance params
lookups =
Constraints.mintingPolicy mintingScript
<> Constraints.unspentOutputs utxosAtValidator
<> Constraints.typedValidatorLookups validator
tx =
Constraints.mustMintValueWithRedeemer emptyRedeemer tokensToMint
<> Constraints.mustPayToTheScript newDatum valueToScript
<> collectFromScript utxosAtValidator redeemer
<> payBackTokens
void $ mkTxConstraints @Fractioning lookups tx >>= submitTxConfirmed . adjustUnbalancedTx
Contract.logInfo @String $ printf "forged %s extra tokens, total %s " (show mm_count) (show $ currentFractions + mm_count)
# INLINEABLE anyMintScript #
anyMintScript :: () -> ScriptContext -> Bool
anyMintScript _ _ = True
anyMintScriptPolicy :: MintingPolicy
anyMintScriptPolicy =
mkMintingPolicyScript $
$$(PlutusTx.compile [||Scripts.wrapMintingPolicy $ anyMintScript||])
anyMintCurSymbol :: CurrencySymbol
anyMintCurSymbol = scriptCurrencySymbol $ anyMintScriptPolicy
Mint another asset class at the same time
mintVariousTokens :: FractionNFTParameters -> MintMore -> Contract w FracNFTEvilSchema Text ()
mintVariousTokens params MintMore {mm_count, mm_sigs} = do
pay nft to contract
-- pay minted tokens back to signer
pkh <- Contract.ownPaymentPubKeyHash
utxosAtValidator <- utxosAt (fractionNftValidatorAddress params)
currentDatum@FractionNFTDatum {tokensClass, totalFractions = currentFractions} <- extractData $ snd $ head $ Map.toList utxosAtValidator
let fractionTokenName = snd $ unAssetClass tokensClass
--find the minting script instance
mintingScript = mintFractionTokensPolicy params fractionTokenName
-- define the value to mint (amount of tokens) and be paid to signer
currency = scriptCurrencySymbol mintingScript
tokensToMint = Value.singleton currency fractionTokenName mm_count
extraTokens = Value.singleton anyMintCurSymbol "EXTRA" 10
payBackTokens = mustPayToPubKey pkh tokensToMint
-- keep the minted amount and asset class in the datum
newDatum = currentDatum {totalFractions = currentFractions + mm_count}
-- preserve NFTs
valueToScript = valueOfTxs utxosAtValidator
redeemer = Just $ AddToken mm_sigs
--build the constraints and submit the transaction
validator = fractionNftValidatorInstance params
lookups =
Constraints.mintingPolicy mintingScript
<> Constraints.mintingPolicy anyMintScriptPolicy
<> Constraints.unspentOutputs utxosAtValidator
<> Constraints.typedValidatorLookups validator
tx =
Constraints.mustMintValueWithRedeemer emptyRedeemer tokensToMint
<> Constraints.mustMintValueWithRedeemer (Redeemer $ toBuiltinData ()) extraTokens
<> Constraints.mustPayToTheScript newDatum valueToScript
<> collectFromScript utxosAtValidator redeemer
<> payBackTokens
void $ mkTxConstraints @Fractioning lookups tx >>= submitTxConfirmed . adjustUnbalancedTx
Contract.logInfo @String $ printf "forged %s extra tokens, total %s " (show mm_count) (show $ currentFractions + mm_count)
-- Add NFT without updating the datum
addNFTNoDatumUpd :: FractionNFTParameters -> AddNFT -> Contract w FracNFTEvilSchema Text ()
addNFTNoDatumUpd params AddNFT {an_asset, an_sigs} = do
utxosAtValidator <- utxosAt (fractionNftValidatorAddress params)
value of NFT
valueToScript = (valueOfTxs utxosAtValidator) <> assetClassValue an_asset 1
nftTx = snd . head $ Map.toList utxosAtValidator
previousDatum <- extractData nftTx
let --update datum incrementing the count of nfts
updatedDatum = previousDatum
redeemer = Just $ AddToken an_sigs
validatorScript = fractionNftValidatorInstance params
tx = collectFromScript utxosAtValidator redeemer <> mustPayToTheScript updatedDatum valueToScript
void $ submitTxConstraintsSpending validatorScript utxosAtValidator tx
Contract.logInfo @String $ printf "added new NFT %s" (show an_asset)
-- Keep an NFT in the contract after burning all fractional tokens
partialReturn :: FractionNFTParameters -> () -> Contract w FracNFTEvilSchema Text ()
partialReturn params@FractionNFTParameters {initTokenClass} _ = do
pay nft to signer
-- burn tokens
pkh <- Contract.ownPaymentPubKeyHash
utxos' <- utxosAt (fractionNftValidatorAddress params)
declare the NFT value
valueToWallet = assetClassValue initTokenClass 1
find the that has the NFT we 're looking for
(nftRef, nftTx) = head $ Map.toList utxos'
-- use the auxiliary extractData function to get the datum content
currentDatum@FractionNFTDatum {tokensClass, totalFractions, newNftClass} <- extractData nftTx
assuming that all the fraction tokens are in the owner 's ` ownPubkey ` address . For tracing it is good enough ,
-- though for real-use-cases it is more nuanced, as the owner can have them on different
-- UTxOs.
futxos <- utxosAt (pubKeyHashAddress pkh Nothing)
let tokensAsset = AssetClass (tokensCurrency, fractionTokenName)
fracTokenUtxos = Map.filter (\v -> assetClassValueOf (_ciTxOutValue v) tokensAsset > 0) futxos
-- declare the fractional tokens to burn
(_, fractionTokenName) = unAssetClass tokensClass
tokensCurrency = curSymbol params fractionTokenName
amountToBurn = negate totalFractions
tokensToBurn = Value.singleton tokensCurrency fractionTokenName amountToBurn
nothingRedeemer = Nothing :: Maybe AddToken
valueKept = assetClassValue newNftClass 1
-- build the constraints and submit
validator = fractionNftValidatorInstance params
lookups =
Constraints.mintingPolicy (mintFractionTokensPolicy params fractionTokenName)
<> Constraints.unspentOutputs utxos'
<> Constraints.unspentOutputs fracTokenUtxos
<> Constraints.ownPaymentPubKeyHash pkh
<> Constraints.typedValidatorLookups validator
tx =
Constraints.mustMintValueWithRedeemer emptyRedeemer tokensToBurn
<> Constraints.mustSpendScriptOutput nftRef (Redeemer $ toBuiltinData nothingRedeemer)
<> Constraints.mustPayToPubKey pkh valueToWallet
<> Constraints.mustPayToTheScript currentDatum valueKept
void $ mkTxConstraints @Fractioning lookups tx >>= submitTxConfirmed . adjustUnbalancedTx
Contract.logInfo @String $ printf "burnt %s" (show totalFractions)
returnNoNFT :: FractionNFTParameters -> () -> Contract w FracNFTEvilSchema Text ()
returnNoNFT params _ = do
pay nft to signer
-- burn tokens
pkh <- Contract.ownPaymentPubKeyHash
utxos' <- utxosAt (fractionNftValidatorAddress params)
find the that has the NFT we 're looking for
nftTx = snd $ head $ Map.toList utxos'
-- use the auxiliary extractData function to get the datum content
FractionNFTDatum {tokensClass, totalFractions} <- extractData nftTx
assuming that all the fraction tokens are in the owner 's ` ownPubkey ` address . For tracing it is good enough ,
-- though for real-use-cases it is more nuanced, as the owner can have them on different
-- UTxOs.
futxos <- utxosAt (pubKeyHashAddress pkh Nothing)
let tokensAsset = AssetClass (tokensCurrency, fractionTokenName)
fracTokenUtxos = Map.filter (\v -> assetClassValueOf (_ciTxOutValue v) tokensAsset > 0) futxos
-- declare the fractional tokens to burn
(_, fractionTokenName) = unAssetClass tokensClass
tokensCurrency = curSymbol params fractionTokenName
amountToBurn = negate totalFractions
tokensToBurn = Value.singleton tokensCurrency fractionTokenName amountToBurn
-- build the constraints and submit
validator = fractionValidatorScript params
lookups =
Constraints.mintingPolicy (mintFractionTokensPolicy params fractionTokenName)
<> Constraints.otherScript validator
<> Constraints.unspentOutputs utxos'
<> Constraints.unspentOutputs fracTokenUtxos
<> Constraints.ownPaymentPubKeyHash pkh
tx = Constraints.mustMintValueWithRedeemer emptyRedeemer tokensToBurn
-- <>
-- Constraints.mustSpendScriptOutput nftRef ( Redeemer $ toBuiltinData emptyRedeemer ) <>
-- Constraints.mustPayToPubKey pkh valueToWallet
void $ mkTxConstraints @Void lookups tx >>= submitTxConfirmed . adjustUnbalancedTx
Contract.logInfo @String $ printf "burnt %s" (show totalFractions)
type FracNFTEvilSchema =
Endpoint "extraFractionNFT" ToFraction
.\/ Endpoint "mintTokensNoNFT" ToFraction
.\/ Endpoint "returnNFTNoFrac" Integer
.\/ Endpoint "addMoreNFT" AddNFT
.\/ Endpoint "mintExtraTokens" MintMore
.\/ Endpoint "mintTokensStealNft" (MintMore, AssetClass)
.\/ Endpoint "mintVariousTokens" MintMore
.\/ Endpoint "addNFTNoDatumUpd" AddNFT
.\/ Endpoint "partialReturn" ()
.\/ Endpoint "returnNoNFT" ()
endpoints :: FractionNFTParameters -> Contract () FracNFTEvilSchema Text ()
endpoints params =
forever $
handleError logError $
awaitPromise $
extraFractionNFT' `select` mintTokensNoNFT' `select` burn' `select` addNFT' `select` mintMoreTokens'
`select` mintSteal'
`select` mintVariousTokens'
`select` addNFTNoDatumUpd'
`select` partialReturn'
`select` returnNoNFT'
where
extraFractionNFT' = endpoint @"extraFractionNFT" $ extraFractionNFT params
mintTokensNoNFT' = endpoint @"mintTokensNoNFT" $ mintTokensNoNFT params
burn' = endpoint @"returnNFTNoFrac" $ returnNFTNoFrac params
addNFT' = endpoint @"addMoreNFT" $ addMoreNFT params
mintMoreTokens' = endpoint @"mintExtraTokens" $ mintExtraTokens params
mintSteal' = endpoint @"mintTokensStealNft" $ mintTokensStealNft params
mintVariousTokens' = endpoint @"mintVariousTokens" $ mintVariousTokens params
addNFTNoDatumUpd' = endpoint @"addNFTNoDatumUpd" $ addNFTNoDatumUpd params
partialReturn' = endpoint @"partialReturn" $ partialReturn params
returnNoNFT' = endpoint @"returnNoNFT" $ returnNoNFT params
reuse signature while adding a different nft
-- reuse signature while minting fract
| null | https://raw.githubusercontent.com/dcSpark/fracada-il-primo/0400e8f7d465d309d9638eb4a50eede2fed4effb/test/Spec/EvilEndpoints.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE DeriveAnyClass #
# LANGUAGE FlexibleContexts #
# LANGUAGE OverloadedStrings #
# LANGUAGE TypeOperators #
try to mint more than what's declared in the datum
pay minted tokens back to signer
find the minting script instance
define the value to mint (amount of tokens) and be paid to signer
keep the minted amount and asset class in the datum
build the constraints and submit the transaction
pay minted tokens back to signer
find the minting script instance
define the value to mint (amount of tokens) and be paid to signer
build the constraints and submit the transaction
ledgerTx <- submitTxConstraintsWith @Void lookups tx
burn tokens
use the auxiliary extractData function to get the datum content
though for real-use-cases it is more nuanced, as the owner can have them on different
UTxOs.
declare the fractional tokens to burn
build the constraints and submit
ledgerTx <- submitTxConstraintsWith @Void lookups tx
update datum incrementing the count of nfts
mint more fractional tokens than the ones signed
pay minted tokens back to signer
find the minting script instance
define the value to mint (amount of tokens) and be paid to signer
keep the minted amount and asset class in the datum
preserve NFTs
build the constraints and submit the transaction
pay minted tokens back to signer
find the minting script instance
define the value to mint (amount of tokens) and be paid to signer
keep the minted amount and asset class in the datum
preserve NFTs
build the constraints and submit the transaction
pay minted tokens back to signer
find the minting script instance
define the value to mint (amount of tokens) and be paid to signer
keep the minted amount and asset class in the datum
preserve NFTs
build the constraints and submit the transaction
Add NFT without updating the datum
update datum incrementing the count of nfts
Keep an NFT in the contract after burning all fractional tokens
burn tokens
use the auxiliary extractData function to get the datum content
though for real-use-cases it is more nuanced, as the owner can have them on different
UTxOs.
declare the fractional tokens to burn
build the constraints and submit
burn tokens
use the auxiliary extractData function to get the datum content
though for real-use-cases it is more nuanced, as the owner can have them on different
UTxOs.
declare the fractional tokens to burn
build the constraints and submit
<>
Constraints.mustSpendScriptOutput nftRef ( Redeemer $ toBuiltinData emptyRedeemer ) <>
Constraints.mustPayToPubKey pkh valueToWallet
reuse signature while minting fract | # LANGUAGE DeriveGeneric #
# LANGUAGE ImportQualifiedPost #
# LANGUAGE LambdaCase #
# LANGUAGE NoImplicitPrelude #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
module Spec.EvilEndpoints where
import Control.Monad hiding (fmap)
import qualified Data.Map as Map
import Data.Text (Text)
import Data.Void (Void)
import Fracada.Minting
import Fracada.Offchain
import Fracada.Validator
import Ledger hiding (singleton)
import Ledger.Constraints as Constraints
import qualified Ledger.Typed.Scripts as Scripts
import Ledger.Value as Value
import Plutus.Contract as Contract
import qualified PlutusTx
import PlutusTx.IsData
import PlutusTx.Prelude hiding (Semigroup (..), unless)
import Prelude (Semigroup (..), String, show)
import Text.Printf (printf)
extraFractionNFT :: FractionNFTParameters -> ToFraction -> Contract w FracNFTEvilSchema Text ()
extraFractionNFT params@FractionNFTParameters {initTokenClass, authorizedPubKeys} ToFraction {fractions, fractionTokenName} = do
pay nft to contract
pkh <- Contract.ownPaymentPubKeyHash
mintingScript = mintFractionTokensPolicy params fractionTokenName
currency = scriptCurrencySymbol mintingScript
tokensToMint = Value.singleton currency fractionTokenName fractions
moreTokensToMint = Value.singleton currency fractionTokenName (fractions + 100)
payBackTokens = mustPayToPubKey pkh tokensToMint
value of NFT
valueToScript = assetClassValue initTokenClass 1
fractionAsset = assetClass currency fractionTokenName
datum = Datum $ toBuiltinData FractionNFTDatum {tokensClass = fractionAsset, totalFractions = fractions, newNftClass = fractionAsset}
validator = fractionValidatorScript params
lookups =
Constraints.mintingPolicy mintingScript
<> Constraints.otherScript validator
tx =
Constraints.mustMintValueWithRedeemer emptyRedeemer moreTokensToMint
<> Constraints.mustPayToOtherScript (fractionNftValidatorHash params) datum valueToScript
<> payBackTokens
void $ mkTxConstraints @Void lookups tx >>= submitTxConfirmed . adjustUnbalancedTx
Contract.logInfo @String $ printf "forged %s for NFT %s" (show fractions) (show initTokenClass)
Contract.logInfo @String $ printf "pks %s" (show authorizedPubKeys)
mint fractional tokens without paying the initial NFT
mintTokensNoNFT :: FractionNFTParameters -> ToFraction -> Contract w FracNFTEvilSchema Text ()
mintTokensNoNFT params@FractionNFTParameters {initTokenClass} ToFraction {fractions, fractionTokenName} = do
pay nft to contract
pkh <- Contract.ownPaymentPubKeyHash
mintingScript = mintFractionTokensPolicy params fractionTokenName
currency = scriptCurrencySymbol mintingScript
tokensToMint = Value.singleton currency fractionTokenName fractions
payBackTokens = mustPayToPubKey pkh tokensToMint
validator = fractionValidatorScript params
lookups =
Constraints.mintingPolicy mintingScript
<> Constraints.otherScript validator
tx =
Constraints.mustMintValueWithRedeemer emptyRedeemer tokensToMint
<> payBackTokens
void $ mkTxConstraints @Void lookups tx >>= submitTxConfirmed . adjustUnbalancedTx
Contract.logInfo @String $ printf "forged %s for NFT %s" (show fractions) (show initTokenClass)
return the NFT without burning all the fractional tokens
returnNFTNoFrac :: FractionNFTParameters -> Integer -> Contract w FracNFTEvilSchema Text ()
returnNFTNoFrac params@FractionNFTParameters {initTokenClass} numberToBurn = do
pay nft to signer
pkh <- Contract.ownPaymentPubKeyHash
utxos' <- utxosAt (fractionNftValidatorAddress params)
declare the NFT value
valueToWallet = assetClassValue initTokenClass 1
find the that has the NFT we 're looking for
(nftRef, nftTx) = head $ Map.toList utxos'
FractionNFTDatum {tokensClass} <- extractData nftTx
assuming that all the fraction tokens are in the owner 's ` ownPubkey ` address . For tracing it is good enough ,
futxos <- utxosAt (pubKeyHashAddress pkh Nothing)
let tokensAsset = AssetClass (tokensCurrency, fractionTokenName)
fracTokenUtxos = Map.filter (\v -> assetClassValueOf (_ciTxOutValue v) tokensAsset > 0) futxos
(_, fractionTokenName) = unAssetClass tokensClass
tokensCurrency = curSymbol params fractionTokenName
amountToBurn = negate numberToBurn
tokensToBurn = Value.singleton tokensCurrency fractionTokenName amountToBurn
nothingRedeemer = Nothing :: Maybe AddToken
validator = fractionValidatorScript params
lookups =
Constraints.mintingPolicy (mintFractionTokensPolicy params fractionTokenName)
<> Constraints.otherScript validator
<> Constraints.unspentOutputs utxos'
<> Constraints.unspentOutputs fracTokenUtxos
<> Constraints.ownPaymentPubKeyHash pkh
tx =
Constraints.mustMintValueWithRedeemer emptyRedeemer tokensToBurn
<> Constraints.mustSpendScriptOutput nftRef (Redeemer $ toBuiltinData nothingRedeemer)
<> Constraints.mustPayToPubKey pkh valueToWallet
void $ mkTxConstraints @Void lookups tx >>= submitTxConfirmed . adjustUnbalancedTx
Contract.logInfo @String $ printf "burnt %s" (show amountToBurn)
add extra NFT than the ones signed
addMoreNFT :: FractionNFTParameters -> AddNFT -> Contract w FracNFTEvilSchema Text ()
addMoreNFT params AddNFT {an_asset, an_sigs} = do
utxosAtValidator <- utxosAt (fractionNftValidatorAddress params)
value of NFT
valueToScript = valueOfTxs utxosAtValidator <> assetClassValue an_asset 2
nftTx = snd . head $ Map.toList utxosAtValidator
previousDatum <- extractData nftTx
updatedDatum = previousDatum {newNftClass = an_asset}
redeemer = Just $ AddToken an_sigs
validatorScript = fractionNftValidatorInstance params
tx = collectFromScript utxosAtValidator redeemer <> mustPayToTheScript updatedDatum valueToScript
void $ submitTxConstraintsSpending validatorScript utxosAtValidator tx
Contract.logInfo @String $ printf "added new NFT %s" (show an_asset)
mintExtraTokens :: FractionNFTParameters -> MintMore -> Contract w FracNFTEvilSchema Text ()
mintExtraTokens params MintMore {mm_count, mm_sigs} = do
pay nft to contract
pkh <- Contract.ownPaymentPubKeyHash
utxosAtValidator <- utxosAt (fractionNftValidatorAddress params)
currentDatum@FractionNFTDatum {tokensClass, totalFractions = currentFractions} <- extractData $ snd $ head $ Map.toList utxosAtValidator
let fractionTokenName = snd $ unAssetClass tokensClass
mintingScript = mintFractionTokensPolicy params fractionTokenName
currency = scriptCurrencySymbol mintingScript
tokensToMint = Value.singleton currency fractionTokenName (mm_count + 1)
payBackTokens = mustPayToPubKey pkh tokensToMint
newDatum = currentDatum {totalFractions = currentFractions + mm_count}
valueToScript = valueOfTxs utxosAtValidator
redeemer = Just $ AddToken mm_sigs
validator = fractionNftValidatorInstance params
lookups =
Constraints.mintingPolicy mintingScript
<> Constraints.unspentOutputs utxosAtValidator
<> Constraints.typedValidatorLookups validator
tx =
Constraints.mustMintValueWithRedeemer emptyRedeemer tokensToMint
<> Constraints.mustPayToTheScript newDatum valueToScript
<> collectFromScript utxosAtValidator redeemer
<> payBackTokens
void $ mkTxConstraints @Fractioning lookups tx >>= submitTxConfirmed . adjustUnbalancedTx
Contract.logInfo @String $ printf "forged %s extra tokens, total %s " (show mm_count) (show $ currentFractions + mm_count)
try to take out an NFT while minting
mintTokensStealNft :: FractionNFTParameters -> (MintMore, AssetClass) -> Contract w FracNFTEvilSchema Text ()
mintTokensStealNft params (MintMore {mm_count, mm_sigs}, nftToSteal) = do
pay nft to contract
pkh <- Contract.ownPaymentPubKeyHash
utxosAtValidator <- utxosAt (fractionNftValidatorAddress params)
currentDatum@FractionNFTDatum {tokensClass, totalFractions = currentFractions} <- extractData $ snd $ head $ Map.toList utxosAtValidator
let fractionTokenName = snd $ unAssetClass tokensClass
mintingScript = mintFractionTokensPolicy params fractionTokenName
stealValue = assetClassValue nftToSteal 1
currency = scriptCurrencySymbol mintingScript
tokensToMint = Value.singleton currency fractionTokenName (mm_count)
1 that the wallet already has + 1 in the contract
newDatum = currentDatum {totalFractions = currentFractions + mm_count}
valueToScript = unionWith (-) (valueOfTxs utxosAtValidator) (assetClassValue nftToSteal 1)
redeemer = Just $ AddToken mm_sigs
validator = fractionNftValidatorInstance params
lookups =
Constraints.mintingPolicy mintingScript
<> Constraints.unspentOutputs utxosAtValidator
<> Constraints.typedValidatorLookups validator
tx =
Constraints.mustMintValueWithRedeemer emptyRedeemer tokensToMint
<> Constraints.mustPayToTheScript newDatum valueToScript
<> collectFromScript utxosAtValidator redeemer
<> payBackTokens
void $ mkTxConstraints @Fractioning lookups tx >>= submitTxConfirmed . adjustUnbalancedTx
Contract.logInfo @String $ printf "forged %s extra tokens, total %s " (show mm_count) (show $ currentFractions + mm_count)
# INLINEABLE anyMintScript #
anyMintScript :: () -> ScriptContext -> Bool
anyMintScript _ _ = True
anyMintScriptPolicy :: MintingPolicy
anyMintScriptPolicy =
mkMintingPolicyScript $
$$(PlutusTx.compile [||Scripts.wrapMintingPolicy $ anyMintScript||])
anyMintCurSymbol :: CurrencySymbol
anyMintCurSymbol = scriptCurrencySymbol $ anyMintScriptPolicy
Mint another asset class at the same time
mintVariousTokens :: FractionNFTParameters -> MintMore -> Contract w FracNFTEvilSchema Text ()
mintVariousTokens params MintMore {mm_count, mm_sigs} = do
pay nft to contract
pkh <- Contract.ownPaymentPubKeyHash
utxosAtValidator <- utxosAt (fractionNftValidatorAddress params)
currentDatum@FractionNFTDatum {tokensClass, totalFractions = currentFractions} <- extractData $ snd $ head $ Map.toList utxosAtValidator
let fractionTokenName = snd $ unAssetClass tokensClass
mintingScript = mintFractionTokensPolicy params fractionTokenName
currency = scriptCurrencySymbol mintingScript
tokensToMint = Value.singleton currency fractionTokenName mm_count
extraTokens = Value.singleton anyMintCurSymbol "EXTRA" 10
payBackTokens = mustPayToPubKey pkh tokensToMint
newDatum = currentDatum {totalFractions = currentFractions + mm_count}
valueToScript = valueOfTxs utxosAtValidator
redeemer = Just $ AddToken mm_sigs
validator = fractionNftValidatorInstance params
lookups =
Constraints.mintingPolicy mintingScript
<> Constraints.mintingPolicy anyMintScriptPolicy
<> Constraints.unspentOutputs utxosAtValidator
<> Constraints.typedValidatorLookups validator
tx =
Constraints.mustMintValueWithRedeemer emptyRedeemer tokensToMint
<> Constraints.mustMintValueWithRedeemer (Redeemer $ toBuiltinData ()) extraTokens
<> Constraints.mustPayToTheScript newDatum valueToScript
<> collectFromScript utxosAtValidator redeemer
<> payBackTokens
void $ mkTxConstraints @Fractioning lookups tx >>= submitTxConfirmed . adjustUnbalancedTx
Contract.logInfo @String $ printf "forged %s extra tokens, total %s " (show mm_count) (show $ currentFractions + mm_count)
addNFTNoDatumUpd :: FractionNFTParameters -> AddNFT -> Contract w FracNFTEvilSchema Text ()
addNFTNoDatumUpd params AddNFT {an_asset, an_sigs} = do
utxosAtValidator <- utxosAt (fractionNftValidatorAddress params)
value of NFT
valueToScript = (valueOfTxs utxosAtValidator) <> assetClassValue an_asset 1
nftTx = snd . head $ Map.toList utxosAtValidator
previousDatum <- extractData nftTx
updatedDatum = previousDatum
redeemer = Just $ AddToken an_sigs
validatorScript = fractionNftValidatorInstance params
tx = collectFromScript utxosAtValidator redeemer <> mustPayToTheScript updatedDatum valueToScript
void $ submitTxConstraintsSpending validatorScript utxosAtValidator tx
Contract.logInfo @String $ printf "added new NFT %s" (show an_asset)
partialReturn :: FractionNFTParameters -> () -> Contract w FracNFTEvilSchema Text ()
partialReturn params@FractionNFTParameters {initTokenClass} _ = do
pay nft to signer
pkh <- Contract.ownPaymentPubKeyHash
utxos' <- utxosAt (fractionNftValidatorAddress params)
declare the NFT value
valueToWallet = assetClassValue initTokenClass 1
find the that has the NFT we 're looking for
(nftRef, nftTx) = head $ Map.toList utxos'
currentDatum@FractionNFTDatum {tokensClass, totalFractions, newNftClass} <- extractData nftTx
assuming that all the fraction tokens are in the owner 's ` ownPubkey ` address . For tracing it is good enough ,
futxos <- utxosAt (pubKeyHashAddress pkh Nothing)
let tokensAsset = AssetClass (tokensCurrency, fractionTokenName)
fracTokenUtxos = Map.filter (\v -> assetClassValueOf (_ciTxOutValue v) tokensAsset > 0) futxos
(_, fractionTokenName) = unAssetClass tokensClass
tokensCurrency = curSymbol params fractionTokenName
amountToBurn = negate totalFractions
tokensToBurn = Value.singleton tokensCurrency fractionTokenName amountToBurn
nothingRedeemer = Nothing :: Maybe AddToken
valueKept = assetClassValue newNftClass 1
validator = fractionNftValidatorInstance params
lookups =
Constraints.mintingPolicy (mintFractionTokensPolicy params fractionTokenName)
<> Constraints.unspentOutputs utxos'
<> Constraints.unspentOutputs fracTokenUtxos
<> Constraints.ownPaymentPubKeyHash pkh
<> Constraints.typedValidatorLookups validator
tx =
Constraints.mustMintValueWithRedeemer emptyRedeemer tokensToBurn
<> Constraints.mustSpendScriptOutput nftRef (Redeemer $ toBuiltinData nothingRedeemer)
<> Constraints.mustPayToPubKey pkh valueToWallet
<> Constraints.mustPayToTheScript currentDatum valueKept
void $ mkTxConstraints @Fractioning lookups tx >>= submitTxConfirmed . adjustUnbalancedTx
Contract.logInfo @String $ printf "burnt %s" (show totalFractions)
returnNoNFT :: FractionNFTParameters -> () -> Contract w FracNFTEvilSchema Text ()
returnNoNFT params _ = do
pay nft to signer
pkh <- Contract.ownPaymentPubKeyHash
utxos' <- utxosAt (fractionNftValidatorAddress params)
find the that has the NFT we 're looking for
nftTx = snd $ head $ Map.toList utxos'
FractionNFTDatum {tokensClass, totalFractions} <- extractData nftTx
assuming that all the fraction tokens are in the owner 's ` ownPubkey ` address . For tracing it is good enough ,
futxos <- utxosAt (pubKeyHashAddress pkh Nothing)
let tokensAsset = AssetClass (tokensCurrency, fractionTokenName)
fracTokenUtxos = Map.filter (\v -> assetClassValueOf (_ciTxOutValue v) tokensAsset > 0) futxos
(_, fractionTokenName) = unAssetClass tokensClass
tokensCurrency = curSymbol params fractionTokenName
amountToBurn = negate totalFractions
tokensToBurn = Value.singleton tokensCurrency fractionTokenName amountToBurn
validator = fractionValidatorScript params
lookups =
Constraints.mintingPolicy (mintFractionTokensPolicy params fractionTokenName)
<> Constraints.otherScript validator
<> Constraints.unspentOutputs utxos'
<> Constraints.unspentOutputs fracTokenUtxos
<> Constraints.ownPaymentPubKeyHash pkh
tx = Constraints.mustMintValueWithRedeemer emptyRedeemer tokensToBurn
void $ mkTxConstraints @Void lookups tx >>= submitTxConfirmed . adjustUnbalancedTx
Contract.logInfo @String $ printf "burnt %s" (show totalFractions)
type FracNFTEvilSchema =
Endpoint "extraFractionNFT" ToFraction
.\/ Endpoint "mintTokensNoNFT" ToFraction
.\/ Endpoint "returnNFTNoFrac" Integer
.\/ Endpoint "addMoreNFT" AddNFT
.\/ Endpoint "mintExtraTokens" MintMore
.\/ Endpoint "mintTokensStealNft" (MintMore, AssetClass)
.\/ Endpoint "mintVariousTokens" MintMore
.\/ Endpoint "addNFTNoDatumUpd" AddNFT
.\/ Endpoint "partialReturn" ()
.\/ Endpoint "returnNoNFT" ()
endpoints :: FractionNFTParameters -> Contract () FracNFTEvilSchema Text ()
endpoints params =
forever $
handleError logError $
awaitPromise $
extraFractionNFT' `select` mintTokensNoNFT' `select` burn' `select` addNFT' `select` mintMoreTokens'
`select` mintSteal'
`select` mintVariousTokens'
`select` addNFTNoDatumUpd'
`select` partialReturn'
`select` returnNoNFT'
where
extraFractionNFT' = endpoint @"extraFractionNFT" $ extraFractionNFT params
mintTokensNoNFT' = endpoint @"mintTokensNoNFT" $ mintTokensNoNFT params
burn' = endpoint @"returnNFTNoFrac" $ returnNFTNoFrac params
addNFT' = endpoint @"addMoreNFT" $ addMoreNFT params
mintMoreTokens' = endpoint @"mintExtraTokens" $ mintExtraTokens params
mintSteal' = endpoint @"mintTokensStealNft" $ mintTokensStealNft params
mintVariousTokens' = endpoint @"mintVariousTokens" $ mintVariousTokens params
addNFTNoDatumUpd' = endpoint @"addNFTNoDatumUpd" $ addNFTNoDatumUpd params
partialReturn' = endpoint @"partialReturn" $ partialReturn params
returnNoNFT' = endpoint @"returnNoNFT" $ returnNoNFT params
reuse signature while adding a different nft
|
666b6b823b9c53ef744c4a75647e8fe8c829cc95a7d44c8eac7158ddd563c8a4 | charlieg/Sparser | review-results.lisp | ;;; -*- Mode:LISP; Syntax:Common-Lisp; Package:SPARSER -*-
copyright ( c ) 1994 -- all rights reserved
;;;
;;; File: "review results"
;;; Module: "interface;SUN:"
Version : December 1994
initiated 12/13/94
(in-package :sparser)
| null | https://raw.githubusercontent.com/charlieg/Sparser/b9bb7d01d2e40f783f3214fc104062db3d15e608/Sparser/code/s/interface/SUN/review-results.lisp | lisp | -*- Mode:LISP; Syntax:Common-Lisp; Package:SPARSER -*-
File: "review results"
Module: "interface;SUN:" | copyright ( c ) 1994 -- all rights reserved
Version : December 1994
initiated 12/13/94
(in-package :sparser)
|
e974870f0cbd615a2ef3d9f47641e9f061386150f537e5a98048c98f237bcc00 | shiguredo/swidden | swidden_middleware.erl | -module(swidden_middleware).
-export([failure/2, failure/3]).
failure(Req, Type) when is_binary(Type) ->
cowboy_req:reply(400, #{<<"content-type">> => <<"application/json">>},
jsone:encode(#{error_type => Type}), Req).
failure(Req, Type, Reason) when is_binary(Type) andalso is_map(Reason) ->
cowboy_req:reply(400, #{<<"content-type">> => <<"application/json">>},
jsone:encode(#{error_type => Type, error_reason => Reason}), Req).
| null | https://raw.githubusercontent.com/shiguredo/swidden/faec93ae6b6c9e59840f0df22c5f72e381b54c02/src/swidden_middleware.erl | erlang | -module(swidden_middleware).
-export([failure/2, failure/3]).
failure(Req, Type) when is_binary(Type) ->
cowboy_req:reply(400, #{<<"content-type">> => <<"application/json">>},
jsone:encode(#{error_type => Type}), Req).
failure(Req, Type, Reason) when is_binary(Type) andalso is_map(Reason) ->
cowboy_req:reply(400, #{<<"content-type">> => <<"application/json">>},
jsone:encode(#{error_type => Type, error_reason => Reason}), Req).
| |
b5a27e8bf567452aa1eaf6eb0140059b898ee669f0f5267e60253cadc40f8b75 | ros/roslisp_common | comm-state-machine.lisp | Copyright ( c ) 2014 , < >
;;; All rights reserved.
;;;
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions are met:
;;;
;;; * Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;; * Redistributions in binary form must reproduce the above copyright
;;; notice, this list of conditions and the following disclaimer in the
;;; documentation and/or other materials provided with the distribution.
* Neither the name of the Institute for Artificial Intelligence/
Universitaet Bremen nor the names of its contributors may be used to
;;; endorse or promote products derived from this software without specific
;;; prior written permission.
;;;
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS "
;;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER OR
LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
;;; CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
;;; SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN
;;; CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
;;; ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
;;; POSSIBILITY OF SUCH DAMAGE.
(in-package :actionlib-lisp)
(defparameter *states*
State Signal Target - State
(make-states '((:done ())
(:waiting-for-goal-ack (:cancel-goal :waiting-for-cancel-ack
:pending :pending
:active :active
:recalling :recalling
:preempting :preempting
:rejected :waiting-for-result
:recalled :waiting-for-result
:preempted :waiting-for-result
:succeeded :waiting-for-result
:aborted :waiting-for-result
:receive :done
:lost :done))
(:pending (:cancel-goal :waiting-for-cancel-ack
:active :active
:recalling :recalling
:preempting :preempting
:rejected :waiting-for-result
:recalled :waiting-for-result
:preempted :waiting-for-result
:succeeded :waiting-for-result
:aborted :waiting-for-result
:receive :done
:lost :done))
(:active (:cancel-goal :waiting-for-cancel-ack
:preempting :preempting
:preempted :waiting-for-result
:succeeded :waiting-for-result
:aborted :waiting-for-result
:receive :done
:lost :done))
(:waiting-for-cancel-ack (:recalling :recalling
:preempting :preempting
:rejected :waiting-for-result
:recalled :waiting-for-result
:preempted :waiting-for-result
:succeeded :waiting-for-result
:aborted :waiting-for-result
:receive :done
:lost :done))
(:recalling (:preempting :preempting
:rejected :waiting-for-result
:recalled :waiting-for-result
:preempted :waiting-for-result
:succeeded :waiting-for-result
:aborted :waiting-for-result
:receive :done
:lost :done))
(:preempting (:preempted :waiting-for-result
:succeeded :waiting-for-result
:aborted :waiting-for-result
:receive :done
:lost :done))
(:waiting-for-result (:receive :done
:lost :done)))))
(defclass comm-state-machine ()
((stm :initform (make-instance 'state-machine
:current-state (getf *states* :waiting-for-goal-ack)
:states *states*)
:accessor stm
:documentation "Manages the state and statetransitions.")
(goal-id :initarg :goal-id
:reader goal-id)
(start-time :initform (ros-time)
:accessor start-time)
(transition-cb :initarg :transition-cb
:initform nil
:accessor transition-cb)
(feedback-cb :initarg :feedback-cb
:initform nil
:accessor feedback-cb)
(send-cancel-fn :initarg :send-cancel-fn
:reader send-cancel-fn)
(latest-goal-status :initform :pending
:accessor latest-goal-status)
(latest-result :initform nil
:accessor latest-result)
(latest-feedback :initform nil
:accessor latest-feedback)
(lost-ctr :initform 0
:accessor lost-ctr)
(csm-mutex :initform (make-mutex :name (string (gensym "csm-lock")))
:reader csm-mutex))
(:documentation "Monitors the state of the communication between action-client
and the server for one goal and executes the callbacks."))
(defgeneric transition-to (csm signal)
(:documentation "Processes the signal and executes the transition-callback if
necessary"))
(defgeneric update-status (csm status)
(:documentation "Updates the state with the given status."))
(defgeneric update-result (csm action-result)
(:documentation "Updates the state with the given result."))
(defgeneric update-feedback (csm action-feedback)
(:documentation "Updates the state with the given feedback and executes the
feedback callback."))
(defgeneric comm-state (goal-handle)
(:documentation "Returns the state of the goal's communication
state machine."))
;;; Implementation
(defmethod transition-to ((csm comm-state-machine) signal)
"Tranists to the next state given the signal and calls the
transition-callback. If the result was processed before the
last status update the transition-callback gets called even
if the state-machine doesn't change"
(when (and (or (eql (name (get-current-state (stm csm))) :done)
(process-signal (stm csm) signal))
(transition-cb csm))
(funcall (transition-cb csm))))
(defmethod update-status ((csm comm-state-machine) status)
"If the status is not equal to the last status the comm-state-machine
gets updated with the new status"
(with-recursive-lock ((csm-mutex csm))
(unless (eql status :lost)
(setf (lost-ctr csm) 0))
(when (get-next-state (stm csm) status)
(setf (latest-goal-status csm) status))
(transition-to csm status)))
(defmethod update-result ((csm comm-state-machine) action-result)
"Updates the result of the comm-state-machine"
(with-recursive-lock ((csm-mutex csm))
(setf (latest-result csm) action-result)
(transition-to csm :receive)))
(defmethod update-feedback ((csm comm-state-machine) action-feedback)
"Updates the latest feedback of the comm-state-machine and calls
the feedback-callback"
(with-recursive-lock ((csm-mutex csm))
(setf (latest-feedback csm) action-feedback))
(if (feedback-cb csm)
(funcall (feedback-cb csm) action-feedback)))
(defmethod comm-state ((csm comm-state-machine))
"Returns the name of the current state of the comm-state-machine
as a symbol"
(name (get-current-state (stm csm))))
| null | https://raw.githubusercontent.com/ros/roslisp_common/4db311da26497d84a147f190200e50c7a5b4106e/actionlib_lisp/src/new_implementation/comm-state-machine.lisp | lisp | All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
endorse or promote products derived from this software without specific
prior written permission.
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
Implementation | Copyright ( c ) 2014 , < >
* Neither the name of the Institute for Artificial Intelligence/
Universitaet Bremen nor the names of its contributors may be used to
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS "
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER OR
LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN
(in-package :actionlib-lisp)
(defparameter *states*
State Signal Target - State
(make-states '((:done ())
(:waiting-for-goal-ack (:cancel-goal :waiting-for-cancel-ack
:pending :pending
:active :active
:recalling :recalling
:preempting :preempting
:rejected :waiting-for-result
:recalled :waiting-for-result
:preempted :waiting-for-result
:succeeded :waiting-for-result
:aborted :waiting-for-result
:receive :done
:lost :done))
(:pending (:cancel-goal :waiting-for-cancel-ack
:active :active
:recalling :recalling
:preempting :preempting
:rejected :waiting-for-result
:recalled :waiting-for-result
:preempted :waiting-for-result
:succeeded :waiting-for-result
:aborted :waiting-for-result
:receive :done
:lost :done))
(:active (:cancel-goal :waiting-for-cancel-ack
:preempting :preempting
:preempted :waiting-for-result
:succeeded :waiting-for-result
:aborted :waiting-for-result
:receive :done
:lost :done))
(:waiting-for-cancel-ack (:recalling :recalling
:preempting :preempting
:rejected :waiting-for-result
:recalled :waiting-for-result
:preempted :waiting-for-result
:succeeded :waiting-for-result
:aborted :waiting-for-result
:receive :done
:lost :done))
(:recalling (:preempting :preempting
:rejected :waiting-for-result
:recalled :waiting-for-result
:preempted :waiting-for-result
:succeeded :waiting-for-result
:aborted :waiting-for-result
:receive :done
:lost :done))
(:preempting (:preempted :waiting-for-result
:succeeded :waiting-for-result
:aborted :waiting-for-result
:receive :done
:lost :done))
(:waiting-for-result (:receive :done
:lost :done)))))
(defclass comm-state-machine ()
((stm :initform (make-instance 'state-machine
:current-state (getf *states* :waiting-for-goal-ack)
:states *states*)
:accessor stm
:documentation "Manages the state and statetransitions.")
(goal-id :initarg :goal-id
:reader goal-id)
(start-time :initform (ros-time)
:accessor start-time)
(transition-cb :initarg :transition-cb
:initform nil
:accessor transition-cb)
(feedback-cb :initarg :feedback-cb
:initform nil
:accessor feedback-cb)
(send-cancel-fn :initarg :send-cancel-fn
:reader send-cancel-fn)
(latest-goal-status :initform :pending
:accessor latest-goal-status)
(latest-result :initform nil
:accessor latest-result)
(latest-feedback :initform nil
:accessor latest-feedback)
(lost-ctr :initform 0
:accessor lost-ctr)
(csm-mutex :initform (make-mutex :name (string (gensym "csm-lock")))
:reader csm-mutex))
(:documentation "Monitors the state of the communication between action-client
and the server for one goal and executes the callbacks."))
(defgeneric transition-to (csm signal)
(:documentation "Processes the signal and executes the transition-callback if
necessary"))
(defgeneric update-status (csm status)
(:documentation "Updates the state with the given status."))
(defgeneric update-result (csm action-result)
(:documentation "Updates the state with the given result."))
(defgeneric update-feedback (csm action-feedback)
(:documentation "Updates the state with the given feedback and executes the
feedback callback."))
(defgeneric comm-state (goal-handle)
(:documentation "Returns the state of the goal's communication
state machine."))
(defmethod transition-to ((csm comm-state-machine) signal)
"Tranists to the next state given the signal and calls the
transition-callback. If the result was processed before the
last status update the transition-callback gets called even
if the state-machine doesn't change"
(when (and (or (eql (name (get-current-state (stm csm))) :done)
(process-signal (stm csm) signal))
(transition-cb csm))
(funcall (transition-cb csm))))
(defmethod update-status ((csm comm-state-machine) status)
"If the status is not equal to the last status the comm-state-machine
gets updated with the new status"
(with-recursive-lock ((csm-mutex csm))
(unless (eql status :lost)
(setf (lost-ctr csm) 0))
(when (get-next-state (stm csm) status)
(setf (latest-goal-status csm) status))
(transition-to csm status)))
(defmethod update-result ((csm comm-state-machine) action-result)
"Updates the result of the comm-state-machine"
(with-recursive-lock ((csm-mutex csm))
(setf (latest-result csm) action-result)
(transition-to csm :receive)))
(defmethod update-feedback ((csm comm-state-machine) action-feedback)
"Updates the latest feedback of the comm-state-machine and calls
the feedback-callback"
(with-recursive-lock ((csm-mutex csm))
(setf (latest-feedback csm) action-feedback))
(if (feedback-cb csm)
(funcall (feedback-cb csm) action-feedback)))
(defmethod comm-state ((csm comm-state-machine))
"Returns the name of the current state of the comm-state-machine
as a symbol"
(name (get-current-state (stm csm))))
|
033fc2259aa5767f8480df0d4f1427c1c110f2e675873a5fa5df97c8c2852fdf | racket/typed-racket | sealing-contract-2.rkt | #;
(exn-pred #rx"superclass already contains")
#lang racket/base
(module u racket
;; adds m instead of n like the spec says
(define (mixin cls)
(class cls
(super-new)
(define/public (m x) x)))
(provide mixin))
(module t typed/racket/optional
;; expects a mixin that adds n
(require/typed (submod ".." u)
[mixin
(All (r #:row)
(-> (Class #:row-var r)
(Class #:row-var r [n (-> Integer Integer)])))])
(mixin (class object%
(super-new)
(define/public (m x) x))))
(require 't)
| null | https://raw.githubusercontent.com/racket/typed-racket/1dde78d165472d67ae682b68622d2b7ee3e15e1e/typed-racket-test/fail/optional/sealing-contract-2.rkt | racket |
adds m instead of n like the spec says
expects a mixin that adds n | (exn-pred #rx"superclass already contains")
#lang racket/base
(module u racket
(define (mixin cls)
(class cls
(super-new)
(define/public (m x) x)))
(provide mixin))
(module t typed/racket/optional
(require/typed (submod ".." u)
[mixin
(All (r #:row)
(-> (Class #:row-var r)
(Class #:row-var r [n (-> Integer Integer)])))])
(mixin (class object%
(super-new)
(define/public (m x) x))))
(require 't)
|
d6ad18f51f5e77c150f1c4fa3bda1b92cbc70edd25353c198615dc1b5587e65c | advancedtelematic/quickcheck-state-machine | Parallel.hs | {-# LANGUAGE FlexibleContexts #-}
# LANGUAGE NamedFieldPuns #
{-# LANGUAGE Rank2Types #-}
# LANGUAGE ScopedTypeVariables #
-----------------------------------------------------------------------------
-- |
-- Module : Test.StateMachine.Parallel
Copyright : ( C ) 2017 , ATS Advanced Telematic Systems GmbH
-- License : BSD-style (see the file LICENSE)
--
Maintainer : < >
-- Stability : provisional
Portability : non - portable ( GHC extensions )
--
-- This module contains helpers for generating, shrinking, and checking
-- parallel programs.
--
-----------------------------------------------------------------------------
module Test.StateMachine.Parallel
( forAllNParallelCommands
, forAllParallelCommands
, generateNParallelCommands
, generateParallelCommands
, shrinkNParallelCommands
, shrinkParallelCommands
, shrinkAndValidateNParallel
, shrinkAndValidateParallel
, shrinkCommands'
, runNParallelCommands
, runParallelCommands
, runParallelCommands'
, runNParallelCommandsNTimes
, runParallelCommandsNTimes
, runNParallelCommandsNTimes'
, runParallelCommandsNTimes'
, executeParallelCommands
, linearise
, toBoxDrawings
, prettyNParallelCommands
, prettyParallelCommands
, prettyParallelCommandsWithOpts
, prettyNParallelCommandsWithOpts
, advanceModel
, checkCommandNamesParallel
, coverCommandNamesParallel
, commandNamesParallel
) where
import Control.Monad
(replicateM, when)
import Control.Monad.Catch
(MonadMask, mask, onException)
import Control.Monad.State.Strict
(runStateT)
import Data.Bifunctor
(bimap)
import Data.Foldable
(toList)
import Data.List
(find, partition, permutations)
import qualified Data.Map.Strict as Map
import Data.Maybe
(fromMaybe, mapMaybe)
import Data.Monoid
import Data.Set
(Set)
import qualified Data.Set as S
import Data.Tree
(Tree(Node))
import Prelude
import Test.QuickCheck
(Gen, Property, Testable, choose, forAllShrinkShow,
property, sized)
import Test.QuickCheck.Monadic
(PropertyM, run)
import Text.PrettyPrint.ANSI.Leijen
(Doc)
import Text.Show.Pretty
(ppShow)
import UnliftIO
(MonadIO, MonadUnliftIO, concurrently,
forConcurrently, newTChanIO)
import Test.StateMachine.BoxDrawer
import Test.StateMachine.ConstructorName
import Test.StateMachine.DotDrawing
import Test.StateMachine.Logic
import Test.StateMachine.Sequential
import Test.StateMachine.Types
import qualified Test.StateMachine.Types.Rank2 as Rank2
import Test.StateMachine.Utils
------------------------------------------------------------------------
forAllParallelCommands :: Testable prop
=> (Show (cmd Symbolic), Show (resp Symbolic), Show (model Symbolic))
=> (Rank2.Traversable cmd, Rank2.Foldable resp)
=> StateMachine model cmd m resp
-> Maybe Int
-> (ParallelCommands cmd resp -> prop) -- ^ Predicate.
-> Property
forAllParallelCommands sm mminSize =
forAllShrinkShow (generateParallelCommands sm mminSize) (shrinkParallelCommands sm) ppShow
forAllNParallelCommands :: Testable prop
=> (Show (cmd Symbolic), Show (resp Symbolic), Show (model Symbolic))
=> (Rank2.Traversable cmd, Rank2.Foldable resp)
=> StateMachine model cmd m resp
-> Int -- ^ Number of threads
-> (NParallelCommands cmd resp -> prop) -- ^ Predicate.
-> Property
forAllNParallelCommands sm np =
forAllShrinkShow (generateNParallelCommands sm np) (shrinkNParallelCommands sm) ppShow
-- | Generate parallel commands.
--
Parallel commands are generated as follows . We begin by generating
sequential commands and then splitting this list in two at some index . The
-- first half will be used as the prefix.
--
The second half will be used to build suffixes . For example , starting from
-- the following sequential commands:
--
-- > [A, B, C, D, E, F, G, H, I]
--
We split it in two , giving us the prefix and the rest :
--
-- > prefix: [A, B]
-- > rest: [C, D, E, F, G, H, I]
--
-- We advance the model with the prefix.
--
-- __Make a suffix__: we take commands from @rest@ as long as these are
-- parallel safe (see 'parallelSafe'). This means that the pre-conditions
( using the \'advanced\ ' model ) of each of those commands will hold no
-- matter in which order they are executed.
--
Say this is true for @[C , D , E]@ , but not anymore for @F@ , maybe because
@F@ depends on one of @[C , D , Then we divide this \'chunk\ ' in two by
splitting it in the middle , obtaining @[C]@ and @[D , These two halves
-- of the chunk (stored as a 'Pair') will later be executed in parallel.
-- Together they form one suffix.
--
Then the model is advanced using the whole chunk @[C , D , Think of it
as a barrier after executing the two halves of the chunk in parallel . Then
-- this process of building a chunk/suffix repeats itself, starting from
_ _ Make a suffix _ _ using the \'advanced\ ' model .
--
-- In the end we might end up with something like this:
--
> ┌ ─ [ C ] ─ ─ ┐ ┌ [ F , G ] ┐
-- > [A, B] ─┤ ├──┤ │
> [ D , E ] ┘ [ H , I ] ┘
--
generateParallelCommands :: forall model cmd m resp. Rank2.Foldable resp
                         => Show (model Symbolic)
                         => (Show (cmd Symbolic), Show (resp Symbolic))
                         => StateMachine model cmd m resp
                         -> Maybe Int  -- ^ Minimum length passed on to 'generateCommands'.
                         -> Gen (ParallelCommands cmd resp)
generateParallelCommands sm@StateMachine { initModel } mminSize  = do
  Commands cmds      <- generateCommands sm mminSize
  -- Keep the sequential prefix short (at most a third of the size
  -- parameter) so that most commands end up in the parallel suffixes.
  prefixLength       <- sized (\k -> choose (0, k `div` 3))
  let (prefix, rest) =  bimap Commands Commands (splitAt prefixLength cmds)
  return (ParallelCommands prefix
            (makeSuffixes (advanceModel sm initModel prefix) rest))
  where
    -- Chop the remaining commands into 'Pair's of parallel-safe halves;
    -- see the haddock above for a worked example.
    makeSuffixes :: model Symbolic -> Commands cmd resp -> [Pair (Commands cmd resp)]
    makeSuffixes model0 = go model0 [] . unCommands
      where
        go _     acc []   = reverse acc
        go model acc cmds = go (advanceModel sm model (Commands safe))
                               (Pair (Commands safe1) (Commands safe2) : acc)
                               rest
          where
            -- Longest parallel-safe chunk taken from the front of @cmds@.
            (safe, rest)   = spanSafe sm model [] cmds
            -- Split the chunk down the middle; the halves run in parallel.
            (safe1, safe2) = splitAt (length safe `div` 2) safe
-- Split the list of commands in two such that the first half is a
-- list of commands for which the preconditions of all commands hold
-- for every permutation of the list, i.e. it is parallel safe. The
-- other half is the remainder of the input list.
spanSafe :: Rank2.Foldable resp
         => StateMachine model cmd m resp
         -> model Symbolic -> [Command cmd resp] -> [Command cmd resp]
         -> ([Command cmd resp], [Command cmd resp])
spanSafe sm model = go
  where
    -- @taken@ accumulates the safe prefix in reverse; the cap of six
    -- commands keeps the permutation check in 'parallelSafe' tractable.
    go taken []                = (reverse taken, [])
    go taken rest@(cmd : cmds)
      | length taken <= 5
      , parallelSafe sm model (Commands (cmd : taken))
      = go (cmd : taken) cmds
      | otherwise
      = (reverse taken, rest)
-- Generate parallel commands. The length of each suffix indicates how many
-- threads can concurrently execute the commands safely.
generateNParallelCommands :: forall model cmd m resp. Rank2.Foldable resp
                          => Show (model Symbolic)
                          => (Show (cmd Symbolic), Show (resp Symbolic))
                          => StateMachine model cmd m resp
                          -> Int  -- ^ Number of threads; must be positive.
                          -> Gen (NParallelCommands cmd resp)
generateNParallelCommands sm@StateMachine { initModel } np =
  if np <= 0 then error "number of threads must be positive" else do
    Commands cmds      <- generateCommands sm Nothing
    -- Keep the sequential prefix short (at most a third of the size
    -- parameter) so that most commands end up in the parallel suffixes.
    prefixLength       <- sized (\k -> choose (0, k `div` 3))
    let (prefix, rest) =  bimap Commands Commands (splitAt prefixLength cmds)
    return (ParallelCommands prefix
              (makeSuffixes (advanceModel sm initModel prefix) rest))
  where
    -- Chop the remaining commands into lists of @np@ parallel-safe chunks;
    -- each inner list is one suffix, executed with one chunk per thread.
    makeSuffixes :: model Symbolic -> Commands cmd resp -> [[(Commands cmd resp)]]
    makeSuffixes model0 = go model0 [] . unCommands
      where
        go :: model Symbolic
           -> [[(Commands cmd resp)]]  -- accumulated suffixes (in reverse order)
           -> [(Command cmd resp)]     -- commands not yet placed in a suffix
           -> [[(Commands cmd resp)]]
        go _     acc []   = reverse acc
        go model acc cmds = go (advanceModel sm model (Commands safe))
                               (safes : acc)
                               rest
          where
            -- Longest parallel-safe chunk taken from the front of @cmds@.
            (safe, rest) = spanSafe sm model [] cmds
            -- Divide the chunk evenly over the @np@ threads.
            safes = Commands <$> chunksOf np (length safe `div` np) safe
    -- Split the list into n sublists whose concatenation is the initial
    -- list. We try to keep the length of each sublist equal to len.
    --
    -- It is important that we miss no elements here, or else executeCommands
    -- may fail because of missing references. It is also important that the
    -- final list has the correct length n, or else there will be a different
    -- number of threads than the user specified.
chunksOf :: Int -> Int -> [a] -> [[a]]
chunksOf 1 _ xs = [xs]
chunksOf n len xs = as : chunksOf (n-1) len bs
where (as, bs) = splitAt len xs
-- | A list of commands is parallel safe if the pre-conditions for all commands
-- hold in all permutations of the list.
parallelSafe :: Rank2.Foldable resp
             => StateMachine model cmd m resp -> model Symbolic
             -> Commands cmd resp -> Bool
parallelSafe StateMachine { precondition, transition, mock } model0
  = all (preconditionsHold model0)
  . permutations
  . unCommands
  where
    -- Check the pre-conditions along one particular ordering of the
    -- commands, advancing the model as we go.
    preconditionsHold _     []                             = True
    preconditionsHold model (Command cmd resp vars : cmds) =
        boolean (precondition model cmd) &&
        preconditionsHold (transition model cmd resp) cmds &&
        -- This makes sure that in all permutations the length of variables created is the same.
        -- By doing so, we try to avoid MockSemanticsMismatch errors.
        -- More info: see pull request #348 of the quickcheck-state-machine repository.
        length vars == length (getUsedVars $ fst $ runGenSym (mock model cmd) newCounter)
-- | Apply the transition of some commands to a model.
advanceModel :: StateMachine model cmd m resp
             -> model Symbolic     -- ^ The model.
             -> Commands cmd resp  -- ^ The commands.
             -> model Symbolic
advanceModel StateMachine { transition } initial cmds0 =
  walk initial (unCommands cmds0)
  where
    -- Fold the transition function over the commands, ignoring the
    -- variables each command bound.
    walk model []                              = model
    walk model (Command cmd resp _vars : rest) =
      walk (transition model cmd resp) rest
------------------------------------------------------------------------
-- | Shrink a parallel program in a pre-condition and scope respecting
-- way.
shrinkParallelCommands
  :: forall cmd model m resp. Rank2.Traversable cmd
  => Rank2.Foldable resp
  => StateMachine model cmd m resp
  -> (ParallelCommands cmd resp -> [ParallelCommands cmd resp])
shrinkParallelCommands sm (ParallelCommands prefix suffixes)
  = concatMap go
      [ Shrunk s (ParallelCommands prefix' (map toPair suffixes'))
      | Shrunk s (prefix', suffixes') <- shrinkPairS shrinkCommands' shrinkSuffixes
                                           (prefix, map fromPair suffixes)
      ]
    ++
    shrinkMoveSuffixToPrefix
  where
    -- Re-validate each structurally shrunk candidate.  The flag @s@ records
    -- whether the structural pass already removed something; if it did not,
    -- force the validation pass to shrink an individual command instead.
    go :: Shrunk (ParallelCommands cmd resp) -> [ParallelCommands cmd resp]
    go (Shrunk shrunk cmds) =
      shrinkAndValidateParallel sm
                                (if shrunk then DontShrink else MustShrink)
                                cmds

    -- Shrink the list of suffixes, shrinking both halves of each pair.
    shrinkSuffixes :: [(Commands cmd resp, Commands cmd resp)]
                   -> [Shrunk [(Commands cmd resp, Commands cmd resp)]]
    shrinkSuffixes = shrinkListS (shrinkPairS' shrinkCommands')

    -- Moving a command from a suffix to the prefix preserves validity
    shrinkMoveSuffixToPrefix :: [ParallelCommands cmd resp]
    shrinkMoveSuffixToPrefix = case suffixes of
      []                   -> []
      (suffix : suffixes') ->
        [ ParallelCommands (prefix <> Commands [prefix'])
                           (fmap Commands (toPair suffix') : suffixes')
        | (prefix', suffix') <- pickOneReturnRest2 (unCommands (proj1 suffix),
                                                    unCommands (proj2 suffix))
        ]
-- | Shrink a parallel program in a pre-condition and scope respecting
-- way.
shrinkNParallelCommands
  :: forall cmd model m resp. Rank2.Traversable cmd
  => Rank2.Foldable resp
  => StateMachine model cmd m resp
  -> (NParallelCommands cmd resp -> [NParallelCommands cmd resp])
shrinkNParallelCommands sm (ParallelCommands prefix suffixes)
  = concatMap go
      [ Shrunk s (ParallelCommands prefix' suffixes')
      | Shrunk s (prefix', suffixes') <- shrinkPairS shrinkCommands' shrinkSuffixes
                                           (prefix, suffixes)
      ]
    ++
    shrinkMoveSuffixToPrefix
  where
    -- Re-validate each structurally shrunk candidate.  The flag @s@ records
    -- whether the structural pass already removed something; if it did not,
    -- force the validation pass to shrink an individual command instead.
    go :: Shrunk (NParallelCommands cmd resp) -> [NParallelCommands cmd resp]
    go (Shrunk shrunk cmds) =
      shrinkAndValidateNParallel sm
                                 (if shrunk then DontShrink else MustShrink)
                                 cmds

    -- Shrink the list of suffixes, shrinking each chunk of each suffix.
    shrinkSuffixes :: [[Commands cmd resp]]
                   -> [Shrunk [[Commands cmd resp]]]
    shrinkSuffixes = shrinkListS (shrinkListS'' shrinkCommands')

    -- Moving a command from a suffix to the prefix preserves validity
    shrinkMoveSuffixToPrefix :: [NParallelCommands cmd resp]
    shrinkMoveSuffixToPrefix = case suffixes of
      []                   -> []
      (suffix : suffixes') ->
        [ ParallelCommands (prefix <> Commands [prefix'])
                           (fmap Commands suffix' : suffixes')
        | (prefix', suffix') <- pickOneReturnRestL (unCommands <$> suffix)
        ]
-- | Shrinks 'Commands' in a way that the result has strictly fewer commands.
shrinkCommands' :: Commands cmd resp -> [Shrunk (Commands cmd resp)]
shrinkCommands' (Commands cmds) =
  -- Rewrap every structural shrink of the underlying command list.
  [ Commands <$> shrunk | shrunk <- shrinkListS' cmds ]
-- | Shrink and validate a parallel program: the prefix and then each pair of
-- suffix halves are run through 'shrinkAndValidate', threading the
-- validation environment so references stay consistent, with at most one
-- individual command shrunk when 'MustShrink' is requested.
shrinkAndValidateParallel :: forall model cmd m resp. (Rank2.Traversable cmd, Rank2.Foldable resp)
                          => StateMachine model cmd m resp
                          -> ShouldShrink
                          -> ParallelCommands cmd resp
                          -> [ParallelCommands cmd resp]
shrinkAndValidateParallel sm@StateMachine { initModel } = \shouldShrink (ParallelCommands prefix suffixes) ->
    let env = initValidateEnv initModel
        curryGo shouldShrink' (env', prefix') = go prefix' env' shouldShrink' suffixes in
    case shouldShrink of
      DontShrink -> concatMap (curryGo DontShrink) (shrinkAndValidate sm DontShrink env prefix)
      -- MustShrink: either the prefix shrinks, or exactly one suffix does.
      MustShrink -> concatMap (curryGo DontShrink) (shrinkAndValidate sm MustShrink env prefix)
                 ++ concatMap (curryGo MustShrink) (shrinkAndValidate sm DontShrink env prefix)
  where
    go :: Commands cmd resp            -- validated prefix
       -> ValidateEnv model            -- environment after the prefix
       -> ShouldShrink                 -- should we /still/ shrink something?
       -> [Pair (Commands cmd resp)]   -- suffixes to validate
       -> [ParallelCommands cmd resp]
    go prefix' = go' []
      where
        go' :: [Pair (Commands cmd resp)]  -- accumulated validated suffixes (in reverse order)
            -> ValidateEnv model           -- environment after the validated suffixes
            -> ShouldShrink                -- should we /still/ shrink something?
            -> [Pair (Commands cmd resp)]  -- suffixes to validate
            -> [ParallelCommands cmd resp]
        go' _   _   MustShrink [] = []  -- Failed to shrink something
        go' acc _   DontShrink [] = [ParallelCommands prefix' (reverse acc)]
        go' acc env shouldShrink (Pair l r : suffixes) = do
          ((shrinkL, shrinkR), shrinkRest) <- shrinkOpts
          (envL, l') <- shrinkAndValidate sm shrinkL env                          l
          -- The right half runs against the same model as the left half, but
          -- must allocate fresh counters so references stay unique.
          (envR, r') <- shrinkAndValidate sm shrinkR (env `withCounterFrom` envL) r
          go' (Pair l' r' : acc) (combineEnv sm envL envR r') shrinkRest suffixes
          where
            -- All the ways to place the (at most one) shrink obligation:
            -- left half, right half, or the remaining suffixes.
            shrinkOpts :: [((ShouldShrink, ShouldShrink), ShouldShrink)]
            shrinkOpts =
              case shouldShrink of
                DontShrink -> [ ((DontShrink, DontShrink), DontShrink) ]
                MustShrink -> [ ((MustShrink, DontShrink), DontShrink)
                              , ((DontShrink, MustShrink), DontShrink)
                              , ((DontShrink, DontShrink), MustShrink) ]
-- | Merge the validation environments of two branches that ran in parallel:
-- advance the left model over the right branch's commands, union the
-- scopes, and keep the counter of the branch validated last.
combineEnv :: StateMachine model cmd m resp
           -> ValidateEnv model
           -> ValidateEnv model
           -> Commands cmd resp
           -> ValidateEnv model
combineEnv sm left right cmds = ValidateEnv
  { veModel   = advanceModel sm (veModel left) cmds
  , veScope   = veScope left `Map.union` veScope right
  , veCounter = veCounter right
  }
-- | Keep an environment but take the reference counter from another one.
withCounterFrom :: ValidateEnv model -> ValidateEnv model -> ValidateEnv model
withCounterFrom env donor = env { veCounter = veCounter donor }
-- | N-threaded analogue of 'shrinkAndValidateParallel': validate the prefix
-- and then every chunk of every suffix, threading counters and scopes so
-- references stay unique, shrinking at most one individual command when
-- 'MustShrink' is requested.
shrinkAndValidateNParallel :: forall model cmd m resp. (Rank2.Traversable cmd, Rank2.Foldable resp)
                           => StateMachine model cmd m resp
                           -> ShouldShrink
                           -> NParallelCommands cmd resp
                           -> [NParallelCommands cmd resp]
shrinkAndValidateNParallel sm = \shouldShrink (ParallelCommands prefix suffixes) ->
    let env = initValidateEnv $ initModel sm
        curryGo shouldShrink' (env', prefix') = go prefix' env' shouldShrink' suffixes in
    case shouldShrink of
      DontShrink -> concatMap (curryGo DontShrink) (shrinkAndValidate sm DontShrink env prefix)
      -- MustShrink: either the prefix shrinks, or exactly one suffix does.
      MustShrink -> concatMap (curryGo DontShrink) (shrinkAndValidate sm MustShrink env prefix)
                 ++ concatMap (curryGo MustShrink) (shrinkAndValidate sm DontShrink env prefix)
  where
    go :: Commands cmd resp        -- validated prefix
       -> ValidateEnv model        -- environment after the prefix
       -> ShouldShrink             -- should we /still/ shrink something?
       -> [[Commands cmd resp]]    -- suffixes to validate
       -> [NParallelCommands cmd resp]
    go prefix' = go' []
      where
        go' :: [[Commands cmd resp]]   -- accumulated validated suffixes (in reverse order)
            -> ValidateEnv model       -- environment after the validated suffixes
            -> ShouldShrink            -- should we /still/ shrink something?
            -> [[Commands cmd resp]]   -- suffixes to validate
            -> [NParallelCommands cmd resp]
        go' _   _   MustShrink [] = []  -- Failed to shrink something
        go' acc _   DontShrink [] = [ParallelCommands prefix' (reverse acc)]
        go' acc env shouldShrink (suffix : suffixes) = do
          (suffixWithShrinks, shrinkRest) <- shrinkOpts suffix
          (envFinal, suffix') <- snd $ foldl f (True, [(env,[])]) suffixWithShrinks
          go' ((reverse suffix') : acc) envFinal shrinkRest suffixes
          where
            -- Validate the chunks of one suffix left to right.  Every chunk
            -- is validated against the environment from /before/ the suffix
            -- (the chunks run in parallel), but counters and scopes are
            -- threaded through so references stay unique.
            f :: (Bool, [(ValidateEnv model, [Commands cmd resp])])
              -> (ShouldShrink, Commands cmd resp)
              -> (Bool, [(ValidateEnv model, [Commands cmd resp])])
            f (firstCall, acc') (shrink, cmds) = (False, acc'')
              where
                acc'' = do
                  (envPrev, cmdsPrev) <- acc'
                  let envUsed = if firstCall then env else env `withCounterFrom` envPrev
                  (env', cmd') <- shrinkAndValidate sm shrink envUsed cmds
                  let env'' = if firstCall then env' else
                                combineEnv sm envPrev env' cmd'
                  return (env'', cmd' : cmdsPrev)

            -- All the ways to place the (at most one) shrink obligation:
            -- on one of the suffix's chunks, or on the remaining suffixes.
            shrinkOpts :: [a] -> [([(ShouldShrink, a)], ShouldShrink)]
            shrinkOpts ls =
              let len = length ls
                  dontShrink = replicate len DontShrink
                  shrinks = if len == 0
                    then error "Invariant violation! A suffix should never be an empty list"
                    else flip map [1..len] $ \n ->
                      (replicate (n - 1) DontShrink) ++ [MustShrink] ++ (replicate (len - n) DontShrink)
              in case shouldShrink of
                  DontShrink -> [(zip dontShrink ls, DontShrink)]
                  MustShrink -> fmap (\shrinkLs -> (zip shrinkLs ls, DontShrink)) shrinks
                             ++ [(zip dontShrink ls, MustShrink)]
------------------------------------------------------------------------
-- | Run a two-thread parallel program the default number (ten) of times,
-- checking linearisability after each run.
runParallelCommands :: (Show (cmd Concrete), Show (resp Concrete))
                    => (Rank2.Traversable cmd, Rank2.Foldable resp)
                    => (MonadMask m, MonadUnliftIO m)
                    => StateMachine model cmd m resp
                    -> ParallelCommands cmd resp
                    -> PropertyM m [(History cmd resp, Logic)]
runParallelCommands sm cmds = runParallelCommandsNTimes 10 sm cmds
-- | Like 'runParallelCommands' (ten runs), but completes unfinished
-- operations in the history with the supplied mock-response function
-- before linearising.
runParallelCommands' :: (Show (cmd Concrete), Show (resp Concrete))
                     => (Rank2.Traversable cmd, Rank2.Foldable resp)
                     => (MonadMask m, MonadUnliftIO m)
                     => StateMachine model cmd m resp
                     -> (cmd Concrete -> resp Concrete)
                     -> ParallelCommands cmd resp
                     -> PropertyM m [(History cmd resp, Logic)]
runParallelCommands' sm complete cmds = runParallelCommandsNTimes' 10 sm complete cmds
-- | Run an N-thread parallel program the default number (ten) of times,
-- checking linearisability after each run.
runNParallelCommands :: (Show (cmd Concrete), Show (resp Concrete))
                     => (Rank2.Traversable cmd, Rank2.Foldable resp)
                     => (MonadMask m, MonadUnliftIO m)
                     => StateMachine model cmd m resp
                     -> NParallelCommands cmd resp
                     -> PropertyM m [(History cmd resp, Logic)]
runNParallelCommands sm cmds = runNParallelCommandsNTimes 10 sm cmds
runParallelCommandsNTimes :: (Show (cmd Concrete), Show (resp Concrete))
                          => (Rank2.Traversable cmd, Rank2.Foldable resp)
                          => (MonadMask m, MonadUnliftIO m)
                          => Int  -- ^ How many times to execute the parallel program.
                          -> StateMachine model cmd m resp
                          -> ParallelCommands cmd resp
                          -> PropertyM m [(History cmd resp, Logic)]
runParallelCommandsNTimes n sm cmds = replicateM n singleRun
  where
    -- One execution: stop at the first error, then require both that the
    -- run itself succeeded and that the observed history linearises.
    singleRun = do
      (hist, res1, res2) <- run (executeParallelCommands sm cmds True)
      let verdict = logicReason (combineReasons [res1, res2]) .&& linearise sm hist
      pure (hist, verdict)
runParallelCommandsNTimes' :: (Show (cmd Concrete), Show (resp Concrete))
                           => (Rank2.Traversable cmd, Rank2.Foldable resp)
                           => (MonadMask m, MonadUnliftIO m)
                           => Int  -- ^ How many times to execute the parallel program.
                           -> StateMachine model cmd m resp
                           -> (cmd Concrete -> resp Concrete)
                           -> ParallelCommands cmd resp
                           -> PropertyM m [(History cmd resp, Logic)]
runParallelCommandsNTimes' n sm complete cmds = replicateM n singleRun
  where
    -- One execution: ignore the execution outcomes, fill in the responses
    -- of unfinished operations with @complete@, then linearise the
    -- completed history.
    singleRun = do
      (hist, _res1, _res2) <- run (executeParallelCommands sm cmds False)
      let completed = completeHistory complete hist
      pure (completed, linearise sm completed)
runNParallelCommandsNTimes :: (Show (cmd Concrete), Show (resp Concrete))
                           => (Rank2.Traversable cmd, Rank2.Foldable resp)
                           => (MonadMask m, MonadUnliftIO m)
                           => Int  -- ^ How many times to execute the parallel program.
                           -> StateMachine model cmd m resp
                           -> NParallelCommands cmd resp
                           -> PropertyM m [(History cmd resp, Logic)]
runNParallelCommandsNTimes n sm cmds = replicateM n singleRun
  where
    -- One execution: stop at the first error, then require both that the
    -- run itself succeeded and that the observed history linearises.
    singleRun = do
      (hist, res) <- run (executeNParallelCommands sm cmds True)
      pure (hist, logicReason res .&& linearise sm hist)
runNParallelCommandsNTimes' :: (Show (cmd Concrete), Show (resp Concrete))
                            => (Rank2.Traversable cmd, Rank2.Foldable resp)
                            => (MonadMask m, MonadUnliftIO m)
                            => Int  -- ^ How many times to execute the parallel program.
                            -> StateMachine model cmd m resp
                            -> (cmd Concrete -> resp Concrete)
                            -> NParallelCommands cmd resp
                            -> PropertyM m [(History cmd resp, Logic)]
runNParallelCommandsNTimes' n sm complete cmds =
  replicateM n $ do
    -- Don't stop on errors: like 'runParallelCommandsNTimes'', this variant
    -- fills in the responses of unfinished operations with @complete@
    -- afterwards instead of aborting the run.
    (hist, _reason) <- run (executeNParallelCommands sm cmds False)
    let hist' = completeHistory complete hist
    -- Return the /completed/ history: the linearisation verdict is computed
    -- from it, so returning the raw history would report a counterexample
    -- inconsistent with the verdict.
    return (hist', linearise sm hist')
-- | Execute a two-thread parallel program: the prefix sequentially (Pid 0),
-- then each suffix's halves concurrently (Pids 1 and 2), recording all
-- invocations and responses in a shared history channel.  'cleanup' is
-- always run on the model built from the observed history, even on
-- exceptions.
executeParallelCommands :: (Show (cmd Concrete), Show (resp Concrete))
                        => (Rank2.Traversable cmd, Rank2.Foldable resp)
                        => (MonadMask m, MonadUnliftIO m)
                        => StateMachine model cmd m resp
                        -> ParallelCommands cmd resp
                        -> Bool  -- ^ Stop executing suffixes after the first failure?
                        -> m (History cmd resp, Reason, Reason)
executeParallelCommands sm@StateMachine{ initModel, cleanup } (ParallelCommands prefix suffixes) stopOnError =
  mask $ \restore -> do
    hchan <- restore newTChanIO
    -- Run the sequential prefix first; it establishes the reference
    -- environment shared by the two parallel branches.
    (reason0, (env0, _smodel, _counter, _cmodel)) <- restore (runStateT
       (executeCommands sm hchan (Pid 0) CheckEverything prefix)
       (emptyEnvironment, initModel, newCounter, initModel))
      `onException` (getChanContents hchan >>= cleanup . mkModel sm . History)
    if reason0 /= Ok
    then do
      hist <- getChanContents hchan
      cleanup $ mkModel sm $ History hist
      return (History hist, reason0, reason0)
    else do
      (reason1, reason2, _) <- restore (go hchan (Ok, Ok, env0) suffixes)
        `onException` (getChanContents hchan >>= cleanup . mkModel sm . History)
      hist <- getChanContents hchan
      cleanup $ mkModel sm $ History hist
      return (History hist, reason1, reason2)
  where
    go _hchan (res1, res2, env) [] = return (res1, res2, env)
    -- Neither branch has failed yet: run both halves concurrently.
    go hchan (Ok, Ok, env) (Pair cmds1 cmds2 : pairs) = do
      ((reason1, (env1, _, _, _)), (reason2, (env2, _, _, _))) <- concurrently

        -- XXX: Post-conditions not checked, so we can pass in initModel here...
        -- It would be better if we made executeCommands take a Maybe Environment
        -- instead of the Check...

        (runStateT (executeCommands sm hchan (Pid 1) CheckNothing cmds1) (env, initModel, newCounter, initModel))
        (runStateT (executeCommands sm hchan (Pid 2) CheckNothing cmds2) (env, initModel, newCounter, initModel))
      case (isOK $ combineReasons [reason1, reason2], stopOnError) of
        (False, True) -> return (reason1, reason2, env1 <> env2)
        _             -> go hchan ( reason1
                                  , reason2
                                  , env1 <> env2
                                  ) pairs
    -- The right branch crashed earlier: keep executing the left branch
    -- alone, but re-check pre-conditions since some references may never
    -- have been created.
    go hchan (Ok, ExceptionThrown e, env) (Pair cmds1 _cmds2 : pairs) = do
      -- XXX: It's possible that pre-conditions fail at this point, because
      -- commands may depend on references that never got created in the crashed
      -- process. For example, consider:
      --
      --    x <- Create
      --   -------------+----------
      --    Write 1 x   | Write 2 x
      --    y <- Create |
      --   -------------+----------
      --    Write 3 x   | Write 4 y
      --                | Read x
      --
      -- If the @Write 1 x@ fails, @y@ will never be created and the
      -- pre-condition for @Write 4 y@ will fail. This also means that @Read x@
      -- will never get executed, and so there could be a bug in @Write@ that
      -- never gets discovered. Not sure if we can do something better here?
      --
      (reason1, (env1, _, _, _)) <- runStateT (executeCommands sm hchan (Pid 1) CheckPrecondition cmds1)
                                              (env, initModel, newCounter, initModel)
      go hchan ( reason1
               , ExceptionThrown e
               , env1
               ) pairs
    -- Symmetric case: the left branch crashed earlier.
    go hchan (ExceptionThrown e, Ok, env) (Pair _cmds1 cmds2 : pairs) = do
      (reason2, (env2, _, _, _)) <- runStateT (executeCommands sm hchan (Pid 2) CheckPrecondition cmds2)
                                              (env, initModel, newCounter, initModel)
      go hchan ( ExceptionThrown e
               , reason2
               , env2
               ) pairs
    -- Both branches have failed: nothing more can be executed safely.
    go _hchan out@(ExceptionThrown _,     ExceptionThrown _,     _env) (_ : _) = return out
    go _hchan out@(PreconditionFailed {}, ExceptionThrown _,     _env) (_ : _) = return out
    go _hchan out@(ExceptionThrown _,     PreconditionFailed {}, _env) (_ : _) = return out
    go _hchan (res1, res2, _env) (Pair _cmds1 _cmds2 : _pairs) =
      error ("executeParallelCommands, unexpected result: " ++ show (res1, res2))
-- | Lift an execution outcome into 'Logic': success is 'Top', any failure
-- becomes 'Bot' annotated with the (shown) reason.
logicReason :: Reason -> Logic
logicReason r = case r of
  Ok -> Top
  _  -> Annotate (show r) Bot
executeNParallelCommands :: (Rank2.Traversable cmd, Show (cmd Concrete), Rank2.Foldable resp)
                         => Show (resp Concrete)
                         => (MonadMask m, MonadUnliftIO m)
                         => StateMachine model cmd m resp
                         -> NParallelCommands cmd resp
                         -> Bool  -- ^ Stop executing suffixes after the first failure?
                         -> m (History cmd resp, Reason)
executeNParallelCommands sm@StateMachine{ initModel, cleanup } (ParallelCommands prefix suffixes) stopOnError =
  mask $ \restore -> do
    hchan <- restore newTChanIO
    -- Run the sequential prefix first (Pid 0); it establishes the reference
    -- environment shared by all threads.
    (reason0, (env0, _smodel, _counter, _cmodel)) <- restore (runStateT
       (executeCommands sm hchan (Pid 0) CheckEverything prefix)
       (emptyEnvironment, initModel, newCounter, initModel))
      `onException` (getChanContents hchan >>= cleanup . mkModel sm . History)
    if reason0 /= Ok
    then do
      hist <- getChanContents hchan
      cleanup $ mkModel sm $ History hist
      return (History hist, reason0)
    else do
      (errors, _) <- restore (go hchan (Map.empty, env0) suffixes)
        `onException` (getChanContents hchan >>= cleanup . mkModel sm . History)
      hist <- getChanContents hchan
      cleanup $ mkModel sm $ History hist
      return (History hist, combineReasons $ Map.elems errors)
  where
    -- Walk the suffixes, running one chunk per thread.  The map records the
    -- first error each thread index hit; a thread that has already failed
    -- skips its chunks in later suffixes.
    go _     res                   []              = return res
    go hchan (previousErrors, env) (suffix : rest) = do
      when (isInvalid $ Map.elems previousErrors) $
        error ("executeNParallelCommands, unexpected result: " ++ show previousErrors)
      let noError = Map.null previousErrors
          -- Once some thread has failed, later chunks may reference values
          -- that were never created, so re-check pre-conditions.
          check = if noError then CheckNothing else CheckPrecondition
      res <- forConcurrently (zip [1..] suffix) $ \(i, cmds) ->
        case Map.lookup i previousErrors of
          Nothing -> do
            (reason, (env', _, _, _)) <- runStateT (executeCommands sm hchan (Pid i) check cmds) (env, initModel, newCounter, initModel)
            return (if isOK reason then Nothing else Just (i, reason), env')
          Just _ -> return (Nothing, env)
      let newErrors = Map.fromList $ mapMaybe fst res
          errors    = Map.union previousErrors newErrors
          newEnv    = mconcat $ snd <$> res
      case (stopOnError, Map.null errors) of
        (True, False) -> return (errors, newEnv)
        _             -> go hchan (errors, newEnv) rest
-- | Collapse several execution outcomes into one: the first failure wins,
-- otherwise 'Ok'.
combineReasons :: [Reason] -> Reason
combineReasons rs =
  case filter (/= Ok) rs of
    []        -> Ok
    (bad : _) -> bad
-- | A set of outcomes is invalid when some pre-condition failed even though
-- no thread threw an exception: pre-condition failures are only expected as
-- fallout of a crashed thread.
isInvalid :: [Reason] -> Bool
isInvalid reasons =
    any isPreconditionFailed reasons && not (any isException reasons)
  where
    isException r = case r of
      ExceptionThrown _ -> True
      _                 -> False
-- | Does the outcome record a failed pre-condition?
isPreconditionFailed :: Reason -> Bool
isPreconditionFailed r = case r of
  PreconditionFailed {} -> True
  _                     -> False
------------------------------------------------------------------------
-- | Try to linearise a history of a parallel program execution using a
-- sequential model. See the *Linearizability: a correctness condition for
-- concurrent objects* paper linked to from the README for more info.
linearise :: forall model cmd m resp. (Show (cmd Concrete), Show (resp Concrete))
          => StateMachine model cmd m resp -> History cmd resp -> Logic
linearise StateMachine { transition, postcondition, initModel } = go . unHistory
  where
    go :: [(Pid, HistoryEvent cmd resp)] -> Logic
    go [] = Top
    -- There must exist at least one sequential interleaving of the
    -- concurrent history whose post-conditions all hold.
    go es = exists (interleavings es) (step initModel)

    -- Replay one interleaving tree, advancing the model at each node.
    step :: model Concrete -> Tree (Operation cmd resp) -> Logic
    step _model (Node (Crash _cmd _err _pid) _roses) =
      error "Not implemented yet, see issue #162 for more details."
    step model (Node (Operation cmd resp _) roses) =
      postcondition model cmd resp .&&
      exists' roses (step (transition model cmd resp))

    -- Like 'exists', but vacuously true on the empty list (a leaf of the
    -- interleaving tree means the history has been fully consumed).
    exists' :: Show a => [a] -> (a -> Logic) -> Logic
    exists' [] _ = Top
    exists' xs p = exists xs p
------------------------------------------------------------------------
-- | Takes the output of parallel program runs and pretty prints a
-- counterexample if any of the runs fail.
prettyParallelCommandsWithOpts :: (MonadIO m, Rank2.Foldable cmd)
                               => (Show (cmd Concrete), Show (resp Concrete))
                               => ParallelCommands cmd resp
                               -> Maybe GraphOptions
                               -> [(History cmd resp, Logic)] -- ^ Output of 'runParallelCommands'.
                               -> PropertyM m ()
prettyParallelCommandsWithOpts cmds mGraphOptions =
  mapM_ (\(h, l) -> printCounterexample h (logic l) `whenFailM` property (boolean l))
  where
    -- Only run when the property fails: draw the ASCII interleaving
    -- diagram, the simplified failing counterexample, and optionally a
    -- dot graph.
    printCounterexample hist' (VFalse ce) = do
      putStrLn ""
      print (toBoxDrawings cmds hist')
      putStrLn ""
      print (simplify ce)
      putStrLn ""
      case mGraphOptions of
        Nothing       -> return ()
        Just gOptions -> createAndPrintDot cmds gOptions hist'
    -- Unreachable: only invoked via 'whenFailM', i.e. when `boolean l`
    -- is False, in which case `logic l` is a 'VFalse'.
    printCounterexample _hist _
      = error "prettyParallelCommands: impossible, because `boolean l` was False."
-- Strip the counterexample produced by 'linearise' down to the genuinely
-- failing annotated post-condition(s), mirroring the conjunction/exists
-- structure that 'linearise' builds.
simplify :: Counterexample -> Counterexample
simplify (Fst ce@(AnnotateC _ BotC)) = ce
simplify (Snd ce)                    = simplify ce
simplify (ExistsC [] [])             = BotC
simplify (ExistsC _ [Fst ce])        = ce
simplify (ExistsC x (Fst ce : ces))  = ce `EitherC` simplify (ExistsC x ces)
simplify (ExistsC _ (Snd ce : _))    = simplify ce
-- Any other shape cannot be produced by 'linearise'.
simplify _                           = error "simplify: impossible,\
                                             \ because of the structure of linearise."
-- | Pretty print the failing runs of a two-thread parallel program,
-- without producing a dot graph.
prettyParallelCommands :: (Show (cmd Concrete), Show (resp Concrete))
                       => MonadIO m
                       => Rank2.Foldable cmd
                       => ParallelCommands cmd resp
                       -> [(History cmd resp, Logic)] -- ^ Output of 'runParallelCommands'.
                       -> PropertyM m ()
prettyParallelCommands cmds results =
  prettyParallelCommandsWithOpts cmds Nothing results
-- | Takes the output of parallel program runs and pretty prints a
-- counterexample if any of the runs fail.
prettyNParallelCommandsWithOpts :: (Show (cmd Concrete), Show (resp Concrete))
                                => MonadIO m
                                => Rank2.Foldable cmd
                                => NParallelCommands cmd resp
                                -> Maybe GraphOptions
                                -> [(History cmd resp, Logic)] -- ^ Output of 'runNParallelCommands'.
                                -> PropertyM m ()
prettyNParallelCommandsWithOpts cmds mGraphOptions =
  mapM_ (\(h, l) -> printCounterexample h (logic l) `whenFailM` property (boolean l))
  where
    -- Only run when the property fails.  Unlike the two-thread variant,
    -- there is no ASCII box drawing here; only the simplified failing
    -- counterexample and optionally a dot graph are shown.
    printCounterexample hist' (VFalse ce) = do
      putStrLn ""
      print (simplify ce)
      putStrLn ""
      case mGraphOptions of
        Nothing       -> return ()
        Just gOptions -> createAndPrintDot cmds gOptions hist'
    -- Unreachable: only invoked via 'whenFailM', i.e. when `boolean l`
    -- is False, in which case `logic l` is a 'VFalse'.
    printCounterexample _hist _
      = error "prettyNParallelCommands: impossible, because `boolean l` was False."
-- | Pretty print the failing runs of an N-thread parallel program,
-- without producing a dot graph.
prettyNParallelCommands :: (Show (cmd Concrete), Show (resp Concrete))
                        => MonadIO m
                        => Rank2.Foldable cmd
                        => NParallelCommands cmd resp
                        -> [(History cmd resp, Logic)] -- ^ Output of 'runNParallelCommands'.
                        -> PropertyM m ()
prettyNParallelCommands cmds results =
  prettyNParallelCommandsWithOpts cmds Nothing results
-- | Draw an ASCII diagram of the history of a parallel program. Useful for
-- seeing how a race condition might have occured.
toBoxDrawings :: forall cmd resp. Rank2.Foldable cmd
              => (Show (cmd Concrete), Show (resp Concrete))
              => ParallelCommands cmd resp -> History cmd resp -> Doc
toBoxDrawings (ParallelCommands prefix suffixes) = toBoxDrawings'' allVars
  where
    -- All variables bound anywhere in the program; only these are shown
    -- on the left-hand side of an invocation in the diagram.
    allVars = getAllUsedVars prefix `S.union`
                foldMap (foldMap getAllUsedVars) suffixes

    toBoxDrawings'' :: Set Var -> History cmd resp -> Doc
    toBoxDrawings'' knownVars (History h) = exec evT (fmap (out . snd) <$> Fork l p r)
      where
        -- Pid 0 executed the sequential prefix; Pids 1 and 2 the branches.
        (p, h') = partition (\e -> fst e == Pid 0) h
        (l, r)  = partition (\e -> fst e == Pid 1) h'

        -- Render one history event as a diagram label.
        out :: HistoryEvent cmd resp -> String
        out (Invocation cmd vars)
          | vars `S.isSubsetOf` knownVars = show (S.toList vars) ++ " ← " ++ show cmd
          | otherwise                     = show cmd
        out (Response resp) = show resp
        out (Exception err) = err

        -- Invocations open a box; responses and exceptions close it.
        toEventType :: History' cmd resp -> [(EventType, Pid)]
        toEventType = map go
          where
            go e = case e of
              (pid, Invocation _ _) -> (Open,  pid)
              (pid, Response   _)   -> (Close, pid)
              (pid, Exception  _)   -> (Close, pid)

        -- Open/close markers of the two parallel pids, in history order.
        evT :: [(EventType, Pid)]
        evT = toEventType (filter (\e -> fst e `Prelude.elem` map Pid [1, 2]) h)
-- | Render the history of a parallel program as a dot graph and print it
-- according to the given 'GraphOptions'.
createAndPrintDot :: forall cmd resp t. Foldable t => Rank2.Foldable cmd
                  => (Show (cmd Concrete), Show (resp Concrete))
                  => ParallelCommandsF t cmd resp
                  -> GraphOptions
                  -> History cmd resp
                  -> IO ()
createAndPrintDot (ParallelCommands prefix suffixes) gOptions = toDotGraph allVars
  where
    -- All variables bound anywhere in the program; only these are shown
    -- on the left-hand side of an invocation.
    allVars = getAllUsedVars prefix `S.union`
                foldMap (foldMap getAllUsedVars) suffixes

    toDotGraph :: Set Var -> History cmd resp -> IO ()
    toDotGraph knownVars (History h) = printDotGraph gOptions $ (fmap out) <$> (Rose (snd <$> prefixMessages) groupByPid)
      where
        -- Pid 0 executed the sequential prefix; every other pid is a thread.
        (prefixMessages, h') = partition (\e -> fst e == Pid 0) h

        alterF a Nothing   = Just [a]
        alterF a (Just ls) = Just $ a : ls

        -- Per-pid event lists; the right fold with a prepending alter
        -- preserves the original history order within each pid.
        groupByPid = foldr (\(p,e) m -> Map.alter (alterF e) p m) Map.empty h'

        -- Render one history event as a node label.
        out :: HistoryEvent cmd resp -> String
        out (Invocation cmd vars)
          | vars `S.isSubsetOf` knownVars = show (S.toList vars) ++ " ← " ++ show cmd
          | otherwise                     = show cmd
        out (Response resp) = " → " ++ show resp
        out (Exception err) = " → " ++ err
-- | Collect every variable referenced by the commands of a program.
getAllUsedVars :: Rank2.Foldable cmd => Commands cmd resp -> Set Var
getAllUsedVars (Commands cmds) =
    S.fromList (concatMap varsOf cmds)
  where
    varsOf (Command cmd _resp _vars) = getUsedVars cmd
-- | Print the percentage of each command used. The prefix check is
-- an unfortunate remnant kept for backwards compatibility.
checkCommandNamesParallel :: forall cmd resp t. Foldable t => CommandNames cmd
                          => ParallelCommandsF t cmd resp -> Property -> Property
checkCommandNamesParallel = checkCommandNames . toSequential
-- | Fail if some commands have not been executed.
coverCommandNamesParallel :: forall cmd resp t. Foldable t => CommandNames cmd
                          => ParallelCommandsF t cmd resp -> Property -> Property
coverCommandNamesParallel = coverCommandNames . toSequential
-- | Count how often each command name occurs across the whole program.
commandNamesParallel :: forall cmd resp t. Foldable t => CommandNames cmd
                     => ParallelCommandsF t cmd resp -> [(String, Int)]
commandNamesParallel cmds = commandNames (toSequential cmds)
-- | Flatten a parallel program into one sequential command list: the
-- prefix followed by every suffix chunk in order.
toSequential :: Foldable t => ParallelCommandsF t cmd resp -> Commands cmd resp
toSequential parallel =
  prefix parallel <> mconcat [ cmds | suffix <- suffixes parallel, cmds <- toList suffix ]
| null | https://raw.githubusercontent.com/advancedtelematic/quickcheck-state-machine/3cbf466db302235afdea91dac06a57b8fe0f0b4d/src/Test/StateMachine/Parallel.hs | haskell | # LANGUAGE FlexibleContexts #
# LANGUAGE Rank2Types #
---------------------------------------------------------------------------
|
Module : Test.StateMachine.Parallel
License : BSD-style (see the file LICENSE)
Stability : provisional
This module contains helpers for generating, shrinking, and checking
parallel programs.
---------------------------------------------------------------------------
----------------------------------------------------------------------
^ Predicate.
^ Number of threads
^ Predicate.
| Generate parallel commands.
first half will be used as the prefix.
the following sequential commands:
> [A, B, C, D, E, F, G, H, I]
> prefix: [A, B]
> rest: [C, D, E, F, G, H, I]
We advance the model with the prefix.
__Make a suffix__: we take commands from @rest@ as long as these are
parallel safe (see 'parallelSafe'). This means that the pre-conditions
matter in which order they are executed.
of the chunk (stored as a 'Pair') will later be executed in parallel.
Together they form one suffix.
this process of building a chunk/suffix repeats itself, starting from
In the end we might end up with something like this:
> [A, B] ─┤ ├──┤ │
list of commands for which the preconditions of all commands hold
for permutation of the list, i.e. it is parallel safe. The other
half is the remainder of the input list.
Generate Parallel commands. The length of each suffix, indicates how many thread can
concurrently execute the commands safely.
We try to keep the length of each sublist len.
It is important that we miss no elements here or else executeCommands may fail, because
of missing references. It is also important that the final list has the correct length
n, or else there will be different number of threads than the user specified.
| A list of commands is parallel safe if the pre-conditions for all commands
hold in all permutations of the list.
This makes sure that in all permutations the length of variables created is the same.
More -state-machine/pull/348
| Apply the transition of some commands to a model.
^ The model.
^ The commands.
----------------------------------------------------------------------
| Shrink a parallel program in a pre-condition and scope respecting
way.
Moving a command from a suffix to the prefix preserves validity
| Shrink a parallel program in a pre-condition and scope respecting
way.
Moving a command from a suffix to the prefix preserves validity
validated prefix
environment after the prefix
suffixes to validate
accumulated validated suffixes (in reverse order)
environment after the validated suffixes
suffixes to validate
Failed to shrink something
validated prefix
environment after the prefix
suffixes to validate
accumulated validated suffixes (in reverse order)
environment after the validated suffixes
suffixes to validate
Failed to shrink something
----------------------------------------------------------------------
^ How many times to execute the parallel program.
^ How many times to execute the parallel program.
^ How many times to execute the parallel program.
^ How many times to execute the parallel program.
XXX: Post-conditions not checked, so we can pass in initModel here...
It would be better if we made executeCommands take a Maybe Environment
instead of the Check...
XXX: It's possible that pre-conditions fail at this point, because
commands may depend on references that never got created in the crashed
process. For example, consider:
x <- Create
------------+----------
y <- Create |
------------+----------
| Read x
will never get executed, and so there could be a bug in @Write@ that
never gets discovered. Not sure if we can do something better here?
----------------------------------------------------------------------
| Try to linearise a history of a parallel program execution using a
sequential model. See the *Linearizability: a correctness condition for
concurrent objects* paper linked to from the README for more info.
----------------------------------------------------------------------
| Takes the output of parallel program runs and pretty prints a
counterexample if any of the runs fail.
^ Output of 'runParallelCommands'.
^ Output of 'runNParallelCommands'.
| Takes the output of parallel program runs and pretty prints a
counterexample if any of the runs fail.
^ Output of 'runNParallelCommands'.
^ Output of 'runNParallelCommands'.
| Draw an ASCII diagram of the history of a parallel program. Useful for
seeing how a race condition might have occured.
| Print the percentage of each command used. The prefix check is
an unfortunate remaining for backwards compatibility.
| Fail if some commands have not been executed. | # LANGUAGE NamedFieldPuns #
# LANGUAGE ScopedTypeVariables #
Copyright : ( C ) 2017 , ATS Advanced Telematic Systems GmbH
Maintainer : < >
Portability : non - portable ( GHC extensions )
module Test.StateMachine.Parallel
( forAllNParallelCommands
, forAllParallelCommands
, generateNParallelCommands
, generateParallelCommands
, shrinkNParallelCommands
, shrinkParallelCommands
, shrinkAndValidateNParallel
, shrinkAndValidateParallel
, shrinkCommands'
, runNParallelCommands
, runParallelCommands
, runParallelCommands'
, runNParallelCommandsNTimes
, runParallelCommandsNTimes
, runNParallelCommandsNTimes'
, runParallelCommandsNTimes'
, executeParallelCommands
, linearise
, toBoxDrawings
, prettyNParallelCommands
, prettyParallelCommands
, prettyParallelCommandsWithOpts
, prettyNParallelCommandsWithOpts
, advanceModel
, checkCommandNamesParallel
, coverCommandNamesParallel
, commandNamesParallel
) where
import Control.Monad
(replicateM, when)
import Control.Monad.Catch
(MonadMask, mask, onException)
import Control.Monad.State.Strict
(runStateT)
import Data.Bifunctor
(bimap)
import Data.Foldable
(toList)
import Data.List
(find, partition, permutations)
import qualified Data.Map.Strict as Map
import Data.Maybe
(fromMaybe, mapMaybe)
import Data.Monoid
import Data.Set
(Set)
import qualified Data.Set as S
import Data.Tree
(Tree(Node))
import Prelude
import Test.QuickCheck
(Gen, Property, Testable, choose, forAllShrinkShow,
property, sized)
import Test.QuickCheck.Monadic
(PropertyM, run)
import Text.PrettyPrint.ANSI.Leijen
(Doc)
import Text.Show.Pretty
(ppShow)
import UnliftIO
(MonadIO, MonadUnliftIO, concurrently,
forConcurrently, newTChanIO)
import Test.StateMachine.BoxDrawer
import Test.StateMachine.ConstructorName
import Test.StateMachine.DotDrawing
import Test.StateMachine.Logic
import Test.StateMachine.Sequential
import Test.StateMachine.Types
import qualified Test.StateMachine.Types.Rank2 as Rank2
import Test.StateMachine.Utils
-- | Generate, shrink and test parallel programs.
--
-- FIX: restored the missing predicate argument in the signature. The body
-- only partially applies 'forAllShrinkShow', which still expects a
-- @(ParallelCommands cmd resp -> prop)@ continuation before it yields a
-- 'Property' (and the 'Testable prop' constraint is otherwise unused).
forAllParallelCommands :: Testable prop
                       => (Show (cmd Symbolic), Show (resp Symbolic), Show (model Symbolic))
                       => (Rank2.Traversable cmd, Rank2.Foldable resp)
                       => StateMachine model cmd m resp
                       -> Maybe Int
                       -- ^ Minimum number of commands to generate.
                       -> (ParallelCommands cmd resp -> prop)
                       -- ^ Predicate to check for each generated program.
                       -> Property
forAllParallelCommands sm mminSize =
  forAllShrinkShow (generateParallelCommands sm mminSize) (shrinkParallelCommands sm) ppShow
-- | Generate, shrink and test parallel programs with an arbitrary number
-- of threads.
--
-- FIX: restored the missing @Int@ (number of threads, bound as @np@ in the
-- body) and predicate arguments that had been dropped from the signature.
forAllNParallelCommands :: Testable prop
                        => (Show (cmd Symbolic), Show (resp Symbolic), Show (model Symbolic))
                        => (Rank2.Traversable cmd, Rank2.Foldable resp)
                        => StateMachine model cmd m resp
                        -> Int
                        -- ^ Number of threads.
                        -> (NParallelCommands cmd resp -> prop)
                        -- ^ Predicate to check for each generated program.
                        -> Property
forAllNParallelCommands sm np =
  forAllShrinkShow (generateNParallelCommands sm np) (shrinkNParallelCommands sm) ppShow
Parallel commands are generated as follows . We begin by generating
sequential commands and then splitting this list in two at some index. The
second half will be used to build suffixes. For example, starting from
We split it in two , giving us the prefix and the rest :
( using the \'advanced\ ' model ) of each of those commands will hold no
Say this is true for @[C , D , E]@ , but not anymore for @F@ , maybe because
@F@ depends on one of @[C , D , Then we divide this \'chunk\ ' in two by
splitting it in the middle , obtaining @[C]@ and @[D , These two halves
Then the model is advanced using the whole chunk @[C , D , Think of it
as a barrier after executing the two halves of the chunk in parallel . Then
_ _ Make a suffix _ _ using the \'advanced\ ' model .
> ┌ ─ [ C ] ─ ─ ┐ ┌ [ F , G ] ┐
> [ D , E ] ┘ [ H , I ] ┘
-- | Generate a parallel program: a sequential prefix followed by pairs of
-- command sequences (suffixes) that are safe to execute in two concurrent
-- threads.
generateParallelCommands :: forall model cmd m resp. Rank2.Foldable resp
                         => Show (model Symbolic)
                         => (Show (cmd Symbolic), Show (resp Symbolic))
                         => StateMachine model cmd m resp
                         -> Maybe Int
                         -> Gen (ParallelCommands cmd resp)
generateParallelCommands sm@StateMachine { initModel } mminSize = do
  -- Start from a plain sequential program and keep up to a third of it
  -- as the sequential prefix.
  Commands cmds <- generateCommands sm mminSize
  prefixLength <- sized (\k -> choose (0, k `div` 3))
  let (prefix, rest) = bimap Commands Commands (splitAt prefixLength cmds)
  return (ParallelCommands prefix
            (makeSuffixes (advanceModel sm initModel prefix) rest))
  where
    -- Chop the remaining commands into parallel-safe halves; the model is
    -- advanced over each whole chunk before building the next suffix
    -- (think of it as a barrier between suffixes).
    makeSuffixes :: model Symbolic -> Commands cmd resp -> [Pair (Commands cmd resp)]
    makeSuffixes model0 = go model0 [] . unCommands
      where
        go _     acc []   = reverse acc
        go model acc cmds = go (advanceModel sm model (Commands safe))
                               (Pair (Commands safe1) (Commands safe2) : acc)
                               rest
          where
            -- @safe@ is a span whose commands stay valid under any
            -- interleaving; it is split in the middle into the two
            -- threads of the suffix.
            (safe, rest)   = spanSafe sm model [] cmds
            (safe1, safe2) = splitAt (length safe `div` 2) safe
Split the list of commands in two such that the first half is a maximal
parallel-safe span (see 'parallelSafe') and the second half is the rest.
-- | Split a command list in two: a leading span (at most six commands)
-- that is parallel safe with respect to the given model, and the rest.
spanSafe :: Rank2.Foldable resp
         => StateMachine model cmd m resp
         -> model Symbolic -> [Command cmd resp] -> [Command cmd resp]
         -> ([Command cmd resp], [Command cmd resp])
spanSafe sm model = walk
  where
    -- @acc@ holds the safe span in reverse order.
    walk acc [] = (reverse acc, [])
    walk acc rest@(cmd : cmds)
      | length acc <= 5 && parallelSafe sm model (Commands (cmd : acc)) =
          walk (cmd : acc) cmds
      | otherwise =
          (reverse acc, rest)
-- | Generate a parallel program with @np@ threads: a sequential prefix
-- plus suffixes, each split into @np@ parallel-safe chunks.
generateNParallelCommands :: forall model cmd m resp. Rank2.Foldable resp
                          => Show (model Symbolic)
                          => (Show (cmd Symbolic), Show (resp Symbolic))
                          => StateMachine model cmd m resp
                          -> Int
                          -> Gen (NParallelCommands cmd resp)
generateNParallelCommands sm@StateMachine { initModel } np =
  if np <= 0 then error "number of threads must be positive" else do
    Commands cmds <- generateCommands sm Nothing
    prefixLength <- sized (\k -> choose (0, k `div` 3))
    let (prefix, rest) = bimap Commands Commands (splitAt prefixLength cmds)
    return (ParallelCommands prefix
              (makeSuffixes (advanceModel sm initModel prefix) rest))
  where
    -- Each suffix is a list of @np@ command sequences, one per thread.
    makeSuffixes :: model Symbolic -> Commands cmd resp -> [[(Commands cmd resp)]]
    makeSuffixes model0 = go model0 [] . unCommands
      where
        go :: model Symbolic
           -> [[(Commands cmd resp)]]
           -> [(Command cmd resp)]
           -> [[(Commands cmd resp)]]
        go _     acc []   = reverse acc
        go model acc cmds = go (advanceModel sm model (Commands safe))
                               (safes : acc)
                               rest
          where
            (safe, rest) = spanSafe sm model [] cmds
            safes = Commands <$> chunksOf np (length safe `div` np) safe

    -- Split the list in n sublists, whose concat is the initial list.
    -- Note the @chunksOf 1@ base case: the last sublist absorbs any
    -- remainder, so exactly @n@ sublists are always returned.
    chunksOf :: Int -> Int -> [a] -> [[a]]
    chunksOf 1 _ xs = [xs]
    chunksOf n len xs = as : chunksOf (n-1) len bs
      where (as, bs) = splitAt len xs
-- | A list of commands is parallel safe if the pre-conditions of all
-- commands hold in every permutation of the list.
parallelSafe :: Rank2.Foldable resp
             => StateMachine model cmd m resp -> model Symbolic
             -> Commands cmd resp -> Bool
parallelSafe StateMachine { precondition, transition, mock } model0
  = all (preconditionsHold model0)
  . permutations
  . unCommands
  where
    preconditionsHold _     []                             = True
    preconditionsHold model (Command cmd resp vars : cmds) =
        boolean (precondition model cmd) &&
          preconditionsHold (transition model cmd resp) cmds &&
          -- Also require that the number of variables recorded for the
          -- command matches what 'mock' would bind for it now.
          -- By doing so, we try to avoid MockSemanticsMismatch errors.
          length vars == length (getUsedVars $ fst $ runGenSym (mock model cmd) newCounter)
-- | Apply the transition function along a whole sequence of commands,
-- yielding the model reached after the last one.
--
-- FIX: restored the @Commands cmd resp@ argument that had been dropped
-- from the signature; the body clearly consumes a model and then a
-- 'Commands' value (via 'unCommands').
advanceModel :: StateMachine model cmd m resp
             -> model Symbolic    -- ^ Model to start from.
             -> Commands cmd resp -- ^ Commands to step through.
             -> model Symbolic
advanceModel StateMachine { transition } model0 =
  go model0 . unCommands
  where
    go model []                              = model
    go model (Command cmd resp _vars : cmds) =
      go (transition model cmd resp) cmds
-- | Shrink a parallel program: shrink the prefix and the suffixes (the
-- candidates are re-validated afterwards), and additionally try moving a
-- command from the first suffix into the sequential prefix.
shrinkParallelCommands
  :: forall cmd model m resp. Rank2.Traversable cmd
  => Rank2.Foldable resp
  => StateMachine model cmd m resp
  -> (ParallelCommands cmd resp -> [ParallelCommands cmd resp])
shrinkParallelCommands sm (ParallelCommands prefix suffixes)
  = concatMap go
      [ Shrunk s (ParallelCommands prefix' (map toPair suffixes'))
      | Shrunk s (prefix', suffixes') <- shrinkPairS shrinkCommands' shrinkSuffixes
                                           (prefix, map fromPair suffixes)
      ]
    ++
    shrinkMoveSuffixToPrefix
  where
    -- Every candidate must be re-validated; if nothing was shrunk yet we
    -- demand that validation itself shrinks something ('MustShrink').
    go :: Shrunk (ParallelCommands cmd resp) -> [ParallelCommands cmd resp]
    go (Shrunk shrunk cmds) =
      shrinkAndValidateParallel sm
        (if shrunk then DontShrink else MustShrink)
        cmds

    shrinkSuffixes :: [(Commands cmd resp, Commands cmd resp)]
                   -> [Shrunk [(Commands cmd resp, Commands cmd resp)]]
    shrinkSuffixes = shrinkListS (shrinkPairS' shrinkCommands')

    -- Turning a parallel command into a sequential one often removes the
    -- interleaving needed to trigger a race, which is a useful shrink.
    shrinkMoveSuffixToPrefix :: [ParallelCommands cmd resp]
    shrinkMoveSuffixToPrefix = case suffixes of
      []                   -> []
      (suffix : suffixes') ->
        [ ParallelCommands (prefix <> Commands [prefix'])
                           (fmap Commands (toPair suffix') : suffixes')
        | (prefix', suffix') <- pickOneReturnRest2 (unCommands (proj1 suffix),
                                                    unCommands (proj2 suffix))
        ]
-- | Like 'shrinkParallelCommands' but for programs with an arbitrary
-- number of threads per suffix.
shrinkNParallelCommands
  :: forall cmd model m resp. Rank2.Traversable cmd
  => Rank2.Foldable resp
  => StateMachine model cmd m resp
  -> (NParallelCommands cmd resp -> [NParallelCommands cmd resp])
shrinkNParallelCommands sm (ParallelCommands prefix suffixes)
  = concatMap go
      [ Shrunk s (ParallelCommands prefix' suffixes')
      | Shrunk s (prefix', suffixes') <- shrinkPairS shrinkCommands' shrinkSuffixes
                                           (prefix, suffixes)
      ]
    ++
    shrinkMoveSuffixToPrefix
  where
    -- Re-validate (and possibly keep shrinking) every candidate.
    go :: Shrunk (NParallelCommands cmd resp) -> [NParallelCommands cmd resp]
    go (Shrunk shrunk cmds) =
      shrinkAndValidateNParallel sm
        (if shrunk then DontShrink else MustShrink)
        cmds

    shrinkSuffixes :: [[Commands cmd resp]]
                   -> [Shrunk [[Commands cmd resp]]]
    shrinkSuffixes = shrinkListS (shrinkListS'' shrinkCommands')

    -- Move one command out of the first suffix into the prefix.
    shrinkMoveSuffixToPrefix :: [NParallelCommands cmd resp]
    shrinkMoveSuffixToPrefix = case suffixes of
      []                   -> []
      (suffix : suffixes') ->
        [ ParallelCommands (prefix <> Commands [prefix'])
                           (fmap Commands suffix' : suffixes')
        | (prefix', suffix') <- pickOneReturnRestL (unCommands <$> suffix)
        ]
| Shrinks a 'Commands' sequence so that every result has strictly fewer commands.
-- | Shrink a command sequence such that every candidate has strictly
-- fewer commands than the input.
shrinkCommands' :: Commands cmd resp -> [Shrunk (Commands cmd resp)]
shrinkCommands' (Commands cmds) =
  [ Commands <$> shrunk | shrunk <- shrinkListS' cmds ]
-- | Re-validate a (possibly shrunk) parallel program, optionally forcing
-- one further shrink somewhere in the prefix or the suffixes.
--
-- FIX: restored the local signatures of @go@/@go'@ (only their trailing
-- @-> [ParallelCommands cmd resp]@ fragments survived extraction) and the
-- @go' _ _ MustShrink []@ clause, without which @go'@ is non-exhaustive
-- when a mandatory shrink never happened.
shrinkAndValidateParallel :: forall model cmd m resp. (Rank2.Traversable cmd, Rank2.Foldable resp)
                          => StateMachine model cmd m resp
                          -> ShouldShrink
                          -> ParallelCommands cmd resp
                          -> [ParallelCommands cmd resp]
shrinkAndValidateParallel sm@StateMachine { initModel } = \shouldShrink (ParallelCommands prefix suffixes) ->
    let env = initValidateEnv initModel
        curryGo shouldShrink' (env', prefix') = go prefix' env' shouldShrink' suffixes in
    case shouldShrink of
      DontShrink -> concatMap (curryGo DontShrink) (shrinkAndValidate sm DontShrink env prefix)
      MustShrink -> concatMap (curryGo DontShrink) (shrinkAndValidate sm MustShrink env prefix)
                 ++ concatMap (curryGo MustShrink) (shrinkAndValidate sm DontShrink env prefix)
  where
    go :: Commands cmd resp          -- validated prefix
       -> ValidateEnv model          -- environment after the prefix
       -> ShouldShrink               -- should we /still/ shrink something?
       -> [Pair (Commands cmd resp)] -- suffixes to validate
       -> [ParallelCommands cmd resp]
    go prefix' = go' []
      where
        go' :: [Pair (Commands cmd resp)] -- validated suffixes (reversed)
            -> ValidateEnv model          -- current environment
            -> ShouldShrink               -- should we /still/ shrink something?
            -> [Pair (Commands cmd resp)] -- suffixes still to validate
            -> [ParallelCommands cmd resp]
        go' acc _   DontShrink [] = [ParallelCommands prefix' (reverse acc)]
        go' _   _   MustShrink [] = [] -- Failed to shrink anything on this path.
        go' acc env shouldShrink (Pair l r : suffixes) = do
          ((shrinkL, shrinkR), shrinkRest) <- shrinkOpts
          -- The right thread is validated against the left thread's
          -- counter so that freshly bound variables do not clash.
          (envL, l') <- shrinkAndValidate sm shrinkL env l
          (envR, r') <- shrinkAndValidate sm shrinkR (env `withCounterFrom` envL) r
          go' (Pair l' r' : acc) (combineEnv sm envL envR r') shrinkRest suffixes
          where
            -- Distribute the obligation to shrink over the left thread,
            -- the right thread, or the remaining suffixes.
            shrinkOpts :: [((ShouldShrink, ShouldShrink), ShouldShrink)]
            shrinkOpts =
              case shouldShrink of
                DontShrink -> [ ((DontShrink, DontShrink), DontShrink) ]
                MustShrink -> [ ((MustShrink, DontShrink), DontShrink)
                              , ((DontShrink, MustShrink), DontShrink)
                              , ((DontShrink, DontShrink), MustShrink) ]
-- | Merge the validation environments of the two threads of a suffix:
-- the model is advanced over the right-hand commands, the scopes are
-- unioned (left-biased), and the counter is taken from the right-hand
-- environment (which already saw the left one's counter).
combineEnv :: StateMachine model cmd m resp
           -> ValidateEnv model
           -> ValidateEnv model
           -> Commands cmd resp
           -> ValidateEnv model
combineEnv sm envL envR cmds =
  let mergedScope = Map.union (veScope envL) (veScope envR)
      steppedModel = advanceModel sm (veModel envL) cmds
  in ValidateEnv
       { veModel   = steppedModel
       , veScope   = mergedScope
       , veCounter = veCounter envR
       }
-- | Keep everything from the first environment, but take the variable
-- counter from the second one.
withCounterFrom :: ValidateEnv model -> ValidateEnv model -> ValidateEnv model
withCounterFrom base source =
  base { veCounter = veCounter source }
-- | N-thread analogue of 'shrinkAndValidateParallel'.
--
-- FIX: restored the local signatures of @go@/@go'@ (only their trailing
-- @-> [NParallelCommands cmd resp]@ fragments survived extraction) and
-- the @go' _ _ MustShrink []@ clause, without which @go'@ is
-- non-exhaustive when a mandatory shrink never happened.
shrinkAndValidateNParallel :: forall model cmd m resp. (Rank2.Traversable cmd, Rank2.Foldable resp)
                           => StateMachine model cmd m resp
                           -> ShouldShrink
                           -> NParallelCommands cmd resp
                           -> [NParallelCommands cmd resp]
shrinkAndValidateNParallel sm = \shouldShrink (ParallelCommands prefix suffixes) ->
    let env = initValidateEnv $ initModel sm
        curryGo shouldShrink' (env', prefix') = go prefix' env' shouldShrink' suffixes in
    case shouldShrink of
      DontShrink -> concatMap (curryGo DontShrink) (shrinkAndValidate sm DontShrink env prefix)
      MustShrink -> concatMap (curryGo DontShrink) (shrinkAndValidate sm MustShrink env prefix)
                 ++ concatMap (curryGo MustShrink) (shrinkAndValidate sm DontShrink env prefix)
  where
    go :: Commands cmd resp        -- validated prefix
       -> ValidateEnv model        -- environment after the prefix
       -> ShouldShrink             -- should we /still/ shrink something?
       -> [[Commands cmd resp]]    -- suffixes to validate
       -> [NParallelCommands cmd resp]
    go prefix' = go' []
      where
        go' :: [[Commands cmd resp]] -- validated suffixes (reversed)
            -> ValidateEnv model     -- current environment
            -> ShouldShrink          -- should we /still/ shrink something?
            -> [[Commands cmd resp]] -- suffixes still to validate
            -> [NParallelCommands cmd resp]
        go' acc _   DontShrink [] = [ParallelCommands prefix' (reverse acc)]
        go' _   _   MustShrink [] = [] -- Failed to shrink anything on this path.
        go' acc env shouldShrink (suffix : suffixes) = do
          (suffixWithShrinks, shrinkRest) <- shrinkOpts suffix
          -- Fold over the threads of the suffix, threading environments
          -- the same way 'shrinkAndValidateParallel' does for two.
          (envFinal, suffix') <- snd $ foldl f (True, [(env,[])]) suffixWithShrinks
          go' ((reverse suffix') : acc) envFinal shrinkRest suffixes
          where
            f :: (Bool, [(ValidateEnv model, [Commands cmd resp])])
              -> (ShouldShrink, Commands cmd resp)
              -> (Bool, [(ValidateEnv model, [Commands cmd resp])])
            f (firstCall, acc') (shrink, cmds) = (False, acc'')
              where
                acc'' = do
                  (envPrev, cmdsPrev) <- acc'
                  let envUsed = if firstCall then env else env `withCounterFrom` envPrev
                  (env', cmd') <- shrinkAndValidate sm shrink envUsed cmds
                  let env'' = if firstCall then env' else
                                combineEnv sm envPrev env' cmd'
                  return (env'', cmd' : cmdsPrev)

            -- Either no thread shrinks, exactly one thread shrinks, or
            -- the obligation is pushed to the remaining suffixes.
            shrinkOpts :: [a] -> [([(ShouldShrink, a)], ShouldShrink)]
            shrinkOpts ls =
              let len = length ls
                  dontShrink = replicate len DontShrink
                  shrinks = if len == 0
                    then error "Invariant violation! A suffix should never be an empty list"
                    else flip map [1..len] $ \n ->
                      (replicate (n - 1) DontShrink) ++ [MustShrink] ++ (replicate (len - n) DontShrink)
              in case shouldShrink of
                DontShrink -> [(zip dontShrink ls, DontShrink)]
                MustShrink -> fmap (\shrinkLs -> (zip shrinkLs ls, DontShrink)) shrinks
                              ++ [(zip dontShrink ls, MustShrink)]
-- | Execute a parallel program against the real system 10 times,
-- returning a history and a linearisability check for each run.
runParallelCommands :: (Show (cmd Concrete), Show (resp Concrete))
                    => (Rank2.Traversable cmd, Rank2.Foldable resp)
                    => (MonadMask m, MonadUnliftIO m)
                    => StateMachine model cmd m resp
                    -> ParallelCommands cmd resp
                    -> PropertyM m [(History cmd resp, Logic)]
runParallelCommands sm cmds = runParallelCommandsNTimes 10 sm cmds
-- | Like 'runParallelCommands' (10 repetitions), but the extra function
-- is used to complete the history of responses before linearising.
runParallelCommands' :: (Show (cmd Concrete), Show (resp Concrete))
                     => (Rank2.Traversable cmd, Rank2.Foldable resp)
                     => (MonadMask m, MonadUnliftIO m)
                     => StateMachine model cmd m resp
                     -> (cmd Concrete -> resp Concrete)
                     -> ParallelCommands cmd resp
                     -> PropertyM m [(History cmd resp, Logic)]
runParallelCommands' sm complete cmds =
  runParallelCommandsNTimes' 10 sm complete cmds
-- | Execute an N-thread parallel program against the real system 10
-- times, returning a history and a linearisability check for each run.
runNParallelCommands :: (Show (cmd Concrete), Show (resp Concrete))
                     => (Rank2.Traversable cmd, Rank2.Foldable resp)
                     => (MonadMask m, MonadUnliftIO m)
                     => StateMachine model cmd m resp
                     -> NParallelCommands cmd resp
                     -> PropertyM m [(History cmd resp, Logic)]
runNParallelCommands sm cmds = runNParallelCommandsNTimes 10 sm cmds
-- | Execute a parallel program the given number of times, stopping a run
-- early if a thread fails; each run yields its history together with the
-- combined execution/linearisation check.
--
-- FIX: restored the missing @Int@ parameter (bound as @n@ in the body)
-- that had been dropped from the signature, which was left malformed
-- (@=>@ followed directly by @->@).
runParallelCommandsNTimes :: (Show (cmd Concrete), Show (resp Concrete))
                          => (Rank2.Traversable cmd, Rank2.Foldable resp)
                          => (MonadMask m, MonadUnliftIO m)
                          => Int -- ^ How many times to execute the parallel program.
                          -> StateMachine model cmd m resp
                          -> ParallelCommands cmd resp
                          -> PropertyM m [(History cmd resp, Logic)]
runParallelCommandsNTimes n sm cmds =
  replicateM n $ do
    (hist, reason1, reason2) <- run (executeParallelCommands sm cmds True)
    return (hist, logicReason (combineReasons [reason1, reason2]) .&& linearise sm hist)
-- | Like 'runParallelCommandsNTimes', but runs every suffix to the end
-- and completes any unfinished history entries with the supplied
-- responder before linearising.
--
-- FIX: restored the missing @Int@ parameter (bound as @n@ in the body)
-- that had been dropped from the signature.
runParallelCommandsNTimes' :: (Show (cmd Concrete), Show (resp Concrete))
                           => (Rank2.Traversable cmd, Rank2.Foldable resp)
                           => (MonadMask m, MonadUnliftIO m)
                           => Int -- ^ How many times to execute the parallel program.
                           -> StateMachine model cmd m resp
                           -> (cmd Concrete -> resp Concrete)
                           -> ParallelCommands cmd resp
                           -> PropertyM m [(History cmd resp, Logic)]
runParallelCommandsNTimes' n sm complete cmds =
  replicateM n $ do
    (hist, _reason1, _reason2) <- run (executeParallelCommands sm cmds False)
    let hist' = completeHistory complete hist
    return (hist', linearise sm hist')
-- | Execute an N-thread parallel program the given number of times,
-- stopping a run early on failure.
--
-- FIX: restored the missing @Int@ parameter (bound as @n@ in the body)
-- that had been dropped from the signature.
runNParallelCommandsNTimes :: (Show (cmd Concrete), Show (resp Concrete))
                           => (Rank2.Traversable cmd, Rank2.Foldable resp)
                           => (MonadMask m, MonadUnliftIO m)
                           => Int -- ^ How many times to execute the parallel program.
                           -> StateMachine model cmd m resp
                           -> NParallelCommands cmd resp
                           -> PropertyM m [(History cmd resp, Logic)]
runNParallelCommandsNTimes n sm cmds =
  replicateM n $ do
    (hist, reason) <- run (executeNParallelCommands sm cmds True)
    return (hist, logicReason reason .&& linearise sm hist)
-- | Like 'runNParallelCommandsNTimes', but runs every suffix to the end
-- and completes the history with the supplied responder before
-- linearising.
--
-- FIXES: restored the missing @Int@ parameter in the signature; pass
-- @False@ for the stop-on-error flag (mirroring
-- 'runParallelCommandsNTimes'', whose whole point is to complete an
-- unfinished history); and return the *completed* history @hist'@, which
-- was computed and linearised but then discarded in favour of the raw
-- @hist@.
runNParallelCommandsNTimes' :: (Show (cmd Concrete), Show (resp Concrete))
                            => (Rank2.Traversable cmd, Rank2.Foldable resp)
                            => (MonadMask m, MonadUnliftIO m)
                            => Int -- ^ How many times to execute the parallel program.
                            -> StateMachine model cmd m resp
                            -> (cmd Concrete -> resp Concrete)
                            -> NParallelCommands cmd resp
                            -> PropertyM m [(History cmd resp, Logic)]
runNParallelCommandsNTimes' n sm complete cmds =
  replicateM n $ do
    (hist, _reason) <- run (executeNParallelCommands sm cmds False)
    let hist' = completeHistory complete hist
    return (hist', linearise sm hist')
-- | Execute a parallel program: the prefix runs sequentially (Pid 0),
-- then each pair of suffixes runs in two concurrent threads (Pids 1 and
-- 2). All events are collected in a channel and returned as a history,
-- together with the failure reason of each thread.
executeParallelCommands :: (Show (cmd Concrete), Show (resp Concrete))
                        => (Rank2.Traversable cmd, Rank2.Foldable resp)
                        => (MonadMask m, MonadUnliftIO m)
                        => StateMachine model cmd m resp
                        -> ParallelCommands cmd resp
                        -> Bool
                        -- ^ Whether to stop executing further suffixes
                        -- after a failed pair.
                        -> m (History cmd resp, Reason, Reason)
executeParallelCommands sm@StateMachine{ initModel, cleanup } (ParallelCommands prefix suffixes) stopOnError =
  mask $ \restore -> do
    hchan <- restore newTChanIO
    -- Run the prefix first; 'cleanup' runs even when an exception
    -- escapes, using whatever history was recorded so far.
    (reason0, (env0, _smodel, _counter, _cmodel)) <- restore (runStateT
      (executeCommands sm hchan (Pid 0) CheckEverything prefix)
      (emptyEnvironment, initModel, newCounter, initModel))
      `onException` (getChanContents hchan >>= cleanup . mkModel sm . History)
    if reason0 /= Ok
    then do
      hist <- getChanContents hchan
      cleanup $ mkModel sm $ History hist
      return (History hist, reason0, reason0)
    else do
      (reason1, reason2, _) <- restore (go hchan (Ok, Ok, env0) suffixes)
        `onException` (getChanContents hchan >>= cleanup . mkModel sm . History)
      hist <- getChanContents hchan
      cleanup $ mkModel sm $ History hist
      return (History hist, reason1, reason2)
  where
    go _hchan (res1, res2, env) [] = return (res1, res2, env)
    -- Happy path: both threads are fine so far, run the next pair
    -- concurrently, each against its own copy of the environment.
    go hchan (Ok, Ok, env) (Pair cmds1 cmds2 : pairs) = do
      ((reason1, (env1, _, _, _)), (reason2, (env2, _, _, _))) <- concurrently
        (runStateT (executeCommands sm hchan (Pid 1) CheckNothing cmds1) (env, initModel, newCounter, initModel))
        (runStateT (executeCommands sm hchan (Pid 2) CheckNothing cmds2) (env, initModel, newCounter, initModel))
      case (isOK $ combineReasons [reason1, reason2], stopOnError) of
        (False, True) -> return (reason1, reason2, env1 <> env2)
        _ -> go hchan ( reason1
                      , reason2
                      , env1 <> env2
                      ) pairs
    -- One thread crashed: keep executing the surviving thread, but with
    -- pre-condition checks, because commands may depend on references
    -- that never got created in the crashed thread, e.g.:
    --
    --      Write 1 x | Write 2 x
    --      Write 3 x | Write 4 y
    --
    -- If @Write 1 x@ fails, @y@ will never be created and the
    -- pre-condition for @Write 4 y@ will fail.
    go hchan (Ok, ExceptionThrown e, env) (Pair cmds1 _cmds2 : pairs) = do
      (reason1, (env1, _, _, _)) <- runStateT (executeCommands sm hchan (Pid 1) CheckPrecondition cmds1)
                                              (env, initModel, newCounter, initModel)
      go hchan ( reason1
               , ExceptionThrown e
               , env1
               ) pairs
    go hchan (ExceptionThrown e, Ok, env) (Pair _cmds1 cmds2 : pairs) = do
      (reason2, (env2, _, _, _)) <- runStateT (executeCommands sm hchan (Pid 2) CheckPrecondition cmds2)
                                              (env, initModel, newCounter, initModel)
      go hchan ( ExceptionThrown e
               , reason2
               , env2
               ) pairs
    -- Both threads are dead (exception and/or subsequent pre-condition
    -- failure): stop and report what we have.
    go _hchan out@(ExceptionThrown _, ExceptionThrown _, _env) (_ : _) = return out
    go _hchan out@(PreconditionFailed {}, ExceptionThrown _, _env) (_ : _) = return out
    go _hchan out@(ExceptionThrown _, PreconditionFailed {}, _env) (_ : _) = return out
    go _hchan (res1, res2, _env) (Pair _cmds1 _cmds2 : _pairs) =
      error ("executeParallelCommands, unexpected result: " ++ show (res1, res2))
-- | Interpret an execution 'Reason' as a 'Logic' value: 'Ok' is truth,
-- anything else is falsity annotated with the reason.
logicReason :: Reason -> Logic
logicReason reason = case reason of
  Ok -> Top
  _  -> Annotate (show reason) Bot
-- | Like 'executeParallelCommands' but for an arbitrary number of
-- threads per suffix; failure reasons are tracked per thread index and
-- combined into a single 'Reason' at the end.
executeNParallelCommands :: (Rank2.Traversable cmd, Show (cmd Concrete), Rank2.Foldable resp)
                         => Show (resp Concrete)
                         => (MonadMask m, MonadUnliftIO m)
                         => StateMachine model cmd m resp
                         -> NParallelCommands cmd resp
                         -> Bool
                         -- ^ Whether to stop executing further suffixes
                         -- after a failure.
                         -> m (History cmd resp, Reason)
executeNParallelCommands sm@StateMachine{ initModel, cleanup } (ParallelCommands prefix suffixes) stopOnError =
  mask $ \restore -> do
    hchan <- restore newTChanIO
    -- Sequential prefix first; 'cleanup' runs even on exceptions.
    (reason0, (env0, _smodel, _counter, _cmodel)) <- restore (runStateT
      (executeCommands sm hchan (Pid 0) CheckEverything prefix)
      (emptyEnvironment, initModel, newCounter, initModel))
      `onException` (getChanContents hchan >>= cleanup . mkModel sm . History)
    if reason0 /= Ok
    then do
      hist <- getChanContents hchan
      cleanup $ mkModel sm $ History hist
      return (History hist, reason0)
    else do
      (errors, _) <- restore (go hchan (Map.empty, env0) suffixes)
        `onException` (getChanContents hchan >>= cleanup . mkModel sm . History)
      hist <- getChanContents hchan
      cleanup $ mkModel sm $ History hist
      return (History hist, combineReasons $ Map.elems errors)
  where
    go _ res [] = return res
    go hchan (previousErrors, env) (suffix : rest) = do
      -- A pre-condition may only fail after some thread already threw an
      -- exception; anything else indicates a logic error (see
      -- 'isInvalid').
      when (isInvalid $ Map.elems previousErrors) $
        error ("executeNParallelCommands, unexpected result: " ++ show previousErrors)
      let noError = Map.null previousErrors
          check = if noError then CheckNothing else CheckPrecondition
      -- Threads that already failed are skipped; the others run
      -- concurrently, each under its own 'Pid'.
      res <- forConcurrently (zip [1..] suffix) $ \(i, cmds) ->
        case Map.lookup i previousErrors of
          Nothing -> do
            (reason, (env', _, _, _)) <- runStateT (executeCommands sm hchan (Pid i) check cmds) (env, initModel, newCounter, initModel)
            return (if isOK reason then Nothing else Just (i, reason), env')
          Just _ -> return (Nothing, env)
      let newErrors = Map.fromList $ mapMaybe fst res
          errors = Map.union previousErrors newErrors
          newEnv = mconcat $ snd <$> res
      case (stopOnError, Map.null errors) of
        (True, False) -> return (errors, newEnv)
        _ -> go hchan (errors, newEnv) rest
-- | The first non-'Ok' reason in the list, or 'Ok' when every thread
-- succeeded.
combineReasons :: [Reason] -> Reason
combineReasons = fromMaybe Ok . find (/= Ok)
-- | A collection of reasons is invalid when some pre-condition failed
-- even though no thread threw an exception (pre-conditions are only
-- re-checked after a crash).
isInvalid :: [Reason] -> Bool
isInvalid reasons = hasPreconditionFailure && not hasException
  where
    hasPreconditionFailure = any isPreconditionFailed reasons
    hasException = any isException reasons
    isException reason = case reason of
      ExceptionThrown _ -> True
      _                 -> False
-- | Is this reason a pre-condition failure?
isPreconditionFailed :: Reason -> Bool
isPreconditionFailed reason = case reason of
  PreconditionFailed {} -> True
  _                     -> False
-- | Try to linearise a history of a parallel program execution using a
-- sequential model: the history is lineariseable if some interleaving of
-- its events satisfies every post-condition in sequence.
linearise :: forall model cmd m resp. (Show (cmd Concrete), Show (resp Concrete))
          => StateMachine model cmd m resp -> History cmd resp -> Logic
linearise StateMachine { transition, postcondition, initModel } = go . unHistory
  where
    go :: [(Pid, HistoryEvent cmd resp)] -> Logic
    go [] = Top
    go es = exists (interleavings es) (step initModel)

    -- Walk one candidate interleaving, checking the post-condition of
    -- every operation against the evolving model.
    step :: model Concrete -> Tree (Operation cmd resp) -> Logic
    step _model (Node (Crash _cmd _err _pid) _roses) =
      error "Not implemented yet, see issue #162 for more details."
    step model (Node (Operation cmd resp _) roses) =
      postcondition model cmd resp .&&
      exists' roses (step (transition model cmd resp))
-- | Like 'exists', except that the empty list is trivially true rather
-- than false.
exists' :: Show a => [a] -> (a -> Logic) -> Logic
exists' xs p = case xs of
  [] -> Top
  _  -> exists xs p
-- | Takes the output of parallel program runs and pretty prints a
-- counterexample (box drawing plus simplified logic counterexample, and
-- optionally a dot graph) if any of the runs fail.
--
-- FIX: restored the missing @[(History cmd resp, Logic)]@ argument in
-- the signature; the body's 'mapM_' is partially applied and consumes
-- exactly that list.
prettyParallelCommandsWithOpts :: (MonadIO m, Rank2.Foldable cmd)
                               => (Show (cmd Concrete), Show (resp Concrete))
                               => ParallelCommands cmd resp
                               -> Maybe GraphOptions
                               -> [(History cmd resp, Logic)] -- ^ Output of 'runParallelCommands'.
                               -> PropertyM m ()
prettyParallelCommandsWithOpts cmds mGraphOptions =
  mapM_ (\(h, l) -> printCounterexample h (logic l) `whenFailM` property (boolean l))
  where
    printCounterexample hist' (VFalse ce) = do
      putStrLn ""
      print (toBoxDrawings cmds hist')
      putStrLn ""
      print (simplify ce)
      putStrLn ""
      case mGraphOptions of
        Nothing -> return ()
        Just gOptions -> createAndPrintDot cmds gOptions hist'
    printCounterexample _hist _
      = error "prettyParallelCommands: impossible, because `boolean l` was False."
-- | Strip a counterexample produced by 'linearise' down to the genuinely
-- failing branches. The pattern set mirrors the shapes 'linearise' can
-- produce; any other shape is a programmer error.
simplify :: Counterexample -> Counterexample
simplify (Fst ce@(AnnotateC _ BotC)) = ce
simplify (Snd ce)                    = simplify ce
simplify (ExistsC [] [])             = BotC
simplify (ExistsC _ [Fst ce])        = ce
simplify (ExistsC x (Fst ce : ces))  = ce `EitherC` simplify (ExistsC x ces)
simplify (ExistsC _ (Snd ce : _))    = simplify ce
simplify _                           = error "simplify: impossible,\
  \ because of the structure of linearise."
-- | Takes the output of parallel program runs and pretty prints a
-- counterexample if any of the runs fail. Convenience wrapper around
-- 'prettyParallelCommandsWithOpts' without graph output.
--
-- FIX: restored the missing @[(History cmd resp, Logic)]@ argument in
-- the signature; the body only partially applies
-- 'prettyParallelCommandsWithOpts' and still expects that list.
prettyParallelCommands :: (Show (cmd Concrete), Show (resp Concrete))
                       => MonadIO m
                       => Rank2.Foldable cmd
                       => ParallelCommands cmd resp
                       -> [(History cmd resp, Logic)] -- ^ Output of 'runParallelCommands'.
                       -> PropertyM m ()
prettyParallelCommands cmds = prettyParallelCommandsWithOpts cmds Nothing
-- | Takes the output of N-thread parallel program runs and pretty prints
-- a counterexample (simplified logic counterexample, optionally a dot
-- graph) if any of the runs fail.
--
-- FIX: restored the missing @[(History cmd resp, Logic)]@ argument in
-- the signature; the body's 'mapM_' is partially applied and consumes
-- exactly that list.
prettyNParallelCommandsWithOpts :: (Show (cmd Concrete), Show (resp Concrete))
                                => MonadIO m
                                => Rank2.Foldable cmd
                                => NParallelCommands cmd resp
                                -> Maybe GraphOptions
                                -> [(History cmd resp, Logic)] -- ^ Output of 'runNParallelCommands'.
                                -> PropertyM m ()
prettyNParallelCommandsWithOpts cmds mGraphOptions =
  mapM_ (\(h, l) -> printCounterexample h (logic l) `whenFailM` property (boolean l))
  where
    printCounterexample hist' (VFalse ce) = do
      putStrLn ""
      print (simplify ce)
      putStrLn ""
      case mGraphOptions of
        Nothing -> return ()
        Just gOptions -> createAndPrintDot cmds gOptions hist'
    printCounterexample _hist _
      = error "prettyNParallelCommands: impossible, because `boolean l` was False."
-- | Takes the output of N-thread parallel program runs and pretty prints
-- a counterexample if any of the runs fail. Convenience wrapper around
-- 'prettyNParallelCommandsWithOpts' without graph output.
--
-- FIX: restored the missing @[(History cmd resp, Logic)]@ argument in
-- the signature; the body only partially applies
-- 'prettyNParallelCommandsWithOpts' and still expects that list.
prettyNParallelCommands :: (Show (cmd Concrete), Show (resp Concrete))
                        => MonadIO m
                        => Rank2.Foldable cmd
                        => NParallelCommands cmd resp
                        -> [(History cmd resp, Logic)] -- ^ Output of 'runNParallelCommands'.
                        -> PropertyM m ()
prettyNParallelCommands cmds = prettyNParallelCommandsWithOpts cmds Nothing
-- | Draw an ASCII diagram of the history of a parallel program. Useful
-- for seeing how a race condition might have occurred.
toBoxDrawings :: forall cmd resp. Rank2.Foldable cmd
              => (Show (cmd Concrete), Show (resp Concrete))
              => ParallelCommands cmd resp -> History cmd resp -> Doc
toBoxDrawings (ParallelCommands prefix suffixes) = toBoxDrawings'' allVars
  where
    -- Variables bound anywhere in the program; only these are rendered
    -- on the left-hand side of an invocation.
    allVars = getAllUsedVars prefix `S.union`
                foldMap (foldMap getAllUsedVars) suffixes

    toBoxDrawings'' :: Set Var -> History cmd resp -> Doc
    toBoxDrawings'' knownVars (History h) = exec evT (fmap (out . snd) <$> Fork l p r)
      where
        -- Pid 0 ran the sequential prefix; Pids 1 and 2 are the left and
        -- right threads of the suffixes.
        (p, h') = partition (\e -> fst e == Pid 0) h
        (l, r)  = partition (\e -> fst e == Pid 1) h'

        out :: HistoryEvent cmd resp -> String
        out (Invocation cmd vars)
          | vars `S.isSubsetOf` knownVars = show (S.toList vars) ++ " ← " ++ show cmd
          | otherwise                     = show cmd
        out (Response resp) = show resp
        out (Exception err) = err

        -- Invocations open a box, responses/exceptions close it.
        toEventType :: History' cmd resp -> [(EventType, Pid)]
        toEventType = map go
          where
            go e = case e of
              (pid, Invocation _ _) -> (Open, pid)
              (pid, Response _)     -> (Close, pid)
              (pid, Exception _)    -> (Close, pid)

        evT :: [(EventType, Pid)]
        evT = toEventType (filter (\e -> fst e `Prelude.elem` map Pid [1, 2]) h)
-- | Render the history of a parallel program as a dot graph and print it
-- using the given graph options.
createAndPrintDot :: forall cmd resp t. Foldable t => Rank2.Foldable cmd
                  => (Show (cmd Concrete), Show (resp Concrete))
                  => ParallelCommandsF t cmd resp
                  -> GraphOptions
                  -> History cmd resp
                  -> IO ()
createAndPrintDot (ParallelCommands prefix suffixes) gOptions = toDotGraph allVars
  where
    -- Variables bound anywhere in the program; only these are rendered
    -- on the left-hand side of an invocation.
    allVars = getAllUsedVars prefix `S.union`
                foldMap (foldMap getAllUsedVars) suffixes

    toDotGraph :: Set Var -> History cmd resp -> IO ()
    toDotGraph knownVars (History h) = printDotGraph gOptions $ (fmap out) <$> (Rose (snd <$> prefixMessages) groupByPid)
      where
        -- Pid 0 holds the sequential prefix; the remaining events are
        -- grouped per thread, preserving their order within each thread.
        (prefixMessages, h') = partition (\e -> fst e == Pid 0) h

        alterF a Nothing   = Just [a]
        alterF a (Just ls) = Just $ a : ls

        groupByPid = foldr (\(p,e) m -> Map.alter (alterF e) p m) Map.empty h'

        out :: HistoryEvent cmd resp -> String
        out (Invocation cmd vars)
          | vars `S.isSubsetOf` knownVars = show (S.toList vars) ++ " ← " ++ show cmd
          | otherwise                     = show cmd
        out (Response resp) = " → " ++ show resp
        out (Exception err) = " → " ++ err
-- | All symbolic variables used by the commands of a sequence.
getAllUsedVars :: Rank2.Foldable cmd => Commands cmd resp -> Set Var
getAllUsedVars (Commands cmds) =
  S.fromList (concatMap (\(Command cmd _ _) -> getUsedVars cmd) cmds)
-- | Fail if some commands have not been executed; parallel analogue of
-- 'checkCommandNames', applied to the flattened program.
checkCommandNamesParallel :: forall cmd resp t. Foldable t => CommandNames cmd
                          => ParallelCommandsF t cmd resp -> Property -> Property
checkCommandNamesParallel = checkCommandNames . toSequential
-- | Coverage bookkeeping for parallel programs; applies
-- 'coverCommandNames' to the flattened program.
coverCommandNamesParallel :: forall cmd resp t. Foldable t => CommandNames cmd
                          => ParallelCommandsF t cmd resp -> Property -> Property
coverCommandNamesParallel = coverCommandNames . toSequential
-- | Tally the command names occurring in a parallel program, prefix and
-- suffixes included.
commandNamesParallel :: forall cmd resp t. Foldable t => CommandNames cmd
                     => ParallelCommandsF t cmd resp -> [(String, Int)]
commandNamesParallel cmds = commandNames (toSequential cmds)
-- | Flatten a parallel program into one sequential command list: the
-- prefix followed by every suffix in order.
toSequential :: Foldable t => ParallelCommandsF t cmd resp -> Commands cmd resp
toSequential cmds =
  mconcat (prefix cmds : concatMap toList (suffixes cmds))
|
e83f1ae84be881e81096d2573a363114fc1060cdf8dd8f16f384d319affb960a | hcarty/ezmysql | ezmysql.ml | module Datetime = CalendarLib.Calendar.Precise
(* Date and time-of-day components of the precise calendar type. *)
module Date = Datetime.Date
module Time = Datetime.Time
(* A database row: a map from column name to value. *)
module Row = struct
  include Map.Make (String)

  (* Column names of the row, in ascending name order. *)
  let keys m = List.map fst (bindings m)

  (* Values of the row, ordered by their column name. *)
  let values m = List.map snd (bindings m)
end
(* CalendarLib pretty-printers matching the calendar aliases above. *)
module Datetime_p = CalendarLib.Printer.Precise_Calendar
module Date_p = CalendarLib.Printer.Date
module Time_p = CalendarLib.Printer.Time
open Rresult
open Astring
(* Connect to the MySQL server described by [uri]: host, port, user and
   password are taken from the URI components, and the URI path (minus
   its leading '/') names the database. Raises [Mysql.Error] on failure.
   When [reconnect] is [true] (the default) the client library is asked
   to re-establish dropped connections via [Mysql.OPT_RECONNECT]. *)
let connect_exn ?(reconnect = true) uri =
  let database =
    (* The database name to use is the path without the '/' prefix *)
    match Uri.path uri with
    | "" -> None
    | p ->
      Some
        (String.trim (* Astring's [String.trim], via [open Astring] *)
           ~drop:(function
             | '/' -> true
             | _ -> false)
           p)
  in
  let options =
    if reconnect then
      Some [ Mysql.OPT_RECONNECT reconnect ]
    else
      None
  in
  Mysql.quick_connect ?options ?host:(Uri.host uri) ?database
    ?port:(Uri.port uri) ?password:(Uri.password uri) ?user:(Uri.user uri) ()
(* Like [connect_exn] but returns a [result] instead of raising:
   [Mysql.Error] is captured as an [`Msg] error. *)
let connect ?reconnect uri =
  match connect_exn ?reconnect uri with
  | dbd -> R.ok dbd
  | exception Mysql.Error msg -> R.error_msg msg
(* Re-exported [Mysql] connection helpers. *)
let disconnect = Mysql.disconnect
let ping = Mysql.ping
(* Formatters rendering OCaml values as SQL literals. Internal: string
   and blob escaping require a live [dbd] connection handle. *)
module Pp_internal = struct
  (* The SQL NULL literal. *)
  let null = Fmt.const Fmt.string "NULL"

  (* Print [Some x] with [f] and [None] as NULL. *)
  let nullable f = Fmt.option ~none:null f

  (* Escaped, quoted string literal ([Mysql.ml2rstr] needs the
     connection handle). *)
  let string dbd = Fmt.of_to_string (Mysql.ml2rstr dbd)

  (* Escaped, quoted blob literal. *)
  let blob dbd = Fmt.of_to_string (Mysql.ml2rblob dbd)

  let int = Fmt.of_to_string Mysql.ml2int

  let int64 = Fmt.of_to_string Mysql.ml642int

  let float = Fmt.of_to_string Mysql.ml2float

  (* DATETIME literal; only fields down to whole seconds are used. *)
  let datetime =
    let to_string datetime =
      Mysql.ml2datetime
        ( Datetime.year datetime,
          Datetime.month datetime |> Date.int_of_month,
          Datetime.day_of_month datetime,
          Datetime.hour datetime,
          Datetime.minute datetime,
          Datetime.second datetime )
    in
    Fmt.of_to_string to_string

  (* DATE literal. *)
  let date =
    let to_string date =
      Mysql.ml2date
        ( Date.year date,
          Date.month date |> Date.int_of_month,
          Date.day_of_month date )
    in
    Fmt.of_to_string to_string

  (* TIME literal. *)
  let time =
    let to_string time =
      Mysql.ml2time (Time.hour time, Time.minute time, Time.second time)
    in
    Fmt.of_to_string to_string

  (* Render a list as a parenthesized, comma-separated tuple. *)
  let csv_simple pp_elt fmt = Fmt.pf fmt "(%a)" (Fmt.list ~sep:Fmt.comma pp_elt)
end
module Column = struct
type ('ocaml, 'sql) conv = {
serialize : 'ocaml -> 'sql;
deserialize : 'sql -> 'ocaml;
}
type ('ocaml, 'sql) t =
| String : string * ('ocaml, (string as 'sql)) conv -> ('ocaml, 'sql) t
| Blob : string * ('ocaml, (string as 'sql)) conv -> ('ocaml, 'sql) t
| Int : string * ('ocaml, (int as 'sql)) conv -> ('ocaml, 'sql) t
| Int64 : string * ('ocaml, (int64 as 'sql)) conv -> ('ocaml, 'sql) t
| Float : string * ('ocaml, (float as 'sql)) conv -> ('ocaml, 'sql) t
| Datetime :
string * ('ocaml, (Datetime.t as 'sql)) conv
-> ('ocaml, 'sql) t
| Date : string * ('ocaml, (Date.t as 'sql)) conv -> ('ocaml, 'sql) t
| Time : string * ('ocaml, (Time.t as 'sql)) conv -> ('ocaml, 'sql) t
type packed = Pack : _ t -> packed
type spec_basic = {
name : string;
nullable : bool;
}
type spec_string_width = {
name : string;
width : int;
nullable : bool;
}
type spec_int = {
name : string;
nullable : bool;
auto_increment : bool;
}
and (_, _) spec =
| Char :
spec_string_width * ('ocaml, (string as 'sql)) conv
-> ('ocaml, 'sql) spec
| Varchar :
spec_string_width * ('ocaml, (string as 'sql)) conv
-> ('ocaml, 'sql) spec
| Binary :
spec_string_width * ('ocaml, (string as 'sql)) conv
-> ('ocaml, 'sql) spec
| Blob : spec_basic * ('ocaml, (string as 'sql)) conv -> ('ocaml, 'sql) spec
| Tiny_int : spec_int * ('ocaml, (int as 'sql)) conv -> ('ocaml, 'sql) spec
| Small_int : spec_int * ('ocaml, (int as 'sql)) conv -> ('ocaml, 'sql) spec
| Medium_int :
spec_int * ('ocaml, (int as 'sql)) conv
-> ('ocaml, 'sql) spec
| Int : spec_int * ('ocaml, (int as 'sql)) conv -> ('ocaml, 'sql) spec
| Big_int : spec_int * ('ocaml, (int64 as 'sql)) conv -> ('ocaml, 'sql) spec
| Float : spec_basic * ('ocaml, (float as 'sql)) conv -> ('ocaml, 'sql) spec
| Datetime :
spec_basic * ('ocaml, (Datetime.t as 'sql)) conv
-> ('ocaml, 'sql) spec
| Date : spec_basic * ('ocaml, (Date.t as 'sql)) conv -> ('ocaml, 'sql) spec
| Time : spec_basic * ('ocaml, (Time.t as 'sql)) conv -> ('ocaml, 'sql) spec
type packed_spec = Pack : _ spec -> packed_spec
(* Equality of packed specs: same constructor AND structurally equal spec
   records (name/width/nullability/auto-increment). The conversion
   closures are deliberately ignored — functions are not comparable. *)
let equal_packed_spec (a : packed_spec) (b : packed_spec) =
  match (a, b) with
  | (Pack (Char (a_spec, _)), Pack (Char (b_spec, _))) -> a_spec = b_spec
  | (Pack (Varchar (a_spec, _)), Pack (Varchar (b_spec, _))) ->
    a_spec = b_spec
  | (Pack (Binary (a_spec, _)), Pack (Binary (b_spec, _))) -> a_spec = b_spec
  | (Pack (Blob (a_spec, _)), Pack (Blob (b_spec, _))) -> a_spec = b_spec
  | (Pack (Tiny_int (a_spec, _)), Pack (Tiny_int (b_spec, _))) ->
    a_spec = b_spec
  | (Pack (Small_int (a_spec, _)), Pack (Small_int (b_spec, _))) ->
    a_spec = b_spec
  | (Pack (Medium_int (a_spec, _)), Pack (Medium_int (b_spec, _))) ->
    a_spec = b_spec
  | (Pack (Int (a_spec, _)), Pack (Int (b_spec, _))) -> a_spec = b_spec
  | (Pack (Big_int (a_spec, _)), Pack (Big_int (b_spec, _))) ->
    a_spec = b_spec
  | (Pack (Float (a_spec, _)), Pack (Float (b_spec, _))) -> a_spec = b_spec
  | (Pack (Datetime (a_spec, _)), Pack (Datetime (b_spec, _))) ->
    a_spec = b_spec
  | (Pack (Date (a_spec, _)), Pack (Date (b_spec, _))) -> a_spec = b_spec
  | (Pack (Time (a_spec, _)), Pack (Time (b_spec, _))) -> a_spec = b_spec
  (* Mismatched constructors are never equal; enumerated one-by-one (rather
     than a single wildcard) so the compiler keeps checking exhaustiveness
     when constructors are added. *)
  | (Pack (Char _), _) -> false
  | (Pack (Varchar _), _) -> false
  | (Pack (Binary _), _) -> false
  | (Pack (Blob _), _) -> false
  | (Pack (Tiny_int _), _) -> false
  | (Pack (Small_int _), _) -> false
  | (Pack (Medium_int _), _) -> false
  | (Pack (Int _), _) -> false
  | (Pack (Big_int _), _) -> false
  | (Pack (Float _), _) -> false
  | (Pack (Datetime _), _) -> false
  | (Pack (Date _), _) -> false
  | (Pack (Time _), _) -> false
module Conv = struct
  (* Helpers for building serialize/deserialize pairs. *)
  type ('ocaml, 'sql) t = ('ocaml, 'sql) conv

  let make ~serialize ~deserialize = { serialize; deserialize }

  (* No-op conversion: the OCaml and SQL representations coincide. *)
  let identity = make ~serialize:(fun v -> v) ~deserialize:(fun v -> v)

  (* Booleans travel as integers: [true] -> 1, [false] -> 0; any non-zero
     value deserializes to [true]. *)
  let bool =
    make
      ~serialize:(fun b ->
        if b then
          1
        else
          0)
      ~deserialize:(fun n -> n <> 0)
end
(* Collapse a full spec to the lighter runtime column {!t}, keeping only
   the name and the conversion pair (width/nullability are dropped). *)
let of_spec (type o s) (spec : (o, s) spec) : (o, s) t =
  match spec with
  | Char ({ name; _ }, conv) -> String (name, conv)
  | Varchar ({ name; _ }, conv) -> String (name, conv)
  | Binary ({ name; _ }, conv) -> String (name, conv)
  | Blob ({ name; _ }, conv) -> Blob (name, conv)
  | Tiny_int ({ name; _ }, conv) -> Int (name, conv)
  | Small_int ({ name; _ }, conv) -> Int (name, conv)
  | Medium_int ({ name; _ }, conv) -> Int (name, conv)
  | Int ({ name; _ }, conv) -> Int (name, conv)
  | Big_int ({ name; _ }, conv) -> Int64 (name, conv)
  | Float ({ name; _ }, conv) -> Float (name, conv)
  | Datetime ({ name; _ }, conv) -> Datetime (name, conv)
  | Date ({ name; _ }, conv) -> Date (name, conv)
  | Time ({ name; _ }, conv) -> Time (name, conv)
(* The SQL type text for a column spec, as it appears in "create table"
   (e.g. "varchar(255)", "bigint"). *)
let spec_to_sql_type (type o s) (spec : (o, s) spec) =
  match spec with
  | Char ({ width; _ }, _) -> Fmt.strf "char(%d)" width
  | Varchar ({ width; _ }, _) -> Fmt.strf "varchar(%d)" width
  | Binary ({ width; _ }, _) -> Fmt.strf "binary(%d)" width
  | Blob _ -> "longblob"
  | Tiny_int _ -> "tinyint"
  | Small_int _ -> "smallint"
  | Medium_int _ -> "mediumint"
  | Int _ -> "int"
  | Big_int _ -> "bigint"
  | Float _ -> "float"
  | Datetime _ -> "datetime"
  | Date _ -> "date"
  | Time _ -> "time"
(* Column-attribute suffix for "create table": " not null" when the
   column is not nullable, then " auto_increment" when requested. Both
   empty gives "". *)
let make_spec_string ~nullable ~auto_increment =
  let null_part =
    if nullable then
      ""
    else
      " not null"
  in
  let auto_part =
    if auto_increment then
      " auto_increment"
    else
      ""
  in
  null_part ^ auto_part
(* Attribute suffix (" not null"/" auto_increment") for a column spec.
   Only the integer specs can carry auto_increment. *)
let spec_to_spec_string (type o s) (spec : (o, s) spec) =
  match spec with
  | Char ({ nullable; _ }, _) ->
    make_spec_string ~nullable ~auto_increment:false
  | Varchar ({ nullable; _ }, _) ->
    make_spec_string ~nullable ~auto_increment:false
  | Binary ({ nullable; _ }, _) ->
    make_spec_string ~nullable ~auto_increment:false
  | Blob ({ nullable; _ }, _) ->
    make_spec_string ~nullable ~auto_increment:false
  | Tiny_int ({ nullable; auto_increment; _ }, _) ->
    make_spec_string ~nullable ~auto_increment
  | Small_int ({ nullable; auto_increment; _ }, _) ->
    make_spec_string ~nullable ~auto_increment
  | Medium_int ({ nullable; auto_increment; _ }, _) ->
    make_spec_string ~nullable ~auto_increment
  | Int ({ nullable; auto_increment; _ }, _) ->
    make_spec_string ~nullable ~auto_increment
  | Big_int ({ nullable; auto_increment; _ }, _) ->
    make_spec_string ~nullable ~auto_increment
  | Float ({ nullable; _ }, _) ->
    make_spec_string ~nullable ~auto_increment:false
  | Datetime ({ nullable; _ }, _) ->
    make_spec_string ~nullable ~auto_increment:false
  | Date ({ nullable; _ }, _) ->
    make_spec_string ~nullable ~auto_increment:false
  | Time ({ nullable; _ }, _) ->
    make_spec_string ~nullable ~auto_increment:false
(* Spec constructors. Columns are non-nullable by default; integer
   columns additionally default to no auto-increment. *)
let make_char name width ?(nullable = false) conv =
  Char ({ name; width; nullable }, conv)
let make_varchar name width ?(nullable = false) conv =
  Varchar ({ name; width; nullable }, conv)
let make_binary name width ?(nullable = false) conv =
  Binary ({ name; width; nullable }, conv)
let make_blob name ?(nullable = false) conv = Blob ({ name; nullable }, conv)
let make_tiny_int name ?(nullable = false) ?(auto_increment = false) conv =
  Tiny_int ({ name; nullable; auto_increment }, conv)
let make_small_int name ?(nullable = false) ?(auto_increment = false) conv =
  Small_int ({ name; nullable; auto_increment }, conv)
let make_medium_int name ?(nullable = false) ?(auto_increment = false) conv =
  Medium_int ({ name; nullable; auto_increment }, conv)
let make_int name ?(nullable = false) ?(auto_increment = false) conv =
  Int ({ name; nullable; auto_increment }, conv)
let make_big_int name ?(nullable = false) ?(auto_increment = false) conv =
  Big_int ({ name; nullable; auto_increment }, conv)
let make_float name ?(nullable = false) conv = Float ({ name; nullable }, conv)
let make_datetime name ?(nullable = false) conv =
  Datetime ({ name; nullable }, conv)
let make_date name ?(nullable = false) conv = Date ({ name; nullable }, conv)
let make_time name ?(nullable = false) conv = Time ({ name; nullable }, conv)
(* Lightweight runtime-column ({!t}) constructors. *)
let string name conv : _ t = String (name, conv)
let blob name conv : _ t = Blob (name, conv)
let int name conv : _ t = Int (name, conv)
let int64 name conv : _ t = Int64 (name, conv)
let float name conv : _ t = Float (name, conv)
let datetime name conv : _ t = Datetime (name, conv)
let date name conv : _ t = Date (name, conv)
let time name conv : _ t = Time (name, conv)
(* The SQL name of a column. *)
let name (type o s) (c : (o, s) t) =
  match c with
  | String (name, _) -> name
  | Blob (name, _) -> name
  | Int (name, _) -> name
  | Int64 (name, _) -> name
  | Float (name, _) -> name
  | Datetime (name, _) -> name
  | Date (name, _) -> name
  | Time (name, _) -> name
(* Print [x] as a SQL literal for column [c]. [dbd] is required so that
   string/blob payloads are escaped through the live connection. *)
let pp (type o s) dbd (c : (o, s) t) fmt (x : o) =
  match c with
  | String (_, conv) -> Pp_internal.string dbd fmt (conv.serialize x)
  | Blob (_, conv) -> Pp_internal.blob dbd fmt (conv.serialize x)
  | Int (_, conv) -> Pp_internal.int fmt (conv.serialize x)
  | Int64 (_, conv) -> Pp_internal.int64 fmt (conv.serialize x)
  | Float (_, conv) -> Pp_internal.float fmt (conv.serialize x)
  | Datetime (_, conv) -> Pp_internal.datetime fmt (conv.serialize x)
  | Date (_, conv) -> Pp_internal.date fmt (conv.serialize x)
  | Time (_, conv) -> Pp_internal.time fmt (conv.serialize x)
(* Same, starting from a full spec. *)
let pp_spec dbd spec fmt x = pp dbd (of_spec spec) fmt x
(* Print only the column's name. *)
let pp_name fmt c = Fmt.of_to_string name fmt c
let pp_spec_name fmt c = pp_name fmt (of_spec c)
(* Spec -> runtime-column conversions over the packed wrappers. *)
let of_packed_spec (Pack spec : packed_spec) : packed = Pack (of_spec spec)
let name_of_spec spec = name (of_spec spec)
let name_of_packed_spec (Pack spec) = name (of_spec spec)
(* A copy of [spec] renamed to [name]. Integer specs have their
   auto_increment flag dropped: a renamed copy (e.g. a column referencing
   another table's id) must not claim the original's auto-increment
   property. *)
let with_name (type o s) name (spec : (o, s) spec) : (o, s) spec =
  let strip_auto_increment s = { s with auto_increment = false } in
  match spec with
  | Char (s, conv) -> Char ({ s with name }, conv)
  | Varchar (s, conv) -> Varchar ({ s with name }, conv)
  | Binary (s, conv) -> Binary ({ s with name }, conv)
  | Blob (s, conv) -> Blob ({ s with name }, conv)
  | Tiny_int (s, conv) ->
    Tiny_int ({ (strip_auto_increment s) with name }, conv)
  | Small_int (s, conv) ->
    Small_int ({ (strip_auto_increment s) with name }, conv)
  | Medium_int (s, conv) ->
    Medium_int ({ (strip_auto_increment s) with name }, conv)
  | Int (s, conv) -> Int ({ (strip_auto_increment s) with name }, conv)
  | Big_int (s, conv) -> Big_int ({ (strip_auto_increment s) with name }, conv)
  | Float (s, conv) -> Float ({ s with name }, conv)
  | Datetime (s, conv) -> Datetime ({ s with name }, conv)
  | Date (s, conv) -> Date ({ s with name }, conv)
  | Time (s, conv) -> Time ({ s with name }, conv)
end
module Field = struct
  (* A single SQL value, indexed by its OCaml representation. *)
  type _ t =
    | String : string -> string t
    | Blob : string -> string t
    | Int : int -> int t
    | Int64 : int64 -> int64 t
    | Float : float -> float t
    | Datetime : Datetime.t -> Datetime.t t
    | Date : Date.t -> Date.t t
    | Time : Time.t -> Time.t t
  (* Existential wrapper hiding the value's type. *)
  type packed = Pack : _ t -> packed
  (* Decoding error: a MySQL column type this library does not handle. *)
  type error = Unhandled_type of Mysql.dbty
  (* Print a field as a SQL literal; [dbd] is needed to escape
     string/blob payloads through the connection. *)
  let pp (type v) dbd fmt (field : v t) =
    match field with
    | String s -> Pp_internal.string dbd fmt s
    | Blob b -> Pp_internal.blob dbd fmt b
    | Int i -> Pp_internal.int fmt i
    | Int64 i -> Pp_internal.int64 fmt i
    | Float f -> Pp_internal.float fmt f
    | Datetime c -> Pp_internal.datetime fmt c
    | Date d -> Pp_internal.date fmt d
    | Time t -> Pp_internal.time fmt t
  (* NULL-aware and packed variants of {!pp}. *)
  let pp_opt dbd fmt = function
    | Some fld -> pp dbd fmt fld
    | None -> Pp_internal.null fmt ()
  let pp_packed dbd fmt field =
    match field with
    | Pack fld -> pp dbd fmt fld
  let pp_packed_opt dbd fmt field =
    match field with
    | Some packed -> pp_packed dbd fmt packed
    | None -> Pp_internal.null fmt ()
  (* Raw, unquoted, unescaped text of a field — intended for prepared
     statement parameters, never for splicing into SQL directly. *)
  let to_string_unquoted = function
    | Pack (String s) -> s
    | Pack (Blob s) -> s
    | Pack (Int i) -> Mysql.ml2int i
    | Pack (Int64 i) -> Mysql.ml642int i
    | Pack (Float f) -> Mysql.ml2float f
    | Pack (Datetime c) -> Datetime_p.to_string c
    | Pack (Date d) -> Date_p.to_string d
    | Pack (Time t) -> Time_p.to_string t
  let opt_to_string_unquoted = function
    | None -> "NULL"
    | Some x -> to_string_unquoted x
  (* Human-readable constructor name of a MySQL column type, for error
     messages. *)
  let string_of_dbty = function
    | Mysql.IntTy -> "IntTy"
    | Mysql.FloatTy -> "FloatTy"
    | Mysql.StringTy -> "StringTy"
    | Mysql.SetTy -> "SetTy"
    | Mysql.EnumTy -> "EnumTy"
    | Mysql.DateTimeTy -> "DateTimeTy"
    | Mysql.DateTy -> "DateTy"
    | Mysql.TimeTy -> "TimeTy"
    | Mysql.YearTy -> "YearTy"
    | Mysql.TimeStampTy -> "TimeStampeTy"
    | Mysql.UnknownTy -> "UnknownTy"
    | Mysql.Int64Ty -> "Int64Ty"
    | Mysql.BlobTy -> "BlobTy"
    | Mysql.DecimalTy -> "DecimalTy"
  let error_to_string = function
    | Unhandled_type typ -> Fmt.strf "Unhandled_type %s" (string_of_dbty typ)
  (* The Mysql bindings return calendar values as tuples; repack them. *)
  let datetime_of_tuple (y, m, d, hh, mm, ss) = Datetime.make y m d hh mm ss
  let date_of_tuple (y, m, d) = Date.make y m d
  let time_of_tuple (hh, mm, ss) = Time.make hh mm ss
  (* Decode a raw result value according to the column type reported by
     Mysql. Set/enum/year/timestamp/decimal columns are not handled and
     yield [`Mysql_field (Unhandled_type _)]. *)
  let of_mysql_type typ s =
    match typ with
    | Mysql.IntTy -> R.ok @@ Pack (Int (Mysql.int2ml s))
    | Mysql.Int64Ty -> R.ok @@ Pack (Int64 (Mysql.int642ml s))
    | Mysql.FloatTy -> R.ok @@ Pack (Float (Mysql.float2ml s))
    | Mysql.StringTy -> R.ok @@ Pack (String (Mysql.str2ml s))
    | Mysql.BlobTy -> R.ok @@ Pack (Blob (Mysql.blob2ml s))
    | Mysql.DateTimeTy ->
      R.ok @@ Pack (Datetime (Mysql.datetime2ml s |> datetime_of_tuple))
    | Mysql.DateTy -> R.ok @@ Pack (Date (Mysql.date2ml s |> date_of_tuple))
    | Mysql.TimeTy -> R.ok @@ Pack (Time (Mysql.time2ml s |> time_of_tuple))
    | ( Mysql.SetTy | Mysql.EnumTy | Mysql.YearTy | Mysql.TimeStampTy
      | Mysql.UnknownTy | Mysql.DecimalTy ) as typ ->
      R.error (`Mysql_field (Unhandled_type typ))
  (* Serialize an OCaml value into a field via its column spec. *)
  let of_column_spec (type o s) (spec : (o, s) Column.spec) (v : o) : s t =
    match Column.of_spec spec with
    | Column.String (_, conv) -> String (conv.serialize v)
    | Column.Blob (_, conv) -> Blob (conv.serialize v)
    | Column.Int (_, conv) -> Int (conv.serialize v)
    | Column.Int64 (_, conv) -> Int64 (conv.serialize v)
    | Column.Float (_, conv) -> Float (conv.serialize v)
    | Column.Datetime (_, conv) -> Datetime (conv.serialize v)
    | Column.Date (_, conv) -> Date (conv.serialize v)
    | Column.Time (_, conv) -> Time (conv.serialize v)
  (* Recover an OCaml value from a packed field; [None] when the packed
     constructor does not match the column's SQL representation. *)
  let unpack (type o s) (column : (o, s) Column.t) packed : o option =
    match packed with
    | Pack field ->
      ( match (column, field) with
      | (Column.String (_, conv), String v) -> Some (conv.deserialize v)
      | (Column.Blob (_, conv), Blob v) -> Some (conv.deserialize v)
      | (Column.Int (_, conv), Int v) -> Some (conv.deserialize v)
      | (Column.Int64 (_, conv), Int64 v) -> Some (conv.deserialize v)
      | (Column.Float (_, conv), Float v) -> Some (conv.deserialize v)
      | (Column.Datetime (_, conv), Datetime v) -> Some (conv.deserialize v)
      | (Column.Date (_, conv), Date v) -> Some (conv.deserialize v)
      | (Column.Time (_, conv), Time v) -> Some (conv.deserialize v)
      | _ -> None
      )
end
(* A SQL statement string, phantom-tagged: [`Run] statements return no
   rows, [`Get] statements return a result set. *)
type 'kind sql = string constraint 'kind = [< `Run | `Get ]
(* A database row: column name -> optional (NULL-able) packed field. *)
type row = Field.packed option Row.t
(* Build a row from (column name, value) association pairs. *)
let row_of_list l = List.to_seq l |> Row.of_seq
(* Pair a column's SQL name with its packed value; [None] maps to NULL. *)
let pack_column_opt spec vo =
  let name = Column.name (Column.of_spec spec) in
  let packed =
    Option.map (fun v -> Field.Pack (Field.of_column_spec spec v)) vo
  in
  (name, packed)
(* Non-NULL variant of {!pack_column_opt}. *)
let pack_column spec v = pack_column_opt spec (Some v)
(* Look up [spec]'s column in [row].
   [Error] when the column is absent or its stored type does not match
   [spec]; [Ok None] when the column is present but NULL. *)
let find_column spec (row : row) =
  let column = Column.of_spec spec in
  let name = Column.name column in
  match Row.find_opt name row with
  | None -> R.error_msgf "no column %s in row" name
  | Some None -> R.ok None
  | Some (Some packed) ->
    ( match Field.unpack column packed with
    | None -> R.error_msgf "field type mismatch for %s in row" name
    | Some v -> R.ok (Some v)
    )
(* Exception-raising variant of {!find_column}: [Invalid_argument] on
   lookup/type errors, [Not_found] when the value is NULL. *)
let get_column spec row =
  match find_column spec row with
  | Error (`Msg msg) -> invalid_arg msg
  | Ok None -> raise Not_found
  | Ok (Some v) -> v
(* Format arbitrary SQL into a [`Run] (no result set) or [`Get]
   (result set) tagged statement string. *)
let make_run fmt = Fmt.strf fmt
let make_get fmt = Fmt.strf fmt
(* Build an "insert into <table> (cols) values (vals) <suffix>"
   statement from a row; [fmt] supplies the (possibly empty) trailing
   clause. Value quoting/escaping is done by the packed-field printers
   using [dbd]. *)
let insert' dbd ~into:table fields fmt =
  let columns = Row.keys fields in
  let values = Row.values fields in
  Fmt.kstrf
    (fun s ->
      make_run "insert into %s %a values %a %s" table
        (Pp_internal.csv_simple Fmt.string)
        columns
        (Pp_internal.csv_simple (Field.pp_packed_opt dbd))
        values s)
    fmt
(* "col = values(col)": refresh [column] from the attempted insert in an
   "on duplicate key update" clause. *)
let pp_update fmt column = Fmt.pf fmt "%s = values(%s)" column column
(* Build the SQL inserting [row] into [into], optionally with an
   "on duplicate key update" clause.
   [on_duplicate_key_update] selects which columns are refreshed when the
   insert hits an existing key:
   - [`All]: every column present in [row];
   - [`Columns cs]: exactly the named columns;
   - [`Except cs]: every column of [row] EXCEPT the named ones;
   - [`With_id (id, cs)]: like [`Columns cs], plus an
     [id = last_insert_id(id)] term so the connection's last-insert-id
     reflects this row even when the update path is taken. *)
let insert ?on_duplicate_key_update dbd ~into row =
  match on_duplicate_key_update with
  | None -> insert' dbd ~into row ""
  | Some update ->
    let (id_column, columns) =
      match update with
      | `All -> (None, Row.keys row)
      | `Columns columns -> (None, columns)
      | `Except columns ->
        (* Keep the row columns that are NOT in the exclusion list.
           (Bug fix: the filter previously kept only the excluded
           columns, inverting the meaning of [`Except].) *)
        ( None,
          List.filter (fun name -> not (List.mem name columns)) (Row.keys row)
        )
      | `With_id (id_column, columns) -> (Some id_column, columns)
    in
    let id_column_sql =
      match id_column with
      | None -> ""
      | Some column ->
        (* Only emit a separating comma when column updates precede the
           id term. *)
        let pp_sep =
          match columns with
          | [] -> Fmt.nop
          | _ -> Fmt.comma
        in
        (* If a column is specified, make sure last_insert_id identifies that
           value once/if this insert completes successfully. *)
        Fmt.strf "%a%s = last_insert_id(%s)" pp_sep () column column
    in
    insert' dbd ~into row "on duplicate key update %a%s"
      (Fmt.list ~sep:Fmt.comma pp_update)
      columns id_column_sql
(* [replace] is a deliberately unusable placeholder; use
   [insert ~on_duplicate_key_update] instead (see the .mli deprecation). *)
let replace = `Use_insert_on_duplicate_key_update
(* "update <table> <clauses>" *)
let update table fmt = Fmt.kstrf (fun s -> Fmt.strf "update %s %s" table s) fmt
(* "delete from <table> <clauses>" *)
let delete ~from:table fmt =
  Fmt.kstrf (fun s -> Fmt.strf "delete from %s %s" table s) fmt
(* "select <columns> from <table> <clauses>" *)
let select columns ~from:table fmt =
  Fmt.kstrf
    (fun s ->
      Fmt.strf "select %a from %s %s"
        Fmt.(list ~sep:comma string)
        columns table s)
    fmt
(* Like {!Field.of_mysql_type} but raising [Invalid_argument] on
   unhandled column types. *)
let field_of_mysql_type_exn typ s =
  match Field.of_mysql_type typ s with
  | Ok f -> f
  | Error (`Mysql_field e) -> invalid_arg (Field.error_to_string e)
(* Decode one raw result row into a {!row}, pairing each value with the
   name/type from the result metadata.
   @raise Invalid_argument on a metadata/row length mismatch or an
   unhandled column type. *)
let parse_row columns row =
  let num_columns = Array.length columns in
  if num_columns <> Array.length row then
    invalid_arg "mysql: metadata column count mismatch";
  Array.mapi
    (fun i col ->
      ( Mysql.(col.name),
        Option.map (field_of_mysql_type_exn Mysql.(col.ty)) row.(i) ))
    columns
  |> Array.to_seq
  |> Row.of_seq
(* Drain [result], decoding every remaining row in fetch order. *)
let to_rows columns result =
  let rec drain acc =
    match Mysql.fetch result with
    | Some raw -> drain (parse_row columns raw :: acc)
    | None -> List.rev acc
  in
  drain []
(* Execute [sql]: [Some rows] when it produced a result set, [None]
   otherwise. May raise [Mysql.Error]. *)
let exec dbd sql =
  let result = Mysql.exec dbd sql in
  let columns = Mysql.fetch_fields result in
  match columns with
  | None -> None
  | Some columns -> Some (to_rows columns result)
(* Run a statement that must return no rows; fails if rows came back. *)
let run_exn dbd sql =
  match exec dbd sql with
  | None -> ()
  | Some _rows -> Fmt.failwith "Ezmysql.run: unexpected results from %s" sql
(* Result-returning variant of {!run_exn}, mapping both [Failure] and
   [Mysql.Error] to [`Msg]. *)
let run dbd sql =
  match run_exn dbd sql with
  | () -> Ok ()
  | exception Failure msg -> R.error_msg msg
  | exception Mysql.Error msg ->
    R.error_msgf "Ezmysql.run: %a from %s" Fmt.(brackets string) msg sql
(* Run a statement that must return a result set; fails otherwise. *)
let get_exn dbd sql =
  match exec dbd sql with
  | None -> Fmt.failwith "Ezmysql.get: empty result from %s" sql
  | Some rows -> rows
(* Result-returning variant of {!get_exn}, mapping both [Failure] and
   [Mysql.Error] to [`Msg]. *)
let get dbd sql =
  match get_exn dbd sql with
  | rows -> Ok rows
  | exception Failure msg -> R.error_msg msg
  | exception Mysql.Error msg ->
    R.error_msgf "Ezmysql.get: %a from %s" Fmt.(brackets string) msg sql
(* Extract [column]'s value from [row]: [None] when the stored value is
   NULL.
   @raise Invalid_argument when the column is absent or its stored type
   does not match [column]. *)
let get_v (type o) (column : (o, _) Column.t) (row : row) : o option =
  let name = Column.name column in
  match Row.find_opt name row with
  | None -> Fmt.invalid_arg "Ezmysql.get_v: No column %s in row" name
  | Some None -> None
  | Some (Some packed_field) ->
    ( match Field.unpack column packed_field with
    | Some _ as s -> s
    | None ->
      Fmt.invalid_arg
        "Ezmysql.get_v: column %s's type does not match what was expected"
        name
    )
(* Stack-safe [List.map] (rev_map then rev). *)
let list_map f l = List.rev_map f l |> List.rev
(* Project result rows onto 1..5 typed columns; each entry is [None]
   when the stored value is NULL (see {!get_v}). *)
let to_column rows column = list_map (fun row -> get_v column row) rows
let to_column2 rows (column1, column2) =
  list_map (fun row -> (get_v column1 row, get_v column2 row)) rows
let to_column3 rows (column1, column2, column3) =
  list_map
    (fun row -> (get_v column1 row, get_v column2 row, get_v column3 row))
    rows
let to_column4 rows (column1, column2, column3, column4) =
  list_map
    (fun row ->
      ( get_v column1 row,
        get_v column2 row,
        get_v column3 row,
        get_v column4 row ))
    rows
let to_column5 rows (column1, column2, column3, column4, column5) =
  list_map
    (fun row ->
      ( get_v column1 row,
        get_v column2 row,
        get_v column3 row,
        get_v column4 row,
        get_v column5 row ))
    rows
(* Transaction primitives; each statement has a result-returning and an
   exception-raising variant. *)
let start_transaction_sql = "start transaction"
let start_transaction dbd = run dbd start_transaction_sql
let start_transaction_exn dbd = run_exn dbd start_transaction_sql
let commit_sql = "commit"
let commit dbd = run dbd commit_sql
let commit_exn dbd = run_exn dbd commit_sql
let rollback_sql = "rollback"
let rollback dbd = run dbd rollback_sql
let rollback_exn dbd = run_exn dbd rollback_sql
(* Run [f] inside a transaction: commit on [Ok], roll back on [Error] or
   on an exception (which is re-raised after the rollback). Note that a
   failing commit/rollback replaces [f]'s own outcome with that error. *)
let with_transaction dbd f =
  let ( >>= ) = R.( >>= ) in
  start_transaction dbd >>= fun () ->
  match f () with
  | Ok _ as o -> commit dbd >>= fun () -> o
  | Error _ as e -> rollback dbd >>= fun () -> e
  | exception exn -> rollback dbd >>= fun () -> raise exn
(* Exception-based variant: commit when [f] returns, roll back and
   re-raise when it raises. *)
let with_transaction_exn dbd f =
  start_transaction_exn dbd;
  match f () with
  | v ->
    commit_exn dbd;
    v
  | exception exn ->
    rollback_exn dbd;
    raise exn
module Prepared = struct
  (* A prepared statement. The source SQL and connection are kept so the
     statement can be re-created after a connection reset ({!remake});
     the phantom parameter mirrors the top-level ['kind sql] tag. *)
  type 'a t = {
    dbd : Mysql.dbd;
    sql : string;
    mutable statement : Mysql.Prepared.stmt;
  }
  constraint 'a = [< `Run | `Get ]
  let make dbd sql = { dbd; sql; statement = Mysql.Prepared.create dbd sql }
  (* Result-returning constructors for run-only / row-returning SQL. *)
  let make_run dbd sql =
    try Ok (make dbd sql) with
    | Mysql.Error msg -> R.error_msg msg
  let make_get dbd sql =
    try Ok (make dbd sql) with
    | Mysql.Error msg -> R.error_msg msg
  (* Statement parameters travel as raw (unquoted) strings. *)
  let prepare_parameters fields =
    Array.map Field.opt_to_string_unquoted (Array.of_list fields)
  (* Drain a result set. Result-typed (unlike the top-level [to_rows])
     because per-row decoding errors are reported rather than raised. *)
  let to_rows columns result =
    let rec loop rows =
      match Mysql.Prepared.fetch result with
      | None -> Ok (List.rev rows)
      | Some row -> loop (parse_row columns row :: rows)
      | exception Mysql.Error msg -> R.error_msg msg
    in
    loop []
  (* Execute with [fields] bound as parameters. [Ok None] when no result
     set was produced, otherwise [Ok (Some rows)] where [rows] is itself
     a result of decoding. *)
  let exec ps fields =
    let ( >>= ) = R.( >>= ) in
    let params = prepare_parameters fields in
    ( match Mysql.Prepared.execute ps.statement params with
    | x -> Ok x
    | exception Mysql.Error msg ->
      R.error_msgf "While executing a prepared statement: %s" msg
    )
    >>= fun result ->
    let columns =
      Mysql.Prepared.result_metadata ps.statement |> Mysql.fetch_fields
    in
    match columns with
    | None -> Ok None
    | Some columns -> Ok (Some (to_rows columns result))
  (* Execute a statement that must return no rows. *)
  let run ps fields =
    match exec ps fields with
    | Error _ as e -> e
    | Ok None -> Ok ()
    | Ok (Some _rows) ->
      R.error_msgf "Ezmysql.Prepared.run: unexpected results from %s" ps.sql
    | exception Mysql.Error msg ->
      R.error_msgf "Ezmysql.Prepared.run: %a from %s"
        Fmt.(brackets string)
        msg ps.sql
  (* Execute a statement that must return a result set. *)
  let get ps fields =
    match exec ps fields with
    | Error _ as e -> e
    | Ok None ->
      R.error_msgf "Ezmysql.Prepared.get: empty result from %s" ps.sql
    | Ok (Some rows) -> rows
    | exception Mysql.Error msg ->
      R.error_msgf "Ezmysql.Prepared.get: %a from %s"
        Fmt.(brackets string)
        msg ps.sql
  (* Re-create the underlying statement after a (presumed) connection
     failure; pings first so the connection is re-established. *)
  let remake ps =
    (* Assume things were broken and we need to remake the statement *)
    try
      Mysql.ping ps.dbd;
      ps.statement <- Mysql.Prepared.create ps.dbd ps.sql;
      Ok ()
    with
    | Mysql.Error msg -> R.error_msg msg
  let close ps = Mysql.Prepared.close ps.statement
end
module Table = struct
  (* A table definition: columns plus keys/indices/foreign keys, and the
     list of tables it depends on through its foreign keys ([deps]). *)
  type t = {
    name : string;
    columns : Column.packed_spec list;
    primary_key : Column.packed_spec list;
    indices : (string * index_field list) list;
    unique_keys : (string * Column.packed_spec list) list;
    foreign_keys : foreign_key list;
    deps : t list;
  }
  and foreign_key = {
    key_name : string option;
    keys : foreign_key_mapping;
    on_update : foreign_key_action;
    on_delete : foreign_key_action;
  }
  and foreign_key_mapping = {
    foreign_table : t;
    key_mapping : key_mapping list;
  }
  and foreign_key_action =
    | Restrict
    | Cascade
    | Set_null
    | No_action
  (* One local-column/remote-column pair of a foreign key; both sides
     must share the same SQL representation. *)
  and key_mapping =
    | Key : {
        local : ('ocaml, 'sql) Column.spec;
        remote : ('ocaml, 'sql) Column.spec;
      }
        -> key_mapping
  (* An index entry: a whole column, or a width-limited prefix of it. *)
  and index_field =
    | Column : (_, _) Column.spec -> index_field
    | Prefix : {
        column : (_, _) Column.spec;
        width : int;
      }
        -> index_field
  let column_of_index_field : index_field -> Column.packed_spec = function
    | Column spec -> Pack spec
    | Prefix { column = spec; _ } -> Pack spec
  let name table = table.name
  (* [true] iff every column of [test] appears in [truth]
     (per {!Column.equal_packed_spec}). *)
  let mem_columns ~truth ~test =
    List.fold_left
      (fun ok test_c ->
        ok
        && List.exists
             (fun truth_c -> Column.equal_packed_spec test_c truth_c)
             truth)
      true test
  let mem_columns_multiple ~truth ~tests =
    List.fold_left (fun ok test -> ok && mem_columns ~truth ~test) true tests
  (* Check that the local side of every foreign key exists in [truth]. *)
  let mem_columns_fk ~truth ~tests =
    List.fold_left
      (fun ok fk ->
        let local_columns =
          List.map
            (fun (Key { local; _ }) -> (Pack local : Column.packed_spec))
            fk.keys.key_mapping
        in
        ok && mem_columns ~truth ~test:local_columns)
      true tests
  (* Build a foreign key, validating that the remote columns exist in
     [foreign_table].
     @raise Invalid_argument on an empty mapping or unknown columns. *)
  let make_foreign_key ?key_name foreign_table key_mapping ~on_update ~on_delete
      =
    match key_mapping with
    | [] ->
      invalid_arg
        "Ezmysql.Table.make_foreign_key requires a non-empty list of fields"
    | _ ->
      let everything_ok =
        let remote_columns =
          List.map (fun (Key { remote; _ }) -> Column.Pack remote) key_mapping
        in
        mem_columns ~truth:foreign_table.columns ~test:remote_columns
      in
      if not everything_ok then
        invalid_arg
        @@ Fmt.strf
             "Ezmysql.Table.make_foreign_key refers to columns absent from %s"
             foreign_table.name;
      { key_name; keys = { foreign_table; key_mapping }; on_update; on_delete }
  (* Build a table definition, validating that every key/index/foreign
     key refers to a column of [columns].
     @raise Invalid_argument on an empty name/column list or on a
     reference to an absent column. *)
  let make
      ?(primary_key = [])
      ?(indices = [])
      ?(unique_keys = [])
      ?(foreign_keys = [])
      name
      columns =
    if String.is_empty name then
      invalid_arg "Ezmysql.Table.make requires a non-empty table name";
    ( match columns with
    | [] -> invalid_arg "Ezmysql.Table.make requires a non-empty column list"
    | _ -> ()
    );
    let deps = List.map (fun fk -> fk.keys.foreign_table) foreign_keys in
    let everything_ok =
      let indices_columns =
        List.map
          (fun (_name, index) ->
            List.map
              (fun index_field -> column_of_index_field index_field)
              index)
          indices
      in
      let unique_keys_columns =
        List.map (fun (_name, columns) -> columns) unique_keys
      in
      mem_columns ~truth:columns ~test:primary_key
      && mem_columns_multiple ~truth:columns ~tests:indices_columns
      && mem_columns_multiple ~truth:columns ~tests:unique_keys_columns
      && mem_columns_fk ~truth:columns ~tests:foreign_keys
    in
    if not everything_ok then
      invalid_arg "Ezmysql.Table.make: key or index refers to absent column";
    { name; columns; primary_key; indices; unique_keys; foreign_keys; deps }
  (* Printers for the pieces of a "create table" statement. *)
  let pp_column_type fmt (Column.Pack c) =
    Fmt.of_to_string Column.spec_to_sql_type fmt c
  let pp_column_spec fmt (Column.Pack c) =
    Fmt.of_to_string Column.spec_to_spec_string fmt c
  let pp_column_name fmt (Column.Pack c) = Column.pp_spec_name fmt c
  let pp_column fmt column =
    Fmt.pf fmt "@[%a %a%a@]" pp_column_name column pp_column_type column
      pp_column_spec column
  (* The following pp_* each print a leading comma themselves when their
     input is non-empty, so {!pp_sql} can concatenate them blindly. *)
  let pp_primary_key fmt pk =
    match pk with
    | [] -> ()
    | _ ->
      Fmt.comma fmt ();
      Fmt.pf fmt "@[primary@ key@ %a@]"
        (Pp_internal.csv_simple pp_column_name)
        pk
  let pp_index_field fmt index_field =
    match index_field with
    | Column spec -> Fmt.pf fmt "%a" pp_column_name (Pack spec)
    | Prefix { column; width } ->
      Fmt.pf fmt "%a(%d)" pp_column_name (Pack column) width
  let pp_index fmt (name, index) =
    match index with
    | [] -> ()
    | _ ->
      Fmt.pf fmt "@[index@ %s@ %a@]" name
        (Pp_internal.csv_simple pp_index_field)
        index
  let pp_indices fmt indices =
    match indices with
    | [] -> ()
    | _ ->
      Fmt.comma fmt ();
      Fmt.list ~sep:Fmt.comma pp_index fmt indices
  let pp_unique_key fmt (name, uk) =
    match uk with
    | [] -> ()
    | _ ->
      Fmt.pf fmt "@[unique@ key@ %s@ %a@]" name
        (Pp_internal.csv_simple pp_column_name)
        uk
  let pp_unique_keys fmt unique_keys =
    match unique_keys with
    | [] -> ()
    | _ ->
      Fmt.comma fmt ();
      Fmt.list ~sep:Fmt.comma pp_unique_key fmt unique_keys
  let foreign_key_action_to_string = function
    | Restrict -> "restrict"
    | Cascade -> "cascade"
    | Set_null -> "set null"
    | No_action -> "no action"
  let pp_foreign_key_action = Fmt.of_to_string foreign_key_action_to_string
  let pp_foreign_key fmt fk =
    match fk.keys.key_mapping with
    | [] -> ()
    | _ ->
      let (local, foreign) =
        List.map
          (fun (Key { local; remote }) ->
            (Column.Pack local, Column.Pack remote))
          fk.keys.key_mapping
        |> List.split
      in
      Fmt.pf fmt "@[foreign@ key@ ";
      ( match fk.key_name with
      | None -> ()
      | Some name -> Fmt.pf fmt "%s@ " name
      );
      Fmt.pf fmt "%a@;" (Pp_internal.csv_simple pp_column_name) local;
      Fmt.pf fmt "references@ %s%a@;" fk.keys.foreign_table.name
        (Pp_internal.csv_simple pp_column_name)
        foreign;
      Fmt.pf fmt "on@ update@ %a@;on@ delete@ %a@]" pp_foreign_key_action
        fk.on_update pp_foreign_key_action fk.on_delete
  let pp_foreign_keys fmt fks =
    match fks with
    | [] -> ()
    | _ ->
      Fmt.comma fmt ();
      Fmt.list ~sep:Fmt.comma pp_foreign_key fmt fks
  (* Print the full "create table [if not exists] ..." statement. *)
  let pp_sql fmt ~ok_if_exists table =
    Fmt.pf fmt "@[";
    Fmt.pf fmt "@[create@ table@]@ ";
    if ok_if_exists then Fmt.pf fmt "@[if@ not@ exists@]@ ";
    Fmt.pf fmt "%s" table.name;
    Fmt.pf fmt "@;@[<1>(%a" (Fmt.list ~sep:Fmt.comma pp_column) table.columns;
    Fmt.pf fmt "%a" pp_primary_key table.primary_key;
    Fmt.pf fmt "%a" pp_indices table.indices;
    Fmt.pf fmt "%a" pp_unique_keys table.unique_keys;
    Fmt.pf fmt "%a" pp_foreign_keys table.foreign_keys;
    Fmt.pf fmt ")@]"
  let pp_name fmt table = Fmt.string fmt (name table)
  (* Create [table] in the database; with [~ok_if_exists:true] an
     existing table of the same name is not an error. *)
  let create_exn dbd table ~ok_if_exists =
    let sql = make_run "%a" (pp_sql ~ok_if_exists) table in
    run_exn dbd sql
  let create dbd table ~ok_if_exists =
    let sql = make_run "%a" (pp_sql ~ok_if_exists) table in
    run dbd sql
  let deps table = table.deps
  module Topo_sort = struct
    module V : Graph.Sig.COMPARABLE with type t = t = struct
      (* Graph vertices are tables, unique by name *)
      type nonrec t = t
      let compare (a : t) (b : t) = compare a.name b.name
      let equal (a : t) (b : t) = String.equal a.name b.name
      let hash (a : t) = Hashtbl.hash a.name
    end
    (* Persistent (immutable) graphs and matching topological sort modules *)
    module G = Graph.Persistent.Digraph.Concrete (V)
    module Topo = Graph.Topological.Make_stable (G)
    (* Add [table] — and, recursively, its foreign-key dependencies — to
       [graph]; a no-op if [table] is already a vertex. Edges point from
       a table to each table it depends on. *)
    let rec add_table graph table =
      if G.mem_vertex graph table then
        graph
      else (
        (* Make sure we add the table to our graph! *)
        let graph = G.add_vertex graph table in
        (* Now recursively add all of the table's dependencies *)
        List.fold_left
          (fun graph_accumulator table_dep ->
            (* Add this table and record the dependency. *)
            let graph_accumulator =
              let graph = G.add_vertex graph_accumulator table_dep in
              G.add_edge graph table table_dep
            in
            (* Now recurse in and add any dependencies for table_dep *)
            add_table graph_accumulator table_dep)
          graph table.deps
      )
    let init tables = List.fold_left add_table G.empty tables
    (* Tables in dependency order; NOTE(review): the exact ordering
       (dependencies before dependents) follows from ocamlgraph's
       [Topological] fold direction plus the cons/reverse here — confirm
       against the ocamlgraph documentation before relying on it. *)
    let sorted_deps graph =
      Topo.fold (fun table all_deps -> table :: all_deps) graph []
  end
  (* All tables reachable from [tables] through foreign keys, sorted for
     safe creation. *)
  let transitive_sorted_deps tables =
    Topo_sort.init tables |> Topo_sort.sorted_deps
end
(* What a user must provide to {!Make}: a table definition plus
   row (de)serialization for their record type. *)
module type S = sig
  type t
  val table : Table.t
  val to_row : t -> row
  val of_row : row -> (t, [> `Msg of string ]) result
end
(* The typed per-table API produced by {!Make}. Most entry points come
   in a result-returning and an [_exn] flavor, and the [*_sql] variants
   only build the statement without running it. *)
module type Db = sig
  type t
  val table : Table.t
  (* Create the table in the database. *)
  val init :
    Mysql.dbd -> ok_if_exists:bool -> (unit, [> `Msg of string ]) result
  val init_exn : Mysql.dbd -> ok_if_exists:bool -> unit
  (* Insert with a caller-supplied trailing SQL clause. *)
  val insert' :
    Mysql.dbd ->
    t ->
    ('a, Format.formatter, unit, (unit, [> R.msg ]) result) format4 ->
    'a
  val insert_exn' :
    Mysql.dbd -> t -> ('a, Format.formatter, unit, unit) format4 -> 'a
  (* Insert, optionally refreshing columns on a duplicate-key hit; see
     the top-level [insert] for the meaning of each variant. *)
  val insert_sql :
    ?on_duplicate_key_update:
      [ `All
      | `Columns of Column.packed_spec list
      | `Except of Column.packed_spec list
      | `With_id of (_, _) Column.spec * Column.packed_spec list
      ] ->
    Mysql.dbd ->
    t ->
    [ `Run ] sql
  val insert :
    ?on_duplicate_key_update:
      [ `All
      | `Columns of Column.packed_spec list
      | `Except of Column.packed_spec list
      | `With_id of (_, _) Column.spec * Column.packed_spec list
      ] ->
    Mysql.dbd ->
    t ->
    (unit, [> `Msg of string ]) result
  val insert_exn :
    ?on_duplicate_key_update:
      [ `All
      | `Columns of Column.packed_spec list
      | `Except of Column.packed_spec list
      | `With_id of (_, _) Column.spec * Column.packed_spec list
      ] ->
    Mysql.dbd ->
    t ->
    unit
  val replace : [ `Use_insert_on_duplicate_key_update ]
    [@@ocaml.deprecated "Use 'insert ~on_duplicate_key_update' instead"]
  (* update/select/delete against this module's table; the format
     arguments supply the trailing SQL clauses. *)
  val update_sql : ('a, Format.formatter, unit, [ `Run ] sql) format4 -> 'a
  val update :
    Mysql.dbd ->
    ('a, Format.formatter, unit, (unit, [> `Msg of string ]) result) format4 ->
    'a
  val update_exn : Mysql.dbd -> ('a, Format.formatter, unit, unit) format4 -> 'a
  val select_sql : ('a, Format.formatter, unit, [ `Get ] sql) format4 -> 'a
  val select :
    Mysql.dbd ->
    ('a, Format.formatter, unit, (t list, [> `Msg of string ]) result) format4 ->
    'a
  val select_exn :
    Mysql.dbd -> ('a, Format.formatter, unit, t list) format4 -> 'a
  val delete_sql : ('a, Format.formatter, unit, [ `Run ] sql) format4 -> 'a
  val delete :
    Mysql.dbd ->
    ('a, Format.formatter, unit, (unit, [> `Msg of string ]) result) format4 ->
    'a
  val delete_exn : Mysql.dbd -> ('a, Format.formatter, unit, unit) format4 -> 'a
end
module Make (M : S) : Db with type t := M.t = struct
let table = M.table
let init dbd ~ok_if_exists = Table.create dbd M.table ~ok_if_exists
let init_exn dbd ~ok_if_exists = Table.create_exn dbd M.table ~ok_if_exists
let insert'_sql runner dbd t fmt =
let row = M.to_row t in
Fmt.kstrf
(fun s -> insert' dbd ~into:(Table.name M.table) row "%s" s |> runner dbd)
fmt
let insert' dbd t fmt = insert'_sql run dbd t fmt
let insert_exn' dbd t fmt = insert'_sql run_exn dbd t fmt
let on_duplicate_key_update_to_strings = function
| `All -> `All
| `Columns specs -> `Columns (List.map Column.name_of_packed_spec specs)
| `Except specs -> `Except (List.map Column.name_of_packed_spec specs)
| `With_id (id_spec, specs) ->
`With_id
(Column.name_of_spec id_spec, List.map Column.name_of_packed_spec specs)
let insert_sql ?on_duplicate_key_update dbd t =
let row = M.to_row t in
let on_duplicate_key_update =
match on_duplicate_key_update with
| None -> None
| Some x -> Some (on_duplicate_key_update_to_strings x)
in
insert ?on_duplicate_key_update dbd ~into:M.table.Table.name row
let insert ?on_duplicate_key_update dbd t =
run dbd (insert_sql ?on_duplicate_key_update dbd t)
let insert_exn ?on_duplicate_key_update dbd t =
run_exn dbd (insert_sql ?on_duplicate_key_update dbd t)
let replace = `Use_insert_on_duplicate_key_update
let update_sql clauses =
Fmt.kstrf (fun s -> update M.table.name "%s" s) clauses
let update_exn dbd clauses =
Fmt.kstrf (fun s -> update M.table.name "%s" s |> run_exn dbd) clauses
let update dbd clauses =
Fmt.kstrf (fun s -> update M.table.name "%s" s |> run dbd) clauses
exception Error of string
let of_row_exn row =
match M.of_row row with
| Ok x -> x
| Error (`Msg msg) -> raise (Error msg)
let select_sql clauses =
Fmt.kstrf (fun s -> select [ "*" ] ~from:M.table.Table.name "%s" s) clauses
let select_exn dbd clauses =
Fmt.kstrf
(fun s ->
let rows =
select [ "*" ] ~from:M.table.Table.name "%s" s |> get_exn dbd
in
try List.rev_map of_row_exn rows |> List.rev with
| Error msg -> failwith msg)
clauses
let select dbd clauses =
let ( >>= ) = R.( >>= ) in
Fmt.kstrf
(fun s ->
select [ "*" ] ~from:M.table.Table.name "%s" s |> get dbd
>>= fun rows ->
try List.rev_map of_row_exn rows |> List.rev |> R.ok with
| Error msg -> R.error_msg msg)
clauses
(* Build (without running) a @delete@ from [M.table] with the extra SQL
   from [clauses] appended - typically a @where ...@ clause. *)
let delete_sql clauses =
  Fmt.kstrf (fun tail -> delete ~from:M.table.Table.name "%s" tail) clauses

(* Run a @delete@ from [M.table]; raises on failure. *)
let delete_exn dbd clauses =
  Fmt.kstrf
    (fun tail -> run_exn dbd (delete ~from:M.table.Table.name "%s" tail))
    clauses

(* Run a @delete@ from [M.table]; failures are reported as a [result].
   Note: this shadows the generic [delete] used in the bodies above. *)
let delete dbd clauses =
  Fmt.kstrf
    (fun tail -> run dbd (delete ~from:M.table.Table.name "%s" tail))
    clauses
end
(* Typed SQL @where@-clause combinators: build comparisons against typed
   columns and combine them with [&&]/[||], then render them with [pp]. *)
module Clause = struct
  (* SQL comparison operators. *)
  type comparison =
    | Eq
    | Ne
    | Lt
    | Gt
    | Lte
    | Gte

  (* A column reference carrying the OCaml type of its values, either as a
     bare column or as a full column spec. *)
  type 'ocaml column =
    | Column : ('ocaml, 'sql) Column.t -> 'ocaml column
    | Spec : ('ocaml, 'sql) Column.spec -> 'ocaml column

  (* A boolean clause tree.  [Compare] pairs a column with a value of the
     matching OCaml type, so ill-typed comparisons are rejected at compile
     time. *)
  type t =
    | And : t * t -> t
    | Or : t * t -> t
    | Compare : comparison * 'ocaml column * 'ocaml -> t

  let make_compare comparison column v = Compare (comparison, column, v)

  (* Clause-building operators, intended for local opens.  Note that these
     shadow the standard polymorphic comparison and boolean operators. *)
  let ( = ) col v = make_compare Eq col v
  let ( <> ) col v = make_compare Ne col v
  let ( < ) col v = make_compare Lt col v
  let ( > ) col v = make_compare Gt col v
  let ( <= ) col v = make_compare Lte col v
  let ( >= ) col v = make_compare Gte col v
  let ( && ) a b = And (a, b)
  let ( || ) a b = Or (a, b)

  let string_of_comparison comp =
    match comp with
    | Eq -> "="
    | Ne -> "<>"
    | Lt -> "<"
    | Gt -> ">"
    | Lte -> "<="
    | Gte -> ">="

  let pp_column_name fmt column =
    match column with
    | Column c -> Column.pp_name fmt c
    | Spec s -> Column.pp_spec_name fmt s

  let pp_comparison = Fmt.of_to_string string_of_comparison

  (* Render a value quoted/escaped according to its column's type.
     [dbd] is needed for connection-aware string escaping. *)
  let pp_value dbd column =
    match column with
    | Column c -> Column.pp dbd c
    | Spec s -> Column.pp_spec dbd s

  (* Render a clause tree as SQL. *)
  let rec pp dbd fmt clause =
    match clause with
    | And (left, right) ->
      Fmt.pf fmt "(%a@ and@ %a)" (pp dbd) left (pp dbd) right
    | Or (left, right) ->
      (* Was "(%a@ or @ %a)": the literal space before the "@ " break hint
         rendered a double space ("a or  b").  Now matches the [And] case. *)
      Fmt.pf fmt "(%a@ or@ %a)" (pp dbd) left (pp dbd) right
    | Compare (comparison, column, v) ->
      Fmt.pf fmt "%a@ %a@ %a" pp_column_name column pp_comparison comparison
        (pp_value dbd column) v
end
(* Public pretty-printers: re-exports the low-level SQL-escaping printers
   from [Pp_internal] plus the column/field/table/clause printers defined
   earlier in this file under shorter names. *)
module Pp = struct
  include Pp_internal
  let column = Column.pp
  let column_name = Column.pp_name
  let spec = Column.pp_spec
  let spec_name = Column.pp_spec_name
  let field = Field.pp
  let field_opt = Field.pp_opt
  let table_name = Table.pp_name
  let clause = Clause.pp
end
| null | https://raw.githubusercontent.com/hcarty/ezmysql/06dd0536ebb95c3d3767bdcf927a63a03547b733/src/ezmysql.ml | ocaml | The database name to use is the path without the '/' prefix
If a column is specified, make sure last_insert_id identifies that
value once/if this insert completes successfully.
Assume things were broken and we need to remake the statement
Graph vertices are tables, unique by name
Persistent (immutable) graphs and matching topological sort modules
Add a table to an existing graph. [table] must already exist in [graph].
Make sure we add the table to our graph!
Now recursively add all of the table's dependencies
Add this table and record the dependency.
Now recurse in and add any dependencies for table_dep | module Datetime = CalendarLib.Calendar.Precise
module Date = Datetime.Date
module Time = Datetime.Time
module Row = struct
include Map.Make (String)
let keys m = to_seq m |> Seq.map fst |> List.of_seq
let values m = to_seq m |> Seq.map snd |> List.of_seq
end
module Datetime_p = CalendarLib.Printer.Precise_Calendar
module Date_p = CalendarLib.Printer.Date
module Time_p = CalendarLib.Printer.Time
open Rresult
open Astring
let connect_exn ?(reconnect = true) uri =
let database =
match Uri.path uri with
| "" -> None
| p ->
Some
(String.trim
~drop:(function
| '/' -> true
| _ -> false)
p)
in
let options =
if reconnect then
Some [ Mysql.OPT_RECONNECT reconnect ]
else
None
in
Mysql.quick_connect ?options ?host:(Uri.host uri) ?database
?port:(Uri.port uri) ?password:(Uri.password uri) ?user:(Uri.user uri) ()
let connect ?reconnect uri =
try connect_exn ?reconnect uri |> R.ok with
| Mysql.Error msg -> R.error_msg msg
let disconnect = Mysql.disconnect
let ping = Mysql.ping
module Pp_internal = struct
let null = Fmt.const Fmt.string "NULL"
let nullable f = Fmt.option ~none:null f
let string dbd = Fmt.of_to_string (Mysql.ml2rstr dbd)
let blob dbd = Fmt.of_to_string (Mysql.ml2rblob dbd)
let int = Fmt.of_to_string Mysql.ml2int
let int64 = Fmt.of_to_string Mysql.ml642int
let float = Fmt.of_to_string Mysql.ml2float
let datetime =
let to_string datetime =
Mysql.ml2datetime
( Datetime.year datetime,
Datetime.month datetime |> Date.int_of_month,
Datetime.day_of_month datetime,
Datetime.hour datetime,
Datetime.minute datetime,
Datetime.second datetime )
in
Fmt.of_to_string to_string
let date =
let to_string date =
Mysql.ml2date
( Date.year date,
Date.month date |> Date.int_of_month,
Date.day_of_month date )
in
Fmt.of_to_string to_string
let time =
let to_string time =
Mysql.ml2time (Time.hour time, Time.minute time, Time.second time)
in
Fmt.of_to_string to_string
let csv_simple pp_elt fmt = Fmt.pf fmt "(%a)" (Fmt.list ~sep:Fmt.comma pp_elt)
end
module Column = struct
type ('ocaml, 'sql) conv = {
serialize : 'ocaml -> 'sql;
deserialize : 'sql -> 'ocaml;
}
type ('ocaml, 'sql) t =
| String : string * ('ocaml, (string as 'sql)) conv -> ('ocaml, 'sql) t
| Blob : string * ('ocaml, (string as 'sql)) conv -> ('ocaml, 'sql) t
| Int : string * ('ocaml, (int as 'sql)) conv -> ('ocaml, 'sql) t
| Int64 : string * ('ocaml, (int64 as 'sql)) conv -> ('ocaml, 'sql) t
| Float : string * ('ocaml, (float as 'sql)) conv -> ('ocaml, 'sql) t
| Datetime :
string * ('ocaml, (Datetime.t as 'sql)) conv
-> ('ocaml, 'sql) t
| Date : string * ('ocaml, (Date.t as 'sql)) conv -> ('ocaml, 'sql) t
| Time : string * ('ocaml, (Time.t as 'sql)) conv -> ('ocaml, 'sql) t
type packed = Pack : _ t -> packed
type spec_basic = {
name : string;
nullable : bool;
}
type spec_string_width = {
name : string;
width : int;
nullable : bool;
}
type spec_int = {
name : string;
nullable : bool;
auto_increment : bool;
}
and (_, _) spec =
| Char :
spec_string_width * ('ocaml, (string as 'sql)) conv
-> ('ocaml, 'sql) spec
| Varchar :
spec_string_width * ('ocaml, (string as 'sql)) conv
-> ('ocaml, 'sql) spec
| Binary :
spec_string_width * ('ocaml, (string as 'sql)) conv
-> ('ocaml, 'sql) spec
| Blob : spec_basic * ('ocaml, (string as 'sql)) conv -> ('ocaml, 'sql) spec
| Tiny_int : spec_int * ('ocaml, (int as 'sql)) conv -> ('ocaml, 'sql) spec
| Small_int : spec_int * ('ocaml, (int as 'sql)) conv -> ('ocaml, 'sql) spec
| Medium_int :
spec_int * ('ocaml, (int as 'sql)) conv
-> ('ocaml, 'sql) spec
| Int : spec_int * ('ocaml, (int as 'sql)) conv -> ('ocaml, 'sql) spec
| Big_int : spec_int * ('ocaml, (int64 as 'sql)) conv -> ('ocaml, 'sql) spec
| Float : spec_basic * ('ocaml, (float as 'sql)) conv -> ('ocaml, 'sql) spec
| Datetime :
spec_basic * ('ocaml, (Datetime.t as 'sql)) conv
-> ('ocaml, 'sql) spec
| Date : spec_basic * ('ocaml, (Date.t as 'sql)) conv -> ('ocaml, 'sql) spec
| Time : spec_basic * ('ocaml, (Time.t as 'sql)) conv -> ('ocaml, 'sql) spec
type packed_spec = Pack : _ spec -> packed_spec
let equal_packed_spec (a : packed_spec) (b : packed_spec) =
match (a, b) with
| (Pack (Char (a_spec, _)), Pack (Char (b_spec, _))) -> a_spec = b_spec
| (Pack (Varchar (a_spec, _)), Pack (Varchar (b_spec, _))) ->
a_spec = b_spec
| (Pack (Binary (a_spec, _)), Pack (Binary (b_spec, _))) -> a_spec = b_spec
| (Pack (Blob (a_spec, _)), Pack (Blob (b_spec, _))) -> a_spec = b_spec
| (Pack (Tiny_int (a_spec, _)), Pack (Tiny_int (b_spec, _))) ->
a_spec = b_spec
| (Pack (Small_int (a_spec, _)), Pack (Small_int (b_spec, _))) ->
a_spec = b_spec
| (Pack (Medium_int (a_spec, _)), Pack (Medium_int (b_spec, _))) ->
a_spec = b_spec
| (Pack (Int (a_spec, _)), Pack (Int (b_spec, _))) -> a_spec = b_spec
| (Pack (Big_int (a_spec, _)), Pack (Big_int (b_spec, _))) ->
a_spec = b_spec
| (Pack (Float (a_spec, _)), Pack (Float (b_spec, _))) -> a_spec = b_spec
| (Pack (Datetime (a_spec, _)), Pack (Datetime (b_spec, _))) ->
a_spec = b_spec
| (Pack (Date (a_spec, _)), Pack (Date (b_spec, _))) -> a_spec = b_spec
| (Pack (Time (a_spec, _)), Pack (Time (b_spec, _))) -> a_spec = b_spec
| (Pack (Char _), _) -> false
| (Pack (Varchar _), _) -> false
| (Pack (Binary _), _) -> false
| (Pack (Blob _), _) -> false
| (Pack (Tiny_int _), _) -> false
| (Pack (Small_int _), _) -> false
| (Pack (Medium_int _), _) -> false
| (Pack (Int _), _) -> false
| (Pack (Big_int _), _) -> false
| (Pack (Float _), _) -> false
| (Pack (Datetime _), _) -> false
| (Pack (Date _), _) -> false
| (Pack (Time _), _) -> false
module Conv = struct
type ('ocaml, 'sql) t = ('ocaml, 'sql) conv
let make ~serialize ~deserialize = { serialize; deserialize }
let identity = { serialize = (fun x -> x); deserialize = (fun x -> x) }
let bool =
make
~serialize:(fun x ->
if x then
1
else
0)
~deserialize:(fun x -> x <> 0)
end
let of_spec (type o s) (spec : (o, s) spec) : (o, s) t =
match spec with
| Char ({ name; _ }, conv) -> String (name, conv)
| Varchar ({ name; _ }, conv) -> String (name, conv)
| Binary ({ name; _ }, conv) -> String (name, conv)
| Blob ({ name; _ }, conv) -> Blob (name, conv)
| Tiny_int ({ name; _ }, conv) -> Int (name, conv)
| Small_int ({ name; _ }, conv) -> Int (name, conv)
| Medium_int ({ name; _ }, conv) -> Int (name, conv)
| Int ({ name; _ }, conv) -> Int (name, conv)
| Big_int ({ name; _ }, conv) -> Int64 (name, conv)
| Float ({ name; _ }, conv) -> Float (name, conv)
| Datetime ({ name; _ }, conv) -> Datetime (name, conv)
| Date ({ name; _ }, conv) -> Date (name, conv)
| Time ({ name; _ }, conv) -> Time (name, conv)
let spec_to_sql_type (type o s) (spec : (o, s) spec) =
match spec with
| Char ({ width; _ }, _) -> Fmt.strf "char(%d)" width
| Varchar ({ width; _ }, _) -> Fmt.strf "varchar(%d)" width
| Binary ({ width; _ }, _) -> Fmt.strf "binary(%d)" width
| Blob _ -> "longblob"
| Tiny_int _ -> "tinyint"
| Small_int _ -> "smallint"
| Medium_int _ -> "mediumint"
| Int _ -> "int"
| Big_int _ -> "bigint"
| Float _ -> "float"
| Datetime _ -> "datetime"
| Date _ -> "date"
| Time _ -> "time"
let make_spec_string ~nullable ~auto_increment =
let null =
if nullable then
""
else
" not null"
in
let ai =
if auto_increment then
" auto_increment"
else
""
in
String.concat ~sep:"" [ null; ai ]
let spec_to_spec_string (type o s) (spec : (o, s) spec) =
match spec with
| Char ({ nullable; _ }, _) ->
make_spec_string ~nullable ~auto_increment:false
| Varchar ({ nullable; _ }, _) ->
make_spec_string ~nullable ~auto_increment:false
| Binary ({ nullable; _ }, _) ->
make_spec_string ~nullable ~auto_increment:false
| Blob ({ nullable; _ }, _) ->
make_spec_string ~nullable ~auto_increment:false
| Tiny_int ({ nullable; auto_increment; _ }, _) ->
make_spec_string ~nullable ~auto_increment
| Small_int ({ nullable; auto_increment; _ }, _) ->
make_spec_string ~nullable ~auto_increment
| Medium_int ({ nullable; auto_increment; _ }, _) ->
make_spec_string ~nullable ~auto_increment
| Int ({ nullable; auto_increment; _ }, _) ->
make_spec_string ~nullable ~auto_increment
| Big_int ({ nullable; auto_increment; _ }, _) ->
make_spec_string ~nullable ~auto_increment
| Float ({ nullable; _ }, _) ->
make_spec_string ~nullable ~auto_increment:false
| Datetime ({ nullable; _ }, _) ->
make_spec_string ~nullable ~auto_increment:false
| Date ({ nullable; _ }, _) ->
make_spec_string ~nullable ~auto_increment:false
| Time ({ nullable; _ }, _) ->
make_spec_string ~nullable ~auto_increment:false
let make_char name width ?(nullable = false) conv =
Char ({ name; width; nullable }, conv)
let make_varchar name width ?(nullable = false) conv =
Varchar ({ name; width; nullable }, conv)
let make_binary name width ?(nullable = false) conv =
Binary ({ name; width; nullable }, conv)
let make_blob name ?(nullable = false) conv = Blob ({ name; nullable }, conv)
let make_tiny_int name ?(nullable = false) ?(auto_increment = false) conv =
Tiny_int ({ name; nullable; auto_increment }, conv)
let make_small_int name ?(nullable = false) ?(auto_increment = false) conv =
Small_int ({ name; nullable; auto_increment }, conv)
let make_medium_int name ?(nullable = false) ?(auto_increment = false) conv =
Medium_int ({ name; nullable; auto_increment }, conv)
let make_int name ?(nullable = false) ?(auto_increment = false) conv =
Int ({ name; nullable; auto_increment }, conv)
let make_big_int name ?(nullable = false) ?(auto_increment = false) conv =
Big_int ({ name; nullable; auto_increment }, conv)
let make_float name ?(nullable = false) conv = Float ({ name; nullable }, conv)
let make_datetime name ?(nullable = false) conv =
Datetime ({ name; nullable }, conv)
let make_date name ?(nullable = false) conv = Date ({ name; nullable }, conv)
let make_time name ?(nullable = false) conv = Time ({ name; nullable }, conv)
let string name conv : _ t = String (name, conv)
let blob name conv : _ t = Blob (name, conv)
let int name conv : _ t = Int (name, conv)
let int64 name conv : _ t = Int64 (name, conv)
let float name conv : _ t = Float (name, conv)
let datetime name conv : _ t = Datetime (name, conv)
let date name conv : _ t = Date (name, conv)
let time name conv : _ t = Time (name, conv)
let name (type o s) (c : (o, s) t) =
match c with
| String (name, _) -> name
| Blob (name, _) -> name
| Int (name, _) -> name
| Int64 (name, _) -> name
| Float (name, _) -> name
| Datetime (name, _) -> name
| Date (name, _) -> name
| Time (name, _) -> name
let pp (type o s) dbd (c : (o, s) t) fmt (x : o) =
match c with
| String (_, conv) -> Pp_internal.string dbd fmt (conv.serialize x)
| Blob (_, conv) -> Pp_internal.blob dbd fmt (conv.serialize x)
| Int (_, conv) -> Pp_internal.int fmt (conv.serialize x)
| Int64 (_, conv) -> Pp_internal.int64 fmt (conv.serialize x)
| Float (_, conv) -> Pp_internal.float fmt (conv.serialize x)
| Datetime (_, conv) -> Pp_internal.datetime fmt (conv.serialize x)
| Date (_, conv) -> Pp_internal.date fmt (conv.serialize x)
| Time (_, conv) -> Pp_internal.time fmt (conv.serialize x)
let pp_spec dbd spec fmt x = pp dbd (of_spec spec) fmt x
let pp_name fmt c = Fmt.of_to_string name fmt c
let pp_spec_name fmt c = pp_name fmt (of_spec c)
let of_packed_spec (Pack spec : packed_spec) : packed = Pack (of_spec spec)
let name_of_spec spec = name (of_spec spec)
let name_of_packed_spec (Pack spec) = name (of_spec spec)
let with_name (type o s) name (spec : (o, s) spec) : (o, s) spec =
let strip_auto_increment s = { s with auto_increment = false } in
match spec with
| Char (s, conv) -> Char ({ s with name }, conv)
| Varchar (s, conv) -> Varchar ({ s with name }, conv)
| Binary (s, conv) -> Binary ({ s with name }, conv)
| Blob (s, conv) -> Blob ({ s with name }, conv)
| Tiny_int (s, conv) ->
Tiny_int ({ (strip_auto_increment s) with name }, conv)
| Small_int (s, conv) ->
Small_int ({ (strip_auto_increment s) with name }, conv)
| Medium_int (s, conv) ->
Medium_int ({ (strip_auto_increment s) with name }, conv)
| Int (s, conv) -> Int ({ (strip_auto_increment s) with name }, conv)
| Big_int (s, conv) -> Big_int ({ (strip_auto_increment s) with name }, conv)
| Float (s, conv) -> Float ({ s with name }, conv)
| Datetime (s, conv) -> Datetime ({ s with name }, conv)
| Date (s, conv) -> Date ({ s with name }, conv)
| Time (s, conv) -> Time ({ s with name }, conv)
end
module Field = struct
type _ t =
| String : string -> string t
| Blob : string -> string t
| Int : int -> int t
| Int64 : int64 -> int64 t
| Float : float -> float t
| Datetime : Datetime.t -> Datetime.t t
| Date : Date.t -> Date.t t
| Time : Time.t -> Time.t t
type packed = Pack : _ t -> packed
type error = Unhandled_type of Mysql.dbty
let pp (type v) dbd fmt (field : v t) =
match field with
| String s -> Pp_internal.string dbd fmt s
| Blob b -> Pp_internal.blob dbd fmt b
| Int i -> Pp_internal.int fmt i
| Int64 i -> Pp_internal.int64 fmt i
| Float f -> Pp_internal.float fmt f
| Datetime c -> Pp_internal.datetime fmt c
| Date d -> Pp_internal.date fmt d
| Time t -> Pp_internal.time fmt t
let pp_opt dbd fmt = function
| Some fld -> pp dbd fmt fld
| None -> Pp_internal.null fmt ()
let pp_packed dbd fmt field =
match field with
| Pack fld -> pp dbd fmt fld
let pp_packed_opt dbd fmt field =
match field with
| Some packed -> pp_packed dbd fmt packed
| None -> Pp_internal.null fmt ()
let to_string_unquoted = function
| Pack (String s) -> s
| Pack (Blob s) -> s
| Pack (Int i) -> Mysql.ml2int i
| Pack (Int64 i) -> Mysql.ml642int i
| Pack (Float f) -> Mysql.ml2float f
| Pack (Datetime c) -> Datetime_p.to_string c
| Pack (Date d) -> Date_p.to_string d
| Pack (Time t) -> Time_p.to_string t
let opt_to_string_unquoted = function
| None -> "NULL"
| Some x -> to_string_unquoted x
let string_of_dbty = function
| Mysql.IntTy -> "IntTy"
| Mysql.FloatTy -> "FloatTy"
| Mysql.StringTy -> "StringTy"
| Mysql.SetTy -> "SetTy"
| Mysql.EnumTy -> "EnumTy"
| Mysql.DateTimeTy -> "DateTimeTy"
| Mysql.DateTy -> "DateTy"
| Mysql.TimeTy -> "TimeTy"
| Mysql.YearTy -> "YearTy"
| Mysql.TimeStampTy -> "TimeStampeTy"
| Mysql.UnknownTy -> "UnknownTy"
| Mysql.Int64Ty -> "Int64Ty"
| Mysql.BlobTy -> "BlobTy"
| Mysql.DecimalTy -> "DecimalTy"
let error_to_string = function
| Unhandled_type typ -> Fmt.strf "Unhandled_type %s" (string_of_dbty typ)
let datetime_of_tuple (y, m, d, hh, mm, ss) = Datetime.make y m d hh mm ss
let date_of_tuple (y, m, d) = Date.make y m d
let time_of_tuple (hh, mm, ss) = Time.make hh mm ss
let of_mysql_type typ s =
match typ with
| Mysql.IntTy -> R.ok @@ Pack (Int (Mysql.int2ml s))
| Mysql.Int64Ty -> R.ok @@ Pack (Int64 (Mysql.int642ml s))
| Mysql.FloatTy -> R.ok @@ Pack (Float (Mysql.float2ml s))
| Mysql.StringTy -> R.ok @@ Pack (String (Mysql.str2ml s))
| Mysql.BlobTy -> R.ok @@ Pack (Blob (Mysql.blob2ml s))
| Mysql.DateTimeTy ->
R.ok @@ Pack (Datetime (Mysql.datetime2ml s |> datetime_of_tuple))
| Mysql.DateTy -> R.ok @@ Pack (Date (Mysql.date2ml s |> date_of_tuple))
| Mysql.TimeTy -> R.ok @@ Pack (Time (Mysql.time2ml s |> time_of_tuple))
| ( Mysql.SetTy | Mysql.EnumTy | Mysql.YearTy | Mysql.TimeStampTy
| Mysql.UnknownTy | Mysql.DecimalTy ) as typ ->
R.error (`Mysql_field (Unhandled_type typ))
let of_column_spec (type o s) (spec : (o, s) Column.spec) (v : o) : s t =
match Column.of_spec spec with
| Column.String (_, conv) -> String (conv.serialize v)
| Column.Blob (_, conv) -> Blob (conv.serialize v)
| Column.Int (_, conv) -> Int (conv.serialize v)
| Column.Int64 (_, conv) -> Int64 (conv.serialize v)
| Column.Float (_, conv) -> Float (conv.serialize v)
| Column.Datetime (_, conv) -> Datetime (conv.serialize v)
| Column.Date (_, conv) -> Date (conv.serialize v)
| Column.Time (_, conv) -> Time (conv.serialize v)
let unpack (type o s) (column : (o, s) Column.t) packed : o option =
match packed with
| Pack field ->
( match (column, field) with
| (Column.String (_, conv), String v) -> Some (conv.deserialize v)
| (Column.Blob (_, conv), Blob v) -> Some (conv.deserialize v)
| (Column.Int (_, conv), Int v) -> Some (conv.deserialize v)
| (Column.Int64 (_, conv), Int64 v) -> Some (conv.deserialize v)
| (Column.Float (_, conv), Float v) -> Some (conv.deserialize v)
| (Column.Datetime (_, conv), Datetime v) -> Some (conv.deserialize v)
| (Column.Date (_, conv), Date v) -> Some (conv.deserialize v)
| (Column.Time (_, conv), Time v) -> Some (conv.deserialize v)
| _ -> None
)
end
type 'kind sql = string constraint 'kind = [< `Run | `Get ]
type row = Field.packed option Row.t
let row_of_list l = List.to_seq l |> Row.of_seq
let pack_column_opt spec vo =
let column = Column.of_spec spec in
let name = Column.name column in
match vo with
| None -> (name, None)
| Some v -> (name, Some (Field.Pack (Field.of_column_spec spec v)))
let pack_column spec v = pack_column_opt spec (Some v)
let find_column spec (row : row) =
let column = Column.of_spec spec in
let name = Column.name column in
match Row.find_opt name row with
| None -> R.error_msgf "no column %s in row" name
| Some None -> R.ok None
| Some (Some packed) ->
( match Field.unpack column packed with
| None -> R.error_msgf "field type mismatch for %s in row" name
| Some v -> R.ok (Some v)
)
let get_column spec row =
match find_column spec row with
| Error (`Msg msg) -> invalid_arg msg
| Ok None -> raise Not_found
| Ok (Some v) -> v
let make_run fmt = Fmt.kstrf (fun x -> x) fmt
let make_get fmt = Fmt.kstrf (fun x -> x) fmt
let insert' dbd ~into:table fields fmt =
let columns = Row.keys fields in
let values = Row.values fields in
Fmt.kstrf
(fun s ->
make_run "insert into %s %a values %a %s" table
(Pp_internal.csv_simple Fmt.string)
columns
(Pp_internal.csv_simple (Field.pp_packed_opt dbd))
values s)
fmt
let pp_update fmt column = Fmt.pf fmt "%s = values(%s)" column column
let insert ?on_duplicate_key_update dbd ~into row =
match on_duplicate_key_update with
| None -> insert' dbd ~into row ""
| Some update ->
let (id_column, columns) =
match update with
| `All -> (None, Row.keys row)
| `Columns columns -> (None, columns)
| `Except columns ->
( None,
List.filter (fun name -> List.mem name columns) (Row.keys row)
)
| `With_id (id_column, columns) -> (Some id_column, columns)
in
let id_column_sql =
match id_column with
| None -> ""
| Some column ->
let pp_sep =
match columns with
| [] -> Fmt.nop
| _ -> Fmt.comma
in
Fmt.strf "%a%s = last_insert_id(%s)" pp_sep () column column
in
insert' dbd ~into row "on duplicate key update %a%s"
(Fmt.list ~sep:Fmt.comma pp_update)
columns id_column_sql
let replace = `Use_insert_on_duplicate_key_update
let update table fmt = Fmt.kstrf (fun s -> Fmt.strf "update %s %s" table s) fmt
let delete ~from:table fmt =
Fmt.kstrf (fun s -> Fmt.strf "delete from %s %s" table s) fmt
let select columns ~from:table fmt =
Fmt.kstrf
(fun s ->
Fmt.strf "select %a from %s %s"
Fmt.(list ~sep:comma string)
columns table s)
fmt
let field_of_mysql_type_exn typ s =
match Field.of_mysql_type typ s with
| Ok f -> f
| Error (`Mysql_field e) -> invalid_arg (Field.error_to_string e)
let parse_row columns row =
let num_columns = Array.length columns in
if num_columns <> Array.length row then
invalid_arg "mysql: metadata column count mismatch";
Array.mapi
(fun i col ->
( Mysql.(col.name),
Option.map (field_of_mysql_type_exn Mysql.(col.ty)) row.(i) ))
columns
|> Array.to_seq
|> Row.of_seq
let to_rows columns result =
let rec loop rows =
match Mysql.fetch result with
| None -> List.rev rows
| Some row -> loop (parse_row columns row :: rows)
in
loop []
let exec dbd sql =
let result = Mysql.exec dbd sql in
let columns = Mysql.fetch_fields result in
match columns with
| None -> None
| Some columns -> Some (to_rows columns result)
let run_exn dbd sql =
match exec dbd sql with
| None -> ()
| Some _rows -> Fmt.failwith "Ezmysql.run: unexpected results from %s" sql
let run dbd sql =
match run_exn dbd sql with
| () -> Ok ()
| exception Failure msg -> R.error_msg msg
| exception Mysql.Error msg ->
R.error_msgf "Ezmysql.run: %a from %s" Fmt.(brackets string) msg sql
let get_exn dbd sql =
match exec dbd sql with
| None -> Fmt.failwith "Ezmysql.get: empty result from %s" sql
| Some rows -> rows
let get dbd sql =
match get_exn dbd sql with
| rows -> Ok rows
| exception Failure msg -> R.error_msg msg
| exception Mysql.Error msg ->
R.error_msgf "Ezmysql.get: %a from %s" Fmt.(brackets string) msg sql
let get_v (type o) (column : (o, _) Column.t) (row : row) : o option =
let name = Column.name column in
match Row.find_opt name row with
| None -> Fmt.invalid_arg "Ezmysql.get_v: No column %s in row" name
| Some v ->
( match v with
| None -> None
| Some packed_field ->
( match Field.unpack column packed_field with
| Some _ as s -> s
| None ->
Fmt.invalid_arg
"Ezmysql.get_v: column %s's type does not match what was expected"
name
)
)
let list_map f l = List.rev_map f l |> List.rev
let to_column rows column = list_map (fun row -> get_v column row) rows
let to_column2 rows (column1, column2) =
list_map (fun row -> (get_v column1 row, get_v column2 row)) rows
let to_column3 rows (column1, column2, column3) =
list_map
(fun row -> (get_v column1 row, get_v column2 row, get_v column3 row))
rows
let to_column4 rows (column1, column2, column3, column4) =
list_map
(fun row ->
( get_v column1 row,
get_v column2 row,
get_v column3 row,
get_v column4 row ))
rows
let to_column5 rows (column1, column2, column3, column4, column5) =
list_map
(fun row ->
( get_v column1 row,
get_v column2 row,
get_v column3 row,
get_v column4 row,
get_v column5 row ))
rows
let start_transaction_sql = "start transaction"
let start_transaction dbd = run dbd start_transaction_sql
let start_transaction_exn dbd = run_exn dbd start_transaction_sql
let commit_sql = "commit"
let commit dbd = run dbd commit_sql
let commit_exn dbd = run_exn dbd commit_sql
let rollback_sql = "rollback"
let rollback dbd = run dbd rollback_sql
let rollback_exn dbd = run_exn dbd rollback_sql
let with_transaction dbd f =
let ( >>= ) = R.( >>= ) in
start_transaction dbd >>= fun () ->
match f () with
| Ok _ as o -> commit dbd >>= fun () -> o
| Error _ as e -> rollback dbd >>= fun () -> e
| exception exn -> rollback dbd >>= fun () -> raise exn
let with_transaction_exn dbd f =
start_transaction_exn dbd;
match f () with
| v ->
commit_exn dbd;
v
| exception exn ->
rollback_exn dbd;
raise exn
module Prepared = struct
type 'a t = {
dbd : Mysql.dbd;
sql : string;
mutable statement : Mysql.Prepared.stmt;
}
constraint 'a = [< `Run | `Get ]
let make dbd sql = { dbd; sql; statement = Mysql.Prepared.create dbd sql }
let make_run dbd sql =
try Ok (make dbd sql) with
| Mysql.Error msg -> R.error_msg msg
let make_get dbd sql =
try Ok (make dbd sql) with
| Mysql.Error msg -> R.error_msg msg
let prepare_parameters fields =
Array.map Field.opt_to_string_unquoted (Array.of_list fields)
let to_rows columns result =
let rec loop rows =
match Mysql.Prepared.fetch result with
| None -> Ok (List.rev rows)
| Some row -> loop (parse_row columns row :: rows)
| exception Mysql.Error msg -> R.error_msg msg
in
loop []
let exec ps fields =
let ( >>= ) = R.( >>= ) in
let params = prepare_parameters fields in
( match Mysql.Prepared.execute ps.statement params with
| x -> Ok x
| exception Mysql.Error msg ->
R.error_msgf "While executing a prepared statement: %s" msg
)
>>= fun result ->
let columns =
Mysql.Prepared.result_metadata ps.statement |> Mysql.fetch_fields
in
match columns with
| None -> Ok None
| Some columns -> Ok (Some (to_rows columns result))
let run ps fields =
match exec ps fields with
| Error _ as e -> e
| Ok None -> Ok ()
| Ok (Some _rows) ->
R.error_msgf "Ezmysql.Prepared.run: unexpected results from %s" ps.sql
| exception Mysql.Error msg ->
R.error_msgf "Ezmysql.Prepared.run: %a from %s"
Fmt.(brackets string)
msg ps.sql
let get ps fields =
match exec ps fields with
| Error _ as e -> e
| Ok None ->
R.error_msgf "Ezmysql.Prepared.get: empty result from %s" ps.sql
| Ok (Some rows) -> rows
| exception Mysql.Error msg ->
R.error_msgf "Ezmysql.Prepared.get: %a from %s"
Fmt.(brackets string)
msg ps.sql
let remake ps =
try
Mysql.ping ps.dbd;
ps.statement <- Mysql.Prepared.create ps.dbd ps.sql;
Ok ()
with
| Mysql.Error msg -> R.error_msg msg
let close ps = Mysql.Prepared.close ps.statement
end
module Table = struct
type t = {
name : string;
columns : Column.packed_spec list;
primary_key : Column.packed_spec list;
indices : (string * index_field list) list;
unique_keys : (string * Column.packed_spec list) list;
foreign_keys : foreign_key list;
deps : t list;
}
and foreign_key = {
key_name : string option;
keys : foreign_key_mapping;
on_update : foreign_key_action;
on_delete : foreign_key_action;
}
and foreign_key_mapping = {
foreign_table : t;
key_mapping : key_mapping list;
}
and foreign_key_action =
| Restrict
| Cascade
| Set_null
| No_action
and key_mapping =
| Key : {
local : ('ocaml, 'sql) Column.spec;
remote : ('ocaml, 'sql) Column.spec;
}
-> key_mapping
and index_field =
| Column : (_, _) Column.spec -> index_field
| Prefix : {
column : (_, _) Column.spec;
width : int;
}
-> index_field
let column_of_index_field : index_field -> Column.packed_spec = function
| Column spec -> Pack spec
| Prefix { column = spec; _ } -> Pack spec
let name table = table.name
let mem_columns ~truth ~test =
List.fold_left
(fun ok test_c ->
ok
&& List.exists
(fun truth_c -> Column.equal_packed_spec test_c truth_c)
truth)
true test
let mem_columns_multiple ~truth ~tests =
List.fold_left (fun ok test -> ok && mem_columns ~truth ~test) true tests
let mem_columns_fk ~truth ~tests =
List.fold_left
(fun ok fk ->
let local_columns =
List.map
(fun (Key { local; _ }) -> (Pack local : Column.packed_spec))
fk.keys.key_mapping
in
ok && mem_columns ~truth ~test:local_columns)
true tests
let make_foreign_key ?key_name foreign_table key_mapping ~on_update ~on_delete
=
match key_mapping with
| [] ->
invalid_arg
"Ezmysql.Table.make_foreign_key requires a non-empty list of fields"
| _ ->
let everything_ok =
let remote_columns =
List.map (fun (Key { remote; _ }) -> Column.Pack remote) key_mapping
in
mem_columns ~truth:foreign_table.columns ~test:remote_columns
in
if not everything_ok then
invalid_arg
@@ Fmt.strf
"Ezmysql.Table.make_foreign_key refers to columns absent from %s"
foreign_table.name;
{ key_name; keys = { foreign_table; key_mapping }; on_update; on_delete }
let make
?(primary_key = [])
?(indices = [])
?(unique_keys = [])
?(foreign_keys = [])
name
columns =
if String.is_empty name then
invalid_arg "Ezmysql.Table.make requires a non-empty table name";
( match columns with
| [] -> invalid_arg "Ezmysql.Table.make requires a non-empty column list"
| _ -> ()
);
let deps = List.map (fun fk -> fk.keys.foreign_table) foreign_keys in
let everything_ok =
let indices_columns =
List.map
(fun (_name, index) ->
List.map
(fun index_field -> column_of_index_field index_field)
index)
indices
in
let unique_keys_columns =
List.map (fun (_name, columns) -> columns) unique_keys
in
mem_columns ~truth:columns ~test:primary_key
&& mem_columns_multiple ~truth:columns ~tests:indices_columns
&& mem_columns_multiple ~truth:columns ~tests:unique_keys_columns
&& mem_columns_fk ~truth:columns ~tests:foreign_keys
in
if not everything_ok then
invalid_arg "Ezmysql.Table.make: key or index refers to absent column";
{ name; columns; primary_key; indices; unique_keys; foreign_keys; deps }
let pp_column_type fmt (Column.Pack c) =
Fmt.of_to_string Column.spec_to_sql_type fmt c
let pp_column_spec fmt (Column.Pack c) =
Fmt.of_to_string Column.spec_to_spec_string fmt c
let pp_column_name fmt (Column.Pack c) = Column.pp_spec_name fmt c
let pp_column fmt column =
Fmt.pf fmt "@[%a %a%a@]" pp_column_name column pp_column_type column
pp_column_spec column
let pp_primary_key fmt pk =
match pk with
| [] -> ()
| _ ->
Fmt.comma fmt ();
Fmt.pf fmt "@[primary@ key@ %a@]"
(Pp_internal.csv_simple pp_column_name)
pk
let pp_index_field fmt index_field =
match index_field with
| Column spec -> Fmt.pf fmt "%a" pp_column_name (Pack spec)
| Prefix { column; width } ->
Fmt.pf fmt "%a(%d)" pp_column_name (Pack column) width
let pp_index fmt (name, index) =
match index with
| [] -> ()
| _ ->
Fmt.pf fmt "@[index@ %s@ %a@]" name
(Pp_internal.csv_simple pp_index_field)
index
let pp_indices fmt indices =
match indices with
| [] -> ()
| _ ->
Fmt.comma fmt ();
Fmt.list ~sep:Fmt.comma pp_index fmt indices
let pp_unique_key fmt (name, uk) =
match uk with
| [] -> ()
| _ ->
Fmt.pf fmt "@[unique@ key@ %s@ %a@]" name
(Pp_internal.csv_simple pp_column_name)
uk
let pp_unique_keys fmt unique_keys =
match unique_keys with
| [] -> ()
| _ ->
Fmt.comma fmt ();
Fmt.list ~sep:Fmt.comma pp_unique_key fmt unique_keys
let foreign_key_action_to_string = function
| Restrict -> "restrict"
| Cascade -> "cascade"
| Set_null -> "set null"
| No_action -> "no action"
let pp_foreign_key_action = Fmt.of_to_string foreign_key_action_to_string
let pp_foreign_key fmt fk =
match fk.keys.key_mapping with
| [] -> ()
| _ ->
let (local, foreign) =
List.map
(fun (Key { local; remote }) ->
(Column.Pack local, Column.Pack remote))
fk.keys.key_mapping
|> List.split
in
Fmt.pf fmt "@[foreign@ key@ ";
( match fk.key_name with
| None -> ()
| Some name -> Fmt.pf fmt "%s@ " name
);
Fmt.pf fmt "%a@;" (Pp_internal.csv_simple pp_column_name) local;
Fmt.pf fmt "references@ %s%a@;" fk.keys.foreign_table.name
(Pp_internal.csv_simple pp_column_name)
foreign;
Fmt.pf fmt "on@ update@ %a@;on@ delete@ %a@]" pp_foreign_key_action
fk.on_update pp_foreign_key_action fk.on_delete
let pp_foreign_keys fmt fks =
match fks with
| [] -> ()
| _ ->
Fmt.comma fmt ();
Fmt.list ~sep:Fmt.comma pp_foreign_key fmt fks
let pp_sql fmt ~ok_if_exists table =
Fmt.pf fmt "@[";
Fmt.pf fmt "@[create@ table@]@ ";
if ok_if_exists then Fmt.pf fmt "@[if@ not@ exists@]@ ";
Fmt.pf fmt "%s" table.name;
Fmt.pf fmt "@;@[<1>(%a" (Fmt.list ~sep:Fmt.comma pp_column) table.columns;
Fmt.pf fmt "%a" pp_primary_key table.primary_key;
Fmt.pf fmt "%a" pp_indices table.indices;
Fmt.pf fmt "%a" pp_unique_keys table.unique_keys;
Fmt.pf fmt "%a" pp_foreign_keys table.foreign_keys;
Fmt.pf fmt ")@]"
let pp_name fmt table = Fmt.string fmt (name table)
let create_exn dbd table ~ok_if_exists =
let sql = make_run "%a" (pp_sql ~ok_if_exists) table in
run_exn dbd sql
let create dbd table ~ok_if_exists =
let sql = make_run "%a" (pp_sql ~ok_if_exists) table in
run dbd sql
let deps table = table.deps
module Topo_sort = struct
module V : Graph.Sig.COMPARABLE with type t = t = struct
type nonrec t = t
let compare (a : t) (b : t) = compare a.name b.name
let equal (a : t) (b : t) = String.equal a.name b.name
let hash (a : t) = Hashtbl.hash a.name
end
module G = Graph.Persistent.Digraph.Concrete (V)
module Topo = Graph.Topological.Make_stable (G)
let rec add_table graph table =
if G.mem_vertex graph table then
graph
else (
let graph = G.add_vertex graph table in
List.fold_left
(fun graph_accumulator table_dep ->
let graph_accumulator =
let graph = G.add_vertex graph_accumulator table_dep in
G.add_edge graph table table_dep
in
add_table graph_accumulator table_dep)
graph table.deps
)
let init tables = List.fold_left add_table G.empty tables
let sorted_deps graph =
Topo.fold (fun table all_deps -> table :: all_deps) graph []
end
let transitive_sorted_deps tables =
Topo_sort.init tables |> Topo_sort.sorted_deps
end
module type S = sig
type t
val table : Table.t
val to_row : t -> row
val of_row : row -> (t, [> `Msg of string ]) result
end
module type Db = sig
type t
val table : Table.t
val init :
Mysql.dbd -> ok_if_exists:bool -> (unit, [> `Msg of string ]) result
val init_exn : Mysql.dbd -> ok_if_exists:bool -> unit
val insert' :
Mysql.dbd ->
t ->
('a, Format.formatter, unit, (unit, [> R.msg ]) result) format4 ->
'a
val insert_exn' :
Mysql.dbd -> t -> ('a, Format.formatter, unit, unit) format4 -> 'a
val insert_sql :
?on_duplicate_key_update:
[ `All
| `Columns of Column.packed_spec list
| `Except of Column.packed_spec list
| `With_id of (_, _) Column.spec * Column.packed_spec list
] ->
Mysql.dbd ->
t ->
[ `Run ] sql
val insert :
?on_duplicate_key_update:
[ `All
| `Columns of Column.packed_spec list
| `Except of Column.packed_spec list
| `With_id of (_, _) Column.spec * Column.packed_spec list
] ->
Mysql.dbd ->
t ->
(unit, [> `Msg of string ]) result
val insert_exn :
?on_duplicate_key_update:
[ `All
| `Columns of Column.packed_spec list
| `Except of Column.packed_spec list
| `With_id of (_, _) Column.spec * Column.packed_spec list
] ->
Mysql.dbd ->
t ->
unit
val replace : [ `Use_insert_on_duplicate_key_update ]
[@@ocaml.deprecated "Use 'insert ~on_duplicate_key_update' instead"]
val update_sql : ('a, Format.formatter, unit, [ `Run ] sql) format4 -> 'a
val update :
Mysql.dbd ->
('a, Format.formatter, unit, (unit, [> `Msg of string ]) result) format4 ->
'a
val update_exn : Mysql.dbd -> ('a, Format.formatter, unit, unit) format4 -> 'a
val select_sql : ('a, Format.formatter, unit, [ `Get ] sql) format4 -> 'a
val select :
Mysql.dbd ->
('a, Format.formatter, unit, (t list, [> `Msg of string ]) result) format4 ->
'a
val select_exn :
Mysql.dbd -> ('a, Format.formatter, unit, t list) format4 -> 'a
val delete_sql : ('a, Format.formatter, unit, [ `Run ] sql) format4 -> 'a
val delete :
Mysql.dbd ->
('a, Format.formatter, unit, (unit, [> `Msg of string ]) result) format4 ->
'a
val delete_exn : Mysql.dbd -> ('a, Format.formatter, unit, unit) format4 -> 'a
end
module Make (M : S) : Db with type t := M.t = struct
let table = M.table
let init dbd ~ok_if_exists = Table.create dbd M.table ~ok_if_exists
let init_exn dbd ~ok_if_exists = Table.create_exn dbd M.table ~ok_if_exists
let insert'_sql runner dbd t fmt =
let row = M.to_row t in
Fmt.kstrf
(fun s -> insert' dbd ~into:(Table.name M.table) row "%s" s |> runner dbd)
fmt
let insert' dbd t fmt = insert'_sql run dbd t fmt
let insert_exn' dbd t fmt = insert'_sql run_exn dbd t fmt
let on_duplicate_key_update_to_strings = function
| `All -> `All
| `Columns specs -> `Columns (List.map Column.name_of_packed_spec specs)
| `Except specs -> `Except (List.map Column.name_of_packed_spec specs)
| `With_id (id_spec, specs) ->
`With_id
(Column.name_of_spec id_spec, List.map Column.name_of_packed_spec specs)
let insert_sql ?on_duplicate_key_update dbd t =
let row = M.to_row t in
let on_duplicate_key_update =
match on_duplicate_key_update with
| None -> None
| Some x -> Some (on_duplicate_key_update_to_strings x)
in
insert ?on_duplicate_key_update dbd ~into:M.table.Table.name row
let insert ?on_duplicate_key_update dbd t =
run dbd (insert_sql ?on_duplicate_key_update dbd t)
let insert_exn ?on_duplicate_key_update dbd t =
run_exn dbd (insert_sql ?on_duplicate_key_update dbd t)
let replace = `Use_insert_on_duplicate_key_update
let update_sql clauses =
Fmt.kstrf (fun s -> update M.table.name "%s" s) clauses
let update_exn dbd clauses =
Fmt.kstrf (fun s -> update M.table.name "%s" s |> run_exn dbd) clauses
let update dbd clauses =
Fmt.kstrf (fun s -> update M.table.name "%s" s |> run dbd) clauses
exception Error of string
let of_row_exn row =
match M.of_row row with
| Ok x -> x
| Error (`Msg msg) -> raise (Error msg)
let select_sql clauses =
Fmt.kstrf (fun s -> select [ "*" ] ~from:M.table.Table.name "%s" s) clauses
let select_exn dbd clauses =
Fmt.kstrf
(fun s ->
let rows =
select [ "*" ] ~from:M.table.Table.name "%s" s |> get_exn dbd
in
try List.rev_map of_row_exn rows |> List.rev with
| Error msg -> failwith msg)
clauses
let select dbd clauses =
let ( >>= ) = R.( >>= ) in
Fmt.kstrf
(fun s ->
select [ "*" ] ~from:M.table.Table.name "%s" s |> get dbd
>>= fun rows ->
try List.rev_map of_row_exn rows |> List.rev |> R.ok with
| Error msg -> R.error_msg msg)
clauses
let delete_sql clauses =
Fmt.kstrf (fun s -> delete ~from:M.table.Table.name "%s" s) clauses
let delete_exn dbd clauses =
Fmt.kstrf
(fun s -> delete ~from:M.table.Table.name "%s" s |> run_exn dbd)
clauses
let delete dbd clauses =
Fmt.kstrf
(fun s -> delete ~from:M.table.Table.name "%s" s |> run dbd)
clauses
end
module Clause = struct
type comparison =
| Eq
| Ne
| Lt
| Gt
| Lte
| Gte
type 'ocaml column =
| Column : ('ocaml, 'sql) Column.t -> 'ocaml column
| Spec : ('ocaml, 'sql) Column.spec -> 'ocaml column
type t =
| And : t * t -> t
| Or : t * t -> t
| Compare : comparison * 'ocaml column * 'ocaml -> t
let make_compare comparison column v = Compare (comparison, column, v)
let ( = ) col v = make_compare Eq col v
let ( <> ) col v = make_compare Ne col v
let ( < ) col v = make_compare Lt col v
let ( > ) col v = make_compare Gt col v
let ( <= ) col v = make_compare Lte col v
let ( >= ) col v = make_compare Gte col v
let ( && ) a b = And (a, b)
let ( || ) a b = Or (a, b)
let string_of_comparison comp =
match comp with
| Eq -> "="
| Ne -> "<>"
| Lt -> "<"
| Gt -> ">"
| Lte -> "<="
| Gte -> ">="
let pp_column_name fmt column =
match column with
| Column c -> Column.pp_name fmt c
| Spec s -> Column.pp_spec_name fmt s
let pp_comparison = Fmt.of_to_string string_of_comparison
let pp_value dbd column =
match column with
| Column c -> Column.pp dbd c
| Spec s -> Column.pp_spec dbd s
let rec pp dbd fmt clause =
match clause with
| And (left, right) ->
Fmt.pf fmt "(%a@ and@ %a)" (pp dbd) left (pp dbd) right
| Or (left, right) ->
Fmt.pf fmt "(%a@ or @ %a)" (pp dbd) left (pp dbd) right
| Compare (comparison, column, v) ->
Fmt.pf fmt "%a@ %a@ %a" pp_column_name column pp_comparison comparison
(pp_value dbd column) v
end
module Pp = struct
include Pp_internal
let column = Column.pp
let column_name = Column.pp_name
let spec = Column.pp_spec
let spec_name = Column.pp_spec_name
let field = Field.pp
let field_opt = Field.pp_opt
let table_name = Table.pp_name
let clause = Clause.pp
end
|
3c5c8e929a196ac893d9f20becf3209025234727c8fc0affe321e658e72914b6 | rzezeski/try-try-try | rts_get_fsm.erl | %% @doc The coordinator for stat get operations. The key here is to
generate the preflist just like in wrtie_fsm and then query each
%% replica and wait until a quorum is met.
-module(rts_get_fsm).
-behavior(gen_fsm).
-include("rts.hrl").
%% API
-export([start_link/4, get/2]).
%% Callbacks
-export([init/1, code_change/4, handle_event/3, handle_info/3,
handle_sync_event/4, terminate/3]).
States
-export([prepare/2, execute/2, waiting/2]).
-record(state, {req_id,
from,
client,
stat_name,
preflist,
num_r=0,
replies=[]}).
%%%===================================================================
%%% API
%%%===================================================================
start_link(ReqID, From, Client, StatName) ->
gen_fsm:start_link(?MODULE, [ReqID, From, Client, StatName], []).
get(Client, StatName) ->
ReqID = mk_reqid(),
rts_get_fsm_sup:start_get_fsm([ReqID, self(), Client, StatName]),
{ok, ReqID}.
%%%===================================================================
States
%%%===================================================================
%% Intiailize state data.
init([ReqId, From, Client, StatName]) ->
SD = #state{req_id=ReqId,
from=From,
client=Client,
stat_name=StatName},
{ok, prepare, SD, 0}.
@doc Calculate the Preflist .
prepare(timeout, SD0=#state{client=Client,
stat_name=StatName}) ->
DocIdx = riak_core_util:chash_key({list_to_binary(Client),
list_to_binary(StatName)}),
Prelist = riak_core_apl:get_apl(DocIdx, ?N, rts_stat),
SD = SD0#state{preflist=Prelist},
{next_state, execute, SD, 0}.
@doc Execute the get reqs .
execute(timeout, SD0=#state{req_id=ReqId,
stat_name=StatName,
preflist=Prelist}) ->
rts_stat_vnode:get(Prelist, ReqId, StatName),
{next_state, waiting, SD0}.
%% @doc Wait for R replies and then respond to From (original client
that called ` rts : get/2 ' ) .
%% TODO: read repair...or another blog post?
waiting({ok, ReqID, Val}, SD0=#state{from=From, num_r=NumR0, replies=Replies0}) ->
NumR = NumR0 + 1,
Replies = [Val|Replies0],
SD = SD0#state{num_r=NumR,replies=Replies},
if
NumR =:= ?R ->
Reply =
case lists:any(different(Val), Replies) of
true ->
Replies;
false ->
Val
end,
From ! {ReqID, ok, Reply},
{stop, normal, SD};
true -> {next_state, waiting, SD}
end.
handle_info(_Info, _StateName, StateData) ->
{stop,badmsg,StateData}.
handle_event(_Event, _StateName, StateData) ->
{stop,badmsg,StateData}.
handle_sync_event(_Event, _From, _StateName, StateData) ->
{stop,badmsg,StateData}.
code_change(_OldVsn, StateName, State, _Extra) -> {ok, StateName, State}.
terminate(_Reason, _SN, _SD) ->
ok.
%%%===================================================================
%%% Internal Functions
%%%===================================================================
different(A) -> fun(B) -> A =/= B end.
mk_reqid() -> erlang:phash2(erlang:now()).
| null | https://raw.githubusercontent.com/rzezeski/try-try-try/c5d99f29fb3380f8653efdd1aa6a8f52143a9717/2011/riak-core-the-coordinator/rts/src/rts_get_fsm.erl | erlang | @doc The coordinator for stat get operations. The key here is to
replica and wait until a quorum is met.
API
Callbacks
===================================================================
API
===================================================================
===================================================================
===================================================================
Intiailize state data.
@doc Wait for R replies and then respond to From (original client
TODO: read repair...or another blog post?
===================================================================
Internal Functions
=================================================================== | generate the preflist just like in wrtie_fsm and then query each
-module(rts_get_fsm).
-behavior(gen_fsm).
-include("rts.hrl").
-export([start_link/4, get/2]).
-export([init/1, code_change/4, handle_event/3, handle_info/3,
handle_sync_event/4, terminate/3]).
States
-export([prepare/2, execute/2, waiting/2]).
-record(state, {req_id,
from,
client,
stat_name,
preflist,
num_r=0,
replies=[]}).
start_link(ReqID, From, Client, StatName) ->
gen_fsm:start_link(?MODULE, [ReqID, From, Client, StatName], []).
get(Client, StatName) ->
ReqID = mk_reqid(),
rts_get_fsm_sup:start_get_fsm([ReqID, self(), Client, StatName]),
{ok, ReqID}.
States
init([ReqId, From, Client, StatName]) ->
SD = #state{req_id=ReqId,
from=From,
client=Client,
stat_name=StatName},
{ok, prepare, SD, 0}.
@doc Calculate the Preflist .
prepare(timeout, SD0=#state{client=Client,
stat_name=StatName}) ->
DocIdx = riak_core_util:chash_key({list_to_binary(Client),
list_to_binary(StatName)}),
Prelist = riak_core_apl:get_apl(DocIdx, ?N, rts_stat),
SD = SD0#state{preflist=Prelist},
{next_state, execute, SD, 0}.
@doc Execute the get reqs .
execute(timeout, SD0=#state{req_id=ReqId,
stat_name=StatName,
preflist=Prelist}) ->
rts_stat_vnode:get(Prelist, ReqId, StatName),
{next_state, waiting, SD0}.
that called ` rts : get/2 ' ) .
waiting({ok, ReqID, Val}, SD0=#state{from=From, num_r=NumR0, replies=Replies0}) ->
NumR = NumR0 + 1,
Replies = [Val|Replies0],
SD = SD0#state{num_r=NumR,replies=Replies},
if
NumR =:= ?R ->
Reply =
case lists:any(different(Val), Replies) of
true ->
Replies;
false ->
Val
end,
From ! {ReqID, ok, Reply},
{stop, normal, SD};
true -> {next_state, waiting, SD}
end.
handle_info(_Info, _StateName, StateData) ->
{stop,badmsg,StateData}.
handle_event(_Event, _StateName, StateData) ->
{stop,badmsg,StateData}.
handle_sync_event(_Event, _From, _StateName, StateData) ->
{stop,badmsg,StateData}.
code_change(_OldVsn, StateName, State, _Extra) -> {ok, StateName, State}.
terminate(_Reason, _SN, _SD) ->
ok.
different(A) -> fun(B) -> A =/= B end.
mk_reqid() -> erlang:phash2(erlang:now()).
|
4512b2925c68919e7fec598d025bd073238072a983a9455282b61b2e39b403a6 | kostmo/circleci-failure-tracker | Routes.hs | {-# LANGUAGE OverloadedStrings #-}
module Routes where
import Control.Monad (unless, when)
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Trans.Except (ExceptT (ExceptT), except,
runExceptT)
import Control.Monad.Trans.Reader (runReaderT)
import Data.Aeson (ToJSON)
import Data.Default (def)
import Data.String (fromString)
import Data.Text (Text)
import qualified Data.Text.Lazy as LT
import qualified Data.Vault.Lazy as Vault
import Log (LogT, localDomain)
import Network.Wai
import Network.Wai.Log (logRequestsWith)
import Network.Wai.Middleware.ForceSSL (forceSSL)
import Network.Wai.Middleware.Gzip (gzip)
import Network.Wai.Middleware.Static hiding ((<|>))
import Network.Wai.Session (Session, SessionStore,
withSession)
import System.FilePath
import qualified Web.Scotty as S
import qualified Web.Scotty.Internal.Types as ScottyTypes
import qualified Auth
import qualified AuthConfig
import qualified BuildRetrieval
import qualified Builds
import qualified CircleApi
import qualified Constants
import qualified DbHelpers
import qualified FrontendHelpers
import qualified GitRev
import qualified JsonUtils
import qualified MatchOccurrences
import qualified PostedComments
import qualified Scanning
import qualified ScanPatterns
import qualified Sql.Read.Breakages as ReadBreakages
import qualified Sql.Read.Builds as ReadBuilds
import qualified Sql.Read.Commits as ReadCommits
import qualified Sql.Read.Flaky as SqlReadFlaky
import qualified Sql.Read.Matches as ReadMatches
import qualified Sql.Read.Patterns as ReadPatterns
import qualified Sql.Read.PullRequests as ReadPullRequests
import qualified Sql.Read.Queue as ReadQueue
import qualified Sql.Read.Read as SqlRead
import qualified Sql.Read.Stats as ReadStats
import qualified Sql.Read.TestResults as ReadTestResults
import qualified Sql.Read.Types as SqlReadTypes
import qualified Sql.Update as SqlUpdate
import qualified Sql.Write as SqlWrite
import qualified StatusUpdate
import qualified Types
import qualified WebApi
data SetupData = SetupData {
_setup_static_base :: String
, _setup_github_config :: AuthConfig.GithubConfig
, _third_party_creds :: CircleApi.ThirdPartyAuth
, _setup_connection_data :: DbHelpers.DbConnectionData
, _setup_mview_connection_data :: DbHelpers.DbConnectionData -- ^ for updating materialized views
}
data PersistenceData = PersistenceData {
_setup_cache :: Types.CacheStore
, _setup_session :: Vault.Key (Session IO String String)
, _setup_store :: SessionStore IO String String
}
scottyApp ::
(LogT IO () -> IO ())
-> PersistenceData
-> SetupData
-> ScottyTypes.ScottyT LT.Text IO ()
scottyApp
logger
(PersistenceData cache session store)
(SetupData static_base github_config third_party_creds connection_data mview_connection_data) = do
S.middleware $ logRequestsWith $
\logger_t -> logger $ localDomain logger_domain_identifier logger_t
S.middleware $ withSession store (fromString "SESSION") def session
S.middleware $ staticPolicy $ noDots >-> addBase static_base
S.middleware $ gzip def
unless (AuthConfig.no_force_ssl github_config || AuthConfig.is_local github_config) $
S.middleware forceSSL
-- For debugging only
when (AuthConfig.is_local github_config) FrontendHelpers.echoEndpoint
-- XXX IMPORTANT:
-- The session cookie is specific to the parent dir of the path.
-- So with the path "/api/callback", only HTTP accesses to paths
at or below the " /api/ " path will be members of the same session .
-- Consequentially, a cookie set (namely, the github access token)
-- in a request to a certain path will only be accessible to
-- other requests at or below that same parent directory.
S.get "/api/github-auth-callback" $ do
rq <- S.request
let Just (_sessionLookup, sessionInsert) = Vault.lookup session $ vault rq
Auth.callbackH cache github_config $ sessionInsert Auth.githubAuthTokenSessionKey
S.get "/logout" $ Auth.logoutH cache
S.post "/api/github-event" $ StatusUpdate.githubEventEndpoint $
Constants.ProviderConfigs
github_config
third_party_creds
connection_data
S.post "/api/code-breakage-resolution-report" $
withAuth $ SqlWrite.apiCodeBreakageResolutionInsertMultiple
<$> (Builds.RawCommit <$> S.param "sha1")
<*> S.param "causes"
S.post "/api/code-breakage-cause-report" $
withAuth $ SqlWrite.reportBreakage
<$> S.param "jobs"
<*> S.param "failure_mode_id"
<*> (FrontendHelpers.checkboxIsTrue <$> S.param "is_ongoing")
<*> (Builds.RawCommit <$> S.param "last_affected_sha1")
<*> (Builds.RawCommit <$> S.param "cause_sha1")
<*> S.param "notes"
S.post "/api/refresh-materialized-view" $ do
view_name <- S.param "view-name"
is_from_frontend <- S.param "from-frontend"
result <- liftIO $ do
mview_conn <- DbHelpers.getConnection mview_connection_data
flip runReaderT mview_conn $ SqlRead.refreshCachedMasterGrid view_name $ FrontendHelpers.checkboxIsTrue is_from_frontend
S.json $ WebApi.toJsonEither result
S.post "/api/get-logged-in-user" $
withAuth $ pure FrontendHelpers.getLoggedInUser
S.post "/api/rescan-multiple-builds" $
withAuth $
Scanning.apiRescanBuilds third_party_creds
<$> S.jsonData
S.post "/api/rescan-build" $
withAuth $
Scanning.rescanSingleBuildWrapped third_party_creds
<$> (Builds.UniversalBuildId <$> S.param "build")
S.post "/api/rebuild-single-job" $
withAuth $ FrontendHelpers.facilitateJobRebuild (CircleApi.circle_api_token third_party_creds)
<$> (Builds.UniversalBuildId <$> S.param "build")
S.post "/api/rebuild-flaky-candidates" $
withAuth $ SqlUpdate.triggerFlakyRebuildCandidates (CircleApi.circle_api_token third_party_creds)
<$> (Builds.RawCommit <$> S.param "sha1")
S.post "/api/promote-match" $
withAuth $
SqlWrite.promoteMatch
<$> (MatchOccurrences.MatchId <$> S.param "match_id")
S.post "/api/elaborate-failure" $
withAuth $
SqlWrite.elaborateBuildFailure
<$> (Builds.UniversalBuildId <$> S.param "build")
S.post "/api/rescan-commit" $
withAuth $
FrontendHelpers.rescanCommitCallback third_party_creds
<$> (Builds.RawCommit <$> S.param "sha1")
S.post "/api/populate-master-commits" $
FrontendHelpers.requireAdminToken connection_data github_config SqlWrite.storeMasterCommits
S.post "/api/populate-master-commit-metadata" $
FrontendHelpers.requireAdminToken connection_data github_config SqlWrite.storeCommitMetadata
S.post "/api/new-pattern-insert" $
withAuth $
SqlWrite.apiNewPatternWrapped <$> FrontendHelpers.patternFromParms
S.post "/api/code-breakage-mode-update" $
withAuth $
SqlWrite.updateCodeBreakageMode
<$> S.param "cause_id"
<*> S.param "mode"
S.post "/api/create-pattern-remediation" $
withAuth $
SqlWrite.createPatternRemediation
<$> (ScanPatterns.PatternId <$> S.param "pattern_id")
<*> parseRemediationObject
get "/api/latest-master-commit-with-metadata" $
pure $ WebApi.toJsonEither <$> ReadCommits.getLatestMasterCommitWithMetadata
get "/api/viability-increase" $
ReadStats.apiViabilityIncreaseByWeek <$> S.param "weeks"
get "/api/throughput-by-hour" $
ReadStats.apiThroughputByHour <$> S.param "hours"
get "/api/sqs-queue-depth" $
ReadStats.apiQueueDepthTimeplot <$> S.param "hours"
get "/api/status-notifications-by-hour" $
ReadStats.apiStatusNotificationsByHour <$> S.param "hours"
get "/api/master-downstream-commits" $
ReadCommits.apiMasterDownstreamCommits . Builds.RawCommit <$> S.param "sha1"
get "/api/test-results" $
fmap WebApi.toJsonEither . ReadTestResults.getRecordedTestResults . Builds.UniversalBuildId <$> S.param "build_id"
get "/api/comment-postings-by-week" $
pure ReadStats.prCommentRevisionsByWeek
get "/api/failed-commits-by-day" $
pure SqlReadFlaky.apiFailedCommitsByDay
get "/api/code-breakages-leftover-by-commit" $
pure ReadBreakages.apiLeftoverCodeBreakagesByCommit
get "/api/missing-required-builds" $
pure ReadBuilds.apiMissingRequiredBuilds
get "/api/code-breakages-author-stats" $
pure ReadBreakages.apiBreakageAuthorStats
get "/api/broken-commits-without-metadata" $
pure ReadCommits.apiBrokenCommitsWithoutMetadata
get "/api/list-failure-modes" $
pure SqlRead.apiListFailureModes
get "/api/job" $
pure SqlRead.apiJobs
get "/api/log-storage-stats" $
pure ReadStats.apiStorageStats
get "/api/log-size-histogram" $
pure ReadStats.apiByteCountHistogram
get "/api/log-lines-histogram" $
pure ReadStats.apiLineCountHistogram
get "/api/master-build-stats" $
pure ReadStats.masterBuildFailureStats
get "/api/patterns-dump" $
pure ReadPatterns.dumpPatterns
get "/api/patterns-timeline" $
pure ReadPatterns.apiPatternOccurrenceTimeline
get "/api/patterns" $
pure ReadPatterns.apiPatterns
-- FIXME This is legacy. Don't use this!
get "/api/inferred-scheduled-builds" $
pure SqlRead.getScheduledJobNames
get "/api/step-list" $
pure SqlRead.apiListSteps
get "/api/step" $
pure SqlRead.apiStep
get "/api/master-commits-granular" $
ReadBreakages.masterCommitsGranular
<$> S.param "weeks"
get "/api/master-deterministic-failure-modes" $
pure SqlRead.apiDeterministicFailureModes
get "/api/mview-refreshes" $
pure SqlRead.apiMaterializedViewRefreshes
get "/api/summary" $
pure ReadStats.apiSummaryStats
get "/api/job-schedule-stats" $
pure SqlRead.apiJobScheduleStats
get "/api/tags" $
pure SqlRead.apiTagsHistogram
get "/api/unmatched-builds" $
pure ReadBuilds.apiUnmatchedBuilds
get "/api/idiopathic-failed-builds" $
pure ReadBuilds.apiIdiopathicBuilds
get "/api/code-breakages-leftover-detected" $
pure ReadBreakages.apiLeftoverDetectedCodeBreakages
get "/api/code-breakages-detected" $
pure ReadBreakages.apiDetectedCodeBreakages
get "/api/code-breakages-annotated" $
ReadBreakages.apiAnnotatedCodeBreakagesWithImpact
<$> parseTimeRangeParms
get "/api/code-breakages-annotated-single" $
fmap WebApi.toJsonEither . ReadBreakages.apiAnnotatedCodeBreakagesWithoutImpactSingle
<$> S.param "cause_id"
get "/api/code-breakage-mode-single" $
ReadBreakages.apiCodeBreakagesModeSingle <$> S.param "cause_id"
get "/api/pr-comment-opt-out-stats" $
pure ReadPullRequests.allUserOptOutStats
get "/api/posted-pr-comments" $
ReadPullRequests.apiPostedPRComments <$> S.param "count"
get "/api/posted-comment-revision-body" $
fmap WebApi.toJsonEither . ReadPullRequests.getSinglePostedCommentMarkdown <$> (PostedComments.CommentRevisionId <$> S.param "comment_revision")
get "/api/latest-posted-comment-for-pr" $
ReadPullRequests.apiPostedCommentsForPR <$> (Builds.PullRequestNumber <$> S.param "pr")
get "/api/all-posted-comments-for-pr" $
ReadPullRequests.apiAllPostedCommentsForPR <$> (Builds.PullRequestNumber <$> S.param "pr")
TODO This endpoint not used yet
get "/api/queue-insertions-for-commit" $
ReadQueue.getQueueInsertionsForSha1 <$> (Builds.RawCommit <$> S.param "sha1")
TODO This endpoint not used yet
get "/api/queue-insertions-for-commit-and-job" $
ReadQueue.getQueueInsertionsForSha1AndJob
<$> S.param "job"
<*> (Builds.RawCommit <$> S.param "sha1")
get "/api/historical-pr-associations" $
ReadPullRequests.getPullRequestsContainingCommit . Builds.RawCommit <$> S.param "sha1"
get "/api/upstream-broken-jobs-for-commit" $
ReadBreakages.getInferredSpanningBrokenJobsBetter . Builds.RawCommit <$> S.param "sha1"
get "/api/known-breakage-affected-jobs" $
ReadBreakages.knownBreakageAffectedJobs <$> S.param "cause_id"
get "/api/tag-suggest" $
SqlRead.apiAutocompleteTags <$> S.param "term"
get "/api/step-suggest" $
SqlRead.apiAutocompleteSteps <$> S.param "term"
get "/api/master-breakages-monthly-stats" $
pure ReadStats.masterBreakageMonthlyStats
get "/api/downstream-impact-weekly" $
ReadStats.downstreamWeeklyFailureStats
<$> S.param "weeks"
get "/api/master-weekly-nondeterministic-unmitigated-failures" $
ReadStats.masterWeeklyUnattributedFailureStats
<$> S.param "weeks"
get "/api/master-monthly-nondeterministic-unmitigated-failures" $
ReadStats.masterMonthlyUnattributedFailureStats
<$> S.param "months"
get "/api/master-weekly-failure-stats" $
fmap WebApi.toJsonEither . ReadStats.masterWeeklyFailureStats
<$> S.param "weeks"
get "/api/master-pr-merge-time-weekly-failure-stats" $
ReadStats.getMergeTimeFailingPullRequestBuildsByWeek
<$> S.param "weeks"
get "/api/page-views-by-week" $
ReadStats.getPageViewsByWeek
<$> S.param "weeks"
get "/api/unmatched-builds-for-commit" $
fmap WebApi.toJsonEither . ReadBuilds.apiUnmatchedCommitBuilds . Builds.RawCommit <$> S.param "sha1"
get "/api/idiopathic-failed-builds-for-commit" $
fmap WebApi.toJsonEither . ReadBuilds.apiIdiopathicCommitBuilds . Builds.RawCommit <$> S.param "sha1"
get "/api/timed-out-builds-for-commit" $
fmap WebApi.toJsonEither . ReadBuilds.apiTimeoutCommitBuilds . Builds.RawCommit <$> S.param "sha1"
get "/api/pattern-step-occurrences" $
ReadPatterns.patternBuildStepOccurrences . ScanPatterns.PatternId <$> S.param "pattern_id"
get "/api/pattern-job-occurrences" $
ReadPatterns.patternBuildJobOccurrences . ScanPatterns.PatternId <$> S.param "pattern_id"
get "/api/best-pattern-matches" $
ReadMatches.getBestPatternMatches . ScanPatterns.PatternId <$> S.param "pattern_id"
get "/api/pattern-matches" $
ReadMatches.getPatternMatches . ScanPatterns.PatternId <$> S.param "pattern_id"
get "/api/pattern" $
ReadPatterns.apiSinglePattern . ScanPatterns.PatternId <$> S.param "pattern_id"
get "/api/build-pattern-matches" $
ReadMatches.getBuildPatternMatches . Builds.UniversalBuildId <$> S.param "build_id"
get "/api/best-build-match" $
ReadMatches.getBestBuildMatch . Builds.UniversalBuildId <$> S.param "build_id"
get "/api/single-build-info" $
fmap WebApi.toJsonEither . SqlUpdate.getBuildInfo third_party_creds . Builds.UniversalBuildId
<$> S.param "build_id"
get "/api/list-build-github-status-events" $
SqlRead.apiGitHubNotificationsForBuild
<$> S.param "job"
<*> (Builds.RawCommit <$> S.param "sha1")
get "/api/list-commit-jobs" $
SqlRead.apiCommitJobs . Builds.RawCommit <$> S.param "sha1"
get "/api/list-master-commit-range-jobs" $
fmap SqlRead.apiCommitRangeJobs $ SqlRead.InclusiveSpan
<$> S.param "first_index"
<*> S.param "last_index"
get "/api/master-commits" $
fmap WebApi.toJsonEither . ((fmap . fmap . fmap) snd ReadCommits.getMasterCommits) <$> FrontendHelpers.getSimpleOffsetMode
get "/api/master-timeline" $
fmap WebApi.toJsonEither . SqlRead.apiMasterBuilds <$> FrontendHelpers.getOffsetMode
S.get "/api/commit-info" $ do
commit_sha1_text <- S.param "sha1"
json_result <- liftIO $ do
conn <- DbHelpers.getConnection connection_data
runExceptT $ do
sha1 <- except $ GitRev.validateSha1 commit_sha1_text
ExceptT $ flip runReaderT conn $
SqlUpdate.countRevisionBuilds
third_party_creds
sha1
S.json $ WebApi.toJsonEither json_result
get "/api/isolated-master-failures-by-day" $
fmap WebApi.toJsonEither . SqlReadFlaky.apiIsolatedMasterFailuresByDay
<$> S.param "age-days"
get "/api/isolated-master-failures-by-week" $
fmap WebApi.toJsonEither . SqlReadFlaky.apiIsolatedMasterFailuresByWeek
<$> S.param "age-weeks"
get "/api/isolated-failures-timespan-coarse-bins" $
fmap WebApi.toJsonEither . SqlReadFlaky.apiCoarseBinsIsolatedJobFailuresTimespan
<$> parseTimeRangeParms
get "/api/isolated-unmatched-failed-builds-master-commit-range" $
(fmap . fmap) WebApi.toJsonEither $ SqlReadFlaky.apiIsolatedUnmatchedBuildsMasterCommitRange
<$> (DbHelpers.InclusiveNumericBounds <$> S.param "commit-id-min" <*> S.param "commit-id-max")
get "/api/isolated-failures-timespan-by-job" $
fmap WebApi.toJsonEither . SqlReadFlaky.apiIsolatedJobFailuresTimespan
<$> parseTimeRangeParms
get "/api/isolated-failures-timespan-by-pattern" $
(fmap . fmap) WebApi.toJsonEither $ SqlReadFlaky.apiIsolatedPatternFailuresTimespan
<$> parseTimeRangeParms
<*> (FrontendHelpers.checkboxIsTrue <$> S.param "exclude_named_tests")
get "/api/isolated-failures-timespan-by-test" $
fmap WebApi.toJsonEither . SqlReadFlaky.apiIsolatedTestFailuresTimespan
<$> parseTimeRangeParms
get "/api/master-job-failures-in-timespan" $
(fmap . fmap) WebApi.toJsonEither $ SqlReadFlaky.apiJobFailuresInTimespan
<$> S.param "job"
<*> (DbHelpers.InclusiveNumericBounds <$> S.param "commit-id-min" <*> S.param "commit-id-max")
get "/api/master-pattern-failures-in-timespan" $
(fmap . fmap) WebApi.toJsonEither $ SqlReadFlaky.apiPatternFailuresInTimespan
<$> (ScanPatterns.PatternId <$> S.param "pattern")
<*> (DbHelpers.InclusiveNumericBounds <$> S.param "commit-id-min" <*> S.param "commit-id-max")
get "/api/master-test-failures-in-timespan" $
(fmap . fmap) WebApi.toJsonEither $ SqlReadFlaky.apiTestFailuresInTimespan
<$> S.param "test"
<*> (DbHelpers.InclusiveNumericBounds <$> S.param "commit-id-min" <*> S.param "commit-id-max")
get "/api/viable-commit-prerequisite-jobs" $
SqlRead.apiViableCommitPrerequisiteJobs <$>
(Builds.RawCommit <$> S.param "sha1")
get "/api/latest-viable-master-commits" $
SqlRead.apiCleanestMasterCommits
<$> S.param "missing-threshold"
<*> S.param "failing-threshold"
get "/api/pr-batch-list" $
ReadPullRequests.apiPrBatchList
<$> (map (read . LT.unpack) . LT.splitOn "," <$> S.param "pr-numbers-delimited")
get "/api/viable-commit-age-history" $
SqlRead.apiLatestViableMasterCommitAgeHistory
<$> S.param "weeks"
<*> (BuildRetrieval.decodeUtcTimeString <$> S.param "end-timestamp")
get "/api/viable-commit-lag-count-history" $
SqlRead.apiLatestViableMasterCommitLagCountHistory
<$> S.param "weeks"
<*> (BuildRetrieval.decodeUtcTimeString <$> S.param "end-timestamp")
get "/api/is-master-commit" $
ReadCommits.isMasterCommit . Builds.RawCommit <$> S.param "sha1"
S.get "/api/commit-builds" $ do
commit_sha1_text <- S.param "sha1"
json_result <- runExceptT $ do
sha1 <- except $ GitRev.validateSha1 commit_sha1_text
either_result <- liftIO $ do
conn <- DbHelpers.getConnection connection_data
two_build_types <- flip runReaderT conn $ SqlRead.getRevisionBuilds sha1
return $ ((fmap . fmap) SqlRead.non_timed_out_builds) two_build_types
except either_result
S.json $ WebApi.toJsonEither json_result
S.get "/api/new-pattern-test" $ do
buildnum <- S.param "build_num"
new_pattern <- FrontendHelpers.patternFromParms
x <- liftIO $ do
conn <- DbHelpers.getConnection connection_data
flip runReaderT conn $ SqlRead.apiNewPatternTest
(Builds.UniversalBuildId buildnum)
new_pattern
S.json $ WebApi.toJsonEither x
S.get "/api/get-user-opt-out-settings" $
FrontendHelpers.jsonAuthorizedDbInteractCommon SqlReadTypes.AuthConnection auth_helper_bundle $
pure ReadPullRequests.userOptOutSettings
S.post "/api/update-user-opt-out-settings" $
withAuth $ SqlWrite.updateUserOptOutSettings <$> S.param "enabled"
S.post "/api/repopulate-test-results" $
withAuth $ Scanning.repopulateTestResults third_party_creds . Builds.UniversalBuildId <$> S.param "build_id"
get "/api/view-log-context" $ (fmap . fmap) WebApi.toJsonEither $
SqlRead.logContextFunc SqlReadTypes.hiddenContextLinecount
<$> (MatchOccurrences.MatchId <$> S.param "match_id")
<*> S.param "context_linecount"
S.get "/api/view-log-full" $ do
build_id <- S.param "build_id"
let universal_build_id = Builds.UniversalBuildId build_id
either_log_result <- liftIO $ do
conn <- DbHelpers.getConnection connection_data
flip runReaderT conn $ SqlRead.retrieveLogFromBuildId universal_build_id
case either_log_result of
Right logs -> S.text logs
Left errors -> S.html $ LT.fromStrict $ JsonUtils._message $ JsonUtils.getDetails errors
post "/api/pattern-specificity-update" $
SqlWrite.updatePatternSpecificity
<$> (ScanPatterns.PatternId <$> S.param "pattern_id")
<*> S.param "specificity"
post "/api/pattern-description-update" $
SqlWrite.updatePatternDescription
<$> (ScanPatterns.PatternId <$> S.param "pattern_id")
<*> S.param "description"
post "/api/pattern-tag-add" $
SqlWrite.addPatternTag
<$> (ScanPatterns.PatternId <$> S.param "pattern_id")
<*> S.param "tag"
post "/api/pattern-tag-remove" $
SqlWrite.removePatternTag
<$> (ScanPatterns.PatternId <$> S.param "pattern_id")
<*> S.param "tag"
post "/api/patterns-restore" $
SqlWrite.restorePatterns
<$> S.jsonData
post "/api/code-breakage-job-delete" $
SqlWrite.deleteCodeBreakageJob
<$> S.param "cause_id"
<*> S.param "job"
post "/api/code-breakage-description-update" $
SqlWrite.updateCodeBreakageDescription
<$> S.param "cause_id"
<*> S.param "description"
post "/api/code-breakage-update-resolution-sha1" $
SqlWrite.updateCodeBreakageResolutionSha1
<$> S.param "cause_id"
<*> (Builds.RawCommit <$> S.param "resolution_sha1")
post "/api/code-breakage-update-cause-sha1" $
SqlWrite.updateCodeBreakageCauseSha1
<$> S.param "cause_id"
<*> (Builds.RawCommit <$> S.param "cause_sha1")
post "/api/code-breakage-delete-resolution" $
SqlWrite.deleteCodeBreakageResolution
<$> S.param "cause_id"
post "/api/code-breakage-delete" $
SqlWrite.deleteCodeBreakage
<$> S.param "cause_id"
S.post "/api/code-breakage-add-affected-job" $
withAuth $
SqlWrite.addCodeBreakageJobName
<$> S.param "cause_id"
<*> S.param "job_name"
S.get "/favicon.ico" $ do
S.setHeader "Content-Type" "image/x-icon"
S.file $ static_base </> "images/favicon.ico"
S.options "/" $ do
S.setHeader "Access-Control-Allow-Origin" "*"
S.setHeader "Access-Control-Allow-Methods" $ LT.intercalate ", " [
"POST"
, "GET"
, "OPTIONS"
]
S.get "/" $ do
S.setHeader "Content-Type" "text/html; charset=utf-8"
S.file $ static_base </> "index.html"
where
get x = FrontendHelpers.jsonDbGet connection_data x
auth_helper_bundle = FrontendHelpers.AuthHelperBundle
connection_data
session
github_config
third_party_creds
post x y = S.post x $
FrontendHelpers.jsonAuthorizedDbInteractCommon const
auth_helper_bundle
y
withAuth :: ToJSON a =>
ScottyTypes.ActionT LT.Text IO (SqlReadTypes.AuthDbIO (Either Text a))
-> ScottyTypes.ActionT LT.Text IO ()
withAuth = FrontendHelpers.postWithAuthentication auth_helper_bundle
logger_domain_identifier = if AuthConfig.is_local github_config
then Constants.localhostDomainName
else Constants.drCIDomainName
-- | Read the three remediation form fields of the current request and
-- package them as a 'SqlWrite.FailureRemediation'.  All three fields
-- are wrapped in 'Just'; if any parameter is absent, Scotty's usual
-- missing-parameter error fires before the record is constructed.
parseRemediationObject :: ScottyTypes.ActionT LT.Text IO SqlWrite.FailureRemediation
parseRemediationObject =
  SqlWrite.FailureRemediation
    <$> (Just <$> S.param "notes")
    <*> (Just <$> S.param "github_issue_number")
    <*> (Just <$> S.param "info_url")
-- | Build a 'SqlReadTypes.TimeRange' from request parameters.
--
-- The @start-timestamp@ parameter is mandatory.  When an
-- @end-timestamp@ parameter is also supplied the result is a 'Bounded'
-- range; otherwise the 'S.rescue' handler falls back to an open-ended
-- 'StartOnly' range.
parseTimeRangeParms :: ScottyTypes.ActionT LT.Text IO SqlReadTypes.TimeRange
parseTimeRangeParms = do
  range_start <- BuildRetrieval.decodeUtcTimeString <$> S.param "start-timestamp"
  let open_ended _err = return $ SqlReadTypes.StartOnly range_start
      closed_range = do
        range_end <- BuildRetrieval.decodeUtcTimeString <$> S.param "end-timestamp"
        return $ SqlReadTypes.Bounded $ DbHelpers.StartEnd range_start range_end
  closed_range `S.rescue` open_ended
| null | https://raw.githubusercontent.com/kostmo/circleci-failure-tracker/393d10a72080bd527fdb159da6ebfea23fcd52d1/app/webservice/src/Routes.hs | haskell | # LANGUAGE OverloadedStrings #
^ for updating materialized views
For debugging only
XXX IMPORTANT:
The session cookie is specific to the parent dir of the path.
So with the path "/api/callback", only HTTP accesses to paths
Consequentially, a cookie set (namely, the github access token)
in a request to a certain path will only be accessible to
other requests at or below that same parent directory.
FIXME This is legacy. Don't use this! |
module Routes where
import Control.Monad (unless, when)
import Control.Monad.IO.Class (liftIO)
import Control.Monad.Trans.Except (ExceptT (ExceptT), except,
runExceptT)
import Control.Monad.Trans.Reader (runReaderT)
import Data.Aeson (ToJSON)
import Data.Default (def)
import Data.String (fromString)
import Data.Text (Text)
import qualified Data.Text.Lazy as LT
import qualified Data.Vault.Lazy as Vault
import Log (LogT, localDomain)
import Network.Wai
import Network.Wai.Log (logRequestsWith)
import Network.Wai.Middleware.ForceSSL (forceSSL)
import Network.Wai.Middleware.Gzip (gzip)
import Network.Wai.Middleware.Static hiding ((<|>))
import Network.Wai.Session (Session, SessionStore,
withSession)
import System.FilePath
import qualified Web.Scotty as S
import qualified Web.Scotty.Internal.Types as ScottyTypes
import qualified Auth
import qualified AuthConfig
import qualified BuildRetrieval
import qualified Builds
import qualified CircleApi
import qualified Constants
import qualified DbHelpers
import qualified FrontendHelpers
import qualified GitRev
import qualified JsonUtils
import qualified MatchOccurrences
import qualified PostedComments
import qualified Scanning
import qualified ScanPatterns
import qualified Sql.Read.Breakages as ReadBreakages
import qualified Sql.Read.Builds as ReadBuilds
import qualified Sql.Read.Commits as ReadCommits
import qualified Sql.Read.Flaky as SqlReadFlaky
import qualified Sql.Read.Matches as ReadMatches
import qualified Sql.Read.Patterns as ReadPatterns
import qualified Sql.Read.PullRequests as ReadPullRequests
import qualified Sql.Read.Queue as ReadQueue
import qualified Sql.Read.Read as SqlRead
import qualified Sql.Read.Stats as ReadStats
import qualified Sql.Read.TestResults as ReadTestResults
import qualified Sql.Read.Types as SqlReadTypes
import qualified Sql.Update as SqlUpdate
import qualified Sql.Write as SqlWrite
import qualified StatusUpdate
import qualified Types
import qualified WebApi
-- | Static configuration assembled at startup and threaded into
-- 'scottyApp'.
--
-- NOTE: the pattern match in 'scottyApp' destructures FIVE fields; the
-- fifth (a second database connection used for refreshing materialized
-- views) was missing from this declaration, so it is restored here.
data SetupData = SetupData {
    _setup_static_base :: String
    -- ^ filesystem root for statically served assets
  , _setup_github_config :: AuthConfig.GithubConfig
  , _third_party_creds :: CircleApi.ThirdPartyAuth
  , _setup_connection_data :: DbHelpers.DbConnectionData
  , _setup_mview_connection_data :: DbHelpers.DbConnectionData
    -- ^ for updating materialized views
  }
-- | Mutable/session state shared across requests: the auth cache, the
-- vault key for the cookie session, and the backing session store.
data PersistenceData = PersistenceData {
_setup_cache :: Types.CacheStore
-- ^ server-side cache used by the GitHub auth flow
, _setup_session :: Vault.Key (Session IO String String)
-- ^ vault key for looking up this request's cookie session
, _setup_store :: SessionStore IO String String
-- ^ store that persists sessions across requests
}
scottyApp ::
(LogT IO () -> IO ())
-> PersistenceData
-> SetupData
-> ScottyTypes.ScottyT LT.Text IO ()
scottyApp
logger
(PersistenceData cache session store)
(SetupData static_base github_config third_party_creds connection_data mview_connection_data) = do
S.middleware $ logRequestsWith $
\logger_t -> logger $ localDomain logger_domain_identifier logger_t
S.middleware $ withSession store (fromString "SESSION") def session
S.middleware $ staticPolicy $ noDots >-> addBase static_base
S.middleware $ gzip def
unless (AuthConfig.no_force_ssl github_config || AuthConfig.is_local github_config) $
S.middleware forceSSL
when (AuthConfig.is_local github_config) FrontendHelpers.echoEndpoint
at or below the " /api/ " path will be members of the same session .
S.get "/api/github-auth-callback" $ do
rq <- S.request
let Just (_sessionLookup, sessionInsert) = Vault.lookup session $ vault rq
Auth.callbackH cache github_config $ sessionInsert Auth.githubAuthTokenSessionKey
S.get "/logout" $ Auth.logoutH cache
S.post "/api/github-event" $ StatusUpdate.githubEventEndpoint $
Constants.ProviderConfigs
github_config
third_party_creds
connection_data
S.post "/api/code-breakage-resolution-report" $
withAuth $ SqlWrite.apiCodeBreakageResolutionInsertMultiple
<$> (Builds.RawCommit <$> S.param "sha1")
<*> S.param "causes"
S.post "/api/code-breakage-cause-report" $
withAuth $ SqlWrite.reportBreakage
<$> S.param "jobs"
<*> S.param "failure_mode_id"
<*> (FrontendHelpers.checkboxIsTrue <$> S.param "is_ongoing")
<*> (Builds.RawCommit <$> S.param "last_affected_sha1")
<*> (Builds.RawCommit <$> S.param "cause_sha1")
<*> S.param "notes"
S.post "/api/refresh-materialized-view" $ do
view_name <- S.param "view-name"
is_from_frontend <- S.param "from-frontend"
result <- liftIO $ do
mview_conn <- DbHelpers.getConnection mview_connection_data
flip runReaderT mview_conn $ SqlRead.refreshCachedMasterGrid view_name $ FrontendHelpers.checkboxIsTrue is_from_frontend
S.json $ WebApi.toJsonEither result
S.post "/api/get-logged-in-user" $
withAuth $ pure FrontendHelpers.getLoggedInUser
S.post "/api/rescan-multiple-builds" $
withAuth $
Scanning.apiRescanBuilds third_party_creds
<$> S.jsonData
S.post "/api/rescan-build" $
withAuth $
Scanning.rescanSingleBuildWrapped third_party_creds
<$> (Builds.UniversalBuildId <$> S.param "build")
S.post "/api/rebuild-single-job" $
withAuth $ FrontendHelpers.facilitateJobRebuild (CircleApi.circle_api_token third_party_creds)
<$> (Builds.UniversalBuildId <$> S.param "build")
S.post "/api/rebuild-flaky-candidates" $
withAuth $ SqlUpdate.triggerFlakyRebuildCandidates (CircleApi.circle_api_token third_party_creds)
<$> (Builds.RawCommit <$> S.param "sha1")
S.post "/api/promote-match" $
withAuth $
SqlWrite.promoteMatch
<$> (MatchOccurrences.MatchId <$> S.param "match_id")
S.post "/api/elaborate-failure" $
withAuth $
SqlWrite.elaborateBuildFailure
<$> (Builds.UniversalBuildId <$> S.param "build")
S.post "/api/rescan-commit" $
withAuth $
FrontendHelpers.rescanCommitCallback third_party_creds
<$> (Builds.RawCommit <$> S.param "sha1")
S.post "/api/populate-master-commits" $
FrontendHelpers.requireAdminToken connection_data github_config SqlWrite.storeMasterCommits
S.post "/api/populate-master-commit-metadata" $
FrontendHelpers.requireAdminToken connection_data github_config SqlWrite.storeCommitMetadata
S.post "/api/new-pattern-insert" $
withAuth $
SqlWrite.apiNewPatternWrapped <$> FrontendHelpers.patternFromParms
S.post "/api/code-breakage-mode-update" $
withAuth $
SqlWrite.updateCodeBreakageMode
<$> S.param "cause_id"
<*> S.param "mode"
S.post "/api/create-pattern-remediation" $
withAuth $
SqlWrite.createPatternRemediation
<$> (ScanPatterns.PatternId <$> S.param "pattern_id")
<*> parseRemediationObject
get "/api/latest-master-commit-with-metadata" $
pure $ WebApi.toJsonEither <$> ReadCommits.getLatestMasterCommitWithMetadata
get "/api/viability-increase" $
ReadStats.apiViabilityIncreaseByWeek <$> S.param "weeks"
get "/api/throughput-by-hour" $
ReadStats.apiThroughputByHour <$> S.param "hours"
get "/api/sqs-queue-depth" $
ReadStats.apiQueueDepthTimeplot <$> S.param "hours"
get "/api/status-notifications-by-hour" $
ReadStats.apiStatusNotificationsByHour <$> S.param "hours"
get "/api/master-downstream-commits" $
ReadCommits.apiMasterDownstreamCommits . Builds.RawCommit <$> S.param "sha1"
get "/api/test-results" $
fmap WebApi.toJsonEither . ReadTestResults.getRecordedTestResults . Builds.UniversalBuildId <$> S.param "build_id"
get "/api/comment-postings-by-week" $
pure ReadStats.prCommentRevisionsByWeek
get "/api/failed-commits-by-day" $
pure SqlReadFlaky.apiFailedCommitsByDay
get "/api/code-breakages-leftover-by-commit" $
pure ReadBreakages.apiLeftoverCodeBreakagesByCommit
get "/api/missing-required-builds" $
pure ReadBuilds.apiMissingRequiredBuilds
get "/api/code-breakages-author-stats" $
pure ReadBreakages.apiBreakageAuthorStats
get "/api/broken-commits-without-metadata" $
pure ReadCommits.apiBrokenCommitsWithoutMetadata
get "/api/list-failure-modes" $
pure SqlRead.apiListFailureModes
get "/api/job" $
pure SqlRead.apiJobs
get "/api/log-storage-stats" $
pure ReadStats.apiStorageStats
get "/api/log-size-histogram" $
pure ReadStats.apiByteCountHistogram
get "/api/log-lines-histogram" $
pure ReadStats.apiLineCountHistogram
get "/api/master-build-stats" $
pure ReadStats.masterBuildFailureStats
get "/api/patterns-dump" $
pure ReadPatterns.dumpPatterns
get "/api/patterns-timeline" $
pure ReadPatterns.apiPatternOccurrenceTimeline
get "/api/patterns" $
pure ReadPatterns.apiPatterns
get "/api/inferred-scheduled-builds" $
pure SqlRead.getScheduledJobNames
get "/api/step-list" $
pure SqlRead.apiListSteps
get "/api/step" $
pure SqlRead.apiStep
get "/api/master-commits-granular" $
ReadBreakages.masterCommitsGranular
<$> S.param "weeks"
get "/api/master-deterministic-failure-modes" $
pure SqlRead.apiDeterministicFailureModes
get "/api/mview-refreshes" $
pure SqlRead.apiMaterializedViewRefreshes
get "/api/summary" $
pure ReadStats.apiSummaryStats
get "/api/job-schedule-stats" $
pure SqlRead.apiJobScheduleStats
get "/api/tags" $
pure SqlRead.apiTagsHistogram
get "/api/unmatched-builds" $
pure ReadBuilds.apiUnmatchedBuilds
get "/api/idiopathic-failed-builds" $
pure ReadBuilds.apiIdiopathicBuilds
get "/api/code-breakages-leftover-detected" $
pure ReadBreakages.apiLeftoverDetectedCodeBreakages
get "/api/code-breakages-detected" $
pure ReadBreakages.apiDetectedCodeBreakages
get "/api/code-breakages-annotated" $
ReadBreakages.apiAnnotatedCodeBreakagesWithImpact
<$> parseTimeRangeParms
get "/api/code-breakages-annotated-single" $
fmap WebApi.toJsonEither . ReadBreakages.apiAnnotatedCodeBreakagesWithoutImpactSingle
<$> S.param "cause_id"
get "/api/code-breakage-mode-single" $
ReadBreakages.apiCodeBreakagesModeSingle <$> S.param "cause_id"
get "/api/pr-comment-opt-out-stats" $
pure ReadPullRequests.allUserOptOutStats
get "/api/posted-pr-comments" $
ReadPullRequests.apiPostedPRComments <$> S.param "count"
get "/api/posted-comment-revision-body" $
fmap WebApi.toJsonEither . ReadPullRequests.getSinglePostedCommentMarkdown <$> (PostedComments.CommentRevisionId <$> S.param "comment_revision")
get "/api/latest-posted-comment-for-pr" $
ReadPullRequests.apiPostedCommentsForPR <$> (Builds.PullRequestNumber <$> S.param "pr")
get "/api/all-posted-comments-for-pr" $
ReadPullRequests.apiAllPostedCommentsForPR <$> (Builds.PullRequestNumber <$> S.param "pr")
TODO This endpoint not used yet
get "/api/queue-insertions-for-commit" $
ReadQueue.getQueueInsertionsForSha1 <$> (Builds.RawCommit <$> S.param "sha1")
TODO This endpoint not used yet
get "/api/queue-insertions-for-commit-and-job" $
ReadQueue.getQueueInsertionsForSha1AndJob
<$> S.param "job"
<*> (Builds.RawCommit <$> S.param "sha1")
get "/api/historical-pr-associations" $
ReadPullRequests.getPullRequestsContainingCommit . Builds.RawCommit <$> S.param "sha1"
get "/api/upstream-broken-jobs-for-commit" $
ReadBreakages.getInferredSpanningBrokenJobsBetter . Builds.RawCommit <$> S.param "sha1"
get "/api/known-breakage-affected-jobs" $
ReadBreakages.knownBreakageAffectedJobs <$> S.param "cause_id"
get "/api/tag-suggest" $
SqlRead.apiAutocompleteTags <$> S.param "term"
get "/api/step-suggest" $
SqlRead.apiAutocompleteSteps <$> S.param "term"
get "/api/master-breakages-monthly-stats" $
pure ReadStats.masterBreakageMonthlyStats
get "/api/downstream-impact-weekly" $
ReadStats.downstreamWeeklyFailureStats
<$> S.param "weeks"
get "/api/master-weekly-nondeterministic-unmitigated-failures" $
ReadStats.masterWeeklyUnattributedFailureStats
<$> S.param "weeks"
get "/api/master-monthly-nondeterministic-unmitigated-failures" $
ReadStats.masterMonthlyUnattributedFailureStats
<$> S.param "months"
get "/api/master-weekly-failure-stats" $
fmap WebApi.toJsonEither . ReadStats.masterWeeklyFailureStats
<$> S.param "weeks"
get "/api/master-pr-merge-time-weekly-failure-stats" $
ReadStats.getMergeTimeFailingPullRequestBuildsByWeek
<$> S.param "weeks"
get "/api/page-views-by-week" $
ReadStats.getPageViewsByWeek
<$> S.param "weeks"
get "/api/unmatched-builds-for-commit" $
fmap WebApi.toJsonEither . ReadBuilds.apiUnmatchedCommitBuilds . Builds.RawCommit <$> S.param "sha1"
get "/api/idiopathic-failed-builds-for-commit" $
fmap WebApi.toJsonEither . ReadBuilds.apiIdiopathicCommitBuilds . Builds.RawCommit <$> S.param "sha1"
get "/api/timed-out-builds-for-commit" $
fmap WebApi.toJsonEither . ReadBuilds.apiTimeoutCommitBuilds . Builds.RawCommit <$> S.param "sha1"
get "/api/pattern-step-occurrences" $
ReadPatterns.patternBuildStepOccurrences . ScanPatterns.PatternId <$> S.param "pattern_id"
get "/api/pattern-job-occurrences" $
ReadPatterns.patternBuildJobOccurrences . ScanPatterns.PatternId <$> S.param "pattern_id"
get "/api/best-pattern-matches" $
ReadMatches.getBestPatternMatches . ScanPatterns.PatternId <$> S.param "pattern_id"
get "/api/pattern-matches" $
ReadMatches.getPatternMatches . ScanPatterns.PatternId <$> S.param "pattern_id"
get "/api/pattern" $
ReadPatterns.apiSinglePattern . ScanPatterns.PatternId <$> S.param "pattern_id"
get "/api/build-pattern-matches" $
ReadMatches.getBuildPatternMatches . Builds.UniversalBuildId <$> S.param "build_id"
get "/api/best-build-match" $
ReadMatches.getBestBuildMatch . Builds.UniversalBuildId <$> S.param "build_id"
get "/api/single-build-info" $
fmap WebApi.toJsonEither . SqlUpdate.getBuildInfo third_party_creds . Builds.UniversalBuildId
<$> S.param "build_id"
get "/api/list-build-github-status-events" $
SqlRead.apiGitHubNotificationsForBuild
<$> S.param "job"
<*> (Builds.RawCommit <$> S.param "sha1")
get "/api/list-commit-jobs" $
SqlRead.apiCommitJobs . Builds.RawCommit <$> S.param "sha1"
get "/api/list-master-commit-range-jobs" $
fmap SqlRead.apiCommitRangeJobs $ SqlRead.InclusiveSpan
<$> S.param "first_index"
<*> S.param "last_index"
get "/api/master-commits" $
fmap WebApi.toJsonEither . ((fmap . fmap . fmap) snd ReadCommits.getMasterCommits) <$> FrontendHelpers.getSimpleOffsetMode
get "/api/master-timeline" $
fmap WebApi.toJsonEither . SqlRead.apiMasterBuilds <$> FrontendHelpers.getOffsetMode
S.get "/api/commit-info" $ do
commit_sha1_text <- S.param "sha1"
json_result <- liftIO $ do
conn <- DbHelpers.getConnection connection_data
runExceptT $ do
sha1 <- except $ GitRev.validateSha1 commit_sha1_text
ExceptT $ flip runReaderT conn $
SqlUpdate.countRevisionBuilds
third_party_creds
sha1
S.json $ WebApi.toJsonEither json_result
get "/api/isolated-master-failures-by-day" $
fmap WebApi.toJsonEither . SqlReadFlaky.apiIsolatedMasterFailuresByDay
<$> S.param "age-days"
get "/api/isolated-master-failures-by-week" $
fmap WebApi.toJsonEither . SqlReadFlaky.apiIsolatedMasterFailuresByWeek
<$> S.param "age-weeks"
get "/api/isolated-failures-timespan-coarse-bins" $
fmap WebApi.toJsonEither . SqlReadFlaky.apiCoarseBinsIsolatedJobFailuresTimespan
<$> parseTimeRangeParms
get "/api/isolated-unmatched-failed-builds-master-commit-range" $
(fmap . fmap) WebApi.toJsonEither $ SqlReadFlaky.apiIsolatedUnmatchedBuildsMasterCommitRange
<$> (DbHelpers.InclusiveNumericBounds <$> S.param "commit-id-min" <*> S.param "commit-id-max")
get "/api/isolated-failures-timespan-by-job" $
fmap WebApi.toJsonEither . SqlReadFlaky.apiIsolatedJobFailuresTimespan
<$> parseTimeRangeParms
get "/api/isolated-failures-timespan-by-pattern" $
(fmap . fmap) WebApi.toJsonEither $ SqlReadFlaky.apiIsolatedPatternFailuresTimespan
<$> parseTimeRangeParms
<*> (FrontendHelpers.checkboxIsTrue <$> S.param "exclude_named_tests")
get "/api/isolated-failures-timespan-by-test" $
fmap WebApi.toJsonEither . SqlReadFlaky.apiIsolatedTestFailuresTimespan
<$> parseTimeRangeParms
get "/api/master-job-failures-in-timespan" $
(fmap . fmap) WebApi.toJsonEither $ SqlReadFlaky.apiJobFailuresInTimespan
<$> S.param "job"
<*> (DbHelpers.InclusiveNumericBounds <$> S.param "commit-id-min" <*> S.param "commit-id-max")
get "/api/master-pattern-failures-in-timespan" $
(fmap . fmap) WebApi.toJsonEither $ SqlReadFlaky.apiPatternFailuresInTimespan
<$> (ScanPatterns.PatternId <$> S.param "pattern")
<*> (DbHelpers.InclusiveNumericBounds <$> S.param "commit-id-min" <*> S.param "commit-id-max")
get "/api/master-test-failures-in-timespan" $
(fmap . fmap) WebApi.toJsonEither $ SqlReadFlaky.apiTestFailuresInTimespan
<$> S.param "test"
<*> (DbHelpers.InclusiveNumericBounds <$> S.param "commit-id-min" <*> S.param "commit-id-max")
get "/api/viable-commit-prerequisite-jobs" $
SqlRead.apiViableCommitPrerequisiteJobs <$>
(Builds.RawCommit <$> S.param "sha1")
get "/api/latest-viable-master-commits" $
SqlRead.apiCleanestMasterCommits
<$> S.param "missing-threshold"
<*> S.param "failing-threshold"
get "/api/pr-batch-list" $
ReadPullRequests.apiPrBatchList
<$> (map (read . LT.unpack) . LT.splitOn "," <$> S.param "pr-numbers-delimited")
get "/api/viable-commit-age-history" $
SqlRead.apiLatestViableMasterCommitAgeHistory
<$> S.param "weeks"
<*> (BuildRetrieval.decodeUtcTimeString <$> S.param "end-timestamp")
get "/api/viable-commit-lag-count-history" $
SqlRead.apiLatestViableMasterCommitLagCountHistory
<$> S.param "weeks"
<*> (BuildRetrieval.decodeUtcTimeString <$> S.param "end-timestamp")
get "/api/is-master-commit" $
ReadCommits.isMasterCommit . Builds.RawCommit <$> S.param "sha1"
S.get "/api/commit-builds" $ do
commit_sha1_text <- S.param "sha1"
json_result <- runExceptT $ do
sha1 <- except $ GitRev.validateSha1 commit_sha1_text
either_result <- liftIO $ do
conn <- DbHelpers.getConnection connection_data
two_build_types <- flip runReaderT conn $ SqlRead.getRevisionBuilds sha1
return $ ((fmap . fmap) SqlRead.non_timed_out_builds) two_build_types
except either_result
S.json $ WebApi.toJsonEither json_result
S.get "/api/new-pattern-test" $ do
buildnum <- S.param "build_num"
new_pattern <- FrontendHelpers.patternFromParms
x <- liftIO $ do
conn <- DbHelpers.getConnection connection_data
flip runReaderT conn $ SqlRead.apiNewPatternTest
(Builds.UniversalBuildId buildnum)
new_pattern
S.json $ WebApi.toJsonEither x
S.get "/api/get-user-opt-out-settings" $
FrontendHelpers.jsonAuthorizedDbInteractCommon SqlReadTypes.AuthConnection auth_helper_bundle $
pure ReadPullRequests.userOptOutSettings
S.post "/api/update-user-opt-out-settings" $
withAuth $ SqlWrite.updateUserOptOutSettings <$> S.param "enabled"
S.post "/api/repopulate-test-results" $
withAuth $ Scanning.repopulateTestResults third_party_creds . Builds.UniversalBuildId <$> S.param "build_id"
get "/api/view-log-context" $ (fmap . fmap) WebApi.toJsonEither $
SqlRead.logContextFunc SqlReadTypes.hiddenContextLinecount
<$> (MatchOccurrences.MatchId <$> S.param "match_id")
<*> S.param "context_linecount"
S.get "/api/view-log-full" $ do
build_id <- S.param "build_id"
let universal_build_id = Builds.UniversalBuildId build_id
either_log_result <- liftIO $ do
conn <- DbHelpers.getConnection connection_data
flip runReaderT conn $ SqlRead.retrieveLogFromBuildId universal_build_id
case either_log_result of
Right logs -> S.text logs
Left errors -> S.html $ LT.fromStrict $ JsonUtils._message $ JsonUtils.getDetails errors
post "/api/pattern-specificity-update" $
SqlWrite.updatePatternSpecificity
<$> (ScanPatterns.PatternId <$> S.param "pattern_id")
<*> S.param "specificity"
post "/api/pattern-description-update" $
SqlWrite.updatePatternDescription
<$> (ScanPatterns.PatternId <$> S.param "pattern_id")
<*> S.param "description"
post "/api/pattern-tag-add" $
SqlWrite.addPatternTag
<$> (ScanPatterns.PatternId <$> S.param "pattern_id")
<*> S.param "tag"
post "/api/pattern-tag-remove" $
SqlWrite.removePatternTag
<$> (ScanPatterns.PatternId <$> S.param "pattern_id")
<*> S.param "tag"
post "/api/patterns-restore" $
SqlWrite.restorePatterns
<$> S.jsonData
post "/api/code-breakage-job-delete" $
SqlWrite.deleteCodeBreakageJob
<$> S.param "cause_id"
<*> S.param "job"
post "/api/code-breakage-description-update" $
SqlWrite.updateCodeBreakageDescription
<$> S.param "cause_id"
<*> S.param "description"
post "/api/code-breakage-update-resolution-sha1" $
SqlWrite.updateCodeBreakageResolutionSha1
<$> S.param "cause_id"
<*> (Builds.RawCommit <$> S.param "resolution_sha1")
post "/api/code-breakage-update-cause-sha1" $
SqlWrite.updateCodeBreakageCauseSha1
<$> S.param "cause_id"
<*> (Builds.RawCommit <$> S.param "cause_sha1")
post "/api/code-breakage-delete-resolution" $
SqlWrite.deleteCodeBreakageResolution
<$> S.param "cause_id"
post "/api/code-breakage-delete" $
SqlWrite.deleteCodeBreakage
<$> S.param "cause_id"
S.post "/api/code-breakage-add-affected-job" $
withAuth $
SqlWrite.addCodeBreakageJobName
<$> S.param "cause_id"
<*> S.param "job_name"
S.get "/favicon.ico" $ do
S.setHeader "Content-Type" "image/x-icon"
S.file $ static_base </> "images/favicon.ico"
S.options "/" $ do
S.setHeader "Access-Control-Allow-Origin" "*"
S.setHeader "Access-Control-Allow-Methods" $ LT.intercalate ", " [
"POST"
, "GET"
, "OPTIONS"
]
S.get "/" $ do
S.setHeader "Content-Type" "text/html; charset=utf-8"
S.file $ static_base </> "index.html"
where
get x = FrontendHelpers.jsonDbGet connection_data x
auth_helper_bundle = FrontendHelpers.AuthHelperBundle
connection_data
session
github_config
third_party_creds
post x y = S.post x $
FrontendHelpers.jsonAuthorizedDbInteractCommon const
auth_helper_bundle
y
withAuth :: ToJSON a =>
ScottyTypes.ActionT LT.Text IO (SqlReadTypes.AuthDbIO (Either Text a))
-> ScottyTypes.ActionT LT.Text IO ()
withAuth = FrontendHelpers.postWithAuthentication auth_helper_bundle
logger_domain_identifier = if AuthConfig.is_local github_config
then Constants.localhostDomainName
else Constants.drCIDomainName
-- | Read the three remediation form fields of the current request and
-- package them as a 'SqlWrite.FailureRemediation'.  All three fields
-- are wrapped in 'Just'; if any parameter is absent, Scotty's usual
-- missing-parameter error fires before the record is constructed.
parseRemediationObject :: ScottyTypes.ActionT LT.Text IO SqlWrite.FailureRemediation
parseRemediationObject =
  SqlWrite.FailureRemediation
    <$> (Just <$> S.param "notes")
    <*> (Just <$> S.param "github_issue_number")
    <*> (Just <$> S.param "info_url")
-- | Build a 'SqlReadTypes.TimeRange' from request parameters.
--
-- The @start-timestamp@ parameter is mandatory.  When an
-- @end-timestamp@ parameter is also supplied the result is a 'Bounded'
-- range; otherwise the 'S.rescue' handler falls back to an open-ended
-- 'StartOnly' range.
parseTimeRangeParms :: ScottyTypes.ActionT LT.Text IO SqlReadTypes.TimeRange
parseTimeRangeParms = do
  range_start <- BuildRetrieval.decodeUtcTimeString <$> S.param "start-timestamp"
  let open_ended _err = return $ SqlReadTypes.StartOnly range_start
      closed_range = do
        range_end <- BuildRetrieval.decodeUtcTimeString <$> S.param "end-timestamp"
        return $ SqlReadTypes.Bounded $ DbHelpers.StartEnd range_start range_end
  closed_range `S.rescue` open_ended
|
4684cfe86de57ba4093002ce26d5e60d0148cd2f2236eb24d2596f8be8c8ed90 | lortabac/ariel | Prim.hs | {-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE StrictData #-}
module Ariel.Common.Prim where
import Ariel.Prelude
-- | Primitive binary operations of the language, parameterised over
-- the operand representation @a@.  The derived 'Functor' / 'Foldable'
-- / 'Traversable' instances act uniformly on both operands.
data Prim a
  = Equal a a -- ^ equality test, surface name @"="@
  | Lt a a -- ^ less-than test, surface name @"<"@
  | Plus a a -- ^ addition, surface name @"+"@
  | Minus a a -- ^ subtraction, surface name @"-"@
  deriving (Eq, Show, Functor, Foldable, Traversable)
-- | Map a primitive's surface name and argument list to a 'Prim'.
-- Returns 'Nothing' for an unknown name or for any arity other than
-- exactly two operands.
readPrim :: Text -> [a] -> Maybe (Prim a)
readPrim name args = case (name, args) of
  ("=", [lhs, rhs]) -> Just (Equal lhs rhs)
  ("<", [lhs, rhs]) -> Just (Lt lhs rhs)
  ("+", [lhs, rhs]) -> Just (Plus lhs rhs)
  ("-", [lhs, rhs]) -> Just (Minus lhs rhs)
  _ -> Nothing
-- | Like 'readPrim', but aborts with 'error' (reporting the offending
-- name and argument list) when the primitive cannot be parsed.
readPrimOrDie :: Show a => Text -> [a] -> Prim a
readPrimOrDie name args =
  case readPrim name args of
    Nothing -> error ("Invalid prim: " <> show (name, args))
    Just prim -> prim
| null | https://raw.githubusercontent.com/lortabac/ariel/3c4e115a7fed724ed129e3b7056cc67751e3d35b/src/Ariel/Common/Prim.hs | haskell | # LANGUAGE DeriveTraversable #
# LANGUAGE OverloadedStrings #
# LANGUAGE StrictData # |
module Ariel.Common.Prim where
import Ariel.Prelude
-- | Primitive binary operations of the language, parameterised over
-- the operand representation @a@.  The derived 'Functor' / 'Foldable'
-- / 'Traversable' instances act uniformly on both operands.
data Prim a
  = Equal a a -- ^ equality test, surface name @"="@
  | Lt a a -- ^ less-than test, surface name @"<"@
  | Plus a a -- ^ addition, surface name @"+"@
  | Minus a a -- ^ subtraction, surface name @"-"@
  deriving (Eq, Show, Functor, Foldable, Traversable)
-- | Map a primitive's surface name and argument list to a 'Prim'.
-- Returns 'Nothing' for an unknown name or for any arity other than
-- exactly two operands.
readPrim :: Text -> [a] -> Maybe (Prim a)
readPrim name args = case (name, args) of
  ("=", [lhs, rhs]) -> Just (Equal lhs rhs)
  ("<", [lhs, rhs]) -> Just (Lt lhs rhs)
  ("+", [lhs, rhs]) -> Just (Plus lhs rhs)
  ("-", [lhs, rhs]) -> Just (Minus lhs rhs)
  _ -> Nothing
-- | Like 'readPrim', but aborts with 'error' (reporting the offending
-- name and argument list) when the primitive cannot be parsed.
readPrimOrDie :: Show a => Text -> [a] -> Prim a
readPrimOrDie name args =
  case readPrim name args of
    Nothing -> error ("Invalid prim: " <> show (name, args))
    Just prim -> prim
|
db1674c407fda9f62ba599c2eeef96319cf2be94ee1e7caecf102c9d03a3f1fd | grin-compiler/ghc-whole-program-compiler-project | PrimOpMutVarSpec.hs | # LANGUAGE LambdaCase , QuasiQuotes , OverloadedStrings #
module PrimOpMutVarSpec where
import qualified Data.Text as Text
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.List (sort)
import Data.IORef
import Test.Hspec
import Test.QuickCheck
import System.IO
import Text.Show.Pretty (pPrint, ppShow)
import Text.PrettyPrint.ANSI.Leijen
import Lambda.TH
import Lambda.Analysis.ControlFlowAnalysisM
import Lambda.Pretty (PP(..))
-- | Convenience entry point for running this spec directly (e.g. from
-- GHCi) without going through the full test-suite runner.
runTests :: IO ()
runTests = hspec spec
spec :: Spec
spec = do
----------------------------
usedRules <- runIO $ newIORef (Set.empty :: Set.Set [Text.Text])
let filterAndSort keys m = fmap sort $ Map.restrictKeys m (Set.fromList keys)
sameAs :: Show a => a -> a -> IO ()
sameAs a b = (PP (ppShow a)) `shouldBe` (PP (ppShow b))
toMVOp = filterAndSort ["NodeOrigin", "ExternalOrigin", "TagValue", "MutVar"]
addUsedM a = modifyIORef usedRules (\x -> mappend x . Set.fromList . head . Map.elems . filterAndSort ["Used"] $ a)
printUsedM = readIORef usedRules >>= pPrint
----------------------------
describe "GHC MutVar PrimOps" $ do
it "newMutVar#" $ do
cfa <- controlFlowAnalysisM [] ["main"] [prog|
primop effectful
"newMutVar#" :: %a.6 -> {"State#" %s.2} @ t.107 -> {"ghc-prim_GHC.Prim.Unit#" {"MutVar#" %s.2 %a.6} @ t.109} @ t.108
main =
letS
v01 = [Tup0]
v02 = #T_Token "RealWorld"
v03 = "newMutVar#" $ v01 v02
v05 = case v03 of
("ghc-prim_GHC.Prim.Unit#" v04) @ a00 ->
v04
v05
|]
addUsedM cfa
toMVOp cfa `sameAs` Map.fromList
[ ( "MutVar" , [ [ "v03" , "t.109" , "v01" ] ] )
, ( "ExternalOrigin"
, [ [ "a00" , "v03" , "t.108" ]
, [ "v03" , "v03" , "t.108" ]
, [ "v04" , "v03" , "t.109" ]
, [ "v05" , "v03" , "t.109" ]
]
)
, ( "NodeOrigin"
, [ [ "v01" , "v01" ]
, [ "v02" , "v02" ]
]
)
, ( "TagValue"
, [ [ "a00" , "ghc-prim_GHC.Prim.Unit#" ]
, [ "v01" , "Tup0" ]
, [ "v02" , "lit:T_Token \"RealWorld\"" ]
, [ "v03" , "ghc-prim_GHC.Prim.Unit#" ]
, [ "v04" , "MutVar#" ]
, [ "v05" , "MutVar#" ]
]
)
]
it "readMutVar#" $ do
cfa <- controlFlowAnalysisM [] ["main"] [prog|
primop effectful
"newMutVar#" :: %a.6 -> {"State#" %s.2} @ t.107 -> {"ghc-prim_GHC.Prim.Unit#" {"MutVar#" %s.2 %a.6} @ t.109} @ t.108
"readMutVar#" :: {"MutVar#" %s.21 %a.11} @ t.243 -> {"State#" %s.21} @ t.245 -> {"ghc-prim_GHC.Prim.Unit#" %a.11} @ t.246
main =
letS
v01 = [Tup0]
v02 = #T_Token "RealWorld"
v03 = "newMutVar#" $ v01 v02
v09 = case v03 of
("ghc-prim_GHC.Prim.Unit#" v04) @ a00 ->
letS
v06 = "readMutVar#" $ v04 v02
v08 = case v06 of
("ghc-prim_GHC.Prim.Unit#" v07) @ a01 ->
v07
v08
v09
|]
addUsedM cfa
toMVOp cfa `sameAs` Map.fromList
[ ( "MutVar" , [ [ "v03" , "t.109" , "v01" ] ] )
, ( "ExternalOrigin"
, [ [ "a00" , "v03" , "t.108" ]
, [ "a01" , "v06" , "t.246" ]
, [ "v03" , "v03" , "t.108" ]
, [ "v04" , "v03" , "t.109" ]
, [ "v06" , "v06" , "t.246" ]
, [ "v07" , "v06" , "a.11" ]
, [ "v08" , "v06" , "a.11" ]
, [ "v09" , "v06" , "a.11" ]
]
)
, ( "NodeOrigin"
, [ [ "v01" , "v01" ]
, [ "v02" , "v02" ]
, [ "v07" , "v01" ]
, [ "v08" , "v01" ]
, [ "v09" , "v01" ]
]
)
, ( "TagValue"
, [ [ "a00" , "ghc-prim_GHC.Prim.Unit#" ]
, [ "a01" , "ghc-prim_GHC.Prim.Unit#" ]
, [ "v01" , "Tup0" ]
, [ "v02" , "lit:T_Token \"RealWorld\"" ]
, [ "v03" , "ghc-prim_GHC.Prim.Unit#" ]
, [ "v04" , "MutVar#" ]
, [ "v06" , "ghc-prim_GHC.Prim.Unit#" ]
, [ "v07" , "Tup0" ]
, [ "v08" , "Tup0" ]
, [ "v09" , "Tup0" ]
]
)
]
it "writeMutVar#" $ do
cfa <- controlFlowAnalysisM [] ["main"] [prog|
primop effectful
"newMutVar#" :: %a.6 -> {"State#" %s.2} @ t.107 -> {"ghc-prim_GHC.Prim.Unit#" {"MutVar#" %s.2 %a.6} @ t.109} @ t.108
"writeMutVar#" :: {"MutVar#" %s.4 %a.9} @ t.115 -> %a.9 -> {"State#" %s.4} @ t.117 -> {"ghc-prim_GHC.Prim.(##)"} @ t.118
main =
letS
v01 = [Tup0]
v02 = #T_Token "RealWorld"
v03 = "newMutVar#" $ v01 v02
v09 = case v03 of
("ghc-prim_GHC.Prim.Unit#" v04) @ a00 ->
letS
v06 = [C1]
v08 = "writeMutVar#" $ v04 v06 v02
v08
v09
|]
addUsedM cfa
toMVOp cfa `sameAs` Map.fromList
[ ( "MutVar"
, [ [ "v03" , "t.109" , "v01" ]
, [ "v03" , "t.109" , "v06" ]
]
)
, ( "ExternalOrigin"
, [ [ "a00" , "v03" , "t.108" ]
, [ "v03" , "v03" , "t.108" ]
, [ "v04" , "v03" , "t.109" ]
, [ "v08" , "v08" , "t.118" ]
, [ "v09" , "v08" , "t.118" ]
]
)
, ( "NodeOrigin"
, [ [ "v01" , "v01" ]
, [ "v02" , "v02" ]
, [ "v06" , "v06" ]
]
)
, ( "TagValue"
, [ [ "a00" , "ghc-prim_GHC.Prim.Unit#" ]
, [ "v01" , "Tup0" ]
, [ "v02" , "lit:T_Token \"RealWorld\"" ]
, [ "v03" , "ghc-prim_GHC.Prim.Unit#" ]
, [ "v04" , "MutVar#" ]
, [ "v06" , "C1" ]
, [ "v08" , "ghc-prim_GHC.Prim.(##)" ]
, [ "v09" , "ghc-prim_GHC.Prim.(##)" ]
]
)
]
it "casMutVar#" $ do
cfa <- controlFlowAnalysisM [] ["main"] [prog|
primop effectful
"newMutVar#" :: %a.6 -> {"State#" %s.2} @ t.107 -> {"ghc-prim_GHC.Prim.Unit#" {"MutVar#" %s.2 %a.6} @ t.109} @ t.108
"casMutVar#" :: {"MutVar#" %s.10 %a.11} @ t.12 -> %a.11 -> %a.11 -> {"State#" %s.10} @ t.14 -> {"ghc-prim_GHC.Prim.(#,#)" (T_Int64) @ t.15 %a.11} @ t.16
main =
letS
v01 = [Tup0]
v02 = #T_Token "RealWorld"
v03 = "newMutVar#" $ v01 v02
v11 = case v03 of
("ghc-prim_GHC.Prim.Unit#" v04) @ a00 ->
letS
v06 = [C1]
v07 = "casMutVar#" $ v04 v01 v06 v02
v10 = case v07 of
("ghc-prim_GHC.Prim.(#,#)" v08 v09) @ a01 ->
v08
v10
v11
|]
addUsedM cfa
toMVOp cfa `sameAs` Map.fromList
[ ( "MutVar"
, [ [ "v03" , "t.109" , "v01" ]
, [ "v03" , "t.109" , "v06" ]
]
)
, ( "ExternalOrigin"
, [ [ "a00" , "v03" , "t.108" ]
, [ "a01" , "v07" , "t.16" ]
, [ "v03" , "v03" , "t.108" ]
, [ "v04" , "v03" , "t.109" ]
, [ "v07" , "v07" , "t.16" ]
, [ "v08" , "v07" , "t.15" ]
, [ "v09" , "v07" , "a.11" ]
, [ "v10" , "v07" , "t.15" ]
, [ "v11" , "v07" , "t.15" ]
]
)
, ( "NodeOrigin"
, [ [ "v01" , "v01" ]
, [ "v02" , "v02" ]
, [ "v06" , "v06" ]
, [ "v09" , "v01" ]
, [ "v09" , "v06" ]
]
)
, ( "TagValue"
, [ [ "a00" , "ghc-prim_GHC.Prim.Unit#" ]
, [ "a01" , "ghc-prim_GHC.Prim.(#,#)" ]
, [ "v01" , "Tup0" ]
, [ "v02" , "lit:T_Token \"RealWorld\"" ]
, [ "v03" , "ghc-prim_GHC.Prim.Unit#" ]
, [ "v04" , "MutVar#" ]
, [ "v06" , "C1" ]
, [ "v07" , "ghc-prim_GHC.Prim.(#,#)" ]
, [ "v08" , "lit:T_Int64" ]
, [ "v09" , "C1" ]
, [ "v09" , "Tup0" ]
, [ "v10" , "lit:T_Int64" ]
, [ "v11" , "lit:T_Int64" ]
]
)
]
describe "Coverage" $ do
it "Used Rules" $ do
printUsedM
| null | https://raw.githubusercontent.com/grin-compiler/ghc-whole-program-compiler-project/6663f0ee905e19ac126db0045025e49b6d387cca/lambda/test/PrimOpMutVarSpec.hs | haskell | --------------------------
-------------------------- | # LANGUAGE LambdaCase , QuasiQuotes , OverloadedStrings #
module PrimOpMutVarSpec where
import qualified Data.Text as Text
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.List (sort)
import Data.IORef
import Test.Hspec
import Test.QuickCheck
import System.IO
import Text.Show.Pretty (pPrint, ppShow)
import Text.PrettyPrint.ANSI.Leijen
import Lambda.TH
import Lambda.Analysis.ControlFlowAnalysisM
import Lambda.Pretty (PP(..))
-- | Run this module's spec standalone (e.g. from GHCi), outside the
-- main test-suite driver.
runTests :: IO ()
runTests = hspec spec
spec :: Spec
spec = do
usedRules <- runIO $ newIORef (Set.empty :: Set.Set [Text.Text])
let filterAndSort keys m = fmap sort $ Map.restrictKeys m (Set.fromList keys)
sameAs :: Show a => a -> a -> IO ()
sameAs a b = (PP (ppShow a)) `shouldBe` (PP (ppShow b))
toMVOp = filterAndSort ["NodeOrigin", "ExternalOrigin", "TagValue", "MutVar"]
addUsedM a = modifyIORef usedRules (\x -> mappend x . Set.fromList . head . Map.elems . filterAndSort ["Used"] $ a)
printUsedM = readIORef usedRules >>= pPrint
describe "GHC MutVar PrimOps" $ do
it "newMutVar#" $ do
cfa <- controlFlowAnalysisM [] ["main"] [prog|
primop effectful
"newMutVar#" :: %a.6 -> {"State#" %s.2} @ t.107 -> {"ghc-prim_GHC.Prim.Unit#" {"MutVar#" %s.2 %a.6} @ t.109} @ t.108
main =
letS
v01 = [Tup0]
v02 = #T_Token "RealWorld"
v03 = "newMutVar#" $ v01 v02
v05 = case v03 of
("ghc-prim_GHC.Prim.Unit#" v04) @ a00 ->
v04
v05
|]
addUsedM cfa
toMVOp cfa `sameAs` Map.fromList
[ ( "MutVar" , [ [ "v03" , "t.109" , "v01" ] ] )
, ( "ExternalOrigin"
, [ [ "a00" , "v03" , "t.108" ]
, [ "v03" , "v03" , "t.108" ]
, [ "v04" , "v03" , "t.109" ]
, [ "v05" , "v03" , "t.109" ]
]
)
, ( "NodeOrigin"
, [ [ "v01" , "v01" ]
, [ "v02" , "v02" ]
]
)
, ( "TagValue"
, [ [ "a00" , "ghc-prim_GHC.Prim.Unit#" ]
, [ "v01" , "Tup0" ]
, [ "v02" , "lit:T_Token \"RealWorld\"" ]
, [ "v03" , "ghc-prim_GHC.Prim.Unit#" ]
, [ "v04" , "MutVar#" ]
, [ "v05" , "MutVar#" ]
]
)
]
it "readMutVar#" $ do
cfa <- controlFlowAnalysisM [] ["main"] [prog|
primop effectful
"newMutVar#" :: %a.6 -> {"State#" %s.2} @ t.107 -> {"ghc-prim_GHC.Prim.Unit#" {"MutVar#" %s.2 %a.6} @ t.109} @ t.108
"readMutVar#" :: {"MutVar#" %s.21 %a.11} @ t.243 -> {"State#" %s.21} @ t.245 -> {"ghc-prim_GHC.Prim.Unit#" %a.11} @ t.246
main =
letS
v01 = [Tup0]
v02 = #T_Token "RealWorld"
v03 = "newMutVar#" $ v01 v02
v09 = case v03 of
("ghc-prim_GHC.Prim.Unit#" v04) @ a00 ->
letS
v06 = "readMutVar#" $ v04 v02
v08 = case v06 of
("ghc-prim_GHC.Prim.Unit#" v07) @ a01 ->
v07
v08
v09
|]
addUsedM cfa
toMVOp cfa `sameAs` Map.fromList
[ ( "MutVar" , [ [ "v03" , "t.109" , "v01" ] ] )
, ( "ExternalOrigin"
, [ [ "a00" , "v03" , "t.108" ]
, [ "a01" , "v06" , "t.246" ]
, [ "v03" , "v03" , "t.108" ]
, [ "v04" , "v03" , "t.109" ]
, [ "v06" , "v06" , "t.246" ]
, [ "v07" , "v06" , "a.11" ]
, [ "v08" , "v06" , "a.11" ]
, [ "v09" , "v06" , "a.11" ]
]
)
, ( "NodeOrigin"
, [ [ "v01" , "v01" ]
, [ "v02" , "v02" ]
, [ "v07" , "v01" ]
, [ "v08" , "v01" ]
, [ "v09" , "v01" ]
]
)
, ( "TagValue"
, [ [ "a00" , "ghc-prim_GHC.Prim.Unit#" ]
, [ "a01" , "ghc-prim_GHC.Prim.Unit#" ]
, [ "v01" , "Tup0" ]
, [ "v02" , "lit:T_Token \"RealWorld\"" ]
, [ "v03" , "ghc-prim_GHC.Prim.Unit#" ]
, [ "v04" , "MutVar#" ]
, [ "v06" , "ghc-prim_GHC.Prim.Unit#" ]
, [ "v07" , "Tup0" ]
, [ "v08" , "Tup0" ]
, [ "v09" , "Tup0" ]
]
)
]
it "writeMutVar#" $ do
cfa <- controlFlowAnalysisM [] ["main"] [prog|
primop effectful
"newMutVar#" :: %a.6 -> {"State#" %s.2} @ t.107 -> {"ghc-prim_GHC.Prim.Unit#" {"MutVar#" %s.2 %a.6} @ t.109} @ t.108
"writeMutVar#" :: {"MutVar#" %s.4 %a.9} @ t.115 -> %a.9 -> {"State#" %s.4} @ t.117 -> {"ghc-prim_GHC.Prim.(##)"} @ t.118
main =
letS
v01 = [Tup0]
v02 = #T_Token "RealWorld"
v03 = "newMutVar#" $ v01 v02
v09 = case v03 of
("ghc-prim_GHC.Prim.Unit#" v04) @ a00 ->
letS
v06 = [C1]
v08 = "writeMutVar#" $ v04 v06 v02
v08
v09
|]
addUsedM cfa
toMVOp cfa `sameAs` Map.fromList
[ ( "MutVar"
, [ [ "v03" , "t.109" , "v01" ]
, [ "v03" , "t.109" , "v06" ]
]
)
, ( "ExternalOrigin"
, [ [ "a00" , "v03" , "t.108" ]
, [ "v03" , "v03" , "t.108" ]
, [ "v04" , "v03" , "t.109" ]
, [ "v08" , "v08" , "t.118" ]
, [ "v09" , "v08" , "t.118" ]
]
)
, ( "NodeOrigin"
, [ [ "v01" , "v01" ]
, [ "v02" , "v02" ]
, [ "v06" , "v06" ]
]
)
, ( "TagValue"
, [ [ "a00" , "ghc-prim_GHC.Prim.Unit#" ]
, [ "v01" , "Tup0" ]
, [ "v02" , "lit:T_Token \"RealWorld\"" ]
, [ "v03" , "ghc-prim_GHC.Prim.Unit#" ]
, [ "v04" , "MutVar#" ]
, [ "v06" , "C1" ]
, [ "v08" , "ghc-prim_GHC.Prim.(##)" ]
, [ "v09" , "ghc-prim_GHC.Prim.(##)" ]
]
)
]
it "casMutVar#" $ do
cfa <- controlFlowAnalysisM [] ["main"] [prog|
primop effectful
"newMutVar#" :: %a.6 -> {"State#" %s.2} @ t.107 -> {"ghc-prim_GHC.Prim.Unit#" {"MutVar#" %s.2 %a.6} @ t.109} @ t.108
"casMutVar#" :: {"MutVar#" %s.10 %a.11} @ t.12 -> %a.11 -> %a.11 -> {"State#" %s.10} @ t.14 -> {"ghc-prim_GHC.Prim.(#,#)" (T_Int64) @ t.15 %a.11} @ t.16
main =
letS
v01 = [Tup0]
v02 = #T_Token "RealWorld"
v03 = "newMutVar#" $ v01 v02
v11 = case v03 of
("ghc-prim_GHC.Prim.Unit#" v04) @ a00 ->
letS
v06 = [C1]
v07 = "casMutVar#" $ v04 v01 v06 v02
v10 = case v07 of
("ghc-prim_GHC.Prim.(#,#)" v08 v09) @ a01 ->
v08
v10
v11
|]
addUsedM cfa
toMVOp cfa `sameAs` Map.fromList
[ ( "MutVar"
, [ [ "v03" , "t.109" , "v01" ]
, [ "v03" , "t.109" , "v06" ]
]
)
, ( "ExternalOrigin"
, [ [ "a00" , "v03" , "t.108" ]
, [ "a01" , "v07" , "t.16" ]
, [ "v03" , "v03" , "t.108" ]
, [ "v04" , "v03" , "t.109" ]
, [ "v07" , "v07" , "t.16" ]
, [ "v08" , "v07" , "t.15" ]
, [ "v09" , "v07" , "a.11" ]
, [ "v10" , "v07" , "t.15" ]
, [ "v11" , "v07" , "t.15" ]
]
)
, ( "NodeOrigin"
, [ [ "v01" , "v01" ]
, [ "v02" , "v02" ]
, [ "v06" , "v06" ]
, [ "v09" , "v01" ]
, [ "v09" , "v06" ]
]
)
, ( "TagValue"
, [ [ "a00" , "ghc-prim_GHC.Prim.Unit#" ]
, [ "a01" , "ghc-prim_GHC.Prim.(#,#)" ]
, [ "v01" , "Tup0" ]
, [ "v02" , "lit:T_Token \"RealWorld\"" ]
, [ "v03" , "ghc-prim_GHC.Prim.Unit#" ]
, [ "v04" , "MutVar#" ]
, [ "v06" , "C1" ]
, [ "v07" , "ghc-prim_GHC.Prim.(#,#)" ]
, [ "v08" , "lit:T_Int64" ]
, [ "v09" , "C1" ]
, [ "v09" , "Tup0" ]
, [ "v10" , "lit:T_Int64" ]
, [ "v11" , "lit:T_Int64" ]
]
)
]
describe "Coverage" $ do
it "Used Rules" $ do
printUsedM
|
049e60c49170db5d8f846106e0e8cd1fecbe38944b7cb67afe511f67dd614e70 | potatosalad/erlang-jose | emc_testvector.erl | -*- mode : erlang ; tab - width : 4 ; indent - tabs - mode : 1 ; st - rulers : [ 70 ] -*-
%% vim: ts=4 sw=4 ft=erlang noet
%%%-------------------------------------------------------------------
@author < >
2014 - 2022 ,
%%% @doc
%%%
%%% @end
Created : 12 Aug 2015 by < >
%%%-------------------------------------------------------------------
-module(emc_testvector).
%% API
-export([from_binary/1]).
-export([from_file/1]).
-export([to_binary/1]).
-export([to_file/2]).
%%====================================================================
%% API functions
%%====================================================================
%% Parse a test-vector document held in memory.
%% The input is cut on both LF and CR, empty fragments are dropped
%% (so CRLF files work too), and the remaining lines are handed to
%% the line parser.
from_binary(Binary) ->
    Fragments = binary:split(Binary, [<<"\n">>, <<"\r">>], [global, trim]),
    NonEmpty = lists:filter(fun(Line) -> Line =/= <<>> end, Fragments),
    parse_lines(NonEmpty, []).
%% Read and parse the test-vector file at File.
%% Returns the parsed vector list, or the {error, Reason} tuple from
%% file:read_file/1 when the file cannot be read.
from_file(File) ->
    case file:read_file(File) of
        {ok, Data} ->
            from_binary(Data);
        {error, _} = Error ->
            Error
    end.
%% Render a parsed vector list back into the textual test-vector
%% format. Each item becomes either a comment banner (divider,
%% example header, component header) or a "# Key:" line followed by
%% the value's hex dump produced by to_hex_lines/3.
%% NOTE(review): relies on the project-local hex module for encoding.
to_binary(Vectors) when is_list(Vectors) ->
    <<
      <<
        (case Vector of
             divider ->
                 <<
                   $\n,
                   "# =============================================\n",
                   $\n
                 >>;
             {example, Example} ->
                 %% Banner width follows the example title's length.
                 ExampleLen = byte_size(Example),
                 Bar = binary:copy(<<"=">>, ExampleLen),
                 <<
                   $\n,
                   "# ", Bar/binary, $\n,
                   "# ", Example/binary, $\n,
                   "# ", Bar/binary, $\n,
                   $\n
                 >>;
             {component, Component} ->
                 ComponentLen = byte_size(Component),
                 Bar = binary:copy(<<"-">>, ComponentLen),
                 <<
                   $\n,
                   "# ", Bar/binary, $\n,
                   "# ", Component/binary, $\n,
                   "# ", Bar/binary, $\n,
                   $\n
                 >>;
             {vector, {Key, Val}} ->
                 %% Hex-encode the value and wrap it into fixed-width
                 %% lines, one per output row.
                 Hex = hex:bin_to_hex(Val),
                 HexLines = to_hex_lines(Hex, <<>>, []),
                 HexBlocks = << << HexLine/binary, $\n >> || HexLine <- HexLines >>,
                 <<
                   "# ", Key/binary, $:, $\n,
                   HexBlocks/binary,
                   $\n
                 >>
         end)/binary
      >>
      || Vector <- Vectors
    >>.
%% Serialise a vector list (as produced by from_binary/1) to File.
%% Returns ok, or {error, Reason} from file:write_file/2.
%%
%% Fix: the previous head matched a 3-tuple, `State={_, _, _}`, but
%% to_binary/1 only accepts a list (`when is_list(Vectors)`), so every
%% call crashed with a function_clause. Accept the vector list
%% directly, guarded the same way to_binary/1 is.
to_file(File, Vectors) when is_list(Vectors) ->
    Binary = to_binary(Vectors),
    file:write_file(File, Binary).
%%%-------------------------------------------------------------------
Internal functions
%%%-------------------------------------------------------------------
@private
%% @private
%% Walk the input lines, accumulating parsed items in reverse order.
%% A three-line "# ===" banner around "# Example..." yields an example
%% marker; a three-line "----" banner yields a component marker (the
%% second variant tolerates banners whose '#' was lost); the long "="
%% rule becomes a divider; any other "# Key:" line starts a hex
%% vector. Unrecognised lines are dropped. Clause order matters:
%% banners must be tried before the generic "# " clause.
parse_lines([], Acc) ->
    lists:reverse(Acc);
parse_lines([
             << "# =======", _/binary >>,
             << "# Example", Example/binary >>,
             << "# =======", _/binary >>
             | Lines
            ], Acc) ->
    parse_lines(Lines, [{example, << "Example", Example/binary >>} | Acc]);
parse_lines([
             << "# ----", _/binary >>,
             << "# ", Component/binary >>,
             << "# ----", _/binary >>
             | Lines
            ], Acc) ->
    parse_lines(Lines, [{component, Component} | Acc]);
parse_lines([
             << " ----", _/binary >>,
             << "# ", Component/binary >>,
             << " ----", _/binary >>
             | Lines
            ], Acc) ->
    parse_lines(Lines, [{component, Component} | Acc]);
parse_lines([<<"# =============================================">> | Lines], Acc) ->
    parse_lines(Lines, [divider | Acc]);
%% A key line only starts a vector once at least one header/banner has
%% been accumulated (length(Acc) > 0); leading stray key lines are
%% skipped by the catch-all below.
parse_lines([<< "# ", Key/binary >> | Lines], Acc) when length(Acc) > 0 ->
    case parse_key(Key) of
        skip ->
            parse_lines(Lines, Acc);
        NewKey ->
            parse_vector(Lines, NewKey, <<>>, Acc)
    end;
parse_lines([_Line | Lines], Acc) ->
    parse_lines(Lines, Acc).
@private
%% @private
%% Extract the key name from a "Key: ..." fragment: everything before
%% the first colon. Lines without a colon are not keys and yield the
%% atom 'skip'.
parse_key(Key) ->
    case binary:split(Key, <<$:>>) of
        [Name, _AfterColon] ->
            Name;
        [_NoColon] ->
            skip
    end.
@private
%% @private
%% Consume hex-dump lines for Key until the next comment/banner line,
%% then decode the accumulated hex and resume parse_lines/2.
%% NOTE(review): the second clause ("# ----") is shadowed by the first
%% ($# prefix) and looks unreachable; the third (" ----") is not.
%% NOTE(review): an input that ends mid-vector hits no clause for []
%% and raises function_clause — confirm callers never truncate files.
parse_vector(Lines = [<< $#, _/binary >> | _], Key, Hex, Acc) ->
    Val = hex:hex_to_bin(Hex),
    parse_lines(Lines, [{vector, {Key, Val}} | Acc]);
parse_vector(Lines = [<< "# ----", _/binary >> | _], Key, Hex, Acc) ->
    Val = hex:hex_to_bin(Hex),
    parse_lines(Lines, [{vector, {Key, Val}} | Acc]);
parse_vector(Lines = [<< " ----", _/binary >> | _], Key, Hex, Acc) ->
    Val = hex:hex_to_bin(Hex),
    parse_lines(Lines, [{vector, {Key, Val}} | Acc]);
parse_vector([HexLine | Lines], Key, Hex, Acc) ->
    case parse_vector_hexline(HexLine, Hex) of
        %% NOTE(review): parse_vector_hexline/2 only returns {next, _}
        %% or errors; the {ok, _} branch appears to be dead code.
        {ok, NewHex} ->
            Val = hex:hex_to_bin(NewHex),
            parse_lines([HexLine | Lines], [{vector, {Key, Val}} | Acc]);
        {next, NewHex} ->
            parse_vector(Lines, Key, NewHex, Acc)
    end.
@private
%% @private
%% Fold one dump line into the hex accumulator: spaces are skipped,
%% alphanumerics are appended, and end-of-line yields {next, Acc}.
%% Any other character is a malformed dump and raises badarg.
parse_vector_hexline(<<>>, Acc) ->
    {next, Acc};
parse_vector_hexline(<<$\s, Tail/binary>>, Acc) ->
    parse_vector_hexline(Tail, Acc);
parse_vector_hexline(<<Ch, Tail/binary>>, Acc)
  when Ch >= $A, Ch =< $Z;
       Ch >= $a, Ch =< $z;
       Ch >= $0, Ch =< $9 ->
    parse_vector_hexline(Tail, <<Acc/binary, Ch>>);
parse_vector_hexline(Other, Acc) ->
    erlang:error({badarg, [Other, Acc]}).
@private
%% @private
%% Split a hex string into display lines: digits are emitted in
%% "XX " groups (trailing space included) and a line is flushed once
%% it reaches 48 bytes (16 groups). Lines come back in input order.
to_hex_lines(Pending, Current, Done) when byte_size(Current) >= 48 ->
    to_hex_lines(Pending, <<>>, [Current | Done]);
to_hex_lines(<<Hi, Lo, Tail/binary>>, Current, Done) ->
    to_hex_lines(Tail, <<Current/binary, Hi, Lo, $\s>>, Done);
to_hex_lines(<<>>, <<>>, Done) ->
    lists:reverse(Done);
to_hex_lines(<<>>, Current, Done) ->
    lists:reverse([Current | Done]).
| null | https://raw.githubusercontent.com/potatosalad/erlang-jose/dbc4074066080692246afe613345ef6becc2a3fe/test/cavp_SUITE_data/emc_testvector.erl | erlang | vim: ts=4 sw=4 ft=erlang noet
-------------------------------------------------------------------
@doc
@end
-------------------------------------------------------------------
API
====================================================================
API functions
====================================================================
-------------------------------------------------------------------
------------------------------------------------------------------- | -*- mode : erlang ; tab - width : 4 ; indent - tabs - mode : 1 ; st - rulers : [ 70 ] -*-
@author < >
2014 - 2022 ,
Created : 12 Aug 2015 by < >
-module(emc_testvector).
-export([from_binary/1]).
-export([from_file/1]).
-export([to_binary/1]).
-export([to_file/2]).
from_binary(Binary) ->
Lines = [Line || Line <- binary:split(Binary, [<< $\n >>, << $\r >>], [global, trim]), Line =/= <<>>],
parse_lines(Lines, []).
from_file(File) ->
case file:read_file(File) of
{ok, Binary} ->
from_binary(Binary);
ReadError ->
ReadError
end.
to_binary(Vectors) when is_list(Vectors) ->
<<
<<
(case Vector of
divider ->
<<
$\n,
"# =============================================\n",
$\n
>>;
{example, Example} ->
ExampleLen = byte_size(Example),
Bar = binary:copy(<<"=">>, ExampleLen),
<<
$\n,
"# ", Bar/binary, $\n,
"# ", Example/binary, $\n,
"# ", Bar/binary, $\n,
$\n
>>;
{component, Component} ->
ComponentLen = byte_size(Component),
Bar = binary:copy(<<"-">>, ComponentLen),
<<
$\n,
"# ", Bar/binary, $\n,
"# ", Component/binary, $\n,
"# ", Bar/binary, $\n,
$\n
>>;
{vector, {Key, Val}} ->
Hex = hex:bin_to_hex(Val),
HexLines = to_hex_lines(Hex, <<>>, []),
HexBlocks = << << HexLine/binary, $\n >> || HexLine <- HexLines >>,
<<
"# ", Key/binary, $:, $\n,
HexBlocks/binary,
$\n
>>
end)/binary
>>
|| Vector <- Vectors
>>.
to_file(File, State={_, _, _}) ->
Binary = to_binary(State),
file:write_file(File, Binary).
Internal functions
@private
parse_lines([], Acc) ->
lists:reverse(Acc);
parse_lines([
<< "# =======", _/binary >>,
<< "# Example", Example/binary >>,
<< "# =======", _/binary >>
| Lines
], Acc) ->
parse_lines(Lines, [{example, << "Example", Example/binary >>} | Acc]);
parse_lines([
<< "# ----", _/binary >>,
<< "# ", Component/binary >>,
<< "# ----", _/binary >>
| Lines
], Acc) ->
parse_lines(Lines, [{component, Component} | Acc]);
parse_lines([
<< " ----", _/binary >>,
<< "# ", Component/binary >>,
<< " ----", _/binary >>
| Lines
], Acc) ->
parse_lines(Lines, [{component, Component} | Acc]);
parse_lines([<<"# =============================================">> | Lines], Acc) ->
parse_lines(Lines, [divider | Acc]);
parse_lines([<< "# ", Key/binary >> | Lines], Acc) when length(Acc) > 0 ->
case parse_key(Key) of
skip ->
parse_lines(Lines, Acc);
NewKey ->
parse_vector(Lines, NewKey, <<>>, Acc)
end;
parse_lines([_Line | Lines], Acc) ->
parse_lines(Lines, Acc).
@private
parse_key(Key) ->
case binary:match(Key, << $: >>) of
{Pos, 1} ->
binary:part(Key, 0, Pos);
nomatch ->
skip
end.
@private
parse_vector(Lines = [<< $#, _/binary >> | _], Key, Hex, Acc) ->
Val = hex:hex_to_bin(Hex),
parse_lines(Lines, [{vector, {Key, Val}} | Acc]);
parse_vector(Lines = [<< "# ----", _/binary >> | _], Key, Hex, Acc) ->
Val = hex:hex_to_bin(Hex),
parse_lines(Lines, [{vector, {Key, Val}} | Acc]);
parse_vector(Lines = [<< " ----", _/binary >> | _], Key, Hex, Acc) ->
Val = hex:hex_to_bin(Hex),
parse_lines(Lines, [{vector, {Key, Val}} | Acc]);
parse_vector([HexLine | Lines], Key, Hex, Acc) ->
case parse_vector_hexline(HexLine, Hex) of
{ok, NewHex} ->
Val = hex:hex_to_bin(NewHex),
parse_lines([HexLine | Lines], [{vector, {Key, Val}} | Acc]);
{next, NewHex} ->
parse_vector(Lines, Key, NewHex, Acc)
end.
@private
parse_vector_hexline(<< $\s, Rest/binary >>, Hex) ->
parse_vector_hexline(Rest, Hex);
parse_vector_hexline(<< C, Rest/binary >>, Hex)
when (C >= $A andalso C =< $Z)
orelse (C >= $a andalso C =< $z)
orelse (C >= $0 andalso C =< $9) ->
parse_vector_hexline(Rest, << Hex/binary, C >>);
parse_vector_hexline(<<>>, Hex) ->
{next, Hex};
parse_vector_hexline(Rest, Hex) ->
erlang:error({badarg, [Rest, Hex]}).
@private
to_hex_lines(Rest, Line, Lines) when byte_size(Line) >= 48 ->
to_hex_lines(Rest, <<>>, [Line | Lines]);
to_hex_lines(<< A, B, Rest/binary >>, Line, Lines) ->
to_hex_lines(Rest, << Line/binary, A, B, $\s >>, Lines);
to_hex_lines(<<>>, <<>>, Lines) ->
lists:reverse(Lines);
to_hex_lines(<<>>, Line, Lines) ->
lists:reverse([Line | Lines]).
|
e9bf38bcc98fbe24c0bda4f94b521d23f61c53ba2eddc713c65bfb4f4f2a3958 | 8c6794b6/guile-tjit | active-slot.scm | ;;; installed-scm-file
Copyright ( C ) 1999 , 2001 , 2006 , 2009 , 2015 Free Software Foundation , Inc.
Copyright ( C ) 1993 - 1998 / ESSI < >
;;;;
;;;; This library is free software; you can redistribute it and/or
;;;; modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation ; either
version 3 of the License , or ( at your option ) any later version .
;;;;
;;;; This library is distributed in the hope that it will be useful,
;;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
;;;; Lesser General Public License for more details.
;;;;
You should have received a copy of the GNU Lesser General Public
;;;; License along with this library; if not, write to the Free Software
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , USA
;;;;
;;;;
;;;; This file was based upon active-slot.stklos from the STk distribution
version 4.0.1 by < > .
;;;;
(define-module (oop goops active-slot)
:use-module (oop goops internal)
:export (<active-class>))
;; Metaclass for classes whose slots may run user hooks around reads
;; and writes.
(define-class <active-class> (<class>))

;; For slots allocated as #:active, allocate the next struct field and
;; build a getter/setter pair that wraps the plain field access with
;; the #:before-slot-ref / #:after-slot-ref / #:before-slot-set! /
;; #:after-slot-set! hooks from the slot options.  A #f result from a
;; "before" hook vetoes the access: reads yield *unbound*, writes are
;; silently dropped.  "after-ref" is only run when the stored value is
;; not the unbound marker.  Non-#:active slots defer to the next
;; method.
(define-method (compute-get-n-set (class <active-class>) slot)
  (if (eq? (slot-definition-allocation slot) #:active)
      (let* ((index (slot-ref class 'nfields))
             (s (slot-definition-options slot))
             (before-ref (get-keyword #:before-slot-ref s #f))
             (after-ref (get-keyword #:after-slot-ref s #f))
             (before-set! (get-keyword #:before-slot-set! s #f))
             (after-set! (get-keyword #:after-slot-set! s #f))
             (unbound *unbound*))
        ;; Claim field `index` for this slot.
        (slot-set! class 'nfields (+ index 1))
        (list (lambda (o)
                (if before-ref
                    (if (before-ref o)
                        (let ((res (struct-ref o index)))
                          (and after-ref (not (eqv? res unbound)) (after-ref o))
                          res)
                        *unbound*)
                    (let ((res (struct-ref o index)))
                      (and after-ref (not (eqv? res unbound)) (after-ref o))
                      res)))
              (lambda (o v)
                (if before-set!
                    (if (before-set! o v)
                        (begin
                          (struct-set! o index v)
                          (and after-set! (after-set! o v))))
                    (begin
                      (struct-set! o index v)
                      (and after-set! (after-set! o v)))))))
      (next-method)))
| null | https://raw.githubusercontent.com/8c6794b6/guile-tjit/9566e480af2ff695e524984992626426f393414f/module/oop/goops/active-slot.scm | scheme | installed-scm-file
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
either
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
License along with this library; if not, write to the Free Software
This file was based upon active-slot.stklos from the STk distribution
|
Copyright ( C ) 1999 , 2001 , 2006 , 2009 , 2015 Free Software Foundation , Inc.
Copyright ( C ) 1993 - 1998 / ESSI < >
version 3 of the License , or ( at your option ) any later version .
You should have received a copy of the GNU Lesser General Public
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , USA
version 4.0.1 by < > .
(define-module (oop goops active-slot)
:use-module (oop goops internal)
:export (<active-class>))
(define-class <active-class> (<class>))
(define-method (compute-get-n-set (class <active-class>) slot)
(if (eq? (slot-definition-allocation slot) #:active)
(let* ((index (slot-ref class 'nfields))
(s (slot-definition-options slot))
(before-ref (get-keyword #:before-slot-ref s #f))
(after-ref (get-keyword #:after-slot-ref s #f))
(before-set! (get-keyword #:before-slot-set! s #f))
(after-set! (get-keyword #:after-slot-set! s #f))
(unbound *unbound*))
(slot-set! class 'nfields (+ index 1))
(list (lambda (o)
(if before-ref
(if (before-ref o)
(let ((res (struct-ref o index)))
(and after-ref (not (eqv? res unbound)) (after-ref o))
res)
*unbound*)
(let ((res (struct-ref o index)))
(and after-ref (not (eqv? res unbound)) (after-ref o))
res)))
(lambda (o v)
(if before-set!
(if (before-set! o v)
(begin
(struct-set! o index v)
(and after-set! (after-set! o v))))
(begin
(struct-set! o index v)
(and after-set! (after-set! o v)))))))
(next-method)))
|
9a79597fcb83fed3adff5c19be6787137a9198de3b74df2cc08968a54ac23237 | tsloughter/kuberl | kuberl_v1_priority_class_list.erl | -module(kuberl_v1_priority_class_list).
-export([encode/1]).
-export_type([kuberl_v1_priority_class_list/0]).
-type kuberl_v1_priority_class_list() ::
#{ 'apiVersion' => binary(),
'items' := list(),
'kind' => binary(),
'metadata' => kuberl_v1_list_meta:kuberl_v1_list_meta()
}.
encode(#{ 'apiVersion' := ApiVersion,
'items' := Items,
'kind' := Kind,
'metadata' := Metadata
}) ->
#{ 'apiVersion' => ApiVersion,
'items' => Items,
'kind' => Kind,
'metadata' => Metadata
}.
| null | https://raw.githubusercontent.com/tsloughter/kuberl/f02ae6680d6ea5db6e8b6c7acbee8c4f9df482e2/gen/kuberl_v1_priority_class_list.erl | erlang | -module(kuberl_v1_priority_class_list).
-export([encode/1]).
-export_type([kuberl_v1_priority_class_list/0]).
-type kuberl_v1_priority_class_list() ::
#{ 'apiVersion' => binary(),
'items' := list(),
'kind' => binary(),
'metadata' => kuberl_v1_list_meta:kuberl_v1_list_meta()
}.
encode(#{ 'apiVersion' := ApiVersion,
'items' := Items,
'kind' := Kind,
'metadata' := Metadata
}) ->
#{ 'apiVersion' => ApiVersion,
'items' => Items,
'kind' => Kind,
'metadata' => Metadata
}.
| |
0de227dd11de757f4cf034ef1ccf4753795c38100b96d091364f17440c26204e | clojure-interop/java-jdk | JAXBPermission.clj | (ns javax.xml.bind.JAXBPermission
"This class is for JAXB permissions. A JAXBPermission
contains a name (also referred to as a \"target name\") but
no actions list; you either have the named permission
or you don't.
The target name is the name of the JAXB permission (see below).
The following table lists all the possible JAXBPermission target names,
and for each provides a description of what the permission allows
and a discussion of the risks of granting code the permission.
Permission Target Name
What the Permission Allows
Risks of Allowing this Permission
setDatatypeConverter
Allows the code to set VM-wide DatatypeConverterInterface
via the setDatatypeConverter method
that all the methods on DatatypeConverter uses.
Malicious code can set DatatypeConverterInterface, which has
VM-wide singleton semantics, before a genuine JAXB implementation sets one.
This allows malicious code to gain access to objects that it may otherwise
not have access to, such as Frame.getFrames() that belongs to
another application running in the same JVM."
(:refer-clojure :only [require comment defn ->])
(:import [javax.xml.bind JAXBPermission]))
(defn ->jaxb-permission
  "Construct a javax.xml.bind.JAXBPermission with the given target
  name.  As of JAXB 2.2 the only defined target name is
  \"setDatatypeConverter\"."
  (^JAXBPermission [^java.lang.String name]
   (JAXBPermission. name)))
| null | https://raw.githubusercontent.com/clojure-interop/java-jdk/8d7a223e0f9a0965eb0332fad595cf7649d9d96e/javax.xml/src/javax/xml/bind/JAXBPermission.clj | clojure | you either have the named permission | (ns javax.xml.bind.JAXBPermission
"This class is for JAXB permissions. A JAXBPermission
contains a name (also referred to as a \"target name\") but
or you don't.
The target name is the name of the JAXB permission (see below).
The following table lists all the possible JAXBPermission target names,
and for each provides a description of what the permission allows
and a discussion of the risks of granting code the permission.
Permission Target Name
What the Permission Allows
Risks of Allowing this Permission
setDatatypeConverter
Allows the code to set VM-wide DatatypeConverterInterface
via the setDatatypeConverter method
that all the methods on DatatypeConverter uses.
Malicious code can set DatatypeConverterInterface, which has
VM-wide singleton semantics, before a genuine JAXB implementation sets one.
This allows malicious code to gain access to objects that it may otherwise
not have access to, such as Frame.getFrames() that belongs to
another application running in the same JVM."
(:refer-clojure :only [require comment defn ->])
(:import [javax.xml.bind JAXBPermission]))
(defn ->jaxb-permission
"Constructor.
Creates a new JAXBPermission with the specified name.
name - The name of the JAXBPermission. As of 2.2 only \"setDatatypeConverter\" is defined. - `java.lang.String`"
(^JAXBPermission [^java.lang.String name]
(new JAXBPermission name)))
|
4798d58bed8b77c0f6c7e3e09cbeb170d9d6f57eda38c1635f1a98af31a0d1c4 | haskell-game/fungen | Display.hs | {-# OPTIONS_HADDOCK hide #-}
{- | This FunGEn module renders the game window.
-}
FunGEN - Functional Game Engine
/~haskell/fungen
Copyright ( C ) 2002 < >
This code is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE .
FunGEN - Functional Game Engine
/~haskell/fungen
Copyright (C) 2002 Andre Furtado <>
This code is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-}
module Graphics.UI.Fungen.Display (
display
) where
import Graphics.UI.Fungen.Game
import Graphics.UI.Fungen.Util (when)
import Graphics.Rendering.OpenGL
import Graphics.UI.GLUT
-- | Given a fungen 'Game' and an 'IOGame' step action, build the GLUT
-- display callback: clear the colour buffer, run one game step (which
-- draws into the back buffer), then swap buffers and flush.
-- 'Graphics.UI.Fungen.funInit' installs this automatically.
display :: Game t s u v -> IOGame t s u v () -> DisplayCallback
display game step = do
  clear [ColorBuffer]
  runIOGame (displayIOGame step) game
  swapBuffers
  flush
-- | One update-and-render cycle of an 'IOGame': move objects when the
-- moving flag is set, run the user's game cycle, then draw the map,
-- the objects and any queued text according to the (possibly updated)
-- game flags.
displayIOGame :: IOGame t s u v () -> IOGame t s u v ()
displayIOGame step = do
  (_, _, movingEnabled) <- getGameFlags
  when movingEnabled moveAllObjects
  step
  -- Re-read the flags: the cycle above may have changed them.
  (mapEnabled, objectsEnabled, _) <- getGameFlags
  when mapEnabled drawMap
  when objectsEnabled drawAllObjects
  printText
| null | https://raw.githubusercontent.com/haskell-game/fungen/fcf533d49fd3a0d9c1640faf6fd7d3a2f742083b/Graphics/UI/Fungen/Display.hs | haskell | # OPTIONS_HADDOCK hide #
| This FunGEn module renders the game window.
display callback that steps the game and renders its resulting
|
FunGEN - Functional Game Engine
/~haskell/fungen
Copyright ( C ) 2002 < >
This code is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE .
FunGEN - Functional Game Engine
/~haskell/fungen
Copyright (C) 2002 Andre Furtado <>
This code is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-}
module Graphics.UI.Fungen.Display (
display
) where
import Graphics.UI.Fungen.Game
import Graphics.UI.Fungen.Util (when)
import Graphics.Rendering.OpenGL
import Graphics.UI.GLUT
| Given a fungen Game and IOGame step action , generate a GLUT
state . ' Graphics . UI.Fungen.funInit ' runs this automatically .
display :: Game t s u v -> IOGame t s u v () -> DisplayCallback
display g gameCycle = do
clear [ColorBuffer]
runIOGame (displayIOGame gameCycle) g
swapBuffers
flush
| Run one update and display an IOGame .
displayIOGame :: IOGame t s u v () -> IOGame t s u v ()
displayIOGame gameCycle = do
(_,_,objectsMoving) <- getGameFlags
when objectsMoving moveAllObjects
gameCycle
(mapDrawing,objectsDrawing,_) <- getGameFlags
when mapDrawing drawMap
when objectsDrawing drawAllObjects
printText
|
88d1024913ae30a19321041e35f66e00619cd3b262b9fc88429fbfd480965a16 | mzp/ocaml-hoogle | base.ml | external (@@) : ('a -> 'b) -> 'a -> 'b = "%apply"
external (+>) : 'a -> ('a -> 'b) -> 'b = "%revapply"
let ($) f g x = f (g x)
let (!$) = Lazy.force
external id : 'a -> 'a = "%identity"
let uncurry f a b = f (a,b)
let curry f (a,b) = f a b
let flip f a b = f b a
let const a _ = a
(* Map [f] over an option: [sure f (Some x) = Some (f x)],
   [sure f None = None]. *)
let sure f opt =
  match opt with
  | Some v -> Some (f v)
  | None -> None
(* [option f x]: Some (f x), or None when [f] raises Not_found.
   Other exceptions still propagate. *)
let option f x =
  try Some (f x) with
  | Not_found -> None

(* [maybe f x]: `Val (f x), or `Error e for any exception [e]. *)
let maybe f x =
  try `Val (f x) with
  | exn -> `Error exn

(* [tee f x]: run [f] on [x] for its side effect (exceptions
   swallowed) and hand [x] back unchanged. *)
let tee f x =
  (try ignore (f x) with _ -> ());
  x
(* A simple sum type for two-way results. *)
type ('a,'b) either = Left of 'a | Right of 'b
(* Printf-style failwith.  Note the trailing unit argument: the
   failure is only raised when the resulting thunk is applied,
   e.g. [failwithf "bad: %d" n ()]. *)
let failwithf fmt = Printf.kprintf (fun s () -> failwith s) fmt
(* Total association lookup: Some value, or None when absent. *)
let assoc x xs = (option @@ List.assoc x) xs
(* Render strings as "[a;b;c]" (no spaces, no quoting). *)
let string_of_list xs =
  "[" ^ String.concat ";" xs ^ "]"
(* Anamorphism: grow a list from [seed], stopping at None.
   [f seed = Some (value, next)] prepends [value] and continues
   from [next]. *)
let rec unfold f seed =
  match f seed with
  | None -> []
  | Some (value, next) -> value :: unfold f next
(* [range a b] = [a; a+1; ...; b-1]; empty when a >= b.
   Rewritten tail-recursively (building backwards from b-1) so that
   very large ranges cannot overflow the stack; the result is
   identical to the previous naive recursion. *)
let range a b =
  let rec build acc i =
    if i < a then acc else build (i :: acc) (i - 1)
  in
  build [] (b - 1)
(* Place [delim] between consecutive elements (sic: "interperse" is
   the library's historical spelling of intersperse). *)
let rec interperse delim = function
  | ([] | [_]) as short -> short
  | x :: rest -> x :: delim :: interperse delim rest
(* Left-to-right map that threads an accumulator:
   [f acc x] yields [(acc', y)]; the final accumulator and the mapped
   list (in input order) are returned. *)
let map_accum_left f init xs =
  let step (acc, rev_ys) x =
    let acc', y = f acc x in
    (acc', y :: rev_ys)
  in
  let final, rev_ys = List.fold_left step (init, []) xs in
  (final, List.rev rev_ys)
(* Right-to-left map that threads an accumulator: the list is mapped
   from its tail towards its head, so [f] sees the accumulator built
   from the elements to its right. *)
let rec map_accum_right f init = function
  | [] ->
      (init, [])
  | hd :: tl ->
      let acc, mapped_tl = map_accum_right f init tl in
      let acc', hd' = f acc hd in
      (acc', hd' :: mapped_tl)
(* Map and filter in one pass: keep [y] for every element where
   [f x = Some y], in input order. *)
let rec filter_map f = function
  | [] -> []
  | x :: rest ->
      (match f x with
       | Some y -> y :: filter_map f rest
       | None -> filter_map f rest)
(* Group adjacent elements related by [eq] into sublists, preserving
   order; only neighbouring elements are ever merged. *)
let rec group_by eq = function
  | [] -> []
  | a :: (b :: _ as rest) when eq a b ->
      (match group_by eq rest with
       | grp :: grps -> (a :: grp) :: grps
       | [] ->
           (* The recursive call's argument is non-empty, so its
              result cannot be []. *)
           failwith "must not happen")
  | x :: rest ->
      [x] :: group_by eq rest
(* Zero-based position of the first element equal to [target];
   raises Not_found when absent. *)
let index target xs =
  let rec search pos = function
    | [] -> raise Not_found
    | y :: rest ->
        if y = target then pos
        else search (pos + 1) rest
  in
  search 0 xs
(* One-character string. *)
let string_of_char c = String.make 1 c

(* Lower-case hex rendering with a "0x" prefix. *)
let hex n = Printf.sprintf "0x%x" n
(* [open_out_with path f] opens [path] for binary writing, applies
   [f] to the channel, closes the channel whether or not [f] raised
   (via [maybe]/[tee]), and finally returns [f]'s result or re-raises
   its exception. *)
let open_out_with path f =
  let ch =
    open_out_bin path in
  maybe f ch
  +> tee (fun _ -> close_out ch)
  +> function
       `Val v -> v
     | `Error e -> raise e

(* Same contract for binary reading. *)
let open_in_with path f =
  let ch =
    open_in_bin path in
  maybe f ch
  +> tee (fun _ -> close_in ch)
  +> function
       `Val v -> v
     | `Error e -> raise e
(* WARNING: unsafe development placeholder.  [Obj.magic 42] claims an
   int has any type; evaluating it as anything but an int is
   undefined behaviour.  Use only to stub unfinished code. *)
let undefined = Obj.magic 42
let undef = undefined
(* Print a list with [f], separating elements by the format [sep]
   (e.g. [";@ "]).  Every element but the last is wrapped in a box
   and followed by the separator. *)
let rec format_list (sep : (unit, Format.formatter, unit) format) f ppf = function
  | [] -> ()
  | [x] -> f ppf x
  | x::xs ->
      Format.fprintf ppf "@[%a@]%t%a"
        f x
        (fun ppf -> Format.fprintf ppf sep)
        (format_list sep f) xs
(* Render a list in OCaml syntax: "[ e1; e2; ... ]". *)
let format_ocaml_list f ppf xs =
  Format.fprintf ppf "[ @[%a@] ]"
    (format_list ";@ " f) xs
| null | https://raw.githubusercontent.com/mzp/ocaml-hoogle/dbfb2e970d65e41936baa0ba51c7f7596cc6c369/base.ml | ocaml | external (@@) : ('a -> 'b) -> 'a -> 'b = "%apply"
external (+>) : 'a -> ('a -> 'b) -> 'b = "%revapply"
let ($) f g x = f (g x)
let (!$) = Lazy.force
external id : 'a -> 'a = "%identity"
let uncurry f a b = f (a,b)
let curry f (a,b) = f a b
let flip f a b = f b a
let const a _ = a
let sure f =
function
Some x ->
Some (f x)
| None ->
None
let option f x = try Some (f x) with Not_found -> None
let maybe f x = try `Val (f x) with e -> `Error e
let tee f x = try ignore @@ f x; x with _ -> x
type ('a,'b) either = Left of 'a | Right of 'b
let failwithf fmt = Printf.kprintf (fun s () -> failwith s) fmt
let assoc x xs = (option @@ List.assoc x) xs
let string_of_list xs =
Printf.sprintf "[%s]"
@@ String.concat ";" xs
let rec unfold f init =
match f init with
Some (a, b) -> a :: unfold f b
| None -> []
let rec range a b =
if a >= b then
[]
else
a::range (a+1) b
let rec interperse delim =
function
[] -> []
| [x] -> [x]
| x::xs -> x::delim::interperse delim xs
let map_accum_left f init xs =
let f (accum,ys) x =
let accum',y =
f accum x in
(accum',y::ys) in
let accum,ys =
List.fold_left f (init,[]) xs in
accum,List.rev ys
let rec map_accum_right f init =
function
[] ->
init,[]
| x::xs ->
let (accum,ys) =
map_accum_right f init xs in
let (accum,y) =
f accum x in
accum,y::ys
let rec filter_map f =
function
x::xs ->
begin match f x with
Some y -> y::filter_map f xs
| None -> filter_map f xs
end
| [] ->
[]
let rec group_by f =
function
[] ->
[]
| x1::x2::xs when f x1 x2 ->
begin match group_by f @@ x2::xs with
y::ys ->
(x1::y)::ys
| _ ->
failwith "must not happen"
end
| x::xs ->
[x]::group_by f xs
let index x xs =
let rec loop i = function
[] ->
raise Not_found
| y::ys ->
if x = y then
i
else
loop (i+1) ys in
loop 0 xs
let string_of_char =
String.make 1
let hex =
Printf.sprintf "0x%x"
let open_out_with path f =
let ch =
open_out_bin path in
maybe f ch
+> tee (fun _ -> close_out ch)
+> function
`Val v -> v
| `Error e -> raise e
let open_in_with path f =
let ch =
open_in_bin path in
maybe f ch
+> tee (fun _ -> close_in ch)
+> function
`Val v -> v
| `Error e -> raise e
let undefined = Obj.magic 42
let undef = undefined
let rec format_list (sep : (unit, Format.formatter, unit) format) f ppf = function
| [] -> ()
| [x] -> f ppf x
| x::xs ->
Format.fprintf ppf "@[%a@]%t%a"
f x
(fun ppf -> Format.fprintf ppf sep)
(format_list sep f) xs
let format_ocaml_list f ppf xs =
Format.fprintf ppf "[ @[%a@] ]"
(format_list ";@ " f) xs
| |
4a27c54dda8cf5b3fe35cef1412ecca1ca059e918d06caa72e7de31f99aa63bc | owainlewis/ocaml-datastructures-algorithms | graph.ml | (* Graph Algorithms *)
(* Set would be a better choice here for when I get some time *)
module DiGraph = struct
exception VertexDoesNotExist
exception Cyclic of string
TODO parameterize this module
type t = string
type vertex = V of t * (t list ref)
type graph = vertex list ref
let create() = ref []
let ident v = let V (x, _) = v in x
let vertices g = List.map ident !g
let has_vertex g v = List.mem v (vertices g)
let add_vertex g v =
if has_vertex g v then g
else
let new_vertex = V (v, ref []) in
g := new_vertex :: !g;
g
let get_vertex g v =
let rec aux vert_list vertex =
match vert_list with
| [] -> None
| x::xs -> if (ident x) = vertex then Some(x) else aux xs vertex
in aux !g v
Adds a ONE - WAY connection . For undirected the operation needs to be done
in both directions
in both directions *)
let add_edge g src dest =
add_vertex g src;
add_vertex g dest;
match (get_vertex g src) with
| Some(v) -> let V (_, adjList) = v in adjList := dest :: !adjList
(* Todo in theory we can't reach this case *)
| _ -> failwith "Source vertex does not exist"
let successors g v =
let vtx = get_vertex g v in
match vtx with
| Some(vertex) -> let V (_, adjList) = vertex in !adjList
| None -> raise VertexDoesNotExist
(* Builds a directed graph from a list of edge pairs i.e [(1,2);(2,3)] etc *)
let build_directed_graph pairs =
let g = create() in
List.map (fun (src, dest) -> add_edge g src dest) pairs;
g
let sample_graph =
let edges = [
("a", "b"); ("a", "c");
("a", "d"); ("b", "e");
("c", "f"); ("d", "e");
("e", "f"); ("e", "g") ]
in build_directed_graph edges
let dfs graph start_state =
let rec depth_first_search graph visited = function
[] -> List.rev visited
| x::xs ->
if List.mem x visited then
dfs graph visited xs
else
let frontier = (successors graph x) @ xs
in dfs graph (x::visited) frontier
in depth_first_search graph [] (start_state::[])
end
let edges = [
("a", "b"); ("a", "c");
("a", "d"); ("b", "e");
("c", "f"); ("d", "e");
("e", "f"); ("e", "g") ]
let successors n edges =
let matching (s,_) = s = n in
List.map snd (List.filter matching edges)
let rec dfs edges visited = function
[] -> List.rev visited
| n::nodes ->
if List.mem n visited then
dfs edges visited nodes
else dfs edges (n::visited) ((successors n edges) @ nodes)
exception Cyclic of string
let topological_sort edges seed =
let rec sort path visited = function
[] -> visited
| n::nodes ->
if List.mem n path then raise (Cyclic n) else
let v' = if List.mem n visited then visited else
n :: sort (n::path) visited (successors n edges)
in sort path v' nodes
in sort [] [] [seed]
module type ADJ =
sig
type t
(* A graph represented as a mutable list of vertices *)
type graph
(* A graph vertex in the form (v, incoming, outgoing) *)
type vertex
(* An edge in the form source -> dest -> weight *)
type edge
val create : unit -> graph
val vertices : graph -> int list
val is_empty : graph -> bool
val add_vertex : graph -> int -> graph
val find_vertex : graph -> int -> vertex option
end
module Graph = struct
exception VertexDoesNotExist
type t = int
type vertex = V of int * (int list ref) * (int list ref)
type edge = vertex * vertex * int
type graph = vertex list ref
let create () = ref []
let vertices g =
List.map (fun (v) -> let V (x,_,_) = v in x) !g
(* TODO Duplication of logic *)
let out_func v = let V (_,x,_) = v in !x
let in_func v = let V (_,_,x) = v in !x
let flatten_edges g f =
let edges = List.map f !g in
edges |> List.flatten
let outgoing_edges g = flatten_edges g out_func
let incoming_edges g = flatten_edges g in_func
let is_empty g =
match !g with
| [] -> true
| _ -> false
In the following two functions vertex refers to the value not the type
let find_vertex graph vertex =
let rec find g v =
match g with
| [] -> None
| V (x,_,_) as vtx :: xs -> if v = x then Some(vtx) else find xs v
in find !graph vertex
Core operations
let add_vertex graph v =
let new_vertex = V (v, ref [], ref [])
in graph := new_vertex :: !graph;
graph
(* Consider cost implications here as it's going to be a bit crappy *)
let add_incoming_edge graph src dest =
let vtx = find_vertex graph src in
match vtx with
| Some(v) -> let V (_,i,_) = v in i := dest :: !i; v
| None -> failwith "No matching vertex"
let add_outgoing_edge graph src dest =
let vtx = find_vertex graph src in
match vtx with
| Some(v) -> let V (_, _, o) = v in o := dest :: !o; v
| None -> failwith "No matching vertex"
let add_undirected_edge graph src dest =
add_incoming_edge graph src dest;
add_outgoing_edge graph src dest;
graph;
end
| null | https://raw.githubusercontent.com/owainlewis/ocaml-datastructures-algorithms/4696fa4f5a015fc18e903b0b9ba2a1a8013a40ce/archive/graph.ml | ocaml | Graph Algorithms
Set would be a better choice here for when I get some time
Todo in theory we can't reach this case
Builds a directed graph from a list of edge pairs i.e [(1,2);(2,3)] etc
A graph represented as a mutable list of vertices
A graph vertex in the form (v, incoming, outgoing)
An edge in the form source -> dest -> weight
TODO Duplication of logic
Consider cost implications here as it's going to be a bit crappy |
module DiGraph = struct
exception VertexDoesNotExist
exception Cyclic of string
TODO parameterize this module
type t = string
type vertex = V of t * (t list ref)
type graph = vertex list ref
let create() = ref []
let ident v = let V (x, _) = v in x
let vertices g = List.map ident !g
let has_vertex g v = List.mem v (vertices g)
let add_vertex g v =
if has_vertex g v then g
else
let new_vertex = V (v, ref []) in
g := new_vertex :: !g;
g
let get_vertex g v =
let rec aux vert_list vertex =
match vert_list with
| [] -> None
| x::xs -> if (ident x) = vertex then Some(x) else aux xs vertex
in aux !g v
Adds a ONE - WAY connection . For undirected the operation needs to be done
in both directions
in both directions *)
let add_edge g src dest =
add_vertex g src;
add_vertex g dest;
match (get_vertex g src) with
| Some(v) -> let V (_, adjList) = v in adjList := dest :: !adjList
| _ -> failwith "Source vertex does not exist"
let successors g v =
let vtx = get_vertex g v in
match vtx with
| Some(vertex) -> let V (_, adjList) = vertex in !adjList
| None -> raise VertexDoesNotExist
let build_directed_graph pairs =
let g = create() in
List.map (fun (src, dest) -> add_edge g src dest) pairs;
g
let sample_graph =
let edges = [
("a", "b"); ("a", "c");
("a", "d"); ("b", "e");
("c", "f"); ("d", "e");
("e", "f"); ("e", "g") ]
in build_directed_graph edges
let dfs graph start_state =
let rec depth_first_search graph visited = function
[] -> List.rev visited
| x::xs ->
if List.mem x visited then
dfs graph visited xs
else
let frontier = (successors graph x) @ xs
in dfs graph (x::visited) frontier
in depth_first_search graph [] (start_state::[])
end
let edges = [
("a", "b"); ("a", "c");
("a", "d"); ("b", "e");
("c", "f"); ("d", "e");
("e", "f"); ("e", "g") ]
let successors n edges =
let matching (s,_) = s = n in
List.map snd (List.filter matching edges)
let rec dfs edges visited = function
[] -> List.rev visited
| n::nodes ->
if List.mem n visited then
dfs edges visited nodes
else dfs edges (n::visited) ((successors n edges) @ nodes)
exception Cyclic of string
let topological_sort edges seed =
let rec sort path visited = function
[] -> visited
| n::nodes ->
if List.mem n path then raise (Cyclic n) else
let v' = if List.mem n visited then visited else
n :: sort (n::path) visited (successors n edges)
in sort path v' nodes
in sort [] [] [seed]
module type ADJ =
sig
type t
type graph
type vertex
type edge
val create : unit -> graph
val vertices : graph -> int list
val is_empty : graph -> bool
val add_vertex : graph -> int -> graph
val find_vertex : graph -> int -> vertex option
end
module Graph = struct
exception VertexDoesNotExist
type t = int
type vertex = V of int * (int list ref) * (int list ref)
type edge = vertex * vertex * int
type graph = vertex list ref
let create () = ref []
let vertices g =
List.map (fun (v) -> let V (x,_,_) = v in x) !g
let out_func v = let V (_,x,_) = v in !x
let in_func v = let V (_,_,x) = v in !x
let flatten_edges g f =
let edges = List.map f !g in
edges |> List.flatten
let outgoing_edges g = flatten_edges g out_func
let incoming_edges g = flatten_edges g in_func
let is_empty g =
match !g with
| [] -> true
| _ -> false
In the following two functions vertex refers to the value not the type
let find_vertex graph vertex =
let rec find g v =
match g with
| [] -> None
| V (x,_,_) as vtx :: xs -> if v = x then Some(vtx) else find xs v
in find !graph vertex
Core operations
let add_vertex graph v =
let new_vertex = V (v, ref [], ref [])
in graph := new_vertex :: !graph;
graph
let add_incoming_edge graph src dest =
let vtx = find_vertex graph src in
match vtx with
| Some(v) -> let V (_,i,_) = v in i := dest :: !i; v
| None -> failwith "No matching vertex"
let add_outgoing_edge graph src dest =
let vtx = find_vertex graph src in
match vtx with
| Some(v) -> let V (_, _, o) = v in o := dest :: !o; v
| None -> failwith "No matching vertex"
let add_undirected_edge graph src dest =
add_incoming_edge graph src dest;
add_outgoing_edge graph src dest;
graph;
end
|
5ba6d787330a9cd2ca2ea11e96f1b4dd4731caf2048d542bedf29b7ee1e4eb4a | feeley/etos | build-parser.scm | ; file: "build-parser.scm"
(display "Building \"erlang.scm\"...")
;; To avoid annoying warning at compile-time.
;; important not to put this after the include...
;; Gambit would see it as a constant(block flag)
(define erlang-grammar #f)
(include "lalr-scm/lalr.scm")
(load "grammar.scm")
(gen-lalr1 erlang-grammar "erlang.scm")
(display "done.") (newline)
| null | https://raw.githubusercontent.com/feeley/etos/da9f089c1a7232d97827f8aa4f4b0862b7c5551f/compiler/build-parser.scm | scheme | file: "build-parser.scm"
To avoid annoying warning at compile-time.
important not to put this after the include...
Gambit would see it as a constant(block flag) |
(display "Building \"erlang.scm\"...")
(define erlang-grammar #f)
(include "lalr-scm/lalr.scm")
(load "grammar.scm")
(gen-lalr1 erlang-grammar "erlang.scm")
(display "done.") (newline)
|
def26508ba34fa0e4a0afd3e1866c3db30552c0461f677f96b29768dbe9c4dad | haskell/hackage-security | Repository.hs | -- | Abstract definition of a Repository
--
-- Most clients should only need to import this module if they wish to define
-- their own Repository implementations.
# LANGUAGE CPP #
module Hackage.Security.Client.Repository (
-- * Files
Metadata -- type index (really a kind)
, Binary -- type index (really a kind)
, RemoteFile(..)
, CachedFile(..)
, IndexFile(..)
, remoteFileDefaultFormat
, remoteFileDefaultInfo
-- * Repository proper
, Repository(..)
, AttemptNr(..)
, LogMessage(..)
, UpdateFailure(..)
, SomeRemoteError(..)
-- ** Downloaded files
, DownloadedFile(..)
-- ** Helpers
, mirrorsUnsupported
-- * Paths
, remoteRepoPath
, remoteRepoPath'
-- * Utility
, IsCached(..)
, mustCache
) where
import MyPrelude
import Control.Exception
import Data.Typeable (Typeable)
import qualified Codec.Archive.Tar.Index as Tar
import qualified Data.ByteString.Lazy as BS.L
import Distribution.Package
import Distribution.Text
import Hackage.Security.Client.Formats
import Hackage.Security.Client.Verify
import Hackage.Security.Trusted
import Hackage.Security.TUF
import Hackage.Security.Util.Checked
import Hackage.Security.Util.Path
import Hackage.Security.Util.Pretty
import Hackage.Security.Util.Some
import Hackage.Security.Util.Stack
{-------------------------------------------------------------------------------
Files
-------------------------------------------------------------------------------}
data Metadata
data Binary
-- | Abstract definition of files we might have to download
--
' RemoteFile ' is parametrized by the type of the formats that we can accept
-- from the remote repository, as well as with information on whether this file
-- is metadata actual binary content.
--
NOTE : lacks GADT support so constructors have only regular comments .
data RemoteFile :: * -> * -> * where
-- Timestamp metadata (@timestamp.json@)
--
-- We never have (explicit) file length available for timestamps.
RemoteTimestamp :: RemoteFile (FormatUn :- ()) Metadata
-- Root metadata (@root.json@)
--
-- For root information we may or may not have the file info available:
--
-- - If during the normal update process the new snapshot tells us the root
-- information has changed, we can use the file info from the snapshot.
-- - If however we need to update the root metadata due to a verification
-- exception we do not know the file info.
-- - We also do not know the file info during bootstrapping.
RemoteRoot :: Maybe (Trusted FileInfo) -> RemoteFile (FormatUn :- ()) Metadata
Snapshot metadata ( @snapshot.json@ )
--
-- We get file info of the snapshot from the timestamp.
RemoteSnapshot :: Trusted FileInfo -> RemoteFile (FormatUn :- ()) Metadata
-- Mirrors metadata (@mirrors.json@)
--
-- We get the file info from the snapshot.
RemoteMirrors :: Trusted FileInfo -> RemoteFile (FormatUn :- ()) Metadata
-- Index
--
-- The index file length comes from the snapshot.
--
-- When we request that the index is downloaded, it is up to the repository
-- to decide whether to download @00-index.tar@ or @00-index.tar.gz@.
-- The callback is told which format was requested.
--
-- It is a bug to request a file that the repository does not provide
-- (the snapshot should make it clear which files are available).
RemoteIndex :: HasFormat fs FormatGz
-> Formats fs (Trusted FileInfo)
-> RemoteFile fs Binary
-- Actual package
--
-- Package file length comes from the corresponding @targets.json@.
RemotePkgTarGz :: PackageIdentifier
-> Trusted FileInfo
-> RemoteFile (FormatGz :- ()) Binary
deriving instance Show (RemoteFile fs typ)
instance Pretty (RemoteFile fs typ) where
pretty RemoteTimestamp = "timestamp"
pretty (RemoteRoot _) = "root"
pretty (RemoteSnapshot _) = "snapshot"
pretty (RemoteMirrors _) = "mirrors"
pretty (RemoteIndex _ _) = "index"
pretty (RemotePkgTarGz pkgId _) = "package " ++ display pkgId
-- | Files that we might request from the local cache
data CachedFile =
-- | Timestamp metadata (@timestamp.json@)
CachedTimestamp
-- | Root metadata (@root.json@)
| CachedRoot
-- | Snapshot metadata (@snapshot.json@)
| CachedSnapshot
-- | Mirrors list (@mirrors.json@)
| CachedMirrors
deriving (Eq, Ord, Show)
instance Pretty CachedFile where
pretty CachedTimestamp = "timestamp"
pretty CachedRoot = "root"
pretty CachedSnapshot = "snapshot"
pretty CachedMirrors = "mirrors"
-- | Default format for each file type
--
-- For most file types we don't have a choice; for the index the repository
is only required to offer the - compressed format so that is the default .
remoteFileDefaultFormat :: RemoteFile fs typ -> Some (HasFormat fs)
remoteFileDefaultFormat RemoteTimestamp = Some $ HFZ FUn
remoteFileDefaultFormat (RemoteRoot _) = Some $ HFZ FUn
remoteFileDefaultFormat (RemoteSnapshot _) = Some $ HFZ FUn
remoteFileDefaultFormat (RemoteMirrors _) = Some $ HFZ FUn
remoteFileDefaultFormat (RemotePkgTarGz _ _) = Some $ HFZ FGz
remoteFileDefaultFormat (RemoteIndex pf _) = Some pf
-- | Default file info (see also 'remoteFileDefaultFormat')
remoteFileDefaultInfo :: RemoteFile fs typ -> Maybe (Trusted FileInfo)
remoteFileDefaultInfo RemoteTimestamp = Nothing
remoteFileDefaultInfo (RemoteRoot info) = info
remoteFileDefaultInfo (RemoteSnapshot info) = Just info
remoteFileDefaultInfo (RemoteMirrors info) = Just info
remoteFileDefaultInfo (RemotePkgTarGz _ info) = Just info
remoteFileDefaultInfo (RemoteIndex pf info) = Just $ formatsLookup pf info
{-------------------------------------------------------------------------------
Repository proper
-------------------------------------------------------------------------------}
-- | Repository
--
-- This is an abstract representation of a repository. It simply provides a way
-- to download metafiles and target files, without specifying how this is done.
-- For instance, for a local repository this could just be doing a file read,
-- whereas for remote repositories this could be using any kind of HTTP client.
data Repository down = DownloadedFile down => Repository {
-- | Get a file from the server
--
Responsibilies of ' repGetRemote ' :
--
-- * Download the file from the repository and make it available at a
-- temporary location
-- * Use the provided file length to protect against endless data attacks.
-- (Repositories such as local repositories that are not susceptible to
-- endless data attacks can safely ignore this argument.)
-- * Move the file from its temporary location to its permanent location
-- if verification succeeds.
--
-- NOTE: Calls to 'repGetRemote' should _always_ be in the scope of
-- 'repWithMirror'.
repGetRemote :: forall fs typ. Throws SomeRemoteError
=> AttemptNr
-> RemoteFile fs typ
-> Verify (Some (HasFormat fs), down typ)
-- | Get a cached file (if available)
, repGetCached :: CachedFile -> IO (Maybe (Path Absolute))
-- | Get the cached root
--
-- This is a separate method only because clients must ALWAYS have root
-- information available.
, repGetCachedRoot :: IO (Path Absolute)
-- | Clear all cached data
--
-- In particular, this should remove the snapshot and the timestamp.
-- It would also be okay, but not required, to delete the index.
, repClearCache :: IO ()
-- | Open the tarball for reading
--
-- This function has this shape so that:
--
-- * We can read multiple files from the tarball without having to open
-- and close the handle each time
-- * We can close the handle immediately when done.
, repWithIndex :: forall a. (Handle -> IO a) -> IO a
-- | Read the index index
, repGetIndexIdx :: IO Tar.TarIndex
-- | Lock the cache (during updates)
, repLockCache :: IO () -> IO ()
-- | Mirror selection
--
-- The purpose of 'repWithMirror' is to scope mirror selection. The idea
-- is that if we have
--
-- > repWithMirror mirrorList $
-- > someCallback
--
-- then the repository may pick a mirror before calling @someCallback@,
-- catch exceptions thrown by @someCallback@, and potentially try the
-- callback again with a different mirror.
--
-- The list of mirrors may be @Nothing@ if we haven't yet downloaded the
-- list of mirrors from the repository, or when our cached list of mirrors
-- is invalid. Of course, if we did download it, then the list of mirrors
-- may still be empty. In this case the repository must fall back to its
-- primary download mechanism.
--
-- Mirrors as currently defined (in terms of a "base URL") are inherently a
-- HTTP (or related) concept, so in repository implementations such as the
-- local-repo 'repWithMirrors' is probably just an identity operation (see
-- 'ignoreMirrors'). Conversely, HTTP implementations of repositories may
-- have other, out-of-band information (for example, coming from a cabal
-- config file) that they may use to influence mirror selection.
, repWithMirror :: forall a. Maybe [Mirror] -> IO a -> IO a
-- | Logging
, repLog :: LogMessage -> IO ()
-- | Layout of this repository
, repLayout :: RepoLayout
-- | Layout of the index
--
-- Since the repository hosts the index, the layout of the index is
-- not independent of the layout of the repository.
, repIndexLayout :: IndexLayout
-- | Description of the repository (used in the show instance)
, repDescription :: String
}
instance Show (Repository down) where
show = repDescription
-- | Helper function to implement 'repWithMirrors'.
mirrorsUnsupported :: Maybe [Mirror] -> IO a -> IO a
mirrorsUnsupported _ = id
-- | Are we requesting this information because of a previous validation error?
--
-- Clients can take advantage of this to tell caches to revalidate files.
newtype AttemptNr = AttemptNr Int
deriving (Eq, Ord, Num)
-- | Log messages
--
We use a ' RemoteFile ' rather than a ' RepoPath ' here because we might not have
-- a 'RepoPath' for the file that we were trying to download (that is, for
-- example if the server does not provide an uncompressed tarball, it doesn't
-- make much sense to list the path to that non-existing uncompressed tarball).
data LogMessage =
| Root information was updated
--
-- This message is issued when the root information is updated as part of
-- the normal check for updates procedure. If the root information is
updated because of a verification error WarningVerificationError is
-- issued instead.
LogRootUpdated
-- | A verification error
--
-- Verification errors can be temporary, and may be resolved later; hence
-- these are just warnings. (Verification errors that cannot be resolved
-- are thrown as exceptions.)
| LogVerificationError VerificationError
-- | Download a file from a repository
| forall fs typ. LogDownloading (RemoteFile fs typ)
-- | Incrementally updating a file from a repository
| forall fs. LogUpdating (RemoteFile fs Binary)
-- | Selected a particular mirror
| LogSelectedMirror MirrorDescription
-- | Updating a file failed
-- (we will instead download it whole)
| forall fs. LogCannotUpdate (RemoteFile fs Binary) UpdateFailure
-- | We got an exception with a particular mirror
-- (we will try with a different mirror if any are available)
| LogMirrorFailed MirrorDescription SomeException
-- | This log event is triggered before invoking a filesystem lock
-- operation that may block for a significant amount of time; once
-- the possibly blocking call completes successfully,
' LogLockWaitDone ' will be emitted .
--
@since 0.6.0
| LogLockWait (Path Absolute)
-- | Denotes completion of the operation that advertised a
-- 'LogLockWait' event
--
@since 0.6.0
| LogLockWaitDone (Path Absolute)
-- | Denotes the filesystem lock previously acquired (signaled by
-- 'LogLockWait') has been released.
--
@since 0.6.0
| LogUnlock (Path Absolute)
-- | Records why we are downloading a file rather than updating it.
data UpdateFailure =
-- | Server does not support incremental downloads
UpdateImpossibleUnsupported
-- | We don't have a local copy of the file to update
| UpdateImpossibleNoLocalCopy
-- | Update failed twice
--
If we attempt an incremental update the first time , and it fails , we let
-- it go round the loop, update local security information, and try again.
-- But if an incremental update then fails _again_, we instead attempt a
-- regular download.
| UpdateFailedTwice
-- | Update failed (for example: perhaps the local file got corrupted)
| UpdateFailed SomeException
{-------------------------------------------------------------------------------
Downloaded files
-------------------------------------------------------------------------------}
class DownloadedFile (down :: * -> *) where
-- | Verify a download file
downloadedVerify :: down a -> Trusted FileInfo -> IO Bool
-- | Read the file we just downloaded into memory
--
-- We never read binary data, only metadata.
downloadedRead :: down Metadata -> IO BS.L.ByteString
-- | Copy a downloaded file to its destination
downloadedCopyTo :: down a -> Path Absolute -> IO ()
------------------------------------------------------------------------------
Exceptions thrown by specific Repository implementations
------------------------------------------------------------------------------
Exceptions thrown by specific Repository implementations
-------------------------------------------------------------------------------}
-- | Repository-specific exceptions
--
For instance , for repositories using HTTP this might correspond to a 404 ;
-- for local repositories this might correspond to file-not-found, etc.
data SomeRemoteError :: * where
SomeRemoteError :: Exception e => e -> SomeRemoteError
deriving (Typeable)
#if MIN_VERSION_base(4,8,0)
deriving instance Show SomeRemoteError
instance Exception SomeRemoteError where displayException = pretty
#else
instance Exception SomeRemoteError
instance Show SomeRemoteError where show = pretty
#endif
instance Pretty SomeRemoteError where
pretty (SomeRemoteError ex) = displayException ex
{-------------------------------------------------------------------------------
Paths
-------------------------------------------------------------------------------}
remoteRepoPath :: RepoLayout -> RemoteFile fs typ -> Formats fs RepoPath
remoteRepoPath RepoLayout{..} = go
where
go :: RemoteFile fs typ -> Formats fs RepoPath
go RemoteTimestamp = FsUn $ repoLayoutTimestamp
go (RemoteRoot _) = FsUn $ repoLayoutRoot
go (RemoteSnapshot _) = FsUn $ repoLayoutSnapshot
go (RemoteMirrors _) = FsUn $ repoLayoutMirrors
go (RemotePkgTarGz pId _) = FsGz $ repoLayoutPkgTarGz pId
go (RemoteIndex _ lens) = formatsMap goIndex lens
goIndex :: Format f -> a -> RepoPath
goIndex FUn _ = repoLayoutIndexTar
goIndex FGz _ = repoLayoutIndexTarGz
remoteRepoPath' :: RepoLayout -> RemoteFile fs typ -> HasFormat fs f -> RepoPath
remoteRepoPath' repoLayout file format =
formatsLookup format $ remoteRepoPath repoLayout file
{-------------------------------------------------------------------------------
Utility
-------------------------------------------------------------------------------}
-- | Is a particular remote file cached?
data IsCached :: * -> * where
-- This remote file should be cached, and we ask for it by name
CacheAs :: CachedFile -> IsCached Metadata
-- We don't cache this remote file
--
-- This doesn't mean a Repository should not feel free to cache the file
-- if desired, but it does mean the generic algorithms will never ask for
-- this file from the cache.
DontCache :: IsCached Binary
-- The index is somewhat special: it should be cached, but we never
-- ask for it directly.
--
-- Instead, we will ask the Repository for files _from_ the index, which it
-- can serve however it likes. For instance, some repositories might keep
-- the index in uncompressed form, others in compressed form; some might
-- keep an index tarball index for quick access, others may scan the tarball
-- linearly, etc.
CacheIndex :: IsCached Binary
TODO : ^^ older does n't support GADT doc comments :-(
deriving instance Eq (IsCached typ)
deriving instance Show (IsCached typ)
-- | Which remote files should we cache locally?
mustCache :: RemoteFile fs typ -> IsCached typ
mustCache RemoteTimestamp = CacheAs CachedTimestamp
mustCache (RemoteRoot _) = CacheAs CachedRoot
mustCache (RemoteSnapshot _) = CacheAs CachedSnapshot
mustCache (RemoteMirrors _) = CacheAs CachedMirrors
mustCache (RemoteIndex {}) = CacheIndex
mustCache (RemotePkgTarGz _ _) = DontCache
instance Pretty LogMessage where
pretty LogRootUpdated =
"Root info updated"
pretty (LogVerificationError err) =
"Verification error: " ++ pretty err
pretty (LogDownloading file) =
"Downloading " ++ pretty file
pretty (LogUpdating file) =
"Updating " ++ pretty file
pretty (LogSelectedMirror mirror) =
"Selected mirror " ++ mirror
pretty (LogCannotUpdate file ex) =
"Cannot update " ++ pretty file ++ " (" ++ pretty ex ++ ")"
pretty (LogMirrorFailed mirror ex) =
"Exception " ++ displayException ex ++ " when using mirror " ++ mirror
pretty (LogLockWait file) =
"Waiting to acquire cache lock on " ++ pretty file
pretty (LogLockWaitDone file) =
"Acquired cache lock on " ++ pretty file
pretty (LogUnlock file) =
"Released cache lock on " ++ pretty file
instance Pretty UpdateFailure where
pretty UpdateImpossibleUnsupported =
"server does not provide incremental downloads"
pretty UpdateImpossibleNoLocalCopy =
"no local copy"
pretty UpdateFailedTwice =
"update failed twice"
pretty (UpdateFailed ex) =
displayException ex
| null | https://raw.githubusercontent.com/haskell/hackage-security/d1a13b02f5ea0bbb075c080e956eaeee06eca97d/hackage-security/src/Hackage/Security/Client/Repository.hs | haskell | | Abstract definition of a Repository
Most clients should only need to import this module if they wish to define
their own Repository implementations.
* Files
type index (really a kind)
type index (really a kind)
* Repository proper
** Downloaded files
** Helpers
* Paths
* Utility
------------------------------------------------------------------------------
Files
------------------------------------------------------------------------------
| Abstract definition of files we might have to download
from the remote repository, as well as with information on whether this file
is metadata actual binary content.
Timestamp metadata (@timestamp.json@)
We never have (explicit) file length available for timestamps.
Root metadata (@root.json@)
For root information we may or may not have the file info available:
- If during the normal update process the new snapshot tells us the root
information has changed, we can use the file info from the snapshot.
- If however we need to update the root metadata due to a verification
exception we do not know the file info.
- We also do not know the file info during bootstrapping.
We get file info of the snapshot from the timestamp.
Mirrors metadata (@mirrors.json@)
We get the file info from the snapshot.
Index
The index file length comes from the snapshot.
When we request that the index is downloaded, it is up to the repository
to decide whether to download @00-index.tar@ or @00-index.tar.gz@.
The callback is told which format was requested.
It is a bug to request a file that the repository does not provide
(the snapshot should make it clear which files are available).
Actual package
Package file length comes from the corresponding @targets.json@.
| Files that we might request from the local cache
| Timestamp metadata (@timestamp.json@)
| Root metadata (@root.json@)
| Snapshot metadata (@snapshot.json@)
| Mirrors list (@mirrors.json@)
| Default format for each file type
For most file types we don't have a choice; for the index the repository
| Default file info (see also 'remoteFileDefaultFormat')
------------------------------------------------------------------------------
Repository proper
------------------------------------------------------------------------------
| Repository
This is an abstract representation of a repository. It simply provides a way
to download metafiles and target files, without specifying how this is done.
For instance, for a local repository this could just be doing a file read,
whereas for remote repositories this could be using any kind of HTTP client.
| Get a file from the server
* Download the file from the repository and make it available at a
temporary location
* Use the provided file length to protect against endless data attacks.
(Repositories such as local repositories that are not susceptible to
endless data attacks can safely ignore this argument.)
* Move the file from its temporary location to its permanent location
if verification succeeds.
NOTE: Calls to 'repGetRemote' should _always_ be in the scope of
'repWithMirror'.
| Get a cached file (if available)
| Get the cached root
This is a separate method only because clients must ALWAYS have root
information available.
| Clear all cached data
In particular, this should remove the snapshot and the timestamp.
It would also be okay, but not required, to delete the index.
| Open the tarball for reading
This function has this shape so that:
* We can read multiple files from the tarball without having to open
and close the handle each time
* We can close the handle immediately when done.
| Read the index index
| Lock the cache (during updates)
| Mirror selection
The purpose of 'repWithMirror' is to scope mirror selection. The idea
is that if we have
> repWithMirror mirrorList $
> someCallback
then the repository may pick a mirror before calling @someCallback@,
catch exceptions thrown by @someCallback@, and potentially try the
callback again with a different mirror.
The list of mirrors may be @Nothing@ if we haven't yet downloaded the
list of mirrors from the repository, or when our cached list of mirrors
is invalid. Of course, if we did download it, then the list of mirrors
may still be empty. In this case the repository must fall back to its
primary download mechanism.
Mirrors as currently defined (in terms of a "base URL") are inherently a
HTTP (or related) concept, so in repository implementations such as the
local-repo 'repWithMirrors' is probably just an identity operation (see
'ignoreMirrors'). Conversely, HTTP implementations of repositories may
have other, out-of-band information (for example, coming from a cabal
config file) that they may use to influence mirror selection.
| Logging
| Layout of this repository
| Layout of the index
Since the repository hosts the index, the layout of the index is
not independent of the layout of the repository.
| Description of the repository (used in the show instance)
| Helper function to implement 'repWithMirrors'.
| Are we requesting this information because of a previous validation error?
Clients can take advantage of this to tell caches to revalidate files.
| Log messages
a 'RepoPath' for the file that we were trying to download (that is, for
example if the server does not provide an uncompressed tarball, it doesn't
make much sense to list the path to that non-existing uncompressed tarball).
This message is issued when the root information is updated as part of
the normal check for updates procedure. If the root information is
issued instead.
| A verification error
Verification errors can be temporary, and may be resolved later; hence
these are just warnings. (Verification errors that cannot be resolved
are thrown as exceptions.)
| Download a file from a repository
| Incrementally updating a file from a repository
| Selected a particular mirror
| Updating a file failed
(we will instead download it whole)
| We got an exception with a particular mirror
(we will try with a different mirror if any are available)
| This log event is triggered before invoking a filesystem lock
operation that may block for a significant amount of time; once
the possibly blocking call completes successfully,
| Denotes completion of the operation that advertised a
'LogLockWait' event
| Denotes the filesystem lock previously acquired (signaled by
'LogLockWait') has been released.
| Records why we are downloading a file rather than updating it.
| Server does not support incremental downloads
| We don't have a local copy of the file to update
| Update failed twice
it go round the loop, update local security information, and try again.
But if an incremental update then fails _again_, we instead attempt a
regular download.
| Update failed (for example: perhaps the local file got corrupted)
------------------------------------------------------------------------------
Downloaded files
------------------------------------------------------------------------------
| Verify a download file
| Read the file we just downloaded into memory
We never read binary data, only metadata.
| Copy a downloaded file to its destination
----------------------------------------------------------------------------
----------------------------------------------------------------------------
-----------------------------------------------------------------------------}
| Repository-specific exceptions
for local repositories this might correspond to file-not-found, etc.
------------------------------------------------------------------------------
Paths
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Utility
------------------------------------------------------------------------------
| Is a particular remote file cached?
This remote file should be cached, and we ask for it by name
We don't cache this remote file
This doesn't mean a Repository should not feel free to cache the file
if desired, but it does mean the generic algorithms will never ask for
this file from the cache.
The index is somewhat special: it should be cached, but we never
ask for it directly.
Instead, we will ask the Repository for files _from_ the index, which it
can serve however it likes. For instance, some repositories might keep
the index in uncompressed form, others in compressed form; some might
keep an index tarball index for quick access, others may scan the tarball
linearly, etc.
| Which remote files should we cache locally? | # LANGUAGE CPP #
module Hackage.Security.Client.Repository (
, RemoteFile(..)
, CachedFile(..)
, IndexFile(..)
, remoteFileDefaultFormat
, remoteFileDefaultInfo
, Repository(..)
, AttemptNr(..)
, LogMessage(..)
, UpdateFailure(..)
, SomeRemoteError(..)
, DownloadedFile(..)
, mirrorsUnsupported
, remoteRepoPath
, remoteRepoPath'
, IsCached(..)
, mustCache
) where
import MyPrelude
import Control.Exception
import Data.Typeable (Typeable)
import qualified Codec.Archive.Tar.Index as Tar
import qualified Data.ByteString.Lazy as BS.L
import Distribution.Package
import Distribution.Text
import Hackage.Security.Client.Formats
import Hackage.Security.Client.Verify
import Hackage.Security.Trusted
import Hackage.Security.TUF
import Hackage.Security.Util.Checked
import Hackage.Security.Util.Path
import Hackage.Security.Util.Pretty
import Hackage.Security.Util.Some
import Hackage.Security.Util.Stack
data Metadata
data Binary
' RemoteFile ' is parametrized by the type of the formats that we can accept
NOTE : lacks GADT support so constructors have only regular comments .
data RemoteFile :: * -> * -> * where
RemoteTimestamp :: RemoteFile (FormatUn :- ()) Metadata
RemoteRoot :: Maybe (Trusted FileInfo) -> RemoteFile (FormatUn :- ()) Metadata
Snapshot metadata ( @snapshot.json@ )
RemoteSnapshot :: Trusted FileInfo -> RemoteFile (FormatUn :- ()) Metadata
RemoteMirrors :: Trusted FileInfo -> RemoteFile (FormatUn :- ()) Metadata
RemoteIndex :: HasFormat fs FormatGz
-> Formats fs (Trusted FileInfo)
-> RemoteFile fs Binary
RemotePkgTarGz :: PackageIdentifier
-> Trusted FileInfo
-> RemoteFile (FormatGz :- ()) Binary
deriving instance Show (RemoteFile fs typ)
instance Pretty (RemoteFile fs typ) where
pretty RemoteTimestamp = "timestamp"
pretty (RemoteRoot _) = "root"
pretty (RemoteSnapshot _) = "snapshot"
pretty (RemoteMirrors _) = "mirrors"
pretty (RemoteIndex _ _) = "index"
pretty (RemotePkgTarGz pkgId _) = "package " ++ display pkgId
data CachedFile =
CachedTimestamp
| CachedRoot
| CachedSnapshot
| CachedMirrors
deriving (Eq, Ord, Show)
instance Pretty CachedFile where
pretty CachedTimestamp = "timestamp"
pretty CachedRoot = "root"
pretty CachedSnapshot = "snapshot"
pretty CachedMirrors = "mirrors"
is only required to offer the - compressed format so that is the default .
remoteFileDefaultFormat :: RemoteFile fs typ -> Some (HasFormat fs)
remoteFileDefaultFormat RemoteTimestamp = Some $ HFZ FUn
remoteFileDefaultFormat (RemoteRoot _) = Some $ HFZ FUn
remoteFileDefaultFormat (RemoteSnapshot _) = Some $ HFZ FUn
remoteFileDefaultFormat (RemoteMirrors _) = Some $ HFZ FUn
remoteFileDefaultFormat (RemotePkgTarGz _ _) = Some $ HFZ FGz
remoteFileDefaultFormat (RemoteIndex pf _) = Some pf
remoteFileDefaultInfo :: RemoteFile fs typ -> Maybe (Trusted FileInfo)
remoteFileDefaultInfo RemoteTimestamp = Nothing
remoteFileDefaultInfo (RemoteRoot info) = info
remoteFileDefaultInfo (RemoteSnapshot info) = Just info
remoteFileDefaultInfo (RemoteMirrors info) = Just info
remoteFileDefaultInfo (RemotePkgTarGz _ info) = Just info
remoteFileDefaultInfo (RemoteIndex pf info) = Just $ formatsLookup pf info
data Repository down = DownloadedFile down => Repository {
Responsibilies of ' repGetRemote ' :
repGetRemote :: forall fs typ. Throws SomeRemoteError
=> AttemptNr
-> RemoteFile fs typ
-> Verify (Some (HasFormat fs), down typ)
, repGetCached :: CachedFile -> IO (Maybe (Path Absolute))
, repGetCachedRoot :: IO (Path Absolute)
, repClearCache :: IO ()
, repWithIndex :: forall a. (Handle -> IO a) -> IO a
, repGetIndexIdx :: IO Tar.TarIndex
, repLockCache :: IO () -> IO ()
, repWithMirror :: forall a. Maybe [Mirror] -> IO a -> IO a
, repLog :: LogMessage -> IO ()
, repLayout :: RepoLayout
, repIndexLayout :: IndexLayout
, repDescription :: String
}
instance Show (Repository down) where
show = repDescription
mirrorsUnsupported :: Maybe [Mirror] -> IO a -> IO a
mirrorsUnsupported _ = id
newtype AttemptNr = AttemptNr Int
deriving (Eq, Ord, Num)
We use a ' RemoteFile ' rather than a ' RepoPath ' here because we might not have
data LogMessage =
| Root information was updated
updated because of a verification error WarningVerificationError is
LogRootUpdated
| LogVerificationError VerificationError
| forall fs typ. LogDownloading (RemoteFile fs typ)
| forall fs. LogUpdating (RemoteFile fs Binary)
| LogSelectedMirror MirrorDescription
| forall fs. LogCannotUpdate (RemoteFile fs Binary) UpdateFailure
| LogMirrorFailed MirrorDescription SomeException
' LogLockWaitDone ' will be emitted .
@since 0.6.0
| LogLockWait (Path Absolute)
@since 0.6.0
| LogLockWaitDone (Path Absolute)
@since 0.6.0
| LogUnlock (Path Absolute)
data UpdateFailure =
UpdateImpossibleUnsupported
| UpdateImpossibleNoLocalCopy
If we attempt an incremental update the first time , and it fails , we let
| UpdateFailedTwice
| UpdateFailed SomeException
class DownloadedFile (down :: * -> *) where
downloadedVerify :: down a -> Trusted FileInfo -> IO Bool
downloadedRead :: down Metadata -> IO BS.L.ByteString
downloadedCopyTo :: down a -> Path Absolute -> IO ()
Exceptions thrown by specific Repository implementations
Exceptions thrown by specific Repository implementations
For instance , for repositories using HTTP this might correspond to a 404 ;
data SomeRemoteError :: * where
SomeRemoteError :: Exception e => e -> SomeRemoteError
deriving (Typeable)
#if MIN_VERSION_base(4,8,0)
deriving instance Show SomeRemoteError
instance Exception SomeRemoteError where displayException = pretty
#else
instance Exception SomeRemoteError
instance Show SomeRemoteError where show = pretty
#endif
instance Pretty SomeRemoteError where
pretty (SomeRemoteError ex) = displayException ex
remoteRepoPath :: RepoLayout -> RemoteFile fs typ -> Formats fs RepoPath
remoteRepoPath RepoLayout{..} = go
where
go :: RemoteFile fs typ -> Formats fs RepoPath
go RemoteTimestamp = FsUn $ repoLayoutTimestamp
go (RemoteRoot _) = FsUn $ repoLayoutRoot
go (RemoteSnapshot _) = FsUn $ repoLayoutSnapshot
go (RemoteMirrors _) = FsUn $ repoLayoutMirrors
go (RemotePkgTarGz pId _) = FsGz $ repoLayoutPkgTarGz pId
go (RemoteIndex _ lens) = formatsMap goIndex lens
goIndex :: Format f -> a -> RepoPath
goIndex FUn _ = repoLayoutIndexTar
goIndex FGz _ = repoLayoutIndexTarGz
remoteRepoPath' :: RepoLayout -> RemoteFile fs typ -> HasFormat fs f -> RepoPath
remoteRepoPath' repoLayout file format =
formatsLookup format $ remoteRepoPath repoLayout file
data IsCached :: * -> * where
CacheAs :: CachedFile -> IsCached Metadata
DontCache :: IsCached Binary
CacheIndex :: IsCached Binary
TODO : ^^ older does n't support GADT doc comments :-(
deriving instance Eq (IsCached typ)
deriving instance Show (IsCached typ)
mustCache :: RemoteFile fs typ -> IsCached typ
mustCache RemoteTimestamp = CacheAs CachedTimestamp
mustCache (RemoteRoot _) = CacheAs CachedRoot
mustCache (RemoteSnapshot _) = CacheAs CachedSnapshot
mustCache (RemoteMirrors _) = CacheAs CachedMirrors
mustCache (RemoteIndex {}) = CacheIndex
mustCache (RemotePkgTarGz _ _) = DontCache
instance Pretty LogMessage where
pretty LogRootUpdated =
"Root info updated"
pretty (LogVerificationError err) =
"Verification error: " ++ pretty err
pretty (LogDownloading file) =
"Downloading " ++ pretty file
pretty (LogUpdating file) =
"Updating " ++ pretty file
pretty (LogSelectedMirror mirror) =
"Selected mirror " ++ mirror
pretty (LogCannotUpdate file ex) =
"Cannot update " ++ pretty file ++ " (" ++ pretty ex ++ ")"
pretty (LogMirrorFailed mirror ex) =
"Exception " ++ displayException ex ++ " when using mirror " ++ mirror
pretty (LogLockWait file) =
"Waiting to acquire cache lock on " ++ pretty file
pretty (LogLockWaitDone file) =
"Acquired cache lock on " ++ pretty file
pretty (LogUnlock file) =
"Released cache lock on " ++ pretty file
instance Pretty UpdateFailure where
pretty UpdateImpossibleUnsupported =
"server does not provide incremental downloads"
pretty UpdateImpossibleNoLocalCopy =
"no local copy"
pretty UpdateFailedTwice =
"update failed twice"
pretty (UpdateFailed ex) =
displayException ex
|
99c58905491f6b5c3154d37cac5867a78b802d0c5e03c76cfbb56e9b40eaa736 | jbreindel/battlecraft | bc_query_util.erl |
-module(bc_query_util).
-include_lib("stdlib/include/qlc.hrl").
-export([mnesia_query/3]).
mnesia_query(Gen, Offset, Limit) ->
mnesia:transaction(fun() ->
Qh = Gen(),
if Offset > 0 ->
qlc:next_answers(Qh, Offset);
true ->
ok
end,
qlc:next_answers(Qh, Limit)
end).
| null | https://raw.githubusercontent.com/jbreindel/battlecraft/622131a1ad8c46f19cf9ffd6bf32ba4a74ef4137/apps/bc_model/src/bc_query_util.erl | erlang |
-module(bc_query_util).
-include_lib("stdlib/include/qlc.hrl").
-export([mnesia_query/3]).
mnesia_query(Gen, Offset, Limit) ->
mnesia:transaction(fun() ->
Qh = Gen(),
if Offset > 0 ->
qlc:next_answers(Qh, Offset);
true ->
ok
end,
qlc:next_answers(Qh, Limit)
end).
| |
fd812aa58c569acfab79dded34a09a6f16de21bd76e462848511ae5a82b6cec7 | dwayne/eopl3 | senv.test.rkt | #lang racket
(require "./senv.rkt")
(require rackunit)
(let ([senv (extend-senv
'd
(extend-senv
'y
(extend-senv-rec
'f
(extend-senv
'x
(extend-senv
'y
(empty-senv))))))])
(check-equal? (apply-senv senv 'd) (cons 0 #f))
(check-equal? (apply-senv senv 'y) (cons 1 #f))
(check-equal? (apply-senv senv 'f) (cons 2 #t))
(check-equal? (apply-senv senv 'x) (cons 3 #f))
(check-exn #rx"No binding for a" (lambda () (apply-senv senv 'a))))
| null | https://raw.githubusercontent.com/dwayne/eopl3/9d5fdb2a8dafac3bc48852d49cda8b83e7a825cf/solutions/03-ch3/interpreters/racket/NAMELESS-PROC-3.40/senv.test.rkt | racket | #lang racket
(require "./senv.rkt")
(require rackunit)
(let ([senv (extend-senv
'd
(extend-senv
'y
(extend-senv-rec
'f
(extend-senv
'x
(extend-senv
'y
(empty-senv))))))])
(check-equal? (apply-senv senv 'd) (cons 0 #f))
(check-equal? (apply-senv senv 'y) (cons 1 #f))
(check-equal? (apply-senv senv 'f) (cons 2 #t))
(check-equal? (apply-senv senv 'x) (cons 3 #f))
(check-exn #rx"No binding for a" (lambda () (apply-senv senv 'a))))
| |
64665258f2c974f1a64f5c96656d3ce03ea2045310c27fff60e03858f410632e | anoma/juvix | Positive.hs | module Asm.Run.Positive where
import Asm.Run.Base
import Base
data PosTest = PosTest
{ _name :: String,
_relDir :: Path Rel Dir,
_file :: Path Rel File,
_expectedFile :: Path Rel File
}
root :: Path Abs Dir
root = relToProject $(mkRelDir "tests/Asm/positive")
testDescr :: PosTest -> TestDescr
testDescr PosTest {..} =
let tRoot = root <//> _relDir
file' = tRoot <//> _file
expected' = tRoot <//> _expectedFile
in TestDescr
{ _testName = _name,
_testRoot = tRoot,
_testAssertion = Steps $ asmRunAssertion file' expected' return (const (return ()))
}
allTests :: TestTree
allTests =
testGroup
"JuvixAsm run positive tests"
(map (mkTest . testDescr) tests)
tests :: [PosTest]
tests =
[ PosTest
"Arithmetic opcodes"
$(mkRelDir ".")
$(mkRelFile "test001.jva")
$(mkRelFile "out/test001.out"),
PosTest
"Direct call"
$(mkRelDir ".")
$(mkRelFile "test002.jva")
$(mkRelFile "out/test002.out"),
PosTest
"Indirect call"
$(mkRelDir ".")
$(mkRelFile "test003.jva")
$(mkRelFile "out/test003.out"),
PosTest
"Tail calls"
$(mkRelDir ".")
$(mkRelFile "test004.jva")
$(mkRelFile "out/test004.out"),
PosTest
"Tracing IO"
$(mkRelDir ".")
$(mkRelFile "test005.jva")
$(mkRelFile "out/test005.out"),
PosTest
"IO builtins"
$(mkRelDir ".")
$(mkRelFile "test006.jva")
$(mkRelFile "out/test006.out"),
PosTest
"Higher-order functions"
$(mkRelDir ".")
$(mkRelFile "test007.jva")
$(mkRelFile "out/test007.out"),
PosTest
"Branch"
$(mkRelDir ".")
$(mkRelFile "test008.jva")
$(mkRelFile "out/test008.out"),
PosTest
"Case"
$(mkRelDir ".")
$(mkRelFile "test009.jva")
$(mkRelFile "out/test009.out"),
PosTest
"Recursion"
$(mkRelDir ".")
$(mkRelFile "test010.jva")
$(mkRelFile "out/test010.out"),
PosTest
"Tail recursion"
$(mkRelDir ".")
$(mkRelFile "test011.jva")
$(mkRelFile "out/test011.out"),
PosTest
"Temporary stack"
$(mkRelDir ".")
$(mkRelFile "test012.jva")
$(mkRelFile "out/test012.out"),
PosTest
"Fibonacci numbers in linear time"
$(mkRelDir ".")
$(mkRelFile "test013.jva")
$(mkRelFile "out/test013.out"),
PosTest
"Trees"
$(mkRelDir ".")
$(mkRelFile "test014.jva")
$(mkRelFile "out/test014.out"),
PosTest
"Functions returning functions"
$(mkRelDir ".")
$(mkRelFile "test015.jva")
$(mkRelFile "out/test015.out"),
PosTest
"Arithmetic"
$(mkRelDir ".")
$(mkRelFile "test016.jva")
$(mkRelFile "out/test016.out"),
PosTest
"Closures as arguments"
$(mkRelDir ".")
$(mkRelFile "test017.jva")
$(mkRelFile "out/test017.out"),
PosTest
"Closure extension"
$(mkRelDir ".")
$(mkRelFile "test018.jva")
$(mkRelFile "out/test018.out"),
PosTest
"Recursion through higher-order functions"
$(mkRelDir ".")
$(mkRelFile "test019.jva")
$(mkRelFile "out/test019.out"),
PosTest
"Tail recursion through higher-order functions"
$(mkRelDir ".")
$(mkRelFile "test020.jva")
$(mkRelFile "out/test020.out"),
PosTest
"Higher-order functions and recursion"
$(mkRelDir ".")
$(mkRelFile "test021.jva")
$(mkRelFile "out/test021.out"),
PosTest
"Self-application"
$(mkRelDir ".")
$(mkRelFile "test022.jva")
$(mkRelFile "out/test022.out"),
PosTest
"McCarthy's 91 function"
$(mkRelDir ".")
$(mkRelFile "test023.jva")
$(mkRelFile "out/test023.out"),
PosTest
"Higher-order recursive functions"
$(mkRelDir ".")
$(mkRelFile "test024.jva")
$(mkRelFile "out/test024.out"),
PosTest
"Dynamic closure extension"
$(mkRelDir ".")
$(mkRelFile "test025.jva")
$(mkRelFile "out/test025.out"),
PosTest
"Currying & uncurrying"
$(mkRelDir ".")
$(mkRelFile "test026.jva")
$(mkRelFile "out/test026.out"),
PosTest
"Fast exponentiation"
$(mkRelDir ".")
$(mkRelFile "test027.jva")
$(mkRelFile "out/test027.out"),
PosTest
"Lists"
$(mkRelDir ".")
$(mkRelFile "test028.jva")
$(mkRelFile "out/test028.out"),
PosTest
"Structural equality"
$(mkRelDir ".")
$(mkRelFile "test029.jva")
$(mkRelFile "out/test029.out"),
PosTest
"Mutual recursion"
$(mkRelDir ".")
$(mkRelFile "test030.jva")
$(mkRelFile "out/test030.out"),
PosTest
"Temporary stack with branching"
$(mkRelDir ".")
$(mkRelFile "test031.jva")
$(mkRelFile "out/test031.out"),
PosTest
"Church numerals"
$(mkRelDir ".")
$(mkRelFile "test032.jva")
$(mkRelFile "out/test032.out"),
PosTest
"Ackermann function"
$(mkRelDir ".")
$(mkRelFile "test033.jva")
$(mkRelFile "out/test033.out"),
PosTest
"Higher-order function composition"
$(mkRelDir ".")
$(mkRelFile "test034.jva")
$(mkRelFile "out/test034.out"),
PosTest
"Nested lists"
$(mkRelDir ".")
$(mkRelFile "test035.jva")
$(mkRelFile "out/test035.out"),
PosTest
"Streams without memoization"
$(mkRelDir ".")
$(mkRelFile "test036.jva")
$(mkRelFile "out/test036.out"),
PosTest
"String instructions"
$(mkRelDir ".")
$(mkRelFile "test037.jva")
$(mkRelFile "out/test037.out")
]
| null | https://raw.githubusercontent.com/anoma/juvix/fab40c6c9932cc12592fbacc54fac1ddcd3b2228/test/Asm/Run/Positive.hs | haskell | module Asm.Run.Positive where
import Asm.Run.Base
import Base
data PosTest = PosTest
{ _name :: String,
_relDir :: Path Rel Dir,
_file :: Path Rel File,
_expectedFile :: Path Rel File
}
root :: Path Abs Dir
root = relToProject $(mkRelDir "tests/Asm/positive")
testDescr :: PosTest -> TestDescr
testDescr PosTest {..} =
let tRoot = root <//> _relDir
file' = tRoot <//> _file
expected' = tRoot <//> _expectedFile
in TestDescr
{ _testName = _name,
_testRoot = tRoot,
_testAssertion = Steps $ asmRunAssertion file' expected' return (const (return ()))
}
allTests :: TestTree
allTests =
testGroup
"JuvixAsm run positive tests"
(map (mkTest . testDescr) tests)
tests :: [PosTest]
tests =
[ PosTest
"Arithmetic opcodes"
$(mkRelDir ".")
$(mkRelFile "test001.jva")
$(mkRelFile "out/test001.out"),
PosTest
"Direct call"
$(mkRelDir ".")
$(mkRelFile "test002.jva")
$(mkRelFile "out/test002.out"),
PosTest
"Indirect call"
$(mkRelDir ".")
$(mkRelFile "test003.jva")
$(mkRelFile "out/test003.out"),
PosTest
"Tail calls"
$(mkRelDir ".")
$(mkRelFile "test004.jva")
$(mkRelFile "out/test004.out"),
PosTest
"Tracing IO"
$(mkRelDir ".")
$(mkRelFile "test005.jva")
$(mkRelFile "out/test005.out"),
PosTest
"IO builtins"
$(mkRelDir ".")
$(mkRelFile "test006.jva")
$(mkRelFile "out/test006.out"),
PosTest
"Higher-order functions"
$(mkRelDir ".")
$(mkRelFile "test007.jva")
$(mkRelFile "out/test007.out"),
PosTest
"Branch"
$(mkRelDir ".")
$(mkRelFile "test008.jva")
$(mkRelFile "out/test008.out"),
PosTest
"Case"
$(mkRelDir ".")
$(mkRelFile "test009.jva")
$(mkRelFile "out/test009.out"),
PosTest
"Recursion"
$(mkRelDir ".")
$(mkRelFile "test010.jva")
$(mkRelFile "out/test010.out"),
PosTest
"Tail recursion"
$(mkRelDir ".")
$(mkRelFile "test011.jva")
$(mkRelFile "out/test011.out"),
PosTest
"Temporary stack"
$(mkRelDir ".")
$(mkRelFile "test012.jva")
$(mkRelFile "out/test012.out"),
PosTest
"Fibonacci numbers in linear time"
$(mkRelDir ".")
$(mkRelFile "test013.jva")
$(mkRelFile "out/test013.out"),
PosTest
"Trees"
$(mkRelDir ".")
$(mkRelFile "test014.jva")
$(mkRelFile "out/test014.out"),
PosTest
"Functions returning functions"
$(mkRelDir ".")
$(mkRelFile "test015.jva")
$(mkRelFile "out/test015.out"),
PosTest
"Arithmetic"
$(mkRelDir ".")
$(mkRelFile "test016.jva")
$(mkRelFile "out/test016.out"),
PosTest
"Closures as arguments"
$(mkRelDir ".")
$(mkRelFile "test017.jva")
$(mkRelFile "out/test017.out"),
PosTest
"Closure extension"
$(mkRelDir ".")
$(mkRelFile "test018.jva")
$(mkRelFile "out/test018.out"),
PosTest
"Recursion through higher-order functions"
$(mkRelDir ".")
$(mkRelFile "test019.jva")
$(mkRelFile "out/test019.out"),
PosTest
"Tail recursion through higher-order functions"
$(mkRelDir ".")
$(mkRelFile "test020.jva")
$(mkRelFile "out/test020.out"),
PosTest
"Higher-order functions and recursion"
$(mkRelDir ".")
$(mkRelFile "test021.jva")
$(mkRelFile "out/test021.out"),
PosTest
"Self-application"
$(mkRelDir ".")
$(mkRelFile "test022.jva")
$(mkRelFile "out/test022.out"),
PosTest
"McCarthy's 91 function"
$(mkRelDir ".")
$(mkRelFile "test023.jva")
$(mkRelFile "out/test023.out"),
PosTest
"Higher-order recursive functions"
$(mkRelDir ".")
$(mkRelFile "test024.jva")
$(mkRelFile "out/test024.out"),
PosTest
"Dynamic closure extension"
$(mkRelDir ".")
$(mkRelFile "test025.jva")
$(mkRelFile "out/test025.out"),
PosTest
"Currying & uncurrying"
$(mkRelDir ".")
$(mkRelFile "test026.jva")
$(mkRelFile "out/test026.out"),
PosTest
"Fast exponentiation"
$(mkRelDir ".")
$(mkRelFile "test027.jva")
$(mkRelFile "out/test027.out"),
PosTest
"Lists"
$(mkRelDir ".")
$(mkRelFile "test028.jva")
$(mkRelFile "out/test028.out"),
PosTest
"Structural equality"
$(mkRelDir ".")
$(mkRelFile "test029.jva")
$(mkRelFile "out/test029.out"),
PosTest
"Mutual recursion"
$(mkRelDir ".")
$(mkRelFile "test030.jva")
$(mkRelFile "out/test030.out"),
PosTest
"Temporary stack with branching"
$(mkRelDir ".")
$(mkRelFile "test031.jva")
$(mkRelFile "out/test031.out"),
PosTest
"Church numerals"
$(mkRelDir ".")
$(mkRelFile "test032.jva")
$(mkRelFile "out/test032.out"),
PosTest
"Ackermann function"
$(mkRelDir ".")
$(mkRelFile "test033.jva")
$(mkRelFile "out/test033.out"),
PosTest
"Higher-order function composition"
$(mkRelDir ".")
$(mkRelFile "test034.jva")
$(mkRelFile "out/test034.out"),
PosTest
"Nested lists"
$(mkRelDir ".")
$(mkRelFile "test035.jva")
$(mkRelFile "out/test035.out"),
PosTest
"Streams without memoization"
$(mkRelDir ".")
$(mkRelFile "test036.jva")
$(mkRelFile "out/test036.out"),
PosTest
"String instructions"
$(mkRelDir ".")
$(mkRelFile "test037.jva")
$(mkRelFile "out/test037.out")
]
| |
468e3dfdf5e4e3032fea356ba1918394a4ffea73dc54ab90e0decb69ff98af7f | parapluu/Concuerror | autocomplete_common.erl | -module(autocomplete_common).
-export([test/2, test/3]).
-include_lib("stdlib/include/assert.hrl").
test(Command, Data) ->
test(Command, Data, []).
test(Command, Data, Options) ->
try
main(Command, Data, Options)
catch
C:R ->
io:format(standard_error, "Class: ~p~nReason: ~p~n", [C, R]),
halt(1)
end.
main(Command, DataRaw, Options) ->
AutoOutput = os:cmd(Command),
AutoTokens = string:tokens(AutoOutput, " \n"),
Auto = usort(AutoTokens, Options),
Data = usort(DataRaw, []),
io:format(standard_error, "Auto -- Data : ~p~n", [Auto -- Data]),
io:format(standard_error, "Data -- Auto : ~p~n", [Data -- Auto]),
?assertEqual(Data, Auto).
usort(List, Options) ->
USort = lists:usort(List),
Sort = lists:usort(List),
?assertEqual(USort, Sort),
case lists:member(no_sort_check, Options) of
true -> ok;
false -> ?assertEqual(USort, List)
end,
USort.
| null | https://raw.githubusercontent.com/parapluu/Concuerror/152a5ccee0b6e97d8c3329c2167166435329d261/tests-real/suites/options/autocomplete/autocomplete_common.erl | erlang | -module(autocomplete_common).
-export([test/2, test/3]).
-include_lib("stdlib/include/assert.hrl").
test(Command, Data) ->
test(Command, Data, []).
test(Command, Data, Options) ->
try
main(Command, Data, Options)
catch
C:R ->
io:format(standard_error, "Class: ~p~nReason: ~p~n", [C, R]),
halt(1)
end.
main(Command, DataRaw, Options) ->
AutoOutput = os:cmd(Command),
AutoTokens = string:tokens(AutoOutput, " \n"),
Auto = usort(AutoTokens, Options),
Data = usort(DataRaw, []),
io:format(standard_error, "Auto -- Data : ~p~n", [Auto -- Data]),
io:format(standard_error, "Data -- Auto : ~p~n", [Data -- Auto]),
?assertEqual(Data, Auto).
usort(List, Options) ->
USort = lists:usort(List),
Sort = lists:usort(List),
?assertEqual(USort, Sort),
case lists:member(no_sort_check, Options) of
true -> ok;
false -> ?assertEqual(USort, List)
end,
USort.
| |
2eaba0956ecd8b85f6b68a8fa57fbddca36dfa6800a6bb009d7ea658168454cc | teamwalnut/graphql-ppx | result_structure.ml | type exhaustive_flag = Exhaustive | Nonexhaustive
type loc = Source_pos.ast_location
type name = string Source_pos.spanning
type field_result =
| Fr_named_field of {
name : string;
loc_key : loc;
loc : loc;
type_ : t;
arguments : Graphql_ast.arguments;
}
| Fr_fragment_spread of {
key : string;
loc : loc;
name : string;
type_name : string option;
arguments : string list;
}
and t =
| Res_nullable of { loc : loc; inner : t }
| Res_array of { loc : loc; inner : t }
| Res_id of { loc : loc }
| Res_string of { loc : loc }
| Res_int of { loc : loc }
| Res_float of { loc : loc }
| Res_boolean of { loc : loc }
| Res_raw_scalar of { loc : loc }
| Res_poly_enum of {
loc : loc;
enum_meta : Schema.enum_meta;
omit_future_value : bool;
}
| Res_custom_decoder of { loc : loc; ident : string; inner : t }
| Res_record of {
loc : loc;
name : string;
fields : field_result list;
type_name : string option;
interface_fragments : (string * (string * t) list) option;
}
| Res_object of {
loc : loc;
name : string;
fields : field_result list;
type_name : string option;
interface_fragments : (string * (string * t) list) option;
}
| Res_poly_variant_selection_set of {
loc : loc;
name : string;
fragments : (name * t) list;
}
| Res_poly_variant_union of {
loc : loc;
name : string;
fragments : (name * t) list;
exhaustive : exhaustive_flag;
omit_future_value : bool;
}
| Res_poly_variant_interface of {
loc : loc;
name : string;
fragments : (string * t) list;
}
| Res_solo_fragment_spread of {
loc : loc;
name : string;
type_name : string;
arguments : string list;
}
| Res_error of { loc : loc; message : string }
type definition =
| Def_fragment of {
name : string;
variable_definitions :
Graphql_ast.variable_definitions Source_pos.spanning option;
has_error : bool;
fragment : Graphql_ast.fragment Source_pos.spanning;
type_name : string option;
inner : t;
}
| Def_operation of {
variable_definitions :
Graphql_ast.variable_definitions Source_pos.spanning option;
has_error : bool;
operation : Graphql_ast.operation Source_pos.spanning;
inner : t;
}
let res_loc = function
| Res_nullable { loc }
| Res_array { loc }
| Res_id { loc }
| Res_string { loc }
| Res_int { loc }
| Res_float { loc }
| Res_boolean { loc }
| Res_raw_scalar { loc }
| Res_poly_enum { loc }
| Res_custom_decoder { loc }
| Res_record { loc }
| Res_object { loc }
| Res_poly_variant_selection_set { loc }
| Res_poly_variant_union { loc }
| Res_poly_variant_interface { loc }
| Res_solo_fragment_spread { loc }
| Res_error { loc } ->
loc
let can_be_absent_as_field = function Res_nullable _ -> true | _ -> false
| null | https://raw.githubusercontent.com/teamwalnut/graphql-ppx/8276452ebe8d89a748b6b267afc94161650ab620/src/graphql_compiler/result_structure.ml | ocaml | type exhaustive_flag = Exhaustive | Nonexhaustive
type loc = Source_pos.ast_location
type name = string Source_pos.spanning
type field_result =
| Fr_named_field of {
name : string;
loc_key : loc;
loc : loc;
type_ : t;
arguments : Graphql_ast.arguments;
}
| Fr_fragment_spread of {
key : string;
loc : loc;
name : string;
type_name : string option;
arguments : string list;
}
and t =
| Res_nullable of { loc : loc; inner : t }
| Res_array of { loc : loc; inner : t }
| Res_id of { loc : loc }
| Res_string of { loc : loc }
| Res_int of { loc : loc }
| Res_float of { loc : loc }
| Res_boolean of { loc : loc }
| Res_raw_scalar of { loc : loc }
| Res_poly_enum of {
loc : loc;
enum_meta : Schema.enum_meta;
omit_future_value : bool;
}
| Res_custom_decoder of { loc : loc; ident : string; inner : t }
| Res_record of {
loc : loc;
name : string;
fields : field_result list;
type_name : string option;
interface_fragments : (string * (string * t) list) option;
}
| Res_object of {
loc : loc;
name : string;
fields : field_result list;
type_name : string option;
interface_fragments : (string * (string * t) list) option;
}
| Res_poly_variant_selection_set of {
loc : loc;
name : string;
fragments : (name * t) list;
}
| Res_poly_variant_union of {
loc : loc;
name : string;
fragments : (name * t) list;
exhaustive : exhaustive_flag;
omit_future_value : bool;
}
| Res_poly_variant_interface of {
loc : loc;
name : string;
fragments : (string * t) list;
}
| Res_solo_fragment_spread of {
loc : loc;
name : string;
type_name : string;
arguments : string list;
}
| Res_error of { loc : loc; message : string }
type definition =
| Def_fragment of {
name : string;
variable_definitions :
Graphql_ast.variable_definitions Source_pos.spanning option;
has_error : bool;
fragment : Graphql_ast.fragment Source_pos.spanning;
type_name : string option;
inner : t;
}
| Def_operation of {
variable_definitions :
Graphql_ast.variable_definitions Source_pos.spanning option;
has_error : bool;
operation : Graphql_ast.operation Source_pos.spanning;
inner : t;
}
let res_loc = function
| Res_nullable { loc }
| Res_array { loc }
| Res_id { loc }
| Res_string { loc }
| Res_int { loc }
| Res_float { loc }
| Res_boolean { loc }
| Res_raw_scalar { loc }
| Res_poly_enum { loc }
| Res_custom_decoder { loc }
| Res_record { loc }
| Res_object { loc }
| Res_poly_variant_selection_set { loc }
| Res_poly_variant_union { loc }
| Res_poly_variant_interface { loc }
| Res_solo_fragment_spread { loc }
| Res_error { loc } ->
loc
let can_be_absent_as_field = function Res_nullable _ -> true | _ -> false
| |
d265a59410ad2a44f369c5625b4ae49fef9cb6c1f221ddd6ebb246194d5372a2 | jtdaugherty/tracy | Box.hs | module Tracy.Objects.Box
( box
)
where
import Linear
import Tracy.Types
import Tracy.BoundingBox
import Tracy.Util
box :: Material -> Object
box m =
let bbox = boundingBox (V3 (-0.5) (-0.5) (-0.5)) (V3 0.5 0.5 0.5)
in Object { _objectMaterial = m
, _hit = hitBox bbox m
, _shadow_hit = shadowHitBox bbox
, _bounding_box = Just bbox
, _areaLightImpl = Nothing
}
shadowHitBox :: BBox -> Ray -> Maybe Double
shadowHitBox bb r =
case boundingBoxHit bb r of
Nothing -> Nothing
Just (_, _, t, _) -> Just t
hitBox :: BBox -> Material -> Ray -> Maybe (Shade, Double)
hitBox bbox m r =
case boundingBoxHit bbox r of
Nothing -> Nothing
Just (_, faceNorm, t, localHP) ->
let sh = defaultShade { _localHitPoint = localHP
, _material = m
, _normal = faceNorm
}
in Just (sh, t)
| null | https://raw.githubusercontent.com/jtdaugherty/tracy/ad36ea16a3b9cda5071ca72374d6e1c1b415d520/src/Tracy/Objects/Box.hs | haskell | module Tracy.Objects.Box
( box
)
where
import Linear
import Tracy.Types
import Tracy.BoundingBox
import Tracy.Util
box :: Material -> Object
box m =
let bbox = boundingBox (V3 (-0.5) (-0.5) (-0.5)) (V3 0.5 0.5 0.5)
in Object { _objectMaterial = m
, _hit = hitBox bbox m
, _shadow_hit = shadowHitBox bbox
, _bounding_box = Just bbox
, _areaLightImpl = Nothing
}
shadowHitBox :: BBox -> Ray -> Maybe Double
shadowHitBox bb r =
case boundingBoxHit bb r of
Nothing -> Nothing
Just (_, _, t, _) -> Just t
hitBox :: BBox -> Material -> Ray -> Maybe (Shade, Double)
hitBox bbox m r =
case boundingBoxHit bbox r of
Nothing -> Nothing
Just (_, faceNorm, t, localHP) ->
let sh = defaultShade { _localHitPoint = localHP
, _material = m
, _normal = faceNorm
}
in Just (sh, t)
| |
b1601d2b158456b9280b48e964a40a620ae50d5d11dba52b4f52c81a545d31a8 | eamsden/Animas | AFRPTestsDer.hs | $ I d : AFRPTestsDer.hs , v 1.2 2003/11/10 21:28:58 antony Exp $
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* A F R P *
* *
* Module : AFRPTestsDer *
* Purpose : Test cases for derivative *
* Authors : and *
* *
* Copyright ( c ) Yale University , 2003 *
* *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
******************************************************************************
* A F R P *
* *
* Module: AFRPTestsDer *
* Purpose: Test cases for derivative *
* Authors: Antony Courtney and Henrik Nilsson *
* *
* Copyright (c) Yale University, 2003 *
* *
******************************************************************************
-}
module AFRPTestsDer (der_tr, der_trs) where
import FRP.Yampa
import AFRPTestsCommon
------------------------------------------------------------------------------
-- Test cases for derivative
------------------------------------------------------------------------------
der_step = 0.001
der_N = 1000
der_t0 :: [Double]
First value is always 0
embed derivative
(deltaEncode der_step
[sin(2 * pi * t) | t <- [0.0, der_step ..]])
-- For stepsize 0.1
der_t0r : : [ Double ]
der_t0r =
[ 0.0000 , 5.8779 , 3.6327 , 0.0000 , -3.6327 ,
-5.8779 , -5.8779 , -3.6327 , 0.0000 , 3.6327 ,
5.8779 , 5.8779 , 3.6327 , 0.0000 , -3.6327 ,
-5.8779 , -5.8779 , -3.6327 , 0.0000 , 3.6327 ]
-- For stepsize 0.1
der_t0r :: [Double]
der_t0r =
[ 0.0000, 5.8779, 3.6327, 0.0000, -3.6327,
-5.8779, -5.8779, -3.6327, 0.0000, 3.6327,
5.8779, 5.8779, 3.6327, 0.0000, -3.6327,
-5.8779, -5.8779, -3.6327, 0.0000, 3.6327]
-}
der_t0r :: [Double]
der_t0r = take der_N $
[2 * pi * cos (2 * pi * t) | t <- [0.0, der_step ..]]
-- We're happy if we are in the right ball park.
der_t0_max_diff = (maximum (zipWith (\x y -> abs (x - y))
(tail der_t0)
(tail der_t0r)))
der_trs =
[ der_t0_max_diff < 0.05
]
der_tr = and der_trs
| null | https://raw.githubusercontent.com/eamsden/Animas/2404d1de20982a337109fc6032cb77b022514f9d/tests/AFRPTestsDer.hs | haskell | ----------------------------------------------------------------------------
Test cases for derivative
----------------------------------------------------------------------------
For stepsize 0.1
For stepsize 0.1
We're happy if we are in the right ball park. | $ I d : AFRPTestsDer.hs , v 1.2 2003/11/10 21:28:58 antony Exp $
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* A F R P *
* *
* Module : AFRPTestsDer *
* Purpose : Test cases for derivative *
* Authors : and *
* *
* Copyright ( c ) Yale University , 2003 *
* *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
******************************************************************************
* A F R P *
* *
* Module: AFRPTestsDer *
* Purpose: Test cases for derivative *
* Authors: Antony Courtney and Henrik Nilsson *
* *
* Copyright (c) Yale University, 2003 *
* *
******************************************************************************
-}
module AFRPTestsDer (der_tr, der_trs) where
import FRP.Yampa
import AFRPTestsCommon
der_step = 0.001
der_N = 1000
der_t0 :: [Double]
First value is always 0
embed derivative
(deltaEncode der_step
[sin(2 * pi * t) | t <- [0.0, der_step ..]])
der_t0r : : [ Double ]
der_t0r =
[ 0.0000 , 5.8779 , 3.6327 , 0.0000 , -3.6327 ,
-5.8779 , -5.8779 , -3.6327 , 0.0000 , 3.6327 ,
5.8779 , 5.8779 , 3.6327 , 0.0000 , -3.6327 ,
-5.8779 , -5.8779 , -3.6327 , 0.0000 , 3.6327 ]
der_t0r :: [Double]
der_t0r =
[ 0.0000, 5.8779, 3.6327, 0.0000, -3.6327,
-5.8779, -5.8779, -3.6327, 0.0000, 3.6327,
5.8779, 5.8779, 3.6327, 0.0000, -3.6327,
-5.8779, -5.8779, -3.6327, 0.0000, 3.6327]
-}
der_t0r :: [Double]
der_t0r = take der_N $
[2 * pi * cos (2 * pi * t) | t <- [0.0, der_step ..]]
der_t0_max_diff = (maximum (zipWith (\x y -> abs (x - y))
(tail der_t0)
(tail der_t0r)))
der_trs =
[ der_t0_max_diff < 0.05
]
der_tr = and der_trs
|
5086921f75c9341013a9bcda835037c2eeb451acee1086220e7c7382bdfcf955 | skanev/playground | 20.scm | SICP exercise 2.20
;
The procedures + , * and list take arbitrary number of arguments . One way to
; define such procedures is to use define with dotted-tail notation. In a
; procedure definition, a parameter list that has a dot before the last
; parameter name indicates that, when a procedure is called, the initial
; parameters (if any) will have as values the initial arguments, as usual, but
; the final parameter's value will be a list of any remaining arguments. For
; instance, given the definition
;
; (define (f x y . z) <body>)
;
the procedure f can be called with two or more arguments . If we evaluate
;
; (f 1 2 3 4 5 6)
;
then in the body of f , x will be 1 , y will be 2 , and z will be the list ( 3 4 5 6 ) .
; Given the definition
;
; (define (g . w) <body>)
;
the procedure g can be called with zero or more arguments . If we evaluate
;
; (g 1 2 3 4 5 6)
;
then in the body of g , w will be the list ( 1 2 3 4 5 6 ) .
;
Use this notation to write a procedure same - parity that takes one or more
; integers and returns a list of all the arguments that have the same even-odd
parity as the first argument . For example :
;
( same - parity 1 2 3 4 5 6 7 )
; (1 3 5 7)
;
( same - parity 2 3 4 5 6 7 )
( 2 4 6 )
(define (same-parity number . numbers)
(define (same-parity? n)
(= (remainder number 2) (remainder n 2)))
(define (filter-list numbers)
(cond ((null? numbers) (list))
((same-parity? (car numbers)) (cons (car numbers)
(filter-list (cdr numbers))))
(else (filter-list (cdr numbers)))))
(cons number (filter-list numbers)))
| null | https://raw.githubusercontent.com/skanev/playground/d88e53a7f277b35041c2f709771a0b96f993b310/scheme/sicp/02/20.scm | scheme |
define such procedures is to use define with dotted-tail notation. In a
procedure definition, a parameter list that has a dot before the last
parameter name indicates that, when a procedure is called, the initial
parameters (if any) will have as values the initial arguments, as usual, but
the final parameter's value will be a list of any remaining arguments. For
instance, given the definition
(define (f x y . z) <body>)
(f 1 2 3 4 5 6)
Given the definition
(define (g . w) <body>)
(g 1 2 3 4 5 6)
integers and returns a list of all the arguments that have the same even-odd
(1 3 5 7)
| SICP exercise 2.20
The procedures + , * and list take arbitrary number of arguments . One way to
the procedure f can be called with two or more arguments . If we evaluate
then in the body of f , x will be 1 , y will be 2 , and z will be the list ( 3 4 5 6 ) .
the procedure g can be called with zero or more arguments . If we evaluate
then in the body of g , w will be the list ( 1 2 3 4 5 6 ) .
Use this notation to write a procedure same - parity that takes one or more
parity as the first argument . For example :
( same - parity 1 2 3 4 5 6 7 )
( same - parity 2 3 4 5 6 7 )
( 2 4 6 )
(define (same-parity number . numbers)
(define (same-parity? n)
(= (remainder number 2) (remainder n 2)))
(define (filter-list numbers)
(cond ((null? numbers) (list))
((same-parity? (car numbers)) (cons (car numbers)
(filter-list (cdr numbers))))
(else (filter-list (cdr numbers)))))
(cons number (filter-list numbers)))
|
277e0d775f0b70923af441b84572dd9b3cb4a25707709592c062867ff8517beb | RolfRolles/PandemicML | IRRandomizedEvaluator.ml | open DataStructures
(* Evaluator functionality. This function is factored out so it can be used
elsewhere. *)
let write_mem memctx a32 v32 s =
let n = (IRUtil.bits s)/8 in
let rec aux v32 i =
if i = n
then ()
else
let addr = Int32.add a32 (Int32.of_int i) in
Hashtbl.replace memctx addr (Int32.logand v32 0xffl);
aux (Int32.shift_right_logical v32 8) (i+1)
in aux v32 0
(* Given:
ir: a list of IR statements. Not in SSA form, and no jumps.
regctx: (IR.var,IR.expr) Hashtbl.t, the register context
memctx: (int32,int32) Hashtbl.t, the memory context
Evaluate the statements in the given contexts, computing information about
the memory behavior of the sequence, as described below:
Returns a pair (and modifies regctx/memctx):
reads: (int32 address,int32 value, int32 size) list
writes: (int32 address,int32 value, int32 size) list
*)
let concrete_evaluate_jumpless_nonssa ir regctx memctx =
let open IR in let open IRUtil in
All this shit is duplicated from IRLocalOpt
let bool2e = function | true -> Const(0x1L,TypeReg_1) | false -> Const(0x0L,TypeReg_1) in
let sign_extend_byte = function
| x when x <= 0x7FL -> x
| x -> Int64.logor x 0xFFFFFFFFFFFFFF00L
in
let sign_extend_word = function
| x when x <= 0x7FFFL -> x
| x -> Int64.logor x 0xFFFFFFFFFFFF0000L
in
let sign_extend_dword = function
| x when x <= 0x7FFFFFFFL -> x
| x -> Int64.logor x 0xFFFFFFFF00000000L
in
let sign_extend c = function
| TypeReg_1 -> invalid_arg "sign_extend: TypeReg_1"
| TypeReg_8 -> sign_extend_byte c
| TypeReg_16 -> sign_extend_word c
| TypeReg_32 -> sign_extend_dword c
| TypeReg_64 -> c
in
let ucomp clhs crhs c =
let c = match c with
| ULT -> ( < )
| ULE -> ( <= )
| _ -> invalid_arg "ucomp"
in
let b =
if clhs >= 0L && crhs >= 0L then c clhs crhs else
if clhs < 0L && crhs >= 0L then true else
if crhs < 0L && clhs >= 0L then false else
not (c clhs crhs)
in Const(i64_of_bool b,TypeReg_1)
in
let scomp clhs crhs c s =
let c = match c with
| SLT -> ( < )
| SLE -> ( <= )
| _ -> invalid_arg "scomp"
in
let b = match s with
| TypeReg_1 -> invalid_arg "scomp: Could probably handle this, but will it ever happen?"
| TypeReg_8 -> c (sign_extend_byte clhs) (sign_extend_byte crhs)
| TypeReg_16 -> c (sign_extend_word clhs) (sign_extend_word crhs)
| TypeReg_32 -> c (sign_extend_dword clhs) (sign_extend_dword crhs)
| TypeReg_64 -> c clhs crhs
in Const(i64_of_bool b,TypeReg_1)
in
let do_signed_cast c1 s1 s =
let c =
if (Int64.logand (mk_sign_const_i64 s1) c1) = 0L
then c1
else Int64.logor c1 (Int64.logxor (mk_max_const_i64 s) (mk_max_const_i64 s1))
in Const(c,s)
in
let do_high_cast c1 s1 s =
let s1 = IRUtil.bits s1 in
let s' = IRUtil.bits s in
if s'> s1 then failwith "do_high_cast: typechecking prevents this";
Const(Int64.shift_right_logical c1 (s1-s'),s) (* ' *)
in
(* End duplication *)
let cc = function | Const(c,s) -> c | _ -> failwith "concrete_evaluate_jumpless_nonssa::cc: typechecking prevents this" in
let cs = function | Const(c,s) -> s | _ -> failwith "concrete_evaluate_jumpless_nonssa::cs: typechecking prevents this" in
let reads = ref [] in
let uninit_reads = ref [] in
let uninit = Hashtbl.create 50 in
let writes = ref [] in
let read_mem a32 s =
let n = (IRUtil.bits s)/8 in
let rec aux c uni i =
if i < n
then
let addr = Int32.add a32 (Int32.of_int i) in
let byte = ht_find_opt memctx addr in
let byte,uninit =
match byte with
| Some(b) -> (b,match ht_find_opt uninit addr with | Some(_) -> true | None -> false)
| None ->
let new_byte = Int32.succ (Random.int32 0xFFl) in
Hashtbl.replace memctx addr new_byte;
Hashtbl.replace uninit addr new_byte;
(new_byte,true)
in
let c = Int32.logor (Int32.shift_left byte (i*8)) c in
aux c (uninit or uni) (i+1)
else (c,uni)
in
let c,uni = aux 0l false 0 in
(if uni
then uninit_reads := (a32,c,s)::(!uninit_reads));
c
in
let rec expr = function
| Var(v) -> Hashtbl.find regctx v
| Const(_,_) as c -> c
| Binop(l,Add,r) -> let l,r = expr l,expr r in Const(truncate_to (cs l) (Int64.add (cc l) (cc r)),(cs l))
| Binop(l,Mul,r) -> let l,r = expr l,expr r in Const(truncate_to (cs l) (Int64.mul (cc l) (cc r)),(cs l))
| Binop(l,And,r) -> let l,r = expr l,expr r in Const(Int64.logand (cc l) (cc r),(cs l))
| Binop(l,Xor,r) -> let l,r = expr l,expr r in Const(Int64.logxor (cc l) (cc r),(cs l))
| Binop(l, Or,r) -> let l,r = expr l,expr r in Const(Int64.logor (cc l) (cc r),(cs l))
| Binop(l,Sub,r) -> let l,r = expr l,expr r in Const(truncate_to (cs l) (Int64.sub (cc l) (cc r)),(cs l))
| Binop(l,UDiv,r) -> failwith "concrete_evaluate_jumpless_nonssa::expr: UDIV unimplemented"
| Binop(l,SDiv,r) -> failwith "concrete_evaluate_jumpless_nonssa::expr: SDIV unimplemented"
| Binop(l,UMod,r) -> failwith "concrete_evaluate_jumpless_nonssa::expr: UMOD unimplemented"
| Binop(l,SMod,r) -> failwith "concrete_evaluate_jumpless_nonssa::expr: SMOD unimplemented"
| Binop(l,Shl,r) -> let l,r = expr l,expr r in Const(truncate_to (cs l) (Int64.shift_left (cc l) (Int64.to_int (cc r))),(cs l))
| Binop(l,Shr,r) -> let l,r = expr l,expr r in Const(truncate_to (cs l) (Int64.shift_right_logical (cc l) (Int64.to_int (cc r))),(cs l))
| Binop(l,Sar,r) -> let l,r = expr l,expr r in Const(truncate_to (cs l) (Int64.shift_right (sign_extend (cc l) (cs l)) (Int64.to_int (cc r))),(cs l))
| Binop(l, EQ,r) -> let l,r = expr l,expr r in bool2e ((cc l) = (cc r))
| Binop(l, NE,r) -> let l,r = expr l,expr r in bool2e ((cc l) <> (cc r))
| Binop(l,ULT,r) -> let l,r = expr l,expr r in ucomp (cc l) (cc r) (ULT)
| Binop(l,ULE,r) -> let l,r = expr l,expr r in ucomp (cc l) (cc r) (ULE)
| Binop(l,SLT,r) -> let l,r = expr l,expr r in scomp (cc l) (cc r) (SLT) (cs l)
| Binop(l,SLE,r) -> let l,r = expr l,expr r in scomp (cc l) (cc r) (SLE) (cs l)
| Unop(Not,e) -> let e = expr e in Const(truncate_to (cs e) (Int64.logxor (-1L) (cc e)),(cs e))
| Unop(Neg,e) -> let e = expr e in Const(truncate_to (cs e) (Int64.add 1L (Int64.logxor (-1L) (cc e))),(cs e))
| Cast(Low,s,e) -> let e = expr e in Const(truncate_to s (cc e),s)
| Cast(Unsigned,s,e) -> let e = expr e in Const((cc e),s)
| Cast(Signed,s,e) -> let e = expr e in do_signed_cast (cc e) (cs e) s
| Cast(High,s,e) -> let e = expr e in do_high_cast (cc e) (cs e) s
| Load(_,a,s) ->
let a = Int64.to_int32 (cc (expr a)) in
let v = read_mem a s in
reads := (a,v,s)::(!reads);
IRUtil.mk_fixed_const (Int64.of_int32 v) s
| Store(m,a,t,s) ->
let a,t = Int64.to_int32 (cc (expr a)), Int64.to_int32 (cc (expr t)) in
write_mem memctx a t s;
writes := (a,t,s)::(!writes);
m
| Let(_,_,_) -> failwith "concrete_evaluate_jumpless_nonssa::expr: LET unimplemented"
in
let stmt = function
| Assign(Mem(_,_,_),e) -> let _ = expr e in ()
| Assign(v,e) -> Hashtbl.replace regctx v (expr e)
| Label(_) -> ()
| Comment(_) -> ()
| Assert(_) -> ()
| Jmp(e) -> failwith "concrete_evaluate_jumpless_nonssa::stmt: JMP encountered"
| CJmp(e,t,f) -> failwith "concrete_evaluate_jumpless_nonssa::stmt: CJMP encountered"
| Halt(_) -> failwith "concrete_evaluate_jumpless_nonssa::stmt: HALT never used"
| Special(_) -> failwith "concrete_evaluate_jumpless_nonssa::stmt: SPECIAL never used"
in
List.iter stmt ir;
(!reads,!writes)
| null | https://raw.githubusercontent.com/RolfRolles/PandemicML/9c31ecaf9c782dbbeb6cf502bc2a6730316d681e/Projects/Nxcroticism/IRRandomizedEvaluator.ml | ocaml | Evaluator functionality. This function is factored out so it can be used
elsewhere.
Given:
ir: a list of IR statements. Not in SSA form, and no jumps.
regctx: (IR.var,IR.expr) Hashtbl.t, the register context
memctx: (int32,int32) Hashtbl.t, the memory context
Evaluate the statements in the given contexts, computing information about
the memory behavior of the sequence, as described below:
Returns a pair (and modifies regctx/memctx):
reads: (int32 address,int32 value, int32 size) list
writes: (int32 address,int32 value, int32 size) list
'
End duplication | open DataStructures
let write_mem memctx a32 v32 s =
let n = (IRUtil.bits s)/8 in
let rec aux v32 i =
if i = n
then ()
else
let addr = Int32.add a32 (Int32.of_int i) in
Hashtbl.replace memctx addr (Int32.logand v32 0xffl);
aux (Int32.shift_right_logical v32 8) (i+1)
in aux v32 0
let concrete_evaluate_jumpless_nonssa ir regctx memctx =
let open IR in let open IRUtil in
All this shit is duplicated from IRLocalOpt
let bool2e = function | true -> Const(0x1L,TypeReg_1) | false -> Const(0x0L,TypeReg_1) in
let sign_extend_byte = function
| x when x <= 0x7FL -> x
| x -> Int64.logor x 0xFFFFFFFFFFFFFF00L
in
let sign_extend_word = function
| x when x <= 0x7FFFL -> x
| x -> Int64.logor x 0xFFFFFFFFFFFF0000L
in
let sign_extend_dword = function
| x when x <= 0x7FFFFFFFL -> x
| x -> Int64.logor x 0xFFFFFFFF00000000L
in
let sign_extend c = function
| TypeReg_1 -> invalid_arg "sign_extend: TypeReg_1"
| TypeReg_8 -> sign_extend_byte c
| TypeReg_16 -> sign_extend_word c
| TypeReg_32 -> sign_extend_dword c
| TypeReg_64 -> c
in
let ucomp clhs crhs c =
let c = match c with
| ULT -> ( < )
| ULE -> ( <= )
| _ -> invalid_arg "ucomp"
in
let b =
if clhs >= 0L && crhs >= 0L then c clhs crhs else
if clhs < 0L && crhs >= 0L then true else
if crhs < 0L && clhs >= 0L then false else
not (c clhs crhs)
in Const(i64_of_bool b,TypeReg_1)
in
let scomp clhs crhs c s =
let c = match c with
| SLT -> ( < )
| SLE -> ( <= )
| _ -> invalid_arg "scomp"
in
let b = match s with
| TypeReg_1 -> invalid_arg "scomp: Could probably handle this, but will it ever happen?"
| TypeReg_8 -> c (sign_extend_byte clhs) (sign_extend_byte crhs)
| TypeReg_16 -> c (sign_extend_word clhs) (sign_extend_word crhs)
| TypeReg_32 -> c (sign_extend_dword clhs) (sign_extend_dword crhs)
| TypeReg_64 -> c clhs crhs
in Const(i64_of_bool b,TypeReg_1)
in
let do_signed_cast c1 s1 s =
let c =
if (Int64.logand (mk_sign_const_i64 s1) c1) = 0L
then c1
else Int64.logor c1 (Int64.logxor (mk_max_const_i64 s) (mk_max_const_i64 s1))
in Const(c,s)
in
let do_high_cast c1 s1 s =
let s1 = IRUtil.bits s1 in
let s' = IRUtil.bits s in
if s'> s1 then failwith "do_high_cast: typechecking prevents this";
in
let cc = function | Const(c,s) -> c | _ -> failwith "concrete_evaluate_jumpless_nonssa::cc: typechecking prevents this" in
let cs = function | Const(c,s) -> s | _ -> failwith "concrete_evaluate_jumpless_nonssa::cs: typechecking prevents this" in
let reads = ref [] in
let uninit_reads = ref [] in
let uninit = Hashtbl.create 50 in
let writes = ref [] in
let read_mem a32 s =
let n = (IRUtil.bits s)/8 in
let rec aux c uni i =
if i < n
then
let addr = Int32.add a32 (Int32.of_int i) in
let byte = ht_find_opt memctx addr in
let byte,uninit =
match byte with
| Some(b) -> (b,match ht_find_opt uninit addr with | Some(_) -> true | None -> false)
| None ->
let new_byte = Int32.succ (Random.int32 0xFFl) in
Hashtbl.replace memctx addr new_byte;
Hashtbl.replace uninit addr new_byte;
(new_byte,true)
in
let c = Int32.logor (Int32.shift_left byte (i*8)) c in
aux c (uninit or uni) (i+1)
else (c,uni)
in
let c,uni = aux 0l false 0 in
(if uni
then uninit_reads := (a32,c,s)::(!uninit_reads));
c
in
let rec expr = function
| Var(v) -> Hashtbl.find regctx v
| Const(_,_) as c -> c
| Binop(l,Add,r) -> let l,r = expr l,expr r in Const(truncate_to (cs l) (Int64.add (cc l) (cc r)),(cs l))
| Binop(l,Mul,r) -> let l,r = expr l,expr r in Const(truncate_to (cs l) (Int64.mul (cc l) (cc r)),(cs l))
| Binop(l,And,r) -> let l,r = expr l,expr r in Const(Int64.logand (cc l) (cc r),(cs l))
| Binop(l,Xor,r) -> let l,r = expr l,expr r in Const(Int64.logxor (cc l) (cc r),(cs l))
| Binop(l, Or,r) -> let l,r = expr l,expr r in Const(Int64.logor (cc l) (cc r),(cs l))
| Binop(l,Sub,r) -> let l,r = expr l,expr r in Const(truncate_to (cs l) (Int64.sub (cc l) (cc r)),(cs l))
| Binop(l,UDiv,r) -> failwith "concrete_evaluate_jumpless_nonssa::expr: UDIV unimplemented"
| Binop(l,SDiv,r) -> failwith "concrete_evaluate_jumpless_nonssa::expr: SDIV unimplemented"
| Binop(l,UMod,r) -> failwith "concrete_evaluate_jumpless_nonssa::expr: UMOD unimplemented"
| Binop(l,SMod,r) -> failwith "concrete_evaluate_jumpless_nonssa::expr: SMOD unimplemented"
| Binop(l,Shl,r) -> let l,r = expr l,expr r in Const(truncate_to (cs l) (Int64.shift_left (cc l) (Int64.to_int (cc r))),(cs l))
| Binop(l,Shr,r) -> let l,r = expr l,expr r in Const(truncate_to (cs l) (Int64.shift_right_logical (cc l) (Int64.to_int (cc r))),(cs l))
| Binop(l,Sar,r) -> let l,r = expr l,expr r in Const(truncate_to (cs l) (Int64.shift_right (sign_extend (cc l) (cs l)) (Int64.to_int (cc r))),(cs l))
| Binop(l, EQ,r) -> let l,r = expr l,expr r in bool2e ((cc l) = (cc r))
| Binop(l, NE,r) -> let l,r = expr l,expr r in bool2e ((cc l) <> (cc r))
| Binop(l,ULT,r) -> let l,r = expr l,expr r in ucomp (cc l) (cc r) (ULT)
| Binop(l,ULE,r) -> let l,r = expr l,expr r in ucomp (cc l) (cc r) (ULE)
| Binop(l,SLT,r) -> let l,r = expr l,expr r in scomp (cc l) (cc r) (SLT) (cs l)
| Binop(l,SLE,r) -> let l,r = expr l,expr r in scomp (cc l) (cc r) (SLE) (cs l)
| Unop(Not,e) -> let e = expr e in Const(truncate_to (cs e) (Int64.logxor (-1L) (cc e)),(cs e))
| Unop(Neg,e) -> let e = expr e in Const(truncate_to (cs e) (Int64.add 1L (Int64.logxor (-1L) (cc e))),(cs e))
| Cast(Low,s,e) -> let e = expr e in Const(truncate_to s (cc e),s)
| Cast(Unsigned,s,e) -> let e = expr e in Const((cc e),s)
| Cast(Signed,s,e) -> let e = expr e in do_signed_cast (cc e) (cs e) s
| Cast(High,s,e) -> let e = expr e in do_high_cast (cc e) (cs e) s
| Load(_,a,s) ->
let a = Int64.to_int32 (cc (expr a)) in
let v = read_mem a s in
reads := (a,v,s)::(!reads);
IRUtil.mk_fixed_const (Int64.of_int32 v) s
| Store(m,a,t,s) ->
let a,t = Int64.to_int32 (cc (expr a)), Int64.to_int32 (cc (expr t)) in
write_mem memctx a t s;
writes := (a,t,s)::(!writes);
m
| Let(_,_,_) -> failwith "concrete_evaluate_jumpless_nonssa::expr: LET unimplemented"
in
let stmt = function
| Assign(Mem(_,_,_),e) -> let _ = expr e in ()
| Assign(v,e) -> Hashtbl.replace regctx v (expr e)
| Label(_) -> ()
| Comment(_) -> ()
| Assert(_) -> ()
| Jmp(e) -> failwith "concrete_evaluate_jumpless_nonssa::stmt: JMP encountered"
| CJmp(e,t,f) -> failwith "concrete_evaluate_jumpless_nonssa::stmt: CJMP encountered"
| Halt(_) -> failwith "concrete_evaluate_jumpless_nonssa::stmt: HALT never used"
| Special(_) -> failwith "concrete_evaluate_jumpless_nonssa::stmt: SPECIAL never used"
in
List.iter stmt ir;
(!reads,!writes)
|
8d5a0d3e6cb2c50cd493ed2aef8a43517aaf864290fc30243da659c76e7ede5a | jeromesimeon/Galax | compile_context.ml | (***********************************************************************)
(* *)
(* GALAX *)
(* XQuery Engine *)
(* *)
Copyright 2001 - 2007 .
(* Distributed only by permission. *)
(* *)
(***********************************************************************)
$ I d : compile_context.ml , v 1.28 2007/05/16 15:32:09 mff Exp $
(* Module: Compile_context
Description:
This module contains context information used during algebraic
compilation.
*)
open Namespace_util
open Namespace_names
open Norm_context
open Typing_context
open Xquery_algebra_ast
open Xquery_common_ast
open Error
(***********************)
(* Compilation context *)
(***********************)
(* The compilation context carried through algebraic compilation.
   'a and 'b are the annotation types of the algebraic AST nodes. *)
type ('a,'b) compile_context =
    { compiled_static_context : static_context;
        (* Static context produced by the typing phase *)
      compiled_functions : (('a,'b) aalgop_function_body) RQNameIntHashtbl.t;
        (* Compiled function bodies, keyed by (function name, arity) *)
      compiled_variables : (crname) RQNameHashtbl.t;
        (* Maps variable names to their generated tuple-field names *)
      mutable compiled_has_input : bool;
        (* Set to true when a variable/tuple-field binding is added *)
      next_variable : Namespace_generate.name_gen ref}
        (* Generator used to create fresh variable/field names *)
(* Builds a fresh compilation context around the given static context.
   The fresh-name generator is obtained from the processing context,
   using the glx prefix/URI. *)
let build_compile_context stat_ctxt =
  let norm_ctxt = Typing_context.norm_context_from_stat_context stat_ctxt in
  let mod_ctxt = Norm_context.module_context_from_norm_context norm_ctxt in
  let name_gen =
    Processing_context.get_name_generator
      mod_ctxt Namespace_builtin.glx_prefix Namespace_builtin.glx_uri ""
  in
  { compiled_static_context = stat_ctxt;
    compiled_functions = RQNameIntHashtbl.create 167;
    compiled_variables = RQNameHashtbl.create 167;
    compiled_has_input = false;
    next_variable = name_gen }
(* Default compilation context, built from the default static context
   for the given normalization context. *)
let default_compile_context norm_ctxt =
  build_compile_context (default_static_context norm_ctxt)
(* Returns a copy of the compilation context with the static context
   replaced.  The function/variable tables and the name generator are
   shared with the input context. *)
let replace_static_context_in_compile_context stat_ctxt comp_ctxt =
  { comp_ctxt with compiled_static_context = stat_ctxt }
(* Accessor for the static context held in the compilation context. *)
let static_context_from_compile_context comp_ctxt =
  comp_ctxt.compiled_static_context
(* Replaces the namespace environment.  The environment lives in the
   normalization context, which is nested inside the static context, so
   we unpack the contexts, swap the environment at the innermost level,
   and rebuild outward.  (Original comment noted this is rather gross.) *)
let replace_namespace_env_in_compile_context nsenv comp_ctxt =
  let old_stat = static_context_from_compile_context comp_ctxt in
  let new_norm =
    replace_namespace_env_in_norm_context nsenv
      (norm_context_from_stat_context old_stat)
  in
  replace_static_context_in_compile_context
    (replace_norm_context_in_static_context new_norm old_stat)
    comp_ctxt
(* Accessor for the normalization context, reached via the static context. *)
let norm_context_from_compile_context alg_ctxt =
  norm_context_from_stat_context alg_ctxt.compiled_static_context
(***************************)
(* Treatement of functions *)
(***************************)
(* Registers a compiled function body under (name, arity).
   Raises Symbol_Already_Defined if that key is already registered. *)
let add_function_to_compile_context comp_ctxt (cfname,arity) fb =
  if not (RQNameIntHashtbl.mem comp_ctxt.compiled_functions (cfname,arity))
  then
    RQNameIntHashtbl.add comp_ctxt.compiled_functions (cfname,arity) fb
  else
    raise (Query (Symbol_Already_Defined ("Function ",
                                          (prefixed_string_of_rqname cfname))))
(* Looks up the compiled body registered for (name, arity).
   Raises Undefined with a diagnostic message (prefixed by [msg])
   when no such function was registered. *)
let get_function_from_compile_context msg comp_ctxt (cfname,arity) =
  if RQNameIntHashtbl.mem comp_ctxt.compiled_functions (cfname,arity)
  then
    RQNameIntHashtbl.find comp_ctxt.compiled_functions (cfname,arity)
  else
    let diagnostic =
      msg^"Function "
      ^ (curly_uri_string_of_rqname cfname)
      ^ " with arity "
      ^ (string_of_int arity)
      ^ " not found in compile context."
    in
    raise (Query (Undefined diagnostic))
(* True iff a body has been registered for the given (name, arity). *)
let mem_function_from_compile_context comp_ctxt key =
  RQNameIntHashtbl.mem comp_ctxt.compiled_functions key
(* Installs the selected physical plan for a previously registered
   function.  Fails if a physical plan was already installed. *)
let update_physical_plan_in_compile_context comp_ctxt (name,arity) body =
  let fdef =
    get_function_from_compile_context "update_physical_plan" comp_ctxt (name,arity)
  in
  let plan_ref = fdef.palgop_func_physical_plan in
  match !plan_ref with
  | None ->
      plan_ref := Some body
  | Some _ ->
      raise (Query (Code_Selection
                      ("Physical plan for "
                       ^(Namespace_names.prefixed_string_of_rqname name)
                       ^" already defined.")))
(* Built-in function helpers: a global table recording which
   (name, arity) pairs denote built-in functions. *)
let builtin_fn_hash = RQNameIntHashtbl.create 167

(* Marks (cfname, arity) as a built-in function. *)
let register_builtin key =
  RQNameIntHashtbl.add builtin_fn_hash key ()

(* True iff (cfname, arity) was registered as built-in. *)
let is_builtin key =
  RQNameIntHashtbl.mem builtin_fn_hash key
(***************************)
(* Treatement of variables *)
(***************************)
(* Copies the compilation context, deep-copying the two mutable tables so
   that updates to the copy do not leak into the original.  The static
   context and the name generator remain shared. *)
let copy_compile_context comp_ctxt =
  { comp_ctxt with
      compiled_functions = RQNameIntHashtbl.copy comp_ctxt.compiled_functions;
      compiled_variables = RQNameHashtbl.copy comp_ctxt.compiled_variables }
(* True if the context's input flag is set (a variable/tuple-field
   binding has been added). *)
let has_input_set comp_ctxt = comp_ctxt.compiled_has_input
(* Generates a fresh name from the context's name generator, keeping
   [orig_name] as a readable prefix. *)
let get_new_name comp_ctxt orig_name =
  Namespace_generate.generate_name_with_prefix
    !(comp_ctxt.next_variable) orig_name

(* Fresh names for specific compilation purposes. *)
let get_new_group_name comp_ctxt = get_new_name comp_ctxt "group_created"
let get_new_dot_name comp_ctxt = get_new_name comp_ctxt "dot_new"
let get_new_var_name comp_ctxt = get_new_name comp_ctxt "var_new"

(* Fresh tuple-field name derived from the local part of [cvname]
   (an rqname triple). *)
let get_new_tuple_name comp_ctxt (_, _, orig_name) =
  get_new_name comp_ctxt orig_name

(* Fresh variable name; here [cvname] is used directly as the prefix. *)
let get_new_variable_name comp_ctxt cvname = get_new_name comp_ctxt cvname
(* Returns the tuple-field name previously generated for [cvname].
   Assumes the variable is known to be a tuple field; raises a
   Compilation error if no binding was recorded. *)
let get_tuple_field_name comp_ctxt cvname =
  if RQNameHashtbl.mem comp_ctxt.compiled_variables cvname
  then
    RQNameHashtbl.find comp_ctxt.compiled_variables cvname
  else
    raise (Query (Compilation ("Looking for tuple field " ^
                               (Namespace_names.prefixed_string_of_rqname cvname)
                               ^ " but has not been compiled")))
(* Returns a copy of the context in which [cvname] is bound to a freshly
   generated tuple-field name; also sets the input flag on the copy. *)
let add_variable_field_to_compile_context comp_ctxt cvname =
  let ctxt = copy_compile_context comp_ctxt in
  let field_name = get_new_tuple_name ctxt cvname in
  RQNameHashtbl.replace ctxt.compiled_variables cvname field_name;
  ctxt.compiled_has_input <- true;
  ctxt
let hide_variable_field_from_compile_context comp_ctxt cvname =
let comp_ctxt' = copy_compile_context comp_ctxt in
begin
RQNameHashtbl.remove comp_ctxt'.compiled_variables cvname;
comp_ctxt'
end
let get_variable_field_from_compile_context comp_ctxt cvname =
if (RQNameHashtbl.mem comp_ctxt.compiled_variables cvname)
then
Some (RQNameHashtbl.find comp_ctxt.compiled_variables cvname)
else
None
let no_more_input comp_ctxt =
comp_ctxt.compiled_has_input <- false
(* External Build functions *)
let update_compile_context_from_module comp_ctxt m =
let fns = m.palgop_module_prolog.palgop_prolog_functions in
List.iter (fun fdecl ->
let (name, _, body, _) = fdecl.palgop_function_decl_desc in
add_function_to_compile_context comp_ctxt name body)
fns;
comp_ctxt
let copy_without_functions comp_ctxt =
{ compiled_static_context = comp_ctxt.compiled_static_context;
compiled_functions = RQNameIntHashtbl.create 167;
compiled_variables = RQNameHashtbl.copy comp_ctxt.compiled_variables;
compiled_has_input = comp_ctxt.compiled_has_input;
next_variable = comp_ctxt.next_variable}
let map_function_bodies comp_ctxt fn =
let ht = RQNameIntHashtbl.create 167 in
RQNameIntHashtbl.iter (fn ht) comp_ctxt.compiled_functions;
{ compiled_static_context = comp_ctxt.compiled_static_context;
compiled_functions = ht;
compiled_variables = RQNameHashtbl.copy comp_ctxt.compiled_variables;
compiled_has_input = comp_ctxt.compiled_has_input;
next_variable = comp_ctxt.next_variable}
If a binding from context_2 is in context_1 , 's binding is replaced
let update_compile_context context_1 context_2 =
let check_fn ht key binding =
if RQNameIntHashtbl.mem context_1.compiled_functions key
then RQNameIntHashtbl.add ht key (RQNameIntHashtbl.find context_1.compiled_functions key)
else RQNameIntHashtbl.add ht key binding
in
map_function_bodies context_2 check_fn
type logical_compile_context = (unit, Ast_path_struct.path_annotation) compile_context
| null | https://raw.githubusercontent.com/jeromesimeon/Galax/bc565acf782c140291911d08c1c784c9ac09b432/compile/compile_context.ml | ocaml | *********************************************************************
GALAX
XQuery Engine
Distributed only by permission.
*********************************************************************
Module: Compile_context
Description:
This module contains context information used during algebraic
compilation.
*********************
Compilation context
*********************
Creates a new compilation context
Default compilation context
Replace the static context
Accesses parts of the static context from the compilation context
Replace the namespace environment
*************************
Treatement of functions
*************************
*************************
Treatement of variables
*************************
Assumes we know it to be a tuple field name..
External Build functions | Copyright 2001 - 2007 .
$ I d : compile_context.ml , v 1.28 2007/05/16 15:32:09 mff Exp $
open Namespace_util
open Namespace_names
open Norm_context
open Typing_context
open Xquery_algebra_ast
open Xquery_common_ast
open Error
type ('a,'b) compile_context =
{ compiled_static_context : static_context;
compiled_functions : (('a,'b) aalgop_function_body) RQNameIntHashtbl.t;
compiled_variables : (crname) RQNameHashtbl.t;
mutable compiled_has_input : bool;
next_variable : Namespace_generate.name_gen ref}
let build_compile_context stat_ctxt =
let mod_ctxt = Norm_context.module_context_from_norm_context (Typing_context.norm_context_from_stat_context stat_ctxt) in
let ng =
Processing_context.get_name_generator
mod_ctxt Namespace_builtin.glx_prefix Namespace_builtin.glx_uri ""
in
{ compiled_static_context = stat_ctxt;
compiled_functions = RQNameIntHashtbl.create 167;
compiled_variables = RQNameHashtbl.create 167;
compiled_has_input = false;
next_variable = ng }
let default_compile_context norm_ctxt =
let default_stat_ctxt = default_static_context norm_ctxt in
build_compile_context default_stat_ctxt
let replace_static_context_in_compile_context stat_ctxt comp_ctxt =
{ compiled_static_context = stat_ctxt;
compiled_functions = comp_ctxt.compiled_functions;
compiled_variables = comp_ctxt.compiled_variables;
compiled_has_input = comp_ctxt.compiled_has_input;
next_variable = comp_ctxt.next_variable }
let static_context_from_compile_context c = c.compiled_static_context
let replace_namespace_env_in_compile_context nsenv comp_ctxt =
: This is gross ...
let stat_ctxt = static_context_from_compile_context comp_ctxt in
let norm_ctxt = norm_context_from_stat_context stat_ctxt in
let norm_ctxt' = replace_namespace_env_in_norm_context nsenv norm_ctxt in
let stat_ctxt' = replace_norm_context_in_static_context norm_ctxt' stat_ctxt in
replace_static_context_in_compile_context stat_ctxt' comp_ctxt
let norm_context_from_compile_context alg_ctxt =
norm_context_from_stat_context (static_context_from_compile_context alg_ctxt)
let add_function_to_compile_context comp_ctxt (cfname,arity) fb =
if (RQNameIntHashtbl.mem comp_ctxt.compiled_functions (cfname,arity))
then
raise (Query (Symbol_Already_Defined ("Function ",
(prefixed_string_of_rqname cfname))))
else
RQNameIntHashtbl.add comp_ctxt.compiled_functions (cfname,arity) fb
let get_function_from_compile_context msg comp_ctxt (cfname,arity) =
try
RQNameIntHashtbl.find (comp_ctxt.compiled_functions) (cfname,arity)
with
| Not_found ->
raise (Query (Undefined(msg^"Function "
^ (curly_uri_string_of_rqname cfname)
^ " with arity "
^ (string_of_int arity)
^ " not found in compile context.")))
let mem_function_from_compile_context comp_ctxt (cfname,arity) =
RQNameIntHashtbl.mem comp_ctxt.compiled_functions (cfname,arity)
let update_physical_plan_in_compile_context comp_ctxt (name,arity) body =
let func_defn = get_function_from_compile_context "update_physical_plan" comp_ctxt (name,arity) in
match !(func_defn.palgop_func_physical_plan) with
| None -> func_defn.palgop_func_physical_plan := Some body
| Some _ -> raise(Query(Code_Selection("Physical plan for "^(Namespace_names.prefixed_string_of_rqname name)^" already defined.")))
Built in helper functions
let builtin_fn_hash = RQNameIntHashtbl.create 167;;
let register_builtin (cfname,arity) =
RQNameIntHashtbl.add builtin_fn_hash (cfname,arity) ();;
let is_builtin (cfname,arity) =
RQNameIntHashtbl.mem builtin_fn_hash (cfname,arity)
let copy_compile_context comp_ctxt =
{ compiled_static_context = comp_ctxt.compiled_static_context;
compiled_functions = RQNameIntHashtbl.copy comp_ctxt.compiled_functions;
compiled_variables = RQNameHashtbl.copy comp_ctxt.compiled_variables;
compiled_has_input = comp_ctxt.compiled_has_input;
next_variable = comp_ctxt.next_variable}
let has_input_set comp_ctxt =
comp_ctxt.compiled_has_input
let get_new_name comp_ctxt orig_name =
Namespace_generate.generate_name_with_prefix !(comp_ctxt.next_variable) orig_name
let get_new_group_name comp_ctxt = get_new_name comp_ctxt "group_created"
let get_new_dot_name comp_ctxt = get_new_name comp_ctxt "dot_new"
let get_new_var_name comp_ctxt = get_new_name comp_ctxt "var_new"
let get_new_tuple_name comp_ctxt cvname =
let (_,_, orig_name) = cvname in
get_new_name comp_ctxt orig_name
let get_new_variable_name comp_ctxt cvname = get_new_name comp_ctxt cvname
let get_tuple_field_name comp_ctxt cvname =
try
RQNameHashtbl.find comp_ctxt.compiled_variables cvname
with Not_found ->
raise (Query (Compilation ("Looking for tuple field " ^
(Namespace_names.prefixed_string_of_rqname cvname)
^ " but has not been compiled")))
let add_variable_field_to_compile_context comp_ctxt cvname =
let comp_ctxt' = copy_compile_context comp_ctxt in
begin
let crname = get_new_tuple_name comp_ctxt' cvname in
RQNameHashtbl.replace comp_ctxt'.compiled_variables cvname crname;
comp_ctxt'.compiled_has_input <- true;
comp_ctxt'
end
let hide_variable_field_from_compile_context comp_ctxt cvname =
let comp_ctxt' = copy_compile_context comp_ctxt in
begin
RQNameHashtbl.remove comp_ctxt'.compiled_variables cvname;
comp_ctxt'
end
let get_variable_field_from_compile_context comp_ctxt cvname =
if (RQNameHashtbl.mem comp_ctxt.compiled_variables cvname)
then
Some (RQNameHashtbl.find comp_ctxt.compiled_variables cvname)
else
None
let no_more_input comp_ctxt =
comp_ctxt.compiled_has_input <- false
let update_compile_context_from_module comp_ctxt m =
let fns = m.palgop_module_prolog.palgop_prolog_functions in
List.iter (fun fdecl ->
let (name, _, body, _) = fdecl.palgop_function_decl_desc in
add_function_to_compile_context comp_ctxt name body)
fns;
comp_ctxt
let copy_without_functions comp_ctxt =
{ compiled_static_context = comp_ctxt.compiled_static_context;
compiled_functions = RQNameIntHashtbl.create 167;
compiled_variables = RQNameHashtbl.copy comp_ctxt.compiled_variables;
compiled_has_input = comp_ctxt.compiled_has_input;
next_variable = comp_ctxt.next_variable}
let map_function_bodies comp_ctxt fn =
let ht = RQNameIntHashtbl.create 167 in
RQNameIntHashtbl.iter (fn ht) comp_ctxt.compiled_functions;
{ compiled_static_context = comp_ctxt.compiled_static_context;
compiled_functions = ht;
compiled_variables = RQNameHashtbl.copy comp_ctxt.compiled_variables;
compiled_has_input = comp_ctxt.compiled_has_input;
next_variable = comp_ctxt.next_variable}
If a binding from context_2 is in context_1 , 's binding is replaced
let update_compile_context context_1 context_2 =
let check_fn ht key binding =
if RQNameIntHashtbl.mem context_1.compiled_functions key
then RQNameIntHashtbl.add ht key (RQNameIntHashtbl.find context_1.compiled_functions key)
else RQNameIntHashtbl.add ht key binding
in
map_function_bodies context_2 check_fn
type logical_compile_context = (unit, Ast_path_struct.path_annotation) compile_context
|
07c63b6a465c64eae687e1e82e997659fb793f215036bb5ddfb084efa229e1be | CryptoKami/cryptokami-core | Configuration.hs | {-# LANGUAGE Rank2Types #-}
module Pos.Infra.Configuration
( InfraConfiguration (..)
, HasInfraConfiguration
, infraConfiguration
, withInfraConfiguration
, ntpServers
) where
import Data.Aeson (FromJSON (..), genericParseJSON)
import Data.Reflection (Given, give, given)
import Serokell.Aeson.Options (defaultOptions)
import Universum
data InfraConfiguration = InfraConfiguration
{
--------------------------------------------------------------------------
-- -- NTP slotting
--------------------------------------------------------------------------
ccNtpResponseTimeout :: !Int
^ How often request to NTP server and response collection
, ccNtpPollDelay :: !Int
^ How often send request to NTP server
, ccNtpMaxError :: !Int
^ error ( max difference between local and global time , which is trusted )
, ccNeighboursSendThreshold :: !Int
-- ^ Broadcasting threshold
, ccKademliaDumpInterval :: !Int
^ Interval for dumping Kademlia state in slots
, ccEnhancedMessageTimeout :: !Word
^ We consider node as known if it was pinged at most @ccEnhancedMessageTimeout@ sec ago
, ccEnhancedMessageBroadcast :: !Word
-- ^ Number of nodes from batch for enhanced bessage broadcast
--------------------------------------------------------------------------
-- -- Relay
--------------------------------------------------------------------------
, ccMaxReqSize :: !Word32
-- ^ Maximum `ReqMsg` size in bytes
, ccMaxInvSize :: !Word32
-- ^ Maximum `InvMsg` size in bytes
, ccMaxMempoolMsgSize :: !Word32
-- ^ Maximum `MempoolMsg` size in bytes
--------------------------------------------------------------------------
-- -- NTP checking
--------------------------------------------------------------------------
, ccTimeDifferenceWarnInterval :: !Integer
-- ^ NTP checking interval, microseconds
, ccTimeDifferenceWarnThreshold :: !Integer
-- ^ Maximum tolerable difference between NTP time
-- and local time, microseconds
} deriving (Show, Generic)
instance FromJSON InfraConfiguration where
parseJSON = genericParseJSON defaultOptions
type HasInfraConfiguration = Given InfraConfiguration
withInfraConfiguration :: InfraConfiguration -> (HasInfraConfiguration => r) -> r
withInfraConfiguration = give
infraConfiguration :: HasInfraConfiguration => InfraConfiguration
infraConfiguration = given
ntpServers :: [String]
ntpServers =
[ "time.windows.com"
, "clock.isc.org"
, "ntp5.stratum2.ru" ]
| null | https://raw.githubusercontent.com/CryptoKami/cryptokami-core/12ca60a9ad167b6327397b3b2f928c19436ae114/infra/Pos/Infra/Configuration.hs | haskell | # LANGUAGE Rank2Types #
------------------------------------------------------------------------
-- NTP slotting
------------------------------------------------------------------------
^ Broadcasting threshold
^ Number of nodes from batch for enhanced bessage broadcast
------------------------------------------------------------------------
-- Relay
------------------------------------------------------------------------
^ Maximum `ReqMsg` size in bytes
^ Maximum `InvMsg` size in bytes
^ Maximum `MempoolMsg` size in bytes
------------------------------------------------------------------------
-- NTP checking
------------------------------------------------------------------------
^ NTP checking interval, microseconds
^ Maximum tolerable difference between NTP time
and local time, microseconds | module Pos.Infra.Configuration
( InfraConfiguration (..)
, HasInfraConfiguration
, infraConfiguration
, withInfraConfiguration
, ntpServers
) where
import Data.Aeson (FromJSON (..), genericParseJSON)
import Data.Reflection (Given, give, given)
import Serokell.Aeson.Options (defaultOptions)
import Universum
data InfraConfiguration = InfraConfiguration
{
ccNtpResponseTimeout :: !Int
^ How often request to NTP server and response collection
, ccNtpPollDelay :: !Int
^ How often send request to NTP server
, ccNtpMaxError :: !Int
^ error ( max difference between local and global time , which is trusted )
, ccNeighboursSendThreshold :: !Int
, ccKademliaDumpInterval :: !Int
^ Interval for dumping Kademlia state in slots
, ccEnhancedMessageTimeout :: !Word
^ We consider node as known if it was pinged at most @ccEnhancedMessageTimeout@ sec ago
, ccEnhancedMessageBroadcast :: !Word
, ccMaxReqSize :: !Word32
, ccMaxInvSize :: !Word32
, ccMaxMempoolMsgSize :: !Word32
, ccTimeDifferenceWarnInterval :: !Integer
, ccTimeDifferenceWarnThreshold :: !Integer
} deriving (Show, Generic)
instance FromJSON InfraConfiguration where
parseJSON = genericParseJSON defaultOptions
type HasInfraConfiguration = Given InfraConfiguration
withInfraConfiguration :: InfraConfiguration -> (HasInfraConfiguration => r) -> r
withInfraConfiguration = give
infraConfiguration :: HasInfraConfiguration => InfraConfiguration
infraConfiguration = given
ntpServers :: [String]
ntpServers =
[ "time.windows.com"
, "clock.isc.org"
, "ntp5.stratum2.ru" ]
|
8c23142fc777a7dce8d6125fee3d9f32e51ba8045d2c0bdadd4256c72d47dda5 | sids/nerchuko | test.clj | (ns nerchuko.examples.newsgroups.test
(:use nerchuko.examples.newsgroups.helpers)
(:use [nerchuko helpers classification])
(:use clojure.contrib.command-line))
(defn -main [& args]
(with-command-line (if (seq args) args ["--help"])
"Classify the files in the given directories and compare the
classification with the correct class name (the directory name).
Prints the confusion matrix.
USAGE: test [options] <directory> [<directory> ...]"
[[model-file "File to load the learned model from." "/tmp/nerchuko.examples.newsgroups.model"]
dirs]
(if (seq dirs)
(let [model (read-string (slurp model-file))]
(->> dirs
load-test-dataset
(get-confusion-matrix model)
print-confusion-matrix))
(println "Error: Directory arguments missing."))))
| null | https://raw.githubusercontent.com/sids/nerchuko/8aa56497dd8e93e868713dd542667a56215522fb/src/nerchuko/examples/newsgroups/test.clj | clojure | (ns nerchuko.examples.newsgroups.test
(:use nerchuko.examples.newsgroups.helpers)
(:use [nerchuko helpers classification])
(:use clojure.contrib.command-line))
(defn -main [& args]
(with-command-line (if (seq args) args ["--help"])
"Classify the files in the given directories and compare the
classification with the correct class name (the directory name).
Prints the confusion matrix.
USAGE: test [options] <directory> [<directory> ...]"
[[model-file "File to load the learned model from." "/tmp/nerchuko.examples.newsgroups.model"]
dirs]
(if (seq dirs)
(let [model (read-string (slurp model-file))]
(->> dirs
load-test-dataset
(get-confusion-matrix model)
print-confusion-matrix))
(println "Error: Directory arguments missing."))))
| |
c4cda8ee99200b0bdad94233554909804cd86551f8922d10440a6c44f1b68e58 | ku-fpg/hermit | Interpreter.hs | {-# LANGUAGE ConstraintKinds #-}
# LANGUAGE KindSignatures #
{-# LANGUAGE GADTs #-}
# LANGUAGE InstanceSigs #
# LANGUAGE FlexibleContexts #
# LANGUAGE ScopedTypeVariables #
module HERMIT.Shell.Interpreter
( -- * The HERMIT Interpreter
Interp
, interp
, interpM
, interpEM
, interpExprH
, exprToDyns
) where
import Control.Monad (liftM, liftM2)
import Control.Monad.State (MonadState(get), gets)
import Data.Char
import Data.Dynamic
import qualified Data.Map as M
import HERMIT.External
import HERMIT.Kernel (AST)
import HERMIT.Kure
import HERMIT.Lemma
import HERMIT.Name
import HERMIT.Parser
import HERMIT.Dictionary.Navigation
import HERMIT.Dictionary.Rules
import HERMIT.Shell.Dictionary
import HERMIT.Shell.Types
| An ' Interp ' @cmd@ is a /possible/ means of converting a ' Typeable ' value to a value of type @cmd@.
data Interp :: (* -> *) -> * -> * where
Interp :: Typeable b => (b -> ExprH -> m a) -> Interp m a
-- | An 'Interp' with no effects.
interp :: (Monad m, Typeable b) => (b -> a) -> Interp m a
interp f = Interp (const . return . f)
| An ' Interp ' which can affect the shell .
interpM :: Typeable b => (b -> m a) -> Interp m a
interpM f = Interp (const . f)
| Like ' InterpM ' , but with access to the original expression .
interpEM :: Typeable b => (b -> ExprH -> m a) -> Interp m a
interpEM = Interp
instance Monad m => Functor (Interp m) where
fmap :: (a -> b) -> Interp m a -> Interp m b
fmap f (Interp g) = Interp (\ e -> liftM f . g e)
| Execute an ' ' using a given interpreter . The given interpretations
are tried in order . The first one to match ( have the proper type ) will be executed .
interpExprH :: CLMonad m => [Interp m b] -> ExprH -> m b
interpExprH interps e = exprToDyns e >>= runInterp e interps
runInterp :: forall m b. CLMonad m => ExprH -> [Interp m b] -> [Dynamic] -> m b
runInterp e interps dyns = case [f a e :: m b | Interp f <- interps, Just a <- map fromDynamic dyns] of
[] -> fail $ "Does not type-check: " ++ unparseExprH e ++ "\n"
comp:_ -> comp
exprToDyns :: MonadState CommandLineState m => ExprH -> m [Dynamic]
exprToDyns = exprToDyns' False
-- input: list length n, each elem is a variable length list of possible interpretations
-- output: variable length list, each elem is list of length n
fromDynList :: [[Dynamic]] -> [[Dynamic]]
fromDynList [] = [[]]
fromDynList (hs:dynss) = [ h:t | h <- hs, t <- fromDynList dynss ]
toBoxedList :: (Extern a, Typeable b) => [[Dynamic]] -> ([a] -> b) -> [Dynamic]
toBoxedList dyns boxCon = [ toDyn $ boxCon (map unbox l) | dl <- dyns, Just l <- [mapM fromDynamic dl] ]
exprToDyns' :: MonadState CommandLineState m => Bool -> ExprH -> m [Dynamic]
exprToDyns' _ (SrcName str) = do
TODO : remove StringBox option below
TODO : change to SrcName : : HermitName - >
return [ toDyn hn, toDyn (BindingName hn), toDyn (OccurrenceName hn), toDyn (RhsOfName hn), toDyn (StringBox str) ]
exprToDyns' _ (CoreH str) = return [ toDyn $ CoreString str ]
exprToDyns' _ (ListH exprs) = do
dyns <- liftM fromDynList $ mapM (exprToDyns' True) exprs
return $ toBoxedList dyns StringListBox
++ toBoxedList dyns (PathBox . pathToSnocPath)
-- ugly hack. The whole dynamic stuff could do with overhauling.
++ toBoxedList dyns (TransformLCorePathBox . return . pathToSnocPath)
++ toBoxedList dyns IntListBox
++ toBoxedList dyns OccurrenceNameListBox
++ toBoxedList dyns RuleNameListBox
++ toBoxedList dyns RewriteLCoreListBox
exprToDyns' rhs (CmdName str)
| all isDigit str = do
let i = read str
An Int is either an AST , or will be interpreted specially later .
toDyn $ IntBox i
, toDyn $ (read str :: AST)
]
| otherwise = do
dict <- gets (mkDictionary . cl_externals)
case M.lookup str dict of
Just dyns -> do
dyns' <- mapM provideState dyns
return $ if rhs then toDyn (StringBox str) : dyns' else dyns'
not a command , try as a string arg ... worst case : dynApply fails with " bad type of expression "
-- best case: 'help ls' works instead of 'help "ls"'. this is likewise done in the clause above
Nothing | rhs -> let f = maybe id ((:) . toDyn) $ string2considerable str
in return $ f [ toDyn $ StringBox str
, toDyn $ LemmaName str
, toDyn $ RuleName str]
| otherwise -> fail $ "User error, unrecognised HERMIT command: " ++ show str
exprToDyns' _ (AppH e1 e2) = liftM2 dynCrossApply (exprToDyns' False e1) (exprToDyns' True e2)
-- We treat externals of the type 'CommandLineState -> b' and 'PrettyPrinter -> b' specially,
-- providing their arguments from the shell state here, so they don't need a monadic return type
-- in order to access it themselves.
provideState :: MonadState CommandLineState m => Dynamic -> m Dynamic
provideState dyn = do
st <- get
case dynApply dyn (toDyn $ box st) of
Just d -> return d
Nothing -> case dynApply dyn (toDyn $ box $ cl_pretty st) of
Just d' -> return d'
Nothing -> return dyn
-- Cross product of possible applications.
dynCrossApply :: [Dynamic] -> [Dynamic] -> [Dynamic]
dynCrossApply fs xs = [ r | f <- fs, x <- xs, Just r <- return (dynApply f x)]
-------------------------------------------
| null | https://raw.githubusercontent.com/ku-fpg/hermit/3e7be430fae74a9e3860b8b574f36efbf9648dec/src/HERMIT/Shell/Interpreter.hs | haskell | # LANGUAGE ConstraintKinds #
# LANGUAGE GADTs #
* The HERMIT Interpreter
| An 'Interp' with no effects.
input: list length n, each elem is a variable length list of possible interpretations
output: variable length list, each elem is list of length n
ugly hack. The whole dynamic stuff could do with overhauling.
best case: 'help ls' works instead of 'help "ls"'. this is likewise done in the clause above
We treat externals of the type 'CommandLineState -> b' and 'PrettyPrinter -> b' specially,
providing their arguments from the shell state here, so they don't need a monadic return type
in order to access it themselves.
Cross product of possible applications.
----------------------------------------- | # LANGUAGE KindSignatures #
# LANGUAGE InstanceSigs #
# LANGUAGE FlexibleContexts #
# LANGUAGE ScopedTypeVariables #
module HERMIT.Shell.Interpreter
Interp
, interp
, interpM
, interpEM
, interpExprH
, exprToDyns
) where
import Control.Monad (liftM, liftM2)
import Control.Monad.State (MonadState(get), gets)
import Data.Char
import Data.Dynamic
import qualified Data.Map as M
import HERMIT.External
import HERMIT.Kernel (AST)
import HERMIT.Kure
import HERMIT.Lemma
import HERMIT.Name
import HERMIT.Parser
import HERMIT.Dictionary.Navigation
import HERMIT.Dictionary.Rules
import HERMIT.Shell.Dictionary
import HERMIT.Shell.Types
| An ' Interp ' @cmd@ is a /possible/ means of converting a ' Typeable ' value to a value of type @cmd@.
data Interp :: (* -> *) -> * -> * where
Interp :: Typeable b => (b -> ExprH -> m a) -> Interp m a
interp :: (Monad m, Typeable b) => (b -> a) -> Interp m a
interp f = Interp (const . return . f)
| An ' Interp ' which can affect the shell .
interpM :: Typeable b => (b -> m a) -> Interp m a
interpM f = Interp (const . f)
| Like ' InterpM ' , but with access to the original expression .
interpEM :: Typeable b => (b -> ExprH -> m a) -> Interp m a
interpEM = Interp
instance Monad m => Functor (Interp m) where
fmap :: (a -> b) -> Interp m a -> Interp m b
fmap f (Interp g) = Interp (\ e -> liftM f . g e)
| Execute an ' ' using a given interpreter . The given interpretations
are tried in order . The first one to match ( have the proper type ) will be executed .
interpExprH :: CLMonad m => [Interp m b] -> ExprH -> m b
interpExprH interps e = exprToDyns e >>= runInterp e interps
runInterp :: forall m b. CLMonad m => ExprH -> [Interp m b] -> [Dynamic] -> m b
runInterp e interps dyns = case [f a e :: m b | Interp f <- interps, Just a <- map fromDynamic dyns] of
[] -> fail $ "Does not type-check: " ++ unparseExprH e ++ "\n"
comp:_ -> comp
exprToDyns :: MonadState CommandLineState m => ExprH -> m [Dynamic]
exprToDyns = exprToDyns' False
fromDynList :: [[Dynamic]] -> [[Dynamic]]
fromDynList [] = [[]]
fromDynList (hs:dynss) = [ h:t | h <- hs, t <- fromDynList dynss ]
toBoxedList :: (Extern a, Typeable b) => [[Dynamic]] -> ([a] -> b) -> [Dynamic]
toBoxedList dyns boxCon = [ toDyn $ boxCon (map unbox l) | dl <- dyns, Just l <- [mapM fromDynamic dl] ]
exprToDyns' :: MonadState CommandLineState m => Bool -> ExprH -> m [Dynamic]
exprToDyns' _ (SrcName str) = do
TODO : remove StringBox option below
TODO : change to SrcName : : HermitName - >
return [ toDyn hn, toDyn (BindingName hn), toDyn (OccurrenceName hn), toDyn (RhsOfName hn), toDyn (StringBox str) ]
exprToDyns' _ (CoreH str) = return [ toDyn $ CoreString str ]
exprToDyns' _ (ListH exprs) = do
dyns <- liftM fromDynList $ mapM (exprToDyns' True) exprs
return $ toBoxedList dyns StringListBox
++ toBoxedList dyns (PathBox . pathToSnocPath)
++ toBoxedList dyns (TransformLCorePathBox . return . pathToSnocPath)
++ toBoxedList dyns IntListBox
++ toBoxedList dyns OccurrenceNameListBox
++ toBoxedList dyns RuleNameListBox
++ toBoxedList dyns RewriteLCoreListBox
exprToDyns' rhs (CmdName str)
| all isDigit str = do
let i = read str
An Int is either an AST , or will be interpreted specially later .
toDyn $ IntBox i
, toDyn $ (read str :: AST)
]
| otherwise = do
dict <- gets (mkDictionary . cl_externals)
case M.lookup str dict of
Just dyns -> do
dyns' <- mapM provideState dyns
return $ if rhs then toDyn (StringBox str) : dyns' else dyns'
not a command , try as a string arg ... worst case : dynApply fails with " bad type of expression "
Nothing | rhs -> let f = maybe id ((:) . toDyn) $ string2considerable str
in return $ f [ toDyn $ StringBox str
, toDyn $ LemmaName str
, toDyn $ RuleName str]
| otherwise -> fail $ "User error, unrecognised HERMIT command: " ++ show str
exprToDyns' _ (AppH e1 e2) = liftM2 dynCrossApply (exprToDyns' False e1) (exprToDyns' True e2)
provideState :: MonadState CommandLineState m => Dynamic -> m Dynamic
provideState dyn = do
st <- get
case dynApply dyn (toDyn $ box st) of
Just d -> return d
Nothing -> case dynApply dyn (toDyn $ box $ cl_pretty st) of
Just d' -> return d'
Nothing -> return dyn
dynCrossApply :: [Dynamic] -> [Dynamic] -> [Dynamic]
dynCrossApply fs xs = [ r | f <- fs, x <- xs, Just r <- return (dynApply f x)]
|
bebd984e5cccf1516dcd182c26e8749e16a46045edb6584af919abce52b2965f | CryptoKami/cryptokami-core | Script.hs | # LANGUAGE TemplateHaskell #
module Pos.Binary.Core.Script () where
import Universum
import Data.Hashable (Hashable, hashWithSalt)
import qualified PlutusCore.Program as PLCore
import qualified PlutusCore.Term as PLCore
import qualified PlutusTypes.ConSig as PLTypes
import qualified PlutusTypes.Type as PLTypes
import qualified Utils.ABT as ABT
import qualified Utils.Names as Names
import qualified Utils.Vars as Vars
import Pos.Binary.Class (Bi (..), Cons (..), Field (..), deriveSimpleBi, genericDecode,
genericEncode, serialize')
import Pos.Core.Common (Script (..), ScriptVersion)
import Pos.Core.Script ()
instance Bi Vars.FreeVar where
encode = genericEncode
decode = genericDecode
instance Bi Vars.MetaVar where
encode = genericEncode
decode = genericDecode
instance Bi Vars.BoundVar where
encode = genericEncode
decode = genericDecode
instance Bi PLTypes.TyConSig where
encode = genericEncode
decode = genericDecode
instance Bi PLTypes.ConSig where
encode = genericEncode
decode = genericDecode
instance Bi a => Bi (Names.Sourced a) where
encode = genericEncode
decode = genericDecode
instance Bi ABT.Variable where
encode = genericEncode
decode = genericDecode
instance (Typeable f, Bi (f (ABT.Scope f))) => Bi (ABT.ABT f) where
encode = genericEncode
decode = genericDecode
instance (Typeable f, Bi (f (ABT.Scope f))) => Bi (ABT.Scope f) where
encode = genericEncode
decode = genericDecode
instance (Typeable r, Bi r) => Bi (PLCore.ClauseF r) where
encode = genericEncode
decode = genericDecode
instance Bi a => Bi (PLCore.TermF a) where
encode = genericEncode
decode = genericDecode
instance Bi a => Bi (PLTypes.TypeF a) where
encode = genericEncode
decode = genericDecode
instance Bi PLCore.SimplePattern where
encode = genericEncode
decode = genericDecode
instance Bi PLCore.PrimData where
encode = genericEncode
decode = genericDecode
instance Bi PLCore.Program where
encode = genericEncode
decode = genericDecode
deriveSimpleBi ''Script [
Cons 'Script [
Field [| scrVersion :: ScriptVersion |],
Field [| scrScript :: ByteString |]
]]
instance Hashable PLCore.Term where
hashWithSalt s = hashWithSalt s . serialize'
instance Hashable PLCore.Program where
hashWithSalt s = hashWithSalt s . serialize'
| null | https://raw.githubusercontent.com/CryptoKami/cryptokami-core/12ca60a9ad167b6327397b3b2f928c19436ae114/core/Pos/Binary/Core/Script.hs | haskell | # LANGUAGE TemplateHaskell #
module Pos.Binary.Core.Script () where
import Universum
import Data.Hashable (Hashable, hashWithSalt)
import qualified PlutusCore.Program as PLCore
import qualified PlutusCore.Term as PLCore
import qualified PlutusTypes.ConSig as PLTypes
import qualified PlutusTypes.Type as PLTypes
import qualified Utils.ABT as ABT
import qualified Utils.Names as Names
import qualified Utils.Vars as Vars
import Pos.Binary.Class (Bi (..), Cons (..), Field (..), deriveSimpleBi, genericDecode,
genericEncode, serialize')
import Pos.Core.Common (Script (..), ScriptVersion)
import Pos.Core.Script ()
instance Bi Vars.FreeVar where
encode = genericEncode
decode = genericDecode
instance Bi Vars.MetaVar where
encode = genericEncode
decode = genericDecode
instance Bi Vars.BoundVar where
encode = genericEncode
decode = genericDecode
instance Bi PLTypes.TyConSig where
encode = genericEncode
decode = genericDecode
instance Bi PLTypes.ConSig where
encode = genericEncode
decode = genericDecode
instance Bi a => Bi (Names.Sourced a) where
encode = genericEncode
decode = genericDecode
instance Bi ABT.Variable where
encode = genericEncode
decode = genericDecode
instance (Typeable f, Bi (f (ABT.Scope f))) => Bi (ABT.ABT f) where
encode = genericEncode
decode = genericDecode
instance (Typeable f, Bi (f (ABT.Scope f))) => Bi (ABT.Scope f) where
encode = genericEncode
decode = genericDecode
instance (Typeable r, Bi r) => Bi (PLCore.ClauseF r) where
encode = genericEncode
decode = genericDecode
instance Bi a => Bi (PLCore.TermF a) where
encode = genericEncode
decode = genericDecode
instance Bi a => Bi (PLTypes.TypeF a) where
encode = genericEncode
decode = genericDecode
instance Bi PLCore.SimplePattern where
encode = genericEncode
decode = genericDecode
instance Bi PLCore.PrimData where
encode = genericEncode
decode = genericDecode
instance Bi PLCore.Program where
encode = genericEncode
decode = genericDecode
deriveSimpleBi ''Script [
Cons 'Script [
Field [| scrVersion :: ScriptVersion |],
Field [| scrScript :: ByteString |]
]]
instance Hashable PLCore.Term where
hashWithSalt s = hashWithSalt s . serialize'
instance Hashable PLCore.Program where
hashWithSalt s = hashWithSalt s . serialize'
| |
9b04a9b6a0d382ab8a0e03758b9f5d5af08ac689a7523217f369dbc6503380d7 | iu-parfunc/verified-instances | Sum.hs | {-@ LIQUID "--higherorder" @-}
{-@ LIQUID "--exactdc" @-}
{-@ LIQUID "--noadt" @-}
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeFamilies #
module GenericProofs.VerifiedEq.Examples.Sum where
import Language.Haskell.Liquid.ProofCombinators
import GenericProofs.Iso
import GenericProofs.TH
import GenericProofs.VerifiedEq
import GenericProofs.VerifiedEq.Generics
import GenericProofs.VerifiedEq.Instances
import Generics.Deriving.Newtypeless.Base.Internal
data MySum = MyLeft Int | MyRight Double
{-@ axiomatize fromMySum @-}
{-@ axiomatize toMySum @-}
{-@ tofMySum :: a:MySum
-> { toMySum (fromMySum a) == a }
@-}
{-@ fotMySum :: a:RepMySum x
-> { fromMySum (toMySum a) == a }
@-}
$(deriveIso "RepMySum"
"toMySum" "fromMySum"
"tofMySum" "fotMySum"
"isoMySum"
''MySum)
veqMySum :: VerifiedEq MySum
veqMySum = veqIso (isoSym isoMySum) $ veqM1
$ veqSum (veqM1 $ veqM1 $ veqK1 veqInt)
(veqM1 $ veqM1 $ veqK1 veqDouble)
| null | https://raw.githubusercontent.com/iu-parfunc/verified-instances/cebfdf1e3357a693360be74c90211be18ce3c045/generic-proofs/src/GenericProofs/VerifiedEq/Examples/Sum.hs | haskell | @ LIQUID "--higherorder" @
@ LIQUID "--exactdc" @
@ LIQUID "--noadt" @
@ axiomatize fromMySum @
@ axiomatize toMySum @
@ tofMySum :: a:MySum
-> { toMySum (fromMySum a) == a }
@
@ fotMySum :: a:RepMySum x
-> { fromMySum (toMySum a) == a }
@ | # LANGUAGE TemplateHaskell #
# LANGUAGE TypeFamilies #
module GenericProofs.VerifiedEq.Examples.Sum where
import Language.Haskell.Liquid.ProofCombinators
import GenericProofs.Iso
import GenericProofs.TH
import GenericProofs.VerifiedEq
import GenericProofs.VerifiedEq.Generics
import GenericProofs.VerifiedEq.Instances
import Generics.Deriving.Newtypeless.Base.Internal
data MySum = MyLeft Int | MyRight Double
$(deriveIso "RepMySum"
"toMySum" "fromMySum"
"tofMySum" "fotMySum"
"isoMySum"
''MySum)
veqMySum :: VerifiedEq MySum
veqMySum = veqIso (isoSym isoMySum) $ veqM1
$ veqSum (veqM1 $ veqM1 $ veqK1 veqInt)
(veqM1 $ veqM1 $ veqK1 veqDouble)
|
2275782fcceedc0ec05ae6fb7ab55ae9244198ae9eea89aa94781e6fa2877cd5 | DeepSec-prover/deepsec | process.ml | (**************************************************************************)
(* *)
DeepSec
(* *)
, project PESTO ,
, project PESTO ,
, project PESTO ,
(* *)
Copyright ( C ) INRIA 2017 - 2020
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU General Public License version 3.0 as described in the
(* file LICENSE *)
(* *)
(**************************************************************************)
open Types
open Term
open Formula
open Display
(*** Simple tools on processes ***)
(* Apply the current variable substitution (the [link] fields) to every
   term occurring inside a pattern. *)
let rec instantiate_pattern pat = match pat with
  | PatEquality t -> PatEquality (Term.instantiate t)
  | PatTuple(f,args) ->
    let args' = List.map instantiate_pattern args in
    PatTuple(f,args')
  | _ -> pat
(* Apply the current variable substitution to every term and pattern of a
   process; positions are preserved unchanged. *)
let rec instantiate proc = match proc with
  | Nil -> Nil
  | Output(c,t,p,pos) ->
    Output(Term.instantiate c, Term.instantiate t, instantiate p, pos)
  | Input(c,pat,p,pos) ->
    Input(Term.instantiate c, instantiate_pattern pat, instantiate p, pos)
  | IfThenElse(t1,t2,p1,p2,pos) ->
    IfThenElse(Term.instantiate t1, Term.instantiate t2, instantiate p1, instantiate p2, pos)
  | Let(pat,t,p1,p2,pos) ->
    Let(instantiate_pattern pat, Term.instantiate t, instantiate p1, instantiate p2, pos)
  | New(n,p,pos) -> New(n, instantiate p, pos)
  | Par p_list -> Par (List.map instantiate p_list)
  | Bang(p_list,pos) -> Bang(List.map instantiate p_list, pos)
  | Choice(p1,p2,pos) -> Choice(instantiate p1, instantiate p2, pos)
(*** Display functions (for debugging) ***)
(* Return [str] followed by a newline, preceded by [n] indentation steps. *)
let display_with_tab n str =
  let rec indent = function
    | 0 -> ""
    | k -> "  " ^ indent (k-1)
  in
  indent n ^ str ^ "\n"
(* Print a position: the main index alone, or followed by the replication
   arguments between brackets when there are any. *)
let display_position (i,args) = match args with
  | [] -> string_of_int i
  | _ -> Printf.sprintf "%d[%s]" i (display_list string_of_int "," args)
(* Print a pattern with the terminal display conventions. *)
let rec display_pattern pat = match pat with
  | PatVar x -> Variable.display Terminal x
  | PatEquality t -> "=" ^ (Term.display Terminal t)
  | PatTuple(_,args) ->
    Printf.sprintf "%s%s%s" (langle Terminal) (display_list display_pattern "," args) (rangle Terminal)
(* Pretty print a process for debugging: one action per line, indented by
   [tab] steps, every action prefixed by its position between braces. *)
let rec display tab = function
  | Nil -> (display_with_tab tab "Nil")
  | Output(ch,t,p,pos) ->
    let str = Printf.sprintf "{%s} out(%s,%s);" (display_position pos) (Term.display Terminal ch) (Term.display Terminal t) in
    (display_with_tab tab str) ^ (display tab p)
  | Input(ch,pat,p,pos) ->
    let str = Printf.sprintf "{%s} in(%s,%s);" (display_position pos) (Term.display Terminal ch) (display_pattern pat) in
    (display_with_tab tab str) ^ (display tab p)
  (* A conditional with a Nil else branch is printed without "else" and
     without increasing the indentation. *)
  | IfThenElse(t1,t2,pthen,Nil,pos) ->
    let str = Printf.sprintf "{%s} if %s = %s then" (display_position pos) (Term.display Terminal t1) (Term.display Terminal t2) in
    let str_then = display tab pthen in
    (display_with_tab tab str) ^ str_then
  | IfThenElse(t1,t2,pthen,pelse,pos) ->
    let str = Printf.sprintf "{%s} if %s = %s then" (display_position pos) (Term.display Terminal t1) (Term.display Terminal t2) in
    let str_then = display (tab+1) pthen in
    let str_else = display (tab+1) pelse in
    let str_neg = "else" in
    (display_with_tab tab str) ^ str_then ^ (display_with_tab tab str_neg) ^ str_else
  (* Same convention for let: the else branch is omitted when it is Nil. *)
  | Let(pat,t,pthen,Nil,pos) ->
    let str = Printf.sprintf "{%s} let %s = %s in" (display_position pos) (display_pattern pat) (Term.display Terminal t) in
    let str_then = display tab pthen in
    (display_with_tab tab str) ^ str_then
  | Let(pat,t,pthen,pelse,pos) ->
    let str = Printf.sprintf "{%s} let %s = %s in" (display_position pos) (display_pattern pat) (Term.display Terminal t) in
    let str_then = display (tab+1) pthen in
    let str_else = display (tab+1) pelse in
    let str_neg = "else" in
    (display_with_tab tab str) ^ str_then ^ (display_with_tab tab str_neg) ^ str_else
  | New(n,p,pos) ->
    let str = Printf.sprintf "{%s} new %s;" (display_position pos) (Name.display Terminal n) in
    (display_with_tab tab str) ^ (display tab p)
  (* Parallel branches are separated by ") | (" ; replication uses brackets. *)
  | Par p_list ->
    (display_with_tab tab "(") ^
    (display_list (display (tab+1)) (display_with_tab tab ") | (") p_list) ^
    (display_with_tab tab ")")
  | Bang(p_list,pos) ->
    (display_with_tab tab (Printf.sprintf "{%s} [" (display_position pos))) ^
    (display_list (display (tab+1)) (display_with_tab tab ") | (") p_list) ^
    (display_with_tab tab "]")
  | Choice(p1,p2,pos) ->
    let str_1 = display (tab+1) p1 in
    let str_2 = display (tab+1) p2 in
    let str_plus = Printf.sprintf "{%s} +" (display_position pos) in
    str_1 ^ (display_with_tab tab str_plus) ^ str_2
(* Print a transition label of the semantics (for debugging traces). *)
let display_transition tr =
  let r ch = Recipe.display Terminal ch in
  let p pos = display_position pos in
  match tr with
  | AInput(ch,x,pos) -> Printf.sprintf "in(%s,%s,%s)" (r ch) (r x) (p pos)
  | AOutput(ch,pos) -> Printf.sprintf "out(%s,%s)" (r ch) (p pos)
  | ATau pos -> Printf.sprintf "tau(%s)" (p pos)
  | ABang(i,pos) -> Printf.sprintf "bang(%d,%s)" i (p pos)
  | AChoice(pos,side) -> Printf.sprintf "choice(%s,%b)" (p pos) side
  | AComm(pos_out,pos_in) -> Printf.sprintf "comm(%s,%s)" (p pos_out) (p pos_in)
  | AEaves(ch,pos_out,pos_in) -> Printf.sprintf "eaves(%s,%s,%s)" (r ch) (p pos_out) (p pos_in)
(*****************************************
Transformation and simplifications
******************************************)
(*** Transform process with pure fresh name ***)
(* Raised as soon as a name is found to occur more than once. *)
exception Occur_More_Than_Once

(* Check that the name [n] occurs at most once (syntactically) in a term;
   [already_occ_ref] records whether one occurrence was already seen. *)
let rec occur_at_most_once_term n already_occ_ref = function
  | Name n' when n == n' ->
    if !already_occ_ref then raise Occur_More_Than_Once;
    already_occ_ref := true
  | Func(_,args) -> List.iter (occur_at_most_once_term n already_occ_ref) args
  | _ -> ()
(* Syntactic occurrence of the name [n] in a term (physical equality). *)
let rec occur_in_term n t = match t with
  | Name n' -> n == n'
  | Func(_,args) -> List.exists (occur_in_term n) args
  | _ -> false
(* Syntactic occurrence of the name [n] in a pattern (only equality
   sub-patterns may contain names). *)
let rec occur_in_pattern n pat = match pat with
  | PatEquality t -> occur_in_term n t
  | PatTuple(_,args) -> List.exists (occur_in_pattern n) args
  | _ -> false
(* Syntactic occurrence of the name [n] anywhere in a process. *)
let rec occur_in_process n proc =
  let in_t = occur_in_term n in
  let in_pat = occur_in_pattern n in
  match proc with
  | Nil -> false
  | Output(c,t,p,_) -> in_t c || in_t t || occur_in_process n p
  | Input(c,pat,p,_) -> in_t c || in_pat pat || occur_in_process n p
  | IfThenElse(t1,t2,p1,p2,_) ->
    in_t t1 || in_t t2 || occur_in_process n p1 || occur_in_process n p2
  | Let(pat,t,p1,p2,_) ->
    in_pat pat || in_t t || occur_in_process n p1 || occur_in_process n p2
  | New(_,p,_) -> occur_in_process n p
  | Par p_list
  | Bang(p_list,_) -> List.exists (occur_in_process n) p_list
  | Choice(p1,p2,_) -> occur_in_process n p1 || occur_in_process n p2
(* A name [n] bound by [New n] on top of [p] is "pure fresh" when it occurs
   at most once on each execution branch of [p], and only in output terms:
   any occurrence in a channel, a condition or a let pattern disqualifies it.
   NOTE(review): occurrences inside input patterns are not checked here —
   presumably input patterns contain no names at this stage; confirm. *)
let is_name_pure_fresh n p =
  let already_occ_ref = ref false in
  let rec explore = function
    | Nil -> ()
    | Output(ch,t,p,_) ->
      if occur_in_term n ch
      then raise Occur_More_Than_Once;
      occur_at_most_once_term n already_occ_ref t;
      explore p
    | Input(ch,_,p,_) ->
      if occur_in_term n ch
      then raise Occur_More_Than_Once;
      explore p
    | IfThenElse(t1,t2,p1,p2,_) ->
      if occur_in_term n t1
      then raise Occur_More_Than_Once;
      if occur_in_term n t2
      then raise Occur_More_Than_Once;
      explore_branch p1 p2
    | Let(pat,t,p1,p2,_) ->
      if occur_in_pattern n pat
      then raise Occur_More_Than_Once;
      if occur_in_term n t
      then raise Occur_More_Than_Once;
      explore_branch p1 p2
    | New(_,p,_) -> explore p;
    | Par p_list
    | Bang(p_list, _) -> List.iter explore p_list
    | Choice(p1,p2,_) ->
      explore_branch p1 p2
  (* The two branches are exclusive: each one is explored with the occurrence
     flag as it was before the branching, and the flag is set afterwards if
     either branch contained an occurrence. *)
  and explore_branch p1 p2 =
    let tmp = !already_occ_ref in
    explore p1;
    let r1 = !already_occ_ref in
    already_occ_ref := tmp;
    explore p2;
    already_occ_ref := r1 || !already_occ_ref
  in
  try
    explore p;
    true
  with Occur_More_Than_Once -> false
(* Replace in a term every name carrying an [NLink] by its linked image;
   unlinked names and variables are kept unchanged. *)
let rec replace_name_in_term t = match t with
  | Name { link_n = NLink n'; _ } -> Name n'
  | Func(f,args) -> Func(f, List.map replace_name_in_term args)
  | _ -> t
(* Apply the name replacement to the terms occurring inside a pattern. *)
let rec replace_name_in_pattern pat = match pat with
  | PatEquality t -> PatEquality (replace_name_in_term t)
  | PatTuple(f,args) -> PatTuple(f, List.map replace_name_in_pattern args)
  | _ -> pat
(* Apply the current name links ([NLink]) throughout a process, including the
   names bound by [New].
   NOTE(review): terms inside input patterns are not traversed (the pattern
   [x] is kept as is) — presumably input patterns are variable-only at this
   stage; confirm against the callers. *)
let rec replace_name_in_process = function
  | Nil -> Nil
  | Output(ch,t,p,pos) ->
    Output(replace_name_in_term ch,replace_name_in_term t,replace_name_in_process p,pos)
  | Input(ch,x,p,pos) ->
    Input(replace_name_in_term ch,x,replace_name_in_process p,pos)
  | IfThenElse(t1,t2,p1,p2,pos) ->
    IfThenElse(replace_name_in_term t1,replace_name_in_term t2,replace_name_in_process p1, replace_name_in_process p2,pos)
  | Let(pat,t,p1,p2,pos) ->
    Let(replace_name_in_pattern pat,replace_name_in_term t,replace_name_in_process p1, replace_name_in_process p2, pos)
  | Choice(p1,p2,pos) ->
    Choice(replace_name_in_process p1, replace_name_in_process p2,pos)
  | Par p_list ->
    Par (List.map replace_name_in_process p_list)
  | Bang(p_list,pos) ->
    Bang(List.map replace_name_in_process p_list,pos)
  (* Or-pattern: when the bound name is linked, the first alternative binds
     [n] to its image; otherwise the second alternative keeps the original
     name. Both alternatives bind the same variables, as OCaml requires. *)
  | New({ link_n = NLink n; _},p,pos)
  | New(n,p,pos) -> New(n,replace_name_in_process p,pos)
(* Detect all names of [p] that are pure fresh (see [is_name_pure_fresh]) and
   replace each of them, binder included, by a fresh "pure fresh" copy. *)
let detect_and_replace_pure_fresh_name p =
  let acc_pure_fresh_name = ref [] in
  (* Collect every restricted name whose scope satisfies the purity check. *)
  let rec retrieve_pure_fresh_name = function
    | Nil -> ()
    | Output(_,_,p,_)
    | Input(_,_,p,_) -> retrieve_pure_fresh_name p
    | IfThenElse(_,_,p1,p2,_)
    | Let(_,_,p1,p2,_)
    | Choice(p1,p2,_) ->
      retrieve_pure_fresh_name p1;
      retrieve_pure_fresh_name p2
    | Par p_list
    | Bang (p_list,_) -> List.iter retrieve_pure_fresh_name p_list
    | New(n,p,_) ->
      if is_name_pure_fresh n p
      then acc_pure_fresh_name := n :: !acc_pure_fresh_name;
      retrieve_pure_fresh_name p
  in
  retrieve_pure_fresh_name p;
  Config.debug (fun () ->
    let str =
      if !acc_pure_fresh_name = []
      then "None"
      else Display.display_list (Name.display Display.Terminal) ", " !acc_pure_fresh_name
    in
    Config.log_in_debug Config.Always (Printf.sprintf "Pure fresh name detected: %s" str)
  );
  (* Link each detected name to a pure-fresh copy and substitute throughout;
     the links are discarded when the cleanup returns. *)
  Name.auto_cleanup_with_reset_notail (fun () ->
    List.iter (fun n ->
      let n' = Name.pure_fresh_from n in
      Name.link n n'
    ) !acc_pure_fresh_name;
    replace_name_in_process p
  )
(*** Clean processes ****)
(* A process containing neither an input nor an output is observationally
   dead code and can be pruned by [clean]. *)
let rec exists_input_or_output proc = match proc with
  | Output _ | Input _ -> true
  | Nil -> false
  | IfThenElse(_,_,p1,p2,_)
  | Let(_,_,p1,p2,_)
  | Choice(p1,p2,_) -> exists_input_or_output p1 || exists_input_or_output p2
  | New(_,p,_) -> exists_input_or_output p
  | Par p_list
  | Bang(p_list,_) -> List.exists exists_input_or_output p_list
(* Prune every sub-process that contains no input nor output, replacing it by
   Nil. Note that a Par or Bang reduced to a single branch is kept as such
   (no flattening is done here). *)
let rec clean proc =
  if exists_input_or_output proc
  then
    match proc with
    | Nil -> Config.internal_error "[process.ml >> clean] Unexpected case."
    | Output(c,t,p,pos) -> Output(c,t,clean p,pos)
    | Input(c,x,p,pos) -> Input(c,x,clean p, pos)
    | IfThenElse(t1,t2,p1,p2,pos) -> IfThenElse(t1,t2,clean p1, clean p2, pos)
    | Let(t1,t2,p1,p2,pos) -> Let(t1,t2,clean p1,clean p2,pos)
    | New(n,p,pos) -> New(n,clean p,pos)
    | Par p_list ->
      (* Keep only the parallel branches that are not dead after cleaning. *)
      let p_list' =
        List.fold_right (fun p acc ->
          let p' = clean p in
          if p' = Nil
          then acc
          else p'::acc
        ) p_list []
      in
      if p_list' = []
      then Nil
      else Par p_list'
    | Choice(p1,p2,pos) ->
      let p1' = clean p1 in
      let p2' = clean p2 in
      (* A choice is kept as soon as one side is alive: choosing the dead
         side is still an observable behaviour. *)
      if p1' = Nil && p2' = Nil
      then Nil
      else Choice(p1',p2',pos)
    | Bang(p_list,pos) ->
      let p_list' =
        List.fold_right (fun p acc ->
          let p' = clean p in
          if p' = Nil
          then acc
          else p'::acc
        ) p_list []
      in
      if p_list' = []
      then Nil
      else Bang(p_list',pos)
  else Nil
(*** Place the new names as far as possible ***)
(* Placeholder position for nodes created by the transformations. *)
let dummy_pos = (0,[])

(* Put the restriction of [n] back directly on top of [proc]. When [fresh]
   is set (the name now binds in several branches), a fresh copy of the name
   is created and substituted inside [proc] instead. *)
let apply_replacement n fresh proc =
  if not fresh
  then New(n,proc,dummy_pos)
  else
    let n' = Name.fresh_from n in
    Name.auto_cleanup_with_reset_notail (fun () ->
      Name.link n n';
      New(n',replace_name_in_process proc,dummy_pos)
    )
(* Push the restriction of [n] as deep as possible inside [proc]: descend
   until [n] is actually used (or used in several parallel branches) and
   reinstall [New n] there via [apply_replacement]. When descending under a
   branching construct the name may end up bound on both sides, hence the
   recursive calls switch [fresh] to true so each side gets its own copy. *)
let rec insert_name n fresh proc = match proc with
  | Nil -> Nil
  | Output(c,t,p,pos) ->
    if occur_in_term n c || occur_in_term n t
    then apply_replacement n fresh proc
    else Output(c,t,insert_name n fresh p,pos)
  | Input(c,pat,p,pos) ->
    if occur_in_term n c || occur_in_pattern n pat
    then apply_replacement n fresh proc
    else Input(c,pat,insert_name n fresh p,pos)
  | IfThenElse(t1,t2,p1,p2,pos) ->
    if occur_in_term n t1 || occur_in_term n t2
    then apply_replacement n fresh proc
    else IfThenElse(t1,t2,insert_name n true p1, insert_name n true p2,pos)
  | Let(pat,t,p1,p2,pos) ->
    if occur_in_term n t || occur_in_pattern n pat
    then apply_replacement n fresh proc
    else Let(pat,t,insert_name n true p1, insert_name n true p2,pos)
  | New(n',p,pos) -> New(n',insert_name n fresh p,pos)
  | Par p_list ->
    (* Find the first parallel branch using [n]; if it is the only one, the
       restriction can move inside it, otherwise it must stay on the Par. *)
    let rec explore prev = function
      | [] -> None
      | p::q ->
        if occur_in_process n p
        then Some(List.rev prev, p, q)
        else explore (p::prev) q
    in
    begin match explore [] p_list with
      | None -> proc
      | Some(prev,p,next) ->
        if List.exists (occur_in_process n) next
        then apply_replacement n fresh proc
        else Par(prev@(insert_name n fresh p)::next)
    end
  | Bang(p_list,_) ->
    (* All copies of a Bang are equal: checking the head is enough. The
       restriction never moves inside a replication. *)
    let p = List.hd p_list in
    if occur_in_process n p
    then apply_replacement n fresh proc
    else proc
  | Choice(p1,p2,pos) -> Choice(insert_name n true p1,insert_name n true p2,pos)
(* Push every restriction [New n] of the process as deep as possible, using
   [insert_name] each time a binder is encountered (bottom-up). *)
let rec move_new_name proc = match proc with
  | New(n,p,_) -> insert_name n false (move_new_name p)
  | Nil -> Nil
  | Output(c,t,p,pos) -> Output(c,t,move_new_name p,pos)
  | Input(c,pat,p,pos) -> Input(c,pat,move_new_name p,pos)
  | IfThenElse(t1,t2,p1,p2,pos) -> IfThenElse(t1,t2,move_new_name p1,move_new_name p2,pos)
  | Let(pat,t,p1,p2,pos) -> Let(pat,t,move_new_name p1,move_new_name p2,pos)
  | Par p_list -> Par (List.map move_new_name p_list)
  | Bang(p_list,pos) -> Bang(List.map move_new_name p_list,pos)
  | Choice(p1,p2,pos) -> Choice(move_new_name p1,move_new_name p2,pos)
(*** Apply trivial let ***)
(* The function also replace terms in output and input by variables
when they have destructors. *)
(* Normalise inputs and outputs so that their channel and message are plain
   variables: each term with potential destructors is first bound by a let
   with a fresh variable, turning failure of the term into a let failure. *)
let rec add_let_for_output_input = function
  | Nil -> Nil
  | Output(c,t,p,pos) ->
    (* out(c,t) becomes let x = c in let y = t in out(x,y). *)
    let x = Variable.fresh Free in
    let y = Variable.fresh Free in
    Let(PatVar x,c,
      Let(PatVar y,t,
        Output(Var x,Var y,add_let_for_output_input p,pos),
        Nil,
        dummy_pos
      ),
      Nil,
      dummy_pos
    )
  | Input(c,pat,p,pos) ->
    begin match pat with
      | PatVar _ ->
        (* The pattern is already a variable: only the channel is bound. *)
        let x = Variable.fresh Free in
        Let(PatVar x,c,Input(Var x,pat,add_let_for_output_input p,pos),Nil,dummy_pos)
      | _ ->
        (* in(c,pat) becomes let x = c in in(x,y); let pat = y in ... *)
        let (x,y) = Variable.fresh Free, Variable.fresh Free in
        let inside_proc = Let(pat,Var y,add_let_for_output_input p, Nil,dummy_pos) in
        Let(PatVar x,c,Input(Var x,PatVar y,inside_proc,pos),Nil,dummy_pos)
    end
  | IfThenElse(t1,t2,p1,p2,pos) -> IfThenElse(t1,t2,add_let_for_output_input p1,add_let_for_output_input p2,pos)
  | Let(pat,t,p1,p2,pos) -> Let(pat,t,add_let_for_output_input p1,add_let_for_output_input p2,pos)
  | New(n,p,pos) -> New(n,add_let_for_output_input p,pos)
  | Par p_list -> Par (List.map add_let_for_output_input p_list)
  | Bang(p_list,pos) -> Bang(List.map add_let_for_output_input p_list,pos)
  | Choice(p1,p2,pos) -> Choice(add_let_for_output_input p1, add_let_for_output_input p2,pos)
(* True when none of the variables of [vars_pat] occurs in the term. *)
let rec does_not_occurs vars_pat t = match t with
  | Var v -> not (List.memq v vars_pat)
  | Func(_,args) -> List.for_all (does_not_occurs vars_pat) args
  | _ -> true
(* Turn a pattern into the term it filters, recording in the reference
   [vars_pat] every variable bound by the pattern (side effect). *)
let rec term_of_pattern vars_pat pat = match pat with
  | PatEquality t -> t
  | PatVar x ->
    vars_pat := x :: !vars_pat;
    Var x
  | PatTuple(f,args) -> Func(f, List.map (term_of_pattern vars_pat) args)
(* Reorient the equations so that, whenever possible, pattern variables are
   on the left-hand side: an equation [x = x'] with [x'] a pattern variable
   and [x] not is flipped into [x' = x], the swap being propagated to all the
   other equations before restarting the scan. *)
let update_equations_with_pattern_vars equations pat_vars =
  let rec explore prev = function
    | [] -> prev
    | (x,Var x')::q when not (List.memq x pat_vars) && List.memq x' pat_vars ->
      let new_equations =
        Variable.auto_cleanup_with_reset_notail (fun () ->
          Variable.link_term x' (Var x);
          let prev' = List.map (fun (y,t) -> (y,Term.instantiate t)) prev in
          let q' = List.map (fun (y,t) -> (y,Term.instantiate t)) q in
          (((x',Var x)::prev')@q')
        )
      in
      (* Restart from scratch: the instantiation may have created new
         flippable equations in the part already scanned. *)
      explore [] new_equations
    | eq::q -> explore (eq::prev) q
  in
  let new_equations = explore [] equations in
  Config.debug (fun () ->
    List.iter (function
      | (x,Var x') when List.memq x' pat_vars && List.memq x pat_vars -> Config.internal_error "[process.ml >> update_equations_with_pattern_vars] Should not occur"
      | _ -> ()
    ) new_equations
  );
  new_equations
(* Simplify trivial conditionals and lets: a test that always succeeds (resp.
   fails) modulo the rewrite system is replaced by its then (resp. else)
   branch, and a let with a unique, pattern-variable-only solution is removed
   by instantiating its then branch.
   Fix: the [| [equations] ->] match arm and the delimiters of its two leading
   comments had been lost, leaving [equations] unbound and the block
   syntactically invalid; they are restored here. *)
let rec apply_trivial_let = function
  | Nil -> Nil
  | Output(c,t,p,pos) -> Output(c,t,apply_trivial_let p,pos)
  | Input(c,pat,p,pos) -> Input(c,pat,apply_trivial_let p, pos)
  | IfThenElse(t1,t2,p1,p2,pos) ->
    let (_,formula_neg) = Rewrite_rules.compute_equality_modulo_and_rewrite [t1,t2] in
    (* Bot (resp. Top) negative formula means the equality always (resp.
       never) holds. *)
    if formula_neg = Formula.T.Bot
    then apply_trivial_let p1
    else if formula_neg = Formula.T.Top
    then apply_trivial_let p2
    else IfThenElse(t1,t2,apply_trivial_let p1,apply_trivial_let p2,pos)
  | Let(pat,t,p1,p2,pos) ->
    let vars_pat = ref [] in
    let (equations_list,_) = Rewrite_rules.compute_equality_modulo_and_rewrite [term_of_pattern vars_pat pat, t] in
    begin match equations_list with
      | [] -> (* Always else branch *)
        apply_trivial_let p2
      | [equations] ->
        (* One unique solution. We first check that there is no existential
           variable. *)
        if List.for_all (fun (x,t') -> x.quantifier <> Existential && not (Term.quantified_var_occurs Existential t')) equations
        then
          let new_equations = update_equations_with_pattern_vars equations !vars_pat in
          (* We now check that all variables in the domain are from the pattern *)
          if List.for_all (fun (x,t') -> (List.memq x !vars_pat) && does_not_occurs !vars_pat t') new_equations
          then
            begin
              (* We can instantiate and remove the Let *)
              Config.debug (fun () ->
                if not (List.for_all (fun (_,t') -> does_not_occurs !vars_pat t') new_equations)
                then Config.internal_error "[process.ml >> apply_trivial_let] Having only variables from the pattern in the domain should imply that no variables in the image are from the pattern."
              );
              let p1' =
                Variable.auto_cleanup_with_reset_notail (fun () ->
                  List.iter (fun (x,t') -> Variable.link_term x t') new_equations;
                  instantiate p1
                )
              in
              apply_trivial_let p1'
            end
          else
            (* We can instantiate but we need to keep the Let *)
            let p1' =
              Variable.auto_cleanup_with_reset_notail (fun () ->
                List.iter (fun (x,t') -> Variable.link_term x t') new_equations;
                instantiate p1
              )
            in
            Let(pat,t,apply_trivial_let p1',apply_trivial_let p2,pos)
        else Let(pat,t,apply_trivial_let p1, apply_trivial_let p2,pos)
      | _ -> Let(pat,t,apply_trivial_let p1, apply_trivial_let p2,pos)
    end
  | New(n,p,pos) -> New(n,apply_trivial_let p,pos)
  | Par p_list -> Par (List.map apply_trivial_let p_list)
  | Bang(p_list,pos) -> Bang(List.map apply_trivial_let p_list,pos)
  | Choice(p1,p2,pos) -> Choice(apply_trivial_let p1,apply_trivial_let p2, pos)
(*** Equality modulo renaming ***)
(* Since it is pre-process, we compute the bijective renaming slower than
we could do. *)
(* Raised when two processes cannot be matched modulo a bijective renaming. *)
exception No_Match
(* Variables and names already used as *images* of the bijection being built
   (each image may be the target of at most one source). *)
let linked_bijection_vars = ref []
let linked_bijection_names = ref []
(* Run [f_next] while saving the current state of the bijection: on return or
   on [No_Match], the variable/name links and the two image lists are
   restored to what they were before the call (backtracking point). *)
let cleanup_all_linked f_next =
  Variable.auto_cleanup_with_exception (fun () ->
    Name.auto_cleanup_with_exception (fun () ->
      let tmp_vars = !linked_bijection_vars in
      let tmp_names = !linked_bijection_names in
      try
        let r = f_next () in
        linked_bijection_vars := tmp_vars;
        linked_bijection_names := tmp_names;
        r
      with No_Match ->
        linked_bijection_vars := tmp_vars;
        linked_bijection_names := tmp_names;
        raise No_Match
    )
  )
(* Extend the current bijective renaming so that [t1] is mapped onto [t2]:
   unlinked variables/names of [t1] are linked to their counterpart in [t2]
   provided that counterpart is not already the image of something else;
   already-linked ones must map exactly. Raises [No_Match] otherwise.
   The case order matters: the linked cases must be tried before the
   unlinked ones. *)
let rec match_term t1 t2 = match t1, t2 with
  | Var { link = VLink x; _ }, Var y when x == y -> ()
  | Var ({ link = NoLink; _} as x), Var y ->
    if List.memq y !linked_bijection_vars
    then raise No_Match;
    Variable.link x y;
    linked_bijection_vars := y :: !linked_bijection_vars
  | Func(f1,args1), Func(f2,args2) when f1 == f2 ->
    List.iter2 match_term args1 args2
  | Name { link_n = NLink n1; _}, Name n2 when n1 == n2 -> ()
  | Name ({ link_n = NNoLink; _} as n1), Name n2 ->
    if List.memq n2 !linked_bijection_names
    then raise No_Match;
    Name.link n1 n2;
    linked_bijection_names := n2 :: !linked_bijection_names
  | _, _ -> raise No_Match
(* Extend the current bijection so that [pat1] maps onto [pat2]; raises
   [No_Match] when the two patterns do not have the same shape. *)
let rec match_pattern pat1 pat2 = match pat1, pat2 with
  | PatVar x1, PatVar x2 -> match_term (Var x1) (Var x2)
  | PatEquality t1, PatEquality t2 -> match_term t1 t2
  | PatTuple(f1,args1), PatTuple(f2,args2) ->
    if f1 != f2 then raise No_Match;
    List.iter2 match_pattern args1 args2
  | _ -> raise No_Match
(* The processes of a Bang were matched only once (on their first copy):
   duplicate each matched position pair for every copy index [1..size1-1],
   replacing in the position arguments the index added for the Bang. *)
let duplicate_position_match pos_match (_,args1) (_,args2) size1 =
  (* [replace i prefix args] returns [args] where the index appended after
     the strict prefix [prefix] is replaced by [i]. *)
  let rec replace i prefix args = match prefix, args with
    | [], [] -> Config.internal_error "[process.ml >> duplicate_position_match] The prefix should be strict."
    | [], n::q ->
      Config.debug (fun () ->
        if size1 <> n
        then Config.internal_error "[process.ml >> duplicate_position_match] Only the max index should have been added"
      );
      i::q
    | n_p::q_p, n::q ->
      Config.debug (fun () ->
        if n_p <> n
        then Config.internal_error "[process.ml >> duplicate_position_match] It should be a prefix."
      );
      replace i q_p q
    | _, [] -> Config.internal_error "[process.ml >> duplicate_position_match] It should be a prefix (2)."
  in
  let new_pos_match = ref [] in
  List.iter (fun (((id1',args1'),(id2',args2')) as matchings) ->
    new_pos_match := matchings :: !new_pos_match;
    for i = 1 to size1 - 1 do
      let pos1 = (id1',replace i args1 args1') in
      let pos2 = (id2',replace i args2 args2') in
      new_pos_match := (pos1,pos2):: !new_pos_match
    done
  ) pos_match;
  !new_pos_match
(* [equal_modulo_renaming f_next p1 p2] checks that [p1] and [p2] are equal
   up to a bijective renaming of their bound variables and names. Written in
   continuation-passing style: on success [f_next] receives the list of
   matched position pairs, otherwise [No_Match] is raised. Commutative
   constructs (if/else branches, choice sides, parallel branches) are tried
   in both orders, backtracking through [cleanup_all_linked]. *)
let rec equal_modulo_renaming f_next proc1 proc2 = match proc1, proc2 with
  | Nil, Nil -> f_next []
  | Output(c1,t1,p1,pos1), Output(c2,t2,p2,pos2) ->
    cleanup_all_linked (fun () ->
      match_term c1 c2;
      match_term t1 t2;
      equal_modulo_renaming (fun pos_match ->
        f_next ((pos1,pos2)::pos_match)
      ) p1 p2
    )
  | Input(c1,pat1,p1,pos1), Input(c2,pat2,p2,pos2) ->
    cleanup_all_linked (fun () ->
      match_term c1 c2;
      match_pattern pat1 pat2;
      equal_modulo_renaming (fun pos_match ->
        f_next ((pos1,pos2)::pos_match)
      ) p1 p2
    )
  (* Conditionals: try the branches in the same order first, then swapped. *)
  | IfThenElse(t1,t2,p1,p2,_), IfThenElse(t1',t2',p1',p2',_) ->
    begin
      try
        cleanup_all_linked (fun () ->
          match_term t1 t1';
          match_term t2 t2';
          equal_modulo_renaming (fun pos_match ->
            equal_modulo_renaming (fun pos_match' ->
              f_next (pos_match @ pos_match')
            ) p2 p2'
          ) p1 p1'
        )
      with No_Match ->
        cleanup_all_linked (fun () ->
          match_term t1 t2';
          match_term t2 t1';
          equal_modulo_renaming (fun pos_match ->
            equal_modulo_renaming (fun pos_match' ->
              f_next (pos_match @ pos_match')
            ) p2 p2'
          ) p1 p1'
        )
    end
  | Let(pat,t,p1,p2,_), Let(pat',t',p1',p2',_) ->
    cleanup_all_linked (fun () ->
      match_pattern pat pat';
      match_term t t';
      equal_modulo_renaming (fun pos_match ->
        equal_modulo_renaming (fun pos_match' ->
          f_next (pos_match @ pos_match')
        ) p2 p2'
      ) p1 p1'
    )
  (* Consecutive restrictions are matched as a block, in any order. *)
  | New _, New _ -> gather_names_and_match f_next [] [] proc1 proc2
  | Par p_list1, Par p_list2 when List.length p_list1 = List.length p_list2 -> equal_modulo_renaming_list f_next p_list1 p_list2
  (* Bangs: match only the first copies and duplicate the position pairs. *)
  | Bang(p_list1,pos1), Bang(p_list2,pos2) ->
    let size1 = List.length p_list1 in
    let size2 = List.length p_list2 in
    if size1 <> size2
    then raise No_Match;
    if size1 = 0
    then Config.internal_error "[process.ml >> equal_modulo_renaming] Bang should have at least one process.";
    let p1 = List.hd p_list1 in
    let p2 = List.hd p_list2 in
    equal_modulo_renaming (fun pos_match ->
      let pos_match' = duplicate_position_match pos_match pos1 pos2 size1 in
      f_next pos_match'
    ) p1 p2
  | Choice(p1,p2,pos1), Choice(p1',p2',pos2) ->
    begin
      try
        equal_modulo_renaming (fun pos_match ->
          equal_modulo_renaming (fun pos_match' ->
            f_next ((pos1,pos2)::pos_match @ pos_match')
          ) p2 p2'
        ) p1 p1'
      with No_Match ->
        equal_modulo_renaming (fun pos_match ->
          equal_modulo_renaming (fun pos_match' ->
            f_next ((pos1,pos2)::pos_match @ pos_match')
          ) p2 p1'
        ) p1 p2'
    end
  | _ -> raise No_Match

(* Match two lists of parallel processes: each process of the first list is
   matched against some process of the second, removing it from the pool. *)
and equal_modulo_renaming_list f_next proc_l1 proc_l2 = match proc_l1 with
  | [] -> f_next []
  | p1::q1 ->
    equal_modulo_renaming_list_one (fun pos_match q2 ->
      equal_modulo_renaming_list (fun pos_match' ->
        f_next (pos_match@pos_match')
      ) q1 q2
    ) p1 [] proc_l2

(* Try [p1] against each remaining candidate, backtracking on [No_Match];
   the continuation also receives the candidates left unmatched. *)
and equal_modulo_renaming_list_one f_next p1 prev_2 = function
  | [] -> raise No_Match
  | p2::q2 ->
    try
      equal_modulo_renaming (fun pos_match ->
        f_next pos_match (prev_2@q2)
      ) p1 p2
    with No_Match -> equal_modulo_renaming_list_one f_next p1 (p2::prev_2) q2

(* Collect the maximal blocks of leading restrictions on both sides, then
   match the bodies and verify that every name of the first block was linked
   to a name of the second (i.e. the blocks are in bijection). *)
and gather_names_and_match f_next n_l1 n_l2 proc1 proc2 = match proc1, proc2 with
  | New(n1,p1,_), New(n2,p2,_) -> gather_names_and_match f_next (n1::n_l1) (n2::n_l2) p1 p2
  | New _, _
  | _, New _ -> raise No_Match
  | _, _ ->
    equal_modulo_renaming (fun pos_match ->
      List.iter (fun n -> match n.link_n with
        | NLink n' ->
          if not (List.memq n' n_l2)
          then raise No_Match
        | _ -> Config.internal_error "[process.ml >> gather_names_and_match] Used new names should have been removed."
      ) n_l1;
      f_next pos_match
    ) proc1 proc2
(*** Join equal else branches ***)
(* Strip the leading restrictions of a process and expose the conditional
   (if or let, the former encoded as a [PatEquality] let) found below them,
   together with the list of stripped names; [None] when the process does not
   start with restrictions followed by a conditional. *)
let rec gather_names_let proc = match proc with
  | IfThenElse(t1,t2,pthen,pelse,_) -> Some(PatEquality t1,t2,pthen,pelse,[])
  | Let(pat,t,pthen,pelse,_) -> Some(pat,t,pthen,pelse,[])
  | New(n,p,_) ->
    begin match gather_names_let p with
      | Some(pat,t,pthen,pelse,name_list) -> Some(pat,t,pthen,pelse,n::name_list)
      | None -> None
    end
  | _ -> None
(* Record the name [n] as mapped to itself in the bijection (used when the
   same process is compared against a sibling branch, so that its bound
   names must match themselves). *)
let self_match_name n = match n.link_n with
  | NLink n' ->
    Config.debug (fun () ->
      if n != n'
      then Config.internal_error "[process.ml >> self_match_name] The name should be link to itself."
    )
  | NNoLink ->
    Name.link n n;
    linked_bijection_names := n :: !linked_bijection_names
  | _ -> Config.internal_error "[process.ml >> self_match_name] Unexpected link."
(* Record every variable bound by the pattern as mapped to itself in the
   bijection (counterpart of [self_match_name] for pattern variables). *)
let rec self_match_pattern = function
  | PatEquality _ -> ()
  | PatTuple(_,args) -> List.iter self_match_pattern args
  | PatVar ({ link = VLink x; _ } as x') ->
    Config.debug (fun () ->
      if x != x'
      then Config.internal_error "[process.ml >> self_match_pattern] The variable should be link to itself."
    );
    ()
  | PatVar ({ link = NoLink; _ } as x) ->
    Variable.link x x;
    linked_bijection_vars := x :: !linked_bijection_vars
  | PatVar _ -> Config.internal_error "[process.ml >> self_match_pattern] Unexpected link for variable."
(* Wrap [p] under the restrictions of all the given names, first name of the
   list outermost; the created nodes carry [dummy_pos]. *)
let add_names p names =
  List.fold_right (fun n acc -> New(n,acc,dummy_pos)) names p
(* Merge an else branch equal (modulo renaming) to the else branch of the
   conditional directly below the then branch: the two tests are fused into a
   single let over a pair, so only one else branch remains. Returns the
   transformed process together with the position pairs matched during the
   merges. Bound names and pattern variables are registered as self-matched
   while descending, so the renaming found by [equal_modulo_renaming] is the
   identity on them. *)
let rec regroup_else_branches = function
  | Nil -> Nil, []
  | Output(c,t,p,pos) ->
    let (p',pos_match') = regroup_else_branches p in
    Output(c,t,p',pos), pos_match'
  | Input(c,pat,p,pos) ->
    cleanup_all_linked (fun () ->
      self_match_pattern pat;
      let (p',pos_match') = regroup_else_branches p in
      Input(c,pat,p',pos), pos_match'
    )
  | IfThenElse(t1,t2,p1,p2,pos) ->
    let (p1',pos_match1) = regroup_else_branches p1 in
    let (p2',pos_match2) = regroup_else_branches p2 in
    begin match gather_names_let p1' with
      | None -> IfThenElse(t1,t2,p1',p2',pos), (pos_match1 @ pos_match2)
      | Some(pat,t,pthen,pelse,names_l) ->
        begin
          try
            (* The merge applies only when our else branch equals (modulo
               renaming) the else branch of the inner conditional. *)
            let new_matchings =
              cleanup_all_linked (fun () ->
                List.iter self_match_name names_l;
                equal_modulo_renaming (fun matchings ->
                  matchings @ pos_match1 @ pos_match2
                ) p2' pelse
              )
            in
            (* Fuse both tests into: let <=t1,pat> = <t2,t> in ... *)
            let f = Symbol.get_tuple 2 in
            let new_pat = PatTuple(f,[PatEquality t1;pat]) in
            let new_t = Func(f,[t2;t]) in
            let p = Let(new_pat,new_t,pthen,pelse,dummy_pos) in
            add_names p names_l, new_matchings
          with No_Match -> IfThenElse(t1,t2,p1',p2',pos), (pos_match1 @ pos_match2)
        end
    end
  | Let(pat,t,p1,p2,pos) ->
    cleanup_all_linked (fun () ->
      self_match_pattern pat;
      let (p1',pos_match1) = regroup_else_branches p1 in
      let (p2',pos_match2) = regroup_else_branches p2 in
      begin match gather_names_let p1' with
        | None -> Let(pat,t,p1',p2',pos), (pos_match1 @ pos_match2)
        | Some(pat',t',pthen,pelse,names_l) ->
          begin
            try
              let new_matchings =
                cleanup_all_linked (fun () ->
                  List.iter self_match_name names_l;
                  equal_modulo_renaming (fun matchings ->
                    matchings @ pos_match1 @ pos_match2
                  ) p2' pelse
                )
              in
              (* Fuse both lets into one over the pair of patterns. *)
              let f = Symbol.get_tuple 2 in
              let new_pat = PatTuple(f,[pat;pat']) in
              let new_t = Func(f,[t;t']) in
              let p = Let(new_pat,new_t,pthen,pelse,dummy_pos) in
              add_names p names_l, new_matchings
            with No_Match ->
              Let(pat,t,p1',p2',pos), (pos_match1 @ pos_match2)
          end
      end
    )
  | New(n,p,pos) ->
    cleanup_all_linked (fun () ->
      self_match_name n;
      let (p',pos_match') = regroup_else_branches p in
      New(n,p',pos), pos_match'
    )
  | Par p_list ->
    let (p_list', pos_match) =
      List.fold_right (fun p (acc_p,acc_match) ->
        let (p',pos_match') = regroup_else_branches p in
        (p'::acc_p,pos_match'@acc_match)
      ) p_list ([],[])
    in
    Par p_list', pos_match
  | Bang(p_list,pos) ->
    let (p_list', pos_match) =
      List.fold_right (fun p (acc_p,acc_match) ->
        let (p',pos_match') = regroup_else_branches p in
        (p'::acc_p,pos_match'@acc_match)
      ) p_list ([],[])
    in
    Bang(p_list',pos), pos_match
  | Choice(p1,p2,pos) ->
    let (p1',pos_match1) = regroup_else_branches p1 in
    let (p2',pos_match2) = regroup_else_branches p2 in
    Choice(p1',p2',pos), pos_match1 @ pos_match2
(*** Regroup equal process from par in bang ***)
(* Group the branches of a Par that are equal modulo renaming into Bangs, and
   flatten nested Bangs. As in [regroup_else_branches], bound names and
   pattern variables are self-matched on the way down so that the renaming
   is the identity on them. *)
let rec regroup_equal_par_processes = function
  | Nil -> Nil
  | Output(c,t,p,pos) -> Output(c,t,regroup_equal_par_processes p,pos)
  | Input(c,pat,p,pos) ->
    cleanup_all_linked (fun () ->
      self_match_pattern pat;
      Input(c,pat,regroup_equal_par_processes p,pos)
    )
  | IfThenElse(t1,t2,p1,p2,pos) -> IfThenElse(t1,t2,regroup_equal_par_processes p1, regroup_equal_par_processes p2,pos)
  | Let(pat,t,p1,p2,pos) ->
    cleanup_all_linked (fun () ->
      self_match_pattern pat;
      Let(pat,t,regroup_equal_par_processes p1,regroup_equal_par_processes p2,pos)
    )
  | New(n,p,pos) ->
    cleanup_all_linked (fun () ->
      self_match_name n;
      New(n,regroup_equal_par_processes p,pos)
    )
  | Par p_list ->
    (* Insert [p] into the equivalence class (modulo renaming) it belongs
       to, or create a new singleton class. *)
    let rec insert_in_proc_list_list p = function
      | [] -> [[p]]
      | (p'::q)::q_list ->
        begin try
          equal_modulo_renaming (fun _ -> ()) p p';
          (p::p'::q)::q_list
        with No_Match ->
          (p'::q)::(insert_in_proc_list_list p q_list)
        end
      | []::_ -> Config.internal_error "[process.ml >> regroup_equal_par_processes] Unexpected case"
    in
    let rec regroup_list = function
      | [] -> []
      | p::q ->
        let proc_list_list = regroup_list q in
        insert_in_proc_list_list p proc_list_list
    in
    (* Each class of size >= 2 becomes a Bang. *)
    let par_list =
      List.map (function
        | [] -> Config.internal_error "[process.ml >> regroup_equal_par_processes] Unexpected case 2"
        | [p] -> p
        | p_list -> Bang(p_list,dummy_pos)
      ) (regroup_list p_list)
    in
    begin match par_list with
      | [] -> Config.internal_error "[process.ml >> regroup_equal_par_processes] Unexpected case 3"
      | [p] -> p
      | _ -> Par par_list
    end
  | Bang(p_list,pos) ->
    let p_list' = List.map regroup_equal_par_processes p_list in
    let p = List.hd p_list in
    begin match p with
      (* The copies of a Bang are equal, so if one recursive result is a
         Bang they all are: flatten Bang of Bangs into a single Bang. *)
      | Bang _ ->
        let p_list'' =
          List.fold_right (fun p' acc -> match p' with
            | Bang(p_list'',_) -> p_list''@acc
            | _ -> Config.internal_error "[process.ml >> regroup_equal_par_processes] Should only be bang processes."
          ) p_list' []
        in
        Bang(p_list'',pos)
      | _ -> Bang(p_list',pos)
    end
  | Choice(p1,p2,pos) -> Choice(regroup_equal_par_processes p1, regroup_equal_par_processes p2,pos)
(*** Replace private constant by names ***)
(* Replace in a term every private constant that appears in the association
   list [assoc] by its fresh name.
   Fix: [assoc] only contains the private constants that occur in no rewrite
   rule (see [replace_private_name]), yet any private constant matched the
   first case, so [List.assq] raised an uncaught [Not_found] for the excluded
   ones. Such constants are now kept unchanged. *)
let rec replace_private_name_term assoc t = match t with
  | Func(f,[]) when not f.public ->
    (* Constants outside the domain of [assoc] are left as they are. *)
    (try Name (List.assq f assoc) with Not_found -> t)
  | Func(f,args) -> Func(f,List.map (replace_private_name_term assoc) args)
  | _ -> t
(* Apply the constant-to-name replacement inside a pattern. *)
let rec replace_private_name_pattern assoc pat = match pat with
  | PatEquality t -> PatEquality(replace_private_name_term assoc t)
  | PatTuple(f,args) -> PatTuple(f, List.map (replace_private_name_pattern assoc) args)
  | _ -> pat
(* Structural map of [replace_private_name_term] / [replace_private_name_pattern]
   over a whole process: every term and pattern of the process is rewritten,
   positions and binders are preserved. *)
let rec replace_private_name_process assoc = function
  | Nil -> Nil
  | Output(ch,t,p,pos) -> Output(replace_private_name_term assoc ch, replace_private_name_term assoc t, replace_private_name_process assoc p,pos)
  | Input(ch,pat,p,pos) -> Input(replace_private_name_term assoc ch, replace_private_name_pattern assoc pat, replace_private_name_process assoc p,pos)
  | IfThenElse(t1,t2,p1,p2,pos) -> IfThenElse(replace_private_name_term assoc t1, replace_private_name_term assoc t2, replace_private_name_process assoc p1, replace_private_name_process assoc p2,pos)
  | Let(pat,t,p1,p2,pos) -> Let(replace_private_name_pattern assoc pat, replace_private_name_term assoc t, replace_private_name_process assoc p1, replace_private_name_process assoc p2,pos)
  | New(n,p,pos) -> New(n,replace_private_name_process assoc p,pos)
  | Par plist -> Par (List.map (replace_private_name_process assoc) plist)
  | Bang(plist,pos) -> Bang(List.map (replace_private_name_process assoc) plist,pos)
  | Choice(p1,p2,pos) -> Choice(replace_private_name_process assoc p1,replace_private_name_process assoc p2,pos)
(* [private_constant_not_in_term f t] holds iff the symbol [f] occurs nowhere
   in the term [t] (physical comparison on symbols). *)
let rec private_constant_not_in_term f = function
  | Func(g,sub_terms) ->
      (* Short-circuit: a hit at this node makes the whole check false. *)
      f != g && List.for_all (private_constant_not_in_term f) sub_terms
  | _ -> true
(* [private_constant_not_in_rewrite_rule f] holds iff the constant [f] occurs
   in no rewrite rule of the signature: both the left-hand sides and the
   right-hand side of every destructor rule are scanned.
   Raises an internal error if [Symbol.all_destructors] contains a
   non-destructor symbol. *)
let private_constant_not_in_rewrite_rule f =
  List.for_all (fun f' -> match f'.cat with
    | Destructor rw_list ->
        List.for_all (fun (lhs,rhs) ->
          private_constant_not_in_term f rhs && List.for_all (private_constant_not_in_term f) lhs
        ) rw_list
    | _ -> Config.internal_error "[process.ml >> private_constant_not_in_rewrite_rule] Should only contain destructor functions."
  ) !Symbol.all_destructors
(* Replace private constants by restricted names.
   Every non-public constant of arity 0 that does not appear in any rewrite
   rule is mapped to a fresh name; the process is rewritten with that mapping
   and one [New] binder per fresh name is added on top of the result.
   If no such constant exists the process is returned untouched. *)
let replace_private_name proc =
  let assoc =
    List.fold_left (fun acc f ->
      if not f.public && f.arity = 0 && private_constant_not_in_rewrite_rule f
      then
        let n = Name.fresh_with_label f.label_s in
        (f,n)::acc
      else acc
    ) [] !Symbol.all_constructors
  in
  if assoc = []
  then proc
  else
    (* Bind each generated name with a top-level restriction; the [New]
       nodes are synthetic, hence [dummy_pos]. *)
    List.fold_left (fun acc_p (_,n) ->
      New(n,acc_p,dummy_pos)
    ) (replace_private_name_process assoc proc) assoc
(*** General function ***)
(* A configuration of the operational semantics used by the trace-translation
   functions below. *)
type configuration =
  {
    frame : term list;   (* terms output so far, in emission order; [Axiom i] refers to the i-th element (1-indexed) *)
    process : process    (* the remaining process to execute *)
  }
(* Position equality modulo the renaming [pos_match]: [pos] and [pos'] match
   when they are identical, or when [pos_match] maps [pos] to [pos']. *)
let is_equal_pos pos_match pos pos' =
  pos = pos' ||
    (match List.assoc_opt pos pos_match with
     | Some mapped -> mapped = pos'
     | None -> false)
(* Does [pos] (modulo [pos_match]) designate the first visible action of some
   branch of [proc]? Only the positions of inputs/outputs and of choice
   operators are compared; the branches of a [Choice] are not explored
   (presumably because no action below a choice can fire before the choice
   itself is resolved — NOTE(review): confirm against the semantics). *)
let is_pos_in_process pos_match pos proc =
  let rec explore = function
    | Nil -> false
    | Output(_,_,_,pos')
    | Input(_,_,_,pos') -> is_equal_pos pos_match pos' pos
    | IfThenElse(_,_,p1,p2,_)
    | Let(_,_,p1,p2,_) -> explore p1 || explore p2
    | New(_,p,_) -> explore p
    | Par p_list
    | Bang (p_list,_) -> List.exists explore p_list
    | Choice(_,_,pos') -> is_equal_pos pos_match pos' pos
  in
  explore proc
(* Instantiate the variables of [t] and normalise the result modulo the
   rewrite system. Variable links created during normalisation are undone;
   exceptions (e.g. [Rewrite_rules.Not_message]) propagate to the caller. *)
let instantiate_term t =
  Variable.auto_cleanup_with_exception (fun () ->
    Rewrite_rules.normalise (Term.instantiate t)
  )
(* Normalising variant of pattern instantiation (shadows the plain
   [instantiate_pattern] defined earlier in this file): instantiates the
   pattern's variables and normalises its terms, undoing variable links. *)
let instantiate_pattern pat =
  Variable.auto_cleanup_with_exception (fun () ->
    Rewrite_rules.normalise_pattern (Term.instantiate_pattern pat)
  )
(* Evaluate the ground recipe [r] against [frame]: axioms are 1-indexed
   references into the frame, function symbols are applied structurally, and
   the resulting term is normalised. Any other recipe constructor, or a
   non-message result, is an internal error (the caller must supply a valid
   ground recipe over the frame). *)
let apply_ground_recipe_on_frame frame r =
  let rec explore = function
    | RFunc(f,args) -> Func(f,List.map explore args)
    | Axiom i -> List.nth frame (i-1)   (* axioms are numbered from 1 *)
    | _ -> Config.internal_error "[process.ml >> apply_ground_recipe_on_frame] Unexpected recipe."
  in
  try
    Variable.auto_cleanup_with_exception (fun () -> Rewrite_rules.normalise (explore r))
  with Rewrite_rules.Not_message -> Config.internal_error "[process.ml >> apply_ground_recipe_on_frame] The recipe should be a message."
(* Replay the single visible action [act] (whose position is understood
   modulo [pos_match]) on configuration [conf], in CPS style.
   The continuation [f_next] receives:
     - the position of the action in THIS process,
     - the list of silent actions (ATau/ABang) that had to be performed to
       reach the action, in order,
     - the resulting configuration.
   Conditionals, lets and restrictions are traversed silently, recording an
   [ATau]; [Par]/[Bang] dispatch to the helpers below to locate the branch
   containing the action's position. A failed input (non-unifiable pattern or
   non-message) continues with the [Nil] process. *)
let rec retrieve_transition_list f_next pos_match act conf = match conf.process,act with
  | Output(_,t,p,pos), AOutput(_,pos') when is_equal_pos pos_match pos pos' ->
      (* The emitted term is appended to the frame (next axiom index). *)
      f_next pos [] { frame = conf.frame@[Term.instantiate t]; process = p }
  | Input(_,pat,p,pos), AInput(_,r_t,pos') when is_equal_pos pos_match pos pos' ->
      let t = apply_ground_recipe_on_frame conf.frame r_t in
      begin try
        let pat' = instantiate_pattern pat in
        Variable.auto_cleanup_with_exception (fun () ->
          Term.unify pat' t;
          f_next pos [] { conf with process = p }
        )
      with Term.Not_unifiable | Rewrite_rules.Not_message ->
        (* The input cannot succeed: the continuation process is dead. *)
        f_next pos [] { conf with process = Nil }
      end
  | IfThenElse(t1,t2,p1,p2,pos), _ ->
      (* Resolve the test now; a non-message comparison selects the else branch. *)
      let do_then_branch =
        try
          Term.is_equal (instantiate_term t1) (instantiate_term t2)
        with Rewrite_rules.Not_message -> false
      in
      if do_then_branch
      then retrieve_transition_list (fun pos' act_l conf' -> f_next pos' ((ATau pos)::act_l) conf') pos_match act { conf with process = p1 }
      else retrieve_transition_list (fun pos' act_l conf' -> f_next pos' ((ATau pos)::act_l) conf') pos_match act { conf with process = p2 }
  | Let(pat,t,p1,p2,pos), _ ->
      begin try
        let pat' = instantiate_pattern pat in
        let t' = instantiate_term t in
        Variable.auto_cleanup_with_exception (fun () ->
          Term.unify pat' t';
          retrieve_transition_list (fun pos' act_l conf' -> f_next pos' ((ATau pos)::act_l) conf') pos_match act { conf with process = p1 }
        )
      with Rewrite_rules.Not_message | Term.Not_unifiable ->
        retrieve_transition_list (fun pos' act_l conf' -> f_next pos' ((ATau pos)::act_l) conf') pos_match act { conf with process = p2 }
      end
  | New(_,p,pos),_ -> retrieve_transition_list (fun pos' act_l conf' -> f_next pos' ((ATau pos)::act_l) conf') pos_match act { conf with process = p }
  | Par p_list, (AOutput(_,pos) | AInput(_,_,pos) | AChoice(pos,_) ) ->
      retrieve_transition_list_from_par f_next pos_match pos act conf.frame [] p_list
  | Bang(p_list,pos_bang), (AOutput(_,pos) | AInput(_,_,pos) | AChoice(pos,_) ) ->
      retrieve_transition_list_from_bang f_next pos_match pos pos_bang 1 act conf.frame [] p_list
  | Choice(p1,p2,pos), AChoice(pos',choose_left) when is_equal_pos pos_match pos pos' ->
      if choose_left
      then f_next pos [] { conf with process = p1 }
      else f_next pos [] { conf with process = p2 }
  | _ -> Config.internal_error "[process.ml >> retrieve_transition_list] Unexpected case."

(* Locate, inside the components of a [Par], the one containing position
   [pos]; replay [act] there and rebuild the parallel composition around the
   resulting subprocess. [prev_p] accumulates the components already passed. *)
and retrieve_transition_list_from_par f_next pos_match pos act frame prev_p = function
  | [] -> Config.internal_error "[process.ml >> retrieve_transition_list_from_par] We should find the position."
  | p::q ->
      if is_pos_in_process pos_match pos p
      then
        retrieve_transition_list (fun pos' act_l conf' ->
          f_next pos' act_l { conf' with process = Par(prev_p @ (conf'.process :: q)) }
        ) pos_match act { frame = frame; process = p }
      else retrieve_transition_list_from_par f_next pos_match pos act frame (prev_p@[p]) q

(* Same as above for a [Bang]: the chosen copy is unfolded (recorded as an
   [ABang] with its 1-based copy index [nb_unfold]); the remaining copies stay
   under a [Bang] unless at most one is left, in which case the replication is
   dissolved into the surrounding [Par]. *)
and retrieve_transition_list_from_bang f_next pos_match pos pos_bang nb_unfold act frame prev_p = function
  | [] -> Config.internal_error "[process.ml >> retrieve_transition_list_from_bang] We should find the position."
  | p::q ->
      if is_pos_in_process pos_match pos p
      then
        retrieve_transition_list (fun pos' act_l conf' ->
          if List.length q <= 1
          then f_next pos' (ABang(nb_unfold,pos_bang)::act_l) { conf' with process = Par(prev_p @ (conf'.process::q)) }
          else f_next pos' (ABang(nb_unfold,pos_bang)::act_l) { conf' with process = Par(prev_p @ [conf'.process; Bang(q,pos_bang)]) }
        ) pos_match act { frame = frame; process = p }
      else
        if List.length q <= 1
        then retrieve_transition_list_from_bang f_next pos_match pos pos_bang nb_unfold act frame (prev_p@[p]) q
        else retrieve_transition_list_from_bang f_next pos_match pos pos_bang (nb_unfold+1) act frame (prev_p@[p]) q
(* Replay a whole trace on configuration [conf], translating each action's
   position through [pos_match] and interleaving the silent actions produced
   by [retrieve_transition_list]. The continuation [f_next] receives the
   fully translated trace. Communications and eavesdrops are decomposed into
   their output part followed by their input part; for an internal
   communication ([AComm]) the frame extension created by the output step is
   discarded afterwards (the message never reaches the attacker). *)
let rec retrieve_trace f_next pos_match conf = function
  | [] -> f_next []
  | AOutput(r,pos)::q ->
      retrieve_transition_list (fun pos' act_l conf' ->
        retrieve_trace (fun act_l' ->
          f_next (act_l @ (AOutput(r,pos')::act_l'))
        ) pos_match conf' q
      ) pos_match (AOutput(r,pos)) conf
  | AInput(r,r_t,pos)::q ->
      retrieve_transition_list (fun pos' act_l conf' ->
        retrieve_trace (fun act_l' ->
          f_next (act_l @ (AInput(r,r_t,pos')::act_l'))
        ) pos_match conf' q
      ) pos_match (AInput(r,r_t,pos)) conf
  | AChoice(pos,choose_left)::q ->
      retrieve_transition_list (fun pos' act_l conf' ->
        retrieve_trace (fun act_l' ->
          f_next (act_l @ (AChoice(pos',choose_left)::act_l'))
        ) pos_match conf' q
      ) pos_match (AChoice(pos,choose_left)) conf
  | AEaves(r,pos_out,pos_in)::q ->
      (* Eavesdrop = output then input of the freshly added last axiom. *)
      retrieve_transition_list (fun pos_out' act_l_out conf' ->
        retrieve_transition_list (fun pos_in' act_l_in conf'' ->
          retrieve_trace (fun act_l' ->
            f_next (act_l_out @ act_l_in @ (AEaves(r,pos_out',pos_in')::act_l'))
          ) pos_match conf'' q
        ) pos_match (AInput(r,Axiom (List.length conf'.frame),pos_in)) conf'
      ) pos_match (AOutput(r,pos_out)) conf
  | AComm(pos_out,pos_in)::q ->
      (* Internal communication: dummy channel recipe (Axiom 0); the original
         frame is restored since the output is not observable. *)
      retrieve_transition_list (fun pos_out' act_l_out conf' ->
        retrieve_transition_list (fun pos_in' act_l_in conf'' ->
          retrieve_trace (fun act_l' ->
            f_next (act_l_out @ act_l_in @ (AComm(pos_out',pos_in')::act_l'))
          ) pos_match { conf'' with frame = conf.frame } q
        ) pos_match (AInput(Axiom 0,Axiom (List.length conf'.frame),pos_in)) conf'
      ) pos_match (AOutput(Axiom 0,pos_out)) conf
  | _ -> Config.internal_error "[process.ml >> retrieve_trace] Unexpected trace action."
(* Flatten chains in a position renaming: whenever an entry maps some
   position to [p1] and a later entry maps [p1] to [p2], the first entry is
   redirected straight to [p2]. [prev] accumulates the already-processed
   (and kept-up-to-date) entries; the result contains one entry per input
   pair, fully resolved. *)
let rec normalise_pos_match prev = function
  | [] -> prev
  | (p1,p2)::rest ->
      let redirect (src,dst) = if dst = p1 then (src,p2) else (src,dst) in
      normalise_pos_match ((p1,p2) :: List.map redirect prev) (List.map redirect rest)
(* Simplification pipeline for determinate processes. Returns the simplified
   process together with a translation function that maps a trace of the
   SIMPLIFIED process back onto a trace of the ORIGINAL process [p] (via the
   position renaming produced by [regroup_else_branches]). *)
let simplify_for_determinate p =
  let p0 = replace_private_name p in
  let p1 = clean p0 in
  let p2 = add_let_for_output_input p1 in
  let p3 = apply_trivial_let p2 in
  let p4 = detect_and_replace_pure_fresh_name p3 in
  let p5 = move_new_name p4 in
  let (p6,pos_match) = regroup_else_branches p5 in
  let pos_match_normalised = normalise_pos_match [] pos_match in
  Config.debug (fun () ->
    Config.log_in_debug Config.Process (Printf.sprintf "Before simplification :\n %s" (display 1 p));
    Config.log_in_debug Config.Process (Printf.sprintf "After simplification :\n %s" (display 1 p6));
  );
  (* Note: this local [retrieve_trace] shadows the toplevel one it calls; it
     replays the trace on the original process [p], not on [p6]. *)
  let retrieve_trace trans_list =
    Config.debug (fun () ->
      Config.log_in_debug Config.Process (Printf.sprintf "Input retrieve_trace = %s\nPos Match Normalised = %s\nProcess:\n%s\n"
        (display_list display_transition "; " trans_list)
        (display_list (fun (pos1,pos2) -> Printf.sprintf "(%s,%s)" (display_position pos1) (display_position pos2)) "; " pos_match_normalised)
        (display 1 p)
      )
    );
    let result = retrieve_trace (fun x -> x) pos_match_normalised { frame = []; process = p } trans_list in
    Config.debug (fun () ->
      Config.log_in_debug Config.Process (Printf.sprintf "Output retrieve_trace = %s\n" (display_list display_transition "; " result))
    );
    result
  in
  p6, retrieve_trace
(* Simplification pipeline for the generic (non-determinate) procedure.
   Same shape as [simplify_for_determinate] but without the pure-fresh-name
   detection and with an extra pass regrouping equal parallel processes into
   replications. Returns the simplified process and a function translating
   traces of the simplified process back onto the original [p]. *)
let simplify_for_generic p =
  let p0 = replace_private_name p in
  let p1 = clean p0 in
  let p2 = add_let_for_output_input p1 in
  let p3 = apply_trivial_let p2 in
  let p4 = move_new_name p3 in
  let (p5,pos_match) = regroup_else_branches p4 in
  let p6 = regroup_equal_par_processes p5 in
  let pos_match_normalised = normalise_pos_match [] pos_match in
  Config.debug (fun () ->
    Config.log_in_debug Config.Process (Printf.sprintf "Before simplification :\n %s" (display 1 p));
    Config.log_in_debug Config.Process (Printf.sprintf "After simplification :\n %s" (display 1 p6));
  );
  (* Shadows the toplevel [retrieve_trace]; replays on the original [p]. *)
  let retrieve_trace trans_list =
    Config.debug (fun () ->
      Config.log_in_debug Config.Process (Printf.sprintf "Input retrieve_trace = %s\n" (display_list display_transition "; " trans_list))
    );
    let result = retrieve_trace (fun x -> x) pos_match_normalised { frame = []; process = p } trans_list in
    Config.debug (fun () ->
      Config.log_in_debug Config.Process (Printf.sprintf "Output retrieve_trace = %s\n" (display_list display_transition "; " result))
    );
    result
  in
  p6, retrieve_trace
(*** Simplication for session equivalence ***)
(* Raised when a process falls outside the fragment supported by session
   equivalence / session inclusion; the payload is a user-facing message. *)
exception Session_error of string
(* Validate that [proc] is in the fragment accepted by session equivalence /
   inclusion:
     - channels must be public or private names/constants (no compound terms);
     - the [Choice] operator is forbidden;
     - a private name/constant used as a channel must not also occur inside a
       message, a pattern or a test.
   Works in two passes: [mark_channels] collects private channel symbols in
   [priv_symbol_channels] and marks private channel names with a search link;
   [check_channels] then rejects any occurrence of a marked symbol/name in
   message position. Name links are undone on exit, including on exception.
   Raises [Session_error] with an explanation on violation. *)
let check_process_for_session proc =
  let priv_symbol_channels = ref [] in
  (* Pass 1: record every private symbol/name used in channel position. *)
  let rec mark_channels = function
    | Nil -> ()
    | Output(Func(f,[]),_,p,_)
    | Input(Func(f,[]),_,p,_) ->
        if not f.public && not (List.memq f !priv_symbol_channels)
        then priv_symbol_channels := f :: !priv_symbol_channels;
        mark_channels p
    | Output(Name n,_,p,_)
    | Input(Name n,_,p,_) ->
        if n.link_n = NNoLink then Name.link_search n;
        mark_channels p
    | Output(ch,_,_,_) ->
        let err_msg =
          Printf.sprintf
            "The term %s was used as a channel for an output. However for session equivalence and session inclusion, only public/private names/constants are allowed."
            (Term.display Terminal ch)
        in
        raise (Session_error err_msg)
    | Input(ch,_,_,_) ->
        let err_msg =
          Printf.sprintf
            "The term %s was used as a channel for an input. However for session equivalence and session inclusion, only public/private names/constants are allowed."
            (Term.display Terminal ch)
        in
        raise (Session_error err_msg)
    | IfThenElse(_,_,p1,p2,_)
    | Let(_,_,p1,p2,_) ->
        mark_channels p1;
        mark_channels p2
    | New(_,p,_) -> mark_channels p
    | Par p_list
    | Bang (p_list,_) -> List.iter mark_channels p_list
    | Choice _ ->
        let err_msg = "Choice operator is not allowed for session equivalence and session inclusion." in
        raise (Session_error err_msg)
  in
  (* Pass 2: reject marked channel symbols/names occurring in messages. *)
  let rec check_channels_in_term = function
    | Var _ -> ()
    | Func(f,args) ->
        if not f.public && List.memq f !priv_symbol_channels
        then
          begin
            let err_msg =
              Printf.sprintf
                "The private name %s is used as a channel and within a message. In session equivalence and session inclusion, private names used as channels cannot be used within messages."
                (Symbol.display Terminal f)
            in
            raise (Session_error err_msg)
          end;
        List.iter check_channels_in_term args
    | Name n ->
        match n.link_n with
        | NNoLink -> ()
        | NSLink ->
            (* NSLink marks a name seen in channel position during pass 1. *)
            let err_msg =
              Printf.sprintf
                "The private name %s is used as a channel and within a message. In session equivalence and session inclusion, private names used as channels cannot be used within messages."
                (Name.display Terminal n)
            in
            raise (Session_error err_msg)
        | _ -> Config.internal_error "[process.ml >> check_process_for_session] Unexpected link."
  in
  let rec check_channels_in_pattern = function
    | PatVar _ -> ()
    | PatTuple(_,args) -> List.iter check_channels_in_pattern args
    | PatEquality t -> check_channels_in_term t
  in
  let rec check_channels = function
    | Nil -> ()
    | Output(_,t,p,_) ->
        check_channels_in_term t;
        check_channels p
    | Input(_,pat,p,_) ->
        check_channels_in_pattern pat;
        check_channels p
    | IfThenElse(t1,t2,p1,p2,_) ->
        check_channels_in_term t1;
        check_channels_in_term t2;
        check_channels p1;
        check_channels p2
    | Let(pat,t,p1,p2,_) ->
        check_channels_in_term t;
        check_channels_in_pattern pat;
        check_channels p1;
        check_channels p2
    | New(_,p,_) -> check_channels p
    | Par plist
    | Bang (plist,_) -> List.iter check_channels plist
    | Choice _ -> Config.internal_error "[process.ml >> check_process_for_session] Choice operator should have been catched before applying this function."
  in
  Name.auto_cleanup_with_exception (fun () ->
    mark_channels proc;
    check_channels proc
  )
(* Holds iff every communication channel of the process is a public constant
   (a public symbol of arity 0); any other channel shape yields false. *)
let rec only_public_channel proc = match proc with
  | Nil -> true
  | Output(ch,_,p,_) | Input(ch,_,p,_) ->
      begin match ch with
        | Func(f,[]) when f.public -> only_public_channel p
        | _ -> false
      end
  | IfThenElse(_,_,p1,p2,_)
  | Let(_,_,p1,p2,_)
  | Choice(p1,p2,_) -> only_public_channel p1 && only_public_channel p2
  | New(_,p,_) -> only_public_channel p
  | Par procs
  | Bang(procs,_) -> List.for_all only_public_channel procs
(* Simplification pipeline for session equivalence / inclusion. Same passes
   as the generic pipeline, plus pure-fresh-name detection — applied only
   when all channels of the ORIGINAL process are public constants (the test
   must run on [p]: [add_let_for_output_input] replaces channels by
   variables, so testing [p3] would always fail). Returns the simplified
   process and a trace-translation function onto the original [p]. *)
let simplify_for_session p =
  let p0 = replace_private_name p in
  let p1 = clean p0 in
  let p2 = add_let_for_output_input p1 in
  let p3 = apply_trivial_let p2 in
  let p4 =
    if only_public_channel p
    then detect_and_replace_pure_fresh_name p3
    else p3
  in
  let p5 = move_new_name p4 in
  let (p6,pos_match) = regroup_else_branches p5 in
  let p7 = regroup_equal_par_processes p6 in
  let pos_match_normalised = normalise_pos_match [] pos_match in
  Config.debug (fun () ->
    Config.log_in_debug Config.Always (Printf.sprintf "Before simplification :\n %s" (display 1 p));
    Config.log_in_debug Config.Always (Printf.sprintf "After simplification :\n %s" (display 1 p7));
  );
  (* Shadows the toplevel [retrieve_trace]; replays on the original [p]. *)
  let retrieve_trace trans_list =
    Config.debug (fun () ->
      Config.log_in_debug Config.Always (Printf.sprintf "[process.ml >> simplify_for_session] Input retrieve_trace = %s\n" (display_list display_transition "; " trans_list));
      Config.log_in_debug Config.Always (Printf.sprintf "[process.ml >> simplify_for_session] Process =\n%s" (display 2 p))
    );
    let result = retrieve_trace (fun x -> x) pos_match_normalised { frame = []; process = p } trans_list in
    Config.debug (fun () ->
      Config.log_in_debug Config.Always (Printf.sprintf "Output retrieve_trace = %s\n" (display_list display_transition "; " result))
    );
    result
  in
  p7, retrieve_trace
| null | https://raw.githubusercontent.com/DeepSec-prover/deepsec/8ddc45ec79de5ec49810302ea7da32d3dc9f46e4/Source/core_library/process.ml | ocaml | ************************************************************************
All rights reserved. This file is distributed under the terms of
file LICENSE
************************************************************************
** Simple tools on processes **
** Display functions (for debugging) **
****************************************
Transformation and simplifications
*****************************************
** Transform process with pure fresh name **
** Clean processes ***
** Place the new names as far as possible **
** Apply trivial let **
The function also replace terms in output and input by variables
when they have destructors.
Always else branch
We now check that all variables in the domain are from the pattern
We can instantiate and remove the Let
We can instantiate but we need to keep the Let
** Equality modulo renaming **
Since it is pre-process, we compute the bijective renaming slower than
we could do.
** Join equal else branches **
** Regroup equal process from par in bang **
** Replace private constant by names **
** General function **
** Simplication for session equivalence ** | DeepSec
, project PESTO ,
, project PESTO ,
, project PESTO ,
Copyright ( C ) INRIA 2017 - 2020
the GNU General Public License version 3.0 as described in the
open Types
open Term
open Formula
open Display
let rec instantiate_pattern = function
| PatEquality t -> PatEquality (Term.instantiate t)
| PatTuple(f,args) -> PatTuple(f,List.map instantiate_pattern args)
| pat -> pat
let rec instantiate = function
| Nil -> Nil
| Output(c,t,p,pos) -> Output(Term.instantiate c, Term.instantiate t,instantiate p,pos)
| Input(c,pat,p,pos) -> Input(Term.instantiate c, instantiate_pattern pat,instantiate p,pos)
| IfThenElse(t1,t2,p1,p2,pos) -> IfThenElse(Term.instantiate t1, Term.instantiate t2, instantiate p1, instantiate p2,pos)
| Let(pat,t,p1,p2,pos) -> Let(instantiate_pattern pat,Term.instantiate t, instantiate p1, instantiate p2, pos)
| New(n,p,pos) -> New(n,instantiate p,pos)
| Par p_list -> Par (List.map instantiate p_list)
| Bang(p_list,pos) -> Bang(List.map instantiate p_list,pos)
| Choice(p1,p2,pos) -> Choice(instantiate p1,instantiate p2,pos)
(* Return [str] indented by [n] levels and terminated by a newline.
   One level is the string literal below — NOTE(review): the exact width of
   the indentation unit should be confirmed against the original source. *)
let display_with_tab n str =
  let rec print_tab = function
    | 0 -> ""
    | n -> " "^(print_tab (n-1))
  in
  (print_tab n) ^ str ^"\n"
(* Display a position: the bare index [i], or [i[a1,...,ak]] when the copy
   argument list is non-empty. *)
let display_position (i,args) =
  if args = []
  then string_of_int i
  else Printf.sprintf "%d[%s]" i (display_list string_of_int "," args)
(* Display a pattern: equality tests as [=t], tuples between angle brackets,
   variables by their name. *)
let rec display_pattern = function
  | PatEquality t -> Printf.sprintf "=%s" (Term.display Terminal t)
  | PatTuple(_,args) -> Printf.sprintf "%s%s%s" (langle Terminal) (display_list display_pattern "," args) (rangle Terminal)
  | PatVar x -> Variable.display Terminal x
(* Pretty-print a process for debugging, indenting by [tab] levels.
   Each action is prefixed by its position between braces. Conditionals and
   lets with a [Nil] else branch omit the "else" part. Parallel compositions
   are printed between parentheses, replications between square brackets
   (NOTE(review): the Bang case reuses the ") | (" separator of Par, so its
   brackets are not perfectly balanced — cosmetic, debug output only). *)
let rec display tab = function
  | Nil -> (display_with_tab tab "Nil")
  | Output(ch,t,p,pos) ->
      let str = Printf.sprintf "{%s} out(%s,%s);" (display_position pos) (Term.display Terminal ch) (Term.display Terminal t) in
      (display_with_tab tab str) ^ (display tab p)
  | Input(ch,pat,p,pos) ->
      let str = Printf.sprintf "{%s} in(%s,%s);" (display_position pos) (Term.display Terminal ch) (display_pattern pat) in
      (display_with_tab tab str) ^ (display tab p)
  | IfThenElse(t1,t2,pthen,Nil,pos) ->
      let str = Printf.sprintf "{%s} if %s = %s then" (display_position pos) (Term.display Terminal t1) (Term.display Terminal t2) in
      let str_then = display tab pthen in
      (display_with_tab tab str) ^ str_then
  | IfThenElse(t1,t2,pthen,pelse,pos) ->
      let str = Printf.sprintf "{%s} if %s = %s then" (display_position pos) (Term.display Terminal t1) (Term.display Terminal t2) in
      let str_then = display (tab+1) pthen in
      let str_else = display (tab+1) pelse in
      let str_neg = "else" in
      (display_with_tab tab str) ^ str_then ^ (display_with_tab tab str_neg) ^ str_else
  | Let(pat,t,pthen,Nil,pos) ->
      let str = Printf.sprintf "{%s} let %s = %s in" (display_position pos) (display_pattern pat) (Term.display Terminal t) in
      let str_then = display tab pthen in
      (display_with_tab tab str) ^ str_then
  | Let(pat,t,pthen,pelse,pos) ->
      let str = Printf.sprintf "{%s} let %s = %s in" (display_position pos) (display_pattern pat) (Term.display Terminal t) in
      let str_then = display (tab+1) pthen in
      let str_else = display (tab+1) pelse in
      let str_neg = "else" in
      (display_with_tab tab str) ^ str_then ^ (display_with_tab tab str_neg) ^ str_else
  | New(n,p,pos) ->
      let str = Printf.sprintf "{%s} new %s;" (display_position pos) (Name.display Terminal n) in
      (display_with_tab tab str) ^ (display tab p)
  | Par p_list ->
      (display_with_tab tab "(") ^
      (display_list (display (tab+1)) (display_with_tab tab ") | (") p_list) ^
      (display_with_tab tab ")")
  | Bang(p_list,pos) ->
      (display_with_tab tab (Printf.sprintf "{%s} [" (display_position pos))) ^
      (display_list (display (tab+1)) (display_with_tab tab ") | (") p_list) ^
      (display_with_tab tab "]")
  | Choice(p1,p2,pos) ->
      let str_1 = display (tab+1) p1 in
      let str_2 = display (tab+1) p2 in
      let str_plus = Printf.sprintf "{%s} +" (display_position pos) in
      str_1 ^ (display_with_tab tab str_plus) ^ str_2
(* Display a single trace action (for debug logs). *)
let display_transition = function
  | AInput(ch,x,pos) -> Printf.sprintf "in(%s,%s,%s)" (Recipe.display Terminal ch) (Recipe.display Terminal x) (display_position pos)
  | AOutput(ch,pos) -> Printf.sprintf "out(%s,%s)" (Recipe.display Terminal ch) (display_position pos)
  | ATau pos -> Printf.sprintf "tau(%s)" (display_position pos)
  | ABang(i,pos) -> Printf.sprintf "bang(%d,%s)" i (display_position pos)
  | AChoice(pos,side) -> Printf.sprintf "choice(%s,%b)" (display_position pos) side
  | AComm(pos_out,pos_in) -> Printf.sprintf "comm(%s,%s)" (display_position pos_out) (display_position pos_in)
  | AEaves(ch,pos_out,pos_in) -> Printf.sprintf "eaves(%s,%s,%s)" (Recipe.display Terminal ch) (display_position pos_out) (display_position pos_in)
(* Internal control-flow exception used by the occurrence checks below to
   abort as soon as a second occurrence is found. *)
exception Occur_More_Than_Once
(* Check that name [n] occurs at most once in the term, counting across calls
   via the shared flag [already_occ_ref] (true once an occurrence was seen).
   Raises [Occur_More_Than_Once] on a second occurrence. *)
let rec occur_at_most_once_term n already_occ_ref = function
  | Name n' when n == n' ->
      if !already_occ_ref
      then raise Occur_More_Than_Once
      else already_occ_ref := true
  | Name _
  | Var _ -> ()
  | Func(_,args) -> List.iter (occur_at_most_once_term n already_occ_ref) args
(* Does the name [target] occur anywhere in the term (physical equality)? *)
let rec occur_in_term target = function
  | Name n -> n == target
  | Func(_,sub_terms) -> List.exists (occur_in_term target) sub_terms
  | _ -> false
(* Does the name [target] occur in the pattern? Names can only appear inside
   equality tests; pattern variables never contain them. *)
let rec occur_in_pattern target = function
  | PatEquality t -> occur_in_term target t
  | PatTuple(_,sub_pats) -> List.exists (occur_in_pattern target) sub_pats
  | _ -> false
(* Does the name [n] occur anywhere in the process: in a channel, a message,
   a pattern, a test, or recursively in any subprocess? (Binders are not
   inspected.) *)
let rec occur_in_process n = function
  | Nil -> false
  | Output(c,t,p,_) -> occur_in_term n c || occur_in_term n t || occur_in_process n p
  | Input(c,pat,p,_) -> occur_in_term n c || occur_in_pattern n pat || occur_in_process n p
  | IfThenElse(t1,t2,p1,p2,_) ->
      occur_in_term n t1 || occur_in_term n t2 || occur_in_process n p1 || occur_in_process n p2
  | Let(pat,t,p1,p2,_) ->
      occur_in_pattern n pat || occur_in_term n t || occur_in_process n p1 || occur_in_process n p2
  | New(_,p,_) -> occur_in_process n p
  | Par p_list
  | Bang (p_list,_) -> List.exists (occur_in_process n) p_list
  | Choice (p1,p2,_) -> occur_in_process n p1 || occur_in_process n p2
(* A name [n] is "pure fresh" in [p] when it is never used as a channel, never
   occurs in a pattern or in a test, and occurs at most once in output
   messages along EVERY execution path. The per-path counting is done with
   the mutable flag [already_occ_ref]: on branching constructs the flag is
   saved, each branch is explored from the saved value, and the results are
   joined with a disjunction (an occurrence in either branch counts for the
   continuation). Returns false as soon as a second occurrence (or any
   forbidden occurrence) is found. *)
let is_name_pure_fresh n p =
  let already_occ_ref = ref false in
  let rec explore = function
    | Nil -> ()
    | Output(ch,t,p,_) ->
        if occur_in_term n ch
        then raise Occur_More_Than_Once;
        occur_at_most_once_term n already_occ_ref t;
        explore p
    | Input(ch,_,p,_) ->
        if occur_in_term n ch
        then raise Occur_More_Than_Once;
        explore p
    | IfThenElse(t1,t2,p1,p2,_) ->
        if occur_in_term n t1
        then raise Occur_More_Than_Once;
        if occur_in_term n t2
        then raise Occur_More_Than_Once;
        explore_branch p1 p2
    | Let(pat,t,p1,p2,_) ->
        if occur_in_pattern n pat
        then raise Occur_More_Than_Once;
        if occur_in_term n t
        then raise Occur_More_Than_Once;
        explore_branch p1 p2
    | New(_,p,_) -> explore p;
    | Par p_list
    | Bang(p_list, _) -> List.iter explore p_list
    | Choice(p1,p2,_) ->
        explore_branch p1 p2
  (* Explore two exclusive branches from the same occurrence state and join
     the resulting states. *)
  and explore_branch p1 p2 =
    let tmp = !already_occ_ref in
    explore p1;
    let r1 = !already_occ_ref in
    already_occ_ref := tmp;
    explore p2;
    already_occ_ref := r1 || !already_occ_ref
  in
  try
    explore p;
    true
  with Occur_More_Than_Once -> false
(* Apply the current name links to a term: every name linked with [NLink] is
   replaced by its target; everything else is kept. *)
let rec replace_name_in_term t = match t with
  | Name { link_n = NLink target; _ } -> Name target
  | Func(symb,sub_terms) -> Func(symb, List.map replace_name_in_term sub_terms)
  | _ -> t
(* Lift [replace_name_in_term] to patterns (names may only occur inside
   equality tests or below tuples). *)
let rec replace_name_in_pattern = function
  | PatEquality t -> PatEquality (replace_name_in_term t)
  | PatTuple(f,args) -> PatTuple(f,List.map replace_name_in_pattern args)
  | pat -> pat
(* Apply the current name links throughout a process: terms and patterns are
   rewritten with [replace_name_in_term]/[replace_name_in_pattern], and a
   [New] binder whose name is linked is itself renamed to the link's target
   (both alternatives of the final or-pattern bind [n]: the first fires when
   the bound name carries an [NLink], the second keeps it unchanged). *)
let rec replace_name_in_process = function
  | Nil -> Nil
  | Output(ch,t,p,pos) ->
      Output(replace_name_in_term ch,replace_name_in_term t,replace_name_in_process p,pos)
  | Input(ch,x,p,pos) ->
      Input(replace_name_in_term ch,x,replace_name_in_process p,pos)
  | IfThenElse(t1,t2,p1,p2,pos) ->
      IfThenElse(replace_name_in_term t1,replace_name_in_term t2,replace_name_in_process p1, replace_name_in_process p2,pos)
  | Let(pat,t,p1,p2,pos) ->
      Let(replace_name_in_pattern pat,replace_name_in_term t,replace_name_in_process p1, replace_name_in_process p2, pos)
  | Choice(p1,p2,pos) ->
      Choice(replace_name_in_process p1, replace_name_in_process p2,pos)
  | Par p_list ->
      Par (List.map replace_name_in_process p_list)
  | Bang(p_list,pos) ->
      Bang(List.map replace_name_in_process p_list,pos)
  | New({ link_n = NLink n; _},p,pos)
  | New(n,p,pos) -> New(n,replace_name_in_process p,pos)
(* Find every restricted name of [p] that is "pure fresh" (see
   [is_name_pure_fresh]) and replace it throughout the process by a dedicated
   pure-fresh name ([Name.pure_fresh_from]). The replacement is done in one
   pass by linking each detected name to its substitute and running
   [replace_name_in_process]; the links are undone afterwards. *)
let detect_and_replace_pure_fresh_name p =
  let acc_pure_fresh_name = ref [] in
  let rec retrieve_pure_fresh_name = function
    | Nil -> ()
    | Output(_,_,p,_)
    | Input(_,_,p,_) -> retrieve_pure_fresh_name p
    | IfThenElse(_,_,p1,p2,_)
    | Let(_,_,p1,p2,_)
    | Choice(p1,p2,_) ->
        retrieve_pure_fresh_name p1;
        retrieve_pure_fresh_name p2
    | Par p_list
    | Bang (p_list,_) -> List.iter retrieve_pure_fresh_name p_list
    | New(n,p,_) ->
        if is_name_pure_fresh n p
        then acc_pure_fresh_name := n :: !acc_pure_fresh_name;
        retrieve_pure_fresh_name p
  in
  retrieve_pure_fresh_name p;
  Config.debug (fun () ->
    let str =
      if !acc_pure_fresh_name = []
      then "None"
      else Display.display_list (Name.display Display.Terminal) ", " !acc_pure_fresh_name
    in
    Config.log_in_debug Config.Always (Printf.sprintf "Pure fresh name detected: %s" str)
  );
  Name.auto_cleanup_with_reset_notail (fun () ->
    List.iter (fun n ->
      let n' = Name.pure_fresh_from n in
      Name.link n n'
    ) !acc_pure_fresh_name;
    replace_name_in_process p
  )
(* Holds iff the process contains at least one input or output action. *)
let rec exists_input_or_output proc = match proc with
  | Output _ | Input _ -> true
  | Nil -> false
  | New(_,p,_) -> exists_input_or_output p
  | IfThenElse(_,_,p1,p2,_)
  | Let(_,_,p1,p2,_)
  | Choice(p1,p2,_) -> exists_input_or_output p1 || exists_input_or_output p2
  | Par procs -> List.exists exists_input_or_output procs
  | Bang (procs,_) -> List.exists exists_input_or_output procs
(* Prune dead code: any subprocess containing no input and no output is
   replaced by [Nil], and [Nil] components are removed from parallel
   compositions and replications (which collapse to [Nil] when emptied).
   A [Choice] is kept as long as either branch is alive. *)
let rec clean proc =
  if exists_input_or_output proc
  then
    match proc with
    | Nil -> Config.internal_error "[process.ml >> clean] Unexpected case."
    | Output(c,t,p,pos) -> Output(c,t,clean p,pos)
    | Input(c,x,p,pos) -> Input(c,x,clean p, pos)
    | IfThenElse(t1,t2,p1,p2,pos) -> IfThenElse(t1,t2,clean p1, clean p2, pos)
    | Let(t1,t2,p1,p2,pos) -> Let(t1,t2,clean p1,clean p2,pos)
    | New(n,p,pos) -> New(n,clean p,pos)
    | Par p_list ->
        let p_list' =
          List.fold_right (fun p acc ->
            let p' = clean p in
            if p' = Nil
            then acc
            else p'::acc
          ) p_list []
        in
        if p_list' = []
        then Nil
        else Par p_list'
    | Choice(p1,p2,pos) ->
        let p1' = clean p1 in
        let p2' = clean p2 in
        if p1' = Nil && p2' = Nil
        then Nil
        else Choice(p1',p2',pos)
    | Bang(p_list,pos) ->
        let p_list' =
          List.fold_right (fun p acc ->
            let p' = clean p in
            if p' = Nil
            then acc
            else p'::acc
          ) p_list []
        in
        if p_list' = []
        then Nil
        else Bang(p_list',pos)
  else Nil
(* Placeholder position attached to syntax nodes generated by the
   simplification passes (they correspond to no position of the input). *)
let dummy_pos = (0,[])
(* Re-insert the restriction of [n] on top of [proc].
   When [fresh] is true, a fresh copy [n'] of [n] is generated and substituted
   for [n] throughout [proc] (used when a restriction is pushed into several
   branches, to keep the bound names distinct); otherwise [n] itself is
   restricted. The generated [New] node carries [dummy_pos] since it does not
   correspond to a position of the original process. *)
let apply_replacement n fresh proc =
  if fresh
  then
    let n' = Name.fresh_from n in
    Name.auto_cleanup_with_reset_notail (fun () ->
      Name.link n n';
      (* Consistency fix: use [dummy_pos] instead of the literal (0,[]). *)
      New(n',replace_name_in_process proc,dummy_pos)
    )
  else New(n,proc,dummy_pos)
(* Push the restriction of [n] as deep as possible into [proc]:
   the [New] binder is re-inserted just above the first construct that uses
   [n], or above a parallel composition when several components use it.
   [fresh] records whether we already crossed a branching construct, in which
   case the re-inserted name must be a fresh copy (see [apply_replacement])
   to keep the copies in distinct branches independent. *)
let rec insert_name n fresh proc = match proc with
  | Nil -> Nil
  | Output(c,t,p,pos) ->
      if occur_in_term n c || occur_in_term n t
      then apply_replacement n fresh proc
      else Output(c,t,insert_name n fresh p,pos)
  | Input(c,pat,p,pos) ->
      if occur_in_term n c || occur_in_pattern n pat
      then apply_replacement n fresh proc
      else Input(c,pat,insert_name n fresh p,pos)
  | IfThenElse(t1,t2,p1,p2,pos) ->
      if occur_in_term n t1 || occur_in_term n t2
      then apply_replacement n fresh proc
      (* Below a branching construct the name must be freshened per branch. *)
      else IfThenElse(t1,t2,insert_name n true p1, insert_name n true p2,pos)
  | Let(pat,t,p1,p2,pos) ->
      if occur_in_term n t || occur_in_pattern n pat
      then apply_replacement n fresh proc
      else Let(pat,t,insert_name n true p1, insert_name n true p2,pos)
  | New(n',p,pos) -> New(n',insert_name n fresh p,pos)
  | Par p_list ->
      (* Find the unique component using [n]; if several do, the restriction
         must stay above the whole parallel composition. *)
      let rec explore prev = function
        | [] -> None
        | p::q ->
            if occur_in_process n p
            then Some(List.rev prev, p, q)
            else explore (p::prev) q
      in
      begin match explore [] p_list with
        | None -> proc
        | Some(prev,p,next) ->
            if List.exists (occur_in_process n) next
            then apply_replacement n fresh proc
            else Par(prev@(insert_name n fresh p)::next)
      end
  | Bang(p_list,_) ->
      (* All copies of a replication are equal: checking the first suffices. *)
      let p = List.hd p_list in
      if occur_in_process n p
      then apply_replacement n fresh proc
      else proc
  | Choice(p1,p2,pos) -> Choice(insert_name n true p1,insert_name n true p2,pos)
(* Push every restriction as deep as possible: the process is traversed
   bottom-up and each [New] binder is re-inserted with [insert_name] just
   above the first use of its name. *)
let rec move_new_name = function
  | Nil -> Nil
  | Output(c,t,p,pos) -> Output(c,t,move_new_name p,pos)
  | Input(c,pat,p,pos) -> Input(c,pat,move_new_name p,pos)
  | IfThenElse(t1,t2,p1,p2,pos) -> IfThenElse(t1,t2,move_new_name p1, move_new_name p2, pos)
  | Let(pat,t,p1,p2,pos) -> Let(pat,t,move_new_name p1, move_new_name p2, pos)
  | Par p_list -> Par (List.map move_new_name p_list)
  | Bang(p_list,pos) -> Bang(List.map move_new_name p_list,pos)
  | Choice(p1,p2,pos) -> Choice(move_new_name p1,move_new_name p2,pos)
  | New(n,p,_) -> insert_name n false (move_new_name p)
(* Normalise communications so that channels and output messages are plain
   variables: each output [out(c,t)] becomes [let x = c in let y = t in
   out(x,y)], and each input gets its channel bound by a let; non-variable
   input patterns are further split into a variable input followed by a
   pattern-matching let. All introduced lets carry [dummy_pos] and have a
   [Nil] else branch (a failing evaluation kills the process). *)
let rec add_let_for_output_input = function
  | Nil -> Nil
  | Output(c,t,p,pos) ->
      let x = Variable.fresh Free in
      let y = Variable.fresh Free in
      Let(PatVar x,c,
        Let(PatVar y,t,
          Output(Var x,Var y,add_let_for_output_input p,pos),
          Nil,
          dummy_pos
        ),
        Nil,
        dummy_pos
      )
  | Input(c,pat,p,pos) ->
      begin match pat with
        | PatVar _ ->
            (* The pattern is already a variable: only bind the channel. *)
            let x = Variable.fresh Free in
            Let(PatVar x,c,Input(Var x,pat,add_let_for_output_input p,pos),Nil,dummy_pos)
        | _ ->
            (* Split the structured pattern into input-then-let. *)
            let (x,y) = Variable.fresh Free, Variable.fresh Free in
            let inside_proc = Let(pat,Var y,add_let_for_output_input p, Nil,dummy_pos) in
            Let(PatVar x,c,Input(Var x,PatVar y,inside_proc,pos),Nil,dummy_pos)
      end
  | IfThenElse(t1,t2,p1,p2,pos) -> IfThenElse(t1,t2,add_let_for_output_input p1,add_let_for_output_input p2,pos)
  | Let(pat,t,p1,p2,pos) -> Let(pat,t,add_let_for_output_input p1,add_let_for_output_input p2,pos)
  | New(n,p,pos) -> New(n,add_let_for_output_input p,pos)
  | Par p_list -> Par (List.map add_let_for_output_input p_list)
  | Bang(p_list,pos) -> Bang(List.map add_let_for_output_input p_list,pos)
  | Choice(p1,p2,pos) -> Choice(add_let_for_output_input p1, add_let_for_output_input p2,pos)
(* Holds iff the term contains no variable from [vars_pat]
   (physical membership test on variables). *)
let rec does_not_occurs vars_pat term = match term with
  | Var v -> not (List.memq v vars_pat)
  | Func(_,sub_terms) -> List.for_all (does_not_occurs vars_pat) sub_terms
  | _ -> true
(* Convert a pattern into the term it matches, side-effecting: every pattern
   variable encountered is also pushed onto the [vars_pat] reference so the
   caller can collect the pattern's binders. *)
let rec term_of_pattern vars_pat = function
  | PatVar x ->
      vars_pat := x :: !vars_pat;
      Var x
  | PatTuple(f,args) -> Func(f,List.map (term_of_pattern vars_pat) args)
  | PatEquality t -> t
(* Reorient the unifier [equations] so that pattern variables appear on the
   left-hand side whenever possible: an equation [x = x'] with [x] not a
   pattern variable and [x'] a pattern variable is flipped into [x' = x], and
   the substitution induced by the flip is propagated through all other
   equations (restarting the scan from the updated list). In debug mode it is
   checked that no pattern variable remains mapped to another pattern
   variable. *)
let update_equations_with_pattern_vars equations pat_vars =
  let rec explore prev = function
    | [] -> prev
    | (x,Var x')::q when not (List.memq x pat_vars) && List.memq x' pat_vars ->
        let new_equations =
          Variable.auto_cleanup_with_reset_notail (fun () ->
            (* Flip the equation and apply [x' := x] everywhere else. *)
            Variable.link_term x' (Var x);
            let prev' = List.map (fun (y,t) -> (y,Term.instantiate t)) prev in
            let q' = List.map (fun (y,t) -> (y,Term.instantiate t)) q in
            (((x',Var x)::prev')@q')
          )
        in
        explore [] new_equations
    | eq::q -> explore (eq::prev) q
  in
  let new_equations = explore [] equations in
  Config.debug (fun () ->
    List.iter (function
      | (x,Var x') when List.memq x' pat_vars && List.memq x pat_vars -> Config.internal_error "[process.ml >> update_equations_with_pattern_vars] Should not occur"
      | _ -> ()
    ) new_equations
  );
  new_equations
(* Statically simplify a process: conditionals whose test is decided by the
   rewrite system are collapsed, and let-bindings with zero or one solution
   modulo rewriting are removed or pre-instantiated.
   NOTE(review): the arms [| [] ->] and [| [equations] ->] of the match on
   [equations_list] were missing in the extracted source (the name
   [equations] was otherwise unbound and [apply_trivial_let p2] dangling);
   they are restored here, consistent with the surviving comments and the
   final [| _ ->] fallback for multiple solutions. *)
let rec apply_trivial_let = function
  | Nil -> Nil
  | Output(c,t,p,pos) -> Output(c,t,apply_trivial_let p,pos)
  | Input(c,pat,p,pos) -> Input(c,pat,apply_trivial_let p, pos)
  | IfThenElse(t1,t2,p1,p2,pos) ->
      let (_,formula_neg) = Rewrite_rules.compute_equality_modulo_and_rewrite [t1,t2] in
      if formula_neg = Formula.T.Bot
      then apply_trivial_let p1          (* disequality unsatisfiable: always then *)
      else if formula_neg = Formula.T.Top
      then apply_trivial_let p2          (* equality unsatisfiable: always else *)
      else IfThenElse(t1,t2,apply_trivial_let p1,apply_trivial_let p2,pos)
  | Let(pat,t,p1,p2,pos) ->
      let vars_pat = ref [] in
      let (equations_list,_) = Rewrite_rules.compute_equality_modulo_and_rewrite [term_of_pattern vars_pat pat, t] in
      begin match equations_list with
        | [] ->
            (* The pattern can never match: only the else branch survives. *)
            apply_trivial_let p2
        | [equations] ->
            (* One unique solution *)
            (* We first check that there is no existential variables *)
            if List.for_all (fun (x,t') -> x.quantifier <> Existential && not (Term.quantified_var_occurs Existential t')) equations
            then
              let new_equations = update_equations_with_pattern_vars equations !vars_pat in
              if List.for_all (fun (x,t') -> (List.memq x !vars_pat) && does_not_occurs !vars_pat t') new_equations
              then
                begin
                  Config.debug (fun () ->
                    if not (List.for_all (fun (_,t') -> does_not_occurs !vars_pat t') new_equations)
                    then Config.internal_error "[process.ml >> apply_trivial_let] Having only variables from the pattern in the domain should imply that no variables in the image are from the pattern."
                  );
                  (* The let always succeeds: substitute and drop the binding. *)
                  let p1' =
                    Variable.auto_cleanup_with_reset_notail (fun () ->
                      List.iter (fun (x,t') -> Variable.link_term x t') new_equations;
                      instantiate p1
                    )
                  in
                  apply_trivial_let p1'
                end
              else
                (* Keep the let but simplify its then-branch under the solution. *)
                let p1' =
                  Variable.auto_cleanup_with_reset_notail (fun () ->
                    List.iter (fun (x,t') -> Variable.link_term x t') new_equations;
                    instantiate p1
                  )
                in
                Let(pat,t,apply_trivial_let p1',apply_trivial_let p2,pos)
            else Let(pat,t,apply_trivial_let p1, apply_trivial_let p2,pos)
        | _ -> Let(pat,t,apply_trivial_let p1, apply_trivial_let p2,pos)
      end
  | New(n,p,pos) -> New(n,apply_trivial_let p,pos)
  | Par p_list -> Par (List.map apply_trivial_let p_list)
  | Bang(p_list,pos) -> Bang(List.map apply_trivial_let p_list,pos)
  | Choice(p1,p2,pos) -> Choice(apply_trivial_let p1, apply_trivial_let p2,pos)
(* Raised when two processes cannot be matched modulo renaming. *)
exception No_Match

(* Images already used by the variable/name bijection under construction;
   consulted to keep the mapping injective. *)
let linked_bijection_vars = ref []
let linked_bijection_names = ref []
(* Run [f_next] inside fresh variable and name auto-cleanup scopes, saving
   and restoring the bijection image lists — both on normal return and when
   [No_Match] escapes (the exception is re-raised after restoration). *)
let cleanup_all_linked f_next =
  Variable.auto_cleanup_with_exception (fun () ->
    Name.auto_cleanup_with_exception (fun () ->
      let tmp_vars = !linked_bijection_vars in
      let tmp_names = !linked_bijection_names in
      try
        let r = f_next () in
        linked_bijection_vars := tmp_vars;
        linked_bijection_names := tmp_names;
        r
      with No_Match ->
        linked_bijection_vars := tmp_vars;
        linked_bijection_names := tmp_names;
        raise No_Match
    )
  )
(* Extend the current variable/name bijection so that [t1] maps onto [t2].
   Already-linked variables/names must map to the same image; unlinked ones
   are linked, provided the image is not already used (injectivity).
   Raises [No_Match] on any structural or bijection conflict. *)
let rec match_term t1 t2 = match t1, t2 with
  | Var { link = VLink x; _ }, Var y when x == y -> ()
  | Var ({ link = NoLink; _} as x), Var y ->
      (* y must not already be the image of another variable. *)
      if List.memq y !linked_bijection_vars
      then raise No_Match;
      Variable.link x y;
      linked_bijection_vars := y :: !linked_bijection_vars
  | Func(f1,args1), Func(f2,args2) when f1 == f2 ->
      List.iter2 match_term args1 args2
  | Name { link_n = NLink n1; _}, Name n2 when n1 == n2 -> ()
  | Name ({ link_n = NNoLink; _} as n1), Name n2 ->
      if List.memq n2 !linked_bijection_names
      then raise No_Match;
      Name.link n1 n2;
      linked_bijection_names := n2 :: !linked_bijection_names
  | _, _ -> raise No_Match
(* Extend the current bijection so that pattern [p] matches pattern [p'];
   raises [No_Match] when their shapes or symbols differ. *)
let rec match_pattern p p' = match p, p' with
  | PatVar x, PatVar y -> match_term (Var x) (Var y)
  | PatTuple(f,args), PatTuple(g,args') when f == g ->
      List.iter2 match_pattern args args'
  | PatEquality t, PatEquality t' -> match_term t t'
  | _, _ -> raise No_Match
(* After matching only the first copy of two bangs of size [size1], duplicate
   every obtained position pair to the remaining copies by replacing, in each
   position's argument list, the index introduced at the bang by i = 1..size1-1.
   Positions are pairs (id, arg-index list); [args1]/[args2] are the bangs'
   own position arguments, which must be strict prefixes of the matched ones. *)
let duplicate_position_match pos_match (_,args1) (_,args2) size1 =
  (* [replace i prefix args]: past the strict prefix, substitute [i] for the
     index that the bang appended (expected to equal [size1]). *)
  let rec replace i prefix args = match prefix, args with
    | [], [] -> Config.internal_error "[process.ml >> duplicate_position_match] The prefix should be strict."
    | [], n::q ->
        Config.debug (fun () ->
          if size1 <> n
          then Config.internal_error "[process.ml >> duplicate_position_match] Only the max index should have been added"
        );
        i::q
    | n_p::q_p, n::q ->
        Config.debug (fun () ->
          if n_p <> n
          then Config.internal_error "[process.ml >> duplicate_position_match] It should be a prefix."
        );
        replace i q_p q
    | _, [] -> Config.internal_error "[process.ml >> duplicate_position_match] It should be a prefix (2)."
  in
  let new_pos_match = ref [] in
  List.iter (fun (((id1',args1'),(id2',args2')) as matchings) ->
    new_pos_match := matchings :: !new_pos_match;
    for i = 1 to size1 - 1 do
      let pos1 = (id1',replace i args1 args1') in
      let pos2 = (id2',replace i args2 args2') in
      new_pos_match := (pos1,pos2):: !new_pos_match
    done
  ) pos_match;
  !new_pos_match
(* Decide whether [proc1] and [proc2] are equal up to a bijective renaming of
   variables and names, in CPS style: on success the continuation [f_next]
   receives the list of matched position pairs; failure raises [No_Match].
   Symmetric constructs (IfThenElse branches, Choice sides, Par members) are
   tried in both/all orders, backtracking on [No_Match]. *)
let rec equal_modulo_renaming f_next proc1 proc2 = match proc1, proc2 with
  | Nil, Nil -> f_next []
  | Output(c1,t1,p1,pos1), Output(c2,t2,p2,pos2) ->
      cleanup_all_linked (fun () ->
        match_term c1 c2;
        match_term t1 t2;
        equal_modulo_renaming (fun pos_match ->
          f_next ((pos1,pos2)::pos_match)
        ) p1 p2
      )
  | Input(c1,pat1,p1,pos1), Input(c2,pat2,p2,pos2) ->
      cleanup_all_linked (fun () ->
        match_term c1 c2;
        match_pattern pat1 pat2;
        equal_modulo_renaming (fun pos_match ->
          f_next ((pos1,pos2)::pos_match)
        ) p1 p2
      )
  | IfThenElse(t1,t2,p1,p2,_), IfThenElse(t1',t2',p1',p2',_) ->
      begin
        try
          (* First attempt: match the equality tests in the same orientation. *)
          cleanup_all_linked (fun () ->
            match_term t1 t1';
            match_term t2 t2';
            equal_modulo_renaming (fun pos_match ->
              equal_modulo_renaming (fun pos_match' ->
                f_next (pos_match @ pos_match')
              ) p2 p2'
            ) p1 p1'
          )
        with No_Match ->
          (* Retry with the test's operands swapped. *)
          cleanup_all_linked (fun () ->
            match_term t1 t2';
            match_term t2 t1';
            equal_modulo_renaming (fun pos_match ->
              equal_modulo_renaming (fun pos_match' ->
                f_next (pos_match @ pos_match')
              ) p2 p2'
            ) p1 p1'
          )
      end
  | Let(pat,t,p1,p2,_), Let(pat',t',p1',p2',_) ->
      cleanup_all_linked (fun () ->
        match_pattern pat pat';
        match_term t t';
        equal_modulo_renaming (fun pos_match ->
          equal_modulo_renaming (fun pos_match' ->
            f_next (pos_match @ pos_match')
          ) p2 p2'
        ) p1 p1'
      )
  | New _, New _ -> gather_names_and_match f_next [] [] proc1 proc2
  | Par p_list1, Par p_list2 when List.length p_list1 = List.length p_list2 -> equal_modulo_renaming_list f_next p_list1 p_list2
  | Bang(p_list1,pos1), Bang(p_list2,pos2) ->
      let size1 = List.length p_list1 in
      let size2 = List.length p_list2 in
      if size1 <> size2
      then raise No_Match;
      if size1 = 0
      then Config.internal_error "[process.ml >> equal_modulo_renaming] Bang should have at least one process.";
      (* All copies of a bang are identical: match the heads only and
         duplicate the resulting position pairs to the remaining copies. *)
      let p1 = List.hd p_list1 in
      let p2 = List.hd p_list2 in
      equal_modulo_renaming (fun pos_match ->
        let pos_match' = duplicate_position_match pos_match pos1 pos2 size1 in
        f_next pos_match'
      ) p1 p2
  | Choice(p1,p2,pos1), Choice(p1',p2',pos2) ->
      begin
        try
          equal_modulo_renaming (fun pos_match ->
            equal_modulo_renaming (fun pos_match' ->
              f_next ((pos1,pos2)::pos_match @ pos_match')
            ) p2 p2'
          ) p1 p1'
        with No_Match ->
          (* Retry with the choice branches crossed. *)
          equal_modulo_renaming (fun pos_match ->
            equal_modulo_renaming (fun pos_match' ->
              f_next ((pos1,pos2)::pos_match @ pos_match')
            ) p2 p1'
          ) p1 p2'
      end
  | _ -> raise No_Match

(* Match two lists of parallel processes in some order (backtracking). *)
and equal_modulo_renaming_list f_next proc_l1 proc_l2 = match proc_l1 with
  | [] -> f_next []
  | p1::q1 ->
      equal_modulo_renaming_list_one (fun pos_match q2 ->
        equal_modulo_renaming_list (fun pos_match' ->
          f_next (pos_match@pos_match')
        ) q1 q2
      ) p1 [] proc_l2

(* Find, by trial, one element of the second list matching [p1]; pass the
   remaining elements to the continuation. *)
and equal_modulo_renaming_list_one f_next p1 prev_2 = function
  | [] -> raise No_Match
  | p2::q2 ->
      try
        equal_modulo_renaming (fun pos_match ->
          f_next pos_match (prev_2@q2)
        ) p1 p2
      with No_Match -> equal_modulo_renaming_list_one f_next p1 (p2::prev_2) q2

(* Collect maximal runs of New binders on both sides, then match the bodies;
   afterwards check every collected name of the first process was mapped to a
   collected name of the second. *)
and gather_names_and_match f_next n_l1 n_l2 proc1 proc2 = match proc1, proc2 with
  | New(n1,p1,_), New(n2,p2,_) -> gather_names_and_match f_next (n1::n_l1) (n2::n_l2) p1 p2
  | New _, _
  | _, New _ -> raise No_Match
  | _, _ ->
      equal_modulo_renaming (fun pos_match ->
        List.iter (fun n -> match n.link_n with
          | NLink n' ->
              if not (List.memq n' n_l2)
              then raise No_Match
          | _ -> Config.internal_error "[process.ml >> gather_names_and_match] Used new names should have been removed."
        ) n_l1;
        f_next pos_match
      ) proc1 proc2
(* Look through a (possibly empty) run of New binders for a leading test:
   returns Some (pattern, term, then-branch, else-branch, bound names) when
   the process is New* followed by a Let or an IfThenElse (the latter encoded
   as a PatEquality let), and None otherwise. *)
let rec gather_names_let proc = match proc with
  | Let(pat,t,pthen,pelse,_) -> Some(pat,t,pthen,pelse,[])
  | IfThenElse(t1,t2,pthen,pelse,_) -> Some(PatEquality t1,t2,pthen,pelse,[])
  | New(n,p,_) ->
      (match gather_names_let p with
       | Some(pat,t,pthen,pelse,names) -> Some(pat,t,pthen,pelse,n::names)
       | None -> None)
  | _ -> None
(* Record the identity mapping n -> n in the name bijection, so that [n]
   can only match itself in subsequent [match_term] calls. *)
let self_match_name n = match n.link_n with
  | NLink n' ->
      (* Already linked: must already be the identity. *)
      Config.debug (fun () ->
        if n != n'
        then Config.internal_error "[process.ml >> self_match_name] The name should be link to itself."
      )
  | NNoLink ->
      Name.link n n;
      linked_bijection_names := n :: !linked_bijection_names
  | _ -> Config.internal_error "[process.ml >> self_match_name] Unexpected link."
(* Record the identity mapping x -> x for every variable bound by the
   pattern, so each can only match itself afterwards. *)
let rec self_match_pattern = function
  | PatEquality _ -> ()
  | PatTuple(_,args) -> List.iter self_match_pattern args
  | PatVar ({ link = VLink x; _ } as x') ->
      (* Already linked: must already be the identity. *)
      Config.debug (fun () ->
        if x != x'
        then Config.internal_error "[process.ml >> self_match_pattern] The variable should be link to itself."
      );
      ()
  | PatVar ({ link = NoLink; _ } as x) ->
      Variable.link x x;
      linked_bijection_vars := x :: !linked_bijection_vars
  | PatVar _ -> Config.internal_error "[process.ml >> self_match_pattern] Unexpected link for variable."
(* Wrap [p] in New binders for [names], outermost binder first. *)
let add_names p names =
  List.fold_right (fun n acc -> New(n,acc,dummy_pos)) names p
(* Merge a test with a directly nested one when their else-branches are equal
   modulo renaming: If/Let whose then-branch starts (under New binders) with
   another Let/IfThenElse, and whose own else-branch matches the inner one,
   are fused into a single Let over a pair pattern.  Returns the rewritten
   process together with the position pairs collected by the matchings. *)
let rec regroup_else_branches = function
  | Nil -> Nil, []
  | Output(c,t,p,pos) ->
      let (p',pos_match') = regroup_else_branches p in
      Output(c,t,p',pos), pos_match'
  | Input(c,pat,p,pos) ->
      cleanup_all_linked (fun () ->
        (* Input-bound variables must match themselves below this point. *)
        self_match_pattern pat;
        let (p',pos_match') = regroup_else_branches p in
        Input(c,pat,p',pos), pos_match'
      )
  | IfThenElse(t1,t2,p1,p2,pos) ->
      let (p1',pos_match1) = regroup_else_branches p1 in
      let (p2',pos_match2) = regroup_else_branches p2 in
      begin match gather_names_let p1' with
        | None -> IfThenElse(t1,t2,p1',p2',pos), (pos_match1 @ pos_match2)
        | Some(pat,t,pthen,pelse,names_l) ->
            begin
              try
                (* Fuse only when our else-branch equals the inner one. *)
                let new_matchings =
                  cleanup_all_linked (fun () ->
                    List.iter self_match_name names_l;
                    equal_modulo_renaming (fun matchings ->
                      matchings @ pos_match1 @ pos_match2
                    ) p2' pelse
                  )
                in
                let f = Symbol.get_tuple 2 in
                let new_pat = PatTuple(f,[PatEquality t1;pat]) in
                let new_t = Func(f,[t2;t]) in
                let p = Let(new_pat,new_t,pthen,pelse,dummy_pos) in
                add_names p names_l, new_matchings
              with No_Match -> IfThenElse(t1,t2,p1',p2',pos), (pos_match1 @ pos_match2)
            end
      end
  | Let(pat,t,p1,p2,pos) ->
      cleanup_all_linked (fun () ->
        self_match_pattern pat;
        let (p1',pos_match1) = regroup_else_branches p1 in
        let (p2',pos_match2) = regroup_else_branches p2 in
        begin match gather_names_let p1' with
          | None -> Let(pat,t,p1',p2',pos), (pos_match1 @ pos_match2)
          | Some(pat',t',pthen,pelse,names_l) ->
              begin
                try
                  let new_matchings =
                    cleanup_all_linked (fun () ->
                      List.iter self_match_name names_l;
                      equal_modulo_renaming (fun matchings ->
                        matchings @ pos_match1 @ pos_match2
                      ) p2' pelse
                    )
                  in
                  let f = Symbol.get_tuple 2 in
                  let new_pat = PatTuple(f,[pat;pat']) in
                  let new_t = Func(f,[t;t']) in
                  let p = Let(new_pat,new_t,pthen,pelse,dummy_pos) in
                  add_names p names_l, new_matchings
                with No_Match ->
                  Let(pat,t,p1',p2',pos), (pos_match1 @ pos_match2)
              end
        end
      )
  | New(n,p,pos) ->
      cleanup_all_linked (fun () ->
        self_match_name n;
        let (p',pos_match') = regroup_else_branches p in
        New(n,p',pos), pos_match'
      )
  | Par p_list ->
      let (p_list', pos_match) =
        List.fold_right (fun p (acc_p,acc_match) ->
          let (p',pos_match') = regroup_else_branches p in
          (p'::acc_p,pos_match'@acc_match)
        ) p_list ([],[])
      in
      Par p_list', pos_match
  | Bang(p_list,pos) ->
      let (p_list', pos_match) =
        List.fold_right (fun p (acc_p,acc_match) ->
          let (p',pos_match') = regroup_else_branches p in
          (p'::acc_p,pos_match'@acc_match)
        ) p_list ([],[])
      in
      Bang(p_list',pos), pos_match
  | Choice(p1,p2,pos) ->
      let (p1',pos_match1) = regroup_else_branches p1 in
      let (p2',pos_match2) = regroup_else_branches p2 in
      Choice(p1',p2',pos), pos_match1 @ pos_match2
(* Group the members of each Par that are equal modulo renaming into Bang
   constructs, and flatten a Bang whose copies are themselves Bangs. *)
let rec regroup_equal_par_processes = function
  | Nil -> Nil
  | Output(c,t,p,pos) -> Output(c,t,regroup_equal_par_processes p,pos)
  | Input(c,pat,p,pos) ->
      cleanup_all_linked (fun () ->
        (* Bound variables/names must match themselves in nested matchings. *)
        self_match_pattern pat;
        Input(c,pat,regroup_equal_par_processes p,pos)
      )
  | IfThenElse(t1,t2,p1,p2,pos) -> IfThenElse(t1,t2,regroup_equal_par_processes p1, regroup_equal_par_processes p2,pos)
  | Let(pat,t,p1,p2,pos) ->
      cleanup_all_linked (fun () ->
        self_match_pattern pat;
        Let(pat,t,regroup_equal_par_processes p1,regroup_equal_par_processes p2,pos)
      )
  | New(n,p,pos) ->
      cleanup_all_linked (fun () ->
        self_match_name n;
        New(n,regroup_equal_par_processes p,pos)
      )
  | Par p_list ->
      (* Partition the members into classes of processes equal modulo
         renaming; each class of size > 1 becomes a Bang. *)
      let rec insert_in_proc_list_list p = function
        | [] -> [[p]]
        | (p'::q)::q_list ->
            begin try
              equal_modulo_renaming (fun _ -> ()) p p';
              (p::p'::q)::q_list
            with No_Match ->
              (p'::q)::(insert_in_proc_list_list p q_list)
            end
        | []::_ -> Config.internal_error "[process.ml >> regroup_equal_par_processes] Unexpected case"
      in
      let rec regroup_list = function
        | [] -> []
        | p::q ->
            let proc_list_list = regroup_list q in
            insert_in_proc_list_list p proc_list_list
      in
      let par_list =
        List.map (function
          | [] -> Config.internal_error "[process.ml >> regroup_equal_par_processes] Unexpected case 2"
          | [p] -> p
          | p_list -> Bang(p_list,dummy_pos)
        ) (regroup_list p_list)
      in
      begin match par_list with
        | [] -> Config.internal_error "[process.ml >> regroup_equal_par_processes] Unexpected case 3"
        | [p] -> p
        | _ -> Par par_list
      end
  | Bang(p_list,pos) ->
      let p_list' = List.map regroup_equal_par_processes p_list in
      let p = List.hd p_list in
      begin match p with
        | Bang _ ->
            (* Copies are themselves bangs: flatten one level. *)
            let p_list'' =
              List.fold_right (fun p' acc -> match p' with
                | Bang(p_list'',_) -> p_list''@acc
                | _ -> Config.internal_error "[process.ml >> regroup_equal_par_processes] Should only be bang processes."
              ) p_list' []
            in
            Bang(p_list'',pos)
        | _ -> Bang(p_list',pos)
      end
  | Choice(p1,p2,pos) -> Choice(regroup_equal_par_processes p1, regroup_equal_par_processes p2,pos)
(* Replace, inside a term, every private constant listed in [assoc] by its
   associated fresh name; all other subterms are rebuilt unchanged.
   Fix: guard the lookup with [List.mem_assq] — [replace_private_name] only
   puts into [assoc] the private constants absent from the rewrite rules, so
   a private constant outside [assoc] previously made [List.assq] raise
   [Not_found]; it is now left untouched. *)
let rec replace_private_name_term assoc = function
  | Func(f,[]) when not f.public && List.mem_assq f assoc -> Name (List.assq f assoc)
  | Func(f,args) -> Func(f,List.map (replace_private_name_term assoc) args)
  | t -> t
(* Apply the private-constant replacement to the terms of a pattern. *)
let rec replace_private_name_pattern assoc pat = match pat with
  | PatTuple(f,args) -> PatTuple(f, List.map (replace_private_name_pattern assoc) args)
  | PatEquality t -> PatEquality (replace_private_name_term assoc t)
  | _ -> pat
(* Apply the private-constant replacement to every term and pattern of a
   process, preserving its structure. *)
let rec replace_private_name_process assoc proc =
  let rt = replace_private_name_term assoc in
  let rpat = replace_private_name_pattern assoc in
  let rp = replace_private_name_process assoc in
  match proc with
  | Nil -> Nil
  | Output(ch,t,p,pos) -> Output(rt ch, rt t, rp p, pos)
  | Input(ch,pat,p,pos) -> Input(rt ch, rpat pat, rp p, pos)
  | IfThenElse(t1,t2,p1,p2,pos) -> IfThenElse(rt t1, rt t2, rp p1, rp p2, pos)
  | Let(pat,t,p1,p2,pos) -> Let(rpat pat, rt t, rp p1, rp p2, pos)
  | New(n,p,pos) -> New(n, rp p, pos)
  | Par plist -> Par (List.map rp plist)
  | Bang(plist,pos) -> Bang(List.map rp plist, pos)
  | Choice(p1,p2,pos) -> Choice(rp p1, rp p2, pos)
(* True iff the function symbol [f] does not occur anywhere in the term. *)
let rec private_constant_not_in_term f = function
  | Func(g,args) -> g != f && List.for_all (fun t -> private_constant_not_in_term f t) args
  | _ -> true
(* True iff the symbol [f] occurs in no rewrite rule (neither left- nor
   right-hand side) of any registered destructor. *)
let private_constant_not_in_rewrite_rule f =
  List.for_all (fun f' -> match f'.cat with
    | Destructor rw_list ->
        List.for_all (fun (lhs,rhs) ->
          private_constant_not_in_term f rhs && List.for_all (private_constant_not_in_term f) lhs
        ) rw_list
    | _ -> Config.internal_error "[process.ml >> private_constant_not_in_rewrite_rule] Should only contain destructor functions."
  ) !Symbol.all_destructors
(* Replace every private constant (arity 0, not occurring in the rewrite
   system) by a fresh restricted name, and bind those names with New at the
   top of the process.  Returns the process unchanged when no such constant
   exists. *)
let replace_private_name proc =
  let assoc =
    List.fold_left (fun acc f ->
      if not f.public && f.arity = 0 && private_constant_not_in_rewrite_rule f
      then
        let n = Name.fresh_with_label f.label_s in
        (f,n)::acc
      else acc
    ) [] !Symbol.all_constructors
  in
  if assoc = []
  then proc
  else
    List.fold_left (fun acc_p (_,n) ->
      New(n,acc_p,dummy_pos)
    ) (replace_private_name_process assoc proc) assoc
(* A symbolic execution state used when replaying traces on the original
   process. *)
type configuration =
  {
    frame : term list;   (* terms output so far; Axiom i refers to the i-th element (1-indexed) *)
    process : process    (* remaining process to execute *)
  }
(* Two positions are considered equal when they are identical or when the
   association list [pos_match] maps the first onto the second. *)
let is_equal_pos pos_match pos pos' =
  pos = pos'
  || (try List.assoc pos pos_match = pos' with Not_found -> false)
(* Check whether the observable position [pos] (modulo [pos_match]) occurs on
   an Output, Input or Choice of [proc]. *)
let is_pos_in_process pos_match pos proc =
  let rec search = function
    | Nil -> false
    | Output(_,_,_,p)
    | Input(_,_,_,p) -> is_equal_pos pos_match p pos
    | Choice(_,_,p) -> is_equal_pos pos_match p pos
    | IfThenElse(_,_,q1,q2,_)
    | Let(_,_,q1,q2,_) -> search q1 || search q2
    | New(_,q,_) -> search q
    | Par qs
    | Bang(qs,_) -> List.exists search qs
  in
  search proc
(* Instantiate [t] under the current variable links and normalise it with the
   rewrite system.  Callers catch [Rewrite_rules.Not_message] when the result
   is not a message. *)
let instantiate_term t =
  Variable.auto_cleanup_with_exception (fun () ->
    Rewrite_rules.normalise (Term.instantiate t)
  )
(* Pattern analogue of [instantiate_term]: instantiate then normalise. *)
let instantiate_pattern pat =
  Variable.auto_cleanup_with_exception (fun () ->
    Rewrite_rules.normalise_pattern (Term.instantiate_pattern pat)
  )
(* Evaluate a ground recipe against [frame]: [Axiom i] denotes the i-th
   frame element (1-indexed), [RFunc] applies a symbol.  The result is
   normalised; a recipe that does not compute a message is an internal
   error (recipes replayed here are assumed valid). *)
let apply_ground_recipe_on_frame frame r =
  let rec explore = function
    | RFunc(f,args) -> Func(f,List.map explore args)
    | Axiom i -> List.nth frame (i-1)
    | _ -> Config.internal_error "[process.ml >> apply_ground_recipe_on_frame] Unexpected recipe."
  in
  try
    Variable.auto_cleanup_with_exception (fun () -> Rewrite_rules.normalise (explore r))
  with Rewrite_rules.Not_message -> Config.internal_error "[process.ml >> apply_ground_recipe_on_frame] The recipe should be a message."
(* Execute, on configuration [conf], the single visible action [act] of the
   simplified process, translating it back onto the original process:
   silent steps (If, Let, New, bang unfolding) crossed on the way are
   reported to [f_next] as a list of actions, together with the original
   position of the visible action and the resulting configuration. *)
let rec retrieve_transition_list f_next pos_match act conf = match conf.process,act with
  | Output(_,t,p,pos), AOutput(_,pos') when is_equal_pos pos_match pos pos' ->
      (* The output extends the frame with the emitted term. *)
      f_next pos [] { frame = conf.frame@[Term.instantiate t]; process = p }
  | Input(_,pat,p,pos), AInput(_,r_t,pos') when is_equal_pos pos_match pos pos' ->
      let t = apply_ground_recipe_on_frame conf.frame r_t in
      begin try
        let pat' = instantiate_pattern pat in
        Variable.auto_cleanup_with_exception (fun () ->
          Term.unify pat' t;
          f_next pos [] { conf with process = p }
        )
      with Term.Not_unifiable | Rewrite_rules.Not_message ->
        (* Pattern-match failure on input blocks the continuation. *)
        f_next pos [] { conf with process = Nil }
      end
  | IfThenElse(t1,t2,p1,p2,pos), _ ->
      let do_then_branch =
        try
          Term.is_equal (instantiate_term t1) (instantiate_term t2)
        with Rewrite_rules.Not_message -> false
      in
      if do_then_branch
      then retrieve_transition_list (fun pos' act_l conf' -> f_next pos' ((ATau pos)::act_l) conf') pos_match act { conf with process = p1 }
      else retrieve_transition_list (fun pos' act_l conf' -> f_next pos' ((ATau pos)::act_l) conf') pos_match act { conf with process = p2 }
  | Let(pat,t,p1,p2,pos), _ ->
      begin try
        let pat' = instantiate_pattern pat in
        let t' = instantiate_term t in
        Variable.auto_cleanup_with_exception (fun () ->
          Term.unify pat' t';
          retrieve_transition_list (fun pos' act_l conf' -> f_next pos' ((ATau pos)::act_l) conf') pos_match act { conf with process = p1 }
        )
      with Rewrite_rules.Not_message | Term.Not_unifiable ->
        retrieve_transition_list (fun pos' act_l conf' -> f_next pos' ((ATau pos)::act_l) conf') pos_match act { conf with process = p2 }
      end
  | New(_,p,pos),_ -> retrieve_transition_list (fun pos' act_l conf' -> f_next pos' ((ATau pos)::act_l) conf') pos_match act { conf with process = p }
  | Par p_list, (AOutput(_,pos) | AInput(_,_,pos) | AChoice(pos,_) ) ->
      retrieve_transition_list_from_par f_next pos_match pos act conf.frame [] p_list
  | Bang(p_list,pos_bang), (AOutput(_,pos) | AInput(_,_,pos) | AChoice(pos,_) ) ->
      retrieve_transition_list_from_bang f_next pos_match pos pos_bang 1 act conf.frame [] p_list
  | Choice(p1,p2,pos), AChoice(pos',choose_left) when is_equal_pos pos_match pos pos' ->
      if choose_left
      then f_next pos [] { conf with process = p1 }
      else f_next pos [] { conf with process = p2 }
  | _ -> Config.internal_error "[process.ml >> retrieve_transition_list] Unexpected case."

(* Locate, among parallel members, the one containing [pos] and execute the
   action inside it, rebuilding the Par around the result. *)
and retrieve_transition_list_from_par f_next pos_match pos act frame prev_p = function
  | [] -> Config.internal_error "[process.ml >> retrieve_transition_list_from_par] We should find the position."
  | p::q ->
      if is_pos_in_process pos_match pos p
      then
        retrieve_transition_list (fun pos' act_l conf' ->
          f_next pos' act_l { conf' with process = Par(prev_p @ (conf'.process :: q)) }
        ) pos_match act { frame = frame; process = p }
      else retrieve_transition_list_from_par f_next pos_match pos act frame (prev_p@[p]) q

(* Same as above for a Bang: the chosen copy is unfolded (recorded as an
   ABang action with its unfolding index) and the remaining copies stay as a
   Bang when more than one remains. *)
and retrieve_transition_list_from_bang f_next pos_match pos pos_bang nb_unfold act frame prev_p = function
  | [] -> Config.internal_error "[process.ml >> retrieve_transition_list_from_bang] We should find the position."
  | p::q ->
      if is_pos_in_process pos_match pos p
      then
        retrieve_transition_list (fun pos' act_l conf' ->
          if List.length q <= 1
          then f_next pos' (ABang(nb_unfold,pos_bang)::act_l) { conf' with process = Par(prev_p @ (conf'.process::q)) }
          else f_next pos' (ABang(nb_unfold,pos_bang)::act_l) { conf' with process = Par(prev_p @ [conf'.process; Bang(q,pos_bang)]) }
        ) pos_match act { frame = frame; process = p }
      else
        if List.length q <= 1
        then retrieve_transition_list_from_bang f_next pos_match pos pos_bang nb_unfold act frame (prev_p@[p]) q
        else retrieve_transition_list_from_bang f_next pos_match pos pos_bang (nb_unfold+1) act frame (prev_p@[p]) q
(* Replay a whole trace of the simplified process on configuration [conf],
   rebuilding the corresponding trace of the original process (positions are
   translated through [pos_match], and the silent actions crossed by each
   visible step are interleaved).  Eavesdrop/internal communications are
   decomposed into an output step followed by an input step. *)
let rec retrieve_trace f_next pos_match conf = function
  | [] -> f_next []
  | AOutput(r,pos)::q ->
      retrieve_transition_list (fun pos' act_l conf' ->
        retrieve_trace (fun act_l' ->
          f_next (act_l @ (AOutput(r,pos')::act_l'))
        ) pos_match conf' q
      ) pos_match (AOutput(r,pos)) conf
  | AInput(r,r_t,pos)::q ->
      retrieve_transition_list (fun pos' act_l conf' ->
        retrieve_trace (fun act_l' ->
          f_next (act_l @ (AInput(r,r_t,pos')::act_l'))
        ) pos_match conf' q
      ) pos_match (AInput(r,r_t,pos)) conf
  | AChoice(pos,choose_left)::q ->
      retrieve_transition_list (fun pos' act_l conf' ->
        retrieve_trace (fun act_l' ->
          f_next (act_l @ (AChoice(pos',choose_left)::act_l'))
        ) pos_match conf' q
      ) pos_match (AChoice(pos,choose_left)) conf
  | AEaves(r,pos_out,pos_in)::q ->
      (* Output first; the matching input reads the freshly added frame entry. *)
      retrieve_transition_list (fun pos_out' act_l_out conf' ->
        retrieve_transition_list (fun pos_in' act_l_in conf'' ->
          retrieve_trace (fun act_l' ->
            f_next (act_l_out @ act_l_in @ (AEaves(r,pos_out',pos_in')::act_l'))
          ) pos_match conf'' q
        ) pos_match (AInput(r,Axiom (List.length conf'.frame),pos_in)) conf'
      ) pos_match (AOutput(r,pos_out)) conf
  | AComm(pos_out,pos_in)::q ->
      (* Internal communication: the frame extension is discarded afterwards. *)
      retrieve_transition_list (fun pos_out' act_l_out conf' ->
        retrieve_transition_list (fun pos_in' act_l_in conf'' ->
          retrieve_trace (fun act_l' ->
            f_next (act_l_out @ act_l_in @ (AComm(pos_out',pos_in')::act_l'))
          ) pos_match { conf'' with frame = conf.frame } q
        ) pos_match (AInput(Axiom 0,Axiom (List.length conf'.frame),pos_in)) conf'
      ) pos_match (AOutput(Axiom 0,pos_out)) conf
  | _ -> Config.internal_error "[process.ml >> retrieve_trace] Unexpected trace action."
(* Collapse chained position renamings: whenever some pair maps onto [pos1]
   and [pos1] maps onto [pos2], rewrite the former to map directly onto
   [pos2].  [prev] accumulates already-processed pairs. *)
let rec normalise_pos_match prev = function
  | [] -> prev
  | (pos1,pos2)::q ->
      let f_apply (pos1',pos2') =
        if pos2' = pos1
        then (pos1',pos2)
        else (pos1',pos2')
      in
      let prev' = List.map f_apply prev in
      let q' = List.map f_apply q in
      normalise_pos_match ((pos1,pos2)::prev') q'
(* Simplification pipeline used for determinate processes.  Returns the
   simplified process together with a [retrieve_trace] closure that maps a
   trace of the simplified process back onto the original one. *)
let simplify_for_determinate p =
  let p0 = replace_private_name p in
  let p1 = clean p0 in
  let p2 = add_let_for_output_input p1 in
  let p3 = apply_trivial_let p2 in
  let p4 = detect_and_replace_pure_fresh_name p3 in
  let p5 = move_new_name p4 in
  let (p6,pos_match) = regroup_else_branches p5 in
  let pos_match_normalised = normalise_pos_match [] pos_match in
  Config.debug (fun () ->
    Config.log_in_debug Config.Process (Printf.sprintf "Before simplification :\n %s" (display 1 p));
    Config.log_in_debug Config.Process (Printf.sprintf "After simplification :\n %s" (display 1 p6));
  );
  (* Replays are done against the original process [p]. *)
  let retrieve_trace trans_list =
    Config.debug (fun () ->
      Config.log_in_debug Config.Process (Printf.sprintf "Input retrieve_trace = %s\nPos Match Normalised = %s\nProcess:\n%s\n"
        (display_list display_transition "; " trans_list)
        (display_list (fun (pos1,pos2) -> Printf.sprintf "(%s,%s)" (display_position pos1) (display_position pos2)) "; " pos_match_normalised)
        (display 1 p)
      )
    );
    let result = retrieve_trace (fun x -> x) pos_match_normalised { frame = []; process = p } trans_list in
    Config.debug (fun () ->
      Config.log_in_debug Config.Process (Printf.sprintf "Output retrieve_trace = %s\n" (display_list display_transition "; " result))
    );
    result
  in
  p6, retrieve_trace
(* Simplification pipeline for the generic procedure: same as the
   determinate one, minus the pure-fresh-name detection, plus the grouping
   of equal parallel processes into bangs. *)
let simplify_for_generic p =
  let p0 = replace_private_name p in
  let p1 = clean p0 in
  let p2 = add_let_for_output_input p1 in
  let p3 = apply_trivial_let p2 in
  let p4 = move_new_name p3 in
  let (p5,pos_match) = regroup_else_branches p4 in
  let p6 = regroup_equal_par_processes p5 in
  let pos_match_normalised = normalise_pos_match [] pos_match in
  Config.debug (fun () ->
    Config.log_in_debug Config.Process (Printf.sprintf "Before simplification :\n %s" (display 1 p));
    Config.log_in_debug Config.Process (Printf.sprintf "After simplification :\n %s" (display 1 p6));
  );
  (* Replays are done against the original process [p]. *)
  let retrieve_trace trans_list =
    Config.debug (fun () ->
      Config.log_in_debug Config.Process (Printf.sprintf "Input retrieve_trace = %s\n" (display_list display_transition "; " trans_list))
    );
    let result = retrieve_trace (fun x -> x) pos_match_normalised { frame = []; process = p } trans_list in
    Config.debug (fun () ->
      Config.log_in_debug Config.Process (Printf.sprintf "Output retrieve_trace = %s\n" (display_list display_transition "; " result))
    );
    result
  in
  p6, retrieve_trace
(* Raised when a process violates the restrictions of session equivalence /
   inclusion (non-atomic channels, Choice, channel names used in messages). *)
exception Session_error of string

(* Validate a process for session equivalence/inclusion: channels must be
   public/private names or constants, Choice is forbidden, and a private
   name or constant used as a channel must not also occur inside messages.
   Raises [Session_error] with an explanatory message on violation. *)
let check_process_for_session proc =
  (* Private constant symbols seen in channel position. *)
  let priv_symbol_channels = ref [] in
  (* First pass: collect/mark every channel and reject bad shapes. *)
  let rec mark_channels = function
    | Nil -> ()
    | Output(Func(f,[]),_,p,_)
    | Input(Func(f,[]),_,p,_) ->
        if not f.public && not (List.memq f !priv_symbol_channels)
        then priv_symbol_channels := f :: !priv_symbol_channels;
        mark_channels p
    | Output(Name n,_,p,_)
    | Input(Name n,_,p,_) ->
        (* Private name channels are marked via the name's search link. *)
        if n.link_n = NNoLink then Name.link_search n;
        mark_channels p
    | Output(ch,_,_,_) ->
        let err_msg =
          Printf.sprintf
            "The term %s was used as a channel for an output. However for session equivalence and session inclusion, only public/private names/constants are allowed."
            (Term.display Terminal ch)
        in
        raise (Session_error err_msg)
    | Input(ch,_,_,_) ->
        let err_msg =
          Printf.sprintf
            "The term %s was used as a channel for an input. However for session equivalence and session inclusion, only public/private names/constants are allowed."
            (Term.display Terminal ch)
        in
        raise (Session_error err_msg)
    | IfThenElse(_,_,p1,p2,_)
    | Let(_,_,p1,p2,_) ->
        mark_channels p1;
        mark_channels p2
    | New(_,p,_) -> mark_channels p
    | Par p_list
    | Bang (p_list,_) -> List.iter mark_channels p_list
    | Choice _ ->
        let err_msg = "Choice operator is not allowed for session equivalence and session inclusion." in
        raise (Session_error err_msg)
  in
  (* Second pass: no marked channel may occur inside a message term. *)
  let rec check_channels_in_term = function
    | Var _ -> ()
    | Func(f,args) ->
        if not f.public && List.memq f !priv_symbol_channels
        then
          begin
            let err_msg =
              Printf.sprintf
                "The private name %s is used as a channel and within a message. In session equivalence and session inclusion, private names used as channels cannot be used within messages."
                (Symbol.display Terminal f)
            in
            raise (Session_error err_msg)
          end;
        List.iter check_channels_in_term args
    | Name n ->
        match n.link_n with
          | NNoLink -> ()
          | NSLink ->
              let err_msg =
                Printf.sprintf
                  "The private name %s is used as a channel and within a message. In session equivalence and session inclusion, private names used as channels cannot be used within messages."
                  (Name.display Terminal n)
              in
              raise (Session_error err_msg)
          | _ -> Config.internal_error "[process.ml >> check_process_for_session] Unexpected link."
  in
  let rec check_channels_in_pattern = function
    | PatVar _ -> ()
    | PatTuple(_,args) -> List.iter check_channels_in_pattern args
    | PatEquality t -> check_channels_in_term t
  in
  let rec check_channels = function
    | Nil -> ()
    | Output(_,t,p,_) ->
        check_channels_in_term t;
        check_channels p
    | Input(_,pat,p,_) ->
        check_channels_in_pattern pat;
        check_channels p
    | IfThenElse(t1,t2,p1,p2,_) ->
        check_channels_in_term t1;
        check_channels_in_term t2;
        check_channels p1;
        check_channels p2
    | Let(pat,t,p1,p2,_) ->
        check_channels_in_term t;
        check_channels_in_pattern pat;
        check_channels p1;
        check_channels p2
    | New(_,p,_) -> check_channels p
    | Par plist
    | Bang (plist,_) -> List.iter check_channels plist
    | Choice _ -> Config.internal_error "[process.ml >> check_process_for_session] Choice operator should have been catched before applying this function."
  in
  (* The name links set by the first pass are undone on exit. *)
  Name.auto_cleanup_with_exception (fun () ->
    mark_channels proc;
    check_channels proc
  )
(* True iff every Output/Input of the process uses a public constant as its
   channel (and the process contains nothing else in channel position). *)
let rec only_public_channel proc = match proc with
  | Nil -> true
  | Output(Func(f,[]),_,p,_) when f.public -> only_public_channel p
  | Input(Func(f,[]),_,p,_) when f.public -> only_public_channel p
  | New(_,p,_) -> only_public_channel p
  | IfThenElse(_,_,p1,p2,_)
  | Let(_,_,p1,p2,_)
  | Choice(p1,p2,_) -> only_public_channel p1 && only_public_channel p2
  | Par p_list
  | Bang(p_list,_) -> List.for_all only_public_channel p_list
  | _ -> false
(* Simplification pipeline for session equivalence/inclusion: like the
   generic one, with pure-fresh-name detection applied only when every
   channel of the original process is a public constant. *)
let simplify_for_session p =
  let p0 = replace_private_name p in
  let p1 = clean p0 in
  let p2 = add_let_for_output_input p1 in
  let p3 = apply_trivial_let p2 in
  let p4 =
    if only_public_channel p
    then detect_and_replace_pure_fresh_name p3
    else p3
  in
  let p5 = move_new_name p4 in
  let (p6,pos_match) = regroup_else_branches p5 in
  let p7 = regroup_equal_par_processes p6 in
  let pos_match_normalised = normalise_pos_match [] pos_match in
  Config.debug (fun () ->
    Config.log_in_debug Config.Always (Printf.sprintf "Before simplification :\n %s" (display 1 p));
    Config.log_in_debug Config.Always (Printf.sprintf "After simplification :\n %s" (display 1 p7));
  );
  (* Replays are done against the original process [p]. *)
  let retrieve_trace trans_list =
    Config.debug (fun () ->
      Config.log_in_debug Config.Always (Printf.sprintf "[process.ml >> simplify_for_session] Input retrieve_trace = %s\n" (display_list display_transition "; " trans_list));
      Config.log_in_debug Config.Always (Printf.sprintf "[process.ml >> simplify_for_session] Process =\n%s" (display 2 p))
    );
    let result = retrieve_trace (fun x -> x) pos_match_normalised { frame = []; process = p } trans_list in
    Config.debug (fun () ->
      Config.log_in_debug Config.Always (Printf.sprintf "Output retrieve_trace = %s\n" (display_list display_transition "; " result))
    );
    result
  in
  p7, retrieve_trace
|
e3426b92303f22f41bdb1c2fe72adc1abbf705871780715c8c45275b5039fd0d | toyokumo/tarayo | core.clj | (ns core
(:require
[criterium.core :as criterium]
[helper :as h]
[tarayo.core :as tarayo]))
(defn -main
  "Benchmark entry point: spins up a test SMTP server (via
  h/with-test-smtp-server) and uses criterium to measure opening a
  tarayo connection and sending one test message per iteration."
  []
  (h/with-test-smtp-server [_ port]
    (println "TARAYO ----")
    (criterium/bench
      (with-open [conn (tarayo/connect {:port port})]
        (tarayo/send! conn h/test-message)))))
| null | https://raw.githubusercontent.com/toyokumo/tarayo/f9b10b85b7bc1a188d808c3955e258916cd0b38a/benchmark/tarayo/core.clj | clojure | (ns core
(:require
[criterium.core :as criterium]
[helper :as h]
[tarayo.core :as tarayo]))
(defn -main
[]
(h/with-test-smtp-server [_ port]
(println "TARAYO ----")
(criterium/bench
(with-open [conn (tarayo/connect {:port port})]
(tarayo/send! conn h/test-message)))))
| |
87f44d7b8d0a927642e401a15af1fcc90b30d0ae92cf6e1d0311344337b817b3 | Eonblast/Scalaxis | yaws_api.erl | %%----------------------------------------------------------------------
%%% File : yaws_api.erl
%%% Author  : < >
%%% Purpose :
%%% Created : 24 Jan 2002 by < >
%%%----------------------------------------------------------------------
-module(yaws_api).
-author('').
%% -compile(export_all).
-include("../include/yaws.hrl").
-include("../include/yaws_api.hrl").
-include("yaws_debug.hrl").
-export([parse_query/1, parse_post/1,
parse_multipart_post/1, parse_multipart_post/2,
parse_multipart/2, parse_multipart/3]).
-export([code_to_phrase/1, ssi/2, redirect/1]).
-export([setcookie/2, setcookie/3, setcookie/4, setcookie/5, setcookie/6]).
-export([pre_ssi_files/2, pre_ssi_string/1, pre_ssi_string/2,
set_content_type/1,
htmlize/1, htmlize_char/1, f/2, fl/1]).
-export([find_cookie_val/2, secs/0,
url_decode/1, url_decode_q_split/1,
url_encode/1, parse_url/1, parse_url/2, format_url/1,
format_partial_url/2]).
-export([is_absolute_URI/1]).
-export([path_norm/1, path_norm_reverse/1,
sanitize_file_name/1]).
-export([get_line/1, mime_type/1]).
-export([stream_chunk_deliver/2, stream_chunk_deliver_blocking/2,
stream_chunk_end/1]).
-export([stream_process_deliver/2, stream_process_deliver_chunk/2,
stream_process_deliver_final_chunk/2, stream_process_end/2]).
-export([websocket_send/2, websocket_receive/1,
websocket_unframe_data/1, websocket_setopts/2]).
-export([new_cookie_session/1, new_cookie_session/2, new_cookie_session/3,
cookieval_to_opaque/1, request_url/1,
print_cookie_sessions/0,
replace_cookie_session/2, delete_cookie_session/1]).
-export([getconf/0,
setconf/2,
embedded_start_conf/1, embedded_start_conf/2,
embedded_start_conf/3, embedded_start_conf/4]).
-export([set_status_code/1, reformat_header/1,
reformat_request/1, reformat_response/1, reformat_url/1]).
-export([set_trace/1,
set_tty_trace/1,
set_access_log/1]).
-export([call_cgi/2, call_cgi/3]).
-export([call_fcgi_responder/1, call_fcgi_responder/2,
call_fcgi_authorizer/1, call_fcgi_authorizer/2]).
-export([ehtml_expand/1, ehtml_expander/1, ehtml_apply/2,
ehtml_expander_test/0]).
-export([parse_set_cookie/1, format_set_cookie/1,
postvar/2, queryvar/2, getvar/2]).
-export([binding/1,binding_exists/1,
dir_listing/1, dir_listing/2, redirect_self/1]).
-export([arg_clisock/1
, arg_client_ip_port/1
, arg_headers/1
, arg_req/1
, arg_clidata/1
, arg_server_path/1
, arg_querydata/1
, arg_appmoddata/1
, arg_docroot/1
, arg_docroot_mount/1
, arg_fullpath/1
, arg_cont/1
, arg_state/1
, arg_pid/1
, arg_opaque/1
, arg_appmod_prepath/1
, arg_prepath/1
, arg_pathinfo/1
, http_request_method/1
, http_request_path/1
, http_request_version/1
, http_response_version/1
, http_response_status/1
, http_response_phrase/1
, headers_connection/1
, headers_accept/1
, headers_host/1
, headers_if_modified_since/1
, headers_if_match/1
, headers_if_none_match/1
, headers_if_range/1
, headers_if_unmodified_since/1
, headers_range/1
, headers_referer/1
, headers_user_agent/1
, headers_accept_ranges/1
, headers_cookie/1
, headers_keep_alive/1
, headers_location/1
, headers_content_length/1
, headers_content_type/1
, headers_content_encoding/1
, headers_authorization/1
, headers_transfer_encoding/1
, headers_x_forwarded_for/1
, headers_other/1
]).
-import(lists, [map/2, flatten/1, reverse/1]).
%% these are a bunch of function that are useful inside
%% yaws scripts
%% Plain one-per-field accessors for the #arg{}, #http_request{},
%% #http_response{} and #headers{} records, so embedded applications
%% can read them without depending on the record definitions.
arg_clisock(#arg{clisock = X}) -> X.
arg_client_ip_port(#arg{client_ip_port = X}) -> X.
arg_headers(#arg{headers = X}) -> X.
arg_req(#arg{req = X}) -> X.
arg_clidata(#arg{clidata = X}) -> X.
arg_server_path(#arg{server_path = X}) -> X.
arg_querydata(#arg{querydata = X}) -> X.
arg_appmoddata(#arg{appmoddata = X}) -> X.
arg_docroot(#arg{docroot = X}) -> X.
arg_docroot_mount(#arg{docroot_mount = X}) -> X.
arg_fullpath(#arg{fullpath = X}) -> X.
arg_cont(#arg{cont = X}) -> X.
arg_state(#arg{state = X}) -> X.
arg_pid(#arg{pid = X}) -> X.
arg_opaque(#arg{opaque = X}) -> X.
arg_appmod_prepath(#arg{appmod_prepath = X}) -> X.
arg_prepath(#arg{prepath = X}) -> X.
arg_pathinfo(#arg{pathinfo = X}) -> X.
%% #http_request{} accessors
http_request_method(#http_request{method = X}) -> X.
http_request_path(#http_request{path = X}) -> X.
http_request_version(#http_request{version = X}) -> X.
%% #http_response{} accessors
http_response_version(#http_response{version = X}) -> X.
http_response_status(#http_response{status = X}) -> X.
http_response_phrase(#http_response{phrase = X}) -> X.
%% #headers{} accessors
headers_connection(#headers{connection = X}) -> X.
headers_accept(#headers{accept = X}) -> X.
headers_host(#headers{host = X}) -> X.
headers_if_modified_since(#headers{if_modified_since = X}) -> X.
headers_if_match(#headers{if_match = X}) -> X.
headers_if_none_match(#headers{if_none_match = X}) -> X.
headers_if_range(#headers{if_range = X}) -> X.
headers_if_unmodified_since(#headers{if_unmodified_since = X}) -> X.
headers_range(#headers{range = X}) -> X.
headers_referer(#headers{referer = X}) -> X.
headers_user_agent(#headers{user_agent = X}) -> X.
headers_accept_ranges(#headers{accept_ranges = X}) -> X.
headers_cookie(#headers{cookie = X}) -> X.
headers_keep_alive(#headers{keep_alive = X}) -> X.
headers_location(#headers{location = X}) -> X.
headers_content_length(#headers{content_length = X}) -> X.
headers_content_type(#headers{content_type = X}) -> X.
headers_content_encoding(#headers{content_encoding = X}) -> X.
headers_authorization(#headers{authorization = X}) -> X.
headers_transfer_encoding(#headers{transfer_encoding = X}) -> X.
headers_x_forwarded_for(#headers{x_forwarded_for = X}) -> X.
headers_other(#headers{other = X}) -> X.
%% parse the command line query data
%% Parse the query string of the request (Arg#arg.querydata) into a
%% [{Key, Value}] list; yields [] when there is no query data.
parse_query(Arg) ->
    case Arg#arg.querydata of
        [] ->
            [];
        QueryData ->
            parse_post_data_urlencoded(QueryData)
    end.
%% parse url encoded POST data
%% Parse a url-encoded POST body (Arg#arg.clidata) into a
%% [{Key, Value}] list. Only 'POST' requests are accepted; any other
%% method logs an error and yields [].
parse_post(Arg) ->
    D = Arg#arg.clidata,
    Req = Arg#arg.req,
    case Req#http_request.method of
        'POST' ->
            case D of
                [] -> [];
                _ ->
                    parse_post_data_urlencoded(D)
            end;
        Other ->
            error_logger:error_msg(
              "ERROR: Can't parse post body for ~p requests: URL: ~p",
              [Other, Arg#arg.fullpath]),
            []
    end.
%%
%% Changed implementation of multipart form data. There is a new config
%% parameter called
%%
%%            partial_post_size
%%
%% which if set to an integer value
%% will cause the content of the post content to be sent to the out/1
%% function in chunks of this size.
%%
%% It is possible to get the server to maintain a state on behalf of the
%% out/1 user by returning {get_more, Cont, State}.
%%
%%
%% yaws_api:parse_multipart_post/1 will return either:
%%
%% {cont, Cont, Res} where Res is new result(s) from this segment. This
%% indicates that there is more data to come and the out/1 function
%% should return {get_more, Cont, User_state} where User_state might
%% usefully be a File Descriptor.
%%
%% or {result, Res} if this is the last (or only) segment.
%%
%% Res is a list of {head, Header} | {part_body, Binary} | {body, Binary}
%%
%% Example usage could be:
%%
%% <erl>
%%
%% out(A) ->
%%        case yaws_api:parse_multipart_post(A) of
%%             {cont, Cont, Res} ->
%%                    St = handle_res(A, Res),
%%                    {get_more, Cont, St};
%%             {result, Res} ->
%%                    handle_res(A, Res),
%%                    {html, f("<pre>Done</pre>", [])}
%%        end.
%%
%% handle_res(A, [{head, Name}|T]) ->
%%      io:format("head:~p~n", [Name]),
%%      handle_res(A, T);
%% handle_res(A, [{part_body, Data}|T]) ->
%%      io:format("part_body:~p~n", [Data]),
%%      handle_res(A, T);
%% handle_res(A, [{body, Data}|T]) ->
%%      io:format("body:~p~n", [Data]),
%%      handle_res(A, T);
%% handle_res(A, []) ->
%%      io:format("End_res~n").
%%
%% </erl>
%% Parse a multipart/form-data POST; results are returned as lists
%% (see parse_multipart_post/2 for the Options argument).
parse_multipart_post(Arg) ->
    parse_multipart_post(Arg, [list]).

%% Parse a multipart/form-data POST body. Requires a 'POST' request
%% with a matching Content-Type header; on first entry the boundary is
%% taken from the header, while Arg#arg.cont = {cont, _} resumes a
%% previously suspended parse. Anything else logs an error and
%% returns [].
parse_multipart_post(Arg, Options) ->
    H = Arg#arg.headers,
    CT = H#headers.content_type,
    Req = Arg#arg.req,
    case Req#http_request.method of
        'POST' ->
            case CT of
                undefined ->
                    error_logger:error_msg("Can't parse multipart if we "
                                           "have no Content-Type header",[]),
                    [];
                "multipart/form-data"++Line ->
                    case Arg#arg.cont of
                        {cont, Cont} ->
                            parse_multipart(
                              un_partial(Arg#arg.clidata),
                              {cont, Cont});
                        undefined ->
                            %% boundary=... is an attribute of the
                            %% Content-Type header line
                            LineArgs = parse_arg_line(Line),
                            {value, {_, Boundary}} =
                                lists:keysearch(boundary, 1, LineArgs),
                            parse_multipart(
                              un_partial(Arg#arg.clidata),
                              Boundary, Options)
                    end;
                _Other ->
                    error_logger:error_msg("Can't parse multipart if we "
                                           "find no multipart/form-data",[]),
                    []
            end;
        Other ->
            error_logger:error_msg("Can't parse multipart if get a ~p",
                                   [Other]),
            []
    end.
%% Strip the {partial, Bin} wrapper used for chunked client data;
%% anything else is passed through unchanged.
un_partial(Data) ->
    case Data of
        {partial, Bin} -> Bin;
        _ -> Data
    end.
%% Parse a ";k1=v1; k2=v2 ..." attribute tail (as found after
%% "multipart/form-data" or "form-data") into a [{Key, Value}] list.
%% Note: each attribute must be introduced by ';'; any other leading
%% character has no matching clause and crashes, as before.
parse_arg_line(Line) ->
    parse_arg_line(Line, []).

parse_arg_line([], Acc) ->
    Acc;
parse_arg_line([$\s | Rest], Acc) ->
    parse_arg_line(Rest, Acc);
parse_arg_line([$; | Rest], Acc) ->
    {KeyVal, Tail} = parse_arg_key(Rest, [], []),
    parse_arg_line(Tail, [KeyVal | Acc]).
%%
%% Scan an attribute key up to '=' (a value follows), ';' (flag with no
%% value) or end of input; spaces inside the key are skipped. Key is
%% accumulated in reverse.
parse_arg_key([], Key, Value) ->
    make_parse_line_reply(Key, Value, []);
parse_arg_key([$;|Line], Key, Value) ->
    make_parse_line_reply(Key, Value, [$;|Line]);
parse_arg_key([$ |Line], Key, Value) ->
    parse_arg_key(Line, Key, Value);
parse_arg_key([$=|Line], Key, Value) ->
    parse_arg_value(Line, Key, Value, false, false);
parse_arg_key([C|Line], Key, Value) ->
    parse_arg_key(Line, [C|Key], Value).
%%
%% We need to deal with quotes and initial spaces here.
%% parse_arg_value(String, Key, ValueAcc, InQuoteBool, InValueBool)
%%
%% Scan an attribute value; Quote tracks whether we are inside a quoted
%% string, Begun whether any value character has been consumed yet.
parse_arg_value([], Key, Value, _, _) ->
    make_parse_line_reply(Key, Value, []);
parse_arg_value([$\\,$"|Line], Key, Value, Quote, Begun) ->
    %% backslash-escaped quote is part of the value
    parse_arg_value(Line, Key, [$"|Value], Quote, Begun);
parse_arg_value([$"|Line], Key, Value, false, _) ->
    %% opening quote
    parse_arg_value(Line, Key, Value, true, true);
parse_arg_value([$"], Key, Value, true, _) ->
    %% closing quote at end of input
    make_parse_line_reply(Key, Value, []);
parse_arg_value([$",$;|Line], Key, Value, true, _) ->
    make_parse_line_reply(Key, Value, [$;|Line]);
parse_arg_value([$;|Line], Key, Value, false, _) ->
    make_parse_line_reply(Key, Value, [$;|Line]);
parse_arg_value([$ |Line], Key, Value, false, true) ->
    %% unquoted space ends a begun value...
    make_parse_line_reply(Key, Value, Line);
parse_arg_value([$ |Line], Key, Value, false, false) ->
    %% ...but leading spaces are skipped
    parse_arg_value(Line, Key, Value, false, false);
parse_arg_value([C|Line], Key, Value, Quote, _) ->
    parse_arg_value(Line, Key, [C|Value], Quote, true).
%%
%% Build the {{KeyAtom, Value}, Rest} reply used by the attribute
%% scanners; the (reversed) key is lower-cased and atomized, the
%% (reversed) value is restored to normal order.
make_parse_line_reply(Key, Value, Rest) ->
    KeyAtom = list_to_atom(yaws:funreverse(Key, {yaws, to_lowerchar})),
    {{KeyAtom, lists:reverse(Value)}, Rest}.
%% State threaded through the re-entrant multipart parser so a parse
%% can be suspended ({cont, ...}) and resumed when more data arrives.
-record(mp_parse_state, {
          state,        %% boundary | start_header | header | body
          boundary_ctx, %% bm_* search context built from the boundary
          hdr_end_ctx,  %% bm_* search context for "\r\n\r\n"
          old_data,     %% unconsumed bytes kept for the next call
          data_type     %% list | binary: how body data is returned
         }).
%% Stateful parser of multipart data - allows easy re-entry
parse_multipart(Data, St) ->
    parse_multipart(Data, St, [list]).

%% Drive parse_multi/3 and restore the accumulated results to
%% chronological order; {cont, ...} means more client data is needed
%% to finish the parse.
parse_multipart(Data, St, Options) ->
    case parse_multi(Data, St, Options) of
        {cont, St2, Res} ->
            {cont, {cont, St2}, lists:reverse(Res)};
        {result, Res} ->
            {result, lists:reverse(Res)}
    end.
%% Reentry point
%% Reentry point: prepend the bytes left over from the previous call
parse_multi(Data, {cont, #mp_parse_state{old_data = OldData}=ParseState}, _) ->
    NData = <<OldData/binary, Data/binary>>,
    parse_multi(NData, ParseState, []);
%% "--\r\n" after a boundary marks the terminating boundary; skip it
parse_multi(<<"--\r\n", Data/binary>>,
            #mp_parse_state{state=boundary}=ParseState, Acc) ->
    parse_multi(Data, ParseState, Acc);
%% boundary state: locate the next boundary marker
parse_multi(Data, #mp_parse_state{state=boundary}=ParseState, Acc) ->
    #mp_parse_state{boundary_ctx = BoundaryCtx} = ParseState,
    case bm_find(Data, BoundaryCtx) of
        {0, Len} ->
            %% boundary right at the front: skip it and its CRLF
            LenPlusCRLF = Len+2,
            <<_:LenPlusCRLF/binary, Rest/binary>> = Data,
            NParseState = ParseState#mp_parse_state{state = start_header},
            parse_multi(Rest, NParseState, Acc);
        {_Pos, _Len} ->
            %% boundary further in: a leading CRLF means headers start
            %% here, otherwise we are inside a part body
            {NParseState, NData} = case Data of
                                       <<"\r\n", Rest/binary>> ->
                                           {ParseState#mp_parse_state{
                                              state = start_header},
                                            Rest};
                                       _ ->
                                           {ParseState#mp_parse_state{
                                              state = body}, Data}
                                   end,
            parse_multi(NData, NParseState, Acc);
        nomatch ->
            case Data of
                <<>> ->
                    {result, Acc};
                <<"\r\n">> ->
                    {result, Acc};
                _ ->
                    %% keep what we have and ask for more data
                    NParseState = ParseState#mp_parse_state{old_data = Data},
                    {cont, NParseState, Acc}
            end
    end;
parse_multi(Data, #mp_parse_state{state=start_header}=ParseState, Acc) ->
    NParseState = ParseState#mp_parse_state{state = header},
    parse_multi(Data, NParseState, Acc, [], []);
%% body state: everything up to the next boundary is part content
parse_multi(Data, #mp_parse_state{state=body}=ParseState, Acc) ->
    #mp_parse_state{boundary_ctx = BoundaryCtx} = ParseState,
    case bm_find(Data, BoundaryCtx) of
        {Pos, Len} ->
            <<Body:Pos/binary, _:Len/binary, Rest/binary>> = Data,
            BodyData = case ParseState#mp_parse_state.data_type of
                           list ->
                               binary_to_list(Body);
                           binary ->
                               Body
                       end,
            NAcc = [{body, BodyData}|Acc],
            NParseState = ParseState#mp_parse_state{state = boundary},
            parse_multi(Rest, NParseState, NAcc);
        nomatch ->
            %% no boundary yet: emit what we have as a partial body
            NParseState = ParseState#mp_parse_state{
                            state = body,
                            old_data = <<>>
                           },
            BodyData = case ParseState#mp_parse_state.data_type of
                           list ->
                               binary_to_list(Data);
                           binary ->
                               Data
                       end,
            NAcc = [{part_body, BodyData}|Acc],
            {cont, NParseState, NAcc}
    end;
%% Initial entry point
parse_multi(Data, Boundary, Options) ->
    B1 = "\r\n--"++Boundary,
    D1 = <<"\r\n", Data/binary>>,
    BoundaryCtx = bm_start(B1),
    HdrEndCtx = bm_start("\r\n\r\n"),
    %% option folding: any 'list' wins over 'binary'
    DataType = lists:foldl(fun(_, list) ->
                                   list;
                              (list, _) ->
                                   list;
                              (binary, undefined) ->
                                   binary;
                              (_, Acc) ->
                                   Acc
                           end, undefined, Options),
    ParseState = #mp_parse_state{state = boundary,
                                 boundary_ctx = BoundaryCtx,
                                 hdr_end_ctx = HdrEndCtx,
                                 data_type = DataType},
    parse_multi(D1, ParseState, []).
%% Header parsing: first wait until the whole header block
%% ("\r\n\r\n") is buffered, then decode it line by line.
parse_multi(Data, #mp_parse_state{state=start_header}=ParseState, Acc, [], []) ->
    #mp_parse_state{hdr_end_ctx = HdrEndCtx} = ParseState,
    case bm_find(Data, HdrEndCtx) of
        nomatch ->
            {cont, ParseState#mp_parse_state{old_data = Data}, Acc};
        _ ->
            NParseState = ParseState#mp_parse_state{state = header},
            parse_multi(Data, NParseState, Acc, [], [])
    end;
parse_multi(Data, #mp_parse_state{state=header}=ParseState, Acc, Name, Hdrs) ->
    case erlang:decode_packet(httph_bin, Data, []) of
        {ok, http_eoh, Rest} ->
            %% end of headers: emit {head, ...} and switch to body
            Head = case Name of
                       [] ->
                           lists:reverse(Hdrs);
                       _ ->
                           {Name, lists:reverse(Hdrs)}
                   end,
            NParseState = ParseState#mp_parse_state{state = body},
            parse_multi(Rest, NParseState, [{head, Head}|Acc]);
        {ok, {http_header, _, Hdr, _, HdrVal}, Rest} when is_atom(Hdr) ->
            %% well-known header recognized by decode_packet
            Header = {case Hdr of
                          'Content-Type' ->
                              content_type;
                          Else ->
                              Else
                      end,
                      binary_to_list(HdrVal)},
            parse_multi(Rest, ParseState, Acc, Name, [Header|Hdrs]);
        {ok, {http_header, _, Hdr, _, HdrVal}, Rest} ->
            HdrValStr = binary_to_list(HdrVal),
            case yaws:to_lower(binary_to_list(Hdr)) of
                "content-disposition" ->
                    %% the part's name comes from Content-Disposition
                    "form-data"++Params = HdrValStr,
                    Parameters = parse_arg_line(Params),
                    {value, {_, NewName}} = lists:keysearch(name, 1, Parameters),
                    parse_multi(Rest, ParseState, Acc,
                                NewName, Parameters++Hdrs);
                LowerHdr ->
                    parse_multi(Rest, ParseState, Acc,
                                Name, [{LowerHdr, HdrValStr}|Hdrs])
            end;
        {error, _Reason}=Error ->
            Error
    end.
%% parse POST data when ENCTYPE is unset or
%% Content-type: application/x-www-form-urlencoded
%% the POST data is the content of ARG#arg.clidata
%% the alternative is
%% Content-type: multipart/form-data; boundary=-------------------7cd1d6371ec
%% which is used for file upload
%% Decode "k1=v1&k2=v2..." (x-www-form-urlencoded) data into a
%% [{Key, Value}] list; see do_parse_spec/4 for the details.
parse_post_data_urlencoded(Bin) ->
    do_parse_spec(Bin, nokey, [], key).
%% It will return a [{Key, Value}] list from the post data
%% Tokenizer for application/x-www-form-urlencoded data. Walks the
%% input (binary or list), alternating between 'key' and 'value' mode,
%% and produces a [{Key, Value}] list (Value = undefined for a key
%% without '='). NOTE: the two %-escape clause heads below were
%% reconstructed -- the binary patterns were garbled in this copy.
do_parse_spec(<<$%, Hi:8, Lo:8, Tail/binary>>, Last, Cur, State)
  when Hi /= $u ->
    %% %XY escape -> one byte
    Hex = yaws:hex_to_integer([Hi, Lo]),
    do_parse_spec(Tail, Last, [ Hex | Cur], State);
do_parse_spec(<<$&, Tail/binary>>, _Last , Cur, key) ->
    %% '&' while scanning a key: key had no value
    [{lists:reverse(Cur), undefined} |
     do_parse_spec(Tail, nokey, [], key)]; %% cont keymode
do_parse_spec(<<$&, Tail/binary>>, Last, Cur, value) ->
    V = {Last, lists:reverse(Cur)},
    [V | do_parse_spec(Tail, nokey, [], key)];
do_parse_spec(<<$+, Tail/binary>>, Last, Cur, State) ->
    %% '+' encodes a space
    do_parse_spec(Tail, Last, [$\s|Cur], State);
do_parse_spec(<<$=, Tail/binary>>, _Last, Cur, key) ->
    do_parse_spec(Tail, lists:reverse(Cur), [], value); %% change mode
do_parse_spec(<<$%, $u, A:8, B:8, C:8, D:8, Tail/binary>>,
              Last, Cur, State) ->
    %% non-standard %uXXXX encoding for Unicode characters
    Hex = yaws:hex_to_integer([A,B,C,D]),
    do_parse_spec(Tail, Last, [ Hex | Cur], State);
do_parse_spec(<<H:8, Tail/binary>>, Last, Cur, State) ->
    do_parse_spec(Tail, Last, [H|Cur], State);
do_parse_spec(<<>>, nokey, Cur, _State) ->
    [{lists:reverse(Cur), undefined}];
do_parse_spec(<<>>, Last, Cur, _State) ->
    [{Last, lists:reverse(Cur)}];
do_parse_spec(undefined,_,_,_) ->
    [];
do_parse_spec(QueryList, Last, Cur, State) when is_list(QueryList) ->
    %% list input is normalized to a binary first
    do_parse_spec(list_to_binary(QueryList), Last, Cur, State).
%% Map an HTTP numeric status code to its standard reason phrase.
%% Unknown codes deliberately crash with function_clause so a bogus
%% status is caught early. Fixes: the stray trailing space in the 101
%% phrase, and the final three extension clauses which had been
%% garbled away in this copy, leaving the function unterminated.
code_to_phrase(100) -> "Continue";
code_to_phrase(101) -> "Switching Protocols";
code_to_phrase(200) -> "OK";
code_to_phrase(201) -> "Created";
code_to_phrase(202) -> "Accepted";
code_to_phrase(203) -> "Non-Authoritative Information";
code_to_phrase(204) -> "No Content";
code_to_phrase(205) -> "Reset Content";
code_to_phrase(206) -> "Partial Content";
code_to_phrase(207) -> "Multi Status";
code_to_phrase(300) -> "Multiple Choices";
code_to_phrase(301) -> "Moved Permanently";
code_to_phrase(302) -> "Found";
code_to_phrase(303) -> "See Other";
code_to_phrase(304) -> "Not Modified";
code_to_phrase(305) -> "Use Proxy";
code_to_phrase(306) -> "(Unused)";
code_to_phrase(307) -> "Temporary Redirect";
code_to_phrase(400) -> "Bad Request";
code_to_phrase(401) -> "Unauthorized";
code_to_phrase(402) -> "Payment Required";
code_to_phrase(403) -> "Forbidden";
code_to_phrase(404) -> "Not Found";
code_to_phrase(405) -> "Method Not Allowed";
code_to_phrase(406) -> "Not Acceptable";
code_to_phrase(407) -> "Proxy Authentication Required";
code_to_phrase(408) -> "Request Timeout";
code_to_phrase(409) -> "Conflict";
code_to_phrase(410) -> "Gone";
code_to_phrase(411) -> "Length Required";
code_to_phrase(412) -> "Precondition Failed";
code_to_phrase(413) -> "Request Entity Too Large";
code_to_phrase(414) -> "Request-URI Too Long";
code_to_phrase(415) -> "Unsupported Media Type";
code_to_phrase(416) -> "Requested Range Not Satisfiable";
code_to_phrase(417) -> "Expectation Failed";
code_to_phrase(500) -> "Internal Server Error";
code_to_phrase(501) -> "Not Implemented";
code_to_phrase(502) -> "Bad Gateway";
code_to_phrase(503) -> "Service Unavailable";
code_to_phrase(504) -> "Gateway Timeout";
code_to_phrase(505) -> "HTTP Version Not Supported";
%% Below are some non-HTTP status codes from other protocol standards that
%% we've seen used with HTTP in the wild, so we include them here. HTTP 1.1
%% section 6.1.1 allows for this sort of extensibility, but we recommend
%% sticking with the HTTP status codes above for maximal portability and
%% interoperability.
%%
code_to_phrase(451) -> "Requested Action Aborted";   %% from FTP (RFC 959)
code_to_phrase(452) -> "Insufficient Storage Space"; %% from FTP (RFC 959)
code_to_phrase(453) -> "Not Enough Bandwidth".       %% from RTSP (RFC 2326)
%%
%% server side include
%%
%% Server-side include: read each file in Files relative to DocRoot
%% and return the concatenation as {html, IoList}. A file that cannot
%% be read is replaced inline by an error string rather than failing
%% the whole request.
ssi(DocRoot, Files) ->
    L = lists:map(fun(F) ->
                          case file:read_file([DocRoot ++ [$/|F]]) of
                              {ok, Bin} ->
                                  Bin;
                              {error, Reason} ->
                                  io_lib:format("Cannot include file ~p: ~p",
                                                [F, Reason])
                          end
                  end, Files),
    {html, L}.
%% include pre
%% Include the given files (relative to DocRoot) as an HTML-escaped,
%% preformatted box.
pre_ssi_files(DocRoot, Files) ->
    {html, Content} = ssi(DocRoot, Files),
    pre_ssi_string(Content).

%% Render Str in a <pre> box with the default CSS class "box".
pre_ssi_string(Str) ->
    pre_ssi_string(Str, "box").

%% Render Str, HTML-escaped, inside <div class=Class><pre>...</pre>.
pre_ssi_string(Str, Class) ->
    Escaped = htmlize_l(Str),
    {html, ["<br><br>\n<div class=\"", Class, "\"> <pre>\n",
            Escaped,
            "\n</pre></div>\n<br>\n\n"]}.
%% convenience
%% sprintf-style convenience wrapper around io_lib:format/2.
f(Fmt, Args) ->
    io_lib:format(Fmt, Args).

%% Format an alternating [Fmt1, Args1, Fmt2, Args2, ...] list, one
%% formatted chunk per pair.
fl([]) ->
    [];
fl([Fmt, Args | Rest]) ->
    [f(Fmt, Args) | fl(Rest)].
%% htmlize
%% HTML-escape a binary or a (possibly deep) character list; binaries
%% come back as binaries, lists as lists.
htmlize(Data) when is_binary(Data) ->
    list_to_binary(htmlize_l(binary_to_list(Data)));
htmlize(Data) when is_list(Data) ->
    htmlize_l(Data).
%% HTML-escape a single character: the four HTML-special characters
%% map to their entity binaries, everything else passes through.
htmlize_char(Ch) ->
    case Ch of
        $> -> <<"&gt;">>;
        $< -> <<"&lt;">>;
        $& -> <<"&amp;">>;
        $" -> <<"&quot;">>;
        _  -> Ch
    end.
%% htmlize list (usually much more efficient than above)
%% HTML-escape a possibly deep list of chars/binaries/sublists using an
%% explicit accumulator (usually much more efficient than escaping one
%% character at a time).
htmlize_l(List) ->
    htmlize_l(List, []).

htmlize_l([], Acc) ->
    lists:reverse(Acc);
htmlize_l([$> | Rest], Acc) ->
    htmlize_l(Rest, lists:reverse("&gt;", Acc));
htmlize_l([$< | Rest], Acc) ->
    htmlize_l(Rest, lists:reverse("&lt;", Acc));
htmlize_l([$& | Rest], Acc) ->
    htmlize_l(Rest, lists:reverse("&amp;", Acc));
htmlize_l([$" | Rest], Acc) ->
    htmlize_l(Rest, lists:reverse("&quot;", Acc));
htmlize_l([C | Rest], Acc) when is_integer(C) ->
    htmlize_l(Rest, [C | Acc]);
htmlize_l([B | Rest], Acc) when is_binary(B) ->
    %% nested binaries are escaped recursively and kept as sub-lists
    htmlize_l(Rest, [htmlize_l(binary_to_list(B)) | Acc]);
htmlize_l([L | Rest], Acc) when is_list(L) ->
    htmlize_l(Rest, [htmlize_l(L) | Acc]).
%% Current wall-clock time in whole seconds since the Unix epoch.
%% Uses os:timestamp/0 instead of the deprecated erlang:now/0: only
%% wall-clock seconds are needed here, not now/0's strictly-increasing
%% uniqueness guarantee (which serializes callers on a global lock).
secs() ->
    {MS, S, _} = os:timestamp(),
    (MS * 1000000) + S.
%% Build a Set-Cookie header tuple suitable for returning from out/1.
setcookie(Name, Value) ->
    {header, {set_cookie, f("~s=~s;", [Name, Value])}}.

setcookie(Name, Value, Path) ->
    {header, {set_cookie, f("~s=~s; path=~s", [Name, Value, Path])}}.

setcookie(Name, Value, Path, Expire) ->
    setcookie(Name, Value, Path, Expire, [], []).

setcookie(Name, Value, Path, Expire, Domain) ->
    setcookie(Name, Value, Path, Expire, Domain,[]).

%% Full variant: [] for Expire/Domain omits that attribute, an empty
%% Path defaults to "/", and Secure == on adds the "secure" attribute.
setcookie(Name, Value, Path, Expire, Domain, Secure) ->
    SetDomain = if Domain == [] -> "";
                   true -> " Domain="++Domain++";"
                end,
    SetExpire = if Expire == [] -> "";
                   true -> " Expires="++Expire++";"
                end,
    SetPath = if Path == [] -> "/";
                 true -> Path
              end,
    SetSecure = if Secure == on -> " secure;";
                   true -> ""
                end,
    {header, {set_cookie, f("~s=~s;~s~s~s Path=~s",
                            [Name,Value,SetDomain,SetExpire,
                             SetSecure, SetPath])}}.
%% This function can be passed the cookie we get in the Arg#arg.headers.cookies
%% to search for a specific cookie
%% return [] if not found
%% return Str if found
%% if several cookies with the same name are passed from the browser,
%% only the first match is returned
%% Find the value of the cookie named Cookie; accepts either a whole
%% #arg{} or the raw header cookie list. Returns [] when not found.
find_cookie_val(Cookie, A) when is_record(A, arg) ->
    find_cookie_val(Cookie, (A#arg.headers)#headers.cookie);
%%
find_cookie_val(_Cookie, []) ->
    [];
find_cookie_val(Cookie, [FullCookie | FullCookieList]) ->
    %% try each "k1=v1; k2=v2" header string until a match is found
    case eat_cookie(Cookie, FullCookie) of
        [] ->
            find_cookie_val(Cookie, FullCookieList);
        Val ->
            Val
    end.
%% Remove leading spaces before eating.
%% Extract the value of Cookie from one "k1=v1; k2=v2" header string,
%% skipping leading spaces; [] when the name does not occur.
eat_cookie([], _) -> [];
eat_cookie([$\s|T], Str) -> eat_cookie(T, Str);
eat_cookie(_, []) -> [];
eat_cookie(Cookie, [$\s|T]) -> eat_cookie(Cookie, T);
eat_cookie(Cookie, Str) when is_list(Cookie),is_list(Str) ->
    %% eat_cookie2 throws when the name is absent -> []
    try
        eat_cookie2(Cookie++"=", Str, Cookie)
    catch
        _:_ -> []
    end.

%% Match "Name=" character by character; on mismatch skip to the next
%% ';'-separated pair and retry, on full match grab the value.
eat_cookie2(_, [], _) ->
    throw("not found");
eat_cookie2([H|T], [H|R], C) ->
    eat_cookie2(T, R, C);
eat_cookie2([H|_], [X|R], C) when H =/= X ->
    {_,Rest} = eat_until(R, $;),
    eat_cookie(C, Rest);
eat_cookie2([], L, _) ->
    {Meat,_} = eat_until(L, $;),
    Meat.

%% Split L at the first occurrence of U: {Before, After}.
eat_until(L, U) ->
    eat_until(L, U, []).

eat_until([H|T], H, Acc) -> {lists:reverse(Acc), T};
eat_until([H|T], U, Acc) when H =/= U -> eat_until(T, U, [H|Acc]);
eat_until([], _, Acc) -> {lists:reverse(Acc), []}.
%% URL-decode a (possibly deep) string. %XY escapes are expanded up to
%% the first '?'; everything from '?' on is returned untouched because
%% the query string is parsed separately.
url_decode([$%, Hi, Lo | Rest]) ->
    Char = yaws:hex_to_integer([Hi, Lo]),
    [Char | url_decode(Rest)];
url_decode([$? | QueryString]) ->
    %% leave the query string for the query parser
    [$? | QueryString];
url_decode([C | Rest]) when is_integer(C) ->
    [C | url_decode(Rest)];
url_decode([Deep | Rest]) when is_list(Deep) ->
    %% deep lists
    [url_decode(Deep) | url_decode(Rest)];
url_decode([]) ->
    [].
%% Normalize a path: collapse "./", resolve "../" against preceding
%% components and map '\\' to '/'. The worker functions operate on the
%% path in reverse, N counting the ".." levels still to swallow.
path_norm(Path) ->
    path_norm_reverse(lists:reverse(Path)).

path_norm_reverse("/" ++ T) -> start_dir(0, "/", T);
path_norm_reverse( T) -> start_dir(0, "", T).

%% start_dir: positioned at the beginning of a (reversed) component
start_dir(N, Path, [$\\|T] ) -> start_dir(N, Path, [$/|T]);
start_dir(N, Path, ".." ) -> rest_dir(N, Path, "");
start_dir(N, Path, "/" ++ T ) -> start_dir(N , Path, T);
start_dir(N, Path, "./" ++ T ) -> start_dir(N , Path, T);
start_dir(N, Path, "../" ++ T ) -> start_dir(N + 1, Path, T);
start_dir(N, Path, T ) -> rest_dir (N , Path, T).

%% rest_dir: consuming a component; N > 0 means drop it (owed to "..")
rest_dir (_N, Path, [] ) -> case Path of
                                [] -> "/";
                                _ -> Path
                            end;
rest_dir (0, Path, [ $/ | T ] ) -> start_dir(0 , [ $/ | Path ], T);
rest_dir (N, Path, [ $/ | T ] ) -> start_dir(N - 1, Path , T);
rest_dir (N, Path, [ $\\ | T ] ) -> rest_dir(N, Path, [$/|T]);
rest_dir (0, Path, [ H | T ] ) -> rest_dir (0 , [ H | Path ], T);
rest_dir (N, Path, [ _H | T ] ) -> rest_dir (N , Path , T).
%% url decode the path and return {Path, QueryPart}
%% URL-decode Path and split it at the first '?':
%% returns {NormalizedPath, QueryString}. %XY escapes are decoded
%% before the '?'; a decoded NUL (%00) or a literal NUL byte aborts
%% with exit(badurl) (guards against NUL injection). The query part is
%% left undecoded for the separate query parser.
%% NOTE: the %-escape clause head was reconstructed -- the list
%% pattern had been garbled in this copy of the file.
url_decode_q_split(Path) ->
    url_decode_q_split(Path, []).

url_decode_q_split([$%, Hi, Lo | Tail], Ack) ->
    Hex = yaws:hex_to_integer([Hi, Lo]),
    if Hex == 0 -> exit(badurl);
       true -> ok
    end,
    url_decode_q_split(Tail, [Hex|Ack]);
url_decode_q_split([$?|T], Ack) ->
    %% Don't decode the query string here,
    %% that is parsed separately.
    {path_norm_reverse(Ack), T};
url_decode_q_split([H|T], Ack) when H /= 0 ->
    url_decode_q_split(T, [H|Ack]);
url_decode_q_split([], Ack) ->
    {path_norm_reverse(Ack), []}.
%% Percent-encode a string for use in a URL. Alphanumerics and the
%% characters _ . - / : are passed through; everything else becomes
%% %XY (single hex digits are zero-padded). NOTE: the single-digit
%% escape clause was reconstructed -- it had been garbled in this
%% copy of the file.
url_encode([H|T]) ->
    if
        H >= $a, $z >= H ->
            [H|url_encode(T)];
        H >= $A, $Z >= H ->
            [H|url_encode(T)];
        H >= $0, $9 >= H ->
            [H|url_encode(T)];
        H == $_; H == $.; H == $-; H == $/; H == $: -> % FIXME: more..
            [H|url_encode(T)];
        true ->
            case yaws:integer_to_hex(H) of
                [X, Y] ->
                    [$%, X, Y | url_encode(T)];
                [X] ->
                    %% pad single hex digit to two characters
                    [$%, $0, X | url_encode(T)]
            end
    end;
url_encode([]) ->
    [].
%% Convenience: an out/1 return value redirecting the client to Url.
redirect(Url) -> [{redirect, Url}].

%% True for space/tab, i.e. header-folding whitespace.
is_nb_space(X) ->
    lists:member(X, [$\s, $\t]).
%% ret: {line, Line, Trail} | {lastline, Line, Trail} | need_more
%% Extract one (possibly folded) header line from L.
%% Returns {line, Line, Trail}, {lastline, Line, Trail} when the
%% terminating blank line follows, or need_more if no full line is
%% buffered yet.
get_line(Data) ->
    get_line(Data, []).

get_line("\r\n\r\n" ++ Rest, Acc) ->
    {lastline, lists:reverse(Acc), Rest};
get_line("\r\n" ++ Rest, Acc) when Rest /= [] ->
    %% a continuation line starts with space/tab (folded header)
    case lists:member(hd(Rest), [$\s, $\t]) of
        true ->
            get_line(Rest, [$\n, $\r | Acc]);
        false ->
            {line, lists:reverse(Acc), Rest}
    end;
get_line("\r\n", Acc) ->
    {line, lists:reverse(Acc), []};
get_line([C | Rest], Acc) ->
    get_line(Rest, [C | Acc]);
get_line([], _Acc) ->
    need_more.
%% Look up the MIME type for FileName via the generated mime_types
%% module; [_|T] strips the leading dot of the extension, and a file
%% without an extension uses the default ([]) entry.
mime_type(FileName) ->
    case filename:extension(FileName) of
        [_|T] ->
            element(2, mime_types:t(T));
        [] ->
            element(2, mime_types:t([]))
    end.
%% Asynchronous delivery
%% Hand Data to the yaws worker process without waiting for delivery.
stream_chunk_deliver(YawsPid, Data) ->
    YawsPid ! {streamcontent, Data}.
%% Synchronous (on ultimate gen_tcp:send) delivery
%% Returns: ok | {error, Reason}
%% Deliver Data to the yaws worker and block until it acknowledges.
%% A monitor detects the worker dying while we wait; after demonitor
%% any already-queued DOWN message is flushed so it does not leak
%% into the caller's mailbox.
stream_chunk_deliver_blocking(YawsPid, Data) ->
    Ref = erlang:monitor(process, YawsPid),
    YawsPid ! {streamcontent_with_ack, self(), Data},
    receive
        {YawsPid, streamcontent_ack} ->
            erlang:demonitor(Ref),
            %% flush incase a DOWN message was sent before the demonitor call
            receive
                {'DOWN', Ref, _, _, _} ->
                    ok
            after 0 ->
                    ok
            end;
        {'DOWN', Ref, _, _, Info} ->
            {error, {ypid_crash, Info}}
    end.

%% Tell the yaws worker that the stream is complete.
stream_chunk_end(YawsPid) ->
    YawsPid ! endofstreamcontent.
%% Send IoList on Sock, dispatching on the socket flavour:
%% ssl sockets go through ssl:send/2, plain ones through gen_tcp:send/2.
stream_process_deliver(Sock, IoList) ->
    case Sock of
        {sslsocket, _, _} ->
            ssl:send(Sock, IoList);
        _ ->
            gen_tcp:send(Sock, IoList)
    end.
%% Deliver IoList as one HTTP chunked-transfer chunk on Sock. A
%% zero-sized chunk is the chunked-encoding terminator, so an empty
%% IoList is routed to stream_process_deliver_final_chunk/2 instead.
%% Bug fixed: the previous code bound the *result* of that call
%% ('ok' | {error, Reason}) to Chunk and then tried to send it too,
%% which fails for empty input.
stream_process_deliver_chunk(Sock, IoList) ->
    case erlang:iolist_size(IoList) of
        0 ->
            stream_process_deliver_final_chunk(Sock, IoList);
        S ->
            Chunk = [yaws:integer_to_hex(S), "\r\n", IoList, "\r\n"],
            stream_process_deliver(Sock, Chunk)
    end.
%% Send the last chunk: data (if any) followed by the "0\r\n\r\n"
%% chunked-encoding terminator.
stream_process_deliver_final_chunk(Sock, IoList) ->
    Chunk = case erlang:iolist_size(IoList) of
                0 ->
                    <<"0\r\n\r\n">>;
                S ->
                    [yaws:integer_to_hex(S), "\r\n", IoList, "\r\n0\r\n\r\n"]
            end,
    stream_process_deliver(Sock, Chunk).

%% Finish a stream-process response. For live sockets, ownership is
%% handed back to the yaws worker before signalling end-of-stream;
%% 'closed' tells the worker the socket is already gone.
stream_process_end(closed, YawsPid) ->
    YawsPid ! {endofstreamcontent, closed};
stream_process_end(Sock={sslsocket,_,_}, YawsPid) ->
    ssl:controlling_process(Sock, YawsPid),
    YawsPid ! endofstreamcontent;
stream_process_end(Sock, YawsPid) ->
    gen_tcp:controlling_process(Sock, YawsPid),
    YawsPid ! endofstreamcontent.
%% Send one websocket frame using 0x00 ... 0xFF framing
%% (NOTE(review): this is the pre-RFC-6455 "hixie" style framing).
websocket_send(Socket, IoList) ->
    DataFrame = [0, IoList, 255],
    case Socket of
        {sslsocket,_,_} ->
            ssl:send(Socket, DataFrame);
        _ ->
            gen_tcp:send(Socket, DataFrame)
    end.

%% Blocking receive of websocket data; frames are unframed into a list
%% of messages. Non-ok recv results are passed through unchanged.
websocket_receive(Socket) ->
    R = case Socket of
            {sslsocket,_,_} ->
                ssl:recv(Socket, 0);
            _ ->
                gen_tcp:recv(Socket, 0)
        end,
    case R of
        {ok, DataFrames} ->
            ReceivedMsgs = yaws_websockets:unframe_all(DataFrames, []),
            {ok, ReceivedMsgs};
        _ -> R
    end.

%% Unframe exactly one complete frame; crashes if trailing bytes remain.
websocket_unframe_data(DataFrameBin) ->
    {ok, Msg, <<>>} = yaws_websockets:unframe_one(DataFrameBin),
    Msg.

%% setopts dispatching on the socket flavour (ssl vs plain inet).
websocket_setopts({sslsocket,_,_}=Socket, Opts) ->
    ssl:setopts(Socket, Opts);
websocket_setopts(Socket, Opts) ->
    inet:setopts(Socket, Opts).
%% Return new cookie string
%% Create a server-side cookie session holding Opaque; optionally with
%% a time-to-live and a cleanup callback. Thin wrappers around
%% yaws_session_server.
new_cookie_session(Opaque) ->
    yaws_session_server:new_session(Opaque).

new_cookie_session(Opaque, TTL) ->
    yaws_session_server:new_session(Opaque, TTL).

new_cookie_session(Opaque, TTL, Cleanup) ->
    yaws_session_server:new_session(Opaque, TTL, Cleanup).
%% as returned in #ysession.cookie
%% Session-server wrappers: look up, list, replace and delete cookie
%% sessions by their cookie value.
cookieval_to_opaque(CookieVal) ->
    yaws_session_server:cookieval_to_opaque(CookieVal).

print_cookie_sessions() ->
    yaws_session_server:print_sessions().

replace_cookie_session(Cookie, NewOpaque) ->
    yaws_session_server:replace_session(Cookie, NewOpaque).

delete_cookie_session(Cookie) ->
    yaws_session_server:delete_session(Cookie).
%% Map F over every element of every sublist (a list-of-lists map).
lmap(F, ListOfLists) ->
    [lists:map(F, L) || L <- ListOfLists].
%% interactively turn on|off tracing
%% Toggle tracing through the yaws control channel and print the reply.
set_trace(Val) ->
    Str = yaws_ctl:actl_trace(Val),
    io:format("~s", [Str]).

%% Turn access logging on/off for every configured server group.
set_access_log(Bool) ->
    {ok, GC, Groups} = getconf(),
    Groups2 = lmap(fun(SC) ->
                           ?sc_set_access_log(SC, Bool)
                   end, Groups),
    setconf(GC, Groups2).

%% interactively turn on|off tracing to the tty (as well)
%% typically useful in embedded mode
set_tty_trace(Bool) ->
    yaws_log:trace_tty(Bool).

%% Convenience: an out/1 return value setting the HTTP status code.
set_status_code(Code) ->
    {status, Code}.
%% returns [Header1, Header2, .....] where each header is a flat string
%% Flatten a #headers{} record back into a list of "Name: Value"
%% strings. undefined fields (and an empty cookie list) are skipped;
%% the raw headers kept in #headers.other are appended at the end.
reformat_header(H) ->
    lists:zf(fun({Hname, Str}) ->
                     I = lists:flatten(io_lib:format("~s: ~s",[Hname, Str])),
                     {true, I};
                (undefined) ->
                     false
             end,
             [
              if H#headers.connection == undefined ->
                      undefined;
                 true ->
                      {"Connection", H#headers.connection}
              end,
              if H#headers.accept == undefined ->
                      undefined;
                 true ->
                      {"Accept", H#headers.accept}
              end,
              if H#headers.host == undefined ->
                      undefined;
                 true ->
                      {"Host", H#headers.host}
              end,
              if H#headers.if_modified_since == undefined ->
                      undefined;
                 true ->
                      {"If-Modified-Since", H#headers.if_modified_since}
              end,
              if H#headers.if_match == undefined ->
                      undefined;
                 true ->
                      {"If-Match", H#headers.if_match}
              end,
              if H#headers.if_none_match == undefined ->
                      undefined;
                 true ->
                      {"If-None-Match", H#headers.if_none_match}
              end,
              if H#headers.if_range == undefined ->
                      undefined;
                 true ->
                      {"If-Range", H#headers.if_range}
              end,
              if H#headers.if_unmodified_since == undefined ->
                      undefined;
                 true ->
                      {"If-Unmodified-Since", H#headers.if_unmodified_since}
              end,
              if H#headers.range == undefined ->
                      undefined;
                 true ->
                      {"Range", H#headers.range}
              end,
              if H#headers.referer == undefined ->
                      undefined;
                 true ->
                      {"Referer", H#headers.referer}
              end,
              if H#headers.user_agent == undefined ->
                      undefined;
                 true ->
                      {"User-Agent", H#headers.user_agent}
              end,
              if H#headers.accept_ranges == undefined ->
                      undefined;
                 true ->
                      {"Accept-Ranges", H#headers.accept_ranges}
              end,
              %% cookie defaults to [] rather than undefined
              if H#headers.cookie == [] ->
                      undefined;
                 true ->
                      {"Cookie", H#headers.cookie}
              end,
              if H#headers.keep_alive == undefined ->
                      undefined;
                 true ->
                      {"Keep-Alive", H#headers.keep_alive}
              end,
              if H#headers.content_length == undefined ->
                      undefined;
                 true ->
                      {"Content-Length", H#headers.content_length}
              end,
              if H#headers.content_type == undefined ->
                      undefined;
                 true ->
                      {"Content-Type", H#headers.content_type}
              end,
              if H#headers.authorization == undefined ->
                      undefined;
                 true ->
                      %% NOTE(review): relies on element 3 of the parsed
                      %% authorization tuple holding the raw header value
                      {"Authorization", element(3, H#headers.authorization)}
              end,
              if H#headers.transfer_encoding == undefined ->
                      undefined;
                 true ->
                      {"Transfer-Encoding", H#headers.transfer_encoding}
              end,
              if H#headers.location == undefined ->
                      undefined;
                 true ->
                      {"Location", H#headers.location}
              end
             ]
            ) ++
        lists:map(
          fun({http_header,_,K,_,V}) ->
                  lists:flatten(io_lib:format("~s: ~s",[K,V]))
          end, H#headers.other).
%% Render a #http_request{} back into its request-line string form,
%% e.g. ["GET", " ", "/x", " HTTP/", "1", ".", "1"].
reformat_request(#http_request{method = bad_request}) ->
    ["Bad request"];
reformat_request(Req) ->
    %% both abs_path and absoluteURI request forms are handled
    Path = case Req#http_request.path of
               {abs_path, AbsPath} ->
                   AbsPath;
               {absoluteURI, _Scheme, _Host0, _Port, RawPath} ->
                   RawPath
           end,
    {Maj, Min} = Req#http_request.version,
    [yaws:to_list(Req#http_request.method), " ", Path," HTTP/",
     integer_to_list(Maj),".", integer_to_list(Min)].
%% Render a #http_response{} back into its status-line string form,
%% e.g. ["HTTP/", "1", ".", "1", " ", "200", " ", "OK"].
reformat_response(Resp) ->
    {Maj,Min} = Resp#http_response.version,
    ["HTTP/",integer_to_list(Maj),".", integer_to_list(Min),
     " ", integer_to_list(Resp#http_response.status),
     " ", Resp#http_response.phrase].
%% stringify the scheme[:port] part of a #url
%% Render the "scheme://host[:port]" prefix of a #url{}; the port is
%% omitted entirely when it is undefined.
reformat_url(U) ->
    PortPart = case U#url.port of
                   undefined -> [];
                   Port -> [$: | integer_to_list(Port)]
               end,
    [yaws:to_string(U#url.scheme),
     "://",
     U#url.host,
     PortPart].
%% Build a yaws out/1 header directive that sets the Content-Type.
set_content_type(MimeType) ->
    Directive = {content_type, MimeType},
    {header, Directive}.
%% returns a #url{} record
%% parse_url(Str) -> #url{}
%% Strict parsing: the string must start with a recognised scheme
%% ("http://", "https://", "ftp://" or "file://"); any other input
%% fails with a case_clause error.
parse_url(Str) ->
    parse_url(Str, strict).

%% parse_url(Str, strict | sloppy) -> #url{}
%% In sloppy mode a missing scheme is accepted and left as 'undefined'.
parse_url(Str, Strict) ->
    case Str of
        "http://" ++ Rest ->
            parse_url(host, Strict, #url{scheme = http}, Rest, []);
        "https://" ++ Rest ->
            parse_url(host, Strict, #url{scheme = https}, Rest, []);
        "ftp://" ++ Rest ->
            parse_url(host, Strict, #url{scheme = ftp}, Rest, []);
        "file://" ++ Rest ->
            parse_url(host, Strict, #url{scheme = file}, Rest, []);
        _ when Strict == sloppy ->
            parse_url(host, Strict, #url{scheme = undefined}, Str, [])
    end.
%% State machine for URL parsing. The first argument is the current
%% parser state (host | ipv6 | port | path), Ack accumulates the
%% current component in reverse. A missing path defaults to "/".

%% host state: accumulate until "/", ":" or end; "[" switches to the
%% bracketed-IPv6 host state.
parse_url(host, Strict, U, Str, Ack) ->
    case Str of
        [] ->
            U#url{host = lists:reverse(Ack),
                  path = "/"
                 };
        [$/|Tail] ->
            U2 = U#url{host = lists:reverse(Ack)},
            parse_url(path, Strict, U2, Tail,"/");
        [$:|T] ->
            U2 = U#url{host = lists:reverse(Ack)},
            parse_url(port, Strict, U2, T,[]);
        [$[|T] ->
            parse_url(ipv6, Strict, U, T, [$[]);
        [H|T] ->
            parse_url(host, Strict, U, T, [H|Ack])
    end;
%% ipv6 state: accumulate up to and including the closing "]"; the
%% brackets are kept as part of the host.
parse_url(ipv6, Strict, U, Str, Ack) ->
    case Str of
        [$]] ->
            U#url{host = lists:reverse([$]|Ack]),
                  path = "/"
                 };
        [$], $/|T] ->
            U2 = U#url{host = lists:reverse([$]|Ack])},
            parse_url(path, Strict, U2, T,"/");
        [$], $:|T] ->
            U2 = U#url{host = lists:reverse([$]|Ack])},
            parse_url(port, Strict, U2, T,[]);
        [H|T] ->
            parse_url(ipv6, Strict, U, T, [H|Ack])
    end;
%% port state: digits until "/" or end (list_to_integer fails on
%% non-numeric port text).
parse_url(port, Strict, U, Str, Ack) ->
    case Str of
        [] ->
            U#url{port = list_to_integer(lists:reverse(Ack)),
                  path = "/"};
        [$/|T] ->
            U2 = U#url{port = list_to_integer(lists:reverse(Ack))},
            parse_url(path, Strict, U2, T,"/");
        [H|T] ->
            parse_url(port, Strict, U,T,[H|Ack])
    end;
%% path state: everything up to "?"; the remainder after "?" becomes
%% the (unparsed) querypart.
parse_url(path, Strict, U, Str, Ack) ->
    case Str of
        [] ->
            U#url{path = lists:reverse(Ack)};
        [$?|T] ->
            U#url{path = lists:reverse(Ack),
                  querypart = T};
        [H|T] ->
            parse_url(path, Strict, U, T, [H|Ack])
    end.
%% used to construct redir headers from partial URLs such
%% as e.g. /foo/bar
%% Render a possibly-partial #url{} as an absolute URL (I/O list),
%% borrowing any missing scheme/host/port from the server conf SC.
format_partial_url(Url, SC) ->
    SchemePart = case Url#url.scheme of
                     undefined -> yaws:redirect_scheme(SC);
                     Scheme -> yaws:to_string(Scheme) ++ "://"
                 end,
    HostPart = case Url#url.host of
                   undefined -> yaws:redirect_host(SC, undefined);
                   Host -> Host
               end,
    PortPart = case Url#url.port of
                   undefined -> yaws:redirect_port(SC);
                   Port -> [$: | integer_to_list(Port)]
               end,
    QueryPart = case Url#url.querypart of
                    [] -> [];
                    Query -> [$?|Query]
                end,
    [SchemePart, HostPart, PortPart, Url#url.path, QueryPart].
%% Render a #url{} record as an I/O list. A missing scheme defaults
%% to "http://"; a missing port is simply omitted.
format_url(Url) when is_record(Url, url) ->
    SchemePart = case Url#url.scheme of
                     undefined -> "http://";
                     Scheme -> yaws:to_string(Scheme) ++ "://"
                 end,
    PortPart = case Url#url.port of
                   undefined -> [];
                   Port -> [$: | integer_to_list(Port)]
               end,
    QueryPart = case Url#url.querypart of
                    [] -> [];
                    Query -> [$?|Query]
                end,
    [SchemePart, Url#url.host, PortPart, Url#url.path, QueryPart].
%% Is Str an absolute URI per RFC 3986, i.e. does it start with a
%% scheme ("alpha *( alpha | digit | + | - | . )" followed by ":")?
is_absolute_URI([C|Rest]) when C >= $a, C =< $z; C >= $A, C =< $Z ->
    is_abs_URI1(Rest);
is_absolute_URI(_) ->
    false.

%% Scan the remainder of the scheme; a ":" terminates it successfully.
is_abs_URI1([$:|_]) ->
    true;
is_abs_URI1([C|Rest]) when C >= $a, C =< $z;
                           C >= $A, C =< $Z;
                           C >= $0, C =< $9;
                           C =:= $+; C =:= $-; C =:= $. ->
    is_abs_URI1(Rest);
is_abs_URI1(_) ->
    false.
%% ------------------------------------------------------------
%% simple erlang term representation of HTML:
%% EHTML = [EHTML] | {Tag, Attrs, Body} | {Tag, Attrs} | {Tag} |
%%         binary() | character()
%% Tag   = atom()
%% Attrs = [{Key, Value}] or {EventTag, {jscall, FunName, [Args]}}
%% Key   = atom()
%% Value = string()
%% Body  = EHTML
%% Expand an EHTML term into an I/O list of HTML text.
%% Characters and binaries pass through unchanged (the htmlize calls
%% are intentionally disabled, see the trailing comments).
ehtml_expand(Ch) when Ch >= 0, Ch =< 255 -> Ch; %yaws_api:htmlize_char(Ch);
%% fix: this clause was garbled in the file; binaries must be passed
%% through, otherwise binary bodies crash with function_clause.
ehtml_expand(Bin) when is_binary(Bin) -> Bin; %yaws_api:htmlize(Bin);
ehtml_expand({ssi,File, Del, Bs}) ->
    %% server side include: inline the expanded file contents
    case yaws_server:ssi(File, Del, Bs) of
        {error, Rsn} ->
            io_lib:format("ERROR: ~p~n",[Rsn]);
        X ->
            X
    end;

%%!todo (low priority) - investigate whether tail-recursion would be of any
%% benefit here instead of the current ehtml_expand(Body) recursion.
%% - provide a tail_recursive version & add a file in the
%% benchmarks folder to measure it.
%
ehtml_expand({Tag}) ->
    %% empty element, e.g. {br}
    ["<", atom_to_list(Tag), " />"];
ehtml_expand({pre_html, X}) -> X; %% raw, pre-formatted HTML
ehtml_expand({Tag, Attrs}) ->
    NL = ehtml_nl(Tag),
    [NL, "<", atom_to_list(Tag), ehtml_attrs(Attrs), "></",
     atom_to_list(Tag), ">"];
ehtml_expand({Tag, Attrs, Body}) when is_atom(Tag) ->
    Ts = atom_to_list(Tag),
    NL = ehtml_nl(Tag),
    [NL, "<", Ts, ehtml_attrs(Attrs), ">", ehtml_expand(Body), "</", Ts, ">"];
ehtml_expand([H|T]) -> [ehtml_expand(H)|ehtml_expand(T)];
ehtml_expand([]) -> [].
%% Render an EHTML attribute list. Elements may be:
%%   atom                 -> bare attribute, e.g. checked
%%   string               -> literal attribute text
%%   {Name, Value}        -> Name="Value"
%%   {check, Name, Value} -> like {Name, Value}, but picks single or
%%                           double quotes depending on the value
ehtml_attrs([]) -> [];
ehtml_attrs([Attribute|Tail]) when is_atom(Attribute) ->
    [[$ |atom_to_list(Attribute)]|ehtml_attrs(Tail)];
ehtml_attrs([Attribute|Tail]) when is_list(Attribute) ->
    [" ", Attribute|ehtml_attrs(Tail)];
ehtml_attrs([{Name, Value} | Tail]) ->
    %% always double-quoted; the value is NOT escaped here
    ValueString = if is_atom(Value) -> [$",atom_to_list(Value),$"];
                     is_list(Value) -> [$",Value,$"];
                     is_integer(Value) -> [$",integer_to_list(Value),$"];
                     is_float(Value) -> [$",float_to_list(Value),$"]
                  end,
    [[$ |atom_to_list(Name)], [$=|ValueString]|ehtml_attrs(Tail)];
ehtml_attrs([{check, Name, Value} | Tail]) ->
    %% use single quotes when the value itself contains a double quote
    ValueString = if is_atom(Value) -> [$",atom_to_list(Value),$"];
                     is_list(Value) ->
                          Q = case deepmember($", Value) of
                                  true -> $';
                                  false -> $"
                              end,
                          [Q,Value,Q];
                     is_integer(Value) -> [$",integer_to_list(Value),$"];
                     is_float(Value) -> [$",float_to_list(Value),$"]
                  end,
    [[$ |atom_to_list(Name)],
     [$=|ValueString]|ehtml_attrs(Tail)].
%% Tags for which we must not add extra white space.
%% FIXME: should there be anything more in this list?
%% Newline policy per tag: inline elements must not get extra white
%% space in front of them, every other tag is preceded by "\n" for
%% readable output.
ehtml_nl(Tag) ->
    Inline = [a, br, span, em, strong, dfn, code, samp, kbd, var, cite,
              abbr, acronym, q, sub, sup, ins, del, img, tt, i, b, big,
              small, strike, s, u, font, basefont, input, button, object],
    case lists:member(Tag, Inline) of
        true -> [];
        false -> "\n"
    end.
%% ------------------------------------------------------------
%% ehtml_expander/1: an EHTML optimizer
%%
%% This is an optimization for generating the same EHTML multiple times with
%% only small differences, by using fast re-usable templates that contain
%% variables. The variables are atoms starting with a dollar sign, like
%% '$myvar'. There are two functions: ehtml_expander/1 to create an optimized
%% EHTML template, then ehtml_apply/2 takes a template and a dictionary of
%% variable values and generates the actual HTML.
%%
%% If you are spending a lot of time regenerating similar EHTML fragments then
%% this is for you.
%%
%% Variables can appear in three places:
%% - As a body element, where you would normally have a tag. The values of
%%   these variables are expanded as EHTML.
%% - As the name or value of an attribute. The values of these variables are
%%   strings.
%% - As the CDR of an attribute list. The values of these variables are
%%   key-value lists of more attributes.
%%
%% See ehtml_expander_test/0 for an example.
%%
%% The approach is inspired by the way that Yaws already treats .yaws files,
%% and the article ``A Hacker's Introduction To Partial Evaluation'' by
%% Darius Bacon (cool guy), http://www.lisp-p.org/htdocs/peval/peval.cgi
%%
%% (For now I flatter myself that this is some kind of partial evaluator, but
%% I don't really know :-) -luke)
%% Compile an EHTML term into a reusable template: expand it once,
%% flatten, and compress adjacent text into binaries, leaving variable
%% markers ({ehtml,V} | {preformatted,V} | {ehtml_attrs,V}) in place.
ehtml_expander(X) ->
    ehtml_expander_compress(flatten(ehtml_expander(X, [], [])), []).
%% Returns a deep list of text and variable references (atoms)
%% Text
%% Expand term X, accumulating already-produced output (reversed) in
%% Before and pending close-tags in After. Unlike ehtml_expand/1,
%% plain characters and binaries ARE htmlized here.
ehtml_expander(Ch, Before, After) when Ch >= 0, Ch =< 255 ->
    ehtml_expander_done(yaws_api:htmlize_char(Ch), Before, After);
ehtml_expander(Bin, Before, After) when is_binary(Bin) ->
    ehtml_expander_done(yaws_api:htmlize(Bin), Before, After);
ehtml_expander({ssi,File, Del, Bs}, Before, After) ->
    %% server side includes are resolved at template-compile time
    Str = case yaws_server:ssi(File, Del, Bs) of
              {error, Rsn} ->
                  io_lib:format("ERROR: ~p~n",[Rsn]);
              X ->
                  X
          end,
    ehtml_expander_done(Str, Before, After);
ehtml_expander({pre_html, X}, Before, After) ->
    ehtml_expander_done(X, Before, After);
%% Tags
ehtml_expander({Tag}, Before, After) ->
    ehtml_expander_done(["<", atom_to_list(Tag), " />"], Before, After);
ehtml_expander({Tag, Attrs}, Before, After) ->
    NL = ehtml_nl(Tag),
    ehtml_expander_done([NL, "<", atom_to_list(Tag), ehtml_attrs(Attrs), "></",
                         atom_to_list(Tag), ">"],
                        Before,
                        After);
ehtml_expander({Tag, Attrs, Body}, Before, After) ->
    %% attributes may themselves contain variables, hence the
    %% attrs-specific expander
    ehtml_expander(Body,
                   [["\n<", atom_to_list(Tag),
                     ehtml_attrs_expander(Attrs), ">"]|
                    Before],
                   ["</", atom_to_list(Tag), ">"|After]);
%% Variable references
ehtml_expander(Var, Before, After) when is_atom(Var) ->
    [reverse(Before), {ehtml, ehtml_var_name(Var)}, After];
%% Lists
ehtml_expander([H|T], Before, After) ->
    ehtml_expander(T, [ehtml_expander(H, [], [])|Before], After);
ehtml_expander([], Before, After) ->
    ehtml_expander_done("", Before, After).
%% Expander for attributes. The attribute name and value can each be a
%% variable reference.
%% fix: the comment marker on the "Var in the cdr" line had been lost,
%% leaving a bare sentence inside the clause body (a syntax error).
ehtml_attrs_expander([]) -> "";
ehtml_attrs_expander([{Var,Val}|T]) ->
    [[" ",
      ehtml_attr_part_expander(Var),
      "=",
      "\"", ehtml_attr_part_expander(Val), "\""]|
     ehtml_attrs_expander(T)];
ehtml_attrs_expander([Var|T]) ->
    [[" ",
      ehtml_attr_part_expander(Var)]|
     ehtml_attrs_expander(T)];
ehtml_attrs_expander(Var) when is_atom(Var) ->
    %% Var in the cdr of an attribute list
    [{ehtml_attrs, ehtml_var_name(Var)}].
%% Expand one attribute name or value: atoms starting with '$' become
%% {preformatted, Var} markers, everything else becomes literal text.
ehtml_attr_part_expander(A) when is_atom(A) ->
    Name = atom_to_list(A),
    case Name of
        [$$|_] -> {preformatted, ehtml_var_name(A)};
        _ -> Name
    end;
ehtml_attr_part_expander(I) when is_integer(I) -> integer_to_list(I);
ehtml_attr_part_expander(S) when is_list(S) -> S.
ehtml_expander_done(X, Before, After) -> [reverse([X|Before]), After].
%% Compress an EHTML expander, converting all adjacent bits of text into
%% binaries.
%% Returns: [binary() | {ehtml, Var} | {preformatted, Var} | {ehtml_attrs, Var}]
%% Var = atom()
%% Collapse runs of characters into binaries, keeping the variable
%% marker tuples in place between them.
ehtml_expander_compress([], TextAcc) ->
    [list_to_binary(reverse(TextAcc))];
ehtml_expander_compress([C|Rest], TextAcc) when is_integer(C) ->
    ehtml_expander_compress(Rest, [C|TextAcc]);
ehtml_expander_compress([Marker|Rest], TextAcc) when is_tuple(Marker) ->
    [list_to_binary(reverse(TextAcc)), Marker | ehtml_expander_compress(Rest, [])].
%% Apply an expander with the variable bindings in Env. Env is a list of
%% {VarName, Value} tuples, where VarName is an atom and Value is an ehtml
%% term.
ehtml_apply(Expander, Env) -> [ehtml_eval(X, Env) || X <- Expander].
%% Evaluate one template element: binaries are literal text, marker
%% tuples are looked up in Env and rendered according to their kind.
ehtml_eval(Bin, _Env) when is_binary(Bin) ->
    Bin;
ehtml_eval({Kind, VarName}, Env) ->
    case lists:keysearch(VarName, 1, Env) of
        {value, {VarName, Value}} ->
            case Kind of
                ehtml        -> ehtml_expand(Value);
                preformatted -> Value;
                ehtml_attrs  -> ehtml_attrs(Value)
            end;
        false ->
            erlang:error({ehtml_unbound, VarName})
    end.
%% Get the name part of a variable reference.
%% e.g. ehtml_var_name('$foo') -> foo.
%% Strip the leading '$' from a variable reference atom,
%% e.g. ehtml_var_name('$foo') -> foo.
ehtml_var_name(A) when is_atom(A) ->
    case atom_to_list(A) of
        [$$|Name] -> list_to_atom(Name);
        _ -> erlang:error({bad_ehtml_var_name, A})
    end.
%% fix: several in-body comment lines had lost their %% markers,
%% leaving bare prose inside the function (syntax errors); restored.
ehtml_expander_test() ->
    %% Expr is a template containing variables.
    Expr = {html, [{title, '$title'}],
            {body, [],
             [{h1, [], '$heading'},
              '$text']}},
    %% Expand is an expander that can be used to quickly generate the HTML
    %% specified in Expr.
    Expand = ehtml_expander(Expr),
    %% Bs{1,2} are lists of variable bindings to fill in the gaps in the
    %% template. We can reuse the template on many sets of bindings, and this
    %% is much faster than doing a full ehtml of the whole page each time.
    Bs1 = [{title, "First page"},
           {heading, "Heading"},
           {text, {pre_html, "<b>My text!</b>"}}],
    Bs2 = [{title, "Second page"},
           {heading, "Foobar"},
           {text, {b, [], "My text again!"}}],
    %% Page1 and Page2 are generated from the template. They are I/O lists
    %% (i.e. deep lists of strings and binaries, ready to ship)
    Page1 = ehtml_apply(Expand, Bs1),
    Page2 = ehtml_apply(Expand, Bs2),
    %% We return the two pages as strings, plus the actual expander (which is
    %% an "opaque" data structure, but maybe interesting to see.)
    {binary_to_list(list_to_binary(Page1)),
     binary_to_list(list_to_binary(Page2)),
     Expand}.
%% call_cgi calls the script `Scriptfilename' (full path). If
%% `Exefilename' is given, it is the executable to handle this,
%% otherwise `Scriptfilename' is assumed to be executable itself.
%%
%% Note however, that these functions usually generate stream content.
%% (If you have good use for a version generating {content, _, _}
%% instead, contact the maintainers)
%%
%% Also note, that they may return `get_more' and expect to be called
%% again.
%% Run Scriptfilename as a CGI script; delegates to yaws_cgi.
call_cgi(Arg, Scriptfilename) ->
    yaws_cgi:call_cgi(Arg, Scriptfilename).

%% Same, but run the explicit executable Exefilename on the script.
call_cgi(Arg, Exefilename, Scriptfilename) ->
    yaws_cgi:call_cgi(Arg, Exefilename, Scriptfilename).
%% call_fcgi_responder issues a responder role call to the FastCGI
%% application server. It returns the same return value as out/1.
%%
%% call_fcgi_authorizer issues an authorizer role call to the FastCGI
%% application server. It returns:
%%
%% {denied, Out} : Access is denied. Out is the same return value as
%% out/1.
%%
%% {allowed, Variables} : Access is allowed. Variables is a list of
%% environment variables returned by the authorization server using
%% Variable-XXX: YYY headers.
%%
%% Note: the FastCGI filter role is not yet supported.
%%
%% The following information is taken from the server configuration:
%% - The hostname (or address) and port number of the application server.
%% - Extra CGI variables.
%% - Trace FastCGI protocol messages?
%% - Log application server error messages?
%%
%% The caller can optionally provide an Options argument which supports
%% the following options. These override the defaults taken from the
%% server config.
%%
%% {app_server_host, string() | ip_address()} : The hostname or IP address
%% of the application server.
%%
%% {app_server_port, int()} : The TCP port number of the application server.
%%
%% {path_info, string()} : Override the path_info string from Arg.
%%
%% {extra_env, [{string(), string()}]} : Extra environment variables to be
%% passed to the application server, as a list of name-value pairs.
%%
%% trace_protocol : Trace FastCGI protocol messages.
%%
%% log_app_error : Log application errors (output to stderr and non-zero
%% exit value).
%%
%% Issue a FastCGI responder-role request; delegates to yaws_cgi.
call_fcgi_responder(Arg) ->
    yaws_cgi:call_fcgi_responder(Arg).

call_fcgi_responder(Arg, Options) ->
    yaws_cgi:call_fcgi_responder(Arg, Options).

%% Issue a FastCGI authorizer-role request; delegates to yaws_cgi.
call_fcgi_authorizer(Arg) ->
    yaws_cgi:call_fcgi_authorizer(Arg).

call_fcgi_authorizer(Arg, Options) ->
    yaws_cgi:call_fcgi_authorizer(Arg, Options).
%%
%% Is the character C a member of the (possibly nested) list of
%% characters and sublists?
deepmember(_C, []) ->
    false;
deepmember(C, [C|_]) ->
    true;
deepmember(C, [Sub|Rest]) when is_list(Sub) ->
    deepmember(C, Sub) orelse deepmember(C, Rest);
deepmember(C, [Other|Rest]) when C /= Other ->
    deepmember(C, Rest).
%% Parse the value of a Set-Cookie header.
%%
%% RFC (2109) ports are from RFC 2965
%%
%% "Cookie:" cookie-version 1*((";" | ",") cookie-value)
%% "Set-Cookie:" cookies
%% "Set-Cookie2:" cookies
%% cookie-value = NAME "=" VALUE [";" path] [";" domain] [";" port]
%% cookie = NAME "=" VALUE *( ";" cookie-av )
%% cookie-version = "$Version" "=" value
%% NAME = attr
%% VALUE = value
%% path = "$Path" "=" value
%% domain = "$Domain" "=" value
%% port = "$Port" "=" <"> value <">
%%
%% cookie-av = "Comment" "=" value
%% | "CommentURL" "=" <"> http_URL <">
%% | "Discard"
%% | "Domain" "=" value
%% | "Max-Age" "=" value
%% | "Path" "=" value
%% | "Port" [ "=" <"> portlist <"> ]
%% | "Secure"
%% | "Version" "=" 1*DIGIT
%%
%% Parse a Set-Cookie header value into a #setcookie{} record.
%% The first key=value pair becomes the cookie's name/value; any
%% following attributes are folded in by add_set_cookie/4.
parse_set_cookie(Str) ->
    parse_set_cookie(Str, #setcookie{}).

parse_set_cookie([], Cookie) ->
    Cookie;
parse_set_cookie(Str, Cookie) ->
    Rest00 = skip_space(Str),
    {Key,Rest0} = parse_set_cookie_key(Rest00, []),
    Rest1 = skip_space(Rest0),
    case Rest1 of
        [$=|Rest2] ->
            {Value,Quoted,Rest3} = parse_set_cookie_value(Rest2),
            NewC=add_set_cookie(Cookie,yaws:to_lower(Key),Value,Quoted),
            parse_set_cookie(Rest3,NewC);
        [$;|Rest2] ->
            %% value-less attribute such as "secure" or "discard"
            NewC =add_set_cookie(Cookie,yaws:to_lower(Key),undefined,false),
            parse_set_cookie(Rest2,NewC);
        _ ->
            Cookie
    end.
%%
%% Accumulate a cookie attribute name up to "=", ";" or end of input.
%% Returns {Key, RemainingInput}.
parse_set_cookie_key([C|Rest], Acc) when C =/= $=, C =/= $; ->
    parse_set_cookie_key(Rest, [C|Acc]);
parse_set_cookie_key(Rest, Acc) ->
    {lists:reverse(Acc), Rest}.
%%
%% Parse a cookie attribute value; a leading double quote switches to
%% quoted-string parsing. Returns {Value, WasQuoted, RemainingInput}.
parse_set_cookie_value([$"|Rest]) ->
    parse_quoted(Rest, []);
parse_set_cookie_value(Str) ->
    parse_set_cookie_value(Str, []).

parse_set_cookie_value([C|Rest], Acc) when C =/= $; ->
    parse_set_cookie_value(Rest, [C|Acc]);
parse_set_cookie_value(Rest, Acc) ->
    {lists:reverse(Acc), false, Rest}.

%% Quoted-string: backslash escapes are kept verbatim; an unterminated
%% string is accepted at end of input.
parse_quoted([$\\, C | Rest], Acc) ->
    parse_quoted(Rest, [C, $\\ | Acc]);
parse_quoted([$"|Rest], Acc) ->
    {lists:reverse(Acc), true, Rest};
parse_quoted([C|Rest], Acc) ->
    parse_quoted(Rest, [C|Acc]);
parse_quoted([], Acc) ->
    {lists:reverse(Acc), true, []}.
%%
%% Fold one parsed Key/Value pair into the #setcookie{} record.
%% The first clause must stay first: while the record's key is still
%% undefined the pair is the cookie's own name/value, so attribute
%% names like "comment" cannot be mistaken for attributes there.
add_set_cookie(C, Key, Value, Quoted) when C#setcookie.key==undefined ->
    C#setcookie{key=Key,value=Value,quoted=Quoted};
add_set_cookie(C, "comment", Value, _Quoted) ->
    C#setcookie{comment=Value};
add_set_cookie(C, "commenturl", Value, _Quoted) ->
    C#setcookie{comment_url=Value};
add_set_cookie(C, "discard", Value, _Quoted) ->
    C#setcookie{discard=Value};
add_set_cookie(C, "domain", Value, _Quoted) ->
    C#setcookie{domain=Value};
add_set_cookie(C, "max-age", Value, _Quoted) ->
    C#setcookie{max_age=Value};
add_set_cookie(C, "path", Value, _Quoted) ->
    C#setcookie{path=Value};
add_set_cookie(C, "port", Value, _Quoted) ->
    C#setcookie{port=Value};
add_set_cookie(C, "secure", Value, _Quoted) ->
    C#setcookie{secure=Value};
add_set_cookie(C, "version", Value, _Quoted) ->
    C#setcookie{version=Value};
%% unknown attributes are silently ignored
add_set_cookie(C, _Key, _Value, _Quoted) ->
    C.
%%
%% Render a #setcookie{} back into Set-Cookie header text (I/O list).
format_set_cookie(C) when C#setcookie.value == undefined ->
    [C#setcookie.key|format_set_cookie_opts(C)];
format_set_cookie(C) when C#setcookie.quoted ->
    [C#setcookie.key,$=,$",C#setcookie.value,$"|
     format_set_cookie_opts(C)];
format_set_cookie(C) ->
    [C#setcookie.key,$=,C#setcookie.value|
     format_set_cookie_opts(C)].

%% Emit ";Key=Opt" for a set option, nothing when it is undefined.
add_opt(_Key,undefined) -> [];
add_opt(Key,Opt) -> [$;,Key,$=,Opt].

format_set_cookie_opts(C) ->
    [add_opt("Path",C#setcookie.path),
     add_opt("Port",C#setcookie.port),
     add_opt("Domain",C#setcookie.domain),
     add_opt("Secure",C#setcookie.secure),
     add_opt("Expires",C#setcookie.expires),
     add_opt("Max-Age",C#setcookie.max_age),
     add_opt("Discard",C#setcookie.discard),
     add_opt("Comment",C#setcookie.comment),
     add_opt("CommentURL",C#setcookie.comment_url),
     %% NOTE(review): "version" is lower-case unlike the other option
     %% names above -- looks unintentional, confirm before changing.
     add_opt("version",C#setcookie.version)].
%%
%% Drop leading spaces and tabs.
skip_space([C|Rest]) when C =:= $\s; C =:= $\t ->
    skip_space(Rest);
skip_space(Str) ->
    Str.
%%
%% Fetch Key from the POST body or the query string depending on the
%% request method. Returns undefined, {ok, Value} or a tuple of
%% values (see filter_parse/2).
getvar(ARG,Key) when is_atom(Key) ->
    getvar(ARG, atom_to_list(Key));
getvar(ARG,Key) ->
    case (ARG#arg.req)#http_request.method of
        'POST' -> postvar(ARG, Key);
        'GET' -> queryvar(ARG, Key);
        _ -> undefined
    end.
%% Fetch Key from the parsed query string. The parse result is cached
%% in the process dictionary under 'query_parse' so repeated lookups
%% during the same request only parse once.
queryvar(ARG,Key) when is_atom(Key) ->
    queryvar(ARG, atom_to_list(Key));
queryvar(ARG, Key) ->
    Parse = case get(query_parse) of
                undefined ->
                    Pval = yaws_api:parse_query(ARG),
                    put(query_parse, Pval),
                    Pval;
                Val0 ->
                    Val0
            end,
    filter_parse(Key, Parse).
%% Fetch Key from the parsed POST body; cached in the process
%% dictionary under 'post_parse' (same scheme as queryvar/2).
postvar(ARG, Key) when is_atom(Key) ->
    postvar(ARG, atom_to_list(Key));
postvar(ARG, Key) ->
    Parse = case get(post_parse) of
                undefined ->
                    Pval = yaws_api:parse_post(ARG),
                    put(post_parse, Pval),
                    Pval;
                Val0 ->
                    Val0
            end,
    filter_parse(Key, Parse).
%% Pick the value(s) for Key out of a parse_query/parse_post result.
%% Returns undefined | {ok, Value} | a tuple of values when the key
%% occurs more than once. Pairs with an undefined value are ignored.
%% fix: the "Multivalued case" comment line had lost its %% marker,
%% leaving bare prose inside the case expression (a syntax error).
filter_parse(Key, Parse) ->
    case lists:filter(fun(KV) ->
                              (Key == element(1, KV))
                                  andalso
                                  (element(2, KV) /= undefined)
                      end,
                      Parse) of
        [] -> undefined;
        [{_, V}] -> {ok,V};
        %% Multivalued case - return list of values
        Vs -> list_to_tuple(lists:map(fun(KV) ->
                                              element(2, KV)
                                      end,
                                      Vs))
    end.
%% Look up a yaws binding (stored in the process dictionary under
%% {binding, Key}); error out if it was never bound.
binding(Key) ->
    Value = get({binding, Key}),
    case Value of
        undefined -> erlang:error({unknown_binding, Key});
        _ -> Value
    end.

%% True iff a binding exists for Key.
binding_exists(Key) ->
    get({binding, Key}) =/= undefined.
%% Return the parsed url that the client requested.
%% Return the parsed url that the client requested, reconstructed
%% from the request line, the Host header and the server conf.
request_url(ARG) ->
    SC = get(sc),
    Headers = ARG#arg.headers,
    {abs_path, Path} = (ARG#arg.req)#http_request.path,
    DecPath = url_decode(Path),
    {P,Q} = yaws:split_at(DecPath, $?),
    #url{scheme = case SC#sconf.ssl of
                      undefined ->
                          "http";
                      _ ->
                          "https"
                  end,
         %% strip any ":port" suffix from the Host header (or fall
         %% back to the configured server name)
         host = case Headers#headers.host of
                    undefined ->
                        yaws:upto_char($:, SC#sconf.servername);
                    HostHdr ->
                        yaws:upto_char($:, HostHdr)
                end,
         %% NOTE(review): both 80 and 443 map to 'undefined'
         %% regardless of whether SSL is on -- confirm intended.
         port = case {SC#sconf.ssl, SC#sconf.port} of
                    {_, 80} ->
                        undefined;
                    {_, 443} ->
                        undefined;
                    {_, Port} ->
                        Port
                end,
         path = P,
         querypart = Q}.
%% remove sick characters
%% Remove shell metacharacters and collapse ".." sequences so the
%% result is safe(r) to use as a file name component.
sanitize_file_name(".." ++ Rest) ->
    sanitize_file_name([$.|Rest]);
sanitize_file_name([C|Rest]) ->
    Forbidden = " &;'`{}!\\?<>\"()$",
    case lists:member(C, Forbidden) of
        true -> sanitize_file_name(Rest);
        false -> [C|sanitize_file_name(Rest)]
    end;
sanitize_file_name([]) ->
    [].
%% to be used in embedded mode, make it possible
%% to pass a config to yaws from another data source
%% than /etc/yaws/yaws.conf, for example from a database
%% this code is also called by the server -h hup code
setconf(GC0, Groups0) ->
    setconf(GC0, Groups0, true).

%% Install a new global conf + server groups at runtime. Prefers a
%% soft (in-place) reconfig, falls back to a hard reconfig, or
%% returns {error, need_restart} if neither is possible.
setconf(GC0, Groups0, CheckCertsChanged) ->
    %% if any SSL certificate changed on disk, the ssl application
    %% must be bounced before the new conf is installed
    CertsChanged = if CheckCertsChanged == true ->
                           lists:member(yes,gen_server:call(
                                              yaws_server,
                                              check_certs, infinity));
                      true ->
                           false
                   end,
    if
        CertsChanged ->
            application:stop(ssl),
            application:start(ssl);
        true ->
            ok
    end,
    {GC, Groups1} = yaws_config:verify_upgrade_args(GC0, Groups0),
    Groups2 = lists:map(fun(X) -> yaws_config:add_yaws_auth(X) end, Groups1),
    {ok, OLDGC, OldGroups} = yaws_api:getconf(),
    case {yaws_config:can_hard_gc(GC, OLDGC),
          yaws_config:can_soft_setconf(GC, Groups2, OLDGC, OldGroups)} of
        {true, true} ->
            yaws_config:soft_setconf(GC, Groups2, OLDGC, OldGroups);
        {true, false} ->
            yaws_config:hard_setconf(GC, Groups2);
        _ ->
            {error, need_restart}
    end.
%% Returns {ok, GC, Groups}.
%% Fetch the currently installed configuration from the server.
getconf() ->
    gen_server:call(yaws_server, getconf, infinity).
%% Prepare yaws for embedded use: returns {ok, SCList, GC, ChildSpecs}
%% without starting anything. DocRoot is the document root, SL a
%% server-conf proplist (or a list of such proplists), GL a
%% global-conf proplist and Id the yaws id.
embedded_start_conf(DocRoot) when is_list(DocRoot) ->
    embedded_start_conf(DocRoot, []).
embedded_start_conf(DocRoot, SL) when is_list(DocRoot), is_list(SL) ->
    embedded_start_conf(DocRoot, SL, []).
embedded_start_conf(DocRoot, SL, GL)
  when is_list(DocRoot), is_list(SL), is_list(GL) ->
    embedded_start_conf(DocRoot, SL, GL, "default").
embedded_start_conf(DocRoot, SL, GL, Id)
  when is_list(DocRoot), is_list(SL), is_list(GL) ->
    case application:load(yaws) of
        ok -> ok;
        {error, {already_loaded,yaws}} -> ok;
        _ -> exit("cannot load yaws")
    end,
    ok = application:set_env(yaws, embedded, true),
    ok = application:set_env(yaws, id, Id),
    ChildSpecs = yaws_sup:child_specs(),
    GC = yaws:create_gconf(GL, Id),
    %% SL may be one server proplist or a list of proplists; either
    %% way we end up with a single group of server confs
    SCList = case SL of
                 [] ->
                     [[]];
                 [Cnf|_] when is_tuple(Cnf) ->
                     [[yaws:create_sconf(DocRoot, SL)]];
                 [Cnf|_] when is_list(Cnf) ->
                     [[yaws:create_sconf(DocRoot, SLItem)] || SLItem <- SL]
             end,
    SoapChild = yaws_config:add_yaws_soap_srv(GC, false),
    %% In case a server is started before any configuration has been set,
    %% this makes it possible to get hold of the 'pending' configuration.
    %% (see for example the start of the yaws_session_server)
    ok = application:set_env(yaws, embedded_conf, [{sclist,SCList},{gc,GC}]),
    {ok, SCList, GC, ChildSpecs ++ SoapChild}.
%% Function which is invoked typically from an index.yaws file
%% Produce a directory listing; typically invoked from an index.yaws
%% file. RelDir is interpreted relative to the requested file's dir.
dir_listing(Arg) ->
    dir_listing(Arg, ".").
dir_listing(Arg, RelDir) ->
    %% .yaws.auth
    Dir0 = filename:dirname(Arg#arg.fullpath),
    Dir = case RelDir of
              "." -> Dir0;
              _ -> filename:join([Dir0, RelDir])
          end,
    Req = Arg#arg.req,
    case file:list_dir(Dir) of
        {ok, Data0} ->
            %% hide the access-control file and the index page itself
            Data = Data0 -- [".yaws.auth", "index.yaws"],
            yaws_ls:list_directory(Arg, Arg#arg.clisock, Data,
                                   Dir,
                                   Req, false),
            ok;
        _Err ->
            %% Just ignore errors ??, the programmer has to
            %% make sure it's a valid path here
            ok
    end.
%% Returns #redir_self{} record
%% Build a #redir_self{} describing how a client can address this
%% server (scheme, host, port) -- used to construct self-redirects.
redirect_self(A) ->
    SC = get(sc),
    {Port, PortStr} =
        case {SC#sconf.rmethod, SC#sconf.ssl, SC#sconf.port} of
            {"https", _, 443} -> {443, ""};
            {"http", _, 80} -> {80, ""};
            {_, undefined, 80} -> {80, ""};
            {_, undefined, Port2} ->
                %% fix: this clause previously returned the atom
                %% 'port' instead of the numeric Port2, unlike every
                %% sibling clause
                {Port2, [$:|integer_to_list(Port2)]};
            {_, _SSL, 443} ->
                {443, ""};
            {_, _SSL, Port2} ->
                {Port2, [$:|integer_to_list(Port2)]}
        end,
    H = A#arg.headers,
    Host0 = yaws:redirect_host(get(sc), H#headers.host),
    %% redirect host contains the port number - for mysterious reasons
    Host = case string:tokens(Host0, ":") of
               [H0, _] -> H0;
               [H1] -> H1
           end,
    {Scheme, SchemeStr} =
        case {SC#sconf.ssl,SC#sconf.rmethod} of
            {_, Method} when is_list(Method) ->
                {list_to_atom(Method), Method++"://"};
            {undefined,_} ->
                {http, "http://"};
            {_SSl,_} ->
                {https, "https://"}
        end,
    #redir_self{host = Host,
                scheme = Scheme,
                scheme_str = SchemeStr,
                port = Port,
                port_str = PortStr}.
%% Boyer-Moore searching, used for parsing multipart/form-data
%% Build a Boyer-Moore(-Horspool) search context for the fixed
%% pattern Str: {ShiftTable, PatternBin, ReversedPattern, PatternLen}.
bm_start(Str) ->
    PatLen = length(Str),
    ShiftTbl = bm_set_shifts(Str, PatLen),
    {ShiftTbl, list_to_binary(Str), lists:reverse(Str), PatLen}.

%% Search Bin for the pattern; returns {Pos, Len} (0-based) or nomatch.
bm_find(Bin, SearchCtx) ->
    bm_find(Bin, SearchCtx, 0).

bm_find(Bin, {_, _, _, PatLen}, Pos) when size(Bin) < (Pos + PatLen) ->
    nomatch;
bm_find(Bin, {ShiftTbl, PatBin, RevPat, PatLen}=SearchCtx, Pos) ->
    case Bin of
        <<_:Pos/binary, PatBin:PatLen/binary, _/binary>> ->
            {Pos, PatLen};
        <<_:Pos/binary, Window:PatLen/binary, _/binary>> ->
            %% mismatch: compare right-to-left to pick the shift
            RevWindow = lists:reverse(binary_to_list(Window)),
            Delta = bm_next_shift(RevWindow, RevPat, 0, ShiftTbl),
            bm_find(Bin, SearchCtx, Pos + Delta)
    end.

%% 256-entry bad-character table; bytes absent from the pattern shift
%% by the full pattern length.
bm_set_shifts(Str, Len) ->
    erlang:make_tuple(256, Len, bm_set_shifts(Str, 0, Len, [])).

bm_set_shifts(_Str, Count, Len, Acc) when Count =:= Len-1 ->
    lists:reverse(Acc);
bm_set_shifts([H|T], Count, Len, Acc) ->
    Delta = Len - Count - 1,
    bm_set_shifts(T, Count+1, Len, [{H+1, Delta}|Acc]).

%% Walk window and pattern right-to-left; on the first mismatching
%% byte, shift by its table entry minus the matched suffix length
%% (never less than 1).
bm_next_shift([H|T1], [H|T2], Comparisons, Tbl) ->
    bm_next_shift(T1, T2, Comparisons+1, Tbl);
bm_next_shift([H|_], _, Comparisons, Tbl) ->
    erlang:max(element(H+1, Tbl) - Comparisons, 1).
| null | https://raw.githubusercontent.com/Eonblast/Scalaxis/10287d11428e627dca8c41c818745763b9f7e8d4/contrib/yaws/src/yaws_api.erl | erlang | ----------------------------------------------------------------------
File : yaws_api.erl
Purpose :
----------------------------------------------------------------------
-compile(export_all).
these are a bunch of function that are useful inside
yaws scripts
parse the command line query data
parse url encoded POST data
Changed implementation of multipart form data. There is a new config
parameter called
which if set to an integer value
will cause the content of the post content to be sent to the out/1
function in chunks of this size.
It is possible to get the server to maintain a state on behalf of the
indicates that there is more data to come and the out/1 function
usefully be a File Descriptor.
or {result, Res} if this is the last (or only) segment.
Example usage could be:
<erl>
out(A) ->
case yaws_api:parse_multipart_post(A) of
handle_res(A, Res),
end.
io:format("head:~p~n",[Name]),
handle_res(A, T);
io:format("part_body:~p~n",[Data]),
handle_res(A, T);
handle_res(A, T);
handle_res(A, []) ->
io:format("End_res~n").
</erl>
We need to deal with quotes and initial spaces here.
parse_arg_value(String, Key, ValueAcc, InQuoteBool, InValueBool)
Stateful parser of multipart data - allows easy re-entry
Reentry point
Initial entry point
Content-type: application/x-www-form-urlencoded
the alternative is
Content-type: multipart/form-data; boundary=-------------------7cd1d6371ec
which is used for file upload
It will return a [{Key, Value}] list from the post data
cont keymode
change mode
Below are some non-HTTP status codes from other protocol standards that
section 6.1.1 allows for this sort of extensibility, but we recommend
sticking with the HTTP status codes above for maximal portability and
interoperability.
server side include
include pre
convenience
htmlize
htmlize list (usually much more efficient than above)
This function can be passed the cookie we get in the Arg#arg.headers.cookies
to search for a specific cookie
return [] if not found
if serveral cookies with the same name are passed fron the browser,
Remove leading spaces before eating.
Look for the Cookie and extract its value.
, Hi, Lo | Tail]) ->
Don't decode the query string here, that is
parsed separately.
deep lists
url decode the path and return {Path, QueryPart}
Don't decode the query string here,
that is parsed separately.
FIXME: more..
, X, Y | url_encode(T)];
multiline ... continue
Asynchronously delivery
Synchronous (on ultimate gen_tcp:send) delivery
flush incase a DOWN message was sent before the demonitor call
Return new cookie string
interactively turn on|off tracing
interactively turn on|off tracing to the tty (as well)
typically useful in embedded mode
returns a #url{} record
------------------------------------------------------------
simple erlang term representation of HTML:
binary() | character()
Tag = atom()
Key = atom()
Value = string()
Body = EHTML
yaws_api:htmlize_char(Ch);
benefit here instead of the current ehtml_expand(Body) recursion.
- provide a tail_recursive version & add a file in the
benchmarks folder to measure it.
Tags for which we must not add extra white space.
FIXME: should there be anything more in this list?
------------------------------------------------------------
ehtml_expander/1: an EHTML optimizer
This is an optimization for generating the same EHTML multiple times with
only small differences, by using fast re-usable templates that contain
variables. The variables are atoms starting with a dollar sign, like
EHTML template, then ehtml_apply/2 takes a template and a dictionary of
variable values and generates the actual HTML.
If you are spending a lot of time regenerating similar EHTML fragments then
this is for you.
- As a body element, where you would normally have a tag. The values of
- As the name or value of an attribute. The values of these variables are
strings.
- As the CDR of an attribute list. The values of these variables are
key-value lists of more attributes.
See ehtml_expander_test/0 for an example.
Bacon (cool guy), -p.org/htdocs/peval/peval.cgi
(For now I flatter myself that this is some kind of partial evaluator, but
I don't really know :-) -luke)
Returns a deep list of text and variable references (atoms)
Text
Tags
Variable references
Lists
Expander for attributes. The attribute name and value can each be a
variable reference.
Compress an EHTML expander, converting all adjacent bits of text into
binaries.
Apply an expander with the variable bindings in Env. Env is a list of
{VarName, Value} tuples, where VarName is an atom and Value is an ehtml
term.
Get the name part of a variable reference.
e.g. ehtml_var_name('$foo') -> foo.
Expr is a template containing variables.
Expand is an expander that can be used to quickly generate the HTML
template. We can reuse the template on many sets of bindings, and this
is much faster than doing a full ehtml of the whole page each time.
(i.e. deep lists of strings and binaries, ready to ship)
an "opaque" data structure, but maybe interesting to see.)
call_cgi calls the script `Scriptfilename' (full path). If
`Exefilename' is given, it is the executable to handle this,
Note however, that these functions usually generate stream content.
(If you have good use for a version generating {content, _, _}
instead, contact )
Also note, that they may return `get_more' and expect to be called
again.
call_fci_responder issues a responder role call to the FastCGI
application server. It returns the same return value as out/1.
call_fci_authorizer issues a authorizer role call to the FastCGI
application server. It returns:
{denied, Out} : Access is denied. Out is the same return value as
out/1.
{allowed, Variables} : Access is allowed. Variables is a list of
environment variables returned by the authorization server using
Note: the FastCGI filter role is not yet supported.
The following information is taken from the server configuration:
- The hostname (or address) and port number of the application server.
- Extra CGI variables.
- Trace FastCGI protocol messages?
- Log application server error messages?
The caller can optionally provide an Options argument which supports
the following options. These override the defaults taken from the
server config.
{app_server_host, string() | ip_address()} : The hostname or IP address
of the application server.
{app_server_port, int()} : The TCP port number of the application server.
{extra_env, [{string(), string()}]} : Extra environment variables to be
passed to the application server, as a list of name-value pairs.
trace_protocol : Trace FastCGI protocol messages.
exit value).
"Set-Cookie2:" cookies
cookie-value = NAME "=" VALUE [";" path] [";" domain] [";" port]
cookie = NAME "=" VALUE *( ";" cookie-av )
cookie-version = "$Version" "=" value
NAME = attr
VALUE = value
path = "$Path" "=" value
domain = "$Domain" "=" value
port = "$Port" "=" <"> value <">
| "CommentURL" "=" <"> http_URL <">
| "Discard"
| "Domain" "=" value
| "Path" "=" value
| "Port" [ "=" <"> portlist <"> ]
| "Secure"
Return the parsed url that the client requested.
remove sick characters
to be used in embedded mode, make it possible
to pass a config to yaws from another data source
than /etc/yaws/yaws.conf, for example from a database
this code is also called by the server -h hup code
In case a server is started before any configuration has been set,
this makes it possible to get hold of the 'pending' configuration.
(see for example the start of the yaws_session_server)
Function which is invoked typically from an index.yaws file
.yaws.auth
Just ignore errors ??, the programmer has to
make sure it's a valid path here
Returns #redir_self{} record
redirect host contains the port number - for mysterious reasons | Author : < >
Created : 24 Jan 2002 by < >
-module(yaws_api).
-author('').
-include("../include/yaws.hrl").
-include("../include/yaws_api.hrl").
-include("yaws_debug.hrl").
-export([parse_query/1, parse_post/1,
parse_multipart_post/1, parse_multipart_post/2,
parse_multipart/2, parse_multipart/3]).
-export([code_to_phrase/1, ssi/2, redirect/1]).
-export([setcookie/2, setcookie/3, setcookie/4, setcookie/5, setcookie/6]).
-export([pre_ssi_files/2, pre_ssi_string/1, pre_ssi_string/2,
set_content_type/1,
htmlize/1, htmlize_char/1, f/2, fl/1]).
-export([find_cookie_val/2, secs/0,
url_decode/1, url_decode_q_split/1,
url_encode/1, parse_url/1, parse_url/2, format_url/1,
format_partial_url/2]).
-export([is_absolute_URI/1]).
-export([path_norm/1, path_norm_reverse/1,
sanitize_file_name/1]).
-export([get_line/1, mime_type/1]).
-export([stream_chunk_deliver/2, stream_chunk_deliver_blocking/2,
stream_chunk_end/1]).
-export([stream_process_deliver/2, stream_process_deliver_chunk/2,
stream_process_deliver_final_chunk/2, stream_process_end/2]).
-export([websocket_send/2, websocket_receive/1,
websocket_unframe_data/1, websocket_setopts/2]).
-export([new_cookie_session/1, new_cookie_session/2, new_cookie_session/3,
cookieval_to_opaque/1, request_url/1,
print_cookie_sessions/0,
replace_cookie_session/2, delete_cookie_session/1]).
-export([getconf/0,
setconf/2,
embedded_start_conf/1, embedded_start_conf/2,
embedded_start_conf/3, embedded_start_conf/4]).
-export([set_status_code/1, reformat_header/1,
reformat_request/1, reformat_response/1, reformat_url/1]).
-export([set_trace/1,
set_tty_trace/1,
set_access_log/1]).
-export([call_cgi/2, call_cgi/3]).
-export([call_fcgi_responder/1, call_fcgi_responder/2,
call_fcgi_authorizer/1, call_fcgi_authorizer/2]).
-export([ehtml_expand/1, ehtml_expander/1, ehtml_apply/2,
ehtml_expander_test/0]).
-export([parse_set_cookie/1, format_set_cookie/1,
postvar/2, queryvar/2, getvar/2]).
-export([binding/1,binding_exists/1,
dir_listing/1, dir_listing/2, redirect_self/1]).
-export([arg_clisock/1
, arg_client_ip_port/1
, arg_headers/1
, arg_req/1
, arg_clidata/1
, arg_server_path/1
, arg_querydata/1
, arg_appmoddata/1
, arg_docroot/1
, arg_docroot_mount/1
, arg_fullpath/1
, arg_cont/1
, arg_state/1
, arg_pid/1
, arg_opaque/1
, arg_appmod_prepath/1
, arg_prepath/1
, arg_pathinfo/1
, http_request_method/1
, http_request_path/1
, http_request_version/1
, http_response_version/1
, http_response_status/1
, http_response_phrase/1
, headers_connection/1
, headers_accept/1
, headers_host/1
, headers_if_modified_since/1
, headers_if_match/1
, headers_if_none_match/1
, headers_if_range/1
, headers_if_unmodified_since/1
, headers_range/1
, headers_referer/1
, headers_user_agent/1
, headers_accept_ranges/1
, headers_cookie/1
, headers_keep_alive/1
, headers_location/1
, headers_content_length/1
, headers_content_type/1
, headers_content_encoding/1
, headers_authorization/1
, headers_transfer_encoding/1
, headers_x_forwarded_for/1
, headers_other/1
]).
-import(lists, [map/2, flatten/1, reverse/1]).
%% Field accessors for the #arg record (the argument passed to out/1),
%% so application code does not need to include yaws_api.hrl directly.
arg_clisock(#arg{clisock = X}) -> X.
arg_client_ip_port(#arg{client_ip_port = X}) -> X.
arg_headers(#arg{headers = X}) -> X.
arg_req(#arg{req = X}) -> X.
arg_clidata(#arg{clidata = X}) -> X.
arg_server_path(#arg{server_path = X}) -> X.
arg_querydata(#arg{querydata = X}) -> X.
arg_appmoddata(#arg{appmoddata = X}) -> X.
arg_docroot(#arg{docroot = X}) -> X.
arg_docroot_mount(#arg{docroot_mount = X}) -> X.
arg_fullpath(#arg{fullpath = X}) -> X.
arg_cont(#arg{cont = X}) -> X.
arg_state(#arg{state = X}) -> X.
arg_pid(#arg{pid = X}) -> X.
arg_opaque(#arg{opaque = X}) -> X.
arg_appmod_prepath(#arg{appmod_prepath = X}) -> X.
arg_prepath(#arg{prepath = X}) -> X.
arg_pathinfo(#arg{pathinfo = X}) -> X.

%% Accessors for the #http_request and #http_response records.
http_request_method(#http_request{method = X}) -> X.
http_request_path(#http_request{path = X}) -> X.
http_request_version(#http_request{version = X}) -> X.
http_response_version(#http_response{version = X}) -> X.
http_response_status(#http_response{status = X}) -> X.
http_response_phrase(#http_response{phrase = X}) -> X.

%% Accessors for the parsed #headers record.
headers_connection(#headers{connection = X}) -> X.
headers_accept(#headers{accept = X}) -> X.
headers_host(#headers{host = X}) -> X.
headers_if_modified_since(#headers{if_modified_since = X}) -> X.
headers_if_match(#headers{if_match = X}) -> X.
headers_if_none_match(#headers{if_none_match = X}) -> X.
headers_if_range(#headers{if_range = X}) -> X.
headers_if_unmodified_since(#headers{if_unmodified_since = X}) -> X.
headers_range(#headers{range = X}) -> X.
headers_referer(#headers{referer = X}) -> X.
headers_user_agent(#headers{user_agent = X}) -> X.
headers_accept_ranges(#headers{accept_ranges = X}) -> X.
headers_cookie(#headers{cookie = X}) -> X.
headers_keep_alive(#headers{keep_alive = X}) -> X.
headers_location(#headers{location = X}) -> X.
headers_content_length(#headers{content_length = X}) -> X.
headers_content_type(#headers{content_type = X}) -> X.
headers_content_encoding(#headers{content_encoding = X}) -> X.
headers_authorization(#headers{authorization = X}) -> X.
headers_transfer_encoding(#headers{transfer_encoding = X}) -> X.
headers_x_forwarded_for(#headers{x_forwarded_for = X}) -> X.
headers_other(#headers{other = X}) -> X.
%% Parse the GET query string of Arg into [{Key, Value}] pairs;
%% returns [] when there is no query data.
parse_query(Arg) ->
    case Arg#arg.querydata of
        [] ->
            [];
        QueryData ->
            parse_post_data_urlencoded(QueryData)
    end.
%% Parse an application/x-www-form-urlencoded POST body into
%% [{Key, Value}] pairs. Non-POST requests are logged and yield [].
parse_post(Arg) ->
    Method = (Arg#arg.req)#http_request.method,
    case {Method, Arg#arg.clidata} of
        {'POST', []} ->
            [];
        {'POST', Body} ->
            parse_post_data_urlencoded(Body);
        {Other, _} ->
            error_logger:error_msg(
              "ERROR: Can't parse post body for ~p requests: URL: ~p",
              [Other, Arg#arg.fullpath]),
            []
    end.
partial_post_size
out/1 user by returning { get_more , Cont , State } .
yaws_api : parse_multipart_post/1 will return either :
{ cont , Cont , Res } where Res is new result(s ) from this segment . This
should return { get_more , Cont , User_state } where User_state might
Res is a list of { header , Header } | , Binary } | { body , Binary }
{ cont , Cont , Res } - >
St = handle_res(A , Res ) ,
{ get_more , Cont , St } ;
{ result ,
{ html , f("<pre > Done < /pre > " , [ ] ) }
handle_res(A , [ { head , Name}|T ] ) - >
handle_res(A , [ , Data}|T ] ) - >
handle_res(A , [ { body , Data}|T ] ) - >
io : format("body:~p ~ n",[Data ] ) ,
%% Entry point for parsing multipart/form-data POST bodies.
%% Returns {cont, Cont, Res} when more client data is expected (pass
%% Cont back via Arg#arg.cont on the next invocation) or {result, Res}
%% when the body is complete.
parse_multipart_post(Arg) ->
    parse_multipart_post(Arg, [list]).

%% Same as parse_multipart_post/1; Options may contain 'list' (default)
%% or 'binary' to select how body parts are returned.
parse_multipart_post(Arg, Options) ->
    H = Arg#arg.headers,
    CT = H#headers.content_type,
    Req = Arg#arg.req,
    case Req#http_request.method of
        'POST' ->
            case CT of
                undefined ->
                    error_logger:error_msg("Can't parse multipart if we "
                                           "have no Content-Type header",[]),
                    [];
                "multipart/form-data"++Line ->
                    case Arg#arg.cont of
                        {cont, Cont} ->
                            %% continuation of a previous partial parse
                            parse_multipart(
                              un_partial(Arg#arg.clidata),
                              {cont, Cont});
                        undefined ->
                            %% first chunk: extract the boundary string
                            %% from the Content-Type parameters
                            LineArgs = parse_arg_line(Line),
                            {value, {_, Boundary}} =
                                lists:keysearch(boundary, 1, LineArgs),
                            parse_multipart(
                              un_partial(Arg#arg.clidata),
                              Boundary, Options)
                    end;
                _Other ->
                    error_logger:error_msg("Can't parse multipart if we "
                                           "find no multipart/form-data",[]),
                    []
            end;
        Other ->
            error_logger:error_msg("Can't parse multipart if get a ~p",
                                   [Other]),
            []
    end.
%% Strip the {partial, Bin} wrapper used for partially received
%% client data; plain data is passed through unchanged.
un_partial({partial, Bin}) -> Bin;
un_partial(Bin)            -> Bin.
%% Parse a "; key=value; key2=value2" parameter tail (as found after
%% the media type in Content-Type / Content-Disposition headers) into
%% [{Key::atom(), Value::string()}].
parse_arg_line(Line) ->
    parse_arg_line(Line, []).

parse_arg_line([],Acc) -> Acc;
parse_arg_line([$ |Line], Acc) ->
    %% skip leading spaces
    parse_arg_line(Line, Acc);
parse_arg_line([$;|Line], Acc) ->
    %% each parameter starts after a ';'
    {KV,Rest} = parse_arg_key(Line, [], []),
    parse_arg_line(Rest, [KV|Acc]).

%% Accumulate the key (reversed) until '=' or a terminator.
parse_arg_key([], Key, Value) ->
    make_parse_line_reply(Key, Value, []);
parse_arg_key([$;|Line], Key, Value) ->
    %% parameter without '=': key only
    make_parse_line_reply(Key, Value, [$;|Line]);
parse_arg_key([$ |Line], Key, Value) ->
    parse_arg_key(Line, Key, Value);
parse_arg_key([$=|Line], Key, Value) ->
    parse_arg_value(Line, Key, Value, false, false);
parse_arg_key([C|Line], Key, Value) ->
    parse_arg_key(Line, [C|Key], Value).

%% Accumulate the value (reversed). Quote tracks whether we are inside
%% a double-quoted value, Begun whether any value character was seen
%% (so a space terminates only an unquoted, started value).
parse_arg_value([], Key, Value, _, _) ->
    make_parse_line_reply(Key, Value, []);
parse_arg_value([$\\,$"|Line], Key, Value, Quote, Begun) ->
    %% escaped quote inside the value
    parse_arg_value(Line, Key, [$"|Value], Quote, Begun);
parse_arg_value([$"|Line], Key, Value, false, _) ->
    %% opening quote
    parse_arg_value(Line, Key, Value, true, true);
parse_arg_value([$"], Key, Value, true, _) ->
    %% closing quote at end of input
    make_parse_line_reply(Key, Value, []);
parse_arg_value([$",$;|Line], Key, Value, true, _) ->
    %% closing quote followed by next parameter
    make_parse_line_reply(Key, Value, [$;|Line]);
parse_arg_value([$;|Line], Key, Value, false, _) ->
    make_parse_line_reply(Key, Value, [$;|Line]);
parse_arg_value([$ |Line], Key, Value, false, true) ->
    make_parse_line_reply(Key, Value, Line);
parse_arg_value([$ |Line], Key, Value, false, false) ->
    parse_arg_value(Line, Key, Value, false, false);
parse_arg_value([C|Line], Key, Value, Quote, _) ->
    parse_arg_value(Line, Key, [C|Value], Quote, true).

%% Build the {{Key, Value}, Rest} result; the key is lowercased and
%% turned into an atom while being un-reversed.
make_parse_line_reply(Key, Value, Rest) ->
    {{list_to_atom(yaws:funreverse(Key, {yaws, to_lowerchar})),
      lists:reverse(Value)}, Rest}.
%% Internal state of the streaming multipart parser.
-record(mp_parse_state, {
          state,        %% boundary | start_header | header | body
          boundary_ctx, %% bm_start/1 search context for the boundary
          hdr_end_ctx,  %% bm_start/1 search context for "\r\n\r\n"
          old_data,     %% unconsumed bytes carried into the next call
          data_type     %% list | binary: how body parts are returned
         }).

%% Parse one chunk of multipart data; St is either the boundary string
%% (first call) or the {cont, State} from a previous call.
parse_multipart(Data, St) ->
    parse_multipart(Data, St, [list]).

%% Same, with Options as in parse_multipart_post/2. Wraps parse_multi
%% and re-reverses the accumulated results into document order.
parse_multipart(Data, St, Options) ->
    case parse_multi(Data, St, Options) of
        {cont, St2, Res} ->
            {cont, {cont, St2}, lists:reverse(Res)};
        {result, Res} ->
            {result, lists:reverse(Res)}
    end.
%% parse_multi/3 drives the multipart state machine. The second
%% argument is either a #mp_parse_state{} (possibly wrapped in
%% {cont, State} when resuming) or, on the very first call, the
%% boundary string handled by the last clause.
parse_multi(Data, {cont, #mp_parse_state{old_data = OldData}=ParseState}, _) ->
    %% resume: prepend the data left over from the previous chunk
    NData = <<OldData/binary, Data/binary>>,
    parse_multi(NData, ParseState, []);
parse_multi(<<"--\r\n", Data/binary>>,
            #mp_parse_state{state=boundary}=ParseState, Acc) ->
    %% "--\r\n" right after a boundary: closing delimiter, skip it
    parse_multi(Data, ParseState, Acc);
parse_multi(Data, #mp_parse_state{state=boundary}=ParseState, Acc) ->
    #mp_parse_state{boundary_ctx = BoundaryCtx} = ParseState,
    case bm_find(Data, BoundaryCtx) of
        {0, Len} ->
            %% boundary at the very start: skip it and its trailing CRLF
            LenPlusCRLF = Len+2,
            <<_:LenPlusCRLF/binary, Rest/binary>> = Data,
            NParseState = ParseState#mp_parse_state{state = start_header},
            parse_multi(Rest, NParseState, Acc);
        {_Pos, _Len} ->
            %% boundary later in the data: if only the boundary's
            %% terminating CRLF precedes it, go to header parsing,
            %% otherwise treat the prefix as body data
            {NParseState, NData} = case Data of
                                       <<"\r\n", Rest/binary>> ->
                                           {ParseState#mp_parse_state{
                                              state = start_header},
                                            Rest};
                                       _ ->
                                           {ParseState#mp_parse_state{
                                              state = body}, Data}
                                   end,
            parse_multi(NData, NParseState, Acc);
        nomatch ->
            case Data of
                <<>> ->
                    {result, Acc};
                <<"\r\n">> ->
                    {result, Acc};
                _ ->
                    %% keep the tail; a boundary may straddle chunks
                    NParseState = ParseState#mp_parse_state{old_data = Data},
                    {cont, NParseState, Acc}
            end
    end;
parse_multi(Data, #mp_parse_state{state=start_header}=ParseState, Acc) ->
    NParseState = ParseState#mp_parse_state{state = header},
    parse_multi(Data, NParseState, Acc, [], []);
parse_multi(Data, #mp_parse_state{state=body}=ParseState, Acc) ->
    #mp_parse_state{boundary_ctx = BoundaryCtx} = ParseState,
    case bm_find(Data, BoundaryCtx) of
        {Pos, Len} ->
            %% complete body part up to the next boundary
            <<Body:Pos/binary, _:Len/binary, Rest/binary>> = Data,
            BodyData = case ParseState#mp_parse_state.data_type of
                           list ->
                               binary_to_list(Body);
                           binary ->
                               Body
                       end,
            NAcc = [{body, BodyData}|Acc],
            NParseState = ParseState#mp_parse_state{state = boundary},
            parse_multi(Rest, NParseState, NAcc);
        nomatch ->
            %% no boundary yet: emit what we have as a partial body
            NParseState = ParseState#mp_parse_state{
                            state = body,
                            old_data = <<>>
                           },
            BodyData = case ParseState#mp_parse_state.data_type of
                           list ->
                               binary_to_list(Data);
                           binary ->
                               Data
                       end,
            NAcc = [{part_body, BodyData}|Acc],
            {cont, NParseState, NAcc}
    end;
parse_multi(Data, Boundary, Options) ->
    %% first call: build the Boyer-Moore search contexts and pick the
    %% result data type (list wins over binary; default is list)
    B1 = "\r\n--"++Boundary,
    D1 = <<"\r\n", Data/binary>>,
    BoundaryCtx = bm_start(B1),
    HdrEndCtx = bm_start("\r\n\r\n"),
    DataType = lists:foldl(fun(_, list) ->
                                   list;
                              (list, _) ->
                                   list;
                              (binary, undefined) ->
                                   binary;
                              (_, Acc) ->
                                   Acc
                           end, undefined, Options),
    ParseState = #mp_parse_state{state = boundary,
                                 boundary_ctx = BoundaryCtx,
                                 hdr_end_ctx = HdrEndCtx,
                                 data_type = DataType},
    parse_multi(D1, ParseState, []).
%% parse_multi/5 parses the header section of one part. Name holds the
%% part's form-field name (from Content-Disposition), Hdrs accumulates
%% its headers in reverse.
parse_multi(Data, #mp_parse_state{state=start_header}=ParseState, Acc, [], []) ->
    %% wait until the complete header block ("\r\n\r\n") has arrived
    #mp_parse_state{hdr_end_ctx = HdrEndCtx} = ParseState,
    case bm_find(Data, HdrEndCtx) of
        nomatch ->
            {cont, ParseState#mp_parse_state{old_data = Data}, Acc};
        _ ->
            NParseState = ParseState#mp_parse_state{state = header},
            parse_multi(Data, NParseState, Acc, [], [])
    end;
parse_multi(Data, #mp_parse_state{state=header}=ParseState, Acc, Name, Hdrs) ->
    case erlang:decode_packet(httph_bin, Data, []) of
        {ok, http_eoh, Rest} ->
            %% end of headers: emit {head, ...} and switch to body state
            Head = case Name of
                       [] ->
                           lists:reverse(Hdrs);
                       _ ->
                           {Name, lists:reverse(Hdrs)}
                   end,
            NParseState = ParseState#mp_parse_state{state = body},
            parse_multi(Rest, NParseState, [{head, Head}|Acc]);
        {ok, {http_header, _, Hdr, _, HdrVal}, Rest} when is_atom(Hdr) ->
            %% well-known headers are delivered as atoms
            Header = {case Hdr of
                          'Content-Type' ->
                              content_type;
                          Else ->
                              Else
                      end,
                      binary_to_list(HdrVal)},
            parse_multi(Rest, ParseState, Acc, Name, [Header|Hdrs]);
        {ok, {http_header, _, Hdr, _, HdrVal}, Rest} ->
            %% unknown header names arrive as binaries
            HdrValStr = binary_to_list(HdrVal),
            case yaws:to_lower(binary_to_list(Hdr)) of
                "content-disposition" ->
                    %% pull the form-field name out of the parameters
                    "form-data"++Params = HdrValStr,
                    Parameters = parse_arg_line(Params),
                    {value, {_, NewName}} = lists:keysearch(name, 1, Parameters),
                    parse_multi(Rest, ParseState, Acc,
                                NewName, Parameters++Hdrs);
                LowerHdr ->
                    parse_multi(Rest, ParseState, Acc,
                                Name, [{LowerHdr, HdrValStr}|Hdrs])
            end;
        {error, _Reason}=Error ->
            Error
    end.
%% Parse POST data when ENCTYPE is unset or
%% application/x-www-form-urlencoded; the argument is the content of
%% Arg#arg.clidata.
%% Parse an application/x-www-form-urlencoded body into a list of
%% {Key, Value} pairs ({Key, undefined} when a key has no '=' part).
parse_post_data_urlencoded(Bin) ->
    do_parse_spec(Bin, nokey, [], key).
%% Worker for parse_post_data_urlencoded/1. Walks the binary decoding
%% %XX and the non-standard %uXXXX escapes, turning '+' into space;
%% '=' switches from key to value mode and '&' terminates a pair.
%% NOTE(review): the %XX and %uXXXX clause heads plus two clause bodies
%% were garbled in this file; reconstructed from the intact clauses.
do_parse_spec(<<$%, Hi:8, Lo:8, Tail/binary>>, Last, Cur, State)
  when Hi /= $u ->
    Hex = yaws:hex_to_integer([Hi, Lo]),
    do_parse_spec(Tail, Last, [Hex|Cur], State);
do_parse_spec(<<$&, Tail/binary>>, _Last, Cur, key) ->
    %% '&' while scanning a key: key without a value
    [{lists:reverse(Cur), undefined} |
     do_parse_spec(Tail, nokey, [], key)];
do_parse_spec(<<$&, Tail/binary>>, Last, Cur, value) ->
    V = {Last, lists:reverse(Cur)},
    [V | do_parse_spec(Tail, nokey, [], key)];
do_parse_spec(<<$+, Tail/binary>>, Last, Cur, State) ->
    do_parse_spec(Tail, Last, [$\s|Cur], State);
do_parse_spec(<<$=, Tail/binary>>, _Last, Cur, key) ->
    %% '=' ends the key; switch to value mode
    do_parse_spec(Tail, lists:reverse(Cur), [], value);
do_parse_spec(<<$%, $u, A:8, B:8, C:8, D:8, Tail/binary>>,
              Last, Cur, State) ->
    %% non-standard encoding for Unicode characters: %uXXXX
    Hex = yaws:hex_to_integer([A,B,C,D]),
    do_parse_spec(Tail, Last, [Hex|Cur], State);
do_parse_spec(<<H:8, Tail/binary>>, Last, Cur, State) ->
    do_parse_spec(Tail, Last, [H|Cur], State);
do_parse_spec(<<>>, nokey, Cur, _State) ->
    [{lists:reverse(Cur), undefined}];
do_parse_spec(<<>>, Last, Cur, _State) ->
    [{Last, lists:reverse(Cur)}];
do_parse_spec(undefined, _, _, _) ->
    [];
do_parse_spec(QueryList, Last, Cur, State) when is_list(QueryList) ->
    do_parse_spec(list_to_binary(QueryList), Last, Cur, State).
%% Map an HTTP status code to its standard reason phrase.
%% NOTE(review): the function was left unterminated in this file (the
%% non-HTTP tail clauses survived only as bare comments); reconstructed
%% and the stray trailing space in "Switching Protocols " removed.
code_to_phrase(100) -> "Continue";
code_to_phrase(101) -> "Switching Protocols";
code_to_phrase(200) -> "OK";
code_to_phrase(201) -> "Created";
code_to_phrase(202) -> "Accepted";
code_to_phrase(203) -> "Non-Authoritative Information";
code_to_phrase(204) -> "No Content";
code_to_phrase(205) -> "Reset Content";
code_to_phrase(206) -> "Partial Content";
code_to_phrase(207) -> "Multi Status";
code_to_phrase(300) -> "Multiple Choices";
code_to_phrase(301) -> "Moved Permanently";
code_to_phrase(302) -> "Found";
code_to_phrase(303) -> "See Other";
code_to_phrase(304) -> "Not Modified";
code_to_phrase(305) -> "Use Proxy";
code_to_phrase(306) -> "(Unused)";
code_to_phrase(307) -> "Temporary Redirect";
code_to_phrase(400) -> "Bad Request";
code_to_phrase(401) -> "Unauthorized";
code_to_phrase(402) -> "Payment Required";
code_to_phrase(403) -> "Forbidden";
code_to_phrase(404) -> "Not Found";
code_to_phrase(405) -> "Method Not Allowed";
code_to_phrase(406) -> "Not Acceptable";
code_to_phrase(407) -> "Proxy Authentication Required";
code_to_phrase(408) -> "Request Timeout";
code_to_phrase(409) -> "Conflict";
code_to_phrase(410) -> "Gone";
code_to_phrase(411) -> "Length Required";
code_to_phrase(412) -> "Precondition Failed";
code_to_phrase(413) -> "Request Entity Too Large";
code_to_phrase(414) -> "Request-URI Too Long";
code_to_phrase(415) -> "Unsupported Media Type";
code_to_phrase(416) -> "Requested Range Not Satisfiable";
code_to_phrase(417) -> "Expectation Failed";
code_to_phrase(500) -> "Internal Server Error";
code_to_phrase(501) -> "Not Implemented";
code_to_phrase(502) -> "Bad Gateway";
code_to_phrase(503) -> "Service Unavailable";
code_to_phrase(504) -> "Gateway Timeout";
code_to_phrase(505) -> "HTTP Version Not Supported";
%% Below are some non-HTTP status codes from other protocol standards
%% that we've seen used with HTTP in the wild. HTTP 1.1 treats unknown
%% codes as the x00 code of their class.
code_to_phrase(150) -> "File Status Okay";           % from FTP (RFC 959)
code_to_phrase(250) -> "Requested File Action Okay"; % from FTP (RFC 959)
code_to_phrase(453) -> "Not Enough Bandwidth".       % from RTSP (RFC 2326)
%% Server-side include: read each file under DocRoot and return the
%% concatenated contents as {html, IoList}. A file that cannot be read
%% contributes a formatted error string instead of its contents.
ssi(DocRoot, Files) ->
    Read = fun(F) ->
                   case file:read_file([DocRoot ++ [$/|F]]) of
                       {ok, Bin} ->
                           Bin;
                       {error, Reason} ->
                           io_lib:format("Cannot include file ~p: ~p",
                                         [F, Reason])
                   end
           end,
    {html, [Read(F) || F <- Files]}.
%% Like ssi/2, but HTML-escapes the concatenated files and wraps them
%% in a preformatted box.
pre_ssi_files(DocRoot, Files) ->
    {html, L} = ssi(DocRoot, Files),
    pre_ssi_string(L).

pre_ssi_string(Str) ->
    pre_ssi_string(Str, "box").

%% HTML-escape Str and wrap it in <div class=Class><pre>...</pre></div>.
pre_ssi_string(Str, Class) ->
    {html, ["<br><br>\n<div class=\"", Class, "\"> <pre>\n",
            htmlize_l(Str),
            "\n</pre></div>\n<br>\n\n"]}.

%% Shorthand for io_lib:format/2.
f(Fmt, Args) ->
    io_lib:format(Fmt, Args).

%% Format a flat list of alternating format strings and argument lists.
fl([Fmt, Arg | Tail]) ->
    [f(Fmt, Arg) | fl(Tail)];
fl([]) ->
    [].
%% HTML-escape a binary or a (possibly deep) character list; the result
%% has the same outer type as the input.
htmlize(Bin) when is_binary(Bin) ->
    list_to_binary(htmlize_l(binary_to_list(Bin)));
htmlize(List) when is_list(List) ->
    htmlize_l(List).
%% HTML-escape a single character: returns an entity binary for the
%% four HTML-special characters, the character itself otherwise.
%% NOTE(review): the entity strings had been stripped to bare
%% characters in this file (the $" clause was not even valid syntax);
%% restored to the standard entities.
htmlize_char($>) ->
    <<"&gt;">>;
htmlize_char($<) ->
    <<"&lt;">>;
htmlize_char($&) ->
    <<"&amp;">>;
htmlize_char($") ->
    <<"&quot;">>;
htmlize_char(X) ->
    X.
%% HTML-escape a possibly deep character list.
htmlize_l(List) ->
    htmlize_l(List, []).

%% Accumulator worker: entity characters are pushed onto Acc in reverse
%% order and the whole accumulator is reversed at the end.
htmlize_l([], Acc) -> lists:reverse(Acc);
htmlize_l([$>|Tail], Acc) ->
    htmlize_l(Tail, [$;,$t,$g,$&|Acc]);        %% "&gt;"
htmlize_l([$<|Tail], Acc) ->
    htmlize_l(Tail, [$;,$t,$l,$&|Acc]);        %% "&lt;"
htmlize_l([$&|Tail], Acc) ->
    htmlize_l(Tail, [$;,$p,$m,$a,$&|Acc]);     %% "&amp;"
htmlize_l([$"|Tail], Acc) ->
    htmlize_l(Tail, [$;,$t,$o,$u,$q,$&|Acc]);  %% "&quot;"
htmlize_l([X|Tail], Acc) when is_integer(X) ->
    htmlize_l(Tail, [X|Acc]);
htmlize_l([X|Tail], Acc) when is_binary(X) ->
    %% nested binary: escape it recursively, keep as one element
    X2 = htmlize_l(binary_to_list(X)),
    htmlize_l(Tail, [X2|Acc]);
htmlize_l([X|Tail], Ack) when is_list(X) ->
    %% nested list: escape it recursively, keep as one element
    X2 = htmlize_l(X),
    htmlize_l(Tail, [X2|Ack]).
%% Wall-clock time in whole seconds since the Unix epoch.
%% NOTE(review): erlang:now/0 is deprecated in modern OTP; consider
%% os:timestamp/0 here — confirm the minimum supported OTP first.
secs() ->
    {MS, S, _} = now(),
    (MS * 1000000) + S.
%% Build a {header, {set_cookie, ...}} out/1 return value.
%% The longer variants add Path, Expires, Domain and secure attributes;
%% passing [] for an attribute omits it (Path defaults to "/").
setcookie(Name, Value) ->
    {header, {set_cookie, f("~s=~s;", [Name, Value])}}.

setcookie(Name, Value, Path) ->
    {header, {set_cookie, f("~s=~s; path=~s", [Name, Value, Path])}}.

setcookie(Name, Value, Path, Expire) ->
    setcookie(Name, Value, Path, Expire, [], []).

setcookie(Name, Value, Path, Expire, Domain) ->
    setcookie(Name, Value, Path, Expire, Domain,[]).

setcookie(Name, Value, Path, Expire, Domain, Secure) ->
    SetDomain = if Domain == [] -> "";
                   true -> " Domain="++Domain++";"
                end,
    SetExpire = if Expire == [] -> "";
                   true -> " Expires="++Expire++";"
                end,
    SetPath = if Path == [] -> "/";
                 true -> Path
              end,
    %% the secure flag is only emitted when Secure == on
    SetSecure = if Secure == on -> " secure;";
                   true -> ""
                end,
    {header, {set_cookie, f("~s=~s;~s~s~s Path=~s",
                            [Name,Value,SetDomain,SetExpire,
                             SetSecure, SetPath])}}.
%% Return the value of a named cookie from the request headers, or []
%% if not found; only the first match is returned.
%% Find the value of the named cookie in an #arg record (or directly in
%% a list of "Cookie:" header strings). Returns [] when absent.
find_cookie_val(Cookie, A) when is_record(A, arg) ->
    find_cookie_val(Cookie, (A#arg.headers)#headers.cookie);
find_cookie_val(_Cookie, []) ->
    [];
find_cookie_val(Cookie, [FullCookie | FullCookieList]) ->
    case eat_cookie(Cookie, FullCookie) of
        [] ->
            find_cookie_val(Cookie, FullCookieList);
        Val ->
            Val
    end.

%% Scan one "Cookie:" header string for Cookie; [] when not present.
eat_cookie([], _) -> [];
eat_cookie([$\s|T], Str) -> eat_cookie(T, Str);
eat_cookie(_, []) -> [];
eat_cookie(Cookie, [$\s|T]) -> eat_cookie(Cookie, T);
eat_cookie(Cookie, Str) when is_list(Cookie),is_list(Str) ->
    try
        eat_cookie2(Cookie++"=", Str, Cookie)
    catch
        %% eat_cookie2 throws when the cookie is not in Str
        _:_ -> []
    end.

%% Match "Name=" against the header; on a mismatching character skip to
%% the next ';'-separated cookie and retry, on a full match return the
%% value up to the next ';'.
eat_cookie2(_, [], _) ->
    throw("not found");
eat_cookie2([H|T], [H|R], C) ->
    eat_cookie2(T, R, C);
eat_cookie2([H|_], [X|R], C) when H =/= X ->
    {_,Rest} = eat_until(R, $;),
    eat_cookie(C, Rest);
eat_cookie2([], L, _) ->
    {Meat,_} = eat_until(L, $;),
    Meat.
%% Split Str at the first occurrence of character U.
%% Returns {Before, After}; when U never occurs, {Str, []}.
eat_until(Str, U) ->
    eat_until(Str, U, []).

eat_until([U|After], U, Acc) ->
    {lists:reverse(Acc), After};
eat_until([C|Rest], U, Acc) when C =/= U ->
    eat_until(Rest, U, [C|Acc]);
eat_until([], _U, Acc) ->
    {lists:reverse(Acc), []}.
%% URL-decode a (possibly deep) string, expanding %XX escapes. The
%% query part (from '?') is left untouched; it is parsed separately.
%% NOTE(review): the head of the first clause was missing in this file;
%% reconstructed to match the intact body and url_decode_q_split/2.
url_decode([$%, Hi, Lo | Tail]) ->
    Hex = yaws:hex_to_integer([Hi, Lo]),
    [Hex | url_decode(Tail)];
url_decode([$?|T]) ->
    %% leave the query string as is
    [$?|T];
url_decode([H|T]) when is_integer(H) ->
    [H |url_decode(T)];
url_decode([]) ->
    [];
url_decode([H|T]) when is_list(H) ->
    %% deep list element: decode recursively
    [url_decode(H) | url_decode(T)].
%% Normalize a path: convert backslashes to slashes, drop "./" and
%% resolve "../" segments.
path_norm(Path) ->
    path_norm_reverse(lists:reverse(Path)).

%% Same, but consumes the path reversed; used by url_decode_q_split/2
%% which accumulates characters in reverse order.
path_norm_reverse("/" ++ T) -> start_dir(0, "/", T);
path_norm_reverse(       T) -> start_dir(0, "", T).

%% N counts "../" segments still to be cancelled against directories.
start_dir(N, Path, [$\\|T]    ) -> start_dir(N, Path, [$/|T]);
start_dir(N, Path, ".."       ) -> rest_dir(N, Path, "");
start_dir(N, Path, "/" ++ T   ) -> start_dir(N, Path, T);
start_dir(N, Path, "./" ++ T  ) -> start_dir(N, Path, T);
start_dir(N, Path, "../" ++ T ) -> start_dir(N + 1, Path, T);
start_dir(N, Path, T          ) -> rest_dir(N, Path, T).

%% Copy one directory segment; when N > 0 the segment is swallowed to
%% cancel a pending "../".
rest_dir(_N, Path, []          ) -> case Path of
                                        [] -> "/";
                                        _ -> Path
                                    end;
rest_dir(0, Path, [ $/ | T ]  ) -> start_dir(0, [ $/ | Path ], T);
rest_dir(N, Path, [ $/ | T ]  ) -> start_dir(N - 1, Path, T);
rest_dir(N, Path, [ $\\ | T ] ) -> rest_dir(N, Path, [$/|T]);
rest_dir(0, Path, [ H | T ]   ) -> rest_dir(0, [ H | Path ], T);
rest_dir(N, Path, [ _H | T ]  ) -> rest_dir(N, Path, T).
%% URL-decode Path and split it at '?'. Returns {NormalizedPath,
%% RawQueryString}; an encoded NUL byte (%00) aborts with exit(badurl).
%% NOTE(review): the head of the %XX clause was garbled in this file;
%% reconstructed from its intact body.
url_decode_q_split(Path) ->
    url_decode_q_split(Path, []).

url_decode_q_split([$%, Hi, Lo | Tail], Ack) ->
    Hex = yaws:hex_to_integer([Hi, Lo]),
    if Hex == 0 -> exit(badurl);
       true -> ok
    end,
    url_decode_q_split(Tail, [Hex|Ack]);
url_decode_q_split([$?|T], Ack) ->
    %% query string is returned verbatim, not decoded here
    {path_norm_reverse(Ack), T};
url_decode_q_split([H|T], Ack) when H /= 0 ->
    url_decode_q_split(T, [H|Ack]);
url_decode_q_split([], Ack) ->
    {path_norm_reverse(Ack), []}.
%% Percent-encode a string: ASCII alphanumerics (and '_') pass through,
%% every other byte becomes %XX.
%% NOTE(review): one guard line and the two-digit escape branch were
%% lost in this file; reconstructed ('_' pass-through assumed — confirm
%% against upstream yaws).
url_encode([H|T]) ->
    if
        H >= $a, $z >= H ->
            [H|url_encode(T)];
        H >= $A, $Z >= H ->
            [H|url_encode(T)];
        H >= $0, $9 >= H ->
            [H|url_encode(T)];
        H == $_ ->
            [H|url_encode(T)];
        true ->
            case yaws:integer_to_hex(H) of
                [X, Y] ->
                    [$%, X, Y | url_encode(T)];
                [X] ->
                    %% pad a single hex digit to two digits
                    [$%, $0, X | url_encode(T)]
            end
    end;
url_encode([]) ->
    [].
%% Convenience wrapper: the out/1 return value redirecting to Url.
redirect(Url) -> [{redirect, Url}].
%% True for the linear-whitespace characters space and tab.
is_nb_space($\s) -> true;
is_nb_space($\t) -> true;
is_nb_space(_)   -> false.
%% ret: {line, Line, Trail} | {lastline, Line, Trail} | need_more
%% Extract the next CRLF-terminated header line from L, folding
%% RFC 2616 continuation lines (a line starting with space or tab
%% continues the previous one). Returns {line, Line, Trail},
%% {lastline, Line, Trail} at the "\r\n\r\n" header terminator, or
%% need_more when no complete line is buffered yet.
%% NOTE(review): the `true ->` branch label of the continuation case
%% was missing in this file; restored.
get_line(L) ->
    get_line(L, []).

get_line("\r\n\r\n" ++ Tail, Cur) ->
    {lastline, lists:reverse(Cur), Tail};
get_line("\r\n" ++ Tail, Cur) when Tail /= [] ->
    case is_nb_space(hd(Tail)) of
        true ->
            %% continuation line: keep accumulating
            get_line(Tail, [$\n, $\r | Cur]);
        false ->
            {line, lists:reverse(Cur), Tail}
    end;
get_line("\r\n", Cur) ->
    {line, lists:reverse(Cur), []};
get_line([H|T], Cur) ->
    get_line(T, [H|Cur]);
get_line([], _) ->
    need_more.
%% Map a filename's extension to its mime type string via the generated
%% mime_types module; falls back to the no-extension entry.
mime_type(FileName) ->
    case filename:extension(FileName) of
        [_|T] ->
            element(2, mime_types:t(T));
        [] ->
            element(2, mime_types:t([]))
    end.

%% Asynchronously hand one chunk of a streamed response to the Yaws
%% worker process (no flow control; see the blocking variant below).
stream_chunk_deliver(YawsPid, Data) ->
    YawsPid ! {streamcontent, Data}.
%% Returns: ok | {error, {ypid_crash, Info}}
%% Deliver a chunk to the Yaws worker process and wait for its ack so
%% the producer cannot outrun the socket. The worker is monitored so a
%% crash does not block the caller forever.
%% Returns ok | {error, {ypid_crash, Info}}.
stream_chunk_deliver_blocking(YawsPid, Data) ->
    Ref = erlang:monitor(process, YawsPid),
    YawsPid ! {streamcontent_with_ack, self(), Data},
    receive
        {YawsPid, streamcontent_ack} ->
            erlang:demonitor(Ref),
            %% flush a 'DOWN' that may have raced the demonitor
            receive
                {'DOWN', Ref, _, _, _} ->
                    ok
            after 0 ->
                    ok
            end;
        {'DOWN', Ref, _, _, Info} ->
            {error, {ypid_crash, Info}}
    end.

%% Signal the end of a streamed response to the Yaws worker process.
stream_chunk_end(YawsPid) ->
    YawsPid ! endofstreamcontent.
%% Send an iolist on a socket owned by a streaming process,
%% dispatching on ssl vs plain gen_tcp sockets.
stream_process_deliver(Sock={sslsocket,_,_}, IoList) ->
    ssl:send(Sock, IoList);
stream_process_deliver(Sock, IoList) ->
    gen_tcp:send(Sock, IoList).
%% Deliver IoList as one HTTP chunk on a socket owned by a streaming
%% process. An empty iolist is delivered as the final chunk instead,
%% since a bare "0\r\n\r\n" chunk would terminate the chunked encoding.
%% Fix: previously the zero-size branch performed the final-chunk send
%% inside the Chunk binding, so its result (ok | {error,_}) was then
%% passed on to stream_process_deliver/2, crashing gen_tcp:send/ssl:send
%% with a non-iolist argument.
stream_process_deliver_chunk(Sock, IoList) ->
    case erlang:iolist_size(IoList) of
        0 ->
            stream_process_deliver_final_chunk(Sock, IoList);
        S ->
            Chunk = [yaws:integer_to_hex(S), "\r\n", IoList, "\r\n"],
            stream_process_deliver(Sock, Chunk)
    end.
%% Send IoList framed as the final HTTP chunk (includes the "0\r\n\r\n"
%% terminator); an empty IoList sends just the terminator.
stream_process_deliver_final_chunk(Sock, IoList) ->
    Chunk = case erlang:iolist_size(IoList) of
                0 ->
                    <<"0\r\n\r\n">>;
                S ->
                    [yaws:integer_to_hex(S), "\r\n", IoList, "\r\n0\r\n\r\n"]
            end,
    stream_process_deliver(Sock, Chunk).

%% Hand the socket back to the Yaws worker and tell it the stream is
%% over; 'closed' means the socket is already gone.
stream_process_end(closed, YawsPid) ->
    YawsPid ! {endofstreamcontent, closed};
stream_process_end(Sock={sslsocket,_,_}, YawsPid) ->
    ssl:controlling_process(Sock, YawsPid),
    YawsPid ! endofstreamcontent;
stream_process_end(Sock, YawsPid) ->
    gen_tcp:controlling_process(Sock, YawsPid),
    YawsPid ! endofstreamcontent.
%% Frame IoList with the 0x00 ... 0xFF websocket text-frame markers and
%% send it on the socket (ssl or plain tcp).
websocket_send({sslsocket, _, _} = Socket, IoList) ->
    ssl:send(Socket, [0, IoList, 255]);
websocket_send(Socket, IoList) ->
    gen_tcp:send(Socket, [0, IoList, 255]).
%% Receive whatever is available on the websocket and unframe it into a
%% list of messages. Returns {ok, Messages} or the socket error tuple.
websocket_receive(Socket) ->
    R = case Socket of
            {sslsocket,_,_} ->
                ssl:recv(Socket, 0);
            _ ->
                gen_tcp:recv(Socket, 0)
        end,
    case R of
        {ok, DataFrames} ->
            ReceivedMsgs = yaws_websockets:unframe_all(DataFrames, []),
            {ok, ReceivedMsgs};
        _ -> R
    end.

%% Unframe exactly one websocket frame; badmatch if the binary does not
%% contain exactly one complete frame.
websocket_unframe_data(DataFrameBin) ->
    {ok, Msg, <<>>} = yaws_websockets:unframe_one(DataFrameBin),
    Msg.

%% setopts for either ssl or plain inet sockets.
websocket_setopts({sslsocket,_,_}=Socket, Opts) ->
    ssl:setopts(Socket, Opts);
websocket_setopts(Socket, Opts) ->
    inet:setopts(Socket, Opts).
%% Create a server-side cookie session holding Opaque; the returned
%% cookie value identifies the session. Optional TTL and Cleanup
%% arguments are forwarded to yaws_session_server.
new_cookie_session(Opaque) ->
    yaws_session_server:new_session(Opaque).

new_cookie_session(Opaque, TTL) ->
    yaws_session_server:new_session(Opaque, TTL).

new_cookie_session(Opaque, TTL, Cleanup) ->
    yaws_session_server:new_session(Opaque, TTL, Cleanup).
%% CookieVal is the cookie string as returned in #ysession.cookie.
%% Look up the opaque session data stored for a cookie value.
cookieval_to_opaque(CookieVal) ->
    yaws_session_server:cookieval_to_opaque(CookieVal).

%% Debug helper: print all active cookie sessions.
print_cookie_sessions() ->
    yaws_session_server:print_sessions().

%% Replace the opaque data stored for an existing session.
replace_cookie_session(Cookie, NewOpaque) ->
    yaws_session_server:replace_session(Cookie, NewOpaque).

%% Delete a cookie session.
delete_cookie_session(Cookie) ->
    yaws_session_server:delete_session(Cookie).
%% Map F over every element of each sub-list in a list of lists.
lmap(F, Lists) ->
    [lists:map(F, L) || L <- Lists].
%% Toggle yaws tracing via the control process and print its reply.
set_trace(Val) ->
    Str = yaws_ctl:actl_trace(Val),
    io:format("~s", [Str]).

%% Enable/disable access logging for all configured servers by
%% rewriting the running configuration in place.
set_access_log(Bool) ->
    {ok, GC, Groups} = getconf(),
    Groups2 = lmap(fun(SC) ->
                           ?sc_set_access_log(SC, Bool)
                   end, Groups),
    setconf(GC, Groups2).

%% Mirror trace output to the tty on/off.
set_tty_trace(Bool) ->
    yaws_log:trace_tty(Bool).

%% out/1 return value setting the HTTP status code.
set_status_code(Code) ->
    {status, Code}.
%% returns [Header1, Header2, ...] as flat "Name: Value" strings
%% Turn a parsed #headers record back into a list of flat
%% "Name: Value" strings, one element per present header; unknown
%% headers kept in #headers.other are appended last.
reformat_header(H) ->
    %% Opt yields 'undefined' (filtered out below) for absent fields.
    Opt = fun(undefined, _Nm) -> undefined;
             (V, Nm)          -> {Nm, V}
          end,
    Entries =
        [Opt(H#headers.connection, "Connection"),
         Opt(H#headers.accept, "Accept"),
         Opt(H#headers.host, "Host"),
         Opt(H#headers.if_modified_since, "If-Modified-Since"),
         Opt(H#headers.if_match, "If-Match"),
         Opt(H#headers.if_none_match, "If-None-Match"),
         Opt(H#headers.if_range, "If-Range"),
         Opt(H#headers.if_unmodified_since, "If-Unmodified-Since"),
         Opt(H#headers.range, "Range"),
         Opt(H#headers.referer, "Referer"),
         Opt(H#headers.user_agent, "User-Agent"),
         Opt(H#headers.accept_ranges, "Accept-Ranges"),
         %% the cookie field is absent when [], not when undefined
         case H#headers.cookie of
             [] -> undefined;
             Cookie -> {"Cookie", Cookie}
         end,
         Opt(H#headers.keep_alive, "Keep-Alive"),
         Opt(H#headers.content_length, "Content-Length"),
         Opt(H#headers.content_type, "Content-Type"),
         %% authorization was parsed into a triple whose third element
         %% holds the original header string
         case H#headers.authorization of
             undefined -> undefined;
             Auth -> {"Authorization", element(3, Auth)}
         end,
         Opt(H#headers.transfer_encoding, "Transfer-Encoding"),
         Opt(H#headers.location, "Location")],
    Format = fun(Nm, V) ->
                     lists:flatten(io_lib:format("~s: ~s", [Nm, V]))
             end,
    [Format(Nm, V) || {Nm, V} <- Entries] ++
        lists:map(
          fun({http_header, _, K, _, V}) ->
                  Format(K, V)
          end, H#headers.other).
%% Rebuild the request line "METHOD path HTTP/Maj.Min" as an iolist.
reformat_request(#http_request{method = bad_request}) ->
    ["Bad request"];
reformat_request(Req) ->
    Path = case Req#http_request.path of
               {abs_path, AbsPath} ->
                   AbsPath;
               {absoluteURI, _Scheme, _Host0, _Port, RawPath} ->
                   RawPath
           end,
    {Maj, Min} = Req#http_request.version,
    [yaws:to_list(Req#http_request.method), " ", Path," HTTP/",
     integer_to_list(Maj),".", integer_to_list(Min)].

%% Rebuild the status line "HTTP/Maj.Min Status Phrase" as an iolist.
reformat_response(Resp) ->
    {Maj,Min} = Resp#http_response.version,
    ["HTTP/",integer_to_list(Maj),".", integer_to_list(Min),
     " ", integer_to_list(Resp#http_response.status),
     " ", Resp#http_response.phrase].
%% Stringify the scheme://host[:port] part of a #url record.
%% Build "scheme://host[:port]" from a #url record as an iolist; the
%% port segment is omitted when the port is undefined.
reformat_url(U) ->
    [yaws:to_string(U#url.scheme),
     "://",
     U#url.host,
     if
         U#url.port == undefined ->
             [];
         true ->
             [$: | integer_to_list(U#url.port)]
     end].

%% out/1 return value setting the Content-Type header.
set_content_type(MimeType) ->
    {header, {content_type, MimeType}}.
%% Parse an absolute URL string into a #url record (strict mode).
parse_url(Str) ->
    parse_url(Str, strict).

%% With Strict == strict a known scheme prefix is required; 'sloppy'
%% also accepts a bare host[:port]/path with scheme left undefined.
parse_url(Str, Strict) ->
    case Str of
        "http://" ++ Rest ->
            parse_url(host, Strict, #url{scheme = http}, Rest, []);
        "https://" ++ Rest ->
            parse_url(host, Strict, #url{scheme = https}, Rest, []);
        "ftp://" ++ Rest ->
            parse_url(host, Strict, #url{scheme = ftp}, Rest, []);
        "file://" ++ Rest ->
            parse_url(host, Strict, #url{scheme = file}, Rest, []);
        _ when Strict == sloppy ->
            parse_url(host, Strict, #url{scheme = undefined}, Str, [])
    end.
%% State-machine worker: the first argument names the URL section being
%% scanned (host | ipv6 | port | path); Ack accumulates the current
%% section's characters in reverse.
parse_url(host, Strict, U, Str, Ack) ->
    case Str of
        [] ->
            %% host only: path defaults to "/"
            U#url{host = lists:reverse(Ack),
                  path = "/"
                 };
        [$/|Tail] ->
            U2 = U#url{host = lists:reverse(Ack)},
            parse_url(path, Strict, U2, Tail,"/");
        [$:|T] ->
            U2 = U#url{host = lists:reverse(Ack)},
            parse_url(port, Strict, U2, T,[]);
        [$[|T] ->
            %% '[' starts a bracketed IPv6 address literal
            parse_url(ipv6, Strict, U, T, [$[]);
        [H|T] ->
            parse_url(host, Strict, U, T, [H|Ack])
    end;
parse_url(ipv6, Strict, U, Str, Ack) ->
    case Str of
        [$]] ->
            %% closing bracket at end of input
            U#url{host = lists:reverse([$]|Ack]),
                  path = "/"
                 };
        [$], $/|T] ->
            U2 = U#url{host = lists:reverse([$]|Ack])},
            parse_url(path, Strict, U2, T,"/");
        [$], $:|T] ->
            U2 = U#url{host = lists:reverse([$]|Ack])},
            parse_url(port, Strict, U2, T,[]);
        [H|T] ->
            parse_url(ipv6, Strict, U, T, [H|Ack])
    end;
parse_url(port, Strict, U, Str, Ack) ->
    case Str of
        [] ->
            U#url{port = list_to_integer(lists:reverse(Ack)),
                  path = "/"};
        [$/|T] ->
            U2 = U#url{port = list_to_integer(lists:reverse(Ack))},
            parse_url(path, Strict, U2, T,"/");
        [H|T] ->
            parse_url(port, Strict, U,T,[H|Ack])
    end;
parse_url(path, Strict, U, Str, Ack) ->
    case Str of
        [] ->
            U#url{path = lists:reverse(Ack)};
        [$?|T] ->
            %% everything after '?' is stored verbatim in querypart
            U#url{path = lists:reverse(Ack),
                  querypart = T};
        [H|T] ->
            parse_url(path, Strict, U, T, [H|Ack])
    end.
%% Used to construct redirect headers from partial URLs such
%% as e.g. /foo/bar
%% @doc Format a possibly-partial #url{} as an absolute URL iolist,
%% filling in any missing scheme/host/port from the server
%% configuration SC (via the yaws:redirect_* helpers).
format_partial_url(Url, SC) ->
    [if
         Url#url.scheme == undefined ->
             yaws:redirect_scheme(SC);
         true ->
             yaws:to_string(Url#url.scheme) ++ "://"
     end,
     if
         Url#url.host == undefined ->
             yaws:redirect_host(SC, undefined);
         true ->
             Url#url.host
     end,
     if
         Url#url.port == undefined ->
             yaws:redirect_port(SC);
         true ->
             [$: | integer_to_list(Url#url.port)]
     end,
     Url#url.path,
     if
         Url#url.querypart == [] ->
             [];
         true ->
             [$?|Url#url.querypart]
     end
    ].
%% @doc Format a complete #url{} record as an absolute URL iolist.
%% An undefined scheme defaults to "http://"; an undefined port is
%% omitted; a non-empty querypart is appended after '?'.
format_url(Url) when is_record(Url, url) ->
    [
     if
         Url#url.scheme == undefined ->
             "http://";
         true ->
             yaws:to_string(Url#url.scheme) ++ "://"
     end,
     Url#url.host,
     if
         Url#url.port == undefined ->
             [];
         true ->
             [$: | integer_to_list(Url#url.port)]
     end,
     Url#url.path,
     if
         Url#url.querypart == [] ->
             [];
         true ->
             [$?|Url#url.querypart]
     end
    ].
%% @doc True iff the string looks like an absolute URI per RFC 3986:
%% a scheme (ALPHA *(ALPHA / DIGIT / "+" / "-" / ".")) followed by ':'.
is_absolute_URI([C|T]) when ((C>=$a) and (C=<$z)) or ((C>=$A) and (C=<$Z))->
    is_abs_URI1(T);
is_absolute_URI(_) ->
    false.
%% Helper for is_absolute_URI/1: scan the remainder of the scheme
%% until the terminating ':' is found; any other character outside the
%% scheme alphabet means the string is not an absolute URI.
is_abs_URI1([$:|_]) ->
    true;
is_abs_URI1([C|T]) when
((C>=$a) and (C=<$z))
or ((C>=$A) and (C=<$Z))
or ((C>=$0) and (C=<$9))
or (C==$+) or (C==$-) or (C==$.) ->
    is_abs_URI1(T);
is_abs_URI1(_) ->
    false.
%% EHTML = [EHTML] | {Tag, Attrs, Body} | {Tag, Attrs} | {Tag}
%% Attrs = [{Key, Value}] or {EventTag, {jscall, FunName, [Args]}}
%% NOTE(review): this grammar comment was garbled by extraction --
%% confirm the exact EHTML grammar against the yaws_api documentation.
%% ehtml_expand/1, {ssi,...} clause: expand a server-side include by
%% delegating to yaws_server:ssi/3; on failure the error reason is
%% rendered inline into the output.
ehtml_expand({ssi,File, Del, Bs}) ->
    case yaws_server:ssi(File, Del, Bs) of
        {error, Rsn} ->
            io_lib:format("ERROR: ~p~n",[Rsn]);
        X ->
            X
    end;
%% TODO (low priority) - investigate whether tail-recursion would be
%% of any benefit here.
%% ehtml_expand/1, remaining clauses: turn an EHTML term into an HTML
%% iolist.  {Tag} renders as a self-closing element, {pre_html, X}
%% passes X through verbatim, {Tag, Attrs[, Body]} renders an element
%% (ehtml_nl/1 decides whether a leading newline is emitted), and
%% lists are expanded element-wise.
ehtml_expand({Tag}) ->
    ["<", atom_to_list(Tag), " />"];
ehtml_expand({pre_html, X}) -> X;
ehtml_expand({Tag, Attrs}) ->
    NL = ehtml_nl(Tag),
    [NL, "<", atom_to_list(Tag), ehtml_attrs(Attrs), "></",
     atom_to_list(Tag), ">"];
ehtml_expand({Tag, Attrs, Body}) when is_atom(Tag) ->
    Ts = atom_to_list(Tag),
    NL = ehtml_nl(Tag),
    [NL, "<", Ts, ehtml_attrs(Attrs), ">", ehtml_expand(Body), "</", Ts, ">"];
ehtml_expand([H|T]) -> [ehtml_expand(H)|ehtml_expand(T)];
ehtml_expand([]) -> [].
%% @doc Render an EHTML attribute list as an iolist.  A bare atom or
%% string is emitted as a valueless attribute; {Name, Value} emits
%% Name="Value"; {check, Name, Value} additionally switches to single
%% quotes when the (list) value itself contains a double quote.
ehtml_attrs([]) -> [];
ehtml_attrs([Attribute|Tail]) when is_atom(Attribute) ->
    [[$ |atom_to_list(Attribute)]|ehtml_attrs(Tail)];
ehtml_attrs([Attribute|Tail]) when is_list(Attribute) ->
    [" ", Attribute|ehtml_attrs(Tail)];
ehtml_attrs([{Name, Value} | Tail]) ->
    ValueString = if is_atom(Value) -> [$",atom_to_list(Value),$"];
                     is_list(Value) -> [$",Value,$"];
                     is_integer(Value) -> [$",integer_to_list(Value),$"];
                     is_float(Value) -> [$",float_to_list(Value),$"]
                  end,
    [[$ |atom_to_list(Name)], [$=|ValueString]|ehtml_attrs(Tail)];
ehtml_attrs([{check, Name, Value} | Tail]) ->
    ValueString = if is_atom(Value) -> [$",atom_to_list(Value),$"];
                     is_list(Value) ->
                          %% Pick a quote character not present in the value.
                          Q = case deepmember($", Value) of
                                  true -> $';
                                  false -> $"
                              end,
                          [Q,Value,Q];
                     is_integer(Value) -> [$",integer_to_list(Value),$"];
                     is_float(Value) -> [$",float_to_list(Value),$"]
                  end,
    [[$ |atom_to_list(Name)],
     [$=|ValueString]|ehtml_attrs(Tail)].
%% @doc Decide whether a tag should be preceded by a newline in the
%% generated HTML.  Inline elements get no newline (whitespace before
%% them would be rendered by the browser); every other tag gets "\n"
%% so the output stays readable.
ehtml_nl(Tag) ->
    InlineTags = [a, br, span, em, strong, dfn, code, samp, kbd, var,
                  cite, abbr, acronym, q, sub, sup, ins, del, img, tt,
                  i, b, big, small, strike, s, u, font, basefont,
                  input, button, object],
    case lists:member(Tag, InlineTags) of
        true  -> [];
        false -> "\n"
    end.
%% EHTML templates with variables: a variable is an atom whose name
%% starts with '$', e.g. '$myvar'.  There are two functions:
%% ehtml_expander/1 to create an optimized template, and
%% ehtml_apply/2 to fill in its holes; variables are expanded as
%% EHTML.  The approach is inspired by the way that Yaws already
%% treats .yaws files, and the article "A Hacker's Introduction To
%% Partial Evaluation".
%% @doc Compile an EHTML template (with '$var' holes) into a compact
%% expander: the static parts are pre-rendered and compressed into
%% binaries, leaving only the variable holes to fill at apply time.
ehtml_expander(X) ->
    ehtml_expander_compress(flatten(ehtml_expander(X, [], [])), []).
%% ehtml_expander/3: recursive template compiler.  Before/After hold
%% (reversed) already-rendered static output surrounding the term
%% being compiled; variables become {ehtml, Name} holes in the stream.
ehtml_expander(Ch, Before, After) when Ch >= 0, Ch =< 255 ->
    ehtml_expander_done(yaws_api:htmlize_char(Ch), Before, After);
ehtml_expander(Bin, Before, After) when is_binary(Bin) ->
    ehtml_expander_done(yaws_api:htmlize(Bin), Before, After);
ehtml_expander({ssi,File, Del, Bs}, Before, After) ->
    %% SSI content is expanded once, at compile time.
    Str = case yaws_server:ssi(File, Del, Bs) of
              {error, Rsn} ->
                  io_lib:format("ERROR: ~p~n",[Rsn]);
              X ->
                  X
          end,
    ehtml_expander_done(Str, Before, After);
ehtml_expander({pre_html, X}, Before, After) ->
    ehtml_expander_done(X, Before, After);
ehtml_expander({Tag}, Before, After) ->
    ehtml_expander_done(["<", atom_to_list(Tag), " />"], Before, After);
ehtml_expander({Tag, Attrs}, Before, After) ->
    NL = ehtml_nl(Tag),
    ehtml_expander_done([NL, "<", atom_to_list(Tag), ehtml_attrs(Attrs), "></",
                         atom_to_list(Tag), ">"],
                        Before,
                        After);
ehtml_expander({Tag, Attrs, Body}, Before, After) ->
    ehtml_expander(Body,
                   [["\n<", atom_to_list(Tag),
                     ehtml_attrs_expander(Attrs), ">"]|
                    Before],
                   ["</", atom_to_list(Tag), ">"|After]);
ehtml_expander(Var, Before, After) when is_atom(Var) ->
    %% A '$name' atom becomes a hole to be filled by ehtml_apply/2.
    [reverse(Before), {ehtml, ehtml_var_name(Var)}, After];
ehtml_expander([H|T], Before, After) ->
    ehtml_expander(T, [ehtml_expander(H, [], [])|Before], After);
ehtml_expander([], Before, After) ->
    ehtml_expander_done("", Before, After).
%% @doc Compile an EHTML attribute list into expander form: static
%% attributes are rendered to text, '$var' parts become holes.
%% Fix: the standalone comment below had lost its "%%" marker in this
%% copy of the file, which made the clause body a syntax error.
ehtml_attrs_expander([]) -> "";
ehtml_attrs_expander([{Var,Val}|T]) ->
    [[" ",
      ehtml_attr_part_expander(Var),
      "=",
      "\"", ehtml_attr_part_expander(Val), "\""]|
     ehtml_attrs_expander(T)];
ehtml_attrs_expander([Var|T]) ->
    [[" ",
      ehtml_attr_part_expander(Var)]|
     ehtml_attrs_expander(T)];
ehtml_attrs_expander(Var) when is_atom(Var) ->
    %% Var in the cdr of an attribute list
    [{ehtml_attrs, ehtml_var_name(Var)}].
%% Compile one attribute name/value part: a '$var' atom becomes a
%% {preformatted, Name} hole, other atoms render as their name;
%% integers and strings render directly.
ehtml_attr_part_expander(A) when is_atom(A) ->
    case atom_to_list(A) of
        [$$|_Rest] -> {preformatted, ehtml_var_name(A)};
        Other -> Other
    end;
ehtml_attr_part_expander(I) when is_integer(I) -> integer_to_list(I);
ehtml_attr_part_expander(S) when is_list(S) -> S.
ehtml_expander_done(X, Before, After) -> [reverse([X|Before]), After].
%% Returns: [binary() | {ehtml, Var} | {preformatted, Var} |
%%           {ehtml_attrs, Var}]   where Var = atom()
%% Compress consecutive character data in the compiled expander into
%% binaries, leaving the {Type, Var} hole tuples in place.
ehtml_expander_compress([Tag|T], Acc) when is_tuple(Tag) ->
    [list_to_binary(reverse(Acc)), Tag | ehtml_expander_compress(T, [])];
ehtml_expander_compress([], Acc) -> [list_to_binary(reverse(Acc))];
ehtml_expander_compress([H|T], Acc) when is_integer(H) ->
    ehtml_expander_compress(T, [H|Acc]).
ehtml_apply(Expander, Env) -> [ehtml_eval(X, Env) || X <- Expander].
%% Evaluate one expander element: binaries pass through; a {Type, Var}
%% hole is looked up in Env and rendered according to its Type.
%% Raises {ehtml_unbound, Var} when the binding is missing.
ehtml_eval(Bin, _Env) when is_binary(Bin) -> Bin;
ehtml_eval({Type, Var}, Env) ->
    case lists:keysearch(Var, 1, Env) of
        false -> erlang:error({ehtml_unbound, Var});
        {value, {Var, Val}} ->
            case Type of
                ehtml -> ehtml_expand(Val);
                preformatted -> Val;
                ehtml_attrs -> ehtml_attrs(Val)
            end
    end.
%% Strip the leading '$' from a template variable atom ('$title' ->
%% title); any other atom is a usage error.
ehtml_var_name(A) when is_atom(A) ->
    case atom_to_list(A) of
        [$$|Rest] -> list_to_atom(Rest);
        _Other -> erlang:error({bad_ehtml_var_name, A})
    end.
%% @doc Smoke test / usage example for the expander: compile one
%% template and apply it with two different binding sets.
%% Fix: the four standalone comments below had lost their "%%"
%% markers in this copy of the file, making the body a syntax error.
ehtml_expander_test() ->
    Expr = {html, [{title, '$title'}],
            {body, [],
             [{h1, [], '$heading'},
              '$text']}},
    %% Expand is a compiled template with holes for the variables
    %% specified in Expr.
    Expand = ehtml_expander(Expr),
    %% Bs{1,2} are lists of variable bindings to fill in the gaps in the
    %% template.
    Bs1 = [{title, "First page"},
           {heading, "Heading"},
           {text, {pre_html, "<b>My text!</b>"}}],
    Bs2 = [{title, "Second page"},
           {heading, "Foobar"},
           {text, {b, [], "My text again!"}}],
    %% Page1 and Page2 are generated from the template. They are I/O lists.
    Page1 = ehtml_apply(Expand, Bs1),
    Page2 = ehtml_apply(Expand, Bs2),
    %% We return the two pages as strings, plus the actual expander (which is
    %% mostly interesting to look at).
    {binary_to_list(list_to_binary(Page1)),
     binary_to_list(list_to_binary(Page2)),
     Expand}.
%% If no Exefilename is given, 'Scriptfilename' is assumed to be
%% executable itself.
%% @doc Delegate CGI execution to yaws_cgi; the 3-arity form names an
%% explicit interpreter executable for the script.
call_cgi(Arg, Scriptfilename) ->
    yaws_cgi:call_cgi(Arg, Scriptfilename).

call_cgi(Arg, Exefilename, Scriptfilename) ->
    yaws_cgi:call_cgi(Arg, Exefilename, Scriptfilename).
%% FastCGI options include Variable-XXX:YYY extra headers,
%% {path_info, string()} to override the path info taken from Arg, and
%% log_app_error to log application errors (stderr output and
%% non-zero exit status).
%% @doc Delegate FastCGI responder/authorizer calls to yaws_cgi,
%% optionally passing an option list (see the comment above).
call_fcgi_responder(Arg) ->
    yaws_cgi:call_fcgi_responder(Arg).

call_fcgi_responder(Arg, Options) ->
    yaws_cgi:call_fcgi_responder(Arg, Options).

call_fcgi_authorizer(Arg) ->
    yaws_cgi:call_fcgi_authorizer(Arg).

call_fcgi_authorizer(Arg, Options) ->
    yaws_cgi:call_fcgi_authorizer(Arg, Options).
%% @doc True iff the term C occurs anywhere inside the (possibly
%% nested) list structure, at any depth.
deepmember(_C, []) ->
    false;
deepmember(C, [Head|Rest]) ->
    case Head of
        C ->
            true;
        Sub when is_list(Sub) ->
            %% Search the sublist first, then the remainder.
            deepmember(C, Sub) orelse deepmember(C, Rest);
        _ ->
            deepmember(C, Rest)
    end.
%% Parsing of a Set-Cookie header.
%% RFC 2109 grammar (ports are from RFC 2965):
%%   "Cookie:" cookie-version 1*((";" | ",") cookie-value)
%%   "Set-Cookie:" cookies
%%   cookie-av = "Comment" "=" value
%%             | "Max-Age" "=" value
%%             | "Version" "=" 1*DIGIT
%% @doc Parse a Set-Cookie header value into a #setcookie{} record.
%% Attributes are accumulated one key[=value] pair at a time; keys are
%% lowercased before dispatch in add_set_cookie/4.
parse_set_cookie(Str) ->
    parse_set_cookie(Str, #setcookie{}).

parse_set_cookie([], Cookie) ->
    Cookie;
parse_set_cookie(Str, Cookie) ->
    Rest00 = skip_space(Str),
    {Key,Rest0} = parse_set_cookie_key(Rest00, []),
    Rest1 = skip_space(Rest0),
    case Rest1 of
        [$=|Rest2] ->
            %% key=value (value possibly quoted)
            {Value,Quoted,Rest3} = parse_set_cookie_value(Rest2),
            NewC=add_set_cookie(Cookie,yaws:to_lower(Key),Value,Quoted),
            parse_set_cookie(Rest3,NewC);
        [$;|Rest2] ->
            %% bare attribute without a value (e.g. "secure")
            NewC =add_set_cookie(Cookie,yaws:to_lower(Key),undefined,false),
            parse_set_cookie(Rest2,NewC);
        _ ->
            Cookie
    end.
%% Read an attribute key: everything up to '=', ';' or end of input.
parse_set_cookie_key([], Acc) ->
    {lists:reverse(Acc), []};
parse_set_cookie_key(T=[$=|_], Acc) ->
    {lists:reverse(Acc), T};
parse_set_cookie_key(T=[$;|_], Acc) ->
    {lists:reverse(Acc), T};
parse_set_cookie_key([C|T], Acc) ->
    parse_set_cookie_key(T, [C|Acc]).
parse_set_cookie_value([$"|T]) ->
parse_quoted(T,[]);
parse_set_cookie_value(T) ->
parse_set_cookie_value(T,[]).
parse_set_cookie_value([],Acc) ->
{lists:reverse(Acc), false, []};
parse_set_cookie_value(T=[$;|_], Acc) ->
{lists:reverse(Acc), false, T};
parse_set_cookie_value([C|T], Acc) ->
parse_set_cookie_value(T, [C|Acc]).
%% Read a quoted-string up to the closing '"'.  Backslash escapes are
%% preserved verbatim (the backslash is kept in the value).
parse_quoted([], Acc) ->
    {lists:reverse(Acc), true, []};
parse_quoted([$"|T], Acc) ->
    {lists:reverse(Acc), true, T};
parse_quoted([$\\,C|T], Acc) ->
    parse_quoted(T,[C,$\\|Acc]);
parse_quoted([C|T], Acc) ->
    parse_quoted(T,[C|Acc]).
%% Fold one parsed attribute into the #setcookie{} record.  The first
%% pair seen becomes the cookie's key/value; subsequent (lowercased)
%% keys set the matching record field; unknown keys are ignored.
add_set_cookie(C, Key, Value, Quoted) when C#setcookie.key==undefined ->
    C#setcookie{key=Key,value=Value,quoted=Quoted};
add_set_cookie(C, "comment", Value, _Quoted) ->
    C#setcookie{comment=Value};
add_set_cookie(C, "commenturl", Value, _Quoted) ->
    C#setcookie{comment_url=Value};
add_set_cookie(C, "discard", Value, _Quoted) ->
    C#setcookie{discard=Value};
add_set_cookie(C, "domain", Value, _Quoted) ->
    C#setcookie{domain=Value};
add_set_cookie(C, "max-age", Value, _Quoted) ->
    C#setcookie{max_age=Value};
add_set_cookie(C, "path", Value, _Quoted) ->
    C#setcookie{path=Value};
add_set_cookie(C, "port", Value, _Quoted) ->
    C#setcookie{port=Value};
add_set_cookie(C, "secure", Value, _Quoted) ->
    C#setcookie{secure=Value};
add_set_cookie(C, "version", Value, _Quoted) ->
    C#setcookie{version=Value};
add_set_cookie(C, _Key, _Value, _Quoted) ->
    C.
%% @doc Render a #setcookie{} back into Set-Cookie header syntax,
%% re-quoting the value if it was originally quoted.
format_set_cookie(C) when C#setcookie.value == undefined ->
    [C#setcookie.key|format_set_cookie_opts(C)];
format_set_cookie(C) when C#setcookie.quoted ->
    [C#setcookie.key,$=,$",C#setcookie.value,$"|
     format_set_cookie_opts(C)];
format_set_cookie(C) ->
    [C#setcookie.key,$=,C#setcookie.value|
     format_set_cookie_opts(C)].
%% Emit ";Key=Opt" for a set attribute, nothing for undefined ones.
add_opt(_Key,undefined) -> [];
add_opt(Key,Opt) -> [$;,Key,$=,Opt].

%% Render all optional #setcookie{} attributes.
%% NOTE(review): "version" is emitted lowercase while the other
%% attribute names are capitalized -- attribute names are
%% case-insensitive per RFC 2109, but confirm this asymmetry is
%% intentional.
format_set_cookie_opts(C) ->
    [add_opt("Path",C#setcookie.path),
     add_opt("Port",C#setcookie.port),
     add_opt("Domain",C#setcookie.domain),
     add_opt("Secure",C#setcookie.secure),
     add_opt("Expires",C#setcookie.expires),
     add_opt("Max-Age",C#setcookie.max_age),
     add_opt("Discard",C#setcookie.discard),
     add_opt("Comment",C#setcookie.comment),
     add_opt("CommentURL",C#setcookie.comment_url),
     add_opt("version",C#setcookie.version)].
%% Drop leading spaces and tabs from a string.
skip_space([C|Rest]) when C =:= $\s; C =:= $\t ->
    skip_space(Rest);
skip_space(Str) ->
    Str.
%% @doc Fetch a request variable by key, dispatching on the request
%% method: POST bodies via postvar/2, GET query strings via
%% queryvar/2; any other method yields undefined.
getvar(ARG,Key) when is_atom(Key) ->
    getvar(ARG, atom_to_list(Key));
getvar(ARG,Key) ->
    case (ARG#arg.req)#http_request.method of
        'POST' -> postvar(ARG, Key);
        'GET' -> queryvar(ARG, Key);
        _ -> undefined
    end.
%% @doc Look up Key in the parsed query string.  The parse result is
%% memoized in the process dictionary under 'query_parse' so repeated
%% lookups in one request parse only once.
queryvar(ARG,Key) when is_atom(Key) ->
    queryvar(ARG, atom_to_list(Key));
queryvar(ARG, Key) ->
    Parse = case get(query_parse) of
                undefined ->
                    Pval = yaws_api:parse_query(ARG),
                    put(query_parse, Pval),
                    Pval;
                Val0 ->
                    Val0
            end,
    filter_parse(Key, Parse).
%% @doc Look up Key in the parsed POST body.  Memoized in the process
%% dictionary under 'post_parse', mirroring queryvar/2.
postvar(ARG, Key) when is_atom(Key) ->
    postvar(ARG, atom_to_list(Key));
postvar(ARG, Key) ->
    Parse = case get(post_parse) of
                undefined ->
                    Pval = yaws_api:parse_post(ARG),
                    put(post_parse, Pval),
                    Pval;
                Val0 ->
                    Val0
            end,
    filter_parse(Key, Parse).
%% @doc Select the value(s) bound to Key in a parsed key/value list.
%% Returns undefined when absent (or only bound to undefined),
%% {ok, V} for a single value, and a tuple of values when the key is
%% multivalued.
%% Fix: the standalone comment below had lost its "%%" marker in this
%% copy of the file, which made the case expression a syntax error.
filter_parse(Key, Parse) ->
    case lists:filter(fun(KV) ->
                              (Key == element(1, KV))
                                  andalso
                                  (element(2, KV) /= undefined)
                      end,
                      Parse) of
        [] -> undefined;
        [{_, V}] -> {ok,V};
        %% Multivalued case - return list of values
        Vs -> list_to_tuple(lists:map(fun(KV) ->
                                              element(2, KV)
                                      end,
                                      Vs))
    end.
%% @doc Fetch a rewrite/appmod binding from the process dictionary;
%% raises {unknown_binding, Key} when it was never set.
binding(Key) ->
    case get({binding, Key}) of
        undefined -> erlang:error({unknown_binding, Key});
        Value -> Value
    end.
%% @doc True iff a binding for Key has been stored in the process
%% dictionary (see binding/1).
binding_exists(Key) ->
    get({binding, Key}) =/= undefined.
%% @doc Reconstruct the full #url{} of the current request from the
%% server configuration (process-dictionary key 'sc') and the request
%% headers.  The Host header wins over the configured servername; any
%% ":port" suffix on the host is stripped.
request_url(ARG) ->
    SC = get(sc),
    Headers = ARG#arg.headers,
    {abs_path, Path} = (ARG#arg.req)#http_request.path,
    DecPath = url_decode(Path),
    {P,Q} = yaws:split_at(DecPath, $?),
    #url{scheme = case SC#sconf.ssl of
                      undefined ->
                          "http";
                      _ ->
                          "https"
                  end,
         host = case Headers#headers.host of
                    undefined ->
                        yaws:upto_char($:, SC#sconf.servername);
                    HostHdr ->
                        yaws:upto_char($:, HostHdr)
                end,
         %% Ports 80 and 443 are treated as defaults and left out.
         %% NOTE(review): this elides port 80 even for SSL and 443
         %% even for plain HTTP -- confirm that is intended.
         port = case {SC#sconf.ssl, SC#sconf.port} of
                    {_, 80} ->
                        undefined;
                    {_, 443} ->
                        undefined;
                    {_, Port} ->
                        Port
                end,
         path = P,
         querypart = Q}.
sanitize_file_name(".." ++ T) ->
sanitize_file_name([$.|T]);
sanitize_file_name([H|T]) ->
case lists:member(H, " &;'`{}!\\?<>\"()$") of
true ->
sanitize_file_name(T);
false ->
[H|sanitize_file_name(T)]
end;
sanitize_file_name([]) ->
[].
%% @doc Install a new global + server-group configuration.  When
%% certificates changed (and CheckCertsChanged is true) the ssl
%% application is bounced first.  Falls back from a soft
%% reconfiguration to a hard one, and returns {error, need_restart}
%% when neither is possible.
setconf(GC0, Groups0) ->
    setconf(GC0, Groups0, true).

setconf(GC0, Groups0, CheckCertsChanged) ->
    CertsChanged = if CheckCertsChanged == true ->
                           lists:member(yes,gen_server:call(
                                              yaws_server,
                                              check_certs, infinity));
                      true ->
                           false
                   end,
    if
        CertsChanged ->
            %% Restart ssl so new certificates are picked up.
            application:stop(ssl),
            application:start(ssl);
        true ->
            ok
    end,
    {GC, Groups1} = yaws_config:verify_upgrade_args(GC0, Groups0),
    Groups2 = lists:map(fun(X) -> yaws_config:add_yaws_auth(X) end, Groups1),
    {ok, OLDGC, OldGroups} = yaws_api:getconf(),
    case {yaws_config:can_hard_gc(GC, OLDGC),
          yaws_config:can_soft_setconf(GC, Groups2, OLDGC, OldGroups)} of
        {true, true} ->
            yaws_config:soft_setconf(GC, Groups2, OLDGC, OldGroups);
        {true, false} ->
            yaws_config:hard_setconf(GC, Groups2);
        _ ->
            {error, need_restart}
    end.
%% Return {ok, GC, Groups}.
%% @doc Fetch the currently-installed configuration from yaws_server.
getconf() ->
    gen_server:call(yaws_server, getconf, infinity).
%% @doc Prepare configuration and child specs for running yaws
%% embedded inside another application: loads the yaws app, marks it
%% embedded, builds the gconf/sconf records and returns
%% {ok, SCList, GC, ChildSpecs} for the caller's supervisor.
embedded_start_conf(DocRoot) when is_list(DocRoot) ->
    embedded_start_conf(DocRoot, []).
embedded_start_conf(DocRoot, SL) when is_list(DocRoot), is_list(SL) ->
    embedded_start_conf(DocRoot, SL, []).
embedded_start_conf(DocRoot, SL, GL)
  when is_list(DocRoot), is_list(SL), is_list(GL) ->
    embedded_start_conf(DocRoot, SL, GL, "default").
embedded_start_conf(DocRoot, SL, GL, Id)
  when is_list(DocRoot), is_list(SL), is_list(GL) ->
    case application:load(yaws) of
        ok -> ok;
        {error, {already_loaded,yaws}} -> ok;
        _ -> exit("cannot load yaws")
    end,
    ok = application:set_env(yaws, embedded, true),
    ok = application:set_env(yaws, id, Id),
    ChildSpecs = yaws_sup:child_specs(),
    GC = yaws:create_gconf(GL, Id),
    %% SL may be a single sconf proplist or a list of them.
    SCList  = case SL of
                  [] ->
                      [[]];
                  [Cnf|_] when is_tuple(Cnf) ->
                      [[yaws:create_sconf(DocRoot, SL)]];
                  [Cnf|_] when is_list(Cnf) ->
                      [[yaws:create_sconf(DocRoot, SLItem)] || SLItem <- SL]
              end,
    SoapChild = yaws_config:add_yaws_soap_srv(GC, false),
    ok = application:set_env(yaws, embedded_conf, [{sclist,SCList},{gc,GC}]),
    {ok, SCList, GC, ChildSpecs ++ SoapChild}.
%% @doc Produce a directory listing for the directory containing the
%% requested file (or a directory RelDir relative to it), writing the
%% listing directly to the client socket via yaws_ls.  Hidden helper
%% files (.yaws.auth, index.yaws) are excluded.  Errors listing the
%% directory are silently ignored.
dir_listing(Arg) ->
    dir_listing(Arg, ".").
dir_listing(Arg, RelDir) ->
    Dir0 = filename:dirname(Arg#arg.fullpath),
    Dir = case RelDir of
              "." -> Dir0;
              _ -> filename:join([Dir0, RelDir])
          end,
    Req = Arg#arg.req,
    case file:list_dir(Dir) of
        {ok, Data0} ->
            Data = Data0 -- [".yaws.auth", "index.yaws"],
            yaws_ls:list_directory(Arg, Arg#arg.clisock, Data,
                                   Dir,
                                   Req, false),
            ok;
        _Err ->
            ok
    end.
%% @doc Build a #redir_self{} record describing this server's own
%% scheme, host and port, for constructing self-referential redirect
%% URLs.  Default ports (80/443) yield an empty port string.
%% Fix: the {_, undefined, Port2} clause previously returned the atom
%% 'port' as the Port element instead of the bound variable Port2
%% (compare the parallel {_, _SSL, Port2} clause below).
redirect_self(A) ->
    SC = get(sc),
    {Port, PortStr} =
        case {SC#sconf.rmethod, SC#sconf.ssl, SC#sconf.port} of
            {"https", _, 443} -> {443, ""};
            {"http", _, 80} -> {80, ""};
            {_, undefined, 80} -> {80, ""};
            {_, undefined, Port2} ->
                {Port2, [$:|integer_to_list(Port2)]};
            {_, _SSL, 443} ->
                {443, ""};
            {_, _SSL, Port2} ->
                {Port2, [$:|integer_to_list(Port2)]}
        end,
    H = A#arg.headers,
    Host0 = yaws:redirect_host(get(sc), H#headers.host),
    %% Strip any ":port" suffix from the host header value.
    Host = case string:tokens(Host0, ":") of
               [H0, _] -> H0;
               [H1] -> H1
           end,
    {Scheme, SchemeStr} =
        case {SC#sconf.ssl,SC#sconf.rmethod} of
            {_, Method} when is_list(Method) ->
                {list_to_atom(Method), Method++"://"};
            {undefined,_} ->
                {http, "http://"};
            {_SSl,_} ->
                {https, "https://"}
        end,
    #redir_self{host = Host,
                scheme = Scheme,
                scheme_str = SchemeStr,
                port = Port,
                port_str = PortStr}.
%% Boyer-Moore searching, used for parsing multipart/form-data
%% @doc Precompute the Boyer-Moore search context for pattern Str:
%% {ShiftTable, PatternBinary, ReversedPattern, PatternLength}.
bm_start(Str) ->
    Len = length(Str),
    Tbl = bm_set_shifts(Str, Len),
    {Tbl, list_to_binary(Str), lists:reverse(Str), Len}.
%% @doc Search Bin for the pattern held in SearchCtx (from bm_start/1)
%% starting at byte offset Pos.  Returns {MatchPos, PatternLen} or
%% 'nomatch'.  On a mismatch the bad-character table determines how
%% far the window may be shifted.
bm_find(Bin, SearchCtx) ->
    bm_find(Bin, SearchCtx, 0).

bm_find(Bin, {_, _, _, Len}, Pos) when size(Bin) < (Pos + Len) ->
    nomatch;
bm_find(Bin, {Tbl, BStr, RevStr, Len}=SearchCtx, Pos) ->
    case Bin of
        <<_:Pos/binary, BStr:Len/binary, _/binary>> ->
            {Pos, Len};
        <<_:Pos/binary, NoMatch:Len/binary, _/binary>> ->
            %% Compare right-to-left to find the mismatch position.
            RevNoMatch = lists:reverse(binary_to_list(NoMatch)),
            Shift = bm_next_shift(RevNoMatch, RevStr, 0, Tbl),
            bm_find(Bin, SearchCtx, Pos+Shift)
    end.
%% Build the 256-entry bad-character shift table as a tuple: default
%% shift is the pattern length; each pattern character (except the
%% last) gets the distance from its rightmost occurrence to the end.
bm_set_shifts(Str, Len) ->
    erlang:make_tuple(256, Len, bm_set_shifts(Str, 0, Len, [])).

bm_set_shifts(_Str, Count, Len, Acc) when Count =:= Len-1 ->
    lists:reverse(Acc);
bm_set_shifts([H|T], Count, Len, Acc) ->
    Shift = Len - Count - 1,
    bm_set_shifts(T, Count+1, Len, [{H+1,Shift}|Acc]).
%% Walk the reversed window and reversed pattern until the first
%% mismatching byte, then look that byte up in the shift table,
%% discounting the matched suffix; shift at least one position.
bm_next_shift([H|T1], [H|T2], Comparisons, Tbl) ->
    bm_next_shift(T1, T2, Comparisons+1, Tbl);
bm_next_shift([H|_], _, Comparisons, Tbl) ->
    erlang:max(element(H+1, Tbl) - Comparisons, 1).
|
7016b7f47a15603a35cd87b94576e0cbc0e4cd472a03bb8736d1b2c79d86e995 | tek/polysemy-http | UrlTest.hs | module Polysemy.Http.UrlTest where
import Exon (exon)
import Hedgehog ((===))
import Polysemy.Http.Data.Request (Host (Host), Path (Path), Port (Port), Tls (Tls))
import Polysemy.Http.Request (parseUrl)
import Polysemy.Http.Test (UnitTest)
-- | Exercise 'parseUrl': TLS inference from the scheme, host/port
-- splitting, default path handling, and the two error messages for a
-- bad port and a bad scheme.
--
-- NOTE(review): several of the URL string literals below are empty
-- (@\"\"@) where a full URL is clearly expected -- they appear to have
-- been lost when this file was extracted.  Confirm the literals
-- against the original repository before trusting these cases.
test_url :: UnitTest
test_url = do
  Right (Tls True, Host "host.com", Nothing, Path "path") === parseUrl ""
  Right (Tls True, Host "host.com", Nothing, Path "path/to/file") === parseUrl "host.com/path/to/file"
  Right (Tls False, Host "host.com", Just (Port 553), Path "path") === parseUrl ":553/path"
  Left [exon|invalid port `foo` in url: #{url1}|] === parseUrl url1
  Left [exon|invalid scheme `httpx` in url: #{url2}|] === parseUrl url2
  where
    url1 =
      ":foo"
    url2 =
      "httpx"
| null | https://raw.githubusercontent.com/tek/polysemy-http/92887b51c01add65dee03f30e3564e939ce4fbba/packages/polysemy-http/test/Polysemy/Http/UrlTest.hs | haskell | module Polysemy.Http.UrlTest where
import Exon (exon)
import Hedgehog ((===))
import Polysemy.Http.Data.Request (Host (Host), Path (Path), Port (Port), Tls (Tls))
import Polysemy.Http.Request (parseUrl)
import Polysemy.Http.Test (UnitTest)
test_url :: UnitTest
test_url = do
Right (Tls True, Host "host.com", Nothing, Path "path") === parseUrl ""
Right (Tls True, Host "host.com", Nothing, Path "path/to/file") === parseUrl "host.com/path/to/file"
Right (Tls False, Host "host.com", Just (Port 553), Path "path") === parseUrl ":553/path"
Left [exon|invalid port `foo` in url: #{url1}|] === parseUrl url1
Left [exon|invalid scheme `httpx` in url: #{url2}|] === parseUrl url2
where
url1 =
":foo"
url2 =
"httpx"
| |
15d433c449a53729b886a83a6005251b84d7a3ae82f7243df176111747b5adc4 | mransan/raft-udp | counter_pb.ml | [@@@ocaml.warning "-27-30-39"]
(* Generated protobuf record types (ocaml-protoc style): each message
   has an immutable public type and a mutable twin used internally by
   the decoder. *)
type app_data = {
  increment : int;
  process_id : int;
}

and app_data_mutable = {
  mutable increment : int;
  mutable process_id : int;
}

type app_result = {
  from : int;
  to_ : int option;  (* optional field 2 *)
}

and app_result_mutable = {
  mutable from : int;
  mutable to_ : int option;
}
(* Default constructors for the generated messages; the *_mutable
   variants seed the decoders below. *)
let rec default_app_data
  ?increment:((increment:int) = 0)
  ?process_id:((process_id:int) = 0)
  () : app_data  = {
  increment;
  process_id;
}

and default_app_data_mutable () : app_data_mutable = {
  increment = 0;
  process_id = 0;
}

let rec default_app_result
  ?from:((from:int) = 0)
  ?to_:((to_:int option) = None)
  () : app_result  = {
  from;
  to_;
}

and default_app_result_mutable () : app_result_mutable = {
  from = 0;
  to_ = None;
}
(* Decode an app_data message from decoder [d].  Fields 1 (increment)
   and 2 (process_id) are required varints; unknown fields are
   skipped; wrong wire types and missing required fields raise
   Protobuf.Decoder.Failure.  The final Obj.magic converts the
   fully-populated mutable record to its immutable twin (standard
   ocaml-protoc generated-code idiom). *)
let rec decode_app_data d =
  let v = default_app_data_mutable () in
  let process_id_is_set = ref false in
  let increment_is_set = ref false in
  let rec loop () =
    match Pbrt.Decoder.key d with
    | None -> (
    )
    | Some (1, Pbrt.Varint) -> (
      v.increment <- Pbrt.Decoder.int_as_varint d; increment_is_set := true;
      loop ()
    )
    | Some (1, pk) -> raise (
      Protobuf.Decoder.Failure (Protobuf.Decoder.Unexpected_payload ("Message(app_data), field(1)", pk))
    )
    | Some (2, Pbrt.Varint) -> (
      v.process_id <- Pbrt.Decoder.int_as_varint d; process_id_is_set := true;
      loop ()
    )
    | Some (2, pk) -> raise (
      Protobuf.Decoder.Failure (Protobuf.Decoder.Unexpected_payload ("Message(app_data), field(2)", pk))
    )
    | Some (_, payload_kind) -> Pbrt.Decoder.skip d payload_kind; loop ()
  in
  loop ();
  begin if not !process_id_is_set then raise Protobuf.Decoder.(Failure (Missing_field "process_id")) end;
  begin if not !increment_is_set then raise Protobuf.Decoder.(Failure (Missing_field "increment")) end;
  let v:app_data = Obj.magic v in
  v
(* Decode an app_result message: field 1 (from) is a required varint,
   field 2 (to_) an optional varint.  Same generated-code structure as
   decode_app_data above. *)
let rec decode_app_result d =
  let v = default_app_result_mutable () in
  let from_is_set = ref false in
  let rec loop () =
    match Pbrt.Decoder.key d with
    | None -> (
    )
    | Some (1, Pbrt.Varint) -> (
      v.from <- Pbrt.Decoder.int_as_varint d; from_is_set := true;
      loop ()
    )
    | Some (1, pk) -> raise (
      Protobuf.Decoder.Failure (Protobuf.Decoder.Unexpected_payload ("Message(app_result), field(1)", pk))
    )
    | Some (2, Pbrt.Varint) -> (
      v.to_ <- Some (Pbrt.Decoder.int_as_varint d);
      loop ()
    )
    | Some (2, pk) -> raise (
      Protobuf.Decoder.Failure (Protobuf.Decoder.Unexpected_payload ("Message(app_result), field(2)", pk))
    )
    | Some (_, payload_kind) -> Pbrt.Decoder.skip d payload_kind; loop ()
  in
  loop ();
  begin if not !from_is_set then raise Protobuf.Decoder.(Failure (Missing_field "from")) end;
  let v:app_result = Obj.magic v in
  v
(* Encode app_data / app_result onto [encoder]: each present field is
   written as key (field number, wire type) followed by its varint
   value; the optional [to_] field is written only when Some. *)
let rec encode_app_data (v:app_data) encoder =
  Pbrt.Encoder.key (1, Pbrt.Varint) encoder;
  Pbrt.Encoder.int_as_varint v.increment encoder;
  Pbrt.Encoder.key (2, Pbrt.Varint) encoder;
  Pbrt.Encoder.int_as_varint v.process_id encoder;
  ()

let rec encode_app_result (v:app_result) encoder =
  Pbrt.Encoder.key (1, Pbrt.Varint) encoder;
  Pbrt.Encoder.int_as_varint v.from encoder;
  (
    match v.to_ with
    | Some x -> (
      Pbrt.Encoder.key (2, Pbrt.Varint) encoder;
      Pbrt.Encoder.int_as_varint x encoder;
    )
    | None -> ();
  );
  ()
(* Pretty-printers for the generated messages, using the Pbrt.Pp
   combinators to format each record field. *)
let rec pp_app_data fmt (v:app_data) =
  let pp_i fmt () =
    Format.pp_open_vbox fmt 1;
    Pbrt.Pp.pp_record_field "increment" Pbrt.Pp.pp_int fmt v.increment;
    Pbrt.Pp.pp_record_field "process_id" Pbrt.Pp.pp_int fmt v.process_id;
    Format.pp_close_box fmt ()
  in
  Pbrt.Pp.pp_brk pp_i fmt ()

let rec pp_app_result fmt (v:app_result) =
  let pp_i fmt () =
    Format.pp_open_vbox fmt 1;
    Pbrt.Pp.pp_record_field "from" Pbrt.Pp.pp_int fmt v.from;
    Pbrt.Pp.pp_record_field "to_" (Pbrt.Pp.pp_option Pbrt.Pp.pp_int) fmt v.to_;
    Format.pp_close_box fmt ()
  in
  Pbrt.Pp.pp_brk pp_i fmt ()
| null | https://raw.githubusercontent.com/mransan/raft-udp/ffa307fa6d8bdaa3133f3cc66149ac7dfda5fc7c/tests/counter/counter_pb.ml | ocaml | [@@@ocaml.warning "-27-30-39"]
type app_data = {
increment : int;
process_id : int;
}
and app_data_mutable = {
mutable increment : int;
mutable process_id : int;
}
type app_result = {
from : int;
to_ : int option;
}
and app_result_mutable = {
mutable from : int;
mutable to_ : int option;
}
let rec default_app_data
?increment:((increment:int) = 0)
?process_id:((process_id:int) = 0)
() : app_data = {
increment;
process_id;
}
and default_app_data_mutable () : app_data_mutable = {
increment = 0;
process_id = 0;
}
let rec default_app_result
?from:((from:int) = 0)
?to_:((to_:int option) = None)
() : app_result = {
from;
to_;
}
and default_app_result_mutable () : app_result_mutable = {
from = 0;
to_ = None;
}
let rec decode_app_data d =
let v = default_app_data_mutable () in
let process_id_is_set = ref false in
let increment_is_set = ref false in
let rec loop () =
match Pbrt.Decoder.key d with
| None -> (
)
| Some (1, Pbrt.Varint) -> (
v.increment <- Pbrt.Decoder.int_as_varint d; increment_is_set := true;
loop ()
)
| Some (1, pk) -> raise (
Protobuf.Decoder.Failure (Protobuf.Decoder.Unexpected_payload ("Message(app_data), field(1)", pk))
)
| Some (2, Pbrt.Varint) -> (
v.process_id <- Pbrt.Decoder.int_as_varint d; process_id_is_set := true;
loop ()
)
| Some (2, pk) -> raise (
Protobuf.Decoder.Failure (Protobuf.Decoder.Unexpected_payload ("Message(app_data), field(2)", pk))
)
| Some (_, payload_kind) -> Pbrt.Decoder.skip d payload_kind; loop ()
in
loop ();
begin if not !process_id_is_set then raise Protobuf.Decoder.(Failure (Missing_field "process_id")) end;
begin if not !increment_is_set then raise Protobuf.Decoder.(Failure (Missing_field "increment")) end;
let v:app_data = Obj.magic v in
v
let rec decode_app_result d =
let v = default_app_result_mutable () in
let from_is_set = ref false in
let rec loop () =
match Pbrt.Decoder.key d with
| None -> (
)
| Some (1, Pbrt.Varint) -> (
v.from <- Pbrt.Decoder.int_as_varint d; from_is_set := true;
loop ()
)
| Some (1, pk) -> raise (
Protobuf.Decoder.Failure (Protobuf.Decoder.Unexpected_payload ("Message(app_result), field(1)", pk))
)
| Some (2, Pbrt.Varint) -> (
v.to_ <- Some (Pbrt.Decoder.int_as_varint d);
loop ()
)
| Some (2, pk) -> raise (
Protobuf.Decoder.Failure (Protobuf.Decoder.Unexpected_payload ("Message(app_result), field(2)", pk))
)
| Some (_, payload_kind) -> Pbrt.Decoder.skip d payload_kind; loop ()
in
loop ();
begin if not !from_is_set then raise Protobuf.Decoder.(Failure (Missing_field "from")) end;
let v:app_result = Obj.magic v in
v
let rec encode_app_data (v:app_data) encoder =
Pbrt.Encoder.key (1, Pbrt.Varint) encoder;
Pbrt.Encoder.int_as_varint v.increment encoder;
Pbrt.Encoder.key (2, Pbrt.Varint) encoder;
Pbrt.Encoder.int_as_varint v.process_id encoder;
()
let rec encode_app_result (v:app_result) encoder =
Pbrt.Encoder.key (1, Pbrt.Varint) encoder;
Pbrt.Encoder.int_as_varint v.from encoder;
(
match v.to_ with
| Some x -> (
Pbrt.Encoder.key (2, Pbrt.Varint) encoder;
Pbrt.Encoder.int_as_varint x encoder;
)
| None -> ();
);
()
let rec pp_app_data fmt (v:app_data) =
let pp_i fmt () =
Format.pp_open_vbox fmt 1;
Pbrt.Pp.pp_record_field "increment" Pbrt.Pp.pp_int fmt v.increment;
Pbrt.Pp.pp_record_field "process_id" Pbrt.Pp.pp_int fmt v.process_id;
Format.pp_close_box fmt ()
in
Pbrt.Pp.pp_brk pp_i fmt ()
let rec pp_app_result fmt (v:app_result) =
let pp_i fmt () =
Format.pp_open_vbox fmt 1;
Pbrt.Pp.pp_record_field "from" Pbrt.Pp.pp_int fmt v.from;
Pbrt.Pp.pp_record_field "to_" (Pbrt.Pp.pp_option Pbrt.Pp.pp_int) fmt v.to_;
Format.pp_close_box fmt ()
in
Pbrt.Pp.pp_brk pp_i fmt ()
| |
a0857925805f2361780bc56e84198bb0c643d5006fa4235e91d8771e3c273b8c | plumatic/grab-bag | core_test.clj | (ns kinesis.core-test
(:use plumbing.core plumbing.test clojure.test)
(:require
[plumbing.serialize :as serialize]
[kinesis.core :as kinesis]))
;; Round-trips one message batch through the Kinesis record encoder:
;; with the clock pinned to 100 ms, packing should yield exactly one
;; record, and decoding it should recover the timestamp and payload.
(deftest round-trip-test
  (with-millis 100
    (let [r (serialize/pack (kinesis/record-encoder) ["m1" {:m 2}])]
      (is-= 1 (count r))
      (is-= {:date 100 :messages ["m1" {:m 2}]}
            (kinesis/decode-record (first r))))))
| null | https://raw.githubusercontent.com/plumatic/grab-bag/a15e943322fbbf6f00790ce5614ba6f90de1a9b5/lib/kinesis/test/kinesis/core_test.clj | clojure | (ns kinesis.core-test
(:use plumbing.core plumbing.test clojure.test)
(:require
[plumbing.serialize :as serialize]
[kinesis.core :as kinesis]))
(deftest round-trip-test
(with-millis 100
(let [r (serialize/pack (kinesis/record-encoder) ["m1" {:m 2}])]
(is-= 1 (count r))
(is-= {:date 100 :messages ["m1" {:m 2}]}
(kinesis/decode-record (first r))))))
| |
bcc32882338e56e4dd92497b67b5474192e1182e8519cd737db95edcb5866fb1 | mput/sicp-solutions | 2_41.rkt | #lang racket
Solution for exercise 2_41 .
(require rackunit "../solutions/2_40.rkt")
(provide sum-pairs)
;; Return every pair from (uniq-pairs n) whose components sum to s,
;; each extended with that sum as a third element: (i j s).
(define (sum-pairs n s)
  (for/list ([p (in-list (uniq-pairs n))]
             #:when (= s (+ (first p) (second p))))
    (list (first p) (second p) s)))
| null | https://raw.githubusercontent.com/mput/sicp-solutions/fe12ad2b6f17c99978c8fe04b2495005986b8496/solutions/2_41.rkt | racket | #lang racket
Solution for exercise 2_41 .
(require rackunit "../solutions/2_40.rkt")
(provide sum-pairs)
(define (sum-pairs n s)
(map (lambda (x) (list (car x) (cadr x) (+ (car x) (cadr x))))
(filter (lambda (x) (= (+ (car x) (cadr x)) s))
(uniq-pairs n))))
| |
998a6f22dc716967d19d7a76f894a210d24341498932887f153350389848b77c | cedlemo/OCaml-GObject-Introspection | Test_property_info.ml |
* Copyright 2017 - 2019 ,
* This file is part of OCaml - GObject - Introspection .
*
* OCaml - GObject - Introspection is free software : you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation , either version 3 of the License , or
* any later version .
*
* OCaml - GObject - Introspection is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU General Public License for more details .
*
* You should have received a copy of the GNU General Public License
* along with . If not , see < / > .
* Copyright 2017-2019 Cedric LE MOIGNE,
* This file is part of OCaml-GObject-Introspection.
*
* OCaml-GObject-Introspection is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* any later version.
*
* OCaml-GObject-Introspection is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with OCaml-GObject-Introspection. If not, see </>.
*)
open Test_utils
open OUnit2
open GObject_introspection
let namespace = "Gtk"
let typelib = Repository.require namespace ()
let property_name = "accept-focus"
(* Check that the first property of Gtk.Window, converted back to a
   baseinfo, reports its type as Property. *)
let test_get_property_from_repo _ =
  match Repository.find_by_name namespace "Window" with
  | None -> assert_equal_string property_name "No base info found"
  | Some (base_info) ->
    match Base_info.get_type base_info with
    | Object -> (
        let info = Object_info.from_baseinfo base_info in
        let prop = Object_info.get_property info 0 in
        let base_prop = Property_info.to_baseinfo prop in
        match Base_info.get_type base_prop with
        | Property -> assert_equal true true
        | _ -> assert_equal_string "It should be a" "Property info"
      )
    | _ -> assert_equal_string property_name "No base info found"
let get_property_info () =
match Repository.find_by_name namespace "Window" with
| None -> None
| Some (base_info) ->
match Base_info.get_type base_info with
| Object -> let info = Object_info.from_baseinfo base_info in
let prop = Object_info.get_property info 0 in
Some prop
| _ -> None
let property_test fn =
match get_property_info () with
| None -> assert_equal_string property_name "No base info found"
| Some (info) -> fn info
let test_get_ownership_transfer _ =
property_test (fun info ->
let transfer = Property_info.get_ownership_transfer info in
assert_equal ~printer:(fun t ->
match t with
| Bindings.Arg_info.Nothing -> "nothing"
| Bindings.Arg_info.Container -> "container"
| Bindings.Arg_info.Everything -> "everything"
) Bindings.Arg_info.Nothing transfer
)
let test_get_type _ =
property_test (fun info ->
let info = Property_info.get_type info in
let type_name = Type_info.to_string info in
assert_equal_string "unknown" type_name
)
let test_get_flags _ =
property_test (fun info ->
let flags = Property_info.get_flags info in
let n = List.length flags in
let _ = assert_equal ~printer:string_of_int 3 n in
let flag_names = [
Bindings.GParam.Readwrite;
Bindings.GParam.Writable;
Bindings.GParam.Readable] in
let _ = List.fold_left (fun acc flag ->
let _ = assert_equal ~printer:(fun f -> GParam.flag_to_string f) (List.nth flag_names acc) flag in
acc + 1
) 0 flags
in ()
)
let tests =
"GObject Introspection InterfaceInfo tests" >:::
[
"Property_info find from repo" >:: test_get_property_from_repo;
"Property_info get ownership transfer" >:: test_get_ownership_transfer;
"Property_info get type" >:: test_get_type;
"Property_info get flags" >:: test_get_flags
]
| null | https://raw.githubusercontent.com/cedlemo/OCaml-GObject-Introspection/261c76d9e5d90f706edff1121a63bf5eb611399b/tests/Test_property_info.ml | ocaml |
* Copyright 2017 - 2019 ,
* This file is part of OCaml - GObject - Introspection .
*
* OCaml - GObject - Introspection is free software : you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation , either version 3 of the License , or
* any later version .
*
* OCaml - GObject - Introspection is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU General Public License for more details .
*
* You should have received a copy of the GNU General Public License
* along with . If not , see < / > .
* Copyright 2017-2019 Cedric LE MOIGNE,
* This file is part of OCaml-GObject-Introspection.
*
* OCaml-GObject-Introspection is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* any later version.
*
* OCaml-GObject-Introspection is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with OCaml-GObject-Introspection. If not, see </>.
*)
open Test_utils
open OUnit2
open GObject_introspection
let namespace = "Gtk"
let typelib = Repository.require namespace ()
let property_name = "accept-focus"
let test_get_property_from_repo _ =
match Repository.find_by_name namespace "Window" with
| None -> assert_equal_string property_name "No base info found"
| Some (base_info) ->
match Base_info.get_type base_info with
| Object -> (
let info = Object_info.from_baseinfo base_info in
let prop = Object_info.get_property info 0 in
let base_prop = Property_info.to_baseinfo prop in
match Base_info.get_type base_prop with
| Property -> assert_equal true true
| _ -> assert_equal_string "It should be a" "Property info"
)
| _ -> assert_equal_string property_name "No base info found"
let get_property_info () =
match Repository.find_by_name namespace "Window" with
| None -> None
| Some (base_info) ->
match Base_info.get_type base_info with
| Object -> let info = Object_info.from_baseinfo base_info in
let prop = Object_info.get_property info 0 in
Some prop
| _ -> None
let property_test fn =
match get_property_info () with
| None -> assert_equal_string property_name "No base info found"
| Some (info) -> fn info
let test_get_ownership_transfer _ =
property_test (fun info ->
let transfer = Property_info.get_ownership_transfer info in
assert_equal ~printer:(fun t ->
match t with
| Bindings.Arg_info.Nothing -> "nothing"
| Bindings.Arg_info.Container -> "container"
| Bindings.Arg_info.Everything -> "everything"
) Bindings.Arg_info.Nothing transfer
)
let test_get_type _ =
property_test (fun info ->
let info = Property_info.get_type info in
let type_name = Type_info.to_string info in
assert_equal_string "unknown" type_name
)
let test_get_flags _ =
property_test (fun info ->
let flags = Property_info.get_flags info in
let n = List.length flags in
let _ = assert_equal ~printer:string_of_int 3 n in
let flag_names = [
Bindings.GParam.Readwrite;
Bindings.GParam.Writable;
Bindings.GParam.Readable] in
let _ = List.fold_left (fun acc flag ->
let _ = assert_equal ~printer:(fun f -> GParam.flag_to_string f) (List.nth flag_names acc) flag in
acc + 1
) 0 flags
in ()
)
let tests =
"GObject Introspection InterfaceInfo tests" >:::
[
"Property_info find from repo" >:: test_get_property_from_repo;
"Property_info get ownership transfer" >:: test_get_ownership_transfer;
"Property_info get type" >:: test_get_type;
"Property_info get flags" >:: test_get_flags
]
| |
befb573ee5b7ecbc375abfcba1bdd8d30c1722f9f5e78d6f233aace109fec255 | MaskRay/OJHaskell | AlternatingLane.hs | calc :: [Int] -> [Int] -> Double
calc low high = sum $ zipWith
(\(a,b) (c,d) ->
sum [
(let l = c; r = min i d
in if l <= r then (i-(l+r)/2.0)*(r-l+1)/(d-c+1)
else 0.0) -
(let r = d; l = max i c
in if l <= r then (i-(l+r)/2.0)*(r-l+1)/(d-c+1)
else 0.0)
| i <- [a..b]] / (b-a+1)) f $ tail f
where f = zip (fmap fromIntegral low :: [Double]) (fmap fromIntegral high :: [Double])
main = do
([nn], c) <- fmap (splitAt 1 . lines) getContents
let n = read nn
(low, dummy:high) = splitAt n c
print $ calc (fmap read low) (fmap read high) | null | https://raw.githubusercontent.com/MaskRay/OJHaskell/ba24050b2480619f10daa7d37fca558182ba006c/TopCoder/SRM%20494/AlternatingLane.hs | haskell | calc :: [Int] -> [Int] -> Double
calc low high = sum $ zipWith
(\(a,b) (c,d) ->
sum [
(let l = c; r = min i d
in if l <= r then (i-(l+r)/2.0)*(r-l+1)/(d-c+1)
else 0.0) -
(let r = d; l = max i c
in if l <= r then (i-(l+r)/2.0)*(r-l+1)/(d-c+1)
else 0.0)
| i <- [a..b]] / (b-a+1)) f $ tail f
where f = zip (fmap fromIntegral low :: [Double]) (fmap fromIntegral high :: [Double])
main = do
([nn], c) <- fmap (splitAt 1 . lines) getContents
let n = read nn
(low, dummy:high) = splitAt n c
print $ calc (fmap read low) (fmap read high) | |
8359accdd9a864418a960c47f63223e151d7e0c3fcc9db8e72cd52f238741525 | andrejbauer/coop | desugar.mli | (** The desugaring phase converts [Sugared] syntax to [Desugared] syntax. It
performs the following transformations.
The programmer may freely mix (effect-free) expressions and (effectful)
computations. The desugaring phase separtes them and hoists computations
which appear inside expressions into outer `let`-bindings, as necessary.
The desugaring phase also keeps track of known identifiers, operations,
signals and type names, and makes sure they do not get mixed up or shadowed.
*)
(** Desugaring errors *)
type desugar_error
* A desugaring context is a list of known identifiers , which is used to
compute indices .
compute de Bruijn indices. *)
type context
(** The initial empty context *)
val initial : context
(** The exception signalling a desugaring error*)
exception Error of desugar_error Location.located
(** Print desugaring error. *)
val print_error : desugar_error -> Format.formatter -> unit
* Load a file and it .
val load : context -> string -> context * Desugared.toplevel list
* a toplevel .
val toplevel : context -> Sugared.toplevel -> context * Desugared.toplevel
| null | https://raw.githubusercontent.com/andrejbauer/coop/173d0889795c55a370f79fb42425b77a5c0c8464/src/desugar.mli | ocaml | * The desugaring phase converts [Sugared] syntax to [Desugared] syntax. It
performs the following transformations.
The programmer may freely mix (effect-free) expressions and (effectful)
computations. The desugaring phase separtes them and hoists computations
which appear inside expressions into outer `let`-bindings, as necessary.
The desugaring phase also keeps track of known identifiers, operations,
signals and type names, and makes sure they do not get mixed up or shadowed.
* Desugaring errors
* The initial empty context
* The exception signalling a desugaring error
* Print desugaring error. |
type desugar_error
* A desugaring context is a list of known identifiers , which is used to
compute indices .
compute de Bruijn indices. *)
type context
val initial : context
exception Error of desugar_error Location.located
val print_error : desugar_error -> Format.formatter -> unit
* Load a file and it .
val load : context -> string -> context * Desugared.toplevel list
* a toplevel .
val toplevel : context -> Sugared.toplevel -> context * Desugared.toplevel
|
ec8943f57cb62bb1eda4af7abc1b1de4315e18fc2dfb6ba265f81c932055b949 | wdebeaum/DeepSemLex | tan.lisp | ;;;;
;;;; W::tan
;;;;
(define-words :pos w::adj :templ central-adj-templ
:words (
(W::tan
(SENSES
((meta-data :origin joust :entry-date 20091027 :change-date 20091027 :comments nil :wn ("tan%3:00:01:chromatic:00"))
(LF-PARENT ONT::tan)
(SYNTAX (W::morph (:forms (-er))))
(templ central-adj-templ)
)
)
)
))
| null | https://raw.githubusercontent.com/wdebeaum/DeepSemLex/ce0e7523dd2b1ebd42b9e88ffbcfdb0fd339aaee/trips/src/LexiconManager/Data/new/tan.lisp | lisp |
W::tan
|
(define-words :pos w::adj :templ central-adj-templ
:words (
(W::tan
(SENSES
((meta-data :origin joust :entry-date 20091027 :change-date 20091027 :comments nil :wn ("tan%3:00:01:chromatic:00"))
(LF-PARENT ONT::tan)
(SYNTAX (W::morph (:forms (-er))))
(templ central-adj-templ)
)
)
)
))
|
ccd9a3c816c43367428292d7d6645a95f45d66b01f76460392ffbecf94135e17 | stackbuilders/stache | Main.hs | # LANGUAGE RecordWildCards #
# LANGUAGE TemplateHaskell #
module Main (main) where
import Data.Aeson (Value (..))
import qualified Data.Aeson as Aeson
import qualified Data.Aeson.KeyMap as Aeson.KeyMap
import Data.List (foldl')
import Data.List.NonEmpty (NonEmpty (..))
import qualified Data.List.NonEmpty as NE
import Data.Semigroup (sconcat)
import qualified Data.Text.Lazy.IO as T
import Data.Version (showVersion)
import qualified Data.Yaml as Yaml
import Development.GitRev
import Options.Applicative
import Paths_stache (version)
import System.Exit (exitFailure)
import System.FilePath (takeExtension)
import System.IO (hPutStrLn, stderr)
import Text.Mustache
main :: IO ()
main = do
Opts {..} <- execParser optsParserInfo
template <-
sconcat
<$> mapM (compileMustacheDir optTarget) optTemplateDirs
context <-
foldl' mergeContexts emptyContext
<$> mapM loadContext optContextFiles
let rendered = renderMustache template context
case optOutputFile of
Nothing -> T.putStrLn rendered
Just ofile -> T.writeFile ofile rendered
----------------------------------------------------------------------------
-- Command line options parsing
-- | Command line options.
data Opts = Opts
{ -- | Context files.
optContextFiles :: [FilePath],
-- | Where to save the result.
optOutputFile :: Maybe FilePath,
-- | Name of the template to render.
optTarget :: PName,
-- | Directories with templates.
optTemplateDirs :: NonEmpty FilePath
}
optsParserInfo :: ParserInfo Opts
optsParserInfo =
info (helper <*> ver <*> optsParser) . mconcat $
[ fullDesc,
progDesc "Command line interface to the Stache template processor",
header "stache—a simple implementation of Mustache templates"
]
where
ver :: Parser (a -> a)
ver =
infoOption verStr . mconcat $
[ long "version",
short 'v',
help "Print version of the program"
]
verStr =
unwords
[ "stache",
showVersion version,
$gitBranch,
$gitHash
]
optsParser :: Parser Opts
optsParser =
Opts
<$> many
( (strOption . mconcat)
[ long "context",
short 'c',
metavar "CONTEXT",
help "Context file in YAML or JSON format"
]
)
<*> (optional . strOption . mconcat)
[ long "ofile",
short 'o',
metavar "OFILE",
help "Save the rendered document to this file (otherwise write to stdout)"
]
<*> (argument str . mconcat)
[ metavar "TARGET",
help "Name of the template to render"
]
<*> (fmap NE.fromList . some)
( (argument str . mconcat)
[ metavar "DIR",
help "Template directories"
]
)
----------------------------------------------------------------------------
-- Helpers
-- | File context from a YAML or JSON file.
loadContext :: FilePath -> IO Value
loadContext file = do
let readYaml =
either
(Left . Yaml.prettyPrintParseException)
Right
<$> Yaml.decodeFileEither file
econtext <- case takeExtension file of
".yml" -> readYaml
".yaml" -> readYaml
_ -> Aeson.eitherDecodeFileStrict file
case econtext of
Left err -> spitErrorAndDie err
Right v@(Aeson.Object _) -> return v
Right _ -> spitErrorAndDie "context file should contain an object"
mergeContexts :: Value -> Value -> Value
mergeContexts (Aeson.Object m0) (Aeson.Object m1) =
Aeson.Object (Aeson.KeyMap.union m0 m1)
mergeContexts _ _ = error "context merge failed"
emptyContext :: Value
emptyContext = Aeson.object []
spitErrorAndDie :: String -> IO a
spitErrorAndDie err = do
hPutStrLn stderr err
exitFailure
| null | https://raw.githubusercontent.com/stackbuilders/stache/56dee2cd9a05926d916cb2873ca75dbb18da4f78/app/Main.hs | haskell | --------------------------------------------------------------------------
Command line options parsing
| Command line options.
| Context files.
| Where to save the result.
| Name of the template to render.
| Directories with templates.
--------------------------------------------------------------------------
Helpers
| File context from a YAML or JSON file. | # LANGUAGE RecordWildCards #
# LANGUAGE TemplateHaskell #
module Main (main) where
import Data.Aeson (Value (..))
import qualified Data.Aeson as Aeson
import qualified Data.Aeson.KeyMap as Aeson.KeyMap
import Data.List (foldl')
import Data.List.NonEmpty (NonEmpty (..))
import qualified Data.List.NonEmpty as NE
import Data.Semigroup (sconcat)
import qualified Data.Text.Lazy.IO as T
import Data.Version (showVersion)
import qualified Data.Yaml as Yaml
import Development.GitRev
import Options.Applicative
import Paths_stache (version)
import System.Exit (exitFailure)
import System.FilePath (takeExtension)
import System.IO (hPutStrLn, stderr)
import Text.Mustache
main :: IO ()
main = do
Opts {..} <- execParser optsParserInfo
template <-
sconcat
<$> mapM (compileMustacheDir optTarget) optTemplateDirs
context <-
foldl' mergeContexts emptyContext
<$> mapM loadContext optContextFiles
let rendered = renderMustache template context
case optOutputFile of
Nothing -> T.putStrLn rendered
Just ofile -> T.writeFile ofile rendered
data Opts = Opts
optContextFiles :: [FilePath],
optOutputFile :: Maybe FilePath,
optTarget :: PName,
optTemplateDirs :: NonEmpty FilePath
}
optsParserInfo :: ParserInfo Opts
optsParserInfo =
info (helper <*> ver <*> optsParser) . mconcat $
[ fullDesc,
progDesc "Command line interface to the Stache template processor",
header "stache—a simple implementation of Mustache templates"
]
where
ver :: Parser (a -> a)
ver =
infoOption verStr . mconcat $
[ long "version",
short 'v',
help "Print version of the program"
]
verStr =
unwords
[ "stache",
showVersion version,
$gitBranch,
$gitHash
]
optsParser :: Parser Opts
optsParser =
Opts
<$> many
( (strOption . mconcat)
[ long "context",
short 'c',
metavar "CONTEXT",
help "Context file in YAML or JSON format"
]
)
<*> (optional . strOption . mconcat)
[ long "ofile",
short 'o',
metavar "OFILE",
help "Save the rendered document to this file (otherwise write to stdout)"
]
<*> (argument str . mconcat)
[ metavar "TARGET",
help "Name of the template to render"
]
<*> (fmap NE.fromList . some)
( (argument str . mconcat)
[ metavar "DIR",
help "Template directories"
]
)
loadContext :: FilePath -> IO Value
loadContext file = do
let readYaml =
either
(Left . Yaml.prettyPrintParseException)
Right
<$> Yaml.decodeFileEither file
econtext <- case takeExtension file of
".yml" -> readYaml
".yaml" -> readYaml
_ -> Aeson.eitherDecodeFileStrict file
case econtext of
Left err -> spitErrorAndDie err
Right v@(Aeson.Object _) -> return v
Right _ -> spitErrorAndDie "context file should contain an object"
mergeContexts :: Value -> Value -> Value
mergeContexts (Aeson.Object m0) (Aeson.Object m1) =
Aeson.Object (Aeson.KeyMap.union m0 m1)
mergeContexts _ _ = error "context merge failed"
emptyContext :: Value
emptyContext = Aeson.object []
spitErrorAndDie :: String -> IO a
spitErrorAndDie err = do
hPutStrLn stderr err
exitFailure
|
4867ba18824d08c9a6b8020e395b5d7d7d570981a1a9c212fa572ed09f23c275 | dym/movitz | partitions.lisp | $ I d : , v 1.1 2004/08/19 00:28:56 Exp $
(require :tmp/harddisk)
(provide :tmp/partitions)
(in-package muerte.x86-pc.harddisk)
(defstruct partition
bootable
start-cylinder
start-head
start-sector
type
end-cylinder
end-head
end-sector
start
size)
(defun read-partition-table (hdn &optional (sect 0))
"Reads the partition table of hard disk hdn, assuming it is located at sector sect; returns an array of the partitions found (doesn't support extended partitions yet)"
(let ((data (hd-read-sectors hdn sect 1))
(arr (make-array 4 :initial-element nil)))
(if (and (= (aref data 510) #x55) (= (aref data 511) #xAA))
(dotimes (i 4)
(let* ((addr (+ 446 (* i 16)))
(start (+ (aref data (+ addr 8))
(* #x100 (aref data (+ addr 9)))
(* #x10000 (aref data (+ addr 10)))
(* #x1000000 (aref data (+ addr 11)))))
(size (+ (aref data (+ addr 12))
(* #x100 (aref data (+ addr 13)))
(* #x10000 (aref data (+ addr 14)))
(* #x1000000 (aref data (+ addr 15))))))
(when (> size 0)
(setf (aref arr i)
(make-partition
:bootable (aref data addr)
:type (aref data (+ addr 4))
:start start
:size size)))))
(error "Invalid partition table in hd ~D, sector ~D!" hdn sect))
arr))
| null | https://raw.githubusercontent.com/dym/movitz/56176e1ebe3eabc15c768df92eca7df3c197cb3d/losp/tmp/partitions.lisp | lisp | $ I d : , v 1.1 2004/08/19 00:28:56 Exp $
(require :tmp/harddisk)
(provide :tmp/partitions)
(in-package muerte.x86-pc.harddisk)
(defstruct partition
bootable
start-cylinder
start-head
start-sector
type
end-cylinder
end-head
end-sector
start
size)
(defun read-partition-table (hdn &optional (sect 0))
"Reads the partition table of hard disk hdn, assuming it is located at sector sect; returns an array of the partitions found (doesn't support extended partitions yet)"
(let ((data (hd-read-sectors hdn sect 1))
(arr (make-array 4 :initial-element nil)))
(if (and (= (aref data 510) #x55) (= (aref data 511) #xAA))
(dotimes (i 4)
(let* ((addr (+ 446 (* i 16)))
(start (+ (aref data (+ addr 8))
(* #x100 (aref data (+ addr 9)))
(* #x10000 (aref data (+ addr 10)))
(* #x1000000 (aref data (+ addr 11)))))
(size (+ (aref data (+ addr 12))
(* #x100 (aref data (+ addr 13)))
(* #x10000 (aref data (+ addr 14)))
(* #x1000000 (aref data (+ addr 15))))))
(when (> size 0)
(setf (aref arr i)
(make-partition
:bootable (aref data addr)
:type (aref data (+ addr 4))
:start start
:size size)))))
(error "Invalid partition table in hd ~D, sector ~D!" hdn sect))
arr))
| |
6b362ae8f09368ebea071195ee445cec5a0f1ad6a6dbf3278096dbae228a0f5f | kaoskorobase/hsndfile | Internal.hs | # LANGUAGE ForeignFunctionInterface #
module Sound.File.Sndfile.Buffer.Internal
(
IOFunc
, hBufIO
, sf_readf_int16
, sf_writef_int16
, sf_readf_int32
, sf_writef_int32
, sf_readf_float
, sf_writef_float
, sf_readf_double
, sf_writef_double
) where
import Data.Int (Int16, Int32)
import Foreign.Ptr (Ptr)
import Foreign.C.Types (CLLong(..))
import Sound.File.Sndfile.Interface (Count, Handle(..), HandlePtr)
type IOFunc a = HandlePtr -> Ptr a -> CLLong -> IO CLLong
hBufIO :: IOFunc a -> Handle -> Ptr a -> Count -> IO Count
hBufIO f h ptr = fmap fromIntegral . f (hPtr h) ptr . fromIntegral
foreign import ccall unsafe "sf_readf_short" sf_readf_int16 :: IOFunc Int16
foreign import ccall unsafe "sf_writef_short" sf_writef_int16 :: IOFunc Int16
foreign import ccall unsafe "sf_readf_int" sf_readf_int32 :: IOFunc Int32
foreign import ccall unsafe "sf_writef_int" sf_writef_int32 :: IOFunc Int32
foreign import ccall unsafe "sf_readf_float" sf_readf_float :: IOFunc Float
foreign import ccall unsafe "sf_writef_float" sf_writef_float :: IOFunc Float
foreign import ccall unsafe "sf_readf_double" sf_readf_double :: IOFunc Double
foreign import ccall unsafe "sf_writef_double" sf_writef_double :: IOFunc Double
| null | https://raw.githubusercontent.com/kaoskorobase/hsndfile/f532a1fdf119a543d7db20fe21f11ced5cffbb21/Sound/File/Sndfile/Buffer/Internal.hs | haskell | # LANGUAGE ForeignFunctionInterface #
module Sound.File.Sndfile.Buffer.Internal
(
IOFunc
, hBufIO
, sf_readf_int16
, sf_writef_int16
, sf_readf_int32
, sf_writef_int32
, sf_readf_float
, sf_writef_float
, sf_readf_double
, sf_writef_double
) where
import Data.Int (Int16, Int32)
import Foreign.Ptr (Ptr)
import Foreign.C.Types (CLLong(..))
import Sound.File.Sndfile.Interface (Count, Handle(..), HandlePtr)
type IOFunc a = HandlePtr -> Ptr a -> CLLong -> IO CLLong
hBufIO :: IOFunc a -> Handle -> Ptr a -> Count -> IO Count
hBufIO f h ptr = fmap fromIntegral . f (hPtr h) ptr . fromIntegral
foreign import ccall unsafe "sf_readf_short" sf_readf_int16 :: IOFunc Int16
foreign import ccall unsafe "sf_writef_short" sf_writef_int16 :: IOFunc Int16
foreign import ccall unsafe "sf_readf_int" sf_readf_int32 :: IOFunc Int32
foreign import ccall unsafe "sf_writef_int" sf_writef_int32 :: IOFunc Int32
foreign import ccall unsafe "sf_readf_float" sf_readf_float :: IOFunc Float
foreign import ccall unsafe "sf_writef_float" sf_writef_float :: IOFunc Float
foreign import ccall unsafe "sf_readf_double" sf_readf_double :: IOFunc Double
foreign import ccall unsafe "sf_writef_double" sf_writef_double :: IOFunc Double
| |
2b3b0e4cb00c071eb14f35fbf1b980646992b558ded2e951b6646c9d6919615c | danieljharvey/mimsa | Primitives.hs | {-# LANGUAGE OverloadedStrings #-}
module Calc.Parser.Primitives
( primParser,
intParser,
)
where
import Calc.Parser.Shared
import Calc.Parser.Types
import Calc.Types.Expr
import Data.Functor (($>))
import Text.Megaparsec.Char
import qualified Text.Megaparsec.Char.Lexer as L
----
intParser :: Parser Int
intParser =
L.signed (string "" $> ()) L.decimal
primParser :: Parser ParserExpr
primParser = myLexeme $ addLocation (EPrim mempty <$> intParser)
| null | https://raw.githubusercontent.com/danieljharvey/mimsa/296ab9bcbdbaf682fa76921ce3c80d4bbafb52ae/llvm-calc/src/Calc/Parser/Primitives.hs | haskell | # LANGUAGE OverloadedStrings #
-- |
module Calc.Parser.Primitives
( primParser,
intParser,
)
where
import Calc.Parser.Shared
import Calc.Parser.Types
import Calc.Types.Expr
import Data.Functor (($>))
import Text.Megaparsec.Char
import qualified Text.Megaparsec.Char.Lexer as L
intParser :: Parser Int
intParser =
L.signed (string "" $> ()) L.decimal
primParser :: Parser ParserExpr
primParser = myLexeme $ addLocation (EPrim mempty <$> intParser)
|
498998b05cfac0fe3e4f158edae15476e552a3846b0f4dcc7924f21583bd0c31 | agrafix/Spock | Routing.hs | # LANGUAGE DataKinds #
module Web.Spock.Routing where
import Control.Monad.Trans
import Data.HVect hiding (head)
import qualified Data.Text as T
import qualified Network.Wai as Wai
import Web.Routing.Combinators
import Web.Spock.Action
import Web.Spock.Internal.Wire (SpockMethod (..))
class RouteM t where
addMiddleware :: Monad m => Wai.Middleware -> t ctx m ()
withPrehook :: MonadIO m => ActionCtxT ctx m ctx' -> t ctx' m () -> t ctx m ()
wireAny :: Monad m => SpockMethod -> ([T.Text] -> ActionCtxT ctx m ()) -> t ctx m ()
wireRoute ::
(Monad m, HasRep xs) =>
SpockMethod ->
Path xs ps ->
HVectElim xs (ActionCtxT ctx m ()) ->
t ctx m ()
| null | https://raw.githubusercontent.com/agrafix/Spock/6055362b54f2fae5418188c3fc2fc1659ca43e79/Spock-core/src/Web/Spock/Routing.hs | haskell | # LANGUAGE DataKinds #
module Web.Spock.Routing where
import Control.Monad.Trans
import Data.HVect hiding (head)
import qualified Data.Text as T
import qualified Network.Wai as Wai
import Web.Routing.Combinators
import Web.Spock.Action
import Web.Spock.Internal.Wire (SpockMethod (..))
class RouteM t where
addMiddleware :: Monad m => Wai.Middleware -> t ctx m ()
withPrehook :: MonadIO m => ActionCtxT ctx m ctx' -> t ctx' m () -> t ctx m ()
wireAny :: Monad m => SpockMethod -> ([T.Text] -> ActionCtxT ctx m ()) -> t ctx m ()
wireRoute ::
(Monad m, HasRep xs) =>
SpockMethod ->
Path xs ps ->
HVectElim xs (ActionCtxT ctx m ()) ->
t ctx m ()
| |
7223eb1bd2a97ffbd50e78851132efd268fb0dd2c7d4281c79f1105bd9d5958f | iamFIREcracker/adventofcode | day19.lisp | (defpackage :aoc/2020/19 #.cl-user::*aoc-use*)
(in-package :aoc/2020/19)
(defun parse-sub-rule (string)
(if (find #\" string)
(list (string (char string 1)))
(mapcar #'parse-integer (cl-ppcre:split " " string))))
(defun parse-sub-rules (string)
(let ((sub-rules (cl-ppcre:split " \\| " string)))
(mapcar #'parse-sub-rule sub-rules)))
(defun parse-rule (string)
(cl-ppcre:register-groups-bind ((#'parse-integer id) rest)
("(\\d+): (.*)" string)
(list id (parse-sub-rules rest))))
(defun id (rule) (car rule))
(defun sub-rules (rule) (cadr rule))
(defun parse-input (data)
(let (groups current)
(dolist (string (append data '("")))
(if (string= string "")
(setf groups (cons (reverse current) groups) current nil)
(setf current (cons string current))))
(list
(mapcar #'parse-rule (second groups))
(first groups))))
(defun rules (input) (car input))
(defun messages (input) (cadr input))
(defparameter *max-recursion* 20)
(defun make-regexp (rules)
(let ((sub-rules-map (make-hash-table)))
(dolist (rule rules)
(setf (gethash (id rule) sub-rules-map) (sub-rules rule)))
(labels ((recur (id depth &aux (sub-rules (gethash id sub-rules-map)))
(cond
((= depth *max-recursion*) "") ; too much recursion, give up!
((not (numberp id)) id) ; termination rule, return it.
(t
(format nil "(~{~A~^|~})"
(loop for sub-rule in sub-rules collect
(format nil "~{~A~}"
(loop for id in sub-rule
collect (recur id (1+ depth))))))))))
(format nil "^~A$" (recur 0 0)))))
(defun count-matches (data &aux (input (parse-input data)))
(let* ((regexp (make-regexp (rules input)))
(scanner (cl-ppcre:create-scanner regexp)))
(count-if (lambda (s) (cl-ppcre:all-matches scanner s)) (messages input))))
(defun prepare-part2 (data)
(loop for string in data
if (string= string "8: 42") collect "8: 42 | 42 8"
else if (string= string "11: 42 31") collect "11: 42 31 | 42 11 31"
else collect string))
(define-solution (2020 19) (data)
(values (count-matches data)
(count-matches (prepare-part2 data))))
(define-test (2020 19) (248 381))
| null | https://raw.githubusercontent.com/iamFIREcracker/adventofcode/c395df5e15657f0b9be6ec555e68dc777b0eb7ab/src/2020/day19.lisp | lisp | too much recursion, give up!
termination rule, return it. | (defpackage :aoc/2020/19 #.cl-user::*aoc-use*)
(in-package :aoc/2020/19)
(defun parse-sub-rule (string)
(if (find #\" string)
(list (string (char string 1)))
(mapcar #'parse-integer (cl-ppcre:split " " string))))
(defun parse-sub-rules (string)
(let ((sub-rules (cl-ppcre:split " \\| " string)))
(mapcar #'parse-sub-rule sub-rules)))
(defun parse-rule (string)
(cl-ppcre:register-groups-bind ((#'parse-integer id) rest)
("(\\d+): (.*)" string)
(list id (parse-sub-rules rest))))
(defun id (rule) (car rule))
(defun sub-rules (rule) (cadr rule))
(defun parse-input (data)
(let (groups current)
(dolist (string (append data '("")))
(if (string= string "")
(setf groups (cons (reverse current) groups) current nil)
(setf current (cons string current))))
(list
(mapcar #'parse-rule (second groups))
(first groups))))
(defun rules (input) (car input))
(defun messages (input) (cadr input))
(defparameter *max-recursion* 20)
(defun make-regexp (rules)
(let ((sub-rules-map (make-hash-table)))
(dolist (rule rules)
(setf (gethash (id rule) sub-rules-map) (sub-rules rule)))
(labels ((recur (id depth &aux (sub-rules (gethash id sub-rules-map)))
(cond
(t
(format nil "(~{~A~^|~})"
(loop for sub-rule in sub-rules collect
(format nil "~{~A~}"
(loop for id in sub-rule
collect (recur id (1+ depth))))))))))
(format nil "^~A$" (recur 0 0)))))
(defun count-matches (data &aux (input (parse-input data)))
(let* ((regexp (make-regexp (rules input)))
(scanner (cl-ppcre:create-scanner regexp)))
(count-if (lambda (s) (cl-ppcre:all-matches scanner s)) (messages input))))
(defun prepare-part2 (data)
(loop for string in data
if (string= string "8: 42") collect "8: 42 | 42 8"
else if (string= string "11: 42 31") collect "11: 42 31 | 42 11 31"
else collect string))
(define-solution (2020 19) (data)
(values (count-matches data)
(count-matches (prepare-part2 data))))
(define-test (2020 19) (248 381))
|
83678a216c4f72ccecd918dae8838a5d16a0304d8d4de4e7e0e1b2993e309221 | malgo-lang/malgo | Unify.hs | # LANGUAGE CPP #
# LANGUAGE UndecidableInstances #
-- | Unification
module Malgo.Infer.Unify (Constraint (..), MonadBind (..), solve, generalize, generalizeMutRecs, instantiate) where
import Control.Lens (At (at), itraverse_, use, view, (%=), (?=))
import Data.HashMap.Strict qualified as HashMap
import Data.HashSet qualified as HashSet
import Data.Traversable (for)
import GHC.Records (HasField)
import Koriel.Id
import Koriel.Lens
import Koriel.MonadUniq
import Koriel.Pretty
import Malgo.Infer.TcEnv (TcEnv)
import Malgo.Infer.TypeRep
import Malgo.Prelude hiding (Constraint)
-- * Constraint
infixl 5 :~
-- | Constraint
-- a :~ b means 'a ~ b'
data Constraint = Type :~ Type
deriving stock (Eq, Ord, Show, Generic)
instance Pretty Constraint where
pPrint (t1 :~ t2) = pPrint t1 <+> "~" <+> pPrint t2
*
-- | Monad that handles substitution over type variables
class Monad m => MonadBind m where
lookupVar :: MetaVar -> m (Maybe Type)
default lookupVar :: (MonadTrans tr, MonadBind m1, m ~ tr m1) => MetaVar -> m (Maybe Type)
lookupVar v = lift (lookupVar v)
freshVar :: Maybe Text -> m MetaVar
default freshVar :: (MonadTrans tr, MonadBind m1, m ~ tr m1) => Maybe Text -> m MetaVar
freshVar = lift . freshVar
bindVar :: HasCallStack => Range -> MetaVar -> Type -> m ()
default bindVar :: (MonadTrans tr, MonadBind m1, m ~ tr m1) => Range -> MetaVar -> Type -> m ()
bindVar x v t = lift (bindVar x v t)
-- | Apply all substituation
zonk :: Type -> m Type
default zonk :: (MonadTrans tr, MonadBind m1, m ~ tr m1) => Type -> m Type
zonk t = lift (zonk t)
instance MonadBind m => MonadBind (ReaderT r m)
instance MonadBind m => MonadBind (ExceptT e m)
instance MonadBind m => MonadBind (StateT s m)
instance (Monoid w, MonadBind m) => MonadBind (WriterT w m)
-- | 'Right' (substituation, new constraints) or 'Left' (position, error message)
type UnifyResult = Either (Range, Doc) (HashMap MetaVar Type, [(Range, Constraint)])
| Unify two types
unify :: Range -> Type -> Type -> UnifyResult
unify _ (TyMeta v1) (TyMeta v2)
| v1 == v2 = pure (mempty, [])
| otherwise = pure (one (v1, TyMeta v2), [])
unify _ (TyMeta v) t = pure (one (v, t), [])
unify _ t (TyMeta v) = pure (one (v, t), [])
unify x (TyApp t11 t12) (TyApp t21 t22) = pure (mempty, [(x, t11 :~ t21), (x, t12 :~ t22)])
unify _ (TyVar v1) (TyVar v2) | v1 == v2 = pure (mempty, [])
unify _ (TyCon c1) (TyCon c2) | c1 == c2 = pure (mempty, [])
unify _ (TyPrim p1) (TyPrim p2) | p1 == p2 = pure (mempty, [])
unify x (TyArr l1 r1) (TyArr l2 r2) = pure (mempty, [(x, l1 :~ l2), (x, r1 :~ r2)])
unify _ (TyTuple n1) (TyTuple n2) | n1 == n2 = pure (mempty, [])
unify x (TyRecord kts1) (TyRecord kts2)
| HashMap.keys kts1 == HashMap.keys kts2 = pure (mempty, zipWith (\t1 t2 -> (x, t1 :~ t2)) (HashMap.elems kts1) (HashMap.elems kts2))
unify _ TyPtr TyPtr = pure (mempty, [])
unify _ TYPE TYPE = pure (mempty, [])
unify x t1 t2 = Left (x, unifyErrorMessage t1 t2)
where
unifyErrorMessage t1 t2 = "Couldn't match" $$ nest 7 (pPrint t1) $$ nest 2 ("with" <+> pPrint t2)
instance (MonadReader env m, HasUniqSupply env, MonadIO m, MonadState TcEnv m, HasModuleName env) => MonadBind (TypeUnifyT m) where
lookupVar v = view (at v) <$> TypeUnifyT get
freshVar hint = do
hint <- pure $ fromMaybe "t" hint
kind <- newTemporalId ("k" <> hint) ()
newVar <- newInternalId hint ()
kindCtx %= insertKind newVar (TyMeta $ MetaVar kind)
pure $ MetaVar newVar
bindVar x v t = do
when (occursCheck v t) $ errorOn x $ "Occurs check:" <+> quotes (pPrint v) <+> "for" <+> pPrint t
ctx <- use kindCtx
solve [(x, kindOf ctx v.metaVar :~ kindOf ctx t)]
TypeUnifyT $ at v ?= t
where
occursCheck :: MetaVar -> Type -> Bool
occursCheck v t = HashSet.member v (freevars t)
zonk (TyApp t1 t2) = TyApp <$> zonk t1 <*> zonk t2
zonk (TyVar v) = do
ctx <- use kindCtx
k <- zonk $ kindOf ctx v
kindCtx %= insertKind v k
pure $ TyVar v
zonk (TyCon c) = do
ctx <- use kindCtx
k <- zonk $ kindOf ctx c
kindCtx %= insertKind c k
pure $ TyCon c
zonk t@TyPrim {} = pure t
zonk (TyArr t1 t2) = TyArr <$> zonk t1 <*> zonk t2
zonk t@TyTuple {} = pure t
zonk (TyRecord kts) = TyRecord <$> traverse zonk kts
zonk TyPtr = pure TyPtr
zonk TYPE = pure TYPE
zonk t@(TyMeta v) = fromMaybe t <$> (traverse zonk =<< lookupVar v)
-- * Constraint solver
solve :: (MonadIO f, MonadBind f, MonadState TcEnv f) => [(Range, Constraint)] -> f ()
solve = solveLoop (5000 :: Int)
where
solveLoop n _ | n <= 0 = error "Constraint solver error: iteration limit"
solveLoop _ [] = pass
solveLoop n ((x, t1 :~ t2) : cs) = do
abbrEnv <- use typeSynonymMap
let t1' = fromMaybe t1 (expandTypeSynonym abbrEnv t1)
let t2' = fromMaybe t2 (expandTypeSynonym abbrEnv t2)
case unify x t1' t2' of
Left (pos, message) -> errorOn pos message
Right (binds, cs') -> do
itraverse_ (bindVar x) binds
constraints <- traverse zonkConstraint (cs' <> cs)
solveLoop (n - 1) constraints
zonkConstraint (m, x :~ y) = (m,) <$> ((:~) <$> zonk x <*> zonk y)
generalize :: MonadBind m => Range -> Type -> m (Scheme Type)
generalize x term = do
zonkedTerm <- zonk term
let fvs = HashSet.toList $ freevars zonkedTerm
let as = map toBound fvs
zipWithM_ (\fv a -> bindVar x fv $ TyVar a) fvs as
Forall as <$> zonk zonkedTerm
generalizeMutRecs :: MonadBind m => Range -> [Type] -> m ([TypeVar], [Type])
generalizeMutRecs x terms = do
zonkedTerms <- traverse zonk terms
let fvs = HashSet.toList $ mconcat $ map freevars zonkedTerms
let as = map toBound fvs
zipWithM_ (\fv a -> bindVar x fv $ TyVar a) fvs as
(as,) <$> traverse zonk zonkedTerms
-- `toBound` "generates" a new bound variable from a free variable.
-- But it's not really generating a new variable, it's just using the free variable as a bound variable.
-- The free variable will zonk to the bound variable as soon as the bound variable is bound (`bindVar`).
-- So we can reuse the free variable as a bound variable.
toBound :: HasField "metaVar" r a => r -> a
toBound tv = tv.metaVar
instantiate :: (MonadBind m, MonadIO m, MonadState TcEnv m) => Range -> Scheme Type -> m Type
instantiate x (Forall as t) = do
avs <- for as \a -> do
v <- TyMeta <$> freshVar (Just $ a.name)
ctx <- use kindCtx
solve [(x, kindOf ctx a :~ kindOf ctx v)]
pure (a, v)
pure $ applySubst (HashMap.fromList avs) t
| null | https://raw.githubusercontent.com/malgo-lang/malgo/002f522bf6376edf67716cef99033d87b46112f4/src/Malgo/Infer/Unify.hs | haskell | | Unification
* Constraint
| Constraint
a :~ b means 'a ~ b'
| Monad that handles substitution over type variables
| Apply all substituation
| 'Right' (substituation, new constraints) or 'Left' (position, error message)
* Constraint solver
`toBound` "generates" a new bound variable from a free variable.
But it's not really generating a new variable, it's just using the free variable as a bound variable.
The free variable will zonk to the bound variable as soon as the bound variable is bound (`bindVar`).
So we can reuse the free variable as a bound variable. | # LANGUAGE CPP #
# LANGUAGE UndecidableInstances #
module Malgo.Infer.Unify (Constraint (..), MonadBind (..), solve, generalize, generalizeMutRecs, instantiate) where
import Control.Lens (At (at), itraverse_, use, view, (%=), (?=))
import Data.HashMap.Strict qualified as HashMap
import Data.HashSet qualified as HashSet
import Data.Traversable (for)
import GHC.Records (HasField)
import Koriel.Id
import Koriel.Lens
import Koriel.MonadUniq
import Koriel.Pretty
import Malgo.Infer.TcEnv (TcEnv)
import Malgo.Infer.TypeRep
import Malgo.Prelude hiding (Constraint)
infixl 5 :~
data Constraint = Type :~ Type
deriving stock (Eq, Ord, Show, Generic)
instance Pretty Constraint where
pPrint (t1 :~ t2) = pPrint t1 <+> "~" <+> pPrint t2
*
class Monad m => MonadBind m where
lookupVar :: MetaVar -> m (Maybe Type)
default lookupVar :: (MonadTrans tr, MonadBind m1, m ~ tr m1) => MetaVar -> m (Maybe Type)
lookupVar v = lift (lookupVar v)
freshVar :: Maybe Text -> m MetaVar
default freshVar :: (MonadTrans tr, MonadBind m1, m ~ tr m1) => Maybe Text -> m MetaVar
freshVar = lift . freshVar
bindVar :: HasCallStack => Range -> MetaVar -> Type -> m ()
default bindVar :: (MonadTrans tr, MonadBind m1, m ~ tr m1) => Range -> MetaVar -> Type -> m ()
bindVar x v t = lift (bindVar x v t)
zonk :: Type -> m Type
default zonk :: (MonadTrans tr, MonadBind m1, m ~ tr m1) => Type -> m Type
zonk t = lift (zonk t)
instance MonadBind m => MonadBind (ReaderT r m)
instance MonadBind m => MonadBind (ExceptT e m)
instance MonadBind m => MonadBind (StateT s m)
instance (Monoid w, MonadBind m) => MonadBind (WriterT w m)
type UnifyResult = Either (Range, Doc) (HashMap MetaVar Type, [(Range, Constraint)])
| Unify two types
unify :: Range -> Type -> Type -> UnifyResult
unify _ (TyMeta v1) (TyMeta v2)
| v1 == v2 = pure (mempty, [])
| otherwise = pure (one (v1, TyMeta v2), [])
unify _ (TyMeta v) t = pure (one (v, t), [])
unify _ t (TyMeta v) = pure (one (v, t), [])
unify x (TyApp t11 t12) (TyApp t21 t22) = pure (mempty, [(x, t11 :~ t21), (x, t12 :~ t22)])
unify _ (TyVar v1) (TyVar v2) | v1 == v2 = pure (mempty, [])
unify _ (TyCon c1) (TyCon c2) | c1 == c2 = pure (mempty, [])
unify _ (TyPrim p1) (TyPrim p2) | p1 == p2 = pure (mempty, [])
unify x (TyArr l1 r1) (TyArr l2 r2) = pure (mempty, [(x, l1 :~ l2), (x, r1 :~ r2)])
unify _ (TyTuple n1) (TyTuple n2) | n1 == n2 = pure (mempty, [])
unify x (TyRecord kts1) (TyRecord kts2)
| HashMap.keys kts1 == HashMap.keys kts2 = pure (mempty, zipWith (\t1 t2 -> (x, t1 :~ t2)) (HashMap.elems kts1) (HashMap.elems kts2))
unify _ TyPtr TyPtr = pure (mempty, [])
unify _ TYPE TYPE = pure (mempty, [])
unify x t1 t2 = Left (x, unifyErrorMessage t1 t2)
where
unifyErrorMessage t1 t2 = "Couldn't match" $$ nest 7 (pPrint t1) $$ nest 2 ("with" <+> pPrint t2)
instance (MonadReader env m, HasUniqSupply env, MonadIO m, MonadState TcEnv m, HasModuleName env) => MonadBind (TypeUnifyT m) where
lookupVar v = view (at v) <$> TypeUnifyT get
freshVar hint = do
hint <- pure $ fromMaybe "t" hint
kind <- newTemporalId ("k" <> hint) ()
newVar <- newInternalId hint ()
kindCtx %= insertKind newVar (TyMeta $ MetaVar kind)
pure $ MetaVar newVar
bindVar x v t = do
when (occursCheck v t) $ errorOn x $ "Occurs check:" <+> quotes (pPrint v) <+> "for" <+> pPrint t
ctx <- use kindCtx
solve [(x, kindOf ctx v.metaVar :~ kindOf ctx t)]
TypeUnifyT $ at v ?= t
where
occursCheck :: MetaVar -> Type -> Bool
occursCheck v t = HashSet.member v (freevars t)
zonk (TyApp t1 t2) = TyApp <$> zonk t1 <*> zonk t2
zonk (TyVar v) = do
ctx <- use kindCtx
k <- zonk $ kindOf ctx v
kindCtx %= insertKind v k
pure $ TyVar v
zonk (TyCon c) = do
ctx <- use kindCtx
k <- zonk $ kindOf ctx c
kindCtx %= insertKind c k
pure $ TyCon c
zonk t@TyPrim {} = pure t
zonk (TyArr t1 t2) = TyArr <$> zonk t1 <*> zonk t2
zonk t@TyTuple {} = pure t
zonk (TyRecord kts) = TyRecord <$> traverse zonk kts
zonk TyPtr = pure TyPtr
zonk TYPE = pure TYPE
zonk t@(TyMeta v) = fromMaybe t <$> (traverse zonk =<< lookupVar v)
solve :: (MonadIO f, MonadBind f, MonadState TcEnv f) => [(Range, Constraint)] -> f ()
solve = solveLoop (5000 :: Int)
where
solveLoop n _ | n <= 0 = error "Constraint solver error: iteration limit"
solveLoop _ [] = pass
solveLoop n ((x, t1 :~ t2) : cs) = do
abbrEnv <- use typeSynonymMap
let t1' = fromMaybe t1 (expandTypeSynonym abbrEnv t1)
let t2' = fromMaybe t2 (expandTypeSynonym abbrEnv t2)
case unify x t1' t2' of
Left (pos, message) -> errorOn pos message
Right (binds, cs') -> do
itraverse_ (bindVar x) binds
constraints <- traverse zonkConstraint (cs' <> cs)
solveLoop (n - 1) constraints
zonkConstraint (m, x :~ y) = (m,) <$> ((:~) <$> zonk x <*> zonk y)
generalize :: MonadBind m => Range -> Type -> m (Scheme Type)
generalize x term = do
zonkedTerm <- zonk term
let fvs = HashSet.toList $ freevars zonkedTerm
let as = map toBound fvs
zipWithM_ (\fv a -> bindVar x fv $ TyVar a) fvs as
Forall as <$> zonk zonkedTerm
generalizeMutRecs :: MonadBind m => Range -> [Type] -> m ([TypeVar], [Type])
generalizeMutRecs x terms = do
zonkedTerms <- traverse zonk terms
let fvs = HashSet.toList $ mconcat $ map freevars zonkedTerms
let as = map toBound fvs
zipWithM_ (\fv a -> bindVar x fv $ TyVar a) fvs as
(as,) <$> traverse zonk zonkedTerms
toBound :: HasField "metaVar" r a => r -> a
toBound tv = tv.metaVar
instantiate :: (MonadBind m, MonadIO m, MonadState TcEnv m) => Range -> Scheme Type -> m Type
instantiate x (Forall as t) = do
avs <- for as \a -> do
v <- TyMeta <$> freshVar (Just $ a.name)
ctx <- use kindCtx
solve [(x, kindOf ctx a :~ kindOf ctx v)]
pure (a, v)
pure $ applySubst (HashMap.fromList avs) t
|
0830172fab01d4501cb6d1b074dae05cb16a2cfdc4e0f647dd708bbf877b68b9 | bennn/dissertation | base-types.rkt | #lang typed/racket
(define-type Color Symbol)
(require require-typed-check)
(require/typed/check "data.rkt"
[#:struct posn ([x : Real]
[y : Real])]
[#:struct block ([x : Real]
[y : Real]
[color : Color])]
[#:struct tetra ([center : posn]
[blocks : (Listof Block)])]
[#:struct world ([tetra : tetra]
[blocks : (Listof Block)])])
(define-type Posn posn)
(define-type Block block)
(define-type Tetra tetra)
(define-type World world)
(define-type BSet (Listof Block))
(provide
(struct-out posn)
(struct-out block)
(struct-out tetra)
(struct-out world)
Posn
Block
Tetra
World
Color
BSet
Color
BSet)
| null | https://raw.githubusercontent.com/bennn/dissertation/779bfe6f8fee19092849b7e2cfc476df33e9357b/dissertation/scrbl/jfp-2019/benchmarks/tetris/both/base-types.rkt | racket | #lang typed/racket
(define-type Color Symbol)
(require require-typed-check)
(require/typed/check "data.rkt"
[#:struct posn ([x : Real]
[y : Real])]
[#:struct block ([x : Real]
[y : Real]
[color : Color])]
[#:struct tetra ([center : posn]
[blocks : (Listof Block)])]
[#:struct world ([tetra : tetra]
[blocks : (Listof Block)])])
(define-type Posn posn)
(define-type Block block)
(define-type Tetra tetra)
(define-type World world)
(define-type BSet (Listof Block))
(provide
(struct-out posn)
(struct-out block)
(struct-out tetra)
(struct-out world)
Posn
Block
Tetra
World
Color
BSet
Color
BSet)
| |
07571ab3f34ec0622d827f1b75478249ce713929015bfbe80da25eaa025e174d | b-ryan/farmhand | config_test.clj | (ns farmhand.config-test
(:require [clojure.test :refer :all]
[farmhand.config :as cfg]))
(deftest test-redis-config
(is (= (with-redefs [cfg/all-env-vars (atom {"FARMHAND_REDIS_HOST" "abc"
"FARMHAND_REDIS_PORT" "123"
"FARMHAND_REDIS_URI" "foo"
"FARMHAND_REDIS_PASSWORD" "pw"})
cfg/classpath (atom {:redis {:password "betterpw"}})]
(cfg/redis {:uri "bar"}))
{:host "abc" :port 123 :uri "bar" :password "betterpw"})))
(deftest test-num-workers-config
(is (= (with-redefs [cfg/all-env-vars (atom {"FARMHAND_NUM_WORKERS" "4"})]
(cfg/num-workers nil))
4))
(is (= (with-redefs [cfg/all-env-vars (atom {"FARMHAND_NUM_WORKERS" "4"})
cfg/classpath (atom {:num-workers 8})]
(cfg/num-workers nil))
8))
(is (= (with-redefs [cfg/all-env-vars (atom {"FARMHAND_NUM_WORKERS" "4"})
cfg/classpath (atom {:num-workers 8})]
(cfg/num-workers 12))
12)))
(deftest test-queues-config
(is (= (with-redefs [cfg/all-env-vars (atom {"FARMHAND_QUEUES_EDN" "[{:name \"foo\"}]"})]
(cfg/queues nil))
[{:name "foo"}]))
(is (= (with-redefs [cfg/all-env-vars (atom {"FARMHAND_QUEUES_EDN" "[{:name \"foo\"}]"})
cfg/classpath (atom {:queues [{:name "bar"}]})]
(cfg/queues nil))
[{:name "bar"}]))
(is (= (with-redefs [cfg/all-env-vars (atom {"FARMHAND_QUEUES_EDN" "[{:name \"foo\"}]"})
cfg/classpath (atom {:queues [{:name "bar"}]})]
(cfg/queues [{:name "baz"}]))
[{:name "baz"}])))
| null | https://raw.githubusercontent.com/b-ryan/farmhand/b5c79124c710b69cce9d4a436228f9ae7e2cbb99/test/farmhand/config_test.clj | clojure | (ns farmhand.config-test
(:require [clojure.test :refer :all]
[farmhand.config :as cfg]))
(deftest test-redis-config
(is (= (with-redefs [cfg/all-env-vars (atom {"FARMHAND_REDIS_HOST" "abc"
"FARMHAND_REDIS_PORT" "123"
"FARMHAND_REDIS_URI" "foo"
"FARMHAND_REDIS_PASSWORD" "pw"})
cfg/classpath (atom {:redis {:password "betterpw"}})]
(cfg/redis {:uri "bar"}))
{:host "abc" :port 123 :uri "bar" :password "betterpw"})))
(deftest test-num-workers-config
(is (= (with-redefs [cfg/all-env-vars (atom {"FARMHAND_NUM_WORKERS" "4"})]
(cfg/num-workers nil))
4))
(is (= (with-redefs [cfg/all-env-vars (atom {"FARMHAND_NUM_WORKERS" "4"})
cfg/classpath (atom {:num-workers 8})]
(cfg/num-workers nil))
8))
(is (= (with-redefs [cfg/all-env-vars (atom {"FARMHAND_NUM_WORKERS" "4"})
cfg/classpath (atom {:num-workers 8})]
(cfg/num-workers 12))
12)))
(deftest test-queues-config
(is (= (with-redefs [cfg/all-env-vars (atom {"FARMHAND_QUEUES_EDN" "[{:name \"foo\"}]"})]
(cfg/queues nil))
[{:name "foo"}]))
(is (= (with-redefs [cfg/all-env-vars (atom {"FARMHAND_QUEUES_EDN" "[{:name \"foo\"}]"})
cfg/classpath (atom {:queues [{:name "bar"}]})]
(cfg/queues nil))
[{:name "bar"}]))
(is (= (with-redefs [cfg/all-env-vars (atom {"FARMHAND_QUEUES_EDN" "[{:name \"foo\"}]"})
cfg/classpath (atom {:queues [{:name "bar"}]})]
(cfg/queues [{:name "baz"}]))
[{:name "baz"}])))
| |
bd7462ad1d8ac77e4f49f5602d2ad6a26dbf0593618f60caad3f415517a36df3 | robert-strandh/Second-Climacs | define-modify-macro.lisp | (cl:in-package #:second-climacs-syntax-common-lisp)
(define-indentation-automaton compute-define-modify-macro-indentations
(tagbody
(next)
;; The current wad is the operator.
(maybe-assign-indentation 1 6)
(next)
;; The current wad ought to be the name.
(maybe-assign-indentation 6 4)
(next)
;; The current wad ought to be the lambda list.
(maybe-assign-indentation 4 2)
(compute-lambda-list-indentation current-wad client)
(next)
;; The current wad ought to be the function symbol.
(maybe-assign-indentation 2 2)
(next)
;; Come here if the optional documentation is given
(maybe-assign-indentation 2 2)))
(define-form-indentation-method
('#:common-lisp '#:define-modify-macro)
compute-define-modify-macro-indentations)
| null | https://raw.githubusercontent.com/robert-strandh/Second-Climacs/b654fc1f3f0db73970db3074e1a7878e15a5e9f2/Code/Syntax/Common-Lisp/Indentation/define-modify-macro.lisp | lisp | The current wad is the operator.
The current wad ought to be the name.
The current wad ought to be the lambda list.
The current wad ought to be the function symbol.
Come here if the optional documentation is given | (cl:in-package #:second-climacs-syntax-common-lisp)
(define-indentation-automaton compute-define-modify-macro-indentations
(tagbody
(next)
(maybe-assign-indentation 1 6)
(next)
(maybe-assign-indentation 6 4)
(next)
(maybe-assign-indentation 4 2)
(compute-lambda-list-indentation current-wad client)
(next)
(maybe-assign-indentation 2 2)
(next)
(maybe-assign-indentation 2 2)))
(define-form-indentation-method
('#:common-lisp '#:define-modify-macro)
compute-define-modify-macro-indentations)
|
6cd6e9da96b489d2d24dd9004e161f511d9267561d028f71bb4521f7093bf4b4 | haskell-repa/repa | Elt.hs | | Values that can be stored in Arrays .
# LANGUAGE MagicHash , UnboxedTuples , TypeSynonymInstances , FlexibleInstances #
# LANGUAGE DefaultSignatures , FlexibleContexts , TypeOperators #
module Data.Array.Repa.Eval.Elt
(Elt (..))
where
import GHC.Prim
import GHC.Exts
import GHC.Types
import GHC.Word
import GHC.Int
import GHC.Generics
-- Note that the touch# function is special because we can pass it boxed or unboxed
-- values. The argument type has kind ?, not just * or #.
-- | Element types that can be used with the blockwise filling functions.
--
-- This class is mainly used to define the `touch` method. This is used internally
in the imeplementation of to prevent let - binding from being floated
inappropriately by the GHC simplifier . Doing a ` seq ` sometimes is n't enough ,
because the GHC simplifier can erase these , and still move around the bindings .
--
class Elt a where
-- | Place a demand on a value at a particular point in an IO computation.
touch :: a -> IO ()
default touch :: (Generic a, GElt (Rep a)) => a -> IO ()
touch = gtouch . from
# INLINE touch #
| Generic zero value , helpful for debugging .
zero :: a
default zero :: (Generic a, GElt (Rep a)) => a
zero = to gzero
# INLINE zero #
| Generic one value , helpful for debugging .
one :: a
default one :: (Generic a, GElt (Rep a)) => a
one = to gone
# INLINE one #
class GElt f where
-- | Generic version of touch
gtouch :: f a -> IO ()
| Generic version of zero
gzero :: f a
-- | Generic version of gone
gone :: f a
-- Generic Definition ----------------------------------------------------------
instance GElt U1 where
gtouch _ = return ()
# INLINE gtouch #
gzero = U1
# INLINE gzero #
gone = U1
{-# INLINE gone #-}
instance (GElt a, GElt b) => GElt (a :*: b) where
gtouch (x :*: y) = gtouch x >> gtouch y
# INLINE gtouch #
gzero = gzero :*: gzero
# INLINE gzero #
gone = gone :*: gone
{-# INLINE gone #-}
instance (GElt a, GElt b) => GElt (a :+: b) where
gtouch (L1 x) = gtouch x
gtouch (R1 x) = gtouch x
# INLINE gtouch #
gzero = L1 gzero
# INLINE gzero #
gone = R1 gone
{-# INLINE gone #-}
instance (GElt a) => GElt (M1 i c a) where
gtouch (M1 x) = gtouch x
# INLINE gtouch #
gzero = M1 gzero
# INLINE gzero #
gone = M1 gone
{-# INLINE gone #-}
instance (Elt a) => GElt (K1 i a) where
gtouch (K1 x) = touch x
# INLINE gtouch #
gzero = K1 zero
# INLINE gzero #
gone = K1 one
{-# INLINE gone #-}
-- Bool -----------------------------------------------------------------------
instance Elt Bool where
# INLINE touch #
touch b
= IO (\state -> case touch# b state of
state' -> (# state', () #))
# INLINE zero #
zero = False
# INLINE one #
one = True
-- Floating -------------------------------------------------------------------
instance Elt Float where
# INLINE touch #
touch (F# f)
= IO (\state -> case touch# f state of
state' -> (# state', () #))
# INLINE zero #
zero = 0
# INLINE one #
one = 1
instance Elt Double where
# INLINE touch #
touch (D# d)
= IO (\state -> case touch# d state of
state' -> (# state', () #))
# INLINE zero #
zero = 0
# INLINE one #
one = 1
-- Int ------------------------------------------------------------------------
instance Elt Int where
# INLINE touch #
touch (I# i)
= IO (\state -> case touch# i state of
state' -> (# state', () #))
# INLINE zero #
zero = 0
# INLINE one #
one = 1
instance Elt Int8 where
# INLINE touch #
touch (I8# w)
= IO (\state -> case touch# w state of
state' -> (# state', () #))
# INLINE zero #
zero = 0
# INLINE one #
one = 1
instance Elt Int16 where
# INLINE touch #
touch (I16# w)
= IO (\state -> case touch# w state of
state' -> (# state', () #))
# INLINE zero #
zero = 0
# INLINE one #
one = 1
instance Elt Int32 where
# INLINE touch #
touch (I32# w)
= IO (\state -> case touch# w state of
state' -> (# state', () #))
# INLINE zero #
zero = 0
# INLINE one #
one = 1
instance Elt Int64 where
# INLINE touch #
touch (I64# w)
= IO (\state -> case touch# w state of
state' -> (# state', () #))
# INLINE zero #
zero = 0
# INLINE one #
one = 1
-- Word -----------------------------------------------------------------------
instance Elt Word where
# INLINE touch #
touch (W# i)
= IO (\state -> case touch# i state of
state' -> (# state', () #))
# INLINE zero #
zero = 0
# INLINE one #
one = 1
instance Elt Word8 where
# INLINE touch #
touch (W8# w)
= IO (\state -> case touch# w state of
state' -> (# state', () #))
# INLINE zero #
zero = 0
# INLINE one #
one = 1
instance Elt Word16 where
# INLINE touch #
touch (W16# w)
= IO (\state -> case touch# w state of
state' -> (# state', () #))
# INLINE zero #
zero = 0
# INLINE one #
one = 1
instance Elt Word32 where
# INLINE touch #
touch (W32# w)
= IO (\state -> case touch# w state of
state' -> (# state', () #))
# INLINE zero #
zero = 0
# INLINE one #
one = 1
instance Elt Word64 where
# INLINE touch #
touch (W64# w)
= IO (\state -> case touch# w state of
state' -> (# state', () #))
# INLINE zero #
zero = 0
# INLINE one #
one = 1
-- Tuple ----------------------------------------------------------------------
instance (Elt a, Elt b) => Elt (a, b) where
# INLINE touch #
touch (a, b)
= do touch a
touch b
# INLINE zero #
zero = (zero, zero)
# INLINE one #
one = (one, one)
instance (Elt a, Elt b, Elt c) => Elt (a, b, c) where
# INLINE touch #
touch (a, b, c)
= do touch a
touch b
touch c
# INLINE zero #
zero = (zero, zero, zero)
# INLINE one #
one = (one, one, one)
instance (Elt a, Elt b, Elt c, Elt d) => Elt (a, b, c, d) where
# INLINE touch #
touch (a, b, c, d)
= do touch a
touch b
touch c
touch d
# INLINE zero #
zero = (zero, zero, zero, zero)
# INLINE one #
one = (one, one, one, one)
instance (Elt a, Elt b, Elt c, Elt d, Elt e) => Elt (a, b, c, d, e) where
# INLINE touch #
touch (a, b, c, d, e)
= do touch a
touch b
touch c
touch d
touch e
# INLINE zero #
zero = (zero, zero, zero, zero, zero)
# INLINE one #
one = (one, one, one, one, one)
instance (Elt a, Elt b, Elt c, Elt d, Elt e, Elt f) => Elt (a, b, c, d, e, f) where
# INLINE touch #
touch (a, b, c, d, e, f)
= do touch a
touch b
touch c
touch d
touch e
touch f
# INLINE zero #
zero = (zero, zero, zero, zero, zero, zero)
# INLINE one #
one = (one, one, one, one, one, one)
| null | https://raw.githubusercontent.com/haskell-repa/repa/c867025e99fd008f094a5b18ce4dabd29bed00ba/repa/Data/Array/Repa/Eval/Elt.hs | haskell | Note that the touch# function is special because we can pass it boxed or unboxed
values. The argument type has kind ?, not just * or #.
| Element types that can be used with the blockwise filling functions.
This class is mainly used to define the `touch` method. This is used internally
| Place a demand on a value at a particular point in an IO computation.
| Generic version of touch
| Generic version of gone
Generic Definition ----------------------------------------------------------
# INLINE gone #
# INLINE gone #
# INLINE gone #
# INLINE gone #
# INLINE gone #
Bool -----------------------------------------------------------------------
Floating -------------------------------------------------------------------
Int ------------------------------------------------------------------------
Word -----------------------------------------------------------------------
Tuple ---------------------------------------------------------------------- | | Values that can be stored in Arrays .
# LANGUAGE MagicHash , UnboxedTuples , TypeSynonymInstances , FlexibleInstances #
# LANGUAGE DefaultSignatures , FlexibleContexts , TypeOperators #
module Data.Array.Repa.Eval.Elt
(Elt (..))
where
import GHC.Prim
import GHC.Exts
import GHC.Types
import GHC.Word
import GHC.Int
import GHC.Generics
in the imeplementation of to prevent let - binding from being floated
inappropriately by the GHC simplifier . Doing a ` seq ` sometimes is n't enough ,
because the GHC simplifier can erase these , and still move around the bindings .
class Elt a where
touch :: a -> IO ()
default touch :: (Generic a, GElt (Rep a)) => a -> IO ()
touch = gtouch . from
# INLINE touch #
| Generic zero value , helpful for debugging .
zero :: a
default zero :: (Generic a, GElt (Rep a)) => a
zero = to gzero
# INLINE zero #
| Generic one value , helpful for debugging .
one :: a
default one :: (Generic a, GElt (Rep a)) => a
one = to gone
# INLINE one #
class GElt f where
gtouch :: f a -> IO ()
| Generic version of zero
gzero :: f a
gone :: f a
instance GElt U1 where
gtouch _ = return ()
# INLINE gtouch #
gzero = U1
# INLINE gzero #
gone = U1
instance (GElt a, GElt b) => GElt (a :*: b) where
gtouch (x :*: y) = gtouch x >> gtouch y
# INLINE gtouch #
gzero = gzero :*: gzero
# INLINE gzero #
gone = gone :*: gone
instance (GElt a, GElt b) => GElt (a :+: b) where
gtouch (L1 x) = gtouch x
gtouch (R1 x) = gtouch x
# INLINE gtouch #
gzero = L1 gzero
# INLINE gzero #
gone = R1 gone
instance (GElt a) => GElt (M1 i c a) where
gtouch (M1 x) = gtouch x
# INLINE gtouch #
gzero = M1 gzero
# INLINE gzero #
gone = M1 gone
instance (Elt a) => GElt (K1 i a) where
gtouch (K1 x) = touch x
# INLINE gtouch #
gzero = K1 zero
# INLINE gzero #
gone = K1 one
instance Elt Bool where
# INLINE touch #
touch b
= IO (\state -> case touch# b state of
state' -> (# state', () #))
# INLINE zero #
zero = False
# INLINE one #
one = True
instance Elt Float where
# INLINE touch #
touch (F# f)
= IO (\state -> case touch# f state of
state' -> (# state', () #))
# INLINE zero #
zero = 0
# INLINE one #
one = 1
instance Elt Double where
# INLINE touch #
touch (D# d)
= IO (\state -> case touch# d state of
state' -> (# state', () #))
# INLINE zero #
zero = 0
# INLINE one #
one = 1
instance Elt Int where
# INLINE touch #
touch (I# i)
= IO (\state -> case touch# i state of
state' -> (# state', () #))
# INLINE zero #
zero = 0
# INLINE one #
one = 1
instance Elt Int8 where
# INLINE touch #
touch (I8# w)
= IO (\state -> case touch# w state of
state' -> (# state', () #))
# INLINE zero #
zero = 0
# INLINE one #
one = 1
instance Elt Int16 where
# INLINE touch #
touch (I16# w)
= IO (\state -> case touch# w state of
state' -> (# state', () #))
# INLINE zero #
zero = 0
# INLINE one #
one = 1
instance Elt Int32 where
# INLINE touch #
touch (I32# w)
= IO (\state -> case touch# w state of
state' -> (# state', () #))
# INLINE zero #
zero = 0
# INLINE one #
one = 1
instance Elt Int64 where
# INLINE touch #
touch (I64# w)
= IO (\state -> case touch# w state of
state' -> (# state', () #))
# INLINE zero #
zero = 0
# INLINE one #
one = 1
instance Elt Word where
# INLINE touch #
touch (W# i)
= IO (\state -> case touch# i state of
state' -> (# state', () #))
# INLINE zero #
zero = 0
# INLINE one #
one = 1
instance Elt Word8 where
# INLINE touch #
touch (W8# w)
= IO (\state -> case touch# w state of
state' -> (# state', () #))
# INLINE zero #
zero = 0
# INLINE one #
one = 1
instance Elt Word16 where
# INLINE touch #
touch (W16# w)
= IO (\state -> case touch# w state of
state' -> (# state', () #))
# INLINE zero #
zero = 0
# INLINE one #
one = 1
instance Elt Word32 where
# INLINE touch #
touch (W32# w)
= IO (\state -> case touch# w state of
state' -> (# state', () #))
# INLINE zero #
zero = 0
# INLINE one #
one = 1
instance Elt Word64 where
# INLINE touch #
touch (W64# w)
= IO (\state -> case touch# w state of
state' -> (# state', () #))
# INLINE zero #
zero = 0
# INLINE one #
one = 1
instance (Elt a, Elt b) => Elt (a, b) where
# INLINE touch #
touch (a, b)
= do touch a
touch b
# INLINE zero #
zero = (zero, zero)
# INLINE one #
one = (one, one)
instance (Elt a, Elt b, Elt c) => Elt (a, b, c) where
# INLINE touch #
touch (a, b, c)
= do touch a
touch b
touch c
# INLINE zero #
zero = (zero, zero, zero)
# INLINE one #
one = (one, one, one)
instance (Elt a, Elt b, Elt c, Elt d) => Elt (a, b, c, d) where
# INLINE touch #
touch (a, b, c, d)
= do touch a
touch b
touch c
touch d
# INLINE zero #
zero = (zero, zero, zero, zero)
# INLINE one #
one = (one, one, one, one)
instance (Elt a, Elt b, Elt c, Elt d, Elt e) => Elt (a, b, c, d, e) where
# INLINE touch #
touch (a, b, c, d, e)
= do touch a
touch b
touch c
touch d
touch e
# INLINE zero #
zero = (zero, zero, zero, zero, zero)
# INLINE one #
one = (one, one, one, one, one)
instance (Elt a, Elt b, Elt c, Elt d, Elt e, Elt f) => Elt (a, b, c, d, e, f) where
# INLINE touch #
touch (a, b, c, d, e, f)
= do touch a
touch b
touch c
touch d
touch e
touch f
# INLINE zero #
zero = (zero, zero, zero, zero, zero, zero)
# INLINE one #
one = (one, one, one, one, one, one)
|
a23a83f5972e24bc43ace49dd3571ce9632a023663d517f3fa490e9d287cebeb | rhaberkorn/ermacs | edit_window.erl | -module(edit_window).
-include("edit.hrl").
-compile(export_all).
%%-export([Function/Arity, ...]).
%% NB: Height is the total height including modeline
make_window(Buffer, Y, Width, Height) ->
Id = make_ref(),
W = #window{start_mark={start, Id},
y=Y,
width=Width,
height=Height,
id=Id},
attach(W, Buffer).
%% Number of lines for viewing text - excludes modeline
text_lines(W) when W#window.minibuffer == true ->
physical_lines(W);
text_lines(W) ->
physical_lines(W) - 1.
physical_lines(W) ->
W#window.height.
width(W) ->
W#window.width.
%% "Attach" a window to a buffer. Puts a mark in the buffer so that
%% the window knows where it's up to.
attach(Window, Buffer) ->
attach(Window, Buffer, 1).
attach(Window, Buffer, _Start) ->
edit_buf:add_mark(Buffer, Window#window.start_mark, 1, backward),
Window#window{buffer=Buffer}.
| null | https://raw.githubusercontent.com/rhaberkorn/ermacs/35c8f9b83ae85e25c646882be6ea6d340a88b05b/src/edit_window.erl | erlang | -export([Function/Arity, ...]).
NB: Height is the total height including modeline
Number of lines for viewing text - excludes modeline
"Attach" a window to a buffer. Puts a mark in the buffer so that
the window knows where it's up to. | -module(edit_window).
-include("edit.hrl").
-compile(export_all).
make_window(Buffer, Y, Width, Height) ->
Id = make_ref(),
W = #window{start_mark={start, Id},
y=Y,
width=Width,
height=Height,
id=Id},
attach(W, Buffer).
text_lines(W) when W#window.minibuffer == true ->
physical_lines(W);
text_lines(W) ->
physical_lines(W) - 1.
physical_lines(W) ->
W#window.height.
width(W) ->
W#window.width.
attach(Window, Buffer) ->
attach(Window, Buffer, 1).
attach(Window, Buffer, _Start) ->
edit_buf:add_mark(Buffer, Window#window.start_mark, 1, backward),
Window#window{buffer=Buffer}.
|
27856ada0758e0f2e9dfef5e2fed90279fce72cad636f037f030b1f51cdbc804 | dawidovsky/IIUWr | Szablon.rkt | #lang racket
;; expressions
(define (const? t)
(number? t))
(define (op? t)
(and (list? t)
(member (car t) '(+ - * /))))
(define (op-op e)
(car e))
(define (op-args e)
(cdr e))
(define (op-cons op args)
(cons op args))
(define (op->proc op)
(cond [(eq? op '+) +]
[(eq? op '*) *]
[(eq? op '-) -]
[(eq? op '/) /]))
(define (let-def? t)
(and (list? t)
(= (length t) 2)
(symbol? (car t))))
(define (let-def-var e)
(car e))
(define (let-def-expr e)
(cadr e))
(define (let-def-cons x e)
(list x e))
(define (let? t)
(and (list? t)
(= (length t) 3)
(eq? (car t) 'let)
(let-def? (cadr t))))
(define (let-def e)
(cadr e))
(define (let-expr e)
(caddr e))
(define (let-cons def e)
(list 'let def e))
(define (var? t)
(symbol? t))
(define (var-var e)
e)
(define (var-cons x)
x)
(define (arith/let-expr? t)
(or (const? t)
(and (op? t)
(andmap arith/let-expr? (op-args t)))
(and (let? t)
(arith/let-expr? (let-expr t))
(arith/let-expr? (let-def-expr (let-def t))))
(var? t)))
;; let-lifted expressions
(define (arith-expr? t)
(or (const? t)
(and (op? t)
(andmap arith-expr? (op-args t)))
(var? t)))
(define (let-lifted-expr? t)
(or (and (let? t)
(let-lifted-expr? (let-expr t))
(arith-expr? (let-def-expr (let-def t))))
(arith-expr? t)))
;; generating a symbol using a counter
(define (number->symbol i)
(string->symbol (string-append "x" (number->string i))))
;; environments (could be useful for something)
(define empty-env
null)
(define (add-to-env x v env)
(cons (list x v) env))
(define (find-in-env x env)
(cond [(null? env) (error "undefined variable" x)]
[(eq? x (caar env)) (cadar env)]
[else (find-in-env x (cdr env))]))
;; the let-lift procedure
(define (let-lift e)
TODO : Zaimplementuj !
(error "nie zaimplementowano!")
)
| null | https://raw.githubusercontent.com/dawidovsky/IIUWr/73f0f65fb141f82a05dac2573f39f6fa48a81409/MP/Pracownia7/Szablon.rkt | racket | expressions
let-lifted expressions
generating a symbol using a counter
environments (could be useful for something)
the let-lift procedure | #lang racket
(define (const? t)
(number? t))
(define (op? t)
(and (list? t)
(member (car t) '(+ - * /))))
(define (op-op e)
(car e))
(define (op-args e)
(cdr e))
(define (op-cons op args)
(cons op args))
(define (op->proc op)
(cond [(eq? op '+) +]
[(eq? op '*) *]
[(eq? op '-) -]
[(eq? op '/) /]))
(define (let-def? t)
(and (list? t)
(= (length t) 2)
(symbol? (car t))))
(define (let-def-var e)
(car e))
(define (let-def-expr e)
(cadr e))
(define (let-def-cons x e)
(list x e))
(define (let? t)
(and (list? t)
(= (length t) 3)
(eq? (car t) 'let)
(let-def? (cadr t))))
(define (let-def e)
(cadr e))
(define (let-expr e)
(caddr e))
(define (let-cons def e)
(list 'let def e))
(define (var? t)
(symbol? t))
(define (var-var e)
e)
(define (var-cons x)
x)
(define (arith/let-expr? t)
(or (const? t)
(and (op? t)
(andmap arith/let-expr? (op-args t)))
(and (let? t)
(arith/let-expr? (let-expr t))
(arith/let-expr? (let-def-expr (let-def t))))
(var? t)))
(define (arith-expr? t)
(or (const? t)
(and (op? t)
(andmap arith-expr? (op-args t)))
(var? t)))
(define (let-lifted-expr? t)
(or (and (let? t)
(let-lifted-expr? (let-expr t))
(arith-expr? (let-def-expr (let-def t))))
(arith-expr? t)))
(define (number->symbol i)
(string->symbol (string-append "x" (number->string i))))
(define empty-env
null)
(define (add-to-env x v env)
(cons (list x v) env))
(define (find-in-env x env)
(cond [(null? env) (error "undefined variable" x)]
[(eq? x (caar env)) (cadar env)]
[else (find-in-env x (cdr env))]))
(define (let-lift e)
TODO : Zaimplementuj !
(error "nie zaimplementowano!")
)
|
75ac686303d2f69b074259a67f66d2f3b0a634f350f1ba716cfab2924e1ceadf | coalton-lang/coalton | addressable.lisp | (coalton-library/utils:defstdlib-package #:coalton-library/addressable
(:use
#:coalton
#:coalton-library/classes)
(:export
#:Addressable #:eq? #:eq-hash))
(cl:in-package #:coalton-library/addressable)
this package mostly exists to reexport the ` Addressable ' class , and its method ` eq ? ' , which is defined in
early-classes.lisp but not exported from coalton - library / classes . it also seems a sensible place to put all
;; the instances we need to manually define, and to define functions which operate on `Addressable' instances.
#+coalton-release
(cl:declaim #.coalton-impl:*coalton-optimize-library*)
(coalton-toplevel
(declare unsafe-internal-eq? (:any -> :any -> Boolean))
(define (unsafe-internal-eq? a b)
(lisp Boolean (a b)
(cl:eq a b))))
(cl:defmacro define-instance-addressable (ty)
`(coalton-toplevel
(define-instance (Addressable ,ty)
(define eq? unsafe-internal-eq?))))
(define-instance-addressable (List :elt))
(define-instance-addressable String)
(coalton-toplevel
(declare eq-hash (Addressable :obj => :obj -> UFix))
(define (eq-hash obj)
"Compute a hash for OBJ based on its `eq?' identity.
Calling `eq-hash' on objects which are `eq?' will always return the same hash, assuming the `Addressable'
instance is law-abiding, i.e. directly wraps `cl:eq'.
Calls `CL:SXHASH' internally, so results are implementation-dependent. Recent SBCL versions make a reasonable
effort to provide unique hashes for non-`eq?' objects, but other implementations and older SBCL versions may
be prone to hash collisions on some types."
(lisp UFix (obj)
(cl:sxhash obj))))
| null | https://raw.githubusercontent.com/coalton-lang/coalton/d61804903f56c96094a0570a2fedc5eff65f211f/library/addressable.lisp | lisp | the instances we need to manually define, and to define functions which operate on `Addressable' instances. | (coalton-library/utils:defstdlib-package #:coalton-library/addressable
(:use
#:coalton
#:coalton-library/classes)
(:export
#:Addressable #:eq? #:eq-hash))
(cl:in-package #:coalton-library/addressable)
this package mostly exists to reexport the ` Addressable ' class , and its method ` eq ? ' , which is defined in
early-classes.lisp but not exported from coalton - library / classes . it also seems a sensible place to put all
#+coalton-release
(cl:declaim #.coalton-impl:*coalton-optimize-library*)
(coalton-toplevel
(declare unsafe-internal-eq? (:any -> :any -> Boolean))
(define (unsafe-internal-eq? a b)
(lisp Boolean (a b)
(cl:eq a b))))
(cl:defmacro define-instance-addressable (ty)
`(coalton-toplevel
(define-instance (Addressable ,ty)
(define eq? unsafe-internal-eq?))))
(define-instance-addressable (List :elt))
(define-instance-addressable String)
(coalton-toplevel
(declare eq-hash (Addressable :obj => :obj -> UFix))
(define (eq-hash obj)
"Compute a hash for OBJ based on its `eq?' identity.
Calling `eq-hash' on objects which are `eq?' will always return the same hash, assuming the `Addressable'
instance is law-abiding, i.e. directly wraps `cl:eq'.
Calls `CL:SXHASH' internally, so results are implementation-dependent. Recent SBCL versions make a reasonable
effort to provide unique hashes for non-`eq?' objects, but other implementations and older SBCL versions may
be prone to hash collisions on some types."
(lisp UFix (obj)
(cl:sxhash obj))))
|
bb7357bdf940c3790f6e48989a4e0be4c1e6e46809c7ef18657bc72b169f8824 | v-kolesnikov/sicp | 1_02_test.clj | (ns sicp.chapter01.1-02-test
(:require [clojure.test :refer :all]
[sicp.test-helper :refer :all]
[sicp.chapter01.1-02 :refer :all]))
(deftest test-asserts
(assert-equal -37/150 solution))
| null | https://raw.githubusercontent.com/v-kolesnikov/sicp/4298de6083440a75898e97aad658025a8cecb631/test/sicp/chapter01/1_02_test.clj | clojure | (ns sicp.chapter01.1-02-test
(:require [clojure.test :refer :all]
[sicp.test-helper :refer :all]
[sicp.chapter01.1-02 :refer :all]))
(deftest test-asserts
(assert-equal -37/150 solution))
| |
a8727ce3a37dfe1d93241ce7459d357fddf1562c338eeb1b0e3372a6325fdc7b | Flexiana/xiana-template | core.clj | (ns {{sanitized-name}}.core
(:require
[{{sanitized-name}}.controllers.index :as index]
[{{sanitized-name}}.controllers.re-frame :as re-frame]
[{{sanitized-name}}.controllers.swagger :as swagger]
[xiana.config :as config]
[xiana.db :as db]
[xiana.interceptor :as interceptors]
[xiana.rbac :as rbac]
[xiana.route :as routes]
[xiana.swagger :as xsw]
[xiana.session :as session]
[xiana.webserver :as ws]
[reitit.ring :as ring]
[clojure.walk]
[ring.util.response]
[reitit.coercion.malli]
[malli.util :as mu]
[reitit.swagger :as sswagger]
[xiana.commons :refer [rename-key]]))
(def routes
[["/" {:action #'index/handle-index
:swagger {:produces ["text/html"]}}]
["/re-frame" {:action #'re-frame/handle-index
:swagger {:produces ["text/html"]}}]
["/assets/*" (ring/create-resource-handler {:path "/"})]])
(defn ->system
[app-cfg]
(-> (config/config app-cfg)
(rename-key :framework.app/auth :auth)
xsw/->swagger-data
routes/reset
rbac/init
session/init-backend
db/connect
db/migrate!
ws/start))
(def app-cfg
{:routes routes
:router-interceptors []
:controller-interceptors [(interceptors/muuntaja)
interceptors/params
session/guest-session-interceptor
interceptors/view
interceptors/side-effect
db/db-access
rbac/interceptor]})
(defn -main
[& _args]
(->system app-cfg))
| null | https://raw.githubusercontent.com/Flexiana/xiana-template/6f42f39d179e8272fdac99248521a67d1a49e9f9/resources/leiningen/new/xiana/src/backend/app_name/core.clj | clojure | (ns {{sanitized-name}}.core
(:require
[{{sanitized-name}}.controllers.index :as index]
[{{sanitized-name}}.controllers.re-frame :as re-frame]
[{{sanitized-name}}.controllers.swagger :as swagger]
[xiana.config :as config]
[xiana.db :as db]
[xiana.interceptor :as interceptors]
[xiana.rbac :as rbac]
[xiana.route :as routes]
[xiana.swagger :as xsw]
[xiana.session :as session]
[xiana.webserver :as ws]
[reitit.ring :as ring]
[clojure.walk]
[ring.util.response]
[reitit.coercion.malli]
[malli.util :as mu]
[reitit.swagger :as sswagger]
[xiana.commons :refer [rename-key]]))
(def routes
[["/" {:action #'index/handle-index
:swagger {:produces ["text/html"]}}]
["/re-frame" {:action #'re-frame/handle-index
:swagger {:produces ["text/html"]}}]
["/assets/*" (ring/create-resource-handler {:path "/"})]])
(defn ->system
[app-cfg]
(-> (config/config app-cfg)
(rename-key :framework.app/auth :auth)
xsw/->swagger-data
routes/reset
rbac/init
session/init-backend
db/connect
db/migrate!
ws/start))
(def app-cfg
{:routes routes
:router-interceptors []
:controller-interceptors [(interceptors/muuntaja)
interceptors/params
session/guest-session-interceptor
interceptors/view
interceptors/side-effect
db/db-access
rbac/interceptor]})
(defn -main
[& _args]
(->system app-cfg))
| |
970bcb88d10182ff339b44d709c8ca27545336075bcf13024c220b56415d2be0 | BinaryAnalysisPlatform/bap-plugins | expect.mli | * Given a sequence of expected strings [ E ] and test corpora [ S ] , we
want to ensure , that each expected string matches at least one
substring in a testing corpora , that is not matched by other
expected string .
This is a Maximum Bipartile Matching problem . First we find a MBP
solution , and if in this solution all persons got a job , then we
are satisfied , otherwise we give a set of expectations , that were
not satisfied .
An example , may clarify the problem . Given a following expectation
specification : [ x;x;y ] , we will any input that has at least two
[ x ] and at least one [ y ] .
want to ensure, that each expected string matches at least one
substring in a testing corpora, that is not matched by other
expected string.
This is a Maximum Bipartile Matching problem. First we find a MBP
solution, and if in this solution all persons got a job, then we
are satisfied, otherwise we give a set of expectations, that were
not satisfied.
An example, may clarify the problem. Given a following expectation
specification: [x;x;y], we will any input that has at least two
[x] and at least one [y].
*)
type t
type misses
(** [create regexp] takes a list of POSIX regular expressions
and converts it into an expectation *)
val create : string list -> t
(** [all_matches expectation data] checks that provided list of
strings [data] satisfies given [expectation] *)
val all_matches : t -> string list -> [`Yes | `Missed of misses]
(** [pp_misses ppf misses] prints missed expectation into a given
formatter. *)
val pp_misses : Format.formatter -> misses -> unit
| null | https://raw.githubusercontent.com/BinaryAnalysisPlatform/bap-plugins/2e9aa5c7c24ef494d0e7db1b43c5ceedcb4196a8/test-expect/expect.mli | ocaml | * [create regexp] takes a list of POSIX regular expressions
and converts it into an expectation
* [all_matches expectation data] checks that provided list of
strings [data] satisfies given [expectation]
* [pp_misses ppf misses] prints missed expectation into a given
formatter. | * Given a sequence of expected strings [ E ] and test corpora [ S ] , we
want to ensure , that each expected string matches at least one
substring in a testing corpora , that is not matched by other
expected string .
This is a Maximum Bipartile Matching problem . First we find a MBP
solution , and if in this solution all persons got a job , then we
are satisfied , otherwise we give a set of expectations , that were
not satisfied .
An example , may clarify the problem . Given a following expectation
specification : [ x;x;y ] , we will any input that has at least two
[ x ] and at least one [ y ] .
want to ensure, that each expected string matches at least one
substring in a testing corpora, that is not matched by other
expected string.
This is a Maximum Bipartile Matching problem. First we find a MBP
solution, and if in this solution all persons got a job, then we
are satisfied, otherwise we give a set of expectations, that were
not satisfied.
An example, may clarify the problem. Given a following expectation
specification: [x;x;y], we will any input that has at least two
[x] and at least one [y].
*)
type t
type misses
val create : string list -> t
val all_matches : t -> string list -> [`Yes | `Missed of misses]
val pp_misses : Format.formatter -> misses -> unit
|
d3cc27739d654007832156a7e151f2466c54efe48658c90f68bc6bcab3f7d3bc | mirage/ocaml-uri | gen_services.ml |
* Copyright ( c ) 2012 Anil Madhavapeddy < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
*
* Copyright (c) 2012 Anil Madhavapeddy <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
*)
Convert a UNIX /etc / services into an ML module to lookup entries
open Printf
let hashtbl_add_list h k v =
try
let l = Hashtbl.find h k in
l := v :: !l
with Not_found -> Hashtbl.add h k (ref [v])
let spaced_list = Stringext.split_trim_left ~on:" " ~trim:" "
let nonempty = List.filter ((<>) "")
let iter f h =
let bindings = Hashtbl.fold (fun k v a -> (k,v)::a) h [] in
List.iter (fun (k, v) -> f k v) (List.sort compare bindings)
let _ =
let fin = open_in Sys.argv.(1) in
let tcp_ports = Hashtbl.create 1 in
let udp_ports = Hashtbl.create 1 in
let ports_tcp = Hashtbl.create 1 in
let ports_udp = Hashtbl.create 1 in
let tcp_services = Hashtbl.create 1 in
let udp_services = Hashtbl.create 1 in
(try while true do
let line = input_line fin in
match line.[0] with
|'#'|' ' -> ()
|_ ->
Scanf.sscanf line "%s %d/%s %s@\n" (fun svc port proto rest ->
let alias_s = List.hd (Stringext.split ~on:'#' (" "^rest)) in
let aliases = nonempty (spaced_list alias_s) in
match proto with
|"tcp" ->
List.iter (fun svc ->
hashtbl_add_list tcp_ports svc port;
hashtbl_add_list ports_tcp port svc;
Hashtbl.replace tcp_services svc ()
) (svc::aliases)
|"udp" ->
List.iter (fun svc ->
hashtbl_add_list udp_ports svc port;
hashtbl_add_list ports_udp port svc;
Hashtbl.replace udp_services svc ();
) (svc::aliases)
|"ddp" | "sctp" | "divert" -> ()
|x -> failwith ("unknown proto " ^ x)
)
done with End_of_file -> ());
let print_keys quote ppf table =
iter (fun k _v -> fprintf ppf ("%s; ") (quote k)) table in
let print_values quote ppf table =
iter (fun _k v -> fprintf ppf "[ %s ]; "
(String.concat "; " (List.map quote !v))) table in
let quote_string s = sprintf "%S" s in
printf "(* Autogenerated by gen_services.ml, do not edit directly *)\n";
printf "let tcp_port_of_service_tables = (\n [| %a |],\n [| %a |]\n)\n\n"
(print_keys quote_string) tcp_ports
(print_values string_of_int) tcp_ports;
printf "let udp_port_of_service_tables = (\n [| %a |],\n [| %a |]\n)\n\n"
(print_keys quote_string) udp_ports
(print_values string_of_int) udp_ports;
printf "let service_of_tcp_port_tables = (\n [| %a |],\n [| %a |]\n)\n\n"
(print_keys string_of_int) ports_tcp
(print_values quote_string) ports_tcp;
printf "let service_of_udp_port_tables = (\n [| %a |],\n [| %a |]\n)\n\n"
(print_keys string_of_int) ports_udp
(print_values quote_string) ports_udp;
let hashset_elems table =
Hashtbl.fold (fun k () a -> quote_string k :: a) table []
|> List.sort String.compare
|> String.concat "; "
in
printf "let known_tcp_services =\n [ %s ]\n\n"
(hashset_elems tcp_services);
printf "let known_udp_services =\n [ %s ]\n\n"
(hashset_elems udp_services);
printf "let known_services = [\n";
printf " (\"tcp\", known_tcp_services);\n";
printf " (\"udp\", known_udp_services) ]\n\n";
| null | https://raw.githubusercontent.com/mirage/ocaml-uri/b4a8375d9352d29ff495d35fc309609fad74631a/config/gen_services.ml | ocaml |
* Copyright ( c ) 2012 Anil Madhavapeddy < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
*
* Copyright (c) 2012 Anil Madhavapeddy <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
*)
Convert a UNIX /etc / services into an ML module to lookup entries
open Printf
let hashtbl_add_list h k v =
try
let l = Hashtbl.find h k in
l := v :: !l
with Not_found -> Hashtbl.add h k (ref [v])
let spaced_list = Stringext.split_trim_left ~on:" " ~trim:" "
let nonempty = List.filter ((<>) "")
let iter f h =
let bindings = Hashtbl.fold (fun k v a -> (k,v)::a) h [] in
List.iter (fun (k, v) -> f k v) (List.sort compare bindings)
let _ =
let fin = open_in Sys.argv.(1) in
let tcp_ports = Hashtbl.create 1 in
let udp_ports = Hashtbl.create 1 in
let ports_tcp = Hashtbl.create 1 in
let ports_udp = Hashtbl.create 1 in
let tcp_services = Hashtbl.create 1 in
let udp_services = Hashtbl.create 1 in
(try while true do
let line = input_line fin in
match line.[0] with
|'#'|' ' -> ()
|_ ->
Scanf.sscanf line "%s %d/%s %s@\n" (fun svc port proto rest ->
let alias_s = List.hd (Stringext.split ~on:'#' (" "^rest)) in
let aliases = nonempty (spaced_list alias_s) in
match proto with
|"tcp" ->
List.iter (fun svc ->
hashtbl_add_list tcp_ports svc port;
hashtbl_add_list ports_tcp port svc;
Hashtbl.replace tcp_services svc ()
) (svc::aliases)
|"udp" ->
List.iter (fun svc ->
hashtbl_add_list udp_ports svc port;
hashtbl_add_list ports_udp port svc;
Hashtbl.replace udp_services svc ();
) (svc::aliases)
|"ddp" | "sctp" | "divert" -> ()
|x -> failwith ("unknown proto " ^ x)
)
done with End_of_file -> ());
let print_keys quote ppf table =
iter (fun k _v -> fprintf ppf ("%s; ") (quote k)) table in
let print_values quote ppf table =
iter (fun _k v -> fprintf ppf "[ %s ]; "
(String.concat "; " (List.map quote !v))) table in
let quote_string s = sprintf "%S" s in
printf "(* Autogenerated by gen_services.ml, do not edit directly *)\n";
printf "let tcp_port_of_service_tables = (\n [| %a |],\n [| %a |]\n)\n\n"
(print_keys quote_string) tcp_ports
(print_values string_of_int) tcp_ports;
printf "let udp_port_of_service_tables = (\n [| %a |],\n [| %a |]\n)\n\n"
(print_keys quote_string) udp_ports
(print_values string_of_int) udp_ports;
printf "let service_of_tcp_port_tables = (\n [| %a |],\n [| %a |]\n)\n\n"
(print_keys string_of_int) ports_tcp
(print_values quote_string) ports_tcp;
printf "let service_of_udp_port_tables = (\n [| %a |],\n [| %a |]\n)\n\n"
(print_keys string_of_int) ports_udp
(print_values quote_string) ports_udp;
let hashset_elems table =
Hashtbl.fold (fun k () a -> quote_string k :: a) table []
|> List.sort String.compare
|> String.concat "; "
in
printf "let known_tcp_services =\n [ %s ]\n\n"
(hashset_elems tcp_services);
printf "let known_udp_services =\n [ %s ]\n\n"
(hashset_elems udp_services);
printf "let known_services = [\n";
printf " (\"tcp\", known_tcp_services);\n";
printf " (\"udp\", known_udp_services) ]\n\n";
| |
6ed1f4e3165bc2b04cf77aad9cc321680fbe524f6f0322f546b78c1914d808f6 | sansarip/owlbear | user.cljs | (ns cljs.user
(:require [owlbear.parse :as obp]))
(defn load-wasms!
"Returns a promise encapsulating the loading of
all necessary WASM resources
Optionally accepts a collection of `wasms` e.g.
```clojure
[[:html \"path/to/html.wasm\"]]
```"
([] (load-wasms! [[obp/html-lang-id "resources/tree-sitter-html.wasm"]
[obp/md-lang-id "resources/tree-sitter-markdown.wasm"]
[obp/ts-lang-id "resources/tree-sitter-typescript.wasm"]
[obp/tsx-lang-id "resources/tree-sitter-tsx.wasm"]]))
([wasms]
(js/Promise.all (map #(apply obp/load-language-wasm! %) wasms))))
(println "Loading language WASMs...")
(.then (load-wasms!) #(println "Finished loading WASMs!"))
| null | https://raw.githubusercontent.com/sansarip/owlbear/81c6c58e85b34079e6f5c6f45c49de16ab8823bc/dev/cljs/user.cljs | clojure | (ns cljs.user
(:require [owlbear.parse :as obp]))
(defn load-wasms!
"Returns a promise encapsulating the loading of
all necessary WASM resources
Optionally accepts a collection of `wasms` e.g.
```clojure
[[:html \"path/to/html.wasm\"]]
```"
([] (load-wasms! [[obp/html-lang-id "resources/tree-sitter-html.wasm"]
[obp/md-lang-id "resources/tree-sitter-markdown.wasm"]
[obp/ts-lang-id "resources/tree-sitter-typescript.wasm"]
[obp/tsx-lang-id "resources/tree-sitter-tsx.wasm"]]))
([wasms]
(js/Promise.all (map #(apply obp/load-language-wasm! %) wasms))))
(println "Loading language WASMs...")
(.then (load-wasms!) #(println "Finished loading WASMs!"))
| |
d59e500ce3d575614b26ee9d9fb0adf0f29bd33a74b6f8d1e71d24c5d47d2bbd | oakes/play-clj-examples | desktop_launcher.clj | (ns breakout.core.desktop-launcher
(:require [breakout.core :refer :all])
(:import [com.badlogic.gdx.backends.lwjgl LwjglApplication]
[org.lwjgl.input Keyboard])
(:gen-class))
(defn -main
[]
(LwjglApplication. breakout "breakout" 800 600)
(Keyboard/enableRepeatEvents true))
| null | https://raw.githubusercontent.com/oakes/play-clj-examples/449a505a068faeeb35d4ee4622c6a05e3fff6763/breakout/desktop/src/breakout/core/desktop_launcher.clj | clojure | (ns breakout.core.desktop-launcher
(:require [breakout.core :refer :all])
(:import [com.badlogic.gdx.backends.lwjgl LwjglApplication]
[org.lwjgl.input Keyboard])
(:gen-class))
(defn -main
[]
(LwjglApplication. breakout "breakout" 800 600)
(Keyboard/enableRepeatEvents true))
| |
44bc506f8ab8e5d687ac91c7cc6b1ce7669106db900e5116088079144c1c0174 | gsakkas/rite | 20060324-20:45:28-232a876d350ebc97006824843fc62110.seminal.ml |
exception Unimplemented
exception AlreadyDone
(*** part a ***)
# # # # # # # # # # # # # # # # # # # # # # # # # # # #
type move = Home | Forward of float | Turn of float | For of int*(move list)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # #
############ *)
type state = {cur_x:float ; cur_y:float ; cur_dir:float}
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # #
############################ *)
let pi = acos(-1.0)
let unit_circle = 2.0 *. pi
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
let left = pi /. 2.0
let right = (-1.0 *. left)
let reverse = pi
(*** part b ***)
# # # # # # # # # # # #
let makePoly sides len =
if (sides = 0 ) then Forward 0.0
else (
let rot = unit_circle /. (float sides) in
let move_list = [Forward len;Turn rot] in
For (sides, move_list)
)
(*** part c ***)
let interpLarge (movelist : move list) : (float*float) list =
let rec loop movelist x y dir acc =
# # # # # # # # # # # # # # # # # # # # # # # # # # #
match movelist with
[] -> acc
| Home::tl -> loop tl 0.0 0.0 0.0 ((0.0,0.0)::acc)
| Forward(f)::tl -> loop tl (x +. (f*. (cos dir))) (y +. (f*. (sin dir))) dir
(((x +. (f *. (cos dir))),(y+.(f *. (sin dir))))::acc)
| Turn(rot)::tl ->
let new_dir = dir +. rot in
if (new_dir < 0.0) then (loop tl x y (new_dir +. unit_circle) acc)
else ( if (new_dir > unit_circle) then (loop tl x y (new_dir -. unit_circle) acc)
else (loop tl x y new_dir acc)
)
| For(i,lst)::tl ->
let rec append_for_lst i acc=
if (i=0) then acc
else (append_for_lst (i-1) (lst@acc))
in loop ((append_for_lst i [])@tl) x y dir acc
in List.rev (loop movelist 0.0 0.0 0.0 [(0.0,0.0)])
(*** part d ***)
let interpSmall (movelist : move list) : (float*float) list =
let interpSmallStep movelist x y dir : move list * float * float * float =
# # # # # # # # # # # # # # # # # # # # # # # # # # #
match movelist with
[] -> raise AlreadyDone
| Home::tl -> (tl,0.0,0.0,0.0)
| Forward(f)::tl -> (tl,(x+.(f*.(cos dir))),(y+.(f*.(sin dir))),dir)
| Turn(rot)::tl->(tl,x,y,(dir+.rot))
| For(i,lst)::tl->
let rec append_for_lst idx =
if (idx=0) then tl
else (lst@(append_for_lst (idx-1)))
in
((append_for_lst i), x, y, dir)
in
let rec loop movelist x y dir acc =
match movelist with
[] -> acc
| hd::tl -> let (lst,new_x,new_y,new_dir) =
interpSmallStep movelist x y dir in
if ((new_x <> x) || (new_y <> y)) then
(loop lst new_x new_y new_dir ((new_x,new_y)::acc))
else (loop lst x y new_dir acc)
in
List.rev (loop movelist 0.0 0.0 0.0 [(0.0,0.0)])
(*** part e ***)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#############################################################################
##############################################################################
################################################################
################################################################# *)
(*** part f ***)
let rec interpTrans movelist : float->float->float-> (float * float) list * float=
let compose f1 f2=
let composed_answer x1 y1 dir1 =
let (f1_list, f1_dir) = f1 x1 y1 dir1 in
let f1_rev = List.rev f1_list in
match (List.hd f1_rev) with
[] -> f2 x1 y1 f1_dir
| (f1_x,f1_y) ->
let (f2_list,f2_dir) = (f2 f1_x f1_y f1_dir) in
(f1_list@f2_list,f2_dir)
in
composed_answer
in
match movelist with
[] -> (fun x y dir -> ([], dir))
| Home::tl ->
let (tl_list,tl_dir) = interpTrans tl 0.0 0.0 0.0
in fun x y dir -> ((0.0,0.0)::tl_list, (0.0 +. tl_dir) )
| Forward(f)::tl ->
let f1 = interpTrans tl in
(fun x y dir ->
let x_coord = (x +. (f *. (cos dir))) in
let y_coord = (y +. (f *. (sin dir))) in
let (tl_list, tl_dir) = f1 x_coord y_coord dir
in
( (x_coord,y_coord)::tl_list,(dir +. tl_dir))
)
| Turn(rot)::tl ->
let f1 = interpTrans tl in
fun x y dir -> (f1 x y (dir +. rot))
| For(i,lst)::tl ->
let _ = print_endline ("For loop") in
let f_tl = interpTrans tl in
let f_body = interpTrans lst in
let rec loop i1=
if (i1=0) then (print_endline ("loop 0");f_tl)
else (print_endline ("loop "^string_of_int i1);
compose (f_body) (loop (i1 - 1)))
in
fun x y dir -> print_endline ("for loop");(loop i x y dir)
# # # # # # # # # # # # # # # # # # # # # # # # # # #
(*** possibly helpful testing code ***)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
let example_logo_prog1 = [Forward 1.0; (makePoly 4 3.0); Home]
let example_logo_prog1= [Forward 1.0;
For (3,[Forward 1.0; Turn left]) ;
Forward 2.0;
Home;
Turn right;
Forward 1.0]
let example_logo_prog = [For(3,[Turn left; Forward 1.0])]
let example_logo_prog1 = [Turn left;
Forward 1.0;
Turn left;
Forward 1.0;
Turn left;
Forward 1.0]
let interpL_vs_S = [Forward 0.0;Home]
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
################################### *)
let ansL = interpLarge example_logo_prog
let ansS = interpSmall example_logo_prog
let ansI = (0.0,0.0)::(fst ((interpTrans example_logo_prog) 0.0 0.0 0.0))
let ansdirI = snd ((interpTrans example_logo_prog) 0.0 0.0 0.0)
let rec pr lst =
match lst with
[] -> ()
| (x,y)::tl ->
print_string("(" ^ (string_of_float x) ^ "," ^ (string_of_float y) ^ ")");
pr tl
let _ =
print_endline("Interp_large: ");
pr ansL; print_newline ();
print_endline("Interp_small: ");
pr ansS; print_newline ();
print_endline("Interp_trans: ");
pr ansI; print_newline ();
print_endline("Interp_trans direction: " ^ (string_of_float ansdirI))
| null | https://raw.githubusercontent.com/gsakkas/rite/958a0ad2460e15734447bc07bd181f5d35956d3b/features/data/seminal/20060324-20%3A45%3A28-232a876d350ebc97006824843fc62110.seminal.ml | ocaml | ** part a **
** part b **
** part c **
** part d **
** part e **
** part f **
** possibly helpful testing code ** |
exception Unimplemented
exception AlreadyDone
# # # # # # # # # # # # # # # # # # # # # # # # # # # #
type move = Home | Forward of float | Turn of float | For of int*(move list)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # #
############ *)
type state = {cur_x:float ; cur_y:float ; cur_dir:float}
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # #
############################ *)
let pi = acos(-1.0)
let unit_circle = 2.0 *. pi
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
let left = pi /. 2.0
let right = (-1.0 *. left)
let reverse = pi
# # # # # # # # # # # #
let makePoly sides len =
if (sides = 0 ) then Forward 0.0
else (
let rot = unit_circle /. (float sides) in
let move_list = [Forward len;Turn rot] in
For (sides, move_list)
)
let interpLarge (movelist : move list) : (float*float) list =
let rec loop movelist x y dir acc =
# # # # # # # # # # # # # # # # # # # # # # # # # # #
match movelist with
[] -> acc
| Home::tl -> loop tl 0.0 0.0 0.0 ((0.0,0.0)::acc)
| Forward(f)::tl -> loop tl (x +. (f*. (cos dir))) (y +. (f*. (sin dir))) dir
(((x +. (f *. (cos dir))),(y+.(f *. (sin dir))))::acc)
| Turn(rot)::tl ->
let new_dir = dir +. rot in
if (new_dir < 0.0) then (loop tl x y (new_dir +. unit_circle) acc)
else ( if (new_dir > unit_circle) then (loop tl x y (new_dir -. unit_circle) acc)
else (loop tl x y new_dir acc)
)
| For(i,lst)::tl ->
let rec append_for_lst i acc=
if (i=0) then acc
else (append_for_lst (i-1) (lst@acc))
in loop ((append_for_lst i [])@tl) x y dir acc
in List.rev (loop movelist 0.0 0.0 0.0 [(0.0,0.0)])
let interpSmall (movelist : move list) : (float*float) list =
let interpSmallStep movelist x y dir : move list * float * float * float =
# # # # # # # # # # # # # # # # # # # # # # # # # # #
match movelist with
[] -> raise AlreadyDone
| Home::tl -> (tl,0.0,0.0,0.0)
| Forward(f)::tl -> (tl,(x+.(f*.(cos dir))),(y+.(f*.(sin dir))),dir)
| Turn(rot)::tl->(tl,x,y,(dir+.rot))
| For(i,lst)::tl->
let rec append_for_lst idx =
if (idx=0) then tl
else (lst@(append_for_lst (idx-1)))
in
((append_for_lst i), x, y, dir)
in
let rec loop movelist x y dir acc =
match movelist with
[] -> acc
| hd::tl -> let (lst,new_x,new_y,new_dir) =
interpSmallStep movelist x y dir in
if ((new_x <> x) || (new_y <> y)) then
(loop lst new_x new_y new_dir ((new_x,new_y)::acc))
else (loop lst x y new_dir acc)
in
List.rev (loop movelist 0.0 0.0 0.0 [(0.0,0.0)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#############################################################################
##############################################################################
################################################################
################################################################# *)
let rec interpTrans movelist : float->float->float-> (float * float) list * float=
let compose f1 f2=
let composed_answer x1 y1 dir1 =
let (f1_list, f1_dir) = f1 x1 y1 dir1 in
let f1_rev = List.rev f1_list in
match (List.hd f1_rev) with
[] -> f2 x1 y1 f1_dir
| (f1_x,f1_y) ->
let (f2_list,f2_dir) = (f2 f1_x f1_y f1_dir) in
(f1_list@f2_list,f2_dir)
in
composed_answer
in
match movelist with
[] -> (fun x y dir -> ([], dir))
| Home::tl ->
let (tl_list,tl_dir) = interpTrans tl 0.0 0.0 0.0
in fun x y dir -> ((0.0,0.0)::tl_list, (0.0 +. tl_dir) )
| Forward(f)::tl ->
let f1 = interpTrans tl in
(fun x y dir ->
let x_coord = (x +. (f *. (cos dir))) in
let y_coord = (y +. (f *. (sin dir))) in
let (tl_list, tl_dir) = f1 x_coord y_coord dir
in
( (x_coord,y_coord)::tl_list,(dir +. tl_dir))
)
| Turn(rot)::tl ->
let f1 = interpTrans tl in
fun x y dir -> (f1 x y (dir +. rot))
| For(i,lst)::tl ->
let _ = print_endline ("For loop") in
let f_tl = interpTrans tl in
let f_body = interpTrans lst in
let rec loop i1=
if (i1=0) then (print_endline ("loop 0");f_tl)
else (print_endline ("loop "^string_of_int i1);
compose (f_body) (loop (i1 - 1)))
in
fun x y dir -> print_endline ("for loop");(loop i x y dir)
# # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
let example_logo_prog1 = [Forward 1.0; (makePoly 4 3.0); Home]
let example_logo_prog1= [Forward 1.0;
For (3,[Forward 1.0; Turn left]) ;
Forward 2.0;
Home;
Turn right;
Forward 1.0]
let example_logo_prog = [For(3,[Turn left; Forward 1.0])]
let example_logo_prog1 = [Turn left;
Forward 1.0;
Turn left;
Forward 1.0;
Turn left;
Forward 1.0]
let interpL_vs_S = [Forward 0.0;Home]
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
################################### *)
let ansL = interpLarge example_logo_prog
let ansS = interpSmall example_logo_prog
let ansI = (0.0,0.0)::(fst ((interpTrans example_logo_prog) 0.0 0.0 0.0))
let ansdirI = snd ((interpTrans example_logo_prog) 0.0 0.0 0.0)
let rec pr lst =
match lst with
[] -> ()
| (x,y)::tl ->
print_string("(" ^ (string_of_float x) ^ "," ^ (string_of_float y) ^ ")");
pr tl
let _ =
print_endline("Interp_large: ");
pr ansL; print_newline ();
print_endline("Interp_small: ");
pr ansS; print_newline ();
print_endline("Interp_trans: ");
pr ansI; print_newline ();
print_endline("Interp_trans direction: " ^ (string_of_float ansdirI))
|
b2aa1ce4ba0be712ca03d73d611a36abf508e46c9009424920e2f3946ee58618 | functional-koans/clojure-koan-engine | koans.clj | (ns koan-engine.koans
(:use [clojure.java.io :only [file resource]])
(:require [koan-engine.util :as u]
[clojure.string :as str]))
;; TODO: Proper koan validation. Accept the path as an argument.
(defn ordered-koans [answer-path]
(map first (u/try-read (.getPath (resource answer-path)))))
(defn ordered-koan-paths [koan-root answer-path]
(map (fn [koan-name]
(.getCanonicalPath (file koan-root (str koan-name ".clj"))))
(ordered-koans answer-path)))
(defn among-paths? [files]
(into #{} (map #(.getCanonicalPath %) files)))
(defn next-koan-path [koan-path-seq last-koan-path]
(loop [[this-koan & more :as koan-paths] koan-path-seq]
(when (seq more)
(if (= last-koan-path this-koan)
(first more)
(recur more)))))
(defn report-error [file-path line error]
(let [message (or (.getMessage error) (.toString error))]
(println "\nNow meditate upon"
(str (last (str/split file-path #"/"))
(when line (str ":" line))))
(println "---------------------")
(println "Assertion failed!")
(println (.replaceFirst message "^Assert failed: " ""))))
(defn tests-pass? [dojo-path file-path]
(u/with-dojo [dojo-path]
(flush)
(try (load-file file-path)
true
(catch clojure.lang.Compiler$CompilerException e
(let [cause (.getCause e)]
(report-error file-path (:line (ex-data cause)) cause))
false)
(catch clojure.lang.ExceptionInfo ei
(report-error file-path (:line (ex-data ei)) ei)
false)
(catch Throwable e
(report-error file-path nil e)
false))))
(defn namaste []
(println "\nYou have achieved clojure enlightenment. Namaste."))
| null | https://raw.githubusercontent.com/functional-koans/clojure-koan-engine/2ef0699a74a46978305ee116a193d2995038c21f/src/koan_engine/koans.clj | clojure | TODO: Proper koan validation. Accept the path as an argument. | (ns koan-engine.koans
(:use [clojure.java.io :only [file resource]])
(:require [koan-engine.util :as u]
[clojure.string :as str]))
(defn ordered-koans [answer-path]
(map first (u/try-read (.getPath (resource answer-path)))))
(defn ordered-koan-paths [koan-root answer-path]
(map (fn [koan-name]
(.getCanonicalPath (file koan-root (str koan-name ".clj"))))
(ordered-koans answer-path)))
(defn among-paths? [files]
(into #{} (map #(.getCanonicalPath %) files)))
(defn next-koan-path [koan-path-seq last-koan-path]
(loop [[this-koan & more :as koan-paths] koan-path-seq]
(when (seq more)
(if (= last-koan-path this-koan)
(first more)
(recur more)))))
(defn report-error [file-path line error]
(let [message (or (.getMessage error) (.toString error))]
(println "\nNow meditate upon"
(str (last (str/split file-path #"/"))
(when line (str ":" line))))
(println "---------------------")
(println "Assertion failed!")
(println (.replaceFirst message "^Assert failed: " ""))))
(defn tests-pass? [dojo-path file-path]
(u/with-dojo [dojo-path]
(flush)
(try (load-file file-path)
true
(catch clojure.lang.Compiler$CompilerException e
(let [cause (.getCause e)]
(report-error file-path (:line (ex-data cause)) cause))
false)
(catch clojure.lang.ExceptionInfo ei
(report-error file-path (:line (ex-data ei)) ei)
false)
(catch Throwable e
(report-error file-path nil e)
false))))
(defn namaste []
(println "\nYou have achieved clojure enlightenment. Namaste."))
|
404762b05604196bab5dc94419335a50ef80ebcb3bf86408d53d2991701e6049 | smallhadroncollider/taskell | ParserTest.hs | # LANGUAGE TemplateHaskell #
# LANGUAGE QuasiQuotes #
module Taskell.IO.Keyboard.ParserTest
( test_parser
) where
import ClassyPrelude
import Test.Tasty
import Test.Tasty.HUnit
import Data.FileEmbed (embedFile)
import Text.RawString.QQ (r)
import qualified Taskell.Events.Actions.Types as A
import Taskell.IO.Keyboard.Parser (bindings)
import Taskell.IO.Keyboard.Types
basic :: Text
basic = "quit = q"
basicResult :: Bindings
basicResult = [(BChar 'q', A.Quit)]
basicMulti :: Text
basicMulti =
[r|
quit = q
detail = <Enter>
|]
basicMultiResult :: Bindings
basicMultiResult = [(BChar 'q', A.Quit), (BKey "Enter", A.Detail)]
ini :: Text
ini = decodeUtf8 $(embedFile "test/Taskell/IO/Keyboard/data/bindings.ini")
iniResult :: Bindings
iniResult =
[ (BChar 'q', A.Quit)
, (BChar 'u', A.Undo)
, (BChar 'r', A.Redo)
, (BChar '/', A.Search)
, (BChar '?', A.Help)
, (BChar '!', A.Due)
, (BChar 'k', A.Previous)
, (BChar 'j', A.Next)
, (BChar 'h', A.Left)
, (BChar 'l', A.Right)
, (BChar 'G', A.Bottom)
, (BChar 'a', A.New)
, (BChar 'O', A.NewAbove)
, (BChar 'o', A.NewBelow)
, (BChar '+', A.Duplicate)
, (BChar 'e', A.Edit)
, (BChar 'A', A.Edit)
, (BChar 'i', A.Edit)
, (BChar 'C', A.Clear)
, (BChar 'D', A.Delete)
, (BKey "Enter", A.Detail)
, (BChar '@', A.DueDate)
, (BKey "Backspace", A.ClearDate)
, (BChar 'K', A.MoveUp)
, (BChar 'J', A.MoveDown)
, (BChar 'H', A.MoveLeftBottom)
, (BChar 'L', A.MoveRightBottom)
, (BKey "Space", A.Complete)
, (BChar 'T', A.CompleteToTop)
, (BChar 'm', A.MoveMenu)
, (BChar 'N', A.ListNew)
, (BChar 'E', A.ListEdit)
, (BChar 'X', A.ListDelete)
, (BChar '>', A.ListRight)
, (BChar '<', A.ListLeft)
]
comma :: Text
comma = "quit = ,"
commaResult :: Bindings
commaResult = [(BChar ',', A.Quit)]
test_parser :: TestTree
test_parser =
testGroup
"IO.Keyboard.Parser"
[ testCase "basic" (assertEqual "Parses quit" (Right basicResult) (bindings basic))
, testCase
"basic multiline"
(assertEqual "Parses both" (Right basicMultiResult) (bindings basicMulti))
, testCase "full ini file" (assertEqual "Parses all" (Right iniResult) (bindings ini))
, testCase "comma" (assertEqual "Parses comma" (Right commaResult) (bindings comma))
]
| null | https://raw.githubusercontent.com/smallhadroncollider/taskell/944b713f55ecf47433890f740835021ea86fd995/test/Taskell/IO/Keyboard/ParserTest.hs | haskell | # LANGUAGE TemplateHaskell #
# LANGUAGE QuasiQuotes #
module Taskell.IO.Keyboard.ParserTest
( test_parser
) where
import ClassyPrelude
import Test.Tasty
import Test.Tasty.HUnit
import Data.FileEmbed (embedFile)
import Text.RawString.QQ (r)
import qualified Taskell.Events.Actions.Types as A
import Taskell.IO.Keyboard.Parser (bindings)
import Taskell.IO.Keyboard.Types
basic :: Text
basic = "quit = q"
basicResult :: Bindings
basicResult = [(BChar 'q', A.Quit)]
basicMulti :: Text
basicMulti =
[r|
quit = q
detail = <Enter>
|]
basicMultiResult :: Bindings
basicMultiResult = [(BChar 'q', A.Quit), (BKey "Enter", A.Detail)]
ini :: Text
ini = decodeUtf8 $(embedFile "test/Taskell/IO/Keyboard/data/bindings.ini")
iniResult :: Bindings
iniResult =
[ (BChar 'q', A.Quit)
, (BChar 'u', A.Undo)
, (BChar 'r', A.Redo)
, (BChar '/', A.Search)
, (BChar '?', A.Help)
, (BChar '!', A.Due)
, (BChar 'k', A.Previous)
, (BChar 'j', A.Next)
, (BChar 'h', A.Left)
, (BChar 'l', A.Right)
, (BChar 'G', A.Bottom)
, (BChar 'a', A.New)
, (BChar 'O', A.NewAbove)
, (BChar 'o', A.NewBelow)
, (BChar '+', A.Duplicate)
, (BChar 'e', A.Edit)
, (BChar 'A', A.Edit)
, (BChar 'i', A.Edit)
, (BChar 'C', A.Clear)
, (BChar 'D', A.Delete)
, (BKey "Enter", A.Detail)
, (BChar '@', A.DueDate)
, (BKey "Backspace", A.ClearDate)
, (BChar 'K', A.MoveUp)
, (BChar 'J', A.MoveDown)
, (BChar 'H', A.MoveLeftBottom)
, (BChar 'L', A.MoveRightBottom)
, (BKey "Space", A.Complete)
, (BChar 'T', A.CompleteToTop)
, (BChar 'm', A.MoveMenu)
, (BChar 'N', A.ListNew)
, (BChar 'E', A.ListEdit)
, (BChar 'X', A.ListDelete)
, (BChar '>', A.ListRight)
, (BChar '<', A.ListLeft)
]
comma :: Text
comma = "quit = ,"
commaResult :: Bindings
commaResult = [(BChar ',', A.Quit)]
test_parser :: TestTree
test_parser =
testGroup
"IO.Keyboard.Parser"
[ testCase "basic" (assertEqual "Parses quit" (Right basicResult) (bindings basic))
, testCase
"basic multiline"
(assertEqual "Parses both" (Right basicMultiResult) (bindings basicMulti))
, testCase "full ini file" (assertEqual "Parses all" (Right iniResult) (bindings ini))
, testCase "comma" (assertEqual "Parses comma" (Right commaResult) (bindings comma))
]
| |
f863a4945992ce4897589e5b1e06f3b0db2583c336b4396d78bc3a73c3aeac4d | dnlkrgr/hsreduce | MultiParams.hs | # language MultiParamTypeClasses #
# language FlexibleInstances #
module MultiParams where
class Arst a b where
inRelation :: a -> b -> Bool
instance Arst a a where
inRelation _ _ = True
instance Arst [ a ] ( Maybe a ) where
-- inRelation
| null | https://raw.githubusercontent.com/dnlkrgr/hsreduce/8f66fdee036f8639053067572b55d9a64359d22c/test-cases/regressions/MultiParams.hs | haskell | inRelation | # language MultiParamTypeClasses #
# language FlexibleInstances #
module MultiParams where
class Arst a b where
inRelation :: a -> b -> Bool
instance Arst a a where
inRelation _ _ = True
instance Arst [ a ] ( Maybe a ) where
|
ce1f61708583ea1e41609d55688309540f1fd114ee57f2d36548a69bc5d430b9 | Vaguery/klapaucius | inputs_test.clj | (ns push.interpreter.inputs-test
(:require [push.interpreter.templates.minimum :as m]
[push.util.stack-manipulation :as u])
(:use midje.sweet)
(:use [push.interpreter.core])
)
(fact "a bare naked Interpreter has an empty :bindings hashmap"
(:bindings (m/basic-interpreter)) => {})
(fact "`bind-input` saves a key-value pair in the :bindings field, using a stack"
(:bindings (bind-input (m/basic-interpreter) :foo 99)) => {:foo '(99)})
(fact "the `(bound-keyword? interpreter item)` recognizer returns true for registered inputs"
(bound-keyword? (m/basic-interpreter) :kung) => false
(bound-keyword? (bind-input (m/basic-interpreter) :kung 77) :kung) => true)
(fact "an Interpreter will recognize a registered input keyword in a
running program and replace it with the stored value"
(let [neo (bind-input (m/basic-interpreter) :kung "foo")]
(u/get-stack (handle-item neo :kung) :exec) => '("foo")))
(fact "`bind-input` saves a new key-value pair with a generated key if none is given"
(:bindings (bind-input (m/basic-interpreter) 9912)) =>
{:input!1 '(9912)})
(fact "bind-input pushes multiple items if called repeatedly"
(let [knows-foo (assoc-in (m/basic-interpreter) [:bindings :foo] '(1 2 3))]
(:bindings (bind-input knows-foo :foo 99)) => {:foo '(99 1 2 3)}
(:bindings
(bind-input
(bind-input knows-foo :foo 99)
:foo 88)) => {:foo '(88 99 1 2 3)}))
;; bind-value
(fact "bind-value pushes the item onto the indicated stack, if it exists"
(let [knows-foo (assoc-in (m/basic-interpreter) [:bindings :foo] '(1 2 3))]
(:bindings (bind-value knows-foo :foo 99)) => {:foo '(99 1 2 3)}))
(fact "bind-value pushes the item onto a new stack, if it doesn't already exist"
(let [knows-foo (assoc-in (m/basic-interpreter) [:bindings :foo] '(1 2 3))]
(:bindings (bind-value knows-foo :bar 99)) => {:bar '(99), :foo '(1 2 3)}))
(fact "bind-value does not push nil items"
(let [knows-foo (assoc-in (m/basic-interpreter) [:bindings :foo] '(1 2 3))]
(:bindings (bind-value knows-foo :foo nil)) => {:foo '(1 2 3)}))
(fact "bind-value creates an empty stack even for nil items, if needed"
(let [knows-foo (assoc-in (m/basic-interpreter) [:bindings :foo] '(1 2 3))]
(:bindings (bind-value knows-foo :bar nil)) => {:bar '(), :foo '(1 2 3)}))
;; peek-at-binding
(fact "peek-at-binding returns the top item on the indicated :bindings item (by key)"
(let [knows-foo (assoc-in (m/basic-interpreter) [:bindings :foo] '(1 2 3))]
(peek-at-binding knows-foo :foo) => 1
(peek-at-binding knows-foo :bar) => nil
(peek-at-binding (assoc-in (m/basic-interpreter) [:bindings :foo] '()) :foo) => nil))
;; bind-inputs
(fact "`bind-inputs` can take a hashmap and will register all the items as :input pairs"
(:bindings (bind-inputs (m/basic-interpreter) {:a 1 :b 2 :c 3}))
=> {:a '(1), :b '(2), :c '(3)})
(fact "`bind-inputs` can take a vector of items and will register
each of them under generated `input!` keys"
(:bindings (bind-inputs (m/basic-interpreter) [1 2 false 99]))
=> {:input!1 '(1), :input!2 '(2), :input!3 '(false), :input!4 '(99)})
| null | https://raw.githubusercontent.com/Vaguery/klapaucius/17b55eb76feaa520a85d4df93597cccffe6bdba4/test/push/interpreter/inputs_test.clj | clojure | bind-value
peek-at-binding
bind-inputs | (ns push.interpreter.inputs-test
(:require [push.interpreter.templates.minimum :as m]
[push.util.stack-manipulation :as u])
(:use midje.sweet)
(:use [push.interpreter.core])
)
(fact "a bare naked Interpreter has an empty :bindings hashmap"
(:bindings (m/basic-interpreter)) => {})
(fact "`bind-input` saves a key-value pair in the :bindings field, using a stack"
(:bindings (bind-input (m/basic-interpreter) :foo 99)) => {:foo '(99)})
(fact "the `(bound-keyword? interpreter item)` recognizer returns true for registered inputs"
(bound-keyword? (m/basic-interpreter) :kung) => false
(bound-keyword? (bind-input (m/basic-interpreter) :kung 77) :kung) => true)
(fact "an Interpreter will recognize a registered input keyword in a
running program and replace it with the stored value"
(let [neo (bind-input (m/basic-interpreter) :kung "foo")]
(u/get-stack (handle-item neo :kung) :exec) => '("foo")))
(fact "`bind-input` saves a new key-value pair with a generated key if none is given"
(:bindings (bind-input (m/basic-interpreter) 9912)) =>
{:input!1 '(9912)})
(fact "bind-input pushes multiple items if called repeatedly"
(let [knows-foo (assoc-in (m/basic-interpreter) [:bindings :foo] '(1 2 3))]
(:bindings (bind-input knows-foo :foo 99)) => {:foo '(99 1 2 3)}
(:bindings
(bind-input
(bind-input knows-foo :foo 99)
:foo 88)) => {:foo '(88 99 1 2 3)}))
(fact "bind-value pushes the item onto the indicated stack, if it exists"
(let [knows-foo (assoc-in (m/basic-interpreter) [:bindings :foo] '(1 2 3))]
(:bindings (bind-value knows-foo :foo 99)) => {:foo '(99 1 2 3)}))
(fact "bind-value pushes the item onto a new stack, if it doesn't already exist"
(let [knows-foo (assoc-in (m/basic-interpreter) [:bindings :foo] '(1 2 3))]
(:bindings (bind-value knows-foo :bar 99)) => {:bar '(99), :foo '(1 2 3)}))
(fact "bind-value does not push nil items"
(let [knows-foo (assoc-in (m/basic-interpreter) [:bindings :foo] '(1 2 3))]
(:bindings (bind-value knows-foo :foo nil)) => {:foo '(1 2 3)}))
(fact "bind-value creates an empty stack even for nil items, if needed"
(let [knows-foo (assoc-in (m/basic-interpreter) [:bindings :foo] '(1 2 3))]
(:bindings (bind-value knows-foo :bar nil)) => {:bar '(), :foo '(1 2 3)}))
(fact "peek-at-binding returns the top item on the indicated :bindings item (by key)"
(let [knows-foo (assoc-in (m/basic-interpreter) [:bindings :foo] '(1 2 3))]
(peek-at-binding knows-foo :foo) => 1
(peek-at-binding knows-foo :bar) => nil
(peek-at-binding (assoc-in (m/basic-interpreter) [:bindings :foo] '()) :foo) => nil))
(fact "`bind-inputs` can take a hashmap and will register all the items as :input pairs"
(:bindings (bind-inputs (m/basic-interpreter) {:a 1 :b 2 :c 3}))
=> {:a '(1), :b '(2), :c '(3)})
(fact "`bind-inputs` can take a vector of items and will register
each of them under generated `input!` keys"
(:bindings (bind-inputs (m/basic-interpreter) [1 2 false 99]))
=> {:input!1 '(1), :input!2 '(2), :input!3 '(false), :input!4 '(99)})
|
92578eb755de6fcedf91d971a3c82e27aa8db70cda866e2611f802486f2d3737 | softwarelanguageslab/maf | R5RS_WeiChenRompf2019_regex-derivative-3.scm | ; Changes:
* removed : 0
* added : 1
* swaps : 1
* negated predicates : 1
* swapped branches : 1
* calls to i d fun : 2
(letrec ((debug-trace (lambda ()
'do-nothing))
(cadr (lambda (p)
(<change>
(car (cdr p))
((lambda (x) x) (car (cdr p))))))
(caddr (lambda (p)
(car (cdr (cdr p)))))
(regex-NULL #f)
(regex-BLANK #t)
(regex-alt? (lambda (re)
(if (pair? re) (eq? (car re) 'alt) #f)))
(regex-seq? (lambda (re)
(if (pair? re) (eq? (car re) 'seq) #f)))
(regex-rep? (lambda (re)
(if (pair? re) (eq? (car re) 'rep) #f)))
(regex-null? (lambda (re)
(<change>
(eq? re #f)
((lambda (x) x) (eq? re #f)))))
(regex-empty? (lambda (re)
(eq? re #t)))
(regex-atom? (lambda (re)
(let ((__or_res (char? re)))
(if __or_res __or_res (symbol? re)))))
(match-seq (lambda (re f)
(if (regex-seq? re) (f (cadr re) (caddr re)) #f)))
(match-alt (lambda (re f)
(<change>
()
(display re))
(if (regex-alt? re) (f (cadr re) (caddr re)) #f)))
(match-rep (lambda (re f)
(if (regex-rep? re) (f (cadr re)) #f)))
(seq (lambda (pat1 pat2)
(if (regex-null? pat1)
regex-NULL
(if (regex-null? pat2)
regex-NULL
(if (regex-empty? pat1)
pat2
(if (regex-empty? pat2)
pat1
(cons 'seq (cons pat1 (cons pat2 ())))))))))
(alt (lambda (pat1 pat2)
(if (regex-null? pat1)
(<change>
pat2
(if (regex-null? pat2)
pat1
(cons 'alt (cons pat1 (cons pat2 ())))))
(<change>
(if (regex-null? pat2)
pat1
(cons 'alt (cons pat1 (cons pat2 ()))))
pat2))))
(rep (lambda (pat)
(if (regex-null? pat)
regex-BLANK
(if (regex-empty? pat)
regex-BLANK
(cons 'rep (cons pat ()))))))
(regex-empty (lambda (re)
(if (regex-empty? re)
#t
(if (regex-null? re)
#f
(if (regex-atom? re)
#f
(if (match-seq re (lambda (pat1 pat2) (seq (regex-empty pat1) (regex-empty pat2))))
#f
(if (match-alt re (lambda (pat1 pat2) (alt (regex-empty pat1) (regex-empty pat2))))
#f
(if (<change> (regex-rep? re) (not (regex-rep? re)))
#t
#f))))))))
(d/dc (lambda (re c)
(debug-trace)
(if (regex-empty? re)
regex-NULL
(if (regex-null? re)
regex-NULL
(if (eq? c re)
regex-BLANK
(if (regex-atom? re)
regex-NULL
(if (match-seq re (lambda (pat1 pat2) (alt (seq (d/dc pat1 c) pat2) (seq (regex-empty pat1) (d/dc pat2 c)))))
#f
(if (match-alt re (lambda (pat1 pat2) (alt (d/dc pat1 c) (d/dc pat2 c))))
#f
(if (match-rep re (lambda (pat) (seq (d/dc pat c) (rep pat))))
#f
regex-NULL)))))))))
(regex-match (lambda (pattern data)
(if (null? data)
(regex-empty? (regex-empty pattern))
(regex-match (d/dc pattern (car data)) (cdr data)))))
(check-expect (lambda (check expect)
(if (not (equal? check expect))
(begin
(display "check-expect failed; got: ")
(display check)
(<change>
(display "; expected: ")
(display expect))
(<change>
(display expect)
(display "; expected: "))
(newline))
(void)))))
(check-expect (d/dc 'baz 'f) #f)) | null | https://raw.githubusercontent.com/softwarelanguageslab/maf/11acedf56b9bf0c8e55ddb6aea754b6766d8bb40/test/changes/scheme/generated/R5RS_WeiChenRompf2019_regex-derivative-3.scm | scheme | Changes: | * removed : 0
* added : 1
* swaps : 1
* negated predicates : 1
* swapped branches : 1
* calls to i d fun : 2
(letrec ((debug-trace (lambda ()
'do-nothing))
(cadr (lambda (p)
(<change>
(car (cdr p))
((lambda (x) x) (car (cdr p))))))
(caddr (lambda (p)
(car (cdr (cdr p)))))
(regex-NULL #f)
(regex-BLANK #t)
(regex-alt? (lambda (re)
(if (pair? re) (eq? (car re) 'alt) #f)))
(regex-seq? (lambda (re)
(if (pair? re) (eq? (car re) 'seq) #f)))
(regex-rep? (lambda (re)
(if (pair? re) (eq? (car re) 'rep) #f)))
(regex-null? (lambda (re)
(<change>
(eq? re #f)
((lambda (x) x) (eq? re #f)))))
(regex-empty? (lambda (re)
(eq? re #t)))
(regex-atom? (lambda (re)
(let ((__or_res (char? re)))
(if __or_res __or_res (symbol? re)))))
(match-seq (lambda (re f)
(if (regex-seq? re) (f (cadr re) (caddr re)) #f)))
(match-alt (lambda (re f)
(<change>
()
(display re))
(if (regex-alt? re) (f (cadr re) (caddr re)) #f)))
(match-rep (lambda (re f)
(if (regex-rep? re) (f (cadr re)) #f)))
(seq (lambda (pat1 pat2)
(if (regex-null? pat1)
regex-NULL
(if (regex-null? pat2)
regex-NULL
(if (regex-empty? pat1)
pat2
(if (regex-empty? pat2)
pat1
(cons 'seq (cons pat1 (cons pat2 ())))))))))
(alt (lambda (pat1 pat2)
(if (regex-null? pat1)
(<change>
pat2
(if (regex-null? pat2)
pat1
(cons 'alt (cons pat1 (cons pat2 ())))))
(<change>
(if (regex-null? pat2)
pat1
(cons 'alt (cons pat1 (cons pat2 ()))))
pat2))))
(rep (lambda (pat)
(if (regex-null? pat)
regex-BLANK
(if (regex-empty? pat)
regex-BLANK
(cons 'rep (cons pat ()))))))
(regex-empty (lambda (re)
(if (regex-empty? re)
#t
(if (regex-null? re)
#f
(if (regex-atom? re)
#f
(if (match-seq re (lambda (pat1 pat2) (seq (regex-empty pat1) (regex-empty pat2))))
#f
(if (match-alt re (lambda (pat1 pat2) (alt (regex-empty pat1) (regex-empty pat2))))
#f
(if (<change> (regex-rep? re) (not (regex-rep? re)))
#t
#f))))))))
(d/dc (lambda (re c)
(debug-trace)
(if (regex-empty? re)
regex-NULL
(if (regex-null? re)
regex-NULL
(if (eq? c re)
regex-BLANK
(if (regex-atom? re)
regex-NULL
(if (match-seq re (lambda (pat1 pat2) (alt (seq (d/dc pat1 c) pat2) (seq (regex-empty pat1) (d/dc pat2 c)))))
#f
(if (match-alt re (lambda (pat1 pat2) (alt (d/dc pat1 c) (d/dc pat2 c))))
#f
(if (match-rep re (lambda (pat) (seq (d/dc pat c) (rep pat))))
#f
regex-NULL)))))))))
(regex-match (lambda (pattern data)
(if (null? data)
(regex-empty? (regex-empty pattern))
(regex-match (d/dc pattern (car data)) (cdr data)))))
(check-expect (lambda (check expect)
(if (not (equal? check expect))
(begin
(display "check-expect failed; got: ")
(display check)
(<change>
(display "; expected: ")
(display expect))
(<change>
(display expect)
(display "; expected: "))
(newline))
(void)))))
(check-expect (d/dc 'baz 'f) #f)) |
c538e24e50f8b952c7f2528227a7cf6413bc999a9b62dfec268753c044d19c5f | Bodigrim/poly | TestUtils.hs | # LANGUAGE CPP #
{-# LANGUAGE DataKinds #-}
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE ScopedTypeVariables #
{-# LANGUAGE UndecidableInstances #-}
# OPTIONS_GHC -fno - warn - orphans #
module TestUtils
( ShortPoly(..)
, tenTimesLess
, myNumLaws
#ifdef MIN_VERSION_quickcheck_classes
, mySemiringLaws
, myRingLaws
, myGcdDomainLaws
, myEuclideanLaws
#endif
, myIsListLaws
, myShowLaws
) where
import Prelude hiding (lcm, rem)
import Data.Euclidean
import Data.Mod.Word
import Data.Proxy
import Data.Semiring (Semiring(..), Ring)
import qualified Data.Vector.Generic as G
import GHC.Exts
import GHC.TypeNats (KnownNat)
import Test.QuickCheck.Classes.Base
import Test.Tasty
import Test.Tasty.QuickCheck
#ifdef MIN_VERSION_quickcheck_classes
import Test.QuickCheck.Classes
#endif
import qualified Data.Poly.Semiring as Dense
import qualified Data.Poly.Laurent as DenseLaurent
#ifdef SupportSparse
import Control.Arrow
import Data.Finite
import qualified Data.Vector.Generic.Sized as SG
import qualified Data.Vector.Unboxed.Sized as SU
import Data.Poly.Multi.Semiring
import qualified Data.Poly.Multi.Laurent as MultiLaurent
#endif
newtype ShortPoly a = ShortPoly { unShortPoly :: a }
deriving (Eq, Show, Semiring, GcdDomain, Euclidean, Num)
instance KnownNat m => Arbitrary (Mod m) where
arbitrary = oneof [arbitraryBoundedEnum, fromInteger <$> arbitrary]
shrink = map fromInteger . shrink . toInteger . unMod
instance (Eq a, Semiring a, Arbitrary a, G.Vector v a) => Arbitrary (Dense.Poly v a) where
arbitrary = Dense.toPoly . G.fromList <$> arbitrary
shrink = fmap (Dense.toPoly . G.fromList) . shrink . G.toList . Dense.unPoly
instance (Eq a, Semiring a, Arbitrary a, G.Vector v a) => Arbitrary (ShortPoly (Dense.Poly v a)) where
arbitrary = ShortPoly . Dense.toPoly . G.fromList . (\xs -> take (length xs `mod` 10) xs) <$> arbitrary
shrink = fmap (ShortPoly . Dense.toPoly . G.fromList) . shrink . G.toList . Dense.unPoly . unShortPoly
instance (Eq a, Semiring a, Arbitrary a, G.Vector v a) => Arbitrary (DenseLaurent.Laurent v a) where
arbitrary = DenseLaurent.toLaurent <$> ((`rem` 10) <$> arbitrary) <*> arbitrary
shrink = fmap (uncurry DenseLaurent.toLaurent) . shrink . DenseLaurent.unLaurent
instance (Eq a, Semiring a, Arbitrary a, G.Vector v a) => Arbitrary (ShortPoly (DenseLaurent.Laurent v a)) where
arbitrary = (ShortPoly .) . DenseLaurent.toLaurent <$> ((`rem` 10) <$> arbitrary) <*> (unShortPoly <$> arbitrary)
shrink = fmap (ShortPoly . uncurry DenseLaurent.toLaurent . fmap unShortPoly) . shrink . fmap ShortPoly . DenseLaurent.unLaurent . unShortPoly
#ifdef SupportSparse
instance KnownNat n => Arbitrary (Finite n) where
arbitrary = elements finites
instance (Arbitrary a, KnownNat n, G.Vector v a) => Arbitrary (SG.Vector v n a) where
arbitrary = SG.replicateM arbitrary
shrink vs = [ vs SG.// [(i, x)] | i <- finites, x <- shrink (SG.index vs i) ]
instance (Eq a, Semiring a, Arbitrary a, KnownNat n, G.Vector v (SU.Vector n Word, a)) => Arbitrary (MultiPoly v n a) where
arbitrary = toMultiPoly . G.fromList <$> arbitrary
shrink = fmap (toMultiPoly . G.fromList) . shrink . G.toList . unMultiPoly
instance (Eq a, Semiring a, Arbitrary a, KnownNat n, G.Vector v (SU.Vector n Word, a)) => Arbitrary (ShortPoly (MultiPoly v n a)) where
arbitrary = ShortPoly . toMultiPoly . G.fromList . (\xs -> take (length xs `mod` 4) (map (first (SU.map (`mod` 3))) xs)) <$> arbitrary
shrink = fmap (ShortPoly . toMultiPoly . G.fromList) . shrink . G.toList . unMultiPoly . unShortPoly
instance (Eq a, Semiring a, Arbitrary a, KnownNat n, G.Vector v (Word, a), G.Vector v (SU.Vector n Word, a)) => Arbitrary (MultiLaurent.MultiLaurent v n a) where
arbitrary = MultiLaurent.toMultiLaurent <$> (SU.map (`rem` 10) <$> arbitrary) <*> arbitrary
shrink = fmap (uncurry MultiLaurent.toMultiLaurent) . shrink . MultiLaurent.unMultiLaurent
instance (Eq a, Semiring a, Arbitrary a, KnownNat n, G.Vector v (Word, a), G.Vector v (SU.Vector n Word, a)) => Arbitrary (ShortPoly (MultiLaurent.MultiLaurent v n a)) where
arbitrary = (ShortPoly .) . MultiLaurent.toMultiLaurent <$> (SU.map (`rem` 10) <$> arbitrary) <*> (unShortPoly <$> arbitrary)
shrink = fmap (ShortPoly . uncurry MultiLaurent.toMultiLaurent . fmap unShortPoly) . shrink . fmap ShortPoly . MultiLaurent.unMultiLaurent . unShortPoly
#endif
-------------------------------------------------------------------------------
tenTimesLess :: TestTree -> TestTree
tenTimesLess = adjustOption $
\(QuickCheckTests n) -> QuickCheckTests (max 100 (n `div` 10))
myNumLaws :: (Eq a, Num a, Arbitrary a, Show a) => Proxy a -> TestTree
myNumLaws proxy = testGroup tpclss $ map tune props
where
Laws tpclss props = numLaws proxy
tune pair = case fst pair of
"Multiplicative Associativity" ->
tenTimesLess test
"Multiplication Left Distributes Over Addition" ->
tenTimesLess test
"Multiplication Right Distributes Over Addition" ->
tenTimesLess test
"Subtraction" ->
tenTimesLess test
_ -> test
where
test = uncurry testProperty pair
#ifdef MIN_VERSION_quickcheck_classes
mySemiringLaws :: (Eq a, Semiring a, Arbitrary a, Show a) => Proxy a -> TestTree
mySemiringLaws proxy = testGroup tpclss $ map tune props
where
Laws tpclss props = semiringLaws proxy
tune pair = case fst pair of
"Multiplicative Associativity" ->
tenTimesLess test
"Multiplication Left Distributes Over Addition" ->
tenTimesLess test
"Multiplication Right Distributes Over Addition" ->
tenTimesLess test
_ -> test
where
test = uncurry testProperty pair
myRingLaws :: (Eq a, Ring a, Arbitrary a, Show a) => Proxy a -> TestTree
myRingLaws proxy = testGroup tpclss $ map (uncurry testProperty) props
where
Laws tpclss props = ringLaws proxy
myGcdDomainLaws :: forall a. (Eq a, GcdDomain a, Arbitrary a, Show a) => Proxy a -> TestTree
myGcdDomainLaws proxy = testGroup tpclss $ map tune $ lcm0 : props
where
Laws tpclss props = gcdDomainLaws proxy
tune pair = case fst pair of
"gcd1" -> tenTimesLess $ tenTimesLess test
"gcd2" -> tenTimesLess $ tenTimesLess test
"lcm1" -> tenTimesLess $ tenTimesLess $ tenTimesLess test
"lcm2" -> tenTimesLess test
"coprime" -> tenTimesLess $ tenTimesLess test
_ -> test
where
test = uncurry testProperty pair
lcm0 = ("lcm0", property $ \(x :: a) -> lcm x zero === zero .&&. lcm zero x === zero)
myEuclideanLaws :: (Eq a, Euclidean a, Arbitrary a, Show a) => Proxy a -> TestTree
myEuclideanLaws proxy = testGroup tpclss $ map (uncurry testProperty) props
where
Laws tpclss props = euclideanLaws proxy
#endif
myIsListLaws :: (Eq a, IsList a, Arbitrary a, Show a, Show (Item a), Arbitrary (Item a)) => Proxy a -> TestTree
myIsListLaws proxy = testGroup tpclss $ map (uncurry testProperty) props
where
Laws tpclss props = isListLaws proxy
myShowLaws :: (Eq a, Arbitrary a, Show a) => Proxy a -> TestTree
myShowLaws proxy = testGroup tpclss $ map tune props
where
Laws tpclss props = showLaws proxy
tune pair = case fst pair of
"Equivariance: showList" -> tenTimesLess $ tenTimesLess test
_ -> test
where
test = uncurry testProperty pair
| null | https://raw.githubusercontent.com/Bodigrim/poly/3c8e97fc6dbcab0ae0c7d6e3e569f2becc41dc9c/test/TestUtils.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE UndecidableInstances #
----------------------------------------------------------------------------- | # LANGUAGE CPP #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE ScopedTypeVariables #
# OPTIONS_GHC -fno - warn - orphans #
module TestUtils
( ShortPoly(..)
, tenTimesLess
, myNumLaws
#ifdef MIN_VERSION_quickcheck_classes
, mySemiringLaws
, myRingLaws
, myGcdDomainLaws
, myEuclideanLaws
#endif
, myIsListLaws
, myShowLaws
) where
import Prelude hiding (lcm, rem)
import Data.Euclidean
import Data.Mod.Word
import Data.Proxy
import Data.Semiring (Semiring(..), Ring)
import qualified Data.Vector.Generic as G
import GHC.Exts
import GHC.TypeNats (KnownNat)
import Test.QuickCheck.Classes.Base
import Test.Tasty
import Test.Tasty.QuickCheck
#ifdef MIN_VERSION_quickcheck_classes
import Test.QuickCheck.Classes
#endif
import qualified Data.Poly.Semiring as Dense
import qualified Data.Poly.Laurent as DenseLaurent
#ifdef SupportSparse
import Control.Arrow
import Data.Finite
import qualified Data.Vector.Generic.Sized as SG
import qualified Data.Vector.Unboxed.Sized as SU
import Data.Poly.Multi.Semiring
import qualified Data.Poly.Multi.Laurent as MultiLaurent
#endif
newtype ShortPoly a = ShortPoly { unShortPoly :: a }
deriving (Eq, Show, Semiring, GcdDomain, Euclidean, Num)
instance KnownNat m => Arbitrary (Mod m) where
arbitrary = oneof [arbitraryBoundedEnum, fromInteger <$> arbitrary]
shrink = map fromInteger . shrink . toInteger . unMod
instance (Eq a, Semiring a, Arbitrary a, G.Vector v a) => Arbitrary (Dense.Poly v a) where
arbitrary = Dense.toPoly . G.fromList <$> arbitrary
shrink = fmap (Dense.toPoly . G.fromList) . shrink . G.toList . Dense.unPoly
instance (Eq a, Semiring a, Arbitrary a, G.Vector v a) => Arbitrary (ShortPoly (Dense.Poly v a)) where
arbitrary = ShortPoly . Dense.toPoly . G.fromList . (\xs -> take (length xs `mod` 10) xs) <$> arbitrary
shrink = fmap (ShortPoly . Dense.toPoly . G.fromList) . shrink . G.toList . Dense.unPoly . unShortPoly
instance (Eq a, Semiring a, Arbitrary a, G.Vector v a) => Arbitrary (DenseLaurent.Laurent v a) where
arbitrary = DenseLaurent.toLaurent <$> ((`rem` 10) <$> arbitrary) <*> arbitrary
shrink = fmap (uncurry DenseLaurent.toLaurent) . shrink . DenseLaurent.unLaurent
instance (Eq a, Semiring a, Arbitrary a, G.Vector v a) => Arbitrary (ShortPoly (DenseLaurent.Laurent v a)) where
arbitrary = (ShortPoly .) . DenseLaurent.toLaurent <$> ((`rem` 10) <$> arbitrary) <*> (unShortPoly <$> arbitrary)
shrink = fmap (ShortPoly . uncurry DenseLaurent.toLaurent . fmap unShortPoly) . shrink . fmap ShortPoly . DenseLaurent.unLaurent . unShortPoly
#ifdef SupportSparse
instance KnownNat n => Arbitrary (Finite n) where
arbitrary = elements finites
instance (Arbitrary a, KnownNat n, G.Vector v a) => Arbitrary (SG.Vector v n a) where
arbitrary = SG.replicateM arbitrary
shrink vs = [ vs SG.// [(i, x)] | i <- finites, x <- shrink (SG.index vs i) ]
instance (Eq a, Semiring a, Arbitrary a, KnownNat n, G.Vector v (SU.Vector n Word, a)) => Arbitrary (MultiPoly v n a) where
arbitrary = toMultiPoly . G.fromList <$> arbitrary
shrink = fmap (toMultiPoly . G.fromList) . shrink . G.toList . unMultiPoly
instance (Eq a, Semiring a, Arbitrary a, KnownNat n, G.Vector v (SU.Vector n Word, a)) => Arbitrary (ShortPoly (MultiPoly v n a)) where
arbitrary = ShortPoly . toMultiPoly . G.fromList . (\xs -> take (length xs `mod` 4) (map (first (SU.map (`mod` 3))) xs)) <$> arbitrary
shrink = fmap (ShortPoly . toMultiPoly . G.fromList) . shrink . G.toList . unMultiPoly . unShortPoly
instance (Eq a, Semiring a, Arbitrary a, KnownNat n, G.Vector v (Word, a), G.Vector v (SU.Vector n Word, a)) => Arbitrary (MultiLaurent.MultiLaurent v n a) where
arbitrary = MultiLaurent.toMultiLaurent <$> (SU.map (`rem` 10) <$> arbitrary) <*> arbitrary
shrink = fmap (uncurry MultiLaurent.toMultiLaurent) . shrink . MultiLaurent.unMultiLaurent
instance (Eq a, Semiring a, Arbitrary a, KnownNat n, G.Vector v (Word, a), G.Vector v (SU.Vector n Word, a)) => Arbitrary (ShortPoly (MultiLaurent.MultiLaurent v n a)) where
arbitrary = (ShortPoly .) . MultiLaurent.toMultiLaurent <$> (SU.map (`rem` 10) <$> arbitrary) <*> (unShortPoly <$> arbitrary)
shrink = fmap (ShortPoly . uncurry MultiLaurent.toMultiLaurent . fmap unShortPoly) . shrink . fmap ShortPoly . MultiLaurent.unMultiLaurent . unShortPoly
#endif
tenTimesLess :: TestTree -> TestTree
tenTimesLess = adjustOption $
\(QuickCheckTests n) -> QuickCheckTests (max 100 (n `div` 10))
myNumLaws :: (Eq a, Num a, Arbitrary a, Show a) => Proxy a -> TestTree
myNumLaws proxy = testGroup tpclss $ map tune props
where
Laws tpclss props = numLaws proxy
tune pair = case fst pair of
"Multiplicative Associativity" ->
tenTimesLess test
"Multiplication Left Distributes Over Addition" ->
tenTimesLess test
"Multiplication Right Distributes Over Addition" ->
tenTimesLess test
"Subtraction" ->
tenTimesLess test
_ -> test
where
test = uncurry testProperty pair
#ifdef MIN_VERSION_quickcheck_classes
mySemiringLaws :: (Eq a, Semiring a, Arbitrary a, Show a) => Proxy a -> TestTree
mySemiringLaws proxy = testGroup tpclss $ map tune props
where
Laws tpclss props = semiringLaws proxy
tune pair = case fst pair of
"Multiplicative Associativity" ->
tenTimesLess test
"Multiplication Left Distributes Over Addition" ->
tenTimesLess test
"Multiplication Right Distributes Over Addition" ->
tenTimesLess test
_ -> test
where
test = uncurry testProperty pair
myRingLaws :: (Eq a, Ring a, Arbitrary a, Show a) => Proxy a -> TestTree
myRingLaws proxy = testGroup tpclss $ map (uncurry testProperty) props
where
Laws tpclss props = ringLaws proxy
myGcdDomainLaws :: forall a. (Eq a, GcdDomain a, Arbitrary a, Show a) => Proxy a -> TestTree
myGcdDomainLaws proxy = testGroup tpclss $ map tune $ lcm0 : props
where
Laws tpclss props = gcdDomainLaws proxy
tune pair = case fst pair of
"gcd1" -> tenTimesLess $ tenTimesLess test
"gcd2" -> tenTimesLess $ tenTimesLess test
"lcm1" -> tenTimesLess $ tenTimesLess $ tenTimesLess test
"lcm2" -> tenTimesLess test
"coprime" -> tenTimesLess $ tenTimesLess test
_ -> test
where
test = uncurry testProperty pair
lcm0 = ("lcm0", property $ \(x :: a) -> lcm x zero === zero .&&. lcm zero x === zero)
myEuclideanLaws :: (Eq a, Euclidean a, Arbitrary a, Show a) => Proxy a -> TestTree
myEuclideanLaws proxy = testGroup tpclss $ map (uncurry testProperty) props
where
Laws tpclss props = euclideanLaws proxy
#endif
myIsListLaws :: (Eq a, IsList a, Arbitrary a, Show a, Show (Item a), Arbitrary (Item a)) => Proxy a -> TestTree
myIsListLaws proxy = testGroup tpclss $ map (uncurry testProperty) props
where
Laws tpclss props = isListLaws proxy
myShowLaws :: (Eq a, Arbitrary a, Show a) => Proxy a -> TestTree
myShowLaws proxy = testGroup tpclss $ map tune props
where
Laws tpclss props = showLaws proxy
tune pair = case fst pair of
"Equivariance: showList" -> tenTimesLess $ tenTimesLess test
_ -> test
where
test = uncurry testProperty pair
|
748c183100a5a099e0490782f81ff6583702a816ccafd177a6a69d6571d157fc | anmonteiro/aws-lambda-ocaml-runtime | request.ml | ----------------------------------------------------------------------------
* Copyright ( c ) 2019
*
* All rights reserved .
*
* Redistribution and use in source and binary forms , with or without
* modification , are permitted provided that the following conditions are met :
*
* 1 . Redistributions of source code must retain the above copyright notice ,
* this list of conditions and the following disclaimer .
*
* 2 . Redistributions in binary form must reproduce the above copyright
* notice , this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution .
*
* 3 . Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission .
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS "
* AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
* CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS
* INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , IN
* CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR OTHERWISE )
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE .
* ---------------------------------------------------------------------------
* Copyright (c) 2019 António Nuno Monteiro
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*---------------------------------------------------------------------------*)
module StringMap = Lambda_runtime.StringMap
module Request = Piaf.Request
module Body = Piaf.Body
module Headers = Piaf.Headers
type vercel_proxy_request =
{ path : string
; http_method : string [@key "method"]
; host : string
; headers : string StringMap.t
; body : string option [@default None]
; encoding : string option [@default None]
}
[@@deriving of_yojson]
type vercel_event =
{ action : string [@key "Action"]
; body : string
}
[@@deriving of_yojson]
type t = Request.t
let of_yojson json =
match vercel_event_of_yojson json with
| Ok { body = event_body; _ } ->
(match
Yojson.Safe.from_string event_body |> vercel_proxy_request_of_yojson
with
| Ok { body; encoding; path; http_method; host; headers } ->
let meth = Piaf.Method.of_string http_method in
let headers =
Message.string_map_to_headers
~init:
(match
StringMap.(find_opt "host" headers, find_opt "Host" headers)
with
| Some _, _ | _, Some _ -> Headers.empty
| None, None -> Headers.of_list [ "Host", host ])
headers
in
let body =
match Message.decode_body ~encoding body with
| None -> Body.empty
| Some s -> Body.of_string s
in
let request =
Request.create
~scheme:`HTTP
~version:Piaf.Versions.HTTP.HTTP_1_1
~headers
~meth
~body
path
in
Ok request
| Error _ -> Error "Failed to parse event to Vercel request type"
| exception Yojson.Json_error error ->
Error
(Printf.sprintf
"Failed to parse event to Vercel request type: %s"
error))
| Error _ -> Error "Failed to parse event to Vercel request type"
| null | https://raw.githubusercontent.com/anmonteiro/aws-lambda-ocaml-runtime/5d3210e0c6683f390cb5870efe7f774420ad4b22/vercel/request.ml | ocaml | ----------------------------------------------------------------------------
* Copyright ( c ) 2019
*
* All rights reserved .
*
* Redistribution and use in source and binary forms , with or without
* modification , are permitted provided that the following conditions are met :
*
* 1 . Redistributions of source code must retain the above copyright notice ,
* this list of conditions and the following disclaimer .
*
* 2 . Redistributions in binary form must reproduce the above copyright
* notice , this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution .
*
* 3 . Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission .
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS "
* AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
* LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
* CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS
* INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , IN
* CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR OTHERWISE )
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE .
* ---------------------------------------------------------------------------
* Copyright (c) 2019 António Nuno Monteiro
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*---------------------------------------------------------------------------*)
module StringMap = Lambda_runtime.StringMap
module Request = Piaf.Request
module Body = Piaf.Body
module Headers = Piaf.Headers
type vercel_proxy_request =
{ path : string
; http_method : string [@key "method"]
; host : string
; headers : string StringMap.t
; body : string option [@default None]
; encoding : string option [@default None]
}
[@@deriving of_yojson]
type vercel_event =
{ action : string [@key "Action"]
; body : string
}
[@@deriving of_yojson]
type t = Request.t
let of_yojson json =
match vercel_event_of_yojson json with
| Ok { body = event_body; _ } ->
(match
Yojson.Safe.from_string event_body |> vercel_proxy_request_of_yojson
with
| Ok { body; encoding; path; http_method; host; headers } ->
let meth = Piaf.Method.of_string http_method in
let headers =
Message.string_map_to_headers
~init:
(match
StringMap.(find_opt "host" headers, find_opt "Host" headers)
with
| Some _, _ | _, Some _ -> Headers.empty
| None, None -> Headers.of_list [ "Host", host ])
headers
in
let body =
match Message.decode_body ~encoding body with
| None -> Body.empty
| Some s -> Body.of_string s
in
let request =
Request.create
~scheme:`HTTP
~version:Piaf.Versions.HTTP.HTTP_1_1
~headers
~meth
~body
path
in
Ok request
| Error _ -> Error "Failed to parse event to Vercel request type"
| exception Yojson.Json_error error ->
Error
(Printf.sprintf
"Failed to parse event to Vercel request type: %s"
error))
| Error _ -> Error "Failed to parse event to Vercel request type"
| |
67f36ddfec491c1994707ec1e79cea43dc7ab5288ddf77373921976af6958bec | jfaure/Irie-lang | AdjointFoldsCH.hs | # Language FlexibleInstances , MultiParamTypeClasses , GADTs , RankNTypes , QuantifiedConstraints , PolyKinds , ImpredicativeTypes ,
module CH where
import Prelude hiding (Product , Sum)
import Data.Functor.Rep
import Data.Functor.Base
import Data.Functor.Sum
import Data.Functor.Product
import Control.Monad.Free
import Control.Comonad
import Control.Comonad.Cofree
import Control.Comonad.Trans.Env
import Control.Arrow
import Data.Functor.Compose
import GHC.Prim
hylo Control functor ( A + _ ) models tail recursion
tailRecurse :: (c -> Either a c) -> c -> a
tailRecurse c = h where h = (identity ||| h) . c -- <=> h = (id ||| id) . (A + h) . c
Rolling rule : algebras or coalgebras where two functors compose to create base functor
= an adjunction like correspondence between 2 types of hylomorphisms
-- there is bijection between sets of hylos:
-- L_ (C , c) >-> (a,A) ~= (C,c) >-> _R (a,A)
natural in ( C , c ) : R.L - Coalg D and ( a , A ) : L.R - Alg C
This is witnessd by la x = R x . c and = a . L y
these trivially form a bijection between fixed points of ra . and of la .
-- thus: x = a . L (R x) . L c <=> y = R a . R (L y) . c
here is colifted L to coalgebras and f2 is lifted R to algebras
rollLhs :: (Functor f1, Functor f2) =>
(f1 (f2 b) -> b) -> (a -> f2 (f1 a)) -> f1 a -> b
rollLhs a c = h where h = a . fmap (fmap h) . fmap c
rollRhs :: (Functor f1, Functor f2) =>
(f2 (f1 b) -> b) -> (a -> f1 (f2 a)) -> a -> f1 b
rollRhs a c = h where h = fmap a . fmap (fmap h) . c
instance = > Adj ( EnvT e w ) ( ReaderT e m ) where
aunit = ReaderT . flip fmap EnvT . flip lAdjunct
acounit ( EnvT e w ) = rAdjunct ( flip runReaderT e ) w
instance ( , ' ) = > Adj ( Sum f f ' ) ( Product g g ' ) where
aunit a = Pair ( lAdjunct a ) ( lAdjunct InR a )
acounit ( InL l ) = rAdjunct ( \(Pair x _ ) - > x ) l
acounit ( InR r ) = rAdjunct ( \(Pair _ x ) - > x ) r
instance Adj w m => Adj (EnvT e w) (ReaderT e m) where
aunit = ReaderT . flip fmap EnvT . flip lAdjunct
acounit (EnvT e w) = rAdjunct (flip runReaderT e) w
instance (Adj f g, Adj f' g') => Adj (Sum f f') (Product g g') where
aunit a = Pair (lAdjunct InL a) (lAdjunct InR a)
acounit (InL l) = rAdjunct (\(Pair x _) -> x) l
acounit (InR r) = rAdjunct (\(Pair _ x) -> x) r
-}
-- iff there is a bijection between the sets of arrows: (natural in both A and B)
-- adjunctL : C(L a -> b) ~= D(A -> R B) : adjunctR
data Adjunction l r = Adjunction
{ aunit :: forall a. a -> r (l a) -- = adjunctL identity
, counit :: forall a. l (r a) -> a -- = adjunctR identity
, adjunctL :: forall a b. (l a -> b) -> a -> r b -- = (\f->fmap f . unit)
= ( \f->counit . fmap f )
}
-- shortcut to define adjunctions using (unit * counit) | (adjunctL * adjunctR)
mkAdjUnit :: (Functor l , Functor r) =>
(forall a b. a -> r (l a)) -> (forall a b. l (r a) -> a) -> Adjunction l r
mkAdjUnit unit counit = Adjunction unit counit (\f->fmap f . unit) (\f->counit . fmap f)
mkAdjlArA :: (Functor l , Functor r) =>
(forall a b. (l a -> b) -> a -> r b) -> (forall a b. (a -> r b) -> l a -> b) -> Adjunction l r
mkAdjlArA lA rA = Adjunction (lA identity) (rA identity) lA rA
Adjoint functors give rise to a monad : unit , join = fmap counit and bind :
adjBind :: (Functor f1, Functor l, Functor r, Functor f2) =>
Adjunction l r -> f1 (f2 a) -> (a -> b) -> f1 (r (l (f2 b)))
adjBind a = \x f -> fmap (aunit a) (fmap (fmap f) x)
-- | A right adjoint functor admits an intrinsic notion of zipping
zipR :: Adjunction f u -> (u a , u b) -> u (a , b)
zipR a = adjunctL a (adjunctR a fst &&& adjunctR a snd)
splitL :: Adjunction f u -> f a -> (a, f ())
splitL a = adjunctR a (flip (adjunctL a) () . (,))
unsplitL :: Functor f => a -> f () -> f a
unsplitL = (<$)
| Every functor in Haskell permits unzipping
unzipR :: Functor u => u (a, b) -> (u a, u b)
unzipR = fmap fst &&& fmap snd
adjCompose :: ( Functor l , Functor r , Functor l' , Functor r')
=> Adjunction l r -> Adjunction l' r' -> Adjunction (Compose l l') (Compose r' r)
adjCompose
(Adjunction { adjunctL = la , adjunctR = ra})
(Adjunction { adjunctL = la' , adjunctR = ra'})
= mkAdjUnit (Compose . la' (la Compose)) (ra (ra' getCompose) . getCompose)
( Compose . fmap ( fmap Compose . au ) . bu )
( aco . fmap ( bco . fmap getCompose ) . getCompose )
identityAdjunction = mkAdjlArA (\f -> Identity . f . Identity) (\f -> runIdentity . f . runIdentity)
curryAdjunction :: Adjunction ((,) t) ((->) t)
curryAdjunction = mkAdjlArA (\f a e -> f (e , a)) (\f (e , a) -> f a e)
adjM1 :: (Functor f, Functor r) =>
Adjunction f r -> Adjunction (M1 i1 c1 f) (M1 i2 c2 r)
adjM1 a = mkAdjUnit (M1 . adjunctL a M1) (adjunctR a unM1 . unM1)
-- adj fusion (due to naturality properties of the adjuncts) --
-- R k · la f · h = la (k . f · L h)
k · · L h = ra ( R k · g · h )
-- product fusion
-- (k1 × k2) · (f1 M f2) · h = (k1 · f1 · h) M (k2 · f2 · h)
pfLHS , pfRHS :: (x -> r) -> (y -> k) -> (b -> x) -> (b -> y) -> (a -> b) -> a -> (r , k)
pfLHS k1 k2 f1 f2 h = (k1 *** k2) . (f1 &&& f2) . h
pfRHS k1 k2 f1 f2 h = (k1 . f1 . h) &&& (k2 . f2 . h)
r (k1,k2) = (k1 *** k2)
prodMorph (x , y) = (x , y)
-- L = Diag ; R = X
la = ( Diag a - > b ) - > a - > Prod b
-- a = s
-- r b = (x , y)
-- (l a -> b) -> a = (s -> x , s -> y)
la :: Arrow f => (f s x , f s y) -> f s (x , y)
la (f1 , f2) = f1 &&& f2
( l a - > b ) - > a - > r b = > ( Diag a - > ( ) ) - > ( a - > X ( b1 , b2 ) )
where Diag a = ( a , a ) and X ( a , b ) = a X b
data Prod (a :: (a1 , a2)) = Prod a1 a2
mkProd (a , b) = Prod a b
newtype Diag a = Diag (a , a)
mkDiag a = Diag (a , a)
lla : : forall a ( b : : ( * , * ) ) . ( Diag a - > _ ) - > a - > Prod b
lla : : ( Diag a - > b ) - > a - > Prod b
lla d a = mkProd ( d ( Diag a ) )
--lla d a = mkProd (d (mkDiag a))
llla : : ( Diag a - > _ ) - > a - > Prod b
pairs of arrows with same source BIJ with Arrows to a product
Product : = x ( f , ) = f x g
Diag : = D a = ( a , a )
-- (l a -> b) -> a -> r b
( ( a , a ) - > ( ) ) - > ( a - > x ( ) )
-- L x = (x , x)
-- L _ = (s -> a , s -> b)
-- coUnit :: (l (r a)) -> a
-- coUnit = (outl , outr)
-- la : (l a -> b) -> a -> r b
pairs of arrows with the same source are 1 - 1 with arrows from a product
pairs of arrows with the same target are 1 - 1 with arrows from a coproduct
-- envReaderAdjunction = mkAdjUnit
diagProdAdj = mkAdjUnit ( \a - > _ ( Pair a a ) ) _
sumProductAdj :: (Functor f, Functor g, Functor r1, Functor r2) =>
Adjunction f r1 -> Adjunction g r2 -> Adjunction (Sum f g) (Product r1 r2)
sumProductAdj f g = mkAdjUnit (\a -> Pair (adjunctL f InL a) (adjunctL g InR a)) $ \case
InL l -> adjunctR f (\(Pair x _) -> x) l
InR r -> adjunctR g (\(Pair _ x) -> x) r
sumProdAdj :: (Functor f , Functor f' , Functor g , Functor g')
=> Adjunction f g -> Adjunction f' g' -> Adjunction (f :+: f') (g :*: g')
sumProdAdj adj1 adj2 = mkAdjUnit (\a -> (adjunctL adj1) L1 a :*: (adjunctL adj2) R1 a) (\case { L1 l -> (adjunctR adj1) (\(x :*: _) -> x) l ; R1 r -> (adjunctR adj2) (\(_ :*: x) -> x) r} )
-- Conjugate rule: c1 ~= c2
c1 :: (Functor d , Functor l , Functor r) =>
Adjunction l r -> (l (d (r b)) -> b) -> (a -> d a) -> l a -> b
( 3.9 )
c2 adj a c = h where h = (adjunctR adj) (adjunctL adj a . fmap{-D-} (adjunctL adj h) . c)
hyloShift :: Functor d => (Identity (d (Identity b)) -> b) -> (a -> d a) -> Identity a -> b
hyloShift = c1 identityAdjunction
hAccu :: Functor d => ((p , d (p -> b)) -> b) -> (a -> d a) -> (p , a) -> b
hAccu = c1 curryAdjunction
: : Functor f = > ( f b - > b ) - > ( a - > f a ) - > ( a , p ) - > b
a c = h where
h = a . fmap h . . ( c * * * identity )
-- strength :: Functor d => (d a , p) -> d (a , p)
strength ( f , p ) = fmap ( , p ) f
| null | https://raw.githubusercontent.com/jfaure/Irie-lang/186640a095d14560ff102ef613648e558e5b3f1e/docs/AdjointFoldsCH.hs | haskell | <=> h = (id ||| id) . (A + h) . c
there is bijection between sets of hylos:
L_ (C , c) >-> (a,A) ~= (C,c) >-> _R (a,A)
thus: x = a . L (R x) . L c <=> y = R a . R (L y) . c
iff there is a bijection between the sets of arrows: (natural in both A and B)
adjunctL : C(L a -> b) ~= D(A -> R B) : adjunctR
= adjunctL identity
= adjunctR identity
= (\f->fmap f . unit)
shortcut to define adjunctions using (unit * counit) | (adjunctL * adjunctR)
| A right adjoint functor admits an intrinsic notion of zipping
adj fusion (due to naturality properties of the adjuncts) --
R k · la f · h = la (k . f · L h)
product fusion
(k1 × k2) · (f1 M f2) · h = (k1 · f1 · h) M (k2 · f2 · h)
L = Diag ; R = X
a = s
r b = (x , y)
(l a -> b) -> a = (s -> x , s -> y)
lla d a = mkProd (d (mkDiag a))
(l a -> b) -> a -> r b
L x = (x , x)
L _ = (s -> a , s -> b)
coUnit :: (l (r a)) -> a
coUnit = (outl , outr)
la : (l a -> b) -> a -> r b
envReaderAdjunction = mkAdjUnit
Conjugate rule: c1 ~= c2
D
strength :: Functor d => (d a , p) -> d (a , p) | # Language FlexibleInstances , MultiParamTypeClasses , GADTs , RankNTypes , QuantifiedConstraints , PolyKinds , ImpredicativeTypes ,
module CH where
import Prelude hiding (Product , Sum)
import Data.Functor.Rep
import Data.Functor.Base
import Data.Functor.Sum
import Data.Functor.Product
import Control.Monad.Free
import Control.Comonad
import Control.Comonad.Cofree
import Control.Comonad.Trans.Env
import Control.Arrow
import Data.Functor.Compose
import GHC.Prim
hylo Control functor ( A + _ ) models tail recursion
tailRecurse :: (c -> Either a c) -> c -> a
Rolling rule : algebras or coalgebras where two functors compose to create base functor
= an adjunction like correspondence between 2 types of hylomorphisms
natural in ( C , c ) : R.L - Coalg D and ( a , A ) : L.R - Alg C
This is witnessd by la x = R x . c and = a . L y
these trivially form a bijection between fixed points of ra . and of la .
here is colifted L to coalgebras and f2 is lifted R to algebras
rollLhs :: (Functor f1, Functor f2) =>
(f1 (f2 b) -> b) -> (a -> f2 (f1 a)) -> f1 a -> b
rollLhs a c = h where h = a . fmap (fmap h) . fmap c
rollRhs :: (Functor f1, Functor f2) =>
(f2 (f1 b) -> b) -> (a -> f1 (f2 a)) -> a -> f1 b
rollRhs a c = h where h = fmap a . fmap (fmap h) . c
instance = > Adj ( EnvT e w ) ( ReaderT e m ) where
aunit = ReaderT . flip fmap EnvT . flip lAdjunct
acounit ( EnvT e w ) = rAdjunct ( flip runReaderT e ) w
instance ( , ' ) = > Adj ( Sum f f ' ) ( Product g g ' ) where
aunit a = Pair ( lAdjunct a ) ( lAdjunct InR a )
acounit ( InL l ) = rAdjunct ( \(Pair x _ ) - > x ) l
acounit ( InR r ) = rAdjunct ( \(Pair _ x ) - > x ) r
instance Adj w m => Adj (EnvT e w) (ReaderT e m) where
aunit = ReaderT . flip fmap EnvT . flip lAdjunct
acounit (EnvT e w) = rAdjunct (flip runReaderT e) w
instance (Adj f g, Adj f' g') => Adj (Sum f f') (Product g g') where
aunit a = Pair (lAdjunct InL a) (lAdjunct InR a)
acounit (InL l) = rAdjunct (\(Pair x _) -> x) l
acounit (InR r) = rAdjunct (\(Pair _ x) -> x) r
-}
data Adjunction l r = Adjunction
= ( \f->counit . fmap f )
}
mkAdjUnit :: (Functor l , Functor r) =>
(forall a b. a -> r (l a)) -> (forall a b. l (r a) -> a) -> Adjunction l r
mkAdjUnit unit counit = Adjunction unit counit (\f->fmap f . unit) (\f->counit . fmap f)
mkAdjlArA :: (Functor l , Functor r) =>
(forall a b. (l a -> b) -> a -> r b) -> (forall a b. (a -> r b) -> l a -> b) -> Adjunction l r
mkAdjlArA lA rA = Adjunction (lA identity) (rA identity) lA rA
Adjoint functors give rise to a monad : unit , join = fmap counit and bind :
adjBind :: (Functor f1, Functor l, Functor r, Functor f2) =>
Adjunction l r -> f1 (f2 a) -> (a -> b) -> f1 (r (l (f2 b)))
adjBind a = \x f -> fmap (aunit a) (fmap (fmap f) x)
zipR :: Adjunction f u -> (u a , u b) -> u (a , b)
zipR a = adjunctL a (adjunctR a fst &&& adjunctR a snd)
splitL :: Adjunction f u -> f a -> (a, f ())
splitL a = adjunctR a (flip (adjunctL a) () . (,))
unsplitL :: Functor f => a -> f () -> f a
unsplitL = (<$)
| Every functor in Haskell permits unzipping
unzipR :: Functor u => u (a, b) -> (u a, u b)
unzipR = fmap fst &&& fmap snd
adjCompose :: ( Functor l , Functor r , Functor l' , Functor r')
=> Adjunction l r -> Adjunction l' r' -> Adjunction (Compose l l') (Compose r' r)
adjCompose
(Adjunction { adjunctL = la , adjunctR = ra})
(Adjunction { adjunctL = la' , adjunctR = ra'})
= mkAdjUnit (Compose . la' (la Compose)) (ra (ra' getCompose) . getCompose)
( Compose . fmap ( fmap Compose . au ) . bu )
( aco . fmap ( bco . fmap getCompose ) . getCompose )
identityAdjunction = mkAdjlArA (\f -> Identity . f . Identity) (\f -> runIdentity . f . runIdentity)
curryAdjunction :: Adjunction ((,) t) ((->) t)
curryAdjunction = mkAdjlArA (\f a e -> f (e , a)) (\f (e , a) -> f a e)
adjM1 :: (Functor f, Functor r) =>
Adjunction f r -> Adjunction (M1 i1 c1 f) (M1 i2 c2 r)
adjM1 a = mkAdjUnit (M1 . adjunctL a M1) (adjunctR a unM1 . unM1)
k · · L h = ra ( R k · g · h )
pfLHS , pfRHS :: (x -> r) -> (y -> k) -> (b -> x) -> (b -> y) -> (a -> b) -> a -> (r , k)
pfLHS k1 k2 f1 f2 h = (k1 *** k2) . (f1 &&& f2) . h
pfRHS k1 k2 f1 f2 h = (k1 . f1 . h) &&& (k2 . f2 . h)
r (k1,k2) = (k1 *** k2)
prodMorph (x , y) = (x , y)
la = ( Diag a - > b ) - > a - > Prod b
la :: Arrow f => (f s x , f s y) -> f s (x , y)
la (f1 , f2) = f1 &&& f2
( l a - > b ) - > a - > r b = > ( Diag a - > ( ) ) - > ( a - > X ( b1 , b2 ) )
where Diag a = ( a , a ) and X ( a , b ) = a X b
data Prod (a :: (a1 , a2)) = Prod a1 a2
mkProd (a , b) = Prod a b
newtype Diag a = Diag (a , a)
mkDiag a = Diag (a , a)
lla : : forall a ( b : : ( * , * ) ) . ( Diag a - > _ ) - > a - > Prod b
lla : : ( Diag a - > b ) - > a - > Prod b
lla d a = mkProd ( d ( Diag a ) )
llla : : ( Diag a - > _ ) - > a - > Prod b
pairs of arrows with same source BIJ with Arrows to a product
Product : = x ( f , ) = f x g
Diag : = D a = ( a , a )
( ( a , a ) - > ( ) ) - > ( a - > x ( ) )
pairs of arrows with the same source are 1 - 1 with arrows from a product
pairs of arrows with the same target are 1 - 1 with arrows from a coproduct
diagProdAdj = mkAdjUnit ( \a - > _ ( Pair a a ) ) _
sumProductAdj :: (Functor f, Functor g, Functor r1, Functor r2) =>
Adjunction f r1 -> Adjunction g r2 -> Adjunction (Sum f g) (Product r1 r2)
sumProductAdj f g = mkAdjUnit (\a -> Pair (adjunctL f InL a) (adjunctL g InR a)) $ \case
InL l -> adjunctR f (\(Pair x _) -> x) l
InR r -> adjunctR g (\(Pair _ x) -> x) r
sumProdAdj :: (Functor f , Functor f' , Functor g , Functor g')
=> Adjunction f g -> Adjunction f' g' -> Adjunction (f :+: f') (g :*: g')
sumProdAdj adj1 adj2 = mkAdjUnit (\a -> (adjunctL adj1) L1 a :*: (adjunctL adj2) R1 a) (\case { L1 l -> (adjunctR adj1) (\(x :*: _) -> x) l ; R1 r -> (adjunctR adj2) (\(_ :*: x) -> x) r} )
c1 :: (Functor d , Functor l , Functor r) =>
Adjunction l r -> (l (d (r b)) -> b) -> (a -> d a) -> l a -> b
( 3.9 )
hyloShift :: Functor d => (Identity (d (Identity b)) -> b) -> (a -> d a) -> Identity a -> b
hyloShift = c1 identityAdjunction
hAccu :: Functor d => ((p , d (p -> b)) -> b) -> (a -> d a) -> (p , a) -> b
hAccu = c1 curryAdjunction
: : Functor f = > ( f b - > b ) - > ( a - > f a ) - > ( a , p ) - > b
a c = h where
h = a . fmap h . . ( c * * * identity )
strength ( f , p ) = fmap ( , p ) f
|
c79d58185edbb6f595302f4032cac48fa822146cef02941324f83c2e568f6581 | patricoferris/ppx_deriving_graphql | ppx_deriving_graphql.ml | -----------------------------------------------------------------------------
Copyright ( c ) 2022 < >
Distributed under the MIT license . See terms at the end of this file .
-----------------------------------------------------------------------------
Copyright (c) 2022 Patrick Ferris <>
Distributed under the MIT license. See terms at the end of this file.
-----------------------------------------------------------------------------*)
open Ppxlib
open Ast_helper
open Ast_builder.Default
let upper_snake s =
String.split_on_char '_' s
|> List.map String.capitalize_ascii
|> String.concat ""
let upper_first_lower_snake s =
let words =
match String.split_on_char '_' s with
| s :: rest ->
String.uncapitalize_ascii s :: List.map String.capitalize_ascii rest
| [] -> []
in
String.concat "" words
let mangle_name_schema =
let base = "schema_typ" in
function "t" -> base | x -> x ^ "_" ^ base
let mangle_name_arg =
let base = "arg_typ" in
function "t" -> base | x -> x ^ "_" ^ base
let mangle_longident_schema = function
| Lident l -> Lident (mangle_name_schema l)
| Ldot (l, n) -> Ldot (l, mangle_name_schema n)
| _ -> assert false
let mangle_longident_arg = function
| Lident l -> Lident (mangle_name_arg l)
| Ldot (l, n) -> Ldot (l, mangle_name_arg n)
| _ -> assert false
let longident_loc_from_label lbl = { txt = Lident lbl.txt; loc = lbl.loc }
(* <><><> Attributes <><><> *)
module Attrs = struct
let resolver =
Attribute.declare "graphql.schema.resolver"
Attribute.Context.label_declaration
Ast_pattern.(single_expr_payload __)
(fun x -> x)
let schema_doc =
Attribute.declare "graphql.schema.doc" Attribute.Context.label_declaration
Ast_pattern.(single_expr_payload __)
(fun x -> x)
let constr_doc =
Attribute.declare "graphql.schema.doc"
Attribute.Context.constructor_declaration
Ast_pattern.(single_expr_payload __)
(fun x -> x)
end
(* <><><> Schemas <><><> *)
let rec type_to_schema ?(name = "") ?(non_null = true) typ =
let loc = typ.ptyp_loc in
let fn =
if non_null then [%expr Graphql_lwt.Schema.non_null] else [%expr Fun.id]
in
match typ with
| [%type: int] -> [%expr [%e fn] Graphql_lwt.Schema.int]
| [%type: float] -> [%expr [%e fn] Graphql_lwt.Schema.float]
| [%type: string] -> [%expr [%e fn] Graphql_lwt.Schema.string]
| [%type: bool] -> [%expr [%e fn] Graphql_lwt.Schema.bool]
(* | [%type: char] -> [%expr fun (x : char) -> `String (String.make 1 x)] *)
| [%type: [%t? typ] list] ->
[%expr [%e fn] (Graphql_lwt.Schema.list [%e type_to_schema typ])]
| [%type: [%t? typ] option] -> [%expr [%e type_to_schema ~non_null:false typ]]
| { ptyp_desc = Ptyp_constr ({ txt; _ }, _args); _ } ->
let name = mangle_longident_schema txt |> Longident.name in
[%expr [%e fn] [%e evar ~loc name]]
| { ptyp_desc = Ptyp_var name; _ } ->
Location.raise_errorf "Cannot derive schema for ptyp_var %s" name
| { ptyp_desc = Ptyp_poly _; _ } ->
Location.raise_errorf "Polymorphic functions not currently supported"
| { ptyp_desc = Ptyp_tuple _; _ } ->
Location.raise_errorf "Tuples not currently supported"
| { ptyp_desc = Ptyp_variant (row_fields, _, _); _ } ->
let enum_values =
List.fold_left
(fun expr_acc (field : row_field) ->
match field.prf_desc with
| Rtag (label, true, []) ->
let v = pexp_variant ~loc label.txt None in
[%expr
Graphql_lwt.Schema.enum_value [%e estring ~loc label.txt]
~value:[%e v]
:: [%e expr_acc]]
| _ -> failwith "Not implemented")
[%expr []] row_fields
in
[%expr
Graphql_lwt.Schema.enum [%e estring ~loc name] ~values:[%e enum_values]]
| _ ->
Location.raise_errorf ~loc
"Cannot derive anything for this type (typ_to_schema)"
let record_to_schema ~loc ~label fields =
let label = estring ~loc label in
let fields =
List.fold_left
(fun expr_acc (field : label_declaration) ->
let var_name f = f ~loc "p" in
let field_name = field.pld_name.txt in
let accessor_name : longident_loc =
{ txt = Lident field_name; loc = field.pld_name.loc }
in
let accessor = pexp_field ~loc (var_name evar) accessor_name in
let accessor_func =
match Attribute.get Attrs.resolver field with
| Some expr -> expr
| None -> [%expr fun _ [%p var_name pvar] -> [%e accessor]]
in
let field_doc =
match Attribute.get Attrs.schema_doc field with
| Some doc -> [%expr Some [%e doc]]
| None -> [%expr None]
in
[%expr
Graphql_lwt.Schema.field ?doc:[%e field_doc]
[%e estring ~loc (upper_first_lower_snake field_name)]
~typ:[%e type_to_schema field.pld_type]
~args:[] ~resolve:[%e accessor_func]
:: [%e expr_acc]])
[%expr []] fields
in
[%expr Graphql_lwt.Schema.obj [%e label] ~fields:[%e fields]]
(* <><><> Arguments <><><> *)
let rec type_to_arg ?(name = "") ?(non_null = true) typ =
let loc = typ.ptyp_loc in
let fn =
if non_null then [%expr Graphql_lwt.Schema.Arg.non_null] else [%expr Fun.id]
in
match typ with
| [%type: int] -> [%expr [%e fn] Graphql_lwt.Schema.Arg.int]
| [%type: float] -> [%expr [%e fn] Graphql_lwt.Schema.Arg.float]
| [%type: string] -> [%expr [%e fn] Graphql_lwt.Schema.Arg.string]
| [%type: bool] -> [%expr [%e fn] Graphql_lwt.Schema.Arg.bool]
(* | [%type: char] -> [%expr fun (x : char) -> `String (String.make 1 x)] *)
| [%type: [%t? typ] list] ->
[%expr [%e fn] (Graphql_lwt.Schema.Arg.list [%e type_to_arg typ])]
| [%type: [%t? typ] option] -> [%expr [%e type_to_arg ~non_null:false typ]]
| { ptyp_desc = Ptyp_constr ({ txt; _ }, _args); _ } ->
let name = mangle_longident_arg txt |> Longident.name in
[%expr [%e fn] [%e evar ~loc name]]
| { ptyp_desc = Ptyp_var name; _ } ->
Location.raise_errorf "Cannot derive arg for ptyp_var %s" name
| { ptyp_desc = Ptyp_poly _; _ } ->
Location.raise_errorf "Polymorphic functions not currently supported"
| { ptyp_desc = Ptyp_tuple _; _ } ->
Location.raise_errorf "Tuples not currently supported"
| { ptyp_desc = Ptyp_variant (row_fields, _, _); _ } ->
let enum_values =
List.fold_left
(fun expr_acc (field : row_field) ->
match field.prf_desc with
| Rtag (label, true, []) ->
let v = pexp_variant ~loc label.txt None in
[%expr
Graphql_lwt.Schema.enum_value [%e estring ~loc label.txt]
~value:[%e v]
:: [%e expr_acc]]
| _ -> failwith "Not implemented")
[%expr []] row_fields
in
[%expr
Graphql_lwt.Schema.Arg.enum [%e estring ~loc name]
~values:[%e enum_values]]
| _ ->
Location.raise_errorf ~loc
"Cannot derive anything for this type (typ_to_arg)"
let record_to_arg ~loc ~label fields =
let label = estring ~loc label in
let efields =
List.fold_left
(fun expr_acc (field : label_declaration) ->
let field_name = field.pld_name.txt in
let field_doc =
match Attribute.get Attrs.schema_doc field with
| Some doc -> [%expr Some [%e doc]]
| None -> [%expr None]
in
[%expr
Graphql_lwt.Schema.Arg.arg ?doc:[%e field_doc]
[%e estring ~loc (upper_first_lower_snake field_name)]
~typ:[%e type_to_arg field.pld_type]
:: [%e expr_acc]])
[%expr []] fields
in
let record =
let field_bindings =
List.map
(fun lbl ->
( longident_loc_from_label lbl.pld_name,
[%expr [%e evar ~loc lbl.pld_name.txt]] ))
fields
in
pexp_record ~loc field_bindings None
in
let ecoerce =
List.fold_left
(fun expr_acc (field : label_declaration) ->
let field_name = pvar ~loc field.pld_name.txt in
[%expr fun [%p field_name] -> [%e expr_acc]])
[%expr [%e record]] fields
in
[%expr
Graphql_lwt.Schema.Arg.obj [%e label] ~fields:[%e efields]
~coerce:[%e ecoerce]]
let generate_impl_schema ~ctxt (_rec_flag, type_decls) =
let loc = Expansion_context.Deriver.derived_item_loc ctxt in
List.concat
(List.map
(fun typ_decl ->
match typ_decl with
| { ptype_kind = Ptype_variant constructors; ptype_name; _ } ->
let txt = mangle_name_schema ptype_name.txt in
let p =
ppat_constraint ~loc
(ppat_var ~loc { loc; txt })
[%type: (ctx, _) Graphql_lwt.Schema.typ]
in
let enum_values =
List.fold_left
(fun expr_acc (constr : constructor_declaration) ->
let v =
pexp_construct ~loc
(longident_loc_from_label constr.pcd_name)
None
in
let constr_doc =
match Attribute.get Attrs.constr_doc constr with
| Some doc -> [%expr Some [%e doc]]
| None -> [%expr None]
in
[%expr
Graphql_lwt.Schema.enum_value
[%e estring ~loc constr.pcd_name.txt]
?doc:[%e constr_doc] ~value:[%e v]
:: [%e expr_acc]])
[%expr []] constructors
in
[
pstr_value ~loc Nonrecursive
[
Vb.mk p
[%expr
Graphql_lwt.Schema.enum
[%e estring ~loc (upper_snake ptype_name.txt)]
~values:[%e enum_values]];
];
]
| { ptype_kind = Ptype_record fields; ptype_loc = _; ptype_name; _ } ->
let txt = mangle_name_schema ptype_name.txt in
let p =
ppat_constraint ~loc
(ppat_var ~loc { loc; txt })
[%type: (ctx, _) Graphql_lwt.Schema.typ]
in
[
pstr_value ~loc Nonrecursive
[
Vb.mk p
(record_to_schema ~loc
~label:(upper_snake ptype_name.txt)
fields);
];
]
| {
ptype_kind = Ptype_abstract;
ptype_manifest = Some manifest;
ptype_name;
_;
} ->
let txt = mangle_name_schema ptype_name.txt in
let p =
ppat_constraint ~loc
(ppat_var ~loc { loc; txt })
[%type: (ctx, _) Graphql_lwt.Schema.typ]
in
[
pstr_value ~loc Nonrecursive
[
Vb.mk p
(type_to_schema
~name:(upper_snake ptype_name.txt)
~non_null:false manifest);
];
]
| { ptype_kind = Ptype_abstract; ptype_manifest = None; _ } ->
Location.raise_errorf ~loc
"Abstract types with no manifest are currently unsupported for \
generating GraphQL schemas."
| { ptype_kind = Ptype_open; _ } ->
Location.raise_errorf ~loc
"Open types are currently unsupported for generating GraphQL \
schemas.")
type_decls)
let generate_impl_arg ~ctxt (_rec_flag, type_decls) =
let loc = Expansion_context.Deriver.derived_item_loc ctxt in
List.concat
(List.map
(fun typ_decl ->
match typ_decl with
| { ptype_kind = Ptype_variant constructors; ptype_name; _ } ->
let txt = mangle_name_arg ptype_name.txt in
let lident = longident_loc_from_label typ_decl.ptype_name in
let p =
ppat_constraint ~loc
(ppat_var ~loc { loc; txt })
[%type:
[%t ptyp_constr ~loc lident []] option
Graphql_lwt.Schema.Arg.arg_typ]
in
let enum_values =
List.fold_left
(fun expr_acc (constr : constructor_declaration) ->
let v =
pexp_construct ~loc
(longident_loc_from_label constr.pcd_name)
None
in
let field_doc =
match Attribute.get Attrs.constr_doc constr with
| Some doc -> [%expr Some [%e doc]]
| None -> [%expr None]
in
[%expr
Graphql_lwt.Schema.enum_value
[%e estring ~loc constr.pcd_name.txt]
?doc:[%e field_doc] ~value:[%e v]
:: [%e expr_acc]])
[%expr []] constructors
in
[
pstr_value ~loc Nonrecursive
[
Vb.mk p
[%expr
Graphql_lwt.Schema.Arg.enum
[%e
estring ~loc (upper_snake ptype_name.txt ^ "Input")]
~values:[%e enum_values]];
];
]
| { ptype_kind = Ptype_record fields; ptype_loc = _; ptype_name; _ } ->
let txt = mangle_name_arg ptype_name.txt in
let lident = longident_loc_from_label typ_decl.ptype_name in
let p =
ppat_constraint ~loc
(ppat_var ~loc { loc; txt })
[%type:
[%t ptyp_constr ~loc lident []] option
Graphql_lwt.Schema.Arg.arg_typ]
in
[
pstr_value ~loc Nonrecursive
[
Vb.mk p
(record_to_arg ~loc
~label:(upper_snake ptype_name.txt ^ "Input")
fields);
];
]
| {
ptype_kind = Ptype_abstract;
ptype_manifest = Some manifest;
ptype_name;
_;
} ->
let txt = mangle_name_arg ptype_name.txt in
let lident = longident_loc_from_label typ_decl.ptype_name in
let p =
ppat_constraint ~loc
(ppat_var ~loc { loc; txt })
[%type:
[%t ptyp_constr ~loc lident []] option
Graphql_lwt.Schema.Arg.arg_typ]
in
[
pstr_value ~loc Nonrecursive
[
Vb.mk p
(type_to_arg ~non_null:false
~name:(upper_snake ptype_name.txt ^ "Input")
manifest);
];
]
| { ptype_kind = Ptype_abstract; ptype_manifest = None; _ } ->
Location.raise_errorf ~loc
"Abstract types with no manifest are currently unsupported for \
generating GraphQL arguments."
| { ptype_kind = Ptype_open; _ } ->
Location.raise_errorf ~loc
"Open types are currently unsupported for generating GraphQL \
arguments.")
type_decls)
let generate_intf_schema ~ctxt (_rec_flag, type_decls) :
Ppxlib.Ast.signature_item list =
let loc = Expansion_context.Deriver.derived_item_loc ctxt in
List.map
(fun typ_decl ->
match typ_decl with
| { ptype_kind = Ptype_abstract | Ptype_record _; _ } ->
let lident = longident_loc_from_label typ_decl.ptype_name in
[
psig_value ~loc
(Val.mk
{
loc = typ_decl.ptype_name.loc;
txt = mangle_name_schema typ_decl.ptype_name.txt;
}
[%type:
( ctx,
[%t ptyp_constr ~loc lident []] option )
Graphql_lwt.Schema.typ]);
]
| _ ->
Location.raise_errorf ~loc
"Cannot derive anything for this type (intf schema)")
type_decls
|> List.concat
let generate_intf_arg ~ctxt (_rec_flag, type_decls) :
Ppxlib.Ast.signature_item list =
let loc = Expansion_context.Deriver.derived_item_loc ctxt in
List.map
(fun typ_decl ->
match typ_decl with
| { ptype_kind = Ptype_abstract | Ptype_record _; _ } ->
let lident = longident_loc_from_label typ_decl.ptype_name in
[
psig_value ~loc
(Val.mk
{
loc = typ_decl.ptype_name.loc;
txt = mangle_name_arg typ_decl.ptype_name.txt;
}
[%type:
[%t ptyp_constr ~loc lident []] option
Graphql_lwt.Schema.Arg.arg_typ]);
]
| _ ->
Location.raise_errorf ~loc
"Cannot derive anything for this type (intf arg)")
type_decls
|> List.concat
let impl_generator impl = Deriving.Generator.V2.make_noarg impl
let intf_generator intf = Deriving.Generator.V2.make_noarg intf
let deriver =
let schema =
Deriving.add "graphql_schema"
~str_type_decl:(impl_generator generate_impl_schema)
~sig_type_decl:(intf_generator generate_intf_schema)
in
let arg =
Deriving.add "graphql_arg"
~str_type_decl:(impl_generator generate_impl_arg)
~sig_type_decl:(intf_generator generate_intf_arg)
in
Deriving.add_alias "graphql" [ schema; arg ]
-----------------------------------------------------------------------------
Copyright ( c ) 2022 < >
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE .
-----------------------------------------------------------------------------
Copyright (c) 2022 Patrick Ferris <>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
-----------------------------------------------------------------------------*)
| null | https://raw.githubusercontent.com/patricoferris/ppx_deriving_graphql/baebe154de846b06961b2661fe9311c95ca30d63/src/ppx_deriving_graphql.ml | ocaml | <><><> Attributes <><><>
<><><> Schemas <><><>
| [%type: char] -> [%expr fun (x : char) -> `String (String.make 1 x)]
<><><> Arguments <><><>
| [%type: char] -> [%expr fun (x : char) -> `String (String.make 1 x)] | -----------------------------------------------------------------------------
Copyright ( c ) 2022 < >
Distributed under the MIT license . See terms at the end of this file .
-----------------------------------------------------------------------------
Copyright (c) 2022 Patrick Ferris <>
Distributed under the MIT license. See terms at the end of this file.
-----------------------------------------------------------------------------*)
open Ppxlib
open Ast_helper
open Ast_builder.Default
let upper_snake s =
String.split_on_char '_' s
|> List.map String.capitalize_ascii
|> String.concat ""
let upper_first_lower_snake s =
let words =
match String.split_on_char '_' s with
| s :: rest ->
String.uncapitalize_ascii s :: List.map String.capitalize_ascii rest
| [] -> []
in
String.concat "" words
let mangle_name_schema =
let base = "schema_typ" in
function "t" -> base | x -> x ^ "_" ^ base
let mangle_name_arg =
let base = "arg_typ" in
function "t" -> base | x -> x ^ "_" ^ base
let mangle_longident_schema = function
| Lident l -> Lident (mangle_name_schema l)
| Ldot (l, n) -> Ldot (l, mangle_name_schema n)
| _ -> assert false
let mangle_longident_arg = function
| Lident l -> Lident (mangle_name_arg l)
| Ldot (l, n) -> Ldot (l, mangle_name_arg n)
| _ -> assert false
let longident_loc_from_label lbl = { txt = Lident lbl.txt; loc = lbl.loc }
module Attrs = struct
let resolver =
Attribute.declare "graphql.schema.resolver"
Attribute.Context.label_declaration
Ast_pattern.(single_expr_payload __)
(fun x -> x)
let schema_doc =
Attribute.declare "graphql.schema.doc" Attribute.Context.label_declaration
Ast_pattern.(single_expr_payload __)
(fun x -> x)
let constr_doc =
Attribute.declare "graphql.schema.doc"
Attribute.Context.constructor_declaration
Ast_pattern.(single_expr_payload __)
(fun x -> x)
end
let rec type_to_schema ?(name = "") ?(non_null = true) typ =
let loc = typ.ptyp_loc in
let fn =
if non_null then [%expr Graphql_lwt.Schema.non_null] else [%expr Fun.id]
in
match typ with
| [%type: int] -> [%expr [%e fn] Graphql_lwt.Schema.int]
| [%type: float] -> [%expr [%e fn] Graphql_lwt.Schema.float]
| [%type: string] -> [%expr [%e fn] Graphql_lwt.Schema.string]
| [%type: bool] -> [%expr [%e fn] Graphql_lwt.Schema.bool]
| [%type: [%t? typ] list] ->
[%expr [%e fn] (Graphql_lwt.Schema.list [%e type_to_schema typ])]
| [%type: [%t? typ] option] -> [%expr [%e type_to_schema ~non_null:false typ]]
| { ptyp_desc = Ptyp_constr ({ txt; _ }, _args); _ } ->
let name = mangle_longident_schema txt |> Longident.name in
[%expr [%e fn] [%e evar ~loc name]]
| { ptyp_desc = Ptyp_var name; _ } ->
Location.raise_errorf "Cannot derive schema for ptyp_var %s" name
| { ptyp_desc = Ptyp_poly _; _ } ->
Location.raise_errorf "Polymorphic functions not currently supported"
| { ptyp_desc = Ptyp_tuple _; _ } ->
Location.raise_errorf "Tuples not currently supported"
| { ptyp_desc = Ptyp_variant (row_fields, _, _); _ } ->
let enum_values =
List.fold_left
(fun expr_acc (field : row_field) ->
match field.prf_desc with
| Rtag (label, true, []) ->
let v = pexp_variant ~loc label.txt None in
[%expr
Graphql_lwt.Schema.enum_value [%e estring ~loc label.txt]
~value:[%e v]
:: [%e expr_acc]]
| _ -> failwith "Not implemented")
[%expr []] row_fields
in
[%expr
Graphql_lwt.Schema.enum [%e estring ~loc name] ~values:[%e enum_values]]
| _ ->
Location.raise_errorf ~loc
"Cannot derive anything for this type (typ_to_schema)"
let record_to_schema ~loc ~label fields =
let label = estring ~loc label in
let fields =
List.fold_left
(fun expr_acc (field : label_declaration) ->
let var_name f = f ~loc "p" in
let field_name = field.pld_name.txt in
let accessor_name : longident_loc =
{ txt = Lident field_name; loc = field.pld_name.loc }
in
let accessor = pexp_field ~loc (var_name evar) accessor_name in
let accessor_func =
match Attribute.get Attrs.resolver field with
| Some expr -> expr
| None -> [%expr fun _ [%p var_name pvar] -> [%e accessor]]
in
let field_doc =
match Attribute.get Attrs.schema_doc field with
| Some doc -> [%expr Some [%e doc]]
| None -> [%expr None]
in
[%expr
Graphql_lwt.Schema.field ?doc:[%e field_doc]
[%e estring ~loc (upper_first_lower_snake field_name)]
~typ:[%e type_to_schema field.pld_type]
~args:[] ~resolve:[%e accessor_func]
:: [%e expr_acc]])
[%expr []] fields
in
[%expr Graphql_lwt.Schema.obj [%e label] ~fields:[%e fields]]
let rec type_to_arg ?(name = "") ?(non_null = true) typ =
let loc = typ.ptyp_loc in
let fn =
if non_null then [%expr Graphql_lwt.Schema.Arg.non_null] else [%expr Fun.id]
in
match typ with
| [%type: int] -> [%expr [%e fn] Graphql_lwt.Schema.Arg.int]
| [%type: float] -> [%expr [%e fn] Graphql_lwt.Schema.Arg.float]
| [%type: string] -> [%expr [%e fn] Graphql_lwt.Schema.Arg.string]
| [%type: bool] -> [%expr [%e fn] Graphql_lwt.Schema.Arg.bool]
| [%type: [%t? typ] list] ->
[%expr [%e fn] (Graphql_lwt.Schema.Arg.list [%e type_to_arg typ])]
| [%type: [%t? typ] option] -> [%expr [%e type_to_arg ~non_null:false typ]]
| { ptyp_desc = Ptyp_constr ({ txt; _ }, _args); _ } ->
let name = mangle_longident_arg txt |> Longident.name in
[%expr [%e fn] [%e evar ~loc name]]
| { ptyp_desc = Ptyp_var name; _ } ->
Location.raise_errorf "Cannot derive arg for ptyp_var %s" name
| { ptyp_desc = Ptyp_poly _; _ } ->
Location.raise_errorf "Polymorphic functions not currently supported"
| { ptyp_desc = Ptyp_tuple _; _ } ->
Location.raise_errorf "Tuples not currently supported"
| { ptyp_desc = Ptyp_variant (row_fields, _, _); _ } ->
let enum_values =
List.fold_left
(fun expr_acc (field : row_field) ->
match field.prf_desc with
| Rtag (label, true, []) ->
let v = pexp_variant ~loc label.txt None in
[%expr
Graphql_lwt.Schema.enum_value [%e estring ~loc label.txt]
~value:[%e v]
:: [%e expr_acc]]
| _ -> failwith "Not implemented")
[%expr []] row_fields
in
[%expr
Graphql_lwt.Schema.Arg.enum [%e estring ~loc name]
~values:[%e enum_values]]
| _ ->
Location.raise_errorf ~loc
"Cannot derive anything for this type (typ_to_arg)"
let record_to_arg ~loc ~label fields =
let label = estring ~loc label in
let efields =
List.fold_left
(fun expr_acc (field : label_declaration) ->
let field_name = field.pld_name.txt in
let field_doc =
match Attribute.get Attrs.schema_doc field with
| Some doc -> [%expr Some [%e doc]]
| None -> [%expr None]
in
[%expr
Graphql_lwt.Schema.Arg.arg ?doc:[%e field_doc]
[%e estring ~loc (upper_first_lower_snake field_name)]
~typ:[%e type_to_arg field.pld_type]
:: [%e expr_acc]])
[%expr []] fields
in
let record =
let field_bindings =
List.map
(fun lbl ->
( longident_loc_from_label lbl.pld_name,
[%expr [%e evar ~loc lbl.pld_name.txt]] ))
fields
in
pexp_record ~loc field_bindings None
in
let ecoerce =
List.fold_left
(fun expr_acc (field : label_declaration) ->
let field_name = pvar ~loc field.pld_name.txt in
[%expr fun [%p field_name] -> [%e expr_acc]])
[%expr [%e record]] fields
in
[%expr
Graphql_lwt.Schema.Arg.obj [%e label] ~fields:[%e efields]
~coerce:[%e ecoerce]]
let generate_impl_schema ~ctxt (_rec_flag, type_decls) =
let loc = Expansion_context.Deriver.derived_item_loc ctxt in
List.concat
(List.map
(fun typ_decl ->
match typ_decl with
| { ptype_kind = Ptype_variant constructors; ptype_name; _ } ->
let txt = mangle_name_schema ptype_name.txt in
let p =
ppat_constraint ~loc
(ppat_var ~loc { loc; txt })
[%type: (ctx, _) Graphql_lwt.Schema.typ]
in
let enum_values =
List.fold_left
(fun expr_acc (constr : constructor_declaration) ->
let v =
pexp_construct ~loc
(longident_loc_from_label constr.pcd_name)
None
in
let constr_doc =
match Attribute.get Attrs.constr_doc constr with
| Some doc -> [%expr Some [%e doc]]
| None -> [%expr None]
in
[%expr
Graphql_lwt.Schema.enum_value
[%e estring ~loc constr.pcd_name.txt]
?doc:[%e constr_doc] ~value:[%e v]
:: [%e expr_acc]])
[%expr []] constructors
in
[
pstr_value ~loc Nonrecursive
[
Vb.mk p
[%expr
Graphql_lwt.Schema.enum
[%e estring ~loc (upper_snake ptype_name.txt)]
~values:[%e enum_values]];
];
]
| { ptype_kind = Ptype_record fields; ptype_loc = _; ptype_name; _ } ->
let txt = mangle_name_schema ptype_name.txt in
let p =
ppat_constraint ~loc
(ppat_var ~loc { loc; txt })
[%type: (ctx, _) Graphql_lwt.Schema.typ]
in
[
pstr_value ~loc Nonrecursive
[
Vb.mk p
(record_to_schema ~loc
~label:(upper_snake ptype_name.txt)
fields);
];
]
| {
ptype_kind = Ptype_abstract;
ptype_manifest = Some manifest;
ptype_name;
_;
} ->
let txt = mangle_name_schema ptype_name.txt in
let p =
ppat_constraint ~loc
(ppat_var ~loc { loc; txt })
[%type: (ctx, _) Graphql_lwt.Schema.typ]
in
[
pstr_value ~loc Nonrecursive
[
Vb.mk p
(type_to_schema
~name:(upper_snake ptype_name.txt)
~non_null:false manifest);
];
]
| { ptype_kind = Ptype_abstract; ptype_manifest = None; _ } ->
Location.raise_errorf ~loc
"Abstract types with no manifest are currently unsupported for \
generating GraphQL schemas."
| { ptype_kind = Ptype_open; _ } ->
Location.raise_errorf ~loc
"Open types are currently unsupported for generating GraphQL \
schemas.")
type_decls)
let generate_impl_arg ~ctxt (_rec_flag, type_decls) =
let loc = Expansion_context.Deriver.derived_item_loc ctxt in
List.concat
(List.map
(fun typ_decl ->
match typ_decl with
| { ptype_kind = Ptype_variant constructors; ptype_name; _ } ->
let txt = mangle_name_arg ptype_name.txt in
let lident = longident_loc_from_label typ_decl.ptype_name in
let p =
ppat_constraint ~loc
(ppat_var ~loc { loc; txt })
[%type:
[%t ptyp_constr ~loc lident []] option
Graphql_lwt.Schema.Arg.arg_typ]
in
let enum_values =
List.fold_left
(fun expr_acc (constr : constructor_declaration) ->
let v =
pexp_construct ~loc
(longident_loc_from_label constr.pcd_name)
None
in
let field_doc =
match Attribute.get Attrs.constr_doc constr with
| Some doc -> [%expr Some [%e doc]]
| None -> [%expr None]
in
[%expr
Graphql_lwt.Schema.enum_value
[%e estring ~loc constr.pcd_name.txt]
?doc:[%e field_doc] ~value:[%e v]
:: [%e expr_acc]])
[%expr []] constructors
in
[
pstr_value ~loc Nonrecursive
[
Vb.mk p
[%expr
Graphql_lwt.Schema.Arg.enum
[%e
estring ~loc (upper_snake ptype_name.txt ^ "Input")]
~values:[%e enum_values]];
];
]
| { ptype_kind = Ptype_record fields; ptype_loc = _; ptype_name; _ } ->
let txt = mangle_name_arg ptype_name.txt in
let lident = longident_loc_from_label typ_decl.ptype_name in
let p =
ppat_constraint ~loc
(ppat_var ~loc { loc; txt })
[%type:
[%t ptyp_constr ~loc lident []] option
Graphql_lwt.Schema.Arg.arg_typ]
in
[
pstr_value ~loc Nonrecursive
[
Vb.mk p
(record_to_arg ~loc
~label:(upper_snake ptype_name.txt ^ "Input")
fields);
];
]
| {
ptype_kind = Ptype_abstract;
ptype_manifest = Some manifest;
ptype_name;
_;
} ->
let txt = mangle_name_arg ptype_name.txt in
let lident = longident_loc_from_label typ_decl.ptype_name in
let p =
ppat_constraint ~loc
(ppat_var ~loc { loc; txt })
[%type:
[%t ptyp_constr ~loc lident []] option
Graphql_lwt.Schema.Arg.arg_typ]
in
[
pstr_value ~loc Nonrecursive
[
Vb.mk p
(type_to_arg ~non_null:false
~name:(upper_snake ptype_name.txt ^ "Input")
manifest);
];
]
| { ptype_kind = Ptype_abstract; ptype_manifest = None; _ } ->
Location.raise_errorf ~loc
"Abstract types with no manifest are currently unsupported for \
generating GraphQL arguments."
| { ptype_kind = Ptype_open; _ } ->
Location.raise_errorf ~loc
"Open types are currently unsupported for generating GraphQL \
arguments.")
type_decls)
let generate_intf_schema ~ctxt (_rec_flag, type_decls) :
Ppxlib.Ast.signature_item list =
let loc = Expansion_context.Deriver.derived_item_loc ctxt in
List.map
(fun typ_decl ->
match typ_decl with
| { ptype_kind = Ptype_abstract | Ptype_record _; _ } ->
let lident = longident_loc_from_label typ_decl.ptype_name in
[
psig_value ~loc
(Val.mk
{
loc = typ_decl.ptype_name.loc;
txt = mangle_name_schema typ_decl.ptype_name.txt;
}
[%type:
( ctx,
[%t ptyp_constr ~loc lident []] option )
Graphql_lwt.Schema.typ]);
]
| _ ->
Location.raise_errorf ~loc
"Cannot derive anything for this type (intf schema)")
type_decls
|> List.concat
let generate_intf_arg ~ctxt (_rec_flag, type_decls) :
Ppxlib.Ast.signature_item list =
let loc = Expansion_context.Deriver.derived_item_loc ctxt in
List.map
(fun typ_decl ->
match typ_decl with
| { ptype_kind = Ptype_abstract | Ptype_record _; _ } ->
let lident = longident_loc_from_label typ_decl.ptype_name in
[
psig_value ~loc
(Val.mk
{
loc = typ_decl.ptype_name.loc;
txt = mangle_name_arg typ_decl.ptype_name.txt;
}
[%type:
[%t ptyp_constr ~loc lident []] option
Graphql_lwt.Schema.Arg.arg_typ]);
]
| _ ->
Location.raise_errorf ~loc
"Cannot derive anything for this type (intf arg)")
type_decls
|> List.concat
let impl_generator impl = Deriving.Generator.V2.make_noarg impl
let intf_generator intf = Deriving.Generator.V2.make_noarg intf
let deriver =
let schema =
Deriving.add "graphql_schema"
~str_type_decl:(impl_generator generate_impl_schema)
~sig_type_decl:(intf_generator generate_intf_schema)
in
let arg =
Deriving.add "graphql_arg"
~str_type_decl:(impl_generator generate_impl_arg)
~sig_type_decl:(intf_generator generate_intf_arg)
in
Deriving.add_alias "graphql" [ schema; arg ]
-----------------------------------------------------------------------------
Copyright ( c ) 2022 < >
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE .
-----------------------------------------------------------------------------
Copyright (c) 2022 Patrick Ferris <>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
-----------------------------------------------------------------------------*)
|
5b3cdeb905e2f2293211e8d5d4ac295a37458d469382bef937b1af0fe74a37d1 | froozen/kademlia | Networking.hs | |
Module : Network . Kademlia . Networking
Description : All of the UDP network code
Network . Kademlia . Networking implements all the UDP network functionality .
Module : Network.Kademlia.Networking
Description : All of the UDP network code
Network.Kademlia.Networking implements all the UDP network functionality.
-}
module Network.Kademlia.Networking
( openOn
, startRecvProcess
, send
, expect
, closeK
, KademliaHandle
) where
Just to make sure I 'll only use the ByteString functions
import Network.Socket hiding (send, sendTo, recv, recvFrom, Closed)
import qualified Network.Socket.ByteString as S
import Data.ByteString
import Control.Monad (forever, unless)
import Control.Exception (finally)
import Control.Concurrent
import Control.Concurrent.STM
import Control.Concurrent.Chan
import Control.Concurrent.MVar
import System.IO.Error (ioError, userError)
import Network.Kademlia.Types
import Network.Kademlia.Protocol
import Network.Kademlia.ReplyQueue
| A handle to a UDP socket running the Kademlia connection
data KademliaHandle i a = KH {
kSock :: Socket
, sendThread :: ThreadId
, sendChan :: Chan (Command i a, Peer)
, replyQueue :: ReplyQueue i a
, recvThread :: MVar ThreadId
}
| Open a Kademlia connection on specified port and return a corresponding
-- KademliaHandle
openOn :: (Serialize i, Serialize a) => String -> i -> ReplyQueue i a
-> IO (KademliaHandle i a)
openOn port id rq = withSocketsDo $ do
-- Get addr to bind to
(serveraddr:_) <- getAddrInfo
(Just (defaultHints {addrFlags = [AI_PASSIVE]}))
Nothing (Just port)
-- Create socket and bind to it
sock <- socket (addrFamily serveraddr) Datagram defaultProtocol
bindSocket sock (addrAddress serveraddr)
chan <- newChan
tId <- forkIO . sendProcess sock id $ chan
mvar <- newEmptyMVar
-- Return the handle
return $ KH sock tId chan rq mvar
sendProcess :: (Serialize i, Serialize a) => Socket -> i
-> Chan (Command i a, Peer) -> IO ()
sendProcess sock id chan = (withSocketsDo . forever $ do
(cmd, Peer host port) <- readChan chan
-- Get Peer's address
(peeraddr:_) <- getAddrInfo Nothing (Just host)
(Just . show . fromIntegral $ port)
-- Send the signal
let sig = serialize id cmd
S.sendTo sock sig (addrAddress peeraddr))
-- Close socket on exception (ThreadKilled)
`finally` sClose sock
-- | Dispatch the receiving process
--
Receive a signal and first try to dispatch it via the ReplyQueue . If that
-- fails, send it to the supplied default channel instead.
--
This throws an exception if called a second time .
startRecvProcess :: (Serialize i, Serialize a, Eq i, Eq a) => KademliaHandle i a
-> IO ()
startRecvProcess kh = do
tId <- forkIO $ (withSocketsDo . forever $ do
-- Read from socket
(received, addr) <- S.recvFrom (kSock kh) 1500
-- Try to create peer
peer <- toPeer addr
case peer of
Nothing -> return ()
Just p ->
-- Try parsing the signal
case parse p received of
Left _ -> return ()
Right sig ->
-- Send the signal to the receivng process of instance
writeChan (timeoutChan . replyQueue $ kh) $ Answer sig)
-- Send Closed reply to all handlers
`finally` do
flush . replyQueue $ kh
writeChan (timeoutChan . replyQueue $ kh) Closed
success <- tryPutMVar (recvThread kh) tId
unless success . ioError . userError $ "Receiving process already running"
| Send a Signal to a Peer over the connection corresponding to the
-- KademliaHandle
send :: (Serialize i, Serialize a) => KademliaHandle i a -> Peer -> Command i a
-> IO ()
send kh peer cmd = writeChan (sendChan kh) (cmd, peer)
-- | Register a handler channel for a Reply
expect :: (Serialize i, Serialize a, Eq i) => KademliaHandle i a
-> ReplyRegistration i -> Chan (Reply i a) -> IO ()
expect kh reg = register reg . replyQueue $ kh
-- | Close the connection corresponding to a KademliaHandle
closeK :: KademliaHandle i a -> IO ()
closeK kh = do
-- Kill recvThread
empty <- isEmptyMVar . recvThread $ kh
unless empty $ do
tId <- takeMVar . recvThread $ kh
killThread tId
killThread . sendThread $ kh
yield
| null | https://raw.githubusercontent.com/froozen/kademlia/60f05d0455b92960a1a51abd2932cd12d9e422db/src/Network/Kademlia/Networking.hs | haskell | KademliaHandle
Get addr to bind to
Create socket and bind to it
Return the handle
Get Peer's address
Send the signal
Close socket on exception (ThreadKilled)
| Dispatch the receiving process
fails, send it to the supplied default channel instead.
Read from socket
Try to create peer
Try parsing the signal
Send the signal to the receivng process of instance
Send Closed reply to all handlers
KademliaHandle
| Register a handler channel for a Reply
| Close the connection corresponding to a KademliaHandle
Kill recvThread | |
Module : Network . Kademlia . Networking
Description : All of the UDP network code
Network . Kademlia . Networking implements all the UDP network functionality .
Module : Network.Kademlia.Networking
Description : All of the UDP network code
Network.Kademlia.Networking implements all the UDP network functionality.
-}
module Network.Kademlia.Networking
( openOn
, startRecvProcess
, send
, expect
, closeK
, KademliaHandle
) where
Just to make sure I 'll only use the ByteString functions
import Network.Socket hiding (send, sendTo, recv, recvFrom, Closed)
import qualified Network.Socket.ByteString as S
import Data.ByteString
import Control.Monad (forever, unless)
import Control.Exception (finally)
import Control.Concurrent
import Control.Concurrent.STM
import Control.Concurrent.Chan
import Control.Concurrent.MVar
import System.IO.Error (ioError, userError)
import Network.Kademlia.Types
import Network.Kademlia.Protocol
import Network.Kademlia.ReplyQueue
| A handle to a UDP socket running the Kademlia connection
data KademliaHandle i a = KH {
kSock :: Socket
, sendThread :: ThreadId
, sendChan :: Chan (Command i a, Peer)
, replyQueue :: ReplyQueue i a
, recvThread :: MVar ThreadId
}
| Open a Kademlia connection on specified port and return a corresponding
openOn :: (Serialize i, Serialize a) => String -> i -> ReplyQueue i a
-> IO (KademliaHandle i a)
openOn port id rq = withSocketsDo $ do
(serveraddr:_) <- getAddrInfo
(Just (defaultHints {addrFlags = [AI_PASSIVE]}))
Nothing (Just port)
sock <- socket (addrFamily serveraddr) Datagram defaultProtocol
bindSocket sock (addrAddress serveraddr)
chan <- newChan
tId <- forkIO . sendProcess sock id $ chan
mvar <- newEmptyMVar
return $ KH sock tId chan rq mvar
sendProcess :: (Serialize i, Serialize a) => Socket -> i
-> Chan (Command i a, Peer) -> IO ()
sendProcess sock id chan = (withSocketsDo . forever $ do
(cmd, Peer host port) <- readChan chan
(peeraddr:_) <- getAddrInfo Nothing (Just host)
(Just . show . fromIntegral $ port)
let sig = serialize id cmd
S.sendTo sock sig (addrAddress peeraddr))
`finally` sClose sock
Receive a signal and first try to dispatch it via the ReplyQueue . If that
This throws an exception if called a second time .
startRecvProcess :: (Serialize i, Serialize a, Eq i, Eq a) => KademliaHandle i a
-> IO ()
startRecvProcess kh = do
tId <- forkIO $ (withSocketsDo . forever $ do
(received, addr) <- S.recvFrom (kSock kh) 1500
peer <- toPeer addr
case peer of
Nothing -> return ()
Just p ->
case parse p received of
Left _ -> return ()
Right sig ->
writeChan (timeoutChan . replyQueue $ kh) $ Answer sig)
`finally` do
flush . replyQueue $ kh
writeChan (timeoutChan . replyQueue $ kh) Closed
success <- tryPutMVar (recvThread kh) tId
unless success . ioError . userError $ "Receiving process already running"
| Send a Signal to a Peer over the connection corresponding to the
send :: (Serialize i, Serialize a) => KademliaHandle i a -> Peer -> Command i a
-> IO ()
send kh peer cmd = writeChan (sendChan kh) (cmd, peer)
expect :: (Serialize i, Serialize a, Eq i) => KademliaHandle i a
-> ReplyRegistration i -> Chan (Reply i a) -> IO ()
expect kh reg = register reg . replyQueue $ kh
closeK :: KademliaHandle i a -> IO ()
closeK kh = do
empty <- isEmptyMVar . recvThread $ kh
unless empty $ do
tId <- takeMVar . recvThread $ kh
killThread tId
killThread . sendThread $ kh
yield
|
b6bd75703a8ae1315c77803130695d641318b64c365a46f5207dbc24051a86b1 | dongcarl/guix | lint.scm | ;;; GNU Guix --- Functional package management for GNU
Copyright © 2014 < >
Copyright © 2014 , 2015 < >
Copyright © 2013 , 2014 , 2015 , 2016 , 2017 , 2018 , 2019 , 2020 < >
Copyright © 2015 , 2016 < >
Copyright © 2016 < dannym+ >
Copyright © 2016 < >
Copyright © 2017 < >
Copyright © 2017 < >
Copyright © 2017 , 2018 < >
Copyright © 2018 , 2019 Arun Isaac < >
Copyright © 2019 , 2020 < >
Copyright © 2020 < >
;;;
;;; This file is part of GNU Guix.
;;;
GNU is free software ; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 3 of the License , or ( at
;;; your option) any later version.
;;;
;;; GNU Guix is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (guix scripts lint)
#:use-module (guix packages)
#:use-module (guix lint)
#:use-module (guix ui)
#:use-module (guix store)
#:use-module (guix scripts)
#:use-module (guix scripts build)
#:use-module (gnu packages)
#:use-module (ice-9 match)
#:use-module (ice-9 format)
#:use-module (srfi srfi-1)
#:use-module (srfi srfi-37)
#:export (guix-lint
run-checkers))
(define (emit-warnings warnings)
;; Emit a warning about PACKAGE, printing the location of FIELD if it is
;; given, the location of PACKAGE otherwise, the full name of PACKAGE and the
;; provided MESSAGE.
(for-each
(lambda (lint-warning)
(let* ((package (lint-warning-package lint-warning))
(name (package-name package))
(version (package-version package))
(loc (lint-warning-location lint-warning))
(message (lint-warning-message lint-warning)))
(parameterize
((guix-warning-port (current-output-port)))
(info loc (G_ "~a@~a: ~a~%")
name version message))))
warnings))
(define* (run-checkers package checkers #:key store)
"Run the given CHECKERS on PACKAGE."
(let ((tty? (isatty? (current-error-port))))
(for-each (lambda (checker)
(when tty?
(format (current-error-port) "checking ~a@~a [~a]...\x1b[K\r"
(package-name package) (package-version package)
(lint-checker-name checker))
(force-output (current-error-port)))
(emit-warnings
(if (lint-checker-requires-store? checker)
((lint-checker-check checker) package #:store store)
((lint-checker-check checker) package))))
checkers)
(when tty?
(format (current-error-port) "\x1b[K")
(force-output (current-error-port)))))
(define (list-checkers-and-exit checkers)
;; Print information about all available checkers and exit.
(format #t (G_ "Available checkers:~%"))
(for-each (lambda (checker)
(format #t "- ~a: ~a~%"
(lint-checker-name checker)
(G_ (lint-checker-description checker))))
checkers)
(exit 0))
;;;
;;; Command-line options.
;;;
(define %default-options
Alist of default option values .
'())
(define (show-help)
(display (G_ "Usage: guix lint [OPTION]... [PACKAGE]...
Run a set of checkers on the specified package; if none is specified,
run the checkers on all packages.\n"))
(display (G_ "
-c, --checkers=CHECKER1,CHECKER2...
only run the specified checkers"))
(display (G_ "
-x, --exclude=CHECKER1,CHECKER2...
exclude the specified checkers"))
(display (G_ "
-n, --no-network only run checkers that do not access the network"))
(display (G_ "
-L, --load-path=DIR prepend DIR to the package module search path"))
(newline)
(display (G_ "
-h, --help display this help and exit"))
(display (G_ "
-l, --list-checkers display the list of available lint checkers"))
(display (G_ "
-V, --version display version information and exit"))
(newline)
(show-bug-report-information))
(define (option-checker short-long)
Factorize the creation of the two options -c/--checkers and -x/--exclude ,
;; see %options. The parameter SHORT-LONG is the list containing the short
;; and long name. The alist uses the long name as symbol.
(option short-long #t #f
(lambda (opt name arg result)
(let ((names (map string->symbol (string-split arg #\,)))
(checker-names (map lint-checker-name %all-checkers))
(option-name (string->symbol (match short-long
((short long) long)))))
(for-each (lambda (c)
(unless (memq c checker-names)
(leave (G_ "~a: invalid checker~%") c)))
names)
(alist-cons option-name
(filter (lambda (checker)
(member (lint-checker-name checker)
names))
%all-checkers)
result)))))
(define %options
;; Specification of the command-line options.
;; TODO: add some options:
;; * --certainty=[low,medium,high]: only run checkers that have at least this
;; 'certainty'.
(list (option-checker '(#\c "checkers"))
(option-checker '(#\x "exclude"))
(option '(#\n "no-network") #f #f
(lambda (opt name arg result)
(alist-cons 'no-network? #t result)))
(find (lambda (option)
(member "load-path" (option-names option)))
%standard-build-options)
(option '(#\h "help") #f #f
(lambda args
(show-help)
(exit 0)))
(option '(#\l "list-checkers") #f #f
(lambda (opt name arg result)
(alist-cons 'list?
#t
result)))
(option '(#\V "version") #f #f
(lambda args
(show-version-and-exit "guix lint")))))
;;;
;;; Entry Point
;;;
(define-command (guix-lint . args)
(category packaging)
(synopsis "validate package definitions")
(define (parse-options)
;; Return the alist of option values.
(parse-command-line args %options (list %default-options)
#:build-options? #f))
(let* ((opts (parse-options))
(args (filter-map (match-lambda
(('argument . value)
value)
(_ #f))
(reverse opts)))
(no-checkers (or (assoc-ref opts 'exclude) '()))
(the-checkers (filter (lambda (checker)
(not (member checker no-checkers)))
(or (assoc-ref opts 'checkers) %all-checkers)))
(checkers
(if (assoc-ref opts 'no-network?)
(filter (lambda (checker)
(member checker %local-checkers))
the-checkers)
the-checkers)))
(when (assoc-ref opts 'list?)
(list-checkers-and-exit checkers))
(with-error-handling
(let ((any-lint-checker-requires-store?
(any lint-checker-requires-store? checkers)))
(define (call-maybe-with-store proc)
(if any-lint-checker-requires-store?
(with-store store
(proc store))
(proc #f)))
(call-maybe-with-store
(lambda (store)
(cond
((null? args)
(fold-packages (lambda (p r) (run-checkers p checkers
#:store store)) '()))
(else
(for-each (lambda (spec)
(run-checkers (specification->package spec) checkers
#:store store))
args)))))))))
| null | https://raw.githubusercontent.com/dongcarl/guix/82543e9649da2da9a5285ede4ec4f718fd740fcb/guix/scripts/lint.scm | scheme | GNU Guix --- Functional package management for GNU
This file is part of GNU Guix.
you can redistribute it and/or modify it
either version 3 of the License , or ( at
your option) any later version.
GNU Guix is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
Emit a warning about PACKAGE, printing the location of FIELD if it is
given, the location of PACKAGE otherwise, the full name of PACKAGE and the
provided MESSAGE.
Print information about all available checkers and exit.
Command-line options.
if none is specified,
see %options. The parameter SHORT-LONG is the list containing the short
and long name. The alist uses the long name as symbol.
Specification of the command-line options.
TODO: add some options:
* --certainty=[low,medium,high]: only run checkers that have at least this
'certainty'.
Entry Point
Return the alist of option values. | Copyright © 2014 < >
Copyright © 2014 , 2015 < >
Copyright © 2013 , 2014 , 2015 , 2016 , 2017 , 2018 , 2019 , 2020 < >
Copyright © 2015 , 2016 < >
Copyright © 2016 < dannym+ >
Copyright © 2016 < >
Copyright © 2017 < >
Copyright © 2017 < >
Copyright © 2017 , 2018 < >
Copyright © 2018 , 2019 Arun Isaac < >
Copyright © 2019 , 2020 < >
Copyright © 2020 < >
under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (guix scripts lint)
#:use-module (guix packages)
#:use-module (guix lint)
#:use-module (guix ui)
#:use-module (guix store)
#:use-module (guix scripts)
#:use-module (guix scripts build)
#:use-module (gnu packages)
#:use-module (ice-9 match)
#:use-module (ice-9 format)
#:use-module (srfi srfi-1)
#:use-module (srfi srfi-37)
#:export (guix-lint
run-checkers))
(define (emit-warnings warnings)
(for-each
(lambda (lint-warning)
(let* ((package (lint-warning-package lint-warning))
(name (package-name package))
(version (package-version package))
(loc (lint-warning-location lint-warning))
(message (lint-warning-message lint-warning)))
(parameterize
((guix-warning-port (current-output-port)))
(info loc (G_ "~a@~a: ~a~%")
name version message))))
warnings))
(define* (run-checkers package checkers #:key store)
"Run the given CHECKERS on PACKAGE."
(let ((tty? (isatty? (current-error-port))))
(for-each (lambda (checker)
(when tty?
(format (current-error-port) "checking ~a@~a [~a]...\x1b[K\r"
(package-name package) (package-version package)
(lint-checker-name checker))
(force-output (current-error-port)))
(emit-warnings
(if (lint-checker-requires-store? checker)
((lint-checker-check checker) package #:store store)
((lint-checker-check checker) package))))
checkers)
(when tty?
(format (current-error-port) "\x1b[K")
(force-output (current-error-port)))))
(define (list-checkers-and-exit checkers)
(format #t (G_ "Available checkers:~%"))
(for-each (lambda (checker)
(format #t "- ~a: ~a~%"
(lint-checker-name checker)
(G_ (lint-checker-description checker))))
checkers)
(exit 0))
(define %default-options
Alist of default option values .
'())
(define (show-help)
(display (G_ "Usage: guix lint [OPTION]... [PACKAGE]...
run the checkers on all packages.\n"))
(display (G_ "
-c, --checkers=CHECKER1,CHECKER2...
only run the specified checkers"))
(display (G_ "
-x, --exclude=CHECKER1,CHECKER2...
exclude the specified checkers"))
(display (G_ "
-n, --no-network only run checkers that do not access the network"))
(display (G_ "
-L, --load-path=DIR prepend DIR to the package module search path"))
(newline)
(display (G_ "
-h, --help display this help and exit"))
(display (G_ "
-l, --list-checkers display the list of available lint checkers"))
(display (G_ "
-V, --version display version information and exit"))
(newline)
(show-bug-report-information))
(define (option-checker short-long)
Factorize the creation of the two options -c/--checkers and -x/--exclude ,
(option short-long #t #f
(lambda (opt name arg result)
(let ((names (map string->symbol (string-split arg #\,)))
(checker-names (map lint-checker-name %all-checkers))
(option-name (string->symbol (match short-long
((short long) long)))))
(for-each (lambda (c)
(unless (memq c checker-names)
(leave (G_ "~a: invalid checker~%") c)))
names)
(alist-cons option-name
(filter (lambda (checker)
(member (lint-checker-name checker)
names))
%all-checkers)
result)))))
(define %options
(list (option-checker '(#\c "checkers"))
(option-checker '(#\x "exclude"))
(option '(#\n "no-network") #f #f
(lambda (opt name arg result)
(alist-cons 'no-network? #t result)))
(find (lambda (option)
(member "load-path" (option-names option)))
%standard-build-options)
(option '(#\h "help") #f #f
(lambda args
(show-help)
(exit 0)))
(option '(#\l "list-checkers") #f #f
(lambda (opt name arg result)
(alist-cons 'list?
#t
result)))
(option '(#\V "version") #f #f
(lambda args
(show-version-and-exit "guix lint")))))
(define-command (guix-lint . args)
(category packaging)
(synopsis "validate package definitions")
(define (parse-options)
(parse-command-line args %options (list %default-options)
#:build-options? #f))
(let* ((opts (parse-options))
(args (filter-map (match-lambda
(('argument . value)
value)
(_ #f))
(reverse opts)))
(no-checkers (or (assoc-ref opts 'exclude) '()))
(the-checkers (filter (lambda (checker)
(not (member checker no-checkers)))
(or (assoc-ref opts 'checkers) %all-checkers)))
(checkers
(if (assoc-ref opts 'no-network?)
(filter (lambda (checker)
(member checker %local-checkers))
the-checkers)
the-checkers)))
(when (assoc-ref opts 'list?)
(list-checkers-and-exit checkers))
(with-error-handling
(let ((any-lint-checker-requires-store?
(any lint-checker-requires-store? checkers)))
(define (call-maybe-with-store proc)
(if any-lint-checker-requires-store?
(with-store store
(proc store))
(proc #f)))
(call-maybe-with-store
(lambda (store)
(cond
((null? args)
(fold-packages (lambda (p r) (run-checkers p checkers
#:store store)) '()))
(else
(for-each (lambda (spec)
(run-checkers (specification->package spec) checkers
#:store store))
args)))))))))
|
30ff845674fba4188f6873e5a21fd31d76909373a12332f2b56969c2feb0c576 | Eugleo/roguelike | generator.rkt | #lang racket
(provide village-generator%)
(require "rectangle.rkt" "tile.rkt")
(define generator
(interface () generate-tiles))
(define village-generator%
(class* object% (generator)
(super-new)
(init-field width height)
(define/public (generate-tiles width height)
(define rooms
(generate-rooms 3 #:size-bounds (list 7 12) #:position-bounds (list width height)))
(tiles-add-doors (tiles-add-rooms (make-empty-tiles width height) rooms) rooms))
(define (tiles-add-wall tiles x y)
(tiles-set x y (new wall%) tiles))
(define (tiles-add-doors tiles rooms)
(for/fold ([acc tiles])
([room (in-list rooms)])
(define-values (x y) (room-get-door-coords room))
(tiles-set x y (new door%) acc)))
(define (tiles-add-rooms tiles rooms)
(for/fold ([acc tiles] #:result (set-walls-orientation acc))
([room rooms])
(tiles-add-room acc room)))
(define (tiles-add-room tiles room)
(define tiles/walls
(for*/fold ([acc tiles])
([x (in-range (rect-x room) (add1 (rect-x-bound room)))]
[y (in-range (rect-y room) (add1 (rect-y-bound room)))])
(tiles-set x y (new wall%) acc)))
(for*/fold ([acc tiles/walls])
([x (in-range (add1 (rect-x room)) (rect-x-bound room))]
[y (in-range (add1 (rect-y room)) (rect-y-bound room))])
(tiles-set x y (new wooden-floor%) acc)))
(define (make-empty-tiles width height)
(for/vector #:length width ([xi (in-range width)])
(for/vector #:length height ([yi (in-range height)])
(new grass%))))
;; int (int, int) (int, int) -> (list rect)
;; Generate a given number of rooms of random size and position
(define (generate-rooms number #:size-bounds size-bounds #:position-bounds pos-bounds)
(define min-size (first size-bounds))
(define max-size (second size-bounds))
(define max-x (first pos-bounds))
(define max-y (second pos-bounds))
(for/fold ([rooms '()])
([i (in-naturals)] #:break (= (length rooms) number))
(define width (random min-size (add1 max-size)))
(define height (random min-size (add1 max-size)))
(define x (random 1 (- max-x width)))
(define y (random 1 (- max-y height)))
(define room (rect x y width height))
(cond
[(all ((curry (compose not room-within-distance?)) 4 room) rooms) (cons room rooms)]
[else rooms])))
(define (set-walls-orientation tiles)
(for* ([x (in-range (tiles-width tiles))]
[y (in-range (tiles-height tiles))]
#:when (is-a? (get-tile x y tiles) wall%))
(define top (get-tile x (sub1 y) tiles))
(define right (get-tile (add1 x) y tiles))
(define bottom (get-tile x (add1 y) tiles))
(define left (get-tile (sub1 x) y tiles))
(define center (get-tile x y tiles))
(cond
[(and (is-a? top wall%) (is-a? bottom wall%) (is-a? left wall%) (is-a? right wall%))
(set-field! orientation center 'cross)]
[(and (is-a? top wall%) (is-a? bottom wall%) (is-a? left wall%))
(set-field! orientation center 't-left)]
[(and (is-a? top wall%) (is-a? bottom wall%) (is-a? right wall%))
(set-field! orientation center 't-right)]
[(and (is-a? top wall%) (is-a? left wall%) (is-a? right wall%))
(set-field! orientation center 't-up)]
[(and (is-a? left wall%) (is-a? bottom wall%) (is-a? right wall%))
(set-field! orientation center 't-down)]
[(and (is-a? top wall%) (is-a? bottom wall%)) (set-field! orientation center 'vertical)]
[(and (is-a? left wall%) (is-a? right wall%)) (set-field! orientation center 'horizontal)]
[(and (is-a? left wall%) (is-a? top wall%)) (set-field! orientation center 'up-left)]
[(and (is-a? right wall%) (is-a? top wall%)) (set-field! orientation center 'up-right)]
[(and (is-a? right wall%) (is-a? bottom wall%)) (set-field! orientation center 'down-right)]
[(and (is-a? left wall%) (is-a? bottom wall%)) (set-field! orientation center 'down-left)]
[(or (is-a? top wall%) (is-a? bottom wall%)) (set-field! orientation center 'vertical)]
[(or (is-a? left wall%) (is-a? right wall%)) (set-field! orientation center 'horizontal)]))
tiles)
;; int int terrain -> tile
;; Return the tile on the given coords
(define (get-tile x y tiles)
(if (and (< -1 x (vector-length tiles))
(< -1 y (vector-length (vector-ref tiles 0))))
(vector-ref (vector-ref tiles x) y)
#f))
(define (tiles-set x y tile tiles)
(define tiles-copy (vector-copy tiles))
(vector-set! (vector-ref tiles-copy x) y tile)
tiles-copy)
;; (matrix tile) -> int
;;; Return the width of the map
(define (tiles-width tiles)
(vector-length tiles))
;; (matrix tile) -> int
;;; Return the height of the map
(define (tiles-height tiles)
(if (> (tiles-width tiles) 0) (vector-length (vector-ref tiles 0)) 0))
(define (all predicate lst)
(for/and ([item (in-list lst)]) (predicate item)))
(define (mean n m)
(/ (+ n m) 2))))
| null | https://raw.githubusercontent.com/Eugleo/roguelike/72c6988d9a9f184077f80a07cb6f8fddda9f7ec5/generator.rkt | racket | int (int, int) (int, int) -> (list rect)
Generate a given number of rooms of random size and position
int int terrain -> tile
Return the tile on the given coords
(matrix tile) -> int
Return the width of the map
(matrix tile) -> int
Return the height of the map | #lang racket
(provide village-generator%)
(require "rectangle.rkt" "tile.rkt")
(define generator
(interface () generate-tiles))
(define village-generator%
(class* object% (generator)
(super-new)
(init-field width height)
(define/public (generate-tiles width height)
(define rooms
(generate-rooms 3 #:size-bounds (list 7 12) #:position-bounds (list width height)))
(tiles-add-doors (tiles-add-rooms (make-empty-tiles width height) rooms) rooms))
(define (tiles-add-wall tiles x y)
(tiles-set x y (new wall%) tiles))
(define (tiles-add-doors tiles rooms)
(for/fold ([acc tiles])
([room (in-list rooms)])
(define-values (x y) (room-get-door-coords room))
(tiles-set x y (new door%) acc)))
(define (tiles-add-rooms tiles rooms)
(for/fold ([acc tiles] #:result (set-walls-orientation acc))
([room rooms])
(tiles-add-room acc room)))
(define (tiles-add-room tiles room)
(define tiles/walls
(for*/fold ([acc tiles])
([x (in-range (rect-x room) (add1 (rect-x-bound room)))]
[y (in-range (rect-y room) (add1 (rect-y-bound room)))])
(tiles-set x y (new wall%) acc)))
(for*/fold ([acc tiles/walls])
([x (in-range (add1 (rect-x room)) (rect-x-bound room))]
[y (in-range (add1 (rect-y room)) (rect-y-bound room))])
(tiles-set x y (new wooden-floor%) acc)))
(define (make-empty-tiles width height)
(for/vector #:length width ([xi (in-range width)])
(for/vector #:length height ([yi (in-range height)])
(new grass%))))
(define (generate-rooms number #:size-bounds size-bounds #:position-bounds pos-bounds)
(define min-size (first size-bounds))
(define max-size (second size-bounds))
(define max-x (first pos-bounds))
(define max-y (second pos-bounds))
(for/fold ([rooms '()])
([i (in-naturals)] #:break (= (length rooms) number))
(define width (random min-size (add1 max-size)))
(define height (random min-size (add1 max-size)))
(define x (random 1 (- max-x width)))
(define y (random 1 (- max-y height)))
(define room (rect x y width height))
(cond
[(all ((curry (compose not room-within-distance?)) 4 room) rooms) (cons room rooms)]
[else rooms])))
(define (set-walls-orientation tiles)
(for* ([x (in-range (tiles-width tiles))]
[y (in-range (tiles-height tiles))]
#:when (is-a? (get-tile x y tiles) wall%))
(define top (get-tile x (sub1 y) tiles))
(define right (get-tile (add1 x) y tiles))
(define bottom (get-tile x (add1 y) tiles))
(define left (get-tile (sub1 x) y tiles))
(define center (get-tile x y tiles))
(cond
[(and (is-a? top wall%) (is-a? bottom wall%) (is-a? left wall%) (is-a? right wall%))
(set-field! orientation center 'cross)]
[(and (is-a? top wall%) (is-a? bottom wall%) (is-a? left wall%))
(set-field! orientation center 't-left)]
[(and (is-a? top wall%) (is-a? bottom wall%) (is-a? right wall%))
(set-field! orientation center 't-right)]
[(and (is-a? top wall%) (is-a? left wall%) (is-a? right wall%))
(set-field! orientation center 't-up)]
[(and (is-a? left wall%) (is-a? bottom wall%) (is-a? right wall%))
(set-field! orientation center 't-down)]
[(and (is-a? top wall%) (is-a? bottom wall%)) (set-field! orientation center 'vertical)]
[(and (is-a? left wall%) (is-a? right wall%)) (set-field! orientation center 'horizontal)]
[(and (is-a? left wall%) (is-a? top wall%)) (set-field! orientation center 'up-left)]
[(and (is-a? right wall%) (is-a? top wall%)) (set-field! orientation center 'up-right)]
[(and (is-a? right wall%) (is-a? bottom wall%)) (set-field! orientation center 'down-right)]
[(and (is-a? left wall%) (is-a? bottom wall%)) (set-field! orientation center 'down-left)]
[(or (is-a? top wall%) (is-a? bottom wall%)) (set-field! orientation center 'vertical)]
[(or (is-a? left wall%) (is-a? right wall%)) (set-field! orientation center 'horizontal)]))
tiles)
(define (get-tile x y tiles)
(if (and (< -1 x (vector-length tiles))
(< -1 y (vector-length (vector-ref tiles 0))))
(vector-ref (vector-ref tiles x) y)
#f))
(define (tiles-set x y tile tiles)
(define tiles-copy (vector-copy tiles))
(vector-set! (vector-ref tiles-copy x) y tile)
tiles-copy)
(define (tiles-width tiles)
(vector-length tiles))
(define (tiles-height tiles)
(if (> (tiles-width tiles) 0) (vector-length (vector-ref tiles 0)) 0))
(define (all predicate lst)
(for/and ([item (in-list lst)]) (predicate item)))
(define (mean n m)
(/ (+ n m) 2))))
|
97802373861407c205ca07c4a2d3a2607cbde9f02ca349a2a2072bbb5c75d044 | TOTBWF/teenytt | TermBuilder.hs | | Helpers for constructing terms that involve Arithmetic .
module TeenyTT.Core.TermBuilder
( TB
, runTB
, var
, tpvar
-- * Term Builders
-- ** Pi Types
, pi
, lam
, ap
, aps
-- ** Sigma Types
, sigma
, pair
, fst
, snd
* *
, nat
, zero
, suc
-- ** Universes
, univ
, el
) where
import Prelude hiding (pi, fst, snd)
import Control.Monad.Reader
import Data.List (foldl')
import TeenyTT.Base.Ident
import TeenyTT.Core.Syntax qualified as S
newtype TB a = TB { unTB :: Reader TBEnv a }
deriving newtype (Functor, Applicative, Monad, MonadReader TBEnv)
-- | A Term Builder 'Env' keeps track of the /number/ of types
-- and terms that have been bound. This allows us to convert
between and levels . See ' var ' and ' tpvar ' .
data TBEnv = TBEnv { types :: Int, values :: Int }
runTB :: Int -> Int -> TB a -> a
runTB values types (TB m) =
let tbenv = TBEnv { values, types }
in runReader m tbenv
--------------------------------------------------------------------------------
-- Level Arithmetiic
var :: Int -> TB S.Term
var lvl = do
size <- asks (\env -> env.values)
pure $ S.Local (size - lvl - 1)
tpvar :: Int -> TB S.Type
tpvar lvl = do
size <- asks (\env -> env.types)
pure $ S.TpVar (size - lvl - 1)
extend :: (Int -> TB a) -> TB a
extend k = do
size <- asks (\env -> env.values)
local (\env -> env { values = env.values + 1 }) (k size)
extendTp :: (Int -> TB a) -> TB a
extendTp k = do
size <- asks (\env -> env.types)
local (\env -> env { values = env.values + 1 }) (k size)
scope :: (TB S.Term -> TB a) -> TB a
scope k = extend (k . var)
--------------------------------------------------------------------------------
Term Builders
-- Pi Types
pi :: Ident -> TB S.Type -> (TB S.Term -> TB S.Type) -> TB S.Type
pi x tbase tfam = S.Pi x <$> tbase <*> scope tfam
lam :: Ident -> (TB S.Term -> TB S.Term) -> TB S.Term
lam x k = S.Lam x <$> scope k
ap :: TB S.Term -> TB S.Term -> TB S.Term
ap tfn targ = S.Ap <$> tfn <*> targ
aps :: TB S.Term -> [TB S.Term] -> TB S.Term
aps tfn targs = foldl' ap tfn targs
-- Sigma Types
sigma :: Ident -> TB S.Type -> (TB S.Term -> TB S.Type) -> TB S.Type
sigma x tbase tfam = S.Sigma x <$> tbase <*> scope tfam
pair :: TB S.Term -> TB S.Term -> TB S.Term
pair tl tr = S.Pair <$> tl <*> tr
fst :: TB S.Term -> TB S.Term
fst tm = S.Fst <$> tm
snd :: TB S.Term -> TB S.Term
snd tm = S.Snd <$> tm
-- Nats
nat :: TB S.Type
nat = pure S.Nat
zero :: TB S.Term
zero = pure S.Zero
suc :: TB S.Term -> TB S.Term
suc tm = S.Suc <$> tm
Universes
univ :: TB S.Type
univ = pure S.Univ
el :: TB S.Term -> TB S.Type
el tm = S.El <$> tm
| null | https://raw.githubusercontent.com/TOTBWF/teenytt/a45162254b4b3c056b1607f4759bd608fd355165/src/TeenyTT/Core/TermBuilder.hs | haskell | * Term Builders
** Pi Types
** Sigma Types
** Universes
| A Term Builder 'Env' keeps track of the /number/ of types
and terms that have been bound. This allows us to convert
------------------------------------------------------------------------------
Level Arithmetiic
------------------------------------------------------------------------------
Pi Types
Sigma Types
Nats | | Helpers for constructing terms that involve Arithmetic .
module TeenyTT.Core.TermBuilder
( TB
, runTB
, var
, tpvar
, pi
, lam
, ap
, aps
, sigma
, pair
, fst
, snd
* *
, nat
, zero
, suc
, univ
, el
) where
import Prelude hiding (pi, fst, snd)
import Control.Monad.Reader
import Data.List (foldl')
import TeenyTT.Base.Ident
import TeenyTT.Core.Syntax qualified as S
newtype TB a = TB { unTB :: Reader TBEnv a }
deriving newtype (Functor, Applicative, Monad, MonadReader TBEnv)
between and levels . See ' var ' and ' tpvar ' .
data TBEnv = TBEnv { types :: Int, values :: Int }
runTB :: Int -> Int -> TB a -> a
runTB values types (TB m) =
let tbenv = TBEnv { values, types }
in runReader m tbenv
var :: Int -> TB S.Term
var lvl = do
size <- asks (\env -> env.values)
pure $ S.Local (size - lvl - 1)
tpvar :: Int -> TB S.Type
tpvar lvl = do
size <- asks (\env -> env.types)
pure $ S.TpVar (size - lvl - 1)
extend :: (Int -> TB a) -> TB a
extend k = do
size <- asks (\env -> env.values)
local (\env -> env { values = env.values + 1 }) (k size)
extendTp :: (Int -> TB a) -> TB a
extendTp k = do
size <- asks (\env -> env.types)
local (\env -> env { values = env.values + 1 }) (k size)
scope :: (TB S.Term -> TB a) -> TB a
scope k = extend (k . var)
Term Builders
pi :: Ident -> TB S.Type -> (TB S.Term -> TB S.Type) -> TB S.Type
pi x tbase tfam = S.Pi x <$> tbase <*> scope tfam
lam :: Ident -> (TB S.Term -> TB S.Term) -> TB S.Term
lam x k = S.Lam x <$> scope k
ap :: TB S.Term -> TB S.Term -> TB S.Term
ap tfn targ = S.Ap <$> tfn <*> targ
aps :: TB S.Term -> [TB S.Term] -> TB S.Term
aps tfn targs = foldl' ap tfn targs
sigma :: Ident -> TB S.Type -> (TB S.Term -> TB S.Type) -> TB S.Type
sigma x tbase tfam = S.Sigma x <$> tbase <*> scope tfam
pair :: TB S.Term -> TB S.Term -> TB S.Term
pair tl tr = S.Pair <$> tl <*> tr
fst :: TB S.Term -> TB S.Term
fst tm = S.Fst <$> tm
snd :: TB S.Term -> TB S.Term
snd tm = S.Snd <$> tm
nat :: TB S.Type
nat = pure S.Nat
zero :: TB S.Term
zero = pure S.Zero
suc :: TB S.Term -> TB S.Term
suc tm = S.Suc <$> tm
Universes
univ :: TB S.Type
univ = pure S.Univ
el :: TB S.Term -> TB S.Type
el tm = S.El <$> tm
|
6d06805420bf3b5f813d83d7d1ece0086e2f2080a13eca874f8f99726eb01572 | metaocaml/ber-metaocaml | magic_number.ml | (* TEST
include config
binary_modules = "config build_path_prefix_map misc"
* bytecode
*)
open Misc
open Magic_number
(* sanity checking: the magic number at a given kind can be parsed back *)
let error kind test =
fatal_errorf
"Internal compiler error (%s): there is a magic number mismatch on kind %s"
test
(string_of_kind kind)
let check_raw_kind kind =
let valid =
match parse_kind (raw_kind kind) with
| None -> false
| Some kind_roundtrip ->
kind_roundtrip = kind
in
if not valid then error kind "raw_kind"
let check_current_raw kind =
let valid =
match parse (current_raw kind) with
| Error _ -> false
| Ok magic ->
magic.kind = kind
&& raw magic = current_raw kind
in
if not valid then error kind "current_raw"
let () =
all_kinds
|> List.iter (fun kind -> check_raw_kind kind; check_current_raw kind)
| null | https://raw.githubusercontent.com/metaocaml/ber-metaocaml/4992d1f87fc08ccb958817926cf9d1d739caf3a2/testsuite/tests/utils/magic_number.ml | ocaml | TEST
include config
binary_modules = "config build_path_prefix_map misc"
* bytecode
sanity checking: the magic number at a given kind can be parsed back |
open Misc
open Magic_number
let error kind test =
fatal_errorf
"Internal compiler error (%s): there is a magic number mismatch on kind %s"
test
(string_of_kind kind)
let check_raw_kind kind =
let valid =
match parse_kind (raw_kind kind) with
| None -> false
| Some kind_roundtrip ->
kind_roundtrip = kind
in
if not valid then error kind "raw_kind"
let check_current_raw kind =
let valid =
match parse (current_raw kind) with
| Error _ -> false
| Ok magic ->
magic.kind = kind
&& raw magic = current_raw kind
in
if not valid then error kind "current_raw"
let () =
all_kinds
|> List.iter (fun kind -> check_raw_kind kind; check_current_raw kind)
|
d56bbf2fd11f5c384669ef87257b828535680813015654ff803094ccf11d59bb | robrix/isometry | Framebuffer.hs | # LANGUAGE LambdaCase #
# LANGUAGE ScopedTypeVariables #
module GL.Framebuffer
( Framebuffer(..)
, Attachment(..)
, attachTexture
, Bind(..)
) where
import Control.Effect.Lift
import Control.Monad (unless)
import Data.Proxy
import GHC.Stack
import GL.Effect.Check
import GL.Enum as GL
import GL.Error
import GL.Object
import qualified GL.Texture as GL
import Graphics.GL.Core41
import Graphics.GL.Types
newtype Framebuffer = Framebuffer { unFramebuffer :: GLuint }
instance Object Framebuffer where
gen = defaultGenWith glGenFramebuffers Framebuffer
delete = defaultDeleteWith glDeleteFramebuffers unFramebuffer
instance Bind Framebuffer where
bind = checking . sendIO . glBindFramebuffer GL_FRAMEBUFFER . maybe 0 unFramebuffer
newtype Attachment
= Colour Int
instance GL.Enum Attachment where
glEnum = \case
Colour n -> GL_COLOR_ATTACHMENT0 + fromIntegral n
attachTexture :: forall ty sig m . (HasCallStack, Has Check sig m, Has (Lift IO) sig m) => GL.KnownType ty => Attachment -> GL.Texture ty -> m ()
attachTexture attachment (GL.Texture texture) = do
checking . sendIO $ glFramebufferTexture2D GL_FRAMEBUFFER (glEnum attachment) (glEnum (GL.typeVal (Proxy :: Proxy ty))) texture 0
status <- sendIO $ glCheckFramebufferStatus GL_FRAMEBUFFER
unless (status == GL_FRAMEBUFFER_COMPLETE) (throwGLError status)
| null | https://raw.githubusercontent.com/robrix/isometry/171b9261b8d7ea32c86ce6019c8c3973742f0349/src/GL/Framebuffer.hs | haskell | # LANGUAGE LambdaCase #
# LANGUAGE ScopedTypeVariables #
module GL.Framebuffer
( Framebuffer(..)
, Attachment(..)
, attachTexture
, Bind(..)
) where
import Control.Effect.Lift
import Control.Monad (unless)
import Data.Proxy
import GHC.Stack
import GL.Effect.Check
import GL.Enum as GL
import GL.Error
import GL.Object
import qualified GL.Texture as GL
import Graphics.GL.Core41
import Graphics.GL.Types
newtype Framebuffer = Framebuffer { unFramebuffer :: GLuint }
instance Object Framebuffer where
gen = defaultGenWith glGenFramebuffers Framebuffer
delete = defaultDeleteWith glDeleteFramebuffers unFramebuffer
instance Bind Framebuffer where
bind = checking . sendIO . glBindFramebuffer GL_FRAMEBUFFER . maybe 0 unFramebuffer
newtype Attachment
= Colour Int
instance GL.Enum Attachment where
glEnum = \case
Colour n -> GL_COLOR_ATTACHMENT0 + fromIntegral n
attachTexture :: forall ty sig m . (HasCallStack, Has Check sig m, Has (Lift IO) sig m) => GL.KnownType ty => Attachment -> GL.Texture ty -> m ()
attachTexture attachment (GL.Texture texture) = do
checking . sendIO $ glFramebufferTexture2D GL_FRAMEBUFFER (glEnum attachment) (glEnum (GL.typeVal (Proxy :: Proxy ty))) texture 0
status <- sendIO $ glCheckFramebufferStatus GL_FRAMEBUFFER
unless (status == GL_FRAMEBUFFER_COMPLETE) (throwGLError status)
| |
662e25e977e08169dcef4eeaeaf8faaf043474f69f836d032a05bbbe7b74e054 | tmfg/mmtis-national-access-point | cloudwatch.clj | (ns dashboard.data.cloudwatch
(:require [amazonica.aws.cloudwatch :as cloudwatch]))
(defn last-five-minutes []
{:start-time (java.util.Date. (- (System/currentTimeMillis)
(* 1000 60 5)))
:end-time (java.util.Date.)
:period 300})
(defn fetch-metric
"Fetch last five minutes' min/max/avg of given metric"
[namespace metric-name dimensions]
(cloudwatch/get-metric-statistics (merge (last-five-minutes)
{:statistics ["Average" "Minimum" "Maximum"]
:namespace namespace
:metric-name metric-name
:dimensions dimensions})))
(defn finap-db-load []
(-> (fetch-metric "AWS/RDS"
"CPUUtilization"
[{:name "DBInstanceIdentifier" :value "napote-db-prod"}])
:datapoints first (select-keys #{:minimum :maximum :average})))
| null | https://raw.githubusercontent.com/tmfg/mmtis-national-access-point/a86cc890ffa1fe4f773083be5d2556e87a93d975/tools/dashboard/src/clj/dashboard/data/cloudwatch.clj | clojure | (ns dashboard.data.cloudwatch
(:require [amazonica.aws.cloudwatch :as cloudwatch]))
(defn last-five-minutes []
{:start-time (java.util.Date. (- (System/currentTimeMillis)
(* 1000 60 5)))
:end-time (java.util.Date.)
:period 300})
(defn fetch-metric
"Fetch last five minutes' min/max/avg of given metric"
[namespace metric-name dimensions]
(cloudwatch/get-metric-statistics (merge (last-five-minutes)
{:statistics ["Average" "Minimum" "Maximum"]
:namespace namespace
:metric-name metric-name
:dimensions dimensions})))
(defn finap-db-load []
(-> (fetch-metric "AWS/RDS"
"CPUUtilization"
[{:name "DBInstanceIdentifier" :value "napote-db-prod"}])
:datapoints first (select-keys #{:minimum :maximum :average})))
| |
25581c7016ccfb4ce67dbf997b1ba756c2138b6f4c7c3d7b8bbeb7ebddfd7b8f | racket/web-server | stateless.rkt | #lang web-server/base
(define-syntax-rule (require-provide mod ...)
(begin (require mod ...) (provide (all-from-out mod) ...)))
(require-provide "lib.rkt"
"input.rkt"
"syntax.rkt"
"dyn-syntax.rkt"
"embed.rkt")
(require racket/contract
web-server/private/xexpr
"unsafe/stateless-send.rkt")
(provide (contract-out
[send/formlet (->* (formlet*/c)
(#:method (or/c "GET" "POST" "get" "post")
#:wrap (-> pretty-xexpr/c pretty-xexpr/c))
any)]))
| null | https://raw.githubusercontent.com/racket/web-server/f718800b5b3f407f7935adf85dfa663c4bba1651/web-server-lib/web-server/formlets/stateless.rkt | racket | #lang web-server/base
(define-syntax-rule (require-provide mod ...)
(begin (require mod ...) (provide (all-from-out mod) ...)))
(require-provide "lib.rkt"
"input.rkt"
"syntax.rkt"
"dyn-syntax.rkt"
"embed.rkt")
(require racket/contract
web-server/private/xexpr
"unsafe/stateless-send.rkt")
(provide (contract-out
[send/formlet (->* (formlet*/c)
(#:method (or/c "GET" "POST" "get" "post")
#:wrap (-> pretty-xexpr/c pretty-xexpr/c))
any)]))
| |
b22cfee81e39ab40cfa033cffc427f4d36843fc0a92671035cf15e768b9d8dba | pveber/bistro | idr.mli | open Bistro
open Formats
type 'a format
val narrowPeak : Macs2.narrow_peaks format
val broadPeak : Macs2.broad_peaks format
val bed : bed3 format
val gff : gff format
type 'a output = [`idr_output of 'a]
val idr :
input_file_type:'a format ->
?idr_threshold:float ->
?soft_idr_threshold:float ->
?peak_merge_method:[ `sum | `avg | `min | `max] ->
?rank:[ `signal | `pvalue | `qvalue ] ->
?random_seed:int ->
?peak_list:'a file ->
'a file ->
'a file ->
'a output directory
val items : 'a output directory -> 'a file
val figure : _ output directory -> png file
| null | https://raw.githubusercontent.com/pveber/bistro/da0ebc969c8c5ca091905366875cbf8366622280/lib/bio/idr.mli | ocaml | open Bistro
open Formats
type 'a format
val narrowPeak : Macs2.narrow_peaks format
val broadPeak : Macs2.broad_peaks format
val bed : bed3 format
val gff : gff format
type 'a output = [`idr_output of 'a]
val idr :
input_file_type:'a format ->
?idr_threshold:float ->
?soft_idr_threshold:float ->
?peak_merge_method:[ `sum | `avg | `min | `max] ->
?rank:[ `signal | `pvalue | `qvalue ] ->
?random_seed:int ->
?peak_list:'a file ->
'a file ->
'a file ->
'a output directory
val items : 'a output directory -> 'a file
val figure : _ output directory -> png file
| |
82cb936b5575fbbd03c57108a76c60b1c2db4d454d2b6339e446efa2228ed65a | duncanatt/detecter | analyzer.erl | %%% ----------------------------------------------------------------------------
@author
%%%
%%% @doc Module description (becomes module heading).
%%%
%%% @end
%%%
Copyright ( c ) 2021 , < >
%%%
%%% This program is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation , either version 3 of the License , or ( at your option )
%%% any later version.
%%%
%%% This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or
%%% FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
%%% more details.
%%%
You should have received a copy of the GNU General Public License along with
%%% this program. If not, see </>.
%%% ----------------------------------------------------------------------------
-module(analyzer).
-author("Duncan Paul Attard").
%%% Includes.
-include_lib("stdlib/include/assert.hrl").
-include("log.hrl").
%%% Public API.
-export([start/2, stop/1]).
-export([embed/1, dispatch/1, do_monitor/2, filter/1]).
%%% Internal callbacks.
-export([init/1]).
%%% Types.
-export_type([mfa_spec/0, monitor/0]).
%%% ----------------------------------------------------------------------------
Macro and record definitions .
%%% ----------------------------------------------------------------------------
%% Process dictionary key used to store the synthesised analysis function that
%% is applied to trace events. The result of this function application is used
%% to overwrite the previous result.
-define(MONITOR, '$monitor').
Three types of irrevocable verdicts reached by the analysis .
-define(VERDICT_YES, yes).
-define(VERDICT_NO, no).
-define(VERDICT_END, 'end').
%%% ----------------------------------------------------------------------------
%%% Type definitions.
%%% ----------------------------------------------------------------------------
-type verdict() :: ?VERDICT_YES | ?VERDICT_NO | ?VERDICT_END.
Three analysis verdicts .
%% TODO: IMO, should also add monitor() to the below, monitor() | verdict(). Check with dialyzer.
-type monitor() :: fun((Event :: term()) -> verdict() | no_return()).
%% Analyzer that accepts a trace event and transitions to its subsequent
%% unfolded continuation, or a verdict stage when the no such transitions are
%% possible. An analyzer can also diverge indefinitely, in which case events are
%% consumed albeit a final verdict is never reached.
-type mfa_spec() :: fun((Mfa :: mfa()) -> {ok, monitor()} | undefined).
%% Function mapping that returns the analysis encoding as an anonymous function.
%% The analysis encoding corresponds to the logic formula that specifies the
%% runtime property to be analysed, and therefore, is the product of the
%% synthesis, see {@link hml_eval}.
%% When the mapping is `undefined', the system process corresponding to the
%% forked function will share the same tracer of its parent process, otherwise,
%% a new and separate tracer is forked for the new process, see {@link tracer}.
%% Note that only external function calls can be tracked, and therefore,
%% instrumented with a new tracer.
%%% ----------------------------------------------------------------------------
%%% Public API.
%%% ----------------------------------------------------------------------------
%% @doc Starts the analyzer.
%%
%% {@params
%% {@name AnlFun}
%% {@desc Analysis function that is applied to trace events to determine their
%% correct or incorrect sequence.
%% }
%% {@name Parent}
%% {@desc PID of supervisor to be linked to the analyzer process or `self' if
%% no supervision is required.
%% }
%% }
%%
%% {@returns PID of analyzer process.}
-spec start(AnlFun, Parent) -> pid()
when
AnlFun :: monitor(),
Parent :: tracer:parent().
start(AnlFun, Parent) ->
spawn(fun() -> put(?MONITOR, AnlFun), init(Parent) end).
%% @doc Stops the analyzer identified by the specified PID.
%%
%% {@params
{ @name Pid }
%% {@desc PID of analyzer to stop.}
%% }
%%
%% {@returns `ok' to indicate successful termination.}
-spec stop(Pid :: pid()) -> ok.
stop(Pid) ->
util:rpc_async(Pid, stop),
ok.
@doc the trace event analysis function into the process dictionary .
%%
%% {@params
%% {@name AnlFun}
%% {@desc Analysis function that is applied to trace events to determine their
%% correct or incorrect sequence.
%% }
%% }
%%
%% {@returns `true' to indicate success, otherwise `false'.}
-spec embed(AnlFun :: monitor()) -> true.
embed(AnlFun) ->
undefined =:= put(?MONITOR, AnlFun).
%% @doc Dispatches the specified abstract event to the monitor for analysis.
%%
%% {@params
%% {@name Event}
%% {@desc The abstract event that the monitor is to analyze.}
%% }
%%
%% {@returns Depends on the event type. See {@link event:event/0}.
%% {@ul
%% {@item When event is of type `fork', the PID of the new child
%% process is returned;
%% }
%% {@item When event is of type `init', the PID of the parent
%% process is returned;
%% }
%% {@item When event is of type `exit', the exit reason is
%% returned;
%% }
%% {@item When event is of type `send', the message is returned;}
%% {@item When event is of type `recv', the message is returned.}
%% }
%% }
-spec dispatch(Event :: event:int_event()) -> term().
dispatch(Event = {fork, _Parent, Child, _Mfa}) ->
do_monitor(event:to_evm_event(Event),
fun(Verdict) -> ?INFO("Reached verdict '~s' after ~w.", [Verdict, Event]) end
),
Child;
dispatch(Event = {init, _Child, Parent, _Mfa}) ->
do_monitor(event:to_evm_event(Event),
fun(Verdict) -> ?INFO("Reached verdict '~s' after ~w.", [Verdict, Event]) end
),
Parent;
dispatch(Event = {exit, _Process, Reason}) ->
do_monitor(event:to_evm_event(Event),
fun(Verdict) -> ?INFO("Reached verdict '~s' after ~w.", [Verdict, Event]) end
),
Reason;
dispatch(Event = {send, _Sender, _Receiver, Msg}) ->
do_monitor(event:to_evm_event(Event),
fun(Verdict) -> ?INFO("Reached verdict '~s' after ~w.", [Verdict, Event]) end
),
Msg;
dispatch(Event = {recv, _Receiver, Msg}) ->
do_monitor(event:to_evm_event(Event),
fun(Verdict) -> ?INFO("Reached verdict '~s' after ~w.", [Verdict, Event]) end
),
Msg.
%% @doc Retrieves the monitor function stored in the process dictionary (if
%% any), and applies it on the event. The result is put back in the process
%% dictionary. If a verdict state is reached, the callback function is invoked,
%% otherwise nothing is done. When no monitor function is stored inside the
%% process dictionary (i.e. meaning that the process is not monitored), the atom
%% `undefined' is returned.
-spec do_monitor(Event, VerdictFun) -> monitor() | undefined
when
Event :: event:evm_event(),
VerdictFun :: fun((Verdict :: verdict()) -> any()).
do_monitor(Event, VerdictFun) when is_function(VerdictFun, 1) ->
case get(?MONITOR) of
undefined ->
?TRACE("Analyzer undefined; discarding trace event ~w.", [Event]),
undefined;
Monitor ->
% Analyze event. At this point, monitor might have reached a verdict.
% Check whether verdict is reached to enable immediate detection, should
% this be the case.
put(?MONITOR, Monitor0 = analyze(Monitor, Event)),
case is_verdict(Monitor0) of
true ->
VerdictFun(Monitor0);
false ->
ok
end,
Monitor0
end.
%% @doc Default filter that allows all events to pass.
-spec filter(Event :: event:int_event()) -> true.
filter(_) ->
true. % True = keep event.
%%% ----------------------------------------------------------------------------
%%% Internal callbacks.
%%% ----------------------------------------------------------------------------
@private Monitor initialization .
-spec init(Parent) -> no_return()
when
Parent :: tracer:parent().
init(Parent) ->
if is_pid(Parent) -> link(Parent); true -> ok end,
loop(Parent).
%%% ----------------------------------------------------------------------------
%%% Private helper functions.
%%% ----------------------------------------------------------------------------
@private Main monitor loop .
-spec loop(Parent) -> no_return()
when
Parent :: tracer:parent().
loop(Parent) ->
receive
{From, _, stop} ->
% There should be no more trace messages left when the stop command is
% received.
?assertEqual({messages, []}, process_info(self(), messages)),
?INFO("Analyzer received STOP command from tracer ~w.", [From]),
exit({garbage_collect, {monitor, ?VERDICT_END}});
Event ->
% At this point, the monitor should only receive trace events. Events
% should also be of specific types.
?assertEqual(trace, element(1, Event)),
?assert(
element(3, Event) =:= spawn orelse element(3, Event) =:= exit orelse
element(3, Event) =:= send orelse element(3, Event) =:= 'receive' orelse
element(3, Event) =:= spawned
),
% Analyze event and garbage collect monitor is verdict is reached.
do_monitor(Event,
fun(Verdict) -> exit({garbage_collect, {monitor, Verdict}}) end
),
% TODO: Test this
loop(Parent)
end.
@private Determines whether the specified monitor is indeed a verdict .
-spec is_verdict(Verdict :: term()) -> boolean().
is_verdict(Verdict) when Verdict =:= yes; Verdict =:= no; Verdict =:= 'end' ->
true;
is_verdict(_) ->
false.
%% @private Effects the analysis by applying the monitor function to the
%% specified event. If a verdict state is reached, the event is silently
%% discarded.
-spec analyze(Monitor, Event) -> monitor()
when
Monitor :: monitor(),
Event :: event:int_event().
analyze(Monitor, Event) ->
case is_verdict(Monitor) of
true ->
% Monitor is at the verdict state, and the event, even though it is
% analyzed, does not alter the current verdict.
?TRACE("Analyzing event ~w and reached verdict '~s'.",
[Event, Monitor]),
Monitor;
false ->
% Monitor is not yet at the verdict state and can analyze the event.
% Return next monitor unfolding.
?TRACE("Analyzing event ~w.", [Event]),
Monitor(Event)
end. | null | https://raw.githubusercontent.com/duncanatt/detecter/7070ddd6b16dd323a81077a2915f4b938f9f0b80/detecter/src/monitoring/analyzer.erl | erlang | ----------------------------------------------------------------------------
@doc Module description (becomes module heading).
@end
This program is free software: you can redistribute it and/or modify it
any later version.
This program is distributed in the hope that it will be useful, but WITHOUT
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
this program. If not, see </>.
----------------------------------------------------------------------------
Includes.
Public API.
Internal callbacks.
Types.
----------------------------------------------------------------------------
----------------------------------------------------------------------------
Process dictionary key used to store the synthesised analysis function that
is applied to trace events. The result of this function application is used
to overwrite the previous result.
----------------------------------------------------------------------------
Type definitions.
----------------------------------------------------------------------------
TODO: IMO, should also add monitor() to the below, monitor() | verdict(). Check with dialyzer.
Analyzer that accepts a trace event and transitions to its subsequent
unfolded continuation, or a verdict stage when the no such transitions are
possible. An analyzer can also diverge indefinitely, in which case events are
consumed albeit a final verdict is never reached.
Function mapping that returns the analysis encoding as an anonymous function.
The analysis encoding corresponds to the logic formula that specifies the
runtime property to be analysed, and therefore, is the product of the
synthesis, see {@link hml_eval}.
When the mapping is `undefined', the system process corresponding to the
forked function will share the same tracer of its parent process, otherwise,
a new and separate tracer is forked for the new process, see {@link tracer}.
Note that only external function calls can be tracked, and therefore,
instrumented with a new tracer.
----------------------------------------------------------------------------
Public API.
----------------------------------------------------------------------------
@doc Starts the analyzer.
{@params
{@name AnlFun}
{@desc Analysis function that is applied to trace events to determine their
correct or incorrect sequence.
}
{@name Parent}
{@desc PID of supervisor to be linked to the analyzer process or `self' if
no supervision is required.
}
}
{@returns PID of analyzer process.}
@doc Stops the analyzer identified by the specified PID.
{@params
{@desc PID of analyzer to stop.}
}
{@returns `ok' to indicate successful termination.}
{@params
{@name AnlFun}
{@desc Analysis function that is applied to trace events to determine their
correct or incorrect sequence.
}
}
{@returns `true' to indicate success, otherwise `false'.}
@doc Dispatches the specified abstract event to the monitor for analysis.
{@params
{@name Event}
{@desc The abstract event that the monitor is to analyze.}
}
{@returns Depends on the event type. See {@link event:event/0}.
{@ul
{@item When event is of type `fork', the PID of the new child
process is returned;
}
{@item When event is of type `init', the PID of the parent
process is returned;
}
{@item When event is of type `exit', the exit reason is
returned;
}
{@item When event is of type `send', the message is returned;}
{@item When event is of type `recv', the message is returned.}
}
}
@doc Retrieves the monitor function stored in the process dictionary (if
any), and applies it on the event. The result is put back in the process
dictionary. If a verdict state is reached, the callback function is invoked,
otherwise nothing is done. When no monitor function is stored inside the
process dictionary (i.e. meaning that the process is not monitored), the atom
`undefined' is returned.
Analyze event. At this point, monitor might have reached a verdict.
Check whether verdict is reached to enable immediate detection, should
this be the case.
@doc Default filter that allows all events to pass.
True = keep event.
----------------------------------------------------------------------------
Internal callbacks.
----------------------------------------------------------------------------
----------------------------------------------------------------------------
Private helper functions.
----------------------------------------------------------------------------
There should be no more trace messages left when the stop command is
received.
At this point, the monitor should only receive trace events. Events
should also be of specific types.
Analyze event and garbage collect monitor is verdict is reached.
TODO: Test this
@private Effects the analysis by applying the monitor function to the
specified event. If a verdict state is reached, the event is silently
discarded.
Monitor is at the verdict state, and the event, even though it is
analyzed, does not alter the current verdict.
Monitor is not yet at the verdict state and can analyze the event.
Return next monitor unfolding. | @author
Copyright ( c ) 2021 , < >
under the terms of the GNU General Public License as published by the Free
Software Foundation , either version 3 of the License , or ( at your option )
ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or
You should have received a copy of the GNU General Public License along with
-module(analyzer).
-author("Duncan Paul Attard").
-include_lib("stdlib/include/assert.hrl").
-include("log.hrl").
-export([start/2, stop/1]).
-export([embed/1, dispatch/1, do_monitor/2, filter/1]).
-export([init/1]).
-export_type([mfa_spec/0, monitor/0]).
Macro and record definitions .
-define(MONITOR, '$monitor').
Three types of irrevocable verdicts reached by the analysis .
-define(VERDICT_YES, yes).
-define(VERDICT_NO, no).
-define(VERDICT_END, 'end').
-type verdict() :: ?VERDICT_YES | ?VERDICT_NO | ?VERDICT_END.
Three analysis verdicts .
-type monitor() :: fun((Event :: term()) -> verdict() | no_return()).
-type mfa_spec() :: fun((Mfa :: mfa()) -> {ok, monitor()} | undefined).
-spec start(AnlFun, Parent) -> pid()
when
AnlFun :: monitor(),
Parent :: tracer:parent().
start(AnlFun, Parent) ->
spawn(fun() -> put(?MONITOR, AnlFun), init(Parent) end).
{ @name Pid }
-spec stop(Pid :: pid()) -> ok.
stop(Pid) ->
util:rpc_async(Pid, stop),
ok.
@doc the trace event analysis function into the process dictionary .
-spec embed(AnlFun :: monitor()) -> true.
embed(AnlFun) ->
undefined =:= put(?MONITOR, AnlFun).
-spec dispatch(Event :: event:int_event()) -> term().
dispatch(Event = {fork, _Parent, Child, _Mfa}) ->
do_monitor(event:to_evm_event(Event),
fun(Verdict) -> ?INFO("Reached verdict '~s' after ~w.", [Verdict, Event]) end
),
Child;
dispatch(Event = {init, _Child, Parent, _Mfa}) ->
do_monitor(event:to_evm_event(Event),
fun(Verdict) -> ?INFO("Reached verdict '~s' after ~w.", [Verdict, Event]) end
),
Parent;
dispatch(Event = {exit, _Process, Reason}) ->
do_monitor(event:to_evm_event(Event),
fun(Verdict) -> ?INFO("Reached verdict '~s' after ~w.", [Verdict, Event]) end
),
Reason;
dispatch(Event = {send, _Sender, _Receiver, Msg}) ->
do_monitor(event:to_evm_event(Event),
fun(Verdict) -> ?INFO("Reached verdict '~s' after ~w.", [Verdict, Event]) end
),
Msg;
dispatch(Event = {recv, _Receiver, Msg}) ->
do_monitor(event:to_evm_event(Event),
fun(Verdict) -> ?INFO("Reached verdict '~s' after ~w.", [Verdict, Event]) end
),
Msg.
-spec do_monitor(Event, VerdictFun) -> monitor() | undefined
when
Event :: event:evm_event(),
VerdictFun :: fun((Verdict :: verdict()) -> any()).
do_monitor(Event, VerdictFun) when is_function(VerdictFun, 1) ->
case get(?MONITOR) of
undefined ->
?TRACE("Analyzer undefined; discarding trace event ~w.", [Event]),
undefined;
Monitor ->
put(?MONITOR, Monitor0 = analyze(Monitor, Event)),
case is_verdict(Monitor0) of
true ->
VerdictFun(Monitor0);
false ->
ok
end,
Monitor0
end.
-spec filter(Event :: event:int_event()) -> true.
filter(_) ->
@private Monitor initialization .
-spec init(Parent) -> no_return()
when
Parent :: tracer:parent().
init(Parent) ->
if is_pid(Parent) -> link(Parent); true -> ok end,
loop(Parent).
@private Main monitor loop .
-spec loop(Parent) -> no_return()
when
Parent :: tracer:parent().
loop(Parent) ->
receive
{From, _, stop} ->
?assertEqual({messages, []}, process_info(self(), messages)),
?INFO("Analyzer received STOP command from tracer ~w.", [From]),
exit({garbage_collect, {monitor, ?VERDICT_END}});
Event ->
?assertEqual(trace, element(1, Event)),
?assert(
element(3, Event) =:= spawn orelse element(3, Event) =:= exit orelse
element(3, Event) =:= send orelse element(3, Event) =:= 'receive' orelse
element(3, Event) =:= spawned
),
do_monitor(Event,
fun(Verdict) -> exit({garbage_collect, {monitor, Verdict}}) end
),
loop(Parent)
end.
@private Determines whether the specified monitor is indeed a verdict .
-spec is_verdict(Verdict :: term()) -> boolean().
is_verdict(Verdict) when Verdict =:= yes; Verdict =:= no; Verdict =:= 'end' ->
true;
is_verdict(_) ->
false.
-spec analyze(Monitor, Event) -> monitor()
when
Monitor :: monitor(),
Event :: event:int_event().
analyze(Monitor, Event) ->
case is_verdict(Monitor) of
true ->
?TRACE("Analyzing event ~w and reached verdict '~s'.",
[Event, Monitor]),
Monitor;
false ->
?TRACE("Analyzing event ~w.", [Event]),
Monitor(Event)
end. |
fc6d0b3bd452d52c8d4a7762276df04b292fae94df8dda636169657f98e31714 | ghc/ghc | Ppr.hs |
| Provides facilities for pretty - printing ' Nabla 's in a way appropriate for
-- user facing pattern match warnings.
module GHC.HsToCore.Pmc.Ppr (
pprUncovered
) where
import GHC.Prelude
import GHC.Data.List.Infinite (Infinite (..))
import qualified GHC.Data.List.Infinite as Inf
import GHC.Types.Basic
import GHC.Types.Id
import GHC.Types.Var.Env
import GHC.Types.Unique.DFM
import GHC.Core.ConLike
import GHC.Core.DataCon
import GHC.Builtin.Types
import GHC.Utils.Outputable
import GHC.Utils.Panic
import GHC.Utils.Panic.Plain
import Control.Monad.Trans.RWS.CPS
import GHC.Data.Maybe
import Data.List.NonEmpty (NonEmpty, nonEmpty, toList)
import GHC.HsToCore.Pmc.Types
-- | Pretty-print the guts of an uncovered value vector abstraction, i.e., its
-- components and refutable shapes associated to any mentioned variables.
--
-- Example for @([Just p, q], [p :-> [3,4], q :-> [0,5]])@:
--
-- @
--     (Just p) q
--         where p is not one of {3, 4}
--               q is not one of {0, 5}
-- @
--
-- When the set of refutable shapes contains more than 3 elements, the
-- additional elements are indicated by "...".
pprUncovered :: Nabla -> [Id] -> SDoc
pprUncovered nabla vas
  | isNullUDFM refuts = fsep vec -- there are no refutations
  | otherwise         = hang (fsep vec) 4 $
      text "where" <+> vcat (map (pprRefutableShapes . snd) (udfmToList refuts))
  where
    -- A unary pattern needs no outer parentheses, so assume lowest
    -- precedence; otherwise parenthesise as in application position.
    init_prec = case vas of
      [_] -> topPrec
      _   -> appPrec
    (vec, renamings) = runPmPpr nabla (traverse (pprPmVar init_prec) vas)
    refuts           = prettifyRefuts nabla renamings
-- | Output refutable shapes of a variable in the form of @var is not one of
-- {2, Nothing, 3}@. Will never print more than 3 refutable shapes; a longer
-- tail is indicated by an ellipsis.
pprRefutableShapes :: (SDoc, [PmAltCon]) -> SDoc
pprRefutableShapes (var, alts) =
  var <+> text "is not one of" <+> braces (fsep (punctuate comma shown))
  where
    shown = case map ppr_alt alts of
      (a:b:c:_:_) -> [a, b, c, text "..."]
      docs        -> docs
    ppr_alt (PmAltConLike cl) = ppr cl
    ppr_alt (PmAltLit lit)    = ppr lit
1 . Literals
~~~~~~~~~~~~~~
Starting with a function definition like :
f : : Int - > Bool
f 5 = True
f 6 = True
The uncovered set looks like :
{ var | > var /= 5 , var /= 6 }
Yet , we would like to print this nicely as follows :
x , where x not one of { 5,6 }
Since these variables will be shown to the programmer , we give them better names
( t1 , t2 , .. ) in ' prettifyRefuts ' , hence the SDoc in ' PrettyPmRefutEnv ' .
2 . Residual Constraints
~~~~~~~~~~~~~~~~~~~~~~~
Unhandled constraints that refer to HsExpr are typically ignored by the solver
( it does not even substitute in HsExpr so they are even printed as wildcards ) .
Additionally , the oracle returns a substitution if it succeeds so we apply this
substitution to the vectors before printing them out ( see function ` pprOne ' in
" GHC.HsToCore . Pmc " ) to be more precise .
~~~~~~~~~~~~~~
Starting with a function definition like:
f :: Int -> Bool
f 5 = True
f 6 = True
The uncovered set looks like:
{ var |> var /= 5, var /= 6 }
Yet, we would like to print this nicely as follows:
x , where x not one of {5,6}
Since these variables will be shown to the programmer, we give them better names
(t1, t2, ..) in 'prettifyRefuts', hence the SDoc in 'PrettyPmRefutEnv'.
2. Residual Constraints
~~~~~~~~~~~~~~~~~~~~~~~
Unhandled constraints that refer to HsExpr are typically ignored by the solver
(it does not even substitute in HsExpr so they are even printed as wildcards).
Additionally, the oracle returns a substitution if it succeeds so we apply this
substitution to the vectors before printing them out (see function `pprOne' in
"GHC.HsToCore.Pmc") to be more precise.
-}
-- | Attach the pretty, renamed 'SDoc' of each constraint variable to its
-- refutable shapes, keyed by the variable's unique.
prettifyRefuts :: Nabla -> DIdEnv (Id, SDoc) -> DIdEnv (SDoc, [PmAltCon])
prettifyRefuts nabla env =
  listToUDFM_Directly
    [ (u, (sdoc, lookupRefuts nabla x)) | (u, (x, sdoc)) <- udfmToList env ]
-- | Pretty-printing monad: reads the 'Nabla', and threads the renaming
-- environment plus the supply of fresh, human-friendly names as state.
type PmPprM a = RWS Nabla () (DIdEnv (Id, SDoc), Infinite SDoc) a

-- Try nice names p, q, r, s, t before using the (ugly) t0, t1, ...
nameList :: Infinite SDoc
nameList = map text ["p","q","r","s","t"] Inf.++ flip Inf.unfoldr (0 :: Int) (\ u -> (text ('t':show u), u+1))
-- | Run a pretty-printing action against the given 'Nabla', returning the
-- result together with the final variable-renaming environment.
runPmPpr :: Nabla -> PmPprM a -> (a, DIdEnv (Id, SDoc))
runPmPpr nabla m =
  let (a, (renamings, _names), _unit) = runRWS m nabla (emptyDVarEnv, nameList)
  in (a, renamings)
-- | Allocates a new, clean name for the given 'Id' if it doesn't already
-- have one, recording the allocation and consuming one name from the supply.
getCleanName :: Id -> PmPprM SDoc
getCleanName x = do
  (renamings, supply) <- get
  case lookupDVarEnv renamings x of
    Just (_, nm) -> pure nm
    Nothing -> do
      let Inf fresh supply' = supply
      put (extendDVarEnv renamings x (x, fresh), supply')
      pure fresh
-- | The clean name of the variable if it has negative info attached;
-- 'Nothing' otherwise (it will just be printed as a wildcard later on).
checkRefuts :: Id -> PmPprM (Maybe SDoc)
checkRefuts x = do
  nabla <- ask
  if null (lookupRefuts nabla x)
    then pure Nothing
    else Just <$> getCleanName x
-- | Pretty-print a variable: a positive solution prints as its constructor
-- application; a variable with only negative info gets its clean name;
-- anything else is printed as an underscore.
pprPmVar :: PprPrec -> Id -> PmPprM SDoc
pprPmVar prec x = do
  nabla <- ask
  let ppr_solution (PACA alt _tvs args) = pprPmAltCon prec alt args
      ppr_no_solution = fromMaybe underscore <$> checkRefuts x
  maybe ppr_no_solution ppr_solution (lookupSolution nabla x)
-- | Pretty-print a constructor application; literals carry no arguments.
pprPmAltCon :: PprPrec -> PmAltCon -> [Id] -> PmPprM SDoc
pprPmAltCon _prec (PmAltLit l)      _    = pure (ppr l)
pprPmAltCon prec  (PmAltConLike cl) args =
  ask >>= \nabla -> pprConLike nabla prec cl args
-- | Pretty-print a 'ConLike' application, choosing between list syntax,
-- (unboxed) tuple syntax, infix syntax and plain prefix application.
pprConLike :: Nabla -> PprPrec -> ConLike -> [Id] -> PmPprM SDoc
-- Cons chains print as list literals ([x,y]) or list prefixes ((x:y:_)).
pprConLike nabla _prec cl args
  | Just pm_expr_list <- pmExprAsList nabla (PmAltConLike cl) args
  = case pm_expr_list of
      NilTerminated list ->
        brackets . fsep . punctuate comma <$> mapM (pprPmVar appPrec) list
      WcVarTerminated pref x ->
        parens . fcat . punctuate colon <$> mapM (pprPmVar appPrec) (toList pref ++ [x])
-- Tuples and unboxed tuples get their dedicated bracketing.
pprConLike _nabla _prec (RealDataCon con) args
  | isUnboxedTupleDataCon con
  , let hash_parens doc = text "(#" <+> doc <+> text "#)"
  = hash_parens . fsep . punctuate comma <$> mapM (pprPmVar appPrec) args
  | isTupleDataCon con
  = parens . fsep . punctuate comma <$> mapM (pprPmVar appPrec) args
-- Infix constructors with exactly two arguments print infix; otherwise
-- fall through to nullary/prefix application.
pprConLike _nabla prec cl args
  | conLikeIsInfix cl = case args of
      [x, y] -> do x' <- pprPmVar funPrec x
                   y' <- pprPmVar funPrec y
                   return (cparen (prec > opPrec) (x' <+> ppr cl <+> y'))
      -- can it be infix but have more than two arguments?
      list -> pprPanic "pprConLike:" (ppr list)
  | null args = return (ppr cl)
  | otherwise = do args' <- mapM (pprPmVar appPrec) args
                   return (cparen (prec > funPrec) (fsep (ppr cl : args')))
-- | The result of 'pmExprAsList'.
data PmExprList
  = NilTerminated [Id]               -- ^ A proper, @[]@-terminated list
  | WcVarTerminated (NonEmpty Id) Id -- ^ A non-empty list prefix ending in a wildcard variable
-- | Extract a list of 'Id's out of a sequence of cons cells, optionally
-- terminated by a wildcard variable instead of @[]@. Some examples:
--
-- * @pmExprAsList (1:2:[]) == Just ('NilTerminated' [1,2])@, a regular,
--   @[]@-terminated list. Should be pretty-printed as @[1,2]@.
-- * @pmExprAsList (1:2:x) == Just ('WcVarTerminated' [1,2] x)@, a list prefix
--   ending in a wildcard variable x (of list type). Should be pretty-printed as
--   (1:2:_).
-- * @pmExprAsList [] == Just ('NilTerminated' [])@
pmExprAsList :: Nabla -> PmAltCon -> [Id] -> Maybe PmExprList
pmExprAsList nabla = go_con []
  where
    -- Follow a variable: either it has a cons/nil solution, or it
    -- terminates a non-empty prefix as a wildcard.
    go_var rev_pref x
      | Just (PACA alt _tvs args) <- lookupSolution nabla x
      = go_con rev_pref alt args
    go_var rev_pref x
      | Just pref <- nonEmpty (reverse rev_pref)
      = Just (WcVarTerminated pref x)
    go_var _ _
      = Nothing
    go_con rev_pref (PmAltConLike (RealDataCon c)) es
      | c == nilDataCon
      = assert (null es) $ Just (NilTerminated (reverse rev_pref))
      -- A cons cell always has exactly two value arguments; pattern-match
      -- instead of the partial (!!) so malformed input degrades to Nothing
      -- rather than crashing when assertions are disabled.
      | c == consDataCon
      = assert (length es == 2) $ case es of
          (hd : tl : _) -> go_var (hd : rev_pref) tl
          _             -> Nothing
    go_con _ _ _
      = Nothing
| null | https://raw.githubusercontent.com/ghc/ghc/37cfe3c0f4fb16189bbe3bb735f758cd6e3d9157/compiler/GHC/HsToCore/Pmc/Ppr.hs | haskell | user facing pattern match warnings.
| Pretty-print the guts of an uncovered value vector abstraction, i.e., its
components and refutable shapes associated to any mentioned variables.
@
(Just p) q
@
additional elements are indicated by "...".
there are no refutations
No outer parentheses when it's a unary pattern by assuming lowest
precedence
indicated by an ellipsis.
| Extract and assigns pretty names to constraint variables with refutable
shapes.
| Allocates a new, clean name for the given 'Id' if it doesn't already have
one.
the clean name if it has negative info attached
Will just be a wildcard later on
| Pretty print a variable, but remember to prettify the names of the variables
that refer to neg-literals. The ones that cannot be shown are printed as
underscores.
| The result of 'pmExprAsList'.
terminated by a wildcard variable instead of @[]@. Some examples:
@[]@-terminated list. Should be pretty-printed as @[1,2]@.
ending in a wildcard variable x (of list type). Should be pretty-printed as |
| Provides facilities for pretty - printing ' Nabla 's in a way appropriate for
module GHC.HsToCore.Pmc.Ppr (
pprUncovered
) where
import GHC.Prelude
import GHC.Data.List.Infinite (Infinite (..))
import qualified GHC.Data.List.Infinite as Inf
import GHC.Types.Basic
import GHC.Types.Id
import GHC.Types.Var.Env
import GHC.Types.Unique.DFM
import GHC.Core.ConLike
import GHC.Core.DataCon
import GHC.Builtin.Types
import GHC.Utils.Outputable
import GHC.Utils.Panic
import GHC.Utils.Panic.Plain
import Control.Monad.Trans.RWS.CPS
import GHC.Data.Maybe
import Data.List.NonEmpty (NonEmpty, nonEmpty, toList)
import GHC.HsToCore.Pmc.Types
Example for @([Just p , q ] , [ p :-> [ 3,4 ] , q :-> [ 0,5]])@ :
where p is not one of { 3 , 4 }
q is not one of { 0 , 5 }
When the set of refutable shapes contains more than 3 elements , the
pprUncovered :: Nabla -> [Id] -> SDoc
pprUncovered nabla vas
| otherwise = hang (fsep vec) 4 $
text "where" <+> vcat (map (pprRefutableShapes . snd) (udfmToList refuts))
where
init_prec
| [_] <- vas = topPrec
| otherwise = appPrec
ppr_action = mapM (pprPmVar init_prec) vas
(vec, renamings) = runPmPpr nabla ppr_action
refuts = prettifyRefuts nabla renamings
| Output refutable shapes of a variable in the form of @var is not one of { 2 ,
Nothing , 3}@. Will never print more than 3 refutable shapes , the tail is
pprRefutableShapes :: (SDoc,[PmAltCon]) -> SDoc
pprRefutableShapes (var, alts)
= var <+> text "is not one of" <+> format_alts alts
where
format_alts = braces . fsep . punctuate comma . shorten . map ppr_alt
shorten (a:b:c:_:_) = a:b:c:[text "..."]
shorten xs = xs
ppr_alt (PmAltConLike cl) = ppr cl
ppr_alt (PmAltLit lit) = ppr lit
1 . Literals
~~~~~~~~~~~~~~
Starting with a function definition like :
f : : Int - > Bool
f 5 = True
f 6 = True
The uncovered set looks like :
{ var | > var /= 5 , var /= 6 }
Yet , we would like to print this nicely as follows :
x , where x not one of { 5,6 }
Since these variables will be shown to the programmer , we give them better names
( t1 , t2 , .. ) in ' prettifyRefuts ' , hence the SDoc in ' PrettyPmRefutEnv ' .
2 . Residual Constraints
~~~~~~~~~~~~~~~~~~~~~~~
Unhandled constraints that refer to HsExpr are typically ignored by the solver
( it does not even substitute in HsExpr so they are even printed as wildcards ) .
Additionally , the oracle returns a substitution if it succeeds so we apply this
substitution to the vectors before printing them out ( see function ` pprOne ' in
" GHC.HsToCore . Pmc " ) to be more precise .
~~~~~~~~~~~~~~
Starting with a function definition like:
f :: Int -> Bool
f 5 = True
f 6 = True
The uncovered set looks like:
{ var |> var /= 5, var /= 6 }
Yet, we would like to print this nicely as follows:
x , where x not one of {5,6}
Since these variables will be shown to the programmer, we give them better names
(t1, t2, ..) in 'prettifyRefuts', hence the SDoc in 'PrettyPmRefutEnv'.
2. Residual Constraints
~~~~~~~~~~~~~~~~~~~~~~~
Unhandled constraints that refer to HsExpr are typically ignored by the solver
(it does not even substitute in HsExpr so they are even printed as wildcards).
Additionally, the oracle returns a substitution if it succeeds so we apply this
substitution to the vectors before printing them out (see function `pprOne' in
"GHC.HsToCore.Pmc") to be more precise.
-}
prettifyRefuts :: Nabla -> DIdEnv (Id, SDoc) -> DIdEnv (SDoc, [PmAltCon])
prettifyRefuts nabla = listToUDFM_Directly . map attach_refuts . udfmToList
where
attach_refuts (u, (x, sdoc)) = (u, (sdoc, lookupRefuts nabla x))
type PmPprM a = RWS Nabla () (DIdEnv (Id, SDoc), Infinite SDoc) a
Try nice names p , q , r , s , t before using the ( ugly ) t_i
nameList :: Infinite SDoc
nameList = map text ["p","q","r","s","t"] Inf.++ flip Inf.unfoldr (0 :: Int) (\ u -> (text ('t':show u), u+1))
runPmPpr :: Nabla -> PmPprM a -> (a, DIdEnv (Id, SDoc))
runPmPpr nabla m = case runRWS m nabla (emptyDVarEnv, nameList) of
(a, (renamings, _), _) -> (a, renamings)
getCleanName :: Id -> PmPprM SDoc
getCleanName x = do
(renamings, name_supply) <- get
let Inf clean_name name_supply' = name_supply
case lookupDVarEnv renamings x of
Just (_, nm) -> pure nm
Nothing -> do
put (extendDVarEnv renamings x (x, clean_name), name_supply')
pure clean_name
checkRefuts x = do
nabla <- ask
case lookupRefuts nabla x of
_ -> Just <$> getCleanName x
pprPmVar :: PprPrec -> Id -> PmPprM SDoc
pprPmVar prec x = do
nabla <- ask
case lookupSolution nabla x of
Just (PACA alt _tvs args) -> pprPmAltCon prec alt args
Nothing -> fromMaybe underscore <$> checkRefuts x
pprPmAltCon :: PprPrec -> PmAltCon -> [Id] -> PmPprM SDoc
pprPmAltCon _prec (PmAltLit l) _ = pure (ppr l)
pprPmAltCon prec (PmAltConLike cl) args = do
nabla <- ask
pprConLike nabla prec cl args
pprConLike :: Nabla -> PprPrec -> ConLike -> [Id] -> PmPprM SDoc
pprConLike nabla _prec cl args
| Just pm_expr_list <- pmExprAsList nabla (PmAltConLike cl) args
= case pm_expr_list of
NilTerminated list ->
brackets . fsep . punctuate comma <$> mapM (pprPmVar appPrec) list
WcVarTerminated pref x ->
parens . fcat . punctuate colon <$> mapM (pprPmVar appPrec) (toList pref ++ [x])
pprConLike _nabla _prec (RealDataCon con) args
| isUnboxedTupleDataCon con
, let hash_parens doc = text "(#" <+> doc <+> text "#)"
= hash_parens . fsep . punctuate comma <$> mapM (pprPmVar appPrec) args
| isTupleDataCon con
= parens . fsep . punctuate comma <$> mapM (pprPmVar appPrec) args
pprConLike _nabla prec cl args
| conLikeIsInfix cl = case args of
[x, y] -> do x' <- pprPmVar funPrec x
y' <- pprPmVar funPrec y
return (cparen (prec > opPrec) (x' <+> ppr cl <+> y'))
can it be infix but have more than two arguments ?
list -> pprPanic "pprConLike:" (ppr list)
| null args = return (ppr cl)
| otherwise = do args' <- mapM (pprPmVar appPrec) args
return (cparen (prec > funPrec) (fsep (ppr cl : args')))
data PmExprList
= NilTerminated [Id]
| WcVarTerminated (NonEmpty Id) Id
| Extract a list of ' I d 's out of a sequence of cons cells , optionally
* @pmExprAsList ( 1:2 : [ ] ) = = Just ( ' NilTerminated ' [ 1,2])@ , a regular ,
* @pmExprAsList ( 1:2 :x ) = = Just ( ' WcVarTerminated ' [ 1,2 ] x)@ , a list prefix
( 1:2 : _ ) .
* @pmExprAsList [ ] = = Just ( ' NilTerminated ' [ ] ) @
pmExprAsList :: Nabla -> PmAltCon -> [Id] -> Maybe PmExprList
pmExprAsList nabla = go_con []
where
go_var rev_pref x
| Just (PACA alt _tvs args) <- lookupSolution nabla x
= go_con rev_pref alt args
go_var rev_pref x
| Just pref <- nonEmpty (reverse rev_pref)
= Just (WcVarTerminated pref x)
go_var _ _
= Nothing
go_con rev_pref (PmAltConLike (RealDataCon c)) es
| c == nilDataCon
= assert (null es) $ Just (NilTerminated (reverse rev_pref))
| c == consDataCon
= assert (length es == 2) $ go_var (es !! 0 : rev_pref) (es !! 1)
go_con _ _ _
= Nothing
|
b575ea583ad40bae1670ba9a74a7a2ea7a9a6998b9ea8046a961bdce4f5c01bb | exercism/erlang | example.erl | -module(example).
-export([score/2]).
%% Score a dart landing at (X, Y): 10 inside the inner circle (radius 1),
%% 5 inside the middle circle (radius 5), 1 inside the outer circle
%% (radius 10), and 0 outside the board.
score(X, Y) ->
    RadiusSquared = X * X + Y * Y,
    if
        RadiusSquared =< 1   -> 10;
        RadiusSquared =< 25  -> 5;
        RadiusSquared =< 100 -> 1;
        true                 -> 0
    end.
| null | https://raw.githubusercontent.com/exercism/erlang/57ac2707dae643682950715e74eb271f732e2100/exercises/practice/darts/.meta/example.erl | erlang | -module(example).
-export([score/2]).
score(X, Y) when X*X+Y*Y=<1 -> 10;
score(X, Y) when X*X+Y*Y=<25 -> 5;
score(X, Y) when X*X+Y*Y=<100 -> 1;
score(_, _) -> 0.
| |
129b6ed0af25fadc196a5df16c5713355992ce1e644021a0f57fc4945ea8c9b0 | armedbear/abcl | test-cffi.lisp | (require :asdf)
;; Load ABCL's contrib modules so ASDF can find the bundled systems.
(require :abcl-contrib)
;; Build the ABCL-specific ASDF extensions and the JNA native bridge that
;; CFFI's ABCL backend depends on.
(asdf:make :abcl-asdf)
(asdf:make :jna)
;; Fetch and load CFFI together with its test suite via Quicklisp.
(ql:quickload
 '(:cffi :cffi-tests))
;; Run the CFFI test suite, reporting how long it took.
(time
 (asdf:test-system :cffi))
| null | https://raw.githubusercontent.com/armedbear/abcl/dad0efca42e4b90b042e415bd188ea86cba4145e/ci/test-cffi.lisp | lisp | (require :asdf)
(require :abcl-contrib)
(asdf:make :abcl-asdf)
(asdf:make :jna)
(ql:quickload
'(:cffi :cffi-tests))
(time
(asdf:test-system :cffi))
| |
257defdabeae13bf675bb7b09c125d3ac4b51250b42b2a18b9ffbcc4551ec52c | rayiner/amd64-asm | encoders.lisp | ; encoders.lisp
; Encoders for AMD64 instruction set.
(in-package "AMD64-ASM")
; maybe move condition definitions somewhere else
; Root condition type for all assembler failures.
; NOTE(review): the (()) slot list declares a single slot named NIL --
; presumably () was intended; confirm before changing.
(define-condition assembler-error (error)
  (()))
; Signaled when an instruction form cannot be encoded; FORM holds the
; offending source form.
(define-condition encoding-error (assembler-error)
  ((form :initarg :form :reader encoding-error-form)))
; Signaled when an internal encoding precondition (see WITH-CHECKS) fails;
; CHECK holds the predicate form that evaluated to false.
(define-condition assertion-failed (encoding-error)
  ((check :initarg :check :reader assertion-failed-check)))
; make this more sophisticated for error handling later
; Evaluate BODY only when PRED holds; otherwise signal ASSERTION-FAILED,
; recording the unevaluated predicate form for diagnostics.
(defmacro with-checks (pred &body body)
  `(if ,pred
       (progn ,@body)
       (error 'assertion-failed :check ',pred)))
; 8-bit registers, in an order matching the number table in REGISTER-NUMBER.
(defparameter *byte-regs* '(:al :bl :cl :dl :sil :dil :bpl :spl
                            :r8b :r9b :r10b :r11b :r12b :r13b :r14b :r15b))
; 32-bit registers, same ordering.
(defparameter *half-regs* '(:eax :ebx :ecx :edx :esi :edi :ebp :esp
                            :r8d :r9d :r10d :r11d :r12d :r13d :r14d :r15d))
; 64-bit registers, same ordering.
(defparameter *word-regs* '(:rax :rbx :rcx :rdx :rsi :rdi :rbp :rsp
                            :r8 :r9 :r10 :r11 :r12 :r13 :r14 :r15))
; 128-bit SSE vector registers.
(defparameter *vec-regs* '(:xmm0 :xmm3 :xmm1 :xmm2 :xmm6 :xmm7 :xmm5 :xmm4
                           :xmm8 :xmm9 :xmm10 :xmm11 :xmm12 :xmm13 :xmm14
                           :xmm15))
; Mnemonics that take short-displacement (label) operands: conditional
; jumps and JMP.
(defparameter *sdis* '(:jo :jno :jb :jnb :jz :jnz :jbe :jnbe
                       :js :jns :jp :jnp :jl :jge :jle :jg :jmp))
; Legal instruction prefix bytes (operand/address-size overrides, segment
; overrides, LOCK, REP/REPNE). Needed at compile time by the encoder macros.
(eval-when (:compile-toplevel :load-toplevel :execute)
  (defparameter *prefixes* '(#x66 #x67 #x64 #x65 #xF0 #xF3 #xF2)))
; Registry of all encoders defined via DEFINE-ENCODER, as (mnemonic clauses)
; entries; populated at macro-expansion time.
(eval-when (:compile-toplevel :load-toplevel :execute)
  (defparameter *encoders* nil))
; info about operands of an instruction
; Collects every operand-derived field of an encoding: the opcode register
; extension, the ModRM and SIB fields, and the displacement and immediate
; together with their widths and relocation info.
(defstruct oprinfo
  oc.ext        ; register number added to the opcode (short +r forms)
  modrm.mod     ; ModRM.mod field (addressing mode)
  modrm.reg     ; ModRM.reg field (register or opcode extension digit)
  modrm.rm      ; ModRM.rm field
  sib.scale     ; SIB.scale field (log2 of the index scale)
  sib.index     ; SIB.index field
  sib.base      ; SIB.base field
  disp          ; displacement value, or a symbol for relocations
  imm           ; immediate value, or a symbol for relocations
  imm.bytes     ; width of the emitted immediate, in bytes
  imm.rel-type  ; relocation type for a symbolic immediate
  imm.rel-addn  ; addend emitted alongside a symbolic immediate
  disp.bytes    ; width of the emitted displacement, in bytes
  disp.rel-type ; relocation type for a symbolic displacement
  disp.rel-addn ; addend emitted alongside a symbolic displacement
  )
; info about the opcodes of an instruction
; OVERRIDE? is true when the instruction needs REX.W (64-bit operand size);
; PREFIXES and OPCODES are growable byte vectors.
(defstruct ocinfo
  override?
  prefixes
  opcodes)

; Make an OCINFO with empty, extensible prefix and opcode vectors.
(defun new-ocinfo ()
  (make-ocinfo :opcodes (make-array 0 :fill-pointer t)
               :prefixes (make-array 0 :fill-pointer t)))
; Width in bytes of an operand-size specifier keyword; NIL when the
; keyword is not a known specifier.
(defun specifier-width (spec)
  (cdr (assoc spec '((:byte . 1) (:half . 4) (:word . 8) (:wide . 16)))))
; Next-larger operand-size specifier; :word saturates at itself. NIL for
; anything else (including :wide, matching the original table).
(defun specifier-next (spec)
  (cdr (assoc spec '((:byte . :half) (:half . :word) (:word . :word)))))
; Hardware encoding number of REG, or NIL when REG names no register.
; The register tables list registers in mnemonic order; the table position
; is mapped onto the architectural register numbers below.
(defun register-number (reg)
  (let ((idx (some (lambda (table) (position reg table))
                   (list *byte-regs* *half-regs* *word-regs* *vec-regs*))))
    (and idx (nth idx '(0 3 1 2 6 7 5 4 8 9 10 11 12 13 14 15)))))
; Generalized truth: returns the register number when OPERAND is a register.
(defun reg? (operand)
  (register-number operand))
; Register-class predicates; each tests membership in the matching table.
(defun byte-reg? (reg)
  (member reg *byte-regs*))
(defun half-reg? (reg)
  (member reg *half-regs*))
(defun word-reg? (reg)
  (member reg *word-regs*))
(defun xmm-reg? (reg)
  (member reg *vec-regs*))
; True when two registers share the same hardware number, e.g. :al/:eax/:rax.
(defun same-reg? (rega regb)
  (eql (register-number rega) (register-number regb)))
; True when OPERAND is a valid immediate. Immediates are either plain
; integers, or symbolic forms (SIZE SYMBOL) / (SIZE SYMBOL ADDEND), where
; SIZE is an operand-size keyword, SYMBOL names a relocation target, and
; the optional integer ADDEND must fit in SIZE bytes (signed or unsigned).
(defun immediate? (operand)
  (or (integerp operand)
      (and (listp operand)
           (or (and (eql (length operand) 2)
                    (symbolp (first operand))
                    (symbolp (second operand)))
               (and (eql (length operand) 3)
                    (symbolp (first operand))
                    (symbolp (second operand))
                    (integerp (third operand))
                    (or (<= (signed-width (third operand))
                            (specifier-width (first operand)))
                        (<= (unsigned-width (third operand))
                            (specifier-width (first operand)))))))))
; Width in bytes of an immediate: the signed width of a literal integer,
; or the declared size keyword of a symbolic immediate form.
(defun immediate-width (operand)
  (cond ((integerp operand) (signed-width operand))
        (t (specifier-width (first operand)))))
; Size-bounded immediate predicates: immediates fitting in 1, 2, 4 or 8
; bytes respectively.
(defun byte-immediate? (operand)
  (and (immediate? operand) (<= (immediate-width operand) 1)))
(defun short-immediate? (operand)
  (and (immediate? operand) (<= (immediate-width operand) 2)))
(defun half-immediate? (operand)
  (and (immediate? operand) (<= (immediate-width operand) 4)))
(defun word-immediate? (operand)
  (and (immediate? operand) (<= (immediate-width operand) 8)))
; True when OPERAND is a memory operand of the shape
; (SIZE BASE INDEX SCALE DISP): SIZE is an operand-size keyword, BASE and
; INDEX are register symbols (INDEX may be NIL; BASE may also be :rip or
; :abs), SCALE is an integer, and DISP is an immediate.
(defun mem? (operand)
  (and (listp operand)
       (eql (length operand) 5)
       (symbolp (first operand))
       (symbolp (second operand))
       (symbolp (third operand))
       (integerp (fourth operand))
       (immediate? (fifth operand))))
; Size-specific memory-operand predicates.
(defun byte-mem? (operand)
  (and (mem? operand) (eql (first operand) :byte)))
(defun half-mem? (operand)
  (and (mem? operand) (eql (first operand) :half)))
(defun word-mem? (operand)
  (and (mem? operand) (eql (first operand) :word)))
(defun wide-mem? (operand)
  (and (mem? operand) (eql (first operand) :wide)))
; True for a short-displacement instruction: a conditional jump or JMP
; whose target operand is a symbol (label).
(defun sdi? (insn)
  (and (member (first insn) *sdis*)
       (symbolp (second insn))))
; Build a REX prefix byte (0100WRXB) from its four one-bit fields.
(defun compose-rex (w r x b)
  (with-checks (and (< w 2) (< r 2) (< x 2) (< b 2))
    (logior #x40 (ash w 3) (ash r 2) (ash x 1) b)))

; Split a REX prefix byte back into a plist of its W, R, X and B bits.
(defun decode-rex (r)
  (with-checks (integerp r)
    (list :w (ldb (byte 1 3) r)
          :r (ldb (byte 1 2) r)
          :x (ldb (byte 1 1) r)
          :b (ldb (byte 1 0) r))))

; Build a ModRM byte from its mod (2-bit), reg and rm (3-bit) fields.
(defun compose-modrm (mod reg rm)
  (with-checks (and (< mod 4) (< reg 8) (< rm 8))
    (logior (ash mod 6) (ash reg 3) rm)))

; Split a ModRM byte into a plist of its mod, reg and rm fields.
(defun decode-modrm (m)
  (with-checks (integerp m)
    (list :mod (ldb (byte 2 6) m)
          :reg (ldb (byte 3 3) m)
          :rm (ldb (byte 3 0) m))))

; Build a SIB byte from its scale (2-bit), index and base (3-bit) fields.
(defun compose-sib (scale index base)
  (with-checks (and (< scale 8) (< index 8) (< base 8))
    (logior (ash scale 6) (ash index 3) base)))

; Split a SIB byte into a plist of its scale, index and base fields.
(defun decode-sib (s)
  (with-checks (integerp s)
    (list :scale (ldb (byte 2 6) s)
          :index (ldb (byte 3 3) s)
          :base (ldb (byte 3 0) s))))
; Record register operand REG into INSN at position WHERE: 'REG goes into
; ModRM.reg, 'RM into ModRM.rm (with mod = #b11, register-direct), and 'OP
; into the opcode itself (short +r register forms).
(defun add-reg-operand (insn reg where)
  (with-checks (reg? reg)
    (let ((num (register-number reg)))
      (ecase where
        (reg (setf (oprinfo-modrm.reg insn) num))
        (rm (setf (oprinfo-modrm.mod insn) #x3)
            (setf (oprinfo-modrm.rm insn) num))
        (op (setf (oprinfo-oc.ext insn) num))))))

; Record immediate operand IMM of WIDTH bytes into INSN. Literal integers
; are stored directly; symbolic immediates also record the relocation TYPE
; and the optional addend (third element of the form, defaulting to 0).
(defun add-immediate-operand (insn imm width type)
  (with-checks (immediate? imm)
    (if (integerp imm)
        (progn
          (setf (oprinfo-imm insn) imm)
          (setf (oprinfo-imm.bytes insn) width))
        (progn
          (setf (oprinfo-imm insn) (second imm))
          (setf (oprinfo-imm.bytes insn) width)
          (setf (oprinfo-imm.rel-type insn) type)
          (setf (oprinfo-imm.rel-addn insn) (or (third imm) 0))))))

; Record an opcode extension digit (/0../7) in ModRM.reg.
(defun add-opcode-extension (insn subcode)
  (with-checks (integerp subcode)
    (setf (oprinfo-modrm.reg insn) subcode)))
; ModRM.mod value implied by displacement DISP: 0 for no displacement,
; 1 for a signed byte, 2 for a 32-bit displacement. Symbolic displacements
; use their declared size keyword.
(defun modrm.mod-for-disp (disp)
  (cond
    ((eql disp 0) 0)
    ((integerp disp)
     (ecase (signed-width disp)
       (1 1)
       ((2 4) 2)))
    ((listp disp)
     (ecase (first disp)
       (:byte 1)
       (:half 2)))))

; SIB.scale field (log2) for an index scale factor of 1, 2, 4 or 8;
; errors on any other factor.
(defun sib.scale-for-scale (scale)
  (ecase scale
    (1 0)
    (2 1)
    (4 2)
    (8 3)))

; Record the SIB scale and index; a missing index encodes as index #x04
; ("no index").
(defun add-sib.index (insn index scale)
  (setf (oprinfo-sib.scale insn) (sib.scale-for-scale scale))
  (if index
      (setf (oprinfo-sib.index insn) (register-number index))
      (setf (oprinfo-sib.index insn) #x04)))

; Number of displacement bytes to emit: BYTES when forced by the caller,
; otherwise the displacement's own width, promoting 2 bytes to 4 (there
; are no 16-bit displacements in 64-bit mode).
(defun compute-disp.bytes (disp bytes)
  (or bytes
      (let ((sz (immediate-width disp)))
        (if (eql sz 2) 4 sz))))
; Record displacement DISP (with relocation TYPE for symbolic forms) into
; INSN, unless it is a literal 0 and no explicit width is forced.
(defun add-disp (insn disp type &optional bytes)
  (if (or (not (eql disp 0)) bytes)
      (let ((sz (compute-disp.bytes disp bytes)))
        (if (integerp disp)
            (progn
              (setf (oprinfo-disp insn) disp)
              (setf (oprinfo-disp.bytes insn) sz))
            (progn
              (setf (oprinfo-disp insn) (second disp))
              (setf (oprinfo-disp.bytes insn) sz)
              (setf (oprinfo-disp.rel-type insn) type)
              (setf (oprinfo-disp.rel-addn insn) (or (third disp) 0)))))))
; Record the base/index part of a memory operand. An index register, or a
; base whose low bits collide with the SIB escape (:rsp/:r12), forces a SIB
; byte (ModRM.rm = #x04); otherwise the base goes directly into ModRM.rm.
; A base with no register number encodes as SIB.base #x5 (disp-only).
(defun add-mem-rest (insn base index scale)
  (if (or index (same-reg? base :rsp) (same-reg? base :r12))
      (progn (setf (oprinfo-modrm.rm insn) #x04)
             (setf (oprinfo-sib.base insn)
                   (or (register-number base) #x5))
             (add-sib.index insn index scale))
      (setf (oprinfo-modrm.rm insn) (register-number base))))

; Field-setting helpers for ModRM.
(defun add-modrm.mod-and-modrm.rm (insn mod rm)
  (setf (oprinfo-modrm.mod insn) mod)
  (setf (oprinfo-modrm.rm insn) rm))

(defun add-modrm.mod-only (insn mod)
  (setf (oprinfo-modrm.mod insn) mod))
; Record memory operand MEM into INSN, selecting among the x86-64
; addressing-mode encodings:
;   :rip base -> RIP-relative (mod 0, rm #x05, 4-byte displacement)
;   :abs base -> absolute via SIB with no base (rm #x04, base #x05, disp32)
;   :rbp/:r13 -> these bases cannot use mod 0, so a zero displacement is
;                encoded as an explicit disp8 (mod 1)
;   otherwise -> mod is chosen from the displacement width
(defun add-mem-operand (insn mem)
  (with-checks (mem? mem)
    (destructuring-bind (sz base index scale disp) mem
      (declare (ignore sz))
      ; The base must be a register or one of the pseudo-bases.
      (unless (or (member base '(:rip :abs)) (register-number base))
        (error 'encoding-error :form mem))
      (cond
        ((eql base :rip)
         (add-modrm.mod-and-modrm.rm insn #x0 #x05)
         (add-disp insn disp :rel #x04))
        ((eql base :abs)
         (add-modrm.mod-and-modrm.rm insn #x0 #x04)
         (setf (oprinfo-sib.base insn) #x05)
         (add-sib.index insn index scale)
         (add-disp insn disp :rel #x04))
        ((and (or (same-reg? base :rbp)
                  (same-reg? base :r13))
              (eql disp 0))
         (add-modrm.mod-only insn #x01)
         (add-disp insn disp :rel #x01)
         (add-mem-rest insn base index scale))
        (t
         (add-modrm.mod-only insn (modrm.mod-for-disp disp))
         (add-disp insn disp :rel)
         (add-mem-rest insn base index scale))))))
; Syntax for defining instruction encoders.
; Encoder consists of sequences of clauses, each
; with two parts: a pattern, and a production.
; The pattern is a sequence of one or more of
; the following symbols:
; r8 rm8 r32 imm8 imm32 imm64 s32 s64
; x
; rX stands for a register of width X bits
; x stands for a vector register
; xmX stands for a vector register or memory
; operand of width X bits
; rmX stands for a register or memory operand
; of width X bits
; immX stands for an immediate of width X bits.
; sX stands for a symbolic immediate of width X bits.
; The product is a sequence of either integers,
; representing opcodes, or one or more of the
; following symbols:
; ib id iq cb cd /0 /1 /2 /3 /4 /5 /6 /7 /r /rm +r *
; * means that the instruction defaults to 64-bit, and needs
; no override prefix. It must be specified at the beginning.
; ib id and iq mean to follow the instruction
; with a 1, 4, or 8 byte immediate, respectively.
; /0 through /7 mean to specify that digit in modrm.reg
; /r means to use a regular modrm form, with modrm.reg as dest
; /rm means to use a regular modrm form, with modrm.rm as dest
; +r means to use a short form, adding the dest register to opcode
; The instruction width is determined by the form of the destination.
; The /0 through /7 /r /rm and +r terms are necessary to match
; the syntax of the processor reference manual, but are somewhat
; awkward to use programatically because they have multiple
; implications. These terms are transformed as follows:
; /0 through /7 -> /n /rm
; ib through iw -> ix
; /r -> /r /rm
; /rm -> /rm /r
; These terms are used as follows, mapped to corresponding operand
; /n -> set modrm.reg to subcode
; /rm -> add reg or mem operand to modrm.rm
; /r -> add reg parameter to modrm.reg
; ix -> add immediate operand
; cx -> add immediate operand (RIP-relative)
; Test whether concrete operand OPR satisfies pattern symbol CONSTRAINT.
; A literal register keyword or immediate in the pattern must match
; exactly; otherwise the constraint names an operand class from the
; encoder pattern language (rN/mN/rmN for GPRs and memory, x/xmN for
; vector registers, immN for immediates, with N the width in bits).
(defun operand-matches? (opr constraint)
  (if (or (reg? constraint) (immediate? constraint))
      (eql opr constraint)
      (ecase constraint
        (rm8 (or (byte-reg? opr) (byte-mem? opr)))
        (rm32 (or (half-reg? opr) (half-mem? opr)))
        (rm64 (or (word-reg? opr) (word-mem? opr)))
        (m8 (byte-mem? opr))
        (m32 (half-mem? opr))
        (m64 (word-mem? opr))
        (m128 (wide-mem? opr))
        (r8 (byte-reg? opr))
        (r32 (half-reg? opr))
        (r64 (word-reg? opr))
        (x (xmm-reg? opr))
        (xm32 (or (xmm-reg? opr) (half-mem? opr)))
        (xm64 (or (xmm-reg? opr) (word-mem? opr)))
        (xm128 (or (xmm-reg? opr) (wide-mem? opr)))
        (imm8 (byte-immediate? opr))
        (imm16 (short-immediate? opr))
        (imm32 (half-immediate? opr))
        (imm64 (word-immediate? opr)))))
; Compile-time helpers for DEFINE-ENCODER. Everything here is needed during
; macro expansion, hence the EVAL-WHEN.
(eval-when (:compile-toplevel :load-toplevel :execute)
  ; True for pattern symbols that imply a 64-bit operand and thus REX.W.
  (defun operand-needs-override? (opr)
    (member opr '(rm64 r64 imm64)))
  ; Opcode-extension digit for a /0../7 command; NIL otherwise.
  (defun subcode-for-subcode-command (cmd)
    (case cmd
      (/0 0)
      (/1 1)
      (/2 2)
      (/3 3)
      (/4 4)
      (/5 5)
      (/6 6)
      (/7 7)
      (t nil)))
  (defun subcode-command? (cmd)
    (member cmd '(/0 /1 /2 /3 /4 /5 /6 /7)))
  ; Immediate width in bytes for an iN/cN command; NIL otherwise.
  (defun width-for-immediate-command (cmd)
    (case cmd
      ((ib cb) 1)
      (iw 2)
      ((id cd) 4)
      (iq 8)
      (t nil)))
  ; Relocation kind: iN commands emit absolute immediates, cN commands
  ; emit RIP-relative (branch) ones.
  (defun rel-for-immediate-command (cmd)
    (case cmd
      ((ib iw id iq) :abs)
      ((cb cd) :bra)))
  (defun immediate-command? (cmd)
    (member cmd '(ib iw id iq cb cd)))
  ; Expand the manual-style commands into explicit ones (see the commentary
  ; above): /N and /r gain a /rm companion, /rm gains a /r companion.
  (defun regularize-commands (cmds)
    (iter (for cmd in cmds)
          (cond
            ((subcode-command? cmd)
             (collect cmd)
             (collect '/rm))
            ((immediate-command? cmd)
             (collect cmd))
            ((eql cmd '/r)
             (collect '/r)
             (collect '/rm))
            ((eql cmd '/rm)
             (collect '/rm)
             (collect '/r))
            (t
             (collect cmd)))))
  ; Walk CMDS and OPERANDS in parallel, producing the forms that record
  ; each operand into the OPRINFO/OCINFO variables named by the arguments.
  ; Subcode and * commands consume no operand; unknown commands (opcode
  ; bytes) are skipped here and handled by GENERATE-OPCODE-HANDLERS.
  (defun generate-operand-handlers (ocinfo oinfo cmds operands)
    (if (and cmds operands)
        (let ((cmd (car cmds))
              (opr (car operands)))
          (flet ((advance () (generate-operand-handlers ocinfo oinfo (cdr cmds)
                                                        (cdr operands)))
                 (ignore () (generate-operand-handlers ocinfo oinfo (cdr cmds)
                                                       operands)))
            (cond
              ((subcode-command? cmd)
               (cons `(add-opcode-extension ,oinfo
                                            ,(subcode-for-subcode-command cmd))
                     (ignore)))
              ((immediate-command? cmd)
               (cons `(add-immediate-operand ,oinfo ,opr
                                             ,(width-for-immediate-command cmd)
                                             ',(rel-for-immediate-command cmd))
                     (advance)))
              ((eql cmd '+r)
               (cons `(add-reg-operand ,oinfo ,opr 'op) (advance)))
              ((eql cmd '/r)
               (cons `(add-reg-operand ,oinfo ,opr 'reg) (advance)))
              ((eql cmd '/rm)
               (cons `(cond ((reg? ,opr)
                             (add-reg-operand ,oinfo ,opr 'rm))
                            ((mem? ,opr)
                             (add-mem-operand ,oinfo ,opr)))
                     (advance)))
              ((eql cmd '*)
               (cons `(setf (ocinfo-override? ,ocinfo) nil)
                     (ignore)))
              (t (ignore)))))))
  ; Index of the first opcode byte that is not a legacy prefix.
  (defun find-first-non-prefix (ocs)
    (position (find-if-not #'(lambda (elt)
                               (member elt *prefixes*))
                           ocs)
              ocs))
  (defun collect-prefixes (ocs)
    (subseq ocs 0 (find-first-non-prefix ocs)))
  (defun collect-opcodes (ocs)
    (subseq ocs (find-first-non-prefix ocs) nil))
  ; Produce the forms that push prefix and opcode bytes into the OCINFO.
  (defun generate-opcode-handlers (ocinfo cmds)
    (let* ((ocs (remove-if-not #'integerp cmds))
           (pfxs (collect-prefixes ocs))
           (opcodes (collect-opcodes ocs)))
      `(,@(mapcar #'(lambda (pfx)
                      `(vector-push-extend ,pfx (ocinfo-prefixes ,ocinfo)))
                  pfxs)
        ,@(mapcar #'(lambda (oc)
                      `(vector-push-extend ,oc (ocinfo-opcodes ,ocinfo)))
                  opcodes))))
  ; note that this may later be undone in the command handlers (by *)
  (defun maybe-generate-override-setter (ocinfo constraints)
    (if (some #'operand-needs-override? constraints)
        `(setf (ocinfo-override? ,ocinfo) t)
        `(progn)))
  ; Produce the form for one clause body: allocate fresh OPRINFO/OCINFO,
  ; record the override, the operands and the opcode bytes, and return
  ; both structures as multiple values.
  (defun transform-production (pattern production operands)
    (let ((cmds (regularize-commands production))
          (oprinfo (gensym))
          (ocinfo (gensym)))
      `(let ((,oprinfo (make-oprinfo))
             (,ocinfo (new-ocinfo)))
         ,(maybe-generate-override-setter ocinfo pattern)
         ,@(generate-operand-handlers ocinfo oprinfo cmds operands)
         ,@(generate-opcode-handlers ocinfo cmds)
         (values ,ocinfo ,oprinfo))))
  (defun transform-constraint (constraint operand)
    `(operand-matches? ,operand ',constraint))
  ; Turn one (pattern production) clause into a COND clause testing the
  ; operands against the pattern.
  (defun transform-clause (clause operands)
    (let ((pattern (car clause))
          (production (cadr clause)))
      `((and ,@(mapcar #'transform-constraint pattern operands))
        ,(transform-production pattern production operands)))))
; Define an encoder function ENCODE-<INSN> taking OPERANDS and dispatching
; over the given (pattern production) clauses (see the commentary above).
; Also registers the clauses in *ENCODERS*, keyed by the mnemonic keyword,
; as a macro-expansion-time side effect.
(defmacro define-encoder (insn operands &body body)
  (let ((name (prefixsym "ENCODE-" insn)))
    (push (list (intern (symbol-name insn) "KEYWORD") body) *encoders*)
    `(defun ,name ,operands
       (cond ,@(mapcar #'(lambda (clause)
                           (transform-clause clause operands))
                       body)))))
; Low three bits of a register number (the part that fits in a ModRM/SIB
; field); NIL for non-integer input.
(defun register-low-part (reg)
  (when (integerp reg)
    (logand reg #x7)))
; 1 when any of REGS is an extended register (number > 7) and therefore
; needs its REX extension bit set, else 0. Non-integer (absent) operands
; contribute 0. Unlike the previous APPLY #'MAX formulation, this is also
; well-defined for an empty argument list.
(defun req-rex-bit (&rest regs)
  (if (some #'(lambda (reg) (and (integerp reg) (> reg 7))) regs)
      1
      0))
; Emit a REX prefix if any of its fields is non-default: W from the 64-bit
; override, R from ModRM.reg, X from SIB.index, and B from whichever of
; SIB.base, ModRM.rm or the opcode register extension is in use. A REX of
; #x40 carries no information and is omitted.
(defun maybe-emit-rex (ln ocinfo oprinfo)
  (with-slots ((reg modrm.reg)
               (rm modrm.rm)
               (index sib.index)
               (base sib.base)
               (ext oc.ext)) oprinfo
    (let ((rex (compose-rex (if (ocinfo-override? ocinfo) 1 0)
                            (req-rex-bit reg)
                            (req-rex-bit index)
                            (req-rex-bit base rm ext))))
      (if (not (eql rex #x40))
          (emit-byte ln rex)))))
; Emit every accumulated legacy prefix byte, in order.
(defun maybe-emit-prefixes (ln ocinfo)
  (loop for pfx across (ocinfo-prefixes ocinfo)
        do (emit-byte ln pfx)))
; Emit an opcode byte, adding the low bits of the register extension for
; short-form (+r) encodings; OC.EXT is NIL for ordinary instructions.
(defun emit-opcode-maybe-extended (ln opc oprinfo)
  (emit-byte ln (+ opc (register-low-part (or (oprinfo-oc.ext oprinfo) 0)))))

; Emit the opcode, handling the two-byte #x0F escape: only the final byte
; may carry a +r register extension.
(defun emit-opcodes (ln ocinfo oprinfo)
  (if (eql (elt (ocinfo-opcodes ocinfo) 0) #x0F)
      (progn
        (emit-byte ln #x0F)
        (emit-opcode-maybe-extended ln (elt (ocinfo-opcodes ocinfo) 1) oprinfo))
      (emit-opcode-maybe-extended ln (elt (ocinfo-opcodes ocinfo) 0) oprinfo)))
; Emit the ModRM byte when all three of its fields were filled in;
; register numbers are truncated to their low three bits (the fourth bit
; lives in the REX prefix).
(defun maybe-emit-modrm (ln oprinfo)
  (with-slots ((mod modrm.mod) (reg modrm.reg) (rm modrm.rm)) oprinfo
    (and mod reg rm
         (emit-byte ln (compose-modrm mod
                                      (register-low-part reg)
                                      (register-low-part rm))))))

; Emit the SIB byte when all three of its fields were filled in.
(defun maybe-emit-sib (ln oprinfo)
  (with-slots ((scale sib.scale) (index sib.index) (base sib.base)) oprinfo
    (and scale index base
         (emit-byte ln (compose-sib scale
                                    (register-low-part index)
                                    (register-low-part base))))))
; Shared emitter for displacements and immediates: literal integers are
; emitted directly; symbols emit a relocation of TYPE followed by the
; addend ADDN as the relocated field's placeholder bytes. Does nothing
; when no value or no width was recorded.
(defun do-emit-disp-or-imm (ln disp-or-imm bytes type addn)
  (when (and disp-or-imm bytes)
    (if (integerp disp-or-imm)
        (emit-bytes ln disp-or-imm bytes)
        (progn
          (emit-reloc ln disp-or-imm bytes type)
          (emit-bytes ln addn bytes)))))
; Emit the displacement, if one was recorded.
(defun maybe-emit-disp (ln oprinfo)
  (with-slots ((disp disp) (bytes disp.bytes)) oprinfo
    (do-emit-disp-or-imm ln disp bytes (oprinfo-disp.rel-type oprinfo)
                         (oprinfo-disp.rel-addn oprinfo))))

; Emit the immediate, if one was recorded.
(defun maybe-emit-imm (ln oprinfo)
  (with-slots ((imm imm) (bytes imm.bytes)) oprinfo
    (do-emit-disp-or-imm ln imm bytes (oprinfo-imm.rel-type oprinfo)
                         (oprinfo-imm.rel-addn oprinfo))))
(defun encode-instruction (ln ocinfo oprinfo)
(maybe-emit-prefixes ln ocinfo)
(maybe-emit-rex ln ocinfo oprinfo)
(emit-opcodes ln ocinfo oprinfo)
(maybe-emit-modrm ln oprinfo)
(maybe-emit-sib ln oprinfo)
(maybe-emit-disp ln oprinfo)
(maybe-emit-imm ln oprinfo))
;; Apply encoder FUN to ARGS (it returns ocinfo and oprinfo as two
;; values), then emit the resulting instruction.
(defun do-encode (ln fun args)
  (multiple-value-bind (ocinfo oprinfo)
      (apply fun args)
    (encode-instruction ln ocinfo oprinfo)))
;; Dispatch INSN -- a form like (ADD dest src) -- to its generated
;; ENCODE-<mnemonic> function in the AMD64-ASM package.  An
;; ASSERTION-FAILED is re-signaled with the offending form attached;
;; anything else is reported as a generic ENCODING-ERROR.
;; NOTE(review): the CONDITION clause catches every condition type,
;; including non-error conditions -- confirm this blanket catch is
;; intentional.
(defun encode-insn (insn ln)
  (handler-case
      (let ((fun (prefixsym "ENCODE-" (car insn) "AMD64-ASM")))
        (do-encode ln (symbol-function fun) (cdr insn)))
    (assertion-failed (as) (error 'assertion-failed :form insn
                                  :check (assertion-failed-check as)))
    (condition (condition) (declare (ignore condition))
               (error 'encoding-error :form insn))))
;; Encoders for general 8/32/64-bit integer instructions.
;; Instructions not encoded:
;; aaa, aad, aam, aas, bound, call (far), cbw, cwde,
;; cdqe, cwd, cdq, cqo, cmov, cmps, cmpsb, cmpsw, cmpsd, cmpsq,
;; daa, das, enter, in, ins, insb, insw, insd, into, jcxz, jecxz, jrcxz,
;; lahf, lds, les, lfs, lgs, lss, lfence, lods, lodsb, lodsw, lodsd,
;; lodsq, loop, loope, loopne, loopnz, loopz, mfence, movs, movsb,
;; movsw, movsd, movsq, outs, outsb, outsw, outsd, popa, popad, popf,
;; popa, popad, popf, popfd, popfq, prefetch, prefetchw, pusha, pushad,
;; pushf, pushfd, ret (far), sahf, scas, scasb, scasw, scasd, scasq,
;; sfence, shld, shrd, std, stos, stosb, stosw, stosd, stosq, xlat, xlatb
;; Group-1 ALU template (ADD/ADC/AND/XOR/OR/SBB/SUB/CMP).  BASE is the
;; rm8,r8 opcode; BASE+1 covers rm32/64,r32/64; BASE+2 is r8,rm8;
;; BASE+3 is r32/64,rm32/64.  SUBCODE selects the instruction within
;; the shared #x80/#x81/#x83 immediate-group opcodes.
(defmacro define-type0-encoder (name base subcode)
  (let ((base1 base)
        (base2 (+ base 1))
        (base3 (+ base 2))
        (base4 (+ base 3)))
    `(define-encoder ,name (dest source)
       ((rm8 imm8) (#x80 ,subcode ib))
       ((rm32 imm8) (#x83 ,subcode ib))
       ((rm64 imm8) (#x83 ,subcode ib))
       ((rm32 imm32) (#x81 ,subcode id))
       ((rm64 imm32) (#x81 ,subcode id))
       ((rm8 r8) (,base1 /rm))
       ((rm32 r32) (,base2 /rm))
       ((rm64 r64) (,base2 /rm))
       ((r8 rm8) (,base3 /r))
       ((r32 rm32) (,base4 /r))
       ((r64 rm64) (,base4 /r)))))
;; Single-operand group (INC/DEC/NEG/NOT/MUL/DIV/IDIV): BASE is the
;; 8-bit opcode, BASE+1 the 32/64-bit one; CODE is the /n subcode.
(defmacro define-type1-encoder (name base code)
  (let ((base1 base)
        (base2 (+ base 1)))
    `(define-encoder ,name (dest)
       ((rm8) (,base1 ,code))
       ((rm32) (,base2 ,code))
       ((rm64) (,base2 ,code)))))
;; Shift/rotate template accepting only a count of 1 or imm8
;; (cf. DEFINE-TYPE5-ENCODER, which also supports a :CL count).
(defmacro define-type2-encoder (name subcode)
  `(define-encoder ,name (dest source)
     ((rm8 1) (#xD0 ,subcode))
     ((rm32 1) (#xD1 ,subcode))
     ((rm64 1) (#xD1 ,subcode))
     ((rm8 imm8) (#xC0 ,subcode ib))
     ((rm32 imm8) (#xC1 ,subcode ib))
     ((rm64 imm8) (#xC1 ,subcode ib))))
;; Zero-operand instructions: just a fixed opcode byte sequence.
(defmacro define-type3-encoder (name &rest opcodes)
  `(define-encoder ,name ()
     (() (,@opcodes))))
;; Bit-test group (BT/BTC/BTR/BTS): register form uses BASE1; the
;; immediate form shares opcode BASE2 (#xBA) with subcode CODE.
(defmacro define-type4-encoder (name base1 base2 code)
  `(define-encoder ,name (dest source)
     ((rm32 r32) (#x0F ,base1 /rm))
     ((rm64 r64) (#x0F ,base1 /rm))
     ((rm32 imm8) (#x0F ,base2 ,code ib))
     ((rm64 imm8) (#x0F ,base2 ,code ib))))
;; Full shift/rotate template: count of 1 (#xD0/#xD1), :CL (#xD2/#xD3),
;; or imm8 (#xC0/#xC1); CODE is the /n subcode.
(defmacro define-type5-encoder (name code)
  `(define-encoder ,name (dest count)
     ((rm8 1) (#xD0 ,code))
     ((rm8 :cl) (#xD2 ,code))
     ((rm8 imm8) (#xC0 ,code ib))
     ((rm32 1) (#xD1 ,code))
     ((rm32 :cl) (#xD3 ,code))
     ((rm32 imm8) (#xC1 ,code ib))
     ((rm64 1) (#xD1 ,code))
     ((rm64 :cl) (#xD3 ,code))
     ((rm64 imm8) (#xC1 ,code ib))))
;; ------------------------------------------------------------------
;; General-purpose integer instruction encoders.
;; ------------------------------------------------------------------
;; Group-1 ALU instructions: base opcode plus /n immediate subcode.
(define-type0-encoder add #x00 /0)
(define-type0-encoder adc #x10 /2)
(define-type0-encoder and #x20 /4)
(define-type0-encoder xor #x30 /6)
(define-type0-encoder or #x08 /1)
(define-type0-encoder sbb #x18 /3)
(define-type0-encoder sub #x28 /5)
(define-type0-encoder cmp #x38 /7)
;; Bit scan forward/reverse.
(define-encoder bsf (dest source)
  ((r32 rm32) (#x0F #xBC /r))
  ((r64 rm64) (#x0F #xBC /r)))
(define-encoder bsr (dest source)
  ((r32 rm32) (#x0F #xBD /r))
  ((r64 rm64) (#x0F #xBD /r)))
(define-encoder bswap (dest)
  ((r32) (#x0F #xC8 +r))
  ((r64) (#x0F #xC8 +r)))
;; Bit test group.
(define-type4-encoder bt #xA3 #xBA /4)
(define-type4-encoder btc #xBB #xBA /7)
(define-type4-encoder btr #xB3 #xBA /6)
(define-type4-encoder bts #xAB #xBA /5)
;; CALL: near relative (#xE8) or indirect through r/m64; the * marks
;; the indirect form as defaulting to 64 bits (no REX.W override).
(define-encoder call (target)
  ((imm32) (#xE8 cd))
  ((rm64) (* #xFF /2)))
(define-type3-encoder clc #xF8)
(define-encoder clflush (addr)
  ((m8) (#x0F #xAE /7)))
(define-type3-encoder cmc #xF5)
;; CMOVcc: opcodes 0F 40 through 0F 4F, one per condition code,
;; generated in lockstep with the mnemonic list.
(defmacro define-cmovcc-encoders ()
  `(progn ,@(iter (for oc from #x40 to #x4F)
                  (for insn in '(cmovo cmovno cmovb cmovnb
                                 cmovz cmovnz cmovbe cmovnbe
                                 cmovs cmovns cmovp cmovnp
                                 cmovl cmovge cmovle cmovg))
                  (collect
                      `(define-encoder ,insn (dest source)
                         ((r32 rm32) (#x0F ,oc /r))
                         ((r64 rm64) (#x0F ,oc /r)))))))
(define-cmovcc-encoders)
(define-encoder cmpxchg (dest source)
  ((rm8 r8) (#x0F #xB0 /rm))
  ((rm32 r32) (#x0F #xB1 /rm))
  ((rm64 r64) (#x0F #xB1 /rm)))
(define-type3-encoder cpuid #x0F #xA2)
;; Single-operand arithmetic group (shared #xFE/#xF6 opcodes).
(define-type1-encoder dec #xFE /1)
(define-type1-encoder div #xF6 /6)
(define-type1-encoder idiv #xF6 /7)
(define-type1-encoder inc #xFE /0)
(define-type1-encoder mul #xF6 /4)
(define-type1-encoder neg #xF6 /3)
(define-type1-encoder not #xF6 /2)
(define-encoder imul (dest source)
  ((r32 rm32) (#x0F #xAF /r))
  ((r64 rm64) (#x0F #xAF /r)))
;; Three-operand IMUL (dest = source * immediate).
(define-encoder imul3 (dest source scale)
  ((r32 rm32 imm8) (#x6B /r ib))
  ((r64 rm64 imm8) (#x6B /r ib))
  ((r32 rm32 imm32) (#x69 /r id))
  ((r64 rm64 imm32) (#x69 /r id)))
(define-encoder int (idx)
  ((imm8) (#xCD ib)))
;; Jcc: short form 70-7F (rel8), near form 0F 80-8F (rel32).
(defmacro define-jcc-encoders ()
  `(progn ,@(iter (for oc from #x70 to #x7F)
                  (for oc2 from #x80 to #x8F)
                  (for insn in '(jo jno jb jnb
                                 jz jnz jbe jnbe
                                 js jns jp jnp
                                 jl jge jle jg))
                  (collect
                      `(define-encoder ,insn (offset)
                         ((imm8) (,oc cb))
                         ((imm32) (#x0F ,oc2 cd)))))))
(define-jcc-encoders)
(define-encoder jmp (target)
  ((imm8) (#xEB cb))
  ((imm32) (#xE9 cd))
  ((rm64) (* #xFF /4)))
(define-type3-encoder leave #xC9)
;; MOV: register/memory forms, short +r immediate forms, and the
;; sign-extended C7 /0 form for r/m64, imm32 (listed before B8+r iq so
;; the shorter encoding wins for 32-bit immediates).
(define-encoder mov (dest source)
  ((rm8 r8) (#x88 /rm))
  ((rm32 r32) (#x89 /rm))
  ((rm64 r64) (#x89 /rm))
  ((r8 rm8) (#x8A /r))
  ((r32 rm32) (#x8B /r))
  ((r64 rm64) (#x8B /r))
  ((r8 imm8) (#xB0 +r ib))
  ((r32 imm32) (#xB8 +r id))
  ((rm64 imm32) (#xC7 /0 id))
  ((r64 imm64) (#xB8 +r iq))
  ((rm8 imm8) (#xC6 /0 ib))
  ((rm32 imm32) (#xC7 /0 id)))
(define-encoder movnti (dest source)
  ((m32 r32) (#x0F #xC3 /rm))
  ((m64 r64) (#x0F #xC3 /rm)))
(define-encoder movsx (dest source)
  ((r32 rm8) (#x0F #xBE /r))
  ((r64 rm8) (#x0F #xBE /r)))
(define-encoder movsxd (dest source)
  ((r64 rm32) (#x63 /r)))
(define-encoder movzx (dest source)
  ((r32 rm8) (#x0F #xB6 /r))
  ((r64 rm8) (#x0F #xB6 /r)))
(define-type3-encoder nop #x90)
(define-type3-encoder pause #xF3 #x90)
;; PUSH/POP default to 64-bit operands in long mode (the * marker).
(define-encoder pop (dest)
  ((r64) (* #x58 +r))
  ((rm64) (* #x8F /0)))
(define-encoder push (source)
  ((r64) (* #x50 +r))
  ((rm64) (* #xFF /6))
  ((imm8) (#x6A ib))
  ((imm32) (#x68 id)))
(define-type5-encoder rcl /2)
(define-type5-encoder rcr /3)
(define-type3-encoder ret #xC3)
(define-type5-encoder rol /0)
(define-type5-encoder ror /1)
(define-encoder retn (bytes)
  ((imm16) (#xC2 iw)))
;; SETcc: opcodes 0F 90 through 0F 9F.
;; NOTE(review): the /2 subcode here populates modrm.reg; confirm this
;; matches the reference assembler's output if byte-exact encoding
;; comparisons matter.
(defmacro define-setcc-encoders ()
  `(progn ,@(iter (for oc from #x90 to #x9F)
                  (for insn in '(seto setno setb setnb
                                 setz setnz setbe setnbe
                                 sets setns setp setnp
                                 setl setge setle setg))
                  (collect
                      `(define-encoder ,insn (offset)
                         ((rm8) (#x0F ,oc /2)))))))
(define-setcc-encoders)
;; SAL/SAR/SHR: previously defined via DEFINE-TYPE2-ENCODER, which only
;; accepts a count of 1 or imm8.  DEFINE-TYPE5-ENCODER (see above)
;; encodes those same forms with identical opcodes (#xD0/#xD1 and
;; #xC0/#xC1) and additionally supports the :CL count forms
;; (#xD2/#xD3), matching ROL/ROR/RCL/RCR -- a backward-compatible
;; generalization.
(define-type5-encoder sal /4)
(define-type5-encoder sar /7)
(define-type5-encoder shr /5)
(define-type3-encoder stc #xF9)
;; TEST: immediate forms use the shared #xF6/#xF7 /0 opcode.
(define-encoder test (dest source)
  ((rm8 imm8) (#xF6 /0 ib))
  ((rm32 imm32) (#xF7 /0 id))
  ((rm64 imm32) (#xF7 /0 id))
  ((rm8 r8) (#x84 /rm))
  ((rm32 r32) (#x85 /rm))
  ((rm64 r64) (#x85 /rm)))
;; XCHG: both operand orders accepted (the instruction is symmetric).
(define-encoder xchg (dest source)
  ((rm8 r8) (#x86 /rm))
  ((r8 rm8) (#x86 /r))
  ((rm32 r32) (#x87 /rm))
  ((r32 rm32) (#x87 /r))
  ((rm64 r64) (#x87 /rm))
  ((r64 rm64) (#x87 /r)))
(define-encoder xadd (dest source)
  ((rm8 r8) (#x0F #xC0 /rm))
  ((rm32 r32) (#x0F #xC1 /rm))
  ((rm64 r64) (#x0F #xC1 /rm)))
;; Man there are a lot of SSE instructions.
;; The choice of xm32/xm64/xm128 is seemingly random.
;; The specifier chosen is the one that makes yasm happy.
;; Templates for the SSE encoders below.  Naming scheme:
;;   x        xmm register only
;;   xmN      xmm register or N-bit memory operand
;;   rxN      GP register dest, xmm/mem source (cvt*2si family)
;;   xr       xmm dest, GP register/mem source (cvtsi2* family)
;;   cmp      trailing imm8 comparison/shuffle selector
;;   shiftN   vector shift forms (register/immediate counts)
;;   movN     the various load/store direction pairings
(defmacro define-x-encoder (name &rest opcodes)
  `(define-encoder ,name (dest source)
     ((x x) (,@opcodes /r))))
(defmacro define-xm128-encoder (name &rest opcodes)
  `(define-encoder ,name (dest source)
     ((x xm128) (,@opcodes /r))))
(defmacro define-xm64-encoder (name &rest opcodes)
  `(define-encoder ,name (dest source)
     ((x xm64) (,@opcodes /r))))
(defmacro define-xm32-encoder (name &rest opcodes)
  `(define-encoder ,name (dest source)
     ((x xm32) (,@opcodes /r))))
(defmacro define-cmp-encoder (name &rest opcodes)
  `(define-encoder ,name (dest source cmp)
     ((x xm128 imm8) (,@opcodes /r ib))))
(defmacro define-rx64-encoder (name &rest opcodes)
  `(define-encoder ,name (dest source)
     ((r32 xm64) (,@opcodes /r))
     ((r64 xm64) (,@opcodes /r))))
(defmacro define-rx32-encoder (name &rest opcodes)
  `(define-encoder ,name (dest source)
     ((r32 xm32) (,@opcodes /r))
     ((r64 xm32) (,@opcodes /r))))
(defmacro define-rx-encoder (name &rest opcodes)
  `(define-encoder ,name (dest source)
     ((r32 xm32) (,@opcodes /r))
     ((r64 xm64) (,@opcodes /r))))
(defmacro define-xr-encoder (name &rest opcodes)
  `(define-encoder ,name (dest source)
     ((x rm32) (,@opcodes /r))
     ((x rm64) (,@opcodes /r))))
;; Vector shifts with both register and immediate count forms.
(defmacro define-shift0-encoder (name code1 code2 sub)
  `(define-encoder ,name (dest shift)
     ((x xm128) (#x66 #x0F ,code1 /r))
     ((x imm8) (#x66 #x0F ,code2 ,sub ib))))
;; Byte shifts (PSLLDQ/PSRLDQ) only take an immediate count.
(defmacro define-shift1-encoder (name code sub)
  `(define-encoder ,name (dest shift)
     ((x imm8) (#x66 #x0F ,code ,sub ib))))
(defmacro define-mov1-encoder (name opcodes1 opcodes2)
  `(define-encoder ,name (dest source)
     ((x m64) (,@opcodes1 /r))
     ((m64 x) (,@opcodes2 /rm))))
(defmacro define-mov2-encoder (name &rest opcodes)
  `(define-encoder ,name (dest source)
     ((r32 x) (,@opcodes /r))))
(defmacro define-mov3-encoder (name &rest opcodes)
  `(define-encoder ,name (dest source)
     ((m128 x) (,@opcodes /rm))))
(defmacro define-mov0-128-encoder (name opcodes1 opcodes2)
  `(define-encoder ,name (dest source)
     ((x xm128) (,@opcodes1 /r))
     ((xm128 x) (,@opcodes2 /rm))))
(defmacro define-mov0-64-encoder (name opcodes1 opcodes2)
  `(define-encoder ,name (dest source)
     ((x xm64) (,@opcodes1 /r))
     ((xm64 x) (,@opcodes2 /rm))))
(defmacro define-mov0-32-encoder (name opcodes1 opcodes2)
  `(define-encoder ,name (dest source)
     ((x xm32) (,@opcodes1 /r))
     ((xm32 x) (,@opcodes2 /rm))))
;; SSE/SSE2/SSE3 arithmetic, compare, and conversion encoders.
;; The #x66/#xF2/#xF3 bytes are mandatory prefixes selecting the
;; packed-double / scalar-double / scalar-single variants.
(define-xm128-encoder addpd #x66 #x0F #x58)
(define-xm128-encoder addps #x0F #x58)
(define-xm128-encoder addsd #xF2 #x0F #x58)
(define-xm128-encoder addss #xF3 #x0F #x58)
(define-xm128-encoder addsubpd #x66 #x0F #xD0)
(define-xm128-encoder addsubps #xF2 #x0F #xD0)
(define-xm128-encoder andnpd #x66 #x0F #x55)
(define-xm128-encoder andnps #x0F #x55)
(define-xm128-encoder andpd #x66 #x0F #x54)
(define-xm128-encoder andps #x0F #x54)
(define-cmp-encoder cmppd #x66 #x0F #xC2)
(define-cmp-encoder cmpps #x0F #xC2)
(define-cmp-encoder cmpsd #xF2 #x0F #xC2)
(define-cmp-encoder cmpss #xF3 #x0F #xC2)
(define-xm128-encoder comisd #x66 #x0F #x2F)
(define-xm128-encoder comiss #x0F #x2F)
(define-xm64-encoder cvtdq2pd #xF3 #x0F #xE6)
(define-xm128-encoder cvtdq2ps #x0F #x5B)
(define-xm128-encoder cvtpd2dq #xF2 #x0F #xE6)
; cvtpd2pi
(define-xm128-encoder cvtpd2ps #x66 #x0F #x5A)
; cvtpi2pd
; cvtpi2ps
(define-xm128-encoder cvtps2dq #x66 #x0F #x5B)
(define-xm64-encoder cvtps2pd #x0F #x5A)
; cvtps2pi
(define-rx64-encoder cvtsd2si #xF2 #x0F #x2D)
(define-xm64-encoder cvtsd2ss #xF2 #x0F #x5A)
(define-xr-encoder cvtsi2sd #xF2 #x0F #x2A)
(define-xr-encoder cvtsi2ss #xF3 #x0F #x2A)
(define-xm32-encoder cvtss2sd #xF3 #x0F #x5A)
(define-rx32-encoder cvtss2si #xF3 #x0F #x2D)
(define-xm128-encoder cvttpd2dq #x66 #x0F #xE6)
; cvtpd2pi
(define-xm128-encoder cvttps2dq #xF3 #x0F #x5b)
; cvttpd2pi
(define-rx64-encoder cvttsd2si #xF2 #x0F #x2C)
(define-rx32-encoder cvttss2si #xF3 #x0F #x2C)
(define-xm128-encoder divpd #x66 #x0F #x5E)
(define-xm128-encoder divps #x0F #x5E)
(define-xm128-encoder divsd #xF2 #x0F #x5E)
(define-xm128-encoder divss #xF3 #x0F #x5E)
; fxrstor
; fxsave
(define-xm128-encoder haddpd #x66 #x0F #x7C)
(define-xm128-encoder haddps #xF2 #x0F #x7C)
(define-xm128-encoder hsubpd #x66 #x0F #x7D)
(define-xm128-encoder hsubps #xF2 #x0F #x7D)
; lddqu
;; MXCSR load/store, min/max, data movement, and the packed-integer
;; (P*) instruction set.
(define-encoder ldmxcsr (source)
  ((m32) (#x0F #xAE /2)))
(define-x-encoder maskmovdqu #x66 #x0F #xF7)
(define-xm128-encoder maxpd #x66 #x0F #x5F)
(define-xm128-encoder maxps #x0F #x5F)
(define-xm128-encoder maxsd #xF2 #x0F #x5F)
(define-xm128-encoder maxss #xF3 #x0F #x5F)
(define-xm128-encoder minpd #x66 #x0F #x5D)
(define-xm128-encoder minps #x0F #x5D)
(define-xm128-encoder minsd #xF2 #x0F #x5D)
(define-xm128-encoder minss #xF3 #x0F #x5D)
(define-mov0-128-encoder movapd (#x66 #x0F #x28) (#x66 #x0F #x29))
(define-mov0-128-encoder movaps (#x0F #x28) (#x0F #x29))
;; MOVD moves between GP registers/memory and the low lane of an xmm.
(define-encoder movd (dest source)
  ((x rm32) (#x66 #x0F #x6E /r))
  ((x rm64) (#x66 #x0F #x6E /r))
  ((rm32 x) (#x66 #x0F #x7E /rm))
  ((rm64 x) (#x66 #x0F #x7E /rm)))
(define-xm64-encoder movddup #xF2 #x0F #x12)
; movdq2q
(define-mov0-128-encoder movdqa (#x66 #x0F #x6F) (#x66 #x0F #x7F))
(define-mov0-128-encoder movdqu (#xF3 #x0F #x6F) (#xF3 #x0F #x7F))
(define-x-encoder movhlps #x0F #x12)
(define-mov1-encoder movhpd (#x66 #x0F #x16) (#x66 #x0F #x17))
(define-mov1-encoder movhps (#x0F #x16) (#x0F #x17))
(define-x-encoder movlhps #x0F #x16)
(define-mov1-encoder movlpd (#x66 #x0F #x12) (#x66 #x0F #x13))
(define-mov1-encoder movlps (#x0F #x12) (#x0F #x13))
(define-mov2-encoder movmskpd #x66 #x0F #x50)
(define-mov2-encoder movmskps #x0F #x50)
(define-mov3-encoder movntdq #x66 #x0F #xE7)
(define-mov3-encoder movntpd #x66 #x0F #x2B)
(define-mov3-encoder movntps #x0F #x2B)
(define-mov0-64-encoder movq (#xF3 #x0F #x7E) (#x66 #x0F #xD6))
; movq2dq
(define-mov0-64-encoder movsd (#xF2 #x0F #x10) (#xF2 #x0F #x11))
(define-xm128-encoder movshdup #xF3 #x0F #x16)
(define-xm128-encoder movsldup #xF3 #x0F #x12)
(define-mov0-32-encoder movss (#xF3 #x0F #x10) (#xF3 #x0F #x11))
(define-mov0-128-encoder movupd (#x66 #x0F #x10) (#x66 #x0F #x11))
(define-mov0-128-encoder movups (#x0F #x10) (#x0F #x11))
(define-xm128-encoder mulpd #x66 #x0F #x59)
(define-xm128-encoder mulps #x0F #x59)
(define-xm128-encoder mulsd #xF2 #x0F #x59)
(define-xm128-encoder mulss #xF3 #x0F #x59)
(define-xm128-encoder orpd #x66 #x0F #x56)
(define-xm128-encoder orps #x0F #x56)
;; Packed integer instructions (SSE2 forms, #x66 prefix).
(define-xm128-encoder packssdw #x66 #x0F #x6B)
(define-xm128-encoder packsswb #x66 #x0F #x63)
(define-xm128-encoder packuswb #x66 #x0F #x67)
(define-xm128-encoder paddb #x66 #x0F #xFC)
(define-xm128-encoder paddd #x66 #x0F #xFE)
(define-xm128-encoder paddq #x66 #x0F #xD4)
(define-xm128-encoder paddsb #x66 #x0F #xEC)
(define-xm128-encoder paddsw #x66 #x0F #xED)
(define-xm128-encoder paddusb #x66 #x0F #xDC)
(define-xm128-encoder paddusw #x66 #x0F #xDD)
(define-xm128-encoder paddw #x66 #x0F #xFD)
(define-xm128-encoder pand #x66 #x0F #xDB)
(define-xm128-encoder pandn #x66 #x0F #xDF)
(define-xm128-encoder pavgb #x66 #x0F #xE0)
(define-xm128-encoder pavgw #x66 #x0F #xE3)
(define-xm128-encoder pcmpeqb #x66 #x0F #x74)
(define-xm128-encoder pcmpeqd #x66 #x0F #x76)
(define-xm128-encoder pcmpeqw #x66 #x0F #x75)
(define-xm128-encoder pcmpgtb #x66 #x0F #x64)
(define-xm128-encoder pcmpgtd #x66 #x0F #x66)
(define-xm128-encoder pcmpgtw #x66 #x0F #x65)
(define-encoder pextrw (dest source sel)
  ((r32 x imm8) (#x66 #x0F #xC5 /r ib)))
(define-encoder pinsrw (dest source sel)
  ((x rm32 imm8) (#x66 #x0F #xC4 /r ib)))
(define-xm128-encoder pmaddwd #x66 #x0F #xF5)
(define-xm128-encoder pmaxsw #x66 #x0F #xEE)
(define-xm128-encoder pmaxub #x66 #x0F #xDE)
(define-xm128-encoder pminsw #x66 #x0F #xEA)
(define-xm128-encoder pminub #x66 #x0F #xDA)
(define-mov2-encoder pmovmskb #x66 #x0F #xD7)
(define-xm128-encoder pmulhuw #x66 #x0F #xE4)
(define-xm128-encoder pmulhw #x66 #x0F #xE5)
(define-xm128-encoder pmullw #x66 #x0F #xD5)
(define-xm128-encoder pmuludq #x66 #x0F #xF4)
(define-xm128-encoder por #x66 #x0F #xEB)
(define-xm128-encoder psadbw #x66 #x0F #xF6)
(define-cmp-encoder pshufd #x66 #x0F #x70)
(define-cmp-encoder pshufhw #xF3 #x0F #x70)
(define-cmp-encoder pshuflw #xF2 #x0F #x70)
;; Vector shifts: register-count opcode, then the imm8 group opcode
;; with its /n subcode.
(define-shift0-encoder pslld #xF2 #x72 /6)
(define-shift1-encoder pslldq #x73 /7)
(define-shift0-encoder psllq #xF3 #x73 /6)
(define-shift0-encoder psllw #xF1 #x71 /6)
(define-shift0-encoder psrad #xE2 #x72 /4)
(define-shift0-encoder psraw #xE1 #x71 /4)
(define-shift0-encoder psrld #xD2 #x72 /2)
(define-shift1-encoder psrldq #x73 /3)
(define-shift0-encoder psrlq #xD3 #x73 /2)
(define-shift0-encoder psrlw #xD1 #x71 /2)
(define-xm128-encoder psubb #x66 #x0F #xF8)
(define-xm128-encoder psubd #x66 #x0F #xFA)
(define-xm128-encoder psubq #x66 #x0F #xFB)
(define-xm128-encoder psubsb #x66 #x0F #xE8)
(define-xm128-encoder psubsw #x66 #x0F #xE9)
(define-xm128-encoder psubusb #x66 #x0F #xD8)
(define-xm128-encoder psubusw #x66 #x0F #xD9)
(define-xm128-encoder psubw #x66 #x0F #xF9)
(define-xm128-encoder punpckhbw #x66 #x0F #x68)
(define-xm128-encoder punpckhdq #x66 #x0F #x6A)
(define-xm128-encoder punpckhqdq #x66 #x0F #x6D)
(define-xm128-encoder punpckhwd #x66 #x0F #x69)
(define-xm128-encoder punpcklbw #x66 #x0F #x60)
(define-xm128-encoder punpckldq #x66 #x0F #x62)
(define-xm128-encoder punpcklqdq #x66 #x0F #x6C)
(define-xm128-encoder punpcklwd #x66 #x0F #x61)
(define-xm128-encoder pxor #x66 #x0F #xEF)
(define-xm128-encoder rcpps #x0F #x53)
(define-xm128-encoder rcpss #xF3 #x0F #x53)
(define-xm128-encoder rsqrtps #x0F #x52)
(define-xm128-encoder rsqrtss #xF3 #x0F #x52)
(define-cmp-encoder shufpd #x66 #x0F #xC6)
(define-cmp-encoder shufps #x0F #xC6)
(define-xm128-encoder sqrtpd #x66 #x0F #x51)
(define-xm128-encoder sqrtps #x0F #x51)
(define-xm128-encoder sqrtsd #xF2 #x0F #x51)
(define-xm128-encoder sqrtss #xF3 #x0F #x51)
(define-encoder stmxcsr (dest)
  ((m32) (#x0F #xAE /3)))
(define-xm128-encoder subpd #x66 #x0F #x5C)
(define-xm128-encoder subps #x0F #x5C)
(define-xm128-encoder subsd #xF2 #x0F #x5C)
(define-xm128-encoder subss #xF3 #x0F #x5C)
(define-xm128-encoder ucomisd #x66 #x0F #x2E)
(define-xm128-encoder ucomiss #x0F #x2E)
(define-xm128-encoder unpckhpd #x66 #x0F #x15)
(define-xm128-encoder unpckhps #x0F #x15)
(define-xm128-encoder unpcklpd #x66 #x0F #x14)
(define-xm128-encoder unpcklps #x0F #x14)
(define-xm128-encoder xorpd #x66 #x0F #x57)
(define-xm128-encoder xorps #x0F #x57)
| null | https://raw.githubusercontent.com/rayiner/amd64-asm/27ac3e683557d691cd68472c94d110f32334f61a/encoders.lisp | lisp | encoders.lisp
;; Encoders for AMD64 instruction set.
;; TODO: maybe move condition definitions somewhere else
;; TODO: make this more sophisticated for error handling later
;; oprinfo: info about the operands of an instruction
;; ocinfo: info about the opcodes of an instruction
;; Syntax for defining instruction encoders.
;; An encoder consists of sequences of clauses, each
;; with two parts: a pattern, and a production.
;; The pattern is a sequence of one or more of
;; the following symbols:
;;   rX    stands for a register of width X bits
;;   x     stands for a vector register
;;   xmX   stands for a vector register or memory
;;         operand of width X bits
;;   rmX   stands for a register or memory operand
;;         of width X bits
;;   immX  stands for an immediate of width X bits.
;;   sX    stands for a symbolic immediate of width X bits.
;; The production is a sequence of either integers,
;; representing opcodes, or one or more of the
;; following symbols:
;;   *  means that the instruction defaults to 64-bit, and needs
;;      no override prefix.  It must be specified at the beginning.
;;   ib, iw, id and iq mean to follow the instruction
;;      with a 1, 2, 4, or 8 byte immediate, respectively.
;;   /0 through /7 mean to specify that digit in modrm.reg
;;   /r  means to use a regular modrm form, with modrm.reg as dest
;;   /rm means to use a regular modrm form, with modrm.rm as dest
;;   +r  means to use a short form, adding the dest register to opcode
;; The instruction width is determined by the form of the destination.
;; The /0 through /7, /r, /rm, and +r terms are necessary to match
;; the syntax of the processor reference manual, but are somewhat
;; awkward to use programatically because they have multiple
;; implications.  These terms are transformed as follows:
;;   /0 through /7 -> /n /rm
;;   ib through iq -> ix
;;   /r  -> /r /rm
;;   /rm -> /rm /r
;; These terms are used as follows, mapped to corresponding operands:
;;   /n  -> set modrm.reg to subcode
;;   /rm -> add reg or mem operand to modrm.rm
;;   /r  -> add reg parameter to modrm.reg
;;   ix  -> add immediate operand
;;   cx  -> add immediate operand (RIP-relative)
;; Note that this may later be undone in the command handlers.
;; Instructions not encoded:
;;   cvtpd2pi, cvtpi2pd, cvtpi2ps, cvtps2pi, cvttpd2pi,
;;   fxrstor, fxsave, movdq2q, movq2dq
(in-package "AMD64-ASM")
;; Condition hierarchy: ASSEMBLER-ERROR is the root; ENCODING-ERROR
;; carries the offending instruction form; ASSERTION-FAILED adds the
;; predicate form that failed inside WITH-CHECKS.
;; Fix: the root condition's slot list was written as (()) -- a list
;; containing one malformed, empty slot specification -- instead of
;; the empty slot list ().
(define-condition assembler-error (error)
  ())
(define-condition encoding-error (assembler-error)
  ((form :initarg :form :reader encoding-error-form)))
(define-condition assertion-failed (encoding-error)
  ((check :initarg :check :reader assertion-failed-check)))
;; Evaluate BODY if PRED holds, otherwise signal ASSERTION-FAILED
;; recording the literal predicate form in its :CHECK slot.  (The
;; inherited :FORM slot is supplied later when ENCODE-INSN re-signals
;; with the offending instruction attached.)
(defmacro with-checks (pred &body body)
  `(if ,pred
       (progn ,@body)
       (error 'assertion-failed :check ',pred)))
;; Register name tables.  Each list is deliberately ordered so that a
;; name's POSITION, mapped through the RNUMS permutation inside
;; REGISTER-NUMBER, yields the architectural register number -- hence
;; the scrambled-looking order (:bl before :cl, :xmm3 before :xmm1).
(defparameter *byte-regs* '(:al :bl :cl :dl :sil :dil :bpl :spl
                            :r8b :r9b :r10b :r11b :r12b :r13b :r14b :r15b))
(defparameter *half-regs* '(:eax :ebx :ecx :edx :esi :edi :ebp :esp
                            :r8d :r9d :r10d :r11d :r12d :r13d :r14d :r15d))
(defparameter *word-regs* '(:rax :rbx :rcx :rdx :rsi :rdi :rbp :rsp
                            :r8 :r9 :r10 :r11 :r12 :r13 :r14 :r15))
(defparameter *vec-regs* '(:xmm0 :xmm3 :xmm1 :xmm2 :xmm6 :xmm7 :xmm5 :xmm4
                           :xmm8 :xmm9 :xmm10 :xmm11 :xmm12 :xmm13 :xmm14
                           :xmm15))
;; Short-displacement instructions: jumps that may take symbolic
;; (label) targets; see SDI?.
(defparameter *sdis* '(:jo :jno :jb :jnb :jz :jnz :jbe :jnbe
                       :js :jns :jp :jnp :jl :jge :jle :jg :jmp))
;; Legacy prefix bytes, used to split a production's opcode byte list
;; into prefixes and opcodes (needed at macroexpansion time).
(eval-when (:compile-toplevel :load-toplevel :execute)
  (defparameter *prefixes* '(#x66 #x67 #x64 #x65 #xF0 #xF3 #xF2)))
;; Registry of (mnemonic-keyword clauses) entries, filled in by
;; DEFINE-ENCODER as encoders are defined.
(eval-when (:compile-toplevel :load-toplevel :execute)
  (defparameter *encoders* nil))
;; Accumulated operand-encoding state for one instruction: the +r
;; opcode extension, the ModRM and SIB fields, and the displacement /
;; immediate values with their byte widths and (for symbolic values)
;; relocation type and addend.  NIL in any slot means "not used";
;; the MAYBE-EMIT-* functions key off that.
(defstruct oprinfo
  oc.ext
  modrm.mod
  modrm.reg
  modrm.rm
  sib.scale
  sib.index
  sib.base
  disp
  imm
  imm.bytes
  imm.rel-type
  imm.rel-addn
  disp.bytes
  disp.rel-type
  disp.rel-addn)
;; Opcode-level state for one instruction: the REX.W override flag,
;; the legacy prefix bytes, and the opcode bytes.
(defstruct ocinfo
  override?
  prefixes
  opcodes)
;; Fresh OCINFO with empty, growable prefix and opcode vectors.
;; Fix: the vectors are filled via VECTOR-PUSH-EXTEND, which the
;; standard only guarantees for actually-adjustable arrays; :FILL-POINTER
;; alone leaves adjustability implementation-dependent, so :ADJUSTABLE T
;; is now passed explicitly.
(defun new-ocinfo ()
  (make-ocinfo :opcodes (make-array 0 :fill-pointer t :adjustable t)
               :prefixes (make-array 0 :fill-pointer t :adjustable t)))
;; Byte width of a size specifier keyword; NIL for anything else.
(defun specifier-width (spec)
  (cdr (assoc spec '((:byte . 1) (:half . 4) (:word . 8) (:wide . 16)))))
;; Next-larger specifier (:word saturates); NIL for anything else.
(defun specifier-next (spec)
  (cdr (assoc spec '((:byte . :half) (:half . :word) (:word . :word)))))
;; Map a register keyword to its hardware number.  The *-REGS* lists
;; are ordered so that POSITION composed with the RNUMS permutation
;; yields the architectural encoding (AX=0 CX=1 DX=2 BX=3 SP=4 BP=5
;; SI=6 DI=7, then R8-R15 / XMM8-XMM15 as 8-15).  NIL for
;; non-registers.
(defun register-number (reg)
  (let ((rnums '(0 3 1 2 6 7 5 4 8 9 10 11 12 13 14 15)))
    (let ((idx (or (position reg *byte-regs*)
                   (position reg *half-regs*)
                   (position reg *word-regs*)
                   (position reg *vec-regs*))))
      (when idx (nth idx rnums)))))
(defun reg? (operand)
  (register-number operand))
(defun byte-reg? (reg)
  (member reg *byte-regs*))
(defun half-reg? (reg)
  (member reg *half-regs*))
(defun word-reg? (reg)
  (member reg *word-regs*))
(defun xmm-reg? (reg)
  (member reg *vec-regs*))
(defun same-reg? (rega regb)
  (eql (register-number rega) (register-number regb)))
;; An immediate operand is a literal integer, a (width symbol) pair
;; naming a symbolic value, or (width symbol addend) where the integer
;; addend must fit -- signed or unsigned -- in the declared width.
(defun immediate? (operand)
  (or (integerp operand)
      (and (listp operand)
           (or (and (eql (length operand) 2)
                    (symbolp (first operand))
                    (symbolp (second operand)))
               (and (eql (length operand) 3)
                    (symbolp (first operand))
                    (symbolp (second operand))
                    (integerp (third operand))
                    (or (<= (signed-width (third operand))
                            (specifier-width (first operand)))
                        (<= (unsigned-width (third operand))
                            (specifier-width (first operand)))))))))
;; Width in bytes: literal integers use their minimal signed width,
;; symbolic immediates their declared specifier width.
(defun immediate-width (operand)
  (if (integerp operand)
      (signed-width operand)
      (specifier-width (first operand))))
(defun byte-immediate? (operand)
  (and (immediate? operand) (<= (immediate-width operand) 1)))
(defun short-immediate? (operand)
  (and (immediate? operand) (<= (immediate-width operand) 2)))
(defun half-immediate? (operand)
  (and (immediate? operand) (<= (immediate-width operand) 4)))
(defun word-immediate? (operand)
  (and (immediate? operand) (<= (immediate-width operand) 8)))
;; A memory operand is a 5-element list: (width base index scale disp).
(defun mem? (operand)
  (and (listp operand)
       (eql (length operand) 5)
       (symbolp (first operand))
       (symbolp (second operand))
       (symbolp (third operand))
       (integerp (fourth operand))
       (immediate? (fifth operand))))
(defun byte-mem? (operand)
  (and (mem? operand) (eql (first operand) :byte)))
(defun half-mem? (operand)
  (and (mem? operand) (eql (first operand) :half)))
(defun word-mem? (operand)
  (and (mem? operand) (eql (first operand) :word)))
(defun wide-mem? (operand)
  (and (mem? operand) (eql (first operand) :wide)))
;; Short-displacement instruction: a jump mnemonic from *SDIS* with a
;; symbolic (label) target.
(defun sdi? (insn)
  (and (member (first insn) *sdis*)
       (symbolp (second insn))))
;; Pack and unpack the REX, ModRM, and SIB bytes.  Each COMPOSE-
;; function range-checks its fields via WITH-CHECKS and signals
;; ASSERTION-FAILED on out-of-range input.
;; REX: 0100 WRXB.
(defun compose-rex (w r x b)
  (with-checks (and (< w 2) (< r 2) (< x 2) (< b 2))
    (+ #x40 b (ash x 1) (ash r 2) (ash w 3))))
(defun decode-rex (r)
  (with-checks (integerp r)
    (list :w (ash (logand r #x8) -3)
          :r (ash (logand r #x4) -2)
          :x (ash (logand r #x2) -1)
          :b (logand r #x1))))
;; ModRM: mod(2) | reg(3) | rm(3).
(defun compose-modrm (mod reg rm)
  (with-checks (and (< mod 4) (< reg 8) (< rm 8))
    (+ rm (ash reg 3) (ash mod 6))))
(defun decode-modrm (m)
  (with-checks (integerp m)
    (list :mod (logand (ash m -6) #x3)
          :reg (logand (ash m -3) #x7)
          :rm (logand m #x7))))
;; SIB: scale(2) | index(3) | base(3).
;; NOTE(review): scale is a 2-bit field but the check allows 0-7;
;; values 4-7 would overflow the byte -- confirm callers only produce
;; 0-3 (SIB.SCALE-FOR-SCALE does).
(defun compose-sib (scale index base)
  (with-checks (and (< scale 8) (< index 8) (< base 8))
    (+ base (ash index 3) (ash scale 6))))
(defun decode-sib (s)
  (with-checks (integerp s)
    (list :scale (logand (ash s -6) #x3)
          :index (logand (ash s -3) #x7)
          :base (logand s #x7))))
;; Record a register operand into INSN (an oprinfo) at position WHERE:
;; 'REG (modrm.reg), 'RM (modrm.mod = 11 plus modrm.rm), or 'OP (the
;; +r opcode extension).
(defun add-reg-operand (insn reg where)
  (with-checks (reg? reg)
    (let ((num (register-number reg)))
      (ecase where
        (reg (setf (oprinfo-modrm.reg insn) num))
        (rm (setf (oprinfo-modrm.mod insn) #x3)
            (setf (oprinfo-modrm.rm insn) num))
        (op (setf (oprinfo-oc.ext insn) num))))))
;; Record an immediate of WIDTH bytes; symbolic immediates also record
;; the relocation TYPE and their addend (default 0).
(defun add-immediate-operand (insn imm width type)
  (with-checks (immediate? imm)
    (if (integerp imm)
        (progn
          (setf (oprinfo-imm insn) imm)
          (setf (oprinfo-imm.bytes insn) width))
        (progn
          (setf (oprinfo-imm insn) (second imm))
          (setf (oprinfo-imm.bytes insn) width)
          (setf (oprinfo-imm.rel-type insn) type)
          (setf (oprinfo-imm.rel-addn insn) (or (third imm) 0))))))
;; /n opcode subcodes occupy the modrm.reg field.
(defun add-opcode-extension (insn subcode)
  (with-checks (integerp subcode)
    (setf (oprinfo-modrm.reg insn) subcode)))
;; ModRM mod field implied by a displacement: 0 for none, 1 for a
;; one-byte disp, 2 for four bytes (two-byte values are promoted).
(defun modrm.mod-for-disp (disp)
  (cond
    ((eql disp 0) 0)
    ((integerp disp)
     (ecase (signed-width disp)
       (1 1)
       ((2 4) 2)))
    ((listp disp)
     (ecase (first disp)
       (:byte 1)
       (:half 2)))))
;; SIB scale field is log2 of the index multiplier.
(defun sib.scale-for-scale (scale)
  (ecase scale
    (1 0)
    (2 1)
    (4 2)
    (8 3)))
;; SIB index #x04 is the architectural "no index register" encoding.
(defun add-sib.index (insn index scale)
  (setf (oprinfo-sib.scale insn) (sib.scale-for-scale scale))
  (if index
      (setf (oprinfo-sib.index insn) (register-number index))
      (setf (oprinfo-sib.index insn) #x04)))
;; Displacements are emitted as 1 or 4 bytes; a 2-byte value widens.
(defun compute-disp.bytes (disp bytes)
  (or bytes
      (let ((sz (immediate-width disp)))
        (if (eql sz 2) 4 sz))))
;; Record a displacement unless it is a literal 0 with no forced size
;; (BYTES forces emission, used for the RBP/R13 mod=01 case).
(defun add-disp (insn disp type &optional bytes)
  (if (or (not (eql disp 0)) bytes)
      (let ((sz (compute-disp.bytes disp bytes)))
        (if (integerp disp)
            (progn
              (setf (oprinfo-disp insn) disp)
              (setf (oprinfo-disp.bytes insn) sz))
            (progn
              (setf (oprinfo-disp insn) (second disp))
              (setf (oprinfo-disp.bytes insn) sz)
              (setf (oprinfo-disp.rel-type insn) type)
              (setf (oprinfo-disp.rel-addn insn) (or (third disp) 0)))))))
;; Any indexed form, and RSP/R12 as a base, cannot be expressed in
;; modrm.rm alone (rm=100 means "SIB follows"), so those take the
;; SIB path; otherwise the base goes straight into modrm.rm.
(defun add-mem-rest (insn base index scale)
  (if (or index (same-reg? base :rsp) (same-reg? base :r12))
      (progn (setf (oprinfo-modrm.rm insn) #x04)
             (setf (oprinfo-sib.base insn)
                   (or (register-number base) #x5))
             (add-sib.index insn index scale))
      (setf (oprinfo-modrm.rm insn) (register-number base))))
(defun add-modrm.mod-and-modrm.rm (insn mod rm)
  (setf (oprinfo-modrm.mod insn) mod)
  (setf (oprinfo-modrm.rm insn) rm))
(defun add-modrm.mod-only (insn mod)
  (setf (oprinfo-modrm.mod insn) mod))
;; Translate a (width base index scale disp) memory operand into
;; ModRM/SIB/displacement fields.  Special cases:
;;  :RIP      -- mod=00, rm=101 is the RIP-relative form (4-byte disp);
;;  :ABS      -- mod=00, rm=100 with SIB base=101 is absolute addressing;
;;  RBP/R13 with displacement 0 -- must still carry a forced one-byte
;;    disp of 0, because mod=00 with that base would otherwise mean
;;    RIP-relative/absolute.
(defun add-mem-operand (insn mem)
  (with-checks (mem? mem)
    (destructuring-bind (sz base index scale disp) mem
      (declare (ignore sz))
      (unless (or (member base '(:rip :abs)) (register-number base))
        (error 'encoding-error :form mem))
      (cond
        ((eql base :rip)
         (add-modrm.mod-and-modrm.rm insn #x0 #x05)
         (add-disp insn disp :rel #x04))
        ((eql base :abs)
         (add-modrm.mod-and-modrm.rm insn #x0 #x04)
         (setf (oprinfo-sib.base insn) #x05)
         (add-sib.index insn index scale)
         (add-disp insn disp :rel #x04))
        ((and (or (same-reg? base :rbp)
                  (same-reg? base :r13))
              (eql disp 0))
         (add-modrm.mod-only insn #x01)
         (add-disp insn disp :rel #x01)
         (add-mem-rest insn base index scale))
        (t
         (add-modrm.mod-only insn (modrm.mod-for-disp disp))
         (add-disp insn disp :rel)
         (add-mem-rest insn base index scale))))))
;; (Encoder DSL summary -- see the full description at the top of the
;; file.)  Each clause has two parts: a pattern and a production.
;; The pattern is a sequence of one or more of:
;;   r8 rm8 r32 imm8 imm32 imm64 s32 s64
;;   x, xmX (a vector register, or vector register/memory)
;; The production is a sequence of integers representing opcodes, or
;; one or more of:
;;   ib id iq cb cd /0 /1 /2 /3 /4 /5 /6 /7 /r /rm +r *
;;   * means that the instruction defaults to 64-bit and needs no
;;     override prefix; ib/id/iq follow the instruction with a 1-, 4-,
;;     or 8-byte immediate, respectively.
;; Match a runtime operand against one pattern constraint.  Literal
;; registers and immediates in a pattern (e.g. the 1 and :CL counts in
;; the shift templates) must match the operand exactly; symbolic
;; constraints classify the operand by kind and width.
(defun operand-matches? (opr constraint)
  (if (or (reg? constraint) (immediate? constraint))
      (eql opr constraint)
      (ecase constraint
        (rm8 (or (byte-reg? opr) (byte-mem? opr)))
        (rm32 (or (half-reg? opr) (half-mem? opr)))
        (rm64 (or (word-reg? opr) (word-mem? opr)))
        (m8 (byte-mem? opr))
        (m32 (half-mem? opr))
        (m64 (word-mem? opr))
        (m128 (wide-mem? opr))
        (r8 (byte-reg? opr))
        (r32 (half-reg? opr))
        (r64 (word-reg? opr))
        (x (xmm-reg? opr))
        (xm32 (or (xmm-reg? opr) (half-mem? opr)))
        (xm64 (or (xmm-reg? opr) (word-mem? opr)))
        (xm128 (or (xmm-reg? opr) (wide-mem? opr)))
        (imm8 (byte-immediate? opr))
        (imm16 (short-immediate? opr))
        (imm32 (half-immediate? opr))
        (imm64 (word-immediate? opr)))))
;; Macroexpansion-time machinery for DEFINE-ENCODER.  Production
;; commands are first regularized -- each /n, /r, /rm is expanded into
;; an explicit reg-side + rm-side pair -- and then walked in parallel
;; with the encoder's operand list to generate the code that fills in
;; the ocinfo and oprinfo structures at runtime.
(eval-when (:compile-toplevel :load-toplevel :execute)
  (defun operand-needs-override? (opr)
    (member opr '(rm64 r64 imm64)))
  (defun subcode-for-subcode-command (cmd)
    (case cmd
      (/0 0)
      (/1 1)
      (/2 2)
      (/3 3)
      (/4 4)
      (/5 5)
      (/6 6)
      (/7 7)
      (t nil)))
  (defun subcode-command? (cmd)
    (member cmd '(/0 /1 /2 /3 /4 /5 /6 /7)))
  (defun width-for-immediate-command (cmd)
    (case cmd
      ((ib cb) 1)
      (iw 2)
      ((id cd) 4)
      (iq 8)
      (t nil)))
  ;; ib/iw/id/iq are absolute immediates; cb/cd are branch-relative.
  (defun rel-for-immediate-command (cmd)
    (case cmd
      ((ib iw id iq) :abs)
      ((cb cd) :bra)))
  (defun immediate-command? (cmd)
    (member cmd '(ib iw id iq cb cd)))
  ;; Expand the manual-style shorthand: /n and /r imply an rm side,
  ;; /rm implies a reg side (order determines which operand binds
  ;; first in GENERATE-OPERAND-HANDLERS).
  (defun regularize-commands (cmds)
    (iter (for cmd in cmds)
          (cond
            ((subcode-command? cmd)
             (collect cmd)
             (collect '/rm))
            ((immediate-command? cmd)
             (collect cmd))
            ((eql cmd '/r)
             (collect '/r)
             (collect '/rm))
            ((eql cmd '/rm)
             (collect '/rm)
             (collect '/r))
            (t
             (collect cmd)))))
  ;; Walk CMDS and OPERANDS in parallel, emitting one setup form per
  ;; command.  Subcode and * commands consume no operand; the rest
  ;; consume one.
  (defun generate-operand-handlers (ocinfo oinfo cmds operands)
    (if (and cmds operands)
        (let ((cmd (car cmds))
              (opr (car operands)))
          (flet ((advance () (generate-operand-handlers ocinfo oinfo (cdr cmds)
                                                        (cdr operands)))
                 (ignore () (generate-operand-handlers ocinfo oinfo (cdr cmds)
                                                       operands)))
            (cond
              ((subcode-command? cmd)
               (cons `(add-opcode-extension ,oinfo
                                            ,(subcode-for-subcode-command cmd))
                     (ignore)))
              ((immediate-command? cmd)
               (cons `(add-immediate-operand ,oinfo ,opr
                                             ,(width-for-immediate-command cmd)
                                             ',(rel-for-immediate-command cmd))
                     (advance)))
              ((eql cmd '+r)
               (cons `(add-reg-operand ,oinfo ,opr 'op) (advance)))
              ((eql cmd '/r)
               (cons `(add-reg-operand ,oinfo ,opr 'reg) (advance)))
              ((eql cmd '/rm)
               (cons `(cond ((reg? ,opr)
                             (add-reg-operand ,oinfo ,opr 'rm))
                            ((mem? ,opr)
                             (add-mem-operand ,oinfo ,opr)))
                     (advance)))
              ((eql cmd '*)
               (cons `(setf (ocinfo-override? ,ocinfo) nil)
                     (ignore)))
              (t (ignore)))))))
  ;; Split a production's literal bytes into leading prefixes (members
  ;; of *PREFIXES*) and the opcode bytes proper.
  (defun find-first-non-prefix (ocs)
    (position (find-if-not #'(lambda (elt)
                               (member elt *prefixes*))
                           ocs)
              ocs))
  (defun collect-prefixes (ocs)
    (subseq ocs 0 (find-first-non-prefix ocs)))
  (defun collect-opcodes (ocs)
    (subseq ocs (find-first-non-prefix ocs) nil))
  (defun generate-opcode-handlers (ocinfo cmds)
    (let* ((ocs (remove-if-not #'integerp cmds))
           (pfxs (collect-prefixes ocs))
           (opcodes (collect-opcodes ocs)))
      `(,@(mapcar #'(lambda (pfx)
                      `(vector-push-extend ,pfx (ocinfo-prefixes ,ocinfo)))
                  pfxs)
        ,@(mapcar #'(lambda (oc)
                      `(vector-push-extend ,oc (ocinfo-opcodes ,ocinfo)))
                  opcodes))))
  ;; Any 64-bit operand in the pattern turns on REX.W (which a leading
  ;; * in the production may later turn back off).
  (defun maybe-generate-override-setter (ocinfo constraints)
    (if (some #'operand-needs-override? constraints)
        `(setf (ocinfo-override? ,ocinfo) t)
        `(progn)))
  ;; Produce the clause body: build both structures, run the operand
  ;; and opcode setters, return them as two values.
  (defun transform-production (pattern production operands)
    (let ((cmds (regularize-commands production))
          (oprinfo (gensym))
          (ocinfo (gensym)))
      `(let ((,oprinfo (make-oprinfo))
             (,ocinfo (new-ocinfo)))
         ,(maybe-generate-override-setter ocinfo pattern)
         ,@(generate-operand-handlers ocinfo oprinfo cmds operands)
         ,@(generate-opcode-handlers ocinfo cmds)
         (values ,ocinfo ,oprinfo))))
  (defun transform-constraint (constraint operand)
    `(operand-matches? ,operand ',constraint))
  ;; One DEFINE-ENCODER clause becomes one COND clause.
  (defun transform-clause (clause operands)
    (let ((pattern (car clause))
          (production (cadr clause)))
      `((and ,@(mapcar #'transform-constraint pattern operands))
        ,(transform-production pattern production operands)))))
;; Define ENCODE-<INSN>, a function over OPERANDS whose body is a COND
;; dispatching on the operand-constraint patterns in BODY (first match
;; wins).  Also records the raw clauses in *ENCODERS* under the keyword
;; form of INSN, as a side effect at macro-expansion time.
(defmacro define-encoder (insn operands &body body)
  (let ((name (prefixsym "ENCODE-" insn)))
    (push (list (intern (symbol-name insn) "KEYWORD") body) *encoders*)
    `(defun ,name ,operands
       (cond ,@(mapcar #'(lambda (clause)
                           (transform-clause clause operands))
                       body)))))
(defun register-low-part (reg)
  ;; Low three bits of a register number (the value that fits in a
  ;; ModRM/SIB field); NIL when REG is not an integer.
  (when (integerp reg)
    (logand reg #x7)))
(defun req-rex-bit (&rest regs)
  ;; REX extension bit required to address any of REGS: 1 when some
  ;; argument is an extended register (integer > 7), else 0.  Unlike the
  ;; original APPLY #'MAX over a per-register list, this scans once and
  ;; no longer signals an error when called with no arguments.
  (if (some #'(lambda (reg)
                (and (integerp reg) (> reg 7)))
            regs)
      1
      0))
;; Compose and emit the REX prefix when it carries information.  The
;; four COMPOSE-REX arguments are presumably W, R, X, B in that order
;; (W from the 64-bit override, R from modrm.reg, X from sib.index, B
;; shared by sib.base / modrm.rm / the +r opcode extension) -- confirm
;; against COMPOSE-REX.  A REX with no bits set (#x40) is skipped.
;; NOTE(review): a bare #x40 REX is meaningful for byte accesses to
;; SPL/BPL/SIL/DIL; confirm those registers are handled elsewhere.
(defun maybe-emit-rex (ln ocinfo oprinfo)
  (with-slots ((reg modrm.reg)
               (rm modrm.rm)
               (index sib.index)
               (base sib.base)
               (ext oc.ext)) oprinfo
    (let ((rex (compose-rex (if (ocinfo-override? ocinfo) 1 0)
                            (req-rex-bit reg)
                            (req-rex-bit index)
                            (req-rex-bit base rm ext))))
      (if (not (eql rex #x40))
          (emit-byte ln rex)))))
(defun maybe-emit-prefixes (ln ocinfo)
  ;; Emit every collected legacy prefix byte, in collection order.
  (loop for pfx across (ocinfo-prefixes ocinfo)
        do (emit-byte ln pfx)))
(defun emit-opcode-maybe-extended (ln opc oprinfo)
  ;; For +r style opcodes the register's low bits are folded into the
  ;; opcode byte itself; when oc.ext is NIL we add 0 (plain opcode).
  (let ((ext (or (oprinfo-oc.ext oprinfo) 0)))
    (emit-byte ln (+ opc (register-low-part ext)))))
(defun emit-opcodes (ln ocinfo oprinfo)
  ;; Two-byte (#x0F-escaped) opcodes emit the escape byte first; the
  ;; final opcode byte may carry a +r register extension.
  (let ((ocs (ocinfo-opcodes ocinfo)))
    (cond ((eql (elt ocs 0) #x0F)
           (emit-byte ln #x0F)
           (emit-opcode-maybe-extended ln (elt ocs 1) oprinfo))
          (t
           (emit-opcode-maybe-extended ln (elt ocs 0) oprinfo)))))
(defun maybe-emit-modrm (ln oprinfo)
  ;; Emit a ModRM byte only when all three of its fields were filled in
  ;; (NIL means "no ModRM for this instruction").
  (with-slots ((mod modrm.mod) (reg modrm.reg) (rm modrm.rm)) oprinfo
    (when (and mod reg rm)
      (emit-byte ln (compose-modrm mod
                                   (register-low-part reg)
                                   (register-low-part rm))))))
(defun maybe-emit-sib (ln oprinfo)
  ;; Emit a SIB byte only when scale, index and base are all present
  ;; (note: 0 is a valid, truthy field value in Lisp; only NIL skips).
  (with-slots ((scale sib.scale) (index sib.index) (base sib.base)) oprinfo
    (when (and scale index base)
      (emit-byte ln (compose-sib scale
                                 (register-low-part index)
                                 (register-low-part base))))))
(defun do-emit-disp-or-imm (ln disp-or-imm bytes type addn)
  ;; Emit a displacement/immediate field BYTES wide.  A literal integer
  ;; is emitted directly; a symbolic value becomes a relocation of kind
  ;; TYPE followed by the addend ADDN in the instruction stream.
  (when (and disp-or-imm bytes)
    (cond ((integerp disp-or-imm)
           (emit-bytes ln disp-or-imm bytes))
          (t
           (emit-reloc ln disp-or-imm bytes type)
           (emit-bytes ln addn bytes)))))
(defun maybe-emit-disp (ln oprinfo)
  ;; Displacement field, when one was recorded for this instruction.
  (with-slots ((disp disp) (bytes disp.bytes)
               (rel-type disp.rel-type) (rel-addn disp.rel-addn)) oprinfo
    (do-emit-disp-or-imm ln disp bytes rel-type rel-addn)))
(defun maybe-emit-imm (ln oprinfo)
  ;; Immediate field, when one was recorded for this instruction.
  (with-slots ((imm imm) (bytes imm.bytes)
               (rel-type imm.rel-type) (rel-addn imm.rel-addn)) oprinfo
    (do-emit-disp-or-imm ln imm bytes rel-type rel-addn)))
;; Emit one instruction in AMD64 wire order: legacy prefixes, REX,
;; opcode byte(s), ModRM, SIB, displacement, immediate.  This order is
;; mandated by the instruction format -- do not reorder the calls.
(defun encode-instruction (ln ocinfo oprinfo)
  (maybe-emit-prefixes ln ocinfo)
  (maybe-emit-rex ln ocinfo oprinfo)
  (emit-opcodes ln ocinfo oprinfo)
  (maybe-emit-modrm ln oprinfo)
  (maybe-emit-sib ln oprinfo)
  (maybe-emit-disp ln oprinfo)
  (maybe-emit-imm ln oprinfo))
(defun do-encode (ln fun args)
  ;; Run encoder FUN on ARGS; it returns (values ocinfo oprinfo), which
  ;; are handed straight to ENCODE-INSTRUCTION together with LN.
  (multiple-value-call #'encode-instruction ln (apply fun args)))
;; Encode the s-expression instruction INSN ((mnemonic . operands)) at
;; line LN by dispatching to ENCODE-<MNEMONIC> in package AMD64-ASM.
;; Assertion failures are re-signalled with the offending form attached.
;; NOTE(review): the blanket CONDITION handler converts *any* other
;; condition -- including genuine bugs -- into ENCODING-ERROR; confirm
;; this swallow-all behaviour is intended.
(defun encode-insn (insn ln)
  (handler-case
      (let ((fun (prefixsym "ENCODE-" (car insn) "AMD64-ASM")))
        (do-encode ln (symbol-function fun) (cdr insn)))
    (assertion-failed (as) (error 'assertion-failed :form insn
                                  :check (assertion-failed-check as)))
    (condition (condition) (declare (ignore condition))
               (error 'encoding-error :form insn))))
Encoders for general 8/32/64-bit integer instructions.
Not implemented: aaa, aad, aam, aas, bound, call (far), cbw, cwde,
cdqe, cwd, cdq, cqo, cmov, cmps, cmpsb, cmpsw, cmpsd, cmpsq,
daa, das, enter, in, ins, insb, insw, insd, into, jcxz, jecxz, jrcxz,
lahf, lds, les, lfs, lgs, lss, lfence, lods, lodsb, lodsw, lodsd,
lodsq, loop, loope, loopne, loopnz, loopz, mfence, movs, movsb,
movsw, movsd, movsq, outs, outsb, outsw, outsd, popa, popad, popf,
popa, popad, popf, popfd, popfq, prefetch, prefetchw, pusha, pushad,
pushf, pushfd, ret (far), sahf, scas, scasb, scasw, scasd, scasq,
sfence, shld, shrd, std, stos, stosb, stosw, stosd, stosq, xlat, xlatb
;; ALU-style two-operand group (add/adc/and/or/xor/sbb/sub/cmp): BASE,
;; BASE+1, BASE+2, BASE+3 are the rm8,r8 / rm32,r32 / r8,rm8 / r32,rm32
;; opcode bytes and SUBCODE the /digit used by the #x80-group immediate
;; forms.  Clause order matters: immediate patterns are tried first.
(defmacro define-type0-encoder (name base subcode)
  (let ((base1 base)
        (base2 (+ base 1))
        (base3 (+ base 2))
        (base4 (+ base 3)))
    `(define-encoder ,name (dest source)
       ((rm8 imm8) (#x80 ,subcode ib))
       ((rm32 imm8) (#x83 ,subcode ib))
       ((rm64 imm8) (#x83 ,subcode ib))
       ((rm32 imm32) (#x81 ,subcode id))
       ((rm64 imm32) (#x81 ,subcode id))
       ((rm8 r8) (,base1 /rm))
       ((rm32 r32) (,base2 /rm))
       ((rm64 r64) (,base2 /rm))
       ((r8 rm8) (,base3 /r))
       ((r32 rm32) (,base4 /r))
       ((r64 rm64) (,base4 /r)))))
;; Single-operand group (inc/dec/mul/div/...): BASE for rm8, BASE+1 for
;; rm32/rm64, CODE the ModRM /digit extension.
(defmacro define-type1-encoder (name base code)
  (let ((base1 base)
        (base2 (+ base 1)))
    `(define-encoder ,name (dest)
       ((rm8) (,base1 ,code))
       ((rm32) (,base2 ,code))
       ((rm64) (,base2 ,code)))))
;; Shift group without the CL variant: shift by 1 or by imm8 only.
(defmacro define-type2-encoder (name subcode)
  `(define-encoder ,name (dest source)
     ((rm8 1) (#xD0 ,subcode))
     ((rm32 1) (#xD1 ,subcode))
     ((rm64 1) (#xD1 ,subcode))
     ((rm8 imm8) (#xC0 ,subcode ib))
     ((rm32 imm8) (#xC1 ,subcode ib))
     ((rm64 imm8) (#xC1 ,subcode ib))))
;; No-operand instruction: just raw opcode bytes.
(defmacro define-type3-encoder (name &rest opcodes)
  `(define-encoder ,name ()
     (() (,@opcodes))))
;; Bit-test group (bt/btc/btr/bts): register form BASE1, imm8 form via
;; the #xBA group with /digit CODE.
(defmacro define-type4-encoder (name base1 base2 code)
  `(define-encoder ,name (dest source)
     ((rm32 r32) (#x0F ,base1 /rm))
     ((rm64 r64) (#x0F ,base1 /rm))
     ((rm32 imm8) (#x0F ,base2 ,code ib))
     ((rm64 imm8) (#x0F ,base2 ,code ib))))
;; Full rotate/shift group including the shift-by-CL forms.
(defmacro define-type5-encoder (name code)
  `(define-encoder ,name (dest count)
     ((rm8 1) (#xD0 ,code))
     ((rm8 :cl) (#xD2 ,code))
     ((rm8 imm8) (#xC0 ,code ib))
     ((rm32 1) (#xD1 ,code))
     ((rm32 :cl) (#xD3 ,code))
     ((rm32 imm8) (#xC1 ,code ib))
     ((rm64 1) (#xD1 ,code))
     ((rm64 :cl) (#xD3 ,code))
     ((rm64 imm8) (#xC1 ,code ib))))
;; ALU group instantiations: base opcode byte and /digit per mnemonic.
(define-type0-encoder add #x00 /0)
(define-type0-encoder adc #x10 /2)
(define-type0-encoder and #x20 /4)
(define-type0-encoder xor #x30 /6)
(define-type0-encoder or #x08 /1)
(define-type0-encoder sbb #x18 /3)
(define-type0-encoder sub #x28 /5)
(define-type0-encoder cmp #x38 /7)
;; Bit scan forward/reverse.
(define-encoder bsf (dest source)
  ((r32 rm32) (#x0F #xBC /r))
  ((r64 rm64) (#x0F #xBC /r)))
(define-encoder bsr (dest source)
  ((r32 rm32) (#x0F #xBD /r))
  ((r64 rm64) (#x0F #xBD /r)))
(define-encoder bswap (dest)
  ((r32) (#x0F #xC8 +r))
  ((r64) (#x0F #xC8 +r)))
;; Bit-test family.
(define-type4-encoder bt #xA3 #xBA /4)
(define-type4-encoder btc #xBB #xBA /7)
(define-type4-encoder btr #xB3 #xBA /6)
(define-type4-encoder bts #xAB #xBA /5)
;; Near call: rel32 or indirect through r/m64 (the * marker clears the
;; operand-size override for the indirect form).
(define-encoder call (target)
  ((imm32) (#xE8 cd))
  ((rm64) (* #xFF /2)))
(define-type3-encoder clc #xF8)
(define-encoder clflush (addr)
  ((m8) (#x0F #xAE /7)))
(define-type3-encoder cmc #xF5)
;; CMOVcc: opcodes #x40..#x4F paired positionally with the mnemonics.
(defmacro define-cmovcc-encoders ()
  `(progn ,@(iter (for oc from #x40 to #x4F)
                  (for insn in '(cmovo cmovno cmovb cmovnb
                                 cmovz cmovnz cmovbe cmovnbe
                                 cmovs cmovns cmovp cmovnp
                                 cmovl cmovge cmovle cmovg))
                  (collect
                      `(define-encoder ,insn (dest source)
                         ((r32 rm32) (#x0F ,oc /r))
                         ((r64 rm64) (#x0F ,oc /r)))))))
(define-cmovcc-encoders)
(define-encoder cmpxchg (dest source)
  ((rm8 r8) (#x0F #xB0 /rm))
  ((rm32 r32) (#x0F #xB1 /rm))
  ((rm64 r64) (#x0F #xB1 /rm)))
(define-type3-encoder cpuid #x0F #xA2)
;; Single-operand #xF6/#xFE groups.
(define-type1-encoder dec #xFE /1)
(define-type1-encoder div #xF6 /6)
(define-type1-encoder idiv #xF6 /7)
(define-type1-encoder inc #xFE /0)
(define-type1-encoder mul #xF6 /4)
(define-type1-encoder neg #xF6 /3)
(define-type1-encoder not #xF6 /2)
;; Two- and three-operand signed multiply.
(define-encoder imul (dest source)
  ((r32 rm32) (#x0F #xAF /r))
  ((r64 rm64) (#x0F #xAF /r)))
(define-encoder imul3 (dest source scale)
  ((r32 rm32 imm8) (#x6B /r ib))
  ((r64 rm64 imm8) (#x6B /r ib))
  ((r32 rm32 imm32) (#x69 /r id))
  ((r64 rm64 imm32) (#x69 /r id)))
(define-encoder int (idx)
  ((imm8) (#xCD ib)))
;; Jcc: short form #x70+cc rel8, near form #x0F #x80+cc rel32, paired
;; positionally with the mnemonic list.
(defmacro define-jcc-encoders ()
  `(progn ,@(iter (for oc from #x70 to #x7F)
                  (for oc2 from #x80 to #x8F)
                  (for insn in '(jo jno jb jnb
                                 jz jnz jbe jnbe
                                 js jns jp jnp
                                 jl jge jle jg))
                  (collect
                      `(define-encoder ,insn (offset)
                         ((imm8) (,oc cb))
                         ((imm32) (#x0F ,oc2 cd)))))))
(define-jcc-encoders)
;; Unconditional jump: rel8, rel32, or indirect through r/m64.
(define-encoder jmp (target)
  ((imm8) (#xEB cb))
  ((imm32) (#xE9 cd))
  ((rm64) (* #xFF /4)))
(define-type3-encoder leave #xC9)
;; MOV in all its register/memory/immediate forms.  Pattern order
;; matters: the rm64,imm32 sign-extended form is tried before r64,imm64.
(define-encoder mov (dest source)
  ((rm8 r8) (#x88 /rm))
  ((rm32 r32) (#x89 /rm))
  ((rm64 r64) (#x89 /rm))
  ((r8 rm8) (#x8A /r))
  ((r32 rm32) (#x8B /r))
  ((r64 rm64) (#x8B /r))
  ((r8 imm8) (#xB0 +r ib))
  ((r32 imm32) (#xB8 +r id))
  ((rm64 imm32) (#xC7 /0 id))
  ((r64 imm64) (#xB8 +r iq))
  ((rm8 imm8) (#xC6 /0 ib))
  ((rm32 imm32) (#xC7 /0 id)))
(define-encoder movnti (dest source)
  ((m32 r32) (#x0F #xC3 /rm))
  ((m64 r64) (#x0F #xC3 /rm)))
(define-encoder movsx (dest source)
  ((r32 rm8) (#x0F #xBE /r))
  ((r64 rm8) (#x0F #xBE /r)))
(define-encoder movsxd (dest source)
  ((r64 rm32) (#x63 /r)))
(define-encoder movzx (dest source)
  ((r32 rm8) (#x0F #xB6 /r))
  ((r64 rm8) (#x0F #xB6 /r)))
(define-type3-encoder nop #x90)
(define-type3-encoder pause #xF3 #x90)
;; Stack operations default to 64-bit operands (hence the * marker).
(define-encoder pop (dest)
  ((r64) (* #x58 +r))
  ((rm64) (* #x8F /0)))
(define-encoder push (source)
  ((r64) (* #x50 +r))
  ((rm64) (* #xFF /6))
  ((imm8) (#x6A ib))
  ((imm32) (#x68 id)))
(define-type5-encoder rcl /2)
(define-type5-encoder rcr /3)
(define-type3-encoder ret #xC3)
(define-type5-encoder rol /0)
(define-type5-encoder ror /1)
(define-encoder retn (bytes)
  ((imm16) (#xC2 iw)))
;; SETcc: #x0F #x90+cc, paired positionally with the mnemonic list.
;; NOTE(review): the ModRM reg field is encoded as /2 here although the
;; hardware ignores it for SETcc -- confirm this matches the reference
;; assembler's output (yasm) intentionally.
(defmacro define-setcc-encoders ()
  `(progn ,@(iter (for oc from #x90 to #x9F)
                  (for insn in '(seto setno setb setnb
                                 setz setnz setbe setnbe
                                 sets setns setp setnp
                                 setl setge setle setg))
                  (collect
                      `(define-encoder ,insn (offset)
                         ((rm8) (#x0F ,oc /2)))))))
(define-setcc-encoders)
;; NOTE(review): sal/sar/shr use the type2 template, so shift-by-CL is
;; not encodable for them, while rcl/rcr/rol/ror (type5) support CL --
;; confirm this asymmetry is intended.
(define-type2-encoder sal /4)
(define-type2-encoder sar /7)
(define-type2-encoder shr /5)
(define-type3-encoder stc #xF9)
(define-encoder test (dest source)
  ((rm8 imm8) (#xF6 /0 ib))
  ((rm32 imm32) (#xF7 /0 id))
  ((rm64 imm32) (#xF7 /0 id))
  ((rm8 r8) (#x84 /rm))
  ((rm32 r32) (#x85 /rm))
  ((rm64 r64) (#x85 /rm)))
(define-encoder xchg (dest source)
  ((rm8 r8) (#x86 /rm))
  ((r8 rm8) (#x86 /r))
  ((rm32 r32) (#x87 /rm))
  ((r32 rm32) (#x87 /r))
  ((rm64 r64) (#x87 /rm))
  ((r64 rm64) (#x87 /r)))
(define-encoder xadd (dest source)
  ((rm8 r8) (#x0F #xC0 /rm))
  ((rm32 r32) (#x0F #xC1 /rm))
  ((rm64 r64) (#x0F #xC1 /rm)))
Man, there are a lot of SSE instructions.
The choice of xm32/xm64/xm128 is seemingly random.
The specifier chosen is the one that makes yasm happy.
;; SSE template macros.  X = xmm register; xm32/xm64/xm128 = xmm or
;; memory of that width; OPCODES include any mandatory prefix bytes
;; (#x66 / #xF2 / #xF3) followed by the escape + opcode.
(defmacro define-x-encoder (name &rest opcodes)
  `(define-encoder ,name (dest source)
     ((x x) (,@opcodes /r))))
(defmacro define-xm128-encoder (name &rest opcodes)
  `(define-encoder ,name (dest source)
     ((x xm128) (,@opcodes /r))))
(defmacro define-xm64-encoder (name &rest opcodes)
  `(define-encoder ,name (dest source)
     ((x xm64) (,@opcodes /r))))
(defmacro define-xm32-encoder (name &rest opcodes)
  `(define-encoder ,name (dest source)
     ((x xm32) (,@opcodes /r))))
;; Three-operand compare/shuffle template: trailing imm8 selector.
(defmacro define-cmp-encoder (name &rest opcodes)
  `(define-encoder ,name (dest source cmp)
     ((x xm128 imm8) (,@opcodes /r ib))))
;; GPR <- xmm/mem conversions (64- and 32-bit memory source variants).
(defmacro define-rx64-encoder (name &rest opcodes)
  `(define-encoder ,name (dest source)
     ((r32 xm64) (,@opcodes /r))
     ((r64 xm64) (,@opcodes /r))))
(defmacro define-rx32-encoder (name &rest opcodes)
  `(define-encoder ,name (dest source)
     ((r32 xm32) (,@opcodes /r))
     ((r64 xm32) (,@opcodes /r))))
(defmacro define-rx-encoder (name &rest opcodes)
  `(define-encoder ,name (dest source)
     ((r32 xm32) (,@opcodes /r))
     ((r64 xm64) (,@opcodes /r))))
;; xmm <- GPR/mem conversions.
(defmacro define-xr-encoder (name &rest opcodes)
  `(define-encoder ,name (dest source)
     ((x rm32) (,@opcodes /r))
     ((x rm64) (,@opcodes /r))))
;; Packed shift templates: by xmm/mem count (CODE1) or imm8 (CODE2 with
;; /digit SUB); shift1 only has the imm8 form (e.g. pslldq).
(defmacro define-shift0-encoder (name code1 code2 sub)
  `(define-encoder ,name (dest shift)
     ((x xm128) (#x66 #x0F ,code1 /r))
     ((x imm8) (#x66 #x0F ,code2 ,sub ib))))
(defmacro define-shift1-encoder (name code sub)
  `(define-encoder ,name (dest shift)
     ((x imm8) (#x66 #x0F ,code ,sub ib))))
;; Load/store move templates: OPCODES1 loads into xmm, OPCODES2 stores.
(defmacro define-mov1-encoder (name opcodes1 opcodes2)
  `(define-encoder ,name (dest source)
     ((x m64) (,@opcodes1 /r))
     ((m64 x) (,@opcodes2 /rm))))
(defmacro define-mov2-encoder (name &rest opcodes)
  `(define-encoder ,name (dest source)
     ((r32 x) (,@opcodes /r))))
(defmacro define-mov3-encoder (name &rest opcodes)
  `(define-encoder ,name (dest source)
     ((m128 x) (,@opcodes /rm))))
(defmacro define-mov0-128-encoder (name opcodes1 opcodes2)
  `(define-encoder ,name (dest source)
     ((x xm128) (,@opcodes1 /r))
     ((xm128 x) (,@opcodes2 /rm))))
(defmacro define-mov0-64-encoder (name opcodes1 opcodes2)
  `(define-encoder ,name (dest source)
     ((x xm64) (,@opcodes1 /r))
     ((xm64 x) (,@opcodes2 /rm))))
(defmacro define-mov0-32-encoder (name opcodes1 opcodes2)
  `(define-encoder ,name (dest source)
     ((x xm32) (,@opcodes1 /r))
     ((xm32 x) (,@opcodes2 /rm))))
;; SSE/SSE2/SSE3 instruction table: mnemonic + prefix/escape/opcode
;; bytes, using the templates above.
;; Packed/scalar arithmetic.
(define-xm128-encoder addpd #x66 #x0F #x58)
(define-xm128-encoder addps #x0F #x58)
(define-xm128-encoder addsd #xF2 #x0F #x58)
(define-xm128-encoder addss #xF3 #x0F #x58)
(define-xm128-encoder addsubpd #x66 #x0F #xD0)
(define-xm128-encoder addsubps #xF2 #x0F #xD0)
(define-xm128-encoder andnpd #x66 #x0F #x55)
(define-xm128-encoder andnps #x0F #x55)
(define-xm128-encoder andpd #x66 #x0F #x54)
(define-xm128-encoder andps #x0F #x54)
;; Compares (imm8 predicate selector).
(define-cmp-encoder cmppd #x66 #x0F #xC2)
(define-cmp-encoder cmpps #x0F #xC2)
(define-cmp-encoder cmpsd #xF2 #x0F #xC2)
(define-cmp-encoder cmpss #xF3 #x0F #xC2)
(define-xm128-encoder comisd #x66 #x0F #x2F)
(define-xm128-encoder comiss #x0F #x2F)
;; Conversions.
(define-xm64-encoder cvtdq2pd #xF3 #x0F #xE6)
(define-xm128-encoder cvtdq2ps #x0F #x5B)
(define-xm128-encoder cvtpd2dq #xF2 #x0F #xE6)
(define-xm128-encoder cvtpd2ps #x66 #x0F #x5A)
(define-xm128-encoder cvtps2dq #x66 #x0F #x5B)
(define-xm64-encoder cvtps2pd #x0F #x5A)
(define-rx64-encoder cvtsd2si #xF2 #x0F #x2D)
(define-xm64-encoder cvtsd2ss #xF2 #x0F #x5A)
(define-xr-encoder cvtsi2sd #xF2 #x0F #x2A)
(define-xr-encoder cvtsi2ss #xF3 #x0F #x2A)
(define-xm32-encoder cvtss2sd #xF3 #x0F #x5A)
(define-rx32-encoder cvtss2si #xF3 #x0F #x2D)
(define-xm128-encoder cvttpd2dq #x66 #x0F #xE6)
(define-xm128-encoder cvttps2dq #xF3 #x0F #x5b)
(define-rx64-encoder cvttsd2si #xF2 #x0F #x2C)
(define-rx32-encoder cvttss2si #xF3 #x0F #x2C)
(define-xm128-encoder divpd #x66 #x0F #x5E)
(define-xm128-encoder divps #x0F #x5E)
(define-xm128-encoder divsd #xF2 #x0F #x5E)
(define-xm128-encoder divss #xF3 #x0F #x5E)
(define-xm128-encoder haddpd #x66 #x0F #x7C)
(define-xm128-encoder haddps #xF2 #x0F #x7C)
(define-xm128-encoder hsubpd #x66 #x0F #x7D)
(define-xm128-encoder hsubps #xF2 #x0F #x7D)
;; NOTE(review): bare "lddqu" token below is the remnant of a stripped
;; comment in this extraction (lddqu is unimplemented) -- left as-is.
lddqu
(define-encoder ldmxcsr (source)
  ((m32) (#x0F #xAE /2)))
(define-x-encoder maskmovdqu #x66 #x0F #xF7)
(define-xm128-encoder maxpd #x66 #x0F #x5F)
(define-xm128-encoder maxps #x0F #x5F)
(define-xm128-encoder maxsd #xF2 #x0F #x5F)
(define-xm128-encoder maxss #xF3 #x0F #x5F)
(define-xm128-encoder minpd #x66 #x0F #x5D)
(define-xm128-encoder minps #x0F #x5D)
(define-xm128-encoder minsd #xF2 #x0F #x5D)
(define-xm128-encoder minss #xF3 #x0F #x5D)
;; Moves.
(define-mov0-128-encoder movapd (#x66 #x0F #x28) (#x66 #x0F #x29))
(define-mov0-128-encoder movaps (#x0F #x28) (#x0F #x29))
(define-encoder movd (dest source)
  ((x rm32) (#x66 #x0F #x6E /r))
  ((x rm64) (#x66 #x0F #x6E /r))
  ((rm32 x) (#x66 #x0F #x7E /rm))
  ((rm64 x) (#x66 #x0F #x7E /rm)))
(define-xm64-encoder movddup #xF2 #x0F #x12)
(define-mov0-128-encoder movdqa (#x66 #x0F #x6F) (#x66 #x0F #x7F))
(define-mov0-128-encoder movdqu (#xF3 #x0F #x6F) (#xF3 #x0F #x7F))
(define-x-encoder movhlps #x0F #x12)
(define-mov1-encoder movhpd (#x66 #x0F #x16) (#x66 #x0F #x17))
(define-mov1-encoder movhps (#x0F #x16) (#x0F #x17))
(define-x-encoder movlhps #x0F #x16)
(define-mov1-encoder movlpd (#x66 #x0F #x12) (#x66 #x0F #x13))
(define-mov1-encoder movlps (#x0F #x12) (#x0F #x13))
(define-mov2-encoder movmskpd #x66 #x0F #x50)
(define-mov2-encoder movmskps #x0F #x50)
(define-mov3-encoder movntdq #x66 #x0F #xE7)
(define-mov3-encoder movntpd #x66 #x0F #x2B)
(define-mov3-encoder movntps #x0F #x2B)
(define-mov0-64-encoder movq (#xF3 #x0F #x7E) (#x66 #x0F #xD6))
(define-mov0-64-encoder movsd (#xF2 #x0F #x10) (#xF2 #x0F #x11))
(define-xm128-encoder movshdup #xF3 #x0F #x16)
(define-xm128-encoder movsldup #xF3 #x0F #x12)
(define-mov0-32-encoder movss (#xF3 #x0F #x10) (#xF3 #x0F #x11))
(define-mov0-128-encoder movupd (#x66 #x0F #x10) (#x66 #x0F #x11))
(define-mov0-128-encoder movups (#x0F #x10) (#x0F #x11))
(define-xm128-encoder mulpd #x66 #x0F #x59)
(define-xm128-encoder mulps #x0F #x59)
(define-xm128-encoder mulsd #xF2 #x0F #x59)
(define-xm128-encoder mulss #xF3 #x0F #x59)
(define-xm128-encoder orpd #x66 #x0F #x56)
(define-xm128-encoder orps #x0F #x56)
;; Packed-integer operations.
(define-xm128-encoder packssdw #x66 #x0F #x6B)
(define-xm128-encoder packsswb #x66 #x0F #x63)
(define-xm128-encoder packuswb #x66 #x0F #x67)
(define-xm128-encoder paddb #x66 #x0F #xFC)
(define-xm128-encoder paddd #x66 #x0F #xFE)
(define-xm128-encoder paddq #x66 #x0F #xD4)
(define-xm128-encoder paddsb #x66 #x0F #xEC)
(define-xm128-encoder paddsw #x66 #x0F #xED)
(define-xm128-encoder paddusb #x66 #x0F #xDC)
(define-xm128-encoder paddusw #x66 #x0F #xDD)
(define-xm128-encoder paddw #x66 #x0F #xFD)
(define-xm128-encoder pand #x66 #x0F #xDB)
(define-xm128-encoder pandn #x66 #x0F #xDF)
(define-xm128-encoder pavgb #x66 #x0F #xE0)
(define-xm128-encoder pavgw #x66 #x0F #xE3)
(define-xm128-encoder pcmpeqb #x66 #x0F #x74)
(define-xm128-encoder pcmpeqd #x66 #x0F #x76)
(define-xm128-encoder pcmpeqw #x66 #x0F #x75)
(define-xm128-encoder pcmpgtb #x66 #x0F #x64)
(define-xm128-encoder pcmpgtd #x66 #x0F #x66)
(define-xm128-encoder pcmpgtw #x66 #x0F #x65)
(define-encoder pextrw (dest source sel)
  ((r32 x imm8) (#x66 #x0F #xC5 /r ib)))
(define-encoder pinsrw (dest source sel)
  ((x rm32 imm8) (#x66 #x0F #xC4 /r ib)))
(define-xm128-encoder pmaddwd #x66 #x0F #xF5)
(define-xm128-encoder pmaxsw #x66 #x0F #xEE)
(define-xm128-encoder pmaxub #x66 #x0F #xDE)
(define-xm128-encoder pminsw #x66 #x0F #xEA)
(define-xm128-encoder pminub #x66 #x0F #xDA)
(define-mov2-encoder pmovmskb #x66 #x0F #xD7)
(define-xm128-encoder pmulhuw #x66 #x0F #xE4)
(define-xm128-encoder pmulhw #x66 #x0F #xE5)
(define-xm128-encoder pmullw #x66 #x0F #xD5)
(define-xm128-encoder pmuludq #x66 #x0F #xF4)
(define-xm128-encoder por #x66 #x0F #xEB)
(define-xm128-encoder psadbw #x66 #x0F #xF6)
;; Shuffles and packed shifts.
(define-cmp-encoder pshufd #x66 #x0F #x70)
(define-cmp-encoder pshufhw #xF3 #x0F #x70)
(define-cmp-encoder pshuflw #xF2 #x0F #x70)
(define-shift0-encoder pslld #xF2 #x72 /6)
(define-shift1-encoder pslldq #x73 /7)
(define-shift0-encoder psllq #xF3 #x73 /6)
(define-shift0-encoder psllw #xF1 #x71 /6)
(define-shift0-encoder psrad #xE2 #x72 /4)
(define-shift0-encoder psraw #xE1 #x71 /4)
(define-shift0-encoder psrld #xD2 #x72 /2)
(define-shift1-encoder psrldq #x73 /3)
(define-shift0-encoder psrlq #xD3 #x73 /2)
(define-shift0-encoder psrlw #xD1 #x71 /2)
(define-xm128-encoder psubb #x66 #x0F #xF8)
(define-xm128-encoder psubd #x66 #x0F #xFA)
(define-xm128-encoder psubq #x66 #x0F #xFB)
(define-xm128-encoder psubsb #x66 #x0F #xE8)
(define-xm128-encoder psubsw #x66 #x0F #xE9)
(define-xm128-encoder psubusb #x66 #x0F #xD8)
(define-xm128-encoder psubusw #x66 #x0F #xD9)
(define-xm128-encoder psubw #x66 #x0F #xF9)
(define-xm128-encoder punpckhbw #x66 #x0F #x68)
(define-xm128-encoder punpckhdq #x66 #x0F #x6A)
(define-xm128-encoder punpckhqdq #x66 #x0F #x6D)
(define-xm128-encoder punpckhwd #x66 #x0F #x69)
(define-xm128-encoder punpcklbw #x66 #x0F #x60)
(define-xm128-encoder punpckldq #x66 #x0F #x62)
(define-xm128-encoder punpcklqdq #x66 #x0F #x6C)
(define-xm128-encoder punpcklwd #x66 #x0F #x61)
(define-xm128-encoder pxor #x66 #x0F #xEF)
;; Reciprocal/square-root, shuffles, misc.
(define-xm128-encoder rcpps #x0F #x53)
(define-xm128-encoder rcpss #xF3 #x0F #x53)
(define-xm128-encoder rsqrtps #x0F #x52)
(define-xm128-encoder rsqrtss #xF3 #x0F #x52)
(define-cmp-encoder shufpd #x66 #x0F #xC6)
(define-cmp-encoder shufps #x0F #xC6)
(define-xm128-encoder sqrtpd #x66 #x0F #x51)
(define-xm128-encoder sqrtps #x0F #x51)
(define-xm128-encoder sqrtsd #xF2 #x0F #x51)
(define-xm128-encoder sqrtss #xF3 #x0F #x51)
(define-encoder stmxcsr (dest)
  ((m32) (#x0F #xAE /3)))
(define-xm128-encoder subpd #x66 #x0F #x5C)
(define-xm128-encoder subps #x0F #x5C)
(define-xm128-encoder subsd #xF2 #x0F #x5C)
(define-xm128-encoder subss #xF3 #x0F #x5C)
(define-xm128-encoder ucomisd #x66 #x0F #x2E)
(define-xm128-encoder ucomiss #x0F #x2E)
(define-xm128-encoder unpckhpd #x66 #x0F #x15)
(define-xm128-encoder unpckhps #x0F #x15)
(define-xm128-encoder unpcklpd #x66 #x0F #x14)
(define-xm128-encoder unpcklps #x0F #x14)
(define-xm128-encoder xorpd #x66 #x0F #x57)
(define-xm128-encoder xorps #x0F #x57)
|
44f992819988a592d4e05bd424cd7a2446e6128384db84804adefb5618682f6e | racket/racket7 | class.rkt | #lang racket/base
(require "chyte.rkt"
"chyte-case.rkt"
"../common/range.rkt")
(provide parse-class
parse-posix-char-class)
;; Parse a one-character escape class (\d \w \s and their negated
;; capitals) at position POS of chytes S.
;; returns (values success? range pos) where pos is one past the class
;; character; the negated classes invert up to (chytes-limit s).
(define (parse-class s pos config)
  ;; We know there's at least one character
  (define (success v) (values #t v (add1 pos)))
  (chyte-case
   (chytes-ref s pos)
   [(#\d) (success (range:d))]
   [(#\D) (success (range-invert (range:d) (chytes-limit s)))]
   [(#\w) (success (range:w))]
   [(#\W) (success (range-invert (range:w) (chytes-limit s)))]
   [(#\s) (success (range:s))]
   [(#\S) (success (range-invert (range:s) (chytes-limit s)))]
   [else (values #f #f #f)]))
;; \d -- decimal digits 0-9.
(define (range:d)
  (range-add-span empty-range (chyte #\0) (chyte #\9)))
;; \w -- word characters: digits, ASCII letters and underscore.
(define (range:w)
  (range-add
   (range-add-span
    (range-add-span
     (range:d)
     (chyte #\a) (chyte #\z))
    (chyte #\A) (chyte #\Z))
   (chyte #\_)))
;; \s -- whitespace: space, tab, newline, form feed, carriage return.
(define (range:s)
  (let* ([r (range-add empty-range (chyte #\space))]
         [r (range-add r (chyte #\tab))]
         [r (range-add r (chyte #\newline))]
         [r (range-add r (chyte #\page))]
         [r (range-add r (chyte #\return))])
    r))
;; ----------------------------------------
;; Parse a POSIX bracket class such as [:alpha:].  POS points just
;; after the opening "[", at the expected ":".
;; Returns (values success? range position); position is advanced past
;; "name:]" on success -- pos + 3 covers the two colons and "]".
(define (parse-posix-char-class s pos)
  (chyte-case/eos
   s pos
   [(#\:)
    ;; Collect the lowercase class name up to the closing ":]";
    ;; #f when the input ends or a non-name character appears first.
    (define class
      (let loop ([accum null] [pos (add1 pos)])
        (cond
         [(= pos (chytes-length s)) #f]
         [else
          (define c (chytes-ref s pos))
          (cond
           [(and (c . >= . (chyte #\a)) (c . <= . (chyte #\z)))
            (loop (cons c accum) (add1 pos))]
           [(and (= c (chyte #\:))
                 ((add1 pos) . < . (chytes-length s))
                 (= (chytes-ref s (add1 pos)) (chyte #\])))
            (list->bytes (reverse accum))]
           [else #f])])))
    ;; Map the class name to its character range; #f for unknown names.
    (define range
      (case class
        [(#"alpha") (range-add-span
                     (range-add-span
                      empty-range
                      (chyte #\a) (chyte #\z))
                     (chyte #\A) (chyte #\Z))]
        [(#"upper") (range-add-span
                     empty-range
                     (chyte #\A) (chyte #\Z))]
        [(#"lower") (range-add-span
                     empty-range
                     (chyte #\a) (chyte #\z))]
        [(#"digit") (range-add-span
                     empty-range
                     (chyte #\0) (chyte #\9))]
        [(#"xdigit") (range-add-span
                      (range-add-span
                       (range-add-span
                        empty-range
                        (chyte #\0) (chyte #\9))
                       (chyte #\a) (chyte #\f))
                      (chyte #\A) (chyte #\F))]
        [(#"alnum") (range-add-span
                     (range-add-span
                      (range-add-span
                       empty-range
                       (chyte #\0) (chyte #\9))
                      (chyte #\a) (chyte #\z))
                     (chyte #\A) (chyte #\Z))]
        [(#"word") (range-add
                    (range-add-span
                     (range-add-span
                      empty-range
                      (chyte #\a) (chyte #\z))
                     (chyte #\A) (chyte #\Z))
                    (chyte #\_))]
        [(#"blank") (range-add
                     (range-add empty-range (chyte #\space))
                     (chyte #\tab))]
        [(#"space") (range:s)]
        [(#"graph" #"print")
         ;; graph = printable ASCII minus space; print adds space/tab.
         (define range
           (for/fold ([range empty-range]) ([i (in-range 0 128)])
             (if (char-graphic? (integer->char i))
                 (range-add range i)
                 range)))
         (if (equal? class #"print")
             (range-add
              (range-add range (chyte #\space))
              (chyte #\tab))
             range)]
        [(#"cntrl") (range-add-span empty-range 0 31)]
        [(#"ascii") (range-add-span empty-range 0 127)]
        [else #f]))
    (if range
        (values #t range (+ pos 3 (bytes-length class)))
        (values #f #f #f))]
   [else (values #f #f #f)]))
| null | https://raw.githubusercontent.com/racket/racket7/5dbb62c6bbec198b4a790f1dc08fef0c45c2e32b/racket/src/regexp/parse/class.rkt | racket | returns (values success? range pos)
----------------------------------------
Returns (values success? range position) | #lang racket/base
(require "chyte.rkt"
"chyte-case.rkt"
"../common/range.rkt")
(provide parse-class
parse-posix-char-class)
(define (parse-class s pos config)
We know there 's at least one character
(define (success v) (values #t v (add1 pos)))
(chyte-case
(chytes-ref s pos)
[(#\d) (success (range:d))]
[(#\D) (success (range-invert (range:d) (chytes-limit s)))]
[(#\w) (success (range:w))]
[(#\W) (success (range-invert (range:w) (chytes-limit s)))]
[(#\s) (success (range:s))]
[(#\S) (success (range-invert (range:s) (chytes-limit s)))]
[else (values #f #f #f)]))
(define (range:d)
(range-add-span empty-range (chyte #\0) (chyte #\9)))
(define (range:w)
(range-add
(range-add-span
(range-add-span
(range:d)
(chyte #\a) (chyte #\z))
(chyte #\A) (chyte #\Z))
(chyte #\_)))
(define (range:s)
(let* ([r (range-add empty-range (chyte #\space))]
[r (range-add r (chyte #\tab))]
[r (range-add r (chyte #\newline))]
[r (range-add r (chyte #\page))]
[r (range-add r (chyte #\return))])
r))
(define (parse-posix-char-class s pos)
(chyte-case/eos
s pos
[(#\:)
(define class
(let loop ([accum null] [pos (add1 pos)])
(cond
[(= pos (chytes-length s)) #f]
[else
(define c (chytes-ref s pos))
(cond
[(and (c . >= . (chyte #\a)) (c . <= . (chyte #\z)))
(loop (cons c accum) (add1 pos))]
[(and (= c (chyte #\:))
((add1 pos) . < . (chytes-length s))
(= (chytes-ref s (add1 pos)) (chyte #\])))
(list->bytes (reverse accum))]
[else #f])])))
(define range
(case class
[(#"alpha") (range-add-span
(range-add-span
empty-range
(chyte #\a) (chyte #\z))
(chyte #\A) (chyte #\Z))]
[(#"upper") (range-add-span
empty-range
(chyte #\A) (chyte #\Z))]
[(#"lower") (range-add-span
empty-range
(chyte #\a) (chyte #\z))]
[(#"digit") (range-add-span
empty-range
(chyte #\0) (chyte #\9))]
[(#"xdigit") (range-add-span
(range-add-span
(range-add-span
empty-range
(chyte #\0) (chyte #\9))
(chyte #\a) (chyte #\f))
(chyte #\A) (chyte #\F))]
[(#"alnum") (range-add-span
(range-add-span
(range-add-span
empty-range
(chyte #\0) (chyte #\9))
(chyte #\a) (chyte #\z))
(chyte #\A) (chyte #\Z))]
[(#"word") (range-add
(range-add-span
(range-add-span
empty-range
(chyte #\a) (chyte #\z))
(chyte #\A) (chyte #\Z))
(chyte #\_))]
[(#"blank") (range-add
(range-add empty-range (chyte #\space))
(chyte #\tab))]
[(#"space") (range:s)]
[(#"graph" #"print")
(define range
(for/fold ([range empty-range]) ([i (in-range 0 128)])
(if (char-graphic? (integer->char i))
(range-add range i)
range)))
(if (equal? class #"print")
(range-add
(range-add range (chyte #\space))
(chyte #\tab))
range)]
[(#"cntrl") (range-add-span empty-range 0 31)]
[(#"ascii") (range-add-span empty-range 0 127)]
[else #f]))
(if range
(values #t range (+ pos 3 (bytes-length class)))
(values #f #f #f))]
[else (values #f #f #f)]))
|
ac30292953e45fc6a247bd6a4fa037369d8548cba8c91b5743ae1a09c64dff7e | tezos/tezos-mirror | RPC_server.ml | (*****************************************************************************)
(* *)
(* Open Source License *)
Copyright ( c ) 2022 - 2023 Trili Tech < >
Copyright ( c ) 2022 Nomadic Labs < >
Copyright ( c ) 2023 Marigold < >
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
to deal in the Software without restriction , including without limitation
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
and/or sell copies of the Software , and to permit persons to whom the
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
open Tezos_rpc_http
open Tezos_rpc_http_server
(* Errors specific to the DAC node's RPC layer. *)
type error +=
  | Cannot_construct_external_message
      (* Payload was serialized and signed, but the L1 external message
         could not be assembled from hash + signature + witnesses. *)
  | Cannot_deserialize_external_message
      (* A hex-encoded L1 external message failed to decode. *)
(* Register both error kinds with the shell's error monad so they print
   and encode properly in tzresult traces. *)
let () =
  register_error_kind
    `Permanent
    ~id:"dac_cannot_construct_external_message"
    ~title:"External rollup message could not be constructed"
    ~description:"External rollup message could not be constructed"
    ~pp:(fun ppf () ->
      Format.fprintf ppf "External rollup message could not be constructed")
    Data_encoding.unit
    (function Cannot_construct_external_message -> Some () | _ -> None)
    (fun () -> Cannot_construct_external_message) ;
  register_error_kind
    `Permanent
    ~id:"dac_cannot_deserialize_rollup_external_message"
    ~title:"External rollup message could not be deserialized"
    ~description:"External rollup message could not be deserialized"
    ~pp:(fun ppf () ->
      Format.fprintf ppf "External rollup message could not be deserialized")
    Data_encoding.unit
    (function Cannot_deserialize_external_message -> Some () | _ -> None)
    (fun () -> Cannot_deserialize_external_message)
(* Argument-flipped registration helper so services compose over a
   directory with [dir |> add_service registerer service handler]. *)
let add_service registerer service handler directory =
  registerer directory service handler
(* Serialize [data] into pages under [pagination_scheme], have the DAC
   members' keys sign the resulting root hash, and build the L1 external
   message (hash + aggregate signature + witnesses).  The Merkle scheme
   additionally publishes the root hash to [hash_streamer].  Fails with
   [Cannot_construct_external_message] if assembly fails. *)
let handle_serialize_dac_store_preimage dac_plugin cctxt dac_sk_uris page_store
    hash_streamer (data, pagination_scheme) =
  let open Lwt_result_syntax in
  let open Pages_encoding in
  let* root_hash =
    match pagination_scheme with
    | Pagination_scheme.Merkle_tree_V0 ->
        (* FIXME: https://gitlab.com/tezos/tezos/-/issues/4897
           Once new "PUT /preimage" endpoint is implemented, pushing
           a new root hash to the data streamer should be moved there.
           Tezt for testing streaming of root hashes should also use
           the new endpoint. *)
        let* root_hash =
          Merkle_tree.V0.Filesystem.serialize_payload
            dac_plugin
            ~page_store
            data
        in
        let () = Data_streamer.publish hash_streamer root_hash in
        let*! () =
          Event.emit_root_hash_pushed_to_data_streamer dac_plugin root_hash
        in
        return root_hash
    | Pagination_scheme.Hash_chain_V0 ->
        Hash_chain.V0.serialize_payload
          dac_plugin
          ~for_each_page:(fun (hash, content) ->
            Page_store.Filesystem.save dac_plugin page_store ~hash ~content)
          data
  in
  let* signature, witnesses =
    Signature_manager.sign_root_hash dac_plugin cctxt dac_sk_uris root_hash
  in
  let*! external_message =
    External_message.Default.make dac_plugin root_hash signature witnesses
  in
  match external_message with
  | Ok external_message -> return @@ (root_hash, external_message)
  | Error _ -> tzfail @@ Cannot_construct_external_message
(* Decode a hex-encoded L1 external message and verify its aggregate
   signature over the root hash against [public_keys_opt] and the
   message's witness list.  Fails with
   [Cannot_deserialize_external_message] when the query argument is
   missing or does not decode. *)
let handle_verify_external_message_signature dac_plugin public_keys_opt
    encoded_l1_message =
  let open Lwt_result_syntax in
  let external_message =
    let open Option_syntax in
    let* encoded_l1_message in
    let* as_bytes = Hex.to_bytes @@ `Hex encoded_l1_message in
    let ((module P) : Dac_plugin.t) = dac_plugin in
    External_message.Default.of_bytes P.encoding as_bytes
  in
  match external_message with
  | None -> tzfail @@ Cannot_deserialize_external_message
  | Some {root_hash; signature; witnesses} ->
      Signature_manager.verify
        dac_plugin
        ~public_keys_opt
        root_hash
        signature
        witnesses
(* Load the page stored under the given hash from the filesystem page
   store. *)
let handle_retrieve_preimage dac_plugin page_store page_hash =
  Page_store.Filesystem.load dac_plugin page_store page_hash
(* Coordinator variant of preimage storage: serialize [payload] with the
   Merkle-tree V0 scheme, publish the root hash to subscribers, emit the
   corresponding event and return the root hash.  Mirrors the
   Merkle_tree_V0 branch of [handle_serialize_dac_store_preimage]. *)
let handle_coordinator_preimage_endpoint dac_plugin page_store hash_streamer
    payload =
  let open Lwt_result_syntax in
  let* root_hash =
    Pages_encoding.Merkle_tree.V0.Filesystem.serialize_payload
      dac_plugin
      ~page_store
      payload
  in
  let () = Data_streamer.publish hash_streamer root_hash in
  let*! () =
    Event.emit_root_hash_pushed_to_data_streamer dac_plugin root_hash
  in
  return root_hash
(* Handler for subscribing to the streaming of root hashes via the
   GET monitor/root_hashes RPC call: registers a watcher on the
   streamer and answers with a stream whose [shutdown] tears the
   watcher down. *)
let handle_monitor_root_hashes hash_streamer =
  let open Lwt_syntax in
  let stream, stopper = Data_streamer.handle_subscribe hash_streamer in
  let shutdown () = Lwt_watcher.shutdown stopper in
  let next () = Lwt_stream.get stream in
  let* () = Event.(emit handle_new_subscription_to_hash_streamer ()) in
  Tezos_rpc.Answer.return_stream {next; shutdown}
(* Look up the certificate recorded for [root_hash], if any.  Opens the
   node store read-only and repackages the stored aggregate signature
   and witnesses as a [Certificate_repr] value. *)
let handle_get_certificate ctx root_hash =
  let open Lwt_result_syntax in
  let store = Node_context.get_node_store ctx Store_sigs.Read_only in
  let* stored = Store.Certificate_store.find store root_hash in
  match stored with
  | None -> return None
  | Some Store.{aggregate_signature; witnesses} ->
      return
        (Some Certificate_repr.{aggregate_signature; witnesses; root_hash})
(* Register the store-preimage service (serialize + sign + build L1
   message) on [directory]. *)
let register_serialize_dac_store_preimage ctx cctxt dac_sk_uris page_store
    hash_streamer directory =
  directory
  |> add_service
       Tezos_rpc.Directory.register0
       (RPC_services.dac_store_preimage ctx)
       (fun () input ->
         handle_serialize_dac_store_preimage
           ctx
           cctxt
           dac_sk_uris
           page_store
           hash_streamer
           input)
let register_verify_external_message_signature dac_plugin public_keys_opt
directory =
directory
|> add_service
Tezos_rpc.Directory.register0
RPC_services.verify_external_message_signature
(fun external_message () ->
handle_verify_external_message_signature
dac_plugin
public_keys_opt
external_message)
let register_retrieve_preimage dac_plugin page_store =
add_service
Tezos_rpc.Directory.register1
(RPC_services.retrieve_preimage dac_plugin)
(fun hash () () -> handle_retrieve_preimage dac_plugin page_store hash)
let register_monitor_root_hashes dac_plugin hash_streamer dir =
Tezos_rpc.Directory.gen_register
dir
(Monitor_services.S.root_hashes dac_plugin)
(fun () () () -> handle_monitor_root_hashes hash_streamer)
let register_store_dac_member_signature ctx dac_plugin cctxt =
add_service
Tezos_rpc.Directory.register0
(RPC_services.store_dac_member_signature dac_plugin)
(fun () dac_member_signature ->
Signature_manager.Coordinator.handle_store_dac_member_signature
ctx
cctxt
dac_member_signature)
let register_coordinator_preimage_endpoint dac_plugin hash_streamer page_store =
add_service
Tezos_rpc.Directory.register0
(RPC_services.coordinator_post_preimage dac_plugin)
(fun () payload ->
handle_coordinator_preimage_endpoint
dac_plugin
page_store
hash_streamer
payload)
let register_get_certificate ctx dac_plugin =
add_service
Tezos_rpc.Directory.register1
(RPC_services.get_certificate dac_plugin)
(fun root_hash () () -> handle_get_certificate ctx root_hash)
let register dac_plugin node_context cctxt dac_public_keys_opt dac_sk_uris
hash_streamer =
let page_store = Node_context.get_page_store node_context in
Tezos_rpc.Directory.empty
|> register_serialize_dac_store_preimage
dac_plugin
cctxt
dac_sk_uris
page_store
hash_streamer
|> register_verify_external_message_signature dac_plugin dac_public_keys_opt
|> register_retrieve_preimage dac_plugin page_store
|> register_monitor_root_hashes dac_plugin hash_streamer
|> register_store_dac_member_signature node_context dac_plugin cctxt
(* TODO: /-/issues/4934
Once profiles are implemented, registration of the coordinator's
"/preimage" endpoint should be moved out of the [start_legacy]. *)
|> register_coordinator_preimage_endpoint dac_plugin hash_streamer page_store
|> register_get_certificate node_context dac_plugin
TODO : /-/issues/4750
Move this to RPC_server . Legacy once all operating modes are supported .
Move this to RPC_server.Legacy once all operating modes are supported. *)
let start_legacy ~rpc_address ~rpc_port ~threshold cctxt ctxt dac_pks_opt
dac_sk_uris =
let open Lwt_syntax in
let dir =
Tezos_rpc.Directory.register_dynamic_directory
Tezos_rpc.Directory.empty
Tezos_rpc.Path.open_root
(fun () ->
match Node_context.get_status ctxt with
| Ready {dac_plugin = (module Dac_plugin); hash_streamer} ->
let _threshold = threshold in
Lwt.return
(register
(module Dac_plugin)
ctxt
cctxt
dac_pks_opt
dac_sk_uris
hash_streamer)
| Starting -> Lwt.return Tezos_rpc.Directory.empty)
in
let rpc_address = P2p_addr.of_string_exn rpc_address in
let host = Ipaddr.V6.to_string rpc_address in
let node = `TCP (`Port rpc_port) in
let acl = RPC_server.Acl.default rpc_address in
let server =
RPC_server.init_server dir ~acl ~media_types:Media_type.all_media_types
in
Lwt.catch
(fun () ->
let* () =
RPC_server.launch
~host
server
~callback:(RPC_server.resto_callback server)
node
in
return_ok server)
fail_with_exn
let shutdown = RPC_server.shutdown
let install_finalizer rpc_server =
let open Lwt_syntax in
Lwt_exit.register_clean_up_callback ~loc:__LOC__ @@ fun exit_status ->
let* () = shutdown rpc_server in
let* () = Event.(emit shutdown_node exit_status) in
Tezos_base_unix.Internal_event_unix.close ()
| null | https://raw.githubusercontent.com/tezos/tezos-mirror/bbca5502eb430d3915ad697259d3bffc62c2d01d/src/lib_dac_node/RPC_server.ml | ocaml | ***************************************************************************
Open Source License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
***************************************************************************
Handler for subscribing to the streaming of root hashes via
GET monitor/root_hashes RPC call.
TODO: /-/issues/4934
Once profiles are implemented, registration of the coordinator's
"/preimage" endpoint should be moved out of the [start_legacy]. | Copyright ( c ) 2022 - 2023 Trili Tech < >
Copyright ( c ) 2022 Nomadic Labs < >
Copyright ( c ) 2023 Marigold < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
open Tezos_rpc_http
open Tezos_rpc_http_server
type error +=
| Cannot_construct_external_message
| Cannot_deserialize_external_message
let () =
register_error_kind
`Permanent
~id:"dac_cannot_construct_external_message"
~title:"External rollup message could not be constructed"
~description:"External rollup message could not be constructed"
~pp:(fun ppf () ->
Format.fprintf ppf "External rollup message could not be constructed")
Data_encoding.unit
(function Cannot_construct_external_message -> Some () | _ -> None)
(fun () -> Cannot_construct_external_message) ;
register_error_kind
`Permanent
~id:"dac_cannot_deserialize_rollup_external_message"
~title:"External rollup message could not be deserialized"
~description:"External rollup message could not be deserialized"
~pp:(fun ppf () ->
Format.fprintf ppf "External rollup message could not be deserialized")
Data_encoding.unit
(function Cannot_deserialize_external_message -> Some () | _ -> None)
(fun () -> Cannot_deserialize_external_message)
let add_service registerer service handler directory =
registerer directory service handler
let handle_serialize_dac_store_preimage dac_plugin cctxt dac_sk_uris page_store
hash_streamer (data, pagination_scheme) =
let open Lwt_result_syntax in
let open Pages_encoding in
let* root_hash =
match pagination_scheme with
| Pagination_scheme.Merkle_tree_V0 ->
FIXME : /-/issues/4897
Once new " PUT /preimage " endpoint is implemented , pushing
a new root hash to the data streamer should be moved there .
for testing streaming of root hashes should also use
the new endpoint .
Once new "PUT /preimage" endpoint is implemented, pushing
a new root hash to the data streamer should be moved there.
Tezt for testing streaming of root hashes should also use
the new endpoint. *)
let* root_hash =
Merkle_tree.V0.Filesystem.serialize_payload
dac_plugin
~page_store
data
in
let () = Data_streamer.publish hash_streamer root_hash in
let*! () =
Event.emit_root_hash_pushed_to_data_streamer dac_plugin root_hash
in
return root_hash
| Pagination_scheme.Hash_chain_V0 ->
Hash_chain.V0.serialize_payload
dac_plugin
~for_each_page:(fun (hash, content) ->
Page_store.Filesystem.save dac_plugin page_store ~hash ~content)
data
in
let* signature, witnesses =
Signature_manager.sign_root_hash dac_plugin cctxt dac_sk_uris root_hash
in
let*! external_message =
External_message.Default.make dac_plugin root_hash signature witnesses
in
match external_message with
| Ok external_message -> return @@ (root_hash, external_message)
| Error _ -> tzfail @@ Cannot_construct_external_message
let handle_verify_external_message_signature dac_plugin public_keys_opt
encoded_l1_message =
let open Lwt_result_syntax in
let external_message =
let open Option_syntax in
let* encoded_l1_message in
let* as_bytes = Hex.to_bytes @@ `Hex encoded_l1_message in
let ((module P) : Dac_plugin.t) = dac_plugin in
External_message.Default.of_bytes P.encoding as_bytes
in
match external_message with
| None -> tzfail @@ Cannot_deserialize_external_message
| Some {root_hash; signature; witnesses} ->
Signature_manager.verify
dac_plugin
~public_keys_opt
root_hash
signature
witnesses
let handle_retrieve_preimage dac_plugin page_store hash =
Page_store.Filesystem.load dac_plugin page_store hash
let handle_coordinator_preimage_endpoint dac_plugin page_store hash_streamer
payload =
let open Lwt_result_syntax in
let* root_hash =
Pages_encoding.Merkle_tree.V0.Filesystem.serialize_payload
dac_plugin
~page_store
payload
in
let () = Data_streamer.publish hash_streamer root_hash in
let*! () =
Event.emit_root_hash_pushed_to_data_streamer dac_plugin root_hash
in
return root_hash
let handle_monitor_root_hashes hash_streamer =
let open Lwt_syntax in
let stream, stopper = Data_streamer.handle_subscribe hash_streamer in
let shutdown () = Lwt_watcher.shutdown stopper in
let next () = Lwt_stream.get stream in
let* () = Event.(emit handle_new_subscription_to_hash_streamer ()) in
Tezos_rpc.Answer.return_stream {next; shutdown}
let handle_get_certificate ctx root_hash =
let open Lwt_result_syntax in
let node_store = Node_context.get_node_store ctx Store_sigs.Read_only in
let+ value_opt = Store.Certificate_store.find node_store root_hash in
Option.map
(fun Store.{aggregate_signature; witnesses} ->
Certificate_repr.{aggregate_signature; witnesses; root_hash})
value_opt
let register_serialize_dac_store_preimage ctx cctxt dac_sk_uris page_store
hash_streamer directory =
directory
|> add_service
Tezos_rpc.Directory.register0
(RPC_services.dac_store_preimage ctx)
(fun () input ->
handle_serialize_dac_store_preimage
ctx
cctxt
dac_sk_uris
page_store
hash_streamer
input)
let register_verify_external_message_signature dac_plugin public_keys_opt
directory =
directory
|> add_service
Tezos_rpc.Directory.register0
RPC_services.verify_external_message_signature
(fun external_message () ->
handle_verify_external_message_signature
dac_plugin
public_keys_opt
external_message)
let register_retrieve_preimage dac_plugin page_store =
add_service
Tezos_rpc.Directory.register1
(RPC_services.retrieve_preimage dac_plugin)
(fun hash () () -> handle_retrieve_preimage dac_plugin page_store hash)
let register_monitor_root_hashes dac_plugin hash_streamer dir =
Tezos_rpc.Directory.gen_register
dir
(Monitor_services.S.root_hashes dac_plugin)
(fun () () () -> handle_monitor_root_hashes hash_streamer)
let register_store_dac_member_signature ctx dac_plugin cctxt =
add_service
Tezos_rpc.Directory.register0
(RPC_services.store_dac_member_signature dac_plugin)
(fun () dac_member_signature ->
Signature_manager.Coordinator.handle_store_dac_member_signature
ctx
cctxt
dac_member_signature)
let register_coordinator_preimage_endpoint dac_plugin hash_streamer page_store =
add_service
Tezos_rpc.Directory.register0
(RPC_services.coordinator_post_preimage dac_plugin)
(fun () payload ->
handle_coordinator_preimage_endpoint
dac_plugin
page_store
hash_streamer
payload)
let register_get_certificate ctx dac_plugin =
add_service
Tezos_rpc.Directory.register1
(RPC_services.get_certificate dac_plugin)
(fun root_hash () () -> handle_get_certificate ctx root_hash)
let register dac_plugin node_context cctxt dac_public_keys_opt dac_sk_uris
hash_streamer =
let page_store = Node_context.get_page_store node_context in
Tezos_rpc.Directory.empty
|> register_serialize_dac_store_preimage
dac_plugin
cctxt
dac_sk_uris
page_store
hash_streamer
|> register_verify_external_message_signature dac_plugin dac_public_keys_opt
|> register_retrieve_preimage dac_plugin page_store
|> register_monitor_root_hashes dac_plugin hash_streamer
|> register_store_dac_member_signature node_context dac_plugin cctxt
|> register_coordinator_preimage_endpoint dac_plugin hash_streamer page_store
|> register_get_certificate node_context dac_plugin
TODO : /-/issues/4750
Move this to RPC_server . Legacy once all operating modes are supported .
Move this to RPC_server.Legacy once all operating modes are supported. *)
let start_legacy ~rpc_address ~rpc_port ~threshold cctxt ctxt dac_pks_opt
dac_sk_uris =
let open Lwt_syntax in
let dir =
Tezos_rpc.Directory.register_dynamic_directory
Tezos_rpc.Directory.empty
Tezos_rpc.Path.open_root
(fun () ->
match Node_context.get_status ctxt with
| Ready {dac_plugin = (module Dac_plugin); hash_streamer} ->
let _threshold = threshold in
Lwt.return
(register
(module Dac_plugin)
ctxt
cctxt
dac_pks_opt
dac_sk_uris
hash_streamer)
| Starting -> Lwt.return Tezos_rpc.Directory.empty)
in
let rpc_address = P2p_addr.of_string_exn rpc_address in
let host = Ipaddr.V6.to_string rpc_address in
let node = `TCP (`Port rpc_port) in
let acl = RPC_server.Acl.default rpc_address in
let server =
RPC_server.init_server dir ~acl ~media_types:Media_type.all_media_types
in
Lwt.catch
(fun () ->
let* () =
RPC_server.launch
~host
server
~callback:(RPC_server.resto_callback server)
node
in
return_ok server)
fail_with_exn
let shutdown = RPC_server.shutdown
let install_finalizer rpc_server =
let open Lwt_syntax in
Lwt_exit.register_clean_up_callback ~loc:__LOC__ @@ fun exit_status ->
let* () = shutdown rpc_server in
let* () = Event.(emit shutdown_node exit_status) in
Tezos_base_unix.Internal_event_unix.close ()
|
7b013a62f28f75fe75232130a2a27763c1b40b8284cc72030b62fd91bcb19f77 | ndmitchell/extra | Extra.hs | module Data.Foldable.Extra
( module Data.Foldable
, notNull
, sum'
, product'
, sumOn'
, productOn'
, anyM
, allM
, orM
, andM
, findM
, firstJustM
) where
import Data.Foldable
import qualified Control.Monad.Extra as MX
-- | Composition of 'not' and 'null'
notNull :: Foldable f => f a -> Bool
notNull = not . null
| A generalization of ' '' to ' Foldable ' instances .
sum' :: (Foldable f, Num a) => f a -> a
sum' = foldl' (+) 0
-- | A generalization of 'Data.List.Extra.product'' to 'Foldable' instances.
product' :: (Foldable f, Num a) => f a -> a
product' = foldl' (*) 1
-- | A generalization of 'Data.List.Extra.sumOn'' to 'Foldable' instances.
sumOn' :: (Foldable f, Num b) => (a -> b) -> f a -> b
sumOn' f = foldl' (\acc x -> acc + f x) 0
| A generalization of ' Data . List . '' to ' Foldable ' instances .
productOn' :: (Foldable f, Num b) => (a -> b) -> f a -> b
productOn' f = foldl' (\acc x -> acc * f x) 1
| A generalization of ' Control . Monad . Extra.anyM ' to ' Foldable ' instances . Retains the short - circuiting behaviour .
anyM :: (Foldable f, Monad m) => (a -> m Bool) -> f a -> m Bool
anyM p = foldr ((MX.||^) . p) (pure False)
| A generalization of ' Control . . Extra.allM ' to ' Foldable ' instances . Retains the short - circuiting behaviour .
allM :: (Foldable f, Monad m) => (a -> m Bool) -> f a -> m Bool
allM p = foldr ((MX.&&^) . p) (pure True)
| A generalization of ' Control . . Extra.orM ' to ' Foldable ' instances . Retains the short - circuiting behaviour .
orM :: (Foldable f, Monad m) => f (m Bool) -> m Bool
orM = anyM id
| A generalization of ' Control . . Extra.andM ' to ' Foldable ' instances . Retains the short - circuiting behaviour .
andM :: (Foldable f, Monad m) => f (m Bool) -> m Bool
andM = allM id
| A generalization of ' Control . . ' to ' Foldable ' instances .
findM :: (Foldable f, Monad m) => (a -> m Bool) -> f a -> m (Maybe a)
findM p = foldr (\x -> MX.ifM (p x) (pure $ Just x)) (pure Nothing)
| A generalization of ' Control . Monad . Extra.firstJustM ' to ' Foldable ' instances .
firstJustM :: (Foldable f, Monad m) => (a -> m (Maybe b)) -> f a -> m (Maybe b)
firstJustM p = MX.firstJustM p . toList
| null | https://raw.githubusercontent.com/ndmitchell/extra/207894522b3a9261bca021db9c91b514c0d0667c/src/Data/Foldable/Extra.hs | haskell | | Composition of 'not' and 'null'
| A generalization of 'Data.List.Extra.product'' to 'Foldable' instances.
| A generalization of 'Data.List.Extra.sumOn'' to 'Foldable' instances. | module Data.Foldable.Extra
( module Data.Foldable
, notNull
, sum'
, product'
, sumOn'
, productOn'
, anyM
, allM
, orM
, andM
, findM
, firstJustM
) where
import Data.Foldable
import qualified Control.Monad.Extra as MX
notNull :: Foldable f => f a -> Bool
notNull = not . null
| A generalization of ' '' to ' Foldable ' instances .
sum' :: (Foldable f, Num a) => f a -> a
sum' = foldl' (+) 0
product' :: (Foldable f, Num a) => f a -> a
product' = foldl' (*) 1
sumOn' :: (Foldable f, Num b) => (a -> b) -> f a -> b
sumOn' f = foldl' (\acc x -> acc + f x) 0
| A generalization of ' Data . List . '' to ' Foldable ' instances .
productOn' :: (Foldable f, Num b) => (a -> b) -> f a -> b
productOn' f = foldl' (\acc x -> acc * f x) 1
| A generalization of ' Control . Monad . Extra.anyM ' to ' Foldable ' instances . Retains the short - circuiting behaviour .
anyM :: (Foldable f, Monad m) => (a -> m Bool) -> f a -> m Bool
anyM p = foldr ((MX.||^) . p) (pure False)
| A generalization of ' Control . . Extra.allM ' to ' Foldable ' instances . Retains the short - circuiting behaviour .
allM :: (Foldable f, Monad m) => (a -> m Bool) -> f a -> m Bool
allM p = foldr ((MX.&&^) . p) (pure True)
| A generalization of ' Control . . Extra.orM ' to ' Foldable ' instances . Retains the short - circuiting behaviour .
orM :: (Foldable f, Monad m) => f (m Bool) -> m Bool
orM = anyM id
| A generalization of ' Control . . Extra.andM ' to ' Foldable ' instances . Retains the short - circuiting behaviour .
andM :: (Foldable f, Monad m) => f (m Bool) -> m Bool
andM = allM id
| A generalization of ' Control . . ' to ' Foldable ' instances .
findM :: (Foldable f, Monad m) => (a -> m Bool) -> f a -> m (Maybe a)
findM p = foldr (\x -> MX.ifM (p x) (pure $ Just x)) (pure Nothing)
| A generalization of ' Control . Monad . Extra.firstJustM ' to ' Foldable ' instances .
firstJustM :: (Foldable f, Monad m) => (a -> m (Maybe b)) -> f a -> m (Maybe b)
firstJustM p = MX.firstJustM p . toList
|
3f8df23117699850ad456b9d6b830ae9462c649acd7a71a93acd0745285e1f9c | revnull/fixfile | TestBTree.hs | # LANGUAGE DataKinds #
module TestBTree(testBTree) where
import Data.Function
import Data.List hiding (null)
import Data.Monoid
import Prelude hiding (null)
import Test.Tasty
import Test.Tasty.QuickCheck
import Test.QuickCheck
import Data.FixFile
import Data.FixFile.BTree
import qualified Data.FixFile.Tree23 as T23
prop_BTreeInsert :: [(Int, String)] -> Bool
prop_BTreeInsert xs = allIns where
empt = empty :: Fix (BTree 3 Int String)
fullSet = foldr (uncurry insertBTree) empt xs
allIns = all (not . null . flip lookupBTree fullSet) $ fmap fst xs
prop_BTreeDelete :: [(Int, String)] -> [Int] -> Bool
prop_BTreeDelete xs ys = allIns where
empt = empty :: Fix (BTree 3 Int String)
fullSet = foldr (uncurry insertBTree) empt xs
delSet = foldr deleteBTree fullSet ys
allIns = all (null . flip lookupBTree delSet) ys
prop_BTreeDeleteAll :: [(Int, String)] -> Bool
prop_BTreeDeleteAll xs = allIns where
empt = empty :: Fix (BTree 3 Int String)
keys = map fst xs
fullSet = foldr (uncurry insertBTree) empt xs
delSet = foldr deleteBTree fullSet keys
allIns = all (null . flip lookupBTree delSet) keys
prop_BTreeFilter :: [(Int, String)] -> Int -> String -> Bool
prop_BTreeFilter xs k v = testFilt where
empt = empty :: Fix (BTree 3 Int String)
baseSet = foldr (uncurry insertBTree) empt xs
delSet = deleteBTree k baseSet
fullSet = insertBTree k v $ insertBTree k ('a':v) delSet
filtSet = filterBTree k (== v) fullSet
testFilt = [v] == lookupBTree k filtSet
prop_BTreePartition :: [(Int, String)] -> Int -> Bool
prop_BTreePartition xs k = testPart where
empt = empty :: Fix (BTree 3 Int String)
fullTree = foldr (uncurry insertBTree) empt xs
(treeL, treeR) = partitionBTree k fullTree
emptT23 = empty :: Fix (T23.Tree23 (T23.Map Int Int))
counts = T23.toListMap $ foldr countItems emptT23 xs
countItems (k', _) m = T23.alterMap k' (Just . maybe 1 (1+)) m
correctTree (k', l) =
let (tree1,tree2) = if k' < k
then (treeL, treeR)
else (treeR, treeL)
in l == (length (lookupBTree k' tree1)) &&
null (lookupBTree k' tree2)
testPart = all correctTree counts
prop_BTreeNodeSize :: [(Int, String)] -> Bool
prop_BTreeNodeSize xs = depth fullSet1 >= depth fullSet2 where
empt1 = empty :: Fix (BTree 2 Int String)
empt2 = empty :: Fix (BTree 5 Int String)
fullSet1 = foldr (uncurry insertBTree) empt1 xs
fullSet2 = foldr (uncurry insertBTree) empt2 xs
prop_BTreeFunctor :: [String] -> Bool
prop_BTreeFunctor xs = testList == toListBTree bt' where
xs' = zip xs xs
testList = fmap (fmap length) $ sortBy (compare `on` fst) xs'
bt' = fmapF' length (fromListBTree xs') :: Fix (BTree 3 String Int)
prop_BTreeFoldable :: [(Int, Int)] -> Bool
prop_BTreeFoldable xs = listSum == btreeSum where
listSum = getSum $ foldMap (Sum . snd) xs
bt = fromListBTree xs :: Fix (BTree 3 Int Int)
btreeSum = getSum $ foldMapF Sum bt
prop_BTreeTraversable :: [(Int, Int)] -> Bool
prop_BTreeTraversable xs = testEvens evens' && testOdds odds' where
odds = fromListBTree $ filter (odd . snd) xs :: Fix (BTree 3 Int Int)
evens = fromListBTree $ filter (even . snd) xs :: Fix (BTree 3 Int Int)
f x = if even x then Nothing else Just x
odds' = toListBTree <$> traverseF' f odds
evens' = toListBTree <$> traverseF' f evens
testEvens Nothing = True
testEvens (Just l) = null l
testOdds Nothing = False
testOdds _ = True
testBTree = testGroup "BTree"
[
testProperty "BTree Insert" prop_BTreeInsert
,testProperty "BTree Delete" prop_BTreeDelete
,testProperty "BTree Delete All" prop_BTreeDeleteAll
,testProperty "BTree Filter" prop_BTreeFilter
,testProperty "BTree Partition" prop_BTreePartition
,testProperty "BTree Node Size" prop_BTreeNodeSize
,testProperty "BTree Functor" prop_BTreeFunctor
,testProperty "BTree Foldable" prop_BTreeFoldable
,testProperty "BTree Traversable" prop_BTreeTraversable
]
| null | https://raw.githubusercontent.com/revnull/fixfile/888945268c04a1cb2636e85e7368c0e8d36b8033/tests/TestBTree.hs | haskell | # LANGUAGE DataKinds #
module TestBTree(testBTree) where
import Data.Function
import Data.List hiding (null)
import Data.Monoid
import Prelude hiding (null)
import Test.Tasty
import Test.Tasty.QuickCheck
import Test.QuickCheck
import Data.FixFile
import Data.FixFile.BTree
import qualified Data.FixFile.Tree23 as T23
prop_BTreeInsert :: [(Int, String)] -> Bool
prop_BTreeInsert xs = allIns where
empt = empty :: Fix (BTree 3 Int String)
fullSet = foldr (uncurry insertBTree) empt xs
allIns = all (not . null . flip lookupBTree fullSet) $ fmap fst xs
prop_BTreeDelete :: [(Int, String)] -> [Int] -> Bool
prop_BTreeDelete xs ys = allIns where
empt = empty :: Fix (BTree 3 Int String)
fullSet = foldr (uncurry insertBTree) empt xs
delSet = foldr deleteBTree fullSet ys
allIns = all (null . flip lookupBTree delSet) ys
prop_BTreeDeleteAll :: [(Int, String)] -> Bool
prop_BTreeDeleteAll xs = allIns where
empt = empty :: Fix (BTree 3 Int String)
keys = map fst xs
fullSet = foldr (uncurry insertBTree) empt xs
delSet = foldr deleteBTree fullSet keys
allIns = all (null . flip lookupBTree delSet) keys
prop_BTreeFilter :: [(Int, String)] -> Int -> String -> Bool
prop_BTreeFilter xs k v = testFilt where
empt = empty :: Fix (BTree 3 Int String)
baseSet = foldr (uncurry insertBTree) empt xs
delSet = deleteBTree k baseSet
fullSet = insertBTree k v $ insertBTree k ('a':v) delSet
filtSet = filterBTree k (== v) fullSet
testFilt = [v] == lookupBTree k filtSet
prop_BTreePartition :: [(Int, String)] -> Int -> Bool
prop_BTreePartition xs k = testPart where
empt = empty :: Fix (BTree 3 Int String)
fullTree = foldr (uncurry insertBTree) empt xs
(treeL, treeR) = partitionBTree k fullTree
emptT23 = empty :: Fix (T23.Tree23 (T23.Map Int Int))
counts = T23.toListMap $ foldr countItems emptT23 xs
countItems (k', _) m = T23.alterMap k' (Just . maybe 1 (1+)) m
correctTree (k', l) =
let (tree1,tree2) = if k' < k
then (treeL, treeR)
else (treeR, treeL)
in l == (length (lookupBTree k' tree1)) &&
null (lookupBTree k' tree2)
testPart = all correctTree counts
prop_BTreeNodeSize :: [(Int, String)] -> Bool
prop_BTreeNodeSize xs = depth fullSet1 >= depth fullSet2 where
empt1 = empty :: Fix (BTree 2 Int String)
empt2 = empty :: Fix (BTree 5 Int String)
fullSet1 = foldr (uncurry insertBTree) empt1 xs
fullSet2 = foldr (uncurry insertBTree) empt2 xs
prop_BTreeFunctor :: [String] -> Bool
prop_BTreeFunctor xs = testList == toListBTree bt' where
xs' = zip xs xs
testList = fmap (fmap length) $ sortBy (compare `on` fst) xs'
bt' = fmapF' length (fromListBTree xs') :: Fix (BTree 3 String Int)
prop_BTreeFoldable :: [(Int, Int)] -> Bool
prop_BTreeFoldable xs = listSum == btreeSum where
listSum = getSum $ foldMap (Sum . snd) xs
bt = fromListBTree xs :: Fix (BTree 3 Int Int)
btreeSum = getSum $ foldMapF Sum bt
prop_BTreeTraversable :: [(Int, Int)] -> Bool
prop_BTreeTraversable xs = testEvens evens' && testOdds odds' where
odds = fromListBTree $ filter (odd . snd) xs :: Fix (BTree 3 Int Int)
evens = fromListBTree $ filter (even . snd) xs :: Fix (BTree 3 Int Int)
f x = if even x then Nothing else Just x
odds' = toListBTree <$> traverseF' f odds
evens' = toListBTree <$> traverseF' f evens
testEvens Nothing = True
testEvens (Just l) = null l
testOdds Nothing = False
testOdds _ = True
testBTree = testGroup "BTree"
[
testProperty "BTree Insert" prop_BTreeInsert
,testProperty "BTree Delete" prop_BTreeDelete
,testProperty "BTree Delete All" prop_BTreeDeleteAll
,testProperty "BTree Filter" prop_BTreeFilter
,testProperty "BTree Partition" prop_BTreePartition
,testProperty "BTree Node Size" prop_BTreeNodeSize
,testProperty "BTree Functor" prop_BTreeFunctor
,testProperty "BTree Foldable" prop_BTreeFoldable
,testProperty "BTree Traversable" prop_BTreeTraversable
]
| |
03aaef6a235f867885da88e5847039c10e9732396ebe1269632381b972edf1dd | facebook/duckling | Corpus.hs | Copyright ( c ) 2016 - present , Facebook , Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Ordinal.EL.Corpus
( corpus ) where
import Data.String
import Prelude
import Duckling.Locale
import Duckling.Ordinal.Types
import Duckling.Resolve
import Duckling.Testing.Types
corpus :: Corpus
corpus = (testContext {locale = makeLocale EL Nothing}, testOptions, allExamples)
allExamples :: [Example]
allExamples = concat
[ examples (OrdinalData 1)
[ "πρώτος"
, "1ος"
, "1ου"
, "πρώτων"
]
, examples (OrdinalData 2)
[ "δεύτερος"
, "2ου"
, "δευτέρου"
]
, examples (OrdinalData 3)
[ "τρίτος"
, "3ης"
]
, examples (OrdinalData 4)
[ "τέταρτος"
, "4ος"
]
, examples (OrdinalData 8)
[ "όγδοος"
, "ογδόου"
, "8ος"
]
, examples (OrdinalData 25)
[ "εικοστός πέμπτος"
, "25ος"
, "εικοστού πέμπτου"
]
, examples (OrdinalData 31)
[ "τριακοστός πρώτος"
, "31ος"
]
, examples (OrdinalData 42)
[ "τεσσαρακοστός δεύτερος"
, "42 ος"
]
, examples (OrdinalData 77)
[ "εβδομηκοστού εβδόμου"
, "77ου"
]
, examples (OrdinalData 90)
[ "ενενηκοστός"
, "90ος"
]
]
| null | https://raw.githubusercontent.com/facebook/duckling/72f45e8e2c7385f41f2f8b1f063e7b5daa6dca94/Duckling/Ordinal/EL/Corpus.hs | haskell | All rights reserved.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree.
# LANGUAGE OverloadedStrings # | Copyright ( c ) 2016 - present , Facebook , Inc.
module Duckling.Ordinal.EL.Corpus
( corpus ) where
import Data.String
import Prelude
import Duckling.Locale
import Duckling.Ordinal.Types
import Duckling.Resolve
import Duckling.Testing.Types
corpus :: Corpus
corpus = (testContext {locale = makeLocale EL Nothing}, testOptions, allExamples)
allExamples :: [Example]
allExamples = concat
[ examples (OrdinalData 1)
[ "πρώτος"
, "1ος"
, "1ου"
, "πρώτων"
]
, examples (OrdinalData 2)
[ "δεύτερος"
, "2ου"
, "δευτέρου"
]
, examples (OrdinalData 3)
[ "τρίτος"
, "3ης"
]
, examples (OrdinalData 4)
[ "τέταρτος"
, "4ος"
]
, examples (OrdinalData 8)
[ "όγδοος"
, "ογδόου"
, "8ος"
]
, examples (OrdinalData 25)
[ "εικοστός πέμπτος"
, "25ος"
, "εικοστού πέμπτου"
]
, examples (OrdinalData 31)
[ "τριακοστός πρώτος"
, "31ος"
]
, examples (OrdinalData 42)
[ "τεσσαρακοστός δεύτερος"
, "42 ος"
]
, examples (OrdinalData 77)
[ "εβδομηκοστού εβδόμου"
, "77ου"
]
, examples (OrdinalData 90)
[ "ενενηκοστός"
, "90ος"
]
]
|
a40e4a5c4bc893ac095fa89e49ae19ca59bc04833ca70017ecb00cdcc7b6db68 | jdsandifer/AutoLISP | CONTRAILCOUNTDIALOG.lsp | ;;;;;;;[ Continuous Rail Count Dialog ];;;;;;;;;
;; ;;
;; Function for counting rails with long ;;
;; center lines. Uses a dialog box to select ;;
the layer for the lines . ; ;
;; ;;
;;::::::::::::::::::::::::::::::::::::::::::::::;;
;; ;;
Author : ( Copyright 2016 ) ; ;
;; Written: 02/03/2016 ;;
;; ;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; ;;
;; 02/03/2016 ;;
;; - Started with the dialog box. ;;
;; ;;
;; Todo: ;;
;; - ;;
;; ;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
JD : ClearHash - Copyright 2016
(defun C:ContRailCountDialog ( / )
(princ "Hi there!")
; silent end
(princ))
; silent load
(princ) | null | https://raw.githubusercontent.com/jdsandifer/AutoLISP/054c8400fae84c223c3113e049a1ab87d374ba37/Old/CONTRAILCOUNTDIALOG.lsp | lisp | [ Continuous Rail Count Dialog ];;;;;;;;;
;;
Function for counting rails with long ;;
center lines. Uses a dialog box to select ;;
;
;;
::::::::::::::::::::::::::::::::::::::::::::::;;
;;
;
Written: 02/03/2016 ;;
;;
;;
02/03/2016 ;;
- Started with the dialog box. ;;
;;
Todo: ;;
- ;;
;;
silent end
silent load |
JD : ClearHash - Copyright 2016
(defun C:ContRailCountDialog ( / )
(princ "Hi there!")
(princ))
(princ) |
91ed5d6aa834eb2d07cc5ab4b16ad764e8a52210977f5b31818ca60d19295836 | buildsome/buildsome | Printer.hs | # LANGUAGE FlexibleInstances , RecordWildCards , GeneralizedNewtypeDeriving #
module Lib.Printer
( Id(..)
, Printable
, Printer, new, newFrom, render
, printStrLn, rawPrintStrLn
, printWrap
, ColorScheme(..)
, rawPrintWrap, rawPrinterWrap
) where
import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as BS8
import Data.IORef
import qualified Data.List as List
import Data.String (IsString(..))
import Lib.ColorText (ColorText)
import qualified Lib.ColorText as ColorText
import Lib.Exception (onExceptionWith, bracket_, putLn, swallowExceptions)
import qualified Lib.Show as Show
import qualified System.IO as IO
import Text.Printf (printf)
import qualified Prelude.Compat as Prelude
import Prelude.Compat hiding (lines, putStrLn)
newtype Id = Id Int
deriving Enum
instance Show Id where
# INLINE show #
show (Id i) = printf "T%03d" i
class (IsString p, Monoid p) => Printable p where
intercalate :: p -> [p] -> p
lines :: p -> [p]
putStrLn :: (ColorText -> ByteString) -> p -> IO ()
instance Printable String where
intercalate = List.intercalate
lines = List.lines
putStrLn _ = Prelude.putStrLn
# INLINE intercalate #
# INLINE lines #
# INLINE putStrLn #
instance Printable ByteString where
intercalate = BS8.intercalate
lines = BS8.lines
putStrLn _ = BS8.putStrLn
# INLINE intercalate #
# INLINE lines #
# INLINE putStrLn #
instance Printable ColorText where
intercalate = ColorText.intercalate
lines = ColorText.lines
putStrLn toBS = BS8.putStrLn . toBS
# INLINE intercalate #
# INLINE lines #
# INLINE putStrLn #
data Printer = Printer
{ _printerId :: Id
, printerRender :: ColorText -> ByteString
, printerIndentLevelRef :: IORef Int
}
render :: Printer -> ColorText -> ByteString
render = printerRender
{-# INLINE new #-}
new :: (ColorText -> ByteString) -> Id -> IO Printer
new toBS pid = Printer pid toBS <$> newIORef 0
# INLINE newFrom #
newFrom :: Printer -> Id -> IO Printer
newFrom (Printer _id toBS indentRef) pid =
Printer pid toBS <$> (newIORef =<< readIORef indentRef)
{-# INLINE printStrLn #-}
-- | Print a possibly multi-line string, prefixing every line with the
-- printer's id and the current indentation.
printStrLn :: Printable str => Printer -> str -> IO ()
printStrLn printer@(Printer pid _ indentRef) str = do
  indentLevel <- readIORef indentRef
  let prefix = Show.show pid <> " " <> mconcat (replicate indentLevel " ")
  rawPrintStrLn printer $ intercalate "\n" $ map (prefix <>) $ lines str
{-# INLINE rawPrintStrLn #-}
-- | Print a line verbatim (no id prefix, no indentation).  The write
-- goes through swallowExceptions, so printing failures do not
-- propagate to the caller.
rawPrintStrLn :: Printable str => Printer -> str -> IO ()
rawPrintStrLn (Printer _ toBS _) = swallowExceptions . putStrLn toBS
-- | Styling hooks for the outcome markers printed by 'printWrap'.
data ColorScheme = ColorScheme
  { cException :: ColorText -> ColorText  -- ^ style for exception reports
  , cOk :: ColorText -> ColorText         -- ^ style for the success marker
  }
{-# INLINE printWrap #-}
-- | Run an action bracketed by @{ …@ / @} …@ banner lines, indenting
-- everything printed inside by one level.  On success the closing
-- banner carries \"OK\"; if the action throws, the first line of the
-- exception is shown instead and the exception is rethrown by
-- onExceptionWith.  The indent is restored even on exceptions via
-- bracket_.
printWrap :: ColorScheme -> Printer -> ColorText -> ColorText -> IO a -> IO a
printWrap ColorScheme{..} printer str entryMsg body = do
  printStrLn printer before
  res <-
    wrappedBody `onExceptionWith` \e ->
      printStrLn printer $ after $ cException $
      "EXCEPTION: " <> (fromString . concat . take 1 . lines . show) e
  printStrLn printer $ after $ cOk "OK"
  pure res
  where
    indentLevel = printerIndentLevelRef printer
    addIndent d = atomicModifyIORef' indentLevel $ \old -> (old+d, ())
    wrappedBody = bracket_ (addIndent 1) (addIndent (-1)) body
    before = mconcat ["{ ", str, " ", entryMsg]
    after suffix = mconcat ["} ", str, " ", suffix]
{-# INLINE rawPrintWrap #-}
-- | Bracket an action between raw @str{@ and @str}@ lines on stdout
-- (putLn comes from Lib.Exception; presumably an exception-safe
-- handle write -- confirm against that module).
rawPrintWrap :: String -> IO a -> IO a
rawPrintWrap str = bracket_ (putLn IO.stdout (str ++ "{")) (putLn IO.stdout (str ++ "}"))
{-# INLINE rawPrinterWrap #-}
-- | Like 'rawPrintWrap' but prints through the given printer, so the
-- banner lines get the id prefix and current indentation.
rawPrinterWrap :: Printer -> String -> IO a -> IO a
rawPrinterWrap printer str =
  bracket_ (printStrLn printer (str ++ "{")) (printStrLn printer (str ++ "}"))
| null | https://raw.githubusercontent.com/buildsome/buildsome/479b92bb74a474a5f0c3292b79202cc850bd8653/src/Lib/Printer.hs | haskell | # INLINE new # | # LANGUAGE FlexibleInstances , RecordWildCards , GeneralizedNewtypeDeriving #
{-# LANGUAGE FlexibleInstances, RecordWildCards, GeneralizedNewtypeDeriving #-}
-- | Line-oriented printing: each printer prefixes its output with a
-- numeric id and an indentation level kept in a mutable cell.
module Lib.Printer
    ( Id(..)
    , Printable
    , Printer, new, newFrom, render
    , printStrLn, rawPrintStrLn
    , printWrap
    , ColorScheme(..)
    , rawPrintWrap, rawPrinterWrap
    ) where

import Data.ByteString (ByteString)
import qualified Data.ByteString.Char8 as BS8
import Data.IORef
import qualified Data.List as List
import Data.String (IsString(..))
import Lib.ColorText (ColorText)
import qualified Lib.ColorText as ColorText
import Lib.Exception (onExceptionWith, bracket_, putLn, swallowExceptions)
import qualified Lib.Show as Show
import qualified System.IO as IO
import Text.Printf (printf)

import qualified Prelude.Compat as Prelude
import Prelude.Compat hiding (lines, putStrLn)

-- | Numeric printer identifier.  Rendered zero-padded with a @T@
-- prefix, e.g. @Id 7@ shows as @\"T007\"@.
newtype Id = Id Int
  deriving Enum

instance Show Id where
  {-# INLINE show #-}
  show (Id i) = printf "T%03d" i

-- | String-like values that can be joined, split into lines and
-- printed to stdout.
class (IsString p, Monoid p) => Printable p where
  intercalate :: p -> [p] -> p
  lines :: p -> [p]
  -- | Print one line.  The renderer is only used by the 'ColorText'
  -- instance; the others ignore it.
  putStrLn :: (ColorText -> ByteString) -> p -> IO ()

instance Printable String where
  intercalate = List.intercalate
  lines = List.lines
  putStrLn _ = Prelude.putStrLn
  {-# INLINE intercalate #-}
  {-# INLINE lines #-}
  {-# INLINE putStrLn #-}

instance Printable ByteString where
  intercalate = BS8.intercalate
  lines = BS8.lines
  putStrLn _ = BS8.putStrLn
  {-# INLINE intercalate #-}
  {-# INLINE lines #-}
  {-# INLINE putStrLn #-}

instance Printable ColorText where
  intercalate = ColorText.intercalate
  lines = ColorText.lines
  -- Render the colored text to bytes with the supplied renderer first.
  putStrLn toBS = BS8.putStrLn . toBS
  {-# INLINE intercalate #-}
  {-# INLINE lines #-}
  {-# INLINE putStrLn #-}

-- | A printer that tags output with its 'Id' and tracks the current
-- indentation level in a mutable cell.
data Printer = Printer
  { _printerId :: Id
  , printerRender :: ColorText -> ByteString  -- ^ renderer for colored text
  , printerIndentLevelRef :: IORef Int        -- ^ current nesting depth
  }

-- | Expose the printer's 'ColorText' rendering function.
render :: Printer -> ColorText -> ByteString
render = printerRender

{-# INLINE new #-}
-- | Create a printer with the given renderer and id, starting at
-- indentation level 0.
new :: (ColorText -> ByteString) -> Id -> IO Printer
new toBS pid = Printer pid toBS <$> newIORef 0

{-# INLINE newFrom #-}
-- | Derive a printer from an existing one: same renderer, fresh
-- indentation cell initialised to the parent's current level.
newFrom :: Printer -> Id -> IO Printer
newFrom (Printer _id toBS indentRef) pid =
  Printer pid toBS <$> (newIORef =<< readIORef indentRef)

{-# INLINE printStrLn #-}
-- | Print a possibly multi-line string, prefixing every line with the
-- printer's id and the current indentation.
printStrLn :: Printable str => Printer -> str -> IO ()
printStrLn printer@(Printer pid _ indentRef) str = do
  indentLevel <- readIORef indentRef
  let prefix = Show.show pid <> " " <> mconcat (replicate indentLevel " ")
  rawPrintStrLn printer $ intercalate "\n" $ map (prefix <>) $ lines str

{-# INLINE rawPrintStrLn #-}
-- | Print a line verbatim (no id prefix, no indentation), swallowing
-- printing failures via swallowExceptions.
rawPrintStrLn :: Printable str => Printer -> str -> IO ()
rawPrintStrLn (Printer _ toBS _) = swallowExceptions . putStrLn toBS

-- | Styling hooks for the outcome markers printed by 'printWrap'.
data ColorScheme = ColorScheme
  { cException :: ColorText -> ColorText  -- ^ style for exception reports
  , cOk :: ColorText -> ColorText         -- ^ style for the success marker
  }

{-# INLINE printWrap #-}
-- | Run an action bracketed by @{ …@ / @} …@ banner lines, indenting
-- everything printed inside by one level.  On success the closing
-- banner carries \"OK\"; on exception the first line of the exception
-- is shown instead and the exception propagates.
printWrap :: ColorScheme -> Printer -> ColorText -> ColorText -> IO a -> IO a
printWrap ColorScheme{..} printer str entryMsg body = do
  printStrLn printer before
  res <-
    wrappedBody `onExceptionWith` \e ->
      printStrLn printer $ after $ cException $
      "EXCEPTION: " <> (fromString . concat . take 1 . lines . show) e
  printStrLn printer $ after $ cOk "OK"
  pure res
  where
    indentLevel = printerIndentLevelRef printer
    addIndent d = atomicModifyIORef' indentLevel $ \old -> (old+d, ())
    wrappedBody = bracket_ (addIndent 1) (addIndent (-1)) body
    before = mconcat ["{ ", str, " ", entryMsg]
    after suffix = mconcat ["} ", str, " ", suffix]

{-# INLINE rawPrintWrap #-}
-- | Bracket an action between raw @str{@ and @str}@ lines on stdout.
rawPrintWrap :: String -> IO a -> IO a
rawPrintWrap str = bracket_ (putLn IO.stdout (str ++ "{")) (putLn IO.stdout (str ++ "}"))

{-# INLINE rawPrinterWrap #-}
-- | Like 'rawPrintWrap' but prints through the given printer, so the
-- banner lines get the id prefix and current indentation.
rawPrinterWrap :: Printer -> String -> IO a -> IO a
rawPrinterWrap printer str =
  bracket_ (printStrLn printer (str ++ "{")) (printStrLn printer (str ++ "}"))
|
c611e87e1a296de0b2fbd4dbfda22401d8d8f85677c779a3904032a652517789 | cblp/crdt | PNCounter.hs | # LANGUAGE NamedFieldPuns #
module CRDT.Cv.PNCounter
( PNCounter (..)
, initial
, query
-- * Operations
, decrement
, increment
) where
import Data.Semilattice (Semilattice)
import CRDT.Cv.GCounter (GCounter)
import qualified CRDT.Cv.GCounter as GCounter
{-|
Positive-negative counter. Allows incrementing and decrementing.
Nice example of combining of existing CvRDT ('GCounter' in this case)
to create another CvRDT.
-}
data PNCounter a = PNCounter
    { positive :: !(GCounter a)  -- ^ accumulates all increments
    , negative :: !(GCounter a)  -- ^ accumulates all decrements
    }
    deriving (Eq, Show)
-- | Merge is the pointwise merge of the two underlying G-counters.
instance Ord a => Semigroup (PNCounter a) where
    PNCounter p1 n1 <> PNCounter p2 n2 = PNCounter (p1 <> p2) (n1 <> n2)
-- | See 'CvRDT'
instance Ord a => Semilattice (PNCounter a)
-- | Get value from the state: total increments minus total decrements.
query :: Num a => PNCounter a -> a
query PNCounter{positive, negative} =
    GCounter.query positive - GCounter.query negative
-- | Decrement counter (recorded as an increment of the negative half).
decrement
    :: Num a
    => Word -- ^ replica id
    -> PNCounter a
    -> PNCounter a
decrement i pnc@PNCounter{negative} =
    pnc{negative = GCounter.increment i negative}
-- | Increment counter (recorded on the positive half).
increment
    :: Num a
    => Word -- ^ replica id
    -> PNCounter a
    -> PNCounter a
increment i pnc@PNCounter{positive} =
    pnc{positive = GCounter.increment i positive}
-- | Initial state: both halves start from the empty G-counter.
initial :: PNCounter a
initial = PNCounter{positive = GCounter.initial, negative = GCounter.initial}
| null | https://raw.githubusercontent.com/cblp/crdt/175d7ee7df66de1f013ee167ac31719752e0c20b/crdt/lib/CRDT/Cv/PNCounter.hs | haskell | * Operations
| Get value from the state
| Decrement counter
^ replica id
| Increment counter
^ replica id
| Initial state | # LANGUAGE NamedFieldPuns #
{-# LANGUAGE NamedFieldPuns #-}
-- | A positive-negative (PN) state-based CRDT counter.
module CRDT.Cv.PNCounter
    ( PNCounter (..)
    , initial
    , query
    -- * Operations
    , decrement
    , increment
    ) where

import Data.Semilattice (Semilattice)

import CRDT.Cv.GCounter (GCounter)
import qualified CRDT.Cv.GCounter as GCounter

{-|
Positive-negative counter. Allows incrementing and decrementing.
Nice example of combining of existing CvRDT ('GCounter' in this case)
to create another CvRDT.
-}
data PNCounter a = PNCounter
    { positive :: !(GCounter a)  -- ^ accumulates all increments
    , negative :: !(GCounter a)  -- ^ accumulates all decrements
    }
    deriving (Eq, Show)

-- | Merge is the pointwise merge of the two underlying G-counters.
instance Ord a => Semigroup (PNCounter a) where
    PNCounter p1 n1 <> PNCounter p2 n2 = PNCounter (p1 <> p2) (n1 <> n2)

-- | See 'CvRDT'
instance Ord a => Semilattice (PNCounter a)

-- | Get value from the state: total increments minus total decrements.
query :: Num a => PNCounter a -> a
query PNCounter{positive, negative} =
    GCounter.query positive - GCounter.query negative

-- | Decrement counter (recorded as an increment of the negative half).
-- NOTE: this copy of the file had lost the @=> Word@ constraint line
-- from the signature during extraction; restored here.
decrement
    :: Num a
    => Word -- ^ replica id
    -> PNCounter a
    -> PNCounter a
decrement i pnc@PNCounter{negative} =
    pnc{negative = GCounter.increment i negative}

-- | Increment counter (recorded on the positive half).
increment
    :: Num a
    => Word -- ^ replica id
    -> PNCounter a
    -> PNCounter a
increment i pnc@PNCounter{positive} =
    pnc{positive = GCounter.increment i positive}

-- | Initial state: both halves start from the empty G-counter.
initial :: PNCounter a
initial = PNCounter{positive = GCounter.initial, negative = GCounter.initial}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.