_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
0553f0bac96e69b57beee926b1cf38ab54634f2e8f87a5438bd7b7ab4b8bc5b8 | CSCfi/rems | form_templates_for_old_forms.clj | (ns rems.migrations.form-templates-for-old-forms
(:require [clojure.test :refer :all]
[hugsql.core :as hugsql]
[rems.json :as json]))
;; SQL queries repeated here so that this migration is standalone
(hugsql/def-db-fns-from-string
"
-- :name get-forms :? :*
SELECT id FROM application_form;
-- :name get-form-template-impl :? :1
SELECT
id,
organization,
title,
start,
endt as \"end\",
fields::TEXT,
enabled,
archived
FROM form_template
WHERE id = :id;
-- :name set-template-fields! :!
UPDATE form_template
SET fields = :fields::jsonb
WHERE id = :id;
-- :name get-form-items :? :*
SELECT
item.id,
formitemoptional,
type,
value,
itemorder,
item.visibility,
itemmap.maxlength
FROM application_form form
LEFT OUTER JOIN application_form_item_map itemmap ON form.id = itemmap.formId
LEFT OUTER JOIN application_form_item item ON item.id = itemmap.formItemId
WHERE form.id = :id AND item.id IS NOT NULL
ORDER BY itemorder;")
(defn get-template-fields [db params]
(-> (get-form-template-impl db params)
:fields
json/parse-string))
(defn should-update? [fields]
(not (every? #(contains? % :id) fields)))
(defn update-fields [template-fields items]
(assert (= (count template-fields)
(count items)))
(let [result (mapv #(assoc %2 :id (:id %1)) items template-fields)]
(assert (not (should-update? result)))
result))
(deftest test-update-template
(is (= [{:type "description"
:title {:fi "Projektin nimi" :en "Project name"}
:input-prompt {:fi "Projekti" :en "Project"}
:id 1
:optional false}
{:type "texta"
:title {:fi "Projektin tarkoitus" :en "Purpose of the project"}
:input-prompt
{:fi "Projektin tarkoitus on..."
:en "The purpose of the project is to..."}
:id 7
:optional false}
{:type "date"
:title
{:fi "Projektin aloituspäivä" :en "Start date of the project"}
:id 3
:optional true}]
(update-fields
[{:type "description"
:title {:fi "Projektin nimi" :en "Project name"}
:input-prompt {:fi "Projekti" :en "Project"}
:optional false}
{:type "texta"
:title {:fi "Projektin tarkoitus" :en "Purpose of the project"}
:input-prompt
{:fi "Projektin tarkoitus on..."
:en "The purpose of the project is to..."}
:optional false}
{:type "date"
:title
{:fi "Projektin aloituspäivä" :en "Start date of the project"}
:optional true}]
[{:id 1} {:id 7} {:id 3}]))))
(defn migrate-up [{:keys [conn]}]
(doseq [{:keys [id]} (get-forms conn)]
(let [fields (get-template-fields conn {:id id})]
(when (should-update? fields)
(let [items (get-form-items conn {:id id})
updated (update-fields fields items)]
(set-template-fields! conn {:id id :fields (json/generate-string updated)}))))))
| null | https://raw.githubusercontent.com/CSCfi/rems/644ef6df4518b8e382cdfeadd7719e29508a26f0/src/clj/rems/migrations/form_templates_for_old_forms.clj | clojure | SQL queries repeated here so that this migration is standalone
") | (ns rems.migrations.form-templates-for-old-forms
(:require [clojure.test :refer :all]
[hugsql.core :as hugsql]
[rems.json :as json]))
(hugsql/def-db-fns-from-string
"
-- :name get-forms :? :*
-- :name get-form-template-impl :? :1
SELECT
id,
organization,
title,
start,
endt as \"end\",
fields::TEXT,
enabled,
archived
FROM form_template
-- :name set-template-fields! :!
UPDATE form_template
SET fields = :fields::jsonb
-- :name get-form-items :? :*
SELECT
item.id,
formitemoptional,
type,
value,
itemorder,
item.visibility,
itemmap.maxlength
FROM application_form form
LEFT OUTER JOIN application_form_item_map itemmap ON form.id = itemmap.formId
LEFT OUTER JOIN application_form_item item ON item.id = itemmap.formItemId
WHERE form.id = :id AND item.id IS NOT NULL
(defn get-template-fields [db params]
(-> (get-form-template-impl db params)
:fields
json/parse-string))
(defn should-update? [fields]
(not (every? #(contains? % :id) fields)))
(defn update-fields [template-fields items]
(assert (= (count template-fields)
(count items)))
(let [result (mapv #(assoc %2 :id (:id %1)) items template-fields)]
(assert (not (should-update? result)))
result))
(deftest test-update-template
(is (= [{:type "description"
:title {:fi "Projektin nimi" :en "Project name"}
:input-prompt {:fi "Projekti" :en "Project"}
:id 1
:optional false}
{:type "texta"
:title {:fi "Projektin tarkoitus" :en "Purpose of the project"}
:input-prompt
{:fi "Projektin tarkoitus on..."
:en "The purpose of the project is to..."}
:id 7
:optional false}
{:type "date"
:title
{:fi "Projektin aloituspäivä" :en "Start date of the project"}
:id 3
:optional true}]
(update-fields
[{:type "description"
:title {:fi "Projektin nimi" :en "Project name"}
:input-prompt {:fi "Projekti" :en "Project"}
:optional false}
{:type "texta"
:title {:fi "Projektin tarkoitus" :en "Purpose of the project"}
:input-prompt
{:fi "Projektin tarkoitus on..."
:en "The purpose of the project is to..."}
:optional false}
{:type "date"
:title
{:fi "Projektin aloituspäivä" :en "Start date of the project"}
:optional true}]
[{:id 1} {:id 7} {:id 3}]))))
(defn migrate-up [{:keys [conn]}]
(doseq [{:keys [id]} (get-forms conn)]
(let [fields (get-template-fields conn {:id id})]
(when (should-update? fields)
(let [items (get-form-items conn {:id id})
updated (update-fields fields items)]
(set-template-fields! conn {:id id :fields (json/generate-string updated)}))))))
|
563ba168065f194cfbdcaeb7840302626cdbb3bdd60277881f94561822883579 | johnwhitington/ocamli | exercise05.ml | let rec insert f x l =
match l with
[] -> [x]
| h::t ->
if f x h
then x :: h :: t
else h :: insert f x t
let rec sort f l =
match l with
[] -> []
| h::t -> insert f h (sort f t)
| null | https://raw.githubusercontent.com/johnwhitington/ocamli/28da5d87478a51583a6cb792bf3a8ee44b990e9f/OCaml%20from%20the%20Very%20Beginning/Chapter%206/exercise05.ml | ocaml | let rec insert f x l =
match l with
[] -> [x]
| h::t ->
if f x h
then x :: h :: t
else h :: insert f x t
let rec sort f l =
match l with
[] -> []
| h::t -> insert f h (sort f t)
| |
010507fff528a13bf8f66f0e5b4668bae0dbd7ac9f29689d8df276cc444bc332 | input-output-hk/plutus-apps | Crowdfunding.hs | | Crowdfunding contract implemented using the [ [ ] ] interface .
-- This is the fully parallel version that collects all contributions
-- in a single transaction. This is, of course, limited by the maximum
-- number of inputs a transaction can have.
# LANGUAGE AllowAmbiguousTypes #
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveAnyClass #-}
# LANGUAGE DeriveGeneric #
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE FlexibleContexts #-}
# LANGUAGE NamedFieldPuns #
# LANGUAGE NoImplicitPrelude #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
{-# LANGUAGE TypeOperators #-}
# LANGUAGE ViewPatterns #
# OPTIONS_GHC -fno - ignore - interface - pragmas #
# OPTIONS_GHC -fno - omit - interface - pragmas #
# OPTIONS_GHC -fno - specialise #
# OPTIONS_GHC -fplugin - opt PlutusTx . Plugin : debug - context #
module Plutus.Contracts.Crowdfunding (
-- * Campaign parameters
Campaign(..)
, CrowdfundingSchema
, crowdfunding
, theCampaign
-- * Functionality for campaign contributors
, contribute
, Contribution(..)
-- * Functionality for campaign owners
, scheduleCollection
, campaignAddress
-- * Validator script
, contributionScript
, mkValidator
, mkCampaign
, CampaignAction(..)
, collectionRange
, refundRange
-- * Traces
, startCampaign
, makeContribution
, successfulCampaign
) where
import Cardano.Node.Emulator.Params qualified as Params
import Cardano.Node.Emulator.TimeSlot qualified as TimeSlot
import Control.Applicative (Applicative (..))
import Control.Monad (void)
import Data.Aeson (FromJSON, ToJSON)
import Data.Text (Text)
import Data.Text qualified as Text
import GHC.Generics (Generic)
import Ledger (PaymentPubKeyHash (unPaymentPubKeyHash), getCardanoTxId)
import Ledger qualified
import Ledger.Interval qualified as Interval
import Ledger.Tx.Constraints qualified as Constraints
import Ledger.Tx.Constraints.ValidityInterval qualified as ValidityInterval
import Ledger.Typed.Scripts qualified as Scripts hiding (validatorHash)
import Plutus.Contract
import Plutus.Script.Utils.Ada qualified as Ada
import Plutus.Script.Utils.V2.Scripts as V2
import Plutus.Script.Utils.V2.Typed.Scripts qualified as V2 hiding (validatorHash)
import Plutus.Trace.Effects.EmulatorControl (getSlotConfig)
import Plutus.Trace.Emulator (ContractHandle, EmulatorTrace)
import Plutus.Trace.Emulator qualified as Trace
import Plutus.V2.Ledger.Api qualified as V2
import Plutus.V2.Ledger.Contexts qualified as V2
import PlutusTx qualified
import PlutusTx.Prelude hiding (Applicative (..), Semigroup (..), return, (<$>), (>>), (>>=))
import Prelude (Semigroup (..), (<$>), (>>=))
import Prelude qualified as Haskell
import Wallet.Emulator (Wallet (..), knownWallet)
import Wallet.Emulator qualified as Emulator
-- | A crowdfunding campaign.
data Campaign = Campaign
{ campaignDeadline :: V2.POSIXTime
-- ^ The date by which the campaign funds can be contributed.
, campaignCollectionDeadline :: V2.POSIXTime
-- ^ The date by which the campaign owner has to collect the funds
, campaignOwner :: PaymentPubKeyHash
-- ^ Public key of the campaign owner. This key is entitled to retrieve the
-- funds if the campaign is successful.
} deriving (Generic, ToJSON, FromJSON, Haskell.Show)
PlutusTx.makeLift ''Campaign
-- | Action that can be taken by the participants in this contract. A value of
-- `CampaignAction` is provided as the redeemer. The validator script then
-- checks if the conditions for performing this action are met.
--
data CampaignAction = Collect | Refund
PlutusTx.unstableMakeIsData ''CampaignAction
PlutusTx.makeLift ''CampaignAction
type CrowdfundingSchema =
Endpoint "schedule collection" ()
.\/ Endpoint "contribute" Contribution
newtype Contribution = Contribution
{ contribValue :: V2.Value
-- ^ how much to contribute
} deriving stock (Haskell.Eq, Haskell.Show, Generic)
deriving anyclass (ToJSON, FromJSON)
-- | Construct a 'Campaign' value from the campaign parameters,
-- using the wallet's public key.
mkCampaign :: V2.POSIXTime -> V2.POSIXTime -> Wallet -> Campaign
mkCampaign ddl collectionDdl ownerWallet =
Campaign
{ campaignDeadline = ddl
, campaignCollectionDeadline = collectionDdl
, campaignOwner = Emulator.mockWalletPaymentPubKeyHash ownerWallet
}
-- | The 'ValidityInterval POSIXTime' during which the funds can be collected
# INLINABLE collectionRange #
collectionRange :: Campaign -> ValidityInterval.ValidityInterval V2.POSIXTime
collectionRange cmp = ValidityInterval.interval (campaignDeadline cmp) (campaignCollectionDeadline cmp)
-- | The 'ValidityInterval POSIXTime' during which a refund may be claimed
# INLINABLE refundRange #
refundRange :: Campaign -> ValidityInterval.ValidityInterval V2.POSIXTime
refundRange cmp = ValidityInterval.from (campaignCollectionDeadline cmp)
data Crowdfunding
instance Scripts.ValidatorTypes Crowdfunding where
type instance RedeemerType Crowdfunding = CampaignAction
type instance DatumType Crowdfunding = PaymentPubKeyHash
typedValidator :: Campaign -> V2.TypedValidator Crowdfunding
typedValidator = V2.mkTypedValidatorParam @Crowdfunding
$$(PlutusTx.compile [|| mkValidator ||])
$$(PlutusTx.compile [|| wrap ||])
where
wrap = Scripts.mkUntypedValidator
# INLINABLE validRefund #
validRefund :: Campaign -> PaymentPubKeyHash -> V2.TxInfo -> Bool
validRefund campaign contributor txinfo =
-- Check that the transaction falls in the refund range of the campaign
ValidityInterval.toPlutusInterval (refundRange campaign) `Interval.contains` V2.txInfoValidRange txinfo
-- Check that the transaction is signed by the contributor
&& (txinfo `V2.txSignedBy` unPaymentPubKeyHash contributor)
# INLINABLE validCollection #
validCollection :: Campaign -> V2.TxInfo -> Bool
validCollection campaign txinfo =
-- Check that the transaction falls in the collection range of the campaign
(ValidityInterval.toPlutusInterval (collectionRange campaign) `Interval.contains` V2.txInfoValidRange txinfo)
-- Check that the transaction is signed by the campaign owner
&& (txinfo `V2.txSignedBy` unPaymentPubKeyHash (campaignOwner campaign))
# INLINABLE mkValidator #
-- | The validator script is of type 'CrowdfundingValidator', and is
-- additionally parameterized by a 'Campaign' definition. This argument is
provided by the client , using ' ' .
-- As a result, the 'Campaign' definition is part of the script address,
-- and different campaigns have different addresses. The Campaign{..} syntax
-- means that all fields of the 'Campaign' value are in scope
( for example ' campaignDeadline ' in l. 70 ) .
mkValidator :: Campaign -> PaymentPubKeyHash -> CampaignAction -> V2.ScriptContext -> Bool
mkValidator c con act V2.ScriptContext{V2.scriptContextTxInfo} = case act of
-- the "refund" branch
Refund -> validRefund c con scriptContextTxInfo
-- the "collection" branch
Collect -> validCollection c scriptContextTxInfo
-- | The validator script that determines whether the campaign owner can
-- retrieve the funds or the contributors can claim a refund.
--
contributionScript :: Campaign -> V2.Validator
contributionScript = V2.validatorScript . typedValidator
-- | The address of a [[Campaign]]
campaignAddress :: Campaign -> V2.ValidatorHash
campaignAddress = V2.validatorHash . contributionScript
-- | The crowdfunding contract for the 'Campaign'.
crowdfunding :: Campaign -> Contract () CrowdfundingSchema ContractError ()
crowdfunding c = selectList [contribute c, scheduleCollection c]
-- | A sample campaign
theCampaign :: V2.POSIXTime -> Campaign
theCampaign startTime = Campaign
{ campaignDeadline = startTime + 20000
, campaignCollectionDeadline = startTime + 30000
, campaignOwner = Emulator.mockWalletPaymentPubKeyHash (knownWallet 1)
}
-- | The "contribute" branch of the contract for a specific 'Campaign'. Exposes
-- an endpoint that allows the user to enter their public key and the
-- contribution. Then waits until the campaign is over, and collects the
-- refund if the funding was not collected.
contribute :: Campaign -> Promise () CrowdfundingSchema ContractError ()
contribute cmp = endpoint @"contribute" $ \Contribution{contribValue} -> do
logInfo @Text $ "Contributing " <> Text.pack (Haskell.show contribValue)
contributor <- ownFirstPaymentPubKeyHash
let inst = typedValidator cmp
validityTimeRange = ValidityInterval.lessThan (campaignDeadline cmp)
tx = Constraints.mustPayToTheScriptWithDatumInTx contributor contribValue
<> Constraints.mustValidateInTimeRange validityTimeRange
txid <- fmap getCardanoTxId $ mkTxConstraints (Constraints.typedValidatorLookups inst) tx
>>= adjustUnbalancedTx >>= submitUnbalancedTx
utxo <- watchAddressUntilTime (Scripts.validatorCardanoAddress Params.testnet inst) $ campaignCollectionDeadline cmp
-- 'utxo' is the set of unspent outputs at the campaign address at the
-- collection deadline. If 'utxo' still contains our own contribution
-- then we can claim a refund.
let flt Ledger.TxOutRef{txOutRefId} _ = txid Haskell.== txOutRefId
tx' = Constraints.collectFromTheScriptFilter flt utxo Refund
<> Constraints.mustValidateInTimeRange (refundRange cmp)
<> Constraints.mustBeSignedBy contributor
if Constraints.modifiesUtxoSet tx'
then do
logInfo @Text "Claiming refund"
void $ mkTxConstraints (Constraints.typedValidatorLookups inst
<> Constraints.unspentOutputs utxo) tx'
>>= adjustUnbalancedTx >>= submitUnbalancedTx
else pure ()
-- | The campaign owner's branch of the contract for a given 'Campaign'. It
-- watches the campaign address for contributions and collects them if
-- the funding goal was reached in time.
scheduleCollection :: Campaign -> Promise () CrowdfundingSchema ContractError ()
scheduleCollection cmp = endpoint @"schedule collection" $ \() -> do
let inst = typedValidator cmp
-- Expose an endpoint that lets the user fire the starting gun on the
-- campaign. (This endpoint isn't technically necessary, we could just
run the ' trg ' action right away )
logInfo @Text "Campaign started. Waiting for campaign deadline to collect funds."
_ <- awaitTime $ campaignDeadline cmp
unspentOutputs <- utxosAt (Scripts.validatorCardanoAddress Params.testnet inst)
let tx = Constraints.collectFromTheScript unspentOutputs Collect
<> Constraints.mustBeSignedBy (campaignOwner cmp)
<> Constraints.mustValidateInTimeRange (collectionRange cmp)
logInfo @Text "Collecting funds"
void $ mkTxConstraints (Constraints.typedValidatorLookups inst
<> Constraints.unspentOutputs unspentOutputs) tx
>>= adjustUnbalancedTx >>= submitUnbalancedTx
-- | Call the "schedule collection" endpoint and instruct the campaign owner's
wallet ( wallet 1 ) to start watching the campaign address .
startCampaign :: EmulatorTrace (ContractHandle () CrowdfundingSchema ContractError)
startCampaign = do
startTime <- TimeSlot.scSlotZeroTime <$> getSlotConfig
hdl <- Trace.activateContractWallet (knownWallet 1) (crowdfunding $ theCampaign startTime)
Trace.callEndpoint @"schedule collection" hdl ()
pure hdl
-- | Call the "contribute" endpoint, contributing the amount from the wallet
makeContribution :: Wallet -> V2.Value -> EmulatorTrace ()
makeContribution w v = do
startTime <- TimeSlot.scSlotZeroTime <$> getSlotConfig
hdl <- Trace.activateContractWallet w (crowdfunding $ theCampaign startTime)
Trace.callEndpoint @"contribute" hdl Contribution{contribValue=v}
| Run a successful campaign with contributions from wallets 2 , 3 and 4 .
successfulCampaign :: EmulatorTrace ()
successfulCampaign = do
_ <- startCampaign
makeContribution (knownWallet 2) (Ada.adaValueOf 10)
makeContribution (knownWallet 3) (Ada.adaValueOf 10)
makeContribution (knownWallet 4) (Ada.adaValueOf 2.5)
void $ Trace.waitUntilSlot 21
| null | https://raw.githubusercontent.com/input-output-hk/plutus-apps/8949ce26588166d9961205aa61edd66e4f83d4f5/plutus-use-cases/src/Plutus/Contracts/Crowdfunding.hs | haskell | This is the fully parallel version that collects all contributions
in a single transaction. This is, of course, limited by the maximum
number of inputs a transaction can have.
# LANGUAGE DataKinds #
# LANGUAGE DeriveAnyClass #
# LANGUAGE DerivingStrategies #
# LANGUAGE FlexibleContexts #
# LANGUAGE OverloadedStrings #
# LANGUAGE RecordWildCards #
# LANGUAGE TypeOperators #
* Campaign parameters
* Functionality for campaign contributors
* Functionality for campaign owners
* Validator script
* Traces
| A crowdfunding campaign.
^ The date by which the campaign funds can be contributed.
^ The date by which the campaign owner has to collect the funds
^ Public key of the campaign owner. This key is entitled to retrieve the
funds if the campaign is successful.
| Action that can be taken by the participants in this contract. A value of
`CampaignAction` is provided as the redeemer. The validator script then
checks if the conditions for performing this action are met.
^ how much to contribute
| Construct a 'Campaign' value from the campaign parameters,
using the wallet's public key.
| The 'ValidityInterval POSIXTime' during which the funds can be collected
| The 'ValidityInterval POSIXTime' during which a refund may be claimed
Check that the transaction falls in the refund range of the campaign
Check that the transaction is signed by the contributor
Check that the transaction falls in the collection range of the campaign
Check that the transaction is signed by the campaign owner
| The validator script is of type 'CrowdfundingValidator', and is
additionally parameterized by a 'Campaign' definition. This argument is
As a result, the 'Campaign' definition is part of the script address,
and different campaigns have different addresses. The Campaign{..} syntax
means that all fields of the 'Campaign' value are in scope
the "refund" branch
the "collection" branch
| The validator script that determines whether the campaign owner can
retrieve the funds or the contributors can claim a refund.
| The address of a [[Campaign]]
| The crowdfunding contract for the 'Campaign'.
| A sample campaign
| The "contribute" branch of the contract for a specific 'Campaign'. Exposes
an endpoint that allows the user to enter their public key and the
contribution. Then waits until the campaign is over, and collects the
refund if the funding was not collected.
'utxo' is the set of unspent outputs at the campaign address at the
collection deadline. If 'utxo' still contains our own contribution
then we can claim a refund.
| The campaign owner's branch of the contract for a given 'Campaign'. It
watches the campaign address for contributions and collects them if
the funding goal was reached in time.
Expose an endpoint that lets the user fire the starting gun on the
campaign. (This endpoint isn't technically necessary, we could just
| Call the "schedule collection" endpoint and instruct the campaign owner's
| Call the "contribute" endpoint, contributing the amount from the wallet | | Crowdfunding contract implemented using the [ [ ] ] interface .
# LANGUAGE AllowAmbiguousTypes #
# LANGUAGE DeriveGeneric #
# LANGUAGE NamedFieldPuns #
# LANGUAGE NoImplicitPrelude #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
# LANGUAGE ViewPatterns #
# OPTIONS_GHC -fno - ignore - interface - pragmas #
# OPTIONS_GHC -fno - omit - interface - pragmas #
# OPTIONS_GHC -fno - specialise #
# OPTIONS_GHC -fplugin - opt PlutusTx . Plugin : debug - context #
module Plutus.Contracts.Crowdfunding (
Campaign(..)
, CrowdfundingSchema
, crowdfunding
, theCampaign
, contribute
, Contribution(..)
, scheduleCollection
, campaignAddress
, contributionScript
, mkValidator
, mkCampaign
, CampaignAction(..)
, collectionRange
, refundRange
, startCampaign
, makeContribution
, successfulCampaign
) where
import Cardano.Node.Emulator.Params qualified as Params
import Cardano.Node.Emulator.TimeSlot qualified as TimeSlot
import Control.Applicative (Applicative (..))
import Control.Monad (void)
import Data.Aeson (FromJSON, ToJSON)
import Data.Text (Text)
import Data.Text qualified as Text
import GHC.Generics (Generic)
import Ledger (PaymentPubKeyHash (unPaymentPubKeyHash), getCardanoTxId)
import Ledger qualified
import Ledger.Interval qualified as Interval
import Ledger.Tx.Constraints qualified as Constraints
import Ledger.Tx.Constraints.ValidityInterval qualified as ValidityInterval
import Ledger.Typed.Scripts qualified as Scripts hiding (validatorHash)
import Plutus.Contract
import Plutus.Script.Utils.Ada qualified as Ada
import Plutus.Script.Utils.V2.Scripts as V2
import Plutus.Script.Utils.V2.Typed.Scripts qualified as V2 hiding (validatorHash)
import Plutus.Trace.Effects.EmulatorControl (getSlotConfig)
import Plutus.Trace.Emulator (ContractHandle, EmulatorTrace)
import Plutus.Trace.Emulator qualified as Trace
import Plutus.V2.Ledger.Api qualified as V2
import Plutus.V2.Ledger.Contexts qualified as V2
import PlutusTx qualified
import PlutusTx.Prelude hiding (Applicative (..), Semigroup (..), return, (<$>), (>>), (>>=))
import Prelude (Semigroup (..), (<$>), (>>=))
import Prelude qualified as Haskell
import Wallet.Emulator (Wallet (..), knownWallet)
import Wallet.Emulator qualified as Emulator
data Campaign = Campaign
{ campaignDeadline :: V2.POSIXTime
, campaignCollectionDeadline :: V2.POSIXTime
, campaignOwner :: PaymentPubKeyHash
} deriving (Generic, ToJSON, FromJSON, Haskell.Show)
PlutusTx.makeLift ''Campaign
data CampaignAction = Collect | Refund
PlutusTx.unstableMakeIsData ''CampaignAction
PlutusTx.makeLift ''CampaignAction
type CrowdfundingSchema =
Endpoint "schedule collection" ()
.\/ Endpoint "contribute" Contribution
newtype Contribution = Contribution
{ contribValue :: V2.Value
} deriving stock (Haskell.Eq, Haskell.Show, Generic)
deriving anyclass (ToJSON, FromJSON)
mkCampaign :: V2.POSIXTime -> V2.POSIXTime -> Wallet -> Campaign
mkCampaign ddl collectionDdl ownerWallet =
Campaign
{ campaignDeadline = ddl
, campaignCollectionDeadline = collectionDdl
, campaignOwner = Emulator.mockWalletPaymentPubKeyHash ownerWallet
}
# INLINABLE collectionRange #
collectionRange :: Campaign -> ValidityInterval.ValidityInterval V2.POSIXTime
collectionRange cmp = ValidityInterval.interval (campaignDeadline cmp) (campaignCollectionDeadline cmp)
# INLINABLE refundRange #
refundRange :: Campaign -> ValidityInterval.ValidityInterval V2.POSIXTime
refundRange cmp = ValidityInterval.from (campaignCollectionDeadline cmp)
data Crowdfunding
instance Scripts.ValidatorTypes Crowdfunding where
type instance RedeemerType Crowdfunding = CampaignAction
type instance DatumType Crowdfunding = PaymentPubKeyHash
typedValidator :: Campaign -> V2.TypedValidator Crowdfunding
typedValidator = V2.mkTypedValidatorParam @Crowdfunding
$$(PlutusTx.compile [|| mkValidator ||])
$$(PlutusTx.compile [|| wrap ||])
where
wrap = Scripts.mkUntypedValidator
# INLINABLE validRefund #
validRefund :: Campaign -> PaymentPubKeyHash -> V2.TxInfo -> Bool
validRefund campaign contributor txinfo =
ValidityInterval.toPlutusInterval (refundRange campaign) `Interval.contains` V2.txInfoValidRange txinfo
&& (txinfo `V2.txSignedBy` unPaymentPubKeyHash contributor)
# INLINABLE validCollection #
validCollection :: Campaign -> V2.TxInfo -> Bool
validCollection campaign txinfo =
(ValidityInterval.toPlutusInterval (collectionRange campaign) `Interval.contains` V2.txInfoValidRange txinfo)
&& (txinfo `V2.txSignedBy` unPaymentPubKeyHash (campaignOwner campaign))
# INLINABLE mkValidator #
provided by the client , using ' ' .
( for example ' campaignDeadline ' in l. 70 ) .
mkValidator :: Campaign -> PaymentPubKeyHash -> CampaignAction -> V2.ScriptContext -> Bool
mkValidator c con act V2.ScriptContext{V2.scriptContextTxInfo} = case act of
Refund -> validRefund c con scriptContextTxInfo
Collect -> validCollection c scriptContextTxInfo
contributionScript :: Campaign -> V2.Validator
contributionScript = V2.validatorScript . typedValidator
campaignAddress :: Campaign -> V2.ValidatorHash
campaignAddress = V2.validatorHash . contributionScript
crowdfunding :: Campaign -> Contract () CrowdfundingSchema ContractError ()
crowdfunding c = selectList [contribute c, scheduleCollection c]
theCampaign :: V2.POSIXTime -> Campaign
theCampaign startTime = Campaign
{ campaignDeadline = startTime + 20000
, campaignCollectionDeadline = startTime + 30000
, campaignOwner = Emulator.mockWalletPaymentPubKeyHash (knownWallet 1)
}
contribute :: Campaign -> Promise () CrowdfundingSchema ContractError ()
contribute cmp = endpoint @"contribute" $ \Contribution{contribValue} -> do
logInfo @Text $ "Contributing " <> Text.pack (Haskell.show contribValue)
contributor <- ownFirstPaymentPubKeyHash
let inst = typedValidator cmp
validityTimeRange = ValidityInterval.lessThan (campaignDeadline cmp)
tx = Constraints.mustPayToTheScriptWithDatumInTx contributor contribValue
<> Constraints.mustValidateInTimeRange validityTimeRange
txid <- fmap getCardanoTxId $ mkTxConstraints (Constraints.typedValidatorLookups inst) tx
>>= adjustUnbalancedTx >>= submitUnbalancedTx
utxo <- watchAddressUntilTime (Scripts.validatorCardanoAddress Params.testnet inst) $ campaignCollectionDeadline cmp
let flt Ledger.TxOutRef{txOutRefId} _ = txid Haskell.== txOutRefId
tx' = Constraints.collectFromTheScriptFilter flt utxo Refund
<> Constraints.mustValidateInTimeRange (refundRange cmp)
<> Constraints.mustBeSignedBy contributor
if Constraints.modifiesUtxoSet tx'
then do
logInfo @Text "Claiming refund"
void $ mkTxConstraints (Constraints.typedValidatorLookups inst
<> Constraints.unspentOutputs utxo) tx'
>>= adjustUnbalancedTx >>= submitUnbalancedTx
else pure ()
scheduleCollection :: Campaign -> Promise () CrowdfundingSchema ContractError ()
scheduleCollection cmp = endpoint @"schedule collection" $ \() -> do
let inst = typedValidator cmp
run the ' trg ' action right away )
logInfo @Text "Campaign started. Waiting for campaign deadline to collect funds."
_ <- awaitTime $ campaignDeadline cmp
unspentOutputs <- utxosAt (Scripts.validatorCardanoAddress Params.testnet inst)
let tx = Constraints.collectFromTheScript unspentOutputs Collect
<> Constraints.mustBeSignedBy (campaignOwner cmp)
<> Constraints.mustValidateInTimeRange (collectionRange cmp)
logInfo @Text "Collecting funds"
void $ mkTxConstraints (Constraints.typedValidatorLookups inst
<> Constraints.unspentOutputs unspentOutputs) tx
>>= adjustUnbalancedTx >>= submitUnbalancedTx
wallet ( wallet 1 ) to start watching the campaign address .
startCampaign :: EmulatorTrace (ContractHandle () CrowdfundingSchema ContractError)
startCampaign = do
startTime <- TimeSlot.scSlotZeroTime <$> getSlotConfig
hdl <- Trace.activateContractWallet (knownWallet 1) (crowdfunding $ theCampaign startTime)
Trace.callEndpoint @"schedule collection" hdl ()
pure hdl
makeContribution :: Wallet -> V2.Value -> EmulatorTrace ()
makeContribution w v = do
startTime <- TimeSlot.scSlotZeroTime <$> getSlotConfig
hdl <- Trace.activateContractWallet w (crowdfunding $ theCampaign startTime)
Trace.callEndpoint @"contribute" hdl Contribution{contribValue=v}
| Run a successful campaign with contributions from wallets 2 , 3 and 4 .
successfulCampaign :: EmulatorTrace ()
successfulCampaign = do
_ <- startCampaign
makeContribution (knownWallet 2) (Ada.adaValueOf 10)
makeContribution (knownWallet 3) (Ada.adaValueOf 10)
makeContribution (knownWallet 4) (Ada.adaValueOf 2.5)
void $ Trace.waitUntilSlot 21
|
fc96fd0bc3233fe2c623fc1bc22deb01b6606b38550b3a1fdee34f32a0b17b40 | adaliu-gh/htdp | 12-226.228.rkt | ;;==========================
A FSM is one of :
; – '()
– ( cons Transition FSM )
(define-struct transition [current next])
; A Transition is a structure:
( make - transition FSM - State FSM - State )
FSM - State is a Color .
interpretation A FSM represents the transitions that a
finite state machine can take from one state to another
; in reaction to key strokes
(define fsm-traffic
(list (make-transition "red" "green")
(make-transition "green" "yellow")
(make-transition "yellow" "red")))
;;============================
226
State State - > Boolean
(define (state=? a b)
(string=? a b))
;;===========================
228
FSM FSM - State - > FSM - State
; finds the state representing current in transition
; and retrieve the next field
(check-expect (find fsm-traffic "red") "green")
(check-expect (find fsm-traffic "green") "yellow")
(define (find transitions current)
(cond
[(empty? transitions) (string-append "not found: " current)]
[else (if (state=? current (transition-current (first transitions)))
(transition-next (first transitions))
(find (rest transitions) current))]))
| null | https://raw.githubusercontent.com/adaliu-gh/htdp/a0fca8af2ae8bdcef40d56f6f45021dd92df2995/8-13%20Arbitrarily%20Large%20Data/12-226.228.rkt | racket | ==========================
– '()
A Transition is a structure:
in reaction to key strokes
============================
===========================
finds the state representing current in transition
and retrieve the next field | A FSM is one of :
– ( cons Transition FSM )
(define-struct transition [current next])
( make - transition FSM - State FSM - State )
FSM - State is a Color .
interpretation A FSM represents the transitions that a
finite state machine can take from one state to another
(define fsm-traffic
(list (make-transition "red" "green")
(make-transition "green" "yellow")
(make-transition "yellow" "red")))
226
State State - > Boolean
(define (state=? a b)
(string=? a b))
228
FSM FSM - State - > FSM - State
(check-expect (find fsm-traffic "red") "green")
(check-expect (find fsm-traffic "green") "yellow")
(define (find transitions current)
(cond
[(empty? transitions) (string-append "not found: " current)]
[else (if (state=? current (transition-current (first transitions)))
(transition-next (first transitions))
(find (rest transitions) current))]))
|
c2c1097276c24771de14cd4d79c7a499e75dceb3041abfdc02f52cd76ec83abe | coq/coq | coqcargs.mli | (************************************************************************)
(* * The Coq Proof Assistant / The Coq Development Team *)
v * Copyright INRIA , CNRS and contributors
< O _ _ _ , , * ( see version control and CREDITS file for authors & dates )
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
* GNU Lesser General Public License Version 2.1
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
* Compilation modes :
- BuildVo : process statements and proofs ( standard compilation ) ,
and also output an empty .vos file and .vok file
- BuildVio : process statements , delay proofs in futures
- Vio2Vo : load delayed proofs and process them
- BuildVos : process statements , and discard proofs ,
and load .vos files for required libraries
- BuildVok : like , but load .vos files for required libraries
When loading the .vos version of a required library , if the file exists but is
empty , then we attempt to load the .vo version of that library .
This trick is useful to avoid the need for the user to compile .vos version
when an up to date .vo version is already available .
- BuildVo : process statements and proofs (standard compilation),
and also output an empty .vos file and .vok file
- BuildVio : process statements, delay proofs in futures
- Vio2Vo : load delayed proofs and process them
- BuildVos : process statements, and discard proofs,
and load .vos files for required libraries
- BuildVok : like BuildVo, but load .vos files for required libraries
When loading the .vos version of a required library, if the file exists but is
empty, then we attempt to load the .vo version of that library.
This trick is useful to avoid the need for the user to compile .vos version
when an up to date .vo version is already available.
*)
(** Output mode selected on the coqc command line; see the header
    comment above for what each mode produces and loads
    (.vo / .vio / .vos / .vok files). *)
type compilation_mode = BuildVo | BuildVio | Vio2Vo | BuildVos | BuildVok

(** Parsed command-line state of the Coq compiler (built by {!parse}). *)
type t =
  { compilation_mode : compilation_mode
  ; compile_file: (string * bool) option (* bool is verbosity *)
  ; compilation_output_name : string option
  ; vio_checking : bool
  ; vio_tasks : (int list * string) list
  ; vio_files : string list
  ; vio_files_j : int
  ; echo : bool
  ; glob_out : Dumpglob.glob_output
  ; output_context : bool
  }

(** Argument record with every field at its default value. *)
val default : t

(** Parses the given command-line words into a {!t}. *)
val parse : string list -> t
| null | https://raw.githubusercontent.com/coq/coq/b30c8272f0d9dc50933cac0bc81030898f104dde/toplevel/coqcargs.mli | ocaml | **********************************************************************
* The Coq Proof Assistant / The Coq Development Team
// * This file is distributed under the terms of the
* (see LICENSE file for the text of the license)
**********************************************************************
bool is verbosity | v * Copyright INRIA , CNRS and contributors
< O _ _ _ , , * ( see version control and CREDITS file for authors & dates )
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* GNU Lesser General Public License Version 2.1
* Compilation modes :
- BuildVo : process statements and proofs ( standard compilation ) ,
and also output an empty .vos file and .vok file
- BuildVio : process statements , delay proofs in futures
- Vio2Vo : load delayed proofs and process them
- BuildVos : process statements , and discard proofs ,
and load .vos files for required libraries
- BuildVok : like , but load .vos files for required libraries
When loading the .vos version of a required library , if the file exists but is
empty , then we attempt to load the .vo version of that library .
This trick is useful to avoid the need for the user to compile .vos version
when an up to date .vo version is already available .
- BuildVo : process statements and proofs (standard compilation),
and also output an empty .vos file and .vok file
- BuildVio : process statements, delay proofs in futures
- Vio2Vo : load delayed proofs and process them
- BuildVos : process statements, and discard proofs,
and load .vos files for required libraries
- BuildVok : like BuildVo, but load .vos files for required libraries
When loading the .vos version of a required library, if the file exists but is
empty, then we attempt to load the .vo version of that library.
This trick is useful to avoid the need for the user to compile .vos version
when an up to date .vo version is already available.
*)
type compilation_mode = BuildVo | BuildVio | Vio2Vo | BuildVos | BuildVok
type t =
{ compilation_mode : compilation_mode
; compilation_output_name : string option
; vio_checking : bool
; vio_tasks : (int list * string) list
; vio_files : string list
; vio_files_j : int
; echo : bool
; glob_out : Dumpglob.glob_output
; output_context : bool
}
val default : t
val parse : string list -> t
|
43288bc5e4c06b190649a3260ff4ac72eea43726ca5e0d9e8f07400e7e9103db | cljs-audio/cljs-audio | updates.cljc | (ns cljs-audio.updates
(:require [editscript.core :as e]
[cljs.core.match :refer [match]]))
;; Classifies what `path` addresses inside `patch`.  Returns one of:
;;   :top       - the root group's synthetic ::out gain
;;   :node      - a concrete node (or a nested group's ::in/::out gain)
;;   :parameter - a [node-id parameter-name] endpoint whose node exists
;;   :patch     - a nested sub-patch (a map carrying :connections)
;;   nil        - the root ::in, a missing parameter target, or an
;;                unparseable entry (the println branch returns nil)
(defn path-type [path patch]
  (case path
    ;; the root group's synthetic ports are special-cased
    [:group :cljs-audio.updates/out] :top
    [:group :cljs-audio.updates/in] nil
    (let [id (last path)]
      (cond
        (= id :cljs-audio.updates/out) :node
        (= id :cljs-audio.updates/in) :node
        ;; a vector id is a [node-id parameter-name] pair; it only
        ;; counts as a parameter when the node exists in this group
        (vector? id) (if (get-in patch (into (vec (butlast path)) [(first id)]))
                       :parameter
                       nil)
        :else (let [thing (get-in patch path)]
                (cond
                  (:type thing) :node
                  (:connections thing) :patch
                  ;; diagnostic only; yields nil
                  :else (println "Can't parse " thing path patch)))))))
;; Resolves the node path a connection should originate FROM.
;; A sub-patch is entered through its synthetic ::out gain; the root
;; ::out (:top) and unresolved endpoints have no source node.
(defn resolve-from [path root-patch]
  (let [kind (path-type path root-patch)]
    (case kind
      (nil :top) nil
      :node path
      :patch (conj path :group ::out))))
;; Resolves the node path a connection should target.  The root ::out
;; maps to the audio context itself; a sub-patch is entered through its
;; synthetic ::in gain; parameter endpoints resolve to themselves.
(defn resolve-to [path root-patch]
  (let [kind (path-type path root-patch)]
    (case kind
      nil nil
      :top [:ctx]
      (:node :parameter) path
      :patch (conj path :group ::in))))
;; Turns one scheduling tuple [command & args] for parameter `name`
;; into a :schedule command vector.
(defn schedule [path name [command & args]]
  (vector :schedule path name command (vec args)))
;; Expands one parameter assignment into command vectors.
;; :start/:stop become lifecycle commands; a vector value is a list of
;; schedule tuples; anything else becomes a plain :set.
(defn set-parameter [path name value]
  (cond
    (= name :start) [[:start path value]]
    (= name :stop)  [[:stop path value]]
    (vector? value) (mapv #(schedule path name %) value)
    :else           [[:set path name value]]))
;; Expands a parameter map into the flat vector of its set/schedule
;; commands, preserving entry order.
(defn make-set-params [path params]
  (reduce (fn [commands [name value]]
            (into commands (set-parameter path name value)))
          []
          (vec params)))
;; The single command that instantiates a node of `type` at `path`.
;; Missing constructor args default to [].
(defn make-add-node [path type create-args]
  (vector [:add-node path type (or create-args [])]))
;; Builds the command realizing one connection entry.  `connection-path`
;; ends in a [from-id to-id] pair under a :connections key; when to-id
;; is itself a [node-id parameter-name] vector the connection targets a
;; parameter (:connect-parameter) instead of a node input (:connect).
(defn make-connect [connection-path patch]
  (let [parent-patch-path (vec (drop-last 2 connection-path))
        [from-id to-id] (last connection-path)]
    (let [from-path (into parent-patch-path [:group from-id])
          from (resolve-from from-path patch)]
      (if (vector? to-id)
        (let [[to-id parameter-name] to-id
              to-path (into parent-patch-path [:group to-id])
              to (resolve-to to-path patch)]
          [(into [:connect-parameter] [from to parameter-name])])
        (let [to-path (into parent-patch-path [:group to-id])
              to (resolve-to to-path patch)]
          [(into [:connect] [from to])])))))
;; Finds every connection in the node's enclosing patch that names the
;; node as either endpoint, returned as full paths to those connection
;; entries.
(defn find-node-connections [node-path patch]
  (let [parent (vec (drop-last 2 node-path))
        node-id (last node-path)
        touches? (fn [[from to]] (or (= from node-id) (= to node-id)))]
    (vec (for [connection (into [] (get-in patch (conj parent :connections)))
               :when (touches? connection)]
           (conj parent :connections connection)))))
;; Commands that create the node at `path`, set its initial parameters,
;; and (re)establish every connection touching it.
(defn add-node [path patch]
  (let [node (get-in patch path)
        {:keys [type parameters create-args]} node]
    (into (into (make-add-node path type create-args)
                (make-set-params path parameters))
          (let [node-connections-paths (find-node-connections path patch)]
            (vec (mapcat (fn [path] (make-connect path patch)) node-connections-paths))))))
;; Commands that detach a node from the graph and then delete it.
(defn make-remove-node [path]
  (mapv (fn [command] [command path]) [:disconnect :remove-node]))
;; Commands that build an entire (sub)group: first the group's synthetic
;; ::in/::out gain nodes (skipped for the root [:group] path), then each
;; member node or nested group, recursively.
(defn add-group [path patch]
  (let [group (get-in patch path)
        in-path (into path [::in])
        out-path (into path [::out])
        ;; register the synthetic port gains in the patch so add-node
        ;; can resolve connections to/from them
        patch (-> patch
                  (assoc-in in-path {:type :gain :parameters {} :create-args []})
                  (assoc-in out-path {:type :gain :parameters {} :create-args []}))
        create-in-out-commands (if (= path [:group])
                                 []
                                 (into (add-node in-path patch)
                                       (add-node out-path patch)))]
    (into
     create-in-out-commands
     (mapcat (fn [[id node]]
               (cond
                 (:type node) (add-node (into path [id]) patch)
                 :else (add-group (into path [id :group]) patch)))
             (into [] group)))))
;; Commands that tear down an entire (sub)group: its synthetic ::in/::out
;; gains (skipped for the root [:group]) plus every member node or nested
;; group.  An empty group yields no commands at all.
(defn remove-group [path patch]
  (let [group (get-in patch path)
        make-remove-in-out (if (= path [:group])
                             []
                             (into (make-remove-node (into path [::in]))
                                   (make-remove-node (into path [::out]))))]
    (if (empty? group)
      []
      (into make-remove-in-out
            (mapcat (fn [[id node]]
                      (cond
                        (:type node) (make-remove-node (into path [id]))
                        (:group node) (remove-group (into path [id :group]) patch)
                        ;; diagnostic only; contributes nothing to the output
                        :else (println "ERROR" path node)))
                    (into group))))))
;; Forward declaration: ->group-ast (below) recurses into ->patch-ast.
(declare ->patch-ast)
;; Maps a single editscript edit [path op value] onto a coarse update
;; descriptor [update-kind path].  Dispatch matches on the REVERSED edit
;; path, so the innermost key appears first in each pattern.
;; NOTE(review): the #js reader tag is ClojureScript-only; in a .cljc
;; file these literals will not read on the JVM side -- confirm this
;; namespace is only ever loaded as cljs.
(defn edit->update [edit]
  (let [[path op v] edit]
    (match [[(into [] (reverse path)) op v]]
           ;; connections
           [[[:connections & r] :r #{}]] #js [:remove-all-connections path]
           [[[:connections & r] :r v]] #js [:replace-all-connections path]
           [[[_ :connections & r] :+ v]] #js [:add-connection path]
           [[[_ :connections & r] :- v]] #js [:remove-connection path]
           ;; parameters (the _-prefixed variants strip the schedule-tuple
           ;; indices so the path ends at the parameter name)
           [[[:start :parameters & r] :r v]] #js [:start path]
           [[[:start :parameters & r] :+ v]] #js [:start path]
           [[[:stop :parameters & r] :r v]] #js [:stop path]
           [[[:stop :parameters & r] :+ v]] #js [:stop path]
           [[[_ :parameters & r] :r v]] #js [:replace-parameter path]
           [[[_ _ :parameters & r] :r v]] #js [:replace-parameter (drop-last 1 path)]
           [[[_ _ _ :parameters & r] :r v]] #js [:replace-parameter (drop-last 2 path)]
           [[[_ :parameters & r] :+ v]] #js [:set-parameter path]
           [[[_ _ :parameters & r] :+ v]] #js [:set-parameter (drop-last 1 path)]
           [[[_ _ _ :parameters & r] :+ v]] #js [:set-parameter (drop-last 2 path)]
           [[[_ :parameters & r] :- v]] #js [:no-op]
           [[[_ _ :parameters & r] :- v]] #js [:no-op]
           [[[_ _ _ :parameters & r] :- v]] #js [:no-op]
           ;; nodes
           [[[_ :group & r] :r v]] #js [:replace-node path]
           [[[_ :group & r] :+ v]] #js [:add-node path]
           [[[:group & r] :r v]] #js [:replace-group path]
           [[[:group & r] :+ v]] #js [:replace-group path]
           [[[:create-args id & r] :r v]] #js [:recreate-node path] ;; TODO:!!!
           )))
;; Diffs two patch ASTs and maps every editscript edit onto a coarse
;; update descriptor (see edit->update).
;; Fix: the {:algo :quick} options map belongs to `e/diff` (whose
;; 3-arity accepts an opts map); `e/get-edits` is a one-argument
;; accessor on the edit script.  The original passed the opts to
;; get-edits instead.
(defn make-updates [a b]
  (let [edits (e/get-edits (e/diff a b {:algo :quick}))]
    (to-array (mapv edit->update edits))))
;; Builds the disconnect command for one connection entry.  The path is
;; expected to end in [:connections [from to]]; both endpoints are
;; resolved the same way make-connect resolves them.
(defn make-disconnect [connection-path patch]
  (let [parent-patch-path (vec (drop-last 2 connection-path))
        nodes (last connection-path)
        [from to] (mapv (fn [node] (into parent-patch-path [:group node])) nodes)]
    [(into [:disconnect] [(resolve-from from patch) (resolve-to to patch)])]))
;; Emits a disconnect for every connection in the connection set
;; addressed by `path` (a path ending in :connections).
;; Fix: the connection path handed to make-disconnect must include the
;; :connections segment -- make-disconnect strips the last TWO path
;; elements to find the parent patch -- mirroring add-all-connections
;; below.  The original omitted it, making the parent path one element
;; too short.
(defn remove-all-connections [path patch]
  (let [connections-path (vec (drop-last path))
        connections (get-in patch path)]
    (into []
          (mapcat (fn [connection]
                    (make-disconnect (into connections-path [:connections connection])
                                     patch))
                  connections))))
;; Emits a connect for every connection in the connection set addressed
;; by `path` (a path ending in :connections).
(defn add-all-connections [path patch]
  (let [parent (vec (drop-last path))]
    (vec (mapcat (fn [connection]
                   (make-connect (conj parent :connections connection) patch))
                 (get-in patch path)))))
;; Commands that remove the old incarnation at `path` (a node when it
;; has :type, otherwise a group) and build the new one in its place.
(defn replace-node [path old-patch new-patch]
  (into
   (let [node (get-in old-patch path)]
     (cond
       (:type node) (make-remove-node path)
       :else (remove-group (into path [:group]) old-patch)))
   (let [node (get-in new-patch path)]
     (cond
       (:type node) (add-node path new-patch)
       :else (add-group (into path [:group]) new-patch)))))
;; Expands one update descriptor [name path] into concrete audio-graph
;; commands, reading connection/parameter data from the old or new
;; patch AST as appropriate.
(defn update->commands [[name path] old-patch new-patch]
  (case name
    :add-connection (make-connect path new-patch)
    :remove-connection (make-disconnect path old-patch)
    :remove-all-connections (remove-all-connections path old-patch)
    :replace-all-connections (concat (remove-all-connections path old-patch)
                                     (add-all-connections path new-patch))
    ;; NOTE(review): edit->update never emits :set-parameters, and the
    ;; mapcat fn would need to destructure a single [name value] entry
    ;; rather than take two args -- apparently a dead branch; confirm.
    :set-parameters (let [node-path (vec (drop-last 2 path))]
                      (vec (mapcat (fn [name value]
                                     (set-parameter node-path name value))
                                   (get-in new-patch path))))
    :set-parameter (let [value (get-in new-patch path)
                         parameter-name (last path)
                         node-path (vec (drop-last 2 path))]
                     (set-parameter node-path parameter-name value))
    :replace-parameter (let [value (get-in new-patch path)
                             parameter-name (last path)
                             node-path (vec (drop-last 2 path))]
                         (set-parameter node-path parameter-name value))
    :replace-group (into (remove-group path old-patch) (add-group path new-patch))
    :add-node (let [node (get-in new-patch path)]
                (cond
                  (:type node) (add-node path new-patch)
                  :else (add-group (into path [:group]) new-patch)))
    :replace-node (replace-node path old-patch new-patch)
    ;; :start/:stop rebuild the node first: source-style nodes cannot be
    ;; restarted in place, so the node is replaced before starting
    :start (let [value (get-in new-patch path)
                 node-path (vec (drop-last 2 path))]
             (into (replace-node node-path old-patch new-patch)
                   [[:start node-path value]]))
    :stop (let [value (get-in new-patch path)
                node-path (vec (drop-last 2 path))]
            [[:stop node-path value]])
    :no-op []
    ))
;; Normalizes a [type parameters create-args] node tuple into a map.
(defn ->node-ast [[type parameters create-args]]
  (zipmap [:type :parameters :create-args]
          [type parameters create-args]))
;; Normalizes each group member: an entry whose value starts with a
;; keyword is a node tuple, anything else is a nested patch tuple.
(defn ->group-ast [group] (into {} (mapv (fn [[id node]] (if (keyword? (first node))
                                                           [id (->node-ast node)]
                                                           [id (->patch-ast node)]
                                                           )) (into [] group))))
;; Normalizes connections into a set, rewriting the :> port marker to
;; the group's synthetic ::in (as source) / ::out (as sink).
(defn ->connections-ast [connections]
  (letfn [(normalize [[from-id to-id]]
            [(if (= :> from-id) ::in from-id)
             (if (= :> to-id) ::out to-id)])]
    (into #{} (map normalize) (into [] connections))))
(defn ->ports-ast
  "Ports need no normalization; they pass through unchanged."
  [ports]
  ports)
;; Normalizes a user-facing [group connections ports] patch tuple into
;; the map-shaped AST that the differ operates on.
(defn ->patch-ast [patch]
  (let [[group connections ports] patch]
    {:group (->group-ast group)
     :connections (->connections-ast connections)
     :ports (->ports-ast ports)}))
;; Keeps every command except a :connect whose source or target path is
;; the empty vector (an endpoint that failed to resolve).
(defn cleanup-meaningless-ops [[name a1 a2]]
  (or (not= name :connect)
      (and (not= a1 []) (not= a2 []))))
;; Execution order for emitted commands: lower index runs first
;; (teardown, then node creation, then parameters and connections).
(def priorities
  (let [order [:stop
               :disconnect
               :remove-node
               :add-node
               :start
               :set
               :connect
               :connect-parameter
               :schedule]]
    (zipmap order (range))))
;; Stable-sorts command vectors by the priority of their leading keyword.
(defn sort-updates-by-priority [updates]
  (sort-by (fn [[command]] (priorities command)) updates))
;; Top-level entry point: computes the ordered command vector that
;; migrates the audio graph from patch `old` to patch `new`.
(defn patches->commands [old new]
  (let [a (->patch-ast old)
        b (->patch-ast new)
        updates (make-updates a b)]
    ;; expand every update, drop duplicates and unresolvable connects,
    ;; then order by command priority
    (vec (sort-updates-by-priority (filter cleanup-meaningless-ops (distinct (apply concat (mapv #(update->commands % a b) updates))))))))
parameters
nodes
TODO:!!! | (ns cljs-audio.updates
(:require [editscript.core :as e]
[cljs.core.match :refer [match]]))
(defn path-type [path patch]
(case path
[:group :cljs-audio.updates/out] :top
[:group :cljs-audio.updates/in] nil
(let [id (last path)]
(cond
(= id :cljs-audio.updates/out) :node
(= id :cljs-audio.updates/in) :node
(vector? id) (if (get-in patch (into (vec (butlast path)) [(first id)]))
:parameter
nil)
:else (let [thing (get-in patch path)]
(cond
(:type thing) :node
(:connections thing) :patch
:else (println "Can't parse " thing path patch)))))))
(defn resolve-from [path root-patch]
(let [type (path-type path root-patch)]
(case type
nil nil
:node path
:top nil
:patch (into path [:group ::out])
)
))
(defn resolve-to [path root-patch]
(let [type (path-type path root-patch)]
(case type
nil nil
:node path
:top [:ctx]
:patch (into path [:group ::in])
:parameter path
)
))
(defn schedule [path name [command & args]]
[:schedule path name command (vec args)])
(defn set-parameter [path name value]
(case name
:start [[:start path value]]
:stop [[:stop path value]]
(if (vector? value)
(into [] (mapv (fn [value] (schedule path name value)) value))
[[:set path name value]])))
(defn make-set-params [path params]
(into [] (mapcat (fn [[name value]] (set-parameter path name value)) (vec params))))
(defn make-add-node [path type create-args]
[[:add-node path type (or create-args [])]])
(defn make-connect [connection-path patch]
(let [parent-patch-path (vec (drop-last 2 connection-path))
[from-id to-id] (last connection-path)]
(let [from-path (into parent-patch-path [:group from-id])
from (resolve-from from-path patch)]
(if (vector? to-id)
(let [[to-id parameter-name] to-id
to-path (into parent-patch-path [:group to-id])
to (resolve-to to-path patch)]
[(into [:connect-parameter] [from to parameter-name])])
(let [to-path (into parent-patch-path [:group to-id])
to (resolve-to to-path patch)]
[(into [:connect] [from to])])))))
(defn find-node-connections [node-path patch]
(let [node-id (last node-path)
all-connections (get-in patch (into (vec (drop-last 2 node-path)) [:connections]))
node-connections (filter (fn [[from to]] (or (= from node-id)
(= to node-id)))
(into [] all-connections))]
(mapv (fn [id] (into (vec (drop-last 2 node-path)) [:connections id])) node-connections)))
(defn add-node [path patch]
(let [node (get-in patch path)
{:keys [type parameters create-args]} node]
(into (into (make-add-node path type create-args)
(make-set-params path parameters))
(let [node-connections-paths (find-node-connections path patch)]
(vec (mapcat (fn [path] (make-connect path patch)) node-connections-paths))))))
(defn make-remove-node [path]
[[:disconnect path]
[:remove-node path]])
(defn add-group [path patch]
(let [group (get-in patch path)
in-path (into path [::in])
out-path (into path [::out])
patch (-> patch
(assoc-in in-path {:type :gain :parameters {} :create-args []})
(assoc-in out-path {:type :gain :parameters {} :create-args []}))
create-in-out-commands (if (= path [:group])
[]
(into (add-node in-path patch)
(add-node out-path patch)))]
(into
create-in-out-commands
(mapcat (fn [[id node]]
(cond
(:type node) (add-node (into path [id]) patch)
:else (add-group (into path [id :group]) patch)))
(into [] group)))))
(defn remove-group [path patch]
(let [group (get-in patch path)
make-remove-in-out (if (= path [:group])
[]
(into (make-remove-node (into path [::in]))
(make-remove-node (into path [::out]))))]
(if (empty? group)
[]
(into make-remove-in-out
(mapcat (fn [[id node]]
(cond
(:type node) (make-remove-node (into path [id]))
(:group node) (remove-group (into path [id :group]) patch)
:else (println "ERROR" path node)))
(into group))))))
(declare ->patch-ast)
(defn edit->update [edit]
(let [[path op v] edit]
(match [[(into [] (reverse path)) op v]]
[[[:connections & r] :r #{}]] #js [:remove-all-connections path]
[[[:connections & r] :r v]] #js [:replace-all-connections path]
[[[_ :connections & r] :+ v]] #js [:add-connection path]
[[[_ :connections & r] :- v]] #js [:remove-connection path]
[[[:start :parameters & r] :r v]] #js [:start path]
[[[:start :parameters & r] :+ v]] #js [:start path]
[[[:stop :parameters & r] :r v]] #js [:stop path]
[[[:stop :parameters & r] :+ v]] #js [:stop path]
[[[_ :parameters & r] :r v]] #js [:replace-parameter path]
[[[_ _ :parameters & r] :r v]] #js [:replace-parameter (drop-last 1 path)]
[[[_ _ _ :parameters & r] :r v]] #js [:replace-parameter (drop-last 2 path)]
[[[_ :parameters & r] :+ v]] #js [:set-parameter path]
[[[_ _ :parameters & r] :+ v]] #js [:set-parameter (drop-last 1 path)]
[[[_ _ _ :parameters & r] :+ v]] #js [:set-parameter (drop-last 2 path)]
[[[_ :parameters & r] :- v]] #js [:no-op]
[[[_ _ :parameters & r] :- v]] #js [:no-op]
[[[_ _ _ :parameters & r] :- v]] #js [:no-op]
[[[_ :group & r] :r v]] #js [:replace-node path]
[[[_ :group & r] :+ v]] #js [:add-node path]
[[[:group & r] :r v]] #js [:replace-group path]
[[[:group & r] :+ v]] #js [:replace-group path]
)))
(defn make-updates [a b]
(let [edits (e/get-edits (e/diff a b) {:algo :quick})]
(to-array (mapv (fn [edit] (edit->update edit)) edits))))
(defn make-disconnect [connection-path patch]
(let [parent-patch-path (vec (drop-last 2 connection-path))
nodes (last connection-path)
[from to] (mapv (fn [node] (into parent-patch-path [:group node])) nodes)]
[(into [:disconnect] [(resolve-from from patch) (resolve-to to patch)])]))
(defn remove-all-connections [path patch]
(let [connections-path (vec (drop-last path))
connections (get-in patch path)]
(into [] (mapcat (fn [connection] (make-disconnect (into connections-path [connection]) patch)) connections))))
(defn add-all-connections [path patch]
(let [connections-path (vec (drop-last path))
connections (get-in patch path)]
(into [] (mapcat (fn [connection] (make-connect (into connections-path [:connections connection]) patch)) connections))))
(defn replace-node [path old-patch new-patch]
(into
(let [node (get-in old-patch path)]
(cond
(:type node) (make-remove-node path)
:else (remove-group (into path [:group]) old-patch)))
(let [node (get-in new-patch path)]
(cond
(:type node) (add-node path new-patch)
:else (add-group (into path [:group]) new-patch)))))
(defn update->commands [[name path] old-patch new-patch]
(case name
:add-connection (make-connect path new-patch)
:remove-connection (make-disconnect path old-patch)
:remove-all-connections (remove-all-connections path old-patch)
:replace-all-connections (concat (remove-all-connections path old-patch)
(add-all-connections path new-patch))
:set-parameters (let [node-path (vec (drop-last 2 path))]
(vec (mapcat (fn [name value]
(set-parameter node-path name value))
(get-in new-patch path))))
:set-parameter (let [value (get-in new-patch path)
parameter-name (last path)
node-path (vec (drop-last 2 path))]
(set-parameter node-path parameter-name value))
:replace-parameter (let [value (get-in new-patch path)
parameter-name (last path)
node-path (vec (drop-last 2 path))]
(set-parameter node-path parameter-name value))
:replace-group (into (remove-group path old-patch) (add-group path new-patch))
:add-node (let [node (get-in new-patch path)]
(cond
(:type node) (add-node path new-patch)
:else (add-group (into path [:group]) new-patch)))
:replace-node (replace-node path old-patch new-patch)
:start (let [value (get-in new-patch path)
node-path (vec (drop-last 2 path))]
(into (replace-node node-path old-patch new-patch)
[[:start node-path value]]))
:stop (let [value (get-in new-patch path)
node-path (vec (drop-last 2 path))]
[[:stop node-path value]])
:no-op []
))
(defn ->node-ast [[type parameters create-args]] {:type type :parameters parameters :create-args create-args})
(defn ->group-ast [group] (into {} (mapv (fn [[id node]] (if (keyword? (first node))
[id (->node-ast node)]
[id (->patch-ast node)]
)) (into [] group))))
(defn ->connections-ast [connections] (set (mapv (fn [[from-id to-id]] [(if (= from-id :>) ::in from-id)
(if (= to-id :>) ::out to-id)])
(into [] connections))))
(defn ->ports-ast [ports] ports)
(defn ->patch-ast [patch]
(let [[group connections ports] patch]
{:group (->group-ast group)
:connections (->connections-ast connections)
:ports (->ports-ast ports)}))
(defn cleanup-meaningless-ops [[name a1 a2]]
(not (and (= name :connect) (or (= a1 []) (= a2 [])))))
(def priorities (into {} (map-indexed
(fn [ndx command] [command ndx])
[:stop
:disconnect
:remove-node
:add-node
:start
:set
:connect
:connect-parameter
:schedule])))
(defn sort-updates-by-priority [updates]
(sort-by (fn [thing]
(let [[update-name] thing]
(update-name
priorities)))
updates))
(defn patches->commands [old new]
(let [a (->patch-ast old)
b (->patch-ast new)
updates (make-updates a b)]
(vec (sort-updates-by-priority (filter cleanup-meaningless-ops (distinct (apply concat (mapv #(update->commands % a b) updates)))))))) |
e56db8f5532613583d4bf6387aa88e58bc66bca2bd42aab0640d10c48af282b5 | igorhvr/bedlam | r4rsyn.scm | " r4rsyn.scm " R4RS syntax -*-Scheme-*-
Copyright ( c ) 1989 - 91 Massachusetts Institute of Technology
;;;
;;; This material was developed by the Scheme project at the
Massachusetts Institute of Technology , Department of Electrical
Engineering and Computer Science . Permission to copy and modify
;;; this software, to redistribute either the original software or a
;;; modified version, and to use this software for any purpose is
;;; granted, subject to the following restrictions and understandings.
;;;
1 . Any copy made of this software must include this copyright
;;; notice in full.
;;;
2 . Users of this software agree to make their best efforts ( a ) to
return to the MIT Scheme project any improvements or extensions
;;; that they make, so that these may be included in future releases;
and ( b ) to inform MIT of noteworthy uses of this software .
;;;
3 . All materials developed as a consequence of the use of this
;;; software shall duly acknowledge such use, in accordance with the
;;; usual standards of acknowledging credit in academic research.
;;;
4 . MIT has made no warranty or representation that the operation
of this software will be error - free , and MIT is under no
;;; obligation to provide any services, by way of maintenance, update,
;;; or otherwise.
;;;
5 . In conjunction with products arising from the use of this
material , there shall be no use of the name of the Massachusetts
Institute of Technology nor of any adaptation thereof in any
;;; advertising, promotional, or sales literature without prior
written consent from MIT in each case .
;;;; R4RS Syntax
;;; Global syntactic environment implementing full R4RS syntax.
;;; #F until INITIALIZE-SCHEME-SYNTACTIC-ENVIRONMENT! has run.
(define scheme-syntactic-environment #f)

;;; Builds the R4RS environment by composing the component macrologies
;;; onto the root syntactic environment.  The keyword binders
;;; (LET-SYNTAX etc.) use SYNTACTIC-BINDING-THEORY; the value binders
;;; (LET etc.) use VARIABLE-BINDING-THEORY.
(define (initialize-scheme-syntactic-environment!)
  (set! scheme-syntactic-environment
        ((compose-macrologies
          (make-core-primitive-macrology)
          (make-binding-macrology syntactic-binding-theory
                                  'LET-SYNTAX 'LETREC-SYNTAX 'DEFINE-SYNTAX)
          (make-binding-macrology variable-binding-theory
                                  'LET 'LETREC 'DEFINE)
          (make-r4rs-primitive-macrology)
          (make-core-expander-macrology)
          (make-syntax-rules-macrology))
         root-syntactic-environment)))
Core Primitives
;;; Macrology for the core special forms BEGIN, DELAY, IF and QUOTE.
;;; BEGIN is a classifier so internal definitions can propagate into
;;; the surrounding definition environment; the rest are compilers.
(define (make-core-primitive-macrology)
  (make-primitive-macrology
   (lambda (define-classifier define-compiler)
     (define-classifier 'BEGIN
       (lambda (form environment definition-environment)
         (syntax-check '(KEYWORD * FORM) form)
         (make-body-item (classify/subforms (cdr form)
                                            environment
                                            definition-environment))))
     (define-compiler 'DELAY
       (lambda (form environment)
         (syntax-check '(KEYWORD EXPRESSION) form)
         (output/delay
          (compile/subexpression (cadr form)
                                 environment))))
     (define-compiler 'IF
       (lambda (form environment)
         (syntax-check '(KEYWORD EXPRESSION EXPRESSION ? EXPRESSION) form)
         (output/conditional
          (compile/subexpression (cadr form) environment)
          (compile/subexpression (caddr form) environment)
          ;; a missing alternative compiles to an unspecific value
          (if (null? (cdddr form))
              (output/unspecific)
              (compile/subexpression (cadddr form)
                                     environment)))))
     (define-compiler 'QUOTE
       (lambda (form environment)
         environment                    ;ignore
         (syntax-check '(KEYWORD DATUM) form)
         ;; syntactic closures in the datum are stripped to raw s-exprs
         (output/literal-quoted (strip-syntactic-closures (cadr form))))))))
;;;; Bindings
;;; Builds a macrology for one binding theory.  Instantiated twice by
;;; the initializer: with SYNTACTIC-BINDING-THEORY for LET-SYNTAX /
;;; LETREC-SYNTAX / DEFINE-SYNTAX, and with VARIABLE-BINDING-THEORY for
;;; LET / LETREC / DEFINE.
(define (make-binding-macrology binding-theory
                                let-keyword letrec-keyword define-keyword)
  (make-primitive-macrology
   (lambda (define-classifier define-compiler)
     (let ((pattern/let-like
            '(KEYWORD (* (IDENTIFIER EXPRESSION)) + FORM))
           ;; Shared LET/LETREC compiler; the two differ only in the
           ;; environment the initializers are classified in and the
           ;; output constructor used.
           (compile/let-like
            (lambda (form environment body-environment output/let)
              ;; Force evaluation order.
              (let ((bindings
                     (let loop
                         ((bindings
                           (map (lambda (binding)
                                  (cons (car binding)
                                        (classify/subexpression
                                         (cadr binding)
                                         environment)))
                                (cadr form))))
                       (if (null? bindings)
                           '()
                           ;; the binding theory may reject a binding
                           ;; (returning #F), in which case it is dropped
                           (let ((binding
                                  (binding-theory body-environment
                                                  (caar bindings)
                                                  (cdar bindings))))
                             (if binding
                                 (cons binding (loop (cdr bindings)))
                                 (loop (cdr bindings))))))))
                (output/let (map car bindings)
                            (map (lambda (binding)
                                   (compile-item/expression (cdr binding)))
                                 bindings)
                            (compile-item/expression
                             (classify/body (cddr form)
                                            body-environment)))))))
       (define-compiler let-keyword
         (lambda (form environment)
           (syntax-check pattern/let-like form)
           ;; LET: initializers see the OUTER environment
           (compile/let-like form
                             environment
                             (internal-syntactic-environment environment)
                             output/let)))
       (define-compiler letrec-keyword
         (lambda (form environment)
           (syntax-check pattern/let-like form)
           ;; LETREC: names are reserved first, then initializers are
           ;; classified in the INNER environment
           (let ((environment (internal-syntactic-environment environment)))
             (reserve-names! (map car (cadr form)) environment)
             (compile/let-like form
                               environment
                               environment
                               output/letrec)))))
     (define-classifier define-keyword
       (lambda (form environment definition-environment)
         (syntax-check '(KEYWORD IDENTIFIER EXPRESSION) form)
         ;; reserve the name immediately; the value is classified lazily
         ;; via a promise so forward references inside bodies work
         (syntactic-environment/define! definition-environment
                                        (cadr form)
                                        (make-reserved-name-item))
         (make-definition-item binding-theory
                               (cadr form)
                               (make-promise
                                (lambda ()
                                  (classify/subexpression
                                   (caddr form)
                                   environment)))))))))
;;;; Bodies
;;; Classifies a lambda/let body: scans forms in order, accumulating
;;; internal definitions into bindings, and on reaching the first
;;; non-definition wraps the remainder in an output LETREC over those
;;; bindings.  A body with no expressions is a syntax error.
(define (classify/body forms environment)
  (let ((environment (internal-syntactic-environment environment)))
    (let forms-loop
        ((forms forms)
         (bindings '()))
      (if (null? forms)
          (syntax-error "no expressions in body"
                        "")
          (let items-loop
              ((items
                (item->list
                 (classify/subform (car forms)
                                   environment
                                   environment)))
               (bindings bindings))
            (cond ((null? items)
                   (forms-loop (cdr forms)
                               bindings))
                  ((definition-item? (car items))
                   (items-loop (cdr items)
                               ;; a rejected binding (#F) is dropped
                               (let ((binding
                                      (bind-definition-item! environment
                                                             (car items))))
                                 (if binding
                                     (cons binding bindings)
                                     bindings))))
                  (else
                   ;; first expression reached: everything from here on
                   ;; is the body proper
                   (let ((body
                          (make-body-item
                           (append items
                                   (flatten-body-items
                                    (classify/subforms
                                     (cdr forms)
                                     environment
                                     environment))))))
                     (make-expression-item
                      (lambda ()
                        ;; NOTE: bindings are accumulated in reverse
                        ;; source order here
                        (output/letrec
                         (map car bindings)
                         (map (lambda (binding)
                                (compile-item/expression (cdr binding)))
                              bindings)
                         (compile-item/expression body)))
                      forms)))))))))
R4RS Primitives
;;; Macrology for TRANSFORMER / ER-TRANSFORMER (macro-defining forms),
;;; LAMBDA and SET!.
(define (make-r4rs-primitive-macrology)
  (make-primitive-macrology
   (lambda (define-classifier define-compiler)
     ;; Shared classifier maker: compiles and evaluates the transformer
     ;; expression (in the full Scheme syntactic environment), checks it
     ;; is a procedure, and wraps it as a keyword item.
     (define (transformer-keyword expander->classifier)
       (lambda (form environment definition-environment)
         definition-environment         ;ignore
         (syntax-check '(KEYWORD EXPRESSION) form)
         (let ((item
                (classify/subexpression (cadr form)
                                        scheme-syntactic-environment)))
           (let ((transformer (base:eval (compile-item/expression item))))
             (if (procedure? transformer)
                 (make-keyword-item
                  (expander->classifier transformer environment) item)
                 (syntax-error "transformer not a procedure"
                               transformer))))))
     (define-classifier 'TRANSFORMER
       ;; "Syntactic Closures" transformer
       (transformer-keyword sc-expander->classifier))
     (define-classifier 'ER-TRANSFORMER
       ;; "Explicit Renaming" transformer
       (transformer-keyword er-expander->classifier))
     (define-compiler 'LAMBDA
       (lambda (form environment)
         (syntax-check '(KEYWORD R4RS-BVL + FORM) form)
         (let ((environment (internal-syntactic-environment environment)))
           ;; Force order -- bind names before classifying body.
           (let ((bvl-description
                  (let ((rename
                         (lambda (identifier)
                           (bind-variable! environment identifier))))
                    ;; walks proper and improper (rest-arg) lambda lists
                    (let loop ((bvl (cadr form)))
                      (cond ((null? bvl)
                             '())
                            ((pair? bvl)
                             (cons (rename (car bvl)) (loop (cdr bvl))))
                            (else
                             (rename bvl)))))))
             (output/lambda bvl-description
                            (compile-item/expression
                             (classify/body (cddr form)
                                            environment)))))))
     (define-compiler 'SET!
       (lambda (form environment)
         (syntax-check '(KEYWORD FORM EXPRESSION) form)
         (output/assignment
          ;; unwrap syntactic closures around the target until a plain
          ;; identifier naming a variable is found
          (let loop
              ((form (cadr form))
               (environment environment))
            (cond ((identifier? form)
                   (let ((item
                          (syntactic-environment/lookup environment form)))
                     (if (variable-item? item)
                         (variable-item/name item)
                         (slib:error "target of assignment not a variable"
                                     form))))
                  ((syntactic-closure? form)
                   (let ((form (syntactic-closure/form form))
                         (environment
                          (filter-syntactic-environment
                           (syntactic-closure/free-names form)
                           environment
                           (syntactic-closure/environment form))))
                     (loop form
                           environment)))
                  (else
                   (slib:error "target of assignment not an identifier"
                               form))))
          (compile/subexpression (caddr form)
                                 environment))))
     ;; end MAKE-R4RS-PRIMITIVE-MACROLOGY
     )))
Core Expanders
(define (make-core-expander-macrology)
(make-er-expander-macrology
(lambda (define-expander base-environment)
(let ((keyword (make-syntactic-closure base-environment '() 'DEFINE)))
(define-expander 'DEFINE
(lambda (form rename compare)
compare ;ignore
(if (syntax-match? '((IDENTIFIER . R4RS-BVL) + FORM) (cdr form))
`(,keyword ,(caadr form)
(,(rename 'LAMBDA) ,(cdadr form) ,@(cddr form)))
`(,keyword ,@(cdr form))))))
(let ((keyword (make-syntactic-closure base-environment '() 'LET)))
(define-expander 'LET
(lambda (form rename compare)
compare ;ignore
(if (syntax-match? '(IDENTIFIER (* (IDENTIFIER EXPRESSION)) + FORM)
(cdr form))
(let ((name (cadr form))
(bindings (caddr form)))
`((,(rename 'LETREC)
((,name (,(rename 'LAMBDA) ,(map car bindings) ,@(cdddr form))))
,name)
,@(map cadr bindings)))
`(,keyword ,@(cdr form))))))
(define-expander 'LET*
(lambda (form rename compare)
compare ;ignore
(if (syntax-match? '((* (IDENTIFIER EXPRESSION)) + FORM) (cdr form))
(let ((bindings (cadr form))
(body (cddr form))
(keyword (rename 'LET)))
(if (null? bindings)
`(,keyword ,bindings ,@body)
(let loop ((bindings bindings))
(if (null? (cdr bindings))
`(,keyword ,bindings ,@body)
`(,keyword (,(car bindings))
,(loop (cdr bindings)))))))
(ill-formed-syntax form))))
(define-expander 'AND
(lambda (form rename compare)
compare ;ignore
(if (syntax-match? '(* EXPRESSION) (cdr form))
(let ((operands (cdr form)))
(if (null? operands)
`#T
(let ((if-keyword (rename 'IF)))
(let loop ((operands operands))
(if (null? (cdr operands))
(car operands)
`(,if-keyword ,(car operands)
,(loop (cdr operands))
#F))))))
(ill-formed-syntax form))))
(define-expander 'OR
(lambda (form rename compare)
compare ;ignore
(if (syntax-match? '(* EXPRESSION) (cdr form))
(let ((operands (cdr form)))
(if (null? operands)
`#F
(let ((let-keyword (rename 'LET))
(if-keyword (rename 'IF))
(temp (rename 'TEMP)))
(let loop ((operands operands))
(if (null? (cdr operands))
(car operands)
`(,let-keyword ((,temp ,(car operands)))
(,if-keyword ,temp
,temp
,(loop (cdr operands)))))))))
(ill-formed-syntax form))))
(define-expander 'CASE
(lambda (form rename compare)
(if (syntax-match? '(EXPRESSION + (DATUM + EXPRESSION)) (cdr form))
(letrec
((process-clause
(lambda (clause rest)
(cond ((null? (car clause))
(process-rest rest))
((and (identifier? (car clause))
(compare (rename 'ELSE) (car clause))
(null? rest))
`(,(rename 'BEGIN) ,@(cdr clause)))
((list? (car clause))
`(,(rename 'IF) (,(rename 'MEMV) ,(rename 'TEMP)
',(car clause))
(,(rename 'BEGIN) ,@(cdr clause))
,(process-rest rest)))
(else
(syntax-error "ill-formed clause" clause)))))
(process-rest
(lambda (rest)
(if (null? rest)
(unspecific-expression)
(process-clause (car rest) (cdr rest))))))
`(,(rename 'LET) ((,(rename 'TEMP) ,(cadr form)))
,(process-clause (caddr form) (cdddr form))))
(ill-formed-syntax form))))
(define-expander 'COND
(lambda (form rename compare)
(letrec
((process-clause
(lambda (clause rest)
(cond
((or (not (list? clause))
(null? clause))
(syntax-error "ill-formed clause" clause))
((and (identifier? (car clause))
(compare (rename 'ELSE) (car clause)))
(cond
((or (null? (cdr clause))
(and (identifier? (cadr clause))
(compare (rename '=>) (cadr clause))))
(syntax-error "ill-formed ELSE clause" clause))
((not (null? rest))
(syntax-error "misplaced ELSE clause" clause))
(else
`(,(rename 'BEGIN) ,@(cdr clause)))))
((null? (cdr clause))
`(,(rename 'OR) ,(car clause) ,(process-rest rest)))
((and (identifier? (cadr clause))
(compare (rename '=>) (cadr clause)))
(if (and (pair? (cddr clause))
(null? (cdddr clause)))
`(,(rename 'LET)
((,(rename 'TEMP) ,(car clause)))
(,(rename 'IF) ,(rename 'TEMP)
(,(caddr clause) ,(rename 'TEMP))
,(process-rest rest)))
(syntax-error "ill-formed => clause" clause)))
(else
`(,(rename 'IF) ,(car clause)
(,(rename 'BEGIN) ,@(cdr clause))
,(process-rest rest))))))
(process-rest
(lambda (rest)
(if (null? rest)
(unspecific-expression)
(process-clause (car rest) (cdr rest))))))
(let ((clauses (cdr form)))
(if (null? clauses)
(syntax-error "no clauses" form)
(process-clause (car clauses) (cdr clauses)))))))
(define-expander 'DO
(lambda (form rename compare)
compare ;ignore
(if (syntax-match? '((* (IDENTIFIER EXPRESSION ? EXPRESSION))
(+ EXPRESSION)
* FORM)
(cdr form))
(let ((bindings (cadr form)))
`(,(rename 'LETREC)
((,(rename 'DO-LOOP)
(,(rename 'LAMBDA)
,(map car bindings)
(,(rename 'IF) ,(caaddr form)
,(if (null? (cdaddr form))
(unspecific-expression)
`(,(rename 'BEGIN) ,@(cdaddr form)))
(,(rename 'BEGIN)
,@(cdddr form)
(,(rename 'DO-LOOP)
,@(map (lambda (binding)
(if (null? (cddr binding))
(car binding)
(caddr binding)))
bindings)))))))
(,(rename 'DO-LOOP) ,@(map cadr bindings))))
(ill-formed-syntax form))))
(define-expander 'QUASIQUOTE
(lambda (form rename compare)
(define (descend-quasiquote x level return)
(cond ((pair? x) (descend-quasiquote-pair x level return))
((vector? x) (descend-quasiquote-vector x level return))
(else (return 'QUOTE x))))
(define (descend-quasiquote-pair x level return)
(cond ((not (and (pair? x)
(identifier? (car x))
(pair? (cdr x))
(null? (cddr x))))
(descend-quasiquote-pair* x level return))
((compare (rename 'QUASIQUOTE) (car x))
(descend-quasiquote-pair* x (+ level 1) return))
((compare (rename 'UNQUOTE) (car x))
(if (zero? level)
(return 'UNQUOTE (cadr x))
(descend-quasiquote-pair* x (- level 1) return)))
((compare (rename 'UNQUOTE-SPLICING) (car x))
(if (zero? level)
(return 'UNQUOTE-SPLICING (cadr x))
(descend-quasiquote-pair* x (- level 1) return)))
(else
(descend-quasiquote-pair* x level return))))
(define (descend-quasiquote-pair* x level return)
(descend-quasiquote
(car x) level
(lambda (car-mode car-arg)
(descend-quasiquote
(cdr x) level
(lambda (cdr-mode cdr-arg)
(cond ((and (eq? car-mode 'QUOTE) (eq? cdr-mode 'QUOTE))
(return 'QUOTE x))
((eq? car-mode 'UNQUOTE-SPLICING)
(if (and (eq? cdr-mode 'QUOTE) (null? cdr-arg))
(return 'UNQUOTE car-arg)
(return 'APPEND
(list car-arg
(finalize-quasiquote cdr-mode
cdr-arg)))))
((and (eq? cdr-mode 'QUOTE) (list? cdr-arg))
(return 'LIST
(cons (finalize-quasiquote car-mode car-arg)
(map (lambda (element)
(finalize-quasiquote 'QUOTE
element))
cdr-arg))))
((eq? cdr-mode 'LIST)
(return 'LIST
(cons (finalize-quasiquote car-mode car-arg)
cdr-arg)))
(else
(return
'CONS
(list (finalize-quasiquote car-mode car-arg)
(finalize-quasiquote cdr-mode cdr-arg))))))))))
(define (descend-quasiquote-vector x level return)
(descend-quasiquote
(vector->list x) level
(lambda (mode arg)
(case mode
((QUOTE) (return 'QUOTE x))
((LIST) (return 'VECTOR arg))
(else
(return 'LIST->VECTOR
(list (finalize-quasiquote mode arg))))))))
(define (finalize-quasiquote mode arg)
(case mode
((QUOTE) `(,(rename 'QUOTE) ,arg))
((UNQUOTE) arg)
((UNQUOTE-SPLICING) (syntax-error ",@ in illegal context" arg))
(else `(,(rename mode) ,@arg))))
(if (syntax-match? '(EXPRESSION) (cdr form))
(descend-quasiquote (cadr form) 0 finalize-quasiquote)
(ill-formed-syntax form))))
;;; end MAKE-CORE-EXPANDER-MACROLOGY
)))
| null | https://raw.githubusercontent.com/igorhvr/bedlam/b62e0d047105bb0473bdb47c58b23f6ca0f79a4e/iasylum/slib/3b2/r4rsyn.scm | scheme |
This material was developed by the Scheme project at the
this software, to redistribute either the original software or a
modified version, and to use this software for any purpose is
granted, subject to the following restrictions and understandings.
notice in full.
that they make, so that these may be included in future releases;
software shall duly acknowledge such use, in accordance with the
usual standards of acknowledging credit in academic research.
obligation to provide any services, by way of maintenance, update,
or otherwise.
advertising, promotional, or sales literature without prior
R4RS Syntax
ignore
Bindings
Bodies
ignore
"Syntactic Closures" transformer
"Explicit Renaming" transformer
end MAKE-R4RS-PRIMITIVE-MACROLOGY
ignore
ignore
ignore
ignore
ignore
ignore
end MAKE-CORE-EXPANDER-MACROLOGY | " r4rsyn.scm " R4RS syntax -*-Scheme-*-
Copyright ( c ) 1989 - 91 Massachusetts Institute of Technology
Massachusetts Institute of Technology , Department of Electrical
Engineering and Computer Science . Permission to copy and modify
1 . Any copy made of this software must include this copyright
2 . Users of this software agree to make their best efforts ( a ) to
return to the MIT Scheme project any improvements or extensions
and ( b ) to inform MIT of noteworthy uses of this software .
3 . All materials developed as a consequence of the use of this
4 . MIT has made no warranty or representation that the operation
of this software will be error - free , and MIT is under no
5 . In conjunction with products arising from the use of this
material , there shall be no use of the name of the Massachusetts
Institute of Technology nor of any adaptation thereof in any
written consent from MIT in each case .
(define scheme-syntactic-environment #f)
(define (initialize-scheme-syntactic-environment!)
(set! scheme-syntactic-environment
((compose-macrologies
(make-core-primitive-macrology)
(make-binding-macrology syntactic-binding-theory
'LET-SYNTAX 'LETREC-SYNTAX 'DEFINE-SYNTAX)
(make-binding-macrology variable-binding-theory
'LET 'LETREC 'DEFINE)
(make-r4rs-primitive-macrology)
(make-core-expander-macrology)
(make-syntax-rules-macrology))
root-syntactic-environment)))
Core Primitives
(define (make-core-primitive-macrology)
(make-primitive-macrology
(lambda (define-classifier define-compiler)
(define-classifier 'BEGIN
(lambda (form environment definition-environment)
(syntax-check '(KEYWORD * FORM) form)
(make-body-item (classify/subforms (cdr form)
environment
definition-environment))))
(define-compiler 'DELAY
(lambda (form environment)
(syntax-check '(KEYWORD EXPRESSION) form)
(output/delay
(compile/subexpression (cadr form)
environment))))
(define-compiler 'IF
(lambda (form environment)
(syntax-check '(KEYWORD EXPRESSION EXPRESSION ? EXPRESSION) form)
(output/conditional
(compile/subexpression (cadr form) environment)
(compile/subexpression (caddr form) environment)
(if (null? (cdddr form))
(output/unspecific)
(compile/subexpression (cadddr form)
environment)))))
(define-compiler 'QUOTE
(lambda (form environment)
(syntax-check '(KEYWORD DATUM) form)
(output/literal-quoted (strip-syntactic-closures (cadr form))))))))
(define (make-binding-macrology binding-theory
let-keyword letrec-keyword define-keyword)
(make-primitive-macrology
(lambda (define-classifier define-compiler)
(let ((pattern/let-like
'(KEYWORD (* (IDENTIFIER EXPRESSION)) + FORM))
(compile/let-like
(lambda (form environment body-environment output/let)
Force evaluation order .
(let ((bindings
(let loop
((bindings
(map (lambda (binding)
(cons (car binding)
(classify/subexpression
(cadr binding)
environment)))
(cadr form))))
(if (null? bindings)
'()
(let ((binding
(binding-theory body-environment
(caar bindings)
(cdar bindings))))
(if binding
(cons binding (loop (cdr bindings)))
(loop (cdr bindings))))))))
(output/let (map car bindings)
(map (lambda (binding)
(compile-item/expression (cdr binding)))
bindings)
(compile-item/expression
(classify/body (cddr form)
body-environment)))))))
(define-compiler let-keyword
(lambda (form environment)
(syntax-check pattern/let-like form)
(compile/let-like form
environment
(internal-syntactic-environment environment)
output/let)))
(define-compiler letrec-keyword
(lambda (form environment)
(syntax-check pattern/let-like form)
(let ((environment (internal-syntactic-environment environment)))
(reserve-names! (map car (cadr form)) environment)
(compile/let-like form
environment
environment
output/letrec)))))
(define-classifier define-keyword
(lambda (form environment definition-environment)
(syntax-check '(KEYWORD IDENTIFIER EXPRESSION) form)
(syntactic-environment/define! definition-environment
(cadr form)
(make-reserved-name-item))
(make-definition-item binding-theory
(cadr form)
(make-promise
(lambda ()
(classify/subexpression
(caddr form)
environment)))))))))
(define (classify/body forms environment)
(let ((environment (internal-syntactic-environment environment)))
(let forms-loop
((forms forms)
(bindings '()))
(if (null? forms)
(syntax-error "no expressions in body"
"")
(let items-loop
((items
(item->list
(classify/subform (car forms)
environment
environment)))
(bindings bindings))
(cond ((null? items)
(forms-loop (cdr forms)
bindings))
((definition-item? (car items))
(items-loop (cdr items)
(let ((binding
(bind-definition-item! environment
(car items))))
(if binding
(cons binding bindings)
bindings))))
(else
(let ((body
(make-body-item
(append items
(flatten-body-items
(classify/subforms
(cdr forms)
environment
environment))))))
(make-expression-item
(lambda ()
(output/letrec
(map car bindings)
(map (lambda (binding)
(compile-item/expression (cdr binding)))
bindings)
(compile-item/expression body))) forms)))))))))
R4RS Primitives
(define (make-r4rs-primitive-macrology)
(make-primitive-macrology
(lambda (define-classifier define-compiler)
(define (transformer-keyword expander->classifier)
(lambda (form environment definition-environment)
(syntax-check '(KEYWORD EXPRESSION) form)
(let ((item
(classify/subexpression (cadr form)
scheme-syntactic-environment)))
(let ((transformer (base:eval (compile-item/expression item))))
(if (procedure? transformer)
(make-keyword-item
(expander->classifier transformer environment) item)
(syntax-error "transformer not a procedure"
transformer))))))
(define-classifier 'TRANSFORMER
(transformer-keyword sc-expander->classifier))
(define-classifier 'ER-TRANSFORMER
(transformer-keyword er-expander->classifier))
(define-compiler 'LAMBDA
(lambda (form environment)
(syntax-check '(KEYWORD R4RS-BVL + FORM) form)
(let ((environment (internal-syntactic-environment environment)))
Force order -- bind names before classifying body .
(let ((bvl-description
(let ((rename
(lambda (identifier)
(bind-variable! environment identifier))))
(let loop ((bvl (cadr form)))
(cond ((null? bvl)
'())
((pair? bvl)
(cons (rename (car bvl)) (loop (cdr bvl))))
(else
(rename bvl)))))))
(output/lambda bvl-description
(compile-item/expression
(classify/body (cddr form)
environment)))))))
(define-compiler 'SET!
(lambda (form environment)
(syntax-check '(KEYWORD FORM EXPRESSION) form)
(output/assignment
(let loop
((form (cadr form))
(environment environment))
(cond ((identifier? form)
(let ((item
(syntactic-environment/lookup environment form)))
(if (variable-item? item)
(variable-item/name item)
(slib:error "target of assignment not a variable"
form))))
((syntactic-closure? form)
(let ((form (syntactic-closure/form form))
(environment
(filter-syntactic-environment
(syntactic-closure/free-names form)
environment
(syntactic-closure/environment form))))
(loop form
environment)))
(else
(slib:error "target of assignment not an identifier"
form))))
(compile/subexpression (caddr form)
environment))))
)))
Core Expanders
(define (make-core-expander-macrology)
(make-er-expander-macrology
(lambda (define-expander base-environment)
(let ((keyword (make-syntactic-closure base-environment '() 'DEFINE)))
(define-expander 'DEFINE
(lambda (form rename compare)
(if (syntax-match? '((IDENTIFIER . R4RS-BVL) + FORM) (cdr form))
`(,keyword ,(caadr form)
(,(rename 'LAMBDA) ,(cdadr form) ,@(cddr form)))
`(,keyword ,@(cdr form))))))
(let ((keyword (make-syntactic-closure base-environment '() 'LET)))
(define-expander 'LET
(lambda (form rename compare)
(if (syntax-match? '(IDENTIFIER (* (IDENTIFIER EXPRESSION)) + FORM)
(cdr form))
(let ((name (cadr form))
(bindings (caddr form)))
`((,(rename 'LETREC)
((,name (,(rename 'LAMBDA) ,(map car bindings) ,@(cdddr form))))
,name)
,@(map cadr bindings)))
`(,keyword ,@(cdr form))))))
(define-expander 'LET*
(lambda (form rename compare)
(if (syntax-match? '((* (IDENTIFIER EXPRESSION)) + FORM) (cdr form))
(let ((bindings (cadr form))
(body (cddr form))
(keyword (rename 'LET)))
(if (null? bindings)
`(,keyword ,bindings ,@body)
(let loop ((bindings bindings))
(if (null? (cdr bindings))
`(,keyword ,bindings ,@body)
`(,keyword (,(car bindings))
,(loop (cdr bindings)))))))
(ill-formed-syntax form))))
(define-expander 'AND
(lambda (form rename compare)
(if (syntax-match? '(* EXPRESSION) (cdr form))
(let ((operands (cdr form)))
(if (null? operands)
`#T
(let ((if-keyword (rename 'IF)))
(let loop ((operands operands))
(if (null? (cdr operands))
(car operands)
`(,if-keyword ,(car operands)
,(loop (cdr operands))
#F))))))
(ill-formed-syntax form))))
(define-expander 'OR
(lambda (form rename compare)
(if (syntax-match? '(* EXPRESSION) (cdr form))
(let ((operands (cdr form)))
(if (null? operands)
`#F
(let ((let-keyword (rename 'LET))
(if-keyword (rename 'IF))
(temp (rename 'TEMP)))
(let loop ((operands operands))
(if (null? (cdr operands))
(car operands)
`(,let-keyword ((,temp ,(car operands)))
(,if-keyword ,temp
,temp
,(loop (cdr operands)))))))))
(ill-formed-syntax form))))
(define-expander 'CASE
(lambda (form rename compare)
(if (syntax-match? '(EXPRESSION + (DATUM + EXPRESSION)) (cdr form))
(letrec
((process-clause
(lambda (clause rest)
(cond ((null? (car clause))
(process-rest rest))
((and (identifier? (car clause))
(compare (rename 'ELSE) (car clause))
(null? rest))
`(,(rename 'BEGIN) ,@(cdr clause)))
((list? (car clause))
`(,(rename 'IF) (,(rename 'MEMV) ,(rename 'TEMP)
',(car clause))
(,(rename 'BEGIN) ,@(cdr clause))
,(process-rest rest)))
(else
(syntax-error "ill-formed clause" clause)))))
(process-rest
(lambda (rest)
(if (null? rest)
(unspecific-expression)
(process-clause (car rest) (cdr rest))))))
`(,(rename 'LET) ((,(rename 'TEMP) ,(cadr form)))
,(process-clause (caddr form) (cdddr form))))
(ill-formed-syntax form))))
(define-expander 'COND
(lambda (form rename compare)
(letrec
((process-clause
(lambda (clause rest)
(cond
((or (not (list? clause))
(null? clause))
(syntax-error "ill-formed clause" clause))
((and (identifier? (car clause))
(compare (rename 'ELSE) (car clause)))
(cond
((or (null? (cdr clause))
(and (identifier? (cadr clause))
(compare (rename '=>) (cadr clause))))
(syntax-error "ill-formed ELSE clause" clause))
((not (null? rest))
(syntax-error "misplaced ELSE clause" clause))
(else
`(,(rename 'BEGIN) ,@(cdr clause)))))
((null? (cdr clause))
`(,(rename 'OR) ,(car clause) ,(process-rest rest)))
((and (identifier? (cadr clause))
(compare (rename '=>) (cadr clause)))
(if (and (pair? (cddr clause))
(null? (cdddr clause)))
`(,(rename 'LET)
((,(rename 'TEMP) ,(car clause)))
(,(rename 'IF) ,(rename 'TEMP)
(,(caddr clause) ,(rename 'TEMP))
,(process-rest rest)))
(syntax-error "ill-formed => clause" clause)))
(else
`(,(rename 'IF) ,(car clause)
(,(rename 'BEGIN) ,@(cdr clause))
,(process-rest rest))))))
(process-rest
(lambda (rest)
(if (null? rest)
(unspecific-expression)
(process-clause (car rest) (cdr rest))))))
(let ((clauses (cdr form)))
(if (null? clauses)
(syntax-error "no clauses" form)
(process-clause (car clauses) (cdr clauses)))))))
(define-expander 'DO
(lambda (form rename compare)
(if (syntax-match? '((* (IDENTIFIER EXPRESSION ? EXPRESSION))
(+ EXPRESSION)
* FORM)
(cdr form))
(let ((bindings (cadr form)))
`(,(rename 'LETREC)
((,(rename 'DO-LOOP)
(,(rename 'LAMBDA)
,(map car bindings)
(,(rename 'IF) ,(caaddr form)
,(if (null? (cdaddr form))
(unspecific-expression)
`(,(rename 'BEGIN) ,@(cdaddr form)))
(,(rename 'BEGIN)
,@(cdddr form)
(,(rename 'DO-LOOP)
,@(map (lambda (binding)
(if (null? (cddr binding))
(car binding)
(caddr binding)))
bindings)))))))
(,(rename 'DO-LOOP) ,@(map cadr bindings))))
(ill-formed-syntax form))))
(define-expander 'QUASIQUOTE
(lambda (form rename compare)
(define (descend-quasiquote x level return)
(cond ((pair? x) (descend-quasiquote-pair x level return))
((vector? x) (descend-quasiquote-vector x level return))
(else (return 'QUOTE x))))
(define (descend-quasiquote-pair x level return)
(cond ((not (and (pair? x)
(identifier? (car x))
(pair? (cdr x))
(null? (cddr x))))
(descend-quasiquote-pair* x level return))
((compare (rename 'QUASIQUOTE) (car x))
(descend-quasiquote-pair* x (+ level 1) return))
((compare (rename 'UNQUOTE) (car x))
(if (zero? level)
(return 'UNQUOTE (cadr x))
(descend-quasiquote-pair* x (- level 1) return)))
((compare (rename 'UNQUOTE-SPLICING) (car x))
(if (zero? level)
(return 'UNQUOTE-SPLICING (cadr x))
(descend-quasiquote-pair* x (- level 1) return)))
(else
(descend-quasiquote-pair* x level return))))
(define (descend-quasiquote-pair* x level return)
(descend-quasiquote
(car x) level
(lambda (car-mode car-arg)
(descend-quasiquote
(cdr x) level
(lambda (cdr-mode cdr-arg)
(cond ((and (eq? car-mode 'QUOTE) (eq? cdr-mode 'QUOTE))
(return 'QUOTE x))
((eq? car-mode 'UNQUOTE-SPLICING)
(if (and (eq? cdr-mode 'QUOTE) (null? cdr-arg))
(return 'UNQUOTE car-arg)
(return 'APPEND
(list car-arg
(finalize-quasiquote cdr-mode
cdr-arg)))))
((and (eq? cdr-mode 'QUOTE) (list? cdr-arg))
(return 'LIST
(cons (finalize-quasiquote car-mode car-arg)
(map (lambda (element)
(finalize-quasiquote 'QUOTE
element))
cdr-arg))))
((eq? cdr-mode 'LIST)
(return 'LIST
(cons (finalize-quasiquote car-mode car-arg)
cdr-arg)))
(else
(return
'CONS
(list (finalize-quasiquote car-mode car-arg)
(finalize-quasiquote cdr-mode cdr-arg))))))))))
(define (descend-quasiquote-vector x level return)
(descend-quasiquote
(vector->list x) level
(lambda (mode arg)
(case mode
((QUOTE) (return 'QUOTE x))
((LIST) (return 'VECTOR arg))
(else
(return 'LIST->VECTOR
(list (finalize-quasiquote mode arg))))))))
(define (finalize-quasiquote mode arg)
(case mode
((QUOTE) `(,(rename 'QUOTE) ,arg))
((UNQUOTE) arg)
((UNQUOTE-SPLICING) (syntax-error ",@ in illegal context" arg))
(else `(,(rename mode) ,@arg))))
(if (syntax-match? '(EXPRESSION) (cdr form))
(descend-quasiquote (cadr form) 0 finalize-quasiquote)
(ill-formed-syntax form))))
)))
|
c8a5a91a77bb5727468ed1565d88d94f5c745e684313e5c9cc635152714495e9 | faylang/fay | doAssingPatternMatch.hs | module DoAssingPatternMatch where
main :: Fay ()
main = do
[1,2] <- return [1,2]
putStrLn "OK."
| null | https://raw.githubusercontent.com/faylang/fay/8455d975f9f0db2ecc922410e43e484fbd134699/tests/doAssingPatternMatch.hs | haskell | module DoAssingPatternMatch where
main :: Fay ()
main = do
[1,2] <- return [1,2]
putStrLn "OK."
| |
4aff73d8661f38acf2b2a0cbb5581d417b5ddebb6caa73403485d4c1224bb1e9 | mattmight/bib2sx | bib2sx.rkt | #lang racket
bib2sx : A Racket script for converting a bibtex .bib file to S - Expressions
Copyright ( C ) 2015
; This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
; (at your option) any later version.
; This program is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
; GNU General Public License for more details.
You should have received a copy of the GNU General Public License
; along with this program. If not, see </>.
; ISSUES:
; + Does not support @comment or @preamble
; TODO:
; + Add --tokenize flag to tokenize into strings meaningful to BibTeX/LaTeX:
; $, \<cmd>, <whitespace>, <word>
; + Add flag to choose name fields (currently author, editor)
; + Add support for @comment and @preamble
Output grammar :
; <bibtex-ast> ::= (<item> ...)
; <item> ::= (<item-type> <id> <attr> ...)
; <attr> ::= (<id> <expr> ...) ;
; | (<id> . <string>) ; if --flatten
; | (<id> <name> ...) ; if --parse-names
; <expr> ::= <string>
; | '<expr>
; | '(<expr> ...)
; | <id>
< name > : : = ( name ( first < expr > ... ) ; if --parse - names
( < expr > ... )
; (last <expr> ...)
( > ) )
; <item-type> ::= inproceedings | article | ...
(require "bibtex/main.rkt")
(require xml)
;; Handling command line options:
; <config>
(define bibtex-input-port (current-input-port))
(define bibtex-output-format 'sx)
(define bibtex-input-format 'bib)
(define inline? #f)
(define flatten? #f)
(define texenize? #f)
(define lex-only? #f)
(define parse-names? #f)
; </config>
(define (parse-options! args)
(match args
['()
(void)]
[(cons "--drracket" rest)
(set! bibtex-input-port (open-input-file "test/test.bib"))
(parse-options! rest)]
; choose the input format:
[`("--in" ,format . ,rest)
(set! bibtex-input-format (string->symbol format))
(parse-options! rest)]
; choose the output format:
[`("--out" ,format . ,rest)
(set! bibtex-output-format (string->symbol format))
(parse-options! rest)]
; just lexically analyze; don't parse:
[(cons "--lex" rest)
(set! lex-only? #t)
(parse-options! rest)]
; flatten values into a single string:
[(cons "--flatten" rest)
(set! flatten? #t)
(parse-options! rest)]
; parse names into a vector of vectors:
[(cons "--parse-names" rest)
(set! parse-names? #t)
(parse-options! rest)]
; inline all @string declarations:
[(cons "--inline" rest)
(set! inline? #t)
(parse-options! rest)]
tokenize strings into units meaningful to :
[(cons "--texenize" rest)
; useful if you want to render to a different output
format such as HTML , and you need ot interpret
(set! texenize? #t)
(parse-options! rest)]
; convert to a bibtex .bib file:
[(cons "--bib" rest)
(set! bibtex-output-format 'bib)
(parse-options! rest)]
; convert to JSON:
[(cons "--json" rest)
(set! bibtex-output-format 'json)
(parse-options! rest)]
; convert to JSON:
[(cons "--xml" rest)
(set! bibtex-output-format 'xml)
(parse-options! rest)]
; provide a filename to parse:
[(cons filename rest)
(set! bibtex-input-port (open-input-file filename))
(parse-options! rest)]
))
for testing in :
;(current-command-line-arguments #("--inline" "--flatten" "test/test.bib"))
(parse-options! (vector->list (current-command-line-arguments)))
; for debugging, allow looking at the lexical analyzers output:
(when lex-only?
(define tokens (bibtex-lexer bibtex-input-port))
(pretty-write (stream->list tokens))
(exit))
(define bibtex-ast
(match bibtex-input-format
['bib
(define token-generator (generate-token-generator bibtex-input-port))
(bibtex-parse token-generator)]
['sx
(read bibtex-input-port)]
[else
(error (format "unrecognized input format: ~a" bibtex-input-format))]))
(when inline?
(set! bibtex-ast (bibtex-inline-strings bibtex-ast)))
(when parse-names?
(set! bibtex-ast (map bibtex-parse-names bibtex-ast)))
(when flatten?
(set! bibtex-ast (bibtex-flatten-strings bibtex-ast)))
(when texenize?
(set! bibtex-ast (map bibtex-texenize-item bibtex-ast)))
(match bibtex-output-format
['sx (pretty-write bibtex-ast)]
['bib (for ([item bibtex-ast])
(display (bibtex-item->bibstring item))
(newline) (newline))]
['xml (let ([xml (bibtex-ast->xml bibtex-ast)])
(display-xml/content xml))]
['json (display (bibtex-ast->json bibtex-ast))]
[else (error (format "unrecognized output format: ~a"
bibtex-output-format))])
| null | https://raw.githubusercontent.com/mattmight/bib2sx/fa1de50096a13a48fbc0b3ffe0d91c27177303b7/bib2sx.rkt | racket | This program is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program. If not, see </>.
ISSUES:
+ Does not support @comment or @preamble
TODO:
+ Add --tokenize flag to tokenize into strings meaningful to BibTeX/LaTeX:
$, \<cmd>, <whitespace>, <word>
+ Add flag to choose name fields (currently author, editor)
+ Add support for @comment and @preamble
<bibtex-ast> ::= (<item> ...)
<item> ::= (<item-type> <id> <attr> ...)
<attr> ::= (<id> <expr> ...) ;
| (<id> . <string>) ; if --flatten
| (<id> <name> ...) ; if --parse-names
<expr> ::= <string>
| '<expr>
| '(<expr> ...)
| <id>
if --parse - names
(last <expr> ...)
<item-type> ::= inproceedings | article | ...
Handling command line options:
<config>
</config>
choose the input format:
choose the output format:
just lexically analyze; don't parse:
flatten values into a single string:
parse names into a vector of vectors:
inline all @string declarations:
useful if you want to render to a different output
convert to a bibtex .bib file:
convert to JSON:
convert to JSON:
provide a filename to parse:
(current-command-line-arguments #("--inline" "--flatten" "test/test.bib"))
for debugging, allow looking at the lexical analyzers output: | #lang racket
bib2sx : A Racket script for converting a bibtex .bib file to S - Expressions
Copyright ( C ) 2015
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
Output grammar :
( < expr > ... )
( > ) )
(require "bibtex/main.rkt")
(require xml)
(define bibtex-input-port (current-input-port))
(define bibtex-output-format 'sx)
(define bibtex-input-format 'bib)
(define inline? #f)
(define flatten? #f)
(define texenize? #f)
(define lex-only? #f)
(define parse-names? #f)
(define (parse-options! args)
(match args
['()
(void)]
[(cons "--drracket" rest)
(set! bibtex-input-port (open-input-file "test/test.bib"))
(parse-options! rest)]
[`("--in" ,format . ,rest)
(set! bibtex-input-format (string->symbol format))
(parse-options! rest)]
[`("--out" ,format . ,rest)
(set! bibtex-output-format (string->symbol format))
(parse-options! rest)]
[(cons "--lex" rest)
(set! lex-only? #t)
(parse-options! rest)]
[(cons "--flatten" rest)
(set! flatten? #t)
(parse-options! rest)]
[(cons "--parse-names" rest)
(set! parse-names? #t)
(parse-options! rest)]
[(cons "--inline" rest)
(set! inline? #t)
(parse-options! rest)]
tokenize strings into units meaningful to :
[(cons "--texenize" rest)
format such as HTML , and you need ot interpret
(set! texenize? #t)
(parse-options! rest)]
[(cons "--bib" rest)
(set! bibtex-output-format 'bib)
(parse-options! rest)]
[(cons "--json" rest)
(set! bibtex-output-format 'json)
(parse-options! rest)]
[(cons "--xml" rest)
(set! bibtex-output-format 'xml)
(parse-options! rest)]
[(cons filename rest)
(set! bibtex-input-port (open-input-file filename))
(parse-options! rest)]
))
for testing in :
(parse-options! (vector->list (current-command-line-arguments)))
(when lex-only?
(define tokens (bibtex-lexer bibtex-input-port))
(pretty-write (stream->list tokens))
(exit))
(define bibtex-ast
(match bibtex-input-format
['bib
(define token-generator (generate-token-generator bibtex-input-port))
(bibtex-parse token-generator)]
['sx
(read bibtex-input-port)]
[else
(error (format "unrecognized input format: ~a" bibtex-input-format))]))
(when inline?
(set! bibtex-ast (bibtex-inline-strings bibtex-ast)))
(when parse-names?
(set! bibtex-ast (map bibtex-parse-names bibtex-ast)))
(when flatten?
(set! bibtex-ast (bibtex-flatten-strings bibtex-ast)))
(when texenize?
(set! bibtex-ast (map bibtex-texenize-item bibtex-ast)))
(match bibtex-output-format
['sx (pretty-write bibtex-ast)]
['bib (for ([item bibtex-ast])
(display (bibtex-item->bibstring item))
(newline) (newline))]
['xml (let ([xml (bibtex-ast->xml bibtex-ast)])
(display-xml/content xml))]
['json (display (bibtex-ast->json bibtex-ast))]
[else (error (format "unrecognized output format: ~a"
bibtex-output-format))])
|
ab436c53e164274839e7bc8491b69b7d6f7085bc33d7a78b60ac3721f9e5711e | fractalide/fractalide | identity.rkt | #lang racket
(require fractalide/modules/rkt/rkt-fbp/agent)
(define-agent
#:input '("in")
#:output '("out")
(send (output "out") (recv (input "in"))))
(module+ test
(require rackunit)
(require syntax/location)
(require fractalide/modules/rkt/rkt-fbp/def)
(require fractalide/modules/rkt/rkt-fbp/port)
(require fractalide/modules/rkt/rkt-fbp/scheduler)
(test-case
"Sending message X returns message X"
(define sched (make-scheduler #f))
(define tap (make-port 30 #f #f #f))
(define msg "hello")
(sched (msg-add-agent "agent-under-test" (quote-module-path ".."))
(msg-raw-connect "agent-under-test" "out" tap))
(sched (msg-mesg "agent-under-test" "in" msg))
(check-equal? (port-recv tap) msg)
(sched (msg-stop))))
| null | https://raw.githubusercontent.com/fractalide/fractalide/9c54ec2615fcc2a1f3363292d4eed2a0fcb9c3a5/modules/rkt/rkt-fbp/agents/plumbing/identity.rkt | racket | #lang racket
(require fractalide/modules/rkt/rkt-fbp/agent)
(define-agent
#:input '("in")
#:output '("out")
(send (output "out") (recv (input "in"))))
(module+ test
(require rackunit)
(require syntax/location)
(require fractalide/modules/rkt/rkt-fbp/def)
(require fractalide/modules/rkt/rkt-fbp/port)
(require fractalide/modules/rkt/rkt-fbp/scheduler)
(test-case
"Sending message X returns message X"
(define sched (make-scheduler #f))
(define tap (make-port 30 #f #f #f))
(define msg "hello")
(sched (msg-add-agent "agent-under-test" (quote-module-path ".."))
(msg-raw-connect "agent-under-test" "out" tap))
(sched (msg-mesg "agent-under-test" "in" msg))
(check-equal? (port-recv tap) msg)
(sched (msg-stop))))
| |
87a7b270245df3c65a7f4c8ee6d09571a17e08ed1e01eed10d349ba1a4796e66 | threatgrid/ctia | default_test.clj | (ns ctia.encryption.default-test
(:require
[ctia.encryption.default :as sut]
[ctia.test-helpers.core :as helpers]
[clojure.test :as t :refer [deftest testing is]]
[puppetlabs.trapperkeeper.testutils.bootstrap :refer [with-app-with-config]]))
(deftest encryption-default-record-test
(let [key-path "resources/cert/ctia-encryption.key"]
(testing "failed service init"
(is
(thrown-with-msg?
AssertionError
#"no secret or key filepath provided"
(with-app-with-config app
[sut/default-encryption-service]
{:ctia {:encryption nil}}))))
(with-app-with-config app
[sut/default-encryption-service]
{:ctia {:encryption {:key {:filepath key-path}}}}
(testing "encrypt and decrypt a string using the record"
(let [{:keys [decrypt encrypt]} (helpers/get-service-map app :IEncryption)
plain "foo"
enc (encrypt "foo")
dec (decrypt enc)]
(is (string? enc))
(is (not= plain enc))
(is (= dec plain)))))))
| null | https://raw.githubusercontent.com/threatgrid/ctia/32857663cdd7ac385161103dbafa8dc4f98febf0/test/ctia/encryption/default_test.clj | clojure | (ns ctia.encryption.default-test
(:require
[ctia.encryption.default :as sut]
[ctia.test-helpers.core :as helpers]
[clojure.test :as t :refer [deftest testing is]]
[puppetlabs.trapperkeeper.testutils.bootstrap :refer [with-app-with-config]]))
(deftest encryption-default-record-test
(let [key-path "resources/cert/ctia-encryption.key"]
(testing "failed service init"
(is
(thrown-with-msg?
AssertionError
#"no secret or key filepath provided"
(with-app-with-config app
[sut/default-encryption-service]
{:ctia {:encryption nil}}))))
(with-app-with-config app
[sut/default-encryption-service]
{:ctia {:encryption {:key {:filepath key-path}}}}
(testing "encrypt and decrypt a string using the record"
(let [{:keys [decrypt encrypt]} (helpers/get-service-map app :IEncryption)
plain "foo"
enc (encrypt "foo")
dec (decrypt enc)]
(is (string? enc))
(is (not= plain enc))
(is (= dec plain)))))))
| |
2112519c0d85641faafbfe92bb60dbea6bb808d35a14f51cb30b688b89040a65 | patperry/hs-ieee754 | Tests.hs | module Main
where
import Control.Monad( forM_, unless )
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.HUnit
import Data.AEq
import Numeric.IEEE
type D = Double
type F = Float
infix 1 @?~=, @?==
(@?~=) actual expected =
unless (actual ~== expected) (assertFailure msg)
where
msg = "expected: " ++ show expected ++ "\n but got: " ++ show actual
(@?==) actual expected =
unless (actual === expected) (assertFailure msg)
where
msg = "expected: " ++ show expected ++ "\n but got: " ++ show actual
test_maxNum = testGroup "maxNum"
[ testCase "D1" test_maxNum_D1
, testCase "D2" test_maxNum_D2
, testCase "D3" test_maxNum_D3
, testCase "D4" test_maxNum_D4
, testCase "D5" test_maxNum_D5
, testCase "F1" test_maxNum_F1
, testCase "F2" test_maxNum_F2
, testCase "F3" test_maxNum_F3
, testCase "F4" test_maxNum_F4
, testCase "F5" test_maxNum_F5
]
test_maxNum_D1 = maxNum nan 1 @?= (1 :: D)
test_maxNum_D2 = maxNum 1 nan @?= (1 :: D)
test_maxNum_D3 = maxNum 1 0 @?= (1 :: D)
test_maxNum_D4 = maxNum 0 1 @?= (1 :: D)
test_maxNum_D5 =
maxNum (nanWithPayload 1) (nanWithPayload 2)
@?== (nanWithPayload 1 :: D)
test_maxNum_F1 = maxNum nan 1 @?= (1 :: F)
test_maxNum_F2 = maxNum 1 nan @?= (1 :: F)
test_maxNum_F3 = maxNum 1 0 @?= (1 :: F)
test_maxNum_F4 = maxNum 0 1 @?= (1 :: F)
test_maxNum_F5 =
maxNum (nanWithPayload 1) (nanWithPayload 2)
@?== (nanWithPayload 1 :: F)
test_minNum = testGroup "minNum"
[ testCase "D1" test_minNum_D1
, testCase "D2" test_minNum_D2
, testCase "D3" test_minNum_D3
, testCase "D4" test_minNum_D4
, testCase "D5" test_minNum_D5
, testCase "F1" test_minNum_F1
, testCase "F2" test_minNum_F2
, testCase "F3" test_minNum_F3
, testCase "F4" test_minNum_F4
, testCase "F5" test_minNum_F5
]
test_minNum_D1 = minNum nan 1 @?= (1 :: D)
test_minNum_D2 = minNum 1 nan @?= (1 :: D)
test_minNum_D3 = minNum 1 2 @?= (1 :: D)
test_minNum_D4 = minNum 2 1 @?= (1 :: D)
test_minNum_D5 =
minNum (nanWithPayload 1) (nanWithPayload 2)
@?== (nanWithPayload 1 :: D)
test_minNum_F1 = minNum nan 1 @?= (1 :: F)
test_minNum_F2 = minNum 1 nan @?= (1 :: F)
test_minNum_F3 = minNum 1 2 @?= (1 :: F)
test_minNum_F4 = minNum 2 1 @?= (1 :: F)
test_minNum_F5 =
minNum (nanWithPayload 1) (nanWithPayload 2)
@?== (nanWithPayload 1 :: F)
test_maxNaN = testGroup "maxNaN"
[ testCase "D1" test_maxNaN_D1
, testCase "D2" test_maxNaN_D2
, testCase "D3" test_maxNaN_D3
, testCase "D4" test_maxNaN_D4
, testCase "D5" test_maxNaN_D5
, testCase "F1" test_maxNaN_F1
, testCase "F2" test_maxNaN_F2
, testCase "F3" test_maxNaN_F3
, testCase "F4" test_maxNaN_F4
, testCase "F5" test_maxNaN_F5
]
test_maxNaN_D1 = maxNaN nan 1 @?== (nan :: D)
test_maxNaN_D2 = maxNaN 1 nan @?== (nan :: D)
test_maxNaN_D3 = maxNaN 1 0 @?== (1 :: D)
test_maxNaN_D4 = maxNaN 0 1 @?== (1 :: D)
test_maxNaN_D5 =
maxNaN (nanWithPayload 1) (nanWithPayload 2)
@?== (nanWithPayload 1 :: D)
test_maxNaN_F1 = maxNaN nan 1 @?== (nan :: F)
test_maxNaN_F2 = maxNaN 1 nan @?== (nan :: F)
test_maxNaN_F3 = maxNaN 1 0 @?== (1 :: F)
test_maxNaN_F4 = maxNaN 0 1 @?== (1 :: F)
test_maxNaN_F5 =
maxNaN (nanWithPayload 1) (nanWithPayload 2)
@?== (nanWithPayload 1 :: F)
test_minNaN = testGroup "minNaN"
[ testCase "D1" test_minNaN_D1
, testCase "D2" test_minNaN_D2
, testCase "D3" test_minNaN_D3
, testCase "D4" test_minNaN_D4
, testCase "D5" test_minNaN_D5
, testCase "F1" test_minNaN_F1
, testCase "F2" test_minNaN_F2
, testCase "F3" test_minNaN_F3
, testCase "F4" test_minNaN_F4
, testCase "F5" test_minNaN_F5
]
test_minNaN_D1 = minNaN nan 1 @?== (nan :: D)
test_minNaN_D2 = minNaN 1 nan @?== (nan :: D)
test_minNaN_D3 = minNaN 1 2 @?== (1 :: D)
test_minNaN_D4 = minNaN 2 1 @?== (1 :: D)
test_minNaN_D5 =
minNaN (nanWithPayload 1) (nanWithPayload 2)
@?== (nanWithPayload 1 :: D)
test_minNaN_F1 = minNaN nan 1 @?== (nan :: F)
test_minNaN_F2 = minNaN 1 nan @?== (nan :: F)
test_minNaN_F3 = minNaN 1 2 @?== (1 :: F)
test_minNaN_F4 = minNaN 2 1 @?== (1 :: F)
test_minNaN_F5 =
minNaN (nanWithPayload 1) (nanWithPayload 2)
@?== (nanWithPayload 1 :: F)
test_nan = testGroup "nan" $
[ testCase "D" test_nan_D
, testCase "F" test_nan_F
]
test_nan_D = isNaN (nan :: D) @?= True
test_nan_F = isNaN (nan :: F) @?= True
test_infinity = testGroup "infinity"
[ testCase "D1" test_infinity_D1
, testCase "D2" test_infinity_D2
, testCase "F1" test_infinity_F1
, testCase "F2" test_infinity_F2
]
test_infinity_D1 = isInfinite (infinity :: D) @?= True
test_infinity_D2 = infinity > (0 :: D) @?= True
test_infinity_F1 = isInfinite (infinity :: F) @?= True
test_infinity_F2 = infinity > (0 :: F) @?= True
test_minDenormal = testGroup "minDenormal"
[ testCase "D1" (succIEEE 0 @?= (minDenormal :: D))
, testCase "D2" (isDenormalized (minDenormal :: D) @?= True)
, testCase "F1" (succIEEE 0 @?= (minDenormal :: F))
, testCase "F2" (isDenormalized (minDenormal :: F) @?= True)
]
test_minNormal = testGroup "minNormal"
[ testCase "D" (go (minNormal :: D))
, testCase "F" (go (minNormal :: F))
]
where
go n = let (e,_) = floatRange (undefined `asTypeOf` n)
in n @?= encodeFloat 1 (e-1)
test_maxFinite = testGroup "maxFinite"
[ testCase "D" (go (maxFinite :: D))
, testCase "F" (go (maxFinite :: F))
]
where
go n = let r = floatRadix (undefined `asTypeOf` n)
d = floatDigits (undefined `asTypeOf` n)
(_,e) = floatRange (undefined `asTypeOf` n)
in n @?= encodeFloat (r^d-1) (e-d)
test_epsilon = testGroup "epsilon"
[ testCase "D" (go (epsilon :: D))
, testCase "F" (go (epsilon :: F))
]
where
go n = let d = floatDigits (undefined `asTypeOf` n)
in n @?= encodeFloat 1 (1-d)
succIEEE and predIEEE tests ported from tango / math / IEEE.d
test_succIEEE = testGroup "succIEEE"
[ testCase "nan D" test_succIEEE_nan_D
, testCase "neg D1" test_succIEEE_neg_D1
, testCase "neg D2" test_succIEEE_neg_D2
, testCase "neg D3" test_succIEEE_neg_D3
, testCase "neg denorm D1" test_succIEEE_neg_denorm_D1
, testCase "neg denorm D2" test_succIEEE_neg_denorm_D2
, testCase "neg denrom D3" test_succIEEE_neg_denorm_D3
, testCase "zero D1" test_succIEEE_zero_D1
, testCase "zero D2" test_succIEEE_zero_D2
, testCase "pos denorm D1" test_succIEEE_pos_denorm_D1
, testCase "pos denorm D2" test_succIEEE_pos_denorm_D2
, testCase "pos D1" test_succIEEE_pos_D1
, testCase "pos D2" test_succIEEE_pos_D2
, testCase "pos D3" test_succIEEE_pos_D3
, testCase "nan F" test_succIEEE_nan_F
, testCase "neg F1" test_succIEEE_neg_F1
, testCase "neg F2" test_succIEEE_neg_F2
, testCase "neg F3" test_succIEEE_neg_F3
, testCase "neg denorm F1" test_succIEEE_neg_denorm_F1
, testCase "neg denorm F2" test_succIEEE_neg_denorm_F2
, testCase "neg denrom F3" test_succIEEE_neg_denorm_F3
, testCase "zero F1" test_succIEEE_zero_F1
, testCase "zero F2" test_succIEEE_zero_F2
, testCase "pos denorm F1" test_succIEEE_pos_denorm_F1
, testCase "pos denorm F2" test_succIEEE_pos_denorm_F2
, testCase "pos F1" test_succIEEE_pos_F1
, testCase "pos F2" test_succIEEE_pos_F2
, testCase "pos F3" test_succIEEE_pos_F3
]
test_succIEEE_nan_D = isNaN (succIEEE (nan :: D)) @?= True
test_succIEEE_neg_D1 = succIEEE (-infinity) @?= (-maxFinite :: D)
test_succIEEE_neg_D2 = succIEEE (-1 - epsilon) @?= (-1 :: D)
test_succIEEE_neg_D3 = succIEEE (-2) @?= (-2 + epsilon :: D)
test_succIEEE_neg_denorm_D1 = succIEEE (-minNormal) @?= (-minNormal*(1 - epsilon) :: D)
test_succIEEE_neg_denorm_D2 = succIEEE (-minNormal*(1-epsilon)) @?= (-minNormal*(1-2*epsilon) :: D)
test_succIEEE_neg_denorm_D3 = isNegativeZero (succIEEE (-minNormal*epsilon :: D)) @?= True
test_succIEEE_zero_D1 = succIEEE (-0) @?= (minNormal * epsilon :: D)
test_succIEEE_zero_D2 = succIEEE 0 @?= (minNormal * epsilon :: D)
test_succIEEE_pos_denorm_D1 = succIEEE (minNormal*(1-epsilon)) @?= (minNormal :: D)
test_succIEEE_pos_denorm_D2 = succIEEE (minNormal) @?= (minNormal*(1+epsilon) :: D)
test_succIEEE_pos_D1 = succIEEE 1 @?= (1 + epsilon :: D)
test_succIEEE_pos_D2 = succIEEE (2 - epsilon) @?= (2 :: D)
test_succIEEE_pos_D3 = succIEEE maxFinite @?= (infinity :: D)
test_succIEEE_nan_F = isNaN (succIEEE (nan :: F)) @?= True
test_succIEEE_neg_F1 = succIEEE (-infinity) @?= (-maxFinite :: F)
test_succIEEE_neg_F2 = succIEEE (-1 - epsilon) @?= (-1 :: F)
test_succIEEE_neg_F3 = succIEEE (-2) @?= (-2 + epsilon :: F)
test_succIEEE_neg_denorm_F1 = succIEEE (-minNormal) @?= (-minNormal*(1 - epsilon) :: F)
test_succIEEE_neg_denorm_F2 = succIEEE (-minNormal*(1-epsilon)) @?= (-minNormal*(1-2*epsilon) :: F)
test_succIEEE_neg_denorm_F3 = isNegativeZero (succIEEE (-minNormal*epsilon :: F)) @?= True
test_succIEEE_zero_F1 = succIEEE (-0) @?= (minNormal * epsilon :: F)
test_succIEEE_zero_F2 = succIEEE 0 @?= (minNormal * epsilon :: F)
test_succIEEE_pos_denorm_F1 = succIEEE (minNormal*(1-epsilon)) @?= (minNormal :: F)
test_succIEEE_pos_denorm_F2 = succIEEE (minNormal) @?= (minNormal*(1+epsilon) :: F)
test_succIEEE_pos_F1 = succIEEE 1 @?= (1 + epsilon :: F)
test_succIEEE_pos_F2 = succIEEE (2 - epsilon) @?= (2 :: F)
test_succIEEE_pos_F3 = succIEEE maxFinite @?= (infinity :: F)
test_predIEEE = testGroup "predIEEE"
[ testCase "D" test_predIEEE_D
, testCase "F" test_predIEEE_F
]
test_predIEEE_D = predIEEE (1 + epsilon) @?= (1 :: D)
test_predIEEE_F = predIEEE (1 + epsilon) @?= (1 :: F)
test_bisectIEEE = testGroup "bisectIEEE"
[ testCase "D1" test_bisectIEEE_D1
, testCase "D2" test_bisectIEEE_D2
, testCase "D3" test_bisectIEEE_D3
, testCase "D4" test_bisectIEEE_D4
, testCase "D5" test_bisectIEEE_D5
, testCase "D6" test_bisectIEEE_D6
, testCase "D7" test_bisectIEEE_D7
, testCase "D8" test_bisectIEEE_D8
, testCase "D9" test_bisectIEEE_D9
, testCase "F1" test_bisectIEEE_F1
, testCase "F2" test_bisectIEEE_F2
, testCase "F3" test_bisectIEEE_F3
, testCase "F4" test_bisectIEEE_F4
, testCase "F5" test_bisectIEEE_F5
, testCase "F6" test_bisectIEEE_F6
, testCase "F7" test_bisectIEEE_F7
, testCase "F8" test_bisectIEEE_F8
, testCase "F9" test_bisectIEEE_F9
]
test_bisectIEEE_D1 = bisectIEEE (-0) (-1e-20) < (0 :: D) @?= True
test_bisectIEEE_D2 = bisectIEEE (0) (1e-20) > (0 :: D) @?= True
test_bisectIEEE_D3 = bisectIEEE 1 4 @?= (2 :: D)
test_bisectIEEE_D4 = bisectIEEE (2*1.013) (8*1.013) @?= (4*1.013 :: D)
test_bisectIEEE_D5 = bisectIEEE (-1) (-4) @?= (-2 :: D)
test_bisectIEEE_D6 = bisectIEEE (-1) (-2) @?= (-1.5 :: D)
test_bisectIEEE_D7 =
bisectIEEE (-1*(1+8*epsilon)) (-2*(1+8*epsilon))
@?= (-1.5*(1+5*epsilon) :: D)
test_bisectIEEE_D8 =
bisectIEEE (encodeFloat 0x100000 60) (encodeFloat 0x100000 (-10))
@?= (encodeFloat 0x100000 25 :: D)
test_bisectIEEE_D9 =
bisectIEEE 0 infinity @?= (1.5 :: D)
test_bisectIEEE_F1 = bisectIEEE (-0) (-1e-20) < (0 :: F) @?= True
test_bisectIEEE_F2 = bisectIEEE (0) (1e-20) > (0 :: F) @?= True
test_bisectIEEE_F3 = bisectIEEE 1 4 @?= (2 :: F)
test_bisectIEEE_F4 = bisectIEEE (2*1.013) (8*1.013) @?= (4*1.013 :: F)
test_bisectIEEE_F5 = bisectIEEE (-1) (-4) @?= (-2 :: F)
test_bisectIEEE_F6 = bisectIEEE (-1) (-2) @?= (-1.5 :: F)
test_bisectIEEE_F7 =
bisectIEEE (-1*(1+8*epsilon)) (-2*(1+8*epsilon))
@?= (-1.5*(1+5*epsilon) :: F)
test_bisectIEEE_F8 =
bisectIEEE (encodeFloat 0x100000 60) (encodeFloat 0x100000 (-10))
@?= (encodeFloat 0x100000 25 :: F)
test_bisectIEEE_F9 =
bisectIEEE 0 infinity @?= (1.5 :: F)
test_sameSignificandBits = testGroup "sameSignificandBits" $
[ testCase "exact D1" test_sameSignificandBits_exact_D1
, testCase "exact D2" test_sameSignificandBits_exact_D2
, testCase "exact D3" test_sameSignificandBits_exact_D3
, testCase "exact D4" test_sameSignificandBits_exact_D4
, testCase "fewbits D1" test_sameSignificandBits_fewbits_D1
, testCase "fewbits D2" test_sameSignificandBits_fewbits_D2
, testCase "fewbits D3" test_sameSignificandBits_fewbits_D3
, testCase "fewbits D4" test_sameSignificandBits_fewbits_D4
, testCase "fewbits D5" test_sameSignificandBits_fewbits_D5
, testCase "fewbits D6" test_sameSignificandBits_fewbits_D6
, testCase "fewbits D7" test_sameSignificandBits_fewbits_D7
, testCase "close D1" test_sameSignificandBits_close_D1
, testCase "close D2" test_sameSignificandBits_close_D2
, testCase "close D3" test_sameSignificandBits_close_D3
, testCase "close D4" test_sameSignificandBits_close_D4
, testCase "close D5" test_sameSignificandBits_close_D5
, testCase "2factors D1" test_sameSignificandBits_2factors_D1
, testCase "2factors D2" test_sameSignificandBits_2factors_D2
, testCase "2factors D3" test_sameSignificandBits_2factors_D3
, testCase "2factors D4" test_sameSignificandBits_2factors_D4
, testCase "extreme D1" test_sameSignificandBits_extreme_D1
, testCase "extreme D2" test_sameSignificandBits_extreme_D2
, testCase "extreme D3" test_sameSignificandBits_extreme_D3
, testCase "extreme D4" test_sameSignificandBits_extreme_D4
, testCase "extreme D5" test_sameSignificandBits_extreme_D5
, testCase "extreme D6" test_sameSignificandBits_extreme_D6
, testCase "exact F1" test_sameSignificandBits_exact_F1
, testCase "exact F2" test_sameSignificandBits_exact_F2
, testCase "exact F3" test_sameSignificandBits_exact_F3
, testCase "exact F4" test_sameSignificandBits_exact_F4
, testCase "fewbits F1" test_sameSignificandBits_fewbits_F1
, testCase "fewbits F2" test_sameSignificandBits_fewbits_F2
, testCase "fewbits F3" test_sameSignificandBits_fewbits_F3
, testCase "fewbits F4" test_sameSignificandBits_fewbits_F4
, testCase "fewbits F5" test_sameSignificandBits_fewbits_F5
, testCase "fewbits F6" test_sameSignificandBits_fewbits_F6
, testCase "fewbits F7" test_sameSignificandBits_fewbits_F7
, testCase "close F1" test_sameSignificandBits_close_F1
, testCase "close F2" test_sameSignificandBits_close_F2
, testCase "close F3" test_sameSignificandBits_close_F3
, testCase "close F4" test_sameSignificandBits_close_F4
, testCase "close F5" test_sameSignificandBits_close_F5
, testCase "2factors F1" test_sameSignificandBits_2factors_F1
, testCase "2factors F2" test_sameSignificandBits_2factors_F2
, testCase "2factors F3" test_sameSignificandBits_2factors_F3
, testCase "2factors F4" test_sameSignificandBits_2factors_F4
, testCase "extreme F1" test_sameSignificandBits_extreme_F1
, testCase "extreme F2" test_sameSignificandBits_extreme_F2
, testCase "extreme F3" test_sameSignificandBits_extreme_F3
, testCase "extreme F4" test_sameSignificandBits_extreme_F4
, testCase "extreme F5" test_sameSignificandBits_extreme_F5
, testCase "extreme F6" test_sameSignificandBits_extreme_F6
]
test_sameSignificandBits_exact_D1 =
sameSignificandBits (maxFinite :: D) maxFinite
@?= floatDigits (undefined :: D)
test_sameSignificandBits_exact_D2 =
sameSignificandBits (0 :: D) 0
@?= floatDigits (undefined :: D)
test_sameSignificandBits_exact_D3 =
sameSignificandBits (7.1824 :: D) 7.1824
@?= floatDigits (undefined :: D)
test_sameSignificandBits_exact_D4 =
sameSignificandBits (infinity :: D) infinity
@?= floatDigits (undefined :: D)
test_sameSignificandBits_fewbits_D1 =
forM_ [ 0..mantDig-1 ] $ \i ->
sameSignificandBits (1 + 2^^i * epsilon) (1 :: D) @?= mantDig - i - 1
where
mantDig = floatDigits (undefined :: D)
test_sameSignificandBits_fewbits_D2 =
forM_ [ 0..mantDig-3 ] $ \i ->
sameSignificandBits (1 - 2^^i * epsilon) (1 :: D) @?= mantDig - i - 1
where
mantDig = floatDigits (undefined :: D)
test_sameSignificandBits_fewbits_D3 =
forM_ [ 0..mantDig-1 ] $ \i ->
sameSignificandBits (1 :: D) (1 + (2^^i - 1) * epsilon)
@?= mantDig - i
where
mantDig = floatDigits (undefined :: D)
test_sameSignificandBits_fewbits_D4 =
sameSignificandBits (1.5 + epsilon) (1.5 :: D)
@?= floatDigits (undefined :: D) - 1
test_sameSignificandBits_fewbits_D5 =
sameSignificandBits (1.5 - epsilon) (1.5 :: D)
@?= floatDigits (undefined :: D) - 1
test_sameSignificandBits_fewbits_D6 =
sameSignificandBits (1.5 - epsilon) (1.5 + epsilon :: D)
@?= floatDigits (undefined :: D) - 2
test_sameSignificandBits_fewbits_D7 =
sameSignificandBits (minNormal / 8) (minNormal / 17 :: D)
@?= 3
test_sameSignificandBits_close_D1 =
sameSignificandBits (encodeFloat 0x1B0000 84) (encodeFloat 0x1B8000 84 :: D)
@?= 5
test_sameSignificandBits_close_D2 =
sameSignificandBits (encodeFloat 0x180000 10) (encodeFloat 0x1C0000 10 :: D)
@?= 2
test_sameSignificandBits_close_D3 =
sameSignificandBits (1.5 * (1 - epsilon)) (1 :: D) @?= 2
test_sameSignificandBits_close_D4 =
sameSignificandBits 1.5 (1 :: D) @?= 1
test_sameSignificandBits_close_D5 =
sameSignificandBits (2 * (1 - epsilon)) (1 :: D) @?= 1
test_sameSignificandBits_2factors_D1 =
sameSignificandBits maxFinite (infinity :: D) @?= 0
test_sameSignificandBits_2factors_D2 =
sameSignificandBits (2 * (1 - epsilon)) (1 :: D) @?= 1
test_sameSignificandBits_2factors_D3 =
sameSignificandBits 1 (2 :: D) @?= 0
test_sameSignificandBits_2factors_D4 =
sameSignificandBits 4 (1 :: D) @?= 0
test_sameSignificandBits_extreme_D1 =
sameSignificandBits nan (nan :: D) @?= 0
test_sameSignificandBits_extreme_D2 =
sameSignificandBits 0 (-nan :: D) @?= 0
test_sameSignificandBits_extreme_D3 =
sameSignificandBits nan (infinity :: D) @?= 0
test_sameSignificandBits_extreme_D4 =
sameSignificandBits infinity (-infinity :: D) @?= 0
test_sameSignificandBits_extreme_D5 =
sameSignificandBits (-maxFinite) (infinity :: D) @?= 0
test_sameSignificandBits_extreme_D6 =
sameSignificandBits (maxFinite) (-maxFinite :: D) @?= 0
test_sameSignificandBits_exact_F1 =
sameSignificandBits (maxFinite :: F) maxFinite
@?= floatDigits (undefined :: F)
test_sameSignificandBits_exact_F2 =
sameSignificandBits (0 :: F) 0
@?= floatDigits (undefined :: F)
test_sameSignificandBits_exact_F3 =
sameSignificandBits (7.1824 :: F) 7.1824
@?= floatDigits (undefined :: F)
test_sameSignificandBits_exact_F4 =
sameSignificandBits (infinity :: F) infinity
@?= floatDigits (undefined :: F)
test_sameSignificandBits_fewbits_F1 =
forM_ [ 0..mantFig-1 ] $ \i ->
sameSignificandBits (1 + 2^^i * epsilon) (1 :: F) @?= mantFig - i - 1
where
mantFig = floatDigits (undefined :: F)
test_sameSignificandBits_fewbits_F2 =
forM_ [ 0..mantFig-3 ] $ \i ->
sameSignificandBits (1 - 2^^i * epsilon) (1 :: F) @?= mantFig - i - 1
where
mantFig = floatDigits (undefined :: F)
test_sameSignificandBits_fewbits_F3 =
forM_ [ 0..mantFig-1 ] $ \i ->
sameSignificandBits (1 :: F) (1 + (2^^i - 1) * epsilon)
@?= mantFig - i
where
mantFig = floatDigits (undefined :: F)
test_sameSignificandBits_fewbits_F4 =
sameSignificandBits (1.5 + epsilon) (1.5 :: F)
@?= floatDigits (undefined :: F) - 1
test_sameSignificandBits_fewbits_F5 =
sameSignificandBits (1.5 - epsilon) (1.5 :: F)
@?= floatDigits (undefined :: F) - 1
test_sameSignificandBits_fewbits_F6 =
sameSignificandBits (1.5 - epsilon) (1.5 + epsilon :: F)
@?= floatDigits (undefined :: F) - 2
test_sameSignificandBits_fewbits_F7 =
sameSignificandBits (minNormal / 8) (minNormal / 17 :: F)
@?= 3
test_sameSignificandBits_close_F1 =
sameSignificandBits (encodeFloat 0x1B0000 84) (encodeFloat 0x1B8000 84 :: F)
@?= 5
test_sameSignificandBits_close_F2 =
sameSignificandBits (encodeFloat 0x180000 10) (encodeFloat 0x1C0000 10 :: F)
@?= 2
test_sameSignificandBits_close_F3 =
sameSignificandBits (1.5 * (1 - epsilon)) (1 :: F) @?= 2
test_sameSignificandBits_close_F4 =
sameSignificandBits 1.5 (1 :: F) @?= 1
test_sameSignificandBits_close_F5 =
sameSignificandBits (2 * (1 - epsilon)) (1 :: F) @?= 1
test_sameSignificandBits_2factors_F1 =
sameSignificandBits maxFinite (infinity :: F) @?= 0
test_sameSignificandBits_2factors_F2 =
sameSignificandBits (2 * (1 - epsilon)) (1 :: F) @?= 1
test_sameSignificandBits_2factors_F3 =
sameSignificandBits 1 (2 :: F) @?= 0
test_sameSignificandBits_2factors_F4 =
sameSignificandBits 4 (1 :: F) @?= 0
test_sameSignificandBits_extreme_F1 =
sameSignificandBits nan (nan :: F) @?= 0
test_sameSignificandBits_extreme_F2 =
sameSignificandBits 0 (-nan :: F) @?= 0
test_sameSignificandBits_extreme_F3 =
sameSignificandBits nan (infinity :: F) @?= 0
test_sameSignificandBits_extreme_F4 =
sameSignificandBits infinity (-infinity :: F) @?= 0
test_sameSignificandBits_extreme_F5 =
sameSignificandBits (-maxFinite) (infinity :: F) @?= 0
test_sameSignificandBits_extreme_F6 =
sameSignificandBits (maxFinite) (-maxFinite :: F) @?= 0
test_nanWithPayload = testGroup "nanWithPayload"
[ testCase "D1" test_nanWithPayload_D1
, testCase "D2" test_nanWithPayload_D2
, testCase "F1" test_nanWithPayload_F1
, testCase "F2" test_nanWithPayload_F2
]
test_nanWithPayload_D1 =
isNaN (nanWithPayload 1 :: D) @?= True
test_nanWithPayload_D2 =
isNaN (nanWithPayload maxPayload :: D) @?= True
where
maxPayload = maxNaNPayload (undefined :: D)
test_nanWithPayload_F1 =
isNaN (nanWithPayload 1 :: F) @?= True
test_nanWithPayload_F2 =
isNaN (nanWithPayload maxPayload :: F) @?= True
where
maxPayload = maxNaNPayload (undefined :: F)
test_maxNaNPayload = testGroup "maxNaNPayload"
[ testCase "D" (go (undefined :: D))
, testCase "F" (go (undefined :: F))
]
where
go n = let b = floatRadix (undefined `asTypeOf` n)
d = floatDigits (undefined `asTypeOf` n)
in maxNaNPayload n @?= fromIntegral (b^(d-2)-1)
test_nanPayload = testGroup "nanPayload"
[ testCase "D1" test_nanPayload_D1
, testCase "D2" test_nanPayload_D2
, testCase "D3" test_nanPayload_D3
, testCase "F1" test_nanPayload_F1
, testCase "F2" test_nanPayload_F2
, testCase "F3" test_nanPayload_F3
]
test_nanPayload_D1 =
nanPayload (nanWithPayload 1 :: D) @?= 1
test_nanPayload_D2 =
nanPayload (nanWithPayload maxPayload :: D) @?= maxPayload
where
maxPayload = maxNaNPayload (undefined :: D)
test_nanPayload_D3 =
nanPayload (nanWithPayload (maxPayload + 1) :: D) @?= 0
where
maxPayload = maxNaNPayload (undefined :: D)
test_nanPayload_F1 =
nanPayload (nanWithPayload 1 :: F) @?= 1
test_nanPayload_F2 =
nanPayload (nanWithPayload maxPayload :: F) @?= maxPayload
where
maxPayload = maxNaNPayload (undefined :: F)
test_nanPayload_F3 =
nanPayload (nanWithPayload (maxPayload + 1) :: F) @?= 0
where
maxPayload = maxNaNPayload (undefined :: F)
test_copySign = testGroup "copySign"
[ testCase "D1" test_copySign_D1
, testCase "D2" test_copySign_D2
, testCase "D3" test_copySign_D3
, testCase "D4" test_copySign_D4
, testCase "D5" test_copySign_D5
, testCase "D6" test_copySign_D6
, testCase "F1" test_copySign_F1
, testCase "F2" test_copySign_F2
, testCase "F3" test_copySign_F3
, testCase "F4" test_copySign_F4
, testCase "F5" test_copySign_F5
, testCase "F6" test_copySign_F6
]
test_copySign_D1 =
copySign 0.9 (-1.2) @?= (-0.9 :: D)
test_copySign_D2 =
copySign 0.9 (1.2) @?= (0.9 :: D)
test_copySign_D3 =
copySign (-0.9 )(1.2) @?= (0.9 :: D)
test_copySign_D4 =
copySign (-0.9) (-1.2) @?= (-0.9 :: D)
test_copySign_D5 =
copySign 1 (copySign nan 1) @?= (1 :: D)
test_copySign_D6 =
copySign 1 (copySign nan (-1)) @?= (-1 :: D)
test_copySign_F1 =
copySign 0.9 (-1.2) @?= (-0.9 :: F)
test_copySign_F2 =
copySign 0.9 (1.2) @?= (0.9 :: F)
test_copySign_F3 =
copySign (-0.9 )(1.2) @?= (0.9 :: F)
test_copySign_F4 =
copySign (-0.9) (-1.2) @?= (-0.9 :: F)
test_copySign_F5 =
copySign 1 (copySign nan 1) @?= (1 :: F)
test_copySign_F6 =
copySign 1 (copySign nan (-1)) @?= (-1 :: F)
test_IEEE = testGroup "IEEE"
[ test_infinity
, test_minNormal
, test_minDenormal
, test_maxFinite
, test_epsilon
, test_copySign
, test_succIEEE
, test_predIEEE
, test_bisectIEEE
, test_sameSignificandBits
, test_maxNum
, test_minNum
, test_maxNaN
, test_minNaN
, test_nan
, test_nanWithPayload
, test_maxNaNPayload
, test_nanPayload
]
main :: IO ()
main = defaultMain [ test_IEEE
]
| null | https://raw.githubusercontent.com/patperry/hs-ieee754/32ed20b21635e47ce50ffc90e10889f2d6d58caa/tests/Tests.hs | haskell | module Main
where
import Control.Monad( forM_, unless )
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.HUnit
import Data.AEq
import Numeric.IEEE
type D = Double
type F = Float
infix 1 @?~=, @?==
(@?~=) actual expected =
unless (actual ~== expected) (assertFailure msg)
where
msg = "expected: " ++ show expected ++ "\n but got: " ++ show actual
(@?==) actual expected =
unless (actual === expected) (assertFailure msg)
where
msg = "expected: " ++ show expected ++ "\n but got: " ++ show actual
test_maxNum = testGroup "maxNum"
[ testCase "D1" test_maxNum_D1
, testCase "D2" test_maxNum_D2
, testCase "D3" test_maxNum_D3
, testCase "D4" test_maxNum_D4
, testCase "D5" test_maxNum_D5
, testCase "F1" test_maxNum_F1
, testCase "F2" test_maxNum_F2
, testCase "F3" test_maxNum_F3
, testCase "F4" test_maxNum_F4
, testCase "F5" test_maxNum_F5
]
test_maxNum_D1 = maxNum nan 1 @?= (1 :: D)
test_maxNum_D2 = maxNum 1 nan @?= (1 :: D)
test_maxNum_D3 = maxNum 1 0 @?= (1 :: D)
test_maxNum_D4 = maxNum 0 1 @?= (1 :: D)
test_maxNum_D5 =
maxNum (nanWithPayload 1) (nanWithPayload 2)
@?== (nanWithPayload 1 :: D)
test_maxNum_F1 = maxNum nan 1 @?= (1 :: F)
test_maxNum_F2 = maxNum 1 nan @?= (1 :: F)
test_maxNum_F3 = maxNum 1 0 @?= (1 :: F)
test_maxNum_F4 = maxNum 0 1 @?= (1 :: F)
test_maxNum_F5 =
maxNum (nanWithPayload 1) (nanWithPayload 2)
@?== (nanWithPayload 1 :: F)
test_minNum = testGroup "minNum"
[ testCase "D1" test_minNum_D1
, testCase "D2" test_minNum_D2
, testCase "D3" test_minNum_D3
, testCase "D4" test_minNum_D4
, testCase "D5" test_minNum_D5
, testCase "F1" test_minNum_F1
, testCase "F2" test_minNum_F2
, testCase "F3" test_minNum_F3
, testCase "F4" test_minNum_F4
, testCase "F5" test_minNum_F5
]
test_minNum_D1 = minNum nan 1 @?= (1 :: D)
test_minNum_D2 = minNum 1 nan @?= (1 :: D)
test_minNum_D3 = minNum 1 2 @?= (1 :: D)
test_minNum_D4 = minNum 2 1 @?= (1 :: D)
test_minNum_D5 =
minNum (nanWithPayload 1) (nanWithPayload 2)
@?== (nanWithPayload 1 :: D)
test_minNum_F1 = minNum nan 1 @?= (1 :: F)
test_minNum_F2 = minNum 1 nan @?= (1 :: F)
test_minNum_F3 = minNum 1 2 @?= (1 :: F)
test_minNum_F4 = minNum 2 1 @?= (1 :: F)
test_minNum_F5 =
minNum (nanWithPayload 1) (nanWithPayload 2)
@?== (nanWithPayload 1 :: F)
test_maxNaN = testGroup "maxNaN"
[ testCase "D1" test_maxNaN_D1
, testCase "D2" test_maxNaN_D2
, testCase "D3" test_maxNaN_D3
, testCase "D4" test_maxNaN_D4
, testCase "D5" test_maxNaN_D5
, testCase "F1" test_maxNaN_F1
, testCase "F2" test_maxNaN_F2
, testCase "F3" test_maxNaN_F3
, testCase "F4" test_maxNaN_F4
, testCase "F5" test_maxNaN_F5
]
test_maxNaN_D1 = maxNaN nan 1 @?== (nan :: D)
test_maxNaN_D2 = maxNaN 1 nan @?== (nan :: D)
test_maxNaN_D3 = maxNaN 1 0 @?== (1 :: D)
test_maxNaN_D4 = maxNaN 0 1 @?== (1 :: D)
test_maxNaN_D5 =
maxNaN (nanWithPayload 1) (nanWithPayload 2)
@?== (nanWithPayload 1 :: D)
test_maxNaN_F1 = maxNaN nan 1 @?== (nan :: F)
test_maxNaN_F2 = maxNaN 1 nan @?== (nan :: F)
test_maxNaN_F3 = maxNaN 1 0 @?== (1 :: F)
test_maxNaN_F4 = maxNaN 0 1 @?== (1 :: F)
test_maxNaN_F5 =
maxNaN (nanWithPayload 1) (nanWithPayload 2)
@?== (nanWithPayload 1 :: F)
test_minNaN = testGroup "minNaN"
[ testCase "D1" test_minNaN_D1
, testCase "D2" test_minNaN_D2
, testCase "D3" test_minNaN_D3
, testCase "D4" test_minNaN_D4
, testCase "D5" test_minNaN_D5
, testCase "F1" test_minNaN_F1
, testCase "F2" test_minNaN_F2
, testCase "F3" test_minNaN_F3
, testCase "F4" test_minNaN_F4
, testCase "F5" test_minNaN_F5
]
test_minNaN_D1 = minNaN nan 1 @?== (nan :: D)
test_minNaN_D2 = minNaN 1 nan @?== (nan :: D)
test_minNaN_D3 = minNaN 1 2 @?== (1 :: D)
test_minNaN_D4 = minNaN 2 1 @?== (1 :: D)
test_minNaN_D5 =
minNaN (nanWithPayload 1) (nanWithPayload 2)
@?== (nanWithPayload 1 :: D)
test_minNaN_F1 = minNaN nan 1 @?== (nan :: F)
test_minNaN_F2 = minNaN 1 nan @?== (nan :: F)
test_minNaN_F3 = minNaN 1 2 @?== (1 :: F)
test_minNaN_F4 = minNaN 2 1 @?== (1 :: F)
test_minNaN_F5 =
minNaN (nanWithPayload 1) (nanWithPayload 2)
@?== (nanWithPayload 1 :: F)
test_nan = testGroup "nan" $
[ testCase "D" test_nan_D
, testCase "F" test_nan_F
]
test_nan_D = isNaN (nan :: D) @?= True
test_nan_F = isNaN (nan :: F) @?= True
test_infinity = testGroup "infinity"
[ testCase "D1" test_infinity_D1
, testCase "D2" test_infinity_D2
, testCase "F1" test_infinity_F1
, testCase "F2" test_infinity_F2
]
test_infinity_D1 = isInfinite (infinity :: D) @?= True
test_infinity_D2 = infinity > (0 :: D) @?= True
test_infinity_F1 = isInfinite (infinity :: F) @?= True
test_infinity_F2 = infinity > (0 :: F) @?= True
test_minDenormal = testGroup "minDenormal"
[ testCase "D1" (succIEEE 0 @?= (minDenormal :: D))
, testCase "D2" (isDenormalized (minDenormal :: D) @?= True)
, testCase "F1" (succIEEE 0 @?= (minDenormal :: F))
, testCase "F2" (isDenormalized (minDenormal :: F) @?= True)
]
test_minNormal = testGroup "minNormal"
[ testCase "D" (go (minNormal :: D))
, testCase "F" (go (minNormal :: F))
]
where
go n = let (e,_) = floatRange (undefined `asTypeOf` n)
in n @?= encodeFloat 1 (e-1)
test_maxFinite = testGroup "maxFinite"
[ testCase "D" (go (maxFinite :: D))
, testCase "F" (go (maxFinite :: F))
]
where
go n = let r = floatRadix (undefined `asTypeOf` n)
d = floatDigits (undefined `asTypeOf` n)
(_,e) = floatRange (undefined `asTypeOf` n)
in n @?= encodeFloat (r^d-1) (e-d)
test_epsilon = testGroup "epsilon"
[ testCase "D" (go (epsilon :: D))
, testCase "F" (go (epsilon :: F))
]
where
go n = let d = floatDigits (undefined `asTypeOf` n)
in n @?= encodeFloat 1 (1-d)
succIEEE and predIEEE tests ported from tango / math / IEEE.d
test_succIEEE = testGroup "succIEEE"
[ testCase "nan D" test_succIEEE_nan_D
, testCase "neg D1" test_succIEEE_neg_D1
, testCase "neg D2" test_succIEEE_neg_D2
, testCase "neg D3" test_succIEEE_neg_D3
, testCase "neg denorm D1" test_succIEEE_neg_denorm_D1
, testCase "neg denorm D2" test_succIEEE_neg_denorm_D2
, testCase "neg denrom D3" test_succIEEE_neg_denorm_D3
, testCase "zero D1" test_succIEEE_zero_D1
, testCase "zero D2" test_succIEEE_zero_D2
, testCase "pos denorm D1" test_succIEEE_pos_denorm_D1
, testCase "pos denorm D2" test_succIEEE_pos_denorm_D2
, testCase "pos D1" test_succIEEE_pos_D1
, testCase "pos D2" test_succIEEE_pos_D2
, testCase "pos D3" test_succIEEE_pos_D3
, testCase "nan F" test_succIEEE_nan_F
, testCase "neg F1" test_succIEEE_neg_F1
, testCase "neg F2" test_succIEEE_neg_F2
, testCase "neg F3" test_succIEEE_neg_F3
, testCase "neg denorm F1" test_succIEEE_neg_denorm_F1
, testCase "neg denorm F2" test_succIEEE_neg_denorm_F2
, testCase "neg denrom F3" test_succIEEE_neg_denorm_F3
, testCase "zero F1" test_succIEEE_zero_F1
, testCase "zero F2" test_succIEEE_zero_F2
, testCase "pos denorm F1" test_succIEEE_pos_denorm_F1
, testCase "pos denorm F2" test_succIEEE_pos_denorm_F2
, testCase "pos F1" test_succIEEE_pos_F1
, testCase "pos F2" test_succIEEE_pos_F2
, testCase "pos F3" test_succIEEE_pos_F3
]
test_succIEEE_nan_D = isNaN (succIEEE (nan :: D)) @?= True
test_succIEEE_neg_D1 = succIEEE (-infinity) @?= (-maxFinite :: D)
test_succIEEE_neg_D2 = succIEEE (-1 - epsilon) @?= (-1 :: D)
test_succIEEE_neg_D3 = succIEEE (-2) @?= (-2 + epsilon :: D)
test_succIEEE_neg_denorm_D1 = succIEEE (-minNormal) @?= (-minNormal*(1 - epsilon) :: D)
test_succIEEE_neg_denorm_D2 = succIEEE (-minNormal*(1-epsilon)) @?= (-minNormal*(1-2*epsilon) :: D)
test_succIEEE_neg_denorm_D3 = isNegativeZero (succIEEE (-minNormal*epsilon :: D)) @?= True
test_succIEEE_zero_D1 = succIEEE (-0) @?= (minNormal * epsilon :: D)
test_succIEEE_zero_D2 = succIEEE 0 @?= (minNormal * epsilon :: D)
test_succIEEE_pos_denorm_D1 = succIEEE (minNormal*(1-epsilon)) @?= (minNormal :: D)
test_succIEEE_pos_denorm_D2 = succIEEE (minNormal) @?= (minNormal*(1+epsilon) :: D)
test_succIEEE_pos_D1 = succIEEE 1 @?= (1 + epsilon :: D)
test_succIEEE_pos_D2 = succIEEE (2 - epsilon) @?= (2 :: D)
test_succIEEE_pos_D3 = succIEEE maxFinite @?= (infinity :: D)
test_succIEEE_nan_F = isNaN (succIEEE (nan :: F)) @?= True
test_succIEEE_neg_F1 = succIEEE (-infinity) @?= (-maxFinite :: F)
test_succIEEE_neg_F2 = succIEEE (-1 - epsilon) @?= (-1 :: F)
test_succIEEE_neg_F3 = succIEEE (-2) @?= (-2 + epsilon :: F)
test_succIEEE_neg_denorm_F1 = succIEEE (-minNormal) @?= (-minNormal*(1 - epsilon) :: F)
test_succIEEE_neg_denorm_F2 = succIEEE (-minNormal*(1-epsilon)) @?= (-minNormal*(1-2*epsilon) :: F)
test_succIEEE_neg_denorm_F3 = isNegativeZero (succIEEE (-minNormal*epsilon :: F)) @?= True
test_succIEEE_zero_F1 = succIEEE (-0) @?= (minNormal * epsilon :: F)
test_succIEEE_zero_F2 = succIEEE 0 @?= (minNormal * epsilon :: F)
test_succIEEE_pos_denorm_F1 = succIEEE (minNormal*(1-epsilon)) @?= (minNormal :: F)
test_succIEEE_pos_denorm_F2 = succIEEE (minNormal) @?= (minNormal*(1+epsilon) :: F)
test_succIEEE_pos_F1 = succIEEE 1 @?= (1 + epsilon :: F)
test_succIEEE_pos_F2 = succIEEE (2 - epsilon) @?= (2 :: F)
test_succIEEE_pos_F3 = succIEEE maxFinite @?= (infinity :: F)
test_predIEEE = testGroup "predIEEE"
[ testCase "D" test_predIEEE_D
, testCase "F" test_predIEEE_F
]
test_predIEEE_D = predIEEE (1 + epsilon) @?= (1 :: D)
test_predIEEE_F = predIEEE (1 + epsilon) @?= (1 :: F)
test_bisectIEEE = testGroup "bisectIEEE"
[ testCase "D1" test_bisectIEEE_D1
, testCase "D2" test_bisectIEEE_D2
, testCase "D3" test_bisectIEEE_D3
, testCase "D4" test_bisectIEEE_D4
, testCase "D5" test_bisectIEEE_D5
, testCase "D6" test_bisectIEEE_D6
, testCase "D7" test_bisectIEEE_D7
, testCase "D8" test_bisectIEEE_D8
, testCase "D9" test_bisectIEEE_D9
, testCase "F1" test_bisectIEEE_F1
, testCase "F2" test_bisectIEEE_F2
, testCase "F3" test_bisectIEEE_F3
, testCase "F4" test_bisectIEEE_F4
, testCase "F5" test_bisectIEEE_F5
, testCase "F6" test_bisectIEEE_F6
, testCase "F7" test_bisectIEEE_F7
, testCase "F8" test_bisectIEEE_F8
, testCase "F9" test_bisectIEEE_F9
]
test_bisectIEEE_D1 = bisectIEEE (-0) (-1e-20) < (0 :: D) @?= True
test_bisectIEEE_D2 = bisectIEEE (0) (1e-20) > (0 :: D) @?= True
test_bisectIEEE_D3 = bisectIEEE 1 4 @?= (2 :: D)
test_bisectIEEE_D4 = bisectIEEE (2*1.013) (8*1.013) @?= (4*1.013 :: D)
test_bisectIEEE_D5 = bisectIEEE (-1) (-4) @?= (-2 :: D)
test_bisectIEEE_D6 = bisectIEEE (-1) (-2) @?= (-1.5 :: D)
test_bisectIEEE_D7 =
bisectIEEE (-1*(1+8*epsilon)) (-2*(1+8*epsilon))
@?= (-1.5*(1+5*epsilon) :: D)
test_bisectIEEE_D8 =
bisectIEEE (encodeFloat 0x100000 60) (encodeFloat 0x100000 (-10))
@?= (encodeFloat 0x100000 25 :: D)
test_bisectIEEE_D9 =
bisectIEEE 0 infinity @?= (1.5 :: D)
test_bisectIEEE_F1 = bisectIEEE (-0) (-1e-20) < (0 :: F) @?= True
test_bisectIEEE_F2 = bisectIEEE (0) (1e-20) > (0 :: F) @?= True
test_bisectIEEE_F3 = bisectIEEE 1 4 @?= (2 :: F)
test_bisectIEEE_F4 = bisectIEEE (2*1.013) (8*1.013) @?= (4*1.013 :: F)
test_bisectIEEE_F5 = bisectIEEE (-1) (-4) @?= (-2 :: F)
test_bisectIEEE_F6 = bisectIEEE (-1) (-2) @?= (-1.5 :: F)
test_bisectIEEE_F7 =
bisectIEEE (-1*(1+8*epsilon)) (-2*(1+8*epsilon))
@?= (-1.5*(1+5*epsilon) :: F)
test_bisectIEEE_F8 =
bisectIEEE (encodeFloat 0x100000 60) (encodeFloat 0x100000 (-10))
@?= (encodeFloat 0x100000 25 :: F)
test_bisectIEEE_F9 =
bisectIEEE 0 infinity @?= (1.5 :: F)
test_sameSignificandBits = testGroup "sameSignificandBits" $
[ testCase "exact D1" test_sameSignificandBits_exact_D1
, testCase "exact D2" test_sameSignificandBits_exact_D2
, testCase "exact D3" test_sameSignificandBits_exact_D3
, testCase "exact D4" test_sameSignificandBits_exact_D4
, testCase "fewbits D1" test_sameSignificandBits_fewbits_D1
, testCase "fewbits D2" test_sameSignificandBits_fewbits_D2
, testCase "fewbits D3" test_sameSignificandBits_fewbits_D3
, testCase "fewbits D4" test_sameSignificandBits_fewbits_D4
, testCase "fewbits D5" test_sameSignificandBits_fewbits_D5
, testCase "fewbits D6" test_sameSignificandBits_fewbits_D6
, testCase "fewbits D7" test_sameSignificandBits_fewbits_D7
, testCase "close D1" test_sameSignificandBits_close_D1
, testCase "close D2" test_sameSignificandBits_close_D2
, testCase "close D3" test_sameSignificandBits_close_D3
, testCase "close D4" test_sameSignificandBits_close_D4
, testCase "close D5" test_sameSignificandBits_close_D5
, testCase "2factors D1" test_sameSignificandBits_2factors_D1
, testCase "2factors D2" test_sameSignificandBits_2factors_D2
, testCase "2factors D3" test_sameSignificandBits_2factors_D3
, testCase "2factors D4" test_sameSignificandBits_2factors_D4
, testCase "extreme D1" test_sameSignificandBits_extreme_D1
, testCase "extreme D2" test_sameSignificandBits_extreme_D2
, testCase "extreme D3" test_sameSignificandBits_extreme_D3
, testCase "extreme D4" test_sameSignificandBits_extreme_D4
, testCase "extreme D5" test_sameSignificandBits_extreme_D5
, testCase "extreme D6" test_sameSignificandBits_extreme_D6
, testCase "exact F1" test_sameSignificandBits_exact_F1
, testCase "exact F2" test_sameSignificandBits_exact_F2
, testCase "exact F3" test_sameSignificandBits_exact_F3
, testCase "exact F4" test_sameSignificandBits_exact_F4
, testCase "fewbits F1" test_sameSignificandBits_fewbits_F1
, testCase "fewbits F2" test_sameSignificandBits_fewbits_F2
, testCase "fewbits F3" test_sameSignificandBits_fewbits_F3
, testCase "fewbits F4" test_sameSignificandBits_fewbits_F4
, testCase "fewbits F5" test_sameSignificandBits_fewbits_F5
, testCase "fewbits F6" test_sameSignificandBits_fewbits_F6
, testCase "fewbits F7" test_sameSignificandBits_fewbits_F7
, testCase "close F1" test_sameSignificandBits_close_F1
, testCase "close F2" test_sameSignificandBits_close_F2
, testCase "close F3" test_sameSignificandBits_close_F3
, testCase "close F4" test_sameSignificandBits_close_F4
, testCase "close F5" test_sameSignificandBits_close_F5
, testCase "2factors F1" test_sameSignificandBits_2factors_F1
, testCase "2factors F2" test_sameSignificandBits_2factors_F2
, testCase "2factors F3" test_sameSignificandBits_2factors_F3
, testCase "2factors F4" test_sameSignificandBits_2factors_F4
, testCase "extreme F1" test_sameSignificandBits_extreme_F1
, testCase "extreme F2" test_sameSignificandBits_extreme_F2
, testCase "extreme F3" test_sameSignificandBits_extreme_F3
, testCase "extreme F4" test_sameSignificandBits_extreme_F4
, testCase "extreme F5" test_sameSignificandBits_extreme_F5
, testCase "extreme F6" test_sameSignificandBits_extreme_F6
]
test_sameSignificandBits_exact_D1 =
sameSignificandBits (maxFinite :: D) maxFinite
@?= floatDigits (undefined :: D)
test_sameSignificandBits_exact_D2 =
sameSignificandBits (0 :: D) 0
@?= floatDigits (undefined :: D)
test_sameSignificandBits_exact_D3 =
sameSignificandBits (7.1824 :: D) 7.1824
@?= floatDigits (undefined :: D)
test_sameSignificandBits_exact_D4 =
sameSignificandBits (infinity :: D) infinity
@?= floatDigits (undefined :: D)
test_sameSignificandBits_fewbits_D1 =
forM_ [ 0..mantDig-1 ] $ \i ->
sameSignificandBits (1 + 2^^i * epsilon) (1 :: D) @?= mantDig - i - 1
where
mantDig = floatDigits (undefined :: D)
test_sameSignificandBits_fewbits_D2 =
forM_ [ 0..mantDig-3 ] $ \i ->
sameSignificandBits (1 - 2^^i * epsilon) (1 :: D) @?= mantDig - i - 1
where
mantDig = floatDigits (undefined :: D)
test_sameSignificandBits_fewbits_D3 =
forM_ [ 0..mantDig-1 ] $ \i ->
sameSignificandBits (1 :: D) (1 + (2^^i - 1) * epsilon)
@?= mantDig - i
where
mantDig = floatDigits (undefined :: D)
test_sameSignificandBits_fewbits_D4 =
sameSignificandBits (1.5 + epsilon) (1.5 :: D)
@?= floatDigits (undefined :: D) - 1
test_sameSignificandBits_fewbits_D5 =
sameSignificandBits (1.5 - epsilon) (1.5 :: D)
@?= floatDigits (undefined :: D) - 1
test_sameSignificandBits_fewbits_D6 =
sameSignificandBits (1.5 - epsilon) (1.5 + epsilon :: D)
@?= floatDigits (undefined :: D) - 2
test_sameSignificandBits_fewbits_D7 =
sameSignificandBits (minNormal / 8) (minNormal / 17 :: D)
@?= 3
test_sameSignificandBits_close_D1 =
sameSignificandBits (encodeFloat 0x1B0000 84) (encodeFloat 0x1B8000 84 :: D)
@?= 5
test_sameSignificandBits_close_D2 =
sameSignificandBits (encodeFloat 0x180000 10) (encodeFloat 0x1C0000 10 :: D)
@?= 2
test_sameSignificandBits_close_D3 =
sameSignificandBits (1.5 * (1 - epsilon)) (1 :: D) @?= 2
test_sameSignificandBits_close_D4 =
sameSignificandBits 1.5 (1 :: D) @?= 1
test_sameSignificandBits_close_D5 =
sameSignificandBits (2 * (1 - epsilon)) (1 :: D) @?= 1
test_sameSignificandBits_2factors_D1 =
sameSignificandBits maxFinite (infinity :: D) @?= 0
test_sameSignificandBits_2factors_D2 =
sameSignificandBits (2 * (1 - epsilon)) (1 :: D) @?= 1
test_sameSignificandBits_2factors_D3 =
sameSignificandBits 1 (2 :: D) @?= 0
test_sameSignificandBits_2factors_D4 =
sameSignificandBits 4 (1 :: D) @?= 0
test_sameSignificandBits_extreme_D1 =
sameSignificandBits nan (nan :: D) @?= 0
test_sameSignificandBits_extreme_D2 =
sameSignificandBits 0 (-nan :: D) @?= 0
test_sameSignificandBits_extreme_D3 =
sameSignificandBits nan (infinity :: D) @?= 0
test_sameSignificandBits_extreme_D4 =
sameSignificandBits infinity (-infinity :: D) @?= 0
test_sameSignificandBits_extreme_D5 =
sameSignificandBits (-maxFinite) (infinity :: D) @?= 0
test_sameSignificandBits_extreme_D6 =
sameSignificandBits (maxFinite) (-maxFinite :: D) @?= 0
test_sameSignificandBits_exact_F1 =
sameSignificandBits (maxFinite :: F) maxFinite
@?= floatDigits (undefined :: F)
test_sameSignificandBits_exact_F2 =
sameSignificandBits (0 :: F) 0
@?= floatDigits (undefined :: F)
test_sameSignificandBits_exact_F3 =
sameSignificandBits (7.1824 :: F) 7.1824
@?= floatDigits (undefined :: F)
test_sameSignificandBits_exact_F4 =
sameSignificandBits (infinity :: F) infinity
@?= floatDigits (undefined :: F)
test_sameSignificandBits_fewbits_F1 =
forM_ [ 0..mantFig-1 ] $ \i ->
sameSignificandBits (1 + 2^^i * epsilon) (1 :: F) @?= mantFig - i - 1
where
mantFig = floatDigits (undefined :: F)
test_sameSignificandBits_fewbits_F2 =
forM_ [ 0..mantFig-3 ] $ \i ->
sameSignificandBits (1 - 2^^i * epsilon) (1 :: F) @?= mantFig - i - 1
where
mantFig = floatDigits (undefined :: F)
test_sameSignificandBits_fewbits_F3 =
forM_ [ 0..mantFig-1 ] $ \i ->
sameSignificandBits (1 :: F) (1 + (2^^i - 1) * epsilon)
@?= mantFig - i
where
mantFig = floatDigits (undefined :: F)
test_sameSignificandBits_fewbits_F4 =
sameSignificandBits (1.5 + epsilon) (1.5 :: F)
@?= floatDigits (undefined :: F) - 1
test_sameSignificandBits_fewbits_F5 =
sameSignificandBits (1.5 - epsilon) (1.5 :: F)
@?= floatDigits (undefined :: F) - 1
test_sameSignificandBits_fewbits_F6 =
sameSignificandBits (1.5 - epsilon) (1.5 + epsilon :: F)
@?= floatDigits (undefined :: F) - 2
test_sameSignificandBits_fewbits_F7 =
sameSignificandBits (minNormal / 8) (minNormal / 17 :: F)
@?= 3
test_sameSignificandBits_close_F1 =
sameSignificandBits (encodeFloat 0x1B0000 84) (encodeFloat 0x1B8000 84 :: F)
@?= 5
test_sameSignificandBits_close_F2 =
sameSignificandBits (encodeFloat 0x180000 10) (encodeFloat 0x1C0000 10 :: F)
@?= 2
test_sameSignificandBits_close_F3 =
sameSignificandBits (1.5 * (1 - epsilon)) (1 :: F) @?= 2
test_sameSignificandBits_close_F4 =
sameSignificandBits 1.5 (1 :: F) @?= 1
test_sameSignificandBits_close_F5 =
sameSignificandBits (2 * (1 - epsilon)) (1 :: F) @?= 1
test_sameSignificandBits_2factors_F1 =
sameSignificandBits maxFinite (infinity :: F) @?= 0
test_sameSignificandBits_2factors_F2 =
sameSignificandBits (2 * (1 - epsilon)) (1 :: F) @?= 1
test_sameSignificandBits_2factors_F3 =
sameSignificandBits 1 (2 :: F) @?= 0
test_sameSignificandBits_2factors_F4 =
sameSignificandBits 4 (1 :: F) @?= 0
test_sameSignificandBits_extreme_F1 =
sameSignificandBits nan (nan :: F) @?= 0
test_sameSignificandBits_extreme_F2 =
sameSignificandBits 0 (-nan :: F) @?= 0
test_sameSignificandBits_extreme_F3 =
sameSignificandBits nan (infinity :: F) @?= 0
test_sameSignificandBits_extreme_F4 =
sameSignificandBits infinity (-infinity :: F) @?= 0
test_sameSignificandBits_extreme_F5 =
sameSignificandBits (-maxFinite) (infinity :: F) @?= 0
test_sameSignificandBits_extreme_F6 =
sameSignificandBits (maxFinite) (-maxFinite :: F) @?= 0
test_nanWithPayload = testGroup "nanWithPayload"
[ testCase "D1" test_nanWithPayload_D1
, testCase "D2" test_nanWithPayload_D2
, testCase "F1" test_nanWithPayload_F1
, testCase "F2" test_nanWithPayload_F2
]
test_nanWithPayload_D1 =
isNaN (nanWithPayload 1 :: D) @?= True
test_nanWithPayload_D2 =
isNaN (nanWithPayload maxPayload :: D) @?= True
where
maxPayload = maxNaNPayload (undefined :: D)
test_nanWithPayload_F1 =
isNaN (nanWithPayload 1 :: F) @?= True
test_nanWithPayload_F2 =
isNaN (nanWithPayload maxPayload :: F) @?= True
where
maxPayload = maxNaNPayload (undefined :: F)
test_maxNaNPayload = testGroup "maxNaNPayload"
[ testCase "D" (go (undefined :: D))
, testCase "F" (go (undefined :: F))
]
where
go n = let b = floatRadix (undefined `asTypeOf` n)
d = floatDigits (undefined `asTypeOf` n)
in maxNaNPayload n @?= fromIntegral (b^(d-2)-1)
test_nanPayload = testGroup "nanPayload"
[ testCase "D1" test_nanPayload_D1
, testCase "D2" test_nanPayload_D2
, testCase "D3" test_nanPayload_D3
, testCase "F1" test_nanPayload_F1
, testCase "F2" test_nanPayload_F2
, testCase "F3" test_nanPayload_F3
]
test_nanPayload_D1 =
nanPayload (nanWithPayload 1 :: D) @?= 1
test_nanPayload_D2 =
nanPayload (nanWithPayload maxPayload :: D) @?= maxPayload
where
maxPayload = maxNaNPayload (undefined :: D)
test_nanPayload_D3 =
nanPayload (nanWithPayload (maxPayload + 1) :: D) @?= 0
where
maxPayload = maxNaNPayload (undefined :: D)
test_nanPayload_F1 =
nanPayload (nanWithPayload 1 :: F) @?= 1
test_nanPayload_F2 =
nanPayload (nanWithPayload maxPayload :: F) @?= maxPayload
where
maxPayload = maxNaNPayload (undefined :: F)
test_nanPayload_F3 =
nanPayload (nanWithPayload (maxPayload + 1) :: F) @?= 0
where
maxPayload = maxNaNPayload (undefined :: F)
test_copySign = testGroup "copySign"
[ testCase "D1" test_copySign_D1
, testCase "D2" test_copySign_D2
, testCase "D3" test_copySign_D3
, testCase "D4" test_copySign_D4
, testCase "D5" test_copySign_D5
, testCase "D6" test_copySign_D6
, testCase "F1" test_copySign_F1
, testCase "F2" test_copySign_F2
, testCase "F3" test_copySign_F3
, testCase "F4" test_copySign_F4
, testCase "F5" test_copySign_F5
, testCase "F6" test_copySign_F6
]
test_copySign_D1 =
copySign 0.9 (-1.2) @?= (-0.9 :: D)
test_copySign_D2 =
copySign 0.9 (1.2) @?= (0.9 :: D)
test_copySign_D3 =
copySign (-0.9 )(1.2) @?= (0.9 :: D)
test_copySign_D4 =
copySign (-0.9) (-1.2) @?= (-0.9 :: D)
test_copySign_D5 =
copySign 1 (copySign nan 1) @?= (1 :: D)
test_copySign_D6 =
copySign 1 (copySign nan (-1)) @?= (-1 :: D)
test_copySign_F1 =
copySign 0.9 (-1.2) @?= (-0.9 :: F)
test_copySign_F2 =
copySign 0.9 (1.2) @?= (0.9 :: F)
test_copySign_F3 =
copySign (-0.9 )(1.2) @?= (0.9 :: F)
test_copySign_F4 =
copySign (-0.9) (-1.2) @?= (-0.9 :: F)
test_copySign_F5 =
copySign 1 (copySign nan 1) @?= (1 :: F)
test_copySign_F6 =
copySign 1 (copySign nan (-1)) @?= (-1 :: F)
test_IEEE = testGroup "IEEE"
[ test_infinity
, test_minNormal
, test_minDenormal
, test_maxFinite
, test_epsilon
, test_copySign
, test_succIEEE
, test_predIEEE
, test_bisectIEEE
, test_sameSignificandBits
, test_maxNum
, test_minNum
, test_maxNaN
, test_minNaN
, test_nan
, test_nanWithPayload
, test_maxNaNPayload
, test_nanPayload
]
main :: IO ()
main = defaultMain [ test_IEEE
]
| |
2477025df1d8728c9f39bd32c7b247e04407330297deeb8f975e0b29c1a52e89 | elaforge/karya | Retune_test.hs | Copyright 2015
-- This program is distributed under the terms of the GNU General Public
-- License 3.0, see COPYING or -3.0.txt
module Derive.C.Post.Retune_test where
import Util.Test
import qualified Ui.UiTest as UiTest
import qualified Derive.DeriveTest as DeriveTest
import qualified Perform.NN as NN
test_retune :: Test
test_retune = do
let run = DeriveTest.extract DeriveTest.e_nns_rounded
. DeriveTest.derive_tracks
"import retune | realize-retune | retune-time=2 | retune-dist=4"
. UiTest.note_track
equal (run [(0, 4, "retune | -- 4c")]) ([[(0, NN.c4)]], [])
equal (run [(0, 1, "4c"), (1, 4, "retune | -- 4c")])
([[(0, NN.c4), (1, NN.c4)], [(1, NN.c4)]], [])
equal (run [(0, 1, "4c"), (1, 4, "retune | -- 5c")])
( [ [(0, NN.c4), (1, NN.c4)]
, [(1, 76), (2, 72.64), (3, NN.c5)]
]
, []
)
| null | https://raw.githubusercontent.com/elaforge/karya/8ea15e6a5fb57e2f15f8c19836751e315f9c09f2/Derive/C/Post/Retune_test.hs | haskell | This program is distributed under the terms of the GNU General Public
License 3.0, see COPYING or -3.0.txt | Copyright 2015
module Derive.C.Post.Retune_test where
import Util.Test
import qualified Ui.UiTest as UiTest
import qualified Derive.DeriveTest as DeriveTest
import qualified Perform.NN as NN
test_retune :: Test
test_retune = do
let run = DeriveTest.extract DeriveTest.e_nns_rounded
. DeriveTest.derive_tracks
"import retune | realize-retune | retune-time=2 | retune-dist=4"
. UiTest.note_track
equal (run [(0, 4, "retune | -- 4c")]) ([[(0, NN.c4)]], [])
equal (run [(0, 1, "4c"), (1, 4, "retune | -- 4c")])
([[(0, NN.c4), (1, NN.c4)], [(1, NN.c4)]], [])
equal (run [(0, 1, "4c"), (1, 4, "retune | -- 5c")])
( [ [(0, NN.c4), (1, NN.c4)]
, [(1, 76), (2, 72.64), (3, NN.c5)]
]
, []
)
|
caaac108a52da20e9cdbdb9b27a0f4519a1f991300fe2c6506b149e16a2d5432 | MichaelBurge/yxdb-utils | Serialization.hs | {-# LANGUAGE OverloadedStrings,MultiParamTypeClasses #-}
# OPTIONS_GHC -fno - warn - orphans #
module Database.Alteryx.Serialization
(
buildBlock,
buildRecord,
calgaryHeaderSize,
dbFileId,
getCalgaryRecords,
getCalgaryBlockIndex,
getOneBlock,
getOneBlockCalgaryRecords,
getRecord,
getValue,
putRecord,
putValue,
headerPageSize,
miniblockThreshold,
numMetadataBytesActual,
numMetadataBytesHeader,
numBlockBytesActual,
numBlockBytesHeader,
parseRecordsUntil,
recordsPerBlock,
startOfBlocksByteIndex
) where
import Database.Alteryx.Fields
import Database.Alteryx.Types
import Blaze.ByteString.Builder
import Codec.Compression.LZF.ByteString (decompressByteStringFixed, compressByteStringFixed)
import qualified Control.Newtype as NT
import Control.Applicative
import Control.Lens
import Control.Monad as M
import Control.Monad.Loops
import Data.Array.IArray (listArray, bounds, elems)
import Data.Binary
import Data.Binary.C ()
import Data.Binary.Get
import Data.Binary.Put
import Data.Bits
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BSL
import Data.Conduit
import Data.Conduit.List (sourceList)
import Data.Conduit.Lazy (lazyConsume)
import qualified Data.Map as Map
import Data.Maybe (isJust, listToMaybe)
import Data.Monoid
import Data.ReinterpretCast (floatToWord, wordToFloat, doubleToWord, wordToDouble)
import Data.Text as T
import Data.Text.Encoding
import qualified Data.Text.Lazy as TL
import Data.Time.Clock.POSIX
import qualified Data.Vector as V
import System.IO.Unsafe (unsafePerformIO)
import Text.XML hiding (renderText)
import Text.XML.Cursor as XMLC
(
Cursor,
($.//),
attribute,
element,
fromDocument
)
import Text.XML.Stream.Render (renderText)
import Text.XML.Unresolved (toEvents)
-- | Number of records before each block is flushed and added to the block index
recordsPerBlock :: Int
recordsPerBlock = 0x10000
spatialIndexRecordBlockSize = 32
-- | Number of bytes taken by the fixed header
headerPageSize :: Int
headerPageSize = 512
| Number of bytes taken by the Calgary format 's header
calgaryHeaderSize :: Int
calgaryHeaderSize = 8192
-- | When writing miniblocks, how many bytes should each miniblock aim for?
miniblockThreshold :: Int
miniblockThreshold = 0x10000
-- | When decompressing miniblocks, how many bytes should be allocated for the output?
bufferSize :: Int
bufferSize = 0x40000
dbFileId :: DbType -> Word32
dbFileId WrigleyDb = 0x00440205
dbFileId WrigleyDb_NoSpatialIndex = 0x00440204
dbFileId CalgaryDb = 0x00450101
Start of metadata : 8196
End of metadata : 10168
1972 = 3da * 2 = 986 * 2
numBytes :: (Binary b, Num t) => b -> t
numBytes x = fromIntegral $ BSL.length $ runPut $ put x
numMetadataBytesHeader :: Header -> Int
numMetadataBytesHeader header = fromIntegral $ 2 * (header ^. metaInfoLength)
numMetadataBytesActual :: RecordInfo -> Int
numMetadataBytesActual recordInfo = numBytes recordInfo
numBlockBytesHeader :: Header -> Int
numBlockBytesHeader header =
let start = headerPageSize + (numMetadataBytesHeader header)
end = (fromIntegral $ header ^. recordBlockIndexPos)
in end - start
numBlockBytesActual :: Block -> Int
numBlockBytesActual block = numBytes block
startOfBlocksByteIndex :: Header -> Int
startOfBlocksByteIndex header =
headerPageSize + (numMetadataBytesHeader header)
parseRecordsUntil :: RecordInfo -> Get [Record]
parseRecordsUntil recordInfo = do
done <- isEmpty
if done
then return $ []
else (:) <$> getRecord recordInfo <*> parseRecordsUntil recordInfo
| This binary instance is really slow because the YxdbFile type stores a list of records . Use the Conduit functions instead .
instance Binary YxdbFile where
put yxdbFile = do
put $ yxdbFile ^. yxdbFileHeader
put $ yxdbFile ^. yxdbFileMetadata
mapM_ (putRecord $ yxdbFile ^. yxdbFileMetadata) $ yxdbFile ^. yxdbFileRecords
put $ yxdbFile ^. yxdbFileBlockIndex
get = do
fHeader <- label "Header" $ isolate (fromIntegral headerPageSize) get
fMetadata <- label "Metadata" $ isolate (numMetadataBytesHeader fHeader) $ get
let numBlockBytes = numBlockBytesHeader $ fHeader
fBlocks <- label ("Blocks of size " ++ show numBlockBytes) $
isolate numBlockBytes get :: Get Block
fBlockIndex <- label "Block Index" get
let fRecords = runGet (label "Records" $ parseRecordsUntil fMetadata) $ NT.unpack fBlocks
return $ YxdbFile {
_yxdbFileHeader = fHeader,
_yxdbFileMetadata = fMetadata,
_yxdbFileRecords = fRecords,
_yxdbFileBlockIndex = fBlockIndex
}
instance Binary CalgaryRecordInfo where
put calgaryRecordInfo = error "CalgaryRecordInfo: put undefined"
get = CalgaryRecordInfo <$> getCalgaryRecordInfo
Start : 27bc = 10172
End : 2826 = 10278
Diff : 106 = 6A
Start : 27c1 = 10177
End : 2998 = 10648
Diff : 1D7 = 471
8192 byte header
-- Read 4 bytes to get number of UTF-16 characters(so double for number of bytes)
blockSize is 16 - bit
block is a 32767 - byte compressed buffer
-- Is follow
getCalgaryRecords :: CalgaryRecordInfo -> Get (V.Vector Record)
getCalgaryRecords (CalgaryRecordInfo recordInfo) = do
0
1 : Number of records ? Matches first word in block index
0
let ( RecordInfo fs ) = recordInfo
-- f1 <- getValue $ fs !! 0
-- error $ show f1
-- bs <- getRemainingLazyByteString
-- error $ show bs
V.replicateM (fromIntegral mystery2) $ (getRecord recordInfo)
headend_no < - getValue $ Field " headend_no " FTInt32 Nothing Nothing
hh_no < - getValue $ Field " hh_no " FTInt32 Nothing Nothing
hashed_id < - getValue $ Field " hashed_id " FTVString Nothing Nothing
-- error $ show hashed_id
-- bs <- getRemainingLazyByteString
-- r <- getRecord recordInfo
V.replicateM ( fromIntegral mystery2 ) $ ( recordInfo )
-- error $ show [ show , show , show ]
-- x <- getRecord recordInfo
-- y <- getRecord recordInfo
-- error $ show y
byte < - getValue $ Field " byte " FTByte Nothing Nothing
-- int16 <- getValue $ Field "int16" FTInt16 Nothing Nothing
-- int32 <- getValue $ Field "int32" FTInt32 Nothing Nothing
int64 < - getValue $ Field " int64 " FTInt64 Nothing Nothing
-- decimal <- getValue $ Field "decimal" FTFixedDecimal (Just 7) Nothing
-- mystery6 <- getWord32le -- 0
float < - getValue $ Field " float " Nothing Nothing
double < - getValue $ Field " double " FTDouble Nothing Nothing
string < - getValue $ Field " string " FTString ( Just 7 ) Nothing
wstring < - getValue $ Field " wstring " FTWString ( Just 2 ) Nothing
let vfield = Field " vstring " FTVString Nothing Nothing
vwfield = Field " vwstring " FTVWString Nothing Nothing
vstring < - vfield
-- vwstring <- getValue vwfield
date < - getValue $ Field " date " FTDate Nothing Nothing
time < - getValue $ Field " time " FTTime Nothing Nothing
datetime < - getValue $ Field " datetime " FTDateTime Nothing Nothing
< - getWord32le -- 13
-- -- error $ show [
-- show , show , show , show mystery4 , show mystery5 ,
-- show mystery6 , show
-- -- ]
-- error $ show [
-- show byte,
-- show int16,
show int32 ,
show int64 ,
-- show decimal,
-- show float,
-- show double,
-- show string,
-- show wstring,
-- show vstring,
-- show vwstring,
-- show date,
-- show time,
-- show datetime
-- ]
-- vfieldVarBs <- getVariableData
-- vwfieldVarBs <- getVariableData
-- remainder <- getRemainingLazyByteString
-- error $ show remainder
-- error $ show [
-- show ,
-- show byte,
-- -- show byteNul,
-- show ,
-- -- show short,
-- -- show shortNul,
-- show ,
-- -- show int,
-- -- show intNul,
show int64 ,
-- -- show int64Nul
-- show remainder
-- ]
-- error $ show [ mystery1 , byte , byteNul , mystery2 , short , shortNul , remainder ]
getOneBlock :: Get (Maybe Block)
getOneBlock = do
blockSize <- getWord16le
block <- getByteString $ fromIntegral blockSize
let decompressed = decompressByteStringFixed 100000 block
return $ Block <$> BSL.fromStrict <$> decompressed
getOneBlockCalgaryRecords :: CalgaryRecordInfo -> Get (V.Vector Record)
getOneBlockCalgaryRecords recordInfo = do
(Just (Block block)) <- getOneBlock
let records = runGet (getCalgaryRecords recordInfo ) block
return records
instance Binary CalgaryFile where
put calgaryFile = error "CalgaryFile: put undefined"
get = do
fHeader <- label "Header" $ isolate (fromIntegral calgaryHeaderSize) get :: Get CalgaryHeader
fNumMetadataBytes <- (2*) <$> fromIntegral <$> getWord32le
fRecordInfo <- label "Metadata" $ isolate fNumMetadataBytes $ get :: Get CalgaryRecordInfo
let numRecords = fromIntegral $ fHeader ^. calgaryHeaderNumRecords
let readAllBlocks remainingRecords = do
if remainingRecords > 0
then do
records <- getOneBlockCalgaryRecords fRecordInfo
let newRecords = remainingRecords - V.length records
(records :) <$> readAllBlocks newRecords
else return []
recordses <- readAllBlocks numRecords :: Get [ V.Vector Record ]
blockIndex <- getCalgaryBlockIndex
-- Should be done by here
let result = CalgaryFile {
_calgaryFileHeader = fHeader,
_calgaryFileRecordInfo = fRecordInfo,
_calgaryFileRecords = recordses,
_calgaryFileIndex = blockIndex
}
error $ show result
getString :: Get CalgaryIndexFile
getString = do
fHeader <- isolate (fromIntegral calgaryHeaderSize) get :: Get CalgaryHeader
block <- getOneBlock
error $ show block
121 : Index to vardata ?
1 : Number of records ?
= -1
512
1
= 32768 + 8 = -8
mystery7 <- getWord16le
0
value <- getLazyByteStringNul
= -1
3104
replicateM_ 31 $ do
mystery10 <- getWord16le
32
return ()
1 : Number of values ?
0
7 : Length of platano ?
value2 <- getByteString $ fromIntegral mystery14
mystery15 <- getWord64le
8192 : Index of first block ?
instance Binary CalgaryIndexFile where
put _ = error "CalgaryIndexFile: put undefined"
get = do
fHeader <- isolate (fromIntegral calgaryHeaderSize) get :: Get CalgaryHeader
(Just (Block block)) <- getOneBlock
let getRecords = do
0
1
8
val <- getValue $ Field "x" FTString (Just 7) Nothing
mystery4 <- getWord16le -- -1
bs <- getRemainingLazyByteString
error $ show bs
runGet getRecords block
error $ show block
getCalgaryBlockIndex :: Get CalgaryBlockIndex
getCalgaryBlockIndex = do
let getOneIndex = do
mystery1 <- getWord64le
getWord64le
indices <- V.fromList <$> untilM' getOneIndex isEmpty
return $ CalgaryBlockIndex $ V.map fromIntegral indices
documentToTextWithoutXMLHeader :: Document -> T.Text
documentToTextWithoutXMLHeader document =
let events = Prelude.tail $ toEvents $ toXMLDocument document
in T.concat $
unsafePerformIO $
lazyConsume $
sourceList events $=
renderText def
getRecordInfoText :: Bool -> Get T.Text
getRecordInfoText isNullTerminated = do
if isNullTerminated
then do
bs <- BS.concat . BSL.toChunks <$>
getRemainingLazyByteString
when (BS.length bs < 4) $ fail $ "No trailing newline and null: " ++ show bs
let text = T.init $ T.init $ decodeUtf16LE bs
return text
else decodeUtf16LE <$> BS.concat . BSL.toChunks <$> getRemainingLazyByteString
getCalgaryRecordInfo :: Get RecordInfo
getCalgaryRecordInfo = do
text <- getRecordInfoText False
let document = parseText_ def $ TL.fromStrict text
cursor = fromDocument document
recordInfos = parseXmlRecordInfo cursor
case recordInfos of
[] -> fail "No RecordInfo entries found"
x:[] -> return x
xs -> fail "Too many RecordInfo entries found"
getYxdbRecordInfo :: Get RecordInfo
getYxdbRecordInfo = do
text <- getRecordInfoText True
let document = parseText_ def $ TL.fromStrict text
cursor = fromDocument document
recordInfos = parseXmlRecordInfo cursor
case recordInfos of
[] -> fail "No RecordInfo entries found"
x:[] -> return x
xs -> fail "Too many RecordInfo entries found"
instance Binary RecordInfo where
put metadata =
let fieldMap :: Field -> Map.Map Name Text
fieldMap field =
let
requiredAttributes =
[
("name", field ^. fieldName),
("type", renderFieldType $ field ^. fieldType)
]
sizeAttributes =
case field ^. fieldSize of
Nothing -> [ ]
Just x -> [ ("size", T.pack $ show x) ]
scaleAttributes =
case field ^. fieldScale of
Nothing -> [ ]
Just x -> [ ("scale", T.pack $ show x) ]
in Map.fromList $
Prelude.concat $
[ requiredAttributes, sizeAttributes, scaleAttributes ]
transformField field =
NodeElement $
Element "Field" (fieldMap field) [ ]
transformRecordInfo recordInfo =
NodeElement $
Element "RecordInfo" Map.empty $
Prelude.map transformField recordInfo
transformMetaInfo (RecordInfo recordInfo) =
Element "MetaInfo" Map.empty [ transformRecordInfo recordInfo]
transformToDocument node = Document (Prologue [] Nothing []) node []
renderMetaInfo metadata =
encodeUtf16LE $
flip T.snoc '\0' $
flip T.snoc '\n' $
documentToTextWithoutXMLHeader $
transformToDocument $
transformMetaInfo metadata
in putByteString $ renderMetaInfo metadata
get = getYxdbRecordInfo
parseXmlField :: Cursor -> [Field]
parseXmlField cursor = do
let fieldCursors = cursor $.// XMLC.element "Field"
fieldCursor <- fieldCursors
aName <- attribute "name" fieldCursor
aType <- attribute "type" fieldCursor
let aDesc = listToMaybe $ attribute "description" fieldCursor
let aSize = listToMaybe $ attribute "size" fieldCursor
let aScale = listToMaybe $ attribute "scale" fieldCursor
return $ Field {
_fieldName = aName,
_fieldType = parseFieldType aType,
_fieldSize = parseInt <$> aSize,
_fieldScale = parseInt <$> aScale
}
parseXmlRecordInfo :: Cursor -> [RecordInfo]
parseXmlRecordInfo cursor = do
let recordInfoCursors = cursor $.// XMLC.element "RecordInfo"
recordInfoCursor <- recordInfoCursors
let fields = parseXmlField recordInfoCursor
return $ RecordInfo fields
parseInt :: Text -> Int
parseInt text = read $ T.unpack text :: Int
-- | True if any fields have associated variable data in the variable data portion of the record.
hasVariableData :: RecordInfo -> Bool
hasVariableData (RecordInfo recordInfo) =
let fieldHasVariableData field =
case field ^. fieldType of
FTVString -> True
FTVWString -> True
FTBlob -> True
_ -> False
in Prelude.any fieldHasVariableData recordInfo
-- | Writes a record using the provided metadata.
putRecord :: RecordInfo -> Record -> Put
putRecord recordInfo record = putByteString $ toByteString $ buildRecord recordInfo record
buildRecord :: RecordInfo -> Record -> Builder
buildRecord recordInfo@(RecordInfo fields) (Record fieldValues) =
if hasVariableData recordInfo
then error "putRecord: Variable data unimplemented"
else mconcat $ Prelude.zipWith buildValue fields fieldValues
-- | Records consists of a fixed amount of data for each field, and also a possibly large amoutn of variable data at the end.
getRecord :: RecordInfo -> Get Record
getRecord recordInfo@(RecordInfo fields) = do
record <- Record <$> mapM getValue fields
when (hasVariableData recordInfo) $ do
_ <- getAllVariableData
return ()
return record
instance Binary BlockIndex where
get = do
arraySize <- label "Index Array Size" $ fromIntegral <$> getWord32le
let numBlockIndexBytes = arraySize * 8
blocks <- label ("Reading block of size " ++ show arraySize) $
isolate numBlockIndexBytes $
replicateM arraySize (fromIntegral <$> getWord64le)
return $ BlockIndex $ listArray (0, arraySize-1) blocks
put (BlockIndex blockIndex) = do
let (_, iMax) = bounds blockIndex
putWord32le $ fromIntegral $ iMax + 1
mapM_ (putWord64le . fromIntegral) $ elems blockIndex
instance Binary Block where
get =
let tryGetOne = do
done <- isEmpty
if done
then return Nothing
else Just <$> get :: Get (Maybe Miniblock)
in NT.pack <$>
BSL.fromChunks <$>
Prelude.map NT.unpack <$>
unfoldM tryGetOne
put block = putByteString $ toByteString $ buildBlock block
buildBlock :: Block -> Builder
buildBlock (Block bs) =
case BSL.toChunks bs of
[] -> buildMiniblock $ Miniblock $ BS.empty
xs -> mconcat $ Prelude.map (buildMiniblock . Miniblock) xs
instance Binary Miniblock where
get = do
writtenSize <- label "Block size" getWord32le
let compressionBitIndex = 31
let isCompressed = not $ testBit writtenSize compressionBitIndex
let size = fromIntegral $ clearBit writtenSize compressionBitIndex
bs <- label ("Block of size " ++ show size) $ isolate size $ getByteString $ size
let chunk = if isCompressed
then case decompressByteStringFixed bufferSize bs of
Nothing -> fail "Unable to decompress. Increase buffer size?"
Just x -> return $ x
else return bs
Miniblock <$> chunk
put miniblock = putByteString $ toByteString $ buildMiniblock miniblock
buildMiniblock :: Miniblock -> Builder
buildMiniblock (Miniblock bs) =
let compressionBitIndex = 31
compressedBlock = compressByteStringFixed ((BS.length bs)-1) bs
blockToWrite = case compressedBlock of
Nothing -> bs
Just x -> x
size = BS.length blockToWrite
writtenSize = if isJust compressedBlock
then size
else setBit size compressionBitIndex
in mconcat [
fromWord32le $ fromIntegral writtenSize,
fromByteString blockToWrite
]
instance Binary Header where
  put header = do
    -- The description occupies exactly 64 bytes on disk: truncate the UTF-8
    -- encoding to 64 bytes, then NUL-pad up to the full width.
    let actualDescriptionBS = BS.take 64 $ encodeUtf8 $ header ^. description
    let numPaddingBytes = fromIntegral $ 64 - BS.length actualDescriptionBS
    let paddingDescriptionBS = BSL.toStrict $ BSL.take numPaddingBytes $ BSL.repeat 0
    putByteString actualDescriptionBS
    putByteString paddingDescriptionBS
    putWord32le $ header ^. fileId
    -- Creation date is written as a 32-bit little-endian POSIX timestamp.
    putWord32le $ truncate $ utcTimeToPOSIXSeconds $ header ^. creationDate
    putWord32le $ header ^. flags1
    putWord32le $ header ^. flags2
    putWord32le $ header ^. metaInfoLength
    putWord32le $ header ^. mystery
    putWord64le $ header ^. spatialIndexPos
    putWord64le $ header ^. recordBlockIndexPos
    putWord64le $ header ^. numRecords
    putWord32le $ header ^. compressionVersion
    putByteString $ header ^. reservedSpace
  -- The field order below must mirror `put` exactly; together they define
  -- the wire layout of the fixed-size header page.
  get = do
    fDescription <- label "Description" $ decodeUtf8 <$> getByteString 64
    fFileId <- label "FileId" getWord32le
    fCreationDate <- label "Creation Date" getWord32le
    fFlags1 <- label "Flags 1" getWord32le
    fFlags2 <- label "Flags 2" getWord32le
    fMetaInfoLength <- label "Metadata Length" getWord32le
    fMystery <- label "Mystery Field" getWord32le
    fSpatialIndexPos <- label "Spatial Index" getWord64le
    fRecordBlockIndexPos <- label "Record Block" getWord64le
    fNumRecords <- label "Num Records" getWord64le
    fCompressionVersion <- label "Compression Version" getWord32le
    -- NOTE(review): assumes the caller isolates this parser to the header
    -- page, so "the rest" is exactly the reserved region -- confirm.
    fReservedSpace <- label "Reserved Space" $ (BSL.toStrict <$> getRemainingLazyByteString)
    return $ Header {
      _description = fDescription,
      _fileId = fFileId,
      _creationDate = posixSecondsToUTCTime $ fromIntegral fCreationDate,
      _flags1 = fFlags1,
      _flags2 = fFlags2,
      _metaInfoLength = fMetaInfoLength,
      _mystery = fMystery,
      _spatialIndexPos = fSpatialIndexPos,
      _recordBlockIndexPos = fRecordBlockIndexPos,
      _numRecords = fNumRecords,
      _compressionVersion = fCompressionVersion,
      _reservedSpace = fReservedSpace
    }
instance Binary CalgaryHeader where
  -- Writing Calgary headers is not supported yet; calling `put` raises.
  put header = error "CalgaryHeader::put is unimplemented"
  get = do
    -- 64-byte fixed-width description, then the remaining header fields in
    -- wire order. Several fields are not yet understood ("mystery").
    description <- decodeUtf8 <$> getByteString 64
    fileId <- getWord32le
    -- Creation date arrives as a 32-bit POSIX timestamp.
    creationDate <- posixSecondsToUTCTime <$> fromIntegral <$> getWord32le
    indexPosition <- getWord32le
    mystery1 <- getWord32le
    numBlocks <- getWord32le
    mystery2 <- getWord32le
    mystery3 <- getWord32le
    mystery4 <- getWord32le
    mystery5 <- getWord32le
    mystery6 <- getWord64le
    numRecords <- getWord32le
    -- NOTE(review): remaining bytes are kept verbatim as reserved space;
    -- assumes the caller isolates this parser to the header page -- confirm.
    reserved <- BSL.toStrict <$> getRemainingLazyByteString
    return CalgaryHeader {
      _calgaryHeaderDescription = description,
      _calgaryHeaderFileId = fileId,
      _calgaryHeaderCreationDate = creationDate,
      _calgaryHeaderIndexPosition = indexPosition,
      _calgaryHeaderMystery1 = mystery1,
      _calgaryHeaderNumRecords = numRecords,
      _calgaryHeaderMystery2 = mystery2,
      _calgaryHeaderMystery3 = mystery3,
      _calgaryHeaderMystery4 = mystery4,
      _calgaryHeaderMystery5 = mystery5,
      _calgaryHeaderMystery6 = mystery6,
      _calgaryHeaderNumBlocks = numBlocks,
      _calgaryHeaderReserved = reserved
    }
| null | https://raw.githubusercontent.com/MichaelBurge/yxdb-utils/955995a95cf56b7cb3c45e0175e2b574411d1a18/src/Database/Alteryx/Serialization.hs | haskell | # LANGUAGE OverloadedStrings,MultiParamTypeClasses #
| Number of records before each block is flushed and added to the block index
| Number of bytes taken by the fixed header
| When writing miniblocks, how many bytes should each miniblock aim for?
| When decompressing miniblocks, how many bytes should be allocated for the output?
Read 4 bytes to get number of UTF-16 characters(so double for number of bytes)
Is follow
f1 <- getValue $ fs !! 0
error $ show f1
bs <- getRemainingLazyByteString
error $ show bs
error $ show hashed_id
bs <- getRemainingLazyByteString
r <- getRecord recordInfo
error $ show [ show , show , show ]
x <- getRecord recordInfo
y <- getRecord recordInfo
error $ show y
int16 <- getValue $ Field "int16" FTInt16 Nothing Nothing
int32 <- getValue $ Field "int32" FTInt32 Nothing Nothing
decimal <- getValue $ Field "decimal" FTFixedDecimal (Just 7) Nothing
mystery6 <- getWord32le -- 0
vwstring <- getValue vwfield
13
-- error $ show [
show , show , show , show mystery4 , show mystery5 ,
show mystery6 , show
-- ]
error $ show [
show byte,
show int16,
show decimal,
show float,
show double,
show string,
show wstring,
show vstring,
show vwstring,
show date,
show time,
show datetime
]
vfieldVarBs <- getVariableData
vwfieldVarBs <- getVariableData
remainder <- getRemainingLazyByteString
error $ show remainder
error $ show [
show ,
show byte,
-- show byteNul,
show ,
-- show short,
-- show shortNul,
show ,
-- show int,
-- show intNul,
-- show int64Nul
show remainder
]
error $ show [ mystery1 , byte , byteNul , mystery2 , short , shortNul , remainder ]
Should be done by here
-1
| True if any fields have associated variable data in the variable data portion of the record.
| Writes a record using the provided metadata.
| Records consists of a fixed amount of data for each field, and also a possibly large amoutn of variable data at the end. | # OPTIONS_GHC -fno - warn - orphans #
module Database.Alteryx.Serialization
(
buildBlock,
buildRecord,
calgaryHeaderSize,
dbFileId,
getCalgaryRecords,
getCalgaryBlockIndex,
getOneBlock,
getOneBlockCalgaryRecords,
getRecord,
getValue,
putRecord,
putValue,
headerPageSize,
miniblockThreshold,
numMetadataBytesActual,
numMetadataBytesHeader,
numBlockBytesActual,
numBlockBytesHeader,
parseRecordsUntil,
recordsPerBlock,
startOfBlocksByteIndex
) where
import Database.Alteryx.Fields
import Database.Alteryx.Types
import Blaze.ByteString.Builder
import Codec.Compression.LZF.ByteString (decompressByteStringFixed, compressByteStringFixed)
import qualified Control.Newtype as NT
import Control.Applicative
import Control.Lens
import Control.Monad as M
import Control.Monad.Loops
import Data.Array.IArray (listArray, bounds, elems)
import Data.Binary
import Data.Binary.C ()
import Data.Binary.Get
import Data.Binary.Put
import Data.Bits
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BSL
import Data.Conduit
import Data.Conduit.List (sourceList)
import Data.Conduit.Lazy (lazyConsume)
import qualified Data.Map as Map
import Data.Maybe (isJust, listToMaybe)
import Data.Monoid
import Data.ReinterpretCast (floatToWord, wordToFloat, doubleToWord, wordToDouble)
import Data.Text as T
import Data.Text.Encoding
import qualified Data.Text.Lazy as TL
import Data.Time.Clock.POSIX
import qualified Data.Vector as V
import System.IO.Unsafe (unsafePerformIO)
import Text.XML hiding (renderText)
import Text.XML.Cursor as XMLC
(
Cursor,
($.//),
attribute,
element,
fromDocument
)
import Text.XML.Stream.Render (renderText)
import Text.XML.Unresolved (toEvents)
recordsPerBlock :: Int
recordsPerBlock = 0x10000
spatialIndexRecordBlockSize = 32
headerPageSize :: Int
headerPageSize = 512
| Number of bytes taken by the Calgary format 's header
calgaryHeaderSize :: Int
calgaryHeaderSize = 8192
miniblockThreshold :: Int
miniblockThreshold = 0x10000
bufferSize :: Int
bufferSize = 0x40000
dbFileId :: DbType -> Word32
dbFileId WrigleyDb = 0x00440205
dbFileId WrigleyDb_NoSpatialIndex = 0x00440204
dbFileId CalgaryDb = 0x00450101
Start of metadata : 8196
End of metadata : 10168
1972 = 3da * 2 = 986 * 2
numBytes :: (Binary b, Num t) => b -> t
numBytes x = fromIntegral $ BSL.length $ runPut $ put x
numMetadataBytesHeader :: Header -> Int
numMetadataBytesHeader header = fromIntegral $ 2 * (header ^. metaInfoLength)
numMetadataBytesActual :: RecordInfo -> Int
numMetadataBytesActual recordInfo = numBytes recordInfo
numBlockBytesHeader :: Header -> Int
numBlockBytesHeader header =
let start = headerPageSize + (numMetadataBytesHeader header)
end = (fromIntegral $ header ^. recordBlockIndexPos)
in end - start
numBlockBytesActual :: Block -> Int
numBlockBytesActual block = numBytes block
startOfBlocksByteIndex :: Header -> Int
startOfBlocksByteIndex header =
headerPageSize + (numMetadataBytesHeader header)
parseRecordsUntil :: RecordInfo -> Get [Record]
parseRecordsUntil recordInfo = do
done <- isEmpty
if done
then return $ []
else (:) <$> getRecord recordInfo <*> parseRecordsUntil recordInfo
| This binary instance is really slow because the YxdbFile type stores a list of records . Use the Conduit functions instead .
instance Binary YxdbFile where
put yxdbFile = do
put $ yxdbFile ^. yxdbFileHeader
put $ yxdbFile ^. yxdbFileMetadata
mapM_ (putRecord $ yxdbFile ^. yxdbFileMetadata) $ yxdbFile ^. yxdbFileRecords
put $ yxdbFile ^. yxdbFileBlockIndex
get = do
fHeader <- label "Header" $ isolate (fromIntegral headerPageSize) get
fMetadata <- label "Metadata" $ isolate (numMetadataBytesHeader fHeader) $ get
let numBlockBytes = numBlockBytesHeader $ fHeader
fBlocks <- label ("Blocks of size " ++ show numBlockBytes) $
isolate numBlockBytes get :: Get Block
fBlockIndex <- label "Block Index" get
let fRecords = runGet (label "Records" $ parseRecordsUntil fMetadata) $ NT.unpack fBlocks
return $ YxdbFile {
_yxdbFileHeader = fHeader,
_yxdbFileMetadata = fMetadata,
_yxdbFileRecords = fRecords,
_yxdbFileBlockIndex = fBlockIndex
}
instance Binary CalgaryRecordInfo where
put calgaryRecordInfo = error "CalgaryRecordInfo: put undefined"
get = CalgaryRecordInfo <$> getCalgaryRecordInfo
Start : 27bc = 10172
End : 2826 = 10278
Diff : 106 = 6A
Start : 27c1 = 10177
End : 2998 = 10648
Diff : 1D7 = 471
8192 byte header
blockSize is 16 - bit
block is a 32767 - byte compressed buffer
getCalgaryRecords :: CalgaryRecordInfo -> Get (V.Vector Record)
getCalgaryRecords (CalgaryRecordInfo recordInfo) = do
0
1 : Number of records ? Matches first word in block index
0
let ( RecordInfo fs ) = recordInfo
V.replicateM (fromIntegral mystery2) $ (getRecord recordInfo)
headend_no < - getValue $ Field " headend_no " FTInt32 Nothing Nothing
hh_no < - getValue $ Field " hh_no " FTInt32 Nothing Nothing
hashed_id < - getValue $ Field " hashed_id " FTVString Nothing Nothing
V.replicateM ( fromIntegral mystery2 ) $ ( recordInfo )
byte < - getValue $ Field " byte " FTByte Nothing Nothing
int64 < - getValue $ Field " int64 " FTInt64 Nothing Nothing
float < - getValue $ Field " float " Nothing Nothing
double < - getValue $ Field " double " FTDouble Nothing Nothing
string < - getValue $ Field " string " FTString ( Just 7 ) Nothing
wstring < - getValue $ Field " wstring " FTWString ( Just 2 ) Nothing
let vfield = Field " vstring " FTVString Nothing Nothing
vwfield = Field " vwstring " FTVWString Nothing Nothing
vstring < - vfield
date < - getValue $ Field " date " FTDate Nothing Nothing
time < - getValue $ Field " time " FTTime Nothing Nothing
datetime < - getValue $ Field " datetime " FTDateTime Nothing Nothing
show int32 ,
show int64 ,
show int64 ,
getOneBlock :: Get (Maybe Block)
getOneBlock = do
blockSize <- getWord16le
block <- getByteString $ fromIntegral blockSize
let decompressed = decompressByteStringFixed 100000 block
return $ Block <$> BSL.fromStrict <$> decompressed
getOneBlockCalgaryRecords :: CalgaryRecordInfo -> Get (V.Vector Record)
getOneBlockCalgaryRecords recordInfo = do
(Just (Block block)) <- getOneBlock
let records = runGet (getCalgaryRecords recordInfo ) block
return records
instance Binary CalgaryFile where
put calgaryFile = error "CalgaryFile: put undefined"
get = do
fHeader <- label "Header" $ isolate (fromIntegral calgaryHeaderSize) get :: Get CalgaryHeader
fNumMetadataBytes <- (2*) <$> fromIntegral <$> getWord32le
fRecordInfo <- label "Metadata" $ isolate fNumMetadataBytes $ get :: Get CalgaryRecordInfo
let numRecords = fromIntegral $ fHeader ^. calgaryHeaderNumRecords
let readAllBlocks remainingRecords = do
if remainingRecords > 0
then do
records <- getOneBlockCalgaryRecords fRecordInfo
let newRecords = remainingRecords - V.length records
(records :) <$> readAllBlocks newRecords
else return []
recordses <- readAllBlocks numRecords :: Get [ V.Vector Record ]
blockIndex <- getCalgaryBlockIndex
let result = CalgaryFile {
_calgaryFileHeader = fHeader,
_calgaryFileRecordInfo = fRecordInfo,
_calgaryFileRecords = recordses,
_calgaryFileIndex = blockIndex
}
error $ show result
getString :: Get CalgaryIndexFile
getString = do
fHeader <- isolate (fromIntegral calgaryHeaderSize) get :: Get CalgaryHeader
block <- getOneBlock
error $ show block
121 : Index to vardata ?
1 : Number of records ?
= -1
512
1
= 32768 + 8 = -8
mystery7 <- getWord16le
0
value <- getLazyByteStringNul
= -1
3104
replicateM_ 31 $ do
mystery10 <- getWord16le
32
return ()
1 : Number of values ?
0
7 : Length of platano ?
value2 <- getByteString $ fromIntegral mystery14
mystery15 <- getWord64le
8192 : Index of first block ?
instance Binary CalgaryIndexFile where
put _ = error "CalgaryIndexFile: put undefined"
get = do
fHeader <- isolate (fromIntegral calgaryHeaderSize) get :: Get CalgaryHeader
(Just (Block block)) <- getOneBlock
let getRecords = do
0
1
8
val <- getValue $ Field "x" FTString (Just 7) Nothing
bs <- getRemainingLazyByteString
error $ show bs
runGet getRecords block
error $ show block
getCalgaryBlockIndex :: Get CalgaryBlockIndex
getCalgaryBlockIndex = do
let getOneIndex = do
mystery1 <- getWord64le
getWord64le
indices <- V.fromList <$> untilM' getOneIndex isEmpty
return $ CalgaryBlockIndex $ V.map fromIntegral indices
documentToTextWithoutXMLHeader :: Document -> T.Text
documentToTextWithoutXMLHeader document =
let events = Prelude.tail $ toEvents $ toXMLDocument document
in T.concat $
unsafePerformIO $
lazyConsume $
sourceList events $=
renderText def
getRecordInfoText :: Bool -> Get T.Text
getRecordInfoText isNullTerminated = do
if isNullTerminated
then do
bs <- BS.concat . BSL.toChunks <$>
getRemainingLazyByteString
when (BS.length bs < 4) $ fail $ "No trailing newline and null: " ++ show bs
let text = T.init $ T.init $ decodeUtf16LE bs
return text
else decodeUtf16LE <$> BS.concat . BSL.toChunks <$> getRemainingLazyByteString
getCalgaryRecordInfo :: Get RecordInfo
getCalgaryRecordInfo = do
text <- getRecordInfoText False
let document = parseText_ def $ TL.fromStrict text
cursor = fromDocument document
recordInfos = parseXmlRecordInfo cursor
case recordInfos of
[] -> fail "No RecordInfo entries found"
x:[] -> return x
xs -> fail "Too many RecordInfo entries found"
getYxdbRecordInfo :: Get RecordInfo
getYxdbRecordInfo = do
text <- getRecordInfoText True
let document = parseText_ def $ TL.fromStrict text
cursor = fromDocument document
recordInfos = parseXmlRecordInfo cursor
case recordInfos of
[] -> fail "No RecordInfo entries found"
x:[] -> return x
xs -> fail "Too many RecordInfo entries found"
instance Binary RecordInfo where
put metadata =
let fieldMap :: Field -> Map.Map Name Text
fieldMap field =
let
requiredAttributes =
[
("name", field ^. fieldName),
("type", renderFieldType $ field ^. fieldType)
]
sizeAttributes =
case field ^. fieldSize of
Nothing -> [ ]
Just x -> [ ("size", T.pack $ show x) ]
scaleAttributes =
case field ^. fieldScale of
Nothing -> [ ]
Just x -> [ ("scale", T.pack $ show x) ]
in Map.fromList $
Prelude.concat $
[ requiredAttributes, sizeAttributes, scaleAttributes ]
transformField field =
NodeElement $
Element "Field" (fieldMap field) [ ]
transformRecordInfo recordInfo =
NodeElement $
Element "RecordInfo" Map.empty $
Prelude.map transformField recordInfo
transformMetaInfo (RecordInfo recordInfo) =
Element "MetaInfo" Map.empty [ transformRecordInfo recordInfo]
transformToDocument node = Document (Prologue [] Nothing []) node []
renderMetaInfo metadata =
encodeUtf16LE $
flip T.snoc '\0' $
flip T.snoc '\n' $
documentToTextWithoutXMLHeader $
transformToDocument $
transformMetaInfo metadata
in putByteString $ renderMetaInfo metadata
get = getYxdbRecordInfo
parseXmlField :: Cursor -> [Field]
parseXmlField cursor = do
let fieldCursors = cursor $.// XMLC.element "Field"
fieldCursor <- fieldCursors
aName <- attribute "name" fieldCursor
aType <- attribute "type" fieldCursor
let aDesc = listToMaybe $ attribute "description" fieldCursor
let aSize = listToMaybe $ attribute "size" fieldCursor
let aScale = listToMaybe $ attribute "scale" fieldCursor
return $ Field {
_fieldName = aName,
_fieldType = parseFieldType aType,
_fieldSize = parseInt <$> aSize,
_fieldScale = parseInt <$> aScale
}
parseXmlRecordInfo :: Cursor -> [RecordInfo]
parseXmlRecordInfo cursor = do
let recordInfoCursors = cursor $.// XMLC.element "RecordInfo"
recordInfoCursor <- recordInfoCursors
let fields = parseXmlField recordInfoCursor
return $ RecordInfo fields
parseInt :: Text -> Int
parseInt text = read $ T.unpack text :: Int
hasVariableData :: RecordInfo -> Bool
hasVariableData (RecordInfo recordInfo) =
let fieldHasVariableData field =
case field ^. fieldType of
FTVString -> True
FTVWString -> True
FTBlob -> True
_ -> False
in Prelude.any fieldHasVariableData recordInfo
putRecord :: RecordInfo -> Record -> Put
putRecord recordInfo record = putByteString $ toByteString $ buildRecord recordInfo record
buildRecord :: RecordInfo -> Record -> Builder
buildRecord recordInfo@(RecordInfo fields) (Record fieldValues) =
if hasVariableData recordInfo
then error "putRecord: Variable data unimplemented"
else mconcat $ Prelude.zipWith buildValue fields fieldValues
getRecord :: RecordInfo -> Get Record
getRecord recordInfo@(RecordInfo fields) = do
record <- Record <$> mapM getValue fields
when (hasVariableData recordInfo) $ do
_ <- getAllVariableData
return ()
return record
instance Binary BlockIndex where
get = do
arraySize <- label "Index Array Size" $ fromIntegral <$> getWord32le
let numBlockIndexBytes = arraySize * 8
blocks <- label ("Reading block of size " ++ show arraySize) $
isolate numBlockIndexBytes $
replicateM arraySize (fromIntegral <$> getWord64le)
return $ BlockIndex $ listArray (0, arraySize-1) blocks
put (BlockIndex blockIndex) = do
let (_, iMax) = bounds blockIndex
putWord32le $ fromIntegral $ iMax + 1
mapM_ (putWord64le . fromIntegral) $ elems blockIndex
instance Binary Block where
get =
let tryGetOne = do
done <- isEmpty
if done
then return Nothing
else Just <$> get :: Get (Maybe Miniblock)
in NT.pack <$>
BSL.fromChunks <$>
Prelude.map NT.unpack <$>
unfoldM tryGetOne
put block = putByteString $ toByteString $ buildBlock block
buildBlock :: Block -> Builder
buildBlock (Block bs) =
case BSL.toChunks bs of
[] -> buildMiniblock $ Miniblock $ BS.empty
xs -> mconcat $ Prelude.map (buildMiniblock . Miniblock) xs
instance Binary Miniblock where
get = do
writtenSize <- label "Block size" getWord32le
let compressionBitIndex = 31
let isCompressed = not $ testBit writtenSize compressionBitIndex
let size = fromIntegral $ clearBit writtenSize compressionBitIndex
bs <- label ("Block of size " ++ show size) $ isolate size $ getByteString $ size
let chunk = if isCompressed
then case decompressByteStringFixed bufferSize bs of
Nothing -> fail "Unable to decompress. Increase buffer size?"
Just x -> return $ x
else return bs
Miniblock <$> chunk
put miniblock = putByteString $ toByteString $ buildMiniblock miniblock
buildMiniblock :: Miniblock -> Builder
buildMiniblock (Miniblock bs) =
let compressionBitIndex = 31
compressedBlock = compressByteStringFixed ((BS.length bs)-1) bs
blockToWrite = case compressedBlock of
Nothing -> bs
Just x -> x
size = BS.length blockToWrite
writtenSize = if isJust compressedBlock
then size
else setBit size compressionBitIndex
in mconcat [
fromWord32le $ fromIntegral writtenSize,
fromByteString blockToWrite
]
instance Binary Header where
put header = do
let actualDescriptionBS = BS.take 64 $ encodeUtf8 $ header ^. description
let numPaddingBytes = fromIntegral $ 64 - BS.length actualDescriptionBS
let paddingDescriptionBS = BSL.toStrict $ BSL.take numPaddingBytes $ BSL.repeat 0
putByteString actualDescriptionBS
putByteString paddingDescriptionBS
putWord32le $ header ^. fileId
putWord32le $ truncate $ utcTimeToPOSIXSeconds $ header ^. creationDate
putWord32le $ header ^. flags1
putWord32le $ header ^. flags2
putWord32le $ header ^. metaInfoLength
putWord32le $ header ^. mystery
putWord64le $ header ^. spatialIndexPos
putWord64le $ header ^. recordBlockIndexPos
putWord64le $ header ^. numRecords
putWord32le $ header ^. compressionVersion
putByteString $ header ^. reservedSpace
get = do
fDescription <- label "Description" $ decodeUtf8 <$> getByteString 64
fFileId <- label "FileId" getWord32le
fCreationDate <- label "Creation Date" getWord32le
fFlags1 <- label "Flags 1" getWord32le
fFlags2 <- label "Flags 2" getWord32le
fMetaInfoLength <- label "Metadata Length" getWord32le
fMystery <- label "Mystery Field" getWord32le
fSpatialIndexPos <- label "Spatial Index" getWord64le
fRecordBlockIndexPos <- label "Record Block" getWord64le
fNumRecords <- label "Num Records" getWord64le
fCompressionVersion <- label "Compression Version" getWord32le
fReservedSpace <- label "Reserved Space" $ (BSL.toStrict <$> getRemainingLazyByteString)
return $ Header {
_description = fDescription,
_fileId = fFileId,
_creationDate = posixSecondsToUTCTime $ fromIntegral fCreationDate,
_flags1 = fFlags1,
_flags2 = fFlags2,
_metaInfoLength = fMetaInfoLength,
_mystery = fMystery,
_spatialIndexPos = fSpatialIndexPos,
_recordBlockIndexPos = fRecordBlockIndexPos,
_numRecords = fNumRecords,
_compressionVersion = fCompressionVersion,
_reservedSpace = fReservedSpace
}
instance Binary CalgaryHeader where
put header = error "CalgaryHeader::put is unimplemented"
get = do
description <- decodeUtf8 <$> getByteString 64
fileId <- getWord32le
creationDate <- posixSecondsToUTCTime <$> fromIntegral <$> getWord32le
indexPosition <- getWord32le
mystery1 <- getWord32le
numBlocks <- getWord32le
mystery2 <- getWord32le
mystery3 <- getWord32le
mystery4 <- getWord32le
mystery5 <- getWord32le
mystery6 <- getWord64le
numRecords <- getWord32le
reserved <- BSL.toStrict <$> getRemainingLazyByteString
return CalgaryHeader {
_calgaryHeaderDescription = description,
_calgaryHeaderFileId = fileId,
_calgaryHeaderCreationDate = creationDate,
_calgaryHeaderIndexPosition = indexPosition,
_calgaryHeaderMystery1 = mystery1,
_calgaryHeaderNumRecords = numRecords,
_calgaryHeaderMystery2 = mystery2,
_calgaryHeaderMystery3 = mystery3,
_calgaryHeaderMystery4 = mystery4,
_calgaryHeaderMystery5 = mystery5,
_calgaryHeaderMystery6 = mystery6,
_calgaryHeaderNumBlocks = numBlocks,
_calgaryHeaderReserved = reserved
}
|
39589f7fcb3df5cbe3c1adbf792b4ecba1dc00ce1af81b7aa860b37983b1690f | takikawa/racket-ppa | body.rkt | #lang racket/base
(require "../common/struct-star.rkt"
"../syntax/syntax.rkt"
"../syntax/scope.rkt"
"../syntax/taint.rkt"
"../syntax/match.rkt"
"../namespace/module.rkt"
"../syntax/binding.rkt"
"env.rkt"
"../syntax/track.rkt"
"../syntax/error.rkt"
"../expand/parsed.rkt"
"dup-check.rkt"
"use-site.rkt"
"../namespace/core.rkt"
"../boot/runtime-primitive.rkt"
"context.rkt"
"liberal-def-ctx.rkt"
"reference-record.rkt"
"prepare.rkt"
"log.rkt"
"main.rkt")
(provide expand-body
expand-and-split-bindings-by-reference)
;; Expand a sequence of body forms in a definition context; returns a
;; list of body forms
(define (expand-body bodys ctx
                     #:source s
                     #:stratified? [stratified? #f])
  (log-expand ctx 'enter-block bodys)
  ;; In principle, we have an outside-edge scope that identifies the
  ;; original content of the definition context --- but a body always
  ;; exists inside some binding form, so that form's scope will do;
  ;; the inside-edge scope identifies any form that appears (perhaps
  ;; through macro expansion) in the definition context
  (define inside-sc (new-scope 'intdef))
  (define init-bodys
    (for/list ([body (in-list bodys)])
      (add-scope body inside-sc)))
  (log-expand ctx 'block-renames init-bodys bodys)
  (define phase (expand-context-phase ctx))
  (define frame-id (make-reference-record)) ; accumulates info on referenced variables
  (define def-ctx-scopes (box null))
  ;; Create an expansion context for expanding only immediate macros;
  ;; this partial-expansion phase uncovers macro- and variable
  ;; definitions in the definition context
  (define body-ctx (struct*-copy expand-context ctx
                                 [context (list (make-liberal-define-context))]
                                 [name #f]
                                 [only-immediate? #t]
                                 [def-ctx-scopes def-ctx-scopes]
                                 [post-expansion #:parent root-expand-context
                                                 (lambda (s) (add-scope s inside-sc))]
                                 [scopes (cons inside-sc
                                               (expand-context-scopes ctx))]
                                 [use-site-scopes #:parent root-expand-context (box null)]
                                 [frame-id #:parent root-expand-context frame-id]
                                 [reference-records (cons frame-id
                                                          (expand-context-reference-records ctx))]))
  ;; Increment the binding layer relative to `ctx` when we encounter a binding
  (define (maybe-increment-binding-layer ids body-ctx)
    (if (eq? (expand-context-binding-layer body-ctx)
             (expand-context-binding-layer ctx))
        (increment-binding-layer ids body-ctx inside-sc)
        (expand-context-binding-layer body-ctx)))
  ;; Save the name for the last form
  (define name (expand-context-name ctx))
  ;; Loop through the body forms for partial expansion
  (let loop ([body-ctx body-ctx]
             [bodys init-bodys]
             [done-bodys null] ; accumulated expressions
             [val-idss null]   ; accumulated binding identifiers
             [val-keyss null]  ; accumulated binding keys
             [val-rhss null]   ; accumulated binding right-hand sides
             [track-stxs null] ; accumulated syntax for tracking
             [trans-idss null] ; accumulated `define-syntaxes` identifiers that have disappeared
             [trans-stxs null] ; accumulated `define-syntaxes` forms for tracking
             [stx-clauses null] ; accumulated syntax-binding clauses, used when observing
             [dups (make-check-no-duplicate-table)] ; detects duplicate definition ids
             [just-saw-define-syntaxes? #f]) ; make sure that `define-syntaxes` isn't last
    (cond
     [(null? bodys)
      ;; Partial expansion is complete, so finish by rewriting to
      ;; `letrec-values`
      (finish-expanding-body body-ctx frame-id def-ctx-scopes
                             (reverse val-idss) (reverse val-keyss) (reverse val-rhss) (reverse track-stxs)
                             (reverse stx-clauses) (reverse done-bodys)
                             #:original-bodys init-bodys
                             #:source s
                             #:stratified? stratified?
                             #:just-saw-define-syntaxes? just-saw-define-syntaxes?
                             #:name name
                             #:disappeared-transformer-bindings (reverse trans-idss)
                             #:disappeared-transformer-forms (reverse trans-stxs))]
     [else
      (define rest-bodys (cdr bodys))
      (log-expand body-ctx 'next)
      ;; Partially expand the next form; propagate `name` only to the last one
      (define exp-body (expand (car bodys) (if (and name (null? (cdr bodys)))
                                               (struct*-copy expand-context body-ctx
                                                             [name name])
                                               body-ctx)))
      ;; Dispatch on the core form uncovered by partial expansion
      (case (core-form-sym exp-body phase)
        [(begin)
         ;; Splice a `begin` form
         (log-expand body-ctx 'prim-begin exp-body)
         (define-match m exp-body '(begin e ...))
         (define (track e) (syntax-track-origin e exp-body))
         (define splice-bodys (append (map track (m 'e)) rest-bodys))
         (log-expand body-ctx 'splice splice-bodys)
         (loop body-ctx
               splice-bodys
               done-bodys
               val-idss
               val-keyss
               val-rhss
               track-stxs
               trans-idss
               trans-stxs
               stx-clauses
               dups
               just-saw-define-syntaxes?)]
        [(define-values)
         ;; Found a variable definition; add bindings, extend the
         ;; environment, and continue
         (log-expand body-ctx 'prim-define-values exp-body)
         (define-match m exp-body '(define-values (id ...) rhs))
         (define ids (remove-use-site-scopes (m 'id) body-ctx))
         (log-expand body-ctx 'rename-one (list ids (m 'rhs)))
         (define new-dups (check-no-duplicate-ids ids phase exp-body dups))
         (define counter (root-expand-context-counter ctx))
         (define local-sym (and (expand-context-normalize-locals? ctx) 'loc))
         (define keys (for/list ([id (in-list ids)])
                        (add-local-binding! id phase counter
                                            #:frame-id frame-id #:in exp-body
                                            #:local-sym local-sym)))
         (define extended-env (for/fold ([env (expand-context-env body-ctx)]) ([key (in-list keys)]
                                                                               [id (in-list ids)])
                                (env-extend env key (local-variable id))))
         (loop (struct*-copy expand-context body-ctx
                             [env extended-env]
                             [binding-layer (maybe-increment-binding-layer ids body-ctx)])
               rest-bodys
               null
               ;; If we had accumulated some expressions, we
               ;; need to turn each into the equivalent of
               ;;  (defined-values () (begin <expr> (values)))
               ;; form so it can be kept with definitions to
               ;; preserve order
               (cons ids (append
                          (for/list ([done-body (in-list done-bodys)])
                            null)
                          val-idss))
               (cons keys (append
                           (for/list ([done-body (in-list done-bodys)])
                             null)
                           val-keyss))
               (cons (m 'rhs) (append
                               (for/list ([done-body (in-list done-bodys)])
                                 (no-binds done-body s phase))
                               val-rhss))
               (cons (keep-as-needed body-ctx exp-body #:for-track? #t)
                     (append
                      (for/list ([done-body (in-list done-bodys)])
                        #f)
                      track-stxs))
               trans-idss
               trans-stxs
               stx-clauses
               new-dups
               #f)]
        [(define-syntaxes)
         ;; Found a macro definition; add bindings, evaluate the
         ;; compile-time right-hand side, install the compile-time
         ;; values in the environment, and continue
         (log-expand body-ctx 'prim-define-syntaxes exp-body)
         (define-match m exp-body '(define-syntaxes (id ...) rhs))
         (define ids (remove-use-site-scopes (m 'id) body-ctx))
         (log-expand body-ctx 'rename-one (list ids (m 'rhs)))
         (define new-dups (check-no-duplicate-ids ids phase exp-body dups))
         (define counter (root-expand-context-counter ctx))
         (define local-sym (and (expand-context-normalize-locals? ctx) 'mac))
         (define keys (for/list ([id (in-list ids)])
                        (add-local-binding! id phase counter
                                            #:frame-id frame-id #:in exp-body
                                            #:local-sym local-sym)))
         (log-expand body-ctx 'prepare-env)
         (prepare-next-phase-namespace ctx)
         (log-expand body-ctx 'enter-bind)
         ;; Evaluate the right-hand side at the next phase to get transformers
         (define vals (eval-for-syntaxes-binding 'define-syntaxes (m 'rhs) ids body-ctx))
         (define extended-env (for/fold ([env (expand-context-env body-ctx)]) ([key (in-list keys)]
                                                                               [val (in-list vals)]
                                                                               [id (in-list ids)])
                                (maybe-install-free=id-in-context! val id phase body-ctx)
                                (env-extend env key val)))
         (log-expand body-ctx 'exit-bind)
         (loop (struct*-copy expand-context body-ctx
                             [env extended-env]
                             [binding-layer (maybe-increment-binding-layer ids body-ctx)])
               rest-bodys
               done-bodys
               val-idss
               val-keyss
               val-rhss
               track-stxs
               (cons ids trans-idss)
               (cons (keep-as-needed body-ctx exp-body #:for-track? #t) trans-stxs)
               (cons (datum->syntax #f (list ids (m 'rhs)) exp-body) stx-clauses)
               new-dups
               #t)]
        [else
         (cond
          [stratified?
           ;; Found an expression, so no more definitions are allowed
           (unless (null? done-bodys) (error "internal error: accumulated expressions not empty"))
           (loop body-ctx
                 null
                 (if (and (null? val-idss) (null? trans-idss))
                     (reverse (cons exp-body rest-bodys))
                     (list (datum->syntax #f (cons (core-id '#%stratified-body phase)
                                                   (cons exp-body rest-bodys)))))
                 val-idss
                 val-keyss
                 val-rhss
                 track-stxs
                 trans-idss
                 trans-stxs
                 stx-clauses
                 dups
                 #f)]
          [else
           ;; Found an expression; accumulate it and continue
           (loop body-ctx
                 rest-bodys
                 (cons exp-body done-bodys)
                 val-idss
                 val-keyss
                 val-rhss
                 track-stxs
                 trans-idss
                 trans-stxs
                 stx-clauses
                 dups
                 #f)])])])))
precondition : xs is a list with at least one element
(define (last xs)
(if (null? (cdr xs))
(car xs)
(last (cdr xs))))
;; Partial expansion is complete, so assumble the result as a
;; `letrec-values` form and continue expanding
(define (finish-expanding-body body-ctx frame-id def-ctx-scopes
val-idss val-keyss val-rhss track-stxs
stx-clauses done-bodys
#:original-bodys init-bodys
#:source s
#:stratified? stratified?
#:just-saw-define-syntaxes? just-saw-define-syntaxes?
#:name name
#:disappeared-transformer-bindings disappeared-transformer-bindings
#:disappeared-transformer-forms disappeared-transformer-forms)
(when (or (null? done-bodys)
just-saw-define-syntaxes?)
(raise-syntax-error (string->symbol "begin (possibly implicit)")
"the last form is not an expression"
(datum->syntax #f (cons 'begin init-bodys) s)
(if (null? init-bodys) #f (last init-bodys))))
;; As we finish expanding, we're no longer in a definition context
(define finish-ctx (struct*-copy expand-context (accumulate-def-ctx-scopes body-ctx def-ctx-scopes)
[context 'expression]
[use-site-scopes #:parent root-expand-context (box null)]
[only-immediate? #f]
[def-ctx-scopes #f]
[post-expansion #:parent root-expand-context #f]))
;; Helper to expand and wrap the ending expressions in `begin`, if needed:
(define (finish-bodys)
(define last-i (sub1 (length done-bodys)))
(log-expand body-ctx 'enter-list done-bodys)
(define exp-bodys
(for/list ([done-body (in-list done-bodys)]
[i (in-naturals)])
(log-expand body-ctx 'next)
(expand done-body (if (and name (= i last-i))
(struct*-copy expand-context finish-ctx
[name name])
finish-ctx))))
(log-expand body-ctx 'exit-list exp-bodys)
(reference-record-clear! frame-id)
exp-bodys)
(cond
[(and (null? val-idss)
(null? disappeared-transformer-bindings))
;; No definitions, so just return the body list
(log-expand finish-ctx 'block->list)
(finish-bodys)]
[else
(log-expand finish-ctx 'block->letrec val-idss val-rhss done-bodys)
;; Roughly, finish expanding the right-hand sides, finish the body
;; expression, then add a `letrec-values` wrapper:
(define exp-s (expand-and-split-bindings-by-reference
val-idss val-keyss val-rhss track-stxs
#:split? (not stratified?)
#:frame-id frame-id #:ctx finish-ctx
#:source s #:had-stxes? (pair? stx-clauses)
#:get-body finish-bodys #:track? #f))
(if (expand-context-to-parsed? body-ctx)
(list exp-s)
(let ([exp-s (attach-disappeared-transformer-bindings
exp-s
disappeared-transformer-bindings)])
(let ([tracked-exp-s
(for/fold ([exp-s exp-s]) ([form (in-list disappeared-transformer-forms)]
#:when form)
(syntax-track-origin exp-s form))])
(log-expand finish-ctx 'finish-block (list tracked-exp-s))
(list tracked-exp-s))))]))
;; Roughly, create a `letrec-values` for for the given ids, right-hand sides, and
;; body. While expanding right-hand sides, though, keep track of whether any
;; forward references appear, and if not, generate a `let-values` form, instead,
;; at each binding clause. Similar, end a `letrec-values` form and start a new
;; one if there were forward references up to the clause but not beyond.
;; Returns a single form.
(define (expand-and-split-bindings-by-reference idss keyss rhss track-stxs
#:split? split?
#:frame-id frame-id #:ctx ctx
#:source s #:had-stxes? had-stxes?
#:get-body get-body #:track? track?)
(define phase (expand-context-phase ctx))
(let loop ([idss idss] [keyss keyss] [rhss rhss] [track-stxs track-stxs]
[accum-idss null] [accum-keyss null] [accum-rhss null] [accum-track-stxs null]
[track? track?] [get-list? #f])
(cond
[(null? idss)
(cond
[(and (null? accum-idss)
get-list?)
(get-body)]
[else
(define exp-body (get-body))
(define result-s
(if (expand-context-to-parsed? ctx)
(if (null? accum-idss)
(parsed-let-values (keep-properties-only s) null null exp-body)
(parsed-letrec-values (keep-properties-only s)
(reverse accum-idss)
(reverse (map list accum-keyss accum-rhss))
exp-body))
(rebuild
#:track? track?
s
`(,(if (null? accum-idss)
(core-id 'let-values phase)
(core-id 'letrec-values phase))
,(build-clauses accum-idss accum-rhss accum-track-stxs)
,@exp-body))))
(if get-list? (list result-s) result-s)])]
[else
(log-expand ctx 'next)
(define ids (car idss))
(define expanded-rhs (expand (car rhss) (as-named-context ctx ids)))
(define track-stx (car track-stxs))
(define local-or-forward-references? (reference-record-forward-references? frame-id))
(reference-record-bound! frame-id (car keyss))
(define forward-references? (reference-record-forward-references? frame-id))
(cond
[(and (not local-or-forward-references?)
split?)
(unless (null? accum-idss) (error "internal error: accumulated ids not empty"))
(define exp-rest (loop (cdr idss) (cdr keyss) (cdr rhss) (cdr track-stxs)
null null null null
#f #t))
(define result-s
(if (expand-context-to-parsed? ctx)
(parsed-let-values (keep-properties-only s)
(list ids)
(list (list (car keyss) expanded-rhs))
exp-rest)
(rebuild
#:track? track?
s
`(,(core-id 'let-values phase)
(,(build-clause ids expanded-rhs track-stx))
,@exp-rest))))
(if get-list? (list result-s) result-s)]
[(and (not forward-references?)
(or split? (null? (cdr idss))))
(define exp-rest (loop (cdr idss) (cdr keyss) (cdr rhss) (cdr track-stxs)
null null null null
#f #t))
(define result-s
(if (expand-context-to-parsed? ctx)
(parsed-letrec-values (keep-properties-only s)
(reverse (cons ids accum-idss))
(reverse
(cons (list (car keyss) expanded-rhs)
(map list accum-keyss accum-rhss)))
exp-rest)
(rebuild
#:track? track?
s
`(,(core-id 'letrec-values phase)
,(build-clauses (cons ids accum-idss)
(cons expanded-rhs accum-rhss)
(cons track-stx accum-track-stxs))
,@exp-rest))))
(if get-list? (list result-s) result-s)]
[else
(loop (cdr idss) (cdr keyss) (cdr rhss) (cdr track-stxs)
(cons ids accum-idss) (cons (car keyss) accum-keyss)
(cons expanded-rhs accum-rhss) (cons track-stx accum-track-stxs)
track? get-list?)])])))
(define (build-clauses accum-idss accum-rhss accum-track-stxs)
(map build-clause
(reverse accum-idss)
(reverse accum-rhss)
(reverse accum-track-stxs)))
(define (build-clause ids rhs track-stx)
(define clause (datum->syntax #f `[,ids ,rhs]))
(if track-stx
(syntax-track-origin clause track-stx)
clause))
Helper to turn an expression into a binding clause with zero
;; bindings
(define (no-binds expr s phase)
(define s-runtime-stx (syntax-shift-phase-level runtime-stx phase))
(datum->syntax (core-id '#%app phase) ; for `values` application
`(,(core-id 'begin phase)
,expr
(,(datum->syntax s-runtime-stx 'values)))
s))
(define (log-tag? had-stxes? ctx)
(and had-stxes?
(not (expand-context-only-immediate? ctx))))
| null | https://raw.githubusercontent.com/takikawa/racket-ppa/caff086a1cd48208815cec2a22645a3091c11d4c/src/expander/expand/body.rkt | racket | Expand a sequence of body forms in a definition context; returns a
list of body forms
In principle, we have an outside-edge scope that identifies the
original content of the definition context --- but a body always
exists inside some binding form, so that form's scope will do;
the inside-edge scope identifies any form that appears (perhaps
through macro expansion) in the definition context
accumulates info on referenced variables
Create an expansion context for expanding only immediate macros;
this partial-expansion phase uncovers macro- and variable
definitions in the definition context
Increment the binding layer relative to `ctx` when we encounter a binding
Save the name for the last form
Loop through the body forms for partial expansion
accumulated expressions
accumulated binding identifiers
accumulated binding keys
accumulated binding right-hand sides
accumulated syntax for tracking
accumulated `define-syntaxes` identifiers that have disappeared
accumulated `define-syntaxes` forms for tracking
accumulated syntax-binding clauses, used when observing
make sure that `define-syntaxes` isn't last
Partial expansion is complete, so finish by rewriting to
`letrec-values`
Splice a `begin` form
Found a variable definition; add bindings, extend the
environment, and continue
If we had accumulated some expressions, we
need to turn each into the equivalent of
(defined-values () (begin <expr> (values)))
form so it can be kept with definitions to
preserve order
Found a macro definition; add bindings, evaluate the
compile-time right-hand side, install the compile-time
values in the environment, and continue
Found an expression, so no more definitions are allowed
Found an expression; accumulate it and continue
Partial expansion is complete, so assumble the result as a
`letrec-values` form and continue expanding
As we finish expanding, we're no longer in a definition context
Helper to expand and wrap the ending expressions in `begin`, if needed:
No definitions, so just return the body list
Roughly, finish expanding the right-hand sides, finish the body
expression, then add a `letrec-values` wrapper:
Roughly, create a `letrec-values` for for the given ids, right-hand sides, and
body. While expanding right-hand sides, though, keep track of whether any
forward references appear, and if not, generate a `let-values` form, instead,
at each binding clause. Similar, end a `letrec-values` form and start a new
one if there were forward references up to the clause but not beyond.
Returns a single form.
bindings
for `values` application | #lang racket/base
(require "../common/struct-star.rkt"
"../syntax/syntax.rkt"
"../syntax/scope.rkt"
"../syntax/taint.rkt"
"../syntax/match.rkt"
"../namespace/module.rkt"
"../syntax/binding.rkt"
"env.rkt"
"../syntax/track.rkt"
"../syntax/error.rkt"
"../expand/parsed.rkt"
"dup-check.rkt"
"use-site.rkt"
"../namespace/core.rkt"
"../boot/runtime-primitive.rkt"
"context.rkt"
"liberal-def-ctx.rkt"
"reference-record.rkt"
"prepare.rkt"
"log.rkt"
"main.rkt")
(provide expand-body
expand-and-split-bindings-by-reference)
(define (expand-body bodys ctx
#:source s
#:stratified? [stratified? #f])
(log-expand ctx 'enter-block bodys)
(define inside-sc (new-scope 'intdef))
(define init-bodys
(for/list ([body (in-list bodys)])
(add-scope body inside-sc)))
(log-expand ctx 'block-renames init-bodys bodys)
(define phase (expand-context-phase ctx))
(define def-ctx-scopes (box null))
(define body-ctx (struct*-copy expand-context ctx
[context (list (make-liberal-define-context))]
[name #f]
[only-immediate? #t]
[def-ctx-scopes def-ctx-scopes]
[post-expansion #:parent root-expand-context
(lambda (s) (add-scope s inside-sc))]
[scopes (cons inside-sc
(expand-context-scopes ctx))]
[use-site-scopes #:parent root-expand-context (box null)]
[frame-id #:parent root-expand-context frame-id]
[reference-records (cons frame-id
(expand-context-reference-records ctx))]))
(define (maybe-increment-binding-layer ids body-ctx)
(if (eq? (expand-context-binding-layer body-ctx)
(expand-context-binding-layer ctx))
(increment-binding-layer ids body-ctx inside-sc)
(expand-context-binding-layer body-ctx)))
(define name (expand-context-name ctx))
(let loop ([body-ctx body-ctx]
[bodys init-bodys]
[dups (make-check-no-duplicate-table)]
(cond
[(null? bodys)
(finish-expanding-body body-ctx frame-id def-ctx-scopes
(reverse val-idss) (reverse val-keyss) (reverse val-rhss) (reverse track-stxs)
(reverse stx-clauses) (reverse done-bodys)
#:original-bodys init-bodys
#:source s
#:stratified? stratified?
#:just-saw-define-syntaxes? just-saw-define-syntaxes?
#:name name
#:disappeared-transformer-bindings (reverse trans-idss)
#:disappeared-transformer-forms (reverse trans-stxs))]
[else
(define rest-bodys (cdr bodys))
(log-expand body-ctx 'next)
(define exp-body (expand (car bodys) (if (and name (null? (cdr bodys)))
(struct*-copy expand-context body-ctx
[name name])
body-ctx)))
(case (core-form-sym exp-body phase)
[(begin)
(log-expand body-ctx 'prim-begin exp-body)
(define-match m exp-body '(begin e ...))
(define (track e) (syntax-track-origin e exp-body))
(define splice-bodys (append (map track (m 'e)) rest-bodys))
(log-expand body-ctx 'splice splice-bodys)
(loop body-ctx
splice-bodys
done-bodys
val-idss
val-keyss
val-rhss
track-stxs
trans-idss
trans-stxs
stx-clauses
dups
just-saw-define-syntaxes?)]
[(define-values)
(log-expand body-ctx 'prim-define-values exp-body)
(define-match m exp-body '(define-values (id ...) rhs))
(define ids (remove-use-site-scopes (m 'id) body-ctx))
(log-expand body-ctx 'rename-one (list ids (m 'rhs)))
(define new-dups (check-no-duplicate-ids ids phase exp-body dups))
(define counter (root-expand-context-counter ctx))
(define local-sym (and (expand-context-normalize-locals? ctx) 'loc))
(define keys (for/list ([id (in-list ids)])
(add-local-binding! id phase counter
#:frame-id frame-id #:in exp-body
#:local-sym local-sym)))
(define extended-env (for/fold ([env (expand-context-env body-ctx)]) ([key (in-list keys)]
[id (in-list ids)])
(env-extend env key (local-variable id))))
(loop (struct*-copy expand-context body-ctx
[env extended-env]
[binding-layer (maybe-increment-binding-layer ids body-ctx)])
rest-bodys
null
(cons ids (append
(for/list ([done-body (in-list done-bodys)])
null)
val-idss))
(cons keys (append
(for/list ([done-body (in-list done-bodys)])
null)
val-keyss))
(cons (m 'rhs) (append
(for/list ([done-body (in-list done-bodys)])
(no-binds done-body s phase))
val-rhss))
(cons (keep-as-needed body-ctx exp-body #:for-track? #t)
(append
(for/list ([done-body (in-list done-bodys)])
#f)
track-stxs))
trans-idss
trans-stxs
stx-clauses
new-dups
#f)]
[(define-syntaxes)
(log-expand body-ctx 'prim-define-syntaxes exp-body)
(define-match m exp-body '(define-syntaxes (id ...) rhs))
(define ids (remove-use-site-scopes (m 'id) body-ctx))
(log-expand body-ctx 'rename-one (list ids (m 'rhs)))
(define new-dups (check-no-duplicate-ids ids phase exp-body dups))
(define counter (root-expand-context-counter ctx))
(define local-sym (and (expand-context-normalize-locals? ctx) 'mac))
(define keys (for/list ([id (in-list ids)])
(add-local-binding! id phase counter
#:frame-id frame-id #:in exp-body
#:local-sym local-sym)))
(log-expand body-ctx 'prepare-env)
(prepare-next-phase-namespace ctx)
(log-expand body-ctx 'enter-bind)
(define vals (eval-for-syntaxes-binding 'define-syntaxes (m 'rhs) ids body-ctx))
(define extended-env (for/fold ([env (expand-context-env body-ctx)]) ([key (in-list keys)]
[val (in-list vals)]
[id (in-list ids)])
(maybe-install-free=id-in-context! val id phase body-ctx)
(env-extend env key val)))
(log-expand body-ctx 'exit-bind)
(loop (struct*-copy expand-context body-ctx
[env extended-env]
[binding-layer (maybe-increment-binding-layer ids body-ctx)])
rest-bodys
done-bodys
val-idss
val-keyss
val-rhss
track-stxs
(cons ids trans-idss)
(cons (keep-as-needed body-ctx exp-body #:for-track? #t) trans-stxs)
(cons (datum->syntax #f (list ids (m 'rhs)) exp-body) stx-clauses)
new-dups
#t)]
[else
(cond
[stratified?
(unless (null? done-bodys) (error "internal error: accumulated expressions not empty"))
(loop body-ctx
null
(if (and (null? val-idss) (null? trans-idss))
(reverse (cons exp-body rest-bodys))
(list (datum->syntax #f (cons (core-id '#%stratified-body phase)
(cons exp-body rest-bodys)))))
val-idss
val-keyss
val-rhss
track-stxs
trans-idss
trans-stxs
stx-clauses
dups
#f)]
[else
(loop body-ctx
rest-bodys
(cons exp-body done-bodys)
val-idss
val-keyss
val-rhss
track-stxs
trans-idss
trans-stxs
stx-clauses
dups
#f)])])])))
precondition : xs is a list with at least one element
(define (last xs)
(if (null? (cdr xs))
(car xs)
(last (cdr xs))))
(define (finish-expanding-body body-ctx frame-id def-ctx-scopes
val-idss val-keyss val-rhss track-stxs
stx-clauses done-bodys
#:original-bodys init-bodys
#:source s
#:stratified? stratified?
#:just-saw-define-syntaxes? just-saw-define-syntaxes?
#:name name
#:disappeared-transformer-bindings disappeared-transformer-bindings
#:disappeared-transformer-forms disappeared-transformer-forms)
(when (or (null? done-bodys)
just-saw-define-syntaxes?)
(raise-syntax-error (string->symbol "begin (possibly implicit)")
"the last form is not an expression"
(datum->syntax #f (cons 'begin init-bodys) s)
(if (null? init-bodys) #f (last init-bodys))))
(define finish-ctx (struct*-copy expand-context (accumulate-def-ctx-scopes body-ctx def-ctx-scopes)
[context 'expression]
[use-site-scopes #:parent root-expand-context (box null)]
[only-immediate? #f]
[def-ctx-scopes #f]
[post-expansion #:parent root-expand-context #f]))
(define (finish-bodys)
(define last-i (sub1 (length done-bodys)))
(log-expand body-ctx 'enter-list done-bodys)
(define exp-bodys
(for/list ([done-body (in-list done-bodys)]
[i (in-naturals)])
(log-expand body-ctx 'next)
(expand done-body (if (and name (= i last-i))
(struct*-copy expand-context finish-ctx
[name name])
finish-ctx))))
(log-expand body-ctx 'exit-list exp-bodys)
(reference-record-clear! frame-id)
exp-bodys)
(cond
[(and (null? val-idss)
(null? disappeared-transformer-bindings))
(log-expand finish-ctx 'block->list)
(finish-bodys)]
[else
(log-expand finish-ctx 'block->letrec val-idss val-rhss done-bodys)
(define exp-s (expand-and-split-bindings-by-reference
val-idss val-keyss val-rhss track-stxs
#:split? (not stratified?)
#:frame-id frame-id #:ctx finish-ctx
#:source s #:had-stxes? (pair? stx-clauses)
#:get-body finish-bodys #:track? #f))
(if (expand-context-to-parsed? body-ctx)
(list exp-s)
(let ([exp-s (attach-disappeared-transformer-bindings
exp-s
disappeared-transformer-bindings)])
(let ([tracked-exp-s
(for/fold ([exp-s exp-s]) ([form (in-list disappeared-transformer-forms)]
#:when form)
(syntax-track-origin exp-s form))])
(log-expand finish-ctx 'finish-block (list tracked-exp-s))
(list tracked-exp-s))))]))
(define (expand-and-split-bindings-by-reference idss keyss rhss track-stxs
#:split? split?
#:frame-id frame-id #:ctx ctx
#:source s #:had-stxes? had-stxes?
#:get-body get-body #:track? track?)
(define phase (expand-context-phase ctx))
(let loop ([idss idss] [keyss keyss] [rhss rhss] [track-stxs track-stxs]
[accum-idss null] [accum-keyss null] [accum-rhss null] [accum-track-stxs null]
[track? track?] [get-list? #f])
(cond
[(null? idss)
(cond
[(and (null? accum-idss)
get-list?)
(get-body)]
[else
(define exp-body (get-body))
(define result-s
(if (expand-context-to-parsed? ctx)
(if (null? accum-idss)
(parsed-let-values (keep-properties-only s) null null exp-body)
(parsed-letrec-values (keep-properties-only s)
(reverse accum-idss)
(reverse (map list accum-keyss accum-rhss))
exp-body))
(rebuild
#:track? track?
s
`(,(if (null? accum-idss)
(core-id 'let-values phase)
(core-id 'letrec-values phase))
,(build-clauses accum-idss accum-rhss accum-track-stxs)
,@exp-body))))
(if get-list? (list result-s) result-s)])]
[else
(log-expand ctx 'next)
(define ids (car idss))
(define expanded-rhs (expand (car rhss) (as-named-context ctx ids)))
(define track-stx (car track-stxs))
(define local-or-forward-references? (reference-record-forward-references? frame-id))
(reference-record-bound! frame-id (car keyss))
(define forward-references? (reference-record-forward-references? frame-id))
(cond
[(and (not local-or-forward-references?)
split?)
(unless (null? accum-idss) (error "internal error: accumulated ids not empty"))
(define exp-rest (loop (cdr idss) (cdr keyss) (cdr rhss) (cdr track-stxs)
null null null null
#f #t))
(define result-s
(if (expand-context-to-parsed? ctx)
(parsed-let-values (keep-properties-only s)
(list ids)
(list (list (car keyss) expanded-rhs))
exp-rest)
(rebuild
#:track? track?
s
`(,(core-id 'let-values phase)
(,(build-clause ids expanded-rhs track-stx))
,@exp-rest))))
(if get-list? (list result-s) result-s)]
[(and (not forward-references?)
(or split? (null? (cdr idss))))
(define exp-rest (loop (cdr idss) (cdr keyss) (cdr rhss) (cdr track-stxs)
null null null null
#f #t))
(define result-s
(if (expand-context-to-parsed? ctx)
(parsed-letrec-values (keep-properties-only s)
(reverse (cons ids accum-idss))
(reverse
(cons (list (car keyss) expanded-rhs)
(map list accum-keyss accum-rhss)))
exp-rest)
(rebuild
#:track? track?
s
`(,(core-id 'letrec-values phase)
,(build-clauses (cons ids accum-idss)
(cons expanded-rhs accum-rhss)
(cons track-stx accum-track-stxs))
,@exp-rest))))
(if get-list? (list result-s) result-s)]
[else
(loop (cdr idss) (cdr keyss) (cdr rhss) (cdr track-stxs)
(cons ids accum-idss) (cons (car keyss) accum-keyss)
(cons expanded-rhs accum-rhss) (cons track-stx accum-track-stxs)
track? get-list?)])])))
(define (build-clauses accum-idss accum-rhss accum-track-stxs)
(map build-clause
(reverse accum-idss)
(reverse accum-rhss)
(reverse accum-track-stxs)))
(define (build-clause ids rhs track-stx)
(define clause (datum->syntax #f `[,ids ,rhs]))
(if track-stx
(syntax-track-origin clause track-stx)
clause))
Helper to turn an expression into a binding clause with zero
(define (no-binds expr s phase)
(define s-runtime-stx (syntax-shift-phase-level runtime-stx phase))
`(,(core-id 'begin phase)
,expr
(,(datum->syntax s-runtime-stx 'values)))
s))
(define (log-tag? had-stxes? ctx)
(and had-stxes?
(not (expand-context-only-immediate? ctx))))
|
3d756f1d56ae8a9e284e76ca09864924685907a60eaba83c37ac7499d9d6bb78 | semilin/layoup | f'ed_up.lisp |
(MAKE-LAYOUT :NAME "f'ed_up" :MATRIX (APPLY #'KEY-MATRIX 'NIL) :SHIFT-MATRIX
NIL :KEYBOARD NIL) | null | https://raw.githubusercontent.com/semilin/layoup/27ec9ba9a9388cd944ac46206d10424e3ab45499/data/layouts/f'ed_up.lisp | lisp |
(MAKE-LAYOUT :NAME "f'ed_up" :MATRIX (APPLY #'KEY-MATRIX 'NIL) :SHIFT-MATRIX
NIL :KEYBOARD NIL) | |
522bd8c1b41ea368b00debac21392797daae4ee0dfe39c52076d2b94c61c53ce | mbutterick/beautiful-racket | report-args.rkt | #lang basic-demo-3
10 print "arg0 is " ; arg0
20 print "arg1 + arg1 is " ; arg1 + arg1
40 print "arg3 is " ; arg3
50 print "arg4 is " ; arg4 | null | https://raw.githubusercontent.com/mbutterick/beautiful-racket/f0e2cb5b325733b3f9cbd554cc7d2bb236af9ee9/beautiful-racket-demo/basic-demo-3/report-args.rkt | racket | arg0
arg1 + arg1
arg3
arg4 | #lang basic-demo-3 |
71da63c4844e48d6c51b6983938036a9d08efa84e1447cf66ef1adc73780be1f | marcoonroad/hieroglyphs | utils.ml | module String = Core.String
module Str = Re.Str
let _HASH_LENGTH = 128
let regexp = Str.regexp "^[a-f0-9]+$"
let is_hash text =
Str.string_match regexp text 0 && String.length text = _HASH_LENGTH
| null | https://raw.githubusercontent.com/marcoonroad/hieroglyphs/03050bcf78b2871591752a696a16587989065163/test/spec/utils.ml | ocaml | module String = Core.String
module Str = Re.Str
let _HASH_LENGTH = 128
let regexp = Str.regexp "^[a-f0-9]+$"
let is_hash text =
Str.string_match regexp text 0 && String.length text = _HASH_LENGTH
| |
64b7eff67cf4e25c32e6dd39d20411faf59d441a83265bb9721adbb22c4ac20d | ghcjs/ghcjs-dom | CSSPageRule.hs | # LANGUAGE PatternSynonyms #
# LANGUAGE ForeignFunctionInterface #
# LANGUAGE JavaScriptFFI #
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
module GHCJS.DOM.JSFFI.Generated.CSSPageRule
(js_setSelectorText, setSelectorText, js_getSelectorText,
getSelectorText, getSelectorTextUnsafe, getSelectorTextUnchecked,
js_getStyle, getStyle, CSSPageRule(..), gTypeCSSPageRule)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import qualified Prelude (error)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull, jsUndefined)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad (void)
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import Data.Maybe (fromJust)
import Data.Traversable (mapM)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import GHCJS.DOM.JSFFI.Generated.Enums
foreign import javascript unsafe "$1[\"selectorText\"] = $2;"
js_setSelectorText :: CSSPageRule -> Optional JSString -> IO ()
| < -US/docs/Web/API/CSSPageRule.selectorText Mozilla CSSPageRule.selectorText documentation >
setSelectorText ::
(MonadIO m, ToJSString val) => CSSPageRule -> Maybe val -> m ()
setSelectorText self val
= liftIO (js_setSelectorText self (toOptionalJSString val))
foreign import javascript unsafe "$1[\"selectorText\"]"
js_getSelectorText :: CSSPageRule -> IO (Nullable JSString)
| < -US/docs/Web/API/CSSPageRule.selectorText Mozilla CSSPageRule.selectorText documentation >
getSelectorText ::
(MonadIO m, FromJSString result) => CSSPageRule -> m (Maybe result)
getSelectorText self
= liftIO (fromMaybeJSString <$> (js_getSelectorText self))
| < -US/docs/Web/API/CSSPageRule.selectorText Mozilla CSSPageRule.selectorText documentation >
getSelectorTextUnsafe ::
(MonadIO m, HasCallStack, FromJSString result) =>
CSSPageRule -> m result
getSelectorTextUnsafe self
= liftIO
((fromMaybeJSString <$> (js_getSelectorText self)) >>=
maybe (Prelude.error "Nothing to return") return)
| < -US/docs/Web/API/CSSPageRule.selectorText Mozilla CSSPageRule.selectorText documentation >
getSelectorTextUnchecked ::
(MonadIO m, FromJSString result) => CSSPageRule -> m result
getSelectorTextUnchecked self
= liftIO
(fromJust . fromMaybeJSString <$> (js_getSelectorText self))
foreign import javascript unsafe "$1[\"style\"]" js_getStyle ::
CSSPageRule -> IO CSSStyleDeclaration
| < -US/docs/Web/API/CSSPageRule.style Mozilla CSSPageRule.style documentation >
getStyle :: (MonadIO m) => CSSPageRule -> m CSSStyleDeclaration
getStyle self = liftIO (js_getStyle self) | null | https://raw.githubusercontent.com/ghcjs/ghcjs-dom/749963557d878d866be2d0184079836f367dd0ea/ghcjs-dom-jsffi/src/GHCJS/DOM/JSFFI/Generated/CSSPageRule.hs | haskell | For HasCallStack compatibility
# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures # | # LANGUAGE PatternSynonyms #
# LANGUAGE ForeignFunctionInterface #
# LANGUAGE JavaScriptFFI #
module GHCJS.DOM.JSFFI.Generated.CSSPageRule
(js_setSelectorText, setSelectorText, js_getSelectorText,
getSelectorText, getSelectorTextUnsafe, getSelectorTextUnchecked,
js_getStyle, getStyle, CSSPageRule(..), gTypeCSSPageRule)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import qualified Prelude (error)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull, jsUndefined)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad (void)
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import Data.Maybe (fromJust)
import Data.Traversable (mapM)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import GHCJS.DOM.JSFFI.Generated.Enums
foreign import javascript unsafe "$1[\"selectorText\"] = $2;"
js_setSelectorText :: CSSPageRule -> Optional JSString -> IO ()
| < -US/docs/Web/API/CSSPageRule.selectorText Mozilla CSSPageRule.selectorText documentation >
setSelectorText ::
(MonadIO m, ToJSString val) => CSSPageRule -> Maybe val -> m ()
setSelectorText self val
= liftIO (js_setSelectorText self (toOptionalJSString val))
foreign import javascript unsafe "$1[\"selectorText\"]"
js_getSelectorText :: CSSPageRule -> IO (Nullable JSString)
| < -US/docs/Web/API/CSSPageRule.selectorText Mozilla CSSPageRule.selectorText documentation >
getSelectorText ::
(MonadIO m, FromJSString result) => CSSPageRule -> m (Maybe result)
getSelectorText self
= liftIO (fromMaybeJSString <$> (js_getSelectorText self))
| < -US/docs/Web/API/CSSPageRule.selectorText Mozilla CSSPageRule.selectorText documentation >
getSelectorTextUnsafe ::
(MonadIO m, HasCallStack, FromJSString result) =>
CSSPageRule -> m result
getSelectorTextUnsafe self
= liftIO
((fromMaybeJSString <$> (js_getSelectorText self)) >>=
maybe (Prelude.error "Nothing to return") return)
| < -US/docs/Web/API/CSSPageRule.selectorText Mozilla CSSPageRule.selectorText documentation >
getSelectorTextUnchecked ::
(MonadIO m, FromJSString result) => CSSPageRule -> m result
getSelectorTextUnchecked self
= liftIO
(fromJust . fromMaybeJSString <$> (js_getSelectorText self))
foreign import javascript unsafe "$1[\"style\"]" js_getStyle ::
CSSPageRule -> IO CSSStyleDeclaration
| < -US/docs/Web/API/CSSPageRule.style Mozilla CSSPageRule.style documentation >
getStyle :: (MonadIO m) => CSSPageRule -> m CSSStyleDeclaration
getStyle self = liftIO (js_getStyle self) |
c82fd694fb97f1acdf071604468ef7185859d188a2050f86d754f0bec2d32880 | ladderlife/om-css | output_css.clj | (ns om-css.output-css
(:require [cljs.analyzer.api :as ana-api]
[clojure.java.io :as io]
[clojure.string :as string]))
(defn setup-io! []
(let [{:keys [css-output-to output-dir output-to]} (ana-api/get-options)
default-fname "out.css"
fname (or css-output-to
(str output-dir default-fname)
(string/join "/"
(-> output-to
(string/split #"/")
pop
(conj default-fname))))]
(add-watch om-css.core/css :watcher
(fn [k atom old-state new-state]
(with-open [out ^java.io.Writer (io/make-writer fname {})]
(binding [*out* out]
(println (string/join "\n" (vals new-state)))
(println)))))))
(setup-io!)
| null | https://raw.githubusercontent.com/ladderlife/om-css/54190d24f385d8e32057a2f5ed3f89e87f3eacdb/src/main/om_css/output_css.clj | clojure | (ns om-css.output-css
(:require [cljs.analyzer.api :as ana-api]
[clojure.java.io :as io]
[clojure.string :as string]))
(defn setup-io! []
(let [{:keys [css-output-to output-dir output-to]} (ana-api/get-options)
default-fname "out.css"
fname (or css-output-to
(str output-dir default-fname)
(string/join "/"
(-> output-to
(string/split #"/")
pop
(conj default-fname))))]
(add-watch om-css.core/css :watcher
(fn [k atom old-state new-state]
(with-open [out ^java.io.Writer (io/make-writer fname {})]
(binding [*out* out]
(println (string/join "\n" (vals new-state)))
(println)))))))
(setup-io!)
| |
0c8a4fb4b36f53727d5c73a0b00635dd9b39e23918de0dc8e5e3eb3ca28ff890 | MinaProtocol/mina | kubernetes_network.ml | open Core_kernel
open Async
open Integration_test_lib
open Mina_transaction
(* exclude from bisect_ppx to avoid type error on GraphQL modules *)
[@@@coverage exclude_file]
let mina_archive_container_id = "archive"
let mina_archive_username = "mina"
let mina_archive_pw = "zo3moong7moog4Iep7eNgo3iecaesahH"
let postgres_url =
Printf.sprintf "postgres:%s@archive-1-postgresql:5432/archive"
mina_archive_username mina_archive_pw
let node_password = "naughty blue worm"
type config =
{ testnet_name : string
; cluster : string
; namespace : string
; graphql_enabled : bool
}
let base_kube_args { cluster; namespace; _ } =
[ "--cluster"; cluster; "--namespace"; namespace ]
module Node = struct
type pod_info =
{ network_keypair : Network_keypair.t option
; primary_container_id : string
(* this is going to be probably either "mina" or "worker" *)
; has_archive_container : bool
(* archive pods have a "mina" container and an "archive" container alongside *)
}
type t =
{ app_id : string; pod_id : string; pod_info : pod_info; config : config }
let id { pod_id; _ } = pod_id
let network_keypair { pod_info = { network_keypair; _ }; _ } = network_keypair
let base_kube_args t = [ "--cluster"; t.cluster; "--namespace"; t.namespace ]
let get_logs_in_container ?container_id { pod_id; config; pod_info; _ } =
let container_id =
Option.value container_id ~default:pod_info.primary_container_id
in
let%bind cwd = Unix.getcwd () in
Integration_test_lib.Util.run_cmd_or_hard_error ~exit_code:13 cwd "kubectl"
(base_kube_args config @ [ "logs"; "-c"; container_id; pod_id ])
(* Run [cmd] inside a container of this node's pod via `kubectl exec`.
   [container_id] defaults to the primary container.  [override_with_pod_id]
   targets a different pod in the same namespace (used e.g. for an archive
   node's postgresql pod).  [exit_code] is the hard-error code (default 10). *)
let run_in_container ?(exit_code = 10) ?container_id ?override_with_pod_id
    ~cmd t =
  let { config; pod_info; _ } = t in
  let pod_id =
    match override_with_pod_id with Some pid -> pid | None -> t.pod_id
  in
  let container_id =
    Option.value container_id ~default:pod_info.primary_container_id
  in
  let%bind cwd = Unix.getcwd () in
  Integration_test_lib.Util.run_cmd_or_hard_error ~exit_code cwd "kubectl"
    ( base_kube_args config
    @ [ "exec"; "-c"; container_id; "-i"; pod_id; "--" ]
    @ cmd )
(* Copy [str] into the file [dest] inside the given container by writing it to
   a local temp file and shelling out to `kubectl cp`.  The temp file is
   removed once the copy completes (the original implementation leaked it). *)
let cp_string_to_container_file ?container_id ~str ~dest t =
  let { pod_id; config; pod_info; _ } = t in
  let container_id =
    Option.value container_id ~default:pod_info.primary_container_id
  in
  let tmp_file, oc =
    Caml.Filename.open_temp_file ~temp_dir:Filename.temp_dir_name
      "integration_test_cp_string" ".tmp"
  in
  Out_channel.output_string oc str ;
  Out_channel.close oc ;
  let%bind cwd = Unix.getcwd () in
  (* kubectl cp destinations take the form <namespace>/<pod>:<path> *)
  let dest_file = sprintf "%s/%s:%s" config.namespace pod_id dest in
  let%bind result =
    Integration_test_lib.Util.run_cmd_or_error cwd "kubectl"
      (base_kube_args config @ [ "cp"; "-c"; container_id; tmp_file; dest_file ])
  in
  (* clean up the local temp file regardless of the copy's outcome *)
  let%map () = Unix.unlink tmp_file in
  result
(* Launch the daemon inside the node's container.  With [fresh_state] set,
   the node's .mina-config state is wiped first so the daemon boots clean. *)
let start ~fresh_state node : unit Malleable_error.t =
  let open Malleable_error.Let_syntax in
  let wipe_state () =
    run_in_container node ~cmd:[ "sh"; "-c"; "rm -rf .mina-config/*" ]
    >>| ignore
  in
  let%bind () =
    if fresh_state then wipe_state () else Malleable_error.return ()
  in
  run_in_container ~exit_code:11 node ~cmd:[ "/start.sh" ] >>| ignore
(* Stop the daemon inside the node's container via its stop script.
   Hard-errors with exit code 12 on failure. *)
let stop node =
  let open Malleable_error.Let_syntax in
  run_in_container ~exit_code:12 node ~cmd:[ "/stop.sh" ] >>| ignore
(* Structured-log metadata identifying this node: namespace, app id, pod id. *)
let logger_metadata node =
  [ ("namespace", `String node.config.namespace)
  ; ("app_id", `String node.app_id)
  ; ("pod_id", `String node.pod_id)
  ]

(* shorthand for the shared GraphQL scalar (de)serializers *)
module Scalars = Graphql_lib.Scalars
(* GraphQL client plus the queries/mutations used to drive and inspect a
   node through its ingress endpoint. *)
module Graphql = struct
  (* URI of the node's GraphQL endpoint as exposed through the testnet's
     ingress: http://<testnet>.graphql.test.o1test.net/<app_id>/graphql *)
  let ingress_uri node =
    let host =
      sprintf "%s.graphql.test.o1test.net" node.config.testnet_name
    in
    let path = sprintf "/%s/graphql" node.app_id in
    Uri.make ~scheme:"http" ~host ~path ~port:80 ()

  module Client = Graphql_lib.Client.Make (struct
    let preprocess_variables_string = Fn.id

    let headers = String.Map.empty
  end)

  (* graphql_ppx uses symbols instead of Base *)
  open Stdlib

  module Encoders = Mina_graphql.Types.Input

  (* unlock a wallet account so the daemon can sign commands with it *)
  module Unlock_account =
  [%graphql
  ({|
mutation ($password: String!, $public_key: PublicKey!) @encoders(module: "Encoders"){
unlockAccount(input: {password: $password, publicKey: $public_key }) {
account {
public_key: publicKey
}
}
}
|}
  [@encoders Encoders] )]

  (* fire repeated batches of payments from several senders to one receiver *)
  module Send_test_payments =
  [%graphql
  {|
mutation ($senders: [PrivateKey!]!,
$receiver: PublicKey!,
$amount: UInt64!,
$fee: UInt64!,
$repeat_count: UInt32!,
$repeat_delay_ms: UInt32!) @encoders(module: "Encoders"){
sendTestPayments(
senders: $senders, receiver: $receiver, amount: $amount, fee: $fee,
repeat_count: $repeat_count,
repeat_delay_ms: $repeat_delay_ms)
}
|}]

  (* send a single signed payment; returns its id, nonce, and hash *)
  module Send_payment =
  [%graphql
  {|
mutation ($input: SendPaymentInput!)@encoders(module: "Encoders"){
sendPayment(input: $input){
payment {
id
nonce
hash
}
}
}
|}]

  (* as [Send_payment], but the signature is supplied externally *)
  module Send_payment_with_raw_sig =
  [%graphql
  {|
mutation (
$input:SendPaymentInput!,
$rawSignature: String!
)@encoders(module: "Encoders")
{
sendPayment(
input:$input,
signature: {rawSignature: $rawSignature}
)
{
payment {
id
nonce
hash
}
}
}
|}]

  (* send a stake delegation; returns its id, nonce, and hash *)
  module Send_delegation =
  [%graphql
  {|
mutation ($input: SendDelegationInput!) @encoders(module: "Encoders"){
sendDelegation(input:$input){
delegation {
id
nonce
hash
}
}
}
|}]

  (* TODO: temporary version *)
  module Send_test_zkapp = Generated_graphql_queries.Send_test_zkapp
  module Pooled_zkapp_commands =
    Generated_graphql_queries.Pooled_zkapp_commands

  (* the node's own peer id plus the peer ids of its current peers *)
  module Query_peer_id =
  [%graphql
  {|
query {
daemonStatus {
addrsAndPorts {
peer {
peerId
}
}
peers { peerId }
}
}
|}]

  (* best chain, projected to state hash / command count / creator *)
  module Best_chain =
  [%graphql
  {|
query ($max_length: Int) @encoders(module: "Encoders"){
bestChain (maxLength: $max_length) {
stateHash @ppxCustom(module: "Graphql_lib.Scalars.String_json")
commandTransactionCount
creatorAccount {
publicKey @ppxCustom(module: "Graphql_lib.Scalars.JSON")
}
}
}
|}]

  (* daemon-side metrics used by [get_metrics] *)
  module Query_metrics =
  [%graphql
  {|
query {
daemonStatus {
metrics {
blockProductionDelay
transactionPoolDiffReceived
transactionPoolDiffBroadcasted
transactionsAddedToPool
transactionPoolSize
}
}
}
|}]

  (* full account lookup by public key and token *)
  module Account =
  [%graphql
  {|
query ($public_key: PublicKey!, $token: UInt64) {
account (publicKey : $public_key, token : $token) {
balance { liquid
locked
total
}
delegate
nonce
permissions { editActionState
editState
incrementNonce
receive
send
access
setDelegate
setPermissions
setZkappUri
setTokenSymbol
setVerificationKey
setVotingFor
setTiming
}
actionState
zkappState
zkappUri
timing { cliffTime @ppxCustom(module: "Graphql_lib.Scalars.JSON")
cliffAmount
vestingPeriod @ppxCustom(module: "Graphql_lib.Scalars.JSON")
vestingIncrement
initialMinimumBalance
}
token
tokenSymbol
verificationKey { verificationKey
hash
}
votingFor
}
}
|}]
end
(* this function will repeatedly attempt to connect to graphql port <num_tries> times before giving up *)
(* Sends [query_obj] to the node's GraphQL endpoint, retrying transport
   failures up to [num_tries] times with [retry_delay_sec] between attempts
   (after an optional [initial_delay_sec]).  GraphQL-level errors are not
   retried, since the server understood and rejected the request. *)
let exec_graphql_request ?(num_tries = 10) ?(retry_delay_sec = 30.0)
    ?(initial_delay_sec = 0.) ~logger ~node ~query_name query_obj =
  let open Deferred.Let_syntax in
  if not node.config.graphql_enabled then
    Deferred.Or_error.error_string
      "graphql is not enabled (hint: set `requires_graphql= true` in the \
       test config)"
  else
    let uri = Graphql.ingress_uri node in
    let metadata =
      [ ("query", `String query_name)
      ; ("uri", `String (Uri.to_string uri))
      ; ("init_delay", `Float initial_delay_sec)
      ]
    in
    [%log info]
      "Attempting to send GraphQL request \"$query\" to \"$uri\" after \
       $init_delay sec"
      ~metadata ;
    let rec retry n =
      if n <= 0 then (
        [%log error]
          "GraphQL request \"$query\" to \"$uri\" failed too many times"
          ~metadata ;
        Deferred.Or_error.errorf
          "GraphQL \"%s\" to \"%s\" request failed too many times" query_name
          (Uri.to_string uri) )
      else
        match%bind Graphql.Client.query query_obj uri with
        | Ok result ->
            [%log info] "GraphQL request \"$query\" to \"$uri\" succeeded"
              ~metadata ;
            Deferred.Or_error.return result
        | Error (`Failed_request err_string) ->
            (* transport-level failure: wait and retry *)
            [%log warn]
              "GraphQL request \"$query\" to \"$uri\" failed: \"$error\" \
               ($num_tries attempts left)"
              ~metadata:
                ( metadata
                @ [ ("error", `String err_string)
                  ; ("num_tries", `Int (n - 1))
                  ] ) ;
            let%bind () = after (Time.Span.of_sec retry_delay_sec) in
            retry (n - 1)
        | Error (`Graphql_error err_string) ->
            [%log error]
              "GraphQL request \"$query\" to \"$uri\" returned an error: \
               \"$error\" (this is a graphql error so not retrying)"
              ~metadata:(metadata @ [ ("error", `String err_string) ]) ;
            Deferred.Or_error.error_string err_string
    in
    let%bind () = after (Time.Span.of_sec initial_delay_sec) in
    retry num_tries
(* Query the node for its own libp2p peer id and the peer ids of its current
   peers.  Returns [(self_id, peer_ids)]. *)
let get_peer_id ~logger t =
  let open Deferred.Or_error.Let_syntax in
  [%log info] "Getting node's peer_id, and the peer_ids of node's peers"
    ~metadata:(logger_metadata t) ;
  let query_obj = Graphql.Query_peer_id.(make @@ makeVariables ()) in
  let%bind query_result_obj =
    exec_graphql_request ~logger ~node:t ~query_name:"query_peer_id" query_obj
  in
  [%log info] "get_peer_id, finished exec_graphql_request" ;
  let self_id_obj = query_result_obj.daemonStatus.addrsAndPorts.peer in
  let%bind self_id =
    match self_id_obj with
    | None ->
        Deferred.Or_error.error_string "Peer not found"
    | Some peer ->
        return peer.peerId
  in
  let peers = query_result_obj.daemonStatus.peers |> Array.to_list in
  let peer_ids = List.map peers ~f:(fun peer -> peer.peerId) in
  [%log info] "get_peer_id, result of graphql query (self_id,[peers]) (%s,%s)"
    self_id
    (String.concat ~sep:" " peer_ids) ;
  return (self_id, peer_ids)
(* [get_peer_id], escalating any failure to a hard error. *)
let must_get_peer_id ~logger t =
  Deferred.bind (get_peer_id ~logger t) ~f:Malleable_error.or_hard_error
(* Fetch up to [max_length] blocks of the node's current best chain,
   projected to state hash, command count, and creator public key. *)
let get_best_chain ?max_length ~logger t =
  let open Deferred.Or_error.Let_syntax in
  let query = Graphql.Best_chain.(make @@ makeVariables ?max_length ()) in
  let%bind result =
    exec_graphql_request ~logger ~node:t ~query_name:"best_chain" query
  in
  match result.bestChain with
  | None | Some [||] ->
      Deferred.Or_error.error_string "failed to get best chains"
  | Some chain ->
      return
      @@ List.map
           ~f:(fun block ->
             Intf.
               { state_hash = block.stateHash
               ; command_transaction_count = block.commandTransactionCount
               ; creator_pk =
                   (* creatorAccount.publicKey arrives as raw JSON; anything
                      other than a JSON string is reported as "unknown" *)
                   ( match block.creatorAccount.publicKey with
                   | `String pk ->
                       pk
                   | _ ->
                       "unknown" )
               } )
           (Array.to_list chain)
(* [get_best_chain], escalating any failure to a hard error. *)
let must_get_best_chain ?max_length ~logger t =
  Deferred.bind
    (get_best_chain ?max_length ~logger t)
    ~f:Malleable_error.or_hard_error
(* Look up [account_id] (public key + token) on the node via GraphQL,
   returning the raw query-result object. *)
let get_account ~logger t ~account_id =
  let pk = Mina_base.Account_id.public_key account_id in
  let token = Mina_base.Account_id.token_id account_id in
  [%log info] "Getting account"
    ~metadata:
      ( ("pub_key", Signature_lib.Public_key.Compressed.to_yojson pk)
      :: logger_metadata t ) ;
  let get_account_obj =
    Graphql.Account.(
      make
      @@ makeVariables
           ~public_key:(Graphql_lib.Encoders.public_key pk)
           ~token:(Graphql_lib.Encoders.token token)
           ())
  in
  exec_graphql_request ~logger ~node:t ~query_name:"get_account_graphql"
    get_account_obj
(* Summary of an account's nonce and balances as reported by the daemon. *)
type account_data =
  { nonce : Mina_numbers.Account_nonce.t
  ; total_balance : Currency.Balance.t
  ; liquid_balance_opt : Currency.Balance.t option
  ; locked_balance_opt : Currency.Balance.t option
  }
(* Fetch [account_id]'s nonce and balances, failing if the account does not
   exist on the node. *)
let get_account_data ~logger t ~account_id =
  let open Deferred.Or_error.Let_syntax in
  let public_key = Mina_base.Account_id.public_key account_id in
  let token = Mina_base.Account_id.token_id account_id in
  [%log info] "Getting account data, which is its balances and nonce"
    ~metadata:
      ( ("pub_key", Signature_lib.Public_key.Compressed.to_yojson public_key)
      :: logger_metadata t ) ;
  let%bind account_obj = get_account ~logger t ~account_id in
  match account_obj.account with
  | None ->
      Deferred.Or_error.errorf
        !"Account with Account id %{sexp:Mina_base.Account_id.t}, public_key \
          %s, and token %s not found"
        account_id
        (Signature_lib.Public_key.Compressed.to_string public_key)
        (Mina_base.Token_id.to_string token)
  | Some acc ->
      return
        { nonce =
            Option.value_exn
              ~message:
                "the nonce from get_balance is None, which should be \
                 impossible"
              acc.nonce
        ; total_balance = acc.balance.total
        ; liquid_balance_opt = acc.balance.liquid
        ; locked_balance_opt = acc.balance.locked
        }
(* [get_account_data], escalating any failure to a hard error. *)
let must_get_account_data ~logger t ~account_id =
  Deferred.bind
    (get_account_data ~logger t ~account_id)
    ~f:Malleable_error.or_hard_error
(* Translate the GraphQL account-permissions record into
   [Mina_base.Permissions.t]. *)
let permissions_of_account_permissions account_permissions :
    Mina_base.Permissions.t =
  (* the polymorphic variants come from Partial_accounts.auth_required in Mina_graphql *)
  let to_auth_required = function
    | `Either ->
        Mina_base.Permissions.Auth_required.Either
    | `Impossible ->
        Impossible
    | `None ->
        None
    | `Proof ->
        Proof
    | `Signature ->
        Signature
  in
  let open Graphql.Account in
  { edit_action_state = to_auth_required account_permissions.editActionState
  ; edit_state = to_auth_required account_permissions.editState
  ; increment_nonce = to_auth_required account_permissions.incrementNonce
  ; receive = to_auth_required account_permissions.receive
  ; send = to_auth_required account_permissions.send
  ; access = to_auth_required account_permissions.access
  ; set_delegate = to_auth_required account_permissions.setDelegate
  ; set_permissions = to_auth_required account_permissions.setPermissions
  ; set_zkapp_uri = to_auth_required account_permissions.setZkappUri
  ; set_token_symbol = to_auth_required account_permissions.setTokenSymbol
  ; set_verification_key =
      to_auth_required account_permissions.setVerificationKey
  ; set_voting_for = to_auth_required account_permissions.setVotingFor
  ; set_timing = to_auth_required account_permissions.setTiming
  }
let graphql_uri node = Graphql.ingress_uri node |> Uri.to_string
(* Fetch [account_id]'s permissions from the ledger as
   [Mina_base.Permissions.t], failing if the account or its permissions are
   missing. *)
let get_account_permissions ~logger t ~account_id =
  let open Deferred.Or_error in
  let open Let_syntax in
  let%bind account_obj = get_account ~logger t ~account_id in
  match account_obj.account with
  | Some account -> (
      match account.permissions with
      | Some ledger_permissions ->
          return @@ permissions_of_account_permissions ledger_permissions
      | None ->
          fail
            (Error.of_string "Could not get permissions from ledger account")
      )
  | None ->
      fail (Error.of_string "Could not get account from ledger")
(* return an Account_update.Update.t with all fields `Set` to the
   value in the account, or `Keep` if value unavailable,
   as if this update had been applied to the account
*)
(* Build a [Mina_base.Account_update.Update.t] from the on-chain account:
   every field reported by the ledger becomes [Set]; a missing zkApp field is
   an error, except timing, which is [Keep] when fully absent. *)
let get_account_update ~logger t ~account_id =
  let open Deferred.Or_error in
  let open Let_syntax in
  let%bind account_obj = get_account ~logger t ~account_id in
  match account_obj.account with
  | Some account ->
      let open Mina_base.Zkapp_basic.Set_or_keep in
      (* zkApp app state: each field string becomes [Set] *)
      let%bind app_state =
        match account.zkappState with
        | Some strs ->
            let fields =
              Array.to_list strs |> Base.List.map ~f:(fun s -> Set s)
            in
            return (Mina_base.Zkapp_state.V.of_list_exn fields)
        | None ->
            fail
              (Error.of_string
                 (sprintf
                    "Expected zkApp account with an app state for public key \
                     %s"
                    (Signature_lib.Public_key.Compressed.to_base58_check
                       (Mina_base.Account_id.public_key account_id) ) ) )
      in
      let%bind delegate =
        match account.delegate with
        | Some s ->
            return (Set s)
        | None ->
            fail (Error.of_string "Expected delegate in account")
      in
      let%bind verification_key =
        match account.verificationKey with
        | Some vk_obj ->
            let data = vk_obj.verificationKey in
            let hash = vk_obj.hash in
            return (Set ({ data; hash } : _ With_hash.t))
        | None ->
            fail
              (Error.of_string
                 (sprintf
                    "Expected zkApp account with a verification key for \
                     public_key %s"
                    (Signature_lib.Public_key.Compressed.to_base58_check
                       (Mina_base.Account_id.public_key account_id) ) ) )
      in
      let%bind permissions =
        match account.permissions with
        | Some perms ->
            return @@ Set (permissions_of_account_permissions perms)
        | None ->
            fail (Error.of_string "Expected permissions in account")
      in
      let%bind zkapp_uri =
        match account.zkappUri with
        | Some s ->
            return @@ Set s
        | None ->
            fail (Error.of_string "Expected zkApp URI in account")
      in
      let%bind token_symbol =
        match account.tokenSymbol with
        | Some s ->
            return @@ Set s
        | None ->
            fail (Error.of_string "Expected token symbol in account")
      in
      (* timing: [Keep] when all five pieces are absent, [Set] when all five
         are present, error on any partial combination.  cliffTime and
         vestingPeriod arrive as raw JSON and must be JSON strings. *)
      let%bind timing =
        let timing = account.timing in
        let cliff_amount = timing.cliffAmount in
        let cliff_time = timing.cliffTime in
        let vesting_period = timing.vestingPeriod in
        let vesting_increment = timing.vestingIncrement in
        let initial_minimum_balance = timing.initialMinimumBalance in
        match
          ( cliff_amount
          , cliff_time
          , vesting_period
          , vesting_increment
          , initial_minimum_balance )
        with
        | None, None, None, None, None ->
            return @@ Keep
        | Some amt, Some tm, Some period, Some incr, Some bal ->
            let cliff_amount = amt in
            let%bind cliff_time =
              match tm with
              | `String s ->
                  return @@ Mina_numbers.Global_slot.of_string s
              | _ ->
                  fail
                    (Error.of_string
                       "Expected string for cliff time in account timing" )
            in
            let%bind vesting_period =
              match period with
              | `String s ->
                  return @@ Mina_numbers.Global_slot.of_string s
              | _ ->
                  fail
                    (Error.of_string
                       "Expected string for vesting period in account timing" )
            in
            let vesting_increment = incr in
            let initial_minimum_balance = bal in
            return
              (Set
                 ( { initial_minimum_balance
                   ; cliff_amount
                   ; cliff_time
                   ; vesting_period
                   ; vesting_increment
                   }
                   : Mina_base.Account_update.Update.Timing_info.t ) )
        | _ ->
            fail (Error.of_string "Some pieces of account timing are missing")
      in
      let%bind voting_for =
        match account.votingFor with
        | Some s ->
            return @@ Set s
        | None ->
            fail (Error.of_string "Expected voting-for state hash in account")
      in
      return
        ( { app_state
          ; delegate
          ; verification_key
          ; permissions
          ; zkapp_uri
          ; token_symbol
          ; timing
          ; voting_for
          }
          : Mina_base.Account_update.Update.t )
  | None ->
      fail (Error.of_string "Could not get account from ledger")
(* Identifiers of a signed command accepted by the daemon. *)
type signed_command_result =
  { id : string  (* GraphQL transaction id *)
  ; hash : Transaction_hash.t
  ; nonce : Mina_numbers.Account_nonce.t
  }
(* Render a GraphQL TransactionId scalar as its JSON string form. *)
let transaction_id_to_string id =
  Graphql_lib.Scalars.TransactionId.serialize id |> Yojson.Basic.to_string
(* if we expect failure, might want retry_on_graphql_error to be false *)
(* Send a payment from [sender_pub_key] to [receiver_pub_key].  Unlocks the
   sender's wallet account first (accounts start locked), then issues the
   sendPayment mutation and returns the command's id, hash, and nonce. *)
let send_payment ~logger t ~sender_pub_key ~receiver_pub_key ~amount ~fee =
  [%log info] "Sending a payment" ~metadata:(logger_metadata t) ;
  let open Deferred.Or_error.Let_syntax in
  let sender_pk_str =
    Signature_lib.Public_key.Compressed.to_string sender_pub_key
  in
  [%log info] "send_payment: unlocking account"
    ~metadata:[ ("sender_pk", `String sender_pk_str) ] ;
  let unlock_sender_account_graphql () =
    let unlock_account_obj =
      Graphql.Unlock_account.(
        make
        @@ makeVariables ~password:node_password ~public_key:sender_pub_key ())
    in
    exec_graphql_request ~logger ~node:t ~initial_delay_sec:0.
      ~query_name:"unlock_sender_account_graphql" unlock_account_obj
  in
  let%bind _unlock_acct_obj = unlock_sender_account_graphql () in
  let send_payment_graphql () =
    let input =
      Mina_graphql.Types.Input.SendPaymentInput.make_input
        ~from:sender_pub_key ~to_:receiver_pub_key ~amount ~fee ()
    in
    let send_payment_obj =
      Graphql.Send_payment.(make @@ makeVariables ~input ())
    in
    exec_graphql_request ~logger ~node:t ~query_name:"send_payment_graphql"
      send_payment_obj
  in
  let%map sent_payment_obj = send_payment_graphql () in
  let return_obj = sent_payment_obj.sendPayment.payment in
  let res =
    { id = transaction_id_to_string return_obj.id
    ; hash = return_obj.hash
    ; nonce = Mina_numbers.Account_nonce.of_int return_obj.nonce
    }
  in
  [%log info] "Sent payment"
    ~metadata:
      [ ("user_command_id", `String res.id)
      ; ("hash", `String (Transaction_hash.to_base58_check res.hash))
      ; ("nonce", `Int (Mina_numbers.Account_nonce.to_int res.nonce))
      ] ;
  res
(* [send_payment], escalating any failure to a hard error. *)
let must_send_payment ~logger t ~sender_pub_key ~receiver_pub_key ~amount ~fee
    =
  Deferred.bind
    (send_payment ~logger t ~sender_pub_key ~receiver_pub_key ~amount ~fee)
    ~f:Malleable_error.or_hard_error
(* Submit [zkapp_command] via the internal sendTestZkapp mutation.  Fails if
   the daemon reports any failure reasons; otherwise returns the zkapp's
   transaction id. *)
let send_zkapp ~logger (t : t) ~(zkapp_command : Mina_base.Zkapp_command.t) =
  [%log info] "Sending a zkapp"
    ~metadata:
      [ ("namespace", `String t.config.namespace)
      ; ("pod_id", `String (id t))
      ] ;
  let open Deferred.Or_error.Let_syntax in
  let zkapp_command_json =
    Mina_base.Zkapp_command.to_json zkapp_command |> Yojson.Safe.to_basic
  in
  let send_zkapp_graphql () =
    let send_zkapp_obj =
      Graphql.Send_test_zkapp.(
        make @@ makeVariables ~zkapp_command:zkapp_command_json ())
    in
    exec_graphql_request ~logger ~node:t ~query_name:"send_zkapp_graphql"
      send_zkapp_obj
  in
  let%bind sent_zkapp_obj = send_zkapp_graphql () in
  let%bind () =
    match sent_zkapp_obj.internalSendZkapp.zkapp.failureReason with
    | None ->
        return ()
    | Some s ->
        (* collect (index, failures) pairs and render them as JSON in the
           error message *)
        Deferred.Or_error.errorf "Zkapp failed, reason: %s"
          ( Array.fold ~init:[] s ~f:(fun acc f ->
                match f with
                | None ->
                    acc
                | Some f ->
                    let t =
                      ( Option.value_exn f.index
                      , f.failures |> Array.to_list |> List.rev )
                    in
                    t :: acc )
          |> Mina_base.Transaction_status.Failure.Collection.Display.to_yojson
          |> Yojson.Safe.to_string )
  in
  let zkapp_id =
    transaction_id_to_string sent_zkapp_obj.internalSendZkapp.zkapp.id
  in
  [%log info] "Sent zkapp" ~metadata:[ ("zkapp_id", `String zkapp_id) ] ;
  return zkapp_id
(* Fetch the transaction ids of [pk]'s zkApp commands currently sitting in
   the node's transaction pool. *)
let get_pooled_zkapp_commands ~logger (t : t)
    ~(pk : Signature_lib.Public_key.Compressed.t) =
  [%log info] "Retrieving zkapp_commands from transaction pool"
    ~metadata:
      [ ("namespace", `String t.config.namespace)
      ; ("pod_id", `String (id t))
      ; ("pub_key", Signature_lib.Public_key.Compressed.to_yojson pk)
      ] ;
  let open Deferred.Or_error.Let_syntax in
  let get_pooled_zkapp_commands_graphql () =
    let get_pooled_zkapp_commands =
      Graphql.Pooled_zkapp_commands.(
        make
        @@ makeVariables ~public_key:(Graphql_lib.Encoders.public_key pk) ())
    in
    exec_graphql_request ~logger ~node:t
      ~query_name:"get_pooled_zkapp_commands" get_pooled_zkapp_commands
  in
  let%bind zkapp_pool_obj = get_pooled_zkapp_commands_graphql () in
  (* NOTE(review): this branch errors whenever the pool is NON-empty, which
     means [transaction_ids] below can only ever be the empty list — looks
     suspicious; confirm against the intended semantics before relying on a
     non-empty result. *)
  let%bind () =
    match zkapp_pool_obj.pooledZkappCommands with
    | [||] ->
        return ()
    | zkapp_commands ->
        Deferred.Or_error.errorf "Zkapp failed, reasons: %s"
          ( Array.fold ~init:[] zkapp_commands
              ~f:(fun failures zkapp_command ->
                match zkapp_command.failureReason with
                | None ->
                    failures
                | Some f ->
                    let inner_failures =
                      Array.fold ~init:[] f ~f:(fun failures failure ->
                          match failure with
                          | None ->
                              failures
                          | Some f ->
                              ( Option.value_exn f.index
                              , f.failures |> Array.to_list |> List.rev )
                              :: failures )
                    in
                    (* NOTE(review): prepending the whole accumulator to each
                       inner failure then concatenating duplicates entries —
                       presumably a plain append was intended; verify. *)
                    List.map inner_failures ~f:(fun f -> f :: failures)
                    |> List.concat )
          |> Mina_base.Transaction_status.Failure.Collection.Display.to_yojson
          |> Yojson.Safe.to_string )
  in
  let transaction_ids =
    Array.map zkapp_pool_obj.pooledZkappCommands ~f:(fun zkapp_command ->
        zkapp_command.id |> Transaction_id.to_base64 )
    |> Array.to_list
  in
  [%log info] "Retrieved zkapp_commands from transaction pool"
    ~metadata:
      [ ("namespace", `String t.config.namespace)
      ; ("pod_id", `String (id t))
      ; ( "transaction ids"
        , `List (List.map ~f:(fun t -> `String t) transaction_ids) )
      ] ;
  return transaction_ids
(* Send a stake delegation from [sender_pub_key] to [receiver_pub_key].
   Unlocks the sender's wallet account first (accounts start locked), then
   issues the sendDelegation mutation and returns the command's id, hash, and
   nonce. *)
let send_delegation ~logger t ~sender_pub_key ~receiver_pub_key ~fee =
  [%log info] "Sending stake delegation" ~metadata:(logger_metadata t) ;
  let open Deferred.Or_error.Let_syntax in
  let sender_pk_str =
    Signature_lib.Public_key.Compressed.to_string sender_pub_key
  in
  [%log info] "send_delegation: unlocking account"
    ~metadata:[ ("sender_pk", `String sender_pk_str) ] ;
  let unlock_sender_account_graphql () =
    let unlock_account_obj =
      Graphql.Unlock_account.(
        make
        (* use the shared [node_password] constant instead of repeating the
           literal, keeping this in sync with [send_payment] *)
        @@ makeVariables ~password:node_password ~public_key:sender_pub_key ())
    in
    exec_graphql_request ~logger ~node:t
      ~query_name:"unlock_sender_account_graphql" unlock_account_obj
  in
  let%bind _ = unlock_sender_account_graphql () in
  let send_delegation_graphql () =
    let input =
      Mina_graphql.Types.Input.SendDelegationInput.make_input
        ~from:sender_pub_key ~to_:receiver_pub_key ~fee ()
    in
    let send_delegation_obj =
      Graphql.Send_delegation.(make @@ makeVariables ~input ())
    in
    exec_graphql_request ~logger ~node:t ~query_name:"send_delegation_graphql"
      send_delegation_obj
  in
  let%map result_obj = send_delegation_graphql () in
  let return_obj = result_obj.sendDelegation.delegation in
  let res =
    { id = transaction_id_to_string return_obj.id
    ; hash = return_obj.hash
    ; nonce = Mina_numbers.Account_nonce.of_int return_obj.nonce
    }
  in
  [%log info] "stake delegation sent"
    ~metadata:
      [ ("user_command_id", `String res.id)
      ; ("hash", `String (Transaction_hash.to_base58_check res.hash))
      ; ("nonce", `Int (Mina_numbers.Account_nonce.to_int res.nonce))
      ] ;
  res
(* Send a payment whose signature was produced externally: the full payload
   (amount, fee, nonce, memo, valid_until) plus [raw_signature] is submitted
   via the sendPayment mutation's signature argument. *)
let send_payment_with_raw_sig ~logger t ~sender_pub_key ~receiver_pub_key
    ~amount ~fee ~nonce ~memo ~(valid_until : Mina_numbers.Global_slot.t)
    ~raw_signature =
  [%log info] "Sending a payment with raw signature"
    ~metadata:(logger_metadata t) ;
  let open Deferred.Or_error.Let_syntax in
  let send_payment_graphql () =
    let open Graphql.Send_payment_with_raw_sig in
    let input =
      Mina_graphql.Types.Input.SendPaymentInput.make_input
        ~from:sender_pub_key ~to_:receiver_pub_key ~amount ~fee ~memo ~nonce
        ~valid_until:(Mina_numbers.Global_slot.to_uint32 valid_until)
        ()
    in
    let variables = makeVariables ~input ~rawSignature:raw_signature () in
    let send_payment_obj = make variables in
    let variables_json_basic =
      variablesToJson (serializeVariables variables)
    in
    (* An awkward conversion from Yojson.Basic to Yojson.Safe *)
    let variables_json =
      Yojson.Basic.to_string variables_json_basic |> Yojson.Safe.from_string
    in
    [%log info] "send_payment_obj with $variables "
      ~metadata:[ ("variables", variables_json) ] ;
    exec_graphql_request ~logger ~node:t
      ~query_name:"Send_payment_with_raw_sig_graphql" send_payment_obj
  in
  let%map sent_payment_obj = send_payment_graphql () in
  let return_obj = sent_payment_obj.sendPayment.payment in
  let res =
    { id = transaction_id_to_string return_obj.id
    ; hash = return_obj.hash
    ; nonce = Mina_numbers.Account_nonce.of_int return_obj.nonce
    }
  in
  [%log info] "Sent payment"
    ~metadata:
      [ ("user_command_id", `String res.id)
      ; ("hash", `String (Transaction_hash.to_base58_check res.hash))
      ; ("nonce", `Int (Mina_numbers.Account_nonce.to_int res.nonce))
      ] ;
  res
(* [send_payment_with_raw_sig], escalating any failure to a hard error. *)
let must_send_payment_with_raw_sig ~logger t ~sender_pub_key ~receiver_pub_key
    ~amount ~fee ~nonce ~memo ~valid_until ~raw_signature =
  Deferred.bind
    (send_payment_with_raw_sig ~logger t ~sender_pub_key ~receiver_pub_key
       ~amount ~fee ~nonce ~memo ~valid_until ~raw_signature )
    ~f:Malleable_error.or_hard_error
(* [send_delegation], escalating any failure to a hard error. *)
let must_send_delegation ~logger t ~sender_pub_key ~receiver_pub_key ~fee =
  Deferred.bind
    (send_delegation ~logger t ~sender_pub_key ~receiver_pub_key ~fee)
    ~f:Malleable_error.or_hard_error
(* Fire [repeat_count] rounds of test payments from each of [senders] to
   [receiver_pub_key], [repeat_delay_ms] apart, via the sendTestPayments
   mutation.  Only the side effect matters; the response is discarded. *)
let send_test_payments ~repeat_count ~repeat_delay_ms ~logger t ~senders
    ~receiver_pub_key ~amount ~fee =
  [%log info] "Sending a series of test payments"
    ~metadata:(logger_metadata t) ;
  let open Deferred.Or_error.Let_syntax in
  let send_test_payments_graphql () =
    let send_test_payments_obj =
      Graphql.Send_test_payments.(
        make
        @@ makeVariables ~senders ~receiver:receiver_pub_key
             ~amount:(Currency.Amount.to_uint64 amount)
             ~fee:(Currency.Fee.to_uint64 fee)
             ~repeat_count ~repeat_delay_ms ())
    in
    (* label the request accurately in logs (was "send_payment_graphql",
       copy-pasted from [send_payment]) *)
    exec_graphql_request ~logger ~node:t
      ~query_name:"send_test_payments_graphql" send_test_payments_obj
  in
  let%map _ = send_test_payments_graphql () in
  [%log info] "Sent test payments"
(* [send_test_payments], escalating any failure to a hard error. *)
let must_send_test_payments ~repeat_count ~repeat_delay_ms ~logger t ~senders
    ~receiver_pub_key ~amount ~fee =
  Deferred.bind
    (send_test_payments ~repeat_count ~repeat_delay_ms ~logger t ~senders
       ~receiver_pub_key ~amount ~fee )
    ~f:Malleable_error.or_hard_error
(* Dump the contents of an archive node's postgresql database to [data_file]
   by running [pg_dump] inside the node's companion postgresql pod. *)
let dump_archive_data ~logger (t : t) ~data_file =
  (* this function won't work if `t` doesn't happen to be an archive node *)
  if not t.pod_info.has_archive_container then
    failwith
      "No archive container found. One can only dump archive data of an \
       archive node." ;
  let open Malleable_error.Let_syntax in
  let postgresql_pod_id = t.app_id ^ "-postgresql-0" in
  let postgresql_container_id = "postgresql" in
  (* Some quick clarification on the archive nodes:
       An archive node archives all blocks as they come through, but does not produce blocks.
       An archive node uses postgresql as storage, the postgresql db needs to be separately brought up and is sort of it's own thing infra wise
       Archive nodes can be run side-by-side with an actual mina node

     in the integration test framework, every archive node will have it's own single postgresql instance.
     thus in the integration testing framework there will always be a one to one correspondence between archive node and postgresql db.
     however more generally, it's entirely possible for a mina user/operator set up multiple archive nodes to be backed by a single postgresql database.
     But for now we will assume that we don't need to test that

     The integration test framework creates kubenetes deployments or "workloads" as they are called in GKE, but Nodes are mainly tracked by pod_id

     A postgresql workload in the integration test framework will always have 1 managed pod,
       whose pod_id is simply the app id/workload name of the archive node appended with "-postgresql-0".
       so if the archive node is called "archive-1", then the corresponding postgresql managed pod will be called "archive-1-postgresql-0".
       That managed pod will have exactly 1 container, and it will be called simply "postgresql"

     It's rather hardcoded but this was just the simplest way to go, as our kubernetes_network tracks Nodes, ie MINA nodes. a postgresql db is hard to account for
     It's possible to run pg_dump from the archive node instead of directly reaching out to the postgresql pod, and that's what we used to do but there were occasionally version mismatches between the pg_dump on the archive node and the postgresql on the postgresql db
  *)
  [%log info] "Dumping archive data from (node: %s, container: %s)"
    postgresql_pod_id postgresql_container_id ;
  let%map data =
    run_in_container t ~container_id:postgresql_container_id
      ~override_with_pod_id:postgresql_pod_id
      ~cmd:[ "pg_dump"; "--create"; "--no-owner"; postgres_url ]
  in
  [%log info] "Dumping archive data to file %s" data_file ;
  Out_channel.with_file data_file ~f:(fun out_ch ->
      Out_channel.output_string out_ch data )
(* Run mina-replayer inside the node's archive container against the archive
   database, using the node's genesis ledger accounts as the replayer input. *)
let run_replayer ~logger (t : t) =
  [%log info] "Running replayer on archived data (node: %s, container: %s)"
    t.pod_id mina_archive_container_id ;
  let open Malleable_error.Let_syntax in
  (* extract the genesis accounts from the node's daemon config *)
  let%bind accounts =
    run_in_container t
      ~cmd:[ "jq"; "-c"; ".ledger.accounts"; "/config/daemon.json" ]
  in
  let replayer_input =
    sprintf
      {| { "genesis_ledger": { "accounts": %s, "add_genesis_winner": true }} |}
      accounts
  in
  let dest = "replayer-input.json" in
  let%bind _res =
    Deferred.bind ~f:Malleable_error.return
      (cp_string_to_container_file t ~container_id:mina_archive_container_id
         ~str:replayer_input ~dest )
  in
  run_in_container t ~container_id:mina_archive_container_id
    ~cmd:
      [ "mina-replayer"
      ; "--archive-uri"
      ; postgres_url
      ; "--input-file"
      ; dest
      ; "--output-file"
      ; "/dev/null"
      ; "--continue-on-error"
      ]
(* Write the node's primary container logs to [log_file]. *)
let dump_mina_logs ~logger (t : t) ~log_file =
  let open Malleable_error.Let_syntax in
  [%log info] "Dumping container logs from (node: %s, container: %s)" t.pod_id
    t.pod_info.primary_container_id ;
  let%map logs = get_logs_in_container t in
  [%log info] "Dumping container log to file %s" log_file ;
  Out_channel.with_file log_file ~f:(fun out_ch ->
      Out_channel.output_string out_ch logs )
(* Scan the node's container logs for precomputed blocks embedded in log
   metadata and write each one to "<state_hash>.json" in the current
   directory, skipping files that already exist. *)
let dump_precomputed_blocks ~logger (t : t) =
  let open Malleable_error.Let_syntax in
  [%log info]
    "Dumping precomputed blocks from logs for (node: %s, container: %s)"
    t.pod_id t.pod_info.primary_container_id ;
  let%bind logs = get_logs_in_container t in
  (* kubectl logs may include non-log output, like "Using password from environment variable" *)
  let log_lines =
    String.split logs ~on:'\n'
    |> List.filter ~f:(String.is_prefix ~prefix:"{\"timestamp\":")
  in
  let jsons = List.map log_lines ~f:Yojson.Safe.from_string in
  (* keep only each log line's "metadata" object *)
  let metadata_jsons =
    List.map jsons ~f:(fun json ->
        match json with
        | `Assoc items -> (
            match List.Assoc.find items ~equal:String.equal "metadata" with
            | Some md ->
                md
            | None ->
                failwithf "Log line is missing metadata: %s"
                  (Yojson.Safe.to_string json)
                  () )
        | other ->
            failwithf "Expected log line to be a JSON record, got: %s"
              (Yojson.Safe.to_string other)
              () )
  in
  (* pair each embedded precomputed block with its state hash *)
  let state_hash_and_blocks =
    List.fold metadata_jsons ~init:[] ~f:(fun acc json ->
        match json with
        | `Assoc items -> (
            match
              List.Assoc.find items ~equal:String.equal "precomputed_block"
            with
            | Some block -> (
                match
                  List.Assoc.find items ~equal:String.equal "state_hash"
                with
                | Some state_hash ->
                    (state_hash, block) :: acc
                | None ->
                    failwith
                      "Log metadata contains a precomputed block, but no \
                       state hash" )
            | None ->
                acc )
        | other ->
            failwithf "Expected log line to be a JSON record, got: %s"
              (Yojson.Safe.to_string other)
              () )
  in
  let%bind.Deferred () =
    Deferred.List.iter state_hash_and_blocks
      ~f:(fun (state_hash_json, block_json) ->
        (* strip the surrounding double quotes from the JSON string *)
        let double_quoted_state_hash =
          Yojson.Safe.to_string state_hash_json
        in
        let state_hash =
          String.sub double_quoted_state_hash ~pos:1
            ~len:(String.length double_quoted_state_hash - 2)
        in
        let block = Yojson.Safe.pretty_to_string block_json in
        let filename = state_hash ^ ".json" in
        match%map.Deferred Sys.file_exists filename with
        | `Yes ->
            [%log info]
              "File already exists for precomputed block with state hash %s"
              state_hash
        | _ ->
            [%log info]
              "Dumping precomputed block with state hash %s to file %s"
              state_hash filename ;
            Out_channel.with_file filename ~f:(fun out_ch ->
                Out_channel.output_string out_ch block ) )
  in
  Malleable_error.return ()
(* Queries the node's daemonStatus metrics over GraphQL and returns them as
   an [Intf] metrics record. *)
let get_metrics ~logger t =
  let open Deferred.Or_error.Let_syntax in
  [%log info] "Getting node's metrics" ~metadata:(logger_metadata t) ;
  let%bind result =
    exec_graphql_request ~logger ~node:t ~query_name:"query_metrics"
      (Graphql.Query_metrics.make ())
  in
  [%log info] "get_metrics, finished exec_graphql_request" ;
  (* Unpack the nested GraphQL response into flat locals. *)
  let metrics = result.daemonStatus.metrics in
  let block_production_delay = Array.to_list metrics.blockProductionDelay in
  let transaction_pool_diff_received = metrics.transactionPoolDiffReceived in
  let transaction_pool_diff_broadcasted =
    metrics.transactionPoolDiffBroadcasted
  in
  let transactions_added_to_pool = metrics.transactionsAddedToPool in
  let transaction_pool_size = metrics.transactionPoolSize in
  [%log info]
    "get_metrics, result of graphql query (block_production_delay; \
     tx_received; tx_broadcasted; txs_added_to_pool; tx_pool_size) (%s; %d; \
     %d; %d; %d)"
    (String.concat ~sep:", "
       (List.map ~f:string_of_int block_production_delay) )
    transaction_pool_diff_received transaction_pool_diff_broadcasted
    transactions_added_to_pool transaction_pool_size ;
  return
    Intf.
      { block_production_delay
      ; transaction_pool_diff_broadcasted
      ; transaction_pool_diff_received
      ; transactions_added_to_pool
      ; transaction_pool_size
      }
end
end
module Workload_to_deploy = struct
  (* A kubernetes deployment (workload) expected in the namespace, together
     with per-replica pod metadata. *)
  type t = { workload_id : string; pod_info : Node.pod_info list }

  let construct_workload workload_id pod_info : t = { workload_id; pod_info }

  (* Builds a [Node.pod_info]; [has_archive_container] defaults to false. *)
  let cons_pod_info ?network_keypair ?(has_archive_container = false)
      primary_container_id : Node.pod_info =
    { network_keypair; has_archive_container; primary_container_id }

  (* Resolves the workload into concrete [Node.t]s: queries kubectl for the
     deployment's app label, lists the pods carrying that label, and zips
     them with [t.pod_info].  Fails (via [failwithf]) when the number of
     pods found does not match the expected [pod_info] list. *)
  let get_nodes_from_workload t ~config =
    (* NOTE(review): this let%bind runs before Malleable_error's Let_syntax
       is opened below — presumably in the ambient Deferred monad; confirm *)
    let%bind cwd = Unix.getcwd () in
    let open Malleable_error.Let_syntax in
    let%bind app_id =
      Deferred.bind ~f:Malleable_error.or_hard_error
        (Integration_test_lib.Util.run_cmd_or_error cwd "kubectl"
           ( base_kube_args config
           @ [ "get"
             ; "deployment"
             ; t.workload_id
             ; "-o"
             ; "jsonpath={.spec.selector.matchLabels.app}"
             ] ) )
    in
    let%map pod_ids_str =
      Integration_test_lib.Util.run_cmd_or_hard_error cwd "kubectl"
        ( base_kube_args config
        @ [ "get"; "pod"; "-l"; "app=" ^ app_id; "-o"; "name" ] )
    in
    (* "kubectl get pod -o name" yields lines like "pod/<name>"; drop empty
       lines and the "pod/" prefix. *)
    let pod_ids =
      String.split pod_ids_str ~on:'\n'
      |> List.filter ~f:(Fn.compose not String.is_empty)
      |> List.map ~f:(String.substr_replace_first ~pattern:"pod/" ~with_:"")
    in
    if Stdlib.List.compare_lengths t.pod_info pod_ids <> 0 then
      failwithf
        "Unexpected number of replicas in kubernetes deployment for workload \
         %s: expected %d, got %d"
        t.workload_id (List.length t.pod_info) (List.length pod_ids) () ;
    List.zip_exn t.pod_info pod_ids
    |> List.map ~f:(fun (pod_info, pod_id) ->
           { Node.app_id; pod_id; pod_info; config } )
end
(* The deployed test network as tracked by the framework. *)
type t =
  { namespace : string
  ; constants : Test_config.constants
  ; seeds : Node.t list
  ; block_producers : Node.t list
  ; snark_coordinators : Node.t list
  ; snark_workers : Node.t list
  ; archive_nodes : Node.t list
  (* NOTE(review): this field was garbled in the source ("; : . Keypair.t
     list"); reconstructed as a Signature_lib.Keypair.t list by analogy with
     the two fields below — TODO confirm the original field name upstream *)
  ; snark_coordinator_keypairs : Signature_lib.Keypair.t list
  ; block_producer_keypairs : Signature_lib.Keypair.t list
  ; extra_genesis_keypairs : Signature_lib.Keypair.t list
  (* Index of every node keyed by its kubernetes pod id. *)
  ; nodes_by_pod_id : Node.t String.Map.t
  }
(* Simple field accessors over the network record [t]. *)
let constants { constants; _ } = constants

let constraint_constants { constants; _ } = constants.constraints

let genesis_constants { constants; _ } = constants.genesis

let seeds { seeds; _ } = seeds

let block_producers { block_producers; _ } = block_producers

let snark_coordinators { snark_coordinators; _ } = snark_coordinators

let snark_workers { snark_workers; _ } = snark_workers

let archive_nodes { archive_nodes; _ } = archive_nodes
(* all_nodes returns all *actual* mina nodes; note that a snark_worker is a pod within the network but not technically a mina node, therefore not included here. snark coordinators on the other hand ARE mina nodes *)
let all_nodes { seeds; block_producers; snark_coordinators; archive_nodes; _ } =
  seeds @ block_producers @ snark_coordinators @ archive_nodes
(* all_pods returns everything in the network. remember that snark_workers will never initialize and will never sync, and aren't supposed to *)
let all_pods
    { seeds
    ; block_producers
    ; snark_coordinators
    ; snark_workers
    ; archive_nodes
    ; _
    } =
  seeds @ block_producers @ snark_coordinators @ snark_workers @ archive_nodes
(* all_non_seed_pods returns everything in the network except seed nodes *)
let all_non_seed_pods
    { block_producers; snark_coordinators; snark_workers; archive_nodes; _ } =
  block_producers @ snark_coordinators @ snark_workers @ archive_nodes
(* Concatenation of the block-producer and extra-genesis keypair lists. *)
let all_keypairs { block_producer_keypairs; extra_genesis_keypairs; _ } =
  List.concat [ block_producer_keypairs; extra_genesis_keypairs ]
let block_producer_keypairs { block_producer_keypairs; _ } =
  block_producer_keypairs

let extra_genesis_keypairs { extra_genesis_keypairs; _ } =
  extra_genesis_keypairs

(* Looks a node up by its kubernetes pod id, if tracked. *)
let lookup_node_by_pod_id t = Map.find t.nodes_by_pod_id

(* All kubernetes pod ids tracked by the framework. *)
let all_pod_ids t = Map.keys t.nodes_by_pod_id
(* Waits until every pod in [network] reports phase "Running".  Polls
   `kubectl get pods` every [poll_interval], retrying up to [max_polls]
   times; returns a hard error if pods are missing from the namespace or
   never become ready. *)
let initialize_infra ~logger network =
  let open Malleable_error.Let_syntax in
  let poll_interval = Time.Span.of_sec 15.0 in
  (* NOTE(review): this binding was garbled in the source (only the comment
     "10 mins" survived); 40 is reconstructed from 40 * 15 s = 10 mins —
     TODO confirm the original value upstream *)
  let max_polls = 40 (* 10 mins *) in
  (* Pod ids of every pod kubernetes is expected to schedule. *)
  let all_pods_set =
    all_pods network
    |> List.map ~f:(fun { pod_id; _ } -> pod_id)
    |> String.Set.of_list
  in
  let kube_get_pods () =
    Integration_test_lib.Util.run_cmd_or_error_timeout ~timeout_seconds:60 "/"
      "kubectl"
      [ "-n"
      ; network.namespace
      ; "get"
      ; "pods"
      ; "-ojsonpath={range \
         .items[*]}{.metadata.name}{':'}{.status.phase}{'\\n'}{end}"
      ]
  in
  (* Parses "name:phase" lines into a map from pod name to phase, keeping
     only pods we track. *)
  let parse_pod_statuses result_str =
    result_str |> String.split_lines
    |> List.map ~f:(fun line ->
           let parts = String.split line ~on:':' in
           assert (Mina_stdlib.List.Length.Compare.(parts = 2)) ;
           (List.nth_exn parts 0, List.nth_exn parts 1) )
    |> List.filter ~f:(fun (pod_name, _) ->
           String.Set.mem all_pods_set pod_name )
    (* this filters out the archive bootstrap pods, since they aren't in
       all_pods_set. in fact the bootstrap pods aren't tracked at all in the
       framework *)
    |> String.Map.of_alist_exn
  in
  (* [poll n] retries until every tracked pod is present and "Running", or
     [n] attempts are exhausted.  kubectl timeouts do not consume a retry. *)
  let rec poll n =
    [%log debug] "Checking kubernetes pod statuses, n=%d" n ;
    let is_successful_pod_status status = String.equal "Running" status in
    match%bind Deferred.bind ~f:Malleable_error.return (kube_get_pods ()) with
    | Ok str ->
        let pod_statuses = parse_pod_statuses str in
        [%log debug] "pod_statuses: \n %s"
          ( String.Map.to_alist pod_statuses
          |> List.map ~f:(fun (key, data) -> key ^ ": " ^ data ^ "\n")
          |> String.concat ) ;
        [%log debug] "all_pods: \n %s"
          (String.Set.elements all_pods_set |> String.concat ~sep:", ") ;
        let all_pods_are_present =
          List.for_all (String.Set.elements all_pods_set) ~f:(fun pod_id ->
              String.Map.mem pod_statuses pod_id )
        in
        let any_pods_are_not_running =
          (* there could be duplicate keys... *)
          List.exists
            (String.Map.data pod_statuses)
            ~f:(Fn.compose not is_successful_pod_status)
        in
        (* Missing pods indicate a deployment error and are never retried. *)
        if not all_pods_are_present then (
          let present_pods = String.Map.keys pod_statuses in
          [%log fatal]
            "Not all pods were found when querying namespace; this indicates a \
             deployment error. Refusing to continue. \n\
             Expected pods: [%s]. \n\
             Present pods: [%s]"
            (String.Set.elements all_pods_set |> String.concat ~sep:"; ")
            (present_pods |> String.concat ~sep:"; ") ;
          Malleable_error.hard_error_string ~exit_code:5
            "Some pods were not found in namespace." )
        else if any_pods_are_not_running then
          let failed_pod_statuses =
            List.filter (String.Map.to_alist pod_statuses)
              ~f:(fun (_, status) -> not (is_successful_pod_status status))
          in
          if n > 0 then (
            (* fix: balanced the parenthesis in this log message *)
            [%log debug]
              "Got bad pod statuses, polling again ($failed_statuses)"
              ~metadata:
                [ ( "failed_statuses"
                  , `Assoc
                      (List.Assoc.map failed_pod_statuses ~f:(fun v ->
                           `String v ) ) )
                ] ;
            let%bind () =
              after poll_interval |> Deferred.bind ~f:Malleable_error.return
            in
            poll (n - 1) )
          else (
            (* fix: balanced the parenthesis in this log message *)
            [%log fatal]
              "Got bad pod statuses, not all pods were assigned to nodes and \
               ready in time. pod statuses: ($failed_statuses)"
              ~metadata:
                [ ( "failed_statuses"
                  , `Assoc
                      (List.Assoc.map failed_pod_statuses ~f:(fun v ->
                           `String v ) ) )
                ] ;
            Malleable_error.hard_error_string ~exit_code:4
              "Some pods either were not assigned to nodes or did not deploy \
               properly." )
        else return ()
    | Error _ ->
        [%log debug] "`kubectl get pods` timed out, polling again" ;
        let%bind () =
          after poll_interval |> Deferred.bind ~f:Malleable_error.return
        in
        poll n
  in
  [%log info] "Waiting for pods to be assigned nodes and become ready" ;
  let res = poll max_polls in
  match%bind.Deferred res with
  | Error _ ->
      [%log error] "Not all pods were assigned nodes, cannot proceed!" ;
      res
  | Ok _ ->
      [%log info] "Pods assigned to nodes" ;
      res
| null | https://raw.githubusercontent.com/MinaProtocol/mina/9a97ea71909a802f2ade0305a8069f7cbace5619/src/lib/integration_test_cloud_engine/kubernetes_network.ml | ocaml | exclude from bisect_ppx to avoid type error on GraphQL modules
this is going to be probably either "mina" or "worker"
archive pods have a "mina" container and an "archive" container alongside
TODO: temporary version
this function will repeatedly attempt to connect to graphql port <num_tries> times before giving up
if we expect failure, might want retry_on_graphql_error to be false
this function won't work if `t` doesn't happen to be an archive node
kubectl logs may include non-log output, like "Using password from environment variable"
all_nodes returns all *actual* mina nodes; note that a snark_worker is a pod within the network but not technically a mina node, therefore not included here. snark coordinators on the other hand ARE mina nodes
all_pods returns everything in the network. remember that snark_workers will never initialize and will never sync, and aren't supposed to
all_non_seed_pods returns everything in the network except seed nodes
this filters out the archive bootstrap pods, since they aren't in all_pods_set. in fact the bootstrap pods aren't tracked at all in the framework
there could be duplicate keys... | open Core_kernel
open Async
open Integration_test_lib
open Mina_transaction
[@@@coverage exclude_file]
let mina_archive_container_id = "archive"
let mina_archive_username = "mina"
let mina_archive_pw = "zo3moong7moog4Iep7eNgo3iecaesahH"
let postgres_url =
Printf.sprintf "postgres:%s@archive-1-postgresql:5432/archive"
mina_archive_username mina_archive_pw
let node_password = "naughty blue worm"
type config =
{ testnet_name : string
; cluster : string
; namespace : string
; graphql_enabled : bool
}
let base_kube_args { cluster; namespace; _ } =
[ "--cluster"; cluster; "--namespace"; namespace ]
module Node = struct
type pod_info =
{ network_keypair : Network_keypair.t option
; primary_container_id : string
; has_archive_container : bool
}
type t =
{ app_id : string; pod_id : string; pod_info : pod_info; config : config }
let id { pod_id; _ } = pod_id
let network_keypair { pod_info = { network_keypair; _ }; _ } = network_keypair
let base_kube_args t = [ "--cluster"; t.cluster; "--namespace"; t.namespace ]
let get_logs_in_container ?container_id { pod_id; config; pod_info; _ } =
let container_id =
Option.value container_id ~default:pod_info.primary_container_id
in
let%bind cwd = Unix.getcwd () in
Integration_test_lib.Util.run_cmd_or_hard_error ~exit_code:13 cwd "kubectl"
(base_kube_args config @ [ "logs"; "-c"; container_id; pod_id ])
let run_in_container ?(exit_code = 10) ?container_id ?override_with_pod_id
~cmd t =
let { config; pod_info; _ } = t in
let pod_id =
match override_with_pod_id with Some pid -> pid | None -> t.pod_id
in
let container_id =
Option.value container_id ~default:pod_info.primary_container_id
in
let%bind cwd = Unix.getcwd () in
Integration_test_lib.Util.run_cmd_or_hard_error ~exit_code cwd "kubectl"
( base_kube_args config
@ [ "exec"; "-c"; container_id; "-i"; pod_id; "--" ]
@ cmd )
let cp_string_to_container_file ?container_id ~str ~dest t =
let { pod_id; config; pod_info; _ } = t in
let container_id =
Option.value container_id ~default:pod_info.primary_container_id
in
let tmp_file, oc =
Caml.Filename.open_temp_file ~temp_dir:Filename.temp_dir_name
"integration_test_cp_string" ".tmp"
in
Out_channel.output_string oc str ;
Out_channel.close oc ;
let%bind cwd = Unix.getcwd () in
let dest_file = sprintf "%s/%s:%s" config.namespace pod_id dest in
Integration_test_lib.Util.run_cmd_or_error cwd "kubectl"
(base_kube_args config @ [ "cp"; "-c"; container_id; tmp_file; dest_file ])
let start ~fresh_state node : unit Malleable_error.t =
let open Malleable_error.Let_syntax in
let%bind () =
if fresh_state then
run_in_container node ~cmd:[ "sh"; "-c"; "rm -rf .mina-config/*" ]
>>| ignore
else Malleable_error.return ()
in
run_in_container ~exit_code:11 node ~cmd:[ "/start.sh" ] >>| ignore
let stop node =
let open Malleable_error.Let_syntax in
run_in_container ~exit_code:12 node ~cmd:[ "/stop.sh" ] >>| ignore
let logger_metadata node =
[ ("namespace", `String node.config.namespace)
; ("app_id", `String node.app_id)
; ("pod_id", `String node.pod_id)
]
module Scalars = Graphql_lib.Scalars
module Graphql = struct
let ingress_uri node =
let host =
sprintf "%s.graphql.test.o1test.net" node.config.testnet_name
in
let path = sprintf "/%s/graphql" node.app_id in
Uri.make ~scheme:"http" ~host ~path ~port:80 ()
module Client = Graphql_lib.Client.Make (struct
let preprocess_variables_string = Fn.id
let headers = String.Map.empty
end)
graphql_ppx uses symbols instead of Base
open Stdlib
module Encoders = Mina_graphql.Types.Input
module Unlock_account =
[%graphql
({|
mutation ($password: String!, $public_key: PublicKey!) @encoders(module: "Encoders"){
unlockAccount(input: {password: $password, publicKey: $public_key }) {
account {
public_key: publicKey
}
}
}
|}
[@encoders Encoders] )]
module Send_test_payments =
[%graphql
{|
mutation ($senders: [PrivateKey!]!,
$receiver: PublicKey!,
$amount: UInt64!,
$fee: UInt64!,
$repeat_count: UInt32!,
$repeat_delay_ms: UInt32!) @encoders(module: "Encoders"){
sendTestPayments(
senders: $senders, receiver: $receiver, amount: $amount, fee: $fee,
repeat_count: $repeat_count,
repeat_delay_ms: $repeat_delay_ms)
}
|}]
module Send_payment =
[%graphql
{|
mutation ($input: SendPaymentInput!)@encoders(module: "Encoders"){
sendPayment(input: $input){
payment {
id
nonce
hash
}
}
}
|}]
module Send_payment_with_raw_sig =
[%graphql
{|
mutation (
$input:SendPaymentInput!,
$rawSignature: String!
)@encoders(module: "Encoders")
{
sendPayment(
input:$input,
signature: {rawSignature: $rawSignature}
)
{
payment {
id
nonce
hash
}
}
}
|}]
module Send_delegation =
[%graphql
{|
mutation ($input: SendDelegationInput!) @encoders(module: "Encoders"){
sendDelegation(input:$input){
delegation {
id
nonce
hash
}
}
}
|}]
module Send_test_zkapp = Generated_graphql_queries.Send_test_zkapp
module Pooled_zkapp_commands =
Generated_graphql_queries.Pooled_zkapp_commands
module Query_peer_id =
[%graphql
{|
query {
daemonStatus {
addrsAndPorts {
peer {
peerId
}
}
peers { peerId }
}
}
|}]
module Best_chain =
[%graphql
{|
query ($max_length: Int) @encoders(module: "Encoders"){
bestChain (maxLength: $max_length) {
stateHash @ppxCustom(module: "Graphql_lib.Scalars.String_json")
commandTransactionCount
creatorAccount {
publicKey @ppxCustom(module: "Graphql_lib.Scalars.JSON")
}
}
}
|}]
module Query_metrics =
[%graphql
{|
query {
daemonStatus {
metrics {
blockProductionDelay
transactionPoolDiffReceived
transactionPoolDiffBroadcasted
transactionsAddedToPool
transactionPoolSize
}
}
}
|}]
module Account =
[%graphql
{|
query ($public_key: PublicKey!, $token: UInt64) {
account (publicKey : $public_key, token : $token) {
balance { liquid
locked
total
}
delegate
nonce
permissions { editActionState
editState
incrementNonce
receive
send
access
setDelegate
setPermissions
setZkappUri
setTokenSymbol
setVerificationKey
setVotingFor
setTiming
}
actionState
zkappState
zkappUri
timing { cliffTime @ppxCustom(module: "Graphql_lib.Scalars.JSON")
cliffAmount
vestingPeriod @ppxCustom(module: "Graphql_lib.Scalars.JSON")
vestingIncrement
initialMinimumBalance
}
token
tokenSymbol
verificationKey { verificationKey
hash
}
votingFor
}
}
|}]
end
let exec_graphql_request ?(num_tries = 10) ?(retry_delay_sec = 30.0)
?(initial_delay_sec = 0.) ~logger ~node ~query_name query_obj =
let open Deferred.Let_syntax in
if not node.config.graphql_enabled then
Deferred.Or_error.error_string
"graphql is not enabled (hint: set `requires_graphql= true` in the \
test config)"
else
let uri = Graphql.ingress_uri node in
let metadata =
[ ("query", `String query_name)
; ("uri", `String (Uri.to_string uri))
; ("init_delay", `Float initial_delay_sec)
]
in
[%log info]
"Attempting to send GraphQL request \"$query\" to \"$uri\" after \
$init_delay sec"
~metadata ;
let rec retry n =
if n <= 0 then (
[%log error]
"GraphQL request \"$query\" to \"$uri\" failed too many times"
~metadata ;
Deferred.Or_error.errorf
"GraphQL \"%s\" to \"%s\" request failed too many times" query_name
(Uri.to_string uri) )
else
match%bind Graphql.Client.query query_obj uri with
| Ok result ->
[%log info] "GraphQL request \"$query\" to \"$uri\" succeeded"
~metadata ;
Deferred.Or_error.return result
| Error (`Failed_request err_string) ->
[%log warn]
"GraphQL request \"$query\" to \"$uri\" failed: \"$error\" \
($num_tries attempts left)"
~metadata:
( metadata
@ [ ("error", `String err_string)
; ("num_tries", `Int (n - 1))
] ) ;
let%bind () = after (Time.Span.of_sec retry_delay_sec) in
retry (n - 1)
| Error (`Graphql_error err_string) ->
[%log error]
"GraphQL request \"$query\" to \"$uri\" returned an error: \
\"$error\" (this is a graphql error so not retrying)"
~metadata:(metadata @ [ ("error", `String err_string) ]) ;
Deferred.Or_error.error_string err_string
in
let%bind () = after (Time.Span.of_sec initial_delay_sec) in
retry num_tries
let get_peer_id ~logger t =
let open Deferred.Or_error.Let_syntax in
[%log info] "Getting node's peer_id, and the peer_ids of node's peers"
~metadata:(logger_metadata t) ;
let query_obj = Graphql.Query_peer_id.(make @@ makeVariables ()) in
let%bind query_result_obj =
exec_graphql_request ~logger ~node:t ~query_name:"query_peer_id" query_obj
in
[%log info] "get_peer_id, finished exec_graphql_request" ;
let self_id_obj = query_result_obj.daemonStatus.addrsAndPorts.peer in
let%bind self_id =
match self_id_obj with
| None ->
Deferred.Or_error.error_string "Peer not found"
| Some peer ->
return peer.peerId
in
let peers = query_result_obj.daemonStatus.peers |> Array.to_list in
let peer_ids = List.map peers ~f:(fun peer -> peer.peerId) in
[%log info] "get_peer_id, result of graphql query (self_id,[peers]) (%s,%s)"
self_id
(String.concat ~sep:" " peer_ids) ;
return (self_id, peer_ids)
let must_get_peer_id ~logger t =
get_peer_id ~logger t |> Deferred.bind ~f:Malleable_error.or_hard_error
let get_best_chain ?max_length ~logger t =
let open Deferred.Or_error.Let_syntax in
let query = Graphql.Best_chain.(make @@ makeVariables ?max_length ()) in
let%bind result =
exec_graphql_request ~logger ~node:t ~query_name:"best_chain" query
in
match result.bestChain with
| None | Some [||] ->
Deferred.Or_error.error_string "failed to get best chains"
| Some chain ->
return
@@ List.map
~f:(fun block ->
Intf.
{ state_hash = block.stateHash
; command_transaction_count = block.commandTransactionCount
; creator_pk =
( match block.creatorAccount.publicKey with
| `String pk ->
pk
| _ ->
"unknown" )
} )
(Array.to_list chain)
let must_get_best_chain ?max_length ~logger t =
get_best_chain ?max_length ~logger t
|> Deferred.bind ~f:Malleable_error.or_hard_error
let get_account ~logger t ~account_id =
let pk = Mina_base.Account_id.public_key account_id in
let token = Mina_base.Account_id.token_id account_id in
[%log info] "Getting account"
~metadata:
( ("pub_key", Signature_lib.Public_key.Compressed.to_yojson pk)
:: logger_metadata t ) ;
let get_account_obj =
Graphql.Account.(
make
@@ makeVariables
~public_key:(Graphql_lib.Encoders.public_key pk)
~token:(Graphql_lib.Encoders.token token)
())
in
exec_graphql_request ~logger ~node:t ~query_name:"get_account_graphql"
get_account_obj
type account_data =
{ nonce : Mina_numbers.Account_nonce.t
; total_balance : Currency.Balance.t
; liquid_balance_opt : Currency.Balance.t option
; locked_balance_opt : Currency.Balance.t option
}
let get_account_data ~logger t ~account_id =
let open Deferred.Or_error.Let_syntax in
let public_key = Mina_base.Account_id.public_key account_id in
let token = Mina_base.Account_id.token_id account_id in
[%log info] "Getting account data, which is its balances and nonce"
~metadata:
( ("pub_key", Signature_lib.Public_key.Compressed.to_yojson public_key)
:: logger_metadata t ) ;
let%bind account_obj = get_account ~logger t ~account_id in
match account_obj.account with
| None ->
Deferred.Or_error.errorf
!"Account with Account id %{sexp:Mina_base.Account_id.t}, public_key \
%s, and token %s not found"
account_id
(Signature_lib.Public_key.Compressed.to_string public_key)
(Mina_base.Token_id.to_string token)
| Some acc ->
return
{ nonce =
Option.value_exn
~message:
"the nonce from get_balance is None, which should be \
impossible"
acc.nonce
; total_balance = acc.balance.total
; liquid_balance_opt = acc.balance.liquid
; locked_balance_opt = acc.balance.locked
}
let must_get_account_data ~logger t ~account_id =
get_account_data ~logger t ~account_id
|> Deferred.bind ~f:Malleable_error.or_hard_error
let permissions_of_account_permissions account_permissions :
Mina_base.Permissions.t =
the polymorphic variants come from Partial_accounts.auth_required in Mina_graphql
let to_auth_required = function
| `Either ->
Mina_base.Permissions.Auth_required.Either
| `Impossible ->
Impossible
| `None ->
None
| `Proof ->
Proof
| `Signature ->
Signature
in
let open Graphql.Account in
{ edit_action_state = to_auth_required account_permissions.editActionState
; edit_state = to_auth_required account_permissions.editState
; increment_nonce = to_auth_required account_permissions.incrementNonce
; receive = to_auth_required account_permissions.receive
; send = to_auth_required account_permissions.send
; access = to_auth_required account_permissions.access
; set_delegate = to_auth_required account_permissions.setDelegate
; set_permissions = to_auth_required account_permissions.setPermissions
; set_zkapp_uri = to_auth_required account_permissions.setZkappUri
; set_token_symbol = to_auth_required account_permissions.setTokenSymbol
; set_verification_key =
to_auth_required account_permissions.setVerificationKey
; set_voting_for = to_auth_required account_permissions.setVotingFor
; set_timing = to_auth_required account_permissions.setTiming
}
let graphql_uri node = Graphql.ingress_uri node |> Uri.to_string
let get_account_permissions ~logger t ~account_id =
let open Deferred.Or_error in
let open Let_syntax in
let%bind account_obj = get_account ~logger t ~account_id in
match account_obj.account with
| Some account -> (
match account.permissions with
| Some ledger_permissions ->
return @@ permissions_of_account_permissions ledger_permissions
| None ->
fail
(Error.of_string "Could not get permissions from ledger account")
)
| None ->
fail (Error.of_string "Could not get account from ledger")
return a Account_update . Update.t with all fields ` Set ` to the
value in the account , or ` Keep ` if value unavailable ,
as if this update had been applied to the account
value in the account, or `Keep` if value unavailable,
as if this update had been applied to the account
*)
let get_account_update ~logger t ~account_id =
let open Deferred.Or_error in
let open Let_syntax in
let%bind account_obj = get_account ~logger t ~account_id in
match account_obj.account with
| Some account ->
let open Mina_base.Zkapp_basic.Set_or_keep in
let%bind app_state =
match account.zkappState with
| Some strs ->
let fields =
Array.to_list strs |> Base.List.map ~f:(fun s -> Set s)
in
return (Mina_base.Zkapp_state.V.of_list_exn fields)
| None ->
fail
(Error.of_string
(sprintf
"Expected zkApp account with an app state for public key \
%s"
(Signature_lib.Public_key.Compressed.to_base58_check
(Mina_base.Account_id.public_key account_id) ) ) )
in
let%bind delegate =
match account.delegate with
| Some s ->
return (Set s)
| None ->
fail (Error.of_string "Expected delegate in account")
in
let%bind verification_key =
match account.verificationKey with
| Some vk_obj ->
let data = vk_obj.verificationKey in
let hash = vk_obj.hash in
return (Set ({ data; hash } : _ With_hash.t))
| None ->
fail
(Error.of_string
(sprintf
"Expected zkApp account with a verification key for \
public_key %s"
(Signature_lib.Public_key.Compressed.to_base58_check
(Mina_base.Account_id.public_key account_id) ) ) )
in
let%bind permissions =
match account.permissions with
| Some perms ->
return @@ Set (permissions_of_account_permissions perms)
| None ->
fail (Error.of_string "Expected permissions in account")
in
let%bind zkapp_uri =
match account.zkappUri with
| Some s ->
return @@ Set s
| None ->
fail (Error.of_string "Expected zkApp URI in account")
in
let%bind token_symbol =
match account.tokenSymbol with
| Some s ->
return @@ Set s
| None ->
fail (Error.of_string "Expected token symbol in account")
in
let%bind timing =
let timing = account.timing in
let cliff_amount = timing.cliffAmount in
let cliff_time = timing.cliffTime in
let vesting_period = timing.vestingPeriod in
let vesting_increment = timing.vestingIncrement in
let initial_minimum_balance = timing.initialMinimumBalance in
match
( cliff_amount
, cliff_time
, vesting_period
, vesting_increment
, initial_minimum_balance )
with
| None, None, None, None, None ->
return @@ Keep
| Some amt, Some tm, Some period, Some incr, Some bal ->
let cliff_amount = amt in
let%bind cliff_time =
match tm with
| `String s ->
return @@ Mina_numbers.Global_slot.of_string s
| _ ->
fail
(Error.of_string
"Expected string for cliff time in account timing" )
in
let%bind vesting_period =
match period with
| `String s ->
return @@ Mina_numbers.Global_slot.of_string s
| _ ->
fail
(Error.of_string
"Expected string for vesting period in account timing" )
in
let vesting_increment = incr in
let initial_minimum_balance = bal in
return
(Set
( { initial_minimum_balance
; cliff_amount
; cliff_time
; vesting_period
; vesting_increment
}
: Mina_base.Account_update.Update.Timing_info.t ) )
| _ ->
fail (Error.of_string "Some pieces of account timing are missing")
in
let%bind voting_for =
match account.votingFor with
| Some s ->
return @@ Set s
| None ->
fail (Error.of_string "Expected voting-for state hash in account")
in
return
( { app_state
; delegate
; verification_key
; permissions
; zkapp_uri
; token_symbol
; timing
; voting_for
}
: Mina_base.Account_update.Update.t )
| None ->
fail (Error.of_string "Could not get account from ledger")
type signed_command_result =
{ id : string
; hash : Transaction_hash.t
; nonce : Mina_numbers.Account_nonce.t
}
let transaction_id_to_string id =
Yojson.Basic.to_string (Graphql_lib.Scalars.TransactionId.serialize id)
let send_payment ~logger t ~sender_pub_key ~receiver_pub_key ~amount ~fee =
[%log info] "Sending a payment" ~metadata:(logger_metadata t) ;
let open Deferred.Or_error.Let_syntax in
let sender_pk_str =
Signature_lib.Public_key.Compressed.to_string sender_pub_key
in
[%log info] "send_payment: unlocking account"
~metadata:[ ("sender_pk", `String sender_pk_str) ] ;
let unlock_sender_account_graphql () =
let unlock_account_obj =
Graphql.Unlock_account.(
make
@@ makeVariables ~password:node_password ~public_key:sender_pub_key ())
in
exec_graphql_request ~logger ~node:t ~initial_delay_sec:0.
~query_name:"unlock_sender_account_graphql" unlock_account_obj
in
let%bind _unlock_acct_obj = unlock_sender_account_graphql () in
let send_payment_graphql () =
let input =
Mina_graphql.Types.Input.SendPaymentInput.make_input
~from:sender_pub_key ~to_:receiver_pub_key ~amount ~fee ()
in
let send_payment_obj =
Graphql.Send_payment.(make @@ makeVariables ~input ())
in
exec_graphql_request ~logger ~node:t ~query_name:"send_payment_graphql"
send_payment_obj
in
let%map sent_payment_obj = send_payment_graphql () in
let return_obj = sent_payment_obj.sendPayment.payment in
let res =
{ id = transaction_id_to_string return_obj.id
; hash = return_obj.hash
; nonce = Mina_numbers.Account_nonce.of_int return_obj.nonce
}
in
[%log info] "Sent payment"
~metadata:
[ ("user_command_id", `String res.id)
; ("hash", `String (Transaction_hash.to_base58_check res.hash))
; ("nonce", `Int (Mina_numbers.Account_nonce.to_int res.nonce))
] ;
res
let must_send_payment ~logger t ~sender_pub_key ~receiver_pub_key ~amount ~fee
=
send_payment ~logger t ~sender_pub_key ~receiver_pub_key ~amount ~fee
|> Deferred.bind ~f:Malleable_error.or_hard_error
let send_zkapp ~logger (t : t) ~(zkapp_command : Mina_base.Zkapp_command.t) =
[%log info] "Sending a zkapp"
~metadata:
[ ("namespace", `String t.config.namespace)
; ("pod_id", `String (id t))
] ;
let open Deferred.Or_error.Let_syntax in
let zkapp_command_json =
Mina_base.Zkapp_command.to_json zkapp_command |> Yojson.Safe.to_basic
in
let send_zkapp_graphql () =
let send_zkapp_obj =
Graphql.Send_test_zkapp.(
make @@ makeVariables ~zkapp_command:zkapp_command_json ())
in
exec_graphql_request ~logger ~node:t ~query_name:"send_zkapp_graphql"
send_zkapp_obj
in
let%bind sent_zkapp_obj = send_zkapp_graphql () in
let%bind () =
match sent_zkapp_obj.internalSendZkapp.zkapp.failureReason with
| None ->
return ()
| Some s ->
Deferred.Or_error.errorf "Zkapp failed, reason: %s"
( Array.fold ~init:[] s ~f:(fun acc f ->
match f with
| None ->
acc
| Some f ->
let t =
( Option.value_exn f.index
, f.failures |> Array.to_list |> List.rev )
in
t :: acc )
|> Mina_base.Transaction_status.Failure.Collection.Display.to_yojson
|> Yojson.Safe.to_string )
in
let zkapp_id =
transaction_id_to_string sent_zkapp_obj.internalSendZkapp.zkapp.id
in
[%log info] "Sent zkapp" ~metadata:[ ("zkapp_id", `String zkapp_id) ] ;
return zkapp_id
let get_pooled_zkapp_commands ~logger (t : t)
~(pk : Signature_lib.Public_key.Compressed.t) =
[%log info] "Retrieving zkapp_commands from transaction pool"
~metadata:
[ ("namespace", `String t.config.namespace)
; ("pod_id", `String (id t))
; ("pub_key", Signature_lib.Public_key.Compressed.to_yojson pk)
] ;
let open Deferred.Or_error.Let_syntax in
let get_pooled_zkapp_commands_graphql () =
let get_pooled_zkapp_commands =
Graphql.Pooled_zkapp_commands.(
make
@@ makeVariables ~public_key:(Graphql_lib.Encoders.public_key pk) ())
in
exec_graphql_request ~logger ~node:t
~query_name:"get_pooled_zkapp_commands" get_pooled_zkapp_commands
in
let%bind zkapp_pool_obj = get_pooled_zkapp_commands_graphql () in
let%bind () =
match zkapp_pool_obj.pooledZkappCommands with
| [||] ->
return ()
| zkapp_commands ->
Deferred.Or_error.errorf "Zkapp failed, reasons: %s"
( Array.fold ~init:[] zkapp_commands
~f:(fun failures zkapp_command ->
match zkapp_command.failureReason with
| None ->
failures
| Some f ->
let inner_failures =
Array.fold ~init:[] f ~f:(fun failures failure ->
match failure with
| None ->
failures
| Some f ->
( Option.value_exn f.index
, f.failures |> Array.to_list |> List.rev )
:: failures )
in
List.map inner_failures ~f:(fun f -> f :: failures)
|> List.concat )
|> Mina_base.Transaction_status.Failure.Collection.Display.to_yojson
|> Yojson.Safe.to_string )
in
let transaction_ids =
Array.map zkapp_pool_obj.pooledZkappCommands ~f:(fun zkapp_command ->
zkapp_command.id |> Transaction_id.to_base64 )
|> Array.to_list
in
[%log info] "Retrieved zkapp_commands from transaction pool"
~metadata:
[ ("namespace", `String t.config.namespace)
; ("pod_id", `String (id t))
; ( "transaction ids"
, `List (List.map ~f:(fun t -> `String t) transaction_ids) )
] ;
return transaction_ids
let send_delegation ~logger t ~sender_pub_key ~receiver_pub_key ~fee =
[%log info] "Sending stake delegation" ~metadata:(logger_metadata t) ;
let open Deferred.Or_error.Let_syntax in
let sender_pk_str =
Signature_lib.Public_key.Compressed.to_string sender_pub_key
in
[%log info] "send_delegation: unlocking account"
~metadata:[ ("sender_pk", `String sender_pk_str) ] ;
let unlock_sender_account_graphql () =
let unlock_account_obj =
Graphql.Unlock_account.(
make
@@ makeVariables ~password:"naughty blue worm"
~public_key:sender_pub_key ())
in
exec_graphql_request ~logger ~node:t
~query_name:"unlock_sender_account_graphql" unlock_account_obj
in
let%bind _ = unlock_sender_account_graphql () in
let send_delegation_graphql () =
let input =
Mina_graphql.Types.Input.SendDelegationInput.make_input
~from:sender_pub_key ~to_:receiver_pub_key ~fee ()
in
let send_delegation_obj =
Graphql.Send_delegation.(make @@ makeVariables ~input ())
in
exec_graphql_request ~logger ~node:t ~query_name:"send_delegation_graphql"
send_delegation_obj
in
let%map result_obj = send_delegation_graphql () in
let return_obj = result_obj.sendDelegation.delegation in
let res =
{ id = transaction_id_to_string return_obj.id
; hash = return_obj.hash
; nonce = Mina_numbers.Account_nonce.of_int return_obj.nonce
}
in
[%log info] "stake delegation sent"
~metadata:
[ ("user_command_id", `String res.id)
; ("hash", `String (Transaction_hash.to_base58_check res.hash))
; ("nonce", `Int (Mina_numbers.Account_nonce.to_int res.nonce))
] ;
res
let send_payment_with_raw_sig ~logger t ~sender_pub_key ~receiver_pub_key
~amount ~fee ~nonce ~memo ~(valid_until : Mina_numbers.Global_slot.t)
~raw_signature =
[%log info] "Sending a payment with raw signature"
~metadata:(logger_metadata t) ;
let open Deferred.Or_error.Let_syntax in
let send_payment_graphql () =
let open Graphql.Send_payment_with_raw_sig in
let input =
Mina_graphql.Types.Input.SendPaymentInput.make_input
~from:sender_pub_key ~to_:receiver_pub_key ~amount ~fee ~memo ~nonce
~valid_until:(Mina_numbers.Global_slot.to_uint32 valid_until)
()
in
let variables = makeVariables ~input ~rawSignature:raw_signature () in
let send_payment_obj = make variables in
let variables_json_basic =
variablesToJson (serializeVariables variables)
in
An awkward conversion from Yojson . Basic to Yojson . Safe
let variables_json =
Yojson.Basic.to_string variables_json_basic |> Yojson.Safe.from_string
in
[%log info] "send_payment_obj with $variables "
~metadata:[ ("variables", variables_json) ] ;
exec_graphql_request ~logger ~node:t
~query_name:"Send_payment_with_raw_sig_graphql" send_payment_obj
in
let%map sent_payment_obj = send_payment_graphql () in
let return_obj = sent_payment_obj.sendPayment.payment in
let res =
{ id = transaction_id_to_string return_obj.id
; hash = return_obj.hash
; nonce = Mina_numbers.Account_nonce.of_int return_obj.nonce
}
in
[%log info] "Sent payment"
~metadata:
[ ("user_command_id", `String res.id)
; ("hash", `String (Transaction_hash.to_base58_check res.hash))
; ("nonce", `Int (Mina_numbers.Account_nonce.to_int res.nonce))
] ;
res
let must_send_payment_with_raw_sig ~logger t ~sender_pub_key ~receiver_pub_key
~amount ~fee ~nonce ~memo ~valid_until ~raw_signature =
send_payment_with_raw_sig ~logger t ~sender_pub_key ~receiver_pub_key
~amount ~fee ~nonce ~memo ~valid_until ~raw_signature
|> Deferred.bind ~f:Malleable_error.or_hard_error
let must_send_delegation ~logger t ~sender_pub_key ~receiver_pub_key ~fee =
send_delegation ~logger t ~sender_pub_key ~receiver_pub_key ~fee
|> Deferred.bind ~f:Malleable_error.or_hard_error
let send_test_payments ~repeat_count ~repeat_delay_ms ~logger t ~senders
~receiver_pub_key ~amount ~fee =
[%log info] "Sending a series of test payments"
~metadata:(logger_metadata t) ;
let open Deferred.Or_error.Let_syntax in
let send_payment_graphql () =
let send_payment_obj =
Graphql.Send_test_payments.(
make
@@ makeVariables ~senders ~receiver:receiver_pub_key
~amount:(Currency.Amount.to_uint64 amount)
~fee:(Currency.Fee.to_uint64 fee)
~repeat_count ~repeat_delay_ms ())
in
exec_graphql_request ~logger ~node:t ~query_name:"send_payment_graphql"
send_payment_obj
in
let%map _ = send_payment_graphql () in
[%log info] "Sent test payments"
let must_send_test_payments ~repeat_count ~repeat_delay_ms ~logger t ~senders
~receiver_pub_key ~amount ~fee =
send_test_payments ~repeat_count ~repeat_delay_ms ~logger t ~senders
~receiver_pub_key ~amount ~fee
|> Deferred.bind ~f:Malleable_error.or_hard_error
let dump_archive_data ~logger (t : t) ~data_file =
if not t.pod_info.has_archive_container then
failwith
"No archive container found. One can only dump archive data of an \
archive node." ;
let open Malleable_error.Let_syntax in
let postgresql_pod_id = t.app_id ^ "-postgresql-0" in
let postgresql_container_id = "postgresql" in
Some quick clarification on the archive nodes :
An archive node archives all blocks as they come through , but does not produce blocks .
An archive node uses postgresql as storage , the postgresql db needs to be separately brought up and is sort of it 's own thing infra wise
Archive nodes can be run side - by - side with an actual mina node
in the integration test framework , every archive node will have it 's own single postgresql instance .
thus in the integration testing framework there will always be a one to one correspondence between archive node and postgresql db .
however more generally , it 's entirely possible for a mina user / operator set up multiple archive nodes to be backed by a single postgresql database .
But for now we will assume that we do n't need to test that
The integration test framework creates kubenetes deployments or " workloads " as they are called in GKE , but Nodes are mainly tracked by pod_id
A postgresql workload in the integration test framework will always have 1 managed pod ,
whose pod_id is simply the app id / workload name of the archive node appended with " -postgresql-0 " .
so if the archive node is called " archive-1 " , then the corresponding postgresql managed pod will be called " archive-1 - postgresql-0 " .
That managed pod will have exactly 1 container , and it will be called simply " postgresql "
It 's rather hardcoded but this was just the simplest way to go , as our kubernetes_network tracks Nodes , ie MINA nodes . a postgresql db is hard to account for
It 's possible to run pg_dump from the archive node instead of directly reaching out to the postgresql pod , and that 's what we used to do but there were occasionally version mismatches between the pg_dump on the archive node and the postgresql on the postgresql db
An archive node archives all blocks as they come through, but does not produce blocks.
An archive node uses postgresql as storage, the postgresql db needs to be separately brought up and is sort of it's own thing infra wise
Archive nodes can be run side-by-side with an actual mina node
in the integration test framework, every archive node will have it's own single postgresql instance.
thus in the integration testing framework there will always be a one to one correspondence between archive node and postgresql db.
however more generally, it's entirely possible for a mina user/operator set up multiple archive nodes to be backed by a single postgresql database.
But for now we will assume that we don't need to test that
The integration test framework creates kubenetes deployments or "workloads" as they are called in GKE, but Nodes are mainly tracked by pod_id
A postgresql workload in the integration test framework will always have 1 managed pod,
whose pod_id is simply the app id/workload name of the archive node appended with "-postgresql-0".
so if the archive node is called "archive-1", then the corresponding postgresql managed pod will be called "archive-1-postgresql-0".
That managed pod will have exactly 1 container, and it will be called simply "postgresql"
It's rather hardcoded but this was just the simplest way to go, as our kubernetes_network tracks Nodes, ie MINA nodes. a postgresql db is hard to account for
It's possible to run pg_dump from the archive node instead of directly reaching out to the postgresql pod, and that's what we used to do but there were occasionally version mismatches between the pg_dump on the archive node and the postgresql on the postgresql db
*)
[%log info] "Dumping archive data from (node: %s, container: %s)"
postgresql_pod_id postgresql_container_id ;
let%map data =
run_in_container t ~container_id:postgresql_container_id
~override_with_pod_id:postgresql_pod_id
~cmd:[ "pg_dump"; "--create"; "--no-owner"; postgres_url ]
in
[%log info] "Dumping archive data to file %s" data_file ;
Out_channel.with_file data_file ~f:(fun out_ch ->
Out_channel.output_string out_ch data )
let run_replayer ~logger (t : t) =
[%log info] "Running replayer on archived data (node: %s, container: %s)"
t.pod_id mina_archive_container_id ;
let open Malleable_error.Let_syntax in
let%bind accounts =
run_in_container t
~cmd:[ "jq"; "-c"; ".ledger.accounts"; "/config/daemon.json" ]
in
let replayer_input =
sprintf
{| { "genesis_ledger": { "accounts": %s, "add_genesis_winner": true }} |}
accounts
in
let dest = "replayer-input.json" in
let%bind _res =
Deferred.bind ~f:Malleable_error.return
(cp_string_to_container_file t ~container_id:mina_archive_container_id
~str:replayer_input ~dest )
in
run_in_container t ~container_id:mina_archive_container_id
~cmd:
[ "mina-replayer"
; "--archive-uri"
; postgres_url
; "--input-file"
; dest
; "--output-file"
; "/dev/null"
; "--continue-on-error"
]
let dump_mina_logs ~logger (t : t) ~log_file =
let open Malleable_error.Let_syntax in
[%log info] "Dumping container logs from (node: %s, container: %s)" t.pod_id
t.pod_info.primary_container_id ;
let%map logs = get_logs_in_container t in
[%log info] "Dumping container log to file %s" log_file ;
Out_channel.with_file log_file ~f:(fun out_ch ->
Out_channel.output_string out_ch logs )
let dump_precomputed_blocks ~logger (t : t) =
let open Malleable_error.Let_syntax in
[%log info]
"Dumping precomputed blocks from logs for (node: %s, container: %s)"
t.pod_id t.pod_info.primary_container_id ;
let%bind logs = get_logs_in_container t in
let log_lines =
String.split logs ~on:'\n'
|> List.filter ~f:(String.is_prefix ~prefix:"{\"timestamp\":")
in
let jsons = List.map log_lines ~f:Yojson.Safe.from_string in
let metadata_jsons =
List.map jsons ~f:(fun json ->
match json with
| `Assoc items -> (
match List.Assoc.find items ~equal:String.equal "metadata" with
| Some md ->
md
| None ->
failwithf "Log line is missing metadata: %s"
(Yojson.Safe.to_string json)
() )
| other ->
failwithf "Expected log line to be a JSON record, got: %s"
(Yojson.Safe.to_string other)
() )
in
let state_hash_and_blocks =
List.fold metadata_jsons ~init:[] ~f:(fun acc json ->
match json with
| `Assoc items -> (
match
List.Assoc.find items ~equal:String.equal "precomputed_block"
with
| Some block -> (
match
List.Assoc.find items ~equal:String.equal "state_hash"
with
| Some state_hash ->
(state_hash, block) :: acc
| None ->
failwith
"Log metadata contains a precomputed block, but no \
state hash" )
| None ->
acc )
| other ->
failwithf "Expected log line to be a JSON record, got: %s"
(Yojson.Safe.to_string other)
() )
in
let%bind.Deferred () =
Deferred.List.iter state_hash_and_blocks
~f:(fun (state_hash_json, block_json) ->
let double_quoted_state_hash =
Yojson.Safe.to_string state_hash_json
in
let state_hash =
String.sub double_quoted_state_hash ~pos:1
~len:(String.length double_quoted_state_hash - 2)
in
let block = Yojson.Safe.pretty_to_string block_json in
let filename = state_hash ^ ".json" in
match%map.Deferred Sys.file_exists filename with
| `Yes ->
[%log info]
"File already exists for precomputed block with state hash %s"
state_hash
| _ ->
[%log info]
"Dumping precomputed block with state hash %s to file %s"
state_hash filename ;
Out_channel.with_file filename ~f:(fun out_ch ->
Out_channel.output_string out_ch block ) )
in
Malleable_error.return ()
let get_metrics ~logger t =
let open Deferred.Or_error.Let_syntax in
[%log info] "Getting node's metrics" ~metadata:(logger_metadata t) ;
let query_obj = Graphql.Query_metrics.make () in
let%bind query_result_obj =
exec_graphql_request ~logger ~node:t ~query_name:"query_metrics" query_obj
in
[%log info] "get_metrics, finished exec_graphql_request" ;
let block_production_delay =
Array.to_list
@@ query_result_obj.daemonStatus.metrics.blockProductionDelay
in
let metrics = query_result_obj.daemonStatus.metrics in
let transaction_pool_diff_received = metrics.transactionPoolDiffReceived in
let transaction_pool_diff_broadcasted =
metrics.transactionPoolDiffBroadcasted
in
let transactions_added_to_pool = metrics.transactionsAddedToPool in
let transaction_pool_size = metrics.transactionPoolSize in
[%log info]
"get_metrics, result of graphql query (block_production_delay; \
tx_received; tx_broadcasted; txs_added_to_pool; tx_pool_size) (%s; %d; \
%d; %d; %d)"
( String.concat ~sep:", "
@@ List.map ~f:string_of_int block_production_delay )
transaction_pool_diff_received transaction_pool_diff_broadcasted
transactions_added_to_pool transaction_pool_size ;
return
Intf.
{ block_production_delay
; transaction_pool_diff_broadcasted
; transaction_pool_diff_received
; transactions_added_to_pool
; transaction_pool_size
}
end
module Workload_to_deploy = struct
type t = { workload_id : string; pod_info : Node.pod_info list }
let construct_workload workload_id pod_info : t = { workload_id; pod_info }
let cons_pod_info ?network_keypair ?(has_archive_container = false)
primary_container_id : Node.pod_info =
{ network_keypair; has_archive_container; primary_container_id }
let get_nodes_from_workload t ~config =
let%bind cwd = Unix.getcwd () in
let open Malleable_error.Let_syntax in
let%bind app_id =
Deferred.bind ~f:Malleable_error.or_hard_error
(Integration_test_lib.Util.run_cmd_or_error cwd "kubectl"
( base_kube_args config
@ [ "get"
; "deployment"
; t.workload_id
; "-o"
; "jsonpath={.spec.selector.matchLabels.app}"
] ) )
in
let%map pod_ids_str =
Integration_test_lib.Util.run_cmd_or_hard_error cwd "kubectl"
( base_kube_args config
@ [ "get"; "pod"; "-l"; "app=" ^ app_id; "-o"; "name" ] )
in
let pod_ids =
String.split pod_ids_str ~on:'\n'
|> List.filter ~f:(Fn.compose not String.is_empty)
|> List.map ~f:(String.substr_replace_first ~pattern:"pod/" ~with_:"")
in
if Stdlib.List.compare_lengths t.pod_info pod_ids <> 0 then
failwithf
"Unexpected number of replicas in kubernetes deployment for workload \
%s: expected %d, got %d"
t.workload_id (List.length t.pod_info) (List.length pod_ids) () ;
List.zip_exn t.pod_info pod_ids
|> List.map ~f:(fun (pod_info, pod_id) ->
{ Node.app_id; pod_id; pod_info; config } )
end
type t =
{ namespace : string
; constants : Test_config.constants
; seeds : Node.t list
; block_producers : Node.t list
; snark_coordinators : Node.t list
; snark_workers : Node.t list
; archive_nodes : Node.t list
; : . Keypair.t list
; block_producer_keypairs : Signature_lib.Keypair.t list
; extra_genesis_keypairs : Signature_lib.Keypair.t list
; nodes_by_pod_id : Node.t String.Map.t
}
let constants { constants; _ } = constants
let constraint_constants { constants; _ } = constants.constraints
let genesis_constants { constants; _ } = constants.genesis
let seeds { seeds; _ } = seeds
let block_producers { block_producers; _ } = block_producers
let snark_coordinators { snark_coordinators; _ } = snark_coordinators
let snark_workers { snark_workers; _ } = snark_workers
let archive_nodes { archive_nodes; _ } = archive_nodes
let all_nodes { seeds; block_producers; snark_coordinators; archive_nodes; _ } =
List.concat [ seeds; block_producers; snark_coordinators; archive_nodes ]
let all_pods
{ seeds
; block_producers
; snark_coordinators
; snark_workers
; archive_nodes
; _
} =
List.concat
[ seeds; block_producers; snark_coordinators; snark_workers; archive_nodes ]
let all_non_seed_pods
{ block_producers; snark_coordinators; snark_workers; archive_nodes; _ } =
List.concat
[ block_producers; snark_coordinators; snark_workers; archive_nodes ]
let all_keypairs { block_producer_keypairs; extra_genesis_keypairs; _ } =
block_producer_keypairs @ extra_genesis_keypairs
let block_producer_keypairs { block_producer_keypairs; _ } =
block_producer_keypairs
let extra_genesis_keypairs { extra_genesis_keypairs; _ } =
extra_genesis_keypairs
let lookup_node_by_pod_id t = Map.find t.nodes_by_pod_id
let all_pod_ids t = Map.keys t.nodes_by_pod_id
let initialize_infra ~logger network =
let open Malleable_error.Let_syntax in
let poll_interval = Time.Span.of_sec 15.0 in
10 mins
let all_pods_set =
all_pods network
|> List.map ~f:(fun { pod_id; _ } -> pod_id)
|> String.Set.of_list
in
let kube_get_pods () =
Integration_test_lib.Util.run_cmd_or_error_timeout ~timeout_seconds:60 "/"
"kubectl"
[ "-n"
; network.namespace
; "get"
; "pods"
; "-ojsonpath={range \
.items[*]}{.metadata.name}{':'}{.status.phase}{'\\n'}{end}"
]
in
let parse_pod_statuses result_str =
result_str |> String.split_lines
|> List.map ~f:(fun line ->
let parts = String.split line ~on:':' in
assert (Mina_stdlib.List.Length.Compare.(parts = 2)) ;
(List.nth_exn parts 0, List.nth_exn parts 1) )
|> List.filter ~f:(fun (pod_name, _) ->
String.Set.mem all_pods_set pod_name )
|> String.Map.of_alist_exn
in
let rec poll n =
[%log debug] "Checking kubernetes pod statuses, n=%d" n ;
let is_successful_pod_status status = String.equal "Running" status in
match%bind Deferred.bind ~f:Malleable_error.return (kube_get_pods ()) with
| Ok str ->
let pod_statuses = parse_pod_statuses str in
[%log debug] "pod_statuses: \n %s"
( String.Map.to_alist pod_statuses
|> List.map ~f:(fun (key, data) -> key ^ ": " ^ data ^ "\n")
|> String.concat ) ;
[%log debug] "all_pods: \n %s"
(String.Set.elements all_pods_set |> String.concat ~sep:", ") ;
let all_pods_are_present =
List.for_all (String.Set.elements all_pods_set) ~f:(fun pod_id ->
String.Map.mem pod_statuses pod_id )
in
let any_pods_are_not_running =
List.exists
(String.Map.data pod_statuses)
~f:(Fn.compose not is_successful_pod_status)
in
if not all_pods_are_present then (
let present_pods = String.Map.keys pod_statuses in
[%log fatal]
"Not all pods were found when querying namespace; this indicates a \
deployment error. Refusing to continue. \n\
Expected pods: [%s]. \n\
Present pods: [%s]"
(String.Set.elements all_pods_set |> String.concat ~sep:"; ")
(present_pods |> String.concat ~sep:"; ") ;
Malleable_error.hard_error_string ~exit_code:5
"Some pods were not found in namespace." )
else if any_pods_are_not_running then
let failed_pod_statuses =
List.filter (String.Map.to_alist pod_statuses)
~f:(fun (_, status) -> not (is_successful_pod_status status))
in
if n > 0 then (
[%log debug] "Got bad pod statuses, polling again ($failed_statuses"
~metadata:
[ ( "failed_statuses"
, `Assoc
(List.Assoc.map failed_pod_statuses ~f:(fun v ->
`String v ) ) )
] ;
let%bind () =
after poll_interval |> Deferred.bind ~f:Malleable_error.return
in
poll (n - 1) )
else (
[%log fatal]
"Got bad pod statuses, not all pods were assigned to nodes and \
ready in time. pod statuses: ($failed_statuses"
~metadata:
[ ( "failed_statuses"
, `Assoc
(List.Assoc.map failed_pod_statuses ~f:(fun v ->
`String v ) ) )
] ;
Malleable_error.hard_error_string ~exit_code:4
"Some pods either were not assigned to nodes or did not deploy \
properly." )
else return ()
| Error _ ->
[%log debug] "`kubectl get pods` timed out, polling again" ;
let%bind () =
after poll_interval |> Deferred.bind ~f:Malleable_error.return
in
poll n
in
[%log info] "Waiting for pods to be assigned nodes and become ready" ;
let res = poll max_polls in
match%bind.Deferred res with
| Error _ ->
[%log error] "Not all pods were assigned nodes, cannot proceed!" ;
res
| Ok _ ->
[%log info] "Pods assigned to nodes" ;
res
|
119dca0fcf4a57703d95ebae3a4bc520ebe19db128c345e29609dc493ba73fae | lillo/compiler-course-unipi | json.ml | type value =
| Obj of (string * value) list
| List of value list
| String of string
| Int of int
| Float of float
| Bool of bool
| Null
[@@deriving show]
| null | https://raw.githubusercontent.com/lillo/compiler-course-unipi/2fdb7f9a49eb0abebf11b6903f67c8187df86a93/frontend-material/code/json/json.ml | ocaml | type value =
| Obj of (string * value) list
| List of value list
| String of string
| Int of int
| Float of float
| Bool of bool
| Null
[@@deriving show]
| |
219ab3d7c0aaf65bc9720cf004cb076e98694ce7150ec4be070b9efc910506ab | charJe/funds | stack.lisp | ;;;;
Copyright 2007
;;;;
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
;;;; you may not use this file except in compliance with the License.
;;;; You may obtain a copy of the License at
;;;;
;;;; -2.0
;;;;
;;;; Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
;;;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;;;; See the License for the specific language governing permissions and
;;;; limitations under the License.
;;;;
(in-package :funds)
(defun make-stack ()
"An empty stack."
(list))
(defun stack-push (stack item)
"The stack that results when the given item is pushed onto the given stack."
(cons item stack))
(defun stack-pop (stack)
"The stack that results when the top item is popped off the given stack."
(cdr stack))
(defun stack-top (stack)
"The top item on the given stack."
(car stack))
(defun stack-empty-p (stack)
"Whether the given stack is empty."
(null stack))
(defun stack-size (stack)
"The number of items on this stack; note that this is an O(n) operation."
(labels ((f (stack accum)
(if (stack-empty-p stack)
accum
(f (stack-pop stack) (1+ accum)))))
(f stack 0)))
(defun map-stack (function stack)
"A stack whose elements are those of the given stack when function is applied
to them."
(mapcar function stack))
(defun stack-from-list (list)
"This function is here in case the implementation of stack changes from what
it is now, a list."
list)
(defun stack-as-list (stack)
"This function is here in case the implementation of stack changes from what
it is now, a list."
stack)
(defun stack-count (item stack &key (key #'identity) (test #'eql))
(count item stack :key key :test test))
(defun stack-count-if (predicate stack &key (key #'identity))
(count-if predicate stack :key key))
| null | https://raw.githubusercontent.com/charJe/funds/bdc40045d08a36a6e81bd33ffffa752d99b08e60/src/stack.lisp | lisp |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
| Copyright 2007
distributed under the License is distributed on an " AS IS " BASIS ,
(in-package :funds)
(defun make-stack ()
"An empty stack."
(list))
(defun stack-push (stack item)
"The stack that results when the given item is pushed onto the given stack."
(cons item stack))
(defun stack-pop (stack)
"The stack that results when the top item is popped off the given stack."
(cdr stack))
(defun stack-top (stack)
"The top item on the given stack."
(car stack))
(defun stack-empty-p (stack)
"Whether the given stack is empty."
(null stack))
(defun stack-size (stack)
"The number of items on this stack; note that this is an O(n) operation."
(labels ((f (stack accum)
(if (stack-empty-p stack)
accum
(f (stack-pop stack) (1+ accum)))))
(f stack 0)))
(defun map-stack (function stack)
"A stack whose elements are those of the given stack when function is applied
to them."
(mapcar function stack))
(defun stack-from-list (list)
"This function is here in case the implementation of stack changes from what
it is now, a list."
list)
(defun stack-as-list (stack)
"This function is here in case the implementation of stack changes from what
it is now, a list."
stack)
(defun stack-count (item stack &key (key #'identity) (test #'eql))
(count item stack :key key :test test))
(defun stack-count-if (predicate stack &key (key #'identity))
(count-if predicate stack :key key))
|
74df2cc1837ecbbc17d45f6a0bab31e10d4d06964bbc1a41d893661a65dbaeb9 | ygrek/mldonkey | bt_dht_node.ml | (** standalone DHT node *)
open BT_DHT
let bracket res destroy k =
let x = try k res with exn -> destroy res; raise exn in
destroy res;
x
let with_open_in_bin file = bracket (open_in_bin file) close_in_noerr
let with_open_out_bin file = bracket (open_out_bin file) close_out_noerr
let load file : Kademlia.table = with_open_in_bin file Marshal.from_channel
let store file (t:Kademlia.table) =
let temp = file ^ ".tmp" in
try
with_open_out_bin temp (fun ch -> Marshal.to_channel ch t []; Unix2.fsync (Unix.descr_of_out_channel ch));
Sys.rename temp file
with exn ->
lprintf_nl ~exn "write to %S failed" file; Sys.remove temp
let parse_peer s =
try
match String2.split s ':' with
| [addr;port] -> addr, int_of_string port
| _ -> raise Not_found
with _ ->
Printf.eprintf "E: bad peer %S, expecting <addr>:<port>\n%!" s;
exit 2
let init file = try load file with _ -> Kademlia.create ()
let run_queries =
let ids = [|
"FA959F240D5859CAC30F32ECD21BD89F576481F0";
"BDE98D04AB6BD6E8EA7440F82870E5191E130A84";
"857224361969AE12066166539538F07BD5EF48B4";
"81F643A195BBE3BB1DE1AC9184B9F84D74A37EFF";
"7CC9963D90B54DF1710469743C1B43E0E20489C0";
"C2C65A1AA5537406183F4D815C77A2A578B00BFB";
"72F5A608AFBDF6111E5A86B337E9FC27D6020663";
"FE73D74660695208F3ACD221B7A9A128A3D36D47";
|] in
fun dht ->
let id = Kademlia.H.of_hexa ids.(Random.int (Array.length ids)) in
query_peers dht id (fun node token peers ->
lprintf_nl "run_queries : %s returned %d peers : %s"
(show_node node) (List.length peers) (strl Kademlia.show_addr peers))
let () =
Random.self_init ();
try
match List.tl (Array.to_list Sys.argv) with
| file::port::peers ->
let peers = List.map parse_peer peers in
let bw = UdpSocket.new_bandwidth_controler
(TcpBufferedSocket.create_write_bandwidth_controler "UNLIMIT" 0) in
let dht = start (init file) (int_of_string port) bw in
let finish () = store file dht.M.rt; stop dht; exit 0 in
Sys.set_signal Sys.sigint (Sys.Signal_handle (fun _ -> show dht; finish ()));
Sys.set_signal Sys.sigterm (Sys.Signal_handle (fun _ -> show dht; finish ()));
Sys.set_signal Sys.sighup (Sys.Signal_handle (fun _ -> show dht));
BasicSocket.add_infinite_timer 1800. (fun () -> run_queries dht);
BasicSocket.add_infinite_timer 3600. (fun () -> store file dht.M.rt);
let routers = ["router.bittorrent.com", 8991] @ peers in
bootstrap dht ~routers;
BasicSocket.loop ()
| _ -> Printf.eprintf "Usage : %s <storage> <port> [<peer_addr:port>]*\n" Sys.argv.(0)
with
exn -> lprintf_nl "main : %s" (Printexc.to_string exn)
| null | https://raw.githubusercontent.com/ygrek/mldonkey/333868a12bb6cd25fed49391dd2c3a767741cb51/tools/bt_dht_node.ml | ocaml | * standalone DHT node |
open BT_DHT
let bracket res destroy k =
let x = try k res with exn -> destroy res; raise exn in
destroy res;
x
let with_open_in_bin file = bracket (open_in_bin file) close_in_noerr
let with_open_out_bin file = bracket (open_out_bin file) close_out_noerr
let load file : Kademlia.table = with_open_in_bin file Marshal.from_channel
let store file (t:Kademlia.table) =
let temp = file ^ ".tmp" in
try
with_open_out_bin temp (fun ch -> Marshal.to_channel ch t []; Unix2.fsync (Unix.descr_of_out_channel ch));
Sys.rename temp file
with exn ->
lprintf_nl ~exn "write to %S failed" file; Sys.remove temp
let parse_peer s =
try
match String2.split s ':' with
| [addr;port] -> addr, int_of_string port
| _ -> raise Not_found
with _ ->
Printf.eprintf "E: bad peer %S, expecting <addr>:<port>\n%!" s;
exit 2
let init file = try load file with _ -> Kademlia.create ()
let run_queries =
let ids = [|
"FA959F240D5859CAC30F32ECD21BD89F576481F0";
"BDE98D04AB6BD6E8EA7440F82870E5191E130A84";
"857224361969AE12066166539538F07BD5EF48B4";
"81F643A195BBE3BB1DE1AC9184B9F84D74A37EFF";
"7CC9963D90B54DF1710469743C1B43E0E20489C0";
"C2C65A1AA5537406183F4D815C77A2A578B00BFB";
"72F5A608AFBDF6111E5A86B337E9FC27D6020663";
"FE73D74660695208F3ACD221B7A9A128A3D36D47";
|] in
fun dht ->
let id = Kademlia.H.of_hexa ids.(Random.int (Array.length ids)) in
query_peers dht id (fun node token peers ->
lprintf_nl "run_queries : %s returned %d peers : %s"
(show_node node) (List.length peers) (strl Kademlia.show_addr peers))
let () =
Random.self_init ();
try
match List.tl (Array.to_list Sys.argv) with
| file::port::peers ->
let peers = List.map parse_peer peers in
let bw = UdpSocket.new_bandwidth_controler
(TcpBufferedSocket.create_write_bandwidth_controler "UNLIMIT" 0) in
let dht = start (init file) (int_of_string port) bw in
let finish () = store file dht.M.rt; stop dht; exit 0 in
Sys.set_signal Sys.sigint (Sys.Signal_handle (fun _ -> show dht; finish ()));
Sys.set_signal Sys.sigterm (Sys.Signal_handle (fun _ -> show dht; finish ()));
Sys.set_signal Sys.sighup (Sys.Signal_handle (fun _ -> show dht));
BasicSocket.add_infinite_timer 1800. (fun () -> run_queries dht);
BasicSocket.add_infinite_timer 3600. (fun () -> store file dht.M.rt);
let routers = ["router.bittorrent.com", 8991] @ peers in
bootstrap dht ~routers;
BasicSocket.loop ()
| _ -> Printf.eprintf "Usage : %s <storage> <port> [<peer_addr:port>]*\n" Sys.argv.(0)
with
exn -> lprintf_nl "main : %s" (Printexc.to_string exn)
|
b1c112de3dbe4514b927250ce2f69990e4ab7ecb7a237327be58fab3f8f4f799 | athensresearch/athens | inline_search.cljs | (ns athens.events.inline-search
"Inline Search Events"
(:require
[re-frame.core :as rf]))
(rf/reg-event-db
::set-type!
(fn [db [_ uid type]]
(assoc-in db [:inline-search uid :type] type)))
(rf/reg-event-db
::close!
(fn [db [_ uid]]
(assoc-in db [:inline-search uid :type] nil)))
(rf/reg-event-db
::set-index!
(fn [db [_ uid index]]
(assoc-in db [:inline-search uid :index] index)))
(rf/reg-event-db
::set-results!
(fn [db [_ uid results]]
(assoc-in db [:inline-search uid :results] results)))
(rf/reg-event-db
::clear-results!
(fn [db [_ uid]]
(assoc-in db [:inline-search uid :results] [])))
(rf/reg-event-db
::set-query!
(fn [db [_ uid query]]
(assoc-in db [:inline-search uid :query] query)))
(rf/reg-event-db
::clear-query!
(fn [db [_ uid]]
(assoc-in db [:inline-search uid :query] "")))
| null | https://raw.githubusercontent.com/athensresearch/athens/9ea22566fc10cfd478320b0aaeeee2f32a506cc6/src/cljs/athens/events/inline_search.cljs | clojure | (ns athens.events.inline-search
"Inline Search Events"
(:require
[re-frame.core :as rf]))
(rf/reg-event-db
::set-type!
(fn [db [_ uid type]]
(assoc-in db [:inline-search uid :type] type)))
(rf/reg-event-db
::close!
(fn [db [_ uid]]
(assoc-in db [:inline-search uid :type] nil)))
(rf/reg-event-db
::set-index!
(fn [db [_ uid index]]
(assoc-in db [:inline-search uid :index] index)))
(rf/reg-event-db
::set-results!
(fn [db [_ uid results]]
(assoc-in db [:inline-search uid :results] results)))
(rf/reg-event-db
::clear-results!
(fn [db [_ uid]]
(assoc-in db [:inline-search uid :results] [])))
(rf/reg-event-db
::set-query!
(fn [db [_ uid query]]
(assoc-in db [:inline-search uid :query] query)))
(rf/reg-event-db
::clear-query!
(fn [db [_ uid]]
(assoc-in db [:inline-search uid :query] "")))
| |
8c51eb1bcecced9a7f74ffcde379f58c3c2ec66dec21c89245c32fdaa419fcfb | semerdzhiev/fp-2020-21 | 01-get-from-list.rkt | #lang racket
(require rackunit)
(require rackunit/text-ui)
Нека имаме следния списък
(define my-list '(1 2 3 (4 5) (6 (7 8))))
Искаме с подходящи извиквания на car вземем .
Първите две са за пример .
(define get-one (void))
(define get-two (void))
(define get-three (void))
(define get-four (void))
(define get-five (void))
(define get-six (void))
(define get-seven (void))
(define get-eight (void))
(define tests
(test-suite "dummy tests"
(check-equal? get-one 1)
(check-equal? get-two 2)
(check-equal? get-three 3)
(check-equal? get-four 4)
(check-equal? get-five 5)
(check-equal? get-six 6)
(check-equal? get-seven 7)
(check-equal? get-eight 8)
)
)
(run-tests tests 'verbose)
| null | https://raw.githubusercontent.com/semerdzhiev/fp-2020-21/64fa00c4f940f75a28cc5980275b124ca21244bc/group-b/exercises/04.lists/01-get-from-list.rkt | racket | #lang racket
(require rackunit)
(require rackunit/text-ui)
Нека имаме следния списък
(define my-list '(1 2 3 (4 5) (6 (7 8))))
Искаме с подходящи извиквания на car вземем .
Първите две са за пример .
(define get-one (void))
(define get-two (void))
(define get-three (void))
(define get-four (void))
(define get-five (void))
(define get-six (void))
(define get-seven (void))
(define get-eight (void))
(define tests
(test-suite "dummy tests"
(check-equal? get-one 1)
(check-equal? get-two 2)
(check-equal? get-three 3)
(check-equal? get-four 4)
(check-equal? get-five 5)
(check-equal? get-six 6)
(check-equal? get-seven 7)
(check-equal? get-eight 8)
)
)
(run-tests tests 'verbose)
| |
8445d7536fce3a05325e091ba4d2b23e8cf3b3c8b005268a22a33b82a7801c3a | clojure-garden/clojure-garden | core.cljs | (ns platform.ui.core
(:require
[antd.core :as ant]
[goog.dom :as gdom]
[platform.ui.db :as db]
[platform.ui.deps]
[platform.ui.logger :as logger]
[platform.ui.pages.root :as root]
[platform.ui.router.core :as router]
[re-frame.core :as rf]
[reagent.dom :as dom]))
(defn setup-tools
"Setup tools."
[]
(logger/init!))
(defn app
[]
(when-some [locale @(rf/subscribe [:i18n/locale])]
[ant/config-provider {:locale (:antd locale)
:componentSize "large"}
[root/page]]))
(defn mount-root
"Mount root component."
{:dev/after-load true}
[]
(when-some [root-elem (gdom/getElement "root")]
(rf/clear-subscription-cache!)
(router/init!)
(dom/render [app] root-elem)))
(defn init!
"UI initializer."
{:export true}
[]
(rf/dispatch-sync [::db/init])
(setup-tools)
(mount-root))
| null | https://raw.githubusercontent.com/clojure-garden/clojure-garden/9c58ce33f91aeab5f7bd0d16647ecc450ba8967f/modules/frontend/src/platform/ui/core.cljs | clojure | (ns platform.ui.core
(:require
[antd.core :as ant]
[goog.dom :as gdom]
[platform.ui.db :as db]
[platform.ui.deps]
[platform.ui.logger :as logger]
[platform.ui.pages.root :as root]
[platform.ui.router.core :as router]
[re-frame.core :as rf]
[reagent.dom :as dom]))
(defn setup-tools
"Setup tools."
[]
(logger/init!))
(defn app
[]
(when-some [locale @(rf/subscribe [:i18n/locale])]
[ant/config-provider {:locale (:antd locale)
:componentSize "large"}
[root/page]]))
(defn mount-root
"Mount root component."
{:dev/after-load true}
[]
(when-some [root-elem (gdom/getElement "root")]
(rf/clear-subscription-cache!)
(router/init!)
(dom/render [app] root-elem)))
(defn init!
"UI initializer."
{:export true}
[]
(rf/dispatch-sync [::db/init])
(setup-tools)
(mount-root))
| |
b4bd5ebf178120259f37d36ff7b540a59328a51576aec92adc0f6289107b7f54 | heralden/heckendorf | item.clj | (ns heckendorf.item
(:require [heckendorf.random :refer [rand-range perc-vec]]
[heckendorf.data :refer [materials grades]]))
(def weapons
{:fist {:att 1 :spd 10} ; Special "weapon" that you only start with.
:dagger {:att 3 :spd 12}
:sword {:att 7 :spd 9}
:mace {:att 8 :spd 8}
:greatsword {:att 12 :spd 6}})
(defn- truncate-decimals
([x]
(truncate-decimals x 1))
([x amount]
(let [multiplier (Math/pow 10 amount)]
(float (/ (Math/round (* x multiplier))
multiplier)))))
(defn- calc-dmg
"We use the weapon spd to determine the chance of hitting. To calculate the
damage, we use the weapon grade, weapon form att and the player's strength."
[str-multiplier grade-multiplier enemy-spd wep]
(let [{:keys [att spd]} wep
chance-to-miss (* 4 (/ enemy-spd spd 100))
final-att (+ (/ att 10) 1)
dmg (* grade-multiplier str-multiplier final-att)
jitter (* dmg 0.2)
real-dmg (+ (* (rand) jitter) (- dmg (/ jitter 2)))]
(if (> (rand) chance-to-miss)
(truncate-decimals real-dmg)
0)))
(defn dmg-with
"Calculates damage using strength, speed of enemy and weapon item."
[strength enemy-spd {:keys [form grade]}]
(let [str-multiplier (+ (/ strength 10) 1)
grade-multiplier (get grades grade)
weapon (get weapons form)]
(calc-dmg str-multiplier grade-multiplier enemy-spd weapon)))
(def potion
{:minor #(rand-range 20 30)
:lesser #(rand-range 40 60)
:greater #(rand-range 180 250)})
(defn potion->hp [{:keys [grade type]}]
(assert (= type :potion))
((potion grade)))
(defn rand-weapon [type]
{:type :weapon
:form (-> weapons
(dissoc :fist)
keys
rand-nth)
:grade (-> (type materials)
keys
rand-nth)})
(defn rand-potion [grade]
{:type :potion
:grade grade})
(defmulti gen-item :type)
(defmethod gen-item :chest/common [_]
(perc-vec [[80 #(rand-potion :minor)]
[50 #(rand-potion :minor)]
[60 #(rand-weapon :common)]]))
(defmethod gen-item :chest/uncommon [_]
(perc-vec [[90 #(rand-potion :minor)]
[40 #(rand-potion :minor)]
[50 #(rand-potion :lesser)]
[60 #(rand-weapon :uncommon)]]))
(defmethod gen-item :chest/rare [_]
(perc-vec [[80 #(rand-potion :lesser)]
[60 #(rand-potion :greater)]
[70 #(rand-potion :minor)]
[70 #(rand-weapon :rare)]]))
(defmethod gen-item :chest/epic [_]
(perc-vec [[80 #(rand-potion :greater)]
[60 #(rand-potion :lesser)]
[50 #(rand-potion :minor)]
[60 #(rand-weapon :epic)]
[80 #(rand-weapon :rare)]]))
| null | https://raw.githubusercontent.com/heralden/heckendorf/b5d7e75f9072dddf39598aa48bb4aacfeed5fc81/src/clj/heckendorf/item.clj | clojure | Special "weapon" that you only start with. | (ns heckendorf.item
(:require [heckendorf.random :refer [rand-range perc-vec]]
[heckendorf.data :refer [materials grades]]))
(def weapons
:dagger {:att 3 :spd 12}
:sword {:att 7 :spd 9}
:mace {:att 8 :spd 8}
:greatsword {:att 12 :spd 6}})
(defn- truncate-decimals
([x]
(truncate-decimals x 1))
([x amount]
(let [multiplier (Math/pow 10 amount)]
(float (/ (Math/round (* x multiplier))
multiplier)))))
(defn- calc-dmg
"We use the weapon spd to determine the chance of hitting. To calculate the
damage, we use the weapon grade, weapon form att and the player's strength."
[str-multiplier grade-multiplier enemy-spd wep]
(let [{:keys [att spd]} wep
chance-to-miss (* 4 (/ enemy-spd spd 100))
final-att (+ (/ att 10) 1)
dmg (* grade-multiplier str-multiplier final-att)
jitter (* dmg 0.2)
real-dmg (+ (* (rand) jitter) (- dmg (/ jitter 2)))]
(if (> (rand) chance-to-miss)
(truncate-decimals real-dmg)
0)))
(defn dmg-with
"Calculates damage using strength, speed of enemy and weapon item."
[strength enemy-spd {:keys [form grade]}]
(let [str-multiplier (+ (/ strength 10) 1)
grade-multiplier (get grades grade)
weapon (get weapons form)]
(calc-dmg str-multiplier grade-multiplier enemy-spd weapon)))
(def potion
{:minor #(rand-range 20 30)
:lesser #(rand-range 40 60)
:greater #(rand-range 180 250)})
(defn potion->hp [{:keys [grade type]}]
(assert (= type :potion))
((potion grade)))
(defn rand-weapon [type]
{:type :weapon
:form (-> weapons
(dissoc :fist)
keys
rand-nth)
:grade (-> (type materials)
keys
rand-nth)})
(defn rand-potion [grade]
{:type :potion
:grade grade})
(defmulti gen-item :type)
(defmethod gen-item :chest/common [_]
(perc-vec [[80 #(rand-potion :minor)]
[50 #(rand-potion :minor)]
[60 #(rand-weapon :common)]]))
(defmethod gen-item :chest/uncommon [_]
(perc-vec [[90 #(rand-potion :minor)]
[40 #(rand-potion :minor)]
[50 #(rand-potion :lesser)]
[60 #(rand-weapon :uncommon)]]))
(defmethod gen-item :chest/rare [_]
(perc-vec [[80 #(rand-potion :lesser)]
[60 #(rand-potion :greater)]
[70 #(rand-potion :minor)]
[70 #(rand-weapon :rare)]]))
(defmethod gen-item :chest/epic [_]
(perc-vec [[80 #(rand-potion :greater)]
[60 #(rand-potion :lesser)]
[50 #(rand-potion :minor)]
[60 #(rand-weapon :epic)]
[80 #(rand-weapon :rare)]]))
|
61995d4dc8d6a9afa048cd0ac8b0c4d296deca0b0115a0c0c3c44db275e2d8c3 | graninas/Andromeda | Main.hs | # LANGUAGE TypeFamilies #
module Main where
import Graphics.QML as QML
import ViewModels
startUiApplication workspace = do
let view = fileDocument "app/Views/ShellView.qml"
viewModel <- createShellVM workspace
runEngineLoop QML.defaultEngineConfig
{ initialDocument = view
, contextObject = Just $ QML.anyObjRef viewModel
}
main :: IO ()
main = do
print "Andromeda Control Software, version 0.1"
workspace <- createSimulatorWorkspace
startUiApplication workspace
| null | https://raw.githubusercontent.com/graninas/Andromeda/6b56052bca64fc6f55a28f8001dd775a744b95bf/app/Main.hs | haskell | # LANGUAGE TypeFamilies #
module Main where
import Graphics.QML as QML
import ViewModels
startUiApplication workspace = do
let view = fileDocument "app/Views/ShellView.qml"
viewModel <- createShellVM workspace
runEngineLoop QML.defaultEngineConfig
{ initialDocument = view
, contextObject = Just $ QML.anyObjRef viewModel
}
main :: IO ()
main = do
print "Andromeda Control Software, version 0.1"
workspace <- createSimulatorWorkspace
startUiApplication workspace
| |
83a11c14b3ef8f3c35f9faf46f4c520fe99a27fdd739459679a7b4fbdba744b6 | hstreamdb/hstream | LogDeviceSpec.hs | module HStream.Store.LogDeviceSpec where
import Control.Exception (bracket)
import Control.Monad (void)
import Data.Default (def)
import Data.List (sort)
import qualified Data.Map.Strict as Map
import Test.Hspec
import Z.Data.CBytes (CBytes)
import qualified Z.IO.FileSystem as FS
import qualified HStream.Store as S
import qualified HStream.Store.Internal.LogDevice as I
import HStream.Store.SpecUtils
spec :: Spec
spec = do
loggroupSpec
logdirSpec
logdirAround :: I.LogAttributes -> SpecWith CBytes -> Spec
logdirAround attrs = aroundAll $ \runTest -> bracket setup clean runTest
where
setup = do
dirname <- ("/" `FS.join`) =<< newRandomName 10
lddir <- I.makeLogDirectory client dirname attrs False
void $ I.syncLogsConfigVersion client =<< I.logDirectoryGetVersion lddir
return dirname
clean dirname =
I.syncLogsConfigVersion client =<< I.removeLogDirectory client dirname True
logdirSpec :: Spec
logdirSpec = describe "LogDirectory" $ do
let attrs = def { I.logReplicationFactor = I.defAttr1 1
, I.logBacklogDuration = I.defAttr1 (Just 60)
, I.logAttrsExtras = Map.fromList [("A", "B")]
}
it "get log directory children name" $ do
dirname <- ("/" `FS.join`) =<< newRandomName 10
_ <- I.makeLogDirectory client dirname attrs False
_ <- I.makeLogDirectory client (dirname <> "/A") attrs False
version <- I.logDirectoryGetVersion =<< I.makeLogDirectory client (dirname <> "/B") attrs False
I.syncLogsConfigVersion client version
dir <- I.getLogDirectory client dirname
names <- I.logDirChildrenNames dir
sort names `shouldBe` ["A", "B"]
I.logDirLogsNames dir `shouldReturn` []
I.syncLogsConfigVersion client =<< I.removeLogDirectory client dirname True
I.getLogDirectory client dirname `shouldThrow` anyException
it "get log directory logs name" $ do
let logid1 = 101
logid2 = 102
dirname <- ("/" `FS.join`) =<< newRandomName 10
_ <- I.makeLogDirectory client dirname attrs False
_ <- I.makeLogGroup client (dirname <> "/A") logid1 logid1 attrs False
version <- I.logGroupGetVersion =<<
I.makeLogGroup client (dirname <> "/B") logid2 logid2 attrs False
I.syncLogsConfigVersion client version
dir <- I.getLogDirectory client dirname
names <- I.logDirLogsNames dir
sort names `shouldBe` ["A", "B"]
I.logDirChildrenNames dir `shouldReturn` []
I.syncLogsConfigVersion client =<< I.removeLogDirectory client dirname True
I.getLogDirectory client dirname `shouldThrow` anyException
it "get log group and child directory" $ do
let logid = 103
dirname <- ("/" `FS.join`) =<< newRandomName 10
_ <- I.makeLogDirectory client dirname attrs False
_ <- I.makeLogDirectory client (dirname <> "/A") attrs False
version <- I.logGroupGetVersion =<<
I.makeLogGroup client (dirname <> "/B") logid logid attrs False
I.syncLogsConfigVersion client version
dir <- I.getLogDirectory client dirname
nameA <- I.logDirectoryGetFullName =<< I.getLogDirectory client =<< I.logDirChildFullName dir "A"
nameA `shouldBe` dirname <> "/A/"
nameB <- I.logGroupGetFullName =<< I.getLogGroup client =<< I.logDirLogFullName dir "B"
nameB `shouldBe` dirname <> "/B"
I.syncLogsConfigVersion client =<< I.removeLogDirectory client dirname True
I.getLogDirectory client dirname `shouldThrow` anyException
let attrs_ra = def { I.logReplicateAcross = I.defAttr1 [(S.NodeLocationScope_DATA_CENTER, 3)] }
logdirAround attrs_ra $ it "attributes: logReplicateAcross" $ \dirname -> do
dir <- I.getLogDirectory client dirname
attrs_got <- I.logDirectoryGetAttrs dir
S.logReplicateAcross attrs_got `shouldBe` I.defAttr1 [(S.NodeLocationScope_DATA_CENTER, 3)]
it "Loggroup's attributes should be inherited by the parent directory" $ do
dirname <- ("/" `FS.join`) =<< newRandomName 10
let logid = 104
lgname = dirname <> "/A"
_ <- I.makeLogDirectory client dirname attrs False
I.syncLogsConfigVersion client =<< I.logGroupGetVersion
=<< I.makeLogGroup client lgname logid logid def False
lg <- I.getLogGroup client lgname
attrs' <- I.logGroupGetAttrs lg
I.logReplicationFactor attrs' `shouldBe` I.Attribute (Just 1) True
I.logBacklogDuration attrs' `shouldBe` I.Attribute (Just (Just 60)) True
Map.lookup "A" (I.logAttrsExtras attrs') `shouldBe` Just "B"
I.syncLogsConfigVersion client =<< I.removeLogDirectory client dirname True
loggroupAround :: SpecWith (CBytes, S.C_LogID) -> Spec
loggroupAround = aroundAll $ \runTest -> bracket setup clean runTest
where
setup = do
let attrs = def { I.logReplicationFactor = I.defAttr1 1
, I.logBacklogDuration = I.defAttr1 (Just 60)
, I.logSingleWriter = I.defAttr1 True
, I.logSyncReplicationScope = I.defAttr1 S.NodeLocationScope_DATA_CENTER
, I.logAttrsExtras = Map.fromList [("A", "B")]
}
logid = 104
logname = "LogDeviceSpec_LogGroupSpec"
lg <- I.makeLogGroup client logname logid logid attrs False
void $ I.syncLogsConfigVersion client =<< I.logGroupGetVersion lg
return (logname, logid)
clean (logname, _logid) =
I.syncLogsConfigVersion client =<< I.removeLogGroup client logname
loggroupSpec :: Spec
loggroupSpec = describe "LogGroup" $ loggroupAround $ parallel $ do
it "log group get attrs" $ \(lgname, _logid) -> do
lg <- I.getLogGroup client lgname
attrs' <- I.logGroupGetAttrs lg
I.logReplicationFactor attrs' `shouldBe` I.defAttr1 1
I.logBacklogDuration attrs' `shouldBe` I.defAttr1 (Just 60)
I.logSingleWriter attrs' `shouldBe` I.defAttr1 True
I.logSyncReplicationScope attrs' `shouldBe` I.defAttr1 S.NodeLocationScope_DATA_CENTER
Map.lookup "A" (I.logAttrsExtras attrs') `shouldBe` Just "B"
it "log group get and set range" $ \(lgname, logid) -> do
let logid' = logid + 1
lg <- I.getLogGroup client lgname
I.logGroupGetRange lg `shouldReturn`(logid, logid)
I.syncLogsConfigVersion client =<< I.logGroupSetRange client lgname (logid',logid')
range' <- I.logGroupGetRange =<< I.getLogGroup client lgname
range' `shouldBe` (logid', logid')
it "get a nonexist loggroup should throw NOTFOUND" $ \(_, _) -> do
I.getLogGroup client "this_is_a_non_exist_logroup" `shouldThrow` S.isNOTFOUND
| null | https://raw.githubusercontent.com/hstreamdb/hstream/b8779986ab3371e331ead8cc08bc51fcc5c80ec9/hstream-store/test/HStream/Store/LogDeviceSpec.hs | haskell | module HStream.Store.LogDeviceSpec where
import Control.Exception (bracket)
import Control.Monad (void)
import Data.Default (def)
import Data.List (sort)
import qualified Data.Map.Strict as Map
import Test.Hspec
import Z.Data.CBytes (CBytes)
import qualified Z.IO.FileSystem as FS
import qualified HStream.Store as S
import qualified HStream.Store.Internal.LogDevice as I
import HStream.Store.SpecUtils
spec :: Spec
spec = do
loggroupSpec
logdirSpec
logdirAround :: I.LogAttributes -> SpecWith CBytes -> Spec
logdirAround attrs = aroundAll $ \runTest -> bracket setup clean runTest
where
setup = do
dirname <- ("/" `FS.join`) =<< newRandomName 10
lddir <- I.makeLogDirectory client dirname attrs False
void $ I.syncLogsConfigVersion client =<< I.logDirectoryGetVersion lddir
return dirname
clean dirname =
I.syncLogsConfigVersion client =<< I.removeLogDirectory client dirname True
logdirSpec :: Spec
logdirSpec = describe "LogDirectory" $ do
let attrs = def { I.logReplicationFactor = I.defAttr1 1
, I.logBacklogDuration = I.defAttr1 (Just 60)
, I.logAttrsExtras = Map.fromList [("A", "B")]
}
it "get log directory children name" $ do
dirname <- ("/" `FS.join`) =<< newRandomName 10
_ <- I.makeLogDirectory client dirname attrs False
_ <- I.makeLogDirectory client (dirname <> "/A") attrs False
version <- I.logDirectoryGetVersion =<< I.makeLogDirectory client (dirname <> "/B") attrs False
I.syncLogsConfigVersion client version
dir <- I.getLogDirectory client dirname
names <- I.logDirChildrenNames dir
sort names `shouldBe` ["A", "B"]
I.logDirLogsNames dir `shouldReturn` []
I.syncLogsConfigVersion client =<< I.removeLogDirectory client dirname True
I.getLogDirectory client dirname `shouldThrow` anyException
it "get log directory logs name" $ do
let logid1 = 101
logid2 = 102
dirname <- ("/" `FS.join`) =<< newRandomName 10
_ <- I.makeLogDirectory client dirname attrs False
_ <- I.makeLogGroup client (dirname <> "/A") logid1 logid1 attrs False
version <- I.logGroupGetVersion =<<
I.makeLogGroup client (dirname <> "/B") logid2 logid2 attrs False
I.syncLogsConfigVersion client version
dir <- I.getLogDirectory client dirname
names <- I.logDirLogsNames dir
sort names `shouldBe` ["A", "B"]
I.logDirChildrenNames dir `shouldReturn` []
I.syncLogsConfigVersion client =<< I.removeLogDirectory client dirname True
I.getLogDirectory client dirname `shouldThrow` anyException
it "get log group and child directory" $ do
let logid = 103
dirname <- ("/" `FS.join`) =<< newRandomName 10
_ <- I.makeLogDirectory client dirname attrs False
_ <- I.makeLogDirectory client (dirname <> "/A") attrs False
version <- I.logGroupGetVersion =<<
I.makeLogGroup client (dirname <> "/B") logid logid attrs False
I.syncLogsConfigVersion client version
dir <- I.getLogDirectory client dirname
nameA <- I.logDirectoryGetFullName =<< I.getLogDirectory client =<< I.logDirChildFullName dir "A"
nameA `shouldBe` dirname <> "/A/"
nameB <- I.logGroupGetFullName =<< I.getLogGroup client =<< I.logDirLogFullName dir "B"
nameB `shouldBe` dirname <> "/B"
I.syncLogsConfigVersion client =<< I.removeLogDirectory client dirname True
I.getLogDirectory client dirname `shouldThrow` anyException
let attrs_ra = def { I.logReplicateAcross = I.defAttr1 [(S.NodeLocationScope_DATA_CENTER, 3)] }
logdirAround attrs_ra $ it "attributes: logReplicateAcross" $ \dirname -> do
dir <- I.getLogDirectory client dirname
attrs_got <- I.logDirectoryGetAttrs dir
S.logReplicateAcross attrs_got `shouldBe` I.defAttr1 [(S.NodeLocationScope_DATA_CENTER, 3)]
it "Loggroup's attributes should be inherited by the parent directory" $ do
dirname <- ("/" `FS.join`) =<< newRandomName 10
let logid = 104
lgname = dirname <> "/A"
_ <- I.makeLogDirectory client dirname attrs False
I.syncLogsConfigVersion client =<< I.logGroupGetVersion
=<< I.makeLogGroup client lgname logid logid def False
lg <- I.getLogGroup client lgname
attrs' <- I.logGroupGetAttrs lg
I.logReplicationFactor attrs' `shouldBe` I.Attribute (Just 1) True
I.logBacklogDuration attrs' `shouldBe` I.Attribute (Just (Just 60)) True
Map.lookup "A" (I.logAttrsExtras attrs') `shouldBe` Just "B"
I.syncLogsConfigVersion client =<< I.removeLogDirectory client dirname True
loggroupAround :: SpecWith (CBytes, S.C_LogID) -> Spec
loggroupAround = aroundAll $ \runTest -> bracket setup clean runTest
where
setup = do
let attrs = def { I.logReplicationFactor = I.defAttr1 1
, I.logBacklogDuration = I.defAttr1 (Just 60)
, I.logSingleWriter = I.defAttr1 True
, I.logSyncReplicationScope = I.defAttr1 S.NodeLocationScope_DATA_CENTER
, I.logAttrsExtras = Map.fromList [("A", "B")]
}
logid = 104
logname = "LogDeviceSpec_LogGroupSpec"
lg <- I.makeLogGroup client logname logid logid attrs False
void $ I.syncLogsConfigVersion client =<< I.logGroupGetVersion lg
return (logname, logid)
clean (logname, _logid) =
I.syncLogsConfigVersion client =<< I.removeLogGroup client logname
loggroupSpec :: Spec
loggroupSpec = describe "LogGroup" $ loggroupAround $ parallel $ do
it "log group get attrs" $ \(lgname, _logid) -> do
lg <- I.getLogGroup client lgname
attrs' <- I.logGroupGetAttrs lg
I.logReplicationFactor attrs' `shouldBe` I.defAttr1 1
I.logBacklogDuration attrs' `shouldBe` I.defAttr1 (Just 60)
I.logSingleWriter attrs' `shouldBe` I.defAttr1 True
I.logSyncReplicationScope attrs' `shouldBe` I.defAttr1 S.NodeLocationScope_DATA_CENTER
Map.lookup "A" (I.logAttrsExtras attrs') `shouldBe` Just "B"
it "log group get and set range" $ \(lgname, logid) -> do
let logid' = logid + 1
lg <- I.getLogGroup client lgname
I.logGroupGetRange lg `shouldReturn`(logid, logid)
I.syncLogsConfigVersion client =<< I.logGroupSetRange client lgname (logid',logid')
range' <- I.logGroupGetRange =<< I.getLogGroup client lgname
range' `shouldBe` (logid', logid')
it "get a nonexist loggroup should throw NOTFOUND" $ \(_, _) -> do
I.getLogGroup client "this_is_a_non_exist_logroup" `shouldThrow` S.isNOTFOUND
| |
8d19c0f14881516832fd77790c66a879a9fb845f0fb3edcaff6ff6a6e52e8119 | jackfirth/point-free | definition-forms.rkt | #lang racket
(provide define/wind
define/wind-pre
define/wind-post
define/wind*
define/wind-pre*
define/wind-post*)
(require "parallel-composition.rkt")
;; Helper macro for defining lots of simple macros
(define-syntax-rule (define-syntaxes-rule [pattern expansion] ...)
(begin (define-syntax-rule pattern expansion) ...))
;; Definition forms
(define-syntaxes-rule
[(define/wind id f (pre ...) (post ...)) (define id ((wind f pre ...) post ...))]
[(define/wind-pre id f pre ...) (define id (wind-pre f pre ...))]
[(define/wind-post id f post ...) (define id (wind-post f post ...))]
[(define/wind* id f pre post) (define id (wind* f pre post))]
[(define/wind-pre* id f pre) (define id (wind-pre* f pre))]
[(define/wind-post* id f post) (define id (wind-post* f post))])
| null | https://raw.githubusercontent.com/jackfirth/point-free/d294a342466d5071dd2c8f16ba9e50f9006b54af/point-free/definition-forms.rkt | racket | Helper macro for defining lots of simple macros
Definition forms | #lang racket
(provide define/wind
define/wind-pre
define/wind-post
define/wind*
define/wind-pre*
define/wind-post*)
(require "parallel-composition.rkt")
(define-syntax-rule (define-syntaxes-rule [pattern expansion] ...)
(begin (define-syntax-rule pattern expansion) ...))
(define-syntaxes-rule
[(define/wind id f (pre ...) (post ...)) (define id ((wind f pre ...) post ...))]
[(define/wind-pre id f pre ...) (define id (wind-pre f pre ...))]
[(define/wind-post id f post ...) (define id (wind-post f post ...))]
[(define/wind* id f pre post) (define id (wind* f pre post))]
[(define/wind-pre* id f pre) (define id (wind-pre* f pre))]
[(define/wind-post* id f post) (define id (wind-post* f post))])
|
7404a631c3dd471ef2219c17f6b90866965086051f28948dc6e60fd9b8cfc7a6 | erlang/otp | erl_scan_SUITE.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 1998 - 2022 . All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
-module(erl_scan_SUITE).
-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_testcase/2, end_per_testcase/2,
init_per_group/2,end_per_group/2]).
-export([error_1/1, error_2/1, iso88591/1, otp_7810/1, otp_10302/1,
otp_10990/1, otp_10992/1, otp_11807/1, otp_16480/1, otp_17024/1,
text_fun/1]).
-import(lists, [nth/2,flatten/1]).
-import(io_lib, [print/1]).
%%
%% Define to run outside of test server
%%
%%-define(STANDALONE,1).
-ifdef(STANDALONE).
-compile(export_all).
-define(line, put(line, ?LINE), ).
-define(config(A,B),config(A,B)).
-define(t, test_server).
%% config(priv_dir, _) ->
%% ".";
%% config(data_dir, _) ->
%% ".".
-else.
-include_lib("common_test/include/ct.hrl").
-endif.
init_per_testcase(_Case, Config) ->
Config.
end_per_testcase(_Case, _Config) ->
ok.
suite() ->
[{ct_hooks,[ts_install_cth]},
{timetrap,{minutes,20}}].
all() ->
[{group, error}, iso88591, otp_7810, otp_10302, otp_10990, otp_10992,
otp_11807, otp_16480, otp_17024, text_fun].
groups() ->
[{error, [], [error_1, error_2]}].
init_per_suite(Config) ->
Config.
end_per_suite(_Config) ->
ok.
init_per_group(_GroupName, Config) ->
Config.
end_per_group(_GroupName, Config) ->
Config.
%% (OTP-2347)
error_1(Config) when is_list(Config) ->
{error, _, _} = erl_scan:string("'a"),
ok.
%% Checks that format_error works on the error cases.
error_2(Config) when is_list(Config) ->
lists:foreach(fun check/1, error_cases()),
ok.
error_cases() ->
["'a",
"\"a",
"'\\",
"\"\\",
"$",
"$\\",
"2.3e",
"2.3e-",
"91#9"
].
assert_type(N, integer) when is_integer(N) ->
ok;
assert_type(N, atom) when is_atom(N) ->
ok.
check(String) ->
Error = erl_scan:string(String),
check_error(Error, erl_scan).
%%% (This should be useful for all format_error functions.)
check_error({error, Info, EndLine}, Module0) ->
{ErrorLine, Module, Desc} = Info,
true = (Module == Module0),
assert_type(EndLine, integer),
assert_type(ErrorLine, integer),
true = (ErrorLine =< EndLine),
String = lists:flatten(Module0:format_error(Desc)),
true = io_lib:printable_list(String).
Tests the support for ISO-8859 - 1 i.e Latin-1 .
iso88591(Config) when is_list(Config) ->
ok =
case catch begin
%% Some atom and variable names
V1s = [$Á,$á,$é,$ë],
V2s = [$N,$ä,$r],
A1s = [$h,$ä,$r],
A2s = [$ö,$r,$e],
%% Test parsing atom and variable characters.
{ok,Ts1,_} = erl_scan_string(V1s ++ " " ++ V2s ++
"\327" ++
A1s ++ " " ++ A2s),
V1s = atom_to_list(element(3, nth(1, Ts1))),
V2s = atom_to_list(element(3, nth(2, Ts1))),
A1s = atom_to_list(element(3, nth(4, Ts1))),
A2s = atom_to_list(element(3, nth(5, Ts1))),
%% Test printing atoms
A1s = flatten(print(element(3, nth(4, Ts1)))),
A2s = flatten(print(element(3, nth(5, Ts1)))),
%% Test parsing and printing strings.
S1 = V1s ++ "\327" ++ A1s ++ "\250" ++ A2s,
S1s = "\"" ++ S1 ++ "\"",
{ok,Ts2,_} = erl_scan_string(S1s),
S1 = element(3, nth(1, Ts2)),
S1s = flatten(print(element(3, nth(1, Ts2)))),
ok %It all worked
end of
{'EXIT',R} -> %Something went wrong!
{error,R};
end.
%% OTP-7810. White spaces, comments, and more...
otp_7810(Config) when is_list(Config) ->
ok = reserved_words(),
ok = atoms(),
ok = punctuations(),
ok = comments(),
ok = errors(),
ok = integers(),
ok = base_integers(),
ok = floats(),
ok = dots(),
ok = chars(),
ok = variables(),
ok = eof(),
ok = illegal(),
ok = crashes(),
ok = options(),
ok = token_info(),
ok = column_errors(),
ok = white_spaces(),
ok = unicode(),
ok = more_chars(),
ok = more_options(),
ok = anno_info(),
ok.
reserved_words() ->
L = ['after', 'begin', 'case', 'try', 'cond', 'catch',
'andalso', 'orelse', 'end', 'fun', 'if', 'let', 'of',
'receive', 'when', 'bnot', 'not', 'div',
'rem', 'band', 'and', 'bor', 'bxor', 'bsl', 'bsr',
'or', 'xor'],
[begin
{RW, true} = {RW, erl_scan:reserved_word(RW)},
S = atom_to_list(RW),
Ts = [{RW,{1,1}}],
test_string(S, Ts)
end || RW <- L],
ok.
atoms() ->
test_string("a
b", [{atom,{1,1},a},{atom,{2,18},b}]),
test_string("'a b'", [{atom,{1,1},'a b'}]),
test_string("a", [{atom,{1,1},a}]),
test_string("a@2", [{atom,{1,1},a@2}]),
test_string([39,65,200,39], [{atom,{1,1},'AÈ'}]),
test_string("ärlig östen", [{atom,{1,1},ärlig},{atom,{1,7},östen}]),
{ok,[{atom,_,'$a'}],{1,6}} =
erl_scan_string("'$\\a'", {1,1}),
test("'$\\a'"),
ok.
punctuations() ->
L = ["<<", "<-", "<=", "<", ">>", ">=", ">", "->", "--",
"-", "++", "+", "=:=", "=/=", "=<", "=>", "==", "=", "/=",
"/", "||", "|", ":=", "::", ":"],
One token at a time :
[begin
W = list_to_atom(S),
Ts = [{W,{1,1}}],
test_string(S, Ts)
end || S <- L],
three tokens ...
No = Three ++ L,
SL0 = [{S1++S2,{-length(S1),S1,S2}} ||
S1 <- L,
S2 <- L,
not lists:member(S1++S2, No)],
SL = family_list(SL0),
Two tokens . When there are several answers , the one with
the longest first token is chosen :
%% [the special case "=<<" is among the tested ones]
[begin
W1 = list_to_atom(S1),
W2 = list_to_atom(S2),
Ts = [{W1,{1,1}},{W2,{1,-L2+1}}],
test_string(S, Ts)
end || {S,[{L2,S1,S2}|_]} <- SL],
PTs1 = [{'!',{1,1}},{'(',{1,2}},{')',{1,3}},{',',{1,4}},{';',{1,5}},
{'=',{1,6}},{'[',{1,7}},{']',{1,8}},{'{',{1,9}},{'|',{1,10}},
{'}',{1,11}}],
test_string("!(),;=[]{|}", PTs1),
PTs2 = [{'#',{1,1}},{'&',{1,2}},{'*',{1,3}},{'+',{1,4}},{'/',{1,5}},
{':',{1,6}},{'<',{1,7}},{'>',{1,8}},{'?',{1,9}},{'@',{1,10}},
{'\\',{1,11}},{'^',{1,12}},{'`',{1,13}},{'~',{1,14}}],
test_string("#&*+/:<>?@\\^`~", PTs2),
test_string(".. ", [{'..',{1,1}}]),
test_string("1 .. 2",
[{integer,{1,1},1},{'..',{1,3}},{integer,{1,6},2}]),
test_string("...", [{'...',{1,1}}]),
ok.
%% Comments: skipped by default, kept with return_comments,
%% return_white_spaces, or return.
comments() ->
    test("a %%\n b"),
    {ok,[],1} = erl_scan_string("%"),
    test("a %%\n b"),
    {ok,[{atom,{1,1},a},{atom,{2,2},b}],{2,3}} =
        erl_scan_string("a %%\n b", {1,1}),
    %% return_comments keeps comment tokens, drops white space.
    {ok,[{atom,{1,1},a},{comment,{1,3},"%%"},{atom,{2,2},b}],{2,3}} =
        erl_scan_string("a %%\n b",{1,1}, [return_comments]),
    %% return_white_spaces keeps white space tokens, drops comments.
    {ok,[{atom,{1,1},a},
         {white_space,{1,2}," "},
         {white_space,{1,5},"\n "},
         {atom,{2,2},b}],
     {2,3}} =
        erl_scan_string("a %%\n b",{1,1},[return_white_spaces]),
    %% return keeps both comment and white space tokens.
    {ok,[{atom,{1,1},a},
         {white_space,{1,2}," "},
         {comment,{1,3},"%%"},
         {white_space,{1,5},"\n "},
         {atom,{2,2},b}],
     {2,3}} = erl_scan_string("a %%\n b",{1,1},[return]),
    ok.
%% Error returns: unterminated quoted atoms and strings, a lone '$',
%% and non-list input that must make erl_scan exit.
errors() ->
    {error,{1,erl_scan,{string,$',"qa"}},1} = erl_scan:string("'qa"), %'
    {error,{{1,1},erl_scan,{string,$',"qa"}},{1,4}} = %'
        erl_scan:string("'qa", {1,1}, []), %'
    {error,{1,erl_scan,{string,$","str"}},1} = %"
        erl_scan:string("\"str"), %"
    {error,{{1,1},erl_scan,{string,$","str"}},{1,5}} = %"
        erl_scan:string("\"str", {1,1}, []), %"
    {error,{1,erl_scan,char},1} = erl_scan:string("$"),
    {error,{{1,1},erl_scan,char},{1,2}} = erl_scan:string("$", {1,1}, []),
    test_string([34,65,200,34], [{string,{1,1},"AÈ"}]),
    test_string("\\", [{'\\',{1,1}}]),
    {'EXIT',_} =
        (catch {foo, erl_scan:string('$\\a', {1,1})}), % type error
    {'EXIT',_} =
        (catch {foo, erl_scan:tokens([], '$\\a', {1,1})}), % type error
    %% format_error/1 falls back to printing unknown error terms.
    "{a,tuple}" = erl_scan:format_error({a,tuple}),
    ok.
%% Integer literals, including '_' used as a digit-group separator.
integers() ->
    [begin
         I = list_to_integer(S),
         Ts = [{integer,{1,1},I}],
         test_string(S, Ts)
     end || S <- [[N] || N <- lists:seq($0, $9)] ++ ["2323","000"] ],
    %% Valid placements of '_' between digits.
    UnderscoreSamples =
        [{"123_456", 123456},
         {"123_456_789", 123456789},
         {"1_2", 12}],
    lists:foreach(
      fun({S, I}) ->
              test_string(S, [{integer, {1, 1}, I}])
      end, UnderscoreSamples),
    %% Misplaced '_' must not scan as a single integer token.
    UnderscoreErrors =
        ["123_",
         "123__",
         "123_456_",
         "123__456",
         "_123",
         "__123"],
    lists:foreach(
      fun(S) ->
              case erl_scan:string(S) of
                  {ok, [{integer, _, _}], _} ->
                      error({unexpected_integer, S});
                  _ ->
                      ok
              end
      end, UnderscoreErrors),
    %% A leading '_' makes a variable; a trailing '_' starts a new token.
    test_string("_123", [{var,{1,1},'_123'}]),
    test_string("123_", [{integer,{1,1},123},{var,{1,4},'_'}]),
    ok.
%% Integers with an explicit base (Base#Digits), including '_'
%% separators, bad bases, and digits out of range for the base.
base_integers() ->
    [begin
         B = list_to_integer(BS),
         I = erlang:list_to_integer(S, B),
         Ts = [{integer,{1,1},I}],
         test_string(BS++"#"++S, Ts)
     end || {BS,S} <- [{"2","11"}, {"5","23234"}, {"12","05a"},
                       {"16","abcdef"}, {"16","ABCDEF"}] ],
    %% Bases outside the legal range are rejected with a {base,B} error.
    {error,{1,erl_scan,{base,1}},1} = erl_scan:string("1#000"),
    {error,{{1,1},erl_scan,{base,1}},{1,2}} =
        erl_scan:string("1#000", {1,1}, []),
    {error,{1,erl_scan,{base,1}},1} = erl_scan:string("1#000"),
    {error,{{1,1},erl_scan,{base,1000}},{1,6}} =
        erl_scan:string("1_000#000", {1,1}, []),
    %% A digit out of range for the base ends the integer token.
    test_string("12#bc", [{integer,{1,1},11},{atom,{1,5},c}]),
    [begin
         Str = BS ++ "#" ++ S,
         E = 2 + length(BS),
         {error,{{1,1},erl_scan,{illegal,integer}},{1,E}} =
             erl_scan:string(Str, {1,1}, [])
     end || {BS,S} <- [{"3","3"},{"15","f"},{"12","c"},
                       {"1_5","f"},{"1_2","c"}] ],
    {ok,[{integer,1,239},{'@',1}],1} = erl_scan_string("16#ef@"),
    {ok,[{integer,{1,1},239},{'@',{1,6}}],{1,7}} =
        erl_scan_string("16#ef@", {1,1}, []),
    {ok,[{integer,{1,1},14},{atom,{1,5},g@}],{1,7}} =
        erl_scan_string("16#eg@", {1,1}, []),
    %% Valid placements of '_' in based integers.
    UnderscoreSamples =
        [{"16#1234_ABCD_EF56", 16#1234abcdef56},
         {"2#0011_0101_0011", 2#001101010011},
         {"1_6#123ABC", 16#123abc},
         {"1_6#123_ABC", 16#123abc},
         {"16#abcdef", 16#ABCDEF}],
    lists:foreach(
      fun({S, I}) ->
              test_string(S, [{integer, {1, 1}, I}])
      end, UnderscoreSamples),
    %% Misplaced '_' must not scan as a single integer token.
    UnderscoreErrors =
        ["16_#123ABC",
         "16#123_",
         "16#_123",
         "16#ABC_",
         "16#_ABC",
         "2#_0101",
         "1__6#ABC",
         "16#AB__CD"],
    lists:foreach(
      fun(S) ->
              case erl_scan:string(S) of
                  {ok, [{integer, _, _}], _} ->
                      error({unexpected_integer, S});
                  _ ->
                      ok
              end
      end, UnderscoreErrors),
    test_string("16#123_", [{integer,{1,1},291},{var,{1,7},'_'}]),
    test_string("_16#ABC", [{var,{1,1},'_16'},{'#',{1,4}},{var,{1,5},'ABC'}]),
    ok.
%% Float literals: exponents, overflow, and '_' separators.
floats() ->
    [begin
         F = list_to_float(FS),
         Ts = [{float,{1,1},F}],
         test_string(FS, Ts)
     end || FS <- ["1.0","001.17","3.31200","1.0e0","1.0E17",
                   "34.21E-18", "17.0E+14"]],
    test_string("1.e2", [{integer,{1,1},1},{'.',{1,2}},{atom,{1,3},e2}]),
    %% Floats too large for a machine double are illegal.
    {error,{1,erl_scan,{illegal,float}},1} =
        erl_scan:string("1.0e400"),
    {error,{{1,1},erl_scan,{illegal,float}},{1,8}} =
        erl_scan:string("1.0e400", {1,1}, []),
    {error,{{1,1},erl_scan,{illegal,float}},{1,9}} =
        erl_scan:string("1.0e4_00", {1,1}, []),
    [begin
         {error,{1,erl_scan,{illegal,float}},1} = erl_scan:string(S),
         {error,{{1,1},erl_scan,{illegal,float}},{1,_}} =
             erl_scan:string(S, {1,1}, [])
     end || S <- ["1.14Ea"]],
    %% Valid placements of '_' in floats.
    UnderscoreSamples =
        [{"123_456.789", 123456.789},
         {"123.456_789", 123.456789},
         {"1.2_345e10", 1.2345e10},
         {"1.234e1_06", 1.234e106},
         {"12_34.56_78e1_6", 1234.5678e16},
         {"12_34.56_78e-1_8", 1234.5678e-18}],
    lists:foreach(
      fun({S, I}) ->
              test_string(S, [{float, {1, 1}, I}])
      end, UnderscoreSamples),
    %% Misplaced '_' must not scan as a single float token.
    UnderscoreErrors =
        ["123_.456",
         "123._456",
         "123.456_",
         "123._",
         "1._23e10",
         "1.23e_10",
         "1.23e10_"],
    lists:foreach(
      fun(S) ->
              case erl_scan:string(S) of
                  {ok, [{float, _, _}], _} ->
                      error({unexpected_float, S});
                  _ ->
                      ok
              end
      end, UnderscoreErrors),
    test_string("123._", [{integer,{1,1},123},{'.',{1,4}},{var,{1,5},'_'}]),
    test_string("1.23_e10", [{float,{1,1},1.23},{var,{1,5},'_e10'}]),
    ok.
%% The dot token followed by white space, comments, eof, and other
%% characters, with and without column numbers.
dots() ->
    Dot = [{".",    {ok,[{dot,1}],1}, {ok,[{dot,{1,1}}],{1,2}}},
           {". ",   {ok,[{dot,1}],1}, {ok,[{dot,{1,1}}],{1,3}}},
           {".\n",  {ok,[{dot,1}],2}, {ok,[{dot,{1,1}}],{2,1}}},
           {".%",   {ok,[{dot,1}],1}, {ok,[{dot,{1,1}}],{1,3}}},
           {".\210",{ok,[{dot,1}],1}, {ok,[{dot,{1,1}}],{1,3}}},
           {".% öh",{ok,[{dot,1}],1}, {ok,[{dot,{1,1}}],{1,6}}},
           {".%\n", {ok,[{dot,1}],2}, {ok,[{dot,{1,1}}],{2,1}}},
           {".$",   {error,{1,erl_scan,char},1},
                    {error,{{1,2},erl_scan,char},{1,3}}},
           {".$\\", {error,{1,erl_scan,char},1},
                    {error,{{1,2},erl_scan,char},{1,4}}},
           {".a",   {ok,[{'.',1},{atom,1,a}],1},
                    {ok,[{'.',{1,1}},{atom,{1,2},a}],{1,3}}}
          ],
    [begin
         R = erl_scan_string(S),
         R2 = erl_scan_string(S, {1,1}, [])
     end || {S, R, R2} <- Dot],
    {ok,[{dot,_}=T1],{1,2}} = erl_scan:string(".", {1,1}, text),
    [1, 1, "."] = token_info(T1),
    {ok,[{dot,_}=T2],{1,3}} = erl_scan:string(".%", {1,1}, text),
    [1, 1, "."] = token_info(T2),
    {ok,[{dot,_}=T3],{1,6}} =
        erl_scan:string(".% öh", {1,1}, text),
    [1, 1, "."] = token_info(T3),
    {error,{{1,2},erl_scan,char},{1,3}} = erl_scan:string(".$", {1,1}),
    {error,{{1,2},erl_scan,char},{1,4}} = erl_scan:string(".$\\", {1,1}),
    test_string(". ", [{dot,{1,1}}]),
    test_string(". ", [{dot,{1,1}}]),
    test_string(".\n", [{dot,{1,1}}]),
    test_string(".\n\n", [{dot,{1,1}}]),
    test_string(".\n\r", [{dot,{1,1}}]),
    test_string(".\n\n\n", [{dot,{1,1}}]),
    test_string(".\210", [{dot,{1,1}}]),
    test_string(".%\n", [{dot,{1,1}}]),
    test_string(".a", [{'.',{1,1}},{atom,{1,2},a}]),
    test_string("%. \n. ", [{dot,{2,1}}]),
    {more,C} = erl_scan:tokens([], "%. ",{1,1}, return),
    %% NOTE(review): the call on the right-hand side below was garbled
    %% in this copy of the file; restored from the OTP test suite.
    {done,{ok,[{comment,{1,1},"%. "},
               {white_space,{1,4},"\n"},
               {dot,{2,1}}],
           {2,3}}, ""} =
        erl_scan_tokens(C, "\n. ", {1,1}, return), % any loc, any options
    [test_string(S, R) ||
        {S, R} <- [{".$\n",   [{'.',{1,1}},{char,{1,2},$\n}]},
                   {"$\\\n",  [{char,{1,1},$\n}]},
                   {"'\\\n'", [{atom,{1,1},'\n'}]},
                   {"$\n",    [{char,{1,1},$\n}]}] ],
    ok.
%% Character literals: octal escapes, caret notation, named escapes,
%% and characters that need no escape.
chars() ->
    [begin
         L = lists:flatten(io_lib:format("$\\~.8b", [C])),
         Ts = [{char,{1,1},C}],
         test_string(L, Ts)
     end || C <- lists:seq(0, 255)],
    %% Leading zeroes...
    [begin
         L = lists:flatten(io_lib:format("$\\~3.8.0b", [C])),
         Ts = [{char,{1,1},C}],
         test_string(L, Ts)
     end || C <- lists:seq(0, 255)],
    %% GH-6477. Test legal use of caret notation.
    [begin
         L = "$\\^" ++ [C],
         Ts = case C of
                  $? ->
                      [{char,{1,1},127}];
                  _ ->
                      [{char,{1,1},C band 2#11111}]
              end,
         test_string(L, Ts)
     end || C <- lists:seq($?, $Z) ++ lists:seq($a, $z)],
    %% Named escapes.
    [begin
         L = "$\\" ++ [C],
         Ts = [{char,{1,1},V}],
         test_string(L, Ts)
     end || {C,V} <- [{$n,$\n}, {$r,$\r}, {$t,$\t}, {$v,$\v},
                      {$b,$\b}, {$f,$\f}, {$e,$\e}, {$s,$\s},
                      {$d,$\d}]],
    EC = [$\n,$\r,$\t,$\v,$\b,$\f,$\e,$\s,$\d],
    Ds = lists:seq($0, $9),
    X = [$^,$n,$r,$t,$v,$b,$f,$e,$s,$d],
    New = [${,$x],
    No = EC ++ Ds ++ X ++ New,
    %% Escaping a character with no special meaning yields the
    %% character itself.
    [begin
         L = "$\\" ++ [C],
         Ts = [{char,{1,1},C}],
         test_string(L, Ts)
     end || C <- lists:seq(0, 255) -- No],
    [begin
         L = "'$\\" ++ [C] ++ "'",
         Ts = [{atom,{1,1},list_to_atom("$"++[C])}],
         test_string(L, Ts)
     end || C <- lists:seq(0, 255) -- No],
    test_string("\"\\013a\\\n\"", [{string,{1,1},"\va\n"}]),
    test_string("'\n'", [{atom,{1,1},'\n'}]),
    test_string("\"\n\a\"", [{string,{1,1},"\na"}]),
    %% No escape
    [begin
         L = "$" ++ [C],
         Ts = [{char,{1,1},C}],
         test_string(L, Ts)
     end || C <- lists:seq(0, 255) -- (No ++ [$\\])],
    test_string("$\n", [{char,{1,1},$\n}]),
    {error,{{1,1},erl_scan,char},{1,4}} =
        erl_scan:string("$\\^",{1,1}),
    test_string("$\\\n", [{char,{1,1},$\n}]),
    %% Robert's scanner returns line 1:
    test_string("$\\\n", [{char,{1,1},$\n}]),
    test_string("$\n\n", [{char,{1,1},$\n}]),
    test("$\n\n"),
    ok.
%% Variable tokens: leading '_', '@' and digits inside names, and
%% Latin-1 letters in variable names.
variables() ->
    Cases =
        [{" \237_Aouåeiyäö", [{var,{1,7},'_Aouåeiyäö'}]},
         {"A_b_c@",          [{var,{1,1},'A_b_c@'}]},
         {"V@2",             [{var,{1,1},'V@2'}]},
         {"ABDÀ",            [{var,{1,1},'ABDÀ'}]},
         {"Ärlig Östen",     [{var,{1,1},'Ärlig'},{var,{1,7},'Östen'}]}],
    lists:foreach(fun({String, Expected}) ->
                          test_string(String, Expected)
                  end, Cases),
    ok.
%% Handing eof to erl_scan:tokens/3 in various intermediate states.
eof() ->
    {done,{eof,1},eof} = erl_scan:tokens([], eof, 1),
    {more, C1} = erl_scan:tokens([]," \n", 1),
    {done,{eof,2},eof} = erl_scan:tokens(C1, eof, 1),
    {more, C2} = erl_scan:tokens([], "abra", 1),
    %% An error before R13A.
    %% {done,Err={error,{1,erl_scan,scan},1},eof} =
    {done,{ok,[{atom,1,abra}],1},eof} =
        erl_scan_tokens(C2, eof, 1),
    %% With column.
    {more, C3} = erl_scan:tokens([]," \n",{1,1}),
    {done,{eof,{2,1}},eof} = erl_scan:tokens(C3, eof, 1),
    {more, C4} = erl_scan:tokens([], "abra", {1,1}),
    %% An error before R13A.
    %% {done,{error,{{1,1},erl_scan,scan},{1,5}},eof} =
    {done,{ok,[{atom,_,abra}],{1,5}},eof} =
        erl_scan_tokens(C4, eof, 1),
    %% Robert's scanner returns " " as LeftoverChars;
    %% the R12B scanner returns eof as LeftoverChars: (eof is correct)
    {more, C5} = erl_scan:tokens([], "a", 1),
    %% An error before R13A.
    %% {done,{error,{1,erl_scan,scan},1},eof} =
    {done,{ok,[{atom,1,a}],1},eof} =
        erl_scan_tokens(C5,eof,1),
    %% With column.
    {more, C6} = erl_scan:tokens([], "a", {1,1}),
    %% An error before R13A.
    %% {done,{error,{1,erl_scan,scan},1},eof} =
    {done,{ok,[{atom,{1,1},a}],{1,2}},eof} =
        erl_scan_tokens(C6,eof,1),
    %% A dot followed by eof is special:
    {more, C} = erl_scan:tokens([], "a.", 1),
    {done,{ok,[{atom,1,a},{dot,1}],1},eof} = erl_scan_tokens(C,eof,1),
    {ok,[{atom,1,foo},{dot,1}],1} = erl_scan_string("foo."),
    %% With column.
    {more, CCol} = erl_scan:tokens([], "a.", {1,1}),
    {done,{ok,[{atom,{1,1},a},{dot,{1,2}}],{1,3}},eof} =
        erl_scan_tokens(CCol,eof,1),
    {ok,[{atom,{1,1},foo},{dot,{1,4}}],{1,5}} =
        erl_scan_string("foo.", {1,1}, []),
    ok.
%% Tokens that exceed internal limits ({illegal,atom/var/float}) or
%% contain illegal characters, with and without column numbers.
illegal() ->
    Atom = lists:duplicate(1000, $a),
    {error,{1,erl_scan,{illegal,atom}},1} = erl_scan:string(Atom),
    {done,{error,{1,erl_scan,{illegal,atom}},1},". "} =
        erl_scan:tokens([], Atom++". ", 1),
    QAtom = "'" ++ Atom ++ "'",
    {error,{1,erl_scan,{illegal,atom}},1} = erl_scan:string(QAtom),
    {done,{error,{1,erl_scan,{illegal,atom}},1},". "} =
        erl_scan:tokens([], QAtom++". ", 1),
    Var = lists:duplicate(1000, $A),
    {error,{1,erl_scan,{illegal,var}},1} = erl_scan:string(Var),
    {done,{error,{1,erl_scan,{illegal,var}},1},". "} =
        erl_scan:tokens([], Var++". ", 1),
    Float = "1" ++ lists:duplicate(400, $0) ++ ".0",
    {error,{1,erl_scan,{illegal,float}},1} = erl_scan:string(Float),
    {done,{error,{1,erl_scan,{illegal,float}},1},". "} =
        erl_scan:tokens([], Float++". ", 1),
    String = "\"43\\x{aaaaaa}34\"",
    {error,{1,erl_scan,{illegal,character}},1} = erl_scan:string(String),
    {done,{error,{1,erl_scan,{illegal,character}},1},"34\". "} =
        %% Would be nice if `34\"' were skipped...
        %% Maybe, but then the LeftOverChars would not be the characters
        %% immediately following the end location of the error.
        erl_scan:tokens([], String++". ", 1),
    {error,{{1,1},erl_scan,{illegal,atom}},{1,1001}} =
        erl_scan:string(Atom, {1,1}),
    {done,{error,{{1,5},erl_scan,{illegal,atom}},{1,1005}},". "} =
        erl_scan:tokens([], "foo "++Atom++". ", {1,1}),
    {error,{{1,1},erl_scan,{illegal,atom}},{1,1003}} =
        erl_scan:string(QAtom, {1,1}),
    {done,{error,{{1,5},erl_scan,{illegal,atom}},{1,1007}},". "} =
        erl_scan:tokens([], "foo "++QAtom++". ", {1,1}),
    {error,{{1,1},erl_scan,{illegal,var}},{1,1001}} =
        erl_scan:string(Var, {1,1}),
    {done,{error,{{1,5},erl_scan,{illegal,var}},{1,1005}},". "} =
        erl_scan:tokens([], "foo "++Var++". ", {1,1}),
    {error,{{1,1},erl_scan,{illegal,float}},{1,404}} =
        erl_scan:string(Float, {1,1}),
    {done,{error,{{1,5},erl_scan,{illegal,float}},{1,408}},". "} =
        erl_scan:tokens([], "foo "++Float++". ", {1,1}),
    {error,{{1,4},erl_scan,{illegal,character}},{1,14}} =
        erl_scan:string(String, {1,1}),
    {done,{error,{{1,4},erl_scan,{illegal,character}},{1,14}},"34\". "} =
        erl_scan:tokens([], String++". ", {1,1}),
    %% GH-6477. Test for illegal characters in caret notation.
    _ = [begin
             S = [$$,$\\,$^,C],
             {error,{1,erl_scan,{illegal,character}},1} = erl_scan:string(S)
         end || C <- lists:seq(0, 16#3e) ++ [16#60] ++ lists:seq($z+1, 16#10ffff)],
    ok.
%% Input with negative, too-large, or non-integer "characters" must
%% make erl_scan exit rather than return a result.
crashes() ->
    {'EXIT',_} = (catch {foo, erl_scan:string([-1])}), % type error
    {'EXIT',_} = (catch erl_scan:string("'a" ++ [999999999] ++ "c'")),
    {'EXIT',_} = (catch {foo, erl_scan:string("$"++[-1])}),
    {'EXIT',_} = (catch {foo, erl_scan:string("$\\"++[-1])}),
    {'EXIT',_} = (catch {foo, erl_scan:string("$\\^"++[-1])}),
    {'EXIT',_} = (catch {foo, erl_scan:string([$",-1,$"],{1,1})}),
    {'EXIT',_} = (catch {foo, erl_scan:string("\"\\v"++[-1,$"])}), %$"
    {'EXIT',_} = (catch {foo, erl_scan:string([$",-1,$"])}),
    {'EXIT',_} = (catch {foo, erl_scan:string("% foo"++[-1])}),
    {'EXIT',_} =
        (catch {foo, erl_scan:string("% foo"++[-1],{1,1})}),
    %% Atoms in place of characters.
    {'EXIT',_} = (catch {foo, erl_scan:string([a])}), % type error
    {'EXIT',_} = (catch {foo, erl_scan:string("$"++[a])}),
    {'EXIT',_} = (catch {foo, erl_scan:string("$\\"++[a])}),
    {'EXIT',_} = (catch {foo, erl_scan:string("$\\^"++[a])}),
    {'EXIT',_} = (catch {foo, erl_scan:string([$",a,$"],{1,1})}),
    {'EXIT',_} = (catch {foo, erl_scan:string("\"\\v"++[a,$"])}), %$"
    {'EXIT',_} = (catch {foo, erl_scan:string([$",a,$"])}),
    {'EXIT',_} = (catch {foo, erl_scan:string("% foo"++[a])}),
    {'EXIT',_} =
        (catch {foo, erl_scan:string("% foo"++[a],{1,1})}),
    {'EXIT',_} = (catch {foo, erl_scan:string([3.0])}), % type error
    {'EXIT',_} = (catch {foo, erl_scan:string("A" ++ [999999999])}),
    ok.
%% The options argument of erl_scan:string/3: return/return_comments/
%% return_white_spaces, reserved_word_fun, and bad locations/options.
options() ->
    %% line and column are not options, but tested here
    {ok,[{atom,1,foo},{white_space,1," "},{comment,1,"% bar"}], 1} =
        erl_scan_string("foo % bar", 1, return),
    {ok,[{atom,1,foo},{white_space,1," "}],1} =
        erl_scan_string("foo % bar", 1, return_white_spaces),
    {ok,[{atom,1,foo},{comment,1,"% bar"}],1} =
        erl_scan_string("foo % bar", 1, return_comments),
    {ok,[{atom,17,foo}],17} =
        erl_scan_string("foo % bar", 17),
    {'EXIT',{function_clause,_}} =
        (catch {foo,
                erl_scan:string("foo % bar", {a,1}, [])}), % type error
    {ok,[{atom,_,foo}],{17,18}} =
        erl_scan_string("foo % bar", {17,9}, []),
    {'EXIT',{function_clause,_}} =
        (catch {foo,
                erl_scan:string("foo % bar", {1,0}, [])}), % type error
    %% A reserved_word_fun makes matching atoms reserved words.
    {ok,[{foo,1}],1} =
        erl_scan_string("foo % bar",1, [{reserved_word_fun,
                                         fun(W) -> W =:= foo end}]),
    {'EXIT',{badarg,_}} =
        (catch {foo,
                erl_scan:string("foo % bar",1, % type error
                                [{reserved_word_fun,
                                  fun(W,_) -> W =:= foo end}])}),
    ok.
%% Start locations other than {1,1}, unknown options, and the text option.
more_options() ->
    {ok,[{atom,_,foo}=T1],{19,20}} =
        erl_scan:string("foo", {19,17},[]),
    {19,17} = erl_scan:location(T1),
    %% Unknown options are ignored.
    {done,{ok,[{atom,_,foo}=T2,{dot,_}],{19,22}},[]} =
        erl_scan:tokens([], "foo. ", {19,17}, [bad_opt]), % type error
    {19,17} = erl_scan:location(T2),
    {ok,[{atom,_,foo}=T3],{19,20}} =
        erl_scan:string("foo", {19,17},[text]),
    {19,17} = erl_scan:location(T3),
    "foo" = erl_scan:text(T3),
    {ok,[{atom,_,foo}=T4],1} = erl_scan:string("foo", 1, [text]),
    1 = erl_scan:line(T4),
    1 = erl_scan:location(T4),
    "foo" = erl_scan:text(T4),
    ok.
%% The token accessors erl_scan:category/1, symbol/1, line/1,
%% column/1, text/1, and location/1.
token_info() ->
    {ok,[T1],_} = erl_scan:string("foo", {1,18}, [text]),
    {'EXIT',{badarg,_}} =
        (catch {foo, erl_scan:category(foo)}), % type error
    {'EXIT',{badarg,_}} =
        (catch {foo, erl_scan:symbol(foo)}), % type error
    atom = erl_scan:category(T1),
    foo = erl_scan:symbol(T1),
    %% Without [text], column and text are undefined.
    {ok,[T2],_} = erl_scan:string("foo", 1, []),
    1 = erl_scan:line(T2),
    undefined = erl_scan:column(T2),
    undefined = erl_scan:text(T2),
    1 = erl_scan:location(T2),
    %% For punctuation tokens, category and symbol coincide.
    {ok,[T3],_} = erl_scan:string("=", 1, []),
    '=' = erl_scan:category(T3),
    '=' = erl_scan:symbol(T3),
    ok.
%% Annotation accessors: location/end_location/line/column/text for
%% tokens scanned with line-only and {Line,Column} start locations.
anno_info() ->
    {'EXIT',_} =
        (catch {foo,erl_scan:line(foo)}), % type error
    {ok,[{atom,_,foo}=T0],_} = erl_scan:string("foo", 19, [text]),
    19 = erl_scan:location(T0),
    19 = erl_scan:end_location(T0),
    {ok,[{atom,_,foo}=T3],_} = erl_scan:string("foo", {1,3}, [text]),
    1 = erl_scan:line(T3),
    3 = erl_scan:column(T3),
    {1,3} = erl_scan:location(T3),
    {1,6} = erl_scan:end_location(T3),
    "foo" = erl_scan:text(T3),
    {ok,[{atom,_,foo}=T4],_} = erl_scan:string("foo", 2, [text]),
    2 = erl_scan:line(T4),
    undefined = erl_scan:column(T4),
    2 = erl_scan:location(T4),
    "foo" = erl_scan:text(T4),
    {ok,[{atom,_,foo}=T5],_} = erl_scan:string("foo", {1,3}, []),
    1 = erl_scan:line(T5),
    3 = erl_scan:column(T5),
    {1,3} = erl_scan:location(T5),
    undefined = erl_scan:text(T5),
    ok.
%% Error locations (with column) for unterminated atoms, strings,
%% and character literals.
column_errors() ->
    {error,{{1,1},erl_scan,{string,$',""}},{1,3}} = % $'
        erl_scan:string("'\\",{1,1}),
    {error,{{1,1},erl_scan,{string,$",""}},{1,3}} = % $"
        erl_scan:string("\"\\",{1,1}),
    {error,{{1,1},erl_scan,{string,$',""}},{1,2}} = % $'
        erl_scan:string("'",{1,1}),
    {error,{{1,1},erl_scan,{string,$",""}},{1,2}} = % $"
        erl_scan:string("\"",{1,1}),
    {error,{{1,1},erl_scan,char},{1,2}} =
        erl_scan:string("$",{1,1}),
    %% Only a 16-character prefix of the unterminated token is reported.
    {error,{{1,2},erl_scan,{string,$',"1234567890123456"}},{1,20}} = %'
        erl_scan:string(" '12345678901234567", {1,1}),
    {error,{{1,2},erl_scan,{string,$',"123456789012345 "}}, {1,20}} = %'
        erl_scan:string(" '123456789012345\\s", {1,1}),
    {error,{{1,2},erl_scan,{string,$","1234567890123456"}},{1,20}} = %"
        erl_scan:string(" \"12345678901234567", {1,1}),
    {error,{{1,2},erl_scan,{string,$","123456789012345 "}}, {1,20}} = %"
        erl_scan:string(" \"123456789012345\\s", {1,1}),
    {error,{{1,2},erl_scan,{string,$',"1234567890123456"}},{2,1}} = %'
        erl_scan:string(" '12345678901234567\n", {1,1}),
    ok.
%% White space tokens: \r, \n, tabs, spaces, and control characters,
%% scanned with the return option.
white_spaces() ->
    {ok,[{white_space,_,"\r"},
         {white_space,_," "},
         {atom,_,a},
         {white_space,_,"\n"}],
     _} = erl_scan_string("\r a\n", {1,1}, return),
    test("\r a\n"),
    L = "{\"a\nb\", \"a\\nb\",\nabc\r,def}.\n\n",
    {ok,[{'{',_},
         {string,_,"a\nb"},
         {',',_},
         {white_space,_," "},
         {string,_,"a\nb"},
         {',',_},
         {white_space,_,"\n"},
         {atom,_,abc},
         {white_space,_,"\r"},
         {',',_},
         {atom,_,def},
         {'}',_},
         {dot,_},
         {white_space,_,"\n"}],
     _} = erl_scan_string(L, {1,1}, return),
    test(L),
    test("\"\n\"\n"),
    test("\n\r\n"),
    test("\n\r"),
    test("\r\n"),
    test("\n\f"),
    %% Runs of tabs and spaces, with and without a leading newline.
    [test(lists:duplicate(N, $\t)) || N <- lists:seq(1, 20)],
    [test([$\n|lists:duplicate(N, $\t)]) || N <- lists:seq(1, 20)],
    [test(lists:duplicate(N, $\s)) || N <- lists:seq(1, 20)],
    [test([$\n|lists:duplicate(N, $\s)]) || N <- lists:seq(1, 20)],
    test("\v\f\n\v "),
    test("\n\e\n\b\f\n\da\n"),
    ok.
%% Unicode code points in atoms, chars, strings, and comments.
unicode() ->
    {ok,[{char,1,83},{integer,1,45}],1} =
        erl_scan_string("$\\12345"), % not unicode
    {error,{1,erl_scan,{illegal,character}},1} =
        erl_scan:string([1089]),
    {error,{{1,1},erl_scan,{illegal,character}},{1,2}} =
        erl_scan:string([1089], {1,1}),
    {error,{{1,1},erl_scan,{illegal,character}},{1,2}} =
        erl_scan:string([16#D800], {1,1}),
    test("\"a"++[1089]++"b\""),
    {error,{1,erl_scan,{illegal,character}},1} =
        erl_scan_string([$$,$\\,$^,1089], 1),
    {error,{1,erl_scan,Error},1} =
        erl_scan:string("\"qa\x{aaa}", 1),
    "unterminated string starting with \"qa"++[2730]++"\"" =
        erl_scan:format_error(Error),
    {error,{{1,1},erl_scan,_},{1,11}} =
        erl_scan:string("\"qa\\x{aaa}",{1,1}),
    {error,{{1,1},erl_scan,_},{1,11}} =
        erl_scan:string("'qa\\x{aaa}",{1,1}),
    {ok,[{char,1,1089}],1} =
        erl_scan_string([$$,1089], 1),
    {ok,[{char,1,1089}],1} =
        erl_scan_string([$$,$\\,1089], 1),
    Qs = "$\\x{aaa}",
    {ok,[{char,1,$\x{aaa}}],1} =
        erl_scan_string(Qs, 1),
    {ok,[Q2],{1,9}} =
        erl_scan:string("$\\x{aaa}", {1,1}, [text]),
    [{category,char},{column,1},{line,1},{symbol,16#aaa},{text,Qs}] =
        token_info_long(Q2),
    U1 = "\"\\x{aaa}\"",
    {ok,[{string,_,[2730]}=T1],{1,10}} = erl_scan:string(U1, {1,1}, [text]),
    {1,1} = erl_scan:location(T1),
    "\"\\x{aaa}\"" = erl_scan:text(T1),
    {ok,[{string,1,[2730]}],1} = erl_scan_string(U1, 1),
    U2 = "\"\\x41\\x{fff}\\x42\"",
    {ok,[{string,1,[$\x41,$\x{fff},$\x42]}],1} = erl_scan_string(U2, 1),
    U3 = "\"a\n\\x{fff}\n\"",
    {ok,[{string,1,[$a,$\n,$\x{fff},$\n]}],3} = erl_scan_string(U3, 1),
    U4 = "\"\n\\x{aaa}\n\"",
    {ok,[{string,1,[$\n,$\x{aaa},$\n]}],3} = erl_scan_string(U4, 1),
    %% Keep these tests:
    test(Qs),
    test(U1),
    test(U2),
    test(U3),
    test(U4),
    Str1 = "\"ab" ++ [1089] ++ "cd\"",
    {ok,[{string,1,[$a,$b,1089,$c,$d]}],1} = erl_scan_string(Str1, 1),
    {ok,[{string,{1,1},[$a,$b,1089,$c,$d]}],{1,8}} =
        erl_scan_string(Str1, {1,1}),
    test(Str1),
    Comment = "%% "++[1089],
    {ok,[{comment,1,[$%,$%,$\s,1089]}],1} =
        erl_scan_string(Comment, 1, [return]),
    %% NOTE(review): the left-hand side of the next match was garbled
    %% in this copy of the file; restored from the OTP test suite.
    {ok,[{comment,{1,1},[$%,$%,$\s,1089]}],{1,5}} =
        erl_scan_string(Comment, {1,1}, [return]),
    ok.
%% Character escapes added with Unicode support.
more_chars() ->
    %% Due to unicode, the syntax has been incompatibly augmented:
    %% $\x{...}, $\xHH
    %% All kinds of tests...
    {ok,[{char,_,123}],{1,4}} =
        erl_scan_string("$\\{",{1,1}),
    {more, C1} = erl_scan:tokens([], "$\\{", {1,1}),
    {done,{ok,[{char,_,123}],{1,4}},eof} =
        erl_scan_tokens(C1, eof, 1),
    {ok,[{char,1,123},{atom,1,a},{'}',1}],1} =
        erl_scan_string("$\\{a}"),
    {error,{{1,1},erl_scan,char},{1,4}} =
        erl_scan:string("$\\x", {1,1}),
    {error,{{1,1},erl_scan,char},{1,5}} =
        erl_scan:string("$\\x{",{1,1}),
    {more, C3} = erl_scan:tokens([], "$\\x", {1,1}),
    {done,{error,{{1,1},erl_scan,char},{1,4}},eof} =
        erl_scan:tokens(C3, eof, 1),
    {error,{{1,1},erl_scan,char},{1,5}} =
        erl_scan:string("$\\x{",{1,1}),
    {more, C2} = erl_scan:tokens([], "$\\x{", {1,1}),
    {done,{error,{{1,1},erl_scan,char},{1,5}},eof} =
        erl_scan:tokens(C2, eof, 1),
    {error,{1,erl_scan,{illegal,character}},1} =
        erl_scan:string("$\\x{g}"),
    {error,{{1,1},erl_scan,{illegal,character}},{1,5}} =
        erl_scan:string("$\\x{g}", {1,1}),
    {error,{{1,1},erl_scan,{illegal,character}},{1,6}} =
        erl_scan:string("$\\x{}",{1,1}),
    test("\"\\{0}\""),
    test("\"\\x{0}\""),
    test("\'\\{0}\'"),
    test("\'\\x{0}\'"),
    {error,{{2,3},erl_scan,{illegal,character}},{2,6}} =
        erl_scan:string("\"ab \n $\\x{g}\"",{1,1}),
    {error,{{2,3},erl_scan,{illegal,character}},{2,6}} =
        erl_scan:string("\'ab \n $\\x{g}\'",{1,1}),
    test("$\\{34}"),
    test("$\\x{34}"),
    test("$\\{377}"),
    test("$\\x{FF}"),
    test("$\\{400}"),
    test("$\\x{100}"),
    test("$\\x{10FFFF}"),
    test("$\\x{10ffff}"),
    test("\"$\n \\{1}\""),
    %% Code points above 16#10FFFF are illegal.
    {error,{1,erl_scan,{illegal,character}},1} =
        erl_scan:string("$\\x{110000}"),
    {error,{{1,1},erl_scan,{illegal,character}},{1,12}} =
        erl_scan:string("$\\x{110000}", {1,1}),
    {error,{{1,1},erl_scan,{illegal,character}},{1,4}} =
        erl_scan:string("$\\xfg", {1,1}),
    test("$\\xffg"),
    {error,{{1,1},erl_scan,{illegal,character}},{1,4}} =
        erl_scan:string("$\\xg", {1,1}),
    ok.
%% OTP-10302. Unicode characters scanner/parser.
otp_10302(Config) when is_list(Config) ->
    %% From unicode():
    {ok,[{atom,1,'aсb'}],1} =
        erl_scan_string("'a"++[1089]++"b'", 1),
    {ok,[{atom,{1,1},'qaપ'}],{1,12}} =
        erl_scan_string("'qa\\x{aaa}'",{1,1}),
    {ok,[{char,1,1089}],1} = erl_scan_string([$$,1089], 1),
    {ok,[{char,1,1089}],1} = erl_scan_string([$$,$\\,1089],1),
    Qs = "$\\x{aaa}",
    {ok,[{char,1,2730}],1} = erl_scan_string(Qs, 1),
    {ok,[Q2],{1,9}} = erl_scan:string(Qs,{1,1},[text]),
    [{category,char},{column,1},{line,1},{symbol,16#aaa},{text,Qs}] =
        token_info_long(Q2),
    U1 = "\"\\x{aaa}\"",
    {ok,[T1],{1,10}} = erl_scan:string(U1, {1,1}, [text]),
    [{category,string},{column,1},{line,1},{symbol,[16#aaa]},{text,U1}] =
        token_info_long(T1),
    U2 = "\"\\x41\\x{fff}\\x42\"",
    {ok,[{string,1,[65,4095,66]}],1} = erl_scan_string(U2, 1),
    U3 = "\"a\n\\x{fff}\n\"",
    {ok,[{string,1,[97,10,4095,10]}],3} = erl_scan_string(U3, 1),
    U4 = "\"\n\\x{aaa}\n\"",
    {ok,[{string,1,[10,2730,10]}],3} = erl_scan_string(U4, 1,[]),
    Str1 = "\"ab" ++ [1089] ++ "cd\"",
    {ok,[{string,1,[97,98,1089,99,100]}],1} =
        erl_scan_string(Str1,1),
    {ok,[{string,{1,1},[97,98,1089,99,100]}],{1,8}} =
        erl_scan_string(Str1, {1,1}),
    %% Code points just outside the surrogate range and around
    %% 16#FFFE/16#FFFF are legal; those inside are illegal.
    OK1 = 16#D800-1,
    OK2 = 16#DFFF+1,
    OK3 = 16#FFFE-1,
    OK4 = 16#FFFF+1,
    OKL = [OK1,OK2,OK3,OK4],
    Illegal1 = 16#D800,
    Illegal2 = 16#DFFF,
    Illegal3 = 16#FFFE,
    Illegal4 = 16#FFFF,
    IllegalL = [Illegal1,Illegal2,Illegal3,Illegal4],
    [{ok,[{comment,1,[$%,$%,$\s,OK]}],1} =
         erl_scan_string("%% "++[OK], 1, [return]) ||
        OK <- OKL],
    {ok,[{comment,_,[$%,$%,$\s,OK1]}],{1,5}} =
        erl_scan_string("%% "++[OK1], {1,1}, [return]),
    [{error,{1,erl_scan,{illegal,character}},1} =
         erl_scan:string("%% "++[Illegal], 1, [return]) ||
        Illegal <- IllegalL],
    {error,{{1,1},erl_scan,{illegal,character}},{1,5}} =
        erl_scan:string("%% "++[Illegal1], {1,1}, [return]),
    [{ok,[],1} = erl_scan_string("%% "++[OK], 1, []) ||
        OK <- OKL],
    {ok,[],{1,5}} = erl_scan_string("%% "++[OK1], {1,1}, []),
    [{error,{1,erl_scan,{illegal,character}},1} =
         erl_scan:string("%% "++[Illegal], 1, []) ||
        Illegal <- IllegalL],
    {error,{{1,1},erl_scan,{illegal,character}},{1,5}} =
        erl_scan:string("%% "++[Illegal1], {1,1}, []),
    [{ok,[{string,{1,1},[OK]}],{1,4}} =
         erl_scan_string("\""++[OK]++"\"",{1,1}) ||
        OK <- OKL],
    [{error,{{1,2},erl_scan,{illegal,character}},{1,3}} =
         erl_scan:string("\""++[OK]++"\"",{1,1}) ||
        OK <- IllegalL],
    [{error,{{1,1},erl_scan,{illegal,character}},{1,2}} =
         erl_scan:string([Illegal],{1,1}) ||
        Illegal <- IllegalL],
    {ok,[{char,{1,1},OK1}],{1,3}} =
        erl_scan_string([$$,OK1],{1,1}),
    {error,{{1,1},erl_scan,{illegal,character}},{1,2}} =
        erl_scan:string([$$,Illegal1],{1,1}),
    {ok,[{char,{1,1},OK1}],{1,4}} =
        erl_scan_string([$$,$\\,OK1],{1,1}),
    {error,{{1,1},erl_scan,{illegal,character}},{1,4}} =
        erl_scan:string([$$,$\\,Illegal1],{1,1}),
    {ok,[{string,{1,1},[55295]}],{1,5}} =
        erl_scan_string("\"\\"++[OK1]++"\"",{1,1}),
    {error,{{1,2},erl_scan,{illegal,character}},{1,4}} =
        erl_scan:string("\"\\"++[Illegal1]++"\"",{1,1}),
    {ok,[{char,{1,1},OK1}],{1,10}} =
        erl_scan_string("$\\x{D7FF}",{1,1}),
    {error,{{1,1},erl_scan,{illegal,character}},{1,10}} =
        erl_scan:string("$\\x{D800}",{1,1}),
    %% Not erl_scan, but erl_parse.
    {integer,0,1} = erl_parse_abstract(1),
    Float = 3.14, {float,0,Float} = erl_parse_abstract(Float),
    {nil,0} = erl_parse_abstract([]),
    {bin,0,
     [{bin_element,0,{integer,0,1},default,default},
      {bin_element,0,{integer,0,2},default,default}]} =
        erl_parse_abstract(<<1,2>>),
    {cons,0,{tuple,0,[{atom,0,a}]},{atom,0,b}} =
        erl_parse_abstract([{a} | b]),
    {string,0,"str"} = erl_parse_abstract("str"),
    {cons,0,
     {integer,0,$a},
     {cons,0,{integer,0,55296},{string,0,"c"}}} =
        erl_parse_abstract("a"++[55296]++"c"),
    %% abstract/2 with an explicit line:
    Line = 17,
    {integer,Line,1} = erl_parse_abstract(1, Line),
    Float = 3.14, {float,Line,Float} = erl_parse_abstract(Float, Line),
    {nil,Line} = erl_parse_abstract([], Line),
    {bin,Line,
     [{bin_element,Line,{integer,Line,1},default,default},
      {bin_element,Line,{integer,Line,2},default,default}]} =
        erl_parse_abstract(<<1,2>>, Line),
    {cons,Line,{tuple,Line,[{atom,Line,a}]},{atom,Line,b}} =
        erl_parse_abstract([{a} | b], Line),
    {string,Line,"str"} = erl_parse_abstract("str", Line),
    {cons,Line,
     {integer,Line,$a},
     {cons,Line,{integer,Line,55296},{string,Line,"c"}}} =
        erl_parse_abstract("a"++[55296]++"c", Line),
    %% abstract/2 with a {line,Line} option:
    Opts1 = [{line,17}],
    {integer,Line,1} = erl_parse_abstract(1, Opts1),
    Float = 3.14, {float,Line,Float} = erl_parse_abstract(Float, Opts1),
    {nil,Line} = erl_parse_abstract([], Opts1),
    {bin,Line,
     [{bin_element,Line,{integer,Line,1},default,default},
      {bin_element,Line,{integer,Line,2},default,default}]} =
        erl_parse_abstract(<<1,2>>, Opts1),
    {cons,Line,{tuple,Line,[{atom,Line,a}]},{atom,Line,b}} =
        erl_parse_abstract([{a} | b], Opts1),
    {string,Line,"str"} = erl_parse_abstract("str", Opts1),
    {cons,Line,
     {integer,Line,$a},
     {cons,Line,{integer,Line,55296},{string,Line,"c"}}} =
        erl_parse_abstract("a"++[55296]++"c", Opts1),
    %% abstract/2 with an {encoding,Encoding} option:
    [begin
         {integer,Line,1} = erl_parse_abstract(1, Opts2),
         Float = 3.14, {float,Line,Float} = erl_parse_abstract(Float, Opts2),
         {nil,Line} = erl_parse_abstract([], Opts2),
         {bin,Line,
          [{bin_element,Line,{integer,Line,1},default,default},
           {bin_element,Line,{integer,Line,2},default,default}]} =
             erl_parse_abstract(<<1,2>>, Opts2),
         {cons,Line,{tuple,Line,[{atom,Line,a}]},{atom,Line,b}} =
             erl_parse_abstract([{a} | b], Opts2),
         {string,Line,"str"} = erl_parse_abstract("str", Opts2),
         {string,Line,[97,1024,99]} =
             erl_parse_abstract("a"++[1024]++"c", Opts2)
     end || Opts2 <- [[{encoding,unicode},{line,Line}],
                      [{encoding,utf8},{line,Line}]]],
    {cons,0,
     {integer,0,97},
     {cons,0,{integer,0,1024},{string,0,"c"}}} =
        erl_parse_abstract("a"++[1024]++"c", [{encoding,latin1}]),
    ok.
%% OTP-10990. Floating point number in input string.
otp_10990(Config) when is_list(Config) ->
    %% A float in the character list must make erl_scan exit.
    {'EXIT',_} = (catch {foo, erl_scan:string([$",42.0,$"],1)}),
    ok.
%% OTP-10992. List of floats to abstract format.
otp_10992(Config) when is_list(Config) ->
    %% Lists containing floats are abstracted as cons cells, not strings.
    {cons,0,{float,0,42.0},{nil,0}} =
        erl_parse_abstract([42.0], [{encoding,unicode}]),
    {cons,0,{float,0,42.0},{nil,0}} =
        erl_parse_abstract([42.0], [{encoding,utf8}]),
    {cons,0,{integer,0,65},{cons,0,{float,0,42.0},{nil,0}}} =
        erl_parse_abstract([$A,42.0], [{encoding,unicode}]),
    {cons,0,{integer,0,65},{cons,0,{float,0,42.0},{nil,0}}} =
        erl_parse_abstract([$A,42.0], [{encoding,utf8}]),
    ok.
%% OTP-11807. erl_parse:abstract/2.
otp_11807(Config) when is_list(Config) ->
    {cons,0,{integer,0,97},{cons,0,{integer,0,98},{nil,0}}} =
        erl_parse_abstract("ab", [{encoding,none}]),
    {cons,0,{integer,0,-1},{nil,0}} =
        erl_parse_abstract([-1], [{encoding,latin1}]),
    %% The encoding may be given as a predicate fun.
    ASCII = fun(I) -> I >= 0 andalso I < 128 end,
    {string,0,"xyz"} = erl_parse_abstract("xyz", [{encoding,ASCII}]),
    {cons,0,{integer,0,228},{nil,0}} =
        erl_parse_abstract([228], [{encoding,ASCII}]),
    {cons,0,{integer,0,97},{atom,0,a}} =
        erl_parse_abstract("a"++a, [{encoding,latin1}]),
    {'EXIT', {{badarg,bad},_}} = % minor backward incompatibility
        (catch erl_parse:abstract("string", [{encoding,bad}])),
    ok.
%% OTP-16480. An external fun survives an abstract/normalise round-trip.
otp_16480(Config) when is_list(Config) ->
    F = fun mod:func/19,
    F = erl_parse:normalise(erl_parse_abstract(F)),
    ok.
%% OTP-17024. The {location,Location} option of erl_parse:abstract/2,
%% and a bare {Line,Column} location as the second argument.
otp_17024(Config) when is_list(Config) ->
    Line = 17,
    Opts1 = [{location,Line}],
    {integer,Line,1} = erl_parse_abstract(1, Opts1),
    Location = {17, 42},
    {integer,Location,1} = erl_parse_abstract(1, Location),
    Opts2 = [{location,Location}],
    {integer,Location,1} = erl_parse_abstract(1, Opts2),
    ok.
%% The {text_fun, Fun} option: keep token text selectively, based on
%% the token category and the scanned text.
text_fun(Config) when is_list(Config) ->
    KeepClass = fun(Class) ->
                        fun(C, _) -> C == Class end
                end,
    Join = fun(L, S) -> string:join(L, S) end,
    String = fun(L) -> Join(L, " ") end,
    TextAtom = KeepClass(atom),
    TextInt = KeepClass(integer),
    %% Keep text for integers written with a base.
    TextBase = fun(C, S) ->
                       C == integer andalso string:find(S, "#") /= nomatch
               end,
    %% Keep text for long strings, regardless of class
    TextLong = fun(_, S) -> length(S) > 10 end,
    Texts = fun(Toks) -> [erl_scan:text(T) || T <- Toks] end,
    Values = fun(Toks) -> [erl_scan:symbol(T) || T <- Toks] end,
    Atom1 = "foo",
    Atom2 = "'this is a long atom'",
    Int1 = "42",
    Int2 = "16#10",
    Int3 = "8#20",
    Int4 = "16",
    Int5 = "12345678901234567890",
    String1 = "\"A String\"",
    String2 = "\"guitar string\"",
    Name1 = "Short",
    Name2 = "LongAndDescriptiveName",
    Sep1 = "{",
    Sep2 = "+",
    Sep3 = "]",
    Sep4 = "/",
    All = [Atom1, Atom2, Int1, Int2, Int3, Int4, Int5,
           String1, String2, Name1, Name2,
           Sep1, Sep2, Sep3, Sep4],
    %% Only the atom keeps its text; the integer's text is undefined.
    {ok, Tokens0, 2} =
        erl_scan:string(String([Atom1, Int1]), 2, [{text_fun, TextAtom}]),
    [Atom1, undefined] = Texts(Tokens0),
    [foo, 42] = Values(Tokens0),
    {ok, Tokens1, 3} =
        erl_scan:string(Join([Int2, Int3, Int4], "\n"), 1,
                        [{text_fun, TextInt}]),
    [Int2, Int3, Int4] = Texts(Tokens1),
    [16, 16, 16] = Values(Tokens1),
    TS = [Int2, String1, Atom1, Int3, Int4, String2],
    {ok, Tokens2, 6} =
        %% If text is present, we supply text for *all* tokens.
        erl_scan:string(Join(TS, "\n"), 1, [{text_fun, TextAtom}, text]),
    TS = Texts(Tokens2),
    [16, "A String", foo, 16, 16, "guitar string"] = Values(Tokens2),
    Ints = [Int1, Int2, Int3, Int4],
    {ok, Tokens3, 1} = erl_scan:string(String(Ints), 1, [{text_fun, TextBase}]),
    [undefined, Int2, Int3, undefined] = Texts(Tokens3),
    [42, 16, 16, 16] = Values(Tokens3),
    Longs = lists:filter(fun(S) -> length(S) > 10 end, All),
    {ok, Tokens4, 1} =
        erl_scan:string(String(All), 1, [{text_fun, TextLong}]),
    Longs = lists:filter(fun(T) -> T /= undefined end, Texts(Tokens4)),
    {ok, Tokens5, 7} =
        erl_scan:string(String(All), 7, [{text_fun, KeepClass('{')}]),
    [Sep1] = lists:filter(fun(T) -> T /= undefined end, Texts(Tokens5)).
%% Scans String both with {Line,Column} and line-only locations and
%% matches the results against ExpectedWithCol (and its line-only
%% projection); finally runs the generic consistency checks in test/1.
test_string(String, ExpectedWithCol) ->
    {ok, ExpectedWithCol, _EndWithCol} = erl_scan_string(String, {1, 1}, []),
    %% Strip the column from each expected annotation to obtain the
    %% line-only expectation.
    Expected = [ begin
                     {L,_C} = element(2, T),
                     setelement(2, T, L)
                 end
                 || T <- ExpectedWithCol ],
    {ok, Expected, _End} = erl_scan_string(String),
    test(String).
erl_scan_string(String) ->
erl_scan_string(String, 1, []).
erl_scan_string(String, StartLocation) ->
erl_scan_string(String, StartLocation, []).
%% Wrapper around erl_scan:string/3 that turns the opaque annotations of
%% the returned tokens into plain terms.  Any non-ok result is passed
%% through unchanged.
erl_scan_string(String, StartLocation, Options) ->
    Result = erl_scan:string(String, StartLocation, Options),
    case Result of
        {ok, Tokens, EndLocation} ->
            {ok, unopaque_tokens(Tokens), EndLocation};
        Error ->
            Error
    end.
%% Like erl_scan:tokens/3 (default options); annotations of completed
%% tokens are converted to plain terms (see erl_scan_tokens/4).
erl_scan_tokens(C, S, L) ->
    erl_scan_tokens(C, S, L, []).
%% Wrapper around erl_scan:tokens/4 that unopaques the annotations of a
%% completed token list.  Any other result is returned as is.
erl_scan_tokens(Cont, Chars, Loc, Opts) ->
    Result = erl_scan:tokens(Cont, Chars, Loc, Opts),
    case Result of
        {done, {ok, Ts, End}, Rest} ->
            {done, {ok, unopaque_tokens(Ts), End}, Rest};
        Other ->
            Other
    end.
%% Replace each token's opaque annotation with its plain-term form.
unopaque_tokens(Tokens) ->
    [setelement(2, Token, erl_anno:to_term(element(2, Token))) ||
        Token <- Tokens].
%% Abstract form of Term with default options; annotations are converted
%% to plain terms (see erl_parse_abstract/2).
erl_parse_abstract(Term) ->
    erl_parse_abstract(Term, []).
%% Build the abstract form of Term and convert its annotations to plain
%% terms (inlines unopaque_abstract/1, i.e. erl_parse:anno_to_term/1).
erl_parse_abstract(Term, Options) ->
    Abstract = erl_parse:abstract(Term, Options),
    erl_parse:anno_to_term(Abstract).
%% Convert the (opaque) annotations of an abstract form to plain terms.
unopaque_abstract(Abstr) ->
    erl_parse:anno_to_term(Abstr).
%% Scan String from StartLocation with Options and assert that exactly
%% the Expected tokens (and some end location) are produced.
%% NOTE(review): this span was garbled by extraction (spaces inserted
%% between tokens); restored to syntactically valid Erlang.
test_string(String, Expected, StartLocation, Options) ->
    {ok, Expected, _End} = erl_scan:string(String, StartLocation, Options).
%% test(String).
%% There are no checks of the tags...
%% Scan String with every combination of return_white_spaces /
%% return_comments and cross-check the resulting token lists, end
%% locations and texts against each other.
%% Fix: two comment lines had lost their %% markers during extraction,
%% leaving bare prose inside the function body (a syntax error); the
%% markers are restored.  All code is otherwise unchanged.
test(String) ->
    %% io:format("Testing `~ts'~n", [String]),
    [{Tokens, End},
     {Wtokens, Wend},
     {Ctokens, Cend},
     {CWtokens, CWend},
     {CWtokens2, _}] =
        [scan_string_with_column(String, X) ||
            X <- [[],
                  [return_white_spaces],
                  [return_comments],
                  [return],
                  [return]]], % for white space compaction test

    {end1,End,Wend} = {end1,Wend,End},
    {end2,Wend,Cend} = {end2,Cend,Wend},
    {end3,Cend,CWend} = {end3,CWend,Cend},

    %% Test that the tokens that are common to two token lists are identical.
    {none,Tokens} = {none, filter_tokens(CWtokens, [white_space,comment])},
    {comments,Ctokens} =
        {comments,filter_tokens(CWtokens, [white_space])},
    {white_spaces,Wtokens} =
        {white_spaces,filter_tokens(CWtokens, [comment])},

    %% Use token attributes to extract parts from the original string,
    %% and check that the parts are identical to the token strings.
    {Line,Column} = test_decorated_tokens(String, CWtokens),
    {deco,{Line,Column},End} = {deco,End,{Line,Column}},

    %% Almost the same again: concat texts to get the original:
    Text = get_text(CWtokens),
    {text,Text,String} = {text,String,Text},

    %% Test that white spaces occupy less heap than the worst case.
    ok = test_white_space_compaction(CWtokens, CWtokens2),

    %% Test that white newlines are always first in text:
    WhiteTokens = select_tokens(CWtokens, [white_space]),
    ok = newlines_first(WhiteTokens),

    %% Line attribute only:
    [Simple,Wsimple,Csimple,WCsimple] = Simples =
        [element(2, erl_scan:string(String, 1, Opts)) ||
            Opts <- [[],
                     [return_white_spaces],
                     [return_comments],
                     [return]]],
    {consistent,true} = {consistent,consistent_attributes(Simples)},
    {simple_wc,WCsimple} = {simple_wc,simplify(CWtokens)},
    {simple,Simple} = {simple,filter_tokens(WCsimple, [white_space,comment])},
    {simple_c,Csimple} = {simple_c,filter_tokens(WCsimple, [white_space])},
    {simple_w,Wsimple} = {simple_w,filter_tokens(WCsimple, [comment])},

    %% Line attribute only, with text:
    [SimpleTxt,WsimpleTxt,CsimpleTxt,WCsimpleTxt] = SimplesTxt =
        [element(2, erl_scan:string(String, 1, [text|Opts])) ||
            Opts <- [[],
                     [return_white_spaces],
                     [return_comments],
                     [return]]],
    TextTxt = get_text(WCsimpleTxt),
    {text_txt,TextTxt,String} = {text_txt,String,TextTxt},
    {consistent_txt,true} =
        {consistent_txt,consistent_attributes(SimplesTxt)},
    {simple_txt,SimpleTxt} =
        {simple_txt,filter_tokens(WCsimpleTxt, [white_space,comment])},
    {simple_c_txt,CsimpleTxt} =
        {simple_c_txt,filter_tokens(WCsimpleTxt, [white_space])},
    {simple_w_txt,WsimpleTxt} =
        {simple_w_txt,filter_tokens(WCsimpleTxt, [comment])},
    ok.
%% Check the heap-compaction property of every white space token.  The
%% guard requires the two scans to have produced equal token lists.
test_white_space_compaction(Tokens, Tokens2) when Tokens =:= Tokens2 ->
    WhiteSpace1 = select_tokens(Tokens, [white_space]),
    WhiteSpace2 = select_tokens(Tokens2, [white_space]),
    test_wsc(WhiteSpace1, WhiteSpace2).
%% Compare pairs of white space tokens from two separate scans.  If their
%% texts are shared (compacted) on the heap, a tuple holding both texts
%% is smaller than twice one text plus a tuple shell; this observation
%% must agree with what is_compacted/1 predicts from the text alone.
test_wsc([], []) ->
    ok;
test_wsc([Token|Tokens], [Token2|Tokens2]) ->
    Text = erl_scan:text(Token),
    Text2 = erl_scan:text(Token2),
    Sz = erts_debug:size(Text),
    Sz2 = erts_debug:size({Text, Text2}),
    IsCompacted = Sz2 < 2*Sz + erts_debug:size({a,a}),
    case is_compacted(Text) of
        IsCompacted ->
            %% Observation matches prediction; keep going.
            test_wsc(Tokens, Tokens2);
        _ ->
            {compaction_error, Token}
    end.
%% Predict whether the runtime is expected to share (compact) the given
%% white space text: "\r", "\n\r" and "\n\f" are special-cased, and
%% otherwise a (possibly newline-prefixed) run consisting solely of
%% spaces or solely of tabs is expected to be compacted.
is_compacted("\r") ->
    true;
is_compacted("\n\r") ->
    true;
is_compacted("\n\f") ->
    true;
is_compacted(Text) ->
    Rest = case Text of
               [$\n|Tail] -> Tail;
               _ -> Text
           end,
    all_spaces(Rest) orelse all_tabs(Rest).
%% True if every character of L is a space (trivially true for "").
all_spaces(L) ->
    lists:all(fun(C) -> C =:= $\s end, L).
%% True if every character of L is a tab (trivially true for "").
all_tabs(L) ->
    lists:all(fun(C) -> C =:= $\t end, L).
%% True if every character of L equals Char (trivially true for "").
all_same([], _Char) ->
    true;
all_same([Char|Rest], Char) ->
    all_same(Rest, Char);
all_same(_, _) ->
    false.
%% Check that each white space token contains at most one newline, and
%% that when a newline is present it is the first character of the text.
%% Returns ok on success, false on the first offending token.
newlines_first([]) ->
    ok;
newlines_first([Token|Rest]) ->
    Text = erl_scan:text(Token),
    Newlines = [C || C <- Text, C =:= $\n],
    Valid = case Text of
                [$\n|_] -> Newlines =:= "\n";
                _ -> Newlines =:= ""
            end,
    case Valid of
        true -> newlines_first(Rest);
        false -> false
    end.
%% Drop every token whose category (first element) is one of Tags.
filter_tokens(Tokens, Tags) ->
    [T || T <- Tokens, not lists:member(element(1, T), Tags)].
%% Keep only the tokens whose category (first element) is one of Tags.
select_tokens(Tokens, Tags) ->
    [T || T <- Tokens, lists:member(element(1, T), Tags)].
%% Replace each token's annotation with a fresh one carrying the line only.
simplify(Tokens) ->
    [setelement(2, Token, erl_anno:new(erl_scan:line(Token))) ||
        Token <- Tokens].
%% Concatenate the texts of all tokens, skipping empty ([]) texts.
%% NOTE(review): assumes the tokens were scanned with the 'text' option,
%% so erl_scan:text/1 yields a string for each token.
get_text(Tokens) ->
    Texts = [erl_scan:text(Token) || Token <- Tokens],
    lists:flatten([T || T <- Texts, T =/= []]).
%% Walk the original String guided by the tokens' line/column/text
%% attributes; returns the location just past the last token.
test_decorated_tokens(String, Tokens) ->
    test_strings(token_attrs(Tokens), String, 1, 1).
%% Extract {Line, Column, TextLength, Text} for every token.
token_attrs(Tokens) ->
    [begin
         [C, L, T] = token_info(Token),
         {L, C, length(T), T}
     end || Token <- Tokens].
%% Column, line and text of a token, in that order.
token_info(T) ->
    [erl_scan:column(T), erl_scan:line(T), erl_scan:text(T)].
%% All token attributes as a tagged list, in a fixed order.
token_info_long(T) ->
    [{category, erl_scan:category(T)},
     {column, erl_scan:column(T)},
     {line, erl_scan:line(T)},
     {symbol, erl_scan:symbol(T)},
     {text, erl_scan:text(T)}].
%% For each {Line, Column, Length, Text} attribute tuple, advance through
%% the remaining string to the token's position and check that the next
%% Length characters equal the token's text.  Returns the final location,
%% or {token_error, Attr, Str} on a mismatch.
test_strings([], _S, Line, Column) ->
    {Line,Column};
test_strings([{L,C,Len,T}=Attr|Attrs], String0, Line0, Column0) ->
    %% Skip forward over newlines until the token's line is reached...
    {String1, Column1} = skip_newlines(String0, L, Line0, Column0),
    %% ...then over plain characters until its column.
    String = skip_chars(String1, C-Column1),
    {Str,Rest} = lists:split(Len, String),
    if
        Str =:= T ->
            %% Account for line/column changes caused by newlines in T.
            {Line,Column} = string_newlines(T, L, C),
            test_strings(Attrs, Rest, Line, Column);
        true ->
            {token_error, Attr, Str}
    end.
%% Consume characters until TargetLine is reached; a newline bumps the
%% line counter and resets the column to 1.  Returns the remaining string
%% and the current column.
skip_newlines(String, TargetLine, CurrentLine, Column)
  when TargetLine =:= CurrentLine ->
    {String, Column};
skip_newlines([$\n|Rest], TargetLine, CurrentLine, _Column) ->
    skip_newlines(Rest, TargetLine, CurrentLine+1, 1);
skip_newlines([_|Rest], TargetLine, CurrentLine, Column) ->
    skip_newlines(Rest, TargetLine, CurrentLine, Column+1).
%% Drop the first N characters of String (N is never negative here).
skip_chars(String, N) ->
    lists:nthtail(N, String).
%% Fold over Text updating {Line, Column}: a newline moves to the start
%% of the next line, any other character advances the column.
string_newlines(Text, Line, Column) ->
    lists:foldl(fun($\n, {L, _C}) -> {L + 1, 1};
                   (_, {L, C}) -> {L, C + 1}
                end, {Line, Column}, Text).
%% Scan String (with 'text' and column tracking) in three different ways:
%% erl_scan:string/3, erl_scan:tokens/4 over the whole input, and
%% erl_scan:tokens/4 fed one character at a time.  Check that all three
%% agree on tokens and end locations, then run the attribute-consistency
%% check.  Returns the tokens and end location of the plain string scan.
scan_string_with_column(String, Options0) ->
    Options = [text | Options0],
    StartLoc = {1, 1},
    {ok, Ts1, End1} = erl_scan:string(String, StartLoc, Options),
    %% tokens/4 needs a terminated form; append a dot and a space.
    TString = String ++ ". ",
    {ok,Ts2,End2} = scan_tokens(TString, Options, [], StartLoc),
    {ok, Ts3, End3} =
        scan_tokens_1({more, []}, TString, Options, [], StartLoc),
    {end_2,End2,End3} = {end_2,End3,End2},
    {EndLine1,EndColumn1} = End1,
    %% The appended ". " moves the end location two columns to the right.
    End2 = {EndLine1,EndColumn1+2},
    {ts_1,Ts2,Ts3} = {ts_1,Ts3,Ts2},
    %% tokens/4 saw one extra token: the terminating dot.
    Ts2 = Ts1 ++ [lists:last(Ts2)],
    %% Attributes are keylists, but have no text.
    {ok, Ts7, End7} = erl_scan:string(String, {1,1}, Options),
    {ok, Ts8, End8} = scan_tokens(TString, Options, [], {1,1}),
    {end1, End1} = {end1, End7},
    {end2, End2} = {end2, End8},
    Ts8 = Ts7 ++ [lists:last(Ts8)],
    {cons,true} = {cons,consistent_attributes([Ts1,Ts2,Ts3,Ts7,Ts8])},
    {Ts1, End1}.
%% Repeatedly call erl_scan:tokens/4 until the input is exhausted,
%% accumulating the per-form token lists (in reverse); returns all tokens
%% in order together with the final end location.
scan_tokens(String, Options, Acc, Location) ->
    {done, {ok, Ts, End}, Rest} =
        erl_scan:tokens([], String, Location, Options),
    case Rest of
        "" ->
            {ok, lists:append(lists:reverse([Ts|Acc])), End};
        _ ->
            scan_tokens(Rest, Options, [Ts|Acc], End)
    end.
%% Like scan_tokens/4, but drives erl_scan:tokens/4 one character at a
%% time, re-entering with the returned continuation.
scan_tokens_1({done, {ok,Ts,End}, ""}, "", _Options, Rs, _Location) ->
    %% Scanner finished and no input remains: flatten the accumulator.
    {ok,lists:append(lists:reverse([Ts|Rs])),End};
scan_tokens_1({done, {ok,Ts,End}, Rest}, Cs, Options, Rs, _Location) ->
    %% A form completed; push the unscanned rest back in front of the
    %% remaining characters and restart the scanner.
    scan_tokens_1({more,[]}, Rest++Cs, Options, [Ts|Rs], End);
scan_tokens_1({more, Cont}, [C | Cs], Options, Rs, Loc) ->
    %% The scanner wants more input: hand over a single character.
    R = erl_scan:tokens(Cont, [C], Loc, Options),
    scan_tokens_1(R, Cs, Options, Rs, Loc).
%% Check that each token list uses annotations consistently: either every
%% annotation is a plain line number (an integer), or every token carries
%% exactly the same set of defined attribute tags.  Returns true when all
%% lists pass, otherwise the first offending token list.
consistent_attributes([]) ->
    true;
consistent_attributes([Ts | TsL]) ->
    L = [T || T <- Ts, is_integer(element(2, T))],
    case L of
        [] ->
            %% No integer annotations: collect the defined attribute tags
            %% of every token; all tokens must have the same tag set.
            TagsL = [[Tag || {Tag,_} <- defined(token_info_long(T))] ||
                        T <- Ts],
            case lists:usort(TagsL) of
                [_] ->
                    consistent_attributes(TsL);
                [] when Ts =:= [] ->
                    consistent_attributes(TsL);
                _ ->
                    Ts
            end;
        Ts ->
            %% 'Ts' is already bound, so this clause matches only when
            %% *every* token has an integer (line-only) annotation.
            consistent_attributes(TsL);
        _ ->
            %% A mixture of integer and non-integer annotations: fail.
            Ts
    end.
%% Keep only the attribute pairs whose value is defined.
defined(L) ->
    [Pair || {_Tag, V} = Pair <- L, V =/= undefined].
%% Sort a relation (list of pairs) into an external family:
%% an ordered list of {Key, [Value]} with sorted value lists.
family_list(L) ->
    sofs:to_external(sofs:relation_to_family(sofs:relation(L))).
%% The sofs family corresponding to the relation L.
family(L) ->
    Relation = sofs:relation(L),
    sofs:relation_to_family(Relation).
| null | https://raw.githubusercontent.com/erlang/otp/7f1bc6a19a4253aa03a11ddfa1014231bf7a5127/lib/stdlib/test/erl_scan_SUITE.erl | erlang |
%CopyrightBegin%
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
%CopyrightEnd%
Define to run outside of test server
-define(STANDALONE,1).
config(priv_dir, _) ->
".";
config(data_dir, _) ->
".".
(OTP-2347)
Checks that format_error works on the error cases.
(This should be useful for all format_error functions.)
Some atom and variable names
Test parsing atom and variable characters.
Test printing atoms
Test parsing and printing strings.
It all worked
Something went wrong!
OTP-7810. White spaces, comments, and more...
[the special case "=<<" is among the tested ones]
'
'
'
type error
type error
Leading zeroes...
GH-6477. Test legal use of caret notation.
No escape
An error before R13A.
{done,Err={error,{1,erl_scan,scan},1},eof} =
With column.
An error before R13A.
{done,{error,{{1,1},erl_scan,scan},{1,5}},eof} =
An error before R13A.
With column.
An error before R13A.
With column.
Would be nice if `34\"' were skipped...
immediately following the end location of the error.
GH-6477. Test for illegal characters in caret notation.
type error
$"
type error
$"
type error
line and column are not options, but tested here
type error
type error
type error
type error
type error
type error
type error
$'
$'
'
'
'
not unicode
Keep these tests:
,$%,$\s,1089]}],1} =
, $ \s,1089]}],{1,5 } } =
Due to unicode, the syntax has been incompatibly augmented:
All kinds of tests...
OTP-10302. Unicode characters scanner/parser.
From unicode():
,$%,$\s,OK]}],1} =
,$%,$\s,OK1]}],{1,5}} =
Not erl_scan, but erl_parse.
minor backward incompatibility
Keep text for integers written with a base.
Keep text for long strings, regardless of class
If text is present, we supply text for *all* tokens.
test(String).
There are no checks of the tags...
io:format("Testing `~ts'~n", [String]),
for white space compaction test
Use token attributes to extract parts from the original string,
and check that the parts are identical to the token strings.
Almost the same again: concat texts to get the original:
Test that white spaces occupy less heap than the worst case.
Line attribute only:
Line attribute only, with text:
Attributes are keylists, but have no text. | Copyright Ericsson AB 1998 - 2022 . All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(erl_scan_SUITE).
-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_testcase/2, end_per_testcase/2,
init_per_group/2,end_per_group/2]).
-export([error_1/1, error_2/1, iso88591/1, otp_7810/1, otp_10302/1,
otp_10990/1, otp_10992/1, otp_11807/1, otp_16480/1, otp_17024/1,
text_fun/1]).
-import(lists, [nth/2,flatten/1]).
-import(io_lib, [print/1]).
-ifdef(STANDALONE).
-compile(export_all).
-define(line, put(line, ?LINE), ).
-define(config(A,B),config(A,B)).
-define(t, test_server).
-else.
-include_lib("common_test/include/ct.hrl").
-endif.
init_per_testcase(_Case, Config) ->
Config.
end_per_testcase(_Case, _Config) ->
ok.
suite() ->
[{ct_hooks,[ts_install_cth]},
{timetrap,{minutes,20}}].
all() ->
[{group, error}, iso88591, otp_7810, otp_10302, otp_10990, otp_10992,
otp_11807, otp_16480, otp_17024, text_fun].
groups() ->
[{error, [], [error_1, error_2]}].
init_per_suite(Config) ->
Config.
end_per_suite(_Config) ->
ok.
init_per_group(_GroupName, Config) ->
Config.
end_per_group(_GroupName, Config) ->
Config.
error_1(Config) when is_list(Config) ->
{error, _, _} = erl_scan:string("'a"),
ok.
error_2(Config) when is_list(Config) ->
lists:foreach(fun check/1, error_cases()),
ok.
error_cases() ->
["'a",
"\"a",
"'\\",
"\"\\",
"$",
"$\\",
"2.3e",
"2.3e-",
"91#9"
].
assert_type(N, integer) when is_integer(N) ->
ok;
assert_type(N, atom) when is_atom(N) ->
ok.
check(String) ->
Error = erl_scan:string(String),
check_error(Error, erl_scan).
check_error({error, Info, EndLine}, Module0) ->
{ErrorLine, Module, Desc} = Info,
true = (Module == Module0),
assert_type(EndLine, integer),
assert_type(ErrorLine, integer),
true = (ErrorLine =< EndLine),
String = lists:flatten(Module0:format_error(Desc)),
true = io_lib:printable_list(String).
Tests the support for ISO-8859 - 1 i.e Latin-1 .
iso88591(Config) when is_list(Config) ->
ok =
case catch begin
V1s = [$Á,$á,$é,$ë],
V2s = [$N,$ä,$r],
A1s = [$h,$ä,$r],
A2s = [$ö,$r,$e],
{ok,Ts1,_} = erl_scan_string(V1s ++ " " ++ V2s ++
"\327" ++
A1s ++ " " ++ A2s),
V1s = atom_to_list(element(3, nth(1, Ts1))),
V2s = atom_to_list(element(3, nth(2, Ts1))),
A1s = atom_to_list(element(3, nth(4, Ts1))),
A2s = atom_to_list(element(3, nth(5, Ts1))),
A1s = flatten(print(element(3, nth(4, Ts1)))),
A2s = flatten(print(element(3, nth(5, Ts1)))),
S1 = V1s ++ "\327" ++ A1s ++ "\250" ++ A2s,
S1s = "\"" ++ S1 ++ "\"",
{ok,Ts2,_} = erl_scan_string(S1s),
S1 = element(3, nth(1, Ts2)),
S1s = flatten(print(element(3, nth(1, Ts2)))),
end of
{error,R};
end.
otp_7810(Config) when is_list(Config) ->
ok = reserved_words(),
ok = atoms(),
ok = punctuations(),
ok = comments(),
ok = errors(),
ok = integers(),
ok = base_integers(),
ok = floats(),
ok = dots(),
ok = chars(),
ok = variables(),
ok = eof(),
ok = illegal(),
ok = crashes(),
ok = options(),
ok = token_info(),
ok = column_errors(),
ok = white_spaces(),
ok = unicode(),
ok = more_chars(),
ok = more_options(),
ok = anno_info(),
ok.
reserved_words() ->
L = ['after', 'begin', 'case', 'try', 'cond', 'catch',
'andalso', 'orelse', 'end', 'fun', 'if', 'let', 'of',
'receive', 'when', 'bnot', 'not', 'div',
'rem', 'band', 'and', 'bor', 'bxor', 'bsl', 'bsr',
'or', 'xor'],
[begin
{RW, true} = {RW, erl_scan:reserved_word(RW)},
S = atom_to_list(RW),
Ts = [{RW,{1,1}}],
test_string(S, Ts)
end || RW <- L],
ok.
atoms() ->
test_string("a
b", [{atom,{1,1},a},{atom,{2,18},b}]),
test_string("'a b'", [{atom,{1,1},'a b'}]),
test_string("a", [{atom,{1,1},a}]),
test_string("a@2", [{atom,{1,1},a@2}]),
test_string([39,65,200,39], [{atom,{1,1},'AÈ'}]),
test_string("ärlig östen", [{atom,{1,1},ärlig},{atom,{1,7},östen}]),
{ok,[{atom,_,'$a'}],{1,6}} =
erl_scan_string("'$\\a'", {1,1}),
test("'$\\a'"),
ok.
punctuations() ->
L = ["<<", "<-", "<=", "<", ">>", ">=", ">", "->", "--",
"-", "++", "+", "=:=", "=/=", "=<", "=>", "==", "=", "/=",
"/", "||", "|", ":=", "::", ":"],
One token at a time :
[begin
W = list_to_atom(S),
Ts = [{W,{1,1}}],
test_string(S, Ts)
end || S <- L],
three tokens ...
No = Three ++ L,
SL0 = [{S1++S2,{-length(S1),S1,S2}} ||
S1 <- L,
S2 <- L,
not lists:member(S1++S2, No)],
SL = family_list(SL0),
Two tokens . When there are several answers , the one with
the longest first token is chosen :
[begin
W1 = list_to_atom(S1),
W2 = list_to_atom(S2),
Ts = [{W1,{1,1}},{W2,{1,-L2+1}}],
test_string(S, Ts)
end || {S,[{L2,S1,S2}|_]} <- SL],
PTs1 = [{'!',{1,1}},{'(',{1,2}},{')',{1,3}},{',',{1,4}},{';',{1,5}},
{'=',{1,6}},{'[',{1,7}},{']',{1,8}},{'{',{1,9}},{'|',{1,10}},
{'}',{1,11}}],
test_string("!(),;=[]{|}", PTs1),
PTs2 = [{'#',{1,1}},{'&',{1,2}},{'*',{1,3}},{'+',{1,4}},{'/',{1,5}},
{':',{1,6}},{'<',{1,7}},{'>',{1,8}},{'?',{1,9}},{'@',{1,10}},
{'\\',{1,11}},{'^',{1,12}},{'`',{1,13}},{'~',{1,14}}],
test_string("#&*+/:<>?@\\^`~", PTs2),
test_string(".. ", [{'..',{1,1}}]),
test_string("1 .. 2",
[{integer,{1,1},1},{'..',{1,3}},{integer,{1,6},2}]),
test_string("...", [{'...',{1,1}}]),
ok.
comments() ->
test("a %%\n b"),
{ok,[],1} = erl_scan_string("%"),
test("a %%\n b"),
{ok,[{atom,{1,1},a},{atom,{2,2},b}],{2,3}} =
erl_scan_string("a %%\n b", {1,1}),
{ok,[{atom,{1,1},a},{comment,{1,3},"%%"},{atom,{2,2},b}],{2,3}} =
erl_scan_string("a %%\n b",{1,1}, [return_comments]),
{ok,[{atom,{1,1},a},
{white_space,{1,2}," "},
{white_space,{1,5},"\n "},
{atom,{2,2},b}],
{2,3}} =
erl_scan_string("a %%\n b",{1,1},[return_white_spaces]),
{ok,[{atom,{1,1},a},
{white_space,{1,2}," "},
{comment,{1,3},"%%"},
{white_space,{1,5},"\n "},
{atom,{2,2},b}],
{2,3}} = erl_scan_string("a %%\n b",{1,1},[return]),
ok.
errors() ->
{error,{1,erl_scan,{string,$","str"}},1} = %"
erl_scan:string("\"str"), %"
{error,{{1,1},erl_scan,{string,$","str"}},{1,5}} = %"
erl_scan:string("\"str", {1,1}, []), %"
{error,{1,erl_scan,char},1} = erl_scan:string("$"),
{error,{{1,1},erl_scan,char},{1,2}} = erl_scan:string("$", {1,1}, []),
test_string([34,65,200,34], [{string,{1,1},"AÈ"}]),
test_string("\\", [{'\\',{1,1}}]),
{'EXIT',_} =
{'EXIT',_} =
"{a,tuple}" = erl_scan:format_error({a,tuple}),
ok.
integers() ->
[begin
I = list_to_integer(S),
Ts = [{integer,{1,1},I}],
test_string(S, Ts)
end || S <- [[N] || N <- lists:seq($0, $9)] ++ ["2323","000"] ],
UnderscoreSamples =
[{"123_456", 123456},
{"123_456_789", 123456789},
{"1_2", 12}],
lists:foreach(
fun({S, I}) ->
test_string(S, [{integer, {1, 1}, I}])
end, UnderscoreSamples),
UnderscoreErrors =
["123_",
"123__",
"123_456_",
"123__456",
"_123",
"__123"],
lists:foreach(
fun(S) ->
case erl_scan:string(S) of
{ok, [{integer, _, _}], _} ->
error({unexpected_integer, S});
_ ->
ok
end
end, UnderscoreErrors),
test_string("_123", [{var,{1,1},'_123'}]),
test_string("123_", [{integer,{1,1},123},{var,{1,4},'_'}]),
ok.
base_integers() ->
[begin
B = list_to_integer(BS),
I = erlang:list_to_integer(S, B),
Ts = [{integer,{1,1},I}],
test_string(BS++"#"++S, Ts)
end || {BS,S} <- [{"2","11"}, {"5","23234"}, {"12","05a"},
{"16","abcdef"}, {"16","ABCDEF"}] ],
{error,{1,erl_scan,{base,1}},1} = erl_scan:string("1#000"),
{error,{{1,1},erl_scan,{base,1}},{1,2}} =
erl_scan:string("1#000", {1,1}, []),
{error,{1,erl_scan,{base,1}},1} = erl_scan:string("1#000"),
{error,{{1,1},erl_scan,{base,1000}},{1,6}} =
erl_scan:string("1_000#000", {1,1}, []),
test_string("12#bc", [{integer,{1,1},11},{atom,{1,5},c}]),
[begin
Str = BS ++ "#" ++ S,
E = 2 + length(BS),
{error,{{1,1},erl_scan,{illegal,integer}},{1,E}} =
erl_scan:string(Str, {1,1}, [])
end || {BS,S} <- [{"3","3"},{"15","f"},{"12","c"},
{"1_5","f"},{"1_2","c"}] ],
{ok,[{integer,1,239},{'@',1}],1} = erl_scan_string("16#ef@"),
{ok,[{integer,{1,1},239},{'@',{1,6}}],{1,7}} =
erl_scan_string("16#ef@", {1,1}, []),
{ok,[{integer,{1,1},14},{atom,{1,5},g@}],{1,7}} =
erl_scan_string("16#eg@", {1,1}, []),
UnderscoreSamples =
[{"16#1234_ABCD_EF56", 16#1234abcdef56},
{"2#0011_0101_0011", 2#001101010011},
{"1_6#123ABC", 16#123abc},
{"1_6#123_ABC", 16#123abc},
{"16#abcdef", 16#ABCDEF}],
lists:foreach(
fun({S, I}) ->
test_string(S, [{integer, {1, 1}, I}])
end, UnderscoreSamples),
UnderscoreErrors =
["16_#123ABC",
"16#123_",
"16#_123",
"16#ABC_",
"16#_ABC",
"2#_0101",
"1__6#ABC",
"16#AB__CD"],
lists:foreach(
fun(S) ->
case erl_scan:string(S) of
{ok, [{integer, _, _}], _} ->
error({unexpected_integer, S});
_ ->
ok
end
end, UnderscoreErrors),
test_string("16#123_", [{integer,{1,1},291},{var,{1,7},'_'}]),
test_string("_16#ABC", [{var,{1,1},'_16'},{'#',{1,4}},{var,{1,5},'ABC'}]),
ok.
floats() ->
[begin
F = list_to_float(FS),
Ts = [{float,{1,1},F}],
test_string(FS, Ts)
end || FS <- ["1.0","001.17","3.31200","1.0e0","1.0E17",
"34.21E-18", "17.0E+14"]],
test_string("1.e2", [{integer,{1,1},1},{'.',{1,2}},{atom,{1,3},e2}]),
{error,{1,erl_scan,{illegal,float}},1} =
erl_scan:string("1.0e400"),
{error,{{1,1},erl_scan,{illegal,float}},{1,8}} =
erl_scan:string("1.0e400", {1,1}, []),
{error,{{1,1},erl_scan,{illegal,float}},{1,9}} =
erl_scan:string("1.0e4_00", {1,1}, []),
[begin
{error,{1,erl_scan,{illegal,float}},1} = erl_scan:string(S),
{error,{{1,1},erl_scan,{illegal,float}},{1,_}} =
erl_scan:string(S, {1,1}, [])
end || S <- ["1.14Ea"]],
UnderscoreSamples =
[{"123_456.789", 123456.789},
{"123.456_789", 123.456789},
{"1.2_345e10", 1.2345e10},
{"1.234e1_06", 1.234e106},
{"12_34.56_78e1_6", 1234.5678e16},
{"12_34.56_78e-1_8", 1234.5678e-18}],
lists:foreach(
fun({S, I}) ->
test_string(S, [{float, {1, 1}, I}])
end, UnderscoreSamples),
UnderscoreErrors =
["123_.456",
"123._456",
"123.456_",
"123._",
"1._23e10",
"1.23e_10",
"1.23e10_"],
lists:foreach(
fun(S) ->
case erl_scan:string(S) of
{ok, [{float, _, _}], _} ->
error({unexpected_float, S});
_ ->
ok
end
end, UnderscoreErrors),
test_string("123._", [{integer,{1,1},123},{'.',{1,4}},{var,{1,5},'_'}]),
test_string("1.23_e10", [{float,{1,1},1.23},{var,{1,5},'_e10'}]),
ok.
dots() ->
Dot = [{".", {ok,[{dot,1}],1}, {ok,[{dot,{1,1}}],{1,2}}},
{". ", {ok,[{dot,1}],1}, {ok,[{dot,{1,1}}],{1,3}}},
{".\n", {ok,[{dot,1}],2}, {ok,[{dot,{1,1}}],{2,1}}},
{".%", {ok,[{dot,1}],1}, {ok,[{dot,{1,1}}],{1,3}}},
{".\210",{ok,[{dot,1}],1}, {ok,[{dot,{1,1}}],{1,3}}},
{".% öh",{ok,[{dot,1}],1}, {ok,[{dot,{1,1}}],{1,6}}},
{".%\n", {ok,[{dot,1}],2}, {ok,[{dot,{1,1}}],{2,1}}},
{".$", {error,{1,erl_scan,char},1},
{error,{{1,2},erl_scan,char},{1,3}}},
{".$\\", {error,{1,erl_scan,char},1},
{error,{{1,2},erl_scan,char},{1,4}}},
{".a", {ok,[{'.',1},{atom,1,a}],1},
{ok,[{'.',{1,1}},{atom,{1,2},a}],{1,3}}}
],
[begin
R = erl_scan_string(S),
R2 = erl_scan_string(S, {1,1}, [])
end || {S, R, R2} <- Dot],
{ok,[{dot,_}=T1],{1,2}} = erl_scan:string(".", {1,1}, text),
[1, 1, "."] = token_info(T1),
{ok,[{dot,_}=T2],{1,3}} = erl_scan:string(".%", {1,1}, text),
[1, 1, "."] = token_info(T2),
{ok,[{dot,_}=T3],{1,6}} =
erl_scan:string(".% öh", {1,1}, text),
[1, 1, "."] = token_info(T3),
{error,{{1,2},erl_scan,char},{1,3}} = erl_scan:string(".$", {1,1}),
{error,{{1,2},erl_scan,char},{1,4}} = erl_scan:string(".$\\", {1,1}),
test_string(". ", [{dot,{1,1}}]),
test_string(". ", [{dot,{1,1}}]),
test_string(".\n", [{dot,{1,1}}]),
test_string(".\n\n", [{dot,{1,1}}]),
test_string(".\n\r", [{dot,{1,1}}]),
test_string(".\n\n\n", [{dot,{1,1}}]),
test_string(".\210", [{dot,{1,1}}]),
test_string(".%\n", [{dot,{1,1}}]),
test_string(".a", [{'.',{1,1}},{atom,{1,2},a}]),
test_string("%. \n. ", [{dot,{2,1}}]),
{more,C} = erl_scan:tokens([], "%. ",{1,1}, return),
{done,{ok,[{comment,{1,1},"%. "},
{white_space,{1,4},"\n"},
{dot,{2,1}}],
{2,3}}, ""} =
any loc , any options
[test_string(S, R) ||
{S, R} <- [{".$\n", [{'.',{1,1}},{char,{1,2},$\n}]},
{"$\\\n", [{char,{1,1},$\n}]},
{"'\\\n'", [{atom,{1,1},'\n'}]},
{"$\n", [{char,{1,1},$\n}]}] ],
ok.
chars() ->
[begin
L = lists:flatten(io_lib:format("$\\~.8b", [C])),
Ts = [{char,{1,1},C}],
test_string(L, Ts)
end || C <- lists:seq(0, 255)],
[begin
L = lists:flatten(io_lib:format("$\\~3.8.0b", [C])),
Ts = [{char,{1,1},C}],
test_string(L, Ts)
end || C <- lists:seq(0, 255)],
[begin
L = "$\\^" ++ [C],
Ts = case C of
$? ->
[{char,{1,1},127}];
_ ->
[{char,{1,1},C band 2#11111}]
end,
test_string(L, Ts)
end || C <- lists:seq($?, $Z) ++ lists:seq($a, $z)],
[begin
L = "$\\" ++ [C],
Ts = [{char,{1,1},V}],
test_string(L, Ts)
end || {C,V} <- [{$n,$\n}, {$r,$\r}, {$t,$\t}, {$v,$\v},
{$b,$\b}, {$f,$\f}, {$e,$\e}, {$s,$\s},
{$d,$\d}]],
EC = [$\n,$\r,$\t,$\v,$\b,$\f,$\e,$\s,$\d],
Ds = lists:seq($0, $9),
X = [$^,$n,$r,$t,$v,$b,$f,$e,$s,$d],
New = [${,$x],
No = EC ++ Ds ++ X ++ New,
[begin
L = "$\\" ++ [C],
Ts = [{char,{1,1},C}],
test_string(L, Ts)
end || C <- lists:seq(0, 255) -- No],
[begin
L = "'$\\" ++ [C] ++ "'",
Ts = [{atom,{1,1},list_to_atom("$"++[C])}],
test_string(L, Ts)
end || C <- lists:seq(0, 255) -- No],
test_string("\"\\013a\\\n\"", [{string,{1,1},"\va\n"}]),
test_string("'\n'", [{atom,{1,1},'\n'}]),
test_string("\"\n\a\"", [{string,{1,1},"\na"}]),
[begin
L = "$" ++ [C],
Ts = [{char,{1,1},C}],
test_string(L, Ts)
end || C <- lists:seq(0, 255) -- (No ++ [$\\])],
test_string("$\n", [{char,{1,1},$\n}]),
{error,{{1,1},erl_scan,char},{1,4}} =
erl_scan:string("$\\^",{1,1}),
test_string("$\\\n", [{char,{1,1},$\n}]),
's scanner returns line 1 :
test_string("$\\\n", [{char,{1,1},$\n}]),
test_string("$\n\n", [{char,{1,1},$\n}]),
test("$\n\n"),
ok.
variables() ->
test_string(" \237_Aouåeiyäö", [{var,{1,7},'_Aouåeiyäö'}]),
test_string("A_b_c@", [{var,{1,1},'A_b_c@'}]),
test_string("V@2", [{var,{1,1},'V@2'}]),
test_string("ABDÀ", [{var,{1,1},'ABDÀ'}]),
test_string("Ärlig Östen", [{var,{1,1},'Ärlig'},{var,{1,7},'Östen'}]),
ok.
eof() ->
{done,{eof,1},eof} = erl_scan:tokens([], eof, 1),
{more, C1} = erl_scan:tokens([]," \n", 1),
{done,{eof,2},eof} = erl_scan:tokens(C1, eof, 1),
{more, C2} = erl_scan:tokens([], "abra", 1),
{done,{ok,[{atom,1,abra}],1},eof} =
erl_scan_tokens(C2, eof, 1),
{more, C3} = erl_scan:tokens([]," \n",{1,1}),
{done,{eof,{2,1}},eof} = erl_scan:tokens(C3, eof, 1),
{more, C4} = erl_scan:tokens([], "abra", {1,1}),
{done,{ok,[{atom,_,abra}],{1,5}},eof} =
erl_scan_tokens(C4, eof, 1),
's scanner returns " " as LeftoverChars ;
the R12B scanner returns eof as LeftoverChars : ( eof is correct )
{more, C5} = erl_scan:tokens([], "a", 1),
{ done,{error,{1,erl_scan , scan},1},eof } =
{done,{ok,[{atom,1,a}],1},eof} =
erl_scan_tokens(C5,eof,1),
{more, C6} = erl_scan:tokens([], "a", {1,1}),
{ done,{error,{1,erl_scan , scan},1},eof } =
{done,{ok,[{atom,{1,1},a}],{1,2}},eof} =
erl_scan_tokens(C6,eof,1),
A dot followed by eof is special :
{more, C} = erl_scan:tokens([], "a.", 1),
{done,{ok,[{atom,1,a},{dot,1}],1},eof} = erl_scan_tokens(C,eof,1),
{ok,[{atom,1,foo},{dot,1}],1} = erl_scan_string("foo."),
{more, CCol} = erl_scan:tokens([], "a.", {1,1}),
{done,{ok,[{atom,{1,1},a},{dot,{1,2}}],{1,3}},eof} =
erl_scan_tokens(CCol,eof,1),
{ok,[{atom,{1,1},foo},{dot,{1,4}}],{1,5}} =
erl_scan_string("foo.", {1,1}, []),
ok.
illegal() ->
Atom = lists:duplicate(1000, $a),
{error,{1,erl_scan,{illegal,atom}},1} = erl_scan:string(Atom),
{done,{error,{1,erl_scan,{illegal,atom}},1},". "} =
erl_scan:tokens([], Atom++". ", 1),
QAtom = "'" ++ Atom ++ "'",
{error,{1,erl_scan,{illegal,atom}},1} = erl_scan:string(QAtom),
{done,{error,{1,erl_scan,{illegal,atom}},1},". "} =
erl_scan:tokens([], QAtom++". ", 1),
Var = lists:duplicate(1000, $A),
{error,{1,erl_scan,{illegal,var}},1} = erl_scan:string(Var),
{done,{error,{1,erl_scan,{illegal,var}},1},". "} =
erl_scan:tokens([], Var++". ", 1),
Float = "1" ++ lists:duplicate(400, $0) ++ ".0",
{error,{1,erl_scan,{illegal,float}},1} = erl_scan:string(Float),
{done,{error,{1,erl_scan,{illegal,float}},1},". "} =
erl_scan:tokens([], Float++". ", 1),
String = "\"43\\x{aaaaaa}34\"",
{error,{1,erl_scan,{illegal,character}},1} = erl_scan:string(String),
{done,{error,{1,erl_scan,{illegal,character}},1},"34\". "} =
Maybe , but then the LeftOverChars would not be the characters
erl_scan:tokens([], String++". ", 1),
{error,{{1,1},erl_scan,{illegal,atom}},{1,1001}} =
erl_scan:string(Atom, {1,1}),
{done,{error,{{1,5},erl_scan,{illegal,atom}},{1,1005}},". "} =
erl_scan:tokens([], "foo "++Atom++". ", {1,1}),
{error,{{1,1},erl_scan,{illegal,atom}},{1,1003}} =
erl_scan:string(QAtom, {1,1}),
{done,{error,{{1,5},erl_scan,{illegal,atom}},{1,1007}},". "} =
erl_scan:tokens([], "foo "++QAtom++". ", {1,1}),
{error,{{1,1},erl_scan,{illegal,var}},{1,1001}} =
erl_scan:string(Var, {1,1}),
{done,{error,{{1,5},erl_scan,{illegal,var}},{1,1005}},". "} =
erl_scan:tokens([], "foo "++Var++". ", {1,1}),
{error,{{1,1},erl_scan,{illegal,float}},{1,404}} =
erl_scan:string(Float, {1,1}),
{done,{error,{{1,5},erl_scan,{illegal,float}},{1,408}},". "} =
erl_scan:tokens([], "foo "++Float++". ", {1,1}),
{error,{{1,4},erl_scan,{illegal,character}},{1,14}} =
erl_scan:string(String, {1,1}),
{done,{error,{{1,4},erl_scan,{illegal,character}},{1,14}},"34\". "} =
erl_scan:tokens([], String++". ", {1,1}),
_ = [begin
S = [$$,$\\,$^,C],
{error,{1,erl_scan,{illegal,character}},1} = erl_scan:string(S)
end || C <- lists:seq(0, 16#3e) ++ [16#60] ++ lists:seq($z+1, 16#10ffff)],
ok.
crashes() ->
{'EXIT',_} = (catch erl_scan:string("'a" ++ [999999999] ++ "c'")),
{'EXIT',_} = (catch {foo, erl_scan:string("$"++[-1])}),
{'EXIT',_} = (catch {foo, erl_scan:string("$\\"++[-1])}),
{'EXIT',_} = (catch {foo, erl_scan:string("$\\^"++[-1])}),
{'EXIT',_} = (catch {foo, erl_scan:string([$",-1,$"],{1,1})}),
{'EXIT',_} = (catch {foo, erl_scan:string([$",-1,$"])}),
{'EXIT',_} = (catch {foo, erl_scan:string("% foo"++[-1])}),
{'EXIT',_} =
(catch {foo, erl_scan:string("% foo"++[-1],{1,1})}),
{'EXIT',_} = (catch {foo, erl_scan:string("$"++[a])}),
{'EXIT',_} = (catch {foo, erl_scan:string("$\\"++[a])}),
{'EXIT',_} = (catch {foo, erl_scan:string("$\\^"++[a])}),
{'EXIT',_} = (catch {foo, erl_scan:string([$",a,$"],{1,1})}),
{'EXIT',_} = (catch {foo, erl_scan:string([$",a,$"])}),
{'EXIT',_} = (catch {foo, erl_scan:string("% foo"++[a])}),
{'EXIT',_} =
(catch {foo, erl_scan:string("% foo"++[a],{1,1})}),
{'EXIT',_} = (catch {foo, erl_scan:string("A" ++ [999999999])}),
ok.
options() ->
{ok,[{atom,1,foo},{white_space,1," "},{comment,1,"% bar"}], 1} =
erl_scan_string("foo % bar", 1, return),
{ok,[{atom,1,foo},{white_space,1," "}],1} =
erl_scan_string("foo % bar", 1, return_white_spaces),
{ok,[{atom,1,foo},{comment,1,"% bar"}],1} =
erl_scan_string("foo % bar", 1, return_comments),
{ok,[{atom,17,foo}],17} =
erl_scan_string("foo % bar", 17),
{'EXIT',{function_clause,_}} =
(catch {foo,
{ok,[{atom,_,foo}],{17,18}} =
erl_scan_string("foo % bar", {17,9}, []),
{'EXIT',{function_clause,_}} =
(catch {foo,
{ok,[{foo,1}],1} =
erl_scan_string("foo % bar",1, [{reserved_word_fun,
fun(W) -> W =:= foo end}]),
{'EXIT',{badarg,_}} =
(catch {foo,
[{reserved_word_fun,
fun(W,_) -> W =:= foo end}])}),
ok.
more_options() ->
{ok,[{atom,_,foo}=T1],{19,20}} =
erl_scan:string("foo", {19,17},[]),
{19,17} = erl_scan:location(T1),
{done,{ok,[{atom,_,foo}=T2,{dot,_}],{19,22}},[]} =
{19,17} = erl_scan:location(T2),
{ok,[{atom,_,foo}=T3],{19,20}} =
erl_scan:string("foo", {19,17},[text]),
{19,17} = erl_scan:location(T3),
"foo" = erl_scan:text(T3),
{ok,[{atom,_,foo}=T4],1} = erl_scan:string("foo", 1, [text]),
1 = erl_scan:line(T4),
1 = erl_scan:location(T4),
"foo" = erl_scan:text(T4),
ok.
token_info() ->
{ok,[T1],_} = erl_scan:string("foo", {1,18}, [text]),
{'EXIT',{badarg,_}} =
{'EXIT',{badarg,_}} =
atom = erl_scan:category(T1),
foo = erl_scan:symbol(T1),
{ok,[T2],_} = erl_scan:string("foo", 1, []),
1 = erl_scan:line(T2),
undefined = erl_scan:column(T2),
undefined = erl_scan:text(T2),
1 = erl_scan:location(T2),
{ok,[T3],_} = erl_scan:string("=", 1, []),
'=' = erl_scan:category(T3),
'=' = erl_scan:symbol(T3),
ok.
anno_info() ->
{'EXIT',_} =
{ok,[{atom,_,foo}=T0],_} = erl_scan:string("foo", 19, [text]),
19 = erl_scan:location(T0),
19 = erl_scan:end_location(T0),
{ok,[{atom,_,foo}=T3],_} = erl_scan:string("foo", {1,3}, [text]),
1 = erl_scan:line(T3),
3 = erl_scan:column(T3),
{1,3} = erl_scan:location(T3),
{1,6} = erl_scan:end_location(T3),
"foo" = erl_scan:text(T3),
{ok,[{atom,_,foo}=T4],_} = erl_scan:string("foo", 2, [text]),
2 = erl_scan:line(T4),
undefined = erl_scan:column(T4),
2 = erl_scan:location(T4),
"foo" = erl_scan:text(T4),
{ok,[{atom,_,foo}=T5],_} = erl_scan:string("foo", {1,3}, []),
1 = erl_scan:line(T5),
3 = erl_scan:column(T5),
{1,3} = erl_scan:location(T5),
undefined = erl_scan:text(T5),
ok.
column_errors() ->
erl_scan:string("'\\",{1,1}),
{error,{{1,1},erl_scan,{string,$",""}},{1,3}} = % $"
erl_scan:string("\"\\",{1,1}),
erl_scan:string("'",{1,1}),
{error,{{1,1},erl_scan,{string,$",""}},{1,2}} = % $"
erl_scan:string("\"",{1,1}),
{error,{{1,1},erl_scan,char},{1,2}} =
erl_scan:string("$",{1,1}),
erl_scan:string(" '12345678901234567", {1,1}),
erl_scan:string(" '123456789012345\\s", {1,1}),
{error,{{1,2},erl_scan,{string,$","1234567890123456"}},{1,20}} = %"
erl_scan:string(" \"12345678901234567", {1,1}),
{error,{{1,2},erl_scan,{string,$","123456789012345 "}}, {1,20}} = %"
erl_scan:string(" \"123456789012345\\s", {1,1}),
erl_scan:string(" '12345678901234567\n", {1,1}),
ok.
white_spaces() ->
{ok,[{white_space,_,"\r"},
{white_space,_," "},
{atom,_,a},
{white_space,_,"\n"}],
_} = erl_scan_string("\r a\n", {1,1}, return),
test("\r a\n"),
L = "{\"a\nb\", \"a\\nb\",\nabc\r,def}.\n\n",
{ok,[{'{',_},
{string,_,"a\nb"},
{',',_},
{white_space,_," "},
{string,_,"a\nb"},
{',',_},
{white_space,_,"\n"},
{atom,_,abc},
{white_space,_,"\r"},
{',',_},
{atom,_,def},
{'}',_},
{dot,_},
{white_space,_,"\n"}],
_} = erl_scan_string(L, {1,1}, return),
test(L),
test("\"\n\"\n"),
test("\n\r\n"),
test("\n\r"),
test("\r\n"),
test("\n\f"),
[test(lists:duplicate(N, $\t)) || N <- lists:seq(1, 20)],
[test([$\n|lists:duplicate(N, $\t)]) || N <- lists:seq(1, 20)],
[test(lists:duplicate(N, $\s)) || N <- lists:seq(1, 20)],
[test([$\n|lists:duplicate(N, $\s)]) || N <- lists:seq(1, 20)],
test("\v\f\n\v "),
test("\n\e\n\b\f\n\da\n"),
ok.
unicode() ->
{ok,[{char,1,83},{integer,1,45}],1} =
{error,{1,erl_scan,{illegal,character}},1} =
erl_scan:string([1089]),
{error,{{1,1},erl_scan,{illegal,character}},{1,2}} =
erl_scan:string([1089], {1,1}),
{error,{{1,1},erl_scan,{illegal,character}},{1,2}} =
erl_scan:string([16#D800], {1,1}),
test("\"a"++[1089]++"b\""),
{error,{1,erl_scan,{illegal,character}},1} =
erl_scan_string([$$,$\\,$^,1089], 1),
{error,{1,erl_scan,Error},1} =
erl_scan:string("\"qa\x{aaa}", 1),
"unterminated string starting with \"qa"++[2730]++"\"" =
erl_scan:format_error(Error),
{error,{{1,1},erl_scan,_},{1,11}} =
erl_scan:string("\"qa\\x{aaa}",{1,1}),
{error,{{1,1},erl_scan,_},{1,11}} =
erl_scan:string("'qa\\x{aaa}",{1,1}),
{ok,[{char,1,1089}],1} =
erl_scan_string([$$,1089], 1),
{ok,[{char,1,1089}],1} =
erl_scan_string([$$,$\\,1089], 1),
Qs = "$\\x{aaa}",
{ok,[{char,1,$\x{aaa}}],1} =
erl_scan_string(Qs, 1),
{ok,[Q2],{1,9}} =
erl_scan:string("$\\x{aaa}", {1,1}, [text]),
[{category,char},{column,1},{line,1},{symbol,16#aaa},{text,Qs}] =
token_info_long(Q2),
U1 = "\"\\x{aaa}\"",
{ok,[{string,_,[2730]}=T1],{1,10}} = erl_scan:string(U1, {1,1}, [text]),
{1,1} = erl_scan:location(T1),
"\"\\x{aaa}\"" = erl_scan:text(T1),
{ok,[{string,1,[2730]}],1} = erl_scan_string(U1, 1),
U2 = "\"\\x41\\x{fff}\\x42\"",
{ok,[{string,1,[$\x41,$\x{fff},$\x42]}],1} = erl_scan_string(U2, 1),
U3 = "\"a\n\\x{fff}\n\"",
{ok,[{string,1,[$a,$\n,$\x{fff},$\n]}],3} = erl_scan_string(U3, 1),
U4 = "\"\n\\x{aaa}\n\"",
{ok,[{string,1,[$\n,$\x{aaa},$\n]}],3} = erl_scan_string(U4, 1),
test(Qs),
test(U1),
test(U2),
test(U3),
test(U4),
Str1 = "\"ab" ++ [1089] ++ "cd\"",
{ok,[{string,1,[$a,$b,1089,$c,$d]}],1} = erl_scan_string(Str1, 1),
{ok,[{string,{1,1},[$a,$b,1089,$c,$d]}],{1,8}} =
erl_scan_string(Str1, {1,1}),
test(Str1),
Comment = "%% "++[1089],
erl_scan_string(Comment, 1, [return]),
erl_scan_string(Comment, {1,1}, [return]),
ok.
more_chars() ->
$ \x { ... } , $ \xHH
{ok,[{char,_,123}],{1,4}} =
erl_scan_string("$\\{",{1,1}),
{more, C1} = erl_scan:tokens([], "$\\{", {1,1}),
{done,{ok,[{char,_,123}],{1,4}},eof} =
erl_scan_tokens(C1, eof, 1),
{ok,[{char,1,123},{atom,1,a},{'}',1}],1} =
erl_scan_string("$\\{a}"),
{error,{{1,1},erl_scan,char},{1,4}} =
erl_scan:string("$\\x", {1,1}),
{error,{{1,1},erl_scan,char},{1,5}} =
erl_scan:string("$\\x{",{1,1}),
{more, C3} = erl_scan:tokens([], "$\\x", {1,1}),
{done,{error,{{1,1},erl_scan,char},{1,4}},eof} =
erl_scan:tokens(C3, eof, 1),
{error,{{1,1},erl_scan,char},{1,5}} =
erl_scan:string("$\\x{",{1,1}),
{more, C2} = erl_scan:tokens([], "$\\x{", {1,1}),
{done,{error,{{1,1},erl_scan,char},{1,5}},eof} =
erl_scan:tokens(C2, eof, 1),
{error,{1,erl_scan,{illegal,character}},1} =
erl_scan:string("$\\x{g}"),
{error,{{1,1},erl_scan,{illegal,character}},{1,5}} =
erl_scan:string("$\\x{g}", {1,1}),
{error,{{1,1},erl_scan,{illegal,character}},{1,6}} =
erl_scan:string("$\\x{}",{1,1}),
test("\"\\{0}\""),
test("\"\\x{0}\""),
test("\'\\{0}\'"),
test("\'\\x{0}\'"),
{error,{{2,3},erl_scan,{illegal,character}},{2,6}} =
erl_scan:string("\"ab \n $\\x{g}\"",{1,1}),
{error,{{2,3},erl_scan,{illegal,character}},{2,6}} =
erl_scan:string("\'ab \n $\\x{g}\'",{1,1}),
test("$\\{34}"),
test("$\\x{34}"),
test("$\\{377}"),
test("$\\x{FF}"),
test("$\\{400}"),
test("$\\x{100}"),
test("$\\x{10FFFF}"),
test("$\\x{10ffff}"),
test("\"$\n \\{1}\""),
{error,{1,erl_scan,{illegal,character}},1} =
erl_scan:string("$\\x{110000}"),
{error,{{1,1},erl_scan,{illegal,character}},{1,12}} =
erl_scan:string("$\\x{110000}", {1,1}),
{error,{{1,1},erl_scan,{illegal,character}},{1,4}} =
erl_scan:string("$\\xfg", {1,1}),
test("$\\xffg"),
{error,{{1,1},erl_scan,{illegal,character}},{1,4}} =
erl_scan:string("$\\xg", {1,1}),
ok.
otp_10302(Config) when is_list(Config) ->
{ok,[{atom,1,'aсb'}],1} =
erl_scan_string("'a"++[1089]++"b'", 1),
{ok,[{atom,{1,1},'qaપ'}],{1,12}} =
erl_scan_string("'qa\\x{aaa}'",{1,1}),
{ok,[{char,1,1089}],1} = erl_scan_string([$$,1089], 1),
{ok,[{char,1,1089}],1} = erl_scan_string([$$,$\\,1089],1),
Qs = "$\\x{aaa}",
{ok,[{char,1,2730}],1} = erl_scan_string(Qs, 1),
{ok,[Q2],{1,9}} = erl_scan:string(Qs,{1,1},[text]),
[{category,char},{column,1},{line,1},{symbol,16#aaa},{text,Qs}] =
token_info_long(Q2),
U1 = "\"\\x{aaa}\"",
{ok,[T1],{1,10}} = erl_scan:string(U1, {1,1}, [text]),
[{category,string},{column,1},{line,1},{symbol,[16#aaa]},{text,U1}] =
token_info_long(T1),
U2 = "\"\\x41\\x{fff}\\x42\"",
{ok,[{string,1,[65,4095,66]}],1} = erl_scan_string(U2, 1),
U3 = "\"a\n\\x{fff}\n\"",
{ok,[{string,1,[97,10,4095,10]}],3} = erl_scan_string(U3, 1),
U4 = "\"\n\\x{aaa}\n\"",
{ok,[{string,1,[10,2730,10]}],3} = erl_scan_string(U4, 1,[]),
Str1 = "\"ab" ++ [1089] ++ "cd\"",
{ok,[{string,1,[97,98,1089,99,100]}],1} =
erl_scan_string(Str1,1),
{ok,[{string,{1,1},[97,98,1089,99,100]}],{1,8}} =
erl_scan_string(Str1, {1,1}),
OK1 = 16#D800-1,
OK2 = 16#DFFF+1,
OK3 = 16#FFFE-1,
OK4 = 16#FFFF+1,
OKL = [OK1,OK2,OK3,OK4],
Illegal1 = 16#D800,
Illegal2 = 16#DFFF,
Illegal3 = 16#FFFE,
Illegal4 = 16#FFFF,
IllegalL = [Illegal1,Illegal2,Illegal3,Illegal4],
erl_scan_string("%% "++[OK], 1, [return]) ||
OK <- OKL],
erl_scan_string("%% "++[OK1], {1,1}, [return]),
[{error,{1,erl_scan,{illegal,character}},1} =
erl_scan:string("%% "++[Illegal], 1, [return]) ||
Illegal <- IllegalL],
{error,{{1,1},erl_scan,{illegal,character}},{1,5}} =
erl_scan:string("%% "++[Illegal1], {1,1}, [return]),
[{ok,[],1} = erl_scan_string("%% "++[OK], 1, []) ||
OK <- OKL],
{ok,[],{1,5}} = erl_scan_string("%% "++[OK1], {1,1}, []),
[{error,{1,erl_scan,{illegal,character}},1} =
erl_scan:string("%% "++[Illegal], 1, []) ||
Illegal <- IllegalL],
{error,{{1,1},erl_scan,{illegal,character}},{1,5}} =
erl_scan:string("%% "++[Illegal1], {1,1}, []),
[{ok,[{string,{1,1},[OK]}],{1,4}} =
erl_scan_string("\""++[OK]++"\"",{1,1}) ||
OK <- OKL],
[{error,{{1,2},erl_scan,{illegal,character}},{1,3}} =
erl_scan:string("\""++[OK]++"\"",{1,1}) ||
OK <- IllegalL],
[{error,{{1,1},erl_scan,{illegal,character}},{1,2}} =
erl_scan:string([Illegal],{1,1}) ||
Illegal <- IllegalL],
{ok,[{char,{1,1},OK1}],{1,3}} =
erl_scan_string([$$,OK1],{1,1}),
{error,{{1,1},erl_scan,{illegal,character}},{1,2}} =
erl_scan:string([$$,Illegal1],{1,1}),
{ok,[{char,{1,1},OK1}],{1,4}} =
erl_scan_string([$$,$\\,OK1],{1,1}),
{error,{{1,1},erl_scan,{illegal,character}},{1,4}} =
erl_scan:string([$$,$\\,Illegal1],{1,1}),
{ok,[{string,{1,1},[55295]}],{1,5}} =
erl_scan_string("\"\\"++[OK1]++"\"",{1,1}),
{error,{{1,2},erl_scan,{illegal,character}},{1,4}} =
erl_scan:string("\"\\"++[Illegal1]++"\"",{1,1}),
{ok,[{char,{1,1},OK1}],{1,10}} =
erl_scan_string("$\\x{D7FF}",{1,1}),
{error,{{1,1},erl_scan,{illegal,character}},{1,10}} =
erl_scan:string("$\\x{D800}",{1,1}),
{integer,0,1} = erl_parse_abstract(1),
Float = 3.14, {float,0,Float} = erl_parse_abstract(Float),
{nil,0} = erl_parse_abstract([]),
{bin,0,
[{bin_element,0,{integer,0,1},default,default},
{bin_element,0,{integer,0,2},default,default}]} =
erl_parse_abstract(<<1,2>>),
{cons,0,{tuple,0,[{atom,0,a}]},{atom,0,b}} =
erl_parse_abstract([{a} | b]),
{string,0,"str"} = erl_parse_abstract("str"),
{cons,0,
{integer,0,$a},
{cons,0,{integer,0,55296},{string,0,"c"}}} =
erl_parse_abstract("a"++[55296]++"c"),
Line = 17,
{integer,Line,1} = erl_parse_abstract(1, Line),
Float = 3.14, {float,Line,Float} = erl_parse_abstract(Float, Line),
{nil,Line} = erl_parse_abstract([], Line),
{bin,Line,
[{bin_element,Line,{integer,Line,1},default,default},
{bin_element,Line,{integer,Line,2},default,default}]} =
erl_parse_abstract(<<1,2>>, Line),
{cons,Line,{tuple,Line,[{atom,Line,a}]},{atom,Line,b}} =
erl_parse_abstract([{a} | b], Line),
{string,Line,"str"} = erl_parse_abstract("str", Line),
{cons,Line,
{integer,Line,$a},
{cons,Line,{integer,Line,55296},{string,Line,"c"}}} =
erl_parse_abstract("a"++[55296]++"c", Line),
Opts1 = [{line,17}],
{integer,Line,1} = erl_parse_abstract(1, Opts1),
Float = 3.14, {float,Line,Float} = erl_parse_abstract(Float, Opts1),
{nil,Line} = erl_parse_abstract([], Opts1),
{bin,Line,
[{bin_element,Line,{integer,Line,1},default,default},
{bin_element,Line,{integer,Line,2},default,default}]} =
erl_parse_abstract(<<1,2>>, Opts1),
{cons,Line,{tuple,Line,[{atom,Line,a}]},{atom,Line,b}} =
erl_parse_abstract([{a} | b], Opts1),
{string,Line,"str"} = erl_parse_abstract("str", Opts1),
{cons,Line,
{integer,Line,$a},
{cons,Line,{integer,Line,55296},{string,Line,"c"}}} =
erl_parse_abstract("a"++[55296]++"c", Opts1),
[begin
{integer,Line,1} = erl_parse_abstract(1, Opts2),
Float = 3.14, {float,Line,Float} = erl_parse_abstract(Float, Opts2),
{nil,Line} = erl_parse_abstract([], Opts2),
{bin,Line,
[{bin_element,Line,{integer,Line,1},default,default},
{bin_element,Line,{integer,Line,2},default,default}]} =
erl_parse_abstract(<<1,2>>, Opts2),
{cons,Line,{tuple,Line,[{atom,Line,a}]},{atom,Line,b}} =
erl_parse_abstract([{a} | b], Opts2),
{string,Line,"str"} = erl_parse_abstract("str", Opts2),
{string,Line,[97,1024,99]} =
erl_parse_abstract("a"++[1024]++"c", Opts2)
end || Opts2 <- [[{encoding,unicode},{line,Line}],
[{encoding,utf8},{line,Line}]]],
{cons,0,
{integer,0,97},
{cons,0,{integer,0,1024},{string,0,"c"}}} =
erl_parse_abstract("a"++[1024]++"c", [{encoding,latin1}]),
ok.
OTP-10990 . Floating point number in input string .
otp_10990(Config) when is_list(Config) ->
{'EXIT',_} = (catch {foo, erl_scan:string([$",42.0,$"],1)}),
ok.
OTP-10992 . List of floats to abstract format .
otp_10992(Config) when is_list(Config) ->
{cons,0,{float,0,42.0},{nil,0}} =
erl_parse_abstract([42.0], [{encoding,unicode}]),
{cons,0,{float,0,42.0},{nil,0}} =
erl_parse_abstract([42.0], [{encoding,utf8}]),
{cons,0,{integer,0,65},{cons,0,{float,0,42.0},{nil,0}}} =
erl_parse_abstract([$A,42.0], [{encoding,unicode}]),
{cons,0,{integer,0,65},{cons,0,{float,0,42.0},{nil,0}}} =
erl_parse_abstract([$A,42.0], [{encoding,utf8}]),
ok.
OTP-11807 . erl_parse : abstract/2 .
otp_11807(Config) when is_list(Config) ->
{cons,0,{integer,0,97},{cons,0,{integer,0,98},{nil,0}}} =
erl_parse_abstract("ab", [{encoding,none}]),
{cons,0,{integer,0,-1},{nil,0}} =
erl_parse_abstract([-1], [{encoding,latin1}]),
ASCII = fun(I) -> I >= 0 andalso I < 128 end,
{string,0,"xyz"} = erl_parse_abstract("xyz", [{encoding,ASCII}]),
{cons,0,{integer,0,228},{nil,0}} =
erl_parse_abstract([228], [{encoding,ASCII}]),
{cons,0,{integer,0,97},{atom,0,a}} =
erl_parse_abstract("a"++a, [{encoding,latin1}]),
(catch erl_parse:abstract("string", [{encoding,bad}])),
ok.
otp_16480(Config) when is_list(Config) ->
F = fun mod:func/19,
F = erl_parse:normalise(erl_parse_abstract(F)),
ok.
otp_17024(Config) when is_list(Config) ->
Line = 17,
Opts1 = [{location,Line}],
{integer,Line,1} = erl_parse_abstract(1, Opts1),
Location = {17, 42},
{integer,Location,1} = erl_parse_abstract(1, Location),
Opts2 = [{location,Location}],
{integer,Location,1} = erl_parse_abstract(1, Opts2),
ok.
text_fun(Config) when is_list(Config) ->
KeepClass = fun(Class) ->
fun(C, _) -> C == Class end
end,
Join = fun(L, S) -> string:join(L, S) end,
String = fun(L) -> Join(L, " ") end,
TextAtom = KeepClass(atom),
TextInt = KeepClass(integer),
TextBase = fun(C, S) ->
C == integer andalso string:find(S, "#") /= nomatch
end,
TextLong = fun(_, S) -> length(S) > 10 end,
Texts = fun(Toks) -> [erl_scan:text(T) || T <- Toks] end,
Values = fun(Toks) -> [erl_scan:symbol(T) || T <- Toks] end,
Atom1 = "foo",
Atom2 = "'this is a long atom'",
Int1 = "42",
Int2 = "16#10",
Int3 = "8#20",
Int4 = "16",
Int5 = "12345678901234567890",
String1 = "\"A String\"",
String2 = "\"guitar string\"",
Name1 = "Short",
Name2 = "LongAndDescriptiveName",
Sep1 = "{",
Sep2 = "+",
Sep3 = "]",
Sep4 = "/",
All = [Atom1, Atom2, Int1, Int2, Int3, Int4, Int5,
String1, String2, Name1, Name2,
Sep1, Sep2, Sep3, Sep4],
{ok, Tokens0, 2} =
erl_scan:string(String([Atom1, Int1]), 2, [{text_fun, TextAtom}]),
[Atom1, undefined] = Texts(Tokens0),
[foo, 42] = Values(Tokens0),
{ok, Tokens1, 3} =
erl_scan:string(Join([Int2, Int3, Int4], "\n"), 1,
[{text_fun, TextInt}]),
[Int2, Int3, Int4] = Texts(Tokens1),
[16, 16, 16] = Values(Tokens1),
TS = [Int2, String1, Atom1, Int3, Int4, String2],
{ok, Tokens2, 6} =
erl_scan:string(Join(TS, "\n"), 1, [{text_fun, TextAtom}, text]),
TS = Texts(Tokens2),
[16, "A String", foo, 16, 16, "guitar string"] = Values(Tokens2),
Ints = [Int1, Int2, Int3, Int4],
{ok, Tokens3, 1} = erl_scan:string(String(Ints), 1, [{text_fun, TextBase}]),
[undefined, Int2, Int3, undefined] = Texts(Tokens3),
[42, 16, 16, 16] = Values(Tokens3),
Longs = lists:filter(fun(S) -> length(S) > 10 end, All),
{ok, Tokens4, 1} =
erl_scan:string(String(All), 1, [{text_fun, TextLong}]),
Longs = lists:filter(fun(T) -> T /= undefined end, Texts(Tokens4)),
{ok, Tokens5, 7} =
erl_scan:string(String(All), 7, [{text_fun, KeepClass('{')}]),
[Sep1] = lists:filter(fun(T) -> T /= undefined end, Texts(Tokens5)).
test_string(String, ExpectedWithCol) ->
{ok, ExpectedWithCol, _EndWithCol} = erl_scan_string(String, {1, 1}, []),
Expected = [ begin
{L,_C} = element(2, T),
setelement(2, T, L)
end
|| T <- ExpectedWithCol ],
{ok, Expected, _End} = erl_scan_string(String),
test(String).
erl_scan_string(String) ->
erl_scan_string(String, 1, []).
erl_scan_string(String, StartLocation) ->
erl_scan_string(String, StartLocation, []).
erl_scan_string(String, StartLocation, Options) ->
case erl_scan:string(String, StartLocation, Options) of
{ok, Tokens, EndLocation} ->
{ok, unopaque_tokens(Tokens), EndLocation};
Else ->
Else
end.
erl_scan_tokens(C, S, L) ->
erl_scan_tokens(C, S, L, []).
erl_scan_tokens(C, S, L, O) ->
case erl_scan:tokens(C, S, L, O) of
{done, {ok, Ts, End}, R} ->
{done, {ok, unopaque_tokens(Ts), End}, R};
Else ->
Else
end.
unopaque_tokens([]) ->
[];
unopaque_tokens([Token|Tokens]) ->
Attrs = element(2, Token),
Term = erl_anno:to_term(Attrs),
T = setelement(2, Token, Term),
[T | unopaque_tokens(Tokens)].
erl_parse_abstract(Term) ->
erl_parse_abstract(Term, []).
erl_parse_abstract(Term, Options) ->
Abstr = erl_parse:abstract(Term, Options),
unopaque_abstract(Abstr).
unopaque_abstract(Abstr) ->
erl_parse:anno_to_term(Abstr).
test_string(String , Expected , StartLocation , Options ) - >
{ ok , Expected , _ End } = erl_scan : string(String , StartLocation , Options ) ,
test(String) ->
[{Tokens, End},
{Wtokens, Wend},
{Ctokens, Cend},
{CWtokens, CWend},
{CWtokens2, _}] =
[scan_string_with_column(String, X) ||
X <- [[],
[return_white_spaces],
[return_comments],
[return],
{end1,End,Wend} = {end1,Wend,End},
{end2,Wend,Cend} = {end2,Cend,Wend},
{end3,Cend,CWend} = {end3,CWend,Cend},
Test that the tokens that are common to two token lists are identical .
{none,Tokens} = {none, filter_tokens(CWtokens, [white_space,comment])},
{comments,Ctokens} =
{comments,filter_tokens(CWtokens, [white_space])},
{white_spaces,Wtokens} =
{white_spaces,filter_tokens(CWtokens, [comment])},
{Line,Column} = test_decorated_tokens(String, CWtokens),
{deco,{Line,Column},End} = {deco,End,{Line,Column}},
Text = get_text(CWtokens),
{text,Text,String} = {text,String,Text},
ok = test_white_space_compaction(CWtokens, CWtokens2),
Test that white newlines are always first in text :
WhiteTokens = select_tokens(CWtokens, [white_space]),
ok = newlines_first(WhiteTokens),
[Simple,Wsimple,Csimple,WCsimple] = Simples =
[element(2, erl_scan:string(String, 1, Opts)) ||
Opts <- [[],
[return_white_spaces],
[return_comments],
[return]]],
{consistent,true} = {consistent,consistent_attributes(Simples)},
{simple_wc,WCsimple} = {simple_wc,simplify(CWtokens)},
{simple,Simple} = {simple,filter_tokens(WCsimple, [white_space,comment])},
{simple_c,Csimple} = {simple_c,filter_tokens(WCsimple, [white_space])},
{simple_w,Wsimple} = {simple_w,filter_tokens(WCsimple, [comment])},
[SimpleTxt,WsimpleTxt,CsimpleTxt,WCsimpleTxt] = SimplesTxt =
[element(2, erl_scan:string(String, 1, [text|Opts])) ||
Opts <- [[],
[return_white_spaces],
[return_comments],
[return]]],
TextTxt = get_text(WCsimpleTxt),
{text_txt,TextTxt,String} = {text_txt,String,TextTxt},
{consistent_txt,true} =
{consistent_txt,consistent_attributes(SimplesTxt)},
{simple_txt,SimpleTxt} =
{simple_txt,filter_tokens(WCsimpleTxt, [white_space,comment])},
{simple_c_txt,CsimpleTxt} =
{simple_c_txt,filter_tokens(WCsimpleTxt, [white_space])},
{simple_w_txt,WsimpleTxt} =
{simple_w_txt,filter_tokens(WCsimpleTxt, [comment])},
ok.
test_white_space_compaction(Tokens, Tokens2) when Tokens =:= Tokens2 ->
[WS, WS2] = [select_tokens(Ts, [white_space]) || Ts <- [Tokens, Tokens2]],
test_wsc(WS, WS2).
test_wsc([], []) ->
ok;
test_wsc([Token|Tokens], [Token2|Tokens2]) ->
[Text, Text2] = [Text ||
Text <- [erl_scan:text(T) || T <- [Token, Token2]]],
Sz = erts_debug:size(Text),
Sz2 = erts_debug:size({Text, Text2}),
IsCompacted = Sz2 < 2*Sz+erts_debug:size({a,a}),
ToBeCompacted = is_compacted(Text),
if
IsCompacted =:= ToBeCompacted ->
test_wsc(Tokens, Tokens2);
true ->
{compaction_error, Token}
end.
is_compacted("\r") ->
true;
is_compacted("\n\r") ->
true;
is_compacted("\n\f") ->
true;
is_compacted([$\n|String]) ->
all_spaces(String)
orelse
all_tabs(String);
is_compacted(String) ->
all_spaces(String)
orelse
all_tabs(String).
all_spaces(L) ->
all_same(L, $\s).
all_tabs(L) ->
all_same(L, $\t).
all_same(L, Char) ->
lists:all(fun(C) -> C =:= Char end, L).
newlines_first([]) ->
ok;
newlines_first([Token|Tokens]) ->
Text = erl_scan:text(Token),
Nnls = length([C || C <- Text, C =:= $\n]),
OK = case Text of
[$\n|_] ->
Nnls =:= 1;
_ ->
Nnls =:= 0
end,
if
OK -> newlines_first(Tokens);
true -> OK
end.
filter_tokens(Tokens, Tags) ->
lists:filter(fun(T) -> not lists:member(element(1, T), Tags) end, Tokens).
select_tokens(Tokens, Tags) ->
lists:filter(fun(T) -> lists:member(element(1, T), Tags) end, Tokens).
simplify([Token|Tokens]) ->
Line = erl_scan:line(Token),
[setelement(2, Token, erl_anno:new(Line)) | simplify(Tokens)];
simplify([]) ->
[].
get_text(Tokens) ->
lists:flatten(
[T ||
Token <- Tokens,
(T = erl_scan:text(Token)) =/= []]).
test_decorated_tokens(String, Tokens) ->
ToksAttrs = token_attrs(Tokens),
test_strings(ToksAttrs, String, 1, 1).
token_attrs(Tokens) ->
[{L,C,length(T),T} ||
Token <- Tokens,
([C,L,T] = token_info(Token)) =/= []].
token_info(T) ->
Column = erl_scan:column(T),
Line = erl_scan:line(T),
Text = erl_scan:text(T),
[Column, Line, Text].
token_info_long(T) ->
Column = erl_scan:column(T),
Line = erl_scan:line(T),
Text = erl_scan:text(T),
Category = erl_scan:category(T),
Symbol = erl_scan:symbol(T),
[{category,Category},{column,Column},{line,Line},
{symbol,Symbol},{text,Text}].
test_strings([], _S, Line, Column) ->
{Line,Column};
test_strings([{L,C,Len,T}=Attr|Attrs], String0, Line0, Column0) ->
{String1, Column1} = skip_newlines(String0, L, Line0, Column0),
String = skip_chars(String1, C-Column1),
{Str,Rest} = lists:split(Len, String),
if
Str =:= T ->
{Line,Column} = string_newlines(T, L, C),
test_strings(Attrs, Rest, Line, Column);
true ->
{token_error, Attr, Str}
end.
skip_newlines(String, Line, Line, Column) ->
{String, Column};
skip_newlines([$\n|String], L, Line, _Column) ->
skip_newlines(String, L, Line+1, 1);
skip_newlines([_|String], L, Line, Column) ->
skip_newlines(String, L, Line, Column+1).
skip_chars(String, 0) ->
String;
skip_chars([_|String], N) ->
skip_chars(String, N-1).
string_newlines([$\n|String], Line, _Column) ->
string_newlines(String, Line+1, 1);
string_newlines([], Line, Column) ->
{Line, Column};
string_newlines([_|String], Line, Column) ->
string_newlines(String, Line, Column+1).
scan_string_with_column(String, Options0) ->
Options = [text | Options0],
StartLoc = {1, 1},
{ok, Ts1, End1} = erl_scan:string(String, StartLoc, Options),
TString = String ++ ". ",
{ok,Ts2,End2} = scan_tokens(TString, Options, [], StartLoc),
{ok, Ts3, End3} =
scan_tokens_1({more, []}, TString, Options, [], StartLoc),
{end_2,End2,End3} = {end_2,End3,End2},
{EndLine1,EndColumn1} = End1,
End2 = {EndLine1,EndColumn1+2},
{ts_1,Ts2,Ts3} = {ts_1,Ts3,Ts2},
Ts2 = Ts1 ++ [lists:last(Ts2)],
{ok, Ts7, End7} = erl_scan:string(String, {1,1}, Options),
{ok, Ts8, End8} = scan_tokens(TString, Options, [], {1,1}),
{end1, End1} = {end1, End7},
{end2, End2} = {end2, End8},
Ts8 = Ts7 ++ [lists:last(Ts8)],
{cons,true} = {cons,consistent_attributes([Ts1,Ts2,Ts3,Ts7,Ts8])},
{Ts1, End1}.
scan_tokens(String, Options, Rs, Location) ->
case erl_scan:tokens([], String, Location, Options) of
{done, {ok,Ts,End}, ""} ->
{ok, lists:append(lists:reverse([Ts|Rs])), End};
{done, {ok,Ts,End}, Rest} ->
scan_tokens(Rest, Options, [Ts|Rs], End)
end.
scan_tokens_1({done, {ok,Ts,End}, ""}, "", _Options, Rs, _Location) ->
{ok,lists:append(lists:reverse([Ts|Rs])),End};
scan_tokens_1({done, {ok,Ts,End}, Rest}, Cs, Options, Rs, _Location) ->
scan_tokens_1({more,[]}, Rest++Cs, Options, [Ts|Rs], End);
scan_tokens_1({more, Cont}, [C | Cs], Options, Rs, Loc) ->
R = erl_scan:tokens(Cont, [C], Loc, Options),
scan_tokens_1(R, Cs, Options, Rs, Loc).
consistent_attributes([]) ->
true;
consistent_attributes([Ts | TsL]) ->
L = [T || T <- Ts, is_integer(element(2, T))],
case L of
[] ->
TagsL = [[Tag || {Tag,_} <- defined(token_info_long(T))] ||
T <- Ts],
case lists:usort(TagsL) of
[_] ->
consistent_attributes(TsL);
[] when Ts =:= [] ->
consistent_attributes(TsL);
_ ->
Ts
end;
Ts ->
consistent_attributes(TsL);
_ ->
Ts
end.
defined(L) ->
[{T,V} || {T,V} <- L, V =/= undefined].
family_list(L) ->
sofs:to_external(family(L)).
family(L) ->
sofs:relation_to_family(sofs:relation(L)).
|
2bc6438975d52f53165f511ad469a3707feae3d5f8e5cfcc624141c55ad3ec68 | imitator-model-checker/imitator | AlgoBCRandom.mli | * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
*
* IMITATOR
*
* Université Paris 13 , LIPN , CNRS , France
* Université de Lorraine , CNRS , , LORIA , Nancy , France
*
* Module description : Random Behavioral Cartography with a maximum number of consecutive failed attempts to find a non - integer point not covered by any tile [ AF10 ]
*
* File contributors : * Created : 2016/02/02
*
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
*
* IMITATOR
*
* Université Paris 13, LIPN, CNRS, France
* Université de Lorraine, CNRS, Inria, LORIA, Nancy, France
*
* Module description: Random Behavioral Cartography with a maximum number of consecutive failed attempts to find a non-integer point not covered by any tile [AF10]
*
* File contributors : Étienne André
* Created : 2016/02/02
*
************************************************************)
(************************************************************)
(* Modules *)
(************************************************************)
open AlgoCartoGeneric
(************************************************************)
(* Class definition *)
(************************************************************)
class algoBCRandom : HyperRectangle.hyper_rectangle -> NumConst.t -> int -> (PVal.pval -> AlgoStateBased.algoStateBased) -> tiles_storage ->
object inherit algoCartoGeneric
(************************************************************)
(* Class variables *)
(************************************************************)
(************************************************************)
(* Class methods *)
(************************************************************)
method algorithm_name : string
method initialize_variables : unit
* Return a new instance of the algorithm to be iteratively called ( typically IM or PRP )
(* method algorithm_instance : AlgoIMK.algoIMK *)
(* Create the initial point for the analysis *)
method get_initial_point : more_points
(* Find the next point *)
method find_next_point : more_points
Processing the result of IM
(* method process_result : Result.im_result -> PVal.pval -> unit *)
method compute_bc_result : Result.imitator_result
end | null | https://raw.githubusercontent.com/imitator-model-checker/imitator/105408ae2bd8c3e3291f286e4d127defd492a58b/src/AlgoBCRandom.mli | ocaml | **********************************************************
Modules
**********************************************************
**********************************************************
Class definition
**********************************************************
**********************************************************
Class variables
**********************************************************
**********************************************************
Class methods
**********************************************************
method algorithm_instance : AlgoIMK.algoIMK
Create the initial point for the analysis
Find the next point
method process_result : Result.im_result -> PVal.pval -> unit | * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
*
* IMITATOR
*
* Université Paris 13 , LIPN , CNRS , France
* Université de Lorraine , CNRS , , LORIA , Nancy , France
*
* Module description : Random Behavioral Cartography with a maximum number of consecutive failed attempts to find a non - integer point not covered by any tile [ AF10 ]
*
* File contributors : * Created : 2016/02/02
*
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
*
* IMITATOR
*
* Université Paris 13, LIPN, CNRS, France
* Université de Lorraine, CNRS, Inria, LORIA, Nancy, France
*
* Module description: Random Behavioral Cartography with a maximum number of consecutive failed attempts to find a non-integer point not covered by any tile [AF10]
*
* File contributors : Étienne André
* Created : 2016/02/02
*
************************************************************)
open AlgoCartoGeneric
class algoBCRandom : HyperRectangle.hyper_rectangle -> NumConst.t -> int -> (PVal.pval -> AlgoStateBased.algoStateBased) -> tiles_storage ->
object inherit algoCartoGeneric
method algorithm_name : string
method initialize_variables : unit
* Return a new instance of the algorithm to be iteratively called ( typically IM or PRP )
method get_initial_point : more_points
method find_next_point : more_points
Processing the result of IM
method compute_bc_result : Result.imitator_result
end |
f3fed781a33c53da53284175aa5b6e8487868f69246e7c513e3f728e8e7dc492 | awolven/cl-vulkan | window.lisp | Copyright 2019 , 2020
;;
;; Permission is hereby granted, free of charge, to any person obtaining
;; a copy of this software and associated documentation files (the
" Software " ) , to deal in the Software without restriction , including
;; without limitation the rights to use, copy, modify, merge, publish,
distribute , sublicense , and/or sell copies of the Software , and to
permit persons to whom the Software is furnished to do so , subject to
;; the following conditions:
;;
;; The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software .
;;
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
;; EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
;; MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
;; NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION
;; OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
;; WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
(in-package :vk)
(defcallback error-callback :void ((error :int) (description (:pointer :char)))
(error-callback-function error description))
(defun error-callback-function (error description)
(format *error-output* "GLFW Error: ~A: ~A~%" error (foreign-string-to-lisp description))
(values))
(defcallback window-close-callback :void ((window :pointer))
(glfwSetWindowShouldClose window GLFW_TRUE)
(values))
(defun set-window-close-callback (window &optional (callback-name 'window-close-callback))
(glfwSetWindowCloseCallback (h window) (get-callback callback-name)))
todo : in the ffi define this slot as int or uint
(find handle (window-registry *app*)
:key #'h :test #'pointer-eq))
(defmethod window-class (app)
'window)
(defun create-window (app &key title width height)
(assert (typep width 'integer))
(assert (typep height 'integer))
(when (zerop (glfwInit))
(error "GLFW failed to initialize."))
(glfwSetErrorCallback (get-callback 'error-callback))
(when (zerop (glfwVulkanSupported))
(error "GLFW: Vulkan Not Supported."))
(glfwWindowHint GLFW_CLIENT_API GLFW_NO_API)
(let ((window
(make-instance (window-class app)
:app app
:handle (glfwCreateWindow width height title +nullptr+ +nullptr+))))
(push window (window-registry app))
(set-framebuffer-size-callback window)
(set-window-close-callback window)
;;(glfwSetWindowUserPointer (h window) (h window))
window))
(defun create-vulkan-window (app device title width height)
(let* ((window (create-window app :width width :height height :title title))
(surface (create-window-surface (vulkan-instance app) window))
(gpu (physical-device device))
(index (get-queue-family-index-with-wsi-support gpu surface)))
(initialize-window-surface surface gpu index)
(let* ((surface-format (find-supported-format surface))
(present-mode (get-physical-device-surface-present-mode gpu surface))
(swapchain (create-swapchain device window width height surface-format present-mode)))
(setup-framebuffers device (render-pass swapchain) swapchain)
(setf (default-descriptor-pool app) (create-descriptor-pool device))
(create-frame-resources swapchain index)
window)))
(defun destroy-window (window)
(let* ((app (application window))
(device (default-logical-device app))
(vkinstance (vulkan-instance app)))
(vkDeviceWaitIdle device)
(destroy-swapchain (swapchain window))
(vkDestroySurfaceKHR (h vkinstance) (h (render-surface window)) (h (allocator device)))
(glfwDestroyWindow (h window))))
;; todo, make sure to put a glfwTerminate in destroy-application
;; maybe put glfwInit in create-application
(defun window-should-close-p (window)
(not (zerop (glfwWindowShouldClose (h window)))))
(defun (setf window-should-close-p) (value window)
(glfwSetWindowShouldClose (h window) (if value 1 0)))
(defun (setf window-title) (title window)
(glfwSetWindowTitle (h window) title))
(defun get-window-pos (window)
(with-foreign-objects ((p-x :int)
(p-y :int))
(glfwGetWindowPos (h window) p-x p-y)
(values (mem-aref p-x :int)
(mem-aref p-y :int))))
(defun set-window-pos (window x y)
(glfwSetWindowPos (h window) (round x) (round y)))
(defun get-cursor-pos (window)
(with-foreign-objects ((p-x :double)
(p-y :double))
(glfwGetCursorPos (h window) p-x p-y)
(values (mem-aref p-x :double)
(mem-aref p-y :double))))
(defun get-window-size (window)
(with-foreign-objects ((p-width :int)
(p-height :int))
(glfwGetWindowSize (h window) p-width p-height)
(values (mem-aref p-width :int)
(mem-aref p-height :int))))
(defun focus-window (window)
(glfwFocusWindow (h window)))
(defun hide-window (window)
(glfwHideWindow (h window)))
(defun show-window (window)
(glfwShowWindow (h window)))
(defun maximize-window (window)
(glfwMaximizeWindow (h window)))
(defun restore-window (window)
(glfwRestoreWindow (h window)))
(defun iconify-window (window)
(glfwIconifyWindow (h window)))
(defun window-frame-size (window)
(with-foreign-objects ((p-left :int)
(p-top :int)
(p-right :int)
(p-bottom :int))
(glfwGetWindowFrameSize (h window) p-left p-top p-right p-bottom)
(values (mem-aref p-left :int) (mem-aref p-top :int)
(mem-aref p-right :int) (mem-aref p-bottom :int))))
(defun get-framebuffer-size (window)
(with-foreign-objects ((p-width :int)
(p-height :int))
(glfwGetFramebufferSize (h window) p-width p-height)
(values (mem-aref p-width :int) (mem-aref p-height :int))))
(defun set-window-size (window height width)
(glfwSetWindowSize (h window) height width))
(defun set-window-aspect-ratio (window numer denom)
(glfwSetWindowAspectRatio (h window) numer denom))
(defun set-window-size-limits (window min-width min-height max-width max-height)
(glfwSetWindowSizeLimits (h window) min-width min-height max-width max-height))
| null | https://raw.githubusercontent.com/awolven/cl-vulkan/2b345589793748d114ab60a336d4e91852d533dc/src/window.lisp | lisp |
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
without limitation the rights to use, copy, modify, merge, publish,
the following conditions:
The above copyright notice and this permission notice shall be
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
(glfwSetWindowUserPointer (h window) (h window))
todo, make sure to put a glfwTerminate in destroy-application
maybe put glfwInit in create-application | Copyright 2019 , 2020
" Software " ) , to deal in the Software without restriction , including
distribute , sublicense , and/or sell copies of the Software , and to
permit persons to whom the Software is furnished to do so , subject to
included in all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
LIABLE FOR ANY CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION
(in-package :vk)
(defcallback error-callback :void ((error :int) (description (:pointer :char)))
(error-callback-function error description))
(defun error-callback-function (error description)
(format *error-output* "GLFW Error: ~A: ~A~%" error (foreign-string-to-lisp description))
(values))
(defcallback window-close-callback :void ((window :pointer))
(glfwSetWindowShouldClose window GLFW_TRUE)
(values))
(defun set-window-close-callback (window &optional (callback-name 'window-close-callback))
(glfwSetWindowCloseCallback (h window) (get-callback callback-name)))
todo : in the ffi define this slot as int or uint
(find handle (window-registry *app*)
:key #'h :test #'pointer-eq))
(defmethod window-class (app)
'window)
(defun create-window (app &key title width height)
(assert (typep width 'integer))
(assert (typep height 'integer))
(when (zerop (glfwInit))
(error "GLFW failed to initialize."))
(glfwSetErrorCallback (get-callback 'error-callback))
(when (zerop (glfwVulkanSupported))
(error "GLFW: Vulkan Not Supported."))
(glfwWindowHint GLFW_CLIENT_API GLFW_NO_API)
(let ((window
(make-instance (window-class app)
:app app
:handle (glfwCreateWindow width height title +nullptr+ +nullptr+))))
(push window (window-registry app))
(set-framebuffer-size-callback window)
(set-window-close-callback window)
window))
(defun create-vulkan-window (app device title width height)
(let* ((window (create-window app :width width :height height :title title))
(surface (create-window-surface (vulkan-instance app) window))
(gpu (physical-device device))
(index (get-queue-family-index-with-wsi-support gpu surface)))
(initialize-window-surface surface gpu index)
(let* ((surface-format (find-supported-format surface))
(present-mode (get-physical-device-surface-present-mode gpu surface))
(swapchain (create-swapchain device window width height surface-format present-mode)))
(setup-framebuffers device (render-pass swapchain) swapchain)
(setf (default-descriptor-pool app) (create-descriptor-pool device))
(create-frame-resources swapchain index)
window)))
(defun destroy-window (window)
(let* ((app (application window))
(device (default-logical-device app))
(vkinstance (vulkan-instance app)))
(vkDeviceWaitIdle device)
(destroy-swapchain (swapchain window))
(vkDestroySurfaceKHR (h vkinstance) (h (render-surface window)) (h (allocator device)))
(glfwDestroyWindow (h window))))
(defun window-should-close-p (window)
(not (zerop (glfwWindowShouldClose (h window)))))
(defun (setf window-should-close-p) (value window)
(glfwSetWindowShouldClose (h window) (if value 1 0)))
(defun (setf window-title) (title window)
(glfwSetWindowTitle (h window) title))
(defun get-window-pos (window)
(with-foreign-objects ((p-x :int)
(p-y :int))
(glfwGetWindowPos (h window) p-x p-y)
(values (mem-aref p-x :int)
(mem-aref p-y :int))))
(defun set-window-pos (window x y)
(glfwSetWindowPos (h window) (round x) (round y)))
(defun get-cursor-pos (window)
(with-foreign-objects ((p-x :double)
(p-y :double))
(glfwGetCursorPos (h window) p-x p-y)
(values (mem-aref p-x :double)
(mem-aref p-y :double))))
(defun get-window-size (window)
(with-foreign-objects ((p-width :int)
(p-height :int))
(glfwGetWindowSize (h window) p-width p-height)
(values (mem-aref p-width :int)
(mem-aref p-height :int))))
(defun focus-window (window)
(glfwFocusWindow (h window)))
(defun hide-window (window)
(glfwHideWindow (h window)))
(defun show-window (window)
(glfwShowWindow (h window)))
(defun maximize-window (window)
(glfwMaximizeWindow (h window)))
(defun restore-window (window)
(glfwRestoreWindow (h window)))
(defun iconify-window (window)
(glfwIconifyWindow (h window)))
(defun window-frame-size (window)
(with-foreign-objects ((p-left :int)
(p-top :int)
(p-right :int)
(p-bottom :int))
(glfwGetWindowFrameSize (h window) p-left p-top p-right p-bottom)
(values (mem-aref p-left :int) (mem-aref p-top :int)
(mem-aref p-right :int) (mem-aref p-bottom :int))))
(defun get-framebuffer-size (window)
(with-foreign-objects ((p-width :int)
(p-height :int))
(glfwGetFramebufferSize (h window) p-width p-height)
(values (mem-aref p-width :int) (mem-aref p-height :int))))
(defun set-window-size (window height width)
(glfwSetWindowSize (h window) height width))
(defun set-window-aspect-ratio (window numer denom)
(glfwSetWindowAspectRatio (h window) numer denom))
(defun set-window-size-limits (window min-width min-height max-width max-height)
(glfwSetWindowSizeLimits (h window) min-width min-height max-width max-height))
|
feba0fe424bf184369420ac1ba5d0032490c0925758713c7e9cedcf94eb5a8cf | chaw/r7rs-libs | md5-test.sps | Copyright © 2009 , 2010 < >
;; Permission is hereby granted, free of charge, to any person obtaining a
;; copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction , including without limitation
;; the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software , and to permit persons to whom the
;; Software is furnished to do so, subject to the following conditions:
;; The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
;; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
;; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
;; THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
;; FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
;; DEALINGS IN THE SOFTWARE.
(import (scheme base)
(scheme char)
(weinholt bytevector)
(weinholt md5)
(srfi 64))
(test-begin "weinholt-md5")
(define (m str) (string-downcase (md5->string (md5 (string->utf8 str)))))
(test-equal (m (make-string 100000 #\A)) "5793f7e3037448b250ae716b43ece2c2")
(test-equal (m (make-string 1000000 #\A)) "48fcdb8b87ce8ef779774199a856091d")
;;; From RFC 1321
(test-equal (m "")
"d41d8cd98f00b204e9800998ecf8427e")
(test-equal (m "a")
"0cc175b9c0f1b6a831c399e269772661")
(test-equal (m "abc")
"900150983cd24fb0d6963f7d28e17f72")
(test-equal (m "message digest")
"f96b697d7cb7938d525a2f31aaf161d0")
(test-equal (m "abcdefghijklmnopqrstuvwxyz")
"c3fcd3d76192e4007dfb496cca67e13b")
(test-equal (m "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789")
"d174ab98d277d9f5a5611c2c9f419d9f")
(test-equal (m "12345678901234567890123456789012345678901234567890123456789012345678901234567890")
"57edf4a22be3c955ac49da2e2107b67a")
From RFC 2104/2202
(define (h key data) (string-downcase (md5->string (hmac-md5 key data))))
(test-equal (h (make-bytevector 16 #x0b)
(string->utf8 "Hi There"))
"9294727a3638bb1c13f48ef8158bfc9d")
(test-equal (h (string->utf8 "Jefe")
(string->utf8 "what do ya want for nothing?"))
"750c783e6ab0b503eaa86e310a5db738")
(test-equal (h (make-bytevector 16 #xAA)
(make-bytevector 50 #xDD))
"56be34521d144c88dbb8c733f0e8b3f6")
(test-equal (h #u8(#x01 #x02 #x03 #x04 #x05 #x06 #x07 #x08 #x09 #x0a #x0b #x0c
#x0d #x0e #x0f #x10 #x11 #x12 #x13 #x14 #x15 #x16 #x17 #x18 #x19)
(make-bytevector 50 #xcd))
"697eaf0aca3a3aea3a75164746ffaa79")
(test-equal (h (make-bytevector 16 #x0c)
(string->utf8 "Test With Truncation"))
"56461ef2342edc00f9bab995690efd4c") ; not testing truncation...
(test-equal (md5-hash=?
(hmac-md5 (uint->bytevector #x0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c)
(string->utf8 "Test With Truncation"))
(uint->bytevector #x56461ef2342edc00f9bab995690efd4c))
#t)
(test-equal (md5-96-hash=?
(hmac-md5 (uint->bytevector #x0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c)
(string->utf8 "Test With Truncation"))
(uint->bytevector #x56461ef2342edc00f9bab995))
#t)
(test-equal (md5-96-hash=?
(hmac-md5 (uint->bytevector #x0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c)
(string->utf8 "Test With Truncation"))
(uint->bytevector #x56461ef2342edc00f9bab990))
#f) ;bad mac
(test-equal (h (make-bytevector 80 #xaa)
(string->utf8 "Test Using Larger Than Block-Size Key - Hash Key First"))
"6b1ab7fe4bd7bf8f0b62e6ce61b9d0cd")
(test-equal (h (make-bytevector 80 #xaa)
(string->utf8 "Test Using Larger Than Block-Size Key and Larger Than One Block-Size Data"))
"6f630fad67cda0ee1fb1f562db3aa53e")
(test-end)
| null | https://raw.githubusercontent.com/chaw/r7rs-libs/b8b625c36b040ff3d4b723e4346629a8a0e8d6c2/weinholt-tests/md5-test.sps | scheme | Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
From RFC 1321
not testing truncation...
bad mac | Copyright © 2009 , 2010 < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(import (scheme base)
(scheme char)
(weinholt bytevector)
(weinholt md5)
(srfi 64))
(test-begin "weinholt-md5")
(define (m str) (string-downcase (md5->string (md5 (string->utf8 str)))))
(test-equal (m (make-string 100000 #\A)) "5793f7e3037448b250ae716b43ece2c2")
(test-equal (m (make-string 1000000 #\A)) "48fcdb8b87ce8ef779774199a856091d")
(test-equal (m "")
"d41d8cd98f00b204e9800998ecf8427e")
(test-equal (m "a")
"0cc175b9c0f1b6a831c399e269772661")
(test-equal (m "abc")
"900150983cd24fb0d6963f7d28e17f72")
(test-equal (m "message digest")
"f96b697d7cb7938d525a2f31aaf161d0")
(test-equal (m "abcdefghijklmnopqrstuvwxyz")
"c3fcd3d76192e4007dfb496cca67e13b")
(test-equal (m "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789")
"d174ab98d277d9f5a5611c2c9f419d9f")
(test-equal (m "12345678901234567890123456789012345678901234567890123456789012345678901234567890")
"57edf4a22be3c955ac49da2e2107b67a")
From RFC 2104/2202
(define (h key data) (string-downcase (md5->string (hmac-md5 key data))))
(test-equal (h (make-bytevector 16 #x0b)
(string->utf8 "Hi There"))
"9294727a3638bb1c13f48ef8158bfc9d")
(test-equal (h (string->utf8 "Jefe")
(string->utf8 "what do ya want for nothing?"))
"750c783e6ab0b503eaa86e310a5db738")
(test-equal (h (make-bytevector 16 #xAA)
(make-bytevector 50 #xDD))
"56be34521d144c88dbb8c733f0e8b3f6")
(test-equal (h #u8(#x01 #x02 #x03 #x04 #x05 #x06 #x07 #x08 #x09 #x0a #x0b #x0c
#x0d #x0e #x0f #x10 #x11 #x12 #x13 #x14 #x15 #x16 #x17 #x18 #x19)
(make-bytevector 50 #xcd))
"697eaf0aca3a3aea3a75164746ffaa79")
(test-equal (h (make-bytevector 16 #x0c)
(string->utf8 "Test With Truncation"))
(test-equal (md5-hash=?
(hmac-md5 (uint->bytevector #x0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c)
(string->utf8 "Test With Truncation"))
(uint->bytevector #x56461ef2342edc00f9bab995690efd4c))
#t)
(test-equal (md5-96-hash=?
(hmac-md5 (uint->bytevector #x0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c)
(string->utf8 "Test With Truncation"))
(uint->bytevector #x56461ef2342edc00f9bab995))
#t)
(test-equal (md5-96-hash=?
(hmac-md5 (uint->bytevector #x0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c0c)
(string->utf8 "Test With Truncation"))
(uint->bytevector #x56461ef2342edc00f9bab990))
(test-equal (h (make-bytevector 80 #xaa)
(string->utf8 "Test Using Larger Than Block-Size Key - Hash Key First"))
"6b1ab7fe4bd7bf8f0b62e6ce61b9d0cd")
(test-equal (h (make-bytevector 80 #xaa)
(string->utf8 "Test Using Larger Than Block-Size Key and Larger Than One Block-Size Data"))
"6f630fad67cda0ee1fb1f562db3aa53e")
(test-end)
|
1a64cd12f0d0bda6b45d468bc01fc9bc1e023e748180a1641756c37fda4b1c23 | kelamg/HtDP2e-workthrough | ex27.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-beginner-reader.ss" "lang")((modname ex27) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
(define base-attendees 120)
(define base-price 5.0)
(define base-attendance-change 15)
(define base-ten-cent-change 0.1)
(define fixed-cost 180)
(define variable-cost 0.04)
(define (attendees ticket-price)
(- base-attendees (* (- ticket-price base-price)
(/ base-attendance-change
base-ten-cent-change))))
(define (revenue ticket-price)
(* ticket-price (attendees ticket-price)))
(define (cost ticket-price)
(+ fixed-cost (* variable-cost (attendees ticket-price))))
(define (profit ticket-price)
(- (revenue ticket-price)
(cost ticket-price))) | null | https://raw.githubusercontent.com/kelamg/HtDP2e-workthrough/ec05818d8b667a3c119bea8d1d22e31e72e0a958/HtDP/Fixed-size-Data/ex27.rkt | racket | about the language level of this file in a form that our tools can easily process. | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-beginner-reader.ss" "lang")((modname ex27) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
(define base-attendees 120)
(define base-price 5.0)
(define base-attendance-change 15)
(define base-ten-cent-change 0.1)
(define fixed-cost 180)
(define variable-cost 0.04)
(define (attendees ticket-price)
(- base-attendees (* (- ticket-price base-price)
(/ base-attendance-change
base-ten-cent-change))))
(define (revenue ticket-price)
(* ticket-price (attendees ticket-price)))
(define (cost ticket-price)
(+ fixed-cost (* variable-cost (attendees ticket-price))))
(define (profit ticket-price)
(- (revenue ticket-price)
(cost ticket-price))) |
19256a9cd70ef3d57f6d4777a0d0ebc737a7a5520c56ecb6af1cf0205d596239 | masateruk/micro-caml | alpha.mli | val f : KNormal.def list -> KNormal.def list
for Inline.g
| null | https://raw.githubusercontent.com/masateruk/micro-caml/0c0bd066b87cf54ce33709355c422993a85a86a1/alpha.mli | ocaml | val f : KNormal.def list -> KNormal.def list
for Inline.g
| |
2696ca027985e6fb3b2e2d313a71a11edca8ee6a0bb63095928ab66315f2696b | JakobBruenker/monadic-bang | RunGhcParser.hs | # LANGUAGE NamedFieldPuns #
# LANGUAGE BlockArguments #
{-# OPTIONS -fplugin=MonadicBang #-}
| This module makes it possible to run GHC 's Parser with plugins on source
-- files, and check what (if any) errors it produced
module MonadicBang.Test.Utils.RunGhcParser where
import Control.Monad.IO.Class
import Control.Monad.Trans.Except
import Data.Foldable
import GHC
import GHC.Driver.Plugins
import GHC.Driver.Env.Types
import GHC.Driver.Config.Finder
import GHC.Driver.Session
import GHC.LanguageExtensions qualified as LangExt
import GHC.Data.EnumSet qualified as ES
import GHC.Data.StringBuffer
import GHC.Settings.IO
import GHC.Types.SourceFile
import GHC.Types.SourceError
import GHC.Unit.Types
import GHC.Unit.Finder
import GHC.Utils.Fingerprint
import GHC.Paths qualified
import MonadicBang qualified
-- | Parses a module
parseGhc :: MonadIO m => String -> m (Either SourceError ParsedModule)
parseGhc src = do
let dflags = !initialDynFlags
modNameStr = "MonadicBang.Test.Tmp"
modName = mkModuleName modNameStr
modSummary = ModSummary
{ ms_mod = mkModule (stringToUnit modNameStr) modName
, ms_hsc_src = HsSrcFile
, ms_location = mkHomeModLocation (initFinderOpts dflags) modName "/home/user/tmp/nothing"
, ms_hs_hash = fingerprintString src
, ms_obj_date = Nothing
, ms_dyn_obj_date = Nothing
, ms_iface_date = Nothing
, ms_hie_date = Nothing
, ms_srcimps = []
, ms_textual_imps = []
, ms_ghc_prim_import = False
, ms_parsed_mod = Nothing
, ms_hspp_file = modNameStr
, ms_hspp_opts = dflags
, ms_hspp_buf = Just $ stringToStringBuffer src
}
runDefaultGhc dflags . handleSourceError (pure . Left) $
Right <$> parseModule modSummary
runDefaultGhc :: MonadIO m => DynFlags -> Ghc a -> m a
runDefaultGhc dflags action = liftIO do
runGhc (Just GHC.Paths.libdir) (do setSessionDynFlags dflags >> addPlugin >> action)
where
addPlugin = do
let session = !getSession
plugins = hsc_plugins session
setSession (session{hsc_plugins = plugins{staticPlugins = StaticPlugin (PluginWithArgs MonadicBang.plugin []) : staticPlugins plugins}})
initialDynFlags :: MonadIO m => m DynFlags
initialDynFlags = do
dflags <- withExts
pure $ dflags{generalFlags = ES.insert Opt_ImplicitImportQualified $ generalFlags dflags}
where
withExts = do pure $ foldl' xopt_set (defaultDynFlags !settings' llvmConfig') $ exts
exts = [LangExt.LambdaCase]
settings' :: MonadIO m => m Settings
settings' = either (error . showSettingsError) id <$> runExceptT (initSettings GHC.Paths.libdir)
where
showSettingsError (SettingsError_MissingData s) = s
showSettingsError (SettingsError_BadData s) = s
llvmConfig' :: LlvmConfig
llvmConfig' = error "llvmConfig"
| null | https://raw.githubusercontent.com/JakobBruenker/monadic-bang/d956083c06c21956e327209c8752c7f3c9f5d6b0/test/MonadicBang/Test/Utils/RunGhcParser.hs | haskell | # OPTIONS -fplugin=MonadicBang #
files, and check what (if any) errors it produced
| Parses a module | # LANGUAGE NamedFieldPuns #
# LANGUAGE BlockArguments #
| This module makes it possible to run GHC 's Parser with plugins on source
module MonadicBang.Test.Utils.RunGhcParser where
import Control.Monad.IO.Class
import Control.Monad.Trans.Except
import Data.Foldable
import GHC
import GHC.Driver.Plugins
import GHC.Driver.Env.Types
import GHC.Driver.Config.Finder
import GHC.Driver.Session
import GHC.LanguageExtensions qualified as LangExt
import GHC.Data.EnumSet qualified as ES
import GHC.Data.StringBuffer
import GHC.Settings.IO
import GHC.Types.SourceFile
import GHC.Types.SourceError
import GHC.Unit.Types
import GHC.Unit.Finder
import GHC.Utils.Fingerprint
import GHC.Paths qualified
import MonadicBang qualified
parseGhc :: MonadIO m => String -> m (Either SourceError ParsedModule)
parseGhc src = do
let dflags = !initialDynFlags
modNameStr = "MonadicBang.Test.Tmp"
modName = mkModuleName modNameStr
modSummary = ModSummary
{ ms_mod = mkModule (stringToUnit modNameStr) modName
, ms_hsc_src = HsSrcFile
, ms_location = mkHomeModLocation (initFinderOpts dflags) modName "/home/user/tmp/nothing"
, ms_hs_hash = fingerprintString src
, ms_obj_date = Nothing
, ms_dyn_obj_date = Nothing
, ms_iface_date = Nothing
, ms_hie_date = Nothing
, ms_srcimps = []
, ms_textual_imps = []
, ms_ghc_prim_import = False
, ms_parsed_mod = Nothing
, ms_hspp_file = modNameStr
, ms_hspp_opts = dflags
, ms_hspp_buf = Just $ stringToStringBuffer src
}
runDefaultGhc dflags . handleSourceError (pure . Left) $
Right <$> parseModule modSummary
runDefaultGhc :: MonadIO m => DynFlags -> Ghc a -> m a
runDefaultGhc dflags action = liftIO do
runGhc (Just GHC.Paths.libdir) (do setSessionDynFlags dflags >> addPlugin >> action)
where
addPlugin = do
let session = !getSession
plugins = hsc_plugins session
setSession (session{hsc_plugins = plugins{staticPlugins = StaticPlugin (PluginWithArgs MonadicBang.plugin []) : staticPlugins plugins}})
initialDynFlags :: MonadIO m => m DynFlags
initialDynFlags = do
dflags <- withExts
pure $ dflags{generalFlags = ES.insert Opt_ImplicitImportQualified $ generalFlags dflags}
where
withExts = do pure $ foldl' xopt_set (defaultDynFlags !settings' llvmConfig') $ exts
exts = [LangExt.LambdaCase]
settings' :: MonadIO m => m Settings
settings' = either (error . showSettingsError) id <$> runExceptT (initSettings GHC.Paths.libdir)
where
showSettingsError (SettingsError_MissingData s) = s
showSettingsError (SettingsError_BadData s) = s
llvmConfig' :: LlvmConfig
llvmConfig' = error "llvmConfig"
|
9586983da32c4cee7d19e45c5ac6a368d43bf714658f8106d11c9e97afe2d129 | aggieben/weblocks | helpers.lisp |
(in-package :weblocks-test)
;;; Test make-slot-writer
(deftest make-slot-writer-1
(let ((obj (copy-template *joe*)))
(funcall
(make-slot-writer 'name (lambda (value)
(declare (ignore value))
"foo"))
"bak" obj)
(first-name obj))
"foo")
| null | https://raw.githubusercontent.com/aggieben/weblocks/8d86be6a4fff8dde0b94181ba60d0dca2cbd9e25/test/views/formview/helpers.lisp | lisp | Test make-slot-writer |
(in-package :weblocks-test)
(deftest make-slot-writer-1
(let ((obj (copy-template *joe*)))
(funcall
(make-slot-writer 'name (lambda (value)
(declare (ignore value))
"foo"))
"bak" obj)
(first-name obj))
"foo")
|
7b90a3113e47204bcfe0191c119a51f586820330e4a273ba180aaf534ca833cc | javalib-team/javalib | genericMap.ml |
* Copyright ( C ) 2013 , ( INRIA )
* 2016 , ,
* This software is free software ; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* version 2.1 , with the special exception on linking described in file
* LICENSE .
* This software is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE .
* Copyright (C) 2013, Pierre Vittet (INRIA)
* 2016, David Pichardie, Laurent Guillo
* This software is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* version 2.1, with the special exception on linking described in file
* LICENSE.
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
*)
module type S =
sig
type t
val get_hash : t -> int
val compare : t -> t -> int
end
module type GenericMapSig =
sig
type key
type 'a t
val empty : 'a t
val is_empty : 'a t -> bool
val add : key -> 'a -> 'a t -> 'a t
val cardinal : 'a t -> int
val modify: key -> ('a option -> 'a) -> 'a t -> 'a t
val find : key -> 'a t -> 'a
val remove : key -> 'a t -> 'a t
val mem : key -> 'a t -> bool
val iter : (key -> 'a -> unit) -> 'a t -> unit
val iter_ordered : (key -> 'a -> unit) -> 'a t -> unit
val map : ('a -> 'b) -> 'a t -> 'b t
val mapi : (key -> 'a -> 'b) -> 'a t -> 'b t
val fold : (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b
val fold_ordered : (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b
val compare : ('a -> 'a -> int) -> 'a t -> 'a t -> int
val equal : ('a -> 'a -> bool) -> 'a t -> 'a t -> bool
val merge : ('a -> 'a -> 'a) -> 'a t -> 'a t -> 'a t
* [ merge f m1 m2 ] returns a map that has the bindings of [ m1 ] and [ m2 ] and
which binds [ k ] to [ f d1 d2 ] if [ m1 ] and [ m2 ] binds the same [ k ] to
different [ d1 ] and [ d2 ] , respectively . If [ d1 ] equals [ d2 ] , [ f d1 d2 ] is
supposed to return [ d1 ] .
which binds [k] to [f d1 d2] if [m1] and [m2] binds the same [k] to
different [d1] and [d2], respectively. If [d1] equals [d2], [f d1 d2] is
supposed to return [d1].
*)
val choose_and_remove : 'a t -> key * 'a * 'a t
(** [choose_and_remove t] returns (i,d,t') such that [t'] equals to [remove
i t] and [d] equals to [find i t].
@raise Not_found if [t] is empty. *)
val filter : ('a -> bool) -> 'a t -> 'a t
val filteri : (key -> 'a -> bool) -> 'a t -> 'a t
val key_elements : 'a t -> key list
val value_elements : 'a t -> 'a list
val elements : 'a t -> (key * 'a) list
val subset : ('a -> 'a -> bool) -> 'a t -> 'a t -> bool
end
module Make ( S : sig
type t
val get_hash : t -> int
val compare : t -> t -> int
end ) =
struct
type f = S.t
type key = f
type 'a t = (key * 'a) Ptmap.t
let empty = Ptmap.empty
let is_empty = Ptmap.is_empty
let add key o m = Ptmap.add (S.get_hash key) (key, o) m
let cardinal m = Ptmap.cardinal m
let modify key f m = Ptmap.modify (S.get_hash key)
(fun x -> match x with
| None -> (key, f None)
| Some (_,a) -> (key, f (Some a))
) m
let find key m = snd (Ptmap.find (S.get_hash key) m)
let remove key m = Ptmap.remove (S.get_hash key) m
let mem key m = Ptmap.mem (S.get_hash key) m
let iter f m = Ptmap.iter (fun _ (k,d) -> f k d) m
let iter_ordered f m =
Ptmap.iter_ordered
(fun (_,(k1,_)) (_,(k2,_)) -> S.compare k1 k2)
(fun _ (k,d) -> f k d)
m
let map f m = Ptmap.map (fun (k,d) -> (k, f d)) m
let mapi f m = Ptmap.mapi (fun _ (k,d) -> (k, f k d)) m
let fold f m e = Ptmap.fold (fun _ (k,d) -> f k d) m e
let fold_ordered f m e =
Ptmap.fold_ordered
(fun (_,(k1,_)) (_,(k2,_)) -> S.compare k1 k2)
(fun _ (k,d) -> f k d)
m e
let compare f m1 m2 = Ptmap.compare (fun a b -> f (snd a) (snd b)) m1 m2
let equal f m1 m2 = Ptmap.equal (fun a b -> f (snd a) (snd b)) m1 m2
let merge f m1 m2 = Ptmap.merge (fun a b -> (fst a), f (snd a) (snd b)) m1 m2
let choose_and_remove m =
let (_,(k,d),m) = Ptmap.choose_and_remove m in
(k, d, m)
let filter f m =
Ptmap.filter (fun (_,d) -> f d) m
let filteri f m =
Ptmap.filter (fun (k,d) -> f k d) m
let key_elements m =
Ptmap.fold (fun _ (k,_) l -> k :: l) m []
let value_elements m =
Ptmap.fold (fun _ (_,b) l -> b :: l) m []
let elements m =
Ptmap.fold (fun _ e l -> e :: l) m []
let subset s m1 m2 =
Ptmap.subset (fun (_,v1) (_,v2) -> s v1 v2) m1 m2
end
module MaptoSet ( S : sig type t end )
( GMap : GenericMapSig with type key = S.t )
( GSet : GenericSet.GenericSetSig with type elt = S.t ) =
struct
let to_set m =
GMap.fold (fun k _ s -> GSet.add k s) m GSet.empty
end
| null | https://raw.githubusercontent.com/javalib-team/javalib/0699f904dbb17e87ec0ad6ed0c258a1737e60329/src/ptrees/genericMap.ml | ocaml | * [choose_and_remove t] returns (i,d,t') such that [t'] equals to [remove
i t] and [d] equals to [find i t].
@raise Not_found if [t] is empty. |
* Copyright ( C ) 2013 , ( INRIA )
* 2016 , ,
* This software is free software ; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* version 2.1 , with the special exception on linking described in file
* LICENSE .
* This software is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE .
* Copyright (C) 2013, Pierre Vittet (INRIA)
* 2016, David Pichardie, Laurent Guillo
* This software is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
* version 2.1, with the special exception on linking described in file
* LICENSE.
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
*)
module type S =
sig
type t
val get_hash : t -> int
val compare : t -> t -> int
end
module type GenericMapSig =
sig
type key
type 'a t
val empty : 'a t
val is_empty : 'a t -> bool
val add : key -> 'a -> 'a t -> 'a t
val cardinal : 'a t -> int
val modify: key -> ('a option -> 'a) -> 'a t -> 'a t
val find : key -> 'a t -> 'a
val remove : key -> 'a t -> 'a t
val mem : key -> 'a t -> bool
val iter : (key -> 'a -> unit) -> 'a t -> unit
val iter_ordered : (key -> 'a -> unit) -> 'a t -> unit
val map : ('a -> 'b) -> 'a t -> 'b t
val mapi : (key -> 'a -> 'b) -> 'a t -> 'b t
val fold : (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b
val fold_ordered : (key -> 'a -> 'b -> 'b) -> 'a t -> 'b -> 'b
val compare : ('a -> 'a -> int) -> 'a t -> 'a t -> int
val equal : ('a -> 'a -> bool) -> 'a t -> 'a t -> bool
val merge : ('a -> 'a -> 'a) -> 'a t -> 'a t -> 'a t
* [ merge f m1 m2 ] returns a map that has the bindings of [ m1 ] and [ m2 ] and
which binds [ k ] to [ f d1 d2 ] if [ m1 ] and [ m2 ] binds the same [ k ] to
different [ d1 ] and [ d2 ] , respectively . If [ d1 ] equals [ d2 ] , [ f d1 d2 ] is
supposed to return [ d1 ] .
which binds [k] to [f d1 d2] if [m1] and [m2] binds the same [k] to
different [d1] and [d2], respectively. If [d1] equals [d2], [f d1 d2] is
supposed to return [d1].
*)
val choose_and_remove : 'a t -> key * 'a * 'a t
val filter : ('a -> bool) -> 'a t -> 'a t
val filteri : (key -> 'a -> bool) -> 'a t -> 'a t
val key_elements : 'a t -> key list
val value_elements : 'a t -> 'a list
val elements : 'a t -> (key * 'a) list
val subset : ('a -> 'a -> bool) -> 'a t -> 'a t -> bool
end
module Make ( S : sig
type t
val get_hash : t -> int
val compare : t -> t -> int
end ) =
struct
type f = S.t
type key = f
type 'a t = (key * 'a) Ptmap.t
let empty = Ptmap.empty
let is_empty = Ptmap.is_empty
let add key o m = Ptmap.add (S.get_hash key) (key, o) m
let cardinal m = Ptmap.cardinal m
let modify key f m = Ptmap.modify (S.get_hash key)
(fun x -> match x with
| None -> (key, f None)
| Some (_,a) -> (key, f (Some a))
) m
let find key m = snd (Ptmap.find (S.get_hash key) m)
let remove key m = Ptmap.remove (S.get_hash key) m
let mem key m = Ptmap.mem (S.get_hash key) m
let iter f m = Ptmap.iter (fun _ (k,d) -> f k d) m
let iter_ordered f m =
Ptmap.iter_ordered
(fun (_,(k1,_)) (_,(k2,_)) -> S.compare k1 k2)
(fun _ (k,d) -> f k d)
m
let map f m = Ptmap.map (fun (k,d) -> (k, f d)) m
let mapi f m = Ptmap.mapi (fun _ (k,d) -> (k, f k d)) m
let fold f m e = Ptmap.fold (fun _ (k,d) -> f k d) m e
let fold_ordered f m e =
Ptmap.fold_ordered
(fun (_,(k1,_)) (_,(k2,_)) -> S.compare k1 k2)
(fun _ (k,d) -> f k d)
m e
let compare f m1 m2 = Ptmap.compare (fun a b -> f (snd a) (snd b)) m1 m2
let equal f m1 m2 = Ptmap.equal (fun a b -> f (snd a) (snd b)) m1 m2
let merge f m1 m2 = Ptmap.merge (fun a b -> (fst a), f (snd a) (snd b)) m1 m2
let choose_and_remove m =
let (_,(k,d),m) = Ptmap.choose_and_remove m in
(k, d, m)
let filter f m =
Ptmap.filter (fun (_,d) -> f d) m
let filteri f m =
Ptmap.filter (fun (k,d) -> f k d) m
let key_elements m =
Ptmap.fold (fun _ (k,_) l -> k :: l) m []
let value_elements m =
Ptmap.fold (fun _ (_,b) l -> b :: l) m []
let elements m =
Ptmap.fold (fun _ e l -> e :: l) m []
let subset s m1 m2 =
Ptmap.subset (fun (_,v1) (_,v2) -> s v1 v2) m1 m2
end
module MaptoSet ( S : sig type t end )
( GMap : GenericMapSig with type key = S.t )
( GSet : GenericSet.GenericSetSig with type elt = S.t ) =
struct
let to_set m =
GMap.fold (fun k _ s -> GSet.add k s) m GSet.empty
end
|
1af193884579714934e22578e64826ab5f372c464ffe2ad4ca7a0d95d397ee0a | cloudant/mango | mango_util.erl | Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% -2.0
%
% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(mango_util).
-export([
open_doc/2,
open_ddocs/1,
load_ddoc/2,
defer/3,
do_defer/3,
assert_ejson/1,
to_lower/1,
enc_dbname/1,
dec_dbname/1,
enc_hex/1,
dec_hex/1,
lucene_escape_field/1,
lucene_escape_query_value/1,
lucene_escape_user/1,
is_number_string/1,
check_lang/1,
has_suffix/2,
join/2,
parse_field/1,
cached_re/2
]).
-include_lib("couch/include/couch_db.hrl").
-include("mango.hrl").
-define(DIGITS, "(\\p{N}+)").
-define(HEXDIGITS, "([0-9a-fA-F]+)").
-define(EXP, "[eE][+-]?" ++ ?DIGITS).
-define(NUMSTRING,
"[\\x00-\\x20]*" ++ "[+-]?(" ++ "NaN|"
++ "Infinity|" ++ "((("
++ ?DIGITS
++ "(\\.)?("
++ ?DIGITS
++ "?)("
++ ?EXP
++ ")?)|"
++ "(\\.("
++ ?DIGITS
++ ")("
++ ?EXP
++ ")?)|"
++ "(("
++ "(0[xX]"
++ ?HEXDIGITS
++ "(\\.)?)|"
++ "(0[xX]"
++ ?HEXDIGITS
++ "?(\\.)"
++ ?HEXDIGITS
++ ")"
++ ")[pP][+-]?" ++ ?DIGITS ++ "))" ++ "[fFdD]?))" ++ "[\\x00-\\x20]*").
open_doc(Db, DocId) ->
Opts = [deleted],
case mango_util:defer(fabric, open_doc, [Db, DocId, Opts]) of
{ok, Doc} ->
{ok, Doc};
{not_found, _} ->
not_found;
_ ->
?MANGO_ERROR({error_loading_doc, DocId})
end.
open_ddocs(Db) ->
case mango_util:defer(fabric, design_docs, [Db]) of
{ok, Docs} ->
{ok, Docs};
_ ->
?MANGO_ERROR(error_loading_ddocs)
end.
load_ddoc(Db, DDocId) ->
case mango_util:open_doc(Db, DDocId) of
{ok, Doc} ->
{ok, check_lang(Doc)};
not_found ->
Body = {[
{<<"language">>, <<"query">>}
]},
{ok, #doc{id = DDocId, body = Body}}
end.
defer(Mod, Fun, Args) ->
twig : , " MFA : ~p " , [ { Mod , Fun , Args } ] ) ,
{Pid, Ref} = erlang:spawn_monitor(?MODULE, do_defer, [Mod, Fun, Args]),
receive
{'DOWN', Ref, process, Pid, {mango_defer_ok, Value}} ->
Value;
{'DOWN', Ref, process, Pid, {mango_defer_throw, Value}} ->
erlang:throw(Value);
{'DOWN', Ref, process, Pid, {mango_defer_error, Value}} ->
erlang:error(Value);
{'DOWN', Ref, process, Pid, {mango_defer_exit, Value}} ->
erlang:exit(Value)
end.
do_defer(Mod, Fun, Args) ->
try erlang:apply(Mod, Fun, Args) of
Resp ->
erlang:exit({mango_defer_ok, Resp})
catch
throw:Error ->
Stack = erlang:get_stacktrace(),
twig:log(err, "Defered error: ~w~n ~p", [{throw, Error}, Stack]),
erlang:exit({mango_defer_throw, Error});
error:Error ->
Stack = erlang:get_stacktrace(),
twig:log(err, "Defered error: ~w~n ~p", [{error, Error}, Stack]),
erlang:exit({mango_defer_error, Error});
exit:Error ->
Stack = erlang:get_stacktrace(),
twig:log(err, "Defered error: ~w~n ~p", [{exit, Error}, Stack]),
erlang:exit({mango_defer_exit, Error})
end.
assert_ejson({Props}) ->
assert_ejson_obj(Props);
assert_ejson(Vals) when is_list(Vals) ->
assert_ejson_arr(Vals);
assert_ejson(null) ->
true;
assert_ejson(true) ->
true;
assert_ejson(false) ->
true;
assert_ejson(String) when is_binary(String) ->
true;
assert_ejson(Number) when is_number(Number) ->
true;
assert_ejson(_Else) ->
false.
assert_ejson_obj([]) ->
true;
assert_ejson_obj([{Key, Val} | Rest]) when is_binary(Key) ->
case assert_ejson(Val) of
true ->
assert_ejson_obj(Rest);
false ->
false
end;
assert_ejson_obj(_Else) ->
false.
assert_ejson_arr([]) ->
true;
assert_ejson_arr([Val | Rest]) ->
case assert_ejson(Val) of
true ->
assert_ejson_arr(Rest);
false ->
false
end.
check_lang(#doc{id = Id, deleted = true}) ->
Body = {[
{<<"language">>, <<"query">>}
]},
#doc{id = Id, body = Body};
check_lang(#doc{body = {Props}} = Doc) ->
case lists:keyfind(<<"language">>, 1, Props) of
{<<"language">>, <<"query">>} ->
Doc;
Else ->
?MANGO_ERROR({invalid_ddoc_lang, Else})
end.
to_lower(Key) when is_binary(Key) ->
KStr = binary_to_list(Key),
KLower = string:to_lower(KStr),
list_to_binary(KLower).
enc_dbname(<<>>) ->
<<>>;
enc_dbname(<<A:8/integer, Rest/binary>>) ->
Bytes = enc_db_byte(A),
Tail = enc_dbname(Rest),
<<Bytes/binary, Tail/binary>>.
enc_db_byte(N) when N >= $a, N =< $z -> <<N>>;
enc_db_byte(N) when N >= $0, N =< $9 -> <<N>>;
enc_db_byte(N) when N == $/; N == $_; N == $- -> <<N>>;
enc_db_byte(N) ->
H = enc_hex_byte(N div 16),
L = enc_hex_byte(N rem 16),
<<$$, H:8/integer, L:8/integer>>.
dec_dbname(<<>>) ->
<<>>;
dec_dbname(<<$$, _:8/integer>>) ->
throw(invalid_dbname_encoding);
dec_dbname(<<$$, H:8/integer, L:8/integer, Rest/binary>>) ->
Byte = (dec_hex_byte(H) bsl 4) bor dec_hex_byte(L),
Tail = dec_dbname(Rest),
<<Byte:8/integer, Tail/binary>>;
dec_dbname(<<N:8/integer, Rest/binary>>) ->
Tail = dec_dbname(Rest),
<<N:8/integer, Tail/binary>>.
enc_hex(<<>>) ->
<<>>;
enc_hex(<<V:8/integer, Rest/binary>>) ->
H = enc_hex_byte(V div 16),
L = enc_hex_byte(V rem 16),
Tail = enc_hex(Rest),
<<H:8/integer, L:8/integer, Tail/binary>>.
enc_hex_byte(N) when N >= 0, N < 10 -> $0 + N;
enc_hex_byte(N) when N >= 10, N < 16 -> $a + (N - 10);
enc_hex_byte(N) -> throw({invalid_hex_value, N}).
dec_hex(<<>>) ->
<<>>;
dec_hex(<<_:8/integer>>) ->
throw(invalid_hex_string);
dec_hex(<<H:8/integer, L:8/integer, Rest/binary>>) ->
Byte = (dec_hex_byte(H) bsl 4) bor dec_hex_byte(L),
Tail = dec_hex(Rest),
<<Byte:8/integer, Tail/binary>>.
dec_hex_byte(N) when N >= $0, N =< $9 -> (N - $0);
dec_hex_byte(N) when N >= $a, N =< $f -> (N - $a) + 10;
dec_hex_byte(N) when N >= $A, N =< $F -> (N - $A) + 10;
dec_hex_byte(N) -> throw({invalid_hex_character, N}).
lucene_escape_field(Bin) when is_binary(Bin) ->
Str = binary_to_list(Bin),
Enc = lucene_escape_field(Str),
iolist_to_binary(Enc);
lucene_escape_field([H | T]) when is_number(H), H >= 0, H =< 255 ->
if
H >= $a, $z >= H ->
[H | lucene_escape_field(T)];
H >= $A, $Z >= H ->
[H | lucene_escape_field(T)];
H >= $0, $9 >= H ->
[H | lucene_escape_field(T)];
true ->
Hi = enc_hex_byte(H div 16),
Lo = enc_hex_byte(H rem 16),
[$_, Hi, Lo | lucene_escape_field(T)]
end;
lucene_escape_field([]) ->
[].
lucene_escape_query_value(IoList) when is_list(IoList) ->
lucene_escape_query_value(iolist_to_binary(IoList));
lucene_escape_query_value(Bin) when is_binary(Bin) ->
IoList = lucene_escape_qv(Bin),
iolist_to_binary(IoList).
This escapes the special Lucene query characters
% listed below as well as any whitespace.
%
+ - & & || ! ( ) { } [ ] ^ ~ * ? : \ " /
%
lucene_escape_qv(<<>>) -> [];
lucene_escape_qv(<<"&&", Rest/binary>>) ->
["\\&&" | lucene_escape_qv(Rest)];
lucene_escape_qv(<<"||", Rest/binary>>) ->
["\\||" | lucene_escape_qv(Rest)];
lucene_escape_qv(<<C, Rest/binary>>) ->
NeedsEscape = "+-(){}[]!^~*?:/\\\" \t\r\n",
Out = case lists:member(C, NeedsEscape) of
true -> ["\\", C];
false -> [C]
end,
Out ++ lucene_escape_qv(Rest).
lucene_escape_user(Field) ->
{ok, Path} = parse_field(Field),
Escaped = [mango_util:lucene_escape_field(P) || P <- Path],
iolist_to_binary(join(".", Escaped)).
has_suffix(Bin, Suffix) when is_binary(Bin), is_binary(Suffix) ->
SBin = size(Bin),
SSuffix = size(Suffix),
if SBin < SSuffix -> false; true ->
PSize = SBin - SSuffix,
case Bin of
<<_:PSize/binary, Suffix/binary>> ->
true;
_ ->
false
end
end.
join(_Sep, [Item]) ->
[Item];
join(Sep, [Item | Rest]) ->
[Item, Sep | join(Sep, Rest)].
is_number_string(Value) when is_binary(Value) ->
is_number_string(binary_to_list(Value));
is_number_string(Value) when is_list(Value)->
MP = cached_re(mango_numstring_re, ?NUMSTRING),
case re:run(Value, MP) of
nomatch ->
false;
_ ->
true
end.
cached_re(Name, RE) ->
case mochiglobal:get(Name) of
undefined ->
{ok, MP} = re:compile(RE),
ok = mochiglobal:put(Name, MP),
MP;
MP ->
MP
end.
parse_field(Field) ->
case binary:match(Field, <<"\\">>, []) of
nomatch ->
% Fast path, no regex required
{ok, check_non_empty(Field, binary:split(Field, <<".">>, [global]))};
_ ->
parse_field_slow(Field)
end.
parse_field_slow(Field) ->
Path = lists:map(fun
(P) when P =:= <<>> ->
?MANGO_ERROR({invalid_field_name, Field});
(P) ->
re:replace(P, <<"\\\\">>, <<>>, [global, {return, binary}])
end, re:split(Field, <<"(?<!\\\\)\\.">>)),
{ok, Path}.
check_non_empty(Field, Parts) ->
case lists:member(<<>>, Parts) of
true ->
?MANGO_ERROR({invalid_field_name, Field});
false ->
Parts
end.
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
parse_field_test() ->
?assertEqual({ok, [<<"ab">>]}, parse_field(<<"ab">>)),
?assertEqual({ok, [<<"a">>, <<"b">>]}, parse_field(<<"a.b">>)),
?assertEqual({ok, [<<"a.b">>]}, parse_field(<<"a\\.b">>)),
?assertEqual({ok, [<<"a">>, <<"b">>, <<"c">>]}, parse_field(<<"a.b.c">>)),
?assertEqual({ok, [<<"a">>, <<"b.c">>]}, parse_field(<<"a.b\\.c">>)),
Exception = {mango_error, ?MODULE, {invalid_field_name, <<"a..b">>}},
?assertThrow(Exception, parse_field(<<"a..b">>)).
is_number_string_test() ->
?assert(is_number_string("0")),
?assert(is_number_string("1")),
?assert(is_number_string("1.0")),
?assert(is_number_string("1.0E10")),
?assert(is_number_string("0d")),
?assert(is_number_string("-1")),
?assert(is_number_string("-1.0")),
?assertNot(is_number_string("hello")),
?assertNot(is_number_string("")),
?assertMatch({match, _}, re:run("1.0", mochiglobal:get(mango_numstring_re))).
-endif.
| null | https://raw.githubusercontent.com/cloudant/mango/3da3110a6ee169c4d6991c00b8f8d59d20b7fe1f/src/mango_util.erl | erlang | use this file except in compliance with the License. You may obtain a copy of
the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License.
listed below as well as any whitespace.
Fast path, no regex required | Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
-module(mango_util).
-export([
open_doc/2,
open_ddocs/1,
load_ddoc/2,
defer/3,
do_defer/3,
assert_ejson/1,
to_lower/1,
enc_dbname/1,
dec_dbname/1,
enc_hex/1,
dec_hex/1,
lucene_escape_field/1,
lucene_escape_query_value/1,
lucene_escape_user/1,
is_number_string/1,
check_lang/1,
has_suffix/2,
join/2,
parse_field/1,
cached_re/2
]).
-include_lib("couch/include/couch_db.hrl").
-include("mango.hrl").
-define(DIGITS, "(\\p{N}+)").
-define(HEXDIGITS, "([0-9a-fA-F]+)").
-define(EXP, "[eE][+-]?" ++ ?DIGITS).
-define(NUMSTRING,
"[\\x00-\\x20]*" ++ "[+-]?(" ++ "NaN|"
++ "Infinity|" ++ "((("
++ ?DIGITS
++ "(\\.)?("
++ ?DIGITS
++ "?)("
++ ?EXP
++ ")?)|"
++ "(\\.("
++ ?DIGITS
++ ")("
++ ?EXP
++ ")?)|"
++ "(("
++ "(0[xX]"
++ ?HEXDIGITS
++ "(\\.)?)|"
++ "(0[xX]"
++ ?HEXDIGITS
++ "?(\\.)"
++ ?HEXDIGITS
++ ")"
++ ")[pP][+-]?" ++ ?DIGITS ++ "))" ++ "[fFdD]?))" ++ "[\\x00-\\x20]*").
open_doc(Db, DocId) ->
Opts = [deleted],
case mango_util:defer(fabric, open_doc, [Db, DocId, Opts]) of
{ok, Doc} ->
{ok, Doc};
{not_found, _} ->
not_found;
_ ->
?MANGO_ERROR({error_loading_doc, DocId})
end.
open_ddocs(Db) ->
case mango_util:defer(fabric, design_docs, [Db]) of
{ok, Docs} ->
{ok, Docs};
_ ->
?MANGO_ERROR(error_loading_ddocs)
end.
load_ddoc(Db, DDocId) ->
case mango_util:open_doc(Db, DDocId) of
{ok, Doc} ->
{ok, check_lang(Doc)};
not_found ->
Body = {[
{<<"language">>, <<"query">>}
]},
{ok, #doc{id = DDocId, body = Body}}
end.
defer(Mod, Fun, Args) ->
twig : , " MFA : ~p " , [ { Mod , Fun , Args } ] ) ,
{Pid, Ref} = erlang:spawn_monitor(?MODULE, do_defer, [Mod, Fun, Args]),
receive
{'DOWN', Ref, process, Pid, {mango_defer_ok, Value}} ->
Value;
{'DOWN', Ref, process, Pid, {mango_defer_throw, Value}} ->
erlang:throw(Value);
{'DOWN', Ref, process, Pid, {mango_defer_error, Value}} ->
erlang:error(Value);
{'DOWN', Ref, process, Pid, {mango_defer_exit, Value}} ->
erlang:exit(Value)
end.
do_defer(Mod, Fun, Args) ->
try erlang:apply(Mod, Fun, Args) of
Resp ->
erlang:exit({mango_defer_ok, Resp})
catch
throw:Error ->
Stack = erlang:get_stacktrace(),
twig:log(err, "Defered error: ~w~n ~p", [{throw, Error}, Stack]),
erlang:exit({mango_defer_throw, Error});
error:Error ->
Stack = erlang:get_stacktrace(),
twig:log(err, "Defered error: ~w~n ~p", [{error, Error}, Stack]),
erlang:exit({mango_defer_error, Error});
exit:Error ->
Stack = erlang:get_stacktrace(),
twig:log(err, "Defered error: ~w~n ~p", [{exit, Error}, Stack]),
erlang:exit({mango_defer_exit, Error})
end.
assert_ejson({Props}) ->
assert_ejson_obj(Props);
assert_ejson(Vals) when is_list(Vals) ->
assert_ejson_arr(Vals);
assert_ejson(null) ->
true;
assert_ejson(true) ->
true;
assert_ejson(false) ->
true;
assert_ejson(String) when is_binary(String) ->
true;
assert_ejson(Number) when is_number(Number) ->
true;
assert_ejson(_Else) ->
false.
assert_ejson_obj([]) ->
true;
assert_ejson_obj([{Key, Val} | Rest]) when is_binary(Key) ->
case assert_ejson(Val) of
true ->
assert_ejson_obj(Rest);
false ->
false
end;
assert_ejson_obj(_Else) ->
false.
assert_ejson_arr([]) ->
true;
assert_ejson_arr([Val | Rest]) ->
case assert_ejson(Val) of
true ->
assert_ejson_arr(Rest);
false ->
false
end.
check_lang(#doc{id = Id, deleted = true}) ->
Body = {[
{<<"language">>, <<"query">>}
]},
#doc{id = Id, body = Body};
check_lang(#doc{body = {Props}} = Doc) ->
case lists:keyfind(<<"language">>, 1, Props) of
{<<"language">>, <<"query">>} ->
Doc;
Else ->
?MANGO_ERROR({invalid_ddoc_lang, Else})
end.
to_lower(Key) when is_binary(Key) ->
KStr = binary_to_list(Key),
KLower = string:to_lower(KStr),
list_to_binary(KLower).
enc_dbname(<<>>) ->
<<>>;
enc_dbname(<<A:8/integer, Rest/binary>>) ->
Bytes = enc_db_byte(A),
Tail = enc_dbname(Rest),
<<Bytes/binary, Tail/binary>>.
enc_db_byte(N) when N >= $a, N =< $z -> <<N>>;
enc_db_byte(N) when N >= $0, N =< $9 -> <<N>>;
enc_db_byte(N) when N == $/; N == $_; N == $- -> <<N>>;
enc_db_byte(N) ->
H = enc_hex_byte(N div 16),
L = enc_hex_byte(N rem 16),
<<$$, H:8/integer, L:8/integer>>.
dec_dbname(<<>>) ->
<<>>;
dec_dbname(<<$$, _:8/integer>>) ->
throw(invalid_dbname_encoding);
dec_dbname(<<$$, H:8/integer, L:8/integer, Rest/binary>>) ->
Byte = (dec_hex_byte(H) bsl 4) bor dec_hex_byte(L),
Tail = dec_dbname(Rest),
<<Byte:8/integer, Tail/binary>>;
dec_dbname(<<N:8/integer, Rest/binary>>) ->
Tail = dec_dbname(Rest),
<<N:8/integer, Tail/binary>>.
enc_hex(<<>>) ->
<<>>;
enc_hex(<<V:8/integer, Rest/binary>>) ->
H = enc_hex_byte(V div 16),
L = enc_hex_byte(V rem 16),
Tail = enc_hex(Rest),
<<H:8/integer, L:8/integer, Tail/binary>>.
enc_hex_byte(N) when N >= 0, N < 10 -> $0 + N;
enc_hex_byte(N) when N >= 10, N < 16 -> $a + (N - 10);
enc_hex_byte(N) -> throw({invalid_hex_value, N}).
dec_hex(<<>>) ->
<<>>;
dec_hex(<<_:8/integer>>) ->
throw(invalid_hex_string);
dec_hex(<<H:8/integer, L:8/integer, Rest/binary>>) ->
Byte = (dec_hex_byte(H) bsl 4) bor dec_hex_byte(L),
Tail = dec_hex(Rest),
<<Byte:8/integer, Tail/binary>>.
dec_hex_byte(N) when N >= $0, N =< $9 -> (N - $0);
dec_hex_byte(N) when N >= $a, N =< $f -> (N - $a) + 10;
dec_hex_byte(N) when N >= $A, N =< $F -> (N - $A) + 10;
dec_hex_byte(N) -> throw({invalid_hex_character, N}).
lucene_escape_field(Bin) when is_binary(Bin) ->
Str = binary_to_list(Bin),
Enc = lucene_escape_field(Str),
iolist_to_binary(Enc);
lucene_escape_field([H | T]) when is_number(H), H >= 0, H =< 255 ->
if
H >= $a, $z >= H ->
[H | lucene_escape_field(T)];
H >= $A, $Z >= H ->
[H | lucene_escape_field(T)];
H >= $0, $9 >= H ->
[H | lucene_escape_field(T)];
true ->
Hi = enc_hex_byte(H div 16),
Lo = enc_hex_byte(H rem 16),
[$_, Hi, Lo | lucene_escape_field(T)]
end;
lucene_escape_field([]) ->
[].
lucene_escape_query_value(IoList) when is_list(IoList) ->
lucene_escape_query_value(iolist_to_binary(IoList));
lucene_escape_query_value(Bin) when is_binary(Bin) ->
IoList = lucene_escape_qv(Bin),
iolist_to_binary(IoList).
This escapes the special Lucene query characters
+ - & & || ! ( ) { } [ ] ^ ~ * ? : \ " /
lucene_escape_qv(<<>>) -> [];
lucene_escape_qv(<<"&&", Rest/binary>>) ->
["\\&&" | lucene_escape_qv(Rest)];
lucene_escape_qv(<<"||", Rest/binary>>) ->
["\\||" | lucene_escape_qv(Rest)];
lucene_escape_qv(<<C, Rest/binary>>) ->
NeedsEscape = "+-(){}[]!^~*?:/\\\" \t\r\n",
Out = case lists:member(C, NeedsEscape) of
true -> ["\\", C];
false -> [C]
end,
Out ++ lucene_escape_qv(Rest).
lucene_escape_user(Field) ->
{ok, Path} = parse_field(Field),
Escaped = [mango_util:lucene_escape_field(P) || P <- Path],
iolist_to_binary(join(".", Escaped)).
has_suffix(Bin, Suffix) when is_binary(Bin), is_binary(Suffix) ->
SBin = size(Bin),
SSuffix = size(Suffix),
if SBin < SSuffix -> false; true ->
PSize = SBin - SSuffix,
case Bin of
<<_:PSize/binary, Suffix/binary>> ->
true;
_ ->
false
end
end.
join(_Sep, [Item]) ->
[Item];
join(Sep, [Item | Rest]) ->
[Item, Sep | join(Sep, Rest)].
is_number_string(Value) when is_binary(Value) ->
is_number_string(binary_to_list(Value));
is_number_string(Value) when is_list(Value)->
MP = cached_re(mango_numstring_re, ?NUMSTRING),
case re:run(Value, MP) of
nomatch ->
false;
_ ->
true
end.
cached_re(Name, RE) ->
case mochiglobal:get(Name) of
undefined ->
{ok, MP} = re:compile(RE),
ok = mochiglobal:put(Name, MP),
MP;
MP ->
MP
end.
parse_field(Field) ->
case binary:match(Field, <<"\\">>, []) of
nomatch ->
{ok, check_non_empty(Field, binary:split(Field, <<".">>, [global]))};
_ ->
parse_field_slow(Field)
end.
parse_field_slow(Field) ->
Path = lists:map(fun
(P) when P =:= <<>> ->
?MANGO_ERROR({invalid_field_name, Field});
(P) ->
re:replace(P, <<"\\\\">>, <<>>, [global, {return, binary}])
end, re:split(Field, <<"(?<!\\\\)\\.">>)),
{ok, Path}.
check_non_empty(Field, Parts) ->
case lists:member(<<>>, Parts) of
true ->
?MANGO_ERROR({invalid_field_name, Field});
false ->
Parts
end.
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
parse_field_test() ->
?assertEqual({ok, [<<"ab">>]}, parse_field(<<"ab">>)),
?assertEqual({ok, [<<"a">>, <<"b">>]}, parse_field(<<"a.b">>)),
?assertEqual({ok, [<<"a.b">>]}, parse_field(<<"a\\.b">>)),
?assertEqual({ok, [<<"a">>, <<"b">>, <<"c">>]}, parse_field(<<"a.b.c">>)),
?assertEqual({ok, [<<"a">>, <<"b.c">>]}, parse_field(<<"a.b\\.c">>)),
Exception = {mango_error, ?MODULE, {invalid_field_name, <<"a..b">>}},
?assertThrow(Exception, parse_field(<<"a..b">>)).
is_number_string_test() ->
?assert(is_number_string("0")),
?assert(is_number_string("1")),
?assert(is_number_string("1.0")),
?assert(is_number_string("1.0E10")),
?assert(is_number_string("0d")),
?assert(is_number_string("-1")),
?assert(is_number_string("-1.0")),
?assertNot(is_number_string("hello")),
?assertNot(is_number_string("")),
?assertMatch({match, _}, re:run("1.0", mochiglobal:get(mango_numstring_re))).
-endif.
|
526c5bec336bc2ecae3777db55b4c6978e6189ad682b9e9925bb66e778aacd50 | gildor478/ounit | oUnitResultSummary.ml | (**************************************************************************)
The OUnit library
(* *)
Copyright ( C ) 2002 - 2008 Maas - Maarten Zeeman .
Copyright ( C ) 2010 OCamlCore SARL
Copyright ( C ) 2013
(* *)
The package OUnit is copyright by Maas - Maarten Zeeman , OCamlCore SARL
and .
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining *)
a copy of this document and the OUnit software ( " the Software " ) , to
deal in the Software without restriction , including without limitation
(* the rights to use, copy, modify, merge, publish, distribute, *)
sublicense , and/or sell copies of the Software , and to permit persons
to whom the Software is furnished to do so , subject to the following
(* conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be *)
included in all copies or substantial portions of the Software .
(* *)
(* The Software is provided ``as is'', without warranty of any kind, *)
(* express or implied, including but not limited to the warranties of *)
(* merchantability, fitness for a particular purpose and noninfringement. *)
In no event shall be liable for any claim , damages
(* or other liability, whether in an action of contract, tort or *)
otherwise , arising from , out of or in connection with the Software or
(* the use or other dealings in the software. *)
(* *)
See LICENSE.txt for details .
(**************************************************************************)
(*
Summary of the results, based on captured log events.
*)
open OUnitUtils
open OUnitTest
open OUnitLogger
type log_entry =
float (* time since start of the test *) *
log_severity option *
string (* log entry without \n *)
type test_data =
{
test_name: string;
timestamp_start: float; (* UNIX timestamp *)
timestamp_end: float; (* UNIX timestamp *)
log_entries: log_entry list; (* time sorted log entry, timestamp from
timestamp_start *)
test_result: OUnitTest.result;
}
type t =
{
suite_name: string;
start_at: float;
charset: string;
conf: (string * string) list;
running_time: float;
global_results: OUnitTest.result_list;
test_case_count: int;
tests: test_data list;
errors: int;
failures: int;
skips: int;
todos: int;
timeouts: int;
successes: int;
}
let is_success =
function
| RSuccess -> true
| RFailure _ | RError _ | RSkip _ | RTodo _ | RTimeout _ -> false
let is_failure =
function
| RFailure _ -> true
| RSuccess | RError _ | RSkip _ | RTodo _ | RTimeout _ -> false
let is_error =
function
| RError _ -> true
| RSuccess | RFailure _ | RSkip _ | RTodo _ | RTimeout _ -> false
let is_skip =
function
| RSkip _ -> true
| RSuccess | RFailure _ | RError _ | RTodo _ | RTimeout _ -> false
let is_todo =
function
| RTodo _ -> true
| RSuccess | RFailure _ | RError _ | RSkip _ | RTimeout _ -> false
let is_timeout =
function
| RTimeout _ -> true
| RSuccess | RFailure _ | RError _ | RSkip _ | RTodo _ -> false
let result_flavour =
function
| RError _ -> "Error"
| RFailure _ -> "Failure"
| RSuccess -> "Success"
| RSkip _ -> "Skip"
| RTodo _ -> "Todo"
| RTimeout _ -> "Timeout"
let result_msg =
function
| RSuccess -> "Success"
| RError (msg, _)
| RFailure (msg, _, _)
| RSkip msg
| RTodo msg -> msg
| RTimeout test_length ->
Printf.sprintf "Timeout after %.1fs" (delay_of_length test_length)
let worst_cmp result1 result2 =
let rank =
function
| RSuccess -> 0
| RSkip _ -> 1
| RTodo _ -> 2
| RFailure _ -> 3
| RError _ -> 4
| RTimeout _ -> 5
in
(rank result1) - (rank result2)
let worst_result_full result_full lst =
let worst =
List.fold_left
(fun ((_, result1, _) as result_full1)
((_, result2, _) as result_full2) ->
if worst_cmp result1 result2 < 0 then
result_full2
else
result_full1)
result_full lst
in
worst,
List.filter
(fun result_full -> not (result_full == worst))
(result_full :: lst)
let was_successful lst =
List.for_all
(fun (_, rslt, _) ->
match rslt with
| RSuccess | RSkip _ -> true
| _ -> false)
lst
let encoding =
OUnitConf.make_string
"log_encoding"
"utf-8"
"Encoding of the log."
let of_log_events conf events =
let global_conf =
List.fold_left
(fun acc log_ev ->
match log_ev.event with
| GlobalEvent (GConf (k, v)) -> (k, v) :: acc
| _ -> acc)
[]
(List.rev events)
in
let running_time, global_results, test_case_count =
let rec find_results =
function
| {event =
GlobalEvent
(GResults (running_time, results, test_case_count)); _} :: _ ->
running_time, results, test_case_count
| _ :: tl ->
find_results tl
| [] ->
failwith "Cannot find results in OUnitResult.of_log_events."
in
find_results events
in
let tests =
let rec split_raw tmstp str lst =
try
let idx = String.index str '\n' in
split_raw tmstp
(String.sub str (idx + 1) (String.length str - idx - 1))
((tmstp, None, String.sub str 0 idx) :: lst)
with Not_found ->
(tmstp, None, str) :: lst
in
let finalize t =
let log_entries =
List.sort
(fun (f1, _, _) (f2, _, _) -> Stdlib.compare f2 f1)
t.log_entries
in
let log_entries =
List.rev_map
(fun (f, a, b) -> f -. t.timestamp_start, a, b)
log_entries
in
{t with log_entries = log_entries}
in
let default_timestamp = 0.0 in
let rec process_log_event tests log_event =
let timestamp = log_event.timestamp in
match log_event.event with
| GlobalEvent _ ->
tests
| TestEvent (path, ev) ->
begin
let t =
try
MapPath.find path tests
with Not_found ->
{
test_name = string_of_path path;
timestamp_start = default_timestamp;
timestamp_end = default_timestamp;
log_entries = [];
test_result = RFailure ("Not finished", None, None);
}
in
let alt0 t1 t2 =
if t1 = default_timestamp then
t2
else
t1
in
let t' =
match ev with
| EStart ->
{t with
timestamp_start = timestamp;
timestamp_end = alt0 t.timestamp_end timestamp}
| EEnd ->
{t with
timestamp_end = timestamp;
timestamp_start = alt0 t.timestamp_start timestamp}
| EResult rslt ->
{t with test_result = rslt}
| ELog (svrt, str) ->
{t with log_entries = (timestamp, Some svrt, str)
:: t.log_entries}
| ELogRaw str ->
{t with log_entries =
split_raw timestamp str t.log_entries}
in
MapPath.add path t' tests
end
and group_test tests =
function
| hd :: tl ->
group_test
(process_log_event tests hd)
tl
| [] ->
let lst =
MapPath.fold
(fun _ test lst ->
finalize test :: lst)
tests []
in
List.sort
(fun t1 t2 ->
Stdlib.compare t1.timestamp_start t2.timestamp_start)
lst
in
group_test MapPath.empty events
in
let start_at =
List.fold_left
(fun start_at log_ev ->
min start_at log_ev.timestamp)
(now ())
events
in
let suite_name =
match global_results with
| (path, _, _) :: _ ->
List.fold_left
(fun acc nd ->
match nd with
| ListItem _ -> acc
| Label str -> str)
"noname"
path
| [] ->
"noname"
in
let count f =
List.length
(List.filter (fun (_, test_result, _) -> f test_result)
global_results)
in
let charset = encoding conf in
{
suite_name = suite_name;
start_at = start_at;
charset = charset;
conf = global_conf;
running_time = running_time;
global_results = global_results;
test_case_count = test_case_count;
tests = tests;
errors = count is_error;
failures = count is_failure;
skips = count is_skip;
todos = count is_todo;
timeouts = count is_timeout;
successes = count is_success;
}
| null | https://raw.githubusercontent.com/gildor478/ounit/faf4936b17507406c7592186dcaa3f25c6fc138a/src/lib/ounit2/advanced/oUnitResultSummary.ml | ocaml | ************************************************************************
Permission is hereby granted, free of charge, to any person obtaining
the rights to use, copy, modify, merge, publish, distribute,
conditions:
The above copyright notice and this permission notice shall be
The Software is provided ``as is'', without warranty of any kind,
express or implied, including but not limited to the warranties of
merchantability, fitness for a particular purpose and noninfringement.
or other liability, whether in an action of contract, tort or
the use or other dealings in the software.
************************************************************************
Summary of the results, based on captured log events.
time since start of the test
log entry without \n
UNIX timestamp
UNIX timestamp
time sorted log entry, timestamp from
timestamp_start | The OUnit library
Copyright ( C ) 2002 - 2008 Maas - Maarten Zeeman .
Copyright ( C ) 2010 OCamlCore SARL
Copyright ( C ) 2013
The package OUnit is copyright by Maas - Maarten Zeeman , OCamlCore SARL
and .
a copy of this document and the OUnit software ( " the Software " ) , to
deal in the Software without restriction , including without limitation
sublicense , and/or sell copies of the Software , and to permit persons
to whom the Software is furnished to do so , subject to the following
included in all copies or substantial portions of the Software .
In no event shall be liable for any claim , damages
otherwise , arising from , out of or in connection with the Software or
See LICENSE.txt for details .
open OUnitUtils
open OUnitTest
open OUnitLogger
type log_entry =
log_severity option *
type test_data =
{
test_name: string;
test_result: OUnitTest.result;
}
type t =
{
suite_name: string;
start_at: float;
charset: string;
conf: (string * string) list;
running_time: float;
global_results: OUnitTest.result_list;
test_case_count: int;
tests: test_data list;
errors: int;
failures: int;
skips: int;
todos: int;
timeouts: int;
successes: int;
}
let is_success =
function
| RSuccess -> true
| RFailure _ | RError _ | RSkip _ | RTodo _ | RTimeout _ -> false
let is_failure =
function
| RFailure _ -> true
| RSuccess | RError _ | RSkip _ | RTodo _ | RTimeout _ -> false
let is_error =
function
| RError _ -> true
| RSuccess | RFailure _ | RSkip _ | RTodo _ | RTimeout _ -> false
let is_skip =
function
| RSkip _ -> true
| RSuccess | RFailure _ | RError _ | RTodo _ | RTimeout _ -> false
let is_todo =
function
| RTodo _ -> true
| RSuccess | RFailure _ | RError _ | RSkip _ | RTimeout _ -> false
let is_timeout =
function
| RTimeout _ -> true
| RSuccess | RFailure _ | RError _ | RSkip _ | RTodo _ -> false
let result_flavour =
function
| RError _ -> "Error"
| RFailure _ -> "Failure"
| RSuccess -> "Success"
| RSkip _ -> "Skip"
| RTodo _ -> "Todo"
| RTimeout _ -> "Timeout"
let result_msg =
function
| RSuccess -> "Success"
| RError (msg, _)
| RFailure (msg, _, _)
| RSkip msg
| RTodo msg -> msg
| RTimeout test_length ->
Printf.sprintf "Timeout after %.1fs" (delay_of_length test_length)
let worst_cmp result1 result2 =
let rank =
function
| RSuccess -> 0
| RSkip _ -> 1
| RTodo _ -> 2
| RFailure _ -> 3
| RError _ -> 4
| RTimeout _ -> 5
in
(rank result1) - (rank result2)
let worst_result_full result_full lst =
let worst =
List.fold_left
(fun ((_, result1, _) as result_full1)
((_, result2, _) as result_full2) ->
if worst_cmp result1 result2 < 0 then
result_full2
else
result_full1)
result_full lst
in
worst,
List.filter
(fun result_full -> not (result_full == worst))
(result_full :: lst)
let was_successful lst =
List.for_all
(fun (_, rslt, _) ->
match rslt with
| RSuccess | RSkip _ -> true
| _ -> false)
lst
let encoding =
OUnitConf.make_string
"log_encoding"
"utf-8"
"Encoding of the log."
let of_log_events conf events =
let global_conf =
List.fold_left
(fun acc log_ev ->
match log_ev.event with
| GlobalEvent (GConf (k, v)) -> (k, v) :: acc
| _ -> acc)
[]
(List.rev events)
in
let running_time, global_results, test_case_count =
let rec find_results =
function
| {event =
GlobalEvent
(GResults (running_time, results, test_case_count)); _} :: _ ->
running_time, results, test_case_count
| _ :: tl ->
find_results tl
| [] ->
failwith "Cannot find results in OUnitResult.of_log_events."
in
find_results events
in
let tests =
let rec split_raw tmstp str lst =
try
let idx = String.index str '\n' in
split_raw tmstp
(String.sub str (idx + 1) (String.length str - idx - 1))
((tmstp, None, String.sub str 0 idx) :: lst)
with Not_found ->
(tmstp, None, str) :: lst
in
let finalize t =
let log_entries =
List.sort
(fun (f1, _, _) (f2, _, _) -> Stdlib.compare f2 f1)
t.log_entries
in
let log_entries =
List.rev_map
(fun (f, a, b) -> f -. t.timestamp_start, a, b)
log_entries
in
{t with log_entries = log_entries}
in
let default_timestamp = 0.0 in
let rec process_log_event tests log_event =
let timestamp = log_event.timestamp in
match log_event.event with
| GlobalEvent _ ->
tests
| TestEvent (path, ev) ->
begin
let t =
try
MapPath.find path tests
with Not_found ->
{
test_name = string_of_path path;
timestamp_start = default_timestamp;
timestamp_end = default_timestamp;
log_entries = [];
test_result = RFailure ("Not finished", None, None);
}
in
let alt0 t1 t2 =
if t1 = default_timestamp then
t2
else
t1
in
let t' =
match ev with
| EStart ->
{t with
timestamp_start = timestamp;
timestamp_end = alt0 t.timestamp_end timestamp}
| EEnd ->
{t with
timestamp_end = timestamp;
timestamp_start = alt0 t.timestamp_start timestamp}
| EResult rslt ->
{t with test_result = rslt}
| ELog (svrt, str) ->
{t with log_entries = (timestamp, Some svrt, str)
:: t.log_entries}
| ELogRaw str ->
{t with log_entries =
split_raw timestamp str t.log_entries}
in
MapPath.add path t' tests
end
and group_test tests =
function
| hd :: tl ->
group_test
(process_log_event tests hd)
tl
| [] ->
let lst =
MapPath.fold
(fun _ test lst ->
finalize test :: lst)
tests []
in
List.sort
(fun t1 t2 ->
Stdlib.compare t1.timestamp_start t2.timestamp_start)
lst
in
group_test MapPath.empty events
in
let start_at =
List.fold_left
(fun start_at log_ev ->
min start_at log_ev.timestamp)
(now ())
events
in
let suite_name =
match global_results with
| (path, _, _) :: _ ->
List.fold_left
(fun acc nd ->
match nd with
| ListItem _ -> acc
| Label str -> str)
"noname"
path
| [] ->
"noname"
in
let count f =
List.length
(List.filter (fun (_, test_result, _) -> f test_result)
global_results)
in
let charset = encoding conf in
{
suite_name = suite_name;
start_at = start_at;
charset = charset;
conf = global_conf;
running_time = running_time;
global_results = global_results;
test_case_count = test_case_count;
tests = tests;
errors = count is_error;
failures = count is_failure;
skips = count is_skip;
todos = count is_todo;
timeouts = count is_timeout;
successes = count is_success;
}
|
687fbfe110ef665dc62a87572833c3680e9f4dfe6bee36a81d4eed7adbdd758a | huangz1990/SICP-answers | test-p34-expmod.scm | (load "test-manager/load.scm")
(load "p34-expmod.scm")
(define-each-check
(= 1
(expmod 7 1 3))
(= 2
(expmod 8 1 3))
)
(run-registered-tests)
| null | https://raw.githubusercontent.com/huangz1990/SICP-answers/15e3475003ef10eb738cf93c1932277bc56bacbe/chp1/code/test-p34-expmod.scm | scheme | (load "test-manager/load.scm")
(load "p34-expmod.scm")
(define-each-check
(= 1
(expmod 7 1 3))
(= 2
(expmod 8 1 3))
)
(run-registered-tests)
| |
f63a407b580d060dc4c6f27ed5401538250a79e6f5f6c47190d1589d2902995c | JKTKops/ProtoHaskell | operators.hs | module Operators where
normalOpApp :: Int -> Int -> Int
normalOpApp x y = x + y
prefixOpApp :: Int -> Int -> Int
prefixOpApp x y = (+) x y
etaOpApp :: Int -> Int -> Int
etaOpApp = (+)
functionAsOp :: Int -> Int -> Int
functionAsOp x y = x `mod` y
badParensOp :: Int -> Int -> Int
badParensOp x y = ( +) x y
badBackticksFun :: Int -> Int -> Int
badBackticksFun x y = x ` mod ` y
| null | https://raw.githubusercontent.com/JKTKops/ProtoHaskell/437c37d7bd6d862008f86d7e8045c7a60b12b532/test/Compiler/Parser/testcases/shouldsucceed/operators.hs | haskell | module Operators where
normalOpApp :: Int -> Int -> Int
normalOpApp x y = x + y
prefixOpApp :: Int -> Int -> Int
prefixOpApp x y = (+) x y
etaOpApp :: Int -> Int -> Int
etaOpApp = (+)
functionAsOp :: Int -> Int -> Int
functionAsOp x y = x `mod` y
badParensOp :: Int -> Int -> Int
badParensOp x y = ( +) x y
badBackticksFun :: Int -> Int -> Int
badBackticksFun x y = x ` mod ` y
| |
a6a0300ba04446d77e82ba332269161a80c004d6b618ebb3e0ad53577ddc2885 | onyx-platform/onyx | aggregation_count_test.clj | (ns onyx.windowing.aggregation-count-test
(:require [clojure.core.async :refer [chan >!! <!! close! sliding-buffer]]
[clojure.test :refer [deftest is]]
[onyx.plugin.core-async :refer [take-segments!]]
[onyx.test-helper :refer [load-config with-test-env]]
[onyx.static.uuid :refer [random-uuid]]
[onyx.api]))
(def input
[{:id 1 :age 21 :event-time #inst "2015-09-13T03:00:00.829-00:00"}
{:id 2 :age 12 :event-time #inst "2015-09-13T03:04:00.829-00:00"}
{:id 3 :age 3 :event-time #inst "2015-09-13T03:05:00.829-00:00"}
{:id 4 :age 64 :event-time #inst "2015-09-13T03:06:00.829-00:00"}
{:id 5 :age 53 :event-time #inst "2015-09-13T03:07:00.829-00:00"}
{:id 6 :age 52 :event-time #inst "2015-09-13T03:08:00.829-00:00"}
{:id 7 :age 24 :event-time #inst "2015-09-13T03:09:00.829-00:00"}
{:id 8 :age 35 :event-time #inst "2015-09-13T03:15:00.829-00:00"}
{:id 9 :age 49 :event-time #inst "2015-09-13T03:25:00.829-00:00"}
{:id 10 :age 37 :event-time #inst "2015-09-13T03:45:00.829-00:00"}
{:id 11 :age 15 :event-time #inst "2015-09-13T03:03:00.829-00:00"}
{:id 12 :age 22 :event-time #inst "2015-09-13T03:56:00.829-00:00"}
{:id 13 :age 83 :event-time #inst "2015-09-13T03:59:00.829-00:00"}
{:id 14 :age 60 :event-time #inst "2015-09-13T03:32:00.829-00:00"}
{:id 15 :age 35 :event-time #inst "2015-09-13T03:16:00.829-00:00"}])
(def expected-windows
[[#inst "2015-09-13T03:00:00.000-00:00" #inst "2015-09-13T03:04:59.999-00:00" 3]
[#inst "2015-09-13T03:05:00.000-00:00" #inst "2015-09-13T03:09:59.999-00:00" 5]
[#inst "2015-09-13T03:15:00.000-00:00" #inst "2015-09-13T03:19:59.999-00:00" 2]
[#inst "2015-09-13T03:25:00.000-00:00" #inst "2015-09-13T03:29:59.999-00:00" 1]
[#inst "2015-09-13T03:45:00.000-00:00" #inst "2015-09-13T03:49:59.999-00:00" 1]
[#inst "2015-09-13T03:55:00.000-00:00" #inst "2015-09-13T03:59:59.999-00:00" 2]
[#inst "2015-09-13T03:30:00.000-00:00" #inst "2015-09-13T03:34:59.999-00:00" 1]])
(def test-state (atom []))
(defn update-atom! [event window trigger {:keys [lower-bound upper-bound event-type] :as opts} extent-state]
(when-not (= :job-completed event-type)
(swap! test-state conj [(java.util.Date. (long lower-bound))
(java.util.Date. (long upper-bound))
extent-state])))
(def in-chan (atom nil))
(def in-buffer (atom nil))
(def out-chan (atom nil))
(defn inject-in-ch [event lifecycle]
{:core.async/buffer in-buffer
:core.async/chan @in-chan})
(defn inject-out-ch [event lifecycle]
{:core.async/chan @out-chan})
(def in-calls
{:lifecycle/before-task-start inject-in-ch})
(def out-calls
{:lifecycle/before-task-start inject-out-ch})
(deftest ^:smoke count-test
(let [id (random-uuid)
config (load-config)
env-config (assoc (:env-config config) :onyx/tenancy-id id)
peer-config (assoc (:peer-config config) :onyx/tenancy-id id)
batch-size 20
workflow
[[:in :identity] [:identity :out]]
catalog
[{:onyx/name :in
:onyx/plugin :onyx.plugin.core-async/input
:onyx/type :input
:onyx/medium :core.async
:onyx/batch-size batch-size
:onyx/max-peers 1
:onyx/doc "Reads segments from a core.async channel"}
{:onyx/name :identity
:onyx/fn :clojure.core/identity
:onyx/type :function
:onyx/max-peers 1
:onyx/batch-size batch-size}
{:onyx/name :out
:onyx/plugin :onyx.plugin.core-async/output
:onyx/type :output
:onyx/medium :core.async
:onyx/batch-size batch-size
:onyx/max-peers 1
:onyx/doc "Writes segments to a core.async channel"}]
windows
[{:window/id :collect-segments
:window/task :identity
:window/type :fixed
:window/aggregation :onyx.windowing.aggregation/count
:window/window-key :event-time
:window/range [5 :minutes]}]
triggers
[{:trigger/window-id :collect-segments
:trigger/id :sync
:trigger/fire-all-extents? true
:trigger/on :onyx.triggers/segment
:trigger/threshold [15 :elements]
:trigger/sync ::update-atom!}]
lifecycles
[{:lifecycle/task :in
:lifecycle/calls ::in-calls}
{:lifecycle/task :out
:lifecycle/calls ::out-calls}]]
(reset! in-chan (chan (inc (count input))))
(reset! in-buffer {})
(reset! out-chan (chan (sliding-buffer (inc (count input)))))
(reset! test-state [])
(with-test-env [test-env [3 env-config peer-config]]
(doseq [i input]
(>!! @in-chan i))
(close! @in-chan)
(let [{:keys [job-id]} (onyx.api/submit-job peer-config
{:catalog catalog
:workflow workflow
:lifecycles lifecycles
:windows windows
:triggers triggers
:task-scheduler :onyx.task-scheduler/balanced})
_ (onyx.test-helper/feedback-exception! peer-config job-id)
results (take-segments! @out-chan 50)]
(is (= input results))
(is (= (sort expected-windows) (sort @test-state)))))))
| null | https://raw.githubusercontent.com/onyx-platform/onyx/74f9ae58cdbcfcb1163464595f1e6ae6444c9782/test/onyx/windowing/aggregation_count_test.clj | clojure | (ns onyx.windowing.aggregation-count-test
(:require [clojure.core.async :refer [chan >!! <!! close! sliding-buffer]]
[clojure.test :refer [deftest is]]
[onyx.plugin.core-async :refer [take-segments!]]
[onyx.test-helper :refer [load-config with-test-env]]
[onyx.static.uuid :refer [random-uuid]]
[onyx.api]))
(def input
[{:id 1 :age 21 :event-time #inst "2015-09-13T03:00:00.829-00:00"}
{:id 2 :age 12 :event-time #inst "2015-09-13T03:04:00.829-00:00"}
{:id 3 :age 3 :event-time #inst "2015-09-13T03:05:00.829-00:00"}
{:id 4 :age 64 :event-time #inst "2015-09-13T03:06:00.829-00:00"}
{:id 5 :age 53 :event-time #inst "2015-09-13T03:07:00.829-00:00"}
{:id 6 :age 52 :event-time #inst "2015-09-13T03:08:00.829-00:00"}
{:id 7 :age 24 :event-time #inst "2015-09-13T03:09:00.829-00:00"}
{:id 8 :age 35 :event-time #inst "2015-09-13T03:15:00.829-00:00"}
{:id 9 :age 49 :event-time #inst "2015-09-13T03:25:00.829-00:00"}
{:id 10 :age 37 :event-time #inst "2015-09-13T03:45:00.829-00:00"}
{:id 11 :age 15 :event-time #inst "2015-09-13T03:03:00.829-00:00"}
{:id 12 :age 22 :event-time #inst "2015-09-13T03:56:00.829-00:00"}
{:id 13 :age 83 :event-time #inst "2015-09-13T03:59:00.829-00:00"}
{:id 14 :age 60 :event-time #inst "2015-09-13T03:32:00.829-00:00"}
{:id 15 :age 35 :event-time #inst "2015-09-13T03:16:00.829-00:00"}])
(def expected-windows
[[#inst "2015-09-13T03:00:00.000-00:00" #inst "2015-09-13T03:04:59.999-00:00" 3]
[#inst "2015-09-13T03:05:00.000-00:00" #inst "2015-09-13T03:09:59.999-00:00" 5]
[#inst "2015-09-13T03:15:00.000-00:00" #inst "2015-09-13T03:19:59.999-00:00" 2]
[#inst "2015-09-13T03:25:00.000-00:00" #inst "2015-09-13T03:29:59.999-00:00" 1]
[#inst "2015-09-13T03:45:00.000-00:00" #inst "2015-09-13T03:49:59.999-00:00" 1]
[#inst "2015-09-13T03:55:00.000-00:00" #inst "2015-09-13T03:59:59.999-00:00" 2]
[#inst "2015-09-13T03:30:00.000-00:00" #inst "2015-09-13T03:34:59.999-00:00" 1]])
(def test-state (atom []))
(defn update-atom! [event window trigger {:keys [lower-bound upper-bound event-type] :as opts} extent-state]
(when-not (= :job-completed event-type)
(swap! test-state conj [(java.util.Date. (long lower-bound))
(java.util.Date. (long upper-bound))
extent-state])))
(def in-chan (atom nil))
(def in-buffer (atom nil))
(def out-chan (atom nil))
(defn inject-in-ch [event lifecycle]
{:core.async/buffer in-buffer
:core.async/chan @in-chan})
(defn inject-out-ch [event lifecycle]
{:core.async/chan @out-chan})
(def in-calls
{:lifecycle/before-task-start inject-in-ch})
(def out-calls
{:lifecycle/before-task-start inject-out-ch})
(deftest ^:smoke count-test
(let [id (random-uuid)
config (load-config)
env-config (assoc (:env-config config) :onyx/tenancy-id id)
peer-config (assoc (:peer-config config) :onyx/tenancy-id id)
batch-size 20
workflow
[[:in :identity] [:identity :out]]
catalog
[{:onyx/name :in
:onyx/plugin :onyx.plugin.core-async/input
:onyx/type :input
:onyx/medium :core.async
:onyx/batch-size batch-size
:onyx/max-peers 1
:onyx/doc "Reads segments from a core.async channel"}
{:onyx/name :identity
:onyx/fn :clojure.core/identity
:onyx/type :function
:onyx/max-peers 1
:onyx/batch-size batch-size}
{:onyx/name :out
:onyx/plugin :onyx.plugin.core-async/output
:onyx/type :output
:onyx/medium :core.async
:onyx/batch-size batch-size
:onyx/max-peers 1
:onyx/doc "Writes segments to a core.async channel"}]
windows
[{:window/id :collect-segments
:window/task :identity
:window/type :fixed
:window/aggregation :onyx.windowing.aggregation/count
:window/window-key :event-time
:window/range [5 :minutes]}]
triggers
[{:trigger/window-id :collect-segments
:trigger/id :sync
:trigger/fire-all-extents? true
:trigger/on :onyx.triggers/segment
:trigger/threshold [15 :elements]
:trigger/sync ::update-atom!}]
lifecycles
[{:lifecycle/task :in
:lifecycle/calls ::in-calls}
{:lifecycle/task :out
:lifecycle/calls ::out-calls}]]
(reset! in-chan (chan (inc (count input))))
(reset! in-buffer {})
(reset! out-chan (chan (sliding-buffer (inc (count input)))))
(reset! test-state [])
(with-test-env [test-env [3 env-config peer-config]]
(doseq [i input]
(>!! @in-chan i))
(close! @in-chan)
(let [{:keys [job-id]} (onyx.api/submit-job peer-config
{:catalog catalog
:workflow workflow
:lifecycles lifecycles
:windows windows
:triggers triggers
:task-scheduler :onyx.task-scheduler/balanced})
_ (onyx.test-helper/feedback-exception! peer-config job-id)
results (take-segments! @out-chan 50)]
(is (= input results))
(is (= (sort expected-windows) (sort @test-state)))))))
| |
4877498980f4989848c69242e3a816d6824ec05a3a8773da90844659d75ce519 | tsloughter/mirrormaster | mirrormaster_sup.erl | %%%-------------------------------------------------------------------
%% @doc mirrormaster top level supervisor.
%% @end
%%%-------------------------------------------------------------------
-module(mirrormaster_sup).
-behaviour(supervisor).
-export([start_link/0]).
-export([init/1]).
-define(SERVER, ?MODULE).
start_link() ->
supervisor:start_link({local, ?SERVER}, ?MODULE, []).
init([]) ->
SupFlags = #{strategy => one_for_one, intensity => 1, period => 5},
{ok, PackageDir} = application:get_env(mirrormaster, package_dir),
filelib:ensure_dir(filename:join(PackageDir, "subdir")),
{ok, PrivateKeyFile} = application:get_env(mirrormaster, private_key_file),
{ok, PrivateKey} = file:read_file(to_filename(PrivateKeyFile)),
{ok, Name} = application:get_env(mirrormaster, repo_name),
Repos = application:get_env(mirrormaster, repos, [hex_core:default_config()]),
ElliOpts = [{callback, mm_handler},
{callback_args, [#{repo_name => Name,
repos => Repos,
package_dir => PackageDir,
private_key => PrivateKey}]},
{port, 3000}],
ChildSpecs = [#{id => mm_http,
start => {elli, start_link, [ElliOpts]},
restart => permanent,
shutdown => 5000,
type => worker,
modules => [elli]}],
{ok, {SupFlags, ChildSpecs}}.
%%
to_filename({priv, Filename}) ->
Priv = code:priv_dir(mirrormaster),
filename:join(Priv, Filename);
to_filename(Filename) ->
Filename.
| null | https://raw.githubusercontent.com/tsloughter/mirrormaster/ed12b0c1dd2674b63fdd4d19797a93c235b76942/src/mirrormaster_sup.erl | erlang | -------------------------------------------------------------------
@doc mirrormaster top level supervisor.
@end
-------------------------------------------------------------------
| -module(mirrormaster_sup).
-behaviour(supervisor).
-export([start_link/0]).
-export([init/1]).
-define(SERVER, ?MODULE).
start_link() ->
supervisor:start_link({local, ?SERVER}, ?MODULE, []).
init([]) ->
SupFlags = #{strategy => one_for_one, intensity => 1, period => 5},
{ok, PackageDir} = application:get_env(mirrormaster, package_dir),
filelib:ensure_dir(filename:join(PackageDir, "subdir")),
{ok, PrivateKeyFile} = application:get_env(mirrormaster, private_key_file),
{ok, PrivateKey} = file:read_file(to_filename(PrivateKeyFile)),
{ok, Name} = application:get_env(mirrormaster, repo_name),
Repos = application:get_env(mirrormaster, repos, [hex_core:default_config()]),
ElliOpts = [{callback, mm_handler},
{callback_args, [#{repo_name => Name,
repos => Repos,
package_dir => PackageDir,
private_key => PrivateKey}]},
{port, 3000}],
ChildSpecs = [#{id => mm_http,
start => {elli, start_link, [ElliOpts]},
restart => permanent,
shutdown => 5000,
type => worker,
modules => [elli]}],
{ok, {SupFlags, ChildSpecs}}.
to_filename({priv, Filename}) ->
Priv = code:priv_dir(mirrormaster),
filename:join(Priv, Filename);
to_filename(Filename) ->
Filename.
|
eb0df858648625efb42e618093bb9e630aa0daad6b21e82dc4ca70b8465dcf67 | apache/couchdb-twig | twig_util.erl | Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% -2.0
%
% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(twig_util).
-export([format/2, get_env/2, level/1, facility/1, iso8601_timestamp/0]).
level(debug) -> 7;
level(info) -> 6;
level(notice) -> 5;
level(warn) -> 4;
level(warning) -> 4;
level(err) -> 3;
level(error) -> 3;
level(crit) -> 2;
level(alert) -> 1;
level(emerg) -> 0;
level(panic) -> 0;
level(I) when is_integer(I), I >= 0, I =< 7 ->
I;
level(_BadLevel) ->
3.
facility(kern) -> (0 bsl 3) ; % kernel messages
facility(user) -> (1 bsl 3) ; % random user-level messages
facility(mail) -> (2 bsl 3) ; % mail system
facility(daemon) -> (3 bsl 3) ; % system daemons
facility(auth) -> (4 bsl 3) ; % security/authorization messages
facility(syslog) -> (5 bsl 3) ; % messages generated internally by syslogd
facility(lpr) -> (6 bsl 3) ; % line printer subsystem
facility(news) -> (7 bsl 3) ; % network news subsystem
UUCP subsystem
facility(cron) -> (9 bsl 3) ; % clock daemon
facility(authpriv) -> (10 bsl 3); % security/authorization messages (private)
facility(ftp) -> (11 bsl 3); % ftp daemon
facility(local0) -> (16 bsl 3);
facility(local1) -> (17 bsl 3);
facility(local2) -> (18 bsl 3);
facility(local3) -> (19 bsl 3);
facility(local4) -> (20 bsl 3);
facility(local5) -> (21 bsl 3);
facility(local6) -> (22 bsl 3);
facility(local7) -> (23 bsl 3).
iso8601_timestamp() ->
{_,_,Micro} = Now = os:timestamp(),
{{Year,Month,Date},{Hour,Minute,Second}} = calendar:now_to_datetime(Now),
Format = "~4.10.0B-~2.10.0B-~2.10.0BT~2.10.0B:~2.10.0B:~2.10.0B.~6.10.0BZ",
io_lib:format(Format, [Year, Month, Date, Hour, Minute, Second, Micro]).
format(Format, Data) ->
MaxTermSize = get_env(max_term_size, 8192),
case erts_debug:flat_size(Data) > MaxTermSize of
true ->
MaxString = get_env(max_message_size, 16000),
{Truncated, _} = trunc_io:print(Data, MaxString),
["*Truncated* ", Format, " - ", Truncated];
false ->
io_lib:format(Format, Data)
end.
get_env(Key, Default) ->
case application:get_env(twig, Key) of
{ok, Value} ->
Value;
undefined ->
Default
end.
| null | https://raw.githubusercontent.com/apache/couchdb-twig/7b58ab232f3db5e54d1c81b6965678f87f89ae11/src/twig_util.erl | erlang | use this file except in compliance with the License. You may obtain a copy of
the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License.
kernel messages
random user-level messages
mail system
system daemons
security/authorization messages
messages generated internally by syslogd
line printer subsystem
network news subsystem
clock daemon
security/authorization messages (private)
ftp daemon | Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
-module(twig_util).
-export([format/2, get_env/2, level/1, facility/1, iso8601_timestamp/0]).
level(debug) -> 7;
level(info) -> 6;
level(notice) -> 5;
level(warn) -> 4;
level(warning) -> 4;
level(err) -> 3;
level(error) -> 3;
level(crit) -> 2;
level(alert) -> 1;
level(emerg) -> 0;
level(panic) -> 0;
level(I) when is_integer(I), I >= 0, I =< 7 ->
I;
level(_BadLevel) ->
3.
UUCP subsystem
facility(local0) -> (16 bsl 3);
facility(local1) -> (17 bsl 3);
facility(local2) -> (18 bsl 3);
facility(local3) -> (19 bsl 3);
facility(local4) -> (20 bsl 3);
facility(local5) -> (21 bsl 3);
facility(local6) -> (22 bsl 3);
facility(local7) -> (23 bsl 3).
iso8601_timestamp() ->
{_,_,Micro} = Now = os:timestamp(),
{{Year,Month,Date},{Hour,Minute,Second}} = calendar:now_to_datetime(Now),
Format = "~4.10.0B-~2.10.0B-~2.10.0BT~2.10.0B:~2.10.0B:~2.10.0B.~6.10.0BZ",
io_lib:format(Format, [Year, Month, Date, Hour, Minute, Second, Micro]).
format(Format, Data) ->
MaxTermSize = get_env(max_term_size, 8192),
case erts_debug:flat_size(Data) > MaxTermSize of
true ->
MaxString = get_env(max_message_size, 16000),
{Truncated, _} = trunc_io:print(Data, MaxString),
["*Truncated* ", Format, " - ", Truncated];
false ->
io_lib:format(Format, Data)
end.
get_env(Key, Default) ->
case application:get_env(twig, Key) of
{ok, Value} ->
Value;
undefined ->
Default
end.
|
44012282d1fa9ec67a0b18932a94de452067641325aa06ddecf6cb3fad863ed4 | titola/neuropa | infrasound.scm | Infrasound propagation through two differentiators .
Tests for the sound editor .
;;;
;;; The functions reverse-differentiator-1, reverse-differentiator-2,
;;; differentiator-1 and differentiator-2 work with the recorded
;;; and published sound file "low_freq_with_6min_for_tinnitus.wav".
(define (reverse-differentiator k mult)
(let ((sum 0))
(map-channel
(lambda (x)
(* mult (set! sum (+ sum x k)))))))
(define (reverse-differentiator-1)
(let ((k -0.05))
(reverse-differentiator k (/ k -164.11))))
(define (reverse-differentiator-2)
(let ((k (/ 80.5)))
(reverse-differentiator k (/ k 10.35))))
(define (differentiator k mult)
(let ((x0 0))
(map-channel
(lambda (x)
(let* ((x (* x mult))
(y (- x x0 k)))
(set! x0 x)
y)))))
(define (differentiator-1)
(let ((k -0.05))
(differentiator k (/ -164.11 k))))
(define (differentiator-2)
(let ((k (/ 80.5)))
(differentiator k (/ 10.35 k))))
;; (open-sound "low_freq_with_6min_for_tinnitus.wav")
;; (reverse-differentiator-2)
;; (reverse-differentiator-1) ; => Initial filtered pulses.
;; (differentiator-1)
;; (differentiator-2)
| null | https://raw.githubusercontent.com/titola/neuropa/708509e5ca8c73f3b16fc5e38d066e172d5691c8/src/infrasound.scm | scheme |
The functions reverse-differentiator-1, reverse-differentiator-2,
differentiator-1 and differentiator-2 work with the recorded
and published sound file "low_freq_with_6min_for_tinnitus.wav".
(open-sound "low_freq_with_6min_for_tinnitus.wav")
(reverse-differentiator-2)
(reverse-differentiator-1) ; => Initial filtered pulses.
(differentiator-1)
(differentiator-2) | Infrasound propagation through two differentiators .
Tests for the sound editor .
(define (reverse-differentiator k mult)
(let ((sum 0))
(map-channel
(lambda (x)
(* mult (set! sum (+ sum x k)))))))
(define (reverse-differentiator-1)
(let ((k -0.05))
(reverse-differentiator k (/ k -164.11))))
(define (reverse-differentiator-2)
(let ((k (/ 80.5)))
(reverse-differentiator k (/ k 10.35))))
(define (differentiator k mult)
(let ((x0 0))
(map-channel
(lambda (x)
(let* ((x (* x mult))
(y (- x x0 k)))
(set! x0 x)
y)))))
(define (differentiator-1)
(let ((k -0.05))
(differentiator k (/ -164.11 k))))
(define (differentiator-2)
(let ((k (/ 80.5)))
(differentiator k (/ 10.35 k))))
|
89304986e43a1313731f3fd7d5380730c5c5842145e2241df159aba4f2baaa1f | cram2/cram | swank-indentation.lisp |
(in-package :prolog)
(eval-when (:load-toplevel)
(when (and (find-package "SWANK")
(boundp (intern "*APPLICATION-HINTS-TABLES*"
(find-package "SWANK"))))
(push (alexandria:alist-hash-table
'((with-production . let)
(with-production-handlers . flet)))
(symbol-value (intern "*APPLICATION-HINTS-TABLES*"
(find-package "SWANK"))))))
| null | https://raw.githubusercontent.com/cram2/cram/dcb73031ee944d04215bbff9e98b9e8c210ef6c5/cram_core/cram_prolog/src/swank-indentation.lisp | lisp |
(in-package :prolog)
(eval-when (:load-toplevel)
(when (and (find-package "SWANK")
(boundp (intern "*APPLICATION-HINTS-TABLES*"
(find-package "SWANK"))))
(push (alexandria:alist-hash-table
'((with-production . let)
(with-production-handlers . flet)))
(symbol-value (intern "*APPLICATION-HINTS-TABLES*"
(find-package "SWANK"))))))
| |
4576c64daab4d85f91a78e24a8557357630ad1243de84a3c9c2cad216db0e226 | jaredly/reason-language-server | parmatch.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(* Detection of partial matches and unused match cases. *)
open Misc
open Asttypes
open Types
open Typedtree
(*************************************)
Utilities for building patterns
(*************************************)
let make_pat desc ty tenv =
{pat_desc = desc; pat_loc = Location.none; pat_extra = [];
pat_type = ty ; pat_env = tenv;
pat_attributes = [];
}
let omega = make_pat Tpat_any Ctype.none Env.empty
let extra_pat =
make_pat
(Tpat_var (Ident.create "+", mknoloc "+"))
Ctype.none Env.empty
let rec omegas i =
if i <= 0 then [] else omega :: omegas (i-1)
let omega_list l = List.map (fun _ -> omega) l
let zero = make_pat (Tpat_constant (Const_int 0)) Ctype.none Env.empty
(*******************)
Coherence check
(*******************)
For some of the operations we do in this module , we would like ( because it
simplifies matters ) to assume that patterns appearing on a given column in a
pattern matrix are /coherent/ ( think " of the same type " ) .
Unfortunately that is not always true .
Consider the following ( well - typed ) example :
{ [
type _ t = S : string t | U : unit t
let f ( type a ) ( t1 : a t ) ( t2 : a t ) ( a : a ) =
match t1 , t2 , a with
| U , _ , ( ) - > ( )
| _ , S , " " - > ( )
] }
Clearly the 3rd column contains incoherent patterns .
On the example above , most of the algorithms will explore the pattern matrix
as illustrated by the following tree :
{ v
S
------- > | " " |
U | S , " " | _ _ / | ( ) |
-------- > | _ , ( ) | \ ¬ S
| U , _ , ( ) | _ _ / ------- > | ( ) |
| _ , S , " " | \
--------- > | S , " " | ---------- > | " " |
¬ U S
v }
where following an edge labelled by a pattern P means " assuming the value I
am matching on is filtered by [ P ] on the column I am currently looking at ,
then the following submatrix is still reachable " .
Notice that at any point of that tree , if the first column of a matrix is
incoherent , then the branch leading to it can only be taken if the scrutinee
is ill - typed .
In the example above the only case where we have a matrix with an incoherent
first column is when we consider [ t1 , t2 , a ] to be [ U , S , ... ] . However such
a value would be ill - typed , so we can never actually get there .
Checking the first column at each step of the recursion and making the
concious decision of " aborting " the algorithm whenever the first column
becomes incoherent , allows us to retain the initial assumption in later
stages of the algorithms .
---
N.B. two patterns can be considered coherent even though they might not be of
the same type .
That 's in part because we only care about the " head " of patterns and leave
checking coherence of subpatterns for the next steps of the algorithm :
( ' a ' , ' b ' ) and ( 1 , ( ) ) will be deemed coherent because they are both a tuples
of arity 2 ( we 'll notice at a later stage the incoherence of ' a ' and 1 ) .
But also because it can be hard / costly to determine exactly whether two
patterns are of the same type or not ( eg . in the example above with _ and S ,
but see also the module [ Coherence_illustration ] in
testsuite / tests / basic - more / robustmatch.ml ) .
For the moment our weak , loosely - syntactic , coherence check seems to be
enough and we leave it to each user to consider ( and document ! ) what happens
when an " incoherence " is not detected by this check .
simplifies matters) to assume that patterns appearing on a given column in a
pattern matrix are /coherent/ (think "of the same type").
Unfortunately that is not always true.
Consider the following (well-typed) example:
{[
type _ t = S : string t | U : unit t
let f (type a) (t1 : a t) (t2 : a t) (a : a) =
match t1, t2, a with
| U, _, () -> ()
| _, S, "" -> ()
]}
Clearly the 3rd column contains incoherent patterns.
On the example above, most of the algorithms will explore the pattern matrix
as illustrated by the following tree:
{v
S
-------> | "" |
U | S, "" | __/ | () |
--------> | _, () | \ ¬ S
| U, _, () | __/ -------> | () |
| _, S, "" | \
---------> | S, "" | ----------> | "" |
¬ U S
v}
where following an edge labelled by a pattern P means "assuming the value I
am matching on is filtered by [P] on the column I am currently looking at,
then the following submatrix is still reachable".
Notice that at any point of that tree, if the first column of a matrix is
incoherent, then the branch leading to it can only be taken if the scrutinee
is ill-typed.
In the example above the only case where we have a matrix with an incoherent
first column is when we consider [t1, t2, a] to be [U, S, ...]. However such
a value would be ill-typed, so we can never actually get there.
Checking the first column at each step of the recursion and making the
concious decision of "aborting" the algorithm whenever the first column
becomes incoherent, allows us to retain the initial assumption in later
stages of the algorithms.
---
N.B. two patterns can be considered coherent even though they might not be of
the same type.
That's in part because we only care about the "head" of patterns and leave
checking coherence of subpatterns for the next steps of the algorithm:
('a', 'b') and (1, ()) will be deemed coherent because they are both a tuples
of arity 2 (we'll notice at a later stage the incoherence of 'a' and 1).
But also because it can be hard/costly to determine exactly whether two
patterns are of the same type or not (eg. in the example above with _ and S,
but see also the module [Coherence_illustration] in
testsuite/tests/basic-more/robustmatch.ml).
For the moment our weak, loosely-syntactic, coherence check seems to be
enough and we leave it to each user to consider (and document!) what happens
when an "incoherence" is not detected by this check.
*)
(* Cons onto [k] the simplified form(s) of the head pattern [p]:
   aliases are unwrapped, variables become omega, and both branches of an
   or-pattern are pushed (left branch first). *)
let simplify_head_pat p k =
  let rec go pat acc =
    match pat.pat_desc with
    | Tpat_alias (sub, _, _) -> go sub acc
    | Tpat_var (_, _) -> omega :: acc
    | Tpat_or (left, right, _) -> go left (go right acc)
    | _ -> pat :: acc
  in
  go p k
(* Simplify the head pattern of every row of a matrix; rows must be
   non-empty. *)
let simplified_first_col rows =
  List.fold_right
    (fun row acc ->
       match row with
       | [] -> assert false (* the rows are non-empty! *)
       | p :: _ -> simplify_head_pat p acc)
    rows []
(* Given the simplified first column of a matrix, this function first looks for
   a "discriminating" pattern on that column (i.e. a non-omega one) and then
   checks that every other head pattern in the column is coherent with that one.
*)
(* Is a simplified column of head patterns coherent?  Pick a witness among
   the discriminating (non-wildcard) heads and check every head against it.
   Simplification is assumed to have removed variables, aliases and
   or-patterns already — hence the asserts. *)
let all_coherent column =
  (* Shallow coherence of two head patterns; sub-patterns are checked at
     later steps of the algorithm (see the comment above). *)
  let coherent_heads hp1 hp2 =
    match hp1.pat_desc, hp2.pat_desc with
    | (Tpat_var _ | Tpat_alias _ | Tpat_or _), _
    | _, (Tpat_var _ | Tpat_alias _ | Tpat_or _) ->
      assert false
    | Tpat_construct (_, c, _), Tpat_construct (_, c', _) ->
      c.cstr_consts = c'.cstr_consts
      && c.cstr_nonconsts = c'.cstr_nonconsts
    | Tpat_constant c1, Tpat_constant c2 -> begin
        match c1, c2 with
        | Const_char _, Const_char _
        | Const_int _, Const_int _
        | Const_int32 _, Const_int32 _
        | Const_int64 _, Const_int64 _
        | Const_nativeint _, Const_nativeint _
        | Const_float _, Const_float _
        | Const_string _, Const_string _ -> true
        | ( Const_char _
          | Const_int _
          | Const_int32 _
          | Const_int64 _
          | Const_nativeint _
          | Const_float _
          | Const_string _), _ -> false
      end
    | Tpat_tuple l1, Tpat_tuple l2 -> List.length l1 = List.length l2
    | Tpat_record ((_, lbl1, _) :: _, _), Tpat_record ((_, lbl2, _) :: _, _) ->
      Array.length lbl1.lbl_all = Array.length lbl2.lbl_all
    | Tpat_any, _
    | _, Tpat_any
    | Tpat_record ([], _), Tpat_record (_, _)
    | Tpat_record (_, _), Tpat_record ([], _)
    | Tpat_variant _, Tpat_variant _
    | Tpat_array _, Tpat_array _
    | Tpat_lazy _, Tpat_lazy _ -> true
    | _, _ -> false
  in
  match
    List.find (fun head_pat ->
      match head_pat.pat_desc with
      | Tpat_var _ | Tpat_alias _ | Tpat_or _ -> assert false
      | Tpat_any -> false
      | _ -> true
    ) column
  with
  | exception Not_found ->
    (* only omegas on the column: the column is coherent. *)
    true
  | discr_pat ->
    List.for_all (coherent_heads discr_pat) column
(* Keep only the (simplified) head of each row, dropping the tails. *)
let first_column simplified_matrix =
  List.map (fun (head, _rest) -> head) simplified_matrix
(***********************)
(* Compatibility check *)
(***********************)
(*
  Patterns p and q compatible means:
there exists value V that matches both, However....
The case of extension types is dubious, as constructor rebind permits
that different constructors are the same (and are thus compatible).
Compilation must take this into account, consider:
type t = ..
type t += A|B
type t += C=A
let f x y = match x,y with
| true,A -> '1'
| _,C -> '2'
| false,A -> '3'
| _,_ -> '_'
As C is bound to A the value of f false A is '2' (and not '3' as it would
be in the absence of rebinding).
Not considering rebinding, patterns "false,A" and "_,C" are incompatible
and the compiler can swap the second and third clause, resulting in the
(more efficiently compiled) matching
match x,y with
| true,A -> '1'
| false,A -> '3'
| _,C -> '2'
| _,_ -> '_'
This is not correct: when C is bound to A, "f false A" returns '2' (not '3')
However, diagnostics do not take constructor rebinding into account.
Notice, that due to module abstraction constructor rebinding is hidden.
module X : sig type t = .. type t += A|B end = struct
type t = ..
type t += A
type t += B=A
end
open X
let f x = match x with
| A -> '1'
| B -> '2'
| _ -> '_'
The second clause above will NOT (and cannot) be flagged as useless.
Finally, there are two compatibility fonction
compat p q ---> 'syntactic compatibility, used for diagnostics.
may_compat p q ---> a safe approximation of possible compat,
for compilation
*)
(* Is variant tag [tag] absent (Rabsent) from the row type? *)
let is_absent tag row = Btype.row_field tag !row = Rabsent

(* Does [p] match a variant tag that is absent from its row type? *)
let is_absent_pat p = match p.pat_desc with
| Tpat_variant (tag, _, row) -> is_absent tag row
| _ -> false
(* Total order on constant patterns.  Floats are compared by numeric value
   (not by their source text), strings by contents; everything else falls
   back to the generic structural compare. *)
let const_compare x y =
  match x,y with
  | Const_float f1, Const_float f2 ->
      Pervasives.compare (float_of_string f1) (float_of_string f2)
  | Const_string (s1, _), Const_string (s2, _) ->
      String.compare s1 s2
  | _, _ -> Pervasives.compare x y
(* Align two record-pattern field lists on label positions, padding the
   side that lacks a field with omega, so both results have the same
   shape and can be compared pointwise. *)
let records_args l1 l2 =
  (* Invariant: fields are already sorted by Typecore.type_label_a_list *)
  let rec combine r1 r2 l1 l2 = match l1,l2 with
  | [],[] -> List.rev r1, List.rev r2
  | [],(_,_,p2)::rem2 -> combine (omega::r1) (p2::r2) [] rem2
  | (_,_,p1)::rem1,[] -> combine (p1::r1) (omega::r2) rem1 []
  | (_,lbl1,p1)::rem1, ( _,lbl2,p2)::rem2 ->
      if lbl1.lbl_pos < lbl2.lbl_pos then
        combine (p1::r1) (omega::r2) rem1 l2
      else if lbl1.lbl_pos > lbl2.lbl_pos then
        combine (omega::r1) (p2::r2) l1 rem2
      else (* same label on both sides *)
        combine (p1::r1) (p2::r2) rem1 rem2 in
  combine [] [] l1 l2
(* Compatibility check, parameterised by the equality used on constructor
   descriptions — syntactic tag equality for diagnostics, or a safe
   approximation for compilation (see the comment above on rebinding). *)
module Compat
    (Constr:sig
      val equal :
          Types.constructor_description ->
            Types.constructor_description ->
              bool
    end) = struct

  let rec compat p q = match p.pat_desc,q.pat_desc with
(* Variables match any value *)
  | ((Tpat_any|Tpat_var _),_)
  | (_,(Tpat_any|Tpat_var _)) -> true
(* Structural induction *)
  | Tpat_alias (p,_,_),_ -> compat p q
  | _,Tpat_alias (q,_,_) -> compat p q
  | Tpat_or (p1,p2,_),_ ->
      (compat p1 q || compat p2 q)
  | _,Tpat_or (q1,q2,_) ->
      (compat p q1 || compat p q2)
(* Constructors, with special case for extension *)
  | Tpat_construct (_, c1,ps1), Tpat_construct (_, c2,ps2) ->
      Constr.equal c1 c2 && compats ps1 ps2
(* More standard stuff *)
  | Tpat_variant(l1,op1, _), Tpat_variant(l2,op2,_) ->
      l1=l2 && ocompat op1 op2
  | Tpat_constant c1, Tpat_constant c2 ->
      const_compare c1 c2 = 0
  | Tpat_tuple ps, Tpat_tuple qs -> compats ps qs
  | Tpat_lazy p, Tpat_lazy q -> compat p q
  | Tpat_record (l1,_),Tpat_record (l2,_) ->
      (* Align fields on label positions before comparing pointwise. *)
      let ps,qs = records_args l1 l2 in
      compats ps qs
  | Tpat_array ps, Tpat_array qs ->
      List.length ps = List.length qs &&
      compats ps qs
  | _,_ -> false

  (* Compatibility of optional variant arguments. *)
  and ocompat op oq = match op,oq with
  | None,None -> true
  | Some p,Some q -> compat p q
  | (None,Some _)|(Some _,None) -> false

  (* Pointwise compatibility of two pattern lists of the same length. *)
  and compats ps qs = match ps,qs with
  | [], [] -> true
  | p::ps, q::qs -> compat p q && compats ps qs
  | _,_ -> false
end
(* Syntactic compatibility: two constructors are equal iff their tags are.
   Used for diagnostics (see the comment above). *)
module SyntacticCompat =
  Compat
    (struct
      let equal c1 c2 = Types.equal_tag c1.cstr_tag c2.cstr_tag
    end)

let compat = SyntacticCompat.compat
and compats = SyntacticCompat.compats
(* Due to (potential) rebinding, two extension constructors
   of the same arity type may equal *)
exception Empty (* Empty pattern *)

(****************************************)
(* Utilities for retrieving type paths  *)
(****************************************)

(* May need a clean copy, cf. PR#4745 *)
let clean_copy ty =
  if ty.level = Btype.generic_level then ty
  else Subst.type_expr Subst.identity ty
(* Expand [ty] in [tenv] and return the path of its head type constructor;
   fatal when the expanded type is not a Tconstr. *)
let get_type_path ty tenv =
  let ty = Ctype.repr (Ctype.expand_head tenv (clean_copy ty)) in
  match ty.desc with
  | Tconstr (path,_,_) -> path
  | _ -> fatal_error "Parmatch.get_type_path"
(*************************************)
(* Values as patterns pretty printer *)
(*************************************)
open Format
;;
(* Is this constructor the list cons "::"?  (special-cased when printing) *)
let is_cons = function
| {cstr_name = "::"} -> true
| _ -> false
(* Source-like rendering of a constant, with the proper literal suffix
   (l / L / n) for boxed integer types. *)
let pretty_const c = match c with
| Const_int i -> Printf.sprintf "%d" i
| Const_char c -> Printf.sprintf "%C" c
| Const_string (s, _) -> Printf.sprintf "%S" s
| Const_float f -> Printf.sprintf "%s" f
| Const_int32 i -> Printf.sprintf "%ldl" i
| Const_int64 i -> Printf.sprintf "%LdL" i
| Const_nativeint i -> Printf.sprintf "%ndn" i
(* Pretty-print a value as a pattern (used to display counter-examples).
   pat_extra wrappers (module unpack, constraint, #type, open) are printed
   first, then the pattern itself. *)
let rec pretty_val ppf v =
  match v.pat_extra with
      (cstr, _loc, _attrs) :: rem ->
        begin match cstr with
        | Tpat_unpack ->
          fprintf ppf "@[(module %a)@]" pretty_val { v with pat_extra = rem }
        | Tpat_constraint _ ->
          fprintf ppf "@[(%a : _)@]" pretty_val { v with pat_extra = rem }
        | Tpat_type _ ->
          fprintf ppf "@[(# %a)@]" pretty_val { v with pat_extra = rem }
        | Tpat_open _ ->
          fprintf ppf "@[(# %a)@]" pretty_val { v with pat_extra = rem }
        end
    | [] ->
  match v.pat_desc with
  | Tpat_any -> fprintf ppf "_"
  | Tpat_var (x,_) -> fprintf ppf "%s" (Ident.name x)
  | Tpat_constant c -> fprintf ppf "%s" (pretty_const c)
  | Tpat_tuple vs ->
      fprintf ppf "@[(%a)@]" (pretty_vals ",") vs
  | Tpat_construct (_, cstr, []) ->
      fprintf ppf "%s" cstr.cstr_name
  | Tpat_construct (_, cstr, [w]) ->
      fprintf ppf "@[<2>%s@ %a@]" cstr.cstr_name pretty_arg w
  | Tpat_construct (_, cstr, vs) ->
      (* Lists get the infix x::y::... syntax; everything else C (a, b). *)
      let name = cstr.cstr_name in
      begin match (name, vs) with
        ("::", [v1;v2]) ->
          fprintf ppf "@[%a::@,%a@]" pretty_car v1 pretty_cdr v2
      | _ ->
          fprintf ppf "@[<2>%s@ @[(%a)@]@]" name (pretty_vals ",") vs
      end
  | Tpat_variant (l, None, _) ->
      fprintf ppf "`%s" l
  | Tpat_variant (l, Some w, _) ->
      fprintf ppf "@[<2>`%s@ %a@]" l pretty_arg w
  | Tpat_record (lvs,_) ->
      (* Hide fields bound to _, and print a trailing "_" when some
         fields of the record were elided. *)
      let filtered_lvs = List.filter
          (function
            | (_,_,{pat_desc=Tpat_any}) -> false (* do not show lbl=_ *)
            | _ -> true) lvs in
      begin match filtered_lvs with
      | [] -> fprintf ppf "_"
      | (_, lbl, _) :: q ->
          let elision_mark ppf =
            (* we assume that there is no label repetitions here *)
            if Array.length lbl.lbl_all > 1 + List.length q then
              fprintf ppf ";@ _@ "
            else () in
          fprintf ppf "@[{%a%t}@]"
            pretty_lvals filtered_lvs elision_mark
      end
  | Tpat_array vs ->
      fprintf ppf "@[[| %a |]@]" (pretty_vals " ;") vs
  | Tpat_lazy v ->
      fprintf ppf "@[<2>lazy@ %a@]" pretty_arg v
  | Tpat_alias (v, x,_) ->
      fprintf ppf "@[(%a@ as %a)@]" pretty_val v Ident.print x
  | Tpat_or (v,w,_) ->
      fprintf ppf "@[(%a|@,%a)@]" pretty_or v pretty_or w

(* Head of a ::, parenthesized when itself a cons. *)
and pretty_car ppf v = match v.pat_desc with
| Tpat_construct (_,cstr, [_ ; _])
    when is_cons cstr ->
      fprintf ppf "(%a)" pretty_val v
| _ -> pretty_val ppf v

(* Tail of a ::, flattening the spine into x::y::z. *)
and pretty_cdr ppf v = match v.pat_desc with
| Tpat_construct (_,cstr, [v1 ; v2])
    when is_cons cstr ->
      fprintf ppf "%a::@,%a" pretty_car v1 pretty_cdr v2
| _ -> pretty_val ppf v

(* Constructor/variant argument, parenthesized when itself applied. *)
and pretty_arg ppf v = match v.pat_desc with
| Tpat_construct (_,_,_::_)
| Tpat_variant (_, Some _, _) -> fprintf ppf "(%a)" pretty_val v
| _ -> pretty_val ppf v

and pretty_or ppf v = match v.pat_desc with
| Tpat_or (v,w,_) ->
    fprintf ppf "%a|@,%a" pretty_or v pretty_or w
| _ -> pretty_val ppf v

(* Print values separated by [sep]. *)
and pretty_vals sep ppf = function
| [] -> ()
| [v] -> pretty_val ppf v
| v::vs ->
    fprintf ppf "%a%s@ %a" pretty_val v sep (pretty_vals sep) vs

(* Print lbl=value fields separated by ";". *)
and pretty_lvals ppf = function
| [] -> ()
| [_,lbl,v] ->
    fprintf ppf "%s=%a" lbl.lbl_name pretty_val v
| (_, lbl,v)::rest ->
    fprintf ppf "%s=%a;@ %a"
      lbl.lbl_name pretty_val v pretty_lvals rest
(* Print a whole value pattern in a box, flushing the formatter. *)
let top_pretty ppf v =
  fprintf ppf "@[%a@]@?" pretty_val v

(* Debug helper: print a pattern on stderr. *)
let pretty_pat p =
  top_pretty Format.str_formatter p ;
  prerr_string (Format.flush_str_formatter ())
(* A pattern matrix: a list of rows, each row a list of patterns. *)
type matrix = pattern list list

(* Debug helper: print one row on stderr, each pattern between < and >. *)
let pretty_line ps =
  List.iter
    (fun p ->
      top_pretty Format.str_formatter p ;
      prerr_string " <" ;
      prerr_string (Format.flush_str_formatter ()) ;
      prerr_string ">")
    ps

(* Debug helper: print a whole matrix on stderr. *)
let pretty_matrix (pss : matrix) =
  prerr_endline "begin matrix" ;
  List.iter
    (fun ps ->
      pretty_line ps ;
      prerr_endline "")
    pss ;
  prerr_endline "end matrix"
(****************************)
(* Utilities for matching *)
(****************************)
(* Check top matching *)
(* Does the head of [p2] match the simple (head-normalized) pattern [p1]?
   Wildcards/variables on the right match anything; sub-patterns are not
   inspected here. *)
let simple_match p1 p2 =
  match p1.pat_desc, p2.pat_desc with
  | Tpat_construct(_, c1, _), Tpat_construct(_, c2, _) ->
      Types.equal_tag c1.cstr_tag c2.cstr_tag
  | Tpat_variant(l1, _, _), Tpat_variant(l2, _, _) ->
      l1 = l2
  | Tpat_constant(c1), Tpat_constant(c2) -> const_compare c1 c2 = 0
  | Tpat_lazy _, Tpat_lazy _ -> true
  | Tpat_record _ , Tpat_record _ -> true
  | Tpat_tuple p1s, Tpat_tuple p2s
  | Tpat_array p1s, Tpat_array p2s -> List.length p1s = List.length p2s
  | _, (Tpat_any | Tpat_var(_)) -> true
  | _, _ -> false
(* extract record fields as a whole *)
(* A wildcard counts as a record with no explicit fields. *)
let record_arg p = match p.pat_desc with
| Tpat_any -> []
| Tpat_record (args,_) -> args
| _ -> fatal_error "Parmatch.as_record"
(* Raise Not_found when pos is not present in arg *)
let get_field pos arg =
  let has_pos (_, lbl, _) = lbl.lbl_pos = pos in
  let _, _, p = List.find has_pos arg in
  p
(* For each field listed in [omegas], fetch the sub-pattern bound to the
   same label position in [arg], defaulting to omega when absent. *)
let extract_fields omegas arg =
  let field_at (_, lbl, _) =
    try get_field lbl.lbl_pos arg with Not_found -> omega
  in
  List.map field_at omegas
(* Expand a (possibly partial) record-pattern field list to the full list
   of fields of the record type, in label-position order, filling absent
   fields with omega. *)
let all_record_args lbls = match lbls with
| (_,{lbl_all=lbl_all},_)::_ ->
    let t =
      Array.map
        (fun lbl -> mknoloc (Longident.Lident "?temp?"), lbl,omega)
        lbl_all in
    List.iter
      (fun ((_, lbl,_) as x) -> t.(lbl.lbl_pos) <- x)
      lbls ;
    Array.to_list t
| _ -> fatal_error "Parmatch.all_record_args"
(* Build argument list when p2 >= p1, where p1 is a simple pattern *)
let rec simple_match_args p1 p2 = match p2.pat_desc with
| Tpat_alias (p2,_,_) -> simple_match_args p1 p2
| Tpat_construct(_, _, args) -> args
| Tpat_variant(_, Some arg, _) -> [arg]
| Tpat_tuple(args) -> args
| Tpat_record(args,_) -> extract_fields (record_arg p1) args
| Tpat_array(args) -> args
| Tpat_lazy arg -> [arg]
| (Tpat_any | Tpat_var(_)) ->
    (* A wildcard stands for as many wildcards as [p1] has arguments. *)
    begin match p1.pat_desc with
      Tpat_construct(_, _,args) -> omega_list args
    | Tpat_variant(_, Some _, _) -> [omega]
    | Tpat_tuple(args) -> omega_list args
    | Tpat_record(args,_) -> omega_list args
    | Tpat_array(args) -> omega_list args
    | Tpat_lazy _ -> [omega]
    | _ -> []
    end
| _ -> []
(*
  Normalize a pattern ->
   all arguments are omega (simple pattern) and no more variables
*)
let rec normalize_pat q = match q.pat_desc with
  | Tpat_any | Tpat_constant _ -> q
  | Tpat_var _ -> make_pat Tpat_any q.pat_type q.pat_env
  | Tpat_alias (p,_,_) -> normalize_pat p
  | Tpat_tuple (args) ->
      make_pat (Tpat_tuple (omega_list args)) q.pat_type q.pat_env
  | Tpat_construct (lid, c,args) ->
      make_pat
        (Tpat_construct (lid, c,omega_list args))
        q.pat_type q.pat_env
  | Tpat_variant (l, arg, row) ->
      make_pat (Tpat_variant (l, may_map (fun _ -> omega) arg, row))
        q.pat_type q.pat_env
  | Tpat_array (args) ->
      make_pat (Tpat_array (omega_list args)) q.pat_type q.pat_env
  | Tpat_record (largs, closed) ->
      make_pat
        (Tpat_record (List.map (fun (lid,lbl,_) ->
                                 lid, lbl,omega) largs, closed))
        q.pat_type q.pat_env
  | Tpat_lazy _ ->
      make_pat (Tpat_lazy omega) q.pat_type q.pat_env
  | Tpat_or _ -> fatal_error "Parmatch.normalize_pat"
(*
  Build normalized (cf. supra) discriminating pattern,
  in the non-data type case
*)
let discr_pat q pss =

  let rec acc_pat acc pss = match pss with
    ({pat_desc = Tpat_alias (p,_,_)}::ps)::pss ->
        acc_pat acc ((p::ps)::pss)
  | ({pat_desc = Tpat_or (p1,p2,_)}::ps)::pss ->
        acc_pat acc ((p1::ps)::(p2::ps)::pss)
  | ({pat_desc = (Tpat_any | Tpat_var _)}::_)::pss ->
        acc_pat acc pss
  | (({pat_desc = Tpat_tuple _} as p)::_)::_ -> normalize_pat p
  | (({pat_desc = Tpat_lazy _} as p)::_)::_ -> normalize_pat p
  | (({pat_desc = Tpat_record (largs,closed)} as p)::_)::pss ->
      (* Accumulate the union of the record fields seen on the column. *)
      let new_omegas =
        List.fold_right
          (fun (lid, lbl,_) r ->
            try
              let _ = get_field lbl.lbl_pos r in
              r
            with Not_found ->
              (lid, lbl,omega)::r)
          largs (record_arg acc)
      in
      acc_pat
        (make_pat (Tpat_record (new_omegas, closed)) p.pat_type p.pat_env)
        pss
  | _ -> acc in

  match normalize_pat q with
  | {pat_desc= (Tpat_any | Tpat_record _)} as q -> acc_pat q pss
  | q -> q
(*
  In case a matching value is found, set actual arguments
  of the matching pattern.
*)
(* Split [r] into a prefix of the same length as [xs] plus the rest;
   fatal when [r] is too short. *)
let rec read_args xs r =
  match xs, r with
  | [], rest -> [], rest
  | _ :: more, arg :: rest ->
      let args, remaining = read_args more rest in
      arg :: args, remaining
  | _ :: _, [] ->
      fatal_error "Parmatch.read_args"
(* Rebuild the normalized pattern [q] with actual arguments taken from the
   prefix of [r]; the new pattern is consed onto the unused suffix of [r].
   When [erase_mutable] is set, arguments bound to mutable record fields
   are replaced by omega. *)
let do_set_args erase_mutable q r = match q with
| {pat_desc = Tpat_tuple omegas} ->
    let args,rest = read_args omegas r in
    make_pat (Tpat_tuple args) q.pat_type q.pat_env::rest
| {pat_desc = Tpat_record (omegas,closed)} ->
    let args,rest = read_args omegas r in
    make_pat
      (Tpat_record
         (List.map2 (fun (lid, lbl,_) arg ->
           if
             erase_mutable &&
             (match lbl.lbl_mut with
             | Mutable -> true | Immutable -> false)
           then
             lid, lbl, omega
           else
             lid, lbl, arg)
            omegas args, closed))
      q.pat_type q.pat_env::
    rest
| {pat_desc = Tpat_construct (lid, c,omegas)} ->
    let args,rest = read_args omegas r in
    make_pat
      (Tpat_construct (lid, c,args))
      q.pat_type q.pat_env::
    rest
| {pat_desc = Tpat_variant (l, omega, row)} ->
    (* A variant consumes one element of [r] only when it carries an
       argument. *)
    let arg, rest =
      match omega, r with
        Some _, a::r -> Some a, r
      | None, r -> None, r
      | _ -> assert false
    in
    make_pat
      (Tpat_variant (l, arg, row)) q.pat_type q.pat_env::
    rest
| {pat_desc = Tpat_lazy _omega} ->
    begin match r with
      arg::rest ->
        make_pat (Tpat_lazy arg) q.pat_type q.pat_env::rest
    | _ -> fatal_error "Parmatch.do_set_args (lazy)"
    end
| {pat_desc = Tpat_array omegas} ->
    let args,rest = read_args omegas r in
    make_pat
      (Tpat_array args) q.pat_type q.pat_env::
    rest
| {pat_desc=Tpat_constant _|Tpat_any} ->
    q::r (* case any is used in matching.ml *)
| _ -> fatal_error "Parmatch.set_args"
(* Specializations of do_set_args; see its documentation. *)
let set_args q r = do_set_args false q r
and set_args_erase_mutable q r = do_set_args true q r
(* filter pss according to pattern q *)
(* Keep the rows of [pss] whose head matches the simple pattern [q],
   replacing the head by its sub-patterns; aliases are unwrapped and
   or-patterns split the row in two. *)
let filter_one q pss =
  let rec filter_rec = function
      ({pat_desc = Tpat_alias(p,_,_)}::ps)::pss ->
        filter_rec ((p::ps)::pss)
    | ({pat_desc = Tpat_or(p1,p2,_)}::ps)::pss ->
        filter_rec ((p1::ps)::(p2::ps)::pss)
    | (p::ps)::pss ->
        if simple_match q p
        then (simple_match_args q p @ ps) :: filter_rec pss
        else filter_rec pss
    | _ -> [] in
  filter_rec pss
(*
  Filter pss in the ``extra case''. This applies :
  - According to an extra constructor (datatype case, non-complete signature).
  - According to anything (all-variables case).
*)
let filter_extra pss =
  let rec filter_rec = function
      ({pat_desc = Tpat_alias(p,_,_)}::ps)::pss ->
        filter_rec ((p::ps)::pss)
    | ({pat_desc = Tpat_or(p1,p2,_)}::ps)::pss ->
        filter_rec ((p1::ps)::(p2::ps)::pss)
    | ({pat_desc = (Tpat_any | Tpat_var(_))} :: qs) :: pss ->
        (* only wildcard-headed rows survive; keep their tails *)
        qs :: filter_rec pss
    | _::pss -> filter_rec pss
    | [] -> [] in
  filter_rec pss
(*
  Pattern p0 is the discriminating pattern,
  returns [(q0,pss0) ; ... ; (qn,pssn)]
  where the qi's are simple patterns and the pssi's are
  matched matrices.

  NOTES
   * (qi,[]) is impossible.
   * In the case when matching is useless (all-variable case),
     returns []
*)
(* Group the rows of [pss] by the simple head pattern they match (cf. the
   specification above); [pat0] seeds the environment in the
   record/tuple/lazy cases, where the head shape is unique. *)
let filter_all pat0 pss =

  let rec insert q qs env =
    match env with
      [] ->
        (* Never-seen head: open a new group. *)
        let q0 = normalize_pat q in
        [q0, [simple_match_args q0 q @ qs]]
    | ((q0,pss) as c)::env ->
        if simple_match q0 q
        then (q0, ((simple_match_args q0 q @ qs) :: pss)) :: env
        else c :: insert q qs env in

  let rec filter_rec env = function
    ({pat_desc = Tpat_alias(p,_,_)}::ps)::pss ->
      filter_rec env ((p::ps)::pss)
  | ({pat_desc = Tpat_or(p1,p2,_)}::ps)::pss ->
      filter_rec env ((p1::ps)::(p2::ps)::pss)
  | ({pat_desc = (Tpat_any | Tpat_var(_))}::_)::pss ->
      filter_rec env pss
  | (p::ps)::pss ->
      filter_rec (insert p ps env) pss
  | _ -> env

  (* Second pass: wildcard-headed rows are added to every group. *)
  and filter_omega env = function
    ({pat_desc = Tpat_alias(p,_,_)}::ps)::pss ->
      filter_omega env ((p::ps)::pss)
  | ({pat_desc = Tpat_or(p1,p2,_)}::ps)::pss ->
      filter_omega env ((p1::ps)::(p2::ps)::pss)
  | ({pat_desc = (Tpat_any | Tpat_var(_))}::ps)::pss ->
      filter_omega
        (List.map (fun (q,qss) -> (q,(simple_match_args q omega @ ps) :: qss))
           env)
        pss
  | _::pss -> filter_omega env pss
  | [] -> env in

  filter_omega
    (filter_rec
      (match pat0.pat_desc with
        (Tpat_record(_) | Tpat_tuple(_) | Tpat_lazy(_)) -> [pat0,[]]
      | _ -> [])
      pss)
    pss
(* Variant related functions *)

(* Replace the last element of a list by [a]; the empty list is returned
   unchanged. *)
let set_last a l =
  let rec replace = function
    | [] -> []
    | [_] -> [a]
    | x :: rest -> x :: replace rest
  in
  replace l
(* mark constructor lines for failure when they are incomplete *)
let rec mark_partial = function
    ({pat_desc = Tpat_alias(p,_,_)}::ps)::pss ->
      mark_partial ((p::ps)::pss)
  | ({pat_desc = Tpat_or(p1,p2,_)}::ps)::pss ->
      mark_partial ((p1::ps)::(p2::ps)::pss)
  | ({pat_desc = (Tpat_any | Tpat_var(_))} :: _ as ps) :: pss ->
      ps :: mark_partial pss
  | ps::pss ->
      (* constructor-headed row: mark it by setting its last element to
         [zero] *)
      (set_last zero ps) :: mark_partial pss
  | [] -> []
(* Close the row type of a matched variant: tags that are not explicitly
   matched are set absent, then the row is unified with a closed row. *)
let close_variant env row =
  let row = Btype.row_repr row in
  let nm =
    List.fold_left
      (fun nm (_tag,f) ->
        match Btype.row_field_repr f with
        | Reither(_, _, false, e) ->
            (* m=false means that this tag is not explicitly matched *)
            Btype.set_row_field e Rabsent;
            None
        | Rabsent | Reither (_, _, true, _) | Rpresent _ -> nm)
      row.row_name row.row_fields in
  if not row.row_closed || nm != row.row_name then begin
    (* this unification cannot fail *)
    Ctype.unify env row.row_more
      (Btype.newgenty
         (Tvariant {row with row_fields = []; row_more = Btype.newgenvar();
                    row_closed = true; row_name = nm}))
  end
(* Expanded row of a pattern that must have a polymorphic-variant type. *)
let row_of_pat pat =
  match Ctype.expand_head pat.pat_env pat.pat_type with
    {desc = Tvariant row} -> Btype.row_repr row
  | _ -> assert false
(*
  Check whether the first column of env makes up a complete signature or
  not.
*)
(* Does the first column of [env] (grouped by head, cf. filter_all) cover
   the complete signature of its type?  [closing] makes open variant rows
   be considered as closed. *)
let full_match closing env = match env with
| ({pat_desc = Tpat_construct(_,c,_)},_) :: _ ->
    if c.cstr_consts < 0 then false (* extensions *)
    else List.length env = c.cstr_consts + c.cstr_nonconsts
| ({pat_desc = Tpat_variant _} as p,_) :: _ ->
    let fields =
      List.map
        (function ({pat_desc = Tpat_variant (tag, _, _)}, _) -> tag
          | _ -> assert false)
        env
    in
    let row = row_of_pat p in
    if closing && not (Btype.row_fixed row) then
      (* closing=true, we are considering the variant as closed *)
      List.for_all
        (fun (tag,f) ->
          match Btype.row_field_repr f with
            Rabsent | Reither(_, _, false, _) -> true
          | Reither (_, _, true, _)
              (* m=true, do not discard matched tags, rather warn *)
          | Rpresent _ -> List.mem tag fields)
        row.row_fields
    else
      row.row_closed &&
      List.for_all
        (fun (tag,f) ->
          Btype.row_field_repr f = Rabsent || List.mem tag fields)
        row.row_fields
| ({pat_desc = Tpat_constant(Const_char _)},_) :: _ ->
    List.length env = 256
| ({pat_desc = Tpat_constant(_)},_) :: _ -> false
| ({pat_desc = Tpat_tuple(_)},_) :: _ -> true
| ({pat_desc = Tpat_record(_)},_) :: _ -> true
| ({pat_desc = Tpat_array(_)},_) :: _ -> false
| ({pat_desc = Tpat_lazy(_)},_) :: _ -> true
| ({pat_desc = (Tpat_any|Tpat_var _|Tpat_alias _|Tpat_or _)},_) :: _
| []
  ->
    assert false
(* Written as a non-fragile matching, PR#7451 originated from a fragile
   matching below. *)
(* Should the signature be considered extendable with constructors of the
   type at path [ext]?  Only when the column matches ordinary (non-extension)
   constructors of exactly that type. *)
let should_extend ext env = match ext with
| None -> false
| Some ext -> begin match env with
  | [] -> assert false
  | (p,_)::_ ->
      begin match p.pat_desc with
      | Tpat_construct
          (_, {cstr_tag=(Cstr_constant _|Cstr_block _|Cstr_unboxed)},_) ->
            let path = get_type_path p.pat_type p.pat_env in
            Path.same path ext
      | Tpat_construct
          (_, {cstr_tag=(Cstr_extension _)},_) -> false
      | Tpat_constant _|Tpat_tuple _|Tpat_variant _
      | Tpat_record _|Tpat_array _ | Tpat_lazy _
        -> false
      | Tpat_any|Tpat_var _|Tpat_alias _|Tpat_or _
        -> assert false
      end
end
(* Hash table keyed by constructor tags, using the tag equality from
   Types. *)
module ConstructorTagHashtbl = Hashtbl.Make(
  struct
    type t = Types.constructor_tag
    let hash = Hashtbl.hash
    let equal = Types.equal_tag
  end
)
(* complement constructor tags *)
(* Return (as a hash table used as a set) the constant and block tags NOT
   present in [tags], given the total number of each kind. *)
let complete_tags nconsts nconstrs tags =
  let seen_const = Array.make nconsts false
  and seen_constr = Array.make nconstrs false in
  List.iter
    (function
      | Cstr_constant i -> seen_const.(i) <- true
      | Cstr_block i -> seen_constr.(i) <- true
      | _ -> assert false)
    tags ;
  let r = ConstructorTagHashtbl.create (nconsts+nconstrs) in
  for i = 0 to nconsts-1 do
    if not seen_const.(i) then
      ConstructorTagHashtbl.add r (Cstr_constant i) ()
  done ;
  for i = 0 to nconstrs-1 do
    if not seen_constr.(i) then
      ConstructorTagHashtbl.add r (Cstr_block i) ()
  done ;
  r
(* build a pattern from a constructor list *)
(* Pattern [C (_, ..., _)] carrying the type/env of [ex_pat]. *)
let pat_of_constr ex_pat cstr =
  {ex_pat with pat_desc =
   Tpat_construct (mknoloc (Longident.Lident "?pat_of_constr?"),
                   cstr, omegas cstr.cstr_arity)}
(* Or-pattern of two patterns, with the type/env of the left one. *)
let orify x y = make_pat (Tpat_or (x, y, None)) x.pat_type x.pat_env

(* Right-nested or-pattern of a non-empty list of patterns. *)
let rec orify_many = function
| [] -> assert false
| [x] -> x
| x :: xs -> orify x (orify_many xs)

(* Or-pattern covering every constructor in [cstrs]; raises Empty when the
   list is empty. *)
let pat_of_constrs ex_pat cstrs =
  if cstrs = [] then raise Empty else
  orify_many (List.map (pat_of_constr ex_pat) cstrs)
(* Head patterns inhabiting type [ty]: every constructor of a variant
   (when [always] is set, the type has a single constructor, or all
   constructors are GADT ones), the full record pattern for records, and
   just omega otherwise. *)
let pats_of_type ?(always=false) env ty =
  let ty' = Ctype.expand_head env ty in
  match ty'.desc with
  | Tconstr (path, _, _) ->
      begin try match (Env.find_type path env).type_kind with
      | Type_variant cl when always || List.length cl = 1 ||
        List.for_all (fun cd -> cd.Types.cd_res <> None) cl ->
          let cstrs = fst (Env.find_type_descrs path env) in
          List.map (pat_of_constr (make_pat Tpat_any ty env)) cstrs
      | Type_record _ ->
          let labels = snd (Env.find_type_descrs path env) in
          let fields =
            List.map (fun ld ->
              mknoloc (Longident.Lident "?pat_of_label?"), ld, omega)
              labels
          in
          [make_pat (Tpat_record (fields, Closed)) ty env]
      | _ -> [omega]
      with Not_found -> [omega]
      end
  | Ttuple tl ->
      [make_pat (Tpat_tuple (omegas (List.length tl))) ty env]
  | _ -> [omega]
(* Constructor descriptions of the variant type [ty], expanding manifest
   types one step at a time; fatal on any other kind of type. *)
let rec get_variant_constructors env ty =
  match (Ctype.repr ty).desc with
  | Tconstr (path,_,_) -> begin
      try match Env.find_type path env with
      | {type_kind=Type_variant _} ->
          fst (Env.find_type_descrs path env)
      | {type_manifest = Some _} ->
          get_variant_constructors env
            (Ctype.expand_head_once env (clean_copy ty))
      | _ -> fatal_error "Parmatch.get_variant_constructors"
      with Not_found ->
        fatal_error "Parmatch.get_variant_constructors"
  end
  | _ -> fatal_error "Parmatch.get_variant_constructors"
(* Sends back a pattern that complements constructor tags all_tag *)
(* Constructors of [p]'s type whose tags are NOT in [all_tags], constant
   constructors listed first. *)
let complete_constrs p all_tags =
  let c =
    match p.pat_desc with Tpat_construct (_, c, _) -> c | _ -> assert false in
  let not_tags = complete_tags c.cstr_consts c.cstr_nonconsts all_tags in
  let constrs = get_variant_constructors p.pat_env c.cstr_res in
  let others =
    List.filter
      (fun cnstr -> ConstructorTagHashtbl.mem not_tags cnstr.cstr_tag)
      constrs in
  let const, nonconst =
    List.partition (fun cnstr -> cnstr.cstr_arity = 0) others in
  const @ nonconst
(* Or-pattern of the constructors NOT matched in [env]; falls back to
   extra_pat for non-ordinary (e.g. extension) constructors. *)
let build_other_constrs env p =
  match p.pat_desc with
    Tpat_construct (_, {cstr_tag=Cstr_constant _|Cstr_block _}, _) ->
      let get_tag = function
        | {pat_desc = Tpat_construct (_,c,_)} -> c.cstr_tag
        | _ -> fatal_error "Parmatch.get_tag" in
      let all_tags = List.map (fun (p,_) -> get_tag p) env in
      pat_of_constrs p (complete_constrs p all_tags)
  | _ -> extra_pat
(* Auxiliary for build_other *)

(* Enumerate constants [first], [next first], ... until one is not already
   matched in [env], and wrap it as a pattern with [p]'s type and env. *)
let build_other_constant proj make first next p env =
  let seen = List.map (fun (q, _) -> proj q.pat_desc) env in
  let rec search cur =
    if List.mem cur seen then search (next cur)
    else make_pat (make cur) p.pat_type p.pat_env
  in
  search first
(*
  Builds a pattern that is incompatible with all patterns in
  the first column of env
*)
let some_other_tag = "<some other tag>"

(* Build a counter-example pattern that matches none of the heads in the
   first column of [env]; dispatches on the kind of head pattern. *)
let build_other ext env = match env with
| ({pat_desc = Tpat_construct (lid, {cstr_tag=Cstr_extension _},_)},_) :: _ ->
    (* let c = {c with cstr_name = "*extension*"} in *) (* PR#7330 *)
    make_pat (Tpat_var (Ident.create "*extension*",
                        {lid with txt="*extension*"})) Ctype.none Env.empty
| ({pat_desc = Tpat_construct _} as p,_) :: _ ->
    begin match ext with
    | Some ext when Path.same ext (get_type_path p.pat_type p.pat_env) ->
        extra_pat
    | _ ->
        build_other_constrs env p
    end
| ({pat_desc = Tpat_variant (_,_,r)} as p,_) :: _ ->
    let tags =
      List.map
        (function ({pat_desc = Tpat_variant (tag, _, _)}, _) -> tag
          | _ -> assert false)
        env
    in
    let row = row_of_pat p in
    let make_other_pat tag const =
      let arg = if const then None else Some omega in
      make_pat (Tpat_variant(tag, arg, r)) p.pat_type p.pat_env in
    (* Gather one pattern per unmatched tag of the row, then or them. *)
    begin match
      List.fold_left
        (fun others (tag,f) ->
          if List.mem tag tags then others else
          match Btype.row_field_repr f with
            Rabsent (* | Reither _ *) -> others
          (* This one is called after erasing pattern info *)
          | Reither (c, _, _, _) -> make_other_pat tag c :: others
          | Rpresent arg -> make_other_pat tag (arg = None) :: others)
        [] row.row_fields
    with
      [] ->
        make_other_pat some_other_tag true
    | pat::other_pats ->
        List.fold_left
          (fun p_res pat ->
            make_pat (Tpat_or (pat, p_res, None)) p.pat_type p.pat_env)
          pat other_pats
    end
| ({pat_desc = Tpat_constant(Const_char _)} as p,_) :: _ ->
    let all_chars =
      List.map
        (fun (p,_) -> match p.pat_desc with
        | Tpat_constant (Const_char c) -> c
        | _ -> assert false)
        env in
    (* Search printable ranges first, then the whole byte range. *)
    let rec find_other i imax =
      if i > imax then raise Not_found
      else
        let ci = Char.chr i in
        if List.mem ci all_chars then
          find_other (i+1) imax
        else
          make_pat (Tpat_constant (Const_char ci)) p.pat_type p.pat_env in
    let rec try_chars = function
      | [] -> omega
      | (c1,c2) :: rest ->
          try
            find_other (Char.code c1) (Char.code c2)
          with
          | Not_found -> try_chars rest in
    try_chars
      [ 'a', 'z' ; 'A', 'Z' ; '0', '9' ;
        ' ', '~' ; Char.chr 0 , Char.chr 255]
| ({pat_desc=(Tpat_constant (Const_int _))} as p,_) :: _ ->
    build_other_constant
      (function Tpat_constant(Const_int i) -> i | _ -> assert false)
      (function i -> Tpat_constant(Const_int i))
      0 succ p env
| ({pat_desc=(Tpat_constant (Const_int32 _))} as p,_) :: _ ->
    build_other_constant
      (function Tpat_constant(Const_int32 i) -> i | _ -> assert false)
      (function i -> Tpat_constant(Const_int32 i))
      0l Int32.succ p env
| ({pat_desc=(Tpat_constant (Const_int64 _))} as p,_) :: _ ->
    build_other_constant
      (function Tpat_constant(Const_int64 i) -> i | _ -> assert false)
      (function i -> Tpat_constant(Const_int64 i))
      0L Int64.succ p env
| ({pat_desc=(Tpat_constant (Const_nativeint _))} as p,_) :: _ ->
    build_other_constant
      (function Tpat_constant(Const_nativeint i) -> i | _ -> assert false)
      (function i -> Tpat_constant(Const_nativeint i))
      0n Nativeint.succ p env
| ({pat_desc=(Tpat_constant (Const_string _))} as p,_) :: _ ->
    (* Strings are discriminated by length only. *)
    build_other_constant
      (function Tpat_constant(Const_string (s, _)) -> String.length s
              | _ -> assert false)
      (function i -> Tpat_constant(Const_string(String.make i '*', None)))
      0 succ p env
| ({pat_desc=(Tpat_constant (Const_float _))} as p,_) :: _ ->
    build_other_constant
      (function Tpat_constant(Const_float f) -> float_of_string f
              | _ -> assert false)
      (function f -> Tpat_constant(Const_float (string_of_float f)))
      0.0 (fun f -> f +. 1.0) p env
| ({pat_desc = Tpat_array _} as p,_)::_ ->
    (* Arrays are discriminated by length: find an unmatched length. *)
    let all_lengths =
      List.map
        (fun (p,_) -> match p.pat_desc with
        | Tpat_array args -> List.length args
        | _ -> assert false)
        env in
    let rec try_arrays l =
      if List.mem l all_lengths then try_arrays (l+1)
      else
        make_pat
          (Tpat_array (omegas l))
          p.pat_type p.pat_env in
    try_arrays 0
| [] -> omega
| _ -> omega
(*
  Core function :
  Is the last row of pattern matrix pss + qs satisfiable ?
  That is :
    Does there exists at least one value vector, es such that :
     1- for all ps in pss, ps # es (ps and es are not compatible)
     2- qs <= es                   (es matches qs)
*)
(* Does pattern [p] match at least one value?  Only variant patterns on
   tags marked absent in their row match nothing. *)
let rec has_instance p = match p.pat_desc with
  | Tpat_variant (lab, _, row) when is_absent lab row -> false
  | Tpat_any | Tpat_var _ | Tpat_constant _ | Tpat_variant (_,None,_) -> true
  | Tpat_alias (sub,_,_) | Tpat_variant (_,Some sub,_) -> has_instance sub
  | Tpat_or (left,right,_) -> has_instance left || has_instance right
  | Tpat_construct (_,_,args) | Tpat_tuple args | Tpat_array args ->
      has_instances args
  | Tpat_record (fields,_) ->
      has_instances (List.map (fun (_,_,sub) -> sub) fields)
  | Tpat_lazy sub -> has_instance sub

(* Same, conjunctively, over a whole row of patterns. *)
and has_instances ps = List.for_all has_instance ps
(* In two places in the following function, we check the coherence of the first
   column of (pss + qs).
   If it is incoherent, then we exit early saying that (pss + qs) is not
   satisfiable (which is equivalent to saying "oh, we shouldn't have considered
   that branch, no good result can come from here").
   But what happens if we have a coherent but ill-typed column?
   - we might end up returning [false], which is equivalent to noticing the
     incompatibility: clearly this is fine.
   - if we end up returning [true] then we're saying that [qs] is useful while
     it is not. This is sad but not the end of the world, we're just allowing
     dead code to survive. *)
(* Satisfiability check proper; the comment right above describes the
   coherence early-exits.  (Fix: the bare text at the filter_all match
   was a comment whose delimiters were lost, breaking the parse.) *)
let rec satisfiable pss qs = match pss with
| [] -> has_instances qs
| _ ->
    match qs with
    | [] -> false
    | {pat_desc = Tpat_or(q1,q2,_)}::qs ->
        (* or-pattern: satisfiable as soon as one alternative is *)
        satisfiable pss (q1::qs) || satisfiable pss (q2::qs)
    | {pat_desc = Tpat_alias(q,_,_)}::qs ->
        satisfiable pss (q::qs)
    | {pat_desc = (Tpat_any | Tpat_var(_))}::qs ->
        if not (all_coherent (simplified_first_col pss)) then
          false
        else begin
          let q0 = discr_pat omega pss in
          match filter_all q0 pss with
          (* first column of pss is made of variables only *)
          | [] -> satisfiable (filter_extra pss) qs
          | constrs ->
              if full_match false constrs then
                List.exists
                  (fun (p,pss) ->
                    not (is_absent_pat p) &&
                    satisfiable pss (simple_match_args p omega @ qs))
                  constrs
              else
                satisfiable (filter_extra pss) qs
        end
    | {pat_desc=Tpat_variant (l,_,r)}::_ when is_absent l r -> false
    | q::qs ->
        if not (all_coherent (q :: simplified_first_col pss)) then
          false
        else begin
          let q0 = discr_pat q pss in
          satisfiable (filter_one q0 pss) (simple_match_args q0 q @ qs)
        end
(* Also return the remaining cases, to enable GADT handling.
   For considerations regarding the coherence check, see the comment on
   [satisfiable] above. *)
(* Like [satisfiable], but returns the list of satisfiable expansions.
   (Fix: the bare text at the filter_all match was a comment whose
   delimiters were lost, breaking the parse.) *)
let rec satisfiables pss qs = match pss with
| [] -> if has_instances qs then [qs] else []
| _ ->
    match qs with
    | [] -> []
    | {pat_desc = Tpat_or(q1,q2,_)}::qs ->
        satisfiables pss (q1::qs) @ satisfiables pss (q2::qs)
    | {pat_desc = Tpat_alias(q,_,_)}::qs ->
        satisfiables pss (q::qs)
    | {pat_desc = (Tpat_any | Tpat_var(_))}::qs ->
        if not (all_coherent (simplified_first_col pss)) then
          []
        else begin
          let q0 = discr_pat omega pss in
          let wild p =
            List.map (fun qs -> p::qs) (satisfiables (filter_extra pss) qs) in
          match filter_all q0 pss with
          (* first column of pss is made of variables only *)
          | [] ->
              wild omega
          | (p,_)::_ as constrs ->
              let for_constrs () =
                List.flatten (
                  List.map
                    (fun (p,pss) ->
                      if is_absent_pat p then [] else
                      List.map (set_args p)
                        (satisfiables pss (simple_match_args p omega @ qs)))
                    constrs )
              in
              if full_match false constrs then for_constrs () else
              match p.pat_desc with
              | Tpat_construct _ ->
                  (* activate this code for checking non-gadt constructors *)
                  wild (build_other_constrs constrs p) @ for_constrs ()
              | _ ->
                  wild omega
        end
    | {pat_desc=Tpat_variant (l,_,r)}::_ when is_absent l r -> []
    | q::qs ->
        if not (all_coherent (q :: simplified_first_col pss)) then
          []
        else begin
          let q0 = discr_pat q pss in
          List.map (set_args q0)
            (satisfiables (filter_one q0 pss) (simple_match_args q0 q @ qs))
        end
(*
Now another satisfiable function that additionally
supplies an example of a matching value.
This function should be called for exhaustiveness check only.
*)
(* Outcome of a search for a matching value. *)
type 'a result =
  | Rnone (* No matching value *)
  | Rsome of 'a (* This matching value *)
(* Apply [f] to each (pattern, sub-matrix) pair in turn, stopping at
   the first result that is not [Rnone].  (Fix: a token-spaced
   duplicate of this definition plus a stray "*)" corrupted the text;
   only the clean copy is kept — [try_many] is live code, used by
   [exhaust] below.) *)
let rec try_many f = function
  | [] -> Rnone
  | (p,pss)::rest ->
      match f (p,pss) with
      | Rnone -> try_many f rest
      | r -> r
(* Combine two search results: [Rnone] is neutral, two [Rsome] lists
   are concatenated. *)
let rappend r1 r2 =
  match r1 with
  | Rnone -> r2
  | Rsome l1 ->
      match r2 with
      | Rnone -> r1
      | Rsome l2 -> Rsome (l1 @ l2)
(* Apply [f] to every (pattern, sub-matrix) pair and append all the
   partial results with [rappend] (unlike [try_many], does not stop at
   the first success). *)
let rec try_many_gadt f = function
  | [] -> Rnone
  | (p,pss)::rest ->
      rappend (f (p, pss)) (try_many_gadt f rest)
(* Exhaustiveness check: returns [Rsome vs] where [vs] is a width-[n]
   counter-example not matched by [pss], or [Rnone] when [pss] is
   exhaustive.  [ext] optionally names a type to treat as extended.
   (Fixes: a token-spaced duplicate of the header and bare comment
   lines corrupted the text; [full_match true false constrs] passed two
   booleans while every other call site in this file passes one —
   restored to [full_match true constrs].) *)
let rec exhaust ext pss n = match pss with
| [] -> Rsome (omegas n)
| []::_ -> Rnone
| pss ->
    let q0 = discr_pat omega pss in
    begin match filter_all q0 pss with
    (* first column of pss is made of variables only *)
    | [] ->
        begin match exhaust ext (filter_extra pss) (n-1) with
        | Rsome r -> Rsome (q0::r)
        | r -> r
        end
    | constrs ->
        let try_non_omega (p,pss) =
          if is_absent_pat p then
            Rnone
          else
            match
              exhaust
                ext pss (List.length (simple_match_args p omega) + n - 1)
            with
            | Rsome r -> Rsome (set_args p r)
            | r -> r in
        if
          full_match true constrs && not (should_extend ext constrs)
        then
          try_many try_non_omega constrs
        else
          (*
            D = filter_extra pss is the default matrix
            as it is included in pss, one can avoid
            recursive calls on specialized matrices,
            Essentially :
            * D exhaustive => pss exhaustive
            * D non-exhaustive => we have a non-filtered value
          *)
          let r = exhaust ext (filter_extra pss) (n-1) in
          match r with
          | Rnone -> Rnone
          | Rsome r ->
              try
                Rsome (build_other ext constrs::r)
              with
              (* cannot occur, since constructors don't make a full signature *)
              | Empty -> fatal_error "Parmatch.exhaust"
    end
(*
let combinations f lst lst' =
  let rec iter2 x =
    function
      [] -> []
    | y :: ys ->
        f x y :: iter2 x ys
  in
  let rec iter =
    function
      [] -> []
    | x :: xs -> iter2 x lst' @ iter xs
  in
  iter lst
*)
(*
let print_pat pat =
  let rec string_of_pat pat =
    match pat.pat_desc with
      Tpat_var _ -> "v"
    | Tpat_any -> "_"
    | Tpat_alias (p, x) -> Printf.sprintf "(%s) as ?" (string_of_pat p)
    | Tpat_constant n -> "0"
    | Tpat_construct (_, lid, _) ->
        Printf.sprintf "%s" (String.concat "." (Longident.flatten lid.txt))
    | Tpat_lazy p ->
        Printf.sprintf "(lazy %s)" (string_of_pat p)
    | Tpat_or (p1,p2,_) ->
        Printf.sprintf "(%s | %s)" (string_of_pat p1) (string_of_pat p2)
    | Tpat_tuple list ->
        Printf.sprintf "(%s)" (String.concat "," (List.map string_of_pat list))
    | Tpat_variant (_, _, _) -> "variant"
    | Tpat_record (_, _) -> "record"
    | Tpat_array _ -> "array"
  in
  Printf.fprintf stderr "PAT[%s]\n%!" (string_of_pat pat)
*)
(* strictly more powerful than exhaust; however, exhaust
was kept for backwards compatibility *)
(* GADT-aware exhaustiveness check; returns lists of counter-example
   rows.  (Fix: three comment spans had lost their delimiters and were
   duplicated, breaking the parse; restored as proper comments.) *)
let rec exhaust_gadt (ext:Path.t option) pss n = match pss with
| [] -> Rsome [omegas n]
| []::_ -> Rnone
| pss ->
    if not (all_coherent (simplified_first_col pss)) then
      (* We're considering an ill-typed branch, we won't actually be able to
         produce a well typed value taking that branch. *)
      Rnone
    else begin
      (* Assuming the first column is ill-typed but considered coherent, we
         might end up producing an ill-typed witness of non-exhaustivity
         corresponding to the current branch.

         If [exhaust] has been called by [do_check_partial], then the witnesses
         produced get typechecked and the ill-typed ones are discarded.

         If [exhaust] has been called by [do_check_fragile], then it is possible
         we might fail to warn the user that the matching is fragile. See for
         example testsuite/tests/warnings/w04_failure.ml. *)
      let q0 = discr_pat omega pss in
      match filter_all q0 pss with
      (* first column of pss is made of variables only *)
      | [] ->
          begin match exhaust_gadt ext (filter_extra pss) (n-1) with
          | Rsome r -> Rsome (List.map (fun row -> q0::row) r)
          | r -> r
          end
      | constrs ->
          let try_non_omega (p,pss) =
            if is_absent_pat p then
              Rnone
            else
              match
                exhaust_gadt
                  ext pss (List.length (simple_match_args p omega) + n - 1)
              with
              | Rsome r -> Rsome (List.map (fun row -> (set_args p row)) r)
              | r -> r in
          let before = try_many_gadt try_non_omega constrs in
          if
            full_match false constrs && not (should_extend ext constrs)
          then
            before
          else
            (*
              D = filter_extra pss is the default matrix
              as it is included in pss, one can avoid
              recursive calls on specialized matrices,
              Essentially :
              * D exhaustive => pss exhaustive
              * D non-exhaustive => we have a non-filtered value
            *)
            let r = exhaust_gadt ext (filter_extra pss) (n-1) in
            match r with
            | Rnone -> before
            | Rsome r ->
                try
                  let p = build_other ext constrs in
                  let dug = List.map (fun tail -> p :: tail) r in
                  match before with
                  | Rnone -> Rsome dug
                  | Rsome x -> Rsome (x @ dug)
                with
                (* cannot occur, since constructors don't make a full signature *)
                | Empty -> fatal_error "Parmatch.exhaust"
    end
(* Wrapper: collapse the raw list of one-column counter-examples into a
   single or-pattern row. *)
let exhaust_gadt ext pss n =
  let ret = exhaust_gadt ext pss n in
  match ret with
    Rnone -> Rnone
  | Rsome lst ->
      (* The following line is needed to compile stdlib/printf.ml *)
      if lst = [] then Rsome (omegas n) else
      let singletons =
        List.map
          (function
              [x] -> x
            | _ -> assert false)
          lst
      in
      Rsome [orify_many singletons]
(*
Another exhaustiveness check, enforcing variant typing.
Note that it does not check exact exhaustiveness, but whether a
matching could be made exhaustive by closing all variant types.
When this is true of all other columns, the current column is left
open (even if it means that the whole matching is not exhaustive as
a result).
When this is false for the matrix minus the current column, and the
current column is composed of variant tags, we close the variant
(even if it doesn't help in making the matching exhaustive).
*)
(* See the large comment above: decides whether closing variant types
   could make the matching exhaustive, and closes rows via
   [close_variant] when appropriate. *)
let rec pressure_variants tdefs = function
  | [] -> false
  | []::_ -> true
  | pss ->
      if not (all_coherent (simplified_first_col pss)) then
        true
      else begin
        let q0 = discr_pat omega pss in
        match filter_all q0 pss with
          [] -> pressure_variants tdefs (filter_extra pss)
        | constrs ->
            let rec try_non_omega = function
                (_p,pss) :: rem ->
                  let ok = pressure_variants tdefs pss in
                  (* note: evaluate the recursive call first, then AND *)
                  try_non_omega rem && ok
              | [] -> true
            in
            if full_match (tdefs=None) constrs then
              try_non_omega constrs
            else if tdefs = None then
              pressure_variants None (filter_extra pss)
            else
              let full = full_match true constrs in
              let ok =
                if full then try_non_omega constrs
                else try_non_omega (filter_all q0 (mark_partial pss))
              in
              begin match constrs, tdefs with
                ({pat_desc=Tpat_variant _} as p,_):: _, Some env ->
                  let row = row_of_pat p in
                  if Btype.row_fixed row
                  || pressure_variants None (filter_extra pss) then ()
                  else close_variant env row
              | _ -> ()
              end;
              ok
      end
(* Yet another satisfiable function *)
(*
   This time every_satisfiable pss qs checks the
   utility of every expansion of qs.
   Expansion means expansion of or-patterns inside qs
*)
(* Outcome of a usefulness check for one clause. *)
type answer =
  | Used (* Useful pattern *)
  | Unused (* Useless pattern *)
  | Upartial of Typedtree.pattern list (* Mixed, with list of useless ones *)

(* this row type enable column processing inside the matrix
   - left -> elements not to be processed,
   - right -> elements to be processed
*)
type 'a row = {no_ors : 'a list ; ors : 'a list ; active : 'a list}
(*
let pretty_row {ors=ors ; no_ors=no_ors; active=active} =
pretty_line ors ; prerr_string " *" ;
pretty_line no_ors ; prerr_string " *" ;
pretty_line active
let pretty_rows rs =
prerr_endline "begin matrix" ;
List.iter
(fun r ->
pretty_row r ;
prerr_endline "")
rs ;
prerr_endline "end matrix"
*)
(* Initial build: a fresh row has every pattern still to process. *)
let make_row ps = {active = ps; ors = []; no_ors = []}
let make_rows pss = List.map make_row pss
(* Useful to detect and expand or pats inside as pats *)
(* Strip any number of alias layers off a pattern. *)
let rec unalias p = match p.pat_desc with
  | Tpat_alias (p,_,_) -> unalias p
  | _ -> p

(* Is the pattern (once unaliased) a variable or wildcard? *)
let is_var p = match (unalias p).pat_desc with
  | Tpat_any|Tpat_var _ -> true
  | _ -> false

(* Does every row's active column start with a variable pattern? *)
let is_var_column rs =
  List.for_all
    (fun r -> match r.active with
      | p::_ -> is_var p
      | [] -> assert false)
    rs
(* Standard or-args for left-to-right matching *)
(* Extract the two alternatives of an or-pattern, through aliases. *)
let rec or_args p = match p.pat_desc with
  | Tpat_or (p1,p2,_) -> p1,p2
  | Tpat_alias (p,_,_) -> or_args p
  | _ -> assert false

(* Just remove current column *)
let remove r = match r.active with
  | _::rem -> {r with active=rem}
  | [] -> assert false

let remove_column rs = List.map remove rs

(* Current column has been processed *)
(* Move the head of the active column onto the no-or side. *)
let push_no_or r = match r.active with
  | p::rem -> { r with no_ors = p::r.no_ors ; active=rem}
  | [] -> assert false

(* Move the head of the active column onto the or side. *)
let push_or r = match r.active with
  | p::rem -> { r with ors = p::r.ors ; active=rem}
  | [] -> assert false

let push_or_column rs = List.map push_or rs
and push_no_or_column rs = List.map push_no_or rs
(* Those are adaptations of the previous homonymous functions that
   work on the current column, instead of the first column *)
(* Row version of [discr_pat]: operate on the active columns only. *)
let discr_pat q rs =
  discr_pat q (List.map (fun r -> r.active) rs)

(* Row version of [filter_one]: keep rows whose active head is
   compatible with [q], expanding aliases and or-patterns on the fly. *)
let filter_one q rs =
  let rec filter_rec rs = match rs with
    | [] -> []
    | r::rem ->
        match r.active with
        | [] -> assert false
        | {pat_desc = Tpat_alias(p,_,_)}::ps ->
            filter_rec ({r with active = p::ps}::rem)
        | {pat_desc = Tpat_or(p1,p2,_)}::ps ->
            filter_rec
              ({r with active = p1::ps}::
               {r with active = p2::ps}::
               rem)
        | p::ps ->
            if simple_match q p then
              {r with active=simple_match_args q p @ ps} :: filter_rec rem
            else
              filter_rec rem in
  filter_rec rs
(* Back to normal matrices *)
(* The processed (no-or) part of a row, restored to original order. *)
let make_vector r = List.rev r.no_ors
let make_matrix rs = List.map make_vector rs
(* Standard union on answers *)
(* Combine two usefulness answers: [Unused] absorbs, [Used] is neutral,
   two partial answers accumulate their useless sub-patterns. *)
let union_res r1 r2 = match r1, r2 with
  | (Unused,_)
  | (_, Unused) -> Unused
  | Used,_ -> r2
  | _, Used -> r1
  | Upartial u1, Upartial u2 -> Upartial (u1@u2)
(* propose or pats for expansion *)
(* For each or-pattern in [qs.ors], build a one-column row whose
   no-or context is every other pattern of the original row. *)
let extract_elements qs =
  let rec do_rec seen = function
    | [] -> []
    | q::rem ->
        {no_ors= List.rev_append seen rem @ qs.no_ors ;
         ors=[] ;
         active = [q]}::
        do_rec (q::seen) rem in
  do_rec [] qs.ors

(* idem for matrices *)
(* Transpose a non-empty list of equal-length rows. *)
let transpose rs = match rs with
  | [] -> assert false
  | r::rem ->
      let i = List.map (fun x -> [x]) r in
      List.fold_left
        (List.map2 (fun r x -> x::r))
        i rem

(* One matrix per or-pattern of [qs], aligned with [extract_elements qs]. *)
let extract_columns pss qs = match pss with
  | [] -> List.map (fun _ -> []) qs.ors
  | _ ->
      let rows = List.map extract_elements pss in
      transpose rows
(*
  Core function
  The idea is to first look for or patterns (recursive case), then
  check or-patterns argument usefulness (terminal case)
*)
(* Simplified view of the heads of the active columns; rows must be
   non-empty. *)
let rec simplified_first_usefulness_col = function
  | [] -> []
  | row :: rows ->
      match row.active with
      | [] -> assert false (* the rows are non-empty! *)
      | p :: _ -> simplify_head_pat p (simplified_first_usefulness_col rows)
(* Usefulness check of every or-pattern expansion of [qs] against
   [pss].  (Fix: the match arm "| _ ->" before the fold had been
   swallowed together with its trailing comment, leaving a bare text
   line and an incomplete match; restored.) *)
let rec every_satisfiables pss qs = match qs.active with
| [] ->
    (* qs is now partitionned, check usefulness *)
    begin match qs.ors with
    | [] -> (* no or-patterns *)
        if satisfiable (make_matrix pss) (make_vector qs) then
          Used
        else
          Unused
    | _ -> (* n or-patterns -> 2n expansions *)
        List.fold_right2
          (fun pss qs r -> match r with
            | Unused -> Unused
            | _ ->
                match qs.active with
                | [q] ->
                    let q1,q2 = or_args q in
                    let r_loc = every_both pss qs q1 q2 in
                    union_res r r_loc
                | _ -> assert false)
          (extract_columns pss qs) (extract_elements qs)
          Used
    end
| q::rem ->
    let uq = unalias q in
    begin match uq.pat_desc with
    | Tpat_any | Tpat_var _ ->
        if is_var_column pss then
          (* forget about ``all-variable'' columns now *)
          every_satisfiables (remove_column pss) (remove qs)
        else
          (* otherwise this is direct food for satisfiable *)
          every_satisfiables (push_no_or_column pss) (push_no_or qs)
    | Tpat_or (q1,q2,_) ->
        if
          q1.pat_loc.Location.loc_ghost &&
          q2.pat_loc.Location.loc_ghost
        then
          (* syntactically generated or-pats should not be expanded *)
          every_satisfiables (push_no_or_column pss) (push_no_or qs)
        else
          (* this is a real or-pattern *)
          every_satisfiables (push_or_column pss) (push_or qs)
    | Tpat_variant (l,_,r) when is_absent l r -> (* Ah Jacques... *)
        Unused
    | _ ->
        (* standard case, filter matrix *)
        (* The handling of incoherent matrices is kept in line with
           [satisfiable] *)
        if not (all_coherent (uq :: simplified_first_usefulness_col pss)) then
          Unused
        else begin
          let q0 = discr_pat q pss in
          every_satisfiables
            (filter_one q0 pss)
            {qs with active=simple_match_args q0 q @ rem}
        end
    end
(* This function ``every_both'' performs the usefulness check
   of or-pat q1|q2.
   The trick is to call every_satisfied twice with
   current active columns restricted to q1 and q2,
   That way,
   - others orpats in qs.ors will not get expanded.
   - all matching work performed on qs.no_ors is not performed again. *)
and every_both pss qs q1 q2 =
  (* Check each alternative separately; when the alternatives are
     compatible, q2 is checked against a matrix extended with q1 so
     that values already matched by q1 do not make q2 look useful. *)
  let qs1 = {qs with active=[q1]}
  and qs2 = {qs with active=[q2]} in
  let r1 = every_satisfiables pss qs1
  and r2 = every_satisfiables (if compat q1 q2 then qs1::pss else pss) qs2 in
  match r1 with
  | Unused ->
      begin match r2 with
      | Unused -> Unused
      | Used -> Upartial [q1]
      | Upartial u2 -> Upartial (q1::u2)
      end
  | Used ->
      begin match r2 with
      | Unused -> Upartial [q2]
      | _ -> r2
      end
  | Upartial u1 ->
      begin match r2 with
      | Unused -> Upartial (u1@[q2])
      | Used -> r1
      | Upartial u2 -> Upartial (u1 @ u2)
      end
(* le_pat p q means, forall V, V matches q implies V matches p *)
let rec le_pat p q =
  match (p.pat_desc, q.pat_desc) with
  | (Tpat_var _|Tpat_any),_ -> true
  | Tpat_alias(p,_,_), _ -> le_pat p q
  | _, Tpat_alias(q,_,_) -> le_pat p q
  | Tpat_constant(c1), Tpat_constant(c2) -> const_compare c1 c2 = 0
  | Tpat_construct(_,c1,ps), Tpat_construct(_,c2,qs) ->
      Types.equal_tag c1.cstr_tag c2.cstr_tag && le_pats ps qs
  | Tpat_variant(l1,Some p1,_), Tpat_variant(l2,Some p2,_) ->
      (l1 = l2 && le_pat p1 p2)
  | Tpat_variant(l1,None,_r1), Tpat_variant(l2,None,_) ->
      l1 = l2
  | Tpat_variant(_,_,_), Tpat_variant(_,_,_) -> false
  | Tpat_tuple(ps), Tpat_tuple(qs) -> le_pats ps qs
  | Tpat_lazy p, Tpat_lazy q -> le_pat p q
  | Tpat_record (l1,_), Tpat_record (l2,_) ->
      let ps,qs = records_args l1 l2 in
      le_pats ps qs
  | Tpat_array(ps), Tpat_array(qs) ->
      List.length ps = List.length qs && le_pats ps qs
  (* In all other cases, enumeration is performed *)
  | _,_ -> not (satisfiable [[p]] [q])

(* Pointwise [le_pat] on rows; a row exhausted first compares true. *)
and le_pats ps qs =
  match ps,qs with
    p::ps, q::qs -> le_pat p q && le_pats ps qs
  | _, _ -> true
(* Keep only the elements of [ps] that are minimal for the preorder
   [le].  Each pass drops an element as soon as a later element is
   below it; two passes are needed because a pass only compares an
   element against the ones following it. *)
let get_mins le ps =
  let rec keep_minimal acc = function
    | [] -> acc
    | x :: rest ->
        if List.exists (fun y -> le y x) rest
        then keep_minimal acc rest
        else keep_minimal (x :: acc) rest in
  keep_minimal [] (keep_minimal [] ps)
(* lub p q is a pattern that matches all values matched by p and q
   may raise Empty, when p and q are not compatible *)
(* Fix: the arm "| _,Tpat_or (q1,q2,_) -> orlub q1 q2 p" had been
   swallowed together with its trailing comment, leaving only the bare
   comment text; without it a right-hand or-pattern falls through to
   the wrong arms.  Restored. *)
let rec lub p q = match p.pat_desc,q.pat_desc with
| Tpat_alias (p,_,_),_ -> lub p q
| _,Tpat_alias (q,_,_) -> lub p q
| (Tpat_any|Tpat_var _),_ -> q
| _,(Tpat_any|Tpat_var _) -> p
| Tpat_or (p1,p2,_),_ -> orlub p1 p2 q
| _,Tpat_or (q1,q2,_) -> orlub q1 q2 p (* Thanks god, lub is commutative *)
| Tpat_constant c1, Tpat_constant c2 when const_compare c1 c2 = 0 -> p
| Tpat_tuple ps, Tpat_tuple qs ->
    let rs = lubs ps qs in
    make_pat (Tpat_tuple rs) p.pat_type p.pat_env
| Tpat_lazy p, Tpat_lazy q ->
    let r = lub p q in
    make_pat (Tpat_lazy r) p.pat_type p.pat_env
| Tpat_construct (lid, c1,ps1), Tpat_construct (_,c2,ps2)
      when Types.equal_tag c1.cstr_tag c2.cstr_tag ->
    let rs = lubs ps1 ps2 in
    make_pat (Tpat_construct (lid, c1,rs))
      p.pat_type p.pat_env
| Tpat_variant(l1,Some p1,row), Tpat_variant(l2,Some p2,_)
      when l1=l2 ->
    let r=lub p1 p2 in
    make_pat (Tpat_variant (l1,Some r,row)) p.pat_type p.pat_env
| Tpat_variant (l1,None,_row), Tpat_variant(l2,None,_)
      when l1 = l2 -> p
| Tpat_record (l1,closed),Tpat_record (l2,_) ->
    let rs = record_lubs l1 l2 in
    make_pat (Tpat_record (rs, closed)) p.pat_type p.pat_env
| Tpat_array ps, Tpat_array qs
      when List.length ps = List.length qs ->
    let rs = lubs ps qs in
    make_pat (Tpat_array rs) p.pat_type p.pat_env
| _,_ ->
    raise Empty

(* lub of an or-pattern against q: try both branches, dropping a
   branch whose lub is empty. *)
and orlub p1 p2 q =
  try
    let r1 = lub p1 q in
    try
      {q with pat_desc=(Tpat_or (r1,lub p2 q,None))}
    with
    | Empty -> r1
  with
  | Empty -> lub p2 q

(* Merge two label lists sorted by position, lub-ing common labels. *)
and record_lubs l1 l2 =
  let rec lub_rec l1 l2 = match l1,l2 with
    | [],_ -> l2
    | _,[] -> l1
    | (lid1, lbl1,p1)::rem1, (lid2, lbl2,p2)::rem2 ->
        if lbl1.lbl_pos < lbl2.lbl_pos then
          (lid1, lbl1,p1)::lub_rec rem1 l2
        else if lbl2.lbl_pos < lbl1.lbl_pos then
          (lid2, lbl2,p2)::lub_rec l1 rem2
        else
          (lid1, lbl1,lub p1 p2)::lub_rec rem1 rem2 in
  lub_rec l1 l2

and lubs ps qs = match ps,qs with
| p::ps, q::qs -> lub p q :: lubs ps qs
| _,_ -> []
(******************************)
(* Exported variant closing *)
(******************************)
(* Apply pressure to variants *)
(* Exported entry point: pair each pattern with a wildcard so each
   forms a two-column row, then run the internal check for effect. *)
let pressure_variants tdefs patl =
  let pss = List.map (fun p -> [p;omega]) patl in
  ignore (pressure_variants (Some tdefs) pss)
(*****************************)
(* Utilities for diagnostics *)
(*****************************)
(*
Build up a working pattern matrix by forgetting
about guarded patterns
*)
(* Guarded clauses are dropped: a guard may fail at run time, so its
   pattern cannot be counted on for exhaustiveness. *)
let rec initial_matrix = function
    [] -> []
  | {c_guard=Some _} :: rem -> initial_matrix rem
  | {c_guard=None; c_lhs=p} :: rem -> [p] :: initial_matrix rem
(******************************************)
(* Look for a row that matches some value *)
(******************************************)
(*
Useful for seeing if the example of
non-matched value can indeed be matched
(by a guarded clause)
*)
(* Raised when the match contains no guarded clause at all. *)
exception NoGuard

(* Matrix of all clauses (guarded or not), each with its pattern's
   location; raises NoGuard when [no_guard] is still true at the end. *)
let rec initial_all no_guard = function
  | [] ->
      if no_guard then
        raise NoGuard
      else
        []
  | {c_lhs=pat; c_guard; _} :: rem ->
      ([pat], pat.pat_loc) :: initial_all (no_guard && c_guard = None) rem
(* Drop the first column of every row (used under a wildcard). *)
let rec do_filter_var = function
  | (_::ps,loc)::rem -> (ps,loc)::do_filter_var rem
  | _ -> []

(* Specialize the location-carrying matrix for head pattern [q]. *)
let do_filter_one q pss =
  let rec filter_rec = function
    | ({pat_desc = Tpat_alias(p,_,_)}::ps,loc)::pss ->
        filter_rec ((p::ps,loc)::pss)
    | ({pat_desc = Tpat_or(p1,p2,_)}::ps,loc)::pss ->
        filter_rec ((p1::ps,loc)::(p2::ps,loc)::pss)
    | (p::ps,loc)::pss ->
        if simple_match q p
        then (simple_match_args q p @ ps, loc) :: filter_rec pss
        else filter_rec pss
    | _ -> [] in
  filter_rec pss
(* Find a row of [pss] that matches the value represented by [qs],
   returning its location.  (Fix: the comment before the last recursive
   call had lost its delimiters and was duplicated; restored.) *)
let rec do_match pss qs = match qs with
| [] ->
    begin match pss with
    | ([],loc)::_ -> Some loc
    | _ -> None
    end
| q::qs -> match q with
    | {pat_desc = Tpat_or (q1,q2,_)} ->
        begin match do_match pss (q1::qs) with
        | None -> do_match pss (q2::qs)
        | r -> r
        end
    | {pat_desc = Tpat_any} ->
        do_match (do_filter_var pss) qs
    | _ ->
        let q0 = normalize_pat q in
        (* [pss] will (or won't) match [q0::qs] regardless of the
           coherence of its first column. *)
        do_match (do_filter_one q0 pss) (simple_match_args q0 q @ qs)
(* Does some (possibly guarded) clause of [casel] match the value
   represented by pattern [v]?  Returns its location if so; [None]
   also when no clause is guarded (NoGuard). *)
let check_partial_all v casel =
  try
    let pss = initial_all true casel in
    do_match pss [v]
  with
  | NoGuard -> None
(************************)
(* Exhaustiveness check *)
(************************)
(* conversion from Typedtree.pattern to Parsetree.pattern list *)
(* Convert a typed pattern back to an untyped one, replacing every
   constructor and record label with a fresh identifier registered in
   the returned hash tables, so the pattern can be re-typechecked. *)
module Conv = struct
  open Parsetree
  let mkpat desc = Ast_helper.Pat.mk desc

  (* Counter used to generate fresh, non-clashing identifiers. *)
  let name_counter = ref 0
  let fresh name =
    let current = !name_counter in
    name_counter := !name_counter + 1;
    "#$" ^ name ^ string_of_int current

  (* Returns (untyped pattern, constructor table, label table). *)
  let conv typed =
    let constrs = Hashtbl.create 7 in
    let labels = Hashtbl.create 7 in
    let rec loop pat =
      match pat.pat_desc with
        Tpat_or (pa,pb,_) ->
          mkpat (Ppat_or (loop pa, loop pb))
      | Tpat_var (_, ({txt="*extension*"} as nm)) -> (* PR#7330 *)
          mkpat (Ppat_var nm)
      | Tpat_any
      | Tpat_var _ ->
          mkpat Ppat_any
      | Tpat_constant c ->
          mkpat (Ppat_constant (Untypeast.constant c))
      | Tpat_alias (p,_,_) -> loop p
      | Tpat_tuple lst ->
          mkpat (Ppat_tuple (List.map loop lst))
      | Tpat_construct (cstr_lid, cstr, lst) ->
          let id = fresh cstr.cstr_name in
          let lid = { cstr_lid with txt = Longident.Lident id } in
          Hashtbl.add constrs id cstr;
          let arg =
            match List.map loop lst with
            | [] -> None
            | [p] -> Some p
            | lst -> Some (mkpat (Ppat_tuple lst))
          in
          mkpat (Ppat_construct(lid, arg))
      | Tpat_variant(label,p_opt,_row_desc) ->
          let arg = Misc.may_map loop p_opt in
          mkpat (Ppat_variant(label, arg))
      | Tpat_record (subpatterns, _closed_flag) ->
          let fields =
            List.map
              (fun (_, lbl, p) ->
                let id = fresh lbl.lbl_name in
                Hashtbl.add labels id lbl;
                (mknoloc (Longident.Lident id), loop p))
              subpatterns
          in
          mkpat (Ppat_record (fields, Open))
      | Tpat_array lst ->
          mkpat (Ppat_array (List.map loop lst))
      | Tpat_lazy p ->
          mkpat (Ppat_lazy (loop p))
    in
    let ps = loop typed in
    (ps, constrs, labels)
end
(* Whether the counter-example contains an extension pattern *)
(* "*extension*" is the marker variable introduced by [build_other]. *)
let contains_extension pat =
  let r = ref false in
  let rec loop = function
      {pat_desc=Tpat_var (_, {txt="*extension*"})} ->
        r := true
    | p -> Typedtree.iter_pattern_desc loop p.pat_desc
  in loop pat; !r
(* Build an untyped or-pattern from its expected type *)
(* The single-wildcard case short-circuits with empty tables. *)
let ppat_of_type env ty =
  match pats_of_type env ty with
    [{pat_desc = Tpat_any}] ->
      (Conv.mkpat Parsetree.Ppat_any, Hashtbl.create 0, Hashtbl.create 0)
  | pats ->
      Conv.conv (orify_many pats)
(* Decide Total/Partial for a match, warning on non-exhaustiveness.
   [exhaust] is the exhaustiveness function to use; [pred], when given,
   re-typechecks candidate witnesses and discards the ill-typed ones.
   (Fixes: three comment spans — including a commented-out debug block —
   had lost their delimiters and were duplicated; restored.) *)
let do_check_partial ?pred exhaust loc casel pss = match pss with
| [] ->
    (*
      This can occur
      - For empty matches generated by ocamlp4 (no warning)
      - when all patterns have guards (then, casel <> [])
        (specific warning)
      Then match MUST be considered non-exhaustive,
      otherwise compilation of PM is broken.
    *)
    begin match casel with
    | [] -> ()
    | _ ->
        if Warnings.is_active Warnings.All_clauses_guarded then
          Location.prerr_warning loc Warnings.All_clauses_guarded
    end ;
    Partial
| ps::_ ->
    begin match exhaust None pss (List.length ps) with
    | Rnone -> Total
    | Rsome [u] ->
        let v =
          match pred with
          | Some pred ->
              let (pattern,constrs,labels) = Conv.conv u in
              let u' = pred constrs labels pattern in
              (* pretty_pat u;
                 begin match u' with
                   None -> prerr_endline ": impossible"
                 | Some _ -> prerr_endline ": possible"
                 end; *)
              u'
          | None -> Some u
        in
        begin match v with
          None -> Total
        | Some v ->
            if Warnings.is_active (Warnings.Partial_match "") then begin
              let errmsg =
                try
                  let buf = Buffer.create 16 in
                  let fmt = formatter_of_buffer buf in
                  top_pretty fmt v;
                  begin match check_partial_all v casel with
                  | None -> ()
                  | Some _ ->
                      (* This is 'Some loc', where loc is the location of
                         a possibly matching clause.
                         Forget about loc, because printing two locations
                         is a pain in the top-level *)
                      Buffer.add_string buf
                        "\n(However, some guarded clause may match this value.)"
                  end;
                  if contains_extension v then
                    Buffer.add_string buf
                      "\nMatching over values of extensible variant types \
                         (the *extension* above)\n\
                      must include a wild card pattern in order to be exhaustive."
                  ;
                  Buffer.contents buf
                with _ ->
                  ""
              in
              Location.prerr_warning loc (Warnings.Partial_match errmsg)
            end;
            Partial
        end
    | _ ->
        fatal_error "Parmatch.check_partial"
    end
(*
let do_check_partial_normal loc casel pss =
  do_check_partial exhaust loc casel pss
*)
(* GADT-aware exhaustiveness check: [pred] is a type-based oracle used
   to refine the candidate counter-example. *)
let do_check_partial_gadt pred loc casel pss =
  do_check_partial ~pred exhaust_gadt loc casel pss
(*****************)
(* Fragile check *)
(*****************)
(* Collect all data types in a pattern *)
(* Insert [path] at the end of [paths], unless an equal path is already
   present (order-preserving set insertion). *)
let rec add_path path paths =
  match paths with
  | [] -> [path]
  | p :: rest ->
      if Path.same path p then paths
      else p :: add_path path rest
(* A datatype path counts as extendable for the fragile-match check
   unless it is one of these fixed builtin datatypes. *)
let extendable_path path =
  let builtin =
    [Predef.path_bool; Predef.path_list; Predef.path_unit;
     Predef.path_option]
  in
  not (List.exists (Path.same path) builtin)
(* Accumulate in [r] the type paths of all extendable data types that
   are matched by constructor patterns in [p].  Only ordinary variant
   constructors (constant, block or unboxed representation) contribute
   a path; extension constructors do not, their type being extensible
   by definition. *)
let rec collect_paths_from_pat r p = match p.pat_desc with
| Tpat_construct(_, {cstr_tag=(Cstr_constant _|Cstr_block _|Cstr_unboxed)},ps)
    ->
    let path = get_type_path p.pat_type p.pat_env in
    List.fold_left
      collect_paths_from_pat
      (if extendable_path path then add_path path r else r)
      ps
| Tpat_any|Tpat_var _|Tpat_constant _| Tpat_variant (_,None,_) -> r
| Tpat_tuple ps | Tpat_array ps
| Tpat_construct (_, {cstr_tag=Cstr_extension _}, ps)->
    List.fold_left collect_paths_from_pat r ps
| Tpat_record (lps,_) ->
    List.fold_left
      (fun r (_, _, p) -> collect_paths_from_pat r p)
      r lps
| Tpat_variant (_, Some p, _) | Tpat_alias (p,_,_) -> collect_paths_from_pat r p
| Tpat_or (p1,p2,_) ->
    collect_paths_from_pat (collect_paths_from_pat r p1) p2
| Tpat_lazy p
    ->
    collect_paths_from_pat r p
(*
  Actual fragile check
   1. Collect data types in the patterns of the match.
   2. One exhaustivity check per datatype, considering that
      the type is extended.
*)
(* Fragile-match check: for each extendable datatype path [ext] that
   appears in the clauses, run the exhaustiveness check as if [ext]
   were extended with a fresh constructor; if the match would then be
   non-exhaustive ([Rnone]), it is fragile and a warning is emitted. *)
let do_check_fragile_param exhaust loc casel pss =
  let exts =
    List.fold_left
      (fun r c -> collect_paths_from_pat r c.c_lhs)
      [] casel in
  match exts with
  | [] -> ()
  | _ -> match pss with
    | [] -> ()
    | ps::_ ->
        List.iter
          (fun ext ->
            match exhaust (Some ext) pss (List.length ps) with
            | Rnone ->
                Location.prerr_warning
                  loc
                  (Warnings.Fragile_match (Path.name ext))
            | Rsome _ -> ())
          exts
(* Fragile check instantiated with the normal and GADT-aware
   exhaustiveness procedures. *)
let do_check_fragile_normal = do_check_fragile_param exhaust
let do_check_fragile_gadt = do_check_fragile_param exhaust_gadt
(********************************)
(* Exported unused clause check *)
(********************************)
(* Unused-clause check: warn about clauses (or or-pattern sub-patterns)
   that can never be selected, and about refutation clauses [pat -> .]
   that are in fact reachable.  [pred] is a type-based oracle deciding
   whether a candidate counter-example is well-typed.  [pref] holds the
   (unguarded) clauses already examined, most recent first. *)
let check_unused pred casel =
  if Warnings.is_active Warnings.Unused_match
  || List.exists (fun c -> c.c_rhs.exp_desc = Texp_unreachable) casel then
    let rec do_rec pref = function
      | [] -> ()
      | {c_lhs=q; c_guard; c_rhs} :: rem ->
          let qs = [q] in
          begin try
            let pss =
              get_mins le_pats (List.filter (compats qs) pref) in
            (* First look for redundant or partially redundant patterns *)
            let r = every_satisfiables (make_rows pss) (make_row qs) in
            let refute = (c_rhs.exp_desc = Texp_unreachable) in
            (* Do not warn for unused [pat -> .] *)
            if r = Unused && refute then () else
            let r =
              (* Do not refine if there are no other lines *)
              let skip =
                r = Unused || (not refute && pref = []) ||
                not(refute || Warnings.is_active Warnings.Unreachable_case) in
              if skip then r else
              (* Then look for empty patterns *)
              let sfs = satisfiables pss qs in
              if sfs = [] then Unused else
              let sfs =
                List.map (function [u] -> u | _ -> assert false) sfs in
              let u = orify_many sfs in
              (*Format.eprintf "%a@." pretty_val u;*)
              let (pattern,constrs,labels) = Conv.conv u in
              let pattern = {pattern with Parsetree.ppat_loc = q.pat_loc} in
              match pred refute constrs labels pattern with
                None when not refute ->
                  Location.prerr_warning q.pat_loc Warnings.Unreachable_case;
                  Used
              | _ -> r
            in
            match r with
            | Unused ->
                Location.prerr_warning
                  q.pat_loc Warnings.Unused_match
            | Upartial ps ->
                List.iter
                  (fun p ->
                    Location.prerr_warning
                      p.pat_loc Warnings.Unused_pat)
                  ps
            | Used -> ()
          with Empty | Not_found | NoGuard -> assert false
          end ;
          (* A guarded clause does not contribute to [pref]: its guard
             may fail, so it does not necessarily filter values out. *)
          if c_guard <> None then
            do_rec pref rem
          else
            do_rec ([q]::pref) rem in
    do_rec [] casel
(*********************************)
(* Exported irrefutability tests *)
(*********************************)
(* A pattern is irrefutable when it matches everything the wildcard
   [omega] matches, i.e. any value of the type. *)
let irrefutable pat = le_pat pat omega
(* [inactive ~partial pat] holds when matching against [pat] performs
   no observable effect (no lazy forcing, no read of mutable state).
   Conservatively false for a [Partial] match. *)
let inactive ~partial pat =
  match partial with
  | Partial -> false
  | Total -> begin
      let rec loop pat =
        match pat.pat_desc with
        | Tpat_lazy _ | Tpat_array _ ->
            (* forcing a lazy value / reading a mutable array cell *)
            false
        | Tpat_any | Tpat_var _ | Tpat_variant (_, None, _) ->
            true
        | Tpat_constant c -> begin
            match c with
            (* string patterns are effect-free only when strings are
               immutable (-safe-string) *)
            | Const_string _ -> Config.safe_string
            | Const_int _ | Const_char _ | Const_float _
            | Const_int32 _ | Const_int64 _ | Const_nativeint _ -> true
          end
        | Tpat_tuple ps | Tpat_construct (_, _, ps) ->
            List.for_all (fun p -> loop p) ps
        | Tpat_alias (p,_,_) | Tpat_variant (_, Some p, _) ->
            loop p
        | Tpat_record (ldps,_) ->
            List.for_all
              (fun (_, lbl, p) -> lbl.lbl_mut = Immutable && loop p)
              ldps
        | Tpat_or (p,q,_) ->
            loop p && loop q
      in
      loop pat
    end
(*********************************)
(* Exported exhaustiveness check *)
(*********************************)
(*
Fragile check is performed when required and
on exhaustive matches only.
*)
(* Run the exhaustiveness check on the minimal clause matrix, then the
   fragile check — the latter only on exhaustive matches and only when
   the fragile-match warning is active. *)
let check_partial_param do_check_partial do_check_fragile loc casel =
  let pss = initial_matrix casel in
  let pss = get_mins le_pats pss in
  let total = do_check_partial loc casel pss in
  if
    total = Total && Warnings.is_active (Warnings.Fragile_match "")
  then begin
    do_check_fragile loc casel pss
  end ;
  total
(*let check_partial =
    check_partial_param
      do_check_partial_normal
      do_check_fragile_normal*)
(* Exported exhaustiveness check (GADT-aware). *)
let check_partial_gadt pred loc casel =
  check_partial_param (do_check_partial_gadt pred)
    do_check_fragile_gadt loc casel
(*************************************)
(* Ambiguous variable in or-patterns *)
(*************************************)
(*
   Specification: ambiguous variables in or-patterns.
The semantics of or-patterns in OCaml is specified with
a left-to-right bias: a value [v] matches the pattern [p | q] if it
matches [p] or [q], but if it matches both, the environment
captured by the match is the environment captured by [p], never the
one captured by [q].
While this property is generally well-understood, one specific case
where users expect a different semantics is when a pattern is
followed by a when-guard: [| p when g -> e]. Consider for example:
| ((Const x, _) | (_, Const x)) when is_neutral x -> branch
The semantics is clear: match the scrutinee against the pattern, if
it matches, test the guard, and if the guard passes, take the
branch.
However, consider the input [(Const a, Const b)], where [a] fails
the test [is_neutral f], while [b] passes the test [is_neutral
b]. With the left-to-right semantics, the clause above is *not*
taken by its input: matching [(Const a, Const b)] against the
or-pattern succeeds in the left branch, it returns the environment
[x -> a], and then the guard [is_neutral a] is tested and fails,
the branch is not taken. Most users, however, intuitively expect
that any pair that has one side passing the test will take the
branch. They assume it is equivalent to the following:
| (Const x, _) when is_neutral x -> branch
| (_, Const x) when is_neutral x -> branch
while it is not.
The code below is dedicated to finding these confusing cases: the
cases where a guard uses "ambiguous" variables, that are bound to
different parts of the scrutinees by different sides of
a or-pattern. In other words, it finds the cases where the
specified left-to-right semantics is not equivalent to
a non-deterministic semantics (any branch can be taken) relatively
to a specific guard.
*)
module IdSet = Set.Make(Ident)

(* Set of identifiers bound by a pattern. *)
let pattern_vars p = IdSet.of_list (Typedtree.pat_bound_idents p)
(* Row for ambiguous variable search,
unseen is the traditional pattern row,
seen is a list of position bindings *)
type amb_row =
  { unseen : pattern list ;   (* patterns of the row still to examine *)
    seen : IdSet.t list; }    (* one binding set per position already examined *)
(* Push binding variables now *)
(* Push the head pattern [p] of a row: resolve variable and alias
   bindings into [r] now, split an or-pattern into two rows, and
   otherwise emit [(head, row)] with [r] recorded for this position.
   [k] is the continuation list of rows already produced. *)
let rec do_push r p ps seen k = match p.pat_desc with
| Tpat_alias (p,x,_) -> do_push (IdSet.add x r) p ps seen k
| Tpat_var (x,_) ->
    (omega,{ unseen = ps; seen=IdSet.add x r::seen; })::k
| Tpat_or (p1,p2,_) ->
    do_push r p1 ps seen (do_push r p2 ps seen k)
| _ ->
    (p,{ unseen = ps; seen = r::seen; })::k
(* Apply [do_push] to every row of the matrix; rows must be non-empty. *)
let rec push_vars = function
  | [] -> []
  | { unseen = [] }::_ -> assert false
  | { unseen = p::ps; seen; }::rem ->
      do_push IdSet.empty p ps seen (push_vars rem)
(* Stable variables of a non-empty matrix of fully-examined rows: at
   each position, intersect the binding sets of all rows, then take
   the union over positions. *)
let collect_stable = function
  | [] -> assert false
  | { seen=xss; _}::rem ->
      let rec c_rec xss = function
        | [] -> xss
        | {seen=yss; _}::rem ->
            let xss = List.map2 IdSet.inter xss yss in
            c_rec xss rem in
      let inters = c_rec xss rem in
      List.fold_left IdSet.union IdSet.empty inters
(*********************************************)
(* Filtering utilities for our specific rows *)
(*********************************************)
(*
  Take a pattern matrix as a list (rows) of lists (columns) of patterns
| p1, p2, .., pn
| q1, q2, .., qn
| r1, r2, .., rn
| ...
We split this matrix into a list of sub-matrices, one for each head
constructor appearing in the leftmost column. For each row whose
left column starts with a head constructor, remove this head
column, prepend one column for each argument of the constructor,
and add the resulting row in the sub-matrix corresponding to this
head constructor.
Rows whose left column is omega (the Any pattern _) may match any
head constructor, so they are added to all groups.
The list of sub-matrices is represented as a list of pair
(head constructor, submatrix)
*)
(* Split a matrix of (head, row) pairs into one sub-matrix per head
   constructor; omega-headed rows are added to every group.  Returns
   an association list (head constructor, sub-matrix). *)
let filter_all =
  (* the head constructor (as a pattern with omega arguments) of
     a pattern *)
  let discr_head pat =
    match pat.pat_desc with
    | Tpat_record (lbls, closed) ->
        (* a partial record pattern { f1 = p1; f2 = p2; _ }
           needs to be expanded, otherwise matching against this head
           would drop the pattern arguments for non-mentioned fields *)
        let lbls = all_record_args lbls in
        normalize_pat { pat with pat_desc = Tpat_record (lbls, closed) }
    | _ -> normalize_pat pat
  in
  (* insert a row of head [p] and rest [r] into the right group *)
  let rec insert p r env = match env with
  | [] ->
      (* if no group matched this row, it has a head constructor that
         was never seen before; add a new sub-matrix for this head *)
      let p0 = discr_head p in
      [p0,[{ r with unseen = simple_match_args p0 p @ r.unseen }]]
  | (q0,rs) as bd::env ->
      if simple_match q0 p then begin
        (* the row belongs to this group: prepend the constructor
           arguments to the remaining patterns (was missing
           [p @ r.unseen], which did not even type-check) *)
        let r = { r with unseen = simple_match_args q0 p @ r.unseen; } in
        (q0,r::rs)::env
      end
      else bd::insert p r env in
  (* insert a row of head omega into all groups *)
  let insert_omega r env =
    List.map
      (fun (q0,rs) ->
        let r =
          { r with unseen = simple_match_args q0 omega @ r.unseen; } in
        (q0,r::rs))
      env
  in
  let rec filter_rec env = function
    | [] -> env
    | ({pat_desc=(Tpat_var _|Tpat_alias _|Tpat_or _)},_)::_ -> assert false
    | ({pat_desc=Tpat_any}, _)::rs -> filter_rec env rs
    | (p,r)::rs -> filter_rec (insert p r env) rs in
  let rec filter_omega env = function
    | [] -> env
    | ({pat_desc=(Tpat_var _|Tpat_alias _|Tpat_or _)},_)::_ -> assert false
    | ({pat_desc=Tpat_any},r)::rs -> filter_omega (insert_omega r env) rs
    | _::rs -> filter_omega env rs in
  fun rs ->
    (* first insert the rows with head constructors,
       to get the definitive list of groups *)
    let env = filter_rec [] rs in
    (* then add the omega rows to all groups *)
    filter_omega env rs
(* Compute stable bindings *)
(* Stable variables of a matrix: variables bound to the same position
   of the scrutinee by every row (hence by every or-pattern branch). *)
let rec do_stable rs = match rs with
| [] -> assert false (* No empty matrix *)
| { unseen=[]; _ }::_ ->
    (* all rows fully examined: intersect per-position bindings *)
    collect_stable rs
| _ ->
    let rs = push_vars rs in
    if not (all_coherent (first_column rs)) then begin
      (* If the first column is incoherent, then all the variables of this
         matrix are stable. *)
      List.fold_left (fun acc (_, { seen; _ }) ->
        List.fold_left IdSet.union acc seen
      ) IdSet.empty rs
    end else begin
      (* If the column is ill-typed but deemed coherent, we might spuriously
         warn about some variables being unstable.
         As sad as that might be, the warning can be silenced by splitting the
         or-pattern... *)
      match filter_all rs with
      | [] ->
          do_stable (List.map snd rs)
      | (_,rs)::env ->
          List.fold_left
            (fun xs (_,rs) -> IdSet.inter xs (do_stable rs))
            (do_stable rs) env
    end

(* Variables of [p] that are bound to the same position by every
   or-pattern branch. *)
let stable p = do_stable [{unseen=[p]; seen=[];}]
(*
  All identifier paths that appear in an expression that occurs
as a clause right hand side or guard.
The function is rather complex due to the compilation of
unpack patterns by introducing code in rhs expressions
and **guards**.
For pattern (module M:S) -> e the code is
let module M_mod = unpack M .. in e
Hence M is "free" in e iff M_mod is free in e.
Not doing so will yield excessive warning in
(module (M:S) } ...) when true -> ....
as M is always present in
let module M_mod = unpack M .. in true
*)
(* Heads of all identifier paths free in [exp] (a clause right-hand
   side or guard), with a correction for compiled unpack patterns:
   an occurrence of the introduced module alias counts as an
   occurrence of the original identifier. *)
let all_rhs_idents exp =
  let ids = ref IdSet.empty in
  let module Iterator = TypedtreeIter.MakeIterator(struct
    include TypedtreeIter.DefaultIteratorArgument
    let enter_expression exp = match exp.exp_desc with
    | Texp_ident (path, _lid, _descr) ->
        List.iter
          (fun id -> ids := IdSet.add id !ids)
          (Path.heads path)
    | _ -> ()

    (* Very hackish, detect unpack pattern compilation
       and perform "indirect check for them" *)
    let is_unpack exp =
      List.exists
        (fun (attr, _) -> attr.txt = "#modulepat") exp.exp_attributes

    let leave_expression exp =
      if is_unpack exp then begin match exp.exp_desc with
      | Texp_letmodule
          (id_mod,_,
           {mod_desc=
            Tmod_unpack ({exp_desc=Texp_ident (Path.Pident id_exp,_,_)},_)},
           _) ->
          (* the unpacked identifier necessarily occurs in the
             generated [let module] binding itself *)
          assert (IdSet.mem id_exp !ids) ;
          (* discount that occurrence unless the module alias is
             actually used in the body *)
          if not (IdSet.mem id_mod !ids) then begin
            ids := IdSet.remove id_exp !ids
          end
      | _ -> assert false
      end
  end) in
  Iterator.iter_expression exp;
  !ids
(* Warn about variables that a when-guard uses and that the or-pattern
   branches of the clause bind to different positions of the
   scrutinee (left-to-right or-pattern semantics may then surprise). *)
let check_ambiguous_bindings =
  let open Warnings in
  let warn0 = Ambiguous_pattern [] in
  fun cases ->
    if is_active warn0 then
      List.iter
        (fun case -> match case with
        | { c_guard=None ; _} -> ()
        | { c_lhs=p; c_guard=Some g; _} ->
            (* variables both bound by the pattern and used in the guard *)
            let all =
              IdSet.inter (pattern_vars p) (all_rhs_idents g) in
            if not (IdSet.is_empty all) then begin
              let st = stable p in
              let ambiguous = IdSet.diff all st in
              if not (IdSet.is_empty ambiguous) then begin
                let pps = IdSet.elements ambiguous |> List.map Ident.name in
                let warn = Ambiguous_pattern pps in
                Location.prerr_warning p.pat_loc warn
              end
            end)
        cases
| null | https://raw.githubusercontent.com/jaredly/reason-language-server/ce1b3f8ddb554b6498c2a83ea9c53a6bdf0b6081/ocaml_typing/406/parmatch.ml | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Detection of partial matches and unused match cases.
***********************************
***********************************
*****************
*****************
the rows are non-empty!
only omegas on the column: the column is coherent.
*********************
Compatibility check
*********************
same label on both sides
Variables match any value
Structural induction
Constructors, with special case for extension
More standard stuff
Empty pattern
**************************************
**************************************
***********************************
Values as patterns pretty printer
***********************************
do not show lbl=_
we assume that there is no label repetitions here
**************************
**************************
Check top matching
extract record fields as a whole
Raise Not_found when pos is not present in arg
Build argument list when p2 >= p1, where p1 is a simple pattern
Normalize a pattern ->
all arguments are omega (simple pattern) and no more variables
Build normalized (cf. supra) discriminating pattern,
in the non-data type case
In case a matching value is found, set actual arguments
of the matching pattern.
case any is used in matching.ml
Filter pss in the ``extra case''. This applies :
- According to an extra constructor (datatype case, non-complete signature).
- According to anything (all-variables case).
Variant related functions
mark constructor lines for failure when they are incomplete
m=false means that this tag is not explicitly matched
this unification cannot fail
extensions
closing=true, we are considering the variant as closed
m=true, do not discard matched tags, rather warn
Written as a non-fragile matching, PR#7451 originated from a fragile matching below.
complement constructor tags
build a pattern from a constructor list
Sends back a pattern that complements constructor tags all_tag
Auxiliary for build_other
let c = {c with cstr_name = "*extension*"} in
PR#7330
| Reither _
This one is called after erasing pattern info
activate this code for checking non-gadt constructors
Now another satisfiable function that additionally
supplies an example of a matching value.
This function should be called for exhaustiveness check only.
No matching value
This matching value
first column of pss is made of variables only
cannot occur, since constructors don't make a full signature
strictly more powerful than exhaust; however, exhaust
was kept for backwards compatibility
We're considering an ill-typed branch, we won't actually be able to
produce a well typed value taking that branch.
cannot occur, since constructors don't make a full signature
The following line is needed to compile stdlib/printf.ml
Another exhaustiveness check, enforcing variant typing.
Note that it does not check exact exhaustiveness, but whether a
matching could be made exhaustive by closing all variant types.
When this is true of all other columns, the current column is left
open (even if it means that the whole matching is not exhaustive as
a result).
When this is false for the matrix minus the current column, and the
current column is composed of variant tags, we close the variant
(even if it doesn't help in making the matching exhaustive).
Yet another satisfiable function
Useful pattern
Useless pattern
Mixed, with list of useless ones
this row type enable column processing inside the matrix
- left -> elements not to be processed,
- right -> elements to be processed
let pretty_row {ors=ors ; no_ors=no_ors; active=active} =
pretty_line ors ; prerr_string " *" ;
pretty_line no_ors ; prerr_string " *" ;
pretty_line active
let pretty_rows rs =
prerr_endline "begin matrix" ;
List.iter
(fun r ->
pretty_row r ;
prerr_endline "")
rs ;
prerr_endline "end matrix"
Initial build
Useful to detect and expand or pats inside as pats
Standard or-args for left-to-right matching
Just remove current column
Current column has been processed
Back to normal matrices
propose or pats for expansion
idem for matrices
the rows are non-empty!
qs is now partitionned, check usefulness
no or-patterns
forget about ``all-variable'' columns now
otherwise this is direct food for satisfiable
syntactically generated or-pats should not be expanded
this is a real or-pattern
Ah Jacques...
standard case, filter matrix
The handling of incoherent matrices is kept in line with
[satisfiable]
le_pat p q means, forall V, V matches q implies V matches p
In all other cases, enumeration is performed
****************************
Exported variant closing
****************************
Apply pressure to variants
***************************
***************************
Build up a working pattern matrix by forgetting
about guarded patterns
****************************************
Look for a row that matches some value
****************************************
Useful for seeing if the example of
non-matched value can indeed be matched
(by a guarded clause)
**********************
Exhaustiveness check
**********************
PR#7330
Whether the counter-example contains an extension pattern
Build an untyped or-pattern from its expected type
***************
Fragile check
***************
Collect all data types in a pattern
******************************
Exported unused clause check
******************************
Do not warn for unused [pat -> .]
Do not refine if there are no other lines
Then look for empty patterns
Format.eprintf "%a@." pretty_val u;
*******************************
Exported irrefutability tests
*******************************
*******************************
Exported exhaustiveness check
*******************************
Fragile check is performed when required and
on exhaustive matches only.
***********************************
Ambiguous variable in or-patterns
***********************************
Row for ambiguous variable search,
unseen is the traditional pattern row,
seen is a list of position bindings
Push binding variables now
*******************************************
Filtering utilities for our specific rows
*******************************************
the head constructor (as a pattern with omega arguments) of
a pattern
a partial record pattern { f1 = p1; f2 = p2; _ }
needs to be expanded, otherwise matching against this head
would drop the pattern arguments for non-mentioned fields
insert a row of head [p] and rest [r] into the right group
if no group matched this row, it has a head constructor that
was never seen before; add a new sub-matrix for this head
insert a row of head omega into all groups
then add the omega rows to all groups
Compute stable bindings
No empty matrix
If the column is ill-typed but deemed coherent, we might spuriously
warn about some variables being unstable.
As sad as that might be, the warning can be silenced by splitting the
or-pattern...
Very hackish, detect unpack pattern compilation
and perform "indirect check for them" | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
open Misc
open Asttypes
open Types
open Typedtree
Utilities for building patterns
let make_pat desc ty tenv =
{pat_desc = desc; pat_loc = Location.none; pat_extra = [];
pat_type = ty ; pat_env = tenv;
pat_attributes = [];
}
let omega = make_pat Tpat_any Ctype.none Env.empty
let extra_pat =
make_pat
(Tpat_var (Ident.create "+", mknoloc "+"))
Ctype.none Env.empty
let rec omegas i =
if i <= 0 then [] else omega :: omegas (i-1)
let omega_list l = List.map (fun _ -> omega) l
let zero = make_pat (Tpat_constant (Const_int 0)) Ctype.none Env.empty
Coherence check
For some of the operations we do in this module , we would like ( because it
simplifies matters ) to assume that patterns appearing on a given column in a
pattern matrix are /coherent/ ( think " of the same type " ) .
Unfortunately that is not always true .
Consider the following ( well - typed ) example :
{ [
type _ t = S : string t | U : unit t
let f ( type a ) ( t1 : a t ) ( t2 : a t ) ( a : a ) =
match t1 , t2 , a with
| U , _ , ( ) - > ( )
| _ , S , " " - > ( )
] }
Clearly the 3rd column contains incoherent patterns .
On the example above , most of the algorithms will explore the pattern matrix
as illustrated by the following tree :
{ v
S
------- > | " " |
U | S , " " | _ _ / | ( ) |
-------- > | _ , ( ) | \ ¬ S
| U , _ , ( ) | _ _ / ------- > | ( ) |
| _ , S , " " | \
--------- > | S , " " | ---------- > | " " |
¬ U S
v }
where following an edge labelled by a pattern P means " assuming the value I
am matching on is filtered by [ P ] on the column I am currently looking at ,
then the following submatrix is still reachable " .
Notice that at any point of that tree , if the first column of a matrix is
incoherent , then the branch leading to it can only be taken if the scrutinee
is ill - typed .
In the example above the only case where we have a matrix with an incoherent
first column is when we consider [ t1 , t2 , a ] to be [ U , S , ... ] . However such
a value would be ill - typed , so we can never actually get there .
Checking the first column at each step of the recursion and making the
concious decision of " aborting " the algorithm whenever the first column
becomes incoherent , allows us to retain the initial assumption in later
stages of the algorithms .
---
N.B. two patterns can be considered coherent even though they might not be of
the same type .
That 's in part because we only care about the " head " of patterns and leave
checking coherence of subpatterns for the next steps of the algorithm :
( ' a ' , ' b ' ) and ( 1 , ( ) ) will be deemed coherent because they are both a tuples
of arity 2 ( we 'll notice at a later stage the incoherence of ' a ' and 1 ) .
But also because it can be hard / costly to determine exactly whether two
patterns are of the same type or not ( eg . in the example above with _ and S ,
but see also the module [ Coherence_illustration ] in
testsuite / tests / basic - more / robustmatch.ml ) .
For the moment our weak , loosely - syntactic , coherence check seems to be
enough and we leave it to each user to consider ( and document ! ) what happens
when an " incoherence " is not detected by this check .
simplifies matters) to assume that patterns appearing on a given column in a
pattern matrix are /coherent/ (think "of the same type").
Unfortunately that is not always true.
Consider the following (well-typed) example:
{[
type _ t = S : string t | U : unit t
let f (type a) (t1 : a t) (t2 : a t) (a : a) =
match t1, t2, a with
| U, _, () -> ()
| _, S, "" -> ()
]}
Clearly the 3rd column contains incoherent patterns.
On the example above, most of the algorithms will explore the pattern matrix
as illustrated by the following tree:
{v
S
-------> | "" |
U | S, "" | __/ | () |
--------> | _, () | \ ¬ S
| U, _, () | __/ -------> | () |
| _, S, "" | \
---------> | S, "" | ----------> | "" |
¬ U S
v}
where following an edge labelled by a pattern P means "assuming the value I
am matching on is filtered by [P] on the column I am currently looking at,
then the following submatrix is still reachable".
Notice that at any point of that tree, if the first column of a matrix is
incoherent, then the branch leading to it can only be taken if the scrutinee
is ill-typed.
In the example above the only case where we have a matrix with an incoherent
first column is when we consider [t1, t2, a] to be [U, S, ...]. However such
a value would be ill-typed, so we can never actually get there.
Checking the first column at each step of the recursion and making the
concious decision of "aborting" the algorithm whenever the first column
becomes incoherent, allows us to retain the initial assumption in later
stages of the algorithms.
---
N.B. two patterns can be considered coherent even though they might not be of
the same type.
That's in part because we only care about the "head" of patterns and leave
checking coherence of subpatterns for the next steps of the algorithm:
('a', 'b') and (1, ()) will be deemed coherent because they are both a tuples
of arity 2 (we'll notice at a later stage the incoherence of 'a' and 1).
But also because it can be hard/costly to determine exactly whether two
patterns are of the same type or not (eg. in the example above with _ and S,
but see also the module [Coherence_illustration] in
testsuite/tests/basic-more/robustmatch.ml).
For the moment our weak, loosely-syntactic, coherence check seems to be
enough and we leave it to each user to consider (and document!) what happens
when an "incoherence" is not detected by this check.
*)
let simplify_head_pat p k =
let rec simplify_head_pat p k =
match p.pat_desc with
| Tpat_alias (p,_,_) -> simplify_head_pat p k
| Tpat_var (_,_) -> omega :: k
| Tpat_or (p1,p2,_) -> simplify_head_pat p1 (simplify_head_pat p2 k)
| _ -> p :: k
in simplify_head_pat p k
let rec simplified_first_col = function
| [] -> []
| (p::_) :: rows ->
simplify_head_pat p (simplified_first_col rows)
Given the simplified first column of a matrix , this function first looks for
a " discriminating " pattern on that column ( i.e. a non - omega one ) and then
check that every other head pattern in the column is coherent with that one .
a "discriminating" pattern on that column (i.e. a non-omega one) and then
check that every other head pattern in the column is coherent with that one.
*)
let all_coherent column =
let coherent_heads hp1 hp2 =
match hp1.pat_desc, hp2.pat_desc with
| (Tpat_var _ | Tpat_alias _ | Tpat_or _), _
| _, (Tpat_var _ | Tpat_alias _ | Tpat_or _) ->
assert false
| Tpat_construct (_, c, _), Tpat_construct (_, c', _) ->
c.cstr_consts = c'.cstr_consts
&& c.cstr_nonconsts = c'.cstr_nonconsts
| Tpat_constant c1, Tpat_constant c2 -> begin
match c1, c2 with
| Const_char _, Const_char _
| Const_int _, Const_int _
| Const_int32 _, Const_int32 _
| Const_int64 _, Const_int64 _
| Const_nativeint _, Const_nativeint _
| Const_float _, Const_float _
| Const_string _, Const_string _ -> true
| ( Const_char _
| Const_int _
| Const_int32 _
| Const_int64 _
| Const_nativeint _
| Const_float _
| Const_string _), _ -> false
end
| Tpat_tuple l1, Tpat_tuple l2 -> List.length l1 = List.length l2
| Tpat_record ((_, lbl1, _) :: _, _), Tpat_record ((_, lbl2, _) :: _, _) ->
Array.length lbl1.lbl_all = Array.length lbl2.lbl_all
| Tpat_any, _
| _, Tpat_any
| Tpat_record ([], _), Tpat_record (_, _)
| Tpat_record (_, _), Tpat_record ([], _)
| Tpat_variant _, Tpat_variant _
| Tpat_array _, Tpat_array _
| Tpat_lazy _, Tpat_lazy _ -> true
| _, _ -> false
in
match
List.find (fun head_pat ->
match head_pat.pat_desc with
| Tpat_var _ | Tpat_alias _ | Tpat_or _ -> assert false
| Tpat_any -> false
| _ -> true
) column
with
| exception Not_found ->
true
| discr_pat ->
List.for_all (coherent_heads discr_pat) column
(* Project a simplified matrix onto its first column (the head pattern of
   each row, i.e. the left component of every pair). *)
let first_column simplified_matrix =
  List.map (fun (head, _rest) -> head) simplified_matrix
(* Patterns p and q compatible means:
   there exists value V that matches both, However....

   The case of extension types is dubious, as constructor rebind permits
   that different constructors are the same (and are thus compatible).

   Compilation must take this into account, consider:

   type t = ..
   type t += A|B
   type t += C=A

   let f x y = match x,y with
   | true,A -> '1'
   | _,C    -> '2'
   | false,A -> '3'
   | _,_    -> '_'

   As C is bound to A the value of f false A is '2' (and not '3' as it would
   be in the absence of rebinding).

   Not considering rebinding, patterns "false,A" and "_,C" are incompatible
   and the compiler can swap the second and third clause, resulting in the
   (more efficiently compiled) matching

   match x,y with
   | true,A -> '1'
   | false,A -> '3'
   | _,C    -> '2'
   | _,_    -> '_'

   This is not correct: when C is bound to A, "f false A" returns '2' (not '3')

   However, diagnostics do not take constructor rebinding into account.
   Notice, that due to module abstraction constructor rebinding is hidden.

   module X : sig type t = .. type t += A|B end = struct
     type t = ..
     type t += A
     type t += B=A
   end

   open X

   let f x = match x with
   | A -> '1'
   | B -> '2'
   | _ -> '_'

   The second clause above will NOT (and cannot) be flagged as useless.

   Finally, there are two compatibility fonction
     compat p q     ---> 'syntactic compatibility, used for diagnostics.
     may_compat p q ---> a safe approximation of possible compat,
                         for compilation
*)
(* [is_absent tag row]: is variant [tag] marked absent in the row type? *)
let is_absent tag row = Btype.row_field tag !row = Rabsent

(* [is_absent_pat p]: true iff [p] is a variant pattern whose tag is
   absent from its row type (no value can match it). *)
let is_absent_pat p = match p.pat_desc with
| Tpat_variant (tag, _, row) -> is_absent tag row
| _ -> false
(* Total order on constants used by pattern analysis.
   Floats are compared by their numeric value (the AST stores them as
   strings, so e.g. "1.0" and "1.00" must compare equal); strings by
   their contents, ignoring the delimiter kind; everything else falls
   back to structural comparison. *)
let const_compare x y =
  match x,y with
  | Const_float f1, Const_float f2 ->
      Pervasives.compare (float_of_string f1) (float_of_string f2)
  | Const_string (s1, _), Const_string (s2, _) ->
      String.compare s1 s2
  | _, _ -> Pervasives.compare x y
(* [records_args l1 l2]: align the argument lists of two record patterns,
   padding the fields missing on either side with [omega], so that the
   two returned lists can be compared position by position.
   Invariant: fields are already sorted by Typecore.type_label_a_list.
   Restored the final [else] branch, which was dropped together with its
   inline comment (leaving a unit-typed [if] and an orphaned call). *)
let records_args l1 l2 =
  let rec combine r1 r2 l1 l2 = match l1,l2 with
  | [],[] -> List.rev r1, List.rev r2
  | [],(_,_,p2)::rem2 -> combine (omega::r1) (p2::r2) [] rem2
  | (_,_,p1)::rem1,[] -> combine (p1::r1) (omega::r2) rem1 []
  | (_,lbl1,p1)::rem1, ( _,lbl2,p2)::rem2 ->
      if lbl1.lbl_pos < lbl2.lbl_pos then
        combine (p1::r1) (omega::r2) rem1 l2
      else if lbl1.lbl_pos > lbl2.lbl_pos then
        combine (omega::r1) (p2::r2) l1 rem2
      else (* same label on both sides *)
        combine (p1::r1) (p2::r2) rem1 rem2 in
  combine [] [] l1 l2
(* Compatibility of two patterns, parameterised by the notion of equality
   on constructors (see the comment above: syntactic vs. safe
   approximation). *)
module Compat
    (Constr:sig
      val equal :
          Types.constructor_description ->
            Types.constructor_description ->
              bool
    end) = struct

  (* [compat p q]: can some value match both [p] and [q]? *)
  let rec compat p q = match p.pat_desc,q.pat_desc with
(* Variables match any value *)
  | ((Tpat_any|Tpat_var _),_)
  | (_,(Tpat_any|Tpat_var _)) -> true
(* Structural induction *)
  | Tpat_alias (p,_,_),_ -> compat p q
  | _,Tpat_alias (q,_,_) -> compat p q
  | Tpat_or (p1,p2,_),_ ->
      (compat p1 q || compat p2 q)
  | _,Tpat_or (q1,q2,_) ->
      (compat p q1 || compat p q2)
(* Constructors, constants, ... must agree head-wise, then recurse *)
  | Tpat_construct (_, c1,ps1), Tpat_construct (_, c2,ps2) ->
      Constr.equal c1 c2 && compats ps1 ps2
  | Tpat_variant(l1,op1, _), Tpat_variant(l2,op2,_) ->
      l1=l2 && ocompat op1 op2
  | Tpat_constant c1, Tpat_constant c2 ->
      const_compare c1 c2 = 0
  | Tpat_tuple ps, Tpat_tuple qs -> compats ps qs
  | Tpat_lazy p, Tpat_lazy q -> compat p q
  | Tpat_record (l1,_),Tpat_record (l2,_) ->
      let ps,qs = records_args l1 l2 in
      compats ps qs
  | Tpat_array ps, Tpat_array qs ->
      List.length ps = List.length qs &&
      compats ps qs
  | _,_ -> false

  (* Compatibility of optional variant arguments. *)
  and ocompat op oq = match op,oq with
  | None,None -> true
  | Some p,Some q -> compat p q
  | (None,Some _)|(Some _,None) -> false

  (* Pointwise compatibility of two pattern lists of equal length. *)
  and compats ps qs = match ps,qs with
  | [], [] -> true
  | p::ps, q::qs -> compat p q && compats ps qs
  | _,_ -> false
end
(* Syntactic compatibility: constructors are equal iff their tags are. *)
module SyntacticCompat =
  Compat
    (struct
      let equal c1 c2 = Types.equal_tag c1.cstr_tag c2.cstr_tag
    end)

(* Default compatibility used throughout this module. *)
let compat = SyntacticCompat.compat
and compats = SyntacticCompat.compats
(* Due to (potential) rebinding, two extension constructors
   of the same arity type may equal *)
(* Utilities for retrieving type paths *)

(* May need a clean copy, cf. PR#4745 *)
(* Copy a type expression unless it is already generalised, so that
   expanding it below cannot mutate levels of the original (cf. PR#4745). *)
let clean_copy ty =
  if ty.level = Btype.generic_level then ty
  else Subst.type_expr Subst.identity ty
(* [get_type_path ty tenv]: the path of the type constructor that [ty]
   expands to; fatal error if [ty] is not a Tconstr after expansion. *)
let get_type_path ty tenv =
  let ty = Ctype.repr (Ctype.expand_head tenv (clean_copy ty)) in
  match ty.desc with
  | Tconstr (path,_,_) -> path
  | _ -> fatal_error "Parmatch.get_type_path"
open Format
;;
(* Is this constructor the list cons "::" ?  (used to print lists with
   infix notation) *)
let is_cons cstr = cstr.cstr_name = "::"
(* Render a constant in source-like syntax (with the l/L/n suffixes of
   boxed integer literals). *)
let pretty_const c = match c with
| Const_int i -> Printf.sprintf "%d" i
| Const_char c -> Printf.sprintf "%C" c
| Const_string (s, _) -> Printf.sprintf "%S" s
| Const_float f -> Printf.sprintf "%s" f
| Const_int32 i -> Printf.sprintf "%ldl" i
| Const_int64 i -> Printf.sprintf "%LdL" i
| Const_nativeint i -> Printf.sprintf "%ndn" i
(* Pretty-printing of (counter-example) patterns as the values they
   denote, used in warning messages.  The mutually recursive helpers
   handle list car/cdr notation, argument parenthesisation, or-patterns,
   separated sequences and record fields.
   Restored the record-field filter case that was dropped together with
   its inline comment: fields bound to _ carry no information and are
   elided, whereas the truncated [function | _ -> true] kept everything. *)
let rec pretty_val ppf v =
  match v.pat_extra with
      (cstr, _loc, _attrs) :: rem ->
        begin match cstr with
        | Tpat_unpack ->
            fprintf ppf "@[(module %a)@]" pretty_val { v with pat_extra = rem }
        | Tpat_constraint _ ->
            fprintf ppf "@[(%a : _)@]" pretty_val { v with pat_extra = rem }
        | Tpat_type _ ->
            fprintf ppf "@[(# %a)@]" pretty_val { v with pat_extra = rem }
        | Tpat_open _ ->
            fprintf ppf "@[(# %a)@]" pretty_val { v with pat_extra = rem }
        end
    | [] ->
  match v.pat_desc with
  | Tpat_any -> fprintf ppf "_"
  | Tpat_var (x,_) -> fprintf ppf "%s" (Ident.name x)
  | Tpat_constant c -> fprintf ppf "%s" (pretty_const c)
  | Tpat_tuple vs ->
      fprintf ppf "@[(%a)@]" (pretty_vals ",") vs
  | Tpat_construct (_, cstr, []) ->
      fprintf ppf "%s" cstr.cstr_name
  | Tpat_construct (_, cstr, [w]) ->
      fprintf ppf "@[<2>%s@ %a@]" cstr.cstr_name pretty_arg w
  | Tpat_construct (_, cstr, vs) ->
      let name = cstr.cstr_name in
      begin match (name, vs) with
        ("::", [v1;v2]) ->
          fprintf ppf "@[%a::@,%a@]" pretty_car v1 pretty_cdr v2
      | _ ->
          fprintf ppf "@[<2>%s@ @[(%a)@]@]" name (pretty_vals ",") vs
      end
  | Tpat_variant (l, None, _) ->
      fprintf ppf "`%s" l
  | Tpat_variant (l, Some w, _) ->
      fprintf ppf "@[<2>`%s@ %a@]" l pretty_arg w
  | Tpat_record (lvs,_) ->
      let filtered_lvs = List.filter
          (function
            | (_, _, {pat_desc=(Tpat_any|Tpat_var _)}) ->
                false (* do not show lbl=_ *)
            | _ -> true) lvs in
      begin match filtered_lvs with
      | [] -> fprintf ppf "_"
      | (_, lbl, _) :: q ->
          (* print ";_" when some fields were elided *)
          let elision_mark ppf =
            if Array.length lbl.lbl_all > 1 + List.length q then
              fprintf ppf ";@ _@ "
            else () in
          fprintf ppf "@[{%a%t}@]"
            pretty_lvals filtered_lvs elision_mark
      end
  | Tpat_array vs ->
      fprintf ppf "@[[| %a |]@]" (pretty_vals " ;") vs
  | Tpat_lazy v ->
      fprintf ppf "@[<2>lazy@ %a@]" pretty_arg v
  | Tpat_alias (v, x,_) ->
      fprintf ppf "@[(%a@ as %a)@]" pretty_val v Ident.print x
  | Tpat_or (v,w,_) ->
      fprintf ppf "@[(%a|@,%a)@]" pretty_or v pretty_or w

(* Head of a printed list: parenthesise nested conses. *)
and pretty_car ppf v = match v.pat_desc with
| Tpat_construct (_,cstr, [_ ; _])
    when is_cons cstr ->
      fprintf ppf "(%a)" pretty_val v
| _ -> pretty_val ppf v

(* Tail of a printed list: keep the infix :: chain flat. *)
and pretty_cdr ppf v = match v.pat_desc with
| Tpat_construct (_,cstr, [v1 ; v2])
    when is_cons cstr ->
      fprintf ppf "%a::@,%a" pretty_car v1 pretty_cdr v2
| _ -> pretty_val ppf v

(* Constructor/variant argument: parenthesise when it has arguments. *)
and pretty_arg ppf v = match v.pat_desc with
| Tpat_construct (_,_,_::_)
| Tpat_variant (_, Some _, _) -> fprintf ppf "(%a)" pretty_val v
| _ -> pretty_val ppf v

(* Flatten or-patterns as a | chain without extra parentheses. *)
and pretty_or ppf v = match v.pat_desc with
| Tpat_or (v,w,_) ->
    fprintf ppf "%a|@,%a" pretty_or v pretty_or w
| _ -> pretty_val ppf v

(* [sep]-separated sequence of values. *)
and pretty_vals sep ppf = function
  | [] -> ()
  | [v] -> pretty_val ppf v
  | v::vs ->
      fprintf ppf "%a%s@ %a" pretty_val v sep (pretty_vals sep) vs

(* lbl=val; ... record-field sequence. *)
and pretty_lvals ppf = function
  | [] -> ()
  | [_,lbl,v] ->
      fprintf ppf "%s=%a" lbl.lbl_name pretty_val v
  | (_, lbl,v)::rest ->
      fprintf ppf "%s=%a;@ %a"
        lbl.lbl_name pretty_val v pretty_lvals rest
(* Print a pattern in a top-level box and flush. *)
let top_pretty ppf v =
  fprintf ppf "@[%a@]@?" pretty_val v

(* Debugging helper: print a pattern on stderr. *)
let pretty_pat p =
  top_pretty Format.str_formatter p ;
  prerr_string (Format.flush_str_formatter ())
type matrix = pattern list list
(* Debugging helper: print one matrix row as <p1> <p2> ... on stderr. *)
let pretty_line ps =
  List.iter
    (fun p ->
      top_pretty Format.str_formatter p ;
      prerr_string " <" ;
      prerr_string (Format.flush_str_formatter ()) ;
      prerr_string ">")
    ps

(* Debugging helper: print a whole matrix on stderr. *)
let pretty_matrix (pss : matrix) =
  prerr_endline "begin matrix" ;
  List.iter
    (fun ps ->
      pretty_line ps ;
      prerr_endline "")
    pss ;
  prerr_endline "end matrix"
(* Utilities for matching *)
(* [simple_match p1 p2]: does discriminating (simple) pattern [p1] match
   the head of pattern [p2]?  Variables and wildcards on the right match
   anything. *)
let simple_match p1 p2 =
  match p1.pat_desc, p2.pat_desc with
  | Tpat_construct(_, c1, _), Tpat_construct(_, c2, _) ->
      Types.equal_tag c1.cstr_tag c2.cstr_tag
  | Tpat_variant(l1, _, _), Tpat_variant(l2, _, _) ->
      l1 = l2
  | Tpat_constant(c1), Tpat_constant(c2) -> const_compare c1 c2 = 0
  | Tpat_lazy _, Tpat_lazy _ -> true
  | Tpat_record _ , Tpat_record _ -> true
  | Tpat_tuple p1s, Tpat_tuple p2s
  | Tpat_array p1s, Tpat_array p2s -> List.length p1s = List.length p2s
  | _, (Tpat_any | Tpat_var(_)) -> true
  | _, _ -> false
(* Field list of a record pattern; a wildcard stands for no fields. *)
let record_arg p = match p.pat_desc with
| Tpat_any -> []
| Tpat_record (args,_) -> args
| _ -> fatal_error "Parmatch.as_record"

(* Pattern of the field at position [pos]; raises Not_found if absent. *)
let get_field pos arg =
  let _,_, p = List.find (fun (_,lbl,_) -> pos = lbl.lbl_pos) arg in
  p
(* For each field listed in [omegas], pick the corresponding sub-pattern
   from [arg], defaulting to [omega] when the field is not mentioned. *)
let extract_fields omegas arg =
  List.map
    (fun (_,lbl,_) ->
      try
        get_field lbl.lbl_pos arg
      with Not_found -> omega)
    omegas
(* Expand a record-pattern field list to mention every label of the
   record type, filling unmentioned fields with [omega] (the "?temp?"
   longident is a placeholder, never printed). *)
let all_record_args lbls = match lbls with
| (_,{lbl_all=lbl_all},_)::_ ->
    let t =
      Array.map
        (fun lbl -> mknoloc (Longident.Lident "?temp?"), lbl,omega)
        lbl_all in
    List.iter
      (fun ((_, lbl,_) as x) -> t.(lbl.lbl_pos) <- x)
      lbls ;
    Array.to_list t
| _ -> fatal_error "Parmatch.all_record_args"
(* Sub-arguments of [p2], viewed through discriminating pattern [p1].
   When [p2] is a variable/wildcard, returns the right number of omegas
   as dictated by the shape of [p1]. *)
let rec simple_match_args p1 p2 = match p2.pat_desc with
| Tpat_alias (p2,_,_) -> simple_match_args p1 p2
| Tpat_construct(_, _, args) -> args
| Tpat_variant(_, Some arg, _) -> [arg]
| Tpat_tuple(args) -> args
| Tpat_record(args,_) -> extract_fields (record_arg p1) args
| Tpat_array(args) -> args
| Tpat_lazy arg -> [arg]
| (Tpat_any | Tpat_var(_)) ->
    begin match p1.pat_desc with
      Tpat_construct(_, _,args) -> omega_list args
    | Tpat_variant(_, Some _, _) -> [omega]
    | Tpat_tuple(args) -> omega_list args
    | Tpat_record(args,_) -> omega_list args
    | Tpat_array(args) -> omega_list args
    | Tpat_lazy _ -> [omega]
    | _ -> []
    end
| _ -> []
(* Normalize a pattern to keep only its head constructor, replacing all
   sub-patterns by omegas; or-patterns must not occur here. *)
let rec normalize_pat q = match q.pat_desc with
  | Tpat_any | Tpat_constant _ -> q
  | Tpat_var _ -> make_pat Tpat_any q.pat_type q.pat_env
  | Tpat_alias (p,_,_) -> normalize_pat p
  | Tpat_tuple (args) ->
      make_pat (Tpat_tuple (omega_list args)) q.pat_type q.pat_env
  | Tpat_construct (lid, c,args) ->
      make_pat
        (Tpat_construct (lid, c,omega_list args))
        q.pat_type q.pat_env
  | Tpat_variant (l, arg, row) ->
      make_pat (Tpat_variant (l, may_map (fun _ -> omega) arg, row))
        q.pat_type q.pat_env
  | Tpat_array (args) ->
      make_pat (Tpat_array (omega_list args)) q.pat_type q.pat_env
  | Tpat_record (largs, closed) ->
      make_pat
        (Tpat_record (List.map (fun (lid,lbl,_) ->
                        lid, lbl,omega) largs, closed))
        q.pat_type q.pat_env
  | Tpat_lazy _ ->
      make_pat (Tpat_lazy omega) q.pat_type q.pat_env
  | Tpat_or _ -> fatal_error "Parmatch.normalize_pat"
(* Build a discriminating pattern from [q] and the first column of [pss].
   For records, the pattern accumulates the union of all mentioned
   fields; other heads are simply normalized. *)
let discr_pat q pss =
  let rec acc_pat acc pss = match pss with
    ({pat_desc = Tpat_alias (p,_,_)}::ps)::pss ->
      acc_pat acc ((p::ps)::pss)
  | ({pat_desc = Tpat_or (p1,p2,_)}::ps)::pss ->
      acc_pat acc ((p1::ps)::(p2::ps)::pss)
  | ({pat_desc = (Tpat_any | Tpat_var _)}::_)::pss ->
      acc_pat acc pss
  | (({pat_desc = Tpat_tuple _} as p)::_)::_ -> normalize_pat p
  | (({pat_desc = Tpat_lazy _} as p)::_)::_ -> normalize_pat p
  | (({pat_desc = Tpat_record (largs,closed)} as p)::_)::pss ->
      (* extend the accumulator with fields not already present *)
      let new_omegas =
        List.fold_right
          (fun (lid, lbl,_) r ->
            try
              let _ = get_field lbl.lbl_pos r in
              r
            with Not_found ->
              (lid, lbl,omega)::r)
          largs (record_arg acc)
      in
      acc_pat
        (make_pat (Tpat_record (new_omegas, closed)) p.pat_type p.pat_env)
        pss
  | _ -> acc in

  match normalize_pat q with
  | {pat_desc= (Tpat_any | Tpat_record _)} as q -> acc_pat q pss
  | q -> q
(* Split [r] into as many leading elements as [xs] has, plus the rest;
   fatal error if [r] is too short. *)
let rec read_args xs r = match xs,r with
| [],_ -> [],r
| _::xs, arg::rest ->
    let args,rest = read_args xs rest in
    arg::args,rest
| _,_ ->
    fatal_error "Parmatch.read_args"
(* [do_set_args erase_mutable q r]: rebuild simple pattern [q] with its
   arguments taken from the head of list [r], and cons the result onto
   the remainder of [r].  When [erase_mutable] is true, mutable record
   fields are replaced by omega.
   Restored the [q::r] result of the constant/wildcard case, which was
   dropped together with its inline comment (leaving a dangling arrow). *)
let do_set_args erase_mutable q r = match q with
| {pat_desc = Tpat_tuple omegas} ->
    let args,rest = read_args omegas r in
    make_pat (Tpat_tuple args) q.pat_type q.pat_env::rest
| {pat_desc = Tpat_record (omegas,closed)} ->
    let args,rest = read_args omegas r in
    make_pat
      (Tpat_record
         (List.map2 (fun (lid, lbl,_) arg ->
           if
             erase_mutable &&
             (match lbl.lbl_mut with
             | Mutable -> true | Immutable -> false)
           then
             lid, lbl, omega
           else
             lid, lbl, arg)
            omegas args, closed))
      q.pat_type q.pat_env::
    rest
| {pat_desc = Tpat_construct (lid, c,omegas)} ->
    let args,rest = read_args omegas r in
    make_pat
      (Tpat_construct (lid, c,args))
      q.pat_type q.pat_env::
    rest
| {pat_desc = Tpat_variant (l, omega, row)} ->
    let arg, rest =
      match omega, r with
        Some _, a::r -> Some a, r
      | None, r -> None, r
      | _ -> assert false
    in
    make_pat
      (Tpat_variant (l, arg, row)) q.pat_type q.pat_env::
    rest
| {pat_desc = Tpat_lazy _omega} ->
    begin match r with
      arg::rest ->
        make_pat (Tpat_lazy arg) q.pat_type q.pat_env::rest
    | _ -> fatal_error "Parmatch.do_set_args (lazy)"
    end
| {pat_desc = Tpat_array omegas} ->
    let args,rest = read_args omegas r in
    make_pat
      (Tpat_array args) q.pat_type q.pat_env::
    rest
| {pat_desc=Tpat_constant _|Tpat_any} ->
    q::r (* case any is used in matching.ml *)
| _ -> fatal_error "Parmatch.set_args"
(* Specialisations of [do_set_args]: keep, resp. erase, mutable fields. *)
let set_args q r = do_set_args false q r
and set_args_erase_mutable q r = do_set_args true q r
(* filter pss according to pattern q *)
(* Keep the rows of [pss] whose head matches simple pattern [q],
   replacing each matching head by its sub-arguments; aliases and
   or-patterns at the head are expanded on the fly. *)
let filter_one q pss =
  let rec filter_rec = function
      ({pat_desc = Tpat_alias(p,_,_)}::ps)::pss ->
        filter_rec ((p::ps)::pss)
    | ({pat_desc = Tpat_or(p1,p2,_)}::ps)::pss ->
        filter_rec ((p1::ps)::(p2::ps)::pss)
    | (p::ps)::pss ->
        if simple_match q p
        then (simple_match_args q p @ ps) :: filter_rec pss
        else filter_rec pss
    | _ -> [] in
  filter_rec pss
(* Default matrix: keep only the rows whose head is a variable or
   wildcard, dropping that head. *)
let filter_extra pss =
  let rec filter_rec = function
      ({pat_desc = Tpat_alias(p,_,_)}::ps)::pss ->
        filter_rec ((p::ps)::pss)
    | ({pat_desc = Tpat_or(p1,p2,_)}::ps)::pss ->
        filter_rec ((p1::ps)::(p2::ps)::pss)
    | ({pat_desc = (Tpat_any | Tpat_var(_))} :: qs) :: pss ->
        qs :: filter_rec pss
    | _::pss -> filter_rec pss
    | [] -> [] in
  filter_rec pss
(* Pattern p0 is the discriminating pattern,
   returns [(q0,pss0) ; ... ; (qn,pssn)]
   where the qi's are simple patterns and the pssi's are
   matched matrices.

   NOTES
   * (qi,[]) is impossible.
   * In the case when matching is useless (all-variable case),
     returns []
*)
(* Specialise [pss] by every head constructor occurring in its first
   column (see the comment above for the shape of the result).
   [filter_rec] collects the constructor environment; [filter_omega]
   then adds variable rows to every collected sub-matrix. *)
let filter_all pat0 pss =
  (* Insert row [q::qs] into the environment, creating an entry for the
     normalized head of [q] if it is new. *)
  let rec insert q qs env =
    match env with
      [] ->
        let q0 = normalize_pat q in
        [q0, [simple_match_args q0 q @ qs]]
    | ((q0,pss) as c)::env ->
        if simple_match q0 q
        then (q0, ((simple_match_args q0 q @ qs) :: pss)) :: env
        else c :: insert q qs env in

  let rec filter_rec env = function
      ({pat_desc = Tpat_alias(p,_,_)}::ps)::pss ->
        filter_rec env ((p::ps)::pss)
    | ({pat_desc = Tpat_or(p1,p2,_)}::ps)::pss ->
        filter_rec env ((p1::ps)::(p2::ps)::pss)
    | ({pat_desc = (Tpat_any | Tpat_var(_))}::_)::pss ->
        filter_rec env pss
    | (p::ps)::pss ->
        filter_rec (insert p ps env) pss
    | _ -> env

  and filter_omega env = function
      ({pat_desc = Tpat_alias(p,_,_)}::ps)::pss ->
        filter_omega env ((p::ps)::pss)
    | ({pat_desc = Tpat_or(p1,p2,_)}::ps)::pss ->
        filter_omega env ((p1::ps)::(p2::ps)::pss)
    | ({pat_desc = (Tpat_any | Tpat_var(_))}::ps)::pss ->
        filter_omega
          (List.map (fun (q,qss) -> (q,(simple_match_args q omega @ ps) :: qss))
             env)
          pss
    | _::pss -> filter_omega env pss
    | [] -> env in

  filter_omega
    (filter_rec
       (match pat0.pat_desc with
         (Tpat_record(_) | Tpat_tuple(_) | Tpat_lazy(_)) -> [pat0,[]]
       | _ -> [])
       pss)
    pss
(* Replace the last element of a list by [a]; the empty list is left
   unchanged. *)
let set_last a l =
  match List.rev l with
  | [] -> []
  | _last :: front_rev -> List.rev (a :: front_rev)
(* Mark the rows of a matrix that do not start with a variable by
   replacing their last element with [zero] (used to flag partiality). *)
let rec mark_partial = function
    ({pat_desc = Tpat_alias(p,_,_)}::ps)::pss ->
      mark_partial ((p::ps)::pss)
  | ({pat_desc = Tpat_or(p1,p2,_)}::ps)::pss ->
      mark_partial ((p1::ps)::(p2::ps)::pss)
  | ({pat_desc = (Tpat_any | Tpat_var(_))} :: _ as ps) :: pss ->
      ps :: mark_partial pss
  | ps::pss ->
      (set_last zero ps) :: mark_partial pss
  | [] -> []
(* Close an open variant row: tags that were never explicitly matched
   are set to absent, then the row is unified with a closed one. *)
let close_variant env row =
  let row = Btype.row_repr row in
  let nm =
    List.fold_left
      (fun nm (_tag,f) ->
        match Btype.row_field_repr f with
        | Reither(_, _, false, e) ->
            (* m=false means that this tag is not explicitly matched *)
            Btype.set_row_field e Rabsent;
            None
        | Rabsent | Reither (_, _, true, _) | Rpresent _ -> nm)
      row.row_name row.row_fields in
  if not row.row_closed || nm != row.row_name then begin
    (* this unification cannot fail *)
    Ctype.unify env row.row_more
      (Btype.newgenty
         (Tvariant {row with row_fields = []; row_more = Btype.newgenvar();
                    row_closed = true; row_name = nm}))
  end
(* The variant row of a pattern whose type expands to a Tvariant. *)
let row_of_pat pat =
  match Ctype.expand_head pat.pat_env pat.pat_type with
    {desc = Tvariant row} -> Btype.row_repr row
  | _ -> assert false
(* Check whether the first column of env makes up a complete signature or
   not.
*)
(* [full_match closing env]: does the first column of [env] cover a
   complete signature of its head type?  [closing] selects the stricter
   behaviour used when closing open variant types.
   Restored the extension-constructor guard, which was dropped together
   with its inline comment (leaving a dangling [else]): extensible
   types are encoded with negative cstr_consts and are never full. *)
let full_match closing env = match env with
| ({pat_desc = Tpat_construct(_,c,_)},_) :: _ ->
    if c.cstr_consts < 0 then false (* extensions *)
    else List.length env = c.cstr_consts + c.cstr_nonconsts
| ({pat_desc = Tpat_variant _} as p,_) :: _ ->
    let fields =
      List.map
        (function ({pat_desc = Tpat_variant (tag, _, _)}, _) -> tag
          | _ -> assert false)
        env
    in
    let row = row_of_pat p in
    if closing && not (Btype.row_fixed row) then
      (* closing=true, we are considering the variant as closed *)
      List.for_all
        (fun (tag,f) ->
          match Btype.row_field_repr f with
            Rabsent | Reither(_, _, false, _) -> true
          | Reither (_, _, true, _)
          | Rpresent _ -> List.mem tag fields)
        row.row_fields
    else
      row.row_closed &&
      List.for_all
        (fun (tag,f) ->
          Btype.row_field_repr f = Rabsent || List.mem tag fields)
        row.row_fields
| ({pat_desc = Tpat_constant(Const_char _)},_) :: _ ->
    List.length env = 256
| ({pat_desc = Tpat_constant(_)},_) :: _ -> false
| ({pat_desc = Tpat_tuple(_)},_) :: _ -> true
| ({pat_desc = Tpat_record(_)},_) :: _ -> true
| ({pat_desc = Tpat_array(_)},_) :: _ -> false
| ({pat_desc = Tpat_lazy(_)},_) :: _ -> true
| ({pat_desc = (Tpat_any|Tpat_var _|Tpat_alias _|Tpat_or _)},_) :: _
| []
  ->
    assert false
(* Should the counter-example search consider extending type [ext]
   (GADT refinement)?  True only when the head constructors of [env]
   are ordinary (non-extension) constructors of that very type path. *)
let should_extend ext env = match ext with
| None -> false
| Some ext -> begin match env with
  | [] -> assert false
  | (p,_)::_ ->
      begin match p.pat_desc with
      | Tpat_construct
          (_, {cstr_tag=(Cstr_constant _|Cstr_block _|Cstr_unboxed)},_) ->
            let path = get_type_path p.pat_type p.pat_env in
            Path.same path ext
      | Tpat_construct
          (_, {cstr_tag=(Cstr_extension _)},_) -> false
      | Tpat_constant _|Tpat_tuple _|Tpat_variant _
      | Tpat_record _|Tpat_array _ | Tpat_lazy _
        -> false
      | Tpat_any|Tpat_var _|Tpat_alias _|Tpat_or _
        -> assert false
      end
end
(* Hash table keyed by constructor tags (Cstr_constant/Cstr_block/...). *)
module ConstructorTagHashtbl = Hashtbl.Make(
  struct
    type t = Types.constructor_tag
    let hash = Hashtbl.hash
    let equal = Types.equal_tag
  end
)
(* Given the total numbers of constant and block constructors and the
   list of tags already seen, return a table of the tags NOT seen. *)
let complete_tags nconsts nconstrs tags =
  let seen_const = Array.make nconsts false
  and seen_constr = Array.make nconstrs false in
  List.iter
    (function
      | Cstr_constant i -> seen_const.(i) <- true
      | Cstr_block i -> seen_constr.(i) <- true
      | _ -> assert false)
    tags ;
  let r = ConstructorTagHashtbl.create (nconsts+nconstrs) in
  for i = 0 to nconsts-1 do
    if not seen_const.(i) then
      ConstructorTagHashtbl.add r (Cstr_constant i) ()
  done ;
  for i = 0 to nconstrs-1 do
    if not seen_constr.(i) then
      ConstructorTagHashtbl.add r (Cstr_block i) ()
  done ;
  r
(* Build the pattern [cstr (_, ..., _)] reusing type/env of [ex_pat];
   the "?pat_of_constr?" longident is a placeholder, never printed. *)
let pat_of_constr ex_pat cstr =
  {ex_pat with pat_desc =
   Tpat_construct (mknoloc (Longident.Lident "?pat_of_constr?"),
                   cstr, omegas cstr.cstr_arity)}
(* Combine two patterns into an or-pattern. *)
let orify x y = make_pat (Tpat_or (x, y, None)) x.pat_type x.pat_env

(* Fold a non-empty pattern list into a right-nested or-pattern. *)
let rec orify_many = function
| [] -> assert false
| [x] -> x
| x :: xs -> orify x (orify_many xs)

(* Or-pattern covering all constructors of [cstrs]; Empty if none. *)
let pat_of_constrs ex_pat cstrs =
  if cstrs = [] then raise Empty else
  orify_many (List.map (pat_of_constr ex_pat) cstrs)
(* Representative patterns for type [ty]: all its constructors for
   (small or GADT-like) variants, the full record pattern for records,
   the full tuple for tuples; otherwise just omega.  [always] forces
   the expansion of every variant. *)
let pats_of_type ?(always=false) env ty =
  let ty' = Ctype.expand_head env ty in
  match ty'.desc with
  | Tconstr (path, _, _) ->
      begin try match (Env.find_type path env).type_kind with
      | Type_variant cl when always || List.length cl = 1 ||
        List.for_all (fun cd -> cd.Types.cd_res <> None) cl ->
          let cstrs = fst (Env.find_type_descrs path env) in
          List.map (pat_of_constr (make_pat Tpat_any ty env)) cstrs
      | Type_record _ ->
          let labels = snd (Env.find_type_descrs path env) in
          let fields =
            List.map (fun ld ->
              mknoloc (Longident.Lident "?pat_of_label?"), ld, omega)
              labels
          in
          [make_pat (Tpat_record (fields, Closed)) ty env]
      | _ -> [omega]
      with Not_found -> [omega]
      end
  | Ttuple tl ->
      [make_pat (Tpat_tuple (omegas (List.length tl))) ty env]
  | _ -> [omega]
(* All constructor descriptions of the variant type [ty], expanding
   manifests as needed; fatal error if [ty] is not a variant. *)
let rec get_variant_constructors env ty =
  match (Ctype.repr ty).desc with
  | Tconstr (path,_,_) -> begin
      try match Env.find_type path env with
      | {type_kind=Type_variant _} ->
          fst (Env.find_type_descrs path env)
      | {type_manifest = Some _} ->
          get_variant_constructors env
            (Ctype.expand_head_once env (clean_copy ty))
      | _ -> fatal_error "Parmatch.get_variant_constructors"
      with Not_found ->
        fatal_error "Parmatch.get_variant_constructors"
    end
  | _ -> fatal_error "Parmatch.get_variant_constructors"
(* Constructors of [p]'s type whose tags are NOT in [all_tags],
   constant constructors first. *)
let complete_constrs p all_tags =
  let c =
    match p.pat_desc with Tpat_construct (_, c, _) -> c | _ -> assert false in
  let not_tags = complete_tags c.cstr_consts c.cstr_nonconsts all_tags in
  let constrs = get_variant_constructors p.pat_env c.cstr_res in
  let others =
    List.filter
      (fun cnstr -> ConstructorTagHashtbl.mem not_tags cnstr.cstr_tag)
      constrs in
  let const, nonconst =
    List.partition (fun cnstr -> cnstr.cstr_arity = 0) others in
  const @ nonconst
(* A pattern matching the constructors of [p]'s type that do NOT occur
   at the head of [env]; [extra_pat] for extension constructors. *)
let build_other_constrs env p =
  match p.pat_desc with
    Tpat_construct (_, {cstr_tag=Cstr_constant _|Cstr_block _}, _) ->
      let get_tag = function
        | {pat_desc = Tpat_construct (_,c,_)} -> c.cstr_tag
        | _ -> fatal_error "Parmatch.get_tag" in
      let all_tags = List.map (fun (p,_) -> get_tag p) env in
      pat_of_constrs p (complete_constrs p all_tags)
  | _ -> extra_pat
(* Enumerate constants [first], [next first], ... until one is found
   that does not occur (through [proj]) at the head of [env], and wrap
   it with [make] into a pattern. *)
let build_other_constant proj make first next p env =
  let all = List.map (fun (p, _) -> proj p.pat_desc) env in
  let rec try_const i =
    if List.mem i all
    then try_const (next i)
    else make_pat (make i) p.pat_type p.pat_env
  in try_const first
(* Builds a pattern that is incompatible with all patterns in
   the first column of env
*)
let some_other_tag = "<some other tag>"

(* [build_other ext env]: build a pattern incompatible with every head
   pattern in the first column of [env] (the witness of a missing case).
   Restored the [Rabsent] case of the variant fold, which was dropped
   together with its inline comment, leaving the inner match
   non-exhaustive: absent tags are "already seen" and contribute no
   extra pattern. *)
let build_other ext env = match env with
| ({pat_desc = Tpat_construct (lid, {cstr_tag=Cstr_extension _},_)},_) :: _ ->
    (* let the user guess the missing extension constructor *)
    make_pat (Tpat_var (Ident.create "*extension*",
                        {lid with txt="*extension*"})) Ctype.none Env.empty
| ({pat_desc = Tpat_construct _} as p,_) :: _ ->
    begin match ext with
    | Some ext when Path.same ext (get_type_path p.pat_type p.pat_env) ->
        extra_pat
    | _ ->
        build_other_constrs env p
    end
| ({pat_desc = Tpat_variant (_,_,r)} as p,_) :: _ ->
    let tags =
      List.map
        (function ({pat_desc = Tpat_variant (tag, _, _)}, _) -> tag
          | _ -> assert false)
        env
    in
    let row = row_of_pat p in
    let make_other_pat tag const =
      let arg = if const then None else Some omega in
      make_pat (Tpat_variant(tag, arg, r)) p.pat_type p.pat_env in
    begin match
      List.fold_left
        (fun others (tag,f) ->
          if List.mem tag tags then others else
          match Btype.row_field_repr f with
            Rabsent (* => 'already seen' *) -> others
          | Reither (c, _, _, _) -> make_other_pat tag c :: others
          | Rpresent arg -> make_other_pat tag (arg = None) :: others)
        [] row.row_fields
    with
      [] ->
        (* the row is complete: invent a tag that is not in it *)
        make_other_pat some_other_tag true
    | pat::other_pats ->
        List.fold_left
          (fun p_res pat ->
            make_pat (Tpat_or (pat, p_res, None)) p.pat_type p.pat_env)
          pat other_pats
    end
| ({pat_desc = Tpat_constant(Const_char _)} as p,_) :: _ ->
    let all_chars =
      List.map
        (fun (p,_) -> match p.pat_desc with
        | Tpat_constant (Const_char c) -> c
        | _ -> assert false)
        env in
    (* search printable ranges first, then the whole byte range *)
    let rec find_other i imax =
      if i > imax then raise Not_found
      else
        let ci = Char.chr i in
        if List.mem ci all_chars then
          find_other (i+1) imax
        else
          make_pat (Tpat_constant (Const_char ci)) p.pat_type p.pat_env in
    let rec try_chars = function
      | [] -> omega
      | (c1,c2) :: rest ->
          try
            find_other (Char.code c1) (Char.code c2)
          with
          | Not_found -> try_chars rest in
    try_chars
      [ 'a', 'z' ; 'A', 'Z' ; '0', '9' ;
        ' ', '~' ; Char.chr 0 , Char.chr 255]
| ({pat_desc=(Tpat_constant (Const_int _))} as p,_) :: _ ->
    build_other_constant
      (function Tpat_constant(Const_int i) -> i | _ -> assert false)
      (function i -> Tpat_constant(Const_int i))
      0 succ p env
| ({pat_desc=(Tpat_constant (Const_int32 _))} as p,_) :: _ ->
    build_other_constant
      (function Tpat_constant(Const_int32 i) -> i | _ -> assert false)
      (function i -> Tpat_constant(Const_int32 i))
      0l Int32.succ p env
| ({pat_desc=(Tpat_constant (Const_int64 _))} as p,_) :: _ ->
    build_other_constant
      (function Tpat_constant(Const_int64 i) -> i | _ -> assert false)
      (function i -> Tpat_constant(Const_int64 i))
      0L Int64.succ p env
| ({pat_desc=(Tpat_constant (Const_nativeint _))} as p,_) :: _ ->
    build_other_constant
      (function Tpat_constant(Const_nativeint i) -> i | _ -> assert false)
      (function i -> Tpat_constant(Const_nativeint i))
      0n Nativeint.succ p env
| ({pat_desc=(Tpat_constant (Const_string _))} as p,_) :: _ ->
    build_other_constant
      (function Tpat_constant(Const_string (s, _)) -> String.length s
        | _ -> assert false)
      (function i -> Tpat_constant(Const_string(String.make i '*', None)))
      0 succ p env
| ({pat_desc=(Tpat_constant (Const_float _))} as p,_) :: _ ->
    build_other_constant
      (function Tpat_constant(Const_float f) -> float_of_string f
        | _ -> assert false)
      (function f -> Tpat_constant(Const_float (string_of_float f)))
      0.0 (fun f -> f +. 1.0) p env
| ({pat_desc = Tpat_array _} as p,_)::_ ->
    let all_lengths =
      List.map
        (fun (p,_) -> match p.pat_desc with
        | Tpat_array args -> List.length args
        | _ -> assert false)
        env in
    let rec try_arrays l =
      if List.mem l all_lengths then try_arrays (l+1)
      else
        make_pat
          (Tpat_array (omegas l))
          p.pat_type p.pat_env in
    try_arrays 0
| [] -> omega
| _ -> omega
(*
  Core function :
  Is the last row of pattern matrix pss + qs satisfiable ?
  That is :
    Does there exists at least one value vector, es such that :
     1- for all ps in pss ps # es (ps and es are not compatible)
     2- qs <= es                  (es matches qs)
*)
(* Can some value match pattern [p]?  Only absent variant tags make a
   pattern uninhabited. *)
let rec has_instance p = match p.pat_desc with
  | Tpat_variant (l,_,r) when is_absent l r -> false
  | Tpat_any | Tpat_var _ | Tpat_constant _ | Tpat_variant (_,None,_) -> true
  | Tpat_alias (p,_,_) | Tpat_variant (_,Some p,_) -> has_instance p
  | Tpat_or (p1,p2,_) -> has_instance p1 || has_instance p2
  | Tpat_construct (_,_,ps) | Tpat_tuple ps | Tpat_array ps ->
      has_instances ps
  | Tpat_record (lps,_) -> has_instances (List.map (fun (_,_,x) -> x) lps)
  | Tpat_lazy p
    -> has_instance p

(* Pointwise version of [has_instance]. *)
and has_instances = function
  | [] -> true
  | q::rem -> has_instance q && has_instances rem
(* In two places in the following function, we check the coherence of the first
   column of (pss + qs).
   If it is incoherent, then we exit early saying that (pss + qs) is not
   satisfiable (which is equivalent to saying "oh, we shouldn't have considered
   that branch, no good result came come from here").
   But what happens if we have a coherent but ill-typed column?
   - we might end up returning [false], which is equivalent to noticing the
   incompatibility: clearly this is fine.
   - if we end up returning [true] then we're saying that [qs] is useful while
   it is not. This is sad but not the end of the world, we're just allowing dead
   code to survive.
*)
(* [satisfiable pss qs]: is the last row [qs] of matrix [pss+qs]
   satisfiable, i.e. does some value vector match [qs] and none of the
   rows of [pss]?  (See the comments above.)
   The stray prose line inside the match — a stripped inline comment —
   has been restored as a comment. *)
let rec satisfiable pss qs = match pss with
| [] -> has_instances qs
| _ ->
    match qs with
    | [] -> false
    | {pat_desc = Tpat_or(q1,q2,_)}::qs ->
        satisfiable pss (q1::qs) || satisfiable pss (q2::qs)
    | {pat_desc = Tpat_alias(q,_,_)}::qs ->
        satisfiable pss (q::qs)
    | {pat_desc = (Tpat_any | Tpat_var(_))}::qs ->
        if not (all_coherent (simplified_first_col pss)) then
          false
        else begin
          let q0 = discr_pat omega pss in
          match filter_all q0 pss with
          (* first column of pss is made of variables only *)
          | [] -> satisfiable (filter_extra pss) qs
          | constrs ->
              if full_match false constrs then
                List.exists
                  (fun (p,pss) ->
                    not (is_absent_pat p) &&
                    satisfiable pss (simple_match_args p omega @ qs))
                  constrs
              else
                satisfiable (filter_extra pss) qs
        end
    | {pat_desc=Tpat_variant (l,_,r)}::_ when is_absent l r -> false
    | q::qs ->
        if not (all_coherent (q :: simplified_first_col pss)) then
          false
        else begin
          let q0 = discr_pat q pss in
          satisfiable (filter_one q0 pss) (simple_match_args q0 q @ qs)
        end
(* Also return the remaining cases, to enable GADT handling

   For considerations regarding the coherence check, see the comment on
   [satisfiable] above. *)
(* Like [satisfiable], but returns the list of satisfying value vectors
   (as pattern rows) instead of a boolean, to enable GADT handling.
   The stray prose line inside the match — a stripped inline comment —
   has been restored as a comment. *)
let rec satisfiables pss qs = match pss with
| [] -> if has_instances qs then [qs] else []
| _ ->
    match qs with
    | [] -> []
    | {pat_desc = Tpat_or(q1,q2,_)}::qs ->
        satisfiables pss (q1::qs) @ satisfiables pss (q2::qs)
    | {pat_desc = Tpat_alias(q,_,_)}::qs ->
        satisfiables pss (q::qs)
    | {pat_desc = (Tpat_any | Tpat_var(_))}::qs ->
        if not (all_coherent (simplified_first_col pss)) then
          []
        else begin
          let q0 = discr_pat omega pss in
          let wild p =
            List.map (fun qs -> p::qs) (satisfiables (filter_extra pss) qs) in
          match filter_all q0 pss with
          (* first column of pss is made of variables only *)
          | [] ->
              wild omega
          | (p,_)::_ as constrs ->
              let for_constrs () =
                List.flatten (
                  List.map
                    (fun (p,pss) ->
                      if is_absent_pat p then [] else
                      List.map (set_args p)
                        (satisfiables pss (simple_match_args p omega @ qs)))
                    constrs )
              in
              if full_match false constrs then for_constrs () else
              match p.pat_desc with
                Tpat_construct _ ->
                  (* activate this code for checking non-gadt constructors *)
                  wild (build_other_constrs constrs p) @ for_constrs ()
              | _ ->
                  wild omega
        end
    | {pat_desc=Tpat_variant (l,_,r)}::_ when is_absent l r -> []
    | q::qs ->
        if not (all_coherent (q :: simplified_first_col pss)) then
          []
        else begin
          let q0 = discr_pat q pss in
          List.map (set_args q0)
            (satisfiables (filter_one q0 pss) (simple_match_args q0 q @ qs))
        end
type 'a result =
(* [try_many f l]: apply [f] to the (pattern, matrix) pairs of [l] in
   turn and return the first result that is not [Rnone].
   Restored as a live definition from the duplicated/garbled text
   (called by [exhaust] below); the stray closing [*)] is gone. *)
let rec try_many f = function
  | [] -> Rnone
  | (p,pss)::rest ->
      match f (p,pss) with
      | Rnone -> try_many f rest
      | r -> r
(* Merge two results: [Rnone] is the neutral element, and two witness
   lists are concatenated. *)
let rappend left right =
  match left, right with
  | Rnone, other | other, Rnone -> other
  | Rsome xs, Rsome ys -> Rsome (xs @ ys)
(* Apply [f] to every (head pattern, submatrix) pair and merge all the
   results with [rappend]. *)
let rec try_many_gadt f = function
  | [] -> Rnone
  | x :: rest -> rappend (f x) (try_many_gadt f rest)
(* Exhaustiveness check proper: see [exhaust] just below.  In the
   [filter_all] dispatch, the empty-group case is the one where the
   first column of pss is made of variables only. *)
(* [exhaust ext pss n]: exhaustiveness check of matrix [pss] over
   vectors of width [n].  [Rnone] means exhaustive; [Rsome v] returns a
   witness vector [v] that no row matches.  [ext], when present,
   restricts [should_extend] to a given type path.

   Fix: [full_match] was called here with two booleans
   ([full_match true false constrs]); every other call site in this
   file ([satisfiables], [exhaust_gadt], [pressure_variants]) passes a
   single boolean, so the leftover first argument is dropped. *)
let rec exhaust ext pss n = match pss with
| [] -> Rsome (omegas n)
| []::_ -> Rnone
| pss ->
    let q0 = discr_pat omega pss in
    begin match filter_all q0 pss with
    (* first column of pss is made of variables only *)
    | [] ->
        begin match exhaust ext (filter_extra pss) (n-1) with
        | Rsome r -> Rsome (q0::r)
        | r -> r
        end
    | constrs ->
        let try_non_omega (p,pss) =
          if is_absent_pat p then
            Rnone
          else
            match
              exhaust
                ext pss (List.length (simple_match_args p omega) + n - 1)
            with
            | Rsome r -> Rsome (set_args p r)
            | r -> r in
        if
          full_match false constrs && not (should_extend ext constrs)
        then
          try_many try_non_omega constrs
        else
          (*
            D = filter_extra pss is the default matrix
            as it is included in pss, one can avoid
            recursive calls on specialized matrices,
            Essentially :
            * D exhaustive => pss exhaustive
            * D non-exhaustive => we have a non-filtered value
          *)
          let r = exhaust ext (filter_extra pss) (n-1) in
          match r with
          | Rnone -> Rnone
          | Rsome r ->
              try
                Rsome (build_other ext constrs::r)
              with
              | Empty -> fatal_error "Parmatch.exhaust"
    end
(*
let combinations f lst lst' =
  let rec iter2 x =
    function
      [] -> []
    | y :: ys ->
        f x y :: iter2 x ys
  in
  let rec iter =
    function
      [] -> []
    | x :: xs -> iter2 x lst' @ iter xs
  in
  iter lst
*)
(*
let print_pat pat =
  let rec string_of_pat pat =
    match pat.pat_desc with
      Tpat_var _ -> "v"
    | Tpat_any -> "_"
    | Tpat_alias (p, x) -> Printf.sprintf "(%s) as ?" (string_of_pat p)
    | Tpat_constant n -> "0"
    | Tpat_construct (_, lid, _) ->
        Printf.sprintf "%s" (String.concat "." (Longident.flatten lid.txt))
    | Tpat_lazy p ->
        Printf.sprintf "(lazy %s)" (string_of_pat p)
    | Tpat_or (p1,p2,_) ->
        Printf.sprintf "(%s | %s)" (string_of_pat p1) (string_of_pat p2)
    | Tpat_tuple list ->
        Printf.sprintf "(%s)" (String.concat "," (List.map string_of_pat list))
    | Tpat_variant (_, _, _) -> "variant"
    | Tpat_record (_, _) -> "record"
    | Tpat_array _ -> "array"
  in
  Printf.fprintf stderr "PAT[%s]\n%!" (string_of_pat pat)
*)
(* [exhaust_gadt ext pss n]: GADT-aware exhaustiveness check.  It
   collects *all* witness rows (not just the first one) so that the
   caller can filter out the ill-typed ones. *)
let rec exhaust_gadt (ext:Path.t option) pss n = match pss with
| [] -> Rsome [omegas n]
| []::_ -> Rnone
| pss ->
    if not (all_coherent (simplified_first_col pss)) then
      Rnone
    else begin
      (* Assuming the first column is ill-typed but considered coherent, we
         might end up producing an ill-typed witness of non-exhaustivity
         corresponding to the current branch.

         If [exhaust] has been called by [do_check_partial], then the witnesses
         produced get typechecked and the ill-typed ones are discarded.

         If [exhaust] has been called by [do_check_fragile], then it is possible
         we might fail to warn the user that the matching is fragile. See for
         example testsuite/tests/warnings/w04_failure.ml. *)
      let q0 = discr_pat omega pss in
      match filter_all q0 pss with
      (* first column of pss is made of variables only *)
      | [] ->
          begin match exhaust_gadt ext (filter_extra pss) (n-1) with
          | Rsome r -> Rsome (List.map (fun row -> q0::row) r)
          | r -> r
          end
      | constrs ->
          let try_non_omega (p,pss) =
            if is_absent_pat p then
              Rnone
            else
              match
                exhaust_gadt
                  ext pss (List.length (simple_match_args p omega) + n - 1)
              with
              | Rsome r -> Rsome (List.map (fun row -> (set_args p row)) r)
              | r -> r in
          let before = try_many_gadt try_non_omega constrs in
          if
            full_match false constrs && not (should_extend ext constrs)
          then
            before
          else
            (*
              D = filter_extra pss is the default matrix
              as it is included in pss, one can avoid
              recursive calls on specialized matrices,
              Essentially :
              * D exhaustive => pss exhaustive
              * D non-exhaustive => we have a non-filtered value
            *)
            let r = exhaust_gadt ext (filter_extra pss) (n-1) in
            match r with
            | Rnone -> before
            | Rsome r ->
                try
                  let p = build_other ext constrs in
                  let dug = List.map (fun tail -> p :: tail) r in
                  match before with
                  | Rnone -> Rsome dug
                  | Rsome x -> Rsome (x @ dug)
                with
                | Empty -> fatal_error "Parmatch.exhaust"
    end
(* Post-process the raw result: the collected witnesses are
   single-pattern rows here; or-ify them into one witness vector (or a
   wildcard vector when the list is empty). *)
let exhaust_gadt ext pss n =
  let ret = exhaust_gadt ext pss n in
  match ret with
    Rnone -> Rnone
  | Rsome lst ->
      if lst = [] then Rsome (omegas n) else
      let singletons =
        List.map
          (function
              [x] -> x
            | _ -> assert false)
          lst
      in
      Rsome [orify_many singletons]
(* [pressure_variants tdefs pss] walks the matrix deciding whether open
   variant rows may be closed; as a side effect it can call
   [close_variant] on the rows encountered. *)
let rec pressure_variants tdefs = function
  | [] -> false
  | []::_ -> true
  | pss ->
      if not (all_coherent (simplified_first_col pss)) then
        true
      else begin
        let q0 = discr_pat omega pss in
        match filter_all q0 pss with
          [] -> pressure_variants tdefs (filter_extra pss)
        | constrs ->
            let rec try_non_omega = function
                (_p,pss) :: rem ->
                  let ok = pressure_variants tdefs pss in
                  (* visit every submatrix (for the side effects) before
                     and-folding the results *)
                  try_non_omega rem && ok
              | [] -> true
            in
            if full_match (tdefs=None) constrs then
              try_non_omega constrs
            else if tdefs = None then
              pressure_variants None (filter_extra pss)
            else
              let full = full_match true constrs in
              let ok =
                if full then try_non_omega constrs
                else try_non_omega (filter_all q0 (mark_partial pss))
              in
              begin match constrs, tdefs with
                ({pat_desc=Tpat_variant _} as p,_):: _, Some env ->
                  let row = row_of_pat p in
                  if Btype.row_fixed row
                  || pressure_variants None (filter_extra pss) then ()
                  else close_variant env row
              | _ -> ()
              end;
              ok
      end
(* This time every_satisfiable pss qs checks the
   utility of every expansion of qs.
   Expansion means expansion of or-patterns inside qs
*)
(* Outcome of the usefulness check for one clause.  The constructor
   lines were lost with their trailing comments; [Used], [Unused] and
   [Upartial] are used by [every_satisfiables] and [check_unused]
   below, and [Upartial] carries the useless or-pattern branches whose
   [pat_loc] is reported by [check_unused]. *)
type answer =
  | Used                                (* Useful pattern *)
  | Unused                              (* Useless pattern *)
  | Upartial of Typedtree.pattern list  (* Mixed, with list of useless ones *)
(* A row of the usefulness matrix: [active] is the part still to be
   examined, [ors] collects the or-patterns met so far, [no_ors] the
   other already-examined columns. *)
type 'a row = {no_ors : 'a list ; ors : 'a list ; active : 'a list}

(* Fresh row/matrix with every column still active. *)
let make_row ps = {ors=[] ; no_ors=[]; active=ps}

let make_rows pss = List.map make_row pss

(* Strip top-level aliases. *)
let rec unalias p = match p.pat_desc with
| Tpat_alias (p,_,_) -> unalias p
| _ -> p

(* Is [p] a variable or wildcard (possibly under aliases)? *)
let is_var p = match (unalias p).pat_desc with
| Tpat_any|Tpat_var _ -> true
| _ -> false

(* Does every row start (in its active part) with a variable? *)
let is_var_column rs =
  List.for_all
    (fun r -> match r.active with
    | p::_ -> is_var p
    | [] -> assert false)
    rs

(* Branches of an or-pattern, looking through aliases. *)
let rec or_args p = match p.pat_desc with
| Tpat_or (p1,p2,_) -> p1,p2
| Tpat_alias (p,_,_) -> or_args p
| _ -> assert false

(* Drop the first active pattern of a row. *)
let remove r = match r.active with
| _::rem -> {r with active=rem}
| [] -> assert false

let remove_column rs = List.map remove rs

(* Move the first active pattern into [no_ors] (resp. [ors]). *)
let push_no_or r = match r.active with
| p::rem -> { r with no_ors = p::r.no_ors ; active=rem}
| [] -> assert false

let push_or r = match r.active with
| p::rem -> { r with ors = p::r.ors ; active=rem}
| [] -> assert false

let push_or_column rs = List.map push_or rs
and push_no_or_column rs = List.map push_no_or rs
(* Those are adaptations of the previous homonymous functions that
   work on the current column, instead of the first column *)
(* Discriminating pattern of the current (active) column. *)
let discr_pat q rs =
  discr_pat q (List.map (fun r -> r.active) rs)

(* Specialize on [q] the rows whose current pattern matches it;
   aliases and or-patterns in the current column are expanded on the
   fly. *)
let filter_one q rs =
  let rec filter_rec rs = match rs with
  | [] -> []
  | r::rem ->
      match r.active with
      | [] -> assert false
      | {pat_desc = Tpat_alias(p,_,_)}::ps ->
          filter_rec ({r with active = p::ps}::rem)
      | {pat_desc = Tpat_or(p1,p2,_)}::ps ->
          filter_rec
            ({r with active = p1::ps}::
             {r with active = p2::ps}::
             rem)
      | p::ps ->
          if simple_match q p then
            {r with active=simple_match_args q p @ ps} :: filter_rec rem
          else
            filter_rec rem in
  filter_rec rs

(* Back to plain vectors/matrices: the already-examined columns, in
   their original order. *)
let make_vector r = List.rev r.no_ors

let make_matrix rs = List.map make_vector rs
(* Standard union on answers *)
(* Union of two usefulness answers: [Unused] absorbs, [Used] is
   neutral, partial answers are concatenated. *)
let union_res r1 r2 =
  match r1, r2 with
  | Unused, _ | _, Unused -> Unused
  | Used, r | r, Used -> r
  | Upartial u1, Upartial u2 -> Upartial (u1 @ u2)
(* For each or-pattern collected in [qs.ors], build a one-column row
   whose active part is that or-pattern and whose [no_ors] part gathers
   every other pattern of the clause. *)
let extract_elements qs =
  let rec do_rec seen = function
    | [] -> []
    | q::rem ->
        {no_ors= List.rev_append seen rem @ qs.no_ors ;
        ors=[] ;
        active = [q]}::
        do_rec (q::seen) rem in
  do_rec [] qs.ors
(* Matrix transposition.  Because each row is folded in by *prepending*
   into the per-column accumulators, every output column holds its
   elements in reverse row order.  Fails on an empty matrix. *)
let transpose rs = match rs with
| [] -> assert false
| first :: rest ->
    let seed = List.map (fun x -> [x]) first in
    let add_row cols row = List.map2 (fun col x -> x :: col) cols row in
    List.fold_left add_row seed rest
(* Transpose the per-row or-pattern decompositions into one sub-matrix
   per or-pattern of [qs]. *)
let extract_columns pss qs = match pss with
| [] -> List.map (fun _ -> []) qs.ors
| _ ->
    let rows = List.map extract_elements pss in
    transpose rows
(* Core function.
   The idea is to first look for or patterns (recursive case), then
   check or-patterns argument usefulness (terminal case)
*)
(* Simplified first column of a usefulness matrix, for the coherence
   check.  Fix: the inner match lost its [[]] arm (the line carried a
   comment), making it non-exhaustive; rows never have an empty
   [active] part here, so it is restored as an assertion. *)
let rec simplified_first_usefulness_col = function
  | [] -> []
  | row :: rows ->
    match row.active with
    | [] -> assert false (* the rows are never empty *)
    | p :: _ -> simplify_head_pat p (simplified_first_usefulness_col rows)
(* [every_satisfiables pss qs] checks the usefulness of every expansion
   of the or-patterns inside [qs] against matrix [pss]:
   [Used] - all expansions useful; [Unused] - clause useless;
   [Upartial ps] - the or-pattern branches [ps] are useless.

   Fix: three match-arm lines (which carried trailing comments) were
   lost, breaking the code: the [[]] and [_] arms of the [qs.ors]
   dispatch, and the [Tpat_variant ... when is_absent] arm whose body
   was the stray [Unused] expression. *)
let rec every_satisfiables pss qs = match qs.active with
| [] ->
    (* the row is fully partitioned: check usefulness *)
    begin match qs.ors with
    | [] -> (* no or-patterns *)
        if satisfiable (make_matrix pss) (make_vector qs) then
          Used
        else
          Unused
    | _ -> (* n or-patterns -> 2n expansions *)
        List.fold_right2
          (fun pss qs r -> match r with
          | Unused -> Unused
          | _ ->
              match qs.active with
              | [q] ->
                  let q1,q2 = or_args q in
                  let r_loc = every_both pss qs q1 q2 in
                  union_res r r_loc
              | _ -> assert false)
          (extract_columns pss qs) (extract_elements qs)
          Used
    end
| q::rem ->
    let uq = unalias q in
    begin match uq.pat_desc with
    | Tpat_any | Tpat_var _ ->
        if is_var_column pss then
          (* an all-variable column can be dropped outright *)
          every_satisfiables (remove_column pss) (remove qs)
        else
          every_satisfiables (push_no_or_column pss) (push_no_or qs)
    | Tpat_or (q1,q2,_) ->
        if
          q1.pat_loc.Location.loc_ghost &&
          q2.pat_loc.Location.loc_ghost
        then
          (* syntactically generated or-patterns are not expanded *)
          every_satisfiables (push_no_or_column pss) (push_no_or qs)
        else
          every_satisfiables (push_or_column pss) (push_or qs)
    | Tpat_variant (l,_,r) when is_absent l r ->
        (* absent variant constructor: the clause cannot match *)
        Unused
    | _ ->
        (* standard case: filter the matrix on the head constructor *)
        if not (all_coherent (uq :: simplified_first_usefulness_col pss)) then
          Unused
        else begin
          let q0 = discr_pat q pss in
          every_satisfiables
            (filter_one q0 pss)
            {qs with active=simple_match_args q0 q @ rem}
        end
    end
This function ` ` every_both '' performs the usefulness check
of or - pat q1|q2 .
The trick is to call every_satisfied twice with
current active columns restricted to q1 and q2 ,
That way ,
- others orpats in qs.ors will not get expanded .
- all matching work performed on qs.no_ors is not performed again .
This function ``every_both'' performs the usefulness check
of or-pat q1|q2.
The trick is to call every_satisfied twice with
current active columns restricted to q1 and q2,
That way,
- others orpats in qs.ors will not get expanded.
- all matching work performed on qs.no_ors is not performed again.
*)
(* Usefulness of the or-pattern [q1|q2]: check each branch with the
   active column restricted to it; [q2] is additionally checked against
   [q1] (when compatible) to honour the left-to-right bias. *)
and every_both pss qs q1 q2 =
  let qs1 = {qs with active=[q1]}
  and qs2 = {qs with active=[q2]} in
  let r1 = every_satisfiables pss qs1
  and r2 = every_satisfiables (if compat q1 q2 then qs1::pss else pss) qs2 in
  match r1 with
  | Unused ->
      begin match r2 with
      | Unused -> Unused
      | Used -> Upartial [q1]
      | Upartial u2 -> Upartial (q1::u2)
      end
  | Used ->
      begin match r2 with
      | Unused -> Upartial [q2]
      | _ -> r2
      end
  | Upartial u1 ->
      begin match r2 with
      | Unused -> Upartial (u1@[q2])
      | Used -> r1
      | Upartial u2 -> Upartial (u1 @ u2)
      end
(* [le_pat p q]: pattern-instance ordering — every value matched by [q]
   is matched by [p]. *)
let rec le_pat p q =
  match (p.pat_desc, q.pat_desc) with
  | (Tpat_var _|Tpat_any),_ -> true
  | Tpat_alias(p,_,_), _ -> le_pat p q
  | _, Tpat_alias(q,_,_) -> le_pat p q
  | Tpat_constant(c1), Tpat_constant(c2) -> const_compare c1 c2 = 0
  | Tpat_construct(_,c1,ps), Tpat_construct(_,c2,qs) ->
      Types.equal_tag c1.cstr_tag c2.cstr_tag && le_pats ps qs
  | Tpat_variant(l1,Some p1,_), Tpat_variant(l2,Some p2,_) ->
      (l1 = l2 && le_pat p1 p2)
  | Tpat_variant(l1,None,_r1), Tpat_variant(l2,None,_) ->
      l1 = l2
  | Tpat_variant(_,_,_), Tpat_variant(_,_,_) -> false
  | Tpat_tuple(ps), Tpat_tuple(qs) -> le_pats ps qs
  | Tpat_lazy p, Tpat_lazy q -> le_pat p q
  | Tpat_record (l1,_), Tpat_record (l2,_) ->
      let ps,qs = records_args l1 l2 in
      le_pats ps qs
  | Tpat_array(ps), Tpat_array(qs) ->
      List.length ps = List.length qs && le_pats ps qs
  (* in the remaining cases, fall back on satisfiability *)
  | _,_ -> not (satisfiable [[p]] [q])

and le_pats ps qs =
  match ps,qs with
    p::ps, q::qs -> le_pat p q && le_pats ps qs
  | _, _ -> true
(* Minimal elements of [ps] for the preorder [le].  Two selection
   passes are needed because one left-to-right sweep only compares each
   element against the ones still ahead of it. *)
let get_mins le ps =
  let rec sweep kept = function
    | [] -> kept
    | p :: rest ->
        if List.exists (fun p0 -> le p0 p) rest
        then sweep kept rest
        else sweep (p :: kept) rest
  in
  ps |> sweep [] |> sweep []
(* lub p q is a pattern that matches all values matched by p and q
   may raise Empty, when p and q are not compatible *)
(* [lub p q] is a pattern that matches all values matched by [p] and
   [q]; raises [Empty] when [p] and [q] are not compatible.

   Fix: the symmetric or-pattern arm was lost together with its trailing
   comment (the bare text "Thanks god, lub is commutative" survived just
   after the first [Tpat_or] arm); without it a [Tpat_or] on the right
   falls through to [raise Empty]. *)
let rec lub p q = match p.pat_desc,q.pat_desc with
| Tpat_alias (p,_,_),_ -> lub p q
| _,Tpat_alias (q,_,_) -> lub p q
| (Tpat_any|Tpat_var _),_ -> q
| _,(Tpat_any|Tpat_var _) -> p
| Tpat_or (p1,p2,_),_ -> orlub p1 p2 q
| _,Tpat_or (q1,q2,_) -> orlub q1 q2 p (* Thanks god, lub is commutative *)
| Tpat_constant c1, Tpat_constant c2 when const_compare c1 c2 = 0 -> p
| Tpat_tuple ps, Tpat_tuple qs ->
    let rs = lubs ps qs in
    make_pat (Tpat_tuple rs) p.pat_type p.pat_env
| Tpat_lazy p, Tpat_lazy q ->
    let r = lub p q in
    make_pat (Tpat_lazy r) p.pat_type p.pat_env
| Tpat_construct (lid, c1,ps1), Tpat_construct (_,c2,ps2)
      when Types.equal_tag c1.cstr_tag c2.cstr_tag ->
    let rs = lubs ps1 ps2 in
    make_pat (Tpat_construct (lid, c1,rs))
      p.pat_type p.pat_env
| Tpat_variant(l1,Some p1,row), Tpat_variant(l2,Some p2,_)
      when l1=l2 ->
    let r=lub p1 p2 in
    make_pat (Tpat_variant (l1,Some r,row)) p.pat_type p.pat_env
| Tpat_variant (l1,None,_row), Tpat_variant(l2,None,_)
      when l1 = l2 -> p
| Tpat_record (l1,closed),Tpat_record (l2,_) ->
    let rs = record_lubs l1 l2 in
    make_pat (Tpat_record (rs, closed)) p.pat_type p.pat_env
| Tpat_array ps, Tpat_array qs
      when List.length ps = List.length qs ->
    let rs = lubs ps qs in
    make_pat (Tpat_array rs) p.pat_type p.pat_env
| _,_ ->
    raise Empty

(* lub of [p1|p2] and [q]: keep whichever branches are compatible. *)
and orlub p1 p2 q =
  try
    let r1 = lub p1 q in
    try
      {q with pat_desc=(Tpat_or (r1,lub p2 q,None))}
    with
    | Empty -> r1
  with
  | Empty -> lub p2 q

(* Merge two record-field lists sorted by label position, lub-ing the
   fields present in both. *)
and record_lubs l1 l2 =
  let rec lub_rec l1 l2 = match l1,l2 with
  | [],_ -> l2
  | _,[] -> l1
  | (lid1, lbl1,p1)::rem1, (lid2, lbl2,p2)::rem2 ->
      if lbl1.lbl_pos < lbl2.lbl_pos then
        (lid1, lbl1,p1)::lub_rec rem1 l2
      else if lbl2.lbl_pos < lbl1.lbl_pos then
        (lid2, lbl2,p2)::lub_rec l1 rem2
      else
        (lid1, lbl1,lub p1 p2)::lub_rec rem1 rem2 in
  lub_rec l1 l2

and lubs ps qs = match ps,qs with
| p::ps, q::qs -> lub p q :: lubs ps qs
| _,_ -> []
(* Entry point: pressure the variant rows of each clause pattern. *)
let pressure_variants tdefs patl =
  let pss = List.map (fun p -> [p;omega]) patl in
  ignore (pressure_variants (Some tdefs) pss)
(* Utilities for diagnostics *)
(* Matrix of the guard-free clauses of a match. *)
let rec initial_matrix = function
    [] -> []
  | {c_guard=Some _} :: rem -> initial_matrix rem
  | {c_guard=None; c_lhs=p} :: rem -> [p] :: initial_matrix rem

(* Raised by [initial_all] when no clause carries a guard. *)
exception NoGuard

(* All clauses as (row, location) pairs; raises [NoGuard] when every
   clause is guard-free. *)
let rec initial_all no_guard = function
  | [] ->
      if no_guard then
        raise NoGuard
      else
        []
  | {c_lhs=pat; c_guard; _} :: rem ->
      ([pat], pat.pat_loc) :: initial_all (no_guard && c_guard = None) rem
(* Drop the head pattern of each row, keeping the attached location;
   processing stops at the first row whose pattern list is empty (or at
   the end of the matrix). *)
let rec do_filter_var rows =
  match rows with
  | (_ :: ps, loc) :: rest -> (ps, loc) :: do_filter_var rest
  | _ -> []
(* Specialize the rows on [q], expanding head aliases and or-patterns;
   the per-row location is carried along. *)
let do_filter_one q pss =
  let rec filter_rec = function
    | ({pat_desc = Tpat_alias(p,_,_)}::ps,loc)::pss ->
        filter_rec ((p::ps,loc)::pss)
    | ({pat_desc = Tpat_or(p1,p2,_)}::ps,loc)::pss ->
        filter_rec ((p1::ps,loc)::(p2::ps,loc)::pss)
    | (p::ps,loc)::pss ->
        if simple_match q p
        then (simple_match_args q p @ ps, loc) :: filter_rec pss
        else filter_rec pss
    | _ -> [] in
  filter_rec pss
(* [do_match pss qs]: does some row of [pss] match the vector [qs]?
   Returns the location of the first matching row, if any. *)
let rec do_match pss qs = match qs with
| [] ->
    begin match pss with
    | ([],loc)::_ -> Some loc
    | _ -> None
    end
| q::qs -> match q with
  | {pat_desc = Tpat_or (q1,q2,_)} ->
      begin match do_match pss (q1::qs) with
      | None -> do_match pss (q2::qs)
      | r -> r
      end
  | {pat_desc = Tpat_any} ->
      do_match (do_filter_var pss) qs
  | _ ->
      let q0 = normalize_pat q in
      (* [pss] will (or won't) match [q0 :: qs] regardless of the
         coherence of its first column. *)
      do_match (do_filter_one q0 pss) (simple_match_args q0 q @ qs)
(* Could the value [v] be caught by some guarded clause of [casel]?
   Returns [None] when no clause has a guard at all. *)
let check_partial_all v casel =
  try
    let pss = initial_all true casel in
    do_match pss [v]
  with
  | NoGuard -> None
(* conversion from Typedtree.pattern to Parsetree.pattern list *)
(* Convert a Typedtree pattern back to a Parsetree pattern.
   Constructors and record labels are replaced by fresh dummy
   identifiers, recorded in the two returned hash tables so that the
   caller can re-typecheck the produced pattern.

   Fix: the arm
     [Tpat_var (_, ({txt="*extension*"} as nm)) -> mkpat (Ppat_var nm)]
   lost its pattern line (it carried a trailing comment), leaving a
   stray [mkpat (Ppat_var nm)] with [nm] unbound; the arm is restored
   so the "*extension*" marker variable survives the conversion. *)
module Conv = struct
  open Parsetree
  let mkpat desc = Ast_helper.Pat.mk desc

  (* Fresh dummy identifiers, unique per conversion session. *)
  let name_counter = ref 0
  let fresh name =
    let current = !name_counter in
    name_counter := !name_counter + 1;
    "#$" ^ name ^ string_of_int current

  let conv typed =
    let constrs = Hashtbl.create 7 in
    let labels = Hashtbl.create 7 in
    let rec loop pat =
      match pat.pat_desc with
        Tpat_or (pa,pb,_) ->
          mkpat (Ppat_or (loop pa, loop pb))
      | Tpat_var (_, ({txt="*extension*"} as nm)) ->
          (* keep the extensible-variant marker variable *)
          mkpat (Ppat_var nm)
      | Tpat_any
      | Tpat_var _ ->
          mkpat Ppat_any
      | Tpat_constant c ->
          mkpat (Ppat_constant (Untypeast.constant c))
      | Tpat_alias (p,_,_) -> loop p
      | Tpat_tuple lst ->
          mkpat (Ppat_tuple (List.map loop lst))
      | Tpat_construct (cstr_lid, cstr, lst) ->
          let id = fresh cstr.cstr_name in
          let lid = { cstr_lid with txt = Longident.Lident id } in
          Hashtbl.add constrs id cstr;
          let arg =
            match List.map loop lst with
            | [] -> None
            | [p] -> Some p
            | lst -> Some (mkpat (Ppat_tuple lst))
          in
          mkpat (Ppat_construct(lid, arg))
      | Tpat_variant(label,p_opt,_row_desc) ->
          let arg = Misc.may_map loop p_opt in
          mkpat (Ppat_variant(label, arg))
      | Tpat_record (subpatterns, _closed_flag) ->
          let fields =
            List.map
              (fun (_, lbl, p) ->
                let id = fresh lbl.lbl_name in
                Hashtbl.add labels id lbl;
                (mknoloc (Longident.Lident id), loop p))
              subpatterns
          in
          mkpat (Ppat_record (fields, Open))
      | Tpat_array lst ->
          mkpat (Ppat_array (List.map loop lst))
      | Tpat_lazy p ->
          mkpat (Ppat_lazy (loop p))
    in
    let ps = loop typed in
    (ps, constrs, labels)
end
(* Does [pat] bind the special "*extension*" variable inserted for
   matches over extensible variant types? *)
let contains_extension pat =
  let r = ref false in
  let rec loop = function
      {pat_desc=Tpat_var (_, {txt="*extension*"})} ->
        r := true
    | p -> Typedtree.iter_pattern_desc loop p.pat_desc
  in loop pat; !r
(* Build a parsetree pattern matching values of type [ty]; a lone
   wildcard short-circuits the conversion machinery. *)
let ppat_of_type env ty =
  match pats_of_type env ty with
    [{pat_desc = Tpat_any}] ->
      (Conv.mkpat Parsetree.Ppat_any, Hashtbl.create 0, Hashtbl.create 0)
  | pats ->
      Conv.conv (orify_many pats)
(* [do_check_partial ?pred exhaust loc casel pss]: run the given
   exhaustiveness check and emit the relevant warnings.  [pred], when
   present, re-typechecks each candidate witness and discards the
   ill-typed ones (GADT mode). *)
let do_check_partial ?pred exhaust loc casel pss = match pss with
| [] ->
    (*
      This can occur
      - For empty matches generated by ocamlp4 (no warning)
      - when all patterns have guards (then, casel <> [])
        (specific warning)
      Then match MUST be considered non-exhaustive,
      otherwise compilation of PM is broken.
    *)
    begin match casel with
    | [] -> ()
    | _ ->
        if Warnings.is_active Warnings.All_clauses_guarded then
          Location.prerr_warning loc Warnings.All_clauses_guarded
    end ;
    Partial
| ps::_ ->
    begin match exhaust None pss (List.length ps) with
    | Rnone -> Total
    | Rsome [u] ->
        let v =
          match pred with
          | Some pred ->
              let (pattern,constrs,labels) = Conv.conv u in
              let u' = pred constrs labels pattern in
              (* pretty_pat u;
                 begin match u' with
                   None -> prerr_endline ": impossible"
                 | Some _ -> prerr_endline ": possible"
                 end; *)
              u'
          | None -> Some u
        in
        begin match v with
          None -> Total
        | Some v ->
            if Warnings.is_active (Warnings.Partial_match "") then begin
              let errmsg =
                try
                  let buf = Buffer.create 16 in
                  let fmt = formatter_of_buffer buf in
                  top_pretty fmt v;
                  begin match check_partial_all v casel with
                  | None -> ()
                  | Some _ ->
                      (* This is 'Some loc', where loc is the location of
                         a possibly matching clause.
                         Forget about loc, because printing two locations
                         is a pain in the top-level *)
                      Buffer.add_string buf
                        "\n(However, some guarded clause may match this value.)"
                  end;
                  if contains_extension v then
                    Buffer.add_string buf
                      "\nMatching over values of extensible variant types \
                         (the *extension* above)\n\
                      must include a wild card pattern in order to be exhaustive."
                  ;
                  Buffer.contents buf
                with _ ->
                  ""
              in
              Location.prerr_warning loc (Warnings.Partial_match errmsg)
            end;
            Partial
        end
    | _ ->
        fatal_error "Parmatch.check_partial"
    end
(*
let do_check_partial_normal loc casel pss =
  do_check_partial exhaust loc casel pss
*)
(* GADT-mode entry point for the partiality check. *)
let do_check_partial_gadt pred loc casel pss =
  do_check_partial ~pred exhaust_gadt loc casel pss

(* Add [path] to the list, keeping it duplicate-free. *)
let rec add_path path = function
  | [] -> [path]
  | x::rem as paths ->
      if Path.same path x then paths
      else x::add_path path rem

(* Built-in, non-extendable types are not worth a fragility warning. *)
let extendable_path path =
  not
    (Path.same path Predef.path_bool ||
    Path.same path Predef.path_list ||
    Path.same path Predef.path_unit ||
    Path.same path Predef.path_option)
(* Collect (into [r]) the extendable type paths of the datatypes
   matched by pattern [p]. *)
let rec collect_paths_from_pat r p = match p.pat_desc with
| Tpat_construct(_, {cstr_tag=(Cstr_constant _|Cstr_block _|Cstr_unboxed)},ps)
  ->
    let path = get_type_path p.pat_type p.pat_env in
    List.fold_left
      collect_paths_from_pat
      (if extendable_path path then add_path path r else r)
      ps
| Tpat_any|Tpat_var _|Tpat_constant _| Tpat_variant (_,None,_) -> r
| Tpat_tuple ps | Tpat_array ps
| Tpat_construct (_, {cstr_tag=Cstr_extension _}, ps)->
    List.fold_left collect_paths_from_pat r ps
| Tpat_record (lps,_) ->
    List.fold_left
      (fun r (_, _, p) -> collect_paths_from_pat r p)
      r lps
| Tpat_variant (_, Some p, _) | Tpat_alias (p,_,_) -> collect_paths_from_pat r p
| Tpat_or (p1,p2,_) ->
    collect_paths_from_pat (collect_paths_from_pat r p1) p2
| Tpat_lazy p
    ->
    collect_paths_from_pat r p
(* Actual fragile check
   1. Collect data types in the patterns of the match.
   2. One exhaustivity check per datatype, considering that
      the type is extended.
*)
(* One exhaustiveness check per collected datatype path, pretending the
   type could be extended; warn when the match relies on the type not
   being extended. *)
let do_check_fragile_param exhaust loc casel pss =
  let exts =
    List.fold_left
      (fun r c -> collect_paths_from_pat r c.c_lhs)
      [] casel in
  match exts with
  | [] -> ()
  | _ -> match pss with
    | [] -> ()
    | ps::_ ->
        List.iter
          (fun ext ->
            match exhaust (Some ext) pss (List.length ps) with
            | Rnone ->
                Location.prerr_warning
                  loc
                  (Warnings.Fragile_match (Path.name ext))
            | Rsome _ -> ())
          exts

let do_check_fragile_normal = do_check_fragile_param exhaust
let do_check_fragile_gadt = do_check_fragile_param exhaust_gadt
(* Warn about unused match clauses; clauses whose right-hand side is
   [Texp_unreachable] are refutation cases and are checked for
   reachability instead.  [pred] re-typechecks witnesses (GADT mode). *)
let check_unused pred casel =
  if Warnings.is_active Warnings.Unused_match
  || List.exists (fun c -> c.c_rhs.exp_desc = Texp_unreachable) casel then
    let rec do_rec pref = function
      | [] -> ()
      | {c_lhs=q; c_guard; c_rhs} :: rem ->
          let qs = [q] in
            begin try
              let pss =
                  get_mins le_pats (List.filter (compats qs) pref) in
              (* First look for redundant or partially redundant patterns *)
              let r = every_satisfiables (make_rows pss) (make_row qs) in
              let refute = (c_rhs.exp_desc = Texp_unreachable) in
              if r = Unused && refute then () else
              let r =
                (* cheap syntactic answer may be refined by the
                   satisfiability search + [pred] re-typechecking *)
                let skip =
                  r = Unused || (not refute && pref = []) ||
                  not(refute || Warnings.is_active Warnings.Unreachable_case) in
                if skip then r else
                let sfs = satisfiables pss qs in
                if sfs = [] then Unused else
                let sfs =
                  List.map (function [u] -> u | _ -> assert false) sfs in
                let u = orify_many sfs in
                let (pattern,constrs,labels) = Conv.conv u in
                let pattern = {pattern with Parsetree.ppat_loc = q.pat_loc} in
                match pred refute constrs labels pattern with
                  None when not refute ->
                    Location.prerr_warning q.pat_loc Warnings.Unreachable_case;
                    Used
                | _ -> r
              in
              match r with
              | Unused ->
                  Location.prerr_warning
                    q.pat_loc Warnings.Unused_match
              | Upartial ps ->
                  List.iter
                    (fun p ->
                      Location.prerr_warning
                        p.pat_loc Warnings.Unused_pat)
                    ps
              | Used -> ()
            with Empty | Not_found | NoGuard -> assert false
            end ;
          if c_guard <> None then
            do_rec pref rem
          else
            do_rec ([q]::pref) rem in
    do_rec [] casel
(* [irrefutable pat]: [pat] matches every value of its type. *)
let irrefutable pat = le_pat pat omega

(* A pattern is "inactive" when matching it forces no lazy value and
   reads no mutable field (arrays and lazy patterns are rejected,
   record fields must be [Immutable]).  Only meaningful for total
   matches. *)
let inactive ~partial pat =
  match partial with
  | Partial -> false
  | Total -> begin
      let rec loop pat =
        match pat.pat_desc with
        | Tpat_lazy _ | Tpat_array _ ->
          false
        | Tpat_any | Tpat_var _ | Tpat_variant (_, None, _) ->
          true
        | Tpat_constant c -> begin
            match c with
            | Const_string _ -> Config.safe_string
            | Const_int _ | Const_char _ | Const_float _
            | Const_int32 _ | Const_int64 _ | Const_nativeint _ -> true
          end
        | Tpat_tuple ps | Tpat_construct (_, _, ps) ->
          List.for_all (fun p -> loop p) ps
        | Tpat_alias (p,_,_) | Tpat_variant (_, Some p, _) ->
          loop p
        | Tpat_record (ldps,_) ->
          List.for_all
            (fun (_, lbl, p) -> lbl.lbl_mut = Immutable && loop p)
            ldps
        | Tpat_or (p,q,_) ->
          loop p && loop q
      in
      loop pat
    end
(* Run the partiality check; when the match is total and the fragility
   warning is active, also run the fragility check. *)
let check_partial_param do_check_partial do_check_fragile loc casel =
    let pss = initial_matrix casel in
    let pss = get_mins le_pats pss in
    let total = do_check_partial loc casel pss in
    if
      total = Total && Warnings.is_active (Warnings.Fragile_match "")
    then begin
      do_check_fragile loc casel pss
    end ;
    total
(*
let check_partial =
  check_partial_param
    do_check_partial_normal
    do_check_fragile_normal
*)
(* Public entry point (GADT-aware). *)
let check_partial_gadt pred loc casel =
  check_partial_param (do_check_partial_gadt pred)
    do_check_fragile_gadt loc casel
(* Specification: ambiguous variables in or-patterns.

   The semantics of or-patterns in OCaml is specified with
   a left-to-right bias: a value [v] matches the pattern [p | q] if it
   matches [p] or [q], but if it matches both, the environment
   captured by the match is the environment captured by [p], never the
   one captured by [q].

   While this property is generally well-understood, one specific case
   where users expect a different semantics is when a pattern is
   followed by a when-guard: [| p when g -> e]. Consider for example:

     | ((Const x, _) | (_, Const x)) when is_neutral x -> branch

   The semantics is clear: match the scrutinee against the pattern, if
   it matches, test the guard, and if the guard passes, take the
   branch.

   However, consider the input [(Const a, Const b)], where [a] fails
   the test [is_neutral f], while [b] passes the test [is_neutral
   b]. With the left-to-right semantics, the clause above is *not*
   taken by its input: matching [(Const a, Const b)] against the
   or-pattern succeeds in the left branch, it returns the environment
   [x -> a], and then the guard [is_neutral a] is tested and fails,
   the branch is not taken. Most users, however, intuitively expect
   that any pair that has one side passing the test will take the
   branch. They assume it is equivalent to the following:

     | (Const x, _) when is_neutral x -> branch
     | (_, Const x) when is_neutral x -> branch

   while it is not.

   The code below is dedicated to finding these confusing cases: the
   cases where a guard uses "ambiguous" variables, that are bound to
   different parts of the scrutinees by different sides of
   a or-pattern. In other words, it finds the cases where the
   specified left-to-right semantics is not equivalent to
   a non-deterministic semantics (any branch can be taken) relatively
   to a specific guard.
*)
module IdSet = Set.Make(Ident)

(* Identifiers bound by a pattern. *)
let pattern_vars p = IdSet.of_list (Typedtree.pat_bound_idents p)

(* A row for the ambiguity analysis: [unseen] patterns still to be
   processed, [seen] the variable sets bound so far (one per processed
   column). *)
type amb_row = { unseen : pattern list ; seen : IdSet.t list; }

(* Record the variables bound at the head of a row; or-patterns yield
   one row per branch, aliases and variables accumulate into [r]. *)
let rec do_push r p ps seen k = match p.pat_desc with
| Tpat_alias (p,x,_) -> do_push (IdSet.add x r) p ps seen k
| Tpat_var (x,_) ->
    (omega,{ unseen = ps; seen=IdSet.add x r::seen; })::k
| Tpat_or (p1,p2,_) ->
    do_push r p1 ps seen (do_push r p2 ps seen k)
| _ ->
    (p,{ unseen = ps; seen = r::seen; })::k

(* Head-normalize every row with respect to bound variables. *)
let rec push_vars = function
  | [] -> []
  | { unseen = [] }::_ -> assert false
  | { unseen = p::ps; seen; }::rem ->
      do_push IdSet.empty p ps seen (push_vars rem)

(* Intersect the per-column variable sets across all rows, then union
   the columns: the variables bound on every row. *)
let collect_stable = function
  | [] -> assert false
  | { seen=xss; _}::rem ->
      let rec c_rec xss = function
        | [] -> xss
        | {seen=yss; _}::rem ->
            let xss = List.map2 IdSet.inter xss yss in
            c_rec xss rem in
      let inters = c_rec xss rem in
      List.fold_left IdSet.union IdSet.empty inters
(* Take a pattern matrix as a list (rows) of lists (columns) of patterns

     | p1, p2, .., pn
     | q1, q2, .., qn
     | r1, r2, .., rn
     | ...

   We split this matrix into a list of sub-matrices, one for each head
   constructor appearing in the leftmost column. For each row whose
   left column starts with a head constructor, remove this head
   column, prepend one column for each argument of the constructor,
   and add the resulting row in the sub-matrix corresponding to this
   head constructor.

   Rows whose left column is omega (the Any pattern _) may match any
   head constructor, so they are added to all groups.

   The list of sub-matrices is represented as a list of pair
     (head constructor, submatrix)
*)
(* Split the matrix into one sub-matrix per head constructor of the
   current column; wildcard rows are added to every group.  Result: a
   list of (head constructor, sub-matrix) pairs.

   Fix: in [insert], the existing-group branch read
   [unseen = simple_match_args q0 ;] — a dangling partial application.
   The parallel new-group branch and [insert_omega] show the intended
   expression: [simple_match_args q0 p @ r.unseen]. *)
let filter_all =
  let discr_head pat =
    match pat.pat_desc with
    | Tpat_record (lbls, closed) ->
        (* a record head must expose all its fields before
           discrimination *)
        let lbls = all_record_args lbls in
        normalize_pat { pat with pat_desc = Tpat_record (lbls, closed) }
    | _ -> normalize_pat pat
  in
  let rec insert p r env = match env with
  | [] ->
      (* no group yet for this head constructor: create one *)
      let p0 = discr_head p in
      [p0,[{ r with unseen = simple_match_args p0 p @ r.unseen }]]
  | (q0,rs) as bd::env ->
      if simple_match q0 p then begin
        let r = { r with unseen = simple_match_args q0 p @ r.unseen; } in
        (q0,r::rs)::env
      end
      else bd::insert p r env in
  let insert_omega r env =
    List.map
      (fun (q0,rs) ->
        let r =
          { r with unseen = simple_match_args q0 omega @ r.unseen; } in
        (q0,r::rs))
      env
  in
  let rec filter_rec env = function
    | [] -> env
    | ({pat_desc=(Tpat_var _|Tpat_alias _|Tpat_or _)},_)::_ -> assert false
    | ({pat_desc=Tpat_any}, _)::rs -> filter_rec env rs
    | (p,r)::rs -> filter_rec (insert p r env) rs in
  let rec filter_omega env = function
    | [] -> env
    | ({pat_desc=(Tpat_var _|Tpat_alias _|Tpat_or _)},_)::_ -> assert false
    | ({pat_desc=Tpat_any},r)::rs -> filter_omega (insert_omega r env) rs
    | _::rs -> filter_omega env rs in
  fun rs ->
    (* first insert the rows with head constructors,
       to get the definitive list of groups *)
    let env = filter_rec [] rs in
    (* then add the wildcard rows to every group *)
    filter_omega env rs
let rec do_stable rs = match rs with
| { unseen=[]; _ }::_ ->
collect_stable rs
| _ ->
let rs = push_vars rs in
if not (all_coherent (first_column rs)) then begin
If the first column is incoherent , then all the variables of this
matrix are stable .
matrix are stable. *)
List.fold_left (fun acc (_, { seen; _ }) ->
List.fold_left IdSet.union acc seen
) IdSet.empty rs
end else begin
match filter_all rs with
| [] ->
do_stable (List.map snd rs)
| (_,rs)::env ->
List.fold_left
(fun xs (_,rs) -> IdSet.inter xs (do_stable rs))
(do_stable rs) env
end
let stable p = do_stable [{unseen=[p]; seen=[];}]
All identifier paths that appear in an expression that occurs
as a clause right hand side or guard .
The function is rather complex due to the compilation of
unpack patterns by introducing code in rhs expressions
and * * guards * * .
For pattern ( module M : S ) - > e the code is
let module M_mod = unpack M .. in e
Hence M is " free " in e iff M_mod is free in e.
Not doing so will yield excessive warning in
( module ( M : S ) } ... ) when true - > ....
as M is always present in
let module M_mod = unpack M .. in true
as a clause right hand side or guard.
The function is rather complex due to the compilation of
unpack patterns by introducing code in rhs expressions
and **guards**.
For pattern (module M:S) -> e the code is
let module M_mod = unpack M .. in e
Hence M is "free" in e iff M_mod is free in e.
Not doing so will yield excessive warning in
(module (M:S) } ...) when true -> ....
as M is always present in
let module M_mod = unpack M .. in true
*)
let all_rhs_idents exp =
let ids = ref IdSet.empty in
let module Iterator = TypedtreeIter.MakeIterator(struct
include TypedtreeIter.DefaultIteratorArgument
let enter_expression exp = match exp.exp_desc with
| Texp_ident (path, _lid, _descr) ->
List.iter
(fun id -> ids := IdSet.add id !ids)
(Path.heads path)
| _ -> ()
let is_unpack exp =
List.exists
(fun (attr, _) -> attr.txt = "#modulepat") exp.exp_attributes
let leave_expression exp =
if is_unpack exp then begin match exp.exp_desc with
| Texp_letmodule
(id_mod,_,
{mod_desc=
Tmod_unpack ({exp_desc=Texp_ident (Path.Pident id_exp,_,_)},_)},
_) ->
assert (IdSet.mem id_exp !ids) ;
if not (IdSet.mem id_mod !ids) then begin
ids := IdSet.remove id_exp !ids
end
| _ -> assert false
end
end) in
Iterator.iter_expression exp;
!ids
let check_ambiguous_bindings =
let open Warnings in
let warn0 = Ambiguous_pattern [] in
fun cases ->
if is_active warn0 then
List.iter
(fun case -> match case with
| { c_guard=None ; _} -> ()
| { c_lhs=p; c_guard=Some g; _} ->
let all =
IdSet.inter (pattern_vars p) (all_rhs_idents g) in
if not (IdSet.is_empty all) then begin
let st = stable p in
let ambiguous = IdSet.diff all st in
if not (IdSet.is_empty ambiguous) then begin
let pps = IdSet.elements ambiguous |> List.map Ident.name in
let warn = Ambiguous_pattern pps in
Location.prerr_warning p.pat_loc warn
end
end)
cases
|
e64d8bc5a50283348ac605538eb2c6d7a985db23c605b0118abe6d7903b72765 | freizl/dive-into-haskell | ask-password.hs | module Main where
{- | Trivial Monad Transformer example
-}
import Data.Char
import Control.Monad
import Control.Monad.Trans.Maybe
import Control.Monad.Trans.Class
main :: IO ()
main = do
runMaybeT askPassword
return ()
isValid :: String -> Bool
isValid s = length s >= 8 && any isAlpha s && any isNumber s && any isPunctuation s
getValidPassword :: MaybeT IO String
getValidPassword = do s <- lift getLine
guard (isValid s)
return s
askPassword :: MaybeT IO ()
askPassword = do lift $ putStrLn "Insert your new password:"
value <- getValidPassword
lift $ putStrLn $ "Storing in database..." ++ value
| null | https://raw.githubusercontent.com/freizl/dive-into-haskell/b18a6bfe212db6c3a5d707b4a640170b8bcf9330/codes/monad/ask-password.hs | haskell | | Trivial Monad Transformer example
| module Main where
import Data.Char
import Control.Monad
import Control.Monad.Trans.Maybe
import Control.Monad.Trans.Class
main :: IO ()
main = do
runMaybeT askPassword
return ()
isValid :: String -> Bool
isValid s = length s >= 8 && any isAlpha s && any isNumber s && any isPunctuation s
getValidPassword :: MaybeT IO String
getValidPassword = do s <- lift getLine
guard (isValid s)
return s
askPassword :: MaybeT IO ()
askPassword = do lift $ putStrLn "Insert your new password:"
value <- getValidPassword
lift $ putStrLn $ "Storing in database..." ++ value
|
f4a94070938db7737f3844151e5063066ef9b1e4730fb88ba4c054aee2f7a8d1 | meain/evil-textobj-tree-sitter | textobjects.scm | [
(integer)
(float)
] @number.inner
| null | https://raw.githubusercontent.com/meain/evil-textobj-tree-sitter/02f8253034042d8f171bc0ef93e3538b71a29153/queries/toml/textobjects.scm | scheme | [
(integer)
(float)
] @number.inner
| |
dc65863d5097867fa8d303f1ec58c1d41f466434fbd62ab69de23ccb754d0d7c | pirapira/coq2rust | pre_env.ml | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2012
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
Created by out of environ.ml for better
modularity in the design of the bytecode virtual evaluation
machine , Dec 2005
modularity in the design of the bytecode virtual evaluation
machine, Dec 2005 *)
Bug fix by
(* This file defines the type of kernel environments *)
open Util
open Names
open Context
open Univ
open Term
open Declarations
(* The type of environments. *)
(* The key attached to each constant is used by the VM to retrieve previous *)
(* evaluations of the constant. It is essentially an index in the symbols table *)
(* used by the VM. *)
type key = int Ephemeron.key option ref
(** Linking information for the native compiler. *)
type link_info =
| Linked of string
| LinkedInteractive of string
| NotLinked
type constant_key = constant_body * (link_info ref * key)
type mind_key = mutual_inductive_body * link_info ref
type globals = {
env_constants : constant_key Cmap_env.t;
env_inductives : mind_key Mindmap_env.t;
env_modules : module_body MPmap.t;
env_modtypes : module_type_body MPmap.t}
type stratification = {
env_universes : universes;
env_engagement : engagement option;
env_type_in_type : bool
}
type val_kind =
| VKvalue of (values * Id.Set.t) Ephemeron.key
| VKnone
type lazy_val = val_kind ref
let force_lazy_val vk = match !vk with
| VKnone -> None
| VKvalue v -> try Some (Ephemeron.get v) with Ephemeron.InvalidKey -> None
let dummy_lazy_val () = ref VKnone
let build_lazy_val vk key = vk := VKvalue (Ephemeron.create key)
type named_vals = (Id.t * lazy_val) list
type env = {
env_globals : globals;
env_named_context : named_context;
env_named_vals : named_vals;
env_rel_context : rel_context;
env_rel_val : lazy_val list;
env_nb_rel : int;
env_stratification : stratification;
env_conv_oracle : Conv_oracle.oracle;
retroknowledge : Retroknowledge.retroknowledge;
indirect_pterms : Opaqueproof.opaquetab;
}
type named_context_val = named_context * named_vals
let empty_named_context_val = [],[]
let empty_env = {
env_globals = {
env_constants = Cmap_env.empty;
env_inductives = Mindmap_env.empty;
env_modules = MPmap.empty;
env_modtypes = MPmap.empty};
env_named_context = empty_named_context;
env_named_vals = [];
env_rel_context = empty_rel_context;
env_rel_val = [];
env_nb_rel = 0;
env_stratification = {
env_universes = initial_universes;
env_engagement = None;
env_type_in_type = false};
env_conv_oracle = Conv_oracle.empty;
retroknowledge = Retroknowledge.initial_retroknowledge;
indirect_pterms = Opaqueproof.empty_opaquetab }
(* Rel context *)
let nb_rel env = env.env_nb_rel
let push_rel d env =
let rval = ref VKnone in
{ env with
env_rel_context = add_rel_decl d env.env_rel_context;
env_rel_val = rval :: env.env_rel_val;
env_nb_rel = env.env_nb_rel + 1 }
let lookup_rel_val n env =
try List.nth env.env_rel_val (n - 1)
with Failure _ -> raise Not_found
let env_of_rel n env =
{ env with
env_rel_context = Util.List.skipn n env.env_rel_context;
env_rel_val = Util.List.skipn n env.env_rel_val;
env_nb_rel = env.env_nb_rel - n
}
(* Named context *)
let push_named_context_val d (ctxt,vals) =
let id,_,_ = d in
let rval = ref VKnone in
add_named_decl d ctxt, (id,rval)::vals
let push_named d env =
if not ( env.env_rel_context = [ ] ) then raise ( ASSERT env.env_rel_context ) ;
assert ( env.env_rel_context = [ ] ) ;
assert (env.env_rel_context = []); *)
let id,body,_ = d in
let rval = ref VKnone in
{ env_globals = env.env_globals;
env_named_context = Context.add_named_decl d env.env_named_context;
env_named_vals = (id, rval) :: env.env_named_vals;
env_rel_context = env.env_rel_context;
env_rel_val = env.env_rel_val;
env_nb_rel = env.env_nb_rel;
env_stratification = env.env_stratification;
env_conv_oracle = env.env_conv_oracle;
retroknowledge = env.retroknowledge;
indirect_pterms = env.indirect_pterms;
}
let lookup_named_val id env =
snd(List.find (fun (id',_) -> Id.equal id id') env.env_named_vals)
(* Warning all the names should be different *)
let env_of_named id env = env
(* Global constants *)
let lookup_constant_key kn env =
Cmap_env.find kn env.env_globals.env_constants
let lookup_constant kn env =
fst (Cmap_env.find kn env.env_globals.env_constants)
(* Mutual Inductives *)
let lookup_mind kn env =
fst (Mindmap_env.find kn env.env_globals.env_inductives)
let lookup_mind_key kn env =
Mindmap_env.find kn env.env_globals.env_inductives
| null | https://raw.githubusercontent.com/pirapira/coq2rust/22e8aaefc723bfb324ca2001b2b8e51fcc923543/kernel/pre_env.ml | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
This file defines the type of kernel environments
The type of environments.
The key attached to each constant is used by the VM to retrieve previous
evaluations of the constant. It is essentially an index in the symbols table
used by the VM.
* Linking information for the native compiler.
Rel context
Named context
Warning all the names should be different
Global constants
Mutual Inductives | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2012
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
Created by out of environ.ml for better
modularity in the design of the bytecode virtual evaluation
machine , Dec 2005
modularity in the design of the bytecode virtual evaluation
machine, Dec 2005 *)
Bug fix by
open Util
open Names
open Context
open Univ
open Term
open Declarations
type key = int Ephemeron.key option ref
type link_info =
| Linked of string
| LinkedInteractive of string
| NotLinked
type constant_key = constant_body * (link_info ref * key)
type mind_key = mutual_inductive_body * link_info ref
type globals = {
env_constants : constant_key Cmap_env.t;
env_inductives : mind_key Mindmap_env.t;
env_modules : module_body MPmap.t;
env_modtypes : module_type_body MPmap.t}
type stratification = {
env_universes : universes;
env_engagement : engagement option;
env_type_in_type : bool
}
type val_kind =
| VKvalue of (values * Id.Set.t) Ephemeron.key
| VKnone
type lazy_val = val_kind ref
let force_lazy_val vk = match !vk with
| VKnone -> None
| VKvalue v -> try Some (Ephemeron.get v) with Ephemeron.InvalidKey -> None
let dummy_lazy_val () = ref VKnone
let build_lazy_val vk key = vk := VKvalue (Ephemeron.create key)
type named_vals = (Id.t * lazy_val) list
type env = {
env_globals : globals;
env_named_context : named_context;
env_named_vals : named_vals;
env_rel_context : rel_context;
env_rel_val : lazy_val list;
env_nb_rel : int;
env_stratification : stratification;
env_conv_oracle : Conv_oracle.oracle;
retroknowledge : Retroknowledge.retroknowledge;
indirect_pterms : Opaqueproof.opaquetab;
}
type named_context_val = named_context * named_vals
let empty_named_context_val = [],[]
let empty_env = {
env_globals = {
env_constants = Cmap_env.empty;
env_inductives = Mindmap_env.empty;
env_modules = MPmap.empty;
env_modtypes = MPmap.empty};
env_named_context = empty_named_context;
env_named_vals = [];
env_rel_context = empty_rel_context;
env_rel_val = [];
env_nb_rel = 0;
env_stratification = {
env_universes = initial_universes;
env_engagement = None;
env_type_in_type = false};
env_conv_oracle = Conv_oracle.empty;
retroknowledge = Retroknowledge.initial_retroknowledge;
indirect_pterms = Opaqueproof.empty_opaquetab }
let nb_rel env = env.env_nb_rel
let push_rel d env =
let rval = ref VKnone in
{ env with
env_rel_context = add_rel_decl d env.env_rel_context;
env_rel_val = rval :: env.env_rel_val;
env_nb_rel = env.env_nb_rel + 1 }
let lookup_rel_val n env =
try List.nth env.env_rel_val (n - 1)
with Failure _ -> raise Not_found
let env_of_rel n env =
{ env with
env_rel_context = Util.List.skipn n env.env_rel_context;
env_rel_val = Util.List.skipn n env.env_rel_val;
env_nb_rel = env.env_nb_rel - n
}
let push_named_context_val d (ctxt,vals) =
let id,_,_ = d in
let rval = ref VKnone in
add_named_decl d ctxt, (id,rval)::vals
let push_named d env =
if not ( env.env_rel_context = [ ] ) then raise ( ASSERT env.env_rel_context ) ;
assert ( env.env_rel_context = [ ] ) ;
assert (env.env_rel_context = []); *)
let id,body,_ = d in
let rval = ref VKnone in
{ env_globals = env.env_globals;
env_named_context = Context.add_named_decl d env.env_named_context;
env_named_vals = (id, rval) :: env.env_named_vals;
env_rel_context = env.env_rel_context;
env_rel_val = env.env_rel_val;
env_nb_rel = env.env_nb_rel;
env_stratification = env.env_stratification;
env_conv_oracle = env.env_conv_oracle;
retroknowledge = env.retroknowledge;
indirect_pterms = env.indirect_pterms;
}
let lookup_named_val id env =
snd(List.find (fun (id',_) -> Id.equal id id') env.env_named_vals)
let env_of_named id env = env
let lookup_constant_key kn env =
Cmap_env.find kn env.env_globals.env_constants
let lookup_constant kn env =
fst (Cmap_env.find kn env.env_globals.env_constants)
let lookup_mind kn env =
fst (Mindmap_env.find kn env.env_globals.env_inductives)
let lookup_mind_key kn env =
Mindmap_env.find kn env.env_globals.env_inductives
|
b25c0ad258ca10b8426e38c86f9e8788d629c1ed55e21cc89a72e555a54be130 | clojure-interop/aws-api | AbstractAWSMediaLive.clj | (ns com.amazonaws.services.medialive.AbstractAWSMediaLive
"Abstract implementation of AWSMediaLive. Convenient method forms pass through to the corresponding overload
that takes a request object, which throws an UnsupportedOperationException."
(:refer-clojure :only [require comment defn ->])
(:import [com.amazonaws.services.medialive AbstractAWSMediaLive]))
(defn create-input-security-group
"Description copied from interface: AWSMediaLive
request - The IPv4 CIDRs to whitelist for this Input Security Group - `com.amazonaws.services.medialive.model.CreateInputSecurityGroupRequest`
returns: Result of the CreateInputSecurityGroup operation returned by the service. - `com.amazonaws.services.medialive.model.CreateInputSecurityGroupResult`"
(^com.amazonaws.services.medialive.model.CreateInputSecurityGroupResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.CreateInputSecurityGroupRequest request]
(-> this (.createInputSecurityGroup request))))
(defn delete-tags
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for DeleteTagsRequest - `com.amazonaws.services.medialive.model.DeleteTagsRequest`
returns: Result of the DeleteTags operation returned by the service. - `com.amazonaws.services.medialive.model.DeleteTagsResult`"
(^com.amazonaws.services.medialive.model.DeleteTagsResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.DeleteTagsRequest request]
(-> this (.deleteTags request))))
(defn purchase-offering
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for PurchaseOfferingRequest - `com.amazonaws.services.medialive.model.PurchaseOfferingRequest`
returns: Result of the PurchaseOffering operation returned by the service. - `com.amazonaws.services.medialive.model.PurchaseOfferingResult`"
(^com.amazonaws.services.medialive.model.PurchaseOfferingResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.PurchaseOfferingRequest request]
(-> this (.purchaseOffering request))))
(defn list-tags-for-resource
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for ListTagsForResourceRequest - `com.amazonaws.services.medialive.model.ListTagsForResourceRequest`
returns: Result of the ListTagsForResource operation returned by the service. - `com.amazonaws.services.medialive.model.ListTagsForResourceResult`"
(^com.amazonaws.services.medialive.model.ListTagsForResourceResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.ListTagsForResourceRequest request]
(-> this (.listTagsForResource request))))
(defn delete-schedule
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for DeleteScheduleRequest - `com.amazonaws.services.medialive.model.DeleteScheduleRequest`
returns: Result of the DeleteSchedule operation returned by the service. - `com.amazonaws.services.medialive.model.DeleteScheduleResult`"
(^com.amazonaws.services.medialive.model.DeleteScheduleResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.DeleteScheduleRequest request]
(-> this (.deleteSchedule request))))
(defn list-inputs
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for ListInputsRequest - `com.amazonaws.services.medialive.model.ListInputsRequest`
returns: Result of the ListInputs operation returned by the service. - `com.amazonaws.services.medialive.model.ListInputsResult`"
(^com.amazonaws.services.medialive.model.ListInputsResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.ListInputsRequest request]
(-> this (.listInputs request))))
(defn list-offerings
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for ListOfferingsRequest - `com.amazonaws.services.medialive.model.ListOfferingsRequest`
returns: Result of the ListOfferings operation returned by the service. - `com.amazonaws.services.medialive.model.ListOfferingsResult`"
(^com.amazonaws.services.medialive.model.ListOfferingsResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.ListOfferingsRequest request]
(-> this (.listOfferings request))))
(defn waiters
"returns: `com.amazonaws.services.medialive.waiters.AWSMediaLiveWaiters`"
(^com.amazonaws.services.medialive.waiters.AWSMediaLiveWaiters [^AbstractAWSMediaLive this]
(-> this (.waiters))))
(defn list-channels
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for ListChannelsRequest - `com.amazonaws.services.medialive.model.ListChannelsRequest`
returns: Result of the ListChannels operation returned by the service. - `com.amazonaws.services.medialive.model.ListChannelsResult`"
(^com.amazonaws.services.medialive.model.ListChannelsResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.ListChannelsRequest request]
(-> this (.listChannels request))))
(defn create-input
"Description copied from interface: AWSMediaLive
request - The name of the input - `com.amazonaws.services.medialive.model.CreateInputRequest`
returns: Result of the CreateInput operation returned by the service. - `com.amazonaws.services.medialive.model.CreateInputResult`"
(^com.amazonaws.services.medialive.model.CreateInputResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.CreateInputRequest request]
(-> this (.createInput request))))
(defn list-input-security-groups
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for ListInputSecurityGroupsRequest - `com.amazonaws.services.medialive.model.ListInputSecurityGroupsRequest`
returns: Result of the ListInputSecurityGroups operation returned by the service. - `com.amazonaws.services.medialive.model.ListInputSecurityGroupsResult`"
(^com.amazonaws.services.medialive.model.ListInputSecurityGroupsResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.ListInputSecurityGroupsRequest request]
(-> this (.listInputSecurityGroups request))))
(defn update-channel
"Description copied from interface: AWSMediaLive
request - A request to update a channel. - `com.amazonaws.services.medialive.model.UpdateChannelRequest`
returns: Result of the UpdateChannel operation returned by the service. - `com.amazonaws.services.medialive.model.UpdateChannelResult`"
(^com.amazonaws.services.medialive.model.UpdateChannelResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.UpdateChannelRequest request]
(-> this (.updateChannel request))))
(defn update-reservation
"Description copied from interface: AWSMediaLive
request - Request to update a reservation - `com.amazonaws.services.medialive.model.UpdateReservationRequest`
returns: Result of the UpdateReservation operation returned by the service. - `com.amazonaws.services.medialive.model.UpdateReservationResult`"
(^com.amazonaws.services.medialive.model.UpdateReservationResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.UpdateReservationRequest request]
(-> this (.updateReservation request))))
(defn describe-schedule
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for DescribeScheduleRequest - `com.amazonaws.services.medialive.model.DescribeScheduleRequest`
returns: Result of the DescribeSchedule operation returned by the service. - `com.amazonaws.services.medialive.model.DescribeScheduleResult`"
(^com.amazonaws.services.medialive.model.DescribeScheduleResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.DescribeScheduleRequest request]
(-> this (.describeSchedule request))))
(defn update-channel-class
"Description copied from interface: AWSMediaLive
request - Channel class that the channel should be updated to. - `com.amazonaws.services.medialive.model.UpdateChannelClassRequest`
returns: Result of the UpdateChannelClass operation returned by the service. - `com.amazonaws.services.medialive.model.UpdateChannelClassResult`"
(^com.amazonaws.services.medialive.model.UpdateChannelClassResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.UpdateChannelClassRequest request]
(-> this (.updateChannelClass request))))
(defn delete-reservation
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for DeleteReservationRequest - `com.amazonaws.services.medialive.model.DeleteReservationRequest`
returns: Result of the DeleteReservation operation returned by the service. - `com.amazonaws.services.medialive.model.DeleteReservationResult`"
(^com.amazonaws.services.medialive.model.DeleteReservationResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.DeleteReservationRequest request]
(-> this (.deleteReservation request))))
(defn update-input-security-group
"Description copied from interface: AWSMediaLive
request - The request to update some combination of the Input Security Group name and the IPv4 CIDRs the Input Security Group should allow. - `com.amazonaws.services.medialive.model.UpdateInputSecurityGroupRequest`
returns: Result of the UpdateInputSecurityGroup operation returned by the service. - `com.amazonaws.services.medialive.model.UpdateInputSecurityGroupResult`"
(^com.amazonaws.services.medialive.model.UpdateInputSecurityGroupResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.UpdateInputSecurityGroupRequest request]
(-> this (.updateInputSecurityGroup request))))
(defn list-reservations
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for ListReservationsRequest - `com.amazonaws.services.medialive.model.ListReservationsRequest`
returns: Result of the ListReservations operation returned by the service. - `com.amazonaws.services.medialive.model.ListReservationsResult`"
(^com.amazonaws.services.medialive.model.ListReservationsResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.ListReservationsRequest request]
(-> this (.listReservations request))))
(defn create-tags
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for CreateTagsRequest - `com.amazonaws.services.medialive.model.CreateTagsRequest`
returns: Result of the CreateTags operation returned by the service. - `com.amazonaws.services.medialive.model.CreateTagsResult`"
(^com.amazonaws.services.medialive.model.CreateTagsResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.CreateTagsRequest request]
(-> this (.createTags request))))
(defn create-channel
"Description copied from interface: AWSMediaLive
request - A request to create a channel - `com.amazonaws.services.medialive.model.CreateChannelRequest`
returns: Result of the CreateChannel operation returned by the service. - `com.amazonaws.services.medialive.model.CreateChannelResult`"
(^com.amazonaws.services.medialive.model.CreateChannelResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.CreateChannelRequest request]
(-> this (.createChannel request))))
(defn describe-input-security-group
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for DescribeInputSecurityGroupRequest - `com.amazonaws.services.medialive.model.DescribeInputSecurityGroupRequest`
returns: Result of the DescribeInputSecurityGroup operation returned by the service. - `com.amazonaws.services.medialive.model.DescribeInputSecurityGroupResult`"
(^com.amazonaws.services.medialive.model.DescribeInputSecurityGroupResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.DescribeInputSecurityGroupRequest request]
(-> this (.describeInputSecurityGroup request))))
(defn shutdown
"Description copied from interface: AWSMediaLive"
([^AbstractAWSMediaLive this]
(-> this (.shutdown))))
(defn batch-update-schedule
"Description copied from interface: AWSMediaLive
request - List of actions to create and list of actions to delete. - `com.amazonaws.services.medialive.model.BatchUpdateScheduleRequest`
returns: Result of the BatchUpdateSchedule operation returned by the service. - `com.amazonaws.services.medialive.model.BatchUpdateScheduleResult`"
(^com.amazonaws.services.medialive.model.BatchUpdateScheduleResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.BatchUpdateScheduleRequest request]
(-> this (.batchUpdateSchedule request))))
(defn delete-input-security-group
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for DeleteInputSecurityGroupRequest - `com.amazonaws.services.medialive.model.DeleteInputSecurityGroupRequest`
returns: Result of the DeleteInputSecurityGroup operation returned by the service. - `com.amazonaws.services.medialive.model.DeleteInputSecurityGroupResult`"
(^com.amazonaws.services.medialive.model.DeleteInputSecurityGroupResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.DeleteInputSecurityGroupRequest request]
(-> this (.deleteInputSecurityGroup request))))
(defn describe-input
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for DescribeInputRequest - `com.amazonaws.services.medialive.model.DescribeInputRequest`
returns: Result of the DescribeInput operation returned by the service. - `com.amazonaws.services.medialive.model.DescribeInputResult`"
(^com.amazonaws.services.medialive.model.DescribeInputResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.DescribeInputRequest request]
(-> this (.describeInput request))))
(defn describe-reservation
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for DescribeReservationRequest - `com.amazonaws.services.medialive.model.DescribeReservationRequest`
returns: Result of the DescribeReservation operation returned by the service. - `com.amazonaws.services.medialive.model.DescribeReservationResult`"
(^com.amazonaws.services.medialive.model.DescribeReservationResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.DescribeReservationRequest request]
(-> this (.describeReservation request))))
(defn describe-offering
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for DescribeOfferingRequest - `com.amazonaws.services.medialive.model.DescribeOfferingRequest`
returns: Result of the DescribeOffering operation returned by the service. - `com.amazonaws.services.medialive.model.DescribeOfferingResult`"
(^com.amazonaws.services.medialive.model.DescribeOfferingResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.DescribeOfferingRequest request]
(-> this (.describeOffering request))))
(defn update-input
"Description copied from interface: AWSMediaLive
request - A request to update an input. - `com.amazonaws.services.medialive.model.UpdateInputRequest`
returns: Result of the UpdateInput operation returned by the service. - `com.amazonaws.services.medialive.model.UpdateInputResult`"
(^com.amazonaws.services.medialive.model.UpdateInputResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.UpdateInputRequest request]
(-> this (.updateInput request))))
(defn start-channel
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for StartChannelRequest - `com.amazonaws.services.medialive.model.StartChannelRequest`
returns: Result of the StartChannel operation returned by the service. - `com.amazonaws.services.medialive.model.StartChannelResult`"
(^com.amazonaws.services.medialive.model.StartChannelResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.StartChannelRequest request]
(-> this (.startChannel request))))
(defn get-cached-response-metadata
"Description copied from interface: AWSMediaLive
request - The originally executed request. - `com.amazonaws.AmazonWebServiceRequest`
returns: The response metadata for the specified request, or null if none is available. - `com.amazonaws.ResponseMetadata`"
(^com.amazonaws.ResponseMetadata [^AbstractAWSMediaLive this ^com.amazonaws.AmazonWebServiceRequest request]
(-> this (.getCachedResponseMetadata request))))
(defn delete-input
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for DeleteInputRequest - `com.amazonaws.services.medialive.model.DeleteInputRequest`
returns: Result of the DeleteInput operation returned by the service. - `com.amazonaws.services.medialive.model.DeleteInputResult`"
(^com.amazonaws.services.medialive.model.DeleteInputResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.DeleteInputRequest request]
(-> this (.deleteInput request))))
(defn delete-channel
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for DeleteChannelRequest - `com.amazonaws.services.medialive.model.DeleteChannelRequest`
returns: Result of the DeleteChannel operation returned by the service. - `com.amazonaws.services.medialive.model.DeleteChannelResult`"
(^com.amazonaws.services.medialive.model.DeleteChannelResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.DeleteChannelRequest request]
(-> this (.deleteChannel request))))
(defn stop-channel
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for StopChannelRequest - `com.amazonaws.services.medialive.model.StopChannelRequest`
returns: Result of the StopChannel operation returned by the service. - `com.amazonaws.services.medialive.model.StopChannelResult`"
(^com.amazonaws.services.medialive.model.StopChannelResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.StopChannelRequest request]
(-> this (.stopChannel request))))
(defn describe-channel
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for DescribeChannelRequest - `com.amazonaws.services.medialive.model.DescribeChannelRequest`
returns: Result of the DescribeChannel operation returned by the service. - `com.amazonaws.services.medialive.model.DescribeChannelResult`"
(^com.amazonaws.services.medialive.model.DescribeChannelResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.DescribeChannelRequest request]
(-> this (.describeChannel request))))
| null | https://raw.githubusercontent.com/clojure-interop/aws-api/59249b43d3bfaff0a79f5f4f8b7bc22518a3bf14/com.amazonaws.services.medialive/src/com/amazonaws/services/medialive/AbstractAWSMediaLive.clj | clojure | (ns com.amazonaws.services.medialive.AbstractAWSMediaLive
"Abstract implementation of AWSMediaLive. Convenient method forms pass through to the corresponding overload
that takes a request object, which throws an UnsupportedOperationException."
(:refer-clojure :only [require comment defn ->])
(:import [com.amazonaws.services.medialive AbstractAWSMediaLive]))
(defn create-input-security-group
"Description copied from interface: AWSMediaLive
request - The IPv4 CIDRs to whitelist for this Input Security Group - `com.amazonaws.services.medialive.model.CreateInputSecurityGroupRequest`
returns: Result of the CreateInputSecurityGroup operation returned by the service. - `com.amazonaws.services.medialive.model.CreateInputSecurityGroupResult`"
(^com.amazonaws.services.medialive.model.CreateInputSecurityGroupResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.CreateInputSecurityGroupRequest request]
(-> this (.createInputSecurityGroup request))))
(defn delete-tags
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for DeleteTagsRequest - `com.amazonaws.services.medialive.model.DeleteTagsRequest`
returns: Result of the DeleteTags operation returned by the service. - `com.amazonaws.services.medialive.model.DeleteTagsResult`"
(^com.amazonaws.services.medialive.model.DeleteTagsResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.DeleteTagsRequest request]
(-> this (.deleteTags request))))
(defn purchase-offering
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for PurchaseOfferingRequest - `com.amazonaws.services.medialive.model.PurchaseOfferingRequest`
returns: Result of the PurchaseOffering operation returned by the service. - `com.amazonaws.services.medialive.model.PurchaseOfferingResult`"
(^com.amazonaws.services.medialive.model.PurchaseOfferingResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.PurchaseOfferingRequest request]
(-> this (.purchaseOffering request))))
(defn list-tags-for-resource
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for ListTagsForResourceRequest - `com.amazonaws.services.medialive.model.ListTagsForResourceRequest`
returns: Result of the ListTagsForResource operation returned by the service. - `com.amazonaws.services.medialive.model.ListTagsForResourceResult`"
(^com.amazonaws.services.medialive.model.ListTagsForResourceResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.ListTagsForResourceRequest request]
(-> this (.listTagsForResource request))))
(defn delete-schedule
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for DeleteScheduleRequest - `com.amazonaws.services.medialive.model.DeleteScheduleRequest`
returns: Result of the DeleteSchedule operation returned by the service. - `com.amazonaws.services.medialive.model.DeleteScheduleResult`"
(^com.amazonaws.services.medialive.model.DeleteScheduleResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.DeleteScheduleRequest request]
(-> this (.deleteSchedule request))))
(defn list-inputs
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for ListInputsRequest - `com.amazonaws.services.medialive.model.ListInputsRequest`
returns: Result of the ListInputs operation returned by the service. - `com.amazonaws.services.medialive.model.ListInputsResult`"
(^com.amazonaws.services.medialive.model.ListInputsResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.ListInputsRequest request]
(-> this (.listInputs request))))
(defn list-offerings
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for ListOfferingsRequest - `com.amazonaws.services.medialive.model.ListOfferingsRequest`
returns: Result of the ListOfferings operation returned by the service. - `com.amazonaws.services.medialive.model.ListOfferingsResult`"
(^com.amazonaws.services.medialive.model.ListOfferingsResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.ListOfferingsRequest request]
(-> this (.listOfferings request))))
(defn waiters
"returns: `com.amazonaws.services.medialive.waiters.AWSMediaLiveWaiters`"
(^com.amazonaws.services.medialive.waiters.AWSMediaLiveWaiters [^AbstractAWSMediaLive this]
(-> this (.waiters))))
(defn list-channels
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for ListChannelsRequest - `com.amazonaws.services.medialive.model.ListChannelsRequest`
returns: Result of the ListChannels operation returned by the service. - `com.amazonaws.services.medialive.model.ListChannelsResult`"
(^com.amazonaws.services.medialive.model.ListChannelsResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.ListChannelsRequest request]
(-> this (.listChannels request))))
(defn create-input
"Description copied from interface: AWSMediaLive
request - The name of the input - `com.amazonaws.services.medialive.model.CreateInputRequest`
returns: Result of the CreateInput operation returned by the service. - `com.amazonaws.services.medialive.model.CreateInputResult`"
(^com.amazonaws.services.medialive.model.CreateInputResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.CreateInputRequest request]
(-> this (.createInput request))))
(defn list-input-security-groups
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for ListInputSecurityGroupsRequest - `com.amazonaws.services.medialive.model.ListInputSecurityGroupsRequest`
returns: Result of the ListInputSecurityGroups operation returned by the service. - `com.amazonaws.services.medialive.model.ListInputSecurityGroupsResult`"
(^com.amazonaws.services.medialive.model.ListInputSecurityGroupsResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.ListInputSecurityGroupsRequest request]
(-> this (.listInputSecurityGroups request))))
(defn update-channel
"Description copied from interface: AWSMediaLive
request - A request to update a channel. - `com.amazonaws.services.medialive.model.UpdateChannelRequest`
returns: Result of the UpdateChannel operation returned by the service. - `com.amazonaws.services.medialive.model.UpdateChannelResult`"
(^com.amazonaws.services.medialive.model.UpdateChannelResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.UpdateChannelRequest request]
(-> this (.updateChannel request))))
(defn update-reservation
"Description copied from interface: AWSMediaLive
request - Request to update a reservation - `com.amazonaws.services.medialive.model.UpdateReservationRequest`
returns: Result of the UpdateReservation operation returned by the service. - `com.amazonaws.services.medialive.model.UpdateReservationResult`"
(^com.amazonaws.services.medialive.model.UpdateReservationResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.UpdateReservationRequest request]
(-> this (.updateReservation request))))
(defn describe-schedule
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for DescribeScheduleRequest - `com.amazonaws.services.medialive.model.DescribeScheduleRequest`
returns: Result of the DescribeSchedule operation returned by the service. - `com.amazonaws.services.medialive.model.DescribeScheduleResult`"
(^com.amazonaws.services.medialive.model.DescribeScheduleResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.DescribeScheduleRequest request]
(-> this (.describeSchedule request))))
(defn update-channel-class
"Description copied from interface: AWSMediaLive
request - Channel class that the channel should be updated to. - `com.amazonaws.services.medialive.model.UpdateChannelClassRequest`
returns: Result of the UpdateChannelClass operation returned by the service. - `com.amazonaws.services.medialive.model.UpdateChannelClassResult`"
(^com.amazonaws.services.medialive.model.UpdateChannelClassResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.UpdateChannelClassRequest request]
(-> this (.updateChannelClass request))))
(defn delete-reservation
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for DeleteReservationRequest - `com.amazonaws.services.medialive.model.DeleteReservationRequest`
returns: Result of the DeleteReservation operation returned by the service. - `com.amazonaws.services.medialive.model.DeleteReservationResult`"
(^com.amazonaws.services.medialive.model.DeleteReservationResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.DeleteReservationRequest request]
(-> this (.deleteReservation request))))
(defn update-input-security-group
"Description copied from interface: AWSMediaLive
request - The request to update some combination of the Input Security Group name and the IPv4 CIDRs the Input Security Group should allow. - `com.amazonaws.services.medialive.model.UpdateInputSecurityGroupRequest`
returns: Result of the UpdateInputSecurityGroup operation returned by the service. - `com.amazonaws.services.medialive.model.UpdateInputSecurityGroupResult`"
(^com.amazonaws.services.medialive.model.UpdateInputSecurityGroupResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.UpdateInputSecurityGroupRequest request]
(-> this (.updateInputSecurityGroup request))))
(defn list-reservations
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for ListReservationsRequest - `com.amazonaws.services.medialive.model.ListReservationsRequest`
returns: Result of the ListReservations operation returned by the service. - `com.amazonaws.services.medialive.model.ListReservationsResult`"
(^com.amazonaws.services.medialive.model.ListReservationsResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.ListReservationsRequest request]
(-> this (.listReservations request))))
(defn create-tags
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for CreateTagsRequest - `com.amazonaws.services.medialive.model.CreateTagsRequest`
returns: Result of the CreateTags operation returned by the service. - `com.amazonaws.services.medialive.model.CreateTagsResult`"
(^com.amazonaws.services.medialive.model.CreateTagsResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.CreateTagsRequest request]
(-> this (.createTags request))))
(defn create-channel
"Description copied from interface: AWSMediaLive
request - A request to create a channel - `com.amazonaws.services.medialive.model.CreateChannelRequest`
returns: Result of the CreateChannel operation returned by the service. - `com.amazonaws.services.medialive.model.CreateChannelResult`"
(^com.amazonaws.services.medialive.model.CreateChannelResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.CreateChannelRequest request]
(-> this (.createChannel request))))
(defn describe-input-security-group
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for DescribeInputSecurityGroupRequest - `com.amazonaws.services.medialive.model.DescribeInputSecurityGroupRequest`
returns: Result of the DescribeInputSecurityGroup operation returned by the service. - `com.amazonaws.services.medialive.model.DescribeInputSecurityGroupResult`"
(^com.amazonaws.services.medialive.model.DescribeInputSecurityGroupResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.DescribeInputSecurityGroupRequest request]
(-> this (.describeInputSecurityGroup request))))
(defn shutdown
"Description copied from interface: AWSMediaLive"
([^AbstractAWSMediaLive this]
(-> this (.shutdown))))
(defn batch-update-schedule
"Description copied from interface: AWSMediaLive
request - List of actions to create and list of actions to delete. - `com.amazonaws.services.medialive.model.BatchUpdateScheduleRequest`
returns: Result of the BatchUpdateSchedule operation returned by the service. - `com.amazonaws.services.medialive.model.BatchUpdateScheduleResult`"
(^com.amazonaws.services.medialive.model.BatchUpdateScheduleResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.BatchUpdateScheduleRequest request]
(-> this (.batchUpdateSchedule request))))
(defn delete-input-security-group
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for DeleteInputSecurityGroupRequest - `com.amazonaws.services.medialive.model.DeleteInputSecurityGroupRequest`
returns: Result of the DeleteInputSecurityGroup operation returned by the service. - `com.amazonaws.services.medialive.model.DeleteInputSecurityGroupResult`"
(^com.amazonaws.services.medialive.model.DeleteInputSecurityGroupResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.DeleteInputSecurityGroupRequest request]
(-> this (.deleteInputSecurityGroup request))))
(defn describe-input
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for DescribeInputRequest - `com.amazonaws.services.medialive.model.DescribeInputRequest`
returns: Result of the DescribeInput operation returned by the service. - `com.amazonaws.services.medialive.model.DescribeInputResult`"
(^com.amazonaws.services.medialive.model.DescribeInputResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.DescribeInputRequest request]
(-> this (.describeInput request))))
(defn describe-reservation
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for DescribeReservationRequest - `com.amazonaws.services.medialive.model.DescribeReservationRequest`
returns: Result of the DescribeReservation operation returned by the service. - `com.amazonaws.services.medialive.model.DescribeReservationResult`"
(^com.amazonaws.services.medialive.model.DescribeReservationResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.DescribeReservationRequest request]
(-> this (.describeReservation request))))
(defn describe-offering
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for DescribeOfferingRequest - `com.amazonaws.services.medialive.model.DescribeOfferingRequest`
returns: Result of the DescribeOffering operation returned by the service. - `com.amazonaws.services.medialive.model.DescribeOfferingResult`"
(^com.amazonaws.services.medialive.model.DescribeOfferingResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.DescribeOfferingRequest request]
(-> this (.describeOffering request))))
(defn update-input
"Description copied from interface: AWSMediaLive
request - A request to update an input. - `com.amazonaws.services.medialive.model.UpdateInputRequest`
returns: Result of the UpdateInput operation returned by the service. - `com.amazonaws.services.medialive.model.UpdateInputResult`"
(^com.amazonaws.services.medialive.model.UpdateInputResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.UpdateInputRequest request]
(-> this (.updateInput request))))
(defn start-channel
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for StartChannelRequest - `com.amazonaws.services.medialive.model.StartChannelRequest`
returns: Result of the StartChannel operation returned by the service. - `com.amazonaws.services.medialive.model.StartChannelResult`"
(^com.amazonaws.services.medialive.model.StartChannelResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.StartChannelRequest request]
(-> this (.startChannel request))))
(defn get-cached-response-metadata
"Description copied from interface: AWSMediaLive
request - The originally executed request. - `com.amazonaws.AmazonWebServiceRequest`
returns: The response metadata for the specified request, or null if none is available. - `com.amazonaws.ResponseMetadata`"
(^com.amazonaws.ResponseMetadata [^AbstractAWSMediaLive this ^com.amazonaws.AmazonWebServiceRequest request]
(-> this (.getCachedResponseMetadata request))))
(defn delete-input
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for DeleteInputRequest - `com.amazonaws.services.medialive.model.DeleteInputRequest`
returns: Result of the DeleteInput operation returned by the service. - `com.amazonaws.services.medialive.model.DeleteInputResult`"
(^com.amazonaws.services.medialive.model.DeleteInputResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.DeleteInputRequest request]
(-> this (.deleteInput request))))
(defn delete-channel
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for DeleteChannelRequest - `com.amazonaws.services.medialive.model.DeleteChannelRequest`
returns: Result of the DeleteChannel operation returned by the service. - `com.amazonaws.services.medialive.model.DeleteChannelResult`"
(^com.amazonaws.services.medialive.model.DeleteChannelResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.DeleteChannelRequest request]
(-> this (.deleteChannel request))))
(defn stop-channel
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for StopChannelRequest - `com.amazonaws.services.medialive.model.StopChannelRequest`
returns: Result of the StopChannel operation returned by the service. - `com.amazonaws.services.medialive.model.StopChannelResult`"
(^com.amazonaws.services.medialive.model.StopChannelResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.StopChannelRequest request]
(-> this (.stopChannel request))))
(defn describe-channel
"Description copied from interface: AWSMediaLive
request - Placeholder documentation for DescribeChannelRequest - `com.amazonaws.services.medialive.model.DescribeChannelRequest`
returns: Result of the DescribeChannel operation returned by the service. - `com.amazonaws.services.medialive.model.DescribeChannelResult`"
(^com.amazonaws.services.medialive.model.DescribeChannelResult [^AbstractAWSMediaLive this ^com.amazonaws.services.medialive.model.DescribeChannelRequest request]
(-> this (.describeChannel request))))
| |
c8ba8ab3e774e2215d6162739cce30b1991622538077a20e0421c957105bff9f | BitGameEN/bitgamex | log_gold_reclaimed.erl | %%%--------------------------------------------------------
%%% @Module: log_gold_reclaimed
%%% @Description: 自动生成
%%%--------------------------------------------------------
-module(log_gold_reclaimed).
-export([get_one/1, set_one/1, build_record_from_row/1]).
-include("common.hrl").
-include("record_log_gold_reclaimed.hrl").
get_one(Id) ->
case db_esql:get_row(?DB_LOG, <<"select id,game_id,gold_type,delta,old_value,new_value,drain_type,drain_id,drain_count,time,call_flow from gold_reclaimed where id=?">>, [Id]) of
[] -> [];
Row -> build_record_from_row(Row)
end.
set_one(R0) when is_record(R0, log_gold_reclaimed) ->
case R0#log_gold_reclaimed.key_id =:= undefined of
false ->
syncdb(R0),
R0#log_gold_reclaimed.key_id;
true ->
#log_gold_reclaimed{
id = Id,
game_id = Game_id,
gold_type = Gold_type,
delta = Delta,
old_value = Old_value,
new_value = New_value,
drain_type = Drain_type,
drain_id = Drain_id,
drain_count = Drain_count,
time = Time,
call_flow = Call_flow
} = R0,
spawn(fun() -> {ok, [[Insert_id|_]]} = db_esql:multi_execute(?DB_LOG, io_lib:format(<<"insert into gold_reclaimed(id,game_id,gold_type,delta,old_value,new_value,drain_type,drain_id,drain_count,time,call_flow) values(~p,~p,'~s',~p,~p,~p,'~s','~s',~p,~p,'~s'); select last_insert_id()">>,
[Id, Game_id, Gold_type, Delta, Old_value, New_value, Drain_type, Drain_id, Drain_count, Time, Call_flow])) end)
end.
syncdb(R) when is_record(R, log_gold_reclaimed) ->
#log_gold_reclaimed{
id = Id,
game_id = Game_id,
gold_type = Gold_type,
delta = Delta,
old_value = Old_value,
new_value = New_value,
drain_type = Drain_type,
drain_id = Drain_id,
drain_count = Drain_count,
time = Time,
call_flow = Call_flow
} = R,
spawn(fun() -> db_esql:execute(?DB_LOG, <<"replace into gold_reclaimed(id,game_id,gold_type,delta,old_value,new_value,drain_type,drain_id,drain_count,time,call_flow) values(?,?,?,?,?,?,?,?,?,?,?)">>,
[Id, Game_id, Gold_type, Delta, Old_value, New_value, Drain_type, Drain_id, Drain_count, Time, Call_flow]) end).
build_record_from_row([Id, Game_id, Gold_type, Delta, Old_value, New_value, Drain_type, Drain_id, Drain_count, Time, Call_flow]) ->
#log_gold_reclaimed{
key_id = Id,
id = Id,
game_id = Game_id,
gold_type = Gold_type,
delta = Delta,
old_value = Old_value,
new_value = New_value,
drain_type = Drain_type,
drain_id = Drain_id,
drain_count = Drain_count,
time = Time,
call_flow = Call_flow
}.
| null | https://raw.githubusercontent.com/BitGameEN/bitgamex/151ba70a481615379f9648581a5d459b503abe19/src/data/log_gold_reclaimed.erl | erlang | --------------------------------------------------------
@Module: log_gold_reclaimed
@Description: 自动生成
-------------------------------------------------------- | -module(log_gold_reclaimed).
-export([get_one/1, set_one/1, build_record_from_row/1]).
-include("common.hrl").
-include("record_log_gold_reclaimed.hrl").
get_one(Id) ->
case db_esql:get_row(?DB_LOG, <<"select id,game_id,gold_type,delta,old_value,new_value,drain_type,drain_id,drain_count,time,call_flow from gold_reclaimed where id=?">>, [Id]) of
[] -> [];
Row -> build_record_from_row(Row)
end.
set_one(R0) when is_record(R0, log_gold_reclaimed) ->
case R0#log_gold_reclaimed.key_id =:= undefined of
false ->
syncdb(R0),
R0#log_gold_reclaimed.key_id;
true ->
#log_gold_reclaimed{
id = Id,
game_id = Game_id,
gold_type = Gold_type,
delta = Delta,
old_value = Old_value,
new_value = New_value,
drain_type = Drain_type,
drain_id = Drain_id,
drain_count = Drain_count,
time = Time,
call_flow = Call_flow
} = R0,
spawn(fun() -> {ok, [[Insert_id|_]]} = db_esql:multi_execute(?DB_LOG, io_lib:format(<<"insert into gold_reclaimed(id,game_id,gold_type,delta,old_value,new_value,drain_type,drain_id,drain_count,time,call_flow) values(~p,~p,'~s',~p,~p,~p,'~s','~s',~p,~p,'~s'); select last_insert_id()">>,
[Id, Game_id, Gold_type, Delta, Old_value, New_value, Drain_type, Drain_id, Drain_count, Time, Call_flow])) end)
end.
syncdb(R) when is_record(R, log_gold_reclaimed) ->
#log_gold_reclaimed{
id = Id,
game_id = Game_id,
gold_type = Gold_type,
delta = Delta,
old_value = Old_value,
new_value = New_value,
drain_type = Drain_type,
drain_id = Drain_id,
drain_count = Drain_count,
time = Time,
call_flow = Call_flow
} = R,
spawn(fun() -> db_esql:execute(?DB_LOG, <<"replace into gold_reclaimed(id,game_id,gold_type,delta,old_value,new_value,drain_type,drain_id,drain_count,time,call_flow) values(?,?,?,?,?,?,?,?,?,?,?)">>,
[Id, Game_id, Gold_type, Delta, Old_value, New_value, Drain_type, Drain_id, Drain_count, Time, Call_flow]) end).
build_record_from_row([Id, Game_id, Gold_type, Delta, Old_value, New_value, Drain_type, Drain_id, Drain_count, Time, Call_flow]) ->
#log_gold_reclaimed{
key_id = Id,
id = Id,
game_id = Game_id,
gold_type = Gold_type,
delta = Delta,
old_value = Old_value,
new_value = New_value,
drain_type = Drain_type,
drain_id = Drain_id,
drain_count = Drain_count,
time = Time,
call_flow = Call_flow
}.
|
b171d08888d646f9fd88151cca424e7a0010db24c8975313b16a4211b1e493af | phadej/singleton-bool | Bool.hs | # LANGUAGE CPP #
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE EmptyCase #-}
{-# LANGUAGE FlexibleContexts #-}
# LANGUAGE GADTs #
# LANGUAGE PolyKinds #
{-# LANGUAGE RankNTypes #-}
# LANGUAGE ScopedTypeVariables #
{-# LANGUAGE TypeOperators #-}
#if __GLASGOW_HASKELL__ >= 800
{-# OPTIONS_GHC -Wno-redundant-constraints #-}
#endif
-- | Additions to "Data.Type.Bool".
module Data.Singletons.Bool (
SBool(..),
SBoolI(..),
fromSBool,
withSomeSBool,
reflectBool,
reifyBool,
-- * Data.Type.Dec
| ' discreteBool ' is available with @base > = 4.7@ ( GHC-7.8 )
discreteBool,
* Data . Type . and .Equality
| These are only defined with @base > = 4.7@
sboolAnd, sboolOr, sboolNot,
eqToRefl, eqCast, sboolEqRefl,
trivialRefl,
) where
import Control.DeepSeq (NFData (..))
import Data.Boring (Boring (..))
import Data.GADT.Compare (GCompare (..), GEq (..), GOrdering (..))
import Data.GADT.DeepSeq (GNFData (..))
import Data.GADT.Show (GRead (..), GShow (..))
import Data.Proxy (Proxy (..))
import Data.Type.Bool
import Data.Type.Dec (Dec (..))
import Data.Type.Equality
import Unsafe.Coerce (unsafeCoerce)
import qualified Data.Some.Church as Church
-- $setup
-- >>> :set -XDataKinds -XTypeOperators
-- >>> import Data.Proxy (Proxy (..))
-- >>> import Data.Type.Dec
> > > import Data . Some
-- >>> import Data.GADT.Compare
> > > import Data .
-- >>> import Data.Type.Equality
data SBool (b :: Bool) where
STrue :: SBool 'True
SFalse :: SBool 'False
class SBoolI (b :: Bool) where sbool :: SBool b
instance SBoolI 'True where sbool = STrue
instance SBoolI 'False where sbool = SFalse
| @since 0.1.5
instance Show (SBool b) where
showsPrec _ STrue = showString "STrue"
showsPrec _ SFalse = showString "SFalse"
| @since 0.1.5
instance Eq (SBool b) where
_ == _ = True
| @since 0.1.5
instance Ord (SBool b) where
compare _ _ = EQ
-- | @since 0.1.6
instance NFData (SBool b) where
rnf STrue = ()
rnf SFalse = ()
-------------------------------------------------------------------------------
-- conversion to and from explicit SBool values
-------------------------------------------------------------------------------
| Convert an ' SBool ' to the corresponding ' ' .
--
@since 0.1.4
fromSBool :: SBool b -> Bool
fromSBool STrue = True
fromSBool SFalse = False
| Convert a normal ' ' to an ' SBool ' , passing it into a continuation .
--
-- >>> withSomeSBool True fromSBool
-- True
--
@since 0.1.4
withSomeSBool :: Bool -> (forall b. SBool b -> r) -> r
withSomeSBool True f = f STrue
withSomeSBool False f = f SFalse
-------------------------------------------------------------------------------
-- reify & reflect
-------------------------------------------------------------------------------
| Reify ' ' to type - level .
--
-- >>> reifyBool True reflectBool
-- True
--
reifyBool :: forall r. Bool -> (forall b. SBoolI b => Proxy b -> r) -> r
reifyBool True f = f (Proxy :: Proxy 'True)
reifyBool False f = f (Proxy :: Proxy 'False)
-- | Reflect to term-level.
--
-- >>> reflectBool (Proxy :: Proxy 'True)
-- True
reflectBool :: forall b proxy. SBoolI b => proxy b -> Bool
reflectBool _ = fromSBool (sbool :: SBool b)
-------------------------------------------------------------------------------
-- Boring
-------------------------------------------------------------------------------
-- | @since 0.1.6
instance SBoolI b => Boring (SBool b) where
boring = sbool
-------------------------------------------------------------------------------
-- Data.GADT (some)
-------------------------------------------------------------------------------
-- |
--
-- >>> geq STrue STrue
-- Just Refl
--
-- >>> geq STrue SFalse
-- Nothing
--
@since 0.1.6
instance GEq SBool where
geq STrue STrue = Just Refl
geq SFalse SFalse = Just Refl
geq _ _ = Nothing
-- |
--
@since 0.1.6
instance GCompare SBool where
gcompare SFalse SFalse = GEQ
gcompare SFalse STrue = GLT
gcompare STrue SFalse = GGT
gcompare STrue STrue = GEQ
-- | @since 0.1.6
instance GNFData SBool where
grnf STrue = ()
grnf SFalse = ()
-- |
--
-- >>> showsPrec 0 STrue ""
-- "STrue"
--
@since 0.1.6
instance GShow SBool where
gshowsPrec = showsPrec
-- |
--
-- >>> readsPrec 0 "Some STrue" :: [(Some SBool, String)]
[ ( Some STrue , " " ) ]
--
-- >>> readsPrec 0 "Some SFalse" :: [(Some SBool, String)]
-- [(Some SFalse,"")]
--
-- >>> readsPrec 0 "Some Else" :: [(Some SBool, String)]
-- []
--
@since 0.1.6
instance GRead SBool where
greadsPrec _ s =
[ (Church.mkSome STrue, t)
| ("STrue", t) <- lex s
]
++
[ (Church.mkSome SFalse, t)
| ("SFalse", t) <- lex s
]
-------------------------------------------------------------------------------
-- Discrete
-------------------------------------------------------------------------------
| equality .
--
> > > ( discreteBool : : Dec ( ' True : ~ : ' True ) )
-- "Yes Refl"
--
@since 0.1.5
discreteBool :: forall a b. (SBoolI a, SBoolI b) => Dec (a :~: b)
discreteBool = case (sbool :: SBool a, sbool :: SBool b) of
(STrue, STrue) -> Yes Refl
(STrue, SFalse) -> No $ \p -> case p of {}
(SFalse, STrue) -> No $ \p -> case p of {}
(SFalse, SFalse) -> Yes Refl
-------------------------------------------------------------------------------
-- Witnesses
-------------------------------------------------------------------------------
| > > > sboolAnd STrue SFalse
-- SFalse
sboolAnd :: SBool a -> SBool b -> SBool (a && b)
sboolAnd SFalse _ = SFalse
sboolAnd STrue b = b
sboolOr :: SBool a -> SBool b -> SBool (a || b)
sboolOr STrue _ = STrue
sboolOr SFalse b = b
sboolNot :: SBool a -> SBool (Not a)
sboolNot STrue = SFalse
sboolNot SFalse = STrue
-- | @since 0.1.1.0
eqToRefl :: (a == b) ~ 'True => a :~: b
eqToRefl = unsafeCoerce trivialRefl
-- | @since 0.1.1.0
eqCast :: (a == b) ~ 'True => a -> b
eqCast = unsafeCoerce
-- | @since 0.1.1.0
trivialRefl :: () :~: ()
trivialRefl = Refl
GHC 8.10 + requires that all kind variables be explicitly quantified after
a ` forall ` . Technically , GHC has had the ability to do this since GHC 8.0 ,
but GHC 8.0 - 8.4 require enabling TypeInType to do . To avoid having to faff
around with CPP to enable TypeInType on certain GHC versions , we only
explicitly quantify kind variables on GHC 8.6 or later , since those versions
do not require TypeInType , only PolyKinds .
# if __GLASGOW_HASKELL__ >= 806
# define KVS(kvs) kvs
# else
# define KVS(kvs)
# endif
-- | Useful combination of 'sbool' and 'eqToRefl'
--
-- @since 0.1.2.0
sboolEqRefl :: forall KVS(k) (a :: k) (b :: k). SBoolI (a == b) => Maybe (a :~: b)
sboolEqRefl = case sbool :: SBool (a == b) of
STrue -> Just eqToRefl
SFalse -> Nothing
| null | https://raw.githubusercontent.com/phadej/singleton-bool/3433fbbb4859c47e1564de335e8db327061eb8c8/src/Data/Singletons/Bool.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE EmptyCase #
# LANGUAGE FlexibleContexts #
# LANGUAGE RankNTypes #
# LANGUAGE TypeOperators #
# OPTIONS_GHC -Wno-redundant-constraints #
| Additions to "Data.Type.Bool".
* Data.Type.Dec
$setup
>>> :set -XDataKinds -XTypeOperators
>>> import Data.Proxy (Proxy (..))
>>> import Data.Type.Dec
>>> import Data.GADT.Compare
>>> import Data.Type.Equality
| @since 0.1.6
-----------------------------------------------------------------------------
conversion to and from explicit SBool values
-----------------------------------------------------------------------------
>>> withSomeSBool True fromSBool
True
-----------------------------------------------------------------------------
reify & reflect
-----------------------------------------------------------------------------
>>> reifyBool True reflectBool
True
| Reflect to term-level.
>>> reflectBool (Proxy :: Proxy 'True)
True
-----------------------------------------------------------------------------
Boring
-----------------------------------------------------------------------------
| @since 0.1.6
-----------------------------------------------------------------------------
Data.GADT (some)
-----------------------------------------------------------------------------
|
>>> geq STrue STrue
Just Refl
>>> geq STrue SFalse
Nothing
|
| @since 0.1.6
|
>>> showsPrec 0 STrue ""
"STrue"
|
>>> readsPrec 0 "Some STrue" :: [(Some SBool, String)]
>>> readsPrec 0 "Some SFalse" :: [(Some SBool, String)]
[(Some SFalse,"")]
>>> readsPrec 0 "Some Else" :: [(Some SBool, String)]
[]
-----------------------------------------------------------------------------
Discrete
-----------------------------------------------------------------------------
"Yes Refl"
-----------------------------------------------------------------------------
Witnesses
-----------------------------------------------------------------------------
SFalse
| @since 0.1.1.0
| @since 0.1.1.0
| @since 0.1.1.0
| Useful combination of 'sbool' and 'eqToRefl'
@since 0.1.2.0 | # LANGUAGE CPP #
# LANGUAGE GADTs #
# LANGUAGE PolyKinds #
# LANGUAGE ScopedTypeVariables #
#if __GLASGOW_HASKELL__ >= 800
#endif
module Data.Singletons.Bool (
SBool(..),
SBoolI(..),
fromSBool,
withSomeSBool,
reflectBool,
reifyBool,
| ' discreteBool ' is available with @base > = 4.7@ ( GHC-7.8 )
discreteBool,
* Data . Type . and .Equality
| These are only defined with @base > = 4.7@
sboolAnd, sboolOr, sboolNot,
eqToRefl, eqCast, sboolEqRefl,
trivialRefl,
) where
import Control.DeepSeq (NFData (..))
import Data.Boring (Boring (..))
import Data.GADT.Compare (GCompare (..), GEq (..), GOrdering (..))
import Data.GADT.DeepSeq (GNFData (..))
import Data.GADT.Show (GRead (..), GShow (..))
import Data.Proxy (Proxy (..))
import Data.Type.Bool
import Data.Type.Dec (Dec (..))
import Data.Type.Equality
import Unsafe.Coerce (unsafeCoerce)
import qualified Data.Some.Church as Church
> > > import Data . Some
> > > import Data .
data SBool (b :: Bool) where
STrue :: SBool 'True
SFalse :: SBool 'False
class SBoolI (b :: Bool) where sbool :: SBool b
instance SBoolI 'True where sbool = STrue
instance SBoolI 'False where sbool = SFalse
| @since 0.1.5
instance Show (SBool b) where
showsPrec _ STrue = showString "STrue"
showsPrec _ SFalse = showString "SFalse"
| @since 0.1.5
instance Eq (SBool b) where
_ == _ = True
| @since 0.1.5
instance Ord (SBool b) where
compare _ _ = EQ
instance NFData (SBool b) where
rnf STrue = ()
rnf SFalse = ()
| Convert an ' SBool ' to the corresponding ' ' .
@since 0.1.4
fromSBool :: SBool b -> Bool
fromSBool STrue = True
fromSBool SFalse = False
| Convert a normal ' ' to an ' SBool ' , passing it into a continuation .
@since 0.1.4
withSomeSBool :: Bool -> (forall b. SBool b -> r) -> r
withSomeSBool True f = f STrue
withSomeSBool False f = f SFalse
| Reify ' ' to type - level .
reifyBool :: forall r. Bool -> (forall b. SBoolI b => Proxy b -> r) -> r
reifyBool True f = f (Proxy :: Proxy 'True)
reifyBool False f = f (Proxy :: Proxy 'False)
reflectBool :: forall b proxy. SBoolI b => proxy b -> Bool
reflectBool _ = fromSBool (sbool :: SBool b)
instance SBoolI b => Boring (SBool b) where
boring = sbool
@since 0.1.6
instance GEq SBool where
geq STrue STrue = Just Refl
geq SFalse SFalse = Just Refl
geq _ _ = Nothing
@since 0.1.6
instance GCompare SBool where
gcompare SFalse SFalse = GEQ
gcompare SFalse STrue = GLT
gcompare STrue SFalse = GGT
gcompare STrue STrue = GEQ
instance GNFData SBool where
grnf STrue = ()
grnf SFalse = ()
@since 0.1.6
instance GShow SBool where
gshowsPrec = showsPrec
[ ( Some STrue , " " ) ]
@since 0.1.6
instance GRead SBool where
greadsPrec _ s =
[ (Church.mkSome STrue, t)
| ("STrue", t) <- lex s
]
++
[ (Church.mkSome SFalse, t)
| ("SFalse", t) <- lex s
]
| equality .
> > > ( discreteBool : : Dec ( ' True : ~ : ' True ) )
@since 0.1.5
discreteBool :: forall a b. (SBoolI a, SBoolI b) => Dec (a :~: b)
discreteBool = case (sbool :: SBool a, sbool :: SBool b) of
(STrue, STrue) -> Yes Refl
(STrue, SFalse) -> No $ \p -> case p of {}
(SFalse, STrue) -> No $ \p -> case p of {}
(SFalse, SFalse) -> Yes Refl
| > > > sboolAnd STrue SFalse
sboolAnd :: SBool a -> SBool b -> SBool (a && b)
sboolAnd SFalse _ = SFalse
sboolAnd STrue b = b
sboolOr :: SBool a -> SBool b -> SBool (a || b)
sboolOr STrue _ = STrue
sboolOr SFalse b = b
sboolNot :: SBool a -> SBool (Not a)
sboolNot STrue = SFalse
sboolNot SFalse = STrue
eqToRefl :: (a == b) ~ 'True => a :~: b
eqToRefl = unsafeCoerce trivialRefl
eqCast :: (a == b) ~ 'True => a -> b
eqCast = unsafeCoerce
trivialRefl :: () :~: ()
trivialRefl = Refl
GHC 8.10 + requires that all kind variables be explicitly quantified after
a ` forall ` . Technically , GHC has had the ability to do this since GHC 8.0 ,
but GHC 8.0 - 8.4 require enabling TypeInType to do . To avoid having to faff
around with CPP to enable TypeInType on certain GHC versions , we only
explicitly quantify kind variables on GHC 8.6 or later , since those versions
do not require TypeInType , only PolyKinds .
# if __GLASGOW_HASKELL__ >= 806
# define KVS(kvs) kvs
# else
# define KVS(kvs)
# endif
sboolEqRefl :: forall KVS(k) (a :: k) (b :: k). SBoolI (a == b) => Maybe (a :~: b)
sboolEqRefl = case sbool :: SBool (a == b) of
STrue -> Just eqToRefl
SFalse -> Nothing
|
6a2684092e07b031e6cd996c08236523cb9476a0f6c932183d882fd4cb5dc03b | bytekid/mkbtt | main.mli | val execute : unit -> unit
val execute_with : string -> string -> bool -> float -> bool ->
string -> string -> bool -> float -> string -> float ->
string -> int -> bool -> string -> bool -> unit
| null | https://raw.githubusercontent.com/bytekid/mkbtt/c2f8e0615389b52eabd12655fe48237aa0fe83fd/src/mascott/src/main.mli | ocaml | val execute : unit -> unit
val execute_with : string -> string -> bool -> float -> bool ->
string -> string -> bool -> float -> string -> float ->
string -> int -> bool -> string -> bool -> unit
| |
ede0291f40feec715ab6fe066354aa24268dba79a95ee7865fb3cffc74b86138 | conscell/hugs-android | Error.hs | # OPTIONS_GHC -fno - implicit - prelude #
-----------------------------------------------------------------------------
-- |
-- Module : Foreign.Marshal.Error
Copyright : ( c ) The FFI task force 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
Maintainer :
-- Stability : provisional
-- Portability : portable
--
Routines for testing return values and raising a ' userError ' exception
-- in case of values indicating an error state.
--
-----------------------------------------------------------------------------
module Foreign.Marshal.Error (
throwIf, -- :: (a -> Bool) -> (a -> String) -> IO a -> IO a
throwIf_, -- :: (a -> Bool) -> (a -> String) -> IO a -> IO ()
: : ( a , a )
-- => (a -> String) -> IO a -> IO a
: : ( a , a )
-- => (a -> String) -> IO a -> IO ()
throwIfNull, -- :: String -> IO (Ptr a) -> IO (Ptr a)
-- Discard return value
--
IO a - > IO ( )
) where
import Foreign.Ptr
-- exported functions
-- ------------------
|Execute an ' IO ' action , throwing a ' userError ' if the predicate yields
-- 'True' when applied to the result returned by the 'IO' action.
-- If no exception is raised, return the result of the computation.
--
throwIf :: (a -> Bool) -- ^ error condition on the result of the 'IO' action
-> (a -> String) -- ^ computes an error message from erroneous results
-- of the 'IO' action
-> IO a -- ^ the 'IO' action to be executed
-> IO a
throwIf pred msgfct act =
do
res <- act
(if pred res then ioError . userError . msgfct else return) res
-- |Like 'throwIf', but discarding the result
--
throwIf_ :: (a -> Bool) -> (a -> String) -> IO a -> IO ()
throwIf_ pred msgfct act = void $ throwIf pred msgfct act
-- |Guards against negative result values
--
throwIfNeg :: (Ord a, Num a) => (a -> String) -> IO a -> IO a
throwIfNeg = throwIf (< 0)
-- |Like 'throwIfNeg', but discarding the result
--
throwIfNeg_ :: (Ord a, Num a) => (a -> String) -> IO a -> IO ()
throwIfNeg_ = throwIf_ (< 0)
-- |Guards against null pointers
--
throwIfNull :: String -> IO (Ptr a) -> IO (Ptr a)
throwIfNull = throwIf (== nullPtr) . const
-- |Discard the return value of an 'IO' action
--
void :: IO a -> IO ()
void act = act >> return ()
| null | https://raw.githubusercontent.com/conscell/hugs-android/31e5861bc1a1dd9931e6b2471a9f45c14e3c6c7e/hugs/lib/hugs/packages/base/Foreign/Marshal/Error.hs | haskell | ---------------------------------------------------------------------------
|
Module : Foreign.Marshal.Error
License : BSD-style (see the file libraries/base/LICENSE)
Stability : provisional
Portability : portable
in case of values indicating an error state.
---------------------------------------------------------------------------
:: (a -> Bool) -> (a -> String) -> IO a -> IO a
:: (a -> Bool) -> (a -> String) -> IO a -> IO ()
=> (a -> String) -> IO a -> IO a
=> (a -> String) -> IO a -> IO ()
:: String -> IO (Ptr a) -> IO (Ptr a)
Discard return value
exported functions
------------------
'True' when applied to the result returned by the 'IO' action.
If no exception is raised, return the result of the computation.
^ error condition on the result of the 'IO' action
^ computes an error message from erroneous results
of the 'IO' action
^ the 'IO' action to be executed
|Like 'throwIf', but discarding the result
|Guards against negative result values
|Like 'throwIfNeg', but discarding the result
|Guards against null pointers
|Discard the return value of an 'IO' action
| # OPTIONS_GHC -fno - implicit - prelude #
Copyright : ( c ) The FFI task force 2001
Maintainer :
Routines for testing return values and raising a ' userError ' exception
module Foreign.Marshal.Error (
: : ( a , a )
: : ( a , a )
IO a - > IO ( )
) where
import Foreign.Ptr
|Execute an ' IO ' action , throwing a ' userError ' if the predicate yields
-> IO a
throwIf pred msgfct act =
do
res <- act
(if pred res then ioError . userError . msgfct else return) res
throwIf_ :: (a -> Bool) -> (a -> String) -> IO a -> IO ()
throwIf_ pred msgfct act = void $ throwIf pred msgfct act
throwIfNeg :: (Ord a, Num a) => (a -> String) -> IO a -> IO a
throwIfNeg = throwIf (< 0)
throwIfNeg_ :: (Ord a, Num a) => (a -> String) -> IO a -> IO ()
throwIfNeg_ = throwIf_ (< 0)
throwIfNull :: String -> IO (Ptr a) -> IO (Ptr a)
throwIfNull = throwIf (== nullPtr) . const
void :: IO a -> IO ()
void act = act >> return ()
|
1bfef33ba5d9d8df933fcc9fded0fb0d9bd5e92090bac11023399c956d03fca6 | mirage/capnp-rpc | capnp_address.mli | (** Handling of capnp:// URI format addresses.
This code is shared between the unix and mirage networks. *)
module Location : sig
type t = [
| `Unix of string
| `TCP of string * int
]
val pp : t Fmt.t
val equal : t -> t -> bool
end
include S.ADDRESS with
type t = Location.t * Auth.Digest.t
| null | https://raw.githubusercontent.com/mirage/capnp-rpc/f04fa96a583994b71731bc1288833f8304c9ce81/capnp-rpc-net/capnp_address.mli | ocaml | * Handling of capnp:// URI format addresses.
This code is shared between the unix and mirage networks. |
module Location : sig
type t = [
| `Unix of string
| `TCP of string * int
]
val pp : t Fmt.t
val equal : t -> t -> bool
end
include S.ADDRESS with
type t = Location.t * Auth.Digest.t
|
d6f5620adb0485e203a67847f753d8b1509801d3b6a5bf160bc364bdcb5a3d01 | stuartsierra/frequencies | frequencies.clj | (ns com.stuartsierra.frequencies
"Basic statistical computations on frequency maps. A frequency map
(freq-map) is a map from observed values to their frequency in a
data set.
If the observed values are all integers within a small range, then a
frequency map may be exact, such as that returned by
clojure.core/frequencies. Floating-point values or a large range of
integers can be grouped into 'buckets' as in a histogram: the
'bucket-frequencies' function does this.
You can create your own bucketed frequency map (for example, as part
of a larger 'reduce' operation) using the functions 'bucket' and
'recover-bucket-keys'.")
(defn bucket
"Returns an integer bucket ID for the observed value. Bucket maps
use integers as map keys to avoid possible errors from
floating-point arithmetic."
[bucket-size value]
(long (Math/ceil (/ (double value) bucket-size))))
(defn recover-bucket-keys
"Converts the keys of a map from integer bucket IDs to the original
value domain. Use this only if you are building up a bucket map
yourself with the 'bucket' function; the bucket-frequencies function
calls recover-keys automatically."
[bucket-map bucket-size]
(reduce-kv (fn [m k v]
(assoc m (* k bucket-size) v))
(sorted-map)
bucket-map))
(defn bucket-frequencies
"Returns a bucketed frequency map. Keys in the map are values from
the input, rounded up to bucket-size. Values in the map are counts
of the number of occurances of values less than or equal to their
bucket key but greater than the next-lowest bucket key."
[bucket-size values]
(-> (reduce (fn [freq-map value]
(let [b (bucket bucket-size value)
freq (get freq-map b 0)]
(assoc! freq-map b (inc freq))))
(transient {})
values)
persistent!
(recover-bucket-keys bucket-size)))
(defn sum
"Returns the sum of all observed values in a frequency map."
[freq-map]
(reduce-kv (fn [sum value frequency]
(+ sum (* (double value) frequency)))
0.0
freq-map))
(defn sample-count
"Returns the number of observed values in a frequency map."
[freq-map]
(reduce + (vals freq-map)))
(defn mean
"Returns the mean (average) of observed values in a frequency map."
[freq-map]
(let [sample-count (sample-count freq-map)
sum (sum freq-map)]
(/ sum sample-count)))
(defn values
"Returns a lazy sequence of all the observed values, repeating each
value the number of times it was observed."
[freq-map]
(when-let [entry (first freq-map)]
(let [[value frequency] entry]
(lazy-seq (concat (repeat frequency value)
(values (rest freq-map)))))))
(defn quantile*
"Like quantile but takes sample-count as an argument. For when you
already know the sample-count and don't want to recompute it. Also
assumes that the frequency map is already sorted."
[sorted-freq-map k q sample-count]
(let [rank (long (Math/ceil (* k (/ (double sample-count) q))))]
(loop [m (seq sorted-freq-map)
lower 0
prev-value Double/NEGATIVE_INFINITY]
(if-let [entry (first m)]
(let [[value freq] entry
upper (+ lower freq)]
(if (<= rank upper)
value
(recur (rest m) upper value)))
prev-value))))
(defn- ensure-sorted [m]
(if (sorted? m)
m
(into (sorted-map) m)))
(defn quantile
"Returns the value which is greater than k/q of the observed values
in the frequency map. For example, k=1 q=2 is the median; k=99 q=100
is the 99th percentile. For bucketed frequency maps, returns the
nearest bucket."
[freq-map k q]
(quantile* (ensure-sorted freq-map) k q (sample-count freq-map)))
(defn median
"Returns the median of the observed values in the frequency map."
[freq-map]
(quantile freq-map 1 2))
(defn percentiles*
"Like percentiles but the sample-count is provided as an argument
instead of computed, and the frequency map must already be sorted."
[sorted-freq-map percentiles sample-count]
(reduce (fn [m k]
(assoc m k (quantile* sorted-freq-map k 100.0 sample-count)))
(sorted-map)
percentiles))
(defn percentiles
"Returns a map of percentile values from the frequency map. Argument
'percentiles' is a collection of percentile targets, which will be
keys in the returned map. For example, a percentiles argument of
[25 50 99.9] would return a map containing the 25th, 50th (median),
and 99.9th percentile."
[freq-map percentiles]
(percentiles* (ensure-sorted freq-map)
percentiles
(sample-count freq-map)))
(defn variance*
"Like 'variance' but takes the mean and sample count as arguments
instead of computing them."
[freq-map mean sample-count]
(reduce-kv (fn [sum value frequency]
(let [p (/ (double frequency) sample-count)
diff (- (double value) mean)
diff-squared (* diff diff)]
(+ sum (* p diff-squared))))
0
freq-map))
(defn variance
"Returns the variance of observed values in a frequency map."
[freq-map]
(variance* freq-map (mean freq-map) (sample-count freq-map)))
(defn stdev
"Returns the standard deviation (square root of the variance) of
observed values in a frequency map."
[freq-map]
(Math/sqrt (variance freq-map)))
(defn stats
"Returns a map of statistics for the frequency map with the
following keys:
:mean, :median, :variance, :stdev, :sum, :sample-count,
:min minimum observed value;
:max maximum observed value;
:percentiles Map of percentile level to observed value.
Defaults to quartiles and 90, 95, 99, and 99.9th percentiles.
Change the returned percentiles by passing a vector of percentile
levels (between 0 and 100) as the option :percentiles."
[freq-map & {:keys [percentiles]
:or {percentiles [25 50 75 90 95 99 99.9]}}]
(let [sorted-freq-map (ensure-sorted freq-map)
sum (sum sorted-freq-map)
sample-count (sample-count sorted-freq-map)
mean (/ (double sum) sample-count)
variance (variance* sorted-freq-map mean sample-count)
stdev (Math/sqrt variance)
min (first (keys sorted-freq-map))
max (last (keys sorted-freq-map))
percentiles (percentiles* sorted-freq-map
percentiles
sample-count)
median (or (get percentiles 50)
(quantile* sorted-freq-map 1 2 sample-count))]
(array-map
:mean mean
:median median
:min min
:max max
:percentiles percentiles
:sample-count sample-count
:variance variance
:stdev stdev
:sum sum)))
| null | https://raw.githubusercontent.com/stuartsierra/frequencies/bdc6ba6e11db00ce146747a430a4ce1adcc308cd/src/com/stuartsierra/frequencies.clj | clojure | the bucket-frequencies function
k=99 q=100
| (ns com.stuartsierra.frequencies
"Basic statistical computations on frequency maps. A frequency map
(freq-map) is a map from observed values to their frequency in a
data set.
If the observed values are all integers within a small range, then a
frequency map may be exact, such as that returned by
clojure.core/frequencies. Floating-point values or a large range of
integers can be grouped into 'buckets' as in a histogram: the
'bucket-frequencies' function does this.
You can create your own bucketed frequency map (for example, as part
of a larger 'reduce' operation) using the functions 'bucket' and
'recover-bucket-keys'.")
(defn bucket
"Returns an integer bucket ID for the observed value. Bucket maps
use integers as map keys to avoid possible errors from
floating-point arithmetic."
[bucket-size value]
(long (Math/ceil (/ (double value) bucket-size))))
(defn recover-bucket-keys
"Converts the keys of a map from integer bucket IDs to the original
value domain. Use this only if you are building up a bucket map
calls recover-keys automatically."
[bucket-map bucket-size]
(reduce-kv (fn [m k v]
(assoc m (* k bucket-size) v))
(sorted-map)
bucket-map))
(defn bucket-frequencies
"Returns a bucketed frequency map. Keys in the map are values from
the input, rounded up to bucket-size. Values in the map are counts
of the number of occurances of values less than or equal to their
bucket key but greater than the next-lowest bucket key."
[bucket-size values]
(-> (reduce (fn [freq-map value]
(let [b (bucket bucket-size value)
freq (get freq-map b 0)]
(assoc! freq-map b (inc freq))))
(transient {})
values)
persistent!
(recover-bucket-keys bucket-size)))
(defn sum
"Returns the sum of all observed values in a frequency map."
[freq-map]
(reduce-kv (fn [sum value frequency]
(+ sum (* (double value) frequency)))
0.0
freq-map))
(defn sample-count
"Returns the number of observed values in a frequency map."
[freq-map]
(reduce + (vals freq-map)))
(defn mean
"Returns the mean (average) of observed values in a frequency map."
[freq-map]
(let [sample-count (sample-count freq-map)
sum (sum freq-map)]
(/ sum sample-count)))
(defn values
"Returns a lazy sequence of all the observed values, repeating each
value the number of times it was observed."
[freq-map]
(when-let [entry (first freq-map)]
(let [[value frequency] entry]
(lazy-seq (concat (repeat frequency value)
(values (rest freq-map)))))))
(defn quantile*
"Like quantile but takes sample-count as an argument. For when you
already know the sample-count and don't want to recompute it. Also
assumes that the frequency map is already sorted."
[sorted-freq-map k q sample-count]
(let [rank (long (Math/ceil (* k (/ (double sample-count) q))))]
(loop [m (seq sorted-freq-map)
lower 0
prev-value Double/NEGATIVE_INFINITY]
(if-let [entry (first m)]
(let [[value freq] entry
upper (+ lower freq)]
(if (<= rank upper)
value
(recur (rest m) upper value)))
prev-value))))
(defn- ensure-sorted [m]
(if (sorted? m)
m
(into (sorted-map) m)))
(defn quantile
"Returns the value which is greater than k/q of the observed values
is the 99th percentile. For bucketed frequency maps, returns the
nearest bucket."
[freq-map k q]
(quantile* (ensure-sorted freq-map) k q (sample-count freq-map)))
(defn median
"Returns the median of the observed values in the frequency map."
[freq-map]
(quantile freq-map 1 2))
(defn percentiles*
"Like percentiles but the sample-count is provided as an argument
instead of computed, and the frequency map must already be sorted."
[sorted-freq-map percentiles sample-count]
(reduce (fn [m k]
(assoc m k (quantile* sorted-freq-map k 100.0 sample-count)))
(sorted-map)
percentiles))
(defn percentiles
"Returns a map of percentile values from the frequency map. Argument
'percentiles' is a collection of percentile targets, which will be
keys in the returned map. For example, a percentiles argument of
[25 50 99.9] would return a map containing the 25th, 50th (median),
and 99.9th percentile."
[freq-map percentiles]
(percentiles* (ensure-sorted freq-map)
percentiles
(sample-count freq-map)))
(defn variance*
"Like 'variance' but takes the mean and sample count as arguments
instead of computing them."
[freq-map mean sample-count]
(reduce-kv (fn [sum value frequency]
(let [p (/ (double frequency) sample-count)
diff (- (double value) mean)
diff-squared (* diff diff)]
(+ sum (* p diff-squared))))
0
freq-map))
(defn variance
"Returns the variance of observed values in a frequency map."
[freq-map]
(variance* freq-map (mean freq-map) (sample-count freq-map)))
(defn stdev
"Returns the standard deviation (square root of the variance) of
observed values in a frequency map."
[freq-map]
(Math/sqrt (variance freq-map)))
(defn stats
"Returns a map of statistics for the frequency map with the
following keys:
:mean, :median, :variance, :stdev, :sum, :sample-count,
:percentiles Map of percentile level to observed value.
Defaults to quartiles and 90, 95, 99, and 99.9th percentiles.
Change the returned percentiles by passing a vector of percentile
levels (between 0 and 100) as the option :percentiles."
[freq-map & {:keys [percentiles]
:or {percentiles [25 50 75 90 95 99 99.9]}}]
(let [sorted-freq-map (ensure-sorted freq-map)
sum (sum sorted-freq-map)
sample-count (sample-count sorted-freq-map)
mean (/ (double sum) sample-count)
variance (variance* sorted-freq-map mean sample-count)
stdev (Math/sqrt variance)
min (first (keys sorted-freq-map))
max (last (keys sorted-freq-map))
percentiles (percentiles* sorted-freq-map
percentiles
sample-count)
median (or (get percentiles 50)
(quantile* sorted-freq-map 1 2 sample-count))]
(array-map
:mean mean
:median median
:min min
:max max
:percentiles percentiles
:sample-count sample-count
:variance variance
:stdev stdev
:sum sum)))
|
d0debe1e0e4b33b0e6d6a824f39d6e6772f18aa3416800ffc03e783768a475fa | flosell/lambdacd | api.clj | (ns lambdacd.ui.api
"REST-API into the current state, structure and history of the pipeline for use by the UI."
(:require [lambdacd.presentation.unified :as unified]
[ring.util.response :as resp]
[clojure.string :as string]
[ring.middleware.json :as ring-json]
[lambdacd.presentation.pipeline-state :as state-presentation]
[lambdacd.execution.core :as execution]
[lambdacd.steps.manualtrigger :as manualtrigger]
[clojure.walk :as w]
[compojure.core :refer [routes GET POST]]
[lambdacd.state.core :as state]
[lambdacd.util.internal.sugar :as sugar]
[lambdacd.ui.internal.util :as ui-util]))
(defn- build-infos [ctx build-number-str]
(let [build-number (sugar/parse-int build-number-str)
pipeline-structure (state/get-pipeline-structure ctx build-number)
step-results (state/get-step-results ctx build-number)]
(if (and pipeline-structure step-results)
(ui-util/json (unified/pipeline-structure-with-step-results pipeline-structure step-results))
(resp/not-found (str "build " build-number-str " does not exist")))))
(defn- to-internal-step-id [dash-seperated-step-id]
(map sugar/parse-int (string/split dash-seperated-step-id #"-")))
(defn rest-api
"Returns a ring-handler offering a rest-api for the UI."
[{pipeline-def :pipeline-def ctx :context}]
(ring-json/wrap-json-params
(routes
(GET "/builds/" [] (ui-util/json (state-presentation/history-for ctx)))
(GET "/builds/:buildnumber/" [buildnumber] (build-infos ctx buildnumber))
(POST "/builds/:buildnumber/:step-id/retrigger" [buildnumber step-id]
(let [new-buildnumber (execution/retrigger-pipeline-async pipeline-def ctx (sugar/parse-int buildnumber) (to-internal-step-id step-id))]
(ui-util/json {:build-number new-buildnumber})))
(POST "/builds/:buildnumber/:step-id/kill" [buildnumber step-id]
(do
(execution/kill-step ctx (sugar/parse-int buildnumber) (to-internal-step-id step-id))
"OK"))
(POST "/dynamic/:id" {{id :id} :params data :json-params} (do
(manualtrigger/post-id ctx id (w/keywordize-keys data))
(ui-util/json {:status :success}))))))
| null | https://raw.githubusercontent.com/flosell/lambdacd/e9ba3cebb2d5f0070a2e0e1e08fc85fc99ee7135/src/clj/lambdacd/ui/api.clj | clojure | (ns lambdacd.ui.api
"REST-API into the current state, structure and history of the pipeline for use by the UI."
(:require [lambdacd.presentation.unified :as unified]
[ring.util.response :as resp]
[clojure.string :as string]
[ring.middleware.json :as ring-json]
[lambdacd.presentation.pipeline-state :as state-presentation]
[lambdacd.execution.core :as execution]
[lambdacd.steps.manualtrigger :as manualtrigger]
[clojure.walk :as w]
[compojure.core :refer [routes GET POST]]
[lambdacd.state.core :as state]
[lambdacd.util.internal.sugar :as sugar]
[lambdacd.ui.internal.util :as ui-util]))
(defn- build-infos [ctx build-number-str]
(let [build-number (sugar/parse-int build-number-str)
pipeline-structure (state/get-pipeline-structure ctx build-number)
step-results (state/get-step-results ctx build-number)]
(if (and pipeline-structure step-results)
(ui-util/json (unified/pipeline-structure-with-step-results pipeline-structure step-results))
(resp/not-found (str "build " build-number-str " does not exist")))))
(defn- to-internal-step-id [dash-seperated-step-id]
(map sugar/parse-int (string/split dash-seperated-step-id #"-")))
(defn rest-api
"Returns a ring-handler offering a rest-api for the UI."
[{pipeline-def :pipeline-def ctx :context}]
(ring-json/wrap-json-params
(routes
(GET "/builds/" [] (ui-util/json (state-presentation/history-for ctx)))
(GET "/builds/:buildnumber/" [buildnumber] (build-infos ctx buildnumber))
(POST "/builds/:buildnumber/:step-id/retrigger" [buildnumber step-id]
(let [new-buildnumber (execution/retrigger-pipeline-async pipeline-def ctx (sugar/parse-int buildnumber) (to-internal-step-id step-id))]
(ui-util/json {:build-number new-buildnumber})))
(POST "/builds/:buildnumber/:step-id/kill" [buildnumber step-id]
(do
(execution/kill-step ctx (sugar/parse-int buildnumber) (to-internal-step-id step-id))
"OK"))
(POST "/dynamic/:id" {{id :id} :params data :json-params} (do
(manualtrigger/post-id ctx id (w/keywordize-keys data))
(ui-util/json {:status :success}))))))
| |
809b1c8b6c5513e4957c246546da1ecc92bb8c7e6f916200635065e9f6430e26 | ArulselvanMadhavan/haskell-first-principles | intero16533bsE-TEMP.hs | {-# LANGUAGE Strict #-}
# OPTIONS_GHC -fwarn - incomplete - patterns #
module Phone where
import Data.Char
import Data.List
import qualified Data.Map.Strict as Map
import Data.Maybe
import Ex4
data Key = One | Two | Three | Four | Five | Six | Seven | Eight | Nine | Star | Zero | Pound deriving (Eq, Enum, Show, Ord)
data KeyPress = Key Int
data Phone =
Phone [Key]
deriving (Show)
type Presses = Int
keyToChars :: Key -> [Char]
keyToChars k =
case k of
Two -> ['a', 'b', 'c', '2']
Three -> ['d', 'e', 'f', '3']
Four -> ['g', 'h', 'i', '4']
Five -> ['j', 'k', 'l', '5']
Six -> ['m', 'n', 'o', '6']
Seven -> ['p', 'q', 'r', 's', '7']
Eight -> ['t', 'u', 'v', '8']
Nine -> ['w', 'x', 'y', 'z', '9']
Zero -> [' ', '0']
One -> ['1']
Star -> ['*']
Pound -> ['#']
convo :: [String]
convo = ["Wanna play 20 questions",
"Ya",
"U 1st haha",
"Lol ok. Have u ever tasted alcohol",
"Lol ya",
"Wow ur cool haha. Ur turn",
"Ok. Do u think I am pretty Lol",
"Lol ya",
"Just making sure rofl ur turn"]
checkKeyAvail :: Key -> Maybe Int -> [(Key, Presses)] -> [(Key, Presses)]
checkKeyAvail k (Just idx) _ = (k, idx + 1 :: Presses) : []
checkKeyAvail _ _ acc = acc
findValidKey :: Char -> Key -> [(Key, Presses)] -> [(Key, Presses)]
findValidKey c k acc =
(flip $ checkKeyAvail k) acc . getKeyPress c $ k
getKeyPress :: Char -> Key -> Maybe Int
getKeyPress c =
findIndex (== c) . keyToChars
getKeyPresses :: Char -> [Key] -> [(Key, Presses)]
getKeyPresses c =
foldr (findValidKey c) []
reverseTaps :: Phone -> Char -> [(Key, Presses)]
reverseTaps (Phone keys) c
| isUpper c = (Star, 1) : getKeyPresses (toLower c) keys
| otherwise = getKeyPresses c keys
cellPhonesDead :: Phone -> String -> [(Key, Presses)]
cellPhonesDead p =
concat . map (reverseTaps p)
fingerTaps :: [(Key, Presses)] -> Presses
fingerTaps =
sum . map snd
convosToKeys :: [String] -> [[(Key, Presses)]]
convosToKeys =
map (cellPhonesDead (Phone $ enumFrom One))
fingerTapsForConvos :: [Presses]
fingerTapsForConvos =
map fingerTaps $ convosToKeys convo
popularKey :: [(Key, Presses)] -> Key
popularKey =
fst . maximumBy (\(_, a) -> \(_, b) -> compare a b). Map.toList . Map.fromListWith (+)
popularCharForConvos :: [Key]
popularCharForConvos =
map popularKey $ convosToKeys convo
keyPressesToChar :: (Key, Presses) -> Char
keyPressesToChar (k, p) =
(flip (!!) $ (p - 1)) . keyToChars $ k
updateCharMap :: Char -> Map.Map Char Int -> Map.Map Char Int
updateCharMap c acc =
case Map.member c acc of
True -> Map.update (\x -> Just (x + 1)) c acc
False -> Map.insert c 1 acc
buildCharMap :: [Char] -> Map.Map Char Int
buildCharMap =
foldr updateCharMap Map.empty
maximumWithIndex :: [(Char, Presses)] -> (Int, (Char, Presses))
maximumWithIndex xs =
maximumBy (\x -> \y -> compare ((snd . snd) x) ((snd . snd) y)) $ zip [0..] xs
popularChar :: [Char]
popularChar =
map (fst . snd . maximumWithIndex . Map.toList . buildCharMap . map keyPressesToChar) $ convosToKeys convo
wordCount :: [String] -> Map.Map String Int
wordCount sen =
Map.fromListWith (+) $ zip sen $ repeat 1
coolestWord :: [String] -> String
coolestWord =
fst . maximumBy (\x -> \y -> compare (snd x) (snd y)) . Map.toList . wordCount . concat . map sentenceToWord
sentenceToWord :: String -> [String]
sentenceToWord =
split
| null | https://raw.githubusercontent.com/ArulselvanMadhavan/haskell-first-principles/06e0c71c502848c8e75c8109dd49c0954d815bba/chapter11/.stack-work/intero/intero16533bsE-TEMP.hs | haskell | # LANGUAGE Strict # | # OPTIONS_GHC -fwarn - incomplete - patterns #
module Phone where
import Data.Char
import Data.List
import qualified Data.Map.Strict as Map
import Data.Maybe
import Ex4
data Key = One | Two | Three | Four | Five | Six | Seven | Eight | Nine | Star | Zero | Pound deriving (Eq, Enum, Show, Ord)
data KeyPress = Key Int
data Phone =
Phone [Key]
deriving (Show)
type Presses = Int
keyToChars :: Key -> [Char]
keyToChars k =
case k of
Two -> ['a', 'b', 'c', '2']
Three -> ['d', 'e', 'f', '3']
Four -> ['g', 'h', 'i', '4']
Five -> ['j', 'k', 'l', '5']
Six -> ['m', 'n', 'o', '6']
Seven -> ['p', 'q', 'r', 's', '7']
Eight -> ['t', 'u', 'v', '8']
Nine -> ['w', 'x', 'y', 'z', '9']
Zero -> [' ', '0']
One -> ['1']
Star -> ['*']
Pound -> ['#']
convo :: [String]
convo = ["Wanna play 20 questions",
"Ya",
"U 1st haha",
"Lol ok. Have u ever tasted alcohol",
"Lol ya",
"Wow ur cool haha. Ur turn",
"Ok. Do u think I am pretty Lol",
"Lol ya",
"Just making sure rofl ur turn"]
checkKeyAvail :: Key -> Maybe Int -> [(Key, Presses)] -> [(Key, Presses)]
checkKeyAvail k (Just idx) _ = (k, idx + 1 :: Presses) : []
checkKeyAvail _ _ acc = acc
findValidKey :: Char -> Key -> [(Key, Presses)] -> [(Key, Presses)]
findValidKey c k acc =
(flip $ checkKeyAvail k) acc . getKeyPress c $ k
getKeyPress :: Char -> Key -> Maybe Int
getKeyPress c =
findIndex (== c) . keyToChars
getKeyPresses :: Char -> [Key] -> [(Key, Presses)]
getKeyPresses c =
foldr (findValidKey c) []
reverseTaps :: Phone -> Char -> [(Key, Presses)]
reverseTaps (Phone keys) c
| isUpper c = (Star, 1) : getKeyPresses (toLower c) keys
| otherwise = getKeyPresses c keys
cellPhonesDead :: Phone -> String -> [(Key, Presses)]
cellPhonesDead p =
concat . map (reverseTaps p)
fingerTaps :: [(Key, Presses)] -> Presses
fingerTaps =
sum . map snd
convosToKeys :: [String] -> [[(Key, Presses)]]
convosToKeys =
map (cellPhonesDead (Phone $ enumFrom One))
fingerTapsForConvos :: [Presses]
fingerTapsForConvos =
map fingerTaps $ convosToKeys convo
popularKey :: [(Key, Presses)] -> Key
popularKey =
fst . maximumBy (\(_, a) -> \(_, b) -> compare a b). Map.toList . Map.fromListWith (+)
popularCharForConvos :: [Key]
popularCharForConvos =
map popularKey $ convosToKeys convo
keyPressesToChar :: (Key, Presses) -> Char
keyPressesToChar (k, p) =
(flip (!!) $ (p - 1)) . keyToChars $ k
updateCharMap :: Char -> Map.Map Char Int -> Map.Map Char Int
updateCharMap c acc =
case Map.member c acc of
True -> Map.update (\x -> Just (x + 1)) c acc
False -> Map.insert c 1 acc
buildCharMap :: [Char] -> Map.Map Char Int
buildCharMap =
foldr updateCharMap Map.empty
maximumWithIndex :: [(Char, Presses)] -> (Int, (Char, Presses))
maximumWithIndex xs =
maximumBy (\x -> \y -> compare ((snd . snd) x) ((snd . snd) y)) $ zip [0..] xs
popularChar :: [Char]
popularChar =
map (fst . snd . maximumWithIndex . Map.toList . buildCharMap . map keyPressesToChar) $ convosToKeys convo
wordCount :: [String] -> Map.Map String Int
wordCount sen =
Map.fromListWith (+) $ zip sen $ repeat 1
coolestWord :: [String] -> String
coolestWord =
fst . maximumBy (\x -> \y -> compare (snd x) (snd y)) . Map.toList . wordCount . concat . map sentenceToWord
sentenceToWord :: String -> [String]
sentenceToWord =
split
|
904805eddd9f352377783cc972aa7468d83c250c4fb9a04ccb3fa53a4c62efbb | MinaProtocol/mina | list.ml | module Length = struct
type 'a t = ('a list, int) Sigs.predicate2
let equal l len = Caml.List.compare_length_with l len = 0
let unequal l len = Caml.List.compare_length_with l len <> 0
let gte l len = Caml.List.compare_length_with l len >= 0
let gt l len = Caml.List.compare_length_with l len > 0
let lte l len = Caml.List.compare_length_with l len <= 0
let lt l len = Caml.List.compare_length_with l len < 0
module Compare = struct
let ( = ) = equal
let ( <> ) = unequal
let ( >= ) = gte
let ( > ) = gt
let ( <= ) = lte
let ( < ) = lt
end
end
| null | https://raw.githubusercontent.com/MinaProtocol/mina/0cf192d7a74a46169e71efdbc700a7f2c2e374aa/src/lib/mina_stdlib/list.ml | ocaml | module Length = struct
type 'a t = ('a list, int) Sigs.predicate2
let equal l len = Caml.List.compare_length_with l len = 0
let unequal l len = Caml.List.compare_length_with l len <> 0
let gte l len = Caml.List.compare_length_with l len >= 0
let gt l len = Caml.List.compare_length_with l len > 0
let lte l len = Caml.List.compare_length_with l len <= 0
let lt l len = Caml.List.compare_length_with l len < 0
module Compare = struct
let ( = ) = equal
let ( <> ) = unequal
let ( >= ) = gte
let ( > ) = gt
let ( <= ) = lte
let ( < ) = lt
end
end
| |
c0abdad545227f689d6cf525b88c76f0e586f0c335024bf5f55139f1794b752c | k16shikano/hpdft | Definition.hs | module PDF.Definition where
import Data.ByteString (ByteString)
import Data.List (replicate, intercalate)
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy.Char8 as BSL
import Codec.Compression.Zlib (decompress)
type PDFBS = (Int,BS.ByteString)
type PDFObj = (Int,[Obj])
type PDFStream = BSL.ByteString
type PDFxref = BSL.ByteString
data Obj = PdfDict Dict -- [(Obj, Obj)]
| PdfText String
| PdfStream PDFStream
| PdfNumber Double
| PdfHex String
| PdfBool Bool
| PdfArray [Obj]
| PdfName String
| ObjRef Int
| ObjOther String
| PdfNull
deriving (Eq)
type Dict = [(Obj,Obj)]
instance Show Obj where
show o = toString 0 o
toString depth (PdfDict d) = concat $ map dictentry d
where dictentry (PdfName n, o) = concat $ ["\n"] ++ replicate depth " " ++ [n, ": ", toString (depth+1) o]
dictentry e = error $ "Illegular dictionary entry "++show e
toString depth (PdfText t) = t
toString depth ( PdfStream s ) = " " + + ( BSL.unpack $ decompress s )
toString depth (PdfStream s) = "\n " ++ (BSL.unpack $ s)
toString depth (PdfNumber r) = show r
toString depth (PdfHex h) = h
toString depth (PdfArray a) = intercalate ", " $ map (toString depth) a
toString depth (PdfBool b) = show b
toString depth (PdfName n) = n
toString depth (ObjRef i) = show i
toString depth (ObjOther o) = o
toString depth (PdfNull) = ""
data Encoding = CIDmap String | Encoding [(Char,String)] | WithCharSet String | NullMap
instance Show Encoding where
show (CIDmap s) = "CIDmap"++s
show (Encoding a) = "Encoding"++show a
show (WithCharSet s) = "WithCharSet"++s
show NullMap = []
type CMap = [(Int,String)]
data PSR = PSR { linex :: Double
, liney :: Double
, absolutex :: Double
, absolutey :: Double
, text_lm :: (Double, Double, Double, Double, Double, Double)
, text_m :: (Double, Double, Double, Double, Double, Double)
, text_break :: Bool
, leftmargin :: Double
, top :: Double
, bottom :: Double
, fontfactor :: Double
, curfont :: String
, cmaps :: [(String, CMap)]
, fontmaps :: [(String, Encoding)]
, colorspace :: String
, xcolorspaces :: [String]
}
deriving (Show)
| null | https://raw.githubusercontent.com/k16shikano/hpdft/97484e4ec5d698d403add2b92ff4b128c01e7ace/src/PDF/Definition.hs | haskell | [(Obj, Obj)] | module PDF.Definition where
import Data.ByteString (ByteString)
import Data.List (replicate, intercalate)
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy.Char8 as BSL
import Codec.Compression.Zlib (decompress)
type PDFBS = (Int,BS.ByteString)
type PDFObj = (Int,[Obj])
type PDFStream = BSL.ByteString
type PDFxref = BSL.ByteString
| PdfText String
| PdfStream PDFStream
| PdfNumber Double
| PdfHex String
| PdfBool Bool
| PdfArray [Obj]
| PdfName String
| ObjRef Int
| ObjOther String
| PdfNull
deriving (Eq)
type Dict = [(Obj,Obj)]
instance Show Obj where
show o = toString 0 o
toString depth (PdfDict d) = concat $ map dictentry d
where dictentry (PdfName n, o) = concat $ ["\n"] ++ replicate depth " " ++ [n, ": ", toString (depth+1) o]
dictentry e = error $ "Illegular dictionary entry "++show e
toString depth (PdfText t) = t
toString depth ( PdfStream s ) = " " + + ( BSL.unpack $ decompress s )
toString depth (PdfStream s) = "\n " ++ (BSL.unpack $ s)
toString depth (PdfNumber r) = show r
toString depth (PdfHex h) = h
toString depth (PdfArray a) = intercalate ", " $ map (toString depth) a
toString depth (PdfBool b) = show b
toString depth (PdfName n) = n
toString depth (ObjRef i) = show i
toString depth (ObjOther o) = o
toString depth (PdfNull) = ""
data Encoding = CIDmap String | Encoding [(Char,String)] | WithCharSet String | NullMap
instance Show Encoding where
show (CIDmap s) = "CIDmap"++s
show (Encoding a) = "Encoding"++show a
show (WithCharSet s) = "WithCharSet"++s
show NullMap = []
type CMap = [(Int,String)]
data PSR = PSR { linex :: Double
, liney :: Double
, absolutex :: Double
, absolutey :: Double
, text_lm :: (Double, Double, Double, Double, Double, Double)
, text_m :: (Double, Double, Double, Double, Double, Double)
, text_break :: Bool
, leftmargin :: Double
, top :: Double
, bottom :: Double
, fontfactor :: Double
, curfont :: String
, cmaps :: [(String, CMap)]
, fontmaps :: [(String, Encoding)]
, colorspace :: String
, xcolorspaces :: [String]
}
deriving (Show)
|
7b83d28f6abdc323deaa5c9433eae284d00988ccae396d0d15335cc9453b367f | ocaml-flambda/ocaml-jst | pr11544.ml | (* TEST
* expect
*)
module M = struct type t = T end
let poly3 : 'b. M.t -> 'b -> 'b =
fun T x -> x
[%%expect {|
module M : sig type t = T end
val poly3 : M.t -> 'b -> 'b = <fun>
|}];;
| null | https://raw.githubusercontent.com/ocaml-flambda/ocaml-jst/fae9aef6b7023f2bf8c94f28e8ef8cbd8ffb7633/testsuite/tests/typing-poly/pr11544.ml | ocaml | TEST
* expect
|
module M = struct type t = T end
let poly3 : 'b. M.t -> 'b -> 'b =
fun T x -> x
[%%expect {|
module M : sig type t = T end
val poly3 : M.t -> 'b -> 'b = <fun>
|}];;
|
50c26a504c5efe8955f7e30b5fb239b27da3db0402f93ec5cfbbdda11af5e143 | bos/llvm | Loop.hs | # LANGUAGE ScopedTypeVariables , FlexibleInstances , TypeOperators , FlexibleContexts #
module LLVM.Util.Loop(Phi(phis,addPhis), forLoop, mapVector, mapVector2) where
import Data.TypeLevel hiding (Bool)
import LLVM.Core
class Phi a where
phis :: BasicBlock -> a -> CodeGenFunction r a
addPhis :: BasicBlock -> a -> a -> CodeGenFunction r ()
infixr 1 :*
-- XXX should use HList if it was packaged in a nice way .
data a :* b = a :* b
deriving ( Eq , Ord , Show , Read )
instance ( IsFirstClass a , Phi b ) = > Phi ( Value a :* b ) where
phis bb ( a :* b ) = do
a ' < - phi [ ( a , bb ) ]
b ' < - phis bb b
return ( a ' :* b ' )
addPhis bb ( a :* b ) ( a ' :* b ' ) = do
addPhiInputs a [ ( a ' , bb ) ]
addPhis bb b b '
infixr 1 :*
-- XXX should use HList if it was packaged in a nice way.
data a :* b = a :* b
deriving (Eq, Ord, Show, Read)
instance (IsFirstClass a, Phi b) => Phi (Value a :* b) where
phis bb (a :* b) = do
a' <- phi [(a, bb)]
b' <- phis bb b
return (a' :* b')
addPhis bb (a :* b) (a' :* b') = do
addPhiInputs a [(a', bb)]
addPhis bb b b'
-}
instance Phi () where
phis _ _ = return ()
addPhis _ _ _ = return ()
instance (IsFirstClass a) => Phi (Value a) where
phis bb a = do
a' <- phi [(a, bb)]
return a'
addPhis bb a a' = do
addPhiInputs a [(a', bb)]
instance (Phi a, Phi b) => Phi (a, b) where
phis bb (a, b) = do
a' <- phis bb a
b' <- phis bb b
return (a', b')
addPhis bb (a, b) (a', b') = do
addPhis bb a a'
addPhis bb b b'
instance (Phi a, Phi b, Phi c) => Phi (a, b, c) where
phis bb (a, b, c) = do
a' <- phis bb a
b' <- phis bb b
c' <- phis bb c
return (a', b', c')
addPhis bb (a, b, c) (a', b', c') = do
addPhis bb a a'
addPhis bb b b'
addPhis bb c c'
-- Loop the index variable from low to high. The state in the loop starts as start, and is modified
-- by incr in each iteration.
forLoop :: forall i a r . (Phi a, Num i, IsConst i, IsInteger i, IsFirstClass i, CmpRet i Bool) =>
Value i -> Value i -> a -> (Value i -> a -> CodeGenFunction r a) -> CodeGenFunction r a
forLoop low high start incr = do
top <- getCurrentBasicBlock
loop <- newBasicBlock
body <- newBasicBlock
exit <- newBasicBlock
br loop
defineBasicBlock loop
i <- phi [(low, top)]
vars <- phis top start
t <- cmp CmpNE i high
condBr t body exit
defineBasicBlock body
vars' <- incr i vars
i' <- add i (valueOf 1 :: Value i)
body' <- getCurrentBasicBlock
addPhis body' vars vars'
addPhiInputs i [(i', body')]
br loop
defineBasicBlock exit
return vars
--------------------------------------
mapVector :: forall a b n r .
(Pos n, IsPrimitive b) =>
(Value a -> CodeGenFunction r (Value b)) ->
Value (Vector n a) -> CodeGenFunction r (Value (Vector n b))
mapVector f v =
forLoop (valueOf 0) (valueOf (toNum (undefined :: n))) (value undef) $ \ i w -> do
x <- extractelement v i
y <- f x
insertelement w y i
mapVector2 :: forall a b c n r .
(Pos n, IsPrimitive c) =>
(Value a -> Value b -> CodeGenFunction r (Value c)) ->
Value (Vector n a) -> Value (Vector n b) -> CodeGenFunction r (Value (Vector n c))
mapVector2 f v1 v2 =
forLoop (valueOf 0) (valueOf (toNum (undefined :: n))) (value undef) $ \ i w -> do
x <- extractelement v1 i
y <- extractelement v2 i
z <- f x y
insertelement w z i
| null | https://raw.githubusercontent.com/bos/llvm/819b94d048c9d7787ce41cd7c71b84424e894f64/LLVM/Util/Loop.hs | haskell | XXX should use HList if it was packaged in a nice way .
XXX should use HList if it was packaged in a nice way.
Loop the index variable from low to high. The state in the loop starts as start, and is modified
by incr in each iteration.
------------------------------------ | # LANGUAGE ScopedTypeVariables , FlexibleInstances , TypeOperators , FlexibleContexts #
module LLVM.Util.Loop(Phi(phis,addPhis), forLoop, mapVector, mapVector2) where
import Data.TypeLevel hiding (Bool)
import LLVM.Core
class Phi a where
phis :: BasicBlock -> a -> CodeGenFunction r a
addPhis :: BasicBlock -> a -> a -> CodeGenFunction r ()
infixr 1 :*
data a :* b = a :* b
deriving ( Eq , Ord , Show , Read )
instance ( IsFirstClass a , Phi b ) = > Phi ( Value a :* b ) where
phis bb ( a :* b ) = do
a ' < - phi [ ( a , bb ) ]
b ' < - phis bb b
return ( a ' :* b ' )
addPhis bb ( a :* b ) ( a ' :* b ' ) = do
addPhiInputs a [ ( a ' , bb ) ]
addPhis bb b b '
infixr 1 :*
data a :* b = a :* b
deriving (Eq, Ord, Show, Read)
instance (IsFirstClass a, Phi b) => Phi (Value a :* b) where
phis bb (a :* b) = do
a' <- phi [(a, bb)]
b' <- phis bb b
return (a' :* b')
addPhis bb (a :* b) (a' :* b') = do
addPhiInputs a [(a', bb)]
addPhis bb b b'
-}
instance Phi () where
phis _ _ = return ()
addPhis _ _ _ = return ()
instance (IsFirstClass a) => Phi (Value a) where
phis bb a = do
a' <- phi [(a, bb)]
return a'
addPhis bb a a' = do
addPhiInputs a [(a', bb)]
instance (Phi a, Phi b) => Phi (a, b) where
phis bb (a, b) = do
a' <- phis bb a
b' <- phis bb b
return (a', b')
addPhis bb (a, b) (a', b') = do
addPhis bb a a'
addPhis bb b b'
instance (Phi a, Phi b, Phi c) => Phi (a, b, c) where
phis bb (a, b, c) = do
a' <- phis bb a
b' <- phis bb b
c' <- phis bb c
return (a', b', c')
addPhis bb (a, b, c) (a', b', c') = do
addPhis bb a a'
addPhis bb b b'
addPhis bb c c'
forLoop :: forall i a r . (Phi a, Num i, IsConst i, IsInteger i, IsFirstClass i, CmpRet i Bool) =>
Value i -> Value i -> a -> (Value i -> a -> CodeGenFunction r a) -> CodeGenFunction r a
forLoop low high start incr = do
top <- getCurrentBasicBlock
loop <- newBasicBlock
body <- newBasicBlock
exit <- newBasicBlock
br loop
defineBasicBlock loop
i <- phi [(low, top)]
vars <- phis top start
t <- cmp CmpNE i high
condBr t body exit
defineBasicBlock body
vars' <- incr i vars
i' <- add i (valueOf 1 :: Value i)
body' <- getCurrentBasicBlock
addPhis body' vars vars'
addPhiInputs i [(i', body')]
br loop
defineBasicBlock exit
return vars
mapVector :: forall a b n r .
(Pos n, IsPrimitive b) =>
(Value a -> CodeGenFunction r (Value b)) ->
Value (Vector n a) -> CodeGenFunction r (Value (Vector n b))
mapVector f v =
forLoop (valueOf 0) (valueOf (toNum (undefined :: n))) (value undef) $ \ i w -> do
x <- extractelement v i
y <- f x
insertelement w y i
mapVector2 :: forall a b c n r .
(Pos n, IsPrimitive c) =>
(Value a -> Value b -> CodeGenFunction r (Value c)) ->
Value (Vector n a) -> Value (Vector n b) -> CodeGenFunction r (Value (Vector n c))
mapVector2 f v1 v2 =
forLoop (valueOf 0) (valueOf (toNum (undefined :: n))) (value undef) $ \ i w -> do
x <- extractelement v1 i
y <- extractelement v2 i
z <- f x y
insertelement w z i
|
33c1baee5fd773fd2907a088f998e09670e78d33bf086003a04b03c3692011ca | haskell/ghc-builder | ClientMonad.hs |
# LANGUAGE GeneralizedNewtypeDeriving #
module ClientMonad (ClientMonad, evalClientMonad, mkClientState,
getUser, getVerbosity, getHost, getBaseDir,
getHandle, setHandle
) where
import Builder.Handlelike
import Builder.Utils
import Control.Applicative
import Control.Monad.State
newtype ClientMonad a = ClientMonad (StateT ClientState IO a)
deriving (Functor, Applicative, Monad, MonadIO)
data ClientState = ClientState {
cs_user :: User,
cs_verbosity :: Verbosity,
cs_host :: String,
cs_basedir :: FilePath,
cs_handleOrSsl :: HandleOrSsl
}
mkClientState :: Verbosity -> User -> String -> FilePath -> HandleOrSsl -> ClientState
mkClientState v u host bd h
= ClientState {
cs_user = u,
cs_verbosity = v,
cs_host = host,
cs_basedir = bd,
cs_handleOrSsl = h
}
evalClientMonad :: ClientMonad a -> ClientState -> IO a
evalClientMonad (ClientMonad m) cs = evalStateT m cs
getUser :: ClientMonad User
getUser = do st <- ClientMonad get
return $ cs_user st
getVerbosity :: ClientMonad Verbosity
getVerbosity = do st <- ClientMonad get
return $ cs_verbosity st
getHost :: ClientMonad String
getHost = do st <- ClientMonad get
return $ cs_host st
getHandle :: ClientMonad HandleOrSsl
getHandle = do st <- ClientMonad get
return $ cs_handleOrSsl st
setHandle :: HandleOrSsl -> ClientMonad ()
setHandle h = do st <- ClientMonad get
ClientMonad $ put $ st { cs_handleOrSsl = h }
getBaseDir :: ClientMonad FilePath
getBaseDir = do st <- ClientMonad get
return $ cs_basedir st
instance HandlelikeM ClientMonad where
hlPutStrLn str = do h <- getHandle
liftIO $ hlPutStrLn' h str
hlGetLine = do h <- getHandle
liftIO $ hlGetLine' h
hlGet n = do h <- getHandle
liftIO $ hlGet' h n
| null | https://raw.githubusercontent.com/haskell/ghc-builder/ef90aa7da7ec017d59d875e5bfe5d6b281d766f7/client/ClientMonad.hs | haskell |
# LANGUAGE GeneralizedNewtypeDeriving #
module ClientMonad (ClientMonad, evalClientMonad, mkClientState,
getUser, getVerbosity, getHost, getBaseDir,
getHandle, setHandle
) where
import Builder.Handlelike
import Builder.Utils
import Control.Applicative
import Control.Monad.State
newtype ClientMonad a = ClientMonad (StateT ClientState IO a)
deriving (Functor, Applicative, Monad, MonadIO)
data ClientState = ClientState {
cs_user :: User,
cs_verbosity :: Verbosity,
cs_host :: String,
cs_basedir :: FilePath,
cs_handleOrSsl :: HandleOrSsl
}
mkClientState :: Verbosity -> User -> String -> FilePath -> HandleOrSsl -> ClientState
mkClientState v u host bd h
= ClientState {
cs_user = u,
cs_verbosity = v,
cs_host = host,
cs_basedir = bd,
cs_handleOrSsl = h
}
evalClientMonad :: ClientMonad a -> ClientState -> IO a
evalClientMonad (ClientMonad m) cs = evalStateT m cs
getUser :: ClientMonad User
getUser = do st <- ClientMonad get
return $ cs_user st
getVerbosity :: ClientMonad Verbosity
getVerbosity = do st <- ClientMonad get
return $ cs_verbosity st
getHost :: ClientMonad String
getHost = do st <- ClientMonad get
return $ cs_host st
getHandle :: ClientMonad HandleOrSsl
getHandle = do st <- ClientMonad get
return $ cs_handleOrSsl st
setHandle :: HandleOrSsl -> ClientMonad ()
setHandle h = do st <- ClientMonad get
ClientMonad $ put $ st { cs_handleOrSsl = h }
getBaseDir :: ClientMonad FilePath
getBaseDir = do st <- ClientMonad get
return $ cs_basedir st
instance HandlelikeM ClientMonad where
hlPutStrLn str = do h <- getHandle
liftIO $ hlPutStrLn' h str
hlGetLine = do h <- getHandle
liftIO $ hlGetLine' h
hlGet n = do h <- getHandle
liftIO $ hlGet' h n
| |
c9392a756b2861d2884f3c087e6b282a13f987b653d7e80428410360f58a4d4e | master/ejabberd | ejabberd_stun.erl | %%%-------------------------------------------------------------------
File : ejabberd_stun.erl
Author : < >
%%% Description : RFC5389 implementation.
%%% Currently only Binding usage is supported.
%%%
Created : 8 Aug 2009 by < >
%%%
%%%
ejabberd , Copyright ( C ) 2002 - 2012 ProcessOne
%%%
%%% This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation ; either version 2 of the
%%% License, or (at your option) any later version.
%%%
%%% This program is distributed in the hope that it will be useful,
%%% but WITHOUT ANY WARRANTY; without even the implied warranty of
%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
%%% General Public License for more details.
%%%
You should have received a copy of the GNU General Public License
%%% along with this program; if not, write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA
02111 - 1307 USA
%%%
%%%-------------------------------------------------------------------
-module(ejabberd_stun).
-behaviour(gen_fsm).
%% API
-export([start_link/2,
start/2,
socket_type/0,
udp_recv/5]).
%% gen_fsm callbacks
-export([init/1,
handle_event/3,
handle_sync_event/4,
handle_info/3,
terminate/3,
code_change/4]).
%% gen_fsm states
-export([wait_for_tls/2,
session_established/2]).
-include("ejabberd.hrl").
-include("stun.hrl").
64 kb
10 sec
-record(state, {sock,
sock_mod = gen_tcp,
certfile,
peer,
tref,
buf = <<>>}).
%%====================================================================
%% API
%%====================================================================
start({gen_tcp, Sock}, Opts) ->
supervisor:start_child(ejabberd_stun_sup, [Sock, Opts]).
start_link(Sock, Opts) ->
gen_fsm:start_link(?MODULE, [Sock, Opts], []).
socket_type() ->
raw.
udp_recv(Sock, Addr, Port, Data, _Opts) ->
case stun_codec:decode(Data) of
{ok, Msg, <<>>} ->
?DEBUG("got:~n~p", [Msg]),
case process(Addr, Port, Msg) of
RespMsg when is_record(RespMsg, stun) ->
?DEBUG("sent:~n~p", [RespMsg]),
Data1 = stun_codec:encode(RespMsg),
gen_udp:send(Sock, Addr, Port, Data1);
_ ->
ok
end;
_ ->
ok
end.
%%====================================================================
%% gen_fsm callbacks
%%====================================================================
init([Sock, Opts]) ->
case inet:peername(Sock) of
{ok, Addr} ->
inet:setopts(Sock, [{active, once}]),
TRef = erlang:start_timer(?TIMEOUT, self(), stop),
State = #state{sock = Sock, peer = Addr, tref = TRef},
case proplists:get_value(certfile, Opts) of
undefined ->
{ok, session_established, State};
CertFile ->
{ok, wait_for_tls, State#state{certfile = CertFile}}
end;
Err ->
Err
end.
wait_for_tls(Event, State) ->
?INFO_MSG("unexpected event in wait_for_tls: ~p", [Event]),
{next_state, wait_for_tls, State}.
session_established(Msg, State) when is_record(Msg, stun) ->
?DEBUG("got:~n~p", [Msg]),
{Addr, Port} = State#state.peer,
case process(Addr, Port, Msg) of
Resp when is_record(Resp, stun) ->
?DEBUG("sent:~n~p", [Resp]),
Data = stun_codec:encode(Resp),
(State#state.sock_mod):send(State#state.sock, Data);
_ ->
ok
end,
{next_state, session_established, State};
session_established(Event, State) ->
?INFO_MSG("unexpected event in session_established: ~p", [Event]),
{next_state, session_established, State}.
handle_event(_Event, StateName, State) ->
{next_state, StateName, State}.
handle_sync_event(_Event, _From, StateName, State) ->
{reply, {error, badarg}, StateName, State}.
handle_info({tcp, Sock, TLSData}, wait_for_tls, State) ->
Buf = <<(State#state.buf)/binary, TLSData/binary>>,
%% Check if the initial message is a TLS handshake
case Buf of
_ when size(Buf) < 3 ->
{next_state, wait_for_tls,
update_state(State#state{buf = Buf})};
<<_:16, 1, _/binary>> ->
TLSOpts = [{certfile, State#state.certfile}],
{ok, TLSSock} = tls:tcp_to_tls(Sock, TLSOpts),
NewState = State#state{sock = TLSSock,
buf = <<>>,
sock_mod = tls},
case tls:recv_data(TLSSock, Buf) of
{ok, Data} ->
process_data(session_established, NewState, Data);
_Err ->
{stop, normal, NewState}
end;
_ ->
process_data(session_established, State, TLSData)
end;
handle_info({tcp, _Sock, TLSData}, StateName,
#state{sock_mod = tls} = State) ->
case tls:recv_data(State#state.sock, TLSData) of
{ok, Data} ->
process_data(StateName, State, Data);
_Err ->
{stop, normal, State}
end;
handle_info({tcp, _Sock, Data}, StateName, State) ->
process_data(StateName, State, Data);
handle_info({tcp_closed, _Sock}, _StateName, State) ->
?DEBUG("connection reset by peer", []),
{stop, normal, State};
handle_info({tcp_error, _Sock, Reason}, _StateName, State) ->
?DEBUG("connection error: ~p", [Reason]),
{stop, normal, State};
handle_info({timeout, TRef, stop}, _StateName,
#state{tref = TRef} = State) ->
{stop, normal, State};
handle_info(Info, StateName, State) ->
?INFO_MSG("unexpected info: ~p", [Info]),
{next_state, StateName, State}.
terminate(_Reason, _StateName, State) ->
catch (State#state.sock_mod):close(State#state.sock),
ok.
code_change(_OldVsn, StateName, State, _Extra) ->
{ok, StateName, State}.
%%--------------------------------------------------------------------
Internal functions
%%--------------------------------------------------------------------
process(Addr, Port, #stun{class = request, unsupported = []} = Msg) ->
Resp = prepare_response(Msg),
if Msg#stun.method == ?STUN_METHOD_BINDING ->
case stun_codec:version(Msg) of
old ->
Resp#stun{class = response,
'MAPPED-ADDRESS' = {Addr, Port}};
new ->
Resp#stun{class = response,
'XOR-MAPPED-ADDRESS' = {Addr, Port}}
end;
true ->
Resp#stun{class = error,
'ERROR-CODE' = {405, <<"Method Not Allowed">>}}
end;
process(_Addr, _Port, #stun{class = request} = Msg) ->
Resp = prepare_response(Msg),
Resp#stun{class = error,
'UNKNOWN-ATTRIBUTES' = Msg#stun.unsupported,
'ERROR-CODE' = {420, stun_codec:reason(420)}};
process(_Addr, _Port, _Msg) ->
pass.
prepare_response(Msg) ->
Version = list_to_binary("ejabberd " ++ ?VERSION),
#stun{method = Msg#stun.method,
magic = Msg#stun.magic,
trid = Msg#stun.trid,
'SOFTWARE' = Version}.
process_data(NextStateName, #state{buf = Buf} = State, Data) ->
NewBuf = <<Buf/binary, Data/binary>>,
case stun_codec:decode(NewBuf) of
{ok, Msg, Tail} ->
gen_fsm:send_event(self(), Msg),
process_data(NextStateName, State#state{buf = <<>>}, Tail);
empty ->
NewState = State#state{buf = <<>>},
{next_state, NextStateName, update_state(NewState)};
more when size(NewBuf) < ?MAX_BUF_SIZE ->
NewState = State#state{buf = NewBuf},
{next_state, NextStateName, update_state(NewState)};
_ ->
{stop, normal, State}
end.
update_state(#state{sock = Sock} = State) ->
case State#state.sock_mod of
gen_tcp ->
inet:setopts(Sock, [{active, once}]);
SockMod ->
SockMod:setopts(Sock, [{active, once}])
end,
cancel_timer(State#state.tref),
TRef = erlang:start_timer(?TIMEOUT, self(), stop),
State#state{tref = TRef}.
cancel_timer(TRef) ->
case erlang:cancel_timer(TRef) of
false ->
receive
{timeout, TRef, _} ->
ok
after 0 ->
ok
end;
_ ->
ok
end.
| null | https://raw.githubusercontent.com/master/ejabberd/9c31874d5a9d1852ece1b8ae70dd4b7e5eef7cf7/src/stun/ejabberd_stun.erl | erlang | -------------------------------------------------------------------
Description : RFC5389 implementation.
Currently only Binding usage is supported.
This program is free software; you can redistribute it and/or
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
along with this program; if not, write to the Free Software
-------------------------------------------------------------------
API
gen_fsm callbacks
gen_fsm states
====================================================================
API
====================================================================
====================================================================
gen_fsm callbacks
====================================================================
Check if the initial message is a TLS handshake
--------------------------------------------------------------------
-------------------------------------------------------------------- | File : ejabberd_stun.erl
Author : < >
Created : 8 Aug 2009 by < >
ejabberd , Copyright ( C ) 2002 - 2012 ProcessOne
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation ; either version 2 of the
You should have received a copy of the GNU General Public License
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA
02111 - 1307 USA
-module(ejabberd_stun).
-behaviour(gen_fsm).
-export([start_link/2,
start/2,
socket_type/0,
udp_recv/5]).
-export([init/1,
handle_event/3,
handle_sync_event/4,
handle_info/3,
terminate/3,
code_change/4]).
-export([wait_for_tls/2,
session_established/2]).
-include("ejabberd.hrl").
-include("stun.hrl").
64 kb
10 sec
-record(state, {sock,
sock_mod = gen_tcp,
certfile,
peer,
tref,
buf = <<>>}).
start({gen_tcp, Sock}, Opts) ->
supervisor:start_child(ejabberd_stun_sup, [Sock, Opts]).
start_link(Sock, Opts) ->
gen_fsm:start_link(?MODULE, [Sock, Opts], []).
socket_type() ->
raw.
udp_recv(Sock, Addr, Port, Data, _Opts) ->
case stun_codec:decode(Data) of
{ok, Msg, <<>>} ->
?DEBUG("got:~n~p", [Msg]),
case process(Addr, Port, Msg) of
RespMsg when is_record(RespMsg, stun) ->
?DEBUG("sent:~n~p", [RespMsg]),
Data1 = stun_codec:encode(RespMsg),
gen_udp:send(Sock, Addr, Port, Data1);
_ ->
ok
end;
_ ->
ok
end.
init([Sock, Opts]) ->
case inet:peername(Sock) of
{ok, Addr} ->
inet:setopts(Sock, [{active, once}]),
TRef = erlang:start_timer(?TIMEOUT, self(), stop),
State = #state{sock = Sock, peer = Addr, tref = TRef},
case proplists:get_value(certfile, Opts) of
undefined ->
{ok, session_established, State};
CertFile ->
{ok, wait_for_tls, State#state{certfile = CertFile}}
end;
Err ->
Err
end.
wait_for_tls(Event, State) ->
?INFO_MSG("unexpected event in wait_for_tls: ~p", [Event]),
{next_state, wait_for_tls, State}.
session_established(Msg, State) when is_record(Msg, stun) ->
?DEBUG("got:~n~p", [Msg]),
{Addr, Port} = State#state.peer,
case process(Addr, Port, Msg) of
Resp when is_record(Resp, stun) ->
?DEBUG("sent:~n~p", [Resp]),
Data = stun_codec:encode(Resp),
(State#state.sock_mod):send(State#state.sock, Data);
_ ->
ok
end,
{next_state, session_established, State};
session_established(Event, State) ->
?INFO_MSG("unexpected event in session_established: ~p", [Event]),
{next_state, session_established, State}.
handle_event(_Event, StateName, State) ->
{next_state, StateName, State}.
handle_sync_event(_Event, _From, StateName, State) ->
{reply, {error, badarg}, StateName, State}.
handle_info({tcp, Sock, TLSData}, wait_for_tls, State) ->
Buf = <<(State#state.buf)/binary, TLSData/binary>>,
case Buf of
_ when size(Buf) < 3 ->
{next_state, wait_for_tls,
update_state(State#state{buf = Buf})};
<<_:16, 1, _/binary>> ->
TLSOpts = [{certfile, State#state.certfile}],
{ok, TLSSock} = tls:tcp_to_tls(Sock, TLSOpts),
NewState = State#state{sock = TLSSock,
buf = <<>>,
sock_mod = tls},
case tls:recv_data(TLSSock, Buf) of
{ok, Data} ->
process_data(session_established, NewState, Data);
_Err ->
{stop, normal, NewState}
end;
_ ->
process_data(session_established, State, TLSData)
end;
handle_info({tcp, _Sock, TLSData}, StateName,
#state{sock_mod = tls} = State) ->
case tls:recv_data(State#state.sock, TLSData) of
{ok, Data} ->
process_data(StateName, State, Data);
_Err ->
{stop, normal, State}
end;
handle_info({tcp, _Sock, Data}, StateName, State) ->
process_data(StateName, State, Data);
handle_info({tcp_closed, _Sock}, _StateName, State) ->
?DEBUG("connection reset by peer", []),
{stop, normal, State};
handle_info({tcp_error, _Sock, Reason}, _StateName, State) ->
?DEBUG("connection error: ~p", [Reason]),
{stop, normal, State};
handle_info({timeout, TRef, stop}, _StateName,
#state{tref = TRef} = State) ->
{stop, normal, State};
handle_info(Info, StateName, State) ->
?INFO_MSG("unexpected info: ~p", [Info]),
{next_state, StateName, State}.
terminate(_Reason, _StateName, State) ->
catch (State#state.sock_mod):close(State#state.sock),
ok.
code_change(_OldVsn, StateName, State, _Extra) ->
{ok, StateName, State}.
Internal functions
process(Addr, Port, #stun{class = request, unsupported = []} = Msg) ->
Resp = prepare_response(Msg),
if Msg#stun.method == ?STUN_METHOD_BINDING ->
case stun_codec:version(Msg) of
old ->
Resp#stun{class = response,
'MAPPED-ADDRESS' = {Addr, Port}};
new ->
Resp#stun{class = response,
'XOR-MAPPED-ADDRESS' = {Addr, Port}}
end;
true ->
Resp#stun{class = error,
'ERROR-CODE' = {405, <<"Method Not Allowed">>}}
end;
process(_Addr, _Port, #stun{class = request} = Msg) ->
Resp = prepare_response(Msg),
Resp#stun{class = error,
'UNKNOWN-ATTRIBUTES' = Msg#stun.unsupported,
'ERROR-CODE' = {420, stun_codec:reason(420)}};
process(_Addr, _Port, _Msg) ->
pass.
prepare_response(Msg) ->
Version = list_to_binary("ejabberd " ++ ?VERSION),
#stun{method = Msg#stun.method,
magic = Msg#stun.magic,
trid = Msg#stun.trid,
'SOFTWARE' = Version}.
process_data(NextStateName, #state{buf = Buf} = State, Data) ->
NewBuf = <<Buf/binary, Data/binary>>,
case stun_codec:decode(NewBuf) of
{ok, Msg, Tail} ->
gen_fsm:send_event(self(), Msg),
process_data(NextStateName, State#state{buf = <<>>}, Tail);
empty ->
NewState = State#state{buf = <<>>},
{next_state, NextStateName, update_state(NewState)};
more when size(NewBuf) < ?MAX_BUF_SIZE ->
NewState = State#state{buf = NewBuf},
{next_state, NextStateName, update_state(NewState)};
_ ->
{stop, normal, State}
end.
update_state(#state{sock = Sock} = State) ->
case State#state.sock_mod of
gen_tcp ->
inet:setopts(Sock, [{active, once}]);
SockMod ->
SockMod:setopts(Sock, [{active, once}])
end,
cancel_timer(State#state.tref),
TRef = erlang:start_timer(?TIMEOUT, self(), stop),
State#state{tref = TRef}.
cancel_timer(TRef) ->
case erlang:cancel_timer(TRef) of
false ->
receive
{timeout, TRef, _} ->
ok
after 0 ->
ok
end;
_ ->
ok
end.
|
a40e9455a422054e668e09afaa31131acc005d56f4ed6aa2fbf86c859c060d07 | michaelschade/hs-stripe | Token.hs | {-# LANGUAGE OverloadedStrings #-}
module Web.Stripe.Token
( Token(..)
, TokenId(..)
, createToken
, getToken
, tokRq
{- Re-Export -}
, UTCTime(..)
, Amount(..)
, Card(..)
, Currency(..)
, StripeConfig(..)
, StripeT(..)
, runStripeT
) where
import Control.Applicative ((<$>), (<*>))
import Control.Monad (liftM, mzero)
import Control.Monad.Error (MonadIO)
import Data.Aeson (FromJSON (..), Value (..), (.:))
import qualified Data.Text as T
import Network.HTTP.Types (StdMethod (..))
import Web.Stripe.Card (Card (..), RequestCard (..), rCardKV)
import Web.Stripe.Client (StripeConfig (..), StripeRequest (..),
StripeT (..), baseSReq, query, runStripeT)
import Web.Stripe.Utils (Amount (..), Currency (..), UTCTime (..),
fromSeconds)
----------------
-- Data Types --
----------------
-- | Represents a token in the Stripe system.
data Token = Token
{ tokId :: TokenId
, tokLive :: Bool
, tokUsed :: Bool
, tokCreated :: UTCTime
, tokCard :: Card
} deriving Show
-- | Represents the identifier for a given 'Token' in the Stripe system.
newtype TokenId = TokenId { unTokenId :: T.Text } deriving (Show, Eq)
-- | Creates a 'Token' in the Stripe system.
createToken :: MonadIO m => RequestCard -> StripeT m Token
createToken rc =
snd `liftM` query (tokRq []) { sMethod = POST, sData = rCardKV rc }
-- | Retrieves a specific 'Token' based on its 'Token'.
getToken :: MonadIO m => TokenId -> StripeT m Token
getToken (TokenId tid) = return . snd =<< query (tokRq [tid])
| Convenience function to create a ' StripeRequest ' specific to tokens .
tokRq :: [T.Text] -> StripeRequest
tokRq pcs = baseSReq { sDestination = "tokens":pcs }
------------------
-- JSON Parsing --
------------------
-- | Attempts to parse JSON into a 'Token'.
instance FromJSON Token where
parseJSON (Object o) = Token
<$> (TokenId <$> o .: "id")
<*> o .: "livemode"
<*> o .: "used"
<*> (fromSeconds <$> o .: "created")
<*> o .: "card"
parseJSON _ = mzero
| null | https://raw.githubusercontent.com/michaelschade/hs-stripe/64b58415ccc567b00171b34470e93400cb9e79fd/src/Web/Stripe/Token.hs | haskell | # LANGUAGE OverloadedStrings #
Re-Export
--------------
Data Types --
--------------
| Represents a token in the Stripe system.
| Represents the identifier for a given 'Token' in the Stripe system.
| Creates a 'Token' in the Stripe system.
| Retrieves a specific 'Token' based on its 'Token'.
----------------
JSON Parsing --
----------------
| Attempts to parse JSON into a 'Token'. |
module Web.Stripe.Token
( Token(..)
, TokenId(..)
, createToken
, getToken
, tokRq
, UTCTime(..)
, Amount(..)
, Card(..)
, Currency(..)
, StripeConfig(..)
, StripeT(..)
, runStripeT
) where
import Control.Applicative ((<$>), (<*>))
import Control.Monad (liftM, mzero)
import Control.Monad.Error (MonadIO)
import Data.Aeson (FromJSON (..), Value (..), (.:))
import qualified Data.Text as T
import Network.HTTP.Types (StdMethod (..))
import Web.Stripe.Card (Card (..), RequestCard (..), rCardKV)
import Web.Stripe.Client (StripeConfig (..), StripeRequest (..),
StripeT (..), baseSReq, query, runStripeT)
import Web.Stripe.Utils (Amount (..), Currency (..), UTCTime (..),
fromSeconds)
data Token = Token
{ tokId :: TokenId
, tokLive :: Bool
, tokUsed :: Bool
, tokCreated :: UTCTime
, tokCard :: Card
} deriving Show
newtype TokenId = TokenId { unTokenId :: T.Text } deriving (Show, Eq)
createToken :: MonadIO m => RequestCard -> StripeT m Token
createToken rc =
snd `liftM` query (tokRq []) { sMethod = POST, sData = rCardKV rc }
getToken :: MonadIO m => TokenId -> StripeT m Token
getToken (TokenId tid) = return . snd =<< query (tokRq [tid])
| Convenience function to create a ' StripeRequest ' specific to tokens .
tokRq :: [T.Text] -> StripeRequest
tokRq pcs = baseSReq { sDestination = "tokens":pcs }
instance FromJSON Token where
parseJSON (Object o) = Token
<$> (TokenId <$> o .: "id")
<*> o .: "livemode"
<*> o .: "used"
<*> (fromSeconds <$> o .: "created")
<*> o .: "card"
parseJSON _ = mzero
|
e2be64d886442704ebdc907257c28464c2daec0cd3885a360d65ab725ac2a650 | hyperfiddle/electric | circuit_summation2.cljc | (ns dustin.y2022.forms.circuit-summation2
(:require [contrib.clojurex :refer [bindx]]
#?(:clj [datomic.client.api :as d])
[hyperfiddle.api :as hf]
[hyperfiddle.photon :as p]
[hyperfiddle.photon-dom :as dom]
[hyperfiddle.photon-ui2 :as ui]
#?(:clj [hyperfiddle.txn :refer [minimal-tx]]))
(:import [hyperfiddle.photon Pending])
#?(:cljs (:require-macros dustin.y2022.forms.circuit-summation2)))
; The problem is distributed state loops
; it's the network. the network is discrete
; db = App(db)
; f = fix f
( iterate App db )
; db = Σ [db=0..T] App(db)Δdb
; db = Σ [s=0..T] App(db)Δdb
; note that db is the clock, so we can substitute:
;
; db(t) = Σ [t=0..T] App(t)Δt
; where App(t)Δt = ΔApp(t),
; such that ΔApp(db) is a function of database that returns a small transactional change to the database
; db = Σ [s=0..T] ΔApp(db)
; db = Σ [s=0..T] App(db)Δdb
; Integrate db0,,dbT App(db)
(p/def Transact!)
(def cobbblestone 0)
(def label-form-spec [:db/id :label/gid :label/name :label/sortName :label/startYear])
(p/defn App [e]
(let [record (d/pull hf/db label-form-spec)]
(dom/dl
(dom/dt (dom/text "name"))
(dom/dd (let [v (ui/input (:label/name record) #_hf/loading)]
(p/server (Transact! [[:db/add e :label/name v]])))) ; throws pending, even if no-op
; should we silence the pending?
first the field throws pending on load
; then the db short circuits
; then we edit, throw pending, new db, queries throw pending, short circuit here
(dom/dt (dom/text "sortName"))
(dom/dd (let [v (ui/input (:label/sortName record) #_hf/loading)]
(p/server (Transact! [[:db/add e :label/sortName v]])))))))
( defmacro summation [ rf [ name ] & body ]
` ( let [ ! t # ( ) ~name ( p / watch ! t # ) ]
; (binding [Transact! (p/fn [Δt]
( p / wrap ( async - swap ! ! t # ~rf Δt ) ) ) ] ; advance time , busy now true
; (do ~@body))))
(p/defn Demo []
(bindx [hf/db (d/with-db @(requiring-resolve 'test/datomic-conn))]
(p/client
(dom/h1 (dom/text (str `Demo)))
(p/with-cycle [loading ::hf/loading]
(binding [hf/loading loading]
(dom/div (dom/text (name loading) " " (hf/Load-timer.) "ms"))
(try
(p/server
; don't lose the stage on rebase!
(let [!t (atom hf/db)]
(binding [hf/db (p/watch !t)
blocks , call with p / wrap . Or use async variant
(when-some [Δt (seq (minimal-tx db Δt))] ; stabilize initial loop, requires query
(:db-after (d/with db {:tx-data tx}))))
Transact! (p/fn [Δt]
; when finished, local busy state false, page busy state true
; call site can let the Pending exception through if the difference isn't meaningful
(p/wrap (async-swap! !t (partial hf/with hf/db) Δt)))] ; advance time
(App. cobbblestone))))
::hf/idle (catch Pending e ::hf/loading))))
nil)))
; What about optimistic updates? Don't wait for global order
; controls emit separate txs, the popover must intercept to make them atomic
implement the tx listener | null | https://raw.githubusercontent.com/hyperfiddle/electric/1c6c3891cbf13123fef8d33e6555d300f0dac134/scratch/dustin/y2022/forms/circuit_summation2.cljc | clojure | The problem is distributed state loops
it's the network. the network is discrete
db = App(db)
f = fix f
db = Σ [db=0..T] App(db)Δdb
db = Σ [s=0..T] App(db)Δdb
note that db is the clock, so we can substitute:
db(t) = Σ [t=0..T] App(t)Δt
where App(t)Δt = ΔApp(t),
such that ΔApp(db) is a function of database that returns a small transactional change to the database
db = Σ [s=0..T] ΔApp(db)
db = Σ [s=0..T] App(db)Δdb
Integrate db0,,dbT App(db)
throws pending, even if no-op
should we silence the pending?
then the db short circuits
then we edit, throw pending, new db, queries throw pending, short circuit here
(binding [Transact! (p/fn [Δt]
advance time , busy now true
(do ~@body))))
don't lose the stage on rebase!
stabilize initial loop, requires query
when finished, local busy state false, page busy state true
call site can let the Pending exception through if the difference isn't meaningful
advance time
What about optimistic updates? Don't wait for global order
controls emit separate txs, the popover must intercept to make them atomic | (ns dustin.y2022.forms.circuit-summation2
(:require [contrib.clojurex :refer [bindx]]
#?(:clj [datomic.client.api :as d])
[hyperfiddle.api :as hf]
[hyperfiddle.photon :as p]
[hyperfiddle.photon-dom :as dom]
[hyperfiddle.photon-ui2 :as ui]
#?(:clj [hyperfiddle.txn :refer [minimal-tx]]))
(:import [hyperfiddle.photon Pending])
#?(:cljs (:require-macros dustin.y2022.forms.circuit-summation2)))
( iterate App db )
(p/def Transact!)
(def cobbblestone 0)
(def label-form-spec [:db/id :label/gid :label/name :label/sortName :label/startYear])
(p/defn App [e]
(let [record (d/pull hf/db label-form-spec)]
(dom/dl
(dom/dt (dom/text "name"))
(dom/dd (let [v (ui/input (:label/name record) #_hf/loading)]
first the field throws pending on load
(dom/dt (dom/text "sortName"))
(dom/dd (let [v (ui/input (:label/sortName record) #_hf/loading)]
(p/server (Transact! [[:db/add e :label/sortName v]])))))))
( defmacro summation [ rf [ name ] & body ]
` ( let [ ! t # ( ) ~name ( p / watch ! t # ) ]
(p/defn Demo []
(bindx [hf/db (d/with-db @(requiring-resolve 'test/datomic-conn))]
(p/client
(dom/h1 (dom/text (str `Demo)))
(p/with-cycle [loading ::hf/loading]
(binding [hf/loading loading]
(dom/div (dom/text (name loading) " " (hf/Load-timer.) "ms"))
(try
(p/server
(let [!t (atom hf/db)]
(binding [hf/db (p/watch !t)
blocks , call with p / wrap . Or use async variant
(:db-after (d/with db {:tx-data tx}))))
Transact! (p/fn [Δt]
(App. cobbblestone))))
::hf/idle (catch Pending e ::hf/loading))))
nil)))
implement the tx listener |
d2681747c8bed89aa9275914c3b0b9082b2d1f0006d6a40897e4c180f8c92db4 | pkhuong/Napa-FFT | support-macros.lisp | (in-package "NAPA-FFT")
(defmacro unrolled-for (((var count &key (offset 0) (stride 1) (type t)) &rest var-data)
&body body)
(assert (numberp count))
(let ((var-data (cons `(,var :offset ,offset :stride ,stride :type ,type)
var-data))
(var-names '())
(var-offsets '())
(var-strides '())
(var-types '()))
(dolist (data var-data)
(destructuring-bind (var &key (offset 0) (stride 1) (type t)) data
(push var var-names)
(push offset var-offsets)
(push stride var-strides)
(push type var-types)))
(setf var-names (nreverse var-names)
var-offsets (nreverse var-offsets)
var-strides (nreverse var-strides)
var-types (nreverse var-types))
(labels ((rec (count initials)
(and (plusp count)
`(let ,(mapcar (lambda (name initial type)
`(,name (the ,type ,initial)))
var-names initials var-types)
(declare (ignorable ,var))
,@body
,(rec (1- count)
(mapcar (lambda (stride name)
`(+ ,stride ,name))
var-strides var-names))))))
(rec count var-offsets))))
(defvar *function-bodies*)
(defvar *generated-bodies*)
(defvar *declarations*)
(defmacro with-function-bodies (&body body)
`(let ((*function-bodies* nil)
(*declarations* nil)
(*generated-bodies* (make-hash-table :test #'equal)))
,@body
(values (reverse *function-bodies*)
(reverse *declarations*))))
(defun symbolicate (root value &rest values)
(intern (format nil "~A[~{~A~^/~}]" root (cons value values))))
(defmacro ensure-body ((root value &rest values) (&rest arg-list) &body body)
(let ((_key (gensym "KEY"))
(_name (gensym "NAME")))
`(let ((,_key (list ',root ,value ,@values)))
(cond ((gethash ,_key *generated-bodies*))
(t
(let ((,_name (apply 'symbolicate ,_key)))
(setf (gethash ,_key *generated-bodies*) ,_name)
(multiple-value-bind (body declarations)
(locally ,@body)
(push (list* ,_name ',arg-list body)
*function-bodies*)
(setf *declarations* (nconc (reverse (subst ,_name '.self. declarations))
*declarations*)))
,_name))))))
| null | https://raw.githubusercontent.com/pkhuong/Napa-FFT/4a5ee157b5db8006e7a7bdbed47e23ad85bf184e/support-macros.lisp | lisp | (in-package "NAPA-FFT")
(defmacro unrolled-for (((var count &key (offset 0) (stride 1) (type t)) &rest var-data)
&body body)
(assert (numberp count))
(let ((var-data (cons `(,var :offset ,offset :stride ,stride :type ,type)
var-data))
(var-names '())
(var-offsets '())
(var-strides '())
(var-types '()))
(dolist (data var-data)
(destructuring-bind (var &key (offset 0) (stride 1) (type t)) data
(push var var-names)
(push offset var-offsets)
(push stride var-strides)
(push type var-types)))
(setf var-names (nreverse var-names)
var-offsets (nreverse var-offsets)
var-strides (nreverse var-strides)
var-types (nreverse var-types))
(labels ((rec (count initials)
(and (plusp count)
`(let ,(mapcar (lambda (name initial type)
`(,name (the ,type ,initial)))
var-names initials var-types)
(declare (ignorable ,var))
,@body
,(rec (1- count)
(mapcar (lambda (stride name)
`(+ ,stride ,name))
var-strides var-names))))))
(rec count var-offsets))))
(defvar *function-bodies*)
(defvar *generated-bodies*)
(defvar *declarations*)
(defmacro with-function-bodies (&body body)
`(let ((*function-bodies* nil)
(*declarations* nil)
(*generated-bodies* (make-hash-table :test #'equal)))
,@body
(values (reverse *function-bodies*)
(reverse *declarations*))))
(defun symbolicate (root value &rest values)
(intern (format nil "~A[~{~A~^/~}]" root (cons value values))))
(defmacro ensure-body ((root value &rest values) (&rest arg-list) &body body)
(let ((_key (gensym "KEY"))
(_name (gensym "NAME")))
`(let ((,_key (list ',root ,value ,@values)))
(cond ((gethash ,_key *generated-bodies*))
(t
(let ((,_name (apply 'symbolicate ,_key)))
(setf (gethash ,_key *generated-bodies*) ,_name)
(multiple-value-bind (body declarations)
(locally ,@body)
(push (list* ,_name ',arg-list body)
*function-bodies*)
(setf *declarations* (nconc (reverse (subst ,_name '.self. declarations))
*declarations*)))
,_name))))))
| |
22a5274ef5a61283e3fb2be179e3a1106e9f1a19b346db3ecf58dafb3466718f | nvim-treesitter/nvim-treesitter | folds.scm | [
(exp_apply)
(exp_do)
(function)
] @fold
| null | https://raw.githubusercontent.com/nvim-treesitter/nvim-treesitter/599fd416c81498fe0fe619e00c1aa316cbf8d964/queries/haskell/folds.scm | scheme | [
(exp_apply)
(exp_do)
(function)
] @fold
| |
274b63787cf6f8aeeb1a1f056fe11eca39211b8ea2c38d74f233350cb968473b | facebook/duckling | Tests.hs | Copyright ( c ) 2016 - present , Facebook , Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
module Duckling.Ordinal.ID.Tests
( tests ) where
import Prelude
import Data.String
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Ordinal.ID.Corpus
import Duckling.Testing.Asserts
tests :: TestTree
tests = testGroup "ID Tests"
[ makeCorpusTest [Seal Ordinal] corpus
]
| null | https://raw.githubusercontent.com/facebook/duckling/72f45e8e2c7385f41f2f8b1f063e7b5daa6dca94/tests/Duckling/Ordinal/ID/Tests.hs | haskell | All rights reserved.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree. | Copyright ( c ) 2016 - present , Facebook , Inc.
module Duckling.Ordinal.ID.Tests
( tests ) where
import Prelude
import Data.String
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Ordinal.ID.Corpus
import Duckling.Testing.Asserts
tests :: TestTree
tests = testGroup "ID Tests"
[ makeCorpusTest [Seal Ordinal] corpus
]
|
6d2ebdac86569b5ab3b97e533f93b93d34514db233c103ca9cc9cae01e070df7 | seriyps/logger_journald | logger_journald_h_SUITE.erl | -module(logger_journald_h_SUITE).
-export([
all/0,
init_per_suite/1,
end_per_suite/1,
init_per_testcase/2,
end_per_testcase/2
]).
-export([
just_log_case/1,
truncation_case/1
%% ,
overload_case/1
]).
-include_lib("stdlib/include/assert.hrl").
-include_lib("common_test/include/ct.hrl").
-include_lib("kernel/include/logger.hrl").
all() ->
All exported functions of arity 1 whose name ends with " _ case "
Exports = ?MODULE:module_info(exports),
[
F
|| {F, A} <- Exports,
A == 1,
case lists:reverse(atom_to_list(F)) of
"esac_" ++ _ -> true;
_ -> false
end
].
init_per_suite(Cfg) ->
{ok, _} = application:ensure_all_started(logger_journald),
logger:set_primary_config(level, all),
Cfg.
end_per_suite(Cfg) ->
Cfg.
init_per_testcase(Name, Cfg) ->
?MODULE:Name({pre, Cfg}).
end_per_testcase(Name, Cfg) ->
?MODULE:Name({post, Cfg}).
just_log_case({pre, Cfg}) ->
Srv = journald_server_mock:start(#{}),
add_handler_for_srv(?FUNCTION_NAME, Srv),
[{srv, Srv} | Cfg];
just_log_case({post, Cfg}) ->
Srv = ?config(srv, Cfg),
logger:remove_handler(?FUNCTION_NAME),
journald_server_mock:stop(Srv),
Cfg;
just_log_case(Cfg) when is_list(Cfg) ->
Srv = ?config(srv, Cfg),
SelfBin = list_to_binary(pid_to_list(self())),
logger:log(info, "test"),
?assertMatch(
#{
<<"MESSAGE">> := <<"test">>,
<<"ERL_GROUP_LEADER">> := <<"<", _/binary>>,
<<"ERL_PID">> := SelfBin,
<<"PRIORITY">> := <<"6">>,
<<"SYSLOG_TIMESTAMP">> := _
},
p_recv(Srv)
),
?LOG_WARNING("test"),
Func =
<<?MODULE_STRING ":", (atom_to_binary(?FUNCTION_NAME, utf8))/binary, "/",
(integer_to_binary(?FUNCTION_ARITY))/binary>>,
?assertMatch(
#{
<<"MESSAGE">> := <<"test">>,
<<"CODE_FILE">> := <<_/binary>>,
<<"CODE_FUNC">> := Func,
<<"CODE_LINE">> := _,
<<"ERL_GROUP_LEADER">> := <<"<", _/binary>>,
<<"ERL_PID">> := SelfBin,
<<"PRIORITY">> := <<"4">>,
<<"SYSLOG_TIMESTAMP">> := _
},
p_recv(Srv),
[{func, Func}, {pid, SelfBin}]
),
logger:log(notice, "Hello ~s", ["world"]),
?assertMatch(#{<<"MESSAGE">> := <<"Hello world">>}, p_recv(Srv)),
logger:log(debug, #{a => b, c => d}),
?assertMatch(#{<<"MESSAGE">> := <<"a: b, c: d">>}, p_recv(Srv)),
logger:log(error, #{a => b}, #{report_cb => fun(Rep) -> {"~p", [Rep]} end}),
?assertMatch(#{<<"MESSAGE">> := <<"#{a => b}">>}, p_recv(Srv)),
logger:log(
emergency,
#{c => d},
#{report_cb => fun(Rep, Smth) -> io_lib:format("~p ~p", [Rep, Smth]) end}
),
?assertMatch(#{<<"MESSAGE">> := <<"#{c => d} #{", _/binary>>}, p_recv(Srv)),
logger:critical(
"Error: ~p",
[smth_bad],
#{domain => [logger_journald, ?MODULE, ?FUNCTION_NAME]}
),
Domain =
<<"logger_journald." ?MODULE_STRING ".", (atom_to_binary(?FUNCTION_NAME, utf8))/binary>>,
?assertMatch(
#{
<<"MESSAGE">> := <<"Error: smth_bad">>,
<<"ERL_DOMAIN">> := Domain
},
p_recv(Srv),
[{domain, Domain}]
),
logger:alert("Wake up! ~p", [doom], #{
my_key1 => my_value1,
"my_key2" => "my_value2",
<<"my_key3">> => <<"my_value3">>,
<<"my_key4">> => {192, 168, 0, 1},
["my", $_, <<"key">>, ["5"]] => 42
}),
?assertMatch(
#{
<<"MESSAGE">> := <<"Wake up! doom">>,
<<"MY_KEY1">> := <<"my_value1">>,
<<"MY_KEY2">> := <<"my_value2">>,
<<"MY_KEY3">> := <<"my_value3">>,
<<"MY_KEY4">> := <<"{192,168,0,1}">>,
<<"MY_KEY5">> := <<"42">>
},
p_recv(Srv)
),
ok.
truncation_case({pre, Cfg}) ->
Srv = journald_server_mock:start(#{socket_opts => [{recbuf, 2048}]}),
add_handler_for_srv(?FUNCTION_NAME, Srv),
[{srv, Srv} | Cfg];
truncation_case({post, Cfg}) ->
Srv = ?config(srv, Cfg),
logger:remove_handler(?FUNCTION_NAME),
journald_server_mock:stop(Srv),
Cfg;
truncation_case(Cfg) when is_list(Cfg) ->
Srv = ?config(srv, Cfg),
Bin10 = list_to_binary(lists:seq($0, $9)),
<<MsgPart:1024/binary, _/binary>> = Msg = binary:copy(Bin10, 32 * 1024),
logger:log(info, Msg),
ShrunkMsg = <<MsgPart/binary, "…"/utf8>>,
?assertMatch(#{<<"MESSAGE">> := ShrunkMsg}, p_recv(Srv)).
%% @doc Test for overload protection
%% XXX: don't yet know how to validate it. Maybe use tracing? Or mock gen_udp?
%% overload_case({pre, Cfg}) ->
%% Srv = journald_server_mock:start(#{}),
add_handler_for_srv(?FUNCTION_NAME , , # {
sync_mode_qlen = > 5 ,
drop_mode_qlen = > 10
%% }),
%% logger:set_module_level(logger_backend, debug),
[ { srv , ;
%% overload_case({post, Cfg}) ->
Srv = ? config(srv , Cfg ) ,
%% logger:remove_handler(?FUNCTION_NAME),
%% journald_server_mock:stop(Srv),
%% Cfg;
%% overload_case(Cfg) when is_list(Cfg) ->
Srv = ? config(srv , Cfg ) ,
io : , " start ~n " , [ ] ) ,
Pids = [ spawn_link(fun ( ) - > log_loop(10 ) end ) || _ < - lists : seq(1 , 10 ) ] ,
[ p_recv(Srv ) || _ < - lists : seq(1 , 5 ) ] ,
%% timer:sleep(3000),
logger : " ) ,
[ p_recv(Srv ) || _ < - lists : seq(1 , 30 ) ] ,
%% [exit(Pid, shutdown) || Pid <- Pids],
%% ok.
%% log_loop(0) ->
%% ok;
%% log_loop(N) ->
%% logger:notice("loop ~w from ~p", [N, self()], #{domain => [test]}),
%% log_loop(N - 1).
Internal
add_handler_for_srv(Id, Srv) ->
add_handler_for_srv(Id, Srv, #{}).
add_handler_for_srv(Id, Srv, Conf) ->
Path = journald_server_mock:get_path(Srv),
ok = logger:add_handler(Id, logger_journald_h, #{config => Conf#{socket_path => Path}}).
p_recv(Srv) ->
journald_server_mock:recv_parse(Srv).
| null | https://raw.githubusercontent.com/seriyps/logger_journald/63c58c86b8b1db9f10ca697f33554086375ee9a4/test/logger_journald_h_SUITE.erl | erlang | ,
@doc Test for overload protection
XXX: don't yet know how to validate it. Maybe use tracing? Or mock gen_udp?
overload_case({pre, Cfg}) ->
Srv = journald_server_mock:start(#{}),
}),
logger:set_module_level(logger_backend, debug),
overload_case({post, Cfg}) ->
logger:remove_handler(?FUNCTION_NAME),
journald_server_mock:stop(Srv),
Cfg;
overload_case(Cfg) when is_list(Cfg) ->
timer:sleep(3000),
[exit(Pid, shutdown) || Pid <- Pids],
ok.
log_loop(0) ->
ok;
log_loop(N) ->
logger:notice("loop ~w from ~p", [N, self()], #{domain => [test]}),
log_loop(N - 1). | -module(logger_journald_h_SUITE).
-export([
all/0,
init_per_suite/1,
end_per_suite/1,
init_per_testcase/2,
end_per_testcase/2
]).
-export([
just_log_case/1,
truncation_case/1
overload_case/1
]).
-include_lib("stdlib/include/assert.hrl").
-include_lib("common_test/include/ct.hrl").
-include_lib("kernel/include/logger.hrl").
all() ->
All exported functions of arity 1 whose name ends with " _ case "
Exports = ?MODULE:module_info(exports),
[
F
|| {F, A} <- Exports,
A == 1,
case lists:reverse(atom_to_list(F)) of
"esac_" ++ _ -> true;
_ -> false
end
].
init_per_suite(Cfg) ->
{ok, _} = application:ensure_all_started(logger_journald),
logger:set_primary_config(level, all),
Cfg.
end_per_suite(Cfg) ->
Cfg.
init_per_testcase(Name, Cfg) ->
?MODULE:Name({pre, Cfg}).
end_per_testcase(Name, Cfg) ->
?MODULE:Name({post, Cfg}).
just_log_case({pre, Cfg}) ->
Srv = journald_server_mock:start(#{}),
add_handler_for_srv(?FUNCTION_NAME, Srv),
[{srv, Srv} | Cfg];
just_log_case({post, Cfg}) ->
Srv = ?config(srv, Cfg),
logger:remove_handler(?FUNCTION_NAME),
journald_server_mock:stop(Srv),
Cfg;
just_log_case(Cfg) when is_list(Cfg) ->
Srv = ?config(srv, Cfg),
SelfBin = list_to_binary(pid_to_list(self())),
logger:log(info, "test"),
?assertMatch(
#{
<<"MESSAGE">> := <<"test">>,
<<"ERL_GROUP_LEADER">> := <<"<", _/binary>>,
<<"ERL_PID">> := SelfBin,
<<"PRIORITY">> := <<"6">>,
<<"SYSLOG_TIMESTAMP">> := _
},
p_recv(Srv)
),
?LOG_WARNING("test"),
Func =
<<?MODULE_STRING ":", (atom_to_binary(?FUNCTION_NAME, utf8))/binary, "/",
(integer_to_binary(?FUNCTION_ARITY))/binary>>,
?assertMatch(
#{
<<"MESSAGE">> := <<"test">>,
<<"CODE_FILE">> := <<_/binary>>,
<<"CODE_FUNC">> := Func,
<<"CODE_LINE">> := _,
<<"ERL_GROUP_LEADER">> := <<"<", _/binary>>,
<<"ERL_PID">> := SelfBin,
<<"PRIORITY">> := <<"4">>,
<<"SYSLOG_TIMESTAMP">> := _
},
p_recv(Srv),
[{func, Func}, {pid, SelfBin}]
),
logger:log(notice, "Hello ~s", ["world"]),
?assertMatch(#{<<"MESSAGE">> := <<"Hello world">>}, p_recv(Srv)),
logger:log(debug, #{a => b, c => d}),
?assertMatch(#{<<"MESSAGE">> := <<"a: b, c: d">>}, p_recv(Srv)),
logger:log(error, #{a => b}, #{report_cb => fun(Rep) -> {"~p", [Rep]} end}),
?assertMatch(#{<<"MESSAGE">> := <<"#{a => b}">>}, p_recv(Srv)),
logger:log(
emergency,
#{c => d},
#{report_cb => fun(Rep, Smth) -> io_lib:format("~p ~p", [Rep, Smth]) end}
),
?assertMatch(#{<<"MESSAGE">> := <<"#{c => d} #{", _/binary>>}, p_recv(Srv)),
logger:critical(
"Error: ~p",
[smth_bad],
#{domain => [logger_journald, ?MODULE, ?FUNCTION_NAME]}
),
Domain =
<<"logger_journald." ?MODULE_STRING ".", (atom_to_binary(?FUNCTION_NAME, utf8))/binary>>,
?assertMatch(
#{
<<"MESSAGE">> := <<"Error: smth_bad">>,
<<"ERL_DOMAIN">> := Domain
},
p_recv(Srv),
[{domain, Domain}]
),
logger:alert("Wake up! ~p", [doom], #{
my_key1 => my_value1,
"my_key2" => "my_value2",
<<"my_key3">> => <<"my_value3">>,
<<"my_key4">> => {192, 168, 0, 1},
["my", $_, <<"key">>, ["5"]] => 42
}),
?assertMatch(
#{
<<"MESSAGE">> := <<"Wake up! doom">>,
<<"MY_KEY1">> := <<"my_value1">>,
<<"MY_KEY2">> := <<"my_value2">>,
<<"MY_KEY3">> := <<"my_value3">>,
<<"MY_KEY4">> := <<"{192,168,0,1}">>,
<<"MY_KEY5">> := <<"42">>
},
p_recv(Srv)
),
ok.
truncation_case({pre, Cfg}) ->
Srv = journald_server_mock:start(#{socket_opts => [{recbuf, 2048}]}),
add_handler_for_srv(?FUNCTION_NAME, Srv),
[{srv, Srv} | Cfg];
truncation_case({post, Cfg}) ->
Srv = ?config(srv, Cfg),
logger:remove_handler(?FUNCTION_NAME),
journald_server_mock:stop(Srv),
Cfg;
truncation_case(Cfg) when is_list(Cfg) ->
Srv = ?config(srv, Cfg),
Bin10 = list_to_binary(lists:seq($0, $9)),
<<MsgPart:1024/binary, _/binary>> = Msg = binary:copy(Bin10, 32 * 1024),
logger:log(info, Msg),
ShrunkMsg = <<MsgPart/binary, "…"/utf8>>,
?assertMatch(#{<<"MESSAGE">> := ShrunkMsg}, p_recv(Srv)).
add_handler_for_srv(?FUNCTION_NAME , , # {
sync_mode_qlen = > 5 ,
drop_mode_qlen = > 10
[ { srv , ;
Srv = ? config(srv , Cfg ) ,
Srv = ? config(srv , Cfg ) ,
io : , " start ~n " , [ ] ) ,
Pids = [ spawn_link(fun ( ) - > log_loop(10 ) end ) || _ < - lists : seq(1 , 10 ) ] ,
[ p_recv(Srv ) || _ < - lists : seq(1 , 5 ) ] ,
logger : " ) ,
[ p_recv(Srv ) || _ < - lists : seq(1 , 30 ) ] ,
Internal
add_handler_for_srv(Id, Srv) ->
add_handler_for_srv(Id, Srv, #{}).
add_handler_for_srv(Id, Srv, Conf) ->
Path = journald_server_mock:get_path(Srv),
ok = logger:add_handler(Id, logger_journald_h, #{config => Conf#{socket_path => Path}}).
p_recv(Srv) ->
journald_server_mock:recv_parse(Srv).
|
a05f8a17535d2c3791959d3c5c2761d79f7951a4239b051b9927fa23fd5adc45 | armedbear/abcl | load.lisp | ;;; load.lisp
;;;
Copyright ( C ) 2004 - 2005
$ Id$
;;;
;;; This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation ; either version 2
of the License , or ( at your option ) any later version .
;;;
;;; This program is distributed in the hope that it will be useful,
;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
;;; along with this program; if not, write to the Free Software
Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
;;;
;;; As a special exception, the copyright holders of this library give you
;;; permission to link this library with independent modules to produce an
;;; executable, regardless of the license terms of these independent
;;; modules, and to copy and distribute the resulting executable under
;;; terms of your choice, provided that you also meet, for each linked
;;; independent module, the terms and conditions of the license of that
;;; module. An independent module is a module which is not derived from
;;; or based on this library. If you modify this library, you may extend
;;; this exception to your version of the library, but you are not
;;; obligated to do so. If you do not wish to do so, delete this
;;; exception statement from your version.
(in-package #:system)
(defun load (filespec
&key
(verbose *load-verbose*)
(print *load-print*)
(if-does-not-exist t)
(external-format :default))
(let (*fasl-loader*)
(%load (if (streamp filespec)
filespec
(merge-pathnames (pathname filespec)))
verbose print if-does-not-exist external-format)))
(defun load-returning-last-result (filespec
&key
(verbose *load-verbose*)
(print *load-print*)
(if-does-not-exist t)
(external-format :default))
(let (*fasl-loader*)
(%load-returning-last-result (if (streamp filespec)
filespec
(merge-pathnames (pathname filespec)))
verbose print if-does-not-exist external-format)))
| null | https://raw.githubusercontent.com/armedbear/abcl/0631ea551523bb93c06263e772fbe849008e2f68/src/org/armedbear/lisp/load.lisp | lisp | load.lisp
This program is free software; you can redistribute it and/or
either version 2
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program; if not, write to the Free Software
As a special exception, the copyright holders of this library give you
permission to link this library with independent modules to produce an
executable, regardless of the license terms of these independent
modules, and to copy and distribute the resulting executable under
terms of your choice, provided that you also meet, for each linked
independent module, the terms and conditions of the license of that
module. An independent module is a module which is not derived from
or based on this library. If you modify this library, you may extend
this exception to your version of the library, but you are not
obligated to do so. If you do not wish to do so, delete this
exception statement from your version. | Copyright ( C ) 2004 - 2005
$ Id$
modify it under the terms of the GNU General Public License
of the License , or ( at your option ) any later version .
You should have received a copy of the GNU General Public License
Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
(in-package #:system)
(defun load (filespec
&key
(verbose *load-verbose*)
(print *load-print*)
(if-does-not-exist t)
(external-format :default))
(let (*fasl-loader*)
(%load (if (streamp filespec)
filespec
(merge-pathnames (pathname filespec)))
verbose print if-does-not-exist external-format)))
(defun load-returning-last-result (filespec
&key
(verbose *load-verbose*)
(print *load-print*)
(if-does-not-exist t)
(external-format :default))
(let (*fasl-loader*)
(%load-returning-last-result (if (streamp filespec)
filespec
(merge-pathnames (pathname filespec)))
verbose print if-does-not-exist external-format)))
|
666c4032856aa386f07c92f4654f7bf93c35985f5fd12419a06885d0bc650b9b | wavewave/HROOT | Class.hs | {-# LANGUAGE OverloadedStrings #-}
module HROOT.Data.Math.Class where
import FFICXX.Generate.Code.Primitive (double, double_, int, int_, void_)
import FFICXX.Generate.Type.Cabal (BuildType (..), Cabal (..), CabalName (..))
import FFICXX.Generate.Type.Class
( Class (..),
Function (..),
ProtectedMethod (..),
TopLevel (..),
)
import FFICXX.Generate.Type.Config
( ModuleUnit (..),
ModuleUnitImports (..),
modImports,
)
import HROOT.Data.Core.Class (tNamed)
-- | Cabal package description for the generated HROOT-math binding package.
-- Depends on the stdcxx shim and HROOT-core; built with a Custom Setup.hs.
mathcabal :: Cabal
mathcabal =
  Cabal
    { cabal_pkgname = CabalName "HROOT-math",
      cabal_version = "0.10.0.1",
      cabal_cheaderprefix = "HROOTMath",
      cabal_moduleprefix = "HROOT.Math",
      cabal_additional_c_incs = [],
      cabal_additional_c_srcs = [],
      cabal_additional_pkgdeps =
        [ CabalName "stdcxx",
          CabalName "HROOT-core"
        ],
      cabal_license = Nothing,
      cabal_licensefile = Nothing,
      cabal_extraincludedirs = [],
      cabal_extralibdirs = [],
      cabal_extrafiles = [],
      cabal_pkg_config_depends = [],
      cabal_buildType = Custom [CabalName "Cabal", CabalName "base", CabalName "process"]
    }
-- | Build a 'Class' belonging to the HROOT-math package.
-- Arguments: C++ class name, parent classes, member functions.
mathclass :: String -> [Class] -> [Function] -> Class
mathclass n ps fs =
  Class
    { class_cabal = mathcabal,
      class_name = n,
      class_parents = ps,
      class_protected = Protected [],
      class_alias = Nothing,
      class_funcs = fs,
      class_vars = [],
      class_tmpl_funcs = [],
      class_has_proxy = False
    }
-- | Binding description for ROOT's TRandom random number generator
-- (constructor plus seed accessors and Gaussian/uniform sampling).
tRandom :: Class
tRandom =
  mathclass
    "TRandom"
    [tNamed]
    [ Constructor [int "seed"] Nothing,
      Virtual int_ "GetSeed" [] Nothing,
      Virtual double_ "Gaus" [double "mean", double "sigma"] Nothing,
      Virtual void_ "SetSeed" [int "seed"] Nothing,
      Virtual double_ "Uniform" [double "x1", double "x2"] Nothing
    ]
-- rootFitFitResult is kept commented out until the ROOT::Fit::FitResult
-- binding is fleshed out (it currently declares no member functions).
-- rootFitFitResult :: Class
-- rootFitFitResult =
--   mathclass "ROOT::Fit::FitResult" [] []

-- | Every class generated for the HROOT-math package.
math_classes :: [Class]
math_classes =
  [ tRandom
  -- , rootFitFitResult
  ]
-- | No top-level (free) functions are exposed by HROOT-math.
math_topfunctions :: [TopLevel]
math_topfunctions = []

-- | Per-module C header/import description for the generated code.
math_headers :: [(ModuleUnit, ModuleUnitImports)]
math_headers =
  [ modImports "TRandom" ["ROOT"] ["TRandom.h"]
  ]

-- | Extra link-time libraries (none needed).
math_extraLib :: [String]
math_extraLib = []

-- | Extra per-class dependencies (none needed).
math_extraDep :: [(String, [String])]
math_extraDep = []
| null | https://raw.githubusercontent.com/wavewave/HROOT/eacc92b5f6228ca49dfa615d01dd72ca744d54c6/HROOT-generate/src/HROOT/Data/Math/Class.hs | haskell | # LANGUAGE OverloadedStrings #
rootFitFitResult :: Class
[ ] |
module HROOT.Data.Math.Class where
import FFICXX.Generate.Code.Primitive (double, double_, int, int_, void_)
import FFICXX.Generate.Type.Cabal (BuildType (..), Cabal (..), CabalName (..))
import FFICXX.Generate.Type.Class
( Class (..),
Function (..),
ProtectedMethod (..),
TopLevel (..),
)
import FFICXX.Generate.Type.Config
( ModuleUnit (..),
ModuleUnitImports (..),
modImports,
)
import HROOT.Data.Core.Class (tNamed)
mathcabal :: Cabal
mathcabal =
Cabal
{ cabal_pkgname = CabalName "HROOT-math",
cabal_version = "0.10.0.1",
cabal_cheaderprefix = "HROOTMath",
cabal_moduleprefix = "HROOT.Math",
cabal_additional_c_incs = [],
cabal_additional_c_srcs = [],
cabal_additional_pkgdeps =
[ CabalName "stdcxx",
CabalName "HROOT-core"
],
cabal_license = Nothing,
cabal_licensefile = Nothing,
cabal_extraincludedirs = [],
cabal_extralibdirs = [],
cabal_extrafiles = [],
cabal_pkg_config_depends = [],
cabal_buildType = Custom [CabalName "Cabal", CabalName "base", CabalName "process"]
}
mathclass :: String -> [Class] -> [Function] -> Class
mathclass n ps fs =
Class
{ class_cabal = mathcabal,
class_name = n,
class_parents = ps,
class_protected = Protected [],
class_alias = Nothing,
class_funcs = fs,
class_vars = [],
class_tmpl_funcs = [],
class_has_proxy = False
}
tRandom :: Class
tRandom =
mathclass
"TRandom"
[tNamed]
[ Constructor [int "seed"] Nothing,
Virtual int_ "GetSeed" [] Nothing,
Virtual double_ "Gaus" [double "mean", double "sigma"] Nothing,
Virtual void_ "SetSeed" [int "seed"] Nothing,
Virtual double_ "Uniform" [double "x1", double "x2"] Nothing
]
rootFitFitResult =
mathclass " ROOT::Fit::FitResult " [ ]
math_classes :: [Class]
math_classes =
, rootFitFitResult
math_topfunctions :: [TopLevel]
math_topfunctions = []
math_headers :: [(ModuleUnit, ModuleUnitImports)]
math_headers =
[ modImports "TRandom" ["ROOT"] ["TRandom.h"]
]
math_extraLib :: [String]
math_extraLib = []
math_extraDep :: [(String, [String])]
math_extraDep = []
|
e2ba42112c276851ae2e14290af5621da0d5e1e610ebed5130aada1f10ca3cc3 | sebashack/servantRestfulAPI | Bookable.hs | {-# LANGUAGE OverloadedStrings #-}
module HelperLibs.Interpreters.Bookable where
import Data.Aeson
import Data.Aeson.Types
import Data.Char
import Servant
import Control.Monad.Trans.Reader
import Control.Monad.IO.Class
import HelperLibs.Interpreters.BookingDomain
import HelperLibs.ElasticSearch.ResponseParser
import Control.Monad.Except
import Configs.ConfigTypes
import Data.Monoid
import Data.Maybe (catMaybes)
import HelperLibs.MySQL.ActionRunner
import qualified Data.CountryCodes as CC
import qualified Data.Vector as V
import qualified Data.Text as T
import qualified Domains.BookingDomain.Bookable.DataTypes as BT
import qualified Domains.BookingDomain.Property.DataTypes as PT
import qualified Repositories.BookableRepo.Operations as BR
import qualified Repositories.ReviewRepo.Operations as RvT (getPropertyScores)
import qualified Schemas.SQL.DbTypes as DbT
import qualified Data.Map.Strict as Map
import qualified Text.Email.Validate as EV
import qualified Data.Time as TM
import qualified Data.Text.Encoding as TE
import qualified Database.Persist.MySQL as MySQL
import qualified Repositories.UserRepo.Operations as UR (getAdminValidationData)
import qualified Data.ByteString as SB (ByteString)
import qualified Data.Set as S
import qualified HelperLibs.SCalendar.DataTypes as SCT
import qualified HelperLibs.SCalendar.Operations as SC
-- Given a SCT.Report (the Report data type of the SCalendar library) transform it into a
-- Report data type of the Bookable domain.
-- | Convert an 'SCT.Report' (SCalendar's report type) into the booking
-- domain's 'BT.Report'.  Only the room sets are carried over; the report's
-- period is dropped.
toDomainReport :: SCT.Report -> BT.Report
toDomainReport (SCT.Report _period total reserved remaining) =
  BT.Report total reserved remaining
-- A valid maxOccupancy must be greater than 0 but less than or equal to 15.
-- | Accept a maximum occupancy only when it lies in the 1..15 range.
validateMaxOccupancy :: Int -> Either String Int
validateMaxOccupancy occu =
  if occu >= 1 && occu <= 15
    then Right occu
    else badOccupancy
-- Validate a set of roomIds: a valid set has at most 100 elements,
-- and each element must be between 1 and 15 characters long. Empty strings are not allowed.
-- A list of roomIds can be empty.
-- | Validate a set of roomIds: at most 100 elements, each between 1 and 15
-- characters.  The set may be empty.
--
-- The size limit is checked before the per-element check so that an
-- oversized set of otherwise-valid ids is rejected (with the guards the
-- other way round the size limit is unreachable for valid ids).
validateRoomIds :: S.Set T.Text -> Either String (S.Set T.Text)
validateRoomIds ids
  | S.size ids > 100      = badRoomIds
  | all isValidRoomId ids = Right ids
  | otherwise             = badRoomIds
  where
    isValidRoomId roomId =
      let len = T.length roomId
      in len >= 1 && len <= 15
-- Bookable names must be between 4 and 31 characters and contain at least one alphanumeric character.
-- | A bookable name must be 4..31 characters long and contain at least one
-- alphanumeric character.
validateBookName :: T.Text -> Either String T.Text
validateBookName name =
  if lengthOk && T.any isAlphaNum name
    then Right name
    else badBookName
  where
    lengthOk = len >= 4 && len <= 31
    len = T.length name
-- A room size must be between 3 and 10 characters and contain at least one digit.
-- | A room size must be 3..10 characters long and contain at least one digit.
validateRoomSize :: T.Text -> Maybe T.Text
validateRoomSize rSize
  | len >= 3 && len <= 10 && T.any isDigit rSize = Just rSize
  | otherwise                                    = Nothing
  where
    len = T.length rSize
-- Bed types must be between 4 and 21 characters and contain at least one alphanumeric character.
-- | A bed type must be 4..21 characters long and contain at least one
-- alphanumeric character.
validateBedType :: T.Text -> Maybe T.Text
validateBedType bedType
  | len >= 4 && len <= 21 && T.any isAlphaNum bedType = Just bedType
  | otherwise                                         = Nothing
  where
    len = T.length bedType
-- A valid number of beds must be greater than 0 but less than or equal to 15.
-- | The number of beds must lie in the 1..15 range.
validateBedNum :: Int -> Either String Int
validateBedNum bedNum =
  if bedNum >= 1 && bedNum <= 15
    then Right bedNum
    else badBedNum
-- Validate a list of amenities: a valid list has at most 50 elements, and each element must be
-- between 1 and 50 characters long. Empty strings are not allowed.
-- | At most 50 amenities, each between 1 and 50 characters long.
validateAmenities :: [T.Text] -> Either String [T.Text]
validateAmenities amens
  | length amens > 50           = badAmenities
  | any (not . validAmen) amens = badAmenities
  | otherwise                   = Right amens
  where
    validAmen amen =
      let len = T.length amen
      in len >= 1 && len <= 50
-- Given a BookableSpecs data type validate it. Note that in case that roomSize or
-- bedType is not valid a Nothing is returned inside BookableSpecs.
-- | Validate a 'BT.BookableSpecs'.  Invalid optional fields (roomSize,
-- bedType) silently collapse to 'Nothing'; invalid obligatory fields
-- short-circuit with an error message.
validateBookSpecs :: BT.BookableSpecs -> Either String BT.BookableSpecs
validateBookSpecs (BT.BookableSpecs name roomSize bedType bedNum amenities) = do
  _ <- validateBookName name
  _ <- validateBedNum bedNum
  _ <- validateAmenities amenities
  Right (BT.BookableSpecs name
                          (roomSize >>= validateRoomSize)
                          (bedType >>= validateBedType)
                          bedNum
                          amenities)
-- Validate the basic information to create a bookable. Note that in case esDesc or
-- enDesc is not valid a Nothing is returned inside BasicBookableData.
-- | Validate the data needed to create a bookable.  Invalid optional
-- descriptions collapse to 'Nothing'; invalid obligatory fields fail with
-- an error message.
validateBasicBookData :: BT.BasicBookableData -> Either String BT.BasicBookableData
validateBasicBookData (BT.BasicBookableData propId bklSpecs esDesc enDesc maxOccu roomIds) = do
  _ <- validateMaxOccupancy maxOccu
  _ <- validateRoomIds roomIds
  _ <- validateBookSpecs bklSpecs
  Right (BT.BasicBookableData propId
                              bklSpecs
                              (esDesc >>= validateDescriptionM)
                              (enDesc >>= validateDescriptionM)
                              maxOccu
                              roomIds)
-- Given a Bookable, get its roomIds.
-- | Extract the roomIds of a 'BT.Bookable' (always succeeds).
takeRoomIds :: BT.Bookable -> Either String (S.Set T.Text)
takeRoomIds = Right . BT.roomIds . BT.basicData
-- Validate a PricingData: a valid PricingData has an occupancy greater than 0, a list of
-- conditions with at most 20 elements where every element is 3..350 characters long, a price
-- greater than 0, and a discount which is an integer between 0 and 100.
-- | Validate a 'BT.PricingData' and, on success, pair it with its id as a
-- 'BT.Pricing'.
validatePricingData :: T.Text
                    -> BT.PricingData
                    -> Either String BT.Pricing
validatePricingData priId priData@(BT.PricingData occu conds price disc)
  | occu < 1                      = badPriOccupancy
  | any (not . isValidCond) conds = badPriConds
  | length conds > 20             = badPriConds
  | price < 1                     = badPriPrice
  | disc < 0 || disc > 100        = badPriDisc
  | otherwise                     = Right (BT.Pricing priId priData)
  where
    isValidCond cond =
      let len = T.length cond
      in len >= 3 && len <= 350
-- Given a bookableId and a UTCTime, generate a pricingId.
-- | Build a pricing id by appending a timestamp (yyyymmdd plus the
-- microseconds elapsed in the day) to the bookable id.
genPricingId :: T.Text -> TM.UTCTime -> T.Text
genPricingId bookId (TM.UTCTime date time) =
  bookId <> T.pack (yyyy ++ mm ++ dd ++ micros)
  where
    -- 'show date' yields "yyyy-mm-dd"; slice out the numeric parts.
    (yyyy, rest) = splitAt 4 (show date)
    (mm, rest')  = splitAt 2 (tail rest)
    dd           = tail rest'
    micros       = show $ round (time * 1000000)
-- Transform a BT.Pricing into a tuple.
-- | Flatten a 'BT.Pricing' into an (id, occupancy, conditions, price,
-- discount) tuple.
pricingToTuple :: BT.Pricing -> (T.Text, Int, [T.Text], Integer, Int)
pricingToTuple (BT.Pricing priId (BT.PricingData occu conds price disc)) =
  (priId, occu, conds, price, disc)
-- Check if a Bookable can be listed: a Bookable can be listed if it has pricings, if its status
-- is Unlisted, if it has either a description in English or in Spanish, and if it has been
-- assigned some roomIds.
-- | Check whether a Bookable can be listed: it must currently be unlisted,
-- have at least one room, at least one description (Spanish or English) and
-- at least one pricing.  Type signature added for consistency with
-- 'isUnlistable'.
isListable :: BT.Bookable -> Either String ()
isListable (BT.Bookable _ _ status bookData pricings)
  | status == BT.Listed                    = alreadyListed
  | null roomIds                           = bklNoRooms
  | esDesc == Nothing && enDesc == Nothing = bklNoDescs
  | null pricings                          = emptyPris
  | otherwise                              = Right ()
  where
    (BT.BasicBookableData _ _ esDesc enDesc _ roomIds) = bookData
-- Check if a Bookable can be unlisted: a Bookable can be unlisted only if its state is Listed.
-- | A Bookable can be unlisted only when it is currently listed.
isUnlistable :: BT.Bookable -> Either String ()
isUnlistable (BT.Bookable _ _ status _ _) =
  if status == BT.Listed then Right () else alreadyUnlisted
{-
General abstraction to update bookables, it receives:
- A bookableId
- A token
- A value to be updated.
- A function which validates if the bookable can be updated.
- A function which validates if the value to be updated is correct.
- A binary operation which operates with the return values of the previous functions.
- A function which updates the resulting value in DB.
This function returns the result of the binary operation if no errors have occured.
-}
-- | Generic bookable-update pipeline (see the block comment above for the
-- role of each argument): fetch the bookable, run the bookable-level and
-- value-level validations, check the admin token against the bookable's
-- property, combine the two validated values with @biOp@ and persist the
-- result with @updateFunc@.  Any failure short-circuits with a 'Left'
-- message; a persistence failure is collapsed into the generic
-- 'updateBookErr'.
updateBookable :: MonadIO m => ConfigES
               -> T.Text
               -> T.Text
               -> t
               -> (BT.Bookable -> Either String t1)
               -> (t -> Either String t2)
               -> (t1 -> t2 -> b)
               -> (ConfigES -> T.Text -> b -> ExceptT a IO b1)
               -> ReaderT MySQL.SqlBackend m (Either String b)
updateBookable coEs bookId token value validation1 validation2 biOp updateFunc = do
  eitherBookable <- liftIO $ runExceptT $ queryAndParseBookable coEs bookId
  case eitherBookable of
    Left error -> return $ Left error
    Right bookable -> do
      -- Both validations are evaluated together; the bookable-level error
      -- wins when both fail.
      case (validation1 bookable, validation2 value) of
        (Left error, _) -> return $ Left error
        (_, Left error) -> return $ Left error
        (Right bookValue, Right validatedValue) -> do
          let propId = BT.propId $ BT.basicData bookable
          adminCreds <- validateAdminAndMatchProperty coEs token propId
          case adminCreds of
            Left error -> return $ Left error
            Right _ -> do
              let result = biOp bookValue validatedValue
              -- Persist; on success return the combined value, otherwise
              -- the generic update error (the low-level error is dropped).
              jsonRes <- liftIO $ runExceptT $ updateFunc coEs bookId result
              return $ either (\err -> updateBookErr) (\val -> Right result) jsonRes
-- General validation procedure to validate an admin and to match an admin, a property and a bookable.
-- | Look up the bookable, then check the admin token against the bookable's
-- property via 'validateAdminAndMatchProperty'.  On success returns the
-- admin's userId together with the bookable's propertyId; the remaining
-- credential fields are discarded.
validateAdminPropBkl :: MonadIO m => ConfigES
                     -> T.Text
                     -> T.Text
                     -> ReaderT MySQL.SqlBackend m (Either String (Integer, T.Text))
validateAdminPropBkl coEs bookId token = do
  eitherBookable <- liftIO $ runExceptT $ queryAndParseBookable coEs bookId
  case eitherBookable of
    Left error -> return $ Left error
    Right bookable -> do
      let propId = BT.propId $ BT.basicData bookable
      adminCreds <- validateAdminAndMatchProperty coEs token propId
      return $ either (\err -> Left err)
                      (\(userId, _, _, _, _, _, _, _) -> Right (userId, propId))
                      adminCreds
-- Given a interval (from, to) to create a calendar, and an interval (cIn, cOut) to check
-- availability of a bookable in that period this function checks if a given number of rooms
-- is available.
-- This function computes the mean score of each bookable's property with the first 1500 most recent scores.
-- IMPORTANT: take into account the behavior of createBookableCalendar
-- | Build a calendar over (from, to) for the bookable's rooms, then check
-- whether at least @numRooms@ rooms are free over the (cIn, cOut) stay.
-- On success wraps the bookable with availability, the property's rounded
-- mean review score and display data into a 'BT.SearchResult'.
--
-- NOTE(review): 'lastNight' and 'reviewScores' are referenced below but not
-- visibly bound in this copy of the file — their definitions (presumably
-- derived from cOut and RvT.getPropertyScores) appear to have been lost;
-- confirm against the upstream source before editing.
checkBookableAvailability :: MonadIO m => ConfigES
                          -> (TM.UTCTime, TM.UTCTime)
                          -> (TM.UTCTime, TM.UTCTime)
                          -> Int
                          -> Int
                          -> BT.Bookable
                          -> ReaderT MySQL.SqlBackend m (Maybe BT.SearchResult)
checkBookableAvailability coEs (from, to) (cIn, cOut) numDays numRooms bookable = do
  let propId = BT.propId $ BT.basicData bookable
      bookId = BT.bklId bookable
      roomIds = BT.roomIds $ BT.basicData bookable
  -- check availability up to the last night of the reservation
  maybeCalendar <- createBookableCalendar bookId from to numDays roomIds
  maybeProperty <- liftIO $ queryAndParseProperty coEs propId
  -- get the 1500 most recent scores.
  case (maybeCalendar, maybeProperty) of
    (Nothing, _) -> return Nothing
    (_, Nothing) -> return Nothing
    (Just calendar, Just property) -> do
      let maybeReport = SC.periodReport (cIn, lastNight) calendar
      case maybeReport of
        Nothing -> return Nothing
        Just (SCT.Report _ total reserved remaining) -> do
          let propImg = PT.mainImgId property
              propName = PT.name $ PT.propData property
              propType = PT.propType $ PT.propData property
              availableRooms = S.size remaining
              scoresLength = fromIntegral $ length reviewScores
              -- Guard against division by zero when there are no reviews.
              scoreMean = if scoresLength == 0 then 0 else (sum reviewScores) / scoresLength
              -- Round to one decimal place.
              roundedMean = (fromInteger $ round $ scoreMean * 10) / 10.0
          if availableRooms >= numRooms
            then return $ Just (BT.SearchResult bookable availableRooms roundedMean propName propType propImg)
            else return Nothing
-- Given an Object data type which is supposed to be a parsable ElasticSearch response with a list of bookables,
-- this function checks the availability of a given number of rooms in a that list of bookables for a
-- (checkIn, checkOut), and returns a list of searchResults of the bookables which have availability.
-- | Parse the bookables out of an ElasticSearch response and keep those with
-- at least @numRooms@ rooms free over (cIn, cOut); the result is paginated
-- with @from@/@size@ (drop then take).
--
-- NOTE(review): 2764800 s = 32 days, so the calendar window is padded by 32
-- days on each side of the stay, and a 128-day calendar is requested —
-- presumably because the calendar needs a power-of-two day span; confirm
-- against 'createBookableCalendar'.
getSearchResults :: MonadIO m => ConfigES
                 -> Object
                 -> TM.UTCTime
                 -> TM.UTCTime
                 -> Int
                 -> Int
                 -> Int
                 -> ReaderT MySQL.SqlBackend m (Either String [BT.SearchResult])
getSearchResults coEs object cIn cOut numRooms from size = do
  case parseBookables object of
    Nothing -> return bookParsingErr
    Just bookables -> do
      let start = TM.addUTCTime (-2764800) cIn
          end = TM.addUTCTime (2764800) cOut
      maybeResults <- mapM (checkBookableAvailability coEs (start, end) (cIn, cOut) 128 numRooms) bookables
      let results = take size $ drop from (catMaybes maybeResults)
      return $ Right results
-- | Attach the bookable's image ids to a search result.
getSearchResultImgIds :: MonadIO m => BT.SearchResult
                      -> ReaderT MySQL.SqlBackend m BT.SearchResultWithImgIds
getSearchResultImgIds sResult =
  BT.SearchResultWithImgIds sResult <$> BR.bookableImageIds bookId
  where
    bookId = BT.bklId (BT.bookable sResult)
| null | https://raw.githubusercontent.com/sebashack/servantRestfulAPI/e625535d196acefaff4f5bf03108816be668fe4d/libs/HelperLibs/Interpreters/Bookable.hs | haskell | # LANGUAGE OverloadedStrings #
A list of roomIds can be empty.
Given a BookableSpecs data type validate it. Note that in case that roomSize or
bedType is not valid a Nothing is returned inside BookableSpecs.
Optional fields
Obligatory fields
enDesc is not valid a Nothing is returned inside BasicBookableData.
Obligatory fields
Optional fields
Given a bookableId and a UTCTime, generate a pricingId.
General abstraction to update bookables, it receives:
- A bookableId
- A token
- A value to be updated.
- A function which validates if the bookable can be updated.
- A function which validates if the value to be updated is correct.
- A binary operation which operates with the return values of the previous functions.
- A function which updates the resulting value in DB.
This function returns the result of the binary operation if no errors have occured.
Given a interval (from, to) to create a calendar, and an interval (cIn, cOut) to check
availability of a bookable in that period this function checks if a given number of rooms
is available.
IMPORTANT: take into account the behavior of createBookableCalendar
this function checks the availability of a given number of rooms in a that list of bookables for a
(checkIn, checkOut), and returns a list of searchResults of the bookables which have availability. |
module HelperLibs.Interpreters.Bookable where
import Data.Aeson
import Data.Aeson.Types
import Data.Char
import Servant
import Control.Monad.Trans.Reader
import Control.Monad.IO.Class
import HelperLibs.Interpreters.BookingDomain
import HelperLibs.ElasticSearch.ResponseParser
import Control.Monad.Except
import Configs.ConfigTypes
import Data.Monoid
import Data.Maybe (catMaybes)
import HelperLibs.MySQL.ActionRunner
import qualified Data.CountryCodes as CC
import qualified Data.Vector as V
import qualified Data.Text as T
import qualified Domains.BookingDomain.Bookable.DataTypes as BT
import qualified Domains.BookingDomain.Property.DataTypes as PT
import qualified Repositories.BookableRepo.Operations as BR
import qualified Repositories.ReviewRepo.Operations as RvT (getPropertyScores)
import qualified Schemas.SQL.DbTypes as DbT
import qualified Data.Map.Strict as Map
import qualified Text.Email.Validate as EV
import qualified Data.Time as TM
import qualified Data.Text.Encoding as TE
import qualified Database.Persist.MySQL as MySQL
import qualified Repositories.UserRepo.Operations as UR (getAdminValidationData)
import qualified Data.ByteString as SB (ByteString)
import qualified Data.Set as S
import qualified HelperLibs.SCalendar.DataTypes as SCT
import qualified HelperLibs.SCalendar.Operations as SC
Given a SCT.Report ( The Report data type of the SCalendar library ) transform it into a
Report data tpe of the Bookable domain .
toDomainReport :: SCT.Report -> BT.Report
toDomainReport (SCT.Report (from, to) total reserved remaining) =
BT.Report total reserved remaining
A valid maxOccupancy must be greater than 0 but less than or equal to 15 .
validateMaxOccupancy :: Int -> Either String Int
validateMaxOccupancy occu
| occu < 1 = badOccupancy
| occu > 15 = badOccupancy
| otherwise = Right occu
Validate a list of roomIds : A valid list of roomIds must not be greater than 100 elements ,
and each element must be less than 15 characters long . Empty Strings are not allowed .
validateRoomIds :: S.Set T.Text -> Either String (S.Set T.Text)
validateRoomIds ids
| all isValidRoomId ids = Right ids
| S.size ids > 100 = badRoomIds
| otherwise = badRoomIds
where
isValidRoomId roomId
| T.length roomId < 1 = False
| T.length roomId > 15 = False
| otherwise = True
Bookable names must be greater than 4 characters and less than 31 .
validateBookName :: T.Text -> Either String T.Text
validateBookName name
| numChars < 4 = badBookName
| numChars > 31 = badBookName
| not (T.any isAlphaNum name) = badBookName
| otherwise = Right name
where
numChars = T.length name
A room size must be greater than 3 characters and less than 10 .
validateRoomSize :: T.Text -> Maybe T.Text
validateRoomSize rSize
| numChars < 3 = Nothing
| numChars > 10 = Nothing
| not (T.any isDigit rSize) = Nothing
| otherwise = Just rSize
where
numChars = T.length rSize
Bed types must be greater than 4 characters and less than 21 .
validateBedType :: T.Text -> Maybe T.Text
validateBedType bedType
| numChars < 4 = Nothing
| numChars > 21 = Nothing
| not (T.any isAlphaNum bedType) = Nothing
| otherwise = Just bedType
where
numChars = T.length bedType
A valid number of beds must be greater than 0 but less than or equal to 15 .
validateBedNum :: Int -> Either String Int
validateBedNum bedNum
| bedNum < 1 = badBedNum
| bedNum > 15 = badBedNum
| otherwise = Right bedNum
Validate a list of amenities : A valid list of amenities must not be greater than 50 elements , and each element must be
less than 50 characters long . Empty Strings are not allowed .
validateAmenities :: [T.Text] -> Either String [T.Text]
validateAmenities amens
| length amens > 50 = badAmenities
| all isValidAmen amens = Right amens
| otherwise = badAmenities
where
isValidAmen amen
| T.length amen < 1 = False
| T.length amen > 50 = False
| otherwise = True
validateBookSpecs :: BT.BookableSpecs -> Either String BT.BookableSpecs
validateBookSpecs bklSpecs = do
let roomSize' = roomSize >>= validateRoomSize
bedType' = bedType >>= validateBedType
validateBookName name
validateBedNum bedNum
validateAmenities amenities
return $ BT.BookableSpecs name roomSize' bedType' bedNum amenities
where
(BT.BookableSpecs name roomSize bedType bedNum amenities) = bklSpecs
Validate the basic information to create a bookable . Note that in case that esDesc or
validateBasicBookData :: BT.BasicBookableData -> Either String BT.BasicBookableData
validateBasicBookData bookData = do
validateMaxOccupancy maxOccu
validateRoomIds roomIds
validateBookSpecs bklSpecs
let esDesc' = esDesc >>= validateDescriptionM
enDesc' = enDesc >>= validateDescriptionM
return $ BT.BasicBookableData propId bklSpecs esDesc' enDesc' maxOccu roomIds
where
(BT.BasicBookableData propId bklSpecs esDesc enDesc maxOccu roomIds) = bookData
Given a Bookable get its roomIds .
takeRoomIds :: BT.Bookable -> Either String (S.Set T.Text)
takeRoomIds bkl = Right $ BT.roomIds (BT.basicData bkl)
Validate a PricingData : A valid PricingData has an occupancy greater than 0 , has a list of non - empty conditions which is less than
20 elements and every element is less than 350 characters , has a price greater than 0 , and has a discount which is an integer
between 0 and 100 .
validatePricingData :: T.Text
-> BT.PricingData
-> Either String BT.Pricing
validatePricingData _ (BT.PricingData occu conds price disc)
| occu < 1 = badPriOccupancy
| not $ all isValidCond conds = badPriConds
| length conds > 20 = badPriConds
| price < 1 = badPriPrice
| disc < 0 || disc > 100 = badPriDisc
where
isValidCond cond
| numChars < 3 = False
| numChars > 350 = False
| otherwise = True
where
numChars = T.length cond
validatePricingData priId priData = Right $ (BT.Pricing priId priData)
genPricingId :: T.Text -> TM.UTCTime -> T.Text
genPricingId bookId utc = bookId <> timeStamp
where
(TM.UTCTime date time) = utc
(year, monthDay) = splitAt 4 (show date)
(month, day') = splitAt 2 (tail monthDay)
day = tail day'
hour = show $ round (time * 1000000)
timeStamp = T.pack $ year ++ month ++ day ++ hour
Transform a BT.Pricing into a tuple .
pricingToTuple :: BT.Pricing -> (T.Text, Int, [T.Text], Integer, Int)
pricingToTuple (BT.Pricing priId priData) = (priId, occu, conds, price, disc)
where
(BT.PricingData occu conds price disc) = priData
Check if a Bookable can be listed : A Bookable can be listed if it has pricings , if its status is Unlisted , if it has
either a description in english or in spanish , if it has been assigned some roomIds .
isListable (BT.Bookable bookId _ status bookData pricings)
| status == BT.Listed = alreadyListed
| null roomIds = bklNoRooms
| esDesc == Nothing && enDesc == Nothing = bklNoDescs
| null pricings = emptyPris
| otherwise = Right ()
where
(BT.BasicBookableData propId bklSpecs esDesc enDesc maxOccu roomIds) = bookData
Check if a Bookable can be unlisted : A Bookable can be unlisted if is state is listed .
isUnlistable :: BT.Bookable -> Either String ()
isUnlistable (BT.Bookable bookId _ status bookData pricings)
| status == BT.Listed = Right ()
| otherwise = alreadyUnlisted
updateBookable :: MonadIO m => ConfigES
-> T.Text
-> T.Text
-> t
-> (BT.Bookable -> Either String t1)
-> (t -> Either String t2)
-> (t1 -> t2 -> b)
-> (ConfigES -> T.Text -> b -> ExceptT a IO b1)
-> ReaderT MySQL.SqlBackend m (Either String b)
updateBookable coEs bookId token value validation1 validation2 biOp updateFunc = do
eitherBookable <- liftIO $ runExceptT $ queryAndParseBookable coEs bookId
case eitherBookable of
Left error -> return $ Left error
Right bookable -> do
case (validation1 bookable, validation2 value) of
(Left error, _) -> return $ Left error
(_, Left error) -> return $ Left error
(Right bookValue, Right validatedValue) -> do
let propId = BT.propId $ BT.basicData bookable
adminCreds <- validateAdminAndMatchProperty coEs token propId
case adminCreds of
Left error -> return $ Left error
Right _ -> do
let result = biOp bookValue validatedValue
jsonRes <- liftIO $ runExceptT $ updateFunc coEs bookId result
return $ either (\err -> updateBookErr) (\val -> Right result) jsonRes
General validation procedure to Validate an admin and to match an admin , a property and a bookable .
validateAdminPropBkl :: MonadIO m => ConfigES
-> T.Text
-> T.Text
-> ReaderT MySQL.SqlBackend m (Either String (Integer, T.Text))
validateAdminPropBkl coEs bookId token = do
eitherBookable <- liftIO $ runExceptT $ queryAndParseBookable coEs bookId
case eitherBookable of
Left error -> return $ Left error
Right bookable -> do
let propId = BT.propId $ BT.basicData bookable
adminCreds <- validateAdminAndMatchProperty coEs token propId
return $ either (\err -> Left err)
(\(userId, _, _, _, _, _, _, _) -> Right (userId, propId))
adminCreds
This function computed the meanScore of each bookable 's property with the first 1500 most recent scores .
checkBookableAvailability :: MonadIO m => ConfigES
-> (TM.UTCTime, TM.UTCTime)
-> (TM.UTCTime, TM.UTCTime)
-> Int
-> Int
-> BT.Bookable
-> ReaderT MySQL.SqlBackend m (Maybe BT.SearchResult)
checkBookableAvailability coEs (from, to) (cIn, cOut) numDays numRooms bookable = do
let propId = BT.propId $ BT.basicData bookable
bookId = BT.bklId bookable
roomIds = BT.roomIds $ BT.basicData bookable
check availability up to the last night of the reservation
maybeCalendar <- createBookableCalendar bookId from to numDays roomIds
maybeProperty <- liftIO $ queryAndParseProperty coEs propId
get the 1500 most recent scores .
case (maybeCalendar, maybeProperty) of
(Nothing, _) -> return Nothing
(_, Nothing) -> return Nothing
(Just calendar, Just property) -> do
let maybeReport = SC.periodReport (cIn, lastNight) calendar
case maybeReport of
Nothing -> return Nothing
Just (SCT.Report _ total reserved remaining) -> do
let propImg = PT.mainImgId property
propName = PT.name $ PT.propData property
propType = PT.propType $ PT.propData property
availableRooms = S.size remaining
scoresLength = fromIntegral $ length reviewScores
scoreMean = if scoresLength == 0 then 0 else (sum reviewScores) / scoresLength
roundedMean = (fromInteger $ round $ scoreMean * 10) / 10.0
if availableRooms >= numRooms
then return $ Just (BT.SearchResult bookable availableRooms roundedMean propName propType propImg)
else return Nothing
Given an Object data type which is supposed to be a parsable ElasticSearch response with a list of bookables ,
getSearchResults :: MonadIO m => ConfigES
-> Object
-> TM.UTCTime
-> TM.UTCTime
-> Int
-> Int
-> Int
-> ReaderT MySQL.SqlBackend m (Either String [BT.SearchResult])
getSearchResults coEs object cIn cOut numRooms from size = do
case parseBookables object of
Nothing -> return bookParsingErr
Just bookables -> do
let start = TM.addUTCTime (-2764800) cIn
end = TM.addUTCTime (2764800) cOut
maybeResults <- mapM (checkBookableAvailability coEs (start, end) (cIn, cOut) 128 numRooms) bookables
let results = take size $ drop from (catMaybes maybeResults)
return $ Right results
getSearchResultImgIds :: MonadIO m => BT.SearchResult
-> ReaderT MySQL.SqlBackend m BT.SearchResultWithImgIds
getSearchResultImgIds sResult = do
let bookId = BT.bklId $ BT.bookable $ sResult
imgIds <- BR.bookableImageIds bookId
return $ BT.SearchResultWithImgIds sResult imgIds
|
93cb8f691b3d94619fb5a96aa8dfb1dd30880dbb3648310274e3fecd5d8185cc | Xandaros/abnf | ABNF.hs | |
Module : Text . ABNF.ABNF
Description : ABNF
Copyright : ( c ) , 2016
License : BSD2
Maintainer : < >
Stability : experimental
Portability : non - portable
{-|
Module      : Text.ABNF.ABNF
Description : ABNF
Copyright   : (c) Martin Zeller, 2016
License     : BSD2
Maintainer  : Martin Zeller <>
Stability   : experimental
Portability : non-portable
-}
module Text.ABNF.ABNF
(
-- * ABNF types
-- | Re-exported from "Text.ABNF.ABNF.Types"
Rule(..)
-- * Parsing ABNF Rules
-- | Re-exported from "Text.ABNF.ABNF.Parser"
, rulelist
, parseABNF
-- * Canonicalizing ABNF Rules
-- | Re-exported from "Text.ABNF.ABNF.Canonicalizer"
, canonicalizeRules
) where
import Text.ABNF.ABNF.Types (Rule(..))
import Text.ABNF.ABNF.Parser (rulelist, parseABNF)
import Text.ABNF.ABNF.Canonicalizer (canonicalizeRules)
| null | https://raw.githubusercontent.com/Xandaros/abnf/347a2fbe49aeb380f5d07632036cfbc0e6bb1a6a/src/Text/ABNF/ABNF.hs | haskell | * ABNF types
| Re-exported from "Text.ABNF.ABNF.Types"
* Parsing ABNF Rules
| Re-exported from "Text.ABNF.ABNF.Parser"
* Canonicalizing ABNF Rules
| Re-exported from "Text.ABNF.ABNF.Canonicalizer" | |
Module : Text . ABNF.ABNF
Description : ABNF
Copyright : ( c ) , 2016
License : BSD2
Maintainer : < >
Stability : experimental
Portability : non - portable
Module : Text.ABNF.ABNF
Description : ABNF
Copyright : (c) Martin Zeller, 2016
License : BSD2
Maintainer : Martin Zeller <>
Stability : experimental
Portability : non-portable
-}
module Text.ABNF.ABNF
(
Rule(..)
, rulelist
, parseABNF
, canonicalizeRules
) where
import Text.ABNF.ABNF.Types (Rule(..))
import Text.ABNF.ABNF.Parser (rulelist, parseABNF)
import Text.ABNF.ABNF.Canonicalizer (canonicalizeRules)
|
8079b5617866ad446308523a8db9c043cbc84225f7a87712c72543c77d2c5f1b | MastodonC/kixi.hecuba | main.cljs | (ns kixi.hecuba.main
(:require
[om.core :as om :include-macros true]
[kixi.hecuba.tabs.hierarchy :as hierarchy]
[kixi.hecuba.history :as history]
[kixi.hecuba.model :refer (app-model)]
[ankha.core :as ankha]
[cljs.core.async :refer [put! chan <!]]))
;; Application entry point: if the page contains a DOM node with id
;; "hecuba-tabs", mount the hierarchy tab Om component on it.  Every tab
;; shares, via Om's :shared state, a navigation history over the levels
;; programmes -> projects -> properties -> sensors -> measurements and a
;; core.async refresh channel.
(defn main []
(when-let [hecuba-tabs (.getElementById js/document "hecuba-tabs")]
(om/root hierarchy/main-tab
app-model
{:target hecuba-tabs
:shared {:history (history/new-history [:programmes :projects :properties :sensors :measurements])
:refresh (chan)}})))
;; Useful for debugging in dev
;; (om/root ankha/inspector app-model {:target (.getElementById js/document "ankha")})
| null | https://raw.githubusercontent.com/MastodonC/kixi.hecuba/467400bbe670e74420a2711f7d49e869ab2b3e21/src/cljs/kixi/hecuba/main.cljs | clojure | Useful for debugging in dev
(om/root ankha/inspector app-model {:target (.getElementById js/document "ankha")}) | (ns kixi.hecuba.main
(:require
[om.core :as om :include-macros true]
[kixi.hecuba.tabs.hierarchy :as hierarchy]
[kixi.hecuba.history :as history]
[kixi.hecuba.model :refer (app-model)]
[ankha.core :as ankha]
[cljs.core.async :refer [put! chan <!]]))
(defn main []
(when-let [hecuba-tabs (.getElementById js/document "hecuba-tabs")]
(om/root hierarchy/main-tab
app-model
{:target hecuba-tabs
:shared {:history (history/new-history [:programmes :projects :properties :sensors :measurements])
:refresh (chan)}})))
|
010cd4efe4708eb3dea190c0d2ba8650102b6f34b1bfb0f06ac14305e19fbff0 | wdebeaum/step | whereas.lisp | ;;;;
;;;; W::WHEREAS
;;;;
;; Lexicon entry for the adverb "whereas", used as a discourse
;; pre-modifier (template DISC-PRE-TEMPL).  Its single sense maps to the
;; ONT::Qualification LF type with a binary sentential-constraint
;; template; meta-data records provenance (beetle2, 2007-06-09).
(define-words
:pos W::adv :templ DISC-PRE-TEMPL
:words (
(W::WHEREAS
(SENSES
((LF-PARENT ONT::Qualification )
(TEMPL binary-constraint-s-decl-templ)
(meta-data :origin beetle2 :entry-date 20070609 :change-date nil :comments sentential-conjunction-cleanup)
)
)
)
))
| null | https://raw.githubusercontent.com/wdebeaum/step/f38c07d9cd3a58d0e0183159d4445de9a0eafe26/src/LexiconManager/Data/new/whereas.lisp | lisp |
W::WHEREAS
|
(define-words
:pos W::adv :templ DISC-PRE-TEMPL
:words (
(W::WHEREAS
(SENSES
((LF-PARENT ONT::Qualification )
(TEMPL binary-constraint-s-decl-templ)
(meta-data :origin beetle2 :entry-date 20070609 :change-date nil :comments sentential-conjunction-cleanup)
)
)
)
))
|
85dbb22631882869c82e4f2122cabb6acbd041ca32aa66988d0e7b2fa8e64bdc | deadpendency/deadpendency | DetermineDependenciesResult.hs | module DD.Effect.DetermineDependencies.Model.DetermineDependenciesResult
( DetermineDependenciesResult (..),
)
where
import Common.Aeson.Aeson
import Common.Model.Dependency.Basic.BasicRepoDependencies
import Common.Model.Dependency.Ignored.IgnoredRepoDependencies
import Data.Aeson
-- | Result of the dependency-determination step for one repository:
--   the repo dependencies that were determined, together with the ones
--   that are ignored.
data DetermineDependenciesResult = DetermineDependenciesResult
{ _basicRepoDependencies :: BasicRepoDependencies,
_ignoredRepoDependencies :: IgnoredRepoDependencies
}
deriving stock (Eq, Show, Generic)
-- | Serialised generically using the project's shared 'cleanJSONOptions'.
instance ToJSON DetermineDependenciesResult where
toJSON = genericToJSON cleanJSONOptions
| null | https://raw.githubusercontent.com/deadpendency/deadpendency/170d6689658f81842168b90aa3d9e235d416c8bd/apps/dependency-determiner/src/DD/Effect/DetermineDependencies/Model/DetermineDependenciesResult.hs | haskell | module DD.Effect.DetermineDependencies.Model.DetermineDependenciesResult
( DetermineDependenciesResult (..),
)
where
import Common.Aeson.Aeson
import Common.Model.Dependency.Basic.BasicRepoDependencies
import Common.Model.Dependency.Ignored.IgnoredRepoDependencies
import Data.Aeson
data DetermineDependenciesResult = DetermineDependenciesResult
{ _basicRepoDependencies :: BasicRepoDependencies,
_ignoredRepoDependencies :: IgnoredRepoDependencies
}
deriving stock (Eq, Show, Generic)
instance ToJSON DetermineDependenciesResult where
toJSON = genericToJSON cleanJSONOptions
| |
b793a0c100f3ac891450f13c128e9564843a183264a0e821028866aae8a7442c | MariaGrozdeva/Functional_programming | paths.rkt | #lang racket/base
;; Return every root-to-leaf path of a binary tree.  A tree is either
;; '() (empty) or a three-element list (value left-subtree right-subtree);
;; each path is the list of values from the root down to one leaf.
(define (paths tree)
  (cond
    [(null? tree) '()]
    [(and (null? (cadr tree)) (null? (caddr tree)))
     ;; Leaf node: exactly one path, containing just this value.
     (list (list (car tree)))]
    [else
     ;; Prepend this node's value to every path of either subtree.
     (let ([subpaths (append (paths (cadr tree)) (paths (caddr tree)))])
       (map (lambda (p) (cons (car tree) p)) subpaths))]))
(define (paths tree)
(if (null? tree)
'()
(if (and (null? (cadr tree)) (null? (caddr tree)))
(list (list (car tree)))
(map (lambda (x) (cons (car tree) x))
(append (paths (cadr tree)) (paths (caddr tree)))) ) )
) | |
cb31c3089466038d8f303ed4fcf13586479b822a648aa2436727f40a1f91521e | altsun/My-Lisps | ChiaDat.lsp | ;;;=================================
Copyright by ssg - www.cadviet.com - February 2009
;;;=================================
;;;=================================
;;;DIALOG FUNCTIONS
;;;=================================
(defun begin_dialog(DiaFile DiaName)
(setq i (load_dialog DiaFile))
(if (not (new_dialog DiaName i)) (exit))
(action_tile "cancel" "(done_dialog) (command \"regen\") (exit)")
)
;;;-------------------------------------------------------------
(defun end_dialog()
(start_dialog) (unload_dialog i)
)
;;;-------------------------------------------------------------
(defun set_list(MyTile MyList MyVal / j x)
;;;Fill list-box tile MyTile with the strings in MyList, then select MyVal.
(start_list MyTile)
(setq j 0)
;;;Walk MyList until (nth j MyList) returns nil (end of list).
(while (setq x (nth j MyList))
(add_list x)
(setq j (1+ j))
)
(end_list)
;;;Make MyVal the tile's current value.
(set_tile MyTile MyVal)
)
;;;-------------------------------------------------------------
;;;=================================
;;;PUBLIC FUNCTIONS
;;;=================================
(defun GetMid (p1 p2)
;;;Midpoint of segment p1-p2.
(polar p1 (angle p1 p2) (/ (distance p1 p2) 2))
)
;;;-------------------------------------------------------------
(defun ints (e1 e2 / ob1 ob2 V L1 L2)
;;;Intersections of e1, e2. Return LIST of points
;;;Thank Mr. for this function !
;;;Convert both entities to ActiveX objects so IntersectWith can be used.
(setq
ob1 (vlax-ename->vla-object e1)
ob2 (vlax-ename->vla-object e2)
)
;;;IntersectWith returns a flat safearray: x1 y1 z1 x2 y2 z2 ...
(setq V (vlax-variant-value (vla-IntersectWith ob1 ob2 acExtendOtherEntity)))
;;;Upper bound -1 means the safearray is empty (no intersections).
(if (/= (vlax-safearray-get-u-bound V 1) -1)
(progn
(setq L1 (vlax-safearray->list V) L2 nil)
;;;Regroup the flat coordinate list into (x y z) triples.
(while L1
(setq L2 (append L2 (list (list (car L1) (cadr L1) (caddr L1)))))
(repeat 3 (setq L1 (cdr L1)))
)
)
(setq L2 nil)
)
L2
)
;;;-------------------------------------------------------------
(defun getVert (e / i L)
;;;Return list of all vertex from pline e
(setq i 0 L nil)
(vl-load-com)
;;;Curve parameters run 0..EndParam; each integer parameter is a vertex.
(repeat (fix (+ (vlax-curve-getEndParam e) 1))
(setq L (append L (list (vlax-curve-getPointAtParam e i))))
(setq i (1+ i))
)
L
)
;;;-------------------------------------------------------------
(defun sideP (p1 p2 e / p1n p2n)
;;;Check same side of 2 points by line e, return T or nil.
;;;Temporarily align the UCS with entity e, so the line lies on the X axis.
(command "ucs" "n" "ob" e)
(setq
p1n (trans p1 0 1)
p2n (trans p2 0 1)
)
;;;Restore the previous UCS.
(command "ucs" "p")
;;;Same side (or on the line) when the Y coordinates do not have opposite signs.
(>= (* (cadr p1n) (cadr p2n)) 0)
)
;;;-------------------------------------------------------------
(defun wtxt (txt p / sty d h) ;;;Write txt on graphic screen, defaul setting
(setq
sty (getvar "textstyle")
d (tblsearch "style" sty)
h (cdr (assoc 40 d))
)
(if (= h 0) (setq h (cdr (assoc 42 d))))
(entmake
(list (cons 0 "TEXT") (cons 7 sty) (cons 1 txt) (cons 10 p) (cons 40 h) (assoc 41 d))
)
)
;;;-------------------------------------------------------------
(defun LastLoad( / K)
(setq K (strcat
"HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\"
"CurrentVersion\\Explorer\\ComDlg32\\OpenSaveMRU\\*"
))
(vl-registry-read K (substr (vl-registry-read K "MRUList") 1 1))
)
;;;-------------------------------------------------------------
(defun addspath(s) ;;;Add support file search path
(setenv "ACAD" (strcat (getenv "ACAD") ";" s))
)
;;;-------------------------------------------------------------
;;;=================================
;;;PRIVATE FUNCTIONS
;;;=================================
(defun area_DL (p)
Get area . Specify by e0 , e1 , p
;;;Filtered vertex, same side with p
(setq Lf (ints e0 e1))
(foreach x L0
(if (sideP x p e1) (setq Lf (append Lf (list x))))
)
;;;Convert to curve-param and sort
(setq Lpara nil)
(foreach x Lf
(setq para (vlax-curve-getParamAtPoint e0 x))
(setq Lpara (append Lpara (list para)))
)
(setq Lpara (vl-sort Lpara '<))
;;;Get area
(command ".area")
(foreach x Lpara (command (vlax-curve-getPointAtParam e0 x)))
(command "")
(setq S (getvar "area"))
)
;;;-------------------------------------------------------------
Move e by angle ag , step
(if (= song 1)
(command "move" e "" (list 0 0) (polar (list 0 0) ag dr))
(if (> dr 0)
(command "rotate" e "" pc "r" pC pM pN)
(command "rotate" e "" pc "r" pC pN pM)
)
)
)
;;;-------------------------------------------------------------
(defun RunDL ()
(setvar "cmdecho" 0)
(setvar "osmode" 0)
(setq OK nil)
(while (not OK)
(setq
Li (ints e0 e1)
i (- (length Li) 1)
pM (getMid (car Li) (nth i Li))
pN (polar pM ag tol)
St (area_DL pN)
)
(if (<= (* St flag) (* S1 flag))
(progn (setq flag (* flag -1)) (setq OK T))
(move_slow e1 ag (* flag tol))
)
)
)
;;;-------------------------------------------------------------
(defun ActTyle()
(setq S1 (* S0 (atof $value)))
(set_tile "dientich" (rtos S1))
)
;;;-------------------------------------------------------------
(defun ActDientich()
(setq k (/ (atof $value) S0))
(set_tile "tyle" (rtos k))
)
;;;-------------------------------------------------------------
(defun theoTL()
(mode_tile "tyle" 0)
(mode_tile "dientich" 1)
)
;;;-------------------------------------------------------------
(defun theoDT()
(mode_tile "tyle" 1)
(mode_tile "dientich" 0)
)
;;;-------------------------------------------------------------
(defun SL_chiadat()
(setq
byDT (atoi (get_tile "theodt"))
byTL (atoi (get_tile "theotl"))
S1 (atof (get_tile "dientich"))
k (atof (get_tile "tyle"))
Acc (atoi (get_tile "chinhxac"))
Song (atoi (get_tile "song"))
Ghi (atoi (get_tile "ghi"))
)
)
;;;-------------------------------------------------------------
(defun Dialog_chiadat()
(begin_dialog "chiadat.dcl" "chiadat")
(set_tile "tong" (strcat "Dien tich tong cong = " (rtos S0)))
(set_tile "theodt" (itoa byDT))
(set_tile "theotl" (itoa byTL))
(mode_tile "dientich" byTL)
(mode_tile "tyle" byDT)
(set_tile "dientich" (rtos S1))
(set_tile "tyle" (rtos k))
(set_list "chinhxac" AccList (itoa Acc))
(set_tile "song" (itoa song))
(set_tile "quay" (itoa quay))
(set_tile "ghi" (itoa ghi))
(action_tile "theodt" "(theoDT)")
(action_tile "theotl" "(theoTL)")
(action_tile "tyle" "(ActTyle)")
(action_tile "dientich" "(ActDientich)")
(action_tile "accept" "(SL_chiadat) (done_dialog)")
(end_dialog)
)
;;;-------------------------------------------------------------
(defun GhiDT()
(wtxt (rtos S1) (getpoint "\nDiem chuan ghi dien tich chia:"))
(wtxt (rtos (- S0 S1)) (getpoint "\nDiem chuan ghi dien tich con lai:"))
)
;;;-------------------------------------------------------------
;;;=================================
;;;MAIN PROGRAM
;;;=================================
(defun C:DL (/ e0 e1 Li i di p0 k tol S0 p1 ag L0 OK Lf x
p Lpara para S oldos S00 flag pM pN St prec)
(vl-load-com)
CHON PLINE VA
(setq e0 (car (entsel "\nChon 1 pline kin:")))
(redraw e0 3)
(setq
e1 (car (entsel "\nChon duong chia, cat pline it nhat tai 2 diem:"))
Li (ints e0 e1)
)
(redraw e1 3)
(if (< (length Li) 2) (progn (alert "\nKhong tim thay 2 giao diem!") (progn (command "regen") (exit))))
(setq
i (- (length Li) 1)
di (distance (car Li) (nth i Li))
p0 (getpoint "\nPick 1 diem, ve phia can chia so voi duong chuan:")
)
GOI DIALOG
(setq S0 (vlax-curve-getArea e0))
(if (not S1) (setq S1 (/ S0 2)))
(if (not byDT) (setq byDT 1))
(if (= byDT 1) (setq byTL 0) (setq byTL 1))
(if (not Acc) (setq Acc 4))
(if (not song) (setq song 1))
(if (= song 1) (setq quay 0) (setq quay 1))
(if (not ghi) (setq ghi 0))
(setq
k (/ S1 S0)
AccList (list "0" "0.0" "0.00" "0.000" "0.0000" "0.00000" "0.000000" "0.0000000" "0.00000000")
)
(Dialog_chiadat)
(command "regen")
TINH TOAN
(if (= song 0) (setq pc (getpoint "\nChon diem co dinh:")))
(setq
L0 (getVert e0) ;;;List of all vertex
S00 (area_DL p0)
St S00
p1 (vlax-curve-getClosestPointTo e1 p0)
ag (angle p1 p0)
prec (expt 10.0 (- acc))
oldos (getvar "osmode")
)
(cond
((<= (abs (- S00 S1)) prec) (progn (alert "Duong chia da dung vi tri!") (command "regen") (exit)))
((> S00 S1) (setq flag 1))
((< S00 S1) (setq flag -1))
)
(setq tol (* di 0.01))
;;;RUN DIVIDE LAND
(while (> (abs (- St S1)) prec) (runDL) (setq tol (* 0.1 tol)))
(alert "FINISH!")
GHI DIEN TICH
(if (= ghi 1) (GhiDT))
KET THUC
(setvar "cmdecho" 1)
(setvar "osmode" oldos)
(command "regen")
(princ)
)
;;;=================================
;;;Add support file search path
(if (not (findfile "Chiadat.lsp")) (addspath (vl-filename-directory (LastLoad))))
;;;=================================
| null | https://raw.githubusercontent.com/altsun/My-Lisps/f88bfff543d9a0be5c9fa8180f74e58651509dbe/Common/chiadat/ChiaDat.lsp | lisp | =================================
=================================
=================================
DIALOG FUNCTIONS
=================================
-------------------------------------------------------------
-------------------------------------------------------------
-------------------------------------------------------------
=================================
PUBLIC FUNCTIONS
=================================
-------------------------------------------------------------
Intersections of e1, e2. Return LIST of points
-------------------------------------------------------------
Return list of all vertex from pline e
-------------------------------------------------------------
-------------------------------------------------------------
Write txt on graphic screen, defaul setting
-------------------------------------------------------------
-------------------------------------------------------------
Add support file search path
-------------------------------------------------------------
=================================
PRIVATE FUNCTIONS
=================================
Filtered vertex, same side with p
Convert to curve-param and sort
Get area
-------------------------------------------------------------
-------------------------------------------------------------
-------------------------------------------------------------
-------------------------------------------------------------
-------------------------------------------------------------
-------------------------------------------------------------
-------------------------------------------------------------
-------------------------------------------------------------
-------------------------------------------------------------
-------------------------------------------------------------
=================================
MAIN PROGRAM
=================================
List of all vertex
RUN DIVIDE LAND
=================================
Add support file search path
=================================
|
Copyright by ssg - www.cadviet.com - February 2009
(defun begin_dialog(DiaFile DiaName)
(setq i (load_dialog DiaFile))
(if (not (new_dialog DiaName i)) (exit))
(action_tile "cancel" "(done_dialog) (command \"regen\") (exit)")
)
(defun end_dialog()
(start_dialog) (unload_dialog i)
)
(defun set_list(MyTile MyList MyVal / j x)
(start_list MyTile)
(setq j 0)
(while (setq x (nth j MyList))
(add_list x)
(setq j (1+ j))
)
(end_list)
(set_tile MyTile MyVal)
)
(defun GetMid (p1 p2)
Midpoint : p1 , p2
(polar p1 (angle p1 p2) (/ (distance p1 p2) 2))
)
(defun ints (e1 e2 / ob1 ob2 V L1 L2)
Thank Mr. for this function !
(setq
ob1 (vlax-ename->vla-object e1)
ob2 (vlax-ename->vla-object e2)
)
(setq V (vlax-variant-value (vla-IntersectWith ob1 ob2 acExtendOtherEntity)))
(if (/= (vlax-safearray-get-u-bound V 1) -1)
(progn
(setq L1 (vlax-safearray->list V) L2 nil)
(while L1
(setq L2 (append L2 (list (list (car L1) (cadr L1) (caddr L1)))))
(repeat 3 (setq L1 (cdr L1)))
)
)
(setq L2 nil)
)
L2
)
(defun getVert (e / i L)
(setq i 0 L nil)
(vl-load-com)
(repeat (fix (+ (vlax-curve-getEndParam e) 1))
(setq L (append L (list (vlax-curve-getPointAtParam e i))))
(setq i (1+ i))
)
L
)
(defun sideP (p1 p2 e / p1n p2n)
Check same side of 2 points by line e , return T or nil
(command "ucs" "n" "ob" e)
(setq
p1n (trans p1 0 1)
p2n (trans p2 0 1)
)
(command "ucs" "p")
(>= (* (cadr p1n) (cadr p2n)) 0)
)
(setq
sty (getvar "textstyle")
d (tblsearch "style" sty)
h (cdr (assoc 40 d))
)
(if (= h 0) (setq h (cdr (assoc 42 d))))
(entmake
(list (cons 0 "TEXT") (cons 7 sty) (cons 1 txt) (cons 10 p) (cons 40 h) (assoc 41 d))
)
)
(defun LastLoad( / K)
(setq K (strcat
"HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\"
"CurrentVersion\\Explorer\\ComDlg32\\OpenSaveMRU\\*"
))
(vl-registry-read K (substr (vl-registry-read K "MRUList") 1 1))
)
(setenv "ACAD" (strcat (getenv "ACAD") ";" s))
)
(defun area_DL (p)
Get area . Specify by e0 , e1 , p
(setq Lf (ints e0 e1))
(foreach x L0
(if (sideP x p e1) (setq Lf (append Lf (list x))))
)
(setq Lpara nil)
(foreach x Lf
(setq para (vlax-curve-getParamAtPoint e0 x))
(setq Lpara (append Lpara (list para)))
)
(setq Lpara (vl-sort Lpara '<))
(command ".area")
(foreach x Lpara (command (vlax-curve-getPointAtParam e0 x)))
(command "")
(setq S (getvar "area"))
)
Move e by angle ag , step
(if (= song 1)
(command "move" e "" (list 0 0) (polar (list 0 0) ag dr))
(if (> dr 0)
(command "rotate" e "" pc "r" pC pM pN)
(command "rotate" e "" pc "r" pC pN pM)
)
)
)
(defun RunDL ()
(setvar "cmdecho" 0)
(setvar "osmode" 0)
(setq OK nil)
(while (not OK)
(setq
Li (ints e0 e1)
i (- (length Li) 1)
pM (getMid (car Li) (nth i Li))
pN (polar pM ag tol)
St (area_DL pN)
)
(if (<= (* St flag) (* S1 flag))
(progn (setq flag (* flag -1)) (setq OK T))
(move_slow e1 ag (* flag tol))
)
)
)
(defun ActTyle()
(setq S1 (* S0 (atof $value)))
(set_tile "dientich" (rtos S1))
)
(defun ActDientich()
(setq k (/ (atof $value) S0))
(set_tile "tyle" (rtos k))
)
(defun theoTL()
(mode_tile "tyle" 0)
(mode_tile "dientich" 1)
)
(defun theoDT()
(mode_tile "tyle" 1)
(mode_tile "dientich" 0)
)
(defun SL_chiadat()
(setq
byDT (atoi (get_tile "theodt"))
byTL (atoi (get_tile "theotl"))
S1 (atof (get_tile "dientich"))
k (atof (get_tile "tyle"))
Acc (atoi (get_tile "chinhxac"))
Song (atoi (get_tile "song"))
Ghi (atoi (get_tile "ghi"))
)
)
(defun Dialog_chiadat()
(begin_dialog "chiadat.dcl" "chiadat")
(set_tile "tong" (strcat "Dien tich tong cong = " (rtos S0)))
(set_tile "theodt" (itoa byDT))
(set_tile "theotl" (itoa byTL))
(mode_tile "dientich" byTL)
(mode_tile "tyle" byDT)
(set_tile "dientich" (rtos S1))
(set_tile "tyle" (rtos k))
(set_list "chinhxac" AccList (itoa Acc))
(set_tile "song" (itoa song))
(set_tile "quay" (itoa quay))
(set_tile "ghi" (itoa ghi))
(action_tile "theodt" "(theoDT)")
(action_tile "theotl" "(theoTL)")
(action_tile "tyle" "(ActTyle)")
(action_tile "dientich" "(ActDientich)")
(action_tile "accept" "(SL_chiadat) (done_dialog)")
(end_dialog)
)
(defun GhiDT()
(wtxt (rtos S1) (getpoint "\nDiem chuan ghi dien tich chia:"))
(wtxt (rtos (- S0 S1)) (getpoint "\nDiem chuan ghi dien tich con lai:"))
)
(defun C:DL (/ e0 e1 Li i di p0 k tol S0 p1 ag L0 OK Lf x
p Lpara para S oldos S00 flag pM pN St prec)
(vl-load-com)
CHON PLINE VA
(setq e0 (car (entsel "\nChon 1 pline kin:")))
(redraw e0 3)
(setq
e1 (car (entsel "\nChon duong chia, cat pline it nhat tai 2 diem:"))
Li (ints e0 e1)
)
(redraw e1 3)
(if (< (length Li) 2) (progn (alert "\nKhong tim thay 2 giao diem!") (progn (command "regen") (exit))))
(setq
i (- (length Li) 1)
di (distance (car Li) (nth i Li))
p0 (getpoint "\nPick 1 diem, ve phia can chia so voi duong chuan:")
)
GOI DIALOG
(setq S0 (vlax-curve-getArea e0))
(if (not S1) (setq S1 (/ S0 2)))
(if (not byDT) (setq byDT 1))
(if (= byDT 1) (setq byTL 0) (setq byTL 1))
(if (not Acc) (setq Acc 4))
(if (not song) (setq song 1))
(if (= song 1) (setq quay 0) (setq quay 1))
(if (not ghi) (setq ghi 0))
(setq
k (/ S1 S0)
AccList (list "0" "0.0" "0.00" "0.000" "0.0000" "0.00000" "0.000000" "0.0000000" "0.00000000")
)
(Dialog_chiadat)
(command "regen")
TINH TOAN
(if (= song 0) (setq pc (getpoint "\nChon diem co dinh:")))
(setq
S00 (area_DL p0)
St S00
p1 (vlax-curve-getClosestPointTo e1 p0)
ag (angle p1 p0)
prec (expt 10.0 (- acc))
oldos (getvar "osmode")
)
(cond
((<= (abs (- S00 S1)) prec) (progn (alert "Duong chia da dung vi tri!") (command "regen") (exit)))
((> S00 S1) (setq flag 1))
((< S00 S1) (setq flag -1))
)
(setq tol (* di 0.01))
(while (> (abs (- St S1)) prec) (runDL) (setq tol (* 0.1 tol)))
(alert "FINISH!")
GHI DIEN TICH
(if (= ghi 1) (GhiDT))
KET THUC
(setvar "cmdecho" 1)
(setvar "osmode" oldos)
(command "regen")
(princ)
)
(if (not (findfile "Chiadat.lsp")) (addspath (vl-filename-directory (LastLoad))))
|
f586d37b5ebf9eed3b6ebdcabfe48d5ba68e811f5e5c5d08e189d289176f0ec4 | hasura/pg-client-hs | PTI.hs | # OPTIONS_GHC -fno - warn - missing - signatures #
module Database.PG.Query.PTI where
-------------------------------------------------------------------------------
import Data.Word (Word32)
import Database.PostgreSQL.LibPQ qualified as PQ
import Prelude
-------------------------------------------------------------------------------
-- | Wrap the raw numeric oid of a PostgreSQL type ('Word32') into
-- libpq's 'PQ.Oid'.
mkOid :: Word32 -> PQ.Oid
mkOid = PQ.Oid . fromIntegral
-- * Constants
-------------------------
auto = mkOid 0
abstime = mkOid 702
aclitem = mkOid 1033
bit = mkOid 1560
bool = mkOid 16
box = mkOid 603
bpchar = mkOid 1042
bytea = mkOid 17
char = mkOid 18
cid = mkOid 29
cidr = mkOid 650
circle = mkOid 718
cstring = mkOid 2275
date = mkOid 1082
daterange = mkOid 3912
float4 = mkOid 700
float8 = mkOid 701
gtsvector = mkOid 3642
inet = mkOid 869
int2 = mkOid 21
int2vector = mkOid 22
int4 = mkOid 23
int4range = mkOid 3904
int8 = mkOid 20
int8range = mkOid 3926
interval = mkOid 1186
json = mkOid 114
jsonb = mkOid 3802
line = mkOid 628
lseg = mkOid 601
macaddr = mkOid 829
money = mkOid 790
name = mkOid 19
numeric = mkOid 1700
numrange = mkOid 3906
oid = mkOid 26
oidvector = mkOid 30
path = mkOid 602
point = mkOid 600
polygon = mkOid 604
record = mkOid 2249
refcursor = mkOid 1790
regclass = mkOid 2205
regconfig = mkOid 3734
regdictionary = mkOid 3769
regoper = mkOid 2203
regoperator = mkOid 2204
regproc = mkOid 24
regprocedure = mkOid 2202
regtype = mkOid 2206
reltime = mkOid 703
text = mkOid 25
tid = mkOid 27
time = mkOid 1083
timestamp = mkOid 1114
timestamptz = mkOid 1184
timetz = mkOid 1266
tinterval = mkOid 704
tsquery = mkOid 3615
tsrange = mkOid 3908
tstzrange = mkOid 3910
tsvector = mkOid 3614
txid_snapshot = mkOid 2970
unknown = mkOid 705
uuid = mkOid 2950
varbit = mkOid 1562
varchar = mkOid 1043
void = mkOid 2278
xid = mkOid 28
xml = mkOid 142
-- Array Types
text_arr = mkOid 1009
| null | https://raw.githubusercontent.com/hasura/pg-client-hs/6053444a171174cd273ad929760bd654b2751d55/src/Database/PG/Query/PTI.hs | haskell | -----------------------------------------------------------------------------
-----------------------------------------------------------------------------
* Constants
-----------------------
Array Types | # OPTIONS_GHC -fno - warn - missing - signatures #
module Database.PG.Query.PTI where
import Data.Word (Word32)
import Database.PostgreSQL.LibPQ qualified as PQ
import Prelude
mkOid :: Word32 -> PQ.Oid
mkOid = PQ.Oid . fromIntegral
auto = mkOid 0
abstime = mkOid 702
aclitem = mkOid 1033
bit = mkOid 1560
bool = mkOid 16
box = mkOid 603
bpchar = mkOid 1042
bytea = mkOid 17
char = mkOid 18
cid = mkOid 29
cidr = mkOid 650
circle = mkOid 718
cstring = mkOid 2275
date = mkOid 1082
daterange = mkOid 3912
float4 = mkOid 700
float8 = mkOid 701
gtsvector = mkOid 3642
inet = mkOid 869
int2 = mkOid 21
int2vector = mkOid 22
int4 = mkOid 23
int4range = mkOid 3904
int8 = mkOid 20
int8range = mkOid 3926
interval = mkOid 1186
json = mkOid 114
jsonb = mkOid 3802
line = mkOid 628
lseg = mkOid 601
macaddr = mkOid 829
money = mkOid 790
name = mkOid 19
numeric = mkOid 1700
numrange = mkOid 3906
oid = mkOid 26
oidvector = mkOid 30
path = mkOid 602
point = mkOid 600
polygon = mkOid 604
record = mkOid 2249
refcursor = mkOid 1790
regclass = mkOid 2205
regconfig = mkOid 3734
regdictionary = mkOid 3769
regoper = mkOid 2203
regoperator = mkOid 2204
regproc = mkOid 24
regprocedure = mkOid 2202
regtype = mkOid 2206
reltime = mkOid 703
text = mkOid 25
tid = mkOid 27
time = mkOid 1083
timestamp = mkOid 1114
timestamptz = mkOid 1184
timetz = mkOid 1266
tinterval = mkOid 704
tsquery = mkOid 3615
tsrange = mkOid 3908
tstzrange = mkOid 3910
tsvector = mkOid 3614
txid_snapshot = mkOid 2970
unknown = mkOid 705
uuid = mkOid 2950
varbit = mkOid 1562
varchar = mkOid 1043
void = mkOid 2278
xid = mkOid 28
xml = mkOid 142
text_arr = mkOid 1009
|
cbd720ffbb1f85dea77227b1bfa1922899b3d3b8fbac4a179790e781547b5282 | Ekdohibs/camlboot | arg.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Para , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
type key = string
type doc = string
type usage_msg = string
type anon_fun = (string -> unit)
type spec =
| Unit of (unit -> unit) (* Call the function with unit argument *)
| Bool of (bool -> unit) (* Call the function with a bool argument *)
| Set of bool ref (* Set the reference to true *)
| Clear of bool ref (* Set the reference to false *)
| String of (string -> unit) (* Call the function with a string argument *)
| Set_string of string ref (* Set the reference to the string argument *)
| Int of (int -> unit) (* Call the function with an int argument *)
| Set_int of int ref (* Set the reference to the int argument *)
| Float of (float -> unit) (* Call the function with a float argument *)
| Set_float of float ref (* Set the reference to the float argument *)
| Tuple of spec list (* Take several arguments according to the
spec list *)
| Symbol of string list * (string -> unit)
Take one of the symbols as argument and
call the function with the symbol .
call the function with the symbol. *)
| Rest of (string -> unit) (* Stop interpreting keywords and call the
function with each remaining argument *)
| Expand of (string -> string array) (* If the remaining arguments to process
are of the form
[["-foo"; "arg"] @ rest] where "foo" is
registered as [Expand f], then the
arguments [f "arg" @ rest] are
processed. Only allowed in
[parse_and_expand_argv_dynamic]. *)
exception Bad of string
exception Help of string
type error =
| Unknown of string
| Wrong of string * string * string (* option, actual, expected *)
| Missing of string
| Message of string
exception Stop of error (* used internally *)
open Printf
(* Association lookup on triples: return the second component of the
   first triple in [l] whose first component equals [x].
   Raises [Not_found] when no triple matches. *)
let rec assoc3 x l =
  match l with
  | (key, value, _) :: rest -> if key = x then value else assoc3 x rest
  | [] -> raise Not_found
(* Split [s] at its first '='; returns (text before, text after).
   Raises [Not_found] when [s] contains no '='. *)
let split s =
  let i = String.index s '=' in
  (String.sub s 0 i, String.sub s (i + 1) (String.length s - i - 1))
(* Render the symbol list [l] as prefix ^ s1 ^ sep ^ s2 ^ ... ^ suffix;
   the empty list renders as "<none>". *)
let make_symlist prefix sep suffix l =
  match l with
  | [] -> "<none>"
  | _ :: _ -> prefix ^ String.concat sep l ^ suffix
(* Print one usage line for the option (key, spec, doc) on [buf];
   options with an empty doc string are not printed.  [Symbol] options
   additionally show their {a|b|c} symbol list. *)
let print_spec buf (key, spec, doc) =
if String.length doc > 0 then
match spec with
| Symbol (l, _) ->
bprintf buf " %s %s%s\n" key (make_symlist "{" "|" "}" l) doc
| _ ->
bprintf buf " %s %s\n" key doc
(* Raising [Stop (Unknown "-help")] is how the parser triggers the
   help/usage path (see the error conversion in the parsing loop). *)
let help_action () = raise (Stop (Unknown "-help"))
(* Append default -help/--help entries to [speclist], unless entries
   with those keywords are already present. *)
let add_help speclist =
let add1 =
try ignore (assoc3 "-help" speclist); []
with Not_found ->
["-help", Unit help_action, " Display this list of options"]
and add2 =
try ignore (assoc3 "--help" speclist); []
with Not_found ->
["--help", Unit help_action, " Display this list of options"]
in
speclist @ (add1 @ add2)
(* Print [errmsg] followed by one line per documented option to [buf]. *)
let usage_b buf speclist errmsg =
bprintf buf "%s\n" errmsg;
List.iter (print_spec buf) (add_help speclist)
(* Same as [usage_b], returning the usage message as a string. *)
let usage_string speclist errmsg =
let b = Buffer.create 200 in
usage_b b speclist errmsg;
Buffer.contents b
(* Print the usage message on stderr. *)
let usage speclist errmsg =
eprintf "%s" (usage_string speclist errmsg)
let current = ref 0
(* [Some b] when [x] is "true"/"false", [None] otherwise. *)
let bool_of_string_opt x =
  match bool_of_string x with
  | b -> Some b
  | exception Invalid_argument _ -> None
(* [Some n] when [x] parses as an integer literal, [None] otherwise. *)
let int_of_string_opt x =
  match int_of_string x with
  | n -> Some n
  | exception Failure _ -> None
(* [Some f] when [x] parses as a float literal, [None] otherwise. *)
let float_of_string_opt x =
  match float_of_string x with
  | f -> Some f
  | exception Failure _ -> None
let parse_and_expand_argv_dynamic_aux allow_expand current argv speclist anonfun errmsg =
let initpos = !current in
let convert_error error =
(* convert an internal error to a Bad/Help exception
*or* add the program name as a prefix and the usage message as a suffix
to an user-raised Bad exception.
*)
let b = Buffer.create 200 in
let progname = if initpos < (Array.length !argv) then !argv.(initpos) else "(?)" in
begin match error with
| Unknown "-help" -> ()
| Unknown "--help" -> ()
| Unknown s ->
bprintf b "%s: unknown option '%s'.\n" progname s
| Missing s ->
bprintf b "%s: option '%s' needs an argument.\n" progname s
| Wrong (opt, arg, expected) ->
bprintf b "%s: wrong argument '%s'; option '%s' expects %s.\n"
progname arg opt expected
| Message s -> (* user error message *)
bprintf b "%s: %s.\n" progname s
end;
usage_b b !speclist errmsg;
if error = Unknown "-help" || error = Unknown "--help"
then Help (Buffer.contents b)
else Bad (Buffer.contents b)
in
incr current;
while !current < (Array.length !argv) do
begin try
let s = !argv.(!current) in
if String.length s >= 1 && s.[0] = '-' then begin
let action, follow =
try assoc3 s !speclist, None
with Not_found ->
try
let keyword, arg = split s in
assoc3 keyword !speclist, Some arg
with Not_found -> raise (Stop (Unknown s))
in
let no_arg () =
match follow with
| None -> ()
| Some arg -> raise (Stop (Wrong (s, arg, "no argument"))) in
let get_arg () =
match follow with
| None ->
if !current + 1 < (Array.length !argv) then !argv.(!current + 1)
else raise (Stop (Missing s))
| Some arg -> arg
in
let consume_arg () =
match follow with
| None -> incr current
| Some _ -> ()
in
let rec treat_action = function
| Unit f -> f ();
| Bool f ->
let arg = get_arg () in
begin match bool_of_string_opt arg with
| None -> raise (Stop (Wrong (s, arg, "a boolean")))
| Some s -> f s
end;
consume_arg ();
| Set r -> no_arg (); r := true;
| Clear r -> no_arg (); r := false;
| String f ->
let arg = get_arg () in
f arg;
consume_arg ();
| Symbol (symb, f) ->
let arg = get_arg () in
if List.mem arg symb then begin
f arg;
consume_arg ();
end else begin
raise (Stop (Wrong (s, arg, "one of: "
^ (make_symlist "" " " "" symb))))
end
| Set_string r ->
r := get_arg ();
consume_arg ();
| Int f ->
let arg = get_arg () in
begin match int_of_string_opt arg with
| None -> raise (Stop (Wrong (s, arg, "an integer")))
| Some x -> f x
end;
consume_arg ();
| Set_int r ->
let arg = get_arg () in
begin match int_of_string_opt arg with
| None -> raise (Stop (Wrong (s, arg, "an integer")))
| Some x -> r := x
end;
consume_arg ();
| Float f ->
let arg = get_arg () in
begin match float_of_string_opt arg with
| None -> raise (Stop (Wrong (s, arg, "a float")))
| Some x -> f x
end;
consume_arg ();
| Set_float r ->
let arg = get_arg () in
begin match float_of_string_opt arg with
| None -> raise (Stop (Wrong (s, arg, "a float")))
| Some x -> r := x
end;
consume_arg ();
| Tuple specs ->
List.iter treat_action specs;
| Rest f ->
while !current < (Array.length !argv) - 1 do
f !argv.(!current + 1);
consume_arg ();
done;
| Expand f ->
if not allow_expand then
raise (Invalid_argument "Arg.Expand is is only allowed with Arg.parse_and_expand_argv_dynamic");
let arg = get_arg () in
let newarg = f arg in
consume_arg ();
let before = Array.sub !argv 0 (!current + 1)
and after = Array.sub !argv (!current + 1) ((Array.length !argv) - !current - 1) in
argv:= Array.concat [before;newarg;after];
in
treat_action action end
else anonfun s
with | Bad m -> raise (convert_error (Message m));
| Stop e -> raise (convert_error e);
end;
incr current
done
(* Like [parse_argv_dynamic] but also honours [Expand] specs, which may
   rewrite the remaining argument vector in place. *)
let parse_and_expand_argv_dynamic current argv speclist anonfun errmsg =
parse_and_expand_argv_dynamic_aux true current argv speclist anonfun errmsg
(* Alias kept so the [?current] defaults below still refer to this ref
   once the name [current] is shadowed by parameters. *)
let current1 = current
(* Parse [argv] with a mutable spec list; [Expand] specs are rejected
   (the aux function raises [Invalid_argument] for them). *)
let parse_argv_dynamic ?(current=current1) argv speclist anonfun errmsg =
parse_and_expand_argv_dynamic_aux false current (ref argv) speclist anonfun errmsg
(* Parse [argv] with a fixed spec list. *)
let parse_argv ?(current=current1) argv speclist anonfun errmsg =
parse_argv_dynamic ~current:current argv (ref speclist) anonfun errmsg
let parse l f msg =
try
parse_argv Sys.argv l f msg
with
| Bad msg -> eprintf "%s" msg; exit 2
| Help msg -> printf "%s" msg; exit 0
let parse_dynamic l f msg =
try
parse_argv_dynamic Sys.argv l f msg
with
| Bad msg -> eprintf "%s" msg; exit 2
| Help msg -> printf "%s" msg; exit 0
let parse_expand l f msg =
try
let argv = ref Sys.argv in
let spec = ref l in
let current = ref (!current) in
parse_and_expand_argv_dynamic current argv spec f msg
with
| Bad msg -> eprintf "%s" msg; exit 2
| Help msg -> printf "%s" msg; exit 0
let second_word s =
let len = String.length s in
let rec loop n =
if n >= len then len
else if s.[n] = ' ' then loop (n+1)
else n
in
match String.index s '\t' with
| n -> loop (n+1)
| exception Not_found ->
begin match String.index s ' ' with
| n -> loop (n+1)
| exception Not_found -> len
end
let max_arg_len cur (kwd, spec, doc) =
match spec with
| Symbol _ -> max cur (String.length kwd)
| _ -> max cur (String.length kwd + second_word doc)
let replace_leading_tab s =
let seen = ref false in
String.map (function '\t' when not !seen -> seen := true; ' ' | c -> c) s
let add_padding len ksd =
match ksd with
| (_, _, "") ->
(* Do not pad undocumented options, so that they still don't show up when
* run through [usage] or [parse]. *)
ksd
| (kwd, (Symbol _ as spec), msg) ->
let cutcol = second_word msg in
let spaces = String.make ((max 0 (len - cutcol)) + 3) ' ' in
(kwd, spec, "\n" ^ spaces ^ replace_leading_tab msg)
| (kwd, spec, msg) ->
let cutcol = second_word msg in
let kwd_len = String.length kwd in
let diff = len - kwd_len - cutcol in
if diff <= 0 then
(kwd, spec, replace_leading_tab msg)
else
let spaces = String.make diff ' ' in
let prefix = String.sub (replace_leading_tab msg) 0 cutcol in
let suffix = String.sub msg cutcol (String.length msg - cutcol) in
(kwd, spec, prefix ^ spaces ^ suffix)
let align ?(limit=max_int) speclist =
let completed = add_help speclist in
let len = List.fold_left max_arg_len 0 completed in
let len = min len limit in
List.map (add_padding len) completed
let trim_cr s =
let len = String.length s in
if len > 0 && String.get s (len - 1) = '\r' then
String.sub s 0 (len - 1)
else
s
let read_aux trim sep file =
let ic = open_in_bin file in
let buf = Buffer.create 200 in
let words = ref [] in
let stash () =
let word = Buffer.contents buf in
let word = if trim then trim_cr word else word in
words := word :: !words;
Buffer.clear buf
in
begin
try while true do
let c = input_char ic in
if c = sep then stash () else Buffer.add_char buf c
done
with End_of_file -> ()
end;
if Buffer.length buf > 0 then stash ();
close_in ic;
Array.of_list (List.rev !words)
let read_arg = read_aux true '\n'
let read_arg0 = read_aux false '\x00'
let write_aux sep file args =
let oc = open_out_bin file in
Array.iter (fun s -> fprintf oc "%s%c" s sep) args;
close_out oc
let write_arg file args = write_aux '\n' file args
let write_arg0 file args = write_aux '\x00' file args
| null | https://raw.githubusercontent.com/Ekdohibs/camlboot/506280c6e0813e0e794988151a8e46be55373ebc/miniml/interp/arg.ml | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Call the function with unit argument
Call the function with a bool argument
Set the reference to true
Set the reference to false
Call the function with a string argument
Set the reference to the string argument
Call the function with an int argument
Set the reference to the int argument
Call the function with a float argument
Set the reference to the float argument
Take several arguments according to the
spec list
Stop interpreting keywords and call the
function with each remaining argument
If the remaining arguments to process
are of the form
[["-foo"; "arg"] @ rest] where "foo" is
registered as [Expand f], then the
arguments [f "arg" @ rest] are
processed. Only allowed in
[parse_and_expand_argv_dynamic].
option, actual, expected
used internally
convert an internal error to a Bad/Help exception
*or* add the program name as a prefix and the usage message as a suffix
to an user-raised Bad exception.
user error message
Do not pad undocumented options, so that they still don't show up when
* run through [usage] or [parse]. | , projet Para , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
type key = string
type doc = string
type usage_msg = string
type anon_fun = (string -> unit)
type spec =
| Symbol of string list * (string -> unit)
Take one of the symbols as argument and
call the function with the symbol .
call the function with the symbol. *)
exception Bad of string
exception Help of string
type error =
| Unknown of string
| Missing of string
| Message of string
open Printf
let rec assoc3 x l =
match l with
| [] -> raise Not_found
| (y1, y2, _) :: _ when y1 = x -> y2
| _ :: t -> assoc3 x t
let split s =
let i = String.index s '=' in
let len = String.length s in
String.sub s 0 i, String.sub s (i+1) (len-(i+1))
let make_symlist prefix sep suffix l =
match l with
| [] -> "<none>"
| h::t -> (List.fold_left (fun x y -> x ^ sep ^ y) (prefix ^ h) t) ^ suffix
let print_spec buf (key, spec, doc) =
if String.length doc > 0 then
match spec with
| Symbol (l, _) ->
bprintf buf " %s %s%s\n" key (make_symlist "{" "|" "}" l) doc
| _ ->
bprintf buf " %s %s\n" key doc
let help_action () = raise (Stop (Unknown "-help"))
let add_help speclist =
let add1 =
try ignore (assoc3 "-help" speclist); []
with Not_found ->
["-help", Unit help_action, " Display this list of options"]
and add2 =
try ignore (assoc3 "--help" speclist); []
with Not_found ->
["--help", Unit help_action, " Display this list of options"]
in
speclist @ (add1 @ add2)
let usage_b buf speclist errmsg =
bprintf buf "%s\n" errmsg;
List.iter (print_spec buf) (add_help speclist)
let usage_string speclist errmsg =
let b = Buffer.create 200 in
usage_b b speclist errmsg;
Buffer.contents b
let usage speclist errmsg =
eprintf "%s" (usage_string speclist errmsg)
let current = ref 0
let bool_of_string_opt x =
try Some (bool_of_string x)
with Invalid_argument _ -> None
let int_of_string_opt x =
try Some (int_of_string x)
with Failure _ -> None
let float_of_string_opt x =
try Some (float_of_string x)
with Failure _ -> None
let parse_and_expand_argv_dynamic_aux allow_expand current argv speclist anonfun errmsg =
let initpos = !current in
let convert_error error =
let b = Buffer.create 200 in
let progname = if initpos < (Array.length !argv) then !argv.(initpos) else "(?)" in
begin match error with
| Unknown "-help" -> ()
| Unknown "--help" -> ()
| Unknown s ->
bprintf b "%s: unknown option '%s'.\n" progname s
| Missing s ->
bprintf b "%s: option '%s' needs an argument.\n" progname s
| Wrong (opt, arg, expected) ->
bprintf b "%s: wrong argument '%s'; option '%s' expects %s.\n"
progname arg opt expected
bprintf b "%s: %s.\n" progname s
end;
usage_b b !speclist errmsg;
if error = Unknown "-help" || error = Unknown "--help"
then Help (Buffer.contents b)
else Bad (Buffer.contents b)
in
incr current;
while !current < (Array.length !argv) do
begin try
let s = !argv.(!current) in
if String.length s >= 1 && s.[0] = '-' then begin
let action, follow =
try assoc3 s !speclist, None
with Not_found ->
try
let keyword, arg = split s in
assoc3 keyword !speclist, Some arg
with Not_found -> raise (Stop (Unknown s))
in
let no_arg () =
match follow with
| None -> ()
| Some arg -> raise (Stop (Wrong (s, arg, "no argument"))) in
let get_arg () =
match follow with
| None ->
if !current + 1 < (Array.length !argv) then !argv.(!current + 1)
else raise (Stop (Missing s))
| Some arg -> arg
in
let consume_arg () =
match follow with
| None -> incr current
| Some _ -> ()
in
let rec treat_action = function
| Unit f -> f ();
| Bool f ->
let arg = get_arg () in
begin match bool_of_string_opt arg with
| None -> raise (Stop (Wrong (s, arg, "a boolean")))
| Some s -> f s
end;
consume_arg ();
| Set r -> no_arg (); r := true;
| Clear r -> no_arg (); r := false;
| String f ->
let arg = get_arg () in
f arg;
consume_arg ();
| Symbol (symb, f) ->
let arg = get_arg () in
if List.mem arg symb then begin
f arg;
consume_arg ();
end else begin
raise (Stop (Wrong (s, arg, "one of: "
^ (make_symlist "" " " "" symb))))
end
| Set_string r ->
r := get_arg ();
consume_arg ();
| Int f ->
let arg = get_arg () in
begin match int_of_string_opt arg with
| None -> raise (Stop (Wrong (s, arg, "an integer")))
| Some x -> f x
end;
consume_arg ();
| Set_int r ->
let arg = get_arg () in
begin match int_of_string_opt arg with
| None -> raise (Stop (Wrong (s, arg, "an integer")))
| Some x -> r := x
end;
consume_arg ();
| Float f ->
let arg = get_arg () in
begin match float_of_string_opt arg with
| None -> raise (Stop (Wrong (s, arg, "a float")))
| Some x -> f x
end;
consume_arg ();
| Set_float r ->
let arg = get_arg () in
begin match float_of_string_opt arg with
| None -> raise (Stop (Wrong (s, arg, "a float")))
| Some x -> r := x
end;
consume_arg ();
| Tuple specs ->
List.iter treat_action specs;
| Rest f ->
while !current < (Array.length !argv) - 1 do
f !argv.(!current + 1);
consume_arg ();
done;
| Expand f ->
if not allow_expand then
raise (Invalid_argument "Arg.Expand is is only allowed with Arg.parse_and_expand_argv_dynamic");
let arg = get_arg () in
let newarg = f arg in
consume_arg ();
let before = Array.sub !argv 0 (!current + 1)
and after = Array.sub !argv (!current + 1) ((Array.length !argv) - !current - 1) in
argv:= Array.concat [before;newarg;after];
in
treat_action action end
else anonfun s
with | Bad m -> raise (convert_error (Message m));
| Stop e -> raise (convert_error e);
end;
incr current
done
let parse_and_expand_argv_dynamic current argv speclist anonfun errmsg =
parse_and_expand_argv_dynamic_aux true current argv speclist anonfun errmsg
let current1 = current
let parse_argv_dynamic ?(current=current1) argv speclist anonfun errmsg =
parse_and_expand_argv_dynamic_aux false current (ref argv) speclist anonfun errmsg
let parse_argv ?(current=current1) argv speclist anonfun errmsg =
parse_argv_dynamic ~current:current argv (ref speclist) anonfun errmsg
let parse l f msg =
try
parse_argv Sys.argv l f msg
with
| Bad msg -> eprintf "%s" msg; exit 2
| Help msg -> printf "%s" msg; exit 0
let parse_dynamic l f msg =
try
parse_argv_dynamic Sys.argv l f msg
with
| Bad msg -> eprintf "%s" msg; exit 2
| Help msg -> printf "%s" msg; exit 0
let parse_expand l f msg =
try
let argv = ref Sys.argv in
let spec = ref l in
let current = ref (!current) in
parse_and_expand_argv_dynamic current argv spec f msg
with
| Bad msg -> eprintf "%s" msg; exit 2
| Help msg -> printf "%s" msg; exit 0
let second_word s =
let len = String.length s in
let rec loop n =
if n >= len then len
else if s.[n] = ' ' then loop (n+1)
else n
in
match String.index s '\t' with
| n -> loop (n+1)
| exception Not_found ->
begin match String.index s ' ' with
| n -> loop (n+1)
| exception Not_found -> len
end
let max_arg_len cur (kwd, spec, doc) =
match spec with
| Symbol _ -> max cur (String.length kwd)
| _ -> max cur (String.length kwd + second_word doc)
let replace_leading_tab s =
let seen = ref false in
String.map (function '\t' when not !seen -> seen := true; ' ' | c -> c) s
let add_padding len ksd =
match ksd with
| (_, _, "") ->
ksd
| (kwd, (Symbol _ as spec), msg) ->
let cutcol = second_word msg in
let spaces = String.make ((max 0 (len - cutcol)) + 3) ' ' in
(kwd, spec, "\n" ^ spaces ^ replace_leading_tab msg)
| (kwd, spec, msg) ->
let cutcol = second_word msg in
let kwd_len = String.length kwd in
let diff = len - kwd_len - cutcol in
if diff <= 0 then
(kwd, spec, replace_leading_tab msg)
else
let spaces = String.make diff ' ' in
let prefix = String.sub (replace_leading_tab msg) 0 cutcol in
let suffix = String.sub msg cutcol (String.length msg - cutcol) in
(kwd, spec, prefix ^ spaces ^ suffix)
let align ?(limit=max_int) speclist =
let completed = add_help speclist in
let len = List.fold_left max_arg_len 0 completed in
let len = min len limit in
List.map (add_padding len) completed
let trim_cr s =
let len = String.length s in
if len > 0 && String.get s (len - 1) = '\r' then
String.sub s 0 (len - 1)
else
s
let read_aux trim sep file =
let ic = open_in_bin file in
let buf = Buffer.create 200 in
let words = ref [] in
let stash () =
let word = Buffer.contents buf in
let word = if trim then trim_cr word else word in
words := word :: !words;
Buffer.clear buf
in
begin
try while true do
let c = input_char ic in
if c = sep then stash () else Buffer.add_char buf c
done
with End_of_file -> ()
end;
if Buffer.length buf > 0 then stash ();
close_in ic;
Array.of_list (List.rev !words)
let read_arg = read_aux true '\n'
let read_arg0 = read_aux false '\x00'
let write_aux sep file args =
let oc = open_out_bin file in
Array.iter (fun s -> fprintf oc "%s%c" s sep) args;
close_out oc
let write_arg file args = write_aux '\n' file args
let write_arg0 file args = write_aux '\x00' file args
|
56f987aa107a11c782fe6d73558e36a6f8f20e2995c3b2e9f835657fb91cc8c8 | sionescu/bordeaux-threads | impl-lispworks.lisp | ;;;; -*- indent-tabs-mode: nil -*-
(in-package :bordeaux-threads-2)
#+(or lispworks4 lispworks5)
(error 'bordeaux-threads-error
:message "Threading not supported")
;;;
;;; Threads
;;;
(deftype native-thread ()
'mp:process)
(defun %start-multiprocessing ()
(mp:initialize-multiprocessing))
(defun %make-thread (function name)
(mp:process-run-function name nil function))
(defun %current-thread ()
(mp:get-current-process))
(defun %thread-name (thread)
(mp:process-name thread))
(defun %join-thread (thread)
(mp:process-join thread))
(defun %thread-yield ()
(mp:process-allow-scheduling))
;;;
;;; Introspection/debugging
;;;
(defun %all-threads ()
(mp:list-all-processes))
(defun %interrupt-thread (thread function)
(mp:process-interrupt thread function))
(defun %destroy-thread (thread)
(mp:process-kill thread))
(defun %thread-alive-p (thread)
(mp:process-alive-p thread))
;;;
;;; Non-recursive locks
;;;
(deftype native-lock () 'mp:lock)
(defun %make-lock (name)
(mp:make-lock :name name :recursivep nil))
(defun %acquire-lock (lock waitp timeout)
(mp:process-lock lock "Lock" (if waitp timeout 0)))
(defun %release-lock (lock)
(mp:process-unlock lock))
(defmacro %with-lock ((place timeout) &body body)
`(mp:with-lock (,place nil ,timeout) ,@body))
;;;
;;; Recursive locks
;;;
(deftype native-recursive-lock ()
'(and mp:lock (satisfies mp:lock-recursive-p)))
(defun %make-recursive-lock (name)
(mp:make-lock :name name :recursivep t))
(defun %acquire-recursive-lock (lock waitp timeout)
(%acquire-lock lock waitp timeout))
(defun %release-recursive-lock (lock)
(%release-lock lock))
(defmacro %with-recursive-lock ((place timeout) &body body)
`(mp:with-lock (,place nil ,timeout) ,@body))
;;;
;;; Semaphores
;;;
(deftype semaphore ()
'mp:semaphore)
(defun %make-semaphore (name count)
(mp:make-semaphore :name name :count count))
(defun %signal-semaphore (semaphore count)
(mp:semaphore-release semaphore :count count))
(defun %wait-on-semaphore (semaphore timeout)
(if (mp:semaphore-acquire semaphore :timeout timeout :count 1)
t nil))
;;;
;;; Condition variables
;;;
(deftype condition-variable ()
'mp:condition-variable)
(defun %make-condition-variable (name)
(mp:make-condition-variable :name name))
(defun %condition-wait (cv lock timeout)
(mp:condition-variable-wait cv lock :timeout timeout))
(defun %condition-notify (cv)
(mp:condition-variable-signal cv))
(defun %condition-broadcast (cv)
(mp:condition-variable-broadcast cv))
| null | https://raw.githubusercontent.com/sionescu/bordeaux-threads/6131a9c6da2a0fc38e0cadfbb22614f2787a830f/apiv2/impl-lispworks.lisp | lisp | -*- indent-tabs-mode: nil -*-
Threads
Introspection/debugging
Non-recursive locks
Recursive locks
Semaphores
Condition variables
|
(in-package :bordeaux-threads-2)
#+(or lispworks4 lispworks5)
(error 'bordeaux-threads-error
:message "Threading not supported")
(deftype native-thread ()
'mp:process)
(defun %start-multiprocessing ()
(mp:initialize-multiprocessing))
(defun %make-thread (function name)
(mp:process-run-function name nil function))
(defun %current-thread ()
(mp:get-current-process))
(defun %thread-name (thread)
(mp:process-name thread))
(defun %join-thread (thread)
(mp:process-join thread))
(defun %thread-yield ()
(mp:process-allow-scheduling))
(defun %all-threads ()
(mp:list-all-processes))
(defun %interrupt-thread (thread function)
(mp:process-interrupt thread function))
(defun %destroy-thread (thread)
(mp:process-kill thread))
(defun %thread-alive-p (thread)
(mp:process-alive-p thread))
(deftype native-lock () 'mp:lock)
(defun %make-lock (name)
(mp:make-lock :name name :recursivep nil))
(defun %acquire-lock (lock waitp timeout)
(mp:process-lock lock "Lock" (if waitp timeout 0)))
(defun %release-lock (lock)
(mp:process-unlock lock))
(defmacro %with-lock ((place timeout) &body body)
`(mp:with-lock (,place nil ,timeout) ,@body))
(deftype native-recursive-lock ()
'(and mp:lock (satisfies mp:lock-recursive-p)))
(defun %make-recursive-lock (name)
(mp:make-lock :name name :recursivep t))
(defun %acquire-recursive-lock (lock waitp timeout)
(%acquire-lock lock waitp timeout))
(defun %release-recursive-lock (lock)
(%release-lock lock))
(defmacro %with-recursive-lock ((place timeout) &body body)
`(mp:with-lock (,place nil ,timeout) ,@body))
(deftype semaphore ()
'mp:semaphore)
(defun %make-semaphore (name count)
(mp:make-semaphore :name name :count count))
(defun %signal-semaphore (semaphore count)
(mp:semaphore-release semaphore :count count))
(defun %wait-on-semaphore (semaphore timeout)
(if (mp:semaphore-acquire semaphore :timeout timeout :count 1)
t nil))
(deftype condition-variable ()
'mp:condition-variable)
(defun %make-condition-variable (name)
(mp:make-condition-variable :name name))
(defun %condition-wait (cv lock timeout)
(mp:condition-variable-wait cv lock :timeout timeout))
(defun %condition-notify (cv)
(mp:condition-variable-signal cv))
(defun %condition-broadcast (cv)
(mp:condition-variable-broadcast cv))
|
66fef4c515fffc73eb7d2b1d0a72b8cd5db2020117f06bcbeec8dcee4f636e3e | bennn/dissertation | main.rkt | #lang typed/racket/base
(require
require-typed-check
"../base/command-types.rkt")
(require/typed/check "eval.rkt"
(forth-eval* (-> (Listof String) (Values Any Any)))
)
(require (only-in racket/file file->lines))
;; =============================================================================
(define LOOPS 10)
(: main (-> (Listof String) Void))
(define (main lines)
(for ((i (in-range LOOPS)))
(define-values [_e _s] (forth-eval* lines))
(void)))
(define lines (file->lines "../base/history-100.txt"))
(time (main lines))
| null | https://raw.githubusercontent.com/bennn/dissertation/779bfe6f8fee19092849b7e2cfc476df33e9357b/dissertation/scrbl/jfp-2019/benchmarks/forth/typed/main.rkt | racket | ============================================================================= | #lang typed/racket/base
(require
require-typed-check
"../base/command-types.rkt")
(require/typed/check "eval.rkt"
(forth-eval* (-> (Listof String) (Values Any Any)))
)
(require (only-in racket/file file->lines))
(define LOOPS 10)
(: main (-> (Listof String) Void))
(define (main lines)
(for ((i (in-range LOOPS)))
(define-values [_e _s] (forth-eval* lines))
(void)))
(define lines (file->lines "../base/history-100.txt"))
(time (main lines))
|
bbc99e5e776fce2877075628e140f0f4bc61f71a26322b9e0a6ded27164eeb83 | diku-dk/futhark | KernelBabysitting.hs | # LANGUAGE TypeFamilies #
-- | Do various kernel optimisations - mostly related to coalescing.
module Futhark.Pass.KernelBabysitting (babysitKernels) where
import Control.Arrow (first)
import Control.Monad.State.Strict
import Data.Foldable
import Data.List (elemIndex, isPrefixOf, sort)
import Data.Map.Strict qualified as M
import Data.Maybe
import Futhark.IR
import Futhark.IR.GPU hiding
( BasicOp,
Body,
Exp,
FParam,
FunDef,
LParam,
Lambda,
Pat,
PatElem,
Prog,
RetType,
Stm,
)
import Futhark.MonadFreshNames
import Futhark.Pass
import Futhark.Tools
import Futhark.Util
-- | The pass definition.
babysitKernels :: Pass GPU GPU
babysitKernels =
Pass
"babysit kernels"
"Transpose kernel input arrays for better performance."
$ intraproceduralTransformation onStms
where
onStms scope stms = do
let m = localScope scope $ transformStms mempty stms
fmap fst $ modifyNameSource $ runState (runBuilderT m M.empty)
type BabysitM = Builder GPU
transformStms :: ExpMap -> Stms GPU -> BabysitM (Stms GPU)
transformStms expmap stms = collectStms_ $ foldM_ transformStm expmap stms
transformBody :: ExpMap -> Body GPU -> BabysitM (Body GPU)
transformBody expmap (Body () stms res) = do
stms' <- transformStms expmap stms
pure $ Body () stms' res
-- | Map from variable names to defining expression. We use this to
-- hackily determine whether something is transposed or otherwise
-- funky in memory (and we'd prefer it not to be). If we cannot find
it in the map , we just assume it 's all good . HACK and FIXME , I
-- suppose. We really should do this at the memory level.
type ExpMap = M.Map VName (Stm GPU)
nonlinearInMemory :: VName -> ExpMap -> Maybe (Maybe [Int])
nonlinearInMemory name m =
case M.lookup name m of
Just (Let _ _ (BasicOp (Opaque _ (Var arr)))) -> nonlinearInMemory arr m
Just (Let _ _ (BasicOp (Rearrange perm _))) -> Just $ Just $ rearrangeInverse perm
Just (Let _ _ (BasicOp (Reshape _ _ arr))) -> nonlinearInMemory arr m
Just (Let _ _ (BasicOp (Manifest perm _))) -> Just $ Just perm
Just (Let pat _ (Op (SegOp (SegMap _ _ ts _)))) ->
nonlinear
=<< find
((== name) . patElemName . fst)
(zip (patElems pat) ts)
_ -> Nothing
where
nonlinear (pe, t)
| inner_r <- arrayRank t,
inner_r > 0 = do
let outer_r = arrayRank (patElemType pe) - inner_r
pure $ Just $ rearrangeInverse $ [inner_r .. inner_r + outer_r - 1] ++ [0 .. inner_r - 1]
| otherwise = Nothing
transformStm :: ExpMap -> Stm GPU -> BabysitM ExpMap
transformStm expmap (Let pat aux (Op (SegOp op)))
FIXME : We only make coalescing optimisations for SegThread
SegOps , because that 's what the analysis assumes . For SegGroup
we should probably look at the component SegThreads , but it
-- apparently hasn't come up in practice yet.
| SegThread {} <- segLevel op = do
let mapper =
identitySegOpMapper
{ mapOnSegOpBody =
transformKernelBody expmap (segSpace op)
}
op' <- mapSegOpM mapper op
let stm' = Let pat aux $ Op $ SegOp op'
addStm stm'
pure $ M.fromList [(name, stm') | name <- patNames pat] <> expmap
transformStm expmap (Let pat aux e) = do
e' <- mapExpM (transform expmap) e
let stm' = Let pat aux e'
addStm stm'
pure $ M.fromList [(name, stm') | name <- patNames pat] <> expmap
transform :: ExpMap -> Mapper GPU GPU BabysitM
transform expmap =
identityMapper {mapOnBody = \scope -> localScope scope . transformBody expmap}
transformKernelBody ::
ExpMap ->
SegSpace ->
KernelBody GPU ->
BabysitM (KernelBody GPU)
transformKernelBody expmap space kbody = do
-- Go spelunking for accesses to arrays that are defined outside the
-- kernel body and where the indices are kernel thread indices.
scope <- askScope
let thread_gids = map fst $ unSegSpace space
thread_local = namesFromList $ segFlat space : thread_gids
free_ker_vars = freeIn kbody `namesSubtract` getKerVariantIds space
evalStateT
( traverseKernelBodyArrayIndexes
free_ker_vars
thread_local
(scope <> scopeOfSegSpace space)
(ensureCoalescedAccess expmap (unSegSpace space))
kbody
)
mempty
where
getKerVariantIds = namesFromList . M.keys . scopeOfSegSpace
type ArrayIndexTransform m =
Names ->
(VName -> Bool) -> -- thread local?
variant to a certain gid ( given as first param ) ?
Scope GPU -> -- type environment
VName ->
Slice SubExp ->
m (Maybe (VName, Slice SubExp))
traverseKernelBodyArrayIndexes ::
Monad f =>
Names ->
Names ->
Scope GPU ->
ArrayIndexTransform f ->
KernelBody GPU ->
f (KernelBody GPU)
traverseKernelBodyArrayIndexes free_ker_vars thread_variant outer_scope f (KernelBody () kstms kres) =
KernelBody () . stmsFromList
<$> mapM
( onStm
( varianceInStms mempty kstms,
outer_scope
)
)
(stmsToList kstms)
<*> pure kres
where
onLambda (variance, scope) lam =
(\body' -> lam {lambdaBody = body'})
<$> onBody (variance, scope') (lambdaBody lam)
where
scope' = scope <> scopeOfLParams (lambdaParams lam)
onBody (variance, scope) (Body bdec stms bres) = do
stms' <- stmsFromList <$> mapM (onStm (variance', scope')) (stmsToList stms)
pure $ Body bdec stms' bres
where
variance' = varianceInStms variance stms
scope' = scope <> scopeOf stms
onStm (variance, _) (Let pat dec (BasicOp (Index arr is))) =
Let pat dec . oldOrNew <$> f free_ker_vars isThreadLocal isGidVariant outer_scope arr is
where
oldOrNew Nothing =
BasicOp $ Index arr is
oldOrNew (Just (arr', is')) =
BasicOp $ Index arr' is'
isGidVariant gid (Var v) =
gid == v || nameIn gid (M.findWithDefault (oneName v) v variance)
isGidVariant _ _ = False
isThreadLocal v =
thread_variant
`namesIntersect` M.findWithDefault (oneName v) v variance
onStm (variance, scope) (Let pat dec e) =
Let pat dec <$> mapExpM (mapper (variance, scope)) e
onOp ctx (OtherOp soac) =
OtherOp <$> mapSOACM identitySOACMapper {mapOnSOACLambda = onLambda ctx} soac
onOp _ op = pure op
mapper ctx =
identityMapper
{ mapOnBody = const (onBody ctx),
mapOnOp = onOp ctx
}
type Replacements = M.Map (VName, Slice SubExp) VName
ensureCoalescedAccess ::
MonadBuilder m =>
ExpMap ->
[(VName, SubExp)] ->
ArrayIndexTransform (StateT Replacements m)
ensureCoalescedAccess
expmap
thread_space
free_ker_vars
isThreadLocal
isGidVariant
outer_scope
arr
slice = do
seen <- gets $ M.lookup (arr, slice)
case (seen, isThreadLocal arr, typeOf <$> M.lookup arr outer_scope) of
-- Already took care of this case elsewhere.
(Just arr', _, _) ->
pure $ Just (arr', slice)
(Nothing, False, Just t)
-- We are fully indexing the array with thread IDs, but the
-- indices are in a permuted order.
| Just is <- sliceIndices slice,
length is == arrayRank t,
Just is' <- coalescedIndexes free_ker_vars isGidVariant (map Var thread_gids) is,
Just perm <- is' `isPermutationOf` is ->
replace =<< lift (rearrangeInput (nonlinearInMemory arr expmap) perm arr)
-- Check whether the access is already coalesced because of a
-- previous rearrange being applied to the current array:
1 . get the permutation of the source - array rearrange
2 . apply it to the slice
3 . check that the innermost index is actually the gid
-- of the innermost kernel dimension.
-- If so, the access is already coalesced, nothing to do!
( 's Heuristic . )
| Just (Let _ _ (BasicOp (Rearrange perm _))) <- M.lookup arr expmap,
not $ null perm,
not $ null thread_gids,
inner_gid <- last thread_gids,
length slice >= length perm,
slice' <- map (unSlice slice !!) perm,
DimFix inner_ind <- last slice',
not $ null thread_gids,
isGidVariant inner_gid inner_ind ->
pure Nothing
-- We are not fully indexing an array, but the remaining slice
-- is invariant to the innermost-kernel dimension. We assume
-- the remaining slice will be sequentially streamed, hence
-- tiling will be applied later and will solve coalescing.
Hence nothing to do at this point . ( 's Heuristic . )
| (is, rem_slice) <- splitSlice slice,
not $ null rem_slice,
allDimAreSlice rem_slice,
Nothing <- M.lookup arr expmap,
pt <- elemType t,
not $ tooSmallSlice (primByteSize pt) rem_slice,
is /= map Var (take (length is) thread_gids) || length is == length thread_gids,
not (null thread_gids || null is),
last thread_gids `notNameIn` (freeIn is <> freeIn rem_slice) ->
pure Nothing
-- We are not fully indexing the array, and the indices are not
-- a proper prefix of the thread indices, and some indices are
-- thread local, so we assume (HEURISTIC!) that the remaining
-- dimensions will be traversed sequentially.
| (is, rem_slice) <- splitSlice slice,
not $ null rem_slice,
pt <- elemType t,
not $ tooSmallSlice (primByteSize pt) rem_slice,
is /= map Var (take (length is) thread_gids) || length is == length thread_gids,
any isThreadLocal (namesToList $ freeIn is) -> do
let perm = coalescingPermutation (length is) $ arrayRank t
replace =<< lift (rearrangeInput (nonlinearInMemory arr expmap) perm arr)
-- Everything is fine... assuming that the array is in row-major
-- order! Make sure that is the case.
| Just {} <- nonlinearInMemory arr expmap ->
case sliceIndices slice of
Just is
| Just _ <- coalescedIndexes free_ker_vars isGidVariant (map Var thread_gids) is ->
replace =<< lift (rowMajorArray arr)
| otherwise ->
pure Nothing
_ -> replace =<< lift (rowMajorArray arr)
_ -> pure Nothing
where
(thread_gids, _thread_gdims) = unzip thread_space
replace arr' = do
modify $ M.insert (arr, slice) arr'
pure $ Just (arr', slice)
-- Heuristic for avoiding rearranging too small arrays.
tooSmallSlice :: Int32 -> Slice SubExp -> Bool
tooSmallSlice bs = fst . foldl comb (True, bs) . sliceDims
where
comb (True, x) (Constant (IntValue (Int32Value d))) = (d * x < 4, d * x)
comb (_, x) _ = (False, x)
splitSlice :: Slice SubExp -> ([SubExp], Slice SubExp)
splitSlice (Slice []) = ([], Slice [])
splitSlice (Slice (DimFix i : is)) = first (i :) $ splitSlice (Slice is)
splitSlice is = ([], is)
allDimAreSlice :: Slice SubExp -> Bool
allDimAreSlice (Slice []) = True
allDimAreSlice (Slice (DimFix _ : _)) = False
allDimAreSlice (Slice (_ : is)) = allDimAreSlice (Slice is)
-- Try to move thread indexes into their proper position.
coalescedIndexes :: Names -> (VName -> SubExp -> Bool) -> [SubExp] -> [SubExp] -> Maybe [SubExp]
coalescedIndexes free_ker_vars isGidVariant tgids is
-- Do Nothing if:
1 . any of the indices is a constant or a kernel free variable
-- (because it would transpose a bigger array then needed -- big overhead).
2 . the innermost index is variant to the innermost - thread gid
-- (because access is likely to be already coalesced)
3 . the indexes are a prefix of the thread indexes , because that
-- means multiple threads will be accessing the same element.
| any isCt is =
Nothing
| any (`nameIn` free_ker_vars) (subExpVars is) =
Nothing
| is `isPrefixOf` tgids =
Nothing
| not (null tgids),
not (null is),
Var innergid <- last tgids,
num_is > 0 && isGidVariant innergid (last is) =
Just is
3 . Otherwise try fix coalescing
| otherwise =
Just $ reverse $ foldl move (reverse is) $ zip [0 ..] (reverse tgids)
where
num_is = length is
move is_rev (i, tgid)
-- If tgid is in is_rev anywhere but at position i, and
-- position i exists, we move it to position i instead.
| Just j <- elemIndex tgid is_rev,
i /= j,
i < num_is =
swap i j is_rev
| otherwise =
is_rev
swap i j l
| Just ix <- maybeNth i l,
Just jx <- maybeNth j l =
update i jx $ update j ix l
| otherwise =
error $ "coalescedIndexes swap: invalid indices" ++ show (i, j, l)
update 0 x (_ : ys) = x : ys
update i x (y : ys) = y : update (i - 1) x ys
update _ _ [] = error "coalescedIndexes: update"
isCt :: SubExp -> Bool
isCt (Constant _) = True
isCt (Var _) = False
coalescingPermutation :: Int -> Int -> [Int]
coalescingPermutation num_is rank =
[num_is .. rank - 1] ++ [0 .. num_is - 1]
rearrangeInput ::
MonadBuilder m =>
Maybe (Maybe [Int]) ->
[Int] ->
VName ->
m VName
rearrangeInput (Just (Just current_perm)) perm arr
| current_perm == perm = pure arr -- Already has desired representation.
rearrangeInput Nothing perm arr
| sort perm == perm = pure arr -- We don't know the current
-- representation, but the indexing
-- is linear, so let's hope the
-- array is too.
rearrangeInput (Just Just {}) perm arr
| sort perm == perm = rowMajorArray arr -- We just want a row-major array, no tricks.
rearrangeInput manifest perm arr = do
We may first manifest the array to ensure that it is flat in
-- memory. This is sometimes unnecessary, in which case the copy
-- will hopefully be removed by the simplifier.
manifested <- if isJust manifest then rowMajorArray arr else pure arr
letExp (baseString arr ++ "_coalesced") $
BasicOp $
Manifest perm manifested
rowMajorArray ::
MonadBuilder m =>
VName ->
m VName
rowMajorArray arr = do
rank <- arrayRank <$> lookupType arr
letExp (baseString arr ++ "_rowmajor") $ BasicOp $ Manifest [0 .. rank - 1] arr
--- Computing variance.
type VarianceTable = M.Map VName Names
varianceInStms :: VarianceTable -> Stms GPU -> VarianceTable
varianceInStms t = foldl varianceInStm t . stmsToList
varianceInStm :: VarianceTable -> Stm GPU -> VarianceTable
varianceInStm variance stm =
foldl' add variance $ patNames $ stmPat stm
where
add variance' v = M.insert v binding_variance variance'
look variance' v = oneName v <> M.findWithDefault mempty v variance'
binding_variance = mconcat $ map (look variance) $ namesToList (freeIn stm)
| null | https://raw.githubusercontent.com/diku-dk/futhark/98e4a75e4de7042afe030837084764bbf3c6c66e/src/Futhark/Pass/KernelBabysitting.hs | haskell | | Do various kernel optimisations - mostly related to coalescing.
| The pass definition.
| Map from variable names to defining expression. We use this to
hackily determine whether something is transposed or otherwise
funky in memory (and we'd prefer it not to be). If we cannot find
suppose. We really should do this at the memory level.
apparently hasn't come up in practice yet.
Go spelunking for accesses to arrays that are defined outside the
kernel body and where the indices are kernel thread indices.
thread local?
type environment
Already took care of this case elsewhere.
We are fully indexing the array with thread IDs, but the
indices are in a permuted order.
Check whether the access is already coalesced because of a
previous rearrange being applied to the current array:
of the innermost kernel dimension.
If so, the access is already coalesced, nothing to do!
We are not fully indexing an array, but the remaining slice
is invariant to the innermost-kernel dimension. We assume
the remaining slice will be sequentially streamed, hence
tiling will be applied later and will solve coalescing.
We are not fully indexing the array, and the indices are not
a proper prefix of the thread indices, and some indices are
thread local, so we assume (HEURISTIC!) that the remaining
dimensions will be traversed sequentially.
Everything is fine... assuming that the array is in row-major
order! Make sure that is the case.
Heuristic for avoiding rearranging too small arrays.
Try to move thread indexes into their proper position.
Do Nothing if:
(because it would transpose a bigger array then needed -- big overhead).
(because access is likely to be already coalesced)
means multiple threads will be accessing the same element.
If tgid is in is_rev anywhere but at position i, and
position i exists, we move it to position i instead.
Already has desired representation.
We don't know the current
representation, but the indexing
is linear, so let's hope the
array is too.
We just want a row-major array, no tricks.
memory. This is sometimes unnecessary, in which case the copy
will hopefully be removed by the simplifier.
- Computing variance. | # LANGUAGE TypeFamilies #
module Futhark.Pass.KernelBabysitting (babysitKernels) where
import Control.Arrow (first)
import Control.Monad.State.Strict
import Data.Foldable
import Data.List (elemIndex, isPrefixOf, sort)
import Data.Map.Strict qualified as M
import Data.Maybe
import Futhark.IR
import Futhark.IR.GPU hiding
( BasicOp,
Body,
Exp,
FParam,
FunDef,
LParam,
Lambda,
Pat,
PatElem,
Prog,
RetType,
Stm,
)
import Futhark.MonadFreshNames
import Futhark.Pass
import Futhark.Tools
import Futhark.Util
babysitKernels :: Pass GPU GPU
babysitKernels =
Pass
"babysit kernels"
"Transpose kernel input arrays for better performance."
$ intraproceduralTransformation onStms
where
onStms scope stms = do
let m = localScope scope $ transformStms mempty stms
fmap fst $ modifyNameSource $ runState (runBuilderT m M.empty)
type BabysitM = Builder GPU
transformStms :: ExpMap -> Stms GPU -> BabysitM (Stms GPU)
transformStms expmap stms = collectStms_ $ foldM_ transformStm expmap stms
transformBody :: ExpMap -> Body GPU -> BabysitM (Body GPU)
transformBody expmap (Body () stms res) = do
stms' <- transformStms expmap stms
pure $ Body () stms' res
it in the map , we just assume it 's all good . HACK and FIXME , I
type ExpMap = M.Map VName (Stm GPU)
nonlinearInMemory :: VName -> ExpMap -> Maybe (Maybe [Int])
nonlinearInMemory name m =
case M.lookup name m of
Just (Let _ _ (BasicOp (Opaque _ (Var arr)))) -> nonlinearInMemory arr m
Just (Let _ _ (BasicOp (Rearrange perm _))) -> Just $ Just $ rearrangeInverse perm
Just (Let _ _ (BasicOp (Reshape _ _ arr))) -> nonlinearInMemory arr m
Just (Let _ _ (BasicOp (Manifest perm _))) -> Just $ Just perm
Just (Let pat _ (Op (SegOp (SegMap _ _ ts _)))) ->
nonlinear
=<< find
((== name) . patElemName . fst)
(zip (patElems pat) ts)
_ -> Nothing
where
nonlinear (pe, t)
| inner_r <- arrayRank t,
inner_r > 0 = do
let outer_r = arrayRank (patElemType pe) - inner_r
pure $ Just $ rearrangeInverse $ [inner_r .. inner_r + outer_r - 1] ++ [0 .. inner_r - 1]
| otherwise = Nothing
transformStm :: ExpMap -> Stm GPU -> BabysitM ExpMap
transformStm expmap (Let pat aux (Op (SegOp op)))
FIXME : We only make coalescing optimisations for SegThread
SegOps , because that 's what the analysis assumes . For SegGroup
we should probably look at the component SegThreads , but it
| SegThread {} <- segLevel op = do
let mapper =
identitySegOpMapper
{ mapOnSegOpBody =
transformKernelBody expmap (segSpace op)
}
op' <- mapSegOpM mapper op
let stm' = Let pat aux $ Op $ SegOp op'
addStm stm'
pure $ M.fromList [(name, stm') | name <- patNames pat] <> expmap
transformStm expmap (Let pat aux e) = do
e' <- mapExpM (transform expmap) e
let stm' = Let pat aux e'
addStm stm'
pure $ M.fromList [(name, stm') | name <- patNames pat] <> expmap
transform :: ExpMap -> Mapper GPU GPU BabysitM
transform expmap =
identityMapper {mapOnBody = \scope -> localScope scope . transformBody expmap}
transformKernelBody ::
ExpMap ->
SegSpace ->
KernelBody GPU ->
BabysitM (KernelBody GPU)
transformKernelBody expmap space kbody = do
scope <- askScope
let thread_gids = map fst $ unSegSpace space
thread_local = namesFromList $ segFlat space : thread_gids
free_ker_vars = freeIn kbody `namesSubtract` getKerVariantIds space
evalStateT
( traverseKernelBodyArrayIndexes
free_ker_vars
thread_local
(scope <> scopeOfSegSpace space)
(ensureCoalescedAccess expmap (unSegSpace space))
kbody
)
mempty
where
getKerVariantIds = namesFromList . M.keys . scopeOfSegSpace
type ArrayIndexTransform m =
Names ->
variant to a certain gid ( given as first param ) ?
VName ->
Slice SubExp ->
m (Maybe (VName, Slice SubExp))
traverseKernelBodyArrayIndexes ::
Monad f =>
Names ->
Names ->
Scope GPU ->
ArrayIndexTransform f ->
KernelBody GPU ->
f (KernelBody GPU)
traverseKernelBodyArrayIndexes free_ker_vars thread_variant outer_scope f (KernelBody () kstms kres) =
KernelBody () . stmsFromList
<$> mapM
( onStm
( varianceInStms mempty kstms,
outer_scope
)
)
(stmsToList kstms)
<*> pure kres
where
onLambda (variance, scope) lam =
(\body' -> lam {lambdaBody = body'})
<$> onBody (variance, scope') (lambdaBody lam)
where
scope' = scope <> scopeOfLParams (lambdaParams lam)
onBody (variance, scope) (Body bdec stms bres) = do
stms' <- stmsFromList <$> mapM (onStm (variance', scope')) (stmsToList stms)
pure $ Body bdec stms' bres
where
variance' = varianceInStms variance stms
scope' = scope <> scopeOf stms
onStm (variance, _) (Let pat dec (BasicOp (Index arr is))) =
Let pat dec . oldOrNew <$> f free_ker_vars isThreadLocal isGidVariant outer_scope arr is
where
oldOrNew Nothing =
BasicOp $ Index arr is
oldOrNew (Just (arr', is')) =
BasicOp $ Index arr' is'
isGidVariant gid (Var v) =
gid == v || nameIn gid (M.findWithDefault (oneName v) v variance)
isGidVariant _ _ = False
isThreadLocal v =
thread_variant
`namesIntersect` M.findWithDefault (oneName v) v variance
onStm (variance, scope) (Let pat dec e) =
Let pat dec <$> mapExpM (mapper (variance, scope)) e
onOp ctx (OtherOp soac) =
OtherOp <$> mapSOACM identitySOACMapper {mapOnSOACLambda = onLambda ctx} soac
onOp _ op = pure op
mapper ctx =
identityMapper
{ mapOnBody = const (onBody ctx),
mapOnOp = onOp ctx
}
type Replacements = M.Map (VName, Slice SubExp) VName
ensureCoalescedAccess ::
MonadBuilder m =>
ExpMap ->
[(VName, SubExp)] ->
ArrayIndexTransform (StateT Replacements m)
ensureCoalescedAccess
expmap
thread_space
free_ker_vars
isThreadLocal
isGidVariant
outer_scope
arr
slice = do
seen <- gets $ M.lookup (arr, slice)
case (seen, isThreadLocal arr, typeOf <$> M.lookup arr outer_scope) of
(Just arr', _, _) ->
pure $ Just (arr', slice)
(Nothing, False, Just t)
| Just is <- sliceIndices slice,
length is == arrayRank t,
Just is' <- coalescedIndexes free_ker_vars isGidVariant (map Var thread_gids) is,
Just perm <- is' `isPermutationOf` is ->
replace =<< lift (rearrangeInput (nonlinearInMemory arr expmap) perm arr)
1 . get the permutation of the source - array rearrange
2 . apply it to the slice
3 . check that the innermost index is actually the gid
( 's Heuristic . )
| Just (Let _ _ (BasicOp (Rearrange perm _))) <- M.lookup arr expmap,
not $ null perm,
not $ null thread_gids,
inner_gid <- last thread_gids,
length slice >= length perm,
slice' <- map (unSlice slice !!) perm,
DimFix inner_ind <- last slice',
not $ null thread_gids,
isGidVariant inner_gid inner_ind ->
pure Nothing
Hence nothing to do at this point . ( 's Heuristic . )
| (is, rem_slice) <- splitSlice slice,
not $ null rem_slice,
allDimAreSlice rem_slice,
Nothing <- M.lookup arr expmap,
pt <- elemType t,
not $ tooSmallSlice (primByteSize pt) rem_slice,
is /= map Var (take (length is) thread_gids) || length is == length thread_gids,
not (null thread_gids || null is),
last thread_gids `notNameIn` (freeIn is <> freeIn rem_slice) ->
pure Nothing
| (is, rem_slice) <- splitSlice slice,
not $ null rem_slice,
pt <- elemType t,
not $ tooSmallSlice (primByteSize pt) rem_slice,
is /= map Var (take (length is) thread_gids) || length is == length thread_gids,
any isThreadLocal (namesToList $ freeIn is) -> do
let perm = coalescingPermutation (length is) $ arrayRank t
replace =<< lift (rearrangeInput (nonlinearInMemory arr expmap) perm arr)
| Just {} <- nonlinearInMemory arr expmap ->
case sliceIndices slice of
Just is
| Just _ <- coalescedIndexes free_ker_vars isGidVariant (map Var thread_gids) is ->
replace =<< lift (rowMajorArray arr)
| otherwise ->
pure Nothing
_ -> replace =<< lift (rowMajorArray arr)
_ -> pure Nothing
where
(thread_gids, _thread_gdims) = unzip thread_space
replace arr' = do
modify $ M.insert (arr, slice) arr'
pure $ Just (arr', slice)
tooSmallSlice :: Int32 -> Slice SubExp -> Bool
tooSmallSlice bs = fst . foldl comb (True, bs) . sliceDims
where
comb (True, x) (Constant (IntValue (Int32Value d))) = (d * x < 4, d * x)
comb (_, x) _ = (False, x)
splitSlice :: Slice SubExp -> ([SubExp], Slice SubExp)
splitSlice (Slice []) = ([], Slice [])
splitSlice (Slice (DimFix i : is)) = first (i :) $ splitSlice (Slice is)
splitSlice is = ([], is)
allDimAreSlice :: Slice SubExp -> Bool
allDimAreSlice (Slice []) = True
allDimAreSlice (Slice (DimFix _ : _)) = False
allDimAreSlice (Slice (_ : is)) = allDimAreSlice (Slice is)
coalescedIndexes :: Names -> (VName -> SubExp -> Bool) -> [SubExp] -> [SubExp] -> Maybe [SubExp]
coalescedIndexes free_ker_vars isGidVariant tgids is
1 . any of the indices is a constant or a kernel free variable
2 . the innermost index is variant to the innermost - thread gid
3 . the indexes are a prefix of the thread indexes , because that
| any isCt is =
Nothing
| any (`nameIn` free_ker_vars) (subExpVars is) =
Nothing
| is `isPrefixOf` tgids =
Nothing
| not (null tgids),
not (null is),
Var innergid <- last tgids,
num_is > 0 && isGidVariant innergid (last is) =
Just is
3 . Otherwise try fix coalescing
| otherwise =
Just $ reverse $ foldl move (reverse is) $ zip [0 ..] (reverse tgids)
where
num_is = length is
move is_rev (i, tgid)
| Just j <- elemIndex tgid is_rev,
i /= j,
i < num_is =
swap i j is_rev
| otherwise =
is_rev
swap i j l
| Just ix <- maybeNth i l,
Just jx <- maybeNth j l =
update i jx $ update j ix l
| otherwise =
error $ "coalescedIndexes swap: invalid indices" ++ show (i, j, l)
update 0 x (_ : ys) = x : ys
update i x (y : ys) = y : update (i - 1) x ys
update _ _ [] = error "coalescedIndexes: update"
isCt :: SubExp -> Bool
isCt (Constant _) = True
isCt (Var _) = False
coalescingPermutation :: Int -> Int -> [Int]
coalescingPermutation num_is rank =
[num_is .. rank - 1] ++ [0 .. num_is - 1]
rearrangeInput ::
MonadBuilder m =>
Maybe (Maybe [Int]) ->
[Int] ->
VName ->
m VName
rearrangeInput (Just (Just current_perm)) perm arr
rearrangeInput Nothing perm arr
rearrangeInput (Just Just {}) perm arr
rearrangeInput manifest perm arr = do
We may first manifest the array to ensure that it is flat in
manifested <- if isJust manifest then rowMajorArray arr else pure arr
letExp (baseString arr ++ "_coalesced") $
BasicOp $
Manifest perm manifested
rowMajorArray ::
MonadBuilder m =>
VName ->
m VName
rowMajorArray arr = do
rank <- arrayRank <$> lookupType arr
letExp (baseString arr ++ "_rowmajor") $ BasicOp $ Manifest [0 .. rank - 1] arr
type VarianceTable = M.Map VName Names
varianceInStms :: VarianceTable -> Stms GPU -> VarianceTable
varianceInStms t = foldl varianceInStm t . stmsToList
varianceInStm :: VarianceTable -> Stm GPU -> VarianceTable
varianceInStm variance stm =
foldl' add variance $ patNames $ stmPat stm
where
add variance' v = M.insert v binding_variance variance'
look variance' v = oneName v <> M.findWithDefault mempty v variance'
binding_variance = mconcat $ map (look variance) $ namesToList (freeIn stm)
|
6fa6f3c66f910ad495f980760105afd4d178e97391e241401c2105a124b0fd71 | jimcrayne/jhc | StrictNewtype.hs |
newtype TID = TID Int
deriving(Show)
data Foo = Foo !TID Char !Int
deriving(Show)
main :: IO ()
main = print (Foo (TID 3) 'x' 4)
| null | https://raw.githubusercontent.com/jimcrayne/jhc/1ff035af3d697f9175f8761c8d08edbffde03b4e/regress/tests/6_fixed_bugs/StrictNewtype.hs | haskell |
newtype TID = TID Int
deriving(Show)
data Foo = Foo !TID Char !Int
deriving(Show)
main :: IO ()
main = print (Foo (TID 3) 'x' 4)
| |
b11d957c1fb11772e7c4079f5e156e57698f0ab7d2ef72d2ff0fa687018596a2 | beerendlauwers/hakyll-extra | JSON.hs | module Hakyll.Core.Util.JSON where
import Hakyll
import Data.Aeson
import Codec.Binary.UTF8.Generic (toString)
-- | Produces a String that is valid JSON (can be copy-pasted into a browser and parsed).
renderToJSON :: ToJSON a => a -> String
renderToJSON = toString . encode | null | https://raw.githubusercontent.com/beerendlauwers/hakyll-extra/cc4741a9781412108926ac2d7cf70f52a5ee68a3/src/Hakyll/Core/Util/JSON.hs | haskell | | Produces a String that is valid JSON (can be copy-pasted into a browser and parsed). | module Hakyll.Core.Util.JSON where
import Hakyll
import Data.Aeson
import Codec.Binary.UTF8.Generic (toString)
renderToJSON :: ToJSON a => a -> String
renderToJSON = toString . encode |
7496c140ae61ca8a19a3b6385c819537900da3f31f8c67c31bfb46c9e26b7389 | kanwei/montebot | core_test.clj | (ns montebot.core-test
(:require [clojure.test :refer :all]
[montebot.core :refer :all]))
(deftest a-test
(testing "FIXME, I fail."
(is (= 0 1))))
| null | https://raw.githubusercontent.com/kanwei/montebot/72bccb9d94538c9aac8383b916c1100298d8bf1f/test/ulam/core_test.clj | clojure | (ns montebot.core-test
(:require [clojure.test :refer :all]
[montebot.core :refer :all]))
(deftest a-test
(testing "FIXME, I fail."
(is (= 0 1))))
| |
008d707cfd468f9f7695105c7a4dbcd7bb66d4f846b476218a6a03ce5693d64b | juxt/shop | routes.clj | Copyright © 2016 , JUXT LTD .
Our phonebook but as a single page application ( SPA )
(ns edge.phonebook-app.routes
(:require
[clojure.java.io :as io]
[integrant.core :as ig]
[selmer.parser :as selmer]
[yada.yada :as yada]))
(defn- routes
[{:edge.phonebook/keys [db]}]
[["" (yada/resource
{:id ::phonebook-app
:path-info? true ; We want to serve the same content for
; every path below here.
:methods
{:get
{:produces "text/html"
:response
(fn [ctx]
(selmer/render-file
"phonebook-app.html"
{:ctx ctx}
{:custom-resource-path (io/resource "phonebook-app/templates/")}))}}})]])
(defmethod ig/init-key :edge.phonebook-app/routes [_ config]
(routes config))
| null | https://raw.githubusercontent.com/juxt/shop/c23fc55bca1852bfbabb681a72debc12373c3a36/examples/phonebook-app/src/edge/phonebook_app/routes.clj | clojure | We want to serve the same content for
every path below here. | Copyright © 2016 , JUXT LTD .
Our phonebook but as a single page application ( SPA )
(ns edge.phonebook-app.routes
(:require
[clojure.java.io :as io]
[integrant.core :as ig]
[selmer.parser :as selmer]
[yada.yada :as yada]))
(defn- routes
[{:edge.phonebook/keys [db]}]
[["" (yada/resource
{:id ::phonebook-app
:methods
{:get
{:produces "text/html"
:response
(fn [ctx]
(selmer/render-file
"phonebook-app.html"
{:ctx ctx}
{:custom-resource-path (io/resource "phonebook-app/templates/")}))}}})]])
(defmethod ig/init-key :edge.phonebook-app/routes [_ config]
(routes config))
|
5cc6dc6ea3afe74fad6d72a0286c4eef18145b825a23450362c9f851d37aa75f | chris-moreton/plutus-scripts | plutus-helloworld-bytestring.hs |
import Prelude
import System.Environment
import Cardano.Api
import Cardano.Api.Shelley
import Data.Aeson (encode)
import qualified Data.ByteString.Short as SBS
import qualified Plutus.V1.Ledger.Api as Plutus
import PlutusTx.Prelude as P (ByteString)
import Cardano.PlutusExample.HelloWorldByteStringParametric (hello, helloWorldSBS, helloWorldSerialised)
main :: IO ()
main = do
args <- getArgs
let nargs = length args
let scriptname = if nargs > 1 then args!!1 else "result.plutus"
putStrLn $ "Writing output to: " ++ scriptname
writePlutusScript hello scriptname helloWorldSerialised helloWorldSBS
writePlutusScript :: P.ByteString -> FilePath -> PlutusScript PlutusScriptV1 -> SBS.ShortByteString -> IO ()
writePlutusScript datum filename scriptSerial scriptSBS =
do
case Plutus.defaultCostModelParams of
Just m ->
let pData = Plutus.toData datum
(logout, e) = Plutus.evaluateScriptCounting Plutus.Verbose m scriptSBS [pData]
in do print ("Log output" :: String) >> print logout
case e of
Left evalErr -> print ("Eval Error" :: String) >> print evalErr
Right exbudget -> print ("Ex Budget" :: String) >> print exbudget
print $ "Datum value: " <> encode (scriptDataToJson ScriptDataJsonDetailedSchema $ fromPlutusData pData)
Nothing -> error "defaultCostModelParams failed"
result <- writeFileTextEnvelope filename Nothing scriptSerial
case result of
Left err -> print $ displayError err
Right () -> return ()
| null | https://raw.githubusercontent.com/chris-moreton/plutus-scripts/fd09a54c00f1593da4a75e57f44d7ac773e356fd/plutus-sources/plutus-helloworld/app/plutus-helloworld-bytestring.hs | haskell |
import Prelude
import System.Environment
import Cardano.Api
import Cardano.Api.Shelley
import Data.Aeson (encode)
import qualified Data.ByteString.Short as SBS
import qualified Plutus.V1.Ledger.Api as Plutus
import PlutusTx.Prelude as P (ByteString)
import Cardano.PlutusExample.HelloWorldByteStringParametric (hello, helloWorldSBS, helloWorldSerialised)
main :: IO ()
main = do
args <- getArgs
let nargs = length args
let scriptname = if nargs > 1 then args!!1 else "result.plutus"
putStrLn $ "Writing output to: " ++ scriptname
writePlutusScript hello scriptname helloWorldSerialised helloWorldSBS
writePlutusScript :: P.ByteString -> FilePath -> PlutusScript PlutusScriptV1 -> SBS.ShortByteString -> IO ()
writePlutusScript datum filename scriptSerial scriptSBS =
do
case Plutus.defaultCostModelParams of
Just m ->
let pData = Plutus.toData datum
(logout, e) = Plutus.evaluateScriptCounting Plutus.Verbose m scriptSBS [pData]
in do print ("Log output" :: String) >> print logout
case e of
Left evalErr -> print ("Eval Error" :: String) >> print evalErr
Right exbudget -> print ("Ex Budget" :: String) >> print exbudget
print $ "Datum value: " <> encode (scriptDataToJson ScriptDataJsonDetailedSchema $ fromPlutusData pData)
Nothing -> error "defaultCostModelParams failed"
result <- writeFileTextEnvelope filename Nothing scriptSerial
case result of
Left err -> print $ displayError err
Right () -> return ()
| |
b8238641f52cf27b01e3d6d7a1b93ffb02aef01e96685f8bfc890192d52454cd | rohitjha/ProjectEuler | PE010.hs | import MPL.NumberTheory.Primes
main = putStrLn $ show $ sum $ primesTo 2000000
| null | https://raw.githubusercontent.com/rohitjha/ProjectEuler/2f0a46bb1547b06a373c30966bba7a001b932bf4/MPL/PE010.hs | haskell | import MPL.NumberTheory.Primes
main = putStrLn $ show $ sum $ primesTo 2000000
| |
bf82c0128368dcc3a0645743cfaff3ea533c53e311c5e53a652e8f3d1fa94a60 | haskell/haskell-language-server | T1.hs | fmapEither :: (a -> b) -> Either c a -> Either c b
fmapEither = _lalala
| null | https://raw.githubusercontent.com/haskell/haskell-language-server/f3ad27ba1634871b2240b8cd7de9f31b91a2e502/plugins/hls-tactics-plugin/new/test/golden/T1.hs | haskell | fmapEither :: (a -> b) -> Either c a -> Either c b
fmapEither = _lalala
| |
39c6e1daf1ce4c1ef6d5f9e44b31544adbe92f02105490c5ce5deb98c1c14414 | well-typed-lightbulbs/ocaml-esp32 | cloexec.ml | TEST
( *
This test is temporarily disabled on the MinGW and MSVC ports ,
because since fdstatus has been wrapped in an OCaml program ,
it does not work as well as before .
Presumably this is because the OCaml runtime opens files , so that handles
that have actually been closed at execution look open and make the
test fail .
One possible fix for this would be to make it possible for ocamltest to
compile C - only programs , which will be a bit of work to handle the
output of msvc and will also duplicate what the OCaml compiler itself
already does .
(*
This test is temporarily disabled on the MinGW and MSVC ports,
because since fdstatus has been wrapped in an OCaml program,
it does not work as well as before.
Presumably this is because the OCaml runtime opens files, so that handles
that have actually been closed at execution look open and make the
test fail.
One possible fix for this would be to make it possible for ocamltest to
compile C-only programs, which will be a bit of work to handle the
output of msvc and will also duplicate what the OCaml compiler itself
already does.
*)
* hasunix
include unix
files = "fdstatus_aux.c fdstatus_main.ml"
** libunix
*** setup-ocamlc.byte-build-env
program = "${test_build_directory}/cloexec.byte"
**** ocamlc.byte
program = "${test_build_directory}/fdstatus.exe"
all_modules = "fdstatus_aux.c fdstatus_main.ml"
***** ocamlc.byte
program = "${test_build_directory}/cloexec.byte"
all_modules= "cloexec.ml"
****** check-ocamlc.byte-output
******* run
******** check-program-output
*** setup-ocamlopt.byte-build-env
program = "${test_build_directory}/cloexec.opt"
**** ocamlopt.byte
program = "${test_build_directory}/fdstatus.exe"
all_modules = "fdstatus_aux.c fdstatus_main.ml"
***** ocamlopt.byte
program = "${test_build_directory}/cloexec.opt"
all_modules= "cloexec.ml"
****** check-ocamlopt.byte-output
******* run
******** check-program-output
*)
This is a terrible hack that plays on the internal representation
of file descriptors . The result is a number ( as a string )
that the fdstatus.exe auxiliary program can use to check whether
the fd is open . Moreover , since fdstatus.exe is an OCaml program ,
we must take into account that the Windows OCaml runtime opens a few handles
for its own use , hence we do likewise to try to get handle numbers
Windows will not allocate to the OCaml runtime of fdstatus.exe
of file descriptors. The result is a number (as a string)
that the fdstatus.exe auxiliary program can use to check whether
the fd is open. Moreover, since fdstatus.exe is an OCaml program,
we must take into account that the Windows OCaml runtime opens a few handles
for its own use, hence we do likewise to try to get handle numbers
Windows will not allocate to the OCaml runtime of fdstatus.exe *)
let string_of_fd (fd: Unix.file_descr) : string =
match Sys.os_type with
| "Unix" | "Cygwin" -> Int.to_string (Obj.magic fd : int)
| "Win32" ->
if Sys.word_size = 32 then
Int32.to_string (Obj.magic fd : int32)
else
Int64.to_string (Obj.magic fd : int64)
| _ -> assert false
let status_checker = "fdstatus.exe"
let _ =
let f0 = Unix.(openfile "tmp.txt" [O_WRONLY; O_CREAT; O_TRUNC] 0o600) in
let untested1 = Unix.(openfile "tmp.txt" [O_RDONLY; O_CLOEXEC] 0) in
let untested2 = Unix.(openfile "tmp.txt" [O_RDONLY; O_CLOEXEC] 0) in
let untested3 = Unix.(openfile "tmp.txt" [O_RDONLY; O_CLOEXEC] 0) in
let untested4 = Unix.(openfile "tmp.txt" [O_RDONLY; O_CLOEXEC] 0) in
let untested5 = Unix.(openfile "tmp.txt" [O_RDONLY; O_CLOEXEC] 0) in
let f1 = Unix.(openfile "tmp.txt" [O_RDONLY; O_KEEPEXEC] 0) in
let f2 = Unix.(openfile "tmp.txt" [O_RDONLY; O_CLOEXEC] 0) in
let d0 = Unix.dup f0 in
let d1 = Unix.dup ~cloexec:false f1 in
let d2 = Unix.dup ~cloexec:true f2 in
let (p0, p0') = Unix.pipe () in
let (p1, p1') = Unix.pipe ~cloexec:false () in
let (p2, p2') = Unix.pipe ~cloexec:true () in
let s0 = Unix.(socket PF_INET SOCK_STREAM 0) in
let s1 = Unix.(socket ~cloexec:false PF_INET SOCK_STREAM 0) in
let s2 = Unix.(socket ~cloexec:true PF_INET SOCK_STREAM 0) in
let (x0, x0') =
try Unix.(socketpair PF_UNIX SOCK_STREAM 0)
with Invalid_argument _ -> (p0, p0') in
socketpair not available under ; keep the same output
let (x1, x1') =
try Unix.(socketpair ~cloexec:false PF_UNIX SOCK_STREAM 0)
with Invalid_argument _ -> (p1, p1') in
let (x2, x2') =
try Unix.(socketpair ~cloexec:true PF_UNIX SOCK_STREAM 0)
with Invalid_argument _ -> (p2, p2') in
let fds = [| f0;f1;f2; d0;d1;d2;
p0;p0';p1;p1';p2;p2';
s0;s1;s2;
x0;x0';x1;x1';x2;x2' |] in
let untested =
[untested1; untested2; untested3; untested4; untested5]
in
let pid =
Unix.create_process
(Filename.concat Filename.current_dir_name status_checker)
(Array.append [| status_checker |] (Array.map string_of_fd fds))
Unix.stdin Unix.stdout Unix.stderr in
ignore (Unix.waitpid [] pid);
let close fd = try Unix.close fd with Unix.Unix_error _ -> () in
Array.iter close fds;
List.iter close untested;
Sys.remove "tmp.txt"
| null | https://raw.githubusercontent.com/well-typed-lightbulbs/ocaml-esp32/c24fcbfbee0e3aa6bb71c9b467c60c6bac326cc7/testsuite/tests/lib-unix/common/cloexec.ml | ocaml |
This test is temporarily disabled on the MinGW and MSVC ports,
because since fdstatus has been wrapped in an OCaml program,
it does not work as well as before.
Presumably this is because the OCaml runtime opens files, so that handles
that have actually been closed at execution look open and make the
test fail.
One possible fix for this would be to make it possible for ocamltest to
compile C-only programs, which will be a bit of work to handle the
output of msvc and will also duplicate what the OCaml compiler itself
already does.
| TEST
( *
This test is temporarily disabled on the MinGW and MSVC ports ,
because since fdstatus has been wrapped in an OCaml program ,
it does not work as well as before .
Presumably this is because the OCaml runtime opens files , so that handles
that have actually been closed at execution look open and make the
test fail .
One possible fix for this would be to make it possible for ocamltest to
compile C - only programs , which will be a bit of work to handle the
output of msvc and will also duplicate what the OCaml compiler itself
already does .
* hasunix
include unix
files = "fdstatus_aux.c fdstatus_main.ml"
** libunix
*** setup-ocamlc.byte-build-env
program = "${test_build_directory}/cloexec.byte"
**** ocamlc.byte
program = "${test_build_directory}/fdstatus.exe"
all_modules = "fdstatus_aux.c fdstatus_main.ml"
***** ocamlc.byte
program = "${test_build_directory}/cloexec.byte"
all_modules= "cloexec.ml"
****** check-ocamlc.byte-output
******* run
******** check-program-output
*** setup-ocamlopt.byte-build-env
program = "${test_build_directory}/cloexec.opt"
**** ocamlopt.byte
program = "${test_build_directory}/fdstatus.exe"
all_modules = "fdstatus_aux.c fdstatus_main.ml"
***** ocamlopt.byte
program = "${test_build_directory}/cloexec.opt"
all_modules= "cloexec.ml"
****** check-ocamlopt.byte-output
******* run
******** check-program-output
*)
This is a terrible hack that plays on the internal representation
of file descriptors . The result is a number ( as a string )
that the fdstatus.exe auxiliary program can use to check whether
the fd is open . Moreover , since fdstatus.exe is an OCaml program ,
we must take into account that the Windows OCaml runtime opens a few handles
for its own use , hence we do likewise to try to get handle numbers
Windows will not allocate to the OCaml runtime of fdstatus.exe
of file descriptors. The result is a number (as a string)
that the fdstatus.exe auxiliary program can use to check whether
the fd is open. Moreover, since fdstatus.exe is an OCaml program,
we must take into account that the Windows OCaml runtime opens a few handles
for its own use, hence we do likewise to try to get handle numbers
Windows will not allocate to the OCaml runtime of fdstatus.exe *)
let string_of_fd (fd: Unix.file_descr) : string =
match Sys.os_type with
| "Unix" | "Cygwin" -> Int.to_string (Obj.magic fd : int)
| "Win32" ->
if Sys.word_size = 32 then
Int32.to_string (Obj.magic fd : int32)
else
Int64.to_string (Obj.magic fd : int64)
| _ -> assert false
let status_checker = "fdstatus.exe"
let _ =
let f0 = Unix.(openfile "tmp.txt" [O_WRONLY; O_CREAT; O_TRUNC] 0o600) in
let untested1 = Unix.(openfile "tmp.txt" [O_RDONLY; O_CLOEXEC] 0) in
let untested2 = Unix.(openfile "tmp.txt" [O_RDONLY; O_CLOEXEC] 0) in
let untested3 = Unix.(openfile "tmp.txt" [O_RDONLY; O_CLOEXEC] 0) in
let untested4 = Unix.(openfile "tmp.txt" [O_RDONLY; O_CLOEXEC] 0) in
let untested5 = Unix.(openfile "tmp.txt" [O_RDONLY; O_CLOEXEC] 0) in
let f1 = Unix.(openfile "tmp.txt" [O_RDONLY; O_KEEPEXEC] 0) in
let f2 = Unix.(openfile "tmp.txt" [O_RDONLY; O_CLOEXEC] 0) in
let d0 = Unix.dup f0 in
let d1 = Unix.dup ~cloexec:false f1 in
let d2 = Unix.dup ~cloexec:true f2 in
let (p0, p0') = Unix.pipe () in
let (p1, p1') = Unix.pipe ~cloexec:false () in
let (p2, p2') = Unix.pipe ~cloexec:true () in
let s0 = Unix.(socket PF_INET SOCK_STREAM 0) in
let s1 = Unix.(socket ~cloexec:false PF_INET SOCK_STREAM 0) in
let s2 = Unix.(socket ~cloexec:true PF_INET SOCK_STREAM 0) in
let (x0, x0') =
try Unix.(socketpair PF_UNIX SOCK_STREAM 0)
with Invalid_argument _ -> (p0, p0') in
socketpair not available under ; keep the same output
let (x1, x1') =
try Unix.(socketpair ~cloexec:false PF_UNIX SOCK_STREAM 0)
with Invalid_argument _ -> (p1, p1') in
let (x2, x2') =
try Unix.(socketpair ~cloexec:true PF_UNIX SOCK_STREAM 0)
with Invalid_argument _ -> (p2, p2') in
let fds = [| f0;f1;f2; d0;d1;d2;
p0;p0';p1;p1';p2;p2';
s0;s1;s2;
x0;x0';x1;x1';x2;x2' |] in
let untested =
[untested1; untested2; untested3; untested4; untested5]
in
let pid =
Unix.create_process
(Filename.concat Filename.current_dir_name status_checker)
(Array.append [| status_checker |] (Array.map string_of_fd fds))
Unix.stdin Unix.stdout Unix.stderr in
ignore (Unix.waitpid [] pid);
let close fd = try Unix.close fd with Unix.Unix_error _ -> () in
Array.iter close fds;
List.iter close untested;
Sys.remove "tmp.txt"
|
e66593a2b23ec59aa9efaa07fe32261225ce2fd9cce0a33f0a3fc22008af3417 | dongcarl/guix | graph.scm | ;;; GNU Guix --- Functional package management for GNU
Copyright © 2015 , 2016 , 2020 , 2021 < >
Copyright © 2016 < >
;;;
;;; This file is part of GNU Guix.
;;;
GNU is free software ; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 3 of the License , or ( at
;;; your option) any later version.
;;;
;;; GNU Guix is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (guix graph)
#:use-module (guix store)
#:use-module (guix monads)
#:use-module (guix records)
#:use-module (guix sets)
#:use-module (rnrs io ports)
#:use-module (srfi srfi-1)
#:use-module (srfi srfi-9)
#:use-module (srfi srfi-26)
#:use-module (ice-9 match)
#:use-module (ice-9 vlist)
#:export (node-type
node-type?
node-type-identifier
node-type-label
node-type-edges
node-type-convert
node-type-name
node-type-description
node-edges
node-back-edges
traverse/depth-first
node-transitive-edges
node-reachable-count
shortest-path
%graph-backends
%d3js-backend
%graphviz-backend
graph-backend?
graph-backend
graph-backend-name
graph-backend-description
export-graph))
;;; Commentary:
;;;
;;; This module provides an abstract way to represent graphs and to manipulate
;;; them. It comes with several such representations for packages,
;;; derivations, and store items. It also provides a generic interface for
exporting graphs in an external format , including a Graphviz
;;; implementation thereof.
;;;
;;; Code:
;;;
;;; Node types.
;;;
(define-record-type* <node-type> node-type make-node-type
node-type?
(identifier node-type-identifier) ;node -> M identifier
(label node-type-label) ;node -> string
(edges node-type-edges) ;node -> M list of nodes
(convert node-type-convert ;any -> M list of nodes
(default (lift1 list %store-monad)))
(name node-type-name) ;string
(description node-type-description)) ;string
(define (%node-edges type nodes cons-edge)
(with-monad %store-monad
(match type
(($ <node-type> identifier label node-edges)
(define (add-edge node edges)
(>>= (node-edges node)
(lambda (nodes)
(return (fold (cut cons-edge node <> <>)
edges nodes)))))
(mlet %store-monad ((edges (foldm %store-monad
add-edge vlist-null nodes)))
(return (lambda (node)
(reverse (vhash-foldq* cons '() node edges)))))))))
(define (node-edges type nodes)
"Return, as a monadic value, a one-argument procedure that, given a node of TYPE,
returns its edges. NODES is taken to be the sinks of the global graph."
(%node-edges type nodes
(lambda (source target edges)
(vhash-consq source target edges))))
(define (node-back-edges type nodes)
"Return, as a monadic value, a one-argument procedure that, given a node of TYPE,
returns its back edges. NODES is taken to be the sinks of the global graph."
(%node-edges type nodes
(lambda (source target edges)
(vhash-consq target source edges))))
(define (traverse/depth-first proc seed nodes node-edges)
"Do a depth-first traversal of NODES along NODE-EDGES, calling PROC with
each node and the current result, and visiting each reachable node exactly
once. NODES must be a list of nodes, and NODE-EDGES must be a one-argument
procedure as returned by 'node-edges' or 'node-back-edges'."
(let loop ((nodes (append-map node-edges nodes))
(result seed)
(visited (setq)))
(match nodes
(()
result)
((head . tail)
(if (set-contains? visited head)
(loop tail result visited)
(let ((edges (node-edges head)))
(loop (append edges tail)
(proc head result)
(set-insert head visited))))))))
(define (node-transitive-edges nodes node-edges)
"Return the list of nodes directly or indirectly connected to NODES
according to the NODE-EDGES procedure. NODE-EDGES must be a one-argument
procedure that, given a node, returns its list of direct dependents; it is
typically returned by 'node-edges' or 'node-back-edges'."
(traverse/depth-first cons '() nodes node-edges))
(define (node-reachable-count nodes node-edges)
"Return the number of nodes reachable from NODES along NODE-EDGES."
(traverse/depth-first (lambda (_ count)
(+ 1 count))
0
nodes node-edges))
(define (shortest-path node1 node2 type)
"Return as a monadic value the shortest path, represented as a list, from
NODE1 to NODE2 of the given TYPE. Return #f when there is no path."
(define node-edges
(node-type-edges type))
(define (find-shortest lst)
Return the shortest path among LST , where each path is represented as a
;; vlist.
(let loop ((lst lst)
(best +inf.0)
(shortest #f))
(match lst
(()
shortest)
((head . tail)
(let ((len (vlist-length head)))
(if (< len best)
(loop tail len head)
(loop tail best shortest)))))))
(define (find-path node path paths)
;; Return the a vhash that maps nodes to paths, with each path from the
given node to NODE2 .
(define (augment-paths child paths)
;; When using %REFERENCE-NODE-TYPE, nodes can contain self references,
;; hence this test.
(if (eq? child node)
(store-return paths)
(find-path child vlist-null paths)))
(cond ((eq? node node2)
(store-return (vhash-consq node (vlist-cons node path)
paths)))
((vhash-assq node paths)
(store-return paths))
(else
XXX : We could stop recursing if one if CHILDREN is NODE2 , but in
;; practice it's good enough.
(mlet* %store-monad ((children (node-edges node))
(paths (foldm %store-monad
augment-paths
paths
children)))
(define sub-paths
(filter-map (lambda (child)
(match (vhash-assq child paths)
(#f #f)
((_ . path) path)))
children))
(match sub-paths
(()
(return (vhash-consq node #f paths)))
(lst
(return (vhash-consq node
(vlist-cons node (find-shortest sub-paths))
paths))))))))
(mlet %store-monad ((paths (find-path node1
(vlist-cons node1 vlist-null)
vlist-null)))
(return (match (vhash-assq node1 paths)
((_ . #f) #f)
((_ . path) (vlist->list path))))))
;;;
Graphviz export .
;;;
(define-record-type <graph-backend>
(graph-backend name description prologue epilogue node edge)
graph-backend?
(name graph-backend-name)
(description graph-backend-description)
(prologue graph-backend-prologue)
(epilogue graph-backend-epilogue)
(node graph-backend-node)
(edge graph-backend-edge))
(define %colors
See colortbl.h in Graphviz .
#("red" "magenta" "blue" "cyan3" "darkseagreen"
"peachpuff4" "darkviolet" "dimgrey" "darkgoldenrod"))
(define (pop-color hint)
"Return a Graphviz color based on HINT, an arbitrary object."
(let ((index (hash hint (vector-length %colors))))
(vector-ref %colors index)))
(define (emit-prologue name port)
(format port "digraph \"Guix ~a\" {\n"
name))
(define (emit-epilogue port)
(display "\n}\n" port))
(define (emit-node id label port)
(format port " \"~a\" [label = \"~a\", shape = box, fontname = sans];~%"
id label))
(define (emit-edge id1 id2 port)
(format port " \"~a\" -> \"~a\" [color = ~a];~%"
id1 id2 (pop-color id1)))
(define %graphviz-backend
(graph-backend "graphviz"
"Generate graph in DOT format for use with Graphviz."
emit-prologue emit-epilogue
emit-node emit-edge))
;;;
;;; d3js export.
;;;
(define (emit-d3js-prologue name port)
(format port "\
<!DOCTYPE html>
<html>
<head>
<meta charset=\"utf-8\">
<style>
text {
font: 10px sans-serif;
pointer-events: none;
}
</style>
<script type=\"text/javascript\" src=\"~a\"></script>
</head>
<body>
<script type=\"text/javascript\">
var nodes = {},
nodeArray = [],
links = [];
" (search-path %load-path "guix/d3.v3.js")))
(define (emit-d3js-epilogue port)
(format port "</script><script type=\"text/javascript\" src=\"~a\"></script></body></html>"
(search-path %load-path "guix/graph.js")))
(define (emit-d3js-node id label port)
(format port "\
nodes[\"~a\"] = {\"id\": \"~a\", \"label\": \"~a\", \"index\": nodeArray.length};
nodeArray.push(nodes[\"~a\"]);~%"
id id label id))
(define (emit-d3js-edge id1 id2 port)
(format port "links.push({\"source\": \"~a\", \"target\": \"~a\"});~%"
id1 id2))
(define %d3js-backend
(graph-backend "d3js"
"Generate chord diagrams with d3js."
emit-d3js-prologue emit-d3js-epilogue
emit-d3js-node emit-d3js-edge))
;;;
Cypher export .
;;;
(define (emit-cypher-prologue name port)
(format port ""))
(define (emit-cypher-epilogue port)
(format port ""))
(define (emit-cypher-node id label port)
(format port "MERGE (p:Package { id: ~s }) SET p.name = ~s;~%"
id label ))
(define (emit-cypher-edge id1 id2 port)
(format port "MERGE (a:Package { id: ~s });~%" id1)
(format port "MERGE (b:Package { id: ~s });~%" id2)
(format port "MATCH (a:Package { id: ~s }), (b:Package { id: ~s }) CREATE UNIQUE (a)-[:NEEDS]->(b);~%"
id1 id2))
(define %cypher-backend
(graph-backend "cypher"
"Generate Cypher queries."
emit-cypher-prologue emit-cypher-epilogue
emit-cypher-node emit-cypher-edge))
;;;
;;; Shared.
;;;
(define %graph-backends
(list %graphviz-backend
%d3js-backend
%cypher-backend))
(define* (export-graph sinks port
#:key
reverse-edges? node-type
(backend %graphviz-backend))
"Write to PORT the representation of the DAG with the given SINKS, using the
given BACKEND. Use NODE-TYPE to traverse the DAG. When REVERSE-EDGES? is
true, draw reverse arrows."
(match backend
(($ <graph-backend> _ _ emit-prologue emit-epilogue emit-node emit-edge)
(emit-prologue (node-type-name node-type) port)
(match node-type
(($ <node-type> node-identifier node-label node-edges)
(let loop ((nodes sinks)
(visited (set)))
(match nodes
(()
(with-monad %store-monad
(emit-epilogue port)
(store-return #t)))
((head . tail)
(mlet %store-monad ((id (node-identifier head)))
(if (set-contains? visited id)
(loop tail visited)
(mlet* %store-monad ((dependencies (node-edges head))
(ids (mapm %store-monad
node-identifier
dependencies)))
(emit-node id (node-label head) port)
(for-each (lambda (dependency dependency-id)
(if reverse-edges?
(emit-edge dependency-id id port)
(emit-edge id dependency-id port)))
dependencies ids)
(loop (append dependencies tail)
(set-insert id visited)))))))))))))
;;; graph.scm ends here
| null | https://raw.githubusercontent.com/dongcarl/guix/82543e9649da2da9a5285ede4ec4f718fd740fcb/guix/graph.scm | scheme | GNU Guix --- Functional package management for GNU
This file is part of GNU Guix.
you can redistribute it and/or modify it
either version 3 of the License , or ( at
your option) any later version.
GNU Guix is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
Commentary:
This module provides an abstract way to represent graphs and to manipulate
them. It comes with several such representations for packages,
derivations, and store items. It also provides a generic interface for
implementation thereof.
Code:
Node types.
node -> M identifier
node -> string
node -> M list of nodes
any -> M list of nodes
string
string
it is
vlist.
Return the a vhash that maps nodes to paths, with each path from the
When using %REFERENCE-NODE-TYPE, nodes can contain self references,
hence this test.
practice it's good enough.
d3js export.
~%"
Shared.
graph.scm ends here | Copyright © 2015 , 2016 , 2020 , 2021 < >
Copyright © 2016 < >
under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (guix graph)
#:use-module (guix store)
#:use-module (guix monads)
#:use-module (guix records)
#:use-module (guix sets)
#:use-module (rnrs io ports)
#:use-module (srfi srfi-1)
#:use-module (srfi srfi-9)
#:use-module (srfi srfi-26)
#:use-module (ice-9 match)
#:use-module (ice-9 vlist)
#:export (node-type
node-type?
node-type-identifier
node-type-label
node-type-edges
node-type-convert
node-type-name
node-type-description
node-edges
node-back-edges
traverse/depth-first
node-transitive-edges
node-reachable-count
shortest-path
%graph-backends
%d3js-backend
%graphviz-backend
graph-backend?
graph-backend
graph-backend-name
graph-backend-description
export-graph))
exporting graphs in an external format , including a Graphviz
(define-record-type* <node-type> node-type make-node-type
node-type?
(default (lift1 list %store-monad)))
(define (%node-edges type nodes cons-edge)
(with-monad %store-monad
(match type
(($ <node-type> identifier label node-edges)
(define (add-edge node edges)
(>>= (node-edges node)
(lambda (nodes)
(return (fold (cut cons-edge node <> <>)
edges nodes)))))
(mlet %store-monad ((edges (foldm %store-monad
add-edge vlist-null nodes)))
(return (lambda (node)
(reverse (vhash-foldq* cons '() node edges)))))))))
(define (node-edges type nodes)
"Return, as a monadic value, a one-argument procedure that, given a node of TYPE,
returns its edges. NODES is taken to be the sinks of the global graph."
(%node-edges type nodes
(lambda (source target edges)
(vhash-consq source target edges))))
(define (node-back-edges type nodes)
"Return, as a monadic value, a one-argument procedure that, given a node of TYPE,
returns its back edges. NODES is taken to be the sinks of the global graph."
(%node-edges type nodes
(lambda (source target edges)
(vhash-consq target source edges))))
(define (traverse/depth-first proc seed nodes node-edges)
"Do a depth-first traversal of NODES along NODE-EDGES, calling PROC with
each node and the current result, and visiting each reachable node exactly
once. NODES must be a list of nodes, and NODE-EDGES must be a one-argument
procedure as returned by 'node-edges' or 'node-back-edges'."
(let loop ((nodes (append-map node-edges nodes))
(result seed)
(visited (setq)))
(match nodes
(()
result)
((head . tail)
(if (set-contains? visited head)
(loop tail result visited)
(let ((edges (node-edges head)))
(loop (append edges tail)
(proc head result)
(set-insert head visited))))))))
(define (node-transitive-edges nodes node-edges)
"Return the list of nodes directly or indirectly connected to NODES
according to the NODE-EDGES procedure. NODE-EDGES must be a one-argument
typically returned by 'node-edges' or 'node-back-edges'."
(traverse/depth-first cons '() nodes node-edges))
(define (node-reachable-count nodes node-edges)
"Return the number of nodes reachable from NODES along NODE-EDGES."
(traverse/depth-first (lambda (_ count)
(+ 1 count))
0
nodes node-edges))
(define (shortest-path node1 node2 type)
"Return as a monadic value the shortest path, represented as a list, from
NODE1 to NODE2 of the given TYPE. Return #f when there is no path."
(define node-edges
(node-type-edges type))
(define (find-shortest lst)
Return the shortest path among LST , where each path is represented as a
(let loop ((lst lst)
(best +inf.0)
(shortest #f))
(match lst
(()
shortest)
((head . tail)
(let ((len (vlist-length head)))
(if (< len best)
(loop tail len head)
(loop tail best shortest)))))))
(define (find-path node path paths)
given node to NODE2 .
(define (augment-paths child paths)
(if (eq? child node)
(store-return paths)
(find-path child vlist-null paths)))
(cond ((eq? node node2)
(store-return (vhash-consq node (vlist-cons node path)
paths)))
((vhash-assq node paths)
(store-return paths))
(else
XXX : We could stop recursing if one if CHILDREN is NODE2 , but in
(mlet* %store-monad ((children (node-edges node))
(paths (foldm %store-monad
augment-paths
paths
children)))
(define sub-paths
(filter-map (lambda (child)
(match (vhash-assq child paths)
(#f #f)
((_ . path) path)))
children))
(match sub-paths
(()
(return (vhash-consq node #f paths)))
(lst
(return (vhash-consq node
(vlist-cons node (find-shortest sub-paths))
paths))))))))
(mlet %store-monad ((paths (find-path node1
(vlist-cons node1 vlist-null)
vlist-null)))
(return (match (vhash-assq node1 paths)
((_ . #f) #f)
((_ . path) (vlist->list path))))))
Graphviz export .
(define-record-type <graph-backend>
(graph-backend name description prologue epilogue node edge)
graph-backend?
(name graph-backend-name)
(description graph-backend-description)
(prologue graph-backend-prologue)
(epilogue graph-backend-epilogue)
(node graph-backend-node)
(edge graph-backend-edge))
(define %colors
See colortbl.h in Graphviz .
#("red" "magenta" "blue" "cyan3" "darkseagreen"
"peachpuff4" "darkviolet" "dimgrey" "darkgoldenrod"))
(define (pop-color hint)
"Return a Graphviz color based on HINT, an arbitrary object."
(let ((index (hash hint (vector-length %colors))))
(vector-ref %colors index)))
(define (emit-prologue name port)
(format port "digraph \"Guix ~a\" {\n"
name))
(define (emit-epilogue port)
(display "\n}\n" port))
(define (emit-node id label port)
(format port " \"~a\" [label = \"~a\", shape = box, fontname = sans];~%"
id label))
(define (emit-edge id1 id2 port)
(format port " \"~a\" -> \"~a\" [color = ~a];~%"
id1 id2 (pop-color id1)))
(define %graphviz-backend
(graph-backend "graphviz"
"Generate graph in DOT format for use with Graphviz."
emit-prologue emit-epilogue
emit-node emit-edge))
(define (emit-d3js-prologue name port)
(format port "\
<!DOCTYPE html>
<html>
<head>
<meta charset=\"utf-8\">
<style>
text {
}
</style>
<script type=\"text/javascript\" src=\"~a\"></script>
</head>
<body>
<script type=\"text/javascript\">
var nodes = {},
nodeArray = [],
" (search-path %load-path "guix/d3.v3.js")))
(define (emit-d3js-epilogue port)
(format port "</script><script type=\"text/javascript\" src=\"~a\"></script></body></html>"
(search-path %load-path "guix/graph.js")))
(define (emit-d3js-node id label port)
(format port "\
id id label id))
(define (emit-d3js-edge id1 id2 port)
(format port "links.push({\"source\": \"~a\", \"target\": \"~a\"});~%"
id1 id2))
(define %d3js-backend
(graph-backend "d3js"
"Generate chord diagrams with d3js."
emit-d3js-prologue emit-d3js-epilogue
emit-d3js-node emit-d3js-edge))
Cypher export .
(define (emit-cypher-prologue name port)
(format port ""))
(define (emit-cypher-epilogue port)
(format port ""))
(define (emit-cypher-node id label port)
(format port "MERGE (p:Package { id: ~s }) SET p.name = ~s;~%"
id label ))
(define (emit-cypher-edge id1 id2 port)
(format port "MERGE (a:Package { id: ~s });~%" id1)
(format port "MERGE (b:Package { id: ~s });~%" id2)
(format port "MATCH (a:Package { id: ~s }), (b:Package { id: ~s }) CREATE UNIQUE (a)-[:NEEDS]->(b);~%"
id1 id2))
(define %cypher-backend
(graph-backend "cypher"
"Generate Cypher queries."
emit-cypher-prologue emit-cypher-epilogue
emit-cypher-node emit-cypher-edge))
(define %graph-backends
(list %graphviz-backend
%d3js-backend
%cypher-backend))
(define* (export-graph sinks port
#:key
reverse-edges? node-type
(backend %graphviz-backend))
"Write to PORT the representation of the DAG with the given SINKS, using the
given BACKEND. Use NODE-TYPE to traverse the DAG. When REVERSE-EDGES? is
true, draw reverse arrows."
(match backend
(($ <graph-backend> _ _ emit-prologue emit-epilogue emit-node emit-edge)
(emit-prologue (node-type-name node-type) port)
(match node-type
(($ <node-type> node-identifier node-label node-edges)
(let loop ((nodes sinks)
(visited (set)))
(match nodes
(()
(with-monad %store-monad
(emit-epilogue port)
(store-return #t)))
((head . tail)
(mlet %store-monad ((id (node-identifier head)))
(if (set-contains? visited id)
(loop tail visited)
(mlet* %store-monad ((dependencies (node-edges head))
(ids (mapm %store-monad
node-identifier
dependencies)))
(emit-node id (node-label head) port)
(for-each (lambda (dependency dependency-id)
(if reverse-edges?
(emit-edge dependency-id id port)
(emit-edge id dependency-id port)))
dependencies ids)
(loop (append dependencies tail)
(set-insert id visited)))))))))))))
|
51afabddc6e8db7b4f07f0866539ba71febf09235997c2006da468196db67763 | babashka/nbb | script.cljs | (ns script
(:require ["fs" :as fs] ;; verify that required namespaces can in turn also load node modules
built in namespace , continue processing libspecs ,
[other-script :as o :refer [another-fn] :rename {another-fn foo}]))
(defn script-fn []
(fs/existsSync ".")
(when (and (= :yolo (o/script-fn))
(= :another-fn (foo)))
:hello))
(+ 1 2 3)
| null | https://raw.githubusercontent.com/babashka/nbb/4d06aa142a5fb5baac48a8ad8e611d672f779b5f/test-scripts/script.cljs | clojure | verify that required namespaces can in turn also load node modules | (ns script
built in namespace , continue processing libspecs ,
[other-script :as o :refer [another-fn] :rename {another-fn foo}]))
(defn script-fn []
(fs/existsSync ".")
(when (and (= :yolo (o/script-fn))
(= :another-fn (foo)))
:hello))
(+ 1 2 3)
|
4eb58d2c171e5de9362491078fe42aa7814ce2adf654fedd0af4f37ee66f4e95 | inria-parkas/sundialsml | sundials_NonlinearSolver.mli | (***********************************************************************)
(* *)
(* OCaml interface to Sundials *)
(* *)
, , and
( / ENS ) ( / ENS ) ( UPMC / ENS / Inria )
(* *)
Copyright 2020 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
(* under a New BSD License, refer to the file LICENSE. *)
(* *)
(***********************************************************************)
* Generic nonlinear solvers .
Sundials provides generic nonlinear solvers of two main types :
{ ! module : } and { ! module : FixedPoint } . An instance of a nonlinear
solver may only be associated with at most one integrator session at
a time .
This module supports calling both Sundials and custom OCaml nonlinear
solvers from both Sundials integrators and OCaml applications .
This documentation is structured as follows .
{ ol
{ - { { : # nlscore}Core functions } }
{ - { { : # nlsset}Set functions } }
{ - { { : # nlsget}Get functions } }
{ - { { : # nlssolvers}Nonlinear Solver Implementations } }
{ - { { : # nlsexceptions}Exceptions } } }
@version VERSION ( )
@author ( Inria / ENS )
@author ( Inria / ENS )
@author ( UPMC / ENS / Inria )
@nonlinsol < SUNNonlinSol_API_link.html#the-sunnonlinearsolver-api > The SUNNonlinearSolver API
@since 4.0.0
Sundials provides generic nonlinear solvers of two main types:
{!module:Newton} and {!module:FixedPoint}. An instance of a nonlinear
solver may only be associated with at most one integrator session at
a time.
This module supports calling both Sundials and custom OCaml nonlinear
solvers from both Sundials integrators and OCaml applications.
This documentation is structured as follows.
{ol
{- {{:#nlscore}Core functions}}
{- {{:#nlsset}Set functions}}
{- {{:#nlsget}Get functions}}
{- {{:#nlssolvers}Nonlinear Solver Implementations}}
{- {{:#nlsexceptions}Exceptions}}}
@version VERSION()
@author Timothy Bourke (Inria/ENS)
@author Jun Inoue (Inria/ENS)
@author Marc Pouzet (UPMC/ENS/Inria)
@nonlinsol <SUNNonlinSol_API_link.html#the-sunnonlinearsolver-api> The SUNNonlinearSolver API
@since 4.0.0 *)
open Sundials
* A generic nonlinear solver .
The type variables specify
- [ ' data ] , the { ! Nvector.nvector } data ,
- [ ' kind ] , the { ! Nvector.nvector } kind ,
- [ 's ] , the type of of session data to be passed
into the nonlinear solver and through to callbacks , and ,
- [ ' v ] , a type indicating that the solver manipulates ( [ ` Nvec ] )
or { { ! Senswrapper.t}senswrappers } ( [ ` Sens ] ) .
@nonlinsol SUNNonlinearSolver
The type variables specify
- ['data], the {!Nvector.nvector} data,
- ['kind], the {!Nvector.nvector} kind,
- ['s], the type of of session data to be passed
into the nonlinear solver and through to callbacks, and,
- ['v], a type indicating that the solver manipulates nvectors ([`Nvec])
or {{!Senswrapper.t}senswrappers} ([`Sens]).
@nonlinsol SUNNonlinearSolver *)
type ('data, 'kind, 's, 'v) t
= ('data, 'kind, 's, 'v) Sundials_NonlinearSolver_impl.nonlinear_solver
* A limited interface to arrays of { ! Nvector.nvector}s required
to apply nonlinear solvers to sensitivity problems .
to apply nonlinear solvers to sensitivity problems. *)
module Senswrapper : sig (* {{{ *)
(** A senswrapper is an {!Nvector.nvector} of {!Nvector.nvector}s that
cannot be created or manipulated from OCaml. *)
type ('d, 'k) t = ('d, 'k) Sundials_NonlinearSolver_impl.Senswrapper.t
(** Creates an array to the nvector data within a senswrapper.
Given [s1, s2 : ('d, 'k) t] where [s1 = s2], then [data s1]
and [data s2] access the same underlying data. This fact can
be exploited for caching the array in callbacks.
@raise IncorrectUse Attempt to access an invalidated senswrapper *)
val data : ('d, 'k) t -> 'd array
end (* }}} *)
* { 2 : nlscore Core functions }
(** The problem specification expected by a nonlinear solver. *)
type nonlinear_solver_type =
| RootFind (** Solves {% $F(y) = 0$ %} *)
| FixedPoint (** Solves {% $G(y) = y$ %} *)
(** Returns the type of a nonlinear solver.
@nonlinsol SUNNonlinSolGetType *)
val get_type : ('d, 'k, 's, 'v) t -> nonlinear_solver_type
* Initializes a nonlinear solver .
@nonlinsol SUNNonlinSolInitialize
@nonlinsol SUNNonlinSolInitialize *)
val init : ('d, 'k, 's, 'v) t -> unit
(** Setup a nonlinear solver with an initial iteration value.
@nonlinsol SUNNonlinSolSetup *)
val setup : ('d, 'k, 's, [`Nvec]) t -> y:('d, 'k) Nvector.t -> 's -> unit
* Solves a nonlinear system .
The call [ solve ~y callLSetup s ] solves the
nonlinear system { % $ F(y ) = 0 $ % } or { % $ G(y ) = y$ % } , given the following
arguments .
- [ y0 ] , a predicted value for the new solution state ( which must not be
modified ) ,
- [ ycor ] , on input , an initial guess for the correction to the predicted
states , and on output , the final correction to the predicted state ,
- [ w ] , a solution error - weight vector used for computing weighted error
norms ,
- [ tol ] , the requested solution tolerance in the weighted
root - mean - squared norm ,
- [ callLSetup ] , a flag indicating whether the integrator recommends
calling the setup function , and ,
- [ s ] , the state to pass through to callbacks .
@nonlinsol SUNNonlinSolSolve
The call [solve ls ~y0 ~y ~w tol callLSetup s] solves the
nonlinear system {% $F(y) = 0$ %} or {% $G(y) = y$ %}, given the following
arguments.
- [y0], a predicted value for the new solution state (which must not be
modified),
- [ycor], on input, an initial guess for the correction to the predicted
states, and on output, the final correction to the predicted state,
- [w], a solution error-weight vector used for computing weighted error
norms,
- [tol], the requested solution tolerance in the weighted
root-mean-squared norm,
- [callLSetup], a flag indicating whether the integrator recommends
calling the setup function, and,
- [s], the state to pass through to callbacks.
@nonlinsol SUNNonlinSolSolve *)
val solve :
('d, 'k, 's, [`Nvec]) t
-> y0:('d, 'k) Nvector.t
-> ycor:('d, 'k) Nvector.t
-> w:('d, 'k) Nvector.t
-> float
-> bool
-> 's
-> unit
* { 2 : nlsset Set functions }
(** A function [sysfn y fg mem] to evaluate the nonlinear system
{% $F(y)$ %} (for {{!t}RootFind})
or {% $G(y)$ %} (for {{!t}FixedPoint}).
The contents of [y] must not be modified.
This function raises {!exception:Sundials.RecoverableFailure} to
indicate a recoverable failure. Other exceptions signal unrecoverable
failures.
@nonlinsol SUNNonlinSolSysFn *)
type ('nv, 's) sysfn = 'nv -> 'nv -> 's -> unit
(** Specify a system function callback.
The system function specifies the problem, either {% $F(y)$ %} or
{% $G(y)$ %}.
@nonlinsol SUNNonlinSolSetSysFn *)
val set_sys_fn : ('d, 'k, 's, [`Nvec]) t -> ('d, 's) sysfn -> unit
* A function to setup linear solves .
For direct linear solvers , sets up the system { % $ Ax = b$ % }
where { % $ A = \frac{\partial F}{\partial y}$ % } is the linearization
of the nonlinear residual function { % $ F(y ) = 0 $ % } . For iterative
linear solvers , calls a preconditioner setup function .
The call [ jcur = ] has as arguments [ jbad ] , which
indicates if the solver believes that { % $ A$ % } has gone stale , and [ mem ] ,
a token passed by the function provider . A true return value ( [ jcur ] )
signals that the Jacobian { % $ A$ % } has been updated .
This function raises { ! exception : Sundials . RecoverableFailure } to
indicate a recoverable failure . Other exceptions signal unrecoverable
failures .
@nonlinsol SUNNonlinSolLSetupFn
For direct linear solvers, sets up the system {% $Ax = b$ %}
where {% $A = \frac{\partial F}{\partial y}$ %} is the linearization
of the nonlinear residual function {% $F(y) = 0$ %}. For iterative
linear solvers, calls a preconditioner setup function.
The call [jcur = lsetupfn jbad mem] has as arguments [jbad], which
indicates if the solver believes that {% $A$ %} has gone stale, and [mem],
a token passed by the function provider. A true return value ([jcur])
signals that the Jacobian {% $A$ %} has been updated.
This function raises {!exception:Sundials.RecoverableFailure} to
indicate a recoverable failure. Other exceptions signal unrecoverable
failures.
@nonlinsol SUNNonlinSolLSetupFn *)
type 's lsetupfn = bool -> 's -> bool
* Specify a linear solver setup callback .
@nonlinsol SUNNonlinSolSetLSetupFn
@nonlinsol SUNNonlinSolSetLSetupFn *)
val set_lsetup_fn : ('d, 'k, 's, 'v) t -> 's lsetupfn -> unit
* A function to solve linear systems .
Solves the system { % $ Ax = b$ % } where
{ % $ A = \frac{\partial F}{\partial y}$ % } is the linearization of the
nonlinear residual function { % $ F(y)= 0 $ % } .
The call [ b mem ] has as arguments
- [ b ] , on input : the right - hand - side vector for the linear solve ,
set on output to the solution { % $ x$ % } ; and ,
- [ mem ] , a token passed by the function provider .
This function raises { ! exception : Sundials . RecoverableFailure } to
indicate a recoverable failure . Other exceptions signal unrecoverable
failures .
@nonlinsol SUNNonlinSolLSolveFn
Solves the system {% $Ax = b$ %} where
{% $A = \frac{\partial F}{\partial y}$ %} is the linearization of the
nonlinear residual function {% $F(y)= 0$ %}.
The call [lsolvefn b mem] has as arguments
- [b], on input: the right-hand-side vector for the linear solve,
set on output to the solution {% $x$ %}; and,
- [mem], a token passed by the function provider.
This function raises {!exception:Sundials.RecoverableFailure} to
indicate a recoverable failure. Other exceptions signal unrecoverable
failures.
@nonlinsol SUNNonlinSolLSolveFn *)
type ('nv, 's) lsolvefn = 'nv -> 's -> unit
(** Specify a linear solver callback.
@nonlinsol SUNNonlinSolSetLSolveFn *)
val set_lsolve_fn : ('d, 'k, 's, [`Nvec]) t -> ('d, 's) lsolvefn -> unit
* Values returned by convergence tests .
@nonlinsol
@nonlinsol SUNNonlinSolConvTestFn *)
type convtest =
| Success (** Converged ([SUN_NLS_SUCCESS]) *)
| Continue (** Not converged, keep iterating ([SUN_NLS_CONTINUE]) *)
* Appears to diverge , try to recover ( [ SUN_NLS_CONV_RECVR ] )
* A function providing a convergence test .
The call [ convtestfn y ] has as arguments
- [ y ] , the current nonlinear iterate ,
- [ del ] , the difference between current and prior nonlinear iterates ,
- [ tol ] , the nonlinear solver tolerance ( in a weighted root - mean - squared
norm with the given error - weight vector ) ,
- [ ewt ] , the error - weight vector used in computing weighted norms , and ,
- [ mem ] , a token passed by the function provider .
@nonlinsol
The call [convtestfn y del tol ewt mem] has as arguments
- [y], the current nonlinear iterate,
- [del], the difference between current and prior nonlinear iterates,
- [tol], the nonlinear solver tolerance (in a weighted root-mean-squared
norm with the given error-weight vector),
- [ewt], the error-weight vector used in computing weighted norms, and,
- [mem], a token passed by the function provider.
@nonlinsol SUNNonlinSolConvTestFn *)
type ('nv, 's) convtestfn' = 'nv -> 'nv -> float -> 'nv -> 's -> convtest
(** A convergence test callback provided by an integrator.

    Such callbacks require an additional first argument, the nonlinear solver
    invoking the function, and otherwise expect nvector arguments.
    They access the linear solver and nvector arguments using generic
    functions, which is why the type variables are universally quantified. *)
type 's convtest_callback =
{ f : 'd1 'k1 't2 'd2 'k2. ('d1, 'k1, 't2, [`Nvec]) t
-> (('d2, 'k2) Nvector.t, 's) convtestfn' }
[@@unboxed]
(** A convergence test callback provided by an integrator with sensitivities.

    Such callbacks require an additional first argument, the nonlinear solver
    invoking the function, and otherwise expect senswrapper arguments.
    They access the linear solver and senswrapper arguments using generic
    functions, which is why the type variables are universally quantified. *)
type 's convtest_callback_sens =
{ f : 'd1 'k1 't2 'd2 'k2. ('d1, 'k1, 't2, [`Sens]) t
-> (('d2, 'k2) Senswrapper.t, 's) convtestfn' }
[@@unboxed]
(** A convergence test provided either by an integrator or a user program.

    The OCaml interface distinguishes callback functions set by the
    underlying library ([CConvTest]) from those supplied by user programs
    ([OConvTest]). This reflects the different underlying mechanisms used
    to create and invoke such functions. Callback functions provided by the
    underlying library can be invoked with any kind of linear solver and
    (homogeneous) nvectors since they manipulate these values generically. *)
type ('nv, 's, 'v) convtestfn =
| CConvTest
: 's convtest_callback cfun -> ('nv, 's, [`Nvec]) convtestfn
| CSensConvTest
: 's convtest_callback_sens cfun -> ('nv, 's, [`Sens]) convtestfn
| OConvTest of ('nv, 's) convtestfn'
(** Ignore the nvector type argument in a convtestfn.
@raise Invalid_argument if the value was constructed with [OConvTest] *)
val assert_not_oconvtestfn
: ('nv1, 's, [`Nvec]) convtestfn -> ('nv2, 's, [`Nvec]) convtestfn
(** Specify a convergence test callback for the nonlinear solver iteration.

    @nonlinsol SUNNonlinSolSetConvTestFn *)
val set_convtest_fn :
('d, 'k, 's, [`Nvec]) t -> ('d, 's, [`Nvec]) convtestfn -> unit
(** Support for nonlinear solvers with sensitivities. *)
module Sens : sig (* {{{ *)
(** Setup a nonlinear solver for sensitivities with an initial iteration
    value. See {!setup}.

    @nonlinsol SUNNonlinSolSetup *)
val setup :
('d, 'k, 's, [`Sens]) t -> y:('d, 'k) Senswrapper.t -> 's -> unit
(** Solves a nonlinear system with sensitivities. See {!solve}.
@nonlinsol SUNNonlinSolSolve *)
val solve :
('d, 'k, 's, [`Sens]) t
-> y0:('d, 'k) Senswrapper.t
-> ycor:('d, 'k) Senswrapper.t
-> w:('d, 'k) Senswrapper.t
-> float
-> bool
-> 's
-> unit
(** Specify a system function callback with sensitivities.

    @nonlinsol SUNNonlinSolSetSysFn *)
val set_sys_fn :
('d, 'k, 's, [`Sens]) t -> (('d, 'k) Senswrapper.t, 's) sysfn -> unit
(** Specify a linear solver callback with sensitivities.
    See {!set_lsolve_fn}.

    @nonlinsol SUNNonlinSolSetLSolveFn *)
val set_lsolve_fn :
('d, 'k, 's, [`Sens]) t -> (('d, 'k) Senswrapper.t, 's) lsolvefn -> unit
(** Ignore the nvector type argument in a convtestfn.
@raise Invalid_argument if the value was constructed with [OConvTest] *)
val assert_not_oconvtestfn
: ('nv1, 's, [`Sens]) convtestfn -> ('nv2, 's, [`Sens]) convtestfn
(** Specify a convergence test callback for the nonlinear solver iteration
    when using sensitivities. See {!set_convtest_fn}.

    @nonlinsol SUNNonlinSolSetConvTestFn *)
val set_convtest_fn :
('d, 'k, 's, [`Sens]) t
-> (('d, 'k) Senswrapper.t, 's, [`Sens]) convtestfn
-> unit
end (* }}} *)
(** Sets the maximum number of nonlinear solver iterations.

    @nonlinsol SUNNonlinSolSetMaxIters *)
val set_max_iters : ('d, 'k, 's, 'v) t -> int -> unit
(** Sets the output file for informative (non-error) messages. The default
    is to send such messages to stdout.
    The optional argument is a convenience for invoking {!set_print_level}.

    Sundials must be built with {cconst SUNDIALS_BUILD_WITH_MONITORING} to
    use this function.

    @nonlinsol_module SUNNonlinSolSetInfoFile_Newton
    @nonlinsol_module SUNNonlinSolSetInfoFile_FixedPoint
    @since 5.3.0 *)
val set_info_file
: ('d, 'k, 's, 'v) t -> ?print_level:bool -> Sundials.Logfile.t -> unit
(** Sets the level of output verbosity. When [false] (the default) no
    information is printed, when [true] the residual norm is printed for
    each nonlinear iteration.

    Sundials must be built with {cconst SUNDIALS_BUILD_WITH_MONITORING} to
    use this function.

    @nonlinsol_module SUNNonlinSolSetPrintLevel_Newton
    @nonlinsol_module SUNNonlinSolSetPrintLevel_FixedPoint
    @since 5.3.0 *)
val set_print_level : ('d, 'k, 's, 'v) t -> bool -> unit
(** {2:nlsget Get functions} *)
(** Returns the number of nonlinear solver iterations in the most recent solve.

    @nonlinsol SUNNonlinSolGetNumIters *)
val get_num_iters : ('d, 'k, 's, 'v) t -> int
(** Returns the iteration index of the current nonlinear solve.
@nonlinsol SUNNonlinSolGetCurIter *)
val get_cur_iter : ('d, 'k, 's, 'v) t -> int
(** Returns the number of nonlinear solver convergence failures in the most
recent solve.
@nonlinsol SUNNonlinSolGetNumConvFails *)
val get_num_conv_fails : ('d, 'k, 's, 'v) t -> int
(** {2:nlssolvers Nonlinear Solver Implementations} *)

(** Generic nonlinear solver based on Newton's method.

    @nonlinsol <SUNNonlinSol_links.html#the-sunnonlinsol-newton-implementation> The SUNNonlinearSolver_Newton implementation *)
module Newton : sig (* {{{ *)
(** Creates a nonlinear solver based on Newton's method.
    Solves nonlinear systems of the form {% $F(y) = 0$ %}.

    @nonlinsol_module SUNNonlinSol_Newton *)
val make :
?context:Context.t
-> ('d, 'k) Nvector.t
-> ('d, 'k, 's, [`Nvec]) t
(** Creates a nonlinear solver based on Newton's method for
    sensitivity-enabled integrators.
    Solves nonlinear systems of the form {% $F(y) = 0$ %}.

    In the call [make_sens count y],
    - [count] is the number of vectors in the nonlinear problem,
      if there are {% $N_s$ %} sensitivities, then [count] should be
      {% $N_s + 1$ %} if using a simultaneous corrector
      or {% $N_s$ %} if using a staggered corrector; and,
    - [y] is a template for cloning vectors.

    @nonlinsol_module SUNNonlinSol_Newton *)
val make_sens :
?context:Context.t
-> int
-> ('d, 'k) Nvector.t
-> ('d, 'k, 's, [`Sens]) t
(** Returns the residual function that defines the nonlinear system.
Raises [Invalid_argument] if called on a nonlinear solver that was not
created by this module.
@nonlinsol_module SUNNonlinSolGetSysFn_Newton *)
val get_sys_fn
: ('d, 'k, 's, [`Nvec]) t -> (('d, 'k) Nvector.t, 's) sysfn option
end (* }}} *)
(** Generic nonlinear solver for fixed-point (functional) iteration with
    optional Anderson acceleration.

    @nonlinsol <SUNNonlinSol_links.html#the-sunnonlinsol-fixedpoint-implementation> The SUNNonlinearSolver_FixedPoint implementation *)
module FixedPoint : sig (* {{{ *)
(** Creates a nonlinear solver using fixed-point (functional) iteration.
    Solves nonlinear systems of the form {% $G(y) = y$ %}.
    The number of [acceleration_vectors] defaults to zero.

    @nonlinsol_module SUNNonlinSol_FixedPoint *)
val make :
?context:Context.t
-> ?acceleration_vectors:int
-> ('d, 'k) Nvector.t
-> ('d, 'k, 's, [`Nvec]) t
(** Creates a nonlinear solver using fixed-point (functional) iteration for
    sensitivity-enabled integrators.
    Solves nonlinear systems of the form {% $G(y) = y$ %}.

    In the call [make_sens count y],
    - [count] is the number of vectors in the nonlinear problem,
      if there are {% $N_s$ %} sensitivities, then [count] should be
      {% $N_s + 1$ %} if using a simultaneous corrector
      or {% $N_s$ %} if using a staggered corrector;
    - [y] is a template for cloning vectors; and,

    The number of [acceleration_vectors] defaults to zero.

    @nonlinsol_module SUNNonlinSol_FixedPoint *)
val make_sens :
?context:Context.t
-> ?acceleration_vectors:int
-> int
-> ('d, 'k) Nvector.t
-> ('d, 'k, 's, [`Sens]) t
(** Returns the residual function that defines the nonlinear system.

    Raises [Invalid_argument] if called on a nonlinear solver that was not
    created by this module.

    @nonlinsol_module SUNNonlinSolGetSysFn_FixedPoint *)
val get_sys_fn
: ('d, 'k, 's, [`Nvec]) t -> (('d, 'k) Nvector.t, 's) sysfn option
(** Sets the damping parameter {% $\beta$ %} to use with Anderson
    acceleration. Damping is disabled by default {% $\beta = 1.0$ %}.

    @nonlinsol_module SUNNonlinSolSetDamping_FixedPoint
    @since 5.1.0 *)
val set_damping : ('d, 'k, 's, 'v) t -> float -> unit
end (* }}} *)
(** Custom nonlinear solvers.
@nonlinsol <SUNNonlinSol_API_link.html#implementing-a-custom-sunnonlinearsolver-module> Implementing a Custom SUNNonlinearSolver Module *)
module Custom : sig (* {{{ *)
(** Create a nonlinear solver from a set of callback functions.

    The callbacks should indicate failure by raising an exception (preferably
    one of the exceptions in this package). Raising
    {!exception:Sundials.RecoverableFailure} indicates a generic recoverable
    failure.

    The expected operations are:
    - [init]: initializes the nonlinear solver.
    - [setup]: sets up the nonlinear solver with an initial iteration value.
    - [set_lsetup_fn]: receive a linear solver setup callback.
    - [set_lsolve_fn]: receive a linear solver callback.
    - [set_convtest_fn]: receive a convergence test callback.
    - [set_max_iters]: sets the maximum number of iterations.
    - [set_info_file]: sets a logfile for informational messages.
    - [set_print_level]: sets the level of verbosity for informational
      messages (0 = none).
    - [get_num_iters]: returns the number of iterations in the most recent
      solve.
    - [get_cur_iter]: returns the iteration index of the current solve. This
      function is required when using a convergence test provided by Sundials
      or one of the spils linear solvers.
    - [get_num_conv_fails]: return the number of convergence failures in the
      most recent solve.
    - [nls_type]: the type of problem solved.
    - [solve]: the call [solve y0 y w tol callLSetup mem] should solve the
      nonlinear system {% $F(y) = 0$ %} or {% $G(y) = y$ %}, given the initial
      iterate [y0], which must not be modified, the solution error-weight
      vector [w] used for computing weighted error norms, the requested
      solution tolerance in the weighted root-mean-squared norm [tol], a flag
      [callLSetup] indicating whether the integrator recommends calling the
      setup function, and a memory value to be passed to the system function.
    - [set_sys_fn]: receive the system callback.

    Note that the [setup] and [solve] functions are passed the payload data
    directly, whereas the [lsolvefn] and [sysfn]s require
    the data to be wrapped in an nvector. This asymmetry is awkward but,
    unfortunately, unavoidable given the implementation of nvectors and the
    different constraints for C-to-OCaml calls and OCaml-to-C calls. *)
val make :
?init : (unit -> unit)
-> ?setup : ('d -> 's -> unit)
-> ?set_lsetup_fn : ('s lsetupfn -> unit)
-> ?set_lsolve_fn : ((('d, 'k) Nvector.t, 's) lsolvefn -> unit)
-> ?set_convtest_fn : (('d, 's, [`Nvec]) convtestfn -> unit)
-> ?set_max_iters : (int -> unit)
-> ?set_info_file : (Logfile.t -> unit)
-> ?set_print_level : (int -> unit)
-> ?get_num_iters : (unit -> int)
-> ?get_cur_iter : (unit -> int)
-> ?get_num_conv_fails : (unit -> int)
-> nls_type : nonlinear_solver_type
-> solve : ('d -> 'd -> 'd -> float -> bool -> 's -> unit)
-> set_sys_fn : ((('d, 'k) Nvector.t, 's) sysfn -> unit)
-> ?context:Context.t
-> unit
-> ('d, 'k, 's, [`Nvec]) t
(** Create a nonlinear solver from a set of callback functions for
    sensitivity problems that pass arrays of nvectors. As for the
    {!make} function except that the callbacks receive arrays of
    values.

    Writing custom nonlinear solvers for use with some forward sensitivity
    methods requires the "internal" senswrapper type.

    Any attempt to use {!Senswrapper.t}s outside of the call to
    setup or solve that provides them will result in an {!IncorrectUse}
    exception. They must only be used to extract the underlying data with
    {!Senswrapper.data} or as arguments for lsolve_fn, convtest_fn, or sys_fn.
    There are no restrictions on the arrays extracted with
    {!Senswrapper.data}. *)
val make_sens :
?init : (unit -> unit)
-> ?setup : (('d, 'k) Senswrapper.t -> 's -> unit)
-> ?set_lsetup_fn : ('s lsetupfn -> unit)
-> ?set_lsolve_fn : ((('d, 'k) Senswrapper.t, 's) lsolvefn -> unit)
-> ?set_convtest_fn : ((('d, 'k) Senswrapper.t, 's, [`Sens]) convtestfn -> unit)
-> ?set_max_iters : (int -> unit)
-> ?set_info_file : (Logfile.t -> unit)
-> ?set_print_level : (int -> unit)
-> ?get_num_iters : (unit -> int)
-> ?get_cur_iter : (unit -> int)
-> ?get_num_conv_fails : (unit -> int)
-> nls_type : nonlinear_solver_type
-> solve : (('d, 'k) Senswrapper.t
-> ('d, 'k) Senswrapper.t
-> ('d, 'k) Senswrapper.t
-> float -> bool -> 's -> unit)
-> set_sys_fn : ((('d, 'k) Senswrapper.t, 's) sysfn -> unit)
-> ?context:Context.t
-> unit
-> ('d, 'k, 's, [`Sens]) t
end (* }}} *)
(** {2:nlsexceptions Exceptions} *)
(** An error occurred in a vector operation.
@nodoc SUN_NLS_VECTOROP_ERR *)
exception VectorOpError
(** Raised when a nonlinear solver is used incorrectly.
    For example, calling {!solve} without having first called {!set_sys_fn}
    ([SUN_NLS_MEM_NULL]). *)
exception IncorrectUse
(** Raised if an external library call fails. *)
exception ExtFail
(** Raised on an attempt to associate a nonlinear solver instance with more
    than one session. *)
exception NonlinearSolverInUse
| null | https://raw.githubusercontent.com/inria-parkas/sundialsml/a72ebfc84b55470ed97fbb0b45d700deebfc1664/src/lsolvers/sundials_NonlinearSolver.mli | ocaml | *********************************************************************
OCaml interface to Sundials
under a New BSD License, refer to the file LICENSE.
*********************************************************************
{{{
* A senswrapper is an {!Nvector.nvector} of {!Nvector.nvector}s that
cannot be created or manipulated from OCaml.
* Creates an array to the nvector data within a senswrapper.
Given [s1, s2 : ('d, 'k) t] where [s1 = s2], then [data s1]
and [data s2] access the same underlying data. This fact can
be exploited for caching the array in callbacks.
@raise IncorrectUse Attempt to access an invalidated senswrapper
}}}
* The problem specification expected by a nonlinear solver.
* Solves {% $F(y) = 0$ %}
* Solves {% $G(y) = y$ %}
* Returns the type of a nonlinear solver.
@nonlinsol SUNNonlinSolGetType
* Setup a nonlinear solver with an initial iteration value.
@nonlinsol SUNNonlinSolSetup
* A function [sysfn y fg mem] to evaluate the nonlinear system
{% $F(y)$ %} (for {{!t}RootFind})
or {% $G(y)$ %} (for {{!t}FixedPoint}).
The contents of [y] must not be modified.
This function raises {!exception:Sundials.RecoverableFailure} to
indicate a recoverable failure. Other exceptions signal unrecoverable
failures.
@nonlinsol SUNNonlinSolSysFn
* Specify a system function callback.
The system function specifies the problem, either {% $F(y)$ %} or
{% $G(y)$ %}.
@nonlinsol SUNNonlinSolSetSysFn
* Specify a linear solver callback.
@nonlinsol SUNNonlinSolSetLSolveFn
* Converged ([SUN_NLS_SUCCESS])
* Not converged, keep iterating ([SUN_NLS_CONTINUE])
* Ignore the nvector type argument in a convtestfn.
@raise Invalid_argument if the value was constructed with [OConvTest]
* Support for nonlinear solvers with sensitivities.
{{{
* Solves a nonlinear system with sensitivities. See {!solve}.
@nonlinsol SUNNonlinSolSolve
* Ignore the nvector type argument in a convtestfn.
@raise Invalid_argument if the value was constructed with [OConvTest]
}}}
* Returns the iteration index of the current nonlinear solve.
@nonlinsol SUNNonlinSolGetCurIter
* Returns the number of nonlinear solver convergence failures in the most
recent solve.
@nonlinsol SUNNonlinSolGetNumConvFails
{{{
* Returns the residual function that defines the nonlinear system.
Raises [Invalid_argument] if called on a nonlinear solver that was not
created by this module.
@nonlinsol_module SUNNonlinSolGetSysFn_Newton
}}}
{{{
}}}
* Custom nonlinear solvers.
@nonlinsol <SUNNonlinSol_API_link.html#implementing-a-custom-sunnonlinearsolver-module> Implementing a Custom SUNNonlinearSolver Module
{{{
}}}
* An error occurred in a vector operation.
@nodoc SUN_NLS_VECTOROP_ERR
* Raised if an external library call fails. | , , and
( / ENS ) ( / ENS ) ( UPMC / ENS / Inria )
Copyright 2020 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
* Generic nonlinear solvers .
Sundials provides generic nonlinear solvers of two main types :
{ ! module : } and { ! module : FixedPoint } . An instance of a nonlinear
solver may only be associated with at most one integrator session at
a time .
This module supports calling both Sundials and custom OCaml nonlinear
solvers from both Sundials integrators and OCaml applications .
This documentation is structured as follows .
{ ol
{ - { { : # nlscore}Core functions } }
{ - { { : # nlsset}Set functions } }
{ - { { : # nlsget}Get functions } }
{ - { { : # nlssolvers}Nonlinear Solver Implementations } }
{ - { { : # nlsexceptions}Exceptions } } }
@version VERSION ( )
@author ( Inria / ENS )
@author ( Inria / ENS )
@author ( UPMC / ENS / Inria )
@nonlinsol < SUNNonlinSol_API_link.html#the-sunnonlinearsolver-api > The SUNNonlinearSolver API
@since 4.0.0
Sundials provides generic nonlinear solvers of two main types:
{!module:Newton} and {!module:FixedPoint}. An instance of a nonlinear
solver may only be associated with at most one integrator session at
a time.
This module supports calling both Sundials and custom OCaml nonlinear
solvers from both Sundials integrators and OCaml applications.
This documentation is structured as follows.
{ol
{- {{:#nlscore}Core functions}}
{- {{:#nlsset}Set functions}}
{- {{:#nlsget}Get functions}}
{- {{:#nlssolvers}Nonlinear Solver Implementations}}
{- {{:#nlsexceptions}Exceptions}}}
@version VERSION()
@author Timothy Bourke (Inria/ENS)
@author Jun Inoue (Inria/ENS)
@author Marc Pouzet (UPMC/ENS/Inria)
@nonlinsol <SUNNonlinSol_API_link.html#the-sunnonlinearsolver-api> The SUNNonlinearSolver API
@since 4.0.0 *)
open Sundials
* A generic nonlinear solver .
The type variables specify
- [ ' data ] , the { ! Nvector.nvector } data ,
- [ ' kind ] , the { ! Nvector.nvector } kind ,
- [ 's ] , the type of of session data to be passed
into the nonlinear solver and through to callbacks , and ,
- [ ' v ] , a type indicating that the solver manipulates ( [ ` Nvec ] )
or { { ! Senswrapper.t}senswrappers } ( [ ` Sens ] ) .
@nonlinsol SUNNonlinearSolver
The type variables specify
- ['data], the {!Nvector.nvector} data,
- ['kind], the {!Nvector.nvector} kind,
- ['s], the type of of session data to be passed
into the nonlinear solver and through to callbacks, and,
- ['v], a type indicating that the solver manipulates nvectors ([`Nvec])
or {{!Senswrapper.t}senswrappers} ([`Sens]).
@nonlinsol SUNNonlinearSolver *)
type ('data, 'kind, 's, 'v) t
= ('data, 'kind, 's, 'v) Sundials_NonlinearSolver_impl.nonlinear_solver
* A limited interface to arrays of { ! Nvector.nvector}s required
to apply nonlinear solvers to sensitivity problems .
to apply nonlinear solvers to sensitivity problems. *)
type ('d, 'k) t = ('d, 'k) Sundials_NonlinearSolver_impl.Senswrapper.t
val data : ('d, 'k) t -> 'd array
* { 2 : nlscore Core functions }
type nonlinear_solver_type =
val get_type : ('d, 'k, 's, 'v) t -> nonlinear_solver_type
* Initializes a nonlinear solver .
@nonlinsol SUNNonlinSolInitialize
@nonlinsol SUNNonlinSolInitialize *)
val init : ('d, 'k, 's, 'v) t -> unit
val setup : ('d, 'k, 's, [`Nvec]) t -> y:('d, 'k) Nvector.t -> 's -> unit
* Solves a nonlinear system .
The call [ solve ~y callLSetup s ] solves the
nonlinear system { % $ F(y ) = 0 $ % } or { % $ G(y ) = y$ % } , given the following
arguments .
- [ y0 ] , a predicted value for the new solution state ( which must not be
modified ) ,
- [ ycor ] , on input , an initial guess for the correction to the predicted
states , and on output , the final correction to the predicted state ,
- [ w ] , a solution error - weight vector used for computing weighted error
norms ,
- [ tol ] , the requested solution tolerance in the weighted
root - mean - squared norm ,
- [ callLSetup ] , a flag indicating whether the integrator recommends
calling the setup function , and ,
- [ s ] , the state to pass through to callbacks .
@nonlinsol SUNNonlinSolSolve
The call [solve ls ~y0 ~y ~w tol callLSetup s] solves the
nonlinear system {% $F(y) = 0$ %} or {% $G(y) = y$ %}, given the following
arguments.
- [y0], a predicted value for the new solution state (which must not be
modified),
- [ycor], on input, an initial guess for the correction to the predicted
states, and on output, the final correction to the predicted state,
- [w], a solution error-weight vector used for computing weighted error
norms,
- [tol], the requested solution tolerance in the weighted
root-mean-squared norm,
- [callLSetup], a flag indicating whether the integrator recommends
calling the setup function, and,
- [s], the state to pass through to callbacks.
@nonlinsol SUNNonlinSolSolve *)
val solve :
('d, 'k, 's, [`Nvec]) t
-> y0:('d, 'k) Nvector.t
-> ycor:('d, 'k) Nvector.t
-> w:('d, 'k) Nvector.t
-> float
-> bool
-> 's
-> unit
* { 2 : nlsset Set functions }
type ('nv, 's) sysfn = 'nv -> 'nv -> 's -> unit
val set_sys_fn : ('d, 'k, 's, [`Nvec]) t -> ('d, 's) sysfn -> unit
* A function to setup linear solves .
For direct linear solvers , sets up the system { % $ Ax = b$ % }
where { % $ A = \frac{\partial F}{\partial y}$ % } is the linearization
of the nonlinear residual function { % $ F(y ) = 0 $ % } . For iterative
linear solvers , calls a preconditioner setup function .
The call [ jcur = ] has as arguments [ jbad ] , which
indicates if the solver believes that { % $ A$ % } has gone stale , and [ mem ] ,
a token passed by the function provider . A true return value ( [ jcur ] )
signals that the Jacobian { % $ A$ % } has been updated .
This function raises { ! exception : Sundials . RecoverableFailure } to
indicate a recoverable failure . Other exceptions signal unrecoverable
failures .
@nonlinsol SUNNonlinSolLSetupFn
For direct linear solvers, sets up the system {% $Ax = b$ %}
where {% $A = \frac{\partial F}{\partial y}$ %} is the linearization
of the nonlinear residual function {% $F(y) = 0$ %}. For iterative
linear solvers, calls a preconditioner setup function.
The call [jcur = lsetupfn jbad mem] has as arguments [jbad], which
indicates if the solver believes that {% $A$ %} has gone stale, and [mem],
a token passed by the function provider. A true return value ([jcur])
signals that the Jacobian {% $A$ %} has been updated.
This function raises {!exception:Sundials.RecoverableFailure} to
indicate a recoverable failure. Other exceptions signal unrecoverable
failures.
@nonlinsol SUNNonlinSolLSetupFn *)
type 's lsetupfn = bool -> 's -> bool
* Specify a linear solver setup callback .
@nonlinsol SUNNonlinSolSetLSetupFn
@nonlinsol SUNNonlinSolSetLSetupFn *)
val set_lsetup_fn : ('d, 'k, 's, 'v) t -> 's lsetupfn -> unit
* A function to solve linear systems .
Solves the system { % $ Ax = b$ % } where
{ % $ A = \frac{\partial F}{\partial y}$ % } is the linearization of the
nonlinear residual function { % $ F(y)= 0 $ % } .
The call [ b mem ] has as arguments
- [ b ] , on input : the right - hand - side vector for the linear solve ,
set on output to the solution { % $ x$ % } ; and ,
- [ mem ] , a token passed by the function provider .
This function raises { ! exception : Sundials . RecoverableFailure } to
indicate a recoverable failure . Other exceptions signal unrecoverable
failures .
@nonlinsol SUNNonlinSolLSolveFn
Solves the system {% $Ax = b$ %} where
{% $A = \frac{\partial F}{\partial y}$ %} is the linearization of the
nonlinear residual function {% $F(y)= 0$ %}.
The call [lsolvefn b mem] has as arguments
- [b], on input: the right-hand-side vector for the linear solve,
set on output to the solution {% $x$ %}; and,
- [mem], a token passed by the function provider.
This function raises {!exception:Sundials.RecoverableFailure} to
indicate a recoverable failure. Other exceptions signal unrecoverable
failures.
@nonlinsol SUNNonlinSolLSolveFn *)
type ('nv, 's) lsolvefn = 'nv -> 's -> unit
val set_lsolve_fn : ('d, 'k, 's, [`Nvec]) t -> ('d, 's) lsolvefn -> unit
* Values returned by convergence tests .
@nonlinsol
@nonlinsol SUNNonlinSolConvTestFn *)
type convtest =
* Appears to diverge , try to recover ( [ SUN_NLS_CONV_RECVR ] )
* A function providing a convergence test .
The call [ convtestfn y ] has as arguments
- [ y ] , the current nonlinear iterate ,
- [ del ] , the difference between current and prior nonlinear iterates ,
- [ tol ] , the nonlinear solver tolerance ( in a weighted root - mean - squared
norm with the given error - weight vector ) ,
- [ ewt ] , the error - weight vector used in computing weighted norms , and ,
- [ mem ] , a token passed by the function provider .
@nonlinsol
The call [convtestfn y del tol ewt mem] has as arguments
- [y], the current nonlinear iterate,
- [del], the difference between current and prior nonlinear iterates,
- [tol], the nonlinear solver tolerance (in a weighted root-mean-squared
norm with the given error-weight vector),
- [ewt], the error-weight vector used in computing weighted norms, and,
- [mem], a token passed by the function provider.
@nonlinsol SUNNonlinSolConvTestFn *)
type ('nv, 's) convtestfn' = 'nv -> 'nv -> float -> 'nv -> 's -> convtest
* A convergence test callback provided by an integrator .
Such callbacks require an additional first argument , the nonlinear solver
invoking the function , and otherwise expect nvector arguments .
They access the linear solver and nvector arguments using generic
functions , which is why the type variables are universally quantified .
Such callbacks require an additional first argument, the nonlinear solver
invoking the function, and otherwise expect nvector arguments.
They access the linear solver and nvector arguments using generic
functions, which is why the type variables are universally quantified. *)
type 's convtest_callback =
{ f : 'd1 'k1 't2 'd2 'k2. ('d1, 'k1, 't2, [`Nvec]) t
-> (('d2, 'k2) Nvector.t, 's) convtestfn' }
[@@unboxed]
* A convergence test callback provided by an integrator with sensitivities .
Such callbacks require an additional first argument , the nonlinear solver
invoking the function , and otherwise expect senswrapper arguments .
They access the linear solver and senswrapper arguments using generic
functions , which is why the type variables are universally quantified .
Such callbacks require an additional first argument, the nonlinear solver
invoking the function, and otherwise expect senswrapper arguments.
They access the linear solver and senswrapper arguments using generic
functions, which is why the type variables are universally quantified. *)
type 's convtest_callback_sens =
{ f : 'd1 'k1 't2 'd2 'k2. ('d1, 'k1, 't2, [`Sens]) t
-> (('d2, 'k2) Senswrapper.t, 's) convtestfn' }
[@@unboxed]
* A convergence test provided either by an integrator or a user program .
The OCaml interface distinguishes callback functions set by the
underlying library ( [ CConvTest ] ) from those supplied by user programs
( [ OConvTest ] ) . This reflects the different underlying mechanisms used
to create and invoke such functions . Callback functions provied by the
underlying library can be invoked with any kind of linear solver and
( homogeneous ) nvectors since they manipulate these values generically .
The OCaml interface distinguishes callback functions set by the
underlying library ([CConvTest]) from those supplied by user programs
([OConvTest]). This reflects the different underlying mechanisms used
to create and invoke such functions. Callback functions provied by the
underlying library can be invoked with any kind of linear solver and
(homogeneous) nvectors since they manipulate these values generically. *)
type ('nv, 's, 'v) convtestfn =
| CConvTest
: 's convtest_callback cfun -> ('nv, 's, [`Nvec]) convtestfn
| CSensConvTest
: 's convtest_callback_sens cfun -> ('nv, 's, [`Sens]) convtestfn
| OConvTest of ('nv, 's) convtestfn'
val assert_not_oconvtestfn
: ('nv1, 's, [`Nvec]) convtestfn -> ('nv2, 's, [`Nvec]) convtestfn
* Specify a convergence test callback for the nonlinear solver iteration .
@nonlinsol SUNNonlinSolSetConvTestFn
@nonlinsol SUNNonlinSolSetConvTestFn *)
val set_convtest_fn :
('d, 'k, 's, [`Nvec]) t -> ('d, 's, [`Nvec]) convtestfn -> unit
* Setup a nonlinear solver for sensitivities with an initial iteration
value . See { ! setup } .
@nonlinsol
value. See {!setup}.
@nonlinsol SUNNonlinSolSetup *)
val setup :
('d, 'k, 's, [`Sens]) t -> y:('d, 'k) Senswrapper.t -> 's -> unit
val solve :
('d, 'k, 's, [`Sens]) t
-> y0:('d, 'k) Senswrapper.t
-> ycor:('d, 'k) Senswrapper.t
-> w:('d, 'k) Senswrapper.t
-> float
-> bool
-> 's
-> unit
* Specify a system function callback with sensitivities .
@nonlinsol
@nonlinsol SUNNonlinSolSetSysFn *)
val set_sys_fn :
('d, 'k, 's, [`Sens]) t -> (('d, 'k) Senswrapper.t, 's) sysfn -> unit
* Specify a linear solver callback with sensitivities .
See { ! } .
@nonlinsol SUNNonlinSolSetLSolveFn
See {!set_lsolve_fn}.
@nonlinsol SUNNonlinSolSetLSolveFn *)
val set_lsolve_fn :
('d, 'k, 's, [`Sens]) t -> (('d, 'k) Senswrapper.t, 's) lsolvefn -> unit
val assert_not_oconvtestfn
: ('nv1, 's, [`Sens]) convtestfn -> ('nv2, 's, [`Sens]) convtestfn
* Specify a convergence test callback for the nonlinear solver iteration
when using sensitivities . See { ! set_convtest_fn } .
@nonlinsol SUNNonlinSolSetConvTestFn
when using sensitivities. See {!set_convtest_fn}.
@nonlinsol SUNNonlinSolSetConvTestFn *)
val set_convtest_fn :
('d, 'k, 's, [`Sens]) t
-> (('d, 'k) Senswrapper.t, 's, [`Sens]) convtestfn
-> unit
* Sets the maximum number of nonlinear solver iterations .
@nonlinsol SUNNonlinSolSetMaxIters
@nonlinsol SUNNonlinSolSetMaxIters *)
val set_max_iters : ('d, 'k, 's, 'v) t -> int -> unit
* Sets the output file for informative ( non - error ) messages . The default
is to send such messages to stdout .
The optional argument is a convenience for invoking { ! set_print_level } .
Sundials must be built with { cconst SUNDIALS_BUILD_WITH_MONITORING } to
use this function .
@nonlinsol_module SUNNonlinSolSetInfoFile_Newton
@nonlinsol_module SUNNonlinSolSetInfoFile_FixedPoint
@since 5.3.0
is to send such messages to stdout.
The optional argument is a convenience for invoking {!set_print_level}.
Sundials must be built with {cconst SUNDIALS_BUILD_WITH_MONITORING} to
use this function.
@nonlinsol_module SUNNonlinSolSetInfoFile_Newton
@nonlinsol_module SUNNonlinSolSetInfoFile_FixedPoint
@since 5.3.0 *)
val set_info_file
: ('d, 'k, 's, 'v) t -> ?print_level:bool -> Sundials.Logfile.t -> unit
* Sets the level of output verbosity . When [ false ] ( the default ) no
information is printed , when [ true ] the residual norm is printed for
each nonlinear iteration .
Sundials must be built with { cconst SUNDIALS_BUILD_WITH_MONITORING } to
use this function .
@nonlinsol_module SUNNonlinSolSetPrintLevel_Newton
@nonlinsol_module SUNNonlinSolSetPrintLevel_FixedPoint
@since 5.3.0
information is printed, when [true] the residual norm is printed for
each nonlinear iteration.
Sundials must be built with {cconst SUNDIALS_BUILD_WITH_MONITORING} to
use this function.
@nonlinsol_module SUNNonlinSolSetPrintLevel_Newton
@nonlinsol_module SUNNonlinSolSetPrintLevel_FixedPoint
@since 5.3.0 *)
val set_print_level : ('d, 'k, 's, 'v) t -> bool -> unit
* { 2 : nlsget Get functions }
* Returns the number of nonlinear solver iterations in the most recent solve .
@nonlinsol SUNNonlinSolGetNumIters
@nonlinsol SUNNonlinSolGetNumIters *)
val get_num_iters : ('d, 'k, 's, 'v) t -> int
val get_cur_iter : ('d, 'k, 's, 'v) t -> int
val get_num_conv_fails : ('d, 'k, 's, 'v) t -> int
* { 2 : nlssolvers Nonlinear Solver Implementations }
* Generic nonlinear solver based on Newton 's method .
@nonlinsol < SUNNonlinSol_links.html#the-sunnonlinsol-newton-implementation > The SUNNonlinearSolver_Newton implementation
@nonlinsol <SUNNonlinSol_links.html#the-sunnonlinsol-newton-implementation> The SUNNonlinearSolver_Newton implementation *)
* Creates a nonlinear solver based on Newton 's method .
Solves nonlinear systems of the form { % $ F(y ) = 0 $ % } .
@nonlinsol_module SUNNonlinSol_Newton
Solves nonlinear systems of the form {% $F(y) = 0$ %}.
@nonlinsol_module SUNNonlinSol_Newton *)
val make :
?context:Context.t
-> ('d, 'k) Nvector.t
-> ('d, 'k, 's, [`Nvec]) t
* Creates a nonlinear solver based on Newton 's method for
sensitivity - enabled integrators .
Solves nonlinear systems of the form { % $ F(y ) = 0 $ % } .
In the call [ make_sens count y ] ,
- [ count ] is the number of vectors in the nonlinear problem ,
if there are { % $ N_s$ % } sensitivities , then [ count ] should be
{ % $ N_s + 1 $ % } if using a simultaneous corrector
or { % $ N_s$ % } if using a staggered corrector ; and ,
- [ y ] is a template for cloning vectors .
@nonlinsol_module SUNNonlinSol_Newton
sensitivity-enabled integrators.
Solves nonlinear systems of the form {% $F(y) = 0$ %}.
In the call [make_sens count y],
- [count] is the number of vectors in the nonlinear problem,
if there are {% $N_s$ %} sensitivities, then [count] should be
{% $N_s + 1$ %} if using a simultaneous corrector
or {% $N_s$ %} if using a staggered corrector; and,
- [y] is a template for cloning vectors.
@nonlinsol_module SUNNonlinSol_Newton *)
val make_sens :
?context:Context.t
-> int
-> ('d, 'k) Nvector.t
-> ('d, 'k, 's, [`Sens]) t
val get_sys_fn
: ('d, 'k, 's, [`Nvec]) t -> (('d, 'k) Nvector.t, 's) sysfn option
* Generic nonlinear solver for fixed - point ( functional ) iteration with
optional acceleration .
@nonlinsol < SUNNonlinSol_links.html#the-sunnonlinsol-fixedpoint-implementation > The SUNNonlinearSolver_FixedPoint implementation
optional Anderson acceleration.
@nonlinsol <SUNNonlinSol_links.html#the-sunnonlinsol-fixedpoint-implementation> The SUNNonlinearSolver_FixedPoint implementation *)
* Creates a nonlinear solver using fixed - point ( functional ) iteration .
Solves nonlinear systems of the form { % $ G(y ) = y$ % } .
The number of [ acceleration_vectors ] defaults to zero .
@nonlinsol_module SUNNonlinSol_FixedPoint
Solves nonlinear systems of the form {% $G(y) = y$ %}.
The number of [acceleration_vectors] defaults to zero.
@nonlinsol_module SUNNonlinSol_FixedPoint *)
val make :
?context:Context.t
-> ?acceleration_vectors:int
-> ('d, 'k) Nvector.t
-> ('d, 'k, 's, [`Nvec]) t
* Creates a nonlinear solver using fixed - point ( functional ) iteration for
sensitivity - enabled integrators .
Solves nonlinear systems of the form { % $ G(y ) = y$ % } .
In the call [ make_sens count y ] ,
- [ count ] is the number of vectors in the nonlinear problem ,
if there are { % $ N_s$ % } sensitivities , then [ count ] should be
{ % $ N_s + 1 $ % } if using a simultaneous corrector
or { % $ N_s$ % } if using a staggered corrector ;
- [ y ] is a template for cloning vectors ; and ,
The number of [ acceleration_vectors ] defaults to zero .
@nonlinsol_module SUNNonlinSol_FixedPoint
sensitivity-enabled integrators.
Solves nonlinear systems of the form {% $G(y) = y$ %}.
In the call [make_sens count y],
- [count] is the number of vectors in the nonlinear problem,
if there are {% $N_s$ %} sensitivities, then [count] should be
{% $N_s + 1$ %} if using a simultaneous corrector
or {% $N_s$ %} if using a staggered corrector;
- [y] is a template for cloning vectors; and,
The number of [acceleration_vectors] defaults to zero.
@nonlinsol_module SUNNonlinSol_FixedPoint *)
val make_sens :
?context:Context.t
-> ?acceleration_vectors:int
-> int
-> ('d, 'k) Nvector.t
-> ('d, 'k, 's, [`Sens]) t
* Returns the residual function that defines the nonlinear system .
Raises [ Invalid_argument ] if called on a nonlinear solver that was not
created by this module .
@nonlinsol_module SUNNonlinSolGetSysFn_FixedPoint
Raises [Invalid_argument] if called on a nonlinear solver that was not
created by this module.
@nonlinsol_module SUNNonlinSolGetSysFn_FixedPoint *)
val get_sys_fn
: ('d, 'k, 's, [`Nvec]) t -> (('d, 'k) Nvector.t, 's) sysfn option
* Sets the damping parameter { % $ \beta$ % } to use with
acceleration . Damping is disabled by default { % $ \beta = 1.0 $ % } .
@nonlinsol_module SUNNonlinSolSetDamping_FixedPoint
@since 5.1.0
acceleration. Damping is disabled by default {% $\beta = 1.0$ %}.
@nonlinsol_module SUNNonlinSolSetDamping_FixedPoint
@since 5.1.0 *)
val set_damping : ('d, 'k, 's, 'v) t -> float -> unit
* Create a nonlinear solver from a set of callback functions .
The callbacks should indicate failure by raising an exception ( preferably
one of the exceptions in this package ) . Raising
{ ! exception : Sundials . RecoverableFailure } indicates a generic recoverable
failure .
The expected operations are :
- [ init ] : initializes the nonlinear solver .
- [ setup ] : sets up the nonlinear solver with an initial iteration value .
- [ set_lsetup_fn ] : receive a linear solver setup callback .
- [ set_lsolve_fn ] : receive a linear solver callback .
- [ set_convtest_fn ] : receive a convergence test callback .
- [ set_max_iters ] : sets the maximum number of iterations .
- [ set_info_file ] : sets a logfile for informational messages .
- [ set_print_level ] : sets the level of verbosity for informational
messages ( 0 = none ) .
- [ get_num_iters ] : returns the number of iterations in the most recent
solve .
- [ get_cur_iter ] : returns the iteration index of the current solve . This function is required when using a convergence test provided by Sundials or one of the spils linear solvers .
- [ get_num_conv_fails ] : return the number of convergence failures in the
most recent solve .
- [ nls_type ] : the type of problem solved .
- [ solve ] : the call [ solve y0 y w tol callLSetup mem ] should solve the nonlinear system { % $ F(y ) = 0 $ % } or { % $ G(y ) = y$ % } , given the initial iterate [ y0 ] , which must not be modified , the solution error - weight vector [ w ] used for computing weighted error norms , the requested solution tolerance in the weighted root - mean - squared norm [ tol ] , a flag [ callLSetup ] indicating whether the integrator recommends calling the setup function , and a memory value to be passed to the system function .
- [ set_sys_fn ] : receive the system callback .
Note that the [ setup ] and [ solve ] functions are passed the payload data
directly , whereas the [ lsolvefn ] and [ sysfn]s require
the data to be wrapped in an nvector . This asymmetry is awkward but ,
unfortunately , unavoidable given the implementation of nvectors and the
different constraints for C - to - OCaml calls and OCaml - to - C calls .
The callbacks should indicate failure by raising an exception (preferably
one of the exceptions in this package). Raising
{!exception:Sundials.RecoverableFailure} indicates a generic recoverable
failure.
The expected operations are:
- [init]: initializes the nonlinear solver.
- [setup]: sets up the nonlinear solver with an initial iteration value.
- [set_lsetup_fn]: receive a linear solver setup callback.
- [set_lsolve_fn]: receive a linear solver callback.
- [set_convtest_fn]: receive a convergence test callback.
- [set_max_iters]: sets the maximum number of iterations.
- [set_info_file]: sets a logfile for informational messages.
- [set_print_level]: sets the level of verbosity for informational
messages (0 = none).
- [get_num_iters]: returns the number of iterations in the most recent
solve.
- [get_cur_iter]: returns the iteration index of the current solve. This function is required when using a convergence test provided by Sundials or one of the spils linear solvers.
- [get_num_conv_fails]: return the number of convergence failures in the
most recent solve.
- [nls_type]: the type of problem solved.
- [solve]: the call [solve y0 y w tol callLSetup mem] should solve the nonlinear system {% $F(y) = 0$ %} or {% $G(y) = y$ %}, given the initial iterate [y0], which must not be modified, the solution error-weight vector [w] used for computing weighted error norms, the requested solution tolerance in the weighted root-mean-squared norm [tol], a flag [callLSetup] indicating whether the integrator recommends calling the setup function, and a memory value to be passed to the system function.
- [set_sys_fn]: receive the system callback.
Note that the [setup] and [solve] functions are passed the payload data
directly, whereas the [lsolvefn] and [sysfn]s require
the data to be wrapped in an nvector. This asymmetry is awkward but,
unfortunately, unavoidable given the implementation of nvectors and the
different constraints for C-to-OCaml calls and OCaml-to-C calls. *)
val make :
?init : (unit -> unit)
-> ?setup : ('d -> 's -> unit)
-> ?set_lsetup_fn : ('s lsetupfn -> unit)
-> ?set_lsolve_fn : ((('d, 'k) Nvector.t, 's) lsolvefn -> unit)
-> ?set_convtest_fn : (('d, 's, [`Nvec]) convtestfn -> unit)
-> ?set_max_iters : (int -> unit)
-> ?set_info_file : (Logfile.t -> unit)
-> ?set_print_level : (int -> unit)
-> ?get_num_iters : (unit -> int)
-> ?get_cur_iter : (unit -> int)
-> ?get_num_conv_fails : (unit -> int)
-> nls_type : nonlinear_solver_type
-> solve : ('d -> 'd -> 'd -> float -> bool -> 's -> unit)
-> set_sys_fn : ((('d, 'k) Nvector.t, 's) sysfn -> unit)
-> ?context:Context.t
-> unit
-> ('d, 'k, 's, [`Nvec]) t
* Create a nonlinear solver from a set of callback functions for
sensitivity problems that pass arrays of nvectors . As for the
{ ! make } function except that the callbacks receive arrays of
values .
Writing custom nonlinear solvers for use with some forward sensitivity
methods requires the " internal " senswrapper type .
Any attempt to use { ! Senswrapper.t}s outside of the call to
setup or solve that provides them will result in an { ! IncorrectUse }
exception . They must only be used to extract the underlying data with
{ ! Senswrapper.data } or as arguments for lsolve_fn , convtest_fn , or sys_fn .
There are no restrictions on the arrays extracted with
{ ! Senswrapper.data } .
sensitivity problems that pass arrays of nvectors. As for the
{!make} function except that the callbacks receive arrays of
values.
Writing custom nonlinear solvers for use with some forward sensitivity
methods requires the "internal" senswrapper type.
Any attempt to use {!Senswrapper.t}s outside of the call to
setup or solve that provides them will result in an {!IncorrectUse}
exception. They must only be used to extract the underlying data with
{!Senswrapper.data} or as arguments for lsolve_fn, convtest_fn, or sys_fn.
There are no restrictions on the arrays extracted with
{!Senswrapper.data}. *)
val make_sens :
?init : (unit -> unit)
-> ?setup : (('d, 'k) Senswrapper.t -> 's -> unit)
-> ?set_lsetup_fn : ('s lsetupfn -> unit)
-> ?set_lsolve_fn : ((('d, 'k) Senswrapper.t, 's) lsolvefn -> unit)
-> ?set_convtest_fn : ((('d, 'k) Senswrapper.t, 's, [`Sens]) convtestfn -> unit)
-> ?set_max_iters : (int -> unit)
-> ?set_info_file : (Logfile.t -> unit)
-> ?set_print_level : (int -> unit)
-> ?get_num_iters : (unit -> int)
-> ?get_cur_iter : (unit -> int)
-> ?get_num_conv_fails : (unit -> int)
-> nls_type : nonlinear_solver_type
-> solve : (('d, 'k) Senswrapper.t
-> ('d, 'k) Senswrapper.t
-> ('d, 'k) Senswrapper.t
-> float -> bool -> 's -> unit)
-> set_sys_fn : ((('d, 'k) Senswrapper.t, 's) sysfn -> unit)
-> ?context:Context.t
-> unit
-> ('d, 'k, 's, [`Sens]) t
* { 2 : nlsexceptions Exceptions }
exception VectorOpError
* Raised when a nonlinear solver is used incorrectly .
For example , calling { ! solve } without having first called { ! set_sys_fn }
( [ SUN_NLS_MEM_NULL ] ) .
For example, calling {!solve} without having first called {!set_sys_fn}
([SUN_NLS_MEM_NULL]). *)
exception IncorrectUse
exception ExtFail
* Raised on an attempt to associate a nonlinear solver instance with more
than one session .
than one session. *)
exception NonlinearSolverInUse
|
c845f5a866a005cde98e73c8ed305786217b59c87edcaf0f1a9d23d5419bc393 | input-output-hk/ouroboros-network | Orphans.hs | {-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
# OPTIONS_GHC -fno - warn - orphans #
module Cardano.Tools.DBSynthesizer.Orphans () where
import Control.Monad (when)
import Data.Aeson as Aeson (FromJSON (..), withObject, (.!=), (.:),
(.:?))
import qualified Cardano.Chain.Update as Byron (ApplicationName (..))
import Cardano.Crypto (RequiresNetworkMagic (..))
import Cardano.Node.Types (AdjustFilePaths (..),
NodeByronProtocolConfiguration (..),
NodeHardForkProtocolConfiguration (..))
import Cardano.Tools.DBSynthesizer.Types
instance FromJSON NodeConfigStub where
parseJSON val = withObject "NodeConfigStub" (parse' val) val
where
parse' o v = do
proto <- v .: "Protocol"
when (proto /= ("Cardano" :: String)) $
fail $ "nodeConfig.Protocol expected: Cardano; found: " ++ proto
NodeConfigStub o
<$> v .: "AlonzoGenesisFile"
<*> v .: "ShelleyGenesisFile"
<*> v .: "ByronGenesisFile"
<*> v .: "ConwayGenesisFile"
instance AdjustFilePaths NodeConfigStub where
adjustFilePaths f nc =
nc {
ncsAlonzoGenesisFile = f $ ncsAlonzoGenesisFile nc
, ncsShelleyGenesisFile = f $ ncsShelleyGenesisFile nc
, ncsByronGenesisFile = f $ ncsByronGenesisFile nc
, ncsConwayGenesisFile = f $ ncsConwayGenesisFile nc
}
instance AdjustFilePaths NodeCredentials where
adjustFilePaths f nc =
nc {
credCertFile = f <$> credCertFile nc
, credVRFFile = f <$> credVRFFile nc
, credKESFile = f <$> credKESFile nc
, credBulkFile = f <$> credBulkFile nc
}
-- DUPLICATE: mirroring parsers from cardano-node/src/Cardano/Node/Configuration/POM.hs
instance FromJSON NodeHardForkProtocolConfiguration where
parseJSON = withObject "NodeHardForkProtocolConfiguration" $ \v ->
NodeHardForkProtocolConfiguration
<$> v .:? "TestEnableDevelopmentHardForkEras"
.!= False
<*> v .:? "TestShelleyHardForkAtEpoch"
<*> v .:? "TestShelleyHardForkAtVersion"
<*> v .:? "TestAllegraHardForkAtEpoch"
<*> v .:? "TestAllegraHardForkAtVersion"
<*> v .:? "TestMaryHardForkAtEpoch"
<*> v .:? "TestMaryHardForkAtVersion"
<*> v .:? "TestAlonzoHardForkAtEpoch"
<*> v .:? "TestAlonzoHardForkAtVersion"
<*> v .:? "TestBabbageHardForkAtEpoch"
<*> v .:? "TestBabbageHardForkAtVersion"
<*> v .:? "TestConwayHardForkAtEpoch"
<*> v .:? "TestConwayHardForkAtVersion"
instance FromJSON NodeByronProtocolConfiguration where
parseJSON = withObject "NodeByronProtocolConfiguration" $ \v ->
NodeByronProtocolConfiguration
<$> v .: "ByronGenesisFile"
<*> v .:? "ByronGenesisHash"
<*> v .:? "RequiresNetworkMagic"
.!= RequiresNoMagic
<*> v .:? "PBftSignatureThreshold"
<*> pure (Byron.ApplicationName "cardano-sl")
<*> v .:? "ApplicationVersion"
.!= 1
<*> v .: "LastKnownBlockVersion-Major"
<*> v .: "LastKnownBlockVersion-Minor"
<*> v .: "LastKnownBlockVersion-Alt"
.!= 0
| null | https://raw.githubusercontent.com/input-output-hk/ouroboros-network/162c2b426ca66047f92a7d073036c13a434bf026/ouroboros-consensus-cardano-tools/src/Cardano/Tools/DBSynthesizer/Orphans.hs | haskell | # LANGUAGE GADTs #
# LANGUAGE OverloadedStrings #
DUPLICATE: mirroring parsers from cardano-node/src/Cardano/Node/Configuration/POM.hs |
# OPTIONS_GHC -fno - warn - orphans #
module Cardano.Tools.DBSynthesizer.Orphans () where
import Control.Monad (when)
import Data.Aeson as Aeson (FromJSON (..), withObject, (.!=), (.:),
(.:?))
import qualified Cardano.Chain.Update as Byron (ApplicationName (..))
import Cardano.Crypto (RequiresNetworkMagic (..))
import Cardano.Node.Types (AdjustFilePaths (..),
NodeByronProtocolConfiguration (..),
NodeHardForkProtocolConfiguration (..))
import Cardano.Tools.DBSynthesizer.Types
instance FromJSON NodeConfigStub where
parseJSON val = withObject "NodeConfigStub" (parse' val) val
where
parse' o v = do
proto <- v .: "Protocol"
when (proto /= ("Cardano" :: String)) $
fail $ "nodeConfig.Protocol expected: Cardano; found: " ++ proto
NodeConfigStub o
<$> v .: "AlonzoGenesisFile"
<*> v .: "ShelleyGenesisFile"
<*> v .: "ByronGenesisFile"
<*> v .: "ConwayGenesisFile"
instance AdjustFilePaths NodeConfigStub where
adjustFilePaths f nc =
nc {
ncsAlonzoGenesisFile = f $ ncsAlonzoGenesisFile nc
, ncsShelleyGenesisFile = f $ ncsShelleyGenesisFile nc
, ncsByronGenesisFile = f $ ncsByronGenesisFile nc
, ncsConwayGenesisFile = f $ ncsConwayGenesisFile nc
}
instance AdjustFilePaths NodeCredentials where
adjustFilePaths f nc =
nc {
credCertFile = f <$> credCertFile nc
, credVRFFile = f <$> credVRFFile nc
, credKESFile = f <$> credKESFile nc
, credBulkFile = f <$> credBulkFile nc
}
instance FromJSON NodeHardForkProtocolConfiguration where
parseJSON = withObject "NodeHardForkProtocolConfiguration" $ \v ->
NodeHardForkProtocolConfiguration
<$> v .:? "TestEnableDevelopmentHardForkEras"
.!= False
<*> v .:? "TestShelleyHardForkAtEpoch"
<*> v .:? "TestShelleyHardForkAtVersion"
<*> v .:? "TestAllegraHardForkAtEpoch"
<*> v .:? "TestAllegraHardForkAtVersion"
<*> v .:? "TestMaryHardForkAtEpoch"
<*> v .:? "TestMaryHardForkAtVersion"
<*> v .:? "TestAlonzoHardForkAtEpoch"
<*> v .:? "TestAlonzoHardForkAtVersion"
<*> v .:? "TestBabbageHardForkAtEpoch"
<*> v .:? "TestBabbageHardForkAtVersion"
<*> v .:? "TestConwayHardForkAtEpoch"
<*> v .:? "TestConwayHardForkAtVersion"
instance FromJSON NodeByronProtocolConfiguration where
parseJSON = withObject "NodeByronProtocolConfiguration" $ \v ->
NodeByronProtocolConfiguration
<$> v .: "ByronGenesisFile"
<*> v .:? "ByronGenesisHash"
<*> v .:? "RequiresNetworkMagic"
.!= RequiresNoMagic
<*> v .:? "PBftSignatureThreshold"
<*> pure (Byron.ApplicationName "cardano-sl")
<*> v .:? "ApplicationVersion"
.!= 1
<*> v .: "LastKnownBlockVersion-Major"
<*> v .: "LastKnownBlockVersion-Minor"
<*> v .: "LastKnownBlockVersion-Alt"
.!= 0
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.