_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
0dc16f43c5d30a08a44905390ab3cc4683d1d277e3dd9100fe9d461501849381 | morpheusgraphql/morpheus-graphql | Validator.hs | # LANGUAGE DataKinds #
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
module Data.Morpheus.Types.Internal.Validation.Validator
( Validator (..),
SelectionValidator,
InputValidator,
BaseValidator,
runValidator,
Constraint (..),
setSelection,
inField,
inputMessagePrefix,
InputSource (..),
InputContext (..),
OperationContext (..),
renderInputPrefix,
Prop (..),
-- Resolution,
ScopeKind (..),
inputValueSource,
Scope (..),
setDirective,
startInput,
withContext,
renderField,
-- asks,
asksScope,
askVariables,
askFragments,
ValidatorContext (..),
FragmentValidator,
askTypeDefinitions,
withScope,
setPosition,
)
where
import Control.Monad.Except (MonadError (catchError, throwError))
import Control.Monad.Reader (asks)
import Data.Morpheus.Ext.Result
( GQLResult,
)
import Data.Morpheus.Types.Internal.AST
( ANY,
FieldDefinition (..),
FieldName,
Fragments,
IMPLEMENTABLE,
IN,
RAW,
Schema,
Stage,
TypeCategory,
TypeDefinition (..),
TypeName,
TypeRef (..),
VALID,
Variable (..),
VariableDefinitions,
intercalate,
typeDefinitions,
unpackName,
)
import Data.Morpheus.Types.Internal.AST.Error
import Data.Morpheus.Types.Internal.Config (Config (..))
import Data.Morpheus.Types.Internal.Validation.Scope
( Scope (..),
ScopeKind (..),
renderScope,
renderSection,
setDirective,
setPosition,
setSelection,
)
import Relude hiding
( Constraint,
asks,
get,
intercalate,
)
-- | One step of an input traversal: the field entered and the name of its
-- type. A list of these ('Path') pinpoints where in a nested input value an
-- error occurred.
data Prop = Prop
  { propName :: FieldName,
    propTypeName :: TypeName
  }
  deriving (Show)

-- | The chain of input fields traversed so far during input validation.
type Path = [Prop]
-- | Render a traversal path as an @\"in field a.b.c: \"@ error prefix.
-- An empty path renders as the empty message.
renderPath :: Path -> GQLError
renderPath props
  | null props = ""
  | otherwise =
      "in field " <> msg (intercalate "." (fmap propName props)) <> ": "
-- | Build the message prefix for an input validation error: the origin of the
-- value (argument, variable, or default) followed by the field path.
renderInputPrefix :: InputContext c -> GQLError
renderInputPrefix ctx = renderSource (inputSource ctx) <> renderPath (inputPath ctx)
-- | Render the origin of an input value for error messages: a field argument,
-- an operation variable, or an input field's default value.
renderSource :: InputSource -> GQLError
renderSource (SourceArgument argumentName) =
  "Argument " <> msg argumentName <> " got invalid value. "
renderSource (SourceVariable Variable {variableName} _) =
  "Variable " <> msg ("$" <> variableName) <> " got invalid value. "
renderSource SourceInputField {sourceTypeName, sourceFieldName, sourceArgumentName} =
  "Field " <> renderField sourceTypeName sourceFieldName sourceArgumentName <> " got invalid default value. "
-- | Render a field reference as @TypeName.fieldName@, with an optional
-- @(argName:)@ suffix when an argument name is supplied.
renderField :: TypeName -> FieldName -> Maybe FieldName -> GQLError
renderField tName fName arg =
  msg (unpackName tName <> "." <> unpackName fName <> argSuffix :: Text)
  where
    -- empty when no argument; "(argName:)" otherwise
    argSuffix = maybe "" (\argName -> "(" <> unpackName argName <> ":)") arg
-- | Local context while validating an operation: the document's fragments
-- (at stage @s2@), its variable definitions (at stage @s1@), and the optional
-- operation name (used for error reporting).
data OperationContext (s1 :: Stage) (s2 :: Stage) = OperationContext
  { fragments :: Fragments s2,
    variables :: VariableDefinitions s1,
    operationName :: Maybe FieldName
  }
  deriving (Show)

-- | Local context while validating an input value: the value's origin, the
-- field path walked so far, and the surrounding context @ctx@.
data InputContext ctx = InputContext
  { inputSource :: InputSource,
    inputPath :: [Prop],
    sourceContext :: ctx
  }
  deriving (Show)
-- | Where an input value under validation originated.
data InputSource
  = SourceArgument FieldName -- ^ value passed as a field argument
  | SourceVariable -- ^ value bound to an operation variable
      { sourceVariable :: Variable RAW,
        isDefaultValue :: Bool -- ^ whether the variable's declared default was used
      }
  | SourceInputField -- ^ default value declared on an input object field
      { sourceTypeName :: TypeName,
        sourceFieldName :: FieldName,
        sourceArgumentName :: Maybe FieldName
      }
  deriving (Show)

-- | Evidence restricting the category of type being validated against:
-- implementable (object/interface) types or input types.
data Constraint (a :: TypeCategory) where
  IMPLEMENTABLE :: Constraint IMPLEMENTABLE
  INPUT :: Constraint IN
-- | Run an input validator one level deeper: record that validation entered
-- the given input field by appending it to the context's 'inputPath', so
-- error messages point at the exact nested field.
inField :: FieldDefinition IN s -> InputValidator s c a -> InputValidator s c a
inField
  FieldDefinition
    { fieldName,
      fieldType = TypeRef {typeConName}
    } = withContext update
    where
      -- extend the path; all other InputContext fields pass through (wildcards)
      update
        InputContext
          { inputPath = old,
            ..
          } =
          InputContext
            { inputPath = old <> [Prop fieldName typeConName],
              ..
            }
-- | Ask where the input value currently being validated came from.
inputValueSource :: MonadReader (ValidatorContext s (InputContext c)) m => m InputSource
inputValueSource = asksLocal inputSource
-- | Project a value out of the current validation 'Scope'.
asksScope :: MonadReader (ValidatorContext s ctx) m => (Scope -> a) -> m a
asksScope f = f <$> asks scope
-- | Look up all type definitions of the schema being validated against.
askTypeDefinitions ::
  MonadReader (ValidatorContext s ctx) m =>
  m (HashMap TypeName (TypeDefinition ANY s))
askTypeDefinitions = asks (typeDefinitions . schema)

-- | Ask for the operation's variable definitions (stage @s2@).
askVariables :: MonadReader (ValidatorContext s1 (OperationContext s2 s3)) m => m (VariableDefinitions s2)
askVariables = asksLocal variables

-- | Ask for the document's fragment definitions (stage @s3@).
askFragments :: MonadReader (ValidatorContext s1 (OperationContext s2 s3)) m => m (Fragments s3)
askFragments = asksLocal fragments
-- | Execute a validator with the given configuration, schema, scope and local
-- context, producing a 'GQLResult'. (RecordWildCards assemble the
-- 'ValidatorContext' from the like-named arguments.)
runValidator :: Validator s ctx a -> Config -> Schema s -> Scope -> ctx -> GQLResult a
runValidator (Validator x) config schema scope localContext =
  runReaderT x ValidatorContext {..}

-- | Adapt a validator to a different local context by mapping the context
-- (contravariantly) before the wrapped validator runs.
withContext :: (c' -> c) -> Validator s c a -> Validator s c' a
withContext f = Validator . withReaderT (fmap f) . _runValidator
-- | Render the current input error prefix (value origin plus field path)
-- from the local 'InputContext'.
inputMessagePrefix :: InputValidator s ctx GQLError
inputMessagePrefix = asksLocal renderInputPrefix
-- | Enter input validation: wrap the current local context in a fresh
-- 'InputContext' carrying the given value origin and an empty field path.
startInput :: InputSource -> InputValidator s ctx a -> Validator s ctx a
startInput inputSource = withContext update
  where
    update sourceContext =
      InputContext
        { inputSource,
          inputPath = [],
          sourceContext
        }
-- | Full environment available during validation: the current 'Scope', the
-- schema at stage @s@, a phase-specific local context @ctx@, and the global
-- 'Config'. 'Functor' maps over the local context.
data ValidatorContext (s :: Stage) (ctx :: Type) = ValidatorContext
  { scope :: Scope,
    schema :: Schema s,
    localContext :: ctx,
    config :: Config
  }
  deriving
    ( Show,
      Functor
    )

-- | The validation monad: a reader over 'ValidatorContext' producing
-- 'GQLResult's. Errors thrown via 'MonadError' may be decorated with scope
-- information (see 'fromValidationError').
newtype Validator s ctx a = Validator
  { _runValidator ::
      ReaderT
        (ValidatorContext s ctx)
        GQLResult
        a
  }
  deriving newtype
    ( Functor,
      Applicative,
      Monad,
      MonadReader (ValidatorContext s ctx)
    )
-- | The three phases of document validation.
data ValidationTarget
  = Base
  | Fragments
  | Selections

-- | Maps each validation phase to the stages its 'OperationContext' carries:
-- variables are validated first, then fragments, then selections.
type family ValidationStage (s :: ValidationTarget) where
  ValidationStage 'Base = OperationContext RAW RAW
  ValidationStage 'Fragments = OperationContext VALID RAW
  ValidationStage 'Selections = OperationContext VALID VALID

-- | A validator for the given phase, over a VALID schema.
type ValidatorM (s :: ValidationTarget) = Validator VALID (ValidationStage s)

-- | Phase with raw variables and raw fragments.
type BaseValidator = ValidatorM 'Base

-- | Validates fragments whose selections are at stage @s@.
type FragmentValidator (s :: Stage) = Validator VALID (OperationContext VALID s)

-- | Phase with validated variables and fragments.
type SelectionValidator = ValidatorM 'Selections

-- | Validator for input values: local context wrapped in an 'InputContext'.
type InputValidator s ctx = Validator s (InputContext ctx)
-- | Run an action with a locally modified 'Scope'; the change is reverted
-- afterwards (via 'local').
withScope ::
  (MonadReader (ValidatorContext s c) m) =>
  (Scope -> Scope) ->
  m b ->
  m b
withScope f = local (\ValidatorContext {..} -> ValidatorContext {scope = f scope, ..})

-- | Project a value out of the phase-specific local context.
asksLocal :: MonadReader (ValidatorContext s c) m => (c -> a) -> m a
asksLocal f = asks (f . localContext)
-- | Errors thrown inside a 'Validator' are enriched with the current
-- validation context before propagating.
instance MonadError GQLError (Validator s ctx) where
  throwError err = Validator $ do
    ctx <- ask
    throwError (fromValidationError ctx err)
  catchError (Validator x) f = Validator (catchError x (_runValidator . f))

-- | Attach context to an error: for internal errors, or when debug mode is
-- on, the rendered context and source positions are appended and the current
-- path is attached; otherwise the error passes through unchanged.
fromValidationError :: ValidatorContext s ctx -> GQLError -> GQLError
fromValidationError
  context@ValidatorContext
    { config,
      scope = Scope {position, path}
    }
  err
    | isInternal err || debug config =
        ( err
            <> renderContext context
            `atPositions` position
        )
          `withPath` path
    | otherwise = err
-- | Render the current scope together with the schema definition section,
-- used when decorating internal/debug errors.
renderContext :: ValidatorContext s ctx -> GQLError
renderContext
  ValidatorContext
    { schema,
      scope
    } =
    renderScope scope
      <> renderSection "SchemaDefinition" schema
| null | https://raw.githubusercontent.com/morpheusgraphql/morpheus-graphql/cfaadc6f8491548bccab856757e95584946681b7/morpheus-graphql-core/src/Data/Morpheus/Types/Internal/Validation/Validator.hs | haskell | # LANGUAGE GADTs #
# LANGUAGE OverloadedStrings #
# LANGUAGE RankNTypes #
Resolution,
asks, | # LANGUAGE DataKinds #
# LANGUAGE DeriveFunctor #
# LANGUAGE DerivingStrategies #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE NamedFieldPuns #
# LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
# LANGUAGE UndecidableInstances #
# LANGUAGE NoImplicitPrelude #
module Data.Morpheus.Types.Internal.Validation.Validator
( Validator (..),
SelectionValidator,
InputValidator,
BaseValidator,
runValidator,
Constraint (..),
setSelection,
inField,
inputMessagePrefix,
InputSource (..),
InputContext (..),
OperationContext (..),
renderInputPrefix,
Prop (..),
ScopeKind (..),
inputValueSource,
Scope (..),
setDirective,
startInput,
withContext,
renderField,
asksScope,
askVariables,
askFragments,
ValidatorContext (..),
FragmentValidator,
askTypeDefinitions,
withScope,
setPosition,
)
where
import Control.Monad.Except (MonadError (catchError, throwError))
import Control.Monad.Reader (asks)
import Data.Morpheus.Ext.Result
( GQLResult,
)
import Data.Morpheus.Types.Internal.AST
( ANY,
FieldDefinition (..),
FieldName,
Fragments,
IMPLEMENTABLE,
IN,
RAW,
Schema,
Stage,
TypeCategory,
TypeDefinition (..),
TypeName,
TypeRef (..),
VALID,
Variable (..),
VariableDefinitions,
intercalate,
typeDefinitions,
unpackName,
)
import Data.Morpheus.Types.Internal.AST.Error
import Data.Morpheus.Types.Internal.Config (Config (..))
import Data.Morpheus.Types.Internal.Validation.Scope
( Scope (..),
ScopeKind (..),
renderScope,
renderSection,
setDirective,
setPosition,
setSelection,
)
import Relude hiding
( Constraint,
asks,
get,
intercalate,
)
data Prop = Prop
{ propName :: FieldName,
propTypeName :: TypeName
}
deriving (Show)
type Path = [Prop]
renderPath :: Path -> GQLError
renderPath [] = ""
renderPath path = "in field " <> msg (intercalate "." $ fmap propName path) <> ": "
renderInputPrefix :: InputContext c -> GQLError
renderInputPrefix InputContext {inputPath, inputSource} =
renderSource inputSource <> renderPath inputPath
renderSource :: InputSource -> GQLError
renderSource (SourceArgument argumentName) =
"Argument " <> msg argumentName <> " got invalid value. "
renderSource (SourceVariable Variable {variableName} _) =
"Variable " <> msg ("$" <> variableName) <> " got invalid value. "
renderSource SourceInputField {sourceTypeName, sourceFieldName, sourceArgumentName} =
"Field " <> renderField sourceTypeName sourceFieldName sourceArgumentName <> " got invalid default value. "
renderField :: TypeName -> FieldName -> Maybe FieldName -> GQLError
renderField tName fName arg =
msg (unpackName tName <> "." <> unpackName fName <> renderArg arg :: Text)
where
renderArg (Just argName) = "(" <> unpackName argName <> ":)"
renderArg Nothing = ""
data OperationContext (s1 :: Stage) (s2 :: Stage) = OperationContext
{ fragments :: Fragments s2,
variables :: VariableDefinitions s1,
operationName :: Maybe FieldName
}
deriving (Show)
data InputContext ctx = InputContext
{ inputSource :: InputSource,
inputPath :: [Prop],
sourceContext :: ctx
}
deriving (Show)
data InputSource
= SourceArgument FieldName
| SourceVariable
{ sourceVariable :: Variable RAW,
isDefaultValue :: Bool
}
| SourceInputField
{ sourceTypeName :: TypeName,
sourceFieldName :: FieldName,
sourceArgumentName :: Maybe FieldName
}
deriving (Show)
data Constraint (a :: TypeCategory) where
IMPLEMENTABLE :: Constraint IMPLEMENTABLE
INPUT :: Constraint IN
inField :: FieldDefinition IN s -> InputValidator s c a -> InputValidator s c a
inField
FieldDefinition
{ fieldName,
fieldType = TypeRef {typeConName}
} = withContext update
where
update
InputContext
{ inputPath = old,
..
} =
InputContext
{ inputPath = old <> [Prop fieldName typeConName],
..
}
inputValueSource :: MonadReader (ValidatorContext s (InputContext c)) m => m InputSource
inputValueSource = asksLocal inputSource
asksScope :: MonadReader (ValidatorContext s ctx) m => (Scope -> a) -> m a
asksScope f = asks (f . scope)
askTypeDefinitions ::
MonadReader (ValidatorContext s ctx) m =>
m (HashMap TypeName (TypeDefinition ANY s))
askTypeDefinitions = asks (typeDefinitions . schema)
askVariables :: MonadReader (ValidatorContext s1 (OperationContext s2 s3)) m => m (VariableDefinitions s2)
askVariables = asksLocal variables
askFragments :: MonadReader (ValidatorContext s1 (OperationContext s2 s3)) m => m (Fragments s3)
askFragments = asksLocal fragments
runValidator :: Validator s ctx a -> Config -> Schema s -> Scope -> ctx -> GQLResult a
runValidator (Validator x) config schema scope localContext =
runReaderT x ValidatorContext {..}
withContext :: (c' -> c) -> Validator s c a -> Validator s c' a
withContext f = Validator . withReaderT (fmap f) . _runValidator
inputMessagePrefix :: InputValidator s ctx GQLError
inputMessagePrefix =
renderInputPrefix
. localContext
<$> Validator ask
startInput :: InputSource -> InputValidator s ctx a -> Validator s ctx a
startInput inputSource = withContext update
where
update sourceContext =
InputContext
{ inputSource,
inputPath = [],
sourceContext
}
data ValidatorContext (s :: Stage) (ctx :: Type) = ValidatorContext
{ scope :: Scope,
schema :: Schema s,
localContext :: ctx,
config :: Config
}
deriving
( Show,
Functor
)
newtype Validator s ctx a = Validator
{ _runValidator ::
ReaderT
(ValidatorContext s ctx)
GQLResult
a
}
deriving newtype
( Functor,
Applicative,
Monad,
MonadReader (ValidatorContext s ctx)
)
data ValidationTarget
= Base
| Fragments
| Selections
type family ValidationStage (s :: ValidationTarget) where
ValidationStage 'Base = OperationContext RAW RAW
ValidationStage 'Fragments = OperationContext VALID RAW
ValidationStage 'Selections = OperationContext VALID VALID
type ValidatorM (s :: ValidationTarget) = Validator VALID (ValidationStage s)
type BaseValidator = ValidatorM 'Base
type FragmentValidator (s :: Stage) = Validator VALID (OperationContext VALID s)
type SelectionValidator = ValidatorM 'Selections
type InputValidator s ctx = Validator s (InputContext ctx)
withScope ::
(MonadReader (ValidatorContext s c) m) =>
(Scope -> Scope) ->
m b ->
m b
withScope f = local (\ValidatorContext {..} -> ValidatorContext {scope = f scope, ..})
asksLocal :: MonadReader (ValidatorContext s c) m => (c -> a) -> m a
asksLocal f = asks (f . localContext)
instance MonadError GQLError (Validator s ctx) where
throwError err = Validator $ do
ctx <- ask
throwError (fromValidationError ctx err)
catchError (Validator x) f = Validator (catchError x (_runValidator . f))
fromValidationError :: ValidatorContext s ctx -> GQLError -> GQLError
fromValidationError
context@ValidatorContext
{ config,
scope = Scope {position, path}
}
err
| isInternal err || debug config =
( err
<> renderContext context
`atPositions` position
)
`withPath` path
| otherwise = err
renderContext :: ValidatorContext s ctx -> GQLError
renderContext
ValidatorContext
{ schema,
scope
} =
renderScope scope
<> renderSection "SchemaDefinition" schema
|
8c7da119bd746bd69a751bb999382a5fda26dc23ebf45c8248913cfeeaed0091 | JacquesCarette/Drasil | Body.hs | # LANGUAGE PostfixOperators #
module Drasil.SWHS.Body where
import Language.Drasil hiding (organization, section, variable)
import Drasil.SRSDocument
import qualified Drasil.DocLang.SRS as SRS (inModel)
import Theory.Drasil (GenDefn, InstanceModel)
import Language.Drasil.Chunk.Concept.NamedCombinators
import qualified Language.Drasil.NounPhrase.Combinators as NP
import qualified Language.Drasil.Sentence.Combinators as S
import Control.Lens ((^.))
import qualified Data.Drasil.Concepts.Documentation as Doc (srs)
import Data.Drasil.TheoryConcepts as Doc (inModel)
import Data.Drasil.Concepts.Computation (algorithm, compcon)
import Data.Drasil.Concepts.Documentation as Doc (assumption, column, condition,
constraint, corSol, datum, document, environment,input_, model, organization,
output_, physical, physics, property, quantity, software, softwareSys, solution,
srsDomains, sysCont, system, user, value, variable, doccon, doccon')
import Data.Drasil.Concepts.Education (calculus, educon, engineering)
import Data.Drasil.Concepts.Math (de, equation, ode, rightSide, unit_, mathcon, mathcon')
import Data.Drasil.Concepts.PhysicalProperties (materialProprty, physicalcon)
import Data.Drasil.Concepts.Physics (physicCon)
import Data.Drasil.Concepts.Software (program, softwarecon, correctness,
understandability, reusability, maintainability, verifiability)
import Data.Drasil.Concepts.Thermodynamics (enerSrc, heatTrans, htFlux,
htTransTheo, lawConsEnergy, thermalAnalysis, thermalConduction, thermalEnergy,
thermocon)
import Data.Drasil.Quantities.Math (surArea, surface, uNormalVect)
import Data.Drasil.Quantities.PhysicalProperties (vol)
import Data.Drasil.Quantities.Physics (energy, time, physicscon)
import Data.Drasil.Quantities.Thermodynamics (heatCapSpec, latentHeat)
import Data.Drasil.Software.Products (sciCompS, prodtcon)
import Data.Drasil.People (brooks, spencerSmith, thulasi)
import Data.Drasil.SI_Units (metre, kilogram, second, centigrade, joule, watt,
fundamentals, derived, m_2, m_3)
import Drasil.SWHS.Assumptions (assumpPIS, assumptions)
import Drasil.SWHS.Changes (likelyChgs, unlikelyChgs)
import Drasil.SWHS.Concepts (acronymsFull, coil, con, phaseChangeMaterial,
phsChgMtrl, progName, sWHT, swhsPCM, tank, tankPCM, transient, water)
import qualified Drasil.SWHS.DataDefs as SWHS (dataDefs)
import Drasil.SWHS.GenDefs (genDefs, htFluxWaterFromCoil, htFluxPCMFromWater)
import Drasil.SWHS.Goals (goals)
import Drasil.SWHS.IMods (eBalanceOnWtr, eBalanceOnPCM, heatEInWtr, heatEInPCM,
iMods, instModIntro)
import Drasil.SWHS.References (citations, koothoor2013, smithLai2005)
import Drasil.SWHS.Requirements (funcReqs, inReqDesc, nfRequirements, verifyEnergyOutput)
import Drasil.SWHS.TMods (tMods)
import Drasil.SWHS.Unitals (absTol, coilHTC, coilSA, consTol, constrained,
htFluxC, htFluxP, inputs, inputConstraints, outputs, pcmE, pcmHTC, pcmSA,
relTol, simTime, specParamValList, symbols, symbolsAll, tempC, tempPCM,
tempW, thickness, unitalChuncks, watE)
-------------------------------------------------------------------------------
-- | The generated Software Requirements Specification document for SWHS.
srs :: Document
srs = mkDoc mkSRS S.forT si
-- | System information with the chunk database filled in from the SRS
-- declaration; consumed by the document generators.
fullSI :: SystemInformation
fullSI = fillcdbSRS mkSRS si
-- | Printing configuration (equational style, default settings).
printSetting :: PrintingInformation
printSetting = piSys fullSI Equational defaultConfiguration
-- | Relative path to the SWHS figures and other static resources.
resourcePath :: String
resourcePath = "../../../../datafiles/swhs/"
-- | SI units used in the document (base units plus derived ones).
units :: [UnitDefn]
units = map unitWrapper [metre, kilogram, second] ++
  map unitWrapper [centigrade, joule, watt]
--Will there be a table of contents?
-- | System information record: authors, symbols, models, constraints and
-- databases that drive generation of the SWHS SRS.
si :: SystemInformation
si = SI {
  _sys = swhsPCM,
  _kind = Doc.srs,
  _authors = [thulasi, brooks, spencerSmith],
  _purpose = [],
  _background = [],
  _quants = symbols,
  _concepts = [] :: [DefinedQuantityDict],
  _instModels = insModel,
  _datadefs = SWHS.dataDefs,
  _configFiles = [],
  _inputs = inputs,
  _outputs = map qw outputs,
  _defSequence = [] :: [Block SimpleQDef],
  _constraints = constrained,
  _constants = specParamValList,
  _sysinfodb = symbMap,
  _usedinfodb = usedDB,
   refdb = refDB
}
-- | Main chunk database: every quantity, named idea, concept and unit that
-- may be referenced anywhere in the generated document.
symbMap :: ChunkDB
symbMap = cdb (qw heatEInPCM : symbolsAll) -- heatEInPCM ?
  (nw heatEInPCM : map nw symbols ++ map nw acronymsFull
  ++ map nw thermocon ++ map nw units ++ map nw [m_2, m_3] ++ map nw [absTol, relTol]
  ++ map nw physicscon ++ map nw doccon ++ map nw softwarecon ++ map nw doccon' ++ map nw con
  ++ map nw prodtcon ++ map nw physicCon ++ map nw mathcon ++ map nw mathcon' ++ map nw specParamValList
  ++ map nw fundamentals ++ map nw educon ++ map nw derived ++ map nw physicalcon ++ map nw unitalChuncks
  ++ [nw swhsPCM, nw algorithm] ++ map nw compcon ++ [nw materialProprty])
  (cw heatEInPCM : map cw symbols ++ srsDomains ++ map cw specParamValList) -- FIXME: heatEInPCM?
  (units ++ [m_2, m_3]) SWHS.dataDefs insModel genDefs tMods concIns section [] []
-- | Minimal database of chunks actually used (symbols and acronyms only).
usedDB :: ChunkDB
usedDB = cdb ([] :: [QuantityDict]) (map nw symbols ++ map nw acronymsFull)
  ([] :: [ConceptChunk]) ([] :: [UnitDefn]) [] [] [] [] [] [] [] ([] :: [Reference])
-- | Reference database: bibliography citations and concept instances.
refDB :: ReferenceDB
refDB = rdb citations concIns
-- | Declarative layout of the SRS: table of contents, reference material,
-- introduction, general/specific system descriptions, requirements, likely
-- and unlikely changes, traceability, auxiliary constants and bibliography.
mkSRS :: SRSDecl
mkSRS = [TableOfContents,
  RefSec $ RefProg intro [
    TUnits,
    tsymb'' tSymbIntro $ TermExcept [uNormalVect],
    TAandA],
  IntroSec $
    IntroProg (introStart +:+ introStartSWHS) (introEnd (plural swhsPCM) progName)
    [IPurpose $ purpDoc progName Verbose,
    IScope scope,
    IChar [] charsOfReader [],
    IOrgSec orgDocIntro inModel (SRS.inModel [] []) orgDocEnd
    ],
  GSDSec $ GSDProg
    [ SysCntxt [sysCntxtDesc progName, LlC sysCntxtFig, sysCntxtRespIntro progName, systContRespBullets]
    , UsrChars [userChars progName]
    , SystCons [] []
    ],
  SSDSec $
    SSDProg
    [ SSDProblem $ PDProg probDescIntro []
      [ TermsAndDefs Nothing terms
      , PhySysDesc progName physSystParts figTank []
      , Goals goalInputs]
    , SSDSolChSpec $ SCSProg
      [ Assumptions
      , TMs [] (Label : stdFields)
      , GDs [] ([Label, Units] ++ stdFields) ShowDerivation
      , DDs [] ([Label, Symbol, Units] ++ stdFields) ShowDerivation
      , IMs [instModIntro] ([Label, Input, Output, InConstraints, OutConstraints] ++ stdFields) ShowDerivation
      , Constraints dataConTail inputConstraints
      , CorrSolnPpties outputConstraints propsDeriv
      ]
    ],
  ReqrmntSec $ ReqsProg [
    FReqsSub inReqDesc [],
    NonFReqsSub
  ],
  LCsSec,
  UCsSec,
  TraceabilitySec $ TraceabilityProg $ traceMatStandard si,
  AuxConstntSec $ AuxConsProg progName specParamValList,
  Bibliography]
-- | Introductory remarks for the table of symbols (purpose, conventions,
-- ordering, vector-unit note).
tSymbIntro :: [TSIntro]
tSymbIntro = [TSPurpose, SymbConvention
  [Lit (nw heatTrans), Doc' (nw progName)], SymbOrder, VectorUnits]
-- | Instance models solved by SWHS (energy balances and heat-energy outputs).
insModel :: [InstanceModel]
insModel = [eBalanceOnWtr, eBalanceOnPCM, heatEInWtr, heatEInPCM]
-- | All traceable concept instances: goals, assumptions, changes, requirements.
concIns :: [ConceptInstance]
concIns = goals ++ assumptions ++ likelyChgs ++ unlikelyChgs ++ funcReqs
  ++ nfRequirements
-- | All sections extracted from the generated document.
section :: [Section]
section = extractSection srs
-- | Fields displayed for each theory/definition chunk.
stdFields :: Fields
stdFields = [DefiningEquation, Description Verbose IncludeUnits, Notes, Source, RefBy]
-- | Non-functional requirement priorities for SWHS.
priorityNFReqs :: [ConceptChunk]
priorityNFReqs = [correctness, verifiability, understandability, reusability,
  maintainability]
-- It is sometimes hard to remember to add new sections both here and above.
-- =================================== --
-- SOFTWARE REQUIREMENTS SPECIFICATION --
-- =================================== --
------------------------------
-- Section 2 : INTRODUCTION --
------------------------------
-- | Opening motivation: demand for renewable energy sources and storage.
introStart :: Sentence
introStart = foldlSent [S "Due to", foldlList Comma List (map S
  ["increasing costs", "diminishing availability", "negative environmental impact"]) `S.of_`
  S "fossil fuels" `sC` S "the demand is high for renewable", pluralNP (enerSrc `and_PS`
  energy), S "storage technology"]
-- | Motivation specific to SWHS with PCM: latent-heat storage allows a
-- smaller tank than traditional designs.
introStartSWHS :: Sentence
introStartSWHS = foldlSent [capSent (swhsPCM ^. defn), sParen (short phsChgMtrl),
  S "use a renewable", phrase enerSrc `S.and_` S "provide a novel way of storing" +:+.
  phrase energy, atStart swhsPCM, S "improve over the traditional", plural progName,
  S "because of their smaller size. The smaller size is possible because of the ability" `S.of_`
  short phsChgMtrl, S "to store", phrase thermalEnergy, S "as", phrase latentHeat `sC`
  S "which allows higher", phrase thermalEnergy, S "storage capacity per",
  phrase unit_, S "weight"]
-- | Closing sentence of the introduction, naming the developed program.
introEnd :: Sentence -> CI -> Sentence
introEnd progSent pro = foldlSent_ [(progSent !.), S "The developed",
  phrase program, S "will be referred to as", titleize pro, sParen (short pro)]
-- SSP has same style sentence here
-------------------------------
-- 2.1 : Purpose of Document --
-------------------------------
-- Purpose of Document automatically generated in IPurpose
--How to italicize words in sentence?
--How to cite?
---------------------------------
-- 2.2 : Scope of Requirements --
---------------------------------
-- | Scope of requirements: thermal analysis of a single tank containing
-- water and PCM.
scope :: Sentence
scope = foldlSent_ [phrase thermalAnalysis `S.of_` S "a single" +:+. phrase tankPCM,
  S "This entire", phrase document `S.is` S "written assuming that the substances inside the",
  phrase sWHT `S.are` phraseNP (and_Gen phrase short water phsChgMtrl)]
-- There is a similar paragraph in each example, but there's a lot of specific
-- info here. Would need to abstract out the object of analysis (i.e. solar
-- water heating tank rating PCM, 2D slope composed of homogeneous soil
-- layers, glass slab and blast, or 2D bodies acted on by forces) and also
-- abstract out the overall goal of the program (i.e. predict the temperature
-- and energy histories for the water and PCM, simulate how 2D rigid bodies
-- interact with each other, predict whether the glass slab is safe to use or
-- not, etc.). If that is done, then this paragraph can also be abstracted out.
----------------------------------------------
-- 2.3 : Characteristics of Intended Reader --
----------------------------------------------
-- | Background knowledge expected of the reader.
charsOfReader :: [Sentence]
charsOfReader = [charReaderHTT, charReaderDE]
-- | Reader characteristic: heat transfer theory (level 3/4 mech. eng.).
charReaderHTT :: Sentence
charReaderHTT = foldlSent_ [phrase htTransTheo, S "from level 3 or 4",
  S "mechanical", phrase engineering]
-- | Reader characteristic: differential equations (level 1/2 calculus).
charReaderDE :: Sentence
charReaderDE = plural de +:+ S "from level 1 and 2" +:+ phrase calculus
------------------------------------
-- 2.4 : Organization of Document --
------------------------------------
-- | Opening of the document-organization section, citing the SRS template.
orgDocIntro :: Sentence
orgDocIntro = foldlSent [atStartNP (the organization), S "of this",
  phrase document, S "follows the template for an", short Doc.srs
  `S.for` phrase sciCompS, S "proposed by", refS koothoor2013 `S.and_`
  refS smithLai2005]
-- | Closing of the document-organization section, pointing at the instance
-- models (ODEs and algebraic equations) the program solves.
orgDocEnd :: Sentence
orgDocEnd = foldlSent_ [atStartNP' (the inModel),
  S "to be solved are referred to as" +:+.
  foldlList Comma List (map refS iMods), S "The", plural inModel,
  S "provide the", plural ode, sParen (short ode :+: S "s") `S.and_`
  S "algebraic", plural equation, S "that", phrase model,
  (phraseNP (the swhsPCM) !.), short progName, S "solves these", short ode :+: S "s"]
-- This paragraph is mostly general (besides program name and number of IMs),
-- but there are some differences between the examples that I'm not sure how to
-- account for. Specifically, the glass example references a Volere paper that
-- is not used for the other examples. Besides that, this paragraph could
-- probably be abstracted out with some changes (i.e. the other examples don't
-- include the last sentence, so we might not need to know the number of IMs
-- after all if we just leave that sentence out)
-- IM1 to IM4 : reference later
-- how to cite/reference?
-- If all SRS have the same basic layout, is it possible to automate
-- the sectioning? This would also improve the tediousness of declaring
-- LayoutObjs
--------------------------------------------
-- Section 3: GENERAL SYSTEM DESCRIPTION --
--------------------------------------------
--------------------------
-- 3.1 : System Context --
--------------------------
-- | Prose describing the system context figure (user, system, data flow).
sysCntxtDesc :: CI -> Contents
sysCntxtDesc pro = foldlSP [refS sysCntxtFig, S "shows the" +:+.
  phrase sysCont, S "A circle represents an external entity outside the",
  phrase software `sC` phraseNP (the user) +:+. S "in this case",
  S "A rectangle represents the", phrase softwareSys, S "itself" +:+.
  sParen (short pro), S "Arrows are used to show the", plural datum,
  S "flow between the", phraseNP (system `andIts` environment)]
-- | The system context figure, loaded from the resource directory.
sysCntxtFig :: LabelledContent
sysCntxtFig = llcc (makeFigRef "SysCon") $ fig (foldlSent_
  [refS sysCntxtFig +: EmptyS, titleize sysCont])
  $ resourcePath ++ "SystemContextFigure.png"
-- | Lead-in for the user/system responsibility lists.
sysCntxtRespIntro :: CI -> Contents
sysCntxtRespIntro pro = foldlSPCol [short pro +:+. S "is mostly self-contained",
  S "The only external interaction is through the", phrase user +:+.
  S "interface", S "responsibilities" `S.the_ofTheC` phraseNP (user `andThe`
  system) `S.are` S "as follows"]
-- | Nested bullet list of user and program responsibilities.
systContRespBullets :: Contents
systContRespBullets = UlC $ ulcc $ Enumeration $ bulletNested
  [titleize user +: S "Responsibilities", short progName +: S "Responsibilities"]
  $ map bulletFlat [userResp, swhsResp]
-- | Responsibilities of the user: supply correct, consistently-united input.
userResp :: [Sentence]
userResp = map foldlSent_ [
  [S "Provide the", phrase input_, plural datum `S.toThe`
  phrase system `sC` S "ensuring no errors in the", plural datum, S "entry"],
  [S "Take care that consistent", plural unit_, S "are used for",
  phrase input_, plural variable]
  ]
-- | Responsibilities of the program: detect bad input, check constraints,
-- compute outputs.
swhsResp :: [Sentence]
swhsResp = map foldlSent_ [
  [S "Detect", plural datum, S "type mismatch, such as a string" `S.of_`
  S "characters instead of a floating point number"],
  [S "Determine if the", plural input_, S "satisfy the required",
  phraseNP (physical `and_` software), plural constraint],
  [S "Calculate the required", plural output_]
  ]
--------------------------------
-- 3.2 : User Characteristics --
--------------------------------
-- | Expected background of the end user (level 1 calculus and physics).
userChars :: CI -> Contents
userChars pro = foldlSP [S "The end", phrase user `S.of_` short pro,
  S "should have an understanding of undergraduate Level 1 Calculus" `S.and_`
  titleize Doc.physics]
-- Some of these course names are repeated between examples, could potentially
-- be abstracted out.
------------------------------
-- 3.3 : System Constraints --
------------------------------
---------------------------------------------
-- Section 4 : SPECIFIC SYSTEM DESCRIPTION --
---------------------------------------------
-------------------------------
-- 4.1 : Problem Description --
-------------------------------
-- | Problem statement: investigate the effect of a PCM inside the tank.
probDescIntro :: Sentence
probDescIntro = foldlSent_ [S "investigate the effect" `S.of_` S "employing",
  short phsChgMtrl, S "within a", phrase sWHT]
-----------------------------------------
-- 4.1.1 : Terminology and Definitions --
-----------------------------------------
-- | Concepts listed in the terminology section of the problem description.
terms :: [ConceptChunk]
terms = map cw [htFlux, phaseChangeMaterial, cw heatCapSpec, thermalConduction, transient]
-- Included heat flux and specific heat in NamedChunks even though they are
-- already in SWHSUnits
-----------------------------------------
-- 4.1.2 : Physical System Description --
-----------------------------------------
-- | The parts of the physical system: the tank with water, the coil, and the
-- suspended PCM with its heat flux.
physSystParts :: [Sentence]
physSystParts = map foldlSent_ [physSyst1 tank water, physSyst2 coil tank htFluxC,
  [short phsChgMtrl, S "suspended in" +:+. phrase tank,
  sParen (ch htFluxP +:+ S "represents the" +:+. phrase htFluxP)]]
-- | "X containing Y" physical-system item.
physSyst1 :: ConceptChunk -> ConceptChunk -> [Sentence]
physSyst1 ta wa = [atStart ta, S "containing" +:+. phrase wa]
-- | "X at bottom of Y (with heat flux Z)" physical-system item.
physSyst2 :: ConceptChunk -> ConceptChunk -> UnitalChunk -> [Sentence]
physSyst2 co ta hfc = [atStart co, S "at bottom of" +:+. phrase ta,
  sParen (ch hfc +:+ S "represents the" +:+. phrase hfc)]
-- Structure of list would be same between examples but content is completely
-- different
-- | Figure of the tank, annotated with the coil and PCM heat fluxes.
figTank :: LabelledContent
figTank = llcc (makeFigRef "Tank") $ fig (
  foldlSent_ [atStart sWHT `sC` S "with", phrase htFluxC `S.of_`
  ch htFluxC `S.and_` phrase htFluxP `S.of_` ch htFluxP])
  $ resourcePath ++ "Tank.png"
-----------------------------
-- 4.1.3 : Goal Statements --
-----------------------------
-- | The "given" quantities referenced by the goal statements: coil
-- temperature, initial conditions, and material properties.
goalInputs :: [Sentence]
goalInputs = [phraseNP (the tempC),
  S "the initial" +:+ plural condition +:+ S "for the" +:+ phraseNP (tempW `andThe` tempPCM),
  S "the material" +:+ plural property]
-- 2 examples include this paragraph, 2 don't. The "givens" would need to be
-- abstracted out if this paragraph were to be abstracted out.
--------------------------------------------------
-- 4.2 : Solution Characteristics Specification --
--------------------------------------------------
-------------------------
-- 4.2.1 : Assumptions --
-------------------------
-- Can booktabs colored links be used? The box links completely cover nearby
-- punctuation.
--------------------------------
-- 4.2.2 : Theoretical Models --
--------------------------------
-- Theory has to be RelationChunk....
-- No way to include "Source" or "Ref. By" sections?
---------------------------------
-- 4.2.3 : General Definitions --
---------------------------------
-- SECTION 4.2.3 --
-- General Definitions is automatically generated in solChSpecF
s4_2_3_genDefs : : [ Contents ]
s4_2_3_genDefs = map reldefn swhsRC
s4_2_3_deriv : : [ Contents ]
s4_2_3_deriv = [ s4_2_3_deriv_1 rOfChng temp ,
s4_2_3_deriv_2 consThermE vol ,
s4_2_3_deriv_3 ,
s4_2_3_deriv_4 gaussDiv surface vol thFluxVect uNormalVect unit _ ,
s4_2_3_deriv_5 ,
s4_2_3_deriv_6 vol volHtGen ,
s4_2_3_deriv_7 ,
s4_2_3_deriv_8 htFluxIn htFluxOut inSA outSA density heatCapSpec
temp vol assumption ,
s4_2_3_deriv_9 ,
s4_2_3_deriv_10 density mass vol ,
s4_2_3_deriv_11 ]
{-s4_2_3_genDefs :: [Contents]
s4_2_3_genDefs = map reldefn swhsRC
s4_2_3_deriv :: [Contents]
s4_2_3_deriv = [s4_2_3_deriv_1 rOfChng temp,
s4_2_3_deriv_2 consThermE vol,
s4_2_3_deriv_3,
s4_2_3_deriv_4 gaussDiv surface vol thFluxVect uNormalVect unit_,
s4_2_3_deriv_5,
s4_2_3_deriv_6 vol volHtGen,
s4_2_3_deriv_7,
s4_2_3_deriv_8 htFluxIn htFluxOut inSA outSA density heatCapSpec
temp vol assumption assump3 assump4 assump5 assump6,
s4_2_3_deriv_9,
s4_2_3_deriv_10 density mass vol,
s4_2_3_deriv_11]-}
-- General Definitions is automatically generated
------------------------------
-- 4.2.4 : Data Definitions --
------------------------------
-----------------------------
-- 4.2.5 : Instance Models --
-----------------------------
----------------------------
-- 4.2.6 Data Constraints --
----------------------------
-- I do not think Table 2 will end up being necessary for the version
-- The info from table 2 will likely end up in table 1 .
dataConTail :: Sentence
dataConTail = dataContMid +:+ dataContFooter
dataContMid :: Sentence
dataContMid = foldlSent [atStartNP (the column) `S.for` pluralNP (combineNINI software
constraint), S "restricts the range" `S.of_` plural input_,
S "to reasonable", plural value]
dataContFooter :: Sentence
dataContFooter = foldlSent_ $ map foldlSent [
[sParen (S "*"), S "These", plural quantity, S "cannot be equal to zero" `sC`
S "or there will be a divide by zero in the", phrase model],
[sParen (S "+"), S "These", plural quantity, S "cannot be zero" `sC`
S "or there would be freezing", sParen (refS assumpPIS)],
[sParen (S "++"), atStartNP' (NP.the (constraint `onThePS` surArea)),
S "are calculated by considering the", phrase surArea, S "to", phrase vol +:+.
S "ratio", atStartNP (the assumption), S "is that the lowest ratio is 1" `S.and_`
S "the highest possible is", eS (exactDbl 2 $/ sy thickness) `sC` S "where", ch thickness,
S "is the thickness of a" +:+. (Quote (S "sheet") `S.of_` short phsChgMtrl),
S "A thin sheet has the greatest", phrase surArea, S "to", phrase vol, S "ratio"],
[sParen (S "**"), atStartNP (the constraint), S "on the maximum", phrase time,
S "at the end of the simulation is the total number of seconds in one day"]
]
------------------------------
-- Data Constraint: Table 1 --
------------------------------
------------------------------
-- Data Constraint: Table 2 --
------------------------------
------------------------------
-- Data Constraint: Table 3 --
------------------------------
outputConstraints :: [ConstrConcept]
-- FIXME: add "(by A11)" in Physical Constraints of `tempW` and `tempPCM`?
-- Other Notes:
---- Will there be a way to have asterisks for certain pieces of the table?
----------------------------------------------
-- 4.2.7 : Properties of A Correct Solution --
----------------------------------------------
{-Properties of a Correct Solution-}
propsDeriv :: [Contents]
propsDeriv = [
propCorSolDeriv1 lawConsEnergy watE energy coil phsChgMtrl
htFluxWaterFromCoil htFluxPCMFromWater surface heatTrans,
propCorSolDeriv2,
propCorSolDeriv3 pcmE energy phsChgMtrl water,
propCorSolDeriv4,
propCorSolDeriv5 equation progName rightSide]
propCorSolDeriv1 :: (NamedIdea b, NamedIdea h) => ConceptChunk -> b -> UnitalChunk ->
ConceptChunk -> CI -> GenDefn -> GenDefn -> h -> ConceptChunk -> Contents
propCorSolDeriv1 lce ewat en co pcmat g1hfc g2hfp su ht =
foldlSPCol [atStartNP (a_ corSol), S "must exhibit" +:+.
phraseNP (the lce), S "This means that", phraseNP (the ewat),
S "should equal the difference between the total", phrase en,
phrase input_, S "from", phraseNP (the co `NP.andThe`
combineNINI en output_), S "to the" +:+. short pcmat,
S "This can be shown as an", phrase equation, S "by taking",
refS g1hfc `S.and_` refS g2hfp `sC`
S "multiplying each by their respective", phrase su,
S "area of", phrase ht `sC` S "and integrating each",
S "over the", phrase simTime `sC` S "as follows"]
propCorSolDeriv2 :: Contents
propCorSolDeriv2 = unlbldExpr
(sy watE $= defint (eqSymb time) (exactDbl 0) (sy time)
(sy coilHTC `mulRe` sy coilSA `mulRe` (sy tempC $- apply1 tempW time))
$- defint (eqSymb time) (exactDbl 0) (sy time)
(sy pcmHTC `mulRe` sy pcmSA `mulRe` (apply1 tempW time $-
apply1 tempPCM time)))
propCorSolDeriv3 :: NamedIdea a => a -> UnitalChunk -> CI -> ConceptChunk -> Contents
propCorSolDeriv3 epcm en pcmat wa =
foldlSP_ [S "In addition, the", phrase epcm, S "should equal the",
phrase en, phrase input_, S "to the", short pcmat,
S "from the" +:+. phrase wa, S "This can be expressed as"]
propCorSolDeriv4 :: Contents
propCorSolDeriv4 = unlbldExpr
(sy pcmE $= defint (eqSymb time) (exactDbl 0) (sy time)
(sy pcmHTC `mulRe` sy pcmSA `mulRe` (apply1 tempW time $-
apply1 tempPCM time)))
propCorSolDeriv5 :: ConceptChunk -> CI -> CI -> Contents
propCorSolDeriv5 eq pro rs = foldlSP [titleize' eq, S "(FIXME: Equation 7)"
`S.and_` S "(FIXME: Equation 8) can be used as", Quote (S "sanity") +:+
S "checks to gain confidence in any", phrase solution,
S "computed by" +:+. short pro, S "The relative",
S "error between the results computed by", short pro `S.and_`
S "the results calculated from the", short rs, S "of these",
plural eq, S "should be less than", ch consTol, refS verifyEnergyOutput]
-- Remember to insert references in above derivation when available
------------------------------
-- Section 5 : REQUIREMENTS --
------------------------------
-----------------------------------
-- 5.1 : Functional Requirements --
-----------------------------------
---------------------------------------
-- 5.2 : Non-functional Requirements --
---------------------------------------
--------------------------------
-- Section 6 : LIKELY CHANGES --
--------------------------------
--------------------------------
-- Section 6b : CHANGES --
--------------------------------
--------------------------------------------------
-- Section 7 : TRACEABILITY MATRICES AND GRAPHS --
--------------------------------------------------
------------------------
-- Traceabilty Graphs --
------------------------
-------------------------------------------------
-- Section 8 : Specification Parameter Values --
-------------------------------------------------
----------------------------
-- Section 9 : References --
----------------------------
| null | https://raw.githubusercontent.com/JacquesCarette/Drasil/84272acccc09574dec70d8d96c6ea994f15f8b22/code/drasil-example/swhs/lib/Drasil/SWHS/Body.hs | haskell | -----------------------------------------------------------------------------
Will there be a table of contents?
heatEInPCM ?
FIXME: heatEInPCM?
It is sometimes hard to remember to add new sections both here and above.
=================================== --
SOFTWARE REQUIREMENTS SPECIFICATION --
=================================== --
----------------------------
Section 2 : INTRODUCTION --
----------------------------
-----------------------------
-----------------------------
Purpose of Document automatically generated in IPurpose
How to italicize words in sentence?
How to cite?
-------------------------------
-------------------------------
There is a similar paragraph in each example, but there's a lot of specific
info here. Would need to abstract out the object of analysis (i.e. solar
water heating tank rating PCM, 2D slope composed of homogeneous soil
layers, glass slab and blast, or 2D bodies acted on by forces) and also
abstract out the overall goal of the program (i.e. predict the temperature
and energy histories for the water and PCM, simulate how 2D rigid bodies
interact with each other, predict whether the glass slab is safe to use or
not, etc.). If that is done, then this paragraph can also be abstracted out.
--------------------------------------------
--------------------------------------------
----------------------------------
----------------------------------
This paragraph is mostly general (besides program name and number of IMs),
but there are some differences between the examples that I'm not sure how to
is not used for the other examples. Besides that, this paragraph could
probably be abstracted out with some changes (i.e. the other examples don't
include the last sentence, so we might not need to know the number of IMs
after all if we just leave that sentence out)
IM1 to IM4 : reference later
how to cite/reference?
the sectioning? This would also improve the tediousness of declaring
LayoutObjs
------------------------------------------
Section 3: GENERAL SYSTEM DESCRIPTION --
------------------------------------------
------------------------
------------------------
------------------------------
------------------------------
Some of these course names are repeated between examples, could potentially
be abstracted out.
----------------------------
----------------------------
-------------------------------------------
Section 4 : SPECIFIC SYSTEM DESCRIPTION --
-------------------------------------------
-----------------------------
-----------------------------
---------------------------------------
---------------------------------------
---------------------------------------
---------------------------------------
Structure of list would be same between examples but content is completely
different
---------------------------
4.1.3 : Goal Statements --
---------------------------
abstracted out if this paragraph were to be abstracted out.
------------------------------------------------
------------------------------------------------
-----------------------
-----------------------
Can booktabs colored links be used? The box links completely cover nearby
punctuation.
------------------------------
------------------------------
No way to include "Source" or "Ref. By" sections?
-------------------------------
-------------------------------
SECTION 4.2.3 --
General Definitions is automatically generated in solChSpecF
General Definitions is automatically generated
----------------------------
4.2.4 : Data Definitions --
----------------------------
---------------------------
---------------------------
--------------------------
4.2.6 Data Constraints --
--------------------------
The info from table 2 will likely end up in table 1 .
----------------------------
----------------------------
----------------------------
----------------------------
----------------------------
----------------------------
Other Notes:
-- Will there be a way to have asterisks for certain pieces of the table?
--------------------------------------------
--------------------------------------------
Properties of a Correct Solution
Remember to insert references in above derivation when available
----------------------------
Section 5 : REQUIREMENTS --
----------------------------
---------------------------------
---------------------------------
-------------------------------------
-------------------------------------
------------------------------
Section 6 : LIKELY CHANGES --
------------------------------
------------------------------
------------------------------
------------------------------------------------
------------------------------------------------
----------------------
Traceabilty Graphs --
----------------------
-----------------------------------------------
-----------------------------------------------
--------------------------
-------------------------- | # LANGUAGE PostfixOperators #
module Drasil.SWHS.Body where
import Language.Drasil hiding (organization, section, variable)
import Drasil.SRSDocument
import qualified Drasil.DocLang.SRS as SRS (inModel)
import Theory.Drasil (GenDefn, InstanceModel)
import Language.Drasil.Chunk.Concept.NamedCombinators
import qualified Language.Drasil.NounPhrase.Combinators as NP
import qualified Language.Drasil.Sentence.Combinators as S
import Control.Lens ((^.))
import qualified Data.Drasil.Concepts.Documentation as Doc (srs)
import Data.Drasil.TheoryConcepts as Doc (inModel)
import Data.Drasil.Concepts.Computation (algorithm, compcon)
import Data.Drasil.Concepts.Documentation as Doc (assumption, column, condition,
constraint, corSol, datum, document, environment,input_, model, organization,
output_, physical, physics, property, quantity, software, softwareSys, solution,
srsDomains, sysCont, system, user, value, variable, doccon, doccon')
import Data.Drasil.Concepts.Education (calculus, educon, engineering)
import Data.Drasil.Concepts.Math (de, equation, ode, rightSide, unit_, mathcon, mathcon')
import Data.Drasil.Concepts.PhysicalProperties (materialProprty, physicalcon)
import Data.Drasil.Concepts.Physics (physicCon)
import Data.Drasil.Concepts.Software (program, softwarecon, correctness,
understandability, reusability, maintainability, verifiability)
import Data.Drasil.Concepts.Thermodynamics (enerSrc, heatTrans, htFlux,
htTransTheo, lawConsEnergy, thermalAnalysis, thermalConduction, thermalEnergy,
thermocon)
import Data.Drasil.Quantities.Math (surArea, surface, uNormalVect)
import Data.Drasil.Quantities.PhysicalProperties (vol)
import Data.Drasil.Quantities.Physics (energy, time, physicscon)
import Data.Drasil.Quantities.Thermodynamics (heatCapSpec, latentHeat)
import Data.Drasil.Software.Products (sciCompS, prodtcon)
import Data.Drasil.People (brooks, spencerSmith, thulasi)
import Data.Drasil.SI_Units (metre, kilogram, second, centigrade, joule, watt,
fundamentals, derived, m_2, m_3)
import Drasil.SWHS.Assumptions (assumpPIS, assumptions)
import Drasil.SWHS.Changes (likelyChgs, unlikelyChgs)
import Drasil.SWHS.Concepts (acronymsFull, coil, con, phaseChangeMaterial,
phsChgMtrl, progName, sWHT, swhsPCM, tank, tankPCM, transient, water)
import qualified Drasil.SWHS.DataDefs as SWHS (dataDefs)
import Drasil.SWHS.GenDefs (genDefs, htFluxWaterFromCoil, htFluxPCMFromWater)
import Drasil.SWHS.Goals (goals)
import Drasil.SWHS.IMods (eBalanceOnWtr, eBalanceOnPCM, heatEInWtr, heatEInPCM,
iMods, instModIntro)
import Drasil.SWHS.References (citations, koothoor2013, smithLai2005)
import Drasil.SWHS.Requirements (funcReqs, inReqDesc, nfRequirements, verifyEnergyOutput)
import Drasil.SWHS.TMods (tMods)
import Drasil.SWHS.Unitals (absTol, coilHTC, coilSA, consTol, constrained,
htFluxC, htFluxP, inputs, inputConstraints, outputs, pcmE, pcmHTC, pcmSA,
relTol, simTime, specParamValList, symbols, symbolsAll, tempC, tempPCM,
tempW, thickness, unitalChuncks, watE)
-- Assembled SRS document: the declarative section spec 'mkSRS' (defined
-- below) is rendered against the system information 'si'.
srs :: Document
srs = mkDoc mkSRS S.forT si
-- System information with its chunk database filled in from the SRS spec.
fullSI :: SystemInformation
fullSI = fillcdbSRS mkSRS si
-- Printing configuration used when generating output artifacts.
printSetting :: PrintingInformation
printSetting = piSys fullSI Equational defaultConfiguration
-- Relative path to the SWHS data files; figure images such as "Tank.png"
-- and "SystemContextFigure.png" are loaded from here.
resourcePath :: String
resourcePath = "../../../../datafiles/swhs/"
-- SI units used in the document: base units (metre, kilogram, second)
-- plus derived ones (centigrade, joule, watt).
units :: [UnitDefn]
units = map unitWrapper [metre, kilogram, second] ++
  map unitWrapper [centigrade, joule, watt]
si :: SystemInformation
si = SI {
_sys = swhsPCM,
_kind = Doc.srs,
_authors = [thulasi, brooks, spencerSmith],
_purpose = [],
_background = [],
_quants = symbols,
_concepts = [] :: [DefinedQuantityDict],
_instModels = insModel,
_datadefs = SWHS.dataDefs,
_configFiles = [],
_inputs = inputs,
_outputs = map qw outputs,
_defSequence = [] :: [Block SimpleQDef],
_constraints = constrained,
_constants = specParamValList,
_sysinfodb = symbMap,
_usedinfodb = usedDB,
refdb = refDB
}
symbMap :: ChunkDB
(nw heatEInPCM : map nw symbols ++ map nw acronymsFull
++ map nw thermocon ++ map nw units ++ map nw [m_2, m_3] ++ map nw [absTol, relTol]
++ map nw physicscon ++ map nw doccon ++ map nw softwarecon ++ map nw doccon' ++ map nw con
++ map nw prodtcon ++ map nw physicCon ++ map nw mathcon ++ map nw mathcon' ++ map nw specParamValList
++ map nw fundamentals ++ map nw educon ++ map nw derived ++ map nw physicalcon ++ map nw unitalChuncks
++ [nw swhsPCM, nw algorithm] ++ map nw compcon ++ [nw materialProprty])
(units ++ [m_2, m_3]) SWHS.dataDefs insModel genDefs tMods concIns section [] []
usedDB :: ChunkDB
usedDB = cdb ([] :: [QuantityDict]) (map nw symbols ++ map nw acronymsFull)
([] :: [ConceptChunk]) ([] :: [UnitDefn]) [] [] [] [] [] [] [] ([] :: [Reference])
refDB :: ReferenceDB
refDB = rdb citations concIns
mkSRS :: SRSDecl
mkSRS = [TableOfContents,
RefSec $ RefProg intro [
TUnits,
tsymb'' tSymbIntro $ TermExcept [uNormalVect],
TAandA],
IntroSec $
IntroProg (introStart +:+ introStartSWHS) (introEnd (plural swhsPCM) progName)
[IPurpose $ purpDoc progName Verbose,
IScope scope,
IChar [] charsOfReader [],
IOrgSec orgDocIntro inModel (SRS.inModel [] []) orgDocEnd
],
GSDSec $ GSDProg
[ SysCntxt [sysCntxtDesc progName, LlC sysCntxtFig, sysCntxtRespIntro progName, systContRespBullets]
, UsrChars [userChars progName]
, SystCons [] []
],
SSDSec $
SSDProg
[ SSDProblem $ PDProg probDescIntro []
[ TermsAndDefs Nothing terms
, PhySysDesc progName physSystParts figTank []
, Goals goalInputs]
, SSDSolChSpec $ SCSProg
[ Assumptions
, TMs [] (Label : stdFields)
, GDs [] ([Label, Units] ++ stdFields) ShowDerivation
, DDs [] ([Label, Symbol, Units] ++ stdFields) ShowDerivation
, IMs [instModIntro] ([Label, Input, Output, InConstraints, OutConstraints] ++ stdFields) ShowDerivation
, Constraints dataConTail inputConstraints
, CorrSolnPpties outputConstraints propsDeriv
]
],
ReqrmntSec $ ReqsProg [
FReqsSub inReqDesc [],
NonFReqsSub
],
LCsSec,
UCsSec,
TraceabilitySec $ TraceabilityProg $ traceMatStandard si,
AuxConstntSec $ AuxConsProg progName specParamValList,
Bibliography]
tSymbIntro :: [TSIntro]
tSymbIntro = [TSPurpose, SymbConvention
[Lit (nw heatTrans), Doc' (nw progName)], SymbOrder, VectorUnits]
insModel :: [InstanceModel]
insModel = [eBalanceOnWtr, eBalanceOnPCM, heatEInWtr, heatEInPCM]
concIns :: [ConceptInstance]
concIns = goals ++ assumptions ++ likelyChgs ++ unlikelyChgs ++ funcReqs
++ nfRequirements
section :: [Section]
section = extractSection srs
stdFields :: Fields
stdFields = [DefiningEquation, Description Verbose IncludeUnits, Notes, Source, RefBy]
priorityNFReqs :: [ConceptChunk]
priorityNFReqs = [correctness, verifiability, understandability, reusability,
maintainability]
introStart :: Sentence
introStart = foldlSent [S "Due to", foldlList Comma List (map S
["increasing costs", "diminishing availability", "negative environmental impact"]) `S.of_`
S "fossil fuels" `sC` S "the demand is high for renewable", pluralNP (enerSrc `and_PS`
energy), S "storage technology"]
introStartSWHS :: Sentence
introStartSWHS = foldlSent [capSent (swhsPCM ^. defn), sParen (short phsChgMtrl),
S "use a renewable", phrase enerSrc `S.and_` S "provide a novel way of storing" +:+.
phrase energy, atStart swhsPCM, S "improve over the traditional", plural progName,
S "because of their smaller size. The smaller size is possible because of the ability" `S.of_`
short phsChgMtrl, S "to store", phrase thermalEnergy, S "as", phrase latentHeat `sC`
S "which allows higher", phrase thermalEnergy, S "storage capacity per",
phrase unit_, S "weight"]
introEnd :: Sentence -> CI -> Sentence
introEnd progSent pro = foldlSent_ [(progSent !.), S "The developed",
phrase program, S "will be referred to as", titleize pro, sParen (short pro)]
-- SSP has same style sentence here
-- Scope statement: thermal analysis of a single tank with PCM; the whole
-- document assumes the tank substances are water and PCM.
scope :: Sentence
scope = foldlSent_ [phrase thermalAnalysis `S.of_` S "a single" +:+. phrase tankPCM,
  S "This entire", phrase document `S.is` S "written assuming that the substances inside the",
  phrase sWHT `S.are` phraseNP (and_Gen phrase short water phsChgMtrl)]
charsOfReader :: [Sentence]
charsOfReader = [charReaderHTT, charReaderDE]
charReaderHTT :: Sentence
charReaderHTT = foldlSent_ [phrase htTransTheo, S "from level 3 or 4",
S "mechanical", phrase engineering]
charReaderDE :: Sentence
charReaderDE = plural de +:+ S "from level 1 and 2" +:+ phrase calculus
orgDocIntro :: Sentence
orgDocIntro = foldlSent [atStartNP (the organization), S "of this",
phrase document, S "follows the template for an", short Doc.srs
`S.for` phrase sciCompS, S "proposed by", refS koothoor2013 `S.and_`
refS smithLai2005]
orgDocEnd :: Sentence
orgDocEnd = foldlSent_ [atStartNP' (the inModel),
S "to be solved are referred to as" +:+.
foldlList Comma List (map refS iMods), S "The", plural inModel,
S "provide the", plural ode, sParen (short ode :+: S "s") `S.and_`
S "algebraic", plural equation, S "that", phrase model,
(phraseNP (the swhsPCM) !.), short progName, S "solves these", short ode :+: S "s"]
-- account for. Specifically, the glass example references a Volere paper that
-- If all SRS have the same basic layout, is it possible to automate
sysCntxtDesc :: CI -> Contents
sysCntxtDesc pro = foldlSP [refS sysCntxtFig, S "shows the" +:+.
phrase sysCont, S "A circle represents an external entity outside the",
phrase software `sC` phraseNP (the user) +:+. S "in this case",
S "A rectangle represents the", phrase softwareSys, S "itself" +:+.
sParen (short pro), S "Arrows are used to show the", plural datum,
S "flow between the", phraseNP (system `andIts` environment)]
sysCntxtFig :: LabelledContent
sysCntxtFig = llcc (makeFigRef "SysCon") $ fig (foldlSent_
[refS sysCntxtFig +: EmptyS, titleize sysCont])
$ resourcePath ++ "SystemContextFigure.png"
sysCntxtRespIntro :: CI -> Contents
sysCntxtRespIntro pro = foldlSPCol [short pro +:+. S "is mostly self-contained",
S "The only external interaction is through the", phrase user +:+.
S "interface", S "responsibilities" `S.the_ofTheC` phraseNP (user `andThe`
system) `S.are` S "as follows"]
systContRespBullets :: Contents
systContRespBullets = UlC $ ulcc $ Enumeration $ bulletNested
[titleize user +: S "Responsibilities", short progName +: S "Responsibilities"]
$ map bulletFlat [userResp, swhsResp]
userResp :: [Sentence]
userResp = map foldlSent_ [
[S "Provide the", phrase input_, plural datum `S.toThe`
phrase system `sC` S "ensuring no errors in the", plural datum, S "entry"],
[S "Take care that consistent", plural unit_, S "are used for",
phrase input_, plural variable]
]
swhsResp :: [Sentence]
swhsResp = map foldlSent_ [
[S "Detect", plural datum, S "type mismatch, such as a string" `S.of_`
S "characters instead of a floating point number"],
[S "Determine if the", plural input_, S "satisfy the required",
phraseNP (physical `and_` software), plural constraint],
[S "Calculate the required", plural output_]
]
userChars :: CI -> Contents
userChars pro = foldlSP [S "The end", phrase user `S.of_` short pro,
S "should have an understanding of undergraduate Level 1 Calculus" `S.and_`
titleize Doc.physics]
probDescIntro :: Sentence
probDescIntro = foldlSent_ [S "investigate the effect" `S.of_` S "employing",
short phsChgMtrl, S "within a", phrase sWHT]
terms :: [ConceptChunk]
terms = map cw [htFlux, phaseChangeMaterial, cw heatCapSpec, thermalConduction, transient]
-- Included heat flux and specific heat in NamedChunks even though they are
-- already in SWHSUnits
physSystParts :: [Sentence]
physSystParts = map foldlSent_ [physSyst1 tank water, physSyst2 coil tank htFluxC,
[short phsChgMtrl, S "suspended in" +:+. phrase tank,
sParen (ch htFluxP +:+ S "represents the" +:+. phrase htFluxP)]]
-- Physical-system description bullets (consumed via PhySysDesc in mkSRS).
-- physSyst1 renders "<tank> containing <water>."
physSyst1 :: ConceptChunk -> ConceptChunk -> [Sentence]
physSyst1 ta wa = [atStart ta, S "containing" +:+. phrase wa]
-- physSyst2 renders "<coil> at bottom of <tank>. (<symbol> represents the
-- <heat flux>.)"
physSyst2 :: ConceptChunk -> ConceptChunk -> UnitalChunk -> [Sentence]
physSyst2 co ta hfc = [atStart co, S "at bottom of" +:+. phrase ta,
  sParen (ch hfc +:+ S "represents the" +:+. phrase hfc)]
-- Figure of the solar water-heating tank, captioned with the coil and PCM
-- heat-flux symbols; the image is loaded from resourcePath ++ "Tank.png".
figTank :: LabelledContent
figTank = llcc (makeFigRef "Tank") $ fig (
  foldlSent_ [atStart sWHT `sC` S "with", phrase htFluxC `S.of_`
  ch htFluxC `S.and_` phrase htFluxP `S.of_` ch htFluxP])
  $ resourcePath ++ "Tank.png"
-- Inputs referenced by the goal statements: the coil temperature, the
-- initial conditions for the water and PCM temperatures, and the material
-- properties.
goalInputs :: [Sentence]
goalInputs = [phraseNP (the tempC),
  S "the initial" +:+ plural condition +:+ S "for the" +:+ phraseNP (tempW `andThe` tempPCM),
  S "the material" +:+ plural property]
-- 2 examples include this paragraph, 2 don't. The "givens" would need to be
-- abstracted out if this paragraph were to be abstracted out.
-- Theory has to be RelationChunk....
{-s4_2_3_genDefs :: [Contents]
s4_2_3_genDefs = map reldefn swhsRC
s4_2_3_deriv : : [ Contents ]
s4_2_3_deriv = [ s4_2_3_deriv_1 rOfChng temp ,
s4_2_3_deriv_2 consThermE vol ,
s4_2_3_deriv_3 ,
s4_2_3_deriv_4 gaussDiv surface vol thFluxVect uNormalVect unit _ ,
s4_2_3_deriv_5 ,
s4_2_3_deriv_6 vol volHtGen ,
s4_2_3_deriv_7 ,
s4_2_3_deriv_8 htFluxIn htFluxOut inSA outSA density heatCapSpec
temp vol assumption ,
s4_2_3_deriv_9 ,
s4_2_3_deriv_10 density mass vol ,
s4_2_3_deriv_11 ]
s4_2_3_genDefs :: [Contents]
s4_2_3_genDefs = map reldefn swhsRC
s4_2_3_deriv :: [Contents]
s4_2_3_deriv = [s4_2_3_deriv_1 rOfChng temp,
s4_2_3_deriv_2 consThermE vol,
s4_2_3_deriv_3,
s4_2_3_deriv_4 gaussDiv surface vol thFluxVect uNormalVect unit_,
s4_2_3_deriv_5,
s4_2_3_deriv_6 vol volHtGen,
s4_2_3_deriv_7,
s4_2_3_deriv_8 htFluxIn htFluxOut inSA outSA density heatCapSpec
temp vol assumption assump3 assump4 assump5 assump6,
s4_2_3_deriv_9,
s4_2_3_deriv_10 density mass vol,
s4_2_3_deriv_11]-}
-- I do not think Table 2 will end up being necessary for the version
-- Trailing text for the data-constraints section: a middle paragraph plus
-- the table footnotes below.
dataConTail :: Sentence
dataConTail = dataContMid +:+ dataContFooter
-- States that the software-constraints column restricts inputs to
-- reasonable values.
dataContMid :: Sentence
dataContMid = foldlSent [atStartNP (the column) `S.for` pluralNP (combineNINI software
  constraint), S "restricts the range" `S.of_` plural input_,
  S "to reasonable", plural value]
-- Footnote legend for the data-constraints table:
--   *  quantities that must be non-zero (divide-by-zero in the model)
--   +  quantities that must be non-zero (would imply freezing; assumpPIS)
--   ++ bounds on surface-area constraints from the area-to-volume ratio
--   ** maximum simulation time = seconds in one day
dataContFooter :: Sentence
dataContFooter = foldlSent_ $ map foldlSent [
  [sParen (S "*"), S "These", plural quantity, S "cannot be equal to zero" `sC`
  S "or there will be a divide by zero in the", phrase model],
  [sParen (S "+"), S "These", plural quantity, S "cannot be zero" `sC`
  S "or there would be freezing", sParen (refS assumpPIS)],
  [sParen (S "++"), atStartNP' (NP.the (constraint `onThePS` surArea)),
  S "are calculated by considering the", phrase surArea, S "to", phrase vol +:+.
  S "ratio", atStartNP (the assumption), S "is that the lowest ratio is 1" `S.and_`
  S "the highest possible is", eS (exactDbl 2 $/ sy thickness) `sC` S "where", ch thickness,
  S "is the thickness of a" +:+. (Quote (S "sheet") `S.of_` short phsChgMtrl),
  S "A thin sheet has the greatest", phrase surArea, S "to", phrase vol, S "ratio"],
  [sParen (S "**"), atStartNP (the constraint), S "on the maximum", phrase time,
  S "at the end of the simulation is the total number of seconds in one day"]
  ]
outputConstraints :: [ConstrConcept]
-- FIXME: add "(by A11)" in Physical Constraints of `tempW` and `tempPCM`?
propsDeriv :: [Contents]
propsDeriv = [
propCorSolDeriv1 lawConsEnergy watE energy coil phsChgMtrl
htFluxWaterFromCoil htFluxPCMFromWater surface heatTrans,
propCorSolDeriv2,
propCorSolDeriv3 pcmE energy phsChgMtrl water,
propCorSolDeriv4,
propCorSolDeriv5 equation progName rightSide]
propCorSolDeriv1 :: (NamedIdea b, NamedIdea h) => ConceptChunk -> b -> UnitalChunk ->
ConceptChunk -> CI -> GenDefn -> GenDefn -> h -> ConceptChunk -> Contents
propCorSolDeriv1 lce ewat en co pcmat g1hfc g2hfp su ht =
foldlSPCol [atStartNP (a_ corSol), S "must exhibit" +:+.
phraseNP (the lce), S "This means that", phraseNP (the ewat),
S "should equal the difference between the total", phrase en,
phrase input_, S "from", phraseNP (the co `NP.andThe`
combineNINI en output_), S "to the" +:+. short pcmat,
S "This can be shown as an", phrase equation, S "by taking",
refS g1hfc `S.and_` refS g2hfp `sC`
S "multiplying each by their respective", phrase su,
S "area of", phrase ht `sC` S "and integrating each",
S "over the", phrase simTime `sC` S "as follows"]
propCorSolDeriv2 :: Contents
propCorSolDeriv2 = unlbldExpr
(sy watE $= defint (eqSymb time) (exactDbl 0) (sy time)
(sy coilHTC `mulRe` sy coilSA `mulRe` (sy tempC $- apply1 tempW time))
$- defint (eqSymb time) (exactDbl 0) (sy time)
(sy pcmHTC `mulRe` sy pcmSA `mulRe` (apply1 tempW time $-
apply1 tempPCM time)))
propCorSolDeriv3 :: NamedIdea a => a -> UnitalChunk -> CI -> ConceptChunk -> Contents
propCorSolDeriv3 epcm en pcmat wa =
foldlSP_ [S "In addition, the", phrase epcm, S "should equal the",
phrase en, phrase input_, S "to the", short pcmat,
S "from the" +:+. phrase wa, S "This can be expressed as"]
propCorSolDeriv4 :: Contents
propCorSolDeriv4 = unlbldExpr
(sy pcmE $= defint (eqSymb time) (exactDbl 0) (sy time)
(sy pcmHTC `mulRe` sy pcmSA `mulRe` (apply1 tempW time $-
apply1 tempPCM time)))
-- Closing paragraph of the correct-solution properties: the two energy
-- equations serve as "sanity" checks on any computed solution; the relative
-- error between results computed by the program (pro) and those derived
-- from the right-hand sides (rs) must be below consTol (verifyEnergyOutput).
-- FIXME markers remain for the two equation references.
propCorSolDeriv5 :: ConceptChunk -> CI -> CI -> Contents
propCorSolDeriv5 eq pro rs = foldlSP [titleize' eq, S "(FIXME: Equation 7)"
  `S.and_` S "(FIXME: Equation 8) can be used as", Quote (S "sanity") +:+
  S "checks to gain confidence in any", phrase solution,
  S "computed by" +:+. short pro, S "The relative",
  S "error between the results computed by", short pro `S.and_`
  S "the results calculated from the", short rs, S "of these",
  plural eq, S "should be less than", ch consTol, refS verifyEnergyOutput]
|
2a2e8e6138a92a4a4ee0342796cf8179e4aa0016f006f0a4088c488ed8d234b5 | puppetlabs/ring-middleware | utils.clj | (ns puppetlabs.ring-middleware.utils
(:require [schema.core :as schema]
[ring.util.response :as rr]
[slingshot.slingshot :as sling]
[cheshire.core :as json])
(:import (java.security.cert X509Certificate)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;; Schemas
;; The two response body formats this middleware can produce.
(def ResponseType
  (schema/enum :json :plain))
;; Minimal shape of an incoming Ring request; the client certificate is
;; optional and may be nil.
(def RingRequest
  {:uri schema/Str
   (schema/optional-key :ssl-client-cert) (schema/maybe X509Certificate)
   schema/Keyword schema/Any})
;; Minimal shape of an outgoing Ring response map.
(def RingResponse
  {:status schema/Int
   :headers {schema/Str schema/Any}
   :body schema/Any
   schema/Keyword schema/Any})
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;; Helpers
(schema/defn ^:always-validate json-response
  :- RingResponse
  "Build a Ring response whose body is `body` serialized as JSON, with the
  given `status` code and a JSON content-type header."
  [status :- schema/Int
   body :- schema/Any]
  (-> body
      json/encode
      rr/response
      (rr/status status)
      (rr/content-type "application/json; charset=utf-8")))
(schema/defn ^:always-validate plain-response
  :- RingResponse
  "Build a Ring response carrying `body` verbatim, with the given `status`
  code and a plain-text content-type header."
  [status :- schema/Int
   body :- schema/Str]
  (let [base (rr/response body)
        with-status (rr/status base status)]
    (rr/content-type with-status "text/plain; charset=utf-8")))
;; Thrown value carries :kind so it can be recognized by bad-request?.
(defn throw-bad-request!
  "Throw a :bad-request type slingshot error with the supplied message"
  [message]
  (sling/throw+ {:kind :bad-request
                 :msg message}))
(defn bad-request?
  "Determine if the supplied slingshot error is for a bad request"
  [e]
  ;; Fix: the docstring was previously placed AFTER the argument vector,
  ;; making it a dead string expression instead of the var's :doc metadata.
  ;; Returns nil for non-map inputs (e.g. a thrown Exception object).
  (when (map? e)
    (= (:kind e)
       :bad-request)))
;; Thrown value carries :kind so it can be recognized by service-unavailable?.
(defn throw-service-unavailable!
  "Throw a :service-unavailable type slingshot error with the supplied message"
  [message]
  (sling/throw+ {:kind :service-unavailable
                 :msg message}))
(defn service-unavailable?
  "Determine if the supplied slingshot error is for an unavailable service"
  [e]
  ;; Fix: the docstring was previously placed AFTER the argument vector,
  ;; making it a dead string expression instead of the var's :doc metadata.
  ;; Returns nil for non-map inputs.
  (when (map? e)
    (= (:kind e)
       :service-unavailable)))
;; Thrown value carries :kind so it can be recognized by data-invalid?.
(defn throw-data-invalid!
  "Throw a :data-invalid type slingshot error with the supplied message"
  [message]
  (sling/throw+ {:kind :data-invalid
                 :msg message}))
(defn data-invalid?
  "Determine if the supplied slingshot error is for invalid data"
  [e]
  ;; Fix: the docstring was previously placed AFTER the argument vector,
  ;; making it a dead string expression instead of the var's :doc metadata.
  ;; Returns nil for non-map inputs.
  (when (map? e)
    (= (:kind e)
       :data-invalid)))
(defn schema-error?
  "Determine if the supplied slingshot error is for a schema mismatch"
  [e]
  ;; Fix: the docstring was previously placed AFTER the argument vector,
  ;; making it a dead string expression instead of the var's :doc metadata.
  ;; Note: schema.core errors are tagged under :type (not :kind).
  (when (map? e)
    (= (:type e)
       :schema.core/error)))
| null | https://raw.githubusercontent.com/puppetlabs/ring-middleware/a09e0fe0e62f31ce49c807b831adf7153cdc37ac/src/puppetlabs/ring_middleware/utils.clj | clojure |
Helpers | (ns puppetlabs.ring-middleware.utils
(:require [schema.core :as schema]
[ring.util.response :as rr]
[slingshot.slingshot :as sling]
[cheshire.core :as json])
(:import (java.security.cert X509Certificate)))
Schemas
(def ResponseType
(schema/enum :json :plain))
(def RingRequest
{:uri schema/Str
(schema/optional-key :ssl-client-cert) (schema/maybe X509Certificate)
schema/Keyword schema/Any})
(def RingResponse
{:status schema/Int
:headers {schema/Str schema/Any}
:body schema/Any
schema/Keyword schema/Any})
(schema/defn ^:always-validate json-response
:- RingResponse
[status :- schema/Int
body :- schema/Any]
(-> body
json/encode
rr/response
(rr/status status)
(rr/content-type "application/json; charset=utf-8")))
(schema/defn ^:always-validate plain-response
:- RingResponse
[status :- schema/Int
body :- schema/Str]
(-> body
rr/response
(rr/status status)
(rr/content-type "text/plain; charset=utf-8")))
(defn throw-bad-request!
"Throw a :bad-request type slingshot error with the supplied message"
[message]
(sling/throw+ {:kind :bad-request
:msg message}))
(defn bad-request?
[e]
"Determine if the supplied slingshot error is for a bad request"
(when (map? e)
(= (:kind e)
:bad-request)))
(defn throw-service-unavailable!
"Throw a :service-unavailable type slingshot error with the supplied message"
[message]
(sling/throw+ {:kind :service-unavailable
:msg message}))
(defn service-unavailable?
[e]
"Determine if the supplied slingshot error is for an unavailable service"
(when (map? e)
(= (:kind e)
:service-unavailable)))
(defn throw-data-invalid!
"Throw a :data-invalid type slingshot error with the supplied message"
[message]
(sling/throw+ {:kind :data-invalid
:msg message}))
(defn data-invalid?
[e]
"Determine if the supplied slingshot error is for invalid data"
(when (map? e)
(= (:kind e)
:data-invalid)))
(defn schema-error?
[e]
"Determine if the supplied slingshot error is for a schema mismatch"
(when (map? e)
(= (:type e)
:schema.core/error)))
|
144ec57617f7dba552fcddc13f980a1440a0547cb8bf18056c3b4a821eb7ef69 | typelead/eta | Tc220.hs | {-# LANGUAGE DeriveDataTypeable #-}
See Trac # 1033
module Tc220 where
import Data.Generics
import Control.Monad.State
data HsExp = HsWildCard deriving( Typeable, Data )
data HsName = HsName deriving( Typeable, Data )
rename : : ( ) - > HsExp - > State ( , [ HsName ] ) HsExp
-- Type sig commented out
rename1 = \_ -> everywhereM (mkM (\e -> case e of HsWildCard -> return e))
rename2 _ = everywhereM (mkM (\e -> case e of HsWildCard -> return e))
uncomb1 :: State (HsName, [HsName]) HsExp
uncomb1 = rename1 () undefined
uncomb2 :: State (HsName, [HsName]) HsExp
uncomb2 = rename2 () undefined
| null | https://raw.githubusercontent.com/typelead/eta/97ee2251bbc52294efbf60fa4342ce6f52c0d25c/tests/suite/typecheck/compile/tc220/Tc220.hs | haskell | # LANGUAGE DeriveDataTypeable #
Type sig commented out |
See Trac # 1033
module Tc220 where
import Data.Generics
import Control.Monad.State
data HsExp = HsWildCard deriving( Typeable, Data )
data HsName = HsName deriving( Typeable, Data )
rename : : ( ) - > HsExp - > State ( , [ HsName ] ) HsExp
rename1 = \_ -> everywhereM (mkM (\e -> case e of HsWildCard -> return e))
rename2 _ = everywhereM (mkM (\e -> case e of HsWildCard -> return e))
uncomb1 :: State (HsName, [HsName]) HsExp
uncomb1 = rename1 () undefined
uncomb2 :: State (HsName, [HsName]) HsExp
uncomb2 = rename2 () undefined
|
f484b8f786c6750578c86768f675aaf5b0ecb173b73a7452ee7d6679700f817e | S8A/htdp-exercises | ex058.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-beginner-reader.ss" "lang")((modname ex058) (read-case-sensitive #t) (teachpacks ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp") (lib "batch-io.rkt" "teachpack" "2htdp"))) (htdp-settings #(#t constructor repeating-decimal #f #t none #f ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp") (lib "batch-io.rkt" "teachpack" "2htdp")) #f)))
(define LOW-PRICE-THRESHOLD 1000)
(define LUXURY-PRICE-THRESHOLD 10000)
(define LOW-PRICE-TAX-RATE 0.05)
(define LUXURY-PRICE-TAX-RATE 0.08)
A Price falls into one of three intervals :
; — 0 through LOW-PRICE-THRESHOLD
; — LOW-PRICE-THRESHOLD through LUXURY-PRICE-THRESHOLD
; — LUXURY-PRICE-THRESHOLD and above.
; interpretation the price of an item
; Price -> Number
computes the amount of tax charged for p
(check-expect (sales-tax 0) 0)
(check-expect (sales-tax 537) 0)
(check-expect (sales-tax 1000) (* 0.05 1000))
(check-expect (sales-tax 10000) (* 0.08 10000))
(check-expect (sales-tax 12017) (* 0.08 12017))
(define (sales-tax p)
(* p (cond
[(and (>= p 0) (< p LOW-PRICE-THRESHOLD))
0]
[(and (>= p LOW-PRICE-THRESHOLD) (< p LUXURY-PRICE-THRESHOLD))
LOW-PRICE-TAX-RATE]
[(>= p LUXURY-PRICE-THRESHOLD) LUXURY-PRICE-TAX-RATE])))
| null | https://raw.githubusercontent.com/S8A/htdp-exercises/578e49834a9513f29ef81b7589b28081c5e0b69f/ex058.rkt | racket | about the language level of this file in a form that our tools can easily process.
— 0 through LOW-PRICE-THRESHOLD
— LOW-PRICE-THRESHOLD through LUXURY-PRICE-THRESHOLD
— LUXURY-PRICE-THRESHOLD and above.
interpretation the price of an item
Price -> Number | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-beginner-reader.ss" "lang")((modname ex058) (read-case-sensitive #t) (teachpacks ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp") (lib "batch-io.rkt" "teachpack" "2htdp"))) (htdp-settings #(#t constructor repeating-decimal #f #t none #f ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp") (lib "batch-io.rkt" "teachpack" "2htdp")) #f)))
(define LOW-PRICE-THRESHOLD 1000)
(define LUXURY-PRICE-THRESHOLD 10000)
(define LOW-PRICE-TAX-RATE 0.05)
(define LUXURY-PRICE-TAX-RATE 0.08)
A Price falls into one of three intervals :
computes the amount of tax charged for p
(check-expect (sales-tax 0) 0)
(check-expect (sales-tax 537) 0)
(check-expect (sales-tax 1000) (* 0.05 1000))
(check-expect (sales-tax 10000) (* 0.08 10000))
(check-expect (sales-tax 12017) (* 0.08 12017))
(define (sales-tax p)
(* p (cond
[(and (>= p 0) (< p LOW-PRICE-THRESHOLD))
0]
[(and (>= p LOW-PRICE-THRESHOLD) (< p LUXURY-PRICE-THRESHOLD))
LOW-PRICE-TAX-RATE]
[(>= p LUXURY-PRICE-THRESHOLD) LUXURY-PRICE-TAX-RATE])))
|
7cd2b1598fe42b0295e251297cc7e9645928882864fa965ba8e129ed39b22b07 | sadiqj/ocaml-esp32 | pr6872.ml | Ignore = b to be reproducible
Printexc.record_backtrace false;;
exception A;;
type a = A;;
A;;
raise A;;
fun (A : a) -> ();;
function Not_found -> 1 | A -> 2 | _ -> 3;;
try raise A with A -> 2;;
| null | https://raw.githubusercontent.com/sadiqj/ocaml-esp32/33aad4ca2becb9701eb90d779c1b1183aefeb578/testsuite/tests/typing-warnings/pr6872.ml | ocaml | Ignore = b to be reproducible
Printexc.record_backtrace false;;
exception A;;
type a = A;;
A;;
raise A;;
fun (A : a) -> ();;
function Not_found -> 1 | A -> 2 | _ -> 3;;
try raise A with A -> 2;;
| |
b2f4709b0454d49aa70d0ad9f7caa86fb7f82ae476e4e0acb98d4687ae4e7a1b | avsm/eeww | stdlib.mli | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
* The OCaml Standard library .
This module is automatically opened at the beginning of each
compilation . All components of this module can therefore be
referred by their short name , without prefixing them by [ ] .
In particular , it provides the basic operations over the built - in
types ( numbers , booleans , byte sequences , strings , exceptions ,
references , lists , arrays , input - output channels , ... ) and the
{ { ! modules}standard library modules } .
This module is automatically opened at the beginning of each
compilation. All components of this module can therefore be
referred by their short name, without prefixing them by [Stdlib].
In particular, it provides the basic operations over the built-in
types (numbers, booleans, byte sequences, strings, exceptions,
references, lists, arrays, input-output channels, ...) and the
{{!modules}standard library modules}.
*)
* { 1 Exceptions }
external raise : exn -> 'a = "%raise"
(** Raise the given exception value *)
external raise_notrace : exn -> 'a = "%raise_notrace"
* A faster version [ raise ] which does not record the backtrace .
@since 4.02
@since 4.02
*)
val invalid_arg : string -> 'a
(** Raise exception [Invalid_argument] with the given string. *)
val failwith : string -> 'a
(** Raise exception [Failure] with the given string. *)
exception Exit
(** The [Exit] exception is not raised by any library function. It is
provided for use in your programs. *)
exception Match_failure of (string * int * int)
[@ocaml.warn_on_literal_pattern]
(** Exception raised when none of the cases of a pattern-matching
apply. The arguments are the location of the match keyword in the
source code (file name, line number, column number). *)
exception Assert_failure of (string * int * int)
[@ocaml.warn_on_literal_pattern]
(** Exception raised when an assertion fails. The arguments are the
location of the assert keyword in the source code (file name, line
number, column number). *)
exception Invalid_argument of string
[@ocaml.warn_on_literal_pattern]
(** Exception raised by library functions to signal that the given
arguments do not make sense. The string gives some information to
the programmer. As a general rule, this exception should not be
caught, it denotes a programming error and the code should be
modified not to trigger it. *)
exception Failure of string
[@ocaml.warn_on_literal_pattern]
(** Exception raised by library functions to signal that they are
undefined on the given arguments. The string is meant to give some
information to the programmer; you must not pattern match on the
string literal because it may change in future versions (use
Failure _ instead). *)
exception Not_found
(** Exception raised by search functions when the desired object could
not be found. *)
exception Out_of_memory
(** Exception raised by the garbage collector when there is
insufficient memory to complete the computation. (Not reliable for
allocations on the minor heap.) *)
exception Stack_overflow
* Exception raised by the bytecode interpreter when the evaluation
stack reaches its maximal size . This often indicates infinite or
excessively deep recursion in the user 's program .
Before 4.10 , it was not fully implemented by the native - code
compiler .
stack reaches its maximal size. This often indicates infinite or
excessively deep recursion in the user's program.
Before 4.10, it was not fully implemented by the native-code
compiler. *)
exception Sys_error of string
[@ocaml.warn_on_literal_pattern]
(** Exception raised by the input/output functions to report an
operating system error. The string is meant to give some
information to the programmer; you must not pattern match on the
string literal because it may change in future versions (use
Sys_error _ instead). *)
exception End_of_file
(** Exception raised by input functions to signal that the end of file
has been reached. *)
exception Division_by_zero
* Exception raised by integer division and remainder operations when
their second argument is zero .
their second argument is zero. *)
exception Sys_blocked_io
* A special case of Sys_error raised when no I / O is possible on a
non - blocking I / O channel .
non-blocking I/O channel. *)
exception Undefined_recursive_module of (string * int * int)
[@ocaml.warn_on_literal_pattern]
(** Exception raised when an ill-founded recursive module definition
is evaluated. The arguments are the location of the definition in
the source code (file name, line number, column number). *)
* { 1 Comparisons }
external ( = ) : 'a -> 'a -> bool = "%equal"
* [ e1 = e2 ] tests for structural equality of [ e1 ] and [ e2 ] .
Mutable structures ( e.g. references and arrays ) are equal
if and only if their current contents are structurally equal ,
even if the two mutable objects are not the same physical object .
Equality between functional values raises [ Invalid_argument ] .
Equality between cyclic data structures may not terminate .
Left - associative operator , see { ! Ocaml_operators } for more information .
Mutable structures (e.g. references and arrays) are equal
if and only if their current contents are structurally equal,
even if the two mutable objects are not the same physical object.
Equality between functional values raises [Invalid_argument].
Equality between cyclic data structures may not terminate.
Left-associative operator, see {!Ocaml_operators} for more information. *)
external ( <> ) : 'a -> 'a -> bool = "%notequal"
* Negation of { ! . ( = ) } .
Left - associative operator , see { ! Ocaml_operators } for more information .
Left-associative operator, see {!Ocaml_operators} for more information.
*)
external ( < ) : 'a -> 'a -> bool = "%lessthan"
* See { ! . ( > = ) } .
Left - associative operator , see { ! Ocaml_operators } for more information .
Left-associative operator, see {!Ocaml_operators} for more information.
*)
external ( > ) : 'a -> 'a -> bool = "%greaterthan"
* See { ! . ( > = ) } .
Left - associative operator , see { ! Ocaml_operators } for more information .
Left-associative operator, see {!Ocaml_operators} for more information.
*)
external ( <= ) : 'a -> 'a -> bool = "%lessequal"
* See { ! . ( > = ) } .
Left - associative operator , see { ! Ocaml_operators } for more information .
Left-associative operator, see {!Ocaml_operators} for more information.
*)
external ( >= ) : 'a -> 'a -> bool = "%greaterequal"
(** Structural ordering functions. These functions coincide with
the usual orderings over integers, characters, strings, byte sequences
and floating-point numbers, and extend them to a
total ordering over all types.
The ordering is compatible with [( = )]. As in the case
of [( = )], mutable structures are compared by contents.
Comparison between functional values raises [Invalid_argument].
Comparison between cyclic structures may not terminate.
Left-associative operator, see {!Ocaml_operators} for more information.
*)
external compare : 'a -> 'a -> int = "%compare"
* [ compare x y ] returns [ 0 ] if [ x ] is equal to [ y ] ,
a negative integer if [ x ] is less than [ y ] , and a positive integer
if [ x ] is greater than [ y ] . The ordering implemented by [ compare ]
is compatible with the comparison predicates [ =] , [ < ] and [ > ]
defined above , with one difference on the treatment of the float value
{ ! . Namely , the comparison predicates treat [ nan ]
as different from any other float value , including itself ;
while [ compare ] treats [ nan ] as equal to itself and less than any
other float value . This treatment of ensures that [ compare ]
defines a total ordering relation .
[ compare ] applied to functional values may raise [ Invalid_argument ] .
[ compare ] applied to cyclic structures may not terminate .
The [ compare ] function can be used as the comparison function
required by the { ! Set . Make } and { ! Map . Make } functors , as well as
the { ! List.sort } and { ! Array.sort } functions .
a negative integer if [x] is less than [y], and a positive integer
if [x] is greater than [y]. The ordering implemented by [compare]
is compatible with the comparison predicates [=], [<] and [>]
defined above, with one difference on the treatment of the float value
{!Stdlib.nan}. Namely, the comparison predicates treat [nan]
as different from any other float value, including itself;
while [compare] treats [nan] as equal to itself and less than any
other float value. This treatment of [nan] ensures that [compare]
defines a total ordering relation.
[compare] applied to functional values may raise [Invalid_argument].
[compare] applied to cyclic structures may not terminate.
The [compare] function can be used as the comparison function
required by the {!Set.Make} and {!Map.Make} functors, as well as
the {!List.sort} and {!Array.sort} functions. *)
val min : 'a -> 'a -> 'a
* Return the smaller of the two arguments .
The result is unspecified if one of the arguments contains
the float value [ nan ] .
The result is unspecified if one of the arguments contains
the float value [nan]. *)
val max : 'a -> 'a -> 'a
* Return the greater of the two arguments .
The result is unspecified if one of the arguments contains
the float value [ nan ] .
The result is unspecified if one of the arguments contains
the float value [nan]. *)
external ( == ) : 'a -> 'a -> bool = "%eq"
(** [e1 == e2] tests for physical equality of [e1] and [e2].
On mutable types such as references, arrays, byte sequences, records with
mutable fields and objects with mutable instance variables,
[e1 == e2] is true if and only if physical modification of [e1]
also affects [e2].
On non-mutable types, the behavior of [( == )] is
implementation-dependent; however, it is guaranteed that
[e1 == e2] implies [compare e1 e2 = 0].
Left-associative operator, see {!Ocaml_operators} for more information.
*)
external ( != ) : 'a -> 'a -> bool = "%noteq"
* Negation of { ! . ( = = ) } .
Left - associative operator , see { ! Ocaml_operators } for more information .
Left-associative operator, see {!Ocaml_operators} for more information.
*)
* { 1 Boolean operations }
external not : bool -> bool = "%boolnot"
(** The boolean negation. *)
external ( && ) : bool -> bool -> bool = "%sequand"
* The boolean ' and ' . Evaluation is sequential , left - to - right :
in [ e1 & & e2 ] , [ e1 ] is evaluated first , and if it returns [ false ] ,
[ e2 ] is not evaluated at all .
Right - associative operator , see { ! Ocaml_operators } for more information .
in [e1 && e2], [e1] is evaluated first, and if it returns [false],
[e2] is not evaluated at all.
Right-associative operator, see {!Ocaml_operators} for more information.
*)
external ( || ) : bool -> bool -> bool = "%sequor"
* The boolean ' or ' . Evaluation is sequential , left - to - right :
in [ e1 || e2 ] , [ e1 ] is evaluated first , and if it returns [ true ] ,
[ e2 ] is not evaluated at all .
Right - associative operator , see { ! Ocaml_operators } for more information .
in [e1 || e2], [e1] is evaluated first, and if it returns [true],
[e2] is not evaluated at all.
Right-associative operator, see {!Ocaml_operators} for more information.
*)
* { 1 Debugging }
external __LOC__ : string = "%loc_LOC"
* [ _ _ LOC _ _ ] returns the location at which this expression appears in
the file currently being parsed by the compiler , with the standard
error format of : " File % S , line % d , characters % d-%d " .
@since 4.02
the file currently being parsed by the compiler, with the standard
error format of OCaml: "File %S, line %d, characters %d-%d".
@since 4.02
*)
external __FILE__ : string = "%loc_FILE"
* [ _ _ FILE _ _ ] returns the name of the file currently being
parsed by the compiler .
@since 4.02
parsed by the compiler.
@since 4.02
*)
external __LINE__ : int = "%loc_LINE"
* [ _ _ LINE _ _ ] returns the line number at which this expression
appears in the file currently being parsed by the compiler .
@since 4.02
appears in the file currently being parsed by the compiler.
@since 4.02
*)
external __MODULE__ : string = "%loc_MODULE"
* [ _ _ MODULE _ _ ] returns the module name of the file being
parsed by the compiler .
@since 4.02
parsed by the compiler.
@since 4.02
*)
external __POS__ : string * int * int * int = "%loc_POS"
* [ _ _ POS _ _ ] returns a tuple [ ( file , lnum , cnum , enum ) ] , corresponding
to the location at which this expression appears in the file
currently being parsed by the compiler . [ file ] is the current
filename , [ lnum ] the line number , [ cnum ] the character position in
the line and [ enum ] the last character position in the line .
@since 4.02
to the location at which this expression appears in the file
currently being parsed by the compiler. [file] is the current
filename, [lnum] the line number, [cnum] the character position in
the line and [enum] the last character position in the line.
@since 4.02
*)
external __FUNCTION__ : string = "%loc_FUNCTION"
* [ _ _ FUNCTION _ _ ] returns the name of the current function or method , including
any enclosing modules or classes .
@since 4.12
any enclosing modules or classes.
@since 4.12 *)
external __LOC_OF__ : 'a -> string * 'a = "%loc_LOC"
* [ _ _ LOC_OF _ _ expr ] returns a pair [ ( loc , expr ) ] where [ loc ] is the
location of [ expr ] in the file currently being parsed by the
compiler , with the standard error format of : " File % S , line
% d , characters % d-%d " .
@since 4.02
location of [expr] in the file currently being parsed by the
compiler, with the standard error format of OCaml: "File %S, line
%d, characters %d-%d".
@since 4.02
*)
external __LINE_OF__ : 'a -> int * 'a = "%loc_LINE"
* [ _ _ LINE_OF _ _ expr ] returns a pair [ ( line , expr ) ] , where [ line ] is the
line number at which the expression [ expr ] appears in the file
currently being parsed by the compiler .
@since 4.02
line number at which the expression [expr] appears in the file
currently being parsed by the compiler.
@since 4.02
*)
external __POS_OF__ : 'a -> (string * int * int * int) * 'a = "%loc_POS"
* [ _ _ POS_OF _ _ expr ] returns a pair [ ( loc , expr ) ] , where [ loc ] is a
tuple [ ( file , lnum , cnum , enum ) ] corresponding to the location at
which the expression [ expr ] appears in the file currently being
parsed by the compiler . [ file ] is the current filename , [ lnum ] the
line number , [ cnum ] the character position in the line and [ enum ]
the last character position in the line .
@since 4.02
tuple [(file,lnum,cnum,enum)] corresponding to the location at
which the expression [expr] appears in the file currently being
parsed by the compiler. [file] is the current filename, [lnum] the
line number, [cnum] the character position in the line and [enum]
the last character position in the line.
@since 4.02
*)
* { 1 Composition operators }
external ( |> ) : 'a -> ('a -> 'b) -> 'b = "%revapply"
* Reverse - application operator : [ x | > f | > g ] is exactly equivalent
to [ g ( f ( x ) ) ] .
Left - associative operator , see { ! Ocaml_operators } for more information .
@since 4.01
to [g (f (x))].
Left-associative operator, see {!Ocaml_operators} for more information.
@since 4.01
*)
external ( @@ ) : ('a -> 'b) -> 'a -> 'b = "%apply"
* Application operator : [ g @@ f @@ x ] is exactly equivalent to
[ g ( f ( x ) ) ] .
Right - associative operator , see { ! Ocaml_operators } for more information .
@since 4.01
[g (f (x))].
Right-associative operator, see {!Ocaml_operators} for more information.
@since 4.01
*)
* { 1 Integer arithmetic }
(** Integers are [Sys.int_size] bits wide.
All operations are taken modulo 2{^[Sys.int_size]}.
They do not fail on overflow. *)
external ( ~- ) : int -> int = "%negint"
(** Unary negation. You can also write [- e] instead of [~- e].
Unary operator, see {!Ocaml_operators} for more information.
*)
external ( ~+ ) : int -> int = "%identity"
* Unary addition . You can also write [ + e ] instead of [ ~+ e ] .
Unary operator , see { ! Ocaml_operators } for more information .
@since 3.12
Unary operator, see {!Ocaml_operators} for more information.
@since 3.12
*)
external succ : int -> int = "%succint"
(** [succ x] is [x + 1]. *)
external pred : int -> int = "%predint"
(** [pred x] is [x - 1]. *)
external ( + ) : int -> int -> int = "%addint"
(** Integer addition.
Left-associative operator, see {!Ocaml_operators} for more information.
*)
external ( - ) : int -> int -> int = "%subint"
(** Integer subtraction.
Left-associative operator, , see {!Ocaml_operators} for more information.
*)
external ( * ) : int -> int -> int = "%mulint"
(** Integer multiplication.
Left-associative operator, see {!Ocaml_operators} for more information.
*)
external ( / ) : int -> int -> int = "%divint"
* Integer division .
Integer division rounds the real quotient of its arguments towards zero .
More precisely , if [ x > = 0 ] and [ y > 0 ] , [ x / y ] is the greatest integer
less than or equal to the real quotient of [ x ] by [ y ] . Moreover ,
[ ( - x ) / y = x / ( - y ) = - ( x / y ) ] .
Left - associative operator , see { ! Ocaml_operators } for more information .
@raise Division_by_zero if the second argument is 0 .
Integer division rounds the real quotient of its arguments towards zero.
More precisely, if [x >= 0] and [y > 0], [x / y] is the greatest integer
less than or equal to the real quotient of [x] by [y]. Moreover,
[(- x) / y = x / (- y) = - (x / y)].
Left-associative operator, see {!Ocaml_operators} for more information.
@raise Division_by_zero if the second argument is 0.
*)
external ( mod ) : int -> int -> int = "%modint"
* Integer remainder . If [ y ] is not zero , the result
of [ x mod y ] satisfies the following properties :
[ x = ( x / y ) * y + x mod y ] and
[ ) < = abs(y ) - 1 ] .
If [ y = 0 ] , [ x mod y ] raises [ Division_by_zero ] .
Note that [ x mod y ] is negative only if [ x < 0 ] .
Left - associative operator , see { ! Ocaml_operators } for more information .
@raise Division_by_zero if [ y ] is zero .
of [x mod y] satisfies the following properties:
[x = (x / y) * y + x mod y] and
[abs(x mod y) <= abs(y) - 1].
If [y = 0], [x mod y] raises [Division_by_zero].
Note that [x mod y] is negative only if [x < 0].
Left-associative operator, see {!Ocaml_operators} for more information.
@raise Division_by_zero if [y] is zero.
*)
val abs : int -> int
(** [abs x] is the absolute value of [x]. On [min_int] this
is [min_int] itself and thus remains negative. *)
val max_int : int
(** The greatest representable integer. *)
val min_int : int
(** The smallest representable integer. *)
* { 2 Bitwise operations }
external ( land ) : int -> int -> int = "%andint"
(** Bitwise logical and.
Left-associative operator, see {!Ocaml_operators} for more information.
*)
external ( lor ) : int -> int -> int = "%orint"
(** Bitwise logical or.
Left-associative operator, see {!Ocaml_operators} for more information.
*)
external ( lxor ) : int -> int -> int = "%xorint"
(** Bitwise logical exclusive or.
Left-associative operator, see {!Ocaml_operators} for more information.
*)
val lnot : int -> int
(** Bitwise logical negation. *)
external ( lsl ) : int -> int -> int = "%lslint"
(** [n lsl m] shifts [n] to the left by [m] bits.
The result is unspecified if [m < 0] or [m > Sys.int_size].
Right-associative operator, see {!Ocaml_operators} for more information.
*)
external ( lsr ) : int -> int -> int = "%lsrint"
* [ n lsr m ] shifts [ n ] to the right by [ m ] bits .
This is a logical shift : zeroes are inserted regardless of
the sign of [ n ] .
The result is unspecified if [ m < 0 ] or [ m > Sys.int_size ] .
Right - associative operator , see { ! Ocaml_operators } for more information .
This is a logical shift: zeroes are inserted regardless of
the sign of [n].
The result is unspecified if [m < 0] or [m > Sys.int_size].
Right-associative operator, see {!Ocaml_operators} for more information.
*)
external ( asr ) : int -> int -> int = "%asrint"
(** [n asr m] shifts [n] to the right by [m] bits.
This is an arithmetic shift: the sign bit of [n] is replicated.
The result is unspecified if [m < 0] or [m > Sys.int_size].
Right-associative operator, see {!Ocaml_operators} for more information.
*)
* { 1 Floating - point arithmetic }
's floating - point numbers follow the
IEEE 754 standard , using double precision ( 64 bits ) numbers .
Floating - point operations never raise an exception on overflow ,
underflow , division by zero , etc . Instead , special IEEE numbers
are returned as appropriate , such as [ infinity ] for [ 1.0 /. 0.0 ] ,
[ neg_infinity ] for [ -1.0 /. 0.0 ] , and [ nan ] ( ' not a number ' )
for [ 0.0 /. 0.0 ] . These special numbers then propagate through
floating - point computations as expected : for instance ,
[ 1.0 /. infinity ] is [ 0.0 ] , basic arithmetic operations
( [ + . ] , [ - . ] , [ * . ] , [ /. ] ) with [ ] as an argument return [ nan ] , ...
OCaml's floating-point numbers follow the
IEEE 754 standard, using double precision (64 bits) numbers.
Floating-point operations never raise an exception on overflow,
underflow, division by zero, etc. Instead, special IEEE numbers
are returned as appropriate, such as [infinity] for [1.0 /. 0.0],
[neg_infinity] for [-1.0 /. 0.0], and [nan] ('not a number')
for [0.0 /. 0.0]. These special numbers then propagate through
floating-point computations as expected: for instance,
[1.0 /. infinity] is [0.0], basic arithmetic operations
([+.], [-.], [*.], [/.]) with [nan] as an argument return [nan], ...
*)
external ( ~-. ) : float -> float = "%negfloat"
(** Unary negation. You can also write [-. e] instead of [~-. e].
Unary operator, see {!Ocaml_operators} for more information.
*)
external ( ~+. ) : float -> float = "%identity"
* Unary addition . You can also write [ + . e ] instead of [ ~+ . e ] .
Unary operator , see { ! Ocaml_operators } for more information .
@since 3.12
Unary operator, see {!Ocaml_operators} for more information.
@since 3.12
*)
external ( +. ) : float -> float -> float = "%addfloat"
(** Floating-point addition.
Left-associative operator, see {!Ocaml_operators} for more information.
*)
external ( -. ) : float -> float -> float = "%subfloat"
(** Floating-point subtraction.
Left-associative operator, see {!Ocaml_operators} for more information.
*)
external ( *. ) : float -> float -> float = "%mulfloat"
(** Floating-point multiplication.
Left-associative operator, see {!Ocaml_operators} for more information.
*)
external ( /. ) : float -> float -> float = "%divfloat"
(** Floating-point division.
Left-associative operator, see {!Ocaml_operators} for more information.
*)
external ( ** ) : float -> float -> float = "caml_power_float" "pow"
[@@unboxed] [@@noalloc]
(** Exponentiation.
Right-associative operator, see {!Ocaml_operators} for more information.
*)
external sqrt : float -> float = "caml_sqrt_float" "sqrt"
[@@unboxed] [@@noalloc]
(** Square root. *)
external exp : float -> float = "caml_exp_float" "exp" [@@unboxed] [@@noalloc]
(** Exponential. *)
external log : float -> float = "caml_log_float" "log" [@@unboxed] [@@noalloc]
(** Natural logarithm. *)
external log10 : float -> float = "caml_log10_float" "log10"
[@@unboxed] [@@noalloc]
(** Base 10 logarithm. *)
external expm1 : float -> float = "caml_expm1_float" "caml_expm1"
[@@unboxed] [@@noalloc]
* [ expm1 x ] computes [ exp x - . 1.0 ] , giving numerically - accurate results
even if [ x ] is close to [ 0.0 ] .
@since 3.12
even if [x] is close to [0.0].
@since 3.12
*)
external log1p : float -> float = "caml_log1p_float" "caml_log1p"
[@@unboxed] [@@noalloc]
* [ log1p x ] computes [ log(1.0 + . x ) ] ( natural logarithm ) ,
giving numerically - accurate results even if [ x ] is close to [ 0.0 ] .
@since 3.12
giving numerically-accurate results even if [x] is close to [0.0].
@since 3.12
*)
external cos : float -> float = "caml_cos_float" "cos" [@@unboxed] [@@noalloc]
(** Cosine. Argument is in radians. *)
external sin : float -> float = "caml_sin_float" "sin" [@@unboxed] [@@noalloc]
* . Argument is in radians .
external tan : float -> float = "caml_tan_float" "tan" [@@unboxed] [@@noalloc]
(** Tangent. Argument is in radians. *)
external acos : float -> float = "caml_acos_float" "acos"
[@@unboxed] [@@noalloc]
* Arc cosine . The argument must fall within the range [ [ -1.0 , 1.0 ] ] .
Result is in radians and is between [ 0.0 ] and [ pi ] .
Result is in radians and is between [0.0] and [pi]. *)
external asin : float -> float = "caml_asin_float" "asin"
[@@unboxed] [@@noalloc]
* Arc sine . The argument must fall within the range [ [ -1.0 , 1.0 ] ] .
Result is in radians and is between [ -pi/2 ] and [ pi/2 ] .
Result is in radians and is between [-pi/2] and [pi/2]. *)
external atan : float -> float = "caml_atan_float" "atan"
[@@unboxed] [@@noalloc]
(** Arc tangent.
Result is in radians and is between [-pi/2] and [pi/2]. *)
external atan2 : float -> float -> float = "caml_atan2_float" "atan2"
[@@unboxed] [@@noalloc]
(** [atan2 y x] returns the arc tangent of [y /. x]. The signs of [x]
and [y] are used to determine the quadrant of the result.
Result is in radians and is between [-pi] and [pi]. *)
external hypot : float -> float -> float = "caml_hypot_float" "caml_hypot"
[@@unboxed] [@@noalloc]
* [ hypot x y ] returns [ sqrt(x * . x + y * . y ) ] , that is , the length
of the hypotenuse of a right - angled triangle with sides of length
[ x ] and [ y ] , or , equivalently , the distance of the point [ ( x , y ) ]
to origin . If one of [ x ] or [ y ] is infinite , returns [ infinity ]
even if the other is [ nan ] .
@since 4.00
of the hypotenuse of a right-angled triangle with sides of length
[x] and [y], or, equivalently, the distance of the point [(x,y)]
to origin. If one of [x] or [y] is infinite, returns [infinity]
even if the other is [nan].
@since 4.00 *)
external cosh : float -> float = "caml_cosh_float" "cosh"
[@@unboxed] [@@noalloc]
(** Hyperbolic cosine. Argument is in radians. *)
external sinh : float -> float = "caml_sinh_float" "sinh"
[@@unboxed] [@@noalloc]
(** Hyperbolic sine. Argument is in radians. *)
external tanh : float -> float = "caml_tanh_float" "tanh"
[@@unboxed] [@@noalloc]
(** Hyperbolic tangent. Argument is in radians. *)
external acosh : float -> float = "caml_acosh_float" "caml_acosh"
[@@unboxed] [@@noalloc]
* Hyperbolic arc cosine . The argument must fall within the range
[ [ 1.0 , inf ] ] .
Result is in radians and is between [ 0.0 ] and [ inf ] .
@since 4.13
[[1.0, inf]].
Result is in radians and is between [0.0] and [inf].
@since 4.13
*)
external asinh : float -> float = "caml_asinh_float" "caml_asinh"
[@@unboxed] [@@noalloc]
(** Hyperbolic arc sine. The argument and result range over the entire
    real line.
    Result is in radians.
    @since 4.13
*)

external atanh : float -> float = "caml_atanh_float" "caml_atanh"
[@@unboxed] [@@noalloc]
(** Hyperbolic arc tangent. The argument must fall within the range
    [[-1.0, 1.0]].
    Result is in radians and ranges over the entire real line.
    @since 4.13
*)
external ceil : float -> float = "caml_ceil_float" "ceil"
[@@unboxed] [@@noalloc]
(** Round above to an integer value.
    [ceil f] returns the least integer value greater than or equal to [f].
    The result is returned as a float. *)

external floor : float -> float = "caml_floor_float" "floor"
[@@unboxed] [@@noalloc]
(** Round below to an integer value.
    [floor f] returns the greatest integer value less than or
    equal to [f].
    The result is returned as a float. *)

external abs_float : float -> float = "%absfloat"
(** [abs_float f] returns the absolute value of [f]. *)

external copysign : float -> float -> float
= "caml_copysign_float" "caml_copysign"
[@@unboxed] [@@noalloc]
(** [copysign x y] returns a float whose absolute value is that of [x]
    and whose sign is that of [y]. If [x] is [nan], returns [nan].
    If [y] is [nan], returns either [x] or [-. x], but it is not
    specified which.
    @since 4.00 *)
external mod_float : float -> float -> float = "caml_fmod_float" "fmod"
[@@unboxed] [@@noalloc]
(** [mod_float a b] returns the remainder of [a] with respect to
    [b]. The returned value is [a -. n *. b], where [n]
    is the quotient [a /. b] rounded towards zero to an integer. *)

external frexp : float -> float * int = "caml_frexp_float"
(** [frexp f] returns the pair of the significant
    and the exponent of [f]. When [f] is zero, the
    significant [x] and the exponent [n] of [f] are equal to
    zero. When [f] is non-zero, they are defined by
    [f = x *. 2 ** n] and [0.5 <= x < 1.0]. *)

external ldexp : (float [@unboxed]) -> (int [@untagged]) -> (float [@unboxed]) =
"caml_ldexp_float" "caml_ldexp_float_unboxed" [@@noalloc]
(** [ldexp x n] returns [x *. 2 ** n]. *)

external modf : float -> float * float = "caml_modf_float"
(** [modf f] returns the pair of the fractional and integral
    part of [f]. *)
external float : int -> float = "%floatofint"
(** Same as {!Stdlib.float_of_int}. *)

external float_of_int : int -> float = "%floatofint"
(** Convert an integer to floating-point. *)

external truncate : float -> int = "%intoffloat"
(** Same as {!Stdlib.int_of_float}. *)

external int_of_float : float -> int = "%intoffloat"
(** Truncate the given floating-point number to an integer.
    The result is unspecified if the argument is [nan] or falls outside the
    range of representable integers. *)
val infinity : float
(** Positive infinity. *)

val neg_infinity : float
(** Negative infinity. *)

val nan : float
(** A special floating-point value denoting the result of an
    undefined operation such as [0.0 /. 0.0]. Stands for
    'not a number'. Any floating-point operation with [nan] as
    argument returns [nan] as result, unless otherwise specified in
    IEEE 754 standard. As for floating-point comparisons,
    [=], [<], [<=], [>] and [>=] return [false] and [<>] returns [true]
    if one or both of their arguments is [nan].
    [nan] is a quiet NaN since 5.1; it was a signaling NaN before. *)

val max_float : float
(** The largest positive finite value of type [float]. *)

val min_float : float
(** The smallest positive, non-zero, non-denormalized value of type [float]. *)

val epsilon_float : float
(** The difference between [1.0] and the smallest exactly representable
    floating-point number greater than [1.0]. *)
type fpclass =
    FP_normal           (** Normal number, none of the below *)
  | FP_subnormal        (** Number very close to 0.0, has reduced precision *)
  | FP_zero             (** Number is 0.0 or -0.0 *)
  | FP_infinite         (** Number is positive or negative infinity *)
  | FP_nan              (** Not a number: result of an undefined operation *)
(** The five classes of floating-point numbers, as determined by
    the {!Stdlib.classify_float} function. *)
external classify_float : (float [@unboxed]) -> fpclass =
"caml_classify_float" "caml_classify_float_unboxed" [@@noalloc]
(** Return the class of the given floating-point number:
    normal, subnormal, zero, infinite, or not a number. *)
(** {1 String operations}

    More string operations are provided in module {!String}.
*)

val ( ^ ) : string -> string -> string
(** String concatenation.
    Right-associative operator, see {!Ocaml_operators} for more information.
    @raise Invalid_argument if the result is longer than
    {!Sys.max_string_length} bytes.
*)

(** {1 Character operations}

    More character operations are provided in module {!Char}.
*)
external int_of_char : char -> int = "%identity"
(** Return the ASCII code of the argument. *)

val char_of_int : int -> char
(** Return the character with the given ASCII code.
    @raise Invalid_argument if the argument is
    outside the range 0--255. *)

(** {1 Unit operations} *)

external ignore : 'a -> unit = "%ignore"
(** Discard the value of its argument and return [()].
    For instance, [ignore(f x)] discards the result of
    the side-effecting function [f]. It is equivalent to
    [f x; ()], except that the latter may generate a
    compiler warning; writing [ignore(f x)] instead
    avoids the warning. *)
(** {1 String conversion functions} *)

val string_of_bool : bool -> string
(** Return the string representation of a boolean. As the returned values
    may be shared, the user should not modify them directly.
*)

val bool_of_string_opt: string -> bool option
(** Convert the given string to a boolean.
    Return [None] if the string is not ["true"] or ["false"].
    @since 4.05
*)

val bool_of_string : string -> bool
(** Same as {!Stdlib.bool_of_string_opt}, but raise
    [Invalid_argument "bool_of_string"] instead of returning [None]. *)
val string_of_int : int -> string
(** Return the string representation of an integer, in decimal. *)

val int_of_string_opt: string -> int option
(** Convert the given string to an integer.
    The string is read in decimal (by default, or if the string
    begins with [0u]), in hexadecimal (if it begins with [0x] or
    [0X]), in octal (if it begins with [0o] or [0O]), or in binary
    (if it begins with [0b] or [0B]).
    The [0u] prefix reads the input as an unsigned integer in the range
    [[0, 2*max_int+1]]. If the input exceeds {!max_int}
    it is converted to the signed integer
    [min_int + input - max_int - 1].
    The [_] (underscore) character can appear anywhere in the string
    and is ignored.
    Return [None] if the given string is not a valid representation of an
    integer, or if the integer represented exceeds the range of integers
    representable in type [int].
    @since 4.05
*)

external int_of_string : string -> int = "caml_int_of_string"
(** Same as {!Stdlib.int_of_string_opt}, but raise
    [Failure "int_of_string"] instead of returning [None]. *)
val string_of_float : float -> string
(** Return a string representation of a floating-point number.
    This conversion can involve a loss of precision. For greater control over
    the manner in which the number is printed, see {!Printf}. *)

val float_of_string_opt: string -> float option
(** Convert the given string to a float. The string is read in decimal
    (by default) or in hexadecimal (marked by [0x] or [0X]).
    The format of decimal floating-point numbers is
    [ [-] dd.ddd (e|E) [+|-] dd ], where [d] stands for a decimal digit.
    The format of hexadecimal floating-point numbers is
    [ [-] 0(x|X) hh.hhh (p|P) [+|-] dd ], where [h] stands for an
    hexadecimal digit and [d] for a decimal digit.
    In both cases, at least one of the integer and fractional parts must be
    given; the exponent part is optional.
    The [_] (underscore) character can appear anywhere in the string
    and is ignored.
    Depending on the execution platforms, other representations of
    floating-point numbers can be accepted, but should not be relied upon.
    Return [None] if the given string is not a valid representation of a float.
    @since 4.05
*)

external float_of_string : string -> float = "caml_float_of_string"
(** Same as {!Stdlib.float_of_string_opt}, but raise
    [Failure "float_of_string"] instead of returning [None]. *)
(** {1 Pair operations} *)

external fst : 'a * 'b -> 'a = "%field0"
(** Return the first component of a pair. *)

external snd : 'a * 'b -> 'b = "%field1"
(** Return the second component of a pair. *)

(** {1 List operations}

    More list operations are provided in module {!List}.
*)

val ( @ ) : 'a list -> 'a list -> 'a list
(** [l0 @ l1] appends [l1] to [l0]. Same function as {!List.append}.
    Right-associative operator, see {!Ocaml_operators} for more information.
    @since 5.1 this function is tail-recursive.
*)
(** {1 Input/output}

    Note: all input/output functions can raise [Sys_error] when the system
    calls they invoke fail. *)

type in_channel
(** The type of input channel. *)

type out_channel
(** The type of output channel. *)

val stdin : in_channel
(** The standard input for the process. *)

val stdout : out_channel
(** The standard output for the process. *)

val stderr : out_channel
(** The standard error output for the process. *)
(** {2 Output functions on standard output} *)

val print_char : char -> unit
(** Print a character on standard output. *)

val print_string : string -> unit
(** Print a string on standard output. *)

val print_bytes : bytes -> unit
(** Print a byte sequence on standard output.
    @since 4.02 *)

val print_int : int -> unit
(** Print an integer, in decimal, on standard output. *)

val print_float : float -> unit
(** Print a floating-point number, in decimal, on standard output.
    The conversion of the number to a string uses {!string_of_float} and
    can involve a loss of precision. *)

val print_endline : string -> unit
(** Print a string, followed by a newline character, on
    standard output and flush standard output. *)

val print_newline : unit -> unit
(** Print a newline character on standard output, and flush
    standard output. This can be used to simulate line
    buffering of standard output. *)
(** {2 Output functions on standard error} *)

val prerr_char : char -> unit
(** Print a character on standard error. *)

val prerr_string : string -> unit
(** Print a string on standard error. *)

val prerr_bytes : bytes -> unit
(** Print a byte sequence on standard error.
    @since 4.02 *)

val prerr_int : int -> unit
(** Print an integer, in decimal, on standard error. *)

val prerr_float : float -> unit
(** Print a floating-point number, in decimal, on standard error.
    The conversion of the number to a string uses {!string_of_float} and
    can involve a loss of precision. *)

val prerr_endline : string -> unit
(** Print a string, followed by a newline character on standard
    error and flush standard error. *)

val prerr_newline : unit -> unit
(** Print a newline character on standard error, and flush
    standard error. *)
(** {2 Input functions on standard input} *)

val read_line : unit -> string
(** Flush standard output, then read characters from standard input
    until a newline character is encountered.
    Return the string of all characters read, without the newline character
    at the end.
    @raise End_of_file if the end of the file is reached at the beginning of
    line.
*)

val read_int_opt: unit -> int option
(** Flush standard output, then read one line from standard input
    and convert it to an integer.
    Return [None] if the line read is not a valid representation of an integer.
    @since 4.05
*)

val read_int : unit -> int
(** Same as {!Stdlib.read_int_opt}, but raise [Failure "int_of_string"]
    instead of returning [None]. *)

val read_float_opt: unit -> float option
(** Flush standard output, then read one line from standard input
    and convert it to a floating-point number.
    Return [None] if the line read is not a valid representation of a
    floating-point number.
    @since 4.05
*)

val read_float : unit -> float
(** Same as {!Stdlib.read_float_opt}, but raise [Failure "float_of_string"]
    instead of returning [None]. *)
(** {2 General output functions} *)

type open_flag =
    Open_rdonly (** open for reading. *)
  | Open_wronly (** open for writing. *)
  | Open_append (** open for appending: always write at end of file. *)
  | Open_creat (** create the file if it does not exist. *)
  | Open_trunc (** empty the file if it already exists. *)
  | Open_excl (** fail if Open_creat and the file already exists. *)
  | Open_binary (** open in binary mode (no conversion). *)
  | Open_text (** open in text mode (may perform conversions). *)
  | Open_nonblock (** open in non-blocking mode. *)
(** Opening modes for {!Stdlib.open_out_gen} and
    {!Stdlib.open_in_gen}. *)
val open_out : string -> out_channel
(** Open the named file for writing, and return a new output channel
    on that file, positioned at the beginning of the file. The
    file is truncated to zero length if it already exists. It
    is created if it does not already exists. *)

val open_out_bin : string -> out_channel
(** Same as {!Stdlib.open_out}, but the file is opened in binary mode,
    so that no translation takes place during writes. On operating
    systems that do not distinguish between text mode and binary
    mode, this function behaves like {!Stdlib.open_out}. *)

val open_out_gen : open_flag list -> int -> string -> out_channel
(** [open_out_gen mode perm filename] opens the named file for writing,
    as described above. The extra argument [mode]
    specifies the opening mode. The extra argument [perm] specifies
    the file permissions, in case the file must be created.
    {!Stdlib.open_out} and {!Stdlib.open_out_bin} are special
    cases of this function. *)
val flush : out_channel -> unit
(** Flush the buffer associated with the given output channel,
    performing all pending writes on that channel.
    Interactive programs must be careful about flushing standard
    output and standard error at the right time. *)

val flush_all : unit -> unit
(** Flush all open output channels; ignore errors. *)

val output_char : out_channel -> char -> unit
(** Write the character on the given output channel. *)

val output_string : out_channel -> string -> unit
(** Write the string on the given output channel. *)

val output_bytes : out_channel -> bytes -> unit
(** Write the byte sequence on the given output channel.
    @since 4.02 *)
val output : out_channel -> bytes -> int -> int -> unit
(** [output oc buf pos len] writes [len] characters from byte sequence [buf],
    starting at offset [pos], to the given output channel [oc].
    @raise Invalid_argument if [pos] and [len] do not
    designate a valid range of [buf]. *)

val output_substring : out_channel -> string -> int -> int -> unit
(** Same as [output] but take a string as argument instead of
    a byte sequence.
    @since 4.02 *)

val output_byte : out_channel -> int -> unit
(** Write one 8-bit integer (as the single character with that code)
    on the given output channel. The given integer is taken modulo
    256. *)
val output_binary_int : out_channel -> int -> unit
(** Write one integer in binary format (4 bytes, big-endian)
    on the given output channel.
    The given integer is taken modulo 2{^32}.
    The only reliable way to read it back is through the
    {!Stdlib.input_binary_int} function. The format is compatible across
    all machines for a given version of OCaml. *)

val output_value : out_channel -> 'a -> unit
(** Write the representation of a structured value of any type
    to a channel. Circularities and sharing inside the value
    are detected and preserved. The object can be read back,
    by the function {!Stdlib.input_value}. See the description of module
    {!Marshal} for more information. {!Stdlib.output_value} is equivalent
    to {!Marshal.to_channel} with an empty list of flags. *)
val seek_out : out_channel -> int -> unit
(** [seek_out chan pos] sets the current writing position to [pos]
    for channel [chan]. This works only for regular files. On
    files of other kinds (such as terminals, pipes and sockets),
    the behavior is unspecified. *)

val pos_out : out_channel -> int
(** Return the current writing position for the given channel. Does
    not work on channels opened with the [Open_append] flag (returns
    unspecified results).
    For files opened in text mode under Windows, the returned position
    is approximate (owing to end-of-line conversion); in particular,
    saving the current position with [pos_out], then going back to
    this position using [seek_out] will not work. For this
    programming idiom to work reliably and portably, the file must be
    opened in binary mode. *)

val out_channel_length : out_channel -> int
(** Return the size (number of characters) of the regular file
    on which the given channel is opened. If the channel is opened
    on a file that is not a regular file, the result is meaningless. *)
val close_out : out_channel -> unit
(** Close the given channel, flushing all buffered write operations.
    Output functions raise a [Sys_error] exception when they are
    applied to a closed output channel, except [close_out] and [flush],
    which do nothing when applied to an already closed channel.
    Note that [close_out] may raise [Sys_error] if the operating
    system signals an error when flushing or closing. *)

val close_out_noerr : out_channel -> unit
(** Same as [close_out], but ignore all errors. *)

val set_binary_mode_out : out_channel -> bool -> unit
(** [set_binary_mode_out oc true] sets the channel [oc] to binary
    mode: no translations take place during output.
    [set_binary_mode_out oc false] sets the channel [oc] to text
    mode: depending on the operating system, some translations
    may take place during output. For instance, under Windows,
    end-of-lines will be translated from [\n] to [\r\n].
    This function has no effect under operating systems that
    do not distinguish between text mode and binary mode. *)
(** {2 General input functions} *)

val open_in : string -> in_channel
(** Open the named file for reading, and return a new input channel
    on that file, positioned at the beginning of the file. *)

val open_in_bin : string -> in_channel
(** Same as {!Stdlib.open_in}, but the file is opened in binary mode,
    so that no translation takes place during reads. On operating
    systems that do not distinguish between text mode and binary
    mode, this function behaves like {!Stdlib.open_in}. *)

val open_in_gen : open_flag list -> int -> string -> in_channel
(** [open_in_gen mode perm filename] opens the named file for reading,
    as described above. The extra arguments
    [mode] and [perm] specify the opening mode and file permissions.
    {!Stdlib.open_in} and {!Stdlib.open_in_bin} are special
    cases of this function. *)
val input_char : in_channel -> char
(** Read one character from the given input channel.
    @raise End_of_file if there are no more characters to read. *)

val input_line : in_channel -> string
(** Read characters from the given input channel, until a
    newline character is encountered. Return the string of
    all characters read, without the newline character at the end.
    @raise End_of_file if the end of the file is reached
    at the beginning of line. *)
val input : in_channel -> bytes -> int -> int -> int
(** [input ic buf pos len] reads up to [len] characters from
    the given channel [ic], storing them in byte sequence [buf], starting at
    character number [pos].
    It returns the actual number of characters read, between 0 and
    [len] (inclusive).
    A return value of 0 means that the end of file was reached.
    A return value between 0 and [len] exclusive means that
    not all requested [len] characters were read, either because
    no more characters were available at that time, or because
    the implementation found it convenient to do a partial read;
    [input] must be called again to read the remaining characters,
    if desired. (See also {!Stdlib.really_input} for reading
    exactly [len] characters.)
    Exception [Invalid_argument "input"] is raised if [pos] and [len]
    do not designate a valid range of [buf]. *)
val really_input : in_channel -> bytes -> int -> int -> unit
(** [really_input ic buf pos len] reads [len] characters from channel [ic],
    storing them in byte sequence [buf], starting at character number [pos].
    @raise End_of_file if the end of file is reached before [len]
    characters have been read.
    @raise Invalid_argument if
    [pos] and [len] do not designate a valid range of [buf]. *)

val really_input_string : in_channel -> int -> string
(** [really_input_string ic len] reads [len] characters from channel [ic]
    and returns them in a new string.
    @raise End_of_file if the end of file is reached before [len]
    characters have been read.
    @since 4.02 *)
val input_byte : in_channel -> int
(** Same as {!Stdlib.input_char}, but return the 8-bit integer representing
    the character.
    @raise End_of_file if the end of file was reached. *)

val input_binary_int : in_channel -> int
(** Read an integer encoded in binary format (4 bytes, big-endian)
    from the given input channel. See {!Stdlib.output_binary_int}.
    @raise End_of_file if the end of file was reached while reading the
    integer. *)

val input_value : in_channel -> 'a
(** Read the representation of a structured value, as produced
    by {!Stdlib.output_value}, and return the corresponding value.
    This function is identical to {!Marshal.from_channel};
    see the description of module {!Marshal} for more information,
    in particular concerning the lack of type safety. *)

val seek_in : in_channel -> int -> unit
(** [seek_in chan pos] sets the current reading position to [pos]
    for channel [chan]. This works only for regular files. On
    files of other kinds, the behavior is unspecified. *)
val pos_in : in_channel -> int
(** Return the current reading position for the given channel. For
    files opened in text mode under Windows, the returned position is
    approximate (owing to end-of-line conversion); in particular,
    saving the current position with [pos_in], then going back to this
    position using [seek_in] will not work. For this programming
    idiom to work reliably and portably, the file must be opened in
    binary mode. *)

val in_channel_length : in_channel -> int
(** Return the size (number of characters) of the regular file
    on which the given channel is opened. If the channel is opened
    on a file that is not a regular file, the result is meaningless.
    The returned size does not take into account the end-of-line
    translations that can be performed when reading from a channel
    opened in text mode. *)
val close_in : in_channel -> unit
(** Close the given channel. Input functions raise a [Sys_error]
    exception when they are applied to a closed input channel,
    except [close_in], which does nothing when applied to an already
    closed channel. *)

val close_in_noerr : in_channel -> unit
(** Same as [close_in], but ignore all errors. *)

val set_binary_mode_in : in_channel -> bool -> unit
(** [set_binary_mode_in ic true] sets the channel [ic] to binary
    mode: no translations take place during input.
    [set_binary_mode_out ic false] sets the channel [ic] to text
    mode: depending on the operating system, some translations
    may take place during input. For instance, under Windows,
    end-of-lines will be translated from [\r\n] to [\n].
    This function has no effect under operating systems that
    do not distinguish between text mode and binary mode. *)
* { 2 Operations on large files }
module LargeFile :
sig
val seek_out : out_channel -> int64 -> unit
(** 64-bit variant of {!Stdlib.seek_out}. *)
val pos_out : out_channel -> int64
(** 64-bit variant of {!Stdlib.pos_out}. *)
val out_channel_length : out_channel -> int64
(** 64-bit variant of {!Stdlib.out_channel_length}. *)
val seek_in : in_channel -> int64 -> unit
(** 64-bit variant of {!Stdlib.seek_in}. *)
val pos_in : in_channel -> int64
(** 64-bit variant of {!Stdlib.pos_in}. *)
val in_channel_length : in_channel -> int64
(** 64-bit variant of {!Stdlib.in_channel_length}. *)
end
(** Operations on large files.
    This sub-module provides 64-bit variants of the channel functions
    that manipulate file positions and file sizes.  By representing
    positions and sizes by 64-bit integers (type [int64]) instead of
    regular integers (type [int]), these alternate functions allow
    operating on files whose sizes are greater than [max_int]. *)
(** {1 References} *)
type 'a ref = { mutable contents : 'a }
(** The type of references (mutable indirection cells) containing
    a value of type ['a].  Read the contents with [( ! )] and update
    them with [( := )] below. *)
external ref : 'a -> 'a ref = "%makemutable"
(** Return a fresh reference containing the given value. *)
external ( ! ) : 'a ref -> 'a = "%field0"
(** [!r] returns the current contents of reference [r].
    Equivalent to [fun r -> r.contents].
    Unary operator, see {!Ocaml_operators} for more information.
*)
external ( := ) : 'a ref -> 'a -> unit = "%setfield0"
(** [r := a] stores the value of [a] in reference [r].
    Equivalent to [fun r v -> r.contents <- v].
    Right-associative operator, see {!Ocaml_operators} for more information.
*)
external incr : int ref -> unit = "%incr"
(** Increment the integer contained in the given reference.
    Equivalent to [fun r -> r := succ !r]. *)
external decr : int ref -> unit = "%decr"
(** Decrement the integer contained in the given reference.
    Equivalent to [fun r -> r := pred !r]. *)
(** {1 Result type} *)

type ('a,'b) result = Ok of 'a | Error of 'b
(** @since 4.03 *)
(** {1 Operations on format strings} *)
(** Format strings are character strings with special lexical conventions
    that define the functionality of formatted input/output functions.  Format
    strings are used to read data with formatted input functions from module
    {!Scanf} and to print data with formatted output functions from modules
    {!Printf} and {!Format}.

    Format strings are made of three kinds of entities:
    - {e conversions specifications}, introduced by the special character ['%']
      followed by one or more characters specifying what kind of argument to
      read or print,
    - {e formatting indications}, introduced by the special character ['@']
      followed by one or more characters specifying how to read or print the
      argument,
    - {e plain characters} that are regular characters with usual lexical
      conventions.  Plain characters specify string literals to be read in the
      input or printed in the output.

    There is an additional lexical rule to escape the special characters ['%']
    and ['@'] in format strings: if a special character follows a ['%']
    character, it is treated as a plain character.  In other words, ["%%"] is
    considered as a plain ['%'] and ["%@"] as a plain ['@'].

    For more information about conversion specifications and formatting
    indications available, read the documentation of modules {!Scanf},
    {!Printf} and {!Format}.
*)
(** Format strings have a general and highly polymorphic type
    [('a, 'b, 'c, 'd, 'e, 'f) format6].
    The two simplified types, [format] and [format4] below are
    included for backward compatibility with earlier releases of
    OCaml.

    The meaning of format string type parameters is as follows:

    - ['a] is the type of the parameters of the format for formatted output
      functions ([printf]-style functions);
      ['a] is the type of the values read by the format for formatted input
      functions ([scanf]-style functions).

    - ['b] is the type of input source for formatted input functions and the
      type of output target for formatted output functions.
      For [printf]-style functions from module {!Printf}, ['b] is typically
      [out_channel];
      for [printf]-style functions from module {!Format}, ['b] is typically
      {!type:Format.formatter};
      for [scanf]-style functions from module {!Scanf}, ['b] is typically
      {!Scanf.Scanning.in_channel}.

      Type argument ['b] is also the type of the first argument given to
      user's defined printing functions for [%a] and [%t] conversions,
      and user's defined reading functions for [%r] conversion.

    - ['c] is the type of the result of the [%a] and [%t] printing
      functions, and also the type of the argument transmitted to the
      first argument of [kprintf]-style functions or to the
      [kscanf]-style functions.

    - ['d] is the type of parameters for the [scanf]-style functions.

    - ['e] is the type of the receiver function for the [scanf]-style functions.

    - ['f] is the final result type of a formatted input/output function
      invocation: for the [printf]-style functions, it is typically [unit];
      for the [scanf]-style functions, it is typically the result type of the
      receiver function.
*)
type ('a, 'b, 'c, 'd, 'e, 'f) format6 =
('a, 'b, 'c, 'd, 'e, 'f) CamlinternalFormatBasics.format6
(** The general type of format strings; the meaning of the six type
    parameters is described above. *)
type ('a, 'b, 'c, 'd) format4 = ('a, 'b, 'c, 'c, 'c, 'd) format6
(** Simplified format string type, included for backward compatibility
    with earlier releases of OCaml. *)
type ('a, 'b, 'c) format = ('a, 'b, 'c, 'c) format4
(** Simplified format string type, included for backward compatibility
    with earlier releases of OCaml. *)
val string_of_format : ('a, 'b, 'c, 'd, 'e, 'f) format6 -> string
(** Converts a format string into a string. *)
external format_of_string :
('a, 'b, 'c, 'd, 'e, 'f) format6 ->
('a, 'b, 'c, 'd, 'e, 'f) format6 = "%identity"
(** [format_of_string s] returns a format string read from the string
    literal [s].
    Note: [format_of_string] cannot convert a string argument that is not a
    literal.  If you need this functionality, use the more general
    {!Scanf.format_from_string} function.
*)
val ( ^^ ) :
('a, 'b, 'c, 'd, 'e, 'f) format6 ->
('f, 'b, 'c, 'e, 'g, 'h) format6 ->
('a, 'b, 'c, 'd, 'g, 'h) format6
(** [f1 ^^ f2] catenates format strings [f1] and [f2].  The result is a
    format string that behaves as the concatenation of format strings [f1] and
    [f2]: in case of formatted output, it accepts arguments from [f1], then
    arguments from [f2]; in case of formatted input, it returns results from
    [f1], then results from [f2].
    Right-associative operator, see {!Ocaml_operators} for more information.
*)
(** {1 Program termination} *)
val exit : int -> 'a
(** Terminate the process, returning the given status code to the operating
    system: usually 0 to indicate no errors, and a small positive integer to
    indicate failure.  All open output channels are flushed with [flush_all].
    The callbacks registered with {!Domain.at_exit} are called followed by
    those registered with {!Stdlib.at_exit}.

    An implicit [exit 0] is performed each time a program terminates normally.
    An implicit [exit 2] is performed if the program terminates early because
    of an uncaught exception. *)
val at_exit : (unit -> unit) -> unit
(** Register the given function to be called at program termination
    time.  The functions registered with [at_exit] will be called when
    the program does any of the following:
    - executes {!Stdlib.exit}
    - terminates, either normally or because of an uncaught
      exception
    - executes the C function [caml_shutdown].

    The functions are called in 'last in, first out' order: the
    function most recently added with [at_exit] is called first. *)
(**/**)
(* The following is for system use only. Do not call directly. *)
val valid_float_lexem : string -> string
(* NOTE(review): internal helper; presumably ensures a float's textual
   form is a valid OCaml float lexem — exact contract not visible here. *)
val unsafe_really_input : in_channel -> bytes -> int -> int -> unit
(* NOTE(review): looks like a [really_input] variant without bounds
   checking — confirm before any direct use. *)
val do_at_exit : unit -> unit
(* Presumably runs the callbacks registered with [at_exit]; invoked by
   the runtime at termination. *)
val do_domain_local_at_exit : (unit -> unit) ref
(* Mutable hook for domain-local at-exit processing; system use only. *)
(**/**)
(** {1:modules Standard library modules } *)
(*MODULE_ALIASES*)
module Arg = Arg
module Array = Array
module ArrayLabels = ArrayLabels
module Atomic = Atomic
module Bigarray = Bigarray
module Bool = Bool
module Buffer = Buffer
module Bytes = Bytes
module BytesLabels = BytesLabels
module Callback = Callback
module Char = Char
module Complex = Complex
module Condition = Condition
module Digest = Digest
module Domain = Domain
[@@alert "-unstable"]
[@@alert unstable
"The Domain interface may change in incompatible ways in the future."
]
module Effect = Effect
[@@alert "-unstable"]
[@@alert unstable
"The Effect interface may change in incompatible ways in the future."
]
module Either = Either
module Ephemeron = Ephemeron
module Filename = Filename
module Float = Float
module Format = Format
module Fun = Fun
module Gc = Gc
module Hashtbl = Hashtbl
module In_channel = In_channel
module Int = Int
module Int32 = Int32
module Int64 = Int64
module Lazy = Lazy
module Lexing = Lexing
module List = List
module ListLabels = ListLabels
module Map = Map
module Marshal = Marshal
module MoreLabels = MoreLabels
module Mutex = Mutex
module Nativeint = Nativeint
module Obj = Obj
module Oo = Oo
module Option = Option
module Out_channel = Out_channel
module Parsing = Parsing
module Printexc = Printexc
module Printf = Printf
module Queue = Queue
module Random = Random
module Result = Result
module Scanf = Scanf
module Semaphore = Semaphore
module Seq = Seq
module Set = Set
module Stack = Stack
module StdLabels = StdLabels
module String = String
module StringLabels = StringLabels
module Sys = Sys
module Type = Type
module Uchar = Uchar
module Unit = Unit
module Weak = Weak
| null | https://raw.githubusercontent.com/avsm/eeww/4d65720b5dd51376842ffe5c8c220d5329c1dc10/boot/ocaml/stdlib/stdlib.mli | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
* Raise the given exception value
* Raise exception [Invalid_argument] with the given string.
* Raise exception [Failure] with the given string.
* The [Exit] exception is not raised by any library function. It is
provided for use in your programs.
* Exception raised when none of the cases of a pattern-matching
apply. The arguments are the location of the match keyword in the
source code (file name, line number, column number).
* Exception raised when an assertion fails. The arguments are the
location of the assert keyword in the source code (file name, line
number, column number).
* Exception raised by library functions to signal that the given
arguments do not make sense. The string gives some information to
the programmer. As a general rule, this exception should not be
caught, it denotes a programming error and the code should be
modified not to trigger it.
* Exception raised by library functions to signal that they are
undefined on the given arguments. The string is meant to give some
information to the programmer; you must not pattern match on the
string literal because it may change in future versions (use
Failure _ instead).
* Exception raised by search functions when the desired object could
not be found.
* Exception raised by the garbage collector when there is
insufficient memory to complete the computation. (Not reliable for
allocations on the minor heap.)
* Exception raised by the input/output functions to report an
operating system error. The string is meant to give some
information to the programmer; you must not pattern match on the
string literal because it may change in future versions (use
Sys_error _ instead).
* Exception raised by input functions to signal that the end of file
has been reached.
* Exception raised when an ill-founded recursive module definition
is evaluated. The arguments are the location of the definition in
the source code (file name, line number, column number).
* Structural ordering functions. These functions coincide with
the usual orderings over integers, characters, strings, byte sequences
and floating-point numbers, and extend them to a
total ordering over all types.
The ordering is compatible with [( = )]. As in the case
of [( = )], mutable structures are compared by contents.
Comparison between functional values raises [Invalid_argument].
Comparison between cyclic structures may not terminate.
Left-associative operator, see {!Ocaml_operators} for more information.
* [e1 == e2] tests for physical equality of [e1] and [e2].
On mutable types such as references, arrays, byte sequences, records with
mutable fields and objects with mutable instance variables,
[e1 == e2] is true if and only if physical modification of [e1]
also affects [e2].
On non-mutable types, the behavior of [( == )] is
implementation-dependent; however, it is guaranteed that
[e1 == e2] implies [compare e1 e2 = 0].
Left-associative operator, see {!Ocaml_operators} for more information.
* The boolean negation.
* Integers are [Sys.int_size] bits wide.
All operations are taken modulo 2{^[Sys.int_size]}.
They do not fail on overflow.
* Unary negation. You can also write [- e] instead of [~- e].
Unary operator, see {!Ocaml_operators} for more information.
* [succ x] is [x + 1].
* [pred x] is [x - 1].
* Integer addition.
Left-associative operator, see {!Ocaml_operators} for more information.
* Integer subtraction.
Left-associative operator, , see {!Ocaml_operators} for more information.
* Integer multiplication.
Left-associative operator, see {!Ocaml_operators} for more information.
* [abs x] is the absolute value of [x]. On [min_int] this
is [min_int] itself and thus remains negative.
* The greatest representable integer.
* The smallest representable integer.
* Bitwise logical and.
Left-associative operator, see {!Ocaml_operators} for more information.
* Bitwise logical or.
Left-associative operator, see {!Ocaml_operators} for more information.
* Bitwise logical exclusive or.
Left-associative operator, see {!Ocaml_operators} for more information.
* Bitwise logical negation.
* [n lsl m] shifts [n] to the left by [m] bits.
The result is unspecified if [m < 0] or [m > Sys.int_size].
Right-associative operator, see {!Ocaml_operators} for more information.
* [n asr m] shifts [n] to the right by [m] bits.
This is an arithmetic shift: the sign bit of [n] is replicated.
The result is unspecified if [m < 0] or [m > Sys.int_size].
Right-associative operator, see {!Ocaml_operators} for more information.
* Unary negation. You can also write [-. e] instead of [~-. e].
Unary operator, see {!Ocaml_operators} for more information.
* Floating-point addition.
Left-associative operator, see {!Ocaml_operators} for more information.
* Floating-point subtraction.
Left-associative operator, see {!Ocaml_operators} for more information.
* Floating-point multiplication.
Left-associative operator, see {!Ocaml_operators} for more information.
* Floating-point division.
Left-associative operator, see {!Ocaml_operators} for more information.
* Exponentiation.
Right-associative operator, see {!Ocaml_operators} for more information.
* Square root.
* Exponential.
* Natural logarithm.
* Base 10 logarithm.
* Cosine. Argument is in radians.
* Tangent. Argument is in radians.
* Arc tangent.
Result is in radians and is between [-pi/2] and [pi/2].
* [atan2 y x] returns the arc tangent of [y /. x]. The signs of [x]
and [y] are used to determine the quadrant of the result.
Result is in radians and is between [-pi] and [pi].
* Hyperbolic cosine. Argument is in radians.
* Hyperbolic sine. Argument is in radians.
* Hyperbolic tangent. Argument is in radians.
* Round above to an integer value.
[ceil f] returns the least integer value greater than or equal to [f].
The result is returned as a float.
* Round below to an integer value.
[floor f] returns the greatest integer value less than or
equal to [f].
The result is returned as a float.
* [abs_float f] returns the absolute value of [f].
* [modf f] returns the pair of the fractional and integral
part of [f].
* Same as {!Stdlib.float_of_int}.
* Convert an integer to floating-point.
* Same as {!Stdlib.int_of_float}.
* Positive infinity.
* Negative infinity.
* The largest positive finite value of type [float].
* Normal number, none of the below
* Number is positive or negative infinity
* Not a number: result of an undefined operation
* {1 String operations}
More string operations are provided in module {!String}.
* Return the ASCII code of the argument.
* Return the character with the given ASCII code.
@raise Invalid_argument if the argument is
outside the range 0--255.
* Discard the value of its argument and return [()].
For instance, [ignore(f x)] discards the result of
the side-effecting function [f]. It is equivalent to
[f x; ()], except that the latter may generate a
compiler warning; writing [ignore(f x)] instead
avoids the warning.
* Return the string representation of a boolean. As the returned values
may be shared, the user should not modify them directly.
* Same as {!Stdlib.bool_of_string_opt}, but raise
[Invalid_argument "bool_of_string"] instead of returning [None].
* Return the string representation of an integer, in decimal.
* Same as {!Stdlib.int_of_string_opt}, but raise
[Failure "int_of_string"] instead of returning [None].
* Return a string representation of a floating-point number.
This conversion can involve a loss of precision. For greater control over
the manner in which the number is printed, see {!Printf}.
* Same as {!Stdlib.float_of_string_opt}, but raise
[Failure "float_of_string"] instead of returning [None].
* {1 Pair operations}
* The type of input channel.
* The type of output channel.
* The standard input for the process.
* The standard output for the process.
* The standard error output for the process.
* Print a character on standard output.
* Print a string on standard output.
* Print an integer, in decimal, on standard output.
* Print a string, followed by a newline character, on
standard output and flush standard output.
* Print a newline character on standard output, and flush
standard output. This can be used to simulate line
buffering of standard output.
* Print a character on standard error.
* Print a string on standard error.
* Print an integer, in decimal, on standard error.
* Print a string, followed by a newline character on standard
error and flush standard error.
* Print a newline character on standard error, and flush
standard error.
* Flush standard output, then read characters from standard input
until a newline character is encountered.
Return the string of all characters read, without the newline character
at the end.
@raise End_of_file if the end of the file is reached at the beginning of
line.
* Same as {!Stdlib.read_int_opt}, but raise [Failure "int_of_string"]
instead of returning [None].
* Same as {!Stdlib.read_float_opt}, but raise [Failure "float_of_string"]
instead of returning [None].
* open for reading.
* open for writing.
* open for appending: always write at end of file.
* create the file if it does not exist.
* empty the file if it already exists.
* fail if Open_creat and the file already exists.
* open in binary mode (no conversion).
* open in text mode (may perform conversions).
* open in non-blocking mode.
* Same as {!Stdlib.open_out}, but the file is opened in binary mode,
so that no translation takes place during writes. On operating
systems that do not distinguish between text mode and binary
mode, this function behaves like {!Stdlib.open_out}.
* Flush the buffer associated with the given output channel,
performing all pending writes on that channel.
Interactive programs must be careful about flushing standard
output and standard error at the right time.
* Flush all open output channels; ignore errors.
* Write the character on the given output channel.
* Write the string on the given output channel.
* Return the size (number of characters) of the regular file
on which the given channel is opened. If the channel is opened
on a file that is not a regular file, the result is meaningless.
* Same as [close_out], but ignore all errors.
* Open the named file for reading, and return a new input channel
on that file, positioned at the beginning of the file.
* Read characters from the given input channel, until a
newline character is encountered. Return the string of
all characters read, without the newline character at the end.
@raise End_of_file if the end of the file is reached
at the beginning of line.
* Read the representation of a structured value, as produced
by {!Stdlib.output_value}, and return the corresponding value.
This function is identical to {!Marshal.from_channel};
see the description of module {!Marshal} for more information,
in particular concerning the lack of type safety.
* [seek_in chan pos] sets the current reading position to [pos]
for channel [chan]. This works only for regular files. On
files of other kinds, the behavior is unspecified.
* Return the size (number of characters) of the regular file
on which the given channel is opened. If the channel is opened
on a file that is not a regular file, the result is meaningless.
The returned size does not take into account the end-of-line
translations that can be performed when reading from a channel
opened in text mode.
* Close the given channel. Input functions raise a [Sys_error]
exception when they are applied to a closed input channel,
except [close_in], which does nothing when applied to an already
closed channel.
* Same as [close_in], but ignore all errors.
* {1 References}
* The type of references (mutable indirection cells) containing
a value of type ['a].
* Return a fresh reference containing the given value.
* [r := a] stores the value of [a] in reference [r].
Equivalent to [fun r v -> r.contents <- v].
Right-associative operator, see {!Ocaml_operators} for more information.
* Increment the integer contained in the given reference.
Equivalent to [fun r -> r := succ !r].
* Decrement the integer contained in the given reference.
Equivalent to [fun r -> r := pred !r].
* {1 Operations on format strings}
* Converts a format string into a string.
* [format_of_string s] returns a format string read from the string
literal [s].
Note: [format_of_string] can not convert a string argument that is not a
literal. If you need this functionality, use the more general
{!Scanf.format_from_string} function.
* [f1 ^^ f2] catenates format strings [f1] and [f2]. The result is a
format string that behaves as the concatenation of format strings [f1] and
[f2]: in case of formatted output, it accepts arguments from [f1], then
arguments from [f2]; in case of formatted input, it returns results from
[f1], then results from [f2].
Right-associative operator, see {!Ocaml_operators} for more information.
*/*
The following is for system use only. Do not call directly.
*/*
MODULE_ALIASES | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
* The OCaml Standard library .
This module is automatically opened at the beginning of each
compilation . All components of this module can therefore be
referred by their short name , without prefixing them by [ ] .
In particular , it provides the basic operations over the built - in
types ( numbers , booleans , byte sequences , strings , exceptions ,
references , lists , arrays , input - output channels , ... ) and the
{ { ! modules}standard library modules } .
This module is automatically opened at the beginning of each
compilation. All components of this module can therefore be
referred by their short name, without prefixing them by [Stdlib].
In particular, it provides the basic operations over the built-in
types (numbers, booleans, byte sequences, strings, exceptions,
references, lists, arrays, input-output channels, ...) and the
{{!modules}standard library modules}.
*)
* { 1 Exceptions }
external raise : exn -> 'a = "%raise"
external raise_notrace : exn -> 'a = "%raise_notrace"
* A faster version [ raise ] which does not record the backtrace .
@since 4.02
@since 4.02
*)
val invalid_arg : string -> 'a
val failwith : string -> 'a
exception Exit
exception Match_failure of (string * int * int)
[@ocaml.warn_on_literal_pattern]
exception Assert_failure of (string * int * int)
[@ocaml.warn_on_literal_pattern]
exception Invalid_argument of string
[@ocaml.warn_on_literal_pattern]
exception Failure of string
[@ocaml.warn_on_literal_pattern]
exception Not_found
exception Out_of_memory
exception Stack_overflow
* Exception raised by the bytecode interpreter when the evaluation
stack reaches its maximal size . This often indicates infinite or
excessively deep recursion in the user 's program .
Before 4.10 , it was not fully implemented by the native - code
compiler .
stack reaches its maximal size. This often indicates infinite or
excessively deep recursion in the user's program.
Before 4.10, it was not fully implemented by the native-code
compiler. *)
exception Sys_error of string
[@ocaml.warn_on_literal_pattern]
exception End_of_file
exception Division_by_zero
* Exception raised by integer division and remainder operations when
their second argument is zero .
their second argument is zero. *)
exception Sys_blocked_io
* A special case of Sys_error raised when no I / O is possible on a
non - blocking I / O channel .
non-blocking I/O channel. *)
exception Undefined_recursive_module of (string * int * int)
[@ocaml.warn_on_literal_pattern]
* { 1 Comparisons }
external ( = ) : 'a -> 'a -> bool = "%equal"
* [ e1 = e2 ] tests for structural equality of [ e1 ] and [ e2 ] .
Mutable structures ( e.g. references and arrays ) are equal
if and only if their current contents are structurally equal ,
even if the two mutable objects are not the same physical object .
Equality between functional values raises [ Invalid_argument ] .
Equality between cyclic data structures may not terminate .
Left - associative operator , see { ! Ocaml_operators } for more information .
Mutable structures (e.g. references and arrays) are equal
if and only if their current contents are structurally equal,
even if the two mutable objects are not the same physical object.
Equality between functional values raises [Invalid_argument].
Equality between cyclic data structures may not terminate.
Left-associative operator, see {!Ocaml_operators} for more information. *)
external ( <> ) : 'a -> 'a -> bool = "%notequal"
* Negation of { ! . ( = ) } .
Left - associative operator , see { ! Ocaml_operators } for more information .
Left-associative operator, see {!Ocaml_operators} for more information.
*)
external ( < ) : 'a -> 'a -> bool = "%lessthan"
* See { ! . ( > = ) } .
Left - associative operator , see { ! Ocaml_operators } for more information .
Left-associative operator, see {!Ocaml_operators} for more information.
*)
external ( > ) : 'a -> 'a -> bool = "%greaterthan"
* See { ! . ( > = ) } .
Left - associative operator , see { ! Ocaml_operators } for more information .
Left-associative operator, see {!Ocaml_operators} for more information.
*)
external ( <= ) : 'a -> 'a -> bool = "%lessequal"
* See { ! . ( > = ) } .
Left - associative operator , see { ! Ocaml_operators } for more information .
Left-associative operator, see {!Ocaml_operators} for more information.
*)
external ( >= ) : 'a -> 'a -> bool = "%greaterequal"
external compare : 'a -> 'a -> int = "%compare"
* [ compare x y ] returns [ 0 ] if [ x ] is equal to [ y ] ,
a negative integer if [ x ] is less than [ y ] , and a positive integer
if [ x ] is greater than [ y ] . The ordering implemented by [ compare ]
is compatible with the comparison predicates [ =] , [ < ] and [ > ]
defined above , with one difference on the treatment of the float value
{ ! . Namely , the comparison predicates treat [ nan ]
as different from any other float value , including itself ;
while [ compare ] treats [ nan ] as equal to itself and less than any
other float value . This treatment of ensures that [ compare ]
defines a total ordering relation .
[ compare ] applied to functional values may raise [ Invalid_argument ] .
[ compare ] applied to cyclic structures may not terminate .
The [ compare ] function can be used as the comparison function
required by the { ! Set . Make } and { ! Map . Make } functors , as well as
the { ! List.sort } and { ! Array.sort } functions .
a negative integer if [x] is less than [y], and a positive integer
if [x] is greater than [y]. The ordering implemented by [compare]
is compatible with the comparison predicates [=], [<] and [>]
defined above, with one difference on the treatment of the float value
{!Stdlib.nan}. Namely, the comparison predicates treat [nan]
as different from any other float value, including itself;
while [compare] treats [nan] as equal to itself and less than any
other float value. This treatment of [nan] ensures that [compare]
defines a total ordering relation.
[compare] applied to functional values may raise [Invalid_argument].
[compare] applied to cyclic structures may not terminate.
The [compare] function can be used as the comparison function
required by the {!Set.Make} and {!Map.Make} functors, as well as
the {!List.sort} and {!Array.sort} functions. *)
val min : 'a -> 'a -> 'a
* Return the smaller of the two arguments .
The result is unspecified if one of the arguments contains
the float value [ nan ] .
The result is unspecified if one of the arguments contains
the float value [nan]. *)
val max : 'a -> 'a -> 'a
* Return the greater of the two arguments .
The result is unspecified if one of the arguments contains
the float value [ nan ] .
The result is unspecified if one of the arguments contains
the float value [nan]. *)
external ( == ) : 'a -> 'a -> bool = "%eq"
external ( != ) : 'a -> 'a -> bool = "%noteq"
* Negation of { ! . ( = = ) } .
Left - associative operator , see { ! Ocaml_operators } for more information .
Left-associative operator, see {!Ocaml_operators} for more information.
*)
* { 1 Boolean operations }
external not : bool -> bool = "%boolnot"
external ( && ) : bool -> bool -> bool = "%sequand"
* The boolean ' and ' . Evaluation is sequential , left - to - right :
in [ e1 & & e2 ] , [ e1 ] is evaluated first , and if it returns [ false ] ,
[ e2 ] is not evaluated at all .
Right - associative operator , see { ! Ocaml_operators } for more information .
in [e1 && e2], [e1] is evaluated first, and if it returns [false],
[e2] is not evaluated at all.
Right-associative operator, see {!Ocaml_operators} for more information.
*)
external ( || ) : bool -> bool -> bool = "%sequor"
* The boolean ' or ' . Evaluation is sequential , left - to - right :
in [ e1 || e2 ] , [ e1 ] is evaluated first , and if it returns [ true ] ,
[ e2 ] is not evaluated at all .
Right - associative operator , see { ! Ocaml_operators } for more information .
in [e1 || e2], [e1] is evaluated first, and if it returns [true],
[e2] is not evaluated at all.
Right-associative operator, see {!Ocaml_operators} for more information.
*)
* { 1 Debugging }
external __LOC__ : string = "%loc_LOC"
(** [__LOC__] returns the location at which this expression appears in
    the file currently being parsed by the compiler, with the standard
    error format of OCaml: "File %S, line %d, characters %d-%d".
    @since 4.02
*)
external __FILE__ : string = "%loc_FILE"
(** [__FILE__] returns the name of the file currently being
    parsed by the compiler.
    @since 4.02
*)
external __LINE__ : int = "%loc_LINE"
(** [__LINE__] returns the line number at which this expression
    appears in the file currently being parsed by the compiler.
    @since 4.02
*)
external __MODULE__ : string = "%loc_MODULE"
(** [__MODULE__] returns the module name of the file being
    parsed by the compiler.
    @since 4.02
*)
external __POS__ : string * int * int * int = "%loc_POS"
(** [__POS__] returns a tuple [(file,lnum,cnum,enum)], corresponding
    to the location at which this expression appears in the file
    currently being parsed by the compiler. [file] is the current
    filename, [lnum] the line number, [cnum] the character position in
    the line and [enum] the last character position in the line.
    @since 4.02
*)
external __FUNCTION__ : string = "%loc_FUNCTION"
(** [__FUNCTION__] returns the name of the current function or method,
    including any enclosing modules or classes.
    @since 4.12 *)
external __LOC_OF__ : 'a -> string * 'a = "%loc_LOC"
(** [__LOC_OF__ expr] returns a pair [(loc, expr)] where [loc] is the
    location of [expr] in the file currently being parsed by the
    compiler, with the standard error format of OCaml: "File %S, line
    %d, characters %d-%d".
    @since 4.02
*)
external __LINE_OF__ : 'a -> int * 'a = "%loc_LINE"
(** [__LINE_OF__ expr] returns a pair [(line, expr)], where [line] is the
    line number at which the expression [expr] appears in the file
    currently being parsed by the compiler.
    @since 4.02
*)
external __POS_OF__ : 'a -> (string * int * int * int) * 'a = "%loc_POS"
(** [__POS_OF__ expr] returns a pair [(loc,expr)], where [loc] is a
    tuple [(file,lnum,cnum,enum)] corresponding to the location at
    which the expression [expr] appears in the file currently being
    parsed by the compiler. [file] is the current filename, [lnum] the
    line number, [cnum] the character position in the line and [enum]
    the last character position in the line.
    @since 4.02
*)
(** {1 Composition operators} *)
external ( |> ) : 'a -> ('a -> 'b) -> 'b = "%revapply"
(** Reverse-application operator: [x |> f |> g] is exactly equivalent
    to [g (f (x))].
    Left-associative operator, see {!Ocaml_operators} for more information.
    @since 4.01
*)
external ( @@ ) : ('a -> 'b) -> 'a -> 'b = "%apply"
(** Application operator: [g @@ f @@ x] is exactly equivalent to
    [g (f (x))].
    Right-associative operator, see {!Ocaml_operators} for more information.
    @since 4.01
*)
(** {1 Integer arithmetic} *)
external ( ~- ) : int -> int = "%negint"
external ( ~+ ) : int -> int = "%identity"
(** Unary addition. You can also write [+ e] instead of [~+ e].
    Unary operator, see {!Ocaml_operators} for more information.
    @since 3.12
*)
external succ : int -> int = "%succint"
external pred : int -> int = "%predint"
external ( + ) : int -> int -> int = "%addint"
external ( - ) : int -> int -> int = "%subint"
external ( * ) : int -> int -> int = "%mulint"
external ( / ) : int -> int -> int = "%divint"
(** Integer division.
    Integer division rounds the real quotient of its arguments towards zero.
    More precisely, if [x >= 0] and [y > 0], [x / y] is the greatest integer
    less than or equal to the real quotient of [x] by [y]. Moreover,
    [(- x) / y = x / (- y) = - (x / y)].
    Left-associative operator, see {!Ocaml_operators} for more information.
    @raise Division_by_zero if the second argument is 0.
*)
external ( mod ) : int -> int -> int = "%modint"
(** Integer remainder. If [y] is not zero, the result
    of [x mod y] satisfies the following properties:
    [x = (x / y) * y + x mod y] and
    [abs(x mod y) <= abs(y) - 1].
    If [y = 0], [x mod y] raises [Division_by_zero].
    Note that [x mod y] is negative only if [x < 0].
    Left-associative operator, see {!Ocaml_operators} for more information.
    @raise Division_by_zero if [y] is zero.
*)
val abs : int -> int
val max_int : int
val min_int : int
(** {2 Bitwise operations} *)
external ( land ) : int -> int -> int = "%andint"
external ( lor ) : int -> int -> int = "%orint"
external ( lxor ) : int -> int -> int = "%xorint"
val lnot : int -> int
external ( lsl ) : int -> int -> int = "%lslint"
external ( lsr ) : int -> int -> int = "%lsrint"
(** [n lsr m] shifts [n] to the right by [m] bits.
    This is a logical shift: zeroes are inserted regardless of
    the sign of [n].
    The result is unspecified if [m < 0] or [m > Sys.int_size].
    Right-associative operator, see {!Ocaml_operators} for more information.
*)
external ( asr ) : int -> int -> int = "%asrint"
(** {1 Floating-point arithmetic}

    OCaml's floating-point numbers follow the
    IEEE 754 standard, using double precision (64 bits) numbers.
    Floating-point operations never raise an exception on overflow,
    underflow, division by zero, etc. Instead, special IEEE numbers
    are returned as appropriate, such as [infinity] for [1.0 /. 0.0],
    [neg_infinity] for [-1.0 /. 0.0], and [nan] ('not a number')
    for [0.0 /. 0.0]. These special numbers then propagate through
    floating-point computations as expected: for instance,
    [1.0 /. infinity] is [0.0], basic arithmetic operations
    ([+.], [-.], [*.], [/.]) with [nan] as an argument return [nan], ...
*)
external ( ~-. ) : float -> float = "%negfloat"
external ( ~+. ) : float -> float = "%identity"
(** Unary addition. You can also write [+. e] instead of [~+. e].
    Unary operator, see {!Ocaml_operators} for more information.
    @since 3.12
*)
external ( +. ) : float -> float -> float = "%addfloat"
external ( -. ) : float -> float -> float = "%subfloat"
external ( *. ) : float -> float -> float = "%mulfloat"
external ( /. ) : float -> float -> float = "%divfloat"
external ( ** ) : float -> float -> float = "caml_power_float" "pow"
[@@unboxed] [@@noalloc]
external sqrt : float -> float = "caml_sqrt_float" "sqrt"
[@@unboxed] [@@noalloc]
external exp : float -> float = "caml_exp_float" "exp" [@@unboxed] [@@noalloc]
external log : float -> float = "caml_log_float" "log" [@@unboxed] [@@noalloc]
external log10 : float -> float = "caml_log10_float" "log10"
[@@unboxed] [@@noalloc]
external expm1 : float -> float = "caml_expm1_float" "caml_expm1"
[@@unboxed] [@@noalloc]
(** [expm1 x] computes [exp x -. 1.0], giving numerically-accurate results
    even if [x] is close to [0.0].
    @since 3.12
*)
external log1p : float -> float = "caml_log1p_float" "caml_log1p"
[@@unboxed] [@@noalloc]
(** [log1p x] computes [log(1.0 +. x)] (natural logarithm),
    giving numerically-accurate results even if [x] is close to [0.0].
    @since 3.12
*)
external cos : float -> float = "caml_cos_float" "cos" [@@unboxed] [@@noalloc]
external sin : float -> float = "caml_sin_float" "sin" [@@unboxed] [@@noalloc]
(** Sine. Argument is in radians. *)
external tan : float -> float = "caml_tan_float" "tan" [@@unboxed] [@@noalloc]
external acos : float -> float = "caml_acos_float" "acos"
[@@unboxed] [@@noalloc]
(** Arc cosine. The argument must fall within the range [[-1.0, 1.0]].
    Result is in radians and is between [0.0] and [pi]. *)
external asin : float -> float = "caml_asin_float" "asin"
[@@unboxed] [@@noalloc]
(** Arc sine. The argument must fall within the range [[-1.0, 1.0]].
    Result is in radians and is between [-pi/2] and [pi/2]. *)
external atan : float -> float = "caml_atan_float" "atan"
[@@unboxed] [@@noalloc]
external atan2 : float -> float -> float = "caml_atan2_float" "atan2"
[@@unboxed] [@@noalloc]
external hypot : float -> float -> float = "caml_hypot_float" "caml_hypot"
[@@unboxed] [@@noalloc]
(** [hypot x y] returns [sqrt(x *. x + y *. y)], that is, the length
    of the hypotenuse of a right-angled triangle with sides of length
    [x] and [y], or, equivalently, the distance of the point [(x,y)]
    to origin. If one of [x] or [y] is infinite, returns [infinity]
    even if the other is [nan].
    @since 4.00 *)
external cosh : float -> float = "caml_cosh_float" "cosh"
[@@unboxed] [@@noalloc]
external sinh : float -> float = "caml_sinh_float" "sinh"
[@@unboxed] [@@noalloc]
external tanh : float -> float = "caml_tanh_float" "tanh"
[@@unboxed] [@@noalloc]
external acosh : float -> float = "caml_acosh_float" "caml_acosh"
[@@unboxed] [@@noalloc]
(** Hyperbolic arc cosine. The argument must fall within the range
    [[1.0, inf]].
    Result is in radians and is between [0.0] and [inf].
    @since 4.13
*)
external asinh : float -> float = "caml_asinh_float" "caml_asinh"
[@@unboxed] [@@noalloc]
(** Hyperbolic arc sine. The argument and result range over the entire
    real line.
    Result is in radians.
    @since 4.13
*)
external atanh : float -> float = "caml_atanh_float" "caml_atanh"
[@@unboxed] [@@noalloc]
(** Hyperbolic arc tangent. The argument must fall within the range
    [[-1.0, 1.0]].
    Result is in radians and ranges over the entire real line.
    @since 4.13
*)
external ceil : float -> float = "caml_ceil_float" "ceil"
[@@unboxed] [@@noalloc]
external floor : float -> float = "caml_floor_float" "floor"
[@@unboxed] [@@noalloc]
external abs_float : float -> float = "%absfloat"
external copysign : float -> float -> float
= "caml_copysign_float" "caml_copysign"
[@@unboxed] [@@noalloc]
(** [copysign x y] returns a float whose absolute value is that of [x]
    and whose sign is that of [y]. If [x] is [nan], returns [nan].
    If [y] is [nan], returns either [x] or [-. x], but it is not
    specified which.
    @since 4.00 *)
external mod_float : float -> float -> float = "caml_fmod_float" "fmod"
[@@unboxed] [@@noalloc]
(** [mod_float a b] returns the remainder of [a] with respect to
    [b]. The returned value is [a -. n *. b], where [n]
    is the quotient [a /. b] rounded towards zero to an integer. *)
external frexp : float -> float * int = "caml_frexp_float"
(** [frexp f] returns the pair of the significant
    and the exponent of [f]. When [f] is zero, the
    significant [x] and the exponent [n] of [f] are equal to
    zero. When [f] is non-zero, they are defined by
    [f = x *. 2 ** n] and [0.5 <= x < 1.0]. *)
external ldexp : (float [@unboxed]) -> (int [@untagged]) -> (float [@unboxed]) =
"caml_ldexp_float" "caml_ldexp_float_unboxed" [@@noalloc]
(** [ldexp x n] returns [x *. 2 ** n]. *)
external modf : float -> float * float = "caml_modf_float"
external float : int -> float = "%floatofint"
external float_of_int : int -> float = "%floatofint"
external truncate : float -> int = "%intoffloat"
external int_of_float : float -> int = "%intoffloat"
(** Truncate the given floating-point number to an integer.
    The result is unspecified if the argument is [nan] or falls outside the
    range of representable integers. *)
val infinity : float
val neg_infinity : float
val nan : float
(** A special floating-point value denoting the result of an
    undefined operation such as [0.0 /. 0.0]. Stands for
    'not a number'. Any floating-point operation with [nan] as
    argument returns [nan] as result, unless otherwise specified in
    IEEE 754 standard. As for floating-point comparisons,
    [=], [<], [<=], [>] and [>=] return [false] and [<>] returns [true]
    if one or both of their arguments is [nan].

    [nan] is a quiet NaN since 5.1; it was a signaling NaN before. *)
val max_float : float
val min_float : float
(** The smallest positive, non-zero, non-denormalized value of type [float]. *)
val epsilon_float : float
(** The difference between [1.0] and the smallest exactly representable
    floating-point number greater than [1.0]. *)
(* Extraction dropped the five variant constructors, leaving an empty
   type declaration; reconstructed from the OCaml Stdlib interface. *)
type fpclass =
    FP_normal      (** Normal number, none of the below *)
  | FP_subnormal   (** Number very close to 0.0, has reduced precision *)
  | FP_zero        (** Number is 0.0 or -0.0 *)
  | FP_infinite    (** Number is positive or negative infinity *)
  | FP_nan         (** Not a number: result of an undefined operation *)
(** The five classes of floating-point numbers, as determined by
    the {!Stdlib.classify_float} function. *)
external classify_float : (float [@unboxed]) -> fpclass =
"caml_classify_float" "caml_classify_float_unboxed" [@@noalloc]
(** Return the class of the given floating-point number:
    normal, subnormal, zero, infinite, or not a number. *)
val ( ^ ) : string -> string -> string
(** String concatenation.
    Right-associative operator, see {!Ocaml_operators} for more information.
    @raise Invalid_argument if the result is longer
    than {!Sys.max_string_length} bytes.
*)
(** {1 Character operations}

    More character operations are provided in module {!Char}.
*)
external int_of_char : char -> int = "%identity"
val char_of_int : int -> char
(** {1 Unit operations} *)
external ignore : 'a -> unit = "%ignore"
(** {1 String conversion functions} *)
val string_of_bool : bool -> string
val bool_of_string_opt: string -> bool option
(** Convert the given string to a boolean.

    Return [None] if the string is not ["true"] or ["false"].
    @since 4.05
*)
val bool_of_string : string -> bool
val string_of_int : int -> string
val int_of_string_opt: string -> int option
(** Convert the given string to an integer.
    The string is read in decimal (by default, or if the string
    begins with [0u]), in hexadecimal (if it begins with [0x] or
    [0X]), in octal (if it begins with [0o] or [0O]), or in binary
    (if it begins with [0b] or [0B]).

    The [0u] prefix reads the input as an unsigned integer in the range
    [[0, 2*max_int+1]]. If the input exceeds {!max_int}
    it is converted to the signed integer
    [min_int + input - max_int - 1].

    The [_] (underscore) character can appear anywhere in the string
    and is ignored.

    Return [None] if the given string is not a valid representation of an
    integer, or if the integer represented exceeds the range of integers
    representable in type [int].
    @since 4.05
*)
external int_of_string : string -> int = "caml_int_of_string"
val string_of_float : float -> string
val float_of_string_opt: string -> float option
(** Convert the given string to a float. The string is read in decimal
    (by default) or in hexadecimal (marked by [0x] or [0X]).

    The format of decimal floating-point numbers is
    [ [-] dd.ddd (e|E) [+|-] dd ], where [d] stands for a decimal digit.

    The format of hexadecimal floating-point numbers is
    [ [-] 0(x|X) hh.hhh (p|P) [+|-] dd ], where [h] stands for an
    hexadecimal digit and [d] for a decimal digit.

    In both cases, at least one of the integer and fractional parts must be
    given; the exponent part is optional.

    The [_] (underscore) character can appear anywhere in the string
    and is ignored.

    Depending on the execution platforms, other representations of
    floating-point numbers can be accepted, but should not be relied upon.

    Return [None] if the given string is not a valid representation of a float.
    @since 4.05
*)
external float_of_string : string -> float = "caml_float_of_string"
external fst : 'a * 'b -> 'a = "%field0"
(** Return the first component of a pair. *)
external snd : 'a * 'b -> 'b = "%field1"
(** Return the second component of a pair. *)
(** {1 List operations}

    More list operations are provided in module {!List}.
*)
val ( @ ) : 'a list -> 'a list -> 'a list
(** [l0 @ l1] appends [l1] to [l0]. Same function as {!List.append}.
    Right-associative operator, see {!Ocaml_operators} for more information.
    @since 5.1 this function is tail-recursive.
*)
(** {1 Input/output}

    Note: all input/output functions can raise [Sys_error] when the system
    calls they invoke fail. *)
type in_channel
type out_channel
val stdin : in_channel
val stdout : out_channel
val stderr : out_channel
(** {2 Output functions on standard output} *)
val print_char : char -> unit
val print_string : string -> unit
val print_bytes : bytes -> unit
(** Print a byte sequence on standard output.
    @since 4.02 *)
val print_int : int -> unit
val print_float : float -> unit
(** Print a floating-point number, in decimal, on standard output.
    The conversion of the number to a string uses {!string_of_float} and
    can involve a loss of precision. *)
val print_endline : string -> unit
val print_newline : unit -> unit
(** {2 Output functions on standard error} *)
val prerr_char : char -> unit
val prerr_string : string -> unit
val prerr_bytes : bytes -> unit
(** Print a byte sequence on standard error.
    @since 4.02 *)
val prerr_int : int -> unit
val prerr_float : float -> unit
(** Print a floating-point number, in decimal, on standard error.
    The conversion of the number to a string uses {!string_of_float} and
    can involve a loss of precision. *)
val prerr_endline : string -> unit
val prerr_newline : unit -> unit
(** {2 Input functions on standard input} *)
val read_line : unit -> string
val read_int_opt: unit -> int option
(** Flush standard output, then read one line from standard input
    and convert it to an integer.

    Return [None] if the line read is not a valid representation of an integer.
    @since 4.05
*)
val read_int : unit -> int
val read_float_opt: unit -> float option
(** Flush standard output, then read one line from standard input
    and convert it to a floating-point number.

    Return [None] if the line read is not a valid representation of a
    floating-point number.
    @since 4.05
*)
val read_float : unit -> float
(** {2 General output functions} *)
(* Extraction dropped the variant constructors, leaving an empty type
   declaration; reconstructed from the OCaml Stdlib interface. *)
type open_flag =
    Open_rdonly      (** open for reading. *)
  | Open_wronly      (** open for writing. *)
  | Open_append      (** open for appending: always write at end of file. *)
  | Open_creat       (** create the file if it does not exist. *)
  | Open_trunc       (** empty the file if it already exists. *)
  | Open_excl        (** fail if Open_creat and the file already exists. *)
  | Open_binary      (** open in binary mode (no conversion). *)
  | Open_text        (** open in text mode (may perform conversions). *)
  | Open_nonblock    (** open in non-blocking mode. *)
(** Opening modes for {!Stdlib.open_out_gen} and
    {!Stdlib.open_in_gen}. *)
val open_out : string -> out_channel
(** Open the named file for writing, and return a new output channel
    on that file, positioned at the beginning of the file. The
    file is truncated to zero length if it already exists. It
    is created if it does not already exist. *)
val open_out_bin : string -> out_channel
val open_out_gen : open_flag list -> int -> string -> out_channel
(** [open_out_gen mode perm filename] opens the named file for writing,
    as described above. The extra argument [mode]
    specifies the opening mode. The extra argument [perm] specifies
    the file permissions, in case the file must be created.
    {!Stdlib.open_out} and {!Stdlib.open_out_bin} are special
    cases of this function. *)
val flush : out_channel -> unit
val flush_all : unit -> unit
val output_char : out_channel -> char -> unit
val output_string : out_channel -> string -> unit
val output_bytes : out_channel -> bytes -> unit
(** Write the byte sequence on the given output channel.
    @since 4.02 *)
val output : out_channel -> bytes -> int -> int -> unit
(** [output oc buf pos len] writes [len] characters from byte sequence [buf],
    starting at offset [pos], to the given output channel [oc].
    @raise Invalid_argument if [pos] and [len] do not
    designate a valid range of [buf]. *)
val output_substring : out_channel -> string -> int -> int -> unit
(** Same as [output] but take a string as argument instead of
    a byte sequence.
    @since 4.02 *)
val output_byte : out_channel -> int -> unit
(** Write one 8-bit integer (as the single character with that code)
    on the given output channel. The given integer is taken modulo
    256. *)
val output_binary_int : out_channel -> int -> unit
(** Write one integer in binary format (4 bytes, big-endian)
    on the given output channel.
    The given integer is taken modulo 2{^32}.
    The only reliable way to read it back is through the
    {!Stdlib.input_binary_int} function. The format is compatible across
    all machines for a given version of OCaml. *)
val output_value : out_channel -> 'a -> unit
(** Write the representation of a structured value of any type
    to a channel. Circularities and sharing inside the value
    are detected and preserved. The object can be read back,
    by the function {!Stdlib.input_value}. See the description of module
    {!Marshal} for more information. {!Stdlib.output_value} is equivalent
    to {!Marshal.to_channel} with an empty list of flags. *)
val seek_out : out_channel -> int -> unit
(** [seek_out chan pos] sets the current writing position to [pos]
    for channel [chan]. This works only for regular files. On
    files of other kinds (such as terminals, pipes and sockets),
    the behavior is unspecified. *)
val pos_out : out_channel -> int
(** Return the current writing position for the given channel. Does
    not work on channels opened with the [Open_append] flag (returns
    unspecified results).

    For files opened in text mode under Windows, the returned position
    is approximate (owing to end-of-line conversion); in particular,
    saving the current position with [pos_out], then going back to
    this position using [seek_out] will not work. For this
    programming idiom to work reliably and portably, the file must be
    opened in binary mode. *)
val out_channel_length : out_channel -> int
val close_out : out_channel -> unit
(** Close the given channel, flushing all buffered write operations.
    Output functions raise a [Sys_error] exception when they are
    applied to a closed output channel, except [close_out] and [flush],
    which do nothing when applied to an already closed channel.
    Note that [close_out] may raise [Sys_error] if the operating
    system signals an error when flushing or closing. *)
val close_out_noerr : out_channel -> unit
val set_binary_mode_out : out_channel -> bool -> unit
(** [set_binary_mode_out oc true] sets the channel [oc] to binary
    mode: no translations take place during output.
    [set_binary_mode_out oc false] sets the channel [oc] to text
    mode: depending on the operating system, some translations
    may take place during output. For instance, under Windows,
    end-of-lines will be translated from [\n] to [\r\n].
    This function has no effect under operating systems that
    do not distinguish between text mode and binary mode. *)
(** {2 General input functions} *)
val open_in : string -> in_channel
val open_in_bin : string -> in_channel
(** Same as {!Stdlib.open_in}, but the file is opened in binary mode,
    so that no translation takes place during reads. On operating
    systems that do not distinguish between text mode and binary
    mode, this function behaves like {!Stdlib.open_in}. *)
val open_in_gen : open_flag list -> int -> string -> in_channel
(** [open_in_gen mode perm filename] opens the named file for reading,
    as described above. The extra arguments
    [mode] and [perm] specify the opening mode and file permissions.
    {!Stdlib.open_in} and {!Stdlib.open_in_bin} are special
    cases of this function. *)
val input_char : in_channel -> char
(** Read one character from the given input channel.
    @raise End_of_file if there are no more characters to read. *)
val input_line : in_channel -> string
val input : in_channel -> bytes -> int -> int -> int
(** [input ic buf pos len] reads up to [len] characters from
    the given channel [ic], storing them in byte sequence [buf], starting at
    character number [pos].
    It returns the actual number of characters read, between 0 and
    [len] (inclusive).
    A return value of 0 means that the end of file was reached.
    A return value between 0 and [len] exclusive means that
    not all requested [len] characters were read, either because
    no more characters were available at that time, or because
    the implementation found it convenient to do a partial read;
    [input] must be called again to read the remaining characters,
    if desired. (See also {!Stdlib.really_input} for reading
    exactly [len] characters.)
    Exception [Invalid_argument "input"] is raised if [pos] and [len]
    do not designate a valid range of [buf]. *)
val really_input : in_channel -> bytes -> int -> int -> unit
(** [really_input ic buf pos len] reads [len] characters from channel [ic],
    storing them in byte sequence [buf], starting at character number [pos].
    @raise End_of_file if the end of file is reached before [len]
    characters have been read.
    @raise Invalid_argument if
    [pos] and [len] do not designate a valid range of [buf]. *)
val really_input_string : in_channel -> int -> string
(** [really_input_string ic len] reads [len] characters from channel [ic]
    and returns them in a new string.
    @raise End_of_file if the end of file is reached before [len]
    characters have been read.
    @since 4.02 *)
val input_byte : in_channel -> int
(** Same as {!Stdlib.input_char}, but return the 8-bit integer representing
    the character.
    @raise End_of_file if the end of file was reached. *)
val input_binary_int : in_channel -> int
(** Read an integer encoded in binary format (4 bytes, big-endian)
    from the given input channel. See {!Stdlib.output_binary_int}.
    @raise End_of_file if the end of file was reached while reading the
    integer. *)
val input_value : in_channel -> 'a
val seek_in : in_channel -> int -> unit
val pos_in : in_channel -> int
(** Return the current reading position for the given channel. For
    files opened in text mode under Windows, the returned position is
    approximate (owing to end-of-line conversion); in particular,
    saving the current position with [pos_in], then going back to this
    position using [seek_in] will not work. For this programming
    idiom to work reliably and portably, the file must be opened in
    binary mode. *)
val in_channel_length : in_channel -> int
val close_in : in_channel -> unit
val close_in_noerr : in_channel -> unit
val set_binary_mode_in : in_channel -> bool -> unit
(** [set_binary_mode_in ic true] sets the channel [ic] to binary
    mode: no translations take place during input.
    [set_binary_mode_in ic false] sets the channel [ic] to text
    mode: depending on the operating system, some translations
    may take place during input. For instance, under Windows,
    end-of-lines will be translated from [\r\n] to [\n].
    This function has no effect under operating systems that
    do not distinguish between text mode and binary mode. *)
(** {2 Operations on large files} *)
module LargeFile :
sig
val seek_out : out_channel -> int64 -> unit
val pos_out : out_channel -> int64
val out_channel_length : out_channel -> int64
val seek_in : in_channel -> int64 -> unit
val pos_in : in_channel -> int64
val in_channel_length : in_channel -> int64
end
(** Operations on large files.
    This sub-module provides 64-bit variants of the channel functions
    that manipulate file positions and file sizes. By representing
    positions and sizes by 64-bit integers (type [int64]) instead of
    regular integers (type [int]), these alternate functions allow
    operating on files whose sizes are greater than [max_int]. *)
type 'a ref = { mutable contents : 'a }
external ref : 'a -> 'a ref = "%makemutable"
external ( ! ) : 'a ref -> 'a = "%field0"
(** [!r] returns the current contents of reference [r].
    Equivalent to [fun r -> r.contents].
    Unary operator, see {!Ocaml_operators} for more information.
*)
external ( := ) : 'a ref -> 'a -> unit = "%setfield0"
external incr : int ref -> unit = "%incr"
external decr : int ref -> unit = "%decr"
(** {1 Result type} *)
(** @since 4.03 *)
type ('a,'b) result = Ok of 'a | Error of 'b
(** Format strings are character strings with special lexical conventions
    that defines the functionality of formatted input/output functions. Format
    strings are used to read data with formatted input functions from module
    {!Scanf} and to print data with formatted output functions from modules
    {!Printf} and {!Format}.

    Format strings are made of three kinds of entities:
    - {e conversions specifications}, introduced by the special character ['%']
      followed by one or more characters specifying what kind of argument to
      read or print,
    - {e formatting indications}, introduced by the special character ['@']
      followed by one or more characters specifying how to read or print the
      argument,
    - {e plain characters} that are regular characters with usual lexical
      conventions. Plain characters specify string literals to be read in the
      input or printed in the output.

    There is an additional lexical rule to escape the special characters ['%']
    and ['@'] in format strings: if a special character follows a ['%']
    character, it is treated as a plain character. In other words, ["%%"] is
    considered as a plain ['%'] and ["%@"] as a plain ['@'].

    For more information about conversion specifications and formatting
    indications available, read the documentation of modules {!Scanf},
    {!Printf} and {!Format}.
*)
(** Format strings have a general and highly polymorphic type
    [('a, 'b, 'c, 'd, 'e, 'f) format6].
    The two simplified types, [format] and [format4] below are
    included for backward compatibility with earlier releases of
    OCaml.

    The meaning of format string type parameters is as follows:

    - ['a] is the type of the parameters of the format for formatted output
      functions ([printf]-style functions);
      ['a] is the type of the values read by the format for formatted input
      functions ([scanf]-style functions).

    - ['b] is the type of input source for formatted input functions and the
      type of output target for formatted output functions.
      For [printf]-style functions from module {!Printf}, ['b] is typically
      [out_channel];
      for [printf]-style functions from module {!Format}, ['b] is typically
      {!type:Format.formatter};
      for [scanf]-style functions from module {!Scanf}, ['b] is typically
      {!Scanf.Scanning.in_channel}.

      Type argument ['b] is also the type of the first argument given to
      user's defined printing functions for [%a] and [%t] conversions,
      and user's defined reading functions for [%r] conversion.

    - ['c] is the type of the result of the [%a] and [%t] printing
      functions, and also the type of the argument transmitted to the
      first argument of [kprintf]-style functions or to the
      [kscanf]-style functions.

    - ['d] is the type of parameters for the [scanf]-style functions.

    - ['e] is the type of the receiver function for the [scanf]-style
      functions.

    - ['f] is the final result type of a formatted input/output function
      invocation: for the [printf]-style functions, it is typically [unit];
      for the [scanf]-style functions, it is typically the result type of the
      receiver function.
*)
type ('a, 'b, 'c, 'd, 'e, 'f) format6 =
('a, 'b, 'c, 'd, 'e, 'f) CamlinternalFormatBasics.format6
type ('a, 'b, 'c, 'd) format4 = ('a, 'b, 'c, 'c, 'c, 'd) format6
type ('a, 'b, 'c) format = ('a, 'b, 'c, 'c) format4
val string_of_format : ('a, 'b, 'c, 'd, 'e, 'f) format6 -> string
external format_of_string :
('a, 'b, 'c, 'd, 'e, 'f) format6 ->
('a, 'b, 'c, 'd, 'e, 'f) format6 = "%identity"
val ( ^^ ) :
('a, 'b, 'c, 'd, 'e, 'f) format6 ->
('f, 'b, 'c, 'e, 'g, 'h) format6 ->
('a, 'b, 'c, 'd, 'g, 'h) format6
(** {1 Program termination} *)
val exit : int -> 'a
(** Terminate the process, returning the given status code to the operating
    system: usually 0 to indicate no errors, and a small positive integer to
    indicate failure. All open output channels are flushed with [flush_all].
    The callbacks registered with {!Domain.at_exit} are called followed by
    those registered with {!Stdlib.at_exit}.

    An implicit [exit 0] is performed each time a program terminates normally.
    An implicit [exit 2] is performed if the program terminates early because
    of an uncaught exception. *)
val at_exit : (unit -> unit) -> unit
(** Register the given function to be called at program termination
    time. The functions registered with [at_exit] will be called when
    the program does any of the following:
    - executes {!Stdlib.exit}
    - terminates, either normally or because of an uncaught
      exception
    - executes the C function [caml_shutdown].
    The functions are called in 'last in, first out' order: the
    function most recently added with [at_exit] is called first. *)
val valid_float_lexem : string -> string
val unsafe_really_input : in_channel -> bytes -> int -> int -> unit
val do_at_exit : unit -> unit
val do_domain_local_at_exit : (unit -> unit) ref
(** {1:modules Standard library modules} *)
module Arg = Arg
module Array = Array
module ArrayLabels = ArrayLabels
module Atomic = Atomic
module Bigarray = Bigarray
module Bool = Bool
module Buffer = Buffer
module Bytes = Bytes
module BytesLabels = BytesLabels
module Callback = Callback
module Char = Char
module Complex = Complex
module Condition = Condition
module Digest = Digest
module Domain = Domain
[@@alert "-unstable"]
[@@alert unstable
"The Domain interface may change in incompatible ways in the future."
]
module Effect = Effect
[@@alert "-unstable"]
[@@alert unstable
"The Effect interface may change in incompatible ways in the future."
]
module Either = Either
module Ephemeron = Ephemeron
module Filename = Filename
module Float = Float
module Format = Format
module Fun = Fun
module Gc = Gc
module Hashtbl = Hashtbl
module In_channel = In_channel
module Int = Int
module Int32 = Int32
module Int64 = Int64
module Lazy = Lazy
module Lexing = Lexing
module List = List
module ListLabels = ListLabels
module Map = Map
module Marshal = Marshal
module MoreLabels = MoreLabels
module Mutex = Mutex
module Nativeint = Nativeint
module Obj = Obj
module Oo = Oo
module Option = Option
module Out_channel = Out_channel
module Parsing = Parsing
module Printexc = Printexc
module Printf = Printf
module Queue = Queue
module Random = Random
module Result = Result
module Scanf = Scanf
module Semaphore = Semaphore
module Seq = Seq
module Set = Set
module Stack = Stack
module StdLabels = StdLabels
module String = String
module StringLabels = StringLabels
module Sys = Sys
module Type = Type
module Uchar = Uchar
module Unit = Unit
module Weak = Weak
|
3a02410adb146b25fd0dade12f5a48708139fd8e7dae06caca9299c756e7b4b2 | jepsen-io/jepsen | interpreter.clj | (ns jepsen.generator.interpreter
"This namespace interprets operations from a pure generator, handling worker
threads, spawning processes for interacting with clients and nemeses, and
recording a history."
(:refer-clojure :exclude [run!])
(:require [clojure [datafy :refer [datafy]]
[pprint :refer [pprint]]]
[clojure.tools.logging :refer [info warn error]]
[jepsen [client :as client]
[generator :as gen]
[history :as h]
[nemesis :as nemesis]
[util :as util]]
[jepsen.generator.context :as context]
[jepsen.store.format :as store.format]
[slingshot.slingshot :refer [try+ throw+]])
(:import (java.util.concurrent ArrayBlockingQueue
TimeUnit)
(io.lacuna.bifurcan Set)))
(defprotocol Worker
  "This protocol allows the interpreter to manage the lifecycle of stateful
  workers. All operations on a Worker are guaranteed to be executed by a single
  thread, so implementations may use unsynchronized mutable state."
  (open [this test id]
    "Spawns a new Worker process for the given worker ID. Returns the worker
    that subsequent invoke!/close! calls should use.")
  (invoke! [this test op]
    "Asks the worker to perform this operation, and returns a completed
    operation.")
  (close! [this test]
    "Closes this worker, releasing any resources it may hold."))
; A Worker backed by a jepsen Client, pinned to a single node. The process and
; client fields are mutable; per the Worker protocol contract, each worker is
; only ever touched by one thread, so ^:unsynchronized-mutable is safe here.
(deftype ClientWorker [node
                       ^:unsynchronized-mutable process
                       ^:unsynchronized-mutable client]
  Worker
  (open [this test id]
    this)

  (invoke! [this test op]
    (if (and (not= process (:process op))
             (not (client/is-reusable? client test)))
      ; New process, new client!
      (do (close! this test)
          ; Try to open new client
          (let [err (try
                      (set! (.client this)
                            (client/open! (client/validate (:client test))
                                          test node))
                      (set! (.process this) (:process op))
                      ; nil means the open succeeded; any exception is
                      ; converted into a :fail op with an :error tuple.
                      nil
                      (catch Exception e
                        (warn e "Error opening client")
                        (set! (.client this) nil)
                        (assoc op
                               :type :fail
                               :error [:no-client (.getMessage e)])))]
            ; If we failed to open, just go ahead and return that error op.
            ; Otherwise, we can try again, this time with a fresh client.
            ; (recur to a deftype method head omits `this`.)
            (or err (recur test op))))
      ; Good, we have a client for this process.
      (client/invoke! client test op)))

  (close! [this test]
    ; Idempotent: only closes when a client is actually open.
    (when client
      (client/close! client test)
      (set! (.client this) nil))))
; A Worker which delegates every operation to the test's :nemesis.
(defrecord NemesisWorker []
  Worker
  (open [this test id] this)
  (invoke! [this test op]
    (nemesis/invoke! (:nemesis test) test op))
  ; The nemesis holds no per-worker resources; nothing to release.
  (close! [this test]))
; This doesn't feel like the right shape exactly, but it's symmetric to Client,
; Nemesis, etc.
(defrecord ClientNemesisWorker []
  Worker
  (open [this test id]
    ;(locking *out* (prn :spawn id))
    ; Integer ids denote client workers: each gets a node assigned
    ; round-robin from (:nodes test). Any other id (e.g. :nemesis)
    ; gets a NemesisWorker.
    (if (integer? id)
      (let [nodes (:nodes test)]
        (ClientWorker. (nth nodes (mod id (count nodes))) nil nil))
      (NemesisWorker.)))
  ; This worker only spawns others; it never performs ops itself.
  (invoke! [this test op])
  (close! [this test]))
(defn client-nemesis-worker
  "A Worker which can spawn both client and nemesis-specific workers based on
  the :client and :nemesis in a test."
  []
  ;; Use the defrecord-generated positional factory.
  (->ClientNemesisWorker))
(defn spawn-worker
  "Creates communication channels and spawns a worker thread to evaluate the
  given worker. Takes a test, a Queue which should receive completion
  operations, a Worker object, and a worker id.

  Returns a map with:

    :id      The worker ID
    :future  The future evaluating the worker code
    :in      A Queue which delivers invocations to the worker"
  [test ^ArrayBlockingQueue out worker id]
  ; Capacity-1 queue: .put in the interpreter blocks until this worker has
  ; taken the previous invocation, so at most one op is in flight per worker.
  (let [in (ArrayBlockingQueue. 1)
        fut
        (future
          (util/with-thread-name (str "jepsen worker "
                                      (util/name+ id))
            (let [worker (open worker test id)]
              (try
                ; Each iteration takes one op from `in`; the case expression
                ; yields true to keep looping, false (on :exit) to stop.
                (loop []
                  (when
                    (let [op (.take in)]
                      (try
                        (case (:type op)
                          ; We're done here
                          :exit false

                          ; Ahhh
                          :sleep (do (Thread/sleep (* 1000 (:value op)))
                                     (.put out op)
                                     true)

                          ; Log a message
                          :log (do (info (:value op))
                                   (.put out op)
                                   true)

                          ; Ask the invoke handler
                          (do (util/log-op op)
                              (let [op' (invoke! worker test op)]
                                (.put out op')
                                (util/log-op op')
                                true)))
                        (catch Throwable e
                          ; Yes, we want to capture throwable here;
                          ; assertion errors aren't Exceptions. D-:
                          (warn e "Process" (:process op) "crashed")
                          ; Convert this to an info op.
                          (.put out
                                (assoc op
                                       :type :info
                                       :exception (datafy e)
                                       :error (str "indeterminate: "
                                                   (if (.getCause e)
                                                     (.. e getCause
                                                         getMessage)
                                                     (.getMessage e)))))
                          true)))
                    (recur)))
                (finally
                  ; Make sure we close our worker on exit.
                  (close! worker test))))))]
    {:id     id
     :in     in
     :future fut}))
(def ^Long/TYPE max-pending-interval
  "When the generator is :pending, this controls the maximum interval before
  we'll update the context and check the generator for an operation again.
  Measured in microseconds."
  ; Microseconds, not millis: this value is passed to Queue.poll with
  ; TimeUnit/MICROSECONDS in run!.
  1000)
(defn goes-in-history?
  "Should this operation be journaled to the history? We exclude :log and
  :sleep ops right now."
  [op]
  ;; Internal bookkeeping op types never reach the history; everything
  ;; else does. (case and `condp identical?` agree on interned keywords.)
  (case (:type op)
    (:sleep :log) false
    true))
(defn run!
  "Takes a test with a :store :handle open. Opens a writer for the test's
  history using that handle. Creates an initial context from test and evaluates
  all ops from (:gen test). Spawns a thread for each worker, and hands those
  workers operations from gen; each thread applies the operation using (:client
  test) or (:nemesis test), as appropriate. Invocations and completions are
  journaled to a history on disk, which is returned at the end of `run`.

  Generators are automatically wrapped in friendly-exception and validate.
  Clients are wrapped in a validator as well.

  Automatically initializes the generator system, which, on first invocation,
  extends the Generator protocol over some dynamic classes like (promise)."
  [test]
  (gen/init!)
  (with-open [history-writer (store.format/test-history-writer!
                               (:handle (:store test))
                               test)]
    (let [ctx         (gen/context test)
          worker-ids  (gen/all-threads ctx)
          ; Sized so every worker can have one completion waiting at once.
          completions (ArrayBlockingQueue.
                        (.size ^io.lacuna.bifurcan.ISet worker-ids))
          workers     (mapv (partial spawn-worker test completions
                                     (client-nemesis-worker))
                            worker-ids)
          ; Map of worker id -> that worker's invocation queue.
          invocations (into {} (map (juxt :id :in) workers))
          gen         (->> (:generator test)
                           gen/friendly-exceptions
                           gen/validate)]
      (try+
        (loop [ctx          ctx
               gen          gen
               op-index     0 ; Index of the next op in the history
               outstanding  0 ; Number of in-flight ops
               ; How long to poll on the completion queue, in micros.
               poll-timeout 0]
          ; First, can we complete an operation? We want to get to these first
          ; because they're latency sensitive--if we wait, we introduce false
          ; concurrency.
          (if-let [op' (.poll completions poll-timeout TimeUnit/MICROSECONDS)]
            (let [;_ (prn :completed op')
                  thread (gen/process->thread ctx (:process op'))
                  time   (util/relative-time-nanos)
                  ; Update op with index and new timestamp
                  op'    (assoc op' :index op-index :time time)
                  ; Update context with new time and thread being free
                  ctx    (context/free-thread ctx time thread)
                  ; Let generator know about our completion. We use the context
                  ; with the new time and thread free, but *don't* assign a new
                  ; process here, so that thread->process recovers the right
                  ; value for this event.
                  gen    (gen/update gen test ctx op')
                  ; Threads that crash (other than the nemesis), or which
                  ; explicitly request a new process, should be assigned new
                  ; process identifiers.
                  ctx    (if (and (not= :nemesis thread)
                                  (or (= :info (:type op'))
                                      (:end-process? op')))
                           (context/with-next-process ctx thread)
                           ctx)]
              ; Log completion and move on
              (if (goes-in-history? op')
                (do (store.format/append-to-big-vector-block!
                      history-writer op')
                    (recur ctx gen (inc op-index) (dec outstanding) 0))
                (recur ctx gen op-index (dec outstanding) 0)))

            ; There's nothing to complete; let's see what the generator's up to
            (let [time (util/relative-time-nanos)
                  ctx  (assoc ctx :time time)
                  ;_ (prn :asking-for-op)
                  ;_ (binding [*print-length* 12] (pprint gen))
                  [op gen'] (gen/op gen test ctx)]
                  ;_ (prn :time time :got op)]
              (condp = op
                ; We're exhausted, but workers might still be going.
                nil (if (pos? outstanding)
                      ; Still waiting on workers
                      (recur ctx gen op-index outstanding
                             (long max-pending-interval))
                      ; Good, we're done. Tell workers to exit...
                      (do (doseq [[thread queue] invocations]
                            (.put ^ArrayBlockingQueue queue {:type :exit}))
                          ; Wait for exit
                          (dorun (map (comp deref :future) workers))
                          ; Await completion of writes
                          (.close history-writer)
                          ; And return history
                          (let [history-block-id (:block-id history-writer)]
                            (-> (:handle (:store test))
                                (store.format/read-block-by-id
                                  history-block-id)
                                :data
                                (h/history {:dense-indices? true
                                            :have-indices?  true
                                            :already-ops?   true})))))

                ; Nothing we can do right now. Let's try to complete something.
                :pending (recur ctx gen op-index
                                outstanding (long max-pending-interval))

                ; Good, we've got an invocation.
                (if (< time (:time op))
                  ; Can't evaluate this op yet!
                  (do ;(prn :waiting (util/nanos->secs (- (:time op) time)) "s")
                      (recur ctx gen op-index outstanding
                             ; Unless something changes, we don't need to ask
                             ; the generator for another op until it's time.
                             (long (/ (- (:time op) time) 1000))))

                  ; Good, we can run this.
                  (let [thread (gen/process->thread ctx (:process op))
                        op     (assoc op :index op-index)
                        ; Log the invocation
                        goes-in-history? (goes-in-history? op)
                        _ (when goes-in-history?
                            (store.format/append-to-big-vector-block!
                              history-writer op))
                        op-index' (if goes-in-history? (inc op-index) op-index)
                        ; Dispatch it to a worker
                        _ (.put ^ArrayBlockingQueue (get invocations thread) op)
                        ; Update our context to reflect
                        ctx (context/busy-thread ctx
                                                 (:time op) ; Use time instead?
                                                 thread)
                        ; Let the generator know about the invocation
                        gen' (gen/update gen' test ctx op)]
                    (recur ctx gen' op-index' (inc outstanding) 0)))))))
        (catch Throwable t
          ; We've thrown, but we still need to ensure the workers exit.
          (info "Shutting down workers after abnormal exit")
          ; We only try to cancel each worker *once*--if we try to cancel
          ; multiple times, we might interrupt a worker while it's in the
          ; finally block, cleaning up its client.
          (dorun (map (comp future-cancel :future) workers))
          ; If for some reason *that* doesn't work, we ask them all to exit via
          ; their queue.
          (loop [unfinished workers]
            (when (seq unfinished)
              (let [{:keys [in future] :as worker} (first unfinished)]
                (if (future-done? future)
                  (recur (next unfinished))
                  (do (.offer ^java.util.Queue in {:type :exit})
                      (recur unfinished))))))
          (throw t))))))
| null | https://raw.githubusercontent.com/jepsen-io/jepsen/e601daf2b20d77cfccc447c05c9deab2620fdc44/jepsen/src/jepsen/generator/interpreter.clj | clojure | New process, new client!
Try to open new client
If we failed to open, just go ahead and return that error op.
Otherwise, we can try again, this time with a fresh client.
Good, we have a client for this process.
This doesn't feel like the right shape exactly, but it's symmetric to Client,
(locking *out* (prn :spawn id))
We're done here
Ahhh
Log a message
Ask the invoke handler
Yes, we want to capture throwable here;
assertion errors aren't Exceptions. D-:
Convert this to an info op.
Make sure we close our worker on exit.
each thread applies the operation using (:client
Index of the next op in the history
Number of in-flight ops
How long to poll on the completion queue, in micros.
because they're latency sensitive--if we wait, we introduce false
concurrency.
_ (prn :completed op')
Update op with index and new timestamp
Update context with new time and thread being free
Let generator know about our completion. We use the context
with the new time and thread free, but *don't* assign a new
process here, so that thread->process recovers the right
value for this event.
Threads that crash (other than the nemesis), or which
explicitly request a new process, should be assigned new
process identifiers.
Log completion and move on
There's nothing to complete; let's see what the generator's up to
_ (prn :asking-for-op)
_ (prn :time time :got op)]
We're exhausted, but workers might still be going.
Still waiting on workers
Good, we're done. Tell workers to exit...
Wait for exit
Await completion of writes
And return history
Nothing we can do right now. Let's try to complete something.
Good, we've got an invocation.
Can't evaluate this op yet!
(prn :waiting (util/nanos->secs (- (:time op) time)) "s")
Unless something changes, we don't need to ask
the generator for another op until it's time.
Good, we can run this.
Log the invocation
Dispatch it to a worker
Update our context to reflect
Use time instead?
Let the generator know about the invocation
We've thrown, but we still need to ensure the workers exit.
We only try to cancel each worker *once*--if we try to cancel
multiple times, we might interrupt a worker while it's in the
finally block, cleaning up its client.
If for some reason *that* doesn't work, we ask them all to exit via
their queue. | (ns jepsen.generator.interpreter
"This namespace interprets operations from a pure generator, handling worker
threads, spawning processes for interacting with clients and nemeses, and
recording a history."
(:refer-clojure :exclude [run!])
(:require [clojure [datafy :refer [datafy]]
[pprint :refer [pprint]]]
[clojure.tools.logging :refer [info warn error]]
[jepsen [client :as client]
[generator :as gen]
[history :as h]
[nemesis :as nemesis]
[util :as util]]
[jepsen.generator.context :as context]
[jepsen.store.format :as store.format]
[slingshot.slingshot :refer [try+ throw+]])
(:import (java.util.concurrent ArrayBlockingQueue
TimeUnit)
(io.lacuna.bifurcan Set)))
(defprotocol Worker
"This protocol allows the interpreter to manage the lifecycle of stateful
workers. All operations on a Worker are guaranteed to be executed by a single
thread."
(open [this test id]
"Spawns a new Worker process for the given worker ID.")
(invoke! [this test op]
"Asks the worker to perform this operation, and returns a completed
operation.")
(close! [this test]
"Closes this worker, releasing any resources it may hold."))
(deftype ClientWorker [node
^:unsynchronized-mutable process
^:unsynchronized-mutable client]
Worker
(open [this test id]
this)
(invoke! [this test op]
(if (and (not= process (:process op))
(not (client/is-reusable? client test)))
(do (close! this test)
(let [err (try
(set! (.client this)
(client/open! (client/validate (:client test))
test node))
(set! (.process this) (:process op))
nil
(catch Exception e
(warn e "Error opening client")
(set! (.client this) nil)
(assoc op
:type :fail
:error [:no-client (.getMessage e)])))]
(or err (recur test op))))
(client/invoke! client test op)))
(close! [this test]
(when client
(client/close! client test)
(set! (.client this) nil))))
(defrecord NemesisWorker []
Worker
(open [this test id] this)
(invoke! [this test op]
(nemesis/invoke! (:nemesis test) test op))
(close! [this test]))
Nemesis , etc .
(defrecord ClientNemesisWorker []
Worker
(open [this test id]
(if (integer? id)
(let [nodes (:nodes test)]
(ClientWorker. (nth nodes (mod id (count nodes))) nil nil))
(NemesisWorker.)))
(invoke! [this test op])
(close! [this test]))
(defn client-nemesis-worker
"A Worker which can spawn both client and nemesis-specific workers based on
the :client and :nemesis in a test."
[]
(ClientNemesisWorker.))
(defn spawn-worker
  "Creates communication channels and spawns a worker thread to evaluate the
  given worker. Takes a test, a Queue which should receive completion
  operations, a Worker object, and a worker id.
  Returns a map with:
  :id The worker ID
  :future The future evaluating the worker code
  :in A Queue which delivers invocations to the worker"
  [test ^ArrayBlockingQueue out worker id]
  ;; Capacity 1: the interpreter hands this worker at most one op at a time.
  (let [in (ArrayBlockingQueue. 1)
        fut
        (future
          (util/with-thread-name (str "jepsen worker "
                                      (util/name+ id))
            (let [worker (open worker test id)]
              (try
                ;; Main loop: each iteration takes one invocation; the `when`
                ;; test is the "keep going?" flag returned by every branch.
                (loop []
                  (when
                    (let [op (.take in)]
                      (try
                        (case (:type op)
                          ;; Pseudo-ops handled by the harness itself:
                          :exit false
                          :sleep (do (Thread/sleep (* 1000 (:value op)))
                                     (.put out op)
                                     true)
                          :log (do (info (:value op))
                                   (.put out op)
                                   true)
                          ;; Anything else is a real invocation for the Worker.
                          (do (util/log-op op)
                              (let [op' (invoke! worker test op)]
                                (.put out op')
                                (util/log-op op')
                                true)))
                        (catch Throwable e
                          ;; A crashed invocation completes as :info: its
                          ;; effects on the system are indeterminate.
                          (warn e "Process" (:process op) "crashed")
                          (.put out
                                (assoc op
                                       :type :info
                                       :exception (datafy e)
                                       :error (str "indeterminate: "
                                                   (if (.getCause e)
                                                     (.. e getCause
                                                         getMessage)
                                                     (.getMessage e)))))
                          true)))
                    (recur)))
                (finally
                  ;; Always release worker resources, even on :exit or crash.
                  (close! worker test))))))]
    {:id id
     :in in
     :future fut}))
;; 1000 us = 1 ms; used as the .poll timeout in run! when the generator is
;; :pending. ^Long/TYPE hints a primitive long.
(def ^Long/TYPE max-pending-interval
  "When the generator is :pending, this controls the maximum interval before
  we'll update the context and check the generator for an operation again.
  Measured in microseconds."
  1000)
(defn goes-in-history?
  "Should this operation be journaled to the history? We exclude :log and
  :sleep ops right now."
  [op]
  ;; :sleep and :log are internal bookkeeping ops; everything else (including
  ;; ops with no :type at all) is part of the recorded history.
  (case (:type op)
    (:sleep :log) false
    true))
;; NOTE(review): this function appears to have lost lines during extraction:
;; the `loop` header binds only [ctx gen poll-timeout], yet every `recur`
;; passes five values (ctx gen op-index outstanding poll-timeout), and the
;; `if-let` success branch starts with bare bindings (thread, time, ...) with
;; no enclosing `(let [...]`. Code kept byte-for-byte pending comparison with
;; the pristine source; comments below describe the evident intent.
(defn run!
  "Takes a test with a :store :handle open. Opens a writer for the test's
  history using that handle. Creates an initial context from test and evaluates
  all ops from (:gen test). Spawns a thread for each worker, and hands those
  test) or (:nemesis test), as appropriate. Invocations and completions are
  journaled to a history on disk, which is returned at the end of `run`.
  Generators are automatically wrapped in friendly-exception and validate.
  Clients are wrapped in a validator as well.
  Automatically initializes the generator system, which, on first invocation,
  extends the Generator protocol over some dynamic classes like (promise)."
  [test]
  (gen/init!)
  (with-open [history-writer (store.format/test-history-writer!
                               (:handle (:store test))
                               test)]
    (let [ctx (gen/context test)
          worker-ids (gen/all-threads ctx)
          ;; One completion queue shared by all workers, sized to the worker
          ;; count so no put can block indefinitely.
          completions (ArrayBlockingQueue.
                        (.size ^io.lacuna.bifurcan.ISet worker-ids))
          workers (mapv (partial spawn-worker test completions
                                 (client-nemesis-worker))
                        worker-ids)
          ;; worker id -> that worker's invocation queue
          invocations (into {} (map (juxt :id :in) workers))
          gen (->> (:generator test)
                   gen/friendly-exceptions
                   gen/validate)]
      (try+
        (loop [ctx ctx
               gen gen
               poll-timeout 0]
          ;; First, can we complete an operation? We want to get to these first.
          (if-let [op' (.poll completions poll-timeout TimeUnit/MICROSECONDS)]
            thread (gen/process->thread ctx (:process op'))
            time (util/relative-time-nanos)
            op' (assoc op' :index op-index :time time)
            ctx (context/free-thread ctx time thread)
            gen (gen/update gen test ctx op')
            ;; Non-nemesis threads whose op crashed (:info) or asked to end
            ;; get a fresh logical process.
            ctx (if (and (not= :nemesis thread)
                         (or (= :info (:type op'))
                             (:end-process? op')))
                  (context/with-next-process ctx thread)
                  ctx)]
            ;; Journal the completion unless it's internal bookkeeping.
            (if (goes-in-history? op')
              (do (store.format/append-to-big-vector-block!
                    history-writer op')
                  (recur ctx gen (inc op-index) (dec outstanding) 0))
              (recur ctx gen op-index (dec outstanding) 0)))
            ;; No completion ready: ask the generator for the next op.
            (let [time (util/relative-time-nanos)
                  ctx (assoc ctx :time time)
                  ;; _ (binding [*print-length* 12] (pprint gen))
                  [op gen'] (gen/op gen test ctx)]
              (condp = op
                ;; Generator exhausted: drain outstanding ops, then shut the
                ;; workers down and read the finished history back off disk.
                nil (if (pos? outstanding)
                      (recur ctx gen op-index outstanding
                             (long max-pending-interval))
                      (do (doseq [[thread queue] invocations]
                            (.put ^ArrayBlockingQueue queue {:type :exit}))
                          (dorun (map (comp deref :future) workers))
                          (.close history-writer)
                          (let [history-block-id (:block-id history-writer)]
                            (-> (:handle (:store test))
                                (store.format/read-block-by-id
                                  history-block-id)
                                :data
                                (h/history {:dense-indices? true
                                            :have-indices? true
                                            :already-ops? true})))))
                ;; Generator has nothing yet: back off briefly.
                :pending (recur ctx gen op-index
                                outstanding (long max-pending-interval))
                ;; Op scheduled in the future: wait (converting ns -> us).
                (if (< time (:time op))
                  (recur ctx gen op-index outstanding
                         (long (/ (- (:time op) time) 1000))))
                ;; Dispatch the op to its worker's queue now.
                (let [thread (gen/process->thread ctx (:process op))
                      op (assoc op :index op-index)
                      goes-in-history? (goes-in-history? op)
                      _ (when goes-in-history?
                          (store.format/append-to-big-vector-block!
                            history-writer op))
                      op-index' (if goes-in-history? (inc op-index) op-index)
                      _ (.put ^ArrayBlockingQueue (get invocations thread) op)
                      ctx (context/busy-thread ctx
                                               thread)
                      gen' (gen/update gen' test ctx op)]
                  (recur ctx gen' op-index' (inc outstanding) 0)))))))
        (catch Throwable t
          ;; Abnormal exit: cancel workers, then nudge any still-running ones
          ;; with :exit until every future has finished, and re-throw.
          (info "Shutting down workers after abnormal exit")
          (dorun (map (comp future-cancel :future) workers))
          (loop [unfinished workers]
            (when (seq unfinished)
              (let [{:keys [in future] :as worker} (first unfinished)]
                (if (future-done? future)
                  (recur (next unfinished))
                  (do (.offer ^java.util.Queue in {:type :exit})
                      (recur unfinished))))))
          (throw t))))))
|
4250e2d436ff74cc18bf32f1a498af636a1286240002b8fa3b623c23e1a0d030 | ndmitchell/catch | FilePath.hs |
module FilePath where
|
Module : System . FilePath . Version_0_12
Copyright : ( c ) 2005 - 2006
License : BSD3
Maintainer : /~ndm/
Stability : in - progress
Portability : portable
A library for FilePath manipulations , designed to be cross platform .
This library will select the correct type of FilePath 's for the
platform the code is running on at runtime . For more details see
< /~ndm/projects/libraries.php >
DO NOT USE THIS CODE , IT IS STILL UNDER DEVELOPMENT - please use
" System . FilePath . Version_0_11 " .
Some short examples :
You are given a C file , you want to figure out the corresponding object ( .o ) file :
@'replaceExtension ' file \"o\"@
Haskell module Main imports Test , you have the file named main :
@['replaceFileName ' path_to_main \"Test\ " ' < . > ' ext | ext < - [ \"hs\",\"lhs\ " ] ] @
You want to download a file from the web and save it to disk :
@do let file = ' makeValid ' url
System . IO.createDirectoryIfMissing True ( ' takeDirectory ' file)@
You want to compile a Haskell file , but put the hi file under \"interface\ "
@'takeDirectory ' file ' < / > ' \"interface\ " ' < / > ' ( ' takeFileName ' file \`replaceExtension\ ` \"hi\"@ )
You want to display a filename to the user , as neatly as possible
@'makeRelativeToCurrentDirectory ' file > > = putStrLn@
The examples in code format described by each function are used to generate
tests, and should give clear semantics for the functions.
Module : System.FilePath.Version_0_12
Copyright : (c) Neil Mitchell 2005-2006
License : BSD3
Maintainer : /~ndm/
Stability : in-progress
Portability : portable
A library for FilePath manipulations, designed to be cross platform.
This library will select the correct type of FilePath's for the
platform the code is running on at runtime. For more details see
</~ndm/projects/libraries.php>
DO NOT USE THIS CODE, IT IS STILL UNDER DEVELOPMENT - please use
"System.FilePath.Version_0_11".
Some short examples:
You are given a C file, you want to figure out the corresponding object (.o) file:
@'replaceExtension' file \"o\"@
Haskell module Main imports Test, you have the file named main:
@['replaceFileName' path_to_main \"Test\" '<.>' ext | ext <- [\"hs\",\"lhs\"] ]@
You want to download a file from the web and save it to disk:
@do let file = 'makeValid' url
System.IO.createDirectoryIfMissing True ('takeDirectory' file)@
You want to compile a Haskell file, but put the hi file under \"interface\"
@'takeDirectory' file '</>' \"interface\" '</>' ('takeFileName' file \`replaceExtension\` \"hi\"@)
You want to display a filename to the user, as neatly as possible
@'makeRelativeToCurrentDirectory' file >>= putStrLn@
The examples in code format described by each function are used to generate
tests, and should give clear semantics for the functions.
-}
import Data.Maybe(isJust, fromMaybe, fromJust)
import Data.Char(toLower, toUpper)
import Data.List(isPrefixOf, inits)
import Control.Monad(when, filterM)
import System.Environment(getEnv, getProgName)
import System.Directory(getCurrentDirectory, doesFileExist, doesDirectoryExist,
getTemporaryDirectory, getDirectoryContents, createDirectory)
-- Harness primitives for the Catch analyser: 'any0' stands for an arbitrary
-- value, and the anyEvalN primitives force evaluation of their arguments.
-- These are not runnable GHC Haskell; they let the checker explore the code.
os = any0
compilerName = any0
foreign import primitive any0 :: a
foreign import primitive anyEval1 :: a -> b
foreign import primitive anyEval2 :: a -> b -> c
foreign import primitive anyEval3 :: a -> b -> c -> d

-- | Saturate a value with arbitrary arguments until a concrete result type
-- is reached, then demand its evaluation.
class Test a where
    test :: a -> Bool
instance Test b => Test (a -> b) where
    -- Feed an arbitrary argument and keep going.
    test f = test (f any0)
instance Test [a] where
    test f = anyEval1 f
instance Test (a,b) where
    test f = anyEval1 f
instance Test Bool where
    test f = anyEval1 f
instance Test Char where
    test f = anyEval1 f
instance Test (IO a) where
    test f = anyEval1 (f >> return ())

-- | Force evaluation of both operands; 'main' chains every export with this.
(|||) :: (Test a, Test b) => a -> b -> IO c
(|||) l r = anyEval2 (test l) (test r)
-- | Entry point for the analyser: exercises every exported function via
-- '(|||)'.
-- NOTE(review): the DRIVE_SECTION/END_DRIVE_SECTION lines below are remnants
-- of a preprocessor conditional whose comment delimiters were lost in
-- extraction, and the first splitDrive chain is missing identifiers
-- ("splitDrive ||| ||| hasDrive"). Kept byte-for-byte pending comparison
-- with the pristine file.
main =
    pathSeparator ||| pathSeparators ||| isPathSeparator |||
    searchPathSeparator ||| isSearchPathSeparator |||
    extSeparator ||| isExtSeparator |||
    -- * Path methods (environment $PATH)
    splitSearchPath ||| getSearchPath |||
    -- * Extension methods
    splitExtension |||
    takeExtension ||| replaceExtension ||| dropExtension ||| addExtension ||| hasExtension ||| (<.>) |||
    splitExtensions ||| dropExtensions ||| takeExtensions |||
    DRIVE_SECTION
    -- * Drive methods
    splitDrive ||| ||| hasDrive ||| dropDrive ||| isDrive |||
    END_DRIVE_SECTION
    -- * Drive methods
    splitDrive ||| joinDrive |||
    takeDrive ||| replaceDrive ||| hasDrive ||| dropDrive ||| isDrive |||
    END_DRIVE_SECTION -}
    -- * Operations on a FilePath ||| as a list of directories
    splitFileName |||
    takeFileName ||| replaceFileName ||| dropFileName |||
    takeBaseName ||| replaceBaseName |||
    takeDirectory ||| replaceDirectory |||
    combine ||| (</>) |||
    splitPath ||| joinPath ||| splitDirectories |||
    -- * Low level FilePath operators
    hasTrailingPathSeparator |||
    addTrailingPathSeparator |||
    dropTrailingPathSeparator |||
    -- * File name manipulators
    normalise ||| equalFilePath |||
    makeRelativeToCurrentDirectory ||| makeRelative |||
    isRelative ||| isAbsolute |||
    isValid ||| makeValid

infixr 7 <.>
infixr 5 </>

---------------------------------------------------------------------
-- Platform Abstraction Methods (private)

-- | Which platform's behaviour to force, if any (used only via
-- 'forceEffectView').
data Force = ForcePosix
           | ForceNone
           | ForceWindows
           deriving Eq

-- Indirection so the analyser sees the force flag through a single view.
forceEffectView = let forceEffect = ForceNone
                  in forceEffect
-- | What is the name of the OS? The real name; Hugs and GHC get this wrong,
--   reporting "mingw32" for Windows.
osName :: String
osName = if compilerName == "yhc" || os /= "mingw32"
         then os
         else "windows"

-- | Is the operating system Unix or Linux like
isPosix :: Bool
isPosix = not isWindows && forceEffectView /= ForceWindows

-- | Is the operating system Windows like
isWindows :: Bool
isWindows = osName == "windows" && forceEffectView /= ForcePosix

---------------------------------------------------------------------
-- The basic functions

-- | The character that separates directories. In the case where more than
--   one character is possible, 'pathSeparator' is the ideal one.
--
-- > Windows: pathSeparator == '\\'
-- > Posix:   pathSeparator == '/'
-- > isPathSeparator pathSeparator
pathSeparator :: Char
pathSeparator = if isWindows then '\\' else '/'

-- | The list of all possible separators.
--
-- > Windows: pathSeparators == ['\\', '/']
-- > Posix:   pathSeparators == ['/']
-- > pathSeparator `elem` pathSeparators
pathSeparators :: [Char]
pathSeparators = if isWindows then "\\/" else "/"

-- | Rather than using @(== 'pathSeparator')@, use this. Test if something
--   is a path separator.
--
-- > isPathSeparator a == (a `elem` pathSeparators)
isPathSeparator :: Char -> Bool
isPathSeparator = (`elem` pathSeparators)

-- | The character that is used to separate the entries in the $PATH environment variable.
--
-- > Windows: searchPathSeparator == ';'
-- > Posix:   searchPathSeparator == ':'
searchPathSeparator :: Char
searchPathSeparator = if isWindows then ';' else ':'

-- | Is the character a file separator?
--
-- > isSearchPathSeparator a == (a == searchPathSeparator)
isSearchPathSeparator :: Char -> Bool
isSearchPathSeparator = (== searchPathSeparator)

-- | File extension character
--
-- > extSeparator == '.'
extSeparator :: Char
extSeparator = '.'

-- | Is the character an extension character?
--
-- > isExtSeparator a == (a == extSeparator)
isExtSeparator :: Char -> Bool
isExtSeparator = (== extSeparator)
---------------------------------------------------------------------
-- Path methods (environment $PATH)
-- | Take a string, split it on the 'searchPathSeparator' character.
--   Empty entries between consecutive separators are dropped.
--
-- > Windows: splitSearchPath "File1;File2;File3" == ["File1","File2","File3"]
-- > Posix:   splitSearchPath "File1::File3"      == ["File1","File3"]
splitSearchPath :: String -> [FilePath]
splitSearchPath = go
    where
        go ys = case break isSearchPathSeparator ys of
                    ([] , [])     -> []
                    ([] , _:rest) -> go rest
                    (pre, [])     -> [pre]
                    (pre, _:rest) -> pre : go rest
-- | Get a list of filepaths in the $PATH.
--   NOTE(review): 'getEnv' raises if $PATH is unset — confirm that's intended.
getSearchPath :: IO [FilePath]
getSearchPath = fmap splitSearchPath (getEnv "PATH")
---------------------------------------------------------------------
-- Extension methods
-- | Split on the extension. 'addExtension' is the inverse.
--
-- > uncurry (++) (splitExtension x) == x
-- > uncurry addExtension (splitExtension x) == x
-- > splitExtension "file.txt" == ("file",".txt")
-- > splitExtension "file" == ("file","")
-- > splitExtension "file/file.txt" == ("file/file",".txt")
-- > splitExtension "file.txt/boris" == ("file.txt/boris","")
-- > splitExtension "file.txt/boris.ext" == ("file.txt/boris",".ext")
-- > splitExtension "file/path.txt.bob.fred" == ("file/path.txt.bob",".fred")
-- > splitExtension "file/path.txt/" == ("file/path.txt/","")
splitExtension :: FilePath -> (String, String)
splitExtension x = case d of
                       "" -> (x,"")
                       (y:ys) -> (a ++ reverse ys, y : reverse c)
    where
        -- Search the reversed file name so only the LAST extension splits off;
        -- dots in directory components are never considered.
        (a,b) = splitFileName x
        (c,d) = break isExtSeparator $ reverse b

-- | Get the extension of a file, returns @\"\"@ for no extension, @.ext@ otherwise.
--
-- > takeExtension x == snd (splitExtension x)
-- > takeExtension (addExtension x "ext") == ".ext"
-- > takeExtension (replaceExtension x "ext") == ".ext"
takeExtension :: FilePath -> String
takeExtension = snd . splitExtension

-- | Set the extension of a file, overwriting one if already present.
--
-- > replaceExtension "file.txt" ".bob" == "file.bob"
-- > replaceExtension "file.txt" "bob" == "file.bob"
-- > replaceExtension "file" ".bob" == "file.bob"
-- > replaceExtension "file.txt" "" == "file"
-- > replaceExtension "file.fred.bob" "txt" == "file.fred.txt"
replaceExtension :: FilePath -> String -> FilePath
replaceExtension x y = dropExtension x <.> y

-- | Alias to 'addExtension', for people who like that sort of thing.
(<.>) :: FilePath -> String -> FilePath
(<.>) = addExtension

-- | Remove last extension, and any . following it.
--
-- > dropExtension x == fst (splitExtension x)
dropExtension :: FilePath -> FilePath
dropExtension = fst . splitExtension

-- | Add an extension, even if there is already one there.
--   E.g. @addExtension \"foo.txt\" \"bat\" -> \"foo.txt.bat\"@.
--
-- > addExtension "file.txt" "bib" == "file.txt.bib"
-- > addExtension "file." ".bib" == "file..bib"
-- > addExtension "file" ".bib" == "file.bib"
-- > addExtension "/" "x" == "/.x"
-- > takeBaseName (addExtension (addTrailingPathSeparator x) "ext") == ".ext"
-- > Windows: addExtension "\\\\share" ".txt" == "\\\\share\\.txt"
addExtension :: FilePath -> String -> FilePath
addExtension file "" = file
addExtension file xs@(x:_) = joinDrive a res
    where
        -- Only insert a '.' if the caller didn't supply one.
        res = if isExtSeparator x then b ++ xs
              else b ++ [extSeparator] ++ xs
        -- Split the drive off first so joinDrive can fix up bare drives
        -- (e.g. the "\\\\share" case above).
        (a,b) = splitDrive file

-- | Does the given filename have an extension?
--
-- > null (takeExtension x) == not (hasExtension x)
hasExtension :: FilePath -> Bool
hasExtension = any isExtSeparator . takeFileName

-- | Split on all extensions
--
-- > splitExtensions "file.tar.gz" == ("file",".tar.gz")
splitExtensions :: FilePath -> (FilePath, String)
splitExtensions x = (a ++ c, d)
    where
        -- Unlike 'splitExtension', break at the FIRST dot of the file name.
        (a,b) = splitFileName x
        (c,d) = break isExtSeparator b

-- | Drop all extensions
--
-- > not $ hasExtension (dropExtensions x)
dropExtensions :: FilePath -> FilePath
dropExtensions = fst . splitExtensions

-- | Get all extensions
takeExtensions :: FilePath -> String
takeExtensions = snd . splitExtensions
---------------------------------------------------------------------
-- Drive methods
-- | Is the given character a valid drive letter?
-- only a-z and A-Z are letters, not isAlpha which is more unicodey
-- ASCII a-z / A-Z only, deliberately narrower than Unicode 'isAlpha'.
isLetter :: Char -> Bool
isLetter x = x `elem` ['a'..'z'] || x `elem` ['A'..'Z']
-- | Split a path into a drive and a path.
On Unix , \/ is a Drive .
--
-- > uncurry (++) (splitDrive x) == x
-- > Windows: splitDrive "file" == ("","file")
-- > Windows: splitDrive "c:/file" == ("c:/","file")
> Windows : splitDrive " c:\\file " = = ( " c:\\","file " )
-- > Windows: splitDrive "\\\\shared\\test" == ("\\\\shared\\","test")
-- > Windows: splitDrive "\\\\shared" == ("\\\\shared","")
> Windows : splitDrive " \\\\?\\UNC\\shared\\file " = = ( " \\\\?\\UNC\\shared\\","file " )
-- > Windows: splitDrive "\\\\?\\d:\\file" == ("\\\\?\\d:\\","file")
> Windows : splitDrive " /d " = = ( " /","d " )
> : splitDrive " /test " = = ( " /","test " )
> : splitDrive " //test " = = ( " //","test " )
> : splitDrive " test / file " = = ( " " , " test / file " )
> : splitDrive " file " = = ( " " , " file " )
-- | Split a path into a drive and a path.
--   On Posix, a leading \/ (or run of them) is the drive.
--
-- > uncurry (++) (splitDrive x) == x
-- > Windows: splitDrive "file" == ("","file")
-- > Windows: splitDrive "c:/file" == ("c:/","file")
-- > Windows: splitDrive "\\\\shared\\test" == ("\\\\shared\\","test")
-- > Posix:   splitDrive "/test" == ("/","test")
-- > Posix:   splitDrive "test/file" == ("","test/file")
splitDrive :: FilePath -> (FilePath, FilePath)
-- Windows clauses are tried in order: letter drive, \\?\ UNC, \\share.
splitDrive x | isPosix = span (== '/') x
splitDrive x | isJust y = fromJust y
    where y = readDriveLetter x
splitDrive x | isJust y = fromJust y
    where y = readDriveUNC x
splitDrive x | isJust y = fromJust y
    where y = readDriveShare x
splitDrive (x:xs) | isPathSeparator x = addSlash [x] xs
splitDrive x = ("",x)

-- Attach any run of separators following the drive onto the drive itself.
-- (Top level: shared by splitDrive, readDriveLetter and readDriveShareName.)
addSlash a xs = (a++c,d)
    where (c,d) = span isPathSeparator xs

-- See MSDN "Naming a File": "\\?\D:\<path>" or "\\?\UNC\<server>\<share>".
-- The first three characters matched are the "\\?" prefix.
readDriveUNC :: FilePath -> Maybe (FilePath, FilePath)
readDriveUNC (s1:s2:'?':s3:xs) | all isPathSeparator [s1,s2,s3] =
    case map toUpper xs of
        ('U':'N':'C':s4:_) | isPathSeparator s4 ->
            -- Keep the original (non-uppercased) "UNC\" prefix from xs.
            let (a,b) = readDriveShareName (drop 4 xs)
            in Just (s1:s2:'?':s3:take 4 xs ++ a, b)
        _ -> case readDriveLetter xs of
                 Just (a,b) -> Just (s1:s2:'?':s3:a,b)
                 Nothing -> Nothing
readDriveUNC x = Nothing

-- c:\ — with or without the trailing separator.
readDriveLetter :: String -> Maybe (FilePath, FilePath)
readDriveLetter (x:':':y:xs) | isLetter x && isPathSeparator y = Just $ addSlash [x,':'] (y:xs)
readDriveLetter (x:':':xs) | isLetter x = Just ([x,':'], xs)
readDriveLetter x = Nothing

-- \\sharename\
readDriveShare :: String -> Maybe (FilePath, FilePath)
readDriveShare (s1:s2:xs) | isPathSeparator s1 && isPathSeparator s2 =
        Just (s1:s2:a,b)
    where (a,b) = readDriveShareName xs
readDriveShare x = Nothing

-- Assumes the caller has already consumed the leading \\.
-- share\bob -> ("share\","bob")
readDriveShareName :: String -> (FilePath, FilePath)
readDriveShareName name = addSlash a b
    where (a,b) = break isPathSeparator name

-- | Join a drive and the rest of the path.
--
-- > uncurry joinDrive (splitDrive x) == x
joinDrive :: FilePath -> FilePath -> FilePath
joinDrive a b | isPosix = a ++ b
              | null a = b
              | null b = a
              | isPathSeparator (last a) = a ++ b
              | otherwise = case a of
                                -- A bare "c:" drive needs no separator added.
                                [a1,':'] | isLetter a1 -> a ++ b
                                _ -> a ++ [pathSeparator] ++ b

-- | Set the drive, from the filepath.
--
-- > replaceDrive x (takeDrive x) == x
replaceDrive :: FilePath -> String -> FilePath
replaceDrive x drv = joinDrive drv (dropDrive x)

-- | Get the drive from a filepath.
--
-- > takeDrive x == fst (splitDrive x)
takeDrive :: FilePath -> FilePath
takeDrive = fst . splitDrive

-- | Delete the drive, if it exists.
--
-- > dropDrive x == snd (splitDrive x)
dropDrive :: FilePath -> FilePath
dropDrive = snd . splitDrive

-- | Does a path have a drive.
--
-- > not (hasDrive x) == null (takeDrive x)
hasDrive :: FilePath -> Bool
hasDrive = not . null . takeDrive

-- | Is an element a drive
isDrive :: FilePath -> Bool
isDrive = null . dropDrive
---------------------------------------------------------------------
-- Operations on a filepath, as a list of directories
-- | Split a filename into directory and file. 'combine' is the inverse.
--
-- > uncurry (++) (splitFileName x) == x
-- > uncurry combine (splitFileName x) == x
-- > splitFileName "file/bob.txt" == ("file/", "bob.txt")
-- > splitFileName "file/" == ("file/", "")
-- > splitFileName "bob" == ("", "bob")
-- > Posix:   splitFileName "/" == ("/","")
-- > Windows: splitFileName "c:" == ("c:","")
splitFileName :: FilePath -> (String, String)
splitFileName x = (c ++ reverse b, reverse a)
    where
        -- Scan the drive-less part from the right for the last separator;
        -- the drive c is always part of the directory.
        (a,b) = break isPathSeparator $ reverse d
        (c,d) = splitDrive x

-- | Set the filename.
--
-- > replaceFileName x (takeFileName x) == x
replaceFileName :: FilePath -> String -> FilePath
replaceFileName x y = dropFileName x `combine` y

-- | Drop the filename.
--
-- > dropFileName x == fst (splitFileName x)
dropFileName :: FilePath -> FilePath
dropFileName = fst . splitFileName

-- | Get the file name.
--
-- > takeFileName "test/" == ""
-- > takeFileName x == snd (splitFileName x)
-- > takeFileName (replaceFileName x "fred") == "fred"
-- > takeFileName (combine x "fred") == "fred"
-- > isRelative (takeFileName x)
takeFileName :: FilePath -> FilePath
takeFileName = snd . splitFileName

-- | Get the base name, without an extension or path.
--
-- > takeBaseName "file/test.txt" == "test"
-- > takeBaseName "dave.ext" == "dave"
-- > takeBaseName "" == ""
-- > takeBaseName "test" == "test"
-- > takeBaseName (addTrailingPathSeparator x) == ""
-- > takeBaseName "file/file.tar.gz" == "file.tar"
takeBaseName :: FilePath -> String
takeBaseName = dropExtension . takeFileName

-- | Set the base name.
--
-- > replaceBaseName "file/test.txt" "bob" == "file/bob.txt"
-- > replaceBaseName "fred" "bill" == "bill"
-- > replaceBaseName "/dave/fred/bob.gz.tar" "new" == "/dave/fred/new.tar"
-- > replaceBaseName x (takeBaseName x) == x
replaceBaseName :: FilePath -> String -> FilePath
replaceBaseName pth nam = combine a (addExtension nam ext)
    where
        -- Keep the directory and the (last) extension; swap the middle.
        (a,b) = splitFileName pth
        ext = takeExtension b
-- | Is an item either a directory or the last character a path separator?
--
-- > hasTrailingPathSeparator "test" == False
-- > hasTrailingPathSeparator "test/" == True
hasTrailingPathSeparator :: FilePath -> Bool
hasTrailingPathSeparator "" = False
hasTrailingPathSeparator x = isPathSeparator (last x)

-- | Add a trailing file path separator if one is not already present.
--
-- > hasTrailingPathSeparator (addTrailingPathSeparator x)
-- > if hasTrailingPathSeparator x then addTrailingPathSeparator x == x else True
-- > Posix: addTrailingPathSeparator "test/rest" == "test/rest/"
addTrailingPathSeparator :: FilePath -> FilePath
addTrailingPathSeparator x = if hasTrailingPathSeparator x then x else x ++ [pathSeparator]

-- | Remove any trailing path separators
--
-- > dropTrailingPathSeparator "file/test/" == "file/test"
-- > not (hasTrailingPathSeparator (dropTrailingPathSeparator x)) || isDrive x
-- > Posix: dropTrailingPathSeparator "/" == "/"
dropTrailingPathSeparator :: FilePath -> FilePath
dropTrailingPathSeparator x =
    -- Bare drives (e.g. "/", "c:\\") keep their separator.
    if hasTrailingPathSeparator x && not (isDrive x)
    then reverse $ dropWhile isPathSeparator $ reverse x
    else x

-- | Get the directory name, move up one level.
--
-- > Posix:   takeDirectory "/foo/bar/baz"  == "/foo/bar"
-- > Posix:   takeDirectory "/foo/bar/baz/" == "/foo/bar/baz"
-- > Windows: takeDirectory "foo\\bar" == "foo"
-- > Windows: takeDirectory "C:\\" == "C:\\"
takeDirectory :: FilePath -> FilePath
takeDirectory x = if isDrive file then file
                  -- If stripping separators would leave nothing (e.g. "x/"),
                  -- keep the separator-bearing form.
                  else if null res && not (null file) then file
                  else res
    where
        res = reverse $ dropWhile isPathSeparator $ reverse file
        file = dropFileName x

-- | Set the directory, keeping the filename the same.
--
-- > replaceDirectory x (takeDirectory x) `equalFilePath` x
replaceDirectory :: FilePath -> String -> FilePath
replaceDirectory x dir = combine dir (takeFileName x)

-- | Combine two paths, if the second path 'isAbsolute', then it returns the second.
--
-- > combine (takeDirectory x) (takeFileName x) `equalFilePath` x
-- > Posix:   combine "/" "test" == "/test"
-- > Posix:   combine "home" "bob" == "home/bob"
-- > Windows: combine "home" "bob" == "home\\bob"
combine :: FilePath -> FilePath -> FilePath
combine a b | isAbsolute b || null a = b
            | null b = a
            | isPathSeparator (last a) = a ++ b
            -- joinDrive knows whether a bare drive needs a separator.
            | isDrive a = joinDrive a b
            | otherwise = a ++ [pathSeparator] ++ b

-- | A nice alias for 'combine'.
(</>) :: FilePath -> FilePath -> FilePath
(</>) = combine
-- | Split a path by the directory separator.
--
-- > concat (splitPath x) == x
-- > splitPath "test//item/" == ["test//","item/"]
-- > splitPath "test/item/file" == ["test/","item/","file"]
-- > splitPath "" == []
-- > Windows: splitPath "c:\\test\\path" == ["c:\\","test\\","path"]
-- > Posix:   splitPath "/file/test" == ["/","file/","test"]
splitPath :: FilePath -> [FilePath]
splitPath x = [a | a /= ""] ++ f b
    where
        -- The drive (if any) is its own leading component.
        (a,b) = splitDrive x

        -- Each component carries its trailing run of separators.
        -- (The inner (a,b) deliberately shadows the outer pair.)
        f "" = []
        f x = (a++c) : f d
            where
                (a,b) = break isPathSeparator x
                (c,d) = break (not . isPathSeparator) b

-- | Just as 'splitPath', but don't add the trailing slashes to each element.
--
-- > splitDirectories "test/file" == ["test","file"]
-- > splitDirectories "/test/file" == ["/","test","file"]
-- > joinPath (splitDirectories (makeValid x)) `equalFilePath` makeValid x
-- > splitDirectories "" == []
splitDirectories :: FilePath -> [FilePath]
splitDirectories x =
        -- The drive component keeps its separators; all others are trimmed.
        if hasDrive x then head xs : f (tail xs)
        else f xs
    where
        xs = splitPath x

        f xs = map g xs
        -- For a component that is ALL separators, keep it rather than "".
        g x = if null res then x else res
            where res = takeWhile (not . isPathSeparator) x
-- | Join path elements back together.
--
-- > joinPath (splitPath (makeValid x)) == makeValid x
--
-- Note: on input like "c:\\c:\\", join then split will give "c:\\".
joinPath :: [FilePath] -> FilePath
joinPath = foldr combine ""
---------------------------------------------------------------------
-- File name manipulators

-- | Equality of two 'FilePath's.
--   If you call 'normalise' (and on Windows, lower-case) first this has a
--   much better chance of working.
--   Note that this doesn't follow symlinks or DOSNAM~1s.
equalFilePath :: FilePath -> FilePath -> Bool
equalFilePath a b = f a == f b
    where
        -- Windows paths are case-insensitive; Posix paths are not.
        f x | isPosix = dropTrailSlash $ normalise x
            | otherwise = dropTrailSlash $ map toLower $ normalise x

        dropTrailSlash "" = ""
        dropTrailSlash x | isPathSeparator (last x) = init x
                         | otherwise = x

-- | Contract a filename, based on a relative path.
--
-- > Posix: makeRelative "/home" "/home/bob/foo/bar" == "bob/foo/bar"
-- > Posix: makeRelative "/fred" "bob" == "bob"
-- > Posix: makeRelative "/file/test" "/file/test/fred" == "fred"
-- > Posix: makeRelative "/file/test" "/file/test/fred/" == "fred/"
-- > Posix: makeRelative "/fred/dave" "/fred/bill" == "../bill"
makeRelative :: FilePath -> FilePath -> FilePath
-- Can only relativise two absolute paths on the same drive.
makeRelative cur x | isRelative x || isRelative cur || not (takeDrive x `equalFilePath` takeDrive cur) = normalise x
makeRelative cur x = joinPath $
        -- One ".." for each non-shared component of cur, then the rest of x.
        replicate (length curdir - common) ".." ++
        drop common orgpth
    where
        common = length $ takeWhile id $ zipWith (==) orgdir curdir
        orgpth = splitPath pth
        orgdir = splitDirectories pth
        curdir = splitDirectories $ dropDrive $ normalise $ cur
        -- NOTE(review): drv is bound but unused.
        (drv,pth) = splitDrive $ normalise x

-- | 'makeRelative' the current directory.
makeRelativeToCurrentDirectory :: FilePath -> IO FilePath
makeRelativeToCurrentDirectory x = do
    cur <- getCurrentDirectory
    return $ makeRelative cur x
-- | Normalise a file:
--
-- * \/\/ outside of the drive can be made blank
--
-- * \/ becomes 'pathSeparator'
--
-- * .\/ -> \"\"
--
-- > Posix:   normalise "/file/./test" == "/file/test"
-- > Posix:   normalise "../bob/fred/" == "../bob/fred/"
-- > Posix:   normalise "./bob/fred/" == "bob/fred/"
-- > Windows: normalise "c:\\file/bob\\" == "C:\\file\\bob\\"
-- > Windows: normalise "\\\\server\\test" == "\\\\server\\test"
-- > Windows: normalise "c:/file" == "C:\\file"
normalise :: FilePath -> FilePath
normalise "" = ""
-- Preserve a trailing separator, since joinPath/dropDots strip it.
normalise x = joinDrive (normaliseDrive drv) (f pth) ++ [pathSeparator | isPathSeparator $ last x]
    where
        (drv,pth) = splitDrive x

        -- Collapse separator runs, canonicalise them, and drop "." parts.
        f = joinPath . dropDots [] . splitDirectories . propSep

        -- Squash repeated separators, replacing each with 'pathSeparator'.
        propSep (a:b:xs) | isPathSeparator a && isPathSeparator b = propSep (a:xs)
        propSep (a:xs) | isPathSeparator a = pathSeparator : propSep xs
        propSep (x:xs) = x : propSep xs
        propSep [] = []

        -- Remove "." components; the accumulator holds parts in reverse.
        dropDots acc (".":xs) = dropDots acc xs
        dropDots acc (x:xs) = dropDots (x:acc) xs
        dropDots acc [] = reverse acc
-- On Windows, upper-case a letter drive and canonicalise its separator;
-- share drives and non-drives pass through unchanged. Posix: identity.
normaliseDrive :: FilePath -> FilePath
normaliseDrive x | isPosix = x
normaliseDrive x = if isJust $ readDriveLetter x2 then
                       map toUpper x2
                   else
                       x
    where
        x2 = map repSlash x
        repSlash x = if isPathSeparator x then pathSeparator else x
-- Information for validity functions on Windows.
-- See MSDN "Naming a File".
-- NOTE(review): Windows also forbids '"' and '/' in names; the MSDN page
-- lists more characters than this string — confirm against the real spec.
badCharacters = ":*?><|"
badElements = ["CON", "PRN", "AUX", "NUL", "COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9", "LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9", "CLOCK$"]

-- | Is a FilePath valid, i.e. could you create a file like it?
--
-- > Posix:   isValid "/random_path:*" == True
-- > Posix:   isValid x == True
-- > Windows: isValid "c:\\test" == True
-- > Windows: isValid "c:\\test:of_test" == False
-- > Windows: isValid "test*" == False
-- > Windows: isValid "c:\\test\\nul" == False
-- > Windows: isValid "c:\\test\\prn.txt" == False
-- > Windows: isValid "c:\\nul\\file" == False
isValid :: FilePath -> Bool
isValid x | isPosix = True
isValid x = not (any (`elem` badCharacters) x2) && not (any f $ splitDirectories x2)
    where
        -- The drive is exempt (it legitimately contains ':').
        x2 = dropDrive x
        -- Reserved device names are reserved with ANY extension, any case.
        f x = map toUpper (dropExtensions x) `elem` badElements

-- | Take a FilePath and make it valid; does not change already valid FilePaths.
--
-- > isValid (makeValid x)
-- > if isValid x then makeValid x == x else True
-- > Windows: makeValid "c:\\test:of_test" == "c:\\test_of_test"
-- > Windows: makeValid "test*" == "test_"
-- > Windows: makeValid "c:\\test\\nul" == "c:\\test\\nul_"
-- > Windows: makeValid "c:\\test\\prn.txt" == "c:\\test\\prn_.txt"
-- > Windows: makeValid "c:\\test/prn.txt" == "c:\\test/prn_.txt"
-- > Windows: makeValid "c:\\nul\\file" == "c:\\nul_\\file"
makeValid :: FilePath -> FilePath
makeValid x | isPosix = x
makeValid x = joinDrive drv $ validElements $ validChars pth
    where
        (drv,pth) = splitDrive x

        -- Replace each forbidden character with '_'.
        validChars x = map f x
        f x | x `elem` badCharacters = '_'
            | otherwise = x

        -- Rename reserved device names by suffixing '_' before the extension.
        validElements x = joinPath $ map g $ splitPath x
        -- g: (a,b) = (component-without-trailing-seps, its separators).
        g x = h (reverse b) ++ reverse a
            where (a,b) = span isPathSeparator $ reverse x
        -- h: (a,b) = (name, extensions); shadows g's (a,b) deliberately.
        h x = if map toUpper a `elem` badElements then addExtension (a ++ "_") b else x
            where (a,b) = splitExtensions x

-- | Is a path relative, or is it fixed to the root?
--
-- > Windows: isRelative "path\\test" == True
-- > Windows: isRelative "c:\\test" == False
-- > Posix:   isRelative "test/path" == True
-- > Posix:   isRelative "/test" == False
isRelative :: FilePath -> Bool
isRelative = null . takeDrive

-- | @not . 'isRelative'@
--
-- > isAbsolute x == not (isRelative x)
isAbsolute :: FilePath -> Bool
isAbsolute = not . isRelative
| null | https://raw.githubusercontent.com/ndmitchell/catch/5d834416a27b4df3f7ce7830c4757d4505aaf96e/examples/Example/FilePath.hs | haskell | * Path methods (environment $PATH)
* Extension methods
* Drive methods
* Drive methods
* Operations on a FilePath ||| as a list of directories
* File name manipulators
-------------------------------------------------------------------
-------------------------------------------------------------------
The basic functions
| The character that separates directories. In the case where more than
> Windows: pathSeparator == '\\'
> isPathSeparator pathSeparator
| The list of all possible separators.
> Windows: pathSeparators == ['\\', '/']
> pathSeparator `elem` pathSeparators
| Rather than using @(== 'pathSeparator')@, use this. Test if something
is a path separator.
> isPathSeparator a == (a `elem` pathSeparators)
| The character that is used to separate the entries in the $PATH environment variable.
> Windows: searchPathSeparator == ';'
| Is the character a file separator?
> isSearchPathSeparator a == (a == searchPathSeparator)
| File extension character
> extSeparator == '.'
| Is the character an extension character?
> isExtSeparator a == (a == extSeparator)
-------------------------------------------------------------------
Path methods (environment $PATH)
| Take a string, split it on the 'searchPathSeparator' character.
| Get a list of filepaths in the $PATH.
-------------------------------------------------------------------
Extension methods
| Split on the extension. 'addExtension' is the inverse.
> uncurry (++) (splitExtension x) == x
> uncurry addExtension (splitExtension x) == x
> splitExtension "file.txt" == ("file",".txt")
> splitExtension "file" == ("file","")
> splitExtension "file/file.txt" == ("file/file",".txt")
> splitExtension "file.txt/boris" == ("file.txt/boris","")
> splitExtension "file/path.txt.bob.fred" == ("file/path.txt.bob",".fred")
> splitExtension "file/path.txt/" == ("file/path.txt/","")
> takeExtension (addExtension x "ext") == ".ext"
> takeExtension (replaceExtension x "ext") == ".ext"
| Set the extension of a file, overwriting one if already present.
> replaceExtension "file.txt" ".bob" == "file.bob"
> replaceExtension "file.txt" "bob" == "file.bob"
> replaceExtension "file" ".bob" == "file.bob"
> replaceExtension "file.txt" "" == "file"
> replaceExtension "file.fred.bob" "txt" == "file.fred.txt"
| Remove last extension, and any . following it.
> dropExtension x == fst (splitExtension x)
| Add an extension, even if there is already one there.
> addExtension "file.txt" "bib" == "file.txt.bib"
> addExtension "file." ".bib" == "file..bib"
> addExtension "file" ".bib" == "file.bib"
> addExtension "/" "x" == "/.x"
> takeBaseName (addExtension (addTrailingPathSeparator x) "ext") == ".ext"
> Windows: addExtension "\\\\share" ".txt" == "\\\\share\\.txt"
| Does the given filename have an extension?
> null (takeExtension x) == not (hasExtension x)
| Split on all extensions
> splitExtensions "file.tar.gz" == ("file",".tar.gz")
| Drop all extensions
> not $ hasExtension (dropExtensions x)
| Get all extensions
-------------------------------------------------------------------
Drive methods
| Is the given character a valid drive letter?
only a-z and A-Z are letters, not isAlpha which is more unicodey
| Split a path into a drive and a path.
> uncurry (++) (splitDrive x) == x
> Windows: splitDrive "file" == ("","file")
> Windows: splitDrive "c:/file" == ("c:/","file")
> Windows: splitDrive "\\\\shared\\test" == ("\\\\shared\\","test")
> Windows: splitDrive "\\\\shared" == ("\\\\shared","")
> Windows: splitDrive "\\\\?\\d:\\file" == ("\\\\?\\d:\\","file")
-us/fileio/fs/naming_a_file.asp
"\\?\D:\<path>" or "\\?\UNC\<server>\<share>"
c:\
\\sharename\
assume you have already seen \\
share\bob -> "share","\","bob"
| Join a drive and the rest of the path.
> uncurry joinDrive (splitDrive x) == x
| Get the drive from a filepath.
| Delete the drive, if it exists.
| Does a path have a drive.
| Is an element a drive
-------------------------------------------------------------------
Operations on a filepath, as a list of directories
| Split a filename into directory and file. 'combine' is the inverse.
> uncurry (++) (splitFileName x) == x
> uncurry combine (splitFileName x) == x
> splitFileName "file/bob.txt" == ("file/", "bob.txt")
> splitFileName "file/" == ("file/", "")
> Windows: splitFileName "c:" == ("c:","")
| Set the filename.
> replaceFileName x (takeFileName x) == x
| Drop the filename.
> dropFileName x == fst (splitFileName x)
| Get the file name.
> takeFileName "test/" == ""
> isRelative (takeFileName x)
| Get the base name, without an extension or path.
> takeBaseName "file/test.txt" == "test"
> takeBaseName "" == ""
> takeBaseName "test" == "test"
> takeBaseName (addTrailingPathSeparator x) == ""
> takeBaseName "file/file.tar.gz" == "file.tar"
| Set the base name.
> replaceBaseName "file/test.txt" "bob" == "file/bob.txt"
> replaceBaseName "/dave/fred/bob.gz.tar" "new" == "/dave/fred/new.tar"
> replaceBaseName x (takeBaseName x) == x
| Is an item either a directory or the last character a path separator?
> hasTrailingPathSeparator "test" == False
> hasTrailingPathSeparator "test/" == True
| Add a trailing file path separator if one is not already present.
> hasTrailingPathSeparator (addTrailingPathSeparator x)
> if hasTrailingPathSeparator x then addTrailingPathSeparator x == x else True
| Remove any trailing path separators
> dropTrailingPathSeparator "file/test/" == "file/test"
> not (hasTrailingPathSeparator (dropTrailingPathSeparator x)) || isDrive x
> Windows: takeDirectory "foo\\bar" == "foo"
> Windows: takeDirectory "C:\\" == "C:\\"
| Set the directory, keeping the filename the same.
> replaceDirectory x (takeDirectory x) `equalFilePath` x
> combine (takeDirectory x) (takeFileName x) `equalFilePath` x
> Windows: combine "home" "bob" == "home\\bob"
| A nice alias for 'combine'.
| Split a path by the directory separator.
> concat (splitPath x) == x
> splitPath "test/item/file" == ["test/","item/","file"]
> splitPath "" == []
> Windows: splitPath "c:\\test\\path" == ["c:\\","test\\","path"]
| Just as 'splitPath', but don't add the trailing slashes to each element.
> splitDirectories "test/file" == ["test","file"]
> splitDirectories "/test/file" == ["/","test","file"]
> splitDirectories "" == []
| Join path elements back together.
Note that this definition on c:\\c:\\, join then split will give c:\\
-------------------------------------------------------------------
File name manipulators
Note that this doesn't follow symlinks or DOSNAM~1s.
| Contract a filename, based on a relative path.
| 'makeRelative' the current directory.
| Normalise a file
* \/\/ outside of the drive can be made blank
* \/ -> 'pathSeparator'
* .\/ -> \"\"
> Windows: normalise "c:\\file/bob\\" == "C:\\file\\bob\\"
> Windows: normalise "\\\\server\\test" == "\\\\server\\test"
see -us/fileio/fs/naming_a_file.asp
> Windows: isValid "c:\\test" == True
> Windows: isValid "c:\\test:of_test" == False
> Windows: isValid "test*" == False
> Windows: isValid "c:\\test\\nul" == False
> Windows: isValid "c:\\test\\prn.txt" == False
> Windows: isValid "c:\\nul\\file" == False
> Windows: makeValid "c:\\test:of_test" == "c:\\test_of_test"
> Windows: makeValid "test*" == "test_"
> Windows: makeValid "c:\\test\\nul" == "c:\\test\\nul_"
> Windows: makeValid "c:\\test\\prn.txt" == "c:\\test\\prn_.txt"
> Windows: makeValid "c:\\test/prn.txt" == "c:\\test/prn_.txt"
> Windows: makeValid "c:\\nul\\file" == "c:\\nul_\\file"
| Is a path relative, or is it fixed to the root?
> Windows: isRelative "path\\test" == True
> Windows: isRelative "c:\\test" == False
> isAbsolute x == not (isRelative x) |
module FilePath where
|
Module : System . FilePath . Version_0_12
Copyright : ( c ) 2005 - 2006
License : BSD3
Maintainer : /~ndm/
Stability : in - progress
Portability : portable
A library for FilePath manipulations , designed to be cross platform .
This library will select the correct type of FilePath 's for the
platform the code is running on at runtime . For more details see
< /~ndm/projects/libraries.php >
DO NOT USE THIS CODE , IT IS STILL UNDER DEVELOPMENT - please use
" System . FilePath . Version_0_11 " .
Some short examples :
You are given a C file , you want to figure out the corresponding object ( .o ) file :
@'replaceExtension ' file \"o\"@
Haskell module Main imports Test , you have the file named main :
@['replaceFileName ' path_to_main \"Test\ " ' < . > ' ext | ext < - [ \"hs\",\"lhs\ " ] ] @
You want to download a file from the web and save it to disk :
@do let file = ' makeValid ' url
System . IO.createDirectoryIfMissing True ( ' takeDirectory ' file)@
You want to compile a Haskell file , but put the hi file under \"interface\ "
@'takeDirectory ' file ' < / > ' \"interface\ " ' < / > ' ( ' takeFileName ' file \`replaceExtension\ ` \"hi\"@ )
You want to display a filename to the user , as neatly as possible
@'makeRelativeToCurrentDirectory ' file > > = putStrLn@
The examples in code format descibed by each function are used to generate
tests , and should give clear semantics for the functions .
Module : System.FilePath.Version_0_12
Copyright : (c) Neil Mitchell 2005-2006
License : BSD3
Maintainer : /~ndm/
Stability : in-progress
Portability : portable
A library for FilePath manipulations, designed to be cross platform.
This library will select the correct type of FilePath's for the
platform the code is running on at runtime. For more details see
</~ndm/projects/libraries.php>
DO NOT USE THIS CODE, IT IS STILL UNDER DEVELOPMENT - please use
"System.FilePath.Version_0_11".
Some short examples:
You are given a C file, you want to figure out the corresponding object (.o) file:
@'replaceExtension' file \"o\"@
Haskell module Main imports Test, you have the file named main:
@['replaceFileName' path_to_main \"Test\" '<.>' ext | ext <- [\"hs\",\"lhs\"] ]@
You want to download a file from the web and save it to disk:
@do let file = 'makeValid' url
System.IO.createDirectoryIfMissing True ('takeDirectory' file)@
You want to compile a Haskell file, but put the hi file under \"interface\"
@'takeDirectory' file '</>' \"interface\" '</>' ('takeFileName' file \`replaceExtension\` \"hi\"@)
You want to display a filename to the user, as neatly as possible
@'makeRelativeToCurrentDirectory' file >>= putStrLn@
The examples in code format descibed by each function are used to generate
tests, and should give clear semantics for the functions.
-}
import Data.Maybe(isJust, fromMaybe, fromJust)
import Data.Char(toLower, toUpper)
import Data.List(isPrefixOf, inits)
import Control.Monad(when, filterM)
import System.Environment(getEnv, getProgName)
import System.Directory(getCurrentDirectory, doesFileExist, doesDirectoryExist,
getTemporaryDirectory, getDirectoryContents, createDirectory)
os = any0
compilerName = any0
foreign import primitive any0 :: a
foreign import primitive anyEval1 :: a -> b
foreign import primitive anyEval2 :: a -> b -> c
foreign import primitive anyEval3 :: a -> b -> c -> d
class Test a where
test :: a -> Bool
instance Test b => Test (a -> b) where
test f = test (f any0)
instance Test [a] where
test f = anyEval1 f
instance Test (a,b) where
test f = anyEval1 f
instance Test Bool where
test f = anyEval1 f
instance Test Char where
test f = anyEval1 f
instance Test (IO a) where
test f = anyEval1 (f >> return ())
(|||) :: (Test a, Test b) => a -> b -> IO c
(|||) l r = anyEval2 (test l) (test r)
main =
pathSeparator ||| pathSeparators ||| isPathSeparator |||
searchPathSeparator ||| isSearchPathSeparator |||
extSeparator ||| isExtSeparator |||
splitSearchPath ||| getSearchPath |||
splitExtension |||
takeExtension ||| replaceExtension ||| dropExtension ||| addExtension ||| hasExtension ||| (<.>) |||
splitExtensions ||| dropExtensions ||| takeExtensions |||
DRIVE_SECTION
splitDrive ||| ||| hasDrive ||| dropDrive ||| isDrive |||
END_DRIVE_SECTION
splitDrive ||| joinDrive |||
takeDrive ||| replaceDrive ||| hasDrive ||| dropDrive ||| isDrive |||
END_DRIVE_SECTION -}
splitFileName |||
takeFileName ||| replaceFileName ||| dropFileName |||
takeBaseName ||| replaceBaseName |||
takeDirectory ||| replaceDirectory |||
combine ||| (</>) |||
splitPath ||| joinPath ||| splitDirectories |||
* Low level FilePath operators
hasTrailingPathSeparator |||
addTrailingPathSeparator |||
dropTrailingPathSeparator |||
normalise ||| equalFilePath |||
makeRelativeToCurrentDirectory ||| makeRelative |||
isRelative ||| isAbsolute |||
isValid ||| makeValid
infixr 7 <.>
infixr 5 </>
Platform Abstraction Methods ( private )
data Force = ForcePosix
| ForceNone
| ForceWindows
deriving Eq
forceEffectView = let forceEffect = ForceNone
in forceEffect
| What is the name of the OS ? The real name , Hugs and GHC get this wrong ...
osName :: String
osName = if compilerName == "yhc" || os /= "mingw32"
then os
else "windows"
| Is the operating system Unix or Linux like
isPosix :: Bool
isPosix = not isWindows && forceEffectView /= ForceWindows
| Is the operating system Windows like
isWindows :: Bool
isWindows = osName == "windows" && forceEffectView /= ForcePosix
one character is possible , ' pathSeparator ' is the \'ideal\ ' one .
> : pathSeparator = = ' / '
pathSeparator :: Char
pathSeparator = if isWindows then '\\' else '/'
> : pathSeparators = = [ ' / ' ]
pathSeparators :: [Char]
pathSeparators = if isWindows then "\\/" else "/"
isPathSeparator :: Char -> Bool
isPathSeparator = (`elem` pathSeparators)
> : searchPathSeparator = = ' : '
searchPathSeparator :: Char
searchPathSeparator = if isWindows then ';' else ':'
isSearchPathSeparator :: Char -> Bool
isSearchPathSeparator = (== searchPathSeparator)
extSeparator :: Char
extSeparator = '.'
isExtSeparator :: Char -> Bool
isExtSeparator = (== extSeparator)
> Windows : splitSearchPath " File1;File2;File3 " = = [ " File1","File2","File3 " ]
> : splitSearchPath " File1 : : File3 " = = [ " File1","File2","File3 " ]
splitSearchPath :: String -> [FilePath]
splitSearchPath = f
where
f xs = case break isSearchPathSeparator xs of
([], []) -> []
([], post) -> f (tail post)
(pre, []) -> [pre]
(pre, post) -> pre : f (tail post)
getSearchPath :: IO [FilePath]
getSearchPath = fmap splitSearchPath (getEnv "PATH")
> splitExtension " file.txt/boris.ext " = = ( " file.txt/boris",".ext " )
splitExtension :: FilePath -> (String, String)
splitExtension x = case d of
"" -> (x,"")
(y:ys) -> (a ++ reverse ys, y : reverse c)
where
(a,b) = splitFileName x
(c,d) = break isExtSeparator $ reverse b
| Get the extension of a file , returns @\"\"@ for no extension , @.ext@ otherwise .
> takeExtension x = = snd ( splitExtension x )
takeExtension :: FilePath -> String
takeExtension = snd . splitExtension
replaceExtension :: FilePath -> String -> FilePath
replaceExtension x y = dropExtension x <.> y
| to ' addExtension ' , for people who like that sort of thing .
(<.>) :: FilePath -> String -> FilePath
(<.>) = addExtension
dropExtension :: FilePath -> FilePath
dropExtension = fst . splitExtension
E.g. @addExtension \"foo.txt\ " \"bat\ " - > \"foo.txt.bat\"@.
addExtension :: FilePath -> String -> FilePath
addExtension file "" = file
addExtension file xs@(x:_) = joinDrive a res
where
res = if isExtSeparator x then b ++ xs
else b ++ [extSeparator] ++ xs
(a,b) = splitDrive file
hasExtension :: FilePath -> Bool
hasExtension = any isExtSeparator . takeFileName
splitExtensions :: FilePath -> (FilePath, String)
splitExtensions x = (a ++ c, d)
where
(a,b) = splitFileName x
(c,d) = break isExtSeparator b
dropExtensions :: FilePath -> FilePath
dropExtensions = fst . splitExtensions
takeExtensions :: FilePath -> String
takeExtensions = snd . splitExtensions
isLetter :: Char -> Bool
isLetter x = (x >= 'a' && x <= 'z') || (x >= 'A' && x <= 'Z')
On Unix , \/ is a Drive .
> Windows : splitDrive " c:\\file " = = ( " c:\\","file " )
> Windows : splitDrive " \\\\?\\UNC\\shared\\file " = = ( " \\\\?\\UNC\\shared\\","file " )
> Windows : splitDrive " /d " = = ( " /","d " )
> : splitDrive " /test " = = ( " /","test " )
> : splitDrive " //test " = = ( " //","test " )
> : splitDrive " test / file " = = ( " " , " test / file " )
> : splitDrive " file " = = ( " " , " file " )
splitDrive :: FilePath -> (FilePath, FilePath)
splitDrive x | isPosix = span (== '/') x
splitDrive x | isJust y = fromJust y
where y = readDriveLetter x
splitDrive x | isJust y = fromJust y
where y = readDriveUNC x
splitDrive x | isJust y = fromJust y
where y = readDriveShare x
splitDrive (x:xs) | isPathSeparator x = addSlash [x] xs
splitDrive x = ("",x)
addSlash a xs = (a++c,d)
where (c,d) = span isPathSeparator xs
a is " \\?\ "
readDriveUNC :: FilePath -> Maybe (FilePath, FilePath)
readDriveUNC (s1:s2:'?':s3:xs) | all isPathSeparator [s1,s2,s3] =
case map toUpper xs of
('U':'N':'C':s4:_) | isPathSeparator s4 ->
let (a,b) = readDriveShareName (drop 4 xs)
in Just (s1:s2:'?':s3:take 4 xs ++ a, b)
_ -> case readDriveLetter xs of
Just (a,b) -> Just (s1:s2:'?':s3:a,b)
Nothing -> Nothing
readDriveUNC x = Nothing
readDriveLetter :: String -> Maybe (FilePath, FilePath)
readDriveLetter (x:':':y:xs) | isLetter x && isPathSeparator y = Just $ addSlash [x,':'] (y:xs)
readDriveLetter (x:':':xs) | isLetter x = Just ([x,':'], xs)
readDriveLetter x = Nothing
readDriveShare :: String -> Maybe (FilePath, FilePath)
readDriveShare (s1:s2:xs) | isPathSeparator s1 && isPathSeparator s2 =
Just (s1:s2:a,b)
where (a,b) = readDriveShareName xs
readDriveShare x = Nothing
readDriveShareName :: String -> (FilePath, FilePath)
readDriveShareName name = addSlash a b
where (a,b) = break isPathSeparator name
joinDrive :: FilePath -> FilePath -> FilePath
joinDrive a b | isPosix = a ++ b
| null a = b
| null b = a
| isPathSeparator (last a) = a ++ b
| otherwise = case a of
[a1,':'] | isLetter a1 -> a ++ b
_ -> a ++ [pathSeparator] ++ b
| Set the drive , from the filepath .
> replaceDrive x ( takeDrive x ) = = x
replaceDrive :: FilePath -> String -> FilePath
replaceDrive x drv = joinDrive drv (dropDrive x)
> takeDrive x = = fst ( splitDrive x )
takeDrive :: FilePath -> FilePath
takeDrive = fst . splitDrive
> dropDrive x = = snd ( splitDrive x )
dropDrive :: FilePath -> FilePath
dropDrive = snd . splitDrive
> not ( hasDrive x ) = = null ( takeDrive x )
hasDrive :: FilePath -> Bool
hasDrive = not . null . takeDrive
isDrive :: FilePath -> Bool
isDrive = null . dropDrive
> splitFileName " bob " = = ( " " , " bob " )
> : splitFileName " / " = = ( " / " , " " )
splitFileName :: FilePath -> (String, String)
splitFileName x = (c ++ reverse b, reverse a)
where
(a,b) = break isPathSeparator $ reverse d
(c,d) = splitDrive x
replaceFileName :: FilePath -> String -> FilePath
replaceFileName x y = dropFileName x `combine` y
dropFileName :: FilePath -> FilePath
dropFileName = fst . splitFileName
> takeFileName x = = snd ( splitFileName x )
> takeFileName ( replaceFileName x " ) = = " fred "
> takeFileName ( combine x " ) = = " fred "
takeFileName :: FilePath -> FilePath
takeFileName = snd . splitFileName
> takeBaseName " dave.ext " = = " "
takeBaseName :: FilePath -> String
takeBaseName = dropExtension . takeFileName
> replaceBaseName " " " bill " = = " bill "
replaceBaseName :: FilePath -> String -> FilePath
replaceBaseName pth nam = combine a (addExtension nam ext)
where
(a,b) = splitFileName pth
ext = takeExtension b
hasTrailingPathSeparator :: FilePath -> Bool
hasTrailingPathSeparator "" = False
hasTrailingPathSeparator x = isPathSeparator (last x)
> : addTrailingPathSeparator " test / rest " = = " test / rest/ "
addTrailingPathSeparator :: FilePath -> FilePath
addTrailingPathSeparator x = if hasTrailingPathSeparator x then x else x ++ [pathSeparator]
> : dropTrailingPathSeparator " / " = = " / "
dropTrailingPathSeparator :: FilePath -> FilePath
dropTrailingPathSeparator x =
if hasTrailingPathSeparator x && not (isDrive x)
then reverse $ dropWhile isPathSeparator $ reverse x
else x
| Get the directory name , move up one level .
> : takeDirectory " /foo / bar / baz " = = " /foo / bar "
> : takeDirectory " /foo / bar / baz/ " = = " /foo / bar / baz "
> Windows : takeDirectory " foo\\bar\\\\ " = = " "
takeDirectory :: FilePath -> FilePath
takeDirectory x = if isDrive file then file
else if null res && not (null file) then file
else res
where
res = reverse $ dropWhile isPathSeparator $ reverse file
file = dropFileName x
replaceDirectory :: FilePath -> String -> FilePath
replaceDirectory x dir = combine dir (takeFileName x)
| Combine two paths , if the second path ' isAbsolute ' , then it returns the second .
> : combine " / " " test " = = " /test "
> : combine " home " " bob " = = " home / bob "
combine :: FilePath -> FilePath -> FilePath
combine a b | isAbsolute b || null a = b
| null b = a
| isPathSeparator (last a) = a ++ b
| isDrive a = joinDrive a b
| otherwise = a ++ [pathSeparator] ++ b
(</>) :: FilePath -> FilePath -> FilePath
(</>) = combine
> splitPath " test//item/ " = = [ " test//","item/ " ]
> : splitPath " /file / test " = = [ " /","file/","test " ]
splitPath :: FilePath -> [FilePath]
splitPath x = [a | a /= ""] ++ f b
where
(a,b) = splitDrive x
f "" = []
f x = (a++c) : f d
where
(a,b) = break isPathSeparator x
(c,d) = break (not . isPathSeparator) b
> joinPath ( splitDirectories ( ) ) ` equalFilePath ` makeValid x
splitDirectories :: FilePath -> [FilePath]
splitDirectories x =
if hasDrive x then head xs : f (tail xs)
else f xs
where
xs = splitPath x
f xs = map g xs
g x = if null res then x else res
where res = takeWhile (not . isPathSeparator) x
> joinPath ( splitPath ( ) ) = = makeValid x
joinPath :: [FilePath] -> FilePath
joinPath x = foldr combine "" x
| Equality of two ' FilePath 's .
If you call .
first this has a much better chance of working .
equalFilePath :: FilePath -> FilePath -> Bool
equalFilePath a b = f a == f b
where
f x | isPosix = dropTrailSlash $ normalise x
| otherwise = dropTrailSlash $ map toLower $ normalise x
dropTrailSlash "" = ""
dropTrailSlash x | isPathSeparator (last x) = init x
| otherwise = x
> : makeRelative " " " /home / bob / foo / bar " = = " bob / foo / bar "
> : makeRelative " /fred " " bob " = = " bob "
> : makeRelative " /file / test " " /file / test / fred " = = " fred "
> : makeRelative " /file / test " " /file / test / fred/ " = = " fred/ "
> : makeRelative " /fred / dave " " /fred / bill " = = " .. /bill "
makeRelative :: FilePath -> FilePath -> FilePath
makeRelative cur x | isRelative x || isRelative cur || not (takeDrive x `equalFilePath` takeDrive cur) = normalise x
makeRelative cur x = joinPath $
replicate (length curdir - common) ".." ++
drop common orgpth
where
common = length $ takeWhile id $ zipWith (==) orgdir curdir
orgpth = splitPath pth
orgdir = splitDirectories pth
curdir = splitDirectories $ dropDrive $ normalise $ cur
(drv,pth) = splitDrive $ normalise x
makeRelativeToCurrentDirectory :: FilePath -> IO FilePath
makeRelativeToCurrentDirectory x = do
cur <- getCurrentDirectory
return $ makeRelative cur x
> : normalise " /file/\\test//// " = = " /file/\\test/ "
> : normalise " /file/./test " = = " /file / test "
> : normalise " /test / file/ .. /bob / fred/ " = = " /test / file/ .. /bob / fred/ "
> : normalise " .. /bob / fred/ " = = " .. /bob / fred/ "
> : normalise " ./bob / fred/ " = = " bob / fred/ "
> Windows : normalise " c:/file " = = " C:\\file "
normalise :: FilePath -> FilePath
normalise "" = ""
normalise x = joinDrive (normaliseDrive drv) (f pth) ++ [pathSeparator | isPathSeparator $ last x]
where
(drv,pth) = splitDrive x
f = joinPath . dropDots [] . splitDirectories . propSep
g x = if isPathSeparator x then pathSeparator else x
propSep (a:b:xs) | isPathSeparator a && isPathSeparator b = propSep (a:xs)
propSep (a:xs) | isPathSeparator a = pathSeparator : propSep xs
propSep (x:xs) = x : propSep xs
propSep [] = []
dropDots acc (".":xs) = dropDots acc xs
dropDots acc (x:xs) = dropDots (x:acc) xs
dropDots acc [] = reverse acc
normaliseDrive :: FilePath -> FilePath
normaliseDrive x | isPosix = x
normaliseDrive x = if isJust $ readDriveLetter x2 then
map toUpper x2
else
x
where
x2 = map repSlash x
repSlash x = if isPathSeparator x then pathSeparator else x
information for validity functions on Windows
badCharacters = ":*?><|"
badElements = ["CON", "PRN", "AUX", "NUL", "COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9", "LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9", "CLOCK$"]
| Is a FilePath valid , i.e. could you create a file like it ?
> : isValid " /random _ path :* " = = True
> : isValid x = = True
isValid :: FilePath -> Bool
isValid x | isPosix = True
isValid x = not (any (`elem` badCharacters) x2) && not (any f $ splitDirectories x2)
where
x2 = dropDrive x
f x = map toUpper (dropExtensions x) `elem` badElements
| Take a FilePath and make it valid ; does not change already valid FilePaths .
> isValid ( )
> if isValid x then x = = x else True
makeValid :: FilePath -> FilePath
makeValid x | isPosix = x
makeValid x = joinDrive drv $ validElements $ validChars pth
where
(drv,pth) = splitDrive x
validChars x = map f x
f x | x `elem` badCharacters = '_'
| otherwise = x
validElements x = joinPath $ map g $ splitPath x
g x = h (reverse b) ++ reverse a
where (a,b) = span isPathSeparator $ reverse x
h x = if map toUpper a `elem` badElements then addExtension (a ++ "_") b else x
where (a,b) = splitExtensions x
> : isRelative " test / path " = = True
> : isRelative " /test " = = False
isRelative :: FilePath -> Bool
isRelative = null . takeDrive
| @not . ' isRelative'@
isAbsolute :: FilePath -> Bool
isAbsolute = not . isRelative
|
5fc0765894ffe16cb01c273e34ee1d61a3f0277cd17b2c8813ee62d15765f192 | raph-amiard/clojurescript-lua | cljsloader.clj | Copyright ( c ) . All rights reserved .
; The use and distribution terms for this software are covered by the
; Eclipse Public License 1.0 (-1.0.php)
; which can be found in the file epl-v10.html at the root of this distribution.
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
; You must not remove this notice, or any other, from this software.
(ns cljs.cljsloader
(:require [clojure.java.io :as io]))
(defn read-or-nil [rdr]
(try (read rdr) (catch RuntimeException e nil)))
(defn make-forms-seq
"Construct a lazy sequence of clojure forms from input f.
f can be anything that can be coerced to a reader"
[f]
(letfn [(forms-seq [rdr]
(let [form (read-or-nil rdr)]
(if (nil? form) []
(lazy-seq (cons form (forms-seq rdr))))))]
(forms-seq (java.io.PushbackReader. (io/reader f)))))
(defn keep-form? [form]
(contains? #{'ns 'def 'defn 'deftype 'extend-type} (first form)))
(defn signature [form]
(if (= 'defn (first form))
`(def ~(second form))
(take 2 form)))
(defn make-override-map [forms-seq]
(apply hash-map (mapcat (fn [a] [(signature a) a]) forms-seq)))
(defn core-forms-seq
"Will load every form from core.cljs, except those who are defined in override-file
override-file can be anything that can be coerced to a reader by io/reader"
([override-file & {:keys [replace-forms extra-file-before extra-file-after]}]
(let [core-forms (make-forms-seq (io/resource "cljs/core.cljs"))
override-map (-> override-file make-forms-seq make-override-map)
replace-forms (or replace-forms {})
forms-override (for [form core-forms]
(let [sig (signature form)]
(cond
(contains? override-map sig) (override-map sig)
(contains? replace-forms sig) (override-map (replace-forms sig))
:else form)))
forms-filtered (remove nil? forms-override)]
(lazy-cat (if extra-file-before (make-forms-seq extra-file-before) [])
forms-filtered
(if extra-file-after (make-forms-seq extra-file-after) []))))) | null | https://raw.githubusercontent.com/raph-amiard/clojurescript-lua/a1992a1666c59c8fcf7e8f7471e51b0e68183e72/src/cljs/cljsloader.clj | clojure | The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 (-1.0.php)
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software. | Copyright ( c ) . All rights reserved .
(ns cljs.cljsloader
(:require [clojure.java.io :as io]))
(defn read-or-nil [rdr]
(try (read rdr) (catch RuntimeException e nil)))
(defn make-forms-seq
"Construct a lazy sequence of clojure forms from input f.
f can be anything that can be coerced to a reader"
[f]
(letfn [(forms-seq [rdr]
(let [form (read-or-nil rdr)]
(if (nil? form) []
(lazy-seq (cons form (forms-seq rdr))))))]
(forms-seq (java.io.PushbackReader. (io/reader f)))))
(defn keep-form? [form]
(contains? #{'ns 'def 'defn 'deftype 'extend-type} (first form)))
(defn signature [form]
(if (= 'defn (first form))
`(def ~(second form))
(take 2 form)))
(defn make-override-map [forms-seq]
(apply hash-map (mapcat (fn [a] [(signature a) a]) forms-seq)))
(defn core-forms-seq
"Will load every form from core.cljs, except those who are defined in override-file
override-file can be anything that can be coerced to a reader by io/reader"
([override-file & {:keys [replace-forms extra-file-before extra-file-after]}]
(let [core-forms (make-forms-seq (io/resource "cljs/core.cljs"))
override-map (-> override-file make-forms-seq make-override-map)
replace-forms (or replace-forms {})
forms-override (for [form core-forms]
(let [sig (signature form)]
(cond
(contains? override-map sig) (override-map sig)
(contains? replace-forms sig) (override-map (replace-forms sig))
:else form)))
forms-filtered (remove nil? forms-override)]
(lazy-cat (if extra-file-before (make-forms-seq extra-file-before) [])
forms-filtered
(if extra-file-after (make-forms-seq extra-file-after) []))))) |
42795461a0cecd146ead734005ca38c12bfdfa4dcf1b511d28d98482a4a6d166 | bravit/hid-examples | Main.hs | import System.Environment
import System.TimeIt
import IsPrime
main :: IO ()
main = getArgs >>= timeIt . print . isPrime . read . head
| null | https://raw.githubusercontent.com/bravit/hid-examples/913e116b7ee9c7971bba10fe70ae0b61bfb9391b/ch09/isprime/Main.hs | haskell | import System.Environment
import System.TimeIt
import IsPrime
main :: IO ()
main = getArgs >>= timeIt . print . isPrime . read . head
| |
071c9b9e85004855a466a3a47e0463cc13c5581cff3222ba764002fc7241af10 | erlyaws/yaws | websockets_SUITE.erl | -module(websockets_SUITE).
-include("testsuite.hrl").
-compile(export_all).
%% In-memory representation of one WebSocket frame (RFC 6455, section 5.2).
-record(frame, {fin = true,     %% final fragment of the message?
                rsv = 0,        %% RSV1-RSV3 bits; must be 0 without extensions
                opcode,         %% one of the ?WS_OPCODE_* values below
                masked = false, %% payload masked? (required client->server)
                mask,           %% 4-byte masking key, used when masked =:= true
                payload = <<>>}).
%% Frame opcodes (RFC 6455, section 5.2).
-define(WS_OPCODE_CONTINUATION, 16#00).
-define(WS_OPCODE_TEXT, 16#01).
-define(WS_OPCODE_BINARY, 16#02).
-define(WS_OPCODE_CLOSE, 16#08).
-define(WS_OPCODE_PING, 16#09).
-define(WS_OPCODE_PONG, 16#0A).
%% Close status codes (RFC 6455, section 7.4.1).
-define(WS_STATUS_NORMAL, 1000).
-define(WS_STATUS_PROTO_ERROR, 1002).
-define(WS_STATUS_ABNORMAL_CLOSURE, 1006).
-define(WS_STATUS_INVALID_PAYLOAD, 1007).
-define(WS_STATUS_MSG_TOO_BIG, 1009).
-define(WS_STATUS_INTERNAL_ERROR, 1011).
%% Common Test entry point: run every test in the plain-TCP group, then the
%% TLS smoke-test group.
all() ->
    [
     {group, websocket_tests},
     {group, secure_websocket_tests}
    ].

%% websocket_tests exercises the RFC 6455 surface over plain TCP: opening
%% handshakes (valid and malformed), text/binary echo, ping/pong control
%% frames, reserved bits/opcodes, fragmentation, UTF-8 validation and close
%% semantics. secure_websocket_tests repeats one scenario over TLS.
groups() ->
    [
     {websocket_tests, [],
      [valid_opening_handshake,
       bad_version_handshake,
       bad_origin_handshake,
       noconnection_handshake,
       bad_connection_handshake,
       noupgrade_handshake,
       bad_upgrade_handshake,
       basic_unfragmented_text,
       advanced_unfragmented_text,
       basic_unfragmented_binary,
       advanced_unfragmented_binary,
       basic_ping_text,
       advanced_ping_text,
       basic_ping_binary,
       advanced_ping_binary,
       toolong_payload_ping,
       basic_unsolicited_pong,
       advanced_unsolicited_pong,
       basic_unsolicited_pong_ping_pong,
       advanced_unsolicited_pong_ping_pong,
       basic_10_pings,
       advanced_10_pings,
       badrsv_text,
       badrsv_binary,
       badrsv_ping,
       badrsv_close,
       badrsv_complex,
       badopcodes,
       basic_fragmented_empty,
       basic_fragmented_text_1,
       basic_fragmented_binary_1,
       basic_fragmented_text_2,
       basic_fragmented_binary_2,
       basic_fragmented_ping,
       basic_fragmented_pong,
       basic_fragmented_close,
       basic_fragmented_text_with_ping,
       basic_fragmented_text_with_pong,
       basic_badfragmented_1,
       basic_badfragmented_2,
       basic_badfragmented_nocontinuation,
       advanced_fragmented_empty,
       advanced_fragmented_text_1,
       advanced_fragmented_binary_1,
       advanced_fragmented_text_2,
       advanced_fragmented_binary_2,
       advanced_fragmented_ping,
       advanced_fragmented_pong,
       advanced_fragmented_close,
       advanced_fragmented_text_with_ping,
       advanced_fragmented_text_with_pong,
       advanced_badfragmented_1,
       advanced_badfragmented_2,
       advanced_badfragmented_nocontinuation,
       basic_unfragmented_valid_utf8_text,
       basic_fragmented_valid_utf8_text,
       advanced_unfragmented_valid_utf8_text,
       advanced_fragmented_valid_utf8_text,
       basic_unfragmented_invalid_utf8_text,
       basic_fragmented_invalid_utf8_text,
       advanced_unfragmented_invalid_utf8_text,
       advanced_fragmented_invalid_utf8_text,
       basic_2_closes,
       basic_close_ping,
       basic_close_text,
       basic_fragtext_close_fragtext,
       basic_close_empty,
       basic_close_toosmall,
       basic_close_statusonly,
       basic_close_with_reason,
       basic_close_limit_size,
       basic_close_toolong,
       basic_close_invalid_utf8,
       basic_close_valid_codes,
       basic_close_invalid_codes,
       advanced_2_closes,
       advanced_close_ping,
       advanced_close_text,
       advanced_fragtext_close_fragtext,
       advanced_close_empty,
       advanced_close_toosmall,
       advanced_close_statusonly,
       advanced_close_with_reason,
       advanced_close_limit_size,
       advanced_close_toolong,
       advanced_close_invalid_utf8,
       advanced_close_valid_codes,
       advanced_close_invalid_codes,
       close_timeout,
       keepalive_timeout,
       too_big_frame,
       too_big_message,
       close_unmasked_frame]},
     {secure_websocket_tests, [], [secure_websocket]}
    ].
%%====================================================================
%% Start one embedded yaws instance for the whole suite; the per-group
%% callbacks below add the actual HTTP/HTTPS server configurations to it.
init_per_suite(Config) ->
    Id = "testsuite-server",
    GConf = [
             {logdir, ?config(priv_dir, Config)},
             {trace, false},
             {flags, [{copy_error_log, true}]},
             {keepalive_timeout, 10000},
             {acceptor_pool_size, 32}
            ],
    ok = yaws:start_embedded(?wwwdir, [], GConf, Id),
    [{yaws_id, Id} | Config].

end_per_suite(_Config) ->
    ok.

%% Plain-HTTP listener for the websocket_tests group; TLS listener (using
%% the suite's test key/cert) for secure_websocket_tests.
init_per_group(websocket_tests, Config) ->
    SConfHTTP = [
                 {docroot, ?wwwdir},
                 {port, testsuite:get_yaws_port(1, Config)},
                 {listen, {127,0,0,1}},
                 {flags, [{access_log, true}]},
                 {servername, "localhost"}
                ],
    {ok, _} = testsuite:add_yaws_server(?wwwdir, SConfHTTP),
    Config;
init_per_group(secure_websocket_tests, Config) ->
    SConfHTTPS = [
                  {docroot, ?wwwdir},
                  {port, testsuite:get_yaws_port(1, Config)},
                  {listen, {127,0,0,1}},
                  {flags, [{access_log, true}]},
                  {servername, "localhost"},
                  {ssl, [
                         {keyfile, ?sslkeyfile},
                         {certfile, ?sslcertfile},
                         {depth, 0}
                        ]}
                 ],
    {ok, _} = testsuite:add_yaws_server(?wwwdir, SConfHTTPS),
    Config.

%% Tear down whatever server the group installed so groups stay independent.
end_per_group(_Group, _Config) ->
    testsuite:reset_yaws_servers(),
    ok.

init_per_testcase(_Test, Config) ->
    Config.

end_per_testcase(_Test, _Config) ->
    ok.
%%====================================================================
%% A well-formed opening handshake must yield "101 Switching Protocols" with
%% a Sec-WebSocket-Accept value derived from our key (RFC 6455, sect. 4.2.2)
%% and the Upgrade/Connection headers set, followed by a clean close.
valid_opening_handshake(Config) ->
    WSPath = "/websockets_example_endpoint.yaws",
    Key = "dGhlIHNhbXBsZSBub25jZQ==",

    %% Send the handshake and retrieve the response
    {ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
    {ok, {101, Hds}} = wsopen(Sock, Key, WSPath, "", 13),

    %% Check the server response
    ?assert(is_valid_handshake_hash(
              Key, proplists:get_value("sec-websocket-accept", Hds)
             )),
    ?assertEqual("websocket", string:to_lower(proplists:get_value("upgrade", Hds))),
    ?assertEqual("upgrade", string:to_lower(proplists:get_value("connection", Hds))),

    %% Close the websocket and check the server reply
    ?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
    {ok, Frames} = wsflush(Sock, true),
    ?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
    ?assertEqual(ok, close(Sock)),
    ok.
%% An unsupported Sec-WebSocket-Version (15) must be refused with 400 and a
%% Sec-WebSocket-Version header advertising the versions the server accepts.
bad_version_handshake(Config) ->
    WSPath = "/websockets_example_endpoint.yaws",
    Key = "dGhlIHNhbXBsZSBub25jZQ==",

    %% Send the handshake and retrieve the response
    {ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
    {ok, {400, Hds}} = wsopen(Sock, Key, WSPath, "", 15),

    %% Check the server response
    ?assertEqual("13, 8", string:to_lower(
                            proplists:get_value("sec-websocket-version", Hds)
                           )),
    ?assertEqual(ok, close(Sock)),
    ok.
%% A handshake carrying an Origin the endpoint does not accept must be
%% rejected with "403 Forbidden"; only the status code matters here.
bad_origin_handshake(Config) ->
    WSPath = "/websockets_example_endpoint.yaws",
    Key = "dGhlIHNhbXBsZSBub25jZQ==",
    Port = testsuite:get_yaws_port(1, Config),
    {ok, Sock} = open("localhost", Port),
    %% Attempt the opening handshake and expect the refusal.
    {ok, {403, _Hdrs}} = wsopen(Sock, Key, WSPath, "", 13),
    ?assertEqual(ok, close(Sock)),
    ok.
%% Hand-built handshake with NO Connection header: must be refused with 400
%% (RFC 6455 requires "Connection: Upgrade").
noconnection_handshake(Config) ->
    WSPath = "/websockets_example_endpoint.yaws",
    Key = "dGhlIHNhbXBsZSBub25jZQ==",

    %% Send the handshake and retrieve the response
    {ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
    Handshake = ["GET ", WSPath, " HTTP/1.1\r\n",
                 "Host: localhost\r\n",
                 "Upgrade: websocket\r\n",
                 "Sec-WebSocket-Key: ", Key, "\r\n",
                 "Origin: \r\n",
                 "Sec-WebSocket-Version: 13\r\n",
                 "\r\n"],
    %% Write raw bytes over whichever transport the test socket uses.
    case yaws_api:get_sslsocket(Sock) of
        {ok, SslSock} -> ssl:send(SslSock, Handshake);
        undefined -> gen_tcp:send(Sock, Handshake)
    end,
    ?assertMatch({ok, {400, _}}, read_handshake_response(Sock)),
    ?assertEqual(ok, close(Sock)),
    ok.

%% Connection header present but not "Upgrade" ("Keep-Alive"): refuse with 400.
bad_connection_handshake(Config) ->
    WSPath = "/websockets_example_endpoint.yaws",
    Key = "dGhlIHNhbXBsZSBub25jZQ==",

    %% Send the handshake and retrieve the response
    {ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
    Handshake = ["GET ", WSPath, " HTTP/1.1\r\n",
                 "Host: localhost\r\n",
                 "Upgrade: websocket\r\n",
                 "Connection: Keep-Alive\r\n",
                 "Sec-WebSocket-Key: ", Key, "\r\n",
                 "Origin: \r\n",
                 "Sec-WebSocket-Version: 13\r\n",
                 "\r\n"],
    case yaws_api:get_sslsocket(Sock) of
        {ok, SslSock} -> ssl:send(SslSock, Handshake);
        undefined -> gen_tcp:send(Sock, Handshake)
    end,
    ?assertMatch({ok, {400, _}}, read_handshake_response(Sock)),
    ?assertEqual(ok, close(Sock)),
    ok.

%% No Upgrade header at all: refuse with 400.
noupgrade_handshake(Config) ->
    WSPath = "/websockets_example_endpoint.yaws",
    Key = "dGhlIHNhbXBsZSBub25jZQ==",

    %% Send the handshake and retrieve the response
    {ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
    Handshake = ["GET ", WSPath, " HTTP/1.1\r\n",
                 "Host: localhost\r\n",
                 "Connection: Upgrade\r\n",
                 "Sec-WebSocket-Key: ", Key, "\r\n",
                 "Origin: \r\n",
                 "Sec-WebSocket-Version: 13\r\n",
                 "\r\n"],
    case yaws_api:get_sslsocket(Sock) of
        {ok, SslSock} -> ssl:send(SslSock, Handshake);
        undefined -> gen_tcp:send(Sock, Handshake)
    end,
    ?assertMatch({ok, {400, _}}, read_handshake_response(Sock)),
    ?assertEqual(ok, close(Sock)),
    ok.

%% Upgrade header present but not "websocket" ("TLS/1.0"): refuse with 400.
bad_upgrade_handshake(Config) ->
    WSPath = "/websockets_example_endpoint.yaws",
    Key = "dGhlIHNhbXBsZSBub25jZQ==",

    %% Send the handshake and retrieve the response
    {ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
    Handshake = ["GET ", WSPath, " HTTP/1.1\r\n",
                 "Host: localhost\r\n",
                 "Upgrade: TLS/1.0\r\n",
                 "Connection: Upgrade\r\n",
                 "Sec-WebSocket-Key: ", Key, "\r\n",
                 "Origin: \r\n",
                 "Sec-WebSocket-Version: 13\r\n",
                 "\r\n"],
    case yaws_api:get_sslsocket(Sock) of
        {ok, SslSock} -> ssl:send(SslSock, Handshake);
        undefined -> gen_tcp:send(Sock, Handshake)
    end,
    ?assertMatch({ok, {400, _}}, read_handshake_response(Sock)),
    ?assertEqual(ok, close(Sock)),
    ok.
%% Echo unfragmented TEXT messages with payload sizes chosen around the
%% WebSocket length-encoding boundaries (7-bit: <=125, 16-bit: <=65535,
%% 64-bit above), plus one run where the frame is sent in 997-byte chunks.
basic_unfragmented_text(Config) ->
    Cases = [{0, all}, {125, all}, {126, all}, {127, all}, {128, all},
             {65535, all}, {65536, all}, {65536, 997}],
    lists:foreach(fun({Sz, BlockSz}) ->
                          basic_unfragmented_text(Config, Sz, BlockSz)
                  end, Cases),
    ok.

%% Same boundary sizes, against the autobahn endpoint.
advanced_unfragmented_text(Config) ->
    Cases = [{0, all}, {125, all}, {126, all}, {127, all}, {128, all},
             {65535, all}, {65536, all}, {65536, 997}],
    lists:foreach(fun({Sz, BlockSz}) ->
                          advanced_unfragmented_text(Config, Sz, BlockSz)
                  end, Cases),
    ok.

%% As above, with BINARY frames.
basic_unfragmented_binary(Config) ->
    Cases = [{0, all}, {125, all}, {126, all}, {127, all}, {128, all},
             {65535, all}, {65536, all}, {65536, 997}],
    lists:foreach(fun({Sz, BlockSz}) ->
                          basic_unfragmented_binary(Config, Sz, BlockSz)
                  end, Cases),
    ok.

%% BINARY frames against the autobahn endpoint.
advanced_unfragmented_binary(Config) ->
    Cases = [{0, all}, {125, all}, {126, all}, {127, all}, {128, all},
             {65535, all}, {65536, all}, {65536, 997}],
    lists:foreach(fun({Sz, BlockSz}) ->
                          advanced_unfragmented_binary(Config, Sz, BlockSz)
                  end, Cases),
    ok.
%% Size-parameterized wrappers: build a Sz-byte payload and run the generic
%% echo scenario against the example or autobahn endpoint.
basic_unfragmented_text(Config, Sz, BlockSz) ->
    Payload = list_to_binary(lists:duplicate(Sz, $*)),
    unfragmented_msg(Config, "/websockets_example_endpoint.yaws",
                     ?WS_OPCODE_TEXT, Payload, BlockSz).

advanced_unfragmented_text(Config, Sz, BlockSz) ->
    Payload = list_to_binary(lists:duplicate(Sz, $*)),
    unfragmented_msg(Config, "/websockets_autobahn_endpoint.yaws",
                     ?WS_OPCODE_TEXT, Payload, BlockSz).

basic_unfragmented_binary(Config, Sz, BlockSz) ->
    Payload = list_to_binary(lists:duplicate(Sz, 16#fe)),
    unfragmented_msg(Config, "/websockets_example_endpoint.yaws",
                     ?WS_OPCODE_BINARY, Payload, BlockSz).

advanced_unfragmented_binary(Config, Sz, BlockSz) ->
    Payload = list_to_binary(lists:duplicate(Sz, 16#fe)),
    unfragmented_msg(Config, "/websockets_autobahn_endpoint.yaws",
                     ?WS_OPCODE_BINARY, Payload, BlockSz).

%% Open a websocket on WSPath, send one unfragmented frame of the given
%% Type/Payload first unmasked then masked (BlockSz controls how the bytes
%% are chunked on the wire), expect the identical message echoed back each
%% time, then perform a clean close handshake.
unfragmented_msg(Config, WSPath, Type, Payload, BlockSz) ->
    Key = "dGhlIHNhbXBsZSBub25jZQ==",
    {ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
    ?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),

    %% unmasked
    SndFrame1 = #frame{opcode=Type, payload=Payload},
    ?assertEqual(ok, send_frame(Sock, SndFrame1, BlockSz)),
    {ok, RcvFrame1} = read_frame(Sock),
    ?assertEqual(Type, RcvFrame1#frame.opcode),
    ?assertEqual(Payload, RcvFrame1#frame.payload),

    %% masked
    SndFrame2 = SndFrame1#frame{masked=true, mask = <<"abcd">>},
    ?assertEqual(ok, send_frame(Sock, SndFrame2, BlockSz)),
    {ok, RcvFrame2} = read_frame(Sock),
    ?assertEqual(Type, RcvFrame2#frame.opcode),
    ?assertEqual(Payload, RcvFrame2#frame.payload),

    ?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
    {ok, Frames} = wsflush(Sock, true),
    ?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
    ?assertEqual(ok, close(Sock)),
    ok.
%% PING with text payloads: empty and the 125-byte control-frame maximum.
basic_ping_text(Config) ->
    lists:foreach(fun({Sz, BlockSz}) ->
                          basic_ping_text(Config, Sz, BlockSz)
                  end, [{0, all}, {125, all}]),
    ok.

%% PING with a maximal binary payload, sent whole and then byte-by-byte.
basic_ping_binary(Config) ->
    lists:foreach(fun({Sz, BlockSz}) ->
                          basic_ping_binary(Config, Sz, BlockSz)
                  end, [{125, all}, {125, 1}]),
    ok.

advanced_ping_text(Config) ->
    lists:foreach(fun({Sz, BlockSz}) ->
                          advanced_ping_text(Config, Sz, BlockSz)
                  end, [{0, all}, {125, all}]),
    ok.

advanced_ping_binary(Config) ->
    lists:foreach(fun({Sz, BlockSz}) ->
                          advanced_ping_binary(Config, Sz, BlockSz)
                  end, [{125, all}, {125, 1}]),
    ok.
%% Size-parameterized wrappers for the generic ping scenario below.
basic_ping_text(Config, Sz, BlockSz) ->
    Payload = list_to_binary(lists:duplicate(Sz, $*)),
    ping_msg(Config, "/websockets_example_endpoint.yaws", Payload, BlockSz).

advanced_ping_text(Config, Sz, BlockSz) ->
    Payload = list_to_binary(lists:duplicate(Sz, $*)),
    ping_msg(Config, "/websockets_autobahn_endpoint.yaws", Payload, BlockSz).

basic_ping_binary(Config, Sz, BlockSz) ->
    Payload = list_to_binary(lists:duplicate(Sz, 16#fe)),
    ping_msg(Config, "/websockets_example_endpoint.yaws", Payload, BlockSz).

advanced_ping_binary(Config, Sz, BlockSz) ->
    Payload = list_to_binary(lists:duplicate(Sz, 16#fe)),
    ping_msg(Config, "/websockets_autobahn_endpoint.yaws", Payload, BlockSz).

%% Send a PING (first unmasked, then masked); each must be answered by a
%% PONG echoing the exact payload (RFC 6455, sect. 5.5.3), then close.
ping_msg(Config, WSPath, Payload, BlockSz) ->
    Key = "dGhlIHNhbXBsZSBub25jZQ==",
    {ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
    ?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),

    %% unmasked
    SndFrame1 = #frame{opcode=?WS_OPCODE_PING, payload=Payload},
    ?assertEqual(ok, send_frame(Sock, SndFrame1, BlockSz)),
    {ok, RcvFrame1} = read_frame(Sock),
    ?assertEqual(?WS_OPCODE_PONG, RcvFrame1#frame.opcode),
    ?assertEqual(Payload, RcvFrame1#frame.payload),

    %% masked
    SndFrame2 = SndFrame1#frame{masked=true, mask = <<"abcd">>},
    ?assertEqual(ok, send_frame(Sock, SndFrame2, BlockSz)),
    {ok, RcvFrame2} = read_frame(Sock),
    ?assertEqual(?WS_OPCODE_PONG, RcvFrame2#frame.opcode),
    ?assertEqual(Payload, RcvFrame2#frame.payload),

    ?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
    {ok, Frames} = wsflush(Sock, true),
    ?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
    ?assertEqual(ok, close(Sock)),
    ok.
%% Control frames may carry at most 125 payload bytes (RFC 6455, sect. 5.5);
%% a 126-byte PING must make the server close with 1002 (protocol error).
toolong_payload_ping(Config) ->
    WSPath = "/websockets_example_endpoint.yaws",
    Key = "dGhlIHNhbXBsZSBub25jZQ==",
    Payload = list_to_binary(lists:duplicate(126, 16#fe)),

    {ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
    ?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
    SndFrame = #frame{opcode=?WS_OPCODE_PING, payload=Payload},
    ?assertEqual(ok, send_frame(Sock, SndFrame, all)),
    {ok, Frames} = wsflush(Sock, false),
    ?assert(is_valid_close_frame(Frames, [?WS_STATUS_PROTO_ERROR])),
    %% The server must not send anything after its close frame.
    ?assertEqual({ok, []}, wsflush(Sock, true)),
    ?assertEqual(ok, close(Sock)),
    ok.
%% An unsolicited PONG (no preceding PING) must simply be ignored by the
%% server (RFC 6455, sect. 5.5.3).
basic_unsolicited_pong(Config) ->
    basic_unsolicited_pong(Config, 0),
    basic_unsolicited_pong(Config, 125),
    ok.

advanced_unsolicited_pong(Config) ->
    advanced_unsolicited_pong(Config, 0),
    advanced_unsolicited_pong(Config, 125),
    ok.

basic_unsolicited_pong(Config, Sz) ->
    Payload = list_to_binary(lists:duplicate(Sz, 16#fe)),
    unsolicited_pong_msg(Config, "/websockets_example_endpoint.yaws", Payload).

advanced_unsolicited_pong(Config, Sz) ->
    Payload = list_to_binary(lists:duplicate(Sz, 16#fe)),
    unsolicited_pong_msg(Config, "/websockets_autobahn_endpoint.yaws", Payload).

%% Send one unsolicited PONG; the only traffic expected back is the reply
%% to our close handshake.
unsolicited_pong_msg(Config, WSPath, Payload) ->
    Key = "dGhlIHNhbXBsZSBub25jZQ==",
    {ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
    ?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
    SndFrame = #frame{opcode=?WS_OPCODE_PONG, payload=Payload},
    ?assertEqual(ok, send_frame(Sock, SndFrame, all)),
    ?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
    {ok, Frames} = wsflush(Sock, true),
    ?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
    ?assertEqual(ok, close(Sock)),
    ok.

basic_unsolicited_pong_ping_pong(Config) ->
    unsolicited_pong_ping_pong(Config, "/websockets_example_endpoint.yaws").

advanced_unsolicited_pong_ping_pong(Config) ->
    unsolicited_pong_ping_pong(Config, "/websockets_autobahn_endpoint.yaws").

%% An unsolicited PONG followed by a PING: the PONG is ignored and the PING
%% is answered normally, echoing the PING's payload.
unsolicited_pong_ping_pong(Config, WSPath) ->
    Key = "dGhlIHNhbXBsZSBub25jZQ==",
    Payload1 = list_to_binary(lists:duplicate(125, $*)),
    Payload2 = <<"ping payload">>,

    {ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
    ?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
    SndFrame1 = #frame{opcode=?WS_OPCODE_PONG, payload=Payload1},
    ?assertEqual(ok, send_frame(Sock, SndFrame1, all)),
    SndFrame2 = #frame{opcode=?WS_OPCODE_PING, payload=Payload2},
    ?assertEqual(ok, send_frame(Sock, SndFrame2, all)),
    {ok, RcvFrame2} = read_frame(Sock),
    ?assertEqual(?WS_OPCODE_PONG, RcvFrame2#frame.opcode),
    ?assertEqual(Payload2, RcvFrame2#frame.payload),
    ?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
    {ok, Frames} = wsflush(Sock, true),
    ?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
    ?assertEqual(ok, close(Sock)),
    ok.
%% Ten back-to-back pings, sent whole and then byte-by-byte.
basic_10_pings(Config) ->
    basic_10_pings(Config, all),
    basic_10_pings(Config, 1),
    ok.

advanced_10_pings(Config) ->
    advanced_10_pings(Config, all),
    advanced_10_pings(Config, 1),
    ok.

basic_10_pings(Config, BlockSz) ->
    send_10_pings(Config, "/websockets_example_endpoint.yaws", BlockSz).

advanced_10_pings(Config, BlockSz) ->
    send_10_pings(Config, "/websockets_autobahn_endpoint.yaws", BlockSz).
%% Send ten PING frames back-to-back without waiting for replies; the server
%% must answer each with a PONG carrying the identical payload, then complete
%% a clean close handshake.
send_10_pings(Config, WSPath, BlockSz) ->
    Key = "dGhlIHNhbXBsZSBub25jZQ==",
    Payload = <<"ping payload">>,

    {ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
    ?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
    SndFrame = #frame{opcode=?WS_OPCODE_PING, payload=Payload},
    %% lists:foreach/2 instead of a list comprehension: only the side effect
    %% is wanted, not a throwaway list of ten 'ok's.
    lists:foreach(fun(_) ->
                          ?assertEqual(ok, send_frame(Sock, SndFrame, BlockSz))
                  end, lists:seq(1, 10)),
    ?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
    {ok, Frames} = wsflush(Sock, true),
    {Pongs, Rest} = lists:split(10, Frames),
    %% Check the opcode as well as the payload: all ten replies must be PONG
    %% frames echoing our payload (previously only the payload was checked).
    ?assert(lists:all(fun(#frame{opcode=O, payload=P}) ->
                              O == ?WS_OPCODE_PONG andalso P == Payload
                      end, Pongs)),
    ?assert(is_valid_close_frame(Rest, [?WS_STATUS_NORMAL])),
    ?assertEqual(ok, close(Sock)),
    ok.
%% Frames with non-zero RSV bits must be rejected (no extension negotiated);
%% try each frame type with a different RSV value.
badrsv_text(Config) ->
    badrsv(Config, "/websockets_example_endpoint.yaws", ?WS_OPCODE_TEXT, 1).

badrsv_binary(Config) ->
    badrsv(Config, "/websockets_example_endpoint.yaws", ?WS_OPCODE_BINARY, 2).

badrsv_ping(Config) ->
    badrsv(Config, "/websockets_example_endpoint.yaws", ?WS_OPCODE_PING, 3).

badrsv_close(Config) ->
    badrsv(Config, "/websockets_example_endpoint.yaws", ?WS_OPCODE_CLOSE, 4).

%% A single frame with RSV /= 0 must trigger a 1002 (protocol error) close
%% and nothing afterwards (RFC 6455, sect. 5.2).
badrsv(Config, WSPath, Type, Rsv) ->
    Key = "dGhlIHNhbXBsZSBub25jZQ==",
    Payload = <<"small payload">>,

    {ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
    ?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
    SndFrame = #frame{rsv=Rsv, opcode=Type, payload=Payload},
    ?assertEqual(ok, send_frame(Sock, SndFrame, all)),
    {ok, Frames} = wsflush(Sock, false),
    ?assert(is_valid_close_frame(Frames, [?WS_STATUS_PROTO_ERROR])),
    ?assertEqual({ok, []}, wsflush(Sock, true)),
    ?assertEqual(ok, close(Sock)),
    ok.

badrsv_complex(Config) ->
    badrsv_complex(Config, all),
    badrsv_complex(Config, 1),
    ok.

%% A valid TEXT frame, then an RSV-tainted one, then a PING: the first frame
%% is echoed, the bad frame triggers the protocol-error close, and the PING
%% after it must never be answered.
badrsv_complex(Config, BlockSz) ->
    WSPath = "/websockets_example_endpoint.yaws",
    Key = "dGhlIHNhbXBsZSBub25jZQ==",
    Payload = <<"small payload">>,

    {ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
    ?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
    SndFrame1 = #frame{opcode=?WS_OPCODE_TEXT, payload=Payload},
    SndFrame2 = SndFrame1#frame{rsv=5},
    SndFrame3 = #frame{opcode=?WS_OPCODE_PING, payload=Payload},
    ?assertEqual(ok, send_frame(Sock, SndFrame1, BlockSz)),
    ?assertEqual(ok, send_frame(Sock, SndFrame2, BlockSz)),
    ?assertEqual(ok, send_frame(Sock, SndFrame3, BlockSz)),
    {ok, [Frame1|Frames]} = wsflush(Sock, false),
    ?assertEqual(?WS_OPCODE_TEXT, Frame1#frame.opcode),
    ?assertEqual(Payload, Frame1#frame.payload),
    ?assert(is_valid_close_frame(Frames, [?WS_STATUS_PROTO_ERROR])),
    ?assertEqual({ok, []}, wsflush(Sock, true)),
    ?assertEqual(ok, close(Sock)),
    ok.

%% Every reserved/unknown opcode must be rejected with 1002.
badopcodes(Config) ->
    [badopcodes(Config, O) || O <- [3,4,5,6,7,11,12,13,14,15]],
    ok.

badopcodes(Config, Opcode) ->
    WSPath = "/websockets_example_endpoint.yaws",
    Key = "dGhlIHNhbXBsZSBub25jZQ==",

    {ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
    ?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
    SndFrame = #frame{opcode=Opcode},
    ?assertEqual(ok, send_frame(Sock, SndFrame, all)),
    {ok, Frames} = wsflush(Sock, false),
    ?assert(is_valid_close_frame(Frames, [?WS_STATUS_PROTO_ERROR])),
    ?assertEqual({ok, []}, wsflush(Sock, true)),
    ?assertEqual(ok, close(Sock)),
    ok.
%% A fully-empty fragmented message (three empty fragments) must be
%% reassembled into one empty TEXT message.
basic_fragmented_empty(Config) ->
    fragmented_empty(Config, "/websockets_example_endpoint.yaws").

advanced_fragmented_empty(Config) ->
    fragmented_empty(Config, "/websockets_autobahn_endpoint.yaws").

fragmented_empty(Config, WSPath) ->
    Key = "dGhlIHNhbXBsZSBub25jZQ==",
    {ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
    ?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
    %% First fragment carries the real opcode; the rest are continuations,
    %% with fin set only on the last one.
    SndFrame1 = #frame{fin=false, opcode=?WS_OPCODE_TEXT},
    SndFrame2 = #frame{fin=false, opcode=?WS_OPCODE_CONTINUATION},
    SndFrame3 = #frame{opcode=?WS_OPCODE_CONTINUATION},
    ?assertEqual(ok, send_frame(Sock, SndFrame1, all)),
    ?assertEqual(ok, send_frame(Sock, SndFrame2, all)),
    ?assertEqual(ok, send_frame(Sock, SndFrame3, all)),
    ?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
    {ok, [Frame1|Frames]} = wsflush(Sock, true),
    ?assertEqual(?WS_OPCODE_TEXT, Frame1#frame.opcode),
    ?assertEqual(<<>>, Frame1#frame.payload),
    ?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
    ?assertEqual(ok, close(Sock)),
    ok.

%% Three non-empty fragments must be reassembled in order.
basic_fragmented_text_1(Config) ->
    valid_fragmented_1(Config, "/websockets_example_endpoint.yaws",
                       ?WS_OPCODE_TEXT).

advanced_fragmented_text_1(Config) ->
    valid_fragmented_1(Config, "/websockets_autobahn_endpoint.yaws",
                       ?WS_OPCODE_TEXT).

basic_fragmented_binary_1(Config) ->
    valid_fragmented_1(Config, "/websockets_example_endpoint.yaws",
                       ?WS_OPCODE_BINARY).

advanced_fragmented_binary_1(Config) ->
    valid_fragmented_1(Config, "/websockets_autobahn_endpoint.yaws",
                       ?WS_OPCODE_BINARY).

valid_fragmented_1(Config, WSPath, Type) ->
    Key = "dGhlIHNhbXBsZSBub25jZQ==",
    Payload1 = <<"fragment1">>,
    Payload2 = <<"fragment2">>,
    Payload3 = <<"fragment3">>,
    Payload = <<Payload1/binary, Payload2/binary, Payload3/binary>>,

    {ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
    ?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
    SndFrame1 = #frame{fin=false, opcode=Type, payload=Payload1},
    SndFrame2 = #frame{fin=false, opcode=?WS_OPCODE_CONTINUATION, payload=Payload2},
    SndFrame3 = #frame{opcode=?WS_OPCODE_CONTINUATION, payload=Payload3},
    ?assertEqual(ok, send_frame(Sock, SndFrame1, all)),
    ?assertEqual(ok, send_frame(Sock, SndFrame2, all)),
    ?assertEqual(ok, send_frame(Sock, SndFrame3, all)),
    ?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
    {ok, [Frame1|Frames]} = wsflush(Sock, true),
    %% The echo must come back as a single message of the original type.
    ?assertEqual(Type, Frame1#frame.opcode),
    ?assertEqual(Payload, Frame1#frame.payload),
    ?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
    ?assertEqual(ok, close(Sock)),
    ok.
%% Fragmented message where only the middle fragment carries payload.
basic_fragmented_text_2(Config) ->
    valid_fragmented_2(Config, "/websockets_example_endpoint.yaws",
                       ?WS_OPCODE_TEXT).

advanced_fragmented_text_2(Config) ->
    valid_fragmented_2(Config, "/websockets_autobahn_endpoint.yaws",
                       ?WS_OPCODE_TEXT).

basic_fragmented_binary_2(Config) ->
    valid_fragmented_2(Config, "/websockets_example_endpoint.yaws",
                       ?WS_OPCODE_BINARY).

advanced_fragmented_binary_2(Config) ->
    valid_fragmented_2(Config, "/websockets_autobahn_endpoint.yaws",
                       ?WS_OPCODE_BINARY).

valid_fragmented_2(Config, WSPath, Type) ->
    Key = "dGhlIHNhbXBsZSBub25jZQ==",
    Payload = <<"fragment">>,

    {ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
    ?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
    SndFrame1 = #frame{fin=false, opcode=Type},
    SndFrame2 = #frame{fin=false, opcode=?WS_OPCODE_CONTINUATION, payload=Payload},
    SndFrame3 = #frame{opcode=?WS_OPCODE_CONTINUATION},
    ?assertEqual(ok, send_frame(Sock, SndFrame1, all)),
    ?assertEqual(ok, send_frame(Sock, SndFrame2, all)),
    ?assertEqual(ok, send_frame(Sock, SndFrame3, all)),
    ?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
    {ok, [Frame1|Frames]} = wsflush(Sock, true),
    ?assertEqual(Type, Frame1#frame.opcode),
    ?assertEqual(Payload, Frame1#frame.payload),
    ?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
    ?assertEqual(ok, close(Sock)),
    ok.

%% Control frames (PING/PONG/CLOSE) must never be fragmented
%% (RFC 6455, sect. 5.5): expect a 1002 close.
basic_fragmented_ping(Config) ->
    invalid_fragmented(Config, "/websockets_example_endpoint.yaws",
                       ?WS_OPCODE_PING).

advanced_fragmented_ping(Config) ->
    invalid_fragmented(Config, "/websockets_autobahn_endpoint.yaws",
                       ?WS_OPCODE_PING).

basic_fragmented_pong(Config) ->
    invalid_fragmented(Config, "/websockets_example_endpoint.yaws",
                       ?WS_OPCODE_PONG).

advanced_fragmented_pong(Config) ->
    invalid_fragmented(Config, "/websockets_autobahn_endpoint.yaws",
                       ?WS_OPCODE_PONG).

basic_fragmented_close(Config) ->
    invalid_fragmented(Config, "/websockets_example_endpoint.yaws",
                       ?WS_OPCODE_CLOSE).

advanced_fragmented_close(Config) ->
    invalid_fragmented(Config, "/websockets_autobahn_endpoint.yaws",
                       ?WS_OPCODE_CLOSE).

invalid_fragmented(Config, WSPath, Type) ->
    Key = "dGhlIHNhbXBsZSBub25jZQ==",
    Payload1 = <<"fragment1">>,
    Payload2 = <<"fragment2">>,

    {ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
    ?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
    %% fin=false on a control frame is the protocol violation under test.
    SndFrame1 = #frame{fin=false, opcode=Type, payload=Payload1},
    SndFrame2 = #frame{opcode=?WS_OPCODE_CONTINUATION, payload=Payload2},
    ?assertEqual(ok, send_frame(Sock, SndFrame1, all)),
    ?assertEqual(ok, send_frame(Sock, SndFrame2, all)),
    {ok, Frames} = wsflush(Sock, false),
    ?assert(is_valid_close_frame(Frames, [?WS_STATUS_PROTO_ERROR])),
    ?assertEqual({ok, []}, wsflush(Sock, true)),
    ?assertEqual(ok, close(Sock)),
    ok.
%% Control frames may be interleaved between the fragments of a message
%% (RFC 6455, sect. 5.4): a PING injected mid-message must be answered
%% before the reassembled TEXT echo arrives.
basic_fragmented_text_with_ping(Config) ->
    fragmented_with_ping(Config, "/websockets_example_endpoint.yaws").

advanced_fragmented_text_with_ping(Config) ->
    fragmented_with_ping(Config, "/websockets_autobahn_endpoint.yaws").

fragmented_with_ping(Config, WSPath) ->
    Key = "dGhlIHNhbXBsZSBub25jZQ==",
    Payload1 = <<"fragment1">>,
    Payload2 = <<"fragment2">>,
    Payload = <<Payload1/binary, Payload2/binary>>,

    {ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
    ?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
    SndFrame1 = #frame{fin=false, opcode=?WS_OPCODE_TEXT, payload=Payload1},
    SndFrame2 = #frame{opcode=?WS_OPCODE_PING},
    SndFrame3 = #frame{opcode=?WS_OPCODE_CONTINUATION, payload=Payload2},
    ?assertEqual(ok, send_frame(Sock, SndFrame1, all)),
    ?assertEqual(ok, send_frame(Sock, SndFrame2, all)),
    ?assertEqual(ok, send_frame(Sock, SndFrame3, all)),
    ?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
    {ok, [Frame1, Frame2|Frames]} = wsflush(Sock, true),
    %% PONG first, then the reassembled message.
    ?assertEqual(?WS_OPCODE_PONG, Frame1#frame.opcode),
    ?assertEqual(?WS_OPCODE_TEXT, Frame2#frame.opcode),
    ?assertEqual(Payload, Frame2#frame.payload),
    ?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
    ?assertEqual(ok, close(Sock)),
    ok.

%% Same interleaving with an unsolicited PONG: it must be silently ignored
%% and the fragments still reassembled.
basic_fragmented_text_with_pong(Config) ->
    fragmented_with_pong(Config, "/websockets_example_endpoint.yaws").

advanced_fragmented_text_with_pong(Config) ->
    fragmented_with_pong(Config, "/websockets_autobahn_endpoint.yaws").

fragmented_with_pong(Config, WSPath) ->
    Key = "dGhlIHNhbXBsZSBub25jZQ==",
    Payload1 = <<"fragment1">>,
    Payload2 = <<"fragment2">>,
    Payload = <<Payload1/binary, Payload2/binary>>,

    {ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
    ?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
    SndFrame1 = #frame{fin=false, opcode=?WS_OPCODE_TEXT, payload=Payload1},
    SndFrame2 = #frame{opcode=?WS_OPCODE_PONG},
    SndFrame3 = #frame{opcode=?WS_OPCODE_CONTINUATION, payload=Payload2},
    ?assertEqual(ok, send_frame(Sock, SndFrame1, all)),
    ?assertEqual(ok, send_frame(Sock, SndFrame2, all)),
    ?assertEqual(ok, send_frame(Sock, SndFrame3, all)),
    ?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
    {ok, [Frame1|Frames]} = wsflush(Sock, true),
    ?assertEqual(?WS_OPCODE_TEXT, Frame1#frame.opcode),
    ?assertEqual(Payload, Frame1#frame.payload),
    ?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
    ?assertEqual(ok, close(Sock)),
    ok.
%% A CONTINUATION frame with no message in progress is a protocol error,
%% whether it claims to be final (fin=true) or not.
basic_badfragmented_1(Config) ->
    badfragmented(Config, "/websockets_example_endpoint.yaws", true).

basic_badfragmented_2(Config) ->
    badfragmented(Config, "/websockets_example_endpoint.yaws", false).

advanced_badfragmented_1(Config) ->
    badfragmented(Config, "/websockets_autobahn_endpoint.yaws", true).

advanced_badfragmented_2(Config) ->
    badfragmented(Config, "/websockets_autobahn_endpoint.yaws", false).

badfragmented(Config, WSPath, Fin) ->
    Key = "dGhlIHNhbXBsZSBub25jZQ==",
    Payload = <<"small payload">>,

    {ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
    ?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
    %% Orphan continuation first; the following valid TEXT must never be
    %% processed because the connection fails before it.
    SndFrame1 = #frame{fin=Fin, opcode=?WS_OPCODE_CONTINUATION, payload=Payload},
    SndFrame2 = #frame{opcode=?WS_OPCODE_TEXT, payload=Payload},
    ?assertEqual(ok, send_frame(Sock, SndFrame1, all)),
    ?assertEqual(ok, send_frame(Sock, SndFrame2, all)),
    {ok, Frames} = wsflush(Sock, false),
    ?assert(is_valid_close_frame(Frames, [?WS_STATUS_PROTO_ERROR])),
    ?assertEqual({ok, []}, wsflush(Sock, true)),
    ?assertEqual(ok, close(Sock)),
    ok.

%% Starting a second TEXT message while the first is still unfinished
%% (no continuation in between) must also fail with 1002.
basic_badfragmented_nocontinuation(Config) ->
    badfragmented_nocontinuation(Config, "/websockets_example_endpoint.yaws").

advanced_badfragmented_nocontinuation(Config) ->
    badfragmented_nocontinuation(Config, "/websockets_autobahn_endpoint.yaws").

badfragmented_nocontinuation(Config, WSPath) ->
    Key = "dGhlIHNhbXBsZSBub25jZQ==",
    Payload1 = <<"fragment1">>,
    Payload2 = <<"fragment2">>,

    {ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
    ?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
    SndFrame1 = #frame{fin=false, opcode=?WS_OPCODE_TEXT, payload=Payload1},
    SndFrame2 = #frame{fin=false, opcode=?WS_OPCODE_TEXT, payload=Payload2},
    ?assertEqual(ok, send_frame(Sock, SndFrame1, all)),
    ?assertEqual(ok, send_frame(Sock, SndFrame2, all)),
    {ok, Frames} = wsflush(Sock, false),
    ?assert(is_valid_close_frame(Frames, [?WS_STATUS_PROTO_ERROR])),
    ?assertEqual({ok, []}, wsflush(Sock, true)),
    ?assertEqual(ok, close(Sock)),
    ok.
%% Valid UTF-8 TEXT payloads must be echoed unchanged, whether the frame is
%% sent whole or byte-by-byte (stressing the server's streaming validator).
basic_unfragmented_valid_utf8_text(Config) ->
    unfragmented_valid_utf8(Config, "/websockets_example_endpoint.yaws", all),
    unfragmented_valid_utf8(Config, "/websockets_example_endpoint.yaws", 1),
    ok.

advanced_unfragmented_valid_utf8_text(Config) ->
    unfragmented_valid_utf8(Config, "/websockets_autobahn_endpoint.yaws", all),
    unfragmented_valid_utf8(Config, "/websockets_autobahn_endpoint.yaws", 1),
    ok.

%% The payloads cover 1- to 4-byte sequences and the boundary code points
%% (U+0000, U+0080, U+0800, U+10000, U+007F, U+07FF, U+FFFF, U+10FFFF, the
%% surrogate-range edges U+D7FF/U+E000 and U+FFFD).
unfragmented_valid_utf8(Config, WSPath, BlockSz) ->
    Key = "dGhlIHNhbXBsZSBub25jZQ==",
    {ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
    ?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
    Fun = fun(Payload) ->
                  SndFrame = #frame{opcode=?WS_OPCODE_TEXT, payload=Payload},
                  ?assertEqual(ok, send_frame(Sock, SndFrame, BlockSz)),
                  {ok, RcvFrame} = read_frame(Sock),
                  ?assertEqual(?WS_OPCODE_TEXT, RcvFrame#frame.opcode),
                  ?assertEqual(Payload, RcvFrame#frame.payload)
          end,
    Fun(<<16#ce,16#ba>>),
    Fun(<<16#ce,16#ba,16#e1,16#bd,16#b9>>),
    Fun(<<16#ce,16#ba,16#e1,16#bd,16#b9,16#cf,16#83>>),
    Fun(<<16#ce,16#ba,16#e1,16#bd,16#b9,16#cf,16#83,16#ce,16#bc>>),
    Fun(<<16#ce,16#ba,16#e1,16#bd,16#b9,16#cf,16#83,16#ce,16#bc,
          16#ce,16#b5>>),
    Fun(<<"Hello-",16#c2,16#b5,$@,16#c3,16#9f,16#c3,16#b6,16#c3,16#a4,
          16#c3,16#bc,16#c3,16#a0,16#c3,16#a1,"-UTF-8!!">>),
    Fun(<<16#00>>),
    Fun(<<16#c2,16#80>>),
    Fun(<<16#e0,16#a0,16#80>>),
    Fun(<<16#f0,16#90,16#80,16#80>>),
    Fun(<<16#7f>>),
    Fun(<<16#df,16#bf>>),
    Fun(<<16#ef,16#bf,16#bf>>),
    Fun(<<16#f4,16#8f,16#bf,16#bf>>),
    Fun(<<16#ed,16#9f,16#bf>>),
    Fun(<<16#ee,16#80,16#80>>),
    Fun(<<16#ef,16#bf,16#bd>>),
    Fun(<<16#f4,16#8f,16#bf,16#bf>>),
    ?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
    {ok, Frames} = wsflush(Sock, true),
    ?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
    ?assertEqual(ok, close(Sock)),
    ok.
basic_fragmented_valid_utf8_text(Config) ->
fragmented_valid_utf8(Config, "/websockets_example_endpoint.yaws", all),
fragmented_valid_utf8(Config, "/websockets_example_endpoint.yaws", 1),
ok.
advanced_fragmented_valid_utf8_text(Config) ->
fragmented_valid_utf8(Config, "/websockets_autobahn_endpoint.yaws", all),
fragmented_valid_utf8(Config, "/websockets_autobahn_endpoint.yaws", 1),
ok.
%% Send a valid UTF-8 text message as a TEXT frame followed by CONTINUATION
%% frames (fragment size FragSz, or 'all' for one fragment per half) and
%% check the server reassembles and echoes the complete message in a single
%% TEXT frame, then closes normally.
fragmented_valid_utf8(Config, WSPath, FragSz) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
%% The split point falls inside the message but between UTF-8 sequences.
Payload1 = <<"Hello-",16#c2,16#b5,$@,16#c3,16#9f,16#c3,16#b6,16#c3,16#a4>>,
Payload2 = <<16#c3,16#bc,16#c3,16#a0,16#c3,16#a1,"-UTF-8!!">>,
Payload = <<Payload1/binary, Payload2/binary>>,
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
Payloads = fragment_payload(Payload1, FragSz) ++
fragment_payload(Payload2, FragSz),
%% First fragment is a TEXT frame with fin=false, the last a final
%% CONTINUATION, everything in between non-final CONTINUATIONs.
[First|Rest0] = Payloads,
[Last|Rest1] = lists:reverse(Rest0),
Middles = lists:reverse(Rest1),
FirstFrame = #frame{fin=false, opcode=?WS_OPCODE_TEXT, payload=First},
LastFrame = #frame{opcode=?WS_OPCODE_CONTINUATION, payload=Last},
MiddleFrames = lists:map(fun(P) ->
#frame{fin=false,
opcode=?WS_OPCODE_CONTINUATION,
payload=P}
end, Middles),
?assertEqual(ok, send_frame(Sock, FirstFrame, all)),
lists:foreach(fun(F) ->
?assertEqual(ok, send_frame(Sock, F, all))
end, MiddleFrames),
?assertEqual(ok, send_frame(Sock, LastFrame, all)),
?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
%% The echo must come back as one reassembled TEXT frame before the close.
{ok, [Frame1|Frames]} = wsflush(Sock, true),
?assertEqual(?WS_OPCODE_TEXT, Frame1#frame.opcode),
?assertEqual(Payload, Frame1#frame.payload),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
?assertEqual(ok, close(Sock)),
ok.
%% Invalid UTF-8 in an unfragmented TEXT frame must be rejected with status
%% 1007. BlockSz=all writes the frame in one TCP send; BlockSz=1 drip-feeds
%% it one byte at a time.
basic_unfragmented_invalid_utf8_text(Config) ->
unfragmented_invalid_utf8(Config, "/websockets_example_endpoint.yaws", all),
unfragmented_invalid_utf8(Config, "/websockets_example_endpoint.yaws", 1),
ok.
%% Same checks against the autobahn (advanced callback) endpoint.
advanced_unfragmented_invalid_utf8_text(Config) ->
unfragmented_invalid_utf8(Config, "/websockets_autobahn_endpoint.yaws", all),
unfragmented_invalid_utf8(Config, "/websockets_autobahn_endpoint.yaws", 1),
ok.
%% For each malformed UTF-8 payload below, open a fresh connection, send it
%% as a single TEXT frame (written in BlockSz-byte TCP chunks, or all at
%% once), and check the server closes with ?WS_STATUS_INVALID_PAYLOAD (1007).
unfragmented_invalid_utf8(Config, WSPath, BlockSz) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
Fun = fun(Payload) ->
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
SndFrame = #frame{opcode=?WS_OPCODE_TEXT, payload=Payload},
?assertEqual(ok, send_frame(Sock, SndFrame, BlockSz)),
{ok, Frames} = wsflush(Sock, false),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_INVALID_PAYLOAD])),
?assertEqual({ok, []}, wsflush(Sock, true)),
?assertEqual(ok, close(Sock))
end,
%% Truncated multi-byte sequences.
Fun(<<16#cd>>),
Fun(<<16#ce,16#ba,16#e1>>),
Fun(<<16#ce,16#ba,16#e1,16#bd>>),
Fun(<<16#ce,16#ba,16#e1,16#bd,16#b9,16#cf>>),
Fun(<<16#ce,16#ba,16#e1,16#bd,16#b9,16#cf,16#83,16#ce>>),
Fun(<<16#ce,16#ba,16#e1,16#bd,16#b9,16#cf,16#83,16#ce,16#bc,16#ce>>),
%% UTF-16 surrogate (ed a0 80) embedded in otherwise valid text.
Fun(<<16#ce,16#ba,16#e1,16#bd,16#b9,16#cf,16#83,16#ce,16#bc,16#ce,
16#b5,16#ed,16#a0,16#80,16#65,16#64,16#69,16#74,16#65,16#64>>),
%% Overlong/out-of-range 5- and 6-byte forms and U+110000 and beyond.
Fun(<<16#f8,16#88,16#80,16#80,16#80>>),
Fun(<<16#fc,16#84,16#80,16#80,16#80,16#80>>),
Fun(<<16#f7,16#bf,16#bf,16#bf>>),
Fun(<<16#fb,16#bf,16#bf,16#bf,16#bf>>),
Fun(<<16#fd,16#bf,16#bf,16#bf,16#bf,16#bf>>),
Fun(<<16#f4,16#90,16#80,16#80>>),
%% Naked continuation bytes (never valid as a first byte).
Fun(<<16#80>>),
Fun(<<16#bf>>),
Fun(<<16#80,16#bf>>),
Fun(<<16#80,16#bf,16#80>>),
Fun(<<16#80,16#bf,16#80,16#bf>>),
Fun(<<16#80,16#bf,16#80,16#bf,16#80>>),
Fun(<<16#80,16#bf,16#80,16#bf,16#80,16#bf>>),
Fun(<<16#80,16#81,16#82,16#83,16#84,16#85,16#86,16#87,16#88,16#89,16#8a,
16#8b,16#8c,16#8d,16#8e,16#8f,16#90,16#91,16#92,16#93,16#94,16#95,
16#96,16#97,16#98,16#99,16#9a,16#9b,16#9c,16#9d,16#9e,16#9f,16#a0,
16#a1,16#a2,16#a3,16#a4,16#a5,16#a6,16#a7,16#a8,16#a9,16#aa,16#ab,
16#ac,16#ad,16#ae,16#af,16#b0,16#b1,16#b2,16#b3,16#b4,16#b5,16#b6,
16#b7,16#b8,16#b9,16#ba,16#bb,16#bc,16#bd,16#be>>),
ok.
%% Invalid UTF-8 split across fragments must still be rejected with 1007.
%% FragSz=all sends each half as one fragment; FragSz=1 uses 1-byte fragments.
basic_fragmented_invalid_utf8_text(Config) ->
fragmented_invalid_utf8(Config, "/websockets_example_endpoint.yaws", all),
fragmented_invalid_utf8(Config, "/websockets_example_endpoint.yaws", 1),
ok.
%% Same checks against the autobahn (advanced callback) endpoint.
advanced_fragmented_invalid_utf8_text(Config) ->
fragmented_invalid_utf8(Config, "/websockets_autobahn_endpoint.yaws", all),
fragmented_invalid_utf8(Config, "/websockets_autobahn_endpoint.yaws", 1),
ok.
%% Send a text message containing a UTF-16 surrogate (invalid UTF-8) as a
%% fragmented message and check the server closes with 1007 and sends no
%% further frames afterwards.
fragmented_invalid_utf8(Config, WSPath, FragSz) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
%% The surrogate ed a0 80 straddles into Payload2.
Payload1 = <<16#ce,16#ba,16#e1,16#bd,16#b9,16#cf,16#83,16#ce,16#bc,16#ce>>,
Payload2 = <<16#b5,16#ed,16#a0,16#80,16#65,16#64,16#69,16#74,16#65,16#64>>,
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
Payloads = fragment_payload(Payload1, FragSz) ++
fragment_payload(Payload2, FragSz),
[First|Rest0] = Payloads,
[Last|Rest1] = lists:reverse(Rest0),
Middles = lists:reverse(Rest1),
FirstFrame = #frame{fin=false, opcode=?WS_OPCODE_TEXT, payload=First},
LastFrame = #frame{opcode=?WS_OPCODE_CONTINUATION, payload=Last},
MiddleFrames = lists:map(fun(P) ->
#frame{fin=false,
opcode=?WS_OPCODE_CONTINUATION,
payload=P}
end, Middles),
?assertEqual(ok, send_frame(Sock, FirstFrame, all)),
lists:foreach(fun(F) ->
?assertEqual(ok, send_frame(Sock, F, all))
end, MiddleFrames),
?assertEqual(ok, send_frame(Sock, LastFrame, all)),
{ok, Frames} = wsflush(Sock, false),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_INVALID_PAYLOAD])),
%% Nothing may follow the server's close frame.
?assertEqual({ok, []}, wsflush(Sock, true)),
?assertEqual(ok, close(Sock)),
ok.
%% A second client close after the close handshake completed must be
%% ignored: the server answers the first close only, then nothing more.
basic_2_closes(Config) ->
send_2_closes(Config, "/websockets_example_endpoint.yaws").
advanced_2_closes(Config) ->
send_2_closes(Config, "/websockets_autobahn_endpoint.yaws").
send_2_closes(Config, WSPath) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
{ok, Frames} = wsflush(Sock, false),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
%% No reply to the second close; only the TCP close follows.
?assertEqual({ok, []}, wsflush(Sock, true)),
?assertEqual(ok, close(Sock)),
ok.
%% A PING sent after the client's close frame must not be answered; only
%% the server's close reply is expected.
basic_close_ping(Config) ->
close_ping(Config, "/websockets_example_endpoint.yaws").
advanced_close_ping(Config) ->
close_ping(Config, "/websockets_autobahn_endpoint.yaws").
close_ping(Config, WSPath) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
?assertEqual(ok, send_frame(Sock, #frame{opcode=?WS_OPCODE_PING}, all)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
?assertEqual(ok, close(Sock)),
ok.
%% A TEXT frame sent after the client's close frame must be ignored; the
%% server still completes a normal close handshake.
basic_close_text(Config) ->
close_text(Config, "/websockets_example_endpoint.yaws").
advanced_close_text(Config) ->
close_text(Config, "/websockets_autobahn_endpoint.yaws").
close_text(Config, WSPath) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
?assertEqual(ok, send_frame(Sock, #frame{opcode=?WS_OPCODE_TEXT}, all)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
?assertEqual(ok, close(Sock)),
ok.
%% Start a fragmented text message, close mid-message, then send the
%% continuation: the server must still close normally, ignoring the
%% post-close continuation frame.
basic_fragtext_close_fragtext(Config) ->
close_fragtext(Config, "/websockets_example_endpoint.yaws").
advanced_fragtext_close_fragtext(Config) ->
close_fragtext(Config, "/websockets_autobahn_endpoint.yaws").
close_fragtext(Config, WSPath) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
?assertEqual(ok, send_frame(Sock, #frame{fin=false, opcode=?WS_OPCODE_TEXT}, all)),
?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
?assertEqual(ok, send_frame(Sock, #frame{opcode=?WS_OPCODE_CONTINUATION}, all)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
?assertEqual(ok, close(Sock)),
ok.
%% A close frame with an empty payload (raw bytes 136,0: FIN+CLOSE, len 0)
%% is valid and must be answered with a normal-status close.
basic_close_empty(Config) ->
close_empty(Config, "/websockets_example_endpoint.yaws").
advanced_close_empty(Config) ->
close_empty(Config, "/websockets_autobahn_endpoint.yaws").
close_empty(Config, WSPath) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
?assertEqual(ok, gen_tcp:send(Sock, <<136,0>>)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
?assertEqual(ok, close(Sock)),
ok.
%% A close frame with a 1-byte payload is malformed (the status code needs
%% 2 bytes) and must be answered with a protocol-error close (1002).
basic_close_toosmall(Config) ->
close_toosmall(Config, "/websockets_example_endpoint.yaws").
advanced_close_toosmall(Config) ->
close_toosmall(Config, "/websockets_autobahn_endpoint.yaws").
close_toosmall(Config, WSPath) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
?assertEqual(ok, gen_tcp:send(Sock, <<136,1,0>>)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_PROTO_ERROR])),
?assertEqual(ok, close(Sock)),
ok.
%% A close frame carrying just a 2-byte status code (1000) and no reason
%% text is valid and echoed back as a normal close.
basic_close_statusonly(Config) ->
close_statusonly(Config, "/websockets_example_endpoint.yaws").
advanced_close_statusonly(Config) ->
close_statusonly(Config, "/websockets_autobahn_endpoint.yaws").
close_statusonly(Config, WSPath) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
?assertEqual(ok, gen_tcp:send(Sock, <<136,2,1000:16/big>>)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
?assertEqual(ok, close(Sock)),
ok.
%% A close frame with status 1000 plus a short UTF-8 reason ("Ok") is
%% valid and must be answered with a normal close.
basic_close_with_reason(Config) ->
close_with_reason(Config, "/websockets_example_endpoint.yaws").
advanced_close_with_reason(Config) ->
close_with_reason(Config, "/websockets_autobahn_endpoint.yaws").
close_with_reason(Config, WSPath) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
?assertEqual(ok, gen_tcp:send(Sock, <<136,4,1000:16/big,"Ok">>)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
?assertEqual(ok, close(Sock)),
ok.
%% A close frame at the maximum control-frame payload size (125 bytes:
%% 2-byte status + 123-byte reason) must still be accepted.
basic_close_limit_size(Config) ->
close_limit_size(Config, "/websockets_example_endpoint.yaws").
advanced_close_limit_size(Config) ->
close_limit_size(Config, "/websockets_autobahn_endpoint.yaws").
close_limit_size(Config, WSPath) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
Reason = list_to_binary(lists:duplicate(123, $*)),
?assertEqual(ok, gen_tcp:send(Sock, <<136,125,1000:16/big,Reason/binary>>)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
?assertEqual(ok, close(Sock)),
ok.
%% Control frames may not exceed a 125-byte payload; a close frame
%% declaring the 126 (extended length) marker must be rejected with a
%% protocol-error close (1002).
basic_close_toolong(Config) ->
close_toolong(Config, "/websockets_example_endpoint.yaws").
advanced_close_toolong(Config) ->
close_toolong(Config, "/websockets_autobahn_endpoint.yaws").
close_toolong(Config, WSPath) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
Reason = list_to_binary(lists:duplicate(124, $*)),
?assertEqual(ok, gen_tcp:send(Sock, <<136,126,1000:16/big,Reason/binary>>)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_PROTO_ERROR])),
?assertEqual(ok, close(Sock)),
ok.
%% A close frame whose reason text contains invalid UTF-8 (a UTF-16
%% surrogate) must be answered with an invalid-payload close (1007).
basic_close_invalid_utf8(Config) ->
close_invalid_utf8(Config, "/websockets_example_endpoint.yaws").
advanced_close_invalid_utf8(Config) ->
close_invalid_utf8(Config, "/websockets_autobahn_endpoint.yaws").
close_invalid_utf8(Config, WSPath) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
Reason = <<16#ce,16#ba,16#e1,16#bd,16#b9,16#cf,16#83,16#ce,16#bc,16#ce,
16#b5,16#ed,16#a0,16#80,16#65,16#64,16#69,16#74,16#65,16#64>>,
%% Payload length 22 = 2-byte status + 20-byte (invalid) reason.
?assertEqual(ok, gen_tcp:send(Sock, <<136,22,1000:16/big,Reason/binary>>)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_INVALID_PAYLOAD])),
?assertEqual(ok, close(Sock)),
ok.
%% Every valid close status code (defined codes plus the registered 3000-3999
%% and private 4000-4999 ranges) must be echoed back by the server.
basic_close_valid_codes(Config) ->
lists:foreach(
fun(Code) ->
close_valid_code(Config, "/websockets_example_endpoint.yaws", Code)
end,
[1000,1001,1002,1003,1007,1008,1009,1010,1011,3000,3999,4000,4999]
).
advanced_close_valid_codes(Config) ->
lists:foreach(
fun(Code) ->
close_valid_code(Config, "/websockets_autobahn_endpoint.yaws", Code)
end,
[1000,1001,1002,1003,1007,1008,1009,1010,1011,3000,3999,4000,4999]
).
%% Close with the given Code and expect the server to echo that same code.
close_valid_code(Config, WSPath, Code) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
?assertEqual(ok, wsclose(Sock, Code, <<>>)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [Code])),
?assertEqual(ok, close(Sock)),
ok.
%% Reserved, unassigned or out-of-range close codes must be rejected with a
%% protocol-error close (1002) instead of being echoed.
basic_close_invalid_codes(Config) ->
lists:foreach(
fun(Code) ->
close_invalid_code(Config, "/websockets_example_endpoint.yaws", Code)
end,
[0,999,1004,1005,1006,1012,1013,1014,1015,1016,1100,2000,2999,5000,65536]
).
advanced_close_invalid_codes(Config) ->
lists:foreach(
fun(Code) ->
close_invalid_code(Config, "/websockets_autobahn_endpoint.yaws", Code)
end,
[0,999,1004,1005,1006,1012,1013,1014,1015,1016,1100,2000,2999,5000,65536]
).
%% Close with an invalid Code and expect a 1002 reply from the server.
close_invalid_code(Config, WSPath, Code) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
?assertEqual(ok, wsclose(Sock, Code, <<>>)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_PROTO_ERROR])),
?assertEqual(ok, close(Sock)),
ok.
%% After the server initiates a close (triggered here by sending "bye"),
%% a client that never replies with its own close frame must be dropped at
%% the TCP level once the server's close timeout expires.
close_timeout(Config) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
WSPath = "/websockets_example_endpoint.yaws?extversion=true",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
SndFrame = #frame{opcode=?WS_OPCODE_TEXT, payload = <<"bye">>},
?assertEqual(ok, send_frame(Sock, SndFrame, all)),
{ok, Frames} = wsflush(Sock, false),
LastFrame = lists:last(Frames),
?assert(is_valid_close_frame([LastFrame], [?WS_STATUS_NORMAL])),
timer:sleep(5500), %% Waiting for the timeout
?assertEqual({error, closed}, gen_tcp:recv(Sock, 0)),
?assertEqual(ok, close(Sock)),
ok.
%% With keepalive enabled (5s interval, drop=true) the server pings the
%% client periodically; answering with a PONG keeps the connection alive,
%% while ignoring a ping gets the connection dropped.
keepalive_timeout(Config) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
WSPath = "/websockets_example_endpoint.yaws?keepalive=true&timeout=5000&drop=true",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
timer:sleep(5500),
{ok, RcvFrame1} = read_frame(Sock),
?assertEqual(?WS_OPCODE_PING, RcvFrame1#frame.opcode),
%% Answering the first ping keeps the connection open for another period.
?assertEqual(ok, send_frame(Sock, #frame{opcode=?WS_OPCODE_PONG}, all)),
timer:sleep(5500),
{ok, RcvFrame2} = read_frame(Sock),
?assertEqual(?WS_OPCODE_PING, RcvFrame2#frame.opcode),
%% Not answering the second ping: the server drops the connection.
timer:sleep(2000),
?assertEqual({error, closed}, gen_tcp:recv(Sock, 0)),
?assertEqual(ok, close(Sock)),
ok.
%% A 16MB binary frame is echoed, but one byte more exceeds the server's
%% frame-size limit: expect either a 1009 (message too big) close or an
%% immediate TCP close while we are still sending.
too_big_frame(Config) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
WSPath = "/websockets_example_endpoint.yaws",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
Payload1 = crypto:strong_rand_bytes(16*1024*1024),
SndFrame1 = #frame{opcode=?WS_OPCODE_BINARY, payload=Payload1},
?assertEqual(ok, send_frame(Sock, SndFrame1, all)),
{ok, RcvFrame} = read_frame(Sock),
?assertEqual(?WS_OPCODE_BINARY, RcvFrame#frame.opcode),
?assertEqual(Payload1, RcvFrame#frame.payload),
Payload2 = <<0, Payload1/binary>>,
SndFrame2 = #frame{opcode=?WS_OPCODE_BINARY, payload=Payload2},
%% The server may close the TCP socket before we finish writing.
{ok, Closed} = case send_frame(Sock, SndFrame2, all) of
ok -> {ok, false};
{error, closed} -> {ok, true}
end,
{ok, Frames} = wsflush(Sock, true),
?assert(case Closed of
false -> is_valid_close_frame(Frames, [?WS_STATUS_MSG_TOO_BIG]);
true -> true
end),
?assertEqual(ok, close(Sock)),
ok.
%% With close_unmasked=true the endpoint must reject unmasked client frames
%% (RFC 6455 requires client-to-server masking) with a 1002 close.
close_unmasked_frame(Config) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
WSPath = "/websockets_example_endpoint.yaws?close_unmasked=true",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
%% unmasked
%% (#frame.masked defaults to false, so this frame goes out unmasked)
SndFrame = #frame{opcode=?WS_OPCODE_TEXT, payload = <<"unmasked">>},
?assertEqual(ok, send_frame(Sock, SndFrame, all)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_PROTO_ERROR])),
?assertEqual(ok, close(Sock)),
ok.
%% Like too_big_frame/1 but the payload is split into four fragments, so
%% the per-frame limit is never hit: a reassembled 16MB message is echoed,
%% one byte more triggers a 1009 (message too big) close.
too_big_message(Config) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
WSPath = "/websockets_example_endpoint.yaws",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
Payload1 = crypto:strong_rand_bytes(16*1024*1024),
<<Frag1:(4*1024)/binary, Frag2:(4*1024)/binary,
Frag3:(4*1024)/binary, Frag4/binary>> = Payload1,
SndFrame1 = #frame{fin=false, opcode=?WS_OPCODE_BINARY, payload=Frag1},
SndFrame2 = #frame{fin=false, opcode=?WS_OPCODE_CONTINUATION, payload=Frag2},
SndFrame3 = #frame{fin=false, opcode=?WS_OPCODE_CONTINUATION, payload=Frag3},
SndFrame4 = #frame{opcode=?WS_OPCODE_CONTINUATION, payload=Frag4},
?assertEqual(ok, send_frame(Sock, SndFrame1, all)),
?assertEqual(ok, send_frame(Sock, SndFrame2, all)),
?assertEqual(ok, send_frame(Sock, SndFrame3, all)),
?assertEqual(ok, send_frame(Sock, SndFrame4, all)),
{ok, RcvFrame} = read_frame(Sock),
?assertEqual(?WS_OPCODE_BINARY, RcvFrame#frame.opcode),
?assertEqual(Payload1, RcvFrame#frame.payload),
%% One extra byte pushes the total message over the limit.
Payload2 = <<0, Payload1/binary>>,
<<Frag5:(4*1024)/binary, Frag6:(4*1024)/binary,
Frag7:(4*1024)/binary, Frag8/binary>> = Payload2,
SndFrame5 = #frame{fin=false, opcode=?WS_OPCODE_BINARY, payload=Frag5},
SndFrame6 = #frame{fin=false, opcode=?WS_OPCODE_CONTINUATION, payload=Frag6},
SndFrame7 = #frame{fin=false, opcode=?WS_OPCODE_CONTINUATION, payload=Frag7},
SndFrame8 = #frame{opcode=?WS_OPCODE_CONTINUATION, payload=Frag8},
?assertEqual(ok, send_frame(Sock, SndFrame5, all)),
?assertEqual(ok, send_frame(Sock, SndFrame6, all)),
?assertEqual(ok, send_frame(Sock, SndFrame7, all)),
?assertEqual(ok, send_frame(Sock, SndFrame8, all)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_MSG_TOO_BIG])),
?assertEqual(ok, close(Sock)),
ok.
%% Basic echo round-trip over TLS: the only difference from the plain tests
%% is the sslopen/2 connection (all helpers dispatch on the socket type via
%% yaws_api:get_sslsocket/1).
secure_websocket(Config) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
WSPath = "/websockets_example_endpoint.yaws",
{ok, Sock} = sslopen("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
SndFrame = #frame{opcode=?WS_OPCODE_TEXT, payload = <<"small payload">>},
?assertEqual(ok, send_frame(Sock, SndFrame, all)),
{ok, RcvFrame} = read_frame(Sock),
?assertEqual(?WS_OPCODE_TEXT, RcvFrame#frame.opcode),
?assertEqual(<<"small payload">>, RcvFrame#frame.payload),
?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
?assertEqual(ok, close(Sock)),
ok.
%%====================================================================
%% Open a plain TCP connection to the server under test: binary mode,
%% raw packets, passive receive, 2s send timeout.
open(Host, Port) ->
    gen_tcp:connect(Host, Port,
                    [binary, {packet, raw}, {active, false},
                     {send_timeout, 2000}]).
%% Open a TLS connection with the same options as open/2. The socket is
%% wrapped as {ssl, Sock} so helpers can detect TLS via
%% yaws_api:get_sslsocket/1.
sslopen(Host, Port) ->
    case ssl:connect(Host, Port, [{send_timeout, 2000}, binary,
                                  {packet, raw}, {active, false}]) of
        {ok, Sock}         -> {ok, {ssl, Sock}};
        {error, _}=Error   -> Error
    end.
%% Close either flavour of test socket (plain gen_tcp or ssl-wrapped).
close(Sock) ->
    case yaws_api:get_sslsocket(Sock) of
        undefined     -> gen_tcp:close(Sock);
        {ok, SslSock} -> ssl:close(SslSock)
    end.
%% ----
%% Perform the client side of the websocket opening handshake (RFC 6455
%% section 4.1) and return {ok, {Status, Headers}} | {error, Reason} from
%% the server's HTTP response.
wsopen(Sock, Key, Path, Origin, Vsn) ->
Handshake = ["GET ", Path, " HTTP/1.1\r\n",
"Host: localhost\r\n",
"Upgrade: websocket\r\n",
"Connection: Upgrade\r\n",
"Sec-WebSocket-Key: ", Key, "\r\n",
"Origin: ", Origin, "\r\n",
"Sec-WebSocket-Version: ", integer_to_list(Vsn), "\r\n",
"\r\n"],
case yaws_api:get_sslsocket(Sock) of
{ok, SslSock} -> ssl:send(SslSock, Handshake);
undefined -> gen_tcp:send(Sock, Handshake)
end,
read_handshake_response(Sock).
%% Send a close frame with the given Status code and reason Msg, encoding
%% the 7/16/64-bit payload-length variants by hand.
%% NOTE(review): the frame is sent with Mask=0 (unmasked) even though
%% RFC 6455 requires client frames to be masked; the server under test
%% appears to tolerate this here -- confirm against the yaws implementation.
wsclose(Sock, Status, Msg) ->
Fin = 1,
Rsv = 0,
Mask = 0,
Opcode = ?WS_OPCODE_CLOSE,
Payload= <<Status:16/big, Msg/binary>>,
Len = byte_size(Payload),
Frame = if
Len < 126 ->
<<Fin:1,Rsv:3,Opcode:4,Mask:1,Len:7,Payload/binary>>;
Len < 65536 ->
<<Fin:1,Rsv:3,Opcode:4,Mask:1,126:7,Len:16,Payload/binary>>;
true ->
<<Fin:1,Rsv:3,Opcode:4,Mask:1,127:7,Len:64,Payload/binary>>
end,
case yaws_api:get_sslsocket(Sock) of
{ok, SslSock} -> ssl:send(SslSock, Frame);
undefined -> gen_tcp:send(Sock, Frame)
end.
%% ----
%% Read frames until the stream ends and return them in order.
%% WithTcpClose=false stops right after the first CLOSE frame;
%% WithTcpClose=true keeps reading until the TCP connection is closed.
wsflush(Sock, WithTcpClose) ->
wsflush(Sock, WithTcpClose, []).
%% Accumulator-based worker for wsflush/2 (frames collected in reverse).
wsflush(Sock, WithTcpClose, Acc) ->
case read_frame(Sock) of
{ok, Frame} ->
case Frame#frame.opcode of
?WS_OPCODE_CLOSE when WithTcpClose == false ->
{ok, lists:reverse([Frame|Acc])};
_ ->
wsflush(Sock, WithTcpClose, [Frame|Acc])
end;
{error, closed} ->
%% TCP close terminates the flush normally.
{ok, lists:reverse(Acc)};
{error, Reason} ->
{error, Reason}
end.
%% ----
%% Check the Sec-WebSocket-Accept value: base64(SHA1(Key ++ GUID)) as
%% specified by RFC 6455 section 4.2.2.
is_valid_handshake_hash(Key, Hash) ->
    Guid = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11",
    Expected = base64:encode_to_string(crypto:hash(sha, Key ++ Guid)),
    Hash == Expected.
%% ----
%% Check that Frames is a well-formed end of stream: at most one CLOSE
%% frame, carrying one of the accepted status Codes (an empty close payload
%% counts as ?WS_STATUS_NORMAL). Always returns a boolean, as required by
%% the ?assert(...) call sites.
is_valid_close_frame([], _) ->
    %% Some peers just drop the TCP connection; tolerated with a warning.
    io:format(" WARNING: Connection closed by server without Close frame~n"),
    true;
is_valid_close_frame([#frame{opcode=?WS_OPCODE_CLOSE, payload=Payload}|Rest],
                     Codes) ->
    case Rest of
        [] ->
            case Payload of
                <<>> -> lists:member(?WS_STATUS_NORMAL, Codes);
                <<Status:16/big, _/binary>> ->
                    case lists:member(Status, Codes) of
                        true ->
                            true;
                        false ->
                            io:format(" ERROR: Bad status code in close"
                                      " frame: status=~p~n", [Status]),
                            false
                    end
            end;
        _ ->
            io:format(" ERROR: Remaining frames after the Close frame~n"),
            %% BUGFIX: this branch previously returned the result of
            %% io:format/1 ('ok') instead of false, breaking the boolean
            %% contract relied upon by ?assert(...) callers.
            false
    end;
is_valid_close_frame([#frame{opcode=OpCode}|_], _) ->
    io:format(" ERROR: Not a close frame: opcode=~p~n", [OpCode]),
    false.
%% ----
%% Split Payload into FragSz-byte chunks (the final chunk may be shorter).
%% 'all' means no splitting; an empty payload yields no fragments.
fragment_payload(Payload, all) ->
    [Payload];
fragment_payload(<<>>, _) ->
    [];
fragment_payload(Payload, FragSz) when byte_size(Payload) =< FragSz ->
    [Payload];
fragment_payload(Payload, FragSz) ->
    <<Frag:FragSz/binary, Rest/binary>> = Payload,
    [Frag | fragment_payload(Rest, FragSz)].
%% ----
%% Read the HTTP status line of the handshake response, switch the socket
%% into header-parsing mode, collect the headers via
%% read_handshake_response/3 and restore raw binary mode before returning
%% {ok, {Status, Headers}} | {error, Reason}.
read_handshake_response(Sock) ->
Res = case yaws_api:get_sslsocket(Sock) of
{ok, SslSock} ->
ssl:setopts(SslSock, [{packet, http}, {packet_size, 16#4000}]),
ssl:recv(SslSock, 0, 5000);
undefined ->
inet:setopts(Sock, [{packet, http}, {packet_size, 16#4000}]),
gen_tcp:recv(Sock, 0, 5000)
end,
case Res of
{ok, {http_response, _, Status, _}} ->
case yaws_api:get_sslsocket(Sock) of
{ok, SslSock1} ->
ssl:setopts(SslSock1,[{packet,httph},{packet_size,16#4000}]);
undefined ->
inet:setopts(Sock, [{packet,httph},{packet_size,16#4000}])
end,
Resp = read_handshake_response(Sock, Status, []),
%% Back to raw mode so the websocket frame reader takes over.
case yaws_api:get_sslsocket(Sock) of
{ok, SslSock2} -> ssl:setopts(SslSock2,[binary, {packet, raw}]);
undefined -> inet:setopts(Sock, [binary, {packet, raw}])
end,
Resp;
{ok, Error} ->
{error, Error};
{error, Reason} ->
{error, Reason}
end.
%% Accumulate response headers until http_eoh; header names are normalized
%% to lowercase strings (the packet parser delivers well-known names as
%% atoms and others as strings).
read_handshake_response(Sock, Status, Acc) ->
Res = case yaws_api:get_sslsocket(Sock) of
{ok, SslSock} -> ssl:recv(SslSock, 0, 5000);
undefined -> gen_tcp:recv(Sock, 0, 5000)
end,
case Res of
{ok, {http_header, _, Name, _, Value}} when is_atom(Name) ->
Name1 = string:to_lower(atom_to_list(Name)),
read_handshake_response(Sock, Status, [{Name1, Value}|Acc]),
{ok, {http_header, _, Name, _, Value}} ->
Name1 = string:to_lower(Name),
read_handshake_response(Sock, Status, [{Name1, Value}|Acc]);
{ok, http_eoh} ->
{ok, {Status, Acc}};
{ok, Error} ->
{error, Error};
{error, Reason} ->
{error, Reason}
end.
%% ----
%% Read one complete websocket frame and unmask its payload if the frame
%% carried a masking key.
read_frame(Sock) ->
case read_frame_header(Sock) of
{ok, #frame{mask=undefined}=Frame} ->
{ok, Frame};
{ok, Frame} ->
%% Masking is a symmetric XOR, so mask/2 also unmasks.
Payload = mask(Frame#frame.mask, Frame#frame.payload),
{ok, Frame#frame{payload=Payload}};
{error, Reason} ->
{error, Reason}
end.
%% Read the fixed 2-byte frame header, then delegate to the length /
%% payload readers to complete the frame record.
read_frame_header(Sock) ->
case do_recv(Sock, 2) of
{ok, <<Fin:1, Rsv:3, Opcode:4, MaskBit:1, Len:7>>} ->
Frame = #frame{fin = bit_to_boolean(Fin),
rsv = Rsv,
opcode = Opcode,
masked = bit_to_boolean(MaskBit)},
case read_frame_length(Sock, Len) of
{ok, Length} -> read_frame_payload(Sock, Frame, Length);
{error, Reason} -> {error, Reason}
end;
{error, Reason} ->
{error, Reason}
end.
%% Resolve the payload length: 0-125 is literal, 126 means a 16-bit
%% extended length follows, 127 means a 64-bit extended length follows.
read_frame_length(Sock, 126) ->
case do_recv(Sock, 2) of
{ok, <<Length:16>>} -> {ok, Length};
{error, Reason} -> {error, Reason}
end;
read_frame_length(Sock, 127) ->
case do_recv(Sock, 8) of
{ok, <<Length:64>>} -> {ok, Length};
{error, Reason} -> {error, Reason}
end;
read_frame_length(_Sock, Length) ->
{ok, Length}.
%% Read the 4-byte masking key that follows the length field of a masked
%% frame; do_recv/2 already returns {ok, Mask} | {error, Reason}.
read_frame_mask(Sock) ->
    do_recv(Sock, 4).
%% Read the payload; if the frame is masked and the key has not been read
%% yet, fetch the 4-byte mask first, then recurse to read the data.
read_frame_payload(Sock, #frame{masked=true, mask=undefined}=Frame, Length) ->
case read_frame_mask(Sock) of
{ok, Mask} ->
read_frame_payload(Sock, Frame#frame{mask=Mask}, Length);
{error, Reason} ->
{error, Reason}
end;
read_frame_payload(Sock, Frame, Length) ->
case do_recv(Sock, Length) of
{ok, Payload} -> {ok, Frame#frame{payload=Payload}};
{error, Reason} -> {error, Reason}
end.
%% ----
%% Serialize a #frame{} record and write it to the socket. BlockSz=all
%% sends the whole frame in one write; otherwise do_send/3 writes it in
%% BlockSz-byte TCP chunks (used to exercise the server's reassembly).
send_frame(Sock, Frame, BlockSz) ->
Fin = boolean_to_bit(Frame#frame.fin),
Rsv = Frame#frame.rsv,
Opcode = Frame#frame.opcode,
MaskBit = boolean_to_bit(Frame#frame.masked),
Mask = case Frame#frame.mask of
undefined -> <<>>;
M -> M
end,
Data = mask(Mask, Frame#frame.payload),
Len = byte_size(Data),
%% Choose the 7-bit, 16-bit or 64-bit length encoding.
Packet = if
Len < 126 ->
<<Fin:1,Rsv:3,Opcode:4,MaskBit:1,Len:7,
Mask/binary,Data/binary>>;
Len < 65536 ->
<<Fin:1,Rsv:3,Opcode:4,MaskBit:1,126:7,Len:16,
Mask/binary,Data/binary>>;
true ->
<<Fin:1,Rsv:3,Opcode:4,MaskBit:1,127:7,Len:64,
Mask/binary,Data/binary>>
end,
case BlockSz of
all ->
case yaws_api:get_sslsocket(Sock) of
{ok, SslSock} -> ssl:send(SslSock, Packet);
undefined -> gen_tcp:send(Sock, Packet)
end;
_ ->
do_send(Sock, Packet, BlockSz)
end.
%% Write Packet to the socket in BlockSz-byte chunks; the last chunk may
%% be shorter. Send results are not checked here -- failures surface on
%% the subsequent receive.
do_send(_Sock, <<>>, _BlockSz) ->
ok;
do_send(Sock, Packet, BlockSz) ->
case Packet of
<<Block:BlockSz/binary, Rest/binary>> ->
case yaws_api:get_sslsocket(Sock) of
{ok, SslSock} -> ssl:send(SslSock, Block);
undefined -> gen_tcp:send(Sock, Block)
end,
do_send(Sock, Rest, BlockSz);
_ ->
case yaws_api:get_sslsocket(Sock) of
{ok, SslSock} -> ssl:send(SslSock, Packet);
undefined -> gen_tcp:send(Sock, Packet)
end
end.
%% ----
%% XOR Data against the 4-byte MaskBin (RFC 6455 section 5.3). Masking is
%% an involution, so the same function both masks and unmasks. An empty
%% mask leaves the data untouched.
mask(MaskBin, Data) ->
list_to_binary(rmask(MaskBin, Data)).
%% Worker: XOR 4 bytes at a time, with dedicated clauses for the 1-3
%% trailing bytes (mask truncated to the remaining width).
rmask(_,<<>>) ->
[<<>>];
rmask(<<>>, Data) ->
[Data];
rmask(MaskBin = <<Mask:4/integer-unit:8>>,
<<Data:4/integer-unit:8, Rest/binary>>) ->
Masked = Mask bxor Data,
[<<Masked:4/integer-unit:8>> | rmask(MaskBin, Rest)];
rmask(<<Mask:3/integer-unit:8, _Rest/binary>>, <<Data:3/integer-unit:8>>) ->
Masked = Mask bxor Data,
[<<Masked:3/integer-unit:8>>];
rmask(<<Mask:2/integer-unit:8, _Rest/binary>>, <<Data:2/integer-unit:8>>) ->
Masked = Mask bxor Data,
[<<Masked:2/integer-unit:8>>];
rmask(<<Mask:1/integer-unit:8, _Rest/binary>>, <<Data:1/integer-unit:8>>) ->
Masked = Mask bxor Data,
[<<Masked:1/integer-unit:8>>].
%% ----
%% Convert between wire bits and booleans for the FIN and MASK flags.
%% Any value other than 0/1 (or true/false) raises function_clause.
bit_to_boolean(Bit) when Bit =:= 1 -> true;
bit_to_boolean(Bit) when Bit =:= 0 -> false.
boolean_to_bit(Flag) when Flag =:= true -> 1;
boolean_to_bit(Flag) when Flag =:= false -> 0.
%% ----
%% Receive exactly Sz bytes from the (plain or ssl) socket, looping on
%% short reads, with a 1s timeout per receive call.
do_recv(Sock, Sz) ->
do_recv(Sock, Sz, []).
do_recv(_Sock, 0, Acc) ->
{ok, list_to_binary(lists:reverse(Acc))};
do_recv(Sock, Sz, Acc) ->
Res = case yaws_api:get_sslsocket(Sock) of
{ok, SslSock} -> ssl:recv(SslSock, Sz, 1000);
undefined -> gen_tcp:recv(Sock, Sz, 1000)
end,
case Res of
{ok, Bin} -> do_recv(Sock, Sz - byte_size(Bin), [Bin|Acc]);
{error, Reason} -> {error, Reason}
end.
| null | https://raw.githubusercontent.com/erlyaws/yaws/da198c828e9d95ca2137da7884cddadd73941d13/testsuite/websockets_SUITE.erl | erlang | ====================================================================
====================================================================
Send the handshake and retrieve the response
Check the server response
Close the webscoket and check the server reply
Send the handshake and retrieve the response
Check the server response
Send the handshake and retrieve the response
Send the handshake and retrieve the response
Send the handshake and retrieve the response
Send the handshake and retrieve the response
Send the handshake and retrieve the response
unmasked
masked
unmasked
masked
Waiting for the timeout
unmasked
====================================================================
----
----
----
----
----
----
----
----
----
----
---- | -module(websockets_SUITE).
-include("testsuite.hrl").
-compile(export_all).
-record(frame, {fin = true,
rsv = 0,
opcode,
masked = false,
mask,
payload = <<>>}).
-define(WS_OPCODE_CONTINUATION, 16#00).
-define(WS_OPCODE_TEXT, 16#01).
-define(WS_OPCODE_BINARY, 16#02).
-define(WS_OPCODE_CLOSE, 16#08).
-define(WS_OPCODE_PING, 16#09).
-define(WS_OPCODE_PONG, 16#0A).
-define(WS_STATUS_NORMAL, 1000).
-define(WS_STATUS_PROTO_ERROR, 1002).
-define(WS_STATUS_ABNORMAL_CLOSURE, 1006).
-define(WS_STATUS_INVALID_PAYLOAD, 1007).
-define(WS_STATUS_MSG_TOO_BIG, 1009).
-define(WS_STATUS_INTERNAL_ERROR, 1011).
all() ->
[
{group, websocket_tests},
{group, secure_websocket_tests}
].
groups() ->
[
{websocket_tests, [],
[valid_opening_handshake,
bad_version_handshake,
bad_origin_handshake,
noconnection_handshake,
bad_connection_handshake,
noupgrade_handshake,
bad_upgrade_handshake,
basic_unfragmented_text,
advanced_unfragmented_text,
basic_unfragmented_binary,
advanced_unfragmented_binary,
basic_ping_text,
advanced_ping_text,
basic_ping_binary,
advanced_ping_binary,
toolong_payload_ping,
basic_unsolicited_pong,
advanced_unsolicited_pong,
basic_unsolicited_pong_ping_pong,
advanced_unsolicited_pong_ping_pong,
basic_10_pings,
advanced_10_pings,
badrsv_text,
badrsv_binary,
badrsv_ping,
badrsv_close,
badrsv_complex,
badopcodes,
basic_fragmented_empty,
basic_fragmented_text_1,
basic_fragmented_binary_1,
basic_fragmented_text_2,
basic_fragmented_binary_2,
basic_fragmented_ping,
basic_fragmented_pong,
basic_fragmented_close,
basic_fragmented_text_with_ping,
basic_fragmented_text_with_pong,
basic_badfragmented_1,
basic_badfragmented_2,
basic_badfragmented_nocontinuation,
advanced_fragmented_empty,
advanced_fragmented_text_1,
advanced_fragmented_binary_1,
advanced_fragmented_text_2,
advanced_fragmented_binary_2,
advanced_fragmented_ping,
advanced_fragmented_pong,
advanced_fragmented_close,
advanced_fragmented_text_with_ping,
advanced_fragmented_text_with_pong,
advanced_badfragmented_1,
advanced_badfragmented_2,
advanced_badfragmented_nocontinuation,
basic_unfragmented_valid_utf8_text,
basic_fragmented_valid_utf8_text,
advanced_unfragmented_valid_utf8_text,
advanced_fragmented_valid_utf8_text,
basic_unfragmented_invalid_utf8_text,
basic_fragmented_invalid_utf8_text,
advanced_unfragmented_invalid_utf8_text,
advanced_fragmented_invalid_utf8_text,
basic_2_closes,
basic_close_ping,
basic_close_text,
basic_fragtext_close_fragtext,
basic_close_empty,
basic_close_toosmall,
basic_close_statusonly,
basic_close_with_reason,
basic_close_limit_size,
basic_close_toolong,
basic_close_invalid_utf8,
basic_close_valid_codes,
basic_close_invalid_codes,
advanced_2_closes,
advanced_close_ping,
advanced_close_text,
advanced_fragtext_close_fragtext,
advanced_close_empty,
advanced_close_toosmall,
advanced_close_statusonly,
advanced_close_with_reason,
advanced_close_limit_size,
advanced_close_toolong,
advanced_close_invalid_utf8,
advanced_close_valid_codes,
advanced_close_invalid_codes,
close_timeout,
keepalive_timeout,
too_big_frame,
too_big_message,
close_unmasked_frame]},
{secure_websocket_tests, [], [secure_websocket]}
].
init_per_suite(Config) ->
Id = "testsuite-server",
GConf = [
{logdir, ?config(priv_dir, Config)},
{trace, false},
{flags, [{copy_error_log, true}]},
{keepalive_timeout, 10000},
{acceptor_pool_size, 32}
],
ok = yaws:start_embedded(?wwwdir, [], GConf, Id),
[{yaws_id, Id} | Config].
end_per_suite(_Config) ->
ok.
init_per_group(websocket_tests, Config) ->
SConfHTTP = [
{docroot, ?wwwdir},
{port, testsuite:get_yaws_port(1, Config)},
{listen, {127,0,0,1}},
{flags, [{access_log, true}]},
{servername, "localhost"}
],
{ok, _} = testsuite:add_yaws_server(?wwwdir, SConfHTTP),
Config;
init_per_group(secure_websocket_tests, Config) ->
SConfHTTPS = [
{docroot, ?wwwdir},
{port, testsuite:get_yaws_port(1, Config)},
{listen, {127,0,0,1}},
{flags, [{access_log, true}]},
{servername, "localhost"},
{ssl, [
{keyfile, ?sslkeyfile},
{certfile, ?sslcertfile},
{depth, 0}
]}
],
{ok, _} = testsuite:add_yaws_server(?wwwdir, SConfHTTPS),
Config.
end_per_group(_Group, _Config) ->
testsuite:reset_yaws_servers(),
ok.
init_per_testcase(_Test, Config) ->
Config.
end_per_testcase(_Test, _Config) ->
ok.
%% A well-formed opening handshake must be answered with 101, a correct
%% Sec-WebSocket-Accept hash, and Upgrade/Connection headers; a normal
%% close must then be echoed back by the server.
valid_opening_handshake(Config) ->
WSPath = "/websockets_example_endpoint.yaws",
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
{ok, {101, Hds}} = wsopen(Sock, Key, WSPath, "", 13),
%% Accept header must be the SHA-1/base64 digest derived from Key.
?assert(is_valid_handshake_hash(
Key, proplists:get_value("sec-websocket-accept", Hds)
)),
%% Header values are case-insensitive, hence the to_lower normalization.
?assertEqual("websocket", string:to_lower(proplists:get_value("upgrade", Hds))),
?assertEqual("upgrade", string:to_lower(proplists:get_value("connection", Hds))),
?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
?assertEqual(ok, close(Sock)),
ok.
%% Handshake with an unsupported Sec-WebSocket-Version (15) must be
%% rejected with 400 and the server must advertise the versions it
%% supports ("13, 8") in Sec-WebSocket-Version.
bad_version_handshake(Config) ->
WSPath = "/websockets_example_endpoint.yaws",
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
{ok, {400, Hds}} = wsopen(Sock, Key, WSPath, "", 15),
?assertEqual("13, 8", string:to_lower(
proplists:get_value("sec-websocket-version", Hds)
)),
?assertEqual(ok, close(Sock)),
ok.
%% Handshake expected to be rejected with 403 Forbidden.
%% NOTE(review): the request looks identical to valid_opening_handshake;
%% presumably the endpoint rejects based on origin checking configured
%% server-side — confirm against the endpoint script.
bad_origin_handshake(Config) ->
WSPath = "/websockets_example_endpoint.yaws",
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
{ok, {403, _}} = wsopen(Sock, Key, WSPath, "", 13),
?assertEqual(ok, close(Sock)),
ok.
%% Hand-rolled handshake that omits the "Connection:" header entirely;
%% the server must answer 400 Bad Request.
noconnection_handshake(Config) ->
WSPath = "/websockets_example_endpoint.yaws",
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
Handshake = ["GET ", WSPath, " HTTP/1.1\r\n",
"Host: localhost\r\n",
"Upgrade: websocket\r\n",
"Sec-WebSocket-Key: ", Key, "\r\n",
"Origin: \r\n",
"Sec-WebSocket-Version: 13\r\n",
"\r\n"],
%% Send on the raw socket, transparently handling TLS vs plain TCP.
case yaws_api:get_sslsocket(Sock) of
{ok, SslSock} -> ssl:send(SslSock, Handshake);
undefined -> gen_tcp:send(Sock, Handshake)
end,
?assertMatch({ok, {400, _}}, read_handshake_response(Sock)),
?assertEqual(ok, close(Sock)),
ok.
%% Hand-rolled handshake with "Connection: Keep-Alive" instead of
%% "Connection: Upgrade"; the server must answer 400 Bad Request.
bad_connection_handshake(Config) ->
WSPath = "/websockets_example_endpoint.yaws",
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
Handshake = ["GET ", WSPath, " HTTP/1.1\r\n",
"Host: localhost\r\n",
"Upgrade: websocket\r\n",
"Connection: Keep-Alive\r\n",
"Sec-WebSocket-Key: ", Key, "\r\n",
"Origin: \r\n",
"Sec-WebSocket-Version: 13\r\n",
"\r\n"],
%% Send on the raw socket, transparently handling TLS vs plain TCP.
case yaws_api:get_sslsocket(Sock) of
{ok, SslSock} -> ssl:send(SslSock, Handshake);
undefined -> gen_tcp:send(Sock, Handshake)
end,
?assertMatch({ok, {400, _}}, read_handshake_response(Sock)),
?assertEqual(ok, close(Sock)),
ok.
%% Hand-rolled handshake that omits the "Upgrade:" header; the server
%% must answer 400 Bad Request.
noupgrade_handshake(Config) ->
WSPath = "/websockets_example_endpoint.yaws",
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
Handshake = ["GET ", WSPath, " HTTP/1.1\r\n",
"Host: localhost\r\n",
"Connection: Upgrade\r\n",
"Sec-WebSocket-Key: ", Key, "\r\n",
"Origin: \r\n",
"Sec-WebSocket-Version: 13\r\n",
"\r\n"],
%% Send on the raw socket, transparently handling TLS vs plain TCP.
case yaws_api:get_sslsocket(Sock) of
{ok, SslSock} -> ssl:send(SslSock, Handshake);
undefined -> gen_tcp:send(Sock, Handshake)
end,
?assertMatch({ok, {400, _}}, read_handshake_response(Sock)),
?assertEqual(ok, close(Sock)),
ok.
%% Hand-rolled handshake with "Upgrade: TLS/1.0" instead of
%% "Upgrade: websocket"; the server must answer 400 Bad Request.
bad_upgrade_handshake(Config) ->
WSPath = "/websockets_example_endpoint.yaws",
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
Handshake = ["GET ", WSPath, " HTTP/1.1\r\n",
"Host: localhost\r\n",
"Upgrade: TLS/1.0\r\n",
"Connection: Upgrade\r\n",
"Sec-WebSocket-Key: ", Key, "\r\n",
"Origin: \r\n",
"Sec-WebSocket-Version: 13\r\n",
"\r\n"],
%% Send on the raw socket, transparently handling TLS vs plain TCP.
case yaws_api:get_sslsocket(Sock) of
{ok, SslSock} -> ssl:send(SslSock, Handshake);
undefined -> gen_tcp:send(Sock, Handshake)
end,
?assertMatch({ok, {400, _}}, read_handshake_response(Sock)),
?assertEqual(ok, close(Sock)),
ok.
%% Drivers for the unfragmented-message tests. Each case is a
%% {PayloadSize, WriteBlockSize} pair; sizes straddle the 7-bit (125/126)
%% and 16-bit (65535/65536) length-encoding boundaries, and the final
%% case also exercises chunked writes (block size 997).
basic_unfragmented_text(Config) ->
    lists:foreach(
      fun({Sz, BlockSz}) -> basic_unfragmented_text(Config, Sz, BlockSz) end,
      [{0, all}, {125, all}, {126, all}, {127, all}, {128, all},
       {65535, all}, {65536, all}, {65536, 997}]),
    ok.
advanced_unfragmented_text(Config) ->
    lists:foreach(
      fun({Sz, BlockSz}) -> advanced_unfragmented_text(Config, Sz, BlockSz) end,
      [{0, all}, {125, all}, {126, all}, {127, all}, {128, all},
       {65535, all}, {65536, all}, {65536, 997}]),
    ok.
basic_unfragmented_binary(Config) ->
    lists:foreach(
      fun({Sz, BlockSz}) -> basic_unfragmented_binary(Config, Sz, BlockSz) end,
      [{0, all}, {125, all}, {126, all}, {127, all}, {128, all},
       {65535, all}, {65536, all}, {65536, 997}]),
    ok.
advanced_unfragmented_binary(Config) ->
    lists:foreach(
      fun({Sz, BlockSz}) -> advanced_unfragmented_binary(Config, Sz, BlockSz) end,
      [{0, all}, {125, all}, {126, all}, {127, all}, {128, all},
       {65535, all}, {65536, all}, {65536, 997}]),
    ok.
%% Build an Sz-byte payload ($* for text, 16#fe for binary) and run the
%% unfragmented round-trip against the chosen endpoint.
basic_unfragmented_text(Config, Sz, BlockSz) ->
    unfragmented_msg(Config, "/websockets_example_endpoint.yaws",
                     ?WS_OPCODE_TEXT, binary:copy(<<$*>>, Sz), BlockSz).
advanced_unfragmented_text(Config, Sz, BlockSz) ->
    unfragmented_msg(Config, "/websockets_autobahn_endpoint.yaws",
                     ?WS_OPCODE_TEXT, binary:copy(<<$*>>, Sz), BlockSz).
basic_unfragmented_binary(Config, Sz, BlockSz) ->
    unfragmented_msg(Config, "/websockets_example_endpoint.yaws",
                     ?WS_OPCODE_BINARY, binary:copy(<<16#fe>>, Sz), BlockSz).
advanced_unfragmented_binary(Config, Sz, BlockSz) ->
    unfragmented_msg(Config, "/websockets_autobahn_endpoint.yaws",
                     ?WS_OPCODE_BINARY, binary:copy(<<16#fe>>, Sz), BlockSz).
%% Round-trip one unfragmented message of the given Type/Payload, first
%% unmasked then masked, and expect the server to echo both; finish with
%% a clean close handshake. BlockSz controls chunked socket writes.
unfragmented_msg(Config, WSPath, Type, Payload, BlockSz) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
SndFrame1 = #frame{opcode=Type, payload=Payload},
?assertEqual(ok, send_frame(Sock, SndFrame1, BlockSz)),
{ok, RcvFrame1} = read_frame(Sock),
?assertEqual(Type, RcvFrame1#frame.opcode),
?assertEqual(Payload, RcvFrame1#frame.payload),
%% Same payload again, this time masked with a fixed 4-byte masking key.
SndFrame2 = SndFrame1#frame{masked=true, mask = <<"abcd">>},
?assertEqual(ok, send_frame(Sock, SndFrame2, BlockSz)),
{ok, RcvFrame2} = read_frame(Sock),
?assertEqual(Type, RcvFrame2#frame.opcode),
?assertEqual(Payload, RcvFrame2#frame.payload),
?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
?assertEqual(ok, close(Sock)),
ok.
%% Drivers for the ping/pong tests: {PayloadSize, WriteBlockSize} cases
%% (125 is the maximum control-frame payload; block size 1 forces
%% byte-at-a-time writes).
basic_ping_text(Config) ->
    [basic_ping_text(Config, Sz, BlockSz) || {Sz, BlockSz} <- [{0, all}, {125, all}]],
    ok.
basic_ping_binary(Config) ->
    [basic_ping_binary(Config, Sz, BlockSz) || {Sz, BlockSz} <- [{125, all}, {125, 1}]],
    ok.
advanced_ping_text(Config) ->
    [advanced_ping_text(Config, Sz, BlockSz) || {Sz, BlockSz} <- [{0, all}, {125, all}]],
    ok.
advanced_ping_binary(Config) ->
    [advanced_ping_binary(Config, Sz, BlockSz) || {Sz, BlockSz} <- [{125, all}, {125, 1}]],
    ok.
%% Build an Sz-byte ping payload ($* for text, 16#fe for binary) and run
%% the ping/pong exchange against the chosen endpoint.
basic_ping_text(Config, Sz, BlockSz) ->
    ping_msg(Config, "/websockets_example_endpoint.yaws",
             binary:copy(<<$*>>, Sz), BlockSz).
advanced_ping_text(Config, Sz, BlockSz) ->
    ping_msg(Config, "/websockets_autobahn_endpoint.yaws",
             binary:copy(<<$*>>, Sz), BlockSz).
basic_ping_binary(Config, Sz, BlockSz) ->
    ping_msg(Config, "/websockets_example_endpoint.yaws",
             binary:copy(<<16#fe>>, Sz), BlockSz).
advanced_ping_binary(Config, Sz, BlockSz) ->
    ping_msg(Config, "/websockets_autobahn_endpoint.yaws",
             binary:copy(<<16#fe>>, Sz), BlockSz).
%% Send a ping (unmasked then masked) and expect a pong echoing the same
%% payload each time; finish with a clean close handshake.
ping_msg(Config, WSPath, Payload, BlockSz) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
SndFrame1 = #frame{opcode=?WS_OPCODE_PING, payload=Payload},
?assertEqual(ok, send_frame(Sock, SndFrame1, BlockSz)),
{ok, RcvFrame1} = read_frame(Sock),
?assertEqual(?WS_OPCODE_PONG, RcvFrame1#frame.opcode),
?assertEqual(Payload, RcvFrame1#frame.payload),
%% Repeat masked with a fixed 4-byte masking key.
SndFrame2 = SndFrame1#frame{masked=true, mask = <<"abcd">>},
?assertEqual(ok, send_frame(Sock, SndFrame2, BlockSz)),
{ok, RcvFrame2} = read_frame(Sock),
?assertEqual(?WS_OPCODE_PONG, RcvFrame2#frame.opcode),
?assertEqual(Payload, RcvFrame2#frame.payload),
?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
?assertEqual(ok, close(Sock)),
ok.
%% A ping with a 126-byte payload exceeds the 125-byte control-frame
%% limit; the server must close with a protocol-error status.
toolong_payload_ping(Config) ->
WSPath = "/websockets_example_endpoint.yaws",
Key = "dGhlIHNhbXBsZSBub25jZQ==",
Payload = list_to_binary(lists:duplicate(126, 16#fe)),
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
SndFrame = #frame{opcode=?WS_OPCODE_PING, payload=Payload},
?assertEqual(ok, send_frame(Sock, SndFrame, all)),
{ok, Frames} = wsflush(Sock, false),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_PROTO_ERROR])),
%% Nothing more may arrive after the server's close frame.
?assertEqual({ok, []}, wsflush(Sock, true)),
?assertEqual(ok, close(Sock)),
ok.
%% Unsolicited-pong drivers and payload builders: a pong sent without a
%% preceding ping must simply be ignored by the server.
basic_unsolicited_pong(Config) ->
    [basic_unsolicited_pong(Config, Sz) || Sz <- [0, 125]],
    ok.
advanced_unsolicited_pong(Config) ->
    [advanced_unsolicited_pong(Config, Sz) || Sz <- [0, 125]],
    ok.
basic_unsolicited_pong(Config, Sz) ->
    unsolicited_pong_msg(Config, "/websockets_example_endpoint.yaws",
                         binary:copy(<<16#fe>>, Sz)).
advanced_unsolicited_pong(Config, Sz) ->
    unsolicited_pong_msg(Config, "/websockets_autobahn_endpoint.yaws",
                         binary:copy(<<16#fe>>, Sz)).
%% Send a pong that was never solicited by a ping; the server must not
%% reply to it and the subsequent close handshake must proceed normally.
unsolicited_pong_msg(Config, WSPath, Payload) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
SndFrame = #frame{opcode=?WS_OPCODE_PONG, payload=Payload},
?assertEqual(ok, send_frame(Sock, SndFrame, all)),
?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
%% Only the echoed close frame is expected back.
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
?assertEqual(ok, close(Sock)),
ok.
%% Per-endpoint wrappers for the unsolicited-pong-then-ping scenario.
basic_unsolicited_pong_ping_pong(Config) ->
    WSPath = "/websockets_example_endpoint.yaws",
    unsolicited_pong_ping_pong(Config, WSPath).
advanced_unsolicited_pong_ping_pong(Config) ->
    WSPath = "/websockets_autobahn_endpoint.yaws",
    unsolicited_pong_ping_pong(Config, WSPath).
%% An unsolicited pong followed by a real ping: the server must ignore
%% the pong but still answer the ping with a pong carrying its payload.
unsolicited_pong_ping_pong(Config, WSPath) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
Payload1 = list_to_binary(lists:duplicate(125, $*)),
Payload2 = <<"ping payload">>,
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
SndFrame1 = #frame{opcode=?WS_OPCODE_PONG, payload=Payload1},
?assertEqual(ok, send_frame(Sock, SndFrame1, all)),
SndFrame2 = #frame{opcode=?WS_OPCODE_PING, payload=Payload2},
?assertEqual(ok, send_frame(Sock, SndFrame2, all)),
%% Only the ping's pong comes back; the unsolicited pong gets no reply.
{ok, RcvFrame2} = read_frame(Sock),
?assertEqual(?WS_OPCODE_PONG, RcvFrame2#frame.opcode),
?assertEqual(Payload2, RcvFrame2#frame.payload),
?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
?assertEqual(ok, close(Sock)),
ok.
%% Drivers/wrappers for the burst-of-10-pings test, run with whole-frame
%% writes (all) and byte-at-a-time writes (1).
basic_10_pings(Config) ->
    [basic_10_pings(Config, BlockSz) || BlockSz <- [all, 1]],
    ok.
advanced_10_pings(Config) ->
    [advanced_10_pings(Config, BlockSz) || BlockSz <- [all, 1]],
    ok.
basic_10_pings(Config, BlockSz) ->
    WSPath = "/websockets_example_endpoint.yaws",
    send_10_pings(Config, WSPath, BlockSz).
advanced_10_pings(Config, BlockSz) ->
    WSPath = "/websockets_autobahn_endpoint.yaws",
    send_10_pings(Config, WSPath, BlockSz).
%% Fire 10 identical pings back-to-back, then close; the server must
%% answer every ping (10 pongs with the same payload) before echoing the
%% close frame.
send_10_pings(Config, WSPath, BlockSz) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
Payload = <<"ping payload">>,
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
SndFrame = #frame{opcode=?WS_OPCODE_PING, payload=Payload},
[begin
?assertEqual(ok, send_frame(Sock, SndFrame, BlockSz))
end || _ <- lists:seq(1, 10)],
?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
{ok, Frames} = wsflush(Sock, true),
%% First 10 frames are the pongs, the remainder is the close frame.
{Frames1, Frames2} = lists:split(10, Frames),
?assert(lists:all(fun(#frame{payload=P}) -> P == Payload end, Frames1)),
?assert(is_valid_close_frame(Frames2, [?WS_STATUS_NORMAL])),
?assertEqual(ok, close(Sock)),
ok.
%% Wrappers exercising each frame type with a different non-zero RSV
%% value; RSV bits must be zero when no extension is negotiated.
badrsv_text(Config) ->
    WSPath = "/websockets_example_endpoint.yaws",
    badrsv(Config, WSPath, ?WS_OPCODE_TEXT, 1).
badrsv_binary(Config) ->
    WSPath = "/websockets_example_endpoint.yaws",
    badrsv(Config, WSPath, ?WS_OPCODE_BINARY, 2).
badrsv_ping(Config) ->
    WSPath = "/websockets_example_endpoint.yaws",
    badrsv(Config, WSPath, ?WS_OPCODE_PING, 3).
badrsv_close(Config) ->
    WSPath = "/websockets_example_endpoint.yaws",
    badrsv(Config, WSPath, ?WS_OPCODE_CLOSE, 4).
%% A frame with non-zero RSV bits (no extension negotiated) must make
%% the server close with a protocol-error status.
badrsv(Config, WSPath, Type, Rsv) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
Payload = <<"small payload">>,
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
SndFrame = #frame{rsv=Rsv, opcode=Type, payload=Payload},
?assertEqual(ok, send_frame(Sock, SndFrame, all)),
{ok, Frames} = wsflush(Sock, false),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_PROTO_ERROR])),
?assertEqual({ok, []}, wsflush(Sock, true)),
?assertEqual(ok, close(Sock)),
ok.
%% Driver: run the good/bad/good RSV sequence with whole-frame writes
%% and with byte-at-a-time writes.
badrsv_complex(Config) ->
    [badrsv_complex(Config, BlockSz) || BlockSz <- [all, 1]],
    ok.
%% A valid text frame, then one with bad RSV, then a ping: the first
%% frame must be echoed, the bad-RSV frame must trigger a protocol-error
%% close, and the trailing ping must be discarded.
badrsv_complex(Config, BlockSz) ->
WSPath = "/websockets_example_endpoint.yaws",
Key = "dGhlIHNhbXBsZSBub25jZQ==",
Payload = <<"small payload">>,
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
SndFrame1 = #frame{opcode=?WS_OPCODE_TEXT, payload=Payload},
SndFrame2 = SndFrame1#frame{rsv=5},
SndFrame3 = #frame{opcode=?WS_OPCODE_PING, payload=Payload},
?assertEqual(ok, send_frame(Sock, SndFrame1, BlockSz)),
?assertEqual(ok, send_frame(Sock, SndFrame2, BlockSz)),
?assertEqual(ok, send_frame(Sock, SndFrame3, BlockSz)),
{ok, [Frame1|Frames]} = wsflush(Sock, false),
?assertEqual(?WS_OPCODE_TEXT, Frame1#frame.opcode),
?assertEqual(Payload, Frame1#frame.payload),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_PROTO_ERROR])),
?assertEqual({ok, []}, wsflush(Sock, true)),
?assertEqual(ok, close(Sock)),
ok.
%% Driver: try every reserved opcode (3-7 non-control, 11-15 control).
badopcodes(Config) ->
    lists:foreach(fun(Opcode) -> badopcodes(Config, Opcode) end,
                  [3, 4, 5, 6, 7, 11, 12, 13, 14, 15]),
    ok.
%% A frame with a reserved/unknown opcode must make the server close
%% with a protocol-error status.
badopcodes(Config, Opcode) ->
WSPath = "/websockets_example_endpoint.yaws",
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
SndFrame = #frame{opcode=Opcode},
?assertEqual(ok, send_frame(Sock, SndFrame, all)),
{ok, Frames} = wsflush(Sock, false),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_PROTO_ERROR])),
?assertEqual({ok, []}, wsflush(Sock, true)),
?assertEqual(ok, close(Sock)),
ok.
%% Per-endpoint wrappers for the all-empty-fragments scenario.
basic_fragmented_empty(Config) ->
    WSPath = "/websockets_example_endpoint.yaws",
    fragmented_empty(Config, WSPath).
advanced_fragmented_empty(Config) ->
    WSPath = "/websockets_autobahn_endpoint.yaws",
    fragmented_empty(Config, WSPath).
%% A text message split into three fragments that all carry empty
%% payloads must be reassembled and echoed as one empty text frame.
fragmented_empty(Config, WSPath) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
%% fin=false on all but the last fragment; continuations carry opcode 0.
SndFrame1 = #frame{fin=false, opcode=?WS_OPCODE_TEXT},
SndFrame2 = #frame{fin=false, opcode=?WS_OPCODE_CONTINUATION},
SndFrame3 = #frame{opcode=?WS_OPCODE_CONTINUATION},
?assertEqual(ok, send_frame(Sock, SndFrame1, all)),
?assertEqual(ok, send_frame(Sock, SndFrame2, all)),
?assertEqual(ok, send_frame(Sock, SndFrame3, all)),
?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
{ok, [Frame1|Frames]} = wsflush(Sock, true),
?assertEqual(?WS_OPCODE_TEXT, Frame1#frame.opcode),
?assertEqual(<<>>, Frame1#frame.payload),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
?assertEqual(ok, close(Sock)),
ok.
%% Wrappers for the three-nonempty-fragments scenario, per endpoint and
%% message type.
basic_fragmented_text_1(Config) ->
    WSPath = "/websockets_example_endpoint.yaws",
    valid_fragmented_1(Config, WSPath, ?WS_OPCODE_TEXT).
advanced_fragmented_text_1(Config) ->
    WSPath = "/websockets_autobahn_endpoint.yaws",
    valid_fragmented_1(Config, WSPath, ?WS_OPCODE_TEXT).
basic_fragmented_binary_1(Config) ->
    WSPath = "/websockets_example_endpoint.yaws",
    valid_fragmented_1(Config, WSPath, ?WS_OPCODE_BINARY).
advanced_fragmented_binary_1(Config) ->
    WSPath = "/websockets_autobahn_endpoint.yaws",
    valid_fragmented_1(Config, WSPath, ?WS_OPCODE_BINARY).
%% A message split into three non-empty fragments must be reassembled
%% and echoed as a single frame whose payload is the concatenation.
valid_fragmented_1(Config, WSPath, Type) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
Payload1 = <<"fragment1">>,
Payload2 = <<"fragment2">>,
Payload3 = <<"fragment3">>,
Payload = <<Payload1/binary, Payload2/binary, Payload3/binary>>,
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
SndFrame1 = #frame{fin=false, opcode=Type, payload=Payload1},
SndFrame2 = #frame{fin=false, opcode=?WS_OPCODE_CONTINUATION, payload=Payload2},
SndFrame3 = #frame{opcode=?WS_OPCODE_CONTINUATION, payload=Payload3},
?assertEqual(ok, send_frame(Sock, SndFrame1, all)),
?assertEqual(ok, send_frame(Sock, SndFrame2, all)),
?assertEqual(ok, send_frame(Sock, SndFrame3, all)),
?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
{ok, [Frame1|Frames]} = wsflush(Sock, true),
?assertEqual(Type, Frame1#frame.opcode),
?assertEqual(Payload, Frame1#frame.payload),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
?assertEqual(ok, close(Sock)),
ok.
%% Wrappers for the empty/payload/empty fragment scenario, per endpoint
%% and message type.
basic_fragmented_text_2(Config) ->
    WSPath = "/websockets_example_endpoint.yaws",
    valid_fragmented_2(Config, WSPath, ?WS_OPCODE_TEXT).
advanced_fragmented_text_2(Config) ->
    WSPath = "/websockets_autobahn_endpoint.yaws",
    valid_fragmented_2(Config, WSPath, ?WS_OPCODE_TEXT).
basic_fragmented_binary_2(Config) ->
    WSPath = "/websockets_example_endpoint.yaws",
    valid_fragmented_2(Config, WSPath, ?WS_OPCODE_BINARY).
advanced_fragmented_binary_2(Config) ->
    WSPath = "/websockets_autobahn_endpoint.yaws",
    valid_fragmented_2(Config, WSPath, ?WS_OPCODE_BINARY).
%% A message whose first and last fragments are empty and only the
%% middle one carries data must be echoed with just that payload.
valid_fragmented_2(Config, WSPath, Type) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
Payload = <<"fragment">>,
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
SndFrame1 = #frame{fin=false, opcode=Type},
SndFrame2 = #frame{fin=false, opcode=?WS_OPCODE_CONTINUATION, payload=Payload},
SndFrame3 = #frame{opcode=?WS_OPCODE_CONTINUATION},
?assertEqual(ok, send_frame(Sock, SndFrame1, all)),
?assertEqual(ok, send_frame(Sock, SndFrame2, all)),
?assertEqual(ok, send_frame(Sock, SndFrame3, all)),
?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
{ok, [Frame1|Frames]} = wsflush(Sock, true),
?assertEqual(Type, Frame1#frame.opcode),
?assertEqual(Payload, Frame1#frame.payload),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
?assertEqual(ok, close(Sock)),
ok.
%% Wrappers: control frames (ping/pong/close) may never be fragmented,
%% per endpoint and control opcode.
basic_fragmented_ping(Config) ->
    WSPath = "/websockets_example_endpoint.yaws",
    invalid_fragmented(Config, WSPath, ?WS_OPCODE_PING).
advanced_fragmented_ping(Config) ->
    WSPath = "/websockets_autobahn_endpoint.yaws",
    invalid_fragmented(Config, WSPath, ?WS_OPCODE_PING).
basic_fragmented_pong(Config) ->
    WSPath = "/websockets_example_endpoint.yaws",
    invalid_fragmented(Config, WSPath, ?WS_OPCODE_PONG).
advanced_fragmented_pong(Config) ->
    WSPath = "/websockets_autobahn_endpoint.yaws",
    invalid_fragmented(Config, WSPath, ?WS_OPCODE_PONG).
basic_fragmented_close(Config) ->
    WSPath = "/websockets_example_endpoint.yaws",
    invalid_fragmented(Config, WSPath, ?WS_OPCODE_CLOSE).
advanced_fragmented_close(Config) ->
    WSPath = "/websockets_autobahn_endpoint.yaws",
    invalid_fragmented(Config, WSPath, ?WS_OPCODE_CLOSE).
%% Fragmenting a control frame (fin=false on a ping/pong/close) is a
%% protocol violation; the server must close with a protocol-error
%% status.
invalid_fragmented(Config, WSPath, Type) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
Payload1 = <<"fragment1">>,
Payload2 = <<"fragment2">>,
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
SndFrame1 = #frame{fin=false, opcode=Type, payload=Payload1},
SndFrame2 = #frame{opcode=?WS_OPCODE_CONTINUATION, payload=Payload2},
?assertEqual(ok, send_frame(Sock, SndFrame1, all)),
?assertEqual(ok, send_frame(Sock, SndFrame2, all)),
{ok, Frames} = wsflush(Sock, false),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_PROTO_ERROR])),
?assertEqual({ok, []}, wsflush(Sock, true)),
?assertEqual(ok, close(Sock)),
ok.
%% Per-endpoint wrappers: ping interleaved inside a fragmented message.
basic_fragmented_text_with_ping(Config) ->
    WSPath = "/websockets_example_endpoint.yaws",
    fragmented_with_ping(Config, WSPath).
advanced_fragmented_text_with_ping(Config) ->
    WSPath = "/websockets_autobahn_endpoint.yaws",
    fragmented_with_ping(Config, WSPath).
%% A ping injected between the fragments of a text message must be
%% answered immediately (pong arrives before the reassembled text).
fragmented_with_ping(Config, WSPath) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
Payload1 = <<"fragment1">>,
Payload2 = <<"fragment2">>,
Payload = <<Payload1/binary, Payload2/binary>>,
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
SndFrame1 = #frame{fin=false, opcode=?WS_OPCODE_TEXT, payload=Payload1},
SndFrame2 = #frame{opcode=?WS_OPCODE_PING},
SndFrame3 = #frame{opcode=?WS_OPCODE_CONTINUATION, payload=Payload2},
?assertEqual(ok, send_frame(Sock, SndFrame1, all)),
?assertEqual(ok, send_frame(Sock, SndFrame2, all)),
?assertEqual(ok, send_frame(Sock, SndFrame3, all)),
?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
%% Expected order: pong first, then the reassembled text, then close.
{ok, [Frame1, Frame2|Frames]} = wsflush(Sock, true),
?assertEqual(?WS_OPCODE_PONG, Frame1#frame.opcode),
?assertEqual(?WS_OPCODE_TEXT, Frame2#frame.opcode),
?assertEqual(Payload, Frame2#frame.payload),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
?assertEqual(ok, close(Sock)),
ok.
%% Per-endpoint wrappers: pong interleaved inside a fragmented message.
basic_fragmented_text_with_pong(Config) ->
    WSPath = "/websockets_example_endpoint.yaws",
    fragmented_with_pong(Config, WSPath).
advanced_fragmented_text_with_pong(Config) ->
    WSPath = "/websockets_autobahn_endpoint.yaws",
    fragmented_with_pong(Config, WSPath).
%% An unsolicited pong injected between the fragments of a text message
%% must be ignored; only the reassembled text is echoed back.
fragmented_with_pong(Config, WSPath) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
Payload1 = <<"fragment1">>,
Payload2 = <<"fragment2">>,
Payload = <<Payload1/binary, Payload2/binary>>,
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
SndFrame1 = #frame{fin=false, opcode=?WS_OPCODE_TEXT, payload=Payload1},
SndFrame2 = #frame{opcode=?WS_OPCODE_PONG},
SndFrame3 = #frame{opcode=?WS_OPCODE_CONTINUATION, payload=Payload2},
?assertEqual(ok, send_frame(Sock, SndFrame1, all)),
?assertEqual(ok, send_frame(Sock, SndFrame2, all)),
?assertEqual(ok, send_frame(Sock, SndFrame3, all)),
?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
{ok, [Frame1|Frames]} = wsflush(Sock, true),
?assertEqual(?WS_OPCODE_TEXT, Frame1#frame.opcode),
?assertEqual(Payload, Frame1#frame.payload),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
?assertEqual(ok, close(Sock)),
ok.
%% Wrappers: a continuation frame with no preceding initial fragment,
%% both as a final (fin=true) and a non-final (fin=false) frame.
basic_badfragmented_1(Config) ->
    WSPath = "/websockets_example_endpoint.yaws",
    badfragmented(Config, WSPath, true).
basic_badfragmented_2(Config) ->
    WSPath = "/websockets_example_endpoint.yaws",
    badfragmented(Config, WSPath, false).
advanced_badfragmented_1(Config) ->
    WSPath = "/websockets_autobahn_endpoint.yaws",
    badfragmented(Config, WSPath, true).
advanced_badfragmented_2(Config) ->
    WSPath = "/websockets_autobahn_endpoint.yaws",
    badfragmented(Config, WSPath, false).
%% A continuation frame sent without a preceding unfinished message is a
%% protocol violation; the server must close with a protocol error.
badfragmented(Config, WSPath, Fin) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
Payload = <<"small payload">>,
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
SndFrame1 = #frame{fin=Fin, opcode=?WS_OPCODE_CONTINUATION, payload=Payload},
SndFrame2 = #frame{opcode=?WS_OPCODE_TEXT, payload=Payload},
?assertEqual(ok, send_frame(Sock, SndFrame1, all)),
?assertEqual(ok, send_frame(Sock, SndFrame2, all)),
{ok, Frames} = wsflush(Sock, false),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_PROTO_ERROR])),
?assertEqual({ok, []}, wsflush(Sock, true)),
?assertEqual(ok, close(Sock)),
ok.
%% Per-endpoint wrappers: second fragment not tagged as continuation.
basic_badfragmented_nocontinuation(Config) ->
    WSPath = "/websockets_example_endpoint.yaws",
    badfragmented_nocontinuation(Config, WSPath).
advanced_badfragmented_nocontinuation(Config) ->
    WSPath = "/websockets_autobahn_endpoint.yaws",
    badfragmented_nocontinuation(Config, WSPath).
%% Starting a new text frame while a fragmented message is still open
%% (second frame should have been a continuation) is a protocol
%% violation; the server must close with a protocol error.
badfragmented_nocontinuation(Config, WSPath) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
Payload1 = <<"fragment1">>,
Payload2 = <<"fragment2">>,
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
SndFrame1 = #frame{fin=false, opcode=?WS_OPCODE_TEXT, payload=Payload1},
SndFrame2 = #frame{fin=false, opcode=?WS_OPCODE_TEXT, payload=Payload2},
?assertEqual(ok, send_frame(Sock, SndFrame1, all)),
?assertEqual(ok, send_frame(Sock, SndFrame2, all)),
{ok, Frames} = wsflush(Sock, false),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_PROTO_ERROR])),
?assertEqual({ok, []}, wsflush(Sock, true)),
?assertEqual(ok, close(Sock)),
ok.
%% Drivers: valid-UTF-8 text echo with whole-frame and byte-at-a-time
%% writes, per endpoint.
basic_unfragmented_valid_utf8_text(Config) ->
    WSPath = "/websockets_example_endpoint.yaws",
    [unfragmented_valid_utf8(Config, WSPath, BlockSz) || BlockSz <- [all, 1]],
    ok.
advanced_unfragmented_valid_utf8_text(Config) ->
    WSPath = "/websockets_autobahn_endpoint.yaws",
    [unfragmented_valid_utf8(Config, WSPath, BlockSz) || BlockSz <- [all, 1]],
    ok.
%% Every payload below is valid UTF-8 (including boundary code points
%% such as U+0000, U+D7FF/U+E000 around the surrogate gap, and U+10FFFF
%% encoded as f4 8f bf bf); each must be echoed back unchanged.
unfragmented_valid_utf8(Config, WSPath, BlockSz) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
%% Send one text frame and assert the echo matches byte-for-byte.
Fun = fun(Payload) ->
SndFrame = #frame{opcode=?WS_OPCODE_TEXT, payload=Payload},
?assertEqual(ok, send_frame(Sock, SndFrame, BlockSz)),
{ok, RcvFrame} = read_frame(Sock),
?assertEqual(?WS_OPCODE_TEXT, RcvFrame#frame.opcode),
?assertEqual(Payload, RcvFrame#frame.payload)
end,
Fun(<<16#ce,16#ba>>),
Fun(<<16#ce,16#ba,16#e1,16#bd,16#b9>>),
Fun(<<16#ce,16#ba,16#e1,16#bd,16#b9,16#cf,16#83>>),
Fun(<<16#ce,16#ba,16#e1,16#bd,16#b9,16#cf,16#83,16#ce,16#bc>>),
Fun(<<16#ce,16#ba,16#e1,16#bd,16#b9,16#cf,16#83,16#ce,16#bc,
16#ce,16#b5>>),
Fun(<<"Hello-",16#c2,16#b5,$@,16#c3,16#9f,16#c3,16#b6,16#c3,16#a4,
16#c3,16#bc,16#c3,16#a0,16#c3,16#a1,"-UTF-8!!">>),
Fun(<<16#00>>),
Fun(<<16#c2,16#80>>),
Fun(<<16#e0,16#a0,16#80>>),
Fun(<<16#f0,16#90,16#80,16#80>>),
Fun(<<16#7f>>),
Fun(<<16#df,16#bf>>),
Fun(<<16#ef,16#bf,16#bf>>),
Fun(<<16#f4,16#8f,16#bf,16#bf>>),
Fun(<<16#ed,16#9f,16#bf>>),
Fun(<<16#ee,16#80,16#80>>),
Fun(<<16#ef,16#bf,16#bd>>),
Fun(<<16#f4,16#8f,16#bf,16#bf>>),
?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
?assertEqual(ok, close(Sock)),
ok.
%% Drivers: fragmented valid-UTF-8 echo with whole-payload and one-byte
%% fragments, per endpoint.
basic_fragmented_valid_utf8_text(Config) ->
    WSPath = "/websockets_example_endpoint.yaws",
    [fragmented_valid_utf8(Config, WSPath, FragSz) || FragSz <- [all, 1]],
    ok.
advanced_fragmented_valid_utf8_text(Config) ->
    WSPath = "/websockets_autobahn_endpoint.yaws",
    [fragmented_valid_utf8(Config, WSPath, FragSz) || FragSz <- [all, 1]],
    ok.
%% Valid UTF-8 text split into FragSz-sized fragments (1 splits multi-
%% byte sequences across frames) must be reassembled and echoed whole.
fragmented_valid_utf8(Config, WSPath, FragSz) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
Payload1 = <<"Hello-",16#c2,16#b5,$@,16#c3,16#9f,16#c3,16#b6,16#c3,16#a4>>,
Payload2 = <<16#c3,16#bc,16#c3,16#a0,16#c3,16#a1,"-UTF-8!!">>,
Payload = <<Payload1/binary, Payload2/binary>>,
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
Payloads = fragment_payload(Payload1, FragSz) ++
fragment_payload(Payload2, FragSz),
%% First fragment opens the message, last one closes it (fin=true),
%% everything in between is a non-final continuation.
[First|Rest0] = Payloads,
[Last|Rest1] = lists:reverse(Rest0),
Middles = lists:reverse(Rest1),
FirstFrame = #frame{fin=false, opcode=?WS_OPCODE_TEXT, payload=First},
LastFrame = #frame{opcode=?WS_OPCODE_CONTINUATION, payload=Last},
MiddleFrames = lists:map(fun(P) ->
#frame{fin=false,
opcode=?WS_OPCODE_CONTINUATION,
payload=P}
end, Middles),
?assertEqual(ok, send_frame(Sock, FirstFrame, all)),
lists:foreach(fun(F) ->
?assertEqual(ok, send_frame(Sock, F, all))
end, MiddleFrames),
?assertEqual(ok, send_frame(Sock, LastFrame, all)),
?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
{ok, [Frame1|Frames]} = wsflush(Sock, true),
?assertEqual(?WS_OPCODE_TEXT, Frame1#frame.opcode),
?assertEqual(Payload, Frame1#frame.payload),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
?assertEqual(ok, close(Sock)),
ok.
%% Drivers: invalid-UTF-8 rejection with whole-frame and byte-at-a-time
%% writes, per endpoint.
basic_unfragmented_invalid_utf8_text(Config) ->
    WSPath = "/websockets_example_endpoint.yaws",
    [unfragmented_invalid_utf8(Config, WSPath, BlockSz) || BlockSz <- [all, 1]],
    ok.
advanced_unfragmented_invalid_utf8_text(Config) ->
    WSPath = "/websockets_autobahn_endpoint.yaws",
    [unfragmented_invalid_utf8(Config, WSPath, BlockSz) || BlockSz <- [all, 1]],
    ok.
%% Every payload below is malformed UTF-8 (truncated sequences, lone
%% surrogates, overlong 5/6-byte forms, code points above U+10FFFF, and
%% stray continuation bytes); each text frame must make the server close
%% with an invalid-payload status. A fresh connection is used per case
%% because the failure closes the socket.
unfragmented_invalid_utf8(Config, WSPath, BlockSz) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
Fun = fun(Payload) ->
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
SndFrame = #frame{opcode=?WS_OPCODE_TEXT, payload=Payload},
?assertEqual(ok, send_frame(Sock, SndFrame, BlockSz)),
{ok, Frames} = wsflush(Sock, false),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_INVALID_PAYLOAD])),
?assertEqual({ok, []}, wsflush(Sock, true)),
?assertEqual(ok, close(Sock))
end,
Fun(<<16#cd>>),
Fun(<<16#ce,16#ba,16#e1>>),
Fun(<<16#ce,16#ba,16#e1,16#bd>>),
Fun(<<16#ce,16#ba,16#e1,16#bd,16#b9,16#cf>>),
Fun(<<16#ce,16#ba,16#e1,16#bd,16#b9,16#cf,16#83,16#ce>>),
Fun(<<16#ce,16#ba,16#e1,16#bd,16#b9,16#cf,16#83,16#ce,16#bc,16#ce>>),
Fun(<<16#ce,16#ba,16#e1,16#bd,16#b9,16#cf,16#83,16#ce,16#bc,16#ce,
16#b5,16#ed,16#a0,16#80,16#65,16#64,16#69,16#74,16#65,16#64>>),
Fun(<<16#f8,16#88,16#80,16#80,16#80>>),
Fun(<<16#fc,16#84,16#80,16#80,16#80,16#80>>),
Fun(<<16#f7,16#bf,16#bf,16#bf>>),
Fun(<<16#fb,16#bf,16#bf,16#bf,16#bf>>),
Fun(<<16#fd,16#bf,16#bf,16#bf,16#bf,16#bf>>),
Fun(<<16#f4,16#90,16#80,16#80>>),
Fun(<<16#80>>),
Fun(<<16#bf>>),
Fun(<<16#80,16#bf>>),
Fun(<<16#80,16#bf,16#80>>),
Fun(<<16#80,16#bf,16#80,16#bf>>),
Fun(<<16#80,16#bf,16#80,16#bf,16#80>>),
Fun(<<16#80,16#bf,16#80,16#bf,16#80,16#bf>>),
Fun(<<16#80,16#81,16#82,16#83,16#84,16#85,16#86,16#87,16#88,16#89,16#8a,
16#8b,16#8c,16#8d,16#8e,16#8f,16#90,16#91,16#92,16#93,16#94,16#95,
16#96,16#97,16#98,16#99,16#9a,16#9b,16#9c,16#9d,16#9e,16#9f,16#a0,
16#a1,16#a2,16#a3,16#a4,16#a5,16#a6,16#a7,16#a8,16#a9,16#aa,16#ab,
16#ac,16#ad,16#ae,16#af,16#b0,16#b1,16#b2,16#b3,16#b4,16#b5,16#b6,
16#b7,16#b8,16#b9,16#ba,16#bb,16#bc,16#bd,16#be>>),
ok.
%% Drivers: fragmented invalid-UTF-8 rejection with whole-payload and
%% one-byte fragments, per endpoint.
basic_fragmented_invalid_utf8_text(Config) ->
    WSPath = "/websockets_example_endpoint.yaws",
    [fragmented_invalid_utf8(Config, WSPath, FragSz) || FragSz <- [all, 1]],
    ok.
advanced_fragmented_invalid_utf8_text(Config) ->
    WSPath = "/websockets_autobahn_endpoint.yaws",
    [fragmented_invalid_utf8(Config, WSPath, FragSz) || FragSz <- [all, 1]],
    ok.
%% Fragmented text whose combined payload contains a UTF-16 surrogate
%% (ed a0 80) must make the server close with an invalid-payload status.
fragmented_invalid_utf8(Config, WSPath, FragSz) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
Payload1 = <<16#ce,16#ba,16#e1,16#bd,16#b9,16#cf,16#83,16#ce,16#bc,16#ce>>,
Payload2 = <<16#b5,16#ed,16#a0,16#80,16#65,16#64,16#69,16#74,16#65,16#64>>,
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
Payloads = fragment_payload(Payload1, FragSz) ++
fragment_payload(Payload2, FragSz),
%% First fragment opens the message, last one closes it (fin=true),
%% everything in between is a non-final continuation.
[First|Rest0] = Payloads,
[Last|Rest1] = lists:reverse(Rest0),
Middles = lists:reverse(Rest1),
FirstFrame = #frame{fin=false, opcode=?WS_OPCODE_TEXT, payload=First},
LastFrame = #frame{opcode=?WS_OPCODE_CONTINUATION, payload=Last},
MiddleFrames = lists:map(fun(P) ->
#frame{fin=false,
opcode=?WS_OPCODE_CONTINUATION,
payload=P}
end, Middles),
?assertEqual(ok, send_frame(Sock, FirstFrame, all)),
lists:foreach(fun(F) ->
?assertEqual(ok, send_frame(Sock, F, all))
end, MiddleFrames),
?assertEqual(ok, send_frame(Sock, LastFrame, all)),
{ok, Frames} = wsflush(Sock, false),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_INVALID_PAYLOAD])),
?assertEqual({ok, []}, wsflush(Sock, true)),
?assertEqual(ok, close(Sock)),
ok.
basic_2_closes(Config) ->
send_2_closes(Config, "/websockets_example_endpoint.yaws").
advanced_2_closes(Config) ->
send_2_closes(Config, "/websockets_autobahn_endpoint.yaws").
send_2_closes(Config, WSPath) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
{ok, Frames} = wsflush(Sock, false),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
?assertEqual({ok, []}, wsflush(Sock, true)),
?assertEqual(ok, close(Sock)),
ok.
basic_close_ping(Config) ->
close_ping(Config, "/websockets_example_endpoint.yaws").
advanced_close_ping(Config) ->
close_ping(Config, "/websockets_autobahn_endpoint.yaws").
close_ping(Config, WSPath) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
?assertEqual(ok, send_frame(Sock, #frame{opcode=?WS_OPCODE_PING}, all)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
?assertEqual(ok, close(Sock)),
ok.
basic_close_text(Config) ->
close_text(Config, "/websockets_example_endpoint.yaws").
advanced_close_text(Config) ->
close_text(Config, "/websockets_autobahn_endpoint.yaws").
close_text(Config, WSPath) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
?assertEqual(ok, send_frame(Sock, #frame{opcode=?WS_OPCODE_TEXT}, all)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
?assertEqual(ok, close(Sock)),
ok.
basic_fragtext_close_fragtext(Config) ->
close_fragtext(Config, "/websockets_example_endpoint.yaws").
advanced_fragtext_close_fragtext(Config) ->
close_fragtext(Config, "/websockets_autobahn_endpoint.yaws").
close_fragtext(Config, WSPath) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
?assertEqual(ok, send_frame(Sock, #frame{fin=false, opcode=?WS_OPCODE_TEXT}, all)),
?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
?assertEqual(ok, send_frame(Sock, #frame{opcode=?WS_OPCODE_CONTINUATION}, all)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
?assertEqual(ok, close(Sock)),
ok.
basic_close_empty(Config) ->
close_empty(Config, "/websockets_example_endpoint.yaws").
advanced_close_empty(Config) ->
close_empty(Config, "/websockets_autobahn_endpoint.yaws").
close_empty(Config, WSPath) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
?assertEqual(ok, gen_tcp:send(Sock, <<136,0>>)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
?assertEqual(ok, close(Sock)),
ok.
basic_close_toosmall(Config) ->
close_toosmall(Config, "/websockets_example_endpoint.yaws").
advanced_close_toosmall(Config) ->
close_toosmall(Config, "/websockets_autobahn_endpoint.yaws").
close_toosmall(Config, WSPath) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
?assertEqual(ok, gen_tcp:send(Sock, <<136,1,0>>)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_PROTO_ERROR])),
?assertEqual(ok, close(Sock)),
ok.
basic_close_statusonly(Config) ->
close_statusonly(Config, "/websockets_example_endpoint.yaws").
advanced_close_statusonly(Config) ->
close_statusonly(Config, "/websockets_autobahn_endpoint.yaws").
close_statusonly(Config, WSPath) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
?assertEqual(ok, gen_tcp:send(Sock, <<136,2,1000:16/big>>)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
?assertEqual(ok, close(Sock)),
ok.
basic_close_with_reason(Config) ->
close_with_reason(Config, "/websockets_example_endpoint.yaws").
advanced_close_with_reason(Config) ->
close_with_reason(Config, "/websockets_autobahn_endpoint.yaws").
close_with_reason(Config, WSPath) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
?assertEqual(ok, gen_tcp:send(Sock, <<136,4,1000:16/big,"Ok">>)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
?assertEqual(ok, close(Sock)),
ok.
basic_close_limit_size(Config) ->
close_limit_size(Config, "/websockets_example_endpoint.yaws").
advanced_close_limit_size(Config) ->
close_limit_size(Config, "/websockets_autobahn_endpoint.yaws").
close_limit_size(Config, WSPath) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
Reason = list_to_binary(lists:duplicate(123, $*)),
?assertEqual(ok, gen_tcp:send(Sock, <<136,125,1000:16/big,Reason/binary>>)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
?assertEqual(ok, close(Sock)),
ok.
basic_close_toolong(Config) ->
close_toolong(Config, "/websockets_example_endpoint.yaws").
advanced_close_toolong(Config) ->
close_toolong(Config, "/websockets_autobahn_endpoint.yaws").
close_toolong(Config, WSPath) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
Reason = list_to_binary(lists:duplicate(124, $*)),
?assertEqual(ok, gen_tcp:send(Sock, <<136,126,1000:16/big,Reason/binary>>)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_PROTO_ERROR])),
?assertEqual(ok, close(Sock)),
ok.
basic_close_invalid_utf8(Config) ->
close_invalid_utf8(Config, "/websockets_example_endpoint.yaws").
advanced_close_invalid_utf8(Config) ->
close_invalid_utf8(Config, "/websockets_autobahn_endpoint.yaws").
close_invalid_utf8(Config, WSPath) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
Reason = <<16#ce,16#ba,16#e1,16#bd,16#b9,16#cf,16#83,16#ce,16#bc,16#ce,
16#b5,16#ed,16#a0,16#80,16#65,16#64,16#69,16#74,16#65,16#64>>,
?assertEqual(ok, gen_tcp:send(Sock, <<136,22,1000:16/big,Reason/binary>>)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_INVALID_PAYLOAD])),
?assertEqual(ok, close(Sock)),
ok.
basic_close_valid_codes(Config) ->
lists:foreach(
fun(Code) ->
close_valid_code(Config, "/websockets_example_endpoint.yaws", Code)
end,
[1000,1001,1002,1003,1007,1008,1009,1010,1011,3000,3999,4000,4999]
).
advanced_close_valid_codes(Config) ->
lists:foreach(
fun(Code) ->
close_valid_code(Config, "/websockets_autobahn_endpoint.yaws", Code)
end,
[1000,1001,1002,1003,1007,1008,1009,1010,1011,3000,3999,4000,4999]
).
close_valid_code(Config, WSPath, Code) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
?assertEqual(ok, wsclose(Sock, Code, <<>>)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [Code])),
?assertEqual(ok, close(Sock)),
ok.
basic_close_invalid_codes(Config) ->
lists:foreach(
fun(Code) ->
close_invalid_code(Config, "/websockets_example_endpoint.yaws", Code)
end,
[0,999,1004,1005,1006,1012,1013,1014,1015,1016,1100,2000,2999,5000,65536]
).
advanced_close_invalid_codes(Config) ->
lists:foreach(
fun(Code) ->
close_invalid_code(Config, "/websockets_autobahn_endpoint.yaws", Code)
end,
[0,999,1004,1005,1006,1012,1013,1014,1015,1016,1100,2000,2999,5000,65536]
).
close_invalid_code(Config, WSPath, Code) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
?assertEqual(ok, wsclose(Sock, Code, <<>>)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_PROTO_ERROR])),
?assertEqual(ok, close(Sock)),
ok.
close_timeout(Config) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
WSPath = "/websockets_example_endpoint.yaws?extversion=true",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
SndFrame = #frame{opcode=?WS_OPCODE_TEXT, payload = <<"bye">>},
?assertEqual(ok, send_frame(Sock, SndFrame, all)),
{ok, Frames} = wsflush(Sock, false),
LastFrame = lists:last(Frames),
?assert(is_valid_close_frame([LastFrame], [?WS_STATUS_NORMAL])),
?assertEqual({error, closed}, gen_tcp:recv(Sock, 0)),
?assertEqual(ok, close(Sock)),
ok.
keepalive_timeout(Config) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
WSPath = "/websockets_example_endpoint.yaws?keepalive=true&timeout=5000&drop=true",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
timer:sleep(5500),
{ok, RcvFrame1} = read_frame(Sock),
?assertEqual(?WS_OPCODE_PING, RcvFrame1#frame.opcode),
?assertEqual(ok, send_frame(Sock, #frame{opcode=?WS_OPCODE_PONG}, all)),
timer:sleep(5500),
{ok, RcvFrame2} = read_frame(Sock),
?assertEqual(?WS_OPCODE_PING, RcvFrame2#frame.opcode),
timer:sleep(2000),
?assertEqual({error, closed}, gen_tcp:recv(Sock, 0)),
?assertEqual(ok, close(Sock)),
ok.
too_big_frame(Config) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
WSPath = "/websockets_example_endpoint.yaws",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
Payload1 = crypto:strong_rand_bytes(16*1024*1024),
SndFrame1 = #frame{opcode=?WS_OPCODE_BINARY, payload=Payload1},
?assertEqual(ok, send_frame(Sock, SndFrame1, all)),
{ok, RcvFrame} = read_frame(Sock),
?assertEqual(?WS_OPCODE_BINARY, RcvFrame#frame.opcode),
?assertEqual(Payload1, RcvFrame#frame.payload),
Payload2 = <<0, Payload1/binary>>,
SndFrame2 = #frame{opcode=?WS_OPCODE_BINARY, payload=Payload2},
{ok, Closed} = case send_frame(Sock, SndFrame2, all) of
ok -> {ok, false};
{error, closed} -> {ok, true}
end,
{ok, Frames} = wsflush(Sock, true),
?assert(case Closed of
false -> is_valid_close_frame(Frames, [?WS_STATUS_MSG_TOO_BIG]);
true -> true
end),
?assertEqual(ok, close(Sock)),
ok.
close_unmasked_frame(Config) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
WSPath = "/websockets_example_endpoint.yaws?close_unmasked=true",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
SndFrame = #frame{opcode=?WS_OPCODE_TEXT, payload = <<"unmasked">>},
?assertEqual(ok, send_frame(Sock, SndFrame, all)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_PROTO_ERROR])),
?assertEqual(ok, close(Sock)),
ok.
too_big_message(Config) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
WSPath = "/websockets_example_endpoint.yaws",
{ok, Sock} = open("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
Payload1 = crypto:strong_rand_bytes(16*1024*1024),
<<Frag1:(4*1024)/binary, Frag2:(4*1024)/binary,
Frag3:(4*1024)/binary, Frag4/binary>> = Payload1,
SndFrame1 = #frame{fin=false, opcode=?WS_OPCODE_BINARY, payload=Frag1},
SndFrame2 = #frame{fin=false, opcode=?WS_OPCODE_CONTINUATION, payload=Frag2},
SndFrame3 = #frame{fin=false, opcode=?WS_OPCODE_CONTINUATION, payload=Frag3},
SndFrame4 = #frame{opcode=?WS_OPCODE_CONTINUATION, payload=Frag4},
?assertEqual(ok, send_frame(Sock, SndFrame1, all)),
?assertEqual(ok, send_frame(Sock, SndFrame2, all)),
?assertEqual(ok, send_frame(Sock, SndFrame3, all)),
?assertEqual(ok, send_frame(Sock, SndFrame4, all)),
{ok, RcvFrame} = read_frame(Sock),
?assertEqual(?WS_OPCODE_BINARY, RcvFrame#frame.opcode),
?assertEqual(Payload1, RcvFrame#frame.payload),
Payload2 = <<0, Payload1/binary>>,
<<Frag5:(4*1024)/binary, Frag6:(4*1024)/binary,
Frag7:(4*1024)/binary, Frag8/binary>> = Payload2,
SndFrame5 = #frame{fin=false, opcode=?WS_OPCODE_BINARY, payload=Frag5},
SndFrame6 = #frame{fin=false, opcode=?WS_OPCODE_CONTINUATION, payload=Frag6},
SndFrame7 = #frame{fin=false, opcode=?WS_OPCODE_CONTINUATION, payload=Frag7},
SndFrame8 = #frame{opcode=?WS_OPCODE_CONTINUATION, payload=Frag8},
?assertEqual(ok, send_frame(Sock, SndFrame5, all)),
?assertEqual(ok, send_frame(Sock, SndFrame6, all)),
?assertEqual(ok, send_frame(Sock, SndFrame7, all)),
?assertEqual(ok, send_frame(Sock, SndFrame8, all)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_MSG_TOO_BIG])),
?assertEqual(ok, close(Sock)),
ok.
secure_websocket(Config) ->
Key = "dGhlIHNhbXBsZSBub25jZQ==",
WSPath = "/websockets_example_endpoint.yaws",
{ok, Sock} = sslopen("localhost", testsuite:get_yaws_port(1, Config)),
?assertMatch({ok, {101, _}}, wsopen(Sock, Key, WSPath, "", 13)),
SndFrame = #frame{opcode=?WS_OPCODE_TEXT, payload = <<"small payload">>},
?assertEqual(ok, send_frame(Sock, SndFrame, all)),
{ok, RcvFrame} = read_frame(Sock),
?assertEqual(?WS_OPCODE_TEXT, RcvFrame#frame.opcode),
?assertEqual(<<"small payload">>, RcvFrame#frame.payload),
?assertEqual(ok, wsclose(Sock, ?WS_STATUS_NORMAL, <<>>)),
{ok, Frames} = wsflush(Sock, true),
?assert(is_valid_close_frame(Frames, [?WS_STATUS_NORMAL])),
?assertEqual(ok, close(Sock)),
ok.
open(Host, Port) ->
Opts = [{send_timeout, 2000}, binary, {packet, raw}, {active, false}],
gen_tcp:connect(Host, Port, Opts).
sslopen(Host, Port) ->
Opts = [{send_timeout, 2000}, binary, {packet, raw}, {active, false}],
case ssl:connect(Host, Port, Opts) of
{ok, Sock} -> {ok, {ssl, Sock}};
{error, Reason} -> {error, Reason}
end.
close(Sock) ->
case yaws_api:get_sslsocket(Sock) of
{ok, SslSock} -> ssl:close(SslSock);
undefined -> gen_tcp:close(Sock)
end.
wsopen(Sock, Key, Path, Origin, Vsn) ->
Handshake = ["GET ", Path, " HTTP/1.1\r\n",
"Host: localhost\r\n",
"Upgrade: websocket\r\n",
"Connection: Upgrade\r\n",
"Sec-WebSocket-Key: ", Key, "\r\n",
"Origin: ", Origin, "\r\n",
"Sec-WebSocket-Version: ", integer_to_list(Vsn), "\r\n",
"\r\n"],
case yaws_api:get_sslsocket(Sock) of
{ok, SslSock} -> ssl:send(SslSock, Handshake);
undefined -> gen_tcp:send(Sock, Handshake)
end,
read_handshake_response(Sock).
wsclose(Sock, Status, Msg) ->
Fin = 1,
Rsv = 0,
Mask = 0,
Opcode = ?WS_OPCODE_CLOSE,
Payload= <<Status:16/big, Msg/binary>>,
Len = byte_size(Payload),
Frame = if
Len < 126 ->
<<Fin:1,Rsv:3,Opcode:4,Mask:1,Len:7,Payload/binary>>;
Len < 65536 ->
<<Fin:1,Rsv:3,Opcode:4,Mask:1,126:7,Len:16,Payload/binary>>;
true ->
<<Fin:1,Rsv:3,Opcode:4,Mask:1,127:7,Len:64,Payload/binary>>
end,
case yaws_api:get_sslsocket(Sock) of
{ok, SslSock} -> ssl:send(SslSock, Frame);
undefined -> gen_tcp:send(Sock, Frame)
end.
wsflush(Sock, WithTcpClose) ->
wsflush(Sock, WithTcpClose, []).
wsflush(Sock, WithTcpClose, Acc) ->
case read_frame(Sock) of
{ok, Frame} ->
case Frame#frame.opcode of
?WS_OPCODE_CLOSE when WithTcpClose == false ->
{ok, lists:reverse([Frame|Acc])};
_ ->
wsflush(Sock, WithTcpClose, [Frame|Acc])
end;
{error, closed} ->
{ok, lists:reverse(Acc)};
{error, Reason} ->
{error, Reason}
end.
is_valid_handshake_hash(Key, Hash) ->
Salted = Key ++ "258EAFA5-E914-47DA-95CA-C5AB0DC85B11",
HashBin = crypto:hash(sha, Salted),
Hash == base64:encode_to_string(HashBin).
is_valid_close_frame([], _) ->
io:format(" WARNING: Connection closed by server without Close frame~n"),
true;
is_valid_close_frame([#frame{opcode=?WS_OPCODE_CLOSE, payload=Payload}|Rest],
Codes) ->
case Rest of
[] ->
case Payload of
<<>> -> lists:member(?WS_STATUS_NORMAL, Codes);
<<Status:16/big, _/binary>> ->
case lists:member(Status, Codes) of
true ->
true;
false ->
io:format(" ERROR: Bad status code in close"
" frame: status=~p~n", [Status]),
false
end
end;
_ ->
io:format(" ERROR: Remaining frames after the Close frame~n")
end;
is_valid_close_frame([#frame{opcode=OpCode}|_], _) ->
io:format(" ERROR: Not a close frame: opcode=~p~n", [OpCode]),
false.
fragment_payload(Payload, all) ->
[Payload];
fragment_payload(<<>>, _) ->
[];
fragment_payload(Payload, FragSz) ->
case Payload of
<<Frag:FragSz/binary, Rest/binary>> ->
[Frag | fragment_payload(Rest, FragSz)];
Rest ->
[Rest]
end.
read_handshake_response(Sock) ->
Res = case yaws_api:get_sslsocket(Sock) of
{ok, SslSock} ->
ssl:setopts(SslSock, [{packet, http}, {packet_size, 16#4000}]),
ssl:recv(SslSock, 0, 5000);
undefined ->
inet:setopts(Sock, [{packet, http}, {packet_size, 16#4000}]),
gen_tcp:recv(Sock, 0, 5000)
end,
case Res of
{ok, {http_response, _, Status, _}} ->
case yaws_api:get_sslsocket(Sock) of
{ok, SslSock1} ->
ssl:setopts(SslSock1,[{packet,httph},{packet_size,16#4000}]);
undefined ->
inet:setopts(Sock, [{packet,httph},{packet_size,16#4000}])
end,
Resp = read_handshake_response(Sock, Status, []),
case yaws_api:get_sslsocket(Sock) of
{ok, SslSock2} -> ssl:setopts(SslSock2,[binary, {packet, raw}]);
undefined -> inet:setopts(Sock, [binary, {packet, raw}])
end,
Resp;
{ok, Error} ->
{error, Error};
{error, Reason} ->
{error, Reason}
end.
read_handshake_response(Sock, Status, Acc) ->
Res = case yaws_api:get_sslsocket(Sock) of
{ok, SslSock} -> ssl:recv(SslSock, 0, 5000);
undefined -> gen_tcp:recv(Sock, 0, 5000)
end,
case Res of
{ok, {http_header, _, Name, _, Value}} when is_atom(Name) ->
Name1 = string:to_lower(atom_to_list(Name)),
read_handshake_response(Sock, Status, [{Name1, Value}|Acc]);
{ok, {http_header, _, Name, _, Value}} ->
Name1 = string:to_lower(Name),
read_handshake_response(Sock, Status, [{Name1, Value}|Acc]);
{ok, http_eoh} ->
{ok, {Status, Acc}};
{ok, Error} ->
{error, Error};
{error, Reason} ->
{error, Reason}
end.
read_frame(Sock) ->
case read_frame_header(Sock) of
{ok, #frame{mask=undefined}=Frame} ->
{ok, Frame};
{ok, Frame} ->
Payload = mask(Frame#frame.mask, Frame#frame.payload),
{ok, Frame#frame{payload=Payload}};
{error, Reason} ->
{error, Reason}
end.
read_frame_header(Sock) ->
case do_recv(Sock, 2) of
{ok, <<Fin:1, Rsv:3, Opcode:4, MaskBit:1, Len:7>>} ->
Frame = #frame{fin = bit_to_boolean(Fin),
rsv = Rsv,
opcode = Opcode,
masked = bit_to_boolean(MaskBit)},
case read_frame_length(Sock, Len) of
{ok, Length} -> read_frame_payload(Sock, Frame, Length);
{error, Reason} -> {error, Reason}
end;
{error, Reason} ->
{error, Reason}
end.
read_frame_length(Sock, 126) ->
case do_recv(Sock, 2) of
{ok, <<Length:16>>} -> {ok, Length};
{error, Reason} -> {error, Reason}
end;
read_frame_length(Sock, 127) ->
case do_recv(Sock, 8) of
{ok, <<Length:64>>} -> {ok, Length};
{error, Reason} -> {error, Reason}
end;
read_frame_length(_Sock, Length) ->
{ok, Length}.
read_frame_mask(Sock) ->
case do_recv(Sock, 4) of
{ok, Mask} -> {ok, Mask};
{error, Reason} -> {error, Reason}
end.
read_frame_payload(Sock, #frame{masked=true, mask=undefined}=Frame, Length) ->
case read_frame_mask(Sock) of
{ok, Mask} ->
read_frame_payload(Sock, Frame#frame{mask=Mask}, Length);
{error, Reason} ->
{error, Reason}
end;
read_frame_payload(Sock, Frame, Length) ->
case do_recv(Sock, Length) of
{ok, Payload} -> {ok, Frame#frame{payload=Payload}};
{error, Reason} -> {error, Reason}
end.
send_frame(Sock, Frame, BlockSz) ->
Fin = boolean_to_bit(Frame#frame.fin),
Rsv = Frame#frame.rsv,
Opcode = Frame#frame.opcode,
MaskBit = boolean_to_bit(Frame#frame.masked),
Mask = case Frame#frame.mask of
undefined -> <<>>;
M -> M
end,
Data = mask(Mask, Frame#frame.payload),
Len = byte_size(Data),
Packet = if
Len < 126 ->
<<Fin:1,Rsv:3,Opcode:4,MaskBit:1,Len:7,
Mask/binary,Data/binary>>;
Len < 65536 ->
<<Fin:1,Rsv:3,Opcode:4,MaskBit:1,126:7,Len:16,
Mask/binary,Data/binary>>;
true ->
<<Fin:1,Rsv:3,Opcode:4,MaskBit:1,127:7,Len:64,
Mask/binary,Data/binary>>
end,
case BlockSz of
all ->
case yaws_api:get_sslsocket(Sock) of
{ok, SslSock} -> ssl:send(SslSock, Packet);
undefined -> gen_tcp:send(Sock, Packet)
end;
_ ->
do_send(Sock, Packet, BlockSz)
end.
do_send(_Sock, <<>>, _BlockSz) ->
ok;
do_send(Sock, Packet, BlockSz) ->
case Packet of
<<Block:BlockSz/binary, Rest/binary>> ->
case yaws_api:get_sslsocket(Sock) of
{ok, SslSock} -> ssl:send(SslSock, Block);
undefined -> gen_tcp:send(Sock, Block)
end,
do_send(Sock, Rest, BlockSz);
_ ->
case yaws_api:get_sslsocket(Sock) of
{ok, SslSock} -> ssl:send(SslSock, Packet);
undefined -> gen_tcp:send(Sock, Packet)
end
end.
mask(MaskBin, Data) ->
list_to_binary(rmask(MaskBin, Data)).
rmask(_,<<>>) ->
[<<>>];
rmask(<<>>, Data) ->
[Data];
rmask(MaskBin = <<Mask:4/integer-unit:8>>,
<<Data:4/integer-unit:8, Rest/binary>>) ->
Masked = Mask bxor Data,
[<<Masked:4/integer-unit:8>> | rmask(MaskBin, Rest)];
rmask(<<Mask:3/integer-unit:8, _Rest/binary>>, <<Data:3/integer-unit:8>>) ->
Masked = Mask bxor Data,
[<<Masked:3/integer-unit:8>>];
rmask(<<Mask:2/integer-unit:8, _Rest/binary>>, <<Data:2/integer-unit:8>>) ->
Masked = Mask bxor Data,
[<<Masked:2/integer-unit:8>>];
rmask(<<Mask:1/integer-unit:8, _Rest/binary>>, <<Data:1/integer-unit:8>>) ->
Masked = Mask bxor Data,
[<<Masked:1/integer-unit:8>>].
bit_to_boolean(1) -> true;
bit_to_boolean(0) -> false.
boolean_to_bit(true) -> 1;
boolean_to_bit(false) -> 0.
do_recv(Sock, Sz) ->
do_recv(Sock, Sz, []).
do_recv(_Sock, 0, Acc) ->
{ok, list_to_binary(lists:reverse(Acc))};
do_recv(Sock, Sz, Acc) ->
Res = case yaws_api:get_sslsocket(Sock) of
{ok, SslSock} -> ssl:recv(SslSock, Sz, 1000);
undefined -> gen_tcp:recv(Sock, Sz, 1000)
end,
case Res of
{ok, Bin} -> do_recv(Sock, Sz - byte_size(Bin), [Bin|Acc]);
{error, Reason} -> {error, Reason}
end.
|
79069d62038c8ffc668958e41996fe5d273f5ff93aa5c60ffef39fbdd86b12b0 | schemedoc/implementation-metadata | s7.scm | (title "s7")
(tagline "extension language for Snd, Radium, and Common Music")
(based-on "tinyscheme")
(named-after "the Sunbeam S7 motorcycle")
(homepage-url "/")
(person "Bill Schottstaedt")
(country "US")
(sourceforge "snd")
(documentation
(title "User Manual")
(web-url ""))
(mailing-list
(name "cmdist")
(list-address "cmdist" "ccrma.stanford.edu")
(archive-url "-mail.stanford.edu/mailman/private/cmdist/")
(instructions-url "-mail.stanford.edu/mailman/listinfo/cmdist"))
| null | https://raw.githubusercontent.com/schemedoc/implementation-metadata/6280d9c4c73833dc5bd1c9bef9b45be6ea5beb68/schemes/s7.scm | scheme | (title "s7")
(tagline "extension language for Snd, Radium, and Common Music")
(based-on "tinyscheme")
(named-after "the Sunbeam S7 motorcycle")
(homepage-url "/")
(person "Bill Schottstaedt")
(country "US")
(sourceforge "snd")
(documentation
(title "User Manual")
(web-url ""))
(mailing-list
(name "cmdist")
(list-address "cmdist" "ccrma.stanford.edu")
(archive-url "-mail.stanford.edu/mailman/private/cmdist/")
(instructions-url "-mail.stanford.edu/mailman/listinfo/cmdist"))
| |
3d55b966a344dcfd3eeb9c2bd7baa84df902e525aaea9ea1d922aadfa9b2de9d | input-output-hk/cardano-sl | Internal.hs | # OPTIONS_GHC -fno - warn - name - shadowing #
# LANGUAGE BangPatterns #
# LANGUAGE CPP #
{-# LANGUAGE DeriveDataTypeable #-}
# LANGUAGE ExistentialQuantification #
# LANGUAGE FlexibleContexts #
# LANGUAGE GADTSyntax #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE KindSignatures #
# LANGUAGE NamedFieldPuns #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
# LANGUAGE RecordWildCards #
{-# LANGUAGE RecursiveDo #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE StandaloneDeriving #
module Node.Internal (
NodeId(..),
Node(..),
NodeEnvironment(..),
defaultNodeEnvironment,
NodeEndPoint(..),
simpleNodeEndPoint,
manualNodeEndPoint,
ReceiveDelay,
noReceiveDelay,
constantReceiveDelay,
NodeState(..),
nodeId,
nodeEndPointAddress,
Statistics(..),
stTotalLiveBytes,
stRunningHandlersRemoteVariance,
stRunningHandlersLocalVariance,
PeerStatistics(..),
nodeStatistics,
ChannelIn(..),
ChannelOut(..),
startNode,
stopNode,
killNode,
withInOutChannel,
writeMany,
Timeout(..)
) where
import Control.Concurrent (threadDelay)
import Control.Concurrent.Async
import Control.Concurrent.MVar
import Control.Concurrent.STM
import Control.Exception (Exception, SomeAsyncException,
SomeException, bracket, catch, finally, fromException,
handle, mask, throwIO, try, uninterruptibleMask_)
import Control.Monad (forM_, mapM_, when)
import Data.Binary
import qualified Data.ByteString as BS
import qualified Data.ByteString.Builder as BS
import qualified Data.ByteString.Builder.Extra as BS
import qualified Data.ByteString.Lazy as LBS
import Data.Foldable (foldl', foldlM)
import Data.Hashable (Hashable)
import Data.Int (Int64)
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
#if !(MIN_VERSION_base(4,8,0))
import Data.Monoid
#endif
import Data.NonEmptySet (NonEmptySet)
import qualified Data.NonEmptySet as NESet
import Data.Semigroup ((<>))
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text (Text)
import Data.Time.Clock.POSIX (getPOSIXTime)
import Data.Time.Units (Microsecond)
import Formatting (sformat, shown, (%))
import GHC.Generics (Generic)
import qualified Network.Transport as NT
import Node.Message.Class (Packing, Serializable (..), pack, unpack)
import Node.Message.Decoder (Decoder (..), DecoderStep (..),
continueDecoding)
import Pos.Util.Trace (Severity (..), Trace, traceWith)
import qualified System.Metrics.Distribution as Metrics (Distribution)
import qualified System.Metrics.Distribution as Metrics.Distribution
import qualified System.Metrics.Gauge as Metrics (Gauge)
import qualified System.Metrics.Gauge as Metrics.Gauge
import System.Random (Random, StdGen, random)
Copied from the old Mockable definition for Production .
getCurrentTime :: IO Microsecond
getCurrentTime = round . (* 1000000) <$> getPOSIXTime
delay :: Microsecond -> IO ()
delay = threadDelay . fromIntegral
-- | A 'NodeId' wraps a network-transport endpoint address
newtype NodeId = NodeId NT.EndPointAddress
deriving (Eq, Ord, Show, Hashable, Generic)
instance Binary NodeId
-- | The state of a Node, to be held in a shared atomic cell because other
-- threads will mutate it in order to set up bidirectional connections.
data NodeState peerData = NodeState {
_nodeStateGen :: !StdGen
-- ^ To generate nonces.
, _nodeStateOutboundBidirectional :: !(Map NT.EndPointAddress (Map Nonce (SomeHandler, Maybe BS.ByteString -> IO (), Int -> IO (), MVar peerData, NT.ConnectionBundle, Async (), Bool)))
-- ^ Handlers for each nonce which we generated (locally-initiated
-- bidirectional connections).
-- The bool indicates whether we have received an ACK for this.
, _nodeStateInbound :: !(Set SomeHandler)
-- ^ Handlers for inbound connections (remotely-initiated unidirectional
-- _or_ bidirectional connections).
, _nodeStateConnectedTo :: !(Map NT.EndPointAddress OutboundConnectionState)
^ For each peer that we have at least one open connection to , the
number of connections ; or an MVar in case there 's some thread
sending the initial data ( it just opened the first connection to that
-- peer).
, _nodeStateStatistics :: !Statistics
-- ^ Statistics about traffic at this node.
-- Must be kept in mutable state so that handlers can update it when
-- they finish.
, _nodeStateClosed :: !Bool
^ Indicates whether the Node has been closed and is no longer capable
of establishing or accepting connections ( its EndPoint is closed ) .
}
-- | An exception which is thrown when something times out.
data Timeout = Timeout
deriving (Show)
instance Exception Timeout
-- | The initial state of a node, wrapped up in a shared atomic.
initialNodeState
:: StdGen
-> IO (MVar (NodeState peerData))
initialNodeState prng = do
!stats <- initialStatistics
let nodeState = NodeState {
_nodeStateGen = prng
, _nodeStateOutboundBidirectional = Map.empty
, _nodeStateInbound = Set.empty
, _nodeStateConnectedTo = Map.empty
, _nodeStateStatistics = stats
, _nodeStateClosed = False
}
newMVar nodeState
-- | Some 'Async', we don't care the result type.
data SomeHandler = forall t . SomeHandler (Async t)
-- | Uses equality on thread id. Should be good for our use case.
-- Are thread ids ever recycled? Surely they must be, eventually, since they're
of bounded size . Anyway , if we 're paranoid , we can use a ' Unique ' for ' '
and ' ' .
instance Eq SomeHandler where
SomeHandler as1 == SomeHandler as2 =
asyncThreadId as1 == asyncThreadId as2
instance Ord SomeHandler where
SomeHandler as1 `compare` SomeHandler as2 =
asyncThreadId as1 `compare` asyncThreadId as2
-- | Waits for a handler.
waitSomeHandler :: SomeHandler -> IO ()
waitSomeHandler (SomeHandler promise) = () <$ wait promise
-- | Cancels a handler.
cancelSomeHandler :: SomeHandler -> IO ()
cancelSomeHandler (SomeHandler promise) = uninterruptibleCancel promise
-- | Waits for it and squelches all (even async) exceptions.
waitCatchSomeHandler :: SomeHandler -> IO ()
waitCatchSomeHandler = handle squelch . waitSomeHandler
where
squelch :: SomeException -> IO ()
squelch = const (pure ())
data NodeEnvironment = NodeEnvironment {
nodeAckTimeout :: !Microsecond
-- | Maximum transmission unit: how many bytes can be sent in a single
-- network-transport send. Tune this according to the transport
-- which backs the time-warp node.
, nodeMtu :: !Word32
}
defaultNodeEnvironment :: NodeEnvironment
defaultNodeEnvironment = NodeEnvironment {
30 second timeout waiting for an ACK .
nodeAckTimeout = 30000000
, nodeMtu = maxBound
}
-- | Computation in IO of a delay (or no delay).
type ReceiveDelay = IO (Maybe Microsecond)
noReceiveDelay :: ReceiveDelay
noReceiveDelay = pure Nothing
constantReceiveDelay :: Microsecond -> ReceiveDelay
constantReceiveDelay = pure . Just
| A ' Node ' is a network - transport ' EndPoint ' with bidirectional connection
-- state and a thread to dispatch network-transport events.
data Node packingType peerData = Node {
nodeTrace :: Trace IO (Severity, Text)
, nodeEndPoint :: NT.EndPoint
, nodeCloseEndPoint :: IO ()
, nodeDispatcherThread :: Async ()
, nodeEnvironment :: NodeEnvironment
, nodeState :: MVar (NodeState peerData)
, nodePacking :: Packing packingType IO
, nodePeerData :: peerData
-- | How long to wait before dequeueing an event from the
-- network-transport receive queue, where Nothing means
-- instantaneous (different from a 0 delay).
-- The term is evaluated once for each dequeued event, immediately
-- before dequeueing it.
, nodeReceiveDelay :: ReceiveDelay
-- | As 'nodeReceiveDelay' but instead of a delay on every network
-- level message, the delay applies only to establishing new
-- incomming connections. These connect/talk/close patterns tend
-- to correspond to application level messages or conversations
-- so this is a way to delay per-high-level message rather than
-- lower level events.
, nodeConnectDelay :: ReceiveDelay
}
nodeId :: Node packingType peerData -> NodeId
nodeId = NodeId . NT.address . nodeEndPoint
nodeEndPointAddress :: NodeId -> NT.EndPointAddress
nodeEndPointAddress (NodeId addr) = addr
nodeStatistics :: Node packingType peerData -> IO Statistics
nodeStatistics Node{..} = modifyMVar nodeState $ \st ->
return (st, _nodeStateStatistics st)
-- | Used to identify bidirectional connections.
newtype Nonce = Nonce {
_getNonce :: Word64
}
deriving instance Show Nonce
deriving instance Eq Nonce
deriving instance Ord Nonce
deriving instance Random Nonce
deriving instance Binary Nonce
data NodeException =
ProtocolError String
| InternalError String
deriving (Show)
instance Exception NodeException
-- | Input from the wire.
newtype ChannelIn = ChannelIn (TChan (Maybe BS.ByteString))
-- | Output to the wire.
newtype ChannelOut = ChannelOut NT.Connection
-- | Do multiple sends on a 'ChannelOut', splitting the payload into pieces
-- of at most @mtu@ bytes. An empty payload still induces one (empty) send.
writeMany
    :: Word32 -- ^ Split into chunks of at most this size in bytes. 0 means no split.
    -> ChannelOut
    -> LBS.ByteString
    -> IO ()
writeMany mtu (ChannelOut conn) payload = mapM_ push pieces
  where
    -- One network-transport send per piece; a send failure is thrown.
    push :: [BS.ByteString] -> IO ()
    push chunks = NT.send conn chunks >>= either throwIO pure
    pieces :: [[BS.ByteString]]
    pieces = fmap LBS.toChunks (split payload)
    -- Split the payload at the MTU boundary. When mtu is 0 (no split) or
    -- the payload is empty, yield the payload as a single piece; the empty
    -- case is deliberate so that @writeMany mtu outChan ""@ still sends.
    split :: LBS.ByteString -> [LBS.ByteString]
    split lbs
        | mtu == 0 || LBS.null lbs = [lbs]
        | otherwise = go lbs
      where
        limit :: Int64
        limit = fromIntegral mtu
        go rest
            | LBS.null rest = []
            | otherwise =
                  let (piece, rest') = LBS.splitAt limit rest
                  in piece : go rest'
-- | Statistics concerning traffic at this node.
data Statistics = Statistics {
      -- | How many handlers are running right now in response to a
      --   remotely initiated connection (whether unidirectional or
      --   bidirectional).
      --   NB: a handler may run longer or shorter than the duration of a
      --   connection.
      stRunningHandlersRemote :: !Metrics.Gauge
      -- | How many handlers are running right now which were initiated
      --   locally, i.e. corresponding to bidirectional connections.
    , stRunningHandlersLocal :: !Metrics.Gauge
      -- | Statistics for each peer, keyed by end point address. Each entry
      --   is behind its own 'MVar' so per-peer updates don't contend.
    , stPeerStatistics :: !(Map NT.EndPointAddress (MVar PeerStatistics))
      -- | How many peers are connected.
    , stPeers :: !Metrics.Gauge
      -- | Average number of remotely-initiated handlers per peer.
      --   Also track the average of the number of handlers squared, so we
      --   can quickly compute the variance.
    , stRunningHandlersRemoteAverage :: !(Double, Double)
      -- | Average number of locally-initiated handlers per peer.
      --   Also track the average of the number of handlers squared, so we
      --   can quickly compute the variance.
    , stRunningHandlersLocalAverage :: !(Double, Double)
      -- | Handlers which finished normally. Distribution is on their
      --   running time.
    , stHandlersFinishedNormally :: !Metrics.Distribution
      -- | Handlers which finished exceptionally. Distribution is on their
      --   running time.
    , stHandlersFinishedExceptionally :: !Metrics.Distribution
    }
-- | Total number of live bytes across all peers: read every per-peer
-- statistics var and sum the 'pstLiveBytes' fields.
stTotalLiveBytes :: Statistics -> IO Int
stTotalLiveBytes stats =
    sum . fmap pstLiveBytes <$> mapM readMVar (Map.elems (stPeerStatistics stats))
-- | Variance of remotely-initiated handlers per peer, computed as
-- E[X^2] - (E[X])^2 from the tracked running averages.
stRunningHandlersRemoteVariance :: Statistics -> Double
stRunningHandlersRemoteVariance statistics =
    let (mean, meanOfSquares) = stRunningHandlersRemoteAverage statistics
    in meanOfSquares - mean * mean
-- | Variance of locally-initiated handlers per peer, computed as
-- E[X^2] - (E[X])^2 from the tracked running averages.
stRunningHandlersLocalVariance :: Statistics -> Double
stRunningHandlersLocalVariance statistics =
    let (mean, meanOfSquares) = stRunningHandlersLocalAverage statistics
    in meanOfSquares - mean * mean
-- | Statistics about a given peer.
data PeerStatistics = PeerStatistics {
      -- | How many handlers are running right now in response to connections
      --   from this peer (whether unidirectional or remotely-initiated
      --   bidirectional).
      pstRunningHandlersRemote :: !Int
      -- | How many handlers are running right now for locally-initiated
      --   bidirectional connections to this peer.
    , pstRunningHandlersLocal :: !Int
      -- | How many bytes have been received by running handlers for this
      --   peer.
    , pstLiveBytes :: !Int
    }
-- | True when a peer has no running handlers at all (neither remotely- nor
-- locally-initiated), i.e. its entry can be dropped from the statistics.
pstNull :: PeerStatistics -> Bool
pstNull stats =
    pstRunningHandlersRemote stats == 0 && pstRunningHandlersLocal stats == 0
-- | Add received bytes to a peer's live-byte counter. Unknown peers are
-- silently ignored.
stIncrBytes :: NT.EndPointAddress -> Int -> Statistics -> IO ()
stIncrBytes peer bytes stats =
    maybe (return ()) bump (Map.lookup peer (stPeerStatistics stats))
  where
    -- Force the updated record so the counter doesn't accumulate thunks.
    bump statsVar = modifyMVar statsVar $ \ps ->
        let !ps' = pstIncrBytes bytes ps
        in return (ps', ())
-- | Bump a peer's live-byte count by the given amount.
pstIncrBytes :: Int -> PeerStatistics -> PeerStatistics
pstIncrBytes n ps = ps { pstLiveBytes = n + pstLiveBytes ps }
-- | Record a new handler for a given peer. Second component is True if it's
-- the only handler for that peer.
pstAddHandler
    :: HandlerProvenance peerData t
    -> Map NT.EndPointAddress (MVar PeerStatistics)
    -> IO (Map NT.EndPointAddress (MVar PeerStatistics), Bool)
pstAddHandler provenance table = case provenance of
    Local peer _  -> register peer (PeerStatistics 0 1 0) bumpLocal
    Remote peer _ -> register peer (PeerStatistics 1 0 0) bumpRemote
  where
    -- Increment the counter matching the handler's provenance.
    bumpLocal stats = stats { pstRunningHandlersLocal = pstRunningHandlersLocal stats + 1 }
    bumpRemote stats = stats { pstRunningHandlersRemote = pstRunningHandlersRemote stats + 1 }
    -- Unknown peer: insert fresh statistics and report True (new peer).
    -- Known peer: bump its entry in place and report False.
    register peer fresh bump = case Map.lookup peer table of
        Nothing ->
            newMVar fresh >>= \statsVar ->
                return (Map.insert peer statsVar table, True)
        Just statsVar -> modifyMVar statsVar $ \stats ->
            let !stats' = bump stats
            in return (stats', (table, False))
-- | Remove a handler for a given peer. Second component is True if there
-- are no more handlers for that peer.
pstRemoveHandler
    :: Trace IO (Severity, Text)
    -> HandlerProvenance peerData t
    -> Map NT.EndPointAddress (MVar PeerStatistics)
    -> IO (Map NT.EndPointAddress (MVar PeerStatistics), Bool)
pstRemoveHandler logTrace provenance table = case provenance of
    Local peer _  -> unregister peer dropLocal
    Remote peer _ -> unregister peer dropRemote
  where
    -- Decrement the counter matching the handler's provenance.
    dropLocal stats = stats { pstRunningHandlersLocal = pstRunningHandlersLocal stats - 1 }
    dropRemote stats = stats { pstRunningHandlersRemote = pstRunningHandlersRemote stats - 1 }
    -- Decrement the peer's entry; once both counters hit zero, drop the
    -- peer from the table and report True. Unknown peers are logged and
    -- left untouched.
    unregister peer dec = case Map.lookup peer table of
        Nothing -> do
            traceWith logTrace (Warning, sformat ("tried to remove handler for "%shown%", but it is not in the map") peer)
            return (table, False)
        Just statsVar -> modifyMVar statsVar $ \stats ->
            let stats' = dec stats
            in return $ if pstNull stats'
                        then (stats', (Map.delete peer table, True))
                        else (stats', (table, False))
-- | Statistics when a node is launched: fresh gauges and distributions,
-- no known peers, zeroed running averages.
initialStatistics :: IO Statistics
initialStatistics = do
    !gaugeRemote <- Metrics.Gauge.new
    !gaugeLocal <- Metrics.Gauge.new
    !gaugePeers <- Metrics.Gauge.new
    !distrNormal <- Metrics.Distribution.new
    !distrExceptional <- Metrics.Distribution.new
    return $ Statistics
        { stRunningHandlersRemote = gaugeRemote
        , stRunningHandlersLocal = gaugeLocal
        , stPeerStatistics = Map.empty
        , stPeers = gaugePeers
        , stRunningHandlersRemoteAverage = (0, 0)
        , stRunningHandlersLocalAverage = (0, 0)
        , stHandlersFinishedNormally = distrNormal
        , stHandlersFinishedExceptionally = distrExceptional
        }
-- | Where a handler came from: initiated by this node or by a peer.
data HandlerProvenance peerData t =
    -- | Initiated locally, _to_ this peer. Carries the handshake 'Nonce'
    -- plus associated resources (peer-data var, connection bundle, an async
    -- task, and extra state @t@) — see the dispatcher and handler-spawning
    -- code for their exact roles (NOTE: roles inferred; confirm there).
    Local !NT.EndPointAddress (Nonce, MVar peerData, NT.ConnectionBundle, Async (), t)
    -- | Initiated remotely, _by_ or _from_ this peer, identified by the
    -- lightweight connection it arrived on.
  | Remote !NT.EndPointAddress !NT.ConnectionId
-- Shows only the nonce of a local provenance's payload tuple; the other
-- components (MVar, bundle, async, t) have no useful 'Show'.
instance Show (HandlerProvenance peerData t) where
    show (Local addr (nonce, _, _, _, _)) =
        concat ["Local ", show addr, show nonce]
    show (Remote addr connid) =
        concat ["Remote ", show addr, show connid]
-- | The peer address of a handler's provenance, regardless of direction.
handlerProvenancePeer :: HandlerProvenance peerData t -> NT.EndPointAddress
handlerProvenancePeer (Local peer _)  = peer
handlerProvenancePeer (Remote peer _) = peer
-- TODO: revise these computations to make them numerically stable (or maybe
-- use Rational?).

-- | Record a newly-spawned handler in the statistics: update the per-peer
-- counters, bump the relevant gauges, and update the running mean and mean
-- of squares of handlers per peer (used to compute the variance).
stAddHandler
    :: HandlerProvenance peerData t
    -> Statistics
    -> IO Statistics
stAddHandler !provenance !statistics = case provenance of

    -- TODO: generalize this computation so we can use the same thing for
    -- both local and remote. It's a copy/paste job right now swapping local
    -- for remote.
    Local !_peer _ -> do
        (!peerStatistics, !isNewPeer) <- pstAddHandler provenance (stPeerStatistics statistics)
        when isNewPeer $ Metrics.Gauge.inc (stPeers statistics)
        Metrics.Gauge.inc (stRunningHandlersLocal statistics)
        !npeers <- Metrics.Gauge.read (stPeers statistics)
        !nhandlers <- Metrics.Gauge.read (stRunningHandlersLocal statistics)
        let runningHandlersLocalAverage =
                adjustMeans isNewPeer
                            (fromIntegral npeers)
                            nhandlers
                            (stRunningHandlersLocalAverage statistics)
        return $ statistics {
              stPeerStatistics = peerStatistics
            , stRunningHandlersLocalAverage = runningHandlersLocalAverage
            }

    Remote !_peer _ -> do
        (!peerStatistics, !isNewPeer) <- pstAddHandler provenance (stPeerStatistics statistics)
        when isNewPeer $ Metrics.Gauge.inc (stPeers statistics)
        Metrics.Gauge.inc (stRunningHandlersRemote statistics)
        !npeers <- Metrics.Gauge.read (stPeers statistics)
        !nhandlers <- Metrics.Gauge.read (stRunningHandlersRemote statistics)
        let runningHandlersRemoteAverage =
                adjustMeans isNewPeer
                            (fromIntegral npeers)
                            nhandlers
                            (stRunningHandlersRemoteAverage statistics)
        return $ statistics {
              stPeerStatistics = peerStatistics
            , stRunningHandlersRemoteAverage = runningHandlersRemoteAverage
            }

  where

    -- Adjust the means. The Bool is true if it's a new peer.
    -- The Double is the current number of peers (always > 0).
    -- The Int is the current number of running handlers.
    adjustMeans :: Bool -> Double -> Int64 -> (Double, Double) -> (Double, Double)
    adjustMeans !isNewPeer !npeers !nhandlers (!avg, !avg2) = case isNewPeer of

        True -> (avg', avg2')
            where
            avg' = avg * ((npeers - 1) / npeers) + (1 / npeers)
            avg2' = avg2 * ((npeers - 1) / npeers) + (1 / npeers)

        False -> (avg', avg2')
            where
            avg' = avg + (1 / npeers)
            -- BUG FIX: the mean of squares must be updated from the previous
            -- mean of squares ('avg2'), not from the mean ('avg'): when a
            -- peer's handler count goes from h to h+1 its square grows by
            -- 2*h + 1, so the mean of squares grows by that amount / npeers.
            -- NOTE(review): 'nhandlers' here is the node-wide gauge value,
            -- not this peer's own count, so the increment is approximate —
            -- confirm intended (see the TODO above).
            avg2' = avg2 + (fromIntegral (2 * nhandlers + 1) / npeers)
-- TODO: revise these computations to make them numerically stable (or maybe
-- use Rational?).

-- | Remove a finished handler from the statistics: update the per-peer
-- counters, decrement the relevant gauges, update the running mean and mean
-- of squares of handlers per peer, and record the handler's running time in
-- the normal or exceptional finish distribution according to 'outcome'.
stRemoveHandler
    :: Trace IO (Severity, Text)
    -> HandlerProvenance peerData t
    -> Microsecond
    -> Maybe SomeException
    -> Statistics
    -> IO Statistics
stRemoveHandler logTrace !provenance !elapsed !outcome !statistics = case provenance of

    -- TODO: generalize this computation so we can use the same thing for
    -- both local and remote. It's a copy/paste job right now swapping local
    -- for remote.
    Local !_peer _ -> do
        (!peerStatistics, !isEndedPeer) <- pstRemoveHandler logTrace provenance (stPeerStatistics statistics)
        when isEndedPeer $ Metrics.Gauge.dec (stPeers statistics)
        Metrics.Gauge.dec (stRunningHandlersLocal statistics)
        !npeers <- Metrics.Gauge.read (stPeers statistics)
        !nhandlers <- Metrics.Gauge.read (stRunningHandlersLocal statistics)
        let runningHandlersLocalAverage =
                adjustMeans isEndedPeer
                            npeers
                            nhandlers
                            (stRunningHandlersLocalAverage statistics)
        addSample
        return $ statistics {
              stPeerStatistics = peerStatistics
            , stRunningHandlersLocalAverage = runningHandlersLocalAverage
            }

    Remote !_peer _ -> do
        (!peerStatistics, !isEndedPeer) <- pstRemoveHandler logTrace provenance (stPeerStatistics statistics)
        when isEndedPeer $ Metrics.Gauge.dec (stPeers statistics)
        Metrics.Gauge.dec (stRunningHandlersRemote statistics)
        !npeers <- Metrics.Gauge.read (stPeers statistics)
        !nhandlers <- Metrics.Gauge.read (stRunningHandlersRemote statistics)
        let runningHandlersRemoteAverage =
                adjustMeans isEndedPeer
                            npeers
                            nhandlers
                            (stRunningHandlersRemoteAverage statistics)
        addSample
        return $ statistics {
              stPeerStatistics = peerStatistics
            , stRunningHandlersRemoteAverage = runningHandlersRemoteAverage
            }

  where

    -- Convert the elapsed time to a Double and then add it to the relevant
    -- distribution.
    addSample = case outcome of
        Nothing -> Metrics.Distribution.add (stHandlersFinishedNormally statistics) (fromIntegral (toInteger elapsed))
        Just _ -> Metrics.Distribution.add (stHandlersFinishedExceptionally statistics) (fromIntegral (toInteger elapsed))

    -- Adjust the means. The Bool is true if it's a stale peer (removed last
    -- handler).
    -- The first Int is the current number of peers (could be 0).
    -- The second Int is the current number of running handlers.
    adjustMeans :: Bool -> Int64 -> Int64 -> (Double, Double) -> (Double, Double)
    adjustMeans !isEndedPeer !npeers !nhandlers (!avg, !avg2) = case isEndedPeer of

        True -> if npeers == 0
                then (0, 0)
                else (avg', avg2')
            where
            avg' = avg * (fromIntegral (npeers - 1) / fromIntegral npeers) + (1 / fromIntegral npeers)
            avg2' = avg2 * (fromIntegral (npeers - 1) / fromIntegral npeers) + (1 / fromIntegral npeers)

        False -> (avg', avg2')
            where
            avg' = avg - (1 / fromIntegral npeers)
            -- BUG FIX: the mean of squares must be updated from the previous
            -- mean of squares ('avg2'), not from the mean ('avg'), mirroring
            -- the corresponding branch of 'stAddHandler'.
            -- NOTE(review): 'nhandlers' here is the node-wide gauge value,
            -- not this peer's own count, so the decrement is approximate —
            -- confirm intended (see the TODO above).
            avg2' = avg2 - (fromIntegral (2 * nhandlers + 1) / fromIntegral npeers)
-- | How to create and close an 'EndPoint'.
-- See 'simpleNodeEndPoint' for a very obvious example.
-- More complicated things are possible, for instance using concrete
-- transport specific features.
data NodeEndPoint = NodeEndPoint {
      -- | Create the end point (may fail with a transport error).
      newNodeEndPoint :: IO (Either (NT.TransportError NT.NewEndPointErrorCode) NT.EndPoint)
      -- | Close the given end point when the node shuts down.
    , closeNodeEndPoint :: NT.EndPoint -> IO ()
    }
-- | A 'NodeEndPoint' which uses the typical network-transport 'newEndPoint'
-- and 'closeEndPoint'.
simpleNodeEndPoint :: NT.Transport -> NodeEndPoint
simpleNodeEndPoint transport =
    NodeEndPoint
        { newNodeEndPoint   = NT.newEndPoint transport
        , closeNodeEndPoint = NT.closeEndPoint
        }
-- | Use an existing 'EndPoint'. It will be closed automatically when the
-- node stops, so do not close it yourself.
manualNodeEndPoint :: NT.EndPoint -> NodeEndPoint
manualNodeEndPoint ep =
    NodeEndPoint
        { newNodeEndPoint   = pure (Right ep)
        , closeNodeEndPoint = NT.closeEndPoint
        }
-- | Bring up a 'Node' using a network transport.
--
-- Uses two nested 'rec' (RecursiveDo) blocks to tie laziness knots: the
-- 'NodeEndPoint' and the delay selectors may depend lazily on the 'node'
-- defined further down, and the 'Node' record holds the dispatcher thread
-- while the dispatcher needs the 'Node'. Throws if end point creation fails.
startNode
    :: forall packingType peerData .
       ( Serializable packingType peerData )
    => Trace IO (Severity, Text)
    -> Packing packingType IO
    -> peerData
    -> (Node packingType peerData -> NodeEndPoint)
    -> (Node packingType peerData -> ReceiveDelay)
       -- ^ Use the node (lazily) to determine a delay in microseconds to wait
       -- before dequeueing the next network-transport event (see
       -- 'nodeReceiveDelay').
    -> (Node packingType peerData -> ReceiveDelay)
       -- ^ See 'nodeConnectDelay'
    -> StdGen
       -- ^ A source of randomness, for generating nonces.
    -> NodeEnvironment
    -> (peerData -> NodeId -> ChannelIn -> ChannelOut -> IO ())
       -- ^ Handle incoming bidirectional connections.
    -> IO (Node packingType peerData)
startNode logTrace packing peerData mkNodeEndPoint mkReceiveDelay mkConnectDelay
          prng nodeEnv handlerInOut = do
    rec { let nodeEndPoint = mkNodeEndPoint node
        ; mEndPoint <- newNodeEndPoint nodeEndPoint
        ; let receiveDelay = mkReceiveDelay node
              connectDelay = mkConnectDelay node
        ; node <- case mEndPoint of
              -- End point creation failed: re-throw the transport error.
              Left err -> throwIO err
              Right endPoint -> do
                  sharedState <- initialNodeState prng
                  -- TODO this thread should get exceptions from the dispatcher thread.
                  rec { let node = Node {
                              nodeTrace = logTrace
                            , nodeEndPoint = endPoint
                            , nodeCloseEndPoint = closeNodeEndPoint nodeEndPoint endPoint
                            , nodeDispatcherThread = dispatcherThread
                            , nodeEnvironment = nodeEnv
                            , nodeState = sharedState
                            , nodePacking = packing
                            , nodePeerData = peerData
                            , nodeReceiveDelay = receiveDelay
                            , nodeConnectDelay = connectDelay
                            }
                      ; dispatcherThread <- async $
                            nodeDispatcher node handlerInOut
                        -- Exceptions in the dispatcher are re-thrown here.
                      ; link dispatcherThread
                      }
                  return node
        }
    traceWith logTrace (Debug, sformat ("startNode, we are " % shown % "") (nodeId node))
    return node
-- | Stop a 'Node', closing its network transport and end point.
-- Throws a 'userError' if the node was already stopped or killed.
stopNode :: Node packingType peerData -> IO ()
stopNode node = do
    -- Mark the node closed, refusing to do so twice.
    modifyMVar (nodeState node) $ \st ->
        if _nodeStateClosed st
        then throwIO (userError "stopNode : already stopped")
        else pure (st { _nodeStateClosed = True }, ())
    -- This eventually will shut down the dispatcher thread, which in turn
    -- ought to stop the connection handling threads.
    -- It'll also close all TCP connections.
    nodeCloseEndPoint node
    -- Must wait on any handler threads. The dispatcher thread will eventually
    -- see an event indicating that the end point has closed, after which it
    -- will wait on all running handlers. Since the end point has been closed,
    -- no new handler threads will be created, so this will block indefinitely
    -- only if some handler is blocked indefinitely or looping.
    wait (nodeDispatcherThread node)
    waitForRunningHandlers node
-- | Kill a 'Node', terminating its dispatcher thread, closing its endpoint,
-- and killing all of its handlers.
-- Throws a 'userError' if the node was already stopped or killed.
killNode :: Node packingType peerData -> IO ()
killNode node = do
    -- Mark the node closed, refusing to do so twice.
    modifyMVar (nodeState node) $ \st ->
        if _nodeStateClosed st
        then throwIO (userError "killNode : already killed")
        else pure (st { _nodeStateClosed = True }, ())
    -- Closing the end point will cause the dispatcher thread to end when it
    -- gets the EndPointClosed event, so we don't cancel that thread.
    -- Cancelling that thread before closing the end point can lead to deadlock,
    -- in particular if this is backed by a TCP transport with a QDisc which
    -- may block on write.
    nodeCloseEndPoint node
    killRunningHandlers node
-- | Per-lightweight-connection protocol state tracked by the dispatcher.
data ConnectionState peerData =
    -- | This connection cannot proceed because peer data has not been
    -- received and parsed.
    WaitingForPeerData
    -- | This connection attempted to parse the peer data but failed.
    -- Any subsequent data will be ignored.
  | PeerDataParseFailure
    -- | This connection is waiting for a handshake and we have partial
    -- data. The peer state of the connection must be 'GotPeerData'.
  | WaitingForHandshake !peerData !BS.ByteString
    -- | This connection attempted handshake but it failed (protocol error).
    -- Any subsequent data will be ignored.
  | HandshakeFailure
    -- | This connection has made a handshake and is now feeding an
    -- application-specific handler through a channel. The peer state
    -- of this connection must be 'GotPeerData'.
    --
    -- First argument delivers incoming bytes ('Nothing' = end of stream);
    -- the second is run with the number of bytes each time more bytes are
    -- received. It's used to update shared metrics.
  | FeedingApplicationHandler !(Maybe BS.ByteString -> IO ()) (Int -> IO ())
-- Constructor name only; payloads (bytes, callbacks) are not showable.
instance Show (ConnectionState peerData) where
    show WaitingForPeerData              = "WaitingForPeerData"
    show PeerDataParseFailure            = "PeerDataParseFailure"
    show (WaitingForHandshake _ _)       = "WaitingForHandshake"
    show HandshakeFailure                = "HandshakeFailure"
    show (FeedingApplicationHandler _ _) = "FeedingApplicationHandler"
-- | Per-peer state tracked by the dispatcher.
data PeerState peerData =
    -- | Peer data is expected from one of these lightweight connections.
    -- If the second component is 'Just', then there's a lightweight
    -- connection which has given a partial parse of the peer data (the
    -- "leader"), with its in-progress decoder continuation.
    ExpectingPeerData
        !(NonEmptySet NT.ConnectionId)
        !(Maybe (NT.ConnectionId, Maybe BS.ByteString -> Decoder IO peerData))
    -- | Peer data has been received and parsed.
  | GotPeerData !peerData !(NonEmptySet NT.ConnectionId)
-- Shows the connection set and, for 'ExpectingPeerData', which connection
-- (if any) is the leading parser; the peer data itself is not showable.
instance Show (PeerState peerData) where
    show (ExpectingPeerData peers mleader) =
        unwords ["ExpectingPeerData", show peers, show (fmap fst mleader)]
    show (GotPeerData _ peers) =
        unwords ["GotPeerData", show peers]
-- | The dispatcher's bookkeeping: for each open lightweight connection,
-- the peer it belongs to and its protocol state; and for each peer, its
-- aggregate state.
data DispatcherState peerData = DispatcherState {
      dsConnections :: Map NT.ConnectionId (NT.EndPointAddress, ConnectionState peerData)
    , dsPeers :: Map NT.EndPointAddress (PeerState peerData)
    }

deriving instance Show (DispatcherState peerData)
-- | Dispatcher state at node start-up: no connections, no peers.
initialDispatcherState :: DispatcherState peerData
initialDispatcherState =
    DispatcherState { dsConnections = Map.empty, dsPeers = Map.empty }
-- | Get the running handlers for a node: all locally-initiated bidirectional
-- handlers (flattening the per-peer nonce maps) followed by all inbound ones.
getRunningHandlers :: Node packingType peerData -> IO [SomeHandler]
getRunningHandlers node = withMVar (nodeState node) $ \st ->
    let handlerOf (x, _, _, _, _, _, _) = x
        -- Flatten: peer keys -> nonce maps -> entries, keeping the handler.
        outboundBi =
            [ handlerOf entry
            | nonceMap <- Map.elems (_nodeStateOutboundBidirectional st)
            , entry <- Map.elems nonceMap
            ]
        inbound = Set.toList (_nodeStateInbound st)
    in return (outboundBi ++ inbound)
-- | Wait for every running handler in a node's state to finish.
-- If they throw an exception, it's not re-thrown. Even async exceptions are
-- squelched, so be careful.
waitForRunningHandlers :: Node packingType peerData -> IO ()
waitForRunningHandlers node =
    mapM_ waitCatchSomeHandler =<< getRunningHandlers node
-- | Kill every running handler in a node's state.
killRunningHandlers :: Node packingType peerData -> IO ()
killRunningHandlers node =
    mapM_ cancelSomeHandler =<< getRunningHandlers node
| The one thread that handles /all/ incoming messages and dispatches them
-- to various handlers.
nodeDispatcher
:: forall packingType peerData .
( Serializable packingType peerData )
=> Node packingType peerData
-> (peerData -> NodeId -> ChannelIn -> ChannelOut -> IO ())
-> IO ()
nodeDispatcher node handlerInOut =
loop initialDispatcherState
where
logTrace :: Trace IO (Severity, Text)
logTrace = nodeTrace node
nstate :: MVar (NodeState peerData)
nstate = nodeState node
receiveDelay, connectDelay :: IO ()
receiveDelay = nodeReceiveDelay node >>= maybe (return ()) delay
connectDelay = nodeConnectDelay node >>= maybe (return ()) delay
endpoint = nodeEndPoint node
loop :: DispatcherState peerData -> IO ()
loop !state = do
receiveDelay
event <- NT.receive endpoint
case event of
NT.ConnectionOpened connid _reliability peer ->
connectDelay >> connectionOpened state connid peer >>= loop
NT.Received connid bytes -> received state connid bytes >>= loop
NT.ConnectionClosed connid -> connectionClosed state connid >>= loop
-- When the end point closes, we're done.
NT.EndPointClosed -> endPointClosed state
-- Don't deal with this.
NT.ReceivedMulticast _ _ -> loop state
-- When a heavyweight connection is lost we must close up all of the
-- lightweight connections which it carried.
NT.ErrorEvent (NT.TransportError (NT.EventConnectionLost peer bundle) reason) -> do
traceWith logTrace (Error, sformat ("EventConnectionLost received from the network layer: " % shown) reason)
connectionLost state peer bundle >>= loop
-- End point failure is unrecoverable.
NT.ErrorEvent (NT.TransportError NT.EventEndPointFailed reason) ->
throwIO (InternalError $ "EndPoint failed: " ++ reason)
-- Transport failure is unrecoverable.
NT.ErrorEvent (NT.TransportError NT.EventTransportFailed reason) ->
throwIO (InternalError $ "Transport failed " ++ reason)
-- EndPointClosed is the final event that we will receive. There may be
-- connections which remain open! ConnectionClosed events may be
-- inbound but since our end point has closed, we won't take them. So here
-- we have to plug every remaining input channel.
endPointClosed
:: DispatcherState peerData
-> IO ()
endPointClosed state = do
let connections = Map.toList (dsConnections state)
-- This is *not* a network-transport error; EndPointClosed can be
-- posted without ConnectionClosed for all open connections, as an
-- optimization.
when (not (null connections)) $ do
forM_ connections $ \(_, st) -> case st of
(_, FeedingApplicationHandler dumpBytes _) -> do
dumpBytes Nothing
_ -> return ()
Must plug input channels for all un - acked outbound connections , and
-- fill the peer data vars in case they haven't yet been filled. This
-- is to ensure that handlers never block on these things.
_ <- modifyMVar nstate $ \st -> do
let nonceMaps = Map.elems (_nodeStateOutboundBidirectional st)
let outbounds = nonceMaps >>= Map.elems
forM_ outbounds $ \(_, dumpBytes, _, peerDataVar, _, _, acked) -> do
when (not acked) $ do
_ <- tryPutMVar peerDataVar (error "no peer data because local node has gone down")
dumpBytes Nothing
return (st, ())
-- Check that this node was closed by a call to 'stopNode' or
-- 'killNode'. If it wasn't, we throw an exception. This is important
-- because the thread which runs 'startNode' must *not* continue after
the ' EndPoint ' is closed .
withMVar nstate $ \nodeState ->
if _nodeStateClosed nodeState
then pure ()
else throwIO (InternalError "EndPoint prematurely closed")
connectionOpened
:: DispatcherState peerData
-> NT.ConnectionId
-> NT.EndPointAddress
-> IO (DispatcherState peerData)
connectionOpened state connid peer = case Map.lookup connid (dsConnections state) of
Just (peer', _) -> do
traceWith logTrace (Warning, sformat ("ignoring duplicate connection " % shown % shown % shown) peer peer' connid)
return state
Nothing -> do
-- How we handle this connection depends on whether we already have
-- a connection from this peer.
case Map.lookup peer (dsPeers state) of
-- If we do, we can start waiting for the handshake.
Just (GotPeerData peerData neset) -> do
return $ state {
dsConnections = Map.insert connid (peer, WaitingForHandshake peerData BS.empty) (dsConnections state)
, dsPeers = Map.insert peer (GotPeerData peerData (NESet.insert connid neset)) (dsPeers state)
}
-- If we don't, then we must await and decode the peer data.
Nothing -> do
return $ state {
dsConnections = Map.insert connid (peer, WaitingForPeerData) (dsConnections state)
, dsPeers = Map.insert peer (ExpectingPeerData (NESet.singleton connid) Nothing) (dsPeers state)
}
-- We got another connection before the peer data arrived.
-- That's actually OK. It's only an error if we receive data
on this connection before the first connection receives
-- and parses the peer data ('received' handles this aspect).
-- So here we just record the connection.
Just (ExpectingPeerData neset mleader) -> do
return $ state {
dsConnections = Map.insert connid (peer, WaitingForPeerData) (dsConnections state)
, dsPeers = Map.insert peer (ExpectingPeerData (NESet.insert connid neset) mleader) (dsPeers state)
}
received
:: DispatcherState peerData
-> NT.ConnectionId
-> [BS.ByteString]
-> IO (DispatcherState peerData)
received state connid chunks = case Map.lookup connid (dsConnections state) of
Nothing -> do
traceWith logTrace (Warning, sformat ("ignoring data on unknown connection " % shown) connid)
return state
-- This connection gave bogus peer data. Ignore the data.
Just (peer, PeerDataParseFailure) -> do
traceWith logTrace (Warning, sformat ("ignoring data on failed connection (peer data) from " % shown) peer)
return state
-- This connection gave a bad handshake. Ignore the data.
Just (peer, HandshakeFailure) -> do
traceWith logTrace (Warning, sformat ("ignoring data on failed connection (handshake) from " % shown) peer)
return state
-- This connection is awaiting the initial peer data.
Just (peer, WaitingForPeerData) -> case Map.lookup peer (dsPeers state) of
Just (ExpectingPeerData connids mleader) -> case mleader of
-- There's no leader. This connection is now the leader. Begin
-- the attempt to decode the peer data.
Nothing -> do
decoderStep :: DecoderStep IO peerData <- runDecoder (unpack (nodePacking node))
decoderStep' <- continueDecoding decoderStep (BS.concat chunks)
case decoderStep' of
Fail _ _ err -> do
traceWith logTrace (Warning, sformat ("failed to decode peer data from " % shown % ": got error " % shown) peer err)
return $ state {
dsConnections = Map.insert connid (peer, PeerDataParseFailure) (dsConnections state)
}
Done trailing _ peerData -> do
let state' = state {
dsConnections = foldl' (awaitHandshake peerData) (dsConnections state) (NESet.toList connids)
, dsPeers = Map.insert peer (GotPeerData peerData connids) (dsPeers state)
}
received state' connid [trailing]
Partial decoderContinuation -> do
return $ state {
dsPeers = Map.insert peer (ExpectingPeerData connids (Just (connid, decoderContinuation))) (dsPeers state)
}
Just (connid', decoderContinuation) -> case connid == connid' of
Protocol error . We got data from some other lightweight
-- connection before the peer data was parsed.
False -> do
traceWith logTrace (Warning, sformat ("peer data protocol error from " % shown) peer)
return state
True -> do
decoderStep <- runDecoder (decoderContinuation (Just (BS.concat chunks)))
case decoderStep of
Fail _ _ err -> do
traceWith logTrace (Warning, sformat ("failed to decode peer data from " % shown % ": got error " % shown) peer err)
return $ state {
dsConnections = Map.insert connid (peer, PeerDataParseFailure) (dsConnections state)
}
Done trailing _ peerData -> do
let state' = state {
dsConnections = foldl' (awaitHandshake peerData) (dsConnections state) (NESet.toList connids)
, dsPeers = Map.insert peer (GotPeerData peerData connids) (dsPeers state)
}
received state' connid [trailing]
Partial decoderContinuation' -> do
return $ state {
dsPeers = Map.insert peer (ExpectingPeerData connids (Just (connid, decoderContinuation'))) (dsPeers state)
}
where
-- Update a connection's state to WaitingForHandshake. For use
in a fold once the peer data has been parsed . The first
-- parameters give the id of the connection which made the
-- parse and the data left-over after the parse, which must
-- be remembered in the connection state for that id.
awaitHandshake
:: peerData
-> Map NT.ConnectionId (NT.EndPointAddress, ConnectionState peerData)
-> NT.ConnectionId
-> Map NT.ConnectionId (NT.EndPointAddress, ConnectionState peerData)
awaitHandshake peerData map connid =
Map.update (\(peer, _) -> Just (peer, WaitingForHandshake peerData BS.empty)) connid map
-- We're waiting for peer data on this connection, but we don't
-- have an entry for the peer. That's an internal error.
Nothing -> do
throwIO $ InternalError "node dispatcher inconsistent state (waiting for peer data)"
Just (GotPeerData _ _) -> do
throwIO $ InternalError "node dispatcher inconsistent state (already got peer data)"
-- Waiting for a handshake. Try to get a control header and then
-- move on.
Just (peer, WaitingForHandshake peerData partial) -> do
let bytes = BS.append partial (BS.concat chunks)
case BS.uncons bytes of
Nothing -> return state
Just (w, ws)
-- Got a bidirectional header but still waiting for the
-- nonce.
| w == controlHeaderCodeBidirectionalSyn ||
w == controlHeaderCodeBidirectionalAck
, BS.length ws < 8 -> return $ state {
dsConnections = Map.insert connid (peer, WaitingForHandshake peerData bytes) (dsConnections state)
}
-- Got a SYN. Spawn a thread to connect to the peer using
-- the nonce provided and then run the bidirectional handler.
| w == controlHeaderCodeBidirectionalSyn
, Right (ws', _, nonce) <- decodeOrFail (LBS.fromStrict ws) -> do
channel <- newTChanIO
chanVar <- newMVar (Just channel)
let dumpBytes mBytes = withMVar chanVar $
maybe (return ()) (\chan -> atomically (writeTChan chan mBytes))
provenance = Remote peer connid
respondAndHandle conn = do
outcome <- NT.send conn [controlHeaderBidirectionalAck nonce]
case outcome of
Left err -> throwIO err
Right () -> do
handlerInOut peerData (NodeId peer) (ChannelIn channel) (ChannelOut conn)
Resource releaser for bracketWithException .
-- No matter what, we must update the node state to
-- indicate that we've disconnected from the peer.
cleanup (me :: Maybe SomeException) = do
modifyMVar chanVar $ \_ -> return (Nothing, ())
case me of
Nothing -> return ()
Just e -> traceWith logTrace (Error,
sformat (shown % " error in conversation response " % shown) nonce e)
handler = bracketWithException
(return ())
(const cleanup)
(const (connectToPeer node (NodeId peer) respondAndHandle))
-- Establish the other direction in a separate thread.
(_, incrBytes) <- spawnHandler logTrace nstate provenance handler
let bs = LBS.toStrict ws'
dumpBytes $ Just bs
incrBytes $ fromIntegral (BS.length bs)
return $ state {
dsConnections = Map.insert connid (peer, FeedingApplicationHandler dumpBytes incrBytes) (dsConnections state)
}
-- Got an ACK. Try to decode the nonce and check that
-- we actually sent it.
| w == controlHeaderCodeBidirectionalAck
, Right (ws', _, nonce) <- decodeOrFail (LBS.fromStrict ws) -> do
outcome <- modifyMVar nstate $ \st -> do
-- Lookup the nonce map for the peer, then check
-- that nonce map at the supplied nonce.
let nonces = Map.lookup peer (_nodeStateOutboundBidirectional st)
let thisNonce = nonces >>= Map.lookup nonce
case thisNonce of
Nothing -> return (st, Nothing)
Just (_, _, _, _, _, _, True) -> return (st, Just Nothing)
Just (promise, dumpBytes, incrBytes, peerDataVar, connBundle, timeoutPromise, False) -> do
cancel timeoutPromise
return
( st { _nodeStateOutboundBidirectional = Map.update updater peer (_nodeStateOutboundBidirectional st)
}
, Just (Just (dumpBytes, incrBytes, peerDataVar))
)
where
updater map = Just $ Map.insert nonce (promise, dumpBytes, incrBytes, peerDataVar, connBundle, timeoutPromise, True) map
case outcome of
-- We don't know about the nonce. Could be that
we never sent the SYN for it ( protocol error )
-- or the handler for it has already finished.
-- In any case, say the handshake failed so that
-- subsequent data is ignored.
Nothing -> do
traceWith logTrace (Warning, sformat ("got unknown nonce " % shown) nonce)
return $ state {
dsConnections = Map.insert connid (peer, HandshakeFailure) (dsConnections state)
}
Got a duplicate ACK .
Just Nothing -> do
traceWith logTrace (Warning, sformat ("duplicate ACK nonce from " % shown) peer)
return $ state {
dsConnections = Map.insert connid (peer, HandshakeFailure) (dsConnections state)
}
Got an ACK for a SYN that we sent . Start
-- feeding the application handler.
Just (Just (dumpBytes, incrBytes, peerDataVar)) -> do
putMVar peerDataVar peerData
let bs = LBS.toStrict ws'
dumpBytes $ Just bs
incrBytes $ fromIntegral (BS.length bs)
return $ state {
dsConnections = Map.insert connid (peer, FeedingApplicationHandler dumpBytes incrBytes) (dsConnections state)
}
-- Handshake failure. Subsequent receives will be ignored.
| otherwise -> do
traceWith logTrace (Warning, sformat ("unexpected control header from " % shown % " : " % shown) peer w)
return $ state {
dsConnections = Map.insert connid (peer, HandshakeFailure) (dsConnections state)
}
-- This connection is feeding a handler. Make the data available.
-- TODO: if the handler has already finished, we want to just forget
-- the data. How? Weak reference to the channel perhaps? Or
-- explcitly close it down when the handler finishes by adding some
-- mutable cell to FeedingApplicationHandler?
Just (_peer, FeedingApplicationHandler dumpBytes incrBytes) -> do
let bs = LBS.toStrict (LBS.fromChunks chunks)
dumpBytes $ Just bs
incrBytes $ BS.length bs
return state
-- | Handle a network-transport ConnectionClosed event: if the connection was
-- feeding an application handler, plug its channel (signal end-of-stream),
-- then remove the connection from the dispatcher state, including its entry
-- in the per-peer connection set.
connectionClosed
  :: DispatcherState peerData
  -> NT.ConnectionId
  -> IO (DispatcherState peerData)
connectionClosed state connid = case Map.lookup connid (dsConnections state) of
    Nothing -> do
        traceWith logTrace (Warning, sformat ("closed unknown connection " % shown) connid)
        return state
    Just (peer, connState) -> do
        case connState of
            FeedingApplicationHandler dumpBytes _ -> do
                -- Signal end of channel.
                dumpBytes Nothing
            _ -> return ()
        -- This connection can be removed from the connection states map.
        -- Removing it from the peers map is more involved.
        let peersUpdater existing = case existing of
                GotPeerData peerData neset -> case NESet.delete connid neset of
                    Nothing -> Nothing
                    Just neset' -> Just (GotPeerData peerData neset')
                ExpectingPeerData neset mleader -> case NESet.delete connid neset of
                    Nothing -> Nothing
                    Just neset' -> case mleader of
                        Nothing -> Just (ExpectingPeerData neset' mleader)
                        Just (connid', _partialDecoder) -> case connid == connid' of
                            -- The connection which is giving the peer data
                            -- has closed! That's ok, just forget about it
                            -- and the partial decode of that data.
                            True -> Just (ExpectingPeerData neset' Nothing)
                            False -> Just (ExpectingPeerData neset' mleader)
        let state' = state {
                  dsConnections = Map.delete connid (dsConnections state)
                , dsPeers = Map.update peersUpdater peer (dsPeers state)
                }
        return state'
-- | Handle a network-transport ConnectionLost event for a bundle to a peer:
-- plug every inbound channel from that peer and drop the peer from the
-- dispatcher state, then unblock any local (outbound bidirectional) handlers
-- on that bundle which have not yet received an ACK, so they don't wait
-- forever on peer data or channel input.
connectionLost
  :: DispatcherState peerData
  -> NT.EndPointAddress
  -> NT.ConnectionBundle
  -> IO (DispatcherState peerData)
connectionLost state peer bundle = do
    -- There must always be 0 connections from the peer, for
    -- network-transport must have posted the ConnectionClosed events for
    -- every inbound connection before posting the connection-lost event
    -- (but see the note below: this is relaxed as an optimization).
    traceWith logTrace (Warning, sformat ("lost connection bundle " % shown % " to " % shown) bundle peer)
    state' <- case Map.lookup peer (dsPeers state) of
        Just it -> do
            -- This is *not* a network-transport bug; a connection lost
            -- event can be posted without ConnectionClosed, as an
            -- optimization.
            let connids = case it of
                    GotPeerData _ neset       -> NESet.toList neset
                    ExpectingPeerData neset _ -> NESet.toList neset
            -- For every connection to that peer we'll plug the channel with
            -- Nothing and remove it from the map.
            let folder :: Map NT.ConnectionId (NT.EndPointAddress, ConnectionState peerData)
                       -> NT.ConnectionId
                       -> IO (Map NT.ConnectionId (NT.EndPointAddress, ConnectionState peerData))
                folder channels connid = case Map.updateLookupWithKey (\_ _ -> Nothing) connid channels of
                    (Just (_, FeedingApplicationHandler dumpBytes _), channels') -> do
                        dumpBytes Nothing
                        return channels'
                    (_, channels') -> return channels'
            channels' <- foldlM folder (dsConnections state) connids
            return $ state {
                  dsConnections = channels'
                , dsPeers = Map.delete peer (dsPeers state)
                }
        Nothing -> return state
    -- Every outbound bidirectional connection which is carried by this
    -- bundle, and which has not yet received an ACK, must have its
    -- channel plugged and its peer data shared exclusive filled in case
    -- it has not yet been. This is to ensure that the handlers do not
    -- block indefinitely when trying to access these things.
    --
    -- Outbound unidirectional connections need no attention: they will
    -- fail if they try to 'send', but since they expect no data in
    -- return, we don't have to take care of them here.
    channelsAndPeerDataVars <- modifyMVar nstate $ \st -> do
        let nonces = Map.lookup peer (_nodeStateOutboundBidirectional st)
        case nonces of
            -- Perfectly normal: lost the connection but we had no
            -- outbound bidirectional connections to it.
            Nothing -> return (st, [])
            Just map -> do
                -- Remove every element from the map which is carried by
                -- this bundle, and then remove the map itself if it's
                -- empty.
                let folder (_, channelIn, _, peerDataVar, bundle', _, acked) channels
                        | bundle' == bundle && not acked = (channelIn, peerDataVar) : channels
                        | otherwise = channels
                let channelsAndPeerDataVars = Map.foldr folder [] map
                return (st, channelsAndPeerDataVars)
    traceWith logTrace (Warning, sformat ("closing " % shown % " channels on bundle " % shown % " to " % shown) (length channelsAndPeerDataVars) bundle peer)
    forM_ channelsAndPeerDataVars $ \(dumpBytes, peerDataVar) -> do
        -- Fill the peer-data var so a handler blocked on it wakes up
        -- (the value is an error thunk; reading it after a lost
        -- connection is a programmer error), then plug the channel.
        _ <- tryPutMVar peerDataVar (error "no peer data because the connection was lost")
        dumpBytes Nothing
    return state'
-- | Spawn a thread and track it in shared state, taking care to remove it from
-- shared state when it's finished and updating statistics appropriately.
-- This is applicable to handlers spawned in response to inbound peer
-- connections, and also for actions which use outbound connections.
--
-- Returns the handler's 'Async' together with a callback for crediting
-- received bytes to this handler (used by the dispatcher).
spawnHandler
    :: forall peerData t .
       Trace IO (Severity, Text)
    -> MVar (NodeState peerData)
    -> HandlerProvenance peerData (Maybe BS.ByteString -> IO ())
    -> IO t
    -> IO (Async t, Int -> IO ())
spawnHandler logTrace stateVar provenance action =
    modifyMVar stateVar $ \nodeState -> do
        totalBytes <- newMVar 0
        -- Spawn the thread to get a 'SomeHandler'.
        -- RecursiveDo: the promise is referenced by 'someHandler', which is
        -- itself used in the registration below, before the thread finishes.
        rec { promise <- async $ do
                  startTime <- getCurrentTime
                  normal someHandler startTime totalBytes
                      `catch` exceptional someHandler startTime totalBytes
            ; let someHandler = SomeHandler promise
            }
        -- It is assumed that different promises do not compare equal.
        -- It is assumed to be highly unlikely that there will be nonce
        -- collisions (that we have a good prng).
        let nodeState' = case provenance of
                Remote _ _ -> nodeState {
                      _nodeStateInbound = Set.insert someHandler (_nodeStateInbound nodeState)
                    }
                Local peer (nonce, peerDataVar, connBundle, timeoutPromise, dumpBytes) -> nodeState {
                      _nodeStateOutboundBidirectional = Map.alter alteration peer (_nodeStateOutboundBidirectional nodeState)
                    }
                  where
                    -- Register under the nonce; the final False means
                    -- "not yet ACKed".
                    alteration Nothing = Just $ Map.singleton nonce (someHandler, dumpBytes, incrBytes, peerDataVar, connBundle, timeoutPromise, False)
                    alteration (Just map) = Just $ Map.insert nonce (someHandler, dumpBytes, incrBytes, peerDataVar, connBundle, timeoutPromise, False) map
            -- Credit @n@ received bytes to this handler: bump the
            -- per-peer statistics and this handler's running total.
            incrBytes !n = do
                nodeState <- readMVar stateVar
                stIncrBytes (handlerProvenancePeer provenance) n (_nodeStateStatistics nodeState)
                modifyMVar totalBytes $ \(!m) -> return (m + n, ())
        statistics' <- stAddHandler provenance (_nodeStateStatistics nodeState)
        return (nodeState' { _nodeStateStatistics = statistics' }, (promise, incrBytes))
  where
    -- Run the action and deregister on normal completion.
    normal :: SomeHandler -> Microsecond -> MVar Int -> IO t
    normal someHandler startTime totalBytesVar = do
        t <- action
        signalFinished someHandler startTime totalBytesVar Nothing
        pure t
    -- Deregister on exceptional completion, then rethrow.
    exceptional :: SomeHandler -> Microsecond -> MVar Int -> SomeException -> IO t
    exceptional someHandler startTime totalBytesVar e = do
        signalFinished someHandler startTime totalBytesVar (Just e)
        throwIO e
    -- Remove the handler from shared state and update statistics with its
    -- elapsed time and outcome.
    signalFinished :: SomeHandler -> Microsecond -> MVar Int -> Maybe SomeException -> IO ()
    signalFinished someHandler startTime totalBytesVar outcome = do
        endTime <- getCurrentTime
        let elapsed = endTime - startTime
        totalBytes <- readMVar totalBytesVar
        modifyMVar stateVar $ \nodeState -> do
            let nodeState' = case provenance of
                    Remote _ _ -> nodeState {
                          _nodeStateInbound = Set.delete someHandler (_nodeStateInbound nodeState)
                        }
                    -- Remove the nonce for this peer, and remove the whole map
                    -- if this was the only nonce for that peer.
                    Local peer (nonce, _, _, _, _) -> nodeState {
                          _nodeStateOutboundBidirectional = Map.update updater peer (_nodeStateOutboundBidirectional nodeState)
                        }
                      where
                        updater map =
                            let map' = Map.delete nonce map
                            in if Map.null map' then Nothing else Just map'
            -- Decrement the live bytes by the total bytes received, and
            -- remove the handler.
            stIncrBytes (handlerProvenancePeer provenance) (-totalBytes) $ _nodeStateStatistics nodeState
            statistics' <-
                stRemoveHandler logTrace provenance elapsed outcome $
                    _nodeStateStatistics nodeState
            return (nodeState' { _nodeStateStatistics = statistics' }, ())
-- | Control byte announcing a bidirectional handshake SYN: the ASCII
-- code of @'S'@.
controlHeaderCodeBidirectionalSyn :: Word8
controlHeaderCodeBidirectionalSyn = fromIntegral . fromEnum $ 'S'
-- | Control byte announcing a bidirectional handshake ACK: the ASCII
-- code of @'A'@.
controlHeaderCodeBidirectionalAck :: Word8
controlHeaderCodeBidirectionalAck = fromIntegral . fromEnum $ 'A'
-- | Serialize the SYN control header for a bidirectional handshake:
-- the SYN control byte followed by the 8-byte big-endian nonce
-- (9 bytes total).
controlHeaderBidirectionalSyn :: Nonce -> BS.ByteString
controlHeaderBidirectionalSyn (Nonce nonce) =
    fixedSizeBuilder' 9 $
        BS.word8 controlHeaderCodeBidirectionalSyn
     <> BS.word64BE nonce
-- | Serialize the ACK control header for a bidirectional handshake:
-- the ACK control byte followed by the 8-byte big-endian nonce
-- (9 bytes total).
controlHeaderBidirectionalAck :: Nonce -> BS.ByteString
controlHeaderBidirectionalAck (Nonce nonce) =
    fixedSizeBuilder' 9 $
        BS.word8 controlHeaderCodeBidirectionalAck
     <> BS.word64BE nonce
-- | Like 'fixedSizeBuilder', but yields a strict ByteString.
fixedSizeBuilder' :: Int -> BS.Builder -> BS.ByteString
fixedSizeBuilder' size builder = LBS.toStrict (fixedSizeBuilder size builder)
-- | Run a builder whose output size in bytes is known in advance,
-- allocating exactly one chunk of that size (untrimmed strategy, so no
-- copy to shrink the buffer afterwards).
fixedSizeBuilder :: Int -> BS.Builder -> LBS.ByteString
fixedSizeBuilder size builder =
    BS.toLazyByteStringWith (BS.untrimmedStrategy size size) LBS.empty builder
-- | Create, use, and tear down a conversation channel with a given peer
-- (NodeId).
-- This may be killed with a 'Timeout' exception in case the peer does not
-- give an ACK before the specified timeout ('nodeAckTimeout').
withInOutChannel
    :: forall packingType peerData a .
       ( Serializable packingType peerData )
    => Node packingType peerData
    -> NodeId
    -> (peerData -> ChannelIn -> ChannelOut -> IO a)
    -> IO a
withInOutChannel node@Node{nodeEnvironment, nodeState, nodeTrace} nodeid@(NodeId peer) action = do
    -- Draw a fresh nonce from the node's PRNG to identify this
    -- bidirectional conversation.
    nonce <- modifyMVar nodeState $ \nodeState -> do
        let (nonce, !prng') = random (_nodeStateGen nodeState)
        pure (nodeState { _nodeStateGen = prng' }, nonce)
    channel <- fmap ChannelIn newTChanIO
    -- A mutable cell for the channel. We'll swap it to Nothing when we don't
    -- want to accept any more bytes (the handler has finished).
    channelVar <- newMVar (Just channel)
    let dumpBytes mbs = withMVar channelVar $ \mchannel -> case mchannel of
            Nothing -> pure ()
            Just (ChannelIn channel) -> atomically $ writeTChan channel mbs
        closeChannel = modifyMVar channelVar $ \_ -> pure (Nothing, ())
    -- The dispatcher will fill in the peer data as soon as it's available.
    -- TODO: must ensure that at some point it is always filled. What if the
    -- peer never responds? All we can do is time-out I suppose.
    -- Indeed, the peer may never even ACK.
    peerDataVar <- newEmptyMVar
    -- When the connection is up, we can register a handler using the bundle
    -- identifier.
    -- An exception may be thrown after the connection is established but
    -- before we register, but that's OK, as disconnectFromPeer is forgiving
    -- about this.
    -- But if an exception is thrown to this action while it's waiting for the
    -- promise, we must cancel that promise (that running handler).
    let action' conn = mask $ \restore -> do
            -- RecursiveDo: the provenance mentions 'timeoutPromise', which is
            -- spawned after the handler (so that an incoming ACK can cancel
            -- the timeout via the provenance).
            rec { let provenance = Local peer (nonce, peerDataVar, NT.bundle conn, timeoutPromise, dumpBytes)
                ; (promise, _) <- restore $ spawnHandler nodeTrace nodeState provenance $ do
                      -- It's essential that we only send the handshake SYN inside
                      -- the handler, because at this point the nonce is guaranteed
                      -- to be known in the node state. If we sent the handshake
                      -- before 'spawnHandler' we risk (although it's highly unlikely)
                      -- receiving the ACK before the nonce is put into the state.
                      -- This isn't so unlikely in the case of self-connections.
                      outcome <- NT.send conn [controlHeaderBidirectionalSyn nonce]
                      case outcome of
                          Left err -> throwIO err
                          Right _ -> do
                              peerData <- readMVar peerDataVar
                              action peerData channel (ChannelOut conn)
                  -- Here we spawn the timeout thread... Killing the 'promise'
                  -- is enough to clean everything up.
                  -- This timeout promise is included in the provenance, so
                  -- that when an ACK is received, the timeout thread can
                  -- be killed.
                ; timeoutPromise <- async $ do
                      delay (nodeAckTimeout nodeEnvironment)
                      cancelWith promise Timeout
                }
            restore (wait promise) `catch` \(e :: SomeAsyncException) -> do
                uninterruptibleCancel promise
                throwIO e
    connectToPeer node nodeid action' `finally` closeChannel
-- | The state of outbound connections to a single peer, tracked so that
-- concurrent connectors/disconnectors can coordinate (see 'connectToPeer'
-- and 'disconnectFromPeer').
data OutboundConnectionState =
      -- | A stable outbound connection has some positive number of established
      -- connections.
      Stable !(Maybe ComingUp) !Int !(Maybe GoingDown) !PeerDataTransmission
      -- | Every connection is being brought down.
    | AllGoingDown !GoingDown
      -- | Every connection is being brought up.
    | AllComingUp !ComingUp

-- | The MVar will be filled when the last connection goes down.
data GoingDown = GoingDown !Int !(MVar ())

-- | The MVar will be filled when the first connection comes up.
data ComingUp = ComingUp !Int !(MVar ())

-- | Progress of the one-time peer-data transmission to a peer: it must be
-- the first thing sent on the first connection, and concurrent connectors
-- must wait until it is done.
data PeerDataTransmission =
      PeerDataToBeTransmitted
    | PeerDataInFlight !(MVar (Maybe SomeException))
    | PeerDataTransmitted
-- | Close an outbound connection to a peer, updating the shared
-- 'OutboundConnectionState' so that concurrent connectors see the
-- connection as going down, and filling the relevant shared exclusive
-- when the last connection is gone.
disconnectFromPeer
    :: Node packingType peerData
    -> NodeId
    -> NT.Connection
    -> IO ()
disconnectFromPeer Node{nodeState} (NodeId peer) conn =
    bracketWithException startClosing finishClosing (const (NT.close conn))
  where
    -- Update the OutboundConnectionState at this peer to no longer show
    -- this connection as going down, and fill the shared exclusive if it's
    -- the last to go down.
    finishClosing _ (_ :: Maybe SomeException) = do
        modifyMVar nodeState $ \nodeState -> do
            let map = _nodeStateConnectedTo nodeState
            choice <- case Map.lookup peer map of
                Just (Stable comingUp established goingDown transmission)
                    | Just (GoingDown n excl) <- goingDown
                    , n == 1 -> do
                          putMVar excl ()
                          return . Just $ Stable comingUp established Nothing transmission
                    | Just (GoingDown n excl) <- goingDown
                    , n > 1 -> do
                          return . Just $ Stable comingUp established (Just (GoingDown (n - 1) excl)) transmission
                Just (AllGoingDown (GoingDown n excl))
                    | n == 1 -> do
                          putMVar excl ()
                          return Nothing
                    | otherwise -> do
                          return $ Just (AllGoingDown (GoingDown (n - 1) excl))
                _ -> throwIO (InternalError "finishClosing : impossible")
            let nodeState' = nodeState {
                      _nodeStateConnectedTo = Map.update (const choice) peer map
                    }
            return (nodeState', ())
    -- Update the OutboundConnectionState at this peer to show this connection
    -- as going down. If everything is coming up, wait for the first
    -- connection to come up (on the shared exclusive) and retry.
    startClosing = do
        canClose <- modifyMVar nodeState $ \nodeState -> do
            let map = _nodeStateConnectedTo nodeState
            choice <- case Map.lookup peer map of
                Just (Stable comingUp established goingDown transmission)
                    | established > 1
                    , Just (GoingDown !n excl) <- goingDown ->
                          return . Right $ Stable comingUp (established - 1) (Just (GoingDown (n + 1) excl)) transmission
                    | established > 1
                    , Nothing <- goingDown -> do
                          excl <- newEmptyMVar
                          return . Right $ Stable comingUp (established - 1) (Just (GoingDown 1 excl)) transmission
                    | established == 1
                    , Nothing <- comingUp
                    , Just (GoingDown !n excl) <- goingDown ->
                          return . Right $ AllGoingDown (GoingDown (n + 1) excl)
                    | established == 1
                    , Nothing <- comingUp
                    , Nothing <- goingDown -> do
                          excl <- newEmptyMVar
                          return . Right $ AllGoingDown (GoingDown 1 excl)
                    | established == 1
                    , Just (ComingUp !_m excl) <- comingUp ->
                          -- Another connection is being established: wait
                          -- for it before deciding how to close.
                          return . Left $ excl
                    | otherwise -> throwIO (InternalError "startClosing : impossible")
                Nothing -> throwIO (InternalError "startClosing : impossible")
                Just (AllGoingDown _) -> throwIO (InternalError "startClosing : impossible")
                Just (AllComingUp _) -> throwIO (InternalError "startClosing : impossible")
            case choice of
                Left excl -> return (nodeState, Left excl)
                Right ocs -> return (nodeState', Right ())
                  where
                    nodeState' = nodeState {
                          _nodeStateConnectedTo = Map.insert peer ocs map
                        }
        case canClose of
            Left excl -> do
                readMVar excl
                startClosing
            Right () -> return ()
-- | Connect to a peer, taking care to send the peer-data in case there are no
-- other connections to that peer. Subsequent connections to that peer
-- will block until the peer-data is sent; it must be the first thing to
-- arrive when the first lightweight connection to a peer is opened.
connectToPeer
    :: forall packingType peerData r .
       ( Serializable packingType peerData )
    => Node packingType peerData
    -> NodeId
    -> (NT.Connection -> IO r)
    -> IO r
connectToPeer node@Node{nodeEndPoint, nodeState, nodePacking, nodePeerData, nodeEnvironment, nodeTrace} nid@(NodeId peer) act =
    -- 'establish' will update shared state indicating the nature of
    -- connections to this peer: how many are coming up, going down, or
    -- established. It's essential to bracket that against 'disconnectFromPeer'
    -- so that if there's an exception when sending the peer data or when
    -- doing the 'act' continuation, the state is always brought back to
    -- consistency.
    bracket establish (disconnectFromPeer node nid) $ \conn -> do
        sendPeerDataIfNecessary conn
        act conn
  where
    mtu = nodeMtu nodeEnvironment
    -- Send the peer data on this connection iff we win the responsibility
    -- for doing so; the bracket records success/failure in shared state.
    sendPeerDataIfNecessary conn =
        bracketWithException getPeerDataResponsibility
                             dischargePeerDataResponsibility
                             (maybeSendPeerData conn)
    maybeSendPeerData conn responsibility = case responsibility of
        -- Somebody else sent it, so we can proceed.
        False -> return ()
        -- We are responsible for sending it.
        True -> sendPeerData conn
    sendPeerData conn = do
        serializedPeerData <- pack nodePacking nodePeerData
        writeMany mtu (ChannelOut conn) serializedPeerData
    -- Decide whether this caller must transmit the peer data: loop while a
    -- transmission is in flight (waiting on its shared exclusive), claim it
    -- if it has not started, or report done if already transmitted.
    getPeerDataResponsibility = do
        responsibility <- modifyMVar nodeState $ \nodeState -> do
            let map = _nodeStateConnectedTo nodeState
            (ocs, responsibility) <- case Map.lookup peer map of
                Just it@(Stable comingUp established goingDown transmission)
                    | PeerDataToBeTransmitted <- transmission -> do
                          excl <- newEmptyMVar
                          return (Stable comingUp established goingDown (PeerDataInFlight excl), Just (Right excl))
                    | PeerDataInFlight excl <- transmission ->
                          return (it, Just (Left excl))
                    | PeerDataTransmitted <- transmission ->
                          return (it, Nothing)
                    | otherwise -> throwIO (InternalError "impossible")
                _ -> do
                    traceWith nodeTrace (Error, "getPeerDataResponsibility: unexpected peer state")
                    throwIO $ InternalError "connectToPeer: getPeerDataResponsibility: impossible"
            let nodeState' = nodeState {
                      _nodeStateConnectedTo = Map.insert peer ocs map
                    }
            return (nodeState', responsibility)
        case responsibility of
            Just (Left excl) -> do
                _ <- readMVar excl
                getPeerDataResponsibility
            Just (Right _) -> do
                return True
            Nothing -> do
                return False
    -- Record the outcome of a claimed peer-data transmission: mark it
    -- transmitted on success, or put it back to be retried on failure,
    -- waking anybody waiting on the in-flight exclusive either way.
    dischargePeerDataResponsibility responsibility (merr :: Maybe SomeException) = do
        modifyMVar nodeState $ \nodeState -> do
            let map = _nodeStateConnectedTo nodeState
            ocs <- case Map.lookup peer map of
                Just it@(Stable comingUp established goingDown transmission)
                    -- We were responsible for sending it and we succeeded.
                    | True <- responsibility
                    , Nothing <- merr
                    , PeerDataInFlight excl <- transmission -> do
                          putMVar excl Nothing
                          return $ Stable comingUp established goingDown PeerDataTransmitted
                    | True <- responsibility
                    , Just _ <- merr
                    , PeerDataInFlight excl <- transmission -> do
                          putMVar excl merr
                          return $ Stable comingUp established goingDown PeerDataToBeTransmitted
                    | False <- responsibility -> return it
                _ -> do
                    traceWith nodeTrace (Error, "dischargePeerDataResponsibility: unexpected peer state")
                    throwIO $ InternalError "connectToPeer: dischargePeerDataResponsibility: impossible"
            let nodeState' = nodeState {
                      _nodeStateConnectedTo = Map.insert peer ocs map
                    }
            return (nodeState', ())
    establish = bracketWithException startConnecting finishConnecting doConnection
    doConnection _ = do
        mconn <- NT.connect nodeEndPoint
                            peer
                            NT.ReliableOrdered
                            -- TODO: give a timeout. Can't rely on it being set
                            -- at the transport level.
                            NT.ConnectHints{ connectTimeout = Nothing }
        case mconn of
            -- Throwing the error will induce the bracket resource releaser
            Left err -> throwIO err
            Right conn -> return conn
    -- Update the OutboundConnectionState at this peer to no longer show
    -- this connection as coming up, and fill the shared exclusive if it's
    -- the first to come up.
    finishConnecting _ (merr :: Maybe SomeException) = do
        modifyMVar nodeState $ \nodeState -> do
            when (_nodeStateClosed nodeState) (throwIO $ InternalError "connectToPeer : node closed while establishing connection!")
            let map = _nodeStateConnectedTo nodeState
            choice <- case Map.lookup peer map of
                Just (AllComingUp (ComingUp n excl))
                    | Nothing <- merr -> do
                          let comingUp = case n of
                                  1 -> Nothing
                                  _ -> Just (ComingUp (n - 1) excl)
                          return . Just $ Stable comingUp 1 Nothing PeerDataToBeTransmitted
                    | Just _ <- merr
                    , n == 1 ->
                          return Nothing
                    | Just _ <- merr
                    , n > 1 ->
                          return . Just $ AllComingUp (ComingUp (n - 1) excl)
                Just (Stable comingUp established goingDown transmission)
                    | Just (ComingUp n excl) <- comingUp -> do
                          putMVar excl ()
                          comingUp' <- case n of
                              1 -> return Nothing
                              _ -> do
                                  excl' <- newEmptyMVar
                                  return $ Just (ComingUp (n - 1) excl')
                          let established' = case merr of
                                  Nothing -> established + 1
                                  Just _ -> established
                          return . Just $ Stable comingUp' established' goingDown transmission
                _ -> throwIO (InternalError "finishConnecting : impossible")
            let nodeState' = nodeState {
                      _nodeStateConnectedTo = Map.update (const choice) peer map
                    }
            return (nodeState', ())
    -- Update the OutboundConnectionState at this peer to show this connection
    -- as going up. If everything is going down, wait for the last connection
    -- to go down (on the shared exclusive) and retry.
    startConnecting = do
        canOpen <- modifyMVar nodeState $ \nodeState -> do
            when (_nodeStateClosed nodeState) (throwIO $ userError "connectToPeer : you're doing it wrong! Our node is closed!")
            let map = _nodeStateConnectedTo nodeState
            choice <- case Map.lookup peer map of
                -- First to connect.
                Nothing -> do
                    excl <- newEmptyMVar
                    return . Right $ AllComingUp (ComingUp 1 excl)
                -- Stable connection. There's at least one that isn't currently
                -- going down.
                Just (Stable comingUp established goingDown transmission)
                    | Just (ComingUp n excl) <- comingUp ->
                          return . Right $ Stable (Just (ComingUp (n + 1) excl)) established goingDown transmission
                    | Nothing <- comingUp -> do
                          excl <- newEmptyMVar
                          return . Right $ Stable (Just (ComingUp 1 excl)) established goingDown transmission
                Just (AllGoingDown (GoingDown _ excl)) ->
                    return . Left $ excl
                Just (AllComingUp (ComingUp n excl)) ->
                    return . Right $ AllComingUp (ComingUp (n + 1) excl)
            case choice of
                Left excl -> return (nodeState, Left excl)
                Right ocs -> return (nodeState', Right ())
                  where
                    nodeState' = nodeState {
                          _nodeStateConnectedTo = Map.insert peer ocs map
                        }
        case canOpen of
            Left excl -> do
                readMVar excl
                startConnecting
            Right () -> return ()
-- FIXME: Remove this once exceptions PR #28 is merged upstream.
-- | Like 'bracket', but the release action also receives the exception
-- (if any) that terminated the main action. The release action runs under
-- 'uninterruptibleMask_'; if it itself throws during exceptional cleanup,
-- that exception is discarded so the original one propagates.
bracketWithException
    :: ( Exception e )
    => IO r
    -> (r -> Maybe e -> IO b)
    -> (r -> IO c)
    -> IO c
bracketWithException acquire release run = mask $ \restore -> do
    resource <- acquire
    outcome <- try (restore (run resource))
    case outcome of
        Right value -> do
            _ <- uninterruptibleMask_ (release resource Nothing)
            return value
        Left (err :: SomeException) -> do
            -- Attempt cleanup, swallowing anything it throws, then
            -- rethrow the exception from the main action.
            _ :: Either SomeException b <-
                try (uninterruptibleMask_ (release resource (fromException err)))
            throwIO err
| null | https://raw.githubusercontent.com/input-output-hk/cardano-sl/1499214d93767b703b9599369a431e67d83f10a2/networking/src/Node/Internal.hs | haskell | # LANGUAGE DeriveDataTypeable #
# LANGUAGE OverloadedStrings #
# LANGUAGE RankNTypes #
# LANGUAGE RecursiveDo #
| A 'NodeId' wraps a network-transport endpoint address
| The state of a Node, to be held in a shared atomic cell because other
threads will mutate it in order to set up bidirectional connections.
^ To generate nonces.
^ Handlers for each nonce which we generated (locally-initiated
bidirectional connections).
The bool indicates whether we have received an ACK for this.
^ Handlers for inbound connections (remotely-initiated unidirectional
_or_ bidirectional connections).
peer).
^ Statistics about traffic at this node.
Must be kept in mutable state so that handlers can update it when
they finish.
| An exception which is thrown when something times out.
| The initial state of a node, wrapped up in a shared atomic.
| Some 'Async', we don't care the result type.
| Uses equality on thread id. Should be good for our use case.
Are thread ids ever recycled? Surely they must be, eventually, since they're
| Waits for a handler.
| Cancels a handler.
| Waits for it and squelches all (even async) exceptions.
| Maximum transmission unit: how many bytes can be sent in a single
network-transport send. Tune this according to the transport
which backs the time-warp node.
| Computation in IO of a delay (or no delay).
state and a thread to dispatch network-transport events.
| How long to wait before dequeueing an event from the
network-transport receive queue, where Nothing means
instantaneous (different from a 0 delay).
The term is evaluated once for each dequeued event, immediately
before dequeueing it.
| As 'nodeReceiveDelay' but instead of a delay on every network
level message, the delay applies only to establishing new
incomming connections. These connect/talk/close patterns tend
to correspond to application level messages or conversations
so this is a way to delay per-high-level message rather than
lower level events.
| Used to identify bidirectional connections.
| Input from the wire.
| Output to the wire.
^ Split into chunks of at most this size in bytes. 0 means no split.
Non-recursive definition for the case when the input is empty, so
that
writeMany mtu outChan ""
still induces a send. Without this case, the list would be empty.
| Statistics concerning traffic at this node.
| How many handlers are running right now in response to a
remotely initiated connection (whether unidirectional or
bidirectional).
connection.
| How many handlers are running right now which were initiated
locally, i.e. corresponding to bidirectional connections.
| Statistics for each peer.
| How many peers are connected.
| Average number of remotely-initiated handlers per peer.
Also track the average of the number of handlers squared, so we
can quickly compute the variance.
| Average number of locally-initiated handlers per peer.
Also track the average of the number of handlers squared, so we
can quickly compute the variance.
| Handlers which finished normally. Distribution is on their
running time.
| Handlers which finished exceptionally. Distribution is on their
running time.
| Statistics about a given peer.
| How many handlers are running right now in response to connections
from this peer (whether unidirectional or remotely-initiated
bidirectional).
| How many handlers are running right now for locally-iniaiated
bidirectional connections to this peer.
| How many bytes have been received by running handlers for this
peer.
only handler for that peer.
are no more handlers for that peer.
| Statistics when a node is launched.
| Initiated locally, _to_ this peer.
| Initiated remotely, _by_ or _from_ this peer.
TODO: revise these computations to make them numerically stable (or maybe
use Rational?).
TODO: generalize this computation so we can use the same thing for
both local and remote. It's a copy/paste job right now swapping local
for remote.
The Double is the current number of peers (always > 0).
The Int is the current number of running handlers.
TODO: revise these computations to make them numerically stable (or maybe
use Rational?).
TODO: generalize this computation so we can use the same thing for
both local and remote. It's a copy/paste job right now swapping local
for remote.
Convert the elapsed time to a Double and then add it to the relevant
distribution.
handler).
The Int is the current number of running handlers.
See 'simpleNodeEndPoint' for a very obvious example.
More complicated things are possible, for instance using concrete
transport specific features.
| A 'NodeEndPoint' which uses the typical network-transport 'newEndPoint'
and 'closeEndPoint'.
stops, so do not close it yourself.
| Bring up a 'Node' using a network transport.
^ Use the node (lazily) to determine a delay in microseconds to wait
before dequeueing the next network-transport event (see
'nodeReceiveDelay').
^ See 'nodeConnectDelay'
^ A source of randomness, for generating nonces.
^ Handle incoming bidirectional connections.
TODO this thread should get exceptions from the dispatcher thread.
Exceptions in the dispatcher are re-thrown here.
| Stop a 'Node', closing its network transport and end point.
This eventually will shut down the dispatcher thread, which in turn
ought to stop the connection handling threads.
It'll also close all TCP connections.
Must wait on any handler threads. The dispatcher thread will eventually
see an event indicating that the end point has closed, after which it
will wait on all running handlers. Since the end point has been closed,
no new handler threads will be created, so this will block indefinitely
only if some handler is blocked indefinitely or looping.
| Kill a 'Node', terminating its dispatcher thread, closing its endpoint,
and killing all of its handlers.
Closing the end point will cause the dispatcher thread to end when it
gets the EndPointClosed event, so we don't cancel that thread.
Cancelling that thread before closing the end point can lead to deadlock,
in particular if this is backed by a TCP transport with a QDisc which
may block on write.
| This connection cannot proceed because peer data has not been
received and parsed.
| This connection attempted to parse the peer data but failed.
Any subsequent data will be ignored.
| This connection is waiting for a handshake and we have partial
data. The peer state of the connection must be 'GotPeerData'.
| This connection attempted handshake but it failed (protocol error).
Any subsequent data will be ignored.
| This connection has made a handshake and is now feeding an
application-specific handler through a channel. The peer state
of this connection must be 'GotPeerData'.
bytes are received. It's used to update shared metrics.
| Peer data is expected from one of these lightweight connections.
connection which has given a partial parse of the peer data.
| Peer data has been received and parsed.
| Get the running handlers for a node.
List monad computation: grab the values of the map (ignoring
peer keys), then for each of those maps grab its values (ignoring
nonce keys) and then return the promise.
| Wait for every running handler in a node's state to finish.
If they throw an exception, it's not re-thrown. Even async exceptions are
squelched, so be careful.
| Kill every running handler in a node's state.
to various handlers.
When the end point closes, we're done.
Don't deal with this.
When a heavyweight connection is lost we must close up all of the
lightweight connections which it carried.
End point failure is unrecoverable.
Transport failure is unrecoverable.
EndPointClosed is the final event that we will receive. There may be
connections which remain open! ConnectionClosed events may be
inbound but since our end point has closed, we won't take them. So here
we have to plug every remaining input channel.
This is *not* a network-transport error; EndPointClosed can be
posted without ConnectionClosed for all open connections, as an
optimization.
fill the peer data vars in case they haven't yet been filled. This
is to ensure that handlers never block on these things.
Check that this node was closed by a call to 'stopNode' or
'killNode'. If it wasn't, we throw an exception. This is important
because the thread which runs 'startNode' must *not* continue after
How we handle this connection depends on whether we already have
a connection from this peer.
If we do, we can start waiting for the handshake.
If we don't, then we must await and decode the peer data.
We got another connection before the peer data arrived.
That's actually OK. It's only an error if we receive data
and parses the peer data ('received' handles this aspect).
So here we just record the connection.
This connection gave bogus peer data. Ignore the data.
This connection gave a bad handshake. Ignore the data.
This connection is awaiting the initial peer data.
There's no leader. This connection is now the leader. Begin
the attempt to decode the peer data.
connection before the peer data was parsed.
Update a connection's state to WaitingForHandshake. For use
parameters give the id of the connection which made the
parse and the data left-over after the parse, which must
be remembered in the connection state for that id.
We're waiting for peer data on this connection, but we don't
have an entry for the peer. That's an internal error.
Waiting for a handshake. Try to get a control header and then
move on.
Got a bidirectional header but still waiting for the
nonce.
Got a SYN. Spawn a thread to connect to the peer using
the nonce provided and then run the bidirectional handler.
No matter what, we must update the node state to
indicate that we've disconnected from the peer.
Establish the other direction in a separate thread.
Got an ACK. Try to decode the nonce and check that
we actually sent it.
Lookup the nonce map for the peer, then check
that nonce map at the supplied nonce.
We don't know about the nonce. Could be that
or the handler for it has already finished.
In any case, say the handshake failed so that
subsequent data is ignored.
feeding the application handler.
Handshake failure. Subsequent receives will be ignored.
This connection is feeding a handler. Make the data available.
TODO: if the handler has already finished, we want to just forget
the data. How? Weak reference to the channel perhaps? Or
explicitly close it down when the handler finishes by adding some
mutable cell to FeedingApplicationHandler?
This connection can be removed from the connection states map.
Removing it from the peers map is more involved.
The connection which is giving the peer data
has closed! That's ok, just forget about it
and the partial decode of that data.
There must always be 0 connections from the peer, for
network-transport must have posted the ConnectionClosed events for
This is *not* a network-transport bug; a connection lost
event can be posted without ConnectionClosed, as an
optimization.
For every connection to that peer we'll plug the channel with
Nothing and remove it from the map.
Every outbound bidirectional connection which is carried by this
bundle, and which has not yet received an ACK, must have its
channel plugged and its peer data shared exclusive filled in case
it has not yet been. This is to ensure that the handlers do not
block indefinitely when trying to access these things.
Outbound unidirectional connections need no attention: they will
fail if they try to 'send', but since they expect no data in
return, we don't have to take care of them here.
Perfectly normal: lost the connection but we had no
outbound bidirectional connections to it.
Remove every element from the map which is carried by
this bundle, and then remove the map itself if it's
empty.
| Spawn a thread and track it in shared state, taking care to remove it from
shared state when it's finished and updating statistics appropriately.
This is applicable to handlers spawned in response to inbound peer
connections, and also for actions which use outbound connections.
It is assumed that different promises do not compare equal.
It is assumed to be highly unlikely that there will be nonce
collisions (that we have a good prng).
Remove the nonce for this peer, and remove the whole map
if this was the only nonce for that peer.
Decrement the live bytes by the total bytes received, and
remove the handler.
| Create, use, and tear down a conversation channel with a given peer
(NodeId).
This may be killed with a 'Timeout' exception in case the peer does not
give an ACK before the specified timeout ('nodeAckTimeout').
A mutable cell for the channel. We'll swap it to Nothing when we don't
want to accept any more bytes (the handler has finished).
The dispatcher will fill in the peer data as soon as it's available.
peer never responds? All we can do is time-out I suppose.
Indeed, the peer may never even ACK.
When the connection is up, we can register a handler using the bundle
identifier.
An exception may be thrown after the connection is established but
about this.
But if an exception is thrown to this action while it's waiting for the
promise, we must cancel that promise (that running handler).
the handler, because at this point the nonce is guaranteed
to be known in the node state. If we sent the handshake
before 'spawnHandler' we risk (although it's highly unlikely)
receiving the ACK before the nonce is put into the state.
This isn't so unlikely in the case of self-connections.
Here we spawn the timeout thread... Killing the 'promise'
is enough to clean everything up.
This timeout promise is included in the provenance, so
that when an ACK is received, the timeout thread can
be killed.
| A stable outbound connection has some positive number of established
connections.
| Every connection is being brought down.
| Every connection is being brought up.
Update the OutboundConnectionState at this peer to no longer show
this connection as going down, and fill the shared exclusive if it's
the last to go down.
Update the OutboundConnectionState at this peer to show this connection
as going down.
| Connect to a peer, taking care to send the peer-data in case there are no
other connections to that peer. Subsequent connections to that peer
'establish' will update shared state indicating the nature of
connections to this peer: how many are coming up, going down, or
established. It's essential to bracket that against 'disconnectFromPeer'
so that if there's an exception when sending the peer data or when
doing the 'act' continuation, the state is always brought back to
consistency.
Somebody else sent it, so we can proceed.
We are responsible for sending it.
We were responsible for sending it and we succeeded.
the transport level.
Throwing the error will induce the bracket resource releaser
Update the OutboundConnectionState at this peer to no longer show
this connection as coming up, and fill the shared exclusive if it's
Update the OutboundConnectionState at this peer to show this connection
as going up.
going down.
FIXME: Remove this once -exceptions/pull/28 is merged. | # OPTIONS_GHC -fno - warn - name - shadowing #
# LANGUAGE BangPatterns #
# LANGUAGE CPP #
# LANGUAGE ExistentialQuantification #
# LANGUAGE FlexibleContexts #
# LANGUAGE GADTSyntax #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE KindSignatures #
# LANGUAGE NamedFieldPuns #
# LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE StandaloneDeriving #
module Node.Internal (
NodeId(..),
Node(..),
NodeEnvironment(..),
defaultNodeEnvironment,
NodeEndPoint(..),
simpleNodeEndPoint,
manualNodeEndPoint,
ReceiveDelay,
noReceiveDelay,
constantReceiveDelay,
NodeState(..),
nodeId,
nodeEndPointAddress,
Statistics(..),
stTotalLiveBytes,
stRunningHandlersRemoteVariance,
stRunningHandlersLocalVariance,
PeerStatistics(..),
nodeStatistics,
ChannelIn(..),
ChannelOut(..),
startNode,
stopNode,
killNode,
withInOutChannel,
writeMany,
Timeout(..)
) where
import Control.Concurrent (threadDelay)
import Control.Concurrent.Async
import Control.Concurrent.MVar
import Control.Concurrent.STM
import Control.Exception (Exception, SomeAsyncException,
SomeException, bracket, catch, finally, fromException,
handle, mask, throwIO, try, uninterruptibleMask_)
import Control.Monad (forM_, mapM_, when)
import Data.Binary
import qualified Data.ByteString as BS
import qualified Data.ByteString.Builder as BS
import qualified Data.ByteString.Builder.Extra as BS
import qualified Data.ByteString.Lazy as LBS
import Data.Foldable (foldl', foldlM)
import Data.Hashable (Hashable)
import Data.Int (Int64)
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
#if !(MIN_VERSION_base(4,8,0))
import Data.Monoid
#endif
import Data.NonEmptySet (NonEmptySet)
import qualified Data.NonEmptySet as NESet
import Data.Semigroup ((<>))
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Text (Text)
import Data.Time.Clock.POSIX (getPOSIXTime)
import Data.Time.Units (Microsecond)
import Formatting (sformat, shown, (%))
import GHC.Generics (Generic)
import qualified Network.Transport as NT
import Node.Message.Class (Packing, Serializable (..), pack, unpack)
import Node.Message.Decoder (Decoder (..), DecoderStep (..),
continueDecoding)
import Pos.Util.Trace (Severity (..), Trace, traceWith)
import qualified System.Metrics.Distribution as Metrics (Distribution)
import qualified System.Metrics.Distribution as Metrics.Distribution
import qualified System.Metrics.Gauge as Metrics (Gauge)
import qualified System.Metrics.Gauge as Metrics.Gauge
import System.Random (Random, StdGen, random)
-- Copied from the old Mockable definition for Production.
-- | Current POSIX time, in microseconds.
getCurrentTime :: IO Microsecond
getCurrentTime = round . (* 1000000) <$> getPOSIXTime
-- | Sleep the calling (green) thread for the given duration.
delay :: Microsecond -> IO ()
delay us = threadDelay (fromIntegral us)
-- | The identity of a node: its network-transport end point address.
newtype NodeId = NodeId NT.EndPointAddress
  deriving (Eq, Ord, Show, Hashable, Generic)

instance Binary NodeId
-- | Mutable state of a 'Node', held in an 'MVar' (see 'initialNodeState').
data NodeState peerData = NodeState {
      _nodeStateGen :: !StdGen
      -- ^ Source of randomness for generating 'Nonce's.
    , _nodeStateOutboundBidirectional :: !(Map NT.EndPointAddress (Map Nonce (SomeHandler, Maybe BS.ByteString -> IO (), Int -> IO (), MVar peerData, NT.ConnectionBundle, Async (), Bool)))
      -- ^ Locally-initiated bidirectional connections, keyed by peer and then
      --   nonce. The final 'Bool' records whether an ACK has been received
      --   (see its use as 'acked' in the dispatcher).
    , _nodeStateInbound :: !(Set SomeHandler)
      -- ^ Handlers spawned for inbound connections.
    , _nodeStateConnectedTo :: !(Map NT.EndPointAddress OutboundConnectionState)
      -- ^ For each peer that we have at least one open connection to, the
      --   number of connections; or an MVar in case there's some thread
      --   sending the initial data (it just opened the first connection to that
      --   peer).
    , _nodeStateStatistics :: !Statistics
      -- ^ Traffic statistics for this node (see 'nodeStatistics').
    , _nodeStateClosed :: !Bool
      -- ^ Indicates whether the Node has been closed and is no longer capable
      --   of establishing or accepting connections (its EndPoint is closed).
    }
-- | Thrown to kill an action that is still waiting for an ACK after
--   'nodeAckTimeout' has elapsed (see 'withInOutChannel').
data Timeout = Timeout
  deriving (Show)

instance Exception Timeout
-- | Construct a fresh 'NodeState' (no connections, not closed) in an 'MVar',
--   seeded with the given randomness source for nonces.
initialNodeState
    :: StdGen
    -> IO (MVar (NodeState peerData))
initialNodeState prng = do
    -- All gauges/distributions are allocated up front.
    !stats <- initialStatistics
    let nodeState = NodeState {
          _nodeStateGen = prng
        , _nodeStateOutboundBidirectional = Map.empty
        , _nodeStateInbound = Set.empty
        , _nodeStateConnectedTo = Map.empty
        , _nodeStateStatistics = stats
        , _nodeStateClosed = False
        }
    newMVar nodeState
-- | Existential wrapper over an 'Async', so handlers with different result
--   types can share one collection.
data SomeHandler = forall t . SomeHandler (Async t)

-- Comparison is by the underlying thread id. NOTE(review): the original
-- comment here was truncated by the comment-stripping; it argued this is fine
-- because thread ids are of bounded size. Anyway, if we're paranoid, we can
-- use a 'Unique' for 'Eq' and 'Ord'.
instance Eq SomeHandler where
    SomeHandler as1 == SomeHandler as2 =
        asyncThreadId as1 == asyncThreadId as2

instance Ord SomeHandler where
    SomeHandler as1 `compare` SomeHandler as2 =
        asyncThreadId as1 `compare` asyncThreadId as2
-- | Block until the wrapped handler finishes, discarding its result.
waitSomeHandler :: SomeHandler -> IO ()
waitSomeHandler (SomeHandler promise) = wait promise >> return ()
-- | Cancel the wrapped handler, masked uninterruptibly while doing so.
cancelSomeHandler :: SomeHandler -> IO ()
cancelSomeHandler someHandler = case someHandler of
    SomeHandler promise -> uninterruptibleCancel promise
-- | Like 'waitSomeHandler' but squelches every exception (even asynchronous
--   ones), so be careful.
waitCatchSomeHandler :: SomeHandler -> IO ()
waitCatchSomeHandler someHandler =
    waitSomeHandler someHandler `catch` \(_ :: SomeException) -> return ()
-- | Static configuration for a node.
data NodeEnvironment = NodeEnvironment {
      -- How long to wait for an ACK before giving up
      -- (see 'defaultNodeEnvironment' and the Timeout machinery).
      nodeAckTimeout :: !Microsecond
      -- Maximum chunk size for sends; 0 means no splitting (see 'writeMany').
    , nodeMtu        :: !Word32
    }
-- | Sensible defaults for 'NodeEnvironment'.
defaultNodeEnvironment :: NodeEnvironment
defaultNodeEnvironment = NodeEnvironment {
      -- 30 second timeout waiting for an ACK.
      nodeAckTimeout = 30000000
    , nodeMtu        = maxBound
    }
-- | An action yielding an optional delay to impose before processing the
--   next network-transport event (see 'nodeReceiveDelay', 'nodeConnectDelay').
type ReceiveDelay = IO (Maybe Microsecond)

-- | Never delay.
noReceiveDelay :: ReceiveDelay
noReceiveDelay = pure Nothing

-- | Always delay by the same fixed amount.
constantReceiveDelay :: Microsecond -> ReceiveDelay
constantReceiveDelay = pure . Just
-- | A 'Node' is a network-transport 'EndPoint' with bidirectional connection
--   state, its dispatcher thread, and configuration.
data Node packingType peerData = Node {
       nodeTrace            :: Trace IO (Severity, Text)
     , nodeEndPoint         :: NT.EndPoint
     , nodeCloseEndPoint    :: IO ()
     , nodeDispatcherThread :: Async ()
     , nodeEnvironment      :: NodeEnvironment
     , nodeState            :: MVar (NodeState peerData)
     , nodePacking          :: Packing packingType IO
     , nodePeerData         :: peerData
       -- Delay imposed before dequeueing each received event, and before
       -- handling each connection-opened event (see the dispatcher loop).
     , nodeReceiveDelay     :: ReceiveDelay
     , nodeConnectDelay     :: ReceiveDelay
     }
-- | The 'NodeId' of a node: its end point address.
nodeId :: Node packingType peerData -> NodeId
nodeId node = NodeId (NT.address (nodeEndPoint node))

-- | Project the end point address out of a 'NodeId'.
nodeEndPointAddress :: NodeId -> NT.EndPointAddress
nodeEndPointAddress (NodeId addr) = addr
-- | Read the current 'Statistics' of a node from its shared state
--   (takes the state MVar without changing the state).
nodeStatistics :: Node packingType peerData -> IO Statistics
nodeStatistics Node{..} = modifyMVar nodeState $ \st ->
    return (st, _nodeStateStatistics st)
-- | Identifier for a locally-initiated bidirectional connection, generated
--   from the node's PRNG.
newtype Nonce = Nonce {
      _getNonce :: Word64
    }

deriving instance Show Nonce
deriving instance Eq Nonce
deriving instance Ord Nonce
deriving instance Random Nonce
deriving instance Binary Nonce

-- | Errors detected by the node itself: peer protocol violations vs.
--   internal invariant violations.
data NodeException =
      ProtocolError String
    | InternalError String
  deriving (Show)

instance Exception NodeException

-- | Input to a handler: a channel of incoming byte chunks; 'Nothing' plugs
--   the channel (no further input will arrive).
newtype ChannelIn = ChannelIn (TChan (Maybe BS.ByteString))

-- | Output from a handler: the network-transport connection to send on.
newtype ChannelOut = ChannelOut NT.Connection
-- | Do multiple sends on a 'ChannelOut'.
writeMany
    :: Word32 -- ^ Split into chunks of at most this size in bytes. 0 means no split.
    -> ChannelOut
    -> LBS.ByteString
    -> IO ()
writeMany mtu (ChannelOut conn) bss = mapM_ sendUnit units
  where
    sendUnit :: [BS.ByteString] -> IO ()
    sendUnit unit = NT.send conn unit >>= either throwIO pure
    units :: [[BS.ByteString]]
    units = fmap LBS.toChunks (chop bss)
    chop :: LBS.ByteString -> [LBS.ByteString]
    chop lbs
        | mtu == 0     = [lbs]
        -- Non-recursive definition for the case when the input is empty, so
        -- that
        --   writeMany mtu outChan ""
        -- still induces a send. Without this case, the list would be empty.
        | LBS.null lbs = [lbs]
        | otherwise    =
              let mtuInt :: Int64
                  mtuInt = fromIntegral mtu
                  chopItUp lbs | LBS.null lbs = []
                               | otherwise =
                                     let (front, back) = LBS.splitAt mtuInt lbs
                                     in  front : chopItUp back
              in  chopItUp lbs
-- | Statistics concerning traffic at this node.
data Statistics = Statistics {
      -- | How many handlers are running right now in response to a
      --   remotely initiated connection (whether unidirectional or
      --   bidirectional).
      --   NB a handler may run longer or shorter than the duration of a
      --   connection.
      stRunningHandlersRemote :: !Metrics.Gauge
      -- | How many handlers are running right now which were initiated
      --   locally, i.e. corresponding to bidirectional connections.
    , stRunningHandlersLocal :: !Metrics.Gauge
      -- | Statistics for each peer.
    , stPeerStatistics :: !(Map NT.EndPointAddress (MVar PeerStatistics))
      -- | How many peers are connected.
    , stPeers :: !Metrics.Gauge
      -- | Average number of remotely-initiated handlers per peer.
      --   Also track the average of the number of handlers squared, so we
      --   can quickly compute the variance.
    , stRunningHandlersRemoteAverage :: !(Double, Double)
      -- | Average number of locally-initiated handlers per peer.
      --   Also track the average of the number of handlers squared, so we
      --   can quickly compute the variance.
    , stRunningHandlersLocalAverage :: !(Double, Double)
      -- | Handlers which finished normally. Distribution is on their
      --   running time.
    , stHandlersFinishedNormally :: !Metrics.Distribution
      -- | Handlers which finished exceptionally. Distribution is on their
      --   running time.
    , stHandlersFinishedExceptionally :: !Metrics.Distribution
    }
-- | Total live bytes held by running handlers, summed over all peers.
stTotalLiveBytes :: Statistics -> IO Int
stTotalLiveBytes stats = do
    perPeer <- mapM readMVar (Map.elems (stPeerStatistics stats))
    return (sum (map pstLiveBytes perPeer))
-- | Variance of remotely-initiated handlers per peer, as E[X^2] - (E[X])^2
--   from the tracked running averages.
stRunningHandlersRemoteVariance :: Statistics -> Double
stRunningHandlersRemoteVariance statistics =
    let (mean, meanOfSquares) = stRunningHandlersRemoteAverage statistics
    in  meanOfSquares - mean * mean

-- | Variance of locally-initiated handlers per peer, as E[X^2] - (E[X])^2.
stRunningHandlersLocalVariance :: Statistics -> Double
stRunningHandlersLocalVariance statistics =
    let (mean, meanOfSquares) = stRunningHandlersLocalAverage statistics
    in  meanOfSquares - mean * mean
-- | Statistics about a given peer.
data PeerStatistics = PeerStatistics {
      -- | How many handlers are running right now in response to connections
      --   from this peer (whether unidirectional or remotely-initiated
      --   bidirectional).
      pstRunningHandlersRemote :: !Int
      -- | How many handlers are running right now for locally-initiated
      --   bidirectional connections to this peer.
    , pstRunningHandlersLocal :: !Int
      -- | How many bytes have been received by running handlers for this
      --   peer.
    , pstLiveBytes :: !Int
    }
-- | True when a peer has no running handlers at all, local or remote.
pstNull :: PeerStatistics -> Bool
pstNull ps =
    pstRunningHandlersRemote ps == 0 && pstRunningHandlersLocal ps == 0
-- | Add to the live-bytes count of the given peer, if that peer is currently
--   tracked; a no-op otherwise.
stIncrBytes :: NT.EndPointAddress -> Int -> Statistics -> IO ()
stIncrBytes peer bytes stats =
    case Map.lookup peer (stPeerStatistics stats) of
        Nothing       -> return ()
        Just statsVar -> modifyMVar statsVar $ \peerStats ->
            let !peerStats' = pstIncrBytes bytes peerStats
            in  return (peerStats', ())
-- | Pure bump of a peer's live-bytes counter.
pstIncrBytes :: Int -> PeerStatistics -> PeerStatistics
pstIncrBytes bytes ps = ps { pstLiveBytes = bytes + pstLiveBytes ps }
-- | Record a new handler for a given peer. Second component is True if it's
--   the only handler for that peer.
pstAddHandler
    :: HandlerProvenance peerData t
    -> Map NT.EndPointAddress (MVar PeerStatistics)
    -> IO (Map NT.EndPointAddress (MVar PeerStatistics), Bool)
pstAddHandler provenance map = case provenance of

    Local peer _ -> case Map.lookup peer map of
        Nothing ->
            newMVar (PeerStatistics 0 1 0) >>= \peerStatistics ->
            return (Map.insert peer peerStatistics map, True)
        Just !statsVar -> modifyMVar statsVar $ \stats ->
            let !stats' = stats { pstRunningHandlersLocal = pstRunningHandlersLocal stats + 1 }
            in return (stats', (map, False))

    Remote peer _ -> case Map.lookup peer map of
        Nothing ->
            newMVar (PeerStatistics 1 0 0) >>= \peerStatistics ->
            return (Map.insert peer peerStatistics map, True)
        Just !statsVar -> modifyMVar statsVar $ \stats ->
            let !stats' = stats { pstRunningHandlersRemote = pstRunningHandlersRemote stats + 1 }
            in return (stats', (map, False))
-- | Remove a handler for a given peer. Second component is True if there
--   are no more handlers for that peer.
pstRemoveHandler
    :: Trace IO (Severity, Text)
    -> HandlerProvenance peerData t
    -> Map NT.EndPointAddress (MVar PeerStatistics)
    -> IO (Map NT.EndPointAddress (MVar PeerStatistics), Bool)
pstRemoveHandler logTrace provenance map = case provenance of

    Local peer _ -> case Map.lookup peer map of
        Nothing -> do
            traceWith logTrace (Warning, sformat ("tried to remove handler for "%shown%", but it is not in the map") peer)
            return (map, False)
        Just !statsVar -> modifyMVar statsVar $ \stats ->
            let stats' = stats { pstRunningHandlersLocal = pstRunningHandlersLocal stats - 1 }
            -- Drop the peer entry entirely when its last handler goes away.
            in return $ if pstNull stats'
                        then (stats', (Map.delete peer map, True))
                        else (stats', (map, False))

    Remote peer _ -> case Map.lookup peer map of
        Nothing -> do
            traceWith logTrace (Warning, sformat ("tried to remove handler for "%shown%", but it is not in the map") peer)
            return (map, False)
        Just !statsVar -> modifyMVar statsVar $ \stats ->
            let stats' = stats { pstRunningHandlersRemote = pstRunningHandlersRemote stats - 1 }
            in return $ if pstNull stats'
                        then (stats', (Map.delete peer map, True))
                        else (stats', (map, False))
-- | Allocate fresh gauges and distributions for a zeroed 'Statistics'.
initialStatistics :: IO Statistics
initialStatistics = do
    !runningHandlersRemote <- Metrics.Gauge.new
    !runningHandlersLocal <- Metrics.Gauge.new
    !peers <- Metrics.Gauge.new
    !handlersFinishedNormally <- Metrics.Distribution.new
    !handlersFinishedExceptionally <- Metrics.Distribution.new
    return Statistics {
          stRunningHandlersRemote = runningHandlersRemote
        , stRunningHandlersLocal = runningHandlersLocal
        , stPeerStatistics = Map.empty
        , stPeers = peers
        , stRunningHandlersRemoteAverage = (0, 0)
        , stRunningHandlersLocalAverage = (0, 0)
        , stHandlersFinishedNormally = handlersFinishedNormally
        , stHandlersFinishedExceptionally = handlersFinishedExceptionally
        }
-- | Where a handler came from: locally initiated (carrying the nonce and
--   the connection bookkeeping for the handshake) or remotely initiated
--   (carrying the inbound connection id).
data HandlerProvenance peerData t =
      Local !NT.EndPointAddress (Nonce, MVar peerData, NT.ConnectionBundle, Async (), t)
    | Remote !NT.EndPointAddress !NT.ConnectionId

instance Show (HandlerProvenance peerData t) where
    show prov = case prov of
        Local addr mdata -> concat [
              "Local "
            , show addr
            , show ((\(x,_,_,_,_) -> x) $ mdata)  -- show only the nonce
            ]
        Remote addr connid -> concat ["Remote ", show addr, show connid]
-- | The peer address associated with a handler's provenance.
handlerProvenancePeer :: HandlerProvenance peerData t -> NT.EndPointAddress
handlerProvenancePeer (Local peer _)  = peer
handlerProvenancePeer (Remote peer _) = peer
-- | Update 'Statistics' for a newly-spawned handler.
--   TODO: revise these computations to make them numerically stable (or maybe
--   use Rational?).
stAddHandler
    :: HandlerProvenance peerData t
    -> Statistics
    -> IO Statistics
stAddHandler !provenance !statistics = case provenance of

    -- TODO: generalize this computation so we can use the same thing for
    -- both local and remote. It's a copy/paste job right now swapping local
    -- for remote.
    Local !_peer _ -> do
        (!peerStatistics, !isNewPeer) <- pstAddHandler provenance (stPeerStatistics statistics)
        when isNewPeer $ Metrics.Gauge.inc (stPeers statistics)
        Metrics.Gauge.inc (stRunningHandlersLocal statistics)
        !npeers <- Metrics.Gauge.read (stPeers statistics)
        !nhandlers <- Metrics.Gauge.read (stRunningHandlersLocal statistics)
        let runningHandlersLocalAverage =
                adjustMeans isNewPeer
                            (fromIntegral npeers)
                            nhandlers
                            (stRunningHandlersLocalAverage statistics)
        return $ statistics {
              stPeerStatistics = peerStatistics
            , stRunningHandlersLocalAverage = runningHandlersLocalAverage
            }

    Remote !_peer _ -> do
        (!peerStatistics, !isNewPeer) <- pstAddHandler provenance (stPeerStatistics statistics)
        when isNewPeer $ Metrics.Gauge.inc (stPeers statistics)
        Metrics.Gauge.inc (stRunningHandlersRemote statistics)
        !npeers <- Metrics.Gauge.read (stPeers statistics)
        !nhandlers <- Metrics.Gauge.read (stRunningHandlersRemote statistics)
        let runningHandlersRemoteAverage =
                adjustMeans isNewPeer
                            (fromIntegral npeers)
                            nhandlers
                            (stRunningHandlersRemoteAverage statistics)
        return $ statistics {
              stPeerStatistics = peerStatistics
            , stRunningHandlersRemoteAverage = runningHandlersRemoteAverage
            }

  where

    -- Adjust the means. The Bool is true if it's a new peer.
    -- The Double is the current number of peers (always > 0).
    -- The Int is the current number of running handlers.
    adjustMeans :: Bool -> Double -> Int64 -> (Double, Double) -> (Double, Double)
    adjustMeans !isNewPeer !npeers !nhandlers (!avg, !avg2) = case isNewPeer of

        True -> (avg', avg2')
            where
            avg' = avg * ((npeers - 1) / npeers) + (1 / npeers)
            avg2' = avg2 * ((npeers - 1) / npeers) + (1 / npeers)

        False -> (avg', avg2')
            where
            avg' = avg + (1 / npeers)
            -- NOTE(review): this reads 'avg' where a running mean-of-squares
            -- update would normally read 'avg2' — confirm intended before
            -- changing, since the TODO above says these need revision anyway.
            avg2' = avg + (fromIntegral (2 * nhandlers + 1) / npeers)
-- | Update 'Statistics' when a handler finishes (normally or exceptionally),
--   recording its elapsed running time in the relevant distribution.
stRemoveHandler
    :: Trace IO (Severity, Text)
    -> HandlerProvenance peerData t
    -> Microsecond
    -> Maybe SomeException
    -> Statistics
    -> IO Statistics
stRemoveHandler logTrace !provenance !elapsed !outcome !statistics = case provenance of

    -- TODO: generalize this computation so we can use the same thing for
    -- both local and remote. It's a copy/paste job right now swapping local
    -- for remote.
    Local !_peer _ -> do
        (!peerStatistics, !isEndedPeer) <- pstRemoveHandler logTrace provenance (stPeerStatistics statistics)
        when isEndedPeer $ Metrics.Gauge.dec (stPeers statistics)
        Metrics.Gauge.dec (stRunningHandlersLocal statistics)
        !npeers <- Metrics.Gauge.read (stPeers statistics)
        !nhandlers <- Metrics.Gauge.read (stRunningHandlersLocal statistics)
        let runningHandlersLocalAverage =
                adjustMeans isEndedPeer
                            npeers
                            nhandlers
                            (stRunningHandlersLocalAverage statistics)
        addSample
        return $ statistics {
              stPeerStatistics = peerStatistics
            , stRunningHandlersLocalAverage = runningHandlersLocalAverage
            }

    Remote !_peer _ -> do
        (!peerStatistics, !isEndedPeer) <- pstRemoveHandler logTrace provenance (stPeerStatistics statistics)
        when isEndedPeer $ Metrics.Gauge.dec (stPeers statistics)
        Metrics.Gauge.dec (stRunningHandlersRemote statistics)
        !npeers <- Metrics.Gauge.read (stPeers statistics)
        !nhandlers <- Metrics.Gauge.read (stRunningHandlersRemote statistics)
        let runningHandlersRemoteAverage =
                adjustMeans isEndedPeer
                            npeers
                            nhandlers
                            (stRunningHandlersRemoteAverage statistics)
        addSample
        return $ statistics {
              stPeerStatistics = peerStatistics
            , stRunningHandlersRemoteAverage = runningHandlersRemoteAverage
            }

  where

    -- Convert the elapsed time to a Double and then add it to the relevant
    -- distribution (normal vs. exceptional finish).
    addSample = case outcome of
        Nothing -> Metrics.Distribution.add (stHandlersFinishedNormally statistics) (fromIntegral (toInteger elapsed))
        Just _ -> Metrics.Distribution.add (stHandlersFinishedExceptionally statistics) (fromIntegral (toInteger elapsed))

    -- Adjust the means. The Bool is true if it's a stale peer (removed last
    -- handler).
    -- The first Int is the current number of peers (could be 0).
    -- The second Int is the current number of running handlers.
    adjustMeans :: Bool -> Int64 -> Int64 -> (Double, Double) -> (Double, Double)
    adjustMeans !isEndedPeer !npeers !nhandlers (!avg, !avg2) = case isEndedPeer of

        True -> if npeers == 0
                then (0, 0)
                else (avg', avg2')
            where
            avg' = avg * (fromIntegral (npeers - 1) / fromIntegral npeers) + (1 / fromIntegral npeers)
            avg2' = avg2 * (fromIntegral (npeers - 1) / fromIntegral npeers) + (1 / fromIntegral npeers)

        False -> (avg', avg2')
            where
            avg' = avg - (1 / fromIntegral npeers)
            -- NOTE(review): reads 'avg' where a mean-of-squares update would
            -- normally read 'avg2' — same suspected slip as in 'stAddHandler';
            -- confirm before changing.
            avg2' = avg - (fromIntegral (2 * nhandlers + 1) / fromIntegral npeers)
-- | How to create and close an 'EndPoint'.
--   See 'simpleNodeEndPoint' for a very obvious example.
--   More complicated things are possible, for instance using concrete
--   transport specific features.
data NodeEndPoint = NodeEndPoint {
      newNodeEndPoint   :: IO (Either (NT.TransportError NT.NewEndPointErrorCode) NT.EndPoint)
    , closeNodeEndPoint :: NT.EndPoint -> IO ()
    }
-- | A 'NodeEndPoint' which uses the typical network-transport 'newEndPoint'
--   and 'closeEndPoint'.
simpleNodeEndPoint :: NT.Transport -> NodeEndPoint
simpleNodeEndPoint transport = NodeEndPoint {
      newNodeEndPoint = NT.newEndPoint transport
    , closeNodeEndPoint = NT.closeEndPoint
    }
-- | Use an existing 'EndPoint'. It will be closed automatically when the node
--   stops, so do not close it yourself.
manualNodeEndPoint :: NT.EndPoint -> NodeEndPoint
manualNodeEndPoint ep = NodeEndPoint {
      newNodeEndPoint = pure $ Right ep
    , closeNodeEndPoint = NT.closeEndPoint
    }
-- | Bring up a 'Node' using a network transport.
startNode
    :: forall packingType peerData .
       ( Serializable packingType peerData )
    => Trace IO (Severity, Text)
    -> Packing packingType IO
    -> peerData
    -> (Node packingType peerData -> NodeEndPoint)
    -> (Node packingType peerData -> ReceiveDelay)
       -- ^ Use the node (lazily) to determine a delay in microseconds to wait
       --   before dequeueing the next network-transport event (see
       --   'nodeReceiveDelay').
    -> (Node packingType peerData -> ReceiveDelay)
       -- ^ See 'nodeConnectDelay'
    -> StdGen
       -- ^ A source of randomness, for generating nonces.
    -> NodeEnvironment
    -> (peerData -> NodeId -> ChannelIn -> ChannelOut -> IO ())
       -- ^ Handle incoming bidirectional connections.
    -> IO (Node packingType peerData)
startNode logTrace packing peerData mkNodeEndPoint mkReceiveDelay mkConnectDelay
          prng nodeEnv handlerInOut = do
    -- RecursiveDo: the NodeEndPoint and delays are derived lazily from the
    -- node value that is only constructed further down.
    rec { let nodeEndPoint = mkNodeEndPoint node
        ; mEndPoint <- newNodeEndPoint nodeEndPoint
        ; let receiveDelay = mkReceiveDelay node
              connectDelay = mkConnectDelay node
        ; node <- case mEndPoint of
              Left err -> throwIO err
              Right endPoint -> do
                  sharedState <- initialNodeState prng
                  -- TODO this thread should get exceptions from the dispatcher thread.
                  rec { let node = Node {
                              nodeTrace = logTrace
                            , nodeEndPoint = endPoint
                            , nodeCloseEndPoint = closeNodeEndPoint nodeEndPoint endPoint
                            , nodeDispatcherThread = dispatcherThread
                            , nodeEnvironment = nodeEnv
                            , nodeState = sharedState
                            , nodePacking = packing
                            , nodePeerData = peerData
                            , nodeReceiveDelay = receiveDelay
                            , nodeConnectDelay = connectDelay
                            }
                        -- Exceptions in the dispatcher are re-thrown here.
                      ; dispatcherThread <- async $
                            nodeDispatcher node handlerInOut
                      ; link dispatcherThread
                      }
                  return node
        }
    traceWith logTrace (Debug, sformat ("startNode, we are " % shown % "") (nodeId node))
    return node
-- | Stop a 'Node', closing its network transport and end point.
--   This eventually will shut down the dispatcher thread, which in turn
--   ought to stop the connection handling threads.
--   It'll also close all TCP connections.
stopNode :: Node packingType peerData -> IO ()
stopNode node = do
    modifyMVar (nodeState node) $ \nodeState ->
        if _nodeStateClosed nodeState
        then throwIO $ userError "stopNode : already stopped"
        else pure (nodeState { _nodeStateClosed = True }, ())
    nodeCloseEndPoint node
    -- Must wait on any handler threads. The dispatcher thread will eventually
    -- see an event indicating that the end point has closed, after which it
    -- will wait on all running handlers. Since the end point has been closed,
    -- no new handler threads will be created, so this will block indefinitely
    -- only if some handler is blocked indefinitely or looping.
    wait (nodeDispatcherThread node)
    waitForRunningHandlers node
-- | Kill a 'Node', terminating its dispatcher thread, closing its endpoint,
--   and killing all of its handlers.
killNode :: Node packingType peerData -> IO ()
killNode node = do
    modifyMVar (nodeState node) $ \nodeState ->
        if _nodeStateClosed nodeState
        then throwIO $ userError "killNode : already killed"
        else pure (nodeState { _nodeStateClosed = True }, ())
    -- Closing the end point will cause the dispatcher thread to end when it
    -- gets the EndPointClosed event, so we don't cancel that thread.
    -- Cancelling that thread before closing the end point can lead to deadlock,
    -- in particular if this is backed by a TCP transport with a QDisc which
    -- may block on write.
    nodeCloseEndPoint node
    killRunningHandlers node
-- | The state of a lightweight (NT 'ConnectionId') inbound connection.
data ConnectionState peerData =

      -- | This connection cannot proceed because peer data has not been
      --   received and parsed.
      WaitingForPeerData

      -- | This connection attempted to parse the peer data but failed.
      --   Any subsequent data will be ignored.
    | PeerDataParseFailure

      -- | This connection is waiting for a handshake and we have partial
      --   data. The peer state of the connection must be 'GotPeerData'.
    | WaitingForHandshake !peerData !BS.ByteString

      -- | This connection attempted handshake but it failed (protocol error).
      --   Any subsequent data will be ignored.
    | HandshakeFailure

      -- | This connection has made a handshake and is now feeding an
      --   application-specific handler through a channel. The peer state
      --   of this connection must be 'GotPeerData'.
      --
      --   Second argument will be run with the number of bytes each time more
      --   bytes are received. It's used to update shared metrics.
    | FeedingApplicationHandler !(Maybe BS.ByteString -> IO ()) (Int -> IO ())
-- | Constructor-name-only rendering; payloads are omitted.
instance Show (ConnectionState peerData) where
    show term = case term of
        WaitingForPeerData -> "WaitingForPeerData"
        PeerDataParseFailure -> "PeerDataParseFailure"
        WaitingForHandshake _ _ -> "WaitingForHandshake"
        HandshakeFailure -> "HandshakeFailure"
        FeedingApplicationHandler _ _ -> "FeedingApplicationHandler"
-- | The dispatcher's view of a peer (by 'EndPointAddress').
data PeerState peerData =

      -- | Peer data is expected from one of these lightweight connections.
      --   If the second component is 'Just', then there's a lightweight
      --   connection which has given a partial parse of the peer data.
      ExpectingPeerData
          !(NonEmptySet NT.ConnectionId)
          !(Maybe (NT.ConnectionId, Maybe BS.ByteString -> Decoder IO peerData))

      -- | Peer data has been received and parsed.
    | GotPeerData !peerData !(NonEmptySet NT.ConnectionId)
-- | Renders connection-id sets; peer data itself is omitted.
instance Show (PeerState peerData) where
    show term = case term of
        ExpectingPeerData peers mleader -> "ExpectingPeerData " ++ show peers ++ " " ++ show (fmap fst mleader)
        GotPeerData _ peers -> "GotPeerData " ++ show peers
-- | Dispatcher-local state: per-lightweight-connection states and
--   per-peer states.
data DispatcherState peerData = DispatcherState {
      dsConnections :: Map NT.ConnectionId (NT.EndPointAddress, ConnectionState peerData)
    , dsPeers :: Map NT.EndPointAddress (PeerState peerData)
    }

deriving instance Show (DispatcherState peerData)
-- | Dispatcher state with no known connections and no known peers.
initialDispatcherState :: DispatcherState peerData
initialDispatcherState =
    DispatcherState { dsConnections = Map.empty, dsPeers = Map.empty }
-- | Get the running handlers for a node.
getRunningHandlers :: Node packingType peerData -> IO [SomeHandler]
getRunningHandlers node = withMVar (nodeState node) $ \st -> do
    -- List monad computation: grab the values of the map (ignoring
    -- peer keys), then for each of those maps grab its values (ignoring
    -- nonce keys) and then return the promise.
    let outbound_bi = do
            map <- Map.elems (_nodeStateOutboundBidirectional st)
            (x, _, _, _, _, _, _) <- Map.elems map
            return x
        inbound = Set.toList (_nodeStateInbound st)
    return $ outbound_bi ++ inbound
-- | Wait for every running handler in a node's state to finish.
--   If they throw an exception, it's not re-thrown. Even async exceptions are
--   squelched, so be careful.
waitForRunningHandlers :: Node packingType peerData -> IO ()
waitForRunningHandlers node = do
    handlers <- getRunningHandlers node
    mapM_ waitCatchSomeHandler handlers
-- | Kill every running handler in a node's state.
killRunningHandlers :: Node packingType peerData -> IO ()
killRunningHandlers node = do
    handlers <- getRunningHandlers node
    mapM_ cancelSomeHandler handlers
| The one thread that handles /all/ incoming messages and dispatches them
nodeDispatcher
    :: forall packingType peerData .
       ( Serializable packingType peerData )
    => Node packingType peerData
    -> (peerData -> NodeId -> ChannelIn -> ChannelOut -> IO ())
    -> IO ()
nodeDispatcher node handlerInOut =
    loop initialDispatcherState
  where

    -- Tracer for log output.
    logTrace :: Trace IO (Severity, Text)
    logTrace = nodeTrace node

    -- Shared node state (handlers, outbound nonces, statistics, ...).
    nstate :: MVar (NodeState peerData)
    nstate = nodeState node

    -- Optional artificial delays (e.g. for testing), applied before
    -- receiving an event and before accepting a new connection.
    receiveDelay, connectDelay :: IO ()
    receiveDelay = nodeReceiveDelay node >>= maybe (return ()) delay
    connectDelay = nodeConnectDelay node >>= maybe (return ()) delay

    endpoint = nodeEndPoint node

    -- Main dispatcher loop: take the next network-transport event and
    -- update the dispatcher state accordingly.
    loop :: DispatcherState peerData -> IO ()
    loop !state = do
      receiveDelay
      event <- NT.receive endpoint
      case event of
          NT.ConnectionOpened connid _reliability peer ->
              connectDelay >> connectionOpened state connid peer >>= loop
          NT.Received connid bytes -> received state connid bytes >>= loop
          NT.ConnectionClosed connid -> connectionClosed state connid >>= loop
          -- The end point is gone: wind everything down and stop looping.
          NT.EndPointClosed -> endPointClosed state
          -- Multicast is not used; ignore these events.
          NT.ReceivedMulticast _ _ -> loop state
          -- A whole bundle of lightweight connections to a peer was lost.
          NT.ErrorEvent (NT.TransportError (NT.EventConnectionLost peer bundle) reason) -> do
              traceWith logTrace (Error, sformat ("EventConnectionLost received from the network layer: " % shown) reason)
              connectionLost state peer bundle >>= loop
          -- End point / transport failures are unrecoverable here.
          NT.ErrorEvent (NT.TransportError NT.EventEndPointFailed reason) ->
              throwIO (InternalError $ "EndPoint failed: " ++ reason)
          NT.ErrorEvent (NT.TransportError NT.EventTransportFailed reason) ->
              throwIO (InternalError $ "Transport failed " ++ reason)

    -- Handle 'NT.EndPointClosed': plug all input channels so handlers see
    -- end-of-input, then sanity-check that the node was actually closing.
    endPointClosed
        :: DispatcherState peerData
        -> IO ()
    endPointClosed state = do
        let connections = Map.toList (dsConnections state)
        -- Signal end-of-channel on every connection currently feeding an
        -- application handler.
        when (not (null connections)) $ do
            forM_ connections $ \(_, st) -> case st of
                (_, FeedingApplicationHandler dumpBytes _) -> do
                    dumpBytes Nothing
                _ -> return ()
        -- Must plug input channels for all un-acked outbound connections, and
        -- fill their peer-data vars so their handlers can finish.
        _ <- modifyMVar nstate $ \st -> do
            let nonceMaps = Map.elems (_nodeStateOutboundBidirectional st)
            let outbounds = nonceMaps >>= Map.elems
            forM_ outbounds $ \(_, dumpBytes, _, peerDataVar, _, _, acked) -> do
                when (not acked) $ do
                    _ <- tryPutMVar peerDataVar (error "no peer data because local node has gone down")
                    dumpBytes Nothing
            return (st, ())
        -- The 'closed' flag must have been set before the 'EndPoint' is closed.
        withMVar nstate $ \nodeState ->
            if _nodeStateClosed nodeState
            then pure ()
            else throwIO (InternalError "EndPoint prematurely closed")

    -- Handle a newly-opened lightweight connection from some peer.
    connectionOpened
        :: DispatcherState peerData
        -> NT.ConnectionId
        -> NT.EndPointAddress
        -> IO (DispatcherState peerData)
    connectionOpened state connid peer = case Map.lookup connid (dsConnections state) of
        Just (peer', _) -> do
            traceWith logTrace (Warning, sformat ("ignoring duplicate connection " % shown % shown % shown) peer peer' connid)
            return state
        Nothing -> do
            case Map.lookup peer (dsPeers state) of
                -- We already have this peer's data: go straight to the
                -- handshake phase for the new connection.
                Just (GotPeerData peerData neset) -> do
                    return $ state {
                          dsConnections = Map.insert connid (peer, WaitingForHandshake peerData BS.empty) (dsConnections state)
                        , dsPeers = Map.insert peer (GotPeerData peerData (NESet.insert connid neset)) (dsPeers state)
                        }
                -- First connection from this peer: wait for its peer data.
                Nothing -> do
                    return $ state {
                          dsConnections = Map.insert connid (peer, WaitingForPeerData) (dsConnections state)
                        , dsPeers = Map.insert peer (ExpectingPeerData (NESet.singleton connid) Nothing) (dsPeers state)
                        }
                -- Peer data is still pending on an earlier connection; more
                -- connections may be opened on this connection before the
                -- first connection receives the peer data.
                Just (ExpectingPeerData neset mleader) -> do
                    return $ state {
                          dsConnections = Map.insert connid (peer, WaitingForPeerData) (dsConnections state)
                        , dsPeers = Map.insert peer (ExpectingPeerData (NESet.insert connid neset) mleader) (dsPeers state)
                        }

    -- Handle data received on some lightweight connection. Depending on the
    -- connection's state the bytes are peer data, handshake control bytes,
    -- or application payload.
    received
        :: DispatcherState peerData
        -> NT.ConnectionId
        -> [BS.ByteString]
        -> IO (DispatcherState peerData)
    received state connid chunks = case Map.lookup connid (dsConnections state) of
        Nothing -> do
            traceWith logTrace (Warning, sformat ("ignoring data on unknown connection " % shown) connid)
            return state
        Just (peer, PeerDataParseFailure) -> do
            traceWith logTrace (Warning, sformat ("ignoring data on failed connection (peer data) from " % shown) peer)
            return state
        Just (peer, HandshakeFailure) -> do
            traceWith logTrace (Warning, sformat ("ignoring data on failed connection (handshake) from " % shown) peer)
            return state
        Just (peer, WaitingForPeerData) -> case Map.lookup peer (dsPeers state) of
            Just (ExpectingPeerData connids mleader) -> case mleader of
                -- This connection is the first to deliver data: it becomes
                -- the "leader" whose bytes are decoded as the peer data.
                Nothing -> do
                    decoderStep :: DecoderStep IO peerData <- runDecoder (unpack (nodePacking node))
                    decoderStep' <- continueDecoding decoderStep (BS.concat chunks)
                    case decoderStep' of
                        Fail _ _ err -> do
                            traceWith logTrace (Warning, sformat ("failed to decode peer data from " % shown % ": got error " % shown) peer err)
                            return $ state {
                                  dsConnections = Map.insert connid (peer, PeerDataParseFailure) (dsConnections state)
                                }
                        Done trailing _ peerData -> do
                            let state' = state {
                                      dsConnections = foldl' (awaitHandshake peerData) (dsConnections state) (NESet.toList connids)
                                    , dsPeers = Map.insert peer (GotPeerData peerData connids) (dsPeers state)
                                    }
                            -- Any trailing bytes belong to the handshake phase.
                            received state' connid [trailing]
                        Partial decoderContinuation -> do
                            return $ state {
                                  dsPeers = Map.insert peer (ExpectingPeerData connids (Just (connid, decoderContinuation))) (dsPeers state)
                                }
                Just (connid', decoderContinuation) -> case connid == connid' of
                    -- Protocol error. We got data from some other lightweight
                    -- connection while the leader was still decoding peer data.
                    False -> do
                        traceWith logTrace (Warning, sformat ("peer data protocol error from " % shown) peer)
                        return state
                    True -> do
                        decoderStep <- runDecoder (decoderContinuation (Just (BS.concat chunks)))
                        case decoderStep of
                            Fail _ _ err -> do
                                traceWith logTrace (Warning, sformat ("failed to decode peer data from " % shown % ": got error " % shown) peer err)
                                return $ state {
                                      dsConnections = Map.insert connid (peer, PeerDataParseFailure) (dsConnections state)
                                    }
                            Done trailing _ peerData -> do
                                let state' = state {
                                          dsConnections = foldl' (awaitHandshake peerData) (dsConnections state) (NESet.toList connids)
                                        , dsPeers = Map.insert peer (GotPeerData peerData connids) (dsPeers state)
                                        }
                                received state' connid [trailing]
                            Partial decoderContinuation' -> do
                                return $ state {
                                      dsPeers = Map.insert peer (ExpectingPeerData connids (Just (connid, decoderContinuation'))) (dsPeers state)
                                    }
              where
                -- Puts a connection into the 'WaitingForHandshake' state. Used
                -- in a fold once the peer data has been parsed. The first
                -- argument is the freshly-parsed peer data.
                awaitHandshake
                    :: peerData
                    -> Map NT.ConnectionId (NT.EndPointAddress, ConnectionState peerData)
                    -> NT.ConnectionId
                    -> Map NT.ConnectionId (NT.EndPointAddress, ConnectionState peerData)
                awaitHandshake peerData map connid =
                    Map.update (\(peer, _) -> Just (peer, WaitingForHandshake peerData BS.empty)) connid map
            Nothing -> do
                throwIO $ InternalError "node dispatcher inconsistent state (waiting for peer data)"
            Just (GotPeerData _ _) -> do
                throwIO $ InternalError "node dispatcher inconsistent state (already got peer data)"
        Just (peer, WaitingForHandshake peerData partial) -> do
            let bytes = BS.append partial (BS.concat chunks)
            case BS.uncons bytes of
                Nothing -> return state
                Just (w, ws)
                    -- Not enough bytes yet for a full SYN/ACK header (tag byte
                    -- plus 8-byte nonce): stash what we have.
                    | w == controlHeaderCodeBidirectionalSyn ||
                      w == controlHeaderCodeBidirectionalAck
                    , BS.length ws < 8 -> return $ state {
                          dsConnections = Map.insert connid (peer, WaitingForHandshake peerData bytes) (dsConnections state)
                        }
                    -- SYN: the peer initiates a conversation. Spawn a handler
                    -- that ACKs the nonce and runs the application callback.
                    | w == controlHeaderCodeBidirectionalSyn
                    , Right (ws', _, nonce) <- decodeOrFail (LBS.fromStrict ws) -> do
                          channel <- newTChanIO
                          chanVar <- newMVar (Just channel)
                          let dumpBytes mBytes = withMVar chanVar $
                                  maybe (return ()) (\chan -> atomically (writeTChan chan mBytes))
                              provenance = Remote peer connid
                              respondAndHandle conn = do
                                  outcome <- NT.send conn [controlHeaderBidirectionalAck nonce]
                                  case outcome of
                                      Left err -> throwIO err
                                      Right () -> do
                                          handlerInOut peerData (NodeId peer) (ChannelIn channel) (ChannelOut conn)
                              -- Resource releaser for bracketWithException.
                              cleanup (me :: Maybe SomeException) = do
                                  modifyMVar chanVar $ \_ -> return (Nothing, ())
                                  case me of
                                      Nothing -> return ()
                                      Just e -> traceWith logTrace (Error,
                                          sformat (shown % " error in conversation response " % shown) nonce e)
                              handler = bracketWithException
                                  (return ())
                                  (const cleanup)
                                  (const (connectToPeer node (NodeId peer) respondAndHandle))
                          (_, incrBytes) <- spawnHandler logTrace nstate provenance handler
                          -- Feed any bytes that arrived along with the SYN.
                          let bs = LBS.toStrict ws'
                          dumpBytes $ Just bs
                          incrBytes $ fromIntegral (BS.length bs)
                          return $ state {
                                dsConnections = Map.insert connid (peer, FeedingApplicationHandler dumpBytes incrBytes) (dsConnections state)
                              }
                    -- ACK: the peer accepted a conversation that we initiated.
                    | w == controlHeaderCodeBidirectionalAck
                    , Right (ws', _, nonce) <- decodeOrFail (LBS.fromStrict ws) -> do
                          outcome <- modifyMVar nstate $ \st -> do
                              let nonces = Map.lookup peer (_nodeStateOutboundBidirectional st)
                              let thisNonce = nonces >>= Map.lookup nonce
                              case thisNonce of
                                  Nothing -> return (st, Nothing)
                                  Just (_, _, _, _, _, _, True) -> return (st, Just Nothing)
                                  Just (promise, dumpBytes, incrBytes, peerDataVar, connBundle, timeoutPromise, False) -> do
                                      -- The ACK arrived in time: stop the timeout.
                                      cancel timeoutPromise
                                      return
                                          ( st { _nodeStateOutboundBidirectional = Map.update updater peer (_nodeStateOutboundBidirectional st)
                                               }
                                          , Just (Just (dumpBytes, incrBytes, peerDataVar))
                                          )
                                    where
                                      updater map = Just $ Map.insert nonce (promise, dumpBytes, incrBytes, peerDataVar, connBundle, timeoutPromise, True) map
                          case outcome of
                              -- We got an ACK for a nonce, but we never sent
                              -- the SYN for it (protocol error).
                              Nothing -> do
                                  traceWith logTrace (Warning, sformat ("got unknown nonce " % shown) nonce)
                                  return $ state {
                                        dsConnections = Map.insert connid (peer, HandshakeFailure) (dsConnections state)
                                      }
                              -- Got a duplicate ACK.
                              Just Nothing -> do
                                  traceWith logTrace (Warning, sformat ("duplicate ACK nonce from " % shown) peer)
                                  return $ state {
                                        dsConnections = Map.insert connid (peer, HandshakeFailure) (dsConnections state)
                                      }
                              -- Got an ACK for a SYN that we sent. Start
                              -- feeding the application handler.
                              Just (Just (dumpBytes, incrBytes, peerDataVar)) -> do
                                  putMVar peerDataVar peerData
                                  let bs = LBS.toStrict ws'
                                  dumpBytes $ Just bs
                                  incrBytes $ fromIntegral (BS.length bs)
                                  return $ state {
                                        dsConnections = Map.insert connid (peer, FeedingApplicationHandler dumpBytes incrBytes) (dsConnections state)
                                      }
                    | otherwise -> do
                          traceWith logTrace (Warning, sformat ("unexpected control header from " % shown % " : " % shown) peer w)
                          return $ state {
                                dsConnections = Map.insert connid (peer, HandshakeFailure) (dsConnections state)
                              }
        -- Application payload: hand it to the handler's channel and count it.
        Just (_peer, FeedingApplicationHandler dumpBytes incrBytes) -> do
            let bs = LBS.toStrict (LBS.fromChunks chunks)
            dumpBytes $ Just bs
            incrBytes $ BS.length bs
            return state

    -- Handle closure of a single lightweight connection: signal end of
    -- channel to its handler (if any) and drop it from the bookkeeping.
    connectionClosed
        :: DispatcherState peerData
        -> NT.ConnectionId
        -> IO (DispatcherState peerData)
    connectionClosed state connid = case Map.lookup connid (dsConnections state) of
        Nothing -> do
            traceWith logTrace (Warning, sformat ("closed unknown connection " % shown) connid)
            return state
        Just (peer, connState) -> do
            case connState of
                FeedingApplicationHandler dumpBytes _ -> do
                    -- Signal end of channel.
                    dumpBytes Nothing
                _ -> return ()
            -- Drop the connection from the peer's set; if it was the peer-data
            -- "leader", forget the partial decoder as well.
            let peersUpdater existing = case existing of
                    GotPeerData peerData neset -> case NESet.delete connid neset of
                        Nothing -> Nothing
                        Just neset' -> Just (GotPeerData peerData neset')
                    ExpectingPeerData neset mleader -> case NESet.delete connid neset of
                        Nothing -> Nothing
                        Just neset' -> case mleader of
                            Nothing -> Just (ExpectingPeerData neset' mleader)
                            Just (connid', _partialDecoder) -> case connid == connid' of
                                True -> Just (ExpectingPeerData neset' Nothing)
                                False -> Just (ExpectingPeerData neset' mleader)
            let state' = state {
                      dsConnections = Map.delete connid (dsConnections state)
                    , dsPeers = Map.update peersUpdater peer (dsPeers state)
                    }
            return state'

    -- Handle loss of a whole heavyweight connection bundle to a peer:
    -- plug the input channel of every inbound connection before posting.
    connectionLost
        :: DispatcherState peerData
        -> NT.EndPointAddress
        -> NT.ConnectionBundle
        -> IO (DispatcherState peerData)
    connectionLost state peer bundle = do
        traceWith logTrace (Warning, sformat ("lost connection bundle " % shown % " to " % shown) bundle peer)
        state' <- case Map.lookup peer (dsPeers state) of
            Just it -> do
                let connids = case it of
                        GotPeerData _ neset -> NESet.toList neset
                        ExpectingPeerData neset _ -> NESet.toList neset
                -- Remove each lost connection, signalling end-of-channel to
                -- any handler it was feeding.
                let folder :: Map NT.ConnectionId (NT.EndPointAddress, ConnectionState peerData)
                           -> NT.ConnectionId
                           -> IO (Map NT.ConnectionId (NT.EndPointAddress, ConnectionState peerData))
                    folder channels connid = case Map.updateLookupWithKey (\_ _ -> Nothing) connid channels of
                        (Just (_, FeedingApplicationHandler dumpBytes _), channels') -> do
                            dumpBytes Nothing
                            return channels'
                        (_, channels') -> return channels'
                channels' <- foldlM folder (dsConnections state) connids
                return $ state {
                      dsConnections = channels'
                    , dsPeers = Map.delete peer (dsPeers state)
                    }
            Nothing -> return state
        -- Collect the un-acked outbound conversations on this bundle so their
        -- channels can be closed and their peer-data vars filled.
        channelsAndPeerDataVars <- modifyMVar nstate $ \st -> do
            let nonces = Map.lookup peer (_nodeStateOutboundBidirectional st)
            case nonces of
                Nothing -> return (st, [])
                Just map -> do
                    let folder (_, channelIn, _, peerDataVar, bundle', _, acked) channels
                            | bundle' == bundle && not acked = (channelIn, peerDataVar) : channels
                            | otherwise = channels
                    let channelsAndPeerDataVars = Map.foldr folder [] map
                    return (st, channelsAndPeerDataVars)
        traceWith logTrace (Warning, sformat ("closing " % shown % " channels on bundle " % shown % " to " % shown) (length channelsAndPeerDataVars) bundle peer)
        forM_ channelsAndPeerDataVars $ \(dumpBytes, peerDataVar) -> do
            _ <- tryPutMVar peerDataVar (error "no peer data because the connection was lost")
            dumpBytes Nothing
        return state'
-- | Spawn a handler thread and register it in the shared node state,
--   taking care to deregister it and update statistics when it finishes
--   (normally or exceptionally). Returns the handler's 'Async' and an
--   action for recording bytes received on its behalf.
spawnHandler
    :: forall peerData t .
       Trace IO (Severity, Text)
    -> MVar (NodeState peerData)
    -> HandlerProvenance peerData (Maybe BS.ByteString -> IO ())
    -> IO t
    -> IO (Async t, Int -> IO ())
spawnHandler logTrace stateVar provenance action =
    modifyMVar stateVar $ \nodeState -> do
        -- Running total of bytes received by this handler; un-counted from
        -- the statistics when the handler finishes.
        totalBytes <- newMVar 0
        -- Spawn the thread to get a 'SomeHandler'. 'rec' is needed because
        -- the thread must know its own 'SomeHandler' to deregister itself.
        rec { promise <- async $ do
                  startTime <- getCurrentTime
                  normal someHandler startTime totalBytes
                      `catch` exceptional someHandler startTime totalBytes
            ; let someHandler = SomeHandler promise
            }
        -- Register the handler according to its provenance: inbound handlers
        -- go in a set, outbound ones are keyed by peer and nonce.
        let nodeState' = case provenance of
                Remote _ _ -> nodeState {
                      _nodeStateInbound = Set.insert someHandler (_nodeStateInbound nodeState)
                    }
                Local peer (nonce, peerDataVar, connBundle, timeoutPromise, dumpBytes) -> nodeState {
                      _nodeStateOutboundBidirectional = Map.alter alteration peer (_nodeStateOutboundBidirectional nodeState)
                    }
                  where
                    alteration Nothing = Just $ Map.singleton nonce (someHandler, dumpBytes, incrBytes, peerDataVar, connBundle, timeoutPromise, False)
                    alteration (Just map) = Just $ Map.insert nonce (someHandler, dumpBytes, incrBytes, peerDataVar, connBundle, timeoutPromise, False) map
            -- Bump both the per-peer statistics and this handler's total.
            incrBytes !n = do
                nodeState <- readMVar stateVar
                stIncrBytes (handlerProvenancePeer provenance) n (_nodeStateStatistics nodeState)
                modifyMVar totalBytes $ \(!m) -> return (m + n, ())
        statistics' <- stAddHandler provenance (_nodeStateStatistics nodeState)
        return (nodeState' { _nodeStateStatistics = statistics' }, (promise, incrBytes))
  where
    -- Run the action and signal a normal finish.
    normal :: SomeHandler -> Microsecond -> MVar Int -> IO t
    normal someHandler startTime totalBytesVar = do
        t <- action
        signalFinished someHandler startTime totalBytesVar Nothing
        pure t
    -- Signal an exceptional finish and re-throw.
    exceptional :: SomeHandler -> Microsecond -> MVar Int -> SomeException -> IO t
    exceptional someHandler startTime totalBytesVar e = do
        signalFinished someHandler startTime totalBytesVar (Just e)
        throwIO e
    -- Deregister the handler from the node state and update statistics,
    -- un-counting the bytes it had received.
    signalFinished :: SomeHandler -> Microsecond -> MVar Int -> Maybe SomeException -> IO ()
    signalFinished someHandler startTime totalBytesVar outcome = do
        endTime <- getCurrentTime
        let elapsed = endTime - startTime
        totalBytes <- readMVar totalBytesVar
        modifyMVar stateVar $ \nodeState -> do
            let nodeState' = case provenance of
                    Remote _ _ -> nodeState {
                          _nodeStateInbound = Set.delete someHandler (_nodeStateInbound nodeState)
                        }
                    Local peer (nonce, _, _, _, _) -> nodeState {
                          _nodeStateOutboundBidirectional = Map.update updater peer (_nodeStateOutboundBidirectional nodeState)
                        }
                      where
                        updater map =
                            let map' = Map.delete nonce map
                            in if Map.null map' then Nothing else Just map'
            -- Subtract this handler's bytes from the running statistics.
            stIncrBytes (handlerProvenancePeer provenance) (-totalBytes) $ _nodeStateStatistics nodeState
            statistics' <-
                stRemoveHandler logTrace provenance elapsed outcome $
                    _nodeStateStatistics nodeState
            return (nodeState' { _nodeStateStatistics = statistics' }, ())
-- | Tag byte for the bidirectional-handshake SYN header: ASCII 'S' (83).
controlHeaderCodeBidirectionalSyn :: Word8
controlHeaderCodeBidirectionalSyn = 83
-- | Tag byte for the bidirectional-handshake ACK header: ASCII 'A' (65).
controlHeaderCodeBidirectionalAck :: Word8
controlHeaderCodeBidirectionalAck = 65
-- | Serialise the SYN control header: one tag byte followed by the
--   8-byte big-endian nonce (9 bytes total).
controlHeaderBidirectionalSyn :: Nonce -> BS.ByteString
controlHeaderBidirectionalSyn (Nonce nonce) =
    let payload = BS.word8 controlHeaderCodeBidirectionalSyn <> BS.word64BE nonce
    in  fixedSizeBuilder' 9 payload
-- | Serialise the ACK control header: one tag byte followed by the
--   8-byte big-endian nonce (9 bytes total).
controlHeaderBidirectionalAck :: Nonce -> BS.ByteString
controlHeaderBidirectionalAck (Nonce nonce) =
    let payload = BS.word8 controlHeaderCodeBidirectionalAck <> BS.word64BE nonce
    in  fixedSizeBuilder' 9 payload
-- | Strict variant of 'fixedSizeBuilder'.
fixedSizeBuilder' :: Int -> BS.Builder -> BS.ByteString
fixedSizeBuilder' n builder = LBS.toStrict (fixedSizeBuilder n builder)
-- | Run a builder whose output size is known in advance, allocating
--   exactly one untrimmed buffer of that size.
fixedSizeBuilder :: Int -> BS.Builder -> LBS.ByteString
fixedSizeBuilder n builder =
    BS.toLazyByteStringWith (BS.untrimmedStrategy n n) LBS.empty builder
-- | Open a bidirectional conversation with a peer: draw a fresh nonce,
--   create the input channel, spawn the handler that performs the SYN
--   handshake and runs the action, and wait for it to finish.
withInOutChannel
    :: forall packingType peerData a .
       ( Serializable packingType peerData )
    => Node packingType peerData
    -> NodeId
    -> (peerData -> ChannelIn -> ChannelOut -> IO a)
    -> IO a
withInOutChannel node@Node{nodeEnvironment, nodeState, nodeTrace} nodeid@(NodeId peer) action = do
    -- Draw a fresh handshake nonce from the node's PRNG.
    nonce <- modifyMVar nodeState $ \nodeState -> do
        let (nonce, !prng') = random (_nodeStateGen nodeState)
        pure (nodeState { _nodeStateGen = prng' }, nonce)
    channel <- fmap ChannelIn newTChanIO
    channelVar <- newMVar (Just channel)
    -- Writing is a no-op once the channel has been closed.
    let dumpBytes mbs = withMVar channelVar $ \mchannel -> case mchannel of
            Nothing -> pure ()
            Just (ChannelIn channel) -> atomically $ writeTChan channel mbs
        closeChannel = modifyMVar channelVar $ \_ -> pure (Nothing, ())
    -- TODO must ensure that at some point it is always filled; the dispatcher
    -- fills it on ACK, connection loss, or node shutdown.
    peerDataVar <- newEmptyMVar
    -- The dispatcher may see the ACK before we register, but that's OK, as
    -- disconnectFromPeer is forgiving about this.
    let action' conn = mask $ \restore -> do
            rec { let provenance = Local peer (nonce, peerDataVar, NT.bundle conn, timeoutPromise, dumpBytes)
                ; (promise, _) <- restore $ spawnHandler nodeTrace nodeState provenance $ do
                    -- It's essential that we only send the handshake SYN inside
                    -- the handler, so a send failure shows up on its 'Async'.
                    outcome <- NT.send conn [controlHeaderBidirectionalSyn nonce]
                    case outcome of
                        Left err -> throwIO err
                        Right _ -> do
                            peerData <- readMVar peerDataVar
                            action peerData channel (ChannelOut conn)
                -- Abort the conversation if the ACK doesn't arrive in time.
                ; timeoutPromise <- async $ do
                    delay (nodeAckTimeout nodeEnvironment)
                    cancelWith promise Timeout
                }
            restore (wait promise) `catch` \(e :: SomeAsyncException) -> do
                uninterruptibleCancel promise
                throwIO e
    connectToPeer node nodeid action' `finally` closeChannel
-- | Bookkeeping for the heavyweight connection to a peer: how many
--   lightweight connections are established, coming up, or going down,
--   and whether the peer data has been transmitted yet.
data OutboundConnectionState =
      -- | Steady state: some connections may be coming up or going down
      --   while a number of them are established.
      Stable !(Maybe ComingUp) !Int !(Maybe GoingDown) !PeerDataTransmission
      -- | All lightweight connections are being torn down.
    | AllGoingDown !GoingDown
      -- | All lightweight connections are being established.
    | AllComingUp !ComingUp

-- | The MVar will be filled when the last connection goes down.
data GoingDown = GoingDown !Int !(MVar ())

-- | The MVar will be filled when the first connection comes up.
data ComingUp = ComingUp !Int !(MVar ())

-- | Progress of the one-time peer-data transmission on this heavyweight
--   connection.
data PeerDataTransmission =
      PeerDataToBeTransmitted
    | PeerDataInFlight !(MVar (Maybe SomeException))
    | PeerDataTransmitted
-- | Close a lightweight connection to a peer, updating the shared
--   'OutboundConnectionState' bookkeeping so that concurrent connection
--   attempts to the same peer are sequenced correctly.
disconnectFromPeer
    :: Node packingType peerData
    -> NodeId
    -> NT.Connection
    -> IO ()
disconnectFromPeer Node{nodeState} (NodeId peer) conn =
    bracketWithException startClosing finishClosing (const (NT.close conn))
  where
    -- After the close: record that this connection has finished going down,
    -- filling the exclusion MVar when it was the last one.
    finishClosing _ (_ :: Maybe SomeException) = do
        modifyMVar nodeState $ \nodeState -> do
            let map = _nodeStateConnectedTo nodeState
            choice <- case Map.lookup peer map of
                Just (Stable comingUp established goingDown transmission)
                    -- Last going-down connection: wake any waiters.
                    | Just (GoingDown n excl) <- goingDown
                    , n == 1 -> do
                          putMVar excl ()
                          return . Just $ Stable comingUp established Nothing transmission
                    | Just (GoingDown n excl) <- goingDown
                    , n > 1 -> do
                          return . Just $ Stable comingUp established (Just (GoingDown (n - 1) excl)) transmission
                Just (AllGoingDown (GoingDown n excl))
                    | n == 1 -> do
                          putMVar excl ()
                          return Nothing
                    | otherwise -> do
                          return $ Just (AllGoingDown (GoingDown (n - 1) excl))
                _ -> throwIO (InternalError "finishClosing : impossible")
            let nodeState' = nodeState {
                      _nodeStateConnectedTo = Map.update (const choice) peer map
                    }
            return (nodeState', ())
    -- Before the close: mark one established connection as going down,
    -- waiting if a connection to this peer is still coming up.
    startClosing = do
        canClose <- modifyMVar nodeState $ \nodeState -> do
            let map = _nodeStateConnectedTo nodeState
            choice <- case Map.lookup peer map of
                Just (Stable comingUp established goingDown transmission)
                    | established > 1
                    , Just (GoingDown !n excl) <- goingDown ->
                          return . Right $ Stable comingUp (established - 1) (Just (GoingDown (n + 1) excl)) transmission
                    | established > 1
                    , Nothing <- goingDown -> do
                          excl <- newEmptyMVar
                          return . Right $ Stable comingUp (established - 1) (Just (GoingDown 1 excl)) transmission
                    | established == 1
                    , Nothing <- comingUp
                    , Just (GoingDown !n excl) <- goingDown ->
                          return . Right $ AllGoingDown (GoingDown (n + 1) excl)
                    | established == 1
                    , Nothing <- comingUp
                    , Nothing <- goingDown -> do
                          excl <- newEmptyMVar
                          return . Right $ AllGoingDown (GoingDown 1 excl)
                    -- A connection is still coming up: wait for it first.
                    | established == 1
                    , Just (ComingUp !_m excl) <- comingUp ->
                          return . Left $ excl
                    | otherwise -> throwIO (InternalError "startClosing : impossible")
                Nothing -> throwIO (InternalError "startClosing : impossible")
                Just (AllGoingDown _) -> throwIO (InternalError "startClosing : impossible")
                Just (AllComingUp _) -> throwIO (InternalError "startClosing : impossible")
            case choice of
                Left excl -> return (nodeState, Left excl)
                Right ocs -> return (nodeState', Right ())
                  where
                    nodeState' = nodeState {
                          _nodeStateConnectedTo = Map.insert peer ocs map
                        }
            -- Block on the exclusion MVar, then retry.
        case canClose of
            Left excl -> do
                readMVar excl
                startClosing
            Right () -> return ()
-- | Connect to a peer, bracketed by the connection bookkeeping. Sending
--   will block until the peer-data is sent; it must be the first thing to
--   arrive when the first lightweight connection to a peer is opened.
connectToPeer
    :: forall packingType peerData r .
       ( Serializable packingType peerData )
    => Node packingType peerData
    -> NodeId
    -> (NT.Connection -> IO r)
    -> IO r
connectToPeer node@Node{nodeEndPoint, nodeState, nodePacking, nodePeerData, nodeEnvironment, nodeTrace} nid@(NodeId peer) act =
    -- Establish the lightweight connection, guaranteeing disconnection (and
    -- bookkeeping updates) afterwards.
    bracket establish (disconnectFromPeer node nid) $ \conn -> do
        sendPeerDataIfNecessary conn
        act conn
  where

    mtu = nodeMtu nodeEnvironment

    -- Ensure the one-time peer-data payload has been transmitted on this
    -- heavyweight connection, sending it ourselves if we win the race.
    sendPeerDataIfNecessary conn =
        bracketWithException getPeerDataResponsibility
                             dischargePeerDataResponsibility
                             (maybeSendPeerData conn)

    maybeSendPeerData conn responsibility = case responsibility of
        False -> return ()
        True -> sendPeerData conn

    sendPeerData conn = do
        serializedPeerData <- pack nodePacking nodePeerData
        writeMany mtu (ChannelOut conn) serializedPeerData

    -- Decide whether this thread must send the peer data; blocks while
    -- another thread's transmission is in flight.
    getPeerDataResponsibility = do
        responsibility <- modifyMVar nodeState $ \nodeState -> do
            let map = _nodeStateConnectedTo nodeState
            (ocs, responsibility) <- case Map.lookup peer map of
                Just it@(Stable comingUp established goingDown transmission)
                    | PeerDataToBeTransmitted <- transmission -> do
                          excl <- newEmptyMVar
                          return (Stable comingUp established goingDown (PeerDataInFlight excl), Just (Right excl))
                    | PeerDataInFlight excl <- transmission ->
                          return (it, Just (Left excl))
                    | PeerDataTransmitted <- transmission ->
                          return (it, Nothing)
                    | otherwise -> throwIO (InternalError "impossible")
                _ -> do
                    traceWith nodeTrace (Error, "getPeerDataResponsibility: unexpected peer state")
                    throwIO $ InternalError "connectToPeer: getPeerDataResponsibility: impossible"
            let nodeState' = nodeState {
                      _nodeStateConnectedTo = Map.insert peer ocs map
                    }
            return (nodeState', responsibility)
        case responsibility of
            -- Somebody else is sending: wait for them, then re-check.
            Just (Left excl) -> do
                _ <- readMVar excl
                getPeerDataResponsibility
            Just (Right _) -> do
                return True
            Nothing -> do
                return False

    -- Record the outcome of our transmission (success or failure) and wake
    -- any threads that were waiting on it.
    dischargePeerDataResponsibility responsibility (merr :: Maybe SomeException) = do
        modifyMVar nodeState $ \nodeState -> do
            let map = _nodeStateConnectedTo nodeState
            ocs <- case Map.lookup peer map of
                Just it@(Stable comingUp established goingDown transmission)
                    | True <- responsibility
                    , Nothing <- merr
                    , PeerDataInFlight excl <- transmission -> do
                          putMVar excl Nothing
                          return $ Stable comingUp established goingDown PeerDataTransmitted
                    -- Transmission failed: revert so somebody retries.
                    | True <- responsibility
                    , Just _ <- merr
                    , PeerDataInFlight excl <- transmission -> do
                          putMVar excl merr
                          return $ Stable comingUp established goingDown PeerDataToBeTransmitted
                    | False <- responsibility -> return it
                _ -> do
                    traceWith nodeTrace (Error, "dischargePeerDataResponsibility: unexpected peer state")
                    throwIO $ InternalError "connectToPeer: dischargePeerDataResponsibility: impossible"
            let nodeState' = nodeState {
                      _nodeStateConnectedTo = Map.insert peer ocs map
                    }
            return (nodeState', ())

    establish = bracketWithException startConnecting finishConnecting doConnection

    doConnection _ = do
        mconn <- NT.connect nodeEndPoint
                            peer
                            NT.ReliableOrdered
                            -- TODO give a timeout. Can't rely on it being set at
                            -- the transport level.
                            NT.ConnectHints{ connectTimeout = Nothing }
        case mconn of
            Left err -> throwIO err
            Right conn -> return conn

    -- Update the state to show this connection as established (or failed),
    -- signalling threads that were waiting for the first to come up.
    finishConnecting _ (merr :: Maybe SomeException) = do
        modifyMVar nodeState $ \nodeState -> do
            when (_nodeStateClosed nodeState) (throwIO $ InternalError "connectToPeer : node closed while establishing connection!")
            let map = _nodeStateConnectedTo nodeState
            choice <- case Map.lookup peer map of
                Just (AllComingUp (ComingUp n excl))
                    | Nothing <- merr -> do
                          let comingUp = case n of
                                  1 -> Nothing
                                  _ -> Just (ComingUp (n - 1) excl)
                          return . Just $ Stable comingUp 1 Nothing PeerDataToBeTransmitted
                    | Just _ <- merr
                    , n == 1 ->
                          return Nothing
                    | Just _ <- merr
                    , n > 1 ->
                          return . Just $ AllComingUp (ComingUp (n - 1) excl)
                Just (Stable comingUp established goingDown transmission)
                    | Just (ComingUp n excl) <- comingUp -> do
                          putMVar excl ()
                          comingUp' <- case n of
                              1 -> return Nothing
                              _ -> do
                                  excl' <- newEmptyMVar
                                  return $ Just (ComingUp (n - 1) excl')
                          let established' = case merr of
                                  Nothing -> established + 1
                                  Just _ -> established
                          return . Just $ Stable comingUp' established' goingDown transmission
                _ -> throwIO (InternalError "finishConnecting : impossible")
            let nodeState' = nodeState {
                      _nodeStateConnectedTo = Map.update (const choice) peer map
                    }
            return (nodeState', ())

    -- Register intent to open a connection; blocks while the peer is in the
    -- "all going down" state.
    startConnecting = do
        canOpen <- modifyMVar nodeState $ \nodeState -> do
            when (_nodeStateClosed nodeState) (throwIO $ userError "connectToPeer : you're doing it wrong! Our node is closed!")
            let map = _nodeStateConnectedTo nodeState
            choice <- case Map.lookup peer map of
                -- First to connect.
                Nothing -> do
                    excl <- newEmptyMVar
                    return . Right $ AllComingUp (ComingUp 1 excl)
                -- Stable connection. There's at least one that isn't currently
                -- going down; piggyback on the coming-up counter.
                Just (Stable comingUp established goingDown transmission)
                    | Just (ComingUp n excl) <- comingUp ->
                          return . Right $ Stable (Just (ComingUp (n + 1) excl)) established goingDown transmission
                    | Nothing <- comingUp -> do
                          excl <- newEmptyMVar
                          return . Right $ Stable (Just (ComingUp 1 excl)) established goingDown transmission
                -- Everything is going down: wait until that finishes.
                Just (AllGoingDown (GoingDown _ excl)) ->
                    return . Left $ excl
                Just (AllComingUp (ComingUp n excl)) ->
                    return . Right $ AllComingUp (ComingUp (n + 1) excl)
            case choice of
                Left excl -> return (nodeState, Left excl)
                Right ocs -> return (nodeState', Right ())
                  where
                    nodeState' = nodeState {
                          _nodeStateConnectedTo = Map.insert peer ocs map
                        }
        case canOpen of
            Left excl -> do
                readMVar excl
                startConnecting
            Right () -> return ()
-- | Like 'bracket', but the release action also receives the exception
--   (if any, and if it can be cast to @e@) that terminated the main
--   action. The release action runs under 'uninterruptibleMask_'; an
--   exception it throws on the failure path is suppressed in favour of
--   re-throwing the original one.
bracketWithException
    :: ( Exception e )
    => IO r
    -> (r -> Maybe e -> IO b)
    -> (r -> IO c)
    -> IO c
bracketWithException before after thing = mask $ \restore -> do
    resource <- before
    outcome <- try $ restore (thing resource)
    case outcome of
        Right result -> do
            _ <- uninterruptibleMask_ $ after resource Nothing
            return result
        Left (err :: SomeException) -> do
            _ :: Either SomeException b <-
                try $ uninterruptibleMask_ $ after resource (fromException err)
            throwIO err
(* c323346b174bdcadd6d80adefda1cd7e51dbb69b594752ab5519b087fcbab02a | c4-project/c4f | args.ml *)
(* This file is part of c4f.

   Copyright (c) 2018-2022 C4 Project

   c4t itself is licensed under the MIT License. See the LICENSE file in the
   project root for more information.

   Parts of c4t are based on code from the Herdtools7 project
   (https://github.com/herd/herdtools7) : see the LICENSE.herd file in the
   project root for more information. *)
open Core
open C4f_common
module Tx = Travesty_base_exts
(* String table mapping the colouring-mode names ("always" / "never" /
   "auto") to optional Fmt style renderers. *)
module Colour_table = C4f_utils.String_table.Make (struct
  type t = Fmt.style_renderer option

  let equal_style_renderer (x : Fmt.style_renderer) (y : Fmt.style_renderer)
      : bool =
    match (x, y) with
    | `Ansi_tty, `Ansi_tty -> true
    | `None, `None -> true
    | `Ansi_tty, `None | `None, `Ansi_tty -> false

  let equal : t -> t -> bool = Option.equal equal_style_renderer

  let table =
    [(Some `None, "never"); (Some `Ansi_tty, "always"); (None, "auto")]
end)
(* Inverse view of [Colour_table.table]: mode name -> renderer option. *)
let colour_map : Fmt.style_renderer option String.Map.t =
  Colour_table.table |> List.Assoc.inverse |> Map.of_alist_exn (module String)
(* Command-line argument type reading a colouring mode name. *)
let colour_type : Fmt.style_renderer option Command.Arg_type.t =
  colour_map |> Command.Arg_type.of_map
(* Renders a colouring mode as a sexp atom, falling back to "?" for
   values absent from the table. *)
let colour_sexp (sr : Fmt.style_renderer option) : Sexp.t =
  let name = Option.value (Colour_table.to_string sr) ~default:"?" in
  Sexp.Atom name
(* Miscellaneous argument types and flags shared by c4f commands. *)
module Other = struct
  open Command.Param

  (* Wraps a no-argument flag so that, when present, it selects [enum]. *)
  let flag_to_enum_choice (type a) (enum : a) (str : string) ~(doc : string)
      : a option t =
    flag str no_arg ~doc |> map ~f:(fun present -> Option.some_if present enum)

  (* Argument type for c4f identifiers. *)
  let id_type = Arg_type.create Id.of_string

  (* Argument type for well-formed file paths. *)
  let fpath_type : Fpath.t Arg_type.t =
    Arg_type.map Filename_unix.arg_type ~f:Fpath.v

  (* Argument type for plumbing input sources. *)
  let input_type : Plumbing.Input.t Arg_type.t =
    Arg_type.map Filename_unix.arg_type ~f:(fun path ->
        Or_error.ok_exn (Plumbing.Input.of_string path) )

  (* Argument type for plumbing output sinks. *)
  let output_type : Plumbing.Output.t Arg_type.t =
    Arg_type.map Filename_unix.arg_type ~f:(fun path ->
        Or_error.ok_exn (Plumbing.Output.of_string path) )

  (* Shared [-aux-file FILE] flag. *)
  let aux_file : string option Command.Param.t =
    flag "aux-file"
      (optional Filename_unix.arg_type)
      ~doc:
        "FILE path to a JSON file containing auxiliary litmus information \
         for this file"
end
include Other
(** Standard flags accepted by (almost) every c4f subcommand: verbosity,
    warning suppression, colouring, and the optional global config file. *)
module Standard = struct
  (* Parsed values of the standard flags. *)
  type t =
    { verbose: bool
    ; no_warnings: bool
    ; colour: Fmt.style_renderer option
    ; config_file: Fpath.t option }
  [@@deriving fields]

  (* Whether the user asked for verbose output. *)
  let is_verbose t = t.verbose

  (* Warnings are enabled unless [-no-warnings] was given. *)
  let are_warnings_enabled t = not t.no_warnings

  (* Loads the global config from [config_file] if one was given;
     otherwise builds the default config. *)
  let load_config (x : t) : C4f_config.Global.t Or_error.t =
    match x.config_file with
    | Some f -> f |> Plumbing.Input.of_fpath |> C4f_config.Global.Load.load
    | None -> Ok (C4f_config.Global.make ())

  (* Command-line parser for the standard flags. *)
  let get =
    Command.Let_syntax.(
      let%map_open verbose =
        flag "verbose" no_arg
          ~doc:"print more information about the compilers"
      and no_warnings =
        flag "no-warnings" no_arg ~doc:"if given, suppresses all warnings"
      and config_file =
        flag "config"
          (optional
             (Arg_type.map
                ~f:
                  (Fn.compose Or_error.ok_exn
                     Plumbing.Fpath_helpers.of_string )
                Filename_unix.arg_type ) )
          ~doc:"PATH a fuzzing config file to use"
      and colour =
        flag_optional_with_default_doc "colour" colour_type colour_sexp
          ~default:None ~doc:"MODE force a particular colouring mode"
      in
      {verbose; no_warnings; config_file; colour})

  (* Builds the output sinks respecting the verbosity/warning flags. *)
  let make_output (args : t) : C4f_common.Output.t =
    C4f_common.Output.make ~verbose:(is_verbose args)
      ~warnings:(are_warnings_enabled args)

  (* Applies the requested colouring mode to the standard outputs. *)
  let setup_colour (args : t) : unit =
    let style_renderer = colour args in
    Fmt_tty.setup_std_outputs ?style_renderer ()

  (* Runs [f] with an output built from [args]; on error, prints the
     error through that output and exits with status 1. *)
  let lift_command (args : t) ~(f : C4f_common.Output.t -> unit Or_error.t) :
      unit =
    setup_colour args ;
    let o = make_output args in
    let result = f o in
    if Or_error.is_error result then (
      C4f_common.Output.print_error o result ;
      exit 1 )

  (* As [lift_command], but also loads and passes the global config. *)
  let lift_command_with_config (args : t)
      ~(f : C4f_common.Output.t -> C4f_config.Global.t -> unit Or_error.t) :
      unit =
    lift_command args ~f:(fun o -> Or_error.(args |> load_config >>= f o))
end
(** Wraps another argument record with optional anonymous input file(s)
    and an optional [-output] file. *)
module With_files = struct
  (* ['a t] pairs the wrapped arguments [rest] with the raw (unvalidated)
     input paths and the optional raw output path. *)
  type 'a t = {rest: 'a; infiles_raw: string list; outfile_raw: string option}
  [@@deriving fields]

  (* Shared [-output FILE] flag. *)
  let out : string option Command.Param.t =
    Command.Param.(
      flag "output"
        (optional Filename_unix.arg_type)
        ~doc:"FILE the output file (default: stdout)")

  (* Parser accepting at most one anonymous input file. *)
  let get (type a) (rest : a Command.Param.t) : a t Command.Param.t =
    Command.Let_syntax.(
      let%map_open infile_raw =
        anon (maybe ("FILE" %: Filename_unix.arg_type))
      and outfile_raw = out
      and rest = rest in
      {rest; infiles_raw= Option.to_list infile_raw; outfile_raw})

  (* Parser accepting any number of anonymous input files. *)
  let get_with_multiple_inputs (type a) (rest : a Command.Param.t) :
      a t Command.Param.t =
    Command.Let_syntax.(
      let%map_open infiles_raw =
        anon (sequence ("FILE" %: Filename_unix.arg_type))
      and outfile_raw = out
      and rest = rest in
      {rest; infiles_raw; outfile_raw})

  (* All input paths, validated as [Fpath.t]s. *)
  let infiles_fpath (args : _ t) : Fpath.t list Or_error.t =
    Tx.Or_error.combine_map (infiles_raw args)
      ~f:Plumbing.Fpath_helpers.of_string

  (* The single input path, erroring if more than one was given. *)
  let infile_raw (args : _ t) : string option Or_error.t =
    args |> infiles_raw |> Tx.List.at_most_one

  (* The single input path as an [Fpath.t], if present. *)
  let infile_fpath (args : _ t) : Fpath.t option Or_error.t =
    Or_error.(args |> infile_raw >>= Plumbing.Fpath_helpers.of_string_option)

  (* The single input as a plumbing source. ([of_string_opt] chooses the
     default for [None] — presumably stdin; confirm against Plumbing.) *)
  let infile_source (args : _ t) : Plumbing.Input.t Or_error.t =
    Or_error.(args |> infile_raw >>= Plumbing.Input.of_string_opt)

  (* The output path as an [Fpath.t], if present. *)
  let outfile_fpath (args : _ t) : Fpath.t option Or_error.t =
    args |> outfile_raw |> Plumbing.Fpath_helpers.of_string_option

  (* The output as a plumbing sink. ([of_string_opt] chooses the default
     for [None] — presumably stdout; confirm against Plumbing.) *)
  let outfile_sink (args : _ t) : Plumbing.Output.t Or_error.t =
    args |> outfile_raw |> Plumbing.Output.of_string_opt

  (* Runs filter [f] from the parsed input source to the output sink. *)
  let run_filter (type o)
      (f : Plumbing.Input.t -> Plumbing.Output.t -> o Or_error.t) (args : _ t)
      : o Or_error.t =
    Or_error.Let_syntax.(
      let%bind input = infile_source args in
      let%bind output = outfile_sink args in
      f input output)
end
| null | https://raw.githubusercontent.com/c4-project/c4f/8939477732861789abc807c8c1532a302b2848a5/lib/common_cmd/src/args.ml | ocaml | This file is part of c4f .
Copyright ( c ) 2018 - 2022 C4 Project
c4 t itself is licensed under the MIT License . See the LICENSE file in the
project root for more information .
Parts of c4 t are based on code from the Herdtools7 project
( ) : see the LICENSE.herd file in the
project root for more information .
Copyright (c) 2018-2022 C4 Project
c4t itself is licensed under the MIT License. See the LICENSE file in the
project root for more information.
Parts of c4t are based on code from the Herdtools7 project
() : see the LICENSE.herd file in the
project root for more information. *)
open Core
open C4f_common
module Tx = Travesty_base_exts
module Colour_table = C4f_utils.String_table.Make (struct
let equal_style_renderer (x : Fmt.style_renderer) (y : Fmt.style_renderer)
: bool =
match (x, y) with
| `Ansi_tty, `Ansi_tty | `None, `None -> true
| `Ansi_tty, `None | `None, `Ansi_tty -> false
type t = Fmt.style_renderer option
let equal : t -> t -> bool = Option.equal equal_style_renderer
let table =
[(Some `None, "never"); (Some `Ansi_tty, "always"); (None, "auto")]
end)
let colour_map : Fmt.style_renderer option String.Map.t =
Map.of_alist_exn (module String) (List.Assoc.inverse Colour_table.table)
let colour_type : Fmt.style_renderer option Command.Arg_type.t =
Command.Arg_type.of_map colour_map
let colour_sexp (sr : Fmt.style_renderer option) : Sexp.t =
sr |> Colour_table.to_string |> Option.value ~default:"?" |> Sexp.Atom
module Other = struct
open Command.Param
let flag_to_enum_choice (type a) (enum : a) (str : string) ~(doc : string)
: a option t =
map ~f:(Fn.flip Option.some_if enum) (flag str no_arg ~doc)
let id_type = Arg_type.create Id.of_string
let fpath_type : Fpath.t Arg_type.t =
Arg_type.map ~f:Fpath.v Filename_unix.arg_type
let input_type : Plumbing.Input.t Arg_type.t =
Arg_type.map
~f:(Fn.compose Or_error.ok_exn Plumbing.Input.of_string)
Filename_unix.arg_type
let output_type : Plumbing.Output.t Arg_type.t =
Arg_type.map
~f:(Fn.compose Or_error.ok_exn Plumbing.Output.of_string)
Filename_unix.arg_type
let aux_file : string option Command.Param.t =
flag "aux-file"
(optional Filename_unix.arg_type)
~doc:
"FILE path to a JSON file containing auxiliary litmus information \
for this file"
end
include Other
module Standard = struct
type t =
{ verbose: bool
; no_warnings: bool
; colour: Fmt.style_renderer option
; config_file: Fpath.t option }
[@@deriving fields]
let is_verbose t = t.verbose
let are_warnings_enabled t = not t.no_warnings
let load_config (x : t) : C4f_config.Global.t Or_error.t =
match x.config_file with
| Some f -> f |> Plumbing.Input.of_fpath |> C4f_config.Global.Load.load
| None -> Ok (C4f_config.Global.make ())
let get =
Command.Let_syntax.(
let%map_open verbose =
flag "verbose" no_arg
~doc:"print more information about the compilers"
and no_warnings =
flag "no-warnings" no_arg ~doc:"if given, suppresses all warnings"
and config_file =
flag "config"
(optional
(Arg_type.map
~f:
(Fn.compose Or_error.ok_exn
Plumbing.Fpath_helpers.of_string )
Filename_unix.arg_type ) )
~doc:"PATH a fuzzing config file to use"
and colour =
flag_optional_with_default_doc "colour" colour_type colour_sexp
~default:None ~doc:"MODE force a particular colouring mode"
in
{verbose; no_warnings; config_file; colour})
let make_output (args : t) : C4f_common.Output.t =
C4f_common.Output.make ~verbose:(is_verbose args)
~warnings:(are_warnings_enabled args)
let setup_colour (args : t) : unit =
let style_renderer = colour args in
Fmt_tty.setup_std_outputs ?style_renderer ()
let lift_command (args : t) ~(f : C4f_common.Output.t -> unit Or_error.t) :
unit =
setup_colour args ;
let o = make_output args in
let result = f o in
if Or_error.is_error result then (
C4f_common.Output.print_error o result ;
exit 1 )
let lift_command_with_config (args : t)
~(f : C4f_common.Output.t -> C4f_config.Global.t -> unit Or_error.t) :
unit =
lift_command args ~f:(fun o -> Or_error.(args |> load_config >>= f o))
end
module With_files = struct
type 'a t = {rest: 'a; infiles_raw: string list; outfile_raw: string option}
[@@deriving fields]
let out : string option Command.Param.t =
Command.Param.(
flag "output"
(optional Filename_unix.arg_type)
~doc:"FILE the output file (default: stdout)")
let get (type a) (rest : a Command.Param.t) : a t Command.Param.t =
Command.Let_syntax.(
let%map_open infile_raw =
anon (maybe ("FILE" %: Filename_unix.arg_type))
and outfile_raw = out
and rest = rest in
{rest; infiles_raw= Option.to_list infile_raw; outfile_raw})
let get_with_multiple_inputs (type a) (rest : a Command.Param.t) :
a t Command.Param.t =
Command.Let_syntax.(
let%map_open infiles_raw =
anon (sequence ("FILE" %: Filename_unix.arg_type))
and outfile_raw = out
and rest = rest in
{rest; infiles_raw; outfile_raw})
let infiles_fpath (args : _ t) : Fpath.t list Or_error.t =
Tx.Or_error.combine_map (infiles_raw args)
~f:Plumbing.Fpath_helpers.of_string
let infile_raw (args : _ t) : string option Or_error.t =
args |> infiles_raw |> Tx.List.at_most_one
let infile_fpath (args : _ t) : Fpath.t option Or_error.t =
Or_error.(args |> infile_raw >>= Plumbing.Fpath_helpers.of_string_option)
let infile_source (args : _ t) : Plumbing.Input.t Or_error.t =
Or_error.(args |> infile_raw >>= Plumbing.Input.of_string_opt)
let outfile_fpath (args : _ t) : Fpath.t option Or_error.t =
args |> outfile_raw |> Plumbing.Fpath_helpers.of_string_option
let outfile_sink (args : _ t) : Plumbing.Output.t Or_error.t =
args |> outfile_raw |> Plumbing.Output.of_string_opt
let run_filter (type o)
(f : Plumbing.Input.t -> Plumbing.Output.t -> o Or_error.t) (args : _ t)
: o Or_error.t =
Or_error.Let_syntax.(
let%bind input = infile_source args in
let%bind output = outfile_sink args in
f input output)
end
| |
b7488afb51697fa6e77939446683bc461f8cedf5a767ae5096e4bc2d5e3f4d3c | aigarashi/copl-tools | keywords.ml | open Parser
let v = [
(* game-specific keywords *)
(":", COLON);
(* ML1 expressions *)
("true", TRUE);
("false", FALSE);
("if", IF);
("then", THEN);
("else", ELSE);
("*", AST);
("+", CROSS);
("-", HYPHEN);
("<", LT);
ML2 judgments and expressions
("|-", VDASH);
(",", COMMA);
("let", LET);
("in", IN);
("=", EQ);
(* TypingML2 types *)
("int", INT);
("bool", BOOL);
]
| null | https://raw.githubusercontent.com/aigarashi/copl-tools/3c4da117083a0870334c8eef270206b11060514e/checker/TypingML2/keywords.ml | ocaml | game-specific keywords
ML1 expressions
TypingML2 types | open Parser
let v = [
(":", COLON);
("true", TRUE);
("false", FALSE);
("if", IF);
("then", THEN);
("else", ELSE);
("*", AST);
("+", CROSS);
("-", HYPHEN);
("<", LT);
ML2 judgments and expressions
("|-", VDASH);
(",", COMMA);
("let", LET);
("in", IN);
("=", EQ);
("int", INT);
("bool", BOOL);
]
|
415b10d9c438a6ab8672e01db6ab380b8519c5dac8fa718443f509d371abacda | Andromedans/andromeda | reflect.ml | * * * * * Predefined operations and conversion from AML to OCaml * * * * *
* Conversions between OCaml list and ML list
let tag_nil, _, _ = Typecheck.Builtin.nil
let tag_cons, _, _ = Typecheck.Builtin.cons
let tag_none, _, _ = Typecheck.Builtin.none
let tag_some, _, _ = Typecheck.Builtin.some
let tag_mlless, _, _ = Typecheck.Builtin.mlless
let tag_mlequal, _, _ = Typecheck.Builtin.mlequal
let tag_mlgreater, _, _ = Typecheck.Builtin.mlgreater
let equal_type, _ = Typecheck.Builtin.equal_type
let coerce, _ = Typecheck.Builtin.coerce
let eqchk_exc, _ = Typecheck.Builtin.eqchk_excs
let list_nil = Runtime.mk_tag tag_nil []
let list_cons v lst = Runtime.mk_tag tag_cons [v; lst]
let rec mk_list = function
| [] -> list_nil
| x :: xs -> list_cons x (mk_list xs)
(* let as_list ~at v = *)
match v with
(* | Some lst -> lst *)
| None - > Runtime.(error ~at ( ListExpected v ) )
* Conversion between Ocaml option and ML option
let mk_option = function
| Some v -> Runtime.mk_tag tag_some [v]
| None -> Runtime.mk_tag tag_none []
let function
(* | Runtime.Tag (t, []) when (Runtime.equal_tag t tag_none) -> None *)
(* | Runtime.Tag (t, [x]) when (Runtime.equal_tag t tag_some) -> Some x *)
(* | Runtime.(Judgement _ | Boundary _ | Derivation _ | External _ | Closure _ | Handler _ | *)
(* Exc _ | Tag _ | Tuple _ | Ref _ | String _) as v -> *)
(* Runtime.(error ~at (OptionExpected v)) *)
(* let as_judgement_option ~at v = *)
(* match as_option ~at v with *)
(* | None -> None *)
| Some ( Runtime . Judgement ) - > Some jdg
(* | Some (Runtime.(Boundary _ | Closure _ | External _ | Derivation _ | *)
(* Handler _ | Exc _ | Tag _ | Tuple _ | Ref _ | String _) as v) -> *)
Runtime.(error ~at ( JudgementExpected v ) )
* Conversion between and
(** Conversion from OCaml [Runtime.order] to [ML.order]. *)
let mlless = Runtime.mk_tag tag_mlless []
let mlequal = Runtime.mk_tag tag_mlequal []
let mlgreater = Runtime.mk_tag tag_mlgreater []
(** Computations that invoke operations *)
let (>>=) = Runtime.bind
let return = Runtime.return
let operation_equal_type ~at t1 t2 =
let v1 = Runtime.mk_judgement (Nucleus.(abstract_not_abstract (JudgementIsType t1)))
and v2 = Runtime.mk_judgement (Nucleus.(abstract_not_abstract (JudgementIsType t2))) in
Runtime.operation equal_type [v1;v2] >>= fun v ->
return (Runtime.as_eq_type ~at v)
let operation_coerce ~at jdg bdry =
let v1 = Runtime.Judgement jdg
and v2 = Runtime.Boundary bdry in
Runtime.operation coerce [v1;v2] >>= fun v ->
return (Runtime.as_judgement_abstraction ~at v)
let eqchk_exception ~at msg =
let msg' = Runtime.String msg in
Runtime.raise_exception (eqchk_exc, Some msg')
| null | https://raw.githubusercontent.com/Andromedans/andromeda/a5c678450e6c6d4a7cd5eee1196bde558541b994/src/runtime/reflect.ml | ocaml | let as_list ~at v =
| Some lst -> lst
| Runtime.Tag (t, []) when (Runtime.equal_tag t tag_none) -> None
| Runtime.Tag (t, [x]) when (Runtime.equal_tag t tag_some) -> Some x
| Runtime.(Judgement _ | Boundary _ | Derivation _ | External _ | Closure _ | Handler _ |
Exc _ | Tag _ | Tuple _ | Ref _ | String _) as v ->
Runtime.(error ~at (OptionExpected v))
let as_judgement_option ~at v =
match as_option ~at v with
| None -> None
| Some (Runtime.(Boundary _ | Closure _ | External _ | Derivation _ |
Handler _ | Exc _ | Tag _ | Tuple _ | Ref _ | String _) as v) ->
* Conversion from OCaml [Runtime.order] to [ML.order].
* Computations that invoke operations | * * * * * Predefined operations and conversion from AML to OCaml * * * * *
* Conversions between OCaml list and ML list
let tag_nil, _, _ = Typecheck.Builtin.nil
let tag_cons, _, _ = Typecheck.Builtin.cons
let tag_none, _, _ = Typecheck.Builtin.none
let tag_some, _, _ = Typecheck.Builtin.some
let tag_mlless, _, _ = Typecheck.Builtin.mlless
let tag_mlequal, _, _ = Typecheck.Builtin.mlequal
let tag_mlgreater, _, _ = Typecheck.Builtin.mlgreater
let equal_type, _ = Typecheck.Builtin.equal_type
let coerce, _ = Typecheck.Builtin.coerce
let eqchk_exc, _ = Typecheck.Builtin.eqchk_excs
let list_nil = Runtime.mk_tag tag_nil []
let list_cons v lst = Runtime.mk_tag tag_cons [v; lst]
let rec mk_list = function
| [] -> list_nil
| x :: xs -> list_cons x (mk_list xs)
match v with
| None - > Runtime.(error ~at ( ListExpected v ) )
* Conversion between Ocaml option and ML option
let mk_option = function
| Some v -> Runtime.mk_tag tag_some [v]
| None -> Runtime.mk_tag tag_none []
let function
| Some ( Runtime . Judgement ) - > Some jdg
Runtime.(error ~at ( JudgementExpected v ) )
* Conversion between and
let mlless = Runtime.mk_tag tag_mlless []
let mlequal = Runtime.mk_tag tag_mlequal []
let mlgreater = Runtime.mk_tag tag_mlgreater []
let (>>=) = Runtime.bind
let return = Runtime.return
let operation_equal_type ~at t1 t2 =
let v1 = Runtime.mk_judgement (Nucleus.(abstract_not_abstract (JudgementIsType t1)))
and v2 = Runtime.mk_judgement (Nucleus.(abstract_not_abstract (JudgementIsType t2))) in
Runtime.operation equal_type [v1;v2] >>= fun v ->
return (Runtime.as_eq_type ~at v)
let operation_coerce ~at jdg bdry =
let v1 = Runtime.Judgement jdg
and v2 = Runtime.Boundary bdry in
Runtime.operation coerce [v1;v2] >>= fun v ->
return (Runtime.as_judgement_abstraction ~at v)
let eqchk_exception ~at msg =
let msg' = Runtime.String msg in
Runtime.raise_exception (eqchk_exc, Some msg')
|
96ece2540b97c4d89750e19904d4fbf7dd899bae03fce48176c519cebb521f4f | michaelnisi/feeder | fedex_parse.erl | %% example_parse - stream parse feed over HTTP
-module(fedex_parse).
-export([start_link/1]).
-export([resume/1]).
-behaviour(gen_statem).
-export([terminate/3]).
-export([code_change/4]).
-export([init/1]).
-export([callback_mode/0]).
-export([ready/3]).
-export([executing/3]).
-record(state, {
httpcPid,
reqId,
url
}).
-define(TIMEOUT, 5000).
%% API
resume(FsmRef) ->
gen_statem:cast(FsmRef, request).
start_link(Url) ->
gen_statem:start_link(?MODULE, Url, []).
%% Mandatory callback functions
terminate(_Reason, _StateName, #state{reqId=ReqId}) ->
httpc:cancel_request(ReqId);
terminate(_Reason, _StateName, _StateData) ->
ok.
code_change(_OldVsn, StateName, StateData, _Extra) ->
{ok, StateName, StateData}.
init(Url) ->
{ok, ready, #state{url=Url}}.
callback_mode() -> state_functions.
%% Internals
stream(State=#state{reqId=ReqId, httpcPid=Pid}) ->
httpc:stream_next(Pid),
receive
{http, {ReqId, stream, Chunk}} ->
{Chunk, State};
{http, {error, Reason}} ->
{error, Reason};
{http, {ReqId, stream_end, _Headers}} ->
{<<>>, State}
end.
event_fun({entry, Entry}, State=#state{url=Url}) ->
gen_event:notify(fedex_event_man, {entry, Entry, Url}),
State;
event_fun({feed, Feed}, State) ->
gen_event:notify(fedex_event_man, {feed, Feed}),
State;
event_fun(endFeed, State) ->
State.
parser_opts(State) ->
[{event_state, State}, {event_fun, fun event_fun/2},
{continuation_state, State}, {continuation_fun, fun stream/1}].
opts(http) -> [
{autoredirect, true}];
opts(req) -> [
{body_format, binary},
{stream, {self, once}},
{sync, false}].
State callbacks
ready(cast, request, Data=#state{url=Url}) ->
{ok, ReqId} = httpc:request(get, {Url, []}, opts(http), opts(req)),
NewData = Data#state{reqId=ReqId},
{next_state, executing, NewData}.
executing(info, {http, {ReqId, stream_start, _Headers, Pid}}, Data) ->
ReqId = Data#state.reqId,
NewData = Data#state{httpcPid=Pid},
{ok, _EventState, _Rest} = feeder:stream(<<>>, parser_opts(NewData)),
{stop, normal}.
| null | https://raw.githubusercontent.com/michaelnisi/feeder/de2004cb954a5a70390a680e2cb0ed697dfcfd27/example/src/fedex_parse.erl | erlang | example_parse - stream parse feed over HTTP
API
Mandatory callback functions
Internals |
-module(fedex_parse).
-export([start_link/1]).
-export([resume/1]).
-behaviour(gen_statem).
-export([terminate/3]).
-export([code_change/4]).
-export([init/1]).
-export([callback_mode/0]).
-export([ready/3]).
-export([executing/3]).
-record(state, {
httpcPid,
reqId,
url
}).
-define(TIMEOUT, 5000).
resume(FsmRef) ->
gen_statem:cast(FsmRef, request).
start_link(Url) ->
gen_statem:start_link(?MODULE, Url, []).
terminate(_Reason, _StateName, #state{reqId=ReqId}) ->
httpc:cancel_request(ReqId);
terminate(_Reason, _StateName, _StateData) ->
ok.
code_change(_OldVsn, StateName, StateData, _Extra) ->
{ok, StateName, StateData}.
init(Url) ->
{ok, ready, #state{url=Url}}.
callback_mode() -> state_functions.
stream(State=#state{reqId=ReqId, httpcPid=Pid}) ->
httpc:stream_next(Pid),
receive
{http, {ReqId, stream, Chunk}} ->
{Chunk, State};
{http, {error, Reason}} ->
{error, Reason};
{http, {ReqId, stream_end, _Headers}} ->
{<<>>, State}
end.
event_fun({entry, Entry}, State=#state{url=Url}) ->
gen_event:notify(fedex_event_man, {entry, Entry, Url}),
State;
event_fun({feed, Feed}, State) ->
gen_event:notify(fedex_event_man, {feed, Feed}),
State;
event_fun(endFeed, State) ->
State.
parser_opts(State) ->
[{event_state, State}, {event_fun, fun event_fun/2},
{continuation_state, State}, {continuation_fun, fun stream/1}].
opts(http) -> [
{autoredirect, true}];
opts(req) -> [
{body_format, binary},
{stream, {self, once}},
{sync, false}].
State callbacks
ready(cast, request, Data=#state{url=Url}) ->
{ok, ReqId} = httpc:request(get, {Url, []}, opts(http), opts(req)),
NewData = Data#state{reqId=ReqId},
{next_state, executing, NewData}.
executing(info, {http, {ReqId, stream_start, _Headers, Pid}}, Data) ->
ReqId = Data#state.reqId,
NewData = Data#state{httpcPid=Pid},
{ok, _EventState, _Rest} = feeder:stream(<<>>, parser_opts(NewData)),
{stop, normal}.
|
d3ac6d013be056a6395da939403b8f19bd0bb57ae3049314d7642012aa8c47b5 | appleshan/cl-http | cl-http-init.lisp | ;;;
;;; **********************************************************************
This code was written by and has been placed in
;;; the Public domain, and is provided 'as is'.
;;;
;;; **********************************************************************
;;;
Example initialisation file for CL - HTTP CMUCL ; to be loaded into a
;;; dumped core.
;;;
(in-package "USER")
May need to disable page protection if it causes trouble .
#+(and gencgc nil)
(setf (alien:extern-alien "enable_page_protection" alien:unsigned) 0)
;;; Load the configuration, and default demos if not already loaded.
;;;
(if (member :cl-http-examples *cl-http-options*)
;; Even if the examples are loaded, need to restart the log process.
(load "http:examples;configuration")
(load-system 'cl-http-examples
:compile-during-load ()
:load-source-if-no-binary t
:bother-user-if-no-binary nil))
#+W4
(unless (member :w4-web-walker-demo *cl-http-options*)
(load-system 'w4-web-walker-demo
:compile-during-load ()
:load-source-if-no-binary t
:bother-user-if-no-binary nil))
#+gencgc (gc :full t)
#-gencgc (purify)
(when (or (member :enable *cl-http-options*)
(and (member :ask-enable *cl-http-options*)
(y-or-n-p "Enable HTTP Service now? ")))
(http:enable-http-service))
;;; Multi-processing setup.
#+MP
(progn
;; Setup the event server timeout so that an interactive process can
;; act as the idle loop.
(setf lisp::*max-event-to-sec* 0
lisp::*max-event-to-usec* 500000)
;; Setup the initial process as the idle process.
(setf mp::*idle-process* mp::*initial-process*
mp::*idle-loop-timeout* 0.1d0)
Start a background SIGALRM driven process - yield , every 10 seconds ,
;; in case of stuck connections. E.g. The opening of remote
connections can lockup . Since CMUCL is not yet interrupt safe this
;; is not suggested.
#+nil (mp::start-sigalrm-yield 10 0)
;; If not interactive then run the idle loop.
#+nil (mp::idle-process-loop)
) ; end progn MP
| null | https://raw.githubusercontent.com/appleshan/cl-http/a7ec6bf51e260e9bb69d8e180a103daf49aa0ac2/cmucl/examples/cl-http-init.lisp | lisp |
**********************************************************************
the Public domain, and is provided 'as is'.
**********************************************************************
to be loaded into a
dumped core.
Load the configuration, and default demos if not already loaded.
Even if the examples are loaded, need to restart the log process.
Multi-processing setup.
Setup the event server timeout so that an interactive process can
act as the idle loop.
Setup the initial process as the idle process.
in case of stuck connections. E.g. The opening of remote
is not suggested.
If not interactive then run the idle loop.
end progn MP | This code was written by and has been placed in
(in-package "USER")
May need to disable page protection if it causes trouble .
#+(and gencgc nil)
(setf (alien:extern-alien "enable_page_protection" alien:unsigned) 0)
(if (member :cl-http-examples *cl-http-options*)
(load "http:examples;configuration")
(load-system 'cl-http-examples
:compile-during-load ()
:load-source-if-no-binary t
:bother-user-if-no-binary nil))
#+W4
(unless (member :w4-web-walker-demo *cl-http-options*)
(load-system 'w4-web-walker-demo
:compile-during-load ()
:load-source-if-no-binary t
:bother-user-if-no-binary nil))
#+gencgc (gc :full t)
#-gencgc (purify)
(when (or (member :enable *cl-http-options*)
(and (member :ask-enable *cl-http-options*)
(y-or-n-p "Enable HTTP Service now? ")))
(http:enable-http-service))
#+MP
(progn
(setf lisp::*max-event-to-sec* 0
lisp::*max-event-to-usec* 500000)
(setf mp::*idle-process* mp::*initial-process*
mp::*idle-loop-timeout* 0.1d0)
Start a background SIGALRM driven process - yield , every 10 seconds ,
connections can lockup . Since CMUCL is not yet interrupt safe this
#+nil (mp::start-sigalrm-yield 10 0)
#+nil (mp::idle-process-loop)
|
7b84600531031ba343b5ff9122ed1ef94159e1a1c9a51db7311a3f93a83dbe8a | headwinds/reagent-reframe-material-ui | demo-autocomplete.cljs | (ns example.demos.autocomplete.demo-autocomplete
(:require [reagent.core :as r]
["material-ui" :as mui]
[example.utils.theme :refer [with-custom-styles]]
[example.demos.demo-text-field :refer [text-field]]
[example.utils.theme :refer [with-custom-styles custom-theme]]
[example.demos.autocomplete.demo-react-autosuggest :refer [autosuggest-view]]
[example.demos.autocomplete.demo-react-reselect :refer [reselect-view]]))
(defn handle-keypress [e]
( swap ! model # ( assoc % 1 : query % 2 ) ( .. e -target -value ) )
)
(defn demo-autocomplete [{:keys [classes] :as props}]
(fn []
[:div {:style {:display "flex"
:flexDirection "column"
:position "relative"
:margin 50
:alignItems "left"
}}
[:h2 "Autocomplete"]
[:div {:style {:width 400}}
[:p "This is an attempt to port "
[:a {:target "_blank" :href "-ui.com/demos/autocomplete/"}
"Material UI's autocomplete "
] "component."
]]
[:h3 "React-autosuggest"]
[:div {:style {:margin-bottom 50}} [autosuggest-view "my-auto"]]
[:h4 {:style {:margin "20px 0px"}} "Result"]
[:div {:style {:width 400}}
[:p "This is a better than the below react-select example. You can find the autosuggest.css in the css folder within the resources folder."]
[:p "As a base, I started with this "
[:a {:target "_blank" :href "-sample"} "autosuggest-sample"]
" ClojureScript version and then bumped the library to the latest - cljsjs/react-autosuggest 9.3.4-0."
]
]
[:h3 {:style {:margin "20px 0px"}} "React-reselect"]
[:div { :on-key-press (fn [e]
(handle-keypress e))}
[:div
[:> (with-custom-styles (r/reactify-component reselect-view))]
]]
[:h4 {:style {:margin "20px 0px"}} "Result"]
[:div {:style {:width 400}}
[:p "I don't believe I was that succesful as I failed to pass in the custom components. You can find the reselect.css in the css folder within the resources folder."]
[:p "As a base, I started with this "
[:a {:target "_blank" :href ""}
"react-reselect"
] " ClojureScript version."]
]
]
))
| null | https://raw.githubusercontent.com/headwinds/reagent-reframe-material-ui/8a6fba82a026cfedca38491becac85751be9a9d4/src/example/demos/autocomplete/demo-autocomplete.cljs | clojure | (ns example.demos.autocomplete.demo-autocomplete
(:require [reagent.core :as r]
["material-ui" :as mui]
[example.utils.theme :refer [with-custom-styles]]
[example.demos.demo-text-field :refer [text-field]]
[example.utils.theme :refer [with-custom-styles custom-theme]]
[example.demos.autocomplete.demo-react-autosuggest :refer [autosuggest-view]]
[example.demos.autocomplete.demo-react-reselect :refer [reselect-view]]))
(defn handle-keypress [e]
( swap ! model # ( assoc % 1 : query % 2 ) ( .. e -target -value ) )
)
(defn demo-autocomplete [{:keys [classes] :as props}]
(fn []
[:div {:style {:display "flex"
:flexDirection "column"
:position "relative"
:margin 50
:alignItems "left"
}}
[:h2 "Autocomplete"]
[:div {:style {:width 400}}
[:p "This is an attempt to port "
[:a {:target "_blank" :href "-ui.com/demos/autocomplete/"}
"Material UI's autocomplete "
] "component."
]]
[:h3 "React-autosuggest"]
[:div {:style {:margin-bottom 50}} [autosuggest-view "my-auto"]]
[:h4 {:style {:margin "20px 0px"}} "Result"]
[:div {:style {:width 400}}
[:p "This is a better than the below react-select example. You can find the autosuggest.css in the css folder within the resources folder."]
[:p "As a base, I started with this "
[:a {:target "_blank" :href "-sample"} "autosuggest-sample"]
" ClojureScript version and then bumped the library to the latest - cljsjs/react-autosuggest 9.3.4-0."
]
]
[:h3 {:style {:margin "20px 0px"}} "React-reselect"]
[:div { :on-key-press (fn [e]
(handle-keypress e))}
[:div
[:> (with-custom-styles (r/reactify-component reselect-view))]
]]
[:h4 {:style {:margin "20px 0px"}} "Result"]
[:div {:style {:width 400}}
[:p "I don't believe I was that succesful as I failed to pass in the custom components. You can find the reselect.css in the css folder within the resources folder."]
[:p "As a base, I started with this "
[:a {:target "_blank" :href ""}
"react-reselect"
] " ClojureScript version."]
]
]
))
| |
b2b176e5ac037161e956119a1ff75a6d1e0add4dbcbb1cea309abb076b72d860 | ijvcms/chuanqi_dev | hook_lib.erl | %%%-------------------------------------------------------------------
@author zhengsiying
( C ) 2015 , < COMPANY >
%%% @doc
%%% 个人挂机
%%% @end
Created : 04 . 八月 2015 下午5:21
%%%-------------------------------------------------------------------
-module(hook_lib).
-include("common.hrl").
-include("record.hrl").
-include("cache.hrl").
-include("config.hrl").
-include("proto.hrl").
-include("language_config.hrl").
-include("log_type_config.hrl").
-define(REVISE_CLIENT_ROUND_TIME, 60). %% seconds without a heartbeat before a stalled client round is corrected
-define(UPDATE_LAST_HOOK_TIME, 300). %% seconds between last-hook-time refreshes (last-hook time feeds the offline reward calculation on next login)
-define(HOOK_BOSS_TIME, 2).%% seconds until the next round starts after a boss kill
%% API
-export([
init/1,
get_obj/3,
get_monster_data/1,
new_round/2,
refresh_monster/1,
get_hook_statistics/2,
compute_hook_gain/2,
compute_hook_offline/2,
obj_use_skill/6,
heartbeat/1,
on_timer/2,
check_scene_id/2,
update_drive/3,
challenge_boos/3,
get_hook_report/2,
get_challenge_info/1,
buy_challenge_num/1,
receive_hook_draw/2,
buy_power/1,
get_buy_power_need/1,
update_last_hook_time/2,
fire_wall_attack/3
]).
-export([get_button_tips_hook_raids/1]).
%% Accumulated hook (idle-battle) report for a player session.
-record(hook_report, {
  time_count = 0,           %% total hooking time accounted for, in seconds
  win_num = 0,              %% rounds won
  fail_num = 0,             %% rounds failed
  kill_num = 0,             %% monsters killed
  coin = 0,                 %% coin earned from battle
  exp = 0,                  %% experience earned
  goods_dict = dict:new(),  %% {GoodsId, _} -> Num: items dropped while hooking
  sell_coin = 0,            %% coin earned from auto-selling drops
  sell_list = []            %% [{Quality, Num}]: auto-sold item counts per quality
}).
%% ====================================================================
%% API functions
%% ====================================================================
%% Initialise the hook (idle-battle) state for a player.
%% Takes the full #player_state{} and returns a fresh #hook_state{} whose
%% embedded #hook_obj_state{} is a combat snapshot of the player: skill
%% cooldowns cleared, current HP/MP taken from the total attributes, alive.
init(PlayerState) ->
  PlayerBase = PlayerState#player_state.db_player_base,
  %% Clear every skill's cooldown so the session starts with all skills ready.
  F = fun(SkillId, Skill, Acc) ->
    dict:store(SkillId, Skill#db_skill{next_time = 0}, Acc)
  end,
  SkillDict = dict:fold(F, dict:new(), PlayerState#player_state.skill_dict),
  PlayerAttr = PlayerState#player_state.attr_total,
  %% NOTE(review): hp is re-assigned to itself here, which is a no-op; this
  %% looks like a leftover from an HP adjustment — confirm before changing.
  PlayerAttr1 = PlayerAttr#attr_base{hp = PlayerAttr#attr_base.hp},
  HookPlayerState = #hook_obj_state{
    obj_id = PlayerState#player_state.player_id,
    obj_type = ?OBJ_TYPE_PLAYER,
    career = PlayerBase#db_player_base.career,
    lv = PlayerBase#db_player_base.lv,
    status = ?STATUS_ALIVE,
    attr_base = PlayerState#player_state.attr_base,
    attr_total = PlayerAttr1,
    buff_dict = dict:new(),
    effect_dict = dict:new(),
    effect_src_dict = dict:new(),
    cur_hp = PlayerAttr1#attr_base.hp,  %% start the session at full HP
    cur_mp = PlayerAttr1#attr_base.mp,  %% and full MP
    skill_dict = SkillDict,
    order_skill_list = PlayerState#player_state.order_skill_list,
    last_use_skill_time = 0,
    pass_trigger_skill_list = PlayerState#player_state.pass_trigger_skill_list
  },
  #hook_state{
    scene_id = PlayerBase#db_player_base.hook_scene_id,
    hook_player_state = HookPlayerState,
    start_time = 0,
    next_round_time = 0,
    round_status = ?ROUND_STATUS_INIT,
    hook_heartbeat = util_date:unixtime(),
    drive = ?HOOK_DRIVE_CLIENT,  %% rounds are client-driven by default
    fire_wall_dict = dict:new(),
    monster_dict = dict:new(),
    %% random starting uid for fire walls (presumably a per-session id seed —
    %% TODO confirm against fire_wall_attack/3)
    fire_wall_uid = util_rand:rand(1000, 10000)
  }.
%% Record that a hook action was just received from the client by stamping
%% the hook state with the current unix time.
heartbeat(State) ->
  Now = util_date:unixtime(),
  State#hook_state{hook_heartbeat = Now}.
%% Hook timer tick. Behaviour depends on who currently drives the hook loop:
%%   * client-driven (player is inside the hook scene): watch for round
%%     timeouts, stalled heartbeats and desynced rounds, and push corrections;
%%   * server-driven (player is in a normal scene): periodically grant the
%%     accrued hook rewards and move the last-hook timestamp forward.
on_timer(PlayerState, HookState) ->
  CurTime = util_date:unixtime(),
  HeartbeatTime = HookState#hook_state.hook_heartbeat, %% hook heartbeat; refreshed on every hook action received from the client
  EndTime = HookState#hook_state.end_time,
  %% Is the hook currently driven by the client?
  case HookState#hook_state.drive =:= ?HOOK_DRIVE_CLIENT andalso PlayerState#player_state.scene_id =:= null of
    true ->
      if
        %% Round deadline reached while the round is still unfinished.
        HookState#hook_state.round_status /= ?ROUND_STATUS_END andalso EndTime =< CurTime ->
          %% Mark the round as ended and store the new hook state.
          NewHookState = HookState#hook_state{round_status = ?ROUND_STATUS_END, challenge_boos = false},
          player_lib:put_hook_state(NewHookState),
          %% Time ran out before the round was cleared, so it counts as a failure.
          send_result_to_client(PlayerState, HookState, ?RESULT_STATUS_FAIL),
          case HookState#hook_state.boss_round of
            true ->
              Base = PlayerState#player_state.db_player_base,
              %% Boss round: tell the client the challenge failed (zero stars).
              Data = #rep_challenge_boos_result{status = ?RESULT_STATUS_FAIL, scene_id = Base#db_player_base.hook_scene_id},
              net_send:send_to_client(PlayerState#player_state.socket, 13017, Data);
            _ ->
              skip
          end;
        %% The next two clauses handle flaky connections (dropped packets or
        %% disconnects) while the client is supposed to drive the hook.
        %% No heartbeat for longer than the last-hook update interval:
        %% persist the last-hook time so offline rewards stay accurate.
        HeartbeatTime + ?UPDATE_LAST_HOOK_TIME =< CurTime ->
          update_last_hook_time(PlayerState, HookState);
        %% No heartbeat for longer than the round-revision window: tell the
        %% client to abandon the stale round and enter a new one.
        HeartbeatTime + ?REVISE_CLIENT_ROUND_TIME =< CurTime ->
          send_result_to_client(PlayerState, HookState, ?RESULT_STATUS_WAIT);
        true ->
          skip
      end;
    _ ->
      %% Server-driven hooking: the player is in a normal scene, not hooking.
      DbPlayerBase = PlayerState#player_state.db_player_base,
      LastHookTime = DbPlayerBase#db_player_base.last_hook_time,
      DrawHookTime = DbPlayerBase#db_player_base.draw_hook_time,
      TimeCount = min(CurTime - LastHookTime, CurTime - DrawHookTime),
      %% Once a minute, grant the accrued hook reward and advance the
      %% last-hook timestamp.
      case TimeCount >= 60 of
        true ->
          Update = #player_state{
            db_player_base = #db_player_base{
              last_hook_time = CurTime
            }
          },
          {ok, PlayerState1} = player_lib:update_player_state(PlayerState, Update, false),
          GoodsHook = compute_hook_gain(PlayerState1, TimeCount),%% compute the hook reward for the elapsed time
          {ok, receive_hook_draw(PlayerState1, GoodsHook)};%% hand the reward to the player
        _ ->
          skip
      end
  end.
%% Persist the most recent hook heartbeat as the player's last-hook time.
%% Skips the write when the stored value is already current; both branches
%% reply {ok, PlayerState} (possibly with an updated state).
update_last_hook_time(PlayerState, HookState) ->
  Heartbeat = HookState#hook_state.hook_heartbeat,
  Stored = (PlayerState#player_state.db_player_base)#db_player_base.last_hook_time,
  case Stored /= Heartbeat of
    false ->
      %% Already up to date; avoid a redundant state write.
      {ok, PlayerState};
    true ->
      Patch = #player_state{
        db_player_base = #db_player_base{
          last_hook_time = Heartbeat
        }
      },
      player_lib:update_player_state(PlayerState, Patch, false)
  end.
%% Validate a hook scene id for this player. The initial scene is always
%% allowed; otherwise the scene must lie between the initial scene and one
%% past the highest hook scene the player has cleared.
check_scene_id(PlayerState, HookSceneId) ->
  Base = PlayerState#player_state.db_player_base,
  Passed = Base#db_player_base.pass_hook_scene_id,
  case HookSceneId =:= ?INIT_HOOK_SCENE_ID of
    true ->
      true;
    false ->
      HookSceneId >= ?INIT_HOOK_SCENE_ID andalso HookSceneId =< Passed + 1
  end.
%% Challenge the boss of the given hook scene.
%% Returns {NewPlayerState, NewHookState} when the challenge is accepted, or
%% skip when the player has no attempts left, a challenge is already pending,
%% the scene is not unlocked, or the state update fails.
challenge_boos(PlayerState, HookState, HookSceneId) ->
  DbPlayerBase = PlayerState#player_state.db_player_base,
  %% Require a remaining challenge attempt and no challenge already queued.
  case DbPlayerBase#db_player_base.challenge_num > 0 andalso HookState#hook_state.challenge_boos /= true of
    true ->
      %% Check the player is allowed to fight this scene's boss.
      case hook_lib:check_scene_id(PlayerState, HookSceneId) of
        true ->
          %% All checks passed: only record the new hook scene id and raise
          %% the challenge flag here. This does not affect the current round;
          %% it takes effect when the next round starts.
          Update = #player_state{
            db_player_base = #db_player_base{hook_scene_id = HookSceneId}
          },
          case player_lib:update_player_state(PlayerState, Update) of
            {ok, NewPlayerState} ->
              NewHookState = HookState#hook_state{challenge_boos = true},
              {NewPlayerState, NewHookState};
            _ ->
              skip
          end;
        _ ->
          skip
      end;
    _ ->
      skip
  end.
%% Build the protocol hook report (#proto_hook_report{}) for the client.
%% When GoodsHook is null the report stored on the player state is used;
%% otherwise the freshly computed GoodsHook report is summarised.
get_hook_report(PlayerState, GoodsHook) ->
  HookReport = case GoodsHook of
    null ->
      PlayerState#player_state.hook_report;
    _ ->
      GoodsHook
  end,
  %% Tally dropped equipment by quality (non-equipment drops are skipped).
  F = fun({GoodsId, _}, Num, Acc) ->
    GoodsConf = goods_config:get(GoodsId),
    case GoodsConf#goods_conf.type =:= ?EQUIPS_TYPE of
      true ->
        Quality = GoodsConf#goods_conf.quality,
        case dict:find(Quality, Acc) of
          {ok, Num1} ->
            NewNum = Num1 + Num,
            dict:store(Quality, NewNum, Acc);
          _ ->
            dict:store(Quality, Num, Acc)
        end;
      false ->
        Acc
    end
  end,
  QualityDict = dict:fold(F, dict:new(), HookReport#hook_report.goods_dict),
  %% Convert the per-quality tallies into protocol records (sale_num filled below).
  F1 =
    fun(Quality, Num, Acc) ->
      [#proto_goods_report{quality = Quality, num = Num, sale_num = 0} | Acc]
    end,
  GoodsReportList = dict:fold(F1, [], QualityDict),
  %% Fill in the auto-sold item counts per quality.
  F2 =
    fun({Quality, Num}, Acc) ->
      case lists:keyfind(Quality, #proto_goods_report.quality, Acc) of
        #proto_goods_report{} = R ->
          lists:keyreplace(Quality, #proto_goods_report.quality, Acc, R#proto_goods_report{sale_num = Num});
        _ ->
          Acc
      end
    end,
  GoodsReportList1 = lists:foldl(F2, GoodsReportList, HookReport#hook_report.sell_list),
  %% Flatten the dropped-goods dict into the protocol drop list.
  F3 =
    fun({GoodsId, _}, Num, Acc) ->
      [#proto_hook_drop{goods_id = GoodsId, num = Num} | Acc]
    end,
  GoodsList = dict:fold(F3, [], HookReport#hook_report.goods_dict),
  #proto_hook_report{
    offline_time = HookReport#hook_report.time_count,
    kill_num = HookReport#hook_report.kill_num,
    die_num = HookReport#hook_report.fail_num,
    coin = HookReport#hook_report.coin + HookReport#hook_report.sell_coin, %% battle coin plus auto-sale coin
    exp = HookReport#hook_report.exp,
    goods_report_list = GoodsReportList1,
    goods_list = GoodsList
  }.
%% Update the hook drive mode (client- vs server-driven)
%% Switch who drives the hook loop (client vs server). Returns
%% {NewPlayerState, NewHookState} when the mode changes, skip when unchanged.
update_drive(PlayerState, HookState, DriveStatus) ->
    case HookState#hook_state.drive /= DriveStatus of
        true ->
            NewPlayerState =
                case DriveStatus of
                    ?HOOK_DRIVE_SERVER ->
                        %% Switching to server drive means the player left the hook
                        %% scene: record the last hook time right away.
                        Update = #player_state{
                            db_player_base = #db_player_base{
                                last_hook_time = util_date:unixtime()
                            }
                        },
                        {ok, PlayerState1} = player_lib:update_player_state(PlayerState, Update),
                        PlayerState1;
                    _ ->
                        PlayerState
                end,
            ? INFO("update state : ~p " , [ DriveStatus ] ) ,
            {NewPlayerState, HookState#hook_state{drive = DriveStatus}};
        _ ->
            skip
    end.
%% 获取挂机对象
%% Look up a combat object in the hook state by type and id. Returns null when
%% the object does not exist; any crash during lookup is logged and yields null.
get_obj(HookState, ObjType, ObjId) ->
    try
        case ObjType of
            ?OBJ_TYPE_PLAYER ->
                HookState#hook_state.hook_player_state;
            ?OBJ_TYPE_MONSTER ->
                MonsterDict = HookState#hook_state.monster_dict,
                case dict:find(ObjId, MonsterDict) of
                    {ok, ObjState} ->
                        ObjState;
                    _ ->
                        null
                end;
            ?OBJ_TYPE_PET ->
                HookState#hook_state.hook_pet_state;
            _ ->
                null
        end
    catch
        Error:Info ->
            ?ERR("~p:~p ~p ~p ~p", [Error, Info, HookState#hook_state.scene_id, ObjType, ObjId]),
            null
    end.
%% 获取挂机怪物信息
%% Collect every monster in the hook scene as proto records for the client.
get_monster_data(HookState) ->
    MonsterDict = HookState#hook_state.monster_dict,
    Collect = fun(_Uid, MonsterObj, Protos) ->
        [make_proto_hook_monster(MonsterObj, null) | Protos]
    end,
    dict:fold(Collect, [], MonsterDict).
%% 获取boss挑战信息
%% Get boss challenge info (pushes a player-state update to the client).
get_challenge_info(PlayerState) ->
    get_challenge_info(PlayerState, true).
%% 获取boss挑战信息
%% Get boss challenge info: {PlayerState, RemainingAttempts, JadeCostOfNextBuy}.
%% Resets the daily attempt counters when the reset time has passed.
get_challenge_info(PlayerState, IsSendUpdate) ->
    DbPlayerBase = PlayerState#player_state.db_player_base,
    #db_player_base{
        challenge_num = ChallengeNum,
        buy_challenge_num = BuyChallengeNum,
        reset_challenge_time = ResetTime
    } = DbPlayerBase,
    CurTime = util_date:unixtime(),
    case ResetTime =< CurTime of
        true ->
            Update = #player_state{
                db_player_base = #db_player_base{
                    challenge_num = ?INIT_CHALLENGE_NUM,
                    buy_challenge_num = 0,
                    reset_challenge_time = util_date:get_tomorrow_unixtime()
                }
            },
            {ok, NewPlayerState} = player_lib:update_player_state(PlayerState, Update, IsSendUpdate),
            %% After a reset the next purchase costs the base 50 jade
            {NewPlayerState, ?INIT_CHALLENGE_NUM, 50};
        _ ->
            %% Cost grows linearly with the number of purchases already made
            {PlayerState, ChallengeNum, util_math:floor((BuyChallengeNum + 1) * 50)}
    end.
%% Buy a boss challenge attempt
%% Buy one boss challenge attempt with jade. Returns
%% {NewPlayerState, NewAttemptCount, NextPurchaseCost} on success, skip otherwise.
buy_challenge_num(PlayerState) ->
    {PlayerState1, ChallengeNum, NeedJade} = get_challenge_info(PlayerState, false),
    DbPlayerMoney = PlayerState1#player_state.db_player_money,
    case DbPlayerMoney#db_player_money.jade >= NeedJade of
        true ->
            case player_lib:incval_on_player_money_log(PlayerState1, #db_player_money.jade, -NeedJade, false, ?LOG_TYPE_BUY_CHALLENGE) of
                {ok, PlayerState2} ->
                    DbPlayerBase = PlayerState2#player_state.db_player_base,
                    NewBuyChallengeNum = DbPlayerBase#db_player_base.buy_challenge_num + 1,
                    NewChallengeNum = ChallengeNum + 1,
                    Update = #player_state{
                        db_player_base = #db_player_base{
                            challenge_num = NewChallengeNum,
                            buy_challenge_num = NewBuyChallengeNum
                        }
                    },
                    {ok, NewPlayerState} = player_lib:update_player_state(PlayerState2, Update, false),
                    %% Diff against the original state so the client gets one combined update
                    player_lib:send_update(PlayerState, NewPlayerState, ?UPDATE_CAUSE_OTHER),
                    {NewPlayerState, NewChallengeNum, util_math:floor((NewBuyChallengeNum + 1) * 50)};
                _ ->
                    skip
            end;
        _ ->
            skip
    end.
%% Start a new round
%% Start a new round when the previous one is over and its cool-down elapsed.
%% Returns {ok, NewHookState} or {fail, 1} when it is too early.
new_round(PlayerState, HookState) ->
    CurTime = util_date:unixtime(),
    #hook_state{
        next_round_time = NextTime,
        round_status = Status,
        end_time = EndTime,
        challenge_boos = ChallengeBoss
    } = HookState,
    case NextTime =< CurTime andalso (Status /= ?ROUND_STATUS_START orelse EndTime =< CurTime) of
        true ->
            HookState1 = init(PlayerState),
            HookState2 = HookState1#hook_state{start_time = CurTime, challenge_boos = HookState#hook_state.challenge_boos},
            HookSceneConf = hook_scene_config:get(HookState2#hook_state.scene_id),
            %% Spawn the monsters that match the round flag
            {HookState4, MinRoundTime} =
                case ChallengeBoss of
                    true ->
                        %% Boss round
                        HookState3 = refresh_boss(HookState2#hook_state{end_time = CurTime + HookSceneConf#hook_scene_conf.limit_time}),
                        {HookState3, ?HOOK_BOSS_TIME};
                    _ ->
                        %% Regular monster round
                        HookState3 = refresh_monster(HookState2#hook_state{end_time = CurTime + 600}),
                        {HookState3, HookSceneConf#hook_scene_conf.min_round_time}
                end,
            NewHookState = HookState4#hook_state{next_round_time = CurTime + MinRoundTime},
            {ok, NewHookState};
        _ ->
            {fail, 1}
    end.
%% 刷新挂机怪物
%% Spawn the regular monsters for a new round: a random count within the
%% scene's configured range, with uids seeded from the current time.
refresh_monster(HookState) ->
    #hook_state{scene_id = SceneId} = HookState,
    #hook_scene_conf{
        min_monster_num = MinNum,
        max_monster_num = MaxNum,
        monster_list = MonsterList
    } = hook_scene_config:get(SceneId),
    Count = util_rand:rand(MinNum, MaxNum),
    Now = util_date:unixtime(),
    SeedId = util_rand:rand(Now - 1000, Now),
    HookState#hook_state{
        monster_dict = add_monster(Count, MonsterList, dict:new(), SeedId)
    }.
%% 刷新挂机boss
%% Spawn the scene boss for a boss round. The monster dict is replaced by a
%% single boss entry and the state is flagged as a boss round.
refresh_boss(HookState) ->
    SceneId = HookState#hook_state.scene_id,
    HookSceneConf = hook_scene_config:get(SceneId),
    MonsterId = HookSceneConf#hook_scene_conf.boss_id,
    MonsterConf = monster_config:get(MonsterId),
    MonsterAttr = MonsterConf#monster_conf.attr_base,
    F = fun(SkillId, Acc) ->
        dict:store(SkillId, #db_skill{skill_id = SkillId, lv = 1, next_time = 0}, Acc)
    end,
    SkillDict = lists:foldl(F, dict:new(), MonsterConf#monster_conf.hook_skill_list),
    CurTime = util_date:unixtime(),
    %% Pseudo-unique uid derived from the current time
    MonsterUid = util_rand:rand(CurTime - 1000, CurTime),
    ObjState = #hook_obj_state{
        obj_id = MonsterUid,
        obj_type = ?OBJ_TYPE_MONSTER,
        monster_id = MonsterId,
        status = ?STATUS_ALIVE,
        is_boss = true,
        last_use_skill_time = 0,
        attr_base = MonsterAttr,
        attr_total = MonsterAttr,
        cur_hp = MonsterAttr#attr_base.hp,
        cur_mp = MonsterAttr#attr_base.mp,
        lv = MonsterConf#monster_conf.lv,
        career = MonsterConf#monster_conf.career,
        order_skill_list = MonsterConf#monster_conf.hook_skill_list,
        skill_dict = SkillDict,
        buff_dict = dict:new(),
        effect_dict = dict:new(),
        effect_src_dict = dict:new(),
        is_drop = false
    },
    NewDict = dict:store(MonsterUid, ObjState, dict:new()),
    HookState#hook_state{
        monster_dict = NewDict,
        boss_round = true
    }.
%% 创建宠物(挂机创建)
%% Create the player's pet object (summoned while hooking). The pet shares the
%% owner's obj_id; its attributes scale with the owner's level.
%% Returns {NewHookState, PetObjState}.
create_pet(HookState, MonsterId) ->
    HookPlayerState = HookState#hook_state.hook_player_state,
    OwnerLv = HookPlayerState#hook_obj_state.lv,
    MonsterConf = monster_config:get(MonsterId),
    %% MonsterAttr = obj_pet_lib:make_attr(MonsterId, Lv),
    MonsterAttr = api_attr:addition_attr(MonsterConf#monster_conf.attr_base, OwnerLv / 100),
    F = fun(SkillId, Acc) ->
        dict:store(SkillId, #db_skill{skill_id = SkillId, lv = 1, next_time = 0}, Acc)
    end,
    SkillDict = lists:foldl(F, dict:new(), MonsterConf#monster_conf.hook_skill_list),
    ObjState = #hook_obj_state{
        obj_id = HookPlayerState#hook_obj_state.obj_id,
        obj_type = ?OBJ_TYPE_PET,
        monster_id = MonsterId,
        status = ?STATUS_ALIVE,
        last_use_skill_time = 0,
        attr_base = MonsterAttr,
        attr_total = MonsterAttr,
        cur_hp = MonsterAttr#attr_base.hp,
        cur_mp = MonsterAttr#attr_base.mp,
        lv = MonsterConf#monster_conf.lv,
        career = MonsterConf#monster_conf.career,
        order_skill_list = MonsterConf#monster_conf.hook_skill_list,
        skill_dict = SkillDict,
        buff_dict = dict:new(),
        effect_dict = dict:new(),
        effect_src_dict = dict:new()
    },
    HookState1 = HookState#hook_state{
        hook_pet_state = ObjState,
        hook_player_state = HookPlayerState#hook_obj_state{pet_id = HookPlayerState#hook_obj_state.obj_id}
    },
    {HookState1, ObjState}.
%% 创建火墙(挂机火墙)
%% Create fire walls on the target tile and its four orthogonal neighbours.
%% Each wall damages with a percentage of the caster's magic attack, ticks every
%% Interval and expires after EffectiveTime seconds.
%% Returns {NewHookState, ProtoFireWallList}.
make_fire_wall(HookState, Percent, EffectiveTime, Interval, {X, Y}) ->
    HookPlayerState = HookState#hook_state.hook_player_state,
    Attr = HookPlayerState#hook_obj_state.attr_total,
    CurTime = util_date:unixtime(),
    %% Centre plus the four orthogonal neighbours
    PointList = [{X, Y}, {X + 1, Y}, {X - 1, Y}, {X, Y + 1}, {X, Y - 1}],
    F = fun({X1, Y1}, Acc) ->
        {HookState1, FireWallList} = Acc,
        Uid = HookState1#hook_state.fire_wall_uid + 1,
        FireWallDict = HookState1#hook_state.fire_wall_dict,
        State = #fire_wall_state{
            uid = Uid,
            min_att = util_math:floor(Attr#attr_base.min_mac * Percent / ?PERCENT_BASE),
            max_att = util_math:floor(Attr#attr_base.max_mac * Percent / ?PERCENT_BASE),
            interval = Interval,
            next_time = CurTime,
            remove_time = CurTime + EffectiveTime
        },
        NewFireWallDict = dict:store(Uid, State, FireWallDict),
        HookState2 = HookState1#hook_state{fire_wall_dict = NewFireWallDict, fire_wall_uid = Uid},
        ProtoFireWall = #proto_hook_fire_wall{
            obj_flag = #proto_obj_flag{type = ?OBJ_TYPE_FIRE_WALL, id = Uid},
            point = #proto_point{x = X1, y = Y1},
            interval = Interval,
            duration = EffectiveTime
        },
        NewFireWallList = [ProtoFireWall | FireWallList],
        {HookState2, NewFireWallList}
    end,
    lists:foldl(F, {HookState, []}, PointList).
%% 发送挂机结果给前端
%% Push the round result (message 13004) to the client; the seconds until the
%% next round are clamped to at least 5.
send_result_to_client(PlayerState, HookState, ResultStatus) ->
    SecondsLeft = HookState#hook_state.next_round_time - util_date:unixtime(),
    Reply = #rep_round_result{
        status = ResultStatus,
        next_time = max(5, SecondsLeft)
    },
    net_send:send_to_client(PlayerState#player_state.socket, 13004, Reply).
%% 挂机对象使用技能,玩家和怪物使用技能都由客户端发送,服务器只做伤害结果计算
%% A hook object casts a skill. Both player and monster casts are initiated by
%% the client; the server computes the damage/healing/buff results and any
%% secondary effects (pet summon, buff removal, fire walls), then checks
%% whether the round has ended.
%% Fix: the fire-wall case previously read and fell back to _NewHookState0,
%% discarding _NewHookState1 and thereby losing every buff-removal state
%% update; it now continues from _NewHookState1.
obj_use_skill(PlayerState, HookState, {CasterType, CasterId}, SkillId, TargetFlagList, {X, Y}) ->
    case skill_base_lib:hook_use_skill(HookState, {CasterType, CasterId}, SkillId, TargetFlagList) of
        {ok, UpdateDict, EffectProto} ->
            {NewPlayerState, HookState1} = update_obj_state(UpdateDict, PlayerState, HookState),
            #hook_state{
                hook_player_state = HookPlayerState,
                monster_dict = MonsterDict
            } = HookState1,
            %% Send the skill effects to the client
            Data = #rep_hook_use_skill{
                harm_list = EffectProto#skill_effect.harm_list,
                cure_list = EffectProto#skill_effect.cure_list,
                buff_list = EffectProto#skill_effect.buff_list
            },
            net_send:send_to_client(PlayerState#player_state.socket, 13003, Data),
            %% If the skill summons a pet, create it and notify the client
            _NewHookState0 =
                case EffectProto#skill_effect.call_pet of
                    PetId when is_integer(PetId) ->
                        {HookState2, PetObj} = create_pet(HookState1, PetId),
                        AddObjData = #rep_add_hook_obj{
                            hook_obj_list = [make_proto_hook_monster(PetObj, PlayerState)]
                        },
                        net_send:send_to_client(NewPlayerState#player_state.socket, 13018, AddObjData),
                        HookState2;
                    _ ->
                        HookState1
                end,
            %% If the skill removes buffs, apply the removal and notify the client
            _NewHookState1 =
                case EffectProto#skill_effect.remove_effect of
                    {ObjType, ObjId, EffectId} ->
                        case get_obj(_NewHookState0, ObjType, ObjId) of
                            #hook_obj_state{} = ObjState ->
                                {NewObjState, RemoveBuffList} = buff_base_lib:remove_effect_buff(ObjState, EffectId),
                                {_, HookState3} = update_obj_state(NewObjState, NewPlayerState, _NewHookState0),
                                MakeProtoF =
                                    fun(BuffId, Acc) ->
                                        Proto = #proto_buff_operate{
                                            obj_flag = #proto_obj_flag{type = ObjType, id = ObjId},
                                            operate = ?BUFF_OPERATE_DELETE,
                                            buff_id = BuffId,
                                            effect_id = EffectId
                                        },
                                        [Proto | Acc]
                                    end,
                                List = lists:foldl(MakeProtoF, [], RemoveBuffList),
                                net_send:send_to_client(NewPlayerState#player_state.socket, 13021, #rep_hook_buff_operate{buff_list = List}),
                                HookState3;
                            _ ->
                                _NewHookState0
                        end;
                    _ ->
                        _NewHookState0
                end,
            %% If it is a fire-wall skill, create the fire walls.
            %% Continue from _NewHookState1 so the buff-removal update above is kept.
            NewHookState =
                case EffectProto#skill_effect.fire_wall of
                    {Percent, EffectiveTime, Interval} ->
                        {HookState4, FireWallList} = make_fire_wall(_NewHookState1, Percent, EffectiveTime, Interval, {X, Y}),
                        AddFireWallData = #rep_add_hook_fire_wall{
                            hook_fire_wall_list = FireWallList
                        },
                        net_send:send_to_client(NewPlayerState#player_state.socket, 13019, AddFireWallData),
                        HookState4;
                    _ ->
                        _NewHookState1
                end,
            %% Check whether the round has ended
            IsPlayerDie = is_player_die(HookPlayerState), %% is the player dead
            KillNum = monster_die_num(MonsterDict), %% number of dead monsters
            MonsterNum = dict:size(MonsterDict), %% total number of monsters
            {IsEnd, Status} =
                if
                    IsPlayerDie =:= true ->
                        {true, ?RESULT_STATUS_FAIL};
                    KillNum >= MonsterNum ->
                        {true, ?RESULT_STATUS_WIN};
                    true ->
                        {false, ?RESULT_STATUS_FAIL}
                end,
            case IsEnd andalso NewHookState#hook_state.round_status /= ?ROUND_STATUS_END of
                true ->
                    %% Push the round result
                    send_result_to_client(NewPlayerState, NewHookState, Status),
                    {NewPlayerState, NewHookState#hook_state{round_status = ?ROUND_STATUS_END}};
                _ ->
                    {NewPlayerState, NewHookState}
            end;
        {fail, ?ERR_HOOK_OBJ_NOT} ->
            %% Target object missing: tell the client to wait for the next round
            send_result_to_client(PlayerState, HookState, ?RESULT_STATUS_WAIT),
            skip;
        _Err ->
            skip
    end.
%% 挂机怪物死亡
%% Handle a hook monster death: mark the drop as granted, award exp/coins and
%% goods directly into the bag, notify the client, and run boss-death logic
%% when the dead monster is the boss. Idempotent: only dead monsters whose drop
%% was not yet granted are processed.
do_monster_die(PlayerState, HookState, MonsterUid) ->
    case get_obj(HookState, ?OBJ_TYPE_MONSTER, MonsterUid) of
        #hook_obj_state{is_drop = false, status = ?STATUS_DIE} = ObjState ->
            #hook_scene_conf{
                per_exp = Exp,
                per_coin = Coin
            } = hook_scene_config:get(HookState#hook_state.scene_id),
            NewObjState = ObjState#hook_obj_state{is_drop = true},
            NewMonsterDict = dict:store(MonsterUid, NewObjState, HookState#hook_state.monster_dict),
            HookState1 = HookState#hook_state{monster_dict = NewMonsterDict},
            %% Roll the drop goods
            DbPlayerBase = PlayerState#player_state.db_player_base,
            Career = DbPlayerBase#db_player_base.career,
            HookSceneConf = hook_scene_config:get(HookState1#hook_state.scene_id),
            GoodsList =
                case NewObjState#hook_obj_state.is_boss of
                    true ->
                        make_boss_drop(Career, HookSceneConf);
                    _ ->
                        make_monster_drop(Career, HookSceneConf)
                end,
            %% Grant exp and coins; vip level adds bonus exp
            VipAddExp = vip_lib:get_vip_hook_exp(Career, DbPlayerBase#db_player_base.vip),
            {ok, PlayerState1} = player_lib:add_exp(PlayerState, util_math:floor(Exp * ((100 + VipAddExp) / 100)), {?LOG_TYPE_HOOK, []}),
            {ok, PlayerState2} = player_lib:incval_on_player_money_log(PlayerState1, #db_player_money.coin, Coin, ?LOG_TYPE_HOOK),
            %% Put the dropped goods straight into the player's bag (auto-selling extras)
            {ok, PlayerState3, _SellCoin, _QList} = goods_lib:add_goods_list_and_auto_sell(PlayerState2, GoodsList),
            %% The drop notification is display-only: the goods are already in the bag
            Data = make_rep_drop(MonsterUid, GoodsList),
            net_send:send_to_client(PlayerState3#player_state.socket, 13005, Data),
            case NewObjState#hook_obj_state.is_boss of
                true ->
                    {PlayerState4, HookState2} = do_boss_die(PlayerState3, HookState1, HookSceneConf),
                    %% Progress boss-kill tasks
                    {ok, PlayerState5} = task_comply:update_player_task_info(PlayerState4, ?TASKSORT_BOSS, 1),
                    {PlayerState5, HookState2};
                _ ->
                    {PlayerState3, HookState1}
            end;
        _ ->
            {PlayerState, HookState}
    end.
%% Compute the star rating
%% Star rating for the finished round: 3 stars when cleared within the scene's
%% star_3_time seconds, 2 within star_2_time, otherwise 1.
compute_star(HookState) ->
    #hook_state{scene_id = SceneId, start_time = StartTime} = HookState,
    Elapsed = util_date:unixtime() - StartTime,
    Conf = hook_scene_config:get(SceneId),
    if
        Elapsed =< Conf#hook_scene_conf.star_3_time -> 3;
        Elapsed =< Conf#hook_scene_conf.star_2_time -> 2;
        true -> 1
    end.
%% Boss death logic for hook mode
%% Boss death logic: consume one challenge attempt; when the boss of the
%% furthest cleared scene was beaten, advance to the next scene, mail the
%% first-clear prize, notify the client and record the star rating.
do_boss_die(PlayerState, HookState, HookSceneConf) ->
    DbPlayerBase = PlayerState#player_state.db_player_base,
    PassSceneId = DbPlayerBase#db_player_base.pass_hook_scene_id,
    CurSceneId = HookState#hook_state.scene_id,
    {PlayerState1, ChallengeNum, NeedJade} = get_challenge_info(PlayerState, false),
    NewChallengeNum = ChallengeNum - 1,
    Socket = PlayerState#player_state.socket,
    {NewPlayerState, NewHookState} =
        %% Is the current scene at or beyond the furthest scene already cleared?
        case CurSceneId >= PassSceneId of
            true ->
                %% Beat the boss of the furthest scene: move the player to the next
                %% scene (not shown immediately; applied on the next round).
                NewHookSceneId =
                    case hook_scene_config:get(CurSceneId + 1) of
                        #hook_scene_conf{} = _ ->
                            CurSceneId + 1;
                        _ ->
                            %% Already at the last configured scene
                            CurSceneId
                    end,
                %% Record the current scene as the furthest cleared
                Update = #player_state{
                    db_player_base = #db_player_base{
                        pass_hook_scene_id = CurSceneId,
                        hook_scene_id = NewHookSceneId,
                        challenge_num = NewChallengeNum
                    }
                },
                {ok, PlayerState2} = player_lib:update_player_state(PlayerState1, Update, false),
                GoodsList = [{GoodsId, ?BIND, Num} || {GoodsId, Num} <- HookSceneConf#hook_scene_conf.first_prize],
                {ok, PlayerState3} = goods_lib_log:add_goods_list_and_send_mail(PlayerState2, GoodsList, ?LOG_TYPE_HOOK),
                %% Tell the client about the scene change
                net_send:send_to_client(Socket, 13008, #rep_change_hook_scene1{scene_id = NewHookSceneId}),
                {PlayerState3, HookState#hook_state{scene_id = NewHookSceneId, challenge_boos = false}};
            _ ->
                Update = #player_state{
                    db_player_base = #db_player_base{
                        challenge_num = NewChallengeNum
                    }
                },
                {ok, PlayerState2} = player_lib:update_player_state(PlayerState1, Update, false),
                {PlayerState2, HookState#hook_state{challenge_boos = false}}
        end,
    player_lib:send_update(PlayerState, NewPlayerState, ?UPDATE_CAUSE_OTHER),
    net_send:send_to_client(Socket, 13006, #rep_challenge_num{challenge_num = NewChallengeNum, need_jade = NeedJade}),
    Base = NewPlayerState#player_state.db_player_base,
    %% Compute and persist the star rating for this clear
    Star = compute_star(NewHookState),
    net_send:send_to_client(Socket, 13017, #rep_challenge_boos_result{status = ?RESULT_STATUS_WIN, scene_id = Base#db_player_base.hook_scene_id}),
    player_hook_star_lib:store_hook_star(NewPlayerState, CurSceneId, Star),
    {NewPlayerState, NewHookState}.
%% 生成挂机掉落
%% Build the drop notification (13005 payload) for a killed monster.
make_rep_drop(MonsterUid, DropList) ->
    ToProto = fun({GoodsId, _IsBind, Num}) ->
        #proto_hook_drop{goods_id = GoodsId, num = Num}
    end,
    #rep_drop{
        obj_flag = #proto_obj_flag{type = ?OBJ_TYPE_MONSTER, id = MonsterUid},
        drop_list = lists:map(ToProto, DropList)
    }.
%% 添加怪物
%% Spawn Count monsters picked by weight from MonsterList into MonsterDict.
%% SeedId seeds the pseudo-unique monster uids; returns the filled dict.
add_monster(0, _MonsterList, MonsterDict, _SeedId) ->
    MonsterDict;
add_monster(Count, MonsterList, MonsterDict, SeedId) ->
    %% Uids strictly increase from SeedId, so they never collide within a round
    MonsterUid = SeedId + util_rand:rand(1, 10),
    MonsterId = util_rand:weight_rand_ex(MonsterList),
    MonsterConf = monster_config:get(MonsterId),
    MonsterAttr = MonsterConf#monster_conf.attr_base,
    F = fun(SkillId, Acc) ->
        dict:store(SkillId, #db_skill{skill_id = SkillId, lv = 1, next_time = 0}, Acc)
    end,
    SkillDict = lists:foldl(F, dict:new(), MonsterConf#monster_conf.hook_skill_list),
    ObjState = #hook_obj_state{
        obj_id = MonsterUid,
        obj_type = ?OBJ_TYPE_MONSTER,
        monster_id = MonsterId,
        status = ?STATUS_ALIVE,
        is_boss = false,
        last_use_skill_time = 0,
        attr_base = MonsterAttr,
        attr_total = MonsterAttr,
        cur_hp = MonsterAttr#attr_base.hp,
        cur_mp = MonsterAttr#attr_base.mp,
        lv = MonsterConf#monster_conf.lv,
        career = MonsterConf#monster_conf.career,
        order_skill_list = MonsterConf#monster_conf.hook_skill_list,
        skill_dict = SkillDict,
        buff_dict = dict:new(),
        effect_dict = dict:new(),
        effect_src_dict = dict:new(),
        is_drop = false
    },
    NewDict = dict:store(MonsterUid, ObjState, MonsterDict),
    add_monster(Count - 1, MonsterList, NewDict, MonsterUid + 1).
%% 获取怪物平均血量(用于离线挂机数据统计)
%% Weighted average monster HP for the scene (used for offline statistics).
get_monster_avg_hp(HookSceneConf) ->
    Accumulate = fun({MonsterId, Weight}, {HpSum, WeightSum}) ->
        Conf = monster_config:get(MonsterId),
        Attr = Conf#monster_conf.attr_base,
        {HpSum + Attr#attr_base.hp * Weight, WeightSum + Weight}
    end,
    {TotalHp, TotalWeight} =
        lists:foldl(Accumulate, {0, 0}, HookSceneConf#hook_scene_conf.monster_list),
    TotalHp / TotalWeight.
%% 生成怪物掉落
%% Roll drops for a regular monster kill in this scene.
make_monster_drop(Career, HookSceneConf) ->
    make_drop(Career, HookSceneConf#hook_scene_conf.monster_drop).
%% 生成boss掉落
%% Roll drops for a boss kill in this scene.
make_boss_drop(Career, HookSceneConf) ->
    make_drop(Career, HookSceneConf#hook_scene_conf.boss_drop).
%% 生成掉落列表
%% Roll a drop list for the given career from the configured drop table.
%% Each table entry is {CareerLimit, DropNumList, GoodsList}; a CareerLimit of 0
%% means "any career". Returns a list of {GoodsId, IsBind, Num} tuples.
make_drop(Career, DropList) ->
    F = fun({CareerLimit, DropNumList, GoodsList}, Acc) ->
        case CareerLimit == Career orelse CareerLimit == 0 of
            true ->
                %% Weighted roll for how many items this entry drops
                DropNum = util_rand:weight_rand_ex(DropNumList),
                case DropNum > 0 of
                    true ->
                        List1 = [{{GoodsId, IsBind, Num}, Rate} || {GoodsId, IsBind, Num, Rate} <- GoodsList],
                        %% Roll DropNum goods, each picked by weight
                        DropList1 = [util_rand:weight_rand_ex(List1) || _N <- lists:seq(1, DropNum)],
                        DropList1 ++ Acc;
                    _ ->
                        Acc
                end;
            _ ->
                Acc
        end
    end,
    lists:foldl(F, [], DropList).
%% Compute hook statistics from an offline duration
%% Estimate {Kills, Exp, Coin} for TimeCount seconds of hooking in the player's
%% current hook scene, based on the scene's average monster hp and the player's
%% average damage per hit.
get_hook_statistics(PlayerState, TimeCount) ->
    PlayerBase = PlayerState#player_state.db_player_base,
    PlayerAttr = PlayerState#player_state.attr_total,
    HookSceneConf = hook_scene_config:get(PlayerBase#db_player_base.hook_scene_id),
    %% Average hp of a single monster in one round
    MonseterAvgHp = get_monster_avg_hp(HookSceneConf),
    %% Average damage per hit, by career
    PlayerAvgAtt =
        case PlayerBase#db_player_base.career of
            ?CAREER_ZHANSHI ->
                (PlayerAttr#attr_base.min_ac + PlayerAttr#attr_base.max_ac) / 2;
            ?CAREER_FASHI ->
                (PlayerAttr#attr_base.min_mac + PlayerAttr#attr_base.max_mac) / 2;
            _ ->
                (PlayerAttr#attr_base.min_sc + PlayerAttr#attr_base.max_sc) / 2
        end,
    MinCount = HookSceneConf#hook_scene_conf.min_monster_num,
    MaxCount = HookSceneConf#hook_scene_conf.max_monster_num,
    %% Average number of monsters per round
    Count = (MinCount + MaxCount) / 2,
    %% Time needed to clear one round of monsters
    T = util_math:ceil(MonseterAvgHp / PlayerAvgAtt * Count),
    T1 = max(T, HookSceneConf#hook_scene_conf.min_round_time),
    %% Total kills over the offline time determine the exp and coin rewards
    SumKill = util_math:floor(TimeCount / T1 * Count),
    Exp = SumKill * HookSceneConf#hook_scene_conf.per_exp,
    Coin = SumKill * HookSceneConf#hook_scene_conf.per_coin,
    {SumKill, Exp, Coin}.
%% Compute offline hook rewards
%% Compute the offline hook rewards for TimeCount seconds and store them as the
%% player's pending hook report (claimed later via receive_hook_draw/2).
compute_hook_offline(PlayerState, TimeCount) ->
    %% Fix: GoodsHook was previously an unbound variable (compile error); the
    %% report is produced by compute_hook_gain/2, the module's only
    %% #hook_report{} builder.
    GoodsHook = compute_hook_gain(PlayerState, TimeCount),
    PlayerState#player_state{
        hook_report = GoodsHook
    }.
%% 计算挂机获取
%% Compute the gains for TimeCountTemp seconds of hooking: a #hook_report{}
%% with kill count, exp, coins and a dict of dropped goods keyed by
%% {GoodsId, IsBind}. Returns an empty report when the feature is locked.
compute_hook_gain(PlayerState, TimeCountTemp) ->
    %% Clamp degenerate durations to one minute
    TimeCount = case TimeCountTemp < 1 of
        true ->
            60;
        _ ->
            TimeCountTemp
    end,
    case function_lib:is_function_open(PlayerState, ?FUNCTION_ID_HOOK) of
        true ->
            PlayerBase = PlayerState#player_state.db_player_base,
            HookSceneConf = hook_scene_config:get(PlayerBase#db_player_base.hook_scene_id),
            {SumKill, Exp, Coin} = get_hook_statistics(PlayerState, TimeCount),
            %% Roll drops once per killed monster, aggregated by {GoodsId, IsBind}
            Career = PlayerBase#db_player_base.career,
            F = fun(_, Acc) ->
                case make_monster_drop(Career, HookSceneConf) of
                    [] ->
                        Acc;
                    DropList ->
                        F1 =
                            fun({GoodsId, IsBind, Num}, Acc1) ->
                                case dict:find({GoodsId, IsBind}, Acc1) of
                                    {ok, Num1} ->
                                        dict:store({GoodsId, IsBind}, Num1 + Num, Acc1);
                                    _ ->
                                        dict:store({GoodsId, IsBind}, Num, Acc1)
                                end
                            end,
                        lists:foldl(F1, Acc, DropList)
                end
            end,
            DropDict = lists:foldl(F, dict:new(), lists:seq(1, SumKill)),
            %% Build the offline statistics report
            #hook_report{
                time_count = TimeCount,
                kill_num = SumKill,
                exp = Exp,
                coin = Coin,
                goods_dict = DropDict
            };
        _ ->
            #hook_report{}
    end.
%% 领取挂机奖励
%% Claim the pending hook rewards: goods go straight into the bag (extras are
%% auto-sold), exp and coins are granted with the vip bonus. When drawing the
%% report stored on the player (GoodsHook =:= null) the stored report is cleared.
receive_hook_draw(PlayerState, GoodsHook) ->
    HookReport = case GoodsHook of
        null ->
            PlayerState#player_state.hook_report;
        _ ->
            GoodsHook
    end,
    Exp = HookReport#hook_report.exp,
    case Exp > 0 of
        true ->
            F1 = fun({GoodsId, IsBind}, Num, Acc) ->
                [{GoodsId, IsBind, Num} | Acc]
            end,
            GoodsList = dict:fold(F1, [], HookReport#hook_report.goods_dict),
            %% Put the goods straight into the bag
            {ok, PlayerState1, SellCoin, _SellList} = goods_lib:add_goods_list_and_auto_sell(PlayerState, GoodsList),
            Base = PlayerState1#player_state.db_player_base,
            %% Grant exp and coins; vip level adds bonus exp
            DbPlayerBase = PlayerState1#player_state.db_player_base,
            VipAddExp = vip_lib:get_vip_hook_exp(Base#db_player_base.career, DbPlayerBase#db_player_base.vip),
            Coin = HookReport#hook_report.coin + SellCoin,
            {ok, PlayerState2} = player_lib:add_exp(PlayerState1, util_math:floor(Exp * ((100 + VipAddExp) / 100)), {?LOG_TYPE_HOOK, []}),
            {ok, PlayerState3} = player_lib:incval_on_player_money_log(PlayerState2, #db_player_money.coin, Coin, ?LOG_TYPE_HOOK),
            %% Clear the stored report only when it was the one consumed
            case GoodsHook of
                null ->
                    PlayerState3#player_state{
                        hook_report = #hook_report{}
                    };
                _ ->
                    PlayerState3
            end;
        _ ->
            PlayerState
    end.
%% Get the jade cost of the next power purchase
%% Jade cost of the (BuyNum + 1)-th power purchase; 0 when no config entry exists.
get_buy_power_need(BuyNum) ->
    case buy_power_need_config:get(BuyNum + 1) of
        Conf when is_record(Conf, buy_power_need_conf) ->
            Conf#buy_power_need_conf.need_jade;
        _ ->
            0
    end.
%% Buy a hook attempt
%% Buy one hook attempt with jade. The purchasable count is limited by vip
%% level; the jade cost comes from buy_power_need_config.
%% Returns {ok, NewPlayerState} or {fail, ErrCode}.
buy_power(PlayerState) ->
    %% Number of attempts already bought
    BuyHookNum = counter_lib:get_value(PlayerState#player_state.player_id, ?COUNTER_HOOK_BUY_NUM),
    Base = PlayerState#player_state.db_player_base,
    VipBuyHookNum = vip_lib:get_vip_buy_hook_num(Base#db_player_base.career, Base#db_player_base.vip),
    %% Enforce the vip purchase limit
    case BuyHookNum >= VipBuyHookNum of
        true ->
            {fail, ?ERR_VIP_3};
        _ ->
            %% Jade cost of the next purchase
            BuyPowerConf = buy_power_need_config:get(BuyHookNum + 1),
            DbPlayerMoney = PlayerState#player_state.db_player_money,
            case DbPlayerMoney#db_player_money.jade >= BuyPowerConf#buy_power_need_conf.need_jade of
                true ->
                    case player_lib:incval_on_player_money_log(PlayerState, #db_player_money.jade, -BuyPowerConf#buy_power_need_conf.need_jade, ?LOG_TYPE_BUY_POWER) of
                        {ok, PlayerState2} ->
                            counter_lib:update_limit(PlayerState#player_state.player_id, ?COUNTER_HOOK_BUY_NUM),
                            {ok, PlayerState2};
                        _ ->
                            {fail, ?ERR_PLAYER_JADE_NOT_ENOUGH}
                    end;
                _ ->
                    {fail, ?ERR_PLAYER_JADE_NOT_ENOUGH}
            end
    end.
%% Fire-wall attack while hooking (also triggered by the client; the server only computes the damage)
%% Currently unused: the hook mode has no fire-wall skill at this stage
%% Apply fire-wall damage ticks requested by the client. Only walls that have
%% not expired and monsters that are still alive are processed; the resulting
%% harm list is pushed to the client via message 13003.
fire_wall_attack(PlayerState, HookState, Data) ->
    CurTime = util_date:unixtime(),
    F = fun(FireWallAttack, Acc) ->
        {PlayerState1, HookState1, HarmList} = Acc,
        #proto_fire_wall_attack{
            fire_wall_uid = Fuid,
            monster_uid = Muid
        } = FireWallAttack,
        FireWallDict = HookState1#hook_state.fire_wall_dict,
        MonsterDict = HookState1#hook_state.monster_dict,
        case dict:find(Fuid, FireWallDict) of
            {ok, #fire_wall_state{next_time = NT, remove_time = RT} = FWState} when CurTime < RT ->
                case dict:find(Muid, MonsterDict) of
                    {ok, #hook_obj_state{cur_hp = CurHp} = ObjState} when CurHp > 0 ->
                        %% Schedule the wall's next tick and resolve the damage
                        Interval = FWState#fire_wall_state.interval,
                        NFWState = FWState#fire_wall_state{next_time = NT + Interval},
                        {HarmResult, NewObjState} = skill_base_lib:fire_wall_attack(NFWState, ObjState),
                        NewFireWallDict = dict:store(Fuid, NFWState, FireWallDict),
                        HookState2 = HookState1#hook_state{fire_wall_dict = NewFireWallDict},
                        {PlayerState2, HookState3} = update_obj_state(NewObjState, PlayerState1, HookState2),
                        ProtoHarm = #proto_harm{
                            obj_flag = #proto_obj_flag{type = ?OBJ_TYPE_MONSTER, id = Muid},
                            harm_status = HarmResult#harm_result.status,
                            harm_value = HarmResult#harm_result.harm_value,
                            cur_hp = NewObjState#hook_obj_state.cur_hp,
                            cur_mp = NewObjState#hook_obj_state.cur_mp
                        },
                        NewHarmList = [ProtoHarm | HarmList],
                        {PlayerState2, HookState3, NewHarmList};
                    _ ->
                        Acc
                end;
            _ ->
                Acc
        end
    end,
    {NewPlayerState, NewHookState, List} = lists:foldl(F, {PlayerState, HookState, []}, Data#req_hook_fire_wall_attack.fire_wall_attack_list),
    net_send:send_to_client(PlayerState#player_state.socket, 13003, #rep_hook_use_skill{harm_list = List}),
    {NewPlayerState, NewHookState}.
%% ====================================================================
%% 红点提示
%% ====================================================================
%% 挂机扫荡
%% Red-dot hint for hook raids: remaining attempts = bought + free limit - used.
get_button_tips_hook_raids(PlayerState) ->
    PlayerId = PlayerState#player_state.player_id,
    %% Attempts bought with jade
    Bought = counter_lib:get_value(PlayerId, ?COUNTER_HOOK_BUY_NUM),
    %% Free daily attempt limit
    FreeLimit = counter_lib:get_limit(?COUNTER_HOOK_NUM),
    %% Attempts already used
    Used = counter_lib:get_value(PlayerId, ?COUNTER_HOOK_NUM),
    {PlayerState, Bought + FreeLimit - Used}.
%% ====================================================================
%% Internal functions
%% ====================================================================
%% Check whether the player is dead
%% True when the hooked player object's status is the DIE status.
is_player_die(HookPlayerState) ->
    HookPlayerState#hook_obj_state.status =:= ?STATUS_DIE.
%% 获取怪物死亡个数
%% Count the dead monsters in the monster dict.
monster_die_num(MonsterDict) ->
    CountDead = fun(_Uid, Obj, Acc) ->
        case Obj#hook_obj_state.status =:= ?STATUS_DIE of
            true -> Acc + 1;
            false -> Acc
        end
    end,
    dict:fold(CountDead, 0, MonsterDict).
%% 更新对象状态
%% Write updated combat object(s) back into the state. The first clause takes a
%% single #hook_obj_state{}; the second clause folds a dict of updated objects.
%% Monster deaths trigger the drop/reward flow (do_monster_die/3); a dead pet is
%% detached from its owner. Returns {PlayerState, HookState}.
update_obj_state(HookObjState, PlayerState, HookState) when is_record(HookObjState, hook_obj_state) ->
    ObjType = HookObjState#hook_obj_state.obj_type,
    ObjId = HookObjState#hook_obj_state.obj_id,
    case ObjType of
        ?OBJ_TYPE_PLAYER ->
            {PlayerState, HookState#hook_state{hook_player_state = HookObjState}};
        ?OBJ_TYPE_MONSTER ->
            MonsterDict = HookState#hook_state.monster_dict,
            NewMonsterDict = dict:store(ObjId, HookObjState, MonsterDict),
            NewHookState = HookState#hook_state{monster_dict = NewMonsterDict},
            case HookObjState#hook_obj_state.status of
                ?STATUS_DIE ->
                    %% Dead monster: grant drops/rewards
                    do_monster_die(PlayerState, NewHookState, ObjId);
                _ ->
                    {PlayerState, NewHookState}
            end;
        ?OBJ_TYPE_PET ->
            case HookObjState#hook_obj_state.status of
                ?STATUS_ALIVE ->
                    {PlayerState, HookState#hook_state{hook_pet_state = HookObjState}};
                _ ->
                    %% Dead pet: unlink it from its owner
                    HookPlayerState = HookState#hook_state.hook_player_state,
                    NewHookState = HookState#hook_state{
                        hook_player_state = HookPlayerState#hook_obj_state{pet_id = null},
                        hook_pet_state = HookObjState
                    },
                    {PlayerState, NewHookState}
            end;
        _ ->
            {PlayerState, HookState}
    end;
update_obj_state(UpdateDict, PlayerState, HookState) ->
    %% Fold a dict of updated objects through the single-object clause
    F = fun(_, HookObjState, Acc) ->
        {PlayerState1, HookState1} = Acc,
        update_obj_state(HookObjState, PlayerState1, HookState1)
    end,
    dict:fold(F, {PlayerState, HookState}, UpdateDict).
%% 生成对应的数据格式,便于发送给前端,便于服务器统一管理
%% Convert a hook combat object into the #proto_hook_monster{} sent to the
%% client. PlayerState is only read for pets (owner guild/team/name colour)
%% and may be null for plain monsters.
make_proto_hook_monster(MonsterObj, PlayerState) ->
    #hook_obj_state{
        obj_id = ObjId,
        obj_type = ObjType,
        attr_total = AttrTotal,
        monster_id = MonsterId,
        cur_hp = CurHp,
        cur_mp = CurMp
    } = MonsterObj,
    case ObjType of
        ?OBJ_TYPE_PET ->
            DbPlayerBase = PlayerState#player_state.db_player_base,
            #proto_hook_monster{
                obj_flag = #proto_obj_flag{type = ObjType, id = ObjId},
                %% A pet shares its owner's obj_id (see create_pet/2)
                owner_flag = #proto_obj_flag{type = ?OBJ_TYPE_PLAYER, id = ObjId},
                monster_id = MonsterId,
                cur_hp = CurHp,
                cur_mp = CurMp,
                hp = AttrTotal#attr_base.hp,
                mp = AttrTotal#attr_base.mp,
                guild_id = DbPlayerBase#db_player_base.guild_id,
                team_id = PlayerState#player_state.team_id,
                name_colour = PlayerState#player_state.name_colour
            };
        _ ->
            #proto_hook_monster{
                obj_flag = #proto_obj_flag{type = ObjType, id = ObjId},
                monster_id = MonsterId,
                cur_hp = CurHp,
                cur_mp = CurMp,
                hp = AttrTotal#attr_base.hp,
                mp = AttrTotal#attr_base.mp
            }
    end.
| null | https://raw.githubusercontent.com/ijvcms/chuanqi_dev/7742184bded15f25be761c4f2d78834249d78097/server/trunk/server/src/business/hook/hook_lib.erl | erlang | -------------------------------------------------------------------
@doc
个人挂机
@end
-------------------------------------------------------------------
回合修正时间间隔
更新最后挂机时间间隔(用于记录最后挂机时间,便于离线后上线奖励计算)
boss击杀后下一轮时间
API
====================================================================
API functions
====================================================================
初始化挂机信息
挂机心跳
挂机定时器
挂机心跳,每次收到前端挂机动作都会更新心跳
判断挂机是否由前端驱动
判断是否是回合结束
回合结束更新挂机状态,并且通知前端
回合时间到,还没有通过,说明回合失败
如果是boss回合通知前端回合星数为0星挑战失败
下面俩条判断都是用于挂机网络不好,丢包或者断网做特殊处理
太久没收到挂机心跳,导致最后挂机心跳到当前时间超过了最后挂机更新时间间隔,触发最后挂机时间更新
更新最后挂机时间
太久没收到挂机心跳,导致最后心跳到当前时间超过了回合修正时间,通知前端回合修正,抛弃旧回合,进入新回合
服务端驱动挂机,说明玩家当前不在挂机场景,在普通场景
定时给予玩家挂机奖励并且更新最后挂机时间
计算挂机奖励
领取挂机奖励
更新最后挂机时间
判断是否有挑战次数
检查是否可以挑战这个场景的boss
所有条件都合法的话,这里只需要更新玩家的挂机场景id,以及更新回合标识
计算挂机得到物品对应品质数量
计算挂机出售物品对应品质数量
计算挂机活的物品
获取挂机对象
获取挂机怪物信息
获取boss挑战信息
获取boss挑战信息
根据回合标识刷出对应怪物
刷出boss
刷出小怪
刷新挂机怪物
刷新挂机boss
创建宠物(挂机创建)
MonsterAttr = obj_pet_lib:make_attr(MonsterId, Lv),
创建火墙(挂机火墙)
发送挂机结果给前端
挂机对象使用技能,玩家和怪物使用技能都由客户端发送,服务器只做伤害结果计算
发送技能效果
如果使用的技能是召唤宠物
生成宠物并通知前端
判断是否回合结束
玩家是否死亡
杀死怪物数量
怪物总数
发送回合结果
%% ?INFO("~p", [_Err]),
%% ?INFO("err: ~p", [_Err]),
挂机怪物死亡
发放物品
% vip经验加成
直接把掉落物品加入玩家背包
生成掉落,通知前端(这里的掉落只用于前端显示,生成的瞬间已经加入玩家背包)
boss死亡
判断当前所在场景是不是比之前通关的最大场景还要大
如果是,说明玩家是在最大可以挂机场景挑战boss通关,直接把玩家移到下一个场景(在前端不会马上显示,下一回合会移到新场景)
更新最大通过场景为当前场景
发送结果给前端
生成挂机掉落
添加怪物
获取怪物平均血量(用于离线挂机数据统计)
生成怪物掉落
生成boss掉落
生成掉落列表
获取玩家平均伤害
获取每回合平均怪物个数
根据离线时间计算一共杀死多少只怪物,并根据杀死的怪物给予玩家对应的经验和金币奖励
计算挂机获取
根据玩家职业以及杀死怪物总数生成掉落信息
生成离线统计数据
领取挂机奖励
直接把物品发放到背包
% vip经验加成
领取挂机奖励
获取购买的的次数
判断已经购买的次数
购买挂机次数元宝消耗配置
这个函数现在没用到,先阶段挂机没有火墙技能
====================================================================
红点提示
====================================================================
挂机扫荡
购买的次数
免费的次数上限
已用的次数
====================================================================
====================================================================
获取怪物死亡个数
更新对象状态
生成对应的数据格式,便于发送给前端,便于服务器统一管理 | @author zhengsiying
( C ) 2015 , < COMPANY >
Created : 04 . 八月 2015 下午5:21
-module(hook_lib).
-include("common.hrl").
-include("record.hrl").
-include("cache.hrl").
-include("config.hrl").
-include("proto.hrl").
-include("language_config.hrl").
-include("log_type_config.hrl").
-export([
init/1,
get_obj/3,
get_monster_data/1,
new_round/2,
refresh_monster/1,
get_hook_statistics/2,
compute_hook_gain/2,
compute_hook_offline/2,
obj_use_skill/6,
heartbeat/1,
on_timer/2,
check_scene_id/2,
update_drive/3,
challenge_boos/3,
get_hook_report/2,
get_challenge_info/1,
buy_challenge_num/1,
receive_hook_draw/2,
buy_power/1,
get_buy_power_need/1,
update_last_hook_time/2,
fire_wall_attack/3
]).
-export([get_button_tips_hook_raids/1]).
-record(hook_report, {
time_count = 0,
win_num = 0,
fail_num = 0,
kill_num = 0,
coin = 0,
exp = 0,
goods_dict = dict:new(),
sell_coin = 0,
sell_list = []
}).
%% Build a fresh per-round hook (auto-battle) state for the player.
%% Resets every skill cooldown and primes the player's combat object with
%% full HP/MP taken from the current total attributes.
init(PlayerState) ->
    PlayerBase = PlayerState#player_state.db_player_base,
    %% Clear all skill cooldowns so the new round starts with skills ready.
    F = fun(SkillId, Skill, Acc) ->
        dict:store(SkillId, Skill#db_skill{next_time = 0}, Acc)
    end,
    SkillDict = dict:fold(F, dict:new(), PlayerState#player_state.skill_dict),
    PlayerAttr = PlayerState#player_state.attr_total,
    %% NOTE(review): this copy is a no-op (hp := hp); presumably a leftover
    %% from an earlier attribute adjustment -- confirm before removing.
    PlayerAttr1 = PlayerAttr#attr_base{hp = PlayerAttr#attr_base.hp},
    HookPlayerState = #hook_obj_state{
        obj_id = PlayerState#player_state.player_id,
        obj_type = ?OBJ_TYPE_PLAYER,
        career = PlayerBase#db_player_base.career,
        lv = PlayerBase#db_player_base.lv,
        status = ?STATUS_ALIVE,
        attr_base = PlayerState#player_state.attr_base,
        attr_total = PlayerAttr1,
        buff_dict = dict:new(),
        effect_dict = dict:new(),
        effect_src_dict = dict:new(),
        cur_hp = PlayerAttr1#attr_base.hp,
        cur_mp = PlayerAttr1#attr_base.mp,
        skill_dict = SkillDict,
        order_skill_list = PlayerState#player_state.order_skill_list,
        last_use_skill_time = 0,
        pass_trigger_skill_list = PlayerState#player_state.pass_trigger_skill_list
    },
    %% Round bookkeeping starts zeroed; the round proper begins in new_round/2.
    #hook_state{
        scene_id = PlayerBase#db_player_base.hook_scene_id,
        hook_player_state = HookPlayerState,
        start_time = 0,
        next_round_time = 0,
        round_status = ?ROUND_STATUS_INIT,
        hook_heartbeat = util_date:unixtime(),
        drive = ?HOOK_DRIVE_CLIENT,
        fire_wall_dict = dict:new(),
        monster_dict = dict:new(),
        fire_wall_uid = util_rand:rand(1000, 10000)
    }.
%% Refresh the hook heartbeat timestamp (called on client heartbeats).
heartbeat(HookState) ->
    HookState#hook_state{hook_heartbeat = util_date:unixtime()}.
%% Periodic hook-system tick.
%% Client-driven mode (player not in a real scene): end overdue rounds,
%% persist the heartbeat as last_hook_time, or nudge a stale client.
%% Otherwise (server-driven / in scene): keep last_hook_time fresh so the
%% offline-gain accounting stays bounded.
on_timer(PlayerState, HookState) ->
    CurTime = util_date:unixtime(),
    EndTime = HookState#hook_state.end_time,
    %% BUGFIX: HeartbeatTime was referenced below but never bound in this
    %% function; bind it from the hook heartbeat (same source as
    %% update_last_hook_time/2 uses).
    HeartbeatTime = HookState#hook_state.hook_heartbeat,
    case HookState#hook_state.drive =:= ?HOOK_DRIVE_CLIENT andalso PlayerState#player_state.scene_id =:= null of
        true ->
            if
                HookState#hook_state.round_status /= ?ROUND_STATUS_END andalso EndTime =< CurTime ->
                    %% Round timed out: force-fail it and notify the client.
                    NewHookState = HookState#hook_state{round_status = ?ROUND_STATUS_END, challenge_boos = false},
                    player_lib:put_hook_state(NewHookState),
                    send_result_to_client(PlayerState, HookState, ?RESULT_STATUS_FAIL),
                    case HookState#hook_state.boss_round of
                        true ->
                            %% Boss rounds additionally get a 13017 fail result.
                            Base = PlayerState#player_state.db_player_base,
                            Data = #rep_challenge_boos_result{status = ?RESULT_STATUS_FAIL, scene_id = Base#db_player_base.hook_scene_id},
                            net_send:send_to_client(PlayerState#player_state.socket, 13017, Data);
                        _ ->
                            skip
                    end;
                HeartbeatTime + ?UPDATE_LAST_HOOK_TIME =< CurTime ->
                    update_last_hook_time(PlayerState, HookState);
                HeartbeatTime + ?REVISE_CLIENT_ROUND_TIME =< CurTime ->
                    %% Heartbeat is getting stale: ask the client to re-sync.
                    send_result_to_client(PlayerState, HookState, ?RESULT_STATUS_WAIT);
                true ->
                    skip
            end;
        _ ->
            DbPlayerBase = PlayerState#player_state.db_player_base,
            LastHookTime = DbPlayerBase#db_player_base.last_hook_time,
            DrawHookTime = DbPlayerBase#db_player_base.draw_hook_time,
            %% Elapsed time since the most recent of the two bookkeeping marks.
            TimeCount = min(CurTime - LastHookTime, CurTime - DrawHookTime),
            case TimeCount >= 60 of
                true ->
                    Update = #player_state{
                        db_player_base = #db_player_base{
                            last_hook_time = CurTime
                        }
                    },
                    %% BUGFIX: this clause was missing its terminating
                    %% expression and the ';' separator, which made the
                    %% module unparsable.
                    {ok, _PlayerState1} = player_lib:update_player_state(PlayerState, Update, false),
                    ok;
                _ ->
                    skip
            end
    end.
%% Persist the hook heartbeat as the player's last_hook_time (no client
%% notification). Skips the DB write when the value is already current.
update_last_hook_time(PlayerState, HookState) ->
    DbPlayerBase = PlayerState#player_state.db_player_base,
    LastHookTime = DbPlayerBase#db_player_base.last_hook_time,
    HeartbeatTime = HookState#hook_state.hook_heartbeat,
    case LastHookTime /= HeartbeatTime of
        true ->
            Update = #player_state{
                db_player_base = #db_player_base{
                    last_hook_time = HeartbeatTime
                }
            },
            player_lib:update_player_state(PlayerState, Update, false);
        _ ->
            {ok, PlayerState}
    end.
%% Validate a hook scene id: may the player grind in this scene?
%% A scene is allowed if it is at most one past the highest cleared hook
%% scene and not below the initial scene id. The trailing `orelse` clause is
%% redundant with the range check but kept for clarity/safety.
check_scene_id(PlayerState, HookSceneId) ->
    DbPlayerBase = PlayerState#player_state.db_player_base,
    PassHookSceneId = DbPlayerBase#db_player_base.pass_hook_scene_id,
    (PassHookSceneId + 1 >= HookSceneId andalso HookSceneId >= ?INIT_HOOK_SCENE_ID) orelse HookSceneId =:= ?INIT_HOOK_SCENE_ID.
%% Challenge the scene boss.
%% Start a boss challenge in HookSceneId.
%% Requires a remaining challenge attempt and no challenge already pending.
%% Returns {NewPlayerState, NewHookState} on success, 'skip' otherwise.
challenge_boos(PlayerState, HookState, HookSceneId) ->
    DbPlayerBase = PlayerState#player_state.db_player_base,
    case DbPlayerBase#db_player_base.challenge_num > 0 andalso HookState#hook_state.challenge_boos /= true of
        true ->
            case hook_lib:check_scene_id(PlayerState, HookSceneId) of
                true ->
                    %% Does not affect the current round; the flag takes effect
                    %% when the next round is generated.
                    Update = #player_state{
                        db_player_base = #db_player_base{hook_scene_id = HookSceneId}
                    },
                    case player_lib:update_player_state(PlayerState, Update) of
                        {ok, NewPlayerState} ->
                            NewHookState = HookState#hook_state{challenge_boos = true},
                            {NewPlayerState, NewHookState};
                        _ ->
                            skip
                    end;
                _ ->
                    skip
            end;
        _ ->
            skip
    end.
%% Build the hook (idle-battle) report for the client.
%% Build the client-facing hook report proto.
%% GoodsHook =:= null means "use the report accumulated on the player
%% state"; otherwise the explicitly supplied #hook_report{} is reported.
get_hook_report(PlayerState, GoodsHook) ->
    HookReport = case GoodsHook of
        null ->
            PlayerState#player_state.hook_report;
        _ ->
            GoodsHook
    end,
    %% Tally dropped equipment counts by quality.
    %% goods_dict keys are {GoodsId, IsBind}; the bind flag is ignored here.
    F = fun({GoodsId, _}, Num, Acc) ->
        GoodsConf = goods_config:get(GoodsId),
        case GoodsConf#goods_conf.type =:= ?EQUIPS_TYPE of
            true ->
                Quality = GoodsConf#goods_conf.quality,
                case dict:find(Quality, Acc) of
                    {ok, Num1} ->
                        NewNum = Num1 + Num,
                        dict:store(Quality, NewNum, Acc);
                    _ ->
                        dict:store(Quality, Num, Acc)
                end;
            false ->
                Acc
        end
    end,
    QualityDict = dict:fold(F, dict:new(), HookReport#hook_report.goods_dict),
    %% One report row per quality, sale counts filled in below.
    F1 =
        fun(Quality, Num, Acc) ->
            [#proto_goods_report{quality = Quality, num = Num, sale_num = 0} | Acc]
        end,
    GoodsReportList = dict:fold(F1, [], QualityDict),
    %% Merge auto-sell counts (sell_list is [{Quality, Num}]) into the rows.
    F2 =
        fun({Quality, Num}, Acc) ->
            case lists:keyfind(Quality, #proto_goods_report.quality, Acc) of
                #proto_goods_report{} = R ->
                    lists:keyreplace(Quality, #proto_goods_report.quality, Acc, R#proto_goods_report{sale_num = Num});
                _ ->
                    Acc
            end
        end,
    GoodsReportList1 = lists:foldl(F2, GoodsReportList, HookReport#hook_report.sell_list),
    %% Flat per-item drop list for the detailed view.
    F3 =
        fun({GoodsId, _}, Num, Acc) ->
            [#proto_hook_drop{goods_id = GoodsId, num = Num} | Acc]
        end,
    GoodsList = dict:fold(F3, [], HookReport#hook_report.goods_dict),
    #proto_hook_report{
        offline_time = HookReport#hook_report.time_count,
        kill_num = HookReport#hook_report.kill_num,
        die_num = HookReport#hook_report.fail_num,
        %% Coin total includes auto-sell proceeds.
        coin = HookReport#hook_report.coin + HookReport#hook_report.sell_coin,
        exp = HookReport#hook_report.exp,
        goods_report_list = GoodsReportList1,
        goods_list = GoodsList
    }.
%% Switch the hook driver (client-driven vs server-driven).
%% Change the hook drive mode. Returns {NewPlayerState, NewHookState} when
%% the mode actually changed, 'skip' otherwise.
update_drive(PlayerState, HookState, DriveStatus) ->
    case HookState#hook_state.drive /= DriveStatus of
        true ->
            NewPlayerState =
                case DriveStatus of
                    ?HOOK_DRIVE_SERVER ->
                        %% Switching to server drive means the player left the
                        %% hook scene: stamp last_hook_time immediately.
                        Update = #player_state{
                            db_player_base = #db_player_base{
                                last_hook_time = util_date:unixtime()
                            }
                        },
                        {ok, PlayerState1} = player_lib:update_player_state(PlayerState, Update),
                        PlayerState1;
                    _ ->
                        PlayerState
                end,
            %% ?INFO("update state : ~p ", [DriveStatus]),  % debug log (was commented out in the original source)
            {NewPlayerState, HookState#hook_state{drive = DriveStatus}};
        _ ->
            skip
    end.
%% Look up a combat object (player, monster or pet) in the hook state.
%% Returns the #hook_obj_state{} or 'null'; any crash during lookup is
%% logged and mapped to 'null'.
get_obj(HookState, ObjType, ObjId) ->
    try
        case ObjType of
            ?OBJ_TYPE_PLAYER ->
                HookState#hook_state.hook_player_state;
            ?OBJ_TYPE_MONSTER ->
                MonsterDict = HookState#hook_state.monster_dict,
                case dict:find(ObjId, MonsterDict) of
                    {ok, ObjState} ->
                        ObjState;
                    _ ->
                        null
                end;
            ?OBJ_TYPE_PET ->
                HookState#hook_state.hook_pet_state;
            _ ->
                null
        end
    catch
        Error:Info ->
            ?ERR("~p:~p ~p ~p ~p", [Error, Info, HookState#hook_state.scene_id, ObjType, ObjId]),
            null
    end.
%% Build the client-facing proto list for every monster of the current
%% round. The 'null' owner means "not a pet" (see make_proto_hook_monster/2).
get_monster_data(HookState) ->
    [make_proto_hook_monster(MonsterObj, null)
     || {_Uid, MonsterObj} <- dict:to_list(HookState#hook_state.monster_dict)].
%% Fetch the player's boss-challenge info as {PlayerState, ChallengeNum,
%% NextBuyCostJade}, lazily resetting the daily counters when the reset
%% time has passed.
get_challenge_info(PlayerState) ->
    get_challenge_info(PlayerState, true).
%% IsSendUpdate controls whether the client is notified of a daily reset.
get_challenge_info(PlayerState, IsSendUpdate) ->
    DbPlayerBase = PlayerState#player_state.db_player_base,
    #db_player_base{
        challenge_num = ChallengeNum,
        buy_challenge_num = BuyChallengeNum,
        reset_challenge_time = ResetTime
    } = DbPlayerBase,
    CurTime = util_date:unixtime(),
    case ResetTime =< CurTime of
        true ->
            %% Daily reset: restore free attempts, clear purchases, schedule
            %% the next reset for tomorrow.
            Update = #player_state{
                db_player_base = #db_player_base{
                    challenge_num = ?INIT_CHALLENGE_NUM,
                    buy_challenge_num = 0,
                    reset_challenge_time = util_date:get_tomorrow_unixtime()
                }
            },
            {ok, NewPlayerState} = player_lib:update_player_state(PlayerState, Update, IsSendUpdate),
            %% 50 jade = cost of the first purchase ((0 + 1) * 50), matching
            %% the formula in the non-reset branch.
            {NewPlayerState, ?INIT_CHALLENGE_NUM, 50};
        _ ->
            {PlayerState, ChallengeNum, util_math:floor((BuyChallengeNum + 1) * 50)}
    end.
%% Buy an extra boss-challenge attempt.
%% Purchase one boss-challenge attempt with jade. Cost escalates with the
%% number of purchases made today. Returns {NewPlayerState, NewChallengeNum,
%% NextCostJade} on success, 'skip' on insufficient jade or update failure.
buy_challenge_num(PlayerState) ->
    {PlayerState1, ChallengeNum, NeedJade} = get_challenge_info(PlayerState, false),
    DbPlayerMoney = PlayerState1#player_state.db_player_money,
    case DbPlayerMoney#db_player_money.jade >= NeedJade of
        true ->
            case player_lib:incval_on_player_money_log(PlayerState1, #db_player_money.jade, -NeedJade, false, ?LOG_TYPE_BUY_CHALLENGE) of
                {ok, PlayerState2} ->
                    DbPlayerBase = PlayerState2#player_state.db_player_base,
                    NewBuyChallengeNum = DbPlayerBase#db_player_base.buy_challenge_num + 1,
                    NewChallengeNum = ChallengeNum + 1,
                    Update = #player_state{
                        db_player_base = #db_player_base{
                            challenge_num = NewChallengeNum,
                            buy_challenge_num = NewBuyChallengeNum
                        }
                    },
                    {ok, NewPlayerState} = player_lib:update_player_state(PlayerState2, Update, false),
                    %% Diff against the pre-purchase state so the client gets
                    %% the combined money + counter change in one push.
                    player_lib:send_update(PlayerState, NewPlayerState, ?UPDATE_CAUSE_OTHER),
                    {NewPlayerState, NewChallengeNum, util_math:floor((NewBuyChallengeNum + 1) * 50)};
                _ ->
                    skip
            end;
        _ ->
            skip
    end.
%% Generate a new round.
%% Start a new hook round if the previous one is over (or its time is up).
%% Re-initializes the hook state, spawns the boss or a monster wave, and
%% schedules the earliest time the next round may begin.
%% Returns {ok, NewHookState} or {fail, 1} when it is too early.
new_round(PlayerState, HookState) ->
    CurTime = util_date:unixtime(),
    #hook_state{
        next_round_time = NextTime,
        round_status = Status,
        end_time = EndTime,
        challenge_boos = ChallengeBoss
    } = HookState,
    case NextTime =< CurTime andalso (Status /= ?ROUND_STATUS_START orelse EndTime =< CurTime) of
        true ->
            %% Fresh state; the challenge flag survives the reset.
            HookState1 = init(PlayerState),
            HookState2 = HookState1#hook_state{start_time = CurTime, challenge_boos = HookState#hook_state.challenge_boos},
            HookSceneConf = hook_scene_config:get(HookState2#hook_state.scene_id),
            {HookState4, MinRoundTime} =
                case ChallengeBoss of
                    true ->
                        %% Boss round: scene-configured time limit.
                        HookState3 = refresh_boss(HookState2#hook_state{end_time = CurTime + HookSceneConf#hook_scene_conf.limit_time}),
                        {HookState3, ?HOOK_BOSS_TIME};
                    _ ->
                        %% Normal round: fixed 10-minute cap.
                        HookState3 = refresh_monster(HookState2#hook_state{end_time = CurTime + 600}),
                        {HookState3, HookSceneConf#hook_scene_conf.min_round_time}
                end,
            NewHookState = HookState4#hook_state{next_round_time = CurTime + MinRoundTime},
            {ok, NewHookState};
        _ ->
            {fail, 1}
    end.
%% Spawn a fresh batch of normal monsters for the new round, sized randomly
%% within the scene's configured [min, max] and keyed by ids derived from a
%% time-based seed.
refresh_monster(HookState) ->
    #hook_scene_conf{
        min_monster_num = MinNum,
        max_monster_num = MaxNum,
        monster_list = MonsterList
    } = hook_scene_config:get(HookState#hook_state.scene_id),
    MonsterCount = util_rand:rand(MinNum, MaxNum),
    Now = util_date:unixtime(),
    SeedId = util_rand:rand(Now - 1000, Now),
    HookState#hook_state{
        monster_dict = add_monster(MonsterCount, MonsterList, dict:new(), SeedId)
    }.
%% Spawn the scene boss as the single monster of a boss round and mark the
%% round as a boss round.
refresh_boss(HookState) ->
    SceneId = HookState#hook_state.scene_id,
    HookSceneConf = hook_scene_config:get(SceneId),
    MonsterId = HookSceneConf#hook_scene_conf.boss_id,
    MonsterConf = monster_config:get(MonsterId),
    MonsterAttr = MonsterConf#monster_conf.attr_base,
    %% Boss skills all start at level 1 with no cooldown.
    F = fun(SkillId, Acc) ->
        dict:store(SkillId, #db_skill{skill_id = SkillId, lv = 1, next_time = 0}, Acc)
    end,
    SkillDict = lists:foldl(F, dict:new(), MonsterConf#monster_conf.hook_skill_list),
    CurTime = util_date:unixtime(),
    %% Time-derived uid, same scheme as refresh_monster/1's seed.
    MonsterUid = util_rand:rand(CurTime - 1000, CurTime),
    ObjState = #hook_obj_state{
        obj_id = MonsterUid,
        obj_type = ?OBJ_TYPE_MONSTER,
        monster_id = MonsterId,
        status = ?STATUS_ALIVE,
        is_boss = true,
        last_use_skill_time = 0,
        attr_base = MonsterAttr,
        attr_total = MonsterAttr,
        cur_hp = MonsterAttr#attr_base.hp,
        cur_mp = MonsterAttr#attr_base.mp,
        lv = MonsterConf#monster_conf.lv,
        career = MonsterConf#monster_conf.career,
        order_skill_list = MonsterConf#monster_conf.hook_skill_list,
        skill_dict = SkillDict,
        buff_dict = dict:new(),
        effect_dict = dict:new(),
        effect_src_dict = dict:new(),
        %% Loot not yet granted (see do_monster_die/3).
        is_drop = false
    },
    NewDict = dict:store(MonsterUid, ObjState, dict:new()),
    HookState#hook_state{
        monster_dict = NewDict,
        boss_round = true
    }.
%% Summon a pet for the hook player from a monster template, scaled by the
%% owner's level, and link it to the player. The pet reuses the player's
%% obj_id (owner and pet share the id, distinguished by obj_type).
create_pet(HookState, MonsterId) ->
    HookPlayerState = HookState#hook_state.hook_player_state,
    OwnerLv = HookPlayerState#hook_obj_state.lv,
    MonsterConf = monster_config:get(MonsterId),
    %% Scale the template attributes by owner level (OwnerLv / 100 bonus).
    MonsterAttr = api_attr:addition_attr(MonsterConf#monster_conf.attr_base, OwnerLv / 100),
    F = fun(SkillId, Acc) ->
        dict:store(SkillId, #db_skill{skill_id = SkillId, lv = 1, next_time = 0}, Acc)
    end,
    SkillDict = lists:foldl(F, dict:new(), MonsterConf#monster_conf.hook_skill_list),
    ObjState = #hook_obj_state{
        obj_id = HookPlayerState#hook_obj_state.obj_id,
        obj_type = ?OBJ_TYPE_PET,
        monster_id = MonsterId,
        status = ?STATUS_ALIVE,
        last_use_skill_time = 0,
        attr_base = MonsterAttr,
        attr_total = MonsterAttr,
        cur_hp = MonsterAttr#attr_base.hp,
        cur_mp = MonsterAttr#attr_base.mp,
        lv = MonsterConf#monster_conf.lv,
        career = MonsterConf#monster_conf.career,
        order_skill_list = MonsterConf#monster_conf.hook_skill_list,
        skill_dict = SkillDict,
        buff_dict = dict:new(),
        effect_dict = dict:new(),
        effect_src_dict = dict:new()
    },
    HookState1 = HookState#hook_state{
        hook_pet_state = ObjState,
        hook_player_state = HookPlayerState#hook_obj_state{pet_id = HookPlayerState#hook_obj_state.obj_id}
    },
    {HookState1, ObjState}.
%% Lay a cross-shaped (5-cell) fire wall centred on {X, Y}.
%% Each cell damages for Percent of the player's magic attack, ticks every
%% Interval seconds, and expires after EffectiveTime seconds.
%% Returns {NewHookState, [#proto_hook_fire_wall{}]}.
make_fire_wall(HookState, Percent, EffectiveTime, Interval, {X, Y}) ->
    HookPlayerState = HookState#hook_state.hook_player_state,
    Attr = HookPlayerState#hook_obj_state.attr_total,
    CurTime = util_date:unixtime(),
    %% Centre plus the four orthogonal neighbours.
    PointList = [{X, Y}, {X + 1, Y}, {X - 1, Y}, {X, Y + 1}, {X, Y - 1}],
    F = fun({X1, Y1}, Acc) ->
        {HookState1, FireWallList} = Acc,
        %% Allocate a fresh uid per cell from the monotonically growing counter.
        Uid = HookState1#hook_state.fire_wall_uid + 1,
        FireWallDict = HookState1#hook_state.fire_wall_dict,
        State = #fire_wall_state{
            uid = Uid,
            min_att = util_math:floor(Attr#attr_base.min_mac * Percent / ?PERCENT_BASE),
            max_att = util_math:floor(Attr#attr_base.max_mac * Percent / ?PERCENT_BASE),
            interval = Interval,
            next_time = CurTime,
            remove_time = CurTime + EffectiveTime
        },
        NewFireWallDict = dict:store(Uid, State, FireWallDict),
        HookState2 = HookState1#hook_state{fire_wall_dict = NewFireWallDict, fire_wall_uid = Uid},
        ProtoFireWall = #proto_hook_fire_wall{
            obj_flag = #proto_obj_flag{type = ?OBJ_TYPE_FIRE_WALL, id = Uid},
            point = #proto_point{x = X1, y = Y1},
            interval = Interval,
            duration = EffectiveTime
        },
        NewFireWallList = [ProtoFireWall | FireWallList],
        {HookState2, NewFireWallList}
    end,
    lists:foldl(F, {HookState, []}, PointList).
%% Push a round-result (13004) to the client, with the countdown to the
%% next round clamped to at least 5 seconds.
send_result_to_client(PlayerState, HookState, ResultStatus) ->
    NextTime = max(5, HookState#hook_state.next_round_time - util_date:unixtime()),
    Data1 = #rep_round_result{
        status = ResultStatus,
        next_time = NextTime
    },
    net_send:send_to_client(PlayerState#player_state.socket, 13004, Data1).
%% Apply a skill cast by {CasterType, CasterId} on TargetFlagList at {X, Y}.
%% Drives the whole post-cast pipeline: state update, harm/cure/buff push,
%% pet summoning, buff-effect removal, fire walls, and round-end detection.
%% Returns {NewPlayerState, NewHookState} on success, 'skip' on failure.
obj_use_skill(PlayerState, HookState, {CasterType, CasterId}, SkillId, TargetFlagList, {X, Y}) ->
    case skill_base_lib:hook_use_skill(HookState, {CasterType, CasterId}, SkillId, TargetFlagList) of
        {ok, UpdateDict, EffectProto} ->
            {NewPlayerState, HookState1} = update_obj_state(UpdateDict, PlayerState, HookState),
            #hook_state{
                hook_player_state = HookPlayerState,
                monster_dict = MonsterDict
            } = HookState1,
            Data = #rep_hook_use_skill{
                harm_list = EffectProto#skill_effect.harm_list,
                cure_list = EffectProto#skill_effect.cure_list,
                buff_list = EffectProto#skill_effect.buff_list
            },
            net_send:send_to_client(PlayerState#player_state.socket, 13003, Data),
            %% Summon a pet if the skill calls one.
            _NewHookState0 =
                case EffectProto#skill_effect.call_pet of
                    PetId when is_integer(PetId) ->
                        {HookState2, PetObj} = create_pet(HookState1, PetId),
                        AddObjData = #rep_add_hook_obj{
                            hook_obj_list = [make_proto_hook_monster(PetObj, PlayerState)]
                        },
                        net_send:send_to_client(NewPlayerState#player_state.socket, 13018, AddObjData),
                        HookState2;
                    _ ->
                        HookState1
                end,
            %% If the skill removes a buff effect, perform the removal.
            _NewHookState1 =
                case EffectProto#skill_effect.remove_effect of
                    {ObjType, ObjId, EffectId} ->
                        %% BUGFIX: the lookup of the affected object was
                        %% missing, leaving a dangling match clause; fetch it
                        %% via get_obj/3 and fall through when absent.
                        case get_obj(_NewHookState0, ObjType, ObjId) of
                            #hook_obj_state{} = ObjState ->
                                {NewObjState, RemoveBuffList} = buff_base_lib:remove_effect_buff(ObjState, EffectId),
                                {_, HookState3} = update_obj_state(NewObjState, NewPlayerState, _NewHookState0),
                                MakeProtoF =
                                    fun(BuffId, Acc) ->
                                        Proto = #proto_buff_operate{
                                            obj_flag = #proto_obj_flag{type = ObjType, id = ObjId},
                                            operate = ?BUFF_OPERATE_DELETE,
                                            buff_id = BuffId,
                                            effect_id = EffectId
                                        },
                                        [Proto | Acc]
                                    end,
                                List = lists:foldl(MakeProtoF, [], RemoveBuffList),
                                net_send:send_to_client(NewPlayerState#player_state.socket, 13021, #rep_hook_buff_operate{buff_list = List}),
                                HookState3;
                            _ ->
                                _NewHookState0
                        end;
                    _ ->
                        _NewHookState0
                end,
            %% If this is a fire-wall skill, create the fire walls.
            NewHookState =
                case EffectProto#skill_effect.fire_wall of
                    {Percent, EffectiveTime, Interval} ->
                        %% BUGFIX: was threading _NewHookState0 here, silently
                        %% discarding the buff-removal result above.
                        {HookState4, FireWallList} = make_fire_wall(_NewHookState1, Percent, EffectiveTime, Interval, {X, Y}),
                        AddFireWallData = #rep_add_hook_fire_wall{
                            hook_fire_wall_list = FireWallList
                        },
                        net_send:send_to_client(NewPlayerState#player_state.socket, 13019, AddFireWallData),
                        HookState4;
                    _ ->
                        _NewHookState1
                end,
            %% BUGFIX: IsPlayerDie/KillNum/MonsterNum were never bound; derive
            %% them from the post-update player object and monster dictionary
            %% using the helpers defined below in this module.
            IsPlayerDie = is_player_die(HookPlayerState),
            KillNum = monster_die_num(MonsterDict),
            MonsterNum = dict:size(MonsterDict),
            {IsEnd, Status} =
                if
                    IsPlayerDie =:= true ->
                        {true, ?RESULT_STATUS_FAIL};
                    KillNum >= MonsterNum ->
                        {true, ?RESULT_STATUS_WIN};
                    true ->
                        {false, ?RESULT_STATUS_FAIL}
                end,
            %% Close the round at most once.
            case IsEnd andalso NewHookState#hook_state.round_status /= ?ROUND_STATUS_END of
                true ->
                    send_result_to_client(NewPlayerState, NewHookState, Status),
                    {NewPlayerState, NewHookState#hook_state{round_status = ?ROUND_STATUS_END}};
                _ ->
                    {NewPlayerState, NewHookState}
            end;
        {fail, ?ERR_HOOK_OBJ_NOT} ->
            %% Target object missing (stale client round): ask it to wait.
            send_result_to_client(PlayerState, HookState, ?RESULT_STATUS_WAIT),
            skip;
        _Err ->
            skip
    end.
%% Handle one monster's death: mark its loot as granted, award exp, coin
%% and drops, and (for bosses) run the boss-kill progression.
%% Idempotent: a monster already flagged is_drop = true is ignored.
do_monster_die(PlayerState, HookState, MonsterUid) ->
    %% BUGFIX: the case head fetching the monster object was missing,
    %% leaving a dangling match clause; look it up via get_obj/3 and only
    %% process a dead monster whose loot has not been handed out yet.
    case get_obj(HookState, ?OBJ_TYPE_MONSTER, MonsterUid) of
        #hook_obj_state{is_drop = false, status = ?STATUS_DIE} = ObjState ->
            #hook_scene_conf{
                per_exp = Exp,
                per_coin = Coin
            } = hook_scene_config:get(HookState#hook_state.scene_id),
            %% Flag the monster so its loot cannot be granted twice.
            NewObjState = ObjState#hook_obj_state{is_drop = true},
            NewMonsterDict = dict:store(MonsterUid, NewObjState, HookState#hook_state.monster_dict),
            HookState1 = HookState#hook_state{monster_dict = NewMonsterDict},
            DbPlayerBase = PlayerState#player_state.db_player_base,
            Career = DbPlayerBase#db_player_base.career,
            HookSceneConf = hook_scene_config:get(HookState1#hook_state.scene_id),
            GoodsList =
                case NewObjState#hook_obj_state.is_boss of
                    true ->
                        make_boss_drop(Career, HookSceneConf);
                    _ ->
                        make_monster_drop(Career, HookSceneConf)
                end,
            %% Exp is boosted by the VIP hook-exp bonus (percentage points).
            VipAddExp = vip_lib:get_vip_hook_exp(Career, DbPlayerBase#db_player_base.vip),
            {ok, PlayerState1} = player_lib:add_exp(PlayerState, util_math:floor(Exp * ((100 + VipAddExp) / 100)), {?LOG_TYPE_HOOK, []}),
            {ok, PlayerState2} = player_lib:incval_on_player_money_log(PlayerState1, #db_player_money.coin, Coin, ?LOG_TYPE_HOOK),
            {ok, PlayerState3, _SellCoin, _QList} = goods_lib:add_goods_list_and_auto_sell(PlayerState2, GoodsList),
            Data = make_rep_drop(MonsterUid, GoodsList),
            net_send:send_to_client(PlayerState3#player_state.socket, 13005, Data),
            case NewObjState#hook_obj_state.is_boss of
                true ->
                    {PlayerState4, HookState2} = do_boss_die(PlayerState3, HookState1, HookSceneConf),
                    {ok, PlayerState5} = task_comply:update_player_task_info(PlayerState4, ?TASKSORT_BOSS, 1),
                    {PlayerState5, HookState2};
                _ ->
                    {PlayerState3, HookState1}
            end;
        _ ->
            {PlayerState, HookState}
    end.
%% Compute the star rating.
%% Rate the round by clear speed: within star_3_time => 3 stars, within
%% star_2_time => 2 stars, otherwise 1 star.
compute_star(HookState) ->
    CurTime = util_date:unixtime(),
    SceneId = HookState#hook_state.scene_id,
    StartTime = HookState#hook_state.start_time,
    UseTime = CurTime - StartTime,
    #hook_scene_conf{
        star_2_time = Star2Time,
        star_3_time = Star3Time
    } = hook_scene_config:get(SceneId),
    if
        Star3Time >= UseTime -> 3;
        Star3Time < UseTime andalso UseTime =< Star2Time -> 2;
        true -> 1
    end.
%% Hook boss death logic.
%% Handle a boss kill: consume a challenge attempt, advance scene progress
%% (first-time clears also grant the first-clear prize and move the player
%% to the next scene), notify the client and store the star rating.
do_boss_die(PlayerState, HookState, HookSceneConf) ->
    DbPlayerBase = PlayerState#player_state.db_player_base,
    PassSceneId = DbPlayerBase#db_player_base.pass_hook_scene_id,
    CurSceneId = HookState#hook_state.scene_id,
    {PlayerState1, ChallengeNum, NeedJade} = get_challenge_info(PlayerState, false),
    %% One attempt is consumed by this kill.
    NewChallengeNum = ChallengeNum - 1,
    Socket = PlayerState#player_state.socket,
    {NewPlayerState, NewHookState} =
        case CurSceneId >= PassSceneId of
            true ->
                %% New furthest clear: advance to the next scene if one is
                %% configured, otherwise stay on the current one.
                NewHookSceneId =
                    case hook_scene_config:get(CurSceneId + 1) of
                        #hook_scene_conf{} = _ ->
                            CurSceneId + 1;
                        _ ->
                            CurSceneId
                    end,
                Update = #player_state{
                    db_player_base = #db_player_base{
                        pass_hook_scene_id = CurSceneId,
                        hook_scene_id = NewHookSceneId,
                        challenge_num = NewChallengeNum
                    }
                },
                {ok, PlayerState2} = player_lib:update_player_state(PlayerState1, Update, false),
                %% First-clear prize is delivered bound, by mail on overflow.
                GoodsList = [{GoodsId, ?BIND, Num} || {GoodsId, Num} <- HookSceneConf#hook_scene_conf.first_prize],
                {ok, PlayerState3} = goods_lib_log:add_goods_list_and_send_mail(PlayerState2, GoodsList, ?LOG_TYPE_HOOK),
                net_send:send_to_client(Socket, 13008, #rep_change_hook_scene1{scene_id = NewHookSceneId}),
                {PlayerState3, HookState#hook_state{scene_id = NewHookSceneId, challenge_boos = false}};
            _ ->
                Update = #player_state{
                    db_player_base = #db_player_base{
                        challenge_num = NewChallengeNum
                    }
                },
                {ok, PlayerState2} = player_lib:update_player_state(PlayerState1, Update, false),
                {PlayerState2, HookState#hook_state{challenge_boos = false}}
        end,
    player_lib:send_update(PlayerState, NewPlayerState, ?UPDATE_CAUSE_OTHER),
    net_send:send_to_client(Socket, 13006, #rep_challenge_num{challenge_num = NewChallengeNum, need_jade = NeedJade}),
    Base = NewPlayerState#player_state.db_player_base,
    %% Compute and persist the star rating for this clear.
    Star = compute_star(NewHookState),
    net_send:send_to_client(Socket, 13017, #rep_challenge_boos_result{status = ?RESULT_STATUS_WIN, scene_id = Base#db_player_base.hook_scene_id}),
    player_hook_star_lib:store_hook_star(NewPlayerState, CurSceneId, Star),
    {NewPlayerState, NewHookState}.
%% Wrap a drop list ([{GoodsId, IsBind, Num}]) into the 13005
%% drop-notification proto for one monster; the bind flag is not shown.
make_rep_drop(MonsterUid, DropList) ->
    ProtoDrops =
        lists:map(
            fun({GoodsId, _IsBind, Num}) ->
                #proto_hook_drop{goods_id = GoodsId, num = Num}
            end,
            DropList),
    #rep_drop{
        obj_flag = #proto_obj_flag{type = ?OBJ_TYPE_MONSTER, id = MonsterUid},
        drop_list = ProtoDrops
    }.
%% Recursively add Count monsters to MonsterDict, picking each template by
%% weighted random from MonsterList and deriving uids from SeedId.
add_monster(0, _MonsterList, MonsterDict, _SeedId) ->
    MonsterDict;
add_monster(Count, MonsterList, MonsterDict, SeedId) ->
    %% Each uid is a random step past the seed; the next call seeds from
    %% MonsterUid + 1 so uids stay strictly increasing (no collisions).
    MonsterUid = SeedId + util_rand:rand(1, 10),
    MonsterId = util_rand:weight_rand_ex(MonsterList),
    MonsterConf = monster_config:get(MonsterId),
    MonsterAttr = MonsterConf#monster_conf.attr_base,
    F = fun(SkillId, Acc) ->
        dict:store(SkillId, #db_skill{skill_id = SkillId, lv = 1, next_time = 0}, Acc)
    end,
    SkillDict = lists:foldl(F, dict:new(), MonsterConf#monster_conf.hook_skill_list),
    ObjState = #hook_obj_state{
        obj_id = MonsterUid,
        obj_type = ?OBJ_TYPE_MONSTER,
        monster_id = MonsterId,
        status = ?STATUS_ALIVE,
        is_boss = false,
        last_use_skill_time = 0,
        attr_base = MonsterAttr,
        attr_total = MonsterAttr,
        cur_hp = MonsterAttr#attr_base.hp,
        cur_mp = MonsterAttr#attr_base.mp,
        lv = MonsterConf#monster_conf.lv,
        career = MonsterConf#monster_conf.career,
        order_skill_list = MonsterConf#monster_conf.hook_skill_list,
        skill_dict = SkillDict,
        buff_dict = dict:new(),
        effect_dict = dict:new(),
        effect_src_dict = dict:new(),
        is_drop = false
    },
    NewDict = dict:store(MonsterUid, ObjState, MonsterDict),
    add_monster(Count - 1, MonsterList, NewDict, MonsterUid + 1).
%% Weighted-average HP over the scene's monster pool
%% ([{MonsterId, Weight}]). Used by the offline-gain estimator.
get_monster_avg_hp(HookSceneConf) ->
    Step =
        fun({MonsterId, Weight}, {HpSum, WeightSum}) ->
            MonsterConf = monster_config:get(MonsterId),
            Attr = MonsterConf#monster_conf.attr_base,
            {Attr#attr_base.hp * Weight + HpSum, WeightSum + Weight}
        end,
    {TotalHp, TotalWeight} =
        lists:foldl(Step, {0, 0}, HookSceneConf#hook_scene_conf.monster_list),
    TotalHp / TotalWeight.
%% Roll drops from the scene's normal-monster drop table.
make_monster_drop(Career, HookSceneConf) ->
    MonsterDrop = HookSceneConf#hook_scene_conf.monster_drop,
    make_drop(Career, MonsterDrop).
%% Roll drops from the scene's boss drop table.
make_boss_drop(Career, HookSceneConf) ->
    BossDrop = HookSceneConf#hook_scene_conf.boss_drop,
    make_drop(Career, BossDrop).
%% Roll a drop list for Career. Each table entry is
%% {CareerLimit, DropNumList, GoodsList}; CareerLimit 0 means "any career".
%% The drop count is weighted-random, then each item is weighted-random
%% from GoodsList. Returns [{GoodsId, IsBind, Num}].
make_drop(Career, DropList) ->
    F = fun({CareerLimit, DropNumList, GoodsList}, Acc) ->
        case CareerLimit == Career orelse CareerLimit == 0 of
            true ->
                DropNum = util_rand:weight_rand_ex(DropNumList),
                case DropNum > 0 of
                    true ->
                        List1 = [{{GoodsId, IsBind, Num}, Rate} || {GoodsId, IsBind, Num, Rate} <- GoodsList],
                        DropList1 = [util_rand:weight_rand_ex(List1) || _N <- lists:seq(1, DropNum)],
                        DropList1 ++ Acc;
                    _ ->
                        Acc
                end;
            _ ->
                Acc
        end
    end,
    lists:foldl(F, [], DropList).
%% Derive hook statistics (kills / exp / coin) from offline elapsed time.
%% Estimate kills, exp and coin for TimeCount seconds of hooking from the
%% player's average attack and the scene's monster pool.
%% Returns {SumKill, Exp, Coin}.
get_hook_statistics(PlayerState, TimeCount) ->
    PlayerBase = PlayerState#player_state.db_player_base,
    PlayerAttr = PlayerState#player_state.attr_total,
    HookSceneConf = hook_scene_config:get(PlayerBase#db_player_base.hook_scene_id),
    %% Average HP of a single monster in one round.
    MonseterAvgHp = get_monster_avg_hp(HookSceneConf),
    %% Career determines which attack stat pair applies (phys/magic/taoist).
    PlayerAvgAtt =
        case PlayerBase#db_player_base.career of
            ?CAREER_ZHANSHI ->
                (PlayerAttr#attr_base.min_ac + PlayerAttr#attr_base.max_ac) / 2;
            ?CAREER_FASHI ->
                (PlayerAttr#attr_base.min_mac + PlayerAttr#attr_base.max_mac) / 2;
            _ ->
                (PlayerAttr#attr_base.min_sc + PlayerAttr#attr_base.max_sc) / 2
        end,
    MinCount = HookSceneConf#hook_scene_conf.min_monster_num,
    MaxCount = HookSceneConf#hook_scene_conf.max_monster_num,
    Count = (MinCount + MaxCount) / 2,
    %% Time needed to kill one round's worth of monsters, floored at the
    %% scene's minimum round time.
    T = util_math:ceil(MonseterAvgHp / PlayerAvgAtt * Count),
    T1 = max(T, HookSceneConf#hook_scene_conf.min_round_time),
    SumKill = util_math:floor(TimeCount / T1 * Count),
    Exp = SumKill * HookSceneConf#hook_scene_conf.per_exp,
    Coin = SumKill * HookSceneConf#hook_scene_conf.per_coin,
    {SumKill, Exp, Coin}.
%% Compute offline hook rewards.
%% Compute offline hook rewards for TimeCount seconds and stash the
%% resulting #hook_report{} on the player state; it is claimed later via
%% receive_hook_draw/2.
compute_hook_offline(PlayerState, TimeCount) ->
    %% BUGFIX: GoodsHook was unbound -- the gain-computation call was
    %% missing from this function body.
    GoodsHook = compute_hook_gain(PlayerState, TimeCount),
    PlayerState#player_state{
        hook_report = GoodsHook
    }.
%% Build a #hook_report{} for TimeCountTemp seconds of hooking.
%% Times under one second are treated as a minimum of 60 seconds; an empty
%% report is returned when the hook feature is not yet unlocked.
compute_hook_gain(PlayerState, TimeCountTemp) ->
    TimeCount = case TimeCountTemp < 1 of
        true ->
            60;
        _ ->
            TimeCountTemp
    end,
    case function_lib:is_function_open(PlayerState, ?FUNCTION_ID_HOOK) of
        true ->
            PlayerBase = PlayerState#player_state.db_player_base,
            HookSceneConf = hook_scene_config:get(PlayerBase#db_player_base.hook_scene_id),
            {SumKill, Exp, Coin} = get_hook_statistics(PlayerState, TimeCount),
            Career = PlayerBase#db_player_base.career,
            %% Roll the drop table once per estimated kill and accumulate the
            %% results keyed by {GoodsId, IsBind}.
            F = fun(_, Acc) ->
                case make_monster_drop(Career, HookSceneConf) of
                    [] ->
                        Acc;
                    DropList ->
                        F1 =
                            fun({GoodsId, IsBind, Num}, Acc1) ->
                                case dict:find({GoodsId, IsBind}, Acc1) of
                                    {ok, Num1} ->
                                        dict:store({GoodsId, IsBind}, Num1 + Num, Acc1);
                                    _ ->
                                        dict:store({GoodsId, IsBind}, Num, Acc1)
                                end
                            end,
                        lists:foldl(F1, Acc, DropList)
                end
            end,
            DropDict = lists:foldl(F, dict:new(), lists:seq(1, SumKill)),
            #hook_report{
                time_count = TimeCount,
                kill_num = SumKill,
                exp = Exp,
                coin = Coin,
                goods_dict = DropDict
            };
        _ ->
            #hook_report{}
    end.
%% Claim a hook report's rewards (exp, coin, goods with auto-sell).
%% GoodsHook =:= null claims the report stored on the player state and
%% clears it afterwards; otherwise the supplied report is claimed as-is.
%% Reports with no exp are treated as empty and ignored.
receive_hook_draw(PlayerState, GoodsHook) ->
    HookReport = case GoodsHook of
        null ->
            PlayerState#player_state.hook_report;
        _ ->
            GoodsHook
    end,
    Exp = HookReport#hook_report.exp,
    case Exp > 0 of
        true ->
            F1 = fun({GoodsId, IsBind}, Num, Acc) ->
                [{GoodsId, IsBind, Num} | Acc]
            end,
            GoodsList = dict:fold(F1, [], HookReport#hook_report.goods_dict),
            {ok, PlayerState1, SellCoin, _SellList} = goods_lib:add_goods_list_and_auto_sell(PlayerState, GoodsList),
            Base = PlayerState1#player_state.db_player_base,
            DbPlayerBase = PlayerState1#player_state.db_player_base,
            %% Exp is boosted by the VIP hook-exp bonus (percentage points).
            VipAddExp = vip_lib:get_vip_hook_exp(Base#db_player_base.career, DbPlayerBase#db_player_base.vip),
            %% Overflowed goods were auto-sold; their coin joins the payout.
            Coin = HookReport#hook_report.coin + SellCoin,
            {ok, PlayerState2} = player_lib:add_exp(PlayerState1, util_math:floor(Exp * ((100 + VipAddExp) / 100)), {?LOG_TYPE_HOOK, []}),
            {ok, PlayerState3} = player_lib:incval_on_player_money_log(PlayerState2, #db_player_money.coin, Coin, ?LOG_TYPE_HOOK),
            case GoodsHook of
                null ->
                    %% The stored report is consumed; reset it.
                    PlayerState3#player_state{
                        hook_report = #hook_report{}
                    };
                _ ->
                    PlayerState3
            end;
        _ ->
            PlayerState
    end.
%% Jade cost for the next power purchase.
%% Jade cost of the (BuyNum + 1)-th power purchase, or 0 when the purchase
%% count is past the configured range.
get_buy_power_need(BuyNum) ->
    case buy_power_need_config:get(BuyNum + 1) of
        #buy_power_need_conf{need_jade = NeedJade} ->
            NeedJade;
        _ ->
            0
    end.
%% Buy hook attempts (power).
%% Buy one hook attempt with jade, bounded by the VIP-level purchase cap.
%% Returns {ok, NewPlayerState} or {fail, ErrorCode}.
buy_power(PlayerState) ->
    BuyHookNum = counter_lib:get_value(PlayerState#player_state.player_id, ?COUNTER_HOOK_BUY_NUM),
    Base = PlayerState#player_state.db_player_base,
    VipBuyHookNum = vip_lib:get_vip_buy_hook_num(Base#db_player_base.career, Base#db_player_base.vip),
    case BuyHookNum >= VipBuyHookNum of
        true ->
            %% VIP level caps how many purchases are allowed.
            {fail, ?ERR_VIP_3};
        _ ->
            BuyPowerConf = buy_power_need_config:get(BuyHookNum + 1),
            DbPlayerMoney = PlayerState#player_state.db_player_money,
            case DbPlayerMoney#db_player_money.jade >= BuyPowerConf#buy_power_need_conf.need_jade of
                true ->
                    case player_lib:incval_on_player_money_log(PlayerState, #db_player_money.jade, -BuyPowerConf#buy_power_need_conf.need_jade, ?LOG_TYPE_BUY_POWER) of
                        {ok, PlayerState2} ->
                            %% Record the purchase against the daily counter.
                            counter_lib:update_limit(PlayerState#player_state.player_id, ?COUNTER_HOOK_BUY_NUM),
                            {ok, PlayerState2};
                        _ ->
                            {fail, ?ERR_PLAYER_JADE_NOT_ENOUGH}
                    end;
                _ ->
                    {fail, ?ERR_PLAYER_JADE_NOT_ENOUGH}
            end
    end.
%% Fire-wall attack. (The client triggers the attack; the server only computes the damage.)
%% Process a batch of client-reported fire-wall hits: for each still-active
%% wall and still-alive monster, compute damage server-side, apply it, and
%% push the combined harm list back via 13003.
fire_wall_attack(PlayerState, HookState, Data) ->
    CurTime = util_date:unixtime(),
    F = fun(FireWallAttack, Acc) ->
        {PlayerState1, HookState1, HarmList} = Acc,
        #proto_fire_wall_attack{
            fire_wall_uid = Fuid,
            monster_uid = Muid
        } = FireWallAttack,
        FireWallDict = HookState1#hook_state.fire_wall_dict,
        MonsterDict = HookState1#hook_state.monster_dict,
        case dict:find(Fuid, FireWallDict) of
            %% Wall must not have expired yet.
            %% NOTE(review): next_time is advanced below but never checked
            %% against CurTime, so the tick rate is not enforced
            %% server-side -- confirm this is intentional (client-driven).
            {ok, #fire_wall_state{next_time = NT, remove_time = RT} = FWState} when CurTime < RT ->
                case dict:find(Muid, MonsterDict) of
                    {ok, #hook_obj_state{cur_hp = CurHp} = ObjState} when CurHp > 0 ->
                        Interval = FWState#fire_wall_state.interval,
                        NFWState = FWState#fire_wall_state{next_time = NT + Interval},
                        {HarmResult, NewObjState} = skill_base_lib:fire_wall_attack(NFWState, ObjState),
                        NewFireWallDict = dict:store(Fuid, NFWState, FireWallDict),
                        HookState2 = HookState1#hook_state{fire_wall_dict = NewFireWallDict},
                        %% Routes through update_obj_state/3 so monster deaths
                        %% grant loot as usual.
                        {PlayerState2, HookState3} = update_obj_state(NewObjState, PlayerState1, HookState2),
                        ProtoHarm = #proto_harm{
                            obj_flag = #proto_obj_flag{type = ?OBJ_TYPE_MONSTER, id = Muid},
                            harm_status = HarmResult#harm_result.status,
                            harm_value = HarmResult#harm_result.harm_value,
                            cur_hp = NewObjState#hook_obj_state.cur_hp,
                            cur_mp = NewObjState#hook_obj_state.cur_mp
                        },
                        NewHarmList = [ProtoHarm | HarmList],
                        {PlayerState2, HookState3, NewHarmList};
                    _ ->
                        Acc
                end;
            _ ->
                Acc
        end
    end,
    {NewPlayerState, NewHookState, List} = lists:foldl(F, {PlayerState, HookState, []}, Data#req_hook_fire_wall_attack.fire_wall_attack_list),
    net_send:send_to_client(PlayerState#player_state.socket, 13003, #rep_hook_use_skill{harm_list = List}),
    {NewPlayerState, NewHookState}.
%% UI badge helper: remaining hook attempts = purchased + daily limit - used.
get_button_tips_hook_raids(PlayerState) ->
    BuyHookNum = counter_lib:get_value(PlayerState#player_state.player_id, ?COUNTER_HOOK_BUY_NUM),
    LimitNum = counter_lib:get_limit(?COUNTER_HOOK_NUM),
    HookNum = counter_lib:get_value(PlayerState#player_state.player_id, ?COUNTER_HOOK_NUM),
    {PlayerState, BuyHookNum + LimitNum - HookNum}.
%% ====================================================================
%% Internal functions
%% ====================================================================
%% Is the player dead?
%% True when the hook player object's status is the "dead" status.
is_player_die(HookPlayerState) ->
    HookPlayerState#hook_obj_state.status =:= ?STATUS_DIE.
%% Count how many monsters in the round are dead.
monster_die_num(MonsterDict) ->
    CountDead =
        fun(_Uid, Obj, Acc) ->
            case Obj#hook_obj_state.status =:= ?STATUS_DIE of
                true -> Acc + 1;
                false -> Acc
            end
        end,
    dict:fold(CountDead, 0, MonsterDict).
%% Write an updated object back into the hook state.
%% First clause: single #hook_obj_state{}. Monster deaths are routed through
%% do_monster_die/3 (loot, exp, boss progression); a dead pet is unlinked
%% from the player. Returns {NewPlayerState, NewHookState}.
update_obj_state(HookObjState, PlayerState, HookState) when is_record(HookObjState, hook_obj_state) ->
    ObjType = HookObjState#hook_obj_state.obj_type,
    ObjId = HookObjState#hook_obj_state.obj_id,
    case ObjType of
        ?OBJ_TYPE_PLAYER ->
            {PlayerState, HookState#hook_state{hook_player_state = HookObjState}};
        ?OBJ_TYPE_MONSTER ->
            MonsterDict = HookState#hook_state.monster_dict,
            NewMonsterDict = dict:store(ObjId, HookObjState, MonsterDict),
            NewHookState = HookState#hook_state{monster_dict = NewMonsterDict},
            case HookObjState#hook_obj_state.status of
                ?STATUS_DIE ->
                    do_monster_die(PlayerState, NewHookState, ObjId);
                _ ->
                    {PlayerState, NewHookState}
            end;
        ?OBJ_TYPE_PET ->
            case HookObjState#hook_obj_state.status of
                ?STATUS_ALIVE ->
                    {PlayerState, HookState#hook_state{hook_pet_state = HookObjState}};
                _ ->
                    %% Pet died: clear the owner's pet link, keep the corpse.
                    HookPlayerState = HookState#hook_state.hook_player_state,
                    NewHookState = HookState#hook_state{
                        hook_player_state = HookPlayerState#hook_obj_state{pet_id = null},
                        hook_pet_state = HookObjState
                    },
                    {PlayerState, NewHookState}
            end;
        _ ->
            {PlayerState, HookState}
    end;
%% Second clause: a dict of objects; folds the single-object clause over it.
update_obj_state(UpdateDict, PlayerState, HookState) ->
    F = fun(_, HookObjState, Acc) ->
        {PlayerState1, HookState1} = Acc,
        update_obj_state(HookObjState, PlayerState1, HookState1)
    end,
    dict:fold(F, {PlayerState, HookState}, UpdateDict).
%% Build the client proto for a hook combat object.
%% Pets additionally carry owner/guild/team/name-colour fields read from
%% PlayerState; for non-pets PlayerState may be 'null' and is not touched.
make_proto_hook_monster(MonsterObj, PlayerState) ->
    #hook_obj_state{
        obj_id = ObjId,
        obj_type = ObjType,
        attr_total = AttrTotal,
        monster_id = MonsterId,
        cur_hp = CurHp,
        cur_mp = CurMp
    } = MonsterObj,
    case ObjType of
        ?OBJ_TYPE_PET ->
            DbPlayerBase = PlayerState#player_state.db_player_base,
            #proto_hook_monster{
                obj_flag = #proto_obj_flag{type = ObjType, id = ObjId},
                %% Pet and owner share the same id (see create_pet/2).
                owner_flag = #proto_obj_flag{type = ?OBJ_TYPE_PLAYER, id = ObjId},
                monster_id = MonsterId,
                cur_hp = CurHp,
                cur_mp = CurMp,
                hp = AttrTotal#attr_base.hp,
                mp = AttrTotal#attr_base.mp,
                guild_id = DbPlayerBase#db_player_base.guild_id,
                team_id = PlayerState#player_state.team_id,
                name_colour = PlayerState#player_state.name_colour
            };
        _ ->
            #proto_hook_monster{
                obj_flag = #proto_obj_flag{type = ObjType, id = ObjId},
                monster_id = MonsterId,
                cur_hp = CurHp,
                cur_mp = CurMp,
                hp = AttrTotal#attr_base.hp,
                mp = AttrTotal#attr_base.mp
            }
    end.
|
ee966cb88a6e166cbfbe81943beca663a9ecabd1bda81f915d194f5b7ceadfd7 | tlaplus/tlapm | loc.mli |
(*
 * loc.mli --- source locations
 *
 * Copyright (C) 2008-2010 INRIA and Microsoft Corporation
 *)
(** Source locations *)
(** A location represents a col in a source file *)
type pt_ = { line : int ; (* line number *)
bol : int ; (* beginning of line *)
col : int ; (* column number relative to beginning of line,
see the implementation of the function `locus_of_position`. *)
}
type pt = Actual of pt_ | Dummy
(** A location that is always invalid (but both = and == itself). *)
val dummy : pt
(** The line number of the location, starting from 1. Raises [Failure
    "Loc.line"] if the location is a dummy. *)
val line : pt -> int
(** The column number of the location, starting from 1. Raises
    [Failure "Loc.col"] if the location is a dummy. *)
val column : pt -> int
(** The character offset of a location in a file. Raises [Failure
"Loc.point"] if the location is a dummy. *)
val offset : pt -> int
(** String representation of a location. The [file] argument is by
default "<nofile>". *)
val string_of_pt : ?file:string -> pt -> string
(** The area of a locus is the space between [start] and [end],
excluding both. *)
type locus = {
start : pt ;
stop : pt ;
file : string ;
}
(** left edge *)
val left_of : locus -> locus
(** right edge *)
val right_of : locus -> locus
(** A bogus locus *)
val unknown : locus
(** Convert a [Lexing.position] to a [locus] of 0 width. *)
val locus_of_position : Lexing.position -> locus
(** Merge two loci. Raises [Failure "Loc.merge"] if the loci are in
    different files. *)
val merge : locus -> locus -> locus
(** String representation of a locus. Capitalize the first word in the
    result iff [cap] is true (the default). *)
val string_of_locus : ?cap:bool -> locus -> string
(** String representation of a locus without filename. *)
val string_of_locus_nofile : locus -> string
(** Comparing loci *)
val compare : locus -> locus -> int
| null | https://raw.githubusercontent.com/tlaplus/tlapm/b82e2fd049c5bc1b14508ae16890666c6928975f/src/loc.mli | ocaml | * Source locations
* A location represents a col in a source file
line number
beginning of line
column number relative to beginning of line,
see the implementation of the function `locus_of_position`.
* A location that is always invalid (but both = and == itself).
* The character offset of a location in a file. Raises [Failure
"Loc.point"] if the location is a dummy.
* String representation of a location. The [file] argument is by
default "<nofile>".
* The area of a locus is the space between [start] and [end],
excluding both.
* left edge
* right edge
* A bogus locus
* String representation of a locus without filename.
* Comparing loci |
* loc.mli --- source locations
*
*
* Copyright ( C ) 2008 - 2010 INRIA and Microsoft Corporation
* loc.mli --- source locations
*
*
* Copyright (C) 2008-2010 INRIA and Microsoft Corporation
*)
}
type pt = Actual of pt_ | Dummy
val dummy : pt
* The line number of the location , starting from 1 . Raises [ Failure
" Loc.line " ] if the location is a dummy .
"Loc.line"] if the location is a dummy. *)
val line : pt -> int
* The column number of the location , starting from 1 . Raises
[ Failure " Loc.col " ] if the location is a dummy .
[Failure "Loc.col"] if the location is a dummy. *)
val column : pt -> int
val offset : pt -> int
val string_of_pt : ?file:string -> pt -> string
type locus = {
start : pt ;
stop : pt ;
file : string ;
}
val left_of : locus -> locus
val right_of : locus -> locus
val unknown : locus
* Convert a [ Lexing.position ] to a [ locus ] of 0 width .
val locus_of_position : Lexing.position -> locus
* Merge two loci . Raises [ Failure " Loc.merge " ] if the loci are in
different files .
different files. *)
val merge : locus -> locus -> locus
* String representation of a locus . Capitalize the first word in the
result iff [ cap ] is true ( the default ) .
result iff [cap] is true (the default). *)
val string_of_locus : ?cap:bool -> locus -> string
val string_of_locus_nofile : locus -> string
val compare : locus -> locus -> int
|
1110d0271ee8da6f14b4dedb555d48106c48b4b2f30bdaf328c9e15cefbc9b6b | moostang/autolisp | get_matchline_pairs.lsp | ;; ------------------------------------------------------------------------- ;;
;; ------------------------------------------------------------------------- ;;
Find pairs of coordinates for matchlines along a centerline . ; ;
Created on : March 02 , 2019 ; ;
;; ------------------------------------------------------------------------- ;;
;; ------------------------------------------------------------------------- ;;
;; Build the list of matchline pairs along a centerline table. For each
;; interval of length matchlineInterval it computes the triplet just before
;; and just after the cut and their central coordinates, sliding the
;; "before" triplet forward each iteration.
;; NOTE(review): overlapDistance is read below but never assigned in this
;; copy -- the "(setq overlapDistance ...)" that the "10 %" note refers to
;; appears to have been lost; restore it before use.
;; NOTE(review): matchlinePairList and centralCoordinates are not in the
;; local-variable list, so they are set as globals; the function's return
;; value is the final (repeat ...) form, not matchlinePairList.
(defun createStripMap(data rowCount fieldMXYZIndices matchlineInterval headerOption /
rowStartIndex overlapDistance fieldMIndex totalLength matchlineCount
beforeTriplet matchlineIndex mMatchline afterTriplet pairList
)
Get index of first row ; ;
(setq rowStartIndex (getRowStartIndex headerOption))
;; Calculate overlapping distance ;;
10 % of matchlineInterval
Get Index for mValue ; ;
(setq fieldMIndex (nth 0 fieldMXYZIndices))
Make list m values to position matchlines ; ;
(setq totalLength (getFloat data rowCount fieldMIndex)
matchlineCount (ceil (/ totalLength matchlineInterval) 1)
)
(print (strcat "totalLength: ;" (rtos totalLength) ", matchlineCount: " (itoa matchlineCount)))
;; Prepare empty list ;;
(setq matchlinePairList nil)
;; TEST OUTPUT ;;
(print "Getting coordinates of first matchline pair") (princ)
;; First "before" triplet sits overlapDistance ahead of station 0.
(setq beforeTriplet (getMatchlineTriplet data rowCount headerOption (- 0 overlapDistance) fieldMXYZIndices))
;; TEST OUTPUT ;;
(print (strcat "Looping over other matchlines. matchlineCount: " (itoa matchlineCount)))
(setq matchlineIndex 1)
(repeat matchlineCount
(print (strcat "matchlineIndex: " (itoa matchlineIndex)))
(setq mMatchline (* (float matchlineIndex) matchlineInterval)
afterTriplet (getMatchlineTriplet data rowCount headerOption mMatchline fieldMXYZIndices)
)
;; Calculate central coordinates ;;
(setq centralCoordinates (getCentralCoordinates beforeTriplet afterTriplet))
;; Append to output list ;;
;; Slide the window: this interval's "after" becomes next interval's
;; "before" (the intermediate nil setqs just clear scratch values).
(setq pairList (list matchlineIndex beforeTriplet afterTriplet centralCoordinates)
matchlinePairList (append matchlinePairList (list pairList))
beforeTriplet nil
beforeTriplet afterTriplet
afterTriplet nil
pairList nil
centralCoordinates nil
)
(setq matchlineIndex (1+ matchlineIndex))
);; repeat
);; defun
;; Return the (x y z) triplet at station mValue along the centerline table:
;;   Condition 1: mValue before the first station -> extrapolate backwards
;;   Condition 2: mValue within the table        -> interpolate between rows
;;   Condition 3: mValue past the last station   -> extrapolate forwards
;; The point is produced by stepping mDistance from a known row triplet
;; along the bearing of the bracketing segment; princ returns the triplet.
;; NOTE(review): several comment markers were lost in this copy -- the bare
;; lines "Condition 2" / "Condition 3" below were evidently ");; Condition N"
;; closers, so two cond clauses are left unclosed here; restore the parens
;; (and the ";;" on the other bare-text lines) before loading this file.
(defun getMatchlineTriplet (data rowCount headerOption mValue fieldIndices /
fieldMIndex rowStartIndex rowIndex totalRows mNext
mDistance mNextIndex mNextTriplet mBeforeTriplet
geomAtt mAngle mTriplet mBefore m0 mEnd)
;; Guard: need at least m, x, y field indices.
(if (< (length fieldIndices) 3)
(progn
(alert "[getMatchlineTriplet] Length of fieldIndices is less than 3")
(exit)
);; progn
)
Get Index for mValue ; ;
(setq fieldMIndex (nth 0 fieldIndices))
;; Prepare fore iteration ;;
(setq rowStartIndex (getRowStartIndex headerOption)
rowIndex rowStartIndex
totalRows (- rowCount rowStartIndex)
)
;; First and last station values bound the three cases below.
(setq m0 (getFloat data rowStartIndex fieldMIndex)
mEnd (getFloat data rowCount fieldMIndex)
)
;; TEST OUTPUT ;;
(print (strcat "mValue: " (rtos mValue) ", m0: " (rtos m0) ", mEnd: " (rtos mEnd)))
(cond
Condition 1 : If mValue comes before first mvalue in data ; ;
((< mValue m0)
(progn
TEST OUTPU ; ;
(print " Inside Condition 1")
(print rowStartIndex)
(print fieldIndices)
(setq mNextTriplet (getTriplet data rowStartIndex fieldIndices) )
(setq mBeforeTriplet (getTriplet data (+ rowStartIndex 1) fieldIndices))
(setq mBefore (getFloat data (+ rowStartIndex 1) fieldMIndex) )
(setq mDistance (- mValue m0)) ;; (- m0 mValue) if bearing is in opposite direction ;;
(print " End of Condition 1")
);; progn
);; Condition 1
Condition 2 : For other m values ; ;
( (and (> mValue m0) (<= mValue mEnd))
(print " Inside Condition 2")
;; Iterate ;;
;; Linear scan for the first row whose station exceeds mValue;
;; flagRepeat emulates an early break inside (repeat ...).
(setq flagRepeat 1)
(repeat totalRows
(if (eq flagRepeat 1)
(progn
(setq mNext (getFloat data rowIndex fieldMIndex))
(if (> mNext mValue)
(progn
(setq mNextIndex rowIndex
mNextTriplet (getTriplet data mNextIndex fieldIndices)
mBeforeTriplet (getTriplet data (- mNextIndex 1) fieldIndices)
mBefore (getFloat data (- mNextIndex 1) fieldMIndex)
mDistance (- mValue mBefore)
)
(setq flagRepeat 0) ;; Set flag to 0 after finding correct m value ;;
);; progn
);; if
);; progn
);; if
(setq rowIndex (1+ rowIndex))
);; repeat
Condition 2
Condition 3 : For last m value ; ;
((> mValue mEnd)
(print " Inside Condition 3")
(progn
(setq mNextTriplet (getTriplet data rowCount fieldIndices)
mBeforeTriplet (getTriplet data (- rowCount 1) fieldIndices)
mBefore (getFloat data (- rowCount 1) fieldMIndex)
mDistance (- mValue mEnd)
)
);; progn
Condition 3
);; cond
;; Calculation ;;
;; Step mDistance from the "before" row along the segment bearing.
(setq geomAtt (getGeometryAttributes mBeforeTriplet mNextTriplet)
mAngle (nth 3 geomAtt)
)
(setq mTriplet (getNextCoordinates mBeforeTriplet mDistance mAngle))
;; OUTPUT ;;
(princ mTriplet)
);; defun
;; Extract an (x y z) triplet from the given row. With 4 field indices the
;; x/y columns are indices 1 and 2; with 3 they are indices 0 and 1.
;; NOTE(review): zValue is not a declared local (it is global) and is only
;; assigned when the z field index is nil; when a real z index is supplied
;; the z column is never read from data and a stale/unbound zValue is
;; returned -- confirm whether z support was intended here.
;; NOTE(review): the trailing bare (alert ...) acts as the cond's last
;; clause, using the alert call itself as the test expression.
(defun getTriplet (data row fieldIndices)
(print "[getTriplet]")
(print fieldIndices)
(cond
((eq (length fieldIndices) 4)
;; Check for nil Z values ;;
(if (eq (nth 3 fieldIndices) nil)
(setq zValue 0.0)
)
(list (getFloat data row (nth 1 fieldIndices))
(getFloat data row (nth 2 fieldIndices))
zValue
)
)
((eq (length fieldIndices) 3)
;; Check for nil Z values ;;
(if (eq (nth 2 fieldIndices) nil)
(setq zValue 0.0)
)
(list (getFloat data row (nth 0 fieldIndices))
(getFloat data row (nth 1 fieldIndices))
zValue
)
)
(alert "[getTriplet] Something wrong with fieldIndices")
);; cond
);; defun
;; Midpoint of the segment m1Triplet->m2Triplet: step half the segment
;; length from m1 along the segment's bearing. Returns (via princ, which
;; prints and returns its argument) the list (xc yc zc angleDegrees).
;; NOTE(review): xyCenter is not in the local list, so it is set globally.
(defun getCentralCoordinates (m1Triplet m2Triplet / geomAtt hHalf)
(setq geomAtt (getGeometryAttributes m1Triplet m2Triplet))
(setq hHalf (* 0.5 (nth 2 geomAtt)))
(setq xyCenter (getNextCoordinates m1Triplet hHalf (nth 3 geomAtt)))
(princ (list (nth 0 xyCenter) (nth 1 xyCenter) (nth 2 xyCenter) (nth 3 geomAtt)))
);; defun
;; Translate baseTriplet by extendeDistance along angleDegrees (polar step):
;; x' = x + d*cos(a), y' = y + d*sin(a); z is fixed at 0.0.
;; NOTE(review): zNew is not in the local list, so it is set globally.
;; NOTE(review): the stray "( princ ( strcat ..." line below looks like a
;; commented-out debug print whose leading ";;" was lost in this copy; as
;; written it is not a valid call -- restore the ";;" before loading.
(defun getNextCoordinates (baseTriplet extendeDistance angleDegrees / xNew yNew)
(setq xNew (+ (nth 0 baseTriplet) (* extendeDistance (cos (deg2rad angleDegrees))))
yNew (+ (nth 1 baseTriplet) (* extendeDistance (sin (deg2rad angleDegrees))))
zNew 0.0
)
;; TEST OUTPUT ;;
( princ ( strcat " : " ( ) " , yNew : " ( rtos yNew)))(princ )
(list xNew yNew zNew)
);; defun
;; Geometry of the segment pt1->pt2: returns (b p h aDeg) where
;; b = dx, p = dy, h = segment length, aDeg = bearing in degrees
;; (as computed by the bearing helper below).
;; NOTE(review): the stray "( print ( strcat ..." line below looks like a
;; commented-out debug print whose leading ";;" was lost in this copy;
;; restore the ";;" before loading.
(defun getGeometryAttributes (pt1Triplet pt2Triplet / b p h aDeg)
(setq b (- (nth 0 pt2Triplet) (nth 0 pt1Triplet))
p (- (nth 1 pt2Triplet) (nth 1 pt1Triplet))
h (sqrt (+ (* b b) (* p p)))
aDeg (bearing (nth 0 pt1Triplet) (nth 1 pt1Triplet) (nth 0 pt2Triplet) (nth 1 pt2Triplet))
)
;; TEST OUTPUT ;;
( print ( strcat " b : " ( rtos b ) " , p : " ( rtos p ) " , h : " ( rtos h ) " , angle : " ( ) ) )
(list b p h aDeg)
);; defun
;; ------------------------------------------------------------------------- ;;
;; GET BEARING ;;
;; ------------------------------------------------------------------------- ;;
Created on : 2020 - 02 - 14
;; ------------------------------------------------------------------------- ;;
;; Bearing (degrees) of the vector from (x1,y1) to (x2,y2).
;; Non-vertical segments use atan(dy/dx) converted to degrees; a vertical
;; segment (x2 = x1) would divide by zero, so +/-90 degrees is returned
;; directly from the sign of (y2 - y1).
;; NOTE(review): in this copy the non-vertical branch had been mangled into
;; the invalid form "( ( atan (/ (- y2 y1) (- x2 x1)) ) )"; restored as
;; (rad2deg (atan ...)) to match the rad2deg calls in the vertical branch.
(defun bearing (x1 y1 x2 y2)
  (if (/= (- x2 x1) 0.0)
    ;; General case: slope angle of the segment, in degrees ;;
    (rad2deg (atan (/ (- y2 y1) (- x2 x1))))
    ;; Vertical segment: straight down (-90) or straight up (+90) ;;
    (if (< (- y2 y1) 0)
      (rad2deg (- 0 (/ pi 2.0)))
      (rad2deg (/ pi 2.0))
    )
  )
)
;; Index of the first data row: 2 when headerOption is the string "TRUE"
;; (skip the header row), otherwise 1. princ prints and returns its
;; argument, so these calls double as the function's return value.
(defun getRowStartIndex (headerOption)
(if (eq headerOption "TRUE")
(princ 2)
(princ 1)
)
);; defun
| null | https://raw.githubusercontent.com/moostang/autolisp/e4f9e624175880a6383850bae58718c48e31ff43/get_matchline_pairs.lsp | lisp | ------------------------------------------------------------------------- ;;
------------------------------------------------------------------------- ;;
;
;
------------------------------------------------------------------------- ;;
------------------------------------------------------------------------- ;;
;
Calculate overlapping distance ;;
;
;
Prepare empty list ;;
TEST OUTPUT ;;
TEST OUTPUT ;;
Calculate central coordinates ;;
Append to output list ;;
repeat
defun
progn
;
Prepare fore iteration ;;
TEST OUTPUT ;;
;
;
(- m0 mValue) if bearing is in opposite direction ;;
progn
Condition 1
;
Iterate ;;
Set flag to 0 after finding correct m value ;;
progn
if
progn
if
repeat
;
progn
cond
Calculation ;;
OUTPUT ;;
defun
Check for nil Z values ;;
Check for nil Z values ;;
cond
defun
defun
TEST OUTPUT ;;
defun
TEST OUTPUT ;;
defun
------------------------------------------------------------------------- ;;
GET BEARING ;;
------------------------------------------------------------------------- ;;
------------------------------------------------------------------------- ;;
;
defun |
(defun createStripMap(data rowCount fieldMXYZIndices matchlineInterval headerOption /
rowStartIndex overlapDistance fieldMIndex totalLength matchlineCount
beforeTriplet matchlineIndex mMatchline afterTriplet pairList
)
(setq rowStartIndex (getRowStartIndex headerOption))
10 % of matchlineInterval
(setq fieldMIndex (nth 0 fieldMXYZIndices))
(setq totalLength (getFloat data rowCount fieldMIndex)
matchlineCount (ceil (/ totalLength matchlineInterval) 1)
)
(print (strcat "totalLength: ;" (rtos totalLength) ", matchlineCount: " (itoa matchlineCount)))
(setq matchlinePairList nil)
(print "Getting coordinates of first matchline pair") (princ)
(setq beforeTriplet (getMatchlineTriplet data rowCount headerOption (- 0 overlapDistance) fieldMXYZIndices))
(print (strcat "Looping over other matchlines. matchlineCount: " (itoa matchlineCount)))
(setq matchlineIndex 1)
(repeat matchlineCount
(print (strcat "matchlineIndex: " (itoa matchlineIndex)))
(setq mMatchline (* (float matchlineIndex) matchlineInterval)
afterTriplet (getMatchlineTriplet data rowCount headerOption mMatchline fieldMXYZIndices)
)
(setq centralCoordinates (getCentralCoordinates beforeTriplet afterTriplet))
(setq pairList (list matchlineIndex beforeTriplet afterTriplet centralCoordinates)
matchlinePairList (append matchlinePairList (list pairList))
beforeTriplet nil
beforeTriplet afterTriplet
afterTriplet nil
pairList nil
centralCoordinates nil
)
(setq matchlineIndex (1+ matchlineIndex))
(defun getMatchlineTriplet (data rowCount headerOption mValue fieldIndices /
fieldMIndex rowStartIndex rowIndex totalRows mNext
mDistance mNextIndex mNextTriplet mBeforeTriplet
geomAtt mAngle mTriplet mBefore m0 mEnd)
(if (< (length fieldIndices) 3)
(progn
(alert "[getMatchlineTriplet] Length of fieldIndices is less than 3")
(exit)
)
(setq fieldMIndex (nth 0 fieldIndices))
(setq rowStartIndex (getRowStartIndex headerOption)
rowIndex rowStartIndex
totalRows (- rowCount rowStartIndex)
)
(setq m0 (getFloat data rowStartIndex fieldMIndex)
mEnd (getFloat data rowCount fieldMIndex)
)
(print (strcat "mValue: " (rtos mValue) ", m0: " (rtos m0) ", mEnd: " (rtos mEnd)))
(cond
((< mValue m0)
(progn
(print " Inside Condition 1")
(print rowStartIndex)
(print fieldIndices)
(setq mNextTriplet (getTriplet data rowStartIndex fieldIndices) )
(setq mBeforeTriplet (getTriplet data (+ rowStartIndex 1) fieldIndices))
(setq mBefore (getFloat data (+ rowStartIndex 1) fieldMIndex) )
(print " End of Condition 1")
( (and (> mValue m0) (<= mValue mEnd))
(print " Inside Condition 2")
(setq flagRepeat 1)
(repeat totalRows
(if (eq flagRepeat 1)
(progn
(setq mNext (getFloat data rowIndex fieldMIndex))
(if (> mNext mValue)
(progn
(setq mNextIndex rowIndex
mNextTriplet (getTriplet data mNextIndex fieldIndices)
mBeforeTriplet (getTriplet data (- mNextIndex 1) fieldIndices)
mBefore (getFloat data (- mNextIndex 1) fieldMIndex)
mDistance (- mValue mBefore)
)
(setq rowIndex (1+ rowIndex))
Condition 2
((> mValue mEnd)
(print " Inside Condition 3")
(progn
(setq mNextTriplet (getTriplet data rowCount fieldIndices)
mBeforeTriplet (getTriplet data (- rowCount 1) fieldIndices)
mBefore (getFloat data (- rowCount 1) fieldMIndex)
mDistance (- mValue mEnd)
)
Condition 3
(setq geomAtt (getGeometryAttributes mBeforeTriplet mNextTriplet)
mAngle (nth 3 geomAtt)
)
(setq mTriplet (getNextCoordinates mBeforeTriplet mDistance mAngle))
(princ mTriplet)
(defun getTriplet (data row fieldIndices)
(print "[getTriplet]")
(print fieldIndices)
(cond
((eq (length fieldIndices) 4)
(if (eq (nth 3 fieldIndices) nil)
(setq zValue 0.0)
)
(list (getFloat data row (nth 1 fieldIndices))
(getFloat data row (nth 2 fieldIndices))
zValue
)
)
((eq (length fieldIndices) 3)
(if (eq (nth 2 fieldIndices) nil)
(setq zValue 0.0)
)
(list (getFloat data row (nth 0 fieldIndices))
(getFloat data row (nth 1 fieldIndices))
zValue
)
)
(alert "[getTriplet] Something wrong with fieldIndices")
(defun getCentralCoordinates (m1Triplet m2Triplet / geomAtt hHalf)
(setq geomAtt (getGeometryAttributes m1Triplet m2Triplet))
(setq hHalf (* 0.5 (nth 2 geomAtt)))
(setq xyCenter (getNextCoordinates m1Triplet hHalf (nth 3 geomAtt)))
(princ (list (nth 0 xyCenter) (nth 1 xyCenter) (nth 2 xyCenter) (nth 3 geomAtt)))
(defun getNextCoordinates (baseTriplet extendeDistance angleDegrees / xNew yNew)
(setq xNew (+ (nth 0 baseTriplet) (* extendeDistance (cos (deg2rad angleDegrees))))
yNew (+ (nth 1 baseTriplet) (* extendeDistance (sin (deg2rad angleDegrees))))
zNew 0.0
)
( princ ( strcat " : " ( ) " , yNew : " ( rtos yNew)))(princ )
(list xNew yNew zNew)
(defun getGeometryAttributes (pt1Triplet pt2Triplet / b p h aDeg)
(setq b (- (nth 0 pt2Triplet) (nth 0 pt1Triplet))
p (- (nth 1 pt2Triplet) (nth 1 pt1Triplet))
h (sqrt (+ (* b b) (* p p)))
aDeg (bearing (nth 0 pt1Triplet) (nth 1 pt1Triplet) (nth 0 pt2Triplet) (nth 1 pt2Triplet))
)
( print ( strcat " b : " ( rtos b ) " , p : " ( rtos p ) " , h : " ( rtos h ) " , angle : " ( ) ) )
(list b p h aDeg)
Created on : 2020 - 02 - 14
(defun bearing (x1 y1 x2 y2)
(if (/= (- x2 x1) 0.0)
(if (< (- y2 y1) 0)
(rad2deg (- 0 (/ pi 2.0)))
(rad2deg (/ pi 2.0))
)
)
)
(defun getRowStartIndex (headerOption)
(if (eq headerOption "TRUE")
(princ 2)
(princ 1)
)
|
685b157d6f5b553d6c0be4d0734c59e47bba0b117c9b93a2ed72183f12d1099e | synduce/Synduce | height.ml |
(* Recursion seed: a Nil tree maps to 0. *)
let s0 = 0
(* Step applied once per Node on the recursion path: successor. *)
let f0 x5 = x5 + 1
(* [target] counts the Nodes along the left spine of the tree: it recurses
   only into the left subtree [l] (the label [a] and right subtree [r] are
   unused), adding 1 via [f0] per Node and bottoming out at [s0] = 0.
   NOTE(review): Nil/Node are declared elsewhere in the project; for a
   height-balanced tree this left-spine length equals the height. *)
let rec target = function Nil -> s0 | Node(a, l, r) -> f0 (target l)
| null | https://raw.githubusercontent.com/synduce/Synduce/d453b04cfb507395908a270b1906f5ac34298d29/extras/solutions/constraints/balanced_tree/height.ml | ocaml |
let s0 = 0
let f0 x5 = x5 + 1
let rec target = function Nil -> s0 | Node(a, l, r) -> f0 (target l)
| |
f104cbf795d762833a0ade96cc75080d9ea2eee4b6c93619ef848ac2715c6a12 | janestreet/universe | date_bindings.ml | (* THIS CODE IS GENERATED AUTOMATICALLY, DO NOT EDIT BY HAND *)
open! Base
open! Python_lib
open! Python_lib.Let_syntax
open! Gen_types
open! Gen_import
[@@@alert "-deprecated-legacy"]
(* Run [f x], letting Python-originating exceptions ([Py.Err]) propagate
   unchanged and converting any other OCaml exception into a Python
   [SyntaxError] carrying the exception's text.
   NOTE(review): [SyntaxError] is an odd wrapper for arbitrary runtime
   failures (RuntimeError would be more conventional), but this file is
   generated -- any change belongs in the generator, not here. *)
let protect ~f x =
try f x with
| Py.Err _ as err -> raise err
| exn -> raise (Py.Err (SyntaxError, Exn.to_string exn))
;;
let bin_shape_t () = (* Bin_prot__.Shape.t *)
Defunc.no_arg (fun () -> Core_kernel__Date.bin_shape_t |> python_of_bin_prot____shape__t)
;;
let t_of_sexp () = (* Sexplib0__.Sexp.t -> Core_kernel__Date.t *)
let%map_open
positional_1 = positional "positional_1" param_sexplib0____sexp__t ~docstring:"Sexplib0__.Sexp.t"
in
Core_kernel__Date.t_of_sexp
positional_1
|> python_of_core_kernel__date__t
;;
let sexp_of_t () = (* Core_kernel__Date.t -> Sexplib0__.Sexp.t *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.sexp_of_t
positional_1
|> python_of_sexplib0____sexp__t
;;
let hash_fold_t () = (* Ppx_hash_lib.Std.Hash.state -> Core_kernel__Date.t -> Ppx_hash_lib.Std.Hash.state *)
let%map_open
positional_1 = positional "positional_1" param_ppx_hash_lib__std__hash__state ~docstring:"Ppx_hash_lib.Std.Hash.state" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.hash_fold_t
positional_1
positional_2
|> python_of_ppx_hash_lib__std__hash__state
;;
Core_kernel__Date.t - > Ppx_hash_lib . Std . Hash.hash_value
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.hash
positional_1
|> python_of_ppx_hash_lib__std__hash__hash_value
;;
let of_string () = (* string -> Core_kernel__Date.t *)
let%map_open
positional_1 = positional "positional_1" string ~docstring:"string"
in
Core_kernel__Date.of_string
positional_1
|> python_of_core_kernel__date__t
;;
let to_string () = (* Core_kernel__Date.t -> string *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.to_string
positional_1
|> python_of_string
;;
let greatereq () = (* Core_kernel__Date.t -> Core_kernel__Date.t -> bool *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.(>=)
positional_1
positional_2
|> python_of_bool
;;
let lowereq () = (* Core_kernel__Date.t -> Core_kernel__Date.t -> bool *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.(<=)
positional_1
positional_2
|> python_of_bool
;;
let eq () = (* Core_kernel__Date.t -> Core_kernel__Date.t -> bool *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.(=)
positional_1
positional_2
|> python_of_bool
;;
let greater () = (* Core_kernel__Date.t -> Core_kernel__Date.t -> bool *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.(>)
positional_1
positional_2
|> python_of_bool
;;
let lower () = (* Core_kernel__Date.t -> Core_kernel__Date.t -> bool *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.(<)
positional_1
positional_2
|> python_of_bool
;;
let neq () = (* Core_kernel__Date.t -> Core_kernel__Date.t -> bool *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.(<>)
positional_1
positional_2
|> python_of_bool
;;
let equal () = (* Core_kernel__Date.t -> Core_kernel__Date.t -> bool *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.equal
positional_1
positional_2
|> python_of_bool
;;
let compare () = (* Core_kernel__Date.t -> Core_kernel__Date.t -> int *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.compare
positional_1
positional_2
|> python_of_int
;;
let min () = (* Core_kernel__Date.t -> Core_kernel__Date.t -> Core_kernel__Date.t *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.min
positional_1
positional_2
|> python_of_core_kernel__date__t
;;
let max () = (* Core_kernel__Date.t -> Core_kernel__Date.t -> Core_kernel__Date.t *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.max
positional_1
positional_2
|> python_of_core_kernel__date__t
;;
let ascending () = (* Core_kernel__Date.t -> Core_kernel__Date.t -> int *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.ascending
positional_1
positional_2
|> python_of_int
;;
let descending () = (* Core_kernel__Date.t -> Core_kernel__Date.t -> int *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.descending
positional_1
positional_2
|> python_of_int
;;
let between () = (* Core_kernel__Date.t -> low:Core_kernel__Date.t -> high:Core_kernel__Date.t -> bool *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
low = keyword "low" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
high = keyword "high" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.between
positional_1
~low
~high
|> python_of_bool
;;
let clamp_exn () = (* Core_kernel__Date.t -> min:Core_kernel__Date.t -> max:Core_kernel__Date.t -> Core_kernel__Date.t *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
min = keyword "min" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
max = keyword "max" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.clamp_exn
positional_1
~min
~max
|> python_of_core_kernel__date__t
;;
let pp () = (* Base__.Formatter.t -> Core_kernel__Date.t -> unit *)
let%map_open
positional_1 = positional "positional_1" param_base____formatter__t ~docstring:"Base__.Formatter.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.pp
positional_1
positional_2
|> python_of_unit
;;
let create_exn () = (* y:int -> m:Core_kernel__.Month.t -> d:int -> Core_kernel__Date.t *)
let%map_open
y = keyword "y" int ~docstring:"int" and
m = keyword "m" param_core_kernel____month__t ~docstring:"Core_kernel__.Month.t" and
d = keyword "d" int ~docstring:"int"
in
Core_kernel__Date.create_exn
~y
~m
~d
|> python_of_core_kernel__date__t
;;
let of_string_iso8601_basic () = (* string -> pos:int -> Core_kernel__Date.t *)
let%map_open
positional_1 = positional "positional_1" string ~docstring:"string" and
pos = keyword "pos" int ~docstring:"int"
in
Core_kernel__Date.of_string_iso8601_basic
positional_1
~pos
|> python_of_core_kernel__date__t
;;
let to_string_iso8601_basic () = (* Core_kernel__Date.t -> string *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.to_string_iso8601_basic
positional_1
|> python_of_string
;;
let to_string_american () = (* Core_kernel__Date.t -> string *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.to_string_american
positional_1
|> python_of_string
;;
let day () = (* Core_kernel__Date.t -> int *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.day
positional_1
|> python_of_int
;;
let month () = (* Core_kernel__Date.t -> Core_kernel__.Month.t *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.month
positional_1
|> python_of_core_kernel____month__t
;;
let year () = (* Core_kernel__Date.t -> int *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.year
positional_1
|> python_of_int
;;
let day_of_week () = (* Core_kernel__Date.t -> Core_kernel__.Day_of_week.t *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.day_of_week
positional_1
|> python_of_core_kernel____day_of_week__t
;;
let week_number_and_year () = (* Core_kernel__Date.t -> (int, int) *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.week_number_and_year
positional_1
|> (fun (t0, t1) -> Py.Tuple.of_list [python_of_int t0; python_of_int t1])
;;
let week_number () = (* Core_kernel__Date.t -> int *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.week_number
positional_1
|> python_of_int
;;
let is_weekend () = (* Core_kernel__Date.t -> bool *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.is_weekend
positional_1
|> python_of_bool
;;
let is_weekday () = (* Core_kernel__Date.t -> bool *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.is_weekday
positional_1
|> python_of_bool
;;
let add_days () = (* Core_kernel__Date.t -> int -> Core_kernel__Date.t *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" int ~docstring:"int"
in
Core_kernel__Date.add_days
positional_1
positional_2
|> python_of_core_kernel__date__t
;;
let add_months () = (* Core_kernel__Date.t -> int -> Core_kernel__Date.t *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" int ~docstring:"int"
in
Core_kernel__Date.add_months
positional_1
positional_2
|> python_of_core_kernel__date__t
;;
let add_years () = (* Core_kernel__Date.t -> int -> Core_kernel__Date.t *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" int ~docstring:"int"
in
Core_kernel__Date.add_years
positional_1
positional_2
|> python_of_core_kernel__date__t
;;
let diff () = (* Core_kernel__Date.t -> Core_kernel__Date.t -> int *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.diff
positional_1
positional_2
|> python_of_int
;;
let diff_weekdays () = (* Core_kernel__Date.t -> Core_kernel__Date.t -> int *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.diff_weekdays
positional_1
positional_2
|> python_of_int
;;
let diff_weekend_days () = (* Core_kernel__Date.t -> Core_kernel__Date.t -> int *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.diff_weekend_days
positional_1
positional_2
|> python_of_int
;;
(* Defunc wrapper exposing [Core_kernel__Date.add_weekdays_rounding_backward]
   to Python: a date plus an int day-count, result converted back to Python. *)
let add_weekdays_rounding_backward () = (* Core_kernel__Date.t -> int -> Core_kernel__Date.t *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" int ~docstring:"int"
in
Core_kernel__Date.add_weekdays_rounding_backward
positional_1
positional_2
|> python_of_core_kernel__date__t
;;
(* Defunc wrapper exposing [Core_kernel__Date.add_weekdays_rounding_forward]. *)
let add_weekdays_rounding_forward () = (* Core_kernel__Date.t -> int -> Core_kernel__Date.t *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" int ~docstring:"int"
in
Core_kernel__Date.add_weekdays_rounding_forward
positional_1
positional_2
|> python_of_core_kernel__date__t
;;
(* Defunc wrapper for [Core_kernel__Date.add_weekdays]; exported under the
   name "add_weekdays_deprecated" (see [register_module] below). *)
let add_weekdays_deprecated () = (* Core_kernel__Date.t -> int -> Core_kernel__Date.t *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" int ~docstring:"int"
in
Core_kernel__Date.add_weekdays
positional_1
positional_2
|> python_of_core_kernel__date__t
;;
(* Defunc wrapper exposing
   [Core_kernel__Date.add_weekdays_rounding_in_direction_of_step]. *)
let add_weekdays_rounding_in_direction_of_step () = (* Core_kernel__Date.t -> int -> Core_kernel__Date.t *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" int ~docstring:"int"
in
Core_kernel__Date.add_weekdays_rounding_in_direction_of_step
positional_1
positional_2
|> python_of_core_kernel__date__t
;;
(* Defunc wrapper exposing [Core_kernel__Date.dates_between] to Python:
   takes keyword arguments [min] and [max], returns a Python list of dates. *)
let dates_between () = (* min:Core_kernel__Date.t -> max:Core_kernel__Date.t -> Core_kernel__Date.t list *)
let%map_open
min = keyword "min" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
max = keyword "max" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.dates_between
~min
~max
|> (python_of_list python_of_core_kernel__date__t)
;;
(* Defunc wrapper exposing [Core_kernel__Date.weekdays_between] to Python. *)
let weekdays_between () = (* min:Core_kernel__Date.t -> max:Core_kernel__Date.t -> Core_kernel__Date.t list *)
let%map_open
min = keyword "min" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
max = keyword "max" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.weekdays_between
~min
~max
|> (python_of_list python_of_core_kernel__date__t)
;;
(* Defunc wrapper exposing [Core_kernel__Date.previous_weekday] to Python. *)
let previous_weekday () = (* Core_kernel__Date.t -> Core_kernel__Date.t *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.previous_weekday
positional_1
|> python_of_core_kernel__date__t
;;
(* Defunc wrapper exposing [Core_kernel__Date.following_weekday] to Python. *)
let following_weekday () = (* Core_kernel__Date.t -> Core_kernel__Date.t *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.following_weekday
positional_1
|> python_of_core_kernel__date__t
;;
(* Defunc wrapper exposing [Core_kernel__Date.first_strictly_after]:
   one positional date plus keyword [on] (a day of week). *)
let first_strictly_after () = (* Core_kernel__Date.t -> on:Core_kernel__.Day_of_week.t -> Core_kernel__Date.t *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
on = keyword "on" param_core_kernel____day_of_week__t ~docstring:"Core_kernel__.Day_of_week.t"
in
Core_kernel__Date.first_strictly_after
positional_1
~on
|> python_of_core_kernel__date__t
;;
(* Defunc wrapper exposing [Core_kernel__Date.days_in_month]:
   keyword arguments [year] and [month]. *)
let days_in_month () = (* year:int -> month:Core_kernel__.Month.t -> int *)
let%map_open
year = keyword "year" int ~docstring:"int" and
month = keyword "month" param_core_kernel____month__t ~docstring:"Core_kernel__.Month.t"
in
Core_kernel__Date.days_in_month
~year
~month
|> python_of_int
;;
(* Defunc wrapper exposing [Core_kernel__Date.is_leap_year]. *)
let is_leap_year () = (* year:int -> bool *)
let%map_open
year = keyword "year" int ~docstring:"int"
in
Core_kernel__Date.is_leap_year
~year
|> python_of_bool
;;
(* Zero-argument binding for the constant [Core_kernel__Date.unix_epoch]. *)
let unix_epoch () = (* Core_kernel__Date.t *)
Defunc.no_arg (fun () -> Core_kernel__Date.unix_epoch |> python_of_core_kernel__date__t)
;;
module Days = struct
(* Python bindings for [Core_kernel__Date.Days]. The generated
   "let NAME () = ..." header lines in this module had been corrupted to bare
   type text; they are restored here, with the function names recovered from
   the [register_module] calls at the bottom of the module. *)
let of_date () = (* Core_kernel__Date.t -> Core_kernel__Date.Days.t *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.Days.of_date
positional_1
|> python_of_core_kernel__date__days__t
;;
let to_date () = (* Core_kernel__Date.Days.t -> Core_kernel__Date.t *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__days__t ~docstring:"Core_kernel__Date.Days.t"
in
Core_kernel__Date.Days.to_date
positional_1
|> python_of_core_kernel__date__t
;;
let diff () = (* Core_kernel__Date.Days.t -> Core_kernel__Date.Days.t -> int *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__days__t ~docstring:"Core_kernel__Date.Days.t" and
positional_2 = positional "positional_2" param_core_kernel__date__days__t ~docstring:"Core_kernel__Date.Days.t"
in
Core_kernel__Date.Days.diff
positional_1
positional_2
|> python_of_int
;;
let add_days () = (* Core_kernel__Date.Days.t -> int -> Core_kernel__Date.Days.t *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__days__t ~docstring:"Core_kernel__Date.Days.t" and
positional_2 = positional "positional_2" int ~docstring:"int"
in
Core_kernel__Date.Days.add_days
positional_1
positional_2
|> python_of_core_kernel__date__days__t
;;
let unix_epoch () = (* Core_kernel__Date.Days.t *)
Defunc.no_arg (fun () -> Core_kernel__Date.Days.unix_epoch |> python_of_core_kernel__date__days__t)
;;
(* Registers every [Days] binding above on a fresh Python module. *)
let register_module ~module_name =
let modl = Py_module.create module_name in
Py_module.set modl "of_date" (of_date ());
Py_module.set modl "to_date" (to_date ());
Py_module.set modl "diff" (diff ());
Py_module.set modl "add_days" (add_days ());
Py_module.set modl "unix_epoch" (unix_epoch ());
modl
end;;
module Option = struct
(* Python bindings for [Core_kernel__Date.Option]. Many generated
   "let NAME () = ..." header lines in this module had been corrupted to bare
   type text; they are restored here, with the function names recovered from
   the [register_module] calls at the bottom of the module. *)
let hash_fold_t () = (* Ppx_hash_lib.Std.Hash.state -> Core_kernel__Date.Option.t -> Ppx_hash_lib.Std.Hash.state *)
let%map_open
positional_1 = positional "positional_1" param_ppx_hash_lib__std__hash__state ~docstring:"Ppx_hash_lib.Std.Hash.state" and
positional_2 = positional "positional_2" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.hash_fold_t
positional_1
positional_2
|> python_of_ppx_hash_lib__std__hash__state
;;
let hash () = (* Core_kernel__Date.Option.t -> Ppx_hash_lib.Std.Hash.hash_value *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.hash
positional_1
|> python_of_ppx_hash_lib__std__hash__hash_value
;;
let sexp_of_t () = (* Core_kernel__Date.Option.t -> Ppx_sexp_conv_lib.Sexp.t *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.sexp_of_t
positional_1
|> python_of_ppx_sexp_conv_lib__sexp__t
;;
let none () = (* Core_kernel__Date.Option.t *)
Defunc.no_arg (fun () -> Core_kernel__Date.Option.none |> python_of_core_kernel__date__option__t)
;;
let some () = (* Core_kernel__Date.t -> Core_kernel__Date.Option.t *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.Option.some
positional_1
|> python_of_core_kernel__date__option__t
;;
let some_is_representable () = (* Core_kernel__Date.t -> bool *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.Option.some_is_representable
positional_1
|> python_of_bool
;;
let is_none () = (* Core_kernel__Date.Option.t -> bool *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.is_none
positional_1
|> python_of_bool
;;
let is_some () = (* Core_kernel__Date.Option.t -> bool *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.is_some
positional_1
|> python_of_bool
;;
let value () = (* Core_kernel__Date.Option.t -> default:Core_kernel__Date.t -> Core_kernel__Date.t *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
default = keyword "default" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.Option.value
positional_1
~default
|> python_of_core_kernel__date__t
;;
let value_exn () = (* Core_kernel__Date.Option.t -> Core_kernel__Date.t *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.value_exn
positional_1
|> python_of_core_kernel__date__t
;;
let unchecked_value () = (* Core_kernel__Date.Option.t -> Core_kernel__Date.t *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.unchecked_value
positional_1
|> python_of_core_kernel__date__t
;;
module Optional_syntax = struct
module Optional_syntax = struct
let is_none () = (* Core_kernel__Date.Option.t -> bool *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.Optional_syntax.Optional_syntax.is_none
positional_1
|> python_of_bool
;;
let unsafe_value () = (* Core_kernel__Date.Option.t -> Core_kernel__Date.t *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.Optional_syntax.Optional_syntax.unsafe_value
positional_1
|> python_of_core_kernel__date__t
;;
let register_module ~module_name =
let modl = Py_module.create module_name in
Py_module.set modl "is_none" (is_none ());
Py_module.set modl "unsafe_value" (unsafe_value ());
modl
end;;
let register_module ~module_name =
let modl = Py_module.create module_name in
let sub_module = Optional_syntax.register_module ~module_name:"core_kernel__date__option__optional_syntax__optional_syntax__optional_syntax" in
Py_module.set_value modl "optional_syntax" (Py_module.pyobject sub_module);
modl
end;;
let greatereq () = (* Core_kernel__Date.Option.t -> Core_kernel__Date.Option.t -> bool *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
positional_2 = positional "positional_2" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.(>=)
positional_1
positional_2
|> python_of_bool
;;
let lowereq () = (* Core_kernel__Date.Option.t -> Core_kernel__Date.Option.t -> bool *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
positional_2 = positional "positional_2" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.(<=)
positional_1
positional_2
|> python_of_bool
;;
let eq () = (* Core_kernel__Date.Option.t -> Core_kernel__Date.Option.t -> bool *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
positional_2 = positional "positional_2" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.(=)
positional_1
positional_2
|> python_of_bool
;;
let greater () = (* Core_kernel__Date.Option.t -> Core_kernel__Date.Option.t -> bool *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
positional_2 = positional "positional_2" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.(>)
positional_1
positional_2
|> python_of_bool
;;
let lower () = (* Core_kernel__Date.Option.t -> Core_kernel__Date.Option.t -> bool *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
positional_2 = positional "positional_2" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.(<)
positional_1
positional_2
|> python_of_bool
;;
let neq () = (* Core_kernel__Date.Option.t -> Core_kernel__Date.Option.t -> bool *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
positional_2 = positional "positional_2" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.(<>)
positional_1
positional_2
|> python_of_bool
;;
let equal () = (* Core_kernel__Date.Option.t -> Core_kernel__Date.Option.t -> bool *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
positional_2 = positional "positional_2" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.equal
positional_1
positional_2
|> python_of_bool
;;
let compare () = (* Core_kernel__Date.Option.t -> Core_kernel__Date.Option.t -> int *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
positional_2 = positional "positional_2" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.compare
positional_1
positional_2
|> python_of_int
;;
let min () = (* Core_kernel__Date.Option.t -> Core_kernel__Date.Option.t -> Core_kernel__Date.Option.t *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
positional_2 = positional "positional_2" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.min
positional_1
positional_2
|> python_of_core_kernel__date__option__t
;;
let max () = (* Core_kernel__Date.Option.t -> Core_kernel__Date.Option.t -> Core_kernel__Date.Option.t *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
positional_2 = positional "positional_2" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.max
positional_1
positional_2
|> python_of_core_kernel__date__option__t
;;
let ascending () = (* Core_kernel__Date.Option.t -> Core_kernel__Date.Option.t -> int *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
positional_2 = positional "positional_2" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.ascending
positional_1
positional_2
|> python_of_int
;;
let descending () = (* Core_kernel__Date.Option.t -> Core_kernel__Date.Option.t -> int *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
positional_2 = positional "positional_2" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.descending
positional_1
positional_2
|> python_of_int
;;
let between () = (* Core_kernel__Date.Option.t -> low:Core_kernel__Date.Option.t -> high:Core_kernel__Date.Option.t -> bool *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
low = keyword "low" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
high = keyword "high" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.between
positional_1
~low
~high
|> python_of_bool
;;
let clamp_exn () = (* Core_kernel__Date.Option.t -> min:Core_kernel__Date.Option.t -> max:Core_kernel__Date.Option.t -> Core_kernel__Date.Option.t *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
min = keyword "min" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
max = keyword "max" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.clamp_exn
positional_1
~min
~max
|> python_of_core_kernel__date__option__t
;;
(* Registers every [Option] binding above (plus the nested [Optional_syntax]
   submodule) on a fresh Python module. *)
let register_module ~module_name =
let modl = Py_module.create module_name in
Py_module.set modl "hash_fold_t" (hash_fold_t ());
Py_module.set modl "hash" (hash ());
Py_module.set modl "sexp_of_t" (sexp_of_t ());
Py_module.set modl "none" (none ());
Py_module.set modl "some" (some ());
Py_module.set modl "some_is_representable" (some_is_representable ());
Py_module.set modl "is_none" (is_none ());
Py_module.set modl "is_some" (is_some ());
Py_module.set modl "value" (value ());
Py_module.set modl "value_exn" (value_exn ());
Py_module.set modl "unchecked_value" (unchecked_value ());
let sub_module = Optional_syntax.register_module ~module_name:"core_kernel__date__option__optional_syntax__optional_syntax" in
Py_module.set_value modl "optional_syntax" (Py_module.pyobject sub_module);
Py_module.set modl "greatereq" (greatereq ());
Py_module.set modl "lowereq" (lowereq ());
Py_module.set modl "eq" (eq ());
Py_module.set modl "greater" (greater ());
Py_module.set modl "lower" (lower ());
Py_module.set modl "neq" (neq ());
Py_module.set modl "equal" (equal ());
Py_module.set modl "compare" (compare ());
Py_module.set modl "min" (min ());
Py_module.set modl "max" (max ());
Py_module.set modl "ascending" (ascending ());
Py_module.set modl "descending" (descending ());
Py_module.set modl "between" (between ());
Py_module.set modl "clamp_exn" (clamp_exn ());
modl
end;;
(* Defunc wrapper exposing [Core_kernel__Date.of_time]:
   one positional time plus keyword [zone]. *)
let of_time () = (* Core_kernel__.Time_float.t -> zone:Core_kernel__.Time_float.Zone.t -> Core_kernel__Date.t *)
let%map_open
positional_1 = positional "positional_1" param_core_kernel____time_float__t ~docstring:"Core_kernel__.Time_float.t" and
zone = keyword "zone" param_core_kernel____time_float__zone__t ~docstring:"Core_kernel__.Time_float.Zone.t"
in
Core_kernel__Date.of_time
positional_1
~zone
|> python_of_core_kernel__date__t
;;
(* Defunc wrapper exposing [Core_kernel__Date.today]: keyword [zone] only. *)
let today () = (* zone:Core_kernel__.Time_float.Zone.t -> Core_kernel__Date.t *)
let%map_open
zone = keyword "zone" param_core_kernel____time_float__zone__t ~docstring:"Core_kernel__.Time_float.Zone.t"
in
Core_kernel__Date.today
~zone
|> python_of_core_kernel__date__t
;;
(* Builds the top-level Python module for [Core_kernel__Date]: registers every
   binding defined in this file (including several defined before this chunk,
   e.g. [bin_shape_t], [of_string], [pp]) and mounts the [Days] and [Option]
   submodules as attributes. Returns the populated [Py_module.t]. *)
let register_module ~module_name =
let modl = Py_module.create module_name in
Py_module.set modl "bin_shape_t" (bin_shape_t ());
Py_module.set modl "t_of_sexp" (t_of_sexp ());
Py_module.set modl "sexp_of_t" (sexp_of_t ());
Py_module.set modl "hash_fold_t" (hash_fold_t ());
Py_module.set modl "hash" (hash ());
Py_module.set modl "of_string" (of_string ());
Py_module.set modl "to_string" (to_string ());
Py_module.set modl "greatereq" (greatereq ());
Py_module.set modl "lowereq" (lowereq ());
Py_module.set modl "eq" (eq ());
Py_module.set modl "greater" (greater ());
Py_module.set modl "lower" (lower ());
Py_module.set modl "neq" (neq ());
Py_module.set modl "equal" (equal ());
Py_module.set modl "compare" (compare ());
Py_module.set modl "min" (min ());
Py_module.set modl "max" (max ());
Py_module.set modl "ascending" (ascending ());
Py_module.set modl "descending" (descending ());
Py_module.set modl "between" (between ());
Py_module.set modl "clamp_exn" (clamp_exn ());
Py_module.set modl "pp" (pp ());
Py_module.set modl "create_exn" (create_exn ());
Py_module.set modl "of_string_iso8601_basic" (of_string_iso8601_basic ());
Py_module.set modl "to_string_iso8601_basic" (to_string_iso8601_basic ());
Py_module.set modl "to_string_american" (to_string_american ());
Py_module.set modl "day" (day ());
Py_module.set modl "month" (month ());
Py_module.set modl "year" (year ());
Py_module.set modl "day_of_week" (day_of_week ());
Py_module.set modl "week_number_and_year" (week_number_and_year ());
Py_module.set modl "week_number" (week_number ());
Py_module.set modl "is_weekend" (is_weekend ());
Py_module.set modl "is_weekday" (is_weekday ());
Py_module.set modl "add_days" (add_days ());
Py_module.set modl "add_months" (add_months ());
Py_module.set modl "add_years" (add_years ());
Py_module.set modl "diff" (diff ());
Py_module.set modl "diff_weekdays" (diff_weekdays ());
Py_module.set modl "diff_weekend_days" (diff_weekend_days ());
Py_module.set modl "add_weekdays_rounding_backward" (add_weekdays_rounding_backward ());
Py_module.set modl "add_weekdays_rounding_forward" (add_weekdays_rounding_forward ());
Py_module.set modl "add_weekdays_deprecated" (add_weekdays_deprecated ());
Py_module.set modl "add_weekdays_rounding_in_direction_of_step" (add_weekdays_rounding_in_direction_of_step ());
Py_module.set modl "dates_between" (dates_between ());
Py_module.set modl "weekdays_between" (weekdays_between ());
Py_module.set modl "previous_weekday" (previous_weekday ());
Py_module.set modl "following_weekday" (following_weekday ());
Py_module.set modl "first_strictly_after" (first_strictly_after ());
Py_module.set modl "days_in_month" (days_in_month ());
Py_module.set modl "is_leap_year" (is_leap_year ());
Py_module.set modl "unix_epoch" (unix_epoch ());
let sub_module = Days.register_module ~module_name:"core_kernel__date__days__days" in
Py_module.set_value modl "days" (Py_module.pyobject sub_module);
let sub_module = Option.register_module ~module_name:"core_kernel__date__option__option" in
Py_module.set_value modl "option" (Py_module.pyobject sub_module);
Py_module.set modl "of_time" (of_time ());
Py_module.set modl "today" (today ());
modl
| null | https://raw.githubusercontent.com/janestreet/universe/b6cb56fdae83f5d55f9c809f1c2a2b50ea213126/pythonlib/examples-gen/generated/date_bindings.ml | ocaml | THIS CODE IS GENERATED AUTOMATICALLY, DO NOT EDIT BY HAND
Bin_prot__.Shape.t
Sexplib0__.Sexp.t -> Core_kernel__Date.t
Core_kernel__Date.t -> Sexplib0__.Sexp.t
Ppx_hash_lib.Std.Hash.state -> Core_kernel__Date.t -> Ppx_hash_lib.Std.Hash.state
string -> Core_kernel__Date.t
Core_kernel__Date.t -> string
Core_kernel__Date.t -> Core_kernel__Date.t -> bool
Core_kernel__Date.t -> Core_kernel__Date.t -> bool
Core_kernel__Date.t -> Core_kernel__Date.t -> bool
Core_kernel__Date.t -> Core_kernel__Date.t -> bool
Core_kernel__Date.t -> Core_kernel__Date.t -> bool
Core_kernel__Date.t -> Core_kernel__Date.t -> bool
Core_kernel__Date.t -> Core_kernel__Date.t -> bool
Core_kernel__Date.t -> Core_kernel__Date.t -> int
Core_kernel__Date.t -> Core_kernel__Date.t -> Core_kernel__Date.t
Core_kernel__Date.t -> Core_kernel__Date.t -> Core_kernel__Date.t
Core_kernel__Date.t -> Core_kernel__Date.t -> int
Core_kernel__Date.t -> Core_kernel__Date.t -> int
Core_kernel__Date.t -> low:Core_kernel__Date.t -> high:Core_kernel__Date.t -> bool
Core_kernel__Date.t -> min:Core_kernel__Date.t -> max:Core_kernel__Date.t -> Core_kernel__Date.t
Base__.Formatter.t -> Core_kernel__Date.t -> unit
y:int -> m:Core_kernel__.Month.t -> d:int -> Core_kernel__Date.t
string -> pos:int -> Core_kernel__Date.t
Core_kernel__Date.t -> string
Core_kernel__Date.t -> string
Core_kernel__Date.t -> int
Core_kernel__Date.t -> Core_kernel__.Month.t
Core_kernel__Date.t -> int
Core_kernel__Date.t -> Core_kernel__.Day_of_week.t
Core_kernel__Date.t -> (int, int)
Core_kernel__Date.t -> int
Core_kernel__Date.t -> bool
Core_kernel__Date.t -> bool
Core_kernel__Date.t -> int -> Core_kernel__Date.t
Core_kernel__Date.t -> int -> Core_kernel__Date.t
Core_kernel__Date.t -> int -> Core_kernel__Date.t
Core_kernel__Date.t -> Core_kernel__Date.t -> int
Core_kernel__Date.t -> Core_kernel__Date.t -> int
Core_kernel__Date.t -> Core_kernel__Date.t -> int
Core_kernel__Date.t -> int -> Core_kernel__Date.t
Core_kernel__Date.t -> int -> Core_kernel__Date.t
Core_kernel__Date.t -> int -> Core_kernel__Date.t
Core_kernel__Date.t -> int -> Core_kernel__Date.t
min:Core_kernel__Date.t -> max:Core_kernel__Date.t -> Core_kernel__Date.t list
min:Core_kernel__Date.t -> max:Core_kernel__Date.t -> Core_kernel__Date.t list
Core_kernel__Date.t -> Core_kernel__Date.t
Core_kernel__Date.t -> Core_kernel__Date.t
Core_kernel__Date.t -> on:Core_kernel__.Day_of_week.t -> Core_kernel__Date.t
year:int -> month:Core_kernel__.Month.t -> int
year:int -> bool
Core_kernel__Date.t
Core_kernel__Date.Days.t -> Core_kernel__Date.t
Ppx_hash_lib.Std.Hash.state -> Core_kernel__Date.Option.t -> Ppx_hash_lib.Std.Hash.state
Core_kernel__Date.t -> bool
Core_kernel__Date.Option.t -> default:Core_kernel__Date.t -> Core_kernel__Date.t
Core_kernel__Date.Option.t -> Core_kernel__Date.Option.t -> int
Core_kernel__Date.Option.t -> Core_kernel__Date.Option.t -> int
Core_kernel__Date.Option.t -> Core_kernel__Date.Option.t -> int
Core_kernel__.Time_float.t -> zone:Core_kernel__.Time_float.Zone.t -> Core_kernel__Date.t
zone:Core_kernel__.Time_float.Zone.t -> Core_kernel__Date.t | open! Base
open! Python_lib
open! Python_lib.Let_syntax
open! Gen_types
open! Gen_import
[@@@alert "-deprecated-legacy"]
let protect ~f x =
try f x with
| Py.Err _ as err -> raise err
| exn -> raise (Py.Err (SyntaxError, Exn.to_string exn))
;;
Defunc.no_arg (fun () -> Core_kernel__Date.bin_shape_t |> python_of_bin_prot____shape__t)
;;
let%map_open
positional_1 = positional "positional_1" param_sexplib0____sexp__t ~docstring:"Sexplib0__.Sexp.t"
in
Core_kernel__Date.t_of_sexp
positional_1
|> python_of_core_kernel__date__t
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.sexp_of_t
positional_1
|> python_of_sexplib0____sexp__t
;;
let%map_open
positional_1 = positional "positional_1" param_ppx_hash_lib__std__hash__state ~docstring:"Ppx_hash_lib.Std.Hash.state" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.hash_fold_t
positional_1
positional_2
|> python_of_ppx_hash_lib__std__hash__state
;;
Core_kernel__Date.t - > Ppx_hash_lib . Std . Hash.hash_value
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.hash
positional_1
|> python_of_ppx_hash_lib__std__hash__hash_value
;;
let%map_open
positional_1 = positional "positional_1" string ~docstring:"string"
in
Core_kernel__Date.of_string
positional_1
|> python_of_core_kernel__date__t
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.to_string
positional_1
|> python_of_string
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.(>=)
positional_1
positional_2
|> python_of_bool
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.(<=)
positional_1
positional_2
|> python_of_bool
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.(=)
positional_1
positional_2
|> python_of_bool
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.(>)
positional_1
positional_2
|> python_of_bool
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.(<)
positional_1
positional_2
|> python_of_bool
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.(<>)
positional_1
positional_2
|> python_of_bool
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.equal
positional_1
positional_2
|> python_of_bool
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.compare
positional_1
positional_2
|> python_of_int
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.min
positional_1
positional_2
|> python_of_core_kernel__date__t
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.max
positional_1
positional_2
|> python_of_core_kernel__date__t
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.ascending
positional_1
positional_2
|> python_of_int
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.descending
positional_1
positional_2
|> python_of_int
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
low = keyword "low" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
high = keyword "high" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.between
positional_1
~low
~high
|> python_of_bool
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
min = keyword "min" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
max = keyword "max" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.clamp_exn
positional_1
~min
~max
|> python_of_core_kernel__date__t
;;
let%map_open
positional_1 = positional "positional_1" param_base____formatter__t ~docstring:"Base__.Formatter.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.pp
positional_1
positional_2
|> python_of_unit
;;
let%map_open
y = keyword "y" int ~docstring:"int" and
m = keyword "m" param_core_kernel____month__t ~docstring:"Core_kernel__.Month.t" and
d = keyword "d" int ~docstring:"int"
in
Core_kernel__Date.create_exn
~y
~m
~d
|> python_of_core_kernel__date__t
;;
let%map_open
positional_1 = positional "positional_1" string ~docstring:"string" and
pos = keyword "pos" int ~docstring:"int"
in
Core_kernel__Date.of_string_iso8601_basic
positional_1
~pos
|> python_of_core_kernel__date__t
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.to_string_iso8601_basic
positional_1
|> python_of_string
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.to_string_american
positional_1
|> python_of_string
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.day
positional_1
|> python_of_int
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.month
positional_1
|> python_of_core_kernel____month__t
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.year
positional_1
|> python_of_int
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.day_of_week
positional_1
|> python_of_core_kernel____day_of_week__t
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.week_number_and_year
positional_1
|> (fun (t0, t1) -> Py.Tuple.of_list [python_of_int t0; python_of_int t1])
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.week_number
positional_1
|> python_of_int
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.is_weekend
positional_1
|> python_of_bool
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.is_weekday
positional_1
|> python_of_bool
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" int ~docstring:"int"
in
Core_kernel__Date.add_days
positional_1
positional_2
|> python_of_core_kernel__date__t
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" int ~docstring:"int"
in
Core_kernel__Date.add_months
positional_1
positional_2
|> python_of_core_kernel__date__t
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" int ~docstring:"int"
in
Core_kernel__Date.add_years
positional_1
positional_2
|> python_of_core_kernel__date__t
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.diff
positional_1
positional_2
|> python_of_int
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.diff_weekdays
positional_1
positional_2
|> python_of_int
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.diff_weekend_days
positional_1
positional_2
|> python_of_int
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" int ~docstring:"int"
in
Core_kernel__Date.add_weekdays_rounding_backward
positional_1
positional_2
|> python_of_core_kernel__date__t
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" int ~docstring:"int"
in
Core_kernel__Date.add_weekdays_rounding_forward
positional_1
positional_2
|> python_of_core_kernel__date__t
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" int ~docstring:"int"
in
Core_kernel__Date.add_weekdays
positional_1
positional_2
|> python_of_core_kernel__date__t
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
positional_2 = positional "positional_2" int ~docstring:"int"
in
Core_kernel__Date.add_weekdays_rounding_in_direction_of_step
positional_1
positional_2
|> python_of_core_kernel__date__t
;;
let%map_open
min = keyword "min" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
max = keyword "max" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.dates_between
~min
~max
|> (python_of_list python_of_core_kernel__date__t)
;;
let%map_open
min = keyword "min" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
max = keyword "max" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.weekdays_between
~min
~max
|> (python_of_list python_of_core_kernel__date__t)
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.previous_weekday
positional_1
|> python_of_core_kernel__date__t
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.following_weekday
positional_1
|> python_of_core_kernel__date__t
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t" and
on = keyword "on" param_core_kernel____day_of_week__t ~docstring:"Core_kernel__.Day_of_week.t"
in
Core_kernel__Date.first_strictly_after
positional_1
~on
|> python_of_core_kernel__date__t
;;
let%map_open
year = keyword "year" int ~docstring:"int" and
month = keyword "month" param_core_kernel____month__t ~docstring:"Core_kernel__.Month.t"
in
Core_kernel__Date.days_in_month
~year
~month
|> python_of_int
;;
let%map_open
year = keyword "year" int ~docstring:"int"
in
Core_kernel__Date.is_leap_year
~year
|> python_of_bool
;;
Defunc.no_arg (fun () -> Core_kernel__Date.unix_epoch |> python_of_core_kernel__date__t)
;;
module Days = struct
Core_kernel__Date.t - > Core_kernel__Date . Days.t
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.Days.of_date
positional_1
|> python_of_core_kernel__date__days__t
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__days__t ~docstring:"Core_kernel__Date.Days.t"
in
Core_kernel__Date.Days.to_date
positional_1
|> python_of_core_kernel__date__t
;;
Core_kernel__Date . Days.t - > Core_kernel__Date . Days.t - > int
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__days__t ~docstring:"Core_kernel__Date.Days.t" and
positional_2 = positional "positional_2" param_core_kernel__date__days__t ~docstring:"Core_kernel__Date.Days.t"
in
Core_kernel__Date.Days.diff
positional_1
positional_2
|> python_of_int
;;
Core_kernel__Date . Days.t - > int - > Core_kernel__Date . Days.t
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__days__t ~docstring:"Core_kernel__Date.Days.t" and
positional_2 = positional "positional_2" int ~docstring:"int"
in
Core_kernel__Date.Days.add_days
positional_1
positional_2
|> python_of_core_kernel__date__days__t
;;
Core_kernel__Date . Days.t
Defunc.no_arg (fun () -> Core_kernel__Date.Days.unix_epoch |> python_of_core_kernel__date__days__t)
;;
let register_module ~module_name =
let modl = Py_module.create module_name in
Py_module.set modl "of_date" (of_date ());
Py_module.set modl "to_date" (to_date ());
Py_module.set modl "diff" (diff ());
Py_module.set modl "add_days" (add_days ());
Py_module.set modl "unix_epoch" (unix_epoch ());
modl
end;;
module Option = struct
let%map_open
positional_1 = positional "positional_1" param_ppx_hash_lib__std__hash__state ~docstring:"Ppx_hash_lib.Std.Hash.state" and
positional_2 = positional "positional_2" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.hash_fold_t
positional_1
positional_2
|> python_of_ppx_hash_lib__std__hash__state
;;
Core_kernel__Date . Option.t - > Ppx_hash_lib . Std . Hash.hash_value
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.hash
positional_1
|> python_of_ppx_hash_lib__std__hash__hash_value
;;
Core_kernel__Date . Option.t - > Ppx_sexp_conv_lib .
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.sexp_of_t
positional_1
|> python_of_ppx_sexp_conv_lib__sexp__t
;;
Core_kernel__Date .
Defunc.no_arg (fun () -> Core_kernel__Date.Option.none |> python_of_core_kernel__date__option__t)
;;
Core_kernel__Date.t - > Core_kernel__Date .
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.Option.some
positional_1
|> python_of_core_kernel__date__option__t
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.Option.some_is_representable
positional_1
|> python_of_bool
;;
Core_kernel__Date . bool
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.is_none
positional_1
|> python_of_bool
;;
Core_kernel__Date . bool
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.is_some
positional_1
|> python_of_bool
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
default = keyword "default" param_core_kernel__date__t ~docstring:"Core_kernel__Date.t"
in
Core_kernel__Date.Option.value
positional_1
~default
|> python_of_core_kernel__date__t
;;
Core_kernel__Date .
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.value_exn
positional_1
|> python_of_core_kernel__date__t
;;
Core_kernel__Date .
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.unchecked_value
positional_1
|> python_of_core_kernel__date__t
;;
module Optional_syntax = struct
module Optional_syntax = struct
Core_kernel__Date . bool
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.Optional_syntax.Optional_syntax.is_none
positional_1
|> python_of_bool
;;
Core_kernel__Date .
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.Optional_syntax.Optional_syntax.unsafe_value
positional_1
|> python_of_core_kernel__date__t
;;
let register_module ~module_name =
let modl = Py_module.create module_name in
Py_module.set modl "is_none" (is_none ());
Py_module.set modl "unsafe_value" (unsafe_value ());
modl
end;;
let register_module ~module_name =
let modl = Py_module.create module_name in
let sub_module = Optional_syntax.register_module ~module_name:"core_kernel__date__option__optional_syntax__optional_syntax__optional_syntax" in
Py_module.set_value modl "optional_syntax" (Py_module.pyobject sub_module);
modl
end;;
Core_kernel__Date . Option.t - > Core_kernel__Date . bool
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
positional_2 = positional "positional_2" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.(>=)
positional_1
positional_2
|> python_of_bool
;;
Core_kernel__Date . Option.t - > Core_kernel__Date . bool
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
positional_2 = positional "positional_2" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.(<=)
positional_1
positional_2
|> python_of_bool
;;
Core_kernel__Date . Option.t - > Core_kernel__Date . bool
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
positional_2 = positional "positional_2" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.(=)
positional_1
positional_2
|> python_of_bool
;;
Core_kernel__Date . Option.t - > Core_kernel__Date . bool
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
positional_2 = positional "positional_2" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.(>)
positional_1
positional_2
|> python_of_bool
;;
Core_kernel__Date . Option.t - > Core_kernel__Date . bool
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
positional_2 = positional "positional_2" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.(<)
positional_1
positional_2
|> python_of_bool
;;
Core_kernel__Date . Option.t - > Core_kernel__Date . bool
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
positional_2 = positional "positional_2" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.(<>)
positional_1
positional_2
|> python_of_bool
;;
Core_kernel__Date . Option.t - > Core_kernel__Date . bool
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
positional_2 = positional "positional_2" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.equal
positional_1
positional_2
|> python_of_bool
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
positional_2 = positional "positional_2" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.compare
positional_1
positional_2
|> python_of_int
;;
Core_kernel__Date . Option.t - > Core_kernel__Date . Option.t - > Core_kernel__Date .
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
positional_2 = positional "positional_2" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.min
positional_1
positional_2
|> python_of_core_kernel__date__option__t
;;
Core_kernel__Date . Option.t - > Core_kernel__Date . Option.t - > Core_kernel__Date .
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
positional_2 = positional "positional_2" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.max
positional_1
positional_2
|> python_of_core_kernel__date__option__t
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
positional_2 = positional "positional_2" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.ascending
positional_1
positional_2
|> python_of_int
;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
positional_2 = positional "positional_2" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.descending
positional_1
positional_2
|> python_of_int
;;
Core_kernel__Date . Option.t - > low : Core_kernel__Date . Option.t - > high : Core_kernel__Date . bool
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
low = keyword "low" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
high = keyword "high" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.between
positional_1
~low
~high
|> python_of_bool
;;
Core_kernel__Date . : Core_kernel__Date . : Core_kernel__Date . Option.t - > Core_kernel__Date .
let%map_open
positional_1 = positional "positional_1" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
min = keyword "min" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t" and
max = keyword "max" param_core_kernel__date__option__t ~docstring:"Core_kernel__Date.Option.t"
in
Core_kernel__Date.Option.clamp_exn
positional_1
~min
~max
|> python_of_core_kernel__date__option__t
;;
let register_module ~module_name =
let modl = Py_module.create module_name in
Py_module.set modl "hash_fold_t" (hash_fold_t ());
Py_module.set modl "hash" (hash ());
Py_module.set modl "sexp_of_t" (sexp_of_t ());
Py_module.set modl "none" (none ());
Py_module.set modl "some" (some ());
Py_module.set modl "some_is_representable" (some_is_representable ());
Py_module.set modl "is_none" (is_none ());
Py_module.set modl "is_some" (is_some ());
Py_module.set modl "value" (value ());
Py_module.set modl "value_exn" (value_exn ());
Py_module.set modl "unchecked_value" (unchecked_value ());
let sub_module = Optional_syntax.register_module ~module_name:"core_kernel__date__option__optional_syntax__optional_syntax" in
Py_module.set_value modl "optional_syntax" (Py_module.pyobject sub_module);
Py_module.set modl "greatereq" (greatereq ());
Py_module.set modl "lowereq" (lowereq ());
Py_module.set modl "eq" (eq ());
Py_module.set modl "greater" (greater ());
Py_module.set modl "lower" (lower ());
Py_module.set modl "neq" (neq ());
Py_module.set modl "equal" (equal ());
Py_module.set modl "compare" (compare ());
Py_module.set modl "min" (min ());
Py_module.set modl "max" (max ());
Py_module.set modl "ascending" (ascending ());
Py_module.set modl "descending" (descending ());
Py_module.set modl "between" (between ());
Py_module.set modl "clamp_exn" (clamp_exn ());
modl
end;;
let%map_open
positional_1 = positional "positional_1" param_core_kernel____time_float__t ~docstring:"Core_kernel__.Time_float.t" and
zone = keyword "zone" param_core_kernel____time_float__zone__t ~docstring:"Core_kernel__.Time_float.Zone.t"
in
Core_kernel__Date.of_time
positional_1
~zone
|> python_of_core_kernel__date__t
;;
let%map_open
zone = keyword "zone" param_core_kernel____time_float__zone__t ~docstring:"Core_kernel__.Time_float.Zone.t"
in
Core_kernel__Date.today
~zone
|> python_of_core_kernel__date__t
;;
let register_module ~module_name =
let modl = Py_module.create module_name in
Py_module.set modl "bin_shape_t" (bin_shape_t ());
Py_module.set modl "t_of_sexp" (t_of_sexp ());
Py_module.set modl "sexp_of_t" (sexp_of_t ());
Py_module.set modl "hash_fold_t" (hash_fold_t ());
Py_module.set modl "hash" (hash ());
Py_module.set modl "of_string" (of_string ());
Py_module.set modl "to_string" (to_string ());
Py_module.set modl "greatereq" (greatereq ());
Py_module.set modl "lowereq" (lowereq ());
Py_module.set modl "eq" (eq ());
Py_module.set modl "greater" (greater ());
Py_module.set modl "lower" (lower ());
Py_module.set modl "neq" (neq ());
Py_module.set modl "equal" (equal ());
Py_module.set modl "compare" (compare ());
Py_module.set modl "min" (min ());
Py_module.set modl "max" (max ());
Py_module.set modl "ascending" (ascending ());
Py_module.set modl "descending" (descending ());
Py_module.set modl "between" (between ());
Py_module.set modl "clamp_exn" (clamp_exn ());
Py_module.set modl "pp" (pp ());
Py_module.set modl "create_exn" (create_exn ());
Py_module.set modl "of_string_iso8601_basic" (of_string_iso8601_basic ());
Py_module.set modl "to_string_iso8601_basic" (to_string_iso8601_basic ());
Py_module.set modl "to_string_american" (to_string_american ());
Py_module.set modl "day" (day ());
Py_module.set modl "month" (month ());
Py_module.set modl "year" (year ());
Py_module.set modl "day_of_week" (day_of_week ());
Py_module.set modl "week_number_and_year" (week_number_and_year ());
Py_module.set modl "week_number" (week_number ());
Py_module.set modl "is_weekend" (is_weekend ());
Py_module.set modl "is_weekday" (is_weekday ());
Py_module.set modl "add_days" (add_days ());
Py_module.set modl "add_months" (add_months ());
Py_module.set modl "add_years" (add_years ());
Py_module.set modl "diff" (diff ());
Py_module.set modl "diff_weekdays" (diff_weekdays ());
Py_module.set modl "diff_weekend_days" (diff_weekend_days ());
Py_module.set modl "add_weekdays_rounding_backward" (add_weekdays_rounding_backward ());
Py_module.set modl "add_weekdays_rounding_forward" (add_weekdays_rounding_forward ());
Py_module.set modl "add_weekdays_deprecated" (add_weekdays_deprecated ());
Py_module.set modl "add_weekdays_rounding_in_direction_of_step" (add_weekdays_rounding_in_direction_of_step ());
Py_module.set modl "dates_between" (dates_between ());
Py_module.set modl "weekdays_between" (weekdays_between ());
Py_module.set modl "previous_weekday" (previous_weekday ());
Py_module.set modl "following_weekday" (following_weekday ());
Py_module.set modl "first_strictly_after" (first_strictly_after ());
Py_module.set modl "days_in_month" (days_in_month ());
Py_module.set modl "is_leap_year" (is_leap_year ());
Py_module.set modl "unix_epoch" (unix_epoch ());
let sub_module = Days.register_module ~module_name:"core_kernel__date__days__days" in
Py_module.set_value modl "days" (Py_module.pyobject sub_module);
let sub_module = Option.register_module ~module_name:"core_kernel__date__option__option" in
Py_module.set_value modl "option" (Py_module.pyobject sub_module);
Py_module.set modl "of_time" (of_time ());
Py_module.set modl "today" (today ());
modl
|
b5bc092072da0349922bf254239bdfe98f98b4c4d490f3f8758d265d15232d26 | saturn-lab/BDMI-2019S | 李金朋-getfiblist.hs | fib n1 n2 = n1 : fib n2 (n1+n2)
getfiblist = fib 1 1 | null | https://raw.githubusercontent.com/saturn-lab/BDMI-2019S/77f1e7a341a894f1c673e0f2340ccc37184c04b6/haskell/homework/%E6%9D%8E%E9%87%91%E6%9C%8B-getfiblist.hs | haskell | fib n1 n2 = n1 : fib n2 (n1+n2)
getfiblist = fib 1 1 | |
c2bb59714213daa500d48d99cbb6b4ba2f99457d99130e07fb33de33a74a4fdc | icfpcontest2021/icfpcontest2021.github.io | GenerateBonus.hs | {-# LANGUAGE NamedFieldPuns #-}
# LANGUAGE RecordWildCards #
module BrainWall.Main.GenerateBonus
( main
) where
import qualified BrainWall.Box as Box
import BrainWall.Database (allFeatures)
import BrainWall.Json
import BrainWall.Main.InsertProblems (iterateProblems,
problemIdFromFilePath,
solutionIdFromFilePath)
import BrainWall.Polygon
import BrainWall.Polygon.ContainsPoint (pointInPolygon)
import BrainWall.Problem
import BrainWall.V2
import Control.Monad (unless)
import Data.Aeson.Encode.Pretty (encodePretty)
import qualified Data.ByteString.Lazy as BL
import Data.Foldable (for_)
import qualified Data.HashMap.Strict as HMS
import Data.List
import Data.Maybe (fromJust)
import Data.Ord
import Data.Traversable (for)
import qualified Data.Vector as V
import qualified Options.Applicative.Extended as OA
import System.FilePath (replaceExtension)
import qualified Test.QuickCheck as QC
bonusPositions :: Hole -> Solution -> [V2 Integer] -> [V2 Integer]
bonusPositions (Hole poly) solution = go
where
go bonusesPlaced = case allPoints of
[] -> []
_ -> thisBonus : go (thisBonus:bonusesPlaced)
where
thisBonus = maximumBy (comparing solutionDistance) allPoints
Just box = foldMap (Just . Box.fromV2) $ unPolygon poly
allPoints = filter (`notElem` bonusesPlaced) $
filter (`pointInPolygon` poly) $
[V2 x y | x <- [v2X (Box.topLeft box), v2X (Box.topLeft box) + 3 .. v2X (Box.bottomRight box)] , y <- [v2Y (Box.topLeft box), v2Y (Box.topLeft box) + 3 .. v2Y (Box.bottomRight box)] ]
solutionDistance v2 = V.minimum $ V.map (dist v2) (solutionVertices solution V.++ V.fromList bonusesPlaced V.++ unPolygon poly)
dist :: V2 Integer -> V2 Integer -> Double
dist v1 v2 = sqrt $ fromIntegral $ squaredDistance v1 v2
-- | Shuffle such that no element ends up at its original position.
properShuffle :: Eq a => [a] -> QC.Gen [a]
properShuffle things =
fmap (map $ snd . snd) $
indexedShuffle `QC.suchThat` \l -> and [i /= j | (i, (j, _)) <- l]
where
indexedShuffle = zip [0 :: Int ..] <$> QC.shuffle (zip [0 ..] things)
genBonusAssignment :: [Int] -> QC.Gen [(Int, (Int, BonusType))]
genBonusAssignment problemIds = fmap (zip problemIds) $ do
ordered <- for problemIds $ \problemId -> do
types <- QC.shuffle [SuperFlex, WallHack, Globalist, BreakALeg, SuperFlex, WallHack]
pure (problemId, head types)
properShuffle ordered
type Problems = HMS.HashMap Int (FilePath, Problem, FilePath, Solution)
type BonusLayer = [(Int, Bonus)]
genBonusLayer :: Problems -> QC.Gen BonusLayer
genBonusLayer problems = do
assignment <- genBonusAssignment $ HMS.keys problems
for (HMS.toList problems) $ \(pid, (_, problem, _, solution)) -> do
let hole = problemHole problem
(pid', ty) = fromJust $ lookup pid assignment
takenPositions = bonusPosition <$> V.toList (problemBonuses problem)
position = head $ bonusPositions hole solution takenPositions
pure (pid, Bonus pid' ty position)
parseProblems :: [FilePath] -> IO Problems
parseProblems problemPaths = do
problems <- fmap HMS.fromList $ for problemPaths $ \path -> do
pid <- maybe (fail $ "Bad file name: " <> show path) pure $
problemIdFromFilePath path
problem <- decodeFileWith (decodeProblem decodeFeatures) path
pure (pid, (path, problem))
solutions <- fmap HMS.fromList $ for solutionPaths $ \path -> do
pid <- maybe (fail $ "Bad file name: " <> show path) pure $
solutionIdFromFilePath path
solution <- decodeFileWith (decodeSolution decodeFeatures) path
pure (pid, (path, solution))
unless (HMS.null $ HMS.difference problems solutions) $
fail $ "These problems don't have solutions: " <> show (HMS.difference problems solutions)
unless (HMS.null $ HMS.difference solutions problems) $
fail $ "These solutions don't have problems: " <> show (HMS.difference problems solutions)
return $ HMS.intersectionWith (\(k1, v1) (k2, v2) -> (k1, v1, k2, v2)) problems solutions
where
solutionPaths = map (\path -> replaceExtension path ".solution") problemPaths
decodeFeatures = allFeatures
updateProblems :: Problems -> (Int -> Problem -> Problem) -> IO ()
updateProblems problems f = for_ (HMS.toList problems) $
\(pid, (path, problem, _, _)) ->
BL.writeFile path . encodePretty . encodeProblem allFeatures $
f pid problem
data Options = Options
{ optionsClear :: Bool
, optionsPaths :: [FilePath]
} deriving (Show)
parseOptions :: OA.Parser Options
parseOptions = Options
<$> OA.switch (OA.long "clear")
<*> OA.some (OA.strArgument $ OA.metavar "N.problem")
main :: IO ()
main = do
options <- OA.simpleRunParser parseOptions
problemPaths <- concat <$> for (optionsPaths options) iterateProblems
problems <- parseProblems problemPaths
if optionsClear options then
updateProblems problems $ \_ problem ->
problem {problemBonuses = mempty}
else do
bonuses <- fmap head . QC.sample' $ genBonusLayer problems
updateProblems problems $ \pid problem -> case lookup pid bonuses of
Nothing -> problem
Just bonus ->
let newBonuses = problemBonuses problem <> V.singleton bonus in
problem {problemBonuses = newBonuses}
| null | https://raw.githubusercontent.com/icfpcontest2021/icfpcontest2021.github.io/fb23fea2a8ecec7740017d3dda78d921c1df5a26/toolchain/lib/BrainWall/Main/GenerateBonus.hs | haskell | # LANGUAGE NamedFieldPuns #
| Shuffle such that no element ends up at its original position. | # LANGUAGE RecordWildCards #
module BrainWall.Main.GenerateBonus
( main
) where
import qualified BrainWall.Box as Box
import BrainWall.Database (allFeatures)
import BrainWall.Json
import BrainWall.Main.InsertProblems (iterateProblems,
problemIdFromFilePath,
solutionIdFromFilePath)
import BrainWall.Polygon
import BrainWall.Polygon.ContainsPoint (pointInPolygon)
import BrainWall.Problem
import BrainWall.V2
import Control.Monad (unless)
import Data.Aeson.Encode.Pretty (encodePretty)
import qualified Data.ByteString.Lazy as BL
import Data.Foldable (for_)
import qualified Data.HashMap.Strict as HMS
import Data.List
import Data.Maybe (fromJust)
import Data.Ord
import Data.Traversable (for)
import qualified Data.Vector as V
import qualified Options.Applicative.Extended as OA
import System.FilePath (replaceExtension)
import qualified Test.QuickCheck as QC
bonusPositions :: Hole -> Solution -> [V2 Integer] -> [V2 Integer]
bonusPositions (Hole poly) solution = go
where
go bonusesPlaced = case allPoints of
[] -> []
_ -> thisBonus : go (thisBonus:bonusesPlaced)
where
thisBonus = maximumBy (comparing solutionDistance) allPoints
Just box = foldMap (Just . Box.fromV2) $ unPolygon poly
allPoints = filter (`notElem` bonusesPlaced) $
filter (`pointInPolygon` poly) $
[V2 x y | x <- [v2X (Box.topLeft box), v2X (Box.topLeft box) + 3 .. v2X (Box.bottomRight box)] , y <- [v2Y (Box.topLeft box), v2Y (Box.topLeft box) + 3 .. v2Y (Box.bottomRight box)] ]
solutionDistance v2 = V.minimum $ V.map (dist v2) (solutionVertices solution V.++ V.fromList bonusesPlaced V.++ unPolygon poly)
dist :: V2 Integer -> V2 Integer -> Double
dist v1 v2 = sqrt $ fromIntegral $ squaredDistance v1 v2
properShuffle :: Eq a => [a] -> QC.Gen [a]
properShuffle things =
fmap (map $ snd . snd) $
indexedShuffle `QC.suchThat` \l -> and [i /= j | (i, (j, _)) <- l]
where
indexedShuffle = zip [0 :: Int ..] <$> QC.shuffle (zip [0 ..] things)
genBonusAssignment :: [Int] -> QC.Gen [(Int, (Int, BonusType))]
genBonusAssignment problemIds = fmap (zip problemIds) $ do
ordered <- for problemIds $ \problemId -> do
types <- QC.shuffle [SuperFlex, WallHack, Globalist, BreakALeg, SuperFlex, WallHack]
pure (problemId, head types)
properShuffle ordered
type Problems = HMS.HashMap Int (FilePath, Problem, FilePath, Solution)
type BonusLayer = [(Int, Bonus)]
genBonusLayer :: Problems -> QC.Gen BonusLayer
genBonusLayer problems = do
assignment <- genBonusAssignment $ HMS.keys problems
for (HMS.toList problems) $ \(pid, (_, problem, _, solution)) -> do
let hole = problemHole problem
(pid', ty) = fromJust $ lookup pid assignment
takenPositions = bonusPosition <$> V.toList (problemBonuses problem)
position = head $ bonusPositions hole solution takenPositions
pure (pid, Bonus pid' ty position)
parseProblems :: [FilePath] -> IO Problems
parseProblems problemPaths = do
problems <- fmap HMS.fromList $ for problemPaths $ \path -> do
pid <- maybe (fail $ "Bad file name: " <> show path) pure $
problemIdFromFilePath path
problem <- decodeFileWith (decodeProblem decodeFeatures) path
pure (pid, (path, problem))
solutions <- fmap HMS.fromList $ for solutionPaths $ \path -> do
pid <- maybe (fail $ "Bad file name: " <> show path) pure $
solutionIdFromFilePath path
solution <- decodeFileWith (decodeSolution decodeFeatures) path
pure (pid, (path, solution))
unless (HMS.null $ HMS.difference problems solutions) $
fail $ "These problems don't have solutions: " <> show (HMS.difference problems solutions)
unless (HMS.null $ HMS.difference solutions problems) $
fail $ "These solutions don't have problems: " <> show (HMS.difference problems solutions)
return $ HMS.intersectionWith (\(k1, v1) (k2, v2) -> (k1, v1, k2, v2)) problems solutions
where
solutionPaths = map (\path -> replaceExtension path ".solution") problemPaths
decodeFeatures = allFeatures
updateProblems :: Problems -> (Int -> Problem -> Problem) -> IO ()
updateProblems problems f = for_ (HMS.toList problems) $
\(pid, (path, problem, _, _)) ->
BL.writeFile path . encodePretty . encodeProblem allFeatures $
f pid problem
data Options = Options
{ optionsClear :: Bool
, optionsPaths :: [FilePath]
} deriving (Show)
parseOptions :: OA.Parser Options
parseOptions = Options
<$> OA.switch (OA.long "clear")
<*> OA.some (OA.strArgument $ OA.metavar "N.problem")
-- | Entry point: with @--clear@, strip every bonus from the given
-- problems; otherwise generate a bonus layer and append each generated
-- bonus to its problem's bonus vector.
main :: IO ()
main = do
    options <- OA.simpleRunParser parseOptions
    -- Expand each argument into individual problem files.
    problemPaths <- concat <$> for (optionsPaths options) iterateProblems
    problems <- parseProblems problemPaths
    if optionsClear options then
        updateProblems problems $ \_ problem ->
            problem {problemBonuses = mempty}
    else do
        -- QC.sample' yields several generated values; only the first is used.
        bonuses <- fmap head . QC.sample' $ genBonusLayer problems
        updateProblems problems $ \pid problem -> case lookup pid bonuses of
            Nothing -> problem
            Just bonus ->
                let newBonuses = problemBonuses problem <> V.singleton bonus in
                problem {problemBonuses = newBonuses}
|
a1df6c6c47d3c29aa38381e5b19c6a90ed552ad4c42b111987a0bfac1a121573 | ssardina/ergo | tag-bridge.scm | This is interface code that can be used for an ERGO agent that
;;; uses tagged actions for online interactions
;;; this calls define-interface after modifying readfn and printfn to use tag
;; Wrap READFN and PRINTFN so actions carry TAG, then install the
;; wrappers as the 'in and 'out interfaces via define-interface.
(define (define-tagged-interfaces tag readfn printfn)
  ;; Read one action and insert TAG as its second element: a bare
  ;; symbol A becomes (A TAG); a list (A . ARGS) becomes (A TAG . ARGS).
  (define (read-add-tag)
    (let ((r (readfn)))
      (if (symbol? r) (list r tag) (cons (car r) (cons tag (cdr r))))))
  ;; Print an action only when its second element is TAG, stripping the
  ;; tag before handing it to PRINTFN; anything else is ignored.
  (define (print-detag a)
    (and (not (symbol? a)) (not (null? (cdr a))) (eq? (cadr a) tag)
         (printfn (cons (car a) (cddr a)))))
  (define-interface 'in read-add-tag)
  (define-interface 'out print-detag))
;; Set up the 'in and 'out interfaces for TAG over a TCP connection to
;; IPaddress:portnum.  Incoming actions are read from the connection;
;; outgoing tagged actions are written back to it.
(define (tag-tcp-setup tag portnum IPaddress)
  (eprintf "Setting up interfaces over TCP for ~a\n" tag)
  ;; open-tcp-client: the first port is used for reading, the second
  ;; for writing (see car/cadr below).
  (define tcp-ports (open-tcp-client portnum IPaddress))
  (define-tagged-interfaces tag
    (lambda () (read (car tcp-ports)))
    (lambda (act) (displayln act (cadr tcp-ports))))
  (eprintf "~a is ready to go\n" tag))
;; Set up tagged interfaces over standard input/output, using the
;; 'user tag and the stock read-exogenous/write-endogenous handlers.
(define (tag-stdio-setup)
  (eprintf "Setting up interfaces over stdin and stdout\n")
  (define-tagged-interfaces 'user read-exogenous write-endogenous))
| null | https://raw.githubusercontent.com/ssardina/ergo/4225ebb95779d1748f377cf2e4d0a593d6a2a103/Projects/LEGO/tag-bridge.scm | scheme | uses tagged actions for online interactions
setup in and out interfaces over TCP
setup in and out interfaces with standard IO | This is interface code that can be used for an ERGO agent that
this calls define - interface after modifying readfn and printfn to use tag
(define (define-tagged-interfaces tag readfn printfn)
(define (read-add-tag)
(let ((r (readfn)))
(if (symbol? r) (list r tag) (cons (car r) (cons tag (cdr r))))))
(define (print-detag a)
(and (not (symbol? a)) (not (null? (cdr a))) (eq? (cadr a) tag)
(printfn (cons (car a) (cddr a)))))
(define-interface 'in read-add-tag)
(define-interface 'out print-detag))
(define (tag-tcp-setup tag portnum IPaddress)
(eprintf "Setting up interfaces over TCP for ~a\n" tag)
(define tcp-ports (open-tcp-client portnum IPaddress))
(define-tagged-interfaces tag
(lambda () (read (car tcp-ports)))
(lambda (act) (displayln act (cadr tcp-ports))))
(eprintf "~a is ready to go\n" tag))
(define (tag-stdio-setup)
(eprintf "Setting up interfaces over stdin and stdout\n")
(define-tagged-interfaces 'user read-exogenous write-endogenous))
|
4bb9bb2d05ab9e1598bab338ac2b3877f313d16a65aadd8d6d8bb299d03aa53b | didierverna/tfm | character.lisp | ;;; character.lisp --- Character Information
Copyright ( C ) 2018 , 2019
Author : < >
This file is part of TFM .
;; Permission to use, copy, modify, and distribute this software for any
;; purpose with or without fee is hereby granted, provided that the above
;; copyright notice and this permission notice appear in all copies.
THIS SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
;; WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
;; MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
;; ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
;; OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
;;; Commentary:
;;; Code:
(in-package :net.didierverna.tfm)
(in-readtable :net.didierverna.tfm)
;; ==========================================================================
;; Extension Recipes
;; ==========================================================================
;; An extension recipe describes how to assemble an arbitrarily tall
;; glyph from up to four component characters: optional top, middle and
;; bottom pieces plus a mandatory repeatable filler.
(defclass extension-recipe ()
  ((top-character
    :documentation "The recipe's top character, or NIL."
    :initform nil
    :initarg :top-character
    :accessor top-character)
   (middle-character
    :documentation "The recipe's middle character, or NIL."
    :initform nil
    :initarg :middle-character
    :accessor middle-character)
   (bottom-character
    :documentation "The recipe's bottom character, or NIL."
    :initform nil
    :initarg :bottom-character
    :accessor bottom-character)
   (repeated-character
    :documentation "The recipe's repeated character."
    :initarg :repeated-character
    :accessor repeated-character))
  (:documentation "The Extension Recipe class.
This class represents decoded information for extensible characters. Within
the context of this library, the expression \"extension recipe\" denotes an
instance of this class."))
(defmethod print-object ((extension-recipe extension-recipe) stream)
  "Print EXTENSION-RECIPE unreadably, showing its repeated character."
  ;; ~A prints aesthetically, exactly as PRINC would.
  (print-unreadable-object (extension-recipe stream :type t)
    (format stream "~A" (repeated-character extension-recipe))))
;; #### NOTE: we don't bother to check that the repeated character is not NIL
;; because this class is not exported and I trust my code.
(defun make-extension-recipe
    (repeated-character
     &rest initargs &key top-character middle-character bottom-character)
  "Make a new EXTENSION-RECIPE with REPEATED-CHARACTER and return it.
The recipe may also have a TOP-, MIDDLE-, and BOTTOM-CHARACTER."
  ;; The &key parameters only exist to restrict which initargs callers
  ;; may pass; the whole INITARGS list is forwarded to MAKE-INSTANCE.
  (declare (ignore top-character middle-character bottom-character))
  (apply #'make-instance 'extension-recipe
    :repeated-character repeated-character initargs))
;; ==========================================================================
;; Character Metrics
;; ==========================================================================
;; The accessor list must exist at compile time because the macro just
;; below splices it into its expansion.
(eval-when (:compile-toplevel :load-toplevel :execute)
  (define-constant +character-metrics-dimension-accessors+
      '(width height depth italic-correction)
    "The list of dimension accessor names in the CHARACTER-METRICS class."))
;; Convenience wrapper around MAP-ACCESSORS for the four dimension slots.
(defmacro map-character-metrics-dimension-accessors (var character &body body)
  "Map BODY on CHARACTER metrics dimension accessors available as VAR."
  `(map-accessors ,var ,character ,+character-metrics-dimension-accessors+
     ,@body))
;; Decoded per-character data: code, owning font, the four dimensions,
;; and the two mutually exclusive extras (character-list successor or
;; extension recipe) filled in after all instances are created.
(defclass character-metrics ()
  ((code
    :documentation "The character's numerical code."
    :initarg :code
    :reader code)
   (font
    :documentation "The character's font."
    :initarg :font
    :reader font)
   (width
    :documentation "The character's width.
It is expressed in design size units, or in TeX point units if the font is
frozen."
    :initarg :width
    :accessor width)
   (height
    :documentation "The character's height.
It is expressed in design size units, or in TeX point units if the font is
frozen."
    :initarg :height
    :accessor height)
   (depth
    :documentation "The character's depth.
It is expressed in design size units, or in TeX point units if the font is
frozen."
    :initarg :depth
    :accessor depth)
   (italic-correction
    :documentation
    "The character's italic correction.
TeX uses this value for regular characters followed by the command \/, and
also in math mode for superscript placement. It is expressed in design size
units, or in TeX point units if the font is frozen."
    :initarg :italic-correction
    :accessor italic-correction)
   (next-character
    :documentation "The next character in a character list.
This slot is non-null only if the character is part of a chain of characters
of ascending size, and not the last one (see TeX: the Program [544]). It is
mutually exclusive with the EXTENSION-RECIPE slot, and also with the existence
of a ligature or kerning program for this character."
    :initform nil
    :accessor next-character)
   (extension-recipe
    :documentation "The character's extension recipe, or NIL.
This slot is non-null only if this character is extensible (see TeX: the
Program [544]). It is mutually exclusive with the NEXT-CHARACTER slot, and
also with the existence of a ligature or kerning program for this character."
    :initform nil
    :accessor extension-recipe))
  (:documentation "The Character Metrics class.
This class represents decoded character information. Within the context of
this library, the term \"character\" denotes an instance of this class."))
(defmethod print-object ((character character-metrics) stream)
  "Print CHARACTER unreadably, showing its numerical code."
  ;; ~A prints aesthetically, exactly as PRINC would.
  (print-unreadable-object (character stream :type t)
    (format stream "~A" (code character))))
(defun make-character-metrics (code font width height depth italic-correction)
  "Create and return a fresh CHARACTER-METRICS instance.
CODE, FONT, WIDTH, HEIGHT, DEPTH, and ITALIC-CORRECTION initialize the
corresponding slots.  NEXT-CHARACTER and EXTENSION-RECIPE keep their NIL
defaults; they are filled in later, once every character metrics instance
has been created."
  ;; Each initarg targets a distinct slot, so their order is irrelevant.
  (make-instance 'character-metrics
    :italic-correction italic-correction
    :depth depth
    :height height
    :width width
    :font font
    :code code))
;; ---------------------------------
;; Extension Recipe Pseudo-Accessors
;; ---------------------------------
(defun extensiblep (character)
  "Return T if CHARACTER has an extension recipe, NIL otherwise."
  ;; Collapse the recipe (or NIL) to a strict boolean so the recipe
  ;; object itself is never exposed to callers.
  (not (null (extension-recipe character))))
;; Signaled by the pseudo-accessors below when asked for a recipe
;; component of a character that has no extension recipe.
(define-condition not-extensible (tfm-usage-error)
  ((value
    :documentation "The non extensible character."
    :initarg :value
    :accessor value))
  (:report (lambda (not-extensible stream)
             (format stream "Character ~A is not extensible."
               (value not-extensible))))
  (:documentation "The Not Extensible usage error.
It signals an attempt at accessing the extension recipe of a non extensible
character."))
;; Generate a CHARACTER-METRICS method that forwards NAME to the
;; character's extension recipe, signaling NOT-EXTENSIBLE when absent.
;; The docstring is computed at macroexpansion time via FORMAT.
(defmacro define-extension-recipe-pseudo-accessor (name)
  `(defmethod ,name ((character character-metrics))
     ,(format nil "Return extensible CHARACTER's ~A.
If CHARACTER is not extensible, signal a NOT-EXTENSIBLE error."
        name)
     (unless (extensiblep character) (error 'not-extensible :value character))
     (,name (extension-recipe character))))
;; Pseudo-accessors forwarding to the extension recipe's components.
(define-extension-recipe-pseudo-accessor top-character)
(define-extension-recipe-pseudo-accessor middle-character)
(define-extension-recipe-pseudo-accessor bottom-character)
(define-extension-recipe-pseudo-accessor repeated-character)
;;; character.lisp ends here
| null | https://raw.githubusercontent.com/didierverna/tfm/192c10b04eaec381638bfcf9bbea66b208141f5a/core/src/character.lisp | lisp | character.lisp --- Character Information
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
Commentary:
Code:
==========================================================================
Extension Recipes
==========================================================================
because this class is not exported and I trust my code.
==========================================================================
Character Metrics
==========================================================================
---------------------------------
Extension Recipe Pseudo-Accessors
---------------------------------
We don't want to expose the recipe itself.
character.lisp ends here |
Copyright ( C ) 2018 , 2019
Author : < >
This file is part of TFM .
THIS SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
(in-package :net.didierverna.tfm)
(in-readtable :net.didierverna.tfm)
(defclass extension-recipe ()
((top-character
:documentation "The recipe's top character, or NIL."
:initform nil
:initarg :top-character
:accessor top-character)
(middle-character
:documentation "The recipe's middle character, or NIL."
:initform nil
:initarg :middle-character
:accessor middle-character)
(bottom-character
:documentation "The recipe's bottom character, or NIL."
:initform nil
:initarg :bottom-character
:accessor bottom-character)
(repeated-character
:documentation "The recipe's repeated character."
:initarg :repeated-character
:accessor repeated-character))
(:documentation "The Extension Recipe class.
This class represents decoded information for extensible characters. Within
the context of this library, the expression \"extension recipe\" denotes an
instance of this class."))
(defmethod print-object ((extension-recipe extension-recipe) stream)
"Print EXTENSION-RECIPE unreadably with its repeated character to STREAM."
(print-unreadable-object (extension-recipe stream :type t)
(princ (repeated-character extension-recipe) stream)))
# # # # NOTE : we do n't bother to check that the repeated character is not NIL
(defun make-extension-recipe
(repeated-character
&rest initargs &key top-character middle-character bottom-character)
"Make a new EXTENSION-RECIPE with REPEATED-CHARACTER and return it.
The recipe may also have a TOP-, MIDDLE-, and BOTTOM-CHARACTER."
(declare (ignore top-character middle-character bottom-character))
(apply #'make-instance 'extension-recipe
:repeated-character repeated-character initargs))
(eval-when (:compile-toplevel :load-toplevel :execute)
(define-constant +character-metrics-dimension-accessors+
'(width height depth italic-correction)
"The list of dimension accessor names in the CHARACTER-METRICS class."))
(defmacro map-character-metrics-dimension-accessors (var character &body body)
"Map BODY on CHARACTER metrics dimension accessors available as VAR."
`(map-accessors ,var ,character ,+character-metrics-dimension-accessors+
,@body))
(defclass character-metrics ()
((code
:documentation "The character's numerical code."
:initarg :code
:reader code)
(font
:documentation "The character's font."
:initarg :font
:reader font)
(width
:documentation "The character's width.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initarg :width
:accessor width)
(height
:documentation "The character's height.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initarg :height
:accessor height)
(depth
:documentation "The character's depth.
It is expressed in design size units, or in TeX point units if the font is
frozen."
:initarg :depth
:accessor depth)
(italic-correction
:documentation
"The character's italic correction.
TeX uses this value for regular characters followed by the command \/, and
also in math mode for superscript placement. It is expressed in design size
units, or in TeX point units if the font is frozen."
:initarg :italic-correction
:accessor italic-correction)
(next-character
:documentation "The next character in a character list.
This slot is non-null only if the character is part of a chain of characters
of ascending size, and not the last one (see TeX: the Program [544]). It is
mutually exclusive with the EXTENSION-RECIPE slot, and also with the existence
of a ligature or kerning program for this character."
:initform nil
:accessor next-character)
(extension-recipe
:documentation "The character's extension recipe, or NIL.
This slot is non-null only if this character is extensible (see TeX: the
Program [544]). It is mutually exclusive with the NEXT-CHARACTER slot, and
also with the existence of a ligature or kerning program for this character."
:initform nil
:accessor extension-recipe))
(:documentation "The Character Metrics class.
This class represents decoded character information. Within the context of
this library, the term \"character\" denotes an instance of this class."))
(defmethod print-object ((character character-metrics) stream)
"Print CHARACTER unreadably with its code to STREAM."
(print-unreadable-object (character stream :type t)
(princ (code character) stream)))
(defun make-character-metrics (code font width height depth italic-correction)
"Make a new CHARACTER-METRICS instance, and return it.
Initialize the character's CODE, FONT, WIDTH, HEIGHT, DEPTH, and
ITALIC-CORRECTION appropriately. The two remaining slots (NEXT-CHARACTER and
EXTENSION-RECIPE) will be initialized later if needed, when all character
metrics instances are created."
(make-instance 'character-metrics
:code code
:font font
:width width
:height height
:depth depth
:italic-correction italic-correction))
(defun extensiblep (character)
"Return T if CHARACTER has an extension recipe."
(when (extension-recipe character) t))
(define-condition not-extensible (tfm-usage-error)
((value
:documentation "The non extensible character."
:initarg :value
:accessor value))
(:report (lambda (not-extensible stream)
(format stream "Character ~A is not extensible."
(value not-extensible))))
(:documentation "The Not Extensible usage error.
It signals an attempt at accessing the extension recipe of a non extensible
character."))
(defmacro define-extension-recipe-pseudo-accessor (name)
`(defmethod ,name ((character character-metrics))
,(format nil "Return extensible CHARACTER's ~A.
If CHARACTER is not extensible, signal a NOT-EXTENSIBLE error."
name)
(unless (extensiblep character) (error 'not-extensible :value character))
(,name (extension-recipe character))))
(define-extension-recipe-pseudo-accessor top-character)
(define-extension-recipe-pseudo-accessor middle-character)
(define-extension-recipe-pseudo-accessor bottom-character)
(define-extension-recipe-pseudo-accessor repeated-character)
|
037a48de020d90ea134596b3424e1b844f5481fdaacbede0317718e129c6c53b | robertmeta/cowboy-examples | upload_html.erl | -module(upload_html).
-export([init/3, handle/2, terminate/2]).

%% HTTP handler built on the cowboy_http_req API: serves a static HTML
%% page containing a multipart file-upload form.

%% No per-request setup is needed; a placeholder atom is carried as state.
init({tcp, http}, Req, _Opts) ->
    {ok, Req, undefined_state}.

%% Reply 200 text/html with an inline form that POSTs the chosen file
%% to /upload using multipart/form-data encoding.
handle(Req, State) ->
    HtmlContentType = {<<"Content-Type">>, <<"text/html">>},
    {ok, Reply} = cowboy_http_req:reply(200, [HtmlContentType],
        <<"
<html><form action='/upload' method='POST'
enctype='multipart/form-data'>
<input type='file' name='file'><br>
<input type='submit'>
</form></html>
">>
        , Req),
    {ok, Reply, State}.

%% Nothing to clean up.
terminate(_Req, _State) ->
    ok.
| null | https://raw.githubusercontent.com/robertmeta/cowboy-examples/d03c289c9fb0d750eca11e3f1671e74d1841bd09/apps/upload/src/upload_html.erl | erlang | -module(upload_html).
-export([init/3, handle/2, terminate/2]).
init({tcp, http}, Req, _Opts) ->
{ok, Req, undefined_state}.
handle(Req, State) ->
HtmlContentType = {<<"Content-Type">>, <<"text/html">>},
{ok, Reply} = cowboy_http_req:reply(200, [HtmlContentType],
<<"
<html><form action='/upload' method='POST'
enctype='multipart/form-data'>
<input type='file' name='file'><br>
<input type='submit'>
</form></html>
">>
, Req),
{ok, Reply, State}.
terminate(_Req, _State) ->
ok.
| |
bba64d0dfe39f70ac92b66ea6bfdfef1e6969a96b2efeeb82e5b12abe20e298e | Noeda/dfterm3 | CP437ToUnicode.hs | | Module that turns CP437 code points to Unicode code points .
--
module Dfterm3.CP437ToUnicode
( cp437ToUnicode
, unicodeToCP437 )
where
import Data.Word ( Word8 )
import Data.Char ( chr )
import qualified Data.Map as M
-- | Map a CP437 (IBM PC) code point to its Unicode equivalent.
-- Codes 0x20-0x7E coincide with ASCII and fall through to the final
-- catch-all equation; control codes 1-31, DEL (127) and the high half
-- (128-255) are translated via the standard CP437 glyph table.
--
-- BUG FIX: 143 (0x8F) is U+00C5 'Å' in CP437; it previously duplicated
-- code 134's lowercase U+00E5 'å', which also made the reverse table
-- in 'unicodeMap' ambiguous (Data.Map.fromList keeps the last of two
-- duplicate keys, so 'å' mapped back to 143 instead of 134).
cp437ToUnicode :: Word8 -> Char
cp437ToUnicode 1 = '\x263a'
cp437ToUnicode 2 = '\x263b'
cp437ToUnicode 3 = '\x2665'
cp437ToUnicode 4 = '\x2666'
cp437ToUnicode 5 = '\x2663'
cp437ToUnicode 6 = '\x2660'
cp437ToUnicode 7 = '\x2022'
cp437ToUnicode 8 = '\x25d8'
cp437ToUnicode 9 = '\x25cb'
cp437ToUnicode 10 = '\x25d9'
cp437ToUnicode 11 = '\x2642'
cp437ToUnicode 12 = '\x2640'
cp437ToUnicode 13 = '\x266a'
cp437ToUnicode 14 = '\x266b'
cp437ToUnicode 15 = '\x263c'
cp437ToUnicode 16 = '\x25ba'
cp437ToUnicode 17 = '\x25c4'
cp437ToUnicode 18 = '\x2195'
cp437ToUnicode 19 = '\x203c'
cp437ToUnicode 20 = '\x00b6'
cp437ToUnicode 21 = '\x00a7'
cp437ToUnicode 22 = '\x25ac'
cp437ToUnicode 23 = '\x21a8'
cp437ToUnicode 24 = '\x2191'
cp437ToUnicode 25 = '\x2193'
cp437ToUnicode 26 = '\x2192'
cp437ToUnicode 27 = '\x2190'
cp437ToUnicode 28 = '\x221f'
cp437ToUnicode 29 = '\x2194'
cp437ToUnicode 30 = '\x25b2'
cp437ToUnicode 31 = '\x25bc'
cp437ToUnicode 127 = '\x2302'
cp437ToUnicode 128 = '\x00c7'
cp437ToUnicode 129 = '\x00fc'
cp437ToUnicode 130 = '\x00e9'
cp437ToUnicode 131 = '\x00e2'
cp437ToUnicode 132 = '\x00e4'
cp437ToUnicode 133 = '\x00e0'
cp437ToUnicode 134 = '\x00e5'
cp437ToUnicode 135 = '\x00e7'
cp437ToUnicode 136 = '\x00ea'
cp437ToUnicode 137 = '\x00eb'
cp437ToUnicode 138 = '\x00e8'
cp437ToUnicode 139 = '\x00ef'
cp437ToUnicode 140 = '\x00ee'
cp437ToUnicode 141 = '\x00ec'
cp437ToUnicode 142 = '\x00c4'
cp437ToUnicode 143 = '\x00c5'
cp437ToUnicode 144 = '\x00c9'
cp437ToUnicode 145 = '\x00e6'
cp437ToUnicode 146 = '\x00c6'
cp437ToUnicode 147 = '\x00f4'
cp437ToUnicode 148 = '\x00f6'
cp437ToUnicode 149 = '\x00f2'
cp437ToUnicode 150 = '\x00fb'
cp437ToUnicode 151 = '\x00f9'
cp437ToUnicode 152 = '\x00ff'
cp437ToUnicode 153 = '\x00d6'
cp437ToUnicode 154 = '\x00dc'
cp437ToUnicode 155 = '\x00a2'
cp437ToUnicode 156 = '\x00a3'
cp437ToUnicode 157 = '\x00a5'
cp437ToUnicode 158 = '\x20a7'
cp437ToUnicode 159 = '\x0192'
cp437ToUnicode 160 = '\x00e1'
cp437ToUnicode 161 = '\x00ed'
cp437ToUnicode 162 = '\x00f3'
cp437ToUnicode 163 = '\x00fa'
cp437ToUnicode 164 = '\x00f1'
cp437ToUnicode 165 = '\x00d1'
cp437ToUnicode 166 = '\x00aa'
cp437ToUnicode 167 = '\x00ba'
cp437ToUnicode 168 = '\x00bf'
cp437ToUnicode 169 = '\x2310'
cp437ToUnicode 170 = '\x00ac'
cp437ToUnicode 171 = '\x00bd'
cp437ToUnicode 172 = '\x00bc'
cp437ToUnicode 173 = '\x00a1'
cp437ToUnicode 174 = '\x00ab'
cp437ToUnicode 175 = '\x00bb'
cp437ToUnicode 176 = '\x2591'
cp437ToUnicode 177 = '\x2592'
cp437ToUnicode 178 = '\x2593'
cp437ToUnicode 179 = '\x2502'
cp437ToUnicode 180 = '\x2524'
cp437ToUnicode 181 = '\x2561'
cp437ToUnicode 182 = '\x2562'
cp437ToUnicode 183 = '\x2556'
cp437ToUnicode 184 = '\x2555'
cp437ToUnicode 185 = '\x2563'
cp437ToUnicode 186 = '\x2551'
cp437ToUnicode 187 = '\x2557'
cp437ToUnicode 188 = '\x255d'
cp437ToUnicode 189 = '\x255c'
cp437ToUnicode 190 = '\x255b'
cp437ToUnicode 191 = '\x2510'
cp437ToUnicode 192 = '\x2514'
cp437ToUnicode 193 = '\x2534'
cp437ToUnicode 194 = '\x252c'
cp437ToUnicode 195 = '\x251c'
cp437ToUnicode 196 = '\x2500'
cp437ToUnicode 197 = '\x253c'
cp437ToUnicode 198 = '\x255e'
cp437ToUnicode 199 = '\x255f'
cp437ToUnicode 200 = '\x255a'
cp437ToUnicode 201 = '\x2554'
cp437ToUnicode 202 = '\x2569'
cp437ToUnicode 203 = '\x2566'
cp437ToUnicode 204 = '\x2560'
cp437ToUnicode 205 = '\x2550'
cp437ToUnicode 206 = '\x256c'
cp437ToUnicode 207 = '\x2567'
cp437ToUnicode 208 = '\x2568'
cp437ToUnicode 209 = '\x2564'
cp437ToUnicode 210 = '\x2565'
cp437ToUnicode 211 = '\x2559'
cp437ToUnicode 212 = '\x2558'
cp437ToUnicode 213 = '\x2552'
cp437ToUnicode 214 = '\x2553'
cp437ToUnicode 215 = '\x256b'
cp437ToUnicode 216 = '\x256a'
cp437ToUnicode 217 = '\x2518'
cp437ToUnicode 218 = '\x250c'
cp437ToUnicode 219 = '\x2588'
cp437ToUnicode 220 = '\x2584'
cp437ToUnicode 221 = '\x258c'
cp437ToUnicode 222 = '\x2590'
cp437ToUnicode 223 = '\x2580'
cp437ToUnicode 224 = '\x03b1'
cp437ToUnicode 225 = '\x00df'
cp437ToUnicode 226 = '\x0393'
cp437ToUnicode 227 = '\x03c0'
cp437ToUnicode 228 = '\x03a3'
cp437ToUnicode 229 = '\x03c3'
cp437ToUnicode 230 = '\x00b5'
cp437ToUnicode 231 = '\x03c4'
cp437ToUnicode 232 = '\x03a6'
cp437ToUnicode 233 = '\x0398'
cp437ToUnicode 234 = '\x03a9'
cp437ToUnicode 235 = '\x03b4'
cp437ToUnicode 236 = '\x221e'
cp437ToUnicode 237 = '\x03c6'
cp437ToUnicode 238 = '\x03b5'
cp437ToUnicode 239 = '\x2229'
cp437ToUnicode 240 = '\x2261'
cp437ToUnicode 241 = '\x00b1'
cp437ToUnicode 242 = '\x2265'
cp437ToUnicode 243 = '\x2264'
cp437ToUnicode 244 = '\x2320'
cp437ToUnicode 245 = '\x2321'
cp437ToUnicode 246 = '\x00F7'
cp437ToUnicode 247 = '\x2248'
cp437ToUnicode 248 = '\x00b0'
cp437ToUnicode 249 = '\x2219'
cp437ToUnicode 250 = '\x00b7'
cp437ToUnicode 251 = '\x221a'
cp437ToUnicode 252 = '\x207f'
cp437ToUnicode 253 = '\x00b2'
cp437ToUnicode 254 = '\x25a0'
cp437ToUnicode 255 = '\x00a0'
-- ASCII range (and code 0) pass through unchanged.
cp437ToUnicode x = chr (fromIntegral x)
-- | Reverse lookup table: Unicode character back to its CP437 code,
-- built by inverting 'cp437ToUnicode' over all 256 code points.
unicodeMap :: M.Map Char Word8
unicodeMap = M.fromList (fmap (\x -> (cp437ToUnicode x, x)) [0 .. 255])
{-# NOINLINE unicodeMap #-}

-- | Map a Unicode character back to its CP437 code point.
-- FIX: the original returned 'undefined' for characters outside CP437,
-- which eventually blows up far from the call site with no context;
-- raise a descriptive error at the lookup instead.  Behavior for every
-- representable character is unchanged.
unicodeToCP437 :: Char -> Word8
unicodeToCP437 c =
    M.findWithDefault
        (error ("unicodeToCP437: no CP437 code point for " ++ show c))
        c unicodeMap
| null | https://raw.githubusercontent.com/Noeda/dfterm3/6b33c9b4712da486bb84356f6c9f48abb8074faf/src/Dfterm3/CP437ToUnicode.hs | haskell | | Module that turns CP437 code points to Unicode code points .
module Dfterm3.CP437ToUnicode
( cp437ToUnicode
, unicodeToCP437 )
where
import Data.Word ( Word8 )
import Data.Char ( chr )
import qualified Data.Map as M
cp437ToUnicode :: Word8 -> Char
cp437ToUnicode 1 = '\x263a'
cp437ToUnicode 2 = '\x263b'
cp437ToUnicode 3 = '\x2665'
cp437ToUnicode 4 = '\x2666'
cp437ToUnicode 5 = '\x2663'
cp437ToUnicode 6 = '\x2660'
cp437ToUnicode 7 = '\x2022'
cp437ToUnicode 8 = '\x25d8'
cp437ToUnicode 9 = '\x25cb'
cp437ToUnicode 10 = '\x25d9'
cp437ToUnicode 11 = '\x2642'
cp437ToUnicode 12 = '\x2640'
cp437ToUnicode 13 = '\x266a'
cp437ToUnicode 14 = '\x266b'
cp437ToUnicode 15 = '\x263c'
cp437ToUnicode 16 = '\x25ba'
cp437ToUnicode 17 = '\x25c4'
cp437ToUnicode 18 = '\x2195'
cp437ToUnicode 19 = '\x203c'
cp437ToUnicode 20 = '\x00b6'
cp437ToUnicode 21 = '\x00a7'
cp437ToUnicode 22 = '\x25ac'
cp437ToUnicode 23 = '\x21a8'
cp437ToUnicode 24 = '\x2191'
cp437ToUnicode 25 = '\x2193'
cp437ToUnicode 26 = '\x2192'
cp437ToUnicode 27 = '\x2190'
cp437ToUnicode 28 = '\x221f'
cp437ToUnicode 29 = '\x2194'
cp437ToUnicode 30 = '\x25b2'
cp437ToUnicode 31 = '\x25bc'
cp437ToUnicode 127 = '\x2302'
cp437ToUnicode 128 = '\x00c7'
cp437ToUnicode 129 = '\x00fc'
cp437ToUnicode 130 = '\x00e9'
cp437ToUnicode 131 = '\x00e2'
cp437ToUnicode 132 = '\x00e4'
cp437ToUnicode 133 = '\x00e0'
cp437ToUnicode 134 = '\x00e5'
cp437ToUnicode 135 = '\x00e7'
cp437ToUnicode 136 = '\x00ea'
cp437ToUnicode 137 = '\x00eb'
cp437ToUnicode 138 = '\x00e8'
cp437ToUnicode 139 = '\x00ef'
cp437ToUnicode 140 = '\x00ee'
cp437ToUnicode 141 = '\x00ec'
cp437ToUnicode 142 = '\x00c4'
cp437ToUnicode 143 = '\x00e5'
cp437ToUnicode 144 = '\x00c9'
cp437ToUnicode 145 = '\x00e6'
cp437ToUnicode 146 = '\x00c6'
cp437ToUnicode 147 = '\x00f4'
cp437ToUnicode 148 = '\x00f6'
cp437ToUnicode 149 = '\x00f2'
cp437ToUnicode 150 = '\x00fb'
cp437ToUnicode 151 = '\x00f9'
cp437ToUnicode 152 = '\x00ff'
cp437ToUnicode 153 = '\x00d6'
cp437ToUnicode 154 = '\x00dc'
cp437ToUnicode 155 = '\x00a2'
cp437ToUnicode 156 = '\x00a3'
cp437ToUnicode 157 = '\x00a5'
cp437ToUnicode 158 = '\x20a7'
cp437ToUnicode 159 = '\x0192'
cp437ToUnicode 160 = '\x00e1'
cp437ToUnicode 161 = '\x00ed'
cp437ToUnicode 162 = '\x00f3'
cp437ToUnicode 163 = '\x00fa'
cp437ToUnicode 164 = '\x00f1'
cp437ToUnicode 165 = '\x00d1'
cp437ToUnicode 166 = '\x00aa'
cp437ToUnicode 167 = '\x00ba'
cp437ToUnicode 168 = '\x00bf'
cp437ToUnicode 169 = '\x2310'
cp437ToUnicode 170 = '\x00ac'
cp437ToUnicode 171 = '\x00bd'
cp437ToUnicode 172 = '\x00bc'
cp437ToUnicode 173 = '\x00a1'
cp437ToUnicode 174 = '\x00ab'
cp437ToUnicode 175 = '\x00bb'
cp437ToUnicode 176 = '\x2591'
cp437ToUnicode 177 = '\x2592'
cp437ToUnicode 178 = '\x2593'
cp437ToUnicode 179 = '\x2502'
cp437ToUnicode 180 = '\x2524'
cp437ToUnicode 181 = '\x2561'
cp437ToUnicode 182 = '\x2562'
cp437ToUnicode 183 = '\x2556'
cp437ToUnicode 184 = '\x2555'
cp437ToUnicode 185 = '\x2563'
cp437ToUnicode 186 = '\x2551'
cp437ToUnicode 187 = '\x2557'
cp437ToUnicode 188 = '\x255d'
cp437ToUnicode 189 = '\x255c'
cp437ToUnicode 190 = '\x255b'
cp437ToUnicode 191 = '\x2510'
cp437ToUnicode 192 = '\x2514'
cp437ToUnicode 193 = '\x2534'
cp437ToUnicode 194 = '\x252c'
cp437ToUnicode 195 = '\x251c'
cp437ToUnicode 196 = '\x2500'
cp437ToUnicode 197 = '\x253c'
cp437ToUnicode 198 = '\x255e'
cp437ToUnicode 199 = '\x255f'
cp437ToUnicode 200 = '\x255a'
cp437ToUnicode 201 = '\x2554'
cp437ToUnicode 202 = '\x2569'
cp437ToUnicode 203 = '\x2566'
cp437ToUnicode 204 = '\x2560'
cp437ToUnicode 205 = '\x2550'
cp437ToUnicode 206 = '\x256c'
cp437ToUnicode 207 = '\x2567'
cp437ToUnicode 208 = '\x2568'
cp437ToUnicode 209 = '\x2564'
cp437ToUnicode 210 = '\x2565'
cp437ToUnicode 211 = '\x2559'
cp437ToUnicode 212 = '\x2558'
cp437ToUnicode 213 = '\x2552'
cp437ToUnicode 214 = '\x2553'
cp437ToUnicode 215 = '\x256b'
cp437ToUnicode 216 = '\x256a'
cp437ToUnicode 217 = '\x2518'
cp437ToUnicode 218 = '\x250c'
cp437ToUnicode 219 = '\x2588'
cp437ToUnicode 220 = '\x2584'
cp437ToUnicode 221 = '\x258c'
cp437ToUnicode 222 = '\x2590'
cp437ToUnicode 223 = '\x2580'
cp437ToUnicode 224 = '\x03b1'
cp437ToUnicode 225 = '\x00df'
cp437ToUnicode 226 = '\x0393'
cp437ToUnicode 227 = '\x03c0'
cp437ToUnicode 228 = '\x03a3'
cp437ToUnicode 229 = '\x03c3'
cp437ToUnicode 230 = '\x00b5'
cp437ToUnicode 231 = '\x03c4'
cp437ToUnicode 232 = '\x03a6'
cp437ToUnicode 233 = '\x0398'
cp437ToUnicode 234 = '\x03a9'
cp437ToUnicode 235 = '\x03b4'
cp437ToUnicode 236 = '\x221e'
cp437ToUnicode 237 = '\x03c6'
cp437ToUnicode 238 = '\x03b5'
cp437ToUnicode 239 = '\x2229'
cp437ToUnicode 240 = '\x2261'
cp437ToUnicode 241 = '\x00b1'
cp437ToUnicode 242 = '\x2265'
cp437ToUnicode 243 = '\x2264'
cp437ToUnicode 244 = '\x2320'
cp437ToUnicode 245 = '\x2321'
cp437ToUnicode 246 = '\x00F7'
cp437ToUnicode 247 = '\x2248'
cp437ToUnicode 248 = '\x00b0'
cp437ToUnicode 249 = '\x2219'
cp437ToUnicode 250 = '\x00b7'
cp437ToUnicode 251 = '\x221a'
cp437ToUnicode 252 = '\x207f'
cp437ToUnicode 253 = '\x00b2'
cp437ToUnicode 254 = '\x25a0'
cp437ToUnicode 255 = '\x00a0'
cp437ToUnicode x = chr (fromIntegral x)
unicodeMap :: M.Map Char Word8
unicodeMap = M.fromList (fmap (\x -> ( cp437ToUnicode x
, x ))
[0..255])
# NOINLINE unicodeMap #
unicodeToCP437 :: Char -> Word8
unicodeToCP437 text = M.findWithDefault undefined text unicodeMap
| |
13b6de4f0c9c026b230b88b21b675aa63d7a483acba4538be871160a46cb549e | rainbyte/frag | ReadImage.hs |
ReadImage.hs ( adapted from readImage.c which is ( c ) Silicon Graphics , Inc. )
Copyright ( c ) 2002 - 2004 < >
This file is part of HOpenGL and distributed under a BSD - style license
See the file libraries / GLUT / LICENSE
This module has been modified to read both color and alpha data necessary for transparent textures in OpenGL .
Support for reading a file of raw RGB data :
4 bytes big - endian width
4 bytes big - endian height
width * height RGBA byte quadruples ( the original module reads width * height RGB byte triples )
ReadImage.hs (adapted from readImage.c which is (c) Silicon Graphics, Inc.)
Copyright (c) Sven Panne 2002-2004 <>
This file is part of HOpenGL and distributed under a BSD-style license
See the file libraries/GLUT/LICENSE
This module has been modified to read both color and alpha data necessary for transparent textures in OpenGL.
Support for reading a file of raw RGB data:
4 bytes big-endian width
4 bytes big-endian height
width * height RGBA byte quadruples (the original module reads width * height RGB byte triples)
-}
module ReadImage ( readImage ) where
import Data.Word ( Word8, Word32 )
import Control.Exception ( bracket )
import Control.Monad ( when )
import System.IO ( Handle, IOMode(ReadMode), openBinaryFile, hGetBuf, hClose )
import System.IO.Error ( mkIOError, eofErrorType )
import Foreign ( Ptr, alloca, mallocBytes, Storable(..) )
import Graphics.UI.GLUT
-- This is probably overkill, but anyway...
-- A Word32 whose on-disk representation is big-endian (see the
-- Storable instance below); used for the raw image header fields.
newtype Word32BigEndian = Word32BigEndian Word32

-- Unwrap to GLsizei so the value can be used as an image dimension.
word32BigEndianToGLsizei :: Word32BigEndian -> GLsizei
word32BigEndianToGLsizei (Word32BigEndian x) = fromIntegral x
-- | Big-endian deserialization for Word32BigEndian.  Only 'peek' is
-- needed (the header is read, never written); 'poke' is unsupported.
instance Storable Word32BigEndian where
   sizeOf ~(Word32BigEndian x) = sizeOf x
   alignment ~(Word32BigEndian x) = alignment x
   -- Assemble the value byte by byte, most significant byte first.
   peek ptr = do
      let numBytes = sizeOf (undefined :: Word32BigEndian)
      bytes <- mapM (peekByteOff ptr) [ 0 .. numBytes - 1 ] :: IO [Word8]
      let value = foldl (\val byte -> val * 256 + fromIntegral byte) 0 bytes
      return $ Word32BigEndian value
   -- FIX: was `error ""`, which produces a blank, undiagnosable message
   -- if poke is ever reached.
   poke = error "Storable Word32BigEndian: poke is not implemented"
-- This is the reason for all this stuff above...
readGLsizei :: Handle -> IO GLsizei
readGLsizei handle =
alloca $ \buf -> do
hGetBufFully handle buf (sizeOf (undefined :: Word32BigEndian))
word32BigEndianToGLsizei <$> peek buf
A handy variant of hGetBuf with additional error checking
hGetBufFully :: Handle -> Ptr a -> Int -> IO ()
hGetBufFully handle ptr numBytes = do
bytesRead <- hGetBuf handle ptr numBytes
when (bytesRead /= numBytes) $
ioError $ mkIOError eofErrorType "hGetBufFully" (Just handle) Nothing
-- Closing a file is nice, even when an error occurs during reading.
withBinaryFile :: FilePath -> (Handle -> IO a) -> IO a
withBinaryFile filePath = bracket (openBinaryFile filePath ReadMode) hClose
readImage :: FilePath -> IO (Maybe (Size, PixelData a))
readImage filePath =
withBinaryFile filePath $ \handle -> do
width <- readGLsizei handle
height <- readGLsizei handle
changed the 3 to a 4 to make space for our alpha data .
buf <- mallocBytes numBytes
hGetBufFully handle buf numBytes
changed the PixelFormat constructor here from RGB to RGBA , to account for our alpha data .
| null | https://raw.githubusercontent.com/rainbyte/frag/28893048f093f369c896932ff297150ef8ed2dd0/src/ReadImage.hs | haskell | This is probably overkill, but anyway...
This is the reason for all this stuff above...
Closing a file is nice, even when an error occurs during reading. |
ReadImage.hs ( adapted from readImage.c which is ( c ) Silicon Graphics , Inc. )
Copyright ( c ) 2002 - 2004 < >
This file is part of HOpenGL and distributed under a BSD - style license
See the file libraries / GLUT / LICENSE
This module has been modified to read both color and alpha data necessary for transparent textures in OpenGL .
Support for reading a file of raw RGB data :
4 bytes big - endian width
4 bytes big - endian height
width * height RGBA byte quadruples ( the original module reads width * height RGB byte triples )
ReadImage.hs (adapted from readImage.c which is (c) Silicon Graphics, Inc.)
Copyright (c) Sven Panne 2002-2004 <>
This file is part of HOpenGL and distributed under a BSD-style license
See the file libraries/GLUT/LICENSE
This module has been modified to read both color and alpha data necessary for transparent textures in OpenGL.
Support for reading a file of raw RGB data:
4 bytes big-endian width
4 bytes big-endian height
width * height RGBA byte quadruples (the original module reads width * height RGB byte triples)
-}
module ReadImage ( readImage ) where
import Data.Word ( Word8, Word32 )
import Control.Exception ( bracket )
import Control.Monad ( when )
import System.IO ( Handle, IOMode(ReadMode), openBinaryFile, hGetBuf, hClose )
import System.IO.Error ( mkIOError, eofErrorType )
import Foreign ( Ptr, alloca, mallocBytes, Storable(..) )
import Graphics.UI.GLUT
newtype Word32BigEndian = Word32BigEndian Word32
word32BigEndianToGLsizei :: Word32BigEndian -> GLsizei
word32BigEndianToGLsizei (Word32BigEndian x) = fromIntegral x
instance Storable Word32BigEndian where
sizeOf ~(Word32BigEndian x) = sizeOf x
alignment ~(Word32BigEndian x) = alignment x
peek ptr = do
let numBytes = sizeOf (undefined :: Word32BigEndian)
bytes <- mapM (peekByteOff ptr) [ 0 .. numBytes - 1 ] :: IO [Word8]
let value = foldl (\val byte -> val * 256 + fromIntegral byte) 0 bytes
return $ Word32BigEndian value
poke = error ""
readGLsizei :: Handle -> IO GLsizei
readGLsizei handle =
alloca $ \buf -> do
hGetBufFully handle buf (sizeOf (undefined :: Word32BigEndian))
word32BigEndianToGLsizei <$> peek buf
A handy variant of hGetBuf with additional error checking
hGetBufFully :: Handle -> Ptr a -> Int -> IO ()
hGetBufFully handle ptr numBytes = do
bytesRead <- hGetBuf handle ptr numBytes
when (bytesRead /= numBytes) $
ioError $ mkIOError eofErrorType "hGetBufFully" (Just handle) Nothing
withBinaryFile :: FilePath -> (Handle -> IO a) -> IO a
withBinaryFile filePath = bracket (openBinaryFile filePath ReadMode) hClose
readImage :: FilePath -> IO (Maybe (Size, PixelData a))
readImage filePath =
withBinaryFile filePath $ \handle -> do
width <- readGLsizei handle
height <- readGLsizei handle
changed the 3 to a 4 to make space for our alpha data .
buf <- mallocBytes numBytes
hGetBufFully handle buf numBytes
changed the PixelFormat constructor here from RGB to RGBA , to account for our alpha data .
|
f308ce654b6f18af1e683f22c7950858481035988c4c80014fcd84b39a560f04 | racketscript/racketscript | vector.rkt | #lang racket/base
(define vec (vector 0 1 2 3))
(displayln vec)
(displayln (vector-ref vec 0))
(displayln (vector-ref vec 1))
(displayln (vector-ref vec 2))
(displayln (vector-ref vec 3))
(vector-set! vec 0 3)
(vector-set! vec 1 2)
(vector-set! vec 2 1)
(vector-set! vec 3 0)
(displayln vec)
(displayln #(1 2 3))
(displayln (vector-ref #(1 2 3) 0))
(displayln (vector-ref #(1 2 3) 1))
(displayln (vector-ref #(1 2 3) 2))
(displayln "equal")
(displayln (equal? #(1 2 3) #(1 2 3)))
(displayln (equal? #(1 2 3) #(2 2 3)))
(displayln "make-vector")
(displayln (make-vector 5))
(displayln (make-vector 5 3))
(displayln (make-vector 5 #f))
(displayln (make-vector 0))
(define vec2 (vector 4 5 6))
(vector-copy! vec 1 vec2 0 3)
(displayln vec)
| null | https://raw.githubusercontent.com/racketscript/racketscript/11400fdd9d11541e7dd40c536c015677a7fa4181/tests/basic/vector.rkt | racket | #lang racket/base
(define vec (vector 0 1 2 3))
(displayln vec)
(displayln (vector-ref vec 0))
(displayln (vector-ref vec 1))
(displayln (vector-ref vec 2))
(displayln (vector-ref vec 3))
(vector-set! vec 0 3)
(vector-set! vec 1 2)
(vector-set! vec 2 1)
(vector-set! vec 3 0)
(displayln vec)
(displayln #(1 2 3))
(displayln (vector-ref #(1 2 3) 0))
(displayln (vector-ref #(1 2 3) 1))
(displayln (vector-ref #(1 2 3) 2))
(displayln "equal")
(displayln (equal? #(1 2 3) #(1 2 3)))
(displayln (equal? #(1 2 3) #(2 2 3)))
(displayln "make-vector")
(displayln (make-vector 5))
(displayln (make-vector 5 3))
(displayln (make-vector 5 #f))
(displayln (make-vector 0))
(define vec2 (vector 4 5 6))
(vector-copy! vec 1 vec2 0 3)
(displayln vec)
| |
4d1d684d4b804822750ce2e316b67fde153a0759d3d1eae16f7c8d9024681752 | Bogdanp/nemea | current-visitors.rkt | #lang racket/base
(require component
net/url
racket/contract
racket/list
racket/match
racket/set)
(provide (contract-out
[struct current-visitors ((session-timeout exact-positive-integer?)
(manager-thread (or/c false/c thread?)))]
[make-current-visitors (->* ()
(#:session-timeout exact-positive-integer?)
current-visitors?)]
[current-visitors-subscribe (-> current-visitors? thread? void?)]
[current-visitors-track (-> current-visitors? string? url? void?)]))
(struct current-visitors (session-timeout (manager-thread #:mutable))
#:methods gen:component
[(define (component-start cv)
(define session-timeout (current-visitors-session-timeout cv))
(set-current-visitors-manager-thread! cv (thread (make-manager-thread session-timeout)))
cv)
(define (component-stop cv)
(thread-send (current-visitors-manager-thread cv) 'stop)
(set-current-visitors-manager-thread! cv #f)
cv)])
(define (make-current-visitors #:session-timeout [session-timeout 60])
(current-visitors session-timeout #f))
(define ((make-manager-thread session-timeout))
(let loop ([visitors (hash)]
[listeners (set)])
(sync
(choice-evt
(handle-evt
(thread-receive-evt)
(lambda (e)
(match (thread-receive)
['stop (void)]
['broadcast
(define deadline (- (current-seconds) session-timeout))
(define active-visitors
(for*/hash ([(visitor-id data) (in-hash visitors)]
[timestamp (in-value (car data))]
#:unless (< timestamp deadline))
(values visitor-id data)))
(define active-listeners
(for/set ([listener (in-set listeners)] #:unless (thread-dead? listener))
(begin0 listener
(thread-send listener active-visitors))))
(loop active-visitors active-listeners)]
[(list 'subscribe t)
(thread-send t visitors)
(loop visitors (set-add listeners t))]
[(list 'track visitor-id location)
(thread-send (current-thread) 'broadcast)
(loop (hash-set visitors visitor-id (cons (current-seconds) location)) listeners)])))
(handle-evt
(alarm-evt (+ (current-inexact-milliseconds) 1000))
(lambda (e)
(thread-send (current-thread) 'broadcast)
(loop visitors listeners)))))))
(define (current-visitors-subscribe current-visitors listener)
(thread-send (current-visitors-manager-thread current-visitors)
(list 'subscribe listener)))
(define (current-visitors-track current-visitors visitor-id location)
(thread-send (current-visitors-manager-thread current-visitors)
(list 'track visitor-id location)))
(module+ test
(require rackunit
rackunit/text-ui)
(define waiter (make-semaphore))
(define cv (make-current-visitors #:session-timeout 2))
(define counts '())
(define t1
(thread
(lambda ()
(let loop ()
(set! counts (cons (hash-count (thread-receive)) counts))
(semaphore-post waiter)
(loop)))))
(run-tests
(test-suite
"current-visitors"
#:before
(lambda ()
(component-start cv)
(current-visitors-subscribe cv t1))
#:after
(lambda ()
(component-stop cv))
(test-case "tracking"
(sync/timeout 2 waiter)
(check-equal? counts '(0) "timeout after none tracked")
(current-visitors-track cv "alice" (string->url ""))
(current-visitors-track cv "bob" (string->url ""))
(sync/timeout 2 waiter) ; broadcast for alice
(sync/timeout 2 waiter) ; broadcast for bob
(sync/timeout 2 waiter) ; timeout
(check-equal? counts '(2 2 2 0) "timeout after alice and bob tracked")
(sync/timeout 2 waiter) ; timeout
(check-equal? counts '(2 2 2 2 0) "timeout after alice and bob tracked no. 2")
(current-visitors-track cv "bob" (string->url ""))
(sync/timeout 2 waiter) ; broadcast for bob
(sync/timeout 2 waiter) ; timeout
(check-equal? counts '(1 2 2 2 2 2 0) "timeout after bob tracked again")
(sync/timeout 2 waiter) ; timeout
(sync/timeout 2 waiter) ; timeout
(check-equal? counts '(0 1 1 2 2 2 2 2 0) "2 timeouts after bob tracked again no. 2")))))
| null | https://raw.githubusercontent.com/Bogdanp/nemea/6e6149007fb0c43d8f0fb2271b36f0ccad830703/nemea/components/current-visitors.rkt | racket | broadcast for alice
broadcast for bob
timeout
timeout
broadcast for bob
timeout
timeout
timeout | #lang racket/base
(require component
net/url
racket/contract
racket/list
racket/match
racket/set)
(provide (contract-out
[struct current-visitors ((session-timeout exact-positive-integer?)
(manager-thread (or/c false/c thread?)))]
[make-current-visitors (->* ()
(#:session-timeout exact-positive-integer?)
current-visitors?)]
[current-visitors-subscribe (-> current-visitors? thread? void?)]
[current-visitors-track (-> current-visitors? string? url? void?)]))
(struct current-visitors (session-timeout (manager-thread #:mutable))
#:methods gen:component
[(define (component-start cv)
(define session-timeout (current-visitors-session-timeout cv))
(set-current-visitors-manager-thread! cv (thread (make-manager-thread session-timeout)))
cv)
(define (component-stop cv)
(thread-send (current-visitors-manager-thread cv) 'stop)
(set-current-visitors-manager-thread! cv #f)
cv)])
(define (make-current-visitors #:session-timeout [session-timeout 60])
(current-visitors session-timeout #f))
(define ((make-manager-thread session-timeout))
(let loop ([visitors (hash)]
[listeners (set)])
(sync
(choice-evt
(handle-evt
(thread-receive-evt)
(lambda (e)
(match (thread-receive)
['stop (void)]
['broadcast
(define deadline (- (current-seconds) session-timeout))
(define active-visitors
(for*/hash ([(visitor-id data) (in-hash visitors)]
[timestamp (in-value (car data))]
#:unless (< timestamp deadline))
(values visitor-id data)))
(define active-listeners
(for/set ([listener (in-set listeners)] #:unless (thread-dead? listener))
(begin0 listener
(thread-send listener active-visitors))))
(loop active-visitors active-listeners)]
[(list 'subscribe t)
(thread-send t visitors)
(loop visitors (set-add listeners t))]
[(list 'track visitor-id location)
(thread-send (current-thread) 'broadcast)
(loop (hash-set visitors visitor-id (cons (current-seconds) location)) listeners)])))
(handle-evt
(alarm-evt (+ (current-inexact-milliseconds) 1000))
(lambda (e)
(thread-send (current-thread) 'broadcast)
(loop visitors listeners)))))))
(define (current-visitors-subscribe current-visitors listener)
(thread-send (current-visitors-manager-thread current-visitors)
(list 'subscribe listener)))
(define (current-visitors-track current-visitors visitor-id location)
(thread-send (current-visitors-manager-thread current-visitors)
(list 'track visitor-id location)))
(module+ test
(require rackunit
rackunit/text-ui)
(define waiter (make-semaphore))
(define cv (make-current-visitors #:session-timeout 2))
(define counts '())
(define t1
(thread
(lambda ()
(let loop ()
(set! counts (cons (hash-count (thread-receive)) counts))
(semaphore-post waiter)
(loop)))))
(run-tests
(test-suite
"current-visitors"
#:before
(lambda ()
(component-start cv)
(current-visitors-subscribe cv t1))
#:after
(lambda ()
(component-stop cv))
(test-case "tracking"
(sync/timeout 2 waiter)
(check-equal? counts '(0) "timeout after none tracked")
(current-visitors-track cv "alice" (string->url ""))
(current-visitors-track cv "bob" (string->url ""))
(check-equal? counts '(2 2 2 0) "timeout after alice and bob tracked")
(check-equal? counts '(2 2 2 2 0) "timeout after alice and bob tracked no. 2")
(current-visitors-track cv "bob" (string->url ""))
(check-equal? counts '(1 2 2 2 2 2 0) "timeout after bob tracked again")
(check-equal? counts '(0 1 1 2 2 2 2 2 0) "2 timeouts after bob tracked again no. 2")))))
|
2310a4316ec5da2bc9c89aa58582f48e534ab762cfb13b253f4aa2e815b8fa43 | lisp-mirror/cl-tar | fd-stream.lisp | ;;;; This is part of cl-tar. See README.md and LICENSE for more information.
(in-package #:tar-extract)
(define-condition fd-stream-error (stream-error)
())
(define-condition simple-fd-stream-error (fd-stream-error simple-error)
())
(defclass directory-fd-stream ()
((fd
:initarg :fd
:reader fd
:documentation
"The underlying file descriptor.")))
(defclass fd-stream (trivial-gray-streams:trivial-gray-stream-mixin
trivial-gray-streams:fundamental-binary-stream)
((fd
:initarg :fd
:reader fd
:documentation
"The underlying file descriptor.")))
(defclass fd-output-stream (fd-stream
trivial-gray-streams:fundamental-binary-output-stream)
()
(:documentation
"A FD-STREAM used for output."))
(defmethod trivial-gray-streams:stream-write-byte ((stream fd-output-stream) byte)
(cffi:with-foreign-array (buf (make-array 1 :element-type '(unsigned-byte 8)
:initial-element byte)
'(unsigned-byte 8))
(nix:write (fd stream) buf 1)
byte))
(defmethod trivial-gray-streams:stream-write-sequence ((stream fd-output-stream)
sequence start end
&key &allow-other-keys)
(let ((num-bytes (- (or end (length sequence)) start)))
(cffi:with-foreign-array (buf (subseq sequence start end) `(:array :uchar ,num-bytes))
(nix:write (fd stream) buf num-bytes)))
sequence)
(defmethod stream-element-type ((stream fd-stream))
'(unsigned-byte 8))
(defmethod close ((stream fd-stream) &key abort)
(declare (ignore abort))
(nix:close (fd stream)))
#+tar-extract-use-openat
(defun openat-random (dir-handle pathname mode)
(loop
:for random := (random 10000000000)
:for name := (concatenate 'string "." (file-namestring pathname)
"." (princ-to-string random))
:for stream := (handler-case
(openat dir-handle name mode)
(extraction-through-symbolic-link-error () nil)
(destination-exists-error () nil))
:when stream
:return (values stream name)))
(defun open-random (pathname mode)
(loop
:for random := (random 10000000000)
:for name := (concatenate 'string (namestring pathname)
"-" (princ-to-string random))
:for stream := (handler-case
(my-open name mode)
(destination-exists-error () nil))
:when stream
:return (values stream name)))
#+tar-extract-use-openat
(defun openat (cwdfd pathname mode &optional (path-so-far (list :relative)))
"This is a slightly safer version of openat that checks for symlinks along the entire path.
Returns an FD-STREAM or OUTPUT-FD-STREAM."
(let* ((name (file-namestring pathname))
(directory (pathname-directory pathname))
(absolute-directory-p (eql (first directory) :absolute))
flags
stat)
(cond
(absolute-directory-p
(with-fd (rootfd (nix:open "/" nix:o-rdonly))
(openat rootfd (make-pathname :directory (list* :relative (rest directory))
:defaults pathname)
mode
(list :absolute))))
;; Still directories to traverse.
((and (not (null directory))
(not (equal directory '(:relative))))
(let ((next-dir-name (second directory)))
(when (eql next-dir-name :back)
(setf next-dir-name ".."))
(tagbody
:retry
(setf flags (logior nix:o-rdonly
nix:o-nofollow))
(handler-case
(setf stat (nix:fstatat cwdfd next-dir-name nix:at-symlink-nofollow))
(nix:enoent ()
(handler-case
(nix:mkdirat cwdfd next-dir-name nix:s-irwxu)
(nix:eexist () (go :retry)))
(go :open)))
(cond
((nix:s-islnk (nix:stat-mode stat))
(let (target)
(handler-case
(setf target (uiop:parse-unix-namestring
(nix:readlinkat cwdfd next-dir-name)
:dot-dot :back))
The link got deleted between the stat and
(nix:einval () (go :retry)))
(restart-case
(error 'extraction-through-symbolic-link-error
:target target
:pathname (make-pathname :name nil
:type nil
:directory (reverse path-so-far)
:defaults pathname))
(follow-symbolic-link ()
(return-from openat
(openat cwdfd (merge-pathnames
(make-pathname :directory (list* :relative (cddr directory))
:defaults pathname)
(uiop:ensure-directory-pathname target))
mode)))
(replace-symbolic-link ()
Sadly , there 's no non - Linux - specific way to atomically
;; replace the symlink with a directory.
(nix:unlinkat cwdfd next-dir-name 0)
(go :retry)))))
((not (nix:s-isdir (nix:stat-mode stat)))
(restart-case
(error 'file-exists-in-place-of-directory-error
:pathname (make-pathname :name nil
:type nil
:directory (reverse path-so-far)
:defaults pathname))
(remove-file ()
(nix:unlinkat cwdfd next-dir-name 0)
(go :retry)))))
:open
(let (nextfd)
(handler-case
(setf nextfd (nix:openat cwdfd next-dir-name flags mode))
(nix:enoent () (go :retry))
(nix:eloop () (go :retry)))
(with-fd (nextfd)
(return-from openat
(openat nextfd (make-pathname :directory (list* :relative (cddr directory))
:defaults pathname)
mode
(list* (second directory) path-so-far))))))))
((or (null name)
(equal name ""))
(make-instance 'directory-fd-stream :fd (nix:dup cwdfd)))
(t
(tagbody
:retry
(setf flags (logior nix:o-wronly
nix:o-creat
nix:o-nofollow))
(handler-case
(setf stat (nix:fstatat cwdfd name nix:at-symlink-nofollow))
;; If the file doesn't seem to exist, add O_EXCL to our flags and
;; try to open it. The O_EXCL ensures we get an error if the file
;; is created between the stat and open calls
(nix:enoent ()
(setf flags (logior flags nix:o-excl))
(go :open)))
(cond
;; The file exists and is a symlink.
((nix:s-islnk (nix:stat-mode stat))
(let (target)
;; Try reading where it points to, so we can ask the user what
;; to do.
(handler-case
(setf target (uiop:parse-unix-namestring
(nix:readlinkat cwdfd name)
:dot-dot :back))
The link got deleted between the stat and
;; calls. Just retry from scratch.
(nix:einval () (go :retry)))
(restart-case
(error 'extraction-through-symbolic-link-error
:pathname (make-pathname :directory (reverse path-so-far)
:defaults pathname)
:target target)
;; Follow the symlink! We resolve the symlink destination
;; ourselves. This is because our API tells the user where the
;; symlink points and POSIX has no way to say "follow the
;; symlink, but only if it points to X still" (well, Linux
sort of does , but not nor BSD ( pass a file
;; descriptor to readlinkat, not a dirfd))
(follow-symbolic-link ()
(return-from openat
(openat cwdfd target mode)))
;; Replace the symbolic link! Create a temporary file, rename
;; it on top of the symlink, and then return a stream to the
;; new file. This ensures that the link is atomically
;; replaced.
(replace-symbolic-link ()
(multiple-value-bind (stream tmp-name)
(openat-random cwdfd name mode)
(nix:renameat cwdfd tmp-name cwdfd name)
(return-from openat stream))))))
;; File exists, but is not a symlink. Ask the user what to do.
(t
(restart-case
(error 'destination-exists-error
:mtime (local-time:unix-to-timestamp (nix:stat-mtime stat)
:nsec (nix:stat-mtime-nsec stat))
:pathname pathname)
;; User wants us to overwrite it. So add O_TRUNC to the flags
;; and get going.
(supersede-file ()
(setf flags (logior flags nix:o-trunc))
(go :open))
;; User wants us to rename and replace the file. This keeps
;; processes that already have the file open happier. Take the
;; same approach as replacing a symlink, make a new file and
;; rename it.
(remove-file ()
(multiple-value-bind (stream tmp-name) (openat-random cwdfd name mode)
(nix:renameat cwdfd tmp-name cwdfd name)
(return-from openat stream))))))
:open
;; Try opening the file!
(handler-case
(return-from openat
(make-instance 'fd-output-stream :fd (nix:openat cwdfd name flags mode)))
;; Someone snuck in and created a file between the stat and open!
(nix:eexist () (go :retry))
;; Someone snuck in and made a symlink on us!
(nix:eloop () (go :retry))))))))
(defun my-open (pn mode)
(ensure-directories-exist (merge-pathnames pn))
(let (flags
stat)
(tagbody
:retry
(setf flags (logior nix:o-wronly
nix:o-creat))
(handler-case
(setf stat (nix:stat (merge-pathnames pn)))
;; If the file doesn't seem to exist, add O_EXCL to our flags and try
;; to open it. The O_EXCL ensures we get an error if the file is
;; created between the stat and open calls
(nix:enoent ()
(setf flags (logior flags nix:o-excl))
(go :open)))
;; File exists, ask the user what to do.
(restart-case
(error 'destination-exists-error
:mtime (local-time:unix-to-timestamp (nix:stat-mtime stat))
:pathname pn)
;; User wants us to overwrite it. So add O_TRUNC to the flags
;; and get going.
(supersede-file ()
(setf flags (logior flags nix:o-trunc))
(go :open))
;; User wants us to rename and replace the file. This keeps
;; processes that already have the file open happier. Take the
;; same approach as replacing a symlink, make a new file and
;; rename it.
(remove-file ()
(multiple-value-bind (stream tmp-name) (open-random pn mode)
(nix:rename tmp-name (merge-pathnames pn))
(return-from my-open stream))))
:open
;; Try opening the file!
(handler-case
(return-from my-open
(make-instance 'fd-output-stream :fd (nix:open (merge-pathnames pn) flags mode)))
;; Someone snuck in and created a file between the stat and open!
(nix:eexist () (go :retry))))))
| null | https://raw.githubusercontent.com/lisp-mirror/cl-tar/8369f16b51dfe04dc68c2ebf146c769d4bc7d471/src/extract/fd-stream.lisp | lisp | This is part of cl-tar. See README.md and LICENSE for more information.
Still directories to traverse.
replace the symlink with a directory.
If the file doesn't seem to exist, add O_EXCL to our flags and
try to open it. The O_EXCL ensures we get an error if the file
is created between the stat and open calls
The file exists and is a symlink.
Try reading where it points to, so we can ask the user what
to do.
calls. Just retry from scratch.
Follow the symlink! We resolve the symlink destination
ourselves. This is because our API tells the user where the
symlink points and POSIX has no way to say "follow the
symlink, but only if it points to X still" (well, Linux
descriptor to readlinkat, not a dirfd))
Replace the symbolic link! Create a temporary file, rename
it on top of the symlink, and then return a stream to the
new file. This ensures that the link is atomically
replaced.
File exists, but is not a symlink. Ask the user what to do.
User wants us to overwrite it. So add O_TRUNC to the flags
and get going.
User wants us to rename and replace the file. This keeps
processes that already have the file open happier. Take the
same approach as replacing a symlink, make a new file and
rename it.
Try opening the file!
Someone snuck in and created a file between the stat and open!
Someone snuck in and made a symlink on us!
If the file doesn't seem to exist, add O_EXCL to our flags and try
to open it. The O_EXCL ensures we get an error if the file is
created between the stat and open calls
File exists, ask the user what to do.
User wants us to overwrite it. So add O_TRUNC to the flags
and get going.
User wants us to rename and replace the file. This keeps
processes that already have the file open happier. Take the
same approach as replacing a symlink, make a new file and
rename it.
Try opening the file!
Someone snuck in and created a file between the stat and open! |
(in-package #:tar-extract)
(define-condition fd-stream-error (stream-error)
())
(define-condition simple-fd-stream-error (fd-stream-error simple-error)
())
(defclass directory-fd-stream ()
((fd
:initarg :fd
:reader fd
:documentation
"The underlying file descriptor.")))
(defclass fd-stream (trivial-gray-streams:trivial-gray-stream-mixin
trivial-gray-streams:fundamental-binary-stream)
((fd
:initarg :fd
:reader fd
:documentation
"The underlying file descriptor.")))
(defclass fd-output-stream (fd-stream
trivial-gray-streams:fundamental-binary-output-stream)
()
(:documentation
"A FD-STREAM used for output."))
(defmethod trivial-gray-streams:stream-write-byte ((stream fd-output-stream) byte)
(cffi:with-foreign-array (buf (make-array 1 :element-type '(unsigned-byte 8)
:initial-element byte)
'(unsigned-byte 8))
(nix:write (fd stream) buf 1)
byte))
(defmethod trivial-gray-streams:stream-write-sequence ((stream fd-output-stream)
sequence start end
&key &allow-other-keys)
(let ((num-bytes (- (or end (length sequence)) start)))
(cffi:with-foreign-array (buf (subseq sequence start end) `(:array :uchar ,num-bytes))
(nix:write (fd stream) buf num-bytes)))
sequence)
(defmethod stream-element-type ((stream fd-stream))
'(unsigned-byte 8))
(defmethod close ((stream fd-stream) &key abort)
(declare (ignore abort))
(nix:close (fd stream)))
#+tar-extract-use-openat
(defun openat-random (dir-handle pathname mode)
(loop
:for random := (random 10000000000)
:for name := (concatenate 'string "." (file-namestring pathname)
"." (princ-to-string random))
:for stream := (handler-case
(openat dir-handle name mode)
(extraction-through-symbolic-link-error () nil)
(destination-exists-error () nil))
:when stream
:return (values stream name)))
(defun open-random (pathname mode)
(loop
:for random := (random 10000000000)
:for name := (concatenate 'string (namestring pathname)
"-" (princ-to-string random))
:for stream := (handler-case
(my-open name mode)
(destination-exists-error () nil))
:when stream
:return (values stream name)))
#+tar-extract-use-openat
(defun openat (cwdfd pathname mode &optional (path-so-far (list :relative)))
"This is a slightly safer version of openat that checks for symlinks along the entire path.
Returns an FD-STREAM or OUTPUT-FD-STREAM."
(let* ((name (file-namestring pathname))
(directory (pathname-directory pathname))
(absolute-directory-p (eql (first directory) :absolute))
flags
stat)
(cond
(absolute-directory-p
(with-fd (rootfd (nix:open "/" nix:o-rdonly))
(openat rootfd (make-pathname :directory (list* :relative (rest directory))
:defaults pathname)
mode
(list :absolute))))
((and (not (null directory))
(not (equal directory '(:relative))))
(let ((next-dir-name (second directory)))
(when (eql next-dir-name :back)
(setf next-dir-name ".."))
(tagbody
:retry
(setf flags (logior nix:o-rdonly
nix:o-nofollow))
(handler-case
(setf stat (nix:fstatat cwdfd next-dir-name nix:at-symlink-nofollow))
(nix:enoent ()
(handler-case
(nix:mkdirat cwdfd next-dir-name nix:s-irwxu)
(nix:eexist () (go :retry)))
(go :open)))
(cond
((nix:s-islnk (nix:stat-mode stat))
(let (target)
(handler-case
(setf target (uiop:parse-unix-namestring
(nix:readlinkat cwdfd next-dir-name)
:dot-dot :back))
The link got deleted between the stat and
(nix:einval () (go :retry)))
(restart-case
(error 'extraction-through-symbolic-link-error
:target target
:pathname (make-pathname :name nil
:type nil
:directory (reverse path-so-far)
:defaults pathname))
(follow-symbolic-link ()
(return-from openat
(openat cwdfd (merge-pathnames
(make-pathname :directory (list* :relative (cddr directory))
:defaults pathname)
(uiop:ensure-directory-pathname target))
mode)))
(replace-symbolic-link ()
Sadly , there 's no non - Linux - specific way to atomically
(nix:unlinkat cwdfd next-dir-name 0)
(go :retry)))))
((not (nix:s-isdir (nix:stat-mode stat)))
(restart-case
(error 'file-exists-in-place-of-directory-error
:pathname (make-pathname :name nil
:type nil
:directory (reverse path-so-far)
:defaults pathname))
(remove-file ()
(nix:unlinkat cwdfd next-dir-name 0)
(go :retry)))))
:open
(let (nextfd)
(handler-case
(setf nextfd (nix:openat cwdfd next-dir-name flags mode))
(nix:enoent () (go :retry))
(nix:eloop () (go :retry)))
(with-fd (nextfd)
(return-from openat
(openat nextfd (make-pathname :directory (list* :relative (cddr directory))
:defaults pathname)
mode
(list* (second directory) path-so-far))))))))
((or (null name)
(equal name ""))
(make-instance 'directory-fd-stream :fd (nix:dup cwdfd)))
(t
(tagbody
:retry
(setf flags (logior nix:o-wronly
nix:o-creat
nix:o-nofollow))
(handler-case
(setf stat (nix:fstatat cwdfd name nix:at-symlink-nofollow))
(nix:enoent ()
(setf flags (logior flags nix:o-excl))
(go :open)))
(cond
((nix:s-islnk (nix:stat-mode stat))
(let (target)
(handler-case
(setf target (uiop:parse-unix-namestring
(nix:readlinkat cwdfd name)
:dot-dot :back))
The link got deleted between the stat and
(nix:einval () (go :retry)))
(restart-case
(error 'extraction-through-symbolic-link-error
:pathname (make-pathname :directory (reverse path-so-far)
:defaults pathname)
:target target)
sort of does , but not nor BSD ( pass a file
(follow-symbolic-link ()
(return-from openat
(openat cwdfd target mode)))
(replace-symbolic-link ()
(multiple-value-bind (stream tmp-name)
(openat-random cwdfd name mode)
(nix:renameat cwdfd tmp-name cwdfd name)
(return-from openat stream))))))
(t
(restart-case
(error 'destination-exists-error
:mtime (local-time:unix-to-timestamp (nix:stat-mtime stat)
:nsec (nix:stat-mtime-nsec stat))
:pathname pathname)
(supersede-file ()
(setf flags (logior flags nix:o-trunc))
(go :open))
(remove-file ()
(multiple-value-bind (stream tmp-name) (openat-random cwdfd name mode)
(nix:renameat cwdfd tmp-name cwdfd name)
(return-from openat stream))))))
:open
(handler-case
(return-from openat
(make-instance 'fd-output-stream :fd (nix:openat cwdfd name flags mode)))
(nix:eexist () (go :retry))
(nix:eloop () (go :retry))))))))
(defun my-open (pn mode)
(ensure-directories-exist (merge-pathnames pn))
(let (flags
stat)
(tagbody
:retry
(setf flags (logior nix:o-wronly
nix:o-creat))
(handler-case
(setf stat (nix:stat (merge-pathnames pn)))
(nix:enoent ()
(setf flags (logior flags nix:o-excl))
(go :open)))
(restart-case
(error 'destination-exists-error
:mtime (local-time:unix-to-timestamp (nix:stat-mtime stat))
:pathname pn)
(supersede-file ()
(setf flags (logior flags nix:o-trunc))
(go :open))
(remove-file ()
(multiple-value-bind (stream tmp-name) (open-random pn mode)
(nix:rename tmp-name (merge-pathnames pn))
(return-from my-open stream))))
:open
(handler-case
(return-from my-open
(make-instance 'fd-output-stream :fd (nix:open (merge-pathnames pn) flags mode)))
(nix:eexist () (go :retry))))))
|
86db6da4025e7f706d12533097343a39ccb9fc71abd25b02dbabb7a398aa0c5c | OCamlPro/ocp-build | test.ml | (**************************************************************************)
(* *)
(* Typerex Libraries *)
(* *)
Copyright 2011 - 2017 OCamlPro SAS
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
Few lines from xen-api.git/ocaml/xapi/quicktest_common.ml
Copyright ( C ) 2006 - 2009 Citrix Systems Inc. LGPL v2.1
open StringCompat
let total_started = ref 0
let total_passed = ref 0
type status =
| Pending
| Success
| Failed
type vt100 =
| Control of string
| Data of string
let length_of_vt100 sequence =
let length = function
| Control _ -> 0
| Data x -> String.length x in
List.fold_left (+) 0 (List.map length sequence)
let flatten_vt100 sequence =
List.fold_left (^) "" (List.map (function Control x -> x | Data x -> x) sequence)
let escape = String.make 1 (char_of_int 0x1b)
let set_attribute attrs =
Control (Printf.sprintf "%s[%sm" escape (String.concat ";" (List.map string_of_int attrs)))
let reset = 0
let bright = 1
let dim = 2
let red = 31
let green = 32
let blue = 34
let yellow = 33
let basic_string_of_status = function
| Pending -> [ Data " " ]
| Success -> [ Data "[ Success ]" ]
| Failed -> [ Data "[ Failed ]" ]
let coloured_string_of_status = function
| Pending -> [ Data " " ]
| Success ->
[ Data "[ ";
set_attribute [ bright; green ];
Data "Success";
set_attribute [ reset ];
Data " ]" ]
| Failed ->
[ Data "[ ";
set_attribute [ bright; red ];
Data "Failed ";
set_attribute [ reset ];
Data " ]" ]
let use_colour = ref true
let cols = 80
let nice_status_output name status =
let vt100 = (if !use_colour then coloured_string_of_status else basic_string_of_status) status in
let flattened = flatten_vt100 vt100 in
let invisible_length = String.length flattened in
let visible_length = length_of_vt100 vt100 in
(* Need a bigger string to cope with control characters *)
let line = Bytes.make (cols + (invisible_length - visible_length)) ' ' in
Stick the test name towards the left
Bytes.blit_string name 0 line 0 (min cols (String.length name));
Stick the coloured bit towards the right
NB we need to use the ' visible length ' for positioning but copy all chars , even invis ones
Bytes.blit_string (flatten_vt100 vt100) 0 line (cols - visible_length) (String.length flattened);
Printf.printf "%s\n%!" (Bytes.to_string line)
module Raw = struct
type test = {
name : string;
mutable status : status;
}
let make_test name = {
name;
status = Pending
}
let all_tests = ref []
let failed_tests = ref []
let mem test =
List.memq test !all_tests
let remove test =
all_tests := List.removeq test !all_tests
let add test =
all_tests := test :: !all_tests
let debug test fmt =
let fn msg =
(* Might need to divide into multiple lines *)
let tab = " " in
let max_length =
cols
- length_of_vt100 (coloured_string_of_status test.status)
- String.length tab in
let rec loop start_offset =
if start_offset < String.length msg then begin
let length = min (String.length msg - start_offset) max_length in
let submsg = String.sub msg start_offset length in
nice_status_output (tab^submsg) Pending;
loop (start_offset + length)
end in
nice_status_output (test.name^":") Pending;
loop 0 in
Printf.kprintf fn fmt
let start test =
incr total_started;
add test
let success test =
if not (mem test) then
failwith (Printf.sprintf "Test not started: %s" test.name);
remove test;
if test.status = Pending then begin
incr total_passed;
test.status <- Success
end;
nice_status_output test.name test.status
let failed test =
if not (mem test) then
failwith (Printf.sprintf "Test not started: %s" test.name);
remove test;
failed_tests := test :: !failed_tests;
test.status <- Failed;
nice_status_output test.name Failed
let stats ~started ~passed =
Printf.sprintf "Total test started: %d; total passed: %d (%.2f%%)"
started passed (float_of_int passed /. (float_of_int started) *. 100.)
let summarise () =
Printf.printf "\n\n%s\n%!" (stats ~started:!total_started ~passed:!total_passed);
if !all_tests <> [] then begin
Printf.printf "Tests neither succeeded nor failed:\n%!";
List.iter (fun t -> nice_status_output t.name t.status) !all_tests;
end;
if !total_passed <> !total_started then begin
Printf.printf "Tests failed:\n%!";
List.iter (fun t -> nice_status_output t.name Failed) (List.rev !failed_tests);
exit 1;
end
end
type test =
| Test_case of (unit -> bool)
| Test_group of string * test list
let test t = Test_case t
let test_exn exn fn =
Test_case (fun () ->
try ignore (fn () : 'a); false
with e -> e = exn)
let group name l = Test_group (name, l)
let tests = ref []
let register name tl =
tests := Test_group (name, tl) :: !tests
let process () =
let rank = ref 1 in
let fullname path =
String.concat "." (List.rev path) ^ ":" ^ string_of_int !rank in
let rec aux path = function
| Test_case fn ->
let name = fullname path in
incr rank;
let test = Raw.make_test name in
Raw.start test;
let result =
try fn ()
with e ->
Raw.debug test "Got exception: %s" (Printexc.to_string e);
false in
if result then
Raw.success test
else
Raw.failed test
| Test_group (n, tl) ->
let oldrank = !rank in
rank := 1;
List.iter (aux (n::path)) tl;
rank := oldrank in
List.iter (aux []) !tests;
Raw.summarise ()
| null | https://raw.githubusercontent.com/OCamlPro/ocp-build/56aff560bb438c12b2929feaf8379bc6f31b9840/libs/ocplib-lang/test.ml | ocaml | ************************************************************************
Typerex Libraries
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Need a bigger string to cope with control characters
Might need to divide into multiple lines | Copyright 2011 - 2017 OCamlPro SAS
the GNU Lesser General Public License version 2.1 , with the
Few lines from xen-api.git/ocaml/xapi/quicktest_common.ml
Copyright ( C ) 2006 - 2009 Citrix Systems Inc. LGPL v2.1
open StringCompat
let total_started = ref 0
let total_passed = ref 0
type status =
| Pending
| Success
| Failed
type vt100 =
| Control of string
| Data of string
let length_of_vt100 sequence =
let length = function
| Control _ -> 0
| Data x -> String.length x in
List.fold_left (+) 0 (List.map length sequence)
let flatten_vt100 sequence =
List.fold_left (^) "" (List.map (function Control x -> x | Data x -> x) sequence)
let escape = String.make 1 (char_of_int 0x1b)
let set_attribute attrs =
Control (Printf.sprintf "%s[%sm" escape (String.concat ";" (List.map string_of_int attrs)))
let reset = 0
let bright = 1
let dim = 2
let red = 31
let green = 32
let blue = 34
let yellow = 33
let basic_string_of_status = function
| Pending -> [ Data " " ]
| Success -> [ Data "[ Success ]" ]
| Failed -> [ Data "[ Failed ]" ]
let coloured_string_of_status = function
| Pending -> [ Data " " ]
| Success ->
[ Data "[ ";
set_attribute [ bright; green ];
Data "Success";
set_attribute [ reset ];
Data " ]" ]
| Failed ->
[ Data "[ ";
set_attribute [ bright; red ];
Data "Failed ";
set_attribute [ reset ];
Data " ]" ]
let use_colour = ref true
let cols = 80
let nice_status_output name status =
let vt100 = (if !use_colour then coloured_string_of_status else basic_string_of_status) status in
let flattened = flatten_vt100 vt100 in
let invisible_length = String.length flattened in
let visible_length = length_of_vt100 vt100 in
let line = Bytes.make (cols + (invisible_length - visible_length)) ' ' in
Stick the test name towards the left
Bytes.blit_string name 0 line 0 (min cols (String.length name));
Stick the coloured bit towards the right
NB we need to use the ' visible length ' for positioning but copy all chars , even invis ones
Bytes.blit_string (flatten_vt100 vt100) 0 line (cols - visible_length) (String.length flattened);
Printf.printf "%s\n%!" (Bytes.to_string line)
module Raw = struct
type test = {
name : string;
mutable status : status;
}
let make_test name = {
name;
status = Pending
}
let all_tests = ref []
let failed_tests = ref []
let mem test =
List.memq test !all_tests
let remove test =
all_tests := List.removeq test !all_tests
let add test =
all_tests := test :: !all_tests
let debug test fmt =
let fn msg =
let tab = " " in
let max_length =
cols
- length_of_vt100 (coloured_string_of_status test.status)
- String.length tab in
let rec loop start_offset =
if start_offset < String.length msg then begin
let length = min (String.length msg - start_offset) max_length in
let submsg = String.sub msg start_offset length in
nice_status_output (tab^submsg) Pending;
loop (start_offset + length)
end in
nice_status_output (test.name^":") Pending;
loop 0 in
Printf.kprintf fn fmt
let start test =
incr total_started;
add test
let success test =
if not (mem test) then
failwith (Printf.sprintf "Test not started: %s" test.name);
remove test;
if test.status = Pending then begin
incr total_passed;
test.status <- Success
end;
nice_status_output test.name test.status
let failed test =
if not (mem test) then
failwith (Printf.sprintf "Test not started: %s" test.name);
remove test;
failed_tests := test :: !failed_tests;
test.status <- Failed;
nice_status_output test.name Failed
let stats ~started ~passed =
Printf.sprintf "Total test started: %d; total passed: %d (%.2f%%)"
started passed (float_of_int passed /. (float_of_int started) *. 100.)
let summarise () =
Printf.printf "\n\n%s\n%!" (stats ~started:!total_started ~passed:!total_passed);
if !all_tests <> [] then begin
Printf.printf "Tests neither succeeded nor failed:\n%!";
List.iter (fun t -> nice_status_output t.name t.status) !all_tests;
end;
if !total_passed <> !total_started then begin
Printf.printf "Tests failed:\n%!";
List.iter (fun t -> nice_status_output t.name Failed) (List.rev !failed_tests);
exit 1;
end
end
type test =
| Test_case of (unit -> bool)
| Test_group of string * test list
let test t = Test_case t
let test_exn exn fn =
Test_case (fun () ->
try ignore (fn () : 'a); false
with e -> e = exn)
let group name l = Test_group (name, l)
let tests = ref []
let register name tl =
tests := Test_group (name, tl) :: !tests
let process () =
let rank = ref 1 in
let fullname path =
String.concat "." (List.rev path) ^ ":" ^ string_of_int !rank in
let rec aux path = function
| Test_case fn ->
let name = fullname path in
incr rank;
let test = Raw.make_test name in
Raw.start test;
let result =
try fn ()
with e ->
Raw.debug test "Got exception: %s" (Printexc.to_string e);
false in
if result then
Raw.success test
else
Raw.failed test
| Test_group (n, tl) ->
let oldrank = !rank in
rank := 1;
List.iter (aux (n::path)) tl;
rank := oldrank in
List.iter (aux []) !tests;
Raw.summarise ()
|
e8476a2911f28db05ea95897ca6d11d8240425299e30f052675d2380cef96a61 | monadfix/ormolu-live | Avail.hs | # LANGUAGE CPP #
{-# LANGUAGE DeriveDataTypeable #-}
--
( c ) The University of Glasgow
--
#include "HsVersions2.h"
module Avail (
Avails,
AvailInfo(..),
avail,
availsToNameSet,
availsToNameSetWithSelectors,
availsToNameEnv,
availName, availNames, availNonFldNames,
availNamesWithSelectors,
availFlds,
availsNamesWithOccs,
availNamesWithOccs,
stableAvailCmp,
plusAvail,
trimAvail,
filterAvail,
filterAvails,
nubAvails
) where
import GhcPrelude
import Name
import NameEnv
import NameSet
import FieldLabel
import Binary
import ListSetOps
import Outputable
import Util
import Data.Data ( Data )
import Data.List ( find )
import Data.Function
-- -----------------------------------------------------------------------------
The AvailInfo type
| Records what things are \"available\ " , i.e. in scope
data AvailInfo
-- | An ordinary identifier in scope
= Avail Name
-- | A type or class in scope
--
-- The __AvailTC Invariant__: If the type or class is itself to be in scope,
-- it must be /first/ in this list. Thus, typically:
--
> AvailTC Eq [ Eq , = = , \/= ] [ ]
| AvailTC
Name -- ^ The name of the type or class
[Name] -- ^ The available pieces of type or class,
-- excluding field selectors.
[FieldLabel] -- ^ The record fields of the type
-- (see Note [Representing fields in AvailInfo]).
deriving ( Eq -- ^ Used when deciding if the interface has changed
, Data )
| A collection of ' AvailInfo ' - several things that are \"available\ "
type Avails = [AvailInfo]
Note [ Representing fields in AvailInfo ]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When -XDuplicateRecordFields is disabled ( the normal case ) , a
datatype like
data T = MkT { foo : : Int }
gives rise to the AvailInfo
AvailTC T [ T , MkT ] [ FieldLabel " foo " False foo ]
whereas if -XDuplicateRecordFields is enabled it gives
AvailTC T [ T , MkT ] [ FieldLabel " foo " True $ sel : foo : MkT ]
since the label does not match the selector name .
The labels in a field list are not necessarily unique :
data families allow the same parent ( the family tycon ) to have
multiple distinct fields with the same label . For example ,
data family F a
data instance F Int = MkFInt { foo : : Int }
data instance F Bool = MkFBool { foo : : Bool }
gives rise to
AvailTC F [ F , MkFInt , MkFBool ]
[ " foo " True $ sel : foo : MkFInt
, " foo " True $ sel : foo : MkFBool ]
Moreover , note that the flIsOverloaded flag need not be the same for
all the elements of the list . In the example above , this occurs if
the two data instances are defined in different modules , one with
` -XDuplicateRecordFields ` enabled and one with it disabled . Thus it
is possible to have
AvailTC F [ F , MkFInt , MkFBool ]
[ " foo " True $ sel : foo : MkFInt
, " foo " False foo ]
If the two data instances are defined in different modules , both
without ` -XDuplicateRecordFields ` , it will be impossible to export
them from the same module ( even with ` -XDuplicateRecordfields `
enabled ) , because they would be represented identically . The
workaround here is to enable ` -XDuplicateRecordFields ` on the defining
modules .
Note [Representing fields in AvailInfo]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When -XDuplicateRecordFields is disabled (the normal case), a
datatype like
data T = MkT { foo :: Int }
gives rise to the AvailInfo
AvailTC T [T, MkT] [FieldLabel "foo" False foo]
whereas if -XDuplicateRecordFields is enabled it gives
AvailTC T [T, MkT] [FieldLabel "foo" True $sel:foo:MkT]
since the label does not match the selector name.
The labels in a field list are not necessarily unique:
data families allow the same parent (the family tycon) to have
multiple distinct fields with the same label. For example,
data family F a
data instance F Int = MkFInt { foo :: Int }
data instance F Bool = MkFBool { foo :: Bool}
gives rise to
AvailTC F [ F, MkFInt, MkFBool ]
[ FieldLabel "foo" True $sel:foo:MkFInt
, FieldLabel "foo" True $sel:foo:MkFBool ]
Moreover, note that the flIsOverloaded flag need not be the same for
all the elements of the list. In the example above, this occurs if
the two data instances are defined in different modules, one with
`-XDuplicateRecordFields` enabled and one with it disabled. Thus it
is possible to have
AvailTC F [ F, MkFInt, MkFBool ]
[ FieldLabel "foo" True $sel:foo:MkFInt
, FieldLabel "foo" False foo ]
If the two data instances are defined in different modules, both
without `-XDuplicateRecordFields`, it will be impossible to export
them from the same module (even with `-XDuplicateRecordfields`
enabled), because they would be represented identically. The
workaround here is to enable `-XDuplicateRecordFields` on the defining
modules.
-}
-- | Compare lexicographically
stableAvailCmp :: AvailInfo -> AvailInfo -> Ordering
stableAvailCmp (Avail n1) (Avail n2) = n1 `stableNameCmp` n2
stableAvailCmp (Avail {}) (AvailTC {}) = LT
stableAvailCmp (AvailTC n ns nfs) (AvailTC m ms mfs) =
(n `stableNameCmp` m) `thenCmp`
(cmpList stableNameCmp ns ms) `thenCmp`
(cmpList (stableNameCmp `on` flSelector) nfs mfs)
stableAvailCmp (AvailTC {}) (Avail {}) = GT
avail :: Name -> AvailInfo
avail n = Avail n
-- -----------------------------------------------------------------------------
Operations on AvailInfo
availsToNameSet :: [AvailInfo] -> NameSet
availsToNameSet avails = foldr add emptyNameSet avails
where add avail set = extendNameSetList set (availNames avail)
availsToNameSetWithSelectors :: [AvailInfo] -> NameSet
availsToNameSetWithSelectors avails = foldr add emptyNameSet avails
where add avail set = extendNameSetList set (availNamesWithSelectors avail)
availsToNameEnv :: [AvailInfo] -> NameEnv AvailInfo
availsToNameEnv avails = foldr add emptyNameEnv avails
where add avail env = extendNameEnvList env
(zip (availNames avail) (repeat avail))
-- | Just the main name made available, i.e. not the available pieces
-- of type or class brought into scope by the 'GenAvailInfo'
availName :: AvailInfo -> Name
availName (Avail n) = n
availName (AvailTC n _ _) = n
-- | All names made available by the availability information (excluding overloaded selectors)
availNames :: AvailInfo -> [Name]
availNames (Avail n) = [n]
availNames (AvailTC _ ns fs) = ns ++ [ flSelector f | f <- fs, not (flIsOverloaded f) ]
-- | All names made available by the availability information (including overloaded selectors)
availNamesWithSelectors :: AvailInfo -> [Name]
availNamesWithSelectors (Avail n) = [n]
availNamesWithSelectors (AvailTC _ ns fs) = ns ++ map flSelector fs
-- | Names for non-fields made available by the availability information
availNonFldNames :: AvailInfo -> [Name]
availNonFldNames (Avail n) = [n]
availNonFldNames (AvailTC _ ns _) = ns
-- | Fields made available by the availability information
availFlds :: AvailInfo -> [FieldLabel]
availFlds (AvailTC _ _ fs) = fs
availFlds _ = []
availsNamesWithOccs :: [AvailInfo] -> [(Name, OccName)]
availsNamesWithOccs = concatMap availNamesWithOccs
-- | 'Name's made available by the availability information, paired with
-- the 'OccName' used to refer to each one.
--
When is in use , the ' Name ' may be the
mangled name of a record selector ( e.g. @$sel : foo : MkT@ ) while the
-- 'OccName' will be the label of the field (e.g. @foo@).
--
-- See Note [Representing fields in AvailInfo].
availNamesWithOccs :: AvailInfo -> [(Name, OccName)]
availNamesWithOccs (Avail n) = [(n, nameOccName n)]
availNamesWithOccs (AvailTC _ ns fs)
= [ (n, nameOccName n) | n <- ns ] ++
[ (flSelector fl, mkVarOccFS (flLabel fl)) | fl <- fs ]
-- -----------------------------------------------------------------------------
-- Utility
plusAvail :: AvailInfo -> AvailInfo -> AvailInfo
plusAvail a1 a2
| debugIsOn && availName a1 /= availName a2
= pprPanic "RnEnv.plusAvail names differ" (hsep [ppr a1,ppr a2])
plusAvail a1@(Avail {}) (Avail {}) = a1
plusAvail (AvailTC _ [] []) a2@(AvailTC {}) = a2
plusAvail a1@(AvailTC {}) (AvailTC _ [] []) = a1
plusAvail (AvailTC n1 (s1:ss1) fs1) (AvailTC n2 (s2:ss2) fs2)
Maintain invariant the parent is first
(True,True) -> AvailTC n1 (s1 : (ss1 `unionLists` ss2))
(fs1 `unionLists` fs2)
(True,False) -> AvailTC n1 (s1 : (ss1 `unionLists` (s2:ss2)))
(fs1 `unionLists` fs2)
(False,True) -> AvailTC n1 (s2 : ((s1:ss1) `unionLists` ss2))
(fs1 `unionLists` fs2)
(False,False) -> AvailTC n1 ((s1:ss1) `unionLists` (s2:ss2))
(fs1 `unionLists` fs2)
plusAvail (AvailTC n1 ss1 fs1) (AvailTC _ [] fs2)
= AvailTC n1 ss1 (fs1 `unionLists` fs2)
plusAvail (AvailTC n1 [] fs1) (AvailTC _ ss2 fs2)
= AvailTC n1 ss2 (fs1 `unionLists` fs2)
plusAvail a1 a2 = pprPanic "RnEnv.plusAvail" (hsep [ppr a1,ppr a2])
| trims an ' AvailInfo ' to keep only a single name
trimAvail :: AvailInfo -> Name -> AvailInfo
trimAvail (Avail n) _ = Avail n
trimAvail (AvailTC n ns fs) m = case find ((== m) . flSelector) fs of
Just x -> AvailTC n [] [x]
Nothing -> ASSERT( m `elem` ns ) AvailTC n [m] []
| filters ' AvailInfo 's by the given predicate
filterAvails :: (Name -> Bool) -> [AvailInfo] -> [AvailInfo]
filterAvails keep avails = foldr (filterAvail keep) [] avails
| filters an ' AvailInfo ' by the given predicate
filterAvail :: (Name -> Bool) -> AvailInfo -> [AvailInfo] -> [AvailInfo]
filterAvail keep ie rest =
case ie of
Avail n | keep n -> ie : rest
| otherwise -> rest
AvailTC tc ns fs ->
let ns' = filter keep ns
fs' = filter (keep . flSelector) fs in
if null ns' && null fs' then rest else AvailTC tc ns' fs' : rest
| Combines ' AvailInfo 's from the same family
-- 'avails' may have several items with the same availName
E.g import Ix ( Ix ( .. ) , index )
will give Ix(Ix , index , range ) and Ix(index )
We want to combine these ; addAvail does that
nubAvails :: [AvailInfo] -> [AvailInfo]
nubAvails avails = nameEnvElts (foldl' add emptyNameEnv avails)
where
add env avail = extendNameEnv_C plusAvail env (availName avail) avail
-- -----------------------------------------------------------------------------
-- Printing
instance Outputable AvailInfo where
ppr = pprAvail
pprAvail :: AvailInfo -> SDoc
pprAvail (Avail n)
= ppr n
pprAvail (AvailTC n ns fs)
= ppr n <> braces (sep [ fsep (punctuate comma (map ppr ns)) <> semi
, fsep (punctuate comma (map (ppr . flLabel) fs))])
instance Binary AvailInfo where
put_ bh (Avail aa) = do
putByte bh 0
put_ bh aa
put_ bh (AvailTC ab ac ad) = do
putByte bh 1
put_ bh ab
put_ bh ac
put_ bh ad
get bh = do
h <- getByte bh
case h of
0 -> do aa <- get bh
return (Avail aa)
_ -> do ab <- get bh
ac <- get bh
ad <- get bh
return (AvailTC ab ac ad)
| null | https://raw.githubusercontent.com/monadfix/ormolu-live/d8ae72ef168b98a8d179d642f70352c88b3ac226/ghc-lib-parser-8.10.1.20200412/compiler/basicTypes/Avail.hs | haskell | # LANGUAGE DeriveDataTypeable #
-----------------------------------------------------------------------------
| An ordinary identifier in scope
| A type or class in scope
The __AvailTC Invariant__: If the type or class is itself to be in scope,
it must be /first/ in this list. Thus, typically:
^ The name of the type or class
^ The available pieces of type or class,
excluding field selectors.
^ The record fields of the type
(see Note [Representing fields in AvailInfo]).
^ Used when deciding if the interface has changed
| Compare lexicographically
-----------------------------------------------------------------------------
| Just the main name made available, i.e. not the available pieces
of type or class brought into scope by the 'GenAvailInfo'
| All names made available by the availability information (excluding overloaded selectors)
| All names made available by the availability information (including overloaded selectors)
| Names for non-fields made available by the availability information
| Fields made available by the availability information
| 'Name's made available by the availability information, paired with
the 'OccName' used to refer to each one.
'OccName' will be the label of the field (e.g. @foo@).
See Note [Representing fields in AvailInfo].
-----------------------------------------------------------------------------
Utility
'avails' may have several items with the same availName
-----------------------------------------------------------------------------
Printing | # LANGUAGE CPP #
( c ) The University of Glasgow
#include "HsVersions2.h"
module Avail (
Avails,
AvailInfo(..),
avail,
availsToNameSet,
availsToNameSetWithSelectors,
availsToNameEnv,
availName, availNames, availNonFldNames,
availNamesWithSelectors,
availFlds,
availsNamesWithOccs,
availNamesWithOccs,
stableAvailCmp,
plusAvail,
trimAvail,
filterAvail,
filterAvails,
nubAvails
) where
import GhcPrelude
import Name
import NameEnv
import NameSet
import FieldLabel
import Binary
import ListSetOps
import Outputable
import Util
import Data.Data ( Data )
import Data.List ( find )
import Data.Function
The AvailInfo type
| Records what things are \"available\ " , i.e. in scope
data AvailInfo
= Avail Name
> AvailTC Eq [ Eq , = = , \/= ] [ ]
| AvailTC
, Data )
| A collection of ' AvailInfo ' - several things that are \"available\ "
type Avails = [AvailInfo]
Note [ Representing fields in AvailInfo ]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When -XDuplicateRecordFields is disabled ( the normal case ) , a
datatype like
data T = MkT { foo : : Int }
gives rise to the AvailInfo
AvailTC T [ T , MkT ] [ FieldLabel " foo " False foo ]
whereas if -XDuplicateRecordFields is enabled it gives
AvailTC T [ T , MkT ] [ FieldLabel " foo " True $ sel : foo : MkT ]
since the label does not match the selector name .
The labels in a field list are not necessarily unique :
data families allow the same parent ( the family tycon ) to have
multiple distinct fields with the same label . For example ,
data family F a
data instance F Int = MkFInt { foo : : Int }
data instance F Bool = MkFBool { foo : : Bool }
gives rise to
AvailTC F [ F , MkFInt , MkFBool ]
[ " foo " True $ sel : foo : MkFInt
, " foo " True $ sel : foo : MkFBool ]
Moreover , note that the flIsOverloaded flag need not be the same for
all the elements of the list . In the example above , this occurs if
the two data instances are defined in different modules , one with
` -XDuplicateRecordFields ` enabled and one with it disabled . Thus it
is possible to have
AvailTC F [ F , MkFInt , MkFBool ]
[ " foo " True $ sel : foo : MkFInt
, " foo " False foo ]
If the two data instances are defined in different modules , both
without ` -XDuplicateRecordFields ` , it will be impossible to export
them from the same module ( even with ` -XDuplicateRecordfields `
enabled ) , because they would be represented identically . The
workaround here is to enable ` -XDuplicateRecordFields ` on the defining
modules .
Note [Representing fields in AvailInfo]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When -XDuplicateRecordFields is disabled (the normal case), a
datatype like
data T = MkT { foo :: Int }
gives rise to the AvailInfo
AvailTC T [T, MkT] [FieldLabel "foo" False foo]
whereas if -XDuplicateRecordFields is enabled it gives
AvailTC T [T, MkT] [FieldLabel "foo" True $sel:foo:MkT]
since the label does not match the selector name.
The labels in a field list are not necessarily unique:
data families allow the same parent (the family tycon) to have
multiple distinct fields with the same label. For example,
data family F a
data instance F Int = MkFInt { foo :: Int }
data instance F Bool = MkFBool { foo :: Bool}
gives rise to
AvailTC F [ F, MkFInt, MkFBool ]
[ FieldLabel "foo" True $sel:foo:MkFInt
, FieldLabel "foo" True $sel:foo:MkFBool ]
Moreover, note that the flIsOverloaded flag need not be the same for
all the elements of the list. In the example above, this occurs if
the two data instances are defined in different modules, one with
`-XDuplicateRecordFields` enabled and one with it disabled. Thus it
is possible to have
AvailTC F [ F, MkFInt, MkFBool ]
[ FieldLabel "foo" True $sel:foo:MkFInt
, FieldLabel "foo" False foo ]
If the two data instances are defined in different modules, both
without `-XDuplicateRecordFields`, it will be impossible to export
them from the same module (even with `-XDuplicateRecordfields`
enabled), because they would be represented identically. The
workaround here is to enable `-XDuplicateRecordFields` on the defining
modules.
-}
stableAvailCmp :: AvailInfo -> AvailInfo -> Ordering
stableAvailCmp (Avail n1) (Avail n2) = n1 `stableNameCmp` n2
stableAvailCmp (Avail {}) (AvailTC {}) = LT
stableAvailCmp (AvailTC n ns nfs) (AvailTC m ms mfs) =
(n `stableNameCmp` m) `thenCmp`
(cmpList stableNameCmp ns ms) `thenCmp`
(cmpList (stableNameCmp `on` flSelector) nfs mfs)
stableAvailCmp (AvailTC {}) (Avail {}) = GT
avail :: Name -> AvailInfo
avail n = Avail n
Operations on AvailInfo
availsToNameSet :: [AvailInfo] -> NameSet
availsToNameSet avails = foldr add emptyNameSet avails
where add avail set = extendNameSetList set (availNames avail)
availsToNameSetWithSelectors :: [AvailInfo] -> NameSet
availsToNameSetWithSelectors avails = foldr add emptyNameSet avails
where add avail set = extendNameSetList set (availNamesWithSelectors avail)
availsToNameEnv :: [AvailInfo] -> NameEnv AvailInfo
availsToNameEnv avails = foldr add emptyNameEnv avails
where add avail env = extendNameEnvList env
(zip (availNames avail) (repeat avail))
availName :: AvailInfo -> Name
availName (Avail n) = n
availName (AvailTC n _ _) = n
availNames :: AvailInfo -> [Name]
availNames (Avail n) = [n]
availNames (AvailTC _ ns fs) = ns ++ [ flSelector f | f <- fs, not (flIsOverloaded f) ]
availNamesWithSelectors :: AvailInfo -> [Name]
availNamesWithSelectors (Avail n) = [n]
availNamesWithSelectors (AvailTC _ ns fs) = ns ++ map flSelector fs
availNonFldNames :: AvailInfo -> [Name]
availNonFldNames (Avail n) = [n]
availNonFldNames (AvailTC _ ns _) = ns
availFlds :: AvailInfo -> [FieldLabel]
availFlds (AvailTC _ _ fs) = fs
availFlds _ = []
availsNamesWithOccs :: [AvailInfo] -> [(Name, OccName)]
availsNamesWithOccs = concatMap availNamesWithOccs
When is in use , the ' Name ' may be the
mangled name of a record selector ( e.g. @$sel : foo : MkT@ ) while the
availNamesWithOccs :: AvailInfo -> [(Name, OccName)]
availNamesWithOccs (Avail n) = [(n, nameOccName n)]
availNamesWithOccs (AvailTC _ ns fs)
= [ (n, nameOccName n) | n <- ns ] ++
[ (flSelector fl, mkVarOccFS (flLabel fl)) | fl <- fs ]
plusAvail :: AvailInfo -> AvailInfo -> AvailInfo
plusAvail a1 a2
| debugIsOn && availName a1 /= availName a2
= pprPanic "RnEnv.plusAvail names differ" (hsep [ppr a1,ppr a2])
plusAvail a1@(Avail {}) (Avail {}) = a1
plusAvail (AvailTC _ [] []) a2@(AvailTC {}) = a2
plusAvail a1@(AvailTC {}) (AvailTC _ [] []) = a1
plusAvail (AvailTC n1 (s1:ss1) fs1) (AvailTC n2 (s2:ss2) fs2)
Maintain invariant the parent is first
(True,True) -> AvailTC n1 (s1 : (ss1 `unionLists` ss2))
(fs1 `unionLists` fs2)
(True,False) -> AvailTC n1 (s1 : (ss1 `unionLists` (s2:ss2)))
(fs1 `unionLists` fs2)
(False,True) -> AvailTC n1 (s2 : ((s1:ss1) `unionLists` ss2))
(fs1 `unionLists` fs2)
(False,False) -> AvailTC n1 ((s1:ss1) `unionLists` (s2:ss2))
(fs1 `unionLists` fs2)
plusAvail (AvailTC n1 ss1 fs1) (AvailTC _ [] fs2)
= AvailTC n1 ss1 (fs1 `unionLists` fs2)
plusAvail (AvailTC n1 [] fs1) (AvailTC _ ss2 fs2)
= AvailTC n1 ss2 (fs1 `unionLists` fs2)
plusAvail a1 a2 = pprPanic "RnEnv.plusAvail" (hsep [ppr a1,ppr a2])
| trims an ' AvailInfo ' to keep only a single name
trimAvail :: AvailInfo -> Name -> AvailInfo
trimAvail (Avail n) _ = Avail n
trimAvail (AvailTC n ns fs) m = case find ((== m) . flSelector) fs of
Just x -> AvailTC n [] [x]
Nothing -> ASSERT( m `elem` ns ) AvailTC n [m] []
| filters ' AvailInfo 's by the given predicate
filterAvails :: (Name -> Bool) -> [AvailInfo] -> [AvailInfo]
filterAvails keep avails = foldr (filterAvail keep) [] avails
| filters an ' AvailInfo ' by the given predicate
filterAvail :: (Name -> Bool) -> AvailInfo -> [AvailInfo] -> [AvailInfo]
filterAvail keep ie rest =
case ie of
Avail n | keep n -> ie : rest
| otherwise -> rest
AvailTC tc ns fs ->
let ns' = filter keep ns
fs' = filter (keep . flSelector) fs in
if null ns' && null fs' then rest else AvailTC tc ns' fs' : rest
| Combines ' AvailInfo 's from the same family
E.g import Ix ( Ix ( .. ) , index )
will give Ix(Ix , index , range ) and Ix(index )
We want to combine these ; addAvail does that
nubAvails :: [AvailInfo] -> [AvailInfo]
nubAvails avails = nameEnvElts (foldl' add emptyNameEnv avails)
where
add env avail = extendNameEnv_C plusAvail env (availName avail) avail
instance Outputable AvailInfo where
ppr = pprAvail
pprAvail :: AvailInfo -> SDoc
pprAvail (Avail n)
= ppr n
pprAvail (AvailTC n ns fs)
= ppr n <> braces (sep [ fsep (punctuate comma (map ppr ns)) <> semi
, fsep (punctuate comma (map (ppr . flLabel) fs))])
instance Binary AvailInfo where
put_ bh (Avail aa) = do
putByte bh 0
put_ bh aa
put_ bh (AvailTC ab ac ad) = do
putByte bh 1
put_ bh ab
put_ bh ac
put_ bh ad
get bh = do
h <- getByte bh
case h of
0 -> do aa <- get bh
return (Avail aa)
_ -> do ab <- get bh
ac <- get bh
ad <- get bh
return (AvailTC ab ac ad)
|
4542cb1872bdc72732b703cc5d0b19b24a6a147b06c16faac941c7cb7afc463f | squirrel-prover/squirrel-prover | action.ml | open Utils
type 'a item = {
par_choice : int * 'a ; (** position in parallel compositions *)
sum_choice : int * 'a (** position in conditionals *)
}
type 'a t = 'a item list
(** Strict dependency [a < b]. *)
let depends a b =
let rec aux a b = match a, b with
| [], _::_ -> true
| hda::tla, hdb::tlb when hda = hdb -> aux tla tlb
| _ -> false
in aux a b
(** Distance in control-flow graph. Return [None] when there is no
* dependency, and [Some 0] when the actions are equal. *)
let distance a b =
let rec aux a b = match a, b with
| [], _ -> Some (List.length b)
| hda::tla, hdb::tlb when hda = hdb -> aux tla tlb
| _ -> None
in aux a b
type shape = int t
type action = (Vars.var list) t
let rec get_shape = function
| [] -> []
| { par_choice = (p,lp) ; sum_choice = (s,ls) } :: l ->
{ par_choice = (p, List.length lp) ;
sum_choice = (s, List.length ls) }
:: get_shape l
let rec get_indices = function
| [] -> []
| a :: l ->
snd a.par_choice @ snd a.sum_choice @ get_indices l
let fv_action a = Vars.Sv.of_list1 (get_indices a)
let same_shape a b : Term.subst option =
let rec same acc a b = match a,b with
| [],[] -> Some acc
| [], _ | _, [] -> None
| i :: l, i' :: l' ->
let p,lp = i.par_choice and p',lp' = i'.par_choice in
let s,ls = i.sum_choice and s',ls' = i'.sum_choice in
if p = p' && List.length lp = List.length lp' &&
s = s' && List.length ls = List.length ls'
then
let acc' =
List.map2 (fun i i' -> Term.ESubst (Term.mk_var i,Term.mk_var i')) lp lp'
in
let acc'' =
List.map2 (fun i i' -> Term.ESubst (Term.mk_var i,Term.mk_var i')) ls ls'
in
same (acc'' @ acc' @ acc) l l'
else None in
same [] a b
(** Action symbols *)
type Symbols.data += ActionData of Vars.var list * action
let fresh_symbol table ~exact name =
if exact
then Symbols.Action.reserve_exact table name
else Symbols.Action.reserve table name
let define_symbol table symb args action =
let data = ActionData (args,action) in
Symbols.Action.define table symb ~data (List.length args)
let find_symbol s table =
match Symbols.Action.data_of_lsymb s table with
| ActionData (x,y) -> x,y
| _ -> assert false
let of_symbol s table =
match Symbols.Action.get_data s table with
| ActionData (x,y) -> x,y
| _ -> assert false
let arity s table =
let l,_ = of_symbol s table in
List.length l
let iter_table f table =
Symbols.Action.iter
(fun s _ -> function
| ActionData (args,action) -> f s args action
| _ -> assert false)
table
(** Pretty-printing *)
(** Print integers in action shapes. *)
let pp_int ppf i =
if i <> 0 then Fmt.pf ppf "(%d)" i
(** Print list of indices in actions. *)
let pp_indices ppf l =
if l <> [] then Fmt.pf ppf "(%a)" Vars.pp_list l
(** Print list of strings in actions. *)
let pp_strings ppf l =
let pp_list = Fmt.list ~sep:(fun ppf () -> Fmt.pf ppf ",") Fmt.string in
if l <> [] then Fmt.pf ppf "(%a)" pp_list l
(** [pp_par_choice_f f] formats [int*'a] as parallel choices,
* relying on [f] to format ['a]. *)
let pp_par_choice_f f ppf (k,a) =
Fmt.pf ppf "%d%a" k f a
(** [pp_sum_choice_f f d] formats [int*'a] as sum choices,
* relying on [f] to format ['a]. It does not format
* the default choice [d]. *)
let pp_sum_choice_f f d ppf (k,a) =
if (k,a) <> d then Fmt.pf ppf "/%d%a" k f a
* [ f d ] is a formatter for [ ' a action ] ,
* relying on the formatter [ f ] for [ ' a ] , and ignoring
* the default sum choice [ d ] .
* relying on the formatter [f] for ['a], and ignoring
* the default sum choice [d]. *)
let pp_action_f f d ppf a =
if a = [] then Fmt.pf ppf "ε"
else
Fmt.list
~sep:(fun fmt () -> Fmt.pf fmt "_")
(fun ppf {par_choice;sum_choice} ->
Fmt.pf ppf "%a%a"
(pp_par_choice_f f) par_choice
(pp_sum_choice_f f d) sum_choice)
ppf
a
let pp_action_structure ppf a =
Printer.kw `GoalAction ppf "%a" (pp_action_f pp_indices (0,[])) a
let pp_shape ppf a = pp_action_f pp_int (0,0) ppf a
let rec subst_action (s : Term.subst) (a : action) : action =
match a with
| [] -> []
| a :: l ->
let p,lp = a.par_choice in
let q,lq = a.sum_choice in
{ par_choice = p, List.map (Term.subst_var s) lp ;
sum_choice = q, List.map (Term.subst_var s) lq }
:: subst_action s l
let of_term (s:Symbols.action) (l:Vars.var list) table : action =
let l',a = of_symbol s table in
let subst =
List.map2 (fun x y -> Term.ESubst (Term.mk_var x,Term.mk_var y)) l' l
in
subst_action subst a
let pp_parsed_action ppf a = pp_action_f pp_strings (0,[]) ppf a
(*------------------------------------------------------------------*)
(** An action description features an input, a condition (which sums up
* several [Exist] constructs which might have succeeded or not) and
* subsequent updates and outputs.
* The condition binds variables in the updates and output.
* An action description may feature free index variables, that are
* in a sense bound by the corresponding action. We also include a list of
* all used indices, since they are not explicitly declared as part of
* the action or current condition (they could be introduced by previous
* conditions). *)
type descr = {
name : Symbols.action ;
action : action ;
input : Channel.t * string ;
indices : Vars.var list ;
condition : Vars.var list * Term.term ;
updates : (Term.state * Term.term) list ;
output : Channel.t * Term.term;
globals : Symbols.macro list;
}
(** Minimal validation function. Could be improved to check for free
variables, valid diff operators, etc. *)
let valid_descr d =
d.indices = get_indices d.action
(*------------------------------------------------------------------*)
(** Apply a substitution to an action description.
* The domain of the substitution must contain all indices
* occurring in the description. *)
let subst_descr subst descr =
let action = subst_action subst descr.action in
let subst_term = Term.subst subst in
let indices = Term.subst_vars subst descr.indices in
let condition =
(* FIXME: do we need to substitute ? *)
fst descr.condition,
Term.subst subst (snd descr.condition) in
let updates =
List.map (fun (ss,t) ->
Term.subst_isymb subst ss, subst_term t
) descr.updates
in
let output = fst descr.output, subst_term (snd descr.output) in
{ descr with action; indices; condition; updates; output; }
(*------------------------------------------------------------------*)
let pp_descr_short ppf descr =
let t = Term.mk_action descr.name descr.indices in
Term.pp ppf t
(*------------------------------------------------------------------*)
let pp_descr ~debug ppf descr =
let e = ref (Vars.of_list []) in
let _, s = Term.refresh_vars (`InEnv e) descr.indices in
let descr = if debug then descr else subst_descr s descr in
Fmt.pf ppf "@[<v 0>action name: @[<hov>%a@]@;\
%a\
@[<hv 2>condition:@ @[<hov>%a@]@]@;\
%a\
%a\
@[<hv 2>output:@ @[<hov>%a@]@]@]"
pp_descr_short descr
(Utils.pp_ne_list "@[<hv 2>indices:@ @[<hov>%a@]@]@;" Vars.pp_list)
descr.indices
Term.pp (snd descr.condition)
(Utils.pp_ne_list "@[<hv 2>updates:@ @[<hv>%a@]@]@;"
(Fmt.list
~sep:(fun ppf () -> Fmt.pf ppf ";@ ")
(fun ppf (s, t) ->
Fmt.pf ppf "@[%a :=@ %a@]" Term.pp_msymb s Term.pp t)))
descr.updates
(Utils.pp_ne_list "@[<hv 2>globals:@ @[<hv>%a@]@]@;"
(Fmt.list ~sep:(fun ppf () -> Fmt.pf ppf ";@ ") Symbols.pp))
descr.globals
Term.pp (snd descr.output)
(*------------------------------------------------------------------*)
(* well-formedness check for a description: check free variables *)
let check_descr (d : descr) : bool =
(* special case for [init], which does not satisfy the free variables
condition. *)
if d.name = Symbols.init_action then true else
begin
let _, cond = d.condition
and _, outp = d.output in
let dfv = Vars.Sv.of_list d.indices in
Vars.Sv.subset (Term.fv cond) dfv &&
Vars.Sv.subset (Term.fv outp) dfv &&
List.for_all (fun (_, state) ->
Vars.Sv.subset (Term.fv state) dfv
) d.updates
end
(*------------------------------------------------------------------*)
let descr_map
(f : Vars.env -> Symbols.macro -> Term.term -> Term.term)
(descr : descr)
: descr
=
let env = Vars.of_list descr.indices in
let f = f env in
let condition =
fst descr.condition,
f Symbols.cond (snd descr.condition)
in
let updates =
List.map (fun (ss,t) -> ss, f ss.Term.s_symb t) descr.updates
in
let output = fst descr.output, f Symbols.out (snd descr.output) in
let descr = { descr with condition; updates; output; } in
assert (check_descr descr);
descr
(*------------------------------------------------------------------*)
let refresh_descr descr =
let _, s = Term.refresh_vars `Global descr.indices in
let descr = subst_descr s descr in
assert (check_descr descr);
descr
let project_descr (s : Term.proj) d =
let project1 t = Term.project1 s t in
{ d with
condition = (let is,t = d.condition in is, project1 t);
updates = List.map (fun (st, m) -> st, project1 m) d.updates;
output = (let c,m = d.output in c, project1 m) }
let strongly_compatible_descr d1 d2 =
d1.name = d2.name &&
d1.action = d2.action &&
d1.input = d2.input &&
d1.indices = d2.indices &&
fst d1.condition = fst d2.condition &&
List.map fst d1.updates = List.map fst d2.updates &&
fst d1.output = fst d2.output
let combine_descrs (descrs : (Term.proj * descr) list) : descr =
let (p1,d1),rest =
match descrs with
| hd::tl -> hd,tl
| [] -> raise (Invalid_argument "combine_descrs")
in
(* Rename indices of descriptions in [rest] to agree with [d1]. *)
let rest =
List.map
(fun (proj,d2) ->
let subst =
List.map2
(fun i j -> Term.ESubst (Term.mk_var i, Term.mk_var j))
d2.indices d1.indices
in
proj, subst_descr subst d2)
rest
in
let descrs = (p1,d1)::rest in
assert (List.for_all (fun (_,d2) -> strongly_compatible_descr d1 d2) rest);
let map f = List.map (fun (lbl,descr) -> (lbl, f descr)) descrs in
{ name = d1.name;
action = d1.action;
input = d1.input;
indices = d1.indices;
condition =
fst d1.condition,
Term.combine (map (fun descr -> snd descr.condition));
updates =
List.map
(fun (st,_) ->
st,
Term.combine (map (fun descr -> List.assoc st descr.updates)))
d1.updates;
output =
fst d1.output,
Term.combine (map (fun descr -> snd descr.output));
globals =
List.sort_uniq Stdlib.compare
(List.concat (List.map (fun (_,d) -> d.globals) descrs)) }
(*------------------------------------------------------------------*)
let debug = false
let pp_actions ppf table =
Fmt.pf ppf "@[<v 2>Available action shapes:@;@;@[" ;
let comma = ref false in
iter_table
(fun symbol indices action ->
if !comma then Fmt.pf ppf ",@;" ;
comma := true ;
if debug then
Fmt.pf ppf "%s%a=%a"
(Symbols.to_string symbol)
pp_indices indices
pp_action_structure action
else
Fmt.pf ppf "%s%a"
(Symbols.to_string symbol)
pp_indices indices)
table;
Fmt.pf ppf "@]@]@."
let rec dummy (shape : shape) : action =
match shape with
| [] -> []
| { par_choice = (p,lp) ; sum_choice = (s,ls) } :: l ->
{ par_choice = (p, List.init lp (fun _ -> Vars.make_new Type.Index "i")) ;
sum_choice = (s, List.init ls (fun _ -> Vars.make_new Type.Index "i")) }
:: dummy l
(*------------------------------------------------------------------*)
* { 2 Shapes }
module Shape = struct
type t = shape
let pp = pp_shape
let compare (u : t) (v : t) = Stdlib.compare u v
end
(*------------------------------------------------------------------*)
* { 2 FA - DUP }
let is_dup_match
(is_match : Term.term -> Term.term -> 'a -> 'a option)
(st : 'a)
(table : Symbols.table)
(elem : Term.term)
(elems : Term.term list) : 'a option
=
(* try to match [t] and [t'] modulo ≤ *)
let is_dup_leq table st t t' : 'a option =
let rec leq t t' =
match is_match t t' st with
| Some st -> Some st
| None ->
match t,t' with
| Fun (f,_, [t]), Fun (f',_, [t'])
when f = Term.f_pred && f' = Term.f_pred ->
leq t t'
| Fun (f,_, [t]), t' when f = Term.f_pred -> leq t t'
| Action (n,is), Action (n',is') ->
(* FIXME: allow to match [is] with (a prefix of) [is'] *)
if depends (of_term n is table) (of_term n' is' table)
then Some st
else None
| _ -> None
in
leq t t'
in
let direct_match =
List.find_map (fun t' ->
is_match elem t' st
) elems
in
match direct_match with
| Some res -> Some res
| None ->
match elem with
| Macro (im,[],t) when im = Term.in_macro ->
List.find_map (function
| Term.Macro (fm,[],t') when fm = Term.frame_macro ->
is_dup_leq table st (Term.mk_pred t) t'
| _ -> None
) elems
| Macro (em,[],t) when em = Term.frame_macro ->
List.find_map (function
| Term.Macro (fm,[],t')
when fm = Term.frame_macro -> is_dup_leq table st t t'
| _ -> None
) elems
| Macro (em,[],t) when em = Term.exec_macro ->
List.find_map (function
| Term.Macro (fm,[],t')
when fm = Term.frame_macro -> is_dup_leq table st t t'
| _ -> None
) elems
| _ -> None
let is_dup table t t' : bool =
let is_match t t' () = if t = t' then Some () else None in
match is_dup_match is_match () table t t' with
| None -> false
| Some () -> true
(*------------------------------------------------------------------*)
let pp_descr_dbg = pp_descr ~debug:true
let pp_descr = pp_descr ~debug:false
| null | https://raw.githubusercontent.com/squirrel-prover/squirrel-prover/d25b6dab570ea0e99915059a67599fd3a38caa8b/src/action.ml | ocaml | * position in parallel compositions
* position in conditionals
* Strict dependency [a < b].
* Distance in control-flow graph. Return [None] when there is no
* dependency, and [Some 0] when the actions are equal.
* Action symbols
* Pretty-printing
* Print integers in action shapes.
* Print list of indices in actions.
* Print list of strings in actions.
* [pp_par_choice_f f] formats [int*'a] as parallel choices,
* relying on [f] to format ['a].
* [pp_sum_choice_f f d] formats [int*'a] as sum choices,
* relying on [f] to format ['a]. It does not format
* the default choice [d].
------------------------------------------------------------------
* An action description features an input, a condition (which sums up
* several [Exist] constructs which might have succeeded or not) and
* subsequent updates and outputs.
* The condition binds variables in the updates and output.
* An action description may feature free index variables, that are
* in a sense bound by the corresponding action. We also include a list of
* all used indices, since they are not explicitly declared as part of
* the action or current condition (they could be introduced by previous
* conditions).
* Minimal validation function. Could be improved to check for free
variables, valid diff operators, etc.
------------------------------------------------------------------
* Apply a substitution to an action description.
* The domain of the substitution must contain all indices
* occurring in the description.
FIXME: do we need to substitute ?
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
well-formedness check for a description: check free variables
special case for [init], which does not satisfy the free variables
condition.
------------------------------------------------------------------
------------------------------------------------------------------
Rename indices of descriptions in [rest] to agree with [d1].
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
try to match [t] and [t'] modulo ≤
FIXME: allow to match [is] with (a prefix of) [is']
------------------------------------------------------------------ | open Utils
type 'a item = {
}
type 'a t = 'a item list
let depends a b =
let rec aux a b = match a, b with
| [], _::_ -> true
| hda::tla, hdb::tlb when hda = hdb -> aux tla tlb
| _ -> false
in aux a b
let distance a b =
let rec aux a b = match a, b with
| [], _ -> Some (List.length b)
| hda::tla, hdb::tlb when hda = hdb -> aux tla tlb
| _ -> None
in aux a b
type shape = int t
type action = (Vars.var list) t
let rec get_shape = function
| [] -> []
| { par_choice = (p,lp) ; sum_choice = (s,ls) } :: l ->
{ par_choice = (p, List.length lp) ;
sum_choice = (s, List.length ls) }
:: get_shape l
let rec get_indices = function
| [] -> []
| a :: l ->
snd a.par_choice @ snd a.sum_choice @ get_indices l
let fv_action a = Vars.Sv.of_list1 (get_indices a)
let same_shape a b : Term.subst option =
let rec same acc a b = match a,b with
| [],[] -> Some acc
| [], _ | _, [] -> None
| i :: l, i' :: l' ->
let p,lp = i.par_choice and p',lp' = i'.par_choice in
let s,ls = i.sum_choice and s',ls' = i'.sum_choice in
if p = p' && List.length lp = List.length lp' &&
s = s' && List.length ls = List.length ls'
then
let acc' =
List.map2 (fun i i' -> Term.ESubst (Term.mk_var i,Term.mk_var i')) lp lp'
in
let acc'' =
List.map2 (fun i i' -> Term.ESubst (Term.mk_var i,Term.mk_var i')) ls ls'
in
same (acc'' @ acc' @ acc) l l'
else None in
same [] a b
type Symbols.data += ActionData of Vars.var list * action
let fresh_symbol table ~exact name =
if exact
then Symbols.Action.reserve_exact table name
else Symbols.Action.reserve table name
let define_symbol table symb args action =
let data = ActionData (args,action) in
Symbols.Action.define table symb ~data (List.length args)
let find_symbol s table =
match Symbols.Action.data_of_lsymb s table with
| ActionData (x,y) -> x,y
| _ -> assert false
let of_symbol s table =
match Symbols.Action.get_data s table with
| ActionData (x,y) -> x,y
| _ -> assert false
let arity s table =
let l,_ = of_symbol s table in
List.length l
let iter_table f table =
Symbols.Action.iter
(fun s _ -> function
| ActionData (args,action) -> f s args action
| _ -> assert false)
table
let pp_int ppf i =
if i <> 0 then Fmt.pf ppf "(%d)" i
let pp_indices ppf l =
if l <> [] then Fmt.pf ppf "(%a)" Vars.pp_list l
let pp_strings ppf l =
let pp_list = Fmt.list ~sep:(fun ppf () -> Fmt.pf ppf ",") Fmt.string in
if l <> [] then Fmt.pf ppf "(%a)" pp_list l
let pp_par_choice_f f ppf (k,a) =
Fmt.pf ppf "%d%a" k f a
let pp_sum_choice_f f d ppf (k,a) =
if (k,a) <> d then Fmt.pf ppf "/%d%a" k f a
* [ f d ] is a formatter for [ ' a action ] ,
* relying on the formatter [ f ] for [ ' a ] , and ignoring
* the default sum choice [ d ] .
* relying on the formatter [f] for ['a], and ignoring
* the default sum choice [d]. *)
let pp_action_f f d ppf a =
if a = [] then Fmt.pf ppf "ε"
else
Fmt.list
~sep:(fun fmt () -> Fmt.pf fmt "_")
(fun ppf {par_choice;sum_choice} ->
Fmt.pf ppf "%a%a"
(pp_par_choice_f f) par_choice
(pp_sum_choice_f f d) sum_choice)
ppf
a
let pp_action_structure ppf a =
Printer.kw `GoalAction ppf "%a" (pp_action_f pp_indices (0,[])) a
let pp_shape ppf a = pp_action_f pp_int (0,0) ppf a
let rec subst_action (s : Term.subst) (a : action) : action =
match a with
| [] -> []
| a :: l ->
let p,lp = a.par_choice in
let q,lq = a.sum_choice in
{ par_choice = p, List.map (Term.subst_var s) lp ;
sum_choice = q, List.map (Term.subst_var s) lq }
:: subst_action s l
let of_term (s:Symbols.action) (l:Vars.var list) table : action =
let l',a = of_symbol s table in
let subst =
List.map2 (fun x y -> Term.ESubst (Term.mk_var x,Term.mk_var y)) l' l
in
subst_action subst a
let pp_parsed_action ppf a = pp_action_f pp_strings (0,[]) ppf a
type descr = {
name : Symbols.action ;
action : action ;
input : Channel.t * string ;
indices : Vars.var list ;
condition : Vars.var list * Term.term ;
updates : (Term.state * Term.term) list ;
output : Channel.t * Term.term;
globals : Symbols.macro list;
}
let valid_descr d =
d.indices = get_indices d.action
let subst_descr subst descr =
let action = subst_action subst descr.action in
let subst_term = Term.subst subst in
let indices = Term.subst_vars subst descr.indices in
let condition =
fst descr.condition,
Term.subst subst (snd descr.condition) in
let updates =
List.map (fun (ss,t) ->
Term.subst_isymb subst ss, subst_term t
) descr.updates
in
let output = fst descr.output, subst_term (snd descr.output) in
{ descr with action; indices; condition; updates; output; }
let pp_descr_short ppf descr =
let t = Term.mk_action descr.name descr.indices in
Term.pp ppf t
let pp_descr ~debug ppf descr =
let e = ref (Vars.of_list []) in
let _, s = Term.refresh_vars (`InEnv e) descr.indices in
let descr = if debug then descr else subst_descr s descr in
Fmt.pf ppf "@[<v 0>action name: @[<hov>%a@]@;\
%a\
@[<hv 2>condition:@ @[<hov>%a@]@]@;\
%a\
%a\
@[<hv 2>output:@ @[<hov>%a@]@]@]"
pp_descr_short descr
(Utils.pp_ne_list "@[<hv 2>indices:@ @[<hov>%a@]@]@;" Vars.pp_list)
descr.indices
Term.pp (snd descr.condition)
(Utils.pp_ne_list "@[<hv 2>updates:@ @[<hv>%a@]@]@;"
(Fmt.list
~sep:(fun ppf () -> Fmt.pf ppf ";@ ")
(fun ppf (s, t) ->
Fmt.pf ppf "@[%a :=@ %a@]" Term.pp_msymb s Term.pp t)))
descr.updates
(Utils.pp_ne_list "@[<hv 2>globals:@ @[<hv>%a@]@]@;"
(Fmt.list ~sep:(fun ppf () -> Fmt.pf ppf ";@ ") Symbols.pp))
descr.globals
Term.pp (snd descr.output)
let check_descr (d : descr) : bool =
if d.name = Symbols.init_action then true else
begin
let _, cond = d.condition
and _, outp = d.output in
let dfv = Vars.Sv.of_list d.indices in
Vars.Sv.subset (Term.fv cond) dfv &&
Vars.Sv.subset (Term.fv outp) dfv &&
List.for_all (fun (_, state) ->
Vars.Sv.subset (Term.fv state) dfv
) d.updates
end
let descr_map
(f : Vars.env -> Symbols.macro -> Term.term -> Term.term)
(descr : descr)
: descr
=
let env = Vars.of_list descr.indices in
let f = f env in
let condition =
fst descr.condition,
f Symbols.cond (snd descr.condition)
in
let updates =
List.map (fun (ss,t) -> ss, f ss.Term.s_symb t) descr.updates
in
let output = fst descr.output, f Symbols.out (snd descr.output) in
let descr = { descr with condition; updates; output; } in
assert (check_descr descr);
descr
let refresh_descr descr =
let _, s = Term.refresh_vars `Global descr.indices in
let descr = subst_descr s descr in
assert (check_descr descr);
descr
let project_descr (s : Term.proj) d =
let project1 t = Term.project1 s t in
{ d with
condition = (let is,t = d.condition in is, project1 t);
updates = List.map (fun (st, m) -> st, project1 m) d.updates;
output = (let c,m = d.output in c, project1 m) }
let strongly_compatible_descr d1 d2 =
d1.name = d2.name &&
d1.action = d2.action &&
d1.input = d2.input &&
d1.indices = d2.indices &&
fst d1.condition = fst d2.condition &&
List.map fst d1.updates = List.map fst d2.updates &&
fst d1.output = fst d2.output
let combine_descrs (descrs : (Term.proj * descr) list) : descr =
let (p1,d1),rest =
match descrs with
| hd::tl -> hd,tl
| [] -> raise (Invalid_argument "combine_descrs")
in
let rest =
List.map
(fun (proj,d2) ->
let subst =
List.map2
(fun i j -> Term.ESubst (Term.mk_var i, Term.mk_var j))
d2.indices d1.indices
in
proj, subst_descr subst d2)
rest
in
let descrs = (p1,d1)::rest in
assert (List.for_all (fun (_,d2) -> strongly_compatible_descr d1 d2) rest);
let map f = List.map (fun (lbl,descr) -> (lbl, f descr)) descrs in
{ name = d1.name;
action = d1.action;
input = d1.input;
indices = d1.indices;
condition =
fst d1.condition,
Term.combine (map (fun descr -> snd descr.condition));
updates =
List.map
(fun (st,_) ->
st,
Term.combine (map (fun descr -> List.assoc st descr.updates)))
d1.updates;
output =
fst d1.output,
Term.combine (map (fun descr -> snd descr.output));
globals =
List.sort_uniq Stdlib.compare
(List.concat (List.map (fun (_,d) -> d.globals) descrs)) }
let debug = false
let pp_actions ppf table =
Fmt.pf ppf "@[<v 2>Available action shapes:@;@;@[" ;
let comma = ref false in
iter_table
(fun symbol indices action ->
if !comma then Fmt.pf ppf ",@;" ;
comma := true ;
if debug then
Fmt.pf ppf "%s%a=%a"
(Symbols.to_string symbol)
pp_indices indices
pp_action_structure action
else
Fmt.pf ppf "%s%a"
(Symbols.to_string symbol)
pp_indices indices)
table;
Fmt.pf ppf "@]@]@."
let rec dummy (shape : shape) : action =
match shape with
| [] -> []
| { par_choice = (p,lp) ; sum_choice = (s,ls) } :: l ->
{ par_choice = (p, List.init lp (fun _ -> Vars.make_new Type.Index "i")) ;
sum_choice = (s, List.init ls (fun _ -> Vars.make_new Type.Index "i")) }
:: dummy l
* { 2 Shapes }
module Shape = struct
type t = shape
let pp = pp_shape
let compare (u : t) (v : t) = Stdlib.compare u v
end
* { 2 FA - DUP }
let is_dup_match
(is_match : Term.term -> Term.term -> 'a -> 'a option)
(st : 'a)
(table : Symbols.table)
(elem : Term.term)
(elems : Term.term list) : 'a option
=
let is_dup_leq table st t t' : 'a option =
let rec leq t t' =
match is_match t t' st with
| Some st -> Some st
| None ->
match t,t' with
| Fun (f,_, [t]), Fun (f',_, [t'])
when f = Term.f_pred && f' = Term.f_pred ->
leq t t'
| Fun (f,_, [t]), t' when f = Term.f_pred -> leq t t'
| Action (n,is), Action (n',is') ->
if depends (of_term n is table) (of_term n' is' table)
then Some st
else None
| _ -> None
in
leq t t'
in
let direct_match =
List.find_map (fun t' ->
is_match elem t' st
) elems
in
match direct_match with
| Some res -> Some res
| None ->
match elem with
| Macro (im,[],t) when im = Term.in_macro ->
List.find_map (function
| Term.Macro (fm,[],t') when fm = Term.frame_macro ->
is_dup_leq table st (Term.mk_pred t) t'
| _ -> None
) elems
| Macro (em,[],t) when em = Term.frame_macro ->
List.find_map (function
| Term.Macro (fm,[],t')
when fm = Term.frame_macro -> is_dup_leq table st t t'
| _ -> None
) elems
| Macro (em,[],t) when em = Term.exec_macro ->
List.find_map (function
| Term.Macro (fm,[],t')
when fm = Term.frame_macro -> is_dup_leq table st t t'
| _ -> None
) elems
| _ -> None
let is_dup table t t' : bool =
let is_match t t' () = if t = t' then Some () else None in
match is_dup_match is_match () table t t' with
| None -> false
| Some () -> true
let pp_descr_dbg = pp_descr ~debug:true
let pp_descr = pp_descr ~debug:false
|
f0f060288f5facc6c4ac394c6a53f7c518c3f39c8caf86a73647d2a14aa36576 | mainej/schema-voyager | additional_fields.cljs | (ns schema-voyager.html.components.additional-fields)
(defn additional-fields [fields]
(when (seq fields)
[:dl.divide-y
(for [[field value] (sort-by first fields)]
^{:key field}
[:div.sm:flex.p-4.sm:p-6
[:dt.sm:w-1|3 (pr-str field)]
[:dd (pr-str value)]])]))
| null | https://raw.githubusercontent.com/mainej/schema-voyager/eaf0367ec639f5a2e9238a5b1fbbb2fc2d76d520/src/web/schema_voyager/html/components/additional_fields.cljs | clojure | (ns schema-voyager.html.components.additional-fields)
(defn additional-fields [fields]
(when (seq fields)
[:dl.divide-y
(for [[field value] (sort-by first fields)]
^{:key field}
[:div.sm:flex.p-4.sm:p-6
[:dt.sm:w-1|3 (pr-str field)]
[:dd (pr-str value)]])]))
| |
05f252f7f0a9cf10ca066c4cab67e7335414c0c1ae984896120e3e6b929f9936 | uccmisl/dashc | dashc.ml |
* dashc , client emulator for DASH video streaming
* Copyright ( c ) 2016 - 2018 , , University College Cork
*
* This program is free software ; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation ; either version 2
* of the License , or ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU General Public License for more details .
*
* You should have received a copy of the GNU General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , MA
* 02110 - 1301 , USA .
* dashc, client emulator for DASH video streaming
* Copyright (c) 2016-2018, Aleksandr Reviakin, University College Cork
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*)
open Core
let command =
Command.group ~summary:"Modes: play"
[ "play", Playback.play ]
let () = Command.run command ~version:"0.1.21" ~build_info:"OCaml 4.07.1 was used"
| null | https://raw.githubusercontent.com/uccmisl/dashc/8a97ceb2bdf6a74bde410be9a1d1432d5e11445a/src/dashc.ml | ocaml |
* dashc , client emulator for DASH video streaming
* Copyright ( c ) 2016 - 2018 , , University College Cork
*
* This program is free software ; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation ; either version 2
* of the License , or ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU General Public License for more details .
*
* You should have received a copy of the GNU General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , MA
* 02110 - 1301 , USA .
* dashc, client emulator for DASH video streaming
* Copyright (c) 2016-2018, Aleksandr Reviakin, University College Cork
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*)
open Core
let command =
Command.group ~summary:"Modes: play"
[ "play", Playback.play ]
let () = Command.run command ~version:"0.1.21" ~build_info:"OCaml 4.07.1 was used"
| |
112a8aa32548d2abf7c6730f2ffac845f73e20a0ec1a075b76ca112d1901e40b | awakesecurity/spectacle | Env.hs | {-# OPTIONS_HADDOCK show-extensions #-}
-- |
-- Module : Language.Spectacle.Syntax.Env
Copyright : ( c ) Arista Networks , 2022 - 2023
License : Apache License 2.0 , see LICENSE
--
-- Stability : stable
Portability : non - portable ( GHC extensions )
--
-- TODO: docs
--
-- @since 1.0.0
module Language.Spectacle.Syntax.Env
( Env (Env),
Effect (Get, Put),
get,
gets,
put,
modify,
runEnv,
)
where
import Data.Coerce (coerce)
import Data.Void (absurd)
import Data.Functor.Loom (hoist, runLoom, (~>~))
import Language.Spectacle.Lang (Effect, Lang (Op, Pure, Scoped), Member, decomposeOp, decomposeS, scope)
import Language.Spectacle.RTS.Registers (RuntimeState)
import Language.Spectacle.Syntax.Env.Internal (Effect (Get, Put), Env (Env))
-- ---------------------------------------------------------------------------------------------------------------------
get :: Member Env effs => Lang ctx effs (RuntimeState ctx)
get = scope Get
{-# INLINE get #-}
gets :: Member Env effs => (RuntimeState ctx -> s) -> Lang ctx effs s
gets f = fmap f get
{-# INLINE gets #-}
put :: Member Env effs => RuntimeState ctx -> Lang ctx effs ()
put x = scope (Put x)
# INLINE put #
modify :: Member Env effs => (RuntimeState ctx -> RuntimeState ctx) -> Lang ctx effs ()
modify f = get >>= put . f
# INLINE modify #
runEnv :: RuntimeState ctx -> Lang ctx (Env ': effs) a -> Lang ctx effs (RuntimeState ctx, a)
runEnv st = \case
Pure x -> pure (st, x)
Op op k -> case decomposeOp op of
Left other -> Op other (runEnv st . k)
Right (Env b) -> absurd (coerce b)
Scoped scoped loom -> case decomposeS scoped of
Left other -> Scoped other (loom' st)
Right eff
| Get <- eff -> runLoom (loom' st) (pure st)
| Put st' <- eff -> runLoom (loom' st') (pure ())
where
loom' st' = loom ~>~ hoist (runEnv st')
| null | https://raw.githubusercontent.com/awakesecurity/spectacle/430680c28b26dabb50f466948180eb59ba72fc8e/src/Language/Spectacle/Syntax/Env.hs | haskell | # OPTIONS_HADDOCK show-extensions #
|
Module : Language.Spectacle.Syntax.Env
Stability : stable
TODO: docs
@since 1.0.0
---------------------------------------------------------------------------------------------------------------------
# INLINE get #
# INLINE gets # |
Copyright : ( c ) Arista Networks , 2022 - 2023
License : Apache License 2.0 , see LICENSE
Portability : non - portable ( GHC extensions )
module Language.Spectacle.Syntax.Env
( Env (Env),
Effect (Get, Put),
get,
gets,
put,
modify,
runEnv,
)
where
import Data.Coerce (coerce)
import Data.Void (absurd)
import Data.Functor.Loom (hoist, runLoom, (~>~))
import Language.Spectacle.Lang (Effect, Lang (Op, Pure, Scoped), Member, decomposeOp, decomposeS, scope)
import Language.Spectacle.RTS.Registers (RuntimeState)
import Language.Spectacle.Syntax.Env.Internal (Effect (Get, Put), Env (Env))
get :: Member Env effs => Lang ctx effs (RuntimeState ctx)
get = scope Get
gets :: Member Env effs => (RuntimeState ctx -> s) -> Lang ctx effs s
gets f = fmap f get
put :: Member Env effs => RuntimeState ctx -> Lang ctx effs ()
put x = scope (Put x)
# INLINE put #
modify :: Member Env effs => (RuntimeState ctx -> RuntimeState ctx) -> Lang ctx effs ()
modify f = get >>= put . f
# INLINE modify #
runEnv :: RuntimeState ctx -> Lang ctx (Env ': effs) a -> Lang ctx effs (RuntimeState ctx, a)
runEnv st = \case
Pure x -> pure (st, x)
Op op k -> case decomposeOp op of
Left other -> Op other (runEnv st . k)
Right (Env b) -> absurd (coerce b)
Scoped scoped loom -> case decomposeS scoped of
Left other -> Scoped other (loom' st)
Right eff
| Get <- eff -> runLoom (loom' st) (pure st)
| Put st' <- eff -> runLoom (loom' st') (pure ())
where
loom' st' = loom ~>~ hoist (runEnv st')
|
6368e92f27734e11519a80193d85610a54e6fbaee4df374cdf8806035b46b7d1 | kingcons/cl-6502 | disassemble.lisp | (in-package :6502)
(defmacro with-disasm ((start end &key op) &body body)
"Loop from START to END, passing each instruction to OP and execute BODY.
OP is PRINT-INSTRUCTION by default. Within BODY, the return value of OP is
bound to RESULT and the length of the instruction in bytes is bound to STEP."
`(loop with index = ,start while (<= index ,end)
for (step result) = (disasm-ins index ,@(when op (list op)))
do (incf index step) ,@body))
(defun disasm (start end)
"Disassemble memory from START to END."
(with-disasm (start end)))
(defun disasm-to-list (start end)
"Disassemble a given region of memory into a sexp-based format."
(with-disasm (start end :op #'sexpify-instruction) collect result))
(defun disasm-to-str (start end)
"Call DISASM with the provided args and return its output as a string."
(with-output-to-string (*standard-output*) (disasm start end)))
(defun disasm-ins (index &optional (disasm-op #'print-instruction))
"Lookup the metadata for the instruction at INDEX and pass it to
DISASM-OP for formatting and display, returning the instruction length."
(destructuring-bind (name docs cycles bytes mode)
(aref *opcode-meta* (get-byte index))
(declare (ignore cycles))
(let ((code-block (coerce (get-range index (+ index bytes)) 'list)))
(list bytes (funcall disasm-op code-block index name docs mode)))))
(defun print-instruction (bytes index name docs mode)
"Format the instruction at INDEX and its operands for display."
(let ((byte-str (format nil "~{~2,'0x ~}" bytes))
(args-str (format nil "~A ~A" name (arg-formatter (rest bytes) mode))))
(format t "$~4,'0x ~9A ;; ~14A ~A~%" index byte-str args-str docs)))
(defun sexpify-instruction (bytes index name docs mode)
"Given BYTES and metadata, return a sexp-format representation of it."
(declare (ignore index docs))
(alexandria:if-let ((args (rest bytes))
(args-str (bytes-to-keyword-syntax bytes mode)))
(mapcar #'make-keyword (list name args-str))
(mapcar #'make-keyword (list name))))
(defun arg-formatter (arg mode)
"Given an instruction's ARG, format it for display using the MODE's WRITER."
(if (member mode '(absolute absolute-x absolute-y indirect))
(format nil (writer mode) (reverse arg))
(format nil (writer mode) arg)))
(defun bytes-to-keyword-syntax (bytes mode)
"Take BYTES and a MODE and return our assembly representation of the arguments."
(let ((result (arg-formatter (rest bytes) mode)))
(flet ((munge-indirect (str)
(cl-ppcre:regex-replace "\\(\\$(.*)\\)(.*)?" str "@\\1\\2")))
(cl-ppcre:regex-replace ", " (munge-indirect result) "."))))
(defun current-instruction (cpu &optional print-p)
"Return a list representing the current instruction. If PRINT-P is non-nil,
print the current address and instruction and return NIL."
(let ((fn (if print-p #'print-instruction #'sexpify-instruction)))
(second (disasm-ins (cpu-pc cpu) fn))))
| null | https://raw.githubusercontent.com/kingcons/cl-6502/0fbce82c55a63df0ceaf17d6e3d952219514a949/src/disassemble.lisp | lisp | (in-package :6502)
(defmacro with-disasm ((start end &key op) &body body)
"Loop from START to END, passing each instruction to OP and execute BODY.
OP is PRINT-INSTRUCTION by default. Within BODY, the return value of OP is
bound to RESULT and the length of the instruction in bytes is bound to STEP."
`(loop with index = ,start while (<= index ,end)
for (step result) = (disasm-ins index ,@(when op (list op)))
do (incf index step) ,@body))
(defun disasm (start end)
"Disassemble memory from START to END."
(with-disasm (start end)))
(defun disasm-to-list (start end)
"Disassemble a given region of memory into a sexp-based format."
(with-disasm (start end :op #'sexpify-instruction) collect result))
(defun disasm-to-str (start end)
"Call DISASM with the provided args and return its output as a string."
(with-output-to-string (*standard-output*) (disasm start end)))
(defun disasm-ins (index &optional (disasm-op #'print-instruction))
"Lookup the metadata for the instruction at INDEX and pass it to
DISASM-OP for formatting and display, returning the instruction length."
(destructuring-bind (name docs cycles bytes mode)
(aref *opcode-meta* (get-byte index))
(declare (ignore cycles))
(let ((code-block (coerce (get-range index (+ index bytes)) 'list)))
(list bytes (funcall disasm-op code-block index name docs mode)))))
(defun print-instruction (bytes index name docs mode)
"Format the instruction at INDEX and its operands for display."
(let ((byte-str (format nil "~{~2,'0x ~}" bytes))
(args-str (format nil "~A ~A" name (arg-formatter (rest bytes) mode))))
(format t "$~4,'0x ~9A ;; ~14A ~A~%" index byte-str args-str docs)))
(defun sexpify-instruction (bytes index name docs mode)
"Given BYTES and metadata, return a sexp-format representation of it."
(declare (ignore index docs))
(alexandria:if-let ((args (rest bytes))
(args-str (bytes-to-keyword-syntax bytes mode)))
(mapcar #'make-keyword (list name args-str))
(mapcar #'make-keyword (list name))))
(defun arg-formatter (arg mode)
"Given an instruction's ARG, format it for display using the MODE's WRITER."
(if (member mode '(absolute absolute-x absolute-y indirect))
(format nil (writer mode) (reverse arg))
(format nil (writer mode) arg)))
(defun bytes-to-keyword-syntax (bytes mode)
"Take BYTES and a MODE and return our assembly representation of the arguments."
(let ((result (arg-formatter (rest bytes) mode)))
(flet ((munge-indirect (str)
(cl-ppcre:regex-replace "\\(\\$(.*)\\)(.*)?" str "@\\1\\2")))
(cl-ppcre:regex-replace ", " (munge-indirect result) "."))))
(defun current-instruction (cpu &optional print-p)
"Return a list representing the current instruction. If PRINT-P is non-nil,
print the current address and instruction and return NIL."
(let ((fn (if print-p #'print-instruction #'sexpify-instruction)))
(second (disasm-ins (cpu-pc cpu) fn))))
| |
ec390071316c076b703967ba75ccfcefdc3fef7e09d28cdc4595b9d05f59318c | braidchat/braid | notify_rules.clj | (ns braid.core.server.notify-rules
(:require
[clojure.set :as set]
[braid.chat.db.tag :as tag]
[braid.chat.db.thread :as thread]
[braid.core.common.util :as util]
[braid.core.common.schema :as schema]))
(defn tag->group
[tag-id]
(tag/tag-group-id tag-id))
(defn thread->tags
[thread-id]
(:tag-ids (thread/thread-by-id thread-id)))
(defn thread->groups [thread-id]
(into #{} (map tag->group) (thread->tags thread-id)))
(defn notify?
[user-id rules new-message]
(assert (util/valid? schema/NotifyRules rules))
(assert (util/valid? schema/NewMessage new-message))
(let [{:keys [tag mention any]
:or {tag #{} mention #{} any #{}}}
(->> (group-by first rules)
(into {}
(map (fn [[k v]]
[k (into #{} (map second) v)]))))]
; notify if...
(or (any :any) ; ...you want to be notified by anything in any group
; ...or by a tag that this thread has
(seq (set/intersection
(set (new-message :mentioned-tag-ids))
tag))
(let [groups (thread->groups (new-message :thread-id))]
(or ; ...or by anything in this group
(seq (set/intersection groups any))
; ...or by a mention...
(and (seq mention) ((set (new-message :mentioned-user-ids)) user-id)
(or (mention :any) ; in any group
; or this group
(seq (set/intersection
groups
mention)))))))))
| null | https://raw.githubusercontent.com/braidchat/braid/2e44eb6e77f1d203115f9b9c529bd865fa3d7302/src/braid/core/server/notify_rules.clj | clojure | notify if...
...you want to be notified by anything in any group
...or by a tag that this thread has
...or by anything in this group
...or by a mention...
in any group
or this group | (ns braid.core.server.notify-rules
(:require
[clojure.set :as set]
[braid.chat.db.tag :as tag]
[braid.chat.db.thread :as thread]
[braid.core.common.util :as util]
[braid.core.common.schema :as schema]))
(defn tag->group
[tag-id]
(tag/tag-group-id tag-id))
(defn thread->tags
[thread-id]
(:tag-ids (thread/thread-by-id thread-id)))
(defn thread->groups [thread-id]
(into #{} (map tag->group) (thread->tags thread-id)))
(defn notify?
[user-id rules new-message]
(assert (util/valid? schema/NotifyRules rules))
(assert (util/valid? schema/NewMessage new-message))
(let [{:keys [tag mention any]
:or {tag #{} mention #{} any #{}}}
(->> (group-by first rules)
(into {}
(map (fn [[k v]]
[k (into #{} (map second) v)]))))]
(seq (set/intersection
(set (new-message :mentioned-tag-ids))
tag))
(let [groups (thread->groups (new-message :thread-id))]
(seq (set/intersection groups any))
(and (seq mention) ((set (new-message :mentioned-user-ids)) user-id)
(seq (set/intersection
groups
mention)))))))))
|
11af79c7e92f097d122edfe1d277bb683614ce7c9d47a29e4111e2a5cef55366 | backtracking/cfrac | bench.ml |
open Format
open Cfrac
open Unix
let start, stop =
let t = ref 0. in
(fun () -> t := (times()).tms_utime),
(fun () -> printf "=> %f s@." ((times()).tms_utime -. !t))
let test ?(n=1_000_000) msg x =
printf "%s:@." msg;
printf " to_float: @?";
start ();
for _ = 1 to n do ignore (to_float x) done;
stop ();
printf " to_float2 : @ ? " ;
* start ( ) ;
* for _ = 1 to n do ignore ( to_float2 x ) done ;
* stop ( ) ;
* printf " to_float12 : @ ? " ;
* start ( ) ;
* for _ = 1 to n do ignore ( to_float12 x ) done ;
* stop ( ) ;
* start ();
* for _ = 1 to n do ignore (to_float2 x) done;
* stop ();
* printf " to_float12: @?";
* start ();
* for _ = 1 to n do ignore (to_float12 x) done;
* stop (); *)
()
let () =
test "phi" phi;
test "pi" pi;
test "e" e;
test "sqrt(2)" sqrt2;
test "sqrt(3)" sqrt3;
test "1/phi" (memo (inv phi));
printf "10000 decimals of pi: @?";
start ();
let b = Buffer.create 10000 in
let fmt = formatter_of_buffer b in
ignore (fprintf fmt "%a@." (print_decimals ~prec:10000) pi);
stop ();
()
| null | https://raw.githubusercontent.com/backtracking/cfrac/3be4cff8a2372985fa4f17bf0bbe907993c4a88b/bench.ml | ocaml |
open Format
open Cfrac
open Unix
let start, stop =
let t = ref 0. in
(fun () -> t := (times()).tms_utime),
(fun () -> printf "=> %f s@." ((times()).tms_utime -. !t))
let test ?(n=1_000_000) msg x =
printf "%s:@." msg;
printf " to_float: @?";
start ();
for _ = 1 to n do ignore (to_float x) done;
stop ();
printf " to_float2 : @ ? " ;
* start ( ) ;
* for _ = 1 to n do ignore ( to_float2 x ) done ;
* stop ( ) ;
* printf " to_float12 : @ ? " ;
* start ( ) ;
* for _ = 1 to n do ignore ( to_float12 x ) done ;
* stop ( ) ;
* start ();
* for _ = 1 to n do ignore (to_float2 x) done;
* stop ();
* printf " to_float12: @?";
* start ();
* for _ = 1 to n do ignore (to_float12 x) done;
* stop (); *)
()
let () =
test "phi" phi;
test "pi" pi;
test "e" e;
test "sqrt(2)" sqrt2;
test "sqrt(3)" sqrt3;
test "1/phi" (memo (inv phi));
printf "10000 decimals of pi: @?";
start ();
let b = Buffer.create 10000 in
let fmt = formatter_of_buffer b in
ignore (fprintf fmt "%a@." (print_decimals ~prec:10000) pi);
stop ();
()
| |
670c2815c1d7677234033ddc78c879829a5dc44e1271ddf549543d946e423f49 | RyanMcG/Cadence | project.clj | (defproject cadence "0.4.3-SNAPSHOT"
:description "Use pattern recognition to match users with Cadence.js output."
:url "/"
:min-lein-version "2.0.0"
:license {:name "Eclipse Public License"
:url "-v10.html"}
:profiles {:dev {:marginalia {:css ["/docs/marginalia.css"]}}
:production {:offline true
:mirrors {#"central|clojars"
""}}}
:dependencies [[org.clojure/clojure "1.5.1"]
[com.cemerick/friend "0.1.5"]
[lib-noir "0.4.6"]
[dieter "0.3.0"]
[ragtime/ragtime.core "0.3.2"]
[org.clojars.ryanmcg/ring-anti-forgery "0.3.1-SNAPSHOT"]
[compojure "1.1.3"]
[hiccup "1.0.2"]
[http-kit "2.0.0"]
[bultitude "0.1.7"]
[com.cemerick/drawbridge "0.0.6"]
[net.tanesha.recaptcha4j/recaptcha4j "0.0.8"]
[com.novemberain/monger "1.5.0"]
[amalloy/ring-gzip-middleware "0.1.1"]
[ring-middleware-format "0.1.1"]
[com.leadtune/clj-ml "0.2.4"]]
:main cadence.server)
| null | https://raw.githubusercontent.com/RyanMcG/Cadence/c7364cba7e2de48c8a0b90f0f4d16a8248c097d4/project.clj | clojure | (defproject cadence "0.4.3-SNAPSHOT"
:description "Use pattern recognition to match users with Cadence.js output."
:url "/"
:min-lein-version "2.0.0"
:license {:name "Eclipse Public License"
:url "-v10.html"}
:profiles {:dev {:marginalia {:css ["/docs/marginalia.css"]}}
:production {:offline true
:mirrors {#"central|clojars"
""}}}
:dependencies [[org.clojure/clojure "1.5.1"]
[com.cemerick/friend "0.1.5"]
[lib-noir "0.4.6"]
[dieter "0.3.0"]
[ragtime/ragtime.core "0.3.2"]
[org.clojars.ryanmcg/ring-anti-forgery "0.3.1-SNAPSHOT"]
[compojure "1.1.3"]
[hiccup "1.0.2"]
[http-kit "2.0.0"]
[bultitude "0.1.7"]
[com.cemerick/drawbridge "0.0.6"]
[net.tanesha.recaptcha4j/recaptcha4j "0.0.8"]
[com.novemberain/monger "1.5.0"]
[amalloy/ring-gzip-middleware "0.1.1"]
[ring-middleware-format "0.1.1"]
[com.leadtune/clj-ml "0.2.4"]]
:main cadence.server)
| |
8c24bd03a2af403da47414b7703397c2153e2015b84f3ff829aea60135495c1d | k-stz/cl-ptrace | proc-pid-dir.lisp | (in-package :cl-ptrace)
;;; /proc/<pid>/maps and
;;; /proc/<pid>/mem operations
(defun get-maps-path (&optional (pid *pid*))
(concatenate 'string
"/proc/"
(format nil "~a" pid)
"/maps"))
(defun parse-proc-pid-maps (&optional (pid *pid*) (parse-this-file-instead nil))
"Return a list of plists with GETFable columns of /proc/pid/maps"
(let (maps-line-strings
(file-to-parse
(if parse-this-file-instead
parse-this-file-instead
(get-maps-path pid))))
(setf maps-line-strings
(with-open-file (maps-stream file-to-parse :direction :input)
;; condition of type END-OF-FILE
(loop for text = (read-line maps-stream nil nil)
while text ;; nil
collect text)))
(loop for line in maps-line-strings collect
(with-input-from-string (string-stream line)
(destructuring-bind (address-range permissions offset dev inode &optional pathname)
(loop for i from 1 to 6
:if (< i 6)
:collect (read-word-to-string string-stream)
:else
;; quick hack " /some/path/etc" -> "/some/path/etc
:collect (remove #\Space (read-line string-stream nil nil)))
(list :address-range address-range
:permission permissions
:offset offset
:dev dev
:inode inode
:pathname pathname))))))
(defun permission-readable? (proc-pid-maps-line)
"Takes a string like 'rw-p' and returns true if 'r' is set "
(let ((permission-string (getf proc-pid-maps-line :permission)))
(char= #\r (aref permission-string 0))))
(defun permission-private? (proc-pid-maps-line)
"Takes a string like 'rw-p' and returns true if 'r' is set "
(let ((permission-string (getf proc-pid-maps-line :permission)))
(char= #\p (aref permission-string 3))))
(defun has-pathname? (proc-pid-maps-line)
"Returns the pathname of the parsed pid-maps-line, or if there is none, NIL."
(getf proc-pid-maps-line :pathname))
(defun get-heap-address-range (&optional (pid *pid*))
"Get the limits of the heap for the process referred to by `*pid*'. The end address is NOT inclusive,
and should not be read from."
(loop for line in (parse-proc-pid-maps pid) :do
(when
(string= "[heap]"
(getf line :pathname))
(return (progn
(hex-print
(address-range-list line))
(address-range-list line))))
:finally (error "Process maps file has no [heap] entry. PID: ~a" pid)))
;; Takes the output from `parse-proc-pid-maps' and creates a
;; list of memory regions that are all readable
TODO find more criteria for ' useless ' memory regions , it seems that when
;; they have a :dev entry or :pathname they're probably loaded from somewhere other than
;; the binary for example (just a some driver or static library)
(defun get-readable-memory-regions (proc-pid-maps-string-list &optional (without-pathname? nil) (without-heap? nil))
"Return a list of all readable address ranges from a parsed /proc/pid/maps
file. `proc-pid-maps-string-list' should be the output of `parse-proc-pid-maps'"
(loop for line in proc-pid-maps-string-list
when (and (permission-readable? line)
(permission-private? line)
(if without-pathname?
(if (stringp (getf line :pathname))
nil
t)
t)
(if without-heap?
(if (string= "[heap]" (getf line :pathname))
nil
t)
t))
collect (address-range-list line)))
(defun get-readable-non-pathname-list (proc-pid-maps-string-list)
(loop for line in proc-pid-maps-string-list
when (and (permission-readable? line)
(not (has-pathname? line)))
collect line))
(defun address-range-list (proc-pid-maps-line)
"Returns an address-range object representing the /proc/<pid>/maps/ entry, the seconds
address is NOT inclusive and should not be read from."
(let ((address-range (getf proc-pid-maps-line :address-range))
start-address
end-address)
(multiple-value-bind (left-address index-end) (parse-integer address-range
:radix 16
:junk-allowed t)
(setf start-address left-address)
(setf end-address
(parse-integer
1 + is starting the sub - string after the hyphen
0400000 - 50000
^ this hyphen , after the ' 040000 ' part has been parsed
(subseq address-range (1+ index-end))
:radix 16)))
(list start-address end-address)))
(defun address-range-length (address-range)
"Return the number of addresses in given `address-range'"
(abs (- (first address-range)
(second address-range))))
(defun read-word-to-string (stream)
(let ((char-list '()))
(loop
for char = (read-char stream nil nil) do
(cond ((null char)
(return))
((or (char= char #\Tab)
(char= char #\Space))
(return))
(t
(push char char-list))))
(coerce (reverse char-list) 'string)))
;;; /end /proc/<pid>/maps operations
(defun get-mem-path (&optional (pid *pid*))
"Return the path to /proc/<pid>/mem"
(concatenate 'string
"/proc/"
(format nil "~a" pid)
"/mem"))
_ Do n't use this in a loop _ , it is by a factor of 50 slower than a PEEKDATA call the only
;; advantage left to use this one is that we don't need to attach to a process thread
;; prior to using it.
(defun read-proc-mem-byte (address &key (bytes 1) (pid *pid*) (hex-print? t))
"Reads `address' from pid memory directly from /proc/pid/mem.
This opens and closes the stream on each invocation, making it useful to inspect actual
current value under `address'"
;; Hack: Even though we just read from memory, if we don't make it an IO-Stream READ-BYTE
will raise an error I / O - Error when reading memory address 512 byte before the end of
;; a readable memory region...
(with-open-file (str (get-mem-path pid) :element-type '(unsigned-byte 8) :direction :io
:if-exists :append)
(file-position str address)
(let ((byte
(read-byte str t)))
(when hex-print?
(hex-print byte))
byte)))
(defun n-read-proc-mem-bytes-list (address &key (bytes 1) (pid *pid*))
"Read `bytes' amount under `address' from process memory and return a list in of the
bytes in a address-ascending order."
(with-open-file (str (get-mem-path pid) :element-type '(unsigned-byte 8) :direction :io
:if-exists :append)
(file-position str address)
(loop for address from address below (+ address bytes)
;; TODO: what happens if we read outside of memory segment, do we want to
;; check for that here?
:collect (read-byte str t))))
;; once this works, replace other read-proc-* functions
(defun read-mem (address &optional (bytes 8) (pid *pid*))
"Read `bytes' amount under `address' from process memory and return a `memory-array'
representation. Can be used without attaching or stopping the target process referred to
by `pid'."
;; TODO: if :direction is not only :input will this hinder parallelization?
(with-open-file (str (get-mem-path pid) :element-type '(unsigned-byte 8) :direction :io
:if-exists :append)
(file-position str address)
(make-mem-array
(loop for address from address below (+ address bytes)
:collect (read-byte str t))
address)))
;; TODO: allow writing half-bytes as well?
currently writing " f " will write = > " 0f " to memory !
;; could be done by using already a mask?
(defun write-mem (address value &key (pid *pid*))
"Write the `value' given to the process memory starting from `address' in
sequential order.
`value' will be internally treated as input to a `memory-array' (make-mem-array ..) such that:
`value' can be represented as an integer (write only the bytes needed to represent it),
hex-string (allows for leading zeros) or a byte-sequence like #(32 172) and '(255 312).
Returns a `memory-array' representing the newly changed memory."
(let ((value-byte-array
(get-byte-array
(make-mem-array value nil))))
(with-open-file (str (get-mem-path pid) :element-type '(unsigned-byte 8) :direction :output
:if-exists :append)
(file-position str address)
(write-sequence value-byte-array str
:end (length value-byte-array)))
(read-mem address (max (length value-byte-array) 8))))
(defun n-write-proc-mem-bytes-list (address byte-list &key (pid *pid*))
"Write bytes in the byte-list given to the process memory starting from `address' in
sequential order."
(with-open-file (str (get-mem-path pid) :element-type '(unsigned-byte 8) :direction :output
:if-exists :append)
(file-position str address)
(write-sequence byte-list str
:end (length byte-list))))
(defun n-read-proc-mem (address &optional (bytes 8) (pid *pid*))
(byte-list->number
(n-read-proc-mem-bytes-list address :bytes bytes :pid pid)))
(defun read-proc-mem-word (address &optional (offset 0) (hex-print? t) (pid *pid*))
(let* ((integer-word
(n-read-proc-mem (+ offset address) 8 pid)))
(when hex-print?
(hex-print integer-word t t))
integer-word))
use this with Disassembly !
;; behaves same as above... TODO
(defun byte-list->number (byte-list)
"Converts a list of bytes like (255 255 40 77 46 41 0 96), to
an integer of those 8 bytes, in this example: #x6000292e4d28ffff"
(apply #'+
(loop for index from 0 below (length byte-list)
for byte in byte-list
:collect
(ash byte (* 8 index)))))
(defun byte-list= (byte-list1 byte-list2)
(unless (= (length byte-list1) (length byte-list2))
(error "Byte lists don't have same length: ~a ~a" byte-list1 byte-list2))
(loop
:for byte1 in byte-list1
:for byte2 in byte-list2
:always (= byte1 byte2)))
(defun integer->byte-list (integer)
"Converts a integer into a list of bytes, in this case: Example #x6000292e4d28ffff
=> (255 255 40 77 46 41 0 96)\""
(let ((number-byte-length (integer-byte-length integer)))
(loop for byte from 0 below number-byte-length
:collect
(ldb (byte 8 (* byte 8)) integer))))
(defun hex-string->byte-list (hex-string)
"Translates a string like \"0001ff02\" or \"#x0001ff02\" to the byte-list
(2 1 255 0)."
(unless (stringp hex-string)
(error "~a is not a string." hex-string))
(flet ((sanitize-hex-string (hex-string)
(if (>= (length hex-string) 2)
(if (string= "#x" (string-downcase (subseq hex-string 0 2)))
cut own the # x or # X from the beginning of string
(subseq hex-string 2)
hex-string)
hex-string)))
(let*
((sanitized-hex-input (sanitize-hex-string hex-string))
(hex-list (split-sequence-backwards-by-n sanitized-hex-input 2)))
(mapcar (lambda (hex)
(parse-integer hex :radix 16))
hex-list))))
(defun pad-byte-list (byte-list padding-length)
(let ((pad-diff (- padding-length (length byte-list))))
(if (plusp pad-diff)
(append byte-list (make-list pad-diff :initial-element 0))
byte-list)))
(defun get-byte (number byte)
(ldb (byte 8 (* byte 8)) number))
#+sbcl
(defun ascii-string->integer (string)
(let ((byte-list
(loop for char across string
:collect (char-code char))))
(byte-list->number byte-list)))
TODO use flexi - streams
#+sbcl
(defun integer->ascii-string (integer)
(let* ((byte-length (integer-byte-length integer))
(string (make-array byte-length :element-type 'character)))
(loop for i from 0 below byte-length :do
(setf (aref string i)
(code-char (ldb (byte 8 (* i 8)) integer))))
string))
(defun write-proc-mem-byte (address new-byte &key (pid *pid*))
"Careful, this writes a `new-byte' to the process memory address of the process
designated by `pid'.
This can be used to write to any process memory, without having to trace or even
SIGSTOP it."
(with-open-file (str (get-mem-path pid) :element-type '(unsigned-byte 8) :direction :output
:if-exists :append)
(file-position str address)
(write-byte new-byte str)))
TODO still does n't write # x00ab < - leading zeros , because of ( integer - byte - length .. )
;; use. Given that we pass it a hex representation like #x00ab, either this is solved
;; via macro, another option word, another input type (string?) or get in the habit
of writing some non zero byte before it .
(defun write-proc-mem-word (address new-word &key (pid *pid*) (write-full-word? nil)
(hex-print? t))
"Writes the `new-word' to address, use `write-full-word?' to always write 8 bytes
regardless of leading zeros. Such that an new-word=#xabcd will write #x0000000000abcd,
instead of just #abcd and leaving the leading bytes as they where."
(with-open-file (str (get-mem-path pid) :element-type '(unsigned-byte 8) :direction :output
:if-exists :append)
(file-position str address)
(let ((byte-list (integer->byte-list new-word)))
(when write-full-word?
;; fill in byte-list with 0's, to match word-length
(setf byte-list
(append byte-list
(make-list (- 8 (length byte-list)) :initial-element 0))))
(n-write-proc-mem-bytes-list address byte-list :pid pid)))
(when hex-print?
(read-proc-mem-word address 0 pid)))
(defun rw-proc-mem-word (address &optional (offset 0) (rw-mode :r) new-word (pid *pid*))
"Read or write word to memory. Using the `rw-mode' keyword switches :r = read, :w =
write, and :wf = write the full word.
Used to read through the memory interactively with the offset using the :r keyword, and
then when the memory address of interest was found, overwrite it by simply switching to
:w (write just the provided bytes) or :wf (always write the full word, with leading zeros
if needed) and provide the `new-word' to overwrite it."
(if (or (eq rw-mode :w) (eq rw-mode :wf))
(when (null new-word)
(error "rw-mode is :w or :wf but no new-word to write
provided. `new-word' is: ~a" new-word)))
(case rw-mode
(:r (read-proc-mem-word address offset t pid))
(:w (write-proc-mem-word (+ address offset) new-word :pid pid))
(:wf (write-proc-mem-word (+ address offset) new-word :write-full-word? t :pid pid))))
(defun print-proc-mem-table (&key address-list address-range (number-of-rows 30) (spacing 1) (pid *pid*))
"Print Process memory addresses in a table. If `address-range' is provided it is used instead of
the address-list."
(format t "***PID: ~6a ~3a ~3a***~%"
pid number-of-rows spacing)
(let ((row 1)
(spaces (make-string spacing :initial-element #\Space)))
(flet ((flet-print-memory (address)
(format t "~(~2x~)" (read-proc-mem-byte address :pid pid :hex-print? nil))
(format t "~a" spaces)
(when (= row number-of-rows)
(terpri) ;; new-line
(setf row 0))
(incf row)))
(if (not address-range)
;; address-list
(loop for address in address-list
:do
(flet-print-memory address))
;; address-range
(loop for address from (first address-range) to (second address-range)
:do
(flet-print-memory address)))))
(terpri))
(defun find-address-region-maps-entry (address &optional (pid *pid*))
(flet ((address-in-address-region? (address-region)
(<= (first address-region) address (second address-region))))
(loop for line in (parse-proc-pid-maps pid)
:when
(address-in-address-region?
(address-range-list line))
:do (return line))))
(defun find-address-region (address address-region-list)
"Return the address-region where `address' is contained."
(loop for address-region in address-region-list
for from-address = (first address-region)
for to-address = (second address-region)
:when (<= from-address address to-address)
:do (return address-region)))
| null | https://raw.githubusercontent.com/k-stz/cl-ptrace/10b95e226d21e0059e0ce0ff6130895522ec64b4/cl-ptrace/proc-pid-dir.lisp | lisp | /proc/<pid>/maps and
/proc/<pid>/mem operations
condition of type END-OF-FILE
nil
quick hack " /some/path/etc" -> "/some/path/etc
Takes the output from `parse-proc-pid-maps' and creates a
list of memory regions that are all readable
they have a :dev entry or :pathname they're probably loaded from somewhere other than
the binary for example (just a some driver or static library)
/end /proc/<pid>/maps operations
advantage left to use this one is that we don't need to attach to a process thread
prior to using it.
Hack: Even though we just read from memory, if we don't make it an IO-Stream READ-BYTE
a readable memory region...
TODO: what happens if we read outside of memory segment, do we want to
check for that here?
once this works, replace other read-proc-* functions
TODO: if :direction is not only :input will this hinder parallelization?
TODO: allow writing half-bytes as well?
could be done by using already a mask?
behaves same as above... TODO
use. Given that we pass it a hex representation like #x00ab, either this is solved
via macro, another option word, another input type (string?) or get in the habit
fill in byte-list with 0's, to match word-length
new-line
address-list
address-range | (in-package :cl-ptrace)
(defun get-maps-path (&optional (pid *pid*))
(concatenate 'string
"/proc/"
(format nil "~a" pid)
"/maps"))
(defun parse-proc-pid-maps (&optional (pid *pid*) (parse-this-file-instead nil))
"Return a list of plists with GETFable columns of /proc/pid/maps"
(let (maps-line-strings
(file-to-parse
(if parse-this-file-instead
parse-this-file-instead
(get-maps-path pid))))
(setf maps-line-strings
(with-open-file (maps-stream file-to-parse :direction :input)
(loop for text = (read-line maps-stream nil nil)
collect text)))
(loop for line in maps-line-strings collect
(with-input-from-string (string-stream line)
(destructuring-bind (address-range permissions offset dev inode &optional pathname)
(loop for i from 1 to 6
:if (< i 6)
:collect (read-word-to-string string-stream)
:else
:collect (remove #\Space (read-line string-stream nil nil)))
(list :address-range address-range
:permission permissions
:offset offset
:dev dev
:inode inode
:pathname pathname))))))
(defun permission-readable? (proc-pid-maps-line)
"Takes a string like 'rw-p' and returns true if 'r' is set "
(let ((permission-string (getf proc-pid-maps-line :permission)))
(char= #\r (aref permission-string 0))))
(defun permission-private? (proc-pid-maps-line)
"Takes a string like 'rw-p' and returns true if 'r' is set "
(let ((permission-string (getf proc-pid-maps-line :permission)))
(char= #\p (aref permission-string 3))))
(defun has-pathname? (proc-pid-maps-line)
"Returns the pathname of the parsed pid-maps-line, or if there is none, NIL."
(getf proc-pid-maps-line :pathname))
(defun get-heap-address-range (&optional (pid *pid*))
"Get the limits of the heap for the process referred to by `*pid*'. The end address is NOT inclusive,
and should not be read from."
(loop for line in (parse-proc-pid-maps pid) :do
(when
(string= "[heap]"
(getf line :pathname))
(return (progn
(hex-print
(address-range-list line))
(address-range-list line))))
:finally (error "Process maps file has no [heap] entry. PID: ~a" pid)))
TODO find more criteria for ' useless ' memory regions , it seems that when
(defun get-readable-memory-regions (proc-pid-maps-string-list &optional (without-pathname? nil) (without-heap? nil))
"Return a list of all readable address ranges from a parsed /proc/pid/maps
file. `proc-pid-maps-string-list' should be the output of `parse-proc-pid-maps'"
(loop for line in proc-pid-maps-string-list
when (and (permission-readable? line)
(permission-private? line)
(if without-pathname?
(if (stringp (getf line :pathname))
nil
t)
t)
(if without-heap?
(if (string= "[heap]" (getf line :pathname))
nil
t)
t))
collect (address-range-list line)))
(defun get-readable-non-pathname-list (proc-pid-maps-string-list)
(loop for line in proc-pid-maps-string-list
when (and (permission-readable? line)
(not (has-pathname? line)))
collect line))
(defun address-range-list (proc-pid-maps-line)
"Returns an address-range object representing the /proc/<pid>/maps/ entry, the seconds
address is NOT inclusive and should not be read from."
(let ((address-range (getf proc-pid-maps-line :address-range))
start-address
end-address)
(multiple-value-bind (left-address index-end) (parse-integer address-range
:radix 16
:junk-allowed t)
(setf start-address left-address)
(setf end-address
(parse-integer
1 + is starting the sub - string after the hyphen
0400000 - 50000
^ this hyphen , after the ' 040000 ' part has been parsed
(subseq address-range (1+ index-end))
:radix 16)))
(list start-address end-address)))
(defun address-range-length (address-range)
"Return the number of addresses in given `address-range'"
(abs (- (first address-range)
(second address-range))))
(defun read-word-to-string (stream)
(let ((char-list '()))
(loop
for char = (read-char stream nil nil) do
(cond ((null char)
(return))
((or (char= char #\Tab)
(char= char #\Space))
(return))
(t
(push char char-list))))
(coerce (reverse char-list) 'string)))
(defun get-mem-path (&optional (pid *pid*))
"Return the path to /proc/<pid>/mem"
(concatenate 'string
"/proc/"
(format nil "~a" pid)
"/mem"))
_ Do n't use this in a loop _ , it is by a factor of 50 slower than a PEEKDATA call the only
(defun read-proc-mem-byte (address &key (bytes 1) (pid *pid*) (hex-print? t))
"Reads `address' from pid memory directly from /proc/pid/mem.
This opens and closes the stream on each invocation, making it useful to inspect actual
current value under `address'"
will raise an error I / O - Error when reading memory address 512 byte before the end of
(with-open-file (str (get-mem-path pid) :element-type '(unsigned-byte 8) :direction :io
:if-exists :append)
(file-position str address)
(let ((byte
(read-byte str t)))
(when hex-print?
(hex-print byte))
byte)))
(defun n-read-proc-mem-bytes-list (address &key (bytes 1) (pid *pid*))
"Read `bytes' amount under `address' from process memory and return a list in of the
bytes in a address-ascending order."
(with-open-file (str (get-mem-path pid) :element-type '(unsigned-byte 8) :direction :io
:if-exists :append)
(file-position str address)
(loop for address from address below (+ address bytes)
:collect (read-byte str t))))
(defun read-mem (address &optional (bytes 8) (pid *pid*))
"Read `bytes' amount under `address' from process memory and return a `memory-array'
representation. Can be used without attaching or stopping the target process referred to
by `pid'."
(with-open-file (str (get-mem-path pid) :element-type '(unsigned-byte 8) :direction :io
:if-exists :append)
(file-position str address)
(make-mem-array
(loop for address from address below (+ address bytes)
:collect (read-byte str t))
address)))
currently writing " f " will write = > " 0f " to memory !
(defun write-mem (address value &key (pid *pid*))
"Write the `value' given to the process memory starting from `address' in
sequential order.
`value' will be internally treated as input to a `memory-array' (make-mem-array ..) such that:
`value' can be represented as an integer (write only the bytes needed to represent it),
hex-string (allows for leading zeros) or a byte-sequence like #(32 172) and '(255 312).
Returns a `memory-array' representing the newly changed memory."
(let ((value-byte-array
(get-byte-array
(make-mem-array value nil))))
(with-open-file (str (get-mem-path pid) :element-type '(unsigned-byte 8) :direction :output
:if-exists :append)
(file-position str address)
(write-sequence value-byte-array str
:end (length value-byte-array)))
(read-mem address (max (length value-byte-array) 8))))
(defun n-write-proc-mem-bytes-list (address byte-list &key (pid *pid*))
"Write bytes in the byte-list given to the process memory starting from `address' in
sequential order."
(with-open-file (str (get-mem-path pid) :element-type '(unsigned-byte 8) :direction :output
:if-exists :append)
(file-position str address)
(write-sequence byte-list str
:end (length byte-list))))
(defun n-read-proc-mem (address &optional (bytes 8) (pid *pid*))
(byte-list->number
(n-read-proc-mem-bytes-list address :bytes bytes :pid pid)))
(defun read-proc-mem-word (address &optional (offset 0) (hex-print? t) (pid *pid*))
(let* ((integer-word
(n-read-proc-mem (+ offset address) 8 pid)))
(when hex-print?
(hex-print integer-word t t))
integer-word))
use this with Disassembly !
(defun byte-list->number (byte-list)
"Converts a list of bytes like (255 255 40 77 46 41 0 96), to
an integer of those 8 bytes, in this example: #x6000292e4d28ffff"
(apply #'+
(loop for index from 0 below (length byte-list)
for byte in byte-list
:collect
(ash byte (* 8 index)))))
(defun byte-list= (byte-list1 byte-list2)
(unless (= (length byte-list1) (length byte-list2))
(error "Byte lists don't have same length: ~a ~a" byte-list1 byte-list2))
(loop
:for byte1 in byte-list1
:for byte2 in byte-list2
:always (= byte1 byte2)))
(defun integer->byte-list (integer)
"Converts a integer into a list of bytes, in this case: Example #x6000292e4d28ffff
=> (255 255 40 77 46 41 0 96)\""
(let ((number-byte-length (integer-byte-length integer)))
(loop for byte from 0 below number-byte-length
:collect
(ldb (byte 8 (* byte 8)) integer))))
(defun hex-string->byte-list (hex-string)
"Translates a string like \"0001ff02\" or \"#x0001ff02\" to the byte-list
(2 1 255 0)."
(unless (stringp hex-string)
(error "~a is not a string." hex-string))
(flet ((sanitize-hex-string (hex-string)
(if (>= (length hex-string) 2)
(if (string= "#x" (string-downcase (subseq hex-string 0 2)))
cut own the # x or # X from the beginning of string
(subseq hex-string 2)
hex-string)
hex-string)))
(let*
((sanitized-hex-input (sanitize-hex-string hex-string))
(hex-list (split-sequence-backwards-by-n sanitized-hex-input 2)))
(mapcar (lambda (hex)
(parse-integer hex :radix 16))
hex-list))))
(defun pad-byte-list (byte-list padding-length)
(let ((pad-diff (- padding-length (length byte-list))))
(if (plusp pad-diff)
(append byte-list (make-list pad-diff :initial-element 0))
byte-list)))
(defun get-byte (number byte)
(ldb (byte 8 (* byte 8)) number))
#+sbcl
(defun ascii-string->integer (string)
(let ((byte-list
(loop for char across string
:collect (char-code char))))
(byte-list->number byte-list)))
TODO use flexi - streams
#+sbcl
(defun integer->ascii-string (integer)
(let* ((byte-length (integer-byte-length integer))
(string (make-array byte-length :element-type 'character)))
(loop for i from 0 below byte-length :do
(setf (aref string i)
(code-char (ldb (byte 8 (* i 8)) integer))))
string))
(defun write-proc-mem-byte (address new-byte &key (pid *pid*))
"Careful, this writes a `new-byte' to the process memory address of the process
designated by `pid'.
This can be used to write to any process memory, without having to trace or even
SIGSTOP it."
(with-open-file (str (get-mem-path pid) :element-type '(unsigned-byte 8) :direction :output
:if-exists :append)
(file-position str address)
(write-byte new-byte str)))
TODO still does n't write # x00ab < - leading zeros , because of ( integer - byte - length .. )
of writing some non zero byte before it .
(defun write-proc-mem-word (address new-word &key (pid *pid*) (write-full-word? nil)
(hex-print? t))
"Writes the `new-word' to address, use `write-full-word?' to always write 8 bytes
regardless of leading zeros. Such that an new-word=#xabcd will write #x0000000000abcd,
instead of just #abcd and leaving the leading bytes as they where."
(with-open-file (str (get-mem-path pid) :element-type '(unsigned-byte 8) :direction :output
:if-exists :append)
(file-position str address)
(let ((byte-list (integer->byte-list new-word)))
(when write-full-word?
(setf byte-list
(append byte-list
(make-list (- 8 (length byte-list)) :initial-element 0))))
(n-write-proc-mem-bytes-list address byte-list :pid pid)))
(when hex-print?
(read-proc-mem-word address 0 pid)))
(defun rw-proc-mem-word (address &optional (offset 0) (rw-mode :r) new-word (pid *pid*))
"Read or write word to memory. Using the `rw-mode' keyword switches :r = read, :w =
write, and :wf = write the full word.
Used to read through the memory interactively with the offset using the :r keyword, and
then when the memory address of interest was found, overwrite it by simply switching to
:w (write just the provided bytes) or :wf (always write the full word, with leading zeros
if needed) and provide the `new-word' to overwrite it."
(if (or (eq rw-mode :w) (eq rw-mode :wf))
(when (null new-word)
(error "rw-mode is :w or :wf but no new-word to write
provided. `new-word' is: ~a" new-word)))
(case rw-mode
(:r (read-proc-mem-word address offset t pid))
(:w (write-proc-mem-word (+ address offset) new-word :pid pid))
(:wf (write-proc-mem-word (+ address offset) new-word :write-full-word? t :pid pid))))
(defun print-proc-mem-table (&key address-list address-range (number-of-rows 30) (spacing 1) (pid *pid*))
"Print Process memory addresses in a table. If `address-range' is provided it is used instead of
the address-list."
(format t "***PID: ~6a ~3a ~3a***~%"
pid number-of-rows spacing)
(let ((row 1)
(spaces (make-string spacing :initial-element #\Space)))
(flet ((flet-print-memory (address)
(format t "~(~2x~)" (read-proc-mem-byte address :pid pid :hex-print? nil))
(format t "~a" spaces)
(when (= row number-of-rows)
(setf row 0))
(incf row)))
(if (not address-range)
(loop for address in address-list
:do
(flet-print-memory address))
(loop for address from (first address-range) to (second address-range)
:do
(flet-print-memory address)))))
(terpri))
(defun find-address-region-maps-entry (address &optional (pid *pid*))
(flet ((address-in-address-region? (address-region)
(<= (first address-region) address (second address-region))))
(loop for line in (parse-proc-pid-maps pid)
:when
(address-in-address-region?
(address-range-list line))
:do (return line))))
(defun find-address-region (address address-region-list)
"Return the address-region where `address' is contained."
(loop for address-region in address-region-list
for from-address = (first address-region)
for to-address = (second address-region)
:when (<= from-address address to-address)
:do (return address-region)))
|
cd6c9950026ca9810950bf34f9462c015245b9939444a2e5e7e04afa4b6313f8 | camllight/camllight | myTypeParser.mli | type token =
IDENT of string
| INFIX of string
| INT of int
| CHAR of char
| FLOAT of float
| STRING of string
| EOF
| MULTIPLICATIVE of string
| ADDITIVE of string
| SUBTRACTIVE of string
| CONCATENATION of string
| COMPARISON of string
| EQUAL
| EQUALEQUAL
| SHARP
| BANG
| AMPERSAND
| QUOTE
| LPAREN
| RPAREN
| STAR
| COMMA
| MINUSGREATER
| DOT
| DOTDOT
| DOTLPAREN
| COLON
| COLONCOLON
| COLONEQUAL
| SEMI
| SEMISEMI
| LESSMINUS
| LBRACKET
| LBRACKETBAR
| LBRACKETLESS
| RBRACKET
| UNDERSCORE
| UNDERUNDER
| LBRACE
| BAR
| BARRBRACKET
| GREATERRBRACKET
| RBRACE
| AND
| AS
| BEGIN
| DO
| DONE
| DOWNTO
| ELSE
| END
| EXCEPTION
| FOR
| FUN
| FUNCTION
| IF
| IN
| LET
| MATCH
| MUTABLE
| NOT
| OF
| OR
| PREFIX
| REC
| THEN
| TO
| TRY
| TYPE
| VALUE
| WHERE
| WHILE
| WITH
;;
value TypeEntry :
(lexing__lexbuf -> token) -> lexing__lexbuf -> syntax__type_expression;;
| null | https://raw.githubusercontent.com/camllight/camllight/0cc537de0846393322058dbb26449427bfc76786/sources/contrib/search_isos/myTypeParser.mli | ocaml | type token =
IDENT of string
| INFIX of string
| INT of int
| CHAR of char
| FLOAT of float
| STRING of string
| EOF
| MULTIPLICATIVE of string
| ADDITIVE of string
| SUBTRACTIVE of string
| CONCATENATION of string
| COMPARISON of string
| EQUAL
| EQUALEQUAL
| SHARP
| BANG
| AMPERSAND
| QUOTE
| LPAREN
| RPAREN
| STAR
| COMMA
| MINUSGREATER
| DOT
| DOTDOT
| DOTLPAREN
| COLON
| COLONCOLON
| COLONEQUAL
| SEMI
| SEMISEMI
| LESSMINUS
| LBRACKET
| LBRACKETBAR
| LBRACKETLESS
| RBRACKET
| UNDERSCORE
| UNDERUNDER
| LBRACE
| BAR
| BARRBRACKET
| GREATERRBRACKET
| RBRACE
| AND
| AS
| BEGIN
| DO
| DONE
| DOWNTO
| ELSE
| END
| EXCEPTION
| FOR
| FUN
| FUNCTION
| IF
| IN
| LET
| MATCH
| MUTABLE
| NOT
| OF
| OR
| PREFIX
| REC
| THEN
| TO
| TRY
| TYPE
| VALUE
| WHERE
| WHILE
| WITH
;;
value TypeEntry :
(lexing__lexbuf -> token) -> lexing__lexbuf -> syntax__type_expression;;
| |
66ef8d74f6960a7052fa03265c171fae63c6ea19fa5923c1ed49f14f2a9520db | mauny/the-functional-approach-to-programming | fonts.ml | (* *)
(* Projet Formel *)
(* *)
CAML - light :
(* *)
(*************************************************************************)
(* *)
(* LIENS *)
45 rue d'Ulm
75005 PARIS
France
(* *)
(*************************************************************************)
$ I d : fonts.mlp , v 1.1 1997/08/14 11:34:25
(* fonts.ml *)
Mon Jan 20 1992
#open "MLgraph";;
#open "compatibility";;
#open "prelude";;
(* somes variables of font description *)
let courier_descr =
{font_descr_filename="Cour.fnt"; font_descr_name = ""; font_descr_height=12.0; font_descr_width=7.2;
font_descr_descr=[||];
font_descr_descr_bbox=[||]
};;
let courier_Bold_descr =
{font_descr_filename="Cour-B.fnt"; font_descr_name=""; font_descr_height=12.0; font_descr_width=12.0;
font_descr_descr=[||];
font_descr_descr_bbox=[||]};;
let courier_Oblique_descr =
{font_descr_filename="Cour-O.fnt"; font_descr_name=""; font_descr_height=12.0; font_descr_width=12.0;
font_descr_descr=[||];
font_descr_descr_bbox=[||]};;
let courier_BoldOblique_descr =
{font_descr_filename="Cour-BO.fnt"; font_descr_name=""; font_descr_height=12.0; font_descr_width=12.0;
font_descr_descr=[||];
font_descr_descr_bbox=[||]};;
let times_Roman_descr =
{font_descr_filename="Time-R.fnt"; font_descr_name=""; font_descr_height=12.0; font_descr_width=12.0;
font_descr_descr=[||];
font_descr_descr_bbox=[||]};;
let times_Bold_descr =
{font_descr_filename="Time-B.fnt"; font_descr_name=""; font_descr_height=12.0; font_descr_width=12.0;
font_descr_descr=[||];
font_descr_descr_bbox=[||]};;
let times_Italic_descr =
{font_descr_filename="Time-I.fnt"; font_descr_name=""; font_descr_height=12.0; font_descr_width=12.0;
font_descr_descr=[||];
font_descr_descr_bbox=[||]};;
let times_BoldItalic_descr =
{font_descr_filename="Time-BI.fnt"; font_descr_name=""; font_descr_height=12.0; font_descr_width = 12.0;
font_descr_descr=[||];
font_descr_descr_bbox=[||]};;
let helvetica_descr =
{font_descr_filename="Helv.fnt";font_descr_name=""; font_descr_height=12.0; font_descr_width=12.18;
font_descr_descr=[||];
font_descr_descr_bbox=[||]};;
let helvetica_Bold_descr =
{font_descr_filename="Helv-B.fnt"; font_descr_name=""; font_descr_height=12.0; font_descr_width=12.0;
font_descr_descr=[||];
font_descr_descr_bbox=[||]};;
let helvetica_Oblique_descr =
{font_descr_filename="Helv-O.fnt"; font_descr_name=""; font_descr_height=12.0; font_descr_width=12.0;
font_descr_descr=[||];
font_descr_descr_bbox=[||]};;
let helvetica_BoldOblique_descr =
{font_descr_filename="Helv-BO.fnt"; font_descr_name=""; font_descr_height=12.0; font_descr_width=12.0;
font_descr_descr=[||];
font_descr_descr_bbox=[||]};;
let symbol_descr =
{font_descr_filename="Symb.fnt"; font_descr_name=""; font_descr_height=12.0; font_descr_width=12.0;
font_descr_descr=[||];
font_descr_descr_bbox=[||]};;
exception Find of int;;
let pos_char_in_string s c b e =
try
let l = string_length s in
for i=(max_int 0 b) to (min_int e (l-1)) do
if nth_char s i = c then raise (Find i)
done;
-1
with Find x -> x
;;
let floatpair_of_string s =
let ep = (string_length s)-1 in
let pc = pos_char_in_string s comma_char 0 ep
and op = pos_char_in_string s open_par_char 0 ep
and cp = pos_char_in_string s close_par_char 0 ep
in
if (pc<0) or (op < 0) or (cp <0) then raise (Failure ("floatpair_of_string "^"bad format"))
else
(float_of_string (sub_string s (op+1) (pc-op-1)),
float_of_string (sub_string s (pc+1) (cp-pc-1)))
;;
let bbox_of_string s =
let ep = (string_length s) -1 in
let pc = pos_char_in_string s comma_char ((pos_char_in_string s comma_char 0 ep)+1) ep
and op = pos_char_in_string s open_par_char 0 ep
and cp = pos_char_in_string s close_par_char
((pos_char_in_string s close_par_char
((pos_char_in_string s close_par_char 0 ep)+1) ep) +1)
ep
in
if (pc<0) or (op < 0) or (cp <0) then raise (Failure ("bbox_of_string "^"bad format"))
else
(floatpair_of_string (sub_string s (op+1) (pc-op-1)),
floatpair_of_string (sub_string s (pc+1) (cp-pc-1)))
;;
let load_font filename =
let name = !font_lib_directory^filename in
let chan = (try open_in name with e -> prerr_endline (" cannot open file : " ^ name ); raise e)
in
let r = ref ""
and c = ref 0
and n = ref ""
and h = ref 0.0
and w = ref 0.0
and d = ref (make_vect 255 0.0)
and db = ref (make_vect 255 ((0.0,0.0),(0.0,0.0)))
in
try
while true
do
r := input_line chan;
match !r with
"Name" -> n:= input_line chan;()
| "Height" -> h:= float_of_string (input_line chan);()
| "Width" -> w:= float_of_string (input_line chan);()
| "Descr" -> r:= input_line chan;
if !r="empty" then (d:=[||];())
else
(!d.(0)<- float_of_string (!r);
for i=1 to 254 do r:=input_line chan; !d.(i)<-float_of_string(!r) done)
| "Descr_bbox" ->
for i=0 to 254
do
r:=input_line chan;
!db.(i)<-bbox_of_string (!r)
done
| _ -> ()
done;
close_in chan;
{font_descr_filename=filename;font_descr_name= !n;font_descr_height= !h;font_descr_width= !w;font_descr_descr= !d;font_descr_descr_bbox= !db}
with End_of_file ->
(close_in chan;
{font_descr_filename=filename;font_descr_name= !n;font_descr_height= !h;font_descr_width= !w;font_descr_descr= !d;font_descr_descr_bbox= !db})
;;
| null | https://raw.githubusercontent.com/mauny/the-functional-approach-to-programming/1ec8bed5d33d3a67bbd67d09afb3f5c3c8978838/cl-75/MLGRAPH.DIR/fonts.ml | ocaml |
Projet Formel
***********************************************************************
LIENS
***********************************************************************
fonts.ml
somes variables of font description | CAML - light :
45 rue d'Ulm
75005 PARIS
France
$ I d : fonts.mlp , v 1.1 1997/08/14 11:34:25
Mon Jan 20 1992
#open "MLgraph";;
#open "compatibility";;
#open "prelude";;
let courier_descr =
{font_descr_filename="Cour.fnt"; font_descr_name = ""; font_descr_height=12.0; font_descr_width=7.2;
font_descr_descr=[||];
font_descr_descr_bbox=[||]
};;
let courier_Bold_descr =
{font_descr_filename="Cour-B.fnt"; font_descr_name=""; font_descr_height=12.0; font_descr_width=12.0;
font_descr_descr=[||];
font_descr_descr_bbox=[||]};;
let courier_Oblique_descr =
{font_descr_filename="Cour-O.fnt"; font_descr_name=""; font_descr_height=12.0; font_descr_width=12.0;
font_descr_descr=[||];
font_descr_descr_bbox=[||]};;
let courier_BoldOblique_descr =
{font_descr_filename="Cour-BO.fnt"; font_descr_name=""; font_descr_height=12.0; font_descr_width=12.0;
font_descr_descr=[||];
font_descr_descr_bbox=[||]};;
let times_Roman_descr =
{font_descr_filename="Time-R.fnt"; font_descr_name=""; font_descr_height=12.0; font_descr_width=12.0;
font_descr_descr=[||];
font_descr_descr_bbox=[||]};;
let times_Bold_descr =
{font_descr_filename="Time-B.fnt"; font_descr_name=""; font_descr_height=12.0; font_descr_width=12.0;
font_descr_descr=[||];
font_descr_descr_bbox=[||]};;
let times_Italic_descr =
{font_descr_filename="Time-I.fnt"; font_descr_name=""; font_descr_height=12.0; font_descr_width=12.0;
font_descr_descr=[||];
font_descr_descr_bbox=[||]};;
let times_BoldItalic_descr =
{font_descr_filename="Time-BI.fnt"; font_descr_name=""; font_descr_height=12.0; font_descr_width = 12.0;
font_descr_descr=[||];
font_descr_descr_bbox=[||]};;
let helvetica_descr =
{font_descr_filename="Helv.fnt";font_descr_name=""; font_descr_height=12.0; font_descr_width=12.18;
font_descr_descr=[||];
font_descr_descr_bbox=[||]};;
let helvetica_Bold_descr =
{font_descr_filename="Helv-B.fnt"; font_descr_name=""; font_descr_height=12.0; font_descr_width=12.0;
font_descr_descr=[||];
font_descr_descr_bbox=[||]};;
let helvetica_Oblique_descr =
{font_descr_filename="Helv-O.fnt"; font_descr_name=""; font_descr_height=12.0; font_descr_width=12.0;
font_descr_descr=[||];
font_descr_descr_bbox=[||]};;
let helvetica_BoldOblique_descr =
{font_descr_filename="Helv-BO.fnt"; font_descr_name=""; font_descr_height=12.0; font_descr_width=12.0;
font_descr_descr=[||];
font_descr_descr_bbox=[||]};;
let symbol_descr =
{font_descr_filename="Symb.fnt"; font_descr_name=""; font_descr_height=12.0; font_descr_width=12.0;
font_descr_descr=[||];
font_descr_descr_bbox=[||]};;
exception Find of int;;
let pos_char_in_string s c b e =
try
let l = string_length s in
for i=(max_int 0 b) to (min_int e (l-1)) do
if nth_char s i = c then raise (Find i)
done;
-1
with Find x -> x
;;
let floatpair_of_string s =
let ep = (string_length s)-1 in
let pc = pos_char_in_string s comma_char 0 ep
and op = pos_char_in_string s open_par_char 0 ep
and cp = pos_char_in_string s close_par_char 0 ep
in
if (pc<0) or (op < 0) or (cp <0) then raise (Failure ("floatpair_of_string "^"bad format"))
else
(float_of_string (sub_string s (op+1) (pc-op-1)),
float_of_string (sub_string s (pc+1) (cp-pc-1)))
;;
let bbox_of_string s =
let ep = (string_length s) -1 in
let pc = pos_char_in_string s comma_char ((pos_char_in_string s comma_char 0 ep)+1) ep
and op = pos_char_in_string s open_par_char 0 ep
and cp = pos_char_in_string s close_par_char
((pos_char_in_string s close_par_char
((pos_char_in_string s close_par_char 0 ep)+1) ep) +1)
ep
in
if (pc<0) or (op < 0) or (cp <0) then raise (Failure ("bbox_of_string "^"bad format"))
else
(floatpair_of_string (sub_string s (op+1) (pc-op-1)),
floatpair_of_string (sub_string s (pc+1) (cp-pc-1)))
;;
let load_font filename =
let name = !font_lib_directory^filename in
let chan = (try open_in name with e -> prerr_endline (" cannot open file : " ^ name ); raise e)
in
let r = ref ""
and c = ref 0
and n = ref ""
and h = ref 0.0
and w = ref 0.0
and d = ref (make_vect 255 0.0)
and db = ref (make_vect 255 ((0.0,0.0),(0.0,0.0)))
in
try
while true
do
r := input_line chan;
match !r with
"Name" -> n:= input_line chan;()
| "Height" -> h:= float_of_string (input_line chan);()
| "Width" -> w:= float_of_string (input_line chan);()
| "Descr" -> r:= input_line chan;
if !r="empty" then (d:=[||];())
else
(!d.(0)<- float_of_string (!r);
for i=1 to 254 do r:=input_line chan; !d.(i)<-float_of_string(!r) done)
| "Descr_bbox" ->
for i=0 to 254
do
r:=input_line chan;
!db.(i)<-bbox_of_string (!r)
done
| _ -> ()
done;
close_in chan;
{font_descr_filename=filename;font_descr_name= !n;font_descr_height= !h;font_descr_width= !w;font_descr_descr= !d;font_descr_descr_bbox= !db}
with End_of_file ->
(close_in chan;
{font_descr_filename=filename;font_descr_name= !n;font_descr_height= !h;font_descr_width= !w;font_descr_descr= !d;font_descr_descr_bbox= !db})
;;
|
6f2cc69cb3501050bf3ecf433d963f7496a7975a510d89ab9998f236c44f4568 | l29ah/hatexmpp3 | Roster.hs | {-# LANGUAGE OverloadedStrings #-}
module GTK.Roster
( spawnRosterWindow
) where
import Control.Concurrent
import Control.Concurrent.STM
import Control.Monad.Reader
import Data.Text (Text)
import qualified Data.Text as T
import Data.Tree
import Graphics.UI.Gtk hiding (eventKeyName, eventModifier)
import qualified Graphics.UI.Gtk.ModelView as MView
import Network.Xmpp as X
import Types
import GTK.Chat
treeMUCIndex = 0
treeUserIndex = 1
data RosterTreeNode = MUCs | Users | JID X.MessageType Jid deriving (Eq, Show)
defaultTree :: Forest RosterTreeNode
defaultTree = [Node MUCs [], Node Users []]
renderNode :: RosterTreeNode -> Text
renderNode MUCs = "MUCs"
renderNode Users = "Users"
renderNode (JID _ jid) = jidToText jid
spawnRosterWindow :: (X.MessageType -> Jid -> Text -> IO ()) -> Hate ()
spawnRosterWindow sendMessage = do
s <- ask
addToRoster <- liftIO $ postGUISync $ do
w <- windowNew
set w [windowTitle := ("hatexmpp roster" :: Text)]
windowSetDefaultSize w 300 800
view <- MView.treeViewNew
treeViewSetHeadersVisible view False
store <- MView.treeStoreNew defaultTree
MView.treeViewSetModel view $ Just store
column <- MView.treeViewColumnNew
MView.treeViewAppendColumn view column
MView.treeViewColumnSetTitle column T.empty
cell <- cellRendererTextNew
MView.treeViewColumnPackStart column cell True
cellLayoutSetAttributes column cell store (\record -> [MView.cellText := renderNode record])
on view rowActivated $ \path _ -> do
val <- treeStoreGetValue store path
case val of
JID typ jid -> do
forkIO $ flip runHate s $ addChat jid (sendMessage typ jid)
pure ()
_ -> pure ()
scroll <- scrolledWindowNew Nothing Nothing
scrolledWindowSetPolicy scroll PolicyAutomatic PolicyAutomatic
containerAdd scroll view
containerAdd w scroll
widgetShowAll w
pure (\index typ jid -> postGUISync $ do
let path = [index]
pathIter <- treeModelGetIter store path
unusedIndex <- treeModelIterNChildren store pathIter
treeStoreInsert store path unusedIndex $ JID typ jid)
let addMUCToRoster = addToRoster treeMUCIndex GroupChat
let addUserToRoster = addToRoster treeUserIndex Chat
liftIO $ atomically $ do
writeTVar (addMUCToRosterWindow s) addMUCToRoster
writeTVar (addUserToRosterWindow s) addUserToRoster
| null | https://raw.githubusercontent.com/l29ah/hatexmpp3/9d3e25c6acf4c0978a2c1d88b3572ad20b1c228d/GTK/Roster.hs | haskell | # LANGUAGE OverloadedStrings # | module GTK.Roster
( spawnRosterWindow
) where
import Control.Concurrent
import Control.Concurrent.STM
import Control.Monad.Reader
import Data.Text (Text)
import qualified Data.Text as T
import Data.Tree
import Graphics.UI.Gtk hiding (eventKeyName, eventModifier)
import qualified Graphics.UI.Gtk.ModelView as MView
import Network.Xmpp as X
import Types
import GTK.Chat
treeMUCIndex = 0
treeUserIndex = 1
data RosterTreeNode = MUCs | Users | JID X.MessageType Jid deriving (Eq, Show)
defaultTree :: Forest RosterTreeNode
defaultTree = [Node MUCs [], Node Users []]
renderNode :: RosterTreeNode -> Text
renderNode MUCs = "MUCs"
renderNode Users = "Users"
renderNode (JID _ jid) = jidToText jid
spawnRosterWindow :: (X.MessageType -> Jid -> Text -> IO ()) -> Hate ()
spawnRosterWindow sendMessage = do
s <- ask
addToRoster <- liftIO $ postGUISync $ do
w <- windowNew
set w [windowTitle := ("hatexmpp roster" :: Text)]
windowSetDefaultSize w 300 800
view <- MView.treeViewNew
treeViewSetHeadersVisible view False
store <- MView.treeStoreNew defaultTree
MView.treeViewSetModel view $ Just store
column <- MView.treeViewColumnNew
MView.treeViewAppendColumn view column
MView.treeViewColumnSetTitle column T.empty
cell <- cellRendererTextNew
MView.treeViewColumnPackStart column cell True
cellLayoutSetAttributes column cell store (\record -> [MView.cellText := renderNode record])
on view rowActivated $ \path _ -> do
val <- treeStoreGetValue store path
case val of
JID typ jid -> do
forkIO $ flip runHate s $ addChat jid (sendMessage typ jid)
pure ()
_ -> pure ()
scroll <- scrolledWindowNew Nothing Nothing
scrolledWindowSetPolicy scroll PolicyAutomatic PolicyAutomatic
containerAdd scroll view
containerAdd w scroll
widgetShowAll w
pure (\index typ jid -> postGUISync $ do
let path = [index]
pathIter <- treeModelGetIter store path
unusedIndex <- treeModelIterNChildren store pathIter
treeStoreInsert store path unusedIndex $ JID typ jid)
let addMUCToRoster = addToRoster treeMUCIndex GroupChat
let addUserToRoster = addToRoster treeUserIndex Chat
liftIO $ atomically $ do
writeTVar (addMUCToRosterWindow s) addMUCToRoster
writeTVar (addUserToRosterWindow s) addUserToRoster
|
30f0678ab35b96d5ecad3ee68d0ece23eec47fedaca1c81a200dbb08a1750eb9 | dktr0/estuary | Notepad.hs | # LANGUAGE OverloadedStrings , RecursiveDo #
module Estuary.Widgets.Notepad where
import Reflex
import Reflex.Dom
import Control.Monad
import Data.Text (Text)
import Data.Sequence as Seq
import Data.Text as T
import GHCJS.DOM.EventM
import Estuary.Widgets.W
import Estuary.Types.Definition
import Estuary.Widgets.Reflex
notePadWidget :: MonadWidget t m => Dynamic t NotePad -> W t m (Variable t NotePad)
notePadWidget delta = divClass "notepadContainers" $ mdo
let pageNumSampled = fmap fst delta
let notesSampled = fmap snd delta -- []
let changes = currentValue v
let pageNum = fmap fst changes
let notes = fmap snd changes -- []
buttons <- divClass "notepad-rowOfButtons" $ do
-- add note
addPageButton <- clickableDivClass "+" "notepad-prevNextButtons ui-buttons ui-font"
let addPage = addNote <$ addPageButton -- :: m (Event t (Notepad -> Notepad))
-- erase note
erasePageButton <- clickableDivClass "-" "notepad-prevNextButtons ui-buttons ui-font"
let erasePage = eraseNote <$ erasePageButton -- :: m (Event t (Notepad -> Notepad))
-- prev page
prevPageButton <- clickableDivClass "←" "notepad-prevNextButtons ui-buttons ui-font" -- :: m (Event t ())
let prevPage = prevPageOfNote <$ prevPageButton -- :: m (Event t (Notepad -> Notepad))
-- next page
nextPageButton <- clickableDivClass "→" "notepad-prevNextButtons ui-buttons ui-font" -- :: m (Event t ())
let nextPage = nextPageOfNote <$ nextPageButton -- :: m (Event t (Notepad -> Notepad))
--
return $ leftmost [addPage, erasePage, prevPage, nextPage]
let noteTupple = Seq.index <$> notes <*> pageNum -- (t,c)
let noteTuppleSampled = Seq.index <$> notesSampled <*> pageNumSampled -- (t,c)
(titleEv,contentEv) <- titleContentWidget (fmap fst noteTuppleSampled) (fmap snd noteTuppleSampled) (fmap fst noteTupple) (fmap snd noteTupple) -- :: (Event t Text, Event t Text)
let setNoteTitle' = fmap setNoteTitle titleEv -- :: m (Event t (Notepad -> Notepad))
let setNoteContent' = fmap setNoteContent contentEv -- :: m (Event t (Notepad -> Notepad))
let localEvs = mergeWith (.) [setNoteTitle',setNoteContent', buttons]
let localUpdates = attachWith (flip ($)) (current $ currentValue v) localEvs
v <- variable delta localUpdates
return v
titleContentWidget :: MonadWidget t m => Dynamic t Text -> Dynamic t Text -> Dynamic t Text -> Dynamic t Text -> m (Event t Text,Event t Text)
titleContentWidget it ic t c = divClass "notepadContainers code-font" $ mdo
it' <- sample $ current it
ic' <- sample $ current ic
x <- textInput $ def & textInputConfig_initialValue .~ it'
& textInputConfig_setValue .~ (updated t)
& attributes .~ constDyn ("class" =: "notepage-title code-font primary-color")
let title = _textInput_input x -- :: Event t Text
y <- textArea $ def & textAreaConfig_initialValue .~ ic'
& textAreaConfig_setValue .~ (updated c)
& attributes .~ constDyn ("class" =: "notepage-content code-font primary-color primary-borders")
let content = _textArea_input y
return $ (title,content)
---------------------------------------------------------------------
-- PURE FUNCTIONS
------------------------------------------
-- FUNCTION THAT MOVES THROUGH THE NOTEPAD
nextPageOfNote :: NotePad -> NotePad
nextPageOfNote (currentPage,listOfNotes)
| currentPage >= (Prelude.length listOfNotes - 1) = (0,listOfNotes)
| otherwise = ((currentPage+1),listOfNotes)
prevPageOfNote :: NotePad -> NotePad
prevPageOfNote (currentPage,listOfNotes)
| currentPage <= 0 = ((Prelude.length listOfNotes)-1, listOfNotes)
| otherwise = ((currentPage-1),listOfNotes)
-- FUNCTIONS THAT CREATE AND ERASE NOTEPAGES
addNote :: NotePad -> NotePad
addNote notepad = do
let note = ("NewTitle","NewContent")
(fst notepad, insertAt (fst notepad) note (snd notepad))
eraseNote :: NotePad -> NotePad
eraseNote notepad
| Prelude.length (snd notepad) <= 1 = notepad
| otherwise = (fst notepad, deleteAt (fst notepad) (snd notepad))
-- FUNCTIONS THAT CHANGES/UPDATES THE TITLE AND CONTENT OF A NOTEPAGE
setNoteContent :: Text -> NotePad -> NotePad
setNoteContent newC (currentPage,listOfNotes) = do
let currentNote = getCurrentNotePage (currentPage,listOfNotes) -- :: NotePage
let updatedNote = replaceContentInPage newC currentNote -- :: NotePage
let updatedListOfNotes = update currentPage updatedNote listOfNotes -- :: NotePage
(currentPage,updatedListOfNotes)
setNoteTitle :: Text -> NotePad -> NotePad
setNoteTitle newT (currentPage,listOfNotes) = do
let currentNote = getCurrentNotePage (currentPage,listOfNotes)-- :: NotePage
let updatedNote = replaceTitleInPage newT currentNote -- :: NotePage
let updatedListOfNotes = update currentPage updatedNote listOfNotes -- :: NotePage
(currentPage,updatedListOfNotes)
getCurrentNotePage :: NotePad -> NotePage
getCurrentNotePage (currentPage,listOfNotes) = listOfNotes `Seq.index` currentPage
replaceTitleInPage :: Text -> NotePage -> NotePage
replaceTitleInPage newT (currentT,currentC) = (newT,currentC)
replaceContentInPage :: Text -> NotePage -> NotePage
replaceContentInPage newC (currentT,currentC) = (currentT,newC)
--
| null | https://raw.githubusercontent.com/dktr0/estuary/41c4ceeaaa7cc9183b53db4b9d961acbbe686bc0/client/src/Estuary/Widgets/Notepad.hs | haskell | []
[]
add note
:: m (Event t (Notepad -> Notepad))
erase note
:: m (Event t (Notepad -> Notepad))
prev page
:: m (Event t ())
:: m (Event t (Notepad -> Notepad))
next page
:: m (Event t ())
:: m (Event t (Notepad -> Notepad))
(t,c)
(t,c)
:: (Event t Text, Event t Text)
:: m (Event t (Notepad -> Notepad))
:: m (Event t (Notepad -> Notepad))
:: Event t Text
-------------------------------------------------------------------
PURE FUNCTIONS
----------------------------------------
FUNCTION THAT MOVES THROUGH THE NOTEPAD
FUNCTIONS THAT CREATE AND ERASE NOTEPAGES
FUNCTIONS THAT CHANGES/UPDATES THE TITLE AND CONTENT OF A NOTEPAGE
:: NotePage
:: NotePage
:: NotePage
:: NotePage
:: NotePage
:: NotePage
| # LANGUAGE OverloadedStrings , RecursiveDo #
module Estuary.Widgets.Notepad where
import Reflex
import Reflex.Dom
import Control.Monad
import Data.Text (Text)
import Data.Sequence as Seq
import Data.Text as T
import GHCJS.DOM.EventM
import Estuary.Widgets.W
import Estuary.Types.Definition
import Estuary.Widgets.Reflex
notePadWidget :: MonadWidget t m => Dynamic t NotePad -> W t m (Variable t NotePad)
notePadWidget delta = divClass "notepadContainers" $ mdo
let pageNumSampled = fmap fst delta
let changes = currentValue v
let pageNum = fmap fst changes
buttons <- divClass "notepad-rowOfButtons" $ do
addPageButton <- clickableDivClass "+" "notepad-prevNextButtons ui-buttons ui-font"
erasePageButton <- clickableDivClass "-" "notepad-prevNextButtons ui-buttons ui-font"
return $ leftmost [addPage, erasePage, prevPage, nextPage]
let localEvs = mergeWith (.) [setNoteTitle',setNoteContent', buttons]
let localUpdates = attachWith (flip ($)) (current $ currentValue v) localEvs
v <- variable delta localUpdates
return v
titleContentWidget :: MonadWidget t m => Dynamic t Text -> Dynamic t Text -> Dynamic t Text -> Dynamic t Text -> m (Event t Text,Event t Text)
titleContentWidget it ic t c = divClass "notepadContainers code-font" $ mdo
it' <- sample $ current it
ic' <- sample $ current ic
x <- textInput $ def & textInputConfig_initialValue .~ it'
& textInputConfig_setValue .~ (updated t)
& attributes .~ constDyn ("class" =: "notepage-title code-font primary-color")
y <- textArea $ def & textAreaConfig_initialValue .~ ic'
& textAreaConfig_setValue .~ (updated c)
& attributes .~ constDyn ("class" =: "notepage-content code-font primary-color primary-borders")
let content = _textArea_input y
return $ (title,content)
nextPageOfNote :: NotePad -> NotePad
nextPageOfNote (currentPage,listOfNotes)
| currentPage >= (Prelude.length listOfNotes - 1) = (0,listOfNotes)
| otherwise = ((currentPage+1),listOfNotes)
prevPageOfNote :: NotePad -> NotePad
prevPageOfNote (currentPage,listOfNotes)
| currentPage <= 0 = ((Prelude.length listOfNotes)-1, listOfNotes)
| otherwise = ((currentPage-1),listOfNotes)
addNote :: NotePad -> NotePad
addNote notepad = do
let note = ("NewTitle","NewContent")
(fst notepad, insertAt (fst notepad) note (snd notepad))
eraseNote :: NotePad -> NotePad
eraseNote notepad
| Prelude.length (snd notepad) <= 1 = notepad
| otherwise = (fst notepad, deleteAt (fst notepad) (snd notepad))
setNoteContent :: Text -> NotePad -> NotePad
setNoteContent newC (currentPage,listOfNotes) = do
(currentPage,updatedListOfNotes)
setNoteTitle :: Text -> NotePad -> NotePad
setNoteTitle newT (currentPage,listOfNotes) = do
(currentPage,updatedListOfNotes)
getCurrentNotePage :: NotePad -> NotePage
getCurrentNotePage (currentPage,listOfNotes) = listOfNotes `Seq.index` currentPage
replaceTitleInPage :: Text -> NotePage -> NotePage
replaceTitleInPage newT (currentT,currentC) = (newT,currentC)
replaceContentInPage :: Text -> NotePage -> NotePage
replaceContentInPage newC (currentT,currentC) = (currentT,newC)
|
5267e4947a0a50988d9ab3f71555c1e63d4b1660c407c45cafeac25eccb951b0 | coq/coq | microPG.ml | (************************************************************************)
(* * The Coq Proof Assistant / The Coq Development Team *)
v * Copyright INRIA , CNRS and contributors
< O _ _ _ , , * ( see version control and CREDITS file for authors & dates )
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
* GNU Lesser General Public License Version 2.1
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
open Ideutils
open Session
open Preferences
open GdkKeysyms
open Printf
let eprintf x =
if CDebug.(get_flag misc) then Printf.eprintf x else Printf.ifprintf stderr x
type gui = {
notebook : session Wg_Notebook.typed_notebook;
action_groups : GAction.action_group list;
}
let actiong gui name = List.find (fun ag -> ag#name = name) gui.action_groups
let ct gui = gui.notebook#current_term
let get_sel b = b#selection_bounds
let sel_nonempty b = let i, j = get_sel b in not (i#equal j)
let get_sel_txt b = let i, j = get_sel b in i#get_text ~stop:j
type status = { move : int option; kill : (string * bool) option; sel: bool }
let pr_status { move; kill; sel } =
let move = Option.cata (fun i -> string_of_int i) "" move in
let kill = Option.cata (fun (s,b) -> sprintf "kill(%b) %S" b s) "" kill in
let sel = string_of_bool sel in
Printf.sprintf "{ move: %s; kill: %s; sel: %s }" move kill sel
let pr_key t =
let kv = GdkEvent.Key.keyval t in
let str = GdkEvent.Key.string t in
let str_of_mod = function
| `SHIFT -> "SHIFT" | `LOCK -> "LOCK" | `CONTROL -> "CONTROL"
| `MOD1 -> "MOD1" | `MOD2 -> "MOD2" | `MOD3 -> "MOD3" | `MOD4 -> "MOD4"
| `MOD5 -> "MOD5" | `BUTTON1 -> "BUTTON1" | `BUTTON2 -> "BUTTON2"
| `BUTTON3 -> "BUTTON3" | `BUTTON4 -> "BUTTON4" | `BUTTON5 -> "BUTTON5"
| `SUPER -> "SUPER" | `HYPER -> "HYPER" | `META -> "META"
| `RELEASE -> "RELEASE" in
let mods = String.concat " " (List.map str_of_mod (GdkEvent.Key.state t)) in
Printf.sprintf "'%s' (%d, %s)" str kv mods
type action =
| Action of string * string
| Callback of (gui -> unit)
| Edit of (status -> GSourceView3.source_buffer -> GText.iter ->
(string -> string -> unit) -> status)
| Motion of (status -> GText.iter -> GText.iter * status)
type 'c entry = {
mods : Gdk.Tags.modifier list;
key : Gdk.keysym;
keyname : string;
doc : string;
contents : 'c
}
let mC = [`CONTROL]
let mM =
if Coq_config.arch = "Darwin" then
We add both MOD2 and META because both are
returned when pressing Command on MacOS X
returned when pressing Command on MacOS X *)
[`CONTROL;`MOD2;`META]
else
[`MOD1]
let mod_of t x =
let y = GdkEvent.Key.state t in
List.for_all (fun m -> List.mem m y) x &&
List.for_all (fun m -> List.mem m x) y
let pr_keymod l =
if l = mC then
"Ctrl-"
else
if l = mM then
if Coq_config.arch = "Darwin" then "Ctrl-Cmd-" else "Meta-"
else
""
let mkE ?(mods=mC) key keyname doc ?(alias=[]) contents =
List.map (fun (mods, key, keyname) -> { mods; key; keyname; doc; contents })
((mods, key, keyname)::alias)
type keypaths = Step of action entry list * keypaths entry list
let print_keypaths kps =
let rec aux prefix (Step (l, konts)) =
String.concat "\n" (
(List.map (fun x ->
prefix ^ pr_keymod x.mods ^ x.keyname ^ " " ^ x.doc ) l) @
(List.map (fun x ->
aux (prefix^pr_keymod x.mods^x.keyname^" ") x.contents) konts)) in
aux " " kps
let empty = Step([],[])
let frontier (Step(l1,l2)) =
List.map (fun x -> pr_keymod x.mods ^ x.keyname) l1 @
List.map (fun x -> pr_keymod x.mods ^ x.keyname) l2
let insert kps name enter_syms bindings =
let rec aux kps enter_syms =
match enter_syms, kps with
| [], Step (el, konts) -> Step (List.flatten bindings @ el, konts)
| (mods, key, keyname)::gs, Step (el, konts) ->
if List.exists (fun { key = k; mods = m } -> key = k && mods = m) konts
then
let konts =
List.map
(fun ({ key = k; contents } as x) ->
if key <> k then x else { x with contents = aux contents gs })
konts in
Step(el,konts)
else
let kont =
{ mods; key; keyname; doc = name; contents = aux empty gs } in
Step(el, kont::konts) in
aux kps enter_syms
let run_action gui group name =
((actiong gui group)#get_action name)#activate ()
let run key gui action status =
match action with
| Callback f -> f gui; status
| Action(group, name) -> run_action gui group name; status
| Edit f ->
let b = (ct gui).script#source_buffer in
let i = b#get_iter_at_mark `INSERT in
let status = f status b i (run_action gui) in
if not status.sel then
b#place_cursor ~where:(b#get_iter_at_mark `SEL_BOUND);
status
| Motion f ->
let b, script = (ct gui).script#source_buffer, (ct gui).script in
let sel_mode = status.sel || List.mem `SHIFT (GdkEvent.Key.state key) in
let i =
if sel_mode then b#get_iter_at_mark `SEL_BOUND
else b#get_iter_at_mark `INSERT in
let where, status = f status i in
let sel_mode = status.sel || List.mem `SHIFT (GdkEvent.Key.state key) in
if sel_mode then (b#move_mark `SEL_BOUND ~where; script#scroll_mark_onscreen `SEL_BOUND)
else (b#place_cursor ~where; script#scroll_mark_onscreen `INSERT);
status
let emacs = empty
let emacs = insert emacs "Emacs" [] [
(* motion *)
mkE _e "e" "Move to end of line" (Motion(fun s i ->
(if not i#ends_line then i#forward_to_line_end else i),
{ s with move = None }));
mkE ~mods:mM _Right "->" "Move to end of buffer" (Motion(fun s i ->
i#forward_to_end,
{ s with move = None }));
mkE ~mods:mM _Left "<-" "Move to start of buffer" (Motion(fun s i ->
let buffer = new GText.buffer i#buffer in
buffer#start_iter,
{ s with move = None }));
mkE _a "a" "Move to beginning of line" (Motion(fun s i ->
(i#set_line_offset 0), { s with move = None }));
mkE ~mods:mM _e "e" "Move to end of sentence" (Motion(fun s i ->
i#forward_sentence_end, { s with move = None }));
mkE ~mods:mM _a "a" "Move to beginning of sentence" (Motion(fun s i ->
i#backward_sentence_start, { s with move = None }));
mkE _n "n" "Move to next line" (Motion(fun s i ->
let orig_off = Option.default i#line_offset s.move in
let i = i#forward_line in
let new_off = min (i#chars_in_line - 1) orig_off in
(if new_off > 0 then i#set_line_offset new_off else i),
{ s with move = Some orig_off }));
mkE _p "p" "Move to previous line" (Motion(fun s i ->
let orig_off = Option.default i#line_offset s.move in
let i = i#backward_line in
let new_off = min (i#chars_in_line - 1) orig_off in
(if new_off > 0 then i#set_line_offset new_off else i),
{ s with move = Some orig_off }));
mkE _f "f" "Forward char" ~alias:[[],_Right,"RIGHT"]
(Motion(fun s i -> i#forward_char, { s with move = None }));
mkE _b "b" "Backward char" ~alias:[[],_Left,"LEFT"]
(Motion(fun s i -> i#backward_char, { s with move = None }));
mkE ~mods:mM _f "f" "Forward word" ~alias:[mC,_Right,"RIGHT"]
(Motion(fun s i -> i#forward_word_end, { s with move = None }));
mkE ~mods:mM _b "b" "Backward word" ~alias:[mC,_Left,"LEFT"]
(Motion(fun s i -> i#backward_word_start, { s with move = None }));
mkE _space "SPC" "Set mark" ~alias:[mC,_at,"@"] (Motion(fun s i ->
if s.sel = false then i, { s with sel = true }
else i, { s with sel = false } ));
(* edits *)
mkE ~mods:mM _w "w" "Copy selected region" (Edit(fun s b i run ->
if sel_nonempty b then
let txt = get_sel_txt b in
run "Edit" "Copy";
{ s with kill = Some(txt,false); sel = false }
else s));
mkE _w "w" "Kill selected region" (Edit(fun s b i run ->
if sel_nonempty b then
let txt = get_sel_txt b in
run "Edit" "Cut";
{ s with kill = Some(txt,false); sel = false }
else s));
mkE _k "k" "Kill until the end of line" (Edit(fun s b i _ ->
let already_killed = match s.kill with Some (k,true) -> k | _ -> "" in
let k =
if i#ends_line then begin
b#delete ~start:i ~stop:i#forward_char; "\n"
end else begin
let k = b#get_text ~start:i ~stop:i#forward_to_line_end () in
b#delete ~start:i ~stop:i#forward_to_line_end; k
end in
{ s with kill = Some (already_killed ^ k,true) }));
mkE ~mods:mM _d "d" "Kill next word" (Edit(fun s b i _ ->
let already_killed = match s.kill with Some (k,true) -> k | _ -> "" in
let k =
let k = b#get_text ~start:i ~stop:i#forward_word_end () in
b#delete ~start:i ~stop:i#forward_word_end; k in
{ s with kill = Some (already_killed ^ k,true) }));
mkE ~mods:mM _k "k" "Kill until sentence end" (Edit(fun s b i _ ->
let already_killed = match s.kill with Some (k,true) -> k | _ -> "" in
let k =
let k = b#get_text ~start:i ~stop:i#forward_sentence_end () in
b#delete ~start:i ~stop:i#forward_sentence_end; k in
{ s with kill = Some (already_killed ^ k,true) }));
mkE ~mods:mM _BackSpace "DELBACK" "Kill word before cursor"
(Edit(fun s b i _ ->
let already_killed = match s.kill with Some (k,true) -> k | _ -> "" in
let k =
let k = b#get_text ~start:i ~stop:i#backward_word_start () in
b#delete ~start:i ~stop:i#backward_word_start; k in
{ s with kill = Some (already_killed ^ k,true) }));
mkE _d "d" "Delete next character" (Edit(fun s b i _ ->
b#delete ~start:i ~stop:i#forward_char; s));
mkE _y "y" "Yank killed text back " (Edit(fun s b i _ ->
let k, s = match s.kill with
| Some (k,_) -> k, { s with kill = Some (k,false) }
| _ -> "", s in
b#insert ~iter:i k;
s));
(* misc *)
mkE _underscore "_" "Undo" (Action("Edit", "Undo"));
mkE _g "g" "Esc" (Callback(fun gui -> (ct gui).finder#hide ()));
mkE _s "s" "Search" (Callback(fun gui ->
if (ct gui).finder#coerce#misc#visible
then run_action gui "Edit" "Find Next"
else run_action gui "Edit" "Find"));
mkE _s "r" "Search backward" (Callback(fun gui ->
if (ct gui).finder#coerce#misc#visible
then run_action gui "Edit" "Find Previous"
else run_action gui "Edit" "Find"));
]
let emacs = insert emacs "Emacs" [mC,_x,"x"] [
mkE _s "s" "Save" (Action("File", "Save"));
mkE _c "c" "Quit" (Action("File", "Quit"));
mkE _f "f" "Open" (Action("File", "Open"));
mkE ~mods:[] _u "u" "Undo" (Action("Edit", "Undo"));
]
let pg = insert emacs "Proof General" [mC,_c,"c"] [
mkE _Return "RET" "Go to" (Action("Navigation", "Go to"));
mkE _n "n" "Advance 1 sentence" (Action("Navigation", "Forward"));
mkE _u "u" "Retract 1 sentence" (Action("Navigation", "Backward"));
mkE _b "b" "Advance" (Action("Navigation", "End"));
mkE _r "r" "Reset" (Action("Navigation", "Reset"));
mkE _c "c" "Stop" (Action("Navigation", "Interrupt"));
]
let command gui c =
let command = (ct gui).command in
let script = (ct gui).script in
let term =
let i, j = script#source_buffer#selection_bounds in
if i#equal j then None
else Some (script#buffer#get_text ~start:i ~stop:j ()) in
command#show;
command#new_query ~command:c ?term ()
let pg = insert pg "Proof General" [mC,_c,"c"; mC,_a,"a"] [
mkE _p "p" "Print" (Callback (fun gui -> command gui "Print"));
mkE _c "c" "Check" (Callback (fun gui -> command gui "Check"));
mkE _b "b" "About" (Callback (fun gui -> command gui "About"));
mkE _o "o" "Search Pattern" (Callback (fun gui->command gui "SearchPattern"));
mkE _l "l" "Locate" (Callback (fun gui -> command gui "Locate"));
mkE _Return "RET" "match template" (Action("Templates","match"));
]
let empty = { sel = false; kill = None; move = None }
let find gui (Step(here,konts)) t =
(* hack: ^c does copy in clipboard *)
let sel_nonempty () = sel_nonempty (ct gui).script#source_buffer in
let k = GdkEvent.Key.keyval t in
if k = _x && mod_of t mC && sel_nonempty () then
ignore(run t gui (Action("Edit","Cut")) empty)
else
if k = _c && mod_of t mC && sel_nonempty () then
ignore(run t gui (Action("Edit","Copy")) empty);
let cmp { key; mods } = key = k && mod_of t mods in
try `Do (List.find cmp here) with Not_found ->
try `Cont (List.find cmp konts).contents with Not_found -> `NotFound
let init w nb ags =
let gui = { notebook = nb; action_groups = ags } in
let cur = ref pg in
let status = ref empty in
let reset () = eprintf "reset\n%!"; cur := pg in
ignore(w#event#connect#key_press ~callback:(fun t ->
let on_current_term f =
let term = try Some nb#current_term with Invalid_argument _ -> None in
match term with None -> false | Some t -> f t
in
on_current_term (fun x ->
if x.script#misc#get_property "has-focus" <> `BOOL true
then false
else begin
eprintf "got key %s\n%!" (pr_key t);
if microPG#get then begin
match find gui !cur t with
| `Do e ->
eprintf "run (%s) %s on %s\n%!" e.keyname e.doc (pr_status !status);
status := run t gui e.contents !status; reset (); true
| `Cont c ->
flash_info ("Waiting one of " ^ String.concat " " (frontier c));
cur := c; true
| `NotFound -> reset (); false
end else false
end)));
ignore(w#event#connect#button_press ~callback:(fun t -> reset (); false))
let get_documentation () =
"Chars, words, lines and sentences below pertain to standard unicode segmentation rules\n" ^
print_keypaths pg
| null | https://raw.githubusercontent.com/coq/coq/e0de9db708817cc08efb20d65b9819d8f2b0ea68/ide/coqide/microPG.ml | ocaml | **********************************************************************
* The Coq Proof Assistant / The Coq Development Team
// * This file is distributed under the terms of the
* (see LICENSE file for the text of the license)
**********************************************************************
motion
edits
misc
hack: ^c does copy in clipboard | v * Copyright INRIA , CNRS and contributors
< O _ _ _ , , * ( see version control and CREDITS file for authors & dates )
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* GNU Lesser General Public License Version 2.1
open Ideutils
open Session
open Preferences
open GdkKeysyms
open Printf
let eprintf x =
if CDebug.(get_flag misc) then Printf.eprintf x else Printf.ifprintf stderr x
type gui = {
notebook : session Wg_Notebook.typed_notebook;
action_groups : GAction.action_group list;
}
let actiong gui name = List.find (fun ag -> ag#name = name) gui.action_groups
let ct gui = gui.notebook#current_term
let get_sel b = b#selection_bounds
let sel_nonempty b = let i, j = get_sel b in not (i#equal j)
let get_sel_txt b = let i, j = get_sel b in i#get_text ~stop:j
type status = { move : int option; kill : (string * bool) option; sel: bool }
let pr_status { move; kill; sel } =
let move = Option.cata (fun i -> string_of_int i) "" move in
let kill = Option.cata (fun (s,b) -> sprintf "kill(%b) %S" b s) "" kill in
let sel = string_of_bool sel in
Printf.sprintf "{ move: %s; kill: %s; sel: %s }" move kill sel
let pr_key t =
let kv = GdkEvent.Key.keyval t in
let str = GdkEvent.Key.string t in
let str_of_mod = function
| `SHIFT -> "SHIFT" | `LOCK -> "LOCK" | `CONTROL -> "CONTROL"
| `MOD1 -> "MOD1" | `MOD2 -> "MOD2" | `MOD3 -> "MOD3" | `MOD4 -> "MOD4"
| `MOD5 -> "MOD5" | `BUTTON1 -> "BUTTON1" | `BUTTON2 -> "BUTTON2"
| `BUTTON3 -> "BUTTON3" | `BUTTON4 -> "BUTTON4" | `BUTTON5 -> "BUTTON5"
| `SUPER -> "SUPER" | `HYPER -> "HYPER" | `META -> "META"
| `RELEASE -> "RELEASE" in
let mods = String.concat " " (List.map str_of_mod (GdkEvent.Key.state t)) in
Printf.sprintf "'%s' (%d, %s)" str kv mods
type action =
| Action of string * string
| Callback of (gui -> unit)
| Edit of (status -> GSourceView3.source_buffer -> GText.iter ->
(string -> string -> unit) -> status)
| Motion of (status -> GText.iter -> GText.iter * status)
type 'c entry = {
mods : Gdk.Tags.modifier list;
key : Gdk.keysym;
keyname : string;
doc : string;
contents : 'c
}
let mC = [`CONTROL]
let mM =
if Coq_config.arch = "Darwin" then
We add both MOD2 and META because both are
returned when pressing Command on MacOS X
returned when pressing Command on MacOS X *)
[`CONTROL;`MOD2;`META]
else
[`MOD1]
let mod_of t x =
let y = GdkEvent.Key.state t in
List.for_all (fun m -> List.mem m y) x &&
List.for_all (fun m -> List.mem m x) y
let pr_keymod l =
if l = mC then
"Ctrl-"
else
if l = mM then
if Coq_config.arch = "Darwin" then "Ctrl-Cmd-" else "Meta-"
else
""
let mkE ?(mods=mC) key keyname doc ?(alias=[]) contents =
List.map (fun (mods, key, keyname) -> { mods; key; keyname; doc; contents })
((mods, key, keyname)::alias)
type keypaths = Step of action entry list * keypaths entry list
let print_keypaths kps =
let rec aux prefix (Step (l, konts)) =
String.concat "\n" (
(List.map (fun x ->
prefix ^ pr_keymod x.mods ^ x.keyname ^ " " ^ x.doc ) l) @
(List.map (fun x ->
aux (prefix^pr_keymod x.mods^x.keyname^" ") x.contents) konts)) in
aux " " kps
let empty = Step([],[])
let frontier (Step(l1,l2)) =
List.map (fun x -> pr_keymod x.mods ^ x.keyname) l1 @
List.map (fun x -> pr_keymod x.mods ^ x.keyname) l2
let insert kps name enter_syms bindings =
let rec aux kps enter_syms =
match enter_syms, kps with
| [], Step (el, konts) -> Step (List.flatten bindings @ el, konts)
| (mods, key, keyname)::gs, Step (el, konts) ->
if List.exists (fun { key = k; mods = m } -> key = k && mods = m) konts
then
let konts =
List.map
(fun ({ key = k; contents } as x) ->
if key <> k then x else { x with contents = aux contents gs })
konts in
Step(el,konts)
else
let kont =
{ mods; key; keyname; doc = name; contents = aux empty gs } in
Step(el, kont::konts) in
aux kps enter_syms
let run_action gui group name =
((actiong gui group)#get_action name)#activate ()
let run key gui action status =
match action with
| Callback f -> f gui; status
| Action(group, name) -> run_action gui group name; status
| Edit f ->
let b = (ct gui).script#source_buffer in
let i = b#get_iter_at_mark `INSERT in
let status = f status b i (run_action gui) in
if not status.sel then
b#place_cursor ~where:(b#get_iter_at_mark `SEL_BOUND);
status
| Motion f ->
let b, script = (ct gui).script#source_buffer, (ct gui).script in
let sel_mode = status.sel || List.mem `SHIFT (GdkEvent.Key.state key) in
let i =
if sel_mode then b#get_iter_at_mark `SEL_BOUND
else b#get_iter_at_mark `INSERT in
let where, status = f status i in
let sel_mode = status.sel || List.mem `SHIFT (GdkEvent.Key.state key) in
if sel_mode then (b#move_mark `SEL_BOUND ~where; script#scroll_mark_onscreen `SEL_BOUND)
else (b#place_cursor ~where; script#scroll_mark_onscreen `INSERT);
status
let emacs = empty
let emacs = insert emacs "Emacs" [] [
mkE _e "e" "Move to end of line" (Motion(fun s i ->
(if not i#ends_line then i#forward_to_line_end else i),
{ s with move = None }));
mkE ~mods:mM _Right "->" "Move to end of buffer" (Motion(fun s i ->
i#forward_to_end,
{ s with move = None }));
mkE ~mods:mM _Left "<-" "Move to start of buffer" (Motion(fun s i ->
let buffer = new GText.buffer i#buffer in
buffer#start_iter,
{ s with move = None }));
mkE _a "a" "Move to beginning of line" (Motion(fun s i ->
(i#set_line_offset 0), { s with move = None }));
mkE ~mods:mM _e "e" "Move to end of sentence" (Motion(fun s i ->
i#forward_sentence_end, { s with move = None }));
mkE ~mods:mM _a "a" "Move to beginning of sentence" (Motion(fun s i ->
i#backward_sentence_start, { s with move = None }));
mkE _n "n" "Move to next line" (Motion(fun s i ->
let orig_off = Option.default i#line_offset s.move in
let i = i#forward_line in
let new_off = min (i#chars_in_line - 1) orig_off in
(if new_off > 0 then i#set_line_offset new_off else i),
{ s with move = Some orig_off }));
mkE _p "p" "Move to previous line" (Motion(fun s i ->
let orig_off = Option.default i#line_offset s.move in
let i = i#backward_line in
let new_off = min (i#chars_in_line - 1) orig_off in
(if new_off > 0 then i#set_line_offset new_off else i),
{ s with move = Some orig_off }));
mkE _f "f" "Forward char" ~alias:[[],_Right,"RIGHT"]
(Motion(fun s i -> i#forward_char, { s with move = None }));
mkE _b "b" "Backward char" ~alias:[[],_Left,"LEFT"]
(Motion(fun s i -> i#backward_char, { s with move = None }));
mkE ~mods:mM _f "f" "Forward word" ~alias:[mC,_Right,"RIGHT"]
(Motion(fun s i -> i#forward_word_end, { s with move = None }));
mkE ~mods:mM _b "b" "Backward word" ~alias:[mC,_Left,"LEFT"]
(Motion(fun s i -> i#backward_word_start, { s with move = None }));
mkE _space "SPC" "Set mark" ~alias:[mC,_at,"@"] (Motion(fun s i ->
if s.sel = false then i, { s with sel = true }
else i, { s with sel = false } ));
mkE ~mods:mM _w "w" "Copy selected region" (Edit(fun s b i run ->
if sel_nonempty b then
let txt = get_sel_txt b in
run "Edit" "Copy";
{ s with kill = Some(txt,false); sel = false }
else s));
mkE _w "w" "Kill selected region" (Edit(fun s b i run ->
if sel_nonempty b then
let txt = get_sel_txt b in
run "Edit" "Cut";
{ s with kill = Some(txt,false); sel = false }
else s));
mkE _k "k" "Kill until the end of line" (Edit(fun s b i _ ->
let already_killed = match s.kill with Some (k,true) -> k | _ -> "" in
let k =
if i#ends_line then begin
b#delete ~start:i ~stop:i#forward_char; "\n"
end else begin
let k = b#get_text ~start:i ~stop:i#forward_to_line_end () in
b#delete ~start:i ~stop:i#forward_to_line_end; k
end in
{ s with kill = Some (already_killed ^ k,true) }));
mkE ~mods:mM _d "d" "Kill next word" (Edit(fun s b i _ ->
let already_killed = match s.kill with Some (k,true) -> k | _ -> "" in
let k =
let k = b#get_text ~start:i ~stop:i#forward_word_end () in
b#delete ~start:i ~stop:i#forward_word_end; k in
{ s with kill = Some (already_killed ^ k,true) }));
mkE ~mods:mM _k "k" "Kill until sentence end" (Edit(fun s b i _ ->
let already_killed = match s.kill with Some (k,true) -> k | _ -> "" in
let k =
let k = b#get_text ~start:i ~stop:i#forward_sentence_end () in
b#delete ~start:i ~stop:i#forward_sentence_end; k in
{ s with kill = Some (already_killed ^ k,true) }));
mkE ~mods:mM _BackSpace "DELBACK" "Kill word before cursor"
(Edit(fun s b i _ ->
let already_killed = match s.kill with Some (k,true) -> k | _ -> "" in
let k =
let k = b#get_text ~start:i ~stop:i#backward_word_start () in
b#delete ~start:i ~stop:i#backward_word_start; k in
{ s with kill = Some (already_killed ^ k,true) }));
mkE _d "d" "Delete next character" (Edit(fun s b i _ ->
b#delete ~start:i ~stop:i#forward_char; s));
mkE _y "y" "Yank killed text back " (Edit(fun s b i _ ->
let k, s = match s.kill with
| Some (k,_) -> k, { s with kill = Some (k,false) }
| _ -> "", s in
b#insert ~iter:i k;
s));
mkE _underscore "_" "Undo" (Action("Edit", "Undo"));
mkE _g "g" "Esc" (Callback(fun gui -> (ct gui).finder#hide ()));
mkE _s "s" "Search" (Callback(fun gui ->
if (ct gui).finder#coerce#misc#visible
then run_action gui "Edit" "Find Next"
else run_action gui "Edit" "Find"));
mkE _s "r" "Search backward" (Callback(fun gui ->
if (ct gui).finder#coerce#misc#visible
then run_action gui "Edit" "Find Previous"
else run_action gui "Edit" "Find"));
]
let emacs = insert emacs "Emacs" [mC,_x,"x"] [
mkE _s "s" "Save" (Action("File", "Save"));
mkE _c "c" "Quit" (Action("File", "Quit"));
mkE _f "f" "Open" (Action("File", "Open"));
mkE ~mods:[] _u "u" "Undo" (Action("Edit", "Undo"));
]
let pg = insert emacs "Proof General" [mC,_c,"c"] [
mkE _Return "RET" "Go to" (Action("Navigation", "Go to"));
mkE _n "n" "Advance 1 sentence" (Action("Navigation", "Forward"));
mkE _u "u" "Retract 1 sentence" (Action("Navigation", "Backward"));
mkE _b "b" "Advance" (Action("Navigation", "End"));
mkE _r "r" "Reset" (Action("Navigation", "Reset"));
mkE _c "c" "Stop" (Action("Navigation", "Interrupt"));
]
let command gui c =
let command = (ct gui).command in
let script = (ct gui).script in
let term =
let i, j = script#source_buffer#selection_bounds in
if i#equal j then None
else Some (script#buffer#get_text ~start:i ~stop:j ()) in
command#show;
command#new_query ~command:c ?term ()
let pg = insert pg "Proof General" [mC,_c,"c"; mC,_a,"a"] [
mkE _p "p" "Print" (Callback (fun gui -> command gui "Print"));
mkE _c "c" "Check" (Callback (fun gui -> command gui "Check"));
mkE _b "b" "About" (Callback (fun gui -> command gui "About"));
mkE _o "o" "Search Pattern" (Callback (fun gui->command gui "SearchPattern"));
mkE _l "l" "Locate" (Callback (fun gui -> command gui "Locate"));
mkE _Return "RET" "match template" (Action("Templates","match"));
]
let empty = { sel = false; kill = None; move = None }
let find gui (Step(here,konts)) t =
let sel_nonempty () = sel_nonempty (ct gui).script#source_buffer in
let k = GdkEvent.Key.keyval t in
if k = _x && mod_of t mC && sel_nonempty () then
ignore(run t gui (Action("Edit","Cut")) empty)
else
if k = _c && mod_of t mC && sel_nonempty () then
ignore(run t gui (Action("Edit","Copy")) empty);
let cmp { key; mods } = key = k && mod_of t mods in
try `Do (List.find cmp here) with Not_found ->
try `Cont (List.find cmp konts).contents with Not_found -> `NotFound
let init w nb ags =
let gui = { notebook = nb; action_groups = ags } in
let cur = ref pg in
let status = ref empty in
let reset () = eprintf "reset\n%!"; cur := pg in
ignore(w#event#connect#key_press ~callback:(fun t ->
let on_current_term f =
let term = try Some nb#current_term with Invalid_argument _ -> None in
match term with None -> false | Some t -> f t
in
on_current_term (fun x ->
if x.script#misc#get_property "has-focus" <> `BOOL true
then false
else begin
eprintf "got key %s\n%!" (pr_key t);
if microPG#get then begin
match find gui !cur t with
| `Do e ->
eprintf "run (%s) %s on %s\n%!" e.keyname e.doc (pr_status !status);
status := run t gui e.contents !status; reset (); true
| `Cont c ->
flash_info ("Waiting one of " ^ String.concat " " (frontier c));
cur := c; true
| `NotFound -> reset (); false
end else false
end)));
ignore(w#event#connect#button_press ~callback:(fun t -> reset (); false))
(* Help text for the key bindings: a note on unicode segmentation followed by
   a dump of every key path in the [pg] keymap. *)
let get_documentation () =
  "Chars, words, lines and sentences below pertain to standard unicode segmentation rules\n" ^
  print_keypaths pg
|
55e2f6bd89ccfa6af1eb2ee4dbeec658e4ae1b265a69193925b00e7d303c5eeb | joshaber/clojurem | compiler.clj | Copyright ( c ) . All rights reserved .
; The use and distribution terms for this software are covered by the
; Eclipse Public License 1.0 (-1.0.php)
; which can be found in the file epl-v10.html at the root of this distribution.
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
; You must not remove this notice, or any other, from this software.
;; Warn about reflective interop calls so missing type hints are visible.
(set! *warn-on-reflection* true)

;; cljm.compiler: emits Objective-C source from ASTs produced by cljm.analyzer.
;; (Derived from the ClojureScript compiler; some JS-era code remains below.)
(ns cljm.compiler
  (:refer-clojure :exclude [munge macroexpand-1]) ; this ns defines its own munge
  (:require [clojure.java.io :as io]
            [clojure.string :as string]
            [cljm.tagged-literals :as tags]
            [cljm.analyzer :as ana]
            [clojure.pprint :as pp])
  (:import java.lang.StringBuilder))
;; Forward declarations for vars defined later in this file.
(declare munge)
(declare init-func-name)

;; When true, every compiled namespace implicitly requires/imports cljm.core.
(def include-core true)

;; Bound per compilation: ASTs needing declarations in the generated .h file.
(def ^:dynamic *externs* nil)
;; Bound per compilation: ASTs emitted at file scope (e.g. @implementation).
(def ^:dynamic *static-exprs* nil)

(defmacro ^:private debug-prn
  "Print the stringified args to stderr; compiler debugging aid."
  [& args]
  `(.println System/err (str ~@args)))
;; Identifiers munge must not emit verbatim (a "$" is appended instead).
;; Inherited from the ClojureScript compiler, hence the JavaScript keywords.
(def js-reserved
  #{"abstract" "boolean" "break" "byte" "case"
    "catch" "char" "class" "const" "continue"
    "debugger" "default" "delete" "do" "double"
    "else" "enum" "export" "extends" "final"
    "finally" "float" "for" "function" "goto" "if"
    "implements" "import" "in" "instanceof" "int"
    "interface" "let" "long" "native" "new"
    "package" "private" "protected" "public"
    "return" "short" "static" "super" "switch"
    "synchronized" "this" "throw" "throws"
    "transient" "try" "typeof" "var" "void"
    "volatile" "while" "with" "yield" "methods"})

;; When bound, an atom of [line column] tracking output position during emission.
(def ^:dynamic *position* nil)

;; File names never compiled as ordinary sources.
(def cljm-reserved-file-names #{"deps.cljm"})
(defn munge
  "Munge s (string or symbol) into a valid emitted identifier: '.' becomes
  _DOT_, reserved words get a trailing $, and the result goes through
  clojure.lang.Compiler/munge. Returns a symbol when given a symbol.
  Note: the line opening the `let` was lost in this copy of the file and has
  been reconstructed; behavior matches the closing-paren structure."
  ([s] (munge s js-reserved))
  ([s reserved]
   ;; Division is special
   (let [ss (string/replace (str s) #"\." "_DOT_")
         ss (apply str (map #(if (reserved %) (str % "$") %)
                            (string/split ss #"(?<=\.)|(?=\.)")))
         ms (clojure.lang.Compiler/munge ss)]
     (if (symbol? s)
       (symbol ms)
       ms))))
(defn- comma-sep
  "Lazy seq of the elements of xs separated by \", \" (for emits splicing)."
  [xs]
  (drop 1 (interleave (repeat ", ") xs)))
(defn- escape-char
  "Escape a single character for inclusion in an emitted double-quoted
  string literal. Returns a String for escapes / non-ASCII, or the
  character itself for printable ASCII. (The final Unicode branch was
  truncated in this copy of the file and has been reconstructed.)"
  [^Character c]
  (let [cp (.hashCode c)] ; Character.hashCode is the char's code point
    (case cp
      ; Handle printable escapes before ASCII
      34 "\\\""
      92 "\\\\"
      ; Handle non-printable escapes
      8 "\\b"
      12 "\\f"
      10 "\\n"
      13 "\\r"
      9 "\\t"
      (if (< 31 cp 127)
        c ; Print simple ASCII characters
        (format "\\u%04x" cp))))) ; Any other character is Unicode
(defn- escape-string
  "Escape every character of s via escape-char and join the results."
  [^CharSequence s]
  (apply str (map escape-char s)))
(defn- wrap-in-double-quotes
  "Surround (str x) with literal double-quote characters."
  [x]
  (str "\"" x "\""))
;; Main code-generation entry point, dispatched on the AST node's :op.
(defmulti emit :op)

(defn emits
  "Print xs to *out*, flattening as it goes: maps are emitted as AST nodes,
  seqs are spliced, fns are invoked (deferred emission thunks), nil is
  skipped, and anything else is printed literally while advancing the
  column in *position* (when bound)."
  [& xs]
  (doseq [x xs]
    (cond
      (nil? x) nil
      (map? x) (emit x)
      (seq? x) (apply emits x)
      (fn? x) (x)
      :else (do
              (let [s (print-str x)]
                (when *position*
                  (swap! *position* (fn [[line column]]
                                      [line (+ column (count s))])))
                (print s)))))
  nil)
(defn ^String emit-str
  "Emit expr and return the generated code as a string instead of printing."
  [expr]
  (let [sw (java.io.StringWriter.)]
    (binding [*out* sw]
      (emit expr))
    (.toString sw)))
(defn emitln
  "Like emits, but terminates the line and advances *position* to the start
  of the next line. (The stray print line below had lost its comment marker
  in this copy of the file; it is restored as the comment it originally was.)"
  [& xs]
  (apply emits xs)
  ;; Prints column-aligned line number comments; good test of *position*.
  ;(when *position*
  ;  (let [[line column] @*position*]
  ;    (print (apply str (concat (repeat (- 120 column) " ") ["// " (inc line)])))))
  (println)
  (when *position*
    (swap! *position* (fn [[line column]]
                        [(inc line) 0])))
  nil)
(defn sel-parts
  "Splits a selector into its constituent parts, keeping any colons. Returns a sequence of strings."
  [sel]
  (for [[_ part] (re-seq #"(:|[a-zA-Z0-9_]+\:?)" sel)]
    part))
(defn- emit-comma-sep
  "Emit each x prefixed by \", \" and terminate with \", nil\" — the
  nil-terminated trailing-argument form used for varargs message sends."
  [xs]
  (doseq [x xs]
    (emits ", " x))
  (emits ", nil"))
(defn emit-method-parts
  "Given remaining selector parts and arguments, returns a string representing the rest of an Objective-C message send. selparts and args should both be sequences of strings."
  [selparts args]
  ;; (An interior comment line had lost its `;;` marker in this copy of the
  ;; file, which broke the form; it is restored below.)
  (emits
   (cond (empty? selparts) (emit-comma-sep args)
         (empty? args) (emits " " (first selparts))
         :else (emits " " (first selparts) (first args)))
   ; If we had both a selector part and an argument this time,
   (if (and
        (and (seq selparts) (seq args))
        ;; ... and we have at least one more of either
        (or (next selparts) (next args)))
     ; ... recur
     (emit-method-parts (next selparts) (next args)))))
;; Literal-constant emission, dispatched on the constant's Clojure class.
;; Scalars become boxed Objective-C literals (@n, @"s", @(YES)).
(defmulti emit-constant class)
(defmethod emit-constant nil [x] (emits "nil"))
(defmethod emit-constant Long [x] (emits "@" x))
(defmethod emit-constant Integer [x] (emits "@" x)) ; reader puts Integers in metadata
(defmethod emit-constant Double [x] (emits "@" x))
(defmethod emit-constant String [x]
  (emits "@" (wrap-in-double-quotes (escape-string x))))
(defmethod emit-constant Boolean [x] (emits (if x "@(YES)" "@(NO)")))
(defmethod emit-constant Character [x]
  ;; emitted as a one-character NSString literal
  (emits "@" (wrap-in-double-quotes (escape-char x))))
(defmethod emit-constant java.util.regex.Pattern [x]
  ;; Split inline (?flags) off the pattern, then emit /pattern/flags.
  ;; NOTE(review): this is JS regex-literal syntax, not Objective-C — looks
  ;; like unported ClojureScript code; confirm it is ever reached.
  (let [[_ flags pattern] (re-find #"^(?:\(\?([idmsux]*)\))?(.*)" (str x))]
    (emits \/ (.replaceAll (re-matcher #"/" pattern) "\\\\/") \/ flags)))
;; Keywords/symbols are interned at runtime via cljm_keyword / cljm_symbol,
;; spelled ":ns/name" and "'ns/name" respectively.
(defmethod emit-constant clojure.lang.Keyword [x]
  (emits "cljm_keyword(@\":")
  (emits (if (namespace x)
           (str (namespace x) "/") "")
         (name x))
  (emits "\")"))

(defmethod emit-constant clojure.lang.Symbol [x]
  (emits "cljm_symbol(@\"'")
  (emits (if (namespace x)
           (str (namespace x) "/") "")
         (name x))
  (emits "\")"))
(defn- emit-meta-constant
  ;; Emit body; if the form x carries metadata, wrap the emission in
  ;; cljm.core.with_meta(<body>, <meta>).
  ;; NOTE(review): the call syntax is JS-style (dots/parens), likely legacy
  ;; from the ClojureScript compiler — confirm the runtime expects this.
  [x & body]
  (if (meta x)
    (do
      (emits "cljm.core.with_meta(" body ",")
      (emit-constant (meta x))
      (emits ")"))
    (emits body)))
;; Collection constants. Each element is wrapped in a zero-arg fn so its
;; emission is deferred until emits walks the seq (emits invokes fns).
;; NOTE(review): these emit JS-style cljm.core.* constructor calls except the
;; empty list (ObjC literal @[]) — confirm which the runtime supports.
(defmethod emit-constant clojure.lang.PersistentList$EmptyList [x]
  (emit-meta-constant x "@[]"))

(defmethod emit-constant clojure.lang.PersistentList [x]
  (emit-meta-constant x
    (concat ["cljm.core.list("]
            (comma-sep (map #(fn [] (emit-constant %)) x))
            [")"])))

(defmethod emit-constant clojure.lang.Cons [x]
  (emit-meta-constant x
    (concat ["cljm.core.list("]
            (comma-sep (map #(fn [] (emit-constant %)) x))
            [")"])))

(defmethod emit-constant clojure.lang.IPersistentVector [x]
  (emit-meta-constant x
    (concat ["cljm.core.vec(["]
            (comma-sep (map #(fn [] (emit-constant %)) x))
            ["])"])))

(defmethod emit-constant clojure.lang.IPersistentMap [x]
  ;; keys and values are flattened into one alternating argument list
  (emit-meta-constant x
    (concat ["cljm.core.hash_map("]
            (comma-sep (map #(fn [] (emit-constant %))
                            (apply concat x)))
            [")"])))

(defmethod emit-constant clojure.lang.PersistentHashSet [x]
  (emit-meta-constant x
    (concat ["cljm.core.set(["]
            (comma-sep (map #(fn [] (emit-constant %)) x))
            ["])"])))
(defn emit-block
  "Emit the statement nodes followed by the trailing expression ret.
  NOTE(review): the context argument is currently unused."
  [context statements ret]
  (when statements
    (emits statements))
  (emit ret))

(defmacro emit-wrap
  "Wrap body for its context: prefix \"return \" in :return context and
  terminate with \";\" unless in :expr context."
  [env & body]
  `(let [env# ~env]
     (when (= :return (:context env#)) (emits "return "))
     ~@body
     (when-not (= :expr (:context env#)) (emitln ";"))))
;; :no-op nodes emit nothing.
(defmethod emit :no-op [m])

;; Variable reference. Locals emit the munged name (or a property read for
;; deftype fields). Non-locals emit either a direct CLJMVar reference or a
;; dynamic runtime lookup, then unwrap with .value — except ObjC classes and
;; deftypes, which use .class instead.
(defmethod emit :var
  [{:keys [info env] :as arg}]
  (let [n (:name info)
        ;; names in the "js" pseudo-namespace are emitted bare (legacy)
        n (if (= (namespace n) "js")
            (name n)
            n)
        dynamic (:dynamic info)
        local (:local info)
        field (:field info)
        ns (:ns info)
        type? (:type info)
        is-protocol? (:is-protocol info)]
    (emit-wrap env
      (if-not local
        (if is-protocol?
          (emits "@protocol(" (munge n) ")")
          (do
            (if-not dynamic
              (emits (munge n))
              (emits "cljm_var_lookup(@\"" n "\")"))
            (if-not (or (= ns 'ObjectiveCClass) type?)
              (emits ".value"))
            (when (or (= ns 'ObjectiveCClass) type?)
              (emits ".class"))))
        (if field
          (emits "[self " (munge n) "]")
          (emits (munge n)))))))
;; NOTE(review): JS-style with_meta call — likely unported cljs code.
(defmethod emit :meta
  [{:keys [expr meta env]}]
  (emit-wrap env
    (emits "cljm.core.with_meta(" expr "," meta ")")))

;; Map literal as an ObjC dictionary literal @{ k: v, ... }.
(defmethod emit :map
  [{:keys [env keys vals]}]
  (emit-wrap env
    (if (zero? (count keys))
      (emits "@{}")
      (emits "@{ "
             (comma-sep (map (fn [k v]
                               (with-out-str (emit k) (print ": ") (emit v)))
                             keys vals))
             " }"))))

;; Vector literal as an ObjC array literal @[ ... ].
(defmethod emit :vector
  [{:keys [items env]}]
  (emit-wrap env
    (if (empty? items)
      (emits "@[]")
      (emits "@[ "
             (comma-sep items) " ]"))))

;; Set literal as an NSSet, using the nil-terminated setWithObjects:.
(defmethod emit :set
  [{:keys [items env]}]
  (emit-wrap env
    (if (empty? items)
      (emits "[NSSet set]")
      (emits "[NSSet setWithObjects:"
             (comma-sep items) ", nil]"))))

;; Constants in statement position are dropped entirely (no effect).
(defmethod emit :constant
  [{:keys [form env]}]
  (when-not (= :statement (:context env))
    (emit-wrap env (emit-constant form))))
(defn get-tag
  "Explicit type tag of AST node e: its own :tag, else its var info's :tag."
  [e]
  (or (:tag e)
      (get-in e [:info :tag])))
(defn infer-tag
  "Best-effort static tag for AST node e: an explicit tag if present,
  otherwise inferred through :let bodies, :if branches that agree, and
  boolean constants. Returns nil when unknown."
  [e]
  (if-let [tag (get-tag e)]
    tag
    (case (:op e)
      :let (infer-tag (:ret e))
      :if (let [then-tag (infer-tag (:then e))
                else-tag (infer-tag (:else e))]
            (when (= then-tag else-tag)
              then-tag))
      :constant (case (:form e)
                  true 'boolean
                  false 'boolean
                  nil)
      nil)))
(defn safe-test?
  "Truthy when expression e is statically known to be a safe direct test:
  tagged boolean/seq, or a constant that is neither \"\" nor numeric zero."
  [e]
  (let [tag (infer-tag e)]
    (or (#{'boolean 'seq} tag)
        (when (= :constant (:op e))
          (let [form (:form e)]
            (and (not (and (string? form) (= "" form)))
                 (not (and (number? form) (zero? form)))))))))
;; Conditional: a C ternary in expression position, if/else otherwise.
;; cljm_truthy applies Clojure truthiness (only nil/false are false).
(defmethod emit :if
  [{:keys [test then else env]}]
  (let [context (:context env)]
    (if (= :expr context)
      (emits "cljm_truthy(" test ") ? " then " : " else)
      (do
        (emitln "if(cljm_truthy(" test ")) {")
        (emitln then)
        (emitln "} else {")
        (emitln else)
        (emitln "}")))))

;; NOTE(review): emits JavaScript throw / IIFE syntax rather than ObjC
;; @throw — apparently unported ClojureScript code; confirm before relying.
(defmethod emit :throw
  [{:keys [throw env]}]
  (if (= :expr (:context env))
    (emits "(function(){throw " throw "})()")
    (emitln "throw " throw ";")))
(defn emit-comment
  "Emit a nicely formatted comment string."
  ;; doc is the primary docstring; jsdoc an optional seq of extra lines.
  ;; Emits a /** ... */ block only when there is something to say.
  [doc jsdoc]
  (let [docs (when doc [doc])
        docs (if jsdoc (concat docs jsdoc) docs)
        docs (remove nil? docs)]
    (letfn [(print-comment-lines [e] (doseq [next-line (string/split-lines e)]
                                       (emitln "* " (string/trim next-line))))]
      (when (seq docs)
        (emitln "/**")
        (doseq [e docs]
          (when e
            (print-comment-lines e)))
        (emitln "*/")))))
(defn add-extern!
  "Record ast for later header (.h) declaration via generate-header."
  [ast]
  (swap! *externs* conj ast))

(defn add-static-expr!
  "Record ast for file-scope emission (emit-static) at end of compilation."
  [ast]
  (swap! *static-exprs* conj ast))
;; Top-level def: record the var for header generation, then emit either a
;; static CLJMVar assignment or a cljm_var_def runtime registration
;; (for ^:dynamic vars). Protocol vars emit nothing here.
(defmethod emit :def
  [{:keys [name init env doc dynamic protocol] :as ast}]
  ;; TODO: don't extern private fn's
  (if-not protocol
    (do (add-extern! ast)
        (when init
          (emit-comment doc (:jsdoc init))
          (if-not dynamic
            (let [mname (munge name)]
              (emits mname " = [[CLJMVar alloc] initWithValue:" init "]"))
            (emits "cljm_var_def(@\"" name "\", " init ")"))
          (when-not (= :expr (:context env)) (emitln ";")))
        (emitln))))
;; NOTE(review): emits JavaScript (var/function/cljm.core.* call syntax) —
;; unported ClojureScript support code for apply; likely dead in the ObjC
;; backend. Kept verbatim; confirm before removal.
(defn emit-apply-to
  [{:keys [name params env]}]
  (let [arglist (gensym "arglist__")
        delegate-name (str (munge name) "__delegate")
        params (map munge params)]
    (emitln "(function (" arglist "){")
    ;; bind each fixed param to the i-th element of the arglist seq
    (doseq [[i param] (map-indexed vector (butlast params))]
      (emits "var " param " = cljm.core.first(")
      (dotimes [_ i] (emits "cljm.core.next("))
      (emits arglist ")")
      (dotimes [_ i] (emits ")"))
      (emitln ";"))
    ;; the last param takes the rest (variadic tail)
    (if (< 1 (count params))
      (do
        (emits "var " (last params) " = cljm.core.rest(")
        (dotimes [_ (- (count params) 2)] (emits "cljm.core.next("))
        (emits arglist)
        (dotimes [_ (- (count params) 2)] (emits ")"))
        (emitln ");")
        (emitln "return " delegate-name "(" (string/join ", " params) ");"))
      (do
        (emits "var " (last params) " = ")
        (emits "cljm.core.seq(" arglist ");")
        (emitln ";")
        (emitln "return " delegate-name "(" (string/join ", " params) ");")))
    (emits "})")))
(defn emit-start-fn-var
  "Open a CLJMFunction block literal whose parameters are args (all id).
  Unless imp-fn (an ObjC method implementation), append a trailing
  `id cljm_vararg, ...` varargs tail so calls can be nil-terminated."
  [args imp-fn]
  (emits "[[CLJMFunction alloc] initWithBlock:^ id (")
  (emits (comma-sep (map #(str "id " (munge %)) args)))
  (when-not imp-fn
    (when (> (count args) 0)
      (emits ", "))
    (emits "id cljm_vararg, ..."))
  (emitln ") {"))

(defn emit-end-fn-var
  "Close the block literal opened by emit-start-fn-var."
  []
  (emitln "}]"))
(defn emit-fn-method
  "Emit one fixed-arity fn body as a CLJMFunction block. For imp-fn (ObjC
  method implementations) the first param is aliased to self when it isn't
  already named self. recur targets are wrapped in while(YES)/break so
  :recur can emit `continue`."
  [{:keys [gthis name variadic params statements ret env recurs max-fixed-arity]} imp-fn]
  (emit-wrap env
    (emit-start-fn-var params imp-fn)
    (when imp-fn
      (let [n (munge (first params))]
        (when (not= n 'self) (emitln "id self = " n ";"))))
    (when recurs (emitln "while(YES) {"))
    (emit-block :return statements ret)
    (when recurs
      (emitln "break;")
      (emitln "}"))
    (emit-end-fn-var)))
(defn emit-variadic-fn-method
  "Like emit-fn-method, but the last param is a rest-arg: the emitted block
  takes the fixed params plus a nil-terminated vararg tail, which is
  collected into an NSMutableArray bound to the last (rest) parameter."
  [{:keys [gthis name variadic params statements ret env recurs max-fixed-arity] :as f} imp-fn]
  (emit-wrap env
    (emit-start-fn-var (drop-last params) imp-fn)
    (when imp-fn
      (let [n (munge (first params))]
        (when (not= n 'self) (emitln "id self = " n ";"))))
    (let [lastn (munge (last params))]
      (emitln "NSMutableArray *" lastn " = [NSMutableArray array];")
      (emitln "va_list cljm_args;")
      (emitln "va_start(cljm_args, cljm_vararg);")
      ;; walk the nil-terminated vararg list into the rest array
      (emitln "for (id cljm_currentArg = cljm_vararg; cljm_currentArg != nil; cljm_currentArg = va_arg(cljm_args, id)) {")
      (emitln "\t[" lastn " addObject:cljm_currentArg];")
      (emitln "}")
      (emitln "va_end(cljm_args);"))
    (when recurs (emitln "while(YES) {"))
    (emit-block :return statements ret)
    (when recurs
      (emitln "break;")
      (emitln "}"))
    (emit-end-fn-var)))
;; fn expression. Single-method fns emit one CLJMFunction directly.
;; Multi-arity fns emit an outer dispatching block that collects its varargs
;; into an array and switches on argument count to pick the right inner
;; CLJMFunction. loop-locals handling retains JS-style IIFE wrapping (legacy).
;; NOTE(review): has-name? and maxparams are bound but unused.
;; (A commented-out emitln inside the switch had lost its ';' marker in this
;; copy of the file; it is restored as a comment below.)
(defmethod emit :fn
  [{:keys [name env methods max-fixed-arity variadic recur-frames loop-lets imp-fn]}]
  ;;fn statements get erased, serve no purpose and can pollute scope if named
  (when-not (= :statement (:context env))
    (let [loop-locals (->> (concat (mapcat :names (filter #(and % @(:flag %)) recur-frames))
                                   (mapcat :names loop-lets))
                           (map munge)
                           seq)]
      (when loop-locals
        (when (= :return (:context env))
          (emits "return "))
        (emitln "((function (" (comma-sep loop-locals) "){")
        (when-not (= :return (:context env))
          (emits "return ")))
      (if (= 1 (count methods))
        (if variadic
          (emit-variadic-fn-method (assoc (first methods) :name name) imp-fn)
          (emit-fn-method (assoc (first methods) :name name) imp-fn))
        (let [has-name? (and name true)
              name (or name (gensym))
              mname (munge name)
              maxparams (map munge (apply max-key count (map :params methods)))
              ;; map of munged per-arity name -> method AST
              mmap (into {}
                         (map (fn [method]
                                [(munge (symbol (str mname "__" (count (:params method)))))
                                 method])
                              methods))
              ms (sort-by #(-> % second :params count) (seq mmap))]
          (when (= :return (:context env))
            (emits "return "))
          (emitln "[[CLJMFunction alloc] initWithBlock:^ id (id cljm_vararg, ...) {")
          (emitln "__block CLJMVar *" mname ";")
          ;; one inner CLJMFunction per arity
          (doseq [[n meth] ms]
            (emits "CLJMFunction *" n " = ")
            (if (:variadic meth)
              (emit-variadic-fn-method meth imp-fn)
              (emit-fn-method meth imp-fn))
            (emitln ";")
            (emitln))
          ;; dispatcher: switch on collected argument count
          (emitln mname " = [[CLJMVar alloc] initWithValue:[[CLJMFunction alloc] initWithBlock:^ id (NSArray *cljm_args) {")
          (emitln "switch(cljm_args.count) {")
          (doseq [[n meth] ms]
            (if (:variadic meth)
              (do (emitln "default:")
                  (emitln "return ((id (^)(id, ...))[" n " block])(cljm_args[0], nil);")
                  (emitln "break;"))
              (let [pcnt (count (:params meth))]
                (emitln "case " pcnt ":")
                (emits "return ((id (^)(id, ...))[" n " block])(")
                (dotimes [n pcnt] ; shadows n intentionally: the arg index
                  (emits "cljm_args[" n "], "))
                (emits "nil);")
                ;(emitln "return ((id (^)(id, ...))[" n " block])(cljm_args, nil);")
                (emitln "break;"))))
          (emitln "}")
          (emitln "return nil;")
          (emitln "}]];")
          ;; collect the outer nil-terminated varargs, then dispatch
          (emitln "NSMutableArray *cljm_collectedArgs = [NSMutableArray array];")
          (emitln "va_list cljm_args;")
          (emitln "va_start(cljm_args, cljm_vararg);")
          (emitln "for (id cljm_currentArg = cljm_vararg; cljm_currentArg != nil; cljm_currentArg = va_arg(cljm_args, id)) {")
          (emitln "\t[cljm_collectedArgs addObject:cljm_currentArg];")
          (emitln "}")
          (emitln "va_end(cljm_args);")
          (emitln "return ((id (^)(NSArray *))[(CLJMFunction *)[" mname " value] block])(cljm_collectedArgs);")
          (emitln "}]")))
      (when loop-locals
        (emitln ";})(" (comma-sep loop-locals) "))")))))
;; do: emit statements then the return expression; braces only in
;; expression context.
(defmethod emit :do
  [{:keys [statements ret env]}]
  (let [context (:context env)]
    (when (and statements (= :expr context)) (emits "{"))
    ;(when statements (emitln "{"))
    (emit-block context statements ret)
    ;(when statements (emits "}"))
    (when (and statements (= :expr context)) (emits "}"))))
;; try/catch/finally.
;; NOTE(review): emits JavaScript syntax — try{}, catch(name){}, finally{},
;; and (function(){...})() wrapping — apparently unported ClojureScript code;
;; confirm the ObjC backend ever reaches this path.
(defmethod emit :try*
  [{:keys [env try catch name finally]}]
  (let [context (:context env)
        subcontext (if (= :expr context) :return context)]
    (if (or name finally)
      (do
        (when (= :expr context) (emits "(function (){"))
        (emits "try{")
        (let [{:keys [statements ret]} try]
          (emit-block subcontext statements ret))
        (emits "}")
        (when name
          (emits "catch (" (munge name) "){")
          (when catch
            (let [{:keys [statements ret]} catch]
              (emit-block subcontext statements ret)))
          (emits "}"))
        (when finally
          (let [{:keys [statements ret]} finally]
            (assert (not= :constant (:op ret)) "finally block cannot contain constant")
            (emits "finally {")
            (emit-block subcontext statements ret)
            (emits "}")))
        (when (= :expr context) (emits "})()")))
      ;; no catch/finally: just the body (wrapped only in expression context)
      (let [{:keys [statements ret]} try]
        (when (and statements (= :expr context)) (emits "(function (){"))
        (emit-block subcontext statements ret)
        (when (and statements (= :expr context)) (emits "})()"))))))
;; let/loop: each binding becomes `id name = init;`. In expression context
;; the whole body is wrapped in an immediately-invoked `^ id { ... }()` block.
;; loop adds while(YES)/break so :recur can emit `continue`.
(defmethod emit :let
  [{:keys [bindings statements ret env loop]}]
  (let [context (:context env)]
    (when (= :expr context) (emits "^ id {"))
    (doseq [{:keys [name init]} bindings]
      (emitln "id " (munge name) " = " init ";"))
    (when loop (emitln "while(YES) {"))
    (emit-block (if (= :expr context) :return context) statements ret)
    (when loop
      (emitln "break;")
      (emitln "}"))
    ;(emits "}")
    (when (= :expr context) (emits "}()"))))
;; recur: compute all new values into temporaries first (simultaneous
;; rebinding semantics), assign them to the loop locals, then `continue`
;; the enclosing while(YES) emitted by :let/:fn.
(defmethod emit :recur
  [{:keys [frame exprs env]}]
  (let [temps (vec (take (count exprs) (repeatedly gensym)))
        names (:names frame)]
    (emitln "{")
    (dotimes [i (count exprs)]
      (emitln "id " (temps i) " = " (exprs i) ";"))
    (dotimes [i (count exprs)]
      (emitln (munge (names i)) " = " (temps i) ";"))
    (emitln "continue;")
    (emitln "}")))
;; letfn: like :let for fn bindings.
;; NOTE(review): wraps expression context in a JS-style (function(){})()
;; rather than the ^ id {} block used by :let — confirm intended.
(defmethod emit :letfn
  [{:keys [bindings statements ret env]}]
  (let [context (:context env)]
    (when (= :expr context) (emits "(function (){"))
    (doseq [{:keys [name init]} bindings]
      (emitln "id " (munge name) " = " init ";"))
    (emit-block (if (= :expr context) :return context) statements ret)
    (when (= :expr context) (emits "})()"))))
(defn protocol-prefix
  "Prefix string for protocol psym: '.' and '/' become '$', plus a trailing '$'."
  [psym]
  (let [s (str psym)]
    (str (.replace (.replace s \. \$) \/ \$) "$")))
(defn protocol-munge
  "Munged name for member x of protocol p: \"<munged p>_<munged x>\"."
  [p x]
  (str (munge p) "_" (munge x)))
;; Function invocation. Four shapes:
;;  - protocol fns become a message send on the (protocol-typed) first arg;
;;  - keyword-in-fn-position becomes objectForKey: on the first arg;
;;  - the 'c pseudo-namespace calls a plain C function;
;;  - otherwise the CLJMFunction's block is cast to the right arity and
;;    invoked with a nil-terminated argument list.
(defmethod emit :invoke
  [{:keys [f args env] :as expr}]
  (let [info (:info f)
        variadic? (:variadic info)
        dynamic? (:dynamic info)
        fn-name (:name info)
        mname (munge fn-name)
        keyword? (and (= (-> f :op) :constant)
                      (keyword? (-> f :form)))
        kwname (-> f :form)
        protocol (:protocol info)
        local? (:local info)
        ns (:ns info)
        c-call? (= ns 'c)]
    (emit-wrap env
      (cond
        ;; drop the leading "-"/sigil from the name part of the protocol fn
        protocol (let [pmname (protocol-munge protocol (apply str (drop 1 (last (string/split (str fn-name) #"/")))))]
                   (emits "[(id<" (munge protocol) ">) " (first args) " ")
                   (emits pmname)
                   (doseq [arg (rest args)]
                     (emits ":" arg " "))
                   (emits "]"))
        keyword? (emits "[" (first args) " objectForKey:cljm_keyword(@\"" kwname "\")]")
        c-call? (emits (name fn-name) "(" (comma-sep args) ")")
        :else (do (emits "((id (^)(")
                  ;; one `id` per arg plus one for the vararg slot
                  (emits (comma-sep (map (fn [x] (str "id")) (concat args (list "cljm_args")))))
                  (emits ", ...))")
                  ;; locals are already the CLJMVar/CLJMFunction value; vars
                  ;; need [... value] to unwrap the CLJMVar first
                  (if-not local?
                    (emits "["))
                  (emits "(CLJMFunction *)[")
                  (if dynamic?
                    (emits "cljm_var_lookup(@\"" fn-name "\")")
                    (emits mname))
                  (if-not local?
                    (emits " value]"))
                  (emits " block])(")
                  (emits (comma-sep (conj args "nil")) ")"))))))
(comment (defmethod emit :invoke
[{:keys [f args env] :as expr}]
(let [info (:info f)
fn? (and ana/*cljm-static-fns*
(not (:dynamic info))
(:fn-var info))
protocol (:protocol info)
proto? (let [tag (infer-tag (first (:args expr)))]
(and protocol tag
(or ana/*cljm-static-fns*
(:protocol-inline env))
(or (= protocol tag)
(when-let [ps (:protocols (ana/resolve-existing-var (dissoc env :locals) tag))]
(ps protocol)))))
opt-not? (and (= (:name info) 'cljm.core/not)
(= (infer-tag (first (:args expr))) 'boolean))
ns (:ns info)
js? (= ns 'js)
goog? (when ns
(or (= ns 'goog)
(when-let [ns-str (str ns)]
(= (get (string/split ns-str #"\.") 0 nil) "goog"))))
keyword? (and (= (-> f :op) :constant)
(keyword? (-> f :form)))
[f variadic-invoke]
(if fn?
(let [arity (count args)
variadic? (:variadic info)
mps (:method-params info)
mfa (:max-fixed-arity info)]
(cond
if only one method , no renaming needed
(and (not variadic?)
(= (count mps) 1))
[f nil]
;; direct dispatch to variadic case
(and variadic? (> arity mfa))
[(update-in f [:info :name]
(fn [name] (symbol (str (munge name) ".cljm$lang$arity$variadic"))))
{:max-fixed-arity mfa}]
;; direct dispatch to specific arity case
:else
(let [arities (map count mps)]
(if (some #{arity} arities)
[(update-in f [:info :name]
(fn [name] (symbol (str (munge name) ".cljm$lang$arity$" arity)))) nil]
[f nil]))))
[f nil])]
(emit-wrap env
(cond
opt-not?
(emits "!(" (first args) ")")
proto?
(let [pimpl (str (protocol-prefix protocol)
(munge (name (:name info))) "$arity$" (count args))]
(emits (first args) "." pimpl "(" (comma-sep args) ")"))
keyword?
(emits "(new cljm.core.Keyword(" f ")).call(" (comma-sep (cons "null" args)) ")")
variadic-invoke
(let [mfa (:max-fixed-arity variadic-invoke)]
(emits f "(" (comma-sep (take mfa args))
(when-not (zero? mfa) ",")
"cljm.core.array_seq([" (comma-sep (drop mfa args)) "], 0))"))
(or fn? js? goog?)
(emits f "(" (comma-sep args) ")")
:else
(if (and ana/*cljm-static-fns* (= (:op f) :var))
(let [fprop (str ".cljm$lang$arity$" (count args))]
(emits "(" f fprop " ? " f fprop "(" (comma-sep args) ") : " f ".call(" (comma-sep (cons "null" args)) "))"))
(if variadic?
(emits f "(" (comma-sep args) ", nil)")
(emits f "(" (comma-sep args) ")"))))))))
;; Constructor call: [[Ctor alloc] init...] with initWithFields plus one ":"
;; per constructor argument.
;; NOTE(review): `method` and `init-args` are bound but unused, and the full
;; `args` seq (not init-args) is passed to emit-method-parts — this looks
;; suspicious; confirm the intended selector shape against the
;; initWithFields:...: generated for deftypes before changing anything.
(defmethod emit :new
  [{:keys [ctor args env]}]
  (emit-wrap env
    (let [method (first args)
          init-args (rest args)
          init-meth (if (seq args)
                      (reduce (fn [xs x] (str xs ":")) "initWithFields" args)
                      "init")]
      (emits "[[" ctor " alloc]")
      (emit-method-parts (sel-parts init-meth) args)
      (emits "]"))))
;; set!: plain C assignment of val to the target lvalue.
(defmethod emit :set!
  [{:keys [target val env]}]
  (emit-wrap env (emits target " = " val)))
;; ns form: emits the #import preamble of the generated .m — Foundation, the
;; cljm runtime, this namespace's own header, cljm.core (unless compiling
;; core itself), and one import per required/used namespace.
(defmethod emit :ns
  [{:keys [name requires uses requires-macros env]}]
  (emitln "#import <Foundation/Foundation.h>")
  (emitln "#import <CLJMRuntime/CLJMRuntime.h>")
  (emitln "#import <objc/runtime.h>")
  (when include-core
    (when-not (= name 'cljm.core)
      (emitln "#import \"cljm_DOT_core.h\"")))
  (emitln "#import \"" (munge name) ".h\"")
  (doseq [lib (into (vals requires) (distinct (vals uses)))]
    (emitln "#import \"" (munge lib) ".h\"")))
;; defprotocol*/deftype* emit no inline code here: they are recorded for
;; header generation, and deftypes additionally for file-scope
;; @implementation emission via *static-exprs*. reify'd deftypes are skipped.
(defmethod emit :defprotocol*
  [ast]
  (add-extern! ast))

(defmethod emit :deftype*
  [{:keys [t fields pmasks reify] :as ast}]
  (when-not reify
    (add-extern! ast)
    (add-static-expr! ast)))
;; defrecord*.
;; NOTE(review): emits a JavaScript constructor function (this.x = x,
;; arguments.length, goog-style @constructor jsdoc) — unported ClojureScript
;; code; almost certainly dead in the ObjC backend. Kept verbatim.
(defmethod emit :defrecord*
  [{:keys [t fields pmasks]}]
  (let [fields (concat (map munge fields) '[__meta __extmap])]
    (emitln "")
    (emitln "/**")
    (emitln "* @constructor")
    (doseq [fld fields]
      (emitln "* @param {*} " fld))
    (emitln "* @param {*=} __meta ")
    (emitln "* @param {*=} __extmap")
    (emitln "*/")
    (emitln (munge t) " = (function (" (comma-sep fields) "){")
    (doseq [fld fields]
      (emitln "this." fld " = " fld ";"))
    (doseq [[pno pmask] pmasks]
      (emitln "this.cljm$lang$protocol_mask$partition" pno "$ = " pmask ";"))
    (emitln "if(arguments.length>" (- (count fields) 2) "){")
    (emitln "this.__meta = __meta;")
    (emitln "this.__extmap = __extmap;")
    (emitln "} else {")
    (emits "this.__meta=")
    (emit-constant nil)
    (emitln ";")
    (emits "this.__extmap=")
    (emit-constant nil)
    (emitln ";")
    (emitln "}")
    (emitln "})")))
;; Host interop: field access emits [target field]; method invocation emits a
;; message send assembled from the selector parts interleaved with arguments.
(defmethod emit :dot
  [{:keys [target field method args env]}]
  (emit-wrap env
    (if field
      ;; munge with an empty reserved set: field names pass through unchanged
      (emits "[" target " " (munge field #{}) "]")
      (do
        (emits "[" target)
        (emit-method-parts (sel-parts (str method)) args)
        (emits "]")))))
;; Inline host code: either a literal code string, or alternating literal
;; segments and compiled argument expressions (trailing nils pad interleave).
;; The :js variant is byte-identical, kept for cljs-derived analyzer output.
(defmethod emit :objc
  [{:keys [env code segs args]}]
  (emit-wrap env
    (if code
      (emits code)
      (emits (interleave (concat segs (repeat nil))
                         (concat args [nil]))))))

(defmethod emit :js
  [{:keys [env code segs args]}]
  (emit-wrap env
    (if code
      (emits code)
      (emits (interleave (concat segs (repeat nil))
                         (concat args [nil]))))))
;; File-scope emission (run after all other forms), dispatched on :op.
(defmulti emit-static :op)

(declare objc-class-munge)

;; @implementation for a deftype.
;; NOTE(review): `methods` is destructured but nothing is emitted between
;; @implementation and @end — method bodies presumably come from elsewhere;
;; confirm.
(defmethod emit-static :deftype*
  [{:keys [t methods]}]
  (emitln)
  (emitln "@implementation " (objc-class-munge t))
  (emitln "@end")
  (emitln))
(defn forms-seq
  "Seq of forms in a Clojure or ClojureScript file."
  ([f]
   (forms-seq f (clojure.lang.LineNumberingPushbackReader. (io/reader f))))
  ([f ^java.io.PushbackReader rdr]
   ;; NOTE(review): `read` with eof-value nil also stops at a literal
   ;; top-level nil form — an unlikely but real truncation hazard.
   ;; The reader runs with *ns* bound so ::keywords resolve consistently.
   (if-let [form (binding [*ns* ana/*reader-ns*] (read rdr nil nil))]
     (lazy-seq (cons form (forms-seq f rdr)))
     (.close rdr))))
(defn rename-to
  "Change the file extension from .cljm to ext (e.g. \".m\" or \".h\").
  Takes a File or a String. Always returns a String."
  [file-str ext]
  ;; Coerce with str so java.io.File arguments work as documented:
  ;; clojure.string/replace requires a CharSequence, which File is not.
  (clojure.string/replace (str file-str) #"\.cljm$" ext))
(defn mkdirs
  "Create all parent directories for the passed file."
  ;; Canonicalizes first so relative paths resolve before taking the parent.
  [^java.io.File f]
  (.mkdirs (.getParentFile (.getCanonicalFile f))))
(defmacro with-core-cljm
  "Ensure that core.cljm has been loaded."
  ;; Analyzes cljm/core.cljm once (keyed by :defs presence in the analyzer's
  ;; namespace registry) before running body; a no-op when include-core is false.
  [& body]
  `(do (when include-core
         (when-not (:defs (get @ana/namespaces 'cljm.core))
           (ana/analyze-file "cljm/core.cljm")))
       ~@body))
(defn compile-file*
  "Analyze and emit every form of src into dest (an open writer target).
  The first :ns form opens a __attribute__((constructor)) namespace-init
  function wrapping all top-level code in an @autoreleasepool; at EOF the
  braces are closed and file-scope forms (*static-exprs*) are emitted.
  Returns {:ns .. :provides .. :requires .. :file dest}.
  (A stray debug println had lost its ';' marker in this copy of the file;
  it is restored as the comment it originally was.)"
  [src dest]
  (with-core-cljm
    (with-open [out ^java.io.Writer (io/make-writer dest {})]
      (binding [*out* out
                ana/*cljm-ns* 'cljm.user
                ana/*cljm-file* (.getPath ^java.io.File src)
                *data-readers* tags/*cljm-data-readers*
                *position* (atom [0 0])
                *static-exprs* (atom [])]
        (loop [forms (forms-seq src)
               ns-name nil
               deps nil]
          (if (seq forms)
            (let [env (ana/empty-env)
                  ast (ana/analyze env (first forms))]
              ;(binding [*out* *err*]
              ;  (println "Goodbye, world!")
              ;  (clojure.pprint/pprint ast))
              (emit ast)
              (if (= (:op ast) :ns)
                (let [found-ns (:name ast)]
                  ; TODO: It'd be nice to only init namespaces that are
                  ; actually used.
                  (emitln "__attribute__((constructor))")
                  (emitln "void " (init-func-name found-ns) "(void) {\n")
                  (emitln "@autoreleasepool {")
                  (recur (rest forms) found-ns (merge (:uses ast) (:requires ast))))
                (recur (rest forms) ns-name deps)))
            (do
              ;; close @autoreleasepool and the init function
              (emitln "}")
              (emitln "}")
              (doseq [ast @*static-exprs*]
                (emit-static ast))
              {:ns (or ns-name 'cljm.user)
               :provides [ns-name]
               :requires (if (= ns-name 'cljm.core) (set (vals deps)) (conj (set (vals deps)) 'cljm.core))
               :file dest})))))))
(defn requires-compilation?
  "Return true if the src file requires compilation."
  [^java.io.File src ^java.io.File dest]
  ; for the sake of debugging right now, we'll always recompile everything
  true)
;; Original timestamp-based implementation, kept for reference:
; (or (not (.exists dest))
;     (> (.lastModified src) (.lastModified dest))))
;; Header (.h) emission for recorded extern ASTs, dispatched on :op.
(defmulti emit-h :op)

;; @protocol declaration: one `- (id)name:(id)a ...;` prototype per arity of
;; each method, preceded by its doc comment when one is present.
(defmethod emit-h :defprotocol*
  [{:keys [p index methods]}]
  (emitln)
  (emitln "@protocol " (munge p) " <NSObject>")
  (emitln)
  (doseq [method methods]
    ;; method shape: (name [arity-vec]+ doc-string?) — drop the name's
    ;; leading sigil character before munging
    (let [mname (protocol-munge p (apply str (drop 1 (seq (str (first method))))))
          arities (take-while vector? (drop 1 method))
          has-comment? (string? (last method))
          comment (if has-comment? (last method) nil)]
      (when has-comment?
        (emit-comment comment ""))
      (doseq [arity arities]
        (emits "- (id)" mname)
        ;; the first element of each arity vector is the receiver; skip it
        (doseq [arg (drop 1 arity)]
          (emits ":(id)" (munge arg) " "))
        (emits ";")
        (emitln))
      (emitln)))
  (emitln)
  (emitln "@end")
  (emitln))
;; Each top-level def gets a CLJMVar declaration in the header.
(defmethod emit-h :def
  [ast]
  (let [mname (munge (:name ast))]
    (emitln "CLJMVar *" mname ";")))
(defn- objc-class-munge
  "Munge class symbol t for Objective-C: when the namespace is entirely
  upper-case (a framework-style prefix), join namespace and name bare;
  otherwise munge normally.
  NOTE(review): assumes t is namespaced — (namespace t) nil would NPE here."
  [t]
  (if (= (string/upper-case (namespace t)) (namespace t))
    (str (namespace t) (name t))
    (munge t)))
(defn- selector-name
  "Strip a selector's sigil: drop a leading '-' or a trailing '!'; otherwise
  return the name unchanged (always as a string)."
  [sel]
  (let [cs (seq sel)]
    (cond
      (= \- (first cs)) (apply str (rest cs))
      (= \! (last cs)) (apply str (butlast cs))
      :else (apply str cs))))
;; Header emission for a deftype: @interface with superclass, adopted
;; protocols, one strong id @property per field, and `- (id)...;` prototypes
;; for every protocol/ObjC method.
(defmethod emit-h :deftype*
  [{:keys [t fields superclass protocols methods env] :as ast}]
  (emitln)
  (let [class-name (objc-class-munge t)
        superclass (objc-class-munge superclass)]
    (emits "@interface " class-name " : " superclass))
  (when (seq? (seq protocols))
    (emits " <" (comma-sep (map objc-class-munge protocols)) ">"))
  (emitln)
  (emitln)
  (doseq [p fields]
    (let [tag (-> p meta :tag)
          type (cond
                 (= 'iboutlet tag) "IBOutlet id"
                 ;; BUG FIX: was a bare `nil?` in test position — a fn value,
                 ;; always truthy — so every non-iboutlet field became "id"
                 ;; and the :else branch (emitting the tag as the type) was
                 ;; unreachable.
                 (nil? tag) "id"
                 :else tag)]
      (emitln "@property (nonatomic, strong) " type " " (munge p) ";")))
  (emitln)
  (doseq [[p ms] methods]
    (doseq [m ms]
      ;; plain ObjC methods (protocol ns ObjectiveCClass) keep their selector;
      ;; protocol methods are prefixed with the munged protocol var name
      (let [p-ns (:ns (ana/resolve-existing-var (dissoc env :locals) p))
            prefix (if (= p-ns 'ObjectiveCClass)
                     ""
                     (str (munge (str p-ns "/" p)) "_"))
            mname (str prefix (selector-name (str (first m))))
            parts (string/split mname #":")
            pair-args (fn [sel arg] (str (munge sel) ":(id)" (munge arg) " "))
            args (drop 1 (second m)) ; first element is the receiver
            sel-parts (if (seq args)
                        (apply str (map pair-args (concat parts (repeat "")) args))
                        (str (first parts)))]
        (emitln "- (id)" sel-parts ";")
        (emitln))))
  (emitln)
  (emitln "@end")
  (emitln))
(defn generate-header
  "Write the companion .h file: a @class CLJMVar forward declaration
  followed by one emit-h declaration per recorded extern AST."
  [externs file]
  (let [dest-file (io/file file)]
    (with-open [out ^java.io.Writer (io/make-writer dest-file {})]
      (binding [*out* out]
        (emitln "@class CLJMVar;")
        (emitln)
        (doseq [ast externs]
          (emit-h ast))))))
(defn compile-file
  "Compiles src to a file of the same name, but with a .m extension,
  in the src file's directory.

  With dest argument, write file to provided location. If the dest
  argument is a file outside the source tree, missing parent
  directories will be created. The src file will only be compiled if
  the dest file has an older modification time.

  Both src and dest may be either a String or a File.

  Returns a map containing {:ns .. :provides .. :requires .. :file ..}.
  If the file was not compiled returns only {:file ...}"
  ([src]
   (let [dest (rename-to src ".m")]
     (compile-file src dest)))
  ([src dest]
   ;; *externs* accumulates declarations for the matching header file
   (binding [*externs* (atom [])]
     (let [src-file (io/file src)
           dest-file (io/file dest)]
       (if (.exists src-file)
         (if (requires-compilation? src-file dest-file)
           (do (mkdirs dest-file)
               (assoc (compile-file* src-file dest-file) :externs @*externs*))
           {:file dest-file, :externs []})
         (throw (java.io.FileNotFoundException. (str "The file " src " does not exist."))))))))
(defn init-func-name
  "Munged name of the generated __attribute__((constructor)) init function
  for namespace ns (see compile-file*)."
  [ns]
  (munge (str ns "/cljm-ns-init")))
(comment
;; flex compile-file
(do
(compile-file "/tmp/hello.cljm" "/tmp/something.js")
(slurp "/tmp/hello.js")
(compile-file "/tmp/somescript.cljm")
(slurp "/tmp/somescript.js")))
(defn path-seq
  "Split a path string into its components on the platform file separator."
  [file-str]
  (string/split file-str
                (re-pattern (java.util.regex.Pattern/quote java.io.File/separator))))
(defn to-path
  "Join path components with sep (defaults to the platform file separator)."
  ([parts]
   (to-path parts java.io.File/separator))
  ([parts sep]
   (string/join sep parts)))
(defn to-target-file
  "Given the source root directory, the output target directory and
  file under the source root, produce the target file."
  ;; Mirrors file's path relative to dir under target, swapping .cljm for ext.
  [^java.io.File dir ^String target ^java.io.File file ext]
  (let [dir-path (path-seq (.getAbsolutePath dir))
        file-path (path-seq (.getAbsolutePath file))
        relative-path (drop (count dir-path) file-path)
        parents (butlast relative-path)
        parent-file (java.io.File. ^String (to-path (cons target parents)))]
    (java.io.File. parent-file ^String (rename-to (last relative-path) ext))))
(defn cljm-files-in
  "Return a sequence of all .cljm files in the given directory."
  [dir]
  (for [f (file-seq dir)
        :let [name (.getName ^java.io.File f)]
        :when (and (.endsWith name ".cljm")
                   (not= \. (first name))
                   (not (contains? cljm-reserved-file-names name)))]
    f))
(defn move-and-rename
  "Move the generated .m/.h pair into target-dir, renamed to the munged
  namespace name.
  NOTE(review): File.renameTo return values are ignored — a failed move
  (e.g. across filesystems) is silent."
  [^java.io.File m-file ^java.io.File h-file ns target-dir]
  (let [mname (munge ns)]
    (.renameTo m-file (java.io.File. (str target-dir java.io.File/separator mname ".m")))
    (.renameTo h-file (java.io.File. (str target-dir java.io.File/separator mname ".h")))))
(defn compile-root
  "Looks recursively in src-dir for .cljm files and compiles them to
  .m files. If target-dir is provided, output will go into this
  directory mirroring the source directory structure. Returns a list
  of maps containing information about each file which was compiled
  in dependency order."
  ([src-dir]
   (compile-root src-dir "out"))
  ([src-dir target-dir]
   (let [src-dir-file (io/file src-dir)]
     (loop [cljm-files (cljm-files-in src-dir-file)
            output-files []]
       (if (seq cljm-files)
         (let [cljm-file (first cljm-files)
               m-file ^java.io.File (to-target-file src-dir-file target-dir cljm-file ".m")
               h-file ^java.io.File (to-target-file src-dir-file target-dir cljm-file ".h")
               ;; compile first: ns-info carries :ns and the collected :externs
               ns-info (compile-file cljm-file m-file)]
           (generate-header (:externs ns-info) h-file)
           ;; rename both outputs to the munged namespace name
           (move-and-rename m-file h-file (:ns ns-info) target-dir)
           (recur (rest cljm-files) (conj output-files (assoc ns-info :file-name (.getPath m-file)))))
         output-files)))))
| null | https://raw.githubusercontent.com/joshaber/clojurem/cf60fc6a8ef0e9065a1219c33ca39176ee88c2de/src/clj/cljm/compiler.clj | clojure | The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 (-1.0.php)
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software.
Handle printable escapes before ASCII
Handle non-printable escapes
Print simple ASCII characters
Prints column-aligned line number comments; good test of *position*.
(when *position*
(let [[line column] @*position*]
If we had both a selector part and an argument this time,
... recur
reader puts Integers in metadata
TODO: don't extern private fn's
fn statements get erased, serve no purpose and can pollute scope if named
(when statements (emitln "{"))
(when statements (emits "}"))
(emits "}")
direct dispatch to variadic case
direct dispatch to specific arity case
(binding [*out* *err*]
(clojure.pprint/pprint ast))
TODO: It'd be nice to only init namespaces that are
actually used.
for the sake of debugging right now, we'll always recompile everything
(or (not (.exists dest))
(> (.lastModified src) (.lastModified dest))))
flex compile-file | Copyright ( c ) . All rights reserved .
(set! *warn-on-reflection* true)
(ns cljm.compiler
(:refer-clojure :exclude [munge macroexpand-1])
(:require [clojure.java.io :as io]
[clojure.string :as string]
[cljm.tagged-literals :as tags]
[cljm.analyzer :as ana]
[clojure.pprint :as pp])
(:import java.lang.StringBuilder))
(declare munge)
(declare init-func-name)
(def include-core true)
(def ^:dynamic *externs* nil)
(def ^:dynamic *static-exprs* nil)
(defmacro ^:private debug-prn
[& args]
`(.println System/err (str ~@args)))
(def js-reserved
#{"abstract" "boolean" "break" "byte" "case"
"catch" "char" "class" "const" "continue"
"debugger" "default" "delete" "do" "double"
"else" "enum" "export" "extends" "final"
"finally" "float" "for" "function" "goto" "if"
"implements" "import" "in" "instanceof" "int"
"interface" "let" "long" "native" "new"
"package" "private" "protected" "public"
"return" "short" "static" "super" "switch"
"synchronized" "this" "throw" "throws"
"transient" "try" "typeof" "var" "void"
"volatile" "while" "with" "yield" "methods"})
(def ^:dynamic *position* nil)
(def cljm-reserved-file-names #{"deps.cljm"})
(defn munge
([s] (munge s js-reserved))
([s reserved]
Division is special
ss (string/replace (str s) #"\." "_DOT_")
ss (apply str (map #(if (reserved %) (str % "$") %)
(string/split ss #"(?<=\.)|(?=\.)")))
ms (clojure.lang.Compiler/munge ss)]
(if (symbol? s)
(symbol ms)
ms))))
(defn- comma-sep [xs]
(interpose ", " xs))
(defn- escape-char [^Character c]
(let [cp (.hashCode c)]
(case cp
34 "\\\""
92 "\\\\"
8 "\\b"
12 "\\f"
10 "\\n"
13 "\\r"
9 "\\t"
(if (< 31 cp 127)
Any other character is Unicode
(defn- escape-string [^CharSequence s]
(let [sb (StringBuilder. (count s))]
(doseq [c s]
(.append sb (escape-char c)))
(.toString sb)))
(defn- wrap-in-double-quotes [x]
(str \" x \"))
(defmulti emit :op)
(defn emits [& xs]
(doseq [x xs]
(cond
(nil? x) nil
(map? x) (emit x)
(seq? x) (apply emits x)
(fn? x) (x)
:else (do
(let [s (print-str x)]
(when *position*
(swap! *position* (fn [[line column]]
[line (+ column (count s))])))
(print s)))))
nil)
(defn ^String emit-str [expr]
(with-out-str (emit expr)))
(defn emitln [& xs]
(apply emits xs)
( print ( apply str ( concat ( repeat ( - 120 column ) ) [ " // " ( inc line ) ] ) ) ) ) )
(println)
(when *position*
(swap! *position* (fn [[line column]]
[(inc line) 0])))
nil)
(defn sel-parts
"Splits a selector into its constituent parts, keeping any colons. Returns a sequence of strings."
[sel]
(map second (re-seq #"(:|[a-zA-Z0-9_]+\:?)" sel)))
(defn- emit-comma-sep
[xs]
(doseq [x xs]
(emits ", ")
(emits x))
(emits ", nil"))
(defn emit-method-parts
"Given remaining selector parts and arguments, returns a string representing the rest of an Objective-C message send. selparts and args should both be sequences of strings."
[selparts args]
(emits
(cond (empty? selparts) (emit-comma-sep args)
(empty? args) (emits " " (first selparts))
:else (emits " " (first selparts) (first args)))
(if (and
(and (seq selparts) (seq args))
... and we have at least one more of either
(or (next selparts) (next args)))
(emit-method-parts (next selparts) (next args)))))
(defmulti emit-constant class)
(defmethod emit-constant nil [x] (emits "nil"))
(defmethod emit-constant Long [x] (emits "@" x))
(defmethod emit-constant Double [x] (emits "@" x))
(defmethod emit-constant String [x]
(emits "@" (wrap-in-double-quotes (escape-string x))))
(defmethod emit-constant Boolean [x] (emits (if x "@(YES)" "@(NO)")))
(defmethod emit-constant Character [x]
(emits "@" (wrap-in-double-quotes (escape-char x))))
(defmethod emit-constant java.util.regex.Pattern [x]
(let [[_ flags pattern] (re-find #"^(?:\(\?([idmsux]*)\))?(.*)" (str x))]
(emits \/ (.replaceAll (re-matcher #"/" pattern) "\\\\/") \/ flags)))
(defmethod emit-constant clojure.lang.Keyword [x]
(emits "cljm_keyword(@\":")
(emits (if (namespace x)
(str (namespace x) "/") "")
(name x))
(emits "\")"))
(defmethod emit-constant clojure.lang.Symbol [x]
(emits "cljm_symbol(@\"'")
(emits (if (namespace x)
(str (namespace x) "/") "")
(name x))
(emits "\")"))
(defn- emit-meta-constant [x & body]
(if (meta x)
(do
(emits "cljm.core.with_meta(" body ",")
(emit-constant (meta x))
(emits ")"))
(emits body)))
(defmethod emit-constant clojure.lang.PersistentList$EmptyList [x]
(emit-meta-constant x "@[]"))
(defmethod emit-constant clojure.lang.PersistentList [x]
(emit-meta-constant x
(concat ["cljm.core.list("]
(comma-sep (map #(fn [] (emit-constant %)) x))
[")"])))
(defmethod emit-constant clojure.lang.Cons [x]
(emit-meta-constant x
(concat ["cljm.core.list("]
(comma-sep (map #(fn [] (emit-constant %)) x))
[")"])))
(defmethod emit-constant clojure.lang.IPersistentVector [x]
(emit-meta-constant x
(concat ["cljm.core.vec(["]
(comma-sep (map #(fn [] (emit-constant %)) x))
["])"])))
(defmethod emit-constant clojure.lang.IPersistentMap [x]
(emit-meta-constant x
(concat ["cljm.core.hash_map("]
(comma-sep (map #(fn [] (emit-constant %))
(apply concat x)))
[")"])))
(defmethod emit-constant clojure.lang.PersistentHashSet [x]
(emit-meta-constant x
(concat ["cljm.core.set(["]
(comma-sep (map #(fn [] (emit-constant %)) x))
["])"])))
(defn emit-block
[context statements ret]
(when statements
(emits statements))
(emit ret))
(defmacro emit-wrap [env & body]
`(let [env# ~env]
(when (= :return (:context env#)) (emits "return "))
~@body
(when-not (= :expr (:context env#)) (emitln ";"))))
(defmethod emit :no-op [m])
(defmethod emit :var
[{:keys [info env] :as arg}]
(let [n (:name info)
n (if (= (namespace n) "js")
(name n)
n)
dynamic (:dynamic info)
local (:local info)
field (:field info)
ns (:ns info)
type? (:type info)
is-protocol? (:is-protocol info)]
(emit-wrap env
(if-not local
(if is-protocol?
(emits "@protocol(" (munge n) ")")
(do
(if-not dynamic
(emits (munge n))
(emits "cljm_var_lookup(@\"" n "\")"))
(if-not (or (= ns 'ObjectiveCClass) type?)
(emits ".value"))
(when (or (= ns 'ObjectiveCClass) type?)
(emits ".class"))))
(if field
(emits "[self " (munge n) "]")
(emits (munge n)))))))
(defmethod emit :meta
[{:keys [expr meta env]}]
(emit-wrap env
(emits "cljm.core.with_meta(" expr "," meta ")")))
(defmethod emit :map
[{:keys [env keys vals]}]
(emit-wrap env
(if (zero? (count keys))
(emits "@{}")
(emits "@{ "
(comma-sep (map (fn [k v]
(with-out-str (emit k) (print ": ") (emit v)))
keys vals))
" }"))))
(defmethod emit :vector
[{:keys [items env]}]
(emit-wrap env
(if (empty? items)
(emits "@[]")
(emits "@[ "
(comma-sep items) " ]"))))
(defmethod emit :set
[{:keys [items env]}]
(emit-wrap env
(if (empty? items)
(emits "[NSSet set]")
(emits "[NSSet setWithObjects:"
(comma-sep items) ", nil]"))))
(defmethod emit :constant
[{:keys [form env]}]
(when-not (= :statement (:context env))
(emit-wrap env (emit-constant form))))
(defn get-tag [e]
(or (-> e :tag)
(-> e :info :tag)))
(defn infer-tag [e]
(if-let [tag (get-tag e)]
tag
(case (:op e)
:let (infer-tag (:ret e))
:if (let [then-tag (infer-tag (:then e))
else-tag (infer-tag (:else e))]
(when (= then-tag else-tag)
then-tag))
:constant (case (:form e)
true 'boolean
false 'boolean
nil)
nil)))
(defn safe-test? [e]
(let [tag (infer-tag e)]
(or (#{'boolean 'seq} tag)
(when (= (:op e) :constant)
(let [form (:form e)]
(not (or (and (string? form) (= form ""))
(and (number? form) (zero? form)))))))))
(defmethod emit :if
[{:keys [test then else env]}]
(let [context (:context env)]
(if (= :expr context)
(emits "cljm_truthy(" test ") ? " then " : " else)
(do
(emitln "if(cljm_truthy(" test ")) {")
(emitln then)
(emitln "} else {")
(emitln else)
(emitln "}")))))
(defmethod emit :throw
[{:keys [throw env]}]
(if (= :expr (:context env))
(emits "(function(){throw " throw "})()")
(emitln "throw " throw ";")))
(defn emit-comment
"Emit a nicely formatted comment string."
[doc jsdoc]
(let [docs (when doc [doc])
docs (if jsdoc (concat docs jsdoc) docs)
docs (remove nil? docs)]
(letfn [(print-comment-lines [e] (doseq [next-line (string/split-lines e)]
(emitln "* " (string/trim next-line))))]
(when (seq docs)
(emitln "/**")
(doseq [e docs]
(when e
(print-comment-lines e)))
(emitln "*/")))))
(defn add-extern!
[ast]
(swap! *externs* conj ast))
(defn add-static-expr!
[ast]
(swap! *static-exprs* conj ast))
(defmethod emit :def
[{:keys [name init env doc dynamic protocol] :as ast}]
(if-not protocol
(do (add-extern! ast)
(when init
(emit-comment doc (:jsdoc init))
(if-not dynamic
(let [mname (munge name)]
(emits mname " = [[CLJMVar alloc] initWithValue:" init "]"))
(emits "cljm_var_def(@\"" name "\", " init ")"))
(when-not (= :expr (:context env)) (emitln ";")))
(emitln))))
(defn emit-apply-to
[{:keys [name params env]}]
(let [arglist (gensym "arglist__")
delegate-name (str (munge name) "__delegate")
params (map munge params)]
(emitln "(function (" arglist "){")
(doseq [[i param] (map-indexed vector (butlast params))]
(emits "var " param " = cljm.core.first(")
(dotimes [_ i] (emits "cljm.core.next("))
(emits arglist ")")
(dotimes [_ i] (emits ")"))
(emitln ";"))
(if (< 1 (count params))
(do
(emits "var " (last params) " = cljm.core.rest(")
(dotimes [_ (- (count params) 2)] (emits "cljm.core.next("))
(emits arglist)
(dotimes [_ (- (count params) 2)] (emits ")"))
(emitln ");")
(emitln "return " delegate-name "(" (string/join ", " params) ");"))
(do
(emits "var " (last params) " = ")
(emits "cljm.core.seq(" arglist ");")
(emitln ";")
(emitln "return " delegate-name "(" (string/join ", " params) ");")))
(emits "})")))
(defn emit-start-fn-var
[args imp-fn]
(emits "[[CLJMFunction alloc] initWithBlock:^ id (")
(emits (comma-sep (map #(str "id " (munge %)) args)))
(when-not imp-fn
(when (> (count args) 0)
(emits ", "))
(emits "id cljm_vararg, ..."))
(emitln ") {"))
(defn emit-end-fn-var
[]
(emitln "}]"))
(defn emit-fn-method
[{:keys [gthis name variadic params statements ret env recurs max-fixed-arity]} imp-fn]
(emit-wrap env
(emit-start-fn-var params imp-fn)
(when imp-fn
(let [n (munge (first params))]
(when (not= n 'self) (emitln "id self = " n ";"))))
(when recurs (emitln "while(YES) {"))
(emit-block :return statements ret)
(when recurs
(emitln "break;")
(emitln "}"))
(emit-end-fn-var)))
(defn emit-variadic-fn-method
[{:keys [gthis name variadic params statements ret env recurs max-fixed-arity] :as f} imp-fn]
(emit-wrap env
(emit-start-fn-var (drop-last params) imp-fn)
(when imp-fn
(let [n (munge (first params))]
(when (not= n 'self) (emitln "id self = " n ";"))))
(let [lastn (munge (last params))]
(emitln "NSMutableArray *" lastn " = [NSMutableArray array];")
(emitln "va_list cljm_args;")
(emitln "va_start(cljm_args, cljm_vararg);")
(emitln "for (id cljm_currentArg = cljm_vararg; cljm_currentArg != nil; cljm_currentArg = va_arg(cljm_args, id)) {")
(emitln "\t[" lastn " addObject:cljm_currentArg];")
(emitln "}")
(emitln "va_end(cljm_args);"))
(when recurs (emitln "while(YES) {"))
(emit-block :return statements ret)
(when recurs
(emitln "break;")
(emitln "}"))
(emit-end-fn-var)))
(defmethod emit :fn
[{:keys [name env methods max-fixed-arity variadic recur-frames loop-lets imp-fn]}]
(when-not (= :statement (:context env))
(let [loop-locals (->> (concat (mapcat :names (filter #(and % @(:flag %)) recur-frames))
(mapcat :names loop-lets))
(map munge)
seq)]
(when loop-locals
(when (= :return (:context env))
(emits "return "))
(emitln "((function (" (comma-sep loop-locals) "){")
(when-not (= :return (:context env))
(emits "return ")))
(if (= 1 (count methods))
(if variadic
(emit-variadic-fn-method (assoc (first methods) :name name) imp-fn)
(emit-fn-method (assoc (first methods) :name name) imp-fn))
(let [has-name? (and name true)
name (or name (gensym))
mname (munge name)
maxparams (map munge (apply max-key count (map :params methods)))
mmap (into {}
(map (fn [method]
[(munge (symbol (str mname "__" (count (:params method)))))
method])
methods))
ms (sort-by #(-> % second :params count) (seq mmap))]
(when (= :return (:context env))
(emits "return "))
(emitln "[[CLJMFunction alloc] initWithBlock:^ id (id cljm_vararg, ...) {")
(emitln "__block CLJMVar *" mname ";")
(doseq [[n meth] ms]
(emits "CLJMFunction *" n " = ")
(if (:variadic meth)
(emit-variadic-fn-method meth imp-fn)
(emit-fn-method meth imp-fn))
(emitln ";")
(emitln))
(emitln mname " = [[CLJMVar alloc] initWithValue:[[CLJMFunction alloc] initWithBlock:^ id (NSArray *cljm_args) {")
(emitln "switch(cljm_args.count) {")
(doseq [[n meth] ms]
(if (:variadic meth)
(do (emitln "default:")
(emitln "return ((id (^)(id, ...))[" n " block])(cljm_args[0], nil);")
(emitln "break;"))
(let [pcnt (count (:params meth))]
(emitln "case " pcnt ":")
(emits "return ((id (^)(id, ...))[" n " block])(")
(dotimes [n pcnt]
(emits "cljm_args[" n "], "))
(emits "nil);")
( emitln " return ( ( i d ( ^)(id , ... ) ) [ " n " ] , nil ) ; " )
(emitln "break;"))))
(emitln "}")
(emitln "return nil;")
(emitln "}]];")
(emitln "NSMutableArray *cljm_collectedArgs = [NSMutableArray array];")
(emitln "va_list cljm_args;")
(emitln "va_start(cljm_args, cljm_vararg);")
(emitln "for (id cljm_currentArg = cljm_vararg; cljm_currentArg != nil; cljm_currentArg = va_arg(cljm_args, id)) {")
(emitln "\t[cljm_collectedArgs addObject:cljm_currentArg];")
(emitln "}")
(emitln "va_end(cljm_args);")
(emitln "return ((id (^)(NSArray *))[(CLJMFunction *)[" mname " value] block])(cljm_collectedArgs);")
(emitln "}]")))
(when loop-locals
(emitln ";})(" (comma-sep loop-locals) "))")))))
(defmethod emit :do
[{:keys [statements ret env]}]
(let [context (:context env)]
(when (and statements (= :expr context)) (emits "{"))
(emit-block context statements ret)
(when (and statements (= :expr context)) (emits "}"))))
(defmethod emit :try*
[{:keys [env try catch name finally]}]
(let [context (:context env)
subcontext (if (= :expr context) :return context)]
(if (or name finally)
(do
(when (= :expr context) (emits "(function (){"))
(emits "try{")
(let [{:keys [statements ret]} try]
(emit-block subcontext statements ret))
(emits "}")
(when name
(emits "catch (" (munge name) "){")
(when catch
(let [{:keys [statements ret]} catch]
(emit-block subcontext statements ret)))
(emits "}"))
(when finally
(let [{:keys [statements ret]} finally]
(assert (not= :constant (:op ret)) "finally block cannot contain constant")
(emits "finally {")
(emit-block subcontext statements ret)
(emits "}")))
(when (= :expr context) (emits "})()")))
(let [{:keys [statements ret]} try]
(when (and statements (= :expr context)) (emits "(function (){"))
(emit-block subcontext statements ret)
(when (and statements (= :expr context)) (emits "})()"))))))
(defmethod emit :let
[{:keys [bindings statements ret env loop]}]
(let [context (:context env)]
(when (= :expr context) (emits "^ id {"))
(doseq [{:keys [name init]} bindings]
(emitln "id " (munge name) " = " init ";"))
(when loop (emitln "while(YES) {"))
(emit-block (if (= :expr context) :return context) statements ret)
(when loop
(emitln "break;")
(emitln "}"))
(when (= :expr context) (emits "}()"))))
(defmethod emit :recur
[{:keys [frame exprs env]}]
(let [temps (vec (take (count exprs) (repeatedly gensym)))
names (:names frame)]
(emitln "{")
(dotimes [i (count exprs)]
(emitln "id " (temps i) " = " (exprs i) ";"))
(dotimes [i (count exprs)]
(emitln (munge (names i)) " = " (temps i) ";"))
(emitln "continue;")
(emitln "}")))
(defmethod emit :letfn
[{:keys [bindings statements ret env]}]
(let [context (:context env)]
(when (= :expr context) (emits "(function (){"))
(doseq [{:keys [name init]} bindings]
(emitln "id " (munge name) " = " init ";"))
(emit-block (if (= :expr context) :return context) statements ret)
(when (= :expr context) (emits "})()"))))
(defn protocol-prefix [psym]
(str (-> (str psym) (.replace \. \$) (.replace \/ \$)) "$"))
(defn protocol-munge
[p x]
(str (munge p) "_" (munge x)))
(defmethod emit :invoke
[{:keys [f args env] :as expr}]
(let [info (:info f)
variadic? (:variadic info)
dynamic? (:dynamic info)
fn-name (:name info)
mname (munge fn-name)
keyword? (and (= (-> f :op) :constant)
(keyword? (-> f :form)))
kwname (-> f :form)
protocol (:protocol info)
local? (:local info)
ns (:ns info)
c-call? (= ns 'c)]
(emit-wrap env
(cond
protocol (let [pmname (protocol-munge protocol (apply str (drop 1 (last (string/split (str fn-name) #"/")))))]
(emits "[(id<" (munge protocol) ">) " (first args) " ")
(emits pmname)
(doseq [arg (rest args)]
(emits ":" arg " "))
(emits "]"))
keyword? (emits "[" (first args) " objectForKey:cljm_keyword(@\"" kwname "\")]")
c-call? (emits (name fn-name) "(" (comma-sep args) ")")
:else (do (emits "((id (^)(")
(emits (comma-sep (map (fn [x] (str "id")) (concat args (list "cljm_args")))))
(emits ", ...))")
(if-not local?
(emits "["))
(emits "(CLJMFunction *)[")
(if dynamic?
(emits "cljm_var_lookup(@\"" fn-name "\")")
(emits mname))
(if-not local?
(emits " value]"))
(emits " block])(")
(emits (comma-sep (conj args "nil")) ")"))))))
(comment (defmethod emit :invoke
[{:keys [f args env] :as expr}]
(let [info (:info f)
fn? (and ana/*cljm-static-fns*
(not (:dynamic info))
(:fn-var info))
protocol (:protocol info)
proto? (let [tag (infer-tag (first (:args expr)))]
(and protocol tag
(or ana/*cljm-static-fns*
(:protocol-inline env))
(or (= protocol tag)
(when-let [ps (:protocols (ana/resolve-existing-var (dissoc env :locals) tag))]
(ps protocol)))))
opt-not? (and (= (:name info) 'cljm.core/not)
(= (infer-tag (first (:args expr))) 'boolean))
ns (:ns info)
js? (= ns 'js)
goog? (when ns
(or (= ns 'goog)
(when-let [ns-str (str ns)]
(= (get (string/split ns-str #"\.") 0 nil) "goog"))))
keyword? (and (= (-> f :op) :constant)
(keyword? (-> f :form)))
[f variadic-invoke]
(if fn?
(let [arity (count args)
variadic? (:variadic info)
mps (:method-params info)
mfa (:max-fixed-arity info)]
(cond
if only one method , no renaming needed
(and (not variadic?)
(= (count mps) 1))
[f nil]
(and variadic? (> arity mfa))
[(update-in f [:info :name]
(fn [name] (symbol (str (munge name) ".cljm$lang$arity$variadic"))))
{:max-fixed-arity mfa}]
:else
(let [arities (map count mps)]
(if (some #{arity} arities)
[(update-in f [:info :name]
(fn [name] (symbol (str (munge name) ".cljm$lang$arity$" arity)))) nil]
[f nil]))))
[f nil])]
(emit-wrap env
(cond
opt-not?
(emits "!(" (first args) ")")
proto?
(let [pimpl (str (protocol-prefix protocol)
(munge (name (:name info))) "$arity$" (count args))]
(emits (first args) "." pimpl "(" (comma-sep args) ")"))
keyword?
(emits "(new cljm.core.Keyword(" f ")).call(" (comma-sep (cons "null" args)) ")")
variadic-invoke
(let [mfa (:max-fixed-arity variadic-invoke)]
(emits f "(" (comma-sep (take mfa args))
(when-not (zero? mfa) ",")
"cljm.core.array_seq([" (comma-sep (drop mfa args)) "], 0))"))
(or fn? js? goog?)
(emits f "(" (comma-sep args) ")")
:else
(if (and ana/*cljm-static-fns* (= (:op f) :var))
(let [fprop (str ".cljm$lang$arity$" (count args))]
(emits "(" f fprop " ? " f fprop "(" (comma-sep args) ") : " f ".call(" (comma-sep (cons "null" args)) "))"))
(if variadic?
(emits f "(" (comma-sep args) ", nil)")
(emits f "(" (comma-sep args) ")"))))))))
(defmethod emit :new
[{:keys [ctor args env]}]
(emit-wrap env
(let [method (first args)
init-args (rest args)
init-meth (if (seq args)
(reduce (fn [xs x] (str xs ":")) "initWithFields" args)
"init")]
(emits "[[" ctor " alloc]")
(emit-method-parts (sel-parts init-meth) args)
(emits "]"))))
(defmethod emit :set!
[{:keys [target val env]}]
(emit-wrap env (emits target " = " val)))
(defmethod emit :ns
[{:keys [name requires uses requires-macros env]}]
(emitln "#import <Foundation/Foundation.h>")
(emitln "#import <CLJMRuntime/CLJMRuntime.h>")
(emitln "#import <objc/runtime.h>")
(when include-core
(when-not (= name 'cljm.core)
(emitln "#import \"cljm_DOT_core.h\"")))
(emitln "#import \"" (munge name) ".h\"")
(doseq [lib (into (vals requires) (distinct (vals uses)))]
(emitln "#import \"" (munge lib) ".h\"")))
(defmethod emit :defprotocol*
[ast]
(add-extern! ast))
(defmethod emit :deftype*
[{:keys [t fields pmasks reify] :as ast}]
(when-not reify
(add-extern! ast)
(add-static-expr! ast)))
(defmethod emit :defrecord*
[{:keys [t fields pmasks]}]
(let [fields (concat (map munge fields) '[__meta __extmap])]
(emitln "")
(emitln "/**")
(emitln "* @constructor")
(doseq [fld fields]
(emitln "* @param {*} " fld))
(emitln "* @param {*=} __meta ")
(emitln "* @param {*=} __extmap")
(emitln "*/")
(emitln (munge t) " = (function (" (comma-sep fields) "){")
(doseq [fld fields]
(emitln "this." fld " = " fld ";"))
(doseq [[pno pmask] pmasks]
(emitln "this.cljm$lang$protocol_mask$partition" pno "$ = " pmask ";"))
(emitln "if(arguments.length>" (- (count fields) 2) "){")
(emitln "this.__meta = __meta;")
(emitln "this.__extmap = __extmap;")
(emitln "} else {")
(emits "this.__meta=")
(emit-constant nil)
(emitln ";")
(emits "this.__extmap=")
(emit-constant nil)
(emitln ";")
(emitln "}")
(emitln "})")))
(defmethod emit :dot
[{:keys [target field method args env]}]
(emit-wrap env
(if field
(emits "[" target " " (munge field #{}) "]")
(do
(emits "[" target)
(emit-method-parts (sel-parts (str method)) args)
(emits "]")))))
(defmethod emit :objc
[{:keys [env code segs args]}]
(emit-wrap env
(if code
(emits code)
(emits (interleave (concat segs (repeat nil))
(concat args [nil]))))))
(defmethod emit :js
[{:keys [env code segs args]}]
(emit-wrap env
(if code
(emits code)
(emits (interleave (concat segs (repeat nil))
(concat args [nil]))))))
(defmulti emit-static :op)
(declare objc-class-munge)
(defmethod emit-static :deftype*
[{:keys [t methods]}]
(emitln)
(emitln "@implementation " (objc-class-munge t))
(emitln "@end")
(emitln))
(defn forms-seq
"Seq of forms in a Clojure or ClojureScript file."
([f]
(forms-seq f (clojure.lang.LineNumberingPushbackReader. (io/reader f))))
([f ^java.io.PushbackReader rdr]
(if-let [form (binding [*ns* ana/*reader-ns*] (read rdr nil nil))]
(lazy-seq (cons form (forms-seq f rdr)))
(.close rdr))))
(defn rename-to
"Change the file extension from .cljm to .m. Takes a File or a
String. Always returns a String."
[file-str ext]
(clojure.string/replace file-str #"\.cljm$" ext))
(defn mkdirs
"Create all parent directories for the passed file."
[^java.io.File f]
(.mkdirs (.getParentFile (.getCanonicalFile f))))
(defmacro with-core-cljm
"Ensure that core.cljm has been loaded."
[& body]
`(do (when include-core
(when-not (:defs (get @ana/namespaces 'cljm.core))
(ana/analyze-file "cljm/core.cljm")))
~@body))
(defn compile-file* [src dest]
(with-core-cljm
(with-open [out ^java.io.Writer (io/make-writer dest {})]
(binding [*out* out
ana/*cljm-ns* 'cljm.user
ana/*cljm-file* (.getPath ^java.io.File src)
*data-readers* tags/*cljm-data-readers*
*position* (atom [0 0])
*static-exprs* (atom [])]
(loop [forms (forms-seq src)
ns-name nil
deps nil]
(if (seq forms)
(let [env (ana/empty-env)
ast (ana/analyze env (first forms))]
( println " Goodbye , world ! " )
(emit ast)
(if (= (:op ast) :ns)
(let [found-ns (:name ast)]
(emitln "__attribute__((constructor))")
(emitln "void " (init-func-name found-ns) "(void) {\n")
(emitln "@autoreleasepool {")
(recur (rest forms) found-ns (merge (:uses ast) (:requires ast))))
(recur (rest forms) ns-name deps)))
(do
(emitln "}")
(emitln "}")
(doseq [ast @*static-exprs*]
(emit-static ast))
{:ns (or ns-name 'cljm.user)
:provides [ns-name]
:requires (if (= ns-name 'cljm.core) (set (vals deps)) (conj (set (vals deps)) 'cljm.core))
:file dest})))))))
(defn requires-compilation?
"Return true if the src file requires compilation."
[^java.io.File src ^java.io.File dest]
true)
(defmulti emit-h :op)
(defmethod emit-h :defprotocol*
[{:keys [p index methods]}]
(emitln)
(emitln "@protocol " (munge p) " <NSObject>")
(emitln)
(doseq [method methods]
(let [mname (protocol-munge p (apply str (drop 1 (seq (str (first method))))))
arities (take-while vector? (drop 1 method))
has-comment? (string? (last method))
comment (if has-comment? (last method) nil)]
(when has-comment?
(emit-comment comment ""))
(doseq [arity arities]
(emits "- (id)" mname)
(doseq [arg (drop 1 arity)]
(emits ":(id)" (munge arg) " "))
(emits ";")
(emitln))
(emitln)))
(emitln)
(emitln "@end")
(emitln))
(defmethod emit-h :def
[ast]
(let [mname (munge (:name ast))]
(emitln "CLJMVar *" mname ";")))
(defn- objc-class-munge
[t]
(if (= (string/upper-case (namespace t)) (namespace t))
(str (namespace t) (name t))
(munge t)))
(defn- selector-name
[sel]
(let [ssel (seq sel)]
(apply str (cond
(= (first ssel) \-) (drop 1 ssel)
(= (last ssel) \!) (drop-last ssel)
:else ssel))))
(defmethod emit-h :deftype*
[{:keys [t fields superclass protocols methods env] :as ast}]
(emitln)
(let [class-name (objc-class-munge t)
superclass (objc-class-munge superclass)]
(emits "@interface " class-name " : " superclass))
(when (seq? (seq protocols))
(emits " <" (comma-sep (map objc-class-munge protocols)) ">"))
(emitln)
(emitln)
(doseq [p fields]
(let [tag (-> p meta :tag)
type (cond
(= 'iboutlet tag) "IBOutlet id"
nil? "id"
:else tag)]
(emitln "@property (nonatomic, strong) " type " " (munge p) ";")))
(emitln)
(doseq [[p ms] methods]
(doseq [m ms]
(let [p-ns (:ns (ana/resolve-existing-var (dissoc env :locals) p))
prefix (if (= p-ns 'ObjectiveCClass)
""
(str (munge (str p-ns "/" p)) "_"))
mname (str prefix (selector-name (str (first m))))
parts (string/split mname #":")
pair-args (fn [sel arg] (str (munge sel) ":(id)" (munge arg) " "))
args (drop 1 (second m))
sel-parts (if (seq args)
(apply str (map pair-args (concat parts (repeat "")) args))
(str (first parts)))]
(emitln "- (id)" sel-parts ";")
(emitln))))
(emitln)
(emitln "@end")
(emitln))
(defn generate-header
[externs file]
(let [dest-file (io/file file)]
(with-open [out ^java.io.Writer (io/make-writer dest-file {})]
(binding [*out* out]
(emitln "@class CLJMVar;")
(emitln)
(doseq [ast externs]
(emit-h ast))))))
(defn compile-file
"Compiles src to a file of the same name, but with a .js extension,
in the src file's directory.
With dest argument, write file to provided location. If the dest
argument is a file outside the source tree, missing parent
directories will be created. The src file will only be compiled if
the dest file has an older modification time.
Both src and dest may be either a String or a File.
Returns a map containing {:ns .. :provides .. :requires .. :file ..}.
If the file was not compiled returns only {:file ...}"
([src]
(let [dest (rename-to src ".m")]
(compile-file src dest)))
([src dest]
(binding [*externs* (atom [])]
(let [src-file (io/file src)
dest-file (io/file dest)]
(if (.exists src-file)
(if (requires-compilation? src-file dest-file)
(do (mkdirs dest-file)
(assoc (compile-file* src-file dest-file) :externs @*externs*))
{:file dest-file, :externs []})
(throw (java.io.FileNotFoundException. (str "The file " src " does not exist."))))))))
(defn init-func-name
[ns]
(munge (str ns "/cljm-ns-init")))
(comment
(do
(compile-file "/tmp/hello.cljm" "/tmp/something.js")
(slurp "/tmp/hello.js")
(compile-file "/tmp/somescript.cljm")
(slurp "/tmp/somescript.js")))
(defn path-seq
[file-str]
(->> java.io.File/separator
java.util.regex.Pattern/quote
re-pattern
(string/split file-str)))
(defn to-path
([parts]
(to-path parts java.io.File/separator))
([parts sep]
(apply str (interpose sep parts))))
(defn to-target-file
"Given the source root directory, the output target directory and
file under the source root, produce the target file."
[^java.io.File dir ^String target ^java.io.File file ext]
(let [dir-path (path-seq (.getAbsolutePath dir))
file-path (path-seq (.getAbsolutePath file))
relative-path (drop (count dir-path) file-path)
parents (butlast relative-path)
parent-file (java.io.File. ^String (to-path (cons target parents)))]
(java.io.File. parent-file ^String (rename-to (last relative-path) ext))))
(defn cljm-files-in
"Return a sequence of all .cljm files in the given directory."
[dir]
(filter #(let [name (.getName ^java.io.File %)]
(and (.endsWith name ".cljm")
(not= \. (first name))
(not (contains? cljm-reserved-file-names name))))
(file-seq dir)))
(defn move-and-rename
[^java.io.File m-file ^java.io.File h-file ns target-dir]
(let [mname (munge ns)]
(.renameTo m-file (java.io.File. (str target-dir java.io.File/separator mname ".m")))
(.renameTo h-file (java.io.File. (str target-dir java.io.File/separator mname ".h")))))
(defn compile-root
"Looks recursively in src-dir for .cljm files and compiles them to
.m files. If target-dir is provided, output will go into this
directory mirroring the source directory structure. Returns a list
of maps containing information about each file which was compiled
in dependency order."
([src-dir]
(compile-root src-dir "out"))
([src-dir target-dir]
(let [src-dir-file (io/file src-dir)]
(loop [cljm-files (cljm-files-in src-dir-file)
output-files []]
(if (seq cljm-files)
(let [cljm-file (first cljm-files)
m-file ^java.io.File (to-target-file src-dir-file target-dir cljm-file ".m")
h-file ^java.io.File (to-target-file src-dir-file target-dir cljm-file ".h")
ns-info (compile-file cljm-file m-file)]
(generate-header (:externs ns-info) h-file)
(move-and-rename m-file h-file (:ns ns-info) target-dir)
(recur (rest cljm-files) (conj output-files (assoc ns-info :file-name (.getPath m-file)))))
output-files)))))
|
f38cd185d673f55869c05a002888724baadc667a5a2a2b45bf8f967be74d285e | gedge-platform/gedge-platform | jose_curve25519_unsupported.erl | -*- mode : erlang ; tab - width : 4 ; indent - tabs - mode : 1 ; st - rulers : [ 70 ] -*-
%% vim: ts=4 sw=4 ft=erlang noet
%%%-------------------------------------------------------------------
@author < >
2014 - 2016 ,
%%% @doc
%%%
%%% @end
Created : 02 Jan 2016 by < >
%%%-------------------------------------------------------------------
-module(jose_curve25519_unsupported).
-behaviour(jose_curve25519).
%% jose_curve25519 callbacks
-export([eddsa_keypair/0]).
-export([eddsa_keypair/1]).
-export([eddsa_secret_to_public/1]).
-export([ed25519_sign/2]).
-export([ed25519_verify/3]).
-export([ed25519ph_sign/2]).
-export([ed25519ph_verify/3]).
-export([x25519_keypair/0]).
-export([x25519_keypair/1]).
-export([x25519_secret_to_public/1]).
-export([x25519_shared_secret/2]).
Macros
-define(unsupported, erlang:error(curve25519_unsupported)).
%%====================================================================
%% jose_curve25519 callbacks
%%====================================================================
EdDSA
eddsa_keypair() ->
?unsupported.
eddsa_keypair(_Seed) ->
?unsupported.
eddsa_secret_to_public(_SecretKey) ->
?unsupported.
% Ed25519
ed25519_sign(_Message, _SecretKey) ->
?unsupported.
ed25519_verify(_Signature, _Message, _PublicKey) ->
?unsupported.
% Ed25519ph
ed25519ph_sign(_Message, _SecretKey) ->
?unsupported.
ed25519ph_verify(_Signature, _Message, _PublicKey) ->
?unsupported.
% X25519
x25519_keypair() ->
?unsupported.
x25519_keypair(_Seed) ->
?unsupported.
x25519_secret_to_public(_SecretKey) ->
?unsupported.
x25519_shared_secret(_MySecretKey, _YourPublicKey) ->
?unsupported.
| null | https://raw.githubusercontent.com/gedge-platform/gedge-platform/97c1e87faf28ba2942a77196b6be0a952bff1c3e/gs-broker/broker-server/deps/jose/src/jose_curve25519_unsupported.erl | erlang | vim: ts=4 sw=4 ft=erlang noet
-------------------------------------------------------------------
@doc
@end
-------------------------------------------------------------------
jose_curve25519 callbacks
====================================================================
jose_curve25519 callbacks
====================================================================
Ed25519
Ed25519ph
X25519 | -*- mode : erlang ; tab - width : 4 ; indent - tabs - mode : 1 ; st - rulers : [ 70 ] -*-
@author < >
2014 - 2016 ,
Created : 02 Jan 2016 by < >
-module(jose_curve25519_unsupported).
-behaviour(jose_curve25519).
-export([eddsa_keypair/0]).
-export([eddsa_keypair/1]).
-export([eddsa_secret_to_public/1]).
-export([ed25519_sign/2]).
-export([ed25519_verify/3]).
-export([ed25519ph_sign/2]).
-export([ed25519ph_verify/3]).
-export([x25519_keypair/0]).
-export([x25519_keypair/1]).
-export([x25519_secret_to_public/1]).
-export([x25519_shared_secret/2]).
Macros
-define(unsupported, erlang:error(curve25519_unsupported)).
EdDSA
eddsa_keypair() ->
?unsupported.
eddsa_keypair(_Seed) ->
?unsupported.
eddsa_secret_to_public(_SecretKey) ->
?unsupported.
ed25519_sign(_Message, _SecretKey) ->
?unsupported.
ed25519_verify(_Signature, _Message, _PublicKey) ->
?unsupported.
ed25519ph_sign(_Message, _SecretKey) ->
?unsupported.
ed25519ph_verify(_Signature, _Message, _PublicKey) ->
?unsupported.
x25519_keypair() ->
?unsupported.
x25519_keypair(_Seed) ->
?unsupported.
x25519_secret_to_public(_SecretKey) ->
?unsupported.
x25519_shared_secret(_MySecretKey, _YourPublicKey) ->
?unsupported.
|
065eb3eb4e8d4ae228e837be69f5ba3880cb00ac03c8d3122a9c554f96e24f20 | ahrefs/ocaml-elastic | elastic_query_dsl.mli | type term
type query
type top_query
val bool_val : bool -> term
val int_val : int -> term
val int64_val : Int64.t -> term
val float_val : float -> term
val string_val : string -> term
val list_val : ('a -> term) -> 'a list -> term
val int64_list : Int64.t list -> term
val bool_list : bool list -> term
val int_list : int list -> term
val float_list : float list -> term
val string_list : string list -> term
(** NB term is usually split on word boundaries (tokenized), will not work for multi-word query *)
val filter_term : string -> term -> query
val filter_terms : string -> term -> query
val filter_prefix : field:string -> string -> query
val filter_ids : term list -> query
val filter_range' : string -> (string * term) list -> query
val filter_range : string -> string -> term -> query
val filter_regexp : field:string -> string -> query
(** NB wildcard on analyzed fields will only match per-term, use wildcard on non-analyzed subfield for multi-word grepping *)
val filter_wildcard : field:string -> string -> query
* NB match on analyzed field does n't care about word order , use [ ] for exact match on fixed string
val filter_match : field:string -> ?operator:[ `And | `Or ] -> string -> query
val filter_match_phrase : field:string -> string -> query
val filter_bool
: ?filter:query list ->
?must:query list ->
?must_not:query list ->
?should:query list ->
?minimum_should_match:int ->
unit ->
query
val filter_must : query list -> query
val filter_and : query list -> query
val filter_or : query list -> query
val filter_not : query -> query
val filter_exists : string -> query
val filter_missing_or : string -> query list -> query
val query_string : ?field:string -> ?default_operator:[ `And | `Or ] -> string -> query
val nested : string -> query -> query
val match_phrase_prefix : string -> string -> int -> query
val match_all : query
val query_to_json : query -> Yojson.Safe.t [@@deprecated "use json_of_query"]
val json_of_query : query -> Yojson.Safe.t
val basic_json_of_query : query -> Yojson.Basic.t
val make_top_query' : ?args:(string * Yojson.Safe.t) list -> query -> top_query
val make_top_query : ?args:(string * Yojson.Safe.t) list -> query list -> top_query
val empty_top_query : top_query
val top_query_to_json : top_query -> Yojson.Safe.t
val top_query_to_string : top_query -> string
val basic_json_assoc_of_filters_agg : (string * query) list -> [> `Assoc of (string * Yojson.Basic.t) list ]
module Unsafe : sig
val query_of_json : Yojson.Safe.t -> query
val top_query_of_json : Yojson.Safe.t -> top_query
end
| null | https://raw.githubusercontent.com/ahrefs/ocaml-elastic/a25aab7ab321e0302bd5c8e2d7e7a4217ab1ed8a/elastic_query_dsl.mli | ocaml | * NB term is usually split on word boundaries (tokenized), will not work for multi-word query
* NB wildcard on analyzed fields will only match per-term, use wildcard on non-analyzed subfield for multi-word grepping | type term
type query
type top_query
val bool_val : bool -> term
val int_val : int -> term
val int64_val : Int64.t -> term
val float_val : float -> term
val string_val : string -> term
val list_val : ('a -> term) -> 'a list -> term
val int64_list : Int64.t list -> term
val bool_list : bool list -> term
val int_list : int list -> term
val float_list : float list -> term
val string_list : string list -> term
val filter_term : string -> term -> query
val filter_terms : string -> term -> query
val filter_prefix : field:string -> string -> query
val filter_ids : term list -> query
val filter_range' : string -> (string * term) list -> query
val filter_range : string -> string -> term -> query
val filter_regexp : field:string -> string -> query
val filter_wildcard : field:string -> string -> query
* NB match on analyzed field does n't care about word order , use [ ] for exact match on fixed string
val filter_match : field:string -> ?operator:[ `And | `Or ] -> string -> query
val filter_match_phrase : field:string -> string -> query
val filter_bool
: ?filter:query list ->
?must:query list ->
?must_not:query list ->
?should:query list ->
?minimum_should_match:int ->
unit ->
query
val filter_must : query list -> query
val filter_and : query list -> query
val filter_or : query list -> query
val filter_not : query -> query
val filter_exists : string -> query
val filter_missing_or : string -> query list -> query
val query_string : ?field:string -> ?default_operator:[ `And | `Or ] -> string -> query
val nested : string -> query -> query
val match_phrase_prefix : string -> string -> int -> query
val match_all : query
val query_to_json : query -> Yojson.Safe.t [@@deprecated "use json_of_query"]
val json_of_query : query -> Yojson.Safe.t
val basic_json_of_query : query -> Yojson.Basic.t
val make_top_query' : ?args:(string * Yojson.Safe.t) list -> query -> top_query
val make_top_query : ?args:(string * Yojson.Safe.t) list -> query list -> top_query
val empty_top_query : top_query
val top_query_to_json : top_query -> Yojson.Safe.t
val top_query_to_string : top_query -> string
val basic_json_assoc_of_filters_agg : (string * query) list -> [> `Assoc of (string * Yojson.Basic.t) list ]
module Unsafe : sig
val query_of_json : Yojson.Safe.t -> query
val top_query_of_json : Yojson.Safe.t -> top_query
end
|
efc3c9db626155de7b535146ef38c446c4849adfbd259cd5901934baaf02d86f | manavpatnaik/haskell | 24_change_case.hs | changeCase :: String -> String
changeCase "" = ""
changeCase (x:xs)
| (x >= 'a') && (x <= 'z') = toEnum ((fromEnum x) - 32) : changeCase xs
| (x >= 'A') && (x <= 'Z') = toEnum ((fromEnum x) + 32) : changeCase xs
| otherwise = x : changeCase xs
main = do
print(changeCase "Manav")
print(changeCase "mANAV") | null | https://raw.githubusercontent.com/manavpatnaik/haskell/af45c3eb5c3461aa77cf25610dfcb3b41c7f7ef9/practice-set-1-basics/24_change_case.hs | haskell | changeCase :: String -> String
changeCase "" = ""
changeCase (x:xs)
| (x >= 'a') && (x <= 'z') = toEnum ((fromEnum x) - 32) : changeCase xs
| (x >= 'A') && (x <= 'Z') = toEnum ((fromEnum x) + 32) : changeCase xs
| otherwise = x : changeCase xs
main = do
print(changeCase "Manav")
print(changeCase "mANAV") | |
8369cce9c5fd0fc6f2f01740615e4ac194383def6789527aca5c538109313249 | potapenko/playphraseme-site | validation.cljc | (ns playphraseme.validation
(:require [struct.core :as st]))
| null | https://raw.githubusercontent.com/potapenko/playphraseme-site/d50a62a6bc8f463e08365dca96b3a6e5dde4fb12/src/cljc/playphraseme/validation.cljc | clojure | (ns playphraseme.validation
(:require [struct.core :as st]))
| |
a1a4d1c6d735263448f12075f45b3bdd009c4e7abd9d8c5142524914aece18ab | gedge-platform/gedge-platform | gr_manager_sup.erl | Copyright ( c ) 2013 , < >
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
%% copyright notice and this permission notice appear in all copies.
%%
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
%% @doc Table manager supervisor for all goldrush ets process tables.
%%
%% Manager supervisor responsible for the {@link gr_manager:start_link/3.
%% <em>Manager</em>} processes, which serve as heir of the
%% {@link gr_counter:start_link/0. <em>Counter</em>} and
{ @link gr_param : start_link/0 . < em > Param</em > } ets table processes .
-module(gr_manager_sup).
-behaviour(supervisor).
-type startlink_err() :: {'already_started', pid()} | 'shutdown' | term().
-type startlink_ret() :: {'ok', pid()} | 'ignore' | {'error', startlink_err()}.
%% API
-export([start_link/0]).
%% Supervisor callbacks
-export([init/1]).
%% ===================================================================
%% API functions
%% ===================================================================
%% @hidden
-spec start_link() -> startlink_ret().
start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
%% ===================================================================
%% Supervisor callbacks
%% ===================================================================
%% @hidden
-spec init([]) -> {ok, { {one_for_one, 50, 10}, [supervisor:child_spec()]} }.
init(_Args) ->
{ok, { {one_for_one, 50, 10}, []} }.
| null | https://raw.githubusercontent.com/gedge-platform/gedge-platform/97c1e87faf28ba2942a77196b6be0a952bff1c3e/gs-broker/broker-server/deps/goldrush/src/gr_manager_sup.erl | erlang |
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
@doc Table manager supervisor for all goldrush ets process tables.
Manager supervisor responsible for the {@link gr_manager:start_link/3.
<em>Manager</em>} processes, which serve as heir of the
{@link gr_counter:start_link/0. <em>Counter</em>} and
API
Supervisor callbacks
===================================================================
API functions
===================================================================
@hidden
===================================================================
Supervisor callbacks
===================================================================
@hidden | Copyright ( c ) 2013 , < >
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
{ @link gr_param : start_link/0 . < em > Param</em > } ets table processes .
-module(gr_manager_sup).
-behaviour(supervisor).
-type startlink_err() :: {'already_started', pid()} | 'shutdown' | term().
-type startlink_ret() :: {'ok', pid()} | 'ignore' | {'error', startlink_err()}.
-export([start_link/0]).
-export([init/1]).
-spec start_link() -> startlink_ret().
start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
-spec init([]) -> {ok, { {one_for_one, 50, 10}, [supervisor:child_spec()]} }.
init(_Args) ->
{ok, { {one_for_one, 50, 10}, []} }.
|
34909b7bc0c7b9ceb73b377bf0cd2ccde677c318bf438cb7dd181004f187707c | baryluk/ex11 | tcp_server.erl | Copyright ( C ) 2002 ,
%% File : tcp_server.erl
Author : ( )
%% Purpose : Keeps track of a number of TCP sessions
Last modified : 2002 - 11 - 17
-module(tcp_server).
-export([start_raw_server/4, start_client/3]).
-export([stop/1]).
-export([children/1]).
%% -export([start_child/3]).
start_raw_server(Port , Fun , Max )
This server accepts up to connections on Port
The * first * time a connection is made to Port
%% Then Fun(Socket) is called.
Thereafter messages to the socket result in to the handler .
%% a typical server is usually written like this:
%% To setup a lister
%% start_server(Port) ->
%% S = self(),
%% process_flag(trap_exit, true),
%% tcp_server:start_raw_server(Port,
%% fun(Socket) -> input_handler(Socket, S) end,
15 ,
%% 0)
%% loop().
%% The loop() process is a central controller that all
%% processes can use to synchronize amongst themselfves if necessary
It ends up as the variable " Controller " in the input_handler
%% A typical server is written like this:
%% input_handler(Socket, Controller) ->
%% receive
{ tcp , Socket ,
%% ...
%% gen_tcp:send(Socket, ...)
%%
%% {tcp_closed, Socket} ->
%%
%%
%% Any ->
%% ...
%%
%% end.
start_client(Host, Port, Length) ->
gen_tcp:connect(Host, Port,
[binary, {active, true}, {length, Length}]).
%% Note when start_raw_server returns it should be ready to
%% Immediately accept connections
start_raw_server(Port, Fun, Max, Length) ->
Name = port_name(Port),
case whereis(Name) of
undefined ->
Self = self(),
Pid = spawn_link(fun() ->
cold_start(Self, Port, Fun, Max, Length)
end),
receive
{Pid, ok} ->
register(Name, Pid),
{ok, self()};
{Pid, Error} ->
Error
end;
Pid ->
{error, already_started}
end.
stop(Port) when integer(Port) ->
Name = port_name(Port),
case whereis(Name) of
undefined ->
not_started;
Pid ->
exit(Pid, kill),
(catch unregister(Name)),
stopped
end.
children(Port) when integer(Port) ->
port_name(Port) ! {children, self()},
receive
{session_server, Reply} -> Reply
end.
port_name(Port) when integer(Port) ->
list_to_atom("portServer" ++ integer_to_list(Port)).
cold_start(Master, Port, Fun, Max, Length) ->
process_flag(trap_exit, true),
io:format("Starting a port server on ~p...~n",[Port]),
case gen_tcp:listen(Port, [binary,
%% {dontroute, true},
{nodelay,true},
{packet, Length},
{reuseaddr, true},
{active, false}]) of
{ok, Listen} ->
%% io:format("Listening on:~p~n",[Listen]),
Master ! {self(), ok},
New = start_accept(Listen, Fun),
%% Now we're ready to run
socket_loop(Listen, New, [], Fun, Max);
Error ->
Master ! {self(), Error}
end.
%% Don't mess with the following code uless you really know what you're
doing ( and Thanks to for heping me get it right )
socket_loop(Listen, New, Active, Fun, Max) ->
receive
{istarted, New} ->
Active1 = [New|Active],
possibly_start_another(false, Listen, Active1, Fun, Max);
{'EXIT', New, Why} ->
io : format("Child exit=~p ~ n",[Why ] ) ,
possibly_start_another(false, Listen, Active, Fun, Max);
{'EXIT', Pid, Why} ->
io : format("Child exit=~p ~ n",[Why ] ) ,
Active1 = lists:delete(Pid, Active),
possibly_start_another(New, Listen, Active1, Fun, Max);
{children, From} ->
From ! {session_server, Active},
socket_loop(Listen, New, Active, Fun, Max);
Other ->
io:format("Here in loop:~p~n",[Other])
end.
possibly_start_another(New, Listen, Active, Fun, Max) when pid(New) ->
socket_loop(Listen, New, Active, Fun, Max);
possibly_start_another(false, Listen, Active, Fun, Max) ->
case length(Active) of
N when N < Max ->
New = start_accept(Listen, Fun),
socket_loop(Listen, New, Active, Fun, Max);
_ ->
socket_loop(Listen, false, Active, Fun, Max)
end.
start_accept(Listen, Fun) ->
S = self(),
spawn_link(fun() -> start_child(S, Listen, Fun) end).
start_child(Parent, Listen, Fun) ->
case gen_tcp:accept(Listen) of
{ok, Socket} ->
Parent ! {istarted,self()}, % tell the controller
inet:setopts(Socket, [{nodelay,true},
{active, true}]), % before we activate socket
%% io:format("running the child:~p~n",[Socket]),
Fun(Socket);
Other ->
exit(oops)
end.
| null | https://raw.githubusercontent.com/baryluk/ex11/be8abc64ab9fb50611f93d6631fc33ffdee08fdb/widgets/tcp_server.erl | erlang | File : tcp_server.erl
Purpose : Keeps track of a number of TCP sessions
-export([start_child/3]).
Then Fun(Socket) is called.
a typical server is usually written like this:
To setup a lister
start_server(Port) ->
S = self(),
process_flag(trap_exit, true),
tcp_server:start_raw_server(Port,
fun(Socket) -> input_handler(Socket, S) end,
0)
loop().
The loop() process is a central controller that all
processes can use to synchronize amongst themselfves if necessary
A typical server is written like this:
input_handler(Socket, Controller) ->
receive
...
gen_tcp:send(Socket, ...)
{tcp_closed, Socket} ->
Any ->
...
end.
Note when start_raw_server returns it should be ready to
Immediately accept connections
{dontroute, true},
io:format("Listening on:~p~n",[Listen]),
Now we're ready to run
Don't mess with the following code uless you really know what you're
tell the controller
before we activate socket
io:format("running the child:~p~n",[Socket]), | Copyright ( C ) 2002 ,
Author : ( )
Last modified : 2002 - 11 - 17
-module(tcp_server).
-export([start_raw_server/4, start_client/3]).
-export([stop/1]).
-export([children/1]).
start_raw_server(Port , Fun , Max )
This server accepts up to connections on Port
The * first * time a connection is made to Port
Thereafter messages to the socket result in to the handler .
15 ,
It ends up as the variable " Controller " in the input_handler
{ tcp , Socket ,
start_client(Host, Port, Length) ->
gen_tcp:connect(Host, Port,
[binary, {active, true}, {length, Length}]).
start_raw_server(Port, Fun, Max, Length) ->
Name = port_name(Port),
case whereis(Name) of
undefined ->
Self = self(),
Pid = spawn_link(fun() ->
cold_start(Self, Port, Fun, Max, Length)
end),
receive
{Pid, ok} ->
register(Name, Pid),
{ok, self()};
{Pid, Error} ->
Error
end;
Pid ->
{error, already_started}
end.
stop(Port) when integer(Port) ->
Name = port_name(Port),
case whereis(Name) of
undefined ->
not_started;
Pid ->
exit(Pid, kill),
(catch unregister(Name)),
stopped
end.
children(Port) when integer(Port) ->
port_name(Port) ! {children, self()},
receive
{session_server, Reply} -> Reply
end.
port_name(Port) when integer(Port) ->
list_to_atom("portServer" ++ integer_to_list(Port)).
cold_start(Master, Port, Fun, Max, Length) ->
process_flag(trap_exit, true),
io:format("Starting a port server on ~p...~n",[Port]),
case gen_tcp:listen(Port, [binary,
{nodelay,true},
{packet, Length},
{reuseaddr, true},
{active, false}]) of
{ok, Listen} ->
Master ! {self(), ok},
New = start_accept(Listen, Fun),
socket_loop(Listen, New, [], Fun, Max);
Error ->
Master ! {self(), Error}
end.
doing ( and Thanks to for heping me get it right )
socket_loop(Listen, New, Active, Fun, Max) ->
receive
{istarted, New} ->
Active1 = [New|Active],
possibly_start_another(false, Listen, Active1, Fun, Max);
{'EXIT', New, Why} ->
io : format("Child exit=~p ~ n",[Why ] ) ,
possibly_start_another(false, Listen, Active, Fun, Max);
{'EXIT', Pid, Why} ->
io : format("Child exit=~p ~ n",[Why ] ) ,
Active1 = lists:delete(Pid, Active),
possibly_start_another(New, Listen, Active1, Fun, Max);
{children, From} ->
From ! {session_server, Active},
socket_loop(Listen, New, Active, Fun, Max);
Other ->
io:format("Here in loop:~p~n",[Other])
end.
possibly_start_another(New, Listen, Active, Fun, Max) when pid(New) ->
socket_loop(Listen, New, Active, Fun, Max);
possibly_start_another(false, Listen, Active, Fun, Max) ->
case length(Active) of
N when N < Max ->
New = start_accept(Listen, Fun),
socket_loop(Listen, New, Active, Fun, Max);
_ ->
socket_loop(Listen, false, Active, Fun, Max)
end.
start_accept(Listen, Fun) ->
S = self(),
spawn_link(fun() -> start_child(S, Listen, Fun) end).
start_child(Parent, Listen, Fun) ->
case gen_tcp:accept(Listen) of
{ok, Socket} ->
inet:setopts(Socket, [{nodelay,true},
Fun(Socket);
Other ->
exit(oops)
end.
|
cfd14398c40838ba0c9a4110676e758cb3a38ed5e136ef8a9e8b4a656c152ed6 | haskell/cabal | Foobar.hs | module Foobar where
| null | https://raw.githubusercontent.com/haskell/cabal/00a2351789a460700a2567eb5ecc42cca0af913f/cabal-testsuite/PackageTests/Backpack/Includes5/impl/Foobar.hs | haskell | module Foobar where
| |
04f6e736e0e7c78cfd07be0d836fbe8884df2244a7d1a3ce3167f156fd602036 | gilith/hol-light | calc_num.ml | (* ========================================================================= *)
(* Calculation with naturals. *)
(* *)
, University of Cambridge Computer Laboratory
(* *)
( c ) Copyright , University of Cambridge 1998
( c ) Copyright , 1998 - 2007
(* ========================================================================= *)
needs "arith.ml";;
(* ------------------------------------------------------------------------- *)
Simple rule to get rid of NUMERAL constant .
(* ------------------------------------------------------------------------- *)
let DENUMERAL = GEN_REWRITE_RULE DEPTH_CONV [NUMERAL];;
(* ------------------------------------------------------------------------- *)
(* Big collection of rewrites to do trivial arithmetic. *)
(* *)
Note that we have none for DIV and MOD , and that PRE and SUB are a bit
inefficient ; log(n)^2 instead of log(n ) .
(* ------------------------------------------------------------------------- *)
let ARITH_ZERO = prove
(`(NUMERAL 0 = 0) /\
(BIT0 _0 = _0)`,
REWRITE_TAC[NUMERAL; BIT0; DENUMERAL ADD_CLAUSES]);;
let ARITH_SUC = prove
(`(!n. SUC(NUMERAL n) = NUMERAL(SUC n)) /\
(SUC _0 = BIT1 _0) /\
(!n. SUC (BIT0 n) = BIT1 n) /\
(!n. SUC (BIT1 n) = BIT0 (SUC n))`,
REWRITE_TAC[NUMERAL; BIT0; BIT1; DENUMERAL ADD_CLAUSES]);;
* *
let prove
( ` ( ! n. PRE(NUMERAL n ) = NUMERAL(PRE n ) ) /\
( PRE _ 0 = _ 0 ) /\
( ! ) = if n = _ 0 then _ 0 else BIT1 ( PRE n ) ) /\
( ! n. PRE(BIT1 n ) = BIT0 n ) ` ,
REWRITE_TAC[NUMERAL ; BIT1 ; BIT0 ; DENUMERAL PRE ] THEN INDUCT_TAC THEN
REWRITE_TAC[NUMERAL ; DENUMERAL PRE ; DENUMERAL ADD_CLAUSES ; DENUMERAL NOT_SUC ;
ARITH_ZERO ] ) ; ;
* *
let ARITH_PRE = prove
(`(!n. PRE(NUMERAL n) = NUMERAL(PRE n)) /\
(PRE _0 = _0) /\
(!n. PRE(BIT0 n) = if n = _0 then _0 else BIT1 (PRE n)) /\
(!n. PRE(BIT1 n) = BIT0 n)`,
REWRITE_TAC[NUMERAL; BIT1; BIT0; DENUMERAL PRE] THEN INDUCT_TAC THEN
REWRITE_TAC[NUMERAL; DENUMERAL PRE; DENUMERAL ADD_CLAUSES; DENUMERAL NOT_SUC;
ARITH_ZERO]);;
***)
let ARITH_ADD = prove
(`(!m n. NUMERAL(m) + NUMERAL(n) = NUMERAL(m + n)) /\
(_0 + _0 = _0) /\
(!n. _0 + BIT0 n = BIT0 n) /\
(!n. _0 + BIT1 n = BIT1 n) /\
(!n. BIT0 n + _0 = BIT0 n) /\
(!n. BIT1 n + _0 = BIT1 n) /\
(!m n. BIT0 m + BIT0 n = BIT0 (m + n)) /\
(!m n. BIT0 m + BIT1 n = BIT1 (m + n)) /\
(!m n. BIT1 m + BIT0 n = BIT1 (m + n)) /\
(!m n. BIT1 m + BIT1 n = BIT0 (SUC(m + n)))`,
PURE_REWRITE_TAC[NUMERAL; BIT0; BIT1; DENUMERAL ADD_CLAUSES; SUC_INJ] THEN
REWRITE_TAC[ADD_AC]);;
let ARITH_MULT = prove
(`(!m n. NUMERAL(m) * NUMERAL(n) = NUMERAL(m * n)) /\
(_0 * _0 = _0) /\
(!n. _0 * BIT0 n = _0) /\
(!n. _0 * BIT1 n = _0) /\
(!n. BIT0 n * _0 = _0) /\
(!n. BIT1 n * _0 = _0) /\
(!m n. BIT0 m * BIT0 n = BIT0 (BIT0 (m * n))) /\
(!m n. BIT0 m * BIT1 n = BIT0 m + BIT0 (BIT0 (m * n))) /\
(!m n. BIT1 m * BIT0 n = BIT0 n + BIT0 (BIT0 (m * n))) /\
(!m n. BIT1 m * BIT1 n = BIT1 m + BIT0 n + BIT0 (BIT0 (m * n)))`,
PURE_REWRITE_TAC[NUMERAL; BIT0; BIT1; DENUMERAL MULT_CLAUSES;
DENUMERAL ADD_CLAUSES; SUC_INJ] THEN
REWRITE_TAC[LEFT_ADD_DISTRIB; RIGHT_ADD_DISTRIB; ADD_AC]);;
let ARITH_EXP = prove
(`(!m n. (NUMERAL m) EXP (NUMERAL n) = NUMERAL(m EXP n)) /\
(_0 EXP _0 = BIT1 _0) /\
(!m. (BIT0 m) EXP _0 = BIT1 _0) /\
(!m. (BIT1 m) EXP _0 = BIT1 _0) /\
(!n. _0 EXP (BIT0 n) = (_0 EXP n) * (_0 EXP n)) /\
(!m n. (BIT0 m) EXP (BIT0 n) = ((BIT0 m) EXP n) * ((BIT0 m) EXP n)) /\
(!m n. (BIT1 m) EXP (BIT0 n) = ((BIT1 m) EXP n) * ((BIT1 m) EXP n)) /\
(!n. _0 EXP (BIT1 n) = _0) /\
(!m n. (BIT0 m) EXP (BIT1 n) =
BIT0 m * ((BIT0 m) EXP n) * ((BIT0 m) EXP n)) /\
(!m n. (BIT1 m) EXP (BIT1 n) =
BIT1 m * ((BIT1 m) EXP n) * ((BIT1 m) EXP n))`,
REWRITE_TAC[NUMERAL] THEN REPEAT STRIP_TAC THEN
TRY(GEN_REWRITE_TAC (LAND_CONV o RAND_CONV) [BIT0; BIT1]) THEN
REWRITE_TAC[DENUMERAL EXP; DENUMERAL MULT_CLAUSES; EXP_ADD]);;
let ARITH_EVEN = prove
(`(!n. EVEN(NUMERAL n) <=> EVEN n) /\
(EVEN _0 <=> T) /\
(!n. EVEN(BIT0 n) <=> T) /\
(!n. EVEN(BIT1 n) <=> F)`,
REWRITE_TAC[NUMERAL; BIT1; BIT0; DENUMERAL EVEN; EVEN_ADD]);;
let ARITH_ODD = prove
(`(!n. ODD(NUMERAL n) <=> ODD n) /\
(ODD _0 <=> F) /\
(!n. ODD(BIT0 n) <=> F) /\
(!n. ODD(BIT1 n) <=> T)`,
REWRITE_TAC[NUMERAL; BIT1; BIT0; DENUMERAL ODD; ODD_ADD]);;
let ARITH_LE = prove
(`(!m n. NUMERAL m <= NUMERAL n <=> m <= n) /\
((_0 <= _0) <=> T) /\
(!n. (BIT0 n <= _0) <=> n <= _0) /\
(!n. (BIT1 n <= _0) <=> F) /\
(!n. (_0 <= BIT0 n) <=> T) /\
(!n. (_0 <= BIT1 n) <=> T) /\
(!m n. (BIT0 m <= BIT0 n) <=> m <= n) /\
(!m n. (BIT0 m <= BIT1 n) <=> m <= n) /\
(!m n. (BIT1 m <= BIT0 n) <=> m < n) /\
(!m n. (BIT1 m <= BIT1 n) <=> m <= n)`,
REWRITE_TAC[NUMERAL; BIT1; BIT0; DENUMERAL NOT_SUC;
DENUMERAL(GSYM NOT_SUC); SUC_INJ] THEN
REWRITE_TAC[DENUMERAL LE_0] THEN REWRITE_TAC[DENUMERAL LE; GSYM MULT_2] THEN
REWRITE_TAC[LE_MULT_LCANCEL; SUC_INJ;
DENUMERAL MULT_EQ_0; DENUMERAL NOT_SUC] THEN
REWRITE_TAC[DENUMERAL NOT_SUC] THEN REWRITE_TAC[LE_SUC_LT] THEN
REWRITE_TAC[LT_MULT_LCANCEL] THEN
SUBGOAL_THEN `2 = SUC 1` (fun th -> REWRITE_TAC[th]) THENL
[REWRITE_TAC[NUMERAL; BIT0; BIT1; DENUMERAL ADD_CLAUSES];
REWRITE_TAC[DENUMERAL NOT_SUC; NOT_SUC; EQ_MULT_LCANCEL] THEN
REWRITE_TAC[ONCE_REWRITE_RULE[DISJ_SYM] LE_LT] THEN
MAP_EVERY X_GEN_TAC [`m:num`; `n:num`] THEN
SUBGOAL_THEN `~(SUC 1 * m = SUC (SUC 1 * n))`
(fun th -> REWRITE_TAC[th]) THEN
DISCH_THEN(MP_TAC o AP_TERM `EVEN`) THEN
REWRITE_TAC[EVEN_MULT; EVEN_ADD; NUMERAL; BIT1; EVEN]]);;
let ARITH_LT = prove
(`(!m n. NUMERAL m < NUMERAL n <=> m < n) /\
((_0 < _0) <=> F) /\
(!n. (BIT0 n < _0) <=> F) /\
(!n. (BIT1 n < _0) <=> F) /\
(!n. (_0 < BIT0 n) <=> _0 < n) /\
(!n. (_0 < BIT1 n) <=> T) /\
(!m n. (BIT0 m < BIT0 n) <=> m < n) /\
(!m n. (BIT0 m < BIT1 n) <=> m <= n) /\
(!m n. (BIT1 m < BIT0 n) <=> m < n) /\
(!m n. (BIT1 m < BIT1 n) <=> m < n)`,
REWRITE_TAC[NUMERAL; GSYM NOT_LE; ARITH_LE] THEN
REWRITE_TAC[DENUMERAL LE]);;
let ARITH_GE = REWRITE_RULE[GSYM GE; GSYM GT] ARITH_LE;;
let ARITH_GT = REWRITE_RULE[GSYM GE; GSYM GT] ARITH_LT;;
let ARITH_EQ = prove
(`(!m n. (NUMERAL m = NUMERAL n) <=> (m = n)) /\
((_0 = _0) <=> T) /\
(!n. (BIT0 n = _0) <=> (n = _0)) /\
(!n. (BIT1 n = _0) <=> F) /\
(!n. (_0 = BIT0 n) <=> (_0 = n)) /\
(!n. (_0 = BIT1 n) <=> F) /\
(!m n. (BIT0 m = BIT0 n) <=> (m = n)) /\
(!m n. (BIT0 m = BIT1 n) <=> F) /\
(!m n. (BIT1 m = BIT0 n) <=> F) /\
(!m n. (BIT1 m = BIT1 n) <=> (m = n))`,
REWRITE_TAC[NUMERAL; GSYM LE_ANTISYM; ARITH_LE] THEN
REWRITE_TAC[LET_ANTISYM; LTE_ANTISYM; DENUMERAL LE_0]);;
* *
let ARITH_SUB = prove
( ` ( ! m n. NUMERAL m - NUMERAL n = NUMERAL(m - n ) ) /\
( _ 0 - _ 0 = _ 0 ) /\
( ! n. _ 0 - BIT0 n = _ 0 ) /\
( ! n. _ 0 - BIT1 n = _ 0 ) /\
( ! 0 = BIT0 n ) /\
( ! n - _ 0 = BIT1 n ) /\
( ! m n = BIT0 ( m - n ) ) /\
( ! m n = PRE(BIT0 ( m - n ) ) ) /\
( ! m m - BIT0 n = if n < = m then BIT1 ( m - n ) else _ 0 ) /\
( ! m m - BIT1 n = BIT0 ( m - n ) ) ` ,
REWRITE_TAC[NUMERAL ; DENUMERAL SUB_0 ] THEN PURE_REWRITE_TAC[BIT0 ; BIT1 ] THEN
REWRITE_TAC[GSYM MULT_2 ; SUB_SUC ; ] THEN
REWRITE_TAC[SUB ] THEN REPEAT GEN_TAC THEN COND_CASES_TAC THEN
REWRITE_TAC[DENUMERAL SUB_EQ_0 ] THEN
RULE_ASSUM_TAC(REWRITE_RULE[NOT_LE ] ) THEN
ASM_REWRITE_TAC[LE_SUC_LT ; ] THEN
POP_ASSUM(CHOOSE_THEN SUBST1_TAC o REWRITE_RULE[LE_EXISTS ] ) THEN
REWRITE_TAC[ADD1 ; LEFT_ADD_DISTRIB ] THEN
REWRITE_TAC[ADD_SUB2 ; GSYM ADD_ASSOC ] ) ; ;
* *
let ARITH_SUB = prove
(`(!m n. NUMERAL m - NUMERAL n = NUMERAL(m - n)) /\
(_0 - _0 = _0) /\
(!n. _0 - BIT0 n = _0) /\
(!n. _0 - BIT1 n = _0) /\
(!n. BIT0 n - _0 = BIT0 n) /\
(!n. BIT1 n - _0 = BIT1 n) /\
(!m n. BIT0 m - BIT0 n = BIT0 (m - n)) /\
(!m n. BIT0 m - BIT1 n = PRE(BIT0 (m - n))) /\
(!m n. BIT1 m - BIT0 n = if n <= m then BIT1 (m - n) else _0) /\
(!m n. BIT1 m - BIT1 n = BIT0 (m - n))`,
REWRITE_TAC[NUMERAL; DENUMERAL SUB_0] THEN PURE_REWRITE_TAC[BIT0; BIT1] THEN
REWRITE_TAC[GSYM MULT_2; SUB_SUC; LEFT_SUB_DISTRIB] THEN
REWRITE_TAC[SUB] THEN REPEAT GEN_TAC THEN COND_CASES_TAC THEN
REWRITE_TAC[DENUMERAL SUB_EQ_0] THEN
RULE_ASSUM_TAC(REWRITE_RULE[NOT_LE]) THEN
ASM_REWRITE_TAC[LE_SUC_LT; LT_MULT_LCANCEL; ARITH_EQ] THEN
POP_ASSUM(CHOOSE_THEN SUBST1_TAC o REWRITE_RULE[LE_EXISTS]) THEN
REWRITE_TAC[ADD1; LEFT_ADD_DISTRIB] THEN
REWRITE_TAC[ADD_SUB2; GSYM ADD_ASSOC]);;
***)
let ARITH = end_itlist CONJ
[ARITH_ZERO; ARITH_SUC; (***ARITH_PRE;***)
ARITH_ADD; ARITH_MULT; ARITH_EXP;
ARITH_EVEN; ARITH_ODD;
ARITH_EQ; ARITH_LE; ARITH_LT; ARITH_GE; ARITH_GT
* * ARITH_SUB * *
(* ------------------------------------------------------------------------- *)
(* Now more delicate conversions for situations where efficiency matters. *)
(* ------------------------------------------------------------------------- *)
let NUM_EVEN_CONV =
let tth,rths = CONJ_PAIR ARITH_EVEN in
GEN_REWRITE_CONV I [tth] THENC GEN_REWRITE_CONV I [rths];;
let NUM_ODD_CONV =
let tth,rths = CONJ_PAIR ARITH_ODD in
GEN_REWRITE_CONV I [tth] THENC GEN_REWRITE_CONV I [rths];;
let NUM_SUC_CONV,NUM_ADD_CONV,NUM_MULT_CONV,NUM_EXP_CONV,
NUM_LT_CONV,NUM_LE_CONV,NUM_EQ_CONV =
let num_ty = type_of(lhand(concl ZERO_DEF)) in
let Comb(NUMERAL_tm,Comb(BIT0_tm,Comb(BIT1_tm,zero_tm))) =
mk_small_numeral 2
and suc_tm = rator(rand(concl TWO))
and one_tm = rand(mk_small_numeral 1)
and add_tm = rator(rator(lhand(snd(strip_forall(concl ADD_0)))))
and mul_tm = rator(rator(rand(snd(strip_forall(concl EXP_2)))))
and exp_tm = rator(rator(lhand(snd(strip_forall(concl EXP_2)))))
and eq_tm = rator(rator(concl TWO)) in
let num_0 = Int 0 and num_1 = Int 1 and num_2 = Int 2 in
let a_tm = mk_var("a",num_ty)
and b_tm = mk_var("b",num_ty)
and c_tm = mk_var("c",num_ty)
and d_tm = mk_var("d",num_ty)
and e_tm = mk_var("e",num_ty)
and h_tm = mk_var("h",num_ty)
and l_tm = mk_var("l",num_ty)
and m_tm = mk_var("m",num_ty)
and n_tm = mk_var("n",num_ty)
and p_tm = mk_var("p",num_ty) in
let STANDARDIZE =
let ilist = [BIT0_tm,BIT0_tm; BIT1_tm,BIT1_tm; zero_tm,zero_tm;
suc_tm,suc_tm; add_tm,add_tm; mul_tm,mul_tm;
exp_tm,exp_tm; eq_tm,eq_tm; NUMERAL_tm,NUMERAL_tm;
a_tm,a_tm; b_tm,b_tm; c_tm,c_tm; d_tm,d_tm; e_tm,e_tm;
h_tm,h_tm; l_tm,l_tm; m_tm,m_tm; n_tm,n_tm; p_tm,p_tm] in
let rec replace tm =
match tm with
Var(_,_) | Const(_,_) -> rev_assocd tm ilist tm
| Comb(s,t) -> mk_comb(replace s,replace t)
| Abs(_,_) -> failwith "replace" in
fun th -> let tm' = replace (concl th) in EQ_MP (REFL tm') th in
let REFL_bit0 = STANDARDIZE(REFL BIT0_tm)
and REFL_bit1 = STANDARDIZE(REFL BIT1_tm) in
let AP_BIT0 th = MK_COMB(REFL_bit0,th)
and AP_BIT1 th = MK_COMB(REFL_bit1,th)
and QUICK_PROVE_HYP ath bth = EQ_MP (DEDUCT_ANTISYM_RULE ath bth) ath in
let rec dest_raw_numeral tm =
match tm with
Comb(Const("BIT1",_),t) -> num_2 */ dest_raw_numeral t +/ num_1
| Comb(Const("BIT0",_),t) -> num_2 */ dest_raw_numeral t
| Const("_0",_) -> num_0 in
let bitcounts =
let rec bctr w z tm =
match tm with
Const("_0",_) -> (w,z)
| Comb(Const("BIT0",_),t) -> bctr w (z + 1) t
| Comb(Const("BIT1",_),t) -> bctr (w + 1) z t
| _ -> failwith "malformed numeral" in
bctr 0 0 in
let rec wellformed tm =
match tm with
Const("_0",_) -> true
| Comb(Const("BIT0",_),t)|Comb(Const("BIT1",_),t) -> wellformed t
| _ -> false in
let rec orderrelation mtm ntm =
if mtm == ntm then
if wellformed mtm then 0 else failwith "orderrelation"
else
match (mtm,ntm) with
Const("_0",_),Const("_0",_) -> 0
| Const("_0",_),_ ->
if wellformed ntm then -1 else failwith "orderrelation"
| _, Const("_0",_) ->
if wellformed ntm then 1 else failwith "orderrelation"
| Comb(Const("BIT0",_),mt),Comb(Const("BIT0",_),nt)
| Comb(Const("BIT1",_),mt),Comb(Const("BIT1",_),nt) ->
orderrelation mt nt
| Comb(Const("BIT0",_),mt),Comb(Const("BIT1",_),nt) ->
if orderrelation mt nt > 0 then 1 else -1
| Comb(Const("BIT1",_),mt),Comb(Const("BIT0",_),nt) ->
if orderrelation mt nt < 0 then -1 else 1 in
let doublebn tm = if tm = zero_tm then tm else mk_comb(BIT0_tm,tm) in
(* [subbn mtm ntm] builds the raw numeral for mtm - ntm, and [sbcbn mtm ntm]
   the raw numeral for mtm - ntm - 1 (subtraction with borrow).  Both assume
   the subtraction does not go negative; otherwise (or on malformed input)
   they fail.  These are pure term constructions — no theorems are proved. *)
let rec subbn mtm ntm =
  match (mtm,ntm) with
    (_,Const("_0",_)) -> mtm
  | (Comb(Const("BIT0",_),mt),Comb(Const("BIT0",_),nt)) ->
        doublebn (subbn mt nt)
  | (Comb(Const("BIT1",_),mt),Comb(Const("BIT1",_),nt)) ->
        doublebn (subbn mt nt)
  | (Comb(Const("BIT1",_),mt),Comb(Const("BIT0",_),nt)) ->
        mk_comb(BIT1_tm,subbn mt nt)
  | (Comb(Const("BIT0",_),mt),Comb(Const("BIT1",_),nt)) ->
        (* 2m - (2n+1) = 2(m-n-1)+1: propagate a borrow *)
        mk_comb(BIT1_tm,sbcbn mt nt)
  | _ -> failwith "malformed numeral or wrong relation"
(* sbcbn: subtract with an incoming borrow, i.e. mtm - ntm - 1. *)
and sbcbn mtm ntm =
  match (mtm,ntm) with
  | (Comb(Const("BIT0",_),mt),Const("_0",_)) ->
        mk_comb(BIT1_tm,sbcbn mt ntm)
  | (Comb(Const("BIT1",_),mt),Const("_0",_)) ->
        doublebn mt
  | (Comb(Const("BIT0",_),mt),Comb(Const("BIT0",_),nt)) ->
        mk_comb(BIT1_tm,sbcbn mt nt)
  | (Comb(Const("BIT1",_),mt),Comb(Const("BIT1",_),nt)) ->
        mk_comb(BIT1_tm,sbcbn mt nt)
  | (Comb(Const("BIT1",_),mt),Comb(Const("BIT0",_),nt)) ->
        doublebn (subbn mt nt)
  | (Comb(Const("BIT0",_),mt),Comb(Const("BIT1",_),nt)) ->
        doublebn (sbcbn mt nt)
  | _ -> failwith "malformed numeral or wrong relation" in
(* [topsplit tm] splits off the low 4 bits of a raw numeral: it returns
   (d,rest) with tm = d + 16 * rest when tm has at least 4 bits, and
   (value,zero_tm) when tm has at most 3 bits (value 0..7).  This lets the
   addition rules below work on 4-bit chunks via precomputed tables. *)
let topsplit tm =
  match tm with
    Const("_0",_) -> 0,zero_tm
  | Comb(Const("BIT1",_),Const("_0",_)) -> 1,zero_tm
  | Comb(Const("BIT0",_),Comb(Const("BIT1",_),Const("_0",_))) -> 2,zero_tm
  | Comb(Const("BIT1",_),Comb(Const("BIT1",_),Const("_0",_))) -> 3,zero_tm
  | Comb(Const("BIT0",_),Comb(Const("BIT0",_),Comb(Const("BIT1",_),Const("_0",_)))) -> 4,zero_tm
  | Comb(Const("BIT1",_),Comb(Const("BIT0",_),Comb(Const("BIT1",_),Const("_0",_)))) -> 5,zero_tm
  | Comb(Const("BIT0",_),Comb(Const("BIT1",_),Comb(Const("BIT1",_),Const("_0",_)))) -> 6,zero_tm
  | Comb(Const("BIT1",_),Comb(Const("BIT1",_),Comb(Const("BIT1",_),Const("_0",_)))) -> 7,zero_tm
  | Comb(Const("BIT0",_),Comb(Const("BIT0",_),Comb(Const("BIT0",_),Comb(Const("BIT0",_),n)))) -> 0,n
  | Comb(Const("BIT1",_),Comb(Const("BIT0",_),Comb(Const("BIT0",_),Comb(Const("BIT0",_),n)))) -> 1,n
  | Comb(Const("BIT0",_),Comb(Const("BIT1",_),Comb(Const("BIT0",_),Comb(Const("BIT0",_),n)))) -> 2,n
  | Comb(Const("BIT1",_),Comb(Const("BIT1",_),Comb(Const("BIT0",_),Comb(Const("BIT0",_),n)))) -> 3,n
  | Comb(Const("BIT0",_),Comb(Const("BIT0",_),Comb(Const("BIT1",_),Comb(Const("BIT0",_),n)))) -> 4,n
  | Comb(Const("BIT1",_),Comb(Const("BIT0",_),Comb(Const("BIT1",_),Comb(Const("BIT0",_),n)))) -> 5,n
  | Comb(Const("BIT0",_),Comb(Const("BIT1",_),Comb(Const("BIT1",_),Comb(Const("BIT0",_),n)))) -> 6,n
  | Comb(Const("BIT1",_),Comb(Const("BIT1",_),Comb(Const("BIT1",_),Comb(Const("BIT0",_),n)))) -> 7,n
  | Comb(Const("BIT0",_),Comb(Const("BIT0",_),Comb(Const("BIT0",_),Comb(Const("BIT1",_),n)))) -> 8,n
  | Comb(Const("BIT1",_),Comb(Const("BIT0",_),Comb(Const("BIT0",_),Comb(Const("BIT1",_),n)))) -> 9,n
  | Comb(Const("BIT0",_),Comb(Const("BIT1",_),Comb(Const("BIT0",_),Comb(Const("BIT1",_),n)))) -> 10,n
  | Comb(Const("BIT1",_),Comb(Const("BIT1",_),Comb(Const("BIT0",_),Comb(Const("BIT1",_),n)))) -> 11,n
  | Comb(Const("BIT0",_),Comb(Const("BIT0",_),Comb(Const("BIT1",_),Comb(Const("BIT1",_),n)))) -> 12,n
  | Comb(Const("BIT1",_),Comb(Const("BIT0",_),Comb(Const("BIT1",_),Comb(Const("BIT1",_),n)))) -> 13,n
  | Comb(Const("BIT0",_),Comb(Const("BIT1",_),Comb(Const("BIT1",_),Comb(Const("BIT1",_),n)))) -> 14,n
  | Comb(Const("BIT1",_),Comb(Const("BIT1",_),Comb(Const("BIT1",_),Comb(Const("BIT1",_),n)))) -> 15,n
  | _ -> failwith "malformed numeral" in
(* NUM_ADD_RULE mtm ntm proves |- mtm + ntm = p and NUM_ADC_RULE mtm ntm
   proves |- SUC(mtm + ntm) = p (add with carry), for raw numerals.  Both
   work 4 bits at a time using two precomputed 1024-entry arrays of
   instantiable equations (one per pair of 5-bit-indexed operand classes),
   together with flag arrays saying whether the recursion continues with a
   plain add (flag 2), a carry add (flag 3), or terminates (flags 0/1). *)
let NUM_ADD_RULE,NUM_ADC_RULE =
  (* mk_compnumeral k base: the term for k with [base] substituted for
     the usual _0 terminator, e.g. k=5, base=v gives BIT1(BIT0(BIT1 v)). *)
  let rec mk_compnumeral k base =
    if k = 0 then base else
    let t = mk_compnumeral (k / 2) base in
    if k mod 2 = 1 then mk_comb(BIT1_tm,t) else mk_comb(BIT0_tm,t) in
  (* bases v: the 32 operand shapes — 0..15 as closed numerals, then
     8..15 with the top bit replaced by BIT0 v / BIT1 v over variable v. *)
  let bases v =
    let part2 = map (fun k -> mk_compnumeral k v) (8--15) in
    let part1 = map (subst[mk_comb(BIT0_tm,v),mk_comb(BIT1_tm,v)])
                    part2
    and part0 = map (fun k -> mk_compnumeral k zero_tm) (0--15) in
    part0 @ part1 @ part2 in
  let starts =
    allpairs (fun mtm ntm ->
       mk_comb(mk_comb(add_tm,mtm),ntm)) (bases m_tm) (bases n_tm) in
  let BITS_INJ = (STANDARDIZE o prove)
   (`(BIT0 m = BIT0 n <=> m = n) /\
     (BIT1 m = BIT1 n <=> m = n)`,
    REWRITE_TAC[BIT0; BIT1] THEN
    REWRITE_TAC[GSYM MULT_2] THEN
    REWRITE_TAC[SUC_INJ; EQ_MULT_LCANCEL; ARITH_EQ]) in
  let ARITH_0 = (STANDARDIZE o MESON[NUMERAL; ADD_CLAUSES])
   `m + _0 = m /\ _0 + n = n` in
  let patadj = subst[`SUC(m + _0)`,`SUC m`; `SUC(_0 + n)`,`SUC n`] in
  (* mkclauses sucflag t: prove the table equation for operand pair t
     (with SUC wrapped around it when sucflag), returning the theorem and
     its continuation flag 0..3 as described above. *)
  let mkclauses sucflag t =
    let tm = if sucflag then mk_comb(suc_tm,t) else t in
    let th1 = PURE_REWRITE_CONV[ARITH_ADD; ARITH_SUC; ARITH_0] tm in
    let tm1 = patadj(rand(concl th1)) in
    if not(free_in add_tm tm1) then th1,
       (if free_in m_tm tm1 then 0 else 1) else
    let ptm = rand(rand(rand(rand tm1))) in
    let tmc = mk_eq(mk_eq(ptm,p_tm),mk_eq(tm,subst[p_tm,ptm] tm1)) in
    EQT_ELIM(REWRITE_CONV[ARITH_ADD; ARITH_SUC; ARITH_0; BITS_INJ] tmc),
    (if free_in suc_tm tm1 then 3 else 2) in
  let add_clauses,add_flags =
    let l1,l2 = unzip(map (mkclauses false) starts) in
    Array.of_list(map STANDARDIZE l1),Array.of_list l2 in
  let adc_clauses,adc_flags =
    let l1,l2 = unzip(map (mkclauses true) starts) in
    Array.of_list(map STANDARDIZE l1),Array.of_list l2 in
  (* The mutually recursive rules: peel 4 bits off each operand, index the
     table, instantiate, and recurse per the stored flag. *)
  let rec NUM_ADD_RULE mtm ntm =
    let m_lo,m_hi = topsplit mtm
    and n_lo,n_hi = topsplit ntm in
    let m_ind = if m_hi = zero_tm then m_lo else m_lo + 16
    and n_ind = if n_hi = zero_tm then n_lo else n_lo + 16 in
    let ind = 32 * m_ind + n_ind in
    let th1 = Array.get add_clauses ind
    and fl = Array.get add_flags ind in
    match fl with
      0 -> INST [m_hi,m_tm] th1
    | 1 -> INST [n_hi,n_tm] th1
    | 2 -> let th2 = NUM_ADD_RULE m_hi n_hi in
           (match concl th2 with Comb(_,ptm) ->
              let th3 = INST [m_hi,m_tm; n_hi,n_tm;ptm,p_tm] th1 in
              EQ_MP th3 th2)
    | 3 -> let th2 = NUM_ADC_RULE m_hi n_hi in
           (match concl th2 with Comb(_,ptm) ->
              let th3 = INST [m_hi,m_tm; n_hi,n_tm;ptm,p_tm] th1 in
              EQ_MP th3 th2)
  and NUM_ADC_RULE mtm ntm =
    let m_lo,m_hi = topsplit mtm
    and n_lo,n_hi = topsplit ntm in
    let m_ind = if m_hi = zero_tm then m_lo else m_lo + 16
    and n_ind = if n_hi = zero_tm then n_lo else n_lo + 16 in
    let ind = 32 * m_ind + n_ind in
    let th1 = Array.get adc_clauses ind
    and fl = Array.get adc_flags ind in
    match fl with
      0 -> INST [m_hi,m_tm] th1
    | 1 -> INST [n_hi,n_tm] th1
    | 2 -> let th2 = NUM_ADD_RULE m_hi n_hi in
           (match concl th2 with Comb(_,ptm) ->
              let th3 = INST [m_hi,m_tm; n_hi,n_tm;ptm,p_tm] th1 in
              EQ_MP th3 th2)
    | 3 -> let th2 = NUM_ADC_RULE m_hi n_hi in
           (match concl th2 with Comb(_,ptm) ->
              let th3 = INST [m_hi,m_tm; n_hi,n_tm;ptm,p_tm] th1 in
              EQ_MP th3 th2) in
  NUM_ADD_RULE,NUM_ADC_RULE in
(* NUM_SHIFT_CONV k tm proves |- tm = a + p * b where a is the low k bits
   of the raw numeral tm, p = 2^k as a raw numeral and b is the remaining
   high part.  This is the "split" step used by the divide-and-conquer
   squaring/multiplication below.  It works bit-by-bit via pth_0/pth_z/pth_1
   and 4 bits at a time via the pths_0/pths_1 tables when k >= 4. *)
let NUM_SHIFT_CONV =
  let pth_0 = (STANDARDIZE o prove)
   (`(n = a + p * b <=> BIT0 n = BIT0 a + BIT0 p * b)`,
    REWRITE_TAC[BIT0; BIT1] THEN
    REWRITE_TAC[GSYM MULT_2; GSYM MULT_ASSOC; GSYM LEFT_ADD_DISTRIB] THEN
    REWRITE_TAC[EQ_MULT_LCANCEL; ARITH_EQ])
  and pth_z = (STANDARDIZE o prove)
   (`n = _0 + p * b <=> BIT0 n = _0 + BIT0 p * b`,
    SUBST1_TAC(SYM(SPEC `_0` NUMERAL)) THEN
    REWRITE_TAC[BIT1; BIT0] THEN
    REWRITE_TAC[ADD_CLAUSES; GSYM MULT_2] THEN
    REWRITE_TAC[GSYM MULT_ASSOC; EQ_MULT_LCANCEL; ARITH_EQ])
  and pth_1 = (STANDARDIZE o prove)
   (`(n = a + p * b <=> BIT1 n = BIT1 a + BIT0 p * b)`,
    REWRITE_TAC[BIT0; BIT1] THEN
    REWRITE_TAC[GSYM MULT_2; GSYM MULT_ASSOC; GSYM LEFT_ADD_DISTRIB;
                ADD_CLAUSES; SUC_INJ] THEN
    REWRITE_TAC[EQ_MULT_LCANCEL; ARITH_EQ])
  and pth_base = (STANDARDIZE o prove)
   (`n = _0 + BIT1 _0 * n`,
    MESON_TAC[ADD_CLAUSES; MULT_CLAUSES; NUMERAL])
  and pth_triv = (STANDARDIZE o prove)
   (`_0 = a + p * b <=> _0 = a + BIT0 p * b`,
    CONV_TAC(BINOP_CONV SYM_CONV) THEN
    SUBST1_TAC(SYM(SPEC `_0` NUMERAL)) THEN
    REWRITE_TAC[ADD_EQ_0; MULT_EQ_0; BIT0])
  and pths_1 = (Array.of_list o CONJUNCTS o STANDARDIZE o prove)
   (`(n = a + p * b <=>
      BIT0(BIT0(BIT0(BIT0 n))) =
      BIT0(BIT0(BIT0(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
     (n = a + p * b <=>
      BIT1(BIT0(BIT0(BIT0 n))) =
      BIT1(BIT0(BIT0(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
     (n = a + p * b <=>
      BIT0(BIT1(BIT0(BIT0 n))) =
      BIT0(BIT1(BIT0(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
     (n = a + p * b <=>
      BIT1(BIT1(BIT0(BIT0 n))) =
      BIT1(BIT1(BIT0(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
     (n = a + p * b <=>
      BIT0(BIT0(BIT1(BIT0 n))) =
      BIT0(BIT0(BIT1(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
     (n = a + p * b <=>
      BIT1(BIT0(BIT1(BIT0 n))) =
      BIT1(BIT0(BIT1(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
     (n = a + p * b <=>
      BIT0(BIT1(BIT1(BIT0 n))) =
      BIT0(BIT1(BIT1(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
     (n = a + p * b <=>
      BIT1(BIT1(BIT1(BIT0 n))) =
      BIT1(BIT1(BIT1(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
     (n = a + p * b <=>
      BIT0(BIT0(BIT0(BIT1 n))) =
      BIT0(BIT0(BIT0(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
     (n = a + p * b <=>
      BIT1(BIT0(BIT0(BIT1 n))) =
      BIT1(BIT0(BIT0(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
     (n = a + p * b <=>
      BIT0(BIT1(BIT0(BIT1 n))) =
      BIT0(BIT1(BIT0(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
     (n = a + p * b <=>
      BIT1(BIT1(BIT0(BIT1 n))) =
      BIT1(BIT1(BIT0(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
     (n = a + p * b <=>
      BIT0(BIT0(BIT1(BIT1 n))) =
      BIT0(BIT0(BIT1(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
     (n = a + p * b <=>
      BIT1(BIT0(BIT1(BIT1 n))) =
      BIT1(BIT0(BIT1(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
     (n = a + p * b <=>
      BIT0(BIT1(BIT1(BIT1 n))) =
      BIT0(BIT1(BIT1(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
     (n = a + p * b <=>
      BIT1(BIT1(BIT1(BIT1 n))) =
      BIT1(BIT1(BIT1(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b)`,
    MP_TAC(REWRITE_RULE[GSYM MULT_2] BIT0) THEN
    MP_TAC(REWRITE_RULE[GSYM MULT_2] BIT1) THEN
    ABBREV_TAC `two = 2` THEN
    DISCH_THEN(fun th -> REWRITE_TAC[th]) THEN
    DISCH_THEN(fun th -> REWRITE_TAC[th]) THEN
    FIRST_X_ASSUM(SUBST1_TAC o SYM) THEN
    REWRITE_TAC[ADD_CLAUSES; SUC_INJ; EQ_MULT_LCANCEL; ARITH_EQ;
                GSYM LEFT_ADD_DISTRIB; GSYM MULT_ASSOC])
  and pths_0 = (Array.of_list o CONJUNCTS o STANDARDIZE o prove)
   (`(n = _0 + p * b <=>
      BIT0(BIT0(BIT0(BIT0 n))) =
      _0 + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
     (n = _0 + p * b <=>
      BIT1(BIT0(BIT0(BIT0 n))) =
      BIT1 _0 + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
     (n = _0 + p * b <=>
      BIT0(BIT1(BIT0(BIT0 n))) =
      BIT0(BIT1 _0) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
     (n = _0 + p * b <=>
      BIT1(BIT1(BIT0(BIT0 n))) =
      BIT1(BIT1 _0) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
     (n = _0 + p * b <=>
      BIT0(BIT0(BIT1(BIT0 n))) =
      BIT0(BIT0(BIT1 _0)) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
     (n = _0 + p * b <=>
      BIT1(BIT0(BIT1(BIT0 n))) =
      BIT1(BIT0(BIT1 _0)) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
     (n = _0 + p * b <=>
      BIT0(BIT1(BIT1(BIT0 n))) =
      BIT0(BIT1(BIT1 _0)) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
     (n = _0 + p * b <=>
      BIT1(BIT1(BIT1(BIT0 n))) =
      BIT1(BIT1(BIT1 _0)) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
     (n = _0 + p * b <=>
      BIT0(BIT0(BIT0(BIT1 n))) =
      BIT0(BIT0(BIT0(BIT1 _0))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
     (n = _0 + p * b <=>
      BIT1(BIT0(BIT0(BIT1 n))) =
      BIT1(BIT0(BIT0(BIT1 _0))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
     (n = _0 + p * b <=>
      BIT0(BIT1(BIT0(BIT1 n))) =
      BIT0(BIT1(BIT0(BIT1 _0))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
     (n = _0 + p * b <=>
      BIT1(BIT1(BIT0(BIT1 n))) =
      BIT1(BIT1(BIT0(BIT1 _0))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
     (n = _0 + p * b <=>
      BIT0(BIT0(BIT1(BIT1 n))) =
      BIT0(BIT0(BIT1(BIT1 _0))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
     (n = _0 + p * b <=>
      BIT1(BIT0(BIT1(BIT1 n))) =
      BIT1(BIT0(BIT1(BIT1 _0))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
     (n = _0 + p * b <=>
      BIT0(BIT1(BIT1(BIT1 n))) =
      BIT0(BIT1(BIT1(BIT1 _0))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
     (n = _0 + p * b <=>
      BIT1(BIT1(BIT1(BIT1 n))) =
      BIT1(BIT1(BIT1(BIT1 _0))) + BIT0(BIT0(BIT0(BIT0 p))) * b)`,
    SUBST1_TAC(MESON[NUMERAL] `_0 = 0`) THEN
    MP_TAC(REWRITE_RULE[GSYM MULT_2] BIT0) THEN
    MP_TAC(REWRITE_RULE[GSYM MULT_2] BIT1) THEN
    ABBREV_TAC `two = 2` THEN
    DISCH_THEN(fun th -> REWRITE_TAC[th]) THEN
    DISCH_THEN(fun th -> REWRITE_TAC[th]) THEN
    FIRST_X_ASSUM(SUBST1_TAC o SYM) THEN
    REWRITE_TAC[ADD_CLAUSES; SUC_INJ; EQ_MULT_LCANCEL; ARITH_EQ;
                GSYM LEFT_ADD_DISTRIB; GSYM MULT_ASSOC]) in
  (* Main recursion: take 4 bits at a time when possible, else 1 bit;
     the _0 case pads the split out to the full k with pth_triv. *)
  let rec NUM_SHIFT_CONV k tm =
    if k <= 0 then INST [tm,n_tm] pth_base else
    match tm with
      Comb(_,Comb(_,Comb(_,Comb(_,_)))) when k >= 4 ->
        let i,ntm = topsplit tm in
        let th1 = NUM_SHIFT_CONV (k - 4) ntm in
        (match concl th1 with
           Comb(_,Comb(Comb(_,Const("_0",_)),Comb(Comb(_,ptm),btm))) ->
            let th2 = Array.get pths_0 i in
            let th3 = INST [ntm,n_tm; btm,b_tm; ptm,p_tm] th2 in
            EQ_MP th3 th1
        | Comb(_,Comb(Comb(_,atm),Comb(Comb(_,ptm),btm))) ->
            let th2 = Array.get pths_1 i in
            let th3 = INST[ntm,n_tm; atm,a_tm; btm,b_tm; ptm,p_tm] th2 in
            EQ_MP th3 th1)
    | Comb(Const("BIT0",_),ntm) ->
          let th1 = NUM_SHIFT_CONV (k - 1) ntm in
          (match concl th1 with
            Comb(_,Comb(Comb(_,Const("_0",_)),Comb(Comb(_,ptm),btm))) ->
              EQ_MP (INST [ntm,n_tm; btm,b_tm; ptm,p_tm] pth_z) th1
           | Comb(_,Comb(Comb(_,atm),Comb(Comb(_,ptm),btm))) ->
              EQ_MP
               (INST[ntm,n_tm; atm,a_tm; btm,b_tm; ptm,p_tm] pth_0) th1)
    | Comb(Const("BIT1",_),ntm) ->
          let th1 = NUM_SHIFT_CONV (k - 1) ntm in
          (match concl th1 with
            Comb(_,Comb(Comb(_,atm),Comb(Comb(_,ptm),btm))) ->
              EQ_MP
               (INST [ntm,n_tm; atm,a_tm; btm,b_tm; ptm,p_tm] pth_1) th1)
    | Const("_0",_) ->
          let th1 = NUM_SHIFT_CONV (k - 1) tm in
          (match concl th1 with
            Comb(_,Comb(Comb(_,atm),Comb(Comb(_,ptm),btm))) ->
                EQ_MP (INST [atm,a_tm; btm,b_tm; ptm,p_tm] pth_triv)
                      th1)
    | _ -> failwith "malformed numeral" in
  NUM_SHIFT_CONV in
(* NUM_UNSHIFT_CONV: the inverse of NUM_SHIFT_CONV.  Given a term of the
   form `a + p * b` (p a power of two as a raw numeral), it proves
   |- a + p * b = n where n is the single collapsed raw numeral.  The
   puths_1/puths_2 tables handle 4 and 8 bits at a time respectively. *)
let NUM_UNSHIFT_CONV =
  let pth_triv = (STANDARDIZE o prove)
   (`a + p * _0 = a`,
    SUBST1_TAC(SYM(SPEC `_0` NUMERAL)) THEN
    REWRITE_TAC[MULT_CLAUSES; ADD_CLAUSES])
  and pth_base = (STANDARDIZE o prove)
   (`a + BIT1 _0 * b = a + b`,
    SUBST1_TAC(SYM(SPEC `BIT1 _0` NUMERAL)) THEN
    REWRITE_TAC[MULT_CLAUSES; ADD_CLAUSES])
  and pth_0 = (STANDARDIZE o prove)
   (`BIT0 a + BIT0 p * b = BIT0(a + p * b)`,
    REWRITE_TAC[BIT0] THEN REWRITE_TAC[GSYM MULT_2] THEN
    REWRITE_TAC[GSYM MULT_ASSOC; GSYM LEFT_ADD_DISTRIB])
  and pth_1 = (STANDARDIZE o prove)
   (`BIT1 a + BIT0 p * b = BIT1(a + p * b)`,
    REWRITE_TAC[BIT0; BIT1] THEN REWRITE_TAC[GSYM MULT_2] THEN
    REWRITE_TAC[ADD_CLAUSES; SUC_INJ] THEN
    REWRITE_TAC[GSYM MULT_ASSOC; GSYM LEFT_ADD_DISTRIB] THEN
    REWRITE_TAC[EQ_MULT_LCANCEL; ARITH_EQ])
  and pth_z = (STANDARDIZE o prove)
   (`_0 + BIT0 p * b = BIT0(_0 + p * b)`,
    SUBST1_TAC(SYM(SPEC `_0` NUMERAL)) THEN
    REWRITE_TAC[BIT1; BIT0] THEN REWRITE_TAC[ADD_CLAUSES] THEN
    REWRITE_TAC[RIGHT_ADD_DISTRIB])
  and puths_1 = (Array.of_list o CONJUNCTS o STANDARDIZE o prove)
   (`(a + p * b = n <=>
      BIT0(BIT0(BIT0(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
      BIT0(BIT0(BIT0(BIT0 n)))) /\
     (a + p * b = n <=>
      BIT1(BIT0(BIT0(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
      BIT1(BIT0(BIT0(BIT0 n)))) /\
     (a + p * b = n <=>
      BIT0(BIT1(BIT0(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
      BIT0(BIT1(BIT0(BIT0 n)))) /\
     (a + p * b = n <=>
      BIT1(BIT1(BIT0(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
      BIT1(BIT1(BIT0(BIT0 n)))) /\
     (a + p * b = n <=>
      BIT0(BIT0(BIT1(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
      BIT0(BIT0(BIT1(BIT0 n)))) /\
     (a + p * b = n <=>
      BIT1(BIT0(BIT1(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
      BIT1(BIT0(BIT1(BIT0 n)))) /\
     (a + p * b = n <=>
      BIT0(BIT1(BIT1(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
      BIT0(BIT1(BIT1(BIT0 n)))) /\
     (a + p * b = n <=>
      BIT1(BIT1(BIT1(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
      BIT1(BIT1(BIT1(BIT0 n)))) /\
     (a + p * b = n <=>
      BIT0(BIT0(BIT0(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
      BIT0(BIT0(BIT0(BIT1 n)))) /\
     (a + p * b = n <=>
      BIT1(BIT0(BIT0(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
      BIT1(BIT0(BIT0(BIT1 n)))) /\
     (a + p * b = n <=>
      BIT0(BIT1(BIT0(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
      BIT0(BIT1(BIT0(BIT1 n)))) /\
     (a + p * b = n <=>
      BIT1(BIT1(BIT0(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
      BIT1(BIT1(BIT0(BIT1 n)))) /\
     (a + p * b = n <=>
      BIT0(BIT0(BIT1(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
      BIT0(BIT0(BIT1(BIT1 n)))) /\
     (a + p * b = n <=>
      BIT1(BIT0(BIT1(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
      BIT1(BIT0(BIT1(BIT1 n)))) /\
     (a + p * b = n <=>
      BIT0(BIT1(BIT1(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
      BIT0(BIT1(BIT1(BIT1 n)))) /\
     (a + p * b = n <=>
      BIT1(BIT1(BIT1(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
      BIT1(BIT1(BIT1(BIT1 n))))`,
    SUBST1_TAC(MESON[NUMERAL] `_0 = 0`) THEN
    MP_TAC(REWRITE_RULE[GSYM MULT_2] BIT0) THEN
    MP_TAC(REWRITE_RULE[GSYM MULT_2] BIT1) THEN
    ABBREV_TAC `two = 2` THEN
    DISCH_THEN(fun th -> REWRITE_TAC[th]) THEN
    DISCH_THEN(fun th -> REWRITE_TAC[th]) THEN
    FIRST_X_ASSUM(SUBST1_TAC o SYM) THEN
    REWRITE_TAC[ADD_CLAUSES; SUC_INJ; EQ_MULT_LCANCEL; ARITH_EQ;
                GSYM LEFT_ADD_DISTRIB; GSYM MULT_ASSOC]) in
  (* puths_2: 8-bit steps built by composing two 4-bit table entries. *)
  let puths_2 = Array.of_list
    (map (fun i -> let th1 = Array.get puths_1 (i mod 16)
                   and th2 = Array.get puths_1 (i / 16) in
                   let th3 = GEN_REWRITE_RULE RAND_CONV [th1] th2 in
                   STANDARDIZE th3) (0--255)) in
  let rec NUM_UNSHIFT_CONV tm =
    match tm with
      Comb(Comb(Const("+",_),atm),Comb(Comb(Const("*",_),ptm),btm)) ->
        (match (atm,ptm,btm) with
           (_,_,Const("_0",_)) ->
              INST [atm,a_tm; ptm,p_tm] pth_triv
         | (_,Comb(Const("BIT1",_),Const("_0",_)),_) ->
              let th1 = INST [atm,a_tm; btm,b_tm] pth_base in
              let Comb(_,Comb(Comb(_,mtm),ntm)) = concl th1 in
              TRANS th1 (NUM_ADD_RULE mtm ntm)
         | (Comb(_,Comb(_,Comb(_,Comb(_,atm')))),
            Comb(_,Comb(_,Comb(_,Comb(_,(Comb(_,_) as ptm'))))),_) ->
              let i,_ = topsplit atm in
              (match (atm',ptm') with
                 (Comb(_,Comb(_,Comb(_,Comb(_,atm'')))),
                  Comb(_,Comb(_,Comb(_,Comb(_,(Comb(_,_) as ptm'')))))) ->
                   let j,_ = topsplit atm' in
                   let tm' = mk_comb(mk_comb(add_tm,atm''),
                                     mk_comb(mk_comb(mul_tm,ptm''),btm)) in
                   let th1 = NUM_UNSHIFT_CONV tm' in
                   let th2 = INST [atm'',a_tm; ptm'',p_tm; btm,b_tm;
                                   rand(concl th1),n_tm]
                                  (Array.get puths_2 (16 * j + i)) in
                   EQ_MP th2 th1
               | _ ->
                   let tm' = mk_comb(mk_comb(add_tm,atm'),
                                     mk_comb(mk_comb(mul_tm,ptm'),btm)) in
                   let th1 = NUM_UNSHIFT_CONV tm' in
                   let th2 = INST [atm',a_tm; ptm',p_tm; btm,b_tm;
                                   rand(concl th1),n_tm]
                                  (Array.get puths_1 i) in
                   EQ_MP th2 th1)
         | (Const("_0",_),Comb(Const("BIT0",_),qtm),_) ->
              let th1 = INST [btm,b_tm; qtm,p_tm] pth_z in
              CONV_RULE(RAND_CONV(RAND_CONV NUM_UNSHIFT_CONV)) th1
         | (Comb(Const("BIT0",_),ctm),Comb(Const("BIT0",_),qtm),_) ->
              let th1 = INST [ctm,a_tm; btm,b_tm; qtm,p_tm] pth_0 in
              CONV_RULE(RAND_CONV(RAND_CONV NUM_UNSHIFT_CONV)) th1
         | (Comb(Const("BIT1",_),ctm),Comb(Const("BIT0",_),qtm),_) ->
              let th1 = INST [ctm,a_tm; btm,b_tm; qtm,p_tm] pth_1 in
              CONV_RULE(RAND_CONV(RAND_CONV NUM_UNSHIFT_CONV)) th1
         | _ -> failwith "malformed numeral")
    | _ -> failwith "malformed numeral" in
  NUM_UNSHIFT_CONV in
(* NUM_SQUARE_RULE tm proves |- tm EXP 2 = n for a raw numeral tm.  The
   strategy, chosen by the bit counts (w = #BIT1s, z = #BIT0s), is:
   - small inputs: bit recursion (pth_even/pth_odd) with a special fast
     path for numerals ending in ...111 (pth_qstep);
   - medium inputs (w+z < 800): split in half and use the identity in
     pth_rec (Karatsuba-style, one recursive square saved via subtraction);
   - large inputs: three-way split with Toom-3 interpolation (pth_toom3),
     computing the cross coefficients arithmetically and certifying them
     by three auxiliary squarings. *)
let NUM_SQUARE_RULE =
  let pth_0 = (STANDARDIZE o prove)
   (`_0 EXP 2 = _0`,
    MESON_TAC[NUMERAL; REWRITE_CONV[ARITH] `0 EXP 2`])
  and pth_1 = (STANDARDIZE o prove)
   (`(BIT1 _0) EXP 2 = BIT1 _0`,
    MESON_TAC[NUMERAL; REWRITE_CONV[ARITH] `1 EXP 2`])
  and pth_even = (STANDARDIZE o prove)
   (`m EXP 2 = n <=> (BIT0 m) EXP 2 = BIT0(BIT0 n)`,
    ABBREV_TAC `two = 2` THEN
    REWRITE_TAC[BIT0] THEN EXPAND_TAC "two" THEN
    REWRITE_TAC[GSYM MULT_2] THEN REWRITE_TAC[EXP_2] THEN
    REWRITE_TAC[AC MULT_AC `(2 * m) * (2 * n) = 2 * 2 * m * n`] THEN
    REWRITE_TAC[EQ_MULT_LCANCEL; ARITH_EQ])
  and pth_odd = (STANDARDIZE o prove)
   (`m EXP 2 = n <=> (BIT1 m) EXP 2 = BIT1(BIT0(m + n))`,
    ABBREV_TAC `two = 2` THEN
    REWRITE_TAC[NUMERAL; BIT0; BIT1] THEN
    EXPAND_TAC "two" THEN REWRITE_TAC[GSYM MULT_2] THEN
    REWRITE_TAC[EXP_2; MULT_CLAUSES; ADD_CLAUSES] THEN
    REWRITE_TAC[SUC_INJ; GSYM MULT_ASSOC; GSYM LEFT_ADD_DISTRIB] THEN
    REWRITE_TAC[AC ADD_AC `(m + m * 2 * m) + m = m * 2 * m + m + m`] THEN
    REWRITE_TAC[GSYM MULT_2; AC MULT_AC `m * 2 * m = 2 * m * m`] THEN
    REWRITE_TAC[GSYM MULT_ASSOC; GSYM LEFT_ADD_DISTRIB] THEN
    REWRITE_TAC[EQ_MULT_LCANCEL; ARITH_EQ] THEN
    GEN_REWRITE_TAC (RAND_CONV o RAND_CONV) [ADD_SYM] THEN
    REWRITE_TAC[EQ_ADD_RCANCEL])
  and pth_qstep = (UNDISCH o STANDARDIZE o prove)
   (`n + BIT1 _0 = m /\
     m EXP 2 = p /\
     m + a = BIT0(BIT0 p)
     ==> (BIT1(BIT1(BIT1 n))) EXP 2 = BIT1(BIT0(BIT0(BIT0 a)))`,
    ABBREV_TAC `two = 2` THEN
    SUBST1_TAC(MESON[NUMERAL] `_0 = 0`) THEN
    REWRITE_TAC[BIT1; BIT0] THEN EXPAND_TAC "two" THEN
    REWRITE_TAC[GSYM MULT_2] THEN
    REWRITE_TAC[ADD1; LEFT_ADD_DISTRIB; GSYM ADD_ASSOC] THEN
    REWRITE_TAC[MULT_ASSOC] THEN REWRITE_TAC[ARITH] THEN
    REWRITE_TAC[IMP_CONJ] THEN
    DISCH_THEN(SUBST1_TAC o SYM) THEN
    DISCH_THEN(SUBST1_TAC o SYM) THEN DISCH_TAC THEN
    MATCH_MP_TAC(MESON[EQ_ADD_LCANCEL]
     `!m:num. m + n = m + p ==> n = p`) THEN
    EXISTS_TAC `16 * (n + 1)` THEN
    ASM_REWRITE_TAC[ADD_ASSOC; GSYM LEFT_ADD_DISTRIB] THEN
    EXPAND_TAC "two" THEN REWRITE_TAC[EXP_2] THEN
    REWRITE_TAC[LEFT_ADD_DISTRIB; RIGHT_ADD_DISTRIB] THEN
    REWRITE_TAC[MULT_CLAUSES; MULT_ASSOC] THEN
    REWRITE_TAC[AC MULT_AC `(8 * n) * NUMERAL p = (8 * NUMERAL p) * n`] THEN
    REWRITE_TAC[ARITH] THEN
    REWRITE_TAC[AC ADD_AC
     `(n + 16) + p + q + 49 = (n + p + q) + (16 + 49)`] THEN
    REWRITE_TAC[GSYM ADD_ASSOC] THEN REWRITE_TAC[ARITH] THEN
    REWRITE_TAC[ADD_ASSOC; EQ_ADD_RCANCEL] THEN
    REWRITE_TAC[GSYM ADD_ASSOC; GSYM MULT_2; MULT_ASSOC] THEN
    ONCE_REWRITE_TAC[AC ADD_AC `a + b + c:num = b + a + c`] THEN
    REWRITE_TAC[GSYM RIGHT_ADD_DISTRIB] THEN
    REWRITE_TAC[ARITH])
  and pth_rec = (UNDISCH o STANDARDIZE o prove)
   (`n = l + p * h /\
     h + l = m /\
     h EXP 2 = a /\
     l EXP 2 = c /\
     m EXP 2 = d /\
     a + c = e /\
     e + b = d
     ==> n EXP 2 = c + p * (b + p * a)`,
    REWRITE_TAC[IMP_CONJ] THEN
    DISCH_THEN SUBST1_TAC THEN
    REPLICATE_TAC 5 (DISCH_THEN(SUBST1_TAC o SYM)) THEN
    REWRITE_TAC[EXP_2; LEFT_ADD_DISTRIB; RIGHT_ADD_DISTRIB] THEN
    REWRITE_TAC[MULT_AC] THEN CONV_TAC(BINOP_CONV NUM_CANCEL_CONV) THEN
    DISCH_THEN SUBST1_TAC THEN REWRITE_TAC[RIGHT_ADD_DISTRIB] THEN
    REWRITE_TAC[MULT_AC] THEN REWRITE_TAC[ADD_AC])
  and pth_toom3 = (STANDARDIZE o prove)
   (`h EXP 2 = e /\
     l EXP 2 = a /\
     (l + BIT1 _0 * (m + BIT1 _0 * h)) EXP 2 =
     a + BIT1 _0 * (b + BIT1 _0 * (c + BIT1 _0 * (d + BIT1 _0 * e))) /\
     (l + BIT0(BIT1 _0) * (m + BIT0(BIT1 _0) * h)) EXP 2 =
     a + BIT0(BIT1 _0) * (b + BIT0(BIT1 _0) *
     (c + BIT0(BIT1 _0) * (d + BIT0(BIT1 _0) * e))) /\
     (h + BIT0(BIT1 _0) * (m + BIT0(BIT1 _0) * l)) EXP 2 =
     e + BIT0(BIT1 _0) * (d + BIT0(BIT1 _0) *
     (c + BIT0(BIT1 _0) * (b + BIT0(BIT1 _0) * a)))
     ==> (l + p * (m + p * h)) EXP 2 =
         a + p * (b + p * (c + p * (d + p * e)))`,
    ABBREV_TAC `two = 2` THEN
    SUBST1_TAC(MESON[NUMERAL] `_0 = 0`) THEN
    REWRITE_TAC[BIT1; BIT0] THEN
    EXPAND_TAC "two" THEN REWRITE_TAC[GSYM MULT_2] THEN
    REWRITE_TAC[ARITH] THEN
    SUBGOAL_THEN
     `!p x y z. (x + p * (y + p * z)) EXP 2 =
                x * x + p * (2 * x * y + p * ((2 * x * z + y * y) +
                p * (2 * y * z + p * z * z)))`
    (fun th -> REWRITE_TAC[th])
    THENL
     [REWRITE_TAC[EXP_2; MULT_2; LEFT_ADD_DISTRIB; RIGHT_ADD_DISTRIB] THEN
      REWRITE_TAC[MULT_AC] THEN REWRITE_TAC[ADD_AC];
      REWRITE_TAC[EXP_2]] THEN
    MAP_EVERY ABBREV_TAC
     [`a':num = l * l`; `b' = 2 * l * m`; `c' = 2 * l * h + m * m`;
      `d' = 2 * m * h`; `e':num = h * h`] THEN
    SUBST1_TAC(AC MULT_AC `2 * m * l = 2 * l * m`) THEN
    SUBST1_TAC(AC MULT_AC `2 * h * l = 2 * l * h`) THEN
    SUBST1_TAC(AC MULT_AC `2 * h * m = 2 * m * h`) THEN
    ASM_REWRITE_TAC[] THEN EXPAND_TAC "two" THEN
    POP_ASSUM_LIST(K ALL_TAC) THEN
    ASM_CASES_TAC `a':num = a` THEN ASM_REWRITE_TAC[] THEN
    ASM_CASES_TAC `e':num = e` THEN ASM_REWRITE_TAC[] THEN
    POP_ASSUM_LIST(K ALL_TAC) THEN
    REWRITE_TAC[EQ_ADD_LCANCEL; EQ_MULT_LCANCEL] THEN
    REWRITE_TAC[LEFT_ADD_DISTRIB; MULT_ASSOC] THEN
    REWRITE_TAC[ARITH] THEN
    REWRITE_TAC[MULT_CLAUSES; EQ_ADD_LCANCEL] THEN
    REWRITE_TAC[ADD_ASSOC; EQ_ADD_RCANCEL] THEN
    REWRITE_TAC[GSYM ADD_ASSOC] THEN DISCH_TAC THEN
    FIRST_ASSUM(MP_TAC o MATCH_MP (MESON[]
     `b = b' /\ c = c' /\ d = d'
      ==> 5 * b + c' + d' = 5 * b' + c + d`)) THEN
    REWRITE_TAC[LEFT_ADD_DISTRIB; MULT_ASSOC] THEN
    REWRITE_TAC(map (fun k ->
      SYM(REWRITE_CONV[ARITH_SUC]
       (mk_comb(suc_tm,mk_small_numeral(k - 1)))))
     (1--5)) THEN
    REWRITE_TAC[MULT_CLAUSES; ADD_CLAUSES] THEN
    CONV_TAC(LAND_CONV NUM_CANCEL_CONV) THEN DISCH_THEN SUBST_ALL_TAC THEN
    FIRST_ASSUM(MP_TAC o MATCH_MP (MESON[]
     `b = b' /\ (c:num) = c' /\ d = d'
      ==> b + d':num = b' + d /\ 4 * b + d' = 4 * b' + d`)) THEN
    REWRITE_TAC[LEFT_ADD_DISTRIB; MULT_ASSOC] THEN
    REWRITE_TAC(map (fun k ->
      SYM(REWRITE_CONV[ARITH_SUC]
       (mk_comb(suc_tm,mk_small_numeral(k - 1)))))
     (1--4)) THEN
    REWRITE_TAC[MULT_CLAUSES; ADD_CLAUSES] THEN
    CONV_TAC(LAND_CONV(BINOP_CONV NUM_CANCEL_CONV)) THEN
    REWRITE_TAC[GSYM MULT_2] THEN ONCE_REWRITE_TAC[ADD_SYM] THEN
    REWRITE_TAC[GSYM(el 4 (CONJUNCTS MULT_CLAUSES))] THEN
    SIMP_TAC[EQ_MULT_LCANCEL; NOT_SUC])
  and pth_even3 = (STANDARDIZE o prove)
   (`m EXP 2 = n <=>
     (BIT0(BIT0(BIT0 m))) EXP 2 = BIT0(BIT0(BIT0(BIT0(BIT0(BIT0 n)))))`,
    ABBREV_TAC `two = 2` THEN
    REWRITE_TAC[BIT0] THEN REWRITE_TAC[GSYM MULT_2] THEN
    EXPAND_TAC "two" THEN REWRITE_TAC[EXP_2] THEN
    REWRITE_TAC[AC MULT_AC
     `(2 * 2 * 2 * m) * 2 * 2 * 2 * m = 2 * 2 * 2 * 2 * 2 * 2 * m * m`] THEN
    REWRITE_TAC[EQ_MULT_LCANCEL; ARITH_EQ]) in
  (* Conversions collapsing 2/3/4 nested shift decompositions at once. *)
  let NUM_UNSHIFT2_CONV =
    RAND_CONV(RAND_CONV NUM_UNSHIFT_CONV) THENC NUM_UNSHIFT_CONV in
  let NUM_UNSHIFT3_CONV =
    RAND_CONV(RAND_CONV NUM_UNSHIFT2_CONV) THENC NUM_UNSHIFT_CONV in
  let NUM_UNSHIFT4_CONV =
    RAND_CONV(RAND_CONV NUM_UNSHIFT3_CONV) THENC NUM_UNSHIFT_CONV in
  let BINOP2_CONV conv1 conv2 = COMB2_CONV (RAND_CONV conv1) conv2 in
  let TOOM3_CONV = BINOP2_CONV
    (LAND_CONV NUM_UNSHIFT2_CONV) NUM_UNSHIFT4_CONV in
  let rec GEN_NUM_SQUARE_RULE w z tm =
    match tm with
      Const("_0",_) -> pth_0
    | Comb(Const("BIT0",_),mtm) ->
          (match mtm with
             Comb(Const("BIT0",_),Comb(Const("BIT0",_),ptm)) ->
                let th1 = GEN_NUM_SQUARE_RULE w (z - 3) ptm in
                let ntm = rand(concl th1) in
                EQ_MP (INST [ptm,m_tm; ntm,n_tm] pth_even3) th1
           | _ ->
                let th1 = GEN_NUM_SQUARE_RULE w (z - 1) mtm in
                let ntm = rand(concl th1) in
                EQ_MP (INST [mtm,m_tm; ntm,n_tm] pth_even) th1)
    | Comb(Const("BIT1",_),mtm) ->
          if mtm = zero_tm then pth_1 else
          if (w < 100 || z < 20) && w + z < 150 then
            match mtm with
              Comb(Const("BIT1",_),Comb(Const("BIT1",_),ntm)) ->
                let th1 = NUM_ADD_RULE ntm one_tm in
                let mtm = rand(concl th1) in
                let th2 = NUM_SQUARE_RULE mtm in
                let ptm = rand(concl th2) in
                let atm = subbn
                  (mk_comb(BIT0_tm,mk_comb(BIT0_tm,ptm))) mtm in
                let th3 = NUM_ADD_RULE mtm atm in
                let th4 = INST
                  [atm,a_tm; mtm,m_tm; ntm,n_tm; ptm,p_tm] pth_qstep in
                QUICK_PROVE_HYP (CONJ th1 (CONJ th2 th3)) th4
            | _ ->
                let th1 = GEN_NUM_SQUARE_RULE (w - 1) z mtm in
                let ntm = rand(concl th1) in
                let th2 = EQ_MP (INST [mtm,m_tm; ntm,n_tm] pth_odd) th1 in
                (match concl th2 with
                   Comb(_,Comb(_,Comb(_,Comb(Comb(_,ptm),qtm)))) ->
                     let th3 = NUM_ADD_RULE ptm qtm in
                     TRANS th2 (AP_BIT1 (AP_BIT0 th3)))
          else if w + z < 800 then
            (* Half-split recursion via pth_rec: b is derived by a raw
               subtraction (subbn) then certified by NUM_ADD_RULE. *)
            let k2 = (w + z) / 2 in
            let th1 = NUM_SHIFT_CONV k2 tm in
            let Comb(Comb(_,ltm),Comb(Comb(_,ptm),htm)) = rand(concl th1) in
            let th2 = NUM_ADD_RULE htm ltm in
            let mtm = rand(concl th2) in
            let th3 = NUM_SQUARE_RULE htm
            and th4 = NUM_SQUARE_RULE ltm
            and th5 = NUM_SQUARE_RULE mtm in
            let atm = rand(concl th3)
            and ctm = rand(concl th4)
            and dtm = rand(concl th5) in
            let th6 = NUM_ADD_RULE atm ctm in
            let etm = rand(concl th6) in
            let btm = subbn dtm etm in
            let th7 = NUM_ADD_RULE etm btm in
            let dtm = rand(concl th7) in
            let th8 = INST [atm,a_tm; btm,b_tm; ctm,c_tm; dtm,d_tm; etm,e_tm;
                            htm,h_tm; ltm,l_tm; mtm,m_tm; tm,n_tm; ptm,p_tm]
                           pth_rec in
            let th9 = QUICK_PROVE_HYP (end_itlist CONJ
               [th1;th2;th3;th4;th5;th6;th7]) th8 in
            CONV_RULE(RAND_CONV(RAND_CONV(RAND_CONV NUM_UNSHIFT_CONV) THENC
                                NUM_UNSHIFT_CONV)) th9
          else
            (* Toom-3: the middle coefficients b,c,d are computed with
               exact rational arithmetic on nums, then certified by the
               three extra squarings demanded by pth_toom3. *)
            let k3 = (w + z) / 3 in
            let th0 = (NUM_SHIFT_CONV k3 THENC
                       RAND_CONV(RAND_CONV(NUM_SHIFT_CONV k3))) tm in
            let Comb(Comb(_,ltm),Comb(Comb(_,ptm),
                     Comb(Comb(_,mtm),Comb(Comb(_,_),htm)))) = rand(concl th0) in
            let th1 = NUM_SQUARE_RULE htm
            and th2 = NUM_SQUARE_RULE ltm in
            let atm = rand(concl th2) and etm = rand(concl th1) in
            let lnum = dest_raw_numeral ltm
            and mnum = dest_raw_numeral mtm
            and hnum = dest_raw_numeral htm in
            let btm = rand(mk_numeral(num_2 */ lnum */ mnum))
            and ctm = rand(mk_numeral(mnum */ mnum +/ num_2 */ lnum */ hnum))
            and dtm = rand(mk_numeral(num_2 */ hnum */ mnum)) in
            let th = INST
             [atm,a_tm; btm,b_tm; ctm,c_tm; dtm,d_tm; etm,e_tm;
              htm,h_tm; mtm,m_tm; ltm,l_tm; ptm,p_tm] pth_toom3 in
            let th' = CONV_RULE
             (BINOP2_CONV
               (RAND_CONV(RAND_CONV
                 (BINOP2_CONV TOOM3_CONV (BINOP2_CONV TOOM3_CONV TOOM3_CONV))))
               TOOM3_CONV) th in
            let [tm3;tm4;tm5] = conjuncts(rand(rand(lhand(concl th')))) in
            let th3 = NUM_SQUARE_RULE (lhand(lhand tm3))
            and th4 = NUM_SQUARE_RULE (lhand(lhand tm4))
            and th5 = NUM_SQUARE_RULE (lhand(lhand tm5)) in
            MP th' (end_itlist CONJ [th1;th2;th3;th4;th5])
  and NUM_SQUARE_RULE tm =
    let w,z = bitcounts tm in GEN_NUM_SQUARE_RULE w z tm in
  NUM_SQUARE_RULE in
(* NUM_MUL_RULE k l mtm ntm proves |- mtm * ntm = p for raw numerals mtm
   (with k bits) and ntm (with l bits).  Trailing zero bits are stripped
   with pth_evenl/pth_evenr; for BIT1/BIT1 operands it either recurses
   bitwise (small/unbalanced operands, with a recoding shortcut when an
   operand ends in ...11), or for large balanced operands reduces the
   product to two squarings via  4*m*n = (m+n)^2 - (m-n)^2  (pth_oo1/2).
   Fix: the fourth disjunct of the strategy test used the polymorphic
   OCaml (<=) on values of type num (`Int l */ Int l <= Int k`); structural
   comparison is not a valid numeric order once a product overflows from
   Int into Big_int, so it now uses the Num comparison (<=/) like the
   parallel third disjunct. *)
let NUM_MUL_RULE =
  let QUICK_PROVE_HYP ath bth =
    EQ_MP (DEDUCT_ANTISYM_RULE ath bth) ath
  and pth_0l,pth_0r = (CONJ_PAIR o STANDARDIZE o prove)
   (`_0 * n = _0 /\ m * _0 = _0`,
    MESON_TAC[NUMERAL; MULT_CLAUSES])
  and pth_1l,pth_1r = (CONJ_PAIR o STANDARDIZE o prove)
   (`(BIT1 _0) * n = n /\ m * (BIT1 _0) = m`,
    MESON_TAC[NUMERAL; MULT_CLAUSES])
  and pth_evenl,pth_evenr = (CONJ_PAIR o STANDARDIZE o prove)
   (`(m * n = p <=> (BIT0 m) * n = BIT0 p) /\
     (m * n = p <=> m * BIT0 n = BIT0 p)`,
    REWRITE_TAC[BIT0] THEN REWRITE_TAC[GSYM MULT_2] THEN
    REWRITE_TAC[AC MULT_AC `m * 2 * n = 2 * m * n`] THEN
    REWRITE_TAC[GSYM MULT_ASSOC; EQ_MULT_LCANCEL; ARITH_EQ])
  and pth_oddl,pth_oddr = (CONJ_PAIR o STANDARDIZE o prove)
   (`(m * n = p <=> BIT1 m * n = BIT0 p + n) /\
     (m * n = p <=> m * BIT1 n = BIT0 p + m)`,
    REWRITE_TAC[BIT0; BIT1] THEN REWRITE_TAC[GSYM MULT_2] THEN
    REWRITE_TAC[MULT_CLAUSES] THEN
    REWRITE_TAC[MESON[MULT_AC; ADD_SYM] `m + m * 2 * n = 2 * m * n + m`] THEN
    REWRITE_TAC[GSYM MULT_ASSOC; EQ_MULT_LCANCEL; EQ_ADD_RCANCEL] THEN
    REWRITE_TAC[ARITH_EQ]) in
  (* (2m+1)(2n+1) via squares: with p = m-n (here n <= m), and using
     a = SUC(m+n), we get d = a^2 - p^2 = (BIT1 m)*(BIT1 n). *)
  let pth_oo1 = (UNDISCH_ALL o STANDARDIZE o prove)
   (`n + p = m /\ SUC(m + n) = a /\ p EXP 2 = b /\ a EXP 2 = c /\ b + d = c
     ==> ((BIT1 m) * (BIT1 n) = d)`,
    ABBREV_TAC `two = 2` THEN REWRITE_TAC[BIT1; IMP_CONJ] THEN
    FIRST_X_ASSUM(SUBST1_TAC o SYM) THEN
    REWRITE_TAC[EXP_2; GSYM MULT_2] THEN
    REPLICATE_TAC 4 (DISCH_THEN(SUBST1_TAC o SYM)) THEN
    REWRITE_TAC[ADD1; AC ADD_AC `((n + p) + n) + 1 = (p + (n + n)) + 1`] THEN
    REWRITE_TAC[GSYM MULT_2] THEN
    REWRITE_TAC[LEFT_ADD_DISTRIB; RIGHT_ADD_DISTRIB] THEN
    REWRITE_TAC[GSYM ADD_ASSOC; MULT_CLAUSES; EQ_ADD_LCANCEL] THEN
    DISCH_THEN SUBST1_TAC THEN
    REWRITE_TAC[MULT_2; LEFT_ADD_DISTRIB; RIGHT_ADD_DISTRIB] THEN
    REWRITE_TAC[MULT_AC] THEN REWRITE_TAC[ADD_AC]) in
  let pth_oo2 = PURE_ONCE_REWRITE_RULE[MULT_SYM]
                  (INST [n_tm,m_tm; m_tm,n_tm] pth_oo1) in
  (* Recoding: replace a ...11-ending operand m by its successor p, so
     p * n = m * n + n, turning the product into one over a "rounder"
     operand plus one addition. *)
  let pth_recodel = (UNDISCH_ALL o STANDARDIZE o prove)
   (`SUC(_0 + m) = p ==> (p * n = a + n <=> m * n = a)`,
    SUBST1_TAC(MESON[NUMERAL] `_0 = 0`) THEN
    DISCH_THEN(SUBST1_TAC o SYM) THEN
    REWRITE_TAC[ADD_CLAUSES; MULT_CLAUSES; EQ_ADD_RCANCEL])
  and pth_recoder = (UNDISCH_ALL o STANDARDIZE o prove)
   (`SUC(_0 + n) = p ==> (m * p = a + m <=> m * n = a)`,
    ONCE_REWRITE_TAC[MULT_SYM] THEN
    SUBST1_TAC(MESON[NUMERAL] `_0 = 0`) THEN
    DISCH_THEN(SUBST1_TAC o SYM) THEN
    REWRITE_TAC[ADD_CLAUSES; MULT_CLAUSES; EQ_ADD_RCANCEL]) in
  let rec NUM_MUL_RULE k l tm tm' =
    match (tm,tm') with
      (Const("_0",_),_) -> INST [tm',n_tm] pth_0l
    | (_,Const("_0",_)) -> INST [tm,m_tm] pth_0r
    | (Comb(Const("BIT1",_),Const("_0",_)),_) -> INST [tm',n_tm] pth_1l
    | (_,Comb(Const("BIT1",_),Const("_0",_))) -> INST [tm,m_tm] pth_1r
    | (Comb(Const("BIT0",_),mtm),_) ->
          let th0 = NUM_MUL_RULE (k - 1) l mtm tm' in
          let th1 = INST
            [mtm,m_tm; tm',n_tm; rand(concl th0),p_tm] pth_evenl in
          EQ_MP th1 th0
    | (_,Comb(Const("BIT0",_),ntm)) ->
          let th0 = NUM_MUL_RULE k (l - 1) tm ntm in
          let th1 = INST
            [tm,m_tm; ntm,n_tm; rand(concl th0),p_tm] pth_evenr in
          EQ_MP th1 th0
    | (Comb(Const("BIT1",_),mtm),Comb(Const("BIT1",_),ntm)) ->
          (* Bitwise recursion when either operand is small or the sizes
             are very unbalanced; otherwise use the squaring identity. *)
          if k <= 50 || l <= 50 ||
             Int k */ Int k <=/ Int l ||
             Int l */ Int l <=/ Int k then
            match (mtm,ntm) with
              (Comb(Const("BIT1",_),Comb(Const("BIT1",_),_)),_) ->
                  let th1 = NUM_ADC_RULE zero_tm tm in
                  let ptm = rand(concl th1) in
                  let th2 = NUM_MUL_RULE k l ptm tm' in
                  let atm = subbn (rand(concl th2)) tm' in
                  let th3 = INST [tm,m_tm; tm',n_tm; ptm,p_tm; atm,a_tm]
                                 pth_recodel in
                  let th4 = PROVE_HYP th1 th3 in
                  EQ_MP th4 (TRANS th2 (SYM(NUM_ADD_RULE atm tm')))
            | (_,Comb(Const("BIT1",_),Comb(Const("BIT1",_),_))) ->
                  let th1 = NUM_ADC_RULE zero_tm tm' in
                  let ptm = rand(concl th1) in
                  let th2 = NUM_MUL_RULE k l tm ptm in
                  let atm = subbn (rand(concl th2)) tm in
                  let th3 = INST [tm,m_tm; tm',n_tm; ptm,p_tm; atm,a_tm]
                                 pth_recoder in
                  let th4 = PROVE_HYP th1 th3 in
                  EQ_MP th4 (TRANS th2 (SYM(NUM_ADD_RULE atm tm)))
            | _ ->
                  (* Peel a bit off the larger operand. *)
                  if k <= l then
                    let th0 = NUM_MUL_RULE (k - 1) l mtm tm' in
                    let ptm = rand(concl th0) in
                    let th1 =
                      EQ_MP (INST [mtm,m_tm; tm',n_tm; ptm,p_tm] pth_oddl) th0 in
                    let tm1 = lhand(rand(concl th1)) in
                    TRANS th1 (NUM_ADD_RULE tm1 tm')
                  else
                    let th0 = NUM_MUL_RULE k (l - 1) tm ntm in
                    let ptm = rand(concl th0) in
                    let th1 =
                      EQ_MP (INST [tm,m_tm; ntm,n_tm; ptm,p_tm] pth_oddr) th0 in
                    let tm1 = lhand(rand(concl th1)) in
                    TRANS th1 (NUM_ADD_RULE tm1 tm)
          else
            let mval = dest_raw_numeral mtm
            and nval = dest_raw_numeral ntm in
            if nval <=/ mval then
              let ptm = rand(mk_numeral(mval -/ nval)) in
              let th2 = NUM_ADD_RULE ntm ptm
              and th3 = NUM_ADC_RULE mtm ntm in
              let atm = rand(concl th3) in
              let th4 = NUM_SQUARE_RULE ptm in
              let btm = rand(concl th4) in
              let th5 = NUM_SQUARE_RULE atm in
              let ctm = rand(concl th5) in
              let dtm = subbn ctm btm in
              let th6 = NUM_ADD_RULE btm dtm in
              let th1 = INST [atm,a_tm; btm,b_tm; ctm,c_tm; dtm,d_tm;
                              mtm,m_tm; ntm,n_tm; ptm,p_tm] pth_oo1 in
              QUICK_PROVE_HYP (end_itlist CONJ
                 [th2;th3;th4;th5;th6]) th1
            else
              let ptm = rand(mk_numeral(nval -/ mval)) in
              let th2 = NUM_ADD_RULE mtm ptm
              and th3 = NUM_ADC_RULE ntm mtm in
              let atm = rand(concl th3) in
              let th4 = NUM_SQUARE_RULE ptm in
              let btm = rand(concl th4) in
              let th5 = NUM_SQUARE_RULE atm in
              let ctm = rand(concl th5) in
              let dtm = subbn ctm btm in
              let th6 = NUM_ADD_RULE btm dtm in
              let th1 = INST [atm,a_tm; btm,b_tm; ctm,c_tm; dtm,d_tm;
                              mtm,m_tm; ntm,n_tm; ptm,p_tm] pth_oo2 in
              QUICK_PROVE_HYP (end_itlist CONJ
                 [th2;th3;th4;th5;th6]) th1
    | _ -> failwith "NUM_MUL_RULE" in
  NUM_MUL_RULE in
let NUM_MULT_CONV' =
let pth_refl = (STANDARDIZE o MESON[EXP_2])
`m EXP 2 = p <=> m * m = p` in
fun tm ->
match tm with
Comb(Comb(Const("*",_),mtm),ntm) ->
if Pervasives.compare mtm ntm = 0 then
let th1 = NUM_SQUARE_RULE mtm in
let ptm = rand(concl th1) in
EQ_MP (INST [mtm,m_tm;ptm,p_tm] pth_refl) th1
else
let w1,z1 = bitcounts mtm and w2,z2 = bitcounts ntm in
NUM_MUL_RULE (w1+z1) (w2+z2) mtm ntm
| _ -> failwith "NUM_MULT_CONV'" in
let NUM_SUC_CONV =
let pth = (STANDARDIZE o prove)
(`SUC(_0 + m) = n <=> SUC(NUMERAL m) = NUMERAL n`,
BINOP_TAC THEN MESON_TAC[NUMERAL; ADD_CLAUSES]) in
fun tm ->
match tm with
Comb(Const("SUC",_),Comb(Const("NUMERAL",_),mtm))
when wellformed mtm ->
let th1 = NUM_ADC_RULE zero_tm mtm in
let ntm = rand(concl th1) in
EQ_MP(INST [mtm,m_tm; ntm,n_tm] pth) th1
| _ -> failwith "NUM_SUC_CONV" in
let NUM_ADD_CONV =
let topthm_add = (STANDARDIZE o MESON[NUMERAL])
`m + n = p <=> NUMERAL m + NUMERAL n = NUMERAL p` in
fun tm ->
match tm with
Comb(Comb(Const("+",_),Comb(Const("NUMERAL",_),mtm)),
Comb(Const("NUMERAL",_),ntm))
when wellformed mtm && wellformed ntm ->
let th1 = NUM_ADD_RULE mtm ntm in
let ptm = rand(concl th1) in
let th2 = INST [mtm,m_tm; ntm,n_tm; ptm,p_tm] topthm_add in
EQ_MP th2 th1
| _ -> failwith "NUM_ADD_CONV" in
let NUM_MULT_CONV =
let topthm_mul = (STANDARDIZE o MESON[NUMERAL])
`m * n = p <=> NUMERAL m * NUMERAL n = NUMERAL p`
and pth_refl = (STANDARDIZE o MESON[NUMERAL; EXP_2])
`m EXP 2 = p <=> NUMERAL m * NUMERAL m = NUMERAL p` in
fun tm ->
match tm with
Comb(Comb(Const("*",_),Comb(Const("NUMERAL",_),mtm)),
Comb(Const("NUMERAL",_),ntm)) ->
if Pervasives.compare mtm ntm = 0 then
let th1 = NUM_SQUARE_RULE mtm in
let ptm = rand(concl th1) in
EQ_MP (INST [mtm,m_tm;ptm,p_tm] pth_refl) th1
else
let w1,z1 = bitcounts mtm and w2,z2 = bitcounts ntm in
let th1 = NUM_MUL_RULE (w1+z1) (w2+z2) mtm ntm in
let ptm = rand(concl th1) in
let th2 = INST [mtm,m_tm; ntm,n_tm; ptm,p_tm] topthm_mul in
EQ_MP th2 th1
| _ -> failwith "NUM_MULT_CONV" in
let NUM_EXP_CONV =
let pth0 = (STANDARDIZE o prove)
(`(m EXP n = p) ==> (p * p = a) ==> (m EXP (BIT0 n) = a)`,
REPEAT(DISCH_THEN(SUBST1_TAC o SYM)) THEN
REWRITE_TAC[BIT0; EXP_ADD])
and pth1 = (STANDARDIZE o prove)
(`(m EXP n = p) ==> (p * p = b) ==> (m * b = a) ==> (m EXP (BIT1 n) = a)`,
REPEAT(DISCH_THEN(SUBST1_TAC o SYM)) THEN
REWRITE_TAC[BIT1; EXP_ADD; EXP])
and pth = (STANDARDIZE o prove)
(`m EXP _0 = BIT1 _0`,
MP_TAC (CONJUNCT1 EXP) THEN REWRITE_TAC[NUMERAL; BIT1] THEN
DISCH_THEN MATCH_ACCEPT_TAC)
and tth = (STANDARDIZE o prove)
(`(NUMERAL m) EXP (NUMERAL n) = m EXP n`,
REWRITE_TAC[NUMERAL])
and fth = (STANDARDIZE o prove)
(`m = NUMERAL m`,
REWRITE_TAC[NUMERAL]) in
let tconv = GEN_REWRITE_CONV I [tth] in
let rec NUM_EXP_CONV l r =
if r = zero_tm then INST [l,m_tm] pth else
let b,r' = dest_comb r in
if b = BIT0_tm then
let th1 = NUM_EXP_CONV l r' in
let tm1 = rand(concl th1) in
let th2 = NUM_MULT_CONV' (mk_binop mul_tm tm1 tm1) in
let tm2 = rand(concl th2) in
MP (MP (INST [l,m_tm; r',n_tm; tm1,p_tm; tm2,a_tm] pth0) th1) th2
else
let th1 = NUM_EXP_CONV l r' in
let tm1 = rand(concl th1) in
let th2 = NUM_MULT_CONV' (mk_binop mul_tm tm1 tm1) in
let tm2 = rand(concl th2) in
let th3 = NUM_MULT_CONV' (mk_binop mul_tm l tm2) in
let tm3 = rand(concl th3) in
MP (MP (MP (INST [l,m_tm; r',n_tm; tm1,p_tm; tm2,b_tm; tm3,a_tm]
pth1) th1) th2) th3 in
fun tm -> try let th = tconv tm in
let lop,r = dest_comb (rand(concl th)) in
let _,l = dest_comb lop in
if not (wellformed l && wellformed r) then failwith "" else
let th' = NUM_EXP_CONV l r in
let tm' = rand(concl th') in
TRANS (TRANS th th') (INST [tm',m_tm] fth)
with Failure _ -> failwith "NUM_EXP_CONV" in
let NUM_LT_CONV =
let pth = (UNDISCH o STANDARDIZE o prove)
(`SUC(m + n) = p ==> ((NUMERAL n < NUMERAL p) <=> T)`,
REWRITE_TAC[NUMERAL; LT_EXISTS; ADD_CLAUSES] THEN
MESON_TAC[ADD_SYM])
and qth = (UNDISCH o STANDARDIZE o prove)
(`m + p = n ==> (NUMERAL n < NUMERAL p <=> F)`,
DISCH_THEN(SUBST1_TAC o SYM) THEN
REWRITE_TAC[NOT_LT; NUMERAL] THEN
MESON_TAC[LE_ADD; ADD_SYM])
and rth = (STANDARDIZE o prove)
(`NUMERAL n < NUMERAL n <=> F`,
MESON_TAC[LT_REFL]) in
fun tm ->
match tm with
Comb(Comb(Const("<",_),Comb(Const("NUMERAL",_),mtm)),
Comb(Const("NUMERAL",_),ntm)) ->
let rel = orderrelation mtm ntm in
if rel = 0 then INST[ntm,n_tm] rth
else if rel < 0 then
let dtm = sbcbn ntm mtm in
let th = NUM_ADC_RULE dtm mtm in
QUICK_PROVE_HYP th (INST [dtm,m_tm; mtm,n_tm; ntm,p_tm] pth)
else
let dtm = subbn mtm ntm in
let th = NUM_ADD_RULE dtm ntm in
QUICK_PROVE_HYP th (INST [dtm,m_tm; mtm,n_tm; ntm,p_tm] qth)
| _ -> failwith "NUM_LT_CONV"
and NUM_LE_CONV =
let pth = (UNDISCH o STANDARDIZE o prove)
(`m + n = p ==> ((NUMERAL n <= NUMERAL p) <=> T)`,
DISCH_THEN(SUBST1_TAC o SYM) THEN
REWRITE_TAC[NUMERAL] THEN
MESON_TAC[LE_ADD; ADD_SYM])
and qth = (UNDISCH o STANDARDIZE o prove)
(`SUC(m + p) = n ==> (NUMERAL n <= NUMERAL p <=> F)`,
DISCH_THEN(SUBST1_TAC o SYM) THEN
REWRITE_TAC[NUMERAL; NOT_LE; ADD_CLAUSES; LT_EXISTS] THEN
MESON_TAC[ADD_SYM])
and rth = (STANDARDIZE o prove)
(`NUMERAL n <= NUMERAL n <=> T`,
REWRITE_TAC[LE_REFL]) in
fun tm ->
match tm with
Comb(Comb(Const("<=",_),Comb(Const("NUMERAL",_),mtm)),
Comb(Const("NUMERAL",_),ntm)) ->
let rel = orderrelation mtm ntm in
if rel = 0 then INST[ntm,n_tm] rth
else if rel < 0 then
let dtm = subbn ntm mtm in
let th = NUM_ADD_RULE dtm mtm in
QUICK_PROVE_HYP th (INST [dtm,m_tm; mtm,n_tm; ntm,p_tm] pth)
else
let dtm = sbcbn mtm ntm in
let th = NUM_ADC_RULE dtm ntm in
QUICK_PROVE_HYP th (INST [dtm,m_tm; mtm,n_tm; ntm,p_tm] qth)
| _ -> failwith "NUM_LE_CONV"
and NUM_EQ_CONV =
let pth = (UNDISCH o STANDARDIZE o prove)
(`SUC(m + n) = p ==> ((NUMERAL n = NUMERAL p) <=> F)`,
DISCH_THEN(SUBST1_TAC o SYM) THEN
REWRITE_TAC[NUMERAL; GSYM LE_ANTISYM; DE_MORGAN_THM] THEN
REWRITE_TAC[NOT_LE; LT_EXISTS; ADD_CLAUSES] THEN
MESON_TAC[ADD_SYM])
and qth = (UNDISCH o STANDARDIZE o prove)
(`SUC(m + p) = n ==> ((NUMERAL n = NUMERAL p) <=> F)`,
DISCH_THEN(SUBST1_TAC o SYM) THEN
REWRITE_TAC[NUMERAL; GSYM LE_ANTISYM; DE_MORGAN_THM] THEN
REWRITE_TAC[NOT_LE; LT_EXISTS; ADD_CLAUSES] THEN
MESON_TAC[ADD_SYM])
and rth = (STANDARDIZE o prove)
(`(NUMERAL n = NUMERAL n) <=> T`,
REWRITE_TAC[]) in
fun tm ->
match tm with
Comb(Comb(Const("=",_),Comb(Const("NUMERAL",_),mtm)),
Comb(Const("NUMERAL",_),ntm)) ->
let rel = orderrelation mtm ntm in
if rel = 0 then INST [ntm,n_tm] rth
else if rel < 0 then
let dtm = sbcbn ntm mtm in
let th = NUM_ADC_RULE dtm mtm in
QUICK_PROVE_HYP th (INST [dtm,m_tm; mtm,n_tm; ntm,p_tm] pth)
else
let dtm = sbcbn mtm ntm in
let th = NUM_ADC_RULE dtm ntm in
QUICK_PROVE_HYP th (INST [dtm,m_tm; mtm,n_tm; ntm,p_tm] qth)
| _ -> failwith "NUM_EQ_CONV" in
NUM_SUC_CONV,NUM_ADD_CONV,NUM_MULT_CONV,NUM_EXP_CONV,
NUM_LT_CONV,NUM_LE_CONV,NUM_EQ_CONV;;
(* Evaluate `NUMERAL m > NUMERAL n`: rewrite > to < via GT, then decide
   with NUM_LT_CONV, yielding |- m > n <=> T (or F).                     *)
let NUM_GT_CONV = GEN_REWRITE_CONV I [GT] THENC NUM_LT_CONV;;
(* Evaluate `NUMERAL m >= NUMERAL n`: rewrite >= to <= via GE, then decide
   with NUM_LE_CONV, yielding |- m >= n <=> T (or F).                      *)
let NUM_GE_CONV = GEN_REWRITE_CONV I [GE] THENC NUM_LE_CONV;;
(* NUM_PRE_CONV `PRE n` for a numeral n proves |- PRE n = n-1.
   Method: compute n-1 at the ML level, prove |- SUC(n-1) = n with
   NUM_SUC_CONV, then transport through pth : SUC m = n ==> PRE n = m.
   NOTE(review): for `PRE 0` the subtraction gives -1 and mk_numeral fails,
   so the conversion fails rather than proving PRE 0 = 0 -- confirm this is
   the intended behavior of this version.                                   *)
let NUM_PRE_CONV =
  let pth = prove
   (`(SUC m = n) ==> (PRE n = m)`,
    DISCH_THEN(SUBST1_TAC o SYM) THEN REWRITE_TAC[PRE])
  and m = `m:num` and n = `n:num` in
  let suc = `SUC` in
  let pre = `PRE` in
  fun tm -> try let l,r = dest_comb tm in
                (* insist the operator really is PRE before destructing *)
                if not (l = pre) then fail() else
                let x = dest_numeral r in
                let tm' = mk_numeral (x -/ Int 1) in
                let th1 = NUM_SUC_CONV (mk_comb(suc,tm')) in
                MP (INST [tm',m; r,n] pth) th1
            with Failure _ -> failwith "NUM_PRE_CONV";;
(* NUM_SUB_CONV `m - n` for numerals proves |- m - n = k.
   Method: compute k = m - n at the ML level, prove |- k + n = m with
   NUM_ADD_CONV, then transport through pth1 : m + n = p ==> p - n = m.
   NOTE(review): when n > m the ML difference kn is negative, mk_numeral
   fails and the whole conversion fails, instead of proving the truncated
   result m - n = 0 -- confirm this is intended for this version.         *)
let NUM_SUB_CONV =
  let pth1 = prove
   (`(m + n = p) ==> (p - n = m)`,
    DISCH_THEN(SUBST1_TAC o SYM) THEN
    REWRITE_TAC[ADD_SUB])
  and m = `m:num` and n = `n:num` and p = `p:num`
  and minus = `(-)`
  and plus = `(+)` in
  fun tm -> try let l,r = dest_binop minus tm in
                let ln = dest_numeral l
                and rn = dest_numeral r in
                let kn = ln -/ rn in
                let k = mk_numeral kn in
                (* instantiate the schema, then discharge its hypothesis
                   by actually evaluating k + n                           *)
                let pth = INST [k,m; l,p; r,n] pth1
                and th0 = NUM_ADD_CONV (mk_binop plus k r) in
                MP pth th0
            with Failure _ -> failwith "NUM_SUB_CONV";;
(* NUM_DIV_CONV `m DIV n` and NUM_MOD_CONV `m MOD n` for numerals.
   Shared worker NUM_DIVMOD_CONV computes quotient q and remainder r at the
   ML level (quo_num / mod_num), then certifies them logically by proving
   the two side conditions of pth (uniqueness of DIV/MOD):
     q * n + r = m   -- by NUM_MULT_CONV on the left summand then NUM_ADD_CONV
     r < n           -- by NUM_LT_CONV
   yielding the conjunction |- m DIV n = q /\ m MOD n = r, from which each
   exported conversion takes its conjunct.                                  *)
let NUM_DIV_CONV,NUM_MOD_CONV =
  let pth = prove
   (`(q * n + r = m) ==> r < n ==> (m DIV n = q) /\ (m MOD n = r)`,
    MESON_TAC[DIVMOD_UNIQ])
  and m = `m:num` and n = `n:num` and q = `q:num` and r = `r:num`
  and dtm = `(DIV)` and mtm = `(MOD)` in
  let NUM_DIVMOD_CONV x y =
    let k = quo_num x y
    and l = mod_num x y in
    let th0 = INST [mk_numeral x,m; mk_numeral y,n;
                    mk_numeral k,q; mk_numeral l,r] pth in
    (* tm0 is the antecedent `q * n + r = m`; evaluate it to T *)
    let tm0 = lhand(lhand(concl th0)) in
    let th1 = (LAND_CONV NUM_MULT_CONV THENC NUM_ADD_CONV) tm0 in
    let th2 = MP th0 th1 in
    (* tm2 is the remaining antecedent `r < n`; discharge likewise *)
    let tm2 = lhand(concl th2) in
    MP th2 (EQT_ELIM(NUM_LT_CONV tm2)) in
  (fun tm -> try let xt,yt = dest_binop dtm tm in
                 CONJUNCT1(NUM_DIVMOD_CONV (dest_numeral xt) (dest_numeral yt))
             with Failure _ -> failwith "NUM_DIV_CONV"),
  (fun tm -> try let xt,yt = dest_binop mtm tm in
                 CONJUNCT2(NUM_DIVMOD_CONV (dest_numeral xt) (dest_numeral yt))
             with Failure _ -> failwith "NUM_MOD_CONV");;
(* NUM_FACT_CONV `FACT n` for a numeral n proves |- FACT n = n!.
   Recursion on the numeric value: the base case is pth_0 (|- FACT 0 = 1);
   the step instantiates pth_suc and discharges its three hypotheses with
   a SUC evaluation (mksuc), the recursive factorial theorem, and a
   NUM_MULT_CONV evaluation of y * w.  Cost is linear in n, so this is
   only practical for small arguments.                                    *)
let NUM_FACT_CONV =
  let suc = `SUC`
  and mul = `( * )` in
  let pth_0 = prove
   (`FACT 0 = 1`,
    REWRITE_TAC[FACT])
  and pth_suc = prove
   (`(SUC x = y) ==> (FACT x = w) ==> (y * w = z) ==> (FACT y = z)`,
    REPEAT (DISCH_THEN(SUBST1_TAC o SYM)) THEN
    REWRITE_TAC[FACT])
  and w = `w:num` and x = `x:num` and y = `y:num` and z = `z:num` in
  (* mksuc n proves |- SUC (n-1) = n for the numeral of n *)
  let mksuc n =
    let n' = n -/ (Int 1) in
    NUM_SUC_CONV (mk_comb(suc,mk_numeral n')) in
  let rec NUM_FACT_CONV n =
    if n =/ Int 0 then pth_0 else
    let th0 = mksuc n in
    let tmx = rand(lhand(concl th0)) in   (* numeral for n-1 *)
    let tm0 = rand(concl th0) in          (* numeral for n   *)
    let th1 = NUM_FACT_CONV (n -/ Int 1) in
    let tm1 = rand(concl th1) in          (* numeral for (n-1)! *)
    let th2 = NUM_MULT_CONV (mk_binop mul tm0 tm1) in
    let tm2 = rand(concl th2) in          (* numeral for n!  *)
    let pth = INST [tmx,x; tm0, y; tm1,w; tm2,z] pth_suc in
    MP (MP (MP pth th0) th1) th2 in
  fun tm ->
    try let l,r = dest_comb tm in
        if fst(dest_const l) = "FACT"
        then NUM_FACT_CONV (dest_numeral r)
        else fail()
    with Failure _ -> failwith "NUM_FACT_CONV";;
(* Evaluate `MAX m n` on numerals: unfold MAX to a conditional, decide the
   embedded <= test with NUM_LE_CONV, then collapse the conditional.       *)
let NUM_MAX_CONV =
  REWR_CONV MAX THENC
  RATOR_CONV(RATOR_CONV(RAND_CONV NUM_LE_CONV)) THENC
  GEN_REWRITE_CONV I [COND_CLAUSES];;
(* Evaluate `MIN m n` on numerals: same strategy as NUM_MAX_CONV, using the
   MIN defining equation.                                                   *)
let NUM_MIN_CONV =
  REWR_CONV MIN THENC
  RATOR_CONV(RATOR_CONV(RAND_CONV NUM_LE_CONV)) THENC
  GEN_REWRITE_CONV I [COND_CLAUSES];;
(* ------------------------------------------------------------------------- *)
(* Final hack-together. *)
(* ------------------------------------------------------------------------- *)
(* NUM_REL_CONV: decide any binary relation (<, <=, >, >=, =) between two
   numerals, by dispatching through a term net to the appropriate
   specialized conversion.                                                *)
let NUM_REL_CONV =
  let gconv_net = itlist (uncurry net_of_conv)
    [`NUMERAL m < NUMERAL n`,NUM_LT_CONV;
     `NUMERAL m <= NUMERAL n`,NUM_LE_CONV;
     `NUMERAL m > NUMERAL n`,NUM_GT_CONV;
     `NUMERAL m >= NUMERAL n`,NUM_GE_CONV;
     `NUMERAL m = NUMERAL n`,NUM_EQ_CONV]
    (basic_net()) in
  REWRITES_CONV gconv_net;;
(* NUM_RED_CONV: perform one step of natural-number calculation at the top
   of the term -- any of SUC, PRE, FACT, the comparison relations,
   EVEN/ODD, +, -, *, EXP, DIV, MOD, MAX, MIN applied to numeral
   arguments -- again via term-net dispatch to the specialized
   conversions.                                                           *)
let NUM_RED_CONV =
  let gconv_net = itlist (uncurry net_of_conv)
    [`SUC(NUMERAL n)`,NUM_SUC_CONV;
     `PRE(NUMERAL n)`,NUM_PRE_CONV;
     `FACT(NUMERAL n)`,NUM_FACT_CONV;
     `NUMERAL m < NUMERAL n`,NUM_LT_CONV;
     `NUMERAL m <= NUMERAL n`,NUM_LE_CONV;
     `NUMERAL m > NUMERAL n`,NUM_GT_CONV;
     `NUMERAL m >= NUMERAL n`,NUM_GE_CONV;
     `NUMERAL m = NUMERAL n`,NUM_EQ_CONV;
     `EVEN(NUMERAL n)`,NUM_EVEN_CONV;
     `ODD(NUMERAL n)`,NUM_ODD_CONV;
     `NUMERAL m + NUMERAL n`,NUM_ADD_CONV;
     `NUMERAL m - NUMERAL n`,NUM_SUB_CONV;
     `NUMERAL m * NUMERAL n`,NUM_MULT_CONV;
     `(NUMERAL m) EXP (NUMERAL n)`,NUM_EXP_CONV;
     `(NUMERAL m) DIV (NUMERAL n)`,NUM_DIV_CONV;
     `(NUMERAL m) MOD (NUMERAL n)`,NUM_MOD_CONV;
     `MAX (NUMERAL m) (NUMERAL n)`,NUM_MAX_CONV;
     `MIN (NUMERAL m) (NUMERAL n)`,NUM_MIN_CONV]
    (basic_net()) in
  REWRITES_CONV gconv_net;;
(* Repeatedly apply NUM_RED_CONV at depth: fully evaluate all numeral
   arithmetic subterms of a term.                                       *)
let NUM_REDUCE_CONV = DEPTH_CONV NUM_RED_CONV;;
(* Tactic form of NUM_REDUCE_CONV, applied to the goal. *)
let NUM_REDUCE_TAC = CONV_TAC NUM_REDUCE_CONV;;
(* ------------------------------------------------------------------------- *)
(* I do like this after all... *)
(* ------------------------------------------------------------------------- *)
(* num_CONV `n` for a positive numeral proves |- n = SUC (n-1), i.e. it
   exposes one SUC at the top.  Fails on `0` (and anything whose value
   minus one is negative), since 0 has no predecessor form.              *)
let num_CONV =
  let SUC_tm = `SUC` in
  fun tm ->
    let n = dest_numeral tm -/ Int 1 in
    if n </ Int 0 then failwith "num_CONV" else
    let tm' = mk_numeral n in
    (* NUM_SUC_CONV gives |- SUC (n-1) = n; flip it with SYM *)
    SYM(NUM_SUC_CONV (mk_comb(SUC_tm,tm')));;
(* Handy instance: |- 3 = SUC 2. *)
let THREE = num_CONV `3`;;
(* ------------------------------------------------------------------------- *)
(* Expands "!n. n < numeral-constant ==> P(n)" into all the cases. *)
(* ------------------------------------------------------------------------- *)
(* EXPAND_CASES_CONV: convert a bounded quantification
   `!n. n < k ==> P n` (k a concrete numeral) into the explicit
   conjunction `P 0 /\ P 1 /\ ... /\ P (k-1)`.
   Strategy: peel one case at a time -- num_CONV exposes `SUC` on the
   bound, pth_step splits off `P k`, and recursion handles the rest;
   pth_base discharges the empty `n < 0` case to T.  A final rewrite
   with GSYM CONJ_ASSOC flattens the nested conjunctions (and removes
   the leading T).                                                      *)
let EXPAND_CASES_CONV =
  let pth_base = prove
   (`(!n. n < 0 ==> P n) <=> T`,
    REWRITE_TAC[LT])
  and pth_step = prove
   (`(!n. n < SUC k ==> P n) <=> (!n. n < k ==> P n) /\ P k`,
    REWRITE_TAC[LT] THEN MESON_TAC[]) in
  let base_CONV = GEN_REWRITE_CONV I [pth_base]
  and step_CONV =
    (* rewrite the bound k to SUC(k-1) under the binder, then split *)
    BINDER_CONV(LAND_CONV(RAND_CONV num_CONV)) THENC
    GEN_REWRITE_CONV I [pth_step] in
  let rec conv tm =
    (base_CONV ORELSEC (step_CONV THENC LAND_CONV conv)) tm in
  conv THENC (REWRITE_CONV[GSYM CONJ_ASSOC]);;
| null | https://raw.githubusercontent.com/gilith/hol-light/f3f131963f2298b4d65ee5fead6e986a4a14237a/calc_num.ml | ocaml | =========================================================================
Calculation with naturals.
=========================================================================
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Big collection of rewrites to do trivial arithmetic.
-------------------------------------------------------------------------
**ARITH_PRE;**
-------------------------------------------------------------------------
Now more delicate conversions for situations where efficiency matters.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Final hack-together.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
I do like this after all...
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Expands "!n. n < numeral-constant ==> P(n)" into all the cases.
------------------------------------------------------------------------- | , University of Cambridge Computer Laboratory
( c ) Copyright , University of Cambridge 1998
( c ) Copyright , 1998 - 2007
needs "arith.ml";;
Simple rule to get rid of NUMERAL constant .
let DENUMERAL = GEN_REWRITE_RULE DEPTH_CONV [NUMERAL];;
Note that we have none for DIV and MOD , and that PRE and SUB are a bit
inefficient ; log(n)^2 instead of log(n ) .
let ARITH_ZERO = prove
(`(NUMERAL 0 = 0) /\
(BIT0 _0 = _0)`,
REWRITE_TAC[NUMERAL; BIT0; DENUMERAL ADD_CLAUSES]);;
let ARITH_SUC = prove
(`(!n. SUC(NUMERAL n) = NUMERAL(SUC n)) /\
(SUC _0 = BIT1 _0) /\
(!n. SUC (BIT0 n) = BIT1 n) /\
(!n. SUC (BIT1 n) = BIT0 (SUC n))`,
REWRITE_TAC[NUMERAL; BIT0; BIT1; DENUMERAL ADD_CLAUSES]);;
* *
let prove
( ` ( ! n. PRE(NUMERAL n ) = NUMERAL(PRE n ) ) /\
( PRE _ 0 = _ 0 ) /\
( ! ) = if n = _ 0 then _ 0 else BIT1 ( PRE n ) ) /\
( ! n. PRE(BIT1 n ) = BIT0 n ) ` ,
REWRITE_TAC[NUMERAL ; BIT1 ; BIT0 ; DENUMERAL PRE ] THEN INDUCT_TAC THEN
REWRITE_TAC[NUMERAL ; DENUMERAL PRE ; DENUMERAL ADD_CLAUSES ; DENUMERAL NOT_SUC ;
ARITH_ZERO ] ) ; ;
* *
let ARITH_PRE = prove
(`(!n. PRE(NUMERAL n) = NUMERAL(PRE n)) /\
(PRE _0 = _0) /\
(!n. PRE(BIT0 n) = if n = _0 then _0 else BIT1 (PRE n)) /\
(!n. PRE(BIT1 n) = BIT0 n)`,
REWRITE_TAC[NUMERAL; BIT1; BIT0; DENUMERAL PRE] THEN INDUCT_TAC THEN
REWRITE_TAC[NUMERAL; DENUMERAL PRE; DENUMERAL ADD_CLAUSES; DENUMERAL NOT_SUC;
ARITH_ZERO]);;
***)
let ARITH_ADD = prove
(`(!m n. NUMERAL(m) + NUMERAL(n) = NUMERAL(m + n)) /\
(_0 + _0 = _0) /\
(!n. _0 + BIT0 n = BIT0 n) /\
(!n. _0 + BIT1 n = BIT1 n) /\
(!n. BIT0 n + _0 = BIT0 n) /\
(!n. BIT1 n + _0 = BIT1 n) /\
(!m n. BIT0 m + BIT0 n = BIT0 (m + n)) /\
(!m n. BIT0 m + BIT1 n = BIT1 (m + n)) /\
(!m n. BIT1 m + BIT0 n = BIT1 (m + n)) /\
(!m n. BIT1 m + BIT1 n = BIT0 (SUC(m + n)))`,
PURE_REWRITE_TAC[NUMERAL; BIT0; BIT1; DENUMERAL ADD_CLAUSES; SUC_INJ] THEN
REWRITE_TAC[ADD_AC]);;
let ARITH_MULT = prove
(`(!m n. NUMERAL(m) * NUMERAL(n) = NUMERAL(m * n)) /\
(_0 * _0 = _0) /\
(!n. _0 * BIT0 n = _0) /\
(!n. _0 * BIT1 n = _0) /\
(!n. BIT0 n * _0 = _0) /\
(!n. BIT1 n * _0 = _0) /\
(!m n. BIT0 m * BIT0 n = BIT0 (BIT0 (m * n))) /\
(!m n. BIT0 m * BIT1 n = BIT0 m + BIT0 (BIT0 (m * n))) /\
(!m n. BIT1 m * BIT0 n = BIT0 n + BIT0 (BIT0 (m * n))) /\
(!m n. BIT1 m * BIT1 n = BIT1 m + BIT0 n + BIT0 (BIT0 (m * n)))`,
PURE_REWRITE_TAC[NUMERAL; BIT0; BIT1; DENUMERAL MULT_CLAUSES;
DENUMERAL ADD_CLAUSES; SUC_INJ] THEN
REWRITE_TAC[LEFT_ADD_DISTRIB; RIGHT_ADD_DISTRIB; ADD_AC]);;
let ARITH_EXP = prove
(`(!m n. (NUMERAL m) EXP (NUMERAL n) = NUMERAL(m EXP n)) /\
(_0 EXP _0 = BIT1 _0) /\
(!m. (BIT0 m) EXP _0 = BIT1 _0) /\
(!m. (BIT1 m) EXP _0 = BIT1 _0) /\
(!n. _0 EXP (BIT0 n) = (_0 EXP n) * (_0 EXP n)) /\
(!m n. (BIT0 m) EXP (BIT0 n) = ((BIT0 m) EXP n) * ((BIT0 m) EXP n)) /\
(!m n. (BIT1 m) EXP (BIT0 n) = ((BIT1 m) EXP n) * ((BIT1 m) EXP n)) /\
(!n. _0 EXP (BIT1 n) = _0) /\
(!m n. (BIT0 m) EXP (BIT1 n) =
BIT0 m * ((BIT0 m) EXP n) * ((BIT0 m) EXP n)) /\
(!m n. (BIT1 m) EXP (BIT1 n) =
BIT1 m * ((BIT1 m) EXP n) * ((BIT1 m) EXP n))`,
REWRITE_TAC[NUMERAL] THEN REPEAT STRIP_TAC THEN
TRY(GEN_REWRITE_TAC (LAND_CONV o RAND_CONV) [BIT0; BIT1]) THEN
REWRITE_TAC[DENUMERAL EXP; DENUMERAL MULT_CLAUSES; EXP_ADD]);;
let ARITH_EVEN = prove
(`(!n. EVEN(NUMERAL n) <=> EVEN n) /\
(EVEN _0 <=> T) /\
(!n. EVEN(BIT0 n) <=> T) /\
(!n. EVEN(BIT1 n) <=> F)`,
REWRITE_TAC[NUMERAL; BIT1; BIT0; DENUMERAL EVEN; EVEN_ADD]);;
let ARITH_ODD = prove
(`(!n. ODD(NUMERAL n) <=> ODD n) /\
(ODD _0 <=> F) /\
(!n. ODD(BIT0 n) <=> F) /\
(!n. ODD(BIT1 n) <=> T)`,
REWRITE_TAC[NUMERAL; BIT1; BIT0; DENUMERAL ODD; ODD_ADD]);;
let ARITH_LE = prove
(`(!m n. NUMERAL m <= NUMERAL n <=> m <= n) /\
((_0 <= _0) <=> T) /\
(!n. (BIT0 n <= _0) <=> n <= _0) /\
(!n. (BIT1 n <= _0) <=> F) /\
(!n. (_0 <= BIT0 n) <=> T) /\
(!n. (_0 <= BIT1 n) <=> T) /\
(!m n. (BIT0 m <= BIT0 n) <=> m <= n) /\
(!m n. (BIT0 m <= BIT1 n) <=> m <= n) /\
(!m n. (BIT1 m <= BIT0 n) <=> m < n) /\
(!m n. (BIT1 m <= BIT1 n) <=> m <= n)`,
REWRITE_TAC[NUMERAL; BIT1; BIT0; DENUMERAL NOT_SUC;
DENUMERAL(GSYM NOT_SUC); SUC_INJ] THEN
REWRITE_TAC[DENUMERAL LE_0] THEN REWRITE_TAC[DENUMERAL LE; GSYM MULT_2] THEN
REWRITE_TAC[LE_MULT_LCANCEL; SUC_INJ;
DENUMERAL MULT_EQ_0; DENUMERAL NOT_SUC] THEN
REWRITE_TAC[DENUMERAL NOT_SUC] THEN REWRITE_TAC[LE_SUC_LT] THEN
REWRITE_TAC[LT_MULT_LCANCEL] THEN
SUBGOAL_THEN `2 = SUC 1` (fun th -> REWRITE_TAC[th]) THENL
[REWRITE_TAC[NUMERAL; BIT0; BIT1; DENUMERAL ADD_CLAUSES];
REWRITE_TAC[DENUMERAL NOT_SUC; NOT_SUC; EQ_MULT_LCANCEL] THEN
REWRITE_TAC[ONCE_REWRITE_RULE[DISJ_SYM] LE_LT] THEN
MAP_EVERY X_GEN_TAC [`m:num`; `n:num`] THEN
SUBGOAL_THEN `~(SUC 1 * m = SUC (SUC 1 * n))`
(fun th -> REWRITE_TAC[th]) THEN
DISCH_THEN(MP_TAC o AP_TERM `EVEN`) THEN
REWRITE_TAC[EVEN_MULT; EVEN_ADD; NUMERAL; BIT1; EVEN]]);;
let ARITH_LT = prove
(`(!m n. NUMERAL m < NUMERAL n <=> m < n) /\
((_0 < _0) <=> F) /\
(!n. (BIT0 n < _0) <=> F) /\
(!n. (BIT1 n < _0) <=> F) /\
(!n. (_0 < BIT0 n) <=> _0 < n) /\
(!n. (_0 < BIT1 n) <=> T) /\
(!m n. (BIT0 m < BIT0 n) <=> m < n) /\
(!m n. (BIT0 m < BIT1 n) <=> m <= n) /\
(!m n. (BIT1 m < BIT0 n) <=> m < n) /\
(!m n. (BIT1 m < BIT1 n) <=> m < n)`,
REWRITE_TAC[NUMERAL; GSYM NOT_LE; ARITH_LE] THEN
REWRITE_TAC[DENUMERAL LE]);;
let ARITH_GE = REWRITE_RULE[GSYM GE; GSYM GT] ARITH_LE;;
let ARITH_GT = REWRITE_RULE[GSYM GE; GSYM GT] ARITH_LT;;
let ARITH_EQ = prove
(`(!m n. (NUMERAL m = NUMERAL n) <=> (m = n)) /\
((_0 = _0) <=> T) /\
(!n. (BIT0 n = _0) <=> (n = _0)) /\
(!n. (BIT1 n = _0) <=> F) /\
(!n. (_0 = BIT0 n) <=> (_0 = n)) /\
(!n. (_0 = BIT1 n) <=> F) /\
(!m n. (BIT0 m = BIT0 n) <=> (m = n)) /\
(!m n. (BIT0 m = BIT1 n) <=> F) /\
(!m n. (BIT1 m = BIT0 n) <=> F) /\
(!m n. (BIT1 m = BIT1 n) <=> (m = n))`,
REWRITE_TAC[NUMERAL; GSYM LE_ANTISYM; ARITH_LE] THEN
REWRITE_TAC[LET_ANTISYM; LTE_ANTISYM; DENUMERAL LE_0]);;
* *
let ARITH_SUB = prove
( ` ( ! m n. NUMERAL m - NUMERAL n = NUMERAL(m - n ) ) /\
( _ 0 - _ 0 = _ 0 ) /\
( ! n. _ 0 - BIT0 n = _ 0 ) /\
( ! n. _ 0 - BIT1 n = _ 0 ) /\
( ! 0 = BIT0 n ) /\
( ! n - _ 0 = BIT1 n ) /\
( ! m n = BIT0 ( m - n ) ) /\
( ! m n = PRE(BIT0 ( m - n ) ) ) /\
( ! m m - BIT0 n = if n < = m then BIT1 ( m - n ) else _ 0 ) /\
( ! m m - BIT1 n = BIT0 ( m - n ) ) ` ,
REWRITE_TAC[NUMERAL ; DENUMERAL SUB_0 ] THEN PURE_REWRITE_TAC[BIT0 ; BIT1 ] THEN
REWRITE_TAC[GSYM MULT_2 ; SUB_SUC ; ] THEN
REWRITE_TAC[SUB ] THEN REPEAT GEN_TAC THEN COND_CASES_TAC THEN
REWRITE_TAC[DENUMERAL SUB_EQ_0 ] THEN
RULE_ASSUM_TAC(REWRITE_RULE[NOT_LE ] ) THEN
ASM_REWRITE_TAC[LE_SUC_LT ; ] THEN
POP_ASSUM(CHOOSE_THEN SUBST1_TAC o REWRITE_RULE[LE_EXISTS ] ) THEN
REWRITE_TAC[ADD1 ; LEFT_ADD_DISTRIB ] THEN
REWRITE_TAC[ADD_SUB2 ; GSYM ADD_ASSOC ] ) ; ;
* *
let ARITH_SUB = prove
(`(!m n. NUMERAL m - NUMERAL n = NUMERAL(m - n)) /\
(_0 - _0 = _0) /\
(!n. _0 - BIT0 n = _0) /\
(!n. _0 - BIT1 n = _0) /\
(!n. BIT0 n - _0 = BIT0 n) /\
(!n. BIT1 n - _0 = BIT1 n) /\
(!m n. BIT0 m - BIT0 n = BIT0 (m - n)) /\
(!m n. BIT0 m - BIT1 n = PRE(BIT0 (m - n))) /\
(!m n. BIT1 m - BIT0 n = if n <= m then BIT1 (m - n) else _0) /\
(!m n. BIT1 m - BIT1 n = BIT0 (m - n))`,
REWRITE_TAC[NUMERAL; DENUMERAL SUB_0] THEN PURE_REWRITE_TAC[BIT0; BIT1] THEN
REWRITE_TAC[GSYM MULT_2; SUB_SUC; LEFT_SUB_DISTRIB] THEN
REWRITE_TAC[SUB] THEN REPEAT GEN_TAC THEN COND_CASES_TAC THEN
REWRITE_TAC[DENUMERAL SUB_EQ_0] THEN
RULE_ASSUM_TAC(REWRITE_RULE[NOT_LE]) THEN
ASM_REWRITE_TAC[LE_SUC_LT; LT_MULT_LCANCEL; ARITH_EQ] THEN
POP_ASSUM(CHOOSE_THEN SUBST1_TAC o REWRITE_RULE[LE_EXISTS]) THEN
REWRITE_TAC[ADD1; LEFT_ADD_DISTRIB] THEN
REWRITE_TAC[ADD_SUB2; GSYM ADD_ASSOC]);;
***)
let ARITH = end_itlist CONJ
ARITH_ADD; ARITH_MULT; ARITH_EXP;
ARITH_EVEN; ARITH_ODD;
ARITH_EQ; ARITH_LE; ARITH_LT; ARITH_GE; ARITH_GT
* * ARITH_SUB * *
let NUM_EVEN_CONV =
let tth,rths = CONJ_PAIR ARITH_EVEN in
GEN_REWRITE_CONV I [tth] THENC GEN_REWRITE_CONV I [rths];;
let NUM_ODD_CONV =
let tth,rths = CONJ_PAIR ARITH_ODD in
GEN_REWRITE_CONV I [tth] THENC GEN_REWRITE_CONV I [rths];;
let NUM_SUC_CONV,NUM_ADD_CONV,NUM_MULT_CONV,NUM_EXP_CONV,
NUM_LT_CONV,NUM_LE_CONV,NUM_EQ_CONV =
let num_ty = type_of(lhand(concl ZERO_DEF)) in
let Comb(NUMERAL_tm,Comb(BIT0_tm,Comb(BIT1_tm,zero_tm))) =
mk_small_numeral 2
and suc_tm = rator(rand(concl TWO))
and one_tm = rand(mk_small_numeral 1)
and add_tm = rator(rator(lhand(snd(strip_forall(concl ADD_0)))))
and mul_tm = rator(rator(rand(snd(strip_forall(concl EXP_2)))))
and exp_tm = rator(rator(lhand(snd(strip_forall(concl EXP_2)))))
and eq_tm = rator(rator(concl TWO)) in
let num_0 = Int 0 and num_1 = Int 1 and num_2 = Int 2 in
let a_tm = mk_var("a",num_ty)
and b_tm = mk_var("b",num_ty)
and c_tm = mk_var("c",num_ty)
and d_tm = mk_var("d",num_ty)
and e_tm = mk_var("e",num_ty)
and h_tm = mk_var("h",num_ty)
and l_tm = mk_var("l",num_ty)
and m_tm = mk_var("m",num_ty)
and n_tm = mk_var("n",num_ty)
and p_tm = mk_var("p",num_ty) in
let STANDARDIZE =
let ilist = [BIT0_tm,BIT0_tm; BIT1_tm,BIT1_tm; zero_tm,zero_tm;
suc_tm,suc_tm; add_tm,add_tm; mul_tm,mul_tm;
exp_tm,exp_tm; eq_tm,eq_tm; NUMERAL_tm,NUMERAL_tm;
a_tm,a_tm; b_tm,b_tm; c_tm,c_tm; d_tm,d_tm; e_tm,e_tm;
h_tm,h_tm; l_tm,l_tm; m_tm,m_tm; n_tm,n_tm; p_tm,p_tm] in
let rec replace tm =
match tm with
Var(_,_) | Const(_,_) -> rev_assocd tm ilist tm
| Comb(s,t) -> mk_comb(replace s,replace t)
| Abs(_,_) -> failwith "replace" in
fun th -> let tm' = replace (concl th) in EQ_MP (REFL tm') th in
let REFL_bit0 = STANDARDIZE(REFL BIT0_tm)
and REFL_bit1 = STANDARDIZE(REFL BIT1_tm) in
let AP_BIT0 th = MK_COMB(REFL_bit0,th)
and AP_BIT1 th = MK_COMB(REFL_bit1,th)
and QUICK_PROVE_HYP ath bth = EQ_MP (DEDUCT_ANTISYM_RULE ath bth) ath in
let rec dest_raw_numeral tm =
match tm with
Comb(Const("BIT1",_),t) -> num_2 */ dest_raw_numeral t +/ num_1
| Comb(Const("BIT0",_),t) -> num_2 */ dest_raw_numeral t
| Const("_0",_) -> num_0 in
let bitcounts =
let rec bctr w z tm =
match tm with
Const("_0",_) -> (w,z)
| Comb(Const("BIT0",_),t) -> bctr w (z + 1) t
| Comb(Const("BIT1",_),t) -> bctr (w + 1) z t
| _ -> failwith "malformed numeral" in
bctr 0 0 in
let rec wellformed tm =
match tm with
Const("_0",_) -> true
| Comb(Const("BIT0",_),t)|Comb(Const("BIT1",_),t) -> wellformed t
| _ -> false in
let rec orderrelation mtm ntm =
if mtm == ntm then
if wellformed mtm then 0 else failwith "orderrelation"
else
match (mtm,ntm) with
Const("_0",_),Const("_0",_) -> 0
| Const("_0",_),_ ->
if wellformed ntm then -1 else failwith "orderrelation"
| _, Const("_0",_) ->
if wellformed ntm then 1 else failwith "orderrelation"
| Comb(Const("BIT0",_),mt),Comb(Const("BIT0",_),nt)
| Comb(Const("BIT1",_),mt),Comb(Const("BIT1",_),nt) ->
orderrelation mt nt
| Comb(Const("BIT0",_),mt),Comb(Const("BIT1",_),nt) ->
if orderrelation mt nt > 0 then 1 else -1
| Comb(Const("BIT1",_),mt),Comb(Const("BIT0",_),nt) ->
if orderrelation mt nt < 0 then -1 else 1 in
let doublebn tm = if tm = zero_tm then tm else mk_comb(BIT0_tm,tm) in
let rec subbn mtm ntm =
match (mtm,ntm) with
(_,Const("_0",_)) -> mtm
| (Comb(Const("BIT0",_),mt),Comb(Const("BIT0",_),nt)) ->
doublebn (subbn mt nt)
| (Comb(Const("BIT1",_),mt),Comb(Const("BIT1",_),nt)) ->
doublebn (subbn mt nt)
| (Comb(Const("BIT1",_),mt),Comb(Const("BIT0",_),nt)) ->
mk_comb(BIT1_tm,subbn mt nt)
| (Comb(Const("BIT0",_),mt),Comb(Const("BIT1",_),nt)) ->
mk_comb(BIT1_tm,sbcbn mt nt)
| _ -> failwith "malformed numeral or wrong relation"
and sbcbn mtm ntm =
match (mtm,ntm) with
| (Comb(Const("BIT0",_),mt),Const("_0",_)) ->
mk_comb(BIT1_tm,sbcbn mt ntm)
| (Comb(Const("BIT1",_),mt),Const("_0",_)) ->
doublebn mt
| (Comb(Const("BIT0",_),mt),Comb(Const("BIT0",_),nt)) ->
mk_comb(BIT1_tm,sbcbn mt nt)
| (Comb(Const("BIT1",_),mt),Comb(Const("BIT1",_),nt)) ->
mk_comb(BIT1_tm,sbcbn mt nt)
| (Comb(Const("BIT1",_),mt),Comb(Const("BIT0",_),nt)) ->
doublebn (subbn mt nt)
| (Comb(Const("BIT0",_),mt),Comb(Const("BIT1",_),nt)) ->
doublebn (sbcbn mt nt)
| _ -> failwith "malformed numeral or wrong relation" in
let topsplit tm =
match tm with
Const("_0",_) -> 0,zero_tm
| Comb(Const("BIT1",_),Const("_0",_)) -> 1,zero_tm
| Comb(Const("BIT0",_),Comb(Const("BIT1",_),Const("_0",_))) -> 2,zero_tm
| Comb(Const("BIT1",_),Comb(Const("BIT1",_),Const("_0",_))) -> 3,zero_tm
| Comb(Const("BIT0",_),Comb(Const("BIT0",_),Comb(Const("BIT1",_),Const("_0",_)))) -> 4,zero_tm
| Comb(Const("BIT1",_),Comb(Const("BIT0",_),Comb(Const("BIT1",_),Const("_0",_)))) -> 5,zero_tm
| Comb(Const("BIT0",_),Comb(Const("BIT1",_),Comb(Const("BIT1",_),Const("_0",_)))) -> 6,zero_tm
| Comb(Const("BIT1",_),Comb(Const("BIT1",_),Comb(Const("BIT1",_),Const("_0",_)))) -> 7,zero_tm
| Comb(Const("BIT0",_),Comb(Const("BIT0",_),Comb(Const("BIT0",_),Comb(Const("BIT0",_),n)))) -> 0,n
| Comb(Const("BIT1",_),Comb(Const("BIT0",_),Comb(Const("BIT0",_),Comb(Const("BIT0",_),n)))) -> 1,n
| Comb(Const("BIT0",_),Comb(Const("BIT1",_),Comb(Const("BIT0",_),Comb(Const("BIT0",_),n)))) -> 2,n
| Comb(Const("BIT1",_),Comb(Const("BIT1",_),Comb(Const("BIT0",_),Comb(Const("BIT0",_),n)))) -> 3,n
| Comb(Const("BIT0",_),Comb(Const("BIT0",_),Comb(Const("BIT1",_),Comb(Const("BIT0",_),n)))) -> 4,n
| Comb(Const("BIT1",_),Comb(Const("BIT0",_),Comb(Const("BIT1",_),Comb(Const("BIT0",_),n)))) -> 5,n
| Comb(Const("BIT0",_),Comb(Const("BIT1",_),Comb(Const("BIT1",_),Comb(Const("BIT0",_),n)))) -> 6,n
| Comb(Const("BIT1",_),Comb(Const("BIT1",_),Comb(Const("BIT1",_),Comb(Const("BIT0",_),n)))) -> 7,n
| Comb(Const("BIT0",_),Comb(Const("BIT0",_),Comb(Const("BIT0",_),Comb(Const("BIT1",_),n)))) -> 8,n
| Comb(Const("BIT1",_),Comb(Const("BIT0",_),Comb(Const("BIT0",_),Comb(Const("BIT1",_),n)))) -> 9,n
| Comb(Const("BIT0",_),Comb(Const("BIT1",_),Comb(Const("BIT0",_),Comb(Const("BIT1",_),n)))) -> 10,n
| Comb(Const("BIT1",_),Comb(Const("BIT1",_),Comb(Const("BIT0",_),Comb(Const("BIT1",_),n)))) -> 11,n
| Comb(Const("BIT0",_),Comb(Const("BIT0",_),Comb(Const("BIT1",_),Comb(Const("BIT1",_),n)))) -> 12,n
| Comb(Const("BIT1",_),Comb(Const("BIT0",_),Comb(Const("BIT1",_),Comb(Const("BIT1",_),n)))) -> 13,n
| Comb(Const("BIT0",_),Comb(Const("BIT1",_),Comb(Const("BIT1",_),Comb(Const("BIT1",_),n)))) -> 14,n
| Comb(Const("BIT1",_),Comb(Const("BIT1",_),Comb(Const("BIT1",_),Comb(Const("BIT1",_),n)))) -> 15,n
| _ -> failwith "malformed numeral" in
let NUM_ADD_RULE,NUM_ADC_RULE =
let rec mk_compnumeral k base =
if k = 0 then base else
let t = mk_compnumeral (k / 2) base in
if k mod 2 = 1 then mk_comb(BIT1_tm,t) else mk_comb(BIT0_tm,t) in
let bases v =
let part2 = map (fun k -> mk_compnumeral k v) (8--15) in
let part1 = map (subst[mk_comb(BIT0_tm,v),mk_comb(BIT1_tm,v)])
part2
and part0 = map (fun k -> mk_compnumeral k zero_tm) (0--15) in
part0 @ part1 @ part2 in
let starts =
allpairs (fun mtm ntm ->
mk_comb(mk_comb(add_tm,mtm),ntm)) (bases m_tm) (bases n_tm) in
let BITS_INJ = (STANDARDIZE o prove)
(`(BIT0 m = BIT0 n <=> m = n) /\
(BIT1 m = BIT1 n <=> m = n)`,
REWRITE_TAC[BIT0; BIT1] THEN
REWRITE_TAC[GSYM MULT_2] THEN
REWRITE_TAC[SUC_INJ; EQ_MULT_LCANCEL; ARITH_EQ]) in
let ARITH_0 = (STANDARDIZE o MESON[NUMERAL; ADD_CLAUSES])
`m + _0 = m /\ _0 + n = n` in
let patadj = subst[`SUC(m + _0)`,`SUC m`; `SUC(_0 + n)`,`SUC n`] in
let mkclauses sucflag t =
let tm = if sucflag then mk_comb(suc_tm,t) else t in
let th1 = PURE_REWRITE_CONV[ARITH_ADD; ARITH_SUC; ARITH_0] tm in
let tm1 = patadj(rand(concl th1)) in
if not(free_in add_tm tm1) then th1,
(if free_in m_tm tm1 then 0 else 1) else
let ptm = rand(rand(rand(rand tm1))) in
let tmc = mk_eq(mk_eq(ptm,p_tm),mk_eq(tm,subst[p_tm,ptm] tm1)) in
EQT_ELIM(REWRITE_CONV[ARITH_ADD; ARITH_SUC; ARITH_0; BITS_INJ] tmc),
(if free_in suc_tm tm1 then 3 else 2) in
let add_clauses,add_flags =
let l1,l2 = unzip(map (mkclauses false) starts) in
Array.of_list(map STANDARDIZE l1),Array.of_list l2 in
let adc_clauses,adc_flags =
let l1,l2 = unzip(map (mkclauses true) starts) in
Array.of_list(map STANDARDIZE l1),Array.of_list l2 in
let rec NUM_ADD_RULE mtm ntm =
let m_lo,m_hi = topsplit mtm
and n_lo,n_hi = topsplit ntm in
let m_ind = if m_hi = zero_tm then m_lo else m_lo + 16
and n_ind = if n_hi = zero_tm then n_lo else n_lo + 16 in
let ind = 32 * m_ind + n_ind in
let th1 = Array.get add_clauses ind
and fl = Array.get add_flags ind in
match fl with
0 -> INST [m_hi,m_tm] th1
| 1 -> INST [n_hi,n_tm] th1
| 2 -> let th2 = NUM_ADD_RULE m_hi n_hi in
(match concl th2 with Comb(_,ptm) ->
let th3 = INST [m_hi,m_tm; n_hi,n_tm;ptm,p_tm] th1 in
EQ_MP th3 th2)
| 3 -> let th2 = NUM_ADC_RULE m_hi n_hi in
(match concl th2 with Comb(_,ptm) ->
let th3 = INST [m_hi,m_tm; n_hi,n_tm;ptm,p_tm] th1 in
EQ_MP th3 th2)
and NUM_ADC_RULE mtm ntm =
let m_lo,m_hi = topsplit mtm
and n_lo,n_hi = topsplit ntm in
let m_ind = if m_hi = zero_tm then m_lo else m_lo + 16
and n_ind = if n_hi = zero_tm then n_lo else n_lo + 16 in
let ind = 32 * m_ind + n_ind in
let th1 = Array.get adc_clauses ind
and fl = Array.get adc_flags ind in
match fl with
0 -> INST [m_hi,m_tm] th1
| 1 -> INST [n_hi,n_tm] th1
| 2 -> let th2 = NUM_ADD_RULE m_hi n_hi in
(match concl th2 with Comb(_,ptm) ->
let th3 = INST [m_hi,m_tm; n_hi,n_tm;ptm,p_tm] th1 in
EQ_MP th3 th2)
| 3 -> let th2 = NUM_ADC_RULE m_hi n_hi in
(match concl th2 with Comb(_,ptm) ->
let th3 = INST [m_hi,m_tm; n_hi,n_tm;ptm,p_tm] th1 in
EQ_MP th3 th2) in
NUM_ADD_RULE,NUM_ADC_RULE in
let NUM_SHIFT_CONV =
let pth_0 = (STANDARDIZE o prove)
(`(n = a + p * b <=> BIT0 n = BIT0 a + BIT0 p * b)`,
REWRITE_TAC[BIT0; BIT1] THEN
REWRITE_TAC[GSYM MULT_2; GSYM MULT_ASSOC; GSYM LEFT_ADD_DISTRIB] THEN
REWRITE_TAC[EQ_MULT_LCANCEL; ARITH_EQ])
and pth_z = (STANDARDIZE o prove)
(`n = _0 + p * b <=> BIT0 n = _0 + BIT0 p * b`,
SUBST1_TAC(SYM(SPEC `_0` NUMERAL)) THEN
REWRITE_TAC[BIT1; BIT0] THEN
REWRITE_TAC[ADD_CLAUSES; GSYM MULT_2] THEN
REWRITE_TAC[GSYM MULT_ASSOC; EQ_MULT_LCANCEL; ARITH_EQ])
and pth_1 = (STANDARDIZE o prove)
(`(n = a + p * b <=> BIT1 n = BIT1 a + BIT0 p * b)`,
REWRITE_TAC[BIT0; BIT1] THEN
REWRITE_TAC[GSYM MULT_2; GSYM MULT_ASSOC; GSYM LEFT_ADD_DISTRIB;
ADD_CLAUSES; SUC_INJ] THEN
REWRITE_TAC[EQ_MULT_LCANCEL; ARITH_EQ])
and pth_base = (STANDARDIZE o prove)
(`n = _0 + BIT1 _0 * n`,
MESON_TAC[ADD_CLAUSES; MULT_CLAUSES; NUMERAL])
and pth_triv = (STANDARDIZE o prove)
(`_0 = a + p * b <=> _0 = a + BIT0 p * b`,
CONV_TAC(BINOP_CONV SYM_CONV) THEN
SUBST1_TAC(SYM(SPEC `_0` NUMERAL)) THEN
REWRITE_TAC[ADD_EQ_0; MULT_EQ_0; BIT0])
and pths_1 = (Array.of_list o CONJUNCTS o STANDARDIZE o prove)
(`(n = a + p * b <=>
BIT0(BIT0(BIT0(BIT0 n))) =
BIT0(BIT0(BIT0(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = a + p * b <=>
BIT1(BIT0(BIT0(BIT0 n))) =
BIT1(BIT0(BIT0(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = a + p * b <=>
BIT0(BIT1(BIT0(BIT0 n))) =
BIT0(BIT1(BIT0(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = a + p * b <=>
BIT1(BIT1(BIT0(BIT0 n))) =
BIT1(BIT1(BIT0(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = a + p * b <=>
BIT0(BIT0(BIT1(BIT0 n))) =
BIT0(BIT0(BIT1(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = a + p * b <=>
BIT1(BIT0(BIT1(BIT0 n))) =
BIT1(BIT0(BIT1(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = a + p * b <=>
BIT0(BIT1(BIT1(BIT0 n))) =
BIT0(BIT1(BIT1(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = a + p * b <=>
BIT1(BIT1(BIT1(BIT0 n))) =
BIT1(BIT1(BIT1(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = a + p * b <=>
BIT0(BIT0(BIT0(BIT1 n))) =
BIT0(BIT0(BIT0(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = a + p * b <=>
BIT1(BIT0(BIT0(BIT1 n))) =
BIT1(BIT0(BIT0(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = a + p * b <=>
BIT0(BIT1(BIT0(BIT1 n))) =
BIT0(BIT1(BIT0(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = a + p * b <=>
BIT1(BIT1(BIT0(BIT1 n))) =
BIT1(BIT1(BIT0(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = a + p * b <=>
BIT0(BIT0(BIT1(BIT1 n))) =
BIT0(BIT0(BIT1(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = a + p * b <=>
BIT1(BIT0(BIT1(BIT1 n))) =
BIT1(BIT0(BIT1(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = a + p * b <=>
BIT0(BIT1(BIT1(BIT1 n))) =
BIT0(BIT1(BIT1(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = a + p * b <=>
BIT1(BIT1(BIT1(BIT1 n))) =
BIT1(BIT1(BIT1(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b)`,
MP_TAC(REWRITE_RULE[GSYM MULT_2] BIT0) THEN
MP_TAC(REWRITE_RULE[GSYM MULT_2] BIT1) THEN
ABBREV_TAC `two = 2` THEN
DISCH_THEN(fun th -> REWRITE_TAC[th]) THEN
DISCH_THEN(fun th -> REWRITE_TAC[th]) THEN
FIRST_X_ASSUM(SUBST1_TAC o SYM) THEN
REWRITE_TAC[ADD_CLAUSES; SUC_INJ; EQ_MULT_LCANCEL; ARITH_EQ;
GSYM LEFT_ADD_DISTRIB; GSYM MULT_ASSOC])
and pths_0 = (Array.of_list o CONJUNCTS o STANDARDIZE o prove)
(`(n = _0 + p * b <=>
BIT0(BIT0(BIT0(BIT0 n))) =
_0 + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = _0 + p * b <=>
BIT1(BIT0(BIT0(BIT0 n))) =
BIT1 _0 + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = _0 + p * b <=>
BIT0(BIT1(BIT0(BIT0 n))) =
BIT0(BIT1 _0) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = _0 + p * b <=>
BIT1(BIT1(BIT0(BIT0 n))) =
BIT1(BIT1 _0) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = _0 + p * b <=>
BIT0(BIT0(BIT1(BIT0 n))) =
BIT0(BIT0(BIT1 _0)) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = _0 + p * b <=>
BIT1(BIT0(BIT1(BIT0 n))) =
BIT1(BIT0(BIT1 _0)) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = _0 + p * b <=>
BIT0(BIT1(BIT1(BIT0 n))) =
BIT0(BIT1(BIT1 _0)) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = _0 + p * b <=>
BIT1(BIT1(BIT1(BIT0 n))) =
BIT1(BIT1(BIT1 _0)) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = _0 + p * b <=>
BIT0(BIT0(BIT0(BIT1 n))) =
BIT0(BIT0(BIT0(BIT1 _0))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = _0 + p * b <=>
BIT1(BIT0(BIT0(BIT1 n))) =
BIT1(BIT0(BIT0(BIT1 _0))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = _0 + p * b <=>
BIT0(BIT1(BIT0(BIT1 n))) =
BIT0(BIT1(BIT0(BIT1 _0))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = _0 + p * b <=>
BIT1(BIT1(BIT0(BIT1 n))) =
BIT1(BIT1(BIT0(BIT1 _0))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = _0 + p * b <=>
BIT0(BIT0(BIT1(BIT1 n))) =
BIT0(BIT0(BIT1(BIT1 _0))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = _0 + p * b <=>
BIT1(BIT0(BIT1(BIT1 n))) =
BIT1(BIT0(BIT1(BIT1 _0))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = _0 + p * b <=>
BIT0(BIT1(BIT1(BIT1 n))) =
BIT0(BIT1(BIT1(BIT1 _0))) + BIT0(BIT0(BIT0(BIT0 p))) * b) /\
(n = _0 + p * b <=>
BIT1(BIT1(BIT1(BIT1 n))) =
BIT1(BIT1(BIT1(BIT1 _0))) + BIT0(BIT0(BIT0(BIT0 p))) * b)`,
SUBST1_TAC(MESON[NUMERAL] `_0 = 0`) THEN
MP_TAC(REWRITE_RULE[GSYM MULT_2] BIT0) THEN
MP_TAC(REWRITE_RULE[GSYM MULT_2] BIT1) THEN
ABBREV_TAC `two = 2` THEN
DISCH_THEN(fun th -> REWRITE_TAC[th]) THEN
DISCH_THEN(fun th -> REWRITE_TAC[th]) THEN
FIRST_X_ASSUM(SUBST1_TAC o SYM) THEN
REWRITE_TAC[ADD_CLAUSES; SUC_INJ; EQ_MULT_LCANCEL; ARITH_EQ;
GSYM LEFT_ADD_DISTRIB; GSYM MULT_ASSOC]) in
let rec NUM_SHIFT_CONV k tm =
if k <= 0 then INST [tm,n_tm] pth_base else
match tm with
Comb(_,Comb(_,Comb(_,Comb(_,_)))) when k >= 4 ->
let i,ntm = topsplit tm in
let th1 = NUM_SHIFT_CONV (k - 4) ntm in
(match concl th1 with
Comb(_,Comb(Comb(_,Const("_0",_)),Comb(Comb(_,ptm),btm))) ->
let th2 = Array.get pths_0 i in
let th3 = INST [ntm,n_tm; btm,b_tm; ptm,p_tm] th2 in
EQ_MP th3 th1
| Comb(_,Comb(Comb(_,atm),Comb(Comb(_,ptm),btm))) ->
let th2 = Array.get pths_1 i in
let th3 = INST[ntm,n_tm; atm,a_tm; btm,b_tm; ptm,p_tm] th2 in
EQ_MP th3 th1)
| Comb(Const("BIT0",_),ntm) ->
let th1 = NUM_SHIFT_CONV (k - 1) ntm in
(match concl th1 with
Comb(_,Comb(Comb(_,Const("_0",_)),Comb(Comb(_,ptm),btm))) ->
EQ_MP (INST [ntm,n_tm; btm,b_tm; ptm,p_tm] pth_z) th1
| Comb(_,Comb(Comb(_,atm),Comb(Comb(_,ptm),btm))) ->
EQ_MP
(INST[ntm,n_tm; atm,a_tm; btm,b_tm; ptm,p_tm] pth_0) th1)
| Comb(Const("BIT1",_),ntm) ->
let th1 = NUM_SHIFT_CONV (k - 1) ntm in
(match concl th1 with
Comb(_,Comb(Comb(_,atm),Comb(Comb(_,ptm),btm))) ->
EQ_MP
(INST [ntm,n_tm; atm,a_tm; btm,b_tm; ptm,p_tm] pth_1) th1)
| Const("_0",_) ->
let th1 = NUM_SHIFT_CONV (k - 1) tm in
(match concl th1 with
Comb(_,Comb(Comb(_,atm),Comb(Comb(_,ptm),btm))) ->
EQ_MP (INST [atm,a_tm; btm,b_tm; ptm,p_tm] pth_triv)
th1)
| _ -> failwith "malformed numeral" in
NUM_SHIFT_CONV in
let NUM_UNSHIFT_CONV =
let pth_triv = (STANDARDIZE o prove)
(`a + p * _0 = a`,
SUBST1_TAC(SYM(SPEC `_0` NUMERAL)) THEN
REWRITE_TAC[MULT_CLAUSES; ADD_CLAUSES])
and pth_base = (STANDARDIZE o prove)
(`a + BIT1 _0 * b = a + b`,
SUBST1_TAC(SYM(SPEC `BIT1 _0` NUMERAL)) THEN
REWRITE_TAC[MULT_CLAUSES; ADD_CLAUSES])
and pth_0 = (STANDARDIZE o prove)
(`BIT0 a + BIT0 p * b = BIT0(a + p * b)`,
REWRITE_TAC[BIT0] THEN REWRITE_TAC[GSYM MULT_2] THEN
REWRITE_TAC[GSYM MULT_ASSOC; GSYM LEFT_ADD_DISTRIB])
and pth_1 = (STANDARDIZE o prove)
(`BIT1 a + BIT0 p * b = BIT1(a + p * b)`,
REWRITE_TAC[BIT0; BIT1] THEN REWRITE_TAC[GSYM MULT_2] THEN
REWRITE_TAC[ADD_CLAUSES; SUC_INJ] THEN
REWRITE_TAC[GSYM MULT_ASSOC; GSYM LEFT_ADD_DISTRIB] THEN
REWRITE_TAC[EQ_MULT_LCANCEL; ARITH_EQ])
and pth_z = (STANDARDIZE o prove)
(`_0 + BIT0 p * b = BIT0(_0 + p * b)`,
SUBST1_TAC(SYM(SPEC `_0` NUMERAL)) THEN
REWRITE_TAC[BIT1; BIT0] THEN REWRITE_TAC[ADD_CLAUSES] THEN
REWRITE_TAC[RIGHT_ADD_DISTRIB])
and puths_1 = (Array.of_list o CONJUNCTS o STANDARDIZE o prove)
(`(a + p * b = n <=>
BIT0(BIT0(BIT0(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT0(BIT0(BIT0(BIT0 n)))) /\
(a + p * b = n <=>
BIT1(BIT0(BIT0(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT1(BIT0(BIT0(BIT0 n)))) /\
(a + p * b = n <=>
BIT0(BIT1(BIT0(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT0(BIT1(BIT0(BIT0 n)))) /\
(a + p * b = n <=>
BIT1(BIT1(BIT0(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT1(BIT1(BIT0(BIT0 n)))) /\
(a + p * b = n <=>
BIT0(BIT0(BIT1(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT0(BIT0(BIT1(BIT0 n)))) /\
(a + p * b = n <=>
BIT1(BIT0(BIT1(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT1(BIT0(BIT1(BIT0 n)))) /\
(a + p * b = n <=>
BIT0(BIT1(BIT1(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT0(BIT1(BIT1(BIT0 n)))) /\
(a + p * b = n <=>
BIT1(BIT1(BIT1(BIT0 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT1(BIT1(BIT1(BIT0 n)))) /\
(a + p * b = n <=>
BIT0(BIT0(BIT0(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT0(BIT0(BIT0(BIT1 n)))) /\
(a + p * b = n <=>
BIT1(BIT0(BIT0(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT1(BIT0(BIT0(BIT1 n)))) /\
(a + p * b = n <=>
BIT0(BIT1(BIT0(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT0(BIT1(BIT0(BIT1 n)))) /\
(a + p * b = n <=>
BIT1(BIT1(BIT0(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT1(BIT1(BIT0(BIT1 n)))) /\
(a + p * b = n <=>
BIT0(BIT0(BIT1(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT0(BIT0(BIT1(BIT1 n)))) /\
(a + p * b = n <=>
BIT1(BIT0(BIT1(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT1(BIT0(BIT1(BIT1 n)))) /\
(a + p * b = n <=>
BIT0(BIT1(BIT1(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT0(BIT1(BIT1(BIT1 n)))) /\
(a + p * b = n <=>
BIT1(BIT1(BIT1(BIT1 a))) + BIT0(BIT0(BIT0(BIT0 p))) * b =
BIT1(BIT1(BIT1(BIT1 n))))`,
SUBST1_TAC(MESON[NUMERAL] `_0 = 0`) THEN
MP_TAC(REWRITE_RULE[GSYM MULT_2] BIT0) THEN
MP_TAC(REWRITE_RULE[GSYM MULT_2] BIT1) THEN
ABBREV_TAC `two = 2` THEN
DISCH_THEN(fun th -> REWRITE_TAC[th]) THEN
DISCH_THEN(fun th -> REWRITE_TAC[th]) THEN
FIRST_X_ASSUM(SUBST1_TAC o SYM) THEN
REWRITE_TAC[ADD_CLAUSES; SUC_INJ; EQ_MULT_LCANCEL; ARITH_EQ;
GSYM LEFT_ADD_DISTRIB; GSYM MULT_ASSOC]) in
let puths_2 = Array.of_list
(map (fun i -> let th1 = Array.get puths_1 (i mod 16)
and th2 = Array.get puths_1 (i / 16) in
let th3 = GEN_REWRITE_RULE RAND_CONV [th1] th2 in
STANDARDIZE th3) (0--255)) in
let rec NUM_UNSHIFT_CONV tm =
match tm with
Comb(Comb(Const("+",_),atm),Comb(Comb(Const("*",_),ptm),btm)) ->
(match (atm,ptm,btm) with
(_,_,Const("_0",_)) ->
INST [atm,a_tm; ptm,p_tm] pth_triv
| (_,Comb(Const("BIT1",_),Const("_0",_)),_) ->
let th1 = INST [atm,a_tm; btm,b_tm] pth_base in
let Comb(_,Comb(Comb(_,mtm),ntm)) = concl th1 in
TRANS th1 (NUM_ADD_RULE mtm ntm)
| (Comb(_,Comb(_,Comb(_,Comb(_,atm')))),
Comb(_,Comb(_,Comb(_,Comb(_,(Comb(_,_) as ptm'))))),_) ->
let i,_ = topsplit atm in
(match (atm',ptm') with
(Comb(_,Comb(_,Comb(_,Comb(_,atm'')))),
Comb(_,Comb(_,Comb(_,Comb(_,(Comb(_,_) as ptm'')))))) ->
let j,_ = topsplit atm' in
let tm' = mk_comb(mk_comb(add_tm,atm''),
mk_comb(mk_comb(mul_tm,ptm''),btm)) in
let th1 = NUM_UNSHIFT_CONV tm' in
let th2 = INST [atm'',a_tm; ptm'',p_tm; btm,b_tm;
rand(concl th1),n_tm]
(Array.get puths_2 (16 * j + i)) in
EQ_MP th2 th1
| _ ->
let tm' = mk_comb(mk_comb(add_tm,atm'),
mk_comb(mk_comb(mul_tm,ptm'),btm)) in
let th1 = NUM_UNSHIFT_CONV tm' in
let th2 = INST [atm',a_tm; ptm',p_tm; btm,b_tm;
rand(concl th1),n_tm]
(Array.get puths_1 i) in
EQ_MP th2 th1)
| (Const("_0",_),Comb(Const("BIT0",_),qtm),_) ->
let th1 = INST [btm,b_tm; qtm,p_tm] pth_z in
CONV_RULE(RAND_CONV(RAND_CONV NUM_UNSHIFT_CONV)) th1
| (Comb(Const("BIT0",_),ctm),Comb(Const("BIT0",_),qtm),_) ->
let th1 = INST [ctm,a_tm; btm,b_tm; qtm,p_tm] pth_0 in
CONV_RULE(RAND_CONV(RAND_CONV NUM_UNSHIFT_CONV)) th1
| (Comb(Const("BIT1",_),ctm),Comb(Const("BIT0",_),qtm),_) ->
let th1 = INST [ctm,a_tm; btm,b_tm; qtm,p_tm] pth_1 in
CONV_RULE(RAND_CONV(RAND_CONV NUM_UNSHIFT_CONV)) th1
| _ -> failwith "malformed numeral")
| _ -> failwith "malformed numeral" in
NUM_UNSHIFT_CONV in
let NUM_SQUARE_RULE =
let pth_0 = (STANDARDIZE o prove)
(`_0 EXP 2 = _0`,
MESON_TAC[NUMERAL; REWRITE_CONV[ARITH] `0 EXP 2`])
and pth_1 = (STANDARDIZE o prove)
(`(BIT1 _0) EXP 2 = BIT1 _0`,
MESON_TAC[NUMERAL; REWRITE_CONV[ARITH] `1 EXP 2`])
and pth_even = (STANDARDIZE o prove)
(`m EXP 2 = n <=> (BIT0 m) EXP 2 = BIT0(BIT0 n)`,
ABBREV_TAC `two = 2` THEN
REWRITE_TAC[BIT0] THEN EXPAND_TAC "two" THEN
REWRITE_TAC[GSYM MULT_2] THEN REWRITE_TAC[EXP_2] THEN
REWRITE_TAC[AC MULT_AC `(2 * m) * (2 * n) = 2 * 2 * m * n`] THEN
REWRITE_TAC[EQ_MULT_LCANCEL; ARITH_EQ])
and pth_odd = (STANDARDIZE o prove)
(`m EXP 2 = n <=> (BIT1 m) EXP 2 = BIT1(BIT0(m + n))`,
ABBREV_TAC `two = 2` THEN
REWRITE_TAC[NUMERAL; BIT0; BIT1] THEN
EXPAND_TAC "two" THEN REWRITE_TAC[GSYM MULT_2] THEN
REWRITE_TAC[EXP_2; MULT_CLAUSES; ADD_CLAUSES] THEN
REWRITE_TAC[SUC_INJ; GSYM MULT_ASSOC; GSYM LEFT_ADD_DISTRIB] THEN
REWRITE_TAC[AC ADD_AC `(m + m * 2 * m) + m = m * 2 * m + m + m`] THEN
REWRITE_TAC[GSYM MULT_2; AC MULT_AC `m * 2 * m = 2 * m * m`] THEN
REWRITE_TAC[GSYM MULT_ASSOC; GSYM LEFT_ADD_DISTRIB] THEN
REWRITE_TAC[EQ_MULT_LCANCEL; ARITH_EQ] THEN
GEN_REWRITE_TAC (RAND_CONV o RAND_CONV) [ADD_SYM] THEN
REWRITE_TAC[EQ_ADD_RCANCEL])
and pth_qstep = (UNDISCH o STANDARDIZE o prove)
(`n + BIT1 _0 = m /\
m EXP 2 = p /\
m + a = BIT0(BIT0 p)
==> (BIT1(BIT1(BIT1 n))) EXP 2 = BIT1(BIT0(BIT0(BIT0 a)))`,
ABBREV_TAC `two = 2` THEN
SUBST1_TAC(MESON[NUMERAL] `_0 = 0`) THEN
REWRITE_TAC[BIT1; BIT0] THEN EXPAND_TAC "two" THEN
REWRITE_TAC[GSYM MULT_2] THEN
REWRITE_TAC[ADD1; LEFT_ADD_DISTRIB; GSYM ADD_ASSOC] THEN
REWRITE_TAC[MULT_ASSOC] THEN REWRITE_TAC[ARITH] THEN
REWRITE_TAC[IMP_CONJ] THEN
DISCH_THEN(SUBST1_TAC o SYM) THEN
DISCH_THEN(SUBST1_TAC o SYM) THEN DISCH_TAC THEN
MATCH_MP_TAC(MESON[EQ_ADD_LCANCEL]
`!m:num. m + n = m + p ==> n = p`) THEN
EXISTS_TAC `16 * (n + 1)` THEN
ASM_REWRITE_TAC[ADD_ASSOC; GSYM LEFT_ADD_DISTRIB] THEN
EXPAND_TAC "two" THEN REWRITE_TAC[EXP_2] THEN
REWRITE_TAC[LEFT_ADD_DISTRIB; RIGHT_ADD_DISTRIB] THEN
REWRITE_TAC[MULT_CLAUSES; MULT_ASSOC] THEN
REWRITE_TAC[AC MULT_AC `(8 * n) * NUMERAL p = (8 * NUMERAL p) * n`] THEN
REWRITE_TAC[ARITH] THEN
REWRITE_TAC[AC ADD_AC
`(n + 16) + p + q + 49 = (n + p + q) + (16 + 49)`] THEN
REWRITE_TAC[GSYM ADD_ASSOC] THEN REWRITE_TAC[ARITH] THEN
REWRITE_TAC[ADD_ASSOC; EQ_ADD_RCANCEL] THEN
REWRITE_TAC[GSYM ADD_ASSOC; GSYM MULT_2; MULT_ASSOC] THEN
ONCE_REWRITE_TAC[AC ADD_AC `a + b + c:num = b + a + c`] THEN
REWRITE_TAC[GSYM RIGHT_ADD_DISTRIB] THEN
REWRITE_TAC[ARITH])
and pth_rec = (UNDISCH o STANDARDIZE o prove)
(`n = l + p * h /\
h + l = m /\
h EXP 2 = a /\
l EXP 2 = c /\
m EXP 2 = d /\
a + c = e /\
e + b = d
==> n EXP 2 = c + p * (b + p * a)`,
REWRITE_TAC[IMP_CONJ] THEN
DISCH_THEN SUBST1_TAC THEN
REPLICATE_TAC 5 (DISCH_THEN(SUBST1_TAC o SYM)) THEN
REWRITE_TAC[EXP_2; LEFT_ADD_DISTRIB; RIGHT_ADD_DISTRIB] THEN
REWRITE_TAC[MULT_AC] THEN CONV_TAC(BINOP_CONV NUM_CANCEL_CONV) THEN
DISCH_THEN SUBST1_TAC THEN REWRITE_TAC[RIGHT_ADD_DISTRIB] THEN
REWRITE_TAC[MULT_AC] THEN REWRITE_TAC[ADD_AC])
and pth_toom3 = (STANDARDIZE o prove)
(`h EXP 2 = e /\
l EXP 2 = a /\
(l + BIT1 _0 * (m + BIT1 _0 * h)) EXP 2 =
a + BIT1 _0 * (b + BIT1 _0 * (c + BIT1 _0 * (d + BIT1 _0 * e))) /\
(l + BIT0(BIT1 _0) * (m + BIT0(BIT1 _0) * h)) EXP 2 =
a + BIT0(BIT1 _0) * (b + BIT0(BIT1 _0) *
(c + BIT0(BIT1 _0) * (d + BIT0(BIT1 _0) * e))) /\
(h + BIT0(BIT1 _0) * (m + BIT0(BIT1 _0) * l)) EXP 2 =
e + BIT0(BIT1 _0) * (d + BIT0(BIT1 _0) *
(c + BIT0(BIT1 _0) * (b + BIT0(BIT1 _0) * a)))
==> (l + p * (m + p * h)) EXP 2 =
a + p * (b + p * (c + p * (d + p * e)))`,
ABBREV_TAC `two = 2` THEN
SUBST1_TAC(MESON[NUMERAL] `_0 = 0`) THEN
REWRITE_TAC[BIT1; BIT0] THEN
EXPAND_TAC "two" THEN REWRITE_TAC[GSYM MULT_2] THEN
REWRITE_TAC[ARITH] THEN
SUBGOAL_THEN
`!p x y z. (x + p * (y + p * z)) EXP 2 =
x * x + p * (2 * x * y + p * ((2 * x * z + y * y) +
p * (2 * y * z + p * z * z)))`
(fun th -> REWRITE_TAC[th])
THENL
[REWRITE_TAC[EXP_2; MULT_2; LEFT_ADD_DISTRIB; RIGHT_ADD_DISTRIB] THEN
REWRITE_TAC[MULT_AC] THEN REWRITE_TAC[ADD_AC];
REWRITE_TAC[EXP_2]] THEN
MAP_EVERY ABBREV_TAC
[`a':num = l * l`; `b' = 2 * l * m`; `c' = 2 * l * h + m * m`;
`d' = 2 * m * h`; `e':num = h * h`] THEN
SUBST1_TAC(AC MULT_AC `2 * m * l = 2 * l * m`) THEN
SUBST1_TAC(AC MULT_AC `2 * h * l = 2 * l * h`) THEN
SUBST1_TAC(AC MULT_AC `2 * h * m = 2 * m * h`) THEN
ASM_REWRITE_TAC[] THEN EXPAND_TAC "two" THEN
POP_ASSUM_LIST(K ALL_TAC) THEN
ASM_CASES_TAC `a':num = a` THEN ASM_REWRITE_TAC[] THEN
ASM_CASES_TAC `e':num = e` THEN ASM_REWRITE_TAC[] THEN
POP_ASSUM_LIST(K ALL_TAC) THEN
REWRITE_TAC[EQ_ADD_LCANCEL; EQ_MULT_LCANCEL] THEN
REWRITE_TAC[LEFT_ADD_DISTRIB; MULT_ASSOC] THEN
REWRITE_TAC[ARITH] THEN
REWRITE_TAC[MULT_CLAUSES; EQ_ADD_LCANCEL] THEN
REWRITE_TAC[ADD_ASSOC; EQ_ADD_RCANCEL] THEN
REWRITE_TAC[GSYM ADD_ASSOC] THEN DISCH_TAC THEN
FIRST_ASSUM(MP_TAC o MATCH_MP (MESON[]
`b = b' /\ c = c' /\ d = d'
==> 5 * b + c' + d' = 5 * b' + c + d`)) THEN
REWRITE_TAC[LEFT_ADD_DISTRIB; MULT_ASSOC] THEN
REWRITE_TAC(map (fun k ->
SYM(REWRITE_CONV[ARITH_SUC]
(mk_comb(suc_tm,mk_small_numeral(k - 1)))))
(1--5)) THEN
REWRITE_TAC[MULT_CLAUSES; ADD_CLAUSES] THEN
CONV_TAC(LAND_CONV NUM_CANCEL_CONV) THEN DISCH_THEN SUBST_ALL_TAC THEN
FIRST_ASSUM(MP_TAC o MATCH_MP (MESON[]
`b = b' /\ (c:num) = c' /\ d = d'
==> b + d':num = b' + d /\ 4 * b + d' = 4 * b' + d`)) THEN
REWRITE_TAC[LEFT_ADD_DISTRIB; MULT_ASSOC] THEN
REWRITE_TAC(map (fun k ->
SYM(REWRITE_CONV[ARITH_SUC]
(mk_comb(suc_tm,mk_small_numeral(k - 1)))))
(1--4)) THEN
REWRITE_TAC[MULT_CLAUSES; ADD_CLAUSES] THEN
CONV_TAC(LAND_CONV(BINOP_CONV NUM_CANCEL_CONV)) THEN
REWRITE_TAC[GSYM MULT_2] THEN ONCE_REWRITE_TAC[ADD_SYM] THEN
REWRITE_TAC[GSYM(el 4 (CONJUNCTS MULT_CLAUSES))] THEN
SIMP_TAC[EQ_MULT_LCANCEL; NOT_SUC])
and pth_even3 = (STANDARDIZE o prove)
(`m EXP 2 = n <=>
(BIT0(BIT0(BIT0 m))) EXP 2 = BIT0(BIT0(BIT0(BIT0(BIT0(BIT0 n)))))`,
ABBREV_TAC `two = 2` THEN
REWRITE_TAC[BIT0] THEN REWRITE_TAC[GSYM MULT_2] THEN
EXPAND_TAC "two" THEN REWRITE_TAC[EXP_2] THEN
REWRITE_TAC[AC MULT_AC
`(2 * 2 * 2 * m) * 2 * 2 * 2 * m = 2 * 2 * 2 * 2 * 2 * 2 * m * m`] THEN
REWRITE_TAC[EQ_MULT_LCANCEL; ARITH_EQ]) in
let NUM_UNSHIFT2_CONV =
RAND_CONV(RAND_CONV NUM_UNSHIFT_CONV) THENC NUM_UNSHIFT_CONV in
let NUM_UNSHIFT3_CONV =
RAND_CONV(RAND_CONV NUM_UNSHIFT2_CONV) THENC NUM_UNSHIFT_CONV in
let NUM_UNSHIFT4_CONV =
RAND_CONV(RAND_CONV NUM_UNSHIFT3_CONV) THENC NUM_UNSHIFT_CONV in
let BINOP2_CONV conv1 conv2 = COMB2_CONV (RAND_CONV conv1) conv2 in
let TOOM3_CONV = BINOP2_CONV
(LAND_CONV NUM_UNSHIFT2_CONV) NUM_UNSHIFT4_CONV in
let rec GEN_NUM_SQUARE_RULE w z tm =
match tm with
Const("_0",_) -> pth_0
| Comb(Const("BIT0",_),mtm) ->
(match mtm with
Comb(Const("BIT0",_),Comb(Const("BIT0",_),ptm)) ->
let th1 = GEN_NUM_SQUARE_RULE w (z - 3) ptm in
let ntm = rand(concl th1) in
EQ_MP (INST [ptm,m_tm; ntm,n_tm] pth_even3) th1
| _ ->
let th1 = GEN_NUM_SQUARE_RULE w (z - 1) mtm in
let ntm = rand(concl th1) in
EQ_MP (INST [mtm,m_tm; ntm,n_tm] pth_even) th1)
| Comb(Const("BIT1",_),mtm) ->
if mtm = zero_tm then pth_1 else
if (w < 100 || z < 20) && w + z < 150 then
match mtm with
Comb(Const("BIT1",_),Comb(Const("BIT1",_),ntm)) ->
let th1 = NUM_ADD_RULE ntm one_tm in
let mtm = rand(concl th1) in
let th2 = NUM_SQUARE_RULE mtm in
let ptm = rand(concl th2) in
let atm = subbn
(mk_comb(BIT0_tm,mk_comb(BIT0_tm,ptm))) mtm in
let th3 = NUM_ADD_RULE mtm atm in
let th4 = INST
[atm,a_tm; mtm,m_tm; ntm,n_tm; ptm,p_tm] pth_qstep in
QUICK_PROVE_HYP (CONJ th1 (CONJ th2 th3)) th4
| _ ->
let th1 = GEN_NUM_SQUARE_RULE (w - 1) z mtm in
let ntm = rand(concl th1) in
let th2 = EQ_MP (INST [mtm,m_tm; ntm,n_tm] pth_odd) th1 in
(match concl th2 with
Comb(_,Comb(_,Comb(_,Comb(Comb(_,ptm),qtm)))) ->
let th3 = NUM_ADD_RULE ptm qtm in
TRANS th2 (AP_BIT1 (AP_BIT0 th3)))
else if w + z < 800 then
let k2 = (w + z) / 2 in
let th1 = NUM_SHIFT_CONV k2 tm in
let Comb(Comb(_,ltm),Comb(Comb(_,ptm),htm)) = rand(concl th1) in
let th2 = NUM_ADD_RULE htm ltm in
let mtm = rand(concl th2) in
let th3 = NUM_SQUARE_RULE htm
and th4 = NUM_SQUARE_RULE ltm
and th5 = NUM_SQUARE_RULE mtm in
let atm = rand(concl th3)
and ctm = rand(concl th4)
and dtm = rand(concl th5) in
let th6 = NUM_ADD_RULE atm ctm in
let etm = rand(concl th6) in
let btm = subbn dtm etm in
let th7 = NUM_ADD_RULE etm btm in
let dtm = rand(concl th7) in
let th8 = INST [atm,a_tm; btm,b_tm; ctm,c_tm; dtm,d_tm; etm,e_tm;
htm,h_tm; ltm,l_tm; mtm,m_tm; tm,n_tm; ptm,p_tm]
pth_rec in
let th9 = QUICK_PROVE_HYP (end_itlist CONJ
[th1;th2;th3;th4;th5;th6;th7]) th8 in
CONV_RULE(RAND_CONV(RAND_CONV(RAND_CONV NUM_UNSHIFT_CONV) THENC
NUM_UNSHIFT_CONV)) th9
else
let k3 = (w + z) / 3 in
let th0 = (NUM_SHIFT_CONV k3 THENC
RAND_CONV(RAND_CONV(NUM_SHIFT_CONV k3))) tm in
let Comb(Comb(_,ltm),Comb(Comb(_,ptm),
Comb(Comb(_,mtm),Comb(Comb(_,_),htm)))) = rand(concl th0) in
let th1 = NUM_SQUARE_RULE htm
and th2 = NUM_SQUARE_RULE ltm in
let atm = rand(concl th2) and etm = rand(concl th1) in
let lnum = dest_raw_numeral ltm
and mnum = dest_raw_numeral mtm
and hnum = dest_raw_numeral htm in
let btm = rand(mk_numeral(num_2 */ lnum */ mnum))
and ctm = rand(mk_numeral(mnum */ mnum +/ num_2 */ lnum */ hnum))
and dtm = rand(mk_numeral(num_2 */ hnum */ mnum)) in
let th = INST
[atm,a_tm; btm,b_tm; ctm,c_tm; dtm,d_tm; etm,e_tm;
htm,h_tm; mtm,m_tm; ltm,l_tm; ptm,p_tm] pth_toom3 in
let th' = CONV_RULE
(BINOP2_CONV
(RAND_CONV(RAND_CONV
(BINOP2_CONV TOOM3_CONV (BINOP2_CONV TOOM3_CONV TOOM3_CONV))))
TOOM3_CONV) th in
let [tm3;tm4;tm5] = conjuncts(rand(rand(lhand(concl th')))) in
let th3 = NUM_SQUARE_RULE (lhand(lhand tm3))
and th4 = NUM_SQUARE_RULE (lhand(lhand tm4))
and th5 = NUM_SQUARE_RULE (lhand(lhand tm5)) in
MP th' (end_itlist CONJ [th1;th2;th3;th4;th5])
and NUM_SQUARE_RULE tm =
let w,z = bitcounts tm in GEN_NUM_SQUARE_RULE w z tm in
NUM_SQUARE_RULE in
let NUM_MUL_RULE =
let QUICK_PROVE_HYP ath bth =
EQ_MP (DEDUCT_ANTISYM_RULE ath bth) ath
and pth_0l,pth_0r = (CONJ_PAIR o STANDARDIZE o prove)
(`_0 * n = _0 /\ m * _0 = _0`,
MESON_TAC[NUMERAL; MULT_CLAUSES])
and pth_1l,pth_1r = (CONJ_PAIR o STANDARDIZE o prove)
(`(BIT1 _0) * n = n /\ m * (BIT1 _0) = m`,
MESON_TAC[NUMERAL; MULT_CLAUSES])
and pth_evenl,pth_evenr = (CONJ_PAIR o STANDARDIZE o prove)
(`(m * n = p <=> (BIT0 m) * n = BIT0 p) /\
(m * n = p <=> m * BIT0 n = BIT0 p)`,
REWRITE_TAC[BIT0] THEN REWRITE_TAC[GSYM MULT_2] THEN
REWRITE_TAC[AC MULT_AC `m * 2 * n = 2 * m * n`] THEN
REWRITE_TAC[GSYM MULT_ASSOC; EQ_MULT_LCANCEL; ARITH_EQ])
and pth_oddl,pth_oddr = (CONJ_PAIR o STANDARDIZE o prove)
(`(m * n = p <=> BIT1 m * n = BIT0 p + n) /\
(m * n = p <=> m * BIT1 n = BIT0 p + m)`,
REWRITE_TAC[BIT0; BIT1] THEN REWRITE_TAC[GSYM MULT_2] THEN
REWRITE_TAC[MULT_CLAUSES] THEN
REWRITE_TAC[MESON[MULT_AC; ADD_SYM] `m + m * 2 * n = 2 * m * n + m`] THEN
REWRITE_TAC[GSYM MULT_ASSOC; EQ_MULT_LCANCEL; EQ_ADD_RCANCEL] THEN
REWRITE_TAC[ARITH_EQ]) in
let pth_oo1 = (UNDISCH_ALL o STANDARDIZE o prove)
(`n + p = m /\ SUC(m + n) = a /\ p EXP 2 = b /\ a EXP 2 = c /\ b + d = c
==> ((BIT1 m) * (BIT1 n) = d)`,
ABBREV_TAC `two = 2` THEN REWRITE_TAC[BIT1; IMP_CONJ] THEN
FIRST_X_ASSUM(SUBST1_TAC o SYM) THEN
REWRITE_TAC[EXP_2; GSYM MULT_2] THEN
REPLICATE_TAC 4 (DISCH_THEN(SUBST1_TAC o SYM)) THEN
REWRITE_TAC[ADD1; AC ADD_AC `((n + p) + n) + 1 = (p + (n + n)) + 1`] THEN
REWRITE_TAC[GSYM MULT_2] THEN
REWRITE_TAC[LEFT_ADD_DISTRIB; RIGHT_ADD_DISTRIB] THEN
REWRITE_TAC[GSYM ADD_ASSOC; MULT_CLAUSES; EQ_ADD_LCANCEL] THEN
DISCH_THEN SUBST1_TAC THEN
REWRITE_TAC[MULT_2; LEFT_ADD_DISTRIB; RIGHT_ADD_DISTRIB] THEN
REWRITE_TAC[MULT_AC] THEN REWRITE_TAC[ADD_AC]) in
let pth_oo2 = PURE_ONCE_REWRITE_RULE[MULT_SYM]
(INST [n_tm,m_tm; m_tm,n_tm] pth_oo1) in
let pth_recodel = (UNDISCH_ALL o STANDARDIZE o prove)
(`SUC(_0 + m) = p ==> (p * n = a + n <=> m * n = a)`,
SUBST1_TAC(MESON[NUMERAL] `_0 = 0`) THEN
DISCH_THEN(SUBST1_TAC o SYM) THEN
REWRITE_TAC[ADD_CLAUSES; MULT_CLAUSES; EQ_ADD_RCANCEL])
and pth_recoder = (UNDISCH_ALL o STANDARDIZE o prove)
(`SUC(_0 + n) = p ==> (m * p = a + m <=> m * n = a)`,
ONCE_REWRITE_TAC[MULT_SYM] THEN
SUBST1_TAC(MESON[NUMERAL] `_0 = 0`) THEN
DISCH_THEN(SUBST1_TAC o SYM) THEN
REWRITE_TAC[ADD_CLAUSES; MULT_CLAUSES; EQ_ADD_RCANCEL]) in
let rec NUM_MUL_RULE k l tm tm' =
match (tm,tm') with
(Const("_0",_),_) -> INST [tm',n_tm] pth_0l
| (_,Const("_0",_)) -> INST [tm,m_tm] pth_0r
| (Comb(Const("BIT1",_),Const("_0",_)),_) -> INST [tm',n_tm] pth_1l
| (_,Comb(Const("BIT1",_),Const("_0",_))) -> INST [tm,m_tm] pth_1r
| (Comb(Const("BIT0",_),mtm),_) ->
let th0 = NUM_MUL_RULE (k - 1) l mtm tm' in
let th1 = INST
[mtm,m_tm; tm',n_tm; rand(concl th0),p_tm] pth_evenl in
EQ_MP th1 th0
| (_,Comb(Const("BIT0",_),ntm)) ->
let th0 = NUM_MUL_RULE k (l - 1) tm ntm in
let th1 = INST
[tm,m_tm; ntm,n_tm; rand(concl th0),p_tm] pth_evenr in
EQ_MP th1 th0
| (Comb(Const("BIT1",_),mtm),Comb(Const("BIT1",_),ntm)) ->
if k <= 50 || l <= 50 ||
Int k */ Int k <=/ Int l ||
Int l */ Int l <= Int k then
match (mtm,ntm) with
(Comb(Const("BIT1",_),Comb(Const("BIT1",_),_)),_) ->
let th1 = NUM_ADC_RULE zero_tm tm in
let ptm = rand(concl th1) in
let th2 = NUM_MUL_RULE k l ptm tm' in
let atm = subbn (rand(concl th2)) tm' in
let th3 = INST [tm,m_tm; tm',n_tm; ptm,p_tm; atm,a_tm]
pth_recodel in
let th4 = PROVE_HYP th1 th3 in
EQ_MP th4 (TRANS th2 (SYM(NUM_ADD_RULE atm tm')))
| (_,Comb(Const("BIT1",_),Comb(Const("BIT1",_),_))) ->
let th1 = NUM_ADC_RULE zero_tm tm' in
let ptm = rand(concl th1) in
let th2 = NUM_MUL_RULE k l tm ptm in
let atm = subbn (rand(concl th2)) tm in
let th3 = INST [tm,m_tm; tm',n_tm; ptm,p_tm; atm,a_tm]
pth_recoder in
let th4 = PROVE_HYP th1 th3 in
EQ_MP th4 (TRANS th2 (SYM(NUM_ADD_RULE atm tm)))
| _ ->
if k <= l then
let th0 = NUM_MUL_RULE (k - 1) l mtm tm' in
let ptm = rand(concl th0) in
let th1 =
EQ_MP (INST [mtm,m_tm; tm',n_tm; ptm,p_tm] pth_oddl) th0 in
let tm1 = lhand(rand(concl th1)) in
TRANS th1 (NUM_ADD_RULE tm1 tm')
else
let th0 = NUM_MUL_RULE k (l - 1) tm ntm in
let ptm = rand(concl th0) in
let th1 =
EQ_MP (INST [tm,m_tm; ntm,n_tm; ptm,p_tm] pth_oddr) th0 in
let tm1 = lhand(rand(concl th1)) in
TRANS th1 (NUM_ADD_RULE tm1 tm)
else
let mval = dest_raw_numeral mtm
and nval = dest_raw_numeral ntm in
if nval <=/ mval then
let ptm = rand(mk_numeral(mval -/ nval)) in
let th2 = NUM_ADD_RULE ntm ptm
and th3 = NUM_ADC_RULE mtm ntm in
let atm = rand(concl th3) in
let th4 = NUM_SQUARE_RULE ptm in
let btm = rand(concl th4) in
let th5 = NUM_SQUARE_RULE atm in
let ctm = rand(concl th5) in
let dtm = subbn ctm btm in
let th6 = NUM_ADD_RULE btm dtm in
let th1 = INST [atm,a_tm; btm,b_tm; ctm,c_tm; dtm,d_tm;
mtm,m_tm; ntm,n_tm; ptm,p_tm] pth_oo1 in
QUICK_PROVE_HYP (end_itlist CONJ
[th2;th3;th4;th5;th6]) th1
else
let ptm = rand(mk_numeral(nval -/ mval)) in
let th2 = NUM_ADD_RULE mtm ptm
and th3 = NUM_ADC_RULE ntm mtm in
let atm = rand(concl th3) in
let th4 = NUM_SQUARE_RULE ptm in
let btm = rand(concl th4) in
let th5 = NUM_SQUARE_RULE atm in
let ctm = rand(concl th5) in
let dtm = subbn ctm btm in
let th6 = NUM_ADD_RULE btm dtm in
let th1 = INST [atm,a_tm; btm,b_tm; ctm,c_tm; dtm,d_tm;
mtm,m_tm; ntm,n_tm; ptm,p_tm] pth_oo2 in
QUICK_PROVE_HYP (end_itlist CONJ
[th2;th3;th4;th5;th6]) th1
| _ -> failwith "NUM_MUL_RULE" in
NUM_MUL_RULE in
(* Multiplication conversion on raw numerals (no NUMERAL wrapper), used
   internally (e.g. by NUM_EXP_CONV).  A syntactic square m * m is routed
   through NUM_SQUARE_RULE via the EXP 2 bridging lemma; the general case
   calls NUM_MUL_RULE with each operand's total bit count as a size hint. *)
let NUM_MULT_CONV' =
  let pth_refl = (STANDARDIZE o MESON[EXP_2])
   `m EXP 2 = p <=> m * m = p` in
  fun tm ->
    match tm with
      Comb(Comb(Const("*",_),mtm),ntm) ->
        (* structural comparison detects the squaring case *)
        if Pervasives.compare mtm ntm = 0 then
          let th1 = NUM_SQUARE_RULE mtm in
          let ptm = rand(concl th1) in
          EQ_MP (INST [mtm,m_tm;ptm,p_tm] pth_refl) th1
        else
          let w1,z1 = bitcounts mtm and w2,z2 = bitcounts ntm in
          NUM_MUL_RULE (w1+z1) (w2+z2) mtm ntm
    | _ -> failwith "NUM_MULT_CONV'" in
(* Evaluate `SUC(NUMERAL m)`: compute the successor as an add-with-carry of
   zero via NUM_ADC_RULE, then transfer the result back under the NUMERAL
   tag with the bridging lemma pth.  Fails on anything else. *)
let NUM_SUC_CONV =
  let pth = (STANDARDIZE o prove)
   (`SUC(_0 + m) = n <=> SUC(NUMERAL m) = NUMERAL n`,
    BINOP_TAC THEN MESON_TAC[NUMERAL; ADD_CLAUSES]) in
  fun tm ->
    match tm with
      Comb(Const("SUC",_),Comb(Const("NUMERAL",_),mtm))
      when wellformed mtm ->
        let th1 = NUM_ADC_RULE zero_tm mtm in
        let ntm = rand(concl th1) in
        EQ_MP(INST [mtm,m_tm; ntm,n_tm] pth) th1
    | _ -> failwith "NUM_SUC_CONV" in
(* Evaluate `NUMERAL m + NUMERAL n`: strip the NUMERAL tags, add the raw
   numerals with NUM_ADD_RULE, and restore the tags via topthm_add.
   Guarded by wellformed so malformed numeral terms fail cleanly. *)
let NUM_ADD_CONV =
  let topthm_add = (STANDARDIZE o MESON[NUMERAL])
   `m + n = p <=> NUMERAL m + NUMERAL n = NUMERAL p` in
  fun tm ->
    match tm with
      Comb(Comb(Const("+",_),Comb(Const("NUMERAL",_),mtm)),
           Comb(Const("NUMERAL",_),ntm))
      when wellformed mtm && wellformed ntm ->
        let th1 = NUM_ADD_RULE mtm ntm in
        let ptm = rand(concl th1) in
        let th2 = INST [mtm,m_tm; ntm,n_tm; ptm,p_tm] topthm_add in
        EQ_MP th2 th1
    | _ -> failwith "NUM_ADD_CONV" in
(* Evaluate `NUMERAL m * NUMERAL n`.  A syntactic square m * m is diverted
   to NUM_SQUARE_RULE (through the EXP 2 lemma pth_refl); otherwise
   NUM_MUL_RULE does the work, taking each operand's total bit count as a
   size hint, and topthm_mul restores the NUMERAL tags. *)
let NUM_MULT_CONV =
  let topthm_mul = (STANDARDIZE o MESON[NUMERAL])
   `m * n = p <=> NUMERAL m * NUMERAL n = NUMERAL p`
  and pth_refl = (STANDARDIZE o MESON[NUMERAL; EXP_2])
   `m EXP 2 = p <=> NUMERAL m * NUMERAL m = NUMERAL p` in
  fun tm ->
    match tm with
      Comb(Comb(Const("*",_),Comb(Const("NUMERAL",_),mtm)),
           Comb(Const("NUMERAL",_),ntm)) ->
        if Pervasives.compare mtm ntm = 0 then
          let th1 = NUM_SQUARE_RULE mtm in
          let ptm = rand(concl th1) in
          EQ_MP (INST [mtm,m_tm;ptm,p_tm] pth_refl) th1
        else
          let w1,z1 = bitcounts mtm and w2,z2 = bitcounts ntm in
          let th1 = NUM_MUL_RULE (w1+z1) (w2+z2) mtm ntm in
          let ptm = rand(concl th1) in
          let th2 = INST [mtm,m_tm; ntm,n_tm; ptm,p_tm] topthm_mul in
          EQ_MP th2 th1
    | _ -> failwith "NUM_MULT_CONV" in
(* Evaluate `NUMERAL m EXP NUMERAL n` by binary exponentiation: recurse on
   the bits of the exponent, squaring the accumulated power (NUM_MULT_CONV')
   at every bit and additionally multiplying in the base at BIT1 bits. *)
let NUM_EXP_CONV =
  (* even-bit step:  m^n = p  and  p*p = a  give  m^(BIT0 n) = a *)
  let pth0 = (STANDARDIZE o prove)
   (`(m EXP n = p) ==> (p * p = a) ==> (m EXP (BIT0 n) = a)`,
    REPEAT(DISCH_THEN(SUBST1_TAC o SYM)) THEN
    REWRITE_TAC[BIT0; EXP_ADD])
  (* odd-bit step: additionally multiplies by the base m *)
  and pth1 = (STANDARDIZE o prove)
   (`(m EXP n = p) ==> (p * p = b) ==> (m * b = a) ==> (m EXP (BIT1 n) = a)`,
    REPEAT(DISCH_THEN(SUBST1_TAC o SYM)) THEN
    REWRITE_TAC[BIT1; EXP_ADD; EXP])
  (* base case: exponent exhausted, result is the numeral 1 *)
  and pth = (STANDARDIZE o prove)
   (`m EXP _0 = BIT1 _0`,
    MP_TAC (CONJUNCT1 EXP) THEN REWRITE_TAC[NUMERAL; BIT1] THEN
    DISCH_THEN MATCH_ACCEPT_TAC)
  (* tth strips the NUMERAL tags up front; fth restores one at the end *)
  and tth = (STANDARDIZE o prove)
   (`(NUMERAL m) EXP (NUMERAL n) = m EXP n`,
    REWRITE_TAC[NUMERAL])
  and fth = (STANDARDIZE o prove)
   (`m = NUMERAL m`,
    REWRITE_TAC[NUMERAL]) in
  let tconv = GEN_REWRITE_CONV I [tth] in
  (* l is the (raw) base, r the remaining raw exponent *)
  let rec NUM_EXP_CONV l r =
    if r = zero_tm then INST [l,m_tm] pth else
    let b,r' = dest_comb r in
    if b = BIT0_tm then
      let th1 = NUM_EXP_CONV l r' in
      let tm1 = rand(concl th1) in
      let th2 = NUM_MULT_CONV' (mk_binop mul_tm tm1 tm1) in
      let tm2 = rand(concl th2) in
      MP (MP (INST [l,m_tm; r',n_tm; tm1,p_tm; tm2,a_tm] pth0) th1) th2
    else
      let th1 = NUM_EXP_CONV l r' in
      let tm1 = rand(concl th1) in
      let th2 = NUM_MULT_CONV' (mk_binop mul_tm tm1 tm1) in
      let tm2 = rand(concl th2) in
      let th3 = NUM_MULT_CONV' (mk_binop mul_tm l tm2) in
      let tm3 = rand(concl th3) in
      MP (MP (MP (INST [l,m_tm; r',n_tm; tm1,p_tm; tm2,b_tm; tm3,a_tm]
                       pth1) th1) th2) th3 in
  fun tm -> try let th = tconv tm in
                let lop,r = dest_comb (rand(concl th)) in
                let _,l = dest_comb lop in
                if not (wellformed l && wellformed r) then failwith "" else
                let th' = NUM_EXP_CONV l r in
                let tm' = rand(concl th') in
                TRANS (TRANS th th') (INST [tm',m_tm] fth)
            with Failure _ -> failwith "NUM_EXP_CONV" in
(* Decide `NUMERAL n < NUMERAL p` by first comparing the raw numerals with
   orderrelation, then certifying the verdict with an explicit witness:
   truth needs SUC(d + n) = p (proved by NUM_ADC_RULE), falsity needs
   d + p = n (proved by NUM_ADD_RULE), equality uses the reflexive lemma. *)
let NUM_LT_CONV =
  let pth = (UNDISCH o STANDARDIZE o prove)
   (`SUC(m + n) = p ==> ((NUMERAL n < NUMERAL p) <=> T)`,
    REWRITE_TAC[NUMERAL; LT_EXISTS; ADD_CLAUSES] THEN
    MESON_TAC[ADD_SYM])
  and qth = (UNDISCH o STANDARDIZE o prove)
   (`m + p = n ==> (NUMERAL n < NUMERAL p <=> F)`,
    DISCH_THEN(SUBST1_TAC o SYM) THEN
    REWRITE_TAC[NOT_LT; NUMERAL] THEN
    MESON_TAC[LE_ADD; ADD_SYM])
  and rth = (STANDARDIZE o prove)
   (`NUMERAL n < NUMERAL n <=> F`,
    MESON_TAC[LT_REFL]) in
  fun tm ->
    match tm with
      Comb(Comb(Const("<",_),Comb(Const("NUMERAL",_),mtm)),
           Comb(Const("NUMERAL",_),ntm)) ->
        let rel = orderrelation mtm ntm in
        if rel = 0 then INST[ntm,n_tm] rth
        else if rel < 0 then
          (* mtm < ntm: the difference-minus-one witness comes from sbcbn *)
          let dtm = sbcbn ntm mtm in
          let th = NUM_ADC_RULE dtm mtm in
          QUICK_PROVE_HYP th (INST [dtm,m_tm; mtm,n_tm; ntm,p_tm] pth)
        else
          (* mtm > ntm: the plain difference witness comes from subbn *)
          let dtm = subbn mtm ntm in
          let th = NUM_ADD_RULE dtm ntm in
          QUICK_PROVE_HYP th (INST [dtm,m_tm; mtm,n_tm; ntm,p_tm] qth)
    | _ -> failwith "NUM_LT_CONV"
(* Decide `NUMERAL n <= NUMERAL p` analogously to NUM_LT_CONV: truth is
   witnessed by d + n = p, falsity by SUC(d + p) = n, equality by the
   reflexive lemma rth. *)
and NUM_LE_CONV =
  let pth = (UNDISCH o STANDARDIZE o prove)
   (`m + n = p ==> ((NUMERAL n <= NUMERAL p) <=> T)`,
    DISCH_THEN(SUBST1_TAC o SYM) THEN
    REWRITE_TAC[NUMERAL] THEN
    MESON_TAC[LE_ADD; ADD_SYM])
  and qth = (UNDISCH o STANDARDIZE o prove)
   (`SUC(m + p) = n ==> (NUMERAL n <= NUMERAL p <=> F)`,
    DISCH_THEN(SUBST1_TAC o SYM) THEN
    REWRITE_TAC[NUMERAL; NOT_LE; ADD_CLAUSES; LT_EXISTS] THEN
    MESON_TAC[ADD_SYM])
  and rth = (STANDARDIZE o prove)
   (`NUMERAL n <= NUMERAL n <=> T`,
    REWRITE_TAC[LE_REFL]) in
  fun tm ->
    match tm with
      Comb(Comb(Const("<=",_),Comb(Const("NUMERAL",_),mtm)),
           Comb(Const("NUMERAL",_),ntm)) ->
        let rel = orderrelation mtm ntm in
        if rel = 0 then INST[ntm,n_tm] rth
        else if rel < 0 then
          let dtm = subbn ntm mtm in
          let th = NUM_ADD_RULE dtm mtm in
          QUICK_PROVE_HYP th (INST [dtm,m_tm; mtm,n_tm; ntm,p_tm] pth)
        else
          let dtm = sbcbn mtm ntm in
          let th = NUM_ADC_RULE dtm ntm in
          QUICK_PROVE_HYP th (INST [dtm,m_tm; mtm,n_tm; ntm,p_tm] qth)
    | _ -> failwith "NUM_LE_CONV"
and NUM_EQ_CONV =
let pth = (UNDISCH o STANDARDIZE o prove)
(`SUC(m + n) = p ==> ((NUMERAL n = NUMERAL p) <=> F)`,
DISCH_THEN(SUBST1_TAC o SYM) THEN
REWRITE_TAC[NUMERAL; GSYM LE_ANTISYM; DE_MORGAN_THM] THEN
REWRITE_TAC[NOT_LE; LT_EXISTS; ADD_CLAUSES] THEN
MESON_TAC[ADD_SYM])
and qth = (UNDISCH o STANDARDIZE o prove)
(`SUC(m + p) = n ==> ((NUMERAL n = NUMERAL p) <=> F)`,
DISCH_THEN(SUBST1_TAC o SYM) THEN
REWRITE_TAC[NUMERAL; GSYM LE_ANTISYM; DE_MORGAN_THM] THEN
REWRITE_TAC[NOT_LE; LT_EXISTS; ADD_CLAUSES] THEN
MESON_TAC[ADD_SYM])
and rth = (STANDARDIZE o prove)
(`(NUMERAL n = NUMERAL n) <=> T`,
REWRITE_TAC[]) in
fun tm ->
match tm with
Comb(Comb(Const("=",_),Comb(Const("NUMERAL",_),mtm)),
Comb(Const("NUMERAL",_),ntm)) ->
let rel = orderrelation mtm ntm in
if rel = 0 then INST [ntm,n_tm] rth
else if rel < 0 then
let dtm = sbcbn ntm mtm in
let th = NUM_ADC_RULE dtm mtm in
QUICK_PROVE_HYP th (INST [dtm,m_tm; mtm,n_tm; ntm,p_tm] pth)
else
let dtm = sbcbn mtm ntm in
let th = NUM_ADC_RULE dtm ntm in
QUICK_PROVE_HYP th (INST [dtm,m_tm; mtm,n_tm; ntm,p_tm] qth)
| _ -> failwith "NUM_EQ_CONV" in
NUM_SUC_CONV,NUM_ADD_CONV,NUM_MULT_CONV,NUM_EXP_CONV,
NUM_LT_CONV,NUM_LE_CONV,NUM_EQ_CONV;;
let NUM_GT_CONV = GEN_REWRITE_CONV I [GT] THENC NUM_LT_CONV;;
let NUM_GE_CONV = GEN_REWRITE_CONV I [GE] THENC NUM_LE_CONV;;
let NUM_PRE_CONV =
let pth = prove
(`(SUC m = n) ==> (PRE n = m)`,
DISCH_THEN(SUBST1_TAC o SYM) THEN REWRITE_TAC[PRE])
and m = `m:num` and n = `n:num` in
let suc = `SUC` in
let pre = `PRE` in
fun tm -> try let l,r = dest_comb tm in
if not (l = pre) then fail() else
let x = dest_numeral r in
let tm' = mk_numeral (x -/ Int 1) in
let th1 = NUM_SUC_CONV (mk_comb(suc,tm')) in
MP (INST [tm',m; r,n] pth) th1
with Failure _ -> failwith "NUM_PRE_CONV";;
let NUM_SUB_CONV =
let pth1 = prove
(`(m + n = p) ==> (p - n = m)`,
DISCH_THEN(SUBST1_TAC o SYM) THEN
REWRITE_TAC[ADD_SUB])
and m = `m:num` and n = `n:num` and p = `p:num`
and minus = `(-)`
and plus = `(+)` in
fun tm -> try let l,r = dest_binop minus tm in
let ln = dest_numeral l
and rn = dest_numeral r in
let kn = ln -/ rn in
let k = mk_numeral kn in
let pth = INST [k,m; l,p; r,n] pth1
and th0 = NUM_ADD_CONV (mk_binop plus k r) in
MP pth th0
with Failure _ -> failwith "NUM_SUB_CONV";;
let NUM_DIV_CONV,NUM_MOD_CONV =
let pth = prove
(`(q * n + r = m) ==> r < n ==> (m DIV n = q) /\ (m MOD n = r)`,
MESON_TAC[DIVMOD_UNIQ])
and m = `m:num` and n = `n:num` and q = `q:num` and r = `r:num`
and dtm = `(DIV)` and mtm = `(MOD)` in
let NUM_DIVMOD_CONV x y =
let k = quo_num x y
and l = mod_num x y in
let th0 = INST [mk_numeral x,m; mk_numeral y,n;
mk_numeral k,q; mk_numeral l,r] pth in
let tm0 = lhand(lhand(concl th0)) in
let th1 = (LAND_CONV NUM_MULT_CONV THENC NUM_ADD_CONV) tm0 in
let th2 = MP th0 th1 in
let tm2 = lhand(concl th2) in
MP th2 (EQT_ELIM(NUM_LT_CONV tm2)) in
(fun tm -> try let xt,yt = dest_binop dtm tm in
CONJUNCT1(NUM_DIVMOD_CONV (dest_numeral xt) (dest_numeral yt))
with Failure _ -> failwith "NUM_DIV_CONV"),
(fun tm -> try let xt,yt = dest_binop mtm tm in
CONJUNCT2(NUM_DIVMOD_CONV (dest_numeral xt) (dest_numeral yt))
with Failure _ -> failwith "NUM_MOD_CONV");;
let NUM_FACT_CONV =
let suc = `SUC`
and mul = `( * )` in
let pth_0 = prove
(`FACT 0 = 1`,
REWRITE_TAC[FACT])
and pth_suc = prove
(`(SUC x = y) ==> (FACT x = w) ==> (y * w = z) ==> (FACT y = z)`,
REPEAT (DISCH_THEN(SUBST1_TAC o SYM)) THEN
REWRITE_TAC[FACT])
and w = `w:num` and x = `x:num` and y = `y:num` and z = `z:num` in
let mksuc n =
let n' = n -/ (Int 1) in
NUM_SUC_CONV (mk_comb(suc,mk_numeral n')) in
let rec NUM_FACT_CONV n =
if n =/ Int 0 then pth_0 else
let th0 = mksuc n in
let tmx = rand(lhand(concl th0)) in
let tm0 = rand(concl th0) in
let th1 = NUM_FACT_CONV (n -/ Int 1) in
let tm1 = rand(concl th1) in
let th2 = NUM_MULT_CONV (mk_binop mul tm0 tm1) in
let tm2 = rand(concl th2) in
let pth = INST [tmx,x; tm0, y; tm1,w; tm2,z] pth_suc in
MP (MP (MP pth th0) th1) th2 in
fun tm ->
try let l,r = dest_comb tm in
if fst(dest_const l) = "FACT"
then NUM_FACT_CONV (dest_numeral r)
else fail()
with Failure _ -> failwith "NUM_FACT_CONV";;
let NUM_MAX_CONV =
REWR_CONV MAX THENC
RATOR_CONV(RATOR_CONV(RAND_CONV NUM_LE_CONV)) THENC
GEN_REWRITE_CONV I [COND_CLAUSES];;
let NUM_MIN_CONV =
REWR_CONV MIN THENC
RATOR_CONV(RATOR_CONV(RAND_CONV NUM_LE_CONV)) THENC
GEN_REWRITE_CONV I [COND_CLAUSES];;
let NUM_REL_CONV =
let gconv_net = itlist (uncurry net_of_conv)
[`NUMERAL m < NUMERAL n`,NUM_LT_CONV;
`NUMERAL m <= NUMERAL n`,NUM_LE_CONV;
`NUMERAL m > NUMERAL n`,NUM_GT_CONV;
`NUMERAL m >= NUMERAL n`,NUM_GE_CONV;
`NUMERAL m = NUMERAL n`,NUM_EQ_CONV]
(basic_net()) in
REWRITES_CONV gconv_net;;
let NUM_RED_CONV =
let gconv_net = itlist (uncurry net_of_conv)
[`SUC(NUMERAL n)`,NUM_SUC_CONV;
`PRE(NUMERAL n)`,NUM_PRE_CONV;
`FACT(NUMERAL n)`,NUM_FACT_CONV;
`NUMERAL m < NUMERAL n`,NUM_LT_CONV;
`NUMERAL m <= NUMERAL n`,NUM_LE_CONV;
`NUMERAL m > NUMERAL n`,NUM_GT_CONV;
`NUMERAL m >= NUMERAL n`,NUM_GE_CONV;
`NUMERAL m = NUMERAL n`,NUM_EQ_CONV;
`EVEN(NUMERAL n)`,NUM_EVEN_CONV;
`ODD(NUMERAL n)`,NUM_ODD_CONV;
`NUMERAL m + NUMERAL n`,NUM_ADD_CONV;
`NUMERAL m - NUMERAL n`,NUM_SUB_CONV;
`NUMERAL m * NUMERAL n`,NUM_MULT_CONV;
`(NUMERAL m) EXP (NUMERAL n)`,NUM_EXP_CONV;
`(NUMERAL m) DIV (NUMERAL n)`,NUM_DIV_CONV;
`(NUMERAL m) MOD (NUMERAL n)`,NUM_MOD_CONV;
`MAX (NUMERAL m) (NUMERAL n)`,NUM_MAX_CONV;
`MIN (NUMERAL m) (NUMERAL n)`,NUM_MIN_CONV]
(basic_net()) in
REWRITES_CONV gconv_net;;
let NUM_REDUCE_CONV = DEPTH_CONV NUM_RED_CONV;;
let NUM_REDUCE_TAC = CONV_TAC NUM_REDUCE_CONV;;
let num_CONV =
let SUC_tm = `SUC` in
fun tm ->
let n = dest_numeral tm -/ Int 1 in
if n </ Int 0 then failwith "num_CONV" else
let tm' = mk_numeral n in
SYM(NUM_SUC_CONV (mk_comb(SUC_tm,tm')));;
let THREE = num_CONV `3`;;
let EXPAND_CASES_CONV =
let pth_base = prove
(`(!n. n < 0 ==> P n) <=> T`,
REWRITE_TAC[LT])
and pth_step = prove
(`(!n. n < SUC k ==> P n) <=> (!n. n < k ==> P n) /\ P k`,
REWRITE_TAC[LT] THEN MESON_TAC[]) in
let base_CONV = GEN_REWRITE_CONV I [pth_base]
and step_CONV =
BINDER_CONV(LAND_CONV(RAND_CONV num_CONV)) THENC
GEN_REWRITE_CONV I [pth_step] in
let rec conv tm =
(base_CONV ORELSEC (step_CONV THENC LAND_CONV conv)) tm in
conv THENC (REWRITE_CONV[GSYM CONJ_ASSOC]);;
|
d588be6e7be35c2e86b3eca60a9b275d8234b00364740cadc389c94772d3435e | gator1/jepsen | project.clj | (defproject jepsen.etcdemo "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:main jepsen.etcdemo
:dependencies [[org.clojure/clojure "1.8.0"]
[jepsen "0.1.4"]
[verschlimmbesserung "0.1.3"]])
| null | https://raw.githubusercontent.com/gator1/jepsen/1932cbd72cbc1f6c2a27abe0fe347ea989f0cfbb/etcdemo/project.clj | clojure | (defproject jepsen.etcdemo "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:main jepsen.etcdemo
:dependencies [[org.clojure/clojure "1.8.0"]
[jepsen "0.1.4"]
[verschlimmbesserung "0.1.3"]])
| |
52758d8c4cf4b4b5b99d8b440260d7ed0f1a652469bea7451df043d263f80795 | kframework/semantic-approaches | SmallStep.hs | module SemanticModel.SmallStep
( Transition, o, nStep
, manyStep
, stepRun
, isLiteral, isNotLiteral, rules
, Trace(..), runTrace
, get, put, modify, runStateT, gets, mzero, mplus
, state
)
where
import Data.Hashable ( hash )
import Control.Monad.State.Lazy
import Data.List.Extra ( nubOn )
class (Eq config, Show syntax) => Transition config syntax | syntax -> config where
isLiteral :: syntax -> Bool
isNotLiteral :: syntax -> Bool
isNotLiteral = not . isLiteral
rules :: [syntax -> Trace config syntax]
o :: syntax -> Trace config syntax
o = buildStep rules
nStep :: Int -> syntax -> Trace config syntax
nStep n syntax
| n >= 0 = foldl ( >>= ) (return syntax) $ replicate n o
nStep _ syntax = error "nStep: Integer argument must be positive."
allStep :: syntax -> [Trace config syntax]
allStep stmt = map (flip nStep stmt) [0..]
buildStep
:: [syntax -> Trace config syntax]
-> syntax -> Trace config syntax
buildStep rules syntax
= foldl (\l r -> l `mplus` r syntax) mzero rules
stepRun :: config -> syntax -> [[(syntax, config)]]
stepRun config syntax
= takeWhile (not . null) $ [ nubOn convert
$ runTrace config
$ flip nStep syntax n | n <- [0..]]
where convert (l,r) = (show l, r)
manyStep :: Trace config syntax -> Trace config syntax
manyStep trace = results `mplus` (toRecurse >>= (\_ -> manyStep toRecurse))
where results = mfilter isLiteral trace
intermediate = (mfilter isNotLiteral trace) >>= o
toRecurse = rebuildState $ nubOn snd $ runTrace undefined intermediate
conversion (syntax, env) = (hash $ show syntax, env)
rebuildState :: [(syntax, config)] -> Trace config syntax
rebuildState = foldl (\l (syntax,state) -> l `mplus` (put state >> return syntax)) mzero
type Trace state = StateT state []
runTrace :: config -> Trace config syntax -> [(syntax, config)]
runTrace = flip runStateT
| null | https://raw.githubusercontent.com/kframework/semantic-approaches/6f64eac09e005fe4eae7141e3c0e0f5711da0647/haskell/semantic-styles/src/SemanticModel/SmallStep.hs | haskell | module SemanticModel.SmallStep
( Transition, o, nStep
, manyStep
, stepRun
, isLiteral, isNotLiteral, rules
, Trace(..), runTrace
, get, put, modify, runStateT, gets, mzero, mplus
, state
)
where
import Data.Hashable ( hash )
import Control.Monad.State.Lazy
import Data.List.Extra ( nubOn )
class (Eq config, Show syntax) => Transition config syntax | syntax -> config where
isLiteral :: syntax -> Bool
isNotLiteral :: syntax -> Bool
isNotLiteral = not . isLiteral
rules :: [syntax -> Trace config syntax]
o :: syntax -> Trace config syntax
o = buildStep rules
nStep :: Int -> syntax -> Trace config syntax
nStep n syntax
| n >= 0 = foldl ( >>= ) (return syntax) $ replicate n o
nStep _ syntax = error "nStep: Integer argument must be positive."
allStep :: syntax -> [Trace config syntax]
allStep stmt = map (flip nStep stmt) [0..]
buildStep
:: [syntax -> Trace config syntax]
-> syntax -> Trace config syntax
buildStep rules syntax
= foldl (\l r -> l `mplus` r syntax) mzero rules
stepRun :: config -> syntax -> [[(syntax, config)]]
stepRun config syntax
= takeWhile (not . null) $ [ nubOn convert
$ runTrace config
$ flip nStep syntax n | n <- [0..]]
where convert (l,r) = (show l, r)
manyStep :: Trace config syntax -> Trace config syntax
manyStep trace = results `mplus` (toRecurse >>= (\_ -> manyStep toRecurse))
where results = mfilter isLiteral trace
intermediate = (mfilter isNotLiteral trace) >>= o
toRecurse = rebuildState $ nubOn snd $ runTrace undefined intermediate
conversion (syntax, env) = (hash $ show syntax, env)
rebuildState :: [(syntax, config)] -> Trace config syntax
rebuildState = foldl (\l (syntax,state) -> l `mplus` (put state >> return syntax)) mzero
type Trace state = StateT state []
runTrace :: config -> Trace config syntax -> [(syntax, config)]
runTrace = flip runStateT
| |
64756d10f4d2b6aff2e33e36e257312688d8828fb1a979007ae0b0113871b069 | ocaml/oasis | BaseData.mli | (******************************************************************************)
OASIS : architecture for building OCaml libraries and applications
(* *)
Copyright ( C ) 2011 - 2016 ,
Copyright ( C ) 2008 - 2011 , OCamlCore SARL
(* *)
(* This library is free software; you can redistribute it and/or modify it *)
(* under the terms of the GNU Lesser General Public License as published by *)
the Free Software Foundation ; either version 2.1 of the License , or ( at
(* your option) any later version, with the OCaml static compilation *)
(* exception. *)
(* *)
(* This library is distributed in the hope that it will be useful, but *)
(* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *)
(* or FITNESS FOR A PARTICULAR PURPOSE. See the file COPYING for more *)
(* details. *)
(* *)
You should have received a copy of the GNU Lesser General Public License
along with this library ; if not , write to the Free Software Foundation ,
Inc. , 51 Franklin St , Fifth Floor , Boston , MA 02110 - 1301 USA
(******************************************************************************)
(** Exported modules for embedding
The whole module is {b not exported}. It is auto-generated using other
modules.
*)
* All exported modules from base library , default
content for ' setup.ml ' .
content for 'setup.ml'.
*)
val basesys_ml: string
(** Minimal set of exported modules to load the 'setup.data'
files. Use to create OCaml script that will use 'setup.data'.
Example auto-generated 'myocamlbuild.ml' contains this set.
*)
val basesysenvironment_ml: string
(** Set of modules to load for the 'bundle' subcommand
*)
val basesysbundle_ml: string
(** Toploop for dynrun. *)
val dynrun_ml: string
(** Toploop for dynrun (alternative). *)
val dynrun_for_release_ml: string
(** Toploop for dynrun (yet another alternative). *)
val compiled_setup_ml: string
| null | https://raw.githubusercontent.com/ocaml/oasis/3d1a9421db92a0882ebc58c5df219b18c1e5681d/src/base/BaseData.mli | ocaml | ****************************************************************************
This library is free software; you can redistribute it and/or modify it
under the terms of the GNU Lesser General Public License as published by
your option) any later version, with the OCaml static compilation
exception.
This library is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the file COPYING for more
details.
****************************************************************************
* Exported modules for embedding
The whole module is {b not exported}. It is auto-generated using other
modules.
* Minimal set of exported modules to load the 'setup.data'
files. Use to create OCaml script that will use 'setup.data'.
Example auto-generated 'myocamlbuild.ml' contains this set.
* Set of modules to load for the 'bundle' subcommand
* Toploop for dynrun.
* Toploop for dynrun (alternative).
* Toploop for dynrun (yet another alternative). | OASIS : architecture for building OCaml libraries and applications
Copyright ( C ) 2011 - 2016 ,
Copyright ( C ) 2008 - 2011 , OCamlCore SARL
the Free Software Foundation ; either version 2.1 of the License , or ( at
You should have received a copy of the GNU Lesser General Public License
along with this library ; if not , write to the Free Software Foundation ,
Inc. , 51 Franklin St , Fifth Floor , Boston , MA 02110 - 1301 USA
* All exported modules from base library , default
content for ' setup.ml ' .
content for 'setup.ml'.
*)
val basesys_ml: string
val basesysenvironment_ml: string
val basesysbundle_ml: string
val dynrun_ml: string
val dynrun_for_release_ml: string
val compiled_setup_ml: string
|
34295e6bf063a28760bcb0e303dcf0778ad0d86e638a35b38ce70bb0839bcd8a | austral/austral | TypeParameters.ml |
Part of the Austral project , under the Apache License v2.0 with LLVM Exceptions .
See LICENSE file for details .
SPDX - License - Identifier : Apache-2.0 WITH LLVM - exception
Part of the Austral project, under the Apache License v2.0 with LLVM Exceptions.
See LICENSE file for details.
SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
*)
open Identifier
open TypeParameter
open Error
open ErrorText
open Sexplib
open Std
module Errors = struct
let duplicate_type_parameter param =
let text = match param with
| Some param -> [
Text "Duplicate type parameter ";
Code (typaram_name param |> ident_string)
]
| None -> [
Text "Multiple type parameters have the same name.";
]
in
austral_raise GenericError text
end
type typarams = TyParams of type_parameter list
[@@deriving (show, sexp)]
let empty_typarams: typarams = TyParams []
let typarams_size (typarams: typarams): int =
let (TyParams lst) = typarams in
List.length lst
let get_typaram (typarams: typarams) (name: identifier): type_parameter option =
let (TyParams lst) = typarams in
let pred (typaram: type_parameter): bool =
equal_identifier name (typaram_name typaram)
in
List.find_opt pred lst
let add_typaram (typarams: typarams) (typaram: type_parameter): typarams =
match get_typaram typarams (typaram_name typaram) with
| Some _ ->
Errors.duplicate_type_parameter (Some typaram)
| None ->
let (TyParams lst) = typarams in
let lst = List.rev lst in
let lst = typaram :: lst in
let lst = List.rev lst in
TyParams lst
let typarams_as_list (typarams: typarams): type_parameter list =
let (TyParams lst) = typarams in
lst
let typarams_from_list (lst: type_parameter list): typarams =
List.fold_left (fun set typaram -> add_typaram set typaram)
empty_typarams
lst
let merge_typarams (a: typarams) (b: typarams): typarams =
(* Convert both sets to lists *)
let al: type_parameter list = typarams_as_list a
and bl: type_parameter list = typarams_as_list b
in
(* If any element of b appears in a, error. *)
let _ =
List.map (fun tp ->
if List.exists (fun tp' -> equal_identifier (typaram_name tp) (typaram_name tp')) al then
Errors.duplicate_type_parameter None
else
()) bl
in
TyParams (List.concat [al; bl])
| null | https://raw.githubusercontent.com/austral/austral/69b6f7de36cc9576483acd1ac4a31bf52074dbd1/lib/TypeParameters.ml | ocaml | Convert both sets to lists
If any element of b appears in a, error. |
Part of the Austral project , under the Apache License v2.0 with LLVM Exceptions .
See LICENSE file for details .
SPDX - License - Identifier : Apache-2.0 WITH LLVM - exception
Part of the Austral project, under the Apache License v2.0 with LLVM Exceptions.
See LICENSE file for details.
SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
*)
open Identifier
open TypeParameter
open Error
open ErrorText
open Sexplib
open Std
module Errors = struct
let duplicate_type_parameter param =
let text = match param with
| Some param -> [
Text "Duplicate type parameter ";
Code (typaram_name param |> ident_string)
]
| None -> [
Text "Multiple type parameters have the same name.";
]
in
austral_raise GenericError text
end
type typarams = TyParams of type_parameter list
[@@deriving (show, sexp)]
let empty_typarams: typarams = TyParams []
let typarams_size (typarams: typarams): int =
let (TyParams lst) = typarams in
List.length lst
let get_typaram (typarams: typarams) (name: identifier): type_parameter option =
let (TyParams lst) = typarams in
let pred (typaram: type_parameter): bool =
equal_identifier name (typaram_name typaram)
in
List.find_opt pred lst
let add_typaram (typarams: typarams) (typaram: type_parameter): typarams =
match get_typaram typarams (typaram_name typaram) with
| Some _ ->
Errors.duplicate_type_parameter (Some typaram)
| None ->
let (TyParams lst) = typarams in
let lst = List.rev lst in
let lst = typaram :: lst in
let lst = List.rev lst in
TyParams lst
let typarams_as_list (typarams: typarams): type_parameter list =
let (TyParams lst) = typarams in
lst
let typarams_from_list (lst: type_parameter list): typarams =
List.fold_left (fun set typaram -> add_typaram set typaram)
empty_typarams
lst
let merge_typarams (a: typarams) (b: typarams): typarams =
let al: type_parameter list = typarams_as_list a
and bl: type_parameter list = typarams_as_list b
in
let _ =
List.map (fun tp ->
if List.exists (fun tp' -> equal_identifier (typaram_name tp) (typaram_name tp')) al then
Errors.duplicate_type_parameter None
else
()) bl
in
TyParams (List.concat [al; bl])
|
300347df48c105b70bd8c33f2fcef1576cd588343d0ab6c5be18e9a9d2bf6ba6 | rems-project/cerberus | smt2.ml | open Nondeterminism
open Memory_model
open Global_ocaml
let pad = ref 0
prerr_endline ( String.make ! pad ' ' ^ str )
let do_red str= "\x1b[31m" ^ str ^ "\x1b[0m"
let runND exec_mode (type cs) cs_module (m: ('a, Driver.step_kind, 'err, cs, 'st) ndM) (st0: 'st) =
prerr "ENTERING runND";
Debug_ocaml.print_debug 1 [] (fun () ->
"HELLO from Smt2.runND, exec mode= " ^ match exec_mode with
| Exhaustive ->
"exhaustive"
| Random ->
"random"
);
let module CS = (val cs_module : Constraints with type t = cs) in
let (>>=) = CS.bind in
let open CS in
let (*rec*) with_backtracking m xs =
let i = (Random.int (List.length xs)) in
let x = List.nth xs i in
let xs ' = List.init ( List.length xs - 1 ) ( fun z - >
List.nth xs ( if z < i then z else z+1 )
) in
let xs' = List.init (List.length xs - 1) (fun z ->
List.nth xs (if z < i then z else z+1)
) in
*)
m x in (*>>= function
| [] ->
with_backtracking m xs'
| ys ->
return ys in *)
let rec aux (ND m_act) st =
(* TODO: graph export *)
match m_act st with
| (NDactive a, st') ->
prerr "NDactive";
flush_all ();
check_sat >>= begin function
| `UNSAT ->
failwith "NDactive found to be UNSATISFIABLE"
| `SAT ->
CS.string_of_solver >>= fun str ->
return [(Active a, str, st')]
end
| (NDkilled r, st') ->
prerr "NDkilled";
flush_all ();
CS.string_of_solver >>= fun str ->
return [(Killed (st', r), str, st')]
| (NDnd (info, str_ms), st') ->
let xx = Random.int 10000 in
incr pad ;
let str = Printf.sprintf " % sNDnd[%s ] < % d > < size : % d>\n " ( String.make ! pad ' ' )
( Driver.instance_Show_Show_Driver_step_kind_dict.show_method info )
xx
( str_ms ) in
let str = if str_ms > 1 then do_red str else str in
prerr_string str ;
flush_all ( ) ;
incr pad;
let str = Printf.sprintf "%sNDnd[%s] <%d> <size: %d>\n" (String.make !pad ' ')
(Driver.instance_Show_Show_Driver_step_kind_dict.show_method info)
xx
(List.length str_ms) in
let str = if List.length str_ms > 1 then do_red str else str in
prerr_string str;
flush_all (); *)
let ret = begin match exec_mode with
| Random ->
with_backtracking (fun (_, z) -> aux z st') str_ms
| Exhaustive ->
List.iter ( fun ( idx , ( info , _ ) ) - >
Printf.fprintf stderr " % s<%d>[%d ] = = > % s\n " ( String.make ! pad ' ' ) ( Driver.instance_Show_Show_Driver_step_kind_dict.show_method info ) ;
flush_all ( ) ;
) ( List.mapi ( fun n z - > ( n , z ) ) str_ms ) ;
Printf.fprintf stderr "%s<%d>[%d] ==> %s\n" (String.make !pad ' ') xx idx
(Driver.instance_Show_Show_Driver_step_kind_dict.show_method info);
flush_all ();
) (List.mapi (fun n z -> (n, z)) str_ms); *)
foldlM (fun acc (idx, (info, m_act)) ->
Printf.fprintf stderr " % s<%d>[%d ] = = > % s\n " ( String.make ! pad ' ' )
( Driver.instance_Show_Show_Driver_step_kind_dict.show_method info ) ;
flush_all ( ) ;
(Driver.instance_Show_Show_Driver_step_kind_dict.show_method info);
flush_all (); *)
(* with_constraints debug_str *)
aux m_act st' >>= fun z ->
return (z @ acc)
) [] (List.mapi (fun n z -> (n, z)) str_ms)
| Interactive - >
failwith " Smt2.runND : TODO interactive mode "
| Interactive ->
failwith "Smt2.runND: TODO interactive mode"
*)
end in decr pad; ret
| (NDguard (info, cs, m_act), st') ->
Printf.fprintf stderr " % sNDguard[%s]\n " ( String.make ! pad ' ' )
( Driver.instance_Show_Show_Driver_step_kind_dict.show_method info ) ;
flush_all ( ) ;
(Driver.instance_Show_Show_Driver_step_kind_dict.show_method info);
flush_all (); *)
with_constraints info cs begin
check_sat >>= function
| `UNSAT ->
return [] (* backtrack *)
| `SAT ->
aux m_act st'
end
| (NDbranch (info, cs, m_act1, m_act2), st') ->
Printf.fprintf stderr " % sNDbranch[%s]\n " ( String.make ! pad ' ' )
( Driver.instance_Show_Show_Driver_step_kind_dict.show_method info ) ;
flush_all ( ) ;
(Driver.instance_Show_Show_Driver_step_kind_dict.show_method info);
flush_all (); *)
begin match exec_mode with
| Some Interactive - >
failwith " Smt2.runND : TODO interactive mode "
| Some Interactive ->
failwith "Smt2.runND: TODO interactive mode"
*)
| Random ->
with_backtracking (fun (cs, m_act) ->
with_constraints info cs begin
check_sat >>= function
| `UNSAT ->
return []
| `SAT ->
aux m_act st'
end) [(cs, m_act1); (negate cs, m_act2)]
| Exhaustive ->
with_constraints info cs begin
check_sat >>= function
| `UNSAT ->
return []
| `SAT ->
aux m_act1 st'
end >>= fun xs1 ->
with_constraints info (negate cs) begin
check_sat >>= function
| `UNSAT ->
return []
| `SAT ->
aux m_act2 st'
end >>= fun xs2 ->
return (xs1 @ xs2)
end
| (NDstep (info, str_ms), st') ->
Printf.fprintf stderr " % sNDstep[%s]\n " ( String.make ! pad ' ' )
( Driver.instance_Show_Show_Driver_step_kind_dict.show_method info ) ;
flush_all ( ) ;
(Driver.instance_Show_Show_Driver_step_kind_dict.show_method info);
flush_all (); *)
aux (ND (fun st -> NDnd (info, str_ms), st)) st'
in let ret = runEff (aux m st0) in
(* prerr "EXITING"; *)
ret
| null | https://raw.githubusercontent.com/rems-project/cerberus/f9b43246f0b9fa5a6bbfadbdfb82e7bcb0786f2b/ocaml_frontend/smt2.ml | ocaml | rec
>>= function
| [] ->
with_backtracking m xs'
| ys ->
return ys in
TODO: graph export
with_constraints debug_str
backtrack
prerr "EXITING"; | open Nondeterminism
open Memory_model
open Global_ocaml
let pad = ref 0
prerr_endline ( String.make ! pad ' ' ^ str )
let do_red str= "\x1b[31m" ^ str ^ "\x1b[0m"
let runND exec_mode (type cs) cs_module (m: ('a, Driver.step_kind, 'err, cs, 'st) ndM) (st0: 'st) =
prerr "ENTERING runND";
Debug_ocaml.print_debug 1 [] (fun () ->
"HELLO from Smt2.runND, exec mode= " ^ match exec_mode with
| Exhaustive ->
"exhaustive"
| Random ->
"random"
);
let module CS = (val cs_module : Constraints with type t = cs) in
let (>>=) = CS.bind in
let open CS in
let i = (Random.int (List.length xs)) in
let x = List.nth xs i in
let xs ' = List.init ( List.length xs - 1 ) ( fun z - >
List.nth xs ( if z < i then z else z+1 )
) in
let xs' = List.init (List.length xs - 1) (fun z ->
List.nth xs (if z < i then z else z+1)
) in
*)
let rec aux (ND m_act) st =
match m_act st with
| (NDactive a, st') ->
prerr "NDactive";
flush_all ();
check_sat >>= begin function
| `UNSAT ->
failwith "NDactive found to be UNSATISFIABLE"
| `SAT ->
CS.string_of_solver >>= fun str ->
return [(Active a, str, st')]
end
| (NDkilled r, st') ->
prerr "NDkilled";
flush_all ();
CS.string_of_solver >>= fun str ->
return [(Killed (st', r), str, st')]
| (NDnd (info, str_ms), st') ->
let xx = Random.int 10000 in
incr pad ;
let str = Printf.sprintf " % sNDnd[%s ] < % d > < size : % d>\n " ( String.make ! pad ' ' )
( Driver.instance_Show_Show_Driver_step_kind_dict.show_method info )
xx
( str_ms ) in
let str = if str_ms > 1 then do_red str else str in
prerr_string str ;
flush_all ( ) ;
incr pad;
let str = Printf.sprintf "%sNDnd[%s] <%d> <size: %d>\n" (String.make !pad ' ')
(Driver.instance_Show_Show_Driver_step_kind_dict.show_method info)
xx
(List.length str_ms) in
let str = if List.length str_ms > 1 then do_red str else str in
prerr_string str;
flush_all (); *)
let ret = begin match exec_mode with
| Random ->
with_backtracking (fun (_, z) -> aux z st') str_ms
| Exhaustive ->
List.iter ( fun ( idx , ( info , _ ) ) - >
Printf.fprintf stderr " % s<%d>[%d ] = = > % s\n " ( String.make ! pad ' ' ) ( Driver.instance_Show_Show_Driver_step_kind_dict.show_method info ) ;
flush_all ( ) ;
) ( List.mapi ( fun n z - > ( n , z ) ) str_ms ) ;
Printf.fprintf stderr "%s<%d>[%d] ==> %s\n" (String.make !pad ' ') xx idx
(Driver.instance_Show_Show_Driver_step_kind_dict.show_method info);
flush_all ();
) (List.mapi (fun n z -> (n, z)) str_ms); *)
foldlM (fun acc (idx, (info, m_act)) ->
Printf.fprintf stderr " % s<%d>[%d ] = = > % s\n " ( String.make ! pad ' ' )
( Driver.instance_Show_Show_Driver_step_kind_dict.show_method info ) ;
flush_all ( ) ;
(Driver.instance_Show_Show_Driver_step_kind_dict.show_method info);
flush_all (); *)
aux m_act st' >>= fun z ->
return (z @ acc)
) [] (List.mapi (fun n z -> (n, z)) str_ms)
| Interactive - >
failwith " Smt2.runND : TODO interactive mode "
| Interactive ->
failwith "Smt2.runND: TODO interactive mode"
*)
end in decr pad; ret
| (NDguard (info, cs, m_act), st') ->
Printf.fprintf stderr " % sNDguard[%s]\n " ( String.make ! pad ' ' )
( Driver.instance_Show_Show_Driver_step_kind_dict.show_method info ) ;
flush_all ( ) ;
(Driver.instance_Show_Show_Driver_step_kind_dict.show_method info);
flush_all (); *)
with_constraints info cs begin
check_sat >>= function
| `UNSAT ->
| `SAT ->
aux m_act st'
end
| (NDbranch (info, cs, m_act1, m_act2), st') ->
Printf.fprintf stderr " % sNDbranch[%s]\n " ( String.make ! pad ' ' )
( Driver.instance_Show_Show_Driver_step_kind_dict.show_method info ) ;
flush_all ( ) ;
(Driver.instance_Show_Show_Driver_step_kind_dict.show_method info);
flush_all (); *)
begin match exec_mode with
| Some Interactive - >
failwith " Smt2.runND : TODO interactive mode "
| Some Interactive ->
failwith "Smt2.runND: TODO interactive mode"
*)
| Random ->
with_backtracking (fun (cs, m_act) ->
with_constraints info cs begin
check_sat >>= function
| `UNSAT ->
return []
| `SAT ->
aux m_act st'
end) [(cs, m_act1); (negate cs, m_act2)]
| Exhaustive ->
with_constraints info cs begin
check_sat >>= function
| `UNSAT ->
return []
| `SAT ->
aux m_act1 st'
end >>= fun xs1 ->
with_constraints info (negate cs) begin
check_sat >>= function
| `UNSAT ->
return []
| `SAT ->
aux m_act2 st'
end >>= fun xs2 ->
return (xs1 @ xs2)
end
| (NDstep (info, str_ms), st') ->
Printf.fprintf stderr " % sNDstep[%s]\n " ( String.make ! pad ' ' )
( Driver.instance_Show_Show_Driver_step_kind_dict.show_method info ) ;
flush_all ( ) ;
(Driver.instance_Show_Show_Driver_step_kind_dict.show_method info);
flush_all (); *)
aux (ND (fun st -> NDnd (info, str_ms), st)) st'
in let ret = runEff (aux m st0) in
ret
|
36e5e2098b1cbaa9751ba1bf9624b5f07f13d56b9f0f2a14289ef8d7a48c7d9e | Lambda-Logan/faker | CreditCard.hs | |
Module : Faker . App
Description : Module for generating fake credit card numbers
Copyright : ( c ) , 2014 - 2018
License : MIT
Maintainer :
Stability : experimental
Portability : POSIX
Fake data
Module : Faker.App
Description : Module for generating fake credit card numbers
Copyright : (c) Alexey Gaziev, 2014-2018
License : MIT
Maintainer :
Stability : experimental
Portability : POSIX
Fake data
-}
module Faker.CreditCard
(
-- * Functions for generate fake credit card numbers
visa
, mastercard
, discover
, americanExpress
, dinersClub
, jcb
, switch
, solo
, dankort
, maestro
, forbrugsforeningen
, laser
) where
import Data.Char (digitToInt, isDigit)
import Faker.Utils
| Returns random visa card number , i.e. " 4784066907150 "
visa :: Faker String
visa = randomCardNumber "visa"
-- | Returns random mastercard card number, i.e. "5524-7275-2305-9123"
mastercard :: Faker String
mastercard = randomCardNumber "mastercard"
-- | Returns random discover card number, i.e. "6485-6297-9249-9908-4511"
discover :: Faker String
discover = randomCardNumber "discover"
| Returns random discover card number , i.e. " 3772 - 746109 - 17862 "
americanExpress :: Faker String
americanExpress = randomCardNumber "american_express"
| Returns random diners card number , i.e. " 3058 - 931015 - 6480 "
dinersClub :: Faker String
dinersClub = randomCardNumber "diners_club"
| Returns random jsb card number , i.e. " 3529 - 3170 - 1533 - 8944 "
jcb :: Faker String
jcb = randomCardNumber "jcb"
| Returns random switch card number , i.e. " 6759 - 8669 - 0174 - 5662 - 863 "
switch :: Faker String
switch = randomCardNumber "switch"
-- | Returns random solo card number, i.e. "6767-9171-7219-8374-98"
solo :: Faker String
solo = randomCardNumber "solo"
-- | Returns random dankort card number, i.e. "5019-5391-9757-3574"
dankort :: Faker String
dankort = randomCardNumber "dankort"
| Returns random maestro card number , i.e. " 563427125821696744 "
maestro :: Faker String
maestro = randomCardNumber "maestro"
| Returns random card number , i.e. " 6007 - 2299 - 2494 - 9683 "
forbrugsforeningen :: Faker String
forbrugsforeningen = randomCardNumber "forbrugsforeningen"
| Returns random laser card number , i.e. " 6709272591057118 "
laser :: Faker String
laser = randomCardNumber "laser"
randomCardNumber :: String -> Faker String
randomCardNumber attr = do
cardNum <- randomValue "credit_card" attr
filledNum <- evalRegex cardNum
return $ addLuhnSum filledNum
addLuhnSum :: String -> String
addLuhnSum numberString =
let numbers = collectNumbers numberString
luhnSum = countLuhnSum numbers 2
luhnDigit = (10 - (luhnSum `mod` 10)) `mod` 10
in init numberString ++ show luhnDigit
countLuhnSum :: [Int] -> Int -> Int
countLuhnSum [] _ = 0
countLuhnSum (x:xs) m =
let nextM = if m == 2 then 1 else 2
in luhnStep x m + countLuhnSum xs nextM
luhnStep :: Int -> Int -> Int
luhnStep x m = sum $ map digitToInt (show (x * m))
collectNumbers :: String -> [Int]
collectNumbers [] = []
collectNumbers str = foldl (\a x -> if isDigit x then digitToInt x : a else a) [] str
| null | https://raw.githubusercontent.com/Lambda-Logan/faker/8935346192e67631b97c1a52f6644ba5ed48a1a2/src/Faker/CreditCard.hs | haskell | * Functions for generate fake credit card numbers
| Returns random mastercard card number, i.e. "5524-7275-2305-9123"
| Returns random discover card number, i.e. "6485-6297-9249-9908-4511"
| Returns random solo card number, i.e. "6767-9171-7219-8374-98"
| Returns random dankort card number, i.e. "5019-5391-9757-3574" | |
Module : Faker . App
Description : Module for generating fake credit card numbers
Copyright : ( c ) , 2014 - 2018
License : MIT
Maintainer :
Stability : experimental
Portability : POSIX
Fake data
Module : Faker.App
Description : Module for generating fake credit card numbers
Copyright : (c) Alexey Gaziev, 2014-2018
License : MIT
Maintainer :
Stability : experimental
Portability : POSIX
Fake data
-}
module Faker.CreditCard
(
visa
, mastercard
, discover
, americanExpress
, dinersClub
, jcb
, switch
, solo
, dankort
, maestro
, forbrugsforeningen
, laser
) where
import Data.Char (digitToInt, isDigit)
import Faker.Utils
| Returns random visa card number , i.e. " 4784066907150 "
visa :: Faker String
visa = randomCardNumber "visa"
mastercard :: Faker String
mastercard = randomCardNumber "mastercard"
discover :: Faker String
discover = randomCardNumber "discover"
| Returns random discover card number , i.e. " 3772 - 746109 - 17862 "
americanExpress :: Faker String
americanExpress = randomCardNumber "american_express"
| Returns random diners card number , i.e. " 3058 - 931015 - 6480 "
dinersClub :: Faker String
dinersClub = randomCardNumber "diners_club"
| Returns random jsb card number , i.e. " 3529 - 3170 - 1533 - 8944 "
jcb :: Faker String
jcb = randomCardNumber "jcb"
| Returns random switch card number , i.e. " 6759 - 8669 - 0174 - 5662 - 863 "
switch :: Faker String
switch = randomCardNumber "switch"
solo :: Faker String
solo = randomCardNumber "solo"
dankort :: Faker String
dankort = randomCardNumber "dankort"
| Returns random maestro card number , i.e. " 563427125821696744 "
maestro :: Faker String
maestro = randomCardNumber "maestro"
| Returns random card number , i.e. " 6007 - 2299 - 2494 - 9683 "
forbrugsforeningen :: Faker String
forbrugsforeningen = randomCardNumber "forbrugsforeningen"
| Returns random laser card number , i.e. " 6709272591057118 "
laser :: Faker String
laser = randomCardNumber "laser"
randomCardNumber :: String -> Faker String
randomCardNumber attr = do
cardNum <- randomValue "credit_card" attr
filledNum <- evalRegex cardNum
return $ addLuhnSum filledNum
addLuhnSum :: String -> String
addLuhnSum numberString =
let numbers = collectNumbers numberString
luhnSum = countLuhnSum numbers 2
luhnDigit = (10 - (luhnSum `mod` 10)) `mod` 10
in init numberString ++ show luhnDigit
countLuhnSum :: [Int] -> Int -> Int
countLuhnSum [] _ = 0
countLuhnSum (x:xs) m =
let nextM = if m == 2 then 1 else 2
in luhnStep x m + countLuhnSum xs nextM
luhnStep :: Int -> Int -> Int
luhnStep x m = sum $ map digitToInt (show (x * m))
collectNumbers :: String -> [Int]
collectNumbers [] = []
collectNumbers str = foldl (\a x -> if isDigit x then digitToInt x : a else a) [] str
|
efd045deb5a53c738d6740dfe378482b68310d4458c3e1f6cf3250b744e00767 | haskell/cabal | Lens.hs | module Distribution.Types.BuildInfo.Lens (
BuildInfo,
HasBuildInfo (..),
HasBuildInfos (..),
) where
import Distribution.Compat.Lens
import Distribution.Compat.Prelude
import Prelude ()
import Distribution.Compiler (PerCompilerFlavor)
import Distribution.ModuleName (ModuleName)
import Distribution.Types.BuildInfo (BuildInfo)
import Distribution.Types.Dependency (Dependency)
import Distribution.Types.ExeDependency (ExeDependency)
import Distribution.Types.LegacyExeDependency (LegacyExeDependency)
import Distribution.Types.Mixin (Mixin)
import Distribution.Types.PkgconfigDependency (PkgconfigDependency)
import Distribution.Utils.Path
import Language.Haskell.Extension (Extension, Language)
import qualified Distribution.Types.BuildInfo as T
| Classy lenses for ' ' .
class HasBuildInfo a where
buildInfo :: Lens' a BuildInfo
buildable :: Lens' a Bool
buildable = buildInfo . buildable
# INLINE buildable #
buildTools :: Lens' a [LegacyExeDependency]
buildTools = buildInfo . buildTools
# INLINE buildTools #
buildToolDepends :: Lens' a [ExeDependency]
buildToolDepends = buildInfo . buildToolDepends
# INLINE buildToolDepends #
cppOptions :: Lens' a [String]
cppOptions = buildInfo . cppOptions
# INLINE cppOptions #
asmOptions :: Lens' a [String]
asmOptions = buildInfo . asmOptions
# INLINE asmOptions #
cmmOptions :: Lens' a [String]
cmmOptions = buildInfo . cmmOptions
# INLINE cmmOptions #
ccOptions :: Lens' a [String]
ccOptions = buildInfo . ccOptions
# INLINE ccOptions #
cxxOptions :: Lens' a [String]
cxxOptions = buildInfo . cxxOptions
# INLINE cxxOptions #
ldOptions :: Lens' a [String]
ldOptions = buildInfo . ldOptions
# INLINE ldOptions #
hsc2hsOptions :: Lens' a [String]
hsc2hsOptions = buildInfo . hsc2hsOptions
# INLINE hsc2hsOptions #
pkgconfigDepends :: Lens' a [PkgconfigDependency]
pkgconfigDepends = buildInfo . pkgconfigDepends
# INLINE pkgconfigDepends #
frameworks :: Lens' a [String]
frameworks = buildInfo . frameworks
# INLINE frameworks #
extraFrameworkDirs :: Lens' a [String]
extraFrameworkDirs = buildInfo . extraFrameworkDirs
# INLINE extraFrameworkDirs #
asmSources :: Lens' a [FilePath]
asmSources = buildInfo . asmSources
# INLINE asmSources #
cmmSources :: Lens' a [FilePath]
cmmSources = buildInfo . cmmSources
# INLINE cmmSources #
cSources :: Lens' a [FilePath]
cSources = buildInfo . cSources
# INLINE cSources #
cxxSources :: Lens' a [FilePath]
cxxSources = buildInfo . cxxSources
# INLINE cxxSources #
jsSources :: Lens' a [FilePath]
jsSources = buildInfo . jsSources
# INLINE jsSources #
hsSourceDirs :: Lens' a [SymbolicPath PackageDir SourceDir]
hsSourceDirs = buildInfo . hsSourceDirs
# INLINE hsSourceDirs #
otherModules :: Lens' a [ModuleName]
otherModules = buildInfo . otherModules
# INLINE otherModules #
virtualModules :: Lens' a [ModuleName]
virtualModules = buildInfo . virtualModules
# INLINE virtualModules #
autogenModules :: Lens' a [ModuleName]
autogenModules = buildInfo . autogenModules
# INLINE autogenModules #
defaultLanguage :: Lens' a (Maybe Language)
defaultLanguage = buildInfo . defaultLanguage
# INLINE defaultLanguage #
otherLanguages :: Lens' a [Language]
otherLanguages = buildInfo . otherLanguages
# INLINE otherLanguages #
defaultExtensions :: Lens' a [Extension]
defaultExtensions = buildInfo . defaultExtensions
# INLINE defaultExtensions #
otherExtensions :: Lens' a [Extension]
otherExtensions = buildInfo . otherExtensions
# INLINE otherExtensions #
oldExtensions :: Lens' a [Extension]
oldExtensions = buildInfo . oldExtensions
# INLINE oldExtensions #
extraLibs :: Lens' a [String]
extraLibs = buildInfo . extraLibs
# INLINE extraLibs #
extraLibsStatic :: Lens' a [String]
extraLibsStatic = buildInfo . extraLibsStatic
# INLINE extraLibsStatic #
extraGHCiLibs :: Lens' a [String]
extraGHCiLibs = buildInfo . extraGHCiLibs
# INLINE extraGHCiLibs #
extraBundledLibs :: Lens' a [String]
extraBundledLibs = buildInfo . extraBundledLibs
# INLINE extraBundledLibs #
extraLibFlavours :: Lens' a [String]
extraLibFlavours = buildInfo . extraLibFlavours
# INLINE extraLibFlavours #
extraDynLibFlavours :: Lens' a [String]
extraDynLibFlavours = buildInfo . extraDynLibFlavours
# INLINE extraDynLibFlavours #
extraLibDirs :: Lens' a [String]
extraLibDirs = buildInfo . extraLibDirs
# INLINE extraLibDirs #
extraLibDirsStatic :: Lens' a [String]
extraLibDirsStatic = buildInfo . extraLibDirsStatic
# INLINE extraLibDirsStatic #
includeDirs :: Lens' a [FilePath]
includeDirs = buildInfo . includeDirs
# INLINE includeDirs #
includes :: Lens' a [FilePath]
includes = buildInfo . includes
{-# INLINE includes #-}
autogenIncludes :: Lens' a [FilePath]
autogenIncludes = buildInfo . autogenIncludes
# INLINE autogenIncludes #
installIncludes :: Lens' a [FilePath]
installIncludes = buildInfo . installIncludes
# INLINE installIncludes #
options :: Lens' a (PerCompilerFlavor [String])
options = buildInfo . options
# INLINE options #
profOptions :: Lens' a (PerCompilerFlavor [String])
profOptions = buildInfo . profOptions
# INLINE profOptions #
sharedOptions :: Lens' a (PerCompilerFlavor [String])
sharedOptions = buildInfo . sharedOptions
# INLINE sharedOptions #
staticOptions :: Lens' a (PerCompilerFlavor [String])
staticOptions = buildInfo . staticOptions
# INLINE staticOptions #
customFieldsBI :: Lens' a [(String,String)]
customFieldsBI = buildInfo . customFieldsBI
# INLINE customFieldsBI #
targetBuildDepends :: Lens' a [Dependency]
targetBuildDepends = buildInfo . targetBuildDepends
# INLINE targetBuildDepends #
mixins :: Lens' a [Mixin]
mixins = buildInfo . mixins
# INLINE mixins #
instance HasBuildInfo BuildInfo where
buildInfo = id
# INLINE buildInfo #
buildable f s = fmap (\x -> s { T.buildable = x }) (f (T.buildable s))
# INLINE buildable #
buildTools f s = fmap (\x -> s { T.buildTools = x }) (f (T.buildTools s))
# INLINE buildTools #
buildToolDepends f s = fmap (\x -> s { T.buildToolDepends = x }) (f (T.buildToolDepends s))
# INLINE buildToolDepends #
cppOptions f s = fmap (\x -> s { T.cppOptions = x }) (f (T.cppOptions s))
# INLINE cppOptions #
asmOptions f s = fmap (\x -> s { T.asmOptions = x }) (f (T.asmOptions s))
# INLINE asmOptions #
cmmOptions f s = fmap (\x -> s { T.cmmOptions = x }) (f (T.cmmOptions s))
# INLINE cmmOptions #
ccOptions f s = fmap (\x -> s { T.ccOptions = x }) (f (T.ccOptions s))
# INLINE ccOptions #
cxxOptions f s = fmap (\x -> s { T.cxxOptions = x }) (f (T.cxxOptions s))
# INLINE cxxOptions #
ldOptions f s = fmap (\x -> s { T.ldOptions = x }) (f (T.ldOptions s))
# INLINE ldOptions #
hsc2hsOptions f s = fmap (\x -> s { T.hsc2hsOptions = x }) (f (T.hsc2hsOptions s))
# INLINE hsc2hsOptions #
pkgconfigDepends f s = fmap (\x -> s { T.pkgconfigDepends = x }) (f (T.pkgconfigDepends s))
# INLINE pkgconfigDepends #
frameworks f s = fmap (\x -> s { T.frameworks = x }) (f (T.frameworks s))
# INLINE frameworks #
extraFrameworkDirs f s = fmap (\x -> s { T.extraFrameworkDirs = x }) (f (T.extraFrameworkDirs s))
# INLINE extraFrameworkDirs #
asmSources f s = fmap (\x -> s { T.asmSources = x }) (f (T.asmSources s))
# INLINE asmSources #
cmmSources f s = fmap (\x -> s { T.cmmSources = x }) (f (T.cmmSources s))
# INLINE cmmSources #
cSources f s = fmap (\x -> s { T.cSources = x }) (f (T.cSources s))
# INLINE cSources #
cxxSources f s = fmap (\x -> s { T.cSources = x }) (f (T.cxxSources s))
# INLINE cxxSources #
jsSources f s = fmap (\x -> s { T.jsSources = x }) (f (T.jsSources s))
# INLINE jsSources #
hsSourceDirs f s = fmap (\x -> s { T.hsSourceDirs = x }) (f (T.hsSourceDirs s))
# INLINE hsSourceDirs #
otherModules f s = fmap (\x -> s { T.otherModules = x }) (f (T.otherModules s))
# INLINE otherModules #
virtualModules f s = fmap (\x -> s { T.virtualModules = x }) (f (T.virtualModules s))
# INLINE virtualModules #
autogenModules f s = fmap (\x -> s { T.autogenModules = x }) (f (T.autogenModules s))
# INLINE autogenModules #
defaultLanguage f s = fmap (\x -> s { T.defaultLanguage = x }) (f (T.defaultLanguage s))
# INLINE defaultLanguage #
otherLanguages f s = fmap (\x -> s { T.otherLanguages = x }) (f (T.otherLanguages s))
# INLINE otherLanguages #
defaultExtensions f s = fmap (\x -> s { T.defaultExtensions = x }) (f (T.defaultExtensions s))
# INLINE defaultExtensions #
otherExtensions f s = fmap (\x -> s { T.otherExtensions = x }) (f (T.otherExtensions s))
# INLINE otherExtensions #
oldExtensions f s = fmap (\x -> s { T.oldExtensions = x }) (f (T.oldExtensions s))
# INLINE oldExtensions #
extraLibs f s = fmap (\x -> s { T.extraLibs = x }) (f (T.extraLibs s))
# INLINE extraLibs #
extraLibsStatic f s = fmap (\x -> s { T.extraLibsStatic = x}) (f (T.extraLibsStatic s))
# INLINE extraLibsStatic #
extraGHCiLibs f s = fmap (\x -> s { T.extraGHCiLibs = x }) (f (T.extraGHCiLibs s))
# INLINE extraGHCiLibs #
extraBundledLibs f s = fmap (\x -> s { T.extraBundledLibs = x }) (f (T.extraBundledLibs s))
# INLINE extraBundledLibs #
extraLibFlavours f s = fmap (\x -> s { T.extraLibFlavours = x }) (f (T.extraLibFlavours s))
# INLINE extraLibFlavours #
extraDynLibFlavours f s = fmap (\x -> s { T.extraDynLibFlavours = x}) (f (T.extraDynLibFlavours s))
# INLINE extraDynLibFlavours #
extraLibDirs f s = fmap (\x -> s { T.extraLibDirs = x }) (f (T.extraLibDirs s))
# INLINE extraLibDirs #
extraLibDirsStatic f s = fmap (\x -> s { T.extraLibDirsStatic = x}) (f (T.extraLibDirsStatic s))
# INLINE extraLibDirsStatic #
includeDirs f s = fmap (\x -> s { T.includeDirs = x }) (f (T.includeDirs s))
# INLINE includeDirs #
includes f s = fmap (\x -> s { T.includes = x }) (f (T.includes s))
{-# INLINE includes #-}
autogenIncludes f s = fmap (\x -> s { T.autogenIncludes = x }) (f (T.autogenIncludes s))
# INLINE autogenIncludes #
installIncludes f s = fmap (\x -> s { T.installIncludes = x }) (f (T.installIncludes s))
# INLINE installIncludes #
options f s = fmap (\x -> s { T.options = x }) (f (T.options s))
# INLINE options #
profOptions f s = fmap (\x -> s { T.profOptions = x }) (f (T.profOptions s))
# INLINE profOptions #
sharedOptions f s = fmap (\x -> s { T.sharedOptions = x }) (f (T.sharedOptions s))
# INLINE sharedOptions #
staticOptions f s = fmap (\x -> s { T.staticOptions = x }) (f (T.staticOptions s))
# INLINE staticOptions #
customFieldsBI f s = fmap (\x -> s { T.customFieldsBI = x }) (f (T.customFieldsBI s))
# INLINE customFieldsBI #
targetBuildDepends f s = fmap (\x -> s { T.targetBuildDepends = x }) (f (T.targetBuildDepends s))
# INLINE targetBuildDepends #
mixins f s = fmap (\x -> s { T.mixins = x }) (f (T.mixins s))
# INLINE mixins #
class HasBuildInfos a where
traverseBuildInfos :: Traversal' a BuildInfo
| null | https://raw.githubusercontent.com/haskell/cabal/0abbe37187f708e0a5daac8d388167f72ca0db7e/Cabal-syntax/src/Distribution/Types/BuildInfo/Lens.hs | haskell | # INLINE includes #
# INLINE includes # | module Distribution.Types.BuildInfo.Lens (
BuildInfo,
HasBuildInfo (..),
HasBuildInfos (..),
) where
import Distribution.Compat.Lens
import Distribution.Compat.Prelude
import Prelude ()
import Distribution.Compiler (PerCompilerFlavor)
import Distribution.ModuleName (ModuleName)
import Distribution.Types.BuildInfo (BuildInfo)
import Distribution.Types.Dependency (Dependency)
import Distribution.Types.ExeDependency (ExeDependency)
import Distribution.Types.LegacyExeDependency (LegacyExeDependency)
import Distribution.Types.Mixin (Mixin)
import Distribution.Types.PkgconfigDependency (PkgconfigDependency)
import Distribution.Utils.Path
import Language.Haskell.Extension (Extension, Language)
import qualified Distribution.Types.BuildInfo as T
| Classy lenses for ' ' .
class HasBuildInfo a where
buildInfo :: Lens' a BuildInfo
buildable :: Lens' a Bool
buildable = buildInfo . buildable
# INLINE buildable #
buildTools :: Lens' a [LegacyExeDependency]
buildTools = buildInfo . buildTools
# INLINE buildTools #
buildToolDepends :: Lens' a [ExeDependency]
buildToolDepends = buildInfo . buildToolDepends
# INLINE buildToolDepends #
cppOptions :: Lens' a [String]
cppOptions = buildInfo . cppOptions
# INLINE cppOptions #
asmOptions :: Lens' a [String]
asmOptions = buildInfo . asmOptions
# INLINE asmOptions #
cmmOptions :: Lens' a [String]
cmmOptions = buildInfo . cmmOptions
# INLINE cmmOptions #
ccOptions :: Lens' a [String]
ccOptions = buildInfo . ccOptions
# INLINE ccOptions #
cxxOptions :: Lens' a [String]
cxxOptions = buildInfo . cxxOptions
# INLINE cxxOptions #
ldOptions :: Lens' a [String]
ldOptions = buildInfo . ldOptions
# INLINE ldOptions #
hsc2hsOptions :: Lens' a [String]
hsc2hsOptions = buildInfo . hsc2hsOptions
# INLINE hsc2hsOptions #
pkgconfigDepends :: Lens' a [PkgconfigDependency]
pkgconfigDepends = buildInfo . pkgconfigDepends
# INLINE pkgconfigDepends #
frameworks :: Lens' a [String]
frameworks = buildInfo . frameworks
# INLINE frameworks #
extraFrameworkDirs :: Lens' a [String]
extraFrameworkDirs = buildInfo . extraFrameworkDirs
# INLINE extraFrameworkDirs #
asmSources :: Lens' a [FilePath]
asmSources = buildInfo . asmSources
# INLINE asmSources #
cmmSources :: Lens' a [FilePath]
cmmSources = buildInfo . cmmSources
# INLINE cmmSources #
cSources :: Lens' a [FilePath]
cSources = buildInfo . cSources
# INLINE cSources #
cxxSources :: Lens' a [FilePath]
cxxSources = buildInfo . cxxSources
# INLINE cxxSources #
jsSources :: Lens' a [FilePath]
jsSources = buildInfo . jsSources
# INLINE jsSources #
hsSourceDirs :: Lens' a [SymbolicPath PackageDir SourceDir]
hsSourceDirs = buildInfo . hsSourceDirs
# INLINE hsSourceDirs #
otherModules :: Lens' a [ModuleName]
otherModules = buildInfo . otherModules
# INLINE otherModules #
virtualModules :: Lens' a [ModuleName]
virtualModules = buildInfo . virtualModules
# INLINE virtualModules #
autogenModules :: Lens' a [ModuleName]
autogenModules = buildInfo . autogenModules
# INLINE autogenModules #
defaultLanguage :: Lens' a (Maybe Language)
defaultLanguage = buildInfo . defaultLanguage
# INLINE defaultLanguage #
otherLanguages :: Lens' a [Language]
otherLanguages = buildInfo . otherLanguages
# INLINE otherLanguages #
defaultExtensions :: Lens' a [Extension]
defaultExtensions = buildInfo . defaultExtensions
# INLINE defaultExtensions #
otherExtensions :: Lens' a [Extension]
otherExtensions = buildInfo . otherExtensions
# INLINE otherExtensions #
oldExtensions :: Lens' a [Extension]
oldExtensions = buildInfo . oldExtensions
# INLINE oldExtensions #
extraLibs :: Lens' a [String]
extraLibs = buildInfo . extraLibs
# INLINE extraLibs #
extraLibsStatic :: Lens' a [String]
extraLibsStatic = buildInfo . extraLibsStatic
# INLINE extraLibsStatic #
extraGHCiLibs :: Lens' a [String]
extraGHCiLibs = buildInfo . extraGHCiLibs
# INLINE extraGHCiLibs #
extraBundledLibs :: Lens' a [String]
extraBundledLibs = buildInfo . extraBundledLibs
# INLINE extraBundledLibs #
extraLibFlavours :: Lens' a [String]
extraLibFlavours = buildInfo . extraLibFlavours
# INLINE extraLibFlavours #
extraDynLibFlavours :: Lens' a [String]
extraDynLibFlavours = buildInfo . extraDynLibFlavours
# INLINE extraDynLibFlavours #
extraLibDirs :: Lens' a [String]
extraLibDirs = buildInfo . extraLibDirs
# INLINE extraLibDirs #
extraLibDirsStatic :: Lens' a [String]
extraLibDirsStatic = buildInfo . extraLibDirsStatic
# INLINE extraLibDirsStatic #
includeDirs :: Lens' a [FilePath]
includeDirs = buildInfo . includeDirs
# INLINE includeDirs #
includes :: Lens' a [FilePath]
includes = buildInfo . includes
autogenIncludes :: Lens' a [FilePath]
autogenIncludes = buildInfo . autogenIncludes
# INLINE autogenIncludes #
installIncludes :: Lens' a [FilePath]
installIncludes = buildInfo . installIncludes
# INLINE installIncludes #
options :: Lens' a (PerCompilerFlavor [String])
options = buildInfo . options
# INLINE options #
profOptions :: Lens' a (PerCompilerFlavor [String])
profOptions = buildInfo . profOptions
# INLINE profOptions #
sharedOptions :: Lens' a (PerCompilerFlavor [String])
sharedOptions = buildInfo . sharedOptions
# INLINE sharedOptions #
staticOptions :: Lens' a (PerCompilerFlavor [String])
staticOptions = buildInfo . staticOptions
# INLINE staticOptions #
customFieldsBI :: Lens' a [(String,String)]
customFieldsBI = buildInfo . customFieldsBI
# INLINE customFieldsBI #
targetBuildDepends :: Lens' a [Dependency]
targetBuildDepends = buildInfo . targetBuildDepends
# INLINE targetBuildDepends #
mixins :: Lens' a [Mixin]
mixins = buildInfo . mixins
# INLINE mixins #
instance HasBuildInfo BuildInfo where
buildInfo = id
# INLINE buildInfo #
buildable f s = fmap (\x -> s { T.buildable = x }) (f (T.buildable s))
# INLINE buildable #
buildTools f s = fmap (\x -> s { T.buildTools = x }) (f (T.buildTools s))
# INLINE buildTools #
buildToolDepends f s = fmap (\x -> s { T.buildToolDepends = x }) (f (T.buildToolDepends s))
# INLINE buildToolDepends #
cppOptions f s = fmap (\x -> s { T.cppOptions = x }) (f (T.cppOptions s))
# INLINE cppOptions #
asmOptions f s = fmap (\x -> s { T.asmOptions = x }) (f (T.asmOptions s))
# INLINE asmOptions #
cmmOptions f s = fmap (\x -> s { T.cmmOptions = x }) (f (T.cmmOptions s))
# INLINE cmmOptions #
ccOptions f s = fmap (\x -> s { T.ccOptions = x }) (f (T.ccOptions s))
# INLINE ccOptions #
cxxOptions f s = fmap (\x -> s { T.cxxOptions = x }) (f (T.cxxOptions s))
# INLINE cxxOptions #
ldOptions f s = fmap (\x -> s { T.ldOptions = x }) (f (T.ldOptions s))
# INLINE ldOptions #
hsc2hsOptions f s = fmap (\x -> s { T.hsc2hsOptions = x }) (f (T.hsc2hsOptions s))
# INLINE hsc2hsOptions #
pkgconfigDepends f s = fmap (\x -> s { T.pkgconfigDepends = x }) (f (T.pkgconfigDepends s))
# INLINE pkgconfigDepends #
frameworks f s = fmap (\x -> s { T.frameworks = x }) (f (T.frameworks s))
# INLINE frameworks #
extraFrameworkDirs f s = fmap (\x -> s { T.extraFrameworkDirs = x }) (f (T.extraFrameworkDirs s))
# INLINE extraFrameworkDirs #
asmSources f s = fmap (\x -> s { T.asmSources = x }) (f (T.asmSources s))
# INLINE asmSources #
cmmSources f s = fmap (\x -> s { T.cmmSources = x }) (f (T.cmmSources s))
# INLINE cmmSources #
cSources f s = fmap (\x -> s { T.cSources = x }) (f (T.cSources s))
# INLINE cSources #
cxxSources f s = fmap (\x -> s { T.cSources = x }) (f (T.cxxSources s))
# INLINE cxxSources #
jsSources f s = fmap (\x -> s { T.jsSources = x }) (f (T.jsSources s))
# INLINE jsSources #
hsSourceDirs f s = fmap (\x -> s { T.hsSourceDirs = x }) (f (T.hsSourceDirs s))
# INLINE hsSourceDirs #
otherModules f s = fmap (\x -> s { T.otherModules = x }) (f (T.otherModules s))
# INLINE otherModules #
virtualModules f s = fmap (\x -> s { T.virtualModules = x }) (f (T.virtualModules s))
# INLINE virtualModules #
autogenModules f s = fmap (\x -> s { T.autogenModules = x }) (f (T.autogenModules s))
# INLINE autogenModules #
defaultLanguage f s = fmap (\x -> s { T.defaultLanguage = x }) (f (T.defaultLanguage s))
# INLINE defaultLanguage #
otherLanguages f s = fmap (\x -> s { T.otherLanguages = x }) (f (T.otherLanguages s))
# INLINE otherLanguages #
defaultExtensions f s = fmap (\x -> s { T.defaultExtensions = x }) (f (T.defaultExtensions s))
# INLINE defaultExtensions #
otherExtensions f s = fmap (\x -> s { T.otherExtensions = x }) (f (T.otherExtensions s))
# INLINE otherExtensions #
oldExtensions f s = fmap (\x -> s { T.oldExtensions = x }) (f (T.oldExtensions s))
# INLINE oldExtensions #
extraLibs f s = fmap (\x -> s { T.extraLibs = x }) (f (T.extraLibs s))
# INLINE extraLibs #
extraLibsStatic f s = fmap (\x -> s { T.extraLibsStatic = x}) (f (T.extraLibsStatic s))
# INLINE extraLibsStatic #
extraGHCiLibs f s = fmap (\x -> s { T.extraGHCiLibs = x }) (f (T.extraGHCiLibs s))
# INLINE extraGHCiLibs #
extraBundledLibs f s = fmap (\x -> s { T.extraBundledLibs = x }) (f (T.extraBundledLibs s))
# INLINE extraBundledLibs #
extraLibFlavours f s = fmap (\x -> s { T.extraLibFlavours = x }) (f (T.extraLibFlavours s))
# INLINE extraLibFlavours #
extraDynLibFlavours f s = fmap (\x -> s { T.extraDynLibFlavours = x}) (f (T.extraDynLibFlavours s))
# INLINE extraDynLibFlavours #
extraLibDirs f s = fmap (\x -> s { T.extraLibDirs = x }) (f (T.extraLibDirs s))
# INLINE extraLibDirs #
extraLibDirsStatic f s = fmap (\x -> s { T.extraLibDirsStatic = x}) (f (T.extraLibDirsStatic s))
# INLINE extraLibDirsStatic #
includeDirs f s = fmap (\x -> s { T.includeDirs = x }) (f (T.includeDirs s))
# INLINE includeDirs #
includes f s = fmap (\x -> s { T.includes = x }) (f (T.includes s))
autogenIncludes f s = fmap (\x -> s { T.autogenIncludes = x }) (f (T.autogenIncludes s))
# INLINE autogenIncludes #
installIncludes f s = fmap (\x -> s { T.installIncludes = x }) (f (T.installIncludes s))
# INLINE installIncludes #
options f s = fmap (\x -> s { T.options = x }) (f (T.options s))
# INLINE options #
profOptions f s = fmap (\x -> s { T.profOptions = x }) (f (T.profOptions s))
# INLINE profOptions #
sharedOptions f s = fmap (\x -> s { T.sharedOptions = x }) (f (T.sharedOptions s))
# INLINE sharedOptions #
staticOptions f s = fmap (\x -> s { T.staticOptions = x }) (f (T.staticOptions s))
# INLINE staticOptions #
customFieldsBI f s = fmap (\x -> s { T.customFieldsBI = x }) (f (T.customFieldsBI s))
# INLINE customFieldsBI #
targetBuildDepends f s = fmap (\x -> s { T.targetBuildDepends = x }) (f (T.targetBuildDepends s))
# INLINE targetBuildDepends #
mixins f s = fmap (\x -> s { T.mixins = x }) (f (T.mixins s))
# INLINE mixins #
class HasBuildInfos a where
traverseBuildInfos :: Traversal' a BuildInfo
|
063962d2866bcb5c74aa1ff1d1f0bf24dd6b0df03d240b6552893428e803fcd8 | sqd/haskell-C89-interpreter | Grammar.hs | module Grammar(
Exp(..),
VarInit(..),
VariableDeclaration(..),
FunctionDefinition(..),
StructDefinition(..),
Jump(..),
Program(..),
Structure(..),
)where
import Definition
import Type
data Exp = Exp Identifier [Exp] | Constant Literal deriving Show
data VarInit = InitList [Exp] | InitExp Exp deriving Show
data VariableDeclaration = VarDecl Type Identifier (Maybe VarInit) | ArrDecl Type Identifier Exp (Maybe VarInit) deriving Show
data FunctionDefinition = FuncDef Identifier Type [(Type, Identifier)] [Structure] deriving Show
data StructDefinition = StructDef Identifier [(Type, Identifier)] deriving Show
data Jump = Return Exp | Break | Continue deriving Show
data Program = Program [FunctionDefinition] [StructDefinition] [VariableDeclaration] deriving Show
data Structure =
IfBlock Exp [Structure] [Structure] |
SwitchBlock Exp [([Exp], [Structure])] |
WhileBlock Exp [Structure] |
DoWhileBlock Exp [Structure] |
ForBlock Exp Exp Exp [Structure] |
Expression Exp |
Declaration VariableDeclaration |
UCJump Jump |
LocalStructDefinition StructDefinition |
DarkMagic Identifier
deriving Show
| null | https://raw.githubusercontent.com/sqd/haskell-C89-interpreter/cd0cd344cf07eba29a906b62fb31ea120adfca86/Grammar.hs | haskell | module Grammar(
Exp(..),
VarInit(..),
VariableDeclaration(..),
FunctionDefinition(..),
StructDefinition(..),
Jump(..),
Program(..),
Structure(..),
)where
import Definition
import Type
data Exp = Exp Identifier [Exp] | Constant Literal deriving Show
data VarInit = InitList [Exp] | InitExp Exp deriving Show
data VariableDeclaration = VarDecl Type Identifier (Maybe VarInit) | ArrDecl Type Identifier Exp (Maybe VarInit) deriving Show
data FunctionDefinition = FuncDef Identifier Type [(Type, Identifier)] [Structure] deriving Show
data StructDefinition = StructDef Identifier [(Type, Identifier)] deriving Show
data Jump = Return Exp | Break | Continue deriving Show
data Program = Program [FunctionDefinition] [StructDefinition] [VariableDeclaration] deriving Show
data Structure =
IfBlock Exp [Structure] [Structure] |
SwitchBlock Exp [([Exp], [Structure])] |
WhileBlock Exp [Structure] |
DoWhileBlock Exp [Structure] |
ForBlock Exp Exp Exp [Structure] |
Expression Exp |
Declaration VariableDeclaration |
UCJump Jump |
LocalStructDefinition StructDefinition |
DarkMagic Identifier
deriving Show
| |
281674648d03fd16a82a028f7913571209a5f16adf991a8b00d888fd226bdb02 | acl2/acl2 | top.lisp | C Library
;
Copyright ( C ) 2023 Kestrel Institute ( )
Copyright ( C ) 2023 Kestrel Technology LLC ( )
;
License : A 3 - clause BSD license . See the LICENSE file distributed with ACL2 .
;
Author : ( )
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(in-package "C")
(include-book "../symbolic-computation-states")
(include-book "../shallow-embedding")
(include-book "types")
(include-book "values")
(include-book "type-of-value")
(include-book "test-value")
(include-book "exec-const")
(include-book "exec-ident")
(include-book "exec-unary")
(include-book "exec-binary-strict-pure")
(include-book "exec-cast")
(include-book "exec-arrsub")
(include-book "exec-expr-pure")
(include-book "exec-expr-call")
(include-book "exec-expr-call-or-pure")
(include-book "exec-expr-asg")
(include-book "exec-expr-call-or-asg")
(include-book "exec-fun")
(include-book "exec-stmt")
(include-book "exec-initer")
(include-book "exec-block-item")
(include-book "init-scope")
(include-book "adjust-type")
(include-book "static-variable-pointers")
(include-book "identifiers")
(include-book "wrappers")
(include-book "if-distributivity")
(include-book "returns")
(include-book "executable-counterparts")
(include-book "limit")
(include-book "not-error")
(include-book "integer-operations")
(include-book "misc-rewrite")
(include-book "type-prescriptions")
(include-book "compound-recognizers")
(include-book "flexible-array-member")
(include-book "if-star")
(include-book "boolean-equality")
(include-book "hide")
(include-book "pointed-integers")
(include-book "sint-from-boolean")
(local (include-book "kestrel/built-ins/disable" :dir :system))
(local (acl2::disable-most-builtin-logic-defuns))
(local (acl2::disable-builtin-rewrite-rules-for-defaults))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defxdoc+ atc-symbolic-execution-rules
:parents (atc-execution)
:short "Symbolic execution rules for ATC."
:long
(xdoc::topstring
(xdoc::p
"Currently, the generated proofs of function correctness
are carried out via symbolic execution of the C code.
The C code is a constant value,
because we are generating proofs over specific C functions;
this makes symbolic execution possible.")
(xdoc::p
"In order to make these generated proofs more robust,
we carry them out in a theory that consists exactly of
(what we believe to be) all and only the needed rules.
This file defines that theory.
This set of rules has been defined by
not only thinking of what is needed for symbolic execution,
but also experimenting with several symbolic execution proofs,
starting with the empty theory and adding rules
as needed to advance the symbolic execution,
and also by looking at the C dynamic semantics.
There is no guarantee (meta proof) that
these rules will suffice for every use of ATC;
there is also no guarantee that
the proof will not be defeated by some ACL2 heuristic in some cases.
Nonetheless, the proof strategy seems sound and robust,
and if a generated proof fails
it should be possible to (prove and) use additional rules.")
(xdoc::p
"Some of the rules that are used in the symbolic execution
rewrite calls of functions used in the deeply embedded dynamic semantics
into their shallowly embedded counterparts,
under hypothesis on the types of the arguments.
For instance, @('(exec-unary op x compst)')
is rewritten to @('(<op>-<type> x)')
when @('op') is the unary operation corresponding to @('<op>')
(unary plus, unary minus, bitwise complement, or logical complement),
and @('x') has type @('<type>').
These shallowly embedded counterparts are used
in the ACL2 functions from which C code is represented:
thus, the rewrite rules serve to turn (the execution of) the C code
into the ACL2 terms from which the C code is generated,
which is at the core of proving the correctness of the generated C code.")
(xdoc::p
"For recursive ACL2 functions that model C execution
(e.g. @(tsee exec-expr-pure)),
we introduce opener rules,
which include @(tsee syntaxp) hypotheses requiring that
the C abstract syntax being executed is a quoted constant.
Some of these opener rules include binding hypotheses,
which avoid symbolically executing the same pieces of C abstract syntax
multiple times in some situations.")
(xdoc::p
"We collect the rules in lists,
each of which serves a particular symbolic execution purpose.
Certain rules may appear in multiple lists,
when they serve multiple symbolic execution purposes.
The current organization and subdivision of the rules in these lists
is reasonable, but can (and will) certainly be improved"))
:order-subtopics t
:default-parent t)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defval *atc-all-rules*
:short "List of all the (generic) rules for the proofs generated by ATC."
:long
(xdoc::topstring
(xdoc::p
"These are the ones used in all the generated proofs.
In addition, each proof includes a few additional rules
that depend on the specific C-representing ACL2 functions involved.
See @(see atc-implementation)."))
(append *atc-symbolic-computation-state-rules*
*atc-tyname-to-type-rules*
*atc-type-kind-rules*
*atc-valuep-rules*
*atc-value-listp-rules*
*atc-value-optionp-rules*
*atc-value-kind-rules*
*atc-type-of-value-rules*
*atc-type-of-value-option-rules*
*atc-value-array->elemtype-rules*
*atc-array-length-rules*
*atc-array-length-write-rules*
*atc-static-variable-pointer-rules*
*atc-exec-ident-rules*
*atc-exec-const-rules*
*atc-exec-arrsub-rules*
*atc-exec-unary-nonpointer-rules*
*atc-exec-indir-rules*
*atc-exec-cast-rules*
*atc-exec-binary-strict-pure-rules*
*atc-test-value-rules*
*atc-exec-expr-pure-rules*
*atc-exec-expr-pure-list-rules*
*atc-exec-expr-call-rules*
*atc-exec-expr-call-or-pure-rules*
*atc-exec-expr-asg-rules*
*atc-exec-expr-call-or-asg-rules*
*atc-exec-fun-rules*
*atc-exec-stmt-rules*
*atc-exec-initer-rules*
*atc-init-value-to-value-rules*
*atc-exec-block-item-rules*
*atc-exec-block-item-list-rules*
*atc-init-scope-rules*
*atc-adjust-type-rules*
*atc-other-executable-counterpart-rules*
*atc-wrapper-rules*
*atc-distributivity-over-if-rewrite-rules*
*atc-identifier-rules*
*atc-integer-const-rules*
*atc-integer-size-rules*
*atc-integer-ops-1-return-rewrite-rules*
*atc-integer-ops-2-return-rewrite-rules*
*atc-integer-convs-return-rewrite-rules*
*atc-array-read-return-rewrite-rules*
*atc-array-write-return-rewrite-rules*
*atc-integer-ops-1-type-prescription-rules*
*atc-integer-ops-2-type-prescription-rules*
*atc-integer-convs-type-prescription-rules*
*atc-array-read-type-prescription-rules*
*atc-misc-rewrite-rules*
*atc-type-prescription-rules*
*atc-compound-recognizer-rules*
*integer-value-disjoint-rules*
*array-value-disjoint-rules*
*atc-sint-from-boolean*
*atc-boolean-from-sint*
*atc-integer-ifix-rules*
*atc-limit-rules*
*atc-not-error-rules*
*atc-value-result-fix-rules*
*atc-lognot-sint-rules*
*atc-boolean-from-integer-return-rules*
*atc-integer-constructors-return-rules*
*atc-computation-state-return-rules*
*atc-value-fix-rules*
*atc-flexible-array-member-rules*
*atc-pointed-integer-rules*))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
; We define a theory for the rules because experiments show that
; a long time is spent by ACL2 translating hints,
given that * ATC - ALL - RULES * consists of thousands of rules .
; We use this theory in the generated proofs (see generation.lisp).
(deftheory atc-all-rules *atc-all-rules*)
| null | https://raw.githubusercontent.com/acl2/acl2/2ca2950fc647e50315cee2b8a07e36694f7e61f4/books/kestrel/c/atc/symbolic-execution-rules/top.lisp | lisp |
We define a theory for the rules because experiments show that
a long time is spent by ACL2 translating hints,
We use this theory in the generated proofs (see generation.lisp). | C Library
Copyright ( C ) 2023 Kestrel Institute ( )
Copyright ( C ) 2023 Kestrel Technology LLC ( )
License : A 3 - clause BSD license . See the LICENSE file distributed with ACL2 .
Author : ( )
(in-package "C")
(include-book "../symbolic-computation-states")
(include-book "../shallow-embedding")
(include-book "types")
(include-book "values")
(include-book "type-of-value")
(include-book "test-value")
(include-book "exec-const")
(include-book "exec-ident")
(include-book "exec-unary")
(include-book "exec-binary-strict-pure")
(include-book "exec-cast")
(include-book "exec-arrsub")
(include-book "exec-expr-pure")
(include-book "exec-expr-call")
(include-book "exec-expr-call-or-pure")
(include-book "exec-expr-asg")
(include-book "exec-expr-call-or-asg")
(include-book "exec-fun")
(include-book "exec-stmt")
(include-book "exec-initer")
(include-book "exec-block-item")
(include-book "init-scope")
(include-book "adjust-type")
(include-book "static-variable-pointers")
(include-book "identifiers")
(include-book "wrappers")
(include-book "if-distributivity")
(include-book "returns")
(include-book "executable-counterparts")
(include-book "limit")
(include-book "not-error")
(include-book "integer-operations")
(include-book "misc-rewrite")
(include-book "type-prescriptions")
(include-book "compound-recognizers")
(include-book "flexible-array-member")
(include-book "if-star")
(include-book "boolean-equality")
(include-book "hide")
(include-book "pointed-integers")
(include-book "sint-from-boolean")
(local (include-book "kestrel/built-ins/disable" :dir :system))
(local (acl2::disable-most-builtin-logic-defuns))
(local (acl2::disable-builtin-rewrite-rules-for-defaults))
(defxdoc+ atc-symbolic-execution-rules
:parents (atc-execution)
:short "Symbolic execution rules for ATC."
:long
(xdoc::topstring
(xdoc::p
"Currently, the generated proofs of function correctness
are carried out via symbolic execution of the C code.
The C code is a constant value,
this makes symbolic execution possible.")
(xdoc::p
"In order to make these generated proofs more robust,
we carry them out in a theory that consists exactly of
(what we believe to be) all and only the needed rules.
This file defines that theory.
This set of rules has been defined by
not only thinking of what is needed for symbolic execution,
but also experimenting with several symbolic execution proofs,
starting with the empty theory and adding rules
as needed to advance the symbolic execution,
and also by looking at the C dynamic semantics.
There is no guarantee (meta proof) that
there is also no guarantee that
the proof will not be defeated by some ACL2 heuristic in some cases.
Nonetheless, the proof strategy seems sound and robust,
and if a generated proof fails
it should be possible to (prove and) use additional rules.")
(xdoc::p
"Some of the rules that are used in the symbolic execution
rewrite calls of functions used in the deeply embedded dynamic semantics
into their shallowly embedded counterparts,
under hypothesis on the types of the arguments.
For instance, @('(exec-unary op x compst)')
is rewritten to @('(<op>-<type> x)')
when @('op') is the unary operation corresponding to @('<op>')
(unary plus, unary minus, bitwise complement, or logical complement),
and @('x') has type @('<type>').
These shallowly embedded counterparts are used
in the ACL2 functions from which C code is represented:
thus, the rewrite rules serve to turn (the execution of) the C code
into the ACL2 terms from which the C code is generated,
which is at the core of proving the correctness of the generated C code.")
(xdoc::p
"For recursive ACL2 functions that model C execution
(e.g. @(tsee exec-expr-pure)),
we introduce opener rules,
which include @(tsee syntaxp) hypotheses requiring that
the C abstract syntax being executed is a quoted constant.
Some of these opener rules include binding hypotheses,
which avoid symbolically executing the same pieces of C abstract syntax
multiple times in some situations.")
(xdoc::p
"We collect the rules in lists,
each of which serves a particular symbolic execution purpose.
Certain rules may appear in multiple lists,
when they serve multiple symbolic execution purposes.
The current organization and subdivision of the rules in these lists
is reasonable, but can (and will) certainly be improved"))
:order-subtopics t
:default-parent t)
(defval *atc-all-rules*
:short "List of all the (generic) rules for the proofs generated by ATC."
:long
(xdoc::topstring
(xdoc::p
"These are the ones used in all the generated proofs.
In addition, each proof includes a few additional rules
that depend on the specific C-representing ACL2 functions involved.
See @(see atc-implementation)."))
(append *atc-symbolic-computation-state-rules*
*atc-tyname-to-type-rules*
*atc-type-kind-rules*
*atc-valuep-rules*
*atc-value-listp-rules*
*atc-value-optionp-rules*
*atc-value-kind-rules*
*atc-type-of-value-rules*
*atc-type-of-value-option-rules*
*atc-value-array->elemtype-rules*
*atc-array-length-rules*
*atc-array-length-write-rules*
*atc-static-variable-pointer-rules*
*atc-exec-ident-rules*
*atc-exec-const-rules*
*atc-exec-arrsub-rules*
*atc-exec-unary-nonpointer-rules*
*atc-exec-indir-rules*
*atc-exec-cast-rules*
*atc-exec-binary-strict-pure-rules*
*atc-test-value-rules*
*atc-exec-expr-pure-rules*
*atc-exec-expr-pure-list-rules*
*atc-exec-expr-call-rules*
*atc-exec-expr-call-or-pure-rules*
*atc-exec-expr-asg-rules*
*atc-exec-expr-call-or-asg-rules*
*atc-exec-fun-rules*
*atc-exec-stmt-rules*
*atc-exec-initer-rules*
*atc-init-value-to-value-rules*
*atc-exec-block-item-rules*
*atc-exec-block-item-list-rules*
*atc-init-scope-rules*
*atc-adjust-type-rules*
*atc-other-executable-counterpart-rules*
*atc-wrapper-rules*
*atc-distributivity-over-if-rewrite-rules*
*atc-identifier-rules*
*atc-integer-const-rules*
*atc-integer-size-rules*
*atc-integer-ops-1-return-rewrite-rules*
*atc-integer-ops-2-return-rewrite-rules*
*atc-integer-convs-return-rewrite-rules*
*atc-array-read-return-rewrite-rules*
*atc-array-write-return-rewrite-rules*
*atc-integer-ops-1-type-prescription-rules*
*atc-integer-ops-2-type-prescription-rules*
*atc-integer-convs-type-prescription-rules*
*atc-array-read-type-prescription-rules*
*atc-misc-rewrite-rules*
*atc-type-prescription-rules*
*atc-compound-recognizer-rules*
*integer-value-disjoint-rules*
*array-value-disjoint-rules*
*atc-sint-from-boolean*
*atc-boolean-from-sint*
*atc-integer-ifix-rules*
*atc-limit-rules*
*atc-not-error-rules*
*atc-value-result-fix-rules*
*atc-lognot-sint-rules*
*atc-boolean-from-integer-return-rules*
*atc-integer-constructors-return-rules*
*atc-computation-state-return-rules*
*atc-value-fix-rules*
*atc-flexible-array-member-rules*
*atc-pointed-integer-rules*))
given that * ATC - ALL - RULES * consists of thousands of rules .
(deftheory atc-all-rules *atc-all-rules*)
|
bedd39cce66b2099d138723c38d34cdd1d87b2f36025b45ac8725f0eeaf82831 | Risto-Stevcev/bastet | Functions.ml | open Interface
let const, flip =
let open Function in
const, flip
and id =
let open Function.Category in
id
and ( <. ) = Function.Infix.( <. )
module Monoid (M : MONOID) = struct
module I = Infix.Magma (M)
let power =
(fun x p ->
let open I in
let rec go p =
match p with
| p when p <= 0 -> M.empty
| p when p = 1 -> x
| p when p mod 2 = 0 ->
let x' = go (p / 2) in
x' <:> x'
| _ ->
let x' = go (p / 2) in
x' <:> x' <:> x
in
go p
: M.t -> int -> M.t)
and guard =
(fun p a ->
match p with
| true -> a
| false -> M.empty
: bool -> M.t -> M.t)
end
module Functor (F : FUNCTOR) = struct
let void = (fun fa -> F.map (const ()) fa : 'a F.t -> unit F.t)
and void_right = (fun a fb -> F.map (const a) fb : 'a -> 'b F.t -> 'a F.t)
and void_left = (fun fa b -> F.map (const b) fa : 'a F.t -> 'b -> 'b F.t)
and flap = (fun fs a -> F.map (fun f -> f a) fs : ('a -> 'b) F.t -> 'a -> 'b F.t)
end
module Apply (A : APPLY) = struct
module I = Infix.Apply (A)
open I
let apply_first = (fun a b -> const <$> a <*> b : 'a A.t -> 'b A.t -> 'a A.t)
and apply_second = (fun a b -> const id <$> a <*> b : 'a A.t -> 'b A.t -> 'b A.t)
and apply_both =
(fun a b -> (fun a' b' -> a', b') <$> a <*> b : 'a A.t -> 'b A.t -> ('a * 'b) A.t)
and lift2 = (fun f a b -> f <$> a <*> b : ('a -> 'b -> 'c) -> 'a A.t -> 'b A.t -> 'c A.t)
and lift3 =
(fun f a b c -> f <$> a <*> b <*> c
: ('a -> 'b -> 'c -> 'd) -> 'a A.t -> 'b A.t -> 'c A.t -> 'd A.t)
and lift4 =
(fun f a b c d -> f <$> a <*> b <*> c <*> d
: ('a -> 'b -> 'c -> 'd -> 'e) -> 'a A.t -> 'b A.t -> 'c A.t -> 'd A.t -> 'e A.t)
and lift5 =
(fun f a b c d e -> f <$> a <*> b <*> c <*> d <*> e
: ('a -> 'b -> 'c -> 'd -> 'e -> 'f) ->
'a A.t ->
'b A.t ->
'c A.t ->
'd A.t ->
'e A.t ->
'f A.t)
module Infix = struct
let ( <* ) = apply_first
and ( *> ) = apply_second
end
end
module Apply' (A : APPLY) (T : TYPE) = struct
module F = Function.Apply (struct
type t = T.t
end)
module F' = Function.Apply (struct
type t = T.t A.t
end)
module Apply_F = Apply (F)
module Apply_A = Apply (A)
let apply_const =
(fun f x -> F'.apply Apply_A.apply_first f x : (T.t A.t -> 'a A.t) -> T.t A.t -> T.t A.t)
let apply_first =
(fun f g x -> Apply_F.lift2 Apply_A.apply_first f g x
: (T.t -> 'a A.t) -> (T.t -> 'b A.t) -> T.t -> 'a A.t)
and apply_second =
(fun f g x -> Apply_F.lift2 Apply_A.apply_second f g x
: (T.t -> 'a A.t) -> (T.t -> 'b A.t) -> T.t -> 'b A.t)
and apply_both =
(fun f g x -> Apply_F.lift2 Apply_A.apply_both f g x
: (T.t -> 'a A.t) -> (T.t -> 'b A.t) -> T.t -> ('a * 'b) A.t)
end
module Applicative (A : APPLICATIVE) = struct
module I = Infix.Apply (A)
let liftA1 =
(fun f fa ->
let open I in
A.pure f <*> fa
: ('a -> 'b) -> 'a A.t -> 'b A.t)
and when_ =
(fun p fa ->
match p with
| true -> fa
| false -> A.pure ()
: bool -> unit A.t -> unit A.t)
and unless =
(fun p fa ->
match not p with
| true -> fa
| false -> A.pure ()
: bool -> unit A.t -> unit A.t)
end
module Monad (M : MONAD) = struct
module I = Infix.Monad (M)
module A = Applicative (M)
let flatten =
(fun m ->
let open I in
m >>= id
: 'a M.t M.t -> 'a M.t)
and compose_kliesli =
(fun f g a ->
let open I in
f a >>= g
: ('a -> 'b M.t) -> ('b -> 'c M.t) -> 'a -> 'c M.t)
and compose_kliesli_flipped =
(fun f g a ->
let open I in
f =<< g a
: ('b -> 'c M.t) -> ('a -> 'b M.t) -> 'a -> 'c M.t)
and if_m =
(fun p t f ->
let open I in
p >>= fun p' ->
match p' with
| true -> t
| false -> f
: bool M.t -> 'a M.t -> 'a M.t -> 'a M.t)
and liftM1 =
(fun f fa ->
let open I in
fa >>= fun fa' -> M.pure (f fa')
: ('a -> 'b) -> 'a M.t -> 'b M.t)
and ap =
(fun f fa ->
let open I in
f >>= fun f' ->
fa >>= fun fa' -> M.pure (f' fa')
: ('a -> 'b) M.t -> 'a M.t -> 'b M.t)
and when_ =
(fun p fa ->
let open I in
p >>= fun p' -> A.when_ p' fa
: bool M.t -> unit M.t -> unit M.t)
and unless =
(fun p fa ->
let open I in
p >>= fun p' -> A.unless p' fa
: bool M.t -> unit M.t -> unit M.t)
end
module Foldable (F : FOLDABLE) = struct
module Semigroup (S : SEMIGROUP) = struct
module FM = F.Fold_Map_Any (Endo.Monoid)
module I = Infix.Magma (S)
let surround_map =
(fun ~delimiter f fa ->
let open I in
let joined a = Endo.Endo (fun m -> delimiter <:> f a <:> m) in
let (Endo.Endo fn) = FM.fold_map joined fa in
fn delimiter
: delimiter:S.t -> ('a -> S.t) -> 'a F.t -> S.t)
let surround =
(fun ~delimiter fa -> surround_map ~delimiter id fa : delimiter:S.t -> 'a F.t -> S.t)
end
module Monoid (M : MONOID) = struct
module FM = F.Fold_Map (M)
module I = Infix.Magma (M)
type acc = {
init : bool;
acc : M.t;
}
let fold = (FM.fold_map id : M.t F.t -> M.t)
and intercalate =
(fun ~separator xs ->
let go acc x =
match acc with
| { init = true; acc = _ } -> { init = false; acc = x }
| { init = _; acc = acc' } ->
let open I in
{ init = false; acc = acc' <:> separator <:> x }
in
(F.fold_left go { init = true; acc = M.empty } xs).acc
: separator:M.t -> M.t F.t -> M.t)
end
module Applicative (A : APPLICATIVE) = struct
module Fn = Apply (A)
let traverse' =
(fun f fa -> F.fold_right (Fn.apply_second <. f) (A.pure ()) fa
: ('a -> 'b A.t) -> 'a F.t -> unit A.t)
let sequence' = (fun fa -> traverse' id fa : 'a A.t F.t -> unit A.t)
end
module Plus (P : PLUS) = struct
let one_of = (fun fa -> F.fold_right P.alt P.empty fa : 'a P.t F.t -> 'a P.t)
end
module Monad (M : MONAD) = struct
module I = Infix.Monad (M)
let fold_monad =
(fun f a fa ->
let open I in
F.fold_left (fun acc x -> acc >>= flip f x) (M.pure a) fa
: ('a -> 'b -> 'a M.t) -> 'a -> 'b F.t -> 'a M.t)
end
end
module Traversable (T : TRAVERSABLE_F) = struct
module Internal = struct
type ('s, 'a) accum = {
accum : 's;
value : 'a;
}
type ('s, 'a) state = 's -> ('s, 'a) accum
let apply_state = (fun s a -> s a : ('s, 'a) state -> 's -> ('s, 'a) accum)
module State_Left (Type : TYPE) = struct
module Functor : FUNCTOR with type 'a t = (Type.t, 'a) state = struct
type 'a t = (Type.t, 'a) state
let map f k s =
match apply_state k s with
| { accum = s1; value = a } -> { accum = s1; value = f a }
end
module Apply : APPLY with type 'a t = (Type.t, 'a) state = struct
include Functor
let apply f x s =
match apply_state f s with
| { accum = s1; value = f' } -> (
match apply_state x s1 with
| { accum = s2; value = x' } -> { accum = s2; value = f' x' })
end
module Applicative : APPLICATIVE with type 'a t = (Type.t, 'a) state = struct
include Apply
let pure a s = { accum = s; value = a }
end
end
module State_Right (Type : TYPE) = struct
module Functor : FUNCTOR with type 'a t = (Type.t, 'a) state = struct
type 'a t = (Type.t, 'a) state
let map f k s =
match apply_state k s with
| { accum = s1; value = a } -> { accum = s1; value = f a }
end
module Apply : APPLY with type 'a t = (Type.t, 'a) state = struct
include Functor
let apply f x s =
match apply_state x s with
| { accum = s1; value = x' } -> (
match apply_state f s1 with
| { accum = s2; value = f' } -> { accum = s2; value = f' x' })
end
module Applicative : APPLICATIVE with type 'a t = (Type.t, 'a) state = struct
include Apply
let pure a s = { accum = s; value = a }
end
end
module Map_Accum (Type : TYPE) (T : TRAVERSABLE_F) = struct
module SL = State_Left (struct
type t = Type.t
end)
module SR = State_Right (struct
type t = Type.t
end)
module TSL = T (SL.Applicative)
module TSR = T (SR.Applicative)
let map_accum_left =
(fun f s xs -> apply_state (TSL.traverse (fun a s' -> f s' a) xs) s
: ('s -> 'a -> ('s, 'b) accum) -> 's -> 'a TSL.t -> ('s, 'b TSL.t) accum)
and map_accum_right =
(fun f s xs -> apply_state (TSR.traverse (fun a s' -> f s' a) xs) s
: ('s -> 'a -> ('s, 'b) accum) -> 's -> 'a TSR.t -> ('s, 'b TSR.t) accum)
end
end
module Scan (Type : TYPE) = struct
module MA =
Internal.Map_Accum
(struct
type t = Type.t
end)
(T)
let scan_left =
(fun f init xs ->
(MA.map_accum_left
(fun b a ->
let b' = f b a in
{ accum = b'; value = b' })
init
xs)
.value
: ('b -> 'a -> 'b) -> 'b -> 'a MA.TSL.t -> 'b MA.TSL.t)
and scan_right =
(fun f init xs ->
(MA.map_accum_right
(fun b a ->
let b' = f a b in
{ accum = b'; value = b' })
init
xs)
.value
: ('a -> 'b -> 'b) -> 'b -> 'a MA.TSR.t -> 'b MA.TSR.t)
end
end
module Infix = struct
module Apply (A : APPLY) = struct
module Functions = Apply (A)
let ( <* ) = Functions.apply_first
and ( *> ) = Functions.apply_second
end
module Monad (M : MONAD) = struct
module Functions = Infix.Monad (M)
let ( >=> ), ( <=< ) =
let open Functions in
( >=> ), ( <=< )
end
module Void (F : FUNCTOR) = struct
module Functions = Functor (F)
let ( $> ) = Functions.void_left
and ( <$ ) = Functions.void_right
and ( <@> ) = Functions.flap
end
end
| null | https://raw.githubusercontent.com/Risto-Stevcev/bastet/030db286f57d2e316897f0600d40b34777eabba6/bastet/src/Functions.ml | ocaml | open Interface
let const, flip =
let open Function in
const, flip
and id =
let open Function.Category in
id
and ( <. ) = Function.Infix.( <. )
module Monoid (M : MONOID) = struct
module I = Infix.Magma (M)
let power =
(fun x p ->
let open I in
let rec go p =
match p with
| p when p <= 0 -> M.empty
| p when p = 1 -> x
| p when p mod 2 = 0 ->
let x' = go (p / 2) in
x' <:> x'
| _ ->
let x' = go (p / 2) in
x' <:> x' <:> x
in
go p
: M.t -> int -> M.t)
and guard =
(fun p a ->
match p with
| true -> a
| false -> M.empty
: bool -> M.t -> M.t)
end
module Functor (F : FUNCTOR) = struct
let void = (fun fa -> F.map (const ()) fa : 'a F.t -> unit F.t)
and void_right = (fun a fb -> F.map (const a) fb : 'a -> 'b F.t -> 'a F.t)
and void_left = (fun fa b -> F.map (const b) fa : 'a F.t -> 'b -> 'b F.t)
and flap = (fun fs a -> F.map (fun f -> f a) fs : ('a -> 'b) F.t -> 'a -> 'b F.t)
end
module Apply (A : APPLY) = struct
module I = Infix.Apply (A)
open I
let apply_first = (fun a b -> const <$> a <*> b : 'a A.t -> 'b A.t -> 'a A.t)
and apply_second = (fun a b -> const id <$> a <*> b : 'a A.t -> 'b A.t -> 'b A.t)
and apply_both =
(fun a b -> (fun a' b' -> a', b') <$> a <*> b : 'a A.t -> 'b A.t -> ('a * 'b) A.t)
and lift2 = (fun f a b -> f <$> a <*> b : ('a -> 'b -> 'c) -> 'a A.t -> 'b A.t -> 'c A.t)
and lift3 =
(fun f a b c -> f <$> a <*> b <*> c
: ('a -> 'b -> 'c -> 'd) -> 'a A.t -> 'b A.t -> 'c A.t -> 'd A.t)
and lift4 =
(fun f a b c d -> f <$> a <*> b <*> c <*> d
: ('a -> 'b -> 'c -> 'd -> 'e) -> 'a A.t -> 'b A.t -> 'c A.t -> 'd A.t -> 'e A.t)
and lift5 =
(fun f a b c d e -> f <$> a <*> b <*> c <*> d <*> e
: ('a -> 'b -> 'c -> 'd -> 'e -> 'f) ->
'a A.t ->
'b A.t ->
'c A.t ->
'd A.t ->
'e A.t ->
'f A.t)
module Infix = struct
let ( <* ) = apply_first
and ( *> ) = apply_second
end
end
module Apply' (A : APPLY) (T : TYPE) = struct
module F = Function.Apply (struct
type t = T.t
end)
module F' = Function.Apply (struct
type t = T.t A.t
end)
module Apply_F = Apply (F)
module Apply_A = Apply (A)
let apply_const =
(fun f x -> F'.apply Apply_A.apply_first f x : (T.t A.t -> 'a A.t) -> T.t A.t -> T.t A.t)
let apply_first =
(fun f g x -> Apply_F.lift2 Apply_A.apply_first f g x
: (T.t -> 'a A.t) -> (T.t -> 'b A.t) -> T.t -> 'a A.t)
and apply_second =
(fun f g x -> Apply_F.lift2 Apply_A.apply_second f g x
: (T.t -> 'a A.t) -> (T.t -> 'b A.t) -> T.t -> 'b A.t)
and apply_both =
(fun f g x -> Apply_F.lift2 Apply_A.apply_both f g x
: (T.t -> 'a A.t) -> (T.t -> 'b A.t) -> T.t -> ('a * 'b) A.t)
end
module Applicative (A : APPLICATIVE) = struct
module I = Infix.Apply (A)
let liftA1 =
(fun f fa ->
let open I in
A.pure f <*> fa
: ('a -> 'b) -> 'a A.t -> 'b A.t)
and when_ =
(fun p fa ->
match p with
| true -> fa
| false -> A.pure ()
: bool -> unit A.t -> unit A.t)
and unless =
(fun p fa ->
match not p with
| true -> fa
| false -> A.pure ()
: bool -> unit A.t -> unit A.t)
end
module Monad (M : MONAD) = struct
module I = Infix.Monad (M)
module A = Applicative (M)
let flatten =
(fun m ->
let open I in
m >>= id
: 'a M.t M.t -> 'a M.t)
and compose_kliesli =
(fun f g a ->
let open I in
f a >>= g
: ('a -> 'b M.t) -> ('b -> 'c M.t) -> 'a -> 'c M.t)
and compose_kliesli_flipped =
(fun f g a ->
let open I in
f =<< g a
: ('b -> 'c M.t) -> ('a -> 'b M.t) -> 'a -> 'c M.t)
and if_m =
(fun p t f ->
let open I in
p >>= fun p' ->
match p' with
| true -> t
| false -> f
: bool M.t -> 'a M.t -> 'a M.t -> 'a M.t)
and liftM1 =
(fun f fa ->
let open I in
fa >>= fun fa' -> M.pure (f fa')
: ('a -> 'b) -> 'a M.t -> 'b M.t)
and ap =
(fun f fa ->
let open I in
f >>= fun f' ->
fa >>= fun fa' -> M.pure (f' fa')
: ('a -> 'b) M.t -> 'a M.t -> 'b M.t)
and when_ =
(fun p fa ->
let open I in
p >>= fun p' -> A.when_ p' fa
: bool M.t -> unit M.t -> unit M.t)
and unless =
(fun p fa ->
let open I in
p >>= fun p' -> A.unless p' fa
: bool M.t -> unit M.t -> unit M.t)
end
module Foldable (F : FOLDABLE) = struct
module Semigroup (S : SEMIGROUP) = struct
module FM = F.Fold_Map_Any (Endo.Monoid)
module I = Infix.Magma (S)
let surround_map =
(fun ~delimiter f fa ->
let open I in
let joined a = Endo.Endo (fun m -> delimiter <:> f a <:> m) in
let (Endo.Endo fn) = FM.fold_map joined fa in
fn delimiter
: delimiter:S.t -> ('a -> S.t) -> 'a F.t -> S.t)
let surround =
(fun ~delimiter fa -> surround_map ~delimiter id fa : delimiter:S.t -> 'a F.t -> S.t)
end
module Monoid (M : MONOID) = struct
module FM = F.Fold_Map (M)
module I = Infix.Magma (M)
type acc = {
init : bool;
acc : M.t;
}
let fold = (FM.fold_map id : M.t F.t -> M.t)
and intercalate =
(fun ~separator xs ->
let go acc x =
match acc with
| { init = true; acc = _ } -> { init = false; acc = x }
| { init = _; acc = acc' } ->
let open I in
{ init = false; acc = acc' <:> separator <:> x }
in
(F.fold_left go { init = true; acc = M.empty } xs).acc
: separator:M.t -> M.t F.t -> M.t)
end
module Applicative (A : APPLICATIVE) = struct
module Fn = Apply (A)
let traverse' =
(fun f fa -> F.fold_right (Fn.apply_second <. f) (A.pure ()) fa
: ('a -> 'b A.t) -> 'a F.t -> unit A.t)
let sequence' = (fun fa -> traverse' id fa : 'a A.t F.t -> unit A.t)
end
module Plus (P : PLUS) = struct
let one_of = (fun fa -> F.fold_right P.alt P.empty fa : 'a P.t F.t -> 'a P.t)
end
module Monad (M : MONAD) = struct
module I = Infix.Monad (M)
let fold_monad =
(fun f a fa ->
let open I in
F.fold_left (fun acc x -> acc >>= flip f x) (M.pure a) fa
: ('a -> 'b -> 'a M.t) -> 'a -> 'b F.t -> 'a M.t)
end
end
module Traversable (T : TRAVERSABLE_F) = struct
module Internal = struct
type ('s, 'a) accum = {
accum : 's;
value : 'a;
}
type ('s, 'a) state = 's -> ('s, 'a) accum
let apply_state = (fun s a -> s a : ('s, 'a) state -> 's -> ('s, 'a) accum)
module State_Left (Type : TYPE) = struct
module Functor : FUNCTOR with type 'a t = (Type.t, 'a) state = struct
type 'a t = (Type.t, 'a) state
let map f k s =
match apply_state k s with
| { accum = s1; value = a } -> { accum = s1; value = f a }
end
module Apply : APPLY with type 'a t = (Type.t, 'a) state = struct
include Functor
let apply f x s =
match apply_state f s with
| { accum = s1; value = f' } -> (
match apply_state x s1 with
| { accum = s2; value = x' } -> { accum = s2; value = f' x' })
end
module Applicative : APPLICATIVE with type 'a t = (Type.t, 'a) state = struct
include Apply
let pure a s = { accum = s; value = a }
end
end
module State_Right (Type : TYPE) = struct
module Functor : FUNCTOR with type 'a t = (Type.t, 'a) state = struct
type 'a t = (Type.t, 'a) state
let map f k s =
match apply_state k s with
| { accum = s1; value = a } -> { accum = s1; value = f a }
end
module Apply : APPLY with type 'a t = (Type.t, 'a) state = struct
include Functor
let apply f x s =
match apply_state x s with
| { accum = s1; value = x' } -> (
match apply_state f s1 with
| { accum = s2; value = f' } -> { accum = s2; value = f' x' })
end
module Applicative : APPLICATIVE with type 'a t = (Type.t, 'a) state = struct
include Apply
let pure a s = { accum = s; value = a }
end
end
module Map_Accum (Type : TYPE) (T : TRAVERSABLE_F) = struct
module SL = State_Left (struct
type t = Type.t
end)
module SR = State_Right (struct
type t = Type.t
end)
module TSL = T (SL.Applicative)
module TSR = T (SR.Applicative)
let map_accum_left =
(fun f s xs -> apply_state (TSL.traverse (fun a s' -> f s' a) xs) s
: ('s -> 'a -> ('s, 'b) accum) -> 's -> 'a TSL.t -> ('s, 'b TSL.t) accum)
and map_accum_right =
(fun f s xs -> apply_state (TSR.traverse (fun a s' -> f s' a) xs) s
: ('s -> 'a -> ('s, 'b) accum) -> 's -> 'a TSR.t -> ('s, 'b TSR.t) accum)
end
end
(* Prefix / suffix scans over a traversable, built on [Map_Accum]: each
   element is replaced by the running fold result up to (resp. from) its
   position.  The final accumulator itself is discarded ([.value]). *)
module Scan (Type : TYPE) = struct
  module MA =
    Internal.Map_Accum
      (struct
        type t = Type.t
      end)
      (T)

  (* [scan_left f init xs]: element [i] becomes the left fold of [f] over
     [init] and the elements up to and including [xs_i]. *)
  let scan_left =
    (fun f init xs ->
       (MA.map_accum_left
          (fun b a ->
            let b' = f b a in
            { accum = b'; value = b' })
          init
          xs)
         .value
      : ('b -> 'a -> 'b) -> 'b -> 'a MA.TSL.t -> 'b MA.TSL.t)

  (* [scan_right f init xs]: same idea, folding from the right. *)
  and scan_right =
    (fun f init xs ->
       (MA.map_accum_right
          (fun b a ->
            let b' = f a b in
            { accum = b'; value = b' })
          init
          xs)
         .value
      : ('a -> 'b -> 'b) -> 'b -> 'a MA.TSR.t -> 'b MA.TSR.t)
end
end
(* Infix operator bundles, intended to be opened locally where the
   symbolic forms read better than the named functions. *)
module Infix = struct
  (* [<*] / [*>] sequence two applicative computations, keeping only the
     left (resp. right) result. *)
  module Apply (A : APPLY) = struct
    module Functions = Apply (A)

    let ( <* ) = Functions.apply_first
    and ( *> ) = Functions.apply_second
  end

  (* Kleisli composition, forward ([>=>]) and backward ([<=<]). *)
  module Monad (M : MONAD) = struct
    module Functions = Infix.Monad (M)

    let ( >=> ), ( <=< ) =
      let open Functions in
      ( >=> ), ( <=< )
  end

  (* [$>] / [<$] replace a functor's contents with a constant; [<@>] is
     [flap] (apply a functor of functions to a single argument). *)
  module Void (F : FUNCTOR) = struct
    module Functions = Functor (F)

    let ( $> ) = Functions.void_left
    and ( <$ ) = Functions.void_right
    and ( <@> ) = Functions.flap
  end
end
| |
b592f76eb861f14be1ca91d8662edbf34af6a514e29e02fcd826578f43e8dd04 | tweag/linear-base | Linear.hs | # LANGUAGE MagicHash #
{-# LANGUAGE NoImplicitPrelude #-}

-- | This module defines a stream-like type named 'Replicator', which is
-- mainly used in the definition of the 'Data.Unrestricted.Linear.Dupable'
-- class to provide efficient linear duplication.
--
-- The API of 'Replicator' is close to the one of an infinite stream: it
-- can either produce a new value linearly (with 'next' or 'next#'), or be
-- linearly discarded (with 'consume' or 'extract').
--
-- A crucial aspect, from a performance standpoint, is that the 'pure' function
-- (which takes an unrestricted argument) is implemented efficiently: the
-- 'Replicator' returns /the same/ value on each call to 'next'. That is, the
-- pointer is always shared. This will allow 'Data.Unrestricted.Linear.Movable'
-- types to be given an efficient instance of 'Data.Unrestricted.Linear.Dupable'.
-- Instances of both 'Data.Unrestricted.Linear.Movable' and
-- 'Data.Unrestricted.Linear.Dupable' typically involve deep copies. The
-- implementation of 'pure' lets us make sure that, for @Movable@ types, only
-- one deep copy is performed, rather than one per additional replica.
--
-- Strictly speaking, the implementation of '(<*>)' plays a role in all this as
-- well: for two 'pure' 'Replicator's @fs@ and @as@, @fs \<*\> as@ is a pure
-- 'Replicator'. Together, 'pure' and '(<*>)' form the
-- 'Data.Functor.Linear.Applicative' instance of 'Replicator'.
module Data.Replicator.Linear
  ( Replicator,
    consume,
    duplicate,
    map,
    pure,
    (<*>),
    next,
    next#,
    take,
    extract,
    extend,
    Elim,
    elim,
  )
where

import Data.Replicator.Linear.Internal
import Data.Replicator.Linear.Internal.Instances ()
| null | https://raw.githubusercontent.com/tweag/linear-base/69f1b73f852dac5fbdd7294dd8d709f73c634efb/src/Data/Replicator/Linear.hs | haskell | | This module defines a stream-like type named 'Replicator', which is
class to provide efficient linear duplication.
The API of 'Replicator' is close to the one of an infinite stream: it
can either produce a new value linearly (with 'next' or 'next#'), or be
linearly discarded (with 'consume' or 'extract').
A crucial aspect, from a performance standpoint, is that the 'pure' function
(which takes an unrestricted argument) is implemented efficiently: the
'Replicator' returns /the same/ value on each call to 'next'. That is, the
pointer is always shared. This will allow 'Data.Unrestricted.Linear.Movable'
Instances of both 'Data.Unrestricted.Linear.Movable' and
Strictly speaking, the implementation of '(<*>)' plays a role in all this as
well:
'Replicator'. Together, 'pure' and '(<*>)' form the
'Data.Functor.Linear.Applicative' instance of 'Replicator'. | # LANGUAGE MagicHash #
# LANGUAGE NoImplicitPrelude #
mainly used in the definition of the ' Data . Unrestricted . Linear . '
types to be given an efficient instance of ' Data . Unrestricted . Linear . ' .
' Data . Unrestricted . Linear . ' typically involve deep copies . The
implementation of ' pure ' lets us make sure that , for @Movable@ types , only one
deep copy is performed , rather than one per additional replica .
For two ' pure ' ' Replicators ' @fs@ and @as@ , @fs \<*\ > as@ is a pure
module Data.Replicator.Linear
( Replicator,
consume,
duplicate,
map,
pure,
(<*>),
next,
next#,
take,
extract,
extend,
Elim,
elim,
)
where
import Data.Replicator.Linear.Internal
import Data.Replicator.Linear.Internal.Instances ()
|
b72b8b8e96c4ea1d4aeca046d37f4905a1ba6d2245122fcd003e7e4f825b6285 | Ramarren/cells | hello-world-q.lisp | ;; -*- mode: Lisp; Syntax: Common-Lisp; Package: cells; -*-
;;;
;;;
Copyright ( c ) 1995,2003 by .
;;;
;;; Permission is hereby granted, free of charge, to any person obtaining a copy
;;; of this software and associated documentation files (the "Software"), to deal
in the Software without restriction , including without limitation the rights
;;; to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software , and to permit persons to whom the Software is furnished
;;; to do so, subject to the following conditions:
;;;
;;; The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
;;;
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
;;; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
;;; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
;;; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
;;; FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
;;; IN THE SOFTWARE.
(in-package :cells)
;;;
( defstrudel computer
( happen : cell : ephemeral : initform ( c - in nil ) )
;;; (location :cell t
: initform ( c ? ( case ( ^happen )
;;; (:leave :away)
;;; (:arrive :at-home)
;;; (t (c-value c))))
;;; :accessor location)
( response : cell : ephemeral : initform nil : initarg : response : accessor response ) ) )
;; Observer on the COMPUTER's RESPONSE slot: print every fresh response.
;; RESPONSE is declared :ephemeral, so it resets after propagation and only
;; non-nil (new) responses reach the WHEN body.
(def-c-output response((self computer) new-response old-response)
  (when new-response
    (format t "~&computer: ~a" new-response)))
;; Observer on the HAPPEN slot: echo each event as it occurs.
;; NOTE(review): NEW-VALUE does not appear in the parameter list; this
;; relies on DEF-C-OUTPUT supplying the missing argument names
;; anaphorically -- confirm against the Cells DEF-C-OUTPUT macro.
(def-c-output happen((self computer))
  (when new-value
    (format t "~&happen: ~a" new-value)))
;; Demo: a COMPUTER whose RESPONSE rule answers door-knock events, but only
;; while "at home".  Events are pushed by SETF-ing the ephemeral HAPPEN slot.
;; NOTE(review): the rule reads (^LOCATION), yet the LOCATION slot in the
;; class definition above is commented out -- as written this will fail at
;; runtime unless that slot is restored.
(defun hello-world-q ()
  (let ((dell (make-instance 'computer
                :response (c? (bwhen (h (happen self))
                                (if (eql (^location) :at-home)
                                    (case h
                                      (:knock-knock "who's there?")
                                      (:world "hello, world."))
                                    "<silence>"))))))
    ;; Two knocks while away, then arrive, knock again, and greet.
    (dotimes (n 2)
      (setf (happen dell) :knock-knock))
    (setf (happen dell) :arrive)
    (setf (happen dell) :knock-knock)
    (setf (happen dell) :world)
    (values)))
#+(or)
(hello-world)
#+(or)
(traceo sm-echo)
#| output
happen: knock-knock
computer: <silence>
happen: knock-knock
computer: <silence>
happen: arrive
happen: knock-knock
computer: who's there?
happen: world
computer: hello, world.
|#
| null | https://raw.githubusercontent.com/Ramarren/cells/cced2e55c363572914358c0a693ebac2caed4e22/cells-test/hello-world-q.lisp | lisp | -*- mode: Lisp; Syntax: Common-Lisp; Package: cells; -*-
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
(location :cell t
(:leave :away)
(:arrive :at-home)
(t (c-value c))))
:accessor location)
output
happen: knock-knock
computer: <silence>
happen: knock-knock
computer: <silence>
happen: arrive
happen: knock-knock
computer: who's there?
happen: world
computer: hello, world.
| Copyright ( c ) 1995,2003 by .
in the Software without restriction , including without limitation the rights
copies of the Software , and to permit persons to whom the Software is furnished
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(in-package :cells)
( defstrudel computer
( happen : cell : ephemeral : initform ( c - in nil ) )
: initform ( c ? ( case ( ^happen )
( response : cell : ephemeral : initform nil : initarg : response : accessor response ) ) )
(def-c-output response((self computer) new-response old-response)
(when new-response
(format t "~&computer: ~a" new-response)))
(def-c-output happen((self computer))
(when new-value
(format t "~&happen: ~a" new-value)))
(defun hello-world-q ()
(let ((dell (make-instance 'computer
:response (c? (bwhen (h (happen self))
(if (eql (^location) :at-home)
(case h
(:knock-knock "who's there?")
(:world "hello, world."))
"<silence>"))))))
(dotimes (n 2)
(setf (happen dell) :knock-knock))
(setf (happen dell) :arrive)
(setf (happen dell) :knock-knock)
(setf (happen dell) :world)
(values)))
#+(or)
(hello-world)
#+(or)
(traceo sm-echo)
|
de23ece90810e52de02ada6aeaeac0ff0e58fdd88f7505b5015aed39303ef926 | hyperfiddle/electric | compiler2.cljc | (ns dustin.compiler2
(:require [minitest :refer [tests]]
[missionary.core :as m]
[dustin.trace25 :refer [from-trace!]]))
;; nil-safe conj helpers: conjv treats a nil collection as [], conjs as #{}.
(def conjv (fnil conj []))
(def conjs (fnil conj #{}))
(defn parenting
  "Record the parent/child relationship between two node indices in `acc`:
  sets the child's :parent and appends the child to the parent's :children."
  [acc parent child]
  (let [with-parent (assoc-in acc [child :parent] parent)]
    (update-in with-parent [parent :children] conjv child)))
(defn analyze-form
  "Flatten `form` into a vector of node maps, depth-first.  Each node has
  :type ('fmap, 'bind or 'user), its source :form, and for applicative
  nodes the function :f.  Parent/child links between indices are recorded
  via `parenting`.  Returns the extended `nodes` vector."
  [nodes form]
  (let [idx (count nodes)]
    (if (coll? form)
      (let [[f & args] form]
        (case f
          ;; (fmap f & inputs): one node for the fmap itself, then analyze
          ;; each input form, wiring it up as a child of idx.
          fmap (reduce (fn [nodes form]
                         (let [child-index (count nodes)
                               nodes (analyze-form nodes form)]
                           (parenting nodes idx child-index)))
                       (conj nodes
                             {:type 'fmap
                              :form form
                              :f (first args)})
                       (next args))
          ;; (bind input f): one node for the bind, whose single child is
          ;; the analyzed input form.
          bind (let [nodes (conj nodes
                                 {:type 'bind
                                  :form form
                                  :f (second args)})
                     child-index (count nodes)
                     nodes (analyze-form nodes (first args))]
                 (parenting nodes idx child-index))))
      (if (symbol? form)
        ;; A bare symbol names a user-supplied input flow.
        (conj nodes {:type 'user
                     :form form})
        (throw (ex-info "Unknown form." {:form form}))))))
(defn analyze
  "Analyze a dataflow `form` into a flat vector of node maps (see
  `analyze-form`), with the root node at index 0."
  [form]
  (analyze-form [] form))
(defn source-map
  "Map each sub-form of the analyzed `form` to the set of node indices it
  occupies in the flattened AST."
  [form]
  (reduce-kv (fn [r i x] (update r (:form x) conjs i)) {} (analyze form)))
(tests
(analyze '(fmap + >a >b))
:=
'[{:type fmap, :form (fmap + >a >b), :f +, :children [1 2]}
{:type user, :form >a, :parent 0}
{:type user, :form >b, :parent 0}]
(source-map '(fmap + >a >b)) :=
'{(fmap + >a >b) #{0},
>a #{1},
>b #{2}}
)
;;;;;;;;;;;;;
;; RUNTIME ;;
;;;;;;;;;;;;;
(defmacro amb=
  "Missionary fork: evaluates to each of `forms`, one per parallel branch
  (m/?= forks the ambient process across the enumerated branch indices)."
  [& forms]
  `(case (m/?= (m/enumerate (range ~(count forms))))
     ~@(interleave (range) forms)))
(defn bind
  "Monadic bind over flows: for each value of `m`, switch to the flow
  returned by `f`.  Backpressure is relieved with `{}` as the combiner,
  i.e. ({} acc v) => v, so only the latest value is retained."
  [m f]
  (m/relieve {} (m/ap (m/?! (f (m/?! m))))))
(defn trace!
  "Spawn a stream that calls `tracef` (for side effect) on every effect
  map emitted by `>effects`."
  [tracef >effects]
  (m/stream! (m/ap (tracef (m/?? >effects)))))
;;;;;;;;;;;;;
EMITTER ; ;
;;;;;;;;;;;;;
(defn prefixer
  "Build the symbol naming slot `index` within a prefixed group, e.g.
  (prefixer '>node 0) => '>node_0."
  [prefix index]
  (symbol (format "%s_%s" prefix index)))
(defn gen-trace-pairs
  "For each analyzed node, emit code for a single-entry map from the node's
  path [idx] to the node flow's current value ((m/?? <flow-binding>))."
  [prefixf analyzed-ast]
  (map-indexed (fn [idx _] `{[~idx] (m/?? ~(prefixf idx))}) analyzed-ast))
(tests
(gen-trace-pairs (partial prefixer '>node)
'[[0 _]
[1 _]])
:= [{[0] `(m/?? ~'>node_0)}
{[1] `(m/?? ~'>node_1)}])
(defn gen-trace
  "Emit code that merges every node's {path value} entry into one stream of
  effect maps and feeds them to the generated tracef binding via `trace!`."
  [prefixf analyzed-ast]
  `(trace! ~(prefixf 'tracef)
           (m/stream! (m/relieve merge (m/ap (amb= ~@(gen-trace-pairs prefixf analyzed-ast)))))))
(defn emit-bindings
  "Emit one let-binding pair per node, in reverse index order so that
  children (higher indices) are bound before the parents that consume
  them.  Node indices in `passives` are not computed locally: they are
  replayed from the trace via `from-trace!` instead."
  [prefixf analyzed-ast passives]
  (reverse
   (map-indexed
    (fn [idx {:keys [type form f children]}]
      (if (contains? passives idx)
        `[~(prefixf idx) (from-trace! [~idx] ~(prefixf 'replayer))]
        (case type
          bind `[~(prefixf idx) (m/signal! (bind ~(prefixf (first children)) ~f))]
          fmap `[~(prefixf idx) (m/signal! (m/latest ~f ~@(map prefixf children)))]
          user `[~(prefixf idx) (m/signal! ~form)])))
    analyzed-ast)))
(defn emit
  "Compile an analyzed AST into code for a two-argument fn
  [replayer tracef] that builds the reactive node graph (see
  `emit-bindings`) and traces every node's values (see `gen-trace`)."
  [{:keys [analyzed-ast prefix passives]
    :or {prefix (gensym)}}]
  (let [prefixf (partial prefixer prefix)
        bindings (mapcat identity (emit-bindings prefixf analyzed-ast passives))]
    `(fn ~(mapv prefixf ['replayer 'tracef])
       (let [~@bindings]
         ~(gen-trace prefixf analyzed-ast)))))
(tests
(emit {:analyzed-ast (analyze '(fmap clojure.core/+ >a >b))
:prefix '>node
:passives #{1 2}})
:=
`(fn [~'>node_replayer ~'>node_tracef]
(let [~'>node_2 (from-trace! [2] ~'>node_replayer)
~'>node_1 (from-trace! [1] ~'>node_replayer)
~'>node_0 (m/signal! (m/latest + ~'>node_1 ~'>node_2))]
(trace! ~'>node_tracef (m/stream! (m/relieve merge (m/ap (amb= {[0] (m/?? ~'>node_0)}
{[1] (m/?? ~'>node_1)}
{[2] (m/?? ~'>node_2)}))))))))
(tests
(emit {:analyzed-ast (analyze `(~'fmap + >a >b))
:prefix '>node})
:=
`(fn [~'>node_replayer ~'>node_tracef]
(let [~'>node_2 (m/signal! >b)
~'>node_1 (m/signal! >a)
~'>node_0 (m/signal! (m/latest + ~'>node_1 ~'>node_2))]
(trace! ~'>node_tracef (m/stream! (m/relieve merge (m/ap (amb= {[0] (m/?? ~'>node_0)}
{[1] (m/?? ~'>node_1)}
{[2] (m/?? ~'>node_2)}))))))))
(defprotocol Observable
  "Registry of listener callbacks that are notified with each effect map."
  (subscribe! [this listenf] "Register `listenf` to receive effect maps.")
  (unsubscribe! [this listenf] "Remove a previously registered listener."))
(defprotocol IReplay
  "Injection point for replaying previously captured trace effects."
  (replay! [this effect] "Push a frame of trace effects to all replayers."))
(defn log!
  "Subscribe to `reactor` and accumulate every emitted effect map into a
  vector.  Returns the atom holding the growing trace."
  [reactor]
  (let [!trace (atom [])
        record (fn [effects] (swap! !trace conj effects))]
    (subscribe! reactor record)
    !trace))
(deftype Reactor [cancel      ;; thunk that stops the underlying m/reactor
                  !callbacks  ;; atom of set: effect listeners (Observable)
                  !replayers] ;; atom of set: sinks feeding >replayer inside the reactor
  IReplay
  (replay! [_ frame-effects]
    (doseq [cb @!replayers]
      (cb frame-effects)))
  Observable
  (subscribe! [_ f] (swap! !callbacks conj f))
  (unsubscribe! [_ f] (swap! !callbacks disj f)))
(defn reactor!
  "Boot a missionary reactor around `initf`, which is called with
  [>replayer tracef]: `>replayer` is a flow of frames pushed in from
  `replay!`, and `tracef` broadcasts each traced effect map to all
  subscribers.  Returns a Reactor handle supporting subscribe!,
  unsubscribe! and replay!."
  [initf]
  (let [!callbacks (atom #{})
        !replayers (atom #{})
        task (m/reactor
              ;; Expose external replay! calls as a flow inside the reactor.
              (let [>replayer (m/stream! (m/observe (fn [cb]
                                                      (swap! !replayers conj cb)
                                                      (fn []
                                                        (swap! !replayers disj cb)))))]
                (initf >replayer
                       (fn [effects]
                         (doseq [cb @!callbacks]
                           (cb effects))))))
        cancel (task (fn [_] (prn "Success"))
                     prn)]
    (->Reactor cancel !callbacks !replayers)))
(defmacro dataflow
  "Analyze `ast` at macro-expansion time, compile it with `emit`, and boot
  a reactor running it.  Node indices in `passives` are replayed from a
  trace rather than computed."
  [ast & [passives]]
  `(reactor! ~(emit {:analyzed-ast (analyze ast)
                     :passives passives})))
| null | https://raw.githubusercontent.com/hyperfiddle/electric/1c6c3891cbf13123fef8d33e6555d300f0dac134/scratch/dustin/y2021/compiler2.cljc | clojure |
RUNTIME ;;
;
Stop the reactor | (ns dustin.compiler2
(:require [minitest :refer [tests]]
[missionary.core :as m]
[dustin.trace25 :refer [from-trace!]]))
(def conjv (fnil conj []))
(def conjs (fnil conj #{}))
(defn parenting [acc parent child]
(-> acc
(assoc-in [child :parent] parent)
(update-in [parent :children] conjv child)))
(defn analyze-form [nodes form]
(let [idx (count nodes)]
(if (coll? form)
(let [[f & args] form]
(case f
fmap (reduce (fn [nodes form]
(let [child-index (count nodes)
nodes (analyze-form nodes form)]
(parenting nodes idx child-index)))
(conj nodes
{:type 'fmap
:form form
:f (first args)})
(next args))
bind (let [nodes (conj nodes
{:type 'bind
:form form
:f (second args)})
child-index (count nodes)
nodes (analyze-form nodes (first args))]
(parenting nodes idx child-index))))
(if (symbol? form)
(conj nodes {:type 'user
:form form})
(throw (ex-info "Unknown form." {:form form}))))))
(defn analyze [form]
(analyze-form [] form))
(defn source-map [form]
(reduce-kv (fn [r i x] (update r (:form x) conjs i)) {} (analyze form)))
(tests
(analyze '(fmap + >a >b))
:=
'[{:type fmap, :form (fmap + >a >b), :f +, :children [1 2]}
{:type user, :form >a, :parent 0}
{:type user, :form >b, :parent 0}]
(source-map '(fmap + >a >b)) :=
'{(fmap + >a >b) #{0},
>a #{1},
>b #{2}}
)
(defmacro amb= [& forms]
`(case (m/?= (m/enumerate (range ~(count forms))))
~@(interleave (range) forms)))
(defn bind [m f]
(m/relieve {} (m/ap (m/?! (f (m/?! m))))))
(defn trace! [tracef >effects]
(m/stream! (m/ap (tracef (m/?? >effects)))))
(defn prefixer [prefix index]
(symbol (str prefix "_" index)))
(defn gen-trace-pairs [prefixf analyzed-ast]
(map-indexed (fn [idx _] `{[~idx] (m/?? ~(prefixf idx))}) analyzed-ast))
(tests
(gen-trace-pairs (partial prefixer '>node)
'[[0 _]
[1 _]])
:= [{[0] `(m/?? ~'>node_0)}
{[1] `(m/?? ~'>node_1)}])
(defn gen-trace [prefixf analyzed-ast]
`(trace! ~(prefixf 'tracef)
(m/stream! (m/relieve merge (m/ap (amb= ~@(gen-trace-pairs prefixf analyzed-ast)))))))
(defn emit-bindings [prefixf analyzed-ast passives]
(reverse
(map-indexed
(fn [idx {:keys [type form f children]}]
(if (contains? passives idx)
`[~(prefixf idx) (from-trace! [~idx] ~(prefixf 'replayer))]
(case type
bind `[~(prefixf idx) (m/signal! (bind ~(prefixf (first children)) ~f))]
fmap `[~(prefixf idx) (m/signal! (m/latest ~f ~@(map prefixf children)))]
user `[~(prefixf idx) (m/signal! ~form)])))
analyzed-ast)))
(defn emit [{:keys [analyzed-ast prefix passives]
:or {prefix (gensym)}}]
(let [prefixf (partial prefixer prefix)
bindings (mapcat identity (emit-bindings prefixf analyzed-ast passives))]
`(fn ~(mapv prefixf ['replayer 'tracef])
(let [~@bindings]
~(gen-trace prefixf analyzed-ast)))))
(tests
(emit {:analyzed-ast (analyze '(fmap clojure.core/+ >a >b))
:prefix '>node
:passives #{1 2}})
:=
`(fn [~'>node_replayer ~'>node_tracef]
(let [~'>node_2 (from-trace! [2] ~'>node_replayer)
~'>node_1 (from-trace! [1] ~'>node_replayer)
~'>node_0 (m/signal! (m/latest + ~'>node_1 ~'>node_2))]
(trace! ~'>node_tracef (m/stream! (m/relieve merge (m/ap (amb= {[0] (m/?? ~'>node_0)}
{[1] (m/?? ~'>node_1)}
{[2] (m/?? ~'>node_2)}))))))))
(tests
(emit {:analyzed-ast (analyze `(~'fmap + >a >b))
:prefix '>node})
:=
`(fn [~'>node_replayer ~'>node_tracef]
(let [~'>node_2 (m/signal! >b)
~'>node_1 (m/signal! >a)
~'>node_0 (m/signal! (m/latest + ~'>node_1 ~'>node_2))]
(trace! ~'>node_tracef (m/stream! (m/relieve merge (m/ap (amb= {[0] (m/?? ~'>node_0)}
{[1] (m/?? ~'>node_1)}
{[2] (m/?? ~'>node_2)}))))))))
(defprotocol Observable
(subscribe! [this listenf])
(unsubscribe! [this listenf]))
(defprotocol IReplay
(replay! [this effect]))
(defn log! [reactor]
(let [!trace (atom [])]
(subscribe! reactor #(swap! !trace conj %))
!trace))
!callbacks
!replayers]
IReplay
(replay! [_ frame-effects]
(doseq [cb @!replayers]
(cb frame-effects)))
Observable
(subscribe! [_ f] (swap! !callbacks conj f))
(unsubscribe! [_ f] (swap! !callbacks disj f)))
(defn reactor! [initf]
(let [!callbacks (atom #{})
!replayers (atom #{})
task (m/reactor
(let [>replayer (m/stream! (m/observe (fn [cb]
(swap! !replayers conj cb)
(fn []
(swap! !replayers disj cb)))))]
(initf >replayer
(fn [effects]
(doseq [cb @!callbacks]
(cb effects))))))
cancel (task (fn [_] (prn "Success"))
prn)]
(->Reactor cancel !callbacks !replayers)))
(defmacro dataflow [ast & [passives]]
`(reactor! ~(emit {:analyzed-ast (analyze ast)
:passives passives})))
|
c9c8d26e2cea89281ae39727d318f87a500f2fcb42d6acf515ac863d33ff901a | roburio/utcp | utcp.ml | type state = State.t
(* Public façade of the TCP stack: re-export pieces of State, Input and
   User under short, stable names. *)
let empty = State.empty
let start_listen = State.start_listen
let stop_listen = State.stop_listen

type flow = State.Connection.t

module FM = State.CM

let pp_flow = State.Connection.pp

(* Split a connection quadruple into (source, destination) endpoint pairs. *)
let peers (src, src_port, dst, dst_port) =
  (src, src_port), (dst, dst_port)

(* An outgoing segment: source address, destination address, payload. *)
type output = Ipaddr.t * Ipaddr.t * Cstruct.t

let timer = Tcptimer.timer
let handle_buf = Input.handle_buf
let connect = User.connect
let close = User.close
let recv = User.recv
let send = User.send

(* Re-export the implementation modules for callers needing internals. *)
module Segment = Segment
module Sequence = Sequence
module Timers = Timers
module State = State
module Input = Input
module User = User
| null | https://raw.githubusercontent.com/roburio/utcp/6cf6100a256caba3f3346d70bfc514aa8d371ca6/src/utcp.ml | ocaml | type state = State.t
let empty = State.empty
let start_listen = State.start_listen
let stop_listen = State.stop_listen
type flow = State.Connection.t
module FM = State.CM
let pp_flow = State.Connection.pp
let peers (src, src_port, dst, dst_port) =
(src, src_port), (dst, dst_port)
type output = Ipaddr.t * Ipaddr.t * Cstruct.t
let timer = Tcptimer.timer
let handle_buf = Input.handle_buf
let connect = User.connect
let close = User.close
let recv = User.recv
let send = User.send
module Segment = Segment
module Sequence = Sequence
module Timers = Timers
module State = State
module Input = Input
module User = User
| |
fa141ea639dae642e19c2270ca65938e532b9d28d15bdc85bdb35dd01a75f145 | psholtz/MIT-SICP | exercise1-44.scm | ;;
Exercise 1.44
;;
;; The idea of "smoothing" a function is an important concept in signal processing. If f is a function
;; and dx is some small number, then the smoothed version of f is the function whose value at a point x
;; is the average of f(x-dx), f(x), f(x+dx). Write a procedure "smooth" that takes as input a procedure
;; that computes f and returns a procedure that computes the smoothed f. It is sometimes valuable to
;; repeatedly smoth a function (that is, smooth the smoothed function, and so on) to obtain the
;; n-fold smoothed function. Show how to generate the n-fold smoothed function of any given function
using " smooth " and " repeated " from exercise 1.43 .
;;
;;
;; Define the "smooth" procedure:
;;
;;
;; Return a smoothed version of f: at each point x the value is the mean
;; of f sampled at x-dx, x and x+dx, for a small fixed dx.
;;
(define (smooth f)
  (let ((dx 0.00001))
    (lambda (x)
      (/ (+ (f (- x dx))
            (f x)
            (f (+ x dx)))
         3.0))))
;;
;; Let's run a use case by defining an impluse function.
;; For the use case, we will use an impulse funtion defined
to be 3 at x=0 , and 0 everywhere else .
;;
;; To construct this impulse, we will use a generic function
;; definition which gives an impulse of "value" at x=a.
;;
;;
;; Build an impulse function: returns `value` exactly at x = a, 0 elsewhere.
;;
(define (impulse-maker a value)
  (lambda (x)
    (cond ((= x a) value)
          (else 0))))
;;
;; Define our impulse procedure:
;;
(define impulse (impulse-maker 0 3))
;;
;; Test the impulse:
;;
(impulse -1)
;; ==> 0
(impulse 0)
= = > 3
(impulse 1)
;; ==> 0
;;
;; Now let's try to "smooth" the impulse:
;;
((smooth impulse) 0)
= = > 1.0
;;
This is what we expect , since ( / ( + 0.0 3.0 0.0 ) 3.0 ) evaluates to 1.0
;;
(= (/ (+ 0 3 0) 3.0) ((smooth impulse) 0))
;;
;; Let's examine the call graph for ((smooth impulse) 0) to see how the
;; interpreter arrives at this answer:
;;
-----------------------------
((smooth impulse) 0)
-----------------------------
((lambda (x)
(average
(impulse (- x dx))
(impulse x)
(impulse (+ x dx)))) 0)
------------------------------
(average (impulse (- 0 dx))
(impulse 0)
(impulse (+ 0 dx)))
------------------------------
(average 0 3 0)
------------------------------
1.0
------------------------------
;;
;; Now let's try to compose the "smooth" function with itself,
;; and see what we get for an answer:
;;
((smooth (smooth impulse)) 0)
= = > 1.0
;;
;; This may seem a bit surprising. If ((smooth impulse) 0) is equal
to 1 , we might naively assume that ( ( smooth ( smooth impulse ) ) 0 )
should be equal to 1/3 ( i.e. , cuts the value of ( smooth impulse )
at zero down again by 1/3 ) .
;;
;; To see why this is not the case, let's expand the call graph
;; for ((smooth (smooth impulse)) 0):
;;
----------------------------------
((smooth (smooth impulse)) 0)
----------------------------------
((smooth
(lambda (x)
(average
(impulse (- x dx))
(impulse x)
(impulse (+ x dx))))) 0)
----------------------------------
((lambda (y)
(average
((lambda (x)
(average
(impulse (- x dx))
(impulse x)
(impulse (+ x dx))))
(- y dx))
((lambda (x)
(average
(impulse (- x dx))
(impulse x)
(impulse (+ x dx))))
y)
((lambda (x)
(average
(impulse (- x dx))
(impulse x)
(impulse (+ x dx))))
(+ y dx)))) 0)
----------------------------------
(average
((lambda (x)
(average
(impulse (- x dx))
(impulse x)
(impulse (+ x dx))))
(- 0 dx))
((lambda (x)
(average
(impulse (- x dx))
(impulse x)
(impulse (+ x dx))))
0)
((lambda (x)
(average
(impulse (- x dx))
(impulse x)
(impulse (+ x dx))))
(+ 0 dx)))
-----------------------------------
(average
(average
(impulse (- (- 0 dx) dx))
(impulse (- 0 dx))
(impulse (+ (- 0 dx) dx)))
(average
(impulse (- 0 dx))
(impulse 0)
(impluse (+ 0 dx)))
(average
(impulse (- (+ 0 dx) dx))
(impulse (+ 0 dx))
(impulse (+ (+ 0 dx) dx))))
------------------------------------
(average
(average
(impulse (- -0.00001 dx))
(impulse -0.00001)
(impulse (+ -0.00001 dx)))
(average
(impulse -0.00001)
(impulse 0)
(impulse +0.00001))
(average
(impulse (- +0.00001 dx))
(impulse +0.00001)
(impulse (+ +0.00001 dx))))
-------------------------------------
(average
(average
(impulse -0.00002)
(impulse -0.00001)
(impulse 0))
(average
(impulse -0.00001)
(impulse 0)
(impulse +0.00001))
(average
(impulse 0)
(impulse +0.00001)
(impulse +0.00002)))
--------------------------------------
(average
(average 0 0 3)
(average 0 3 0)
(average 3 0 0))
--------------------------------------
(average 1.0 1.0 1.0)
--------------------------------------
1.0
--------------------------------------
;;
So indeed , ( ( smooth ( smooth impulse ) ) 0 ) works out to 1.0 as well .
;;
;;
;; Let's look at a couple more n-fold recursive calls of "smooth":
;;
((smooth (smooth (smooth impulse))) 0)
= = > 0.777777777777777
((smooth (smooth (smooth (smooth impulse)))) 0)
= = > 0.703703703703703
((smooth (smooth (smooth (smooth (smooth impulse))))) 0)
= = > 0.629629629629629
;;
;; Now let's implement the "repeated" abstraction as described in the text.
;;
;; Give definition of "repeated" procedure:
;;
;;
;; Functional composition: (compose f g) is the function x -> f(g(x)).
;;
(define (compose f g)
  (lambda (x) (f (g x))))

;;
;; n-fold repeated application: (repeated f n) returns a function that
;; applies f exactly n times.  A named-let iteration threads the partially
;; composed function alongside a counter; for n <= 1 it is f itself.
;;
(define (repeated f n)
  (let loop ((g f) (c 1))
    (if (>= c n)
        g
        (loop (compose g f) (+ c 1)))))
;;
;; Let's run some unit tests to see if "repeated" works the way we
;;
;; Definition of "smooth-n-times":
;;
;;
;; Smooth f n times by composing `smooth` with itself via `repeated`.
;;
(define (smooth-n-times f n)
  ((repeated smooth n) f))
;;
;; Let's run some unit tests, to see if we get the answers we expect:
;;
(= ((smooth-n-times impulse 1) 0) ((smooth impulse) 0))
(= ((smooth-n-times impulse 2) 0) ((smooth (smooth impulse)) 0))
(= ((smooth-n-times impulse 3) 0) ((smooth (smooth (smooth impulse))) 0))
(= ((smooth-n-times impulse 4) 0) ((smooth (smooth (smooth (smooth impulse)))) 0))
(= ((smooth-n-times impulse 5) 0) ((smooth (smooth (smooth (smooth (smooth impulse))))) 0)) | null | https://raw.githubusercontent.com/psholtz/MIT-SICP/01e9b722ac5008e26f386624849117ca8fa80906/Section-1.3/mit-scheme/exercise1-44.scm | scheme |
The idea of "smoothing" a function is an important concept in signal processing. If f is a function
and dx is some small number, then the smoothed version of f is the function whose value at a point x
is the average of f(x-dx), f(x), f(x+dx). Write a procedure "smooth" that takes as input a procedure
that computes f and returns a procedure that computes the smoothed f. It is sometimes valuable to
repeatedly smoth a function (that is, smooth the smoothed function, and so on) to obtain the
n-fold smoothed function. Show how to generate the n-fold smoothed function of any given function
Define the "smooth" procedure:
define the "dx" differential
Let's run a use case by defining an impluse function.
For the use case, we will use an impulse funtion defined
To construct this impulse, we will use a generic function
definition which gives an impulse of "value" at x=a.
Define our impulse procedure:
Test the impulse:
==> 0
==> 0
Now let's try to "smooth" the impulse:
Let's examine the call graph for ((smooth impulse) 0) to see how the
interpreter arrives at this answer:
Now let's try to compose the "smooth" function with itself,
and see what we get for an answer:
This may seem a bit surprising. If ((smooth impulse) 0) is equal
To see why this is not the case, let's expand the call graph
for ((smooth (smooth impulse)) 0):
Let's look at a couple more n-fold recursive calls of "smooth":
Now let's implement the "repeated" abstraction as described in the text.
Give definition of "repeated" procedure:
Let's run some unit tests to see if "repeated" works the way we
Definition of "smooth-n-times":
Let's run some unit tests, to see if we get the answers we expect:
| Exercise 1.44
using " smooth " and " repeated " from exercise 1.43 .
(define (smooth f)
(define (average a b c)
(/ (+ a b c) 3.0))
(lambda (x)
(average (f (- x dx)) (f x) (f (+ x dx)))))
to be 3 at x=0 , and 0 everywhere else .
(define (impulse-maker a value)
(lambda (x)
(if (= x a)
value
0)))
(define impulse (impulse-maker 0 3))
(impulse -1)
(impulse 0)
= = > 3
(impulse 1)
((smooth impulse) 0)
= = > 1.0
This is what we expect , since ( / ( + 0.0 3.0 0.0 ) 3.0 ) evaluates to 1.0
(= (/ (+ 0 3 0) 3.0) ((smooth impulse) 0))
-----------------------------
((smooth impulse) 0)
-----------------------------
((lambda (x)
(average
(impulse (- x dx))
(impulse x)
(impulse (+ x dx)))) 0)
------------------------------
(average (impulse (- 0 dx))
(impulse 0)
(impulse (+ 0 dx)))
------------------------------
(average 0 3 0)
------------------------------
1.0
------------------------------
((smooth (smooth impulse)) 0)
= = > 1.0
to 1 , we might naively assume that ( ( smooth ( smooth impulse ) ) 0 )
should be equal to 1/3 ( i.e. , cuts the value of ( smooth impulse )
at zero down again by 1/3 ) .
----------------------------------
((smooth (smooth impulse)) 0)
----------------------------------
((smooth
(lambda (x)
(average
(impulse (- x dx))
(impulse x)
(impulse (+ x dx))))) 0)
----------------------------------
((lambda (y)
(average
((lambda (x)
(average
(impulse (- x dx))
(impulse x)
(impulse (+ x dx))))
(- y dx))
((lambda (x)
(average
(impulse (- x dx))
(impulse x)
(impulse (+ x dx))))
y)
((lambda (x)
(average
(impulse (- x dx))
(impulse x)
(impulse (+ x dx))))
(+ y dx)))) 0)
----------------------------------
(average
((lambda (x)
(average
(impulse (- x dx))
(impulse x)
(impulse (+ x dx))))
(- 0 dx))
((lambda (x)
(average
(impulse (- x dx))
(impulse x)
(impulse (+ x dx))))
0)
((lambda (x)
(average
(impulse (- x dx))
(impulse x)
(impulse (+ x dx))))
(+ 0 dx)))
-----------------------------------
(average
(average
(impulse (- (- 0 dx) dx))
(impulse (- 0 dx))
(impulse (+ (- 0 dx) dx)))
(average
(impulse (- 0 dx))
(impulse 0)
(impluse (+ 0 dx)))
(average
(impulse (- (+ 0 dx) dx))
(impulse (+ 0 dx))
(impulse (+ (+ 0 dx) dx))))
------------------------------------
(average
(average
(impulse (- -0.00001 dx))
(impulse -0.00001)
(impulse (+ -0.00001 dx)))
(average
(impulse -0.00001)
(impulse 0)
(impulse +0.00001))
(average
(impulse (- +0.00001 dx))
(impulse +0.00001)
(impulse (+ +0.00001 dx))))
-------------------------------------
(average
(average
(impulse -0.00002)
(impulse -0.00001)
(impulse 0))
(average
(impulse -0.00001)
(impulse 0)
(impulse +0.00001))
(average
(impulse 0)
(impulse +0.00001)
(impulse +0.00002)))
--------------------------------------
(average
(average 0 0 3)
(average 0 3 0)
(average 3 0 0))
--------------------------------------
(average 1.0 1.0 1.0)
--------------------------------------
1.0
--------------------------------------
So indeed , ( ( smooth ( smooth impulse ) ) 0 ) works out to 1.0 as well .
((smooth (smooth (smooth impulse))) 0)
= = > 0.777777777777777
((smooth (smooth (smooth (smooth impulse)))) 0)
= = > 0.703703703703703
((smooth (smooth (smooth (smooth (smooth impulse))))) 0)
= = > 0.629629629629629
(define (compose f g)
(lambda (x) (f (g x))))
(define (repeated f n)
(define (repeated-iter g c)
(cond ((>= c n) g)
(else
(repeated-iter (compose g f) (+ c 1)))))
(repeated-iter f 1))
(define (smooth-n-times f n)
((repeated smooth n) f))
(= ((smooth-n-times impulse 1) 0) ((smooth impulse) 0))
(= ((smooth-n-times impulse 2) 0) ((smooth (smooth impulse)) 0))
(= ((smooth-n-times impulse 3) 0) ((smooth (smooth (smooth impulse))) 0))
(= ((smooth-n-times impulse 4) 0) ((smooth (smooth (smooth (smooth impulse)))) 0))
(= ((smooth-n-times impulse 5) 0) ((smooth (smooth (smooth (smooth (smooth impulse))))) 0)) |
fc5a0b5301708f0ba385da58be158661a854865dc75b1feb33cc6b561a703cdd | camsaul/methodical | clos_test.clj | (ns methodical.impl.combo.clos-test
(:require [clojure.string :as str]
[clojure.test :as t]
[methodical.core :as m]
[methodical.impl.combo.clos :as combo.clos]
[methodical.interface :as i]))
(defn- combine-methods
  "Combine primary and auxiliary methods with a fresh CLOS-style standard
  method combination (the system under test)."
  [primary-methods aux-methods]
  (i/combine-methods (combo.clos/->CLOSStandardMethodCombination) primary-methods aux-methods))
(defn- make-method-fn
  "Return 3 functions:

  * `calls`, which returns a sequence of calls made;

  * `make-method`; which makes a method impl that adds its invocation (`(method-key & args)`) to calls, and returns
  its first arg (if any) with `method-key` appended.

  * `record-call!`, which records the invocation (just like `make-method` does, but for cases where you don't want to
  use this.)"
  []
  (let [calls* (atom [])]
    ;; fn names provided for clarity/debugging
    [(fn calls []
       @calls*)
     (fn make-method [method-key]
       (fn [& [first-arg :as args]]
         ;; record the invocation as (method-key & args)...
         (swap! calls* conj (cons (symbol method-key) args))
         ;; ...then return first-arg (vectorized) with method-key appended.
         (conj (vec first-arg) method-key)))
     (fn record-call! [method-key & args]
       (swap! calls* conj (cons (symbol method-key) args)))]))
(defn- make-primary-method
  "Makes a primary method that appends `method-key` (default `:primary`) to its first arg, returning that as the result.
  If it has a `next-method`, wraps all other args like `(method-key arg)` and calls the next method like:
    (let [next-result (apply next-method result (rest args))]
      (conj (vec next-result) :method-key-after))."
  ([make-method]
   (make-primary-method make-method :primary))
  ([make-method method-key]
   (let [f (make-method method-key)]
     (fn [next-method & args]
       ;; `f` both records the call and returns (conj (vec first-arg) method-key)
       (let [result (apply f args)]
         (if next-method
           ;; forward to the next primary method, tagging the remaining args
           ;; so the call log shows which method transformed them; then mark
           ;; the returned accumulator with e.g. :primary-after
           (let [result' (apply next-method result (for [arg (rest args)]
                                                     (list (symbol method-key) arg)))]
             (conj (vec result') (keyword (str (name method-key) "-after"))))
           result))))))
(t/deftest before-test
  (t/testing "before methods for CLOS method combinations"
    ;; exercise arities 0-5; the first arg (when present) is an accumulator
    ;; vector that the primary method conj's its key onto
    (doseq [args [[]
                  [[]]
                  [[] :v2]
                  [[] :v2 :v3]
                  [[] :v2 :v3 :v4]
                  [[] :v2 :v3 :v4 :v5]]]
      (t/testing (format "%d args" (count args))
        (let [[calls make-method] (make-method-fn)
              f (combine-methods
                 [(make-primary-method make-method)]
                 {:before [(make-method :before-1)
                           (make-method :before-2)]})]
          (t/testing "result"
            (t/is (= [:primary]
                     (apply f args))
                  "Return values of before methods should be ignored"))
          ;; before methods receive the full, unmodified argument list
          (t/testing "calls"
            (t/is (= [(cons 'before-1 args)
                      (cons 'before-2 args)
                      (cons 'primary args)]
                     (calls))
                  "Before methods should be called in order from most-specific to least-specific")))))))
(t/deftest after-test
  (t/testing "after methods for CLOS method combinations"
    (doseq [args [[]
                  [[]]
                  [[] :v2]
                  [[] :v2 :v3]
                  [[] :v2 :v3 :v4]
                  [[] :v2 :v3 :v4 :v5]]]
      (t/testing (format "%d args" (count args))
        (let [[calls make-method] (make-method-fn)
              f (combine-methods
                 [(make-primary-method make-method)]
                 {:after [(make-method :after-1)
                          (make-method :after-2)]})]
          (t/testing "result"
            (t/is (= [:primary]
                     (apply f args))
                  "Return values of after methods should be ignored"))
          ;; unlike before methods, after methods are invoked with only the
          ;; primary result ([:primary]) regardless of the original arity
          (t/testing "calls"
            (t/is (= [(cons 'primary args)
                      '(after-2 [:primary])
                      '(after-1 [:primary])]
                     (calls))
                  "after methods should be called in order from least- to most-specific with result of primary fn")))))))
(defn- make-around-method
  "Makes an around method that appends `<method-key>-before` to the first arg (if any), wraps all other args
  in `(<method-key>-before arg)`, calls `next-method`, then appends the `<method-key>-after` to the result."
  [record-call! method-key]
  ;; e.g. :around-1 -> :around-1-before / :around-1-after
  (let [[before-key after-key] (map #(keyword (str (name method-key) \- %)) ["before" "after"])]
    (fn [next-method & [acc & rest-args :as args]]
      ;; log entry into the "before" half of the around method
      (apply record-call! before-key args)
      (let [acc' (when (seq args)
                   (conj (vec acc) before-key))
            rest-args' (for [arg rest-args]
                         (list (symbol (name before-key)) arg))
            ;; when called with no args at all, call next-method with no args
            args' (when acc' (cons acc' rest-args'))
            result (apply next-method args')]
        (record-call! after-key result)
        (conj (vec result) after-key)))))
(t/deftest around-test
  ;; Around methods run outermost-first on the way in (least-specific
  ;; :around-2 wraps :around-1, which wraps the primary) and unwind in the
  ;; opposite order on the way out.
  (t/testing "around methods"
    (doseq [args [[]
                  [[]]
                  [[] :v2]
                  [[] :v2 :v3]
                  [[] :v2 :v3 :v4]
                  [[] :v2 :v3 :v4 :v5]]]
      (t/testing (format "%d args" (count args))
        (let [[calls make-method record-call!] (make-method-fn)
              f (combine-methods
                 [(make-primary-method make-method)]
                 {:around [(make-around-method record-call! :around-1)
                           (make-around-method record-call! :around-2)]})]
          (t/testing "result"
            ;; with zero args the around methods cannot tag an accumulator on
            ;; the way in, so only the -after tags appear in the result
            (let [expected-args (if (empty? args)
                                  [:primary :around-1-after :around-2-after]
                                  [:around-2-before :around-1-before :primary :around-1-after :around-2-after])]
              (t/is (= expected-args
                       (apply f args))
                    "Around methods should be able to modify args, and modify the results")))
          (t/testing "calls"
            (let [expected-calls (if (empty? args)
                                   '[(around-2-before)
                                     (around-1-before)
                                     (primary)
                                     (around-1-after [:primary])
                                     (around-2-after [:primary :around-1-after])]
                                   [(cons 'around-2-before args)
                                    (concat '(around-1-before [:around-2-before])
                                            (for [arg (rest args)]
                                              (list 'around-2-before arg)))
                                    (concat '(primary [:around-2-before :around-1-before])
                                            (for [arg (rest args)]
                                              (list 'around-1-before (list 'around-2-before arg))))
                                    '(around-1-after [:around-2-before :around-1-before :primary])
                                    '(around-2-after [:around-2-before :around-1-before :primary :around-1-after])])]
              ;; failure message fixed: was the garbled "should be applied,
              ;; in or in order from least- to most- specific"
              (t/is (= expected-calls
                       (calls))
                    "Around methods should be applied in order from least- to most-specific"))))))))
(t/deftest primary-method-test
  (t/testing "Empty primary-methods"
    (t/is (= nil
             (combine-methods [] {:before [(constantly :before)]}))
          "combine-methods should return nil if there are no matching primary methods."))
  (t/testing "next-method"
    ;; NOTE(review): `args` is bound by the doseq but never used below --
    ;; `(f [])` is always called with the fixed argument `[]`, so all six
    ;; iterations run the identical check. Presumably this was meant to be
    ;; (apply f args) with per-arity expectations -- TODO confirm before
    ;; changing, since the expected call log below assumes `[]`.
    (doseq [args [[]
                  [[]]
                  [[] :v2]
                  [[] :v2 :v3]
                  [[] :v2 :v3 :v4]
                  [[] :v2 :v3 :v4 :v5]]]
      (t/testing (format "%d args" (count args))
        (let [[calls make-method] (make-method-fn)
              f
              (combine-methods [(make-primary-method make-method :primary-1)
                                (make-primary-method make-method :primary-2)]
                               nil)]
          (t/is (= [:primary-1 :primary-2 :primary-1-after]
                   (f []))
                "Calling `next-method` should invoke the next method")
          (t/testing "calls"
            (t/is (= '[(primary-1 [])
                       (primary-2 [:primary-1])]
                     (calls)))))))))
(t/deftest everything-test
  ;; Two primaries plus before/after/around aux methods, all recording into
  ;; the same call log, so the return value and the complete invocation
  ;; order (around-in, befores, primaries, afters, around-out) can be
  ;; asserted at once.
  (let [[calls make-method record-call!] (make-method-fn)
        f
        (combine-methods [(make-primary-method make-method :primary-1)
                          (make-primary-method make-method :primary-2)]
                         {:before [(make-method :before-1)
                                   (make-method :before-2)]
                          :after [(make-method :after-1)
                                  (make-method :after-2)]
                          :around [(make-around-method record-call! :around-1)
                                   (make-around-method record-call! :around-2)]})]
    (t/is (= [:around-2-before :around-1-before :primary-1 :primary-2 :primary-1-after :around-1-after :around-2-after]
             (f []))
          "Results of before/after methods should be ignored")
    (t/is (= '[(around-2-before [])
               (around-1-before [:around-2-before])
               (before-1 [:around-2-before :around-1-before])
               (before-2 [:around-2-before :around-1-before])
               (primary-1 [:around-2-before :around-1-before])
               (primary-2 [:around-2-before :around-1-before :primary-1])
               (after-2 [:around-2-before :around-1-before :primary-1 :primary-2 :primary-1-after])
               (after-1 [:around-2-before :around-1-before :primary-1 :primary-2 :primary-1-after])
               (around-1-after [:around-2-before :around-1-before :primary-1 :primary-2 :primary-1-after])
               (around-2-after [:around-2-before :around-1-before :primary-1 :primary-2 :primary-1-after
                                :around-1-after])]
             (calls))
          "Aux methods should get called in the order we expect")))
;; End-to-end fixture: a private multifn using the CLOS method combination,
;; dispatching on the argument's class.
(m/defmulti ^:private clos-multifn class
  :combo (m/clos-method-combination))

;; least-specific primary method: stringify and append "!"
(m/defmethod clos-multifn Object
  [s]
  (str s "!"))

;; vectors are joined into a single string, then handed up the chain
(m/defmethod clos-multifn clojure.lang.PersistentVector
  [coll]
  (next-method (str/join coll)))

;; around method for Strings: invokes the rest of the chain twice and
;; glues the two results together
(m/defmethod clos-multifn :around String
  [s]
  (str (next-method s) " <-> " (next-method s)))
(t/deftest e2e-test
  ;; "A" is a String, so the :around method runs the Object primary twice
  (t/is (= "A! <-> A!"
           (clos-multifn "A")))
  ;; dispatch happened on the vector, so (per the expected value) the
  ;; String :around method does not re-fire for the joined string
  (t/is (= "ABC!"
           (clos-multifn ["A" "B" "C"]))))
| null | https://raw.githubusercontent.com/camsaul/methodical/de05a43afe93d6732f8630b16dc512302703a30c/test/methodical/impl/combo/clos_test.clj | clojure |
which makes a method impl that adds its invocation (`(method-key & args)`) to calls, and returns
fn names provided for clarity/debugging | (ns methodical.impl.combo.clos-test
(:require [clojure.string :as str]
[clojure.test :as t]
[methodical.core :as m]
[methodical.impl.combo.clos :as combo.clos]
[methodical.interface :as i]))
(defn- combine-methods [primary-methods aux-methods]
(i/combine-methods (combo.clos/->CLOSStandardMethodCombination) primary-methods aux-methods))
(defn- make-method-fn
"Return 3 functions:
its first arg (if any) with `method-key` appended.
* `record-call!`, which records the invocation (just like `make-method` does, but for cases where you don't want to
use this.)"
[]
(let [calls* (atom [])]
[(fn calls []
@calls*)
(fn make-method [method-key]
(fn [& [first-arg :as args]]
(swap! calls* conj (cons (symbol method-key) args))
(conj (vec first-arg) method-key)))
(fn record-call! [method-key & args]
(swap! calls* conj (cons (symbol method-key) args)))]))
(defn- make-primary-method
"Makes a primary method that appends `method-key` (default `:primary`) to its first arg, returning that as the result.
If it has a `next-method`, wraps all other args like `(method-key arg)` and calls the next method like:
(let [next-result (apply next-method result (rest args))]
(conj (vec next-result) :method-key-after))."
([make-method]
(make-primary-method make-method :primary))
([make-method method-key]
(let [f (make-method method-key)]
(fn [next-method & args]
(let [result (apply f args)]
(if next-method
(let [result' (apply next-method result (for [arg (rest args)]
(list (symbol method-key) arg)))]
(conj (vec result') (keyword (str (name method-key) "-after"))))
result))))))
(t/deftest before-test
(t/testing "before methods for CLOS method combinations"
(doseq [args [[]
[[]]
[[] :v2]
[[] :v2 :v3]
[[] :v2 :v3 :v4]
[[] :v2 :v3 :v4 :v5]]]
(t/testing (format "%d args" (count args))
(let [[calls make-method] (make-method-fn)
f (combine-methods
[(make-primary-method make-method)]
{:before [(make-method :before-1)
(make-method :before-2)]})]
(t/testing "result"
(t/is (= [:primary]
(apply f args))
"Return values of before methods should be ignored"))
(t/testing "calls"
(t/is (= [(cons 'before-1 args)
(cons 'before-2 args)
(cons 'primary args)]
(calls))
"Before methods should be called in order from most-specific to least-specific")))))))
(t/deftest after-test
(t/testing "after methods for CLOS method combinations"
(doseq [args [[]
[[]]
[[] :v2]
[[] :v2 :v3]
[[] :v2 :v3 :v4]
[[] :v2 :v3 :v4 :v5]]]
(t/testing (format "%d args" (count args))
(let [[calls make-method] (make-method-fn)
f (combine-methods
[(make-primary-method make-method)]
{:after [(make-method :after-1)
(make-method :after-2)]})]
(t/testing "result"
(t/is (= [:primary]
(apply f args))
"Return values of after methods should be ignored"))
(t/testing "calls"
(t/is (= [(cons 'primary args)
'(after-2 [:primary])
'(after-1 [:primary])]
(calls))
"after methods should be called in order from least- to most-specific with result of primary fn")))))))
(defn- make-around-method
"Makes an around method that appends `<method-key>-before` to the first arg (if any), wraps all other args
in `(<method-key>-before arg)`, calls `next-method`, then appends the `<method-key>-after` to the result."
[record-call! method-key]
(let [[before-key after-key] (map #(keyword (str (name method-key) \- %)) ["before" "after"])]
(fn [next-method & [acc & rest-args :as args]]
(apply record-call! before-key args)
(let [acc' (when (seq args)
(conj (vec acc) before-key))
rest-args' (for [arg rest-args]
(list (symbol (name before-key)) arg))
args' (when acc' (cons acc' rest-args'))
result (apply next-method args')]
(record-call! after-key result)
(conj (vec result) after-key)))))
(t/deftest around-test
(t/testing "around methods"
(doseq [args [[]
[[]]
[[] :v2]
[[] :v2 :v3]
[[] :v2 :v3 :v4]
[[] :v2 :v3 :v4 :v5]]]
(t/testing (format "%d args" (count args))
(let [[calls make-method record-call!] (make-method-fn)
f (combine-methods
[(make-primary-method make-method)]
{:around [(make-around-method record-call! :around-1)
(make-around-method record-call! :around-2)]})]
(t/testing "result"
(let [expected-args (if (empty? args)
[:primary :around-1-after :around-2-after]
[:around-2-before :around-1-before :primary :around-1-after :around-2-after])]
(t/is (= expected-args
(apply f args))
"Around methods should be able to modify args, and modify the results")))
(t/testing "calls"
(let [expected-calls (if (empty? args)
'[(around-2-before)
(around-1-before)
(primary)
(around-1-after [:primary])
(around-2-after [:primary :around-1-after])]
[(cons 'around-2-before args)
(concat '(around-1-before [:around-2-before])
(for [arg (rest args)]
(list 'around-2-before arg)))
(concat '(primary [:around-2-before :around-1-before])
(for [arg (rest args)]
(list 'around-1-before (list 'around-2-before arg))))
'(around-1-after [:around-2-before :around-1-before :primary])
'(around-2-after [:around-2-before :around-1-before :primary :around-1-after])])]
(t/is (= expected-calls
(calls))
"Around methods should be applied, in or in order from least- to most- specific"))))))))
(t/deftest primary-method-test
(t/testing "Empty primary-methods"
(t/is (= nil
(combine-methods [] {:before [(constantly :before)]}))
"combine-methods should return nil if there are no matching primary methods."))
(t/testing "next-method"
(doseq [args [[]
[[]]
[[] :v2]
[[] :v2 :v3]
[[] :v2 :v3 :v4]
[[] :v2 :v3 :v4 :v5]]]
(t/testing (format "%d args" (count args))
(let [[calls make-method] (make-method-fn)
f
(combine-methods [(make-primary-method make-method :primary-1)
(make-primary-method make-method :primary-2)]
nil)]
(t/is (= [:primary-1 :primary-2 :primary-1-after]
(f []))
"Calling `next-method` should invoke the next method")
(t/testing "calls"
(t/is (= '[(primary-1 [])
(primary-2 [:primary-1])]
(calls)))))))))
(t/deftest everything-test
(let [[calls make-method record-call!] (make-method-fn)
f
(combine-methods [(make-primary-method make-method :primary-1)
(make-primary-method make-method :primary-2)]
{:before [(make-method :before-1)
(make-method :before-2)]
:after [(make-method :after-1)
(make-method :after-2)]
:around [(make-around-method record-call! :around-1)
(make-around-method record-call! :around-2)]})]
(t/is (= [:around-2-before :around-1-before :primary-1 :primary-2 :primary-1-after :around-1-after :around-2-after]
(f []))
"Results of before/after methods should be ignored")
(t/is (= '[(around-2-before [])
(around-1-before [:around-2-before])
(before-1 [:around-2-before :around-1-before])
(before-2 [:around-2-before :around-1-before])
(primary-1 [:around-2-before :around-1-before])
(primary-2 [:around-2-before :around-1-before :primary-1])
(after-2 [:around-2-before :around-1-before :primary-1 :primary-2 :primary-1-after])
(after-1 [:around-2-before :around-1-before :primary-1 :primary-2 :primary-1-after])
(around-1-after [:around-2-before :around-1-before :primary-1 :primary-2 :primary-1-after])
(around-2-after [:around-2-before :around-1-before :primary-1 :primary-2 :primary-1-after
:around-1-after])]
(calls))
"Aux methods should get called in the order we expect")))
(m/defmulti ^:private clos-multifn class
:combo (m/clos-method-combination))
(m/defmethod clos-multifn Object
[s]
(str s "!"))
(m/defmethod clos-multifn clojure.lang.PersistentVector
[coll]
(next-method (str/join coll)))
(m/defmethod clos-multifn :around String
[s]
(str (next-method s) " <-> " (next-method s)))
(t/deftest e2e-test
(t/is (= "A! <-> A!"
(clos-multifn "A")))
(t/is (= "ABC!"
(clos-multifn ["A" "B" "C"]))))
|
d010636f22490032694f411a3168a16b8a7fa22e3c04a0c84c00269ffe8d5eb4 | inhabitedtype/ocaml-aws | registerDefaultPatchBaseline.ml | open Types
open Aws
(* Typed interface of the SSM RegisterDefaultPatchBaseline operation. *)
type input = RegisterDefaultPatchBaselineRequest.t
type output = RegisterDefaultPatchBaselineResult.t
type error = Errors_internal.t
(* AWS service identifier and the signature scheme used when signing requests. *)
let service = "ssm"
let signature_version = Request.V4
(* Build the HTTP request for this action: a POST to the regional SSM
   endpoint, with the API version, action name, and the rendered request
   parameters carried in the query string.  [of_option_exn] raises if no
   endpoint is known for the service/region pair. *)
let to_http service region req =
  let uri =
    Uri.add_query_params
      (Uri.of_string (Aws.Util.of_option_exn (Endpoints.url_of service region)))
      (List.append
         [ "Version", [ "2014-11-06" ]; "Action", [ "RegisterDefaultPatchBaseline" ] ]
         (Util.drop_empty
            (Uri.query_of_encoded
               (Query.render (RegisterDefaultPatchBaselineRequest.to_query req)))))
  in
  `POST, uri, []
(* Parse the XML response body.  Locates the
   <RegisterDefaultPatchBaselineResponse> wrapper element, then delegates to
   [RegisterDefaultPatchBaselineResult.parse].  All failure modes --
   malformed XML (the outer [Failure] handler), a missing/ill-formed result
   element, or a missing required field -- are mapped to [`Error BadResponse]
   rather than raising. *)
let of_http body =
  try
    let xml = Ezxmlm.from_string body in
    let resp = Xml.member "RegisterDefaultPatchBaselineResponse" (snd xml) in
    try
      Util.or_error
        (Util.option_bind resp RegisterDefaultPatchBaselineResult.parse)
        (let open Error in
        BadResponse
          { body
          ; message = "Could not find well formed RegisterDefaultPatchBaselineResult."
          })
    with Xml.RequiredFieldMissing msg ->
      let open Error in
      `Error
        (BadResponse
           { body
           ; message =
               "Error parsing RegisterDefaultPatchBaselineResult - missing field in body \
                or children: "
               ^ msg
           })
  with Failure msg ->
    `Error
      (let open Error in
      BadResponse { body; message = "Error parsing xml: " ^ msg })
(* Map an HTTP status [code] and an error-code string [err] from a failed
   response onto a recognized [Errors_internal.t].  Returns [None] when the
   string is unknown, the error is not in the recognized list, or its
   expected HTTP status (when one is defined) does not match [code]. *)
let parse_error code err =
  let recognized = Errors_internal.common in
  match Errors_internal.of_string err with
  | None -> None
  | Some e ->
      let code_matches =
        match Errors_internal.to_http_code e with
        | None -> true
        | Some expected -> expected = code
      in
      if List.mem e recognized && code_matches then Some e else None
| null | https://raw.githubusercontent.com/inhabitedtype/ocaml-aws/3bc554af7ae7ef9e2dcea44a1b72c9e687435fa9/libraries/ssm/lib/registerDefaultPatchBaseline.ml | ocaml | open Types
open Aws
type input = RegisterDefaultPatchBaselineRequest.t
type output = RegisterDefaultPatchBaselineResult.t
type error = Errors_internal.t
let service = "ssm"
let signature_version = Request.V4
let to_http service region req =
let uri =
Uri.add_query_params
(Uri.of_string (Aws.Util.of_option_exn (Endpoints.url_of service region)))
(List.append
[ "Version", [ "2014-11-06" ]; "Action", [ "RegisterDefaultPatchBaseline" ] ]
(Util.drop_empty
(Uri.query_of_encoded
(Query.render (RegisterDefaultPatchBaselineRequest.to_query req)))))
in
`POST, uri, []
let of_http body =
try
let xml = Ezxmlm.from_string body in
let resp = Xml.member "RegisterDefaultPatchBaselineResponse" (snd xml) in
try
Util.or_error
(Util.option_bind resp RegisterDefaultPatchBaselineResult.parse)
(let open Error in
BadResponse
{ body
; message = "Could not find well formed RegisterDefaultPatchBaselineResult."
})
with Xml.RequiredFieldMissing msg ->
let open Error in
`Error
(BadResponse
{ body
; message =
"Error parsing RegisterDefaultPatchBaselineResult - missing field in body \
or children: "
^ msg
})
with Failure msg ->
`Error
(let open Error in
BadResponse { body; message = "Error parsing xml: " ^ msg })
let parse_error code err =
let errors = [] @ Errors_internal.common in
match Errors_internal.of_string err with
| Some var ->
if List.mem var errors
&&
match Errors_internal.to_http_code var with
| Some var -> var = code
| None -> true
then Some var
else None
| None -> None
| |
3fa4c0d37335085697963821a2a91bb704472d9325e3edde2524d333cdfdf712 | mbj/stratosphere | ConfigurationItemProperty.hs | module Stratosphere.ResourceGroups.Group.ConfigurationItemProperty (
module Exports, ConfigurationItemProperty(..),
mkConfigurationItemProperty
) where
import qualified Data.Aeson as JSON
import qualified Stratosphere.Prelude as Prelude
import Stratosphere.Property
import {-# SOURCE #-} Stratosphere.ResourceGroups.Group.ConfigurationParameterProperty as Exports
import Stratosphere.ResourceProperties
import Stratosphere.Value
-- | Mirrors the @AWS::ResourceGroups::Group.ConfigurationItem@
-- CloudFormation property: an optional configuration type name plus
-- optional configuration parameters.  Both fields default to unset.
data ConfigurationItemProperty
  = ConfigurationItemProperty {parameters :: (Prelude.Maybe [ConfigurationParameterProperty]),
                               type' :: (Prelude.Maybe (Value Prelude.Text))}
-- | Smart constructor: a 'ConfigurationItemProperty' with every optional
-- field left unset ('Prelude.Nothing').
mkConfigurationItemProperty :: ConfigurationItemProperty
mkConfigurationItemProperty
  = ConfigurationItemProperty
      {parameters = Prelude.Nothing, type' = Prelude.Nothing}
-- | Emit this property as a CloudFormation resource-property blob;
-- 'Prelude.catMaybes' drops unset optional fields from the object.
instance ToResourceProperties ConfigurationItemProperty where
  toResourceProperties ConfigurationItemProperty {..}
    = ResourceProperties
        {awsType = "AWS::ResourceGroups::Group.ConfigurationItem",
         supportsTags = Prelude.False,
         properties = Prelude.fromList
                        (Prelude.catMaybes
                           [(JSON..=) "Parameters" Prelude.<$> parameters,
                            (JSON..=) "Type" Prelude.<$> type'])}

-- | Direct JSON encoding; same key/value pairs as 'toResourceProperties'
-- without the resource-type metadata.
instance JSON.ToJSON ConfigurationItemProperty where
  toJSON ConfigurationItemProperty {..}
    = JSON.object
        (Prelude.fromList
           (Prelude.catMaybes
              [(JSON..=) "Parameters" Prelude.<$> parameters,
               (JSON..=) "Type" Prelude.<$> type']))

-- | Record-update style setters used by Stratosphere's @Property@ DSL;
-- 'set' wraps the new value in 'Prelude.pure' ('Just').
instance Property "Parameters" ConfigurationItemProperty where
  type PropertyType "Parameters" ConfigurationItemProperty = [ConfigurationParameterProperty]
  set newValue ConfigurationItemProperty {..}
    = ConfigurationItemProperty
        {parameters = Prelude.pure newValue, ..}

instance Property "Type" ConfigurationItemProperty where
  type PropertyType "Type" ConfigurationItemProperty = Value Prelude.Text
  set newValue ConfigurationItemProperty {..}
    = ConfigurationItemProperty {type' = Prelude.pure newValue, ..}
module Exports, ConfigurationItemProperty(..),
mkConfigurationItemProperty
) where
import qualified Data.Aeson as JSON
import qualified Stratosphere.Prelude as Prelude
import Stratosphere.Property
import Stratosphere.ResourceProperties
import Stratosphere.Value
data ConfigurationItemProperty
= ConfigurationItemProperty {parameters :: (Prelude.Maybe [ConfigurationParameterProperty]),
type' :: (Prelude.Maybe (Value Prelude.Text))}
mkConfigurationItemProperty :: ConfigurationItemProperty
mkConfigurationItemProperty
= ConfigurationItemProperty
{parameters = Prelude.Nothing, type' = Prelude.Nothing}
instance ToResourceProperties ConfigurationItemProperty where
toResourceProperties ConfigurationItemProperty {..}
= ResourceProperties
{awsType = "AWS::ResourceGroups::Group.ConfigurationItem",
supportsTags = Prelude.False,
properties = Prelude.fromList
(Prelude.catMaybes
[(JSON..=) "Parameters" Prelude.<$> parameters,
(JSON..=) "Type" Prelude.<$> type'])}
instance JSON.ToJSON ConfigurationItemProperty where
toJSON ConfigurationItemProperty {..}
= JSON.object
(Prelude.fromList
(Prelude.catMaybes
[(JSON..=) "Parameters" Prelude.<$> parameters,
(JSON..=) "Type" Prelude.<$> type']))
instance Property "Parameters" ConfigurationItemProperty where
type PropertyType "Parameters" ConfigurationItemProperty = [ConfigurationParameterProperty]
set newValue ConfigurationItemProperty {..}
= ConfigurationItemProperty
{parameters = Prelude.pure newValue, ..}
instance Property "Type" ConfigurationItemProperty where
type PropertyType "Type" ConfigurationItemProperty = Value Prelude.Text
set newValue ConfigurationItemProperty {..}
= ConfigurationItemProperty {type' = Prelude.pure newValue, ..} |
24ffeff5a45dd59f331e60ee08219eb8fd826c474e67f1b05c0977025dad9c40 | rwilcox/my-learnings-docs | learning_time_series_database_druid.md.rkt | #lang scribble/text
@(require "scribble-utils.rkt")
---
path: /learnings/learning_time_series_database_druid
title: Learning Druid
---
# Table Of Contents
<!-- toc -->
# What is Druid
Druid is a time series database that ingests data from various sources (files, streaming) which are controlled via spec files. Users can later query time series data using SQL or druid native query language (a JSON schema), or roll up data to be less granular.
# Druid Setup
@quote-highlight[#:title "Design · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{
Druid has several process types, briefly described below:
* **Coordinator** processes manage data availability on the cluster. The workload on the Coordinator process tends to increase with the number of segments in the cluster. They watch over the Historical processes on the Data servers. They are responsible for assigning segments to specific servers, and for ensuring segments are well-balanced across Historicals.
* **Overlord** processes control the assignment of data ingestion workloads. They watch over the MiddleManager processes on the Data servers and are the controllers of data ingestion into Druid. They are responsible for assigning ingestion tasks to MiddleManagers and for coordinating segment publishing.
* **Broker** processes handle queries from external clients.
* **Router** processes are optional; they route requests to Brokers, Coordinators, and Overlords.
* **Historical** processes store queryable data. They handle storage and querying on "historical" data (including any streaming data that has been in the system long enough to be committed). Historical processes download segments from deep storage and respond to queries about these segments. They don't accept writes.
* **MiddleManager** OR **Indexer** processes ingest data. Instead of forking separate JVM processes per-task, the Indexer runs tasks as individual threads within a single JVM process
* **Supervisor** if you are using a streaming ingest somewhere
}
External Dependencies:
* Zookeeper
* ingestion method
* long term storage for segments (DB, block storage or big data cluster)
* metadata storage <-- usually in a mysql or Postgres database
@quote-highlight[#:title "ZooKeeper · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{
The operations that happen over ZK are:
* Coordinator leader election
* Segment "publishing" protocol from Historical <— also called segment announcement, when the historical boots up and needs to tell broker what segments it can query
* Segment load/drop protocol between Coordinator and Historical
* Overlord leader election
* Overlord/MiddleManager task management
* Overlord to Indexer task management. (Note: generated tasks - i.e. perfect rollups - may get very large depending on the number of segments or metric columns involved)
}
# Design of Druid Data Structure
@quote-highlight[#:title "Design · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{Druid data is stored in datasources, which are similar to tables in a traditional RDBMS. Each datasource is partitioned by time and, optionally, further partitioned by other attributes }
Each record in Druid:
* timestamp
* dimensions
* metrics
Dimensions are bits of data that relate to the topic at hand. For example, building an analytics tool, would be:
* browser type
* page URL
Druid can then allow you to group records with distinct timestamps into a time series and let's you see to see how many times in an hour a particular URL was visited by Firefox.
And that eventually the cardinality of that will increase as you group more and more records together: the metrics part will be aggregated together as they now "cover" the same time period. (Logically. Physically these may reside in seperate segments). ie Druid's idea of what "the same timestamp" is is Waayyyyy more flexible and different from what ie Postgres or Java thinks of as the same timestamp.
(This is the granularity)
> my understanding is that any time it’s creating a segment chunk it will kind of by nature merge any data with matching timestamp and dimensions
- DG
@quote-highlight[#:title "Introduction to Apache Druid · Apache Druid"
#:author "nil"
#:page-number 0
#:url "/"]{Columnar storage format. Druid uses column-oriented storage }
## Configuring the Supervisor for your datastore
@quote-highlight[#:title "Apache Kafka ingestion · Apache Druid"
#:author "nil"
#:page-number 0
#:url "-core/kafka-ingestion.html"]{When a supervisor spec is submitted via the POST /druid/indexer/v1/supervisor endpoint, it is persisted in the configured metadata database. There can only be a single supervisor per dataSource, and submitting a second spec for the same dataSource will overwrite the previous one.
}
Q: Is this only for Kafka, or for all?
## Your datastore schema
### General Schema Design
@quote-highlight[#:title "Schema design tips · Apache Druid"
#:author "nil"
#:page-number 0
#:url "-design.html"]{At the time of this writing, Druid does not support nested dimensions. Nested dimensions need to be flattened }
@quote-highlight[#:title "Schema design tips · Apache Druid"
#:author "nil"
#:page-number 0
#:url "-design.html"]{Druid columns have types specific upfront and Druid does not, at this time, natively support nested data. }
@quote-highlight[#:title "Schema design tips · Apache Druid"
#:author "nil"
#:page-number 0
#:url "-design.html"]{Create metrics corresponding to the types of aggregations that you want to be able to query. Typically this includes "sum", "min", and "max" (in one of the long, float, or double flavors). If you want to be able to compute percentiles or quantiles, use Druid's approximate aggregators. }
@quote-highlight[#:title "Schema design tips · Apache Druid"
#:author "nil"
#:page-number 0
#:url "-design.html"]{Druid does not think of data points as being part of a "time series". Instead, Druid treats each point separately for ingestion and aggregation. }
@quote-highlight[#:title "Schema design tips · Apache Druid"
#:author "nil"
#:page-number 0
#:url "-design.html"]{Druid is not a timeseries database, but it is a natural choice for storing timeseries data. Its flexible data model allows it to store both timeseries and non-timeseries data, even in the same datasource. }
@quote-highlight[#:title "Schema design tips · Apache Druid"
#:author "nil"
#:page-number 0
#:url "-design.html"]{Totally flat schemas substantially increase performance, since the need for joins is eliminated at query time. As an an added speed boost, this also allows Druid's query layer to operate directly on compressed dictionary-encoded data. }
@quote-highlight[#:title "Schema design tips · Apache Druid"
#:author "nil"
#:page-number 0
#:url "-design.html"]{In Druid, on the other hand, it is common to use totally flat datasources that do not require joins at query time }
However, very wide schemas can eventually cause performance issues, as segment size correlates with the number of columns
### Nulls and Druid
@quote-highlight[#:title "Segments · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{By default, Druid string dimension columns use the values '' and null interchangeably and numeric and metric columns can not represent null at all, instead coercing nulls to 0. However, Druid also provides a SQL compatible null handling mode, which must be enabled at the system level, through druid.generic.useDefaultValueForNull. This setting, when set to false, will allow Druid to at ingestion time create segments whose string columns can distinguish '' from null, and numeric columns which can represent null valued rows instead of 0. }
### Rollup
@quote-highlight[#:title "Druid data model · Apache Druid"
#:author "nil"
#:page-number 0
#:url "-model.html"]{Metrics are columns that Druid stores in an aggregated form. Metrics are most useful when you enable rollup. If you specify a metric, you can apply an aggregation function to each row during ingestion }
@quote-highlight[#:title "Druid data model · Apache Druid"
#:author "nil"
#:page-number 0
#:url "-model.html"]{If you disable rollup, then Druid treats the set of dimensions like a set of columns to ingest. The dimensions behave exactly as you would expect from any database that does not support a rollup feature. }
@quote-highlight[#:title "Druid data model · Apache Druid"
#:author "nil"
#:page-number 0
#:url "-model.html"]{Druid also uses the primary timestamp column for time-based data management operations such as dropping time chunks, overwriting time chunks, and time-based retention rules. }
### Druid Specs
A spec couples an ingest source to a destination datasource, plus any transformations
See [Tutorial: writing an ingestion spec](-ingestion-spec.html)
## Segments
@quote-highlight[#:title "Design · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{Druid stores data in files called segments. Historical processes cache data segments on local disk and serve queries from that cache as well as from an in-memory cache. }
@quote-highlight[#:title "Design · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{Each time range is called a chunk (for example, a single day, if your datasource is partitioned by day). Within a chunk, data is partitioned into one or more segments. Each segment is a single file, typically comprising up to a few million rows of data }
@quote-highlight[#:title "Segments · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{For example, if you have hourly segments, but you have more data in an hour than a single segment can hold, you can create multiple segments for the same hour. These segments will share the same datasource, interval, and version, but have linearly increasing partition numbers. }
@quote-highlight[#:title "Segments · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{The smoosh files represent multiple files "smooshed" together in order to minimize the number of file descriptors that must be open to house the data. They are files of up to 2GB in size (to match the limit of a memory mapped ByteBuffer in Java) }
@quote-highlight[#:title "Segments · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{Identifiers for segments are typically constructed using the segment datasource, interval start time (in ISO 8601 format), interval end time (in ISO 8601 format), and a version }
@quote-highlight[#:title "Segments · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{For Druid to operate well under heavy query load, it is important for the segment file size to be within the recommended range of 300MB-700MB. If your segment files are larger than this range, then consider either changing the granularity of the time interval or partitioning your data and tweaking the targetPartitionSize in your partitionsSpec (a good starting point for this parameter is 5 million rows). }
Also note Druid may have performance issues if there are too many segments in a chunk (slow metadata queries, slow coordinator load-drop)
## Data flowing through Druid
@quote-highlight[#:title "Design · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{On the ingestion side, Druid's primary ingestion methods are all pull-based and offer transactional guarantees }
@quote-highlight[#:title "Design · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{On the Coordinator / Historical side:The Coordinator polls the metadata store periodically (by default, every 1 minute) for newly published segments.When the Coordinator finds a segment that is published and used, but unavailable, it chooses a Historical process to load that segment and instructs that Historical to do so.The Historical loads the segment and begins serving it.At this point, if the indexing task was waiting for handoff, it will exit. }
# Ingest
Can ingest from multiple data sources
Can also apply ingest-side filters, transforms and un-nest data
@quote-highlight[#:title "Ingestion · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{Streaming ingestion uses an ongoing process called a supervisor that reads from the data stream to ingest data into Druid }
@quote-highlight[#:title "Introduction to Apache Druid · Apache Druid"
#:author "nil"
#:page-number 0
#:url "/"]{Druid supports streaming inserts, but not streaming updates }
@quote-highlight[#:title "Design · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{Supervised "seekable-stream" ingestion methods like Kafka and Kinesis are idempotent due to the fact that stream offsets and segment metadata are stored together and updated in lock-step. }
## and specs
See [ingestion spec](-spec.html)
## And rollup
@quote-highlight[#:title "Data rollup · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{If you use a best-effort rollup ingestion configuration that does not guarantee perfect rollup, try one of the following:
* Switch to a guaranteed perfect rollup option
* Reindex or compact your data in the background after initial ingestion. }
@quote-highlight[#:title "Data rollup · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{You can optionally load the same data into more than one Druid datasource. For example:Create a "full" datasource that has rollup disabled, or enabled, but with a minimal rollup ratio.Create a second "abbreviated" datasource with fewer dimensions and a higher rollup ratio. When queries only involve dimensions in the "abbreviated" set, use the second datasource to reduce query times. Often, this method only requires a small increase in storage footprint because abbreviated datasources tend to be substantially smaller. }
@quote-highlight[#:title "Data rollup · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{Design your schema with fewer dimensions and lower cardinality dimensions to yield better rollup ratios. }
### See also
* [An explainer about rollup, cardinality, and segments from Netflix](-explainer-about-druid-rollup-cardinality-and-segments-from-netflix/)
## Streaming: From Kafka
@quote-highlight[#:title "Partitioning · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{Kafka topic partitioning defines how Druid partitions the datasource. You can also reindex or compact to repartition after initial ingestion. }
@quote-highlight[#:title "Partitioning · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{Not all ingestion methods support an explicit partitioning configuration, and not all have equivalent levels of flexibility }
@quote-highlight[#:title "Apache Kafka ingestion · Apache Druid"
#:author "nil"
#:page-number 0
#:url "-core/kafka-ingestion.html"]{When you enable the Kafka indexing service, you can configure supervisors on the Overlord to manage the creation and lifetime of Kafka indexing tasks }
# Compaction
Can configure Coordinator to perform automatic compaction, or can manually submit compaction jobs.
## See also
* [Compaction]()
# Storage after ingestion (Deep Storage)
@quote-highlight[#:title "Design · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{Druid uses deep storage to store any data that has been ingested into the system. }
@quote-highlight[#:title "Design · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{When the indexing task has finished reading data for the segment, it pushes it to deep storage and then publishes it by writing a record into the metadata store. }
@quote-highlight[#:title "Introduction to Apache Druid · Apache Druid"
#:author "nil"
#:page-number 0
#:url "/"]{Deep storage is typically cloud storage, HDFS, or a shared filesystem }
While creating the segment (and the chunk files within that segment) the data lives in the MiddleManager / Indexer, then sends to deep storage once the segment is compacted, indexed etc. See [data ingestion in Druid](-ingestion-in-druid-overview/)
# Querying
## Druid SQL
@quote-highlight[#:title "SQL · Apache Druid"
#:author "nil"
#:page-number 0
#:url "#client-apis"]{You can make Druid SQL queries using HTTP via POST to the endpoint /druid/v2/sql/. The request should be a JSON object with a "query" field, like {"query" : "SELECT COUNT(*) FROM data_source WHERE foo = 'bar'"} }
@quote-highlight[#:title "SQL · Apache Druid"
#:author "nil"
#:page-number 0
#:url "#query-translation"]{Currently, Druid does not support pushing down predicates (condition and filter) past a Join (i.e. into Join's children). Druid only supports pushing predicates into the join if they originated from above the join. Hence, the location of predicates and filters in your Druid SQL is very important. }
@quote-highlight[#:title "SQL · Apache Druid"
#:author "nil"
#:page-number 0
#:url "#query-translation"]{Try to avoid subqueries underneath joins: they affect both performance and scalability. }
@quote-highlight[#:title "SQL · Apache Druid"
#:author "nil"
#:page-number 0
#:url "#dynamic-parameters"]{Druid SQL supports dynamic parameters using question mark (?) syntax, where parameters are bound to ? placeholders at execution time. }
@quote-highlight[#:title "SQL · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{The WHERE clause refers to columns in the FROM table, and will be translated to native filters. }
@quote-highlight[#:title "SQL · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{The FROM clause can refer to any of the following:
* Table datasources from the druid schema. This is the default schema, so Druid table datasources can be referenced as either druid.dataSourceName or simply dataSourceName.
* Lookups from the lookup schema, for example lookup.countries. Note that lookups can also be queried using the LOOKUP function.
* Subqueries.Joins between anything in this list, except between native datasources (table, lookup, query) and system tables. The join condition must be an equality between expressions from the left- and right-hand side of the join.
* Metadata tables from the INFORMATION_SCHEMA or sys schemas. Unlike the other options for the FROM clause, metadata tables are not considered datasources. They exist only in the SQL layer. }
## Using console
Can use the dot menu beside the Run option to translate Druid SQL to Native (JSON based) query syntax!
## Architecture
@quote-highlight[#:title "Design · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{Queries are distributed across the Druid cluster, and managed by a Broker. Queries first enter the Broker, which identifies the segments with data that may pertain to that query }
@quote-highlight[#:title "Design · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{On the query side, the Druid Broker is responsible for ensuring that a consistent set of segments is involved in a given query }
@quote-highlight[#:title "Design · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{Broker will then identify which Historicals and MiddleManagers are serving those segments and distributes a rewritten subquery to each of those processes. }
# Ops
@quote-highlight[#:title "Apache Kafka ingestion · Apache Druid"
#:author "nil"
#:page-number 0
#:url "-core/kafka-ingestion.html"]{GET /druid/indexer/v1/supervisor/<supervisorId>/status returns a snapshot report of the current state of the tasks managed by the given supervisor. }
@quote-highlight[#:title "Apache Kafka ingestion · Apache Druid"
#:author "nil"
#:page-number 0
#:url "-core/kafka-ingestion.html"]{The POST /druid/indexer/v1/supervisor/<supervisorId>/reset operation clears stored offsets, causing the supervisor to start reading offsets from either the earliest or latest offsets in Kafka (depending on the value of useEarliestOffset). After clearing stored offsets, the supervisor kills and recreates any active tasks, so that tasks begin reading from valid offsets. }
Broker is not horizontally scalable and keeps metadata for all segments, which takes heap.
Very high number of segments may run into memory map or file descriptor limits on historical instances
Can tell broker to only watch certain segments, and separate these out into tiers
Lookup loading happens - by default - async so may need to watch logs to see if a lookup has failed to load.
Lookups not monitored with Druid Metrics
## and ZK interactions
druid.indexer.runner.maxZnodeBytes
You can limit number of files by ingest task at a time:
* maxSplitSize - max num of bytes in a single subtask
* maxNumFiles - max number of input files to process in a single subtask
But these limit parallelism in big clusters..
You can also set Zookeeper servers and clients to use the JAVA PROPERTY ONLY [jute.maxbuffer]() to make this bigger. Note you can seemingly NOT set this in zoo.cfg
## looking into running a Druid cluster on spot instances
See [Fyber engineering blog: running cost effective Druid cluster on Spot instances](-a-cost-effective-druid-cluster-on-aws-spot-instances/)
### See also
* [the generated specification is too big]()
## metrics
[Druid Metrics]()
# See also
* [Baeldung explains Druid](-druid-event-driven-data) <-- this is REALLY good
* [My Druid Pinboard category](:rwilcox/t:apache_druid/)
# Watching
Neat videos I should watch:
* [Performance Tuning of Druid Cluster at High Scale at ironSource]()
* [Inside Druid's storage and query engine](-apache-druids-storage-and-query-engine/)
| null | https://raw.githubusercontent.com/rwilcox/my-learnings-docs/8326c43cacef76546293453a1fbb47bebb873911/learning_time_series_database_druid.md.rkt | racket | they route requests to Brokers, Coordinators, and Overlords. | #lang scribble/text
@(require "scribble-utils.rkt")
---
path: /learnings/learning_time_series_database_druid
title: Learning Druid
---
# Table Of Contents
<!-- toc -->
# What is Druid
Druid is a time series database that ingests data from various sources (files, streaming) which are controlled via spec files. Users can later query time series data using SQL or druid native query language (a JSON schema), or roll up data to be less granular.
# Druid Setup
@quote-highlight[#:title "Design · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{
Druid has several process types, briefly described below:
* **Coordinator** processes manage data availability on the cluster. The workload on the Coordinator process tends to increase with the number of segments in the cluster. They watch over the Historical processes on the Data servers. They are responsible for assigning segments to specific servers, and for ensuring segments are well-balanced across Historicals.
* **Overlord** processes control the assignment of data ingestion workloads. They watch over the MiddleManager processes on the Data servers and are the controllers of data ingestion into Druid. They are responsible for assigning ingestion tasks to MiddleManagers and for coordinating segment publishing.
* **Broker** processes handle queries from external clients.
* **Historical** processes store queryable data. They handle storage and querying on "historical" data (including any streaming data that has been in the system long enough to be committed). Historical processes download segments from deep storage and respond to queries about these segments. They don't accept writes.
* **MiddleManager** OR **Indexer** processes ingest data. Instead of forking separate JVM processes per-task, the Indexer runs tasks as individual threads within a single JVM process
* **Supervisor** if you are using a streaming ingest somewhere
}
External Dependencies:
* Zookeeper
* ingestion method
* long term storage for segments (DB, block storage or big data cluster)
* metadata storage <-- usually in a mysql or Postgres database
@quote-highlight[#:title "ZooKeeper · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{
The operations that happen over ZK are:
* Coordinator leader election
* Segment "publishing" protocol from Historical <— also called segment announcement, when the historical boots up and needs to tell broker what segments it can query
* Segment load/drop protocol between Coordinator and Historical
* Overlord leader election
* Overlord/MiddleManager task management
* Overlord to Indexer taks management. (Note: generated tasks - ie perfect rollups - may get very large depending on number of segments or metric columns involved)
}
# Design of Druid Data Structure
@quote-highlight[#:title "Design · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{Druid data is stored in datasources, which are similar to tables in a traditional RDBMS. Each datasource is partitioned by time and, optionally, further partitioned by other attributes }
Each record in Druid:
* timestamp
* dimensions
* metrics
Dimensions are bits of data that relate to the topic at hand. For example, building an analytics tool, would be:
* browser type
* page URL
Druid can then allow you to group records with distinct timestamps into a time series and let's you see to see how many times in an hour a particular URL was visited by Firefox.
And that eventually the cardinality of that will increase as you group more and more records together: the metrics part will be aggregated together as they now "cover" the same time period. (Logically. Physically these may reside in seperate segments). ie Druid's idea of what "the same timestamp" is is Waayyyyy more flexible and different from what ie Postgres or Java thinks of as the same timestamp.
(This is the granularity)
> my understanding is that any time it’s creating a segment chunk it will kind of by nature merge any data with matching timestamp and dimensions
- DG
@quote-highlight[#:title "Introduction to Apache Druid · Apache Druid"
#:author "nil"
#:page-number 0
#:url "/"]{Columnar storage format. Druid uses column-oriented storage }
## Configuring the Supervisor for your datastore
@quote-highlight[#:title "Apache Kafka ingestion · Apache Druid"
#:author "nil"
#:page-number 0
#:url "-core/kafka-ingestion.html"]{When a supervisor spec is submitted via the POST /druid/indexer/v1/supervisor endpoint, it is persisted in the configured metadata database. There can only be a single supervisor per dataSource, and submitting a second spec for the same dataSource will overwrite the previous one.
}
Q: Is this only for Kafka, or for all?
## Your datastore schema
### General Schema Design
@quote-highlight[#:title "Schema design tips · Apache Druid"
#:author "nil"
#:page-number 0
#:url "-design.html"]{At the time of this writing, Druid does not support nested dimensions. Nested dimensions need to be flattened }
@quote-highlight[#:title "Schema design tips · Apache Druid"
#:author "nil"
#:page-number 0
#:url "-design.html"]{Druid columns have types specific upfront and Druid does not, at this time, natively support nested data. }
@quote-highlight[#:title "Schema design tips · Apache Druid"
#:author "nil"
#:page-number 0
#:url "-design.html"]{Create metrics corresponding to the types of aggregations that you want to be able to query. Typically this includes "sum", "min", and "max" (in one of the long, float, or double flavors). If you want to be able to compute percentiles or quantiles, use Druid's approximate aggregators. }
@quote-highlight[#:title "Schema design tips · Apache Druid"
#:author "nil"
#:page-number 0
#:url "-design.html"]{Druid does not think of data points as being part of a "time series". Instead, Druid treats each point separately for ingestion and aggregation. }
@quote-highlight[#:title "Schema design tips · Apache Druid"
#:author "nil"
#:page-number 0
#:url "-design.html"]{Druid is not a timeseries database, but it is a natural choice for storing timeseries data. Its flexible data model allows it to store both timeseries and non-timeseries data, even in the same datasource. }
@quote-highlight[#:title "Schema design tips · Apache Druid"
#:author "nil"
#:page-number 0
#:url "-design.html"]{Totally flat schemas substantially increase performance, since the need for joins is eliminated at query time. As an an added speed boost, this also allows Druid's query layer to operate directly on compressed dictionary-encoded data. }
@quote-highlight[#:title "Schema design tips · Apache Druid"
#:author "nil"
#:page-number 0
#:url "-design.html"]{In Druid, on the other hand, it is common to use totally flat datasources that do not require joins at query time }
However, eventually wide columns potentially cause performed issues as segment size correlates with number of columns
### Nulls and Druid
@quote-highlight[#:title "Segments · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{By default, Druid string dimension columns use the values '' and null interchangeably and numeric and metric columns can not represent null at all, instead coercing nulls to 0. However, Druid also provides a SQL compatible null handling mode, which must be enabled at the system level, through druid.generic.useDefaultValueForNull. This setting, when set to false, will allow Druid to at ingestion time create segments whose string columns can distinguish '' from null, and numeric columns which can represent null valued rows instead of 0. }
### Rollup
@quote-highlight[#:title "Druid data model · Apache Druid"
#:author "nil"
#:page-number 0
#:url "-model.html"]{Metrics are columns that Druid stores in an aggregated form. Metrics are most useful when you enable rollup. If you specify a metric, you can apply an aggregation function to each row during ingestion }
@quote-highlight[#:title "Druid data model · Apache Druid"
#:author "nil"
#:page-number 0
#:url "-model.html"]{If you disable rollup, then Druid treats the set of dimensions like a set of columns to ingest. The dimensions behave exactly as you would expect from any database that does not support a rollup feature. }
@quote-highlight[#:title "Druid data model · Apache Druid"
#:author "nil"
#:page-number 0
#:url "-model.html"]{Druid also uses the primary timestamp column for time-based data management operations such as dropping time chunks, overwriting time chunks, and time-based retention rules. }
### Druid Specs
Couples invest source to destination data source + transformations
See [Tutorial: writing an ingestion spec](-ingestion-spec.html)
## Segments
@quote-highlight[#:title "Design · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{Druid stores data in files called segments. Historical processes cache data segments on local disk and serve queries from that cache as well as from an in-memory cache. }
@quote-highlight[#:title "Design · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{Each time range is called a chunk (for example, a single day, if your datasource is partitioned by day). Within a chunk, data is partitioned into one or more segments. Each segment is a single file, typically comprising up to a few million rows of data }
@quote-highlight[#:title "Segments · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{For example, if you have hourly segments, but you have more data in an hour than a single segment can hold, you can create multiple segments for the same hour. These segments will share the same datasource, interval, and version, but have linearly increasing partition numbers. }
@quote-highlight[#:title "Segments · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{The smoosh files represent multiple files "smooshed" together in order to minimize the number of file descriptors that must be open to house the data. They are files of up to 2GB in size (to match the limit of a memory mapped ByteBuffer in Java) }
@quote-highlight[#:title "Segments · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{Identifiers for segments are typically constructed using the segment datasource, interval start time (in ISO 8601 format), interval end time (in ISO 8601 format), and a version }
@quote-highlight[#:title "Segments · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{For Druid to operate well under heavy query load, it is important for the segment file size to be within the recommended range of 300MB-700MB. If your segment files are larger than this range, then consider either changing the granularity of the time interval or partitioning your data and tweaking the targetPartitionSize in your partitionsSpec (a good starting point for this parameter is 5 million rows). }
Also note may have performance issues if too many segments in a chunk (slow metadata queries, slow coordinator load-drop)
## Data flowing through Druid
@quote-highlight[#:title "Design · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{On the ingestion side, Druid's primary ingestion methods are all pull-based and offer transactional guarantees }
@quote-highlight[#:title "Design · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{On the Coordinator / Historical side:The Coordinator polls the metadata store periodically (by default, every 1 minute) for newly published segments.When the Coordinator finds a segment that is published and used, but unavailable, it chooses a Historical process to load that segment and instructs that Historical to do so.The Historical loads the segment and begins serving it.At this point, if the indexing task was waiting for handoff, it will exit. }
# Ingest
Can ingest from multiple data sources
Can also apply ingest side filters, transforms and un-nestle data
@quote-highlight[#:title "Ingestion · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{Streaming ingestion uses an ongoing process called a supervisor that reads from the data stream to ingest data into Druid }
@quote-highlight[#:title "Introduction to Apache Druid · Apache Druid"
#:author "nil"
#:page-number 0
#:url "/"]{Druid supports streaming inserts, but not streaming updates }
@quote-highlight[#:title "Design · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{Supervised "seekable-stream" ingestion methods like Kafka and Kinesis are idempotent due to the fact that stream offsets and segment metadata are stored together and updated in lock-step. }
## and specs
See [ingestion spec](-spec.html)
## And rollup
@quote-highlight[#:title "Data rollup · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{If you use a best-effort rollup ingestion configuration that does not guarantee perfect rollup, try one of the following:
* Switch to a guaranteed perfect rollup option
* Reindex or compact your data in the background after initial ingestion. }
@quote-highlight[#:title "Data rollup · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{You can optionally load the same data into more than one Druid datasource. For example:Create a "full" datasource that has rollup disabled, or enabled, but with a minimal rollup ratio.Create a second "abbreviated" datasource with fewer dimensions and a higher rollup ratio. When queries only involve dimensions in the "abbreviated" set, use the second datasource to reduce query times. Often, this method only requires a small increase in storage footprint because abbreviated datasources tend to be substantially smaller. }
@quote-highlight[#:title "Data rollup · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{Design your schema with fewer dimensions and lower cardinality dimensions to yield better rollup ratios. }
### See also
* [An explainer about rollup, cardinality, and segments from Netflix](-explainer-about-druid-rollup-cardinality-and-segments-from-netflix/)
## Streaming: From Kafka
@quote-highlight[#:title "Partitioning · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{Kafka topic partitioning defines how Druid partitions the datasource. You can also reindex or compact to repartition after initial ingestion. }
@quote-highlight[#:title "Partitioning · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{Not all ingestion methods support an explicit partitioning configuration, and not all have equivalent levels of flexibility }
@quote-highlight[#:title "Apache Kafka ingestion · Apache Druid"
#:author "nil"
#:page-number 0
#:url "-core/kafka-ingestion.html"]{When you enable the Kafka indexing service, you can configure supervisors on the Overlord to manage the creation and lifetime of Kafka indexing tasks }
# Compaction
Can configure Coordinator to perform automatic compaction, or can manually submit compaction jobs.
## See also
* [Compaction]()
# Storage after ingestion (Deep Storage)
@quote-highlight[#:title "Design · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{Druid uses deep storage to store any data that has been ingested into the system. }
@quote-highlight[#:title "Design · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{When the indexing task has finished reading data for the segment, it pushes it to deep storage and then publishes it by writing a record into the metadata store. }
@quote-highlight[#:title "Introduction to Apache Druid · Apache Druid"
#:author "nil"
#:page-number 0
#:url "/"]{Deep storage is typically cloud storage, HDFS, or a shared filesystem }
While creating the segment (and the chunk files within that segment) the data lives in the middlemanager / indexer, then sends to deep storage once segement compacted, indexed etc. See [data ingestion in Druid](-ingestion-in-druid-overview/)
# Querying
## Druid SQL
@quote-highlight[#:title "SQL · Apache Druid"
#:author "nil"
#:page-number 0
#:url "#client-apis"]{You can make Druid SQL queries using HTTP via POST to the endpoint /druid/v2/sql/. The request should be a JSON object with a "query" field, like {"query" : "SELECT COUNT(*) FROM data_source WHERE foo = 'bar'"} }
@quote-highlight[#:title "SQL · Apache Druid"
#:author "nil"
#:page-number 0
#:url "#query-translation"]{Currently, Druid does not support pushing down predicates (condition and filter) past a Join (i.e. into Join's children). Druid only supports pushing predicates into the join if they originated from above the join. Hence, the location of predicates and filters in your Druid SQL is very important. }
@quote-highlight[#:title "SQL · Apache Druid"
#:author "nil"
#:page-number 0
#:url "#query-translation"]{Try to avoid subqueries underneath joins: they affect both performance and scalability. }
@quote-highlight[#:title "SQL · Apache Druid"
#:author "nil"
#:page-number 0
#:url "#dynamic-parameters"]{Druid SQL supports dynamic parameters using question mark (?) syntax, where parameters are bound to ? placeholders at execution time. }
@quote-highlight[#:title "SQL · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{The WHERE clause refers to columns in the FROM table, and will be translated to native filters. }
@quote-highlight[#:title "SQL · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{The FROM clause can refer to any of the following:
* Table datasources from the druid schema. This is the default schema, so Druid table datasources can be referenced as either druid.dataSourceName or simply dataSourceName.
* Lookups from the lookup schema, for example lookup.countries. Note that lookups can also be queried using the LOOKUP function.
* Subqueries.Joins between anything in this list, except between native datasources (table, lookup, query) and system tables. The join condition must be an equality between expressions from the left- and right-hand side of the join.
* Metadata tables from the INFORMATION_SCHEMA or sys schemas. Unlike the other options for the FROM clause, metadata tables are not considered datasources. They exist only in the SQL layer. }
## Using console
Can use the dot menu beside the Run option to translate Druid SQL to Native (JSON based) query syntax!
## Architecture
@quote-highlight[#:title "Design · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{Queries are distributed across the Druid cluster, and managed by a Broker. Queries first enter the Broker, which identifies the segments with data that may pertain to that query }
@quote-highlight[#:title "Design · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{On the query side, the Druid Broker is responsible for ensuring that a consistent set of segments is involved in a given query }
@quote-highlight[#:title "Design · Apache Druid"
#:author "nil"
#:page-number 0
#:url ""]{Broker will then identify which Historicals and MiddleManagers are serving those segments and distributes a rewritten subquery to each of those processes. }
# Ops
@quote-highlight[#:title "Apache Kafka ingestion · Apache Druid"
#:author "nil"
#:page-number 0
#:url "-core/kafka-ingestion.html"]{GET /druid/indexer/v1/supervisor/<supervisorId>/status returns a snapshot report of the current state of the tasks managed by the given supervisor. }
@quote-highlight[#:title "Apache Kafka ingestion · Apache Druid"
#:author "nil"
#:page-number 0
#:url "-core/kafka-ingestion.html"]{The POST /druid/indexer/v1/supervisor/<supervisorId>/reset operation clears stored offsets, causing the supervisor to start reading offsets from either the earliest or latest offsets in Kafka (depending on the value of useEarliestOffset). After clearing stored offsets, the supervisor kills and recreates any active tasks, so that tasks begin reading from valid offsets. }
Broker is not horiz scalable and keeps metadata for all segments. Which takes heap.
Very high number of segments may run into memory map or file descriptor limits on historical instances
Can tell broker to only watch certain segments, and seperate these out into tiers
Lookup loading happens - by default - async so may need to watch logs to see if a lookup has failed to load.
Lookups not monitored with Druid Metrics
## and ZK interactions
Druid.indexer.runner.maxZnkdeBytes
You can limit number of files by ingest task at a time:
* maxSplitSize - max num of bytes in a single subtask
* maxNumFiles - max number of input files to process in a single subtask
But these limit parallelism in big clusters..
You can also set Zookeeper servers and clients to use the JAVA PROPERTY ONLY [jute.maxbuffer]() to make this bigger. note you can seemingly NOT set this in zook.cfg
## looking into running a Druid cluster on spot instances
See [Fyber engineering blog: running cost effective Druid cluster on Spot instances](-a-cost-effective-druid-cluster-on-aws-spot-instances/)
### See also
* [the generated specification is too big]()
## metrics
[Druid Metrics]()
# See also
* [Baeldug explains Druid](-druid-event-driven-data) <-- this is REALLY good
* [My Druid Pinboard category](:rwilcox/t:apache_druid/)
# Watching
Neat videos I should watch:
* [Performance Tuning of Druid Cluster at High Scale at ironSource]()
* [Inside Druid's storage and query engine](-apache-druids-storage-and-query-engine/)
|
4bc89fff9d4c73075ad0e88566f6f5949a80e65715289099d902d17ec8fb7a4e | binaryage/cljs-oops | gcall_dynamic.cljs | (ns oops.arena.gcall-dynamic
(:require [oops.core :refer [gcall+]]
[oops.tools :refer [init-arena-test! done-arena-test! testing]]))
(init-arena-test!)
; we are compiling under advanced mode
(testing "simple dynamic gcall"
(gcall+ (identity "f") "p1" "p2"))
(testing "retageted dynamic gcall"
(gcall+ (identity "a.f") "p1" "p2"))
(done-arena-test!)
| null | https://raw.githubusercontent.com/binaryage/cljs-oops/a2b48d59047c28decb0d6334e2debbf21848e29c/test/src/arena/oops/arena/gcall_dynamic.cljs | clojure | we are compiling under advanced mode | (ns oops.arena.gcall-dynamic
(:require [oops.core :refer [gcall+]]
[oops.tools :refer [init-arena-test! done-arena-test! testing]]))
(init-arena-test!)
(testing "simple dynamic gcall"
(gcall+ (identity "f") "p1" "p2"))
(testing "retageted dynamic gcall"
(gcall+ (identity "a.f") "p1" "p2"))
(done-arena-test!)
|
34a7b86513834e872ac06e541a142188c834e96cc9ca761372fca2affd126049 | RoadRunnr/dtlsex | dtlsex_session_cache_api.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 2008 - 2011 . All Rights Reserved .
%%
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at /.
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% %CopyrightEnd%
%%
%%
-module(dtlsex_session_cache_api).
-include("dtlsex_handshake.hrl").
-include("dtlsex_internal.hrl").
-type key() :: {{host(), inet:port_number()}, session_id()} | {inet:port_number(), session_id()}.
-callback init(list()) -> db_handle().
-callback terminate(db_handle()) -> any().
-callback lookup(db_handle(), key()) -> #session{} | undefined.
-callback update(db_handle(), key(), #session{}) -> any().
-callback delete(db_handle(), key()) -> any().
-callback foldl(fun(), term(), db_handle()) -> term().
-callback select_session(db_handle(), {host(), inet:port_number()} | inet:port_number()) -> [#session{}].
-callback size(db_handle()) -> integer().
| null | https://raw.githubusercontent.com/RoadRunnr/dtlsex/6cb9e52ff00ab0e5f33e0c4b54bf46eacddeb8e7/src/dtlsex_session_cache_api.erl | erlang |
%CopyrightBegin%
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at /.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
%CopyrightEnd%
| Copyright Ericsson AB 2008 - 2011 . All Rights Reserved .
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
-module(dtlsex_session_cache_api).
-include("dtlsex_handshake.hrl").
-include("dtlsex_internal.hrl").
-type key() :: {{host(), inet:port_number()}, session_id()} | {inet:port_number(), session_id()}.
-callback init(list()) -> db_handle().
-callback terminate(db_handle()) -> any().
-callback lookup(db_handle(), key()) -> #session{} | undefined.
-callback update(db_handle(), key(), #session{}) -> any().
-callback delete(db_handle(), key()) -> any().
-callback foldl(fun(), term(), db_handle()) -> term().
-callback select_session(db_handle(), {host(), inet:port_number()} | inet:port_number()) -> [#session{}].
-callback size(db_handle()) -> integer().
|
6f80925227b748efba47d042fef23edff2be6fb48e29c751f2548245d61419c3 | sixohsix/tak | Util.hs | module Tak.Util where
import Tak.Types
import Data.Foldable (Foldable, toList)
clamp :: Int -> Int -> Int -> Int
clamp low high = max low . min high
comboBreakers :: Foldable t => [(a -> Bool)] -> t a -> [Int]
comboBreakers fs lSeq =
cBreakers fs (zip [0..] (toList lSeq))
where
cBreakers _ [] = []
cBreakers [] ((idx, _):rest) = idx:(cBreakers fs rest)
cBreakers (f:ff) ((idx, line):rest) = case f line of
True -> cBreakers (f:ff) rest
False -> cBreakers ff ((idx, line):rest)
moveRelative :: Pos -> Pos -> Pos
moveRelative (Pos ol or) (Pos dl dr) =
if dl <= 0
then Pos ol (or + dr)
else Pos (ol + dl) dr
shift :: Pos -> Pos -> Pos
shift (Pos l0 r0) (Pos l1 r1) = Pos (max 0 (l0 + l1)) (max 0 (r0 + r1))
isModified :: SimpleEditor -> Bool
isModified ed = (lastSavePtr ed) /= 0
| null | https://raw.githubusercontent.com/sixohsix/tak/6310d19faa683156933dde38666c11dc087d79ea/src/Tak/Util.hs | haskell | module Tak.Util where
import Tak.Types
import Data.Foldable (Foldable, toList)
clamp :: Int -> Int -> Int -> Int
clamp low high = max low . min high
comboBreakers :: Foldable t => [(a -> Bool)] -> t a -> [Int]
comboBreakers fs lSeq =
cBreakers fs (zip [0..] (toList lSeq))
where
cBreakers _ [] = []
cBreakers [] ((idx, _):rest) = idx:(cBreakers fs rest)
cBreakers (f:ff) ((idx, line):rest) = case f line of
True -> cBreakers (f:ff) rest
False -> cBreakers ff ((idx, line):rest)
moveRelative :: Pos -> Pos -> Pos
moveRelative (Pos ol or) (Pos dl dr) =
if dl <= 0
then Pos ol (or + dr)
else Pos (ol + dl) dr
shift :: Pos -> Pos -> Pos
shift (Pos l0 r0) (Pos l1 r1) = Pos (max 0 (l0 + l1)) (max 0 (r0 + r1))
isModified :: SimpleEditor -> Bool
isModified ed = (lastSavePtr ed) /= 0
| |
221a4f0142fe4fae6226ee783f7aa77e52359e887775e436a96145c6b81df9b9 | untangled-web/untangled-ui | clip_tool.cljs | (ns untangled.ui.clip-tool
(:require [om.next :as om :refer [defui]]
[om.dom :as dom]
[untangled.client.core :as uc]
[untangled.ui.clip-geometry :as cg]))
(defn refresh-clip-region [this props]
(let [{:keys [url size handle-size] :or {handle-size 10}} props
{:keys [width height]} size
{:keys [canvas image-object clip-region]} (om/get-state this)
aspect-ratio (/ (.-width image-object) (.-height image-object))
w (-> props :size :width)
h (/ w aspect-ratio)
ctx (.getContext canvas "2d")
ul-handle (cg/new-handle (:ul clip-region) handle-size)
lr-handle (cg/new-handle (:lr clip-region) handle-size)]
(when (and url (not= url (.-src image-object)))
(set! (.-src image-object) url))
(cg/draw-rect ctx (cg/->Rectangle (cg/->Point 0 0) (cg/->Point width height)) :solid-white)
(.drawImage ctx image-object 0 0 w h)
(cg/draw-rect ctx clip-region :solid)
(cg/draw-rect ctx ul-handle :solid-black)
(cg/draw-rect ctx lr-handle :solid-black)))
(defn translate-clip-region
"Convert a clip region from clip tool coordinates to image coordinates. The size is the size of the clip tool."
[clip-region size image-object]
(let [{:keys [ul lr]} clip-region
img-w (.-width image-object)
img-h (.-height image-object)
img-aspect (/ img-w img-h)
tool-bbox (cg/->Rectangle (cg/->Point 0 0) (cg/->Point (:width size) (:height size)))
scaled-image-bbox (cg/max-rect tool-bbox img-aspect)
w (cg/width scaled-image-bbox)
scale (/ img-w w)
ul-x (* scale (:x ul))
ul-y (* scale (:y ul))
lr-x (* scale (:x lr))
lr-y (* scale (:y lr))]
(cg/->Rectangle (cg/->Point ul-x ul-y) (cg/->Point lr-x lr-y))))
(defn generate-url [id clip-region size image-object]
(let [image-clip-area (translate-clip-region clip-region size image-object)
ul (:ul image-clip-area)
lr (:lr image-clip-area)]
(str "/assets/" id "/?x1=" (:x ul) "&y1=" (:y ul) "&x2=" (:x lr) "&y2=" (:y lr) "&width=" (+ (cg/width clip-region) 10))
))
(defn constrain-size [old-clip min-size new-clip]
(let [w-new-clip (cg/width new-clip)
h-new-clip (cg/height new-clip)]
(if (or (> min-size w-new-clip) (> min-size h-new-clip)) old-clip new-clip)))
(defn change-cursor [canvas cursor-type]
(set! (.-cursor (.-style canvas)) cursor-type))
(defn constrain-corner [^cg/Rectangle orig-clip ^cg/Rects new-clip aspect-ratio]
(let [ul-new (:ul new-clip)
lr-new (:lr new-clip)
ul-old (:ul orig-clip)
lr-old (:lr orig-clip)
dw (- (cg/width orig-clip) (cg/width new-clip))
dh (- (cg/height orig-clip) (cg/height new-clip))
ul-moving? (or (not= ul-new ul-old))
dx (* dh aspect-ratio)
dy (/ dw aspect-ratio)]
(if ul-moving?
(if (> (Math/abs dw) (Math/abs dh))
(cg/->Rectangle (cg/->Point (:x ul-new) (+ dy (:y ul-old))) lr-old)
(cg/->Rectangle (cg/->Point (+ dx (:x ul-old)) (:y ul-new)) lr-old))
(if (> (Math/abs dw) (Math/abs dh))
(cg/->Rectangle ul-old (cg/->Point (:x lr-new) (- (:y lr-old) dy)))
(cg/->Rectangle ul-old (cg/->Point (- (:x lr-old) dx) (:y lr-new)))))))
(defn dragUL [comp evt]
(let [{:keys [canvas clip-region aspect-ratio min-size origin]} (om/get-state comp)
{:keys [ul lr]} clip-region
target (cg/event->dom-coords evt canvas)
new-ul (cg/diff-translate ul origin target)
new-clip (constrain-size clip-region min-size (constrain-corner clip-region (cg/->Rectangle new-ul (:lr clip-region)) aspect-ratio))]
(change-cursor canvas "nw-resize")
(om/update-state! comp assoc :origin target :clip-region new-clip)))
(defn dragLR [comp evt]
(let [{:keys [canvas clip-region aspect-ratio min-size origin]} (om/get-state comp)
{:keys [ul lr]} clip-region
target (cg/event->dom-coords evt canvas)
new-lr (cg/diff-translate lr origin target)
new-clip (constrain-size clip-region min-size (constrain-corner clip-region (cg/->Rectangle (:ul clip-region) new-lr) aspect-ratio))]
(change-cursor canvas "nw-resize")
(om/update-state! comp assoc :origin target :clip-region new-clip)))
(defn pan [comp evt]
(let [{:keys [canvas clip-region origin]} (om/get-state comp)
target (cg/event->dom-coords evt canvas)
new-clip (cg/diff-translate-rect clip-region origin target)]
(change-cursor canvas "move")
(om/update-state! comp assoc :origin target :clip-region new-clip)))
(defn mouseDown [this evt]
(let [{:keys [canvas clip-region handle-size]} (om/get-state this)
canvas-point (cg/event->dom-coords evt canvas)
ul-handle (cg/new-handle (:ul clip-region) handle-size)
lr-handle (cg/new-handle (:lr clip-region) handle-size)]
(cond
(cg/inside-rect? ul-handle canvas-point) (om/update-state! this assoc :active-operation :drag-ul :origin canvas-point)
(cg/inside-rect? lr-handle canvas-point) (om/update-state! this assoc :active-operation :drag-lr :origin canvas-point)
(cg/inside-rect? clip-region canvas-point) (om/update-state! this assoc :active-operation :pan :origin canvas-point))
(refresh-clip-region this (om/props this))))
(defn mouseUp [this evt]
(let [{:keys [canvas]} (om/get-state this)]
(set! (.-cursor (.-style canvas)) "")
(om/update-state! this assoc :active-operation :none :origin nil)
(refresh-clip-region this (om/props this))))
(defn mouseMoved [this evt onChange]
(let [{:keys [active-operation]} (om/get-state this)
{:keys [size]} (om/props this)]
(case active-operation
:drag-ul (dragUL this evt)
:drag-lr (dragLR this evt)
:pan (pan this evt)
nil)
(when (and onChange (not= active-operation :none))
(let [{:keys [clip-region image-object] :as state} (om/get-state this)]
(onChange (assoc state :clip-region (translate-clip-region clip-region size image-object)))))
(refresh-clip-region this (om/props this))))
(defn set-initial-clip [comp img]
(let [{:keys [aspect-ratio canvas]} (om/get-state comp)
canvas-bbox (cg/->Rectangle (cg/->Point 0 0) (cg/->Point (.-width canvas) (.-height canvas)))
img-aspect (/ (.-width img) (.-height img))
img-bbox (cg/max-rect canvas-bbox img-aspect)
clip (cg/max-rect img-bbox aspect-ratio)]
(om/update-state! comp assoc :clip-region clip)))
(defui ^:once ClipTool
static uc/InitialAppState
(uc/initial-state [clz {:keys [image-url id aspect-ratio handle-size width height] :or {image-url "" id "clip-1"
aspect-ratio 1 width 400 height 400 handle-size 10} :as params}]
{:id id
:url image-url
:aspect-ratio aspect-ratio
:handle-size handle-size
:size {:width width :height height}})
static om/IQuery
(query [this] [:id :url :size :aspect-ratio :handle-size])
static om/Ident
(ident [this props] [:clip-tools/by-id (:id props)])
Object
(initLocalState [this]
(let [img (js/Image.)]
(set! (.-onload img) (fn []
(set-initial-clip this img)
(let [{:keys [size]} (om/props this)
onChange (om/get-computed this :onChange)
{:keys [clip-region]} (om/get-state this)]
(when onChange (onChange (assoc (om/get-state this) :clip-region (translate-clip-region clip-region size img)))))
(refresh-clip-region this (om/props this))))
{:image-object img
:origin (cg/->Point 0 0)
:clip-region (cg/->Rectangle (cg/->Point 0 0)
(cg/->Point 0 0))
:activeOperation :none
:min-size 20}))
(shouldComponentUpdate [this next-props next-state] false)
(componentWillReceiveProps [this props] (refresh-clip-region this props)) ; for URL changes
(componentDidMount [this newprops]
(let [{:keys [url handle-size aspect-ratio size]} (om/props this)
{:keys [image-object clip-region] :as state} (om/get-state this)]
(om/update-state! this assoc :aspect-ratio aspect-ratio :handle-size (or handle-size 10))
(set! (.-src image-object) url)
(refresh-clip-region this newprops)))
(render [this]
(let [{:keys [id size]} (om/props this)
onChange (om/get-computed this :onChange)]
(dom/div #js {:style #js {:width "500px"}}
(dom/canvas #js {:ref (fn [ele] (when ele (om/update-state! this assoc :canvas ele)))
:id id
:width (str (:width size) "px")
:height (str (:height size) "px")
:onMouseDown (fn [evt] (mouseDown this evt))
:onMouseMove (fn [evt] (mouseMoved this evt onChange))
:onMouseUp (fn [evt] (mouseUp this evt))
:className "clip-tool"})))))
(def ui-clip-tool (om/factory ClipTool))
(defn refresh-image [canvas component]
(when (-> component om/props :image-object)
(let [props (om/props component)
{:keys [clip-region image-object]} props
sx (-> clip-region :ul :x)
sy (-> clip-region :ul :y)
sw (cg/width clip-region)
sh (cg/height clip-region)
aspect-ratio (/ sw sh)
w (-> props :width)
h (/ w aspect-ratio)
ctx (.getContext canvas "2d")]
(cg/draw-rect ctx (cg/->Rectangle (cg/->Point 0 0) (cg/->Point w h)) :solid-black)
(.drawImage ctx image-object sx sy sw sh 0 0 w h))))
(defui ^:once PreviewClip
Object
(render [this]
(let [{:keys [filename width height clip-region]} (om/props this)
{:keys [ul lr]} clip-region]
(dom/div #js {:style #js {:position "relative" :top "-400px" :left "500px"}}
(dom/canvas #js {:ref (fn [elem] (when elem
(refresh-image elem this)))
:style #js {:border "1px solid black"}
:width (str width "px")
:height (str height "px")
:className "preview-clip"})
(dom/div nil (str filename
"?x1=" (-> ul :x int)
"&y1=" (-> ul :y int)
"&x2=" (-> lr :x int)
"&y2=" (-> lr :y int)
"&width=" width))))))
(def ui-preview-clip
"Render a preview of a clipped image. "
(om/factory PreviewClip))
| null | https://raw.githubusercontent.com/untangled-web/untangled-ui/ae101f90cd9b7bf5d0c80e9453595fdfe784923c/src/main/untangled/ui/clip_tool.cljs | clojure | for URL changes | (ns untangled.ui.clip-tool
(:require [om.next :as om :refer [defui]]
[om.dom :as dom]
[untangled.client.core :as uc]
[untangled.ui.clip-geometry :as cg]))
(defn refresh-clip-region [this props]
(let [{:keys [url size handle-size] :or {handle-size 10}} props
{:keys [width height]} size
{:keys [canvas image-object clip-region]} (om/get-state this)
aspect-ratio (/ (.-width image-object) (.-height image-object))
w (-> props :size :width)
h (/ w aspect-ratio)
ctx (.getContext canvas "2d")
ul-handle (cg/new-handle (:ul clip-region) handle-size)
lr-handle (cg/new-handle (:lr clip-region) handle-size)]
(when (and url (not= url (.-src image-object)))
(set! (.-src image-object) url))
(cg/draw-rect ctx (cg/->Rectangle (cg/->Point 0 0) (cg/->Point width height)) :solid-white)
(.drawImage ctx image-object 0 0 w h)
(cg/draw-rect ctx clip-region :solid)
(cg/draw-rect ctx ul-handle :solid-black)
(cg/draw-rect ctx lr-handle :solid-black)))
(defn translate-clip-region
"Convert a clip region from clip tool coordinates to image coordinates. The size is the size of the clip tool."
[clip-region size image-object]
(let [{:keys [ul lr]} clip-region
img-w (.-width image-object)
img-h (.-height image-object)
img-aspect (/ img-w img-h)
tool-bbox (cg/->Rectangle (cg/->Point 0 0) (cg/->Point (:width size) (:height size)))
scaled-image-bbox (cg/max-rect tool-bbox img-aspect)
w (cg/width scaled-image-bbox)
scale (/ img-w w)
ul-x (* scale (:x ul))
ul-y (* scale (:y ul))
lr-x (* scale (:x lr))
lr-y (* scale (:y lr))]
(cg/->Rectangle (cg/->Point ul-x ul-y) (cg/->Point lr-x lr-y))))
(defn generate-url [id clip-region size image-object]
(let [image-clip-area (translate-clip-region clip-region size image-object)
ul (:ul image-clip-area)
lr (:lr image-clip-area)]
(str "/assets/" id "/?x1=" (:x ul) "&y1=" (:y ul) "&x2=" (:x lr) "&y2=" (:y lr) "&width=" (+ (cg/width clip-region) 10))
))
(defn constrain-size [old-clip min-size new-clip]
(let [w-new-clip (cg/width new-clip)
h-new-clip (cg/height new-clip)]
(if (or (> min-size w-new-clip) (> min-size h-new-clip)) old-clip new-clip)))
(defn change-cursor [canvas cursor-type]
(set! (.-cursor (.-style canvas)) cursor-type))
(defn constrain-corner [^cg/Rectangle orig-clip ^cg/Rects new-clip aspect-ratio]
(let [ul-new (:ul new-clip)
lr-new (:lr new-clip)
ul-old (:ul orig-clip)
lr-old (:lr orig-clip)
dw (- (cg/width orig-clip) (cg/width new-clip))
dh (- (cg/height orig-clip) (cg/height new-clip))
ul-moving? (or (not= ul-new ul-old))
dx (* dh aspect-ratio)
dy (/ dw aspect-ratio)]
(if ul-moving?
(if (> (Math/abs dw) (Math/abs dh))
(cg/->Rectangle (cg/->Point (:x ul-new) (+ dy (:y ul-old))) lr-old)
(cg/->Rectangle (cg/->Point (+ dx (:x ul-old)) (:y ul-new)) lr-old))
(if (> (Math/abs dw) (Math/abs dh))
(cg/->Rectangle ul-old (cg/->Point (:x lr-new) (- (:y lr-old) dy)))
(cg/->Rectangle ul-old (cg/->Point (- (:x lr-old) dx) (:y lr-new)))))))
(defn dragUL [comp evt]
(let [{:keys [canvas clip-region aspect-ratio min-size origin]} (om/get-state comp)
{:keys [ul lr]} clip-region
target (cg/event->dom-coords evt canvas)
new-ul (cg/diff-translate ul origin target)
new-clip (constrain-size clip-region min-size (constrain-corner clip-region (cg/->Rectangle new-ul (:lr clip-region)) aspect-ratio))]
(change-cursor canvas "nw-resize")
(om/update-state! comp assoc :origin target :clip-region new-clip)))
(defn dragLR [comp evt]
(let [{:keys [canvas clip-region aspect-ratio min-size origin]} (om/get-state comp)
{:keys [ul lr]} clip-region
target (cg/event->dom-coords evt canvas)
new-lr (cg/diff-translate lr origin target)
new-clip (constrain-size clip-region min-size (constrain-corner clip-region (cg/->Rectangle (:ul clip-region) new-lr) aspect-ratio))]
(change-cursor canvas "nw-resize")
(om/update-state! comp assoc :origin target :clip-region new-clip)))
(defn pan [comp evt]
(let [{:keys [canvas clip-region origin]} (om/get-state comp)
target (cg/event->dom-coords evt canvas)
new-clip (cg/diff-translate-rect clip-region origin target)]
(change-cursor canvas "move")
(om/update-state! comp assoc :origin target :clip-region new-clip)))
(defn mouseDown [this evt]
(let [{:keys [canvas clip-region handle-size]} (om/get-state this)
canvas-point (cg/event->dom-coords evt canvas)
ul-handle (cg/new-handle (:ul clip-region) handle-size)
lr-handle (cg/new-handle (:lr clip-region) handle-size)]
(cond
(cg/inside-rect? ul-handle canvas-point) (om/update-state! this assoc :active-operation :drag-ul :origin canvas-point)
(cg/inside-rect? lr-handle canvas-point) (om/update-state! this assoc :active-operation :drag-lr :origin canvas-point)
(cg/inside-rect? clip-region canvas-point) (om/update-state! this assoc :active-operation :pan :origin canvas-point))
(refresh-clip-region this (om/props this))))
(defn mouseUp [this evt]
(let [{:keys [canvas]} (om/get-state this)]
(set! (.-cursor (.-style canvas)) "")
(om/update-state! this assoc :active-operation :none :origin nil)
(refresh-clip-region this (om/props this))))
(defn mouseMoved [this evt onChange]
(let [{:keys [active-operation]} (om/get-state this)
{:keys [size]} (om/props this)]
(case active-operation
:drag-ul (dragUL this evt)
:drag-lr (dragLR this evt)
:pan (pan this evt)
nil)
(when (and onChange (not= active-operation :none))
(let [{:keys [clip-region image-object] :as state} (om/get-state this)]
(onChange (assoc state :clip-region (translate-clip-region clip-region size image-object)))))
(refresh-clip-region this (om/props this))))
(defn set-initial-clip [comp img]
(let [{:keys [aspect-ratio canvas]} (om/get-state comp)
canvas-bbox (cg/->Rectangle (cg/->Point 0 0) (cg/->Point (.-width canvas) (.-height canvas)))
img-aspect (/ (.-width img) (.-height img))
img-bbox (cg/max-rect canvas-bbox img-aspect)
clip (cg/max-rect img-bbox aspect-ratio)]
(om/update-state! comp assoc :clip-region clip)))
(defui ^:once ClipTool
static uc/InitialAppState
(uc/initial-state [clz {:keys [image-url id aspect-ratio handle-size width height] :or {image-url "" id "clip-1"
aspect-ratio 1 width 400 height 400 handle-size 10} :as params}]
{:id id
:url image-url
:aspect-ratio aspect-ratio
:handle-size handle-size
:size {:width width :height height}})
static om/IQuery
(query [this] [:id :url :size :aspect-ratio :handle-size])
static om/Ident
(ident [this props] [:clip-tools/by-id (:id props)])
Object
(initLocalState [this]
(let [img (js/Image.)]
(set! (.-onload img) (fn []
(set-initial-clip this img)
(let [{:keys [size]} (om/props this)
onChange (om/get-computed this :onChange)
{:keys [clip-region]} (om/get-state this)]
(when onChange (onChange (assoc (om/get-state this) :clip-region (translate-clip-region clip-region size img)))))
(refresh-clip-region this (om/props this))))
{:image-object img
:origin (cg/->Point 0 0)
:clip-region (cg/->Rectangle (cg/->Point 0 0)
(cg/->Point 0 0))
:activeOperation :none
:min-size 20}))
(shouldComponentUpdate [this next-props next-state] false)
(componentDidMount [this newprops]
(let [{:keys [url handle-size aspect-ratio size]} (om/props this)
{:keys [image-object clip-region] :as state} (om/get-state this)]
(om/update-state! this assoc :aspect-ratio aspect-ratio :handle-size (or handle-size 10))
(set! (.-src image-object) url)
(refresh-clip-region this newprops)))
(render [this]
(let [{:keys [id size]} (om/props this)
onChange (om/get-computed this :onChange)]
(dom/div #js {:style #js {:width "500px"}}
(dom/canvas #js {:ref (fn [ele] (when ele (om/update-state! this assoc :canvas ele)))
:id id
:width (str (:width size) "px")
:height (str (:height size) "px")
:onMouseDown (fn [evt] (mouseDown this evt))
:onMouseMove (fn [evt] (mouseMoved this evt onChange))
:onMouseUp (fn [evt] (mouseUp this evt))
:className "clip-tool"})))))
(def ui-clip-tool (om/factory ClipTool))
(defn refresh-image [canvas component]
(when (-> component om/props :image-object)
(let [props (om/props component)
{:keys [clip-region image-object]} props
sx (-> clip-region :ul :x)
sy (-> clip-region :ul :y)
sw (cg/width clip-region)
sh (cg/height clip-region)
aspect-ratio (/ sw sh)
w (-> props :width)
h (/ w aspect-ratio)
ctx (.getContext canvas "2d")]
(cg/draw-rect ctx (cg/->Rectangle (cg/->Point 0 0) (cg/->Point w h)) :solid-black)
(.drawImage ctx image-object sx sy sw sh 0 0 w h))))
(defui ^:once PreviewClip
Object
(render [this]
(let [{:keys [filename width height clip-region]} (om/props this)
{:keys [ul lr]} clip-region]
(dom/div #js {:style #js {:position "relative" :top "-400px" :left "500px"}}
(dom/canvas #js {:ref (fn [elem] (when elem
(refresh-image elem this)))
:style #js {:border "1px solid black"}
:width (str width "px")
:height (str height "px")
:className "preview-clip"})
(dom/div nil (str filename
"?x1=" (-> ul :x int)
"&y1=" (-> ul :y int)
"&x2=" (-> lr :x int)
"&y2=" (-> lr :y int)
"&width=" width))))))
(def ui-preview-clip
"Render a preview of a clipped image. "
(om/factory PreviewClip))
|
89298d08b48b7353fac111be4b6c725b9a6d7c557339bde86e03d5b1324b20d6 | anoma/juvix | Internal.hs | module Juvix.Compiler.Internal
( module Juvix.Compiler.Internal.Language,
module Juvix.Compiler.Internal.Data,
module Juvix.Compiler.Internal.Pretty,
module Juvix.Compiler.Internal.Translation,
)
where
import Juvix.Compiler.Internal.Data
import Juvix.Compiler.Internal.Language
import Juvix.Compiler.Internal.Pretty
import Juvix.Compiler.Internal.Translation
| null | https://raw.githubusercontent.com/anoma/juvix/ff39db3319b0478f0f3fc82f61d1746a27e13427/src/Juvix/Compiler/Internal.hs | haskell | module Juvix.Compiler.Internal
( module Juvix.Compiler.Internal.Language,
module Juvix.Compiler.Internal.Data,
module Juvix.Compiler.Internal.Pretty,
module Juvix.Compiler.Internal.Translation,
)
where
import Juvix.Compiler.Internal.Data
import Juvix.Compiler.Internal.Language
import Juvix.Compiler.Internal.Pretty
import Juvix.Compiler.Internal.Translation
| |
3931483352ea63375b9d6d1ba733fc56e17d8b03c6bba505b92be221c8b59540 | hopv/MoCHi | HCCSSolver.ml | open Util
open Combinator
type t = HCCS.t -> PredSubst.t
exception NoSolution
exception UnsolvableCore of string list
exception Unknown
* { 6 Dynamically linked solvers }
let ext_solve_duality =
ref (fun _ -> assert false : t)
let solve_duality hcs =
Logger.log_block1 "HCCSSolver.solve_duality" !ext_solve_duality hcs
let ext_solve_pdr =
ref (fun _ -> assert false : t)
let solve_pdr hcs =
Logger.log_block1 "HCCSSolver.solve_pdr" !ext_solve_pdr hcs
let ref_solver = ref (fun _ -> assert false : t)
let link_dyn solver = ref_solver := solver
let get_dyn () = !ref_solver
let ext_solve_unit = ref (fun _ _ -> assert false : t -> t)
let ext_solve_bool = ref (fun _ _ -> assert false : t -> t)
let check_solvability_first = ref false
let solve_dyn hcs =
if !check_solvability_first && not (FwHCCSSolver.is_solvable hcs) then
begin
Logger.printf "not solvable:@, %a@," HCCS.pr hcs;
raise NoSolution
end;
hcs
|> (!ref_solver
this should be first
this should be first
|> CheckHCCSSolver.solve
|> !ext_solve_unit |> !ext_solve_bool)
let solve_dyn =
Logger.log_block1 "HCCSSolver.solve_dyn"
~after:(Logger.printf "solution:@, %a" PredSubst.pr)
solve_dyn
let ext_of_string = ref (fun _ -> assert false)
let of_string_dyn str = !ext_of_string str
| null | https://raw.githubusercontent.com/hopv/MoCHi/b0ac0d626d64b1e3c779d8e98cb232121cc3196a/fpat/HCCSSolver.ml | ocaml | open Util
open Combinator
type t = HCCS.t -> PredSubst.t
exception NoSolution
exception UnsolvableCore of string list
exception Unknown
* { 6 Dynamically linked solvers }
let ext_solve_duality =
ref (fun _ -> assert false : t)
let solve_duality hcs =
Logger.log_block1 "HCCSSolver.solve_duality" !ext_solve_duality hcs
let ext_solve_pdr =
ref (fun _ -> assert false : t)
let solve_pdr hcs =
Logger.log_block1 "HCCSSolver.solve_pdr" !ext_solve_pdr hcs
let ref_solver = ref (fun _ -> assert false : t)
let link_dyn solver = ref_solver := solver
let get_dyn () = !ref_solver
let ext_solve_unit = ref (fun _ _ -> assert false : t -> t)
let ext_solve_bool = ref (fun _ _ -> assert false : t -> t)
let check_solvability_first = ref false
let solve_dyn hcs =
if !check_solvability_first && not (FwHCCSSolver.is_solvable hcs) then
begin
Logger.printf "not solvable:@, %a@," HCCS.pr hcs;
raise NoSolution
end;
hcs
|> (!ref_solver
this should be first
this should be first
|> CheckHCCSSolver.solve
|> !ext_solve_unit |> !ext_solve_bool)
let solve_dyn =
Logger.log_block1 "HCCSSolver.solve_dyn"
~after:(Logger.printf "solution:@, %a" PredSubst.pr)
solve_dyn
let ext_of_string = ref (fun _ -> assert false)
let of_string_dyn str = !ext_of_string str
| |
baa6a888eb8b32e9378538e75c482e6458509c22f5d95e7d1f3bfad1aec99b79 | OCamlPro/drom | main.ml | !{header-ml}
open Ppxlib
open Ast_builder.Default
let verbose = match Sys.getenv_opt "!{name:upp}_DEBUG" with
| None | Some "0" | Some "false" | Some "no" -> 0
| Some s ->
match s with
| "true" -> 1
| s -> match int_of_string_opt s with
| Some i -> i
| None -> 0
let dprintf ?(v=1) ?(force=false) fmt =
if force || verbose >= v then Format.ksprintf (fun s -> Format.eprintf "%s@." s) fmt
else Printf.ifprintf () fmt
let expand_ext ~loc ~path:_ expr = match expr.pexp_desc with
| Pexp_record (l, _) ->
let e = pexp_tuple ~loc (List.map snd l) in
dprintf "%s\nchanged in\n%s\n"
(Pprintast.string_of_expression expr)
(Pprintast.string_of_expression e);
e
| _ -> expr
let extension_ext =
Extension.declare "ext"
Extension.Context.expression
Ast_pattern.(single_expr_payload __)
expand_ext
let rule_ext = Context_free.Rule.extension extension_ext
let () =
Driver.register_transformation "ppx_ext" ~rules:[rule_ext]
| null | https://raw.githubusercontent.com/OCamlPro/drom/7bc86026e0e170f1a468b437d369aefc5dbea5d7/src/drom_lib/share/drom/skeletons/packages/ppx_rewriter/main.ml | ocaml | !{header-ml}
open Ppxlib
open Ast_builder.Default
let verbose = match Sys.getenv_opt "!{name:upp}_DEBUG" with
| None | Some "0" | Some "false" | Some "no" -> 0
| Some s ->
match s with
| "true" -> 1
| s -> match int_of_string_opt s with
| Some i -> i
| None -> 0
let dprintf ?(v=1) ?(force=false) fmt =
if force || verbose >= v then Format.ksprintf (fun s -> Format.eprintf "%s@." s) fmt
else Printf.ifprintf () fmt
let expand_ext ~loc ~path:_ expr = match expr.pexp_desc with
| Pexp_record (l, _) ->
let e = pexp_tuple ~loc (List.map snd l) in
dprintf "%s\nchanged in\n%s\n"
(Pprintast.string_of_expression expr)
(Pprintast.string_of_expression e);
e
| _ -> expr
let extension_ext =
Extension.declare "ext"
Extension.Context.expression
Ast_pattern.(single_expr_payload __)
expand_ext
let rule_ext = Context_free.Rule.extension extension_ext
let () =
Driver.register_transformation "ppx_ext" ~rules:[rule_ext]
| |
5e269d8e4d021f41b8ab120f3b8a0a924fcccbe1aadce00e88163051e82abcdc | alt-romes/slfl | MakeMeWorkState.hs | data State b a = State (!b -o (a * !b));
synth runState :: State b a -o (!b -o (a * !b));
synth bind :: State c a -o (a -o State c b) -o State c b;
#synth bind :: (State c a -o (a -o State c b) -o State c b) | using (runState);
synth return :: a -o State b a;
synth get :: State a a;
synth put :: !a -o (State a 1);
synth modify :: (!a -o !a) -o State a 1;
synth evalState :: State b a -o !b -o a;
main = runState (bind (return 2) (\x -> return x)) (!0);
| null | https://raw.githubusercontent.com/alt-romes/slfl/4956fcce8ff2ca7622799fe0715c118b568b74eb/STLLC/MakeMeWorkState.hs | haskell | data State b a = State (!b -o (a * !b));
synth runState :: State b a -o (!b -o (a * !b));
synth bind :: State c a -o (a -o State c b) -o State c b;
#synth bind :: (State c a -o (a -o State c b) -o State c b) | using (runState);
synth return :: a -o State b a;
synth get :: State a a;
synth put :: !a -o (State a 1);
synth modify :: (!a -o !a) -o State a 1;
synth evalState :: State b a -o !b -o a;
main = runState (bind (return 2) (\x -> return x)) (!0);
| |
081b3dc91ec571da01591a2ce99a73c00c7de9d652ed3f09053bc2247d2782a9 | crategus/cl-cffi-gtk | rtest-gobject-enumeration.lisp | (def-suite gobject-enumeration :in gobject-suite)
(in-suite gobject-enumeration)
;;; --- Types and Values -------------------------------------------------------
(test define-g-enum-macro
(is (equal '(PROGN
(DEFCENUM (GTK-WINDOW-TYPE :INT)
(:TOPLEVEL 0)
(:POPUP 1))
(GOBJECT::REGISTER-ENUM-TYPE "GtkWindowType" 'GTK-WINDOW-TYPE)
(EXPORT 'GTK-WINDOW-TYPE (FIND-PACKAGE "GTK"))
(GLIB-INIT::AT-INIT NIL
(IF (FOREIGN-SYMBOL-POINTER "gtk_window_type_get_type")
(FOREIGN-FUNCALL-POINTER
(FOREIGN-SYMBOL-POINTER "gtk_window_type_get_type")
NIL
G-SIZE)
(WARN "Type initializer '~A' is not available"
"gtk_window_type_get_type"))))
(macroexpand '(define-g-enum "GtkWindowType" gtk-window-type
(:export t
:type-initializer "gtk_window_type_get_type")
(:toplevel 0)
(:popup 1))))))
(test define-g-flags-macro
(is (equal '(PROGN
(DEFBITFIELD GDK-DRAG-ACTION
(:DEFAULT 1)
(:COPY 2)
(:MOVE 4)
(:LINK 8)
(:PRIVATE 16)
(:ASK 32))
(GOBJECT::REGISTER-FLAGS-TYPE "GdkDragAction" 'GDK-DRAG-ACTION)
(EXPORT 'GDK-DRAG-ACTION (FIND-PACKAGE "GDK"))
(GLIB-INIT::AT-INIT NIL
(IF (FOREIGN-SYMBOL-POINTER "gdk_drag_action_get_type")
(FOREIGN-FUNCALL-POINTER
(FOREIGN-SYMBOL-POINTER "gdk_drag_action_get_type") NIL
G-SIZE)
(WARN "Type initializer '~A' is not available"
"gdk_drag_action_get_type"))))
(macroexpand '(define-g-flags "GdkDragAction" gdk-drag-action
(:export t
:type-initializer "gdk_drag_action_get_type")
(:default 1)
(:copy 2)
(:move 4)
(:link 8)
(:private 16)
(:ask 32))))))
;;; g-enum-class
(test g-enum-class
(is (= 32 (foreign-type-size '(:struct g-enum-class))))
(is (equal '(:maximum :minimum :n-values :type-class :values)
(stable-sort (foreign-slot-names '(:struct g-enum-class))
#'string-lessp))))
;;; g-enum-value
(test g-enum-value
(is (= 24 (foreign-type-size '(:struct g-enum-value))))
(is (equal '(:name :nick :value)
(stable-sort (foreign-slot-names '(:struct g-enum-value))
#'string-lessp))))
;;; g-flags-class
(test g-flags-class
(is (= 24 (foreign-type-size '(:struct g-flags-class))))
(is (equal '(:mask :n-values :type-class :values)
(stable-sort (foreign-slot-names '(:struct g-flags-class))
#'string-lessp))))
;;; g-flags-value
(test g-flags-value
(is (= 24 (foreign-type-size '(:struct g-flags-value))))
(is (equal '(:name :nick :value)
(stable-sort (foreign-slot-names '(:struct g-flags-value))
#'string-lessp))))
;;; --- Functions --------------------------------------------------------------
;;; G_ENUM_CLASS_TYPE
;;; G_ENUM_CLASS_TYPE_NAME
;;; g-type-is-enum
(test g-type-is-enum
(is-false (g-type-is-enum "GtkDialogFlags"))
(is-true (g-type-is-enum "GtkWindowType"))
(is-false (g-type-is-enum "GdkWindow")))
;;; G_ENUM_CLASS
;;; G_IS_ENUM_CLASS
;;; G_TYPE_IS_FLAGS
(test g-type-is-enum
(is-true (g-type-is-flags "GtkDialogFlags"))
(is-false (g-type-is-flags "GtkWindowType"))
(is-false (g-type-is-flags "GdkWindow")))
;;; G_FLAGS_CLASS
;;; G_IS_FLAGS_CLASS
G_FLAGS_CLASS_TYPE
;;; g_enum_get_value
;;; g_enum_get_value_by_name
;;; g_enum_get_value_by_nick
;;; g_enum_to_string
;;; g_flags_get_first_value
;;; g_flags_get_value_by_name
g_flags_get_value_by_nick
;;; g_flags_to_string
g_enum_register_static
;;; g_flags_register_static
;;; g_enum_complete_type_info
;;; g_flags_complete_type_info
2021 - 4 - 7
| null | https://raw.githubusercontent.com/crategus/cl-cffi-gtk/27bdcefb703e7ae144f506929f1935468b6987ad/test/rtest-gobject-enumeration.lisp | lisp | --- Types and Values -------------------------------------------------------
g-enum-class
g-enum-value
g-flags-class
g-flags-value
--- Functions --------------------------------------------------------------
G_ENUM_CLASS_TYPE
G_ENUM_CLASS_TYPE_NAME
g-type-is-enum
G_ENUM_CLASS
G_IS_ENUM_CLASS
G_TYPE_IS_FLAGS
G_FLAGS_CLASS
G_IS_FLAGS_CLASS
g_enum_get_value
g_enum_get_value_by_name
g_enum_get_value_by_nick
g_enum_to_string
g_flags_get_first_value
g_flags_get_value_by_name
g_flags_to_string
g_flags_register_static
g_enum_complete_type_info
g_flags_complete_type_info | (def-suite gobject-enumeration :in gobject-suite)
(in-suite gobject-enumeration)
(test define-g-enum-macro
(is (equal '(PROGN
(DEFCENUM (GTK-WINDOW-TYPE :INT)
(:TOPLEVEL 0)
(:POPUP 1))
(GOBJECT::REGISTER-ENUM-TYPE "GtkWindowType" 'GTK-WINDOW-TYPE)
(EXPORT 'GTK-WINDOW-TYPE (FIND-PACKAGE "GTK"))
(GLIB-INIT::AT-INIT NIL
(IF (FOREIGN-SYMBOL-POINTER "gtk_window_type_get_type")
(FOREIGN-FUNCALL-POINTER
(FOREIGN-SYMBOL-POINTER "gtk_window_type_get_type")
NIL
G-SIZE)
(WARN "Type initializer '~A' is not available"
"gtk_window_type_get_type"))))
(macroexpand '(define-g-enum "GtkWindowType" gtk-window-type
(:export t
:type-initializer "gtk_window_type_get_type")
(:toplevel 0)
(:popup 1))))))
(test define-g-flags-macro
(is (equal '(PROGN
(DEFBITFIELD GDK-DRAG-ACTION
(:DEFAULT 1)
(:COPY 2)
(:MOVE 4)
(:LINK 8)
(:PRIVATE 16)
(:ASK 32))
(GOBJECT::REGISTER-FLAGS-TYPE "GdkDragAction" 'GDK-DRAG-ACTION)
(EXPORT 'GDK-DRAG-ACTION (FIND-PACKAGE "GDK"))
(GLIB-INIT::AT-INIT NIL
(IF (FOREIGN-SYMBOL-POINTER "gdk_drag_action_get_type")
(FOREIGN-FUNCALL-POINTER
(FOREIGN-SYMBOL-POINTER "gdk_drag_action_get_type") NIL
G-SIZE)
(WARN "Type initializer '~A' is not available"
"gdk_drag_action_get_type"))))
(macroexpand '(define-g-flags "GdkDragAction" gdk-drag-action
(:export t
:type-initializer "gdk_drag_action_get_type")
(:default 1)
(:copy 2)
(:move 4)
(:link 8)
(:private 16)
(:ask 32))))))
(test g-enum-class
(is (= 32 (foreign-type-size '(:struct g-enum-class))))
(is (equal '(:maximum :minimum :n-values :type-class :values)
(stable-sort (foreign-slot-names '(:struct g-enum-class))
#'string-lessp))))
(test g-enum-value
(is (= 24 (foreign-type-size '(:struct g-enum-value))))
(is (equal '(:name :nick :value)
(stable-sort (foreign-slot-names '(:struct g-enum-value))
#'string-lessp))))
(test g-flags-class
(is (= 24 (foreign-type-size '(:struct g-flags-class))))
(is (equal '(:mask :n-values :type-class :values)
(stable-sort (foreign-slot-names '(:struct g-flags-class))
#'string-lessp))))
(test g-flags-value
(is (= 24 (foreign-type-size '(:struct g-flags-value))))
(is (equal '(:name :nick :value)
(stable-sort (foreign-slot-names '(:struct g-flags-value))
#'string-lessp))))
(test g-type-is-enum
(is-false (g-type-is-enum "GtkDialogFlags"))
(is-true (g-type-is-enum "GtkWindowType"))
(is-false (g-type-is-enum "GdkWindow")))
(test g-type-is-enum
(is-true (g-type-is-flags "GtkDialogFlags"))
(is-false (g-type-is-flags "GtkWindowType"))
(is-false (g-type-is-flags "GdkWindow")))
G_FLAGS_CLASS_TYPE
g_flags_get_value_by_nick
g_enum_register_static
2021 - 4 - 7
|
e1d94022a4732495c76b26729c37c3871eb540eb81e0f46f6b76ba7d2c1551e6 | v-kolesnikov/sicp | 2_05_test.clj | (ns sicp.chapter02.2-05-test
(:require [clojure.test :refer :all]
[sicp.chapter02.2-05 :as sicp-2-05]
[sicp.test-helper :refer :all]))
(deftest car-test
(assert-equal 3 (sicp-2-05/car (sicp-2-05/cons 3 8)))
(assert-equal 5 (sicp-2-05/car (sicp-2-05/cons 5 0))))
(deftest cdr-test
(assert-equal 8 (sicp-2-05/cdr (sicp-2-05/cons 3 8)))
(assert-equal 0 (sicp-2-05/cdr (sicp-2-05/cons 5 0))))
| null | https://raw.githubusercontent.com/v-kolesnikov/sicp/4298de6083440a75898e97aad658025a8cecb631/test/sicp/chapter02/2_05_test.clj | clojure | (ns sicp.chapter02.2-05-test
(:require [clojure.test :refer :all]
[sicp.chapter02.2-05 :as sicp-2-05]
[sicp.test-helper :refer :all]))
(deftest car-test
(assert-equal 3 (sicp-2-05/car (sicp-2-05/cons 3 8)))
(assert-equal 5 (sicp-2-05/car (sicp-2-05/cons 5 0))))
(deftest cdr-test
(assert-equal 8 (sicp-2-05/cdr (sicp-2-05/cons 3 8)))
(assert-equal 0 (sicp-2-05/cdr (sicp-2-05/cons 5 0))))
| |
49fbf28ed9e060f41d57d3ee9b1a1d1ebc067bd857dd39bca4b04d149fd71fb8 | BillHallahan/G2 | PolyRef.hs | # LANGUAGE DeriveFunctor #
# LANGUAGE DeriveFoldable #
{-# LANGUAGE DeriveTraversable#-}
# LANGUAGE FlexibleContexts #
# LANGUAGE TupleSections #
module G2.Liquid.Inference.PolyRef ( PolyBound (.. )
, RefNamePolyBound
, ExprPolyBound
, extractExprPolyBoundWithRoot
, extractExprPolyBound
, extractTypePolyBound
, extractTypeAppAndFuncPolyBound
, headValue
, removeHead
, extractValues
, uniqueIds
, mapPB
, filterPB
, allPB
, zipPB
, zipWithPB
, zipWithMaybePB
, zip3PB) where
import G2.Language
import qualified Data.HashMap.Lazy as HM
import Data.List
import Data.Maybe
type RefNamePolyBound = PolyBound String
type ExprPolyBound = PolyBound [Expr]
type TypePolyBound = PolyBound Type
-- | The subexpressions of an expression corresponding to the polymorphic
-- arguments. If a polymorphic argument is instantiated with a polymorphic
-- type, these are nested recursively.
data PolyBound v = PolyBound v [PolyBound v] deriving (Eq, Read, Show, Functor, Foldable, Traversable)
-------------------------------
-- ExprPolyBound
-------------------------------
extractExprPolyBoundWithRoot :: Expr -> ExprPolyBound
extractExprPolyBoundWithRoot e = PolyBound [e] $ extractExprPolyBound e
extractExprPolyBound :: Expr -> [ExprPolyBound]
extractExprPolyBound e
| Data dc:_ <- unApp e =
let
bound = leadingTyForAllBindings dc
m = extractExprPolyBound' e
bound_es = map (\i -> HM.lookupDefault [] i m) bound
in
map (\es -> PolyBound es (mergeExprPolyBound . transpose $ map extractExprPolyBound es)) bound_es
| otherwise = []
mergeExprPolyBound :: [[ExprPolyBound]] -> [ExprPolyBound]
mergeExprPolyBound = mapMaybe (\pb -> case pb of
(p:pbb) -> Just $ foldr mergeExprPolyBound' p pbb
[] -> Nothing)
mergeExprPolyBound' :: ExprPolyBound -> ExprPolyBound -> ExprPolyBound
mergeExprPolyBound' (PolyBound es1 pb1) (PolyBound es2 pb2) =
PolyBound (es1 ++ es2) (map (uncurry mergeExprPolyBound') $ zip pb1 pb2)
extractExprPolyBound' :: Expr -> HM.HashMap Id [Expr]
extractExprPolyBound' e
| Data dc:es <- unApp e =
let
es' = filter (not . isType) es
argtys = argumentTypes . PresType . inTyForAlls $ typeOf dc
argtys_es = zip argtys es'
(direct, indirect) = partition fstIsTyVar argtys_es
direct' = mapMaybe fstMapTyVar direct
indirect' = map (uncurry substTypes) indirect
direct_hm = foldr (HM.unionWith (++)) HM.empty
$ map (\(i, e_) -> uncurry HM.singleton (i, e_:[])) direct'
in
foldr (HM.unionWith (++)) direct_hm $ map (extractExprPolyBound' . adjustIndirectTypes) indirect'
| otherwise = HM.empty
where
isType (Type _) = True
isType _ = False
fstIsTyVar (TyVar _, _) = True
fstIsTyVar _ = False
fstMapTyVar (TyVar i, x) = Just (i, x)
fstMapTyVar _ = Nothing
substTypes :: Type -> Expr -> Expr
substTypes t e
| _:ts <- unTyApp t
, e':es <- unApp e =
mkApp $ e':substTypes' ts es
substTypes _ e = e
substTypes' :: [Type] -> [Expr] -> [Expr]
substTypes' (t:ts) (Type _:es) = Type t:substTypes' ts es
substTypes' _ es = es
adjustIndirectTypes :: Expr -> Expr
adjustIndirectTypes e
| Data dc:es <- unApp e =
let
tyses = filter (isType) es
tyses' = map (\(Type t) -> t) tyses
bound = leadingTyForAllBindings dc
bound_tyses = zip bound tyses'
in
mkApp $ Data (foldr (uncurry retype) dc $ bound_tyses):es
| otherwise = e
where
isType (Type _) = True
isType _ = False
-------------------------------
-- TypePolyBound
-------------------------------
-- | Unrolls TyApp'ed args, while also keeping them in the base type
extractTypePolyBound :: Type -> TypePolyBound
extractTypePolyBound t =
let
(_:ts) = unTyApp t
in
PolyBound t $ map extractTypePolyBound ts
| Unrolls TyApp'ed and TyFunc'ed args , while also keeping them in the base type
extractTypeAppAndFuncPolyBound :: Type -> TypePolyBound
extractTypeAppAndFuncPolyBound t@(TyApp _ _) =
let
(_:ts) = unTyApp t
in
PolyBound t $ map extractTypePolyBound ts
extractTypeAppAndFuncPolyBound t@(TyFun _ _) =
let
ts = splitTyFuns t
in
PolyBound t $ map extractTypePolyBound ts
extractTypeAppAndFuncPolyBound t = PolyBound t []
-------------------------------
Generic PolyBound functions
-------------------------------
headValue :: PolyBound v -> v
headValue (PolyBound v _) = v
removeHead :: PolyBound v -> [PolyBound v]
removeHead (PolyBound _ vs) = vs
extractValues :: PolyBound v -> [v]
extractValues (PolyBound v ps) = v:concatMap extractValues ps
uniqueIds :: PolyBound v -> PolyBound Int
uniqueIds = snd . uniqueIds' 0
uniqueIds' :: Int -> PolyBound v -> (Int, PolyBound Int)
uniqueIds' n (PolyBound _ ps) =
let
(n', ps') = mapAccumR (uniqueIds') (n + 1) ps
in
(n', PolyBound n ps')
mapPB :: (a -> b) -> PolyBound a -> PolyBound b
mapPB f (PolyBound v ps) = PolyBound (f v) (map (mapPB f) ps)
filterPB :: (PolyBound a -> Bool) -> PolyBound a -> Maybe (PolyBound a)
filterPB p pb@(PolyBound v xs) =
case p pb of
True -> Just $ PolyBound v (mapMaybe (filterPB p) xs)
False -> Nothing
allPB :: (a -> Bool) -> PolyBound a -> Bool
allPB p = all p . extractValues
zipPB :: PolyBound a -> PolyBound b -> PolyBound (a, b)
zipPB (PolyBound a pba) (PolyBound b pbb) = PolyBound (a, b) (zipWith zipPB pba pbb)
zipWithPB :: (a -> b -> c) -> PolyBound a -> PolyBound b -> PolyBound c
zipWithPB f (PolyBound a pba) (PolyBound b pbb) = PolyBound (f a b) (zipWith (zipWithPB f) pba pbb)
zipWithMaybePB :: (Maybe a -> Maybe b -> c) -> PolyBound a -> PolyBound b -> PolyBound c
zipWithMaybePB f pba pbb = zipWithMaybePB' f (mapPB Just pba) (mapPB Just pbb)
zipWithMaybePB' :: (Maybe a -> Maybe b -> c) -> PolyBound (Maybe a) -> PolyBound (Maybe b) -> PolyBound c
zipWithMaybePB' f (PolyBound a pba) (PolyBound b pbb) =
let
c = f a b
rep_nt = repeat (PolyBound Nothing [])
pbc = takeWhile (\(x, y) -> isJust (headValue x) || isJust (headValue y))
$ zip (pba ++ rep_nt) (pbb ++ rep_nt)
in
PolyBound c $ map (uncurry (zipWithMaybePB' f)) pbc
zip3PB :: PolyBound a -> PolyBound b -> PolyBound c -> PolyBound (a, b, c)
zip3PB (PolyBound a pba) (PolyBound b pbb) (PolyBound c pbc) =
PolyBound (a, b, c) (zipWith3 zip3PB pba pbb pbc)
| null | https://raw.githubusercontent.com/BillHallahan/G2/0683a633f9dad2cd5066d2515645fa79bb105401/src/G2/Liquid/Inference/PolyRef.hs | haskell | # LANGUAGE DeriveTraversable#
| The subexpressions of an expression corresponding to the polymorphic
arguments. If a polymorphic argument is instantiated with a polymorphic
type, these are nested recursively.
-----------------------------
ExprPolyBound
-----------------------------
-----------------------------
TypePolyBound
-----------------------------
| Unrolls TyApp'ed args, while also keeping them in the base type
-----------------------------
----------------------------- | # LANGUAGE DeriveFunctor #
# LANGUAGE DeriveFoldable #
# LANGUAGE FlexibleContexts #
# LANGUAGE TupleSections #
module G2.Liquid.Inference.PolyRef ( PolyBound (.. )
, RefNamePolyBound
, ExprPolyBound
, extractExprPolyBoundWithRoot
, extractExprPolyBound
, extractTypePolyBound
, extractTypeAppAndFuncPolyBound
, headValue
, removeHead
, extractValues
, uniqueIds
, mapPB
, filterPB
, allPB
, zipPB
, zipWithPB
, zipWithMaybePB
, zip3PB) where
import G2.Language
import qualified Data.HashMap.Lazy as HM
import Data.List
import Data.Maybe
type RefNamePolyBound = PolyBound String
type ExprPolyBound = PolyBound [Expr]
type TypePolyBound = PolyBound Type
data PolyBound v = PolyBound v [PolyBound v] deriving (Eq, Read, Show, Functor, Foldable, Traversable)
extractExprPolyBoundWithRoot :: Expr -> ExprPolyBound
extractExprPolyBoundWithRoot e = PolyBound [e] $ extractExprPolyBound e
extractExprPolyBound :: Expr -> [ExprPolyBound]
extractExprPolyBound e
| Data dc:_ <- unApp e =
let
bound = leadingTyForAllBindings dc
m = extractExprPolyBound' e
bound_es = map (\i -> HM.lookupDefault [] i m) bound
in
map (\es -> PolyBound es (mergeExprPolyBound . transpose $ map extractExprPolyBound es)) bound_es
| otherwise = []
mergeExprPolyBound :: [[ExprPolyBound]] -> [ExprPolyBound]
mergeExprPolyBound = mapMaybe (\pb -> case pb of
(p:pbb) -> Just $ foldr mergeExprPolyBound' p pbb
[] -> Nothing)
mergeExprPolyBound' :: ExprPolyBound -> ExprPolyBound -> ExprPolyBound
mergeExprPolyBound' (PolyBound es1 pb1) (PolyBound es2 pb2) =
PolyBound (es1 ++ es2) (map (uncurry mergeExprPolyBound') $ zip pb1 pb2)
extractExprPolyBound' :: Expr -> HM.HashMap Id [Expr]
extractExprPolyBound' e
| Data dc:es <- unApp e =
let
es' = filter (not . isType) es
argtys = argumentTypes . PresType . inTyForAlls $ typeOf dc
argtys_es = zip argtys es'
(direct, indirect) = partition fstIsTyVar argtys_es
direct' = mapMaybe fstMapTyVar direct
indirect' = map (uncurry substTypes) indirect
direct_hm = foldr (HM.unionWith (++)) HM.empty
$ map (\(i, e_) -> uncurry HM.singleton (i, e_:[])) direct'
in
foldr (HM.unionWith (++)) direct_hm $ map (extractExprPolyBound' . adjustIndirectTypes) indirect'
| otherwise = HM.empty
where
isType (Type _) = True
isType _ = False
fstIsTyVar (TyVar _, _) = True
fstIsTyVar _ = False
fstMapTyVar (TyVar i, x) = Just (i, x)
fstMapTyVar _ = Nothing
substTypes :: Type -> Expr -> Expr
substTypes t e
| _:ts <- unTyApp t
, e':es <- unApp e =
mkApp $ e':substTypes' ts es
substTypes _ e = e
substTypes' :: [Type] -> [Expr] -> [Expr]
substTypes' (t:ts) (Type _:es) = Type t:substTypes' ts es
substTypes' _ es = es
adjustIndirectTypes :: Expr -> Expr
adjustIndirectTypes e
| Data dc:es <- unApp e =
let
tyses = filter (isType) es
tyses' = map (\(Type t) -> t) tyses
bound = leadingTyForAllBindings dc
bound_tyses = zip bound tyses'
in
mkApp $ Data (foldr (uncurry retype) dc $ bound_tyses):es
| otherwise = e
where
isType (Type _) = True
isType _ = False
extractTypePolyBound :: Type -> TypePolyBound
extractTypePolyBound t =
let
(_:ts) = unTyApp t
in
PolyBound t $ map extractTypePolyBound ts
| Unrolls TyApp'ed and TyFunc'ed args , while also keeping them in the base type
extractTypeAppAndFuncPolyBound :: Type -> TypePolyBound
extractTypeAppAndFuncPolyBound t@(TyApp _ _) =
let
(_:ts) = unTyApp t
in
PolyBound t $ map extractTypePolyBound ts
extractTypeAppAndFuncPolyBound t@(TyFun _ _) =
let
ts = splitTyFuns t
in
PolyBound t $ map extractTypePolyBound ts
extractTypeAppAndFuncPolyBound t = PolyBound t []
Generic PolyBound functions
headValue :: PolyBound v -> v
headValue (PolyBound v _) = v
removeHead :: PolyBound v -> [PolyBound v]
removeHead (PolyBound _ vs) = vs
extractValues :: PolyBound v -> [v]
extractValues (PolyBound v ps) = v:concatMap extractValues ps
uniqueIds :: PolyBound v -> PolyBound Int
uniqueIds = snd . uniqueIds' 0
uniqueIds' :: Int -> PolyBound v -> (Int, PolyBound Int)
uniqueIds' n (PolyBound _ ps) =
let
(n', ps') = mapAccumR (uniqueIds') (n + 1) ps
in
(n', PolyBound n ps')
mapPB :: (a -> b) -> PolyBound a -> PolyBound b
mapPB f (PolyBound v ps) = PolyBound (f v) (map (mapPB f) ps)
filterPB :: (PolyBound a -> Bool) -> PolyBound a -> Maybe (PolyBound a)
filterPB p pb@(PolyBound v xs) =
case p pb of
True -> Just $ PolyBound v (mapMaybe (filterPB p) xs)
False -> Nothing
allPB :: (a -> Bool) -> PolyBound a -> Bool
allPB p = all p . extractValues
zipPB :: PolyBound a -> PolyBound b -> PolyBound (a, b)
zipPB (PolyBound a pba) (PolyBound b pbb) = PolyBound (a, b) (zipWith zipPB pba pbb)
zipWithPB :: (a -> b -> c) -> PolyBound a -> PolyBound b -> PolyBound c
zipWithPB f (PolyBound a pba) (PolyBound b pbb) = PolyBound (f a b) (zipWith (zipWithPB f) pba pbb)
zipWithMaybePB :: (Maybe a -> Maybe b -> c) -> PolyBound a -> PolyBound b -> PolyBound c
zipWithMaybePB f pba pbb = zipWithMaybePB' f (mapPB Just pba) (mapPB Just pbb)
zipWithMaybePB' :: (Maybe a -> Maybe b -> c) -> PolyBound (Maybe a) -> PolyBound (Maybe b) -> PolyBound c
zipWithMaybePB' f (PolyBound a pba) (PolyBound b pbb) =
let
c = f a b
rep_nt = repeat (PolyBound Nothing [])
pbc = takeWhile (\(x, y) -> isJust (headValue x) || isJust (headValue y))
$ zip (pba ++ rep_nt) (pbb ++ rep_nt)
in
PolyBound c $ map (uncurry (zipWithMaybePB' f)) pbc
zip3PB :: PolyBound a -> PolyBound b -> PolyBound c -> PolyBound (a, b, c)
zip3PB (PolyBound a pba) (PolyBound b pbb) (PolyBound c pbc) =
PolyBound (a, b, c) (zipWith3 zip3PB pba pbb pbc)
|
dc8d7bf69db72d4274531c700c65bca2a3abc23ad3b865c6ee9119b60b23ca02 | icicle-lang/icicle-ambiata | Data.hs | # LANGUAGE LambdaCase #
# LANGUAGE NoImplicitPrelude #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE TupleSections #
module Icicle.Test.Gen.Runtime.Data where
import Data.ByteString (ByteString)
import qualified Data.ByteString as ByteString
import qualified Data.List as List
import Data.Map (Map)
import qualified Data.Map.Strict as Map
import qualified Data.Vector as Boxed
import qualified Data.Vector.Storable as Storable
import Disorder.Corpus
import Hedgehog
import qualified Hedgehog.Gen as Gen
import qualified Hedgehog.Range as Range
import Icicle.Data.Name
import Icicle.Runtime.Data
import qualified Icicle.Runtime.Data.Logical as Logical
import qualified Icicle.Runtime.Data.Schema as Schema
import qualified Icicle.Runtime.Data.Striped as Striped
import Icicle.Test.Gen.Data.Name
import P
import qualified Prelude as Savage
import qualified X.Data.Vector.Cons as Cons
genBool64 :: Gen Bool64
genBool64 =
Gen.element [False64, True64]
genTime64 :: Gen Time64
genTime64 =
fmap packTime $
UnpackedTime64
<$> Gen.integral (Range.linearFrom 2000 1601 2999)
<*> Gen.integral (Range.constant 1 12)
<*> Gen.integral (Range.constant 1 28)
<*> Gen.integral (Range.constant 0 86399)
genQueryTime :: Gen QueryTime
genQueryTime =
QueryTime <$> genTime64
genInputTime :: Gen InputTime
genInputTime =
InputTime <$> genTime64
genSnapshotTime :: Gen SnapshotTime
genSnapshotTime =
SnapshotTime <$> genQueryTime
genError64 :: Gen Error64
genError64 =
Gen.enumBounded
genResultError :: Gen Error64
genResultError = do
Generate any tag , but if it 's NotAnError just return Tombstone .
e <- genError64
case e of
NotAnError64
-> return Tombstone64
_ -> return e
genTombstoneOrSuccess :: Gen Error64
genTombstoneOrSuccess =
Gen.element [NotAnError64, Tombstone64]
genField :: Gen a -> Gen (Field a)
genField g =
Field <$> Gen.element boats <*> g
genSchema :: Gen Schema
genSchema =
Gen.recursive Gen.choice [
pure Schema.Unit
, pure Schema.Bool
, pure Schema.Int
, pure Schema.Double
, pure Schema.Time
, pure Schema.String
] [
Schema.Sum <$> genSchema <*> genSchema
, Schema.Option <$> genSchema
, Schema.Result <$> genSchema
, Schema.Pair <$> genSchema <*> genSchema
, Schema.Struct . Cons.unsafeFromList <$> Gen.list (Range.linear 1 5) (genField genSchema)
, Schema.Array <$> genSchema
, Schema.Map <$> genSchema <*> genSchema
]
genChar :: Gen Char
genChar =
Gen.filter (/= '\0') Gen.unicode
genString :: Gen ByteString
genString =
Gen.choice [
Gen.element viruses
, Gen.utf8 (Range.linear 0 20) genChar
]
genValue :: Schema -> Gen Value
genValue = \case
Schema.Unit ->
pure Logical.Unit
Schema.Bool ->
Logical.Bool <$> genBool64
Schema.Int ->
Logical.Int <$> Gen.int64 Range.linearBounded
Schema.Double ->
Logical.Double <$> Gen.double (Range.linearFracFrom 0 (-1e308) (1e308))
Schema.Time ->
Logical.Time <$> genTime64
Schema.String ->
Logical.String <$> genString
Schema.Sum x y ->
Gen.choice [
Logical.Left <$> genValue x
, Logical.Right <$> genValue y
]
Schema.Option x ->
Gen.choice [
pure Logical.None
, Logical.Some <$> genValue x
]
Schema.Result x ->
Gen.choice [
Logical.Error <$> genResultError
, Logical.Success <$> genValue x
]
Schema.Pair x y ->
Logical.Pair <$> genValue x <*> genValue y
Schema.Struct fields ->
Logical.Struct <$> traverse (genValue . fieldData) fields
Schema.Array x ->
Logical.Array . Boxed.fromList <$> Gen.list (Range.linear 0 10) (genValue x)
Schema.Map k v ->
Logical.Map <$> Gen.map (Range.linear 0 10) ((,) <$> genValue k <*> genValue v)
genColumn :: Schema -> Gen Column
genColumn schema =
Gen.just $
rightToMaybe . Striped.fromLogical schema . Boxed.fromList <$> Gen.list (Range.linear 0 10) (genValue schema)
genColumnN :: Int -> Schema -> Gen Column
genColumnN n schema =
Gen.just $
rightToMaybe . Striped.fromLogical schema . Boxed.fromList <$> Gen.list (Range.singleton n) (genValue schema)
genSingleton :: Schema -> Gen (Value, Column)
genSingleton schema =
Gen.just $ do
x <- genValue schema
pure . fmap (x,) . rightToMaybe . Striped.fromLogical schema $ Boxed.singleton x
genEntityHash :: Gen EntityHash
genEntityHash =
EntityHash
<$> Gen.word32 (Range.constant 0 5)
genEntityId :: Gen EntityId
genEntityId =
EntityId
<$> Gen.element simpsons
genEntityKey :: Gen EntityKey
genEntityKey = do
eid <- genEntityId
pure $
EntityKey (EntityHash . fromIntegral . ByteString.length $ unEntityId eid) eid
genEntityInputColumn :: Schema -> Gen InputColumn
genEntityInputColumn schema = do
column <- genColumn schema
let
n =
Striped.length column
times <- Storable.fromList . List.sort <$> Gen.list (Range.singleton n) genInputTime
tombstones <- Storable.fromList <$> Gen.list (Range.singleton n) genTombstoneOrSuccess
pure $ InputColumn (Storable.singleton $ fromIntegral n) times tombstones column
genInputColumn :: Int -> Schema -> Gen InputColumn
genInputColumn n_entities schema = do
columns <- Gen.list (Range.singleton n_entities) (genEntityInputColumn schema)
either (\x -> Savage.error $ "genInputColumn: " <> show x) pure . concatInputColumn $
Cons.unsafeFromList columns
genInputColumns :: Int -> Gen (Map InputId InputColumn)
genInputColumns n_entities =
Gen.map (Range.linear 1 5) $
(,) <$> genInputId <*> (genInputColumn n_entities =<< genSchema)
genInputN :: Int -> Gen Input
genInputN n_entities = do
entities <- Boxed.fromList <$> Gen.list (Range.singleton n_entities) genEntityKey
Input entities
<$> genInputColumns (Boxed.length entities)
genInput :: Gen Input
genInput = do
genInputN =<< Gen.int (Range.linear 1 5)
genInputSchemas :: Gen (Map InputId Schema)
genInputSchemas =
Map.fromList
<$> Gen.list (Range.linear 1 5) ((,) <$> genInputId <*> genSchema)
genOutputColumn :: Int -> Gen Column
genOutputColumn n_entities =
Gen.just $ do
schema <- genSchema
columns <- Gen.list (Range.singleton n_entities) (genColumn schema)
pure . rightToMaybe . Striped.unsafeConcat $ Cons.unsafeFromList columns
genOutputColumns :: Int -> Gen (Map OutputId Column)
genOutputColumns n_entities =
Map.fromList
<$> Gen.list (Range.linear 1 5) ((,) <$> genOutputId <*> genOutputColumn n_entities)
genOutputN :: Int -> Gen a -> Gen (Output a)
genOutputN n genKey = do
keys <- Boxed.fromList <$> Gen.list (Range.singleton n) genKey
Output keys
<$> genOutputColumns (Boxed.length keys)
genOutput :: Gen a -> Gen (Output a)
genOutput genKey = do
n <- Gen.int (Range.linear 1 5)
genOutputN n genKey
genSnapshotKey :: Gen SnapshotKey
genSnapshotKey =
SnapshotKey <$> genEntityKey
genChordKey :: Gen ChordKey
genChordKey =
ChordKey
<$> genEntityKey
<*> Gen.element weather
| null | https://raw.githubusercontent.com/icicle-lang/icicle-ambiata/9b9cc45a75f66603007e4db7e5f3ba908cae2df2/icicle-compiler/test/Icicle/Test/Gen/Runtime/Data.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE LambdaCase #
# LANGUAGE NoImplicitPrelude #
# LANGUAGE TupleSections #
module Icicle.Test.Gen.Runtime.Data where
import Data.ByteString (ByteString)
import qualified Data.ByteString as ByteString
import qualified Data.List as List
import Data.Map (Map)
import qualified Data.Map.Strict as Map
import qualified Data.Vector as Boxed
import qualified Data.Vector.Storable as Storable
import Disorder.Corpus
import Hedgehog
import qualified Hedgehog.Gen as Gen
import qualified Hedgehog.Range as Range
import Icicle.Data.Name
import Icicle.Runtime.Data
import qualified Icicle.Runtime.Data.Logical as Logical
import qualified Icicle.Runtime.Data.Schema as Schema
import qualified Icicle.Runtime.Data.Striped as Striped
import Icicle.Test.Gen.Data.Name
import P
import qualified Prelude as Savage
import qualified X.Data.Vector.Cons as Cons
genBool64 :: Gen Bool64
genBool64 =
Gen.element [False64, True64]
genTime64 :: Gen Time64
genTime64 =
fmap packTime $
UnpackedTime64
<$> Gen.integral (Range.linearFrom 2000 1601 2999)
<*> Gen.integral (Range.constant 1 12)
<*> Gen.integral (Range.constant 1 28)
<*> Gen.integral (Range.constant 0 86399)
genQueryTime :: Gen QueryTime
genQueryTime =
QueryTime <$> genTime64
genInputTime :: Gen InputTime
genInputTime =
InputTime <$> genTime64
genSnapshotTime :: Gen SnapshotTime
genSnapshotTime =
SnapshotTime <$> genQueryTime
genError64 :: Gen Error64
genError64 =
Gen.enumBounded
genResultError :: Gen Error64
genResultError = do
Generate any tag , but if it 's NotAnError just return Tombstone .
e <- genError64
case e of
NotAnError64
-> return Tombstone64
_ -> return e
genTombstoneOrSuccess :: Gen Error64
genTombstoneOrSuccess =
Gen.element [NotAnError64, Tombstone64]
genField :: Gen a -> Gen (Field a)
genField g =
Field <$> Gen.element boats <*> g
genSchema :: Gen Schema
genSchema =
Gen.recursive Gen.choice [
pure Schema.Unit
, pure Schema.Bool
, pure Schema.Int
, pure Schema.Double
, pure Schema.Time
, pure Schema.String
] [
Schema.Sum <$> genSchema <*> genSchema
, Schema.Option <$> genSchema
, Schema.Result <$> genSchema
, Schema.Pair <$> genSchema <*> genSchema
, Schema.Struct . Cons.unsafeFromList <$> Gen.list (Range.linear 1 5) (genField genSchema)
, Schema.Array <$> genSchema
, Schema.Map <$> genSchema <*> genSchema
]
genChar :: Gen Char
genChar =
Gen.filter (/= '\0') Gen.unicode
genString :: Gen ByteString
genString =
Gen.choice [
Gen.element viruses
, Gen.utf8 (Range.linear 0 20) genChar
]
genValue :: Schema -> Gen Value
genValue = \case
Schema.Unit ->
pure Logical.Unit
Schema.Bool ->
Logical.Bool <$> genBool64
Schema.Int ->
Logical.Int <$> Gen.int64 Range.linearBounded
Schema.Double ->
Logical.Double <$> Gen.double (Range.linearFracFrom 0 (-1e308) (1e308))
Schema.Time ->
Logical.Time <$> genTime64
Schema.String ->
Logical.String <$> genString
Schema.Sum x y ->
Gen.choice [
Logical.Left <$> genValue x
, Logical.Right <$> genValue y
]
Schema.Option x ->
Gen.choice [
pure Logical.None
, Logical.Some <$> genValue x
]
Schema.Result x ->
Gen.choice [
Logical.Error <$> genResultError
, Logical.Success <$> genValue x
]
Schema.Pair x y ->
Logical.Pair <$> genValue x <*> genValue y
Schema.Struct fields ->
Logical.Struct <$> traverse (genValue . fieldData) fields
Schema.Array x ->
Logical.Array . Boxed.fromList <$> Gen.list (Range.linear 0 10) (genValue x)
Schema.Map k v ->
Logical.Map <$> Gen.map (Range.linear 0 10) ((,) <$> genValue k <*> genValue v)
genColumn :: Schema -> Gen Column
genColumn schema =
Gen.just $
rightToMaybe . Striped.fromLogical schema . Boxed.fromList <$> Gen.list (Range.linear 0 10) (genValue schema)
genColumnN :: Int -> Schema -> Gen Column
genColumnN n schema =
Gen.just $
rightToMaybe . Striped.fromLogical schema . Boxed.fromList <$> Gen.list (Range.singleton n) (genValue schema)
genSingleton :: Schema -> Gen (Value, Column)
genSingleton schema =
Gen.just $ do
x <- genValue schema
pure . fmap (x,) . rightToMaybe . Striped.fromLogical schema $ Boxed.singleton x
genEntityHash :: Gen EntityHash
genEntityHash =
EntityHash
<$> Gen.word32 (Range.constant 0 5)
genEntityId :: Gen EntityId
genEntityId =
EntityId
<$> Gen.element simpsons
genEntityKey :: Gen EntityKey
genEntityKey = do
eid <- genEntityId
pure $
EntityKey (EntityHash . fromIntegral . ByteString.length $ unEntityId eid) eid
genEntityInputColumn :: Schema -> Gen InputColumn
genEntityInputColumn schema = do
column <- genColumn schema
let
n =
Striped.length column
times <- Storable.fromList . List.sort <$> Gen.list (Range.singleton n) genInputTime
tombstones <- Storable.fromList <$> Gen.list (Range.singleton n) genTombstoneOrSuccess
pure $ InputColumn (Storable.singleton $ fromIntegral n) times tombstones column
genInputColumn :: Int -> Schema -> Gen InputColumn
genInputColumn n_entities schema = do
columns <- Gen.list (Range.singleton n_entities) (genEntityInputColumn schema)
either (\x -> Savage.error $ "genInputColumn: " <> show x) pure . concatInputColumn $
Cons.unsafeFromList columns
genInputColumns :: Int -> Gen (Map InputId InputColumn)
genInputColumns n_entities =
Gen.map (Range.linear 1 5) $
(,) <$> genInputId <*> (genInputColumn n_entities =<< genSchema)
genInputN :: Int -> Gen Input
genInputN n_entities = do
entities <- Boxed.fromList <$> Gen.list (Range.singleton n_entities) genEntityKey
Input entities
<$> genInputColumns (Boxed.length entities)
genInput :: Gen Input
genInput = do
genInputN =<< Gen.int (Range.linear 1 5)
genInputSchemas :: Gen (Map InputId Schema)
genInputSchemas =
Map.fromList
<$> Gen.list (Range.linear 1 5) ((,) <$> genInputId <*> genSchema)
genOutputColumn :: Int -> Gen Column
genOutputColumn n_entities =
Gen.just $ do
schema <- genSchema
columns <- Gen.list (Range.singleton n_entities) (genColumn schema)
pure . rightToMaybe . Striped.unsafeConcat $ Cons.unsafeFromList columns
genOutputColumns :: Int -> Gen (Map OutputId Column)
genOutputColumns n_entities =
Map.fromList
<$> Gen.list (Range.linear 1 5) ((,) <$> genOutputId <*> genOutputColumn n_entities)
genOutputN :: Int -> Gen a -> Gen (Output a)
genOutputN n genKey = do
keys <- Boxed.fromList <$> Gen.list (Range.singleton n) genKey
Output keys
<$> genOutputColumns (Boxed.length keys)
genOutput :: Gen a -> Gen (Output a)
genOutput genKey = do
n <- Gen.int (Range.linear 1 5)
genOutputN n genKey
genSnapshotKey :: Gen SnapshotKey
genSnapshotKey =
SnapshotKey <$> genEntityKey
genChordKey :: Gen ChordKey
genChordKey =
ChordKey
<$> genEntityKey
<*> Gen.element weather
|
902b5a674901abf18485a1f76f8f637c0173be267c2e6c9cc35695869049068a | teknql/wing | resource_test.clj | (ns wing.core.resource-test
(:require [wing.core.resource :as sut :refer [with-resource Resource]]
[clojure.test :as t :refer [deftest testing is]]))
(deftest with-resource-test
(testing "calls release in reverse order"
(let [release-calls (atom 0)]
(with-resource [foo (reify Resource
(release [x]
(is (= 1 @release-calls))
(swap! release-calls inc)))
bar (reify Resource
(release [x]
(is (= 0 @release-calls))
(swap! release-calls inc)))])
(is (= 2 @release-calls))))
(testing "returns the result of the expression"
(let [result
(with-resource [foo (reify Resource
(release [x] nil))]
5)]
(is (= 5 result))))
(testing "calls release even if there is an error, and propagates it"
(let [release-called (atom false)]
(is (thrown?
Exception
(with-resource [foo (reify Resource
(release [x] (reset! release-called true)))]
(throw (Exception. "Boom!")))))
(is @release-called))))
| null | https://raw.githubusercontent.com/teknql/wing/de8148bf48210eac0c0e3f8e31346b5b2ead39c2/test/wing/core/resource_test.clj | clojure | (ns wing.core.resource-test
(:require [wing.core.resource :as sut :refer [with-resource Resource]]
[clojure.test :as t :refer [deftest testing is]]))
(deftest with-resource-test
(testing "calls release in reverse order"
(let [release-calls (atom 0)]
(with-resource [foo (reify Resource
(release [x]
(is (= 1 @release-calls))
(swap! release-calls inc)))
bar (reify Resource
(release [x]
(is (= 0 @release-calls))
(swap! release-calls inc)))])
(is (= 2 @release-calls))))
(testing "returns the result of the expression"
(let [result
(with-resource [foo (reify Resource
(release [x] nil))]
5)]
(is (= 5 result))))
(testing "calls release even if there is an error, and propagates it"
(let [release-called (atom false)]
(is (thrown?
Exception
(with-resource [foo (reify Resource
(release [x] (reset! release-called true)))]
(throw (Exception. "Boom!")))))
(is @release-called))))
| |
018c9c7d27844bf295492733c98de764635856b27c1a22a60474396e03833c29 | liqd/thentos | Types.hs | {- Safe -}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveFunctor #-}
# LANGUAGE DeriveGeneric #
{-# LANGUAGE FlexibleContexts #-}
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
{-# LANGUAGE TemplateHaskell #-}
module Thentos.Action.Types where
import Control.Exception (SomeException)
import Control.Monad.Reader (ReaderT(ReaderT))
import Control.Monad.State (StateT)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Either (EitherT(EitherT))
import Data.Configifier ((>>.))
import Database.PostgreSQL.Simple (Connection)
import Data.Pool (Pool)
import LIO.Core (LIO)
import LIO.TCB (ioTCB)
import Thentos.Types
import Thentos.Config
import Thentos.Prelude
import Thentos.CookieSession.CSRF
data ActionEnv =
ActionEnv
{ _aStConfig :: ThentosConfig
, _aStDb :: Pool Connection
}
deriving (Generic)
makeLenses ''ActionEnv
class GetThentosDb a where
getThentosDb :: Getter a (Pool Connection)
instance GetThentosDb ActionEnv where
getThentosDb = aStDb
class GetThentosConfig a where
getThentosConfig :: Getter a ThentosConfig
instance GetThentosConfig ActionEnv where
getThentosConfig = aStConfig
type MonadThentosConfig v m = (MonadReader v m, GetThentosConfig v)
instance GetCsrfSecret ActionEnv where
csrfSecret = pre $ aStConfig . to (>>. (Proxy :: Proxy '["csrf_secret"])) . _Just . csrfSecret . _Just
-- | The 'Action' monad transformer stack. It contains:
--
- ' LIO ' as a base monad .
-- - A state of polymorphic type (for use e.g. by the frontend handlers to store cookies etc.)
- The option of throwing @ThentosError e@. ( Not ' ActionError e ' , which contains
-- authorization errors that must not be catchable from inside an 'Action'.)
- An ' ActionEnv ' in a reader . The state can be used by actions for calls to ' LIO ' , which
-- will have authorized effect. Since it is contained in a reader, actions do not have the
-- power to corrupt it.
newtype ActionStack e s a =
ActionStack
{ fromAction :: ReaderT ActionEnv
(EitherT (ThentosError e)
(StateT s
(LIO DCLabel))) a
}
deriving (Functor, Generic)
instance Applicative (ActionStack e s) where
pure = ActionStack . pure
(ActionStack ua) <*> (ActionStack ua') = ActionStack $ ua <*> ua'
instance Monad (ActionStack e s) where
return = pure
(ActionStack ua) >>= f = ActionStack $ ua >>= fromAction . f
instance MonadReader ActionEnv (ActionStack e s) where
ask = ActionStack ask
local f = ActionStack . local f . fromAction
instance MonadError (ThentosError e) (ActionStack e s) where
throwError = ActionStack . throwError
catchError (ActionStack ua) h = ActionStack $ catchError ua (fromAction . h)
instance MonadState s (ActionStack e s) where
state = ActionStack . state
instance MonadLIO DCLabel (ActionStack e s) where
liftLIO lio = ActionStack . ReaderT $ \_ -> EitherT (Right <$> lift lio)
instance MonadRandom (ActionStack e s) where
getRandomBytes = liftLIO . ioTCB . getRandomBytes
type MonadQuery e v m =
(GetThentosDb v,
GetThentosConfig v,
MonadReader v m,
MonadThentosError e m,
MonadThentosIO m)
type MonadAction e v m = (MonadQuery e v m, MonadRandom m)
| Errors known by ' runActionE ' , ' runAction ' , ....
--
-- FIXME DOC
-- The 'MonadError' instance of newtype 'Action' lets you throw and catch errors from *within* the
-- 'Action', i.e., at construction time). These are errors are handled in the 'ActionErrorThentos'
-- constructor. Label errors and other (possibly async) exceptions are caught (if possible) in
' runActionE ' and its friends and maintained in other ' ActionError ' constructors .
data ActionError e =
ActionErrorThentos (ThentosError e)
| ActionErrorAnyLabel AnyLabelError
| ActionErrorUnknown SomeException
deriving (Show)
instance (Show e, Typeable e) => Exception (ActionError e)
| null | https://raw.githubusercontent.com/liqd/thentos/f7d53d8e9d11956d2cc83efb5f5149876109b098/thentos-core/src/Thentos/Action/Types.hs | haskell | Safe
# LANGUAGE ConstraintKinds #
# LANGUAGE DataKinds #
# LANGUAGE DeriveFunctor #
# LANGUAGE FlexibleContexts #
# LANGUAGE TemplateHaskell #
| The 'Action' monad transformer stack. It contains:
- A state of polymorphic type (for use e.g. by the frontend handlers to store cookies etc.)
authorization errors that must not be catchable from inside an 'Action'.)
will have authorized effect. Since it is contained in a reader, actions do not have the
power to corrupt it.
FIXME DOC
The 'MonadError' instance of newtype 'Action' lets you throw and catch errors from *within* the
'Action', i.e., at construction time). These are errors are handled in the 'ActionErrorThentos'
constructor. Label errors and other (possibly async) exceptions are caught (if possible) in |
# LANGUAGE DeriveGeneric #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
module Thentos.Action.Types where
import Control.Exception (SomeException)
import Control.Monad.Reader (ReaderT(ReaderT))
import Control.Monad.State (StateT)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Either (EitherT(EitherT))
import Data.Configifier ((>>.))
import Database.PostgreSQL.Simple (Connection)
import Data.Pool (Pool)
import LIO.Core (LIO)
import LIO.TCB (ioTCB)
import Thentos.Types
import Thentos.Config
import Thentos.Prelude
import Thentos.CookieSession.CSRF
data ActionEnv =
ActionEnv
{ _aStConfig :: ThentosConfig
, _aStDb :: Pool Connection
}
deriving (Generic)
makeLenses ''ActionEnv
class GetThentosDb a where
getThentosDb :: Getter a (Pool Connection)
instance GetThentosDb ActionEnv where
getThentosDb = aStDb
class GetThentosConfig a where
getThentosConfig :: Getter a ThentosConfig
instance GetThentosConfig ActionEnv where
getThentosConfig = aStConfig
type MonadThentosConfig v m = (MonadReader v m, GetThentosConfig v)
instance GetCsrfSecret ActionEnv where
csrfSecret = pre $ aStConfig . to (>>. (Proxy :: Proxy '["csrf_secret"])) . _Just . csrfSecret . _Just
- ' LIO ' as a base monad .
- The option of throwing @ThentosError e@. ( Not ' ActionError e ' , which contains
- An ' ActionEnv ' in a reader . The state can be used by actions for calls to ' LIO ' , which
newtype ActionStack e s a =
ActionStack
{ fromAction :: ReaderT ActionEnv
(EitherT (ThentosError e)
(StateT s
(LIO DCLabel))) a
}
deriving (Functor, Generic)
instance Applicative (ActionStack e s) where
pure = ActionStack . pure
(ActionStack ua) <*> (ActionStack ua') = ActionStack $ ua <*> ua'
instance Monad (ActionStack e s) where
return = pure
(ActionStack ua) >>= f = ActionStack $ ua >>= fromAction . f
instance MonadReader ActionEnv (ActionStack e s) where
ask = ActionStack ask
local f = ActionStack . local f . fromAction
instance MonadError (ThentosError e) (ActionStack e s) where
throwError = ActionStack . throwError
catchError (ActionStack ua) h = ActionStack $ catchError ua (fromAction . h)
instance MonadState s (ActionStack e s) where
state = ActionStack . state
instance MonadLIO DCLabel (ActionStack e s) where
liftLIO lio = ActionStack . ReaderT $ \_ -> EitherT (Right <$> lift lio)
instance MonadRandom (ActionStack e s) where
getRandomBytes = liftLIO . ioTCB . getRandomBytes
type MonadQuery e v m =
(GetThentosDb v,
GetThentosConfig v,
MonadReader v m,
MonadThentosError e m,
MonadThentosIO m)
type MonadAction e v m = (MonadQuery e v m, MonadRandom m)
| Errors known by ' runActionE ' , ' runAction ' , ....
' runActionE ' and its friends and maintained in other ' ActionError ' constructors .
data ActionError e =
ActionErrorThentos (ThentosError e)
| ActionErrorAnyLabel AnyLabelError
| ActionErrorUnknown SomeException
deriving (Show)
instance (Show e, Typeable e) => Exception (ActionError e)
|
85eec4469c8dd89b2697cbb5c13573c76fd9de4ffb1ad4912a2d4789d50c2c69 | GillianPlatform/Gillian | WType.ml | type t =
(* Used only for work in compilation *)
| WList
| WNull
| WBool
| WString
| WPtr
| WInt
| WAny
| WSet
(** Are types t1 and t2 compatible *)
let compatible t1 t2 =
match (t1, t2) with
| WAny, _ -> true
| _, WAny -> true
| t1, t2 when t1 = t2 -> true
| _ -> false
let strongest t1 t2 =
match (t1, t2) with
| WAny, t -> t
| t, WAny -> t
| _ -> t1
careful there is no strongest for two different types
let pp fmt t =
let s = Format.fprintf fmt "@[%s@]" in
match t with
| WList -> s "List"
| WNull -> s "NullType"
| WBool -> s "Bool"
| WString -> s "String"
| WPtr -> s "Pointer"
| WInt -> s "Int"
| WAny -> s "Any"
| WSet -> s "Set"
exception Unmatching_types
module TypeMap = Map.Make (struct
type t = WLExpr.tt
let compare = Stdlib.compare
end)
let of_variable (var : string) (type_context : t TypeMap.t) : t option =
TypeMap.find_opt (WLExpr.PVar var) type_context
let of_val v =
let open WVal in
match v with
| Bool _ -> WBool
| Int _ -> WInt
| Str _ -> WString
| Null -> WNull
| VList _ -> WList
(** returns (x, y) when unop takes type x and returns type y *)
let of_unop u =
match u with
| WUnOp.NOT -> (WBool, WBool)
| WUnOp.LEN -> (WList, WInt)
| WUnOp.REV -> (WList, WList)
| WUnOp.HEAD -> (WList, WAny)
| WUnOp.TAIL -> (WList, WList)
* returns ( x , y , z ) when takes types x and y and returns type z
let of_binop b =
match b with
| WBinOp.NEQ | WBinOp.EQUAL -> (WAny, WAny, WBool)
| WBinOp.LESSTHAN
| WBinOp.GREATERTHAN
| WBinOp.LESSEQUAL
| WBinOp.GREATEREQUAL -> (WInt, WInt, WBool)
| WBinOp.TIMES | WBinOp.DIV | WBinOp.MOD -> (WInt, WInt, WInt)
| WBinOp.AND | WBinOp.OR -> (WBool, WBool, WBool)
| WBinOp.LSTCONS -> (WAny, WList, WList)
| WBinOp.LSTCAT -> (WList, WList, WList)
| WBinOp.LSTNTH -> (WList, WInt, WAny)
| WBinOp.PLUS | WBinOp.MINUS -> (WAny, WAny, WAny)
TODO : improve this , because we can add Ints AND Pointers
(** checks and adds to typemap *)
let needs_to_be expr t knownp =
let bare_expr = WLExpr.get expr in
match TypeMap.find_opt bare_expr knownp with
| Some tp when not (compatible t tp) ->
failwith
(Format.asprintf
"I inferred both types %a and %a on expression %a at location %s" pp
tp pp t WLExpr.pp expr
(CodeLoc.str (WLExpr.get_loc expr)))
| Some tp -> TypeMap.add bare_expr (strongest t tp) knownp
| None -> TypeMap.add bare_expr t knownp
* Infers a TypeMap from a logic_expr
let rec infer_logic_expr knownp lexpr =
let open WLExpr in
let bare_lexpr = get lexpr in
match bare_lexpr with
| LVal v -> TypeMap.add bare_lexpr (of_val v) knownp
| LBinOp (le1, b, le2) ->
let inferred = infer_logic_expr (infer_logic_expr knownp le1) le2 in
let t1, t2, t3 = of_binop b in
TypeMap.add bare_lexpr t3
(needs_to_be le1 t1 (needs_to_be le2 t2 inferred))
| LUnOp (u, le) ->
let inferred = infer_logic_expr knownp le in
let t1, t2 = of_unop u in
TypeMap.add bare_lexpr t2 (needs_to_be le t1 inferred)
| LLSub (le1, le2, le3) ->
let inferred =
infer_logic_expr
(infer_logic_expr (infer_logic_expr knownp le1) le2)
le3
in
let t0, t1, t2, t3 = (WList, WList, WInt, WInt) in
TypeMap.add bare_lexpr t0
(needs_to_be le1 t1 (needs_to_be le2 t2 (needs_to_be le3 t3 inferred)))
| LVar _ -> knownp
| PVar _ -> knownp
| LEList lel ->
TypeMap.add bare_lexpr WList (List.fold_left infer_logic_expr knownp lel)
| LESet lel ->
TypeMap.add bare_lexpr WSet (List.fold_left infer_logic_expr knownp lel)
* Single step of inference for that gets a TypeMap from a single assertion
let rec infer_single_assert_step asser known =
let same_type e1 e2 knownp =
let bare_e1, bare_e2 = (WLExpr.get e1, WLExpr.get e2) in
let topt1 = TypeMap.find_opt bare_e1 knownp in
let topt2 = TypeMap.find_opt bare_e2 knownp in
match (topt1, topt2) with
| Some t1, Some t2 when not (compatible t1 t2) ->
failwith
(Format.asprintf
"Expressions %a and %a should have the same type but are of types \
%a and %a in assertion %a at location %s"
WLExpr.pp e1 WLExpr.pp e2 pp t1 pp t2 WLAssert.pp asser
(CodeLoc.str (WLAssert.get_loc asser)))
| Some t1, Some t2 -> Some (strongest t1 t2)
| Some t1, None -> Some t1
| None, Some t2 -> Some t2
| None, None -> None
in
let rec infer_formula f k =
match WLFormula.get f with
| WLFormula.LTrue | WLFormula.LFalse -> k
| WLFormula.LNot f -> infer_formula f k
| WLFormula.LAnd (f1, f2) | WLFormula.LOr (f1, f2) ->
infer_formula f2 (infer_formula f1 known)
| WLFormula.LEq (le1, le2) -> (
let bare_le1, bare_le2 = (WLExpr.get le1, WLExpr.get le2) in
let inferred = infer_logic_expr (infer_logic_expr known le1) le2 in
let topt = same_type le1 le2 inferred in
match topt with
| Some t -> TypeMap.add bare_le1 t (TypeMap.add bare_le2 t inferred)
| None -> inferred)
| WLFormula.LLess (le1, le2)
| WLFormula.LGreater (le1, le2)
| WLFormula.LLessEq (le1, le2)
| WLFormula.LGreaterEq (le1, le2) ->
let bare_le1, bare_le2 = (WLExpr.get le1, WLExpr.get le2) in
let inferred = infer_logic_expr (infer_logic_expr known le1) le2 in
let inferredp = needs_to_be le1 WInt (needs_to_be le2 WInt inferred) in
TypeMap.add bare_le1 WInt (TypeMap.add bare_le2 WInt inferredp)
in
match WLAssert.get asser with
| WLAssert.LEmp -> known
| WLAssert.LStar (la1, la2) ->
infer_single_assert_step la2 (infer_single_assert_step la1 known)
| WLAssert.LPred (_, lel) -> List.fold_left infer_logic_expr known lel
| WLAssert.LPointsTo (le1, le2) ->
let inferred =
List.fold_left infer_logic_expr (infer_logic_expr known le1) le2
in
needs_to_be le1 WList inferred
| WLAssert.LBlockPointsTo (le1, le2) ->
let inferred =
List.fold_left infer_logic_expr (infer_logic_expr known le1) le2
in
needs_to_be le1 WList inferred
| WLAssert.LPure f -> infer_formula f known
let infer_single_assert known asser =
let rec find_fixed_point f a =
let b = f a in
if Stdlib.compare a b = 0 then b else find_fixed_point f b
in
find_fixed_point (infer_single_assert_step asser) known
let infer_types_pred (params : (string * t option) list) assert_list =
let join_params_and_asserts _le topt1 topt2 =
match (topt1, topt2) with
| Some t1, Some t2 when t1 = t2 -> Some t1
| Some t, None when t <> WAny -> Some t
| None, Some t when t <> WAny -> Some t
| _ -> None
in
let join_asserts _le topt1 topt2 =
match (topt1, topt2) with
| Some t1, Some t2 when t1 = t2 -> Some t1
| _ -> None
in
let infers_on_params =
List.fold_left
(fun (map : 'a TypeMap.t) (x, ot) ->
match ot with
| None -> map
| Some t -> TypeMap.add (PVar x) t map)
TypeMap.empty params
in
let infers_on_asserts =
List.map (infer_single_assert TypeMap.empty) assert_list
in
let hd, tl = (List.hd infers_on_asserts, List.tl infers_on_asserts) in
let infers_on_asserts = List.fold_left (TypeMap.merge join_asserts) hd tl in
let result =
TypeMap.merge join_params_and_asserts infers_on_params infers_on_asserts
in
result
| null | https://raw.githubusercontent.com/GillianPlatform/Gillian/190c5a054c9572c81af6bc44c6a80b8a499766f8/wisl/lib/syntax/WType.ml | ocaml | Used only for work in compilation
* Are types t1 and t2 compatible
* returns (x, y) when unop takes type x and returns type y
* checks and adds to typemap | type t =
| WList
| WNull
| WBool
| WString
| WPtr
| WInt
| WAny
| WSet
let compatible t1 t2 =
match (t1, t2) with
| WAny, _ -> true
| _, WAny -> true
| t1, t2 when t1 = t2 -> true
| _ -> false
let strongest t1 t2 =
match (t1, t2) with
| WAny, t -> t
| t, WAny -> t
| _ -> t1
careful there is no strongest for two different types
let pp fmt t =
let s = Format.fprintf fmt "@[%s@]" in
match t with
| WList -> s "List"
| WNull -> s "NullType"
| WBool -> s "Bool"
| WString -> s "String"
| WPtr -> s "Pointer"
| WInt -> s "Int"
| WAny -> s "Any"
| WSet -> s "Set"
exception Unmatching_types
module TypeMap = Map.Make (struct
type t = WLExpr.tt
let compare = Stdlib.compare
end)
let of_variable (var : string) (type_context : t TypeMap.t) : t option =
TypeMap.find_opt (WLExpr.PVar var) type_context
let of_val v =
let open WVal in
match v with
| Bool _ -> WBool
| Int _ -> WInt
| Str _ -> WString
| Null -> WNull
| VList _ -> WList
let of_unop u =
match u with
| WUnOp.NOT -> (WBool, WBool)
| WUnOp.LEN -> (WList, WInt)
| WUnOp.REV -> (WList, WList)
| WUnOp.HEAD -> (WList, WAny)
| WUnOp.TAIL -> (WList, WList)
* returns ( x , y , z ) when takes types x and y and returns type z
let of_binop b =
match b with
| WBinOp.NEQ | WBinOp.EQUAL -> (WAny, WAny, WBool)
| WBinOp.LESSTHAN
| WBinOp.GREATERTHAN
| WBinOp.LESSEQUAL
| WBinOp.GREATEREQUAL -> (WInt, WInt, WBool)
| WBinOp.TIMES | WBinOp.DIV | WBinOp.MOD -> (WInt, WInt, WInt)
| WBinOp.AND | WBinOp.OR -> (WBool, WBool, WBool)
| WBinOp.LSTCONS -> (WAny, WList, WList)
| WBinOp.LSTCAT -> (WList, WList, WList)
| WBinOp.LSTNTH -> (WList, WInt, WAny)
| WBinOp.PLUS | WBinOp.MINUS -> (WAny, WAny, WAny)
TODO : improve this , because we can add Ints AND Pointers
let needs_to_be expr t knownp =
let bare_expr = WLExpr.get expr in
match TypeMap.find_opt bare_expr knownp with
| Some tp when not (compatible t tp) ->
failwith
(Format.asprintf
"I inferred both types %a and %a on expression %a at location %s" pp
tp pp t WLExpr.pp expr
(CodeLoc.str (WLExpr.get_loc expr)))
| Some tp -> TypeMap.add bare_expr (strongest t tp) knownp
| None -> TypeMap.add bare_expr t knownp
* Infers a TypeMap from a logic_expr
let rec infer_logic_expr knownp lexpr =
let open WLExpr in
let bare_lexpr = get lexpr in
match bare_lexpr with
| LVal v -> TypeMap.add bare_lexpr (of_val v) knownp
| LBinOp (le1, b, le2) ->
let inferred = infer_logic_expr (infer_logic_expr knownp le1) le2 in
let t1, t2, t3 = of_binop b in
TypeMap.add bare_lexpr t3
(needs_to_be le1 t1 (needs_to_be le2 t2 inferred))
| LUnOp (u, le) ->
let inferred = infer_logic_expr knownp le in
let t1, t2 = of_unop u in
TypeMap.add bare_lexpr t2 (needs_to_be le t1 inferred)
| LLSub (le1, le2, le3) ->
let inferred =
infer_logic_expr
(infer_logic_expr (infer_logic_expr knownp le1) le2)
le3
in
let t0, t1, t2, t3 = (WList, WList, WInt, WInt) in
TypeMap.add bare_lexpr t0
(needs_to_be le1 t1 (needs_to_be le2 t2 (needs_to_be le3 t3 inferred)))
| LVar _ -> knownp
| PVar _ -> knownp
| LEList lel ->
TypeMap.add bare_lexpr WList (List.fold_left infer_logic_expr knownp lel)
| LESet lel ->
TypeMap.add bare_lexpr WSet (List.fold_left infer_logic_expr knownp lel)
* Single step of inference for that gets a TypeMap from a single assertion
let rec infer_single_assert_step asser known =
let same_type e1 e2 knownp =
let bare_e1, bare_e2 = (WLExpr.get e1, WLExpr.get e2) in
let topt1 = TypeMap.find_opt bare_e1 knownp in
let topt2 = TypeMap.find_opt bare_e2 knownp in
match (topt1, topt2) with
| Some t1, Some t2 when not (compatible t1 t2) ->
failwith
(Format.asprintf
"Expressions %a and %a should have the same type but are of types \
%a and %a in assertion %a at location %s"
WLExpr.pp e1 WLExpr.pp e2 pp t1 pp t2 WLAssert.pp asser
(CodeLoc.str (WLAssert.get_loc asser)))
| Some t1, Some t2 -> Some (strongest t1 t2)
| Some t1, None -> Some t1
| None, Some t2 -> Some t2
| None, None -> None
in
let rec infer_formula f k =
match WLFormula.get f with
| WLFormula.LTrue | WLFormula.LFalse -> k
| WLFormula.LNot f -> infer_formula f k
| WLFormula.LAnd (f1, f2) | WLFormula.LOr (f1, f2) ->
infer_formula f2 (infer_formula f1 known)
| WLFormula.LEq (le1, le2) -> (
let bare_le1, bare_le2 = (WLExpr.get le1, WLExpr.get le2) in
let inferred = infer_logic_expr (infer_logic_expr known le1) le2 in
let topt = same_type le1 le2 inferred in
match topt with
| Some t -> TypeMap.add bare_le1 t (TypeMap.add bare_le2 t inferred)
| None -> inferred)
| WLFormula.LLess (le1, le2)
| WLFormula.LGreater (le1, le2)
| WLFormula.LLessEq (le1, le2)
| WLFormula.LGreaterEq (le1, le2) ->
let bare_le1, bare_le2 = (WLExpr.get le1, WLExpr.get le2) in
let inferred = infer_logic_expr (infer_logic_expr known le1) le2 in
let inferredp = needs_to_be le1 WInt (needs_to_be le2 WInt inferred) in
TypeMap.add bare_le1 WInt (TypeMap.add bare_le2 WInt inferredp)
in
match WLAssert.get asser with
| WLAssert.LEmp -> known
| WLAssert.LStar (la1, la2) ->
infer_single_assert_step la2 (infer_single_assert_step la1 known)
| WLAssert.LPred (_, lel) -> List.fold_left infer_logic_expr known lel
| WLAssert.LPointsTo (le1, le2) ->
let inferred =
List.fold_left infer_logic_expr (infer_logic_expr known le1) le2
in
needs_to_be le1 WList inferred
| WLAssert.LBlockPointsTo (le1, le2) ->
let inferred =
List.fold_left infer_logic_expr (infer_logic_expr known le1) le2
in
needs_to_be le1 WList inferred
| WLAssert.LPure f -> infer_formula f known
let infer_single_assert known asser =
let rec find_fixed_point f a =
let b = f a in
if Stdlib.compare a b = 0 then b else find_fixed_point f b
in
find_fixed_point (infer_single_assert_step asser) known
let infer_types_pred (params : (string * t option) list) assert_list =
let join_params_and_asserts _le topt1 topt2 =
match (topt1, topt2) with
| Some t1, Some t2 when t1 = t2 -> Some t1
| Some t, None when t <> WAny -> Some t
| None, Some t when t <> WAny -> Some t
| _ -> None
in
let join_asserts _le topt1 topt2 =
match (topt1, topt2) with
| Some t1, Some t2 when t1 = t2 -> Some t1
| _ -> None
in
let infers_on_params =
List.fold_left
(fun (map : 'a TypeMap.t) (x, ot) ->
match ot with
| None -> map
| Some t -> TypeMap.add (PVar x) t map)
TypeMap.empty params
in
let infers_on_asserts =
List.map (infer_single_assert TypeMap.empty) assert_list
in
let hd, tl = (List.hd infers_on_asserts, List.tl infers_on_asserts) in
let infers_on_asserts = List.fold_left (TypeMap.merge join_asserts) hd tl in
let result =
TypeMap.merge join_params_and_asserts infers_on_params infers_on_asserts
in
result
|
bffa4bc12d48f6f8dda1897446d13ce706a19827918aee95aec5058b31842140 | GrammaTech/sel | scopes-2.lisp | (defvar *a* 1)
(defvar *b* 1)
(+ *a* *b*)
| null | https://raw.githubusercontent.com/GrammaTech/sel/a59174c02a454e8d588614e221cf281260cf12f8/test/etc/lisp-scopes/scopes-2.lisp | lisp | (defvar *a* 1)
(defvar *b* 1)
(+ *a* *b*)
| |
49d0627a0ea5df7f63a0d35f26b3544e4ee08c5148d3a84863926a708141ee95 | google/codeworld | EmbedAsUrl.hs | {-# LANGUAGE OverloadedStrings #-}
# LANGUAGE TemplateHaskell #
Copyright 2020 The CodeWorld Authors . All rights reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS ,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
See the License for the specific language governing permissions and
limitations under the License .
Copyright 2020 The CodeWorld Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
module Util.EmbedAsUrl
( embedAsUrl,
)
where
import qualified Data.ByteString as B
import qualified Data.ByteString.Base64 as B64
import qualified Data.ByteString.Char8 as BC
import qualified Data.ByteString.Unsafe as B
import qualified Data.Text.Encoding as T
import Language.Haskell.TH.Syntax
import System.IO.Unsafe
embedAsUrl :: String -> FilePath -> Q Exp
embedAsUrl contentType f = do
qAddDependentFile f
payload <- runIO $ B64.encode <$> B.readFile f
let uri = "data:" <> BC.pack contentType <> ";base64," <> payload
[e|
T.decodeUtf8 $ unsafePerformIO $
B.unsafePackAddressLen
$(return $ LitE $ IntegerL $ fromIntegral $ B.length uri)
$(return $ LitE $ StringPrimL $ B.unpack uri)
|]
| null | https://raw.githubusercontent.com/google/codeworld/77b0863075be12e3bc5f182a53fcc38b038c3e16/codeworld-api/src/Util/EmbedAsUrl.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE TemplateHaskell #
Copyright 2020 The CodeWorld Authors . All rights reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS ,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
See the License for the specific language governing permissions and
limitations under the License .
Copyright 2020 The CodeWorld Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-}
module Util.EmbedAsUrl
( embedAsUrl,
)
where
import qualified Data.ByteString as B
import qualified Data.ByteString.Base64 as B64
import qualified Data.ByteString.Char8 as BC
import qualified Data.ByteString.Unsafe as B
import qualified Data.Text.Encoding as T
import Language.Haskell.TH.Syntax
import System.IO.Unsafe
embedAsUrl :: String -> FilePath -> Q Exp
embedAsUrl contentType f = do
qAddDependentFile f
payload <- runIO $ B64.encode <$> B.readFile f
let uri = "data:" <> BC.pack contentType <> ";base64," <> payload
[e|
T.decodeUtf8 $ unsafePerformIO $
B.unsafePackAddressLen
$(return $ LitE $ IntegerL $ fromIntegral $ B.length uri)
$(return $ LitE $ StringPrimL $ B.unpack uri)
|]
|
6d5fbfcd7db9fe53caa2f54d7a1f54254d62df70abce9f7de4c8b6b8a0ab792e | aggieben/weblocks | weblocks.lisp |
(in-package :weblocks-test)
;;; testing root-composite
(deftest root-composite-1
(with-request :get nil
(root-composite))
nil nil)
(deftest root-composite-2
(with-request :get nil
(setf (root-composite) 'foobar)
(multiple-value-bind (res present-p)
(root-composite)
(values res (not (null present-p)))))
foobar t)
| null | https://raw.githubusercontent.com/aggieben/weblocks/8d86be6a4fff8dde0b94181ba60d0dca2cbd9e25/test/weblocks.lisp | lisp | testing root-composite |
(in-package :weblocks-test)
(deftest root-composite-1
(with-request :get nil
(root-composite))
nil nil)
(deftest root-composite-2
(with-request :get nil
(setf (root-composite) 'foobar)
(multiple-value-bind (res present-p)
(root-composite)
(values res (not (null present-p)))))
foobar t)
|
8bfa6bcf4ea837a0412da68b51f22c55112976dfd08a0f63ba799636420d5735 | lem-project/lem | sourcelist.lisp | (defpackage :lem.sourcelist
(:use :cl :lem)
(:export :title-attribute
:position-attribute
:with-sourcelist
:append-jump-function
:append-sourcelist
:jump-highlighting)
#+sbcl
(:lock t))
(in-package :lem.sourcelist)
(define-attribute jump-highlight
(t :background "cyan"))
(define-attribute title-attribute
(:light :foreground "blue")
(:dark :foreground "cyan"))
(define-attribute position-attribute
(:light :foreground "dark red")
(:dark :foreground "red"))
(defvar *sourcelist-point*)
(defvar *current-sourcelist-buffer* nil)
(defstruct sourcelist
(buffer nil :read-only t :type buffer)
temp-point
(elements (make-array 0 :adjustable t :fill-pointer 0))
(index -1))
(defstruct jump
get-location-function
get-highlight-overlay-function)
(defun get-sourcelist (buffer)
(buffer-value buffer 'sourcelist))
(defun (setf get-sourcelist) (sourcelist buffer)
(setf (buffer-value buffer 'sourcelist) sourcelist))
(defun call-with-sourcelist (buffer-name function focus read-only-p enable-undo-p)
(let* ((buffer (make-buffer buffer-name :read-only-p read-only-p :enable-undo-p enable-undo-p))
(sourcelist (make-sourcelist :buffer buffer)))
(with-buffer-read-only buffer nil
(let ((*inhibit-read-only* t))
(erase-buffer buffer)
(with-point ((*sourcelist-point* (buffer-point buffer) :left-inserting))
(funcall function sourcelist))
(buffer-start (buffer-point buffer))
(change-buffer-mode buffer 'sourcelist-mode t)
(if focus
(setf (current-window) (display-buffer buffer))
(display-buffer buffer))
(setf (variable-value 'line-wrap :buffer buffer) nil)
(setf (get-sourcelist buffer) sourcelist)
(setf *current-sourcelist-buffer* buffer)))))
(defmacro with-sourcelist ((var buffer-name &key focus (read-only-p t) (enable-undo-p nil))
&body body)
`(call-with-sourcelist ,buffer-name
(lambda (,var)
,@body)
,focus
,read-only-p
,enable-undo-p))
(defun append-jump (sourcelist jump)
(vector-push-extend jump (sourcelist-elements sourcelist)))
(defun put-sourcelist-index (sourcelist start end)
(put-text-property start end
'sourcelist
(length (sourcelist-elements sourcelist))))
(defun append-jump-function (sourcelist start end jump-function)
(put-sourcelist-index sourcelist start end)
(append-jump sourcelist (make-jump :get-location-function jump-function)))
(defun append-sourcelist (sourcelist write-function jump-function
&key highlight-overlay-function)
(let ((point *sourcelist-point*))
(with-point ((start-point point :right-inserting))
(funcall write-function point)
(unless (start-line-p point)
(insert-character point #\newline))
(when jump-function
(put-sourcelist-index sourcelist start-point point)
(append-jump sourcelist
(make-jump :get-location-function jump-function
:get-highlight-overlay-function highlight-overlay-function))))))
(defun get-highlight-overlay-default (point)
(with-point ((start point)
(end point))
(make-overlay (back-to-indentation start)
(line-end end)
'jump-highlight)))
(defun jump-highlighting (&optional (point (current-point)) jump)
(let ((overlay
(funcall (alexandria:if-let
((fn (and jump (jump-get-highlight-overlay-function jump))))
fn
#'get-highlight-overlay-default)
point)))
(start-timer (make-timer (lambda ()
(delete-overlay overlay))
:name "jump-highlighting")
300)))
(defun jump-current-element (index sourcelist)
(let ((jump (aref (sourcelist-elements sourcelist) index)))
(funcall (jump-get-location-function jump)
(let ((buffer-name (sourcelist-buffer sourcelist)))
(lambda (buffer)
(with-point ((p (buffer-point buffer)))
(let ((sourcelist-window
(car (get-buffer-windows (get-buffer buffer-name)))))
(unless sourcelist-window
(let ((sourcelist-buffer (get-buffer buffer-name)))
(setf sourcelist-window
(display-buffer sourcelist-buffer))))
(if (eq (current-window) sourcelist-window)
(setf (current-window) (pop-to-buffer buffer))
(switch-to-buffer buffer))
(move-point (buffer-point buffer) p))))))
(jump-highlighting (current-point) jump)))
(define-key *global-keymap* "C-x n" 'sourcelist-next)
(define-key *global-keymap* "C-x C-n" 'sourcelist-next)
(define-key *global-keymap* "M-N" 'sourcelist-next)
(define-command sourcelist-next () ()
(when *current-sourcelist-buffer*
(alexandria:when-let ((sourcelist (get-sourcelist *current-sourcelist-buffer*)))
(when (< (1+ (sourcelist-index sourcelist))
(length (sourcelist-elements sourcelist)))
(jump-current-element
(incf (sourcelist-index sourcelist))
sourcelist)))))
(define-key *global-keymap* "C-x p" 'sourcelist-previous)
(define-key *global-keymap* "C-x C-p" 'sourcelist-previous)
(define-key *global-keymap* "M-P" 'sourcelist-previous)
(define-command sourcelist-previous () ()
(when *current-sourcelist-buffer*
(alexandria:when-let ((sourcelist (get-sourcelist *current-sourcelist-buffer*)))
(when (<= 0 (1- (sourcelist-index sourcelist)))
(jump-current-element
(decf (sourcelist-index sourcelist))
sourcelist)))))
(define-minor-mode sourcelist-mode
(:name "sourcelist"
:keymap *sourcelist-mode-keymap*))
(define-key *sourcelist-mode-keymap* "Return" 'sourcelist-jump)
(define-key *sourcelist-mode-keymap* "q" 'quit-active-window)
(define-command sourcelist-jump () ()
(alexandria:when-let ((sourcelist (get-sourcelist (current-buffer)))
(index (text-property-at (current-point) 'sourcelist)))
(jump-current-element (setf (sourcelist-index sourcelist) index)
sourcelist)))
| null | https://raw.githubusercontent.com/lem-project/lem/f9061a817492bbef0ff9aea27e5305d5ba6bcf16/src/ext/sourcelist.lisp | lisp | (defpackage :lem.sourcelist
(:use :cl :lem)
(:export :title-attribute
:position-attribute
:with-sourcelist
:append-jump-function
:append-sourcelist
:jump-highlighting)
#+sbcl
(:lock t))
(in-package :lem.sourcelist)
(define-attribute jump-highlight
(t :background "cyan"))
(define-attribute title-attribute
(:light :foreground "blue")
(:dark :foreground "cyan"))
(define-attribute position-attribute
(:light :foreground "dark red")
(:dark :foreground "red"))
(defvar *sourcelist-point*)
(defvar *current-sourcelist-buffer* nil)
(defstruct sourcelist
(buffer nil :read-only t :type buffer)
temp-point
(elements (make-array 0 :adjustable t :fill-pointer 0))
(index -1))
(defstruct jump
get-location-function
get-highlight-overlay-function)
(defun get-sourcelist (buffer)
(buffer-value buffer 'sourcelist))
(defun (setf get-sourcelist) (sourcelist buffer)
(setf (buffer-value buffer 'sourcelist) sourcelist))
(defun call-with-sourcelist (buffer-name function focus read-only-p enable-undo-p)
(let* ((buffer (make-buffer buffer-name :read-only-p read-only-p :enable-undo-p enable-undo-p))
(sourcelist (make-sourcelist :buffer buffer)))
(with-buffer-read-only buffer nil
(let ((*inhibit-read-only* t))
(erase-buffer buffer)
(with-point ((*sourcelist-point* (buffer-point buffer) :left-inserting))
(funcall function sourcelist))
(buffer-start (buffer-point buffer))
(change-buffer-mode buffer 'sourcelist-mode t)
(if focus
(setf (current-window) (display-buffer buffer))
(display-buffer buffer))
(setf (variable-value 'line-wrap :buffer buffer) nil)
(setf (get-sourcelist buffer) sourcelist)
(setf *current-sourcelist-buffer* buffer)))))
(defmacro with-sourcelist ((var buffer-name &key focus (read-only-p t) (enable-undo-p nil))
&body body)
`(call-with-sourcelist ,buffer-name
(lambda (,var)
,@body)
,focus
,read-only-p
,enable-undo-p))
(defun append-jump (sourcelist jump)
(vector-push-extend jump (sourcelist-elements sourcelist)))
(defun put-sourcelist-index (sourcelist start end)
(put-text-property start end
'sourcelist
(length (sourcelist-elements sourcelist))))
(defun append-jump-function (sourcelist start end jump-function)
(put-sourcelist-index sourcelist start end)
(append-jump sourcelist (make-jump :get-location-function jump-function)))
(defun append-sourcelist (sourcelist write-function jump-function
&key highlight-overlay-function)
(let ((point *sourcelist-point*))
(with-point ((start-point point :right-inserting))
(funcall write-function point)
(unless (start-line-p point)
(insert-character point #\newline))
(when jump-function
(put-sourcelist-index sourcelist start-point point)
(append-jump sourcelist
(make-jump :get-location-function jump-function
:get-highlight-overlay-function highlight-overlay-function))))))
(defun get-highlight-overlay-default (point)
(with-point ((start point)
(end point))
(make-overlay (back-to-indentation start)
(line-end end)
'jump-highlight)))
(defun jump-highlighting (&optional (point (current-point)) jump)
(let ((overlay
(funcall (alexandria:if-let
((fn (and jump (jump-get-highlight-overlay-function jump))))
fn
#'get-highlight-overlay-default)
point)))
(start-timer (make-timer (lambda ()
(delete-overlay overlay))
:name "jump-highlighting")
300)))
(defun jump-current-element (index sourcelist)
(let ((jump (aref (sourcelist-elements sourcelist) index)))
(funcall (jump-get-location-function jump)
(let ((buffer-name (sourcelist-buffer sourcelist)))
(lambda (buffer)
(with-point ((p (buffer-point buffer)))
(let ((sourcelist-window
(car (get-buffer-windows (get-buffer buffer-name)))))
(unless sourcelist-window
(let ((sourcelist-buffer (get-buffer buffer-name)))
(setf sourcelist-window
(display-buffer sourcelist-buffer))))
(if (eq (current-window) sourcelist-window)
(setf (current-window) (pop-to-buffer buffer))
(switch-to-buffer buffer))
(move-point (buffer-point buffer) p))))))
(jump-highlighting (current-point) jump)))
(define-key *global-keymap* "C-x n" 'sourcelist-next)
(define-key *global-keymap* "C-x C-n" 'sourcelist-next)
(define-key *global-keymap* "M-N" 'sourcelist-next)
(define-command sourcelist-next () ()
(when *current-sourcelist-buffer*
(alexandria:when-let ((sourcelist (get-sourcelist *current-sourcelist-buffer*)))
(when (< (1+ (sourcelist-index sourcelist))
(length (sourcelist-elements sourcelist)))
(jump-current-element
(incf (sourcelist-index sourcelist))
sourcelist)))))
(define-key *global-keymap* "C-x p" 'sourcelist-previous)
(define-key *global-keymap* "C-x C-p" 'sourcelist-previous)
(define-key *global-keymap* "M-P" 'sourcelist-previous)
(define-command sourcelist-previous () ()
(when *current-sourcelist-buffer*
(alexandria:when-let ((sourcelist (get-sourcelist *current-sourcelist-buffer*)))
(when (<= 0 (1- (sourcelist-index sourcelist)))
(jump-current-element
(decf (sourcelist-index sourcelist))
sourcelist)))))
(define-minor-mode sourcelist-mode
(:name "sourcelist"
:keymap *sourcelist-mode-keymap*))
(define-key *sourcelist-mode-keymap* "Return" 'sourcelist-jump)
(define-key *sourcelist-mode-keymap* "q" 'quit-active-window)
(define-command sourcelist-jump () ()
(alexandria:when-let ((sourcelist (get-sourcelist (current-buffer)))
(index (text-property-at (current-point) 'sourcelist)))
(jump-current-element (setf (sourcelist-index sourcelist) index)
sourcelist)))
| |
0ad487e64784764714297fae4b723cf8272c3918b1d08109255189641c0b69c4 | weyrick/roadsend-php | generate.scm | (module generate
(main main))
;;; XXX I added null-ok by hand! make sure this generates it next time
;;; around. --tpd 2005.4.12
;;; XXXXXX although... it looks like it's not used by php-gtk anyway
--tpd 2005.5.2
;;;
;;; globals
;;; =======
(define *classes* (make-hashtable))
(define *filename* #f)
;;;
;;; main
;;; ====
(define (main args)
(set! *filename* (list-ref args 1))
(generate-gtk-functions))
(define (generate-custom-properties)
(let ((classes '()))
(for-each-list *filename*
(lambda (x)
(when (eqv? (car x) 'object)
(set! classes (cons x classes)))))
(set! classes (reverse! classes))
(for-each (lambda (class)
(unless (null? (class-properties class))
(print "(def-property-getter (" (underscores-to-dashes (studly-to-underscores (symbol->string (class-cname class)))) "-custom-lookup obj prop ref? k) " (class-cname class))
(for-each (lambda (prop)
(print (tab) "(" (property-name prop) " " (property-type prop) ")"))
(class-properties class))
(display* (tab) ")" #\newline #\newline)))
classes)))
(define (generate-gtk-functions)
;;the static methods of the gtk class
(let ((classname 'Gtk)
(gtk-functions '()))
(for-each-list *filename*
(lambda (a)
(when (and (pair? a) (eqv? (car a) 'function))
(set! gtk-functions (cons a gtk-functions)))))
(set! gtk-functions (reverse! gtk-functions))
(print "(def-static-methods " classname " " (studly-to-underscores (symbol->string classname)))
(for-each (lambda (function)
(let ((str-name (symbol->string (function-cname function))))
(unless (or (pregexp-match "_new" str-name)
(pregexp-match "_get_type" str-name))
(display* (tab) "(")
(let ((function-options '()))
(when (function-requires-special-c-name? classname function)
(set! function-options
(cons* :c-name (function-special-c-name function)
function-options)))
(when (not (eqv? 'none (function-return-type function)))
(set! function-options
(cons* :return-type (function-return-type function)
function-options)))
(if (null? function-options)
(display (function-php-name function))
(display (cons (function-php-name function) function-options))))
(for-each-parameter function
(lambda (param)
(display* #\space
`(,(parameter-name param)
:gtk-type
,(parameter-type param)
,@(if (parameter-optional? param)
`(:default ,(parameter-default param))
'())))))
(display* ")" #\newline))))
gtk-functions)
(display* (tab) ")" #\newline #\newline)))
(define (generate-gdk-functions)
the static methods of the class
(let ((classname 'Gdk)
(gdk-functions '()))
(for-each-list *filename*
(lambda (a)
(when (and (pair? a) (eqv? (car a) 'function))
(set! gdk-functions (cons a gdk-functions)))))
(set! gdk-functions (reverse! gdk-functions))
(print "(def-static-methods " classname " " (studly-to-underscores (symbol->string classname)))
(for-each (lambda (function)
(display* (tab) "(")
(let ((function-options '()))
(when (function-requires-special-c-name? classname function)
(set! function-options
(cons* :c-name (function-special-c-name function)
function-options)))
(when (not (eqv? 'none (function-return-type function)))
(set! function-options
(cons* :return-type (function-return-type function)
function-options)))
(if (null? function-options)
(display (function-php-name function))
(display (cons (function-php-name function) function-options))))
(for-each-parameter function
(lambda (param)
(display* #\space
`(,(parameter-name param)
:gtk-type
,(parameter-type param)
,@(if (parameter-optional? param)
`(:default ,(parameter-default param))
'())))))
(display* ")" #\newline))
gdk-functions)
(display* (tab) ")" #\newline #\newline)))
(define (generate-gtk-methods)
(populate-classes)
(for-each-class (lambda (classname methods)
; (when (some? (lambda (a) (some? parameter-optional? (method-parameters a))) methods)
(print "(def-pgtk-methods " classname " " (studly-to-underscores (symbol->string classname)))
(for-each (lambda (method)
(display* (tab) "(")
(let ((method-options '()))
(when (method-requires-special-c-name? classname method)
(set! method-options
(cons* :c-name (method-special-c-name method)
method-options)))
(when (not (eqv? 'none (method-return-type method)))
(set! method-options
(cons* :return-type (method-return-type method)
method-options)))
(if (null? method-options)
(display (method-php-name method))
(display (cons (method-php-name method) method-options))))
(for-each-parameter method
(lambda (param)
(display* #\space
`(,(parameter-name param)
:gtk-type
,(parameter-type param)
,@(if (parameter-optional? param)
`(:default ,(parameter-default param))
'())))))
(display* ")" #\newline))
methods)
(display* (tab) ")" #\newline #\newline))))
(define (populate-classes)
(for-each-list *filename*
(lambda (x)
(when (eqv? (car x) 'method)
(add-method x)))))
;;;
;;; random utility functions
;;; ========================
(define (some? pred lst)
(bind-exit (return)
(for-each (lambda (a)
(when (pred a) (return #t)))
lst)
#f))
(define (symbol-downcase sym)
(string->symbol (string-downcase (symbol->string sym))))
(define (dashes-to-underscores str)
(pregexp-replace* "-" str "_"))
(define (underscores-to-dashes str)
(pregexp-replace* "_" str "-"))
(define (any-to-studly str)
(apply string-append (map string-capitalize (pregexp-split "(-|_)+" (string-downcase str)))))
(define (studly-to-underscores str)
(with-output-to-string
(lambda ()
(let ((first-char? #t)
(prev-was-caps? #t))
(let loop ((chars (string->list str)))
(unless (null? chars)
(let ((char (car chars)))
(if first-char?
(begin (set! first-char? #f)
(display (char-downcase char))
(loop (cdr chars)))
(begin (if (char-upper-case? char)
(if prev-was-caps?
(display (char-downcase char))
(begin (set! prev-was-caps? #t)
(display* #\_ (char-downcase char))))
(begin (set! prev-was-caps? #f)
(display char)))
(loop (cdr chars)))))))))))
(define (for-each-sexpr-if filename test thunk)
(with-input-from-file filename
(lambda ()
(let loop ((sexpr (read)))
(unless (eof-object? sexpr)
(when (test sexpr)
(thunk sexpr))
(loop (read)))))))
(define (for-each-sexpr filename thunk)
(for-each-sexpr-if filename (lambda (s) #t) thunk))
(define (for-each-list filename thunk)
(for-each-sexpr-if filename list? thunk))
(define (tab #!optional (times 1) (tab-length 3))
(make-string (* times tab-length) #\space))
;;;
;;; Properties
;;; ==========
(define (property-name prop)
(list-ref (list-ref prop 1) 2))
(define (property-type prop)
(list-ref (list-ref prop 1) 1))
;;;
;;; CLASSes
;;; =======
(define (class-properties class)
(filter (lambda (a) (and (pair? a) (eqv? (car a) 'field)))
class))
(define (class-name class)
(list-ref class 1))
(define (class-cname class)
(cadr (assoc 'c-name (cddr class))))
(define (class-methods classname)
(or (get-class classname)
'()))
(define (get-class classname)
(hashtable-get *classes* classname))
(define (add-class classname)
(when (not (get-class classname))
(hashtable-put! *classes* classname '())))
(define (for-each-class thunk)
(hashtable-for-each *classes* thunk))
;;;
;;; METHODs
;;; =======
(define (method-php-name method)
(list-ref method 1))
(define (method-class method)
(symbol-append (car (list-ref (list-ref method 2) 2))
(list-ref (list-ref method 2) 1)))
(define (method-cname method)
(list-ref (list-ref method 3) 1))
(define (method-return-type method)
(list-ref (list-ref method 4) 1))
(define (method-parameters method)
(filter (lambda (x)
(and (pair? x)
(eqv? 'parameter (car x))))
(list-tail method 5)))
(define (add-method method)
(let ((classname (method-class method)))
(add-class classname) ; ensure class has been added
(hashtable-put! *classes* classname (cons method (hashtable-get *classes* classname)))))
(define (for-each-method classname thunk)
(for-each thunk (class-methods classname)))
(define (method-requires-special-c-name? classname method)
(not
(string=? (string-append (studly-to-underscores (symbol->string classname))
"_"
(symbol->string (method-php-name method)))
(symbol->string (method-cname method)))))
(define (method-special-c-name method)
(method-cname method))
;;;
;;; METHODs
;;; =======
(define (function-php-name function)
(list-ref function 1))
(define (function-cname function)
(list-ref (list-ref function 2) 1))
(define (function-return-type function)
(list-ref (list-ref function 3) 1))
(define (function-parameters function)
(filter (lambda (x)
(and (pair? x)
(eqv? 'parameter (car x))))
function))
( list - tail function 4 ) ) )
(define (function-requires-special-c-name? classname function)
(not
(string=? (string-append (studly-to-underscores (symbol->string classname))
"_"
(symbol->string (function-php-name function)))
(symbol->string (function-cname function)))))
(define (function-special-c-name function)
(function-cname function))
;;;
;;; PARAMETERs
;;; ==========
(define (parameter-name param)
(list-ref (cadr param) 2))
(define (parameter-type param)
(list-ref (cadr param) 1))
(define (for-each-parameter method thunk)
(for-each thunk
(if (eqv? (car method) 'method)
(method-parameters method)
(function-parameters method))))
(define (parameter-optional? param)
(assoc 'default (cdr param)))
(define (parameter-default param)
(cadr (assoc 'default (cdr param)))) | null | https://raw.githubusercontent.com/weyrick/roadsend-php/d6301a897b1a02d7a85bdb915bea91d0991eb158/runtime/ext/gtk/defs/generate.scm | scheme | XXX I added null-ok by hand! make sure this generates it next time
around. --tpd 2005.4.12
XXXXXX although... it looks like it's not used by php-gtk anyway
globals
=======
main
====
the static methods of the gtk class
(when (some? (lambda (a) (some? parameter-optional? (method-parameters a))) methods)
random utility functions
========================
Properties
==========
CLASSes
=======
METHODs
=======
ensure class has been added
METHODs
=======
PARAMETERs
========== | (module generate
(main main))
--tpd 2005.5.2
(define *classes* (make-hashtable))
(define *filename* #f)
(define (main args)
(set! *filename* (list-ref args 1))
(generate-gtk-functions))
(define (generate-custom-properties)
(let ((classes '()))
(for-each-list *filename*
(lambda (x)
(when (eqv? (car x) 'object)
(set! classes (cons x classes)))))
(set! classes (reverse! classes))
(for-each (lambda (class)
(unless (null? (class-properties class))
(print "(def-property-getter (" (underscores-to-dashes (studly-to-underscores (symbol->string (class-cname class)))) "-custom-lookup obj prop ref? k) " (class-cname class))
(for-each (lambda (prop)
(print (tab) "(" (property-name prop) " " (property-type prop) ")"))
(class-properties class))
(display* (tab) ")" #\newline #\newline)))
classes)))
(define (generate-gtk-functions)
(let ((classname 'Gtk)
(gtk-functions '()))
(for-each-list *filename*
(lambda (a)
(when (and (pair? a) (eqv? (car a) 'function))
(set! gtk-functions (cons a gtk-functions)))))
(set! gtk-functions (reverse! gtk-functions))
(print "(def-static-methods " classname " " (studly-to-underscores (symbol->string classname)))
(for-each (lambda (function)
(let ((str-name (symbol->string (function-cname function))))
(unless (or (pregexp-match "_new" str-name)
(pregexp-match "_get_type" str-name))
(display* (tab) "(")
(let ((function-options '()))
(when (function-requires-special-c-name? classname function)
(set! function-options
(cons* :c-name (function-special-c-name function)
function-options)))
(when (not (eqv? 'none (function-return-type function)))
(set! function-options
(cons* :return-type (function-return-type function)
function-options)))
(if (null? function-options)
(display (function-php-name function))
(display (cons (function-php-name function) function-options))))
(for-each-parameter function
(lambda (param)
(display* #\space
`(,(parameter-name param)
:gtk-type
,(parameter-type param)
,@(if (parameter-optional? param)
`(:default ,(parameter-default param))
'())))))
(display* ")" #\newline))))
gtk-functions)
(display* (tab) ")" #\newline #\newline)))
(define (generate-gdk-functions)
the static methods of the class
(let ((classname 'Gdk)
(gdk-functions '()))
(for-each-list *filename*
(lambda (a)
(when (and (pair? a) (eqv? (car a) 'function))
(set! gdk-functions (cons a gdk-functions)))))
(set! gdk-functions (reverse! gdk-functions))
(print "(def-static-methods " classname " " (studly-to-underscores (symbol->string classname)))
(for-each (lambda (function)
(display* (tab) "(")
(let ((function-options '()))
(when (function-requires-special-c-name? classname function)
(set! function-options
(cons* :c-name (function-special-c-name function)
function-options)))
(when (not (eqv? 'none (function-return-type function)))
(set! function-options
(cons* :return-type (function-return-type function)
function-options)))
(if (null? function-options)
(display (function-php-name function))
(display (cons (function-php-name function) function-options))))
(for-each-parameter function
(lambda (param)
(display* #\space
`(,(parameter-name param)
:gtk-type
,(parameter-type param)
,@(if (parameter-optional? param)
`(:default ,(parameter-default param))
'())))))
(display* ")" #\newline))
gdk-functions)
(display* (tab) ")" #\newline #\newline)))
(define (generate-gtk-methods)
(populate-classes)
(for-each-class (lambda (classname methods)
(print "(def-pgtk-methods " classname " " (studly-to-underscores (symbol->string classname)))
(for-each (lambda (method)
(display* (tab) "(")
(let ((method-options '()))
(when (method-requires-special-c-name? classname method)
(set! method-options
(cons* :c-name (method-special-c-name method)
method-options)))
(when (not (eqv? 'none (method-return-type method)))
(set! method-options
(cons* :return-type (method-return-type method)
method-options)))
(if (null? method-options)
(display (method-php-name method))
(display (cons (method-php-name method) method-options))))
(for-each-parameter method
(lambda (param)
(display* #\space
`(,(parameter-name param)
:gtk-type
,(parameter-type param)
,@(if (parameter-optional? param)
`(:default ,(parameter-default param))
'())))))
(display* ")" #\newline))
methods)
(display* (tab) ")" #\newline #\newline))))
(define (populate-classes)
(for-each-list *filename*
(lambda (x)
(when (eqv? (car x) 'method)
(add-method x)))))
(define (some? pred lst)
(bind-exit (return)
(for-each (lambda (a)
(when (pred a) (return #t)))
lst)
#f))
(define (symbol-downcase sym)
(string->symbol (string-downcase (symbol->string sym))))
(define (dashes-to-underscores str)
(pregexp-replace* "-" str "_"))
(define (underscores-to-dashes str)
(pregexp-replace* "_" str "-"))
(define (any-to-studly str)
(apply string-append (map string-capitalize (pregexp-split "(-|_)+" (string-downcase str)))))
(define (studly-to-underscores str)
(with-output-to-string
(lambda ()
(let ((first-char? #t)
(prev-was-caps? #t))
(let loop ((chars (string->list str)))
(unless (null? chars)
(let ((char (car chars)))
(if first-char?
(begin (set! first-char? #f)
(display (char-downcase char))
(loop (cdr chars)))
(begin (if (char-upper-case? char)
(if prev-was-caps?
(display (char-downcase char))
(begin (set! prev-was-caps? #t)
(display* #\_ (char-downcase char))))
(begin (set! prev-was-caps? #f)
(display char)))
(loop (cdr chars)))))))))))
(define (for-each-sexpr-if filename test thunk)
(with-input-from-file filename
(lambda ()
(let loop ((sexpr (read)))
(unless (eof-object? sexpr)
(when (test sexpr)
(thunk sexpr))
(loop (read)))))))
(define (for-each-sexpr filename thunk)
(for-each-sexpr-if filename (lambda (s) #t) thunk))
(define (for-each-list filename thunk)
(for-each-sexpr-if filename list? thunk))
(define (tab #!optional (times 1) (tab-length 3))
(make-string (* times tab-length) #\space))
(define (property-name prop)
(list-ref (list-ref prop 1) 2))
(define (property-type prop)
(list-ref (list-ref prop 1) 1))
(define (class-properties class)
(filter (lambda (a) (and (pair? a) (eqv? (car a) 'field)))
class))
(define (class-name class)
(list-ref class 1))
(define (class-cname class)
(cadr (assoc 'c-name (cddr class))))
(define (class-methods classname)
(or (get-class classname)
'()))
(define (get-class classname)
(hashtable-get *classes* classname))
(define (add-class classname)
(when (not (get-class classname))
(hashtable-put! *classes* classname '())))
(define (for-each-class thunk)
(hashtable-for-each *classes* thunk))
(define (method-php-name method)
(list-ref method 1))
(define (method-class method)
(symbol-append (car (list-ref (list-ref method 2) 2))
(list-ref (list-ref method 2) 1)))
(define (method-cname method)
(list-ref (list-ref method 3) 1))
(define (method-return-type method)
(list-ref (list-ref method 4) 1))
(define (method-parameters method)
(filter (lambda (x)
(and (pair? x)
(eqv? 'parameter (car x))))
(list-tail method 5)))
(define (add-method method)
(let ((classname (method-class method)))
(hashtable-put! *classes* classname (cons method (hashtable-get *classes* classname)))))
(define (for-each-method classname thunk)
(for-each thunk (class-methods classname)))
(define (method-requires-special-c-name? classname method)
(not
(string=? (string-append (studly-to-underscores (symbol->string classname))
"_"
(symbol->string (method-php-name method)))
(symbol->string (method-cname method)))))
(define (method-special-c-name method)
(method-cname method))
(define (function-php-name function)
(list-ref function 1))
(define (function-cname function)
(list-ref (list-ref function 2) 1))
(define (function-return-type function)
(list-ref (list-ref function 3) 1))
(define (function-parameters function)
(filter (lambda (x)
(and (pair? x)
(eqv? 'parameter (car x))))
function))
( list - tail function 4 ) ) )
(define (function-requires-special-c-name? classname function)
(not
(string=? (string-append (studly-to-underscores (symbol->string classname))
"_"
(symbol->string (function-php-name function)))
(symbol->string (function-cname function)))))
(define (function-special-c-name function)
(function-cname function))
(define (parameter-name param)
(list-ref (cadr param) 2))
(define (parameter-type param)
(list-ref (cadr param) 1))
(define (for-each-parameter method thunk)
(for-each thunk
(if (eqv? (car method) 'method)
(method-parameters method)
(function-parameters method))))
(define (parameter-optional? param)
(assoc 'default (cdr param)))
(define (parameter-default param)
(cadr (assoc 'default (cdr param)))) |
8a8e286c90fc1040d9ac79dc7d82b7eeaf260e405ec8bc75936c17bb16c23923 | xapi-project/xen-api | test_no_migrate.ml |
* Copyright ( C ) 2016 Citrix Systems Inc.
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation ; version 2.1 only . with the special
* exception on linking described in file LICENSE .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
* Copyright (C) 2016 Citrix Systems Inc.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation; version 2.1 only. with the special
* exception on linking described in file LICENSE.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*)
let ops = [`suspend; `checkpoint; `pool_migrate; `migrate_send]
let op_string = function
| `suspend ->
"suspend"
| `checkpoint ->
"checkpoint"
| `pool_migrate ->
"pool_migrate"
| `migrate_send ->
"migrate_send"
| _ ->
"other"
let testcases =
nest , nomig , force , permitted
[
(false, false, false, true)
; (false, false, true, true)
; (false, true, false, false)
; (false, true, true, true)
; (true, false, false, false)
; (true, false, true, true)
; (true, true, false, false)
; (true, true, true, true)
]
NB , we choose a PV guest here for testing even though some of these options
make no sense for ( e.g. nested virt ) . The logic 's all the same though and
it means we can avoid making up a VM_guest_metrics record with the feature
flags set
make no sense for PV (e.g. nested virt). The logic's all the same though and
it means we can avoid making up a VM_guest_metrics record with the feature
flags set *)
let run_test (nv, nm, force, permitted) op =
let __context = Test_common.make_test_database () in
let vm =
Test_common.make_vm ~__context ~hVM_boot_policy:"" ~domain_type:`pv ()
in
let metrics = Db.VM.get_metrics ~__context ~self:vm in
let strict = not force in
Db.VM.set_power_state ~__context ~self:vm ~value:`Running ;
Db.VM_metrics.set_current_domain_type ~__context ~self:metrics
~value:(Db.VM.get_domain_type ~__context ~self:vm) ;
Db.VM_metrics.set_nested_virt ~__context ~self:metrics ~value:nv ;
Db.VM_metrics.set_nomigrate ~__context ~self:metrics ~value:nm ;
Xapi_vm_lifecycle.get_operation_error ~__context ~self:vm ~op ~strict
|> function
| None when permitted ->
()
| None ->
Alcotest.fail
(Printf.sprintf "nv=%b nm=%b force=%b permitted=%b op=%s" nv nm force
permitted (op_string op)
)
| Some _ when not permitted ->
()
| Some (x, _) ->
Alcotest.fail
(Printf.sprintf "nv=%b nm=%b force=%b permitted=%b op=%s error was=%s"
nv nm force permitted (op_string op) x
)
let test' op = List.iter (fun t -> run_test t op) testcases
let test =
[
("test_no_migrate_00", `Quick, fun () -> test' `suspend)
; ("test_no_migrate_01", `Quick, fun () -> test' `checkpoint)
; ("test_no_migrate_02", `Quick, fun () -> test' `pool_migrate)
; ("test_no_migrate_03", `Quick, fun () -> test' `migrate_send)
]
| null | https://raw.githubusercontent.com/xapi-project/xen-api/e984d34bd9ff60d7224a841270db0fe1dfe42e7a/ocaml/tests/test_no_migrate.ml | ocaml |
* Copyright ( C ) 2016 Citrix Systems Inc.
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation ; version 2.1 only . with the special
* exception on linking described in file LICENSE .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
* Copyright (C) 2016 Citrix Systems Inc.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation; version 2.1 only. with the special
* exception on linking described in file LICENSE.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*)
let ops = [`suspend; `checkpoint; `pool_migrate; `migrate_send]
let op_string = function
| `suspend ->
"suspend"
| `checkpoint ->
"checkpoint"
| `pool_migrate ->
"pool_migrate"
| `migrate_send ->
"migrate_send"
| _ ->
"other"
let testcases =
nest , nomig , force , permitted
[
(false, false, false, true)
; (false, false, true, true)
; (false, true, false, false)
; (false, true, true, true)
; (true, false, false, false)
; (true, false, true, true)
; (true, true, false, false)
; (true, true, true, true)
]
NB , we choose a PV guest here for testing even though some of these options
make no sense for ( e.g. nested virt ) . The logic 's all the same though and
it means we can avoid making up a VM_guest_metrics record with the feature
flags set
make no sense for PV (e.g. nested virt). The logic's all the same though and
it means we can avoid making up a VM_guest_metrics record with the feature
flags set *)
let run_test (nv, nm, force, permitted) op =
let __context = Test_common.make_test_database () in
let vm =
Test_common.make_vm ~__context ~hVM_boot_policy:"" ~domain_type:`pv ()
in
let metrics = Db.VM.get_metrics ~__context ~self:vm in
let strict = not force in
Db.VM.set_power_state ~__context ~self:vm ~value:`Running ;
Db.VM_metrics.set_current_domain_type ~__context ~self:metrics
~value:(Db.VM.get_domain_type ~__context ~self:vm) ;
Db.VM_metrics.set_nested_virt ~__context ~self:metrics ~value:nv ;
Db.VM_metrics.set_nomigrate ~__context ~self:metrics ~value:nm ;
Xapi_vm_lifecycle.get_operation_error ~__context ~self:vm ~op ~strict
|> function
| None when permitted ->
()
| None ->
Alcotest.fail
(Printf.sprintf "nv=%b nm=%b force=%b permitted=%b op=%s" nv nm force
permitted (op_string op)
)
| Some _ when not permitted ->
()
| Some (x, _) ->
Alcotest.fail
(Printf.sprintf "nv=%b nm=%b force=%b permitted=%b op=%s error was=%s"
nv nm force permitted (op_string op) x
)
let test' op = List.iter (fun t -> run_test t op) testcases
let test =
[
("test_no_migrate_00", `Quick, fun () -> test' `suspend)
; ("test_no_migrate_01", `Quick, fun () -> test' `checkpoint)
; ("test_no_migrate_02", `Quick, fun () -> test' `pool_migrate)
; ("test_no_migrate_03", `Quick, fun () -> test' `migrate_send)
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.