_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
9ddf0366ed1a3418388ef645a8c6dd640cf6c3f5d25b793751862d8bbd32e778 | wfnuser/sicp-solutions | e4-19.scm | MIT - Scheme will throw error . It works like 's way . | null | https://raw.githubusercontent.com/wfnuser/sicp-solutions/2c94b28d8ee004dcbfe7311f866e5a346ee01d12/ch4/e4-19.scm | scheme | MIT - Scheme will throw error . It works like 's way . | |
9e70e5fb1255521917debcbd88faecfa8fe6c4f9d7fae10a0335c8a024d98aaf | reflex-frp/reflex-dom-contrib | Xhr.hs | # LANGUAGE CPP #
{-# LANGUAGE FlexibleContexts #-}
# LANGUAGE GADTs #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE RecursiveDo #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE GADTs #
{-# LANGUAGE FlexibleContexts #-}
{-|
Convenience functions for dealing with XMLHttpRequest.
-}
module Reflex.Dom.Contrib.Xhr where
------------------------------------------------------------------------------
import Control.Lens
import Control.Monad.Reader
import Data.Aeson
import Data.ByteString.Lazy (ByteString)
import Data.Default
import Data.Map (Map)
import qualified Data.Map as M
import Data.String.Conv
import Data.Text (Text)
import qualified Data.Text as T
import Network.HTTP.Types.URI
------------------------------------------------------------------------------
import Reflex
import Reflex.Dom.Core
------------------------------------------------------------------------------
------------------------------------------------------------------------------
-- | URL encodes a map of key-value pairs.
formEncode :: Map Text ByteString -> Text
formEncode m =
T.intercalate "&" $
map (\(k,v) -> k <> "=" <> (encodeToText v)) $ M.toList m
where
encodeToText :: ByteString -> Text
encodeToText = toS . urlEncode True . toS
------------------------------------------------------------------------------
-- | Form encodes a JSON object.
formEncodeJSON :: ToJSON a => a -> Text
formEncodeJSON a = case toJSON a of
Object m ->
formEncode $ M.fromList $ map (bimap id encode) $ itoList m
_ -> error "formEncodeJSON requires an Object"
------------------------------------------------------------------------------
-- | Convenience function for constructing a POST request.
toPost
:: Text
-- ^ URL
-> a
-- ^ The post data
-> XhrRequest a
toPost url d =
XhrRequest "POST" url $ def { _xhrRequestConfig_headers = headerUrlEnc
, _xhrRequestConfig_sendData = d
}
where
headerUrlEnc :: Map Text Text
headerUrlEnc = "Content-type" =: "application/x-www-form-urlencoded"
--toPost
-- :: Text
-- -- ^ URL
-- -> Text
-- -- ^ The post data
- ( Maybe Text )
toPost url d =
XhrRequest " POST " url $ def { _ xhrRequestConfig_headers = headerUrlEnc
, Just d
-- }
-- where
-- headerUrlEnc :: Map Text Text
-- headerUrlEnc = "Content-type" =: "application/x-www-form-urlencoded"
------------------------------------------------------------------------------
| This is the foundational primitive for the XHR API because it gives you
-- full control over request generation and response parsing and also allows
-- you to match things that generated the request with their corresponding
-- responses.
performAJAX
:: (MonadWidget t m, IsXhrPayload a)
=> (a -> XhrRequest a)
-- ^ Function to build the request
-> (XhrResponse -> b)
-- ^ Function to parse the response
-> Event t a
-> m (Event t (a, b))
performAJAX mkRequest parseResponse req =
performEventAsync $ ffor req $ \a cb -> do
_ <- newXMLHttpRequest (mkRequest a) $ \response ->
liftIO $ cb (a, parseResponse response)
return ()
------------------------------------------------------------------------------
| Performs an async XHR taking a JSON object as input and another JSON
-- object as output.
performJsonAjax
:: (MonadWidget t m, ToJSON a, FromJSON b)
=> Event t (Text, a)
-- ^ Event with a URL and a JSON object to be sent
-> m (Event t (a, Maybe b))
performJsonAjax req =
performEventAsync $ ffor req $ \(url,a) cb -> do
_ <- newXMLHttpRequest (mkRequest url a) $ \response ->
liftIO $ cb (a, decodeXhrResponse response)
return ()
where
mkRequest url a = toPost url (T.unpack $ formEncodeJSON a)
| null | https://raw.githubusercontent.com/reflex-frp/reflex-dom-contrib/09d1223a3eadda768a6701410b4532fda8c7033d/src/Reflex/Dom/Contrib/Xhr.hs | haskell | # LANGUAGE FlexibleContexts #
# LANGUAGE OverloadedStrings #
# LANGUAGE RankNTypes #
# LANGUAGE RecursiveDo #
# LANGUAGE FlexibleContexts #
|
Convenience functions for dealing with XMLHttpRequest.
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
| URL encodes a map of key-value pairs.
----------------------------------------------------------------------------
| Form encodes a JSON object.
----------------------------------------------------------------------------
| Convenience function for constructing a POST request.
^ URL
^ The post data
toPost
:: Text
-- ^ URL
-> Text
-- ^ The post data
}
where
headerUrlEnc :: Map Text Text
headerUrlEnc = "Content-type" =: "application/x-www-form-urlencoded"
----------------------------------------------------------------------------
full control over request generation and response parsing and also allows
you to match things that generated the request with their corresponding
responses.
^ Function to build the request
^ Function to parse the response
----------------------------------------------------------------------------
object as output.
^ Event with a URL and a JSON object to be sent | # LANGUAGE CPP #
# LANGUAGE GADTs #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE GADTs #
module Reflex.Dom.Contrib.Xhr where
import Control.Lens
import Control.Monad.Reader
import Data.Aeson
import Data.ByteString.Lazy (ByteString)
import Data.Default
import Data.Map (Map)
import qualified Data.Map as M
import Data.String.Conv
import Data.Text (Text)
import qualified Data.Text as T
import Network.HTTP.Types.URI
import Reflex
import Reflex.Dom.Core
formEncode :: Map Text ByteString -> Text
formEncode m =
T.intercalate "&" $
map (\(k,v) -> k <> "=" <> (encodeToText v)) $ M.toList m
where
encodeToText :: ByteString -> Text
encodeToText = toS . urlEncode True . toS
formEncodeJSON :: ToJSON a => a -> Text
formEncodeJSON a = case toJSON a of
Object m ->
formEncode $ M.fromList $ map (bimap id encode) $ itoList m
_ -> error "formEncodeJSON requires an Object"
toPost
:: Text
-> a
-> XhrRequest a
toPost url d =
XhrRequest "POST" url $ def { _xhrRequestConfig_headers = headerUrlEnc
, _xhrRequestConfig_sendData = d
}
where
headerUrlEnc :: Map Text Text
headerUrlEnc = "Content-type" =: "application/x-www-form-urlencoded"
- ( Maybe Text )
toPost url d =
XhrRequest " POST " url $ def { _ xhrRequestConfig_headers = headerUrlEnc
, Just d
| This is the foundational primitive for the XHR API because it gives you
performAJAX
:: (MonadWidget t m, IsXhrPayload a)
=> (a -> XhrRequest a)
-> (XhrResponse -> b)
-> Event t a
-> m (Event t (a, b))
performAJAX mkRequest parseResponse req =
performEventAsync $ ffor req $ \a cb -> do
_ <- newXMLHttpRequest (mkRequest a) $ \response ->
liftIO $ cb (a, parseResponse response)
return ()
| Performs an async XHR taking a JSON object as input and another JSON
performJsonAjax
:: (MonadWidget t m, ToJSON a, FromJSON b)
=> Event t (Text, a)
-> m (Event t (a, Maybe b))
performJsonAjax req =
performEventAsync $ ffor req $ \(url,a) cb -> do
_ <- newXMLHttpRequest (mkRequest url a) $ \response ->
liftIO $ cb (a, decodeXhrResponse response)
return ()
where
mkRequest url a = toPost url (T.unpack $ formEncodeJSON a)
|
c362d1b413b5906c5ad619f7ae271badb4e75ee7b1211d19f23d8f01b01c7ece | snoyberg/mono-traversable | Containers.hs | # LANGUAGE FlexibleInstances #
{-# LANGUAGE ConstraintKinds #-}
# LANGUAGE FlexibleContexts #
# LANGUAGE TypeFamilies #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE CPP #
-- | Warning: This module should be considered highly experimental.
module Data.Containers where
import Prelude hiding (lookup)
import Data.Maybe (fromMaybe)
import qualified Data.Map.Strict as Map
import qualified Data.IntMap.Strict as IntMap
import qualified Data.HashMap.Strict as HashMap
import Data.Hashable (Hashable)
import qualified Data.Set as Set
import qualified Data.HashSet as HashSet
import Data.Monoid (Monoid (..))
import Data.MonoTraversable (MonoFunctor(..), MonoFoldable, MonoTraversable, Element, GrowingAppend, ofoldl', otoList)
import Data.Function (on)
import qualified Data.List as List
import qualified Data.IntSet as IntSet
import qualified Data.Text.Lazy as LText
import qualified Data.Text as Text
import qualified Data.ByteString.Lazy as LByteString
import qualified Data.ByteString as ByteString
import Control.Arrow ((***))
import GHC.Exts (Constraint)
| A container whose values are stored in Key - Value pairs .
class (Data.Monoid.Monoid set, Semigroup set, MonoFoldable set, Eq (ContainerKey set), GrowingAppend set) => SetContainer set where
-- | The type of the key
type ContainerKey set
-- | Check if there is a value with the supplied key
-- in the container.
member :: ContainerKey set -> set -> Bool
-- | Check if there isn't a value with the supplied key
-- in the container.
notMember :: ContainerKey set -> set -> Bool
| Get the union of two containers .
union :: set -> set -> set
| Combine a collection of @SetContainer@s , with left - most values overriding
-- when there are matching keys.
--
-- @since 1.0.0
unions :: (MonoFoldable mono, Element mono ~ set) => mono -> set
unions = ofoldl' union Data.Monoid.mempty
# INLINE unions #
| Get the difference of two containers .
difference :: set -> set -> set
| Get the intersection of two containers .
intersection :: set -> set -> set
-- | Get a list of all of the keys in the container.
keys :: set -> [ContainerKey set]
-- | This instance uses the functions from "Data.Map.Strict".
instance Ord k => SetContainer (Map.Map k v) where
type ContainerKey (Map.Map k v) = k
member = Map.member
{-# INLINE member #-}
notMember = Map.notMember
# INLINE notMember #
union = Map.union
# INLINE union #
unions = Map.unions . otoList
# INLINE unions #
difference = Map.difference
# INLINE difference #
intersection = Map.intersection
# INLINE intersection #
keys = Map.keys
# INLINE keys #
| This instance uses the functions from " Data . . Strict " .
instance (Eq key, Hashable key) => SetContainer (HashMap.HashMap key value) where
type ContainerKey (HashMap.HashMap key value) = key
member = HashMap.member
{-# INLINE member #-}
notMember k = not . HashMap.member k
# INLINE notMember #
union = HashMap.union
# INLINE union #
unions = HashMap.unions . otoList
# INLINE unions #
difference = HashMap.difference
# INLINE difference #
intersection = HashMap.intersection
# INLINE intersection #
keys = HashMap.keys
# INLINE keys #
-- | This instance uses the functions from "Data.IntMap.Strict".
instance SetContainer (IntMap.IntMap value) where
type ContainerKey (IntMap.IntMap value) = Int
member = IntMap.member
{-# INLINE member #-}
notMember = IntMap.notMember
# INLINE notMember #
union = IntMap.union
# INLINE union #
unions = IntMap.unions . otoList
# INLINE unions #
difference = IntMap.difference
# INLINE difference #
intersection = IntMap.intersection
# INLINE intersection #
keys = IntMap.keys
# INLINE keys #
instance Ord element => SetContainer (Set.Set element) where
type ContainerKey (Set.Set element) = element
member = Set.member
{-# INLINE member #-}
notMember = Set.notMember
# INLINE notMember #
union = Set.union
# INLINE union #
unions = Set.unions . otoList
# INLINE unions #
difference = Set.difference
# INLINE difference #
intersection = Set.intersection
# INLINE intersection #
keys = Set.toList
# INLINE keys #
instance (Eq element, Hashable element) => SetContainer (HashSet.HashSet element) where
type ContainerKey (HashSet.HashSet element) = element
member = HashSet.member
{-# INLINE member #-}
notMember e = not . HashSet.member e
# INLINE notMember #
union = HashSet.union
# INLINE union #
difference = HashSet.difference
# INLINE difference #
intersection = HashSet.intersection
# INLINE intersection #
keys = HashSet.toList
# INLINE keys #
instance SetContainer IntSet.IntSet where
type ContainerKey IntSet.IntSet = Int
member = IntSet.member
{-# INLINE member #-}
notMember = IntSet.notMember
# INLINE notMember #
union = IntSet.union
# INLINE union #
difference = IntSet.difference
# INLINE difference #
intersection = IntSet.intersection
# INLINE intersection #
keys = IntSet.toList
# INLINE keys #
instance Eq key => SetContainer [(key, value)] where
type ContainerKey [(key, value)] = key
member k = List.any ((== k) . fst)
{-# INLINE member #-}
notMember k = not . member k
# INLINE notMember #
union = List.unionBy ((==) `on` fst)
# INLINE union #
x `difference` y =
loop x
where
loop [] = []
loop ((k, v):rest) =
case lookup k y of
Nothing -> (k, v) : loop rest
Just _ -> loop rest
intersection = List.intersectBy ((==) `on` fst)
# INLINE intersection #
keys = map fst
# INLINE keys #
-- | A guaranteed-polymorphic @Map@, which allows for more polymorphic versions
-- of functions.
class PolyMap map where
| Get the difference between two maps , using the left map 's values .
differenceMap :: map value1 -> map value2 -> map value1
: : ( value1 - > value2 - > Maybe )
- > map - > map value2 - > map value1
differenceWithMap :: (value1 -> value2 -> Maybe value1)
-> map value1 -> map value2 -> map value1
-}
| Get the intersection of two maps , using the left map 's values .
intersectionMap :: map value1 -> map value2 -> map value1
| Get the intersection of two maps with a supplied function
-- that takes in the left map's value and the right map's value.
intersectionWithMap :: (value1 -> value2 -> value3)
-> map value1 -> map value2 -> map value3
-- | This instance uses the functions from "Data.Map.Strict".
instance Ord key => PolyMap (Map.Map key) where
differenceMap = Map.difference
# INLINE differenceMap #
= Map.differenceWith
intersectionMap = Map.intersection
# INLINE intersectionMap #
intersectionWithMap = Map.intersectionWith
# INLINE intersectionWithMap #
| This instance uses the functions from " Data . . Strict " .
instance (Eq key, Hashable key) => PolyMap (HashMap.HashMap key) where
differenceMap = HashMap.difference
# INLINE differenceMap #
= HashMap.differenceWith
intersectionMap = HashMap.intersection
# INLINE intersectionMap #
intersectionWithMap = HashMap.intersectionWith
# INLINE intersectionWithMap #
-- | This instance uses the functions from "Data.IntMap.Strict".
instance PolyMap IntMap.IntMap where
differenceMap = IntMap.difference
# INLINE differenceMap #
= IntMap.differenceWith
intersectionMap = IntMap.intersection
# INLINE intersectionMap #
intersectionWithMap = IntMap.intersectionWith
# INLINE intersectionWithMap #
-- | A @Map@ type polymorphic in both its key and value.
class BiPolyMap map where
type BPMKeyConstraint map key :: Constraint
mapKeysWith :: (BPMKeyConstraint map k1, BPMKeyConstraint map k2)
=> (v -> v -> v) -- ^ combine values that now overlap
-> (k1 -> k2)
-> map k1 v
-> map k2 v
instance BiPolyMap Map.Map where
type BPMKeyConstraint Map.Map key = Ord key
mapKeysWith = Map.mapKeysWith
# INLINE mapKeysWith #
instance BiPolyMap HashMap.HashMap where
type BPMKeyConstraint HashMap.HashMap key = (Hashable key, Eq key)
mapKeysWith g f =
mapFromList . unionsWith g . map go . mapToList
where
go (k, v) = [(f k, v)]
# INLINE mapKeysWith #
-- | Polymorphic typeclass for interacting with different map types
class (MonoTraversable map, SetContainer map) => IsMap map where
| In some cases , ' MapValue ' and ' Element ' will be different , e.g. , the
' IsMap ' instance of associated lists .
type MapValue map
-- | Look up a value in a map with a specified key.
lookup :: ContainerKey map -> map -> Maybe (MapValue map)
-- | Insert a key-value pair into a map.
insertMap :: ContainerKey map -> MapValue map -> map -> map
-- | Delete a key-value pair of a map using a specified key.
deleteMap :: ContainerKey map -> map -> map
-- | Create a map from a single key-value pair.
singletonMap :: ContainerKey map -> MapValue map -> map
-- | Convert a list of key-value pairs to a map
mapFromList :: [(ContainerKey map, MapValue map)] -> map
-- | Convert a map to a list of key-value pairs.
mapToList :: map -> [(ContainerKey map, MapValue map)]
-- | Like 'lookup', but uses a default value when the key does
-- not exist in the map.
findWithDefault :: MapValue map -> ContainerKey map -> map -> MapValue map
findWithDefault def key = fromMaybe def . lookup key
-- | Insert a key-value pair into a map.
--
-- Inserts the value directly if the key does not exist in the map. Otherwise,
-- apply a supplied function that accepts the new value and the previous value
-- and insert that result into the map.
insertWith :: (MapValue map -> MapValue map -> MapValue map)
-- ^ function that accepts the new value and the
-- previous value and returns the value that will be
-- set in the map.
-> ContainerKey map -- ^ key
-> MapValue map -- ^ new value to insert
-> map -- ^ input map
-> map -- ^ resulting map
insertWith f k v m =
v' `seq` insertMap k v' m
where
v' =
case lookup k m of
Nothing -> v
Just vold -> f v vold
-- | Insert a key-value pair into a map.
--
-- Inserts the value directly if the key does not exist in the map. Otherwise,
-- apply a supplied function that accepts the key, the new value, and the
-- previous value and insert that result into the map.
insertWithKey
:: (ContainerKey map -> MapValue map -> MapValue map -> MapValue map)
-- ^ function that accepts the key, the new value, and the
-- previous value and returns the value that will be
-- set in the map.
-> ContainerKey map -- ^ key
-> MapValue map -- ^ new value to insert
-> map -- ^ input map
-> map -- ^ resulting map
insertWithKey f k v m =
v' `seq` insertMap k v' m
where
v' =
case lookup k m of
Nothing -> v
Just vold -> f k v vold
-- | Insert a key-value pair into a map, return the previous key's value
-- if it existed.
--
-- Inserts the value directly if the key does not exist in the map. Otherwise,
-- apply a supplied function that accepts the key, the new value, and the
-- previous value and insert that result into the map.
insertLookupWithKey
:: (ContainerKey map -> MapValue map -> MapValue map -> MapValue map)
-- ^ function that accepts the key, the new value, and the
-- previous value and returns the value that will be
-- set in the map.
-> ContainerKey map -- ^ key
-> MapValue map -- ^ new value to insert
-> map -- ^ input map
-> (Maybe (MapValue map), map) -- ^ previous value and the resulting map
insertLookupWithKey f k v m =
v' `seq` (mold, insertMap k v' m)
where
(mold, v') =
case lookup k m of
Nothing -> (Nothing, v)
Just vold -> (Just vold, f k v vold)
-- | Apply a function to the value of a given key.
--
-- Returns the input map when the key-value pair does not exist.
adjustMap
:: (MapValue map -> MapValue map)
-- ^ function to apply to the previous value
-> ContainerKey map -- ^ key
-> map -- ^ input map
-> map -- ^ resulting map
adjustMap f k m =
case lookup k m of
Nothing -> m
Just v ->
let v' = f v
in v' `seq` insertMap k v' m
-- | Equivalent to 'adjustMap', but the function accepts the key,
-- as well as the previous value.
adjustWithKey
:: (ContainerKey map -> MapValue map -> MapValue map)
-- ^ function that accepts the key and the previous value
-- and returns the new value
-> ContainerKey map -- ^ key
-> map -- ^ input map
-> map -- ^ resulting map
adjustWithKey f k m =
case lookup k m of
Nothing -> m
Just v ->
let v' = f k v
in v' `seq` insertMap k v' m
-- | Apply a function to the value of a given key.
--
-- If the function returns 'Nothing', this deletes the key-value pair.
--
-- Returns the input map when the key-value pair does not exist.
updateMap
:: (MapValue map -> Maybe (MapValue map))
-- ^ function that accepts the previous value
-- and returns the new value or 'Nothing'
-> ContainerKey map -- ^ key
-> map -- ^ input map
-> map -- ^ resulting map
updateMap f k m =
case lookup k m of
Nothing -> m
Just v ->
case f v of
Nothing -> deleteMap k m
Just v' -> v' `seq` insertMap k v' m
-- | Equivalent to 'updateMap', but the function accepts the key,
-- as well as the previous value.
updateWithKey
:: (ContainerKey map -> MapValue map -> Maybe (MapValue map))
-- ^ function that accepts the key and the previous value
-- and returns the new value or 'Nothing'
-> ContainerKey map -- ^ key
-> map -- ^ input map
-> map -- ^ resulting map
updateWithKey f k m =
case lookup k m of
Nothing -> m
Just v ->
case f k v of
Nothing -> deleteMap k m
Just v' -> v' `seq` insertMap k v' m
-- | Apply a function to the value of a given key.
--
-- If the map does not contain the key this returns 'Nothing'
-- and the input map.
--
-- If the map does contain the key but the function returns 'Nothing',
-- this returns the previous value and the map with the key-value pair removed.
--
-- If the map contains the key and the function returns a value,
-- this returns the new value and the map with the key-value pair with the new value.
updateLookupWithKey
:: (ContainerKey map -> MapValue map -> Maybe (MapValue map))
-- ^ function that accepts the key and the previous value
-- and returns the new value or 'Nothing'
-> ContainerKey map -- ^ key
-> map -- ^ input map
-> (Maybe (MapValue map), map) -- ^ previous/new value and the resulting map
updateLookupWithKey f k m =
case lookup k m of
Nothing -> (Nothing, m)
Just v ->
case f k v of
Nothing -> (Just v, deleteMap k m)
Just v' -> v' `seq` (Just v', insertMap k v' m)
-- | Update/Delete the value of a given key.
--
-- Applies a function to previous value of a given key, if it results in 'Nothing'
-- delete the key-value pair from the map, otherwise replace the previous value
-- with the new value.
alterMap
:: (Maybe (MapValue map) -> Maybe (MapValue map))
-- ^ function that accepts the previous value and
-- returns the new value or 'Nothing'
-> ContainerKey map -- ^ key
-> map -- ^ input map
-> map -- ^ resulting map
alterMap f k m =
case f mold of
Nothing ->
case mold of
Nothing -> m
Just _ -> deleteMap k m
Just v -> insertMap k v m
where
mold = lookup k m
| Combine two maps .
--
-- When a key exists in both maps, apply a function
-- to both of the values and use the result of that as the value
-- of the key in the resulting map.
unionWith
:: (MapValue map -> MapValue map -> MapValue map)
^ function that accepts the first map 's value and the second map 's value
-- and returns the new value that will be used
^ first map
^ second map
-> map -- ^ resulting map
unionWith f x y =
mapFromList $ loop $ mapToList x ++ mapToList y
where
loop [] = []
loop ((k, v):rest) =
case List.lookup k rest of
Nothing -> (k, v) : loop rest
Just v' -> (k, f v v') : loop (deleteMap k rest)
-- Equivalent to 'unionWith', but the function accepts the key,
-- as well as both of the map's values.
unionWithKey
:: (ContainerKey map -> MapValue map -> MapValue map -> MapValue map)
^ function that accepts the key , the first map 's value and the
second map 's value and returns the new value that will be used
^ first map
^ second map
-> map -- ^ resulting map
unionWithKey f x y =
mapFromList $ loop $ mapToList x ++ mapToList y
where
loop [] = []
loop ((k, v):rest) =
case List.lookup k rest of
Nothing -> (k, v) : loop rest
Just v' -> (k, f k v v') : loop (deleteMap k rest)
-- | Combine a list of maps.
--
When a key exists in two different maps , apply a function
-- to both of the values and use the result of that as the value
-- of the key in the resulting map.
unionsWith
:: (MapValue map -> MapValue map -> MapValue map)
^ function that accepts the first map 's value and the second map 's value
-- and returns the new value that will be used
-> [map] -- ^ input list of maps
-> map -- ^ resulting map
unionsWith _ [] = mempty
unionsWith _ [x] = x
unionsWith f (x:y:z) = unionsWith f (unionWith f x y:z)
-- | Apply a function over every key-value pair of a map.
mapWithKey
:: (ContainerKey map -> MapValue map -> MapValue map)
-- ^ function that accepts the key and the previous value
-- and returns the new value
-> map -- ^ input map
-> map -- ^ resulting map
mapWithKey f =
mapFromList . map go . mapToList
where
go (k, v) = (k, f k v)
-- | Apply a function over every key of a pair and run
-- 'unionsWith' over the results.
omapKeysWith
:: (MapValue map -> MapValue map -> MapValue map)
^ function that accepts the first map 's value and the second map 's value
-- and returns the new value that will be used
-> (ContainerKey map -> ContainerKey map)
-- ^ function that accepts the previous key and
-- returns the new key
-> map -- ^ input map
-> map -- ^ resulting map
omapKeysWith g f =
mapFromList . unionsWith g . map go . mapToList
where
go (k, v) = [(f k, v)]
-- | Filter values in a map.
--
-- @since 1.0.9.0
filterMap :: IsMap map => (MapValue map -> Bool) -> map -> map
filterMap p = mapFromList . filter (p . snd) . mapToList
-- | This instance uses the functions from "Data.Map.Strict".
instance Ord key => IsMap (Map.Map key value) where
type MapValue (Map.Map key value) = value
lookup = Map.lookup
{-# INLINE lookup #-}
insertMap = Map.insert
# INLINE insertMap #
deleteMap = Map.delete
# INLINE deleteMap #
singletonMap = Map.singleton
# INLINE singletonMap #
mapFromList = Map.fromList
# INLINE mapFromList #
mapToList = Map.toList
# INLINE mapToList #
findWithDefault = Map.findWithDefault
# INLINE findWithDefault #
insertWith = Map.insertWith
# INLINE insertWith #
insertWithKey = Map.insertWithKey
# INLINE insertWithKey #
insertLookupWithKey = Map.insertLookupWithKey
# INLINE insertLookupWithKey #
adjustMap = Map.adjust
# INLINE adjustMap #
adjustWithKey = Map.adjustWithKey
# INLINE adjustWithKey #
updateMap = Map.update
# INLINE updateMap #
updateWithKey = Map.updateWithKey
# INLINE updateWithKey #
updateLookupWithKey = Map.updateLookupWithKey
# INLINE updateLookupWithKey #
alterMap = Map.alter
# INLINE alterMap #
unionWith = Map.unionWith
# INLINE unionWith #
unionWithKey = Map.unionWithKey
# INLINE unionWithKey #
unionsWith = Map.unionsWith
# INLINE unionsWith #
mapWithKey = Map.mapWithKey
# INLINE mapWithKey #
omapKeysWith = Map.mapKeysWith
# INLINE omapKeysWith #
filterMap = Map.filter
{-# INLINE filterMap #-}
| This instance uses the functions from " Data . . Strict " .
instance (Eq key, Hashable key) => IsMap (HashMap.HashMap key value) where
type MapValue (HashMap.HashMap key value) = value
lookup = HashMap.lookup
{-# INLINE lookup #-}
insertMap = HashMap.insert
# INLINE insertMap #
deleteMap = HashMap.delete
# INLINE deleteMap #
singletonMap = HashMap.singleton
# INLINE singletonMap #
mapFromList = HashMap.fromList
# INLINE mapFromList #
mapToList = HashMap.toList
# INLINE mapToList #
--findWithDefault = HashMap.findWithDefault
insertWith = HashMap.insertWith
# INLINE insertWith #
insertWithKey = HashMap.insertWithKey
--insertLookupWithKey = HashMap.insertLookupWithKey
adjustMap = HashMap.adjust
# INLINE adjustMap #
adjustWithKey = HashMap.adjustWithKey
--updateMap = HashMap.update
--updateWithKey = HashMap.updateWithKey
updateLookupWithKey = HashMap.updateLookupWithKey
alterMap =
unionWith = HashMap.unionWith
# INLINE unionWith #
unionWithKey = HashMap.unionWithKey
--unionsWith = HashMap.unionsWith
--mapWithKey = HashMap.mapWithKey
--mapKeysWith = HashMap.mapKeysWith
filterMap = HashMap.filter
{-# INLINE filterMap #-}
-- | This instance uses the functions from "Data.IntMap.Strict".
instance IsMap (IntMap.IntMap value) where
type MapValue (IntMap.IntMap value) = value
lookup = IntMap.lookup
{-# INLINE lookup #-}
insertMap = IntMap.insert
# INLINE insertMap #
deleteMap = IntMap.delete
# INLINE deleteMap #
singletonMap = IntMap.singleton
# INLINE singletonMap #
mapFromList = IntMap.fromList
# INLINE mapFromList #
mapToList = IntMap.toList
# INLINE mapToList #
findWithDefault = IntMap.findWithDefault
# INLINE findWithDefault #
insertWith = IntMap.insertWith
# INLINE insertWith #
insertWithKey = IntMap.insertWithKey
# INLINE insertWithKey #
insertLookupWithKey = IntMap.insertLookupWithKey
# INLINE insertLookupWithKey #
adjustMap = IntMap.adjust
# INLINE adjustMap #
adjustWithKey = IntMap.adjustWithKey
# INLINE adjustWithKey #
updateMap = IntMap.update
# INLINE updateMap #
updateWithKey = IntMap.updateWithKey
# INLINE updateWithKey #
--updateLookupWithKey = IntMap.updateLookupWithKey
alterMap = IntMap.alter
# INLINE alterMap #
unionWith = IntMap.unionWith
# INLINE unionWith #
unionWithKey = IntMap.unionWithKey
# INLINE unionWithKey #
unionsWith = IntMap.unionsWith
# INLINE unionsWith #
mapWithKey = IntMap.mapWithKey
# INLINE mapWithKey #
omapKeysWith = IntMap.mapKeysWith
# INLINE omapKeysWith #
filterMap = IntMap.filter
{-# INLINE filterMap #-}
instance Eq key => IsMap [(key, value)] where
type MapValue [(key, value)] = value
lookup = List.lookup
{-# INLINE lookup #-}
insertMap k v = ((k, v):) . deleteMap k
# INLINE insertMap #
deleteMap k = List.filter ((/= k) . fst)
# INLINE deleteMap #
singletonMap k v = [(k, v)]
# INLINE singletonMap #
mapFromList = id
# INLINE mapFromList #
mapToList = id
# INLINE mapToList #
-- | Polymorphic typeclass for interacting with different set types
class (SetContainer set, Element set ~ ContainerKey set) => IsSet set where
-- | Insert a value into a set.
insertSet :: Element set -> set -> set
-- | Delete a value from a set.
deleteSet :: Element set -> set -> set
-- | Create a set from a single element.
singletonSet :: Element set -> set
-- | Convert a list to a set.
setFromList :: [Element set] -> set
-- | Convert a set to a list.
setToList :: set -> [Element set]
-- | Filter values in a set.
--
-- @since 1.0.12.0
filterSet :: (Element set -> Bool) -> set -> set
filterSet p = setFromList . filter p . setToList
instance Ord element => IsSet (Set.Set element) where
insertSet = Set.insert
# INLINE insertSet #
deleteSet = Set.delete
# INLINE deleteSet #
singletonSet = Set.singleton
# INLINE singletonSet #
setFromList = Set.fromList
# INLINE setFromList #
setToList = Set.toList
# INLINE setToList #
filterSet = Set.filter
# INLINE filterSet #
instance (Eq element, Hashable element) => IsSet (HashSet.HashSet element) where
insertSet = HashSet.insert
# INLINE insertSet #
deleteSet = HashSet.delete
# INLINE deleteSet #
singletonSet = HashSet.singleton
# INLINE singletonSet #
setFromList = HashSet.fromList
# INLINE setFromList #
setToList = HashSet.toList
# INLINE setToList #
filterSet = HashSet.filter
# INLINE filterSet #
instance IsSet IntSet.IntSet where
insertSet = IntSet.insert
# INLINE insertSet #
deleteSet = IntSet.delete
# INLINE deleteSet #
singletonSet = IntSet.singleton
# INLINE singletonSet #
setFromList = IntSet.fromList
# INLINE setFromList #
setToList = IntSet.toList
# INLINE setToList #
filterSet = IntSet.filter
# INLINE filterSet #
| Zip operations on ' MonoFunctor 's .
class MonoFunctor mono => MonoZip mono where
| Combine each element of two ' 's using a supplied function .
ozipWith :: (Element mono -> Element mono -> Element mono) -> mono -> mono -> mono
| Take two ' 's and return a list of the pairs of their elements .
ozip :: mono -> mono -> [(Element mono, Element mono)]
| Take a list of pairs of elements and return a ' ' of the first
components and a ' ' of the second components .
ounzip :: [(Element mono, Element mono)] -> (mono, mono)
instance MonoZip ByteString.ByteString where
ozip = ByteString.zip
ounzip = ByteString.unzip
ozipWith f xs = ByteString.pack . ByteString.zipWith f xs
# INLINE ozip #
# INLINE ounzip #
# INLINE ozipWith #
instance MonoZip LByteString.ByteString where
ozip = LByteString.zip
ounzip = LByteString.unzip
ozipWith f xs = LByteString.pack . LByteString.zipWith f xs
# INLINE ozip #
# INLINE ounzip #
# INLINE ozipWith #
instance MonoZip Text.Text where
ozip = Text.zip
ounzip = (Text.pack *** Text.pack) . List.unzip
ozipWith = Text.zipWith
# INLINE ozip #
# INLINE ounzip #
# INLINE ozipWith #
instance MonoZip LText.Text where
ozip = LText.zip
ounzip = (LText.pack *** LText.pack) . List.unzip
ozipWith = LText.zipWith
# INLINE ozip #
# INLINE ounzip #
# INLINE ozipWith #
-- | Type class for maps whose keys can be converted into sets.
class SetContainer set => HasKeysSet set where
-- | Type of the key set.
type KeySet set
-- | Convert a map into a set of its keys.
keysSet :: set -> KeySet set
instance Ord k => HasKeysSet (Map.Map k v) where
type KeySet (Map.Map k v) = Set.Set k
keysSet = Map.keysSet
instance HasKeysSet (IntMap.IntMap v) where
type KeySet (IntMap.IntMap v) = IntSet.IntSet
keysSet = IntMap.keysSet
instance (Hashable k, Eq k) => HasKeysSet (HashMap.HashMap k v) where
type KeySet (HashMap.HashMap k v) = HashSet.HashSet k
keysSet = setFromList . HashMap.keys
| null | https://raw.githubusercontent.com/snoyberg/mono-traversable/224e4c014310e93877b39fa8c22a9d76492d42c4/mono-traversable/src/Data/Containers.hs | haskell | # LANGUAGE ConstraintKinds #
| Warning: This module should be considered highly experimental.
| The type of the key
| Check if there is a value with the supplied key
in the container.
| Check if there isn't a value with the supplied key
in the container.
when there are matching keys.
@since 1.0.0
| Get a list of all of the keys in the container.
| This instance uses the functions from "Data.Map.Strict".
# INLINE member #
# INLINE member #
| This instance uses the functions from "Data.IntMap.Strict".
# INLINE member #
# INLINE member #
# INLINE member #
# INLINE member #
# INLINE member #
| A guaranteed-polymorphic @Map@, which allows for more polymorphic versions
of functions.
that takes in the left map's value and the right map's value.
| This instance uses the functions from "Data.Map.Strict".
| This instance uses the functions from "Data.IntMap.Strict".
| A @Map@ type polymorphic in both its key and value.
^ combine values that now overlap
| Polymorphic typeclass for interacting with different map types
| Look up a value in a map with a specified key.
| Insert a key-value pair into a map.
| Delete a key-value pair of a map using a specified key.
| Create a map from a single key-value pair.
| Convert a list of key-value pairs to a map
| Convert a map to a list of key-value pairs.
| Like 'lookup', but uses a default value when the key does
not exist in the map.
| Insert a key-value pair into a map.
Inserts the value directly if the key does not exist in the map. Otherwise,
apply a supplied function that accepts the new value and the previous value
and insert that result into the map.
^ function that accepts the new value and the
previous value and returns the value that will be
set in the map.
^ key
^ new value to insert
^ input map
^ resulting map
| Insert a key-value pair into a map.
Inserts the value directly if the key does not exist in the map. Otherwise,
apply a supplied function that accepts the key, the new value, and the
previous value and insert that result into the map.
^ function that accepts the key, the new value, and the
previous value and returns the value that will be
set in the map.
^ key
^ new value to insert
^ input map
^ resulting map
| Insert a key-value pair into a map, return the previous key's value
if it existed.
Inserts the value directly if the key does not exist in the map. Otherwise,
apply a supplied function that accepts the key, the new value, and the
previous value and insert that result into the map.
^ function that accepts the key, the new value, and the
previous value and returns the value that will be
set in the map.
^ key
^ new value to insert
^ input map
^ previous value and the resulting map
| Apply a function to the value of a given key.
Returns the input map when the key-value pair does not exist.
^ function to apply to the previous value
^ key
^ input map
^ resulting map
| Equivalent to 'adjustMap', but the function accepts the key,
as well as the previous value.
^ function that accepts the key and the previous value
and returns the new value
^ key
^ input map
^ resulting map
| Apply a function to the value of a given key.
If the function returns 'Nothing', this deletes the key-value pair.
Returns the input map when the key-value pair does not exist.
^ function that accepts the previous value
and returns the new value or 'Nothing'
^ key
^ input map
^ resulting map
| Equivalent to 'updateMap', but the function accepts the key,
as well as the previous value.
^ function that accepts the key and the previous value
and returns the new value or 'Nothing'
^ key
^ input map
^ resulting map
| Apply a function to the value of a given key.
If the map does not contain the key this returns 'Nothing'
and the input map.
If the map does contain the key but the function returns 'Nothing',
this returns the previous value and the map with the key-value pair removed.
If the map contains the key and the function returns a value,
this returns the new value and the map with the key-value pair with the new value.
^ function that accepts the key and the previous value
and returns the new value or 'Nothing'
^ key
^ input map
^ previous/new value and the resulting map
| Update/Delete the value of a given key.
Applies a function to previous value of a given key, if it results in 'Nothing'
delete the key-value pair from the map, otherwise replace the previous value
with the new value.
^ function that accepts the previous value and
returns the new value or 'Nothing'
^ key
^ input map
^ resulting map
When a key exists in both maps, apply a function
to both of the values and use the result of that as the value
of the key in the resulting map.
and returns the new value that will be used
^ resulting map
Equivalent to 'unionWith', but the function accepts the key,
as well as both of the map's values.
^ resulting map
| Combine a list of maps.
to both of the values and use the result of that as the value
of the key in the resulting map.
and returns the new value that will be used
^ input list of maps
^ resulting map
| Apply a function over every key-value pair of a map.
^ function that accepts the key and the previous value
and returns the new value
^ input map
^ resulting map
| Apply a function over every key of a pair and run
'unionsWith' over the results.
and returns the new value that will be used
^ function that accepts the previous key and
returns the new key
^ input map
^ resulting map
| Filter values in a map.
@since 1.0.9.0
| This instance uses the functions from "Data.Map.Strict".
# INLINE lookup #
# INLINE filterMap #
# INLINE lookup #
findWithDefault = HashMap.findWithDefault
insertLookupWithKey = HashMap.insertLookupWithKey
updateMap = HashMap.update
updateWithKey = HashMap.updateWithKey
unionsWith = HashMap.unionsWith
mapWithKey = HashMap.mapWithKey
mapKeysWith = HashMap.mapKeysWith
# INLINE filterMap #
| This instance uses the functions from "Data.IntMap.Strict".
# INLINE lookup #
updateLookupWithKey = IntMap.updateLookupWithKey
# INLINE filterMap #
# INLINE lookup #
| Polymorphic typeclass for interacting with different set types
| Insert a value into a set.
| Delete a value from a set.
| Create a set from a single element.
| Convert a list to a set.
| Convert a set to a list.
| Filter values in a set.
@since 1.0.12.0
| Type class for maps whose keys can be converted into sets.
| Type of the key set.
| Convert a map into a set of its keys. | # LANGUAGE FlexibleInstances #
# LANGUAGE FlexibleContexts #
# LANGUAGE TypeFamilies #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE CPP #
module Data.Containers where
import Prelude hiding (lookup)
import Data.Maybe (fromMaybe)
import qualified Data.Map.Strict as Map
import qualified Data.IntMap.Strict as IntMap
import qualified Data.HashMap.Strict as HashMap
import Data.Hashable (Hashable)
import qualified Data.Set as Set
import qualified Data.HashSet as HashSet
import Data.Monoid (Monoid (..))
import Data.MonoTraversable (MonoFunctor(..), MonoFoldable, MonoTraversable, Element, GrowingAppend, ofoldl', otoList)
import Data.Function (on)
import qualified Data.List as List
import qualified Data.IntSet as IntSet
import qualified Data.Text.Lazy as LText
import qualified Data.Text as Text
import qualified Data.ByteString.Lazy as LByteString
import qualified Data.ByteString as ByteString
import Control.Arrow ((***))
import GHC.Exts (Constraint)
| A container whose values are stored in Key - Value pairs .
class (Data.Monoid.Monoid set, Semigroup set, MonoFoldable set, Eq (ContainerKey set), GrowingAppend set) => SetContainer set where
type ContainerKey set
member :: ContainerKey set -> set -> Bool
notMember :: ContainerKey set -> set -> Bool
| Get the union of two containers .
union :: set -> set -> set
| Combine a collection of @SetContainer@s , with left - most values overriding
unions :: (MonoFoldable mono, Element mono ~ set) => mono -> set
unions = ofoldl' union Data.Monoid.mempty
# INLINE unions #
| Get the difference of two containers .
difference :: set -> set -> set
| Get the intersection of two containers .
intersection :: set -> set -> set
keys :: set -> [ContainerKey set]
instance Ord k => SetContainer (Map.Map k v) where
type ContainerKey (Map.Map k v) = k
member = Map.member
notMember = Map.notMember
# INLINE notMember #
union = Map.union
# INLINE union #
unions = Map.unions . otoList
# INLINE unions #
difference = Map.difference
# INLINE difference #
intersection = Map.intersection
# INLINE intersection #
keys = Map.keys
# INLINE keys #
| This instance uses the functions from " Data . . Strict " .
instance (Eq key, Hashable key) => SetContainer (HashMap.HashMap key value) where
type ContainerKey (HashMap.HashMap key value) = key
member = HashMap.member
notMember k = not . HashMap.member k
# INLINE notMember #
union = HashMap.union
# INLINE union #
unions = HashMap.unions . otoList
# INLINE unions #
difference = HashMap.difference
# INLINE difference #
intersection = HashMap.intersection
# INLINE intersection #
keys = HashMap.keys
# INLINE keys #
instance SetContainer (IntMap.IntMap value) where
type ContainerKey (IntMap.IntMap value) = Int
member = IntMap.member
notMember = IntMap.notMember
# INLINE notMember #
union = IntMap.union
# INLINE union #
unions = IntMap.unions . otoList
# INLINE unions #
difference = IntMap.difference
# INLINE difference #
intersection = IntMap.intersection
# INLINE intersection #
keys = IntMap.keys
# INLINE keys #
instance Ord element => SetContainer (Set.Set element) where
type ContainerKey (Set.Set element) = element
member = Set.member
notMember = Set.notMember
# INLINE notMember #
union = Set.union
# INLINE union #
unions = Set.unions . otoList
# INLINE unions #
difference = Set.difference
# INLINE difference #
intersection = Set.intersection
# INLINE intersection #
keys = Set.toList
# INLINE keys #
instance (Eq element, Hashable element) => SetContainer (HashSet.HashSet element) where
type ContainerKey (HashSet.HashSet element) = element
member = HashSet.member
notMember e = not . HashSet.member e
# INLINE notMember #
union = HashSet.union
# INLINE union #
difference = HashSet.difference
# INLINE difference #
intersection = HashSet.intersection
# INLINE intersection #
keys = HashSet.toList
# INLINE keys #
instance SetContainer IntSet.IntSet where
type ContainerKey IntSet.IntSet = Int
member = IntSet.member
notMember = IntSet.notMember
# INLINE notMember #
union = IntSet.union
# INLINE union #
difference = IntSet.difference
# INLINE difference #
intersection = IntSet.intersection
# INLINE intersection #
keys = IntSet.toList
# INLINE keys #
instance Eq key => SetContainer [(key, value)] where
type ContainerKey [(key, value)] = key
member k = List.any ((== k) . fst)
notMember k = not . member k
# INLINE notMember #
union = List.unionBy ((==) `on` fst)
# INLINE union #
x `difference` y =
loop x
where
loop [] = []
loop ((k, v):rest) =
case lookup k y of
Nothing -> (k, v) : loop rest
Just _ -> loop rest
intersection = List.intersectBy ((==) `on` fst)
# INLINE intersection #
keys = map fst
# INLINE keys #
class PolyMap map where
| Get the difference between two maps , using the left map 's values .
differenceMap :: map value1 -> map value2 -> map value1
: : ( value1 - > value2 - > Maybe )
- > map - > map value2 - > map value1
differenceWithMap :: (value1 -> value2 -> Maybe value1)
-> map value1 -> map value2 -> map value1
-}
| Get the intersection of two maps , using the left map 's values .
intersectionMap :: map value1 -> map value2 -> map value1
| Get the intersection of two maps with a supplied function
intersectionWithMap :: (value1 -> value2 -> value3)
-> map value1 -> map value2 -> map value3
instance Ord key => PolyMap (Map.Map key) where
differenceMap = Map.difference
# INLINE differenceMap #
= Map.differenceWith
intersectionMap = Map.intersection
# INLINE intersectionMap #
intersectionWithMap = Map.intersectionWith
# INLINE intersectionWithMap #
| This instance uses the functions from " Data . . Strict " .
instance (Eq key, Hashable key) => PolyMap (HashMap.HashMap key) where
differenceMap = HashMap.difference
# INLINE differenceMap #
= HashMap.differenceWith
intersectionMap = HashMap.intersection
# INLINE intersectionMap #
intersectionWithMap = HashMap.intersectionWith
# INLINE intersectionWithMap #
instance PolyMap IntMap.IntMap where
differenceMap = IntMap.difference
# INLINE differenceMap #
= IntMap.differenceWith
intersectionMap = IntMap.intersection
# INLINE intersectionMap #
intersectionWithMap = IntMap.intersectionWith
# INLINE intersectionWithMap #
class BiPolyMap map where
type BPMKeyConstraint map key :: Constraint
mapKeysWith :: (BPMKeyConstraint map k1, BPMKeyConstraint map k2)
-> (k1 -> k2)
-> map k1 v
-> map k2 v
instance BiPolyMap Map.Map where
type BPMKeyConstraint Map.Map key = Ord key
mapKeysWith = Map.mapKeysWith
# INLINE mapKeysWith #
instance BiPolyMap HashMap.HashMap where
type BPMKeyConstraint HashMap.HashMap key = (Hashable key, Eq key)
mapKeysWith g f =
mapFromList . unionsWith g . map go . mapToList
where
go (k, v) = [(f k, v)]
# INLINE mapKeysWith #
class (MonoTraversable map, SetContainer map) => IsMap map where
| In some cases , ' MapValue ' and ' Element ' will be different , e.g. , the
' IsMap ' instance of associated lists .
type MapValue map
lookup :: ContainerKey map -> map -> Maybe (MapValue map)
insertMap :: ContainerKey map -> MapValue map -> map -> map
deleteMap :: ContainerKey map -> map -> map
singletonMap :: ContainerKey map -> MapValue map -> map
mapFromList :: [(ContainerKey map, MapValue map)] -> map
mapToList :: map -> [(ContainerKey map, MapValue map)]
findWithDefault :: MapValue map -> ContainerKey map -> map -> MapValue map
findWithDefault def key = fromMaybe def . lookup key
insertWith :: (MapValue map -> MapValue map -> MapValue map)
insertWith f k v m =
v' `seq` insertMap k v' m
where
v' =
case lookup k m of
Nothing -> v
Just vold -> f v vold
insertWithKey
:: (ContainerKey map -> MapValue map -> MapValue map -> MapValue map)
insertWithKey f k v m =
v' `seq` insertMap k v' m
where
v' =
case lookup k m of
Nothing -> v
Just vold -> f k v vold
insertLookupWithKey
:: (ContainerKey map -> MapValue map -> MapValue map -> MapValue map)
insertLookupWithKey f k v m =
v' `seq` (mold, insertMap k v' m)
where
(mold, v') =
case lookup k m of
Nothing -> (Nothing, v)
Just vold -> (Just vold, f k v vold)
adjustMap
:: (MapValue map -> MapValue map)
adjustMap f k m =
case lookup k m of
Nothing -> m
Just v ->
let v' = f v
in v' `seq` insertMap k v' m
adjustWithKey
:: (ContainerKey map -> MapValue map -> MapValue map)
adjustWithKey f k m =
case lookup k m of
Nothing -> m
Just v ->
let v' = f k v
in v' `seq` insertMap k v' m
updateMap
:: (MapValue map -> Maybe (MapValue map))
updateMap f k m =
case lookup k m of
Nothing -> m
Just v ->
case f v of
Nothing -> deleteMap k m
Just v' -> v' `seq` insertMap k v' m
updateWithKey
:: (ContainerKey map -> MapValue map -> Maybe (MapValue map))
updateWithKey f k m =
case lookup k m of
Nothing -> m
Just v ->
case f k v of
Nothing -> deleteMap k m
Just v' -> v' `seq` insertMap k v' m
updateLookupWithKey
:: (ContainerKey map -> MapValue map -> Maybe (MapValue map))
updateLookupWithKey f k m =
case lookup k m of
Nothing -> (Nothing, m)
Just v ->
case f k v of
Nothing -> (Just v, deleteMap k m)
Just v' -> v' `seq` (Just v', insertMap k v' m)
alterMap
:: (Maybe (MapValue map) -> Maybe (MapValue map))
alterMap f k m =
case f mold of
Nothing ->
case mold of
Nothing -> m
Just _ -> deleteMap k m
Just v -> insertMap k v m
where
mold = lookup k m
| Combine two maps .
unionWith
:: (MapValue map -> MapValue map -> MapValue map)
^ function that accepts the first map 's value and the second map 's value
^ first map
^ second map
unionWith f x y =
mapFromList $ loop $ mapToList x ++ mapToList y
where
loop [] = []
loop ((k, v):rest) =
case List.lookup k rest of
Nothing -> (k, v) : loop rest
Just v' -> (k, f v v') : loop (deleteMap k rest)
unionWithKey
:: (ContainerKey map -> MapValue map -> MapValue map -> MapValue map)
^ function that accepts the key , the first map 's value and the
second map 's value and returns the new value that will be used
^ first map
^ second map
unionWithKey f x y =
mapFromList $ loop $ mapToList x ++ mapToList y
where
loop [] = []
loop ((k, v):rest) =
case List.lookup k rest of
Nothing -> (k, v) : loop rest
Just v' -> (k, f k v v') : loop (deleteMap k rest)
When a key exists in two different maps , apply a function
unionsWith
:: (MapValue map -> MapValue map -> MapValue map)
^ function that accepts the first map 's value and the second map 's value
unionsWith _ [] = mempty
unionsWith _ [x] = x
unionsWith f (x:y:z) = unionsWith f (unionWith f x y:z)
mapWithKey
:: (ContainerKey map -> MapValue map -> MapValue map)
mapWithKey f =
mapFromList . map go . mapToList
where
go (k, v) = (k, f k v)
omapKeysWith
:: (MapValue map -> MapValue map -> MapValue map)
^ function that accepts the first map 's value and the second map 's value
-> (ContainerKey map -> ContainerKey map)
omapKeysWith g f =
mapFromList . unionsWith g . map go . mapToList
where
go (k, v) = [(f k, v)]
filterMap :: IsMap map => (MapValue map -> Bool) -> map -> map
filterMap p = mapFromList . filter (p . snd) . mapToList
instance Ord key => IsMap (Map.Map key value) where
type MapValue (Map.Map key value) = value
lookup = Map.lookup
insertMap = Map.insert
# INLINE insertMap #
deleteMap = Map.delete
# INLINE deleteMap #
singletonMap = Map.singleton
# INLINE singletonMap #
mapFromList = Map.fromList
# INLINE mapFromList #
mapToList = Map.toList
# INLINE mapToList #
findWithDefault = Map.findWithDefault
# INLINE findWithDefault #
insertWith = Map.insertWith
# INLINE insertWith #
insertWithKey = Map.insertWithKey
# INLINE insertWithKey #
insertLookupWithKey = Map.insertLookupWithKey
# INLINE insertLookupWithKey #
adjustMap = Map.adjust
# INLINE adjustMap #
adjustWithKey = Map.adjustWithKey
# INLINE adjustWithKey #
updateMap = Map.update
# INLINE updateMap #
updateWithKey = Map.updateWithKey
# INLINE updateWithKey #
updateLookupWithKey = Map.updateLookupWithKey
# INLINE updateLookupWithKey #
alterMap = Map.alter
# INLINE alterMap #
unionWith = Map.unionWith
# INLINE unionWith #
unionWithKey = Map.unionWithKey
# INLINE unionWithKey #
unionsWith = Map.unionsWith
# INLINE unionsWith #
mapWithKey = Map.mapWithKey
# INLINE mapWithKey #
omapKeysWith = Map.mapKeysWith
# INLINE omapKeysWith #
filterMap = Map.filter
| This instance uses the functions from " Data . . Strict " .
instance (Eq key, Hashable key) => IsMap (HashMap.HashMap key value) where
type MapValue (HashMap.HashMap key value) = value
lookup = HashMap.lookup
insertMap = HashMap.insert
# INLINE insertMap #
deleteMap = HashMap.delete
# INLINE deleteMap #
singletonMap = HashMap.singleton
# INLINE singletonMap #
mapFromList = HashMap.fromList
# INLINE mapFromList #
mapToList = HashMap.toList
# INLINE mapToList #
insertWith = HashMap.insertWith
# INLINE insertWith #
insertWithKey = HashMap.insertWithKey
adjustMap = HashMap.adjust
# INLINE adjustMap #
adjustWithKey = HashMap.adjustWithKey
updateLookupWithKey = HashMap.updateLookupWithKey
alterMap =
unionWith = HashMap.unionWith
# INLINE unionWith #
unionWithKey = HashMap.unionWithKey
filterMap = HashMap.filter
instance IsMap (IntMap.IntMap value) where
type MapValue (IntMap.IntMap value) = value
lookup = IntMap.lookup
insertMap = IntMap.insert
# INLINE insertMap #
deleteMap = IntMap.delete
# INLINE deleteMap #
singletonMap = IntMap.singleton
# INLINE singletonMap #
mapFromList = IntMap.fromList
# INLINE mapFromList #
mapToList = IntMap.toList
# INLINE mapToList #
findWithDefault = IntMap.findWithDefault
# INLINE findWithDefault #
insertWith = IntMap.insertWith
# INLINE insertWith #
insertWithKey = IntMap.insertWithKey
# INLINE insertWithKey #
insertLookupWithKey = IntMap.insertLookupWithKey
# INLINE insertLookupWithKey #
adjustMap = IntMap.adjust
# INLINE adjustMap #
adjustWithKey = IntMap.adjustWithKey
# INLINE adjustWithKey #
updateMap = IntMap.update
# INLINE updateMap #
updateWithKey = IntMap.updateWithKey
# INLINE updateWithKey #
alterMap = IntMap.alter
# INLINE alterMap #
unionWith = IntMap.unionWith
# INLINE unionWith #
unionWithKey = IntMap.unionWithKey
# INLINE unionWithKey #
unionsWith = IntMap.unionsWith
# INLINE unionsWith #
mapWithKey = IntMap.mapWithKey
# INLINE mapWithKey #
omapKeysWith = IntMap.mapKeysWith
# INLINE omapKeysWith #
filterMap = IntMap.filter
instance Eq key => IsMap [(key, value)] where
type MapValue [(key, value)] = value
lookup = List.lookup
insertMap k v = ((k, v):) . deleteMap k
# INLINE insertMap #
deleteMap k = List.filter ((/= k) . fst)
# INLINE deleteMap #
singletonMap k v = [(k, v)]
# INLINE singletonMap #
mapFromList = id
# INLINE mapFromList #
mapToList = id
# INLINE mapToList #
class (SetContainer set, Element set ~ ContainerKey set) => IsSet set where
insertSet :: Element set -> set -> set
deleteSet :: Element set -> set -> set
singletonSet :: Element set -> set
setFromList :: [Element set] -> set
setToList :: set -> [Element set]
filterSet :: (Element set -> Bool) -> set -> set
filterSet p = setFromList . filter p . setToList
instance Ord element => IsSet (Set.Set element) where
insertSet = Set.insert
# INLINE insertSet #
deleteSet = Set.delete
# INLINE deleteSet #
singletonSet = Set.singleton
# INLINE singletonSet #
setFromList = Set.fromList
# INLINE setFromList #
setToList = Set.toList
# INLINE setToList #
filterSet = Set.filter
# INLINE filterSet #
instance (Eq element, Hashable element) => IsSet (HashSet.HashSet element) where
insertSet = HashSet.insert
# INLINE insertSet #
deleteSet = HashSet.delete
# INLINE deleteSet #
singletonSet = HashSet.singleton
# INLINE singletonSet #
setFromList = HashSet.fromList
# INLINE setFromList #
setToList = HashSet.toList
# INLINE setToList #
filterSet = HashSet.filter
# INLINE filterSet #
instance IsSet IntSet.IntSet where
insertSet = IntSet.insert
# INLINE insertSet #
deleteSet = IntSet.delete
# INLINE deleteSet #
singletonSet = IntSet.singleton
# INLINE singletonSet #
setFromList = IntSet.fromList
# INLINE setFromList #
setToList = IntSet.toList
# INLINE setToList #
filterSet = IntSet.filter
# INLINE filterSet #
| Zip operations on ' MonoFunctor 's .
class MonoFunctor mono => MonoZip mono where
| Combine each element of two ' 's using a supplied function .
ozipWith :: (Element mono -> Element mono -> Element mono) -> mono -> mono -> mono
| Take two ' 's and return a list of the pairs of their elements .
ozip :: mono -> mono -> [(Element mono, Element mono)]
| Take a list of pairs of elements and return a ' ' of the first
components and a ' ' of the second components .
ounzip :: [(Element mono, Element mono)] -> (mono, mono)
instance MonoZip ByteString.ByteString where
ozip = ByteString.zip
ounzip = ByteString.unzip
ozipWith f xs = ByteString.pack . ByteString.zipWith f xs
# INLINE ozip #
# INLINE ounzip #
# INLINE ozipWith #
instance MonoZip LByteString.ByteString where
ozip = LByteString.zip
ounzip = LByteString.unzip
ozipWith f xs = LByteString.pack . LByteString.zipWith f xs
# INLINE ozip #
# INLINE ounzip #
# INLINE ozipWith #
instance MonoZip Text.Text where
ozip = Text.zip
ounzip = (Text.pack *** Text.pack) . List.unzip
ozipWith = Text.zipWith
# INLINE ozip #
# INLINE ounzip #
# INLINE ozipWith #
instance MonoZip LText.Text where
ozip = LText.zip
ounzip = (LText.pack *** LText.pack) . List.unzip
ozipWith = LText.zipWith
# INLINE ozip #
# INLINE ounzip #
# INLINE ozipWith #
class SetContainer set => HasKeysSet set where
type KeySet set
keysSet :: set -> KeySet set
instance Ord k => HasKeysSet (Map.Map k v) where
type KeySet (Map.Map k v) = Set.Set k
keysSet = Map.keysSet
instance HasKeysSet (IntMap.IntMap v) where
type KeySet (IntMap.IntMap v) = IntSet.IntSet
keysSet = IntMap.keysSet
instance (Hashable k, Eq k) => HasKeysSet (HashMap.HashMap k v) where
type KeySet (HashMap.HashMap k v) = HashSet.HashSet k
keysSet = setFromList . HashMap.keys
|
5c3e4917cc6ede9f23a58ee54f84ba987cf5ecc568b8cd3f8ffed89eb455537b | kpblc2000/KpblcLispLib | _kpblc-conv-value-to-bool.lsp | (defun _kpblc-conv-value-to-bool (value)
;|
* Ôóíêöèÿ ïðåîáðàçîâàíèÿ ïåðåäàííîãî çíà÷åíèÿ â ëèñïîâîå t|nil. Äëÿ îøèáî÷íûõ çíà÷åíèé âîçâðàùàåò nil.
* Ïàðàìåòðû âûçîâà:
value ; ïðåîáðàçîâûâàåìîå çíà÷åíèå
* Ïðèìåðû âûçîâà:
(_kpblc-conv-value-to-bool "0") ; nil
(_kpblc-conv-value-to-bool "1") ; T
(_kpblc-conv-value-to-bool "-1") ; T
|;
(cond ((and (= (type value) 'str) (= (vl-string-trim " 0" value) "")) nil)
((and (= (type value) 'str)
(member (strcase (vl-string-trim " 0\t" value)) '("NO" "ÍÅÒ" "FALSE" ""))
) ;_ end of and
nil
)
((= (type value) 'vl-catch-all-apply-error) nil)
(t (not (member value '(0 "0" nil :vlax-false))))
) ;_ end of cond
) ;_ end of defun
| null | https://raw.githubusercontent.com/kpblc2000/KpblcLispLib/49d1d9d29078b4167cc65dc881bea61b706c620d/lsp/conv/value/_kpblc-conv-value-to-bool.lsp | lisp | |
ïðåîáðàçîâûâàåìîå çíà÷åíèå
nil
T
T
_ end of and
_ end of cond
_ end of defun
| (defun _kpblc-conv-value-to-bool (value)
* Ôóíêöèÿ ïðåîáðàçîâàíèÿ ïåðåäàííîãî çíà÷åíèÿ â ëèñïîâîå t|nil. Äëÿ îøèáî÷íûõ çíà÷åíèé âîçâðàùàåò nil.
* Ïàðàìåòðû âûçîâà:
* Ïðèìåðû âûçîâà:
(cond ((and (= (type value) 'str) (= (vl-string-trim " 0" value) "")) nil)
((and (= (type value) 'str)
(member (strcase (vl-string-trim " 0\t" value)) '("NO" "ÍÅÒ" "FALSE" ""))
nil
)
((= (type value) 'vl-catch-all-apply-error) nil)
(t (not (member value '(0 "0" nil :vlax-false))))
|
5acf6d9c1300c5d32f364238971c3e7c5f6d7b1136ed9a70547453d64a211077 | sadiqj/ocaml-esp32 | pr7391.ml | class virtual child1 parent =
object
method private parent = parent
end
class virtual child2 =
object(_ : 'self)
constraint 'parent = < previous: 'self option; .. >
method private virtual parent: 'parent
end
Worked in 4.03
let _ =
object(self)
method previous = None
method child =
object
inherit child1 self
inherit child2
end
end;;
[%%expect{|
class virtual child1 : 'a -> object method private parent : 'a end
class virtual child2 :
object ('a)
method private virtual parent : < previous : 'a option; .. >
end
- : < child : child2; previous : child2 option > = <obj>
|}]
Worked in 4.03
let _ =
object(self)
method previous = None
method child (_ : unit) =
object
inherit child1 self
inherit child2
end
end;;
[%%expect{|
- : < child : unit -> child2; previous : child2 option > = <obj>
|}]
Worked in 4.03
let _ =
object(self)
method previous = None
method child () =
object
inherit child1 self
inherit child2
end
end;;
[%%expect{|
- : < child : unit -> child2; previous : child2 option > = <obj>
|}]
Did n't work in 4.03
let _ =
object(self)
method previous = None
method child =
let o =
object
inherit child1 self
inherit child2
end
in o
end;;
[%%expect{|
Line _, characters 16-22:
Error: The method parent has type < child : 'a; previous : 'b option >
but is expected to have type < previous : < .. > option; .. >
Self type cannot escape its class
|}]
| null | https://raw.githubusercontent.com/sadiqj/ocaml-esp32/33aad4ca2becb9701eb90d779c1b1183aefeb578/testsuite/tests/typing-gadts/pr7391.ml | ocaml | class virtual child1 parent =
object
method private parent = parent
end
class virtual child2 =
object(_ : 'self)
constraint 'parent = < previous: 'self option; .. >
method private virtual parent: 'parent
end
Worked in 4.03
let _ =
object(self)
method previous = None
method child =
object
inherit child1 self
inherit child2
end
end;;
[%%expect{|
class virtual child1 : 'a -> object method private parent : 'a end
class virtual child2 :
object ('a)
method private virtual parent : < previous : 'a option; .. >
end
- : < child : child2; previous : child2 option > = <obj>
|}]
Worked in 4.03
let _ =
object(self)
method previous = None
method child (_ : unit) =
object
inherit child1 self
inherit child2
end
end;;
[%%expect{|
- : < child : unit -> child2; previous : child2 option > = <obj>
|}]
Worked in 4.03
let _ =
object(self)
method previous = None
method child () =
object
inherit child1 self
inherit child2
end
end;;
[%%expect{|
- : < child : unit -> child2; previous : child2 option > = <obj>
|}]
Did n't work in 4.03
let _ =
object(self)
method previous = None
method child =
let o =
object
inherit child1 self
inherit child2
end
in o
end;;
[%%expect{|
Line _, characters 16-22:
Error: The method parent has type < child : 'a; previous : 'b option >
but is expected to have type < previous : < .. > option; .. >
Self type cannot escape its class
|}]
| |
3423e4c7752ba84a0ce3091d17493fb906fa057fe1617d9f504b9610175b3180 | gregwebs/Shelly.hs | RunSpec.hs | module RunSpec ( runSpec ) where
import TestInit
import qualified Data.Text as T
import Data.Text (Text)
import System.IO
runSpec :: Spec
runSpec = do
describe "run" $ do
it "simple command" $ do
res <- shelly $ run "echo" [ "wibble" ]
res @?= "wibble\n"
it "with escaping" $ do
res <- shelly $ run "echo" [ "*" ]
res @?= "*\n"
it "without escaping" $ do
res <- shelly $ escaping False $ run "echo" [ "*" ]
if isWindows
then res @?= "*\n"
else assert $ "README.md" `elem` T.words res
it "with binary handle mode" $ do
res <- shelly $ onCommandHandles (initOutputHandles (flip hSetBinaryMode True))
$ run "cat" [ "test/data/nonascii.txt" ]
if isWindows
then res @?= "Selbstverst\228ndlich \252berraschend\r\n"
else res @?= "Selbstverst\228ndlich \252berraschend\n"
unless isWindows $ do
it "script at $PWD" $ do
res <- shelly $ do
run_ "chmod" ["+x", "test/data/hello.sh"]
run "./test/data/hello.sh" []
res @?= "Hello!\n"
describe "cmd" $ do
let shouldBeTxt res t = res @?= (t :: Text)
it "with Text" $ do
res <- shelly $ cmd "echo" ("wibble" :: Text)
res `shouldBeTxt` "wibble\n"
it "with String" $ do
res <- shelly $ cmd "echo" "wibble"
res `shouldBeTxt` "wibble\n"
it "with [Text]" $ do
res <- shelly $ cmd "echo" (["wibble"] :: [Text])
res `shouldBeTxt` "wibble\n"
it "with [String]" $ do
res <- shelly $ cmd "echo" ["wibble"]
res `shouldBeTxt` "wibble\n"
Check all two argument permutations ( with replacement ) of { Text , String , [ Text ] , [ String ] } .
it "with Text and Text" $ do
res <- shelly $ cmd "echo" ("wibble" :: Text) ("wobble" :: Text)
res `shouldBeTxt` "wibble wobble\n"
it "with Text and String" $ do
res <- shelly $ cmd "echo" ("wibble" :: Text) "wobble"
res `shouldBeTxt` "wibble wobble\n"
it "with Text and [Text]" $ do
res <- shelly $ cmd "echo" ("wibble" :: Text) (["wobble", "wurble"] :: [Text])
res `shouldBeTxt` "wibble wobble wurble\n"
it "with Text and [String]" $ do
res <- shelly $ cmd "echo" ("wibble" :: Text) ["wobble", "wurble"]
res `shouldBeTxt` "wibble wobble wurble\n"
it "with String and Text" $ do
res <- shelly $ cmd "echo" "wibble" ("wobble" :: Text)
res `shouldBeTxt` "wibble wobble\n"
it "with String and String" $ do
res <- shelly $ cmd "echo" "wibble" "wobble"
res `shouldBeTxt` "wibble wobble\n"
it "with String and [Text]" $ do
res <- shelly $ cmd "echo" "wibble" (["wobble", "wurble"] :: [Text])
res `shouldBeTxt` "wibble wobble wurble\n"
it "with [Text] and Text" $ do
res <- shelly $ cmd "echo" (["wibble", "wobble"] :: [Text]) ("wurble" :: Text)
res `shouldBeTxt` "wibble wobble wurble\n"
it "with [Text] and String" $ do
res <- shelly $ cmd "echo" (["wibble", "wobble"] :: [Text]) "wurble"
res `shouldBeTxt` "wibble wobble wurble\n"
it "with [Text] and [Text]" $ do
res <- shelly $ cmd "echo" (["wibble", "wobble"] :: [Text]) (["wurble"] :: [Text])
res `shouldBeTxt` "wibble wobble wurble\n"
it "with [Text] and [String]" $ do
res <- shelly $ cmd "echo" (["wibble", "wobble"] :: [Text]) ["wurble"]
res `shouldBeTxt` "wibble wobble wurble\n"
it "with [String] and Text " $ do
res <- shelly $ cmd "echo" ["wibble", "wobble"] ("wurble" :: Text)
res `shouldBeTxt` "wibble wobble wurble\n"
it "with [String] and String " $ do
res <- shelly $ cmd "echo" ["wibble", "wobble"] "wurble"
res `shouldBeTxt` "wibble wobble wurble\n"
it "with [String] and [Text] " $ do
res <- shelly $ cmd "echo" ["wibble", "wobble"] (["wurble"] :: [Text])
res `shouldBeTxt` "wibble wobble wurble\n"
it "with [String] and [String] " $ do
res <- shelly $ cmd "echo" ["wibble", "wobble"] ["wurble"]
res `shouldBeTxt` "wibble wobble wurble\n"
-- Check unit cases
it "returns Unit" $ do
res <- shelly $ cmd "echo" "wibble" "wobble"
res @?= ()
it "works with underscore" $ do
_ <- shelly $ cmd "echo" "wibble" "wobble"
True `shouldBe` True
This should now compile without a warning since ghc should infer Sh ( ) instead of Sh Text .
it "defaults to Unit" $ do
shelly $ cmd "echo" "wibble" "wobble"
True `shouldBe` True
-- Bash-related commands
describe "bash" $ do
it "simple command" $ do
res <- shelly $ bash "echo" [ "wibble" ]
res @?= "wibble\n"
it "without escaping" $ do
res <- shelly $ escaping False $ bash "echo" [ "*" ]
assert $ "README.md" `elem` T.words res
it "with binary handle mode" $ do
res <- shelly $ onCommandHandles (initOutputHandles (flip hSetBinaryMode True))
$ bash "cat" [ "test/data/nonascii.txt" ]
if isWindows
then res @?= "Selbstverst\228ndlich \252berraschend\r\n"
else res @?= "Selbstverst\228ndlich \252berraschend\n"
This throws spurious errors on some systems
it " can detect failing commands in pipes " $ do
< - shelly $ escaping False $ errExit False $ do
bashPipeFail
bash _ " echo " [ " ' foo ' " , " | " , " ls " , " \"eoueouoe\ " " , " 2>/dev / null " , " | " , " echo " , " ' bar ' " ]
lastExitCode
eCode ` shouldSatisfy ` ( /= 0 )
it "can detect failing commands in pipes" $ do
eCode <- shelly $ escaping False $ errExit False $ do
bashPipeFail
bash_ "echo" ["'foo'", "|", "ls", "\"eoueouoe\"", "2>/dev/null", "|", "echo", "'bar'" ]
lastExitCode
eCode `shouldSatisfy` (/= 0)
-}
it "preserve pipe behaviour" $ do
(eCode, res) <- shelly $ escaping False $ errExit False $ do
res <-
if isWindows
then bash "echo" [ "foo", "|", "echo", "bar" ]
else bash "echo" [ "'foo'", "|", "echo", "'bar'" ]
eCode <- lastExitCode
return (eCode, res)
if isWindows
then res @?= "bar'\n"
else res @?= "bar\n"
eCode @?= 0
| null | https://raw.githubusercontent.com/gregwebs/Shelly.hs/25a7884aa5a227707968b45b1336af71c1e627f6/test/src/RunSpec.hs | haskell | Check unit cases
Bash-related commands | module RunSpec ( runSpec ) where
import TestInit
import qualified Data.Text as T
import Data.Text (Text)
import System.IO
runSpec :: Spec
runSpec = do
describe "run" $ do
it "simple command" $ do
res <- shelly $ run "echo" [ "wibble" ]
res @?= "wibble\n"
it "with escaping" $ do
res <- shelly $ run "echo" [ "*" ]
res @?= "*\n"
it "without escaping" $ do
res <- shelly $ escaping False $ run "echo" [ "*" ]
if isWindows
then res @?= "*\n"
else assert $ "README.md" `elem` T.words res
it "with binary handle mode" $ do
res <- shelly $ onCommandHandles (initOutputHandles (flip hSetBinaryMode True))
$ run "cat" [ "test/data/nonascii.txt" ]
if isWindows
then res @?= "Selbstverst\228ndlich \252berraschend\r\n"
else res @?= "Selbstverst\228ndlich \252berraschend\n"
unless isWindows $ do
it "script at $PWD" $ do
res <- shelly $ do
run_ "chmod" ["+x", "test/data/hello.sh"]
run "./test/data/hello.sh" []
res @?= "Hello!\n"
describe "cmd" $ do
let shouldBeTxt res t = res @?= (t :: Text)
it "with Text" $ do
res <- shelly $ cmd "echo" ("wibble" :: Text)
res `shouldBeTxt` "wibble\n"
it "with String" $ do
res <- shelly $ cmd "echo" "wibble"
res `shouldBeTxt` "wibble\n"
it "with [Text]" $ do
res <- shelly $ cmd "echo" (["wibble"] :: [Text])
res `shouldBeTxt` "wibble\n"
it "with [String]" $ do
res <- shelly $ cmd "echo" ["wibble"]
res `shouldBeTxt` "wibble\n"
Check all two argument permutations ( with replacement ) of { Text , String , [ Text ] , [ String ] } .
it "with Text and Text" $ do
res <- shelly $ cmd "echo" ("wibble" :: Text) ("wobble" :: Text)
res `shouldBeTxt` "wibble wobble\n"
it "with Text and String" $ do
res <- shelly $ cmd "echo" ("wibble" :: Text) "wobble"
res `shouldBeTxt` "wibble wobble\n"
it "with Text and [Text]" $ do
res <- shelly $ cmd "echo" ("wibble" :: Text) (["wobble", "wurble"] :: [Text])
res `shouldBeTxt` "wibble wobble wurble\n"
it "with Text and [String]" $ do
res <- shelly $ cmd "echo" ("wibble" :: Text) ["wobble", "wurble"]
res `shouldBeTxt` "wibble wobble wurble\n"
it "with String and Text" $ do
res <- shelly $ cmd "echo" "wibble" ("wobble" :: Text)
res `shouldBeTxt` "wibble wobble\n"
it "with String and String" $ do
res <- shelly $ cmd "echo" "wibble" "wobble"
res `shouldBeTxt` "wibble wobble\n"
it "with String and [Text]" $ do
res <- shelly $ cmd "echo" "wibble" (["wobble", "wurble"] :: [Text])
res `shouldBeTxt` "wibble wobble wurble\n"
it "with [Text] and Text" $ do
res <- shelly $ cmd "echo" (["wibble", "wobble"] :: [Text]) ("wurble" :: Text)
res `shouldBeTxt` "wibble wobble wurble\n"
it "with [Text] and String" $ do
res <- shelly $ cmd "echo" (["wibble", "wobble"] :: [Text]) "wurble"
res `shouldBeTxt` "wibble wobble wurble\n"
it "with [Text] and [Text]" $ do
res <- shelly $ cmd "echo" (["wibble", "wobble"] :: [Text]) (["wurble"] :: [Text])
res `shouldBeTxt` "wibble wobble wurble\n"
it "with [Text] and [String]" $ do
res <- shelly $ cmd "echo" (["wibble", "wobble"] :: [Text]) ["wurble"]
res `shouldBeTxt` "wibble wobble wurble\n"
it "with [String] and Text " $ do
res <- shelly $ cmd "echo" ["wibble", "wobble"] ("wurble" :: Text)
res `shouldBeTxt` "wibble wobble wurble\n"
it "with [String] and String " $ do
res <- shelly $ cmd "echo" ["wibble", "wobble"] "wurble"
res `shouldBeTxt` "wibble wobble wurble\n"
it "with [String] and [Text] " $ do
res <- shelly $ cmd "echo" ["wibble", "wobble"] (["wurble"] :: [Text])
res `shouldBeTxt` "wibble wobble wurble\n"
it "with [String] and [String] " $ do
res <- shelly $ cmd "echo" ["wibble", "wobble"] ["wurble"]
res `shouldBeTxt` "wibble wobble wurble\n"
it "returns Unit" $ do
res <- shelly $ cmd "echo" "wibble" "wobble"
res @?= ()
it "works with underscore" $ do
_ <- shelly $ cmd "echo" "wibble" "wobble"
True `shouldBe` True
This should now compile without a warning since ghc should infer Sh ( ) instead of Sh Text .
it "defaults to Unit" $ do
shelly $ cmd "echo" "wibble" "wobble"
True `shouldBe` True
describe "bash" $ do
it "simple command" $ do
res <- shelly $ bash "echo" [ "wibble" ]
res @?= "wibble\n"
it "without escaping" $ do
res <- shelly $ escaping False $ bash "echo" [ "*" ]
assert $ "README.md" `elem` T.words res
it "with binary handle mode" $ do
res <- shelly $ onCommandHandles (initOutputHandles (flip hSetBinaryMode True))
$ bash "cat" [ "test/data/nonascii.txt" ]
if isWindows
then res @?= "Selbstverst\228ndlich \252berraschend\r\n"
else res @?= "Selbstverst\228ndlich \252berraschend\n"
This throws spurious errors on some systems
it " can detect failing commands in pipes " $ do
< - shelly $ escaping False $ errExit False $ do
bashPipeFail
bash _ " echo " [ " ' foo ' " , " | " , " ls " , " \"eoueouoe\ " " , " 2>/dev / null " , " | " , " echo " , " ' bar ' " ]
lastExitCode
eCode ` shouldSatisfy ` ( /= 0 )
it "can detect failing commands in pipes" $ do
eCode <- shelly $ escaping False $ errExit False $ do
bashPipeFail
bash_ "echo" ["'foo'", "|", "ls", "\"eoueouoe\"", "2>/dev/null", "|", "echo", "'bar'" ]
lastExitCode
eCode `shouldSatisfy` (/= 0)
-}
it "preserve pipe behaviour" $ do
(eCode, res) <- shelly $ escaping False $ errExit False $ do
res <-
if isWindows
then bash "echo" [ "foo", "|", "echo", "bar" ]
else bash "echo" [ "'foo'", "|", "echo", "'bar'" ]
eCode <- lastExitCode
return (eCode, res)
if isWindows
then res @?= "bar'\n"
else res @?= "bar\n"
eCode @?= 0
|
d41d65acfd24f4e27205e0b6a1d6edad072425638f7b88e0d3563247bd8d636e | ferd/ReVault | revault_tls.erl | %%% Shared definitions between revault_tls_serv and revault_tls_client.
%%% Both the client and the server implement a process with a reception
%%% loop to which this module can send messages and read from them.
%%%
There is one internal API to be used by revault_sync_fsm , and
one internal API to be used by the client and servers .
-module(revault_tls).
-include_lib("public_key/include/public_key.hrl").
-include("revault_tls.hrl").
-record(buf, {acc=[<<>>], seen=0, needed=0}).
callbacks from within the FSM
-export([callback/1, mode/2, peer/4, accept_peer/3, unpeer/3, send/3, reply/4, unpack/2]).
%% shared functions related to data transport and serialization
-export([wrap/1, unwrap/1, unwrap_all/1, unpack/1, send_local/2,
buf_add/2, buf_new/0, buf_size/1]).
%% shared functions related to TLS certs
-export([pin_certfile_opts_server/1, pin_certfile_opts_client/1,
pin_certfiles_opts_server/1, pin_certfiles_opts_client/1,
make_selfsigned_cert/2]).
-record(state, {proc, name, dirs, mode, serv_conn}).
-type state() :: term().
-type cb_state() :: {?MODULE, state()}.
-type buf() :: #buf{}.
-export_type([state/0, buf/0]).
-if(?OTP_RELEASE < 26).
a bug prevents TLS 1.3 from working well with cert pinning in versions prior
to OTP-26.rc-1 :
%% -side-tls-cert-validation-woes-in-tls-1-3/1586
-define(TLS_VSN, 'tlsv1.2').
-else.
-define(TLS_VSN, 'tlsv1.3').
-endif.
-spec callback(term()) -> state().
callback({Name, DirOpts}) ->
{?MODULE, #state{proc=Name, name=Name, dirs=DirOpts}};
callback({Proc, Name, DirOpts}) ->
{?MODULE, #state{proc=Proc, name=Name, dirs=DirOpts}}.
-spec mode(client|server, state()) -> {term(), cb_state()}.
mode(Mode, S=#state{proc=Proc, name=Name, dirs=DirOpts}) ->
Res = case Mode of
client ->
revault_protocols_tls_sup:start_client(Proc, DirOpts);
server ->
Assumes that all servers have the full DirOpts view if this is the
first place to start it .
case revault_protocols_tls_sup:start_server(DirOpts) of
{ok, Pid} ->
revault_tls_serv:map(Name, Proc),
{ok, Pid};
{error, {already_started, Pid}} ->
revault_tls_serv:map(Name, Proc),
{ok, Pid}
end
end,
{Res, {?MODULE, S#state{mode=Mode}}}.
%% @doc only callable from the client-side.
peer(Local, Peer, Attrs, S=#state{name=Dir, dirs=#{<<"peers">> := Peers}}) ->
case Peers of
#{Peer := Map} ->
Payload = {revault, make_ref(), revault_data_wrapper:peer(Dir, Attrs)},
{revault_tls_client:peer(Local, Peer, Map, Payload), {?MODULE, S}};
_ ->
{{error, unknown_peer}, {?MODULE, S}}
end.
accept_peer(Remote, Marker, S=#state{proc=Proc}) ->
{ok, Conn} = revault_tls_serv:accept_peer(Proc, Remote, Marker),
{ok, {?MODULE, S#state{serv_conn=Conn}}}.
unpeer(Proc, Remote, S=#state{mode=client}) ->
{revault_tls_client:unpeer(Proc, Remote),
{?MODULE, S}};
unpeer(Proc, Remote, S=#state{mode=server, serv_conn=Conn}) ->
{revault_tls_serv:unpeer(Proc, Remote, Conn),
{?MODULE, S#state{serv_conn=undefined}}}.
%% @doc only callable from the client-side, server always replies.
send(Remote, Payload, S=#state{proc=Proc, mode=client}) ->
Marker = make_ref(),
Res = case revault_tls_client:send(Proc, Remote, Marker, Payload) of
ok -> {ok, Marker};
Other -> Other
end,
{Res, {?MODULE, S}}.
% [Remote, Marker, revault_data_wrapper:ok()]),
reply(Remote, Marker, Payload, S=#state{proc=Proc, mode=client}) ->
{revault_tls_client:reply(Proc, Remote, Marker, Payload),
{?MODULE, S}};
reply(Remote, Marker, Payload, S=#state{proc=Proc, mode=server, serv_conn=Conn}) ->
{revault_tls_serv:reply(Proc, Remote, Conn, Marker, Payload),
{?MODULE, S}}.
unpack(_, _) -> error(undef).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%% INTERNAL SHARED CALLS %%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
wrap({revault, _Marker, _Payload}=Msg) ->
Bin = term_to_binary(Msg, [compressed, {minor_version, 2}]),
%% Tag by length to ease parsing.
%% Prepare extra versions that could do things like deal with
%% signed messages and add options and whatnot, or could let
%% us spin up legacy state machines whenever that could happen.
%%
If we need more than 18,000 petabytes for a message and more
%% than 65535 protocol versions, we're either very successful or
%% failing in bad ways.
<<(byte_size(Bin)):64/unsigned, ?VSN:16/unsigned, Bin/binary>>.
-spec unwrap(buf()) -> {ok, ?VSN, term(), buf()} | {error, incomplete, buf()}.
unwrap(B=#buf{seen=S, needed=N, acc=Acc}) when S >= N ->
case iolist_to_binary(lists:reverse(Acc)) of
<<Size:64/unsigned, ?VSN:16/unsigned, Payload/binary>> ->
<<Term:Size/binary, Rest/binary>> = Payload,
{revault, Marker, Msg} = binary_to_term(Term),
{ok, ?VSN, {revault, Marker, unpack(Msg)},
buf_add(Rest, buf_reset(B))};
IncompleteBin ->
{error, incomplete,
B#buf{acc=[IncompleteBin]}}
end;
unwrap(B=#buf{}) ->
{error, incomplete, B}.
unpack({peer, ?VSN, Remote, Attrs}) -> {peer, Remote, Attrs};
unpack({ask, ?VSN}) -> ask;
unpack({ok, ?VSN}) -> ok;
unpack({error, ?VSN, R}) -> {error, R};
unpack({manifest, ?VSN}) -> manifest;
unpack({manifest, ?VSN, Data}) -> {manifest, Data};
unpack({file, ?VSN, Path, Meta, Bin}) -> {file, Path, Meta, Bin};
unpack({fetch, ?VSN, Path}) -> {fetch, Path};
unpack({sync_complete, ?VSN}) -> sync_complete;
unpack({deleted_file, ?VSN, Path, Meta}) -> {deleted_file, Path, Meta};
unpack({conflict_file, ?VSN, WorkPath, Path, Count, Meta, Bin}) ->
{conflict_file, WorkPath, Path, Count, Meta, Bin};
unpack(Term) ->
Term.
-spec unwrap_all(buf()) -> {[term()], buf()}.
unwrap_all(Buf) ->
unwrap_all(Buf, []).
unwrap_all(Buf, Acc) ->
case revault_tls:unwrap(Buf) of
{error, incomplete, NewBuf} ->
{lists:reverse(Acc), NewBuf};
{ok, ?VSN, Payload, NewBuf} ->
unwrap_all(NewBuf, [Payload|Acc])
end.
-spec buf_add(binary(), buf()) -> buf().
buf_add(Bin, B=#buf{seen=0, needed=0, acc=Acc}) ->
case iolist_to_binary([lists:reverse(Acc),Bin]) of
<<Size:64/unsigned, ?VSN:16/unsigned, _/binary>> = NewBin ->
Add 10 bytes to the needed size to cover the
B#buf{seen=byte_size(NewBin), needed=Size+10, acc=[NewBin]};
IncompleteBin ->
B#buf{acc=[IncompleteBin]}
end;
buf_add(Bin, B=#buf{acc=Acc, seen=N}) ->
B#buf{acc=[Bin|Acc], seen=N+byte_size(Bin)}.
-spec buf_new() -> buf().
buf_new() -> #buf{}.
-spec buf_reset(buf()) -> buf().
buf_reset(_) -> #buf{}.
-spec buf_size(buf()) -> non_neg_integer().
buf_size(#buf{seen=N}) -> N.
send_local(Proc, Payload) ->
gproc:send({n, l, {revault_fsm, Proc}}, Payload).
pin_certfile_opts_client(FileNames) ->
pin_certfile_opts(FileNames).
pin_certfile_opts_server(FileNames) ->
[{fail_if_no_peer_cert, true}
| pin_certfile_opts(FileNames)].
pin_certfiles_opts_client(FileNames) ->
pin_certfiles_opts(FileNames).
pin_certfiles_opts_server(FileNames) ->
[{fail_if_no_peer_cert, true}
| pin_certfiles_opts(FileNames)].
make_selfsigned_cert(Dir, CertName) ->
check_openssl_vsn(),
Key = filename:join(Dir, CertName ++ ".key"),
Cert = filename:join(Dir, CertName ++ ".crt"),
ok = filelib:ensure_dir(Cert),
Cmd = io_lib:format(
"openssl req -x509 -newkey rsa:4096 -sha256 -days 3650 -nodes "
"-keyout '~ts' -out '~ts' -subj '/CN=example.org' "
"-addext 'subjectAltName=DNS:example.org,DNS:www.example.org,IP:127.0.0.1'",
[Key, Cert] % TODO: escape quotes
),
os:cmd(Cmd).
%%%%%%%%%%%%%%%
%%% PRIVATE %%%
%%%%%%%%%%%%%%%
pin_certfile_opts(FileName) ->
own parsing of certs and chains .
case file:read_file(FileName) of
{ok, Cert} ->
tak:pem_to_ssl_options(Cert) ++
[{verify, verify_peer}];
{error, enoent} ->
error({certificate_not_found, FileName})
end.
pin_certfiles_opts(FileNames) ->
pin_certfiles_opts(FileNames, [], []).
pin_certfiles_opts([], CAs, PinCerts) ->
TODO : drop tlsv1.2 and mandate 1.3 when
%% is resolved
[{cacerts, CAs},
{verify_fun, {fun verify_pins/3, PinCerts}},
{versions, [?TLS_VSN]},
{verify, verify_peer}];
pin_certfiles_opts([FileName|FileNames], CAs, PinCerts) ->
own parsing , but then extract the option and allow setting
%% multiple certificates. We need this because we have many possibly valid
%% clients that can all be allowed to contact us as a server (whereas a
client may expect to only contact one valid server ) .
[{cacerts, [CADer]},
{verify_fun, {_PinFun, PinCert}} | _] = pin_certfile_opts(FileName),
pin_certfiles_opts(FileNames, [CADer|CAs], [PinCert|PinCerts]).
-spec verify_pins(OtpCert, Event, InitialUserState) ->
{valid, UserState} | {fail, Reason :: term()} | {unknown, UserState}
when
OtpCert :: #'OTPCertificate'{},
Event :: {bad_cert, Reason :: atom() | {revoked, atom()}}
| {extension, #'Extension'{}} | valid | valid_peer,
InitialUserState :: UserState,
UserState :: term().
verify_pins(PinCert, valid_peer, PinCerts) ->
case lists:member(PinCert, PinCerts) of
true -> {valid, PinCerts};
false -> {fail, {peer_cert_unknown, subject(PinCert)}}
end;
verify_pins(_Cert, {extension, _}, PinCerts) ->
{unknown, PinCerts};
verify_pins(PinCert, {bad_cert, selfsigned_peer}, PinCerts) ->
case lists:member(PinCert, PinCerts) of
true -> {valid, PinCerts};
false -> {fail, {bad_cert, selfsigned_peer}}
end;
verify_pins(_Cert, {bad_cert, _} = Reason, _PinCerts) ->
{fail, Reason};
verify_pins(_Cert, valid, PinCerts) ->
{valid, PinCerts}.
subject(#'OTPCertificate'{ tbsCertificate = TBS }) ->
public_key:pkix_normalize_name(TBS#'OTPTBSCertificate'.subject).
check_openssl_vsn() ->
Vsn = os:cmd("openssl version"),
VsnMatch = "(Open|Libre)SSL ([0-9]+)\\.([0-9]+)\\.([0-9]+)",
case re:run(Vsn, VsnMatch, [{capture, all_but_first, list}]) of
{match, [Type, Major, Minor, Patch]} ->
try
check_openssl_vsn(Type, list_to_integer(Major),
list_to_integer(Minor),
list_to_integer(Patch))
catch
error:bad_vsn ->
error({openssl_vsn, Vsn})
end;
_ ->
error({openssl_vsn, Vsn})
end.
Using OpenSSL > = 1.1.1 or LibreSSL > = 3.1.0
check_openssl_vsn("Libre", A, B, _) when A > 3;
A == 3, B >= 1 ->
ok;
check_openssl_vsn("Open", A, B, C) when A > 1;
A == 1, B > 1;
A == 1, B == 1, C >= 1 ->
ok;
check_openssl_vsn(_, _, _, _) ->
error(bad_vsn).
| null | https://raw.githubusercontent.com/ferd/ReVault/340f237afe263a8c12062240fd4318b233bdbae1/apps/revault/src/revault_tls.erl | erlang | Shared definitions between revault_tls_serv and revault_tls_client.
Both the client and the server implement a process with a reception
loop to which this module can send messages and read from them.
shared functions related to data transport and serialization
shared functions related to TLS certs
-side-tls-cert-validation-woes-in-tls-1-3/1586
@doc only callable from the client-side.
@doc only callable from the client-side, server always replies.
[Remote, Marker, revault_data_wrapper:ok()]),
INTERNAL SHARED CALLS %%%
Tag by length to ease parsing.
Prepare extra versions that could do things like deal with
signed messages and add options and whatnot, or could let
us spin up legacy state machines whenever that could happen.
than 65535 protocol versions, we're either very successful or
failing in bad ways.
TODO: escape quotes
PRIVATE %%%
is resolved
multiple certificates. We need this because we have many possibly valid
clients that can all be allowed to contact us as a server (whereas a | There is one internal API to be used by revault_sync_fsm , and
one internal API to be used by the client and servers .
-module(revault_tls).
-include_lib("public_key/include/public_key.hrl").
-include("revault_tls.hrl").
-record(buf, {acc=[<<>>], seen=0, needed=0}).
callbacks from within the FSM
-export([callback/1, mode/2, peer/4, accept_peer/3, unpeer/3, send/3, reply/4, unpack/2]).
-export([wrap/1, unwrap/1, unwrap_all/1, unpack/1, send_local/2,
buf_add/2, buf_new/0, buf_size/1]).
-export([pin_certfile_opts_server/1, pin_certfile_opts_client/1,
pin_certfiles_opts_server/1, pin_certfiles_opts_client/1,
make_selfsigned_cert/2]).
-record(state, {proc, name, dirs, mode, serv_conn}).
-type state() :: term().
-type cb_state() :: {?MODULE, state()}.
-type buf() :: #buf{}.
-export_type([state/0, buf/0]).
-if(?OTP_RELEASE < 26).
a bug prevents TLS 1.3 from working well with cert pinning in versions prior
to OTP-26.rc-1 :
-define(TLS_VSN, 'tlsv1.2').
-else.
-define(TLS_VSN, 'tlsv1.3').
-endif.
-spec callback(term()) -> state().
callback({Name, DirOpts}) ->
{?MODULE, #state{proc=Name, name=Name, dirs=DirOpts}};
callback({Proc, Name, DirOpts}) ->
{?MODULE, #state{proc=Proc, name=Name, dirs=DirOpts}}.
-spec mode(client|server, state()) -> {term(), cb_state()}.
mode(Mode, S=#state{proc=Proc, name=Name, dirs=DirOpts}) ->
Res = case Mode of
client ->
revault_protocols_tls_sup:start_client(Proc, DirOpts);
server ->
Assumes that all servers have the full DirOpts view if this is the
first place to start it .
case revault_protocols_tls_sup:start_server(DirOpts) of
{ok, Pid} ->
revault_tls_serv:map(Name, Proc),
{ok, Pid};
{error, {already_started, Pid}} ->
revault_tls_serv:map(Name, Proc),
{ok, Pid}
end
end,
{Res, {?MODULE, S#state{mode=Mode}}}.
peer(Local, Peer, Attrs, S=#state{name=Dir, dirs=#{<<"peers">> := Peers}}) ->
case Peers of
#{Peer := Map} ->
Payload = {revault, make_ref(), revault_data_wrapper:peer(Dir, Attrs)},
{revault_tls_client:peer(Local, Peer, Map, Payload), {?MODULE, S}};
_ ->
{{error, unknown_peer}, {?MODULE, S}}
end.
accept_peer(Remote, Marker, S=#state{proc=Proc}) ->
{ok, Conn} = revault_tls_serv:accept_peer(Proc, Remote, Marker),
{ok, {?MODULE, S#state{serv_conn=Conn}}}.
unpeer(Proc, Remote, S=#state{mode=client}) ->
{revault_tls_client:unpeer(Proc, Remote),
{?MODULE, S}};
unpeer(Proc, Remote, S=#state{mode=server, serv_conn=Conn}) ->
{revault_tls_serv:unpeer(Proc, Remote, Conn),
{?MODULE, S#state{serv_conn=undefined}}}.
send(Remote, Payload, S=#state{proc=Proc, mode=client}) ->
Marker = make_ref(),
Res = case revault_tls_client:send(Proc, Remote, Marker, Payload) of
ok -> {ok, Marker};
Other -> Other
end,
{Res, {?MODULE, S}}.
reply(Remote, Marker, Payload, S=#state{proc=Proc, mode=client}) ->
{revault_tls_client:reply(Proc, Remote, Marker, Payload),
{?MODULE, S}};
reply(Remote, Marker, Payload, S=#state{proc=Proc, mode=server, serv_conn=Conn}) ->
{revault_tls_serv:reply(Proc, Remote, Conn, Marker, Payload),
{?MODULE, S}}.
unpack(_, _) -> error(undef).
wrap({revault, _Marker, _Payload}=Msg) ->
Bin = term_to_binary(Msg, [compressed, {minor_version, 2}]),
If we need more than 18,000 petabytes for a message and more
<<(byte_size(Bin)):64/unsigned, ?VSN:16/unsigned, Bin/binary>>.
-spec unwrap(buf()) -> {ok, ?VSN, term(), buf()} | {error, incomplete, buf()}.
unwrap(B=#buf{seen=S, needed=N, acc=Acc}) when S >= N ->
case iolist_to_binary(lists:reverse(Acc)) of
<<Size:64/unsigned, ?VSN:16/unsigned, Payload/binary>> ->
<<Term:Size/binary, Rest/binary>> = Payload,
{revault, Marker, Msg} = binary_to_term(Term),
{ok, ?VSN, {revault, Marker, unpack(Msg)},
buf_add(Rest, buf_reset(B))};
IncompleteBin ->
{error, incomplete,
B#buf{acc=[IncompleteBin]}}
end;
unwrap(B=#buf{}) ->
{error, incomplete, B}.
unpack({peer, ?VSN, Remote, Attrs}) -> {peer, Remote, Attrs};
unpack({ask, ?VSN}) -> ask;
unpack({ok, ?VSN}) -> ok;
unpack({error, ?VSN, R}) -> {error, R};
unpack({manifest, ?VSN}) -> manifest;
unpack({manifest, ?VSN, Data}) -> {manifest, Data};
unpack({file, ?VSN, Path, Meta, Bin}) -> {file, Path, Meta, Bin};
unpack({fetch, ?VSN, Path}) -> {fetch, Path};
unpack({sync_complete, ?VSN}) -> sync_complete;
unpack({deleted_file, ?VSN, Path, Meta}) -> {deleted_file, Path, Meta};
unpack({conflict_file, ?VSN, WorkPath, Path, Count, Meta, Bin}) ->
{conflict_file, WorkPath, Path, Count, Meta, Bin};
unpack(Term) ->
Term.
-spec unwrap_all(buf()) -> {[term()], buf()}.
unwrap_all(Buf) ->
unwrap_all(Buf, []).
unwrap_all(Buf, Acc) ->
case revault_tls:unwrap(Buf) of
{error, incomplete, NewBuf} ->
{lists:reverse(Acc), NewBuf};
{ok, ?VSN, Payload, NewBuf} ->
unwrap_all(NewBuf, [Payload|Acc])
end.
-spec buf_add(binary(), buf()) -> buf().
buf_add(Bin, B=#buf{seen=0, needed=0, acc=Acc}) ->
case iolist_to_binary([lists:reverse(Acc),Bin]) of
<<Size:64/unsigned, ?VSN:16/unsigned, _/binary>> = NewBin ->
Add 10 bytes to the needed size to cover the
B#buf{seen=byte_size(NewBin), needed=Size+10, acc=[NewBin]};
IncompleteBin ->
B#buf{acc=[IncompleteBin]}
end;
buf_add(Bin, B=#buf{acc=Acc, seen=N}) ->
B#buf{acc=[Bin|Acc], seen=N+byte_size(Bin)}.
-spec buf_new() -> buf().
buf_new() -> #buf{}.
-spec buf_reset(buf()) -> buf().
buf_reset(_) -> #buf{}.
-spec buf_size(buf()) -> non_neg_integer().
buf_size(#buf{seen=N}) -> N.
send_local(Proc, Payload) ->
gproc:send({n, l, {revault_fsm, Proc}}, Payload).
pin_certfile_opts_client(FileNames) ->
pin_certfile_opts(FileNames).
pin_certfile_opts_server(FileNames) ->
[{fail_if_no_peer_cert, true}
| pin_certfile_opts(FileNames)].
pin_certfiles_opts_client(FileNames) ->
pin_certfiles_opts(FileNames).
pin_certfiles_opts_server(FileNames) ->
[{fail_if_no_peer_cert, true}
| pin_certfiles_opts(FileNames)].
make_selfsigned_cert(Dir, CertName) ->
check_openssl_vsn(),
Key = filename:join(Dir, CertName ++ ".key"),
Cert = filename:join(Dir, CertName ++ ".crt"),
ok = filelib:ensure_dir(Cert),
Cmd = io_lib:format(
"openssl req -x509 -newkey rsa:4096 -sha256 -days 3650 -nodes "
"-keyout '~ts' -out '~ts' -subj '/CN=example.org' "
"-addext 'subjectAltName=DNS:example.org,DNS:www.example.org,IP:127.0.0.1'",
),
os:cmd(Cmd).
pin_certfile_opts(FileName) ->
own parsing of certs and chains .
case file:read_file(FileName) of
{ok, Cert} ->
tak:pem_to_ssl_options(Cert) ++
[{verify, verify_peer}];
{error, enoent} ->
error({certificate_not_found, FileName})
end.
pin_certfiles_opts(FileNames) ->
pin_certfiles_opts(FileNames, [], []).
pin_certfiles_opts([], CAs, PinCerts) ->
TODO : drop tlsv1.2 and mandate 1.3 when
[{cacerts, CAs},
{verify_fun, {fun verify_pins/3, PinCerts}},
{versions, [?TLS_VSN]},
{verify, verify_peer}];
pin_certfiles_opts([FileName|FileNames], CAs, PinCerts) ->
own parsing , but then extract the option and allow setting
client may expect to only contact one valid server ) .
[{cacerts, [CADer]},
{verify_fun, {_PinFun, PinCert}} | _] = pin_certfile_opts(FileName),
pin_certfiles_opts(FileNames, [CADer|CAs], [PinCert|PinCerts]).
-spec verify_pins(OtpCert, Event, InitialUserState) ->
{valid, UserState} | {fail, Reason :: term()} | {unknown, UserState}
when
OtpCert :: #'OTPCertificate'{},
Event :: {bad_cert, Reason :: atom() | {revoked, atom()}}
| {extension, #'Extension'{}} | valid | valid_peer,
InitialUserState :: UserState,
UserState :: term().
verify_pins(PinCert, valid_peer, PinCerts) ->
case lists:member(PinCert, PinCerts) of
true -> {valid, PinCerts};
false -> {fail, {peer_cert_unknown, subject(PinCert)}}
end;
verify_pins(_Cert, {extension, _}, PinCerts) ->
{unknown, PinCerts};
verify_pins(PinCert, {bad_cert, selfsigned_peer}, PinCerts) ->
case lists:member(PinCert, PinCerts) of
true -> {valid, PinCerts};
false -> {fail, {bad_cert, selfsigned_peer}}
end;
verify_pins(_Cert, {bad_cert, _} = Reason, _PinCerts) ->
{fail, Reason};
verify_pins(_Cert, valid, PinCerts) ->
{valid, PinCerts}.
subject(#'OTPCertificate'{ tbsCertificate = TBS }) ->
public_key:pkix_normalize_name(TBS#'OTPTBSCertificate'.subject).
check_openssl_vsn() ->
Vsn = os:cmd("openssl version"),
VsnMatch = "(Open|Libre)SSL ([0-9]+)\\.([0-9]+)\\.([0-9]+)",
case re:run(Vsn, VsnMatch, [{capture, all_but_first, list}]) of
{match, [Type, Major, Minor, Patch]} ->
try
check_openssl_vsn(Type, list_to_integer(Major),
list_to_integer(Minor),
list_to_integer(Patch))
catch
error:bad_vsn ->
error({openssl_vsn, Vsn})
end;
_ ->
error({openssl_vsn, Vsn})
end.
Using OpenSSL > = 1.1.1 or LibreSSL > = 3.1.0
check_openssl_vsn("Libre", A, B, _) when A > 3;
A == 3, B >= 1 ->
ok;
check_openssl_vsn("Open", A, B, C) when A > 1;
A == 1, B > 1;
A == 1, B == 1, C >= 1 ->
ok;
check_openssl_vsn(_, _, _, _) ->
error(bad_vsn).
|
32e6702e8f03ab9bda559f586c89cba77aa181890fd0dd5ceff967f7d5514a50 | rtoy/ansi-cl-tests | make-load-form-saving-slots.lsp | ;-*- Mode: Lisp -*-
Author :
Created : Sat May 17 11:54:54 2003
;;;; Contains: Tests of MAKE-LOAD-FORM-SAVING-SLOTS
(in-package :cl-test)
;;; These are tests of MAKE-LOAD-FORM-SAVING-SLOTS proper; tests involving
;;; file compilation will be located elsewhere.
(defstruct mlfss-01 a b c)
(deftest make-load-form-saving-slots.1
(let* ((obj (make-mlfss-01))
(forms (multiple-value-list
(make-load-form-saving-slots obj))))
(values
(length forms)
(let ((newobj (eval (first forms))))
(eval (subst newobj obj (second forms)))
(eqt (class-of obj) (class-of newobj)))))
2 t)
(deftest make-load-form-saving-slots.2
(let* ((obj (make-mlfss-01))
(forms (multiple-value-list
(make-load-form-saving-slots obj :slot-names '(a b)))))
(values
(length forms)
(let ((newobj (eval (first forms))))
(eval (subst newobj obj (second forms)))
(eqt (class-of obj) (class-of newobj)))))
2 t)
(defclass mlfss-02 () ((a :initarg :a) (b :initarg :b) (c :initarg :c)))
(deftest make-load-form-saving-slots.3
(let* ((obj (make-instance 'mlfss-02))
(forms (multiple-value-list
(make-load-form-saving-slots obj))))
(let ((newobj (eval (first forms))))
(eval (subst newobj obj (second forms)))
(values
(length forms)
(eqt (class-of obj) (class-of newobj))
(map-slot-boundp* newobj '(a b c)))))
2 t (nil nil nil))
(deftest make-load-form-saving-slots.4
(let* ((obj (make-instance 'mlfss-02 :a 1 :b 'a :c '(x y z)))
(forms (multiple-value-list
(make-load-form-saving-slots obj :slot-names '(a b c)))))
(let ((newobj (eval (first forms))))
(eval (subst newobj obj (second forms)))
(values
(length forms)
(eqt (class-of obj) (class-of newobj))
(map-slot-boundp* newobj '(a b c))
(map-slot-value newobj '(a b c)))))
2 t (t t t) (1 a (x y z)))
(deftest make-load-form-saving-slots.5
(let* ((obj (make-instance 'mlfss-02 :a #(x y z)))
(forms (multiple-value-list
(make-load-form-saving-slots obj :slot-names '(a b)))))
(let ((newobj (eval (first forms))))
(eval (subst newobj obj (second forms)))
(values
(length forms)
(eqt (class-of obj) (class-of newobj))
(map-slot-boundp* newobj '(a b c))
(slot-value newobj 'a))))
2 t (t nil nil) #(x y z))
(deftest make-load-form-saving-slots.6
(let* ((obj (make-instance 'mlfss-02))
(forms (multiple-value-list
(make-load-form-saving-slots obj :allow-other-keys nil))))
(let ((newobj (eval (first forms))))
(eval (subst newobj obj (second forms)))
(values
(length forms)
(eqt (class-of obj) (class-of newobj))
(map-slot-boundp* newobj '(a b c)))))
2 t (nil nil nil))
;;; If :slot-names is missing, all initialized slots are retained
(deftest make-load-form-saving-slots.7
(let* ((obj (make-instance 'mlfss-02 :a (list 'x) :c 6/5))
(forms (multiple-value-list
(make-load-form-saving-slots obj))))
(let ((newobj (eval (first forms))))
(eval (subst newobj obj (second forms)))
(values
(length forms)
(eqt (class-of obj) (class-of newobj))
(map-slot-boundp* newobj '(a b c))
(map-slot-value newobj '(a c)))))
2 t (t nil t) ((x) 6/5))
;;; If :slot-names is present, all initialized slots in the list are retained
(deftest make-load-form-saving-slots.8
(let* ((obj (make-instance 'mlfss-02 :a (list 'x) :c 6/5))
(forms (multiple-value-list
(make-load-form-saving-slots obj :slot-names '(c)))))
(let ((newobj (eval (first forms))))
(eval (subst newobj obj (second forms)))
(values
(length forms)
(eqt (class-of obj) (class-of newobj))
(map-slot-boundp* newobj '(a b c))
(slot-value newobj 'c))))
2 t (nil nil t) 6/5)
;; It takes an :environment parameter
(deftest make-load-form-saving-slots.9
(let* ((obj (make-instance 'mlfss-02 :a 7 :c 64 :b 100))
(forms (multiple-value-list
(make-load-form-saving-slots obj :environment nil))))
(let ((newobj (eval (first forms))))
(eval (subst newobj obj (second forms)))
(values
(length forms)
(eqt (class-of obj) (class-of newobj))
(map-slot-boundp* newobj '(a b c))
(map-slot-value newobj '(a b c)))))
2 t (t t t) (7 100 64))
(defpackage "CL-TEST-MLFSS-PACKAGE" (:use) (:export #:a))
(defstruct mlfss-03 cl-test-mlfss-package:a)
(deftest make-load-form-savings-slots.10
(let* ((obj (make-mlfss-03 :a 17))
(forms (multiple-value-list
(make-load-form-saving-slots obj))))
(let ((newobj (eval (first forms))))
(eval (subst newobj obj (second forms)))
(values
(mlfss-03-a obj)
(length forms)
(eqt (class-of obj) (class-of newobj))
(mlfss-03-a newobj))))
17 2 t 17)
(deftest make-load-form-savings-slots.11
(let* ((obj (make-mlfss-03 :a 17))
(forms (multiple-value-list
(make-load-form-saving-slots
obj
:slot-names '(cl-test-mlfss-package:a)))))
(let ((newobj (eval (first forms))))
(eval (subst newobj obj (second forms)))
(values
(mlfss-03-a obj)
(length forms)
(eqt (class-of obj) (class-of newobj))
(mlfss-03-a newobj))))
17 2 t 17)
(defstruct mlfss-04 (a 0 :read-only t))
(deftest make-load-form-savings-slots.12
(let* ((obj (make-mlfss-04 :a 123))
(forms (multiple-value-list
(make-load-form-saving-slots obj))))
(let ((newobj (eval (first forms))))
(eval (subst newobj obj (second forms)))
(values
(mlfss-04-a obj)
(length forms)
(eqt (class-of obj) (class-of newobj))
(mlfss-04-a newobj))))
123 2 t 123)
;;; General error tests
(deftest make-load-form-saving-slots.error.1
(signals-error (make-load-form-saving-slots) program-error)
t)
(deftest make-load-form-saving-slots.error.2
(signals-error (make-load-form-saving-slots (make-instance 'mlfss-02)
:slot-names)
program-error)
t)
(deftest make-load-form-saving-slots.error.3
(signals-error (make-load-form-saving-slots (make-instance 'mlfss-02)
(gensym) t)
program-error)
t)
| null | https://raw.githubusercontent.com/rtoy/ansi-cl-tests/9708f3977220c46def29f43bb237e97d62033c1d/make-load-form-saving-slots.lsp | lisp | -*- Mode: Lisp -*-
Contains: Tests of MAKE-LOAD-FORM-SAVING-SLOTS
These are tests of MAKE-LOAD-FORM-SAVING-SLOTS proper; tests involving
file compilation will be located elsewhere.
If :slot-names is missing, all initialized slots are retained
If :slot-names is present, all initialized slots in the list are retained
It takes an :environment parameter
General error tests | Author :
Created : Sat May 17 11:54:54 2003
(in-package :cl-test)
(defstruct mlfss-01 a b c)
(deftest make-load-form-saving-slots.1
(let* ((obj (make-mlfss-01))
(forms (multiple-value-list
(make-load-form-saving-slots obj))))
(values
(length forms)
(let ((newobj (eval (first forms))))
(eval (subst newobj obj (second forms)))
(eqt (class-of obj) (class-of newobj)))))
2 t)
(deftest make-load-form-saving-slots.2
(let* ((obj (make-mlfss-01))
(forms (multiple-value-list
(make-load-form-saving-slots obj :slot-names '(a b)))))
(values
(length forms)
(let ((newobj (eval (first forms))))
(eval (subst newobj obj (second forms)))
(eqt (class-of obj) (class-of newobj)))))
2 t)
(defclass mlfss-02 () ((a :initarg :a) (b :initarg :b) (c :initarg :c)))
(deftest make-load-form-saving-slots.3
(let* ((obj (make-instance 'mlfss-02))
(forms (multiple-value-list
(make-load-form-saving-slots obj))))
(let ((newobj (eval (first forms))))
(eval (subst newobj obj (second forms)))
(values
(length forms)
(eqt (class-of obj) (class-of newobj))
(map-slot-boundp* newobj '(a b c)))))
2 t (nil nil nil))
(deftest make-load-form-saving-slots.4
(let* ((obj (make-instance 'mlfss-02 :a 1 :b 'a :c '(x y z)))
(forms (multiple-value-list
(make-load-form-saving-slots obj :slot-names '(a b c)))))
(let ((newobj (eval (first forms))))
(eval (subst newobj obj (second forms)))
(values
(length forms)
(eqt (class-of obj) (class-of newobj))
(map-slot-boundp* newobj '(a b c))
(map-slot-value newobj '(a b c)))))
2 t (t t t) (1 a (x y z)))
(deftest make-load-form-saving-slots.5
(let* ((obj (make-instance 'mlfss-02 :a #(x y z)))
(forms (multiple-value-list
(make-load-form-saving-slots obj :slot-names '(a b)))))
(let ((newobj (eval (first forms))))
(eval (subst newobj obj (second forms)))
(values
(length forms)
(eqt (class-of obj) (class-of newobj))
(map-slot-boundp* newobj '(a b c))
(slot-value newobj 'a))))
2 t (t nil nil) #(x y z))
(deftest make-load-form-saving-slots.6
(let* ((obj (make-instance 'mlfss-02))
(forms (multiple-value-list
(make-load-form-saving-slots obj :allow-other-keys nil))))
(let ((newobj (eval (first forms))))
(eval (subst newobj obj (second forms)))
(values
(length forms)
(eqt (class-of obj) (class-of newobj))
(map-slot-boundp* newobj '(a b c)))))
2 t (nil nil nil))
(deftest make-load-form-saving-slots.7
(let* ((obj (make-instance 'mlfss-02 :a (list 'x) :c 6/5))
(forms (multiple-value-list
(make-load-form-saving-slots obj))))
(let ((newobj (eval (first forms))))
(eval (subst newobj obj (second forms)))
(values
(length forms)
(eqt (class-of obj) (class-of newobj))
(map-slot-boundp* newobj '(a b c))
(map-slot-value newobj '(a c)))))
2 t (t nil t) ((x) 6/5))
(deftest make-load-form-saving-slots.8
(let* ((obj (make-instance 'mlfss-02 :a (list 'x) :c 6/5))
(forms (multiple-value-list
(make-load-form-saving-slots obj :slot-names '(c)))))
(let ((newobj (eval (first forms))))
(eval (subst newobj obj (second forms)))
(values
(length forms)
(eqt (class-of obj) (class-of newobj))
(map-slot-boundp* newobj '(a b c))
(slot-value newobj 'c))))
2 t (nil nil t) 6/5)
(deftest make-load-form-saving-slots.9
(let* ((obj (make-instance 'mlfss-02 :a 7 :c 64 :b 100))
(forms (multiple-value-list
(make-load-form-saving-slots obj :environment nil))))
(let ((newobj (eval (first forms))))
(eval (subst newobj obj (second forms)))
(values
(length forms)
(eqt (class-of obj) (class-of newobj))
(map-slot-boundp* newobj '(a b c))
(map-slot-value newobj '(a b c)))))
2 t (t t t) (7 100 64))
(defpackage "CL-TEST-MLFSS-PACKAGE" (:use) (:export #:a))
(defstruct mlfss-03 cl-test-mlfss-package:a)
(deftest make-load-form-savings-slots.10
(let* ((obj (make-mlfss-03 :a 17))
(forms (multiple-value-list
(make-load-form-saving-slots obj))))
(let ((newobj (eval (first forms))))
(eval (subst newobj obj (second forms)))
(values
(mlfss-03-a obj)
(length forms)
(eqt (class-of obj) (class-of newobj))
(mlfss-03-a newobj))))
17 2 t 17)
(deftest make-load-form-savings-slots.11
(let* ((obj (make-mlfss-03 :a 17))
(forms (multiple-value-list
(make-load-form-saving-slots
obj
:slot-names '(cl-test-mlfss-package:a)))))
(let ((newobj (eval (first forms))))
(eval (subst newobj obj (second forms)))
(values
(mlfss-03-a obj)
(length forms)
(eqt (class-of obj) (class-of newobj))
(mlfss-03-a newobj))))
17 2 t 17)
(defstruct mlfss-04 (a 0 :read-only t))
(deftest make-load-form-savings-slots.12
(let* ((obj (make-mlfss-04 :a 123))
(forms (multiple-value-list
(make-load-form-saving-slots obj))))
(let ((newobj (eval (first forms))))
(eval (subst newobj obj (second forms)))
(values
(mlfss-04-a obj)
(length forms)
(eqt (class-of obj) (class-of newobj))
(mlfss-04-a newobj))))
123 2 t 123)
(deftest make-load-form-saving-slots.error.1
(signals-error (make-load-form-saving-slots) program-error)
t)
(deftest make-load-form-saving-slots.error.2
(signals-error (make-load-form-saving-slots (make-instance 'mlfss-02)
:slot-names)
program-error)
t)
(deftest make-load-form-saving-slots.error.3
(signals-error (make-load-form-saving-slots (make-instance 'mlfss-02)
(gensym) t)
program-error)
t)
|
82551c9b4a6d4b3f692e3da5ae5ea5e27c86f0fcd6d384e857b78fa0ff330b3a | szktty/starlight | seplist.mli | type ('a, 'b) t
val empty : ('a, 'b) t
val one : 'a -> ('a, 'b) t
val cons : 'a -> sep:'b -> ('a, 'b) t -> ('a, 'b) t
val hd : ('a, 'b) t -> 'a option
val hd_exn : ('a, 'b) t -> 'a
val tl : ('a, 'b) t -> ('b option * ('a, 'b) t) option
val tl_exn : ('a, 'b) t -> 'b option * ('a, 'b) t
val values : ('a, 'b) t -> 'a list
val rev : ('a, 'b) t -> ('a, 'b) t
val length : ('a, 'b) t -> int
val iter : ('a, 'b) t -> f:('b option -> 'a -> unit) -> unit
val iteri : ('a, 'b) t -> f:(int -> 'b option -> 'a -> unit) -> unit
val fold_left : ('a, 'b) t -> init:'c -> f:('c -> 'b option -> 'a -> 'c) -> 'c
val opt_iter : ('a, 'b) t option -> f:('b option -> 'a -> unit) -> unit
| null | https://raw.githubusercontent.com/szktty/starlight/0dec4d272c3ea5dd0f82940a24dfe758ab641ca5/compiler/seplist.mli | ocaml | type ('a, 'b) t
val empty : ('a, 'b) t
val one : 'a -> ('a, 'b) t
val cons : 'a -> sep:'b -> ('a, 'b) t -> ('a, 'b) t
val hd : ('a, 'b) t -> 'a option
val hd_exn : ('a, 'b) t -> 'a
val tl : ('a, 'b) t -> ('b option * ('a, 'b) t) option
val tl_exn : ('a, 'b) t -> 'b option * ('a, 'b) t
val values : ('a, 'b) t -> 'a list
val rev : ('a, 'b) t -> ('a, 'b) t
val length : ('a, 'b) t -> int
val iter : ('a, 'b) t -> f:('b option -> 'a -> unit) -> unit
val iteri : ('a, 'b) t -> f:(int -> 'b option -> 'a -> unit) -> unit
val fold_left : ('a, 'b) t -> init:'c -> f:('c -> 'b option -> 'a -> 'c) -> 'c
val opt_iter : ('a, 'b) t option -> f:('b option -> 'a -> unit) -> unit
| |
058c99b4b241e79ff30f076ff374ae71c2a0adae6808490384311cb40a80bbf6 | comby-tools/comby | test_rewrite_rule_omega.ml | open Core
open Match
open Rewriter
open Test_helpers
include Test_omega
let run_rule source match_template rewrite_template rule =
Generic.first ~configuration match_template source
|> function
| Error _ -> print_string "bad"
| Ok result ->
match result with
| ({ environment; _ } as m) ->
let e = Rule.(result_env @@ apply rule environment) in
match e with
| None -> print_string "bad bad"
| Some e ->
{ m with environment = e }
|> List.return
|> Rewrite.all ~source ~rewrite_template
|> (fun x -> Option.value_exn x)
|> (fun { rewritten_source; _ } -> rewritten_source)
|> print_string
let%expect_test "rewrite_rule" =
let source = {|int|} in
let match_template = {|:[1]|} in
let rewrite_template = {|:[1]|} in
let rule =
{|
where rewrite :[1] { "int" -> "expect" }
|}
|> Language.Rule.create
|> Or_error.ok_exn
in
run_rule source match_template rewrite_template rule;
[%expect_exact {|expect|}]
let%expect_test "sequenced_rewrite_rule" =
let source = {|{ { a : { b : { c : d } } } }|} in
let match_template = {|{ :[a] : :[rest] }|} in
let rewrite_template = {|{ :[a] : :[rest] }|} in
let rule =
{|
where
rewrite :[a] { "a" -> "qqq" },
rewrite :[rest] { "{ b : { :[other] } }" -> "{ :[other] }" }
|}
|> Language.Rule.create
|> Or_error.ok_exn
in
run_rule source match_template rewrite_template rule;
[%expect_exact {|{ { qqq : { c : d } } }|}]
let%expect_test "rewrite_rule_for_list" =
let source = {|[1, 2, 3, 4,]|} in
let match_template = {|[:[contents]]|} in
let rewrite_template = {|[:[contents]]|} in
let rule =
{|
where rewrite :[contents] { ":[[x]]," -> ":[[x]];" }
|}
|> Language.Rule.create
|> Or_error.ok_exn
in
run_rule source match_template rewrite_template rule;
[%expect_exact {|[1; 2; 3; 4;]|}]
let%expect_test "rewrite_rule_for_list_strip_last" =
let source = {|[1, 2, 3, 4]|} in
let match_template = {|[:[contents]]|} in
let rewrite_template = {|[:[contents]]|} in
let rule =
{|
where rewrite :[contents] { ":[x], " -> ":[x]; " }
|}
|> Language.Rule.create
|> Or_error.ok_exn
in
run_rule source match_template rewrite_template rule;
[%expect_exact {|[1; 2; 3; 4]|}]
let%expect_test "haskell_example" =
let source = {|
(concat
[ "blah blah blah"
, "blah"
])
|} in
let match_template = {|(concat [:[contents]])|} in
let rewrite_template = {|(:[contents])|} in
let rule =
{|
where rewrite :[contents] { "," -> "++" }
|}
|> Language.Rule.create
|> Or_error.ok_exn
in
run_rule source match_template rewrite_template rule;
[%expect_exact {|
( "blah blah blah"
++ "blah"
)
|}]
let%expect_test "rewrite_freeform_antecedent_pattern" =
let source = {|
(concat
[ "blah blah blah"
, "blah"
])
|} in
let match_template = {|:[contents]|} in
let rewrite_template = {|(:[contents])|} in
let rule =
{|
where rewrite :[contents] { concat [:[x]] -> "nice" }
|}
|> Language.Rule.create
|> Or_error.ok_exn
in
run_rule source match_template rewrite_template rule;
[%expect_exact {|(
(nice)
)|}]
| null | https://raw.githubusercontent.com/comby-tools/comby/5304b3118921d8ace346ca78cd8a5af30737b022/test/common/test_rewrite_rule_omega.ml | ocaml | open Core
open Match
open Rewriter
open Test_helpers
include Test_omega
let run_rule source match_template rewrite_template rule =
Generic.first ~configuration match_template source
|> function
| Error _ -> print_string "bad"
| Ok result ->
match result with
| ({ environment; _ } as m) ->
let e = Rule.(result_env @@ apply rule environment) in
match e with
| None -> print_string "bad bad"
| Some e ->
{ m with environment = e }
|> List.return
|> Rewrite.all ~source ~rewrite_template
|> (fun x -> Option.value_exn x)
|> (fun { rewritten_source; _ } -> rewritten_source)
|> print_string
let%expect_test "rewrite_rule" =
let source = {|int|} in
let match_template = {|:[1]|} in
let rewrite_template = {|:[1]|} in
let rule =
{|
where rewrite :[1] { "int" -> "expect" }
|}
|> Language.Rule.create
|> Or_error.ok_exn
in
run_rule source match_template rewrite_template rule;
[%expect_exact {|expect|}]
let%expect_test "sequenced_rewrite_rule" =
let source = {|{ { a : { b : { c : d } } } }|} in
let match_template = {|{ :[a] : :[rest] }|} in
let rewrite_template = {|{ :[a] : :[rest] }|} in
let rule =
{|
where
rewrite :[a] { "a" -> "qqq" },
rewrite :[rest] { "{ b : { :[other] } }" -> "{ :[other] }" }
|}
|> Language.Rule.create
|> Or_error.ok_exn
in
run_rule source match_template rewrite_template rule;
[%expect_exact {|{ { qqq : { c : d } } }|}]
let%expect_test "rewrite_rule_for_list" =
let source = {|[1, 2, 3, 4,]|} in
let match_template = {|[:[contents]]|} in
let rewrite_template = {|[:[contents]]|} in
let rule =
{|
where rewrite :[contents] { ":[[x]]," -> ":[[x]];" }
|}
|> Language.Rule.create
|> Or_error.ok_exn
in
run_rule source match_template rewrite_template rule;
[%expect_exact {|[1; 2; 3; 4;]|}]
let%expect_test "rewrite_rule_for_list_strip_last" =
let source = {|[1, 2, 3, 4]|} in
let match_template = {|[:[contents]]|} in
let rewrite_template = {|[:[contents]]|} in
let rule =
{|
where rewrite :[contents] { ":[x], " -> ":[x]; " }
|}
|> Language.Rule.create
|> Or_error.ok_exn
in
run_rule source match_template rewrite_template rule;
[%expect_exact {|[1; 2; 3; 4]|}]
let%expect_test "haskell_example" =
let source = {|
(concat
[ "blah blah blah"
, "blah"
])
|} in
let match_template = {|(concat [:[contents]])|} in
let rewrite_template = {|(:[contents])|} in
let rule =
{|
where rewrite :[contents] { "," -> "++" }
|}
|> Language.Rule.create
|> Or_error.ok_exn
in
run_rule source match_template rewrite_template rule;
[%expect_exact {|
( "blah blah blah"
++ "blah"
)
|}]
let%expect_test "rewrite_freeform_antecedent_pattern" =
let source = {|
(concat
[ "blah blah blah"
, "blah"
])
|} in
let match_template = {|:[contents]|} in
let rewrite_template = {|(:[contents])|} in
let rule =
{|
where rewrite :[contents] { concat [:[x]] -> "nice" }
|}
|> Language.Rule.create
|> Or_error.ok_exn
in
run_rule source match_template rewrite_template rule;
[%expect_exact {|(
(nice)
)|}]
| |
3da9b328682498ef6c3bcd45b6402d875de205f3f0c7554566c1979d0fe22536 | kraison/graph-utils | cut.lisp | (in-package :graph-utils)
(defmethod minimal-cut! ((graph graph))
(let ((removed-edges nil))
(labels ((cut (g)
(cond ((or (< (node-count g) 2)
(= 0 (edge-count g))
(>= (length (find-components g)) 2))
g)
(t
(push (first (cluster graph :edge-span
:edge-removal-count 1))
removed-edges)
(cut graph)))))
(cut graph))
(mapcar #'(lambda (edge)
(subseq edge 0 2))
removed-edges)))
(defmethod minimal-cut ((graph graph) &key (method :cluster))
(let ((g (copy-graph graph)))
(cond ((eq method :cluster)
(values (minimal-cut! g) g)))))
| null | https://raw.githubusercontent.com/kraison/graph-utils/ffc2864825a4771ea9a5d68857e0c86b6a2f239c/cut.lisp | lisp | (in-package :graph-utils)
(defmethod minimal-cut! ((graph graph))
(let ((removed-edges nil))
(labels ((cut (g)
(cond ((or (< (node-count g) 2)
(= 0 (edge-count g))
(>= (length (find-components g)) 2))
g)
(t
(push (first (cluster graph :edge-span
:edge-removal-count 1))
removed-edges)
(cut graph)))))
(cut graph))
(mapcar #'(lambda (edge)
(subseq edge 0 2))
removed-edges)))
(defmethod minimal-cut ((graph graph) &key (method :cluster))
(let ((g (copy-graph graph)))
(cond ((eq method :cluster)
(values (minimal-cut! g) g)))))
| |
db8bc87c336686d7c2e4e6f5477b736e964fa926113f7e6eb232a79fdbd4a871 | metrics-clojure/metrics-clojure | project.clj | (defproject metrics-clojure-riemann "3.0.0-SNAPSHOT"
:description "Riemann reporter integration for metrics-clojure"
:url "-clojure"
:license {:name "MIT"}
:profiles {:dev {:global-vars {*warn-on-reflection* true}}}
:dependencies [[metrics-clojure "3.0.0-SNAPSHOT"]
[io.riemann/metrics4-riemann-reporter "0.5.1"]])
| null | https://raw.githubusercontent.com/metrics-clojure/metrics-clojure/a1dbacc748a1f8165f0094e2229c84f228efe29b/metrics-clojure-riemann/project.clj | clojure | (defproject metrics-clojure-riemann "3.0.0-SNAPSHOT"
:description "Riemann reporter integration for metrics-clojure"
:url "-clojure"
:license {:name "MIT"}
:profiles {:dev {:global-vars {*warn-on-reflection* true}}}
:dependencies [[metrics-clojure "3.0.0-SNAPSHOT"]
[io.riemann/metrics4-riemann-reporter "0.5.1"]])
| |
60056da0d74aeb19d23dda13cc402a360dc2a797af94029d03ff5b66501d27cf | ctdean/backtick | migrate.clj | (ns backtick.db.migrate
(:require [conf.core :as conf]
[ragtime.jdbc :as jdbc]
[ragtime.repl :as repl]))
(defn load-config []
{:datastore (jdbc/sql-database (conf/get :database-url))
:migrations (jdbc/load-resources "backtick/migrations")})
(defn migrate []
(repl/migrate (load-config)))
(defn rollback []
(repl/rollback (load-config)))
| null | https://raw.githubusercontent.com/ctdean/backtick/00f16405903fbd11b161b32572a199b3bc9c11b0/dev/backtick/db/migrate.clj | clojure | (ns backtick.db.migrate
(:require [conf.core :as conf]
[ragtime.jdbc :as jdbc]
[ragtime.repl :as repl]))
(defn load-config []
{:datastore (jdbc/sql-database (conf/get :database-url))
:migrations (jdbc/load-resources "backtick/migrations")})
(defn migrate []
(repl/migrate (load-config)))
(defn rollback []
(repl/rollback (load-config)))
| |
5283df2637ed27223e055fd848b9c1b4a0404d87cd9f933d46b8780881155e97 | haskell-jp/makeMistakesToLearnHaskell | Core.hs | {-# OPTIONS_GHC -Wno-unused-imports #-}
module Education.MakeMistakesToLearnHaskell.Exercise.Core
( runHaskellExercise
, runHaskellExerciseWithStdin
, noVeirificationExercise
, notYetImplementedVeirificationExercise
, isInWords
, detailsForgetToWriteDo
, detailsDoConsistentWidth
, isInconsistentlyIndentedAfter
) where
#include <imports/external.hs>
import Education.MakeMistakesToLearnHaskell.Env
import qualified Education.MakeMistakesToLearnHaskell.Evaluator.RunHaskell as RunHaskell
import Education.MakeMistakesToLearnHaskell.Evaluator.Types
import Education.MakeMistakesToLearnHaskell.Exercise.Types
import Education.MakeMistakesToLearnHaskell.Diagnosis
import Education.MakeMistakesToLearnHaskell.Text
runHaskellExercise
:: Diagnosis
-> Text
-> Env
-> FilePath
-> IO Result
runHaskellExercise = runHaskellExercise' Nothing
-- TODO: refactor with resultForUser
runHaskellExercise'
:: Maybe RunHaskellParameters
-> Diagnosis
-> Text
-> Env
-> FilePath
-> IO Result
runHaskellExercise' mParam diag right e prgFile = do
let rhp = fromMaybe defaultRunHaskellParameters mParam
result <- runHaskell e $ rhp { runHaskellParametersArgs = [prgFile] }
case result of
Right (outB, _errB {- TODO: print stderr -}) -> do
let out = canonicalizeNewlines outB
msg =
Text.unlines
[ Text.replicate 80 "="
, "Your program's output: " <> Text.pack (show out) -- TODO: pretty print
, " Expected output: " <> Text.pack (show right)
]
return $
if out == right
then Success $ "Nice output!\n\n" <> msg
else Fail $ "Wrong output!\n\n" <> msg
Left err ->
case err of
RunHaskell.RunHaskellNotFound ->
return $ Error "runhaskell command is not available.\nInstall stack or Haskell Platform."
RunHaskell.RunHaskellFailure _ msg -> do
logDebug e $ "RunHaskellFailure: " <> msg
code <- readUtf8File prgFile
putStrLn "==================== GHC output ===================="
return $ Fail $ appendDiagnosis diag code msg
runHaskellExercise の入力有りバージョン
runHaskellExerciseWithStdin
:: Diagnosis
-> Gen String
-> (Text -> Text)
-> Env
-> FilePath
-> IO Result
runHaskellExerciseWithStdin diag gen calcRight env prgFile = do
let qcArgs = QuickCheck.stdArgs { QuickCheck.chatty = True }
maxSuccessSize = envQcMaxSuccessSize env
resultRef <- newIORef $ error "Assertion failure: no result written after QuickCheck"
qr <- quickCheckWithResult qcArgs $
QuickCheck.forAll gen $ \inputS ->
QuickCheck.withMaxSuccess maxSuccessSize $
QuickCheck.ioProperty $ do
let input = Text.pack inputS
params = defaultRunHaskellParameters
{ runHaskellParametersArgs = [prgFile]
, runHaskellParametersStdin = TextEncoding.encodeUtf8 input
}
code <- readUtf8File prgFile
result <- resultForUser diag code [" For input: " <> Text.pack (show input)] calcRight input <$> runHaskell env params
writeIORef resultRef result
return $
case result of
Success _ -> True
_other -> False
logDebug env $ ByteString.pack $ "QuickCheck result: " ++ show qr
readIORef resultRef
resultForUser
:: Diagnosis
-> Text
-> [Text]
-> (Text -> Text)
-> Text
-> Either RunHaskellError (ByteString, ByteString)
-> Result
resultForUser _diag _code messageFooter calcRight input (Right (outB, _errB {- TODO: print stderr -})) =
let out = canonicalizeNewlines outB
right = calcRight input
msg =
Text.unlines $
[ Text.replicate 80 "="
, "Your program's output: " <> Text.pack (show out) -- TODO: pretty print
, " Expected output: " <> Text.pack (show right)
] ++ messageFooter
in
if right == out
then Success $ "Nice output!\n\n" <> msg
else Fail $ "Wrong output!\n\n" <> msg
resultForUser _diag _code _messageFooter _calcRight _minput (Left RunHaskell.RunHaskellNotFound) =
Error "runhaskell command is not available.\nInstall stack or Haskell Platform."
resultForUser diag code _messageFooter _calcRight _minput (Left (RunHaskell.RunHaskellFailure _ msg)) =
Fail $ appendDiagnosis diag code msg
isInWords :: Text -> [Text] -> Bool
isInWords wd = any (Text.isInfixOf wd)
detailsForgetToWriteDo :: Text -> Details
detailsForgetToWriteDo funcNames =
"HINT: You seem to have forgotten to write `do`. `do` must be put before listing " <> funcNames <> "."
detailsDoConsistentWidth :: Details
detailsDoConsistentWidth = "HINT: instructions in a `do` must be in a consistent width."
isInconsistentlyIndentedAfter :: SourceCode -> Text -> Bool
isInconsistentlyIndentedAfter code wd =
not
$ allSame
$ map (Text.length . Text.takeWhile Char.isSpace)
$ cropAfterWord wd
$ Text.lines code
where
cropAfterWord :: Text -> [SourceCode] -> [SourceCode]
cropAfterWord w ls =
-- Against my expectaion,
-- 'dropWhile (isInWords w . Text.words) ls' returns ls as is.
-- While this function should return an empty list
-- if 'ls' doesn't contain 'w'.
let (_nonContaining, containing) = List.break (isInWords w . Text.words) ls
in
if null containing
then []
except the first line , which contains ' w '
allSame :: Eq a => [a] -> Bool
allSame [] = True
allSame [_] = True
allSame (x1 : x2 : xs) = x1 == x2 && allSame xs
noVeirificationExercise :: Env -> String -> IO Result
noVeirificationExercise _ _ = return NotVerified
notYetImplementedVeirificationExercise :: Env -> String -> IO Result
notYetImplementedVeirificationExercise _ _ = return NotYetImplemented
| null | https://raw.githubusercontent.com/haskell-jp/makeMistakesToLearnHaskell/554e74ce09372d8b4c3c2d1158b9ca0784f2b571/src/Education/MakeMistakesToLearnHaskell/Exercise/Core.hs | haskell | # OPTIONS_GHC -Wno-unused-imports #
TODO: refactor with resultForUser
TODO: print stderr
TODO: pretty print
TODO: print stderr
TODO: pretty print
Against my expectaion,
'dropWhile (isInWords w . Text.words) ls' returns ls as is.
While this function should return an empty list
if 'ls' doesn't contain 'w'. |
module Education.MakeMistakesToLearnHaskell.Exercise.Core
( runHaskellExercise
, runHaskellExerciseWithStdin
, noVeirificationExercise
, notYetImplementedVeirificationExercise
, isInWords
, detailsForgetToWriteDo
, detailsDoConsistentWidth
, isInconsistentlyIndentedAfter
) where
#include <imports/external.hs>
import Education.MakeMistakesToLearnHaskell.Env
import qualified Education.MakeMistakesToLearnHaskell.Evaluator.RunHaskell as RunHaskell
import Education.MakeMistakesToLearnHaskell.Evaluator.Types
import Education.MakeMistakesToLearnHaskell.Exercise.Types
import Education.MakeMistakesToLearnHaskell.Diagnosis
import Education.MakeMistakesToLearnHaskell.Text
runHaskellExercise
:: Diagnosis
-> Text
-> Env
-> FilePath
-> IO Result
runHaskellExercise = runHaskellExercise' Nothing
runHaskellExercise'
:: Maybe RunHaskellParameters
-> Diagnosis
-> Text
-> Env
-> FilePath
-> IO Result
runHaskellExercise' mParam diag right e prgFile = do
let rhp = fromMaybe defaultRunHaskellParameters mParam
result <- runHaskell e $ rhp { runHaskellParametersArgs = [prgFile] }
case result of
let out = canonicalizeNewlines outB
msg =
Text.unlines
[ Text.replicate 80 "="
, " Expected output: " <> Text.pack (show right)
]
return $
if out == right
then Success $ "Nice output!\n\n" <> msg
else Fail $ "Wrong output!\n\n" <> msg
Left err ->
case err of
RunHaskell.RunHaskellNotFound ->
return $ Error "runhaskell command is not available.\nInstall stack or Haskell Platform."
RunHaskell.RunHaskellFailure _ msg -> do
logDebug e $ "RunHaskellFailure: " <> msg
code <- readUtf8File prgFile
putStrLn "==================== GHC output ===================="
return $ Fail $ appendDiagnosis diag code msg
runHaskellExercise の入力有りバージョン
runHaskellExerciseWithStdin
:: Diagnosis
-> Gen String
-> (Text -> Text)
-> Env
-> FilePath
-> IO Result
runHaskellExerciseWithStdin diag gen calcRight env prgFile = do
let qcArgs = QuickCheck.stdArgs { QuickCheck.chatty = True }
maxSuccessSize = envQcMaxSuccessSize env
resultRef <- newIORef $ error "Assertion failure: no result written after QuickCheck"
qr <- quickCheckWithResult qcArgs $
QuickCheck.forAll gen $ \inputS ->
QuickCheck.withMaxSuccess maxSuccessSize $
QuickCheck.ioProperty $ do
let input = Text.pack inputS
params = defaultRunHaskellParameters
{ runHaskellParametersArgs = [prgFile]
, runHaskellParametersStdin = TextEncoding.encodeUtf8 input
}
code <- readUtf8File prgFile
result <- resultForUser diag code [" For input: " <> Text.pack (show input)] calcRight input <$> runHaskell env params
writeIORef resultRef result
return $
case result of
Success _ -> True
_other -> False
logDebug env $ ByteString.pack $ "QuickCheck result: " ++ show qr
readIORef resultRef
resultForUser
:: Diagnosis
-> Text
-> [Text]
-> (Text -> Text)
-> Text
-> Either RunHaskellError (ByteString, ByteString)
-> Result
let out = canonicalizeNewlines outB
right = calcRight input
msg =
Text.unlines $
[ Text.replicate 80 "="
, " Expected output: " <> Text.pack (show right)
] ++ messageFooter
in
if right == out
then Success $ "Nice output!\n\n" <> msg
else Fail $ "Wrong output!\n\n" <> msg
resultForUser _diag _code _messageFooter _calcRight _minput (Left RunHaskell.RunHaskellNotFound) =
Error "runhaskell command is not available.\nInstall stack or Haskell Platform."
resultForUser diag code _messageFooter _calcRight _minput (Left (RunHaskell.RunHaskellFailure _ msg)) =
Fail $ appendDiagnosis diag code msg
isInWords :: Text -> [Text] -> Bool
isInWords wd = any (Text.isInfixOf wd)
detailsForgetToWriteDo :: Text -> Details
detailsForgetToWriteDo funcNames =
"HINT: You seem to have forgotten to write `do`. `do` must be put before listing " <> funcNames <> "."
detailsDoConsistentWidth :: Details
detailsDoConsistentWidth = "HINT: instructions in a `do` must be in a consistent width."
isInconsistentlyIndentedAfter :: SourceCode -> Text -> Bool
isInconsistentlyIndentedAfter code wd =
not
$ allSame
$ map (Text.length . Text.takeWhile Char.isSpace)
$ cropAfterWord wd
$ Text.lines code
where
cropAfterWord :: Text -> [SourceCode] -> [SourceCode]
cropAfterWord w ls =
let (_nonContaining, containing) = List.break (isInWords w . Text.words) ls
in
if null containing
then []
except the first line , which contains ' w '
-- | True when every element of the list is equal to every other
-- (vacuously true for empty and singleton lists).
allSame :: Eq a => [a] -> Bool
allSame xs = and (zipWith (==) xs (drop 1 xs))
-- | Checker for exercises that have no automatic verification: always
-- reports 'NotVerified'.  (The typo in the name is preserved because
-- callers refer to it as-is.)
noVeirificationExercise :: Env -> String -> IO Result
noVeirificationExercise _env _answer = pure NotVerified
-- | Placeholder checker for exercises whose verification is still to be
-- written: always reports 'NotYetImplemented'.
notYetImplementedVeirificationExercise :: Env -> String -> IO Result
notYetImplementedVeirificationExercise _env _answer = pure NotYetImplemented
|
b2885d85cf2a6c45129ca73cdf7f9a61cafd7fd760be08643412a3a204c7231a | scheme/scsh | user-group.scm | ;;; User info
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Look up a user record either by login name (string) or numeric uid
;;; (integer); any other argument type is an error.
(define (user-info uid/name)
  (cond ((string? uid/name)
         (name->user-info uid/name))
        ((integer? uid/name)
         (user-id->user-info (integer->user-id uid/name)))
        (else
         (error "user-info arg must be string or integer" uid/name))))
;;; Field accessors, converting OS strings and id records into plain
;;; Scheme strings and integers.

(define (user-info:name info)
  (os-string->string (user-info-name info)))

(define (user-info:uid info)
  (user-id->integer (user-info-id info)))

(define (user-info:gid info)
  (group-id->integer (user-info-group info)))

(define (user-info:home-dir info)
  (os-string->string (user-info-home-directory info)))

(define (user-info:shell info)
  (os-string->string (user-info-shell info)))
;;; Derived functions
;;; Convenience wrappers over USER-INFO; each accepts a name or a uid.

(define (->uid who)
  (user-info:uid (user-info who)))

(define (->username who)
  (user-info:name (user-info who)))

(define (%homedir who)
  (user-info:home-dir (user-info who)))
;; Home directory of the current user, cached at startup and consulted
;; by HOME-DIR / HOME-FILE.
(define home-directory "")

;; Record HOME as the current user's home directory (called at startup).
(define (init-home-directory home)
  (set! home-directory home))
;;; (home-dir [user]) -> string
;;; With no argument, return the cached home directory of the current
;;; user; otherwise look up USER's home directory, signalling an error
;;; when it cannot be determined.
(define (home-dir . maybe-user)
  (if (null? maybe-user)
      home-directory
      (let* ((user (car maybe-user))
             (dir  (%homedir user)))
        (if dir
            (ensure-file-name-is-nondirectory dir)
            (error "Cannot get user's home directory" user)))))
;;; (home-file [user] fname)
;;; (home-file [user] fname) -> string
;;; Resolve FNAME relative to a home directory: the current user's when
;;; called with one argument, USER's when called with two.
(define (home-file arg1 . maybe-arg2)
  (if (pair? maybe-arg2)
      (string-append (file-name-as-directory (home-dir arg1))
                     (car maybe-arg2))
      (string-append (file-name-as-directory home-directory)
                     arg1)))
;;; Group info
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; Look up a group record either by group name (string) or numeric gid
;;; (integer); any other argument type is an error.
(define (group-info gid/name)
  (cond ((string? gid/name)
         (name->group-info gid/name))
        ((integer? gid/name)
         (group-id->group-info (integer->group-id gid/name)))
        (else
         (error "group-info arg must be string or integer" gid/name))))
;;; Field accessors for group records.

(define (group-info:name info)
  (os-string->string (group-info-name info)))

(define (group-info:gid info)
  (group-id->integer (group-info-id info)))

(define (group-info:members info)
  (map os-string->string (group-info-members info)))
;;; Derived functions
;;; Convenience wrappers over GROUP-INFO.

(define (->gid gid/name)
  (group-info:gid (group-info gid/name)))

(define (->groupname gid/name)
  (group-info:name (group-info gid/name)))
| null | https://raw.githubusercontent.com/scheme/scsh/114432435e4eadd54334df6b37fcae505079b49f/scheme/user-group.scm | scheme | User info
Derived functions
(home-file [user] fname)
Group info
Derived functions |
(define (user-info uid/name)
((cond ((string? uid/name) name->user-info)
((integer? uid/name) (lambda (uid)
(user-id->user-info (integer->user-id uid))))
(else (error "user-info arg must be string or integer" uid/name)))
uid/name))
(define (user-info:name user-info)
(os-string->string (user-info-name user-info)))
(define (user-info:uid user-info)
(user-id->integer (user-info-id user-info)))
(define (user-info:gid user-info)
(group-id->integer (user-info-group user-info)))
(define (user-info:home-dir user-info)
(os-string->string (user-info-home-directory user-info)))
(define (user-info:shell user-info)
(os-string->string (user-info-shell user-info)))
(define (->uid uid/name)
(user-info:uid (user-info uid/name)))
(define (->username uid/name)
(user-info:name (user-info uid/name)))
(define (%homedir uid/name)
(user-info:home-dir (user-info uid/name)))
(define home-directory "")
(define (init-home-directory home)
(set! home-directory home))
(define (home-dir . maybe-user)
(if (pair? maybe-user)
(let ((user (car maybe-user)))
(ensure-file-name-is-nondirectory
(or (%homedir user)
(error "Cannot get user's home directory"
user))))
home-directory))
(define (home-file arg1 . maybe-arg2)
(receive (dir fname)
(if (pair? maybe-arg2)
(values (home-dir arg1) (car maybe-arg2))
(values home-directory arg1))
(string-append (file-name-as-directory dir) fname)))
(define (group-info gid/name)
((cond ((string? gid/name) name->group-info)
((integer? gid/name) (lambda (gid)
(group-id->group-info (integer->group-id gid))))
(else (error "group-info arg must be string or integer" gid/name)))
gid/name))
(define (group-info:name group-info)
(os-string->string (group-info-name group-info)))
(define (group-info:gid group-info)
(group-id->integer (group-info-id group-info)))
(define (group-info:members group-info)
(map os-string->string (group-info-members group-info)))
(define (->gid name)
(group-info:gid (group-info name)))
(define (->groupname gid)
(group-info:name (group-info gid)))
|
e28edd64d62bbc5c6c5ea9c74132dc66fac0b6d220a68d6ec854dff082d2c13e | haskell/stylish-haskell | Config.hs | --------------------------------------------------------------------------------
{-# LANGUAGE BlockArguments #-}
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE TemplateHaskell #
module Language.Haskell.Stylish.Config
( Extensions
, Config (..)
, ExitCodeBehavior (..)
, defaultConfigBytes
, configFilePath
, loadConfig
, parseConfig
) where
--------------------------------------------------------------------------------
import Control.Applicative ((<|>))
import Control.Monad (forM, mzero)
import Data.Aeson (FromJSON (..))
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as A
import qualified Data.ByteString as B
import Data.ByteString.Lazy (fromStrict)
import Data.Char (toLower)
import qualified Data.FileEmbed as FileEmbed
import Data.List (intercalate,
nub)
import Data.Map (Map)
import qualified Data.Map as M
import Data.Maybe (fromMaybe)
import qualified Data.Text as T
import Data.YAML (prettyPosWithSource)
import Data.YAML.Aeson (decode1Strict)
import System.Directory
import System.FilePath ((</>))
import qualified System.IO as IO (Newline (..),
nativeNewline)
import Text.Read (readMaybe)
--------------------------------------------------------------------------------
import qualified Language.Haskell.Stylish.Config.Cabal as Cabal
import Language.Haskell.Stylish.Config.Internal
import Language.Haskell.Stylish.Step
import qualified Language.Haskell.Stylish.Step.Data as Data
import qualified Language.Haskell.Stylish.Step.Imports as Imports
import qualified Language.Haskell.Stylish.Step.LanguagePragmas as LanguagePragmas
import qualified Language.Haskell.Stylish.Step.ModuleHeader as ModuleHeader
import qualified Language.Haskell.Stylish.Step.SimpleAlign as SimpleAlign
import qualified Language.Haskell.Stylish.Step.Squash as Squash
import qualified Language.Haskell.Stylish.Step.Tabs as Tabs
import qualified Language.Haskell.Stylish.Step.TrailingWhitespace as TrailingWhitespace
import qualified Language.Haskell.Stylish.Step.UnicodeSyntax as UnicodeSyntax
import Language.Haskell.Stylish.Verbose
--------------------------------------------------------------------------------
-- | Names of language extensions, as they appear in LANGUAGE pragmas.
type Extensions = [String]
--------------------------------------------------------------------------------
-- | Runtime configuration: the formatting steps to run plus file-wide
-- settings shared by those steps.
data Config = Config
    { configSteps              :: [Step]            -- ^ Formatting passes, applied in order
    , configColumns            :: Maybe Int         -- ^ Max line width ('Nothing' = unlimited)
    , configLanguageExtensions :: [String]          -- ^ Extensions assumed when parsing
    , configNewline            :: IO.Newline        -- ^ Newline style for output
    , configCabal              :: Bool              -- ^ Also read extensions from the .cabal file?
    , configExitCode           :: ExitCodeBehavior  -- ^ Exit-code policy
    }
--------------------------------------------------------------------------------
-- | What exit code the tool should produce after formatting.
data ExitCodeBehavior
  = NormalExitBehavior         -- ^ Exit 0 on success
  | ErrorOnFormatExitBehavior  -- ^ Exit non-zero when anything was reformatted
  deriving (Eq)

-- 'show' yields exactly the strings accepted by the "exit_code" option.
instance Show ExitCodeBehavior where
  show NormalExitBehavior        = "normal"
  show ErrorOnFormatExitBehavior = "error_on_format"
--------------------------------------------------------------------------------
-- | Decode a 'Config' from the YAML/JSON document via 'parseConfig'.
instance FromJSON Config where
    parseJSON = parseConfig
--------------------------------------------------------------------------------
-- | File name probed in each candidate directory when searching for a
-- configuration file.
configFileName :: String
configFileName = ".stylish-haskell.yaml"
--------------------------------------------------------------------------------
-- | The default configuration, embedded into the binary at compile time.
defaultConfigBytes :: B.ByteString
defaultConfigBytes = $(FileEmbed.embedFile "data/stylish-haskell.yaml")
--------------------------------------------------------------------------------
-- | Decide which config file to use: an explicitly supplied path wins;
-- otherwise walk from the current directory upwards, then try the XDG
-- config directory, and finally the home directory.
configFilePath :: Verbose -> Maybe FilePath -> IO (Maybe FilePath)
configFilePath _ (Just userSpecified) = return (Just userSpecified)
configFilePath verbose Nothing = do
    current    <- getCurrentDirectory
    configPath <- getXdgDirectory XdgConfig "stylish-haskell"
    home       <- getHomeDirectory
    let candidates =
            [d </> configFileName | d <- ancestors current] ++
            [configPath </> "config.yaml", home </> configFileName]
    search verbose candidates
-- | Return the first existing file from the candidate list, logging the
-- outcome of each probe.
search :: Verbose -> [FilePath] -> IO (Maybe FilePath)
search _ [] = return Nothing
search verbose (path : rest) = do
    -- TODO Maybe catch an error here, might be unreadable
    found <- doesFileExist path
    verbose $ path ++ (if found then " exists" else " does not exist")
    if found
        then return (Just path)
        else search verbose rest
--------------------------------------------------------------------------------
-- | Locate, read and decode the configuration, falling back to the
-- embedded default when no file is found.  When @cabal: true@, language
-- extensions declared in the project's .cabal file are merged in.
loadConfig :: Verbose -> Maybe FilePath -> IO Config
loadConfig verbose userSpecified = do
    mbFp <- configFilePath verbose userSpecified
    verbose $ "Loading configuration at " ++ fromMaybe "<embedded>" mbFp
    bytes <- maybe (return defaultConfigBytes) B.readFile mbFp
    case decode1Strict bytes of
        -- YAML errors are fatal; render them with source position.
        Left (pos, err) -> error $ prettyPosWithSource pos (fromStrict bytes) ("Language.Haskell.Stylish.Config.loadConfig: " ++ err)
        Right config -> do
            cabalLanguageExtensions <- if configCabal config
                then map toStr <$> Cabal.findLanguageExtensions verbose
                else pure []
            -- nub: keep each extension once, config file entries first.
            return $ config
                { configLanguageExtensions = nub $
                    configLanguageExtensions config ++ cabalLanguageExtensions
                }
    where
      -- Render an extension flag; a disabled one gets the "No" prefix.
      toStr (ext, True)  = show ext
      toStr (ext, False) = "No" ++ show ext
--------------------------------------------------------------------------------
-- | Parse the top-level configuration object.  The shared settings are
-- parsed first so each step parser can consult them (e.g. the column
-- limit); the steps are filled in afterwards.
parseConfig :: A.Value -> A.Parser Config
parseConfig (A.Object o) = do
    -- First load the config without the actual steps
    config <- Config
        <$> pure []
        -- .:! lets an explicit "columns: null" mean "no limit".
        <*> (o A..:! "columns" A..!= Just 80)
        <*> (o A..:? "language_extensions" A..!= [])
        <*> (o A..:? "newline" >>= parseEnum newlines IO.nativeNewline)
        <*> (o A..:? "cabal" A..!= True)
        <*> (o A..:? "exit_code" >>= parseEnum exitCodes NormalExitBehavior)
    -- Then fill in the steps based on the partial config we already have
    stepValues <- o A..: "steps" :: A.Parser [A.Value]
    steps <- mapM (parseSteps config) stepValues
    return config {configSteps = concat steps}
  where
    newlines =
        [ ("native", IO.nativeNewline)
        , ("lf", IO.LF)
        , ("crlf", IO.CRLF)
        ]
    exitCodes =
        [ ("normal", NormalExitBehavior)
        , ("error_on_format", ErrorOnFormatExitBehavior)
        ]
parseConfig _ = mzero
--------------------------------------------------------------------------------
-- | Registry of known steps: maps the name used in the YAML "steps"
-- list to the parser that builds that step from its options object.
catalog :: Map String (Config -> A.Object -> A.Parser Step)
catalog = M.fromList
    [ ("imports", parseImports)
    , ("module_header", parseModuleHeader)
    , ("records", parseRecords)
    , ("language_pragmas", parseLanguagePragmas)
    , ("simple_align", parseSimpleAlign)
    , ("squash", parseSquash)
    , ("tabs", parseTabs)
    , ("trailing_whitespace", parseTrailingWhitespace)
    , ("unicode_syntax", parseUnicodeSyntax)
    ]
--------------------------------------------------------------------------------
-- | Parse one entry of the "steps" list: an object whose keys name
-- steps from 'catalog' and whose values hold their options.
parseSteps :: Config -> A.Value -> A.Parser [Step]
parseSteps config val = do
    stepMap <- parseJSON val :: A.Parser (Map String A.Value)
    forM (M.toList stepMap) $ \(name, options) ->
        case (M.lookup name catalog, options) of
            (Just parser, A.Object o) -> parser config o
            _                         -> fail $ "Invalid declaration for " ++ name
--------------------------------------------------------------------------------
-- | Utility for enum-like options
-- | Utility for enum-like options: map a config string onto one of the
-- allowed values, using the fallback when the key was absent.
parseEnum :: [(String, a)] -> a -> Maybe String -> A.Parser a
parseEnum _ fallback Nothing = return fallback
parseEnum table _ (Just k) = maybe unknown return (lookup k table)
  where
    unknown = fail $ "Unknown option: " ++ k ++ ", should be one of: " ++
        intercalate ", " (map fst table)
--------------------------------------------------------------------------------
-- | Build the module-header step from its options, falling back to the
-- step's defaults for any omitted field.
parseModuleHeader :: Config -> A.Object -> A.Parser Step
parseModuleHeader config o = fmap (ModuleHeader.step columns) $ ModuleHeader.Config
    <$> (o A..:? "indent" A..!= ModuleHeader.indent def)
    <*> (o A..:? "sort" A..!= ModuleHeader.sort def)
    <*> (o A..:? "separate_lists" A..!= ModuleHeader.separateLists def)
    <*> (o A..:? "break_where" >>= parseEnum breakWhere (ModuleHeader.breakWhere def))
    <*> (o A..:? "open_bracket" >>= parseEnum openBracket (ModuleHeader.openBracket def))
  where
    def = ModuleHeader.defaultConfig
    columns = configColumns config
    breakWhere =
        [ ("exports", ModuleHeader.Exports)
        , ("single", ModuleHeader.Single)
        , ("inline", ModuleHeader.Inline)
        , ("always", ModuleHeader.Always)
        ]
    openBracket =
        [ ("same_line", ModuleHeader.SameLine)
        , ("next_line", ModuleHeader.NextLine)
        ]
--------------------------------------------------------------------------------
-- | Build the simple-align step.  Each option accepts either one of the
-- align-mode strings or, for backwards compatibility, a boolean.
parseSimpleAlign :: Config -> A.Object -> A.Parser Step
parseSimpleAlign c o = SimpleAlign.step
    <$> pure (configColumns c)
    <*> (SimpleAlign.Config
        <$> parseAlign "cases" SimpleAlign.cCases
        <*> parseAlign "top_level_patterns" SimpleAlign.cTopLevelPatterns
        <*> parseAlign "records" SimpleAlign.cRecords
        <*> parseAlign "multi_way_if" SimpleAlign.cMultiWayIf)
  where
    -- Try the enum spelling first, then the legacy boolean spelling.
    parseAlign key f =
        (o A..:? key >>= parseEnum aligns (f SimpleAlign.defaultConfig)) <|>
        (boolToAlign <$> o A..: key)
    aligns =
        [ ("always", SimpleAlign.Always)
        , ("adjacent", SimpleAlign.Adjacent)
        , ("never", SimpleAlign.Never)
        ]
    -- Legacy booleans: true = always, false = never.
    boolToAlign True  = SimpleAlign.Always
    boolToAlign False = SimpleAlign.Never
--------------------------------------------------------------------------------
-- | Build the records (data-declaration formatting) step.
parseRecords :: Config -> A.Object -> A.Parser Step
parseRecords c o = Data.step
    <$> (Data.Config
        <$> (o A..: "equals" >>= parseIndent)
        <*> (o A..: "first_field" >>= parseIndent)
        <*> (o A..: "field_comment")
        <*> (o A..: "deriving")
        <*> (o A..:? "break_enums" A..!= False)
        <*> (o A..:? "break_single_constructors" A..!= True)
        <*> (o A..: "via" >>= parseIndent)
        <*> (o A..:? "curried_context" A..!= False)
        <*> (o A..:? "sort_deriving" A..!= True)
        <*> pure configMaxColumns)
  where
    configMaxColumns =
        maybe Data.NoMaxColumns Data.MaxColumns (configColumns c)

    -- Accepts "same_line" or "indent <n>".
    parseIndent :: A.Value -> A.Parser Data.Indent
    parseIndent = \case
        A.String "same_line" -> return Data.SameLine
        A.String t | "indent " `T.isPrefixOf` t ->
            case readMaybe (T.unpack $ T.drop 7 t) of
                Just n  -> return $ Data.Indent n
                Nothing -> fail $ "Indent: not a number" <> T.unpack (T.drop 7 t)
        A.String t -> fail $ "can't parse indent setting: " <> T.unpack t
        _          -> fail "Expected string for indent value"
--------------------------------------------------------------------------------
-- | The squash step takes no options.
parseSquash :: Config -> A.Object -> A.Parser Step
parseSquash _config _options = pure Squash.step
--------------------------------------------------------------------------------
-- | Build the imports step from its (many) layout options; omitted
-- options fall back to 'Imports.defaultOptions'.
parseImports :: Config -> A.Object -> A.Parser Step
parseImports config o = fmap (Imports.step columns) $ Imports.Options
    <$> (o A..:? "align" >>= parseEnum aligns (def Imports.importAlign))
    <*> (o A..:? "list_align" >>= parseEnum listAligns (def Imports.listAlign))
    <*> (o A..:? "pad_module_names" A..!= def Imports.padModuleNames)
    <*> (o A..:? "long_list_align" >>= parseEnum longListAligns (def Imports.longListAlign))
    <*> (o A..:? "empty_list_align" >>= parseEnum emptyListAligns (def Imports.emptyListAlign))
    -- Note that padding has to be at least 1. Default is 4.
    <*> (o A..:? "list_padding" >>= maybe (pure $ def Imports.listPadding) parseListPadding)
    <*> o A..:? "separate_lists" A..!= def Imports.separateLists
    <*> o A..:? "space_surround" A..!= def Imports.spaceSurround
    <*> o A..:? "post_qualify" A..!= def Imports.postQualified
    <*> o A..:? "group_imports" A..!= def Imports.groupImports
    <*> o A..:? "group_rules" A..!= def Imports.groupRules
  where
    def f = f Imports.defaultOptions
    columns = configColumns config
    aligns =
        [ ("global", Imports.Global)
        , ("file", Imports.File)
        , ("group", Imports.Group)
        , ("none", Imports.None)
        ]
    listAligns =
        [ ("new_line", Imports.NewLine)
        , ("with_module_name", Imports.WithModuleName)
        , ("with_alias", Imports.WithAlias)
        , ("after_alias", Imports.AfterAlias)
        , ("repeat", Imports.Repeat)
        ]
    longListAligns =
        [ ("inline", Imports.Inline)
        , ("new_line", Imports.InlineWithBreak)
        , ("new_line_multiline", Imports.InlineToMultiline)
        , ("multiline", Imports.Multiline)
        ]
    emptyListAligns =
        [ ("inherit", Imports.Inherit)
        , ("right_after", Imports.RightAfter)
        ]
    -- Either the literal "module_name" or a number >= 1.
    parseListPadding = \case
        A.String "module_name" -> pure Imports.LPModuleName
        A.Number n | n >= 1 -> pure $ Imports.LPConstant (truncate n)
        v -> A.typeMismatch "'module_name' or >=1 number" v
--------------------------------------------------------------------------------
-- | Build the LANGUAGE-pragmas step.
parseLanguagePragmas :: Config -> A.Object -> A.Parser Step
parseLanguagePragmas config o = LanguagePragmas.step
    <$> pure (configColumns config)
    <*> (o A..:? "style" >>= parseEnum styles LanguagePragmas.Vertical)
    <*> o A..:? "align" A..!= True
    <*> o A..:? "remove_redundant" A..!= True
    <*> mkLanguage o
  where
    styles =
        [ ("vertical", LanguagePragmas.Vertical)
        , ("compact", LanguagePragmas.Compact)
        , ("compact_line", LanguagePragmas.CompactLine)
        , ("vertical_compact", LanguagePragmas.VerticalCompact)
        ]
--------------------------------------------------------------------------------
-- | Utilities for validating language prefixes
-- | Read the optional @language_prefix@ option; it must spell the word
-- "language" in any casing.  Defaults to @LANGUAGE@.
mkLanguage :: A.Object -> A.Parser String
mkLanguage o = o A..:? "language_prefix" >>= maybe (pure "LANGUAGE") validate
  where
    validate :: String -> A.Parser String
    validate s
        | map toLower s == "language" = pure s
        | otherwise                   = fail "please provide a valid language prefix"
--------------------------------------------------------------------------------
-- | Build the tabs step; "spaces" is how many spaces replace one tab
-- (default 8).
parseTabs :: Config -> A.Object -> A.Parser Step
parseTabs _ o = do
    spaces <- o A..:? "spaces" A..!= 8
    pure (Tabs.step spaces)
--------------------------------------------------------------------------------
-- | The trailing-whitespace step takes no options.
parseTrailingWhitespace :: Config -> A.Object -> A.Parser Step
parseTrailingWhitespace _config _options = pure TrailingWhitespace.step
--------------------------------------------------------------------------------
-- | Build the unicode-syntax step.
parseUnicodeSyntax :: Config -> A.Object -> A.Parser Step
parseUnicodeSyntax _ o = do
    addPragma <- o A..:? "add_language_pragma" A..!= True
    prefix    <- mkLanguage o
    pure (UnicodeSyntax.step addPragma prefix)
| null | https://raw.githubusercontent.com/haskell/stylish-haskell/628cf06a6e8b55b56b36df33edc8f5f9f2415aa9/lib/Language/Haskell/Stylish/Config.hs | haskell | ------------------------------------------------------------------------------
# LANGUAGE BlockArguments #
# LANGUAGE OverloadedStrings #
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Then fill in the steps based on the partial config we already have
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
| Utility for enum-like options
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
| Utilities for validating language prefixes
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------ | # LANGUAGE LambdaCase #
# LANGUAGE TemplateHaskell #
module Language.Haskell.Stylish.Config
( Extensions
, Config (..)
, ExitCodeBehavior (..)
, defaultConfigBytes
, configFilePath
, loadConfig
, parseConfig
) where
import Control.Applicative ((<|>))
import Control.Monad (forM, mzero)
import Data.Aeson (FromJSON (..))
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as A
import qualified Data.ByteString as B
import Data.ByteString.Lazy (fromStrict)
import Data.Char (toLower)
import qualified Data.FileEmbed as FileEmbed
import Data.List (intercalate,
nub)
import Data.Map (Map)
import qualified Data.Map as M
import Data.Maybe (fromMaybe)
import qualified Data.Text as T
import Data.YAML (prettyPosWithSource)
import Data.YAML.Aeson (decode1Strict)
import System.Directory
import System.FilePath ((</>))
import qualified System.IO as IO (Newline (..),
nativeNewline)
import Text.Read (readMaybe)
import qualified Language.Haskell.Stylish.Config.Cabal as Cabal
import Language.Haskell.Stylish.Config.Internal
import Language.Haskell.Stylish.Step
import qualified Language.Haskell.Stylish.Step.Data as Data
import qualified Language.Haskell.Stylish.Step.Imports as Imports
import qualified Language.Haskell.Stylish.Step.LanguagePragmas as LanguagePragmas
import qualified Language.Haskell.Stylish.Step.ModuleHeader as ModuleHeader
import qualified Language.Haskell.Stylish.Step.SimpleAlign as SimpleAlign
import qualified Language.Haskell.Stylish.Step.Squash as Squash
import qualified Language.Haskell.Stylish.Step.Tabs as Tabs
import qualified Language.Haskell.Stylish.Step.TrailingWhitespace as TrailingWhitespace
import qualified Language.Haskell.Stylish.Step.UnicodeSyntax as UnicodeSyntax
import Language.Haskell.Stylish.Verbose
type Extensions = [String]
data Config = Config
{ configSteps :: [Step]
, configColumns :: Maybe Int
, configLanguageExtensions :: [String]
, configNewline :: IO.Newline
, configCabal :: Bool
, configExitCode :: ExitCodeBehavior
}
data ExitCodeBehavior
= NormalExitBehavior
| ErrorOnFormatExitBehavior
deriving (Eq)
instance Show ExitCodeBehavior where
show NormalExitBehavior = "normal"
show ErrorOnFormatExitBehavior = "error_on_format"
instance FromJSON Config where
parseJSON = parseConfig
configFileName :: String
configFileName = ".stylish-haskell.yaml"
defaultConfigBytes :: B.ByteString
defaultConfigBytes = $(FileEmbed.embedFile "data/stylish-haskell.yaml")
configFilePath :: Verbose -> Maybe FilePath -> IO (Maybe FilePath)
configFilePath _ (Just userSpecified) = return (Just userSpecified)
configFilePath verbose Nothing = do
current <- getCurrentDirectory
configPath <- getXdgDirectory XdgConfig "stylish-haskell"
home <- getHomeDirectory
search verbose $
[d </> configFileName | d <- ancestors current] ++
[configPath </> "config.yaml", home </> configFileName]
search :: Verbose -> [FilePath] -> IO (Maybe FilePath)
search _ [] = return Nothing
search verbose (f : fs) = do
TODO Maybe catch an error here , might be unreadable
exists <- doesFileExist f
verbose $ f ++ if exists then " exists" else " does not exist"
if exists then return (Just f) else search verbose fs
loadConfig :: Verbose -> Maybe FilePath -> IO Config
loadConfig verbose userSpecified = do
mbFp <- configFilePath verbose userSpecified
verbose $ "Loading configuration at " ++ fromMaybe "<embedded>" mbFp
bytes <- maybe (return defaultConfigBytes) B.readFile mbFp
case decode1Strict bytes of
Left (pos, err) -> error $ prettyPosWithSource pos (fromStrict bytes) ("Language.Haskell.Stylish.Config.loadConfig: " ++ err)
Right config -> do
cabalLanguageExtensions <- if configCabal config
then map toStr <$> Cabal.findLanguageExtensions verbose
else pure []
return $ config
{ configLanguageExtensions = nub $
configLanguageExtensions config ++ cabalLanguageExtensions
}
where toStr (ext, True) = show ext
toStr (ext, False) = "No" ++ show ext
parseConfig :: A.Value -> A.Parser Config
parseConfig (A.Object o) = do
First load the config without the actual steps
config <- Config
<$> pure []
<*> (o A..:! "columns" A..!= Just 80)
<*> (o A..:? "language_extensions" A..!= [])
<*> (o A..:? "newline" >>= parseEnum newlines IO.nativeNewline)
<*> (o A..:? "cabal" A..!= True)
<*> (o A..:? "exit_code" >>= parseEnum exitCodes NormalExitBehavior)
stepValues <- o A..: "steps" :: A.Parser [A.Value]
steps <- mapM (parseSteps config) stepValues
return config {configSteps = concat steps}
where
newlines =
[ ("native", IO.nativeNewline)
, ("lf", IO.LF)
, ("crlf", IO.CRLF)
]
exitCodes =
[ ("normal", NormalExitBehavior)
, ("error_on_format", ErrorOnFormatExitBehavior)
]
parseConfig _ = mzero
catalog :: Map String (Config -> A.Object -> A.Parser Step)
catalog = M.fromList
[ ("imports", parseImports)
, ("module_header", parseModuleHeader)
, ("records", parseRecords)
, ("language_pragmas", parseLanguagePragmas)
, ("simple_align", parseSimpleAlign)
, ("squash", parseSquash)
, ("tabs", parseTabs)
, ("trailing_whitespace", parseTrailingWhitespace)
, ("unicode_syntax", parseUnicodeSyntax)
]
parseSteps :: Config -> A.Value -> A.Parser [Step]
parseSteps config val = do
map' <- parseJSON val :: A.Parser (Map String A.Value)
forM (M.toList map') $ \(k, v) -> case (M.lookup k catalog, v) of
(Just parser, A.Object o) -> parser config o
_ -> fail $ "Invalid declaration for " ++ k
parseEnum :: [(String, a)] -> a -> Maybe String -> A.Parser a
parseEnum _ def Nothing = return def
parseEnum strs _ (Just k) = case lookup k strs of
Just v -> return v
Nothing -> fail $ "Unknown option: " ++ k ++ ", should be one of: " ++
intercalate ", " (map fst strs)
parseModuleHeader :: Config -> A.Object -> A.Parser Step
parseModuleHeader config o = fmap (ModuleHeader.step columns) $ ModuleHeader.Config
<$> (o A..:? "indent" A..!= ModuleHeader.indent def)
<*> (o A..:? "sort" A..!= ModuleHeader.sort def)
<*> (o A..:? "separate_lists" A..!= ModuleHeader.separateLists def)
<*> (o A..:? "break_where" >>= parseEnum breakWhere (ModuleHeader.breakWhere def))
<*> (o A..:? "open_bracket" >>= parseEnum openBracket (ModuleHeader.openBracket def))
where
def = ModuleHeader.defaultConfig
columns = configColumns config
breakWhere =
[ ("exports", ModuleHeader.Exports)
, ("single", ModuleHeader.Single)
, ("inline", ModuleHeader.Inline)
, ("always", ModuleHeader.Always)
]
openBracket =
[ ("same_line", ModuleHeader.SameLine)
, ("next_line", ModuleHeader.NextLine)
]
parseSimpleAlign :: Config -> A.Object -> A.Parser Step
parseSimpleAlign c o = SimpleAlign.step
<$> pure (configColumns c)
<*> (SimpleAlign.Config
<$> parseAlign "cases" SimpleAlign.cCases
<*> parseAlign "top_level_patterns" SimpleAlign.cTopLevelPatterns
<*> parseAlign "records" SimpleAlign.cRecords
<*> parseAlign "multi_way_if" SimpleAlign.cMultiWayIf)
where
parseAlign key f =
(o A..:? key >>= parseEnum aligns (f SimpleAlign.defaultConfig)) <|>
(boolToAlign <$> o A..: key)
aligns =
[ ("always", SimpleAlign.Always)
, ("adjacent", SimpleAlign.Adjacent)
, ("never", SimpleAlign.Never)
]
boolToAlign True = SimpleAlign.Always
boolToAlign False = SimpleAlign.Never
parseRecords :: Config -> A.Object -> A.Parser Step
parseRecords c o = Data.step
<$> (Data.Config
<$> (o A..: "equals" >>= parseIndent)
<*> (o A..: "first_field" >>= parseIndent)
<*> (o A..: "field_comment")
<*> (o A..: "deriving")
<*> (o A..:? "break_enums" A..!= False)
<*> (o A..:? "break_single_constructors" A..!= True)
<*> (o A..: "via" >>= parseIndent)
<*> (o A..:? "curried_context" A..!= False)
<*> (o A..:? "sort_deriving" A..!= True)
<*> pure configMaxColumns)
where
configMaxColumns =
maybe Data.NoMaxColumns Data.MaxColumns (configColumns c)
parseIndent :: A.Value -> A.Parser Data.Indent
parseIndent = \case
A.String "same_line" -> return Data.SameLine
A.String t | "indent " `T.isPrefixOf` t ->
case readMaybe (T.unpack $ T.drop 7 t) of
Just n -> return $ Data.Indent n
Nothing -> fail $ "Indent: not a number" <> T.unpack (T.drop 7 t)
A.String t -> fail $ "can't parse indent setting: " <> T.unpack t
_ -> fail "Expected string for indent value"
parseSquash :: Config -> A.Object -> A.Parser Step
parseSquash _ _ = return Squash.step
parseImports :: Config -> A.Object -> A.Parser Step
parseImports config o = fmap (Imports.step columns) $ Imports.Options
<$> (o A..:? "align" >>= parseEnum aligns (def Imports.importAlign))
<*> (o A..:? "list_align" >>= parseEnum listAligns (def Imports.listAlign))
<*> (o A..:? "pad_module_names" A..!= def Imports.padModuleNames)
<*> (o A..:? "long_list_align" >>= parseEnum longListAligns (def Imports.longListAlign))
<*> (o A..:? "empty_list_align" >>= parseEnum emptyListAligns (def Imports.emptyListAlign))
Note that padding has to be at least 1 . Default is 4 .
<*> (o A..:? "list_padding" >>= maybe (pure $ def Imports.listPadding) parseListPadding)
<*> o A..:? "separate_lists" A..!= def Imports.separateLists
<*> o A..:? "space_surround" A..!= def Imports.spaceSurround
<*> o A..:? "post_qualify" A..!= def Imports.postQualified
<*> o A..:? "group_imports" A..!= def Imports.groupImports
<*> o A..:? "group_rules" A..!= def Imports.groupRules
where
def f = f Imports.defaultOptions
columns = configColumns config
aligns =
[ ("global", Imports.Global)
, ("file", Imports.File)
, ("group", Imports.Group)
, ("none", Imports.None)
]
listAligns =
[ ("new_line", Imports.NewLine)
, ("with_module_name", Imports.WithModuleName)
, ("with_alias", Imports.WithAlias)
, ("after_alias", Imports.AfterAlias)
, ("repeat", Imports.Repeat)
]
longListAligns =
[ ("inline", Imports.Inline)
, ("new_line", Imports.InlineWithBreak)
, ("new_line_multiline", Imports.InlineToMultiline)
, ("multiline", Imports.Multiline)
]
emptyListAligns =
[ ("inherit", Imports.Inherit)
, ("right_after", Imports.RightAfter)
]
parseListPadding = \case
A.String "module_name" -> pure Imports.LPModuleName
A.Number n | n >= 1 -> pure $ Imports.LPConstant (truncate n)
v -> A.typeMismatch "'module_name' or >=1 number" v
parseLanguagePragmas :: Config -> A.Object -> A.Parser Step
parseLanguagePragmas config o = LanguagePragmas.step
<$> pure (configColumns config)
<*> (o A..:? "style" >>= parseEnum styles LanguagePragmas.Vertical)
<*> o A..:? "align" A..!= True
<*> o A..:? "remove_redundant" A..!= True
<*> mkLanguage o
where
styles =
[ ("vertical", LanguagePragmas.Vertical)
, ("compact", LanguagePragmas.Compact)
, ("compact_line", LanguagePragmas.CompactLine)
, ("vertical_compact", LanguagePragmas.VerticalCompact)
]
mkLanguage :: A.Object -> A.Parser String
mkLanguage o = do
lang <- o A..:? "language_prefix"
maybe (pure "LANGUAGE") validate lang
where
validate :: String -> A.Parser String
validate s
| fmap toLower s == "language" = pure s
| otherwise = fail "please provide a valid language prefix"
parseTabs :: Config -> A.Object -> A.Parser Step
parseTabs _ o = Tabs.step
<$> o A..:? "spaces" A..!= 8
parseTrailingWhitespace :: Config -> A.Object -> A.Parser Step
parseTrailingWhitespace _ _ = return TrailingWhitespace.step
parseUnicodeSyntax :: Config -> A.Object -> A.Parser Step
parseUnicodeSyntax _ o = UnicodeSyntax.step
<$> o A..:? "add_language_pragma" A..!= True
<*> mkLanguage o
|
f1a8d2fb00f2074a6fd9cb0dd56b7510b9c16743d93bc16416ea0a2a414c874c | reflex-frp/reflex-todomvc | warp.hs | import qualified Reflex.TodoMVC
import Language.Javascript.JSaddle.Warp (run)
main :: IO ()
main = run 3702 Reflex.TodoMVC.main
| null | https://raw.githubusercontent.com/reflex-frp/reflex-todomvc/3facdd7fc1cc585611012c6fef1cafb77a2dfa7a/src-bin/warp.hs | haskell | import qualified Reflex.TodoMVC
import Language.Javascript.JSaddle.Warp (run)
main :: IO ()
main = run 3702 Reflex.TodoMVC.main
| |
505933a2a65f0175b451b2f1841ef970753f968ca9718ebbaed85435cd55cc9b | fukamachi/fukacl | list.lisp | (in-package :fukacl)
(defun flatten (x)
(labels ((rec (x acc)
(cond ((null x) acc)
((atom x) (cons x acc))
(t (rec
(car x)
(rec (cdr x) acc))))))
(rec x nil)))
(defun group (source n)
(if (not (listp source)) (error "group: not list"))
(if (zerop n) (error "zero length"))
(labels ((rec (source acc)
(let ((rest (nthcdr n source)))
(if (consp rest)
(rec rest (cons
(subseq source 0 n)
acc))
(nreverse
(cons source acc))))))
(if source (rec source nil) nil)))
(defun sort* (seq pred &key key)
(sort (copy-seq seq) pred :key key))
(defun mappend (f &rest lists)
(apply #'append (apply #'mapcar f lists)))
(defun n.. (start end)
(loop for i from start upto end collect i))
(defun 1.. (end)
(n.. 1 end))
(defun 0.. (end)
(n.. 0 end))
| null | https://raw.githubusercontent.com/fukamachi/fukacl/c633005755c4273dbee3aef54a8d86a8fbecc6f5/list.lisp | lisp | (in-package :fukacl)
(defun flatten (x)
(labels ((rec (x acc)
(cond ((null x) acc)
((atom x) (cons x acc))
(t (rec
(car x)
(rec (cdr x) acc))))))
(rec x nil)))
(defun group (source n)
(if (not (listp source)) (error "group: not list"))
(if (zerop n) (error "zero length"))
(labels ((rec (source acc)
(let ((rest (nthcdr n source)))
(if (consp rest)
(rec rest (cons
(subseq source 0 n)
acc))
(nreverse
(cons source acc))))))
(if source (rec source nil) nil)))
(defun sort* (seq pred &key key)
(sort (copy-seq seq) pred :key key))
(defun mappend (f &rest lists)
(apply #'append (apply #'mapcar f lists)))
(defun n.. (start end)
(loop for i from start upto end collect i))
(defun 1.. (end)
(n.. 1 end))
(defun 0.. (end)
(n.. 0 end))
| |
74012c0970e93a5886172772eb56496d124b09650bde33f6e1c3389bdbf666f2 | pedestal/samples | start.cljs | Copyright 2013 Relevance , Inc.
; The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 ( )
; which can be found in the file epl-v10.html at the root of this distribution.
;
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
;
; You must not remove this notice, or any other, from this software.
(ns helloworld-app.start
(:require [io.pedestal.app.protocols :as p]
[io.pedestal.app :as app]
[io.pedestal.app.render.push :as push-render]
[io.pedestal.app.render :as render]
[io.pedestal.app.messages :as msg]
[domina :as dom]))
(defn inc-transform [old-value _]
((fnil inc 0) old-value))
(def count-app {:version 2
:transform [[:inc [:count] inc-transform]]})
(defn receive-input [input-queue]
(p/put-message input-queue {msg/topic [:count] msg/type :inc})
(.setTimeout js/window #(receive-input input-queue) 3000))
(defn create-app [render-config]
(let [app (app/build count-app)
render-fn (push-render/renderer "content" render-config render/log-fn)
app-model (render/consume-app-model app render-fn)]
(app/begin app)
(receive-input (:input app))
{:app app :app-model app-model}))
(defn render-value [r [_ _ old-value new-value] input-queue]
(dom/destroy-children! (dom/by-id "content"))
(dom/append! (dom/by-id "content")
(str "<h1>" new-value " Hello Worlds</h1>")))
(defn render-config []
[[:value [:**] render-value]])
(defn ^:export main []
(create-app (render-config)))
| null | https://raw.githubusercontent.com/pedestal/samples/caaf04afe255586f8f4e1235deeb0c1904179355/helloworld-app/app/src/helloworld_app/start.cljs | clojure | The use and distribution terms for this software are covered by the
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software. | Copyright 2013 Relevance , Inc.
Eclipse Public License 1.0 ( )
(ns helloworld-app.start
(:require [io.pedestal.app.protocols :as p]
[io.pedestal.app :as app]
[io.pedestal.app.render.push :as push-render]
[io.pedestal.app.render :as render]
[io.pedestal.app.messages :as msg]
[domina :as dom]))
(defn inc-transform [old-value _]
((fnil inc 0) old-value))
(def count-app {:version 2
:transform [[:inc [:count] inc-transform]]})
(defn receive-input [input-queue]
(p/put-message input-queue {msg/topic [:count] msg/type :inc})
(.setTimeout js/window #(receive-input input-queue) 3000))
(defn create-app [render-config]
(let [app (app/build count-app)
render-fn (push-render/renderer "content" render-config render/log-fn)
app-model (render/consume-app-model app render-fn)]
(app/begin app)
(receive-input (:input app))
{:app app :app-model app-model}))
(defn render-value [r [_ _ old-value new-value] input-queue]
(dom/destroy-children! (dom/by-id "content"))
(dom/append! (dom/by-id "content")
(str "<h1>" new-value " Hello Worlds</h1>")))
(defn render-config []
[[:value [:**] render-value]])
(defn ^:export main []
(create-app (render-config)))
|
2e8499a11e0c5d461e2d005aa0e2a3c757582d04603f51791f5e23e2dd191840 | danielsz/certificaat | authorization.clj | (ns certificaat.acme4j.authorization
(:require [clojure.tools.logging :as log]
[certificaat.domain :refer [Certificaat]]
[certificaat.utils :refer [load-url]])
(:import [org.shredzone.acme4j Authorization]
[org.shredzone.acme4j.challenge Http01Challenge Dns01Challenge]
org.shredzone.acme4j.Status
org.shredzone.acme4j.exception.AcmeProtocolException))
(defn delete [auth]
(.deactivate auth))
(defn restore [login path]
(.bindAuthorization login (load-url path)))
(extend-type Authorization
Certificaat
(valid? [this]
(log/debug "Authorization status:" (.getStatus this))
(= Status/VALID (.getStatus this)))
(invalid? [this]
(log/debug "Authorization status:" (.getStatus this))
(= Status/INVALID (.getStatus this)))
(pending? [this]
(log/debug "Authorization status:" (.getStatus this))
(= Status/PENDING (.getStatus this)))
(deactivated? [this]
(log/debug "Authorization status:" (.getStatus this))
(= Status/DEACTIVATED (.getStatus this)))
(expired? [this]
(log/debug "Authorization status:" (.getStatus this))
(= Status/EXPIRED (.getStatus this)))
(revoked? [this]
(log/debug "Authorization status:" (.getStatus this))
(= Status/REVOKED (.getStatus this)))
(marshal [this path]
(spit path (.getLocation this))))
| null | https://raw.githubusercontent.com/danielsz/certificaat/955429f6303da9a687cf636d93437281f43ddb71/src/certificaat/acme4j/authorization.clj | clojure | (ns certificaat.acme4j.authorization
(:require [clojure.tools.logging :as log]
[certificaat.domain :refer [Certificaat]]
[certificaat.utils :refer [load-url]])
(:import [org.shredzone.acme4j Authorization]
[org.shredzone.acme4j.challenge Http01Challenge Dns01Challenge]
org.shredzone.acme4j.Status
org.shredzone.acme4j.exception.AcmeProtocolException))
(defn delete [auth]
(.deactivate auth))
(defn restore [login path]
(.bindAuthorization login (load-url path)))
(extend-type Authorization
Certificaat
(valid? [this]
(log/debug "Authorization status:" (.getStatus this))
(= Status/VALID (.getStatus this)))
(invalid? [this]
(log/debug "Authorization status:" (.getStatus this))
(= Status/INVALID (.getStatus this)))
(pending? [this]
(log/debug "Authorization status:" (.getStatus this))
(= Status/PENDING (.getStatus this)))
(deactivated? [this]
(log/debug "Authorization status:" (.getStatus this))
(= Status/DEACTIVATED (.getStatus this)))
(expired? [this]
(log/debug "Authorization status:" (.getStatus this))
(= Status/EXPIRED (.getStatus this)))
(revoked? [this]
(log/debug "Authorization status:" (.getStatus this))
(= Status/REVOKED (.getStatus this)))
(marshal [this path]
(spit path (.getLocation this))))
| |
b188f5cadb084bd6c4d5b9ed20d79ecce902975a9cd3bf79eb833cd9052e3fc5 | expipiplus1/vulkan | VK_NVX_multiview_per_view_attributes.hs | {-# language CPP #-}
-- | = Name
--
-- VK_NVX_multiview_per_view_attributes - device extension
--
-- == VK_NVX_multiview_per_view_attributes
--
-- [__Name String__]
-- @VK_NVX_multiview_per_view_attributes@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
98
--
-- [__Revision__]
1
--
-- [__Extension and Version Dependencies__]
--
- Requires support for Vulkan 1.0
--
-- - Requires @VK_KHR_multiview@ to be enabled for any device-level
-- functionality
--
-- [__Contact__]
--
-
-- <-Docs/issues/new?body=[VK_NVX_multiview_per_view_attributes] @jeffbolznv%0A*Here describe the issue or question you have about the VK_NVX_multiview_per_view_attributes extension* >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
2017 - 01 - 13
--
-- [__IP Status__]
-- No known IP claims.
--
-- [__Interactions and External Dependencies__]
--
-- - This extension requires
-- <-Registry/blob/master/extensions/NV/SPV_NVX_multiview_per_view_attributes.html SPV_NVX_multiview_per_view_attributes>
--
-- - This extension provides API support for
-- < GL_NVX_multiview_per_view_attributes>
--
-- - This extension interacts with @VK_NV_viewport_array2@.
--
-- [__Contributors__]
--
- , NVIDIA
--
- , NVIDIA
--
-- == Description
--
-- This extension adds a new way to write shaders to be used with multiview
-- subpasses, where the attributes for all views are written out by a
-- single invocation of the
-- <-extensions/html/vkspec.html#pipelines-graphics-subsets-pre-rasterization pre-rasterization shader stages>.
Related SPIR - V and GLSL extensions
@SPV_NVX_multiview_per_view_attributes@ and
-- @GL_NVX_multiview_per_view_attributes@ introduce per-view position and
-- viewport mask attributes arrays, and this extension defines how those
per - view attribute arrays are interpreted by Vulkan . Pipelines using
-- per-view attributes /may/ only execute the
-- <-extensions/html/vkspec.html#pipelines-graphics-subsets-pre-rasterization pre-rasterization shader stages>
-- once for all views rather than once per-view, which reduces redundant
-- shading work.
--
A subpass creation flag controls whether the subpass uses this
extension . A subpass /must/ either exclusively use this extension or not
-- use it at all.
--
Some Vulkan implementations only support the position attribute varying
between views in the X component . A subpass can declare via a second
creation flag whether all pipelines compiled for this subpass will obey
-- this restriction.
--
Shaders that use the new per - view outputs ( e.g. )
/must/ also write the non - per - view output ( @gl_Position@ ) , and the
-- values written /must/ be such that
-- @gl_Position = gl_PositionPerViewNV[gl_ViewIndex]@ for all views in the
subpass . Implementations are free to either use the per - view outputs or
-- the non-per-view outputs, whichever would be more efficient.
--
-- If @VK_NV_viewport_array2@ is not also supported and enabled, the
-- per-view viewport mask /must/ not be used.
--
-- == New Structures
--
-- - Extending
' Vulkan . ' :
--
-- - 'PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX'
--
-- == New Enum Constants
--
-- - 'NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME'
--
-- - 'NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION'
--
- Extending ' Vulkan . Core10.Enums . StructureType . StructureType ' :
--
- ' Vulkan . Core10.Enums . StructureType . STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX '
--
-- - Extending
' Vulkan . Core10.Enums . SubpassDescriptionFlagBits . SubpassDescriptionFlagBits ' :
--
- ' Vulkan . Core10.Enums . SubpassDescriptionFlagBits . SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX '
--
- ' Vulkan . Core10.Enums . SubpassDescriptionFlagBits . SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX '
--
-- == New Built-In Variables
--
-- - <-extensions/html/vkspec.html#interfaces-builtin-variables-positionperview PositionPerViewNV>
--
-- - <-extensions/html/vkspec.html#interfaces-builtin-variables-viewportmaskperview ViewportMaskPerViewNV>
--
-- == New SPIR-V Capabilities
--
-- - <-extensions/html/vkspec.html#spirvenv-capabilities-table-PerViewAttributesNV PerViewAttributesNV>
--
-- == Examples
--
> # version 450 core
-- >
-- > #extension GL_KHX_multiview : enable
> # extension GL_NVX_multiview_per_view_attributes : enable
-- >
-- > layout(location = 0) in vec4 position;
> layout(set = 0 , binding = 0 ) uniform Block { mat4 mvpPerView[2 ] ; } buf ;
-- >
-- > void main()
-- > {
> // Output both per - view positions and gl_Position as a function
-- > // of gl_ViewIndex
-- > gl_PositionPerViewNV[0] = buf.mvpPerView[0] * position;
-- > gl_PositionPerViewNV[1] = buf.mvpPerView[1] * position;
-- > gl_Position = buf.mvpPerView[gl_ViewIndex] * position;
-- > }
--
-- == Version History
--
- Revision 1 , 2017 - 01 - 13 ( )
--
-- - Internal revisions
--
-- == See Also
--
-- 'PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX'
--
-- == Document Notes
--
-- For more information, see the
-- <-extensions/html/vkspec.html#VK_NVX_multiview_per_view_attributes Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_NVX_multiview_per_view_attributes ( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(..)
, NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION
, pattern NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION
, NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME
, pattern NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME
) where
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero(..))
import Data.String (IsString)
import Data.Typeable (Typeable)
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import Foreign.Ptr (Ptr)
import Data.Kind (Type)
import Vulkan.Core10.FundamentalTypes (bool32ToBool)
import Vulkan.Core10.FundamentalTypes (boolToBool32)
import Vulkan.Core10.FundamentalTypes (Bool32)
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX))
-- | VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX - Structure
-- describing multiview limits that can be supported by an implementation
--
-- = Description
--
-- If the 'PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX' structure
-- is included in the @pNext@ chain of the
' Vulkan . '
-- structure passed to
' Vulkan . Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceProperties2 ' ,
-- it is filled in with each corresponding implementation-dependent
-- property.
--
-- == Valid Usage (Implicit)
--
-- = See Also
--
< -extensions/html/vkspec.html#VK_NVX_multiview_per_view_attributes VK_NVX_multiview_per_view_attributes > ,
' Vulkan . Core10.FundamentalTypes . Bool32 ' ,
' Vulkan . Core10.Enums . StructureType . StructureType '
data PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX = PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
| # limits - perViewPositionAllComponents # @perViewPositionAllComponents@ is
' Vulkan . Core10.FundamentalTypes . TRUE ' if the implementation supports
-- per-view position values that differ in components other than the X
-- component.
perViewPositionAllComponents :: Bool }
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX)
#endif
deriving instance Show PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
instance ToCStruct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX where
withCStruct x f = allocaBytes 24 $ \p -> pokeCStruct p x (f p)
pokeCStruct p PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (perViewPositionAllComponents))
f
cStructSize = 24
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (zero))
f
instance FromCStruct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX where
peekCStruct p = do
perViewPositionAllComponents <- peek @Bool32 ((p `plusPtr` 16 :: Ptr Bool32))
pure $ PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
(bool32ToBool perViewPositionAllComponents)
instance Storable PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX where
sizeOf ~_ = 24
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX where
zero = PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
zero
type NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION = 1
No documentation found for TopLevel " VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION "
pattern NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION :: forall a . Integral a => a
pattern NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION = 1
type NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME = "VK_NVX_multiview_per_view_attributes"
No documentation found for TopLevel " VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME "
pattern NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME = "VK_NVX_multiview_per_view_attributes"
| null | https://raw.githubusercontent.com/expipiplus1/vulkan/ebc0dde0bcd9cf251f18538de6524eb4f2ab3e9d/src/Vulkan/Extensions/VK_NVX_multiview_per_view_attributes.hs | haskell | # language CPP #
| = Name
VK_NVX_multiview_per_view_attributes - device extension
== VK_NVX_multiview_per_view_attributes
[__Name String__]
@VK_NVX_multiview_per_view_attributes@
[__Extension Type__]
Device extension
[__Registered Extension Number__]
[__Revision__]
[__Extension and Version Dependencies__]
- Requires @VK_KHR_multiview@ to be enabled for any device-level
functionality
[__Contact__]
<-Docs/issues/new?body=[VK_NVX_multiview_per_view_attributes] @jeffbolznv%0A*Here describe the issue or question you have about the VK_NVX_multiview_per_view_attributes extension* >
== Other Extension Metadata
[__Last Modified Date__]
[__IP Status__]
No known IP claims.
[__Interactions and External Dependencies__]
- This extension requires
<-Registry/blob/master/extensions/NV/SPV_NVX_multiview_per_view_attributes.html SPV_NVX_multiview_per_view_attributes>
- This extension provides API support for
< GL_NVX_multiview_per_view_attributes>
- This extension interacts with @VK_NV_viewport_array2@.
[__Contributors__]
== Description
This extension adds a new way to write shaders to be used with multiview
subpasses, where the attributes for all views are written out by a
single invocation of the
<-extensions/html/vkspec.html#pipelines-graphics-subsets-pre-rasterization pre-rasterization shader stages>.
@GL_NVX_multiview_per_view_attributes@ introduce per-view position and
viewport mask attributes arrays, and this extension defines how those
per-view attributes /may/ only execute the
<-extensions/html/vkspec.html#pipelines-graphics-subsets-pre-rasterization pre-rasterization shader stages>
once for all views rather than once per-view, which reduces redundant
shading work.
use it at all.
this restriction.
values written /must/ be such that
@gl_Position = gl_PositionPerViewNV[gl_ViewIndex]@ for all views in the
the non-per-view outputs, whichever would be more efficient.
If @VK_NV_viewport_array2@ is not also supported and enabled, the
per-view viewport mask /must/ not be used.
== New Structures
- Extending
- 'PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX'
== New Enum Constants
- 'NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME'
- 'NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION'
- Extending
== New Built-In Variables
- <-extensions/html/vkspec.html#interfaces-builtin-variables-positionperview PositionPerViewNV>
- <-extensions/html/vkspec.html#interfaces-builtin-variables-viewportmaskperview ViewportMaskPerViewNV>
== New SPIR-V Capabilities
- <-extensions/html/vkspec.html#spirvenv-capabilities-table-PerViewAttributesNV PerViewAttributesNV>
== Examples
>
> #extension GL_KHX_multiview : enable
>
> layout(location = 0) in vec4 position;
>
> void main()
> {
> // of gl_ViewIndex
> gl_PositionPerViewNV[0] = buf.mvpPerView[0] * position;
> gl_PositionPerViewNV[1] = buf.mvpPerView[1] * position;
> gl_Position = buf.mvpPerView[gl_ViewIndex] * position;
> }
== Version History
- Internal revisions
== See Also
'PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX'
== Document Notes
For more information, see the
<-extensions/html/vkspec.html#VK_NVX_multiview_per_view_attributes Vulkan Specification>
This page is a generated document. Fixes and changes should be made to
the generator scripts, not directly.
| VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX - Structure
describing multiview limits that can be supported by an implementation
= Description
If the 'PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX' structure
is included in the @pNext@ chain of the
structure passed to
it is filled in with each corresponding implementation-dependent
property.
== Valid Usage (Implicit)
= See Also
per-view position values that differ in components other than the X
component. | 98
1
- Requires support for Vulkan 1.0
-
2017 - 01 - 13
- , NVIDIA
- , NVIDIA
Related SPIR - V and GLSL extensions
@SPV_NVX_multiview_per_view_attributes@ and
per - view attribute arrays are interpreted by Vulkan . Pipelines using
A subpass creation flag controls whether the subpass uses this
extension . A subpass /must/ either exclusively use this extension or not
Some Vulkan implementations only support the position attribute varying
between views in the X component . A subpass can declare via a second
creation flag whether all pipelines compiled for this subpass will obey
Shaders that use the new per - view outputs ( e.g. )
/must/ also write the non - per - view output ( @gl_Position@ ) , and the
subpass . Implementations are free to either use the per - view outputs or
' Vulkan . ' :
- Extending ' Vulkan . Core10.Enums . StructureType . StructureType ' :
- ' Vulkan . Core10.Enums . StructureType . STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX '
' Vulkan . Core10.Enums . SubpassDescriptionFlagBits . SubpassDescriptionFlagBits ' :
- ' Vulkan . Core10.Enums . SubpassDescriptionFlagBits . SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX '
- ' Vulkan . Core10.Enums . SubpassDescriptionFlagBits . SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX '
> # version 450 core
> # extension GL_NVX_multiview_per_view_attributes : enable
> layout(set = 0 , binding = 0 ) uniform Block { mat4 mvpPerView[2 ] ; } buf ;
> // Output both per - view positions and gl_Position as a function
- Revision 1 , 2017 - 01 - 13 ( )
module Vulkan.Extensions.VK_NVX_multiview_per_view_attributes ( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(..)
, NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION
, pattern NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION
, NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME
, pattern NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME
) where
import Foreign.Marshal.Alloc (allocaBytes)
import Foreign.Ptr (nullPtr)
import Foreign.Ptr (plusPtr)
import Vulkan.CStruct (FromCStruct)
import Vulkan.CStruct (FromCStruct(..))
import Vulkan.CStruct (ToCStruct)
import Vulkan.CStruct (ToCStruct(..))
import Vulkan.Zero (Zero(..))
import Data.String (IsString)
import Data.Typeable (Typeable)
import Foreign.Storable (Storable)
import Foreign.Storable (Storable(peek))
import Foreign.Storable (Storable(poke))
import qualified Foreign.Storable (Storable(..))
import GHC.Generics (Generic)
import Foreign.Ptr (Ptr)
import Data.Kind (Type)
import Vulkan.Core10.FundamentalTypes (bool32ToBool)
import Vulkan.Core10.FundamentalTypes (boolToBool32)
import Vulkan.Core10.FundamentalTypes (Bool32)
import Vulkan.Core10.Enums.StructureType (StructureType)
import Vulkan.Core10.Enums.StructureType (StructureType(STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX))
' Vulkan . '
' Vulkan . Core11.Promoted_From_VK_KHR_get_physical_device_properties2.getPhysicalDeviceProperties2 ' ,
< -extensions/html/vkspec.html#VK_NVX_multiview_per_view_attributes VK_NVX_multiview_per_view_attributes > ,
' Vulkan . Core10.FundamentalTypes . Bool32 ' ,
' Vulkan . Core10.Enums . StructureType . StructureType '
data PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX = PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
| # limits - perViewPositionAllComponents # @perViewPositionAllComponents@ is
' Vulkan . Core10.FundamentalTypes . TRUE ' if the implementation supports
perViewPositionAllComponents :: Bool }
deriving (Typeable, Eq)
#if defined(GENERIC_INSTANCES)
deriving instance Generic (PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX)
#endif
deriving instance Show PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
instance ToCStruct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX where
withCStruct x f = allocaBytes 24 $ \p -> pokeCStruct p x (f p)
pokeCStruct p PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX{..} f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (perViewPositionAllComponents))
f
cStructSize = 24
cStructAlignment = 8
pokeZeroCStruct p f = do
poke ((p `plusPtr` 0 :: Ptr StructureType)) (STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX)
poke ((p `plusPtr` 8 :: Ptr (Ptr ()))) (nullPtr)
poke ((p `plusPtr` 16 :: Ptr Bool32)) (boolToBool32 (zero))
f
instance FromCStruct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX where
peekCStruct p = do
perViewPositionAllComponents <- peek @Bool32 ((p `plusPtr` 16 :: Ptr Bool32))
pure $ PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
(bool32ToBool perViewPositionAllComponents)
instance Storable PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX where
sizeOf ~_ = 24
alignment ~_ = 8
peek = peekCStruct
poke ptr poked = pokeCStruct ptr poked (pure ())
instance Zero PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX where
zero = PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
zero
type NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION = 1
No documentation found for TopLevel " VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION "
pattern NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION :: forall a . Integral a => a
pattern NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION = 1
type NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME = "VK_NVX_multiview_per_view_attributes"
No documentation found for TopLevel " VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME "
pattern NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME = "VK_NVX_multiview_per_view_attributes"
|
55c285d2fed324df831ff03c5afb1b7fe7bc3c145abf978369d56cfa69d2d4f5 | Eventuria/demonstration-gsd | Event.hs | # LANGUAGE InstanceSigs , TypeApplications #
module Eventuria.GSD.Write.Model.Events.PropertyTesting.Event where
import Test.QuickCheck
import Test.QuickCheck.Instances.Text ()
import Test.QuickCheck.Instances.Time ()
import Test.QuickCheck.Instances.UUID ()
import Generic.Random
import Eventuria.GSD.Write.Model.Events.Event
instance Arbitrary GsdEvent where
arbitrary :: Gen GsdEvent
arbitrary = genericArbitraryU
| null | https://raw.githubusercontent.com/Eventuria/demonstration-gsd/5c7692b310086bc172d3fd4e1eaf09ae51ea468f/test/Eventuria/GSD/Write/Model/Events/PropertyTesting/Event.hs | haskell | # LANGUAGE InstanceSigs , TypeApplications #
module Eventuria.GSD.Write.Model.Events.PropertyTesting.Event where
import Test.QuickCheck
import Test.QuickCheck.Instances.Text ()
import Test.QuickCheck.Instances.Time ()
import Test.QuickCheck.Instances.UUID ()
import Generic.Random
import Eventuria.GSD.Write.Model.Events.Event
instance Arbitrary GsdEvent where
arbitrary :: Gen GsdEvent
arbitrary = genericArbitraryU
| |
2fb9b218fb3173576d664a5ccd4ac0effa8c2418c0882d9138503d4017bdd0e4 | cloudant/fabric | fabric_doc_attachments.erl | Copyright 2010 Cloudant
%
Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% -2.0
%
% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(fabric_doc_attachments).
-include("fabric.hrl").
-include_lib("couch/include/couch_db.hrl").
%% couch api calls
-export([receiver/2]).
receiver(_Req, undefined) ->
<<"">>;
receiver(_Req, {unknown_transfer_encoding, Unknown}) ->
exit({unknown_transfer_encoding, Unknown});
receiver(Req, chunked) ->
MiddleMan = spawn(fun() -> middleman(Req, chunked) end),
fun(4096, ChunkFun, ok) ->
write_chunks(MiddleMan, ChunkFun)
end;
receiver(_Req, 0) ->
<<"">>;
receiver(Req, Length) when is_integer(Length) ->
maybe_send_continue(Req),
Middleman = spawn(fun() -> middleman(Req, Length) end),
fun() ->
Middleman ! {self(), gimme_data},
Timeout = fabric_util:attachments_timeout(),
receive
{Middleman, Data} ->
rexi:reply(attachment_chunk_received),
iolist_to_binary(Data)
after Timeout ->
exit(timeout)
end
end;
receiver(_Req, Length) ->
exit({length_not_integer, Length}).
%%
%% internal
%%
maybe_send_continue(#httpd{mochi_req = MochiReq} = Req) ->
case couch_httpd:header_value(Req, "expect") of
undefined ->
ok;
Expect ->
case string:to_lower(Expect) of
"100-continue" ->
MochiReq:start_raw_response({100, gb_trees:empty()});
_ ->
ok
end
end.
write_chunks(MiddleMan, ChunkFun) ->
MiddleMan ! {self(), gimme_data},
Timeout = fabric_util:attachments_timeout(),
receive
{MiddleMan, ChunkRecordList} ->
rexi:reply(attachment_chunk_received),
case flush_chunks(ChunkRecordList, ChunkFun) of
continue -> write_chunks(MiddleMan, ChunkFun);
done -> ok
end
after Timeout ->
exit(timeout)
end.
flush_chunks([], _ChunkFun) ->
continue;
flush_chunks([{0, _}], _ChunkFun) ->
done;
flush_chunks([Chunk | Rest], ChunkFun) ->
ChunkFun(Chunk, ok),
flush_chunks(Rest, ChunkFun).
receive_unchunked_attachment(_Req, 0) ->
ok;
receive_unchunked_attachment(Req, Length) ->
receive {MiddleMan, go} ->
Data = couch_httpd:recv(Req, 0),
MiddleMan ! {self(), Data}
end,
receive_unchunked_attachment(Req, Length - size(Data)).
middleman(Req, chunked) ->
% spawn a process to actually receive the uploaded data
RcvFun = fun(ChunkRecord, ok) ->
receive {From, go} -> From ! {self(), ChunkRecord} end, ok
end,
Receiver = spawn(fun() -> couch_httpd:recv_chunked(Req,4096,RcvFun,ok) end),
% take requests from the DB writers and get data from the receiver
N = erlang:list_to_integer(config:get("cluster","n")),
Timeout = fabric_util:request_timeout(),
middleman_loop(Receiver, N, [], [], Timeout);
middleman(Req, Length) ->
Receiver = spawn(fun() -> receive_unchunked_attachment(Req, Length) end),
N = erlang:list_to_integer(config:get("cluster","n")),
Timeout = fabric_util:request_timeout(),
middleman_loop(Receiver, N, [], [], Timeout).
middleman_loop(Receiver, N, Counters0, ChunkList0, Timeout) ->
receive {From, gimme_data} ->
% Figure out how far along this writer (From) is in the list
ListIndex = case fabric_dict:lookup_element(From, Counters0) of
undefined -> 0;
I -> I
end,
% Talk to the receiver to get another chunk if necessary
ChunkList1 = if ListIndex == length(ChunkList0) ->
Receiver ! {self(), go},
receive
{Receiver, ChunkRecord} ->
ChunkList0 ++ [ChunkRecord]
end;
true -> ChunkList0 end,
% reply to the writer
Reply = lists:nthtail(ListIndex, ChunkList1),
From ! {self(), Reply},
% Update the counter for this writer
Counters1 = fabric_dict:update_counter(From, length(Reply), Counters0),
% Drop any chunks that have been sent to all writers
Size = fabric_dict:size(Counters1),
NumToDrop = lists:min([I || {_, I} <- Counters1]),
{ChunkList3, Counters3} =
if Size == N andalso NumToDrop > 0 ->
ChunkList2 = lists:nthtail(NumToDrop, ChunkList1),
Counters2 = [{F, I-NumToDrop} || {F, I} <- Counters1],
{ChunkList2, Counters2};
true ->
{ChunkList1, Counters1}
end,
middleman_loop(Receiver, N, Counters3, ChunkList3, Timeout)
after Timeout ->
ok
end.
| null | https://raw.githubusercontent.com/cloudant/fabric/217d0b00bc4301bd834e76d902edf9c052e18b87/src/fabric_doc_attachments.erl | erlang |
use this file except in compliance with the License. You may obtain a copy of
the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License.
couch api calls
internal
spawn a process to actually receive the uploaded data
take requests from the DB writers and get data from the receiver
Figure out how far along this writer (From) is in the list
Talk to the receiver to get another chunk if necessary
reply to the writer
Update the counter for this writer
Drop any chunks that have been sent to all writers | Copyright 2010 Cloudant
Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
-module(fabric_doc_attachments).
-include("fabric.hrl").
-include_lib("couch/include/couch_db.hrl").
-export([receiver/2]).
receiver(_Req, undefined) ->
<<"">>;
receiver(_Req, {unknown_transfer_encoding, Unknown}) ->
exit({unknown_transfer_encoding, Unknown});
receiver(Req, chunked) ->
MiddleMan = spawn(fun() -> middleman(Req, chunked) end),
fun(4096, ChunkFun, ok) ->
write_chunks(MiddleMan, ChunkFun)
end;
receiver(_Req, 0) ->
<<"">>;
receiver(Req, Length) when is_integer(Length) ->
maybe_send_continue(Req),
Middleman = spawn(fun() -> middleman(Req, Length) end),
fun() ->
Middleman ! {self(), gimme_data},
Timeout = fabric_util:attachments_timeout(),
receive
{Middleman, Data} ->
rexi:reply(attachment_chunk_received),
iolist_to_binary(Data)
after Timeout ->
exit(timeout)
end
end;
receiver(_Req, Length) ->
exit({length_not_integer, Length}).
maybe_send_continue(#httpd{mochi_req = MochiReq} = Req) ->
case couch_httpd:header_value(Req, "expect") of
undefined ->
ok;
Expect ->
case string:to_lower(Expect) of
"100-continue" ->
MochiReq:start_raw_response({100, gb_trees:empty()});
_ ->
ok
end
end.
write_chunks(MiddleMan, ChunkFun) ->
MiddleMan ! {self(), gimme_data},
Timeout = fabric_util:attachments_timeout(),
receive
{MiddleMan, ChunkRecordList} ->
rexi:reply(attachment_chunk_received),
case flush_chunks(ChunkRecordList, ChunkFun) of
continue -> write_chunks(MiddleMan, ChunkFun);
done -> ok
end
after Timeout ->
exit(timeout)
end.
flush_chunks([], _ChunkFun) ->
continue;
flush_chunks([{0, _}], _ChunkFun) ->
done;
flush_chunks([Chunk | Rest], ChunkFun) ->
ChunkFun(Chunk, ok),
flush_chunks(Rest, ChunkFun).
receive_unchunked_attachment(_Req, 0) ->
ok;
receive_unchunked_attachment(Req, Length) ->
receive {MiddleMan, go} ->
Data = couch_httpd:recv(Req, 0),
MiddleMan ! {self(), Data}
end,
receive_unchunked_attachment(Req, Length - size(Data)).
middleman(Req, chunked) ->
RcvFun = fun(ChunkRecord, ok) ->
receive {From, go} -> From ! {self(), ChunkRecord} end, ok
end,
Receiver = spawn(fun() -> couch_httpd:recv_chunked(Req,4096,RcvFun,ok) end),
N = erlang:list_to_integer(config:get("cluster","n")),
Timeout = fabric_util:request_timeout(),
middleman_loop(Receiver, N, [], [], Timeout);
middleman(Req, Length) ->
Receiver = spawn(fun() -> receive_unchunked_attachment(Req, Length) end),
N = erlang:list_to_integer(config:get("cluster","n")),
Timeout = fabric_util:request_timeout(),
middleman_loop(Receiver, N, [], [], Timeout).
middleman_loop(Receiver, N, Counters0, ChunkList0, Timeout) ->
receive {From, gimme_data} ->
ListIndex = case fabric_dict:lookup_element(From, Counters0) of
undefined -> 0;
I -> I
end,
ChunkList1 = if ListIndex == length(ChunkList0) ->
Receiver ! {self(), go},
receive
{Receiver, ChunkRecord} ->
ChunkList0 ++ [ChunkRecord]
end;
true -> ChunkList0 end,
Reply = lists:nthtail(ListIndex, ChunkList1),
From ! {self(), Reply},
Counters1 = fabric_dict:update_counter(From, length(Reply), Counters0),
Size = fabric_dict:size(Counters1),
NumToDrop = lists:min([I || {_, I} <- Counters1]),
{ChunkList3, Counters3} =
if Size == N andalso NumToDrop > 0 ->
ChunkList2 = lists:nthtail(NumToDrop, ChunkList1),
Counters2 = [{F, I-NumToDrop} || {F, I} <- Counters1],
{ChunkList2, Counters2};
true ->
{ChunkList1, Counters1}
end,
middleman_loop(Receiver, N, Counters3, ChunkList3, Timeout)
after Timeout ->
ok
end.
|
c188cdb87900c39c0d747075f1db68b18dce2ec2a24208a4ec3de596222643d7 | replikativ/chat42app | main.cljs | (ns env.ios.main
(:require [chat42app.ios.core :as core]))
(core/init)
| null | https://raw.githubusercontent.com/replikativ/chat42app/5d2caedf6509043823b946c44ceda51b078324b3/env/prod/env/ios/main.cljs | clojure | (ns env.ios.main
(:require [chat42app.ios.core :as core]))
(core/init)
| |
56f405b519fffb8f25782b676cebcc7d518a2ed36cad35c71a6b2d6e3ea0f380 | stamourv/optimization-coach | typed-racket.rkt | #lang racket/base
;; Typed Racket-specific optimization analysis.
(require racket/match
"structs.rkt" "causality-merging.rkt" "profiling.rkt")
(provide report-typed-racket)
(define (report-typed-racket TR-log profile hot-functions)
(log->report
(causality-merging
(prune-cold-TR-failures TR-log profile hot-functions))))
Returns a report - entry or # f , which means prune .
(define (log-entry->report-entry l)
(match l
[(log-entry kind msg stx located-stx (? number? pos))
(define start (sub1 pos))
(define end (+ start (syntax-span stx)))
(if (opt-log-entry? l)
(success-report-entry kind msg located-stx 'typed-racket start end)
(near-miss-report-entry kind msg located-stx 'typed-racket start end
(missed-opt-log-entry-badness l)
(missed-opt-log-entry-irritants l)))]
[_ #f])) ; no source location, ignore
;; converts log-entry structs to report-entry structs for further
;; processing
(define (log->report log)
(filter values (map log-entry->report-entry log)))
;;--------------------------------------------------------------------
(define (prune-cold-TR-failures TR-log profile hot-functions)
(define total-time (and profile (profile-total-time profile)))
;; #f if no profiling info is available for this function
(define (pos->node pos)
(and profile
pos
(for/first ([p (in-list (profile-nodes profile))]
#:when (let* ([from (node-pos p)]
[span (node-span p)])
(and from span
(<= from pos (+ from span)))))
p)))
(if (not profile)
TR-log ; keep everything if we don't have profile info
(for/list ([l (in-list TR-log)]
#:when (or (opt-log-entry? l) ; don't prune successes
;; in hot function?
(memq (pos->node (log-entry-pos l)) hot-functions)))
(define profile-entry (memq (pos->node (log-entry-pos l)) hot-functions))
(define badness-multiplier
(if profile-entry
(/ (node-self (car profile-entry)) total-time)
1))
(match l
[(missed-opt-log-entry kind msg stx located-stx pos
irritants merged-irritants badness)
(missed-opt-log-entry kind msg stx located-stx pos
irritants merged-irritants
uses ceiling to never go down to 0
;; both badness and badness-multiplier are non-0
(ceiling (* badness badness-multiplier)))]
[_ l])))) ; keep as is
| null | https://raw.githubusercontent.com/stamourv/optimization-coach/dbd6cf06613bf285b4540301ea86dd87239eab7d/optimization-coach/typed-racket.rkt | racket | Typed Racket-specific optimization analysis.
no source location, ignore
converts log-entry structs to report-entry structs for further
processing
--------------------------------------------------------------------
#f if no profiling info is available for this function
keep everything if we don't have profile info
don't prune successes
in hot function?
both badness and badness-multiplier are non-0
keep as is | #lang racket/base
(require racket/match
"structs.rkt" "causality-merging.rkt" "profiling.rkt")
(provide report-typed-racket)
(define (report-typed-racket TR-log profile hot-functions)
(log->report
(causality-merging
(prune-cold-TR-failures TR-log profile hot-functions))))
Returns a report - entry or # f , which means prune .
(define (log-entry->report-entry l)
(match l
[(log-entry kind msg stx located-stx (? number? pos))
(define start (sub1 pos))
(define end (+ start (syntax-span stx)))
(if (opt-log-entry? l)
(success-report-entry kind msg located-stx 'typed-racket start end)
(near-miss-report-entry kind msg located-stx 'typed-racket start end
(missed-opt-log-entry-badness l)
(missed-opt-log-entry-irritants l)))]
(define (log->report log)
(filter values (map log-entry->report-entry log)))
(define (prune-cold-TR-failures TR-log profile hot-functions)
(define total-time (and profile (profile-total-time profile)))
(define (pos->node pos)
(and profile
pos
(for/first ([p (in-list (profile-nodes profile))]
#:when (let* ([from (node-pos p)]
[span (node-span p)])
(and from span
(<= from pos (+ from span)))))
p)))
(if (not profile)
(for/list ([l (in-list TR-log)]
(memq (pos->node (log-entry-pos l)) hot-functions)))
(define profile-entry (memq (pos->node (log-entry-pos l)) hot-functions))
(define badness-multiplier
(if profile-entry
(/ (node-self (car profile-entry)) total-time)
1))
(match l
[(missed-opt-log-entry kind msg stx located-stx pos
irritants merged-irritants badness)
(missed-opt-log-entry kind msg stx located-stx pos
irritants merged-irritants
uses ceiling to never go down to 0
(ceiling (* badness badness-multiplier)))]
|
2780260d80282c18de2996ddfbfc86364e983868df3aa537760b7f19ff48dde2 | hdbc/hdbc-odbc | Utils.hs | -*- mode : ; -*-
-}
module Database.HDBC.ODBC.Utils where
import Foreign.Ptr
import Control.Exception
import Foreign.Marshal.Array
withAnyArr0 :: (a -> IO (Ptr b)) -- ^ Function that transforms input data into pointer
-> (Ptr b -> IO ()) -- ^ Function that frees generated data
-> [a] -- ^ List of input data
-> (Ptr (Ptr b) -> IO c) -- ^ Action to run with the C array
-> IO c -- ^ Return value
withAnyArr0 input2ptract freeact inp action =
bracket (mapM input2ptract inp)
(\clist -> mapM_ freeact clist)
(\clist -> withArray0 nullPtr clist action)
| null | https://raw.githubusercontent.com/hdbc/hdbc-odbc/06833d77799f16634d2038bcdc308c35d4752cdd/Database/HDBC/ODBC/Utils.hs | haskell | ^ Function that transforms input data into pointer
^ Function that frees generated data
^ List of input data
^ Action to run with the C array
^ Return value | -*- mode : ; -*-
-}
module Database.HDBC.ODBC.Utils where
import Foreign.Ptr
import Control.Exception
import Foreign.Marshal.Array
withAnyArr0 input2ptract freeact inp action =
bracket (mapM input2ptract inp)
(\clist -> mapM_ freeact clist)
(\clist -> withArray0 nullPtr clist action)
|
ceb751c12a810c484c7a68f318360365843434a39ead6fcdb8863adfd6e467a5 | facebook/flow | exports.ml |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
type export =
TODO : DefaultType , e.g. ` export default class { } `
| Default (** e.g. `export default function() {}` *)
| Named of string (** `export const foo: string = "foo"` *)
| NamedType of string (** `export type T = string` *)
| Module of string * export list (** `declare module "foo" { ... exports ... }` *)
[@@deriving show { with_path = false }]
type t = export list [@@deriving show { with_path = false }]
module Export_sig = struct
type 'loc t = {
module_kind: 'loc Type_sig_pack.module_kind option;
module_refs: string Type_sig_collections.Module_refs.t;
local_defs: 'loc Type_sig_pack.packed_def Type_sig_collections.Local_defs.t;
remote_refs: 'loc Type_sig_pack.remote_ref Type_sig_collections.Remote_refs.t;
pattern_defs: 'loc Type_sig_pack.packed Type_sig_collections.Pattern_defs.t;
patterns: 'loc Type_sig_pack.pattern Type_sig_collections.Patterns.t;
}
[@@warning "-69"]
let of_module
{
Packed_type_sig.Module.module_kind;
module_refs;
local_defs;
remote_refs;
pattern_defs;
patterns;
} =
{ module_kind = Some module_kind; module_refs; local_defs; remote_refs; pattern_defs; patterns }
let of_builtins ~module_refs ~local_defs ~remote_refs ~pattern_defs ~patterns =
{ module_kind = None; module_refs; local_defs; remote_refs; pattern_defs; patterns }
let of_builtin_module ~module_refs ~local_defs ~remote_refs ~module_kind ~pattern_defs ~patterns =
{ module_kind = Some module_kind; module_refs; local_defs; remote_refs; pattern_defs; patterns }
end
let local_def_of_index type_sig index =
Type_sig_collections.Local_defs.get type_sig.Export_sig.local_defs index
let pattern_of_index type_sig index =
Type_sig_collections.Patterns.get type_sig.Export_sig.patterns index
let pattern_def_of_index type_sig index =
Type_sig_collections.Pattern_defs.get type_sig.Export_sig.pattern_defs index
module Eval = struct
open Type_sig
open Type_sig_pack
type 'loc evaled =
| Annot of 'loc packed_annot
| Value of 'loc packed_value
| ClassDecl
| EnumDecl
| Nothing
let seen_ref seen = function
| LocalRef { index; _ } ->
let dupe = Type_sig_collections.Local_defs.IndexSet.mem index seen in
let seen = Type_sig_collections.Local_defs.IndexSet.add index seen in
(dupe, seen)
| _ -> (false, seen)
(** Looks up an object field by name. Returns [None] if the name doesn't
exist or isn't a field. *)
let field_of_obj_props name props =
let open Base.Option.Let_syntax in
match%bind SMap.find_opt name props with
| ObjValueField (_, field, _) -> Some field
| ObjValueAccess _
| ObjValueMethod _ ->
(* Accessors and methods don't have any sub-properties to contribute *)
None
let rec pattern type_sig seen : 'a pattern -> 'a evaled = function
| PDef index -> packed type_sig seen (pattern_def_of_index type_sig index)
| PropP { name; def; _ } ->
let evaled = pattern type_sig seen (pattern_of_index type_sig def) in
get_field type_sig seen name evaled
| ComputedP _
| UnsupportedLiteralP _
| ObjRestP _
| IndexP _
| ArrRestP _ ->
(* TODO? *)
Nothing
and tyref type_sig seen (r : 'a tyref) : 'a evaled =
match r with
| Qualified { name = _name; qualification; _ } ->
(match tyref type_sig seen qualification with
| Annot _ ->
(* TODO: get `_qual._name` *)
Nothing
| Value _ ->
(* TODO: get `_qual._name` *)
Nothing
| ClassDecl -> ClassDecl
| EnumDecl -> EnumDecl
| Nothing -> Nothing)
| Unqualified r -> ref type_sig seen r
and ref type_sig seen (r : 'loc packed_ref) : 'loc evaled =
let (dupe, seen) = seen_ref seen r in
if dupe then
Nothing
else
match r with
| LocalRef { index; _ } -> def type_sig seen (local_def_of_index type_sig index)
| RemoteRef _
| BuiltinRef _ ->
(* TODO: remember these cross-module aliases. if the remote thing matches,
we can also suggest everything that aliases to it. *)
Nothing
(** [def type_sig d] steps through variable definitions to keep walking the
initializer. all other definitions (like classes or functions) do not contribute
any exported names, so we don't need to walk them.
so, for [var x = y], returns [Some y]; for all other definitions, returns [None]. *)
and def type_sig seen : 'loc packed_def -> 'loc evaled = function
| Variable { def; _ } -> packed type_sig seen def
| TypeAlias { body; _ } -> packed type_sig seen body
| ClassBinding _ -> ClassDecl
| DeclareClassBinding _ -> ClassDecl
| EnumBinding _ -> EnumDecl
| DisabledEnumBinding _ -> EnumDecl
| Interface _
| FunBinding _
| DeclareFun _
| OpaqueType _ ->
(* None of these contain anything that can be imported separately. For example,
you can't `import {someMethod} ...` from an exported class. *)
Nothing
and packed type_sig seen : 'loc packed -> 'loc evaled = function
| Value x -> Value x
| Annot x -> Annot x
| Ref r -> ref type_sig seen r
| TyRef r -> tyref type_sig seen r
| TyRefApp { name; _ } -> tyref type_sig seen name
| Eval (_, x, op) -> eval type_sig seen x op
| Pattern index -> pattern type_sig seen (pattern_of_index type_sig index)
| Require _
| ImportDynamic _ ->
(* TODO: remember these cross-module aliases. if the remote thing matches,
we can also suggest everything that aliases to it. *)
Nothing
| Err _ -> Nothing
| ModuleRef _
| AsyncVoidReturn _ ->
(* TODO? *)
Nothing
and eval type_sig seen (x : 'loc packed) (op : 'loc packed op) : 'loc evaled =
match op with
| GetProp name -> packed type_sig seen x |> get_field type_sig seen name
| _ ->
(* TODO? *)
Nothing
* [ type_sig name evaled ] destructures an object pattern like
[ let { name } = evaled ] , returning [ Some evaled.name ] if [ evaled ] is an
object AND it has a [ name ] field ; [ None ] otherwise .
[let { name } = evaled], returning [Some evaled.name] if [evaled] is an
object AND it has a [name] field; [None] otherwise. *)
and get_field type_sig seen (name : string) (evaled : 'a evaled) : 'a evaled =
match evaled with
| Value (ObjLit { props; _ }) ->
(match field_of_obj_props name props with
| Some p -> packed type_sig seen p
| None -> Nothing)
TODO
| Annot _ ->
(* TODO? *)
Nothing
| Value
( ClassExpr _ | FunExpr _ | StringVal _ | StringLit _ | LongStringLit _ | NumberVal _
| NumberLit _ | BooleanVal _ | BooleanLit _ | NullLit _ | ArrayLit _ | BigIntVal _
| BigIntLit _ ) ->
Nothing
| ClassDecl -> Nothing
| EnumDecl -> Nothing
| Nothing -> Nothing
end
* [ add_named_type acc name def ] adds [ NamedType name ] to [ acc ] if [ def ] is a
class or enum , since its type is also exported because classes and enums are both
values and types .
class or enum, since its type is also exported because classes and enums are both
values and types. *)
let add_named_type acc name = function
| Eval.ClassDecl
| Eval.EnumDecl ->
NamedType name :: acc
| Eval.Value _
| Eval.Annot _
| Eval.Nothing ->
acc
let empty_seen = Type_sig_collections.Local_defs.IndexSet.empty
module ESM = struct
open Type_sig_pack
let fold_name type_sig acc name value =
match value with
| ExportRef ref ->
let acc = Eval.ref type_sig empty_seen ref |> add_named_type acc name in
Named name :: acc
| ExportBinding index ->
let acc =
let def = Eval.def type_sig empty_seen (local_def_of_index type_sig index) in
add_named_type acc name def
in
Named name :: acc
| ExportDefault _
| ExportDefaultBinding _ ->
Default :: acc
| ExportFrom _ ->
(* TODO: ExportFrom defines aliases, which we don't handle yet. TS
keeps track of them and only suggests them if the re-exported thing
can't be imported. *)
acc
let fold_type acc name value =
match value with
| ExportTypeRef _
| ExportTypeBinding _ ->
NamedType name :: acc
| ExportTypeFrom _ ->
(* TODO: ExportTypeFrom defines aliases, which we don't handle yet. TS
keeps track of them and only suggests them if the re-exported thing
can't be imported. *)
acc
let exports type_sig type_exports exports info =
(* TODO: re-exports *)
let (ESModuleInfo { type_export_keys; export_keys; type_stars = _; stars = _; strict = _ }) =
info
in
let acc = Base.Array.fold2_exn ~init:[] ~f:(fold_name type_sig) export_keys exports in
Base.Array.fold2_exn ~init:acc ~f:fold_type type_export_keys type_exports
end
module CJS = struct
open Type_sig
open Type_sig_pack
(** only objects can be destructured on import *)
let exports_of_value acc type_sig = function
| ObjLit { props; _ } ->
SMap.fold
(fun name value acc ->
only property names that are valid identifier names can currently be
imported : ` module.exports = { " Foo Bar " : true } ` can not be imported
as ` import { " Foo Bar " as Foo_bar } ... ` yet . This will be allowed by
; until then , we only bother
indexing names that can actually be imported .
imported: `module.exports = { "Foo Bar": true }` cannot be imported
as `import { "Foo Bar" as Foo_bar } ...` yet. This will be allowed by
; until then, we only bother
indexing names that can actually be imported. *)
if Parser_flow.string_is_valid_identifier_name name then
let acc =
match value with
| ObjValueField (_, Value (ClassExpr _), _) -> NamedType name :: acc
| ObjValueField (_, Ref ref, _) ->
Eval.ref type_sig empty_seen ref |> add_named_type acc name
| _ -> acc
in
Named name :: acc
else
acc)
props
acc
| ArrayLit _
| BooleanLit _
| BooleanVal _
| ClassExpr _
| FunExpr _
| LongStringLit _
| NullLit _
| NumberLit _
| NumberVal _
| ObjSpreadLit _
| StringLit _
| StringVal _
| BigIntVal _
| BigIntLit _ ->
acc
let exports_of_annot acc = function
| ObjAnnot { props; _ } -> SMap.fold (fun name _value acc -> Named name :: acc) props acc
| _ ->
TODO : handle TEMPORARY_Object , ReadOnly , Exact , if they wrap objects ?
acc
let add_named_exports acc type_sig packed =
match Eval.packed type_sig empty_seen packed with
| Eval.Annot annot -> exports_of_annot acc annot
| Eval.Value value -> exports_of_value acc type_sig value
| Eval.ClassDecl
| Eval.EnumDecl
| Eval.Nothing ->
acc
let add_default_exports type_sig acc = function
| Some module_exports -> Default :: add_named_exports acc type_sig module_exports
| None -> acc
let fold_type acc name value =
let open Type_sig_pack in
match value with
| ExportTypeRef _
| ExportTypeBinding _ ->
NamedType name :: acc
| ExportTypeFrom _ ->
(* TODO: ExportTypeFrom defines aliases, which we don't handle yet. TS
keeps track of them and only suggests them if the re-exported thing
can't be imported. *)
acc
let exports type_sig type_exports exports info =
(* TODO: re-exports *)
let (CJSModuleInfo { type_export_keys; type_stars = _; strict = _ }) = info in
let acc = add_default_exports type_sig [] exports in
Base.Array.fold2_exn ~init:acc ~f:fold_type type_export_keys type_exports
end
let add_global =
let add_named name acc =
if String.contains name '$' then
acc
else
Named name :: acc
in
fun global_sig name index acc ->
let def = local_def_of_index global_sig index in
match def with
| Type_sig.Variable _ ->
add_named_type acc name (Eval.def global_sig empty_seen def) |> add_named name
| Type_sig.FunBinding _
| Type_sig.DeclareFun _ ->
add_named name acc
| Type_sig.TypeAlias _
| Type_sig.Interface _
| Type_sig.OpaqueType _ ->
NamedType name :: acc
| Type_sig.ClassBinding _
| Type_sig.DeclareClassBinding _
| Type_sig.EnumBinding _
| Type_sig.DisabledEnumBinding _ ->
add_named name (NamedType name :: acc)
let of_sig export_sig : t =
match export_sig.Export_sig.module_kind with
| Some (Type_sig_pack.ESModule { type_exports; exports; info }) ->
ESM.exports export_sig type_exports exports info
| Some (Type_sig_pack.CJSModule { type_exports; exports; info }) ->
CJS.exports export_sig type_exports exports info
| None -> failwith "unexpected exports in global scope"
let of_module type_sig : t = type_sig |> Export_sig.of_module |> of_sig
let of_builtins
{
Packed_type_sig.Builtins.modules;
module_refs;
local_defs;
remote_refs;
pattern_defs;
patterns;
globals;
} =
let global_sig =
Export_sig.of_builtins ~module_refs ~local_defs ~remote_refs ~pattern_defs ~patterns
in
[]
|> SMap.fold (add_global global_sig) globals
|> SMap.fold
(fun name { Packed_type_sig.Builtins.loc = _; module_kind } acc ->
let export_sig =
Export_sig.of_builtin_module
~module_refs
~local_defs
~remote_refs
~module_kind
~pattern_defs
~patterns
in
Module (name, of_sig export_sig) :: acc)
modules
let empty = []
| null | https://raw.githubusercontent.com/facebook/flow/a952bafb6f0965a92dc31359751c5461ee7bbb92/src/parser_utils/exports/exports.ml | ocaml | * e.g. `export default function() {}`
* `export const foo: string = "foo"`
* `export type T = string`
* `declare module "foo" { ... exports ... }`
* Looks up an object field by name. Returns [None] if the name doesn't
exist or isn't a field.
Accessors and methods don't have any sub-properties to contribute
TODO?
TODO: get `_qual._name`
TODO: get `_qual._name`
TODO: remember these cross-module aliases. if the remote thing matches,
we can also suggest everything that aliases to it.
* [def type_sig d] steps through variable definitions to keep walking the
initializer. all other definitions (like classes or functions) do not contribute
any exported names, so we don't need to walk them.
so, for [var x = y], returns [Some y]; for all other definitions, returns [None].
None of these contain anything that can be imported separately. For example,
you can't `import {someMethod} ...` from an exported class.
TODO: remember these cross-module aliases. if the remote thing matches,
we can also suggest everything that aliases to it.
TODO?
TODO?
TODO?
TODO: ExportFrom defines aliases, which we don't handle yet. TS
keeps track of them and only suggests them if the re-exported thing
can't be imported.
TODO: ExportTypeFrom defines aliases, which we don't handle yet. TS
keeps track of them and only suggests them if the re-exported thing
can't be imported.
TODO: re-exports
* only objects can be destructured on import
TODO: ExportTypeFrom defines aliases, which we don't handle yet. TS
keeps track of them and only suggests them if the re-exported thing
can't be imported.
TODO: re-exports |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
type export =
TODO : DefaultType , e.g. ` export default class { } `
[@@deriving show { with_path = false }]
type t = export list [@@deriving show { with_path = false }]
module Export_sig = struct
type 'loc t = {
module_kind: 'loc Type_sig_pack.module_kind option;
module_refs: string Type_sig_collections.Module_refs.t;
local_defs: 'loc Type_sig_pack.packed_def Type_sig_collections.Local_defs.t;
remote_refs: 'loc Type_sig_pack.remote_ref Type_sig_collections.Remote_refs.t;
pattern_defs: 'loc Type_sig_pack.packed Type_sig_collections.Pattern_defs.t;
patterns: 'loc Type_sig_pack.pattern Type_sig_collections.Patterns.t;
}
[@@warning "-69"]
let of_module
{
Packed_type_sig.Module.module_kind;
module_refs;
local_defs;
remote_refs;
pattern_defs;
patterns;
} =
{ module_kind = Some module_kind; module_refs; local_defs; remote_refs; pattern_defs; patterns }
let of_builtins ~module_refs ~local_defs ~remote_refs ~pattern_defs ~patterns =
{ module_kind = None; module_refs; local_defs; remote_refs; pattern_defs; patterns }
let of_builtin_module ~module_refs ~local_defs ~remote_refs ~module_kind ~pattern_defs ~patterns =
{ module_kind = Some module_kind; module_refs; local_defs; remote_refs; pattern_defs; patterns }
end
let local_def_of_index type_sig index =
Type_sig_collections.Local_defs.get type_sig.Export_sig.local_defs index
let pattern_of_index type_sig index =
Type_sig_collections.Patterns.get type_sig.Export_sig.patterns index
let pattern_def_of_index type_sig index =
Type_sig_collections.Pattern_defs.get type_sig.Export_sig.pattern_defs index
module Eval = struct
open Type_sig
open Type_sig_pack
type 'loc evaled =
| Annot of 'loc packed_annot
| Value of 'loc packed_value
| ClassDecl
| EnumDecl
| Nothing
let seen_ref seen = function
| LocalRef { index; _ } ->
let dupe = Type_sig_collections.Local_defs.IndexSet.mem index seen in
let seen = Type_sig_collections.Local_defs.IndexSet.add index seen in
(dupe, seen)
| _ -> (false, seen)
let field_of_obj_props name props =
let open Base.Option.Let_syntax in
match%bind SMap.find_opt name props with
| ObjValueField (_, field, _) -> Some field
| ObjValueAccess _
| ObjValueMethod _ ->
None
let rec pattern type_sig seen : 'a pattern -> 'a evaled = function
| PDef index -> packed type_sig seen (pattern_def_of_index type_sig index)
| PropP { name; def; _ } ->
let evaled = pattern type_sig seen (pattern_of_index type_sig def) in
get_field type_sig seen name evaled
| ComputedP _
| UnsupportedLiteralP _
| ObjRestP _
| IndexP _
| ArrRestP _ ->
Nothing
and tyref type_sig seen (r : 'a tyref) : 'a evaled =
match r with
| Qualified { name = _name; qualification; _ } ->
(match tyref type_sig seen qualification with
| Annot _ ->
Nothing
| Value _ ->
Nothing
| ClassDecl -> ClassDecl
| EnumDecl -> EnumDecl
| Nothing -> Nothing)
| Unqualified r -> ref type_sig seen r
and ref type_sig seen (r : 'loc packed_ref) : 'loc evaled =
let (dupe, seen) = seen_ref seen r in
if dupe then
Nothing
else
match r with
| LocalRef { index; _ } -> def type_sig seen (local_def_of_index type_sig index)
| RemoteRef _
| BuiltinRef _ ->
Nothing
and def type_sig seen : 'loc packed_def -> 'loc evaled = function
| Variable { def; _ } -> packed type_sig seen def
| TypeAlias { body; _ } -> packed type_sig seen body
| ClassBinding _ -> ClassDecl
| DeclareClassBinding _ -> ClassDecl
| EnumBinding _ -> EnumDecl
| DisabledEnumBinding _ -> EnumDecl
| Interface _
| FunBinding _
| DeclareFun _
| OpaqueType _ ->
Nothing
and packed type_sig seen : 'loc packed -> 'loc evaled = function
| Value x -> Value x
| Annot x -> Annot x
| Ref r -> ref type_sig seen r
| TyRef r -> tyref type_sig seen r
| TyRefApp { name; _ } -> tyref type_sig seen name
| Eval (_, x, op) -> eval type_sig seen x op
| Pattern index -> pattern type_sig seen (pattern_of_index type_sig index)
| Require _
| ImportDynamic _ ->
Nothing
| Err _ -> Nothing
| ModuleRef _
| AsyncVoidReturn _ ->
Nothing
and eval type_sig seen (x : 'loc packed) (op : 'loc packed op) : 'loc evaled =
match op with
| GetProp name -> packed type_sig seen x |> get_field type_sig seen name
| _ ->
Nothing
* [ type_sig name evaled ] destructures an object pattern like
[ let { name } = evaled ] , returning [ Some evaled.name ] if [ evaled ] is an
object AND it has a [ name ] field ; [ None ] otherwise .
[let { name } = evaled], returning [Some evaled.name] if [evaled] is an
object AND it has a [name] field; [None] otherwise. *)
and get_field type_sig seen (name : string) (evaled : 'a evaled) : 'a evaled =
match evaled with
| Value (ObjLit { props; _ }) ->
(match field_of_obj_props name props with
| Some p -> packed type_sig seen p
| None -> Nothing)
TODO
| Annot _ ->
Nothing
| Value
( ClassExpr _ | FunExpr _ | StringVal _ | StringLit _ | LongStringLit _ | NumberVal _
| NumberLit _ | BooleanVal _ | BooleanLit _ | NullLit _ | ArrayLit _ | BigIntVal _
| BigIntLit _ ) ->
Nothing
| ClassDecl -> Nothing
| EnumDecl -> Nothing
| Nothing -> Nothing
end
* [ add_named_type acc name def ] adds [ NamedType name ] to [ acc ] if [ def ] is a
class or enum , since its type is also exported because classes and enums are both
values and types .
class or enum, since its type is also exported because classes and enums are both
values and types. *)
let add_named_type acc name = function
| Eval.ClassDecl
| Eval.EnumDecl ->
NamedType name :: acc
| Eval.Value _
| Eval.Annot _
| Eval.Nothing ->
acc
let empty_seen = Type_sig_collections.Local_defs.IndexSet.empty
module ESM = struct
open Type_sig_pack
let fold_name type_sig acc name value =
match value with
| ExportRef ref ->
let acc = Eval.ref type_sig empty_seen ref |> add_named_type acc name in
Named name :: acc
| ExportBinding index ->
let acc =
let def = Eval.def type_sig empty_seen (local_def_of_index type_sig index) in
add_named_type acc name def
in
Named name :: acc
| ExportDefault _
| ExportDefaultBinding _ ->
Default :: acc
| ExportFrom _ ->
acc
let fold_type acc name value =
match value with
| ExportTypeRef _
| ExportTypeBinding _ ->
NamedType name :: acc
| ExportTypeFrom _ ->
acc
let exports type_sig type_exports exports info =
let (ESModuleInfo { type_export_keys; export_keys; type_stars = _; stars = _; strict = _ }) =
info
in
let acc = Base.Array.fold2_exn ~init:[] ~f:(fold_name type_sig) export_keys exports in
Base.Array.fold2_exn ~init:acc ~f:fold_type type_export_keys type_exports
end
module CJS = struct
open Type_sig
open Type_sig_pack
let exports_of_value acc type_sig = function
| ObjLit { props; _ } ->
SMap.fold
(fun name value acc ->
only property names that are valid identifier names can currently be
imported : ` module.exports = { " Foo Bar " : true } ` can not be imported
as ` import { " Foo Bar " as Foo_bar } ... ` yet . This will be allowed by
; until then , we only bother
indexing names that can actually be imported .
imported: `module.exports = { "Foo Bar": true }` cannot be imported
as `import { "Foo Bar" as Foo_bar } ...` yet. This will be allowed by
; until then, we only bother
indexing names that can actually be imported. *)
if Parser_flow.string_is_valid_identifier_name name then
let acc =
match value with
| ObjValueField (_, Value (ClassExpr _), _) -> NamedType name :: acc
| ObjValueField (_, Ref ref, _) ->
Eval.ref type_sig empty_seen ref |> add_named_type acc name
| _ -> acc
in
Named name :: acc
else
acc)
props
acc
| ArrayLit _
| BooleanLit _
| BooleanVal _
| ClassExpr _
| FunExpr _
| LongStringLit _
| NullLit _
| NumberLit _
| NumberVal _
| ObjSpreadLit _
| StringLit _
| StringVal _
| BigIntVal _
| BigIntLit _ ->
acc
let exports_of_annot acc = function
| ObjAnnot { props; _ } -> SMap.fold (fun name _value acc -> Named name :: acc) props acc
| _ ->
TODO : handle TEMPORARY_Object , ReadOnly , Exact , if they wrap objects ?
acc
let add_named_exports acc type_sig packed =
match Eval.packed type_sig empty_seen packed with
| Eval.Annot annot -> exports_of_annot acc annot
| Eval.Value value -> exports_of_value acc type_sig value
| Eval.ClassDecl
| Eval.EnumDecl
| Eval.Nothing ->
acc
let add_default_exports type_sig acc = function
| Some module_exports -> Default :: add_named_exports acc type_sig module_exports
| None -> acc
let fold_type acc name value =
let open Type_sig_pack in
match value with
| ExportTypeRef _
| ExportTypeBinding _ ->
NamedType name :: acc
| ExportTypeFrom _ ->
acc
let exports type_sig type_exports exports info =
let (CJSModuleInfo { type_export_keys; type_stars = _; strict = _ }) = info in
let acc = add_default_exports type_sig [] exports in
Base.Array.fold2_exn ~init:acc ~f:fold_type type_export_keys type_exports
end
let add_global =
let add_named name acc =
if String.contains name '$' then
acc
else
Named name :: acc
in
fun global_sig name index acc ->
let def = local_def_of_index global_sig index in
match def with
| Type_sig.Variable _ ->
add_named_type acc name (Eval.def global_sig empty_seen def) |> add_named name
| Type_sig.FunBinding _
| Type_sig.DeclareFun _ ->
add_named name acc
| Type_sig.TypeAlias _
| Type_sig.Interface _
| Type_sig.OpaqueType _ ->
NamedType name :: acc
| Type_sig.ClassBinding _
| Type_sig.DeclareClassBinding _
| Type_sig.EnumBinding _
| Type_sig.DisabledEnumBinding _ ->
add_named name (NamedType name :: acc)
let of_sig export_sig : t =
match export_sig.Export_sig.module_kind with
| Some (Type_sig_pack.ESModule { type_exports; exports; info }) ->
ESM.exports export_sig type_exports exports info
| Some (Type_sig_pack.CJSModule { type_exports; exports; info }) ->
CJS.exports export_sig type_exports exports info
| None -> failwith "unexpected exports in global scope"
let of_module type_sig : t = type_sig |> Export_sig.of_module |> of_sig
let of_builtins
{
Packed_type_sig.Builtins.modules;
module_refs;
local_defs;
remote_refs;
pattern_defs;
patterns;
globals;
} =
let global_sig =
Export_sig.of_builtins ~module_refs ~local_defs ~remote_refs ~pattern_defs ~patterns
in
[]
|> SMap.fold (add_global global_sig) globals
|> SMap.fold
(fun name { Packed_type_sig.Builtins.loc = _; module_kind } acc ->
let export_sig =
Export_sig.of_builtin_module
~module_refs
~local_defs
~remote_refs
~module_kind
~pattern_defs
~patterns
in
Module (name, of_sig export_sig) :: acc)
modules
let empty = []
|
1ff44d16037e39a681ef56d20b9890ec61f0c3b2eb8891c65a06cf68cf25027e | icfpcontest2021/icfpcontest2021.github.io | Main.hs | module Main where
import qualified BrainWall.Database.Tests
import qualified BrainWall.Edge.Slope.Tests
import qualified BrainWall.Edge.Tests
import qualified BrainWall.Main.Prosecutor.Tests
import qualified BrainWall.Polygon.ContainsEdge.Tests
import qualified BrainWall.Polygon.ContainsPoint.Tests
import qualified BrainWall.Polygon.Tests
import qualified BrainWall.Problem.Tests
import qualified Test.Tasty as Tasty
main :: IO ()
main = Tasty.defaultMain $ Tasty.testGroup "brain-wall"
[ BrainWall.Database.Tests.tests
, BrainWall.Edge.Tests.tests
, BrainWall.Edge.Slope.Tests.tests
, BrainWall.Main.Prosecutor.Tests.tests
, BrainWall.Polygon.ContainsEdge.Tests.tests
, BrainWall.Polygon.ContainsPoint.Tests.tests
, BrainWall.Polygon.Tests.tests
, BrainWall.Problem.Tests.tests
]
| null | https://raw.githubusercontent.com/icfpcontest2021/icfpcontest2021.github.io/fb23fea2a8ecec7740017d3dda78d921c1df5a26/toolchain/tests/Main.hs | haskell | module Main where
import qualified BrainWall.Database.Tests
import qualified BrainWall.Edge.Slope.Tests
import qualified BrainWall.Edge.Tests
import qualified BrainWall.Main.Prosecutor.Tests
import qualified BrainWall.Polygon.ContainsEdge.Tests
import qualified BrainWall.Polygon.ContainsPoint.Tests
import qualified BrainWall.Polygon.Tests
import qualified BrainWall.Problem.Tests
import qualified Test.Tasty as Tasty
main :: IO ()
main = Tasty.defaultMain $ Tasty.testGroup "brain-wall"
[ BrainWall.Database.Tests.tests
, BrainWall.Edge.Tests.tests
, BrainWall.Edge.Slope.Tests.tests
, BrainWall.Main.Prosecutor.Tests.tests
, BrainWall.Polygon.ContainsEdge.Tests.tests
, BrainWall.Polygon.ContainsPoint.Tests.tests
, BrainWall.Polygon.Tests.tests
, BrainWall.Problem.Tests.tests
]
| |
883c07ae3f028574528f8fb6668be23c1d41794185afc8ea827d1fe77c6e5966 | ScottBrooks/Erlcraft | erlcraft_client_fsm.erl | -module(erlcraft_client_fsm).
-author('').
Based off of -blocking_TCP_server_using_OTP_principles
By < saleyn at gmail.com >
-behaviour(gen_fsm).
-export([start_link/0, set_socket/2, send_packet/2]).
%% gen_fsm callbacks
-export([init/1, handle_event/3,
handle_sync_event/4, handle_info/3, terminate/3, code_change/4]).
FSM States
-export([
'WAIT_FOR_SOCKET'/2,
'WAIT_FOR_DATA'/2
]).
-record(state, {
socket, % client socket
addr, % client address
packet_buffer,
socket_log,
client
}).
-define(TIMEOUT, 120000).
%%%------------------------------------------------------------------------
%%% API
%%%------------------------------------------------------------------------
%%-------------------------------------------------------------------------
@spec ( Socket ) - > { ok , Pid } | ignore | { error , Error }
%% @doc To be called by the supervisor in order to start the server.
If init/1 fails with , the function returns { error , Reason } .
%% If init/1 returns {stop,Reason} or ignore, the process is
%% terminated and the function returns {error,Reason} or ignore,
%% respectively.
%% @end
%%-------------------------------------------------------------------------
start_link() ->
gen_fsm:start_link(?MODULE, [], []).
set_socket(Pid, Socket) when is_pid(Pid), is_port(Socket) ->
io:format("socket set!~n", []),
gen_fsm:send_event(Pid, {socket_ready, Socket}).
send_packet(Pid, Data) when is_pid(Pid) ->
gen_fsm:send_all_state_event(Pid, {packet, Data}).
%%%------------------------------------------------------------------------
%%% Callback functions from gen_server
%%%------------------------------------------------------------------------
%%-------------------------------------------------------------------------
%% Func: init/1
Returns : { ok , , StateData } |
{ ok , , StateData , Timeout } |
%% ignore |
{ stop , StopReason }
@private
%%-------------------------------------------------------------------------
init([]) ->
process_flag(trap_exit, true),
{ok, File} = file:open("socketlog", write),
{ok, Client} = gen_server:start_link(erlcraft_client, [self()], []),
io:format("FSM Pid: ~p~n", [self()]),
{ok, 'WAIT_FOR_SOCKET', #state{packet_buffer = <<>>, socket_log = File, client = Client}}.
%%-------------------------------------------------------------------------
%% Func: StateName/2
Returns : { next_state , NextStateName , NextStateData } |
{ next_state , NextStateName , NextStateData , Timeout } |
{ stop , , NewStateData }
@private
%%-------------------------------------------------------------------------
'WAIT_FOR_SOCKET'({socket_ready, Socket}, State) when is_port(Socket) ->
io:format("Socket Ready!~n", []),
% Now we own the socket
inet:setopts(Socket, [{active, once}, {packet, raw}, binary]),
{ok, {IP, _Port}} = inet:peername(Socket),
io:format("Peer: ~p~n", [IP]),
{next_state, 'WAIT_FOR_DATA', State#state{socket=Socket, addr=IP}, ?TIMEOUT};
'WAIT_FOR_SOCKET'(Other, State) ->
error_logger:error_msg("State: 'WAIT_FOR_SOCKET'. Unexpected message: ~p\n", [Other]),
%% Allow to receive async messages
{next_state, 'WAIT_FOR_SOCKET', State}.
%% Notification event coming from client
'WAIT_FOR_DATA'({data, Data}, #state{socket = S, client=ClientPid, packet_buffer=PacketBuffer} = State) ->
CombinedData = <<PacketBuffer/binary, Data/binary>>,
case mc:handle_data(CombinedData) of
{more, PartialData} ->
{next_state, 'WAIT_FOR_DATA', State#state{packet_buffer = PartialData}, ?TIMEOUT};
{done, DecodedPacket, Rest} ->
case mc:handle_packet(ClientPid, DecodedPacket) of
none -> ok;
Reply -> ok = gen_tcp:send(S, Reply), ok
end,
%Tail call back to ourself with the rest of the data
'WAIT_FOR_DATA'({data, Rest}, State#state{packet_buffer = <<>>})
end;
'WAIT_FOR_DATA'(timeout, State) ->
io:format("Timeout: ~n", []),
error_logger:error_msg("~p Client connection timeout - closing.\n", [self()]),
{stop, normal, State};
'WAIT_FOR_DATA'(Data, State) ->
io:format("~p Ignoring data: ~p\n", [self(), Data]),
{next_state, 'WAIT_FOR_DATA', State, ?TIMEOUT}.
%%-------------------------------------------------------------------------
%% Func: handle_event/3
Returns : { next_state , NextStateName , NextStateData } |
{ next_state , NextStateName , NextStateData , Timeout } |
{ stop , , NewStateData }
@private
%%-------------------------------------------------------------------------
handle_event({packet, Data}, StateName, #state{socket = S, socket_log = Log} = StateData) ->
%io:format("OOB: Packet data: ~p~n", [Data]),
%io:format("Len: ~p~n", [size(Data)]),
ok = gen_tcp:send(S, Data),
ok = file:write(Log, Data),
file:sync(Log),
{next_state, StateName, StateData};
handle_event(Event, StateName, StateData) ->
{stop, {StateName, undefined_event, Event}, StateData}.
%%-------------------------------------------------------------------------
%% Func: handle_sync_event/4
Returns : { next_state , NextStateName , NextStateData } |
{ next_state , NextStateName , NextStateData , Timeout } |
{ reply , Reply , NextStateName , NextStateData } |
{ reply , Reply , NextStateName , NextStateData , Timeout } |
{ stop , , NewStateData } |
%% {stop, Reason, Reply, NewStateData}
@private
%%-------------------------------------------------------------------------
handle_sync_event(Event, _From, StateName, StateData) ->
{stop, {StateName, undefined_event, Event}, StateData}.
%%-------------------------------------------------------------------------
%% Func: handle_info/3
Returns : { next_state , NextStateName , NextStateData } |
{ next_state , NextStateName , NextStateData , Timeout } |
{ stop , , NewStateData }
@private
%%-------------------------------------------------------------------------
handle_info({tcp, Socket, Bin}, StateName, #state{socket=Socket} = StateData) ->
% Flow control: enable forwarding of next TCP message
inet:setopts(Socket, [{active, once}]),
?MODULE:StateName({data, Bin}, StateData);
handle_info({tcp_closed, Socket}, _StateName,
#state{socket=Socket, addr=Addr} = StateData) ->
error_logger:info_msg("~p Client ~p disconnected.\n", [self(), Addr]),
{stop, normal, StateData};
handle_info(_Info, StateName, StateData) ->
{noreply, StateName, StateData}.
%%-------------------------------------------------------------------------
Func : terminate/3
Purpose : Shutdown the fsm
%% Returns: any
@private
%%-------------------------------------------------------------------------
terminate(_Reason, _StateName, #state{socket=Socket, socket_log = Log}) ->
(catch gen_tcp:close(Socket)),
file:close(Log),
ok.
%%-------------------------------------------------------------------------
%% Func: code_change/4
%% Purpose: Convert process state when code is changed
Returns : { ok , NewState , NewStateData }
@private
%%-------------------------------------------------------------------------
code_change(_OldVsn, StateName, StateData, _Extra) ->
{ok, StateName, StateData}.
| null | https://raw.githubusercontent.com/ScottBrooks/Erlcraft/ed336eb8da4d83e937687ce34feecb76b4128288/src/erlcraft_client_fsm.erl | erlang | gen_fsm callbacks
client socket
client address
------------------------------------------------------------------------
API
------------------------------------------------------------------------
-------------------------------------------------------------------------
@doc To be called by the supervisor in order to start the server.
If init/1 returns {stop,Reason} or ignore, the process is
terminated and the function returns {error,Reason} or ignore,
respectively.
@end
-------------------------------------------------------------------------
------------------------------------------------------------------------
Callback functions from gen_server
------------------------------------------------------------------------
-------------------------------------------------------------------------
Func: init/1
ignore |
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Func: StateName/2
-------------------------------------------------------------------------
Now we own the socket
Allow to receive async messages
Notification event coming from client
Tail call back to ourself with the rest of the data
-------------------------------------------------------------------------
Func: handle_event/3
-------------------------------------------------------------------------
io:format("OOB: Packet data: ~p~n", [Data]),
io:format("Len: ~p~n", [size(Data)]),
-------------------------------------------------------------------------
Func: handle_sync_event/4
{stop, Reason, Reply, NewStateData}
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Func: handle_info/3
-------------------------------------------------------------------------
Flow control: enable forwarding of next TCP message
-------------------------------------------------------------------------
Returns: any
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Func: code_change/4
Purpose: Convert process state when code is changed
------------------------------------------------------------------------- | -module(erlcraft_client_fsm).
-author('').
Based off of -blocking_TCP_server_using_OTP_principles
By < saleyn at gmail.com >
-behaviour(gen_fsm).
-export([start_link/0, set_socket/2, send_packet/2]).
-export([init/1, handle_event/3,
handle_sync_event/4, handle_info/3, terminate/3, code_change/4]).
FSM States
-export([
'WAIT_FOR_SOCKET'/2,
'WAIT_FOR_DATA'/2
]).
-record(state, {
packet_buffer,
socket_log,
client
}).
-define(TIMEOUT, 120000).
@spec ( Socket ) - > { ok , Pid } | ignore | { error , Error }
If init/1 fails with , the function returns { error , Reason } .
start_link() ->
gen_fsm:start_link(?MODULE, [], []).
set_socket(Pid, Socket) when is_pid(Pid), is_port(Socket) ->
io:format("socket set!~n", []),
gen_fsm:send_event(Pid, {socket_ready, Socket}).
send_packet(Pid, Data) when is_pid(Pid) ->
gen_fsm:send_all_state_event(Pid, {packet, Data}).
Returns : { ok , , StateData } |
{ ok , , StateData , Timeout } |
{ stop , StopReason }
@private
init([]) ->
process_flag(trap_exit, true),
{ok, File} = file:open("socketlog", write),
{ok, Client} = gen_server:start_link(erlcraft_client, [self()], []),
io:format("FSM Pid: ~p~n", [self()]),
{ok, 'WAIT_FOR_SOCKET', #state{packet_buffer = <<>>, socket_log = File, client = Client}}.
Returns : { next_state , NextStateName , NextStateData } |
{ next_state , NextStateName , NextStateData , Timeout } |
{ stop , , NewStateData }
@private
'WAIT_FOR_SOCKET'({socket_ready, Socket}, State) when is_port(Socket) ->
io:format("Socket Ready!~n", []),
inet:setopts(Socket, [{active, once}, {packet, raw}, binary]),
{ok, {IP, _Port}} = inet:peername(Socket),
io:format("Peer: ~p~n", [IP]),
{next_state, 'WAIT_FOR_DATA', State#state{socket=Socket, addr=IP}, ?TIMEOUT};
'WAIT_FOR_SOCKET'(Other, State) ->
error_logger:error_msg("State: 'WAIT_FOR_SOCKET'. Unexpected message: ~p\n", [Other]),
{next_state, 'WAIT_FOR_SOCKET', State}.
'WAIT_FOR_DATA'({data, Data}, #state{socket = S, client=ClientPid, packet_buffer=PacketBuffer} = State) ->
CombinedData = <<PacketBuffer/binary, Data/binary>>,
case mc:handle_data(CombinedData) of
{more, PartialData} ->
{next_state, 'WAIT_FOR_DATA', State#state{packet_buffer = PartialData}, ?TIMEOUT};
{done, DecodedPacket, Rest} ->
case mc:handle_packet(ClientPid, DecodedPacket) of
none -> ok;
Reply -> ok = gen_tcp:send(S, Reply), ok
end,
'WAIT_FOR_DATA'({data, Rest}, State#state{packet_buffer = <<>>})
end;
'WAIT_FOR_DATA'(timeout, State) ->
io:format("Timeout: ~n", []),
error_logger:error_msg("~p Client connection timeout - closing.\n", [self()]),
{stop, normal, State};
'WAIT_FOR_DATA'(Data, State) ->
io:format("~p Ignoring data: ~p\n", [self(), Data]),
{next_state, 'WAIT_FOR_DATA', State, ?TIMEOUT}.
Returns : { next_state , NextStateName , NextStateData } |
{ next_state , NextStateName , NextStateData , Timeout } |
{ stop , , NewStateData }
@private
handle_event({packet, Data}, StateName, #state{socket = S, socket_log = Log} = StateData) ->
ok = gen_tcp:send(S, Data),
ok = file:write(Log, Data),
file:sync(Log),
{next_state, StateName, StateData};
handle_event(Event, StateName, StateData) ->
{stop, {StateName, undefined_event, Event}, StateData}.
Returns : { next_state , NextStateName , NextStateData } |
{ next_state , NextStateName , NextStateData , Timeout } |
{ reply , Reply , NextStateName , NextStateData } |
{ reply , Reply , NextStateName , NextStateData , Timeout } |
{ stop , , NewStateData } |
@private
handle_sync_event(Event, _From, StateName, StateData) ->
{stop, {StateName, undefined_event, Event}, StateData}.
Returns : { next_state , NextStateName , NextStateData } |
{ next_state , NextStateName , NextStateData , Timeout } |
{ stop , , NewStateData }
@private
handle_info({tcp, Socket, Bin}, StateName, #state{socket=Socket} = StateData) ->
inet:setopts(Socket, [{active, once}]),
?MODULE:StateName({data, Bin}, StateData);
handle_info({tcp_closed, Socket}, _StateName,
#state{socket=Socket, addr=Addr} = StateData) ->
error_logger:info_msg("~p Client ~p disconnected.\n", [self(), Addr]),
{stop, normal, StateData};
handle_info(_Info, StateName, StateData) ->
{noreply, StateName, StateData}.
Func : terminate/3
Purpose : Shutdown the fsm
@private
terminate(_Reason, _StateName, #state{socket=Socket, socket_log = Log}) ->
(catch gen_tcp:close(Socket)),
file:close(Log),
ok.
Returns : { ok , NewState , NewStateData }
@private
code_change(_OldVsn, StateName, StateData, _Extra) ->
{ok, StateName, StateData}.
|
ba1e962adc7c9f2db5edb900c03b02d5904092264c2717bdd5919dd3893ce00b | input-output-hk/ouroboros-network | Node.hs | # LANGUAGE DuplicateRecordFields #
# LANGUAGE NamedFieldPuns #
{-# LANGUAGE ScopedTypeVariables #-}
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
# OPTIONS_GHC -Wno - orphans #
module Ouroboros.Consensus.Byron.Node (
PBftSignatureThreshold (..)
, ProtocolParamsByron (..)
, byronBlockForging
, defaultPBftSignatureThreshold
, mkByronConfig
, protocolClientInfoByron
, protocolInfoByron
-- * Secrets
, ByronLeaderCredentials (..)
, ByronLeaderCredentialsError
, mkByronLeaderCredentials
, mkPBftCanBeLeader
) where
import Control.Monad.Except
import Data.Coerce (coerce)
import Data.Maybe
import Data.Text (Text)
import Data.Void (Void)
import qualified Cardano.Chain.Delegation as Delegation
import qualified Cardano.Chain.Genesis as Genesis
import Cardano.Chain.ProtocolConstants (kEpochSlots)
import Cardano.Chain.Slotting (EpochSlots (..))
import qualified Cardano.Chain.Update as Update
import qualified Cardano.Crypto as Crypto
import Ouroboros.Network.Magic (NetworkMagic (..))
import Ouroboros.Consensus.Block
import Ouroboros.Consensus.BlockchainTime (SystemStart (..))
import Ouroboros.Consensus.Config
import Ouroboros.Consensus.Config.SupportsNode
import Ouroboros.Consensus.HeaderValidation
import Ouroboros.Consensus.Ledger.Abstract
import Ouroboros.Consensus.Ledger.Extended
import qualified Ouroboros.Consensus.Mempool as Mempool
import Ouroboros.Consensus.Node.InitStorage
import Ouroboros.Consensus.Node.ProtocolInfo
import Ouroboros.Consensus.Node.Run
import Ouroboros.Consensus.NodeId (CoreNodeId)
import Ouroboros.Consensus.Protocol.Abstract
import Ouroboros.Consensus.Protocol.PBFT
import qualified Ouroboros.Consensus.Protocol.PBFT.State as S
import Ouroboros.Consensus.Storage.ChainDB.Init (InitChainDB (..))
import Ouroboros.Consensus.Storage.ImmutableDB (simpleChunkInfo)
import Ouroboros.Consensus.Util ((....:))
import Ouroboros.Consensus.Byron.Crypto.DSIGN
import Ouroboros.Consensus.Byron.Ledger
import Ouroboros.Consensus.Byron.Ledger.Conversions
import Ouroboros.Consensus.Byron.Ledger.Inspect ()
import Ouroboros.Consensus.Byron.Node.Serialisation ()
import Ouroboros.Consensus.Byron.Protocol
{-------------------------------------------------------------------------------
Credentials
-------------------------------------------------------------------------------}
| Credentials needed to produce blocks in the era .
data ByronLeaderCredentials = ByronLeaderCredentials {
blcSignKey :: Crypto.SigningKey
, blcDlgCert :: Delegation.Certificate
-- | Only core nodes can produce blocks. The 'CoreNodeId' is used to
-- determine the order (round-robin) in which core nodes produce blocks.
, blcCoreNodeId :: CoreNodeId
-- | Identifier for this set of credentials.
--
-- Useful when the node is running with multiple sets of credentials.
, blcLabel :: Text
}
deriving (Show)
-- | Make the 'ByronLeaderCredentials', with a couple sanity checks:
--
-- * That the block signing key and the delegation certificate match.
* That the delegation certificate does correspond to one of the genesis
-- keys from the genesis file.
--
mkByronLeaderCredentials ::
Genesis.Config
-> Crypto.SigningKey
-> Delegation.Certificate
-> Text
-> Either ByronLeaderCredentialsError ByronLeaderCredentials
mkByronLeaderCredentials gc sk cert lbl = do
guard (Delegation.delegateVK cert == Crypto.toVerification sk)
?! NodeSigningKeyDoesNotMatchDelegationCertificate
let vkGenesis = Delegation.issuerVK cert
nid <- genesisKeyCoreNodeId gc (VerKeyByronDSIGN vkGenesis)
?! DelegationCertificateNotFromGenesisKey
return ByronLeaderCredentials {
blcSignKey = sk
, blcDlgCert = cert
, blcCoreNodeId = nid
, blcLabel = lbl
}
where
(?!) :: Maybe a -> e -> Either e a
Just x ?! _ = Right x
Nothing ?! e = Left e
data ByronLeaderCredentialsError =
NodeSigningKeyDoesNotMatchDelegationCertificate
| DelegationCertificateNotFromGenesisKey
deriving (Eq, Show)
------------------------------------------------------------------------------
BlockForging
------------------------------------------------------------------------------
BlockForging
-------------------------------------------------------------------------------}
type instance CannotForge ByronBlock = PBftCannotForge PBftByronCrypto
type instance ForgeStateInfo ByronBlock = ()
type instance ForgeStateUpdateError ByronBlock = Void
byronBlockForging
:: Monad m
=> Mempool.TxOverrides ByronBlock
-> ByronLeaderCredentials
-> BlockForging m ByronBlock
byronBlockForging maxTxCapacityOverrides creds = BlockForging {
forgeLabel = blcLabel creds
, canBeLeader
, updateForgeState = \_ _ _ -> return $ ForgeStateUpdated ()
, checkCanForge = \cfg slot tickedPBftState _isLeader () ->
pbftCheckCanForge
(configConsensus cfg)
canBeLeader
slot
tickedPBftState
, forgeBlock = \cfg -> return ....: forgeByronBlock cfg maxTxCapacityOverrides
}
where
canBeLeader = mkPBftCanBeLeader creds
mkPBftCanBeLeader :: ByronLeaderCredentials -> CanBeLeader (PBft PBftByronCrypto)
mkPBftCanBeLeader (ByronLeaderCredentials sk cert nid _) = PBftCanBeLeader {
pbftCanBeLeaderCoreNodeId = nid
, pbftCanBeLeaderSignKey = SignKeyByronDSIGN sk
, pbftCanBeLeaderDlgCert = cert
}
------------------------------------------------------------------------------
ProtocolInfo
------------------------------------------------------------------------------
ProtocolInfo
-------------------------------------------------------------------------------}
| See chapter 4.1 of
-- -ledger-specs/byronChainSpec/latest/download-by-type/doc-pdf/blockchain-spec
defaultPBftSignatureThreshold :: PBftSignatureThreshold
defaultPBftSignatureThreshold = PBftSignatureThreshold 0.22
| Parameters needed to run
data ProtocolParamsByron = ProtocolParamsByron {
byronGenesis :: Genesis.Config
, byronPbftSignatureThreshold :: Maybe PBftSignatureThreshold
, byronProtocolVersion :: Update.ProtocolVersion
, byronSoftwareVersion :: Update.SoftwareVersion
, byronLeaderCredentials :: Maybe ByronLeaderCredentials
, byronMaxTxCapacityOverrides :: Mempool.TxOverrides ByronBlock
}
protocolInfoByron ::
forall m. Monad m
=> ProtocolParamsByron
-> ProtocolInfo m ByronBlock
protocolInfoByron ProtocolParamsByron {
byronGenesis = genesisConfig
, byronPbftSignatureThreshold = mSigThresh
, byronProtocolVersion = pVer
, byronSoftwareVersion = sVer
, byronLeaderCredentials = mLeaderCreds
, byronMaxTxCapacityOverrides = maxTxCapacityOverrides
} =
ProtocolInfo {
pInfoConfig = TopLevelConfig {
topLevelConfigProtocol = PBftConfig {
pbftParams = byronPBftParams compactedGenesisConfig mSigThresh
}
, topLevelConfigLedger = compactedGenesisConfig
, topLevelConfigBlock = blockConfig
, topLevelConfigCodec = mkByronCodecConfig compactedGenesisConfig
, topLevelConfigStorage = ByronStorageConfig blockConfig
}
, pInfoInitLedger = ExtLedgerState {
-- Important: don't pass the compacted genesis config to
' initByronLedgerState ' , it needs the full one , including the AVVM
-- balances.
ledgerState = initByronLedgerState genesisConfig Nothing
, headerState = genesisHeaderState S.empty
}
, pInfoBlockForging =
return
$ fmap (byronBlockForging maxTxCapacityOverrides)
$ maybeToList mLeaderCreds
}
where
compactedGenesisConfig = compactGenesisConfig genesisConfig
blockConfig = mkByronConfig compactedGenesisConfig pVer sVer
protocolClientInfoByron :: EpochSlots -> ProtocolClientInfo ByronBlock
protocolClientInfoByron epochSlots =
ProtocolClientInfo {
pClientInfoCodecConfig = ByronCodecConfig {
getByronEpochSlots = epochSlots
}
}
byronPBftParams :: Genesis.Config -> Maybe PBftSignatureThreshold -> PBftParams
byronPBftParams cfg threshold = PBftParams {
pbftSecurityParam = genesisSecurityParam cfg
, pbftNumNodes = genesisNumCoreNodes cfg
, pbftSignatureThreshold = fromMaybe defaultPBftSignatureThreshold threshold
}
mkByronConfig :: Genesis.Config
-> Update.ProtocolVersion
-> Update.SoftwareVersion
-> BlockConfig ByronBlock
mkByronConfig genesisConfig pVer sVer = ByronConfig {
byronGenesisConfig = genesisConfig
, byronProtocolVersion = pVer
, byronSoftwareVersion = sVer
}
{-------------------------------------------------------------------------------
ConfigSupportsNode instance
-------------------------------------------------------------------------------}
instance ConfigSupportsNode ByronBlock where
getSystemStart =
SystemStart
. Genesis.gdStartTime
. extractGenesisData
getNetworkMagic =
NetworkMagic
. Crypto.unProtocolMagicId
. Genesis.gdProtocolMagicId
. extractGenesisData
extractGenesisData :: BlockConfig ByronBlock -> Genesis.GenesisData
extractGenesisData = Genesis.configGenesisData . byronGenesisConfig
{-------------------------------------------------------------------------------
NodeInitStorage instance
-------------------------------------------------------------------------------}
instance NodeInitStorage ByronBlock where
The epoch size is fixed and can be derived from @k@ by the ledger
-- ('kEpochSlots').
nodeImmutableDbChunkInfo =
simpleChunkInfo
. (coerce :: EpochSlots -> EpochSize)
. kEpochSlots
. Genesis.gdK
. extractGenesisData
. getByronBlockConfig
-- If the current chain is empty, produce a genesis EBB and add it to the
ChainDB . Only an EBB can have Genesis (= empty chain ) as its predecessor .
nodeInitChainDB cfg InitChainDB { getCurrentLedger, addBlock } = do
tip <- ledgerTipPoint <$> getCurrentLedger
case tip of
BlockPoint {} -> return ()
GenesisPoint -> addBlock genesisEBB
where
genesisEBB =
forgeEBB (getByronBlockConfig cfg) (SlotNo 0) (BlockNo 0) GenesisHash
nodeCheckIntegrity = verifyBlockIntegrity . getByronBlockConfig
------------------------------------------------------------------------------
RunNode instance
------------------------------------------------------------------------------
RunNode instance
-------------------------------------------------------------------------------}
instance BlockSupportsMetrics ByronBlock where
isSelfIssued = isSelfIssuedConstUnknown
instance RunNode ByronBlock
| null | https://raw.githubusercontent.com/input-output-hk/ouroboros-network/ebb34fa4d1ba1357e3b803a49f750d2ae3df19e5/ouroboros-consensus-byron/src/Ouroboros/Consensus/Byron/Node.hs | haskell | # LANGUAGE ScopedTypeVariables #
* Secrets
------------------------------------------------------------------------------
Credentials
------------------------------------------------------------------------------
| Only core nodes can produce blocks. The 'CoreNodeId' is used to
determine the order (round-robin) in which core nodes produce blocks.
| Identifier for this set of credentials.
Useful when the node is running with multiple sets of credentials.
| Make the 'ByronLeaderCredentials', with a couple sanity checks:
* That the block signing key and the delegation certificate match.
keys from the genesis file.
----------------------------------------------------------------------------
----------------------------------------------------------------------------
-----------------------------------------------------------------------------}
----------------------------------------------------------------------------
----------------------------------------------------------------------------
-----------------------------------------------------------------------------}
-ledger-specs/byronChainSpec/latest/download-by-type/doc-pdf/blockchain-spec
Important: don't pass the compacted genesis config to
balances.
------------------------------------------------------------------------------
ConfigSupportsNode instance
------------------------------------------------------------------------------
------------------------------------------------------------------------------
NodeInitStorage instance
------------------------------------------------------------------------------
('kEpochSlots').
If the current chain is empty, produce a genesis EBB and add it to the
----------------------------------------------------------------------------
----------------------------------------------------------------------------
-----------------------------------------------------------------------------} | # LANGUAGE DuplicateRecordFields #
# LANGUAGE NamedFieldPuns #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
# OPTIONS_GHC -Wno - orphans #
module Ouroboros.Consensus.Byron.Node (
PBftSignatureThreshold (..)
, ProtocolParamsByron (..)
, byronBlockForging
, defaultPBftSignatureThreshold
, mkByronConfig
, protocolClientInfoByron
, protocolInfoByron
, ByronLeaderCredentials (..)
, ByronLeaderCredentialsError
, mkByronLeaderCredentials
, mkPBftCanBeLeader
) where
import Control.Monad.Except
import Data.Coerce (coerce)
import Data.Maybe
import Data.Text (Text)
import Data.Void (Void)
import qualified Cardano.Chain.Delegation as Delegation
import qualified Cardano.Chain.Genesis as Genesis
import Cardano.Chain.ProtocolConstants (kEpochSlots)
import Cardano.Chain.Slotting (EpochSlots (..))
import qualified Cardano.Chain.Update as Update
import qualified Cardano.Crypto as Crypto
import Ouroboros.Network.Magic (NetworkMagic (..))
import Ouroboros.Consensus.Block
import Ouroboros.Consensus.BlockchainTime (SystemStart (..))
import Ouroboros.Consensus.Config
import Ouroboros.Consensus.Config.SupportsNode
import Ouroboros.Consensus.HeaderValidation
import Ouroboros.Consensus.Ledger.Abstract
import Ouroboros.Consensus.Ledger.Extended
import qualified Ouroboros.Consensus.Mempool as Mempool
import Ouroboros.Consensus.Node.InitStorage
import Ouroboros.Consensus.Node.ProtocolInfo
import Ouroboros.Consensus.Node.Run
import Ouroboros.Consensus.NodeId (CoreNodeId)
import Ouroboros.Consensus.Protocol.Abstract
import Ouroboros.Consensus.Protocol.PBFT
import qualified Ouroboros.Consensus.Protocol.PBFT.State as S
import Ouroboros.Consensus.Storage.ChainDB.Init (InitChainDB (..))
import Ouroboros.Consensus.Storage.ImmutableDB (simpleChunkInfo)
import Ouroboros.Consensus.Util ((....:))
import Ouroboros.Consensus.Byron.Crypto.DSIGN
import Ouroboros.Consensus.Byron.Ledger
import Ouroboros.Consensus.Byron.Ledger.Conversions
import Ouroboros.Consensus.Byron.Ledger.Inspect ()
import Ouroboros.Consensus.Byron.Node.Serialisation ()
import Ouroboros.Consensus.Byron.Protocol
-- | Credentials needed to produce blocks in the Byron era.
data ByronLeaderCredentials = ByronLeaderCredentials {
      blcSignKey    :: Crypto.SigningKey      -- ^ Key used to sign forged blocks
    , blcDlgCert    :: Delegation.Certificate -- ^ Certificate delegating rights from a genesis key to 'blcSignKey'
    , blcCoreNodeId :: CoreNodeId             -- ^ Core node id of the genesis key that issued the certificate
    , blcLabel      :: Text                   -- ^ Used as the 'forgeLabel' of the resulting 'BlockForging'
    }
  deriving (Show)
-- | Construct 'ByronLeaderCredentials', checking among other things:
--
--   * That the delegation certificate does correspond to one of the genesis
--     keys.
mkByronLeaderCredentials ::
     Genesis.Config
  -> Crypto.SigningKey
  -> Delegation.Certificate
  -> Text
  -> Either ByronLeaderCredentialsError ByronLeaderCredentials
mkByronLeaderCredentials gc sk cert lbl = do
    -- The certificate must delegate to the verification key of the given
    -- signing key.
    guard (Delegation.delegateVK cert == Crypto.toVerification sk)
      ?! NodeSigningKeyDoesNotMatchDelegationCertificate

    -- The certificate's issuer must be one of the genesis keys; its core
    -- node id is recorded in the credentials.
    let vkGenesis = Delegation.issuerVK cert
    nid <- genesisKeyCoreNodeId gc (VerKeyByronDSIGN vkGenesis)
             ?! DelegationCertificateNotFromGenesisKey

    return ByronLeaderCredentials {
        blcSignKey    = sk
      , blcDlgCert    = cert
      , blcCoreNodeId = nid
      , blcLabel      = lbl
      }
  where
    -- Turn a 'Maybe' into an 'Either', using the given error for 'Nothing'.
    (?!) :: Maybe a -> e -> Either e a
    Just x  ?! _ = Right x
    Nothing ?! e = Left e
-- | Ways in which 'mkByronLeaderCredentials' can reject its inputs.
data ByronLeaderCredentialsError =
       -- | The signing key does not match the delegate key of the certificate.
       NodeSigningKeyDoesNotMatchDelegationCertificate
       -- | The certificate's issuer is not one of the genesis keys.
     | DelegationCertificateNotFromGenesisKey
  deriving (Eq, Show)
{-------------------------------------------------------------------------------
  BlockForging
-------------------------------------------------------------------------------}
-- Forging-related type family instances for Byron: forging can only fail
-- for PBFT-specific reasons, there is no forge state, and hence no way for
-- a forge-state update to fail.
type instance CannotForge           ByronBlock = PBftCannotForge PBftByronCrypto
type instance ForgeStateInfo        ByronBlock = ()
type instance ForgeStateUpdateError ByronBlock = Void
-- | Create the 'BlockForging' record for Byron, signing with the given
-- credentials and applying the given transaction-capacity overrides when
-- filling blocks from the mempool.
byronBlockForging
  :: Monad m
  => Mempool.TxOverrides ByronBlock
  -> ByronLeaderCredentials
  -> BlockForging m ByronBlock
byronBlockForging maxTxCapacityOverrides creds = BlockForging {
      forgeLabel       = blcLabel creds
    , canBeLeader
      -- Byron has no forge state ('ForgeStateInfo' is '()'), so the update
      -- is trivial.
    , updateForgeState = \_ _ _ -> return $ ForgeStateUpdated ()
    , checkCanForge    = \cfg slot tickedPBftState _isLeader () ->
                           pbftCheckCanForge
                             (configConsensus cfg)
                             canBeLeader
                             slot
                             tickedPBftState
    , forgeBlock       = \cfg -> return ....: forgeByronBlock cfg maxTxCapacityOverrides
    }
  where
    canBeLeader = mkPBftCanBeLeader creds
-- | Derive the PBFT 'CanBeLeader' evidence from validated credentials.
mkPBftCanBeLeader :: ByronLeaderCredentials -> CanBeLeader (PBft PBftByronCrypto)
mkPBftCanBeLeader (ByronLeaderCredentials sk cert nid _) = PBftCanBeLeader {
      pbftCanBeLeaderCoreNodeId = nid
    , pbftCanBeLeaderSignKey    = SignKeyByronDSIGN sk
    , pbftCanBeLeaderDlgCert    = cert
    }
{-------------------------------------------------------------------------------
  ProtocolInfo
-------------------------------------------------------------------------------}

-- | See chapter 4.1 of the Byron chain specification.
-- | Default signature-window threshold for the PBFT protocol (t = 0.22).
defaultPBftSignatureThreshold :: PBftSignatureThreshold
defaultPBftSignatureThreshold = PBftSignatureThreshold 0.22
-- | Parameters needed to run Byron.
data ProtocolParamsByron = ProtocolParamsByron {
      byronGenesis                :: Genesis.Config
      -- ^ Full (non-compacted) genesis config, including AVVM balances
    , byronPbftSignatureThreshold :: Maybe PBftSignatureThreshold
      -- ^ 'Nothing' falls back to 'defaultPBftSignatureThreshold'
    , byronProtocolVersion        :: Update.ProtocolVersion
    , byronSoftwareVersion        :: Update.SoftwareVersion
    , byronLeaderCredentials      :: Maybe ByronLeaderCredentials
      -- ^ 'Just' iff this node may forge blocks
    , byronMaxTxCapacityOverrides :: Mempool.TxOverrides ByronBlock
    }
-- | Build the 'ProtocolInfo' for running a Byron node.
--
-- All config components except the initial ledger state use the /compacted/
-- genesis config to save memory.
protocolInfoByron ::
     forall m. Monad m
  => ProtocolParamsByron
  -> ProtocolInfo m ByronBlock
protocolInfoByron ProtocolParamsByron {
                      byronGenesis                = genesisConfig
                    , byronPbftSignatureThreshold = mSigThresh
                    , byronProtocolVersion        = pVer
                    , byronSoftwareVersion        = sVer
                    , byronLeaderCredentials      = mLeaderCreds
                    , byronMaxTxCapacityOverrides = maxTxCapacityOverrides
                    } =
    ProtocolInfo {
        pInfoConfig = TopLevelConfig {
            topLevelConfigProtocol = PBftConfig {
                pbftParams = byronPBftParams compactedGenesisConfig mSigThresh
              }
          , topLevelConfigLedger  = compactedGenesisConfig
          , topLevelConfigBlock   = blockConfig
          , topLevelConfigCodec   = mkByronCodecConfig compactedGenesisConfig
          , topLevelConfigStorage = ByronStorageConfig blockConfig
          }
      , pInfoInitLedger = ExtLedgerState {
            -- Important: don't pass the compacted genesis config to
            -- 'initByronLedgerState'; it needs the full one, including the
            -- AVVM balances.
            ledgerState = initByronLedgerState genesisConfig Nothing
          , headerState = genesisHeaderState S.empty
          }
      , pInfoBlockForging =
          -- Forge only when credentials were supplied.
          return
            $ fmap (byronBlockForging maxTxCapacityOverrides)
            $ maybeToList mLeaderCreds
      }
  where
    compactedGenesisConfig = compactGenesisConfig genesisConfig

    blockConfig = mkByronConfig compactedGenesisConfig pVer sVer
-- | Client-side (non-forging) protocol info: for Byron the codec config
-- only needs the number of slots per epoch.
protocolClientInfoByron :: EpochSlots -> ProtocolClientInfo ByronBlock
protocolClientInfoByron epochSlots =
    ProtocolClientInfo {
      pClientInfoCodecConfig = ByronCodecConfig {
          getByronEpochSlots = epochSlots
        }
    }
-- | Derive the PBFT parameters from the genesis config, falling back to
-- 'defaultPBftSignatureThreshold' when no threshold is supplied.
byronPBftParams :: Genesis.Config -> Maybe PBftSignatureThreshold -> PBftParams
byronPBftParams cfg threshold = PBftParams {
      pbftSecurityParam      = genesisSecurityParam cfg
    , pbftNumNodes           = genesisNumCoreNodes cfg
    , pbftSignatureThreshold = fromMaybe defaultPBftSignatureThreshold threshold
    }
-- | Assemble the static 'BlockConfig' for Byron from the genesis config
-- and the protocol/software versions to advertise in forged blocks.
mkByronConfig :: Genesis.Config
              -> Update.ProtocolVersion
              -> Update.SoftwareVersion
              -> BlockConfig ByronBlock
mkByronConfig genesisConfig pVer sVer = ByronConfig {
      byronGenesisConfig   = genesisConfig
    , byronProtocolVersion = pVer
    , byronSoftwareVersion = sVer
    }
instance ConfigSupportsNode ByronBlock where
  -- The system start comes straight from the genesis data.
  getSystemStart =
      SystemStart
    . Genesis.gdStartTime
    . extractGenesisData

  -- The network magic is the genesis protocol magic id.
  getNetworkMagic =
      NetworkMagic
    . Crypto.unProtocolMagicId
    . Genesis.gdProtocolMagicId
    . extractGenesisData
-- | Project the 'Genesis.GenesisData' out of a Byron 'BlockConfig'.
extractGenesisData :: BlockConfig ByronBlock -> Genesis.GenesisData
extractGenesisData = Genesis.configGenesisData . byronGenesisConfig
instance NodeInitStorage ByronBlock where
  -- The chunk size is fixed and can be derived from @k@ via the ledger
  -- ('kEpochSlots').
  nodeImmutableDbChunkInfo =
        simpleChunkInfo
      . (coerce :: EpochSlots -> EpochSize)
      . kEpochSlots
      . Genesis.gdK
      . extractGenesisData
      . getByronBlockConfig

  -- If the current chain is empty, produce a genesis EBB and add it to the
  -- ChainDB. Only an EBB can have Genesis (= empty chain) as its predecessor.
  nodeInitChainDB cfg InitChainDB { getCurrentLedger, addBlock } = do
      tip <- ledgerTipPoint <$> getCurrentLedger
      case tip of
        -- Chain is not empty: nothing to do.
        BlockPoint {} -> return ()
        GenesisPoint  -> addBlock genesisEBB
    where
      genesisEBB =
        forgeEBB (getByronBlockConfig cfg) (SlotNo 0) (BlockNo 0) GenesisHash

  nodeCheckIntegrity = verifyBlockIntegrity . getByronBlockConfig
{-------------------------------------------------------------------------------
  RunNode instance
-------------------------------------------------------------------------------}
instance BlockSupportsMetrics ByronBlock where
  -- Whether a block was issued by this node is reported as unknown.
  isSelfIssued = isSelfIssuedConstUnknown
-- Byron uses the default definitions for all 'RunNode' methods.
instance RunNode ByronBlock
{-# LANGUAGE CPP #-}
{- |
   Copyright  : Copyright (c) 2005-2011 John Goerzen
   License    : BSD3
-}
{- |
Definition of log formatter support
A few basic, and extendable formatters are defined.
Please see "System.Log.Logger" for extensive documentation on the
logging system.
-}
module System.Log.Formatter( LogFormatter
, nullFormatter
, simpleLogFormatter
, tfLogFormatter
, varFormatter
) where
import Data.List
import Control.Applicative ((<$>))
import Control.Concurrent (myThreadId)
#ifndef mingw32_HOST_OS
import System.Posix.Process (getProcessID)
#endif
#if MIN_VERSION_time(1,5,0)
import Data.Time.Format (defaultTimeLocale)
#else
import System.Locale (defaultTimeLocale)
#endif
import Data.Time (getZonedTime,getCurrentTime,formatTime)
import System.Log
-- | A LogFormatter is used to format log messages.  Note that it is
-- parameterized on the 'Handler' to allow the formatter to use information
-- specific to the handler (an example can be seen in the formatter used in
-- 'System.Log.Handler.Syslog').
-- The formatter runs in IO so substitutions can consult clocks, thread ids,
-- process ids, etc. (see 'varFormatter').
type LogFormatter a = a -- ^ The LogHandler that the passed message came from
                    -> LogRecord -- ^ The log message and priority
                    -> String -- ^ The logger name
                    -> IO String -- ^ The formatted log message
-- | Returns the passed message as is, ie. no formatting is done.
-- | Returns the passed message as is, i.e. no formatting is done.
nullFormatter :: LogFormatter a
nullFormatter _handler (_priority, message) _loggerName = return message
-- | Takes a format string, and returns a formatter that may be used to
-- format log messages. The format string may contain variables prefixed with
-- a $-sign which will be replaced at runtime with corresponding values. The
-- currently supported variables are:
--
-- * @$msg@ - The actual log message
--
-- * @$loggername@ - The name of the logger
--
-- * @$prio@ - The priority level of the message
--
-- * @$tid@ - The thread ID
--
-- * @$pid@ - Process ID (Not available on windows)
--
-- * @$time@ - The current time
--
-- * @$utcTime@ - The current time in UTC Time
-- | Substitute variables in the given format string using a fixed
-- @\"%F %X %Z\"@ timestamp format; see 'tfLogFormatter'.
simpleLogFormatter :: String -> LogFormatter a
simpleLogFormatter = tfLogFormatter "%F %X %Z"
| Like ' simpleLogFormatter ' but allow the time format to be specified in the first
-- parameter (this is passed to 'Date.Time.Format.formatTime')
-- | Format log records, rendering @$time@ (local) and @$utcTime@ with the
-- supplied time-format string; all other variables as in 'varFormatter'.
tfLogFormatter :: String -> String -> LogFormatter a
tfLogFormatter timeFormat format =
    varFormatter timeVars format
  where
    timeVars =
      [ ("time",    stamp <$> getZonedTime)
      , ("utcTime", stamp <$> getCurrentTime)
      ]
    stamp :: FormatTime t => t -> String
    stamp = formatTime defaultTimeLocale timeFormat
-- | An extensible formatter that allows new substitution /variables/ to be defined.
-- Each variable has an associated IO action that is used to produce the
-- string to substitute for the variable name. The predefined variables are the same
-- as for 'simpleLogFormatter' /excluding/ @$time@ and @$utcTime@.
varFormatter :: [(String, IO String)] -> String -> LogFormatter a
varFormatter vars format _h (prio,msg) loggername = do
    -- Caller-supplied variables come first in the list; since 'replaceVarM'
    -- uses the first matching key, they shadow the built-ins below.
    outmsg <- replaceVarM (vars++[("msg", return msg)
                                 ,("prio", return $ show prio)
                                 ,("loggername", return loggername)
                                 ,("tid", show <$> myThreadId)
#ifndef mingw32_HOST_OS
                                 ,("pid", show <$> getProcessID)
#endif
                                 ]
                          )
             format
    return outmsg
-- | Replace some '$' variables in a string with supplied values
replaceVarM :: [(String, IO String)] -- ^ A list of (variableName, action to get the replacement string) pairs
            -> String -- ^ String to perform substitution on
            -> IO String -- ^ Resulting string
replaceVarM _ [] = return []
-- On '$', try each variable name as a prefix of the rest of the string;
-- otherwise copy the character through unchanged.
replaceVarM keyVals (s:ss) | s=='$' = do (f,rest) <- replaceStart keyVals ss
                                         repRest <- replaceVarM keyVals rest
                                         return $ f ++ repRest
                           | otherwise = replaceVarM keyVals ss >>= return . (s:)
  where
    -- If no known variable name follows the '$', the '$' is kept literally.
    replaceStart []          str = return ("$",str)
    replaceStart ((k,v):kvs) str | k `isPrefixOf` str = do vs <- v
                                                           return (vs, drop (length k) str)
                                 | otherwise          = replaceStart kvs str
Definition of log formatter support
A few basic, and extendable formatters are defined.
Please see "System.Log.Logger" for extensive documentation on the
logging system.
'Handler' to allow the formatter to use information specific to the handler
^ The LogHandler that the passed message came from
^ The log message and priority
^ The logger name
^ The formatted log message
| Returns the passed message as is, ie. no formatting is done.
| Takes a format string, and returns a formatter that may be used to
format log messages. The format string may contain variables prefixed with
a $-sign which will be replaced at runtime with corresponding values. The
currently supported variables are:
* @$msg@ - The actual log message
* @$loggername@ - The name of the logger
* @$prio@ - The priority level of the message
* @$tid@ - The thread ID
* @$pid@ - Process ID (Not available on windows)
* @$time@ - The current time
parameter (this is passed to 'Date.Time.Format.formatTime')
| An extensible formatter that allows new substition /variables/ to be defined.
Each variable has an associated IO action that is used to produce the
string to substitute for the variable name. The predefined variables are the same
as for 'simpleLogFormatter' /excluding/ @$time@ and @$utcTime@.
| Replace some '$' variables in a string with supplied values
^ A list of (variableName, action to get the replacement string) pairs
^ String to perform substitution on
^ Resulting string | # LANGUAGE CPP #
Copyright ( c ) 2005 - 2011 :
Copyright (c) 2005-2011 John Goerzen
License: BSD3
-}
module System.Log.Formatter( LogFormatter
, nullFormatter
, simpleLogFormatter
, tfLogFormatter
, varFormatter
) where
import Data.List
import Control.Applicative ((<$>))
import Control.Concurrent (myThreadId)
#ifndef mingw32_HOST_OS
import System.Posix.Process (getProcessID)
#endif
#if MIN_VERSION_time(1,5,0)
import Data.Time.Format (defaultTimeLocale)
#else
import System.Locale (defaultTimeLocale)
#endif
import Data.Time (getZonedTime,getCurrentTime,formatTime)
import System.Log
| A LogFormatter is used to format log messages . Note that it is paramterized on the
( an example of can be seen in the formatter used in ' System . Log . Handler . ' )
nullFormatter :: LogFormatter a
nullFormatter _ (_,msg) _ = return msg
* @$utcTime@ - The current time in UTC Time
simpleLogFormatter :: String -> LogFormatter a
simpleLogFormatter format h (prio, msg) loggername =
tfLogFormatter "%F %X %Z" format h (prio,msg) loggername
| Like ' simpleLogFormatter ' but allow the time format to be specified in the first
tfLogFormatter :: String -> String -> LogFormatter a
tfLogFormatter timeFormat format = do
varFormatter [("time", formatTime defaultTimeLocale timeFormat <$> getZonedTime)
,("utcTime", formatTime defaultTimeLocale timeFormat <$> getCurrentTime)
]
format
varFormatter :: [(String, IO String)] -> String -> LogFormatter a
varFormatter vars format _h (prio,msg) loggername = do
outmsg <- replaceVarM (vars++[("msg", return msg)
,("prio", return $ show prio)
,("loggername", return loggername)
,("tid", show <$> myThreadId)
#ifndef mingw32_HOST_OS
,("pid", show <$> getProcessID)
#endif
]
)
format
return outmsg
replaceVarM _ [] = return []
replaceVarM keyVals (s:ss) | s=='$' = do (f,rest) <- replaceStart keyVals ss
repRest <- replaceVarM keyVals rest
return $ f ++ repRest
| otherwise = replaceVarM keyVals ss >>= return . (s:)
where
replaceStart [] str = return ("$",str)
replaceStart ((k,v):kvs) str | k `isPrefixOf` str = do vs <- v
return (vs, drop (length k) str)
| otherwise = replaceStart kvs str
|
15fd21fc4ab55db7c44fd1f15ef0470609e10da6087190c1ce4e6ff113bcbf0a | mewa/clojure-k8s | extensions_v_beta_.clj | (ns kubernetes.api.extensions-v-beta-
(:require [kubernetes.core :refer [call-api check-required-params with-collection-format]])
(:import (java.io File)))
(defn create-extensions-v1beta1-namespaced-daemon-set-with-http-info
  "Create a DaemonSet (POST /apis/extensions/v1beta1/namespaces/{namespace}/daemonsets).
  Returns the full HTTP response map."
  ([namespace body ] (create-extensions-v1beta1-namespaced-daemon-set-with-http-info namespace body nil))
  ([namespace body {:keys [pretty ]}]
   (check-required-params namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/daemonsets" :post
             {:path-params   {"namespace" namespace }
              :header-params {}
              :query-params  {"pretty" pretty }
              :form-params   {}
              :body-param    body
              :content-types ["*/*"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))
(defn create-extensions-v1beta1-namespaced-daemon-set
  "Create a DaemonSet; returns only the decoded response body (:data)."
  ([namespace body ] (create-extensions-v1beta1-namespaced-daemon-set namespace body nil))
  ([namespace body optional-params]
   (:data (create-extensions-v1beta1-namespaced-daemon-set-with-http-info namespace body optional-params))))
(defn create-extensions-v1beta1-namespaced-deployment-with-http-info
  "Create a Deployment (POST /apis/extensions/v1beta1/namespaces/{namespace}/deployments).
  Returns the full HTTP response map."
  ([namespace body ] (create-extensions-v1beta1-namespaced-deployment-with-http-info namespace body nil))
  ([namespace body {:keys [pretty ]}]
   (check-required-params namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/deployments" :post
             {:path-params   {"namespace" namespace }
              :header-params {}
              :query-params  {"pretty" pretty }
              :form-params   {}
              :body-param    body
              :content-types ["*/*"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))
(defn create-extensions-v1beta1-namespaced-deployment
  "Create a Deployment; returns only the decoded response body (:data)."
  ([namespace body ] (create-extensions-v1beta1-namespaced-deployment namespace body nil))
  ([namespace body optional-params]
   (:data (create-extensions-v1beta1-namespaced-deployment-with-http-info namespace body optional-params))))
(defn create-extensions-v1beta1-namespaced-deployment-rollback-with-http-info
  "Create a rollback of the named Deployment
  (POST /apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}/rollback).
  Returns the full HTTP response map."
  ([name namespace body ] (create-extensions-v1beta1-namespaced-deployment-rollback-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty ]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}/rollback" :post
             {:path-params   {"name" name "namespace" namespace }
              :header-params {}
              :query-params  {"pretty" pretty }
              :form-params   {}
              :body-param    body
              :content-types ["*/*"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))
(defn create-extensions-v1beta1-namespaced-deployment-rollback
  "Create a rollback of the named Deployment; returns only the decoded response body (:data)."
  ([name namespace body ] (create-extensions-v1beta1-namespaced-deployment-rollback name namespace body nil))
  ([name namespace body optional-params]
   (:data (create-extensions-v1beta1-namespaced-deployment-rollback-with-http-info name namespace body optional-params))))
(defn create-extensions-v1beta1-namespaced-ingress-with-http-info
  "Create an Ingress (POST /apis/extensions/v1beta1/namespaces/{namespace}/ingresses).
  Returns the full HTTP response map."
  ([namespace body ] (create-extensions-v1beta1-namespaced-ingress-with-http-info namespace body nil))
  ([namespace body {:keys [pretty ]}]
   (check-required-params namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/ingresses" :post
             {:path-params   {"namespace" namespace }
              :header-params {}
              :query-params  {"pretty" pretty }
              :form-params   {}
              :body-param    body
              :content-types ["*/*"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))
(defn create-extensions-v1beta1-namespaced-ingress
  "Create an Ingress; returns only the decoded response body (:data)."
  ([namespace body ] (create-extensions-v1beta1-namespaced-ingress namespace body nil))
  ([namespace body optional-params]
   (:data (create-extensions-v1beta1-namespaced-ingress-with-http-info namespace body optional-params))))
(defn create-extensions-v1beta1-namespaced-network-policy-with-http-info
  "Create a NetworkPolicy (POST /apis/extensions/v1beta1/namespaces/{namespace}/networkpolicies).
  Returns the full HTTP response map."
  ([namespace body ] (create-extensions-v1beta1-namespaced-network-policy-with-http-info namespace body nil))
  ([namespace body {:keys [pretty ]}]
   (check-required-params namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/networkpolicies" :post
             {:path-params   {"namespace" namespace }
              :header-params {}
              :query-params  {"pretty" pretty }
              :form-params   {}
              :body-param    body
              :content-types ["*/*"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))
(defn create-extensions-v1beta1-namespaced-network-policy
  "Create a NetworkPolicy; returns only the decoded response body (:data)."
  ([namespace body ] (create-extensions-v1beta1-namespaced-network-policy namespace body nil))
  ([namespace body optional-params]
   (:data (create-extensions-v1beta1-namespaced-network-policy-with-http-info namespace body optional-params))))
(defn create-extensions-v1beta1-namespaced-replica-set-with-http-info
  "Create a ReplicaSet (POST /apis/extensions/v1beta1/namespaces/{namespace}/replicasets).
  Returns the full HTTP response map."
  ([namespace body ] (create-extensions-v1beta1-namespaced-replica-set-with-http-info namespace body nil))
  ([namespace body {:keys [pretty ]}]
   (check-required-params namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/replicasets" :post
             {:path-params   {"namespace" namespace }
              :header-params {}
              :query-params  {"pretty" pretty }
              :form-params   {}
              :body-param    body
              :content-types ["*/*"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))
(defn create-extensions-v1beta1-namespaced-replica-set
  "Create a ReplicaSet; returns only the decoded response body (:data)."
  ([namespace body ] (create-extensions-v1beta1-namespaced-replica-set namespace body nil))
  ([namespace body optional-params]
   (:data (create-extensions-v1beta1-namespaced-replica-set-with-http-info namespace body optional-params))))
(defn create-extensions-v1beta1-pod-security-policy-with-http-info
  "Create a cluster-scoped PodSecurityPolicy (POST /apis/extensions/v1beta1/podsecuritypolicies).
  Returns the full HTTP response map."
  ([body ] (create-extensions-v1beta1-pod-security-policy-with-http-info body nil))
  ([body {:keys [pretty ]}]
   (check-required-params body)
   (call-api "/apis/extensions/v1beta1/podsecuritypolicies" :post
             {:path-params   {}
              :header-params {}
              :query-params  {"pretty" pretty }
              :form-params   {}
              :body-param    body
              :content-types ["*/*"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))
(defn create-extensions-v1beta1-pod-security-policy
  "Create a PodSecurityPolicy; returns only the decoded response body (:data)."
  ([body ] (create-extensions-v1beta1-pod-security-policy body nil))
  ([body optional-params]
   (:data (create-extensions-v1beta1-pod-security-policy-with-http-info body optional-params))))
(defn delete-extensions-v1beta1-collection-namespaced-daemon-set-with-http-info
  "Delete a collection of DaemonSets in the namespace, optionally filtered by
  label/field selectors. Returns the full HTTP response map."
  ([namespace ] (delete-extensions-v1beta1-collection-namespaced-daemon-set-with-http-info namespace nil))
  ([namespace {:keys [pretty continue field-selector include-uninitialized label-selector limit resource-version timeout-seconds watch ]}]
   (check-required-params namespace)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/daemonsets" :delete
             {:path-params   {"namespace" namespace }
              :header-params {}
              :query-params  {"pretty" pretty "continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params   {}
              :content-types ["*/*"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))
(defn delete-extensions-v1beta1-collection-namespaced-daemon-set
  "Delete a collection of DaemonSets; returns only the decoded response body (:data)."
  ([namespace ] (delete-extensions-v1beta1-collection-namespaced-daemon-set namespace nil))
  ([namespace optional-params]
   (:data (delete-extensions-v1beta1-collection-namespaced-daemon-set-with-http-info namespace optional-params))))
(defn delete-extensions-v1beta1-collection-namespaced-deployment-with-http-info
  "Delete a collection of Deployments in the namespace, optionally filtered by
  label/field selectors. Returns the full HTTP response map."
  ([namespace ] (delete-extensions-v1beta1-collection-namespaced-deployment-with-http-info namespace nil))
  ([namespace {:keys [pretty continue field-selector include-uninitialized label-selector limit resource-version timeout-seconds watch ]}]
   (check-required-params namespace)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/deployments" :delete
             {:path-params   {"namespace" namespace }
              :header-params {}
              :query-params  {"pretty" pretty "continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params   {}
              :content-types ["*/*"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))
(defn delete-extensions-v1beta1-collection-namespaced-deployment
  "Delete a collection of Deployments; returns only the decoded response body (:data)."
  ([namespace ] (delete-extensions-v1beta1-collection-namespaced-deployment namespace nil))
  ([namespace optional-params]
   (:data (delete-extensions-v1beta1-collection-namespaced-deployment-with-http-info namespace optional-params))))
(defn delete-extensions-v1beta1-collection-namespaced-ingress-with-http-info
  "Delete a collection of Ingresses in the namespace, optionally filtered by
  label/field selectors. Returns the full HTTP response map."
  ([namespace ] (delete-extensions-v1beta1-collection-namespaced-ingress-with-http-info namespace nil))
  ([namespace {:keys [pretty continue field-selector include-uninitialized label-selector limit resource-version timeout-seconds watch ]}]
   (check-required-params namespace)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/ingresses" :delete
             {:path-params   {"namespace" namespace }
              :header-params {}
              :query-params  {"pretty" pretty "continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params   {}
              :content-types ["*/*"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))
(defn delete-extensions-v1beta1-collection-namespaced-ingress
  "Delete a collection of Ingresses; returns only the decoded response body (:data)."
  ([namespace ] (delete-extensions-v1beta1-collection-namespaced-ingress namespace nil))
  ([namespace optional-params]
   (:data (delete-extensions-v1beta1-collection-namespaced-ingress-with-http-info namespace optional-params))))
(defn delete-extensions-v1beta1-collection-namespaced-network-policy-with-http-info
  "Delete a collection of NetworkPolicies in the namespace, optionally filtered
  by label/field selectors. Returns the full HTTP response map."
  ([namespace ] (delete-extensions-v1beta1-collection-namespaced-network-policy-with-http-info namespace nil))
  ([namespace {:keys [pretty continue field-selector include-uninitialized label-selector limit resource-version timeout-seconds watch ]}]
   (check-required-params namespace)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/networkpolicies" :delete
             {:path-params   {"namespace" namespace }
              :header-params {}
              :query-params  {"pretty" pretty "continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params   {}
              :content-types ["*/*"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))
(defn delete-extensions-v1beta1-collection-namespaced-network-policy
  "Delete a collection of NetworkPolicies; returns only the decoded response body (:data)."
  ([namespace ] (delete-extensions-v1beta1-collection-namespaced-network-policy namespace nil))
  ([namespace optional-params]
   (:data (delete-extensions-v1beta1-collection-namespaced-network-policy-with-http-info namespace optional-params))))
(defn delete-extensions-v1beta1-collection-namespaced-replica-set-with-http-info
  "Delete a collection of ReplicaSets in the namespace, optionally filtered by
  label/field selectors. Returns the full HTTP response map."
  ([namespace ] (delete-extensions-v1beta1-collection-namespaced-replica-set-with-http-info namespace nil))
  ([namespace {:keys [pretty continue field-selector include-uninitialized label-selector limit resource-version timeout-seconds watch ]}]
   (check-required-params namespace)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/replicasets" :delete
             {:path-params   {"namespace" namespace }
              :header-params {}
              :query-params  {"pretty" pretty "continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params   {}
              :content-types ["*/*"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))
(defn delete-extensions-v1beta1-collection-namespaced-replica-set
  "Delete a collection of ReplicaSets; returns only the decoded response body (:data)."
  ([namespace ] (delete-extensions-v1beta1-collection-namespaced-replica-set namespace nil))
  ([namespace optional-params]
   (:data (delete-extensions-v1beta1-collection-namespaced-replica-set-with-http-info namespace optional-params))))
(defn delete-extensions-v1beta1-collection-pod-security-policy-with-http-info
  "Delete a collection of cluster-scoped PodSecurityPolicies, optionally
  filtered by label/field selectors. Returns the full HTTP response map."
  ([] (delete-extensions-v1beta1-collection-pod-security-policy-with-http-info nil))
  ([{:keys [pretty continue field-selector include-uninitialized label-selector limit resource-version timeout-seconds watch ]}]
   (call-api "/apis/extensions/v1beta1/podsecuritypolicies" :delete
             {:path-params   {}
              :header-params {}
              :query-params  {"pretty" pretty "continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params   {}
              :content-types ["*/*"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))
(defn delete-extensions-v1beta1-collection-pod-security-policy
  "Delete a collection of PodSecurityPolicies; returns only the decoded response body (:data)."
  ([] (delete-extensions-v1beta1-collection-pod-security-policy nil))
  ([optional-params]
   (:data (delete-extensions-v1beta1-collection-pod-security-policy-with-http-info optional-params))))
(defn delete-extensions-v1beta1-namespaced-daemon-set-with-http-info
  "Delete the named DaemonSet in the namespace; `body` is a DeleteOptions
  object. Returns the full HTTP response map."
  ([name namespace body ] (delete-extensions-v1beta1-namespaced-daemon-set-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty grace-period-seconds orphan-dependents propagation-policy ]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/daemonsets/{name}" :delete
             {:path-params   {"name" name "namespace" namespace }
              :header-params {}
              :query-params  {"pretty" pretty "gracePeriodSeconds" grace-period-seconds "orphanDependents" orphan-dependents "propagationPolicy" propagation-policy }
              :form-params   {}
              :body-param    body
              :content-types ["*/*"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))
(defn delete-extensions-v1beta1-namespaced-daemon-set
  "Delete the named DaemonSet; returns only the decoded response body (:data)."
  ([name namespace body ] (delete-extensions-v1beta1-namespaced-daemon-set name namespace body nil))
  ([name namespace body optional-params]
   (:data (delete-extensions-v1beta1-namespaced-daemon-set-with-http-info name namespace body optional-params))))
(defn delete-extensions-v1beta1-namespaced-deployment-with-http-info
  "Delete the named Deployment in the namespace; `body` is a DeleteOptions
  object. Returns the full HTTP response map."
  ([name namespace body ] (delete-extensions-v1beta1-namespaced-deployment-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty grace-period-seconds orphan-dependents propagation-policy ]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}" :delete
             {:path-params   {"name" name "namespace" namespace }
              :header-params {}
              :query-params  {"pretty" pretty "gracePeriodSeconds" grace-period-seconds "orphanDependents" orphan-dependents "propagationPolicy" propagation-policy }
              :form-params   {}
              :body-param    body
              :content-types ["*/*"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))
(defn delete-extensions-v1beta1-namespaced-deployment
  "Delete the named Deployment; returns only the decoded response body (:data)."
  ([name namespace body ] (delete-extensions-v1beta1-namespaced-deployment name namespace body nil))
  ([name namespace body optional-params]
   (:data (delete-extensions-v1beta1-namespaced-deployment-with-http-info name namespace body optional-params))))
(defn delete-extensions-v1beta1-namespaced-ingress-with-http-info
  "Delete the named Ingress in the namespace; `body` is a DeleteOptions
  object. Returns the full HTTP response map."
  ([name namespace body ] (delete-extensions-v1beta1-namespaced-ingress-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty grace-period-seconds orphan-dependents propagation-policy ]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/ingresses/{name}" :delete
             {:path-params   {"name" name "namespace" namespace }
              :header-params {}
              :query-params  {"pretty" pretty "gracePeriodSeconds" grace-period-seconds "orphanDependents" orphan-dependents "propagationPolicy" propagation-policy }
              :form-params   {}
              :body-param    body
              :content-types ["*/*"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))
(defn delete-extensions-v1beta1-namespaced-ingress
  "Delete the named Ingress; returns only the decoded response body (:data)."
  ([name namespace body ] (delete-extensions-v1beta1-namespaced-ingress name namespace body nil))
  ([name namespace body optional-params]
   (:data (delete-extensions-v1beta1-namespaced-ingress-with-http-info name namespace body optional-params))))
(defn delete-extensions-v1beta1-namespaced-network-policy-with-http-info
  "Delete a NetworkPolicy in the given namespace.
  Returns the full response from call-api; the parsed body is under :data."
  ([name namespace body]
   (delete-extensions-v1beta1-namespaced-network-policy-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty grace-period-seconds orphan-dependents propagation-policy]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/networkpolicies/{name}" :delete
             {:path-params   {"name" name, "namespace" namespace}
              :header-params {}
              :query-params  {"pretty" pretty, "gracePeriodSeconds" grace-period-seconds,
                              "orphanDependents" orphan-dependents, "propagationPolicy" propagation-policy}
              :form-params   {}
              :body-param    body
              :content-types ["*/*"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))

(defn delete-extensions-v1beta1-namespaced-network-policy
  "Delete a NetworkPolicy in the given namespace; returns only the parsed response body."
  ([name namespace body]
   (delete-extensions-v1beta1-namespaced-network-policy name namespace body nil))
  ([name namespace body optional-params]
   (-> (delete-extensions-v1beta1-namespaced-network-policy-with-http-info name namespace body optional-params)
       :data)))
(defn delete-extensions-v1beta1-namespaced-replica-set-with-http-info
  "Delete a ReplicaSet in the given namespace.
  Returns the full response from call-api; the parsed body is under :data."
  ([name namespace body]
   (delete-extensions-v1beta1-namespaced-replica-set-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty grace-period-seconds orphan-dependents propagation-policy]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/replicasets/{name}" :delete
             {:path-params   {"name" name, "namespace" namespace}
              :header-params {}
              :query-params  {"pretty" pretty, "gracePeriodSeconds" grace-period-seconds,
                              "orphanDependents" orphan-dependents, "propagationPolicy" propagation-policy}
              :form-params   {}
              :body-param    body
              :content-types ["*/*"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))

(defn delete-extensions-v1beta1-namespaced-replica-set
  "Delete a ReplicaSet in the given namespace; returns only the parsed response body."
  ([name namespace body]
   (delete-extensions-v1beta1-namespaced-replica-set name namespace body nil))
  ([name namespace body optional-params]
   (-> (delete-extensions-v1beta1-namespaced-replica-set-with-http-info name namespace body optional-params)
       :data)))
(defn delete-extensions-v1beta1-pod-security-policy-with-http-info
  "Delete a PodSecurityPolicy (cluster-scoped; no namespace).
  Returns the full response from call-api; the parsed body is under :data."
  ([name body]
   (delete-extensions-v1beta1-pod-security-policy-with-http-info name body nil))
  ([name body {:keys [pretty grace-period-seconds orphan-dependents propagation-policy]}]
   (check-required-params name body)
   (call-api "/apis/extensions/v1beta1/podsecuritypolicies/{name}" :delete
             {:path-params   {"name" name}
              :header-params {}
              :query-params  {"pretty" pretty, "gracePeriodSeconds" grace-period-seconds,
                              "orphanDependents" orphan-dependents, "propagationPolicy" propagation-policy}
              :form-params   {}
              :body-param    body
              :content-types ["*/*"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))

(defn delete-extensions-v1beta1-pod-security-policy
  "Delete a PodSecurityPolicy; returns only the parsed response body."
  ([name body]
   (delete-extensions-v1beta1-pod-security-policy name body nil))
  ([name body optional-params]
   (-> (delete-extensions-v1beta1-pod-security-policy-with-http-info name body optional-params)
       :data)))
(defn get-extensions-v1beta1-api-resources-with-http-info
  "Get the API resources available under the extensions/v1beta1 group.
  Returns the full response from call-api; the parsed body is under :data."
  []
  (call-api "/apis/extensions/v1beta1/" :get
            {:path-params   {}
             :header-params {}
             :query-params  {}
             :form-params   {}
             :content-types ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
             :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
             :auth-names    ["BearerToken" "HTTPBasic"]}))

(defn get-extensions-v1beta1-api-resources
  "Get the API resources available under extensions/v1beta1; returns only the parsed body."
  []
  (-> (get-extensions-v1beta1-api-resources-with-http-info) :data))
(defn list-extensions-v1beta1-daemon-set-for-all-namespaces-with-http-info
  "List or watch DaemonSet objects across all namespaces.
  Returns the full response from call-api; the parsed body is under :data."
  ([] (list-extensions-v1beta1-daemon-set-for-all-namespaces-with-http-info nil))
  ([{:keys [continue field-selector include-uninitialized label-selector limit
            pretty resource-version timeout-seconds watch]}]
   (call-api "/apis/extensions/v1beta1/daemonsets" :get
             {:path-params   {}
              :header-params {}
              :query-params  {"continue" continue, "fieldSelector" field-selector,
                              "includeUninitialized" include-uninitialized,
                              "labelSelector" label-selector, "limit" limit, "pretty" pretty,
                              "resourceVersion" resource-version,
                              "timeoutSeconds" timeout-seconds, "watch" watch}
              :form-params   {}
              :content-types ["*/*"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"
                              "application/json;stream=watch"
                              "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))

(defn list-extensions-v1beta1-daemon-set-for-all-namespaces
  "List or watch DaemonSet objects across all namespaces; returns only the parsed body."
  ([] (list-extensions-v1beta1-daemon-set-for-all-namespaces nil))
  ([optional-params]
   (-> (list-extensions-v1beta1-daemon-set-for-all-namespaces-with-http-info optional-params)
       :data)))
(defn list-extensions-v1beta1-deployment-for-all-namespaces-with-http-info
  "List or watch Deployment objects across all namespaces.
  Returns the full response from call-api; the parsed body is under :data."
  ([] (list-extensions-v1beta1-deployment-for-all-namespaces-with-http-info nil))
  ([{:keys [continue field-selector include-uninitialized label-selector limit
            pretty resource-version timeout-seconds watch]}]
   (call-api "/apis/extensions/v1beta1/deployments" :get
             {:path-params   {}
              :header-params {}
              :query-params  {"continue" continue, "fieldSelector" field-selector,
                              "includeUninitialized" include-uninitialized,
                              "labelSelector" label-selector, "limit" limit, "pretty" pretty,
                              "resourceVersion" resource-version,
                              "timeoutSeconds" timeout-seconds, "watch" watch}
              :form-params   {}
              :content-types ["*/*"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"
                              "application/json;stream=watch"
                              "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))

(defn list-extensions-v1beta1-deployment-for-all-namespaces
  "List or watch Deployment objects across all namespaces; returns only the parsed body."
  ([] (list-extensions-v1beta1-deployment-for-all-namespaces nil))
  ([optional-params]
   (-> (list-extensions-v1beta1-deployment-for-all-namespaces-with-http-info optional-params)
       :data)))
(defn list-extensions-v1beta1-ingress-for-all-namespaces-with-http-info
  "List or watch Ingress objects across all namespaces.
  Returns the full response from call-api; the parsed body is under :data."
  ([] (list-extensions-v1beta1-ingress-for-all-namespaces-with-http-info nil))
  ([{:keys [continue field-selector include-uninitialized label-selector limit
            pretty resource-version timeout-seconds watch]}]
   (call-api "/apis/extensions/v1beta1/ingresses" :get
             {:path-params   {}
              :header-params {}
              :query-params  {"continue" continue, "fieldSelector" field-selector,
                              "includeUninitialized" include-uninitialized,
                              "labelSelector" label-selector, "limit" limit, "pretty" pretty,
                              "resourceVersion" resource-version,
                              "timeoutSeconds" timeout-seconds, "watch" watch}
              :form-params   {}
              :content-types ["*/*"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"
                              "application/json;stream=watch"
                              "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))

(defn list-extensions-v1beta1-ingress-for-all-namespaces
  "List or watch Ingress objects across all namespaces; returns only the parsed body."
  ([] (list-extensions-v1beta1-ingress-for-all-namespaces nil))
  ([optional-params]
   (-> (list-extensions-v1beta1-ingress-for-all-namespaces-with-http-info optional-params)
       :data)))
(defn list-extensions-v1beta1-namespaced-daemon-set-with-http-info
  "List or watch DaemonSet objects in the given namespace.
  Returns the full response from call-api; the parsed body is under :data."
  ([namespace]
   (list-extensions-v1beta1-namespaced-daemon-set-with-http-info namespace nil))
  ([namespace {:keys [pretty continue field-selector include-uninitialized label-selector
                      limit resource-version timeout-seconds watch]}]
   (check-required-params namespace)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/daemonsets" :get
             {:path-params   {"namespace" namespace}
              :header-params {}
              :query-params  {"pretty" pretty, "continue" continue, "fieldSelector" field-selector,
                              "includeUninitialized" include-uninitialized,
                              "labelSelector" label-selector, "limit" limit,
                              "resourceVersion" resource-version,
                              "timeoutSeconds" timeout-seconds, "watch" watch}
              :form-params   {}
              :content-types ["*/*"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"
                              "application/json;stream=watch"
                              "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))

(defn list-extensions-v1beta1-namespaced-daemon-set
  "List or watch DaemonSet objects in the given namespace; returns only the parsed body."
  ([namespace]
   (list-extensions-v1beta1-namespaced-daemon-set namespace nil))
  ([namespace optional-params]
   (-> (list-extensions-v1beta1-namespaced-daemon-set-with-http-info namespace optional-params)
       :data)))
(defn list-extensions-v1beta1-namespaced-deployment-with-http-info
  "List or watch Deployment objects in the given namespace.
  Returns the full response from call-api; the parsed body is under :data."
  ([namespace]
   (list-extensions-v1beta1-namespaced-deployment-with-http-info namespace nil))
  ([namespace {:keys [pretty continue field-selector include-uninitialized label-selector
                      limit resource-version timeout-seconds watch]}]
   (check-required-params namespace)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/deployments" :get
             {:path-params   {"namespace" namespace}
              :header-params {}
              :query-params  {"pretty" pretty, "continue" continue, "fieldSelector" field-selector,
                              "includeUninitialized" include-uninitialized,
                              "labelSelector" label-selector, "limit" limit,
                              "resourceVersion" resource-version,
                              "timeoutSeconds" timeout-seconds, "watch" watch}
              :form-params   {}
              :content-types ["*/*"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"
                              "application/json;stream=watch"
                              "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))

(defn list-extensions-v1beta1-namespaced-deployment
  "List or watch Deployment objects in the given namespace; returns only the parsed body."
  ([namespace]
   (list-extensions-v1beta1-namespaced-deployment namespace nil))
  ([namespace optional-params]
   (-> (list-extensions-v1beta1-namespaced-deployment-with-http-info namespace optional-params)
       :data)))
(defn list-extensions-v1beta1-namespaced-ingress-with-http-info
  "List or watch Ingress objects in the given namespace.
  Returns the full response from call-api; the parsed body is under :data."
  ([namespace]
   (list-extensions-v1beta1-namespaced-ingress-with-http-info namespace nil))
  ([namespace {:keys [pretty continue field-selector include-uninitialized label-selector
                      limit resource-version timeout-seconds watch]}]
   (check-required-params namespace)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/ingresses" :get
             {:path-params   {"namespace" namespace}
              :header-params {}
              :query-params  {"pretty" pretty, "continue" continue, "fieldSelector" field-selector,
                              "includeUninitialized" include-uninitialized,
                              "labelSelector" label-selector, "limit" limit,
                              "resourceVersion" resource-version,
                              "timeoutSeconds" timeout-seconds, "watch" watch}
              :form-params   {}
              :content-types ["*/*"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"
                              "application/json;stream=watch"
                              "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))

(defn list-extensions-v1beta1-namespaced-ingress
  "List or watch Ingress objects in the given namespace; returns only the parsed body."
  ([namespace]
   (list-extensions-v1beta1-namespaced-ingress namespace nil))
  ([namespace optional-params]
   (-> (list-extensions-v1beta1-namespaced-ingress-with-http-info namespace optional-params)
       :data)))
(defn list-extensions-v1beta1-namespaced-network-policy-with-http-info
  "List or watch NetworkPolicy objects in the given namespace.
  Returns the full response from call-api; the parsed body is under :data."
  ([namespace]
   (list-extensions-v1beta1-namespaced-network-policy-with-http-info namespace nil))
  ([namespace {:keys [pretty continue field-selector include-uninitialized label-selector
                      limit resource-version timeout-seconds watch]}]
   (check-required-params namespace)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/networkpolicies" :get
             {:path-params   {"namespace" namespace}
              :header-params {}
              :query-params  {"pretty" pretty, "continue" continue, "fieldSelector" field-selector,
                              "includeUninitialized" include-uninitialized,
                              "labelSelector" label-selector, "limit" limit,
                              "resourceVersion" resource-version,
                              "timeoutSeconds" timeout-seconds, "watch" watch}
              :form-params   {}
              :content-types ["*/*"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"
                              "application/json;stream=watch"
                              "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))

(defn list-extensions-v1beta1-namespaced-network-policy
  "List or watch NetworkPolicy objects in the given namespace; returns only the parsed body."
  ([namespace]
   (list-extensions-v1beta1-namespaced-network-policy namespace nil))
  ([namespace optional-params]
   (-> (list-extensions-v1beta1-namespaced-network-policy-with-http-info namespace optional-params)
       :data)))
(defn list-extensions-v1beta1-namespaced-replica-set-with-http-info
  "List or watch ReplicaSet objects in the given namespace.
  Returns the full response from call-api; the parsed body is under :data."
  ([namespace]
   (list-extensions-v1beta1-namespaced-replica-set-with-http-info namespace nil))
  ([namespace {:keys [pretty continue field-selector include-uninitialized label-selector
                      limit resource-version timeout-seconds watch]}]
   (check-required-params namespace)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/replicasets" :get
             {:path-params   {"namespace" namespace}
              :header-params {}
              :query-params  {"pretty" pretty, "continue" continue, "fieldSelector" field-selector,
                              "includeUninitialized" include-uninitialized,
                              "labelSelector" label-selector, "limit" limit,
                              "resourceVersion" resource-version,
                              "timeoutSeconds" timeout-seconds, "watch" watch}
              :form-params   {}
              :content-types ["*/*"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"
                              "application/json;stream=watch"
                              "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))

(defn list-extensions-v1beta1-namespaced-replica-set
  "List or watch ReplicaSet objects in the given namespace; returns only the parsed body."
  ([namespace]
   (list-extensions-v1beta1-namespaced-replica-set namespace nil))
  ([namespace optional-params]
   (-> (list-extensions-v1beta1-namespaced-replica-set-with-http-info namespace optional-params)
       :data)))
(defn list-extensions-v1beta1-network-policy-for-all-namespaces-with-http-info
  "List or watch NetworkPolicy objects across all namespaces.
  Returns the full response from call-api; the parsed body is under :data."
  ([] (list-extensions-v1beta1-network-policy-for-all-namespaces-with-http-info nil))
  ([{:keys [continue field-selector include-uninitialized label-selector limit
            pretty resource-version timeout-seconds watch]}]
   (call-api "/apis/extensions/v1beta1/networkpolicies" :get
             {:path-params   {}
              :header-params {}
              :query-params  {"continue" continue, "fieldSelector" field-selector,
                              "includeUninitialized" include-uninitialized,
                              "labelSelector" label-selector, "limit" limit, "pretty" pretty,
                              "resourceVersion" resource-version,
                              "timeoutSeconds" timeout-seconds, "watch" watch}
              :form-params   {}
              :content-types ["*/*"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"
                              "application/json;stream=watch"
                              "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))

(defn list-extensions-v1beta1-network-policy-for-all-namespaces
  "List or watch NetworkPolicy objects across all namespaces; returns only the parsed body."
  ([] (list-extensions-v1beta1-network-policy-for-all-namespaces nil))
  ([optional-params]
   (-> (list-extensions-v1beta1-network-policy-for-all-namespaces-with-http-info optional-params)
       :data)))
(defn list-extensions-v1beta1-pod-security-policy-with-http-info
  "List or watch PodSecurityPolicy objects (cluster-scoped).
  Returns the full response from call-api; the parsed body is under :data."
  ([] (list-extensions-v1beta1-pod-security-policy-with-http-info nil))
  ([{:keys [pretty continue field-selector include-uninitialized label-selector
            limit resource-version timeout-seconds watch]}]
   (call-api "/apis/extensions/v1beta1/podsecuritypolicies" :get
             {:path-params   {}
              :header-params {}
              :query-params  {"pretty" pretty, "continue" continue, "fieldSelector" field-selector,
                              "includeUninitialized" include-uninitialized,
                              "labelSelector" label-selector, "limit" limit,
                              "resourceVersion" resource-version,
                              "timeoutSeconds" timeout-seconds, "watch" watch}
              :form-params   {}
              :content-types ["*/*"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"
                              "application/json;stream=watch"
                              "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))

(defn list-extensions-v1beta1-pod-security-policy
  "List or watch PodSecurityPolicy objects; returns only the parsed body."
  ([] (list-extensions-v1beta1-pod-security-policy nil))
  ([optional-params]
   (-> (list-extensions-v1beta1-pod-security-policy-with-http-info optional-params)
       :data)))
(defn list-extensions-v1beta1-replica-set-for-all-namespaces-with-http-info
  "List or watch ReplicaSet objects across all namespaces.
  Returns the full response from call-api; the parsed body is under :data."
  ([] (list-extensions-v1beta1-replica-set-for-all-namespaces-with-http-info nil))
  ([{:keys [continue field-selector include-uninitialized label-selector limit
            pretty resource-version timeout-seconds watch]}]
   (call-api "/apis/extensions/v1beta1/replicasets" :get
             {:path-params   {}
              :header-params {}
              :query-params  {"continue" continue, "fieldSelector" field-selector,
                              "includeUninitialized" include-uninitialized,
                              "labelSelector" label-selector, "limit" limit, "pretty" pretty,
                              "resourceVersion" resource-version,
                              "timeoutSeconds" timeout-seconds, "watch" watch}
              :form-params   {}
              :content-types ["*/*"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"
                              "application/json;stream=watch"
                              "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))

(defn list-extensions-v1beta1-replica-set-for-all-namespaces
  "List or watch ReplicaSet objects across all namespaces; returns only the parsed body."
  ([] (list-extensions-v1beta1-replica-set-for-all-namespaces nil))
  ([optional-params]
   (-> (list-extensions-v1beta1-replica-set-for-all-namespaces-with-http-info optional-params)
       :data)))
(defn patch-extensions-v1beta1-namespaced-daemon-set-with-http-info
  "Partially update the specified DaemonSet (JSON / merge / strategic-merge patch body).
  Returns the full response from call-api; the parsed body is under :data."
  ([name namespace body]
   (patch-extensions-v1beta1-namespaced-daemon-set-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/daemonsets/{name}" :patch
             {:path-params   {"name" name, "namespace" namespace}
              :header-params {}
              :query-params  {"pretty" pretty}
              :form-params   {}
              :body-param    body
              :content-types ["application/json-patch+json" "application/merge-patch+json"
                              "application/strategic-merge-patch+json"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))

(defn patch-extensions-v1beta1-namespaced-daemon-set
  "Partially update the specified DaemonSet; returns only the parsed response body."
  ([name namespace body]
   (patch-extensions-v1beta1-namespaced-daemon-set name namespace body nil))
  ([name namespace body optional-params]
   (-> (patch-extensions-v1beta1-namespaced-daemon-set-with-http-info name namespace body optional-params)
       :data)))
(defn patch-extensions-v1beta1-namespaced-daemon-set-status-with-http-info
  "Partially update status of the specified DaemonSet.
  Returns the full response from call-api; the parsed body is under :data."
  ([name namespace body]
   (patch-extensions-v1beta1-namespaced-daemon-set-status-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/daemonsets/{name}/status" :patch
             {:path-params   {"name" name, "namespace" namespace}
              :header-params {}
              :query-params  {"pretty" pretty}
              :form-params   {}
              :body-param    body
              :content-types ["application/json-patch+json" "application/merge-patch+json"
                              "application/strategic-merge-patch+json"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))

(defn patch-extensions-v1beta1-namespaced-daemon-set-status
  "Partially update status of the specified DaemonSet; returns only the parsed response body."
  ([name namespace body]
   (patch-extensions-v1beta1-namespaced-daemon-set-status name namespace body nil))
  ([name namespace body optional-params]
   (-> (patch-extensions-v1beta1-namespaced-daemon-set-status-with-http-info name namespace body optional-params)
       :data)))
(defn patch-extensions-v1beta1-namespaced-deployment-with-http-info
  "Partially update the specified Deployment (JSON / merge / strategic-merge patch body).
  Returns the full response from call-api; the parsed body is under :data."
  ([name namespace body]
   (patch-extensions-v1beta1-namespaced-deployment-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}" :patch
             {:path-params   {"name" name, "namespace" namespace}
              :header-params {}
              :query-params  {"pretty" pretty}
              :form-params   {}
              :body-param    body
              :content-types ["application/json-patch+json" "application/merge-patch+json"
                              "application/strategic-merge-patch+json"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))

(defn patch-extensions-v1beta1-namespaced-deployment
  "Partially update the specified Deployment; returns only the parsed response body."
  ([name namespace body]
   (patch-extensions-v1beta1-namespaced-deployment name namespace body nil))
  ([name namespace body optional-params]
   (-> (patch-extensions-v1beta1-namespaced-deployment-with-http-info name namespace body optional-params)
       :data)))
(defn patch-extensions-v1beta1-namespaced-deployment-scale-with-http-info
  "Partially update scale of the specified Deployment.
  Returns the full response from call-api; the parsed body is under :data."
  ([name namespace body]
   (patch-extensions-v1beta1-namespaced-deployment-scale-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}/scale" :patch
             {:path-params   {"name" name, "namespace" namespace}
              :header-params {}
              :query-params  {"pretty" pretty}
              :form-params   {}
              :body-param    body
              :content-types ["application/json-patch+json" "application/merge-patch+json"
                              "application/strategic-merge-patch+json"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))

(defn patch-extensions-v1beta1-namespaced-deployment-scale
  "Partially update scale of the specified Deployment; returns only the parsed response body."
  ([name namespace body]
   (patch-extensions-v1beta1-namespaced-deployment-scale name namespace body nil))
  ([name namespace body optional-params]
   (-> (patch-extensions-v1beta1-namespaced-deployment-scale-with-http-info name namespace body optional-params)
       :data)))
(defn patch-extensions-v1beta1-namespaced-deployment-status-with-http-info
  "Partially update status of the specified Deployment.
  Returns the full response from call-api; the parsed body is under :data."
  ([name namespace body]
   (patch-extensions-v1beta1-namespaced-deployment-status-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}/status" :patch
             {:path-params   {"name" name, "namespace" namespace}
              :header-params {}
              :query-params  {"pretty" pretty}
              :form-params   {}
              :body-param    body
              :content-types ["application/json-patch+json" "application/merge-patch+json"
                              "application/strategic-merge-patch+json"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))

(defn patch-extensions-v1beta1-namespaced-deployment-status
  "Partially update status of the specified Deployment; returns only the parsed response body."
  ([name namespace body]
   (patch-extensions-v1beta1-namespaced-deployment-status name namespace body nil))
  ([name namespace body optional-params]
   (-> (patch-extensions-v1beta1-namespaced-deployment-status-with-http-info name namespace body optional-params)
       :data)))
(defn patch-extensions-v1beta1-namespaced-ingress-with-http-info
  "Partially update the specified Ingress (JSON / merge / strategic-merge patch body).
  Returns the full response from call-api; the parsed body is under :data."
  ([name namespace body]
   (patch-extensions-v1beta1-namespaced-ingress-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/ingresses/{name}" :patch
             {:path-params   {"name" name, "namespace" namespace}
              :header-params {}
              :query-params  {"pretty" pretty}
              :form-params   {}
              :body-param    body
              :content-types ["application/json-patch+json" "application/merge-patch+json"
                              "application/strategic-merge-patch+json"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))

(defn patch-extensions-v1beta1-namespaced-ingress
  "Partially update the specified Ingress; returns only the parsed response body."
  ([name namespace body]
   (patch-extensions-v1beta1-namespaced-ingress name namespace body nil))
  ([name namespace body optional-params]
   (-> (patch-extensions-v1beta1-namespaced-ingress-with-http-info name namespace body optional-params)
       :data)))
(defn patch-extensions-v1beta1-namespaced-ingress-status-with-http-info
  "Partially update status of the specified Ingress.
  Returns the full response from call-api; the parsed body is under :data."
  ([name namespace body]
   (patch-extensions-v1beta1-namespaced-ingress-status-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/ingresses/{name}/status" :patch
             {:path-params   {"name" name, "namespace" namespace}
              :header-params {}
              :query-params  {"pretty" pretty}
              :form-params   {}
              :body-param    body
              :content-types ["application/json-patch+json" "application/merge-patch+json"
                              "application/strategic-merge-patch+json"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))

(defn patch-extensions-v1beta1-namespaced-ingress-status
  "Partially update status of the specified Ingress; returns only the parsed response body."
  ([name namespace body]
   (patch-extensions-v1beta1-namespaced-ingress-status name namespace body nil))
  ([name namespace body optional-params]
   (-> (patch-extensions-v1beta1-namespaced-ingress-status-with-http-info name namespace body optional-params)
       :data)))
(defn patch-extensions-v1beta1-namespaced-network-policy-with-http-info
  "Partially update the specified NetworkPolicy (JSON / merge / strategic-merge patch body).
  Returns the full response from call-api; the parsed body is under :data."
  ([name namespace body]
   (patch-extensions-v1beta1-namespaced-network-policy-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/networkpolicies/{name}" :patch
             {:path-params   {"name" name, "namespace" namespace}
              :header-params {}
              :query-params  {"pretty" pretty}
              :form-params   {}
              :body-param    body
              :content-types ["application/json-patch+json" "application/merge-patch+json"
                              "application/strategic-merge-patch+json"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))

(defn patch-extensions-v1beta1-namespaced-network-policy
  "Partially update the specified NetworkPolicy; returns only the parsed response body."
  ([name namespace body]
   (patch-extensions-v1beta1-namespaced-network-policy name namespace body nil))
  ([name namespace body optional-params]
   (-> (patch-extensions-v1beta1-namespaced-network-policy-with-http-info name namespace body optional-params)
       :data)))
(defn patch-extensions-v1beta1-namespaced-replica-set-with-http-info
  "Partially update the specified ReplicaSet (JSON / merge / strategic-merge patch body).
  Returns the full response from call-api; the parsed body is under :data."
  ([name namespace body]
   (patch-extensions-v1beta1-namespaced-replica-set-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/replicasets/{name}" :patch
             {:path-params   {"name" name, "namespace" namespace}
              :header-params {}
              :query-params  {"pretty" pretty}
              :form-params   {}
              :body-param    body
              :content-types ["application/json-patch+json" "application/merge-patch+json"
                              "application/strategic-merge-patch+json"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))

(defn patch-extensions-v1beta1-namespaced-replica-set
  "Partially update the specified ReplicaSet; returns only the parsed response body."
  ([name namespace body]
   (patch-extensions-v1beta1-namespaced-replica-set name namespace body nil))
  ([name namespace body optional-params]
   (-> (patch-extensions-v1beta1-namespaced-replica-set-with-http-info name namespace body optional-params)
       :data)))
(defn patch-extensions-v1beta1-namespaced-replica-set-scale-with-http-info
  "Partially update scale of the specified ReplicaSet.
  Returns the full response from call-api; the parsed body is under :data."
  ([name namespace body]
   (patch-extensions-v1beta1-namespaced-replica-set-scale-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/replicasets/{name}/scale" :patch
             {:path-params   {"name" name, "namespace" namespace}
              :header-params {}
              :query-params  {"pretty" pretty}
              :form-params   {}
              :body-param    body
              :content-types ["application/json-patch+json" "application/merge-patch+json"
                              "application/strategic-merge-patch+json"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))

(defn patch-extensions-v1beta1-namespaced-replica-set-scale
  "Partially update scale of the specified ReplicaSet; returns only the parsed response body."
  ([name namespace body]
   (patch-extensions-v1beta1-namespaced-replica-set-scale name namespace body nil))
  ([name namespace body optional-params]
   (-> (patch-extensions-v1beta1-namespaced-replica-set-scale-with-http-info name namespace body optional-params)
       :data)))
(defn patch-extensions-v1beta1-namespaced-replica-set-status-with-http-info
  "Partially update status of the specified ReplicaSet.
  Returns the full response from call-api; the parsed body is under :data."
  ([name namespace body]
   (patch-extensions-v1beta1-namespaced-replica-set-status-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/replicasets/{name}/status" :patch
             {:path-params   {"name" name, "namespace" namespace}
              :header-params {}
              :query-params  {"pretty" pretty}
              :form-params   {}
              :body-param    body
              :content-types ["application/json-patch+json" "application/merge-patch+json"
                              "application/strategic-merge-patch+json"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))

(defn patch-extensions-v1beta1-namespaced-replica-set-status
  "Partially update status of the specified ReplicaSet; returns only the parsed response body."
  ([name namespace body]
   (patch-extensions-v1beta1-namespaced-replica-set-status name namespace body nil))
  ([name namespace body optional-params]
   (-> (patch-extensions-v1beta1-namespaced-replica-set-status-with-http-info name namespace body optional-params)
       :data)))
(defn patch-extensions-v1beta1-namespaced-replication-controller-dummy-scale-with-http-info
  "Partially update scale of the specified ReplicationControllerDummy.
  Returns the full response from call-api; the parsed body is under :data."
  ([name namespace body]
   (patch-extensions-v1beta1-namespaced-replication-controller-dummy-scale-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/replicationcontrollers/{name}/scale" :patch
             {:path-params   {"name" name, "namespace" namespace}
              :header-params {}
              :query-params  {"pretty" pretty}
              :form-params   {}
              :body-param    body
              :content-types ["application/json-patch+json" "application/merge-patch+json"
                              "application/strategic-merge-patch+json"]
              :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names    ["BearerToken" "HTTPBasic"]})))

(defn patch-extensions-v1beta1-namespaced-replication-controller-dummy-scale
  "Partially update scale of the specified ReplicationControllerDummy; returns only the parsed response body."
  ([name namespace body]
   (patch-extensions-v1beta1-namespaced-replication-controller-dummy-scale name namespace body nil))
  ([name namespace body optional-params]
   (-> (patch-extensions-v1beta1-namespaced-replication-controller-dummy-scale-with-http-info name namespace body optional-params)
       :data)))
(defn patch-extensions-v1beta1-pod-security-policy-with-http-info
  "Partially update the specified PodSecurityPolicy; returns the raw HTTP response map."
  ([name body]
   (patch-extensions-v1beta1-pod-security-policy-with-http-info name body nil))
  ([name body {:keys [pretty]}]
   (check-required-params name body)
   (let [opts {:path-params   {"name" name}
               :header-params {}
               :query-params  {"pretty" pretty}
               :form-params   {}
               :body-param    body
               :content-types ["application/json-patch+json" "application/merge-patch+json" "application/strategic-merge-patch+json"]
               :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
               :auth-names    ["BearerToken" "HTTPBasic"]}]
     (call-api "/apis/extensions/v1beta1/podsecuritypolicies/{name}" :patch opts))))
(defn patch-extensions-v1beta1-pod-security-policy
  "Partially update the specified PodSecurityPolicy; returns the decoded response body."
  ([name body]
   (patch-extensions-v1beta1-pod-security-policy name body nil))
  ([name body optional-params]
   (-> (patch-extensions-v1beta1-pod-security-policy-with-http-info name body optional-params)
       :data)))
(defn read-extensions-v1beta1-namespaced-daemon-set-with-http-info
  "Read the specified DaemonSet; returns the raw HTTP response map."
  ([name namespace]
   (read-extensions-v1beta1-namespaced-daemon-set-with-http-info name namespace nil))
  ([name namespace {:keys [pretty exact export]}]
   (check-required-params name namespace)
   (let [opts {:path-params   {"name" name "namespace" namespace}
               :header-params {}
               :query-params  {"pretty" pretty "exact" exact "export" export}
               :form-params   {}
               :content-types ["*/*"]
               :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
               :auth-names    ["BearerToken" "HTTPBasic"]}]
     (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/daemonsets/{name}" :get opts))))
(defn read-extensions-v1beta1-namespaced-daemon-set
  "Read the specified DaemonSet; returns the decoded response body."
  ([name namespace]
   (read-extensions-v1beta1-namespaced-daemon-set name namespace nil))
  ([name namespace optional-params]
   (-> (read-extensions-v1beta1-namespaced-daemon-set-with-http-info name namespace optional-params)
       :data)))
(defn read-extensions-v1beta1-namespaced-daemon-set-status-with-http-info
  "Read status of the specified DaemonSet; returns the raw HTTP response map."
  ([name namespace]
   (read-extensions-v1beta1-namespaced-daemon-set-status-with-http-info name namespace nil))
  ([name namespace {:keys [pretty]}]
   (check-required-params name namespace)
   (let [opts {:path-params   {"name" name "namespace" namespace}
               :header-params {}
               :query-params  {"pretty" pretty}
               :form-params   {}
               :content-types ["*/*"]
               :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
               :auth-names    ["BearerToken" "HTTPBasic"]}]
     (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/daemonsets/{name}/status" :get opts))))
(defn read-extensions-v1beta1-namespaced-daemon-set-status
  "Read status of the specified DaemonSet; returns the decoded response body."
  ([name namespace]
   (read-extensions-v1beta1-namespaced-daemon-set-status name namespace nil))
  ([name namespace optional-params]
   (-> (read-extensions-v1beta1-namespaced-daemon-set-status-with-http-info name namespace optional-params)
       :data)))
(defn read-extensions-v1beta1-namespaced-deployment-with-http-info
  "Read the specified Deployment; returns the raw HTTP response map."
  ([name namespace]
   (read-extensions-v1beta1-namespaced-deployment-with-http-info name namespace nil))
  ([name namespace {:keys [pretty exact export]}]
   (check-required-params name namespace)
   (let [opts {:path-params   {"name" name "namespace" namespace}
               :header-params {}
               :query-params  {"pretty" pretty "exact" exact "export" export}
               :form-params   {}
               :content-types ["*/*"]
               :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
               :auth-names    ["BearerToken" "HTTPBasic"]}]
     (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}" :get opts))))
(defn read-extensions-v1beta1-namespaced-deployment
  "Read the specified Deployment; returns the decoded response body."
  ([name namespace]
   (read-extensions-v1beta1-namespaced-deployment name namespace nil))
  ([name namespace optional-params]
   (-> (read-extensions-v1beta1-namespaced-deployment-with-http-info name namespace optional-params)
       :data)))
(defn read-extensions-v1beta1-namespaced-deployment-scale-with-http-info
  "Read scale of the specified Deployment; returns the raw HTTP response map."
  ([name namespace]
   (read-extensions-v1beta1-namespaced-deployment-scale-with-http-info name namespace nil))
  ([name namespace {:keys [pretty]}]
   (check-required-params name namespace)
   (let [opts {:path-params   {"name" name "namespace" namespace}
               :header-params {}
               :query-params  {"pretty" pretty}
               :form-params   {}
               :content-types ["*/*"]
               :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
               :auth-names    ["BearerToken" "HTTPBasic"]}]
     (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}/scale" :get opts))))
(defn read-extensions-v1beta1-namespaced-deployment-scale
  "Read scale of the specified Deployment; returns the decoded response body."
  ([name namespace]
   (read-extensions-v1beta1-namespaced-deployment-scale name namespace nil))
  ([name namespace optional-params]
   (-> (read-extensions-v1beta1-namespaced-deployment-scale-with-http-info name namespace optional-params)
       :data)))
(defn read-extensions-v1beta1-namespaced-deployment-status-with-http-info
  "Read status of the specified Deployment; returns the raw HTTP response map."
  ([name namespace]
   (read-extensions-v1beta1-namespaced-deployment-status-with-http-info name namespace nil))
  ([name namespace {:keys [pretty]}]
   (check-required-params name namespace)
   (let [opts {:path-params   {"name" name "namespace" namespace}
               :header-params {}
               :query-params  {"pretty" pretty}
               :form-params   {}
               :content-types ["*/*"]
               :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
               :auth-names    ["BearerToken" "HTTPBasic"]}]
     (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}/status" :get opts))))
(defn read-extensions-v1beta1-namespaced-deployment-status
  "Read status of the specified Deployment; returns the decoded response body."
  ([name namespace]
   (read-extensions-v1beta1-namespaced-deployment-status name namespace nil))
  ([name namespace optional-params]
   (-> (read-extensions-v1beta1-namespaced-deployment-status-with-http-info name namespace optional-params)
       :data)))
(defn read-extensions-v1beta1-namespaced-ingress-with-http-info
  "Read the specified Ingress; returns the raw HTTP response map."
  ([name namespace]
   (read-extensions-v1beta1-namespaced-ingress-with-http-info name namespace nil))
  ([name namespace {:keys [pretty exact export]}]
   (check-required-params name namespace)
   (let [opts {:path-params   {"name" name "namespace" namespace}
               :header-params {}
               :query-params  {"pretty" pretty "exact" exact "export" export}
               :form-params   {}
               :content-types ["*/*"]
               :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
               :auth-names    ["BearerToken" "HTTPBasic"]}]
     (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/ingresses/{name}" :get opts))))
(defn read-extensions-v1beta1-namespaced-ingress
  "Read the specified Ingress; returns the decoded response body."
  ([name namespace]
   (read-extensions-v1beta1-namespaced-ingress name namespace nil))
  ([name namespace optional-params]
   (-> (read-extensions-v1beta1-namespaced-ingress-with-http-info name namespace optional-params)
       :data)))
(defn read-extensions-v1beta1-namespaced-ingress-status-with-http-info
  "Read status of the specified Ingress; returns the raw HTTP response map."
  ([name namespace]
   (read-extensions-v1beta1-namespaced-ingress-status-with-http-info name namespace nil))
  ([name namespace {:keys [pretty]}]
   (check-required-params name namespace)
   (let [opts {:path-params   {"name" name "namespace" namespace}
               :header-params {}
               :query-params  {"pretty" pretty}
               :form-params   {}
               :content-types ["*/*"]
               :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
               :auth-names    ["BearerToken" "HTTPBasic"]}]
     (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/ingresses/{name}/status" :get opts))))
(defn read-extensions-v1beta1-namespaced-ingress-status
  "Read status of the specified Ingress; returns the decoded response body."
  ([name namespace]
   (read-extensions-v1beta1-namespaced-ingress-status name namespace nil))
  ([name namespace optional-params]
   (-> (read-extensions-v1beta1-namespaced-ingress-status-with-http-info name namespace optional-params)
       :data)))
(defn read-extensions-v1beta1-namespaced-network-policy-with-http-info
  "Read the specified NetworkPolicy; returns the raw HTTP response map."
  ([name namespace]
   (read-extensions-v1beta1-namespaced-network-policy-with-http-info name namespace nil))
  ([name namespace {:keys [pretty exact export]}]
   (check-required-params name namespace)
   (let [opts {:path-params   {"name" name "namespace" namespace}
               :header-params {}
               :query-params  {"pretty" pretty "exact" exact "export" export}
               :form-params   {}
               :content-types ["*/*"]
               :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
               :auth-names    ["BearerToken" "HTTPBasic"]}]
     (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/networkpolicies/{name}" :get opts))))
(defn read-extensions-v1beta1-namespaced-network-policy
  "Read the specified NetworkPolicy; returns the decoded response body."
  ([name namespace]
   (read-extensions-v1beta1-namespaced-network-policy name namespace nil))
  ([name namespace optional-params]
   (-> (read-extensions-v1beta1-namespaced-network-policy-with-http-info name namespace optional-params)
       :data)))
(defn read-extensions-v1beta1-namespaced-replica-set-with-http-info
  "Read the specified ReplicaSet; returns the raw HTTP response map."
  ([name namespace]
   (read-extensions-v1beta1-namespaced-replica-set-with-http-info name namespace nil))
  ([name namespace {:keys [pretty exact export]}]
   (check-required-params name namespace)
   (let [opts {:path-params   {"name" name "namespace" namespace}
               :header-params {}
               :query-params  {"pretty" pretty "exact" exact "export" export}
               :form-params   {}
               :content-types ["*/*"]
               :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
               :auth-names    ["BearerToken" "HTTPBasic"]}]
     (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/replicasets/{name}" :get opts))))
(defn read-extensions-v1beta1-namespaced-replica-set
  "Read the specified ReplicaSet; returns the decoded response body."
  ([name namespace]
   (read-extensions-v1beta1-namespaced-replica-set name namespace nil))
  ([name namespace optional-params]
   (-> (read-extensions-v1beta1-namespaced-replica-set-with-http-info name namespace optional-params)
       :data)))
(defn read-extensions-v1beta1-namespaced-replica-set-scale-with-http-info
  "Read scale of the specified ReplicaSet; returns the raw HTTP response map."
  ([name namespace]
   (read-extensions-v1beta1-namespaced-replica-set-scale-with-http-info name namespace nil))
  ([name namespace {:keys [pretty]}]
   (check-required-params name namespace)
   (let [opts {:path-params   {"name" name "namespace" namespace}
               :header-params {}
               :query-params  {"pretty" pretty}
               :form-params   {}
               :content-types ["*/*"]
               :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
               :auth-names    ["BearerToken" "HTTPBasic"]}]
     (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/replicasets/{name}/scale" :get opts))))
(defn read-extensions-v1beta1-namespaced-replica-set-scale
  "Read scale of the specified ReplicaSet; returns the decoded response body."
  ([name namespace]
   (read-extensions-v1beta1-namespaced-replica-set-scale name namespace nil))
  ([name namespace optional-params]
   (-> (read-extensions-v1beta1-namespaced-replica-set-scale-with-http-info name namespace optional-params)
       :data)))
(defn read-extensions-v1beta1-namespaced-replica-set-status-with-http-info
  "Read status of the specified ReplicaSet; returns the raw HTTP response map."
  ([name namespace]
   (read-extensions-v1beta1-namespaced-replica-set-status-with-http-info name namespace nil))
  ([name namespace {:keys [pretty]}]
   (check-required-params name namespace)
   (let [opts {:path-params   {"name" name "namespace" namespace}
               :header-params {}
               :query-params  {"pretty" pretty}
               :form-params   {}
               :content-types ["*/*"]
               :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
               :auth-names    ["BearerToken" "HTTPBasic"]}]
     (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/replicasets/{name}/status" :get opts))))
(defn read-extensions-v1beta1-namespaced-replica-set-status
  "Read status of the specified ReplicaSet; returns the decoded response body."
  ([name namespace]
   (read-extensions-v1beta1-namespaced-replica-set-status name namespace nil))
  ([name namespace optional-params]
   (-> (read-extensions-v1beta1-namespaced-replica-set-status-with-http-info name namespace optional-params)
       :data)))
(defn read-extensions-v1beta1-namespaced-replication-controller-dummy-scale-with-http-info
  "Read scale of the specified ReplicationControllerDummy; returns the raw HTTP response map."
  ([name namespace]
   (read-extensions-v1beta1-namespaced-replication-controller-dummy-scale-with-http-info name namespace nil))
  ([name namespace {:keys [pretty]}]
   (check-required-params name namespace)
   (let [opts {:path-params   {"name" name "namespace" namespace}
               :header-params {}
               :query-params  {"pretty" pretty}
               :form-params   {}
               :content-types ["*/*"]
               :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
               :auth-names    ["BearerToken" "HTTPBasic"]}]
     (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/replicationcontrollers/{name}/scale" :get opts))))
(defn read-extensions-v1beta1-namespaced-replication-controller-dummy-scale
  "Read scale of the specified ReplicationControllerDummy; returns the decoded response body."
  ([name namespace]
   (read-extensions-v1beta1-namespaced-replication-controller-dummy-scale name namespace nil))
  ([name namespace optional-params]
   (-> (read-extensions-v1beta1-namespaced-replication-controller-dummy-scale-with-http-info name namespace optional-params)
       :data)))
(defn read-extensions-v1beta1-pod-security-policy-with-http-info
  "Read the specified PodSecurityPolicy; returns the raw HTTP response map."
  ([name]
   (read-extensions-v1beta1-pod-security-policy-with-http-info name nil))
  ([name {:keys [pretty exact export]}]
   (check-required-params name)
   (let [opts {:path-params   {"name" name}
               :header-params {}
               :query-params  {"pretty" pretty "exact" exact "export" export}
               :form-params   {}
               :content-types ["*/*"]
               :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
               :auth-names    ["BearerToken" "HTTPBasic"]}]
     (call-api "/apis/extensions/v1beta1/podsecuritypolicies/{name}" :get opts))))
(defn read-extensions-v1beta1-pod-security-policy
  "Read the specified PodSecurityPolicy; returns the decoded response body."
  ([name]
   (read-extensions-v1beta1-pod-security-policy name nil))
  ([name optional-params]
   (-> (read-extensions-v1beta1-pod-security-policy-with-http-info name optional-params)
       :data)))
(defn replace-extensions-v1beta1-namespaced-daemon-set-with-http-info
  "Replace the specified DaemonSet; returns the raw HTTP response map."
  ([name namespace body]
   (replace-extensions-v1beta1-namespaced-daemon-set-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (let [opts {:path-params   {"name" name "namespace" namespace}
               :header-params {}
               :query-params  {"pretty" pretty}
               :form-params   {}
               :body-param    body
               :content-types ["*/*"]
               :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
               :auth-names    ["BearerToken" "HTTPBasic"]}]
     (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/daemonsets/{name}" :put opts))))
(defn replace-extensions-v1beta1-namespaced-daemon-set
  "Replace the specified DaemonSet; returns the decoded response body."
  ([name namespace body]
   (replace-extensions-v1beta1-namespaced-daemon-set name namespace body nil))
  ([name namespace body optional-params]
   (-> (replace-extensions-v1beta1-namespaced-daemon-set-with-http-info name namespace body optional-params)
       :data)))
(defn replace-extensions-v1beta1-namespaced-daemon-set-status-with-http-info
  "Replace status of the specified DaemonSet; returns the raw HTTP response map."
  ([name namespace body]
   (replace-extensions-v1beta1-namespaced-daemon-set-status-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (let [opts {:path-params   {"name" name "namespace" namespace}
               :header-params {}
               :query-params  {"pretty" pretty}
               :form-params   {}
               :body-param    body
               :content-types ["*/*"]
               :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
               :auth-names    ["BearerToken" "HTTPBasic"]}]
     (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/daemonsets/{name}/status" :put opts))))
(defn replace-extensions-v1beta1-namespaced-daemon-set-status
  "Replace status of the specified DaemonSet; returns the decoded response body."
  ([name namespace body]
   (replace-extensions-v1beta1-namespaced-daemon-set-status name namespace body nil))
  ([name namespace body optional-params]
   (-> (replace-extensions-v1beta1-namespaced-daemon-set-status-with-http-info name namespace body optional-params)
       :data)))
(defn replace-extensions-v1beta1-namespaced-deployment-with-http-info
  "Replace the specified Deployment; returns the raw HTTP response map."
  ([name namespace body]
   (replace-extensions-v1beta1-namespaced-deployment-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (let [opts {:path-params   {"name" name "namespace" namespace}
               :header-params {}
               :query-params  {"pretty" pretty}
               :form-params   {}
               :body-param    body
               :content-types ["*/*"]
               :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
               :auth-names    ["BearerToken" "HTTPBasic"]}]
     (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}" :put opts))))
(defn replace-extensions-v1beta1-namespaced-deployment
  "Replace the specified Deployment; returns the decoded response body."
  ([name namespace body]
   (replace-extensions-v1beta1-namespaced-deployment name namespace body nil))
  ([name namespace body optional-params]
   (-> (replace-extensions-v1beta1-namespaced-deployment-with-http-info name namespace body optional-params)
       :data)))
(defn replace-extensions-v1beta1-namespaced-deployment-scale-with-http-info
  "Replace scale of the specified Deployment; returns the raw HTTP response map."
  ([name namespace body]
   (replace-extensions-v1beta1-namespaced-deployment-scale-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (let [opts {:path-params   {"name" name "namespace" namespace}
               :header-params {}
               :query-params  {"pretty" pretty}
               :form-params   {}
               :body-param    body
               :content-types ["*/*"]
               :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
               :auth-names    ["BearerToken" "HTTPBasic"]}]
     (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}/scale" :put opts))))
(defn replace-extensions-v1beta1-namespaced-deployment-scale
  "Replace scale of the specified Deployment; returns the decoded response body."
  ([name namespace body]
   (replace-extensions-v1beta1-namespaced-deployment-scale name namespace body nil))
  ([name namespace body optional-params]
   (-> (replace-extensions-v1beta1-namespaced-deployment-scale-with-http-info name namespace body optional-params)
       :data)))
(defn replace-extensions-v1beta1-namespaced-deployment-status-with-http-info
  "Replace status of the specified Deployment; returns the raw HTTP response map."
  ([name namespace body]
   (replace-extensions-v1beta1-namespaced-deployment-status-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (let [opts {:path-params   {"name" name "namespace" namespace}
               :header-params {}
               :query-params  {"pretty" pretty}
               :form-params   {}
               :body-param    body
               :content-types ["*/*"]
               :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
               :auth-names    ["BearerToken" "HTTPBasic"]}]
     (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}/status" :put opts))))
(defn replace-extensions-v1beta1-namespaced-deployment-status
  "Replace status of the specified Deployment; returns the decoded response body."
  ([name namespace body]
   (replace-extensions-v1beta1-namespaced-deployment-status name namespace body nil))
  ([name namespace body optional-params]
   (-> (replace-extensions-v1beta1-namespaced-deployment-status-with-http-info name namespace body optional-params)
       :data)))
(defn replace-extensions-v1beta1-namespaced-ingress-with-http-info
  "Replace the specified Ingress; returns the raw HTTP response map."
  ([name namespace body]
   (replace-extensions-v1beta1-namespaced-ingress-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (let [opts {:path-params   {"name" name "namespace" namespace}
               :header-params {}
               :query-params  {"pretty" pretty}
               :form-params   {}
               :body-param    body
               :content-types ["*/*"]
               :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
               :auth-names    ["BearerToken" "HTTPBasic"]}]
     (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/ingresses/{name}" :put opts))))
(defn replace-extensions-v1beta1-namespaced-ingress
  "Replace the specified Ingress; returns the decoded response body."
  ([name namespace body]
   (replace-extensions-v1beta1-namespaced-ingress name namespace body nil))
  ([name namespace body optional-params]
   (-> (replace-extensions-v1beta1-namespaced-ingress-with-http-info name namespace body optional-params)
       :data)))
(defn replace-extensions-v1beta1-namespaced-ingress-status-with-http-info
  "Replace status of the specified Ingress; returns the raw HTTP response map."
  ([name namespace body]
   (replace-extensions-v1beta1-namespaced-ingress-status-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (let [opts {:path-params   {"name" name "namespace" namespace}
               :header-params {}
               :query-params  {"pretty" pretty}
               :form-params   {}
               :body-param    body
               :content-types ["*/*"]
               :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
               :auth-names    ["BearerToken" "HTTPBasic"]}]
     (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/ingresses/{name}/status" :put opts))))
(defn replace-extensions-v1beta1-namespaced-ingress-status
  "Replace status of the specified Ingress; returns the decoded response body."
  ([name namespace body]
   (replace-extensions-v1beta1-namespaced-ingress-status name namespace body nil))
  ([name namespace body optional-params]
   (-> (replace-extensions-v1beta1-namespaced-ingress-status-with-http-info name namespace body optional-params)
       :data)))
(defn replace-extensions-v1beta1-namespaced-network-policy-with-http-info
  "Replace the specified NetworkPolicy; returns the raw HTTP response map."
  ([name namespace body]
   (replace-extensions-v1beta1-namespaced-network-policy-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (let [opts {:path-params   {"name" name "namespace" namespace}
               :header-params {}
               :query-params  {"pretty" pretty}
               :form-params   {}
               :body-param    body
               :content-types ["*/*"]
               :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
               :auth-names    ["BearerToken" "HTTPBasic"]}]
     (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/networkpolicies/{name}" :put opts))))
(defn replace-extensions-v1beta1-namespaced-network-policy
  "Replace the specified NetworkPolicy; returns the decoded response body."
  ([name namespace body]
   (replace-extensions-v1beta1-namespaced-network-policy name namespace body nil))
  ([name namespace body optional-params]
   (-> (replace-extensions-v1beta1-namespaced-network-policy-with-http-info name namespace body optional-params)
       :data)))
(defn replace-extensions-v1beta1-namespaced-replica-set-with-http-info
  "Replace the specified ReplicaSet; returns the raw HTTP response map."
  ([name namespace body]
   (replace-extensions-v1beta1-namespaced-replica-set-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (let [opts {:path-params   {"name" name "namespace" namespace}
               :header-params {}
               :query-params  {"pretty" pretty}
               :form-params   {}
               :body-param    body
               :content-types ["*/*"]
               :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
               :auth-names    ["BearerToken" "HTTPBasic"]}]
     (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/replicasets/{name}" :put opts))))
(defn replace-extensions-v1beta1-namespaced-replica-set
  "Replace the specified ReplicaSet; returns the decoded response body."
  ([name namespace body]
   (replace-extensions-v1beta1-namespaced-replica-set name namespace body nil))
  ([name namespace body optional-params]
   (-> (replace-extensions-v1beta1-namespaced-replica-set-with-http-info name namespace body optional-params)
       :data)))
(defn replace-extensions-v1beta1-namespaced-replica-set-scale-with-http-info
  "Replace scale of the specified ReplicaSet; returns the raw HTTP response map."
  ([name namespace body]
   (replace-extensions-v1beta1-namespaced-replica-set-scale-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (let [opts {:path-params   {"name" name "namespace" namespace}
               :header-params {}
               :query-params  {"pretty" pretty}
               :form-params   {}
               :body-param    body
               :content-types ["*/*"]
               :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
               :auth-names    ["BearerToken" "HTTPBasic"]}]
     (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/replicasets/{name}/scale" :put opts))))
(defn replace-extensions-v1beta1-namespaced-replica-set-scale
  "Replace scale of the specified ReplicaSet; returns the decoded response body."
  ([name namespace body]
   (replace-extensions-v1beta1-namespaced-replica-set-scale name namespace body nil))
  ([name namespace body optional-params]
   (-> (replace-extensions-v1beta1-namespaced-replica-set-scale-with-http-info name namespace body optional-params)
       :data)))
(defn replace-extensions-v1beta1-namespaced-replica-set-status-with-http-info
  "Replace status of the specified ReplicaSet; returns the raw HTTP response map."
  ([name namespace body]
   (replace-extensions-v1beta1-namespaced-replica-set-status-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (let [opts {:path-params   {"name" name "namespace" namespace}
               :header-params {}
               :query-params  {"pretty" pretty}
               :form-params   {}
               :body-param    body
               :content-types ["*/*"]
               :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
               :auth-names    ["BearerToken" "HTTPBasic"]}]
     (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/replicasets/{name}/status" :put opts))))
(defn replace-extensions-v1beta1-namespaced-replica-set-status
  "Replace status of the specified ReplicaSet; returns the decoded response body."
  ([name namespace body]
   (replace-extensions-v1beta1-namespaced-replica-set-status name namespace body nil))
  ([name namespace body optional-params]
   (-> (replace-extensions-v1beta1-namespaced-replica-set-status-with-http-info name namespace body optional-params)
       :data)))
(defn replace-extensions-v1beta1-namespaced-replication-controller-dummy-scale-with-http-info
  "Replace scale of the specified ReplicationControllerDummy; returns the raw HTTP response map."
  ([name namespace body]
   (replace-extensions-v1beta1-namespaced-replication-controller-dummy-scale-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (let [opts {:path-params   {"name" name "namespace" namespace}
               :header-params {}
               :query-params  {"pretty" pretty}
               :form-params   {}
               :body-param    body
               :content-types ["*/*"]
               :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
               :auth-names    ["BearerToken" "HTTPBasic"]}]
     (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/replicationcontrollers/{name}/scale" :put opts))))
(defn replace-extensions-v1beta1-namespaced-replication-controller-dummy-scale
  "Replace scale of the specified ReplicationControllerDummy; returns the decoded response body."
  ([name namespace body]
   (replace-extensions-v1beta1-namespaced-replication-controller-dummy-scale name namespace body nil))
  ([name namespace body optional-params]
   (-> (replace-extensions-v1beta1-namespaced-replication-controller-dummy-scale-with-http-info name namespace body optional-params)
       :data)))
(defn replace-extensions-v1beta1-pod-security-policy-with-http-info
  "Replace the specified PodSecurityPolicy; returns the raw HTTP response map."
  ([name body]
   (replace-extensions-v1beta1-pod-security-policy-with-http-info name body nil))
  ([name body {:keys [pretty]}]
   (check-required-params name body)
   (let [opts {:path-params   {"name" name}
               :header-params {}
               :query-params  {"pretty" pretty}
               :form-params   {}
               :body-param    body
               :content-types ["*/*"]
               :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
               :auth-names    ["BearerToken" "HTTPBasic"]}]
     (call-api "/apis/extensions/v1beta1/podsecuritypolicies/{name}" :put opts))))
(defn replace-extensions-v1beta1-pod-security-policy
  "Replace the specified PodSecurityPolicy; returns the decoded response body."
  ([name body]
   (replace-extensions-v1beta1-pod-security-policy name body nil))
  ([name body optional-params]
   (-> (replace-extensions-v1beta1-pod-security-policy-with-http-info name body optional-params)
       :data)))
(defn watch-extensions-v1beta1-daemon-set-list-for-all-namespaces-with-http-info
  "Watch individual changes to a list of DaemonSet across all namespaces; returns the raw HTTP response map."
  ([] (watch-extensions-v1beta1-daemon-set-list-for-all-namespaces-with-http-info nil))
  ([{:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch]}]
   (let [opts {:path-params   {}
               :header-params {}
               :query-params  {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch}
               :form-params   {}
               :content-types ["*/*"]
               :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
               :auth-names    ["BearerToken" "HTTPBasic"]}]
     (call-api "/apis/extensions/v1beta1/watch/daemonsets" :get opts))))
(defn watch-extensions-v1beta1-daemon-set-list-for-all-namespaces
  "Watch individual changes to a list of DaemonSet across all namespaces; returns the decoded response body."
  ([] (watch-extensions-v1beta1-daemon-set-list-for-all-namespaces nil))
  ([optional-params]
   (-> (watch-extensions-v1beta1-daemon-set-list-for-all-namespaces-with-http-info optional-params)
       :data)))
(defn watch-extensions-v1beta1-deployment-list-for-all-namespaces-with-http-info
  "Watch individual changes to a list of Deployment across all namespaces; returns the raw HTTP response map."
  ([] (watch-extensions-v1beta1-deployment-list-for-all-namespaces-with-http-info nil))
  ([{:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch]}]
   (let [opts {:path-params   {}
               :header-params {}
               :query-params  {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch}
               :form-params   {}
               :content-types ["*/*"]
               :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
               :auth-names    ["BearerToken" "HTTPBasic"]}]
     (call-api "/apis/extensions/v1beta1/watch/deployments" :get opts))))
(defn watch-extensions-v1beta1-deployment-list-for-all-namespaces
  "Watch individual changes to a list of Deployment across all namespaces; returns the decoded response body."
  ([] (watch-extensions-v1beta1-deployment-list-for-all-namespaces nil))
  ([optional-params]
   (-> (watch-extensions-v1beta1-deployment-list-for-all-namespaces-with-http-info optional-params)
       :data)))
(defn watch-extensions-v1beta1-ingress-list-for-all-namespaces-with-http-info
  "Watch individual changes to a list of Ingress across all namespaces; returns the raw HTTP response map."
  ([] (watch-extensions-v1beta1-ingress-list-for-all-namespaces-with-http-info nil))
  ([{:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch]}]
   (let [opts {:path-params   {}
               :header-params {}
               :query-params  {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch}
               :form-params   {}
               :content-types ["*/*"]
               :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
               :auth-names    ["BearerToken" "HTTPBasic"]}]
     (call-api "/apis/extensions/v1beta1/watch/ingresses" :get opts))))
(defn watch-extensions-v1beta1-ingress-list-for-all-namespaces
  "Watch individual changes to a list of Ingress across all namespaces; returns the decoded response body."
  ([] (watch-extensions-v1beta1-ingress-list-for-all-namespaces nil))
  ([optional-params]
   (-> (watch-extensions-v1beta1-ingress-list-for-all-namespaces-with-http-info optional-params)
       :data)))
(defn watch-extensions-v1beta1-namespaced-daemon-set-with-http-info
  "Watch changes to a single DaemonSet.
  GET /apis/extensions/v1beta1/watch/namespaces/{namespace}/daemonsets/{name}.
  `name` and `namespace` are required; the optional map carries the standard
  Kubernetes list/watch query options. Returns the full `call-api` response."
  ([name namespace ] (watch-extensions-v1beta1-namespaced-daemon-set-with-http-info name namespace nil))
  ([name namespace {:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
   ;; Fail fast when a required path parameter is missing.
   (check-required-params name namespace)
   (call-api "/apis/extensions/v1beta1/watch/namespaces/{namespace}/daemonsets/{name}" :get
             {:path-params {"name" name "namespace" namespace }
              :header-params {}
              :query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params {}
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn watch-extensions-v1beta1-namespaced-daemon-set
  "Watch changes to a single DaemonSet in `namespace`.
  Convenience wrapper returning only the :data payload of the
  -with-http-info variant."
  ([name namespace ] (watch-extensions-v1beta1-namespaced-daemon-set name namespace nil))
  ([name namespace optional-params]
   (-> (watch-extensions-v1beta1-namespaced-daemon-set-with-http-info name namespace optional-params)
       :data)))
(defn watch-extensions-v1beta1-namespaced-daemon-set-list-with-http-info
  "Watch individual changes to the list of DaemonSet in `namespace`.
  GET /apis/extensions/v1beta1/watch/namespaces/{namespace}/daemonsets.
  `namespace` is required; the optional map carries the standard Kubernetes
  list/watch query options. Returns the full `call-api` response."
  ([namespace ] (watch-extensions-v1beta1-namespaced-daemon-set-list-with-http-info namespace nil))
  ([namespace {:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
   ;; Fail fast when the required path parameter is missing.
   (check-required-params namespace)
   (call-api "/apis/extensions/v1beta1/watch/namespaces/{namespace}/daemonsets" :get
             {:path-params {"namespace" namespace }
              :header-params {}
              :query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params {}
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn watch-extensions-v1beta1-namespaced-daemon-set-list
  "Watch individual changes to the list of DaemonSet in `namespace`.
  Convenience wrapper returning only the :data payload of the
  -with-http-info variant."
  ([namespace ] (watch-extensions-v1beta1-namespaced-daemon-set-list namespace nil))
  ([namespace optional-params]
   (-> (watch-extensions-v1beta1-namespaced-daemon-set-list-with-http-info namespace optional-params)
       :data)))
(defn watch-extensions-v1beta1-namespaced-deployment-with-http-info
  "Watch changes to a single Deployment.
  GET /apis/extensions/v1beta1/watch/namespaces/{namespace}/deployments/{name}.
  `name` and `namespace` are required; the optional map carries the standard
  Kubernetes list/watch query options. Returns the full `call-api` response."
  ([name namespace ] (watch-extensions-v1beta1-namespaced-deployment-with-http-info name namespace nil))
  ([name namespace {:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
   ;; Fail fast when a required path parameter is missing.
   (check-required-params name namespace)
   (call-api "/apis/extensions/v1beta1/watch/namespaces/{namespace}/deployments/{name}" :get
             {:path-params {"name" name "namespace" namespace }
              :header-params {}
              :query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params {}
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn watch-extensions-v1beta1-namespaced-deployment
  "Watch changes to a single Deployment in `namespace`.
  Convenience wrapper returning only the :data payload of the
  -with-http-info variant."
  ([name namespace ] (watch-extensions-v1beta1-namespaced-deployment name namespace nil))
  ([name namespace optional-params]
   (-> (watch-extensions-v1beta1-namespaced-deployment-with-http-info name namespace optional-params)
       :data)))
(defn watch-extensions-v1beta1-namespaced-deployment-list-with-http-info
  "Watch individual changes to the list of Deployment in `namespace`.
  GET /apis/extensions/v1beta1/watch/namespaces/{namespace}/deployments.
  `namespace` is required; the optional map carries the standard Kubernetes
  list/watch query options. Returns the full `call-api` response."
  ([namespace ] (watch-extensions-v1beta1-namespaced-deployment-list-with-http-info namespace nil))
  ([namespace {:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
   ;; Fail fast when the required path parameter is missing.
   (check-required-params namespace)
   (call-api "/apis/extensions/v1beta1/watch/namespaces/{namespace}/deployments" :get
             {:path-params {"namespace" namespace }
              :header-params {}
              :query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params {}
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn watch-extensions-v1beta1-namespaced-deployment-list
  "Watch individual changes to the list of Deployment in `namespace`.
  Convenience wrapper returning only the :data payload of the
  -with-http-info variant."
  ([namespace ] (watch-extensions-v1beta1-namespaced-deployment-list namespace nil))
  ([namespace optional-params]
   (-> (watch-extensions-v1beta1-namespaced-deployment-list-with-http-info namespace optional-params)
       :data)))
(defn watch-extensions-v1beta1-namespaced-ingress-with-http-info
  "Watch changes to a single Ingress.
  GET /apis/extensions/v1beta1/watch/namespaces/{namespace}/ingresses/{name}.
  `name` and `namespace` are required; the optional map carries the standard
  Kubernetes list/watch query options. Returns the full `call-api` response."
  ([name namespace ] (watch-extensions-v1beta1-namespaced-ingress-with-http-info name namespace nil))
  ([name namespace {:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
   ;; Fail fast when a required path parameter is missing.
   (check-required-params name namespace)
   (call-api "/apis/extensions/v1beta1/watch/namespaces/{namespace}/ingresses/{name}" :get
             {:path-params {"name" name "namespace" namespace }
              :header-params {}
              :query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params {}
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn watch-extensions-v1beta1-namespaced-ingress
  "Watch changes to a single Ingress in `namespace`.
  Convenience wrapper returning only the :data payload of the
  -with-http-info variant."
  ([name namespace ] (watch-extensions-v1beta1-namespaced-ingress name namespace nil))
  ([name namespace optional-params]
   (-> (watch-extensions-v1beta1-namespaced-ingress-with-http-info name namespace optional-params)
       :data)))
(defn watch-extensions-v1beta1-namespaced-ingress-list-with-http-info
  "Watch individual changes to the list of Ingress in `namespace`.
  GET /apis/extensions/v1beta1/watch/namespaces/{namespace}/ingresses.
  `namespace` is required; the optional map carries the standard Kubernetes
  list/watch query options. Returns the full `call-api` response."
  ([namespace ] (watch-extensions-v1beta1-namespaced-ingress-list-with-http-info namespace nil))
  ([namespace {:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
   ;; Fail fast when the required path parameter is missing.
   (check-required-params namespace)
   (call-api "/apis/extensions/v1beta1/watch/namespaces/{namespace}/ingresses" :get
             {:path-params {"namespace" namespace }
              :header-params {}
              :query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params {}
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn watch-extensions-v1beta1-namespaced-ingress-list
  "Watch individual changes to the list of Ingress in `namespace`.
  Convenience wrapper returning only the :data payload of the
  -with-http-info variant."
  ([namespace ] (watch-extensions-v1beta1-namespaced-ingress-list namespace nil))
  ([namespace optional-params]
   (-> (watch-extensions-v1beta1-namespaced-ingress-list-with-http-info namespace optional-params)
       :data)))
(defn watch-extensions-v1beta1-namespaced-network-policy-with-http-info
  "Watch changes to a single NetworkPolicy.
  GET /apis/extensions/v1beta1/watch/namespaces/{namespace}/networkpolicies/{name}.
  `name` and `namespace` are required; the optional map carries the standard
  Kubernetes list/watch query options. Returns the full `call-api` response."
  ([name namespace ] (watch-extensions-v1beta1-namespaced-network-policy-with-http-info name namespace nil))
  ([name namespace {:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
   ;; Fail fast when a required path parameter is missing.
   (check-required-params name namespace)
   (call-api "/apis/extensions/v1beta1/watch/namespaces/{namespace}/networkpolicies/{name}" :get
             {:path-params {"name" name "namespace" namespace }
              :header-params {}
              :query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params {}
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn watch-extensions-v1beta1-namespaced-network-policy
  "Watch changes to a single NetworkPolicy in `namespace`.
  Convenience wrapper returning only the :data payload of the
  -with-http-info variant."
  ([name namespace ] (watch-extensions-v1beta1-namespaced-network-policy name namespace nil))
  ([name namespace optional-params]
   (-> (watch-extensions-v1beta1-namespaced-network-policy-with-http-info name namespace optional-params)
       :data)))
(defn watch-extensions-v1beta1-namespaced-network-policy-list-with-http-info
  "Watch individual changes to the list of NetworkPolicy in `namespace`.
  GET /apis/extensions/v1beta1/watch/namespaces/{namespace}/networkpolicies.
  `namespace` is required; the optional map carries the standard Kubernetes
  list/watch query options. Returns the full `call-api` response."
  ([namespace ] (watch-extensions-v1beta1-namespaced-network-policy-list-with-http-info namespace nil))
  ([namespace {:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
   ;; Fail fast when the required path parameter is missing.
   (check-required-params namespace)
   (call-api "/apis/extensions/v1beta1/watch/namespaces/{namespace}/networkpolicies" :get
             {:path-params {"namespace" namespace }
              :header-params {}
              :query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params {}
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn watch-extensions-v1beta1-namespaced-network-policy-list
  "Watch individual changes to the list of NetworkPolicy in `namespace`.
  Convenience wrapper returning only the :data payload of the
  -with-http-info variant."
  ([namespace ] (watch-extensions-v1beta1-namespaced-network-policy-list namespace nil))
  ([namespace optional-params]
   (-> (watch-extensions-v1beta1-namespaced-network-policy-list-with-http-info namespace optional-params)
       :data)))
(defn watch-extensions-v1beta1-namespaced-replica-set-with-http-info
  "Watch changes to a single ReplicaSet.
  GET /apis/extensions/v1beta1/watch/namespaces/{namespace}/replicasets/{name}.
  `name` and `namespace` are required; the optional map carries the standard
  Kubernetes list/watch query options. Returns the full `call-api` response."
  ([name namespace ] (watch-extensions-v1beta1-namespaced-replica-set-with-http-info name namespace nil))
  ([name namespace {:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
   ;; Fail fast when a required path parameter is missing.
   (check-required-params name namespace)
   (call-api "/apis/extensions/v1beta1/watch/namespaces/{namespace}/replicasets/{name}" :get
             {:path-params {"name" name "namespace" namespace }
              :header-params {}
              :query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params {}
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn watch-extensions-v1beta1-namespaced-replica-set
  "Watch changes to a single ReplicaSet in `namespace`.
  Convenience wrapper returning only the :data payload of the
  -with-http-info variant."
  ([name namespace ] (watch-extensions-v1beta1-namespaced-replica-set name namespace nil))
  ([name namespace optional-params]
   (-> (watch-extensions-v1beta1-namespaced-replica-set-with-http-info name namespace optional-params)
       :data)))
(defn watch-extensions-v1beta1-namespaced-replica-set-list-with-http-info
  "Watch individual changes to the list of ReplicaSet in `namespace`.
  GET /apis/extensions/v1beta1/watch/namespaces/{namespace}/replicasets.
  `namespace` is required; the optional map carries the standard Kubernetes
  list/watch query options. Returns the full `call-api` response."
  ([namespace ] (watch-extensions-v1beta1-namespaced-replica-set-list-with-http-info namespace nil))
  ([namespace {:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
   ;; Fail fast when the required path parameter is missing.
   (check-required-params namespace)
   (call-api "/apis/extensions/v1beta1/watch/namespaces/{namespace}/replicasets" :get
             {:path-params {"namespace" namespace }
              :header-params {}
              :query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params {}
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn watch-extensions-v1beta1-namespaced-replica-set-list
  "Watch individual changes to the list of ReplicaSet in `namespace`.
  Convenience wrapper returning only the :data payload of the
  -with-http-info variant."
  ([namespace ] (watch-extensions-v1beta1-namespaced-replica-set-list namespace nil))
  ([namespace optional-params]
   (-> (watch-extensions-v1beta1-namespaced-replica-set-list-with-http-info namespace optional-params)
       :data)))
(defn watch-extensions-v1beta1-network-policy-list-for-all-namespaces-with-http-info
  "Watch individual changes to a list of NetworkPolicy across all namespaces.
  GET /apis/extensions/v1beta1/watch/networkpolicies. The optional map carries
  the standard Kubernetes list/watch query options. Returns the full
  `call-api` response."
  ([] (watch-extensions-v1beta1-network-policy-list-for-all-namespaces-with-http-info nil))
  ([{:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
   (call-api "/apis/extensions/v1beta1/watch/networkpolicies" :get
             {:path-params {}
              :header-params {}
              :query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params {}
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn watch-extensions-v1beta1-network-policy-list-for-all-namespaces
  "Watch individual changes to a list of NetworkPolicy across all namespaces.
  Convenience wrapper returning only the :data payload of the
  -with-http-info variant."
  ([] (watch-extensions-v1beta1-network-policy-list-for-all-namespaces nil))
  ([optional-params]
   (-> optional-params
       watch-extensions-v1beta1-network-policy-list-for-all-namespaces-with-http-info
       :data)))
(defn watch-extensions-v1beta1-pod-security-policy-with-http-info
  "Watch changes to a single PodSecurityPolicy (cluster-scoped).
  GET /apis/extensions/v1beta1/watch/podsecuritypolicies/{name}.
  `name` is required; the optional map carries the standard Kubernetes
  list/watch query options. Returns the full `call-api` response."
  ([name ] (watch-extensions-v1beta1-pod-security-policy-with-http-info name nil))
  ([name {:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
   ;; Fail fast when the required path parameter is missing.
   (check-required-params name)
   (call-api "/apis/extensions/v1beta1/watch/podsecuritypolicies/{name}" :get
             {:path-params {"name" name }
              :header-params {}
              :query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params {}
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn watch-extensions-v1beta1-pod-security-policy
  "Watch changes to a single PodSecurityPolicy.
  Convenience wrapper returning only the :data payload of the
  -with-http-info variant."
  ([name ] (watch-extensions-v1beta1-pod-security-policy name nil))
  ([name optional-params]
   (-> (watch-extensions-v1beta1-pod-security-policy-with-http-info name optional-params)
       :data)))
(defn watch-extensions-v1beta1-pod-security-policy-list-with-http-info
  "Watch individual changes to the list of PodSecurityPolicy (cluster-scoped).
  GET /apis/extensions/v1beta1/watch/podsecuritypolicies. The optional map
  carries the standard Kubernetes list/watch query options. Returns the full
  `call-api` response."
  ([] (watch-extensions-v1beta1-pod-security-policy-list-with-http-info nil))
  ([{:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
   (call-api "/apis/extensions/v1beta1/watch/podsecuritypolicies" :get
             {:path-params {}
              :header-params {}
              :query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params {}
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn watch-extensions-v1beta1-pod-security-policy-list
  "Watch individual changes to the list of PodSecurityPolicy.
  Convenience wrapper returning only the :data payload of the
  -with-http-info variant."
  ([] (watch-extensions-v1beta1-pod-security-policy-list nil))
  ([optional-params]
   (-> optional-params
       watch-extensions-v1beta1-pod-security-policy-list-with-http-info
       :data)))
(defn watch-extensions-v1beta1-replica-set-list-for-all-namespaces-with-http-info
  "Watch individual changes to a list of ReplicaSet across all namespaces.
  GET /apis/extensions/v1beta1/watch/replicasets. The optional map carries the
  standard Kubernetes list/watch query options. Returns the full `call-api`
  response."
  ([] (watch-extensions-v1beta1-replica-set-list-for-all-namespaces-with-http-info nil))
  ([{:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
   (call-api "/apis/extensions/v1beta1/watch/replicasets" :get
             {:path-params {}
              :header-params {}
              :query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params {}
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn watch-extensions-v1beta1-replica-set-list-for-all-namespaces
  "Watch individual changes to a list of ReplicaSet across all namespaces.
  Convenience wrapper returning only the :data payload of the
  -with-http-info variant."
  ([] (watch-extensions-v1beta1-replica-set-list-for-all-namespaces nil))
  ([optional-params]
   (-> optional-params
       watch-extensions-v1beta1-replica-set-list-for-all-namespaces-with-http-info
       :data)))
;; Namespace declaration for the generated extensions/v1beta1 API client.
;; (Fixed: dataset-extraction junk that was fused onto this line has been
;; removed so the form parses again.)
(ns kubernetes.api.extensions-v-beta-
  (:require [kubernetes.core :refer [call-api check-required-params with-collection-format]])
  (:import (java.io File)))
(defn create-extensions-v1beta1-namespaced-daemon-set-with-http-info
  "Create a DaemonSet in `namespace`.
  POST /apis/extensions/v1beta1/namespaces/{namespace}/daemonsets.
  `namespace` and `body` (the DaemonSet manifest) are required; `pretty` is
  the only optional query param. Returns the full `call-api` response."
  ([namespace body ] (create-extensions-v1beta1-namespaced-daemon-set-with-http-info namespace body nil))
  ([namespace body {:keys [pretty ]}]
   ;; Fail fast when a required parameter is missing.
   (check-required-params namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/daemonsets" :post
             {:path-params {"namespace" namespace }
              :header-params {}
              :query-params {"pretty" pretty }
              :form-params {}
              :body-param body
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn create-extensions-v1beta1-namespaced-daemon-set
  "Create a DaemonSet in `namespace` from manifest `body`.
  Convenience wrapper returning only the :data payload of the
  -with-http-info variant."
  ([namespace body ] (create-extensions-v1beta1-namespaced-daemon-set namespace body nil))
  ([namespace body optional-params]
   (-> (create-extensions-v1beta1-namespaced-daemon-set-with-http-info namespace body optional-params)
       :data)))
(defn create-extensions-v1beta1-namespaced-deployment-with-http-info
  "Create a Deployment in `namespace`.
  POST /apis/extensions/v1beta1/namespaces/{namespace}/deployments.
  `namespace` and `body` (the Deployment manifest) are required; `pretty` is
  the only optional query param. Returns the full `call-api` response."
  ([namespace body ] (create-extensions-v1beta1-namespaced-deployment-with-http-info namespace body nil))
  ([namespace body {:keys [pretty ]}]
   ;; Fail fast when a required parameter is missing.
   (check-required-params namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/deployments" :post
             {:path-params {"namespace" namespace }
              :header-params {}
              :query-params {"pretty" pretty }
              :form-params {}
              :body-param body
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn create-extensions-v1beta1-namespaced-deployment
  "Create a Deployment in `namespace` from manifest `body`.
  Convenience wrapper returning only the :data payload of the
  -with-http-info variant."
  ([namespace body ] (create-extensions-v1beta1-namespaced-deployment namespace body nil))
  ([namespace body optional-params]
   (-> (create-extensions-v1beta1-namespaced-deployment-with-http-info namespace body optional-params)
       :data)))
(defn create-extensions-v1beta1-namespaced-deployment-rollback-with-http-info
  "Create a rollback of the Deployment `name` in `namespace`.
  POST /apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}/rollback.
  `name`, `namespace` and `body` (the DeploymentRollback object) are required;
  `pretty` is the only optional query param. Returns the full `call-api` response."
  ([name namespace body ] (create-extensions-v1beta1-namespaced-deployment-rollback-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty ]}]
   ;; Fail fast when a required parameter is missing.
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}/rollback" :post
             {:path-params {"name" name "namespace" namespace }
              :header-params {}
              :query-params {"pretty" pretty }
              :form-params {}
              :body-param body
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn create-extensions-v1beta1-namespaced-deployment-rollback
  "Create a rollback of the Deployment `name` in `namespace`.
  Convenience wrapper returning only the :data payload of the
  -with-http-info variant."
  ([name namespace body ] (create-extensions-v1beta1-namespaced-deployment-rollback name namespace body nil))
  ([name namespace body optional-params]
   (-> (create-extensions-v1beta1-namespaced-deployment-rollback-with-http-info name namespace body optional-params)
       :data)))
(defn create-extensions-v1beta1-namespaced-ingress-with-http-info
  "Create an Ingress in `namespace`.
  POST /apis/extensions/v1beta1/namespaces/{namespace}/ingresses.
  `namespace` and `body` (the Ingress manifest) are required; `pretty` is the
  only optional query param. Returns the full `call-api` response."
  ([namespace body ] (create-extensions-v1beta1-namespaced-ingress-with-http-info namespace body nil))
  ([namespace body {:keys [pretty ]}]
   ;; Fail fast when a required parameter is missing.
   (check-required-params namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/ingresses" :post
             {:path-params {"namespace" namespace }
              :header-params {}
              :query-params {"pretty" pretty }
              :form-params {}
              :body-param body
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn create-extensions-v1beta1-namespaced-ingress
  "Create an Ingress in `namespace` from manifest `body`.
  Convenience wrapper returning only the :data payload of the
  -with-http-info variant."
  ([namespace body ] (create-extensions-v1beta1-namespaced-ingress namespace body nil))
  ([namespace body optional-params]
   (-> (create-extensions-v1beta1-namespaced-ingress-with-http-info namespace body optional-params)
       :data)))
(defn create-extensions-v1beta1-namespaced-network-policy-with-http-info
  "Create a NetworkPolicy in `namespace`.
  POST /apis/extensions/v1beta1/namespaces/{namespace}/networkpolicies.
  `namespace` and `body` (the NetworkPolicy manifest) are required; `pretty`
  is the only optional query param. Returns the full `call-api` response."
  ([namespace body ] (create-extensions-v1beta1-namespaced-network-policy-with-http-info namespace body nil))
  ([namespace body {:keys [pretty ]}]
   ;; Fail fast when a required parameter is missing.
   (check-required-params namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/networkpolicies" :post
             {:path-params {"namespace" namespace }
              :header-params {}
              :query-params {"pretty" pretty }
              :form-params {}
              :body-param body
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn create-extensions-v1beta1-namespaced-network-policy
  "Create a NetworkPolicy in `namespace` from manifest `body`.
  Convenience wrapper returning only the :data payload of the
  -with-http-info variant."
  ([namespace body ] (create-extensions-v1beta1-namespaced-network-policy namespace body nil))
  ([namespace body optional-params]
   (-> (create-extensions-v1beta1-namespaced-network-policy-with-http-info namespace body optional-params)
       :data)))
(defn create-extensions-v1beta1-namespaced-replica-set-with-http-info
  "Create a ReplicaSet in `namespace`.
  POST /apis/extensions/v1beta1/namespaces/{namespace}/replicasets.
  `namespace` and `body` (the ReplicaSet manifest) are required; `pretty` is
  the only optional query param. Returns the full `call-api` response."
  ([namespace body ] (create-extensions-v1beta1-namespaced-replica-set-with-http-info namespace body nil))
  ([namespace body {:keys [pretty ]}]
   ;; Fail fast when a required parameter is missing.
   (check-required-params namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/replicasets" :post
             {:path-params {"namespace" namespace }
              :header-params {}
              :query-params {"pretty" pretty }
              :form-params {}
              :body-param body
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn create-extensions-v1beta1-namespaced-replica-set
  "Create a ReplicaSet in `namespace` from manifest `body`.
  Convenience wrapper returning only the :data payload of the
  -with-http-info variant."
  ([namespace body ] (create-extensions-v1beta1-namespaced-replica-set namespace body nil))
  ([namespace body optional-params]
   (-> (create-extensions-v1beta1-namespaced-replica-set-with-http-info namespace body optional-params)
       :data)))
(defn create-extensions-v1beta1-pod-security-policy-with-http-info
  "Create a PodSecurityPolicy (cluster-scoped).
  POST /apis/extensions/v1beta1/podsecuritypolicies.
  `body` (the PodSecurityPolicy manifest) is required; `pretty` is the only
  optional query param. Returns the full `call-api` response."
  ([body ] (create-extensions-v1beta1-pod-security-policy-with-http-info body nil))
  ([body {:keys [pretty ]}]
   ;; Fail fast when the required body is missing.
   (check-required-params body)
   (call-api "/apis/extensions/v1beta1/podsecuritypolicies" :post
             {:path-params {}
              :header-params {}
              :query-params {"pretty" pretty }
              :form-params {}
              :body-param body
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn create-extensions-v1beta1-pod-security-policy
  "Create a PodSecurityPolicy from manifest `body`.
  Convenience wrapper returning only the :data payload of the
  -with-http-info variant."
  ([body ] (create-extensions-v1beta1-pod-security-policy body nil))
  ([body optional-params]
   (-> (create-extensions-v1beta1-pod-security-policy-with-http-info body optional-params)
       :data)))
(defn delete-extensions-v1beta1-collection-namespaced-daemon-set-with-http-info
  "Delete the collection of DaemonSet in `namespace`, filtered by the
  standard Kubernetes list selectors.
  DELETE /apis/extensions/v1beta1/namespaces/{namespace}/daemonsets.
  `namespace` is required. Returns the full `call-api` response."
  ([namespace ] (delete-extensions-v1beta1-collection-namespaced-daemon-set-with-http-info namespace nil))
  ([namespace {:keys [pretty continue field-selector include-uninitialized label-selector limit resource-version timeout-seconds watch ]}]
   ;; Fail fast when the required path parameter is missing.
   (check-required-params namespace)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/daemonsets" :delete
             {:path-params {"namespace" namespace }
              :header-params {}
              :query-params {"pretty" pretty "continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params {}
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn delete-extensions-v1beta1-collection-namespaced-daemon-set
  "Delete the collection of DaemonSet in `namespace`.
  Convenience wrapper returning only the :data payload of the
  -with-http-info variant."
  ([namespace ] (delete-extensions-v1beta1-collection-namespaced-daemon-set namespace nil))
  ([namespace optional-params]
   (-> (delete-extensions-v1beta1-collection-namespaced-daemon-set-with-http-info namespace optional-params)
       :data)))
(defn delete-extensions-v1beta1-collection-namespaced-deployment-with-http-info
  "Delete the collection of Deployment in `namespace`, filtered by the
  standard Kubernetes list selectors.
  DELETE /apis/extensions/v1beta1/namespaces/{namespace}/deployments.
  `namespace` is required. Returns the full `call-api` response."
  ([namespace ] (delete-extensions-v1beta1-collection-namespaced-deployment-with-http-info namespace nil))
  ([namespace {:keys [pretty continue field-selector include-uninitialized label-selector limit resource-version timeout-seconds watch ]}]
   ;; Fail fast when the required path parameter is missing.
   (check-required-params namespace)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/deployments" :delete
             {:path-params {"namespace" namespace }
              :header-params {}
              :query-params {"pretty" pretty "continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params {}
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn delete-extensions-v1beta1-collection-namespaced-deployment
  "Delete the collection of Deployment in `namespace`.
  Convenience wrapper returning only the :data payload of the
  -with-http-info variant."
  ([namespace ] (delete-extensions-v1beta1-collection-namespaced-deployment namespace nil))
  ([namespace optional-params]
   (-> (delete-extensions-v1beta1-collection-namespaced-deployment-with-http-info namespace optional-params)
       :data)))
(defn delete-extensions-v1beta1-collection-namespaced-ingress-with-http-info
  "Delete the collection of Ingress in `namespace`, filtered by the standard
  Kubernetes list selectors.
  DELETE /apis/extensions/v1beta1/namespaces/{namespace}/ingresses.
  `namespace` is required. Returns the full `call-api` response."
  ([namespace ] (delete-extensions-v1beta1-collection-namespaced-ingress-with-http-info namespace nil))
  ([namespace {:keys [pretty continue field-selector include-uninitialized label-selector limit resource-version timeout-seconds watch ]}]
   ;; Fail fast when the required path parameter is missing.
   (check-required-params namespace)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/ingresses" :delete
             {:path-params {"namespace" namespace }
              :header-params {}
              :query-params {"pretty" pretty "continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params {}
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn delete-extensions-v1beta1-collection-namespaced-ingress
  "Delete the collection of Ingress in `namespace`.
  Convenience wrapper returning only the :data payload of the
  -with-http-info variant."
  ([namespace ] (delete-extensions-v1beta1-collection-namespaced-ingress namespace nil))
  ([namespace optional-params]
   (-> (delete-extensions-v1beta1-collection-namespaced-ingress-with-http-info namespace optional-params)
       :data)))
(defn delete-extensions-v1beta1-collection-namespaced-network-policy-with-http-info
  "Delete the collection of NetworkPolicy in `namespace`, filtered by the
  standard Kubernetes list selectors.
  DELETE /apis/extensions/v1beta1/namespaces/{namespace}/networkpolicies.
  `namespace` is required. Returns the full `call-api` response."
  ([namespace ] (delete-extensions-v1beta1-collection-namespaced-network-policy-with-http-info namespace nil))
  ([namespace {:keys [pretty continue field-selector include-uninitialized label-selector limit resource-version timeout-seconds watch ]}]
   ;; Fail fast when the required path parameter is missing.
   (check-required-params namespace)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/networkpolicies" :delete
             {:path-params {"namespace" namespace }
              :header-params {}
              :query-params {"pretty" pretty "continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params {}
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn delete-extensions-v1beta1-collection-namespaced-network-policy
  "Delete the collection of NetworkPolicy in `namespace`.
  Convenience wrapper returning only the :data payload of the
  -with-http-info variant."
  ([namespace ] (delete-extensions-v1beta1-collection-namespaced-network-policy namespace nil))
  ([namespace optional-params]
   (-> (delete-extensions-v1beta1-collection-namespaced-network-policy-with-http-info namespace optional-params)
       :data)))
(defn delete-extensions-v1beta1-collection-namespaced-replica-set-with-http-info
  "Delete the collection of ReplicaSet in `namespace`, filtered by the
  standard Kubernetes list selectors.
  DELETE /apis/extensions/v1beta1/namespaces/{namespace}/replicasets.
  `namespace` is required. Returns the full `call-api` response."
  ([namespace ] (delete-extensions-v1beta1-collection-namespaced-replica-set-with-http-info namespace nil))
  ([namespace {:keys [pretty continue field-selector include-uninitialized label-selector limit resource-version timeout-seconds watch ]}]
   ;; Fail fast when the required path parameter is missing.
   (check-required-params namespace)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/replicasets" :delete
             {:path-params {"namespace" namespace }
              :header-params {}
              :query-params {"pretty" pretty "continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params {}
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn delete-extensions-v1beta1-collection-namespaced-replica-set
  "Delete the collection of ReplicaSet in `namespace`.
  Convenience wrapper returning only the :data payload of the
  -with-http-info variant."
  ([namespace ] (delete-extensions-v1beta1-collection-namespaced-replica-set namespace nil))
  ([namespace optional-params]
   (-> (delete-extensions-v1beta1-collection-namespaced-replica-set-with-http-info namespace optional-params)
       :data)))
(defn delete-extensions-v1beta1-collection-pod-security-policy-with-http-info
  "Delete the collection of PodSecurityPolicy (cluster-scoped), filtered by
  the standard Kubernetes list selectors.
  DELETE /apis/extensions/v1beta1/podsecuritypolicies.
  Returns the full `call-api` response."
  ([] (delete-extensions-v1beta1-collection-pod-security-policy-with-http-info nil))
  ([{:keys [pretty continue field-selector include-uninitialized label-selector limit resource-version timeout-seconds watch ]}]
   (call-api "/apis/extensions/v1beta1/podsecuritypolicies" :delete
             {:path-params {}
              :header-params {}
              :query-params {"pretty" pretty "continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params {}
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn delete-extensions-v1beta1-collection-pod-security-policy
"
delete collection of PodSecurityPolicy"
([] (delete-extensions-v1beta1-collection-pod-security-policy nil))
([optional-params]
(:data (delete-extensions-v1beta1-collection-pod-security-policy-with-http-info optional-params))))
(defn delete-extensions-v1beta1-namespaced-daemon-set-with-http-info
  "Delete a DaemonSet.
   DELETE /apis/extensions/v1beta1/namespaces/{namespace}/daemonsets/{name}
   Required: name, namespace, body (sent as :body-param; presumably the
   delete options -- confirm against the API spec). Optional: pretty,
   grace-period-seconds, orphan-dependents, propagation-policy.
   Returns the full call-api response."
  ([name namespace body ] (delete-extensions-v1beta1-namespaced-daemon-set-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty grace-period-seconds orphan-dependents propagation-policy ]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/daemonsets/{name}" :delete
             {:path-params {"name" name "namespace" namespace }
              :header-params {}
              :query-params {"pretty" pretty "gracePeriodSeconds" grace-period-seconds "orphanDependents" orphan-dependents "propagationPolicy" propagation-policy }
              :form-params {}
              :body-param body
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn delete-extensions-v1beta1-namespaced-daemon-set
  "Delete a DaemonSet.
   Same arguments as the -with-http-info variant; returns only the :data
   value of its response."
  ([name namespace body ] (delete-extensions-v1beta1-namespaced-daemon-set name namespace body nil))
  ([name namespace body optional-params]
   (:data (delete-extensions-v1beta1-namespaced-daemon-set-with-http-info name namespace body optional-params))))
(defn delete-extensions-v1beta1-namespaced-deployment-with-http-info
  "Delete a Deployment.
   DELETE /apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}
   Required: name, namespace, body (sent as :body-param; presumably the
   delete options -- confirm against the API spec). Optional: pretty,
   grace-period-seconds, orphan-dependents, propagation-policy.
   Returns the full call-api response."
  ([name namespace body ] (delete-extensions-v1beta1-namespaced-deployment-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty grace-period-seconds orphan-dependents propagation-policy ]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}" :delete
             {:path-params {"name" name "namespace" namespace }
              :header-params {}
              :query-params {"pretty" pretty "gracePeriodSeconds" grace-period-seconds "orphanDependents" orphan-dependents "propagationPolicy" propagation-policy }
              :form-params {}
              :body-param body
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn delete-extensions-v1beta1-namespaced-deployment
  "Delete a Deployment.
   Same arguments as the -with-http-info variant; returns only the :data
   value of its response."
  ([name namespace body ] (delete-extensions-v1beta1-namespaced-deployment name namespace body nil))
  ([name namespace body optional-params]
   (:data (delete-extensions-v1beta1-namespaced-deployment-with-http-info name namespace body optional-params))))
(defn delete-extensions-v1beta1-namespaced-ingress-with-http-info
  "Delete an Ingress.
   DELETE /apis/extensions/v1beta1/namespaces/{namespace}/ingresses/{name}
   Required: name, namespace, body (sent as :body-param; presumably the
   delete options -- confirm against the API spec). Optional: pretty,
   grace-period-seconds, orphan-dependents, propagation-policy.
   Returns the full call-api response."
  ([name namespace body ] (delete-extensions-v1beta1-namespaced-ingress-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty grace-period-seconds orphan-dependents propagation-policy ]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/ingresses/{name}" :delete
             {:path-params {"name" name "namespace" namespace }
              :header-params {}
              :query-params {"pretty" pretty "gracePeriodSeconds" grace-period-seconds "orphanDependents" orphan-dependents "propagationPolicy" propagation-policy }
              :form-params {}
              :body-param body
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn delete-extensions-v1beta1-namespaced-ingress
  "Delete an Ingress.
   Same arguments as the -with-http-info variant; returns only the :data
   value of its response."
  ([name namespace body ] (delete-extensions-v1beta1-namespaced-ingress name namespace body nil))
  ([name namespace body optional-params]
   (:data (delete-extensions-v1beta1-namespaced-ingress-with-http-info name namespace body optional-params))))
(defn delete-extensions-v1beta1-namespaced-network-policy-with-http-info
  "Delete a NetworkPolicy.
   DELETE /apis/extensions/v1beta1/namespaces/{namespace}/networkpolicies/{name}
   Required: name, namespace, body (sent as :body-param; presumably the
   delete options -- confirm against the API spec). Optional: pretty,
   grace-period-seconds, orphan-dependents, propagation-policy.
   Returns the full call-api response."
  ([name namespace body ] (delete-extensions-v1beta1-namespaced-network-policy-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty grace-period-seconds orphan-dependents propagation-policy ]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/networkpolicies/{name}" :delete
             {:path-params {"name" name "namespace" namespace }
              :header-params {}
              :query-params {"pretty" pretty "gracePeriodSeconds" grace-period-seconds "orphanDependents" orphan-dependents "propagationPolicy" propagation-policy }
              :form-params {}
              :body-param body
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn delete-extensions-v1beta1-namespaced-network-policy
  "Delete a NetworkPolicy.
   Same arguments as the -with-http-info variant; returns only the :data
   value of its response."
  ([name namespace body ] (delete-extensions-v1beta1-namespaced-network-policy name namespace body nil))
  ([name namespace body optional-params]
   (:data (delete-extensions-v1beta1-namespaced-network-policy-with-http-info name namespace body optional-params))))
(defn delete-extensions-v1beta1-namespaced-replica-set-with-http-info
  "Delete a ReplicaSet.
   DELETE /apis/extensions/v1beta1/namespaces/{namespace}/replicasets/{name}
   Required: name, namespace, body (sent as :body-param; presumably the
   delete options -- confirm against the API spec). Optional: pretty,
   grace-period-seconds, orphan-dependents, propagation-policy.
   Returns the full call-api response."
  ([name namespace body ] (delete-extensions-v1beta1-namespaced-replica-set-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty grace-period-seconds orphan-dependents propagation-policy ]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/replicasets/{name}" :delete
             {:path-params {"name" name "namespace" namespace }
              :header-params {}
              :query-params {"pretty" pretty "gracePeriodSeconds" grace-period-seconds "orphanDependents" orphan-dependents "propagationPolicy" propagation-policy }
              :form-params {}
              :body-param body
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn delete-extensions-v1beta1-namespaced-replica-set
  "Delete a ReplicaSet.
   Same arguments as the -with-http-info variant; returns only the :data
   value of its response."
  ([name namespace body ] (delete-extensions-v1beta1-namespaced-replica-set name namespace body nil))
  ([name namespace body optional-params]
   (:data (delete-extensions-v1beta1-namespaced-replica-set-with-http-info name namespace body optional-params))))
(defn delete-extensions-v1beta1-pod-security-policy-with-http-info
  "Delete a PodSecurityPolicy.
   DELETE /apis/extensions/v1beta1/podsecuritypolicies/{name}
   Required: name, body (sent as :body-param; presumably the delete
   options -- confirm against the API spec). Optional: pretty,
   grace-period-seconds, orphan-dependents, propagation-policy.
   Returns the full call-api response."
  ([name body ] (delete-extensions-v1beta1-pod-security-policy-with-http-info name body nil))
  ([name body {:keys [pretty grace-period-seconds orphan-dependents propagation-policy ]}]
   (check-required-params name body)
   (call-api "/apis/extensions/v1beta1/podsecuritypolicies/{name}" :delete
             {:path-params {"name" name }
              :header-params {}
              :query-params {"pretty" pretty "gracePeriodSeconds" grace-period-seconds "orphanDependents" orphan-dependents "propagationPolicy" propagation-policy }
              :form-params {}
              :body-param body
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn delete-extensions-v1beta1-pod-security-policy
  "Delete a PodSecurityPolicy.
   Same arguments as the -with-http-info variant; returns only the :data
   value of its response."
  ([name body ] (delete-extensions-v1beta1-pod-security-policy name body nil))
  ([name body optional-params]
   (:data (delete-extensions-v1beta1-pod-security-policy-with-http-info name body optional-params))))
(defn get-extensions-v1beta1-api-resources-with-http-info
  "Get available resources.
   GET /apis/extensions/v1beta1/ -- takes no arguments. Returns the full
   call-api response."
  []
  (call-api "/apis/extensions/v1beta1/" :get
            {:path-params {}
             :header-params {}
             :query-params {}
             :form-params {}
             :content-types ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
             :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
             :auth-names ["BearerToken" "HTTPBasic"]}))
(defn get-extensions-v1beta1-api-resources
  "Get available resources.
   Calls the -with-http-info variant and returns only the :data value of
   its response."
  []
  (:data (get-extensions-v1beta1-api-resources-with-http-info)))
(defn list-extensions-v1beta1-daemon-set-for-all-namespaces-with-http-info
  "List or watch objects of kind DaemonSet across all namespaces.
   GET /apis/extensions/v1beta1/daemonsets
   The optional map carries list/watch query parameters (see the :keys
   destructuring below). Returns the full call-api response."
  ([] (list-extensions-v1beta1-daemon-set-for-all-namespaces-with-http-info nil))
  ([{:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
   (call-api "/apis/extensions/v1beta1/daemonsets" :get
             {:path-params {}
              :header-params {}
              :query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params {}
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn list-extensions-v1beta1-daemon-set-for-all-namespaces
  "List or watch objects of kind DaemonSet across all namespaces.
   Same arguments as the -with-http-info variant; returns only the :data
   value of its response."
  ([] (list-extensions-v1beta1-daemon-set-for-all-namespaces nil))
  ([optional-params]
   (:data (list-extensions-v1beta1-daemon-set-for-all-namespaces-with-http-info optional-params))))
(defn list-extensions-v1beta1-deployment-for-all-namespaces-with-http-info
  "List or watch objects of kind Deployment across all namespaces.
   GET /apis/extensions/v1beta1/deployments
   The optional map carries list/watch query parameters (see the :keys
   destructuring below). Returns the full call-api response."
  ([] (list-extensions-v1beta1-deployment-for-all-namespaces-with-http-info nil))
  ([{:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
   (call-api "/apis/extensions/v1beta1/deployments" :get
             {:path-params {}
              :header-params {}
              :query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params {}
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn list-extensions-v1beta1-deployment-for-all-namespaces
  "List or watch objects of kind Deployment across all namespaces.
   Same arguments as the -with-http-info variant; returns only the :data
   value of its response."
  ([] (list-extensions-v1beta1-deployment-for-all-namespaces nil))
  ([optional-params]
   (:data (list-extensions-v1beta1-deployment-for-all-namespaces-with-http-info optional-params))))
(defn list-extensions-v1beta1-ingress-for-all-namespaces-with-http-info
  "List or watch objects of kind Ingress across all namespaces.
   GET /apis/extensions/v1beta1/ingresses
   The optional map carries list/watch query parameters (see the :keys
   destructuring below). Returns the full call-api response."
  ([] (list-extensions-v1beta1-ingress-for-all-namespaces-with-http-info nil))
  ([{:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
   (call-api "/apis/extensions/v1beta1/ingresses" :get
             {:path-params {}
              :header-params {}
              :query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params {}
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn list-extensions-v1beta1-ingress-for-all-namespaces
  "List or watch objects of kind Ingress across all namespaces.
   Same arguments as the -with-http-info variant; returns only the :data
   value of its response."
  ([] (list-extensions-v1beta1-ingress-for-all-namespaces nil))
  ([optional-params]
   (:data (list-extensions-v1beta1-ingress-for-all-namespaces-with-http-info optional-params))))
(defn list-extensions-v1beta1-namespaced-daemon-set-with-http-info
  "List or watch objects of kind DaemonSet in a namespace.
   GET /apis/extensions/v1beta1/namespaces/{namespace}/daemonsets
   Required: namespace (validated via check-required-params). The optional
   map carries list/watch query parameters (see the :keys destructuring
   below). Returns the full call-api response."
  ([namespace ] (list-extensions-v1beta1-namespaced-daemon-set-with-http-info namespace nil))
  ([namespace {:keys [pretty continue field-selector include-uninitialized label-selector limit resource-version timeout-seconds watch ]}]
   (check-required-params namespace)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/daemonsets" :get
             {:path-params {"namespace" namespace }
              :header-params {}
              :query-params {"pretty" pretty "continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params {}
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn list-extensions-v1beta1-namespaced-daemon-set
  "List or watch objects of kind DaemonSet in a namespace.
   Same arguments as the -with-http-info variant; returns only the :data
   value of its response."
  ([namespace ] (list-extensions-v1beta1-namespaced-daemon-set namespace nil))
  ([namespace optional-params]
   (:data (list-extensions-v1beta1-namespaced-daemon-set-with-http-info namespace optional-params))))
(defn list-extensions-v1beta1-namespaced-deployment-with-http-info
  "List or watch objects of kind Deployment in a namespace.
   GET /apis/extensions/v1beta1/namespaces/{namespace}/deployments
   Required: namespace (validated via check-required-params). The optional
   map carries list/watch query parameters (see the :keys destructuring
   below). Returns the full call-api response."
  ([namespace ] (list-extensions-v1beta1-namespaced-deployment-with-http-info namespace nil))
  ([namespace {:keys [pretty continue field-selector include-uninitialized label-selector limit resource-version timeout-seconds watch ]}]
   (check-required-params namespace)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/deployments" :get
             {:path-params {"namespace" namespace }
              :header-params {}
              :query-params {"pretty" pretty "continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params {}
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn list-extensions-v1beta1-namespaced-deployment
  "List or watch objects of kind Deployment in a namespace.
   Same arguments as the -with-http-info variant; returns only the :data
   value of its response."
  ([namespace ] (list-extensions-v1beta1-namespaced-deployment namespace nil))
  ([namespace optional-params]
   (:data (list-extensions-v1beta1-namespaced-deployment-with-http-info namespace optional-params))))
(defn list-extensions-v1beta1-namespaced-ingress-with-http-info
  "List or watch objects of kind Ingress in a namespace.
   GET /apis/extensions/v1beta1/namespaces/{namespace}/ingresses
   Required: namespace (validated via check-required-params). The optional
   map carries list/watch query parameters (see the :keys destructuring
   below). Returns the full call-api response."
  ([namespace ] (list-extensions-v1beta1-namespaced-ingress-with-http-info namespace nil))
  ([namespace {:keys [pretty continue field-selector include-uninitialized label-selector limit resource-version timeout-seconds watch ]}]
   (check-required-params namespace)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/ingresses" :get
             {:path-params {"namespace" namespace }
              :header-params {}
              :query-params {"pretty" pretty "continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params {}
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn list-extensions-v1beta1-namespaced-ingress
  "List or watch objects of kind Ingress in a namespace.
   Same arguments as the -with-http-info variant; returns only the :data
   value of its response."
  ([namespace ] (list-extensions-v1beta1-namespaced-ingress namespace nil))
  ([namespace optional-params]
   (:data (list-extensions-v1beta1-namespaced-ingress-with-http-info namespace optional-params))))
(defn list-extensions-v1beta1-namespaced-network-policy-with-http-info
  "List or watch objects of kind NetworkPolicy in a namespace.
   GET /apis/extensions/v1beta1/namespaces/{namespace}/networkpolicies
   Required: namespace (validated via check-required-params). The optional
   map carries list/watch query parameters (see the :keys destructuring
   below). Returns the full call-api response."
  ([namespace ] (list-extensions-v1beta1-namespaced-network-policy-with-http-info namespace nil))
  ([namespace {:keys [pretty continue field-selector include-uninitialized label-selector limit resource-version timeout-seconds watch ]}]
   (check-required-params namespace)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/networkpolicies" :get
             {:path-params {"namespace" namespace }
              :header-params {}
              :query-params {"pretty" pretty "continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params {}
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn list-extensions-v1beta1-namespaced-network-policy
  "List or watch objects of kind NetworkPolicy in a namespace.
   Same arguments as the -with-http-info variant; returns only the :data
   value of its response."
  ([namespace ] (list-extensions-v1beta1-namespaced-network-policy namespace nil))
  ([namespace optional-params]
   (:data (list-extensions-v1beta1-namespaced-network-policy-with-http-info namespace optional-params))))
(defn list-extensions-v1beta1-namespaced-replica-set-with-http-info
  "List or watch objects of kind ReplicaSet in a namespace.
   GET /apis/extensions/v1beta1/namespaces/{namespace}/replicasets
   Required: namespace (validated via check-required-params). The optional
   map carries list/watch query parameters (see the :keys destructuring
   below). Returns the full call-api response."
  ([namespace ] (list-extensions-v1beta1-namespaced-replica-set-with-http-info namespace nil))
  ([namespace {:keys [pretty continue field-selector include-uninitialized label-selector limit resource-version timeout-seconds watch ]}]
   (check-required-params namespace)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/replicasets" :get
             {:path-params {"namespace" namespace }
              :header-params {}
              :query-params {"pretty" pretty "continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params {}
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn list-extensions-v1beta1-namespaced-replica-set
  "List or watch objects of kind ReplicaSet in a namespace.
   Same arguments as the -with-http-info variant; returns only the :data
   value of its response."
  ([namespace ] (list-extensions-v1beta1-namespaced-replica-set namespace nil))
  ([namespace optional-params]
   (:data (list-extensions-v1beta1-namespaced-replica-set-with-http-info namespace optional-params))))
(defn list-extensions-v1beta1-network-policy-for-all-namespaces-with-http-info
  "List or watch objects of kind NetworkPolicy across all namespaces.
   GET /apis/extensions/v1beta1/networkpolicies
   The optional map carries list/watch query parameters (see the :keys
   destructuring below). Returns the full call-api response."
  ([] (list-extensions-v1beta1-network-policy-for-all-namespaces-with-http-info nil))
  ([{:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
   (call-api "/apis/extensions/v1beta1/networkpolicies" :get
             {:path-params {}
              :header-params {}
              :query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params {}
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn list-extensions-v1beta1-network-policy-for-all-namespaces
  "List or watch objects of kind NetworkPolicy across all namespaces.
   Same arguments as the -with-http-info variant; returns only the :data
   value of its response."
  ([] (list-extensions-v1beta1-network-policy-for-all-namespaces nil))
  ([optional-params]
   (:data (list-extensions-v1beta1-network-policy-for-all-namespaces-with-http-info optional-params))))
(defn list-extensions-v1beta1-pod-security-policy-with-http-info
  "List or watch objects of kind PodSecurityPolicy.
   GET /apis/extensions/v1beta1/podsecuritypolicies
   The optional map carries list/watch query parameters (see the :keys
   destructuring below). Returns the full call-api response."
  ([] (list-extensions-v1beta1-pod-security-policy-with-http-info nil))
  ([{:keys [pretty continue field-selector include-uninitialized label-selector limit resource-version timeout-seconds watch ]}]
   (call-api "/apis/extensions/v1beta1/podsecuritypolicies" :get
             {:path-params {}
              :header-params {}
              :query-params {"pretty" pretty "continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params {}
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn list-extensions-v1beta1-pod-security-policy
  "List or watch objects of kind PodSecurityPolicy.
   Same arguments as the -with-http-info variant; returns only the :data
   value of its response."
  ([] (list-extensions-v1beta1-pod-security-policy nil))
  ([optional-params]
   (:data (list-extensions-v1beta1-pod-security-policy-with-http-info optional-params))))
(defn list-extensions-v1beta1-replica-set-for-all-namespaces-with-http-info
  "List or watch objects of kind ReplicaSet across all namespaces.
   GET /apis/extensions/v1beta1/replicasets
   The optional map carries list/watch query parameters (see the :keys
   destructuring below). Returns the full call-api response."
  ([] (list-extensions-v1beta1-replica-set-for-all-namespaces-with-http-info nil))
  ([{:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
   (call-api "/apis/extensions/v1beta1/replicasets" :get
             {:path-params {}
              :header-params {}
              :query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
              :form-params {}
              :content-types ["*/*"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn list-extensions-v1beta1-replica-set-for-all-namespaces
  "List or watch objects of kind ReplicaSet across all namespaces.
   Same arguments as the -with-http-info variant; returns only the :data
   value of its response."
  ([] (list-extensions-v1beta1-replica-set-for-all-namespaces nil))
  ([optional-params]
   (:data (list-extensions-v1beta1-replica-set-for-all-namespaces-with-http-info optional-params))))
(defn patch-extensions-v1beta1-namespaced-daemon-set-with-http-info
  "Partially update the specified DaemonSet.
   PATCH /apis/extensions/v1beta1/namespaces/{namespace}/daemonsets/{name}
   Required: name, namespace, body (the patch document; accepted content
   types are JSON Patch, merge patch and strategic merge patch -- see
   :content-types). Optional: pretty. Returns the full call-api response."
  ([name namespace body ] (patch-extensions-v1beta1-namespaced-daemon-set-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty ]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/daemonsets/{name}" :patch
             {:path-params {"name" name "namespace" namespace }
              :header-params {}
              :query-params {"pretty" pretty }
              :form-params {}
              :body-param body
              :content-types ["application/json-patch+json" "application/merge-patch+json" "application/strategic-merge-patch+json"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn patch-extensions-v1beta1-namespaced-daemon-set
  "Partially update the specified DaemonSet.
   Same arguments as the -with-http-info variant; returns only the :data
   value of its response."
  ([name namespace body ] (patch-extensions-v1beta1-namespaced-daemon-set name namespace body nil))
  ([name namespace body optional-params]
   (:data (patch-extensions-v1beta1-namespaced-daemon-set-with-http-info name namespace body optional-params))))
(defn patch-extensions-v1beta1-namespaced-daemon-set-status-with-http-info
  "Partially update status of the specified DaemonSet.
   PATCH /apis/extensions/v1beta1/namespaces/{namespace}/daemonsets/{name}/status
   Required: name, namespace, body (the patch document; accepted content
   types are JSON Patch, merge patch and strategic merge patch -- see
   :content-types). Optional: pretty. Returns the full call-api response."
  ([name namespace body ] (patch-extensions-v1beta1-namespaced-daemon-set-status-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty ]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/daemonsets/{name}/status" :patch
             {:path-params {"name" name "namespace" namespace }
              :header-params {}
              :query-params {"pretty" pretty }
              :form-params {}
              :body-param body
              :content-types ["application/json-patch+json" "application/merge-patch+json" "application/strategic-merge-patch+json"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn patch-extensions-v1beta1-namespaced-daemon-set-status
  "Partially update status of the specified DaemonSet.
   Same arguments as the -with-http-info variant; returns only the :data
   value of its response."
  ([name namespace body ] (patch-extensions-v1beta1-namespaced-daemon-set-status name namespace body nil))
  ([name namespace body optional-params]
   (:data (patch-extensions-v1beta1-namespaced-daemon-set-status-with-http-info name namespace body optional-params))))
(defn patch-extensions-v1beta1-namespaced-deployment-with-http-info
  "Partially update the specified Deployment.
   PATCH /apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}
   Required: name, namespace, body (the patch document; accepted content
   types are JSON Patch, merge patch and strategic merge patch -- see
   :content-types). Optional: pretty. Returns the full call-api response."
  ([name namespace body ] (patch-extensions-v1beta1-namespaced-deployment-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty ]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}" :patch
             {:path-params {"name" name "namespace" namespace }
              :header-params {}
              :query-params {"pretty" pretty }
              :form-params {}
              :body-param body
              :content-types ["application/json-patch+json" "application/merge-patch+json" "application/strategic-merge-patch+json"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn patch-extensions-v1beta1-namespaced-deployment
  "Partially update the specified Deployment.
   Same arguments as the -with-http-info variant; returns only the :data
   value of its response."
  ([name namespace body ] (patch-extensions-v1beta1-namespaced-deployment name namespace body nil))
  ([name namespace body optional-params]
   (:data (patch-extensions-v1beta1-namespaced-deployment-with-http-info name namespace body optional-params))))
(defn patch-extensions-v1beta1-namespaced-deployment-scale-with-http-info
  "Partially update scale of the specified Deployment.
   PATCH /apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}/scale
   Required: name, namespace, body (the patch document; accepted content
   types are JSON Patch, merge patch and strategic merge patch -- see
   :content-types). Optional: pretty. Returns the full call-api response."
  ([name namespace body ] (patch-extensions-v1beta1-namespaced-deployment-scale-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty ]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}/scale" :patch
             {:path-params {"name" name "namespace" namespace }
              :header-params {}
              :query-params {"pretty" pretty }
              :form-params {}
              :body-param body
              :content-types ["application/json-patch+json" "application/merge-patch+json" "application/strategic-merge-patch+json"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn patch-extensions-v1beta1-namespaced-deployment-scale
  "Partially update scale of the specified Deployment.
   Same arguments as the -with-http-info variant; returns only the :data
   value of its response."
  ([name namespace body ] (patch-extensions-v1beta1-namespaced-deployment-scale name namespace body nil))
  ([name namespace body optional-params]
   (:data (patch-extensions-v1beta1-namespaced-deployment-scale-with-http-info name namespace body optional-params))))
(defn patch-extensions-v1beta1-namespaced-deployment-status-with-http-info
  "Partially update status of the specified Deployment.
   PATCH /apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}/status
   Required: name, namespace, body (the patch document; accepted content
   types are JSON Patch, merge patch and strategic merge patch -- see
   :content-types). Optional: pretty. Returns the full call-api response."
  ([name namespace body ] (patch-extensions-v1beta1-namespaced-deployment-status-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty ]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}/status" :patch
             {:path-params {"name" name "namespace" namespace }
              :header-params {}
              :query-params {"pretty" pretty }
              :form-params {}
              :body-param body
              :content-types ["application/json-patch+json" "application/merge-patch+json" "application/strategic-merge-patch+json"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn patch-extensions-v1beta1-namespaced-deployment-status
  "Partially update status of the specified Deployment.
   Same arguments as the -with-http-info variant; returns only the :data
   value of its response."
  ([name namespace body ] (patch-extensions-v1beta1-namespaced-deployment-status name namespace body nil))
  ([name namespace body optional-params]
   (:data (patch-extensions-v1beta1-namespaced-deployment-status-with-http-info name namespace body optional-params))))
(defn patch-extensions-v1beta1-namespaced-ingress-with-http-info
  "Partially update the specified Ingress.
   PATCH /apis/extensions/v1beta1/namespaces/{namespace}/ingresses/{name}
   Required: name, namespace, body (the patch document; accepted content
   types are JSON Patch, merge patch and strategic merge patch -- see
   :content-types). Optional: pretty. Returns the full call-api response."
  ([name namespace body ] (patch-extensions-v1beta1-namespaced-ingress-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty ]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/ingresses/{name}" :patch
             {:path-params {"name" name "namespace" namespace }
              :header-params {}
              :query-params {"pretty" pretty }
              :form-params {}
              :body-param body
              :content-types ["application/json-patch+json" "application/merge-patch+json" "application/strategic-merge-patch+json"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn patch-extensions-v1beta1-namespaced-ingress
  "Partially update the specified Ingress.
   Same arguments as the -with-http-info variant; returns only the :data
   value of its response."
  ([name namespace body ] (patch-extensions-v1beta1-namespaced-ingress name namespace body nil))
  ([name namespace body optional-params]
   (:data (patch-extensions-v1beta1-namespaced-ingress-with-http-info name namespace body optional-params))))
(defn patch-extensions-v1beta1-namespaced-ingress-status-with-http-info
  "Partially update status of the specified Ingress.
   PATCH /apis/extensions/v1beta1/namespaces/{namespace}/ingresses/{name}/status
   Required: name, namespace, body (the patch document; accepted content
   types are JSON Patch, merge patch and strategic merge patch -- see
   :content-types). Optional: pretty. Returns the full call-api response."
  ([name namespace body ] (patch-extensions-v1beta1-namespaced-ingress-status-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty ]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/ingresses/{name}/status" :patch
             {:path-params {"name" name "namespace" namespace }
              :header-params {}
              :query-params {"pretty" pretty }
              :form-params {}
              :body-param body
              :content-types ["application/json-patch+json" "application/merge-patch+json" "application/strategic-merge-patch+json"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn patch-extensions-v1beta1-namespaced-ingress-status
  "Partially update status of the specified Ingress.
   Same arguments as the -with-http-info variant; returns only the :data
   value of its response."
  ([name namespace body ] (patch-extensions-v1beta1-namespaced-ingress-status name namespace body nil))
  ([name namespace body optional-params]
   (:data (patch-extensions-v1beta1-namespaced-ingress-status-with-http-info name namespace body optional-params))))
(defn patch-extensions-v1beta1-namespaced-network-policy-with-http-info
  "Partially update the specified NetworkPolicy.
   PATCH /apis/extensions/v1beta1/namespaces/{namespace}/networkpolicies/{name}
   Required: name, namespace, body (the patch document; accepted content
   types are JSON Patch, merge patch and strategic merge patch -- see
   :content-types). Optional: pretty. Returns the full call-api response."
  ([name namespace body ] (patch-extensions-v1beta1-namespaced-network-policy-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty ]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/networkpolicies/{name}" :patch
             {:path-params {"name" name "namespace" namespace }
              :header-params {}
              :query-params {"pretty" pretty }
              :form-params {}
              :body-param body
              :content-types ["application/json-patch+json" "application/merge-patch+json" "application/strategic-merge-patch+json"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn patch-extensions-v1beta1-namespaced-network-policy
  "Partially update the specified NetworkPolicy.
   Same arguments as the -with-http-info variant; returns only the :data
   value of its response."
  ([name namespace body ] (patch-extensions-v1beta1-namespaced-network-policy name namespace body nil))
  ([name namespace body optional-params]
   (:data (patch-extensions-v1beta1-namespaced-network-policy-with-http-info name namespace body optional-params))))
(defn patch-extensions-v1beta1-namespaced-replica-set-with-http-info
  "Partially update the specified ReplicaSet.
   PATCH /apis/extensions/v1beta1/namespaces/{namespace}/replicasets/{name}
   Required: name, namespace, body (the patch document; accepted content
   types are JSON Patch, merge patch and strategic merge patch -- see
   :content-types). Optional: pretty. Returns the full call-api response."
  ([name namespace body ] (patch-extensions-v1beta1-namespaced-replica-set-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty ]}]
   (check-required-params name namespace body)
   (call-api "/apis/extensions/v1beta1/namespaces/{namespace}/replicasets/{name}" :patch
             {:path-params {"name" name "namespace" namespace }
              :header-params {}
              :query-params {"pretty" pretty }
              :form-params {}
              :body-param body
              :content-types ["application/json-patch+json" "application/merge-patch+json" "application/strategic-merge-patch+json"]
              :accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
              :auth-names ["BearerToken" "HTTPBasic"]})))
(defn patch-extensions-v1beta1-namespaced-replica-set
  "Partially update the specified ReplicaSet.
   Same arguments as the -with-http-info variant; returns only the :data
   value of its response."
  ([name namespace body ] (patch-extensions-v1beta1-namespaced-replica-set name namespace body nil))
  ([name namespace body optional-params]
   (:data (patch-extensions-v1beta1-namespaced-replica-set-with-http-info name namespace body optional-params))))
(defn patch-extensions-v1beta1-namespaced-replica-set-scale-with-http-info
"
partially update scale of the specified ReplicaSet"
([name namespace body ] (patch-extensions-v1beta1-namespaced-replica-set-scale-with-http-info name namespace body nil))
([name namespace body {:keys [pretty ]}]
(check-required-params name namespace body)
(call-api "/apis/extensions/v1beta1/namespaces/{namespace}/replicasets/{name}/scale" :patch
{:path-params {"name" name "namespace" namespace }
:header-params {}
:query-params {"pretty" pretty }
:form-params {}
:body-param body
:content-types ["application/json-patch+json" "application/merge-patch+json" "application/strategic-merge-patch+json"]
:accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
:auth-names ["BearerToken" "HTTPBasic"]})))
(defn patch-extensions-v1beta1-namespaced-replica-set-scale
"
partially update scale of the specified ReplicaSet"
([name namespace body ] (patch-extensions-v1beta1-namespaced-replica-set-scale name namespace body nil))
([name namespace body optional-params]
(:data (patch-extensions-v1beta1-namespaced-replica-set-scale-with-http-info name namespace body optional-params))))
(defn patch-extensions-v1beta1-namespaced-replica-set-status-with-http-info
"
partially update status of the specified ReplicaSet"
([name namespace body ] (patch-extensions-v1beta1-namespaced-replica-set-status-with-http-info name namespace body nil))
([name namespace body {:keys [pretty ]}]
(check-required-params name namespace body)
(call-api "/apis/extensions/v1beta1/namespaces/{namespace}/replicasets/{name}/status" :patch
{:path-params {"name" name "namespace" namespace }
:header-params {}
:query-params {"pretty" pretty }
:form-params {}
:body-param body
:content-types ["application/json-patch+json" "application/merge-patch+json" "application/strategic-merge-patch+json"]
:accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
:auth-names ["BearerToken" "HTTPBasic"]})))
(defn patch-extensions-v1beta1-namespaced-replica-set-status
"
partially update status of the specified ReplicaSet"
([name namespace body ] (patch-extensions-v1beta1-namespaced-replica-set-status name namespace body nil))
([name namespace body optional-params]
(:data (patch-extensions-v1beta1-namespaced-replica-set-status-with-http-info name namespace body optional-params))))
(defn patch-extensions-v1beta1-namespaced-replication-controller-dummy-scale-with-http-info
"
partially update scale of the specified ReplicationControllerDummy"
([name namespace body ] (patch-extensions-v1beta1-namespaced-replication-controller-dummy-scale-with-http-info name namespace body nil))
([name namespace body {:keys [pretty ]}]
(check-required-params name namespace body)
(call-api "/apis/extensions/v1beta1/namespaces/{namespace}/replicationcontrollers/{name}/scale" :patch
{:path-params {"name" name "namespace" namespace }
:header-params {}
:query-params {"pretty" pretty }
:form-params {}
:body-param body
:content-types ["application/json-patch+json" "application/merge-patch+json" "application/strategic-merge-patch+json"]
:accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
:auth-names ["BearerToken" "HTTPBasic"]})))
(defn patch-extensions-v1beta1-namespaced-replication-controller-dummy-scale
"
partially update scale of the specified ReplicationControllerDummy"
([name namespace body ] (patch-extensions-v1beta1-namespaced-replication-controller-dummy-scale name namespace body nil))
([name namespace body optional-params]
(:data (patch-extensions-v1beta1-namespaced-replication-controller-dummy-scale-with-http-info name namespace body optional-params))))
(defn patch-extensions-v1beta1-pod-security-policy-with-http-info
"
partially update the specified PodSecurityPolicy"
([name body ] (patch-extensions-v1beta1-pod-security-policy-with-http-info name body nil))
([name body {:keys [pretty ]}]
(check-required-params name body)
(call-api "/apis/extensions/v1beta1/podsecuritypolicies/{name}" :patch
{:path-params {"name" name }
:header-params {}
:query-params {"pretty" pretty }
:form-params {}
:body-param body
:content-types ["application/json-patch+json" "application/merge-patch+json" "application/strategic-merge-patch+json"]
:accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
:auth-names ["BearerToken" "HTTPBasic"]})))
(defn patch-extensions-v1beta1-pod-security-policy
"
partially update the specified PodSecurityPolicy"
([name body ] (patch-extensions-v1beta1-pod-security-policy name body nil))
([name body optional-params]
(:data (patch-extensions-v1beta1-pod-security-policy-with-http-info name body optional-params))))
(defn- read-extensions-v1beta1-call*
  "Shared plumbing for the GET (read) operations below: issues a :get against
  `path` with the given path/query params and returns the full HTTP response
  map from `call-api`. Query values may be nil (omitted by server)."
  [path path-params query-params]
  (call-api path :get
            {:path-params   path-params
             :header-params {}
             :query-params  query-params
             :form-params   {}
             :content-types ["*/*"]
             :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
             :auth-names    ["BearerToken" "HTTPBasic"]}))

(defn read-extensions-v1beta1-namespaced-daemon-set-with-http-info
  "read the specified DaemonSet; returns the full response map."
  ([name namespace] (read-extensions-v1beta1-namespaced-daemon-set-with-http-info name namespace nil))
  ([name namespace {:keys [pretty exact export]}]
   (check-required-params name namespace)
   (read-extensions-v1beta1-call*
     "/apis/extensions/v1beta1/namespaces/{namespace}/daemonsets/{name}"
     {"name" name "namespace" namespace}
     {"pretty" pretty "exact" exact "export" export})))

(defn read-extensions-v1beta1-namespaced-daemon-set
  "read the specified DaemonSet; returns only `:data`."
  ([name namespace] (read-extensions-v1beta1-namespaced-daemon-set name namespace nil))
  ([name namespace optional-params]
   (:data (read-extensions-v1beta1-namespaced-daemon-set-with-http-info name namespace optional-params))))

(defn read-extensions-v1beta1-namespaced-daemon-set-status-with-http-info
  "read status of the specified DaemonSet; returns the full response map."
  ([name namespace] (read-extensions-v1beta1-namespaced-daemon-set-status-with-http-info name namespace nil))
  ([name namespace {:keys [pretty]}]
   (check-required-params name namespace)
   (read-extensions-v1beta1-call*
     "/apis/extensions/v1beta1/namespaces/{namespace}/daemonsets/{name}/status"
     {"name" name "namespace" namespace}
     {"pretty" pretty})))

(defn read-extensions-v1beta1-namespaced-daemon-set-status
  "read status of the specified DaemonSet; returns only `:data`."
  ([name namespace] (read-extensions-v1beta1-namespaced-daemon-set-status name namespace nil))
  ([name namespace optional-params]
   (:data (read-extensions-v1beta1-namespaced-daemon-set-status-with-http-info name namespace optional-params))))

(defn read-extensions-v1beta1-namespaced-deployment-with-http-info
  "read the specified Deployment; returns the full response map."
  ([name namespace] (read-extensions-v1beta1-namespaced-deployment-with-http-info name namespace nil))
  ([name namespace {:keys [pretty exact export]}]
   (check-required-params name namespace)
   (read-extensions-v1beta1-call*
     "/apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}"
     {"name" name "namespace" namespace}
     {"pretty" pretty "exact" exact "export" export})))

(defn read-extensions-v1beta1-namespaced-deployment
  "read the specified Deployment; returns only `:data`."
  ([name namespace] (read-extensions-v1beta1-namespaced-deployment name namespace nil))
  ([name namespace optional-params]
   (:data (read-extensions-v1beta1-namespaced-deployment-with-http-info name namespace optional-params))))

(defn read-extensions-v1beta1-namespaced-deployment-scale-with-http-info
  "read scale of the specified Deployment; returns the full response map."
  ([name namespace] (read-extensions-v1beta1-namespaced-deployment-scale-with-http-info name namespace nil))
  ([name namespace {:keys [pretty]}]
   (check-required-params name namespace)
   (read-extensions-v1beta1-call*
     "/apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}/scale"
     {"name" name "namespace" namespace}
     {"pretty" pretty})))

(defn read-extensions-v1beta1-namespaced-deployment-scale
  "read scale of the specified Deployment; returns only `:data`."
  ([name namespace] (read-extensions-v1beta1-namespaced-deployment-scale name namespace nil))
  ([name namespace optional-params]
   (:data (read-extensions-v1beta1-namespaced-deployment-scale-with-http-info name namespace optional-params))))

(defn read-extensions-v1beta1-namespaced-deployment-status-with-http-info
  "read status of the specified Deployment; returns the full response map."
  ([name namespace] (read-extensions-v1beta1-namespaced-deployment-status-with-http-info name namespace nil))
  ([name namespace {:keys [pretty]}]
   (check-required-params name namespace)
   (read-extensions-v1beta1-call*
     "/apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}/status"
     {"name" name "namespace" namespace}
     {"pretty" pretty})))

(defn read-extensions-v1beta1-namespaced-deployment-status
  "read status of the specified Deployment; returns only `:data`."
  ([name namespace] (read-extensions-v1beta1-namespaced-deployment-status name namespace nil))
  ([name namespace optional-params]
   (:data (read-extensions-v1beta1-namespaced-deployment-status-with-http-info name namespace optional-params))))

(defn read-extensions-v1beta1-namespaced-ingress-with-http-info
  "read the specified Ingress; returns the full response map."
  ([name namespace] (read-extensions-v1beta1-namespaced-ingress-with-http-info name namespace nil))
  ([name namespace {:keys [pretty exact export]}]
   (check-required-params name namespace)
   (read-extensions-v1beta1-call*
     "/apis/extensions/v1beta1/namespaces/{namespace}/ingresses/{name}"
     {"name" name "namespace" namespace}
     {"pretty" pretty "exact" exact "export" export})))

(defn read-extensions-v1beta1-namespaced-ingress
  "read the specified Ingress; returns only `:data`."
  ([name namespace] (read-extensions-v1beta1-namespaced-ingress name namespace nil))
  ([name namespace optional-params]
   (:data (read-extensions-v1beta1-namespaced-ingress-with-http-info name namespace optional-params))))

(defn read-extensions-v1beta1-namespaced-ingress-status-with-http-info
  "read status of the specified Ingress; returns the full response map."
  ([name namespace] (read-extensions-v1beta1-namespaced-ingress-status-with-http-info name namespace nil))
  ([name namespace {:keys [pretty]}]
   (check-required-params name namespace)
   (read-extensions-v1beta1-call*
     "/apis/extensions/v1beta1/namespaces/{namespace}/ingresses/{name}/status"
     {"name" name "namespace" namespace}
     {"pretty" pretty})))

(defn read-extensions-v1beta1-namespaced-ingress-status
  "read status of the specified Ingress; returns only `:data`."
  ([name namespace] (read-extensions-v1beta1-namespaced-ingress-status name namespace nil))
  ([name namespace optional-params]
   (:data (read-extensions-v1beta1-namespaced-ingress-status-with-http-info name namespace optional-params))))

(defn read-extensions-v1beta1-namespaced-network-policy-with-http-info
  "read the specified NetworkPolicy; returns the full response map."
  ([name namespace] (read-extensions-v1beta1-namespaced-network-policy-with-http-info name namespace nil))
  ([name namespace {:keys [pretty exact export]}]
   (check-required-params name namespace)
   (read-extensions-v1beta1-call*
     "/apis/extensions/v1beta1/namespaces/{namespace}/networkpolicies/{name}"
     {"name" name "namespace" namespace}
     {"pretty" pretty "exact" exact "export" export})))

(defn read-extensions-v1beta1-namespaced-network-policy
  "read the specified NetworkPolicy; returns only `:data`."
  ([name namespace] (read-extensions-v1beta1-namespaced-network-policy name namespace nil))
  ([name namespace optional-params]
   (:data (read-extensions-v1beta1-namespaced-network-policy-with-http-info name namespace optional-params))))

(defn read-extensions-v1beta1-namespaced-replica-set-with-http-info
  "read the specified ReplicaSet; returns the full response map."
  ([name namespace] (read-extensions-v1beta1-namespaced-replica-set-with-http-info name namespace nil))
  ([name namespace {:keys [pretty exact export]}]
   (check-required-params name namespace)
   (read-extensions-v1beta1-call*
     "/apis/extensions/v1beta1/namespaces/{namespace}/replicasets/{name}"
     {"name" name "namespace" namespace}
     {"pretty" pretty "exact" exact "export" export})))

(defn read-extensions-v1beta1-namespaced-replica-set
  "read the specified ReplicaSet; returns only `:data`."
  ([name namespace] (read-extensions-v1beta1-namespaced-replica-set name namespace nil))
  ([name namespace optional-params]
   (:data (read-extensions-v1beta1-namespaced-replica-set-with-http-info name namespace optional-params))))

(defn read-extensions-v1beta1-namespaced-replica-set-scale-with-http-info
  "read scale of the specified ReplicaSet; returns the full response map."
  ([name namespace] (read-extensions-v1beta1-namespaced-replica-set-scale-with-http-info name namespace nil))
  ([name namespace {:keys [pretty]}]
   (check-required-params name namespace)
   (read-extensions-v1beta1-call*
     "/apis/extensions/v1beta1/namespaces/{namespace}/replicasets/{name}/scale"
     {"name" name "namespace" namespace}
     {"pretty" pretty})))

(defn read-extensions-v1beta1-namespaced-replica-set-scale
  "read scale of the specified ReplicaSet; returns only `:data`."
  ([name namespace] (read-extensions-v1beta1-namespaced-replica-set-scale name namespace nil))
  ([name namespace optional-params]
   (:data (read-extensions-v1beta1-namespaced-replica-set-scale-with-http-info name namespace optional-params))))

(defn read-extensions-v1beta1-namespaced-replica-set-status-with-http-info
  "read status of the specified ReplicaSet; returns the full response map."
  ([name namespace] (read-extensions-v1beta1-namespaced-replica-set-status-with-http-info name namespace nil))
  ([name namespace {:keys [pretty]}]
   (check-required-params name namespace)
   (read-extensions-v1beta1-call*
     "/apis/extensions/v1beta1/namespaces/{namespace}/replicasets/{name}/status"
     {"name" name "namespace" namespace}
     {"pretty" pretty})))

(defn read-extensions-v1beta1-namespaced-replica-set-status
  "read status of the specified ReplicaSet; returns only `:data`."
  ([name namespace] (read-extensions-v1beta1-namespaced-replica-set-status name namespace nil))
  ([name namespace optional-params]
   (:data (read-extensions-v1beta1-namespaced-replica-set-status-with-http-info name namespace optional-params))))

(defn read-extensions-v1beta1-namespaced-replication-controller-dummy-scale-with-http-info
  "read scale of the specified ReplicationControllerDummy; returns the full response map."
  ([name namespace] (read-extensions-v1beta1-namespaced-replication-controller-dummy-scale-with-http-info name namespace nil))
  ([name namespace {:keys [pretty]}]
   (check-required-params name namespace)
   (read-extensions-v1beta1-call*
     "/apis/extensions/v1beta1/namespaces/{namespace}/replicationcontrollers/{name}/scale"
     {"name" name "namespace" namespace}
     {"pretty" pretty})))

(defn read-extensions-v1beta1-namespaced-replication-controller-dummy-scale
  "read scale of the specified ReplicationControllerDummy; returns only `:data`."
  ([name namespace] (read-extensions-v1beta1-namespaced-replication-controller-dummy-scale name namespace nil))
  ([name namespace optional-params]
   (:data (read-extensions-v1beta1-namespaced-replication-controller-dummy-scale-with-http-info name namespace optional-params))))

(defn read-extensions-v1beta1-pod-security-policy-with-http-info
  "read the specified PodSecurityPolicy (cluster-scoped); returns the full response map."
  ([name] (read-extensions-v1beta1-pod-security-policy-with-http-info name nil))
  ([name {:keys [pretty exact export]}]
   (check-required-params name)
   (read-extensions-v1beta1-call*
     "/apis/extensions/v1beta1/podsecuritypolicies/{name}"
     {"name" name}
     {"pretty" pretty "exact" exact "export" export})))

(defn read-extensions-v1beta1-pod-security-policy
  "read the specified PodSecurityPolicy; returns only `:data`."
  ([name] (read-extensions-v1beta1-pod-security-policy name nil))
  ([name optional-params]
   (:data (read-extensions-v1beta1-pod-security-policy-with-http-info name optional-params))))
(defn- replace-extensions-v1beta1-call*
  "Shared plumbing for the PUT (replace) operations below: sends `body` as the
  replacement object to `path` and returns the full HTTP response map from
  `call-api`. `pretty` may be nil (omitted by server)."
  [path path-params body pretty]
  (call-api path :put
            {:path-params   path-params
             :header-params {}
             :query-params  {"pretty" pretty}
             :form-params   {}
             :body-param    body
             :content-types ["*/*"]
             :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf"]
             :auth-names    ["BearerToken" "HTTPBasic"]}))

(defn replace-extensions-v1beta1-namespaced-daemon-set-with-http-info
  "replace the specified DaemonSet; returns the full response map."
  ([name namespace body] (replace-extensions-v1beta1-namespaced-daemon-set-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (replace-extensions-v1beta1-call*
     "/apis/extensions/v1beta1/namespaces/{namespace}/daemonsets/{name}"
     {"name" name "namespace" namespace} body pretty)))

(defn replace-extensions-v1beta1-namespaced-daemon-set
  "replace the specified DaemonSet; returns only `:data`."
  ([name namespace body] (replace-extensions-v1beta1-namespaced-daemon-set name namespace body nil))
  ([name namespace body optional-params]
   (:data (replace-extensions-v1beta1-namespaced-daemon-set-with-http-info name namespace body optional-params))))

(defn replace-extensions-v1beta1-namespaced-daemon-set-status-with-http-info
  "replace status of the specified DaemonSet; returns the full response map."
  ([name namespace body] (replace-extensions-v1beta1-namespaced-daemon-set-status-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (replace-extensions-v1beta1-call*
     "/apis/extensions/v1beta1/namespaces/{namespace}/daemonsets/{name}/status"
     {"name" name "namespace" namespace} body pretty)))

(defn replace-extensions-v1beta1-namespaced-daemon-set-status
  "replace status of the specified DaemonSet; returns only `:data`."
  ([name namespace body] (replace-extensions-v1beta1-namespaced-daemon-set-status name namespace body nil))
  ([name namespace body optional-params]
   (:data (replace-extensions-v1beta1-namespaced-daemon-set-status-with-http-info name namespace body optional-params))))

(defn replace-extensions-v1beta1-namespaced-deployment-with-http-info
  "replace the specified Deployment; returns the full response map."
  ([name namespace body] (replace-extensions-v1beta1-namespaced-deployment-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (replace-extensions-v1beta1-call*
     "/apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}"
     {"name" name "namespace" namespace} body pretty)))

(defn replace-extensions-v1beta1-namespaced-deployment
  "replace the specified Deployment; returns only `:data`."
  ([name namespace body] (replace-extensions-v1beta1-namespaced-deployment name namespace body nil))
  ([name namespace body optional-params]
   (:data (replace-extensions-v1beta1-namespaced-deployment-with-http-info name namespace body optional-params))))

(defn replace-extensions-v1beta1-namespaced-deployment-scale-with-http-info
  "replace scale of the specified Deployment; returns the full response map."
  ([name namespace body] (replace-extensions-v1beta1-namespaced-deployment-scale-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (replace-extensions-v1beta1-call*
     "/apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}/scale"
     {"name" name "namespace" namespace} body pretty)))

(defn replace-extensions-v1beta1-namespaced-deployment-scale
  "replace scale of the specified Deployment; returns only `:data`."
  ([name namespace body] (replace-extensions-v1beta1-namespaced-deployment-scale name namespace body nil))
  ([name namespace body optional-params]
   (:data (replace-extensions-v1beta1-namespaced-deployment-scale-with-http-info name namespace body optional-params))))

(defn replace-extensions-v1beta1-namespaced-deployment-status-with-http-info
  "replace status of the specified Deployment; returns the full response map."
  ([name namespace body] (replace-extensions-v1beta1-namespaced-deployment-status-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (replace-extensions-v1beta1-call*
     "/apis/extensions/v1beta1/namespaces/{namespace}/deployments/{name}/status"
     {"name" name "namespace" namespace} body pretty)))

(defn replace-extensions-v1beta1-namespaced-deployment-status
  "replace status of the specified Deployment; returns only `:data`."
  ([name namespace body] (replace-extensions-v1beta1-namespaced-deployment-status name namespace body nil))
  ([name namespace body optional-params]
   (:data (replace-extensions-v1beta1-namespaced-deployment-status-with-http-info name namespace body optional-params))))

(defn replace-extensions-v1beta1-namespaced-ingress-with-http-info
  "replace the specified Ingress; returns the full response map."
  ([name namespace body] (replace-extensions-v1beta1-namespaced-ingress-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (replace-extensions-v1beta1-call*
     "/apis/extensions/v1beta1/namespaces/{namespace}/ingresses/{name}"
     {"name" name "namespace" namespace} body pretty)))

(defn replace-extensions-v1beta1-namespaced-ingress
  "replace the specified Ingress; returns only `:data`."
  ([name namespace body] (replace-extensions-v1beta1-namespaced-ingress name namespace body nil))
  ([name namespace body optional-params]
   (:data (replace-extensions-v1beta1-namespaced-ingress-with-http-info name namespace body optional-params))))

(defn replace-extensions-v1beta1-namespaced-ingress-status-with-http-info
  "replace status of the specified Ingress; returns the full response map."
  ([name namespace body] (replace-extensions-v1beta1-namespaced-ingress-status-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (replace-extensions-v1beta1-call*
     "/apis/extensions/v1beta1/namespaces/{namespace}/ingresses/{name}/status"
     {"name" name "namespace" namespace} body pretty)))

(defn replace-extensions-v1beta1-namespaced-ingress-status
  "replace status of the specified Ingress; returns only `:data`."
  ([name namespace body] (replace-extensions-v1beta1-namespaced-ingress-status name namespace body nil))
  ([name namespace body optional-params]
   (:data (replace-extensions-v1beta1-namespaced-ingress-status-with-http-info name namespace body optional-params))))

(defn replace-extensions-v1beta1-namespaced-network-policy-with-http-info
  "replace the specified NetworkPolicy; returns the full response map."
  ([name namespace body] (replace-extensions-v1beta1-namespaced-network-policy-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (replace-extensions-v1beta1-call*
     "/apis/extensions/v1beta1/namespaces/{namespace}/networkpolicies/{name}"
     {"name" name "namespace" namespace} body pretty)))

(defn replace-extensions-v1beta1-namespaced-network-policy
  "replace the specified NetworkPolicy; returns only `:data`."
  ([name namespace body] (replace-extensions-v1beta1-namespaced-network-policy name namespace body nil))
  ([name namespace body optional-params]
   (:data (replace-extensions-v1beta1-namespaced-network-policy-with-http-info name namespace body optional-params))))

(defn replace-extensions-v1beta1-namespaced-replica-set-with-http-info
  "replace the specified ReplicaSet; returns the full response map."
  ([name namespace body] (replace-extensions-v1beta1-namespaced-replica-set-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (replace-extensions-v1beta1-call*
     "/apis/extensions/v1beta1/namespaces/{namespace}/replicasets/{name}"
     {"name" name "namespace" namespace} body pretty)))

(defn replace-extensions-v1beta1-namespaced-replica-set
  "replace the specified ReplicaSet; returns only `:data`."
  ([name namespace body] (replace-extensions-v1beta1-namespaced-replica-set name namespace body nil))
  ([name namespace body optional-params]
   (:data (replace-extensions-v1beta1-namespaced-replica-set-with-http-info name namespace body optional-params))))

(defn replace-extensions-v1beta1-namespaced-replica-set-scale-with-http-info
  "replace scale of the specified ReplicaSet; returns the full response map."
  ([name namespace body] (replace-extensions-v1beta1-namespaced-replica-set-scale-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (replace-extensions-v1beta1-call*
     "/apis/extensions/v1beta1/namespaces/{namespace}/replicasets/{name}/scale"
     {"name" name "namespace" namespace} body pretty)))

(defn replace-extensions-v1beta1-namespaced-replica-set-scale
  "replace scale of the specified ReplicaSet; returns only `:data`."
  ([name namespace body] (replace-extensions-v1beta1-namespaced-replica-set-scale name namespace body nil))
  ([name namespace body optional-params]
   (:data (replace-extensions-v1beta1-namespaced-replica-set-scale-with-http-info name namespace body optional-params))))

(defn replace-extensions-v1beta1-namespaced-replica-set-status-with-http-info
  "replace status of the specified ReplicaSet; returns the full response map."
  ([name namespace body] (replace-extensions-v1beta1-namespaced-replica-set-status-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (replace-extensions-v1beta1-call*
     "/apis/extensions/v1beta1/namespaces/{namespace}/replicasets/{name}/status"
     {"name" name "namespace" namespace} body pretty)))

(defn replace-extensions-v1beta1-namespaced-replica-set-status
  "replace status of the specified ReplicaSet; returns only `:data`."
  ([name namespace body] (replace-extensions-v1beta1-namespaced-replica-set-status name namespace body nil))
  ([name namespace body optional-params]
   (:data (replace-extensions-v1beta1-namespaced-replica-set-status-with-http-info name namespace body optional-params))))

(defn replace-extensions-v1beta1-namespaced-replication-controller-dummy-scale-with-http-info
  "replace scale of the specified ReplicationControllerDummy; returns the full response map."
  ([name namespace body] (replace-extensions-v1beta1-namespaced-replication-controller-dummy-scale-with-http-info name namespace body nil))
  ([name namespace body {:keys [pretty]}]
   (check-required-params name namespace body)
   (replace-extensions-v1beta1-call*
     "/apis/extensions/v1beta1/namespaces/{namespace}/replicationcontrollers/{name}/scale"
     {"name" name "namespace" namespace} body pretty)))

(defn replace-extensions-v1beta1-namespaced-replication-controller-dummy-scale
  "replace scale of the specified ReplicationControllerDummy; returns only `:data`."
  ([name namespace body] (replace-extensions-v1beta1-namespaced-replication-controller-dummy-scale name namespace body nil))
  ([name namespace body optional-params]
   (:data (replace-extensions-v1beta1-namespaced-replication-controller-dummy-scale-with-http-info name namespace body optional-params))))

(defn replace-extensions-v1beta1-pod-security-policy-with-http-info
  "replace the specified PodSecurityPolicy (cluster-scoped); returns the full response map."
  ([name body] (replace-extensions-v1beta1-pod-security-policy-with-http-info name body nil))
  ([name body {:keys [pretty]}]
   (check-required-params name body)
   (replace-extensions-v1beta1-call*
     "/apis/extensions/v1beta1/podsecuritypolicies/{name}"
     {"name" name} body pretty)))

(defn replace-extensions-v1beta1-pod-security-policy
  "replace the specified PodSecurityPolicy; returns only `:data`."
  ([name body] (replace-extensions-v1beta1-pod-security-policy name body nil))
  ([name body optional-params]
   (:data (replace-extensions-v1beta1-pod-security-policy-with-http-info name body optional-params))))
(defn- watch-extensions-v1beta1-list-call*
  "Shared plumbing for the cross-namespace watch-list operations below: issues
  a :get against `path` with the standard Kubernetes list/watch query options
  (all optional; nil values are omitted by the server) and returns the full
  HTTP response map from `call-api`."
  [path {:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch]}]
  (call-api path :get
            {:path-params   {}
             :header-params {}
             :query-params  {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch}
             :form-params   {}
             :content-types ["*/*"]
             :accepts       ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
             :auth-names    ["BearerToken" "HTTPBasic"]}))

(defn watch-extensions-v1beta1-daemon-set-list-for-all-namespaces-with-http-info
  "watch individual changes to a list of DaemonSet; returns the full response map."
  ([] (watch-extensions-v1beta1-daemon-set-list-for-all-namespaces-with-http-info nil))
  ([optional-params]
   (watch-extensions-v1beta1-list-call* "/apis/extensions/v1beta1/watch/daemonsets" optional-params)))

(defn watch-extensions-v1beta1-daemon-set-list-for-all-namespaces
  "watch individual changes to a list of DaemonSet; returns only `:data`."
  ([] (watch-extensions-v1beta1-daemon-set-list-for-all-namespaces nil))
  ([optional-params]
   (:data (watch-extensions-v1beta1-daemon-set-list-for-all-namespaces-with-http-info optional-params))))

(defn watch-extensions-v1beta1-deployment-list-for-all-namespaces-with-http-info
  "watch individual changes to a list of Deployment; returns the full response map."
  ([] (watch-extensions-v1beta1-deployment-list-for-all-namespaces-with-http-info nil))
  ([optional-params]
   (watch-extensions-v1beta1-list-call* "/apis/extensions/v1beta1/watch/deployments" optional-params)))

(defn watch-extensions-v1beta1-deployment-list-for-all-namespaces
  "watch individual changes to a list of Deployment; returns only `:data`."
  ([] (watch-extensions-v1beta1-deployment-list-for-all-namespaces nil))
  ([optional-params]
   (:data (watch-extensions-v1beta1-deployment-list-for-all-namespaces-with-http-info optional-params))))
(defn watch-extensions-v1beta1-ingress-list-for-all-namespaces-with-http-info
"
watch individual changes to a list of Ingress"
([] (watch-extensions-v1beta1-ingress-list-for-all-namespaces-with-http-info nil))
([{:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
(call-api "/apis/extensions/v1beta1/watch/ingresses" :get
{:path-params {}
:header-params {}
:query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
:form-params {}
:content-types ["*/*"]
:accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
:auth-names ["BearerToken" "HTTPBasic"]})))
(defn watch-extensions-v1beta1-ingress-list-for-all-namespaces
"
watch individual changes to a list of Ingress"
([] (watch-extensions-v1beta1-ingress-list-for-all-namespaces nil))
([optional-params]
(:data (watch-extensions-v1beta1-ingress-list-for-all-namespaces-with-http-info optional-params))))
(defn watch-extensions-v1beta1-namespaced-daemon-set-with-http-info
"
watch changes to an object of kind DaemonSet"
([name namespace ] (watch-extensions-v1beta1-namespaced-daemon-set-with-http-info name namespace nil))
([name namespace {:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
(check-required-params name namespace)
(call-api "/apis/extensions/v1beta1/watch/namespaces/{namespace}/daemonsets/{name}" :get
{:path-params {"name" name "namespace" namespace }
:header-params {}
:query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
:form-params {}
:content-types ["*/*"]
:accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
:auth-names ["BearerToken" "HTTPBasic"]})))
(defn watch-extensions-v1beta1-namespaced-daemon-set
"
watch changes to an object of kind DaemonSet"
([name namespace ] (watch-extensions-v1beta1-namespaced-daemon-set name namespace nil))
([name namespace optional-params]
(:data (watch-extensions-v1beta1-namespaced-daemon-set-with-http-info name namespace optional-params))))
(defn watch-extensions-v1beta1-namespaced-daemon-set-list-with-http-info
"
watch individual changes to a list of DaemonSet"
([namespace ] (watch-extensions-v1beta1-namespaced-daemon-set-list-with-http-info namespace nil))
([namespace {:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
(check-required-params namespace)
(call-api "/apis/extensions/v1beta1/watch/namespaces/{namespace}/daemonsets" :get
{:path-params {"namespace" namespace }
:header-params {}
:query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
:form-params {}
:content-types ["*/*"]
:accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
:auth-names ["BearerToken" "HTTPBasic"]})))
(defn watch-extensions-v1beta1-namespaced-daemon-set-list
"
watch individual changes to a list of DaemonSet"
([namespace ] (watch-extensions-v1beta1-namespaced-daemon-set-list namespace nil))
([namespace optional-params]
(:data (watch-extensions-v1beta1-namespaced-daemon-set-list-with-http-info namespace optional-params))))
(defn watch-extensions-v1beta1-namespaced-deployment-with-http-info
"
watch changes to an object of kind Deployment"
([name namespace ] (watch-extensions-v1beta1-namespaced-deployment-with-http-info name namespace nil))
([name namespace {:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
(check-required-params name namespace)
(call-api "/apis/extensions/v1beta1/watch/namespaces/{namespace}/deployments/{name}" :get
{:path-params {"name" name "namespace" namespace }
:header-params {}
:query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
:form-params {}
:content-types ["*/*"]
:accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
:auth-names ["BearerToken" "HTTPBasic"]})))
(defn watch-extensions-v1beta1-namespaced-deployment
"
watch changes to an object of kind Deployment"
([name namespace ] (watch-extensions-v1beta1-namespaced-deployment name namespace nil))
([name namespace optional-params]
(:data (watch-extensions-v1beta1-namespaced-deployment-with-http-info name namespace optional-params))))
(defn watch-extensions-v1beta1-namespaced-deployment-list-with-http-info
"
watch individual changes to a list of Deployment"
([namespace ] (watch-extensions-v1beta1-namespaced-deployment-list-with-http-info namespace nil))
([namespace {:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
(check-required-params namespace)
(call-api "/apis/extensions/v1beta1/watch/namespaces/{namespace}/deployments" :get
{:path-params {"namespace" namespace }
:header-params {}
:query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
:form-params {}
:content-types ["*/*"]
:accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
:auth-names ["BearerToken" "HTTPBasic"]})))
(defn watch-extensions-v1beta1-namespaced-deployment-list
"
watch individual changes to a list of Deployment"
([namespace ] (watch-extensions-v1beta1-namespaced-deployment-list namespace nil))
([namespace optional-params]
(:data (watch-extensions-v1beta1-namespaced-deployment-list-with-http-info namespace optional-params))))
(defn watch-extensions-v1beta1-namespaced-ingress-with-http-info
"
watch changes to an object of kind Ingress"
([name namespace ] (watch-extensions-v1beta1-namespaced-ingress-with-http-info name namespace nil))
([name namespace {:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
(check-required-params name namespace)
(call-api "/apis/extensions/v1beta1/watch/namespaces/{namespace}/ingresses/{name}" :get
{:path-params {"name" name "namespace" namespace }
:header-params {}
:query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
:form-params {}
:content-types ["*/*"]
:accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
:auth-names ["BearerToken" "HTTPBasic"]})))
(defn watch-extensions-v1beta1-namespaced-ingress
"
watch changes to an object of kind Ingress"
([name namespace ] (watch-extensions-v1beta1-namespaced-ingress name namespace nil))
([name namespace optional-params]
(:data (watch-extensions-v1beta1-namespaced-ingress-with-http-info name namespace optional-params))))
(defn watch-extensions-v1beta1-namespaced-ingress-list-with-http-info
"
watch individual changes to a list of Ingress"
([namespace ] (watch-extensions-v1beta1-namespaced-ingress-list-with-http-info namespace nil))
([namespace {:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
(check-required-params namespace)
(call-api "/apis/extensions/v1beta1/watch/namespaces/{namespace}/ingresses" :get
{:path-params {"namespace" namespace }
:header-params {}
:query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
:form-params {}
:content-types ["*/*"]
:accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
:auth-names ["BearerToken" "HTTPBasic"]})))
(defn watch-extensions-v1beta1-namespaced-ingress-list
"
watch individual changes to a list of Ingress"
([namespace ] (watch-extensions-v1beta1-namespaced-ingress-list namespace nil))
([namespace optional-params]
(:data (watch-extensions-v1beta1-namespaced-ingress-list-with-http-info namespace optional-params))))
(defn watch-extensions-v1beta1-namespaced-network-policy-with-http-info
"
watch changes to an object of kind NetworkPolicy"
([name namespace ] (watch-extensions-v1beta1-namespaced-network-policy-with-http-info name namespace nil))
([name namespace {:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
(check-required-params name namespace)
(call-api "/apis/extensions/v1beta1/watch/namespaces/{namespace}/networkpolicies/{name}" :get
{:path-params {"name" name "namespace" namespace }
:header-params {}
:query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
:form-params {}
:content-types ["*/*"]
:accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
:auth-names ["BearerToken" "HTTPBasic"]})))
(defn watch-extensions-v1beta1-namespaced-network-policy
"
watch changes to an object of kind NetworkPolicy"
([name namespace ] (watch-extensions-v1beta1-namespaced-network-policy name namespace nil))
([name namespace optional-params]
(:data (watch-extensions-v1beta1-namespaced-network-policy-with-http-info name namespace optional-params))))
(defn watch-extensions-v1beta1-namespaced-network-policy-list-with-http-info
"
watch individual changes to a list of NetworkPolicy"
([namespace ] (watch-extensions-v1beta1-namespaced-network-policy-list-with-http-info namespace nil))
([namespace {:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
(check-required-params namespace)
(call-api "/apis/extensions/v1beta1/watch/namespaces/{namespace}/networkpolicies" :get
{:path-params {"namespace" namespace }
:header-params {}
:query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
:form-params {}
:content-types ["*/*"]
:accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
:auth-names ["BearerToken" "HTTPBasic"]})))
(defn watch-extensions-v1beta1-namespaced-network-policy-list
"
watch individual changes to a list of NetworkPolicy"
([namespace ] (watch-extensions-v1beta1-namespaced-network-policy-list namespace nil))
([namespace optional-params]
(:data (watch-extensions-v1beta1-namespaced-network-policy-list-with-http-info namespace optional-params))))
(defn watch-extensions-v1beta1-namespaced-replica-set-with-http-info
"
watch changes to an object of kind ReplicaSet"
([name namespace ] (watch-extensions-v1beta1-namespaced-replica-set-with-http-info name namespace nil))
([name namespace {:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
(check-required-params name namespace)
(call-api "/apis/extensions/v1beta1/watch/namespaces/{namespace}/replicasets/{name}" :get
{:path-params {"name" name "namespace" namespace }
:header-params {}
:query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
:form-params {}
:content-types ["*/*"]
:accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
:auth-names ["BearerToken" "HTTPBasic"]})))
(defn watch-extensions-v1beta1-namespaced-replica-set
"
watch changes to an object of kind ReplicaSet"
([name namespace ] (watch-extensions-v1beta1-namespaced-replica-set name namespace nil))
([name namespace optional-params]
(:data (watch-extensions-v1beta1-namespaced-replica-set-with-http-info name namespace optional-params))))
(defn watch-extensions-v1beta1-namespaced-replica-set-list-with-http-info
"
watch individual changes to a list of ReplicaSet"
([namespace ] (watch-extensions-v1beta1-namespaced-replica-set-list-with-http-info namespace nil))
([namespace {:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
(check-required-params namespace)
(call-api "/apis/extensions/v1beta1/watch/namespaces/{namespace}/replicasets" :get
{:path-params {"namespace" namespace }
:header-params {}
:query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
:form-params {}
:content-types ["*/*"]
:accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
:auth-names ["BearerToken" "HTTPBasic"]})))
(defn watch-extensions-v1beta1-namespaced-replica-set-list
"
watch individual changes to a list of ReplicaSet"
([namespace ] (watch-extensions-v1beta1-namespaced-replica-set-list namespace nil))
([namespace optional-params]
(:data (watch-extensions-v1beta1-namespaced-replica-set-list-with-http-info namespace optional-params))))
(defn watch-extensions-v1beta1-network-policy-list-for-all-namespaces-with-http-info
"
watch individual changes to a list of NetworkPolicy"
([] (watch-extensions-v1beta1-network-policy-list-for-all-namespaces-with-http-info nil))
([{:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
(call-api "/apis/extensions/v1beta1/watch/networkpolicies" :get
{:path-params {}
:header-params {}
:query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
:form-params {}
:content-types ["*/*"]
:accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
:auth-names ["BearerToken" "HTTPBasic"]})))
(defn watch-extensions-v1beta1-network-policy-list-for-all-namespaces
"
watch individual changes to a list of NetworkPolicy"
([] (watch-extensions-v1beta1-network-policy-list-for-all-namespaces nil))
([optional-params]
(:data (watch-extensions-v1beta1-network-policy-list-for-all-namespaces-with-http-info optional-params))))
(defn watch-extensions-v1beta1-pod-security-policy-with-http-info
"
watch changes to an object of kind PodSecurityPolicy"
([name ] (watch-extensions-v1beta1-pod-security-policy-with-http-info name nil))
([name {:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
(check-required-params name)
(call-api "/apis/extensions/v1beta1/watch/podsecuritypolicies/{name}" :get
{:path-params {"name" name }
:header-params {}
:query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
:form-params {}
:content-types ["*/*"]
:accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
:auth-names ["BearerToken" "HTTPBasic"]})))
(defn watch-extensions-v1beta1-pod-security-policy
"
watch changes to an object of kind PodSecurityPolicy"
([name ] (watch-extensions-v1beta1-pod-security-policy name nil))
([name optional-params]
(:data (watch-extensions-v1beta1-pod-security-policy-with-http-info name optional-params))))
(defn watch-extensions-v1beta1-pod-security-policy-list-with-http-info
"
watch individual changes to a list of PodSecurityPolicy"
([] (watch-extensions-v1beta1-pod-security-policy-list-with-http-info nil))
([{:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
(call-api "/apis/extensions/v1beta1/watch/podsecuritypolicies" :get
{:path-params {}
:header-params {}
:query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
:form-params {}
:content-types ["*/*"]
:accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
:auth-names ["BearerToken" "HTTPBasic"]})))
(defn watch-extensions-v1beta1-pod-security-policy-list
"
watch individual changes to a list of PodSecurityPolicy"
([] (watch-extensions-v1beta1-pod-security-policy-list nil))
([optional-params]
(:data (watch-extensions-v1beta1-pod-security-policy-list-with-http-info optional-params))))
(defn watch-extensions-v1beta1-replica-set-list-for-all-namespaces-with-http-info
"
watch individual changes to a list of ReplicaSet"
([] (watch-extensions-v1beta1-replica-set-list-for-all-namespaces-with-http-info nil))
([{:keys [continue field-selector include-uninitialized label-selector limit pretty resource-version timeout-seconds watch ]}]
(call-api "/apis/extensions/v1beta1/watch/replicasets" :get
{:path-params {}
:header-params {}
:query-params {"continue" continue "fieldSelector" field-selector "includeUninitialized" include-uninitialized "labelSelector" label-selector "limit" limit "pretty" pretty "resourceVersion" resource-version "timeoutSeconds" timeout-seconds "watch" watch }
:form-params {}
:content-types ["*/*"]
:accepts ["application/json" "application/yaml" "application/vnd.kubernetes.protobuf" "application/json;stream=watch" "application/vnd.kubernetes.protobuf;stream=watch"]
:auth-names ["BearerToken" "HTTPBasic"]})))
(defn watch-extensions-v1beta1-replica-set-list-for-all-namespaces
"
watch individual changes to a list of ReplicaSet"
([] (watch-extensions-v1beta1-replica-set-list-for-all-namespaces nil))
([optional-params]
(:data (watch-extensions-v1beta1-replica-set-list-for-all-namespaces-with-http-info optional-params))))
| |
7bc790ab65843f3ad795d5605c20d8deec22260350d47171cb747ae1b7bbfef0 | circuithub/fast-downward | Axiom.hs | {-# language OverloadedStrings #-}
# language RecordWildCards #
module FastDownward.SAS.Axiom ( Axiom(..), toSAS ) where
import Data.Sequence ( Seq )
import qualified Data.Sequence as Seq
import qualified Data.Text.Lazy.Builder
import qualified Data.Text.Lazy.Builder.Int
import FastDownward.SAS.DomainIndex ( DomainIndex )
import qualified FastDownward.SAS.DomainIndex as DomainIndex
import FastDownward.SAS.VariableAssignment ( VariableAssignment )
import qualified FastDownward.SAS.VariableAssignment as VariableAssignment
import FastDownward.SAS.VariableIndex ( VariableIndex )
import qualified FastDownward.SAS.VariableIndex as VariableIndex
data Axiom =
Axiom
{ variable :: VariableIndex
, conditions :: Seq VariableAssignment
, pre :: DomainIndex
, post :: DomainIndex
}
deriving
( Show )
toSAS :: Axiom -> Data.Text.Lazy.Builder.Builder
toSAS Axiom{..} =
"begin_rule\n"
<> Data.Text.Lazy.Builder.Int.decimal ( Seq.length conditions ) <> "\n"
<> foldMap ( \x -> VariableAssignment.toSAS x <> "\n" ) conditions
<> VariableIndex.toSAS variable <> " " <> DomainIndex.toSAS pre <> " " <> DomainIndex.toSAS post <> "\n"
<> "end_rule"
| null | https://raw.githubusercontent.com/circuithub/fast-downward/9f6375af83f693076c957251e8ac0ab5c3dd63d7/FastDownward/SAS/Axiom.hs | haskell | # language OverloadedStrings # | # language RecordWildCards #
module FastDownward.SAS.Axiom ( Axiom(..), toSAS ) where
import Data.Sequence ( Seq )
import qualified Data.Sequence as Seq
import qualified Data.Text.Lazy.Builder
import qualified Data.Text.Lazy.Builder.Int
import FastDownward.SAS.DomainIndex ( DomainIndex )
import qualified FastDownward.SAS.DomainIndex as DomainIndex
import FastDownward.SAS.VariableAssignment ( VariableAssignment )
import qualified FastDownward.SAS.VariableAssignment as VariableAssignment
import FastDownward.SAS.VariableIndex ( VariableIndex )
import qualified FastDownward.SAS.VariableIndex as VariableIndex
data Axiom =
Axiom
{ variable :: VariableIndex
, conditions :: Seq VariableAssignment
, pre :: DomainIndex
, post :: DomainIndex
}
deriving
( Show )
toSAS :: Axiom -> Data.Text.Lazy.Builder.Builder
toSAS Axiom{..} =
"begin_rule\n"
<> Data.Text.Lazy.Builder.Int.decimal ( Seq.length conditions ) <> "\n"
<> foldMap ( \x -> VariableAssignment.toSAS x <> "\n" ) conditions
<> VariableIndex.toSAS variable <> " " <> DomainIndex.toSAS pre <> " " <> DomainIndex.toSAS post <> "\n"
<> "end_rule"
|
37ff8b6b5521b61309484fc7afd6e2f2d9e6e7d333e59e59df2e5c19c75afc9d | geophf/1HaskellADay | Exercise.hs | module Y2017.M01.D05.Exercise where
import Data.Array
below imports available from 1HaskellADay git repository
import Data.SAIPE.USStates
import Graph.ScoreCard
import Y2016.M12.D21.Exercise
import Y2016.M12.D22.Exercise
import Y2017.M01.D04.Exercise
-
So , yesterday we looked at US State SAIPE data . The solution showed us the
two standout states : Wyoming had the smallest population and the least poverty ,
and California had the largest population and the most poverty .
But ' least ' and ' most ' can be measured several ways . Yesterday , we simply looked
at the number of people in poverty , but did not take into account the total
population of the State . Who knows ? Maybe California has the least poverty if
one adjusts for population ? Yes ? No ?
Let 's find out .
Today 's exercise .
Read in the SAIPE data from
Y2016 / M12 / D15 / SAIPESNC_15DEC16_11_35_13_00.csv.gz
collating by US State ( see yesterday 's exercise ) , then , enhance the score - card
with the ratio of people in poverty to the entire population :
-
So, yesterday we looked at US State SAIPE data. The solution showed us the
two standout states: Wyoming had the smallest population and the least poverty,
and California had the largest population and the most poverty.
But 'least' and 'most' can be measured several ways. Yesterday, we simply looked
at the number of people in poverty, but did not take into account the total
population of the State. Who knows? Maybe California has the least poverty if
one adjusts for population? Yes? No?
Let's find out.
Today's Haskell exercise.
Read in the SAIPE data from
Y2016/M12/D15/SAIPESNC_15DEC16_11_35_13_00.csv.gz
collating by US State (see yesterday's exercise), then, enhance the score-card
with the ratio of people in poverty to the entire population:
--}
type PovertyRatio = Float -- really a poverty / population ration, but okay
povertyRatio :: ScoreCard a Axes Float -> PovertyRatio
povertyRatio scorecard = undefined
data Attribs = POPULATION | POVERTY | POVERTYRATIO
| TOTALDEBT | PERCAPITADEBT -- will be used (read further down)
deriving (Eq, Ord, Enum, Bounded, Ix, Show)
type EnhancedSC = ScoreCard USState Attribs Float
enhancedScoreCard :: ScoreCard USState Axes Float -> EnhancedSC
enhancedScoreCard scorecard = undefined
Great . Now . Which US State has the highest poverty ratio ? The lowest ?
impoverished :: [EnhancedSC] -> [EnhancedSC]
impoverished = undefined
-
Remember reading in US State total and per capita debt ? That was the exercise
for Y2016.M12.D22 . Re - read in that information again from :
Y2016 / M12 / D22 / personal_debt_load_by_US_state.csv
And round out the US State scorecard information with those attributes :
-
Remember reading in US State total and per capita debt? That was the exercise
for Y2016.M12.D22. Re-read in that information again from:
Y2016/M12/D22/personal_debt_load_by_US_state.csv
And round out the US State scorecard information with those attributes:
--}
augmentWithDebtInfo :: EnhancedSC -> USStateDebt -> EnhancedSC
augmentWithDebtInfo scorecard debtinfo = undefined
-
Now that you have a set of collated data for US States with debt and poverty
information , is there a correllation ? Or : what are the top 5 US States in debt ?
Bottom 5 ? What are the top 5 US State with the highest poverty ratios ? Lowest ?
Now ratios are very small numbers [ 0 , 1 ] , but populations and debts are very
big ones . Tomorrow we 'll level these disparate data sets and look at clustered
results .
-
Now that you have a set of collated data for US States with debt and poverty
information, is there a correllation? Or: what are the top 5 US States in debt?
Bottom 5? What are the top 5 US State with the highest poverty ratios? Lowest?
Now ratios are very small numbers [0, 1], but populations and debts are very
big ones. Tomorrow we'll level these disparate data sets and look at clustered
results.
--}
| null | https://raw.githubusercontent.com/geophf/1HaskellADay/514792071226cd1e2ba7640af942667b85601006/exercises/HAD/Y2017/M01/D05/Exercise.hs | haskell | }
really a poverty / population ration, but okay
will be used (read further down)
}
} | module Y2017.M01.D05.Exercise where
import Data.Array
below imports available from 1HaskellADay git repository
import Data.SAIPE.USStates
import Graph.ScoreCard
import Y2016.M12.D21.Exercise
import Y2016.M12.D22.Exercise
import Y2017.M01.D04.Exercise
-
So , yesterday we looked at US State SAIPE data . The solution showed us the
two standout states : Wyoming had the smallest population and the least poverty ,
and California had the largest population and the most poverty .
But ' least ' and ' most ' can be measured several ways . Yesterday , we simply looked
at the number of people in poverty , but did not take into account the total
population of the State . Who knows ? Maybe California has the least poverty if
one adjusts for population ? Yes ? No ?
Let 's find out .
Today 's exercise .
Read in the SAIPE data from
Y2016 / M12 / D15 / SAIPESNC_15DEC16_11_35_13_00.csv.gz
collating by US State ( see yesterday 's exercise ) , then , enhance the score - card
with the ratio of people in poverty to the entire population :
-
So, yesterday we looked at US State SAIPE data. The solution showed us the
two standout states: Wyoming had the smallest population and the least poverty,
and California had the largest population and the most poverty.
But 'least' and 'most' can be measured several ways. Yesterday, we simply looked
at the number of people in poverty, but did not take into account the total
population of the State. Who knows? Maybe California has the least poverty if
one adjusts for population? Yes? No?
Let's find out.
Today's Haskell exercise.
Read in the SAIPE data from
Y2016/M12/D15/SAIPESNC_15DEC16_11_35_13_00.csv.gz
collating by US State (see yesterday's exercise), then, enhance the score-card
with the ratio of people in poverty to the entire population:
povertyRatio :: ScoreCard a Axes Float -> PovertyRatio
povertyRatio scorecard = undefined
data Attribs = POPULATION | POVERTY | POVERTYRATIO
deriving (Eq, Ord, Enum, Bounded, Ix, Show)
type EnhancedSC = ScoreCard USState Attribs Float
enhancedScoreCard :: ScoreCard USState Axes Float -> EnhancedSC
enhancedScoreCard scorecard = undefined
Great . Now . Which US State has the highest poverty ratio ? The lowest ?
impoverished :: [EnhancedSC] -> [EnhancedSC]
impoverished = undefined
-
Remember reading in US State total and per capita debt ? That was the exercise
for Y2016.M12.D22 . Re - read in that information again from :
Y2016 / M12 / D22 / personal_debt_load_by_US_state.csv
And round out the US State scorecard information with those attributes :
-
Remember reading in US State total and per capita debt? That was the exercise
for Y2016.M12.D22. Re-read in that information again from:
Y2016/M12/D22/personal_debt_load_by_US_state.csv
And round out the US State scorecard information with those attributes:
augmentWithDebtInfo :: EnhancedSC -> USStateDebt -> EnhancedSC
augmentWithDebtInfo scorecard debtinfo = undefined
-
Now that you have a set of collated data for US States with debt and poverty
information , is there a correllation ? Or : what are the top 5 US States in debt ?
Bottom 5 ? What are the top 5 US State with the highest poverty ratios ? Lowest ?
Now ratios are very small numbers [ 0 , 1 ] , but populations and debts are very
big ones . Tomorrow we 'll level these disparate data sets and look at clustered
results .
-
Now that you have a set of collated data for US States with debt and poverty
information, is there a correllation? Or: what are the top 5 US States in debt?
Bottom 5? What are the top 5 US State with the highest poverty ratios? Lowest?
Now ratios are very small numbers [0, 1], but populations and debts are very
big ones. Tomorrow we'll level these disparate data sets and look at clustered
results.
|
1cc965a39b3b489b20f5cf192a46a33f58821bbc4638e30325a969e2d48c9333 | hyperfiddle/electric | explorer.cljc | (ns hyperfiddle.hfql.explorer
(:require [hyperfiddle.photon :as p]
[hyperfiddle.explorer :as ex]
[hyperfiddle.photon-dom :as dom]
[hyperfiddle.hfql.ui :as ui]
[hyperfiddle.hfql :as hfql]
[hyperfiddle.spec :as spec]
[hyperfiddle.api :as hf]
[hyperfiddle.gridsheet :as gridsheet :refer [GridSheet]])
#?(:cljs (:require-macros [hyperfiddle.hfql.explorer])))
(p/defn Sequence ":: t m a -> m t a" [Vs] (p/fn [] (p/for [V Vs] (V.))))
(defmacro capture [dynvars & body]
(let [syms (repeatedly #(gensym))]
`(let [~@(interleave syms dynvars)]
(p/fn []
(binding [~@(interleave dynvars syms)]
~@body)))))
(p/def Rec)
(defn inject-rows [rows]
(assert (vector? rows) (every? vector? rows))
(with-meta rows {:tag ::rows}))
(defn rows? [x] (= ::rows (:tag (meta x))))
(p/defn Identity [x] x)
(p/defn OptionsRenderer [ctx] nil)
(p/defn SelectOptions [{::hf/keys [summarize options continuation Value] :as ctx}]
(let [value (hfql/JoinAllTheTree. ctx)
labelf (or summarize Identity)]
(p/client
(dom/select
(p/server
(p/for [opt-e (options.)]
(let [opt-value (hfql/JoinAllTheTree. (continuation. opt-e))
selected? (= value opt-value)
text (labelf. opt-value)]
(p/client
(dom/option {::dom/selected selected?} (dom/text text)))))))
nil)))
(defn key-name [k] (cond (seq? k) (name (first k))
(qualified-ident? k) (name k)
:else k))
(p/defn HeaderRenderer [{::hf/keys [as attribute]} k] (or k as attribute))
(p/defn TableHeader [{::hf/keys [attribute as] :as ctx} k]
(let [attribute (key-name (or k as attribute))]
(p/client (dom/span {::dom/class "hf-header"}
(dom/text attribute))
nil)))
(p/defn CellRenderer [ctx value]
(if (= ::nil value)
(new (::hf/Value ctx))
value))
(p/defn FormsTransposedToRows
[{::hf/keys [keys]
::keys [RenderHeader RenderCell RenderOptions]
:as ctx}
v
depth]
(inject-rows
(into [(p/fn [] [depth (p/fn [] (p/for [k keys] (p/fn [] (RenderHeader. ctx k))))])]
cat (p/for [ctx v]
[(p/fn [] [depth (p/fn []
(let [{::hf/keys [keys values]} ctx]
(p/for-by first [[k ctx] (mapv vector keys values)]
(p/fn []
(let [{::hf/keys [render summarize options]} ctx]
(cond render (render. ctx)
options (RenderOptions. ctx)
:else (let [value (hfql/JoinAllTheTree. ctx)]
(if summarize
(summarize. value)
(RenderCell. ctx value)))))))) )])]))))
(p/defn FormLabelRenderer [{::hf/keys [attribute] :as ctx} depth]
[(p/fn [] [depth (p/fn [] [(p/fn [] attribute)])])])
(p/defn FormLabelRendererWithInputs
[{::hf/keys [attribute arguments options]
::keys [RenderOptions RenderHeader]
:as ctx}
depth]
(into [(p/fn [] [depth (p/fn [] [(p/fn [] (RenderHeader. ctx nil))
(p/fn [] (when options (RenderOptions. ctx)))])])]
cat
(p/for-by first [[arg-name props :as arg] arguments]
[(p/fn [] [(inc depth) (p/fn []
(let [spec (ui/attr-spec attribute)]
[(p/fn [] (p/client (dom/span {::dom/title (pr-str (:hyperfiddle.spec/form (spec/arg spec arg-name)))}
(dom/text (str "🔎 " (name arg-name)))) nil))
(p/fn [] (ui/GrayInput. false spec {::dom/placeholder "bob…"} arg) nil)]))])])))
(p/defn HandleCardMany [{::hf/keys [type] :as ctx} v depth]
(case type
::hf/leaf (into [] cat (p/for [ctx v] (Rec. ctx (inc depth))))
(FormsTransposedToRows. ctx v depth)))
(defmacro rows [& rows] `[(p/fn [] ~@rows)])
(defmacro row [depth cols] `[~depth (p/fn [] ~cols)])
(defn convey [ctx-a ctx-b] (merge ctx-b (select-keys ctx-a [::RenderCell ::RenderHeader ::RenderFormLabel ::RenderOptions])))
(p/defn TreeToExplorer [{::keys [RenderFormLabel RenderCell RenderHeader RenderOptions]
:or {RenderCell CellRenderer
RenderHeader HeaderRenderer
RenderFormLabel FormLabelRenderer
RenderOptions OptionsRenderer}
:as root-ctx}]
(binding [Rec (p/fn [{::hf/keys [type render keys values Value] :as ctx} depth]
(let [ctx (merge ctx {::RenderCell RenderCell
::RenderHeader RenderHeader
::RenderFormLabel RenderFormLabel
::RenderOptions RenderOptions})]
(if render
(let [v (render. (assoc ctx ::depth depth))]
(if (rows? v)
v
[(capture [Rec] [depth (p/fn [] [(p/fn [] v)])])]))
(case type
::hf/leaf (rows (row depth [(p/fn [] (Value.))]))
::hf/keys (into [] cat (p/for-by first [[k ctx'] (mapv vector keys values)]
(let [ctx (convey ctx ctx')]
(if (= ::hf/leaf (::hf/type ctx))
[(capture [Rec] (row depth [(p/fn [] (RenderHeader. ctx nil))
(p/fn [] (if-let [render (::hf/render ctx)]
(render. ctx)
(RenderCell. ctx ::nil)))]))]
(into (RenderFormLabel. ctx depth)
(Rec. ctx (inc depth)))))))
(let [v (Value.)]
(cond (vector? v) (HandleCardMany. ctx v depth) ; card many
card one
:else (throw (ex-info "unreachable" {:value v}))))))))]
(new Rec root-ctx 0)))
(defn col->idx "Return the numeric index of a Excel-like column name (a string).
e.g.: (col->idx \"AA\") := 26"
[col]
(assert (and (string? col) (not-empty col)))
0 based
(reduce (fn [r char] (+ (* r 26) (inc (- #?(:clj (int char), :cljs (.charCodeAt char 0)) 65)) ))
0
(clojure.string/upper-case col))))
(defn idx->col [idx]
(assert (>= idx 0))
(let [n (mod idx 26)
rest (int (/ idx 26))
char (char (+ 65 n))]
(if (pos? rest)
(str (idx->col (dec rest)) char)
(str char))))
(defn column-range
([end] (column-range 0 end))
([start end]
(mapv idx->col (range start end))))
(defn parse-props [{::keys [page-size row-height columns]
:or {page-size 40
row-height 24
columns 2}
:as props}]
(merge
{:hyperfiddle.explorer/page-size page-size
:hyperfiddle.explorer/row-height row-height
::dom/style {:height (str "calc(("page-size" + 1) * "row-height"px)")}
::gridsheet/grid-template-columns (str "20rem repeat(" (dec columns)", 1fr)")}
props))
(p/defn Explorer [props hfql]
(let [xs (new (Sequence. (TreeToExplorer. hfql)))]
(binding [ex/cols (if-let [columns (::columns props)]
(column-range columns)
ex/cols)
ex/Format (p/fn [M a]
(let [row (M.)]
(some-> (get row (col->idx a))
(new)
(pr-str))))]
(ex/BasicExplorer. (parse-props props) xs))))
(p/defn ExplorerWithUI [props hfql]
(Explorer. props (merge {::RenderCell (p/fn [ctx _] (ui/SpecDispatch. ctx))
::RenderHeader TableHeader
::RenderFormLabel FormLabelRendererWithInputs
::RenderOptions SelectOptions}
hfql)))
| null | https://raw.githubusercontent.com/hyperfiddle/electric/e633dc635cf84e0a2320b664ba722b696ce0067b/scratch/geoffrey/2022/explorer.cljc | clojure | card many | (ns hyperfiddle.hfql.explorer
(:require [hyperfiddle.photon :as p]
[hyperfiddle.explorer :as ex]
[hyperfiddle.photon-dom :as dom]
[hyperfiddle.hfql.ui :as ui]
[hyperfiddle.hfql :as hfql]
[hyperfiddle.spec :as spec]
[hyperfiddle.api :as hf]
[hyperfiddle.gridsheet :as gridsheet :refer [GridSheet]])
#?(:cljs (:require-macros [hyperfiddle.hfql.explorer])))
(p/defn Sequence ":: t m a -> m t a" [Vs] (p/fn [] (p/for [V Vs] (V.))))
(defmacro capture [dynvars & body]
(let [syms (repeatedly #(gensym))]
`(let [~@(interleave syms dynvars)]
(p/fn []
(binding [~@(interleave dynvars syms)]
~@body)))))
(p/def Rec)
(defn inject-rows [rows]
(assert (vector? rows) (every? vector? rows))
(with-meta rows {:tag ::rows}))
(defn rows? [x] (= ::rows (:tag (meta x))))
(p/defn Identity [x] x)
(p/defn OptionsRenderer [ctx] nil)
(p/defn SelectOptions [{::hf/keys [summarize options continuation Value] :as ctx}]
(let [value (hfql/JoinAllTheTree. ctx)
labelf (or summarize Identity)]
(p/client
(dom/select
(p/server
(p/for [opt-e (options.)]
(let [opt-value (hfql/JoinAllTheTree. (continuation. opt-e))
selected? (= value opt-value)
text (labelf. opt-value)]
(p/client
(dom/option {::dom/selected selected?} (dom/text text)))))))
nil)))
(defn key-name [k] (cond (seq? k) (name (first k))
(qualified-ident? k) (name k)
:else k))
(p/defn HeaderRenderer [{::hf/keys [as attribute]} k] (or k as attribute))
(p/defn TableHeader [{::hf/keys [attribute as] :as ctx} k]
(let [attribute (key-name (or k as attribute))]
(p/client (dom/span {::dom/class "hf-header"}
(dom/text attribute))
nil)))
(p/defn CellRenderer [ctx value]
(if (= ::nil value)
(new (::hf/Value ctx))
value))
(p/defn FormsTransposedToRows
[{::hf/keys [keys]
::keys [RenderHeader RenderCell RenderOptions]
:as ctx}
v
depth]
(inject-rows
(into [(p/fn [] [depth (p/fn [] (p/for [k keys] (p/fn [] (RenderHeader. ctx k))))])]
cat (p/for [ctx v]
[(p/fn [] [depth (p/fn []
(let [{::hf/keys [keys values]} ctx]
(p/for-by first [[k ctx] (mapv vector keys values)]
(p/fn []
(let [{::hf/keys [render summarize options]} ctx]
(cond render (render. ctx)
options (RenderOptions. ctx)
:else (let [value (hfql/JoinAllTheTree. ctx)]
(if summarize
(summarize. value)
(RenderCell. ctx value)))))))) )])]))))
(p/defn FormLabelRenderer [{::hf/keys [attribute] :as ctx} depth]
[(p/fn [] [depth (p/fn [] [(p/fn [] attribute)])])])
(p/defn FormLabelRendererWithInputs
[{::hf/keys [attribute arguments options]
::keys [RenderOptions RenderHeader]
:as ctx}
depth]
(into [(p/fn [] [depth (p/fn [] [(p/fn [] (RenderHeader. ctx nil))
(p/fn [] (when options (RenderOptions. ctx)))])])]
cat
(p/for-by first [[arg-name props :as arg] arguments]
[(p/fn [] [(inc depth) (p/fn []
(let [spec (ui/attr-spec attribute)]
[(p/fn [] (p/client (dom/span {::dom/title (pr-str (:hyperfiddle.spec/form (spec/arg spec arg-name)))}
(dom/text (str "🔎 " (name arg-name)))) nil))
(p/fn [] (ui/GrayInput. false spec {::dom/placeholder "bob…"} arg) nil)]))])])))
(p/defn HandleCardMany [{::hf/keys [type] :as ctx} v depth]
(case type
::hf/leaf (into [] cat (p/for [ctx v] (Rec. ctx (inc depth))))
(FormsTransposedToRows. ctx v depth)))
(defmacro rows [& rows] `[(p/fn [] ~@rows)])
(defmacro row [depth cols] `[~depth (p/fn [] ~cols)])
(defn convey [ctx-a ctx-b] (merge ctx-b (select-keys ctx-a [::RenderCell ::RenderHeader ::RenderFormLabel ::RenderOptions])))
(p/defn TreeToExplorer [{::keys [RenderFormLabel RenderCell RenderHeader RenderOptions]
:or {RenderCell CellRenderer
RenderHeader HeaderRenderer
RenderFormLabel FormLabelRenderer
RenderOptions OptionsRenderer}
:as root-ctx}]
(binding [Rec (p/fn [{::hf/keys [type render keys values Value] :as ctx} depth]
(let [ctx (merge ctx {::RenderCell RenderCell
::RenderHeader RenderHeader
::RenderFormLabel RenderFormLabel
::RenderOptions RenderOptions})]
(if render
(let [v (render. (assoc ctx ::depth depth))]
(if (rows? v)
v
[(capture [Rec] [depth (p/fn [] [(p/fn [] v)])])]))
(case type
::hf/leaf (rows (row depth [(p/fn [] (Value.))]))
::hf/keys (into [] cat (p/for-by first [[k ctx'] (mapv vector keys values)]
(let [ctx (convey ctx ctx')]
(if (= ::hf/leaf (::hf/type ctx))
[(capture [Rec] (row depth [(p/fn [] (RenderHeader. ctx nil))
(p/fn [] (if-let [render (::hf/render ctx)]
(render. ctx)
(RenderCell. ctx ::nil)))]))]
(into (RenderFormLabel. ctx depth)
(Rec. ctx (inc depth)))))))
(let [v (Value.)]
card one
:else (throw (ex-info "unreachable" {:value v}))))))))]
(new Rec root-ctx 0)))
(defn col->idx "Return the numeric index of a Excel-like column name (a string).
e.g.: (col->idx \"AA\") := 26"
[col]
(assert (and (string? col) (not-empty col)))
0 based
(reduce (fn [r char] (+ (* r 26) (inc (- #?(:clj (int char), :cljs (.charCodeAt char 0)) 65)) ))
0
(clojure.string/upper-case col))))
(defn idx->col [idx]
(assert (>= idx 0))
(let [n (mod idx 26)
rest (int (/ idx 26))
char (char (+ 65 n))]
(if (pos? rest)
(str (idx->col (dec rest)) char)
(str char))))
(defn column-range
([end] (column-range 0 end))
([start end]
(mapv idx->col (range start end))))
(defn parse-props [{::keys [page-size row-height columns]
:or {page-size 40
row-height 24
columns 2}
:as props}]
(merge
{:hyperfiddle.explorer/page-size page-size
:hyperfiddle.explorer/row-height row-height
::dom/style {:height (str "calc(("page-size" + 1) * "row-height"px)")}
::gridsheet/grid-template-columns (str "20rem repeat(" (dec columns)", 1fr)")}
props))
(p/defn Explorer [props hfql]
(let [xs (new (Sequence. (TreeToExplorer. hfql)))]
(binding [ex/cols (if-let [columns (::columns props)]
(column-range columns)
ex/cols)
ex/Format (p/fn [M a]
(let [row (M.)]
(some-> (get row (col->idx a))
(new)
(pr-str))))]
(ex/BasicExplorer. (parse-props props) xs))))
(p/defn ExplorerWithUI [props hfql]
(Explorer. props (merge {::RenderCell (p/fn [ctx _] (ui/SpecDispatch. ctx))
::RenderHeader TableHeader
::RenderFormLabel FormLabelRendererWithInputs
::RenderOptions SelectOptions}
hfql)))
|
e0115f346f72c9c29edf6943a86a542afd8e729dd1cfd7d7b9515d97743b4125 | rowangithub/DOrder | instrument.ml | (** Insert sampling statements into the analyzing code *)
open Parsetree
open Typedtree
open Types
open Frame
open Longident
open Backwalker
let pp = Format.fprintf
let log_fname = "mllog"
let tempt_arr_prefix = "tmp_pre"
let tempt_arr_postfix = "temp_post"
(*If a function is a measure => do not instrument it*)
let is_measure se_env funname =
(String.compare funname "List.length" = 0) ||
(Hashtbl.fold (fun _ ms res ->
if (res) then res
else
List.exists (fun (m, _) -> String.compare funname (Path.name m) = 0) ms
) se_env.measures false)
(*If a function is userdefined => may instrument it*)
let is_user_function se_env funname =
Hashtbl.fold (fun f _ res ->
if (res) then res
else String.compare funname (Path.name f) = 0
) se_env.funframebindings false
(** expression is from text; count how many arguments are specified *)
let rec count_arguments expression = match expression.pexp_desc with
| Parsetree.Pexp_function (lbl, elbl, exp) ->
let (p, e) = List.hd exp in
if lbl="" then
match e.pexp_desc with
| Pexp_when _ -> 0
| _ -> 1 + count_arguments e
else 1 + count_arguments e
| _ -> 0
let count_app_arguments expression =
let rec fold_string_list res fs = match fs with
| [] -> res
| [a] -> res ^ a
| a::l -> fold_string_list (res ^ a ^ ".") l in
match expression.pexp_desc with
| Parsetree.Pexp_apply (e, l) -> (match e.pexp_desc with
| Parsetree.Pexp_ident id ->
let names = Longident.flatten id in
(fold_string_list "" names, List.length l)
| _ -> assert false)
| _ -> assert false
let is_base_typed f = match f with
| Frame.Farrow _ -> false
| Frame.Frecord _ -> false
| Frame.Ftuple _ -> false
| Frame.Fconstr (x,_,_,_,_) when x = Predef.path_list -> false
| f -> true
* Print out min and functions for boundary of higher order functions
let print_min_max formatter =
(pp formatter
"@;@[<hov2>let@ min l =@ List.fold_left (fun res li -> min res li) (-1) l @]@.";
pp formatter
"@;@[<hov2>let@ max l =@ List.fold_left (fun res li -> max res li) 0 l @]@.")
(** Print out test harnesses if programmers specify them in a separate file *)
let print_harnesses fname formatter =
let fname = fname ^ "_harness" in
try
let lines = ref [] in
let chan = open_in fname in
let _ = try
while true; do
lines := input_line chan :: !lines
done
with End_of_file ->
close_in chan in
let lines = List.rev !lines in
List.iter (fun line ->
pp formatter "@;@[<hov2>%s@;@]" line;
) lines
with _ -> () (* No test harnesses available in a seperate file. Fine. *)
let is_recursive_type path dec =
let allparams = match dec.type_kind with
| Type_variant decs -> decs
| kind -> assert false in
List.exists (fun (cstrname, params) ->
List.exists (fun param ->
match param.desc with (*Must FIXME.*)
| Tconstr (p, _, _) when (Path.same p path) -> true
| Tconstr (p, _, _) when (Path.same p Predef.path_list) -> true
| _ -> false
) params
) allparams
(** Dump inductive data structures ... *)
let print_udt types udt_table ppf =
Hashtbl.iter (fun path declaration ->
if List.exists (fun ty ->
String.compare ty (Path.name path) = 0) types &&
is_recursive_type path declaration then
let allparams = match declaration.type_kind with
| Type_variant decs -> decs
| kind -> assert false in
(* Type of allparams : (string * type_expr list) list *)
(** printf constructor argument *)
let log_cstr_params n =
let _ = if (n > 0) then (pp ppf "(") in
let args = Array.init n (fun i -> "t_"^(string_of_int i)) in
let _ = Array.iteri (fun i arg ->
if (i < n - 1) then pp ppf "%s, " arg
else pp ppf "%s" arg
) args in
if (n > 0) then pp ppf ")" in
(** printf the process of this constructor *)
let log_cstr_process cstrname links values =
if (links = []) then
(if (values = []) then pp ppf "@; @[None@]"
else
let value = List.hd values in
(pp ppf "@; @[(fprintf outch (\"%s#%d#%s;%s,\") t_%d (-1000); @]" cstrname (-1) "%d" "%d" value;
pp ppf "@; @[Some t_%d)@]" value))
else
(List.iter (fun (link, p) ->
if (Path.same p Predef.path_list) then
(pp ppf "@; @[let ele_%d = @;@[%a@] in @]"
link
(fun ppf unit ->
pp ppf "@[List.fold_right (fun v res -> @;@[%a@] @;@[%a@]) t_%d (-1000) @]"
(fun ppf unit ->
pp ppf "@; @[let ele = sample_%s v in @]" (Path.name path)
) ()
(fun ppf unit ->
pp ppf "@; @[match ele with @. | None -> res @. | Some ele -> (if (!callflag) then fprintf outch (\"Cons#1#%s;%s,\") ele res; ele) @]" "%d" "%d"
) ()
link
) ();
pp ppf "@; @[let ele_%d = if ele_%d = (-1000) then None else Some ele_%d in @]" link link link
)
else pp ppf "@; @[let ele_%d = sample_%s t_%d in @]" link (Path.name path) link
) links;
if (values = [] && links <> []) then (
List.iter (fun (link, _) ->
pp ppf "@; @[let _ = match ele_%d with @. | None -> if (!callflag) then fprintf outch (\"%s#%d#%s;%s,\") %s (-1000) @. | Some ele_%d -> @. if (!callflag) then fprintf outch (\"%s#%d#%s;%s,\") %s ele_%d in @]"
link
(cstrname) (link) "%d" "%d" "(!xxxcounter)"
link (cstrname) (link) "%d" "%d" "(!xxxcounter)" link
) links;
pp ppf "@; @[let c = (!xxxcounter) in (xxxcounter := c+1; Some c)@]"
)
else (
List.iter (fun value ->
List.iter (fun (link, _) ->
pp ppf "@; @[let _ = match ele_%d with @. | None -> if (!callflag) then fprintf outch (\"%s#%d#%s;%s,\") t_%d (-1000) @. | Some ele_%d -> @. if (!callflag) then fprintf outch (\"%s#%d#%s;%s,\") t_%d ele_%d in @]"
link
(cstrname) (link) "%d" "%d" value
link (cstrname) (link) "%d" "%d" value link
) links
) values;
if (values = []) then pp ppf "@[None@]"
else pp ppf "@; @[(Some t_%d)@]" (List.hd values)) ) in
(** printf the whole sampling strategy for user inductive data type *)
pp ppf "@;@[%a %a @]"
(fun ppf unit -> pp ppf "@[<hov2>let rec sample_%s t = @. match t with @]" (Path.name path)) ()
pp ppf " @;@[if @[(!callflag ) then ( @;@[%s@ ] ; @;@[%a@ ] ; @;@[%a@ ] @;@[%a@ ] ; @;@[%s@];)@]@ ] "
List.iter (fun (cstrname, params) ->
let (links, values, _) = List.fold_left (fun (links, values, index) param ->
match param.desc with (*Must FIXME.*)
| Tconstr (p, _, _) when (Path.same p path) -> (links @ [(index, p)], values, index+1)
| Tconstr (p, tys, _) when (Path.same p Predef.path_list) ->
(let _ = assert ((List.length tys) = 1) in
let ty = List.hd tys in
match ty.desc with
| Tconstr _ -> (links @ [(index, p)], values, index+1)
| _ -> (links, values, index+1))
| Tconstr _ -> (links, values, index+1)
| _ -> (links, values @ [index], index+1)
) ([], [], 0) params in
let n = List.length params in
pp ppf "@; @[| %s %a -> @. %a @]"
cstrname
(fun ppf unit -> ignore (log_cstr_params n)) ()
(fun ppf unit -> ignore (log_cstr_process cstrname links values)) ()
) allparams ) ()
) udt_table
* funpexpr is of f x include x or y if any of them free .
* for each free var , return its upper and lower bound
* for each free var, return its upper and lower bound
*)
let random_value freevars pred =
let lowers = (Hashtbl.create 5) in
let uppers = (Hashtbl.create 5) in
let _ = List.iter (fun freevar ->
Hashtbl.replace lowers freevar [];
Hashtbl.replace uppers freevar []
) freevars in
let _ = Predicate.map_pred (fun pred -> match pred with
| Predicate.Atom (Predicate.Var var, Predicate.Gt, bound) ->
(if (List.exists (fun freevar -> Path.same freevar var) freevars) then
Hashtbl.replace lowers var (bound::(Hashtbl.find lowers var));
pred)
| Predicate.Atom (Predicate.Var var, Predicate.Ge, bound) ->
(if (List.exists (fun freevar -> Path.same freevar var) freevars) then
Hashtbl.replace lowers var (bound::(Hashtbl.find lowers var));
pred)
| Predicate.Atom (bound, Predicate.Gt, Predicate.Var var) ->
(if (List.exists (fun freevar -> Path.same freevar var) freevars) then
Hashtbl.replace uppers var (bound::(Hashtbl.find uppers var));
pred)
| Predicate.Atom (bound, Predicate.Ge, Predicate.Var var) ->
(if (List.exists (fun freevar -> Path.same freevar var) freevars) then
Hashtbl.replace uppers var (bound::(Hashtbl.find uppers var));
pred)
| Predicate.Atom (Predicate.Var var, Predicate.Lt, bound) ->
(if (List.exists (fun freevar -> Path.same freevar var) freevars) then
Hashtbl.replace uppers var (bound::(Hashtbl.find uppers var));
pred)
| Predicate.Atom (Predicate.Var var, Predicate.Le, bound) ->
(if (List.exists (fun freevar -> Path.same freevar var) freevars) then
Hashtbl.replace uppers var (bound::(Hashtbl.find uppers var));
pred)
| Predicate.Atom (bound, Predicate.Lt, Predicate.Var var) ->
(if (List.exists (fun freevar -> Path.same freevar var) freevars) then
Hashtbl.replace lowers var (bound::(Hashtbl.find lowers var));
pred)
| Predicate.Atom (bound, Predicate.Le, Predicate.Var var) ->
(if (List.exists (fun freevar -> Path.same freevar var) freevars) then
Hashtbl.replace lowers var (bound::(Hashtbl.find lowers var));
pred)
| _ -> pred
) pred in
(lowers, uppers)
let rec delete_redundant_UF es =
(*begin:delete redundant UFs*)
match es with
| (Predicate.FunApp ("UF", es')) :: es ->
let es = delete_redundant_UF es in
let (f, args) = (List.hd es', List.tl es') in
(match f with
| Predicate.Var f -> (Predicate.FunApp (Path.name f, args)) :: es
| _ -> assert false)
| e::es -> e::(delete_redundant_UF es)
| [] -> [] (*end*)
(** Give a test to higher order function *)
let make_test localdefs fr =
(** frees --> the input parameters *)
(** args --> the synthesized arguments for the test call *)
let localdefs = try List.remove_assoc (Frame.returnpath) localdefs
with _ -> assert false in
let rec frame_equals f1 f2 = match (f1, f2) with
| (Frame.Farrow (_, f1, f2, _), Frame.Farrow (_, f1', f2', _)) ->
(frame_equals f1 f1' && frame_equals f2 f2')
| (Frame.Fconstr (p,fs,_,_,_), Frame.Fconstr (p',fs',_,_,_)) ->
(Path.same p p') && (List.length fs == List.length fs') &&
(List.for_all2 (fun f f' -> frame_equals f f') fs fs')
| (Frame.Frecord _, Frame.Frecord _) -> true
| (Frame.Ftuple _, Frame.Ftuple _) -> true
| (Frame.Fvar _, Frame.Fvar _) -> true
| _ -> false in (* Fixme. The above implementation must be fixed! *)
let rec loop flag frees args rfrs fr = match fr with
| Frame.Farrow (_, f1, f2, _) ->
if (flag) then
let (frees, args, rfrs) = loop false frees args rfrs f1 in
(match f2 with
| Frame.Farrow _ -> loop true frees args rfrs f2
| fr -> (frees, args, [fr]))
else
let (v, _) =
try List.find (fun (_, ld) -> frame_equals ld fr) localdefs
with _ -> (assert false) in
(frees, args@[v], rfrs)
| Frame.Fvar _ ->
let v = Path.mk_ident ("v" ^ string_of_int (List.length frees)) in
(frees@[v], args@[v], rfrs)
| Frame.Fconstr (x,_,_,_,_) when x = Predef.path_int ->
let v = Path.mk_ident ("v" ^ string_of_int (List.length frees)) in
(frees@[v], args@[v], rfrs)
| Frame.Fconstr (x,_,_,_,_) when x = Predef.path_unit ->
(frees, args@[Path.mk_ident "unit"], rfrs)
| Frame.Ftuple (fs, _) ->
List.fold_left (fun (frees, args, rfrs) fr ->
loop flag frees args rfrs fr
) (frees, args, rfrs) fs
| Frame.Frecord (_,fs,_) ->
List.fold_left (fun (frees, args, rfrs) (fr,_,_) ->
loop flag frees args rfrs fr
) (frees, args, rfrs) fs
| _ ->
let (v, _) = List.find (fun (_, ld) -> frame_equals ld fr) localdefs in
(frees, args@[v], rfrs) in
let (frees, args, rfrs) = loop true [] [] [] fr in
(frees, List.map (fun arg -> Predicate.Var arg) args, List.hd rfrs)
(** Synthesize the call to a higher order function *)
let synthesize formatter funname bad_constraint locals defs allbindings =
(** Search symbolic constraint *)
let fun_stack = ref [] in
let _ = Predicate.map_expr (fun pexpr -> match pexpr with
| Predicate.FunApp (fn, es) when (String.compare fn "UF" = 0) ->
let funpath = List.hd es in
(match funpath with
| Predicate.Var funpath ->
if (String.compare funname (Path.name funpath) = 0) then
try
let (_,fr) = List.find (fun (p,f) -> Path.same p funpath) allbindings in
let n_args = Frame.count_args fr in
let n_actuals = (List.length es)-1 in
if (n_actuals >= n_args) then
(fun_stack := pexpr::(!fun_stack); pexpr)
else pexpr
with _ -> assert false
else pexpr
| _ -> (assert false))
| _ -> pexpr
) bad_constraint.post in (** search all higher order function constriants *)
(** defines how to print higher order function arguments *)
let log_args formatter args =
List.fold_left (fun index arg -> match arg with
| Predicate.FunApp (fn, es) when (String.compare fn "UF" = 0) -> assert false
let funpath = List.hd es in
( match funpath with
| Predicate .
( try
let ( _ , fr ) = ( fun ( p , f ) - > Path.same p funpath ) ( allbindings ) in
let n_args = Frame.count_args fr in
let n_actuals = ( es)-1 in
if ( n_actuals > = n_args ) then
let _ = pp formatter
" @[(try fprintf outch ( \"%s#%s,\ " ) ( ( % a ) ) with _ - > ( ) ) ; @ ] "
( funname ^ " _ " ^ ( string_of_int index ) ) " % d " Predicate.pprint_pexpr arg in
( index+1 )
else ( index+1 )
with _ - > assert false )
| _ - > ( Format.fprintf Format.std_formatter " Fail to deal with % a@. " Predicate.pprint_pexpr arg ; assert false ) )
(match funpath with
| Predicate.Var funpath ->
(try
let (_, fr) = List.find (fun (p,f) -> Path.same p funpath) (allbindings) in
let n_args = Frame.count_args fr in
let n_actuals = (List.length es)-1 in
if (n_actuals >= n_args) then
let _ = pp formatter
"@[(try fprintf outch (\"%s#%s,\") ((%a)) with _->()); @]"
(funname ^ "_" ^ (string_of_int index)) "%d" Predicate.pprint_pexpr arg in
(index+1)
else (index+1)
with _ -> assert false)
| _ -> (Format.fprintf Format.std_formatter "Fail to deal with %a@." Predicate.pprint_pexpr arg; assert false))*)
| Predicate.FunApp (fn, es) when (String.compare fn "List.hd" = 0 || String.compare fn "List.tl" = 0)
(* The argument should be a int list only => ignore any int int... int list *) ->
let vars = Predicate.exp_vars arg in
let b = List.for_all (fun var ->
try
let (_, fr) = List.find (fun (p,f) -> Path.same p var) (allbindings) in
match fr with
| Frame.Fconstr (x,fs,_,_,_) when x = Predef.path_list -> is_base_typed (List.hd fs)
| fr -> assert false
with _ -> assert false
) vars in
if (b) then
let _ = pp formatter
"@[(try fprintf outch (\"%s#%s,\") ((%a)) with _->()); @]"
(funname ^ "_" ^ (string_of_int index)) "%d" Predicate.pprint_pexpr arg in
(index+1)
else (index + 1)
| Predicate.FunApp (fn, es) when (String.compare fn "List.length" = 0) ->
let _ = pp formatter
"@[(try fprintf outch (\"%s#%s,\") ((%a)) with _->()); @]"
(funname ^ "_" ^ (string_of_int index)) "%d" Predicate.pprint_pexpr arg in
(index+1)
| Predicate.FunApp (funpath, es) ->
(*(Format.fprintf Format.std_formatter "Ill arg: %a@." Predicate.pprint_pexpr arg; assert false)*)
((try
let (_, fr) = List.find (fun (p,f) -> String.compare (Path.name p) funpath = 0) (allbindings) in
let n_args = Frame.count_args fr in
let n_actuals = (List.length es) in
if (n_actuals >= n_args) then
let _ = pp formatter
"@[(try fprintf outch (\"%s#%s,\") ((%a)) with _->()); @]"
(funname ^ "_" ^ (string_of_int index)) "%d" Predicate.pprint_pexpr arg in
(index+1)
else (index+1)
with _ -> assert false))
| _ ->
let vars = Predicate.exp_vars arg in
let b = List.for_all (fun var ->
try
(let (_, fr) = List.find (fun (p,f) -> Path.same p var) (allbindings) in
match fr with
| Frame.Fconstr (x,_,_,_,_) when (x = Predef.path_unit) -> false
| Frame.Farrow _ -> false
| _ -> true)
with _ -> (* Fixme. Hard coded. *)
(if (String.compare (Path.name var) "unit" = 0) then false
else true)
) vars in
if (b) then
let _ = pp formatter
"@[fprintf outch (\"%s#%s,\") ((%a)); @]"
(funname ^ "_" ^ (string_of_int index)) "%d" Predicate.pprint_pexpr arg in
(index+1)
else (* this argument is not an integer so just forget about it *) (index+1)
) 0 args in
let print_callargs formatter args =
List.iter (fun arg -> match arg with (* Fixme. Hard coded. *)
| Predicate.Var p when (String.compare (Path.name p) "unit" = 0) ->
pp formatter " %s" "()"
| _ ->
pp formatter " %a" Predicate.pprint_pexpr arg
) args in
let fun_stack = Common.remove_duplicates !fun_stack in
let _ = Format.fprintf Format.std_formatter " ---- \n " in
let _ = List.iter ( fun f - >
( Format.fprintf Format.std_formatter " % a is in funstack ! Predicate.pprint_pexpr f )
) fun_stack in
let _ = List.iter ( fun ( p , _ ) - > Format.fprintf Format.std_formatter " def % s@. " ( Path.unique_name p ) ) ( defs ) in
let _ = List.iter ( fun ( p , _ ) - > Format.fprintf Format.std_formatter " local % s@. " ( Path.unique_name p ) ) ( locals ) in
let _ = Format.fprintf Format.std_formatter " ---- \n " in
let _ = List.iter (fun f ->
(Format.fprintf Format.std_formatter "%a is in funstack! \n" Predicate.pprint_pexpr f)
) fun_stack in
let _ = List.iter (fun (p, _) -> Format.fprintf Format.std_formatter "def %s@." (Path.unique_name p)) (defs) in
let _ = List.iter (fun (p, _) -> Format.fprintf Format.std_formatter "local %s@." (Path.unique_name p)) (locals) in
let _ = Format.fprintf Format.std_formatter "---- \n" in*)
if (List.length fun_stack > 0) then
List.iter (fun fun_pexpr -> match fun_pexpr with
| Predicate.FunApp ("UF", _::callargs) ->
let callargs = delete_redundant_UF callargs in
let frees = List.fold_left (fun freeset callarg ->
* may contain some variable not in scope ; generate int for them
let callargvars = Predicate.exp_vars callarg in
(** arguments should be bound by parameters
and environment that the function is defined *)
let frees = List.filter (fun callargvar ->
List.for_all (fun (def,_) -> not (Path.same def callargvar)) (locals@defs) &&
(* In the meantime free must be an integer. *)
Question : how about we have a free that is a list ? ! !
(try
let (_, fr) = List.find (fun (p, _) -> Path.same callargvar p) allbindings in
(match fr with
| Frame.Fconstr (p,_,_,_,_) when (Path.same p Predef.path_int) -> true
| Frame.Fvar _ -> true | _ -> false)
with _ -> true)
) callargvars in
if (List.length frees = 0) then freeset
else (
(*Printf.fprintf stdout "funname = %s \n" funname;
List.iter (fun free -> Printf.fprintf stdout
"free var %s \n" (Path.name free)) frees;
List.iter (fun (def,_) -> Printf.fprintf stdout
"def var %s \n" (Path.name def)) (locals@defs);*)
freeset @ frees)
) [] callargs in
(** call higher order function *)
if (List.length frees > 0) then
let (lowers, uppers) = random_value frees bad_constraint.post in
pp formatter "@;@[%a %a %a@]"
(fun ppf unit ->
(List.iter (fun free ->
(** the upper and lower bounds of free bounds a loop *)
let uppers = Common.remove_duplicates (Hashtbl.find uppers free) in
let lowers = Common.remove_duplicates (Hashtbl.find lowers free) in
(* Forbid any bound that is same to fun_pexpr. Fix me? Reject all function call based bound? *)
let uppers = List.filter (fun upper -> not (upper = fun_pexpr)) uppers in
let lowers = List.filter (fun lower -> not (lower = fun_pexpr)) lowers in
(*let _ = List.iter (fun upper -> Format.fprintf Format.std_formatter "upper=%a@." Predicate.pprint_pexpr upper) uppers in
let _ = List.iter (fun lower -> Format.fprintf Format.std_formatter "lower=%a@." Predicate.pprint_pexpr lower) lowers in*)
let int_locals = Common.map_partial (fun (v, f) -> match f with
| Frame.Fvar _ -> Some v
| Frame.Fconstr (p,_,_,_,_) when Path.same p Predef.path_int -> Some v
| _ -> None
) locals in
let printbounds flag formatter bounds =
if (List.length bounds = 0) then
(** if free is actually a parameter then use it *)
if (List.exists (fun (p, _) -> Path.same p free) allbindings &&
not (List.exists (fun p -> Path.same p free) int_locals)) then
Common.pprint_list "; " (fun formatter path ->
pp formatter "%s" (Path.name path)
) formatter (free::int_locals)
else
Common.pprint_list "; " (fun formatter path ->
pp formatter "%s" (Path.name path)
) formatter int_locals
else
(** filter bounds that are not going out of scope *)
let printbound formatter bound =
(*let _ = Format.fprintf Format.std_formatter "A bound as %a@." Predicate.pprint_pexpr bound in*)
let boundvars = Predicate.exp_vars bound in
let unboundvars = List.filter (fun boundvar ->
List.for_all (fun (def,_) -> not (Path.same def boundvar)) (locals @ defs)
) boundvars in
if (List.length unboundvars = 0) then
Predicate.pprint_pexpr formatter bound
else
(* Although this bound is intended to be eliminated, but it may actually refer to a function paprameter *)
(*let b = List.for_all (fun uv -> List.exists (fun (p, _) -> Path.same uv p) allbindings) unboundvars in*)
let fn = if flag then "max" else "min" in
let args = List.fold_left (fun res arg -> res^(Path.name arg)^";") "" int_locals in
let unbound_substs = List.map (fun ubv ->
(ubv, Predicate.Var (Path.Pident (Ident.create_persistent (fn ^ "["^ args ^"]")))
)) unboundvars in
Predicate.pprint_pexpr formatter (
Predicate.exp_apply_substs unbound_substs bound) in
Common.pprint_list "; " printbound formatter bounds in
pp ppf "@[in let _ = for %s = min([%a]) to max([%a]) do@]"
(Path.name free) (printbounds false) lowers (printbounds true) uppers
) frees)) ()
" @[<hv0>@[<2 > if@ % a@]@;@[<2 > then@ % a@]%a@ ] "
(fun ppf unit -> pp ppf "@;@[if @[(!callflag) then (@;@[%s@]; @;@[%a@]; @;@[%a@] @;@[%a@]; @;@[%s@];)@]@]"
("(callflag := false)")
(fun ppf unit -> pp ppf "@[fprintf outch (\"%s:\")@]" funname) ()
(fun ppf unit -> ignore(log_args ppf callargs)) ()
(fun ppf unit -> pp ppf "@[(try fprintf outch (\"%s#%s\\t\") ((%s%a)) with _->(fprintf outch (\"\t\")))@]"
(funname ^ "_" ^ "r") "%d" funname (print_callargs) callargs;) ()
("(callflag := true)")) ()
(fun ppf unit ->
List.iter (fun _ -> pp ppf "@[%s@]" "done") frees
) ()
else
let _ = pp formatter "@;@[in let _ = if (!callflag) then ((callflag := false); @]" in
let _ = pp formatter "@;@[fprintf outch (\"%s:\"); @]" funname in
let _ = log_args formatter callargs in (
pp formatter
"@;@[(try fprintf outch (\"%s#%s\\t\") ((%s%a)) with _->(fprintf outch (\"\t\"))); @]"
(funname ^ "_" ^ "r") "%d" funname (print_callargs) callargs;
pp formatter
"@;@[(callflag := true)) @]")
| _ -> assert false
) fun_stack
else (** No constraints from post-condition; try to directly call it *)
let (_, fr) = List.find (fun (p,f) -> String.compare (Path.name p) funname = 0) (allbindings) in
(**--------- check each argument to make sure it actually can be called -----------------*)
try let (frees, callargs, rfr) = make_test (allbindings@defs) fr in
pp formatter "@;@[in let _ = %a %a %a@]"
(fun ppf unit ->
(List.iter (fun free ->
let int_locals = Common.map_partial (fun (v, f) -> match f with
| Frame.Fvar _ -> Some v
| Frame.Fconstr (p,_,_,_,_) when Path.same p Predef.path_int -> Some v
| _ -> None
) (locals@defs) in
let printbounds flag formatter bounds =
. , 1 should be min and int used in the program
if (List.length bounds = 0) then
if (flag) then pp formatter "1"
else pp formatter "-1"
else
if (flag) then
Common.pprint_list "; " (fun formatter path ->
pp formatter "%s+1" (Path.name path)
) formatter bounds
else
Common.pprint_list "; " (fun formatter path ->
pp formatter "%s-1" (Path.name path)
) formatter bounds in
pp ppf "@[for %s = min([%a]) to max([%a]) do @]"
(Path.name free) (printbounds false) int_locals (printbounds true) int_locals
) frees)) ()
" @[<hv0>@[<2 > if@ % a@]@;@[<2 > then@ % a@]%a@ ] "
(fun ppf unit -> pp ppf "@;@[if @[(!callflag) then (@;@[%s@]; @;@[%a@]; @;@[%a@] @;@[%a@]; @;@[%s@];)@]@]"
("(callflag := false)")
(fun ppf unit -> pp ppf "@[fprintf outch (\"%s:\")@]" funname) ()
(fun ppf unit -> ignore(log_args ppf callargs)) ()
(fun ppf unit -> (* If the function return is not an integer type, then just print 0 *)
match rfr with
| Frame.Fconstr (x,_,_,_,_) when x = Predef.path_int ->
pp ppf "@[(try fprintf outch (\"%s#%s\\t\") ((%s%a)) with _->(fprintf outch (\"\t\")))@]"
(funname ^ "_" ^ "r") "%d" funname (print_callargs) callargs
| frame ->
pp ppf "@[(try fprintf outch (\"%s#%s\\t\") ((%s%a); 0) with _->(fprintf outch (\"\t\")))@]"
(funname ^ "_" ^ "r") "%d" funname (print_callargs) callargs) ()
("(callflag := true)")) ()
(fun ppf unit ->
List.iter (fun _ -> pp ppf "@[%s@]" "done ") frees
) () with _ -> ()
(**--------------------------------------------------------------------------------------*)
let is_base_typed f = match f with
| Frame.Farrow _ -> false
| Frame.Frecord _ -> false
| Frame.Ftuple _ -> false
| Frame . false
| Frame.Fconstr (x,_,_,_,_) when x = Predef.path_list -> false
| f -> true
let decode_record name n =
let names = Str.split (Str.regexp "\\.") name in
(*let _ = Format.fprintf Format.std_formatter "record_name = %s@." name in
let _ = List.iter (fun name -> Format.fprintf Format.std_formatter "split_record_name=%s@." name) names in*)
if (List.length names < 2) then name
else
let (name, fields) = (List.hd names, List.tl names) in
List.fold_left (fun res field ->
try
let proj = int_of_string field in
let is = Array.to_list (Array.init n (fun i -> i)) in
let arg = List.fold_left (fun res i ->
let res = if (i = proj) then res ^ "x" else res ^ "_" in
if (i = n - 1) then res else res ^ ","
) "" is in
"((fun (" ^ arg ^") -> x) (" ^ res ^ "))"
with _ -> res ^ "." ^ field
) name fields
(** dump any interesting parameter or return construct *)
let rec dump formatter env fenv measures pat frame bad_constraint locals defs allbindings n_args =
let patfrs = bind env pat frame in
(if n_args > 0 then List.iter (fun (path, fr) ->
let pathname = Path.ident_name path in
match pathname with
| Some x -> (
(** Ocaml does not support tuple selection. n_args is used for the len of the tuple. So x.0 --> (fun (x,_) -> x) x *)
let xv = decode_record x n_args in
match fr with
| Fconstr (y,_,_,_,_) when y = Predef.path_unit -> ()
| Fconstr (y,fs,_,_,_) when y = Predef.path_list && is_base_typed (List.hd fs) -> (* printing list *)
if !(Clflags.reachability) then
(** Print linkability; Should consider if the content of the list is another container *)
match (List.hd fs) with
| Fconstr (y,_,_,_,_) when Hashtbl.mem measures y ->
pp formatter "@;@[%a %a %a %a@]"
(fun ppf unit -> pp ppf "@[in let _ = fprintf outch (\"%s:\");@]" (x^"_heap")) ()
(fun ppf unit -> pp ppf "@[ignore (List.fold_right (fun v res -> @;@[%a@] @;@[%a@]) %s (-1000)) @]"
(fun ppf unit ->
pp ppf "@; @[let ele = sample_%s v in @]" (Path.name y)
) ()
(fun ppf unit ->
pp ppf "@; @[match ele with @. | None -> res @. | Some ele -> (if (!callflag) then fprintf outch (\"Cons#1#%s;%s,\") ele res; ele) @]" "%d" "%d"
) ()
xv) ()
(fun ppf unit -> pp ppf "@[in let _ = fprintf outch (\"\t\")@]") ()
(fun ppf unit ->
pp ppf "@;@[in let _ = if (!callflag) then fprintf outch (\"%s:%s\\t\") ((List.length %s)) @]" (x ^ "_l") "%d" xv
) ()
| _ ->
(pp formatter "@;@[%a %a %a %a %a %a@]"
(fun ppf unit -> pp ppf "@[in let _ = fprintf outch (\"%s:\");@]" (x^"_heap")) ()
(fun ppf unit ->
pp ppf "@[in let _ = for ith = 0 to ((List.length %s)-1) do@]" xv
) ()
(fun ppf unit -> pp ppf "@;@[if @[(!callflag) then (@;@[%s@]; @;@[%a@]; @;@[%s@];)@]@]"
("(callflag := false)")
(fun ppf unit -> pp ppf "@[(try fprintf outch (\"Cons#1#%s;%s,\") ((List.nth %s (%s))) ((List.nth %s (%s))) with _->((fprintf outch (\"Cons#1#%s;-1000\") ((List.nth %s (%s))) )))@]"
"%d" "%d" xv "ith" xv "ith+1" "%d" xv "ith";) ()
("(callflag := true)")) ()
(fun ppf unit -> pp ppf "@[%s@]" "done;"
) ()
(fun ppf unit -> pp ppf "@[in let _ = fprintf outch (\"\t\")@]") ()
(fun ppf unit ->
pp ppf "@;@[in let _ = if (!callflag) then fprintf outch (\"%s:%s\\t\") ((List.length %s)) @]" (x ^ "_l") "%d" xv
) ())
else
(pp formatter "@;@[%a %a %a %a@]"
(fun ppf unit ->
pp ppf "@[in let _ = for ith = 0 to ((List.length %s)-1) do@]" xv
) ()
(fun ppf unit -> pp ppf "@;@[if @[(!callflag) then (@;@[%s@]; @;@[%a@]; @;@[%a@] @;@[%a@]; @;@[%s@];)@]@]"
("(callflag := false)")
(fun ppf unit -> pp ppf "@[fprintf outch (\"%s:\")@]" x) ()
(fun ppf unit -> pp ppf
"@[fprintf outch (\"%s#%s,\") ((%s)); @]"
(x ^ "_" ^ "0") "%d" "ith") ()
(fun ppf unit -> pp ppf "@[(try fprintf outch (\"%s#%s\\t\") ((List.nth %s %s)) with _->(fprintf outch (\"\t\")))@]"
(x ^ "_" ^ "r") "%d" xv "ith";) ()
("(callflag := true)")) ()
(fun ppf unit -> pp ppf "@[%s@]" "done"
) ()
(** print the length of *)
(fun ppf unit ->
pp ppf "@;@[in let _ = if (!callflag) then fprintf outch (\"%s:%s\\t\") ((List.length %s)) @]" (x ^ "_l") "%d" xv
) ())
| Fconstr (y,fs,_,_,_) when y = Predef.path_list -> (** only print list length *)
if !(Clflags.reachability) then
()
else
(pp formatter "@;@[%a@]"
(** print the length of *)
(fun ppf unit ->
pp ppf "@;@[in let _ = if (!callflag) then fprintf outch (\"%s:%s\\t\") ((List.length %s)) @]" (x ^ "_l") "%d" xv
) ())
| Fconstr (y,_,_,_,_) when y = Predef.path_array -> (* printing array *)
(* Considering for-all paired array property, array dumping could be complex *)
let arr_pairs = detect_arr_adj_pattern bad_constraint allbindings ( * : sound ?
try
let () = List.find (fun (pe, _) -> pe = pexpr) arr_pair in
with _ ->*)
let print_arr x y z (* x: the array reference; y: the display name of the array; z: index *) =
(pp formatter "@;@[%a %a %a@]"
(fun ppf unit ->
pp ppf "@[in let _ = for ith = 0 to ((Array.length %s)-1) do@]" x
) ()
(fun ppf unit -> pp ppf "@;@[if @[(!callflag) then (@;@[%s@]; @;@[%a@]; @;@[%a@] @;@[%a@]; @;@[%a@]; @;@[%s@];)@]@]"
("(callflag := false)")
(fun ppf unit -> pp ppf "@[fprintf outch (\"%s:\")@]" y) ()
(fun ppf unit -> pp ppf
"@[fprintf outch (\"%s#%s,\") ((%s)); @]"
(z ^ "_" ^ "0") "%d" "ith") ()
(fun ppf unit -> pp ppf "@[(try fprintf outch (\"%s#%s,\") ((%s.(%s))) with _->(fprintf outch (\"\t\")))@]"
(z ^ "_" ^ "r") "%d" x "ith";) ()
(fun ppf unit -> pp ppf "@[(try fprintf outch (\"%s#%s\\t\") ((%s.(%s))) with _->(fprintf outch (\"\t\")))@]"
(z ^ "_" ^ "r'") "%d" x "(ith-1)";) ()
("(callflag := true)")) ()
(fun ppf unit -> pp ppf "@[%s@]" "done"
) ()) in
(** Print the tempt array and result array respectively *)
(print_arr (tempt_arr_prefix^x) (tempt_arr_prefix^x) x; print_arr x (tempt_arr_postfix^x) x;
(pp formatter "@;@[%a@]"
(** print the length of *)
(fun ppf unit ->
pp ppf "@;@[in let _ = if (!callflag) then fprintf outch (\"%s:%s\\t\") ((Array.length %s)) @]" (x ^ "_l") "%d" xv
) ())
)
| Fconstr (y,_,_,_,_) when Hashtbl.mem measures y -> (* User defined data type *)
let measures = Hashtbl.find measures y in
(List.iter (fun (m, recflag) ->
if recflag then
let m = Path.name m in
pp formatter "@;@[in let _ = if (!callflag) then fprintf outch (\"%s:%s\\t\") ((%s)) @]"
(x ^ "_" ^ m) "%d" (m ^ " " ^ xv)
) measures;
(** Print linkability *)
if !(Clflags.reachability) then
pp formatter "@;@[in let _ = if (!callflag) then (fprintf outch (\"%s:\"); ignore (%s); fprintf outch (\"\\t\")) @]"
(x ^ "_heap") ("sample_" ^ (Path.name y) ^ " " ^ xv)
)
| Fconstr (y,_,_,_,_) when Path.same y Predef.path_bool ->
pp formatter "@;@[in let _ = if (!callflag) then fprintf outch (\"%s:%s\\t\") (if (%s) then 1 else 0) @]" x "%d" xv
| Farrow _ -> (** Higher order function return dump *)
(synthesize formatter x bad_constraint (locals) defs allbindings)
* First class value dump
pp formatter "@;@[in let _ = if (!callflag) then fprintf outch (\"%s:%s\\t\") ((%s)) @]" x "%d" xv
| Frecord _ -> assert false
| Ftuple (fs, _) ->
let pats = Array.to_list (Array.init (List.length fs) (fun i ->
Tpat_var (Ident.create_persistent (
x^"."^(string_of_int i)) ))) in
List.iter2 (fun p f ->
ignore(dump formatter env fenv measures p f bad_constraint locals defs allbindings (List.length fs))
) pats fs
| _ -> (** Fixme for more support *)
(pp Format.err_formatter
"Value cannot be dumped for %s \n" x;
flush stderr; assert false)
)
| None -> (print_string "Value cannot be dumped \n"; assert false)
) patfrs; patfrs)
(** dump the function by making up an instruction *)
let rec dump_fun_Info formatter env fenv measures fr bad_constraint locals defs allbindings n_args =
match fr with
| Farrow (Some pat, f1, f2, _) -> (
(** dump the parameter *)
let locals' = dump formatter env fenv measures pat f1 bad_constraint locals defs allbindings n_args in
(** dump the return *)
Common.remove_duplicates
dump_fun_Info formatter env fenv measures f2 bad_constraint locals defs allbindings (n_args-1))
| _ -> (** dump the return only *)
(ignore (dump formatter env fenv measures (Tpat_var (Ident.create "r")) fr bad_constraint locals defs allbindings (n_args+1));
pp formatter "@;@[in let _ = if (!callflag) then fprintf outch (\"\\n\") @]";
pp formatter "@[%s@]" "in r"; n_args)
let myFind tbl compare =
match (Hashtbl.fold (fun k v res -> match res with
| None -> if (compare k = 0) then Some (k, v) else None
| Some _ -> res) tbl None) with
Some r -> r
| None -> assert false
(** add instructions so that array can be copied and dumped *)
let dump_side_effects formatter se_env lookup_fr funlocation funname env fenv =
let (funpath, fr) =
try
let (funpath, _) = Env.lookup_value (Lident funname) env in
let fr = Hashtbl.find se_env.funframebindings funpath in
(funpath, fr)
with Not_found ->
myFind se_env.funframebindings (fun k -> String.compare (Path.name k) funname) in
let allbindings = Frame.get_fun_bindings env fr in
(* If an array is encountered copy it into a temp variable and output the temp *)
let arrs = List.fold_left (fun resarrs (p, f) -> match f with
| Frame.Fconstr (x,_,_,_,_) when x = Predef.path_array -> resarrs @ [p]
| _ -> resarrs
) [] allbindings in
List.iter (fun arr ->
let arrname = Path.name arr in
pp formatter "@[let %s = Array.copy %s in @]" (tempt_arr_prefix^arrname) arrname
) arrs
let complete_funcall_info = Hashtbl.create 3
(** add instructions so that function x can be dumped*)
let dump_fun formatter se_env lookup_fr funlocation funname env fenv exptxt =
if (is_measure se_env funname) then pp formatter "@[%s@]" "in r" else
let _ = Printf.printf " funname=%s\n " funname in
let n_args = count_arguments exptxt in
let _ = pp formatter "@;@[in let _ = if (!callflag) then fprintf outch (\"name:%s\\t\") %s @]"
"%s" ("\"" ^ funname ^ "\"") in
let measures = se_env.measures in
try
let (funpath, _) = Env.lookup_value (Lident funname) env in
Lightenv.find funpath fenv in
Hashtbl.find se_env.funframebindings funpath in
let (bad_constraint, defs) =
try (Hashtbl.find se_env.badbindings funpath,
Hashtbl.find se_env.fundefenvs funpath)
with _ -> (
if !(se_env.dty) then
({pre = Predicate.Not Predicate.True; post = Predicate.Not Predicate.True},
try Hashtbl.find se_env.fundefenvs funpath with _ -> (
Format.fprintf Format.std_formatter "%s unfound@." (Path.name funpath); assert false))
else (Format.fprintf Format.std_formatter "%s unfound@." (Path.name funpath); assert false)) in
let locals = [] in
(** locals are function parameters while
defs are defined in the context of function definition *)
let allbindings = Frame.get_fun_bindings env fr in
let n_args' = dump_fun_Info formatter env fenv measures fr bad_constraint locals defs allbindings n_args in
if (n_args' < 0) then (Hashtbl.replace complete_funcall_info funname n_args)
* Possibly local function not in env or fenv
try let fr = lookup_fr funlocation in
dump_fun_Info formatter env fenv fr with _ - > assert false
dump_fun_Info formatter env fenv fr with _ -> assert false*)
let (funpath, fr) = myFind se_env.funframebindings
(fun k -> String.compare (Path.name k) funname) in
let (bad_constraint, defs) =
try (Hashtbl.find se_env.badbindings funpath,
Hashtbl.find se_env.fundefenvs funpath)
with _ -> (({pre = Predicate.Not Predicate.True; post = Predicate.Not Predicate.True},
try Hashtbl.find se_env.fundefenvs funpath with _ -> assert false)
) in
let locals = [] in
(** locals are function parameters while
defs are defined in the context of function definition *)
let allbindings = Frame.get_fun_bindings env fr in
let n_args' = dump_fun_Info formatter env fenv measures fr bad_constraint locals defs allbindings n_args in
if (n_args' < 0) then (Hashtbl.replace complete_funcall_info funname n_args)
(** add instructions so that function application at apploc can be dumped *)
let dump_app formatter se_env apploc exptxt =
let (fname, n_args) = count_app_arguments exptxt in
if (not (is_measure se_env fname)) (*&& (is_user_function se_env fname)*) then (
let sys_def_vars =
["guard"; "assert_guard"; "pattern"; "hoencoding"; "arraygetencoding";
"fixarrayencoding"; "ho_pattern"; "envrefreshed"; "lstlenencoding"; "dtyencoding"] in
let measures = se_env.measures in
if (Hashtbl.mem se_env.funcallenvs apploc) then
let context = Hashtbl.find se_env.funcallenvs apploc in
(pp formatter "@;@[<hov2>let _ = if (!callflag) then fprintf outch (\"env:%s\\t\") \"%s\" in @]"
"%s" (Location.tostring apploc);
List.iter (fun (path, fr) ->
let pathname = Path.name path in
match fr with
| Fconstr (x,_,_,_,_) when x = Predef.path_unit -> ()
| Fconstr (x,_,_,_,_) when x = Predef.path_array -> ()
| Fconstr (x,_,_,_,_) when Hashtbl.mem measures x -> ()
| Fconstr (x,_,_,_,_) when x = Predef.path_list ->
if (List.for_all (fun var -> (String.compare var pathname != 0)) sys_def_vars) then
pp formatter "@;@[<hov2>let _ = if (!callflag) then fprintf outch (\"%s:%s\\t\") ((List.length %s)) in @]"
(pathname ^ "_l") "%d" pathname
| Farrow _ -> ()
| Fconstr (x,_,_,_,_) when x = Predef.path_bool ->
if (List.for_all (fun var -> (String.compare var pathname != 0)) sys_def_vars) then
pp formatter "@;@[<hov2>let _ = if (!callflag) then fprintf outch (\"%s:%s\\t\") (if (%s) then 1 else 0) in @]"
pathname "%d" pathname
* First class value dump
if (List.for_all (fun var -> (String.compare var pathname != 0)) sys_def_vars) then
pp formatter "@;@[<hov2>let _ = if (!callflag) then fprintf outch (\"%s:%s\\t\") ((%s)) in @]"
pathname "%d" pathname
| Frecord _ | Ftuple _ -> ()
| _ -> (** Fixme for more support *)
(Format.fprintf Format.std_formatter
"Value cannot be dumped for %s \n" pathname;
assert false)
) context;
pp formatter "@;@[<hov2>let _ = if (!callflag) then fprintf outch (\"\\n\") in @]")
else
if (Hashtbl.mem complete_funcall_info fname) then
let n_args' = Hashtbl.find complete_funcall_info fname in
if (n_args = n_args') then
(pp formatter "@;@[<hov2>let _ = if (!callflag) then fprintf outch (\"env:%s\\t\") \"%s\" in @]"
"%s" (Location.tostring apploc);
pp formatter "@;@[<hov2>let _ = if (!callflag) then fprintf outch (\"\\n\") in @]")
else () )
(** Define a local read function for helper *)
let read_log () =
let lines = ref [] in
let chan = open_in log_fname in
try
while true; do
lines := input_line chan :: !lines
done; []
with End_of_file ->
close_in chan;
List.rev !lines ;;
let read_dumpings pos_samples =
let logs = read_log () in
let env_stack = Stack.create () in
List.iter (fun log ->
(*let _ = pp Format.err_formatter
"log = %s @." log in*)
let bindings = Str.split (Str.regexp "[ \t]+") log in
let (namebinding, value_bindings) =
(List.hd bindings, List.tl bindings) in
let _ = pp " namebinding = % s @. " namebinding in
"namebinding = %s @." namebinding in*)
let name = Str.split (Str.regexp ":") namebinding in
let (name_name, name_value) = (List.hd name, List.nth name 1) in
if (String.compare name_name "env" = 0) then
Stack.push (name_value, value_bindings) env_stack
else
List.map ( Str.split ( Str.regexp " : " ) ) para_return_bindings
value_bindings in
let (loc, env_value_bindings) = Stack.pop env_stack in
let loc = loc ^ "_" ^ name_value in
if (Hashtbl.mem pos_samples loc) then
let (_, samples) = Hashtbl.find pos_samples loc in
Hashtbl.replace pos_samples loc (name_value, samples@[(data, env_value_bindings)])
else
Hashtbl.replace pos_samples loc (name_value, [(data, env_value_bindings)])
) logs
| null | https://raw.githubusercontent.com/rowangithub/DOrder/e0d5efeb8853d2a51cc4796d7db0f8be3185d7df/liquid/instrument.ml | ocaml | * Insert sampling statements into the analyzing code
If a function is a measure => do not instrument it
If a function is userdefined => may instrument it
* expression is from text; count how many arguments are specified
* Print out test harnesses if programmers specify them in a separate file
No test harnesses available in a seperate file. Fine.
Must FIXME.
* Dump inductive data structures ...
Type of allparams : (string * type_expr list) list
* printf constructor argument
* printf the process of this constructor
* printf the whole sampling strategy for user inductive data type
Must FIXME.
begin:delete redundant UFs
end
* Give a test to higher order function
* frees --> the input parameters
* args --> the synthesized arguments for the test call
Fixme. The above implementation must be fixed!
* Synthesize the call to a higher order function
* Search symbolic constraint
* search all higher order function constriants
* defines how to print higher order function arguments
The argument should be a int list only => ignore any int int... int list
(Format.fprintf Format.std_formatter "Ill arg: %a@." Predicate.pprint_pexpr arg; assert false)
Fixme. Hard coded.
this argument is not an integer so just forget about it
Fixme. Hard coded.
* arguments should be bound by parameters
and environment that the function is defined
In the meantime free must be an integer.
Printf.fprintf stdout "funname = %s \n" funname;
List.iter (fun free -> Printf.fprintf stdout
"free var %s \n" (Path.name free)) frees;
List.iter (fun (def,_) -> Printf.fprintf stdout
"def var %s \n" (Path.name def)) (locals@defs);
* call higher order function
* the upper and lower bounds of free bounds a loop
Forbid any bound that is same to fun_pexpr. Fix me? Reject all function call based bound?
let _ = List.iter (fun upper -> Format.fprintf Format.std_formatter "upper=%a@." Predicate.pprint_pexpr upper) uppers in
let _ = List.iter (fun lower -> Format.fprintf Format.std_formatter "lower=%a@." Predicate.pprint_pexpr lower) lowers in
* if free is actually a parameter then use it
* filter bounds that are not going out of scope
let _ = Format.fprintf Format.std_formatter "A bound as %a@." Predicate.pprint_pexpr bound in
Although this bound is intended to be eliminated, but it may actually refer to a function paprameter
let b = List.for_all (fun uv -> List.exists (fun (p, _) -> Path.same uv p) allbindings) unboundvars in
* No constraints from post-condition; try to directly call it
*--------- check each argument to make sure it actually can be called -----------------
If the function return is not an integer type, then just print 0
*--------------------------------------------------------------------------------------
let _ = Format.fprintf Format.std_formatter "record_name = %s@." name in
let _ = List.iter (fun name -> Format.fprintf Format.std_formatter "split_record_name=%s@." name) names in
* dump any interesting parameter or return construct
* Ocaml does not support tuple selection. n_args is used for the len of the tuple. So x.0 --> (fun (x,_) -> x) x
printing list
* Print linkability; Should consider if the content of the list is another container
* print the length of
* only print list length
* print the length of
printing array
Considering for-all paired array property, array dumping could be complex
x: the array reference; y: the display name of the array; z: index
* Print the tempt array and result array respectively
* print the length of
User defined data type
* Print linkability
* Higher order function return dump
* Fixme for more support
* dump the function by making up an instruction
* dump the parameter
* dump the return
* dump the return only
* add instructions so that array can be copied and dumped
If an array is encountered copy it into a temp variable and output the temp
* add instructions so that function x can be dumped
* locals are function parameters while
defs are defined in the context of function definition
* locals are function parameters while
defs are defined in the context of function definition
* add instructions so that function application at apploc can be dumped
&& (is_user_function se_env fname)
* Fixme for more support
* Define a local read function for helper
let _ = pp Format.err_formatter
"log = %s @." log in |
open Parsetree
open Typedtree
open Types
open Frame
open Longident
open Backwalker
let pp = Format.fprintf
let log_fname = "mllog"
let tempt_arr_prefix = "tmp_pre"
let tempt_arr_postfix = "temp_post"
let is_measure se_env funname =
(String.compare funname "List.length" = 0) ||
(Hashtbl.fold (fun _ ms res ->
if (res) then res
else
List.exists (fun (m, _) -> String.compare funname (Path.name m) = 0) ms
) se_env.measures false)
let is_user_function se_env funname =
Hashtbl.fold (fun f _ res ->
if (res) then res
else String.compare funname (Path.name f) = 0
) se_env.funframebindings false
let rec count_arguments expression = match expression.pexp_desc with
| Parsetree.Pexp_function (lbl, elbl, exp) ->
let (p, e) = List.hd exp in
if lbl="" then
match e.pexp_desc with
| Pexp_when _ -> 0
| _ -> 1 + count_arguments e
else 1 + count_arguments e
| _ -> 0
let count_app_arguments expression =
let rec fold_string_list res fs = match fs with
| [] -> res
| [a] -> res ^ a
| a::l -> fold_string_list (res ^ a ^ ".") l in
match expression.pexp_desc with
| Parsetree.Pexp_apply (e, l) -> (match e.pexp_desc with
| Parsetree.Pexp_ident id ->
let names = Longident.flatten id in
(fold_string_list "" names, List.length l)
| _ -> assert false)
| _ -> assert false
let is_base_typed f = match f with
| Frame.Farrow _ -> false
| Frame.Frecord _ -> false
| Frame.Ftuple _ -> false
| Frame.Fconstr (x,_,_,_,_) when x = Predef.path_list -> false
| f -> true
* Print out min and functions for boundary of higher order functions
let print_min_max formatter =
(pp formatter
"@;@[<hov2>let@ min l =@ List.fold_left (fun res li -> min res li) (-1) l @]@.";
pp formatter
"@;@[<hov2>let@ max l =@ List.fold_left (fun res li -> max res li) 0 l @]@.")
let print_harnesses fname formatter =
let fname = fname ^ "_harness" in
try
let lines = ref [] in
let chan = open_in fname in
let _ = try
while true; do
lines := input_line chan :: !lines
done
with End_of_file ->
close_in chan in
let lines = List.rev !lines in
List.iter (fun line ->
pp formatter "@;@[<hov2>%s@;@]" line;
) lines
let is_recursive_type path dec =
let allparams = match dec.type_kind with
| Type_variant decs -> decs
| kind -> assert false in
List.exists (fun (cstrname, params) ->
List.exists (fun param ->
| Tconstr (p, _, _) when (Path.same p path) -> true
| Tconstr (p, _, _) when (Path.same p Predef.path_list) -> true
| _ -> false
) params
) allparams
let print_udt types udt_table ppf =
Hashtbl.iter (fun path declaration ->
if List.exists (fun ty ->
String.compare ty (Path.name path) = 0) types &&
is_recursive_type path declaration then
let allparams = match declaration.type_kind with
| Type_variant decs -> decs
| kind -> assert false in
let log_cstr_params n =
let _ = if (n > 0) then (pp ppf "(") in
let args = Array.init n (fun i -> "t_"^(string_of_int i)) in
let _ = Array.iteri (fun i arg ->
if (i < n - 1) then pp ppf "%s, " arg
else pp ppf "%s" arg
) args in
if (n > 0) then pp ppf ")" in
let log_cstr_process cstrname links values =
if (links = []) then
(if (values = []) then pp ppf "@; @[None@]"
else
let value = List.hd values in
(pp ppf "@; @[(fprintf outch (\"%s#%d#%s;%s,\") t_%d (-1000); @]" cstrname (-1) "%d" "%d" value;
pp ppf "@; @[Some t_%d)@]" value))
else
(List.iter (fun (link, p) ->
if (Path.same p Predef.path_list) then
(pp ppf "@; @[let ele_%d = @;@[%a@] in @]"
link
(fun ppf unit ->
pp ppf "@[List.fold_right (fun v res -> @;@[%a@] @;@[%a@]) t_%d (-1000) @]"
(fun ppf unit ->
pp ppf "@; @[let ele = sample_%s v in @]" (Path.name path)
) ()
(fun ppf unit ->
pp ppf "@; @[match ele with @. | None -> res @. | Some ele -> (if (!callflag) then fprintf outch (\"Cons#1#%s;%s,\") ele res; ele) @]" "%d" "%d"
) ()
link
) ();
pp ppf "@; @[let ele_%d = if ele_%d = (-1000) then None else Some ele_%d in @]" link link link
)
else pp ppf "@; @[let ele_%d = sample_%s t_%d in @]" link (Path.name path) link
) links;
if (values = [] && links <> []) then (
List.iter (fun (link, _) ->
pp ppf "@; @[let _ = match ele_%d with @. | None -> if (!callflag) then fprintf outch (\"%s#%d#%s;%s,\") %s (-1000) @. | Some ele_%d -> @. if (!callflag) then fprintf outch (\"%s#%d#%s;%s,\") %s ele_%d in @]"
link
(cstrname) (link) "%d" "%d" "(!xxxcounter)"
link (cstrname) (link) "%d" "%d" "(!xxxcounter)" link
) links;
pp ppf "@; @[let c = (!xxxcounter) in (xxxcounter := c+1; Some c)@]"
)
else (
List.iter (fun value ->
List.iter (fun (link, _) ->
pp ppf "@; @[let _ = match ele_%d with @. | None -> if (!callflag) then fprintf outch (\"%s#%d#%s;%s,\") t_%d (-1000) @. | Some ele_%d -> @. if (!callflag) then fprintf outch (\"%s#%d#%s;%s,\") t_%d ele_%d in @]"
link
(cstrname) (link) "%d" "%d" value
link (cstrname) (link) "%d" "%d" value link
) links
) values;
if (values = []) then pp ppf "@[None@]"
else pp ppf "@; @[(Some t_%d)@]" (List.hd values)) ) in
pp ppf "@;@[%a %a @]"
(fun ppf unit -> pp ppf "@[<hov2>let rec sample_%s t = @. match t with @]" (Path.name path)) ()
pp ppf " @;@[if @[(!callflag ) then ( @;@[%s@ ] ; @;@[%a@ ] ; @;@[%a@ ] @;@[%a@ ] ; @;@[%s@];)@]@ ] "
List.iter (fun (cstrname, params) ->
let (links, values, _) = List.fold_left (fun (links, values, index) param ->
| Tconstr (p, _, _) when (Path.same p path) -> (links @ [(index, p)], values, index+1)
| Tconstr (p, tys, _) when (Path.same p Predef.path_list) ->
(let _ = assert ((List.length tys) = 1) in
let ty = List.hd tys in
match ty.desc with
| Tconstr _ -> (links @ [(index, p)], values, index+1)
| _ -> (links, values, index+1))
| Tconstr _ -> (links, values, index+1)
| _ -> (links, values @ [index], index+1)
) ([], [], 0) params in
let n = List.length params in
pp ppf "@; @[| %s %a -> @. %a @]"
cstrname
(fun ppf unit -> ignore (log_cstr_params n)) ()
(fun ppf unit -> ignore (log_cstr_process cstrname links values)) ()
) allparams ) ()
) udt_table
* funpexpr is of f x include x or y if any of them free .
* for each free var , return its upper and lower bound
* for each free var, return its upper and lower bound
*)
let random_value freevars pred =
let lowers = (Hashtbl.create 5) in
let uppers = (Hashtbl.create 5) in
let _ = List.iter (fun freevar ->
Hashtbl.replace lowers freevar [];
Hashtbl.replace uppers freevar []
) freevars in
let _ = Predicate.map_pred (fun pred -> match pred with
| Predicate.Atom (Predicate.Var var, Predicate.Gt, bound) ->
(if (List.exists (fun freevar -> Path.same freevar var) freevars) then
Hashtbl.replace lowers var (bound::(Hashtbl.find lowers var));
pred)
| Predicate.Atom (Predicate.Var var, Predicate.Ge, bound) ->
(if (List.exists (fun freevar -> Path.same freevar var) freevars) then
Hashtbl.replace lowers var (bound::(Hashtbl.find lowers var));
pred)
| Predicate.Atom (bound, Predicate.Gt, Predicate.Var var) ->
(if (List.exists (fun freevar -> Path.same freevar var) freevars) then
Hashtbl.replace uppers var (bound::(Hashtbl.find uppers var));
pred)
| Predicate.Atom (bound, Predicate.Ge, Predicate.Var var) ->
(if (List.exists (fun freevar -> Path.same freevar var) freevars) then
Hashtbl.replace uppers var (bound::(Hashtbl.find uppers var));
pred)
| Predicate.Atom (Predicate.Var var, Predicate.Lt, bound) ->
(if (List.exists (fun freevar -> Path.same freevar var) freevars) then
Hashtbl.replace uppers var (bound::(Hashtbl.find uppers var));
pred)
| Predicate.Atom (Predicate.Var var, Predicate.Le, bound) ->
(if (List.exists (fun freevar -> Path.same freevar var) freevars) then
Hashtbl.replace uppers var (bound::(Hashtbl.find uppers var));
pred)
| Predicate.Atom (bound, Predicate.Lt, Predicate.Var var) ->
(if (List.exists (fun freevar -> Path.same freevar var) freevars) then
Hashtbl.replace lowers var (bound::(Hashtbl.find lowers var));
pred)
| Predicate.Atom (bound, Predicate.Le, Predicate.Var var) ->
(if (List.exists (fun freevar -> Path.same freevar var) freevars) then
Hashtbl.replace lowers var (bound::(Hashtbl.find lowers var));
pred)
| _ -> pred
) pred in
(lowers, uppers)
let rec delete_redundant_UF es =
match es with
| (Predicate.FunApp ("UF", es')) :: es ->
let es = delete_redundant_UF es in
let (f, args) = (List.hd es', List.tl es') in
(match f with
| Predicate.Var f -> (Predicate.FunApp (Path.name f, args)) :: es
| _ -> assert false)
| e::es -> e::(delete_redundant_UF es)
let make_test localdefs fr =
let localdefs = try List.remove_assoc (Frame.returnpath) localdefs
with _ -> assert false in
let rec frame_equals f1 f2 = match (f1, f2) with
| (Frame.Farrow (_, f1, f2, _), Frame.Farrow (_, f1', f2', _)) ->
(frame_equals f1 f1' && frame_equals f2 f2')
| (Frame.Fconstr (p,fs,_,_,_), Frame.Fconstr (p',fs',_,_,_)) ->
(Path.same p p') && (List.length fs == List.length fs') &&
(List.for_all2 (fun f f' -> frame_equals f f') fs fs')
| (Frame.Frecord _, Frame.Frecord _) -> true
| (Frame.Ftuple _, Frame.Ftuple _) -> true
| (Frame.Fvar _, Frame.Fvar _) -> true
let rec loop flag frees args rfrs fr = match fr with
| Frame.Farrow (_, f1, f2, _) ->
if (flag) then
let (frees, args, rfrs) = loop false frees args rfrs f1 in
(match f2 with
| Frame.Farrow _ -> loop true frees args rfrs f2
| fr -> (frees, args, [fr]))
else
let (v, _) =
try List.find (fun (_, ld) -> frame_equals ld fr) localdefs
with _ -> (assert false) in
(frees, args@[v], rfrs)
| Frame.Fvar _ ->
let v = Path.mk_ident ("v" ^ string_of_int (List.length frees)) in
(frees@[v], args@[v], rfrs)
| Frame.Fconstr (x,_,_,_,_) when x = Predef.path_int ->
let v = Path.mk_ident ("v" ^ string_of_int (List.length frees)) in
(frees@[v], args@[v], rfrs)
| Frame.Fconstr (x,_,_,_,_) when x = Predef.path_unit ->
(frees, args@[Path.mk_ident "unit"], rfrs)
| Frame.Ftuple (fs, _) ->
List.fold_left (fun (frees, args, rfrs) fr ->
loop flag frees args rfrs fr
) (frees, args, rfrs) fs
| Frame.Frecord (_,fs,_) ->
List.fold_left (fun (frees, args, rfrs) (fr,_,_) ->
loop flag frees args rfrs fr
) (frees, args, rfrs) fs
| _ ->
let (v, _) = List.find (fun (_, ld) -> frame_equals ld fr) localdefs in
(frees, args@[v], rfrs) in
let (frees, args, rfrs) = loop true [] [] [] fr in
(frees, List.map (fun arg -> Predicate.Var arg) args, List.hd rfrs)
let synthesize formatter funname bad_constraint locals defs allbindings =
let fun_stack = ref [] in
let _ = Predicate.map_expr (fun pexpr -> match pexpr with
| Predicate.FunApp (fn, es) when (String.compare fn "UF" = 0) ->
let funpath = List.hd es in
(match funpath with
| Predicate.Var funpath ->
if (String.compare funname (Path.name funpath) = 0) then
try
let (_,fr) = List.find (fun (p,f) -> Path.same p funpath) allbindings in
let n_args = Frame.count_args fr in
let n_actuals = (List.length es)-1 in
if (n_actuals >= n_args) then
(fun_stack := pexpr::(!fun_stack); pexpr)
else pexpr
with _ -> assert false
else pexpr
| _ -> (assert false))
| _ -> pexpr
let log_args formatter args =
List.fold_left (fun index arg -> match arg with
| Predicate.FunApp (fn, es) when (String.compare fn "UF" = 0) -> assert false
let funpath = List.hd es in
( match funpath with
| Predicate .
( try
let ( _ , fr ) = ( fun ( p , f ) - > Path.same p funpath ) ( allbindings ) in
let n_args = Frame.count_args fr in
let n_actuals = ( es)-1 in
if ( n_actuals > = n_args ) then
let _ = pp formatter
" @[(try fprintf outch ( \"%s#%s,\ " ) ( ( % a ) ) with _ - > ( ) ) ; @ ] "
( funname ^ " _ " ^ ( string_of_int index ) ) " % d " Predicate.pprint_pexpr arg in
( index+1 )
else ( index+1 )
with _ - > assert false )
| _ - > ( Format.fprintf Format.std_formatter " Fail to deal with % a@. " Predicate.pprint_pexpr arg ; assert false ) )
(match funpath with
| Predicate.Var funpath ->
(try
let (_, fr) = List.find (fun (p,f) -> Path.same p funpath) (allbindings) in
let n_args = Frame.count_args fr in
let n_actuals = (List.length es)-1 in
if (n_actuals >= n_args) then
let _ = pp formatter
"@[(try fprintf outch (\"%s#%s,\") ((%a)) with _->()); @]"
(funname ^ "_" ^ (string_of_int index)) "%d" Predicate.pprint_pexpr arg in
(index+1)
else (index+1)
with _ -> assert false)
| _ -> (Format.fprintf Format.std_formatter "Fail to deal with %a@." Predicate.pprint_pexpr arg; assert false))*)
| Predicate.FunApp (fn, es) when (String.compare fn "List.hd" = 0 || String.compare fn "List.tl" = 0)
let vars = Predicate.exp_vars arg in
let b = List.for_all (fun var ->
try
let (_, fr) = List.find (fun (p,f) -> Path.same p var) (allbindings) in
match fr with
| Frame.Fconstr (x,fs,_,_,_) when x = Predef.path_list -> is_base_typed (List.hd fs)
| fr -> assert false
with _ -> assert false
) vars in
if (b) then
let _ = pp formatter
"@[(try fprintf outch (\"%s#%s,\") ((%a)) with _->()); @]"
(funname ^ "_" ^ (string_of_int index)) "%d" Predicate.pprint_pexpr arg in
(index+1)
else (index + 1)
| Predicate.FunApp (fn, es) when (String.compare fn "List.length" = 0) ->
let _ = pp formatter
"@[(try fprintf outch (\"%s#%s,\") ((%a)) with _->()); @]"
(funname ^ "_" ^ (string_of_int index)) "%d" Predicate.pprint_pexpr arg in
(index+1)
| Predicate.FunApp (funpath, es) ->
((try
let (_, fr) = List.find (fun (p,f) -> String.compare (Path.name p) funpath = 0) (allbindings) in
let n_args = Frame.count_args fr in
let n_actuals = (List.length es) in
if (n_actuals >= n_args) then
let _ = pp formatter
"@[(try fprintf outch (\"%s#%s,\") ((%a)) with _->()); @]"
(funname ^ "_" ^ (string_of_int index)) "%d" Predicate.pprint_pexpr arg in
(index+1)
else (index+1)
with _ -> assert false))
| _ ->
let vars = Predicate.exp_vars arg in
let b = List.for_all (fun var ->
try
(let (_, fr) = List.find (fun (p,f) -> Path.same p var) (allbindings) in
match fr with
| Frame.Fconstr (x,_,_,_,_) when (x = Predef.path_unit) -> false
| Frame.Farrow _ -> false
| _ -> true)
(if (String.compare (Path.name var) "unit" = 0) then false
else true)
) vars in
if (b) then
let _ = pp formatter
"@[fprintf outch (\"%s#%s,\") ((%a)); @]"
(funname ^ "_" ^ (string_of_int index)) "%d" Predicate.pprint_pexpr arg in
(index+1)
) 0 args in
let print_callargs formatter args =
| Predicate.Var p when (String.compare (Path.name p) "unit" = 0) ->
pp formatter " %s" "()"
| _ ->
pp formatter " %a" Predicate.pprint_pexpr arg
) args in
let fun_stack = Common.remove_duplicates !fun_stack in
let _ = Format.fprintf Format.std_formatter " ---- \n " in
let _ = List.iter ( fun f - >
( Format.fprintf Format.std_formatter " % a is in funstack ! Predicate.pprint_pexpr f )
) fun_stack in
let _ = List.iter ( fun ( p , _ ) - > Format.fprintf Format.std_formatter " def % s@. " ( Path.unique_name p ) ) ( defs ) in
let _ = List.iter ( fun ( p , _ ) - > Format.fprintf Format.std_formatter " local % s@. " ( Path.unique_name p ) ) ( locals ) in
let _ = Format.fprintf Format.std_formatter " ---- \n " in
let _ = List.iter (fun f ->
(Format.fprintf Format.std_formatter "%a is in funstack! \n" Predicate.pprint_pexpr f)
) fun_stack in
let _ = List.iter (fun (p, _) -> Format.fprintf Format.std_formatter "def %s@." (Path.unique_name p)) (defs) in
let _ = List.iter (fun (p, _) -> Format.fprintf Format.std_formatter "local %s@." (Path.unique_name p)) (locals) in
let _ = Format.fprintf Format.std_formatter "---- \n" in*)
if (List.length fun_stack > 0) then
List.iter (fun fun_pexpr -> match fun_pexpr with
| Predicate.FunApp ("UF", _::callargs) ->
let callargs = delete_redundant_UF callargs in
let frees = List.fold_left (fun freeset callarg ->
* may contain some variable not in scope ; generate int for them
let callargvars = Predicate.exp_vars callarg in
let frees = List.filter (fun callargvar ->
List.for_all (fun (def,_) -> not (Path.same def callargvar)) (locals@defs) &&
Question : how about we have a free that is a list ? ! !
(try
let (_, fr) = List.find (fun (p, _) -> Path.same callargvar p) allbindings in
(match fr with
| Frame.Fconstr (p,_,_,_,_) when (Path.same p Predef.path_int) -> true
| Frame.Fvar _ -> true | _ -> false)
with _ -> true)
) callargvars in
if (List.length frees = 0) then freeset
else (
freeset @ frees)
) [] callargs in
if (List.length frees > 0) then
let (lowers, uppers) = random_value frees bad_constraint.post in
pp formatter "@;@[%a %a %a@]"
(fun ppf unit ->
(List.iter (fun free ->
let uppers = Common.remove_duplicates (Hashtbl.find uppers free) in
let lowers = Common.remove_duplicates (Hashtbl.find lowers free) in
let uppers = List.filter (fun upper -> not (upper = fun_pexpr)) uppers in
let lowers = List.filter (fun lower -> not (lower = fun_pexpr)) lowers in
let int_locals = Common.map_partial (fun (v, f) -> match f with
| Frame.Fvar _ -> Some v
| Frame.Fconstr (p,_,_,_,_) when Path.same p Predef.path_int -> Some v
| _ -> None
) locals in
let printbounds flag formatter bounds =
if (List.length bounds = 0) then
if (List.exists (fun (p, _) -> Path.same p free) allbindings &&
not (List.exists (fun p -> Path.same p free) int_locals)) then
Common.pprint_list "; " (fun formatter path ->
pp formatter "%s" (Path.name path)
) formatter (free::int_locals)
else
Common.pprint_list "; " (fun formatter path ->
pp formatter "%s" (Path.name path)
) formatter int_locals
else
let printbound formatter bound =
let boundvars = Predicate.exp_vars bound in
let unboundvars = List.filter (fun boundvar ->
List.for_all (fun (def,_) -> not (Path.same def boundvar)) (locals @ defs)
) boundvars in
if (List.length unboundvars = 0) then
Predicate.pprint_pexpr formatter bound
else
let fn = if flag then "max" else "min" in
let args = List.fold_left (fun res arg -> res^(Path.name arg)^";") "" int_locals in
let unbound_substs = List.map (fun ubv ->
(ubv, Predicate.Var (Path.Pident (Ident.create_persistent (fn ^ "["^ args ^"]")))
)) unboundvars in
Predicate.pprint_pexpr formatter (
Predicate.exp_apply_substs unbound_substs bound) in
Common.pprint_list "; " printbound formatter bounds in
pp ppf "@[in let _ = for %s = min([%a]) to max([%a]) do@]"
(Path.name free) (printbounds false) lowers (printbounds true) uppers
) frees)) ()
" @[<hv0>@[<2 > if@ % a@]@;@[<2 > then@ % a@]%a@ ] "
(fun ppf unit -> pp ppf "@;@[if @[(!callflag) then (@;@[%s@]; @;@[%a@]; @;@[%a@] @;@[%a@]; @;@[%s@];)@]@]"
("(callflag := false)")
(fun ppf unit -> pp ppf "@[fprintf outch (\"%s:\")@]" funname) ()
(fun ppf unit -> ignore(log_args ppf callargs)) ()
(fun ppf unit -> pp ppf "@[(try fprintf outch (\"%s#%s\\t\") ((%s%a)) with _->(fprintf outch (\"\t\")))@]"
(funname ^ "_" ^ "r") "%d" funname (print_callargs) callargs;) ()
("(callflag := true)")) ()
(fun ppf unit ->
List.iter (fun _ -> pp ppf "@[%s@]" "done") frees
) ()
else
let _ = pp formatter "@;@[in let _ = if (!callflag) then ((callflag := false); @]" in
let _ = pp formatter "@;@[fprintf outch (\"%s:\"); @]" funname in
let _ = log_args formatter callargs in (
pp formatter
"@;@[(try fprintf outch (\"%s#%s\\t\") ((%s%a)) with _->(fprintf outch (\"\t\"))); @]"
(funname ^ "_" ^ "r") "%d" funname (print_callargs) callargs;
pp formatter
"@;@[(callflag := true)) @]")
| _ -> assert false
) fun_stack
let (_, fr) = List.find (fun (p,f) -> String.compare (Path.name p) funname = 0) (allbindings) in
try let (frees, callargs, rfr) = make_test (allbindings@defs) fr in
pp formatter "@;@[in let _ = %a %a %a@]"
(fun ppf unit ->
(List.iter (fun free ->
let int_locals = Common.map_partial (fun (v, f) -> match f with
| Frame.Fvar _ -> Some v
| Frame.Fconstr (p,_,_,_,_) when Path.same p Predef.path_int -> Some v
| _ -> None
) (locals@defs) in
let printbounds flag formatter bounds =
. , 1 should be min and int used in the program
if (List.length bounds = 0) then
if (flag) then pp formatter "1"
else pp formatter "-1"
else
if (flag) then
Common.pprint_list "; " (fun formatter path ->
pp formatter "%s+1" (Path.name path)
) formatter bounds
else
Common.pprint_list "; " (fun formatter path ->
pp formatter "%s-1" (Path.name path)
) formatter bounds in
pp ppf "@[for %s = min([%a]) to max([%a]) do @]"
(Path.name free) (printbounds false) int_locals (printbounds true) int_locals
) frees)) ()
" @[<hv0>@[<2 > if@ % a@]@;@[<2 > then@ % a@]%a@ ] "
(fun ppf unit -> pp ppf "@;@[if @[(!callflag) then (@;@[%s@]; @;@[%a@]; @;@[%a@] @;@[%a@]; @;@[%s@];)@]@]"
("(callflag := false)")
(fun ppf unit -> pp ppf "@[fprintf outch (\"%s:\")@]" funname) ()
(fun ppf unit -> ignore(log_args ppf callargs)) ()
match rfr with
| Frame.Fconstr (x,_,_,_,_) when x = Predef.path_int ->
pp ppf "@[(try fprintf outch (\"%s#%s\\t\") ((%s%a)) with _->(fprintf outch (\"\t\")))@]"
(funname ^ "_" ^ "r") "%d" funname (print_callargs) callargs
| frame ->
pp ppf "@[(try fprintf outch (\"%s#%s\\t\") ((%s%a); 0) with _->(fprintf outch (\"\t\")))@]"
(funname ^ "_" ^ "r") "%d" funname (print_callargs) callargs) ()
("(callflag := true)")) ()
(fun ppf unit ->
List.iter (fun _ -> pp ppf "@[%s@]" "done ") frees
) () with _ -> ()
let is_base_typed f = match f with
| Frame.Farrow _ -> false
| Frame.Frecord _ -> false
| Frame.Ftuple _ -> false
| Frame . false
| Frame.Fconstr (x,_,_,_,_) when x = Predef.path_list -> false
| f -> true
let decode_record name n =
let names = Str.split (Str.regexp "\\.") name in
if (List.length names < 2) then name
else
let (name, fields) = (List.hd names, List.tl names) in
List.fold_left (fun res field ->
try
let proj = int_of_string field in
let is = Array.to_list (Array.init n (fun i -> i)) in
let arg = List.fold_left (fun res i ->
let res = if (i = proj) then res ^ "x" else res ^ "_" in
if (i = n - 1) then res else res ^ ","
) "" is in
"((fun (" ^ arg ^") -> x) (" ^ res ^ "))"
with _ -> res ^ "." ^ field
) name fields
let rec dump formatter env fenv measures pat frame bad_constraint locals defs allbindings n_args =
let patfrs = bind env pat frame in
(if n_args > 0 then List.iter (fun (path, fr) ->
let pathname = Path.ident_name path in
match pathname with
| Some x -> (
let xv = decode_record x n_args in
match fr with
| Fconstr (y,_,_,_,_) when y = Predef.path_unit -> ()
if !(Clflags.reachability) then
match (List.hd fs) with
| Fconstr (y,_,_,_,_) when Hashtbl.mem measures y ->
pp formatter "@;@[%a %a %a %a@]"
(fun ppf unit -> pp ppf "@[in let _ = fprintf outch (\"%s:\");@]" (x^"_heap")) ()
(fun ppf unit -> pp ppf "@[ignore (List.fold_right (fun v res -> @;@[%a@] @;@[%a@]) %s (-1000)) @]"
(fun ppf unit ->
pp ppf "@; @[let ele = sample_%s v in @]" (Path.name y)
) ()
(fun ppf unit ->
pp ppf "@; @[match ele with @. | None -> res @. | Some ele -> (if (!callflag) then fprintf outch (\"Cons#1#%s;%s,\") ele res; ele) @]" "%d" "%d"
) ()
xv) ()
(fun ppf unit -> pp ppf "@[in let _ = fprintf outch (\"\t\")@]") ()
(fun ppf unit ->
pp ppf "@;@[in let _ = if (!callflag) then fprintf outch (\"%s:%s\\t\") ((List.length %s)) @]" (x ^ "_l") "%d" xv
) ()
| _ ->
(pp formatter "@;@[%a %a %a %a %a %a@]"
(fun ppf unit -> pp ppf "@[in let _ = fprintf outch (\"%s:\");@]" (x^"_heap")) ()
(fun ppf unit ->
pp ppf "@[in let _ = for ith = 0 to ((List.length %s)-1) do@]" xv
) ()
(fun ppf unit -> pp ppf "@;@[if @[(!callflag) then (@;@[%s@]; @;@[%a@]; @;@[%s@];)@]@]"
("(callflag := false)")
(fun ppf unit -> pp ppf "@[(try fprintf outch (\"Cons#1#%s;%s,\") ((List.nth %s (%s))) ((List.nth %s (%s))) with _->((fprintf outch (\"Cons#1#%s;-1000\") ((List.nth %s (%s))) )))@]"
"%d" "%d" xv "ith" xv "ith+1" "%d" xv "ith";) ()
("(callflag := true)")) ()
(fun ppf unit -> pp ppf "@[%s@]" "done;"
) ()
(fun ppf unit -> pp ppf "@[in let _ = fprintf outch (\"\t\")@]") ()
(fun ppf unit ->
pp ppf "@;@[in let _ = if (!callflag) then fprintf outch (\"%s:%s\\t\") ((List.length %s)) @]" (x ^ "_l") "%d" xv
) ())
else
(pp formatter "@;@[%a %a %a %a@]"
(fun ppf unit ->
pp ppf "@[in let _ = for ith = 0 to ((List.length %s)-1) do@]" xv
) ()
(fun ppf unit -> pp ppf "@;@[if @[(!callflag) then (@;@[%s@]; @;@[%a@]; @;@[%a@] @;@[%a@]; @;@[%s@];)@]@]"
("(callflag := false)")
(fun ppf unit -> pp ppf "@[fprintf outch (\"%s:\")@]" x) ()
(fun ppf unit -> pp ppf
"@[fprintf outch (\"%s#%s,\") ((%s)); @]"
(x ^ "_" ^ "0") "%d" "ith") ()
(fun ppf unit -> pp ppf "@[(try fprintf outch (\"%s#%s\\t\") ((List.nth %s %s)) with _->(fprintf outch (\"\t\")))@]"
(x ^ "_" ^ "r") "%d" xv "ith";) ()
("(callflag := true)")) ()
(fun ppf unit -> pp ppf "@[%s@]" "done"
) ()
(fun ppf unit ->
pp ppf "@;@[in let _ = if (!callflag) then fprintf outch (\"%s:%s\\t\") ((List.length %s)) @]" (x ^ "_l") "%d" xv
) ())
if !(Clflags.reachability) then
()
else
(pp formatter "@;@[%a@]"
(fun ppf unit ->
pp ppf "@;@[in let _ = if (!callflag) then fprintf outch (\"%s:%s\\t\") ((List.length %s)) @]" (x ^ "_l") "%d" xv
) ())
let arr_pairs = detect_arr_adj_pattern bad_constraint allbindings ( * : sound ?
try
let () = List.find (fun (pe, _) -> pe = pexpr) arr_pair in
with _ ->*)
(pp formatter "@;@[%a %a %a@]"
(fun ppf unit ->
pp ppf "@[in let _ = for ith = 0 to ((Array.length %s)-1) do@]" x
) ()
(fun ppf unit -> pp ppf "@;@[if @[(!callflag) then (@;@[%s@]; @;@[%a@]; @;@[%a@] @;@[%a@]; @;@[%a@]; @;@[%s@];)@]@]"
("(callflag := false)")
(fun ppf unit -> pp ppf "@[fprintf outch (\"%s:\")@]" y) ()
(fun ppf unit -> pp ppf
"@[fprintf outch (\"%s#%s,\") ((%s)); @]"
(z ^ "_" ^ "0") "%d" "ith") ()
(fun ppf unit -> pp ppf "@[(try fprintf outch (\"%s#%s,\") ((%s.(%s))) with _->(fprintf outch (\"\t\")))@]"
(z ^ "_" ^ "r") "%d" x "ith";) ()
(fun ppf unit -> pp ppf "@[(try fprintf outch (\"%s#%s\\t\") ((%s.(%s))) with _->(fprintf outch (\"\t\")))@]"
(z ^ "_" ^ "r'") "%d" x "(ith-1)";) ()
("(callflag := true)")) ()
(fun ppf unit -> pp ppf "@[%s@]" "done"
) ()) in
(print_arr (tempt_arr_prefix^x) (tempt_arr_prefix^x) x; print_arr x (tempt_arr_postfix^x) x;
(pp formatter "@;@[%a@]"
(fun ppf unit ->
pp ppf "@;@[in let _ = if (!callflag) then fprintf outch (\"%s:%s\\t\") ((Array.length %s)) @]" (x ^ "_l") "%d" xv
) ())
)
let measures = Hashtbl.find measures y in
(List.iter (fun (m, recflag) ->
if recflag then
let m = Path.name m in
pp formatter "@;@[in let _ = if (!callflag) then fprintf outch (\"%s:%s\\t\") ((%s)) @]"
(x ^ "_" ^ m) "%d" (m ^ " " ^ xv)
) measures;
if !(Clflags.reachability) then
pp formatter "@;@[in let _ = if (!callflag) then (fprintf outch (\"%s:\"); ignore (%s); fprintf outch (\"\\t\")) @]"
(x ^ "_heap") ("sample_" ^ (Path.name y) ^ " " ^ xv)
)
| Fconstr (y,_,_,_,_) when Path.same y Predef.path_bool ->
pp formatter "@;@[in let _ = if (!callflag) then fprintf outch (\"%s:%s\\t\") (if (%s) then 1 else 0) @]" x "%d" xv
(synthesize formatter x bad_constraint (locals) defs allbindings)
* First class value dump
pp formatter "@;@[in let _ = if (!callflag) then fprintf outch (\"%s:%s\\t\") ((%s)) @]" x "%d" xv
| Frecord _ -> assert false
| Ftuple (fs, _) ->
let pats = Array.to_list (Array.init (List.length fs) (fun i ->
Tpat_var (Ident.create_persistent (
x^"."^(string_of_int i)) ))) in
List.iter2 (fun p f ->
ignore(dump formatter env fenv measures p f bad_constraint locals defs allbindings (List.length fs))
) pats fs
(pp Format.err_formatter
"Value cannot be dumped for %s \n" x;
flush stderr; assert false)
)
| None -> (print_string "Value cannot be dumped \n"; assert false)
) patfrs; patfrs)
let rec dump_fun_Info formatter env fenv measures fr bad_constraint locals defs allbindings n_args =
match fr with
| Farrow (Some pat, f1, f2, _) -> (
let locals' = dump formatter env fenv measures pat f1 bad_constraint locals defs allbindings n_args in
Common.remove_duplicates
dump_fun_Info formatter env fenv measures f2 bad_constraint locals defs allbindings (n_args-1))
(ignore (dump formatter env fenv measures (Tpat_var (Ident.create "r")) fr bad_constraint locals defs allbindings (n_args+1));
pp formatter "@;@[in let _ = if (!callflag) then fprintf outch (\"\\n\") @]";
pp formatter "@[%s@]" "in r"; n_args)
let myFind tbl compare =
match (Hashtbl.fold (fun k v res -> match res with
| None -> if (compare k = 0) then Some (k, v) else None
| Some _ -> res) tbl None) with
Some r -> r
| None -> assert false
let dump_side_effects formatter se_env lookup_fr funlocation funname env fenv =
let (funpath, fr) =
try
let (funpath, _) = Env.lookup_value (Lident funname) env in
let fr = Hashtbl.find se_env.funframebindings funpath in
(funpath, fr)
with Not_found ->
myFind se_env.funframebindings (fun k -> String.compare (Path.name k) funname) in
let allbindings = Frame.get_fun_bindings env fr in
let arrs = List.fold_left (fun resarrs (p, f) -> match f with
| Frame.Fconstr (x,_,_,_,_) when x = Predef.path_array -> resarrs @ [p]
| _ -> resarrs
) [] allbindings in
List.iter (fun arr ->
let arrname = Path.name arr in
pp formatter "@[let %s = Array.copy %s in @]" (tempt_arr_prefix^arrname) arrname
) arrs
let complete_funcall_info = Hashtbl.create 3
let dump_fun formatter se_env lookup_fr funlocation funname env fenv exptxt =
if (is_measure se_env funname) then pp formatter "@[%s@]" "in r" else
let _ = Printf.printf " funname=%s\n " funname in
let n_args = count_arguments exptxt in
let _ = pp formatter "@;@[in let _ = if (!callflag) then fprintf outch (\"name:%s\\t\") %s @]"
"%s" ("\"" ^ funname ^ "\"") in
let measures = se_env.measures in
try
let (funpath, _) = Env.lookup_value (Lident funname) env in
Lightenv.find funpath fenv in
Hashtbl.find se_env.funframebindings funpath in
let (bad_constraint, defs) =
try (Hashtbl.find se_env.badbindings funpath,
Hashtbl.find se_env.fundefenvs funpath)
with _ -> (
if !(se_env.dty) then
({pre = Predicate.Not Predicate.True; post = Predicate.Not Predicate.True},
try Hashtbl.find se_env.fundefenvs funpath with _ -> (
Format.fprintf Format.std_formatter "%s unfound@." (Path.name funpath); assert false))
else (Format.fprintf Format.std_formatter "%s unfound@." (Path.name funpath); assert false)) in
let locals = [] in
let allbindings = Frame.get_fun_bindings env fr in
let n_args' = dump_fun_Info formatter env fenv measures fr bad_constraint locals defs allbindings n_args in
if (n_args' < 0) then (Hashtbl.replace complete_funcall_info funname n_args)
* Possibly local function not in env or fenv
try let fr = lookup_fr funlocation in
dump_fun_Info formatter env fenv fr with _ - > assert false
dump_fun_Info formatter env fenv fr with _ -> assert false*)
let (funpath, fr) = myFind se_env.funframebindings
(fun k -> String.compare (Path.name k) funname) in
let (bad_constraint, defs) =
try (Hashtbl.find se_env.badbindings funpath,
Hashtbl.find se_env.fundefenvs funpath)
with _ -> (({pre = Predicate.Not Predicate.True; post = Predicate.Not Predicate.True},
try Hashtbl.find se_env.fundefenvs funpath with _ -> assert false)
) in
let locals = [] in
let allbindings = Frame.get_fun_bindings env fr in
let n_args' = dump_fun_Info formatter env fenv measures fr bad_constraint locals defs allbindings n_args in
if (n_args' < 0) then (Hashtbl.replace complete_funcall_info funname n_args)
let dump_app formatter se_env apploc exptxt =
let (fname, n_args) = count_app_arguments exptxt in
let sys_def_vars =
["guard"; "assert_guard"; "pattern"; "hoencoding"; "arraygetencoding";
"fixarrayencoding"; "ho_pattern"; "envrefreshed"; "lstlenencoding"; "dtyencoding"] in
let measures = se_env.measures in
if (Hashtbl.mem se_env.funcallenvs apploc) then
let context = Hashtbl.find se_env.funcallenvs apploc in
(pp formatter "@;@[<hov2>let _ = if (!callflag) then fprintf outch (\"env:%s\\t\") \"%s\" in @]"
"%s" (Location.tostring apploc);
List.iter (fun (path, fr) ->
let pathname = Path.name path in
match fr with
| Fconstr (x,_,_,_,_) when x = Predef.path_unit -> ()
| Fconstr (x,_,_,_,_) when x = Predef.path_array -> ()
| Fconstr (x,_,_,_,_) when Hashtbl.mem measures x -> ()
| Fconstr (x,_,_,_,_) when x = Predef.path_list ->
if (List.for_all (fun var -> (String.compare var pathname != 0)) sys_def_vars) then
pp formatter "@;@[<hov2>let _ = if (!callflag) then fprintf outch (\"%s:%s\\t\") ((List.length %s)) in @]"
(pathname ^ "_l") "%d" pathname
| Farrow _ -> ()
| Fconstr (x,_,_,_,_) when x = Predef.path_bool ->
if (List.for_all (fun var -> (String.compare var pathname != 0)) sys_def_vars) then
pp formatter "@;@[<hov2>let _ = if (!callflag) then fprintf outch (\"%s:%s\\t\") (if (%s) then 1 else 0) in @]"
pathname "%d" pathname
* First class value dump
if (List.for_all (fun var -> (String.compare var pathname != 0)) sys_def_vars) then
pp formatter "@;@[<hov2>let _ = if (!callflag) then fprintf outch (\"%s:%s\\t\") ((%s)) in @]"
pathname "%d" pathname
| Frecord _ | Ftuple _ -> ()
(Format.fprintf Format.std_formatter
"Value cannot be dumped for %s \n" pathname;
assert false)
) context;
pp formatter "@;@[<hov2>let _ = if (!callflag) then fprintf outch (\"\\n\") in @]")
else
if (Hashtbl.mem complete_funcall_info fname) then
let n_args' = Hashtbl.find complete_funcall_info fname in
if (n_args = n_args') then
(pp formatter "@;@[<hov2>let _ = if (!callflag) then fprintf outch (\"env:%s\\t\") \"%s\" in @]"
"%s" (Location.tostring apploc);
pp formatter "@;@[<hov2>let _ = if (!callflag) then fprintf outch (\"\\n\") in @]")
else () )
let read_log () =
let lines = ref [] in
let chan = open_in log_fname in
try
while true; do
lines := input_line chan :: !lines
done; []
with End_of_file ->
close_in chan;
List.rev !lines ;;
let read_dumpings pos_samples =
let logs = read_log () in
let env_stack = Stack.create () in
List.iter (fun log ->
let bindings = Str.split (Str.regexp "[ \t]+") log in
let (namebinding, value_bindings) =
(List.hd bindings, List.tl bindings) in
let _ = pp " namebinding = % s @. " namebinding in
"namebinding = %s @." namebinding in*)
let name = Str.split (Str.regexp ":") namebinding in
let (name_name, name_value) = (List.hd name, List.nth name 1) in
if (String.compare name_name "env" = 0) then
Stack.push (name_value, value_bindings) env_stack
else
List.map ( Str.split ( Str.regexp " : " ) ) para_return_bindings
value_bindings in
let (loc, env_value_bindings) = Stack.pop env_stack in
let loc = loc ^ "_" ^ name_value in
if (Hashtbl.mem pos_samples loc) then
let (_, samples) = Hashtbl.find pos_samples loc in
Hashtbl.replace pos_samples loc (name_value, samples@[(data, env_value_bindings)])
else
Hashtbl.replace pos_samples loc (name_value, [(data, env_value_bindings)])
) logs
|
7e13965e62d5801a1e2b0bee8c2b54908701f105a579083cd3affd6fe7e4b6cd | tnelson/Forge | multiplicityFormulas.rkt | #lang forge/core
(set-option! 'verbose 0)
(sig Color #:abstract)
(sig Red #:one #:extends Color)
(sig Green #:one #:extends Color)
(sig Blue #:one #:extends Color)
(sig Node #:abstract)
(sig N1 #:one #:extends Node)
(sig N2 #:one #:extends Node)
(sig N3 #:one #:extends Node)
(relation edges (Node Node Color))
(inst test-inst
(= edges (+ (-> (+ (-> (atom 'N10) (atom 'N20))
(+ (-> (atom 'N10) (atom 'N30))
(+ (-> (atom 'N20) (atom 'N30))
(-> (atom 'N30) (atom 'N30))))) (atom 'Red0))
(-> (+ (-> (atom 'N10) (atom 'N10))
(+ (-> (atom 'N10) (atom 'N20))
(+ (-> (atom 'N10) (atom 'N30))
(+ (-> (atom 'N20) (atom 'N30))
(-> (atom 'N30) (atom 'N20)))))) (atom 'Green0)))))
(pred Some
(some (join N1 (join edges Red)))
(some (join N2 (join edges Red)))
(some (join N3 (join edges Red)))
(! (some (join (join edges Red) N1)))
(some (join (join edges Red) N2))
(some (join (join edges Red) N3)))
(pred No
(! (no (join N1 (join edges Red))))
(! (no (join N2 (join edges Red))))
(! (no (join N3 (join edges Red))))
(no (join (join edges Red) N1))
(! (no (join (join edges Red) N2)))
(! (no (join (join edges Red) N3))))
(pred One1
(! (one (join N1 (join edges Red))))
(one (join N2 (join edges Red)))
(one (join N3 (join edges Red)))
(! (one (join (join edges Red) N1)))
(one (join (join edges Red) N2))
(! (one (join (join edges Red) N3))))
(pred Lone1
(! (lone (join N1 (join edges Red))))
(lone (join N2 (join edges Red)))
(lone (join N3 (join edges Red)))
(lone (join (join edges Red) N1))
(lone (join (join edges Red) N2))
(! (lone (join (join edges Red) N3))))
; These are treated as multiplicity formulas by ast.rkt,
; rather than quantifier formulas.
(pred One2
(one ([n Node])
(in (-> n Node)
(join edges Green)))
(one ([n1 Node]
[n2 Node])
(&& (!= n1 n2)
(in (+ (-> n1 n2) (-> n2 n1))
(join edges Green))))
(one ([n Node]
[c Color])
(in (-> n (-> Node c))
edges)))
(pred Lone2
one
(in (-> n Node)
(join edges Green)))
(lone ([n Node]) ; no
(in (-> Node n)
(join edges Green)))
one
[n2 Node])
(&& (!= n1 n2)
(in (+ (-> n1 n2) (-> n2 n1))
(join edges Green))))
(lone ([n1 Node] ; no
[n2 Node])
(in (+ (-> Node (+ n1 n2))
(-> (+ n1 n2) Node))
(join edges Green)))
(lone ([n Node] ;o ne
[c Color])
(in (-> (-> n Node) c)
edges))
(lone ([n Node] ; no
[c Color])
(in (-> (+ (-> n Node) (-> Node n)) c)
edges)))
(pred (SomePred n)
(in (-> (-> n Node) Red)
edges))
(pred Equivalence
(iff (lone ([n Node]) (SomePred n))
(|| (no ([n Node]) (SomePred n))
(one ([n Node]) (SomePred n)))))
(test someAsMultiplicity
#:preds [Some]
#:bounds [test-inst]
#:expect theorem)
(test noAsMultiplicity
#:preds [No]
#:bounds [test-inst]
#:expect theorem)
(test oneAsMultiplicity
#:preds [One1]
#:bounds [test-inst]
#:expect theorem)
(test loneAsMultiplicity
#:preds [Lone1]
#:bounds [test-inst]
#:expect theorem)
( test oneAsQuantifer ; CUURRENTLY BUGGED
# : [ One2 ]
; #:bounds [test-inst]
; #:expect theorem)
( test loneAsQuantifer ; CUURRENTLY BUGGED
# : [ Lone2 ]
; #:bounds [test-inst]
; #:expect theorem)
(test loneEquivalentOneNo
#:preds [Equivalence]
#:expect theorem)
| null | https://raw.githubusercontent.com/tnelson/Forge/1687cba0ebdb598c29c51845d43c98a459d0588f/forge/tests/forge-core/formulas/multiplicityFormulas.rkt | racket | These are treated as multiplicity formulas by ast.rkt,
rather than quantifier formulas.
no
no
o ne
no
CUURRENTLY BUGGED
#:bounds [test-inst]
#:expect theorem)
CUURRENTLY BUGGED
#:bounds [test-inst]
#:expect theorem) | #lang forge/core
(set-option! 'verbose 0)
(sig Color #:abstract)
(sig Red #:one #:extends Color)
(sig Green #:one #:extends Color)
(sig Blue #:one #:extends Color)
(sig Node #:abstract)
(sig N1 #:one #:extends Node)
(sig N2 #:one #:extends Node)
(sig N3 #:one #:extends Node)
(relation edges (Node Node Color))
(inst test-inst
(= edges (+ (-> (+ (-> (atom 'N10) (atom 'N20))
(+ (-> (atom 'N10) (atom 'N30))
(+ (-> (atom 'N20) (atom 'N30))
(-> (atom 'N30) (atom 'N30))))) (atom 'Red0))
(-> (+ (-> (atom 'N10) (atom 'N10))
(+ (-> (atom 'N10) (atom 'N20))
(+ (-> (atom 'N10) (atom 'N30))
(+ (-> (atom 'N20) (atom 'N30))
(-> (atom 'N30) (atom 'N20)))))) (atom 'Green0)))))
(pred Some
(some (join N1 (join edges Red)))
(some (join N2 (join edges Red)))
(some (join N3 (join edges Red)))
(! (some (join (join edges Red) N1)))
(some (join (join edges Red) N2))
(some (join (join edges Red) N3)))
(pred No
(! (no (join N1 (join edges Red))))
(! (no (join N2 (join edges Red))))
(! (no (join N3 (join edges Red))))
(no (join (join edges Red) N1))
(! (no (join (join edges Red) N2)))
(! (no (join (join edges Red) N3))))
(pred One1
(! (one (join N1 (join edges Red))))
(one (join N2 (join edges Red)))
(one (join N3 (join edges Red)))
(! (one (join (join edges Red) N1)))
(one (join (join edges Red) N2))
(! (one (join (join edges Red) N3))))
(pred Lone1
(! (lone (join N1 (join edges Red))))
(lone (join N2 (join edges Red)))
(lone (join N3 (join edges Red)))
(lone (join (join edges Red) N1))
(lone (join (join edges Red) N2))
(! (lone (join (join edges Red) N3))))
(pred One2
(one ([n Node])
(in (-> n Node)
(join edges Green)))
(one ([n1 Node]
[n2 Node])
(&& (!= n1 n2)
(in (+ (-> n1 n2) (-> n2 n1))
(join edges Green))))
(one ([n Node]
[c Color])
(in (-> n (-> Node c))
edges)))
(pred Lone2
one
(in (-> n Node)
(join edges Green)))
(in (-> Node n)
(join edges Green)))
one
[n2 Node])
(&& (!= n1 n2)
(in (+ (-> n1 n2) (-> n2 n1))
(join edges Green))))
[n2 Node])
(in (+ (-> Node (+ n1 n2))
(-> (+ n1 n2) Node))
(join edges Green)))
[c Color])
(in (-> (-> n Node) c)
edges))
[c Color])
(in (-> (+ (-> n Node) (-> Node n)) c)
edges)))
(pred (SomePred n)
(in (-> (-> n Node) Red)
edges))
(pred Equivalence
(iff (lone ([n Node]) (SomePred n))
(|| (no ([n Node]) (SomePred n))
(one ([n Node]) (SomePred n)))))
(test someAsMultiplicity
#:preds [Some]
#:bounds [test-inst]
#:expect theorem)
(test noAsMultiplicity
#:preds [No]
#:bounds [test-inst]
#:expect theorem)
(test oneAsMultiplicity
#:preds [One1]
#:bounds [test-inst]
#:expect theorem)
(test loneAsMultiplicity
#:preds [Lone1]
#:bounds [test-inst]
#:expect theorem)
# : [ One2 ]
# : [ Lone2 ]
(test loneEquivalentOneNo
#:preds [Equivalence]
#:expect theorem)
|
aa73f476cb9cecbb92e3069af79280285a210986f20cca31bad8bb6bda629ccf | aeternity/aesim | aesim.erl | -module(aesim).
-behaviour(application).
%=== EXPORTS ===================================================================
-export([main/1]).
-export([start/2]).
-export([stop/1]).
%=== API FUNCTIONS =============================================================
-spec main(term()) -> no_return().
main(Args) ->
start_simulator(Args).
%% Dialyzer don't like we create an anonymous function that doesn't return
-dialyzer({nowarn_function, start/2}).
start(_StartType, StartArgs) ->
{ok, spawn(fun() -> start_simulator(StartArgs) end)}.
stop(_State) -> ok.
%=== INTERNAL FUNCTIONS ========================================================
start_simulator(Args) ->
case aesim_simulator:run(parse_options(Args)) of
normal -> erlang:halt(0);
_ -> erlang:halt(1)
end.
parse_options(Args) ->
{ok, Regex} = re:compile("^([a-z][a-zA-Z0-9_]*)=(.*)$"),
parse_options(Args, Regex, #{}).
parse_options([], _Regex, Opts) -> Opts;
parse_options([Opt | Rest], Regex, Opts) ->
case re:run(Opt, Regex, [{capture, all_but_first, list}]) of
{match, [KeyStr, Value]} ->
Key = list_to_atom(KeyStr),
parse_options(Rest, Regex, Opts#{Key => Value});
_ ->
io:format(standard_error, "Invalid option: ~s~n", [Opt]),
erlang:halt(1)
end.
| null | https://raw.githubusercontent.com/aeternity/aesim/20d89621a0994e1b6cf2b766d188ec29283e77e1/src/aesim.erl | erlang | === EXPORTS ===================================================================
=== API FUNCTIONS =============================================================
Dialyzer don't like we create an anonymous function that doesn't return
=== INTERNAL FUNCTIONS ======================================================== | -module(aesim).
-behaviour(application).
-export([main/1]).
-export([start/2]).
-export([stop/1]).
-spec main(term()) -> no_return().
main(Args) ->
start_simulator(Args).
-dialyzer({nowarn_function, start/2}).
start(_StartType, StartArgs) ->
{ok, spawn(fun() -> start_simulator(StartArgs) end)}.
stop(_State) -> ok.
start_simulator(Args) ->
case aesim_simulator:run(parse_options(Args)) of
normal -> erlang:halt(0);
_ -> erlang:halt(1)
end.
parse_options(Args) ->
{ok, Regex} = re:compile("^([a-z][a-zA-Z0-9_]*)=(.*)$"),
parse_options(Args, Regex, #{}).
parse_options([], _Regex, Opts) -> Opts;
parse_options([Opt | Rest], Regex, Opts) ->
case re:run(Opt, Regex, [{capture, all_but_first, list}]) of
{match, [KeyStr, Value]} ->
Key = list_to_atom(KeyStr),
parse_options(Rest, Regex, Opts#{Key => Value});
_ ->
io:format(standard_error, "Invalid option: ~s~n", [Opt]),
erlang:halt(1)
end.
|
357077b88b193f6ea5808d0f8354b5ddd20cc8b1705a4802acb97f5d423e258e | racket/typed-racket | tc-lambda-unit.rkt | #lang racket/unit
(require "../utils/utils.rkt"
"../utils/plambda-utils.rkt"
racket/list syntax/parse syntax/stx
racket/match syntax/private/id-table
racket/sequence
(contract-req)
"../rep/type-rep.rkt"
"../rep/object-rep.rkt"
"../rep/rep-utils.rkt"
(rename-in (combine-in "../types/abbrev.rkt"
"../types/utils.rkt")
[-> t:->]
[->* t:->*]
[one-of/c t:one-of/c])
"../private/type-annotation.rkt"
"../private/syntax-properties.rkt"
"../types/resolve.rkt"
"../types/type-table.rkt"
"signatures.rkt"
"tc-metafunctions.rkt"
"tc-subst.rkt"
"../env/lexical-env.rkt"
"../env/tvar-env.rkt"
"../env/index-env.rkt"
"../env/scoped-tvar-env.rkt"
"../utils/tc-utils.rkt"
(for-template racket/base)
(for-syntax racket/base))
(import tc-expr^)
(export tc-lambda^)
(define-syntax-class cl-rhs
#:literal-sets (kernel-literals)
#:attributes (i cond)
[pattern i:id #:attr cond #f]
[pattern (if cond:expr e:expr i:id)])
(define-syntax-class rebuild-let*
#:literal-sets (kernel-literals)
#:attributes (mapping)
(pattern (#%expression :rebuild-let*))
(pattern (let-values ([(new-id) e:cl-rhs]) body:rebuild-let*)
#:attr mapping (free-id-table-set (attribute body.mapping) #'e.i #'new-id))
(pattern body:expr
#:attr mapping (make-immutable-free-id-table)))
positional : ( listof identifier ? )
;; rest: id or #f
;; syntax: syntax? - the improper syntax list of identifiers
;; (i.e. (append positional (or id '())) but syntax)
(struct formals (positional rest syntax) #:transparent)
;; When expanding a keyword or optional lambda, Racket adds into the expanded
;; code more lambdas, where syntax objects for the original lambda's parameters
;; are reused. Since Typed Racket stores type information stored in the syntax
;; objects, when the orginal lambda is a polymorphic function, that information
;; might carry out-of-scope type variables. In this case, we need to remove it
;; from parameter syntax objects.
;;
;; not-in-poly is #t if the original TR function is polymorphic.
(define (make-formals stx [not-in-poly #t])
(define (maybe-remove a)
(if (and not-in-poly (from-plambda-property a))
(syntax-property-remove a 'type-label)
a))
(let loop ([s stx] [acc null])
(cond
[(pair? s) (loop (cdr s) (cons (maybe-remove (car s)) acc))]
[(null? s) (formals (reverse acc) #f stx)]
[(pair? (syntax-e s)) (loop (stx-cdr s) (cons (maybe-remove (stx-car s)) acc))]
[(null? (syntax-e s)) (formals (reverse acc) #f stx)]
[else (formals (reverse acc) (maybe-remove s) stx)])))
;; Currently no support for objects representing the rest argument
(define (formals->objects f)
(for/list ([i (in-list (formals-positional f))])
(make-Path null i)))
(define (expected-str tys-len rst arg-len rest-id)
(format "Expected function with ~a argument~a~a, but got function with ~a argument~a~a"
tys-len
(if (= tys-len 1) "" "s")
(if rst
" and a rest arg"
"")
arg-len
(if (= arg-len 1) "" "s")
(if rest-id " and a rest arg" "")))
;; tc-lambda-body: Typechecks the body with the given args and names
and returns the resulting Arrow ? .
;; arg-names: The identifiers of the positional args
;; arg-types: The types of the positional args
rest - arg+type : Either # f for no rest argument or ( list rest - id rest - type ( rest - type ) )
;; where rest-id is the identifier of the rest arg,
and ( ListOf rest - type ) is the type that identifier would
;; have in the function body
;; expected: The expected type of the body forms.
;; body: The body of the lambda to typecheck.
(define/cond-contract
(tc-lambda-body arg-names arg-types
#:rest-id+type+body-type [rest-id+type+body-type #f]
#:expected [expected #f] body)
(->* ((listof identifier?) (listof Type?) syntax?)
(#:rest-id+type+body-type (or/c #f (list/c identifier? (or/c Rest? RestDots?) Type?))
#:expected (or/c #f tc-results/c))
Arrow?)
(define-values (rst-id rst-type names types)
(match rest-id+type+body-type
[(list id rst body-type)
(values id rst (cons id arg-names) (cons body-type arg-types))]
[_ (values #f #f arg-names arg-types)]))
(-Arrow
arg-types
(abstract-results
(with-extended-lexical-env
[#:identifiers names
#:types types]
(tc-body/check body expected))
arg-names #:rest-id rst-id)
#:T+ #true ;; shallow can trust the results of a literal lambda
#:rest rst-type))
;; check-clause: Checks that a lambda clause has arguments and body matching the expected type
;; arg-list: The identifiers of the positional args in the lambda form
;; rest-id: The identifier of the rest arg, or #f for no rest arg
;; body: The body of the lambda to typecheck.
;; arg-tys: The expected positional argument types.
;; rst: #f, expected rest arg Rest, or expected RestDots
ret - ty : The expected type of the body of the lambda .
(define/cond-contract (check-clause arg-list rest-id body arg-tys rst ret-ty)
((listof identifier?)
(or/c #f identifier?) syntax? (listof Type?) (or/c #f Rest? RestDots?)
tc-results/c
. -> .
Arrow?)
(let* ([arg-len (length arg-list)]
[arg-tys-len (length arg-tys)]
[extra-arg-count (- arg-len arg-tys-len)]
[arg-types
(cond
[(andmap type-annotation arg-list)
(get-types arg-list #:default Univ)]
[(zero? extra-arg-count)
(map (lambda (a t) (get-type a #:default t)) arg-list arg-tys)]
[(negative? extra-arg-count) (take arg-tys arg-len)]
[else
(define tail-tys (match rst
[(Rest: rst-tys)
(define rst-len (length rst-tys))
(for/list ([_ (in-range extra-arg-count)]
[rst-t (in-list-cycle rst-tys)])
rst-t)]
[_ (for/list ([_ (in-range extra-arg-count)])
-Bottom)]))
(append arg-tys tail-tys)])])
;; Check that the number of formal arguments is valid for the expected type.
;; Thus it must be able to accept the number of arguments that the expected
type has . So we check for two cases : if the function does n't accept
;; enough arguments, or if it requires too many arguments.
;; This allows a form like (lambda args body) to have the type (-> Symbol
;; Number) with out a rest arg.
(when (or (and (< arg-len arg-tys-len) (not rest-id))
(and (> arg-len arg-tys-len) (not rst)))
(tc-error/delayed (expected-str arg-tys-len rst arg-len rest-id)))
rst - type - the type of the rest argument in the Arrow type
;; rest-body-type - the type the rest argument id has in the body
;; of the function
;; e.g. for
(: foo ( - > * ( ) ( ) # : rest String Number ) )
;; (define (foo . rest-strings) ...)
the caller can provide 0 or more Strings , so the Arrow 's
;; rest spec would be (make-Rest (list -String))
;; and in the body of the function, the rest argument
identifier ( rest - strings ) have type ( )
(define-values (rst-type rest-body-type)
(cond
;; there's not a rest ident... easy
[(not rest-id) (values #f #f)]
;; a dotted rest spec, so the body has a ListDots
[(RestDots? rst)
(match-define (RestDots: dty dbound) rst)
(values rst (make-ListDots dty dbound))]
;; the rest-id is dotted?, lets go get its type
[(dotted? rest-id)
=> (λ (dbound)
(define ty (extend-tvars (list dbound) (get-type rest-id #:default Univ)))
(values (make-RestDots ty dbound)
(make-ListDots ty dbound)))]
[else
;; otherwise let's get the sequence of types the rest argument would have
;; and call it 'rest-types' (i.e. in our above example 'foo', this would
;; be (list -String)
(define rest-types
(cond
[(type-annotation rest-id) (list (get-type rest-id #:default Univ))]
[else
(match rst
[#f (list -Bottom)]
[(? Type? t) (list t)]
[(Rest: rst-ts) rst-ts]
[_ (list Univ)])]))
;; now that we have the list of types, we need to calculate, based on how many
;; positional argument identifiers there are, how the rest should look
i.e. if our rest was ( ) * ( i.e. an even length rest arg of numbers
followed by strings ) and there was 1 more positional argument that positional
domain types , then that extra positional arg would be type ( i.e. the type
;; it gets since its type is coming from the rest type) and the rest id's type
in the body of the function would ( Pair Str ( ) * ) ( i.e. the rest arg
would _ have _ to have a in it , and then would have 0 or more
(cond
[(= arg-len arg-tys-len)
(values (make-Rest rest-types)
(make-CyclicListof rest-types))]
;; some of the args are _in_ the rst arg (i.e. they
;; do not have argument names) ...
[(<= arg-len arg-tys-len)
(define extra-types (drop arg-tys arg-len))
(define rst-type (apply Un (append extra-types rest-types)))
(values (make-Rest (list rst-type))
(make-Listof rst-type))]
;; there are named args whose type came from the rst argument
;; i.e. we need to pull there types out of the rst arg
[else
(define rest-remainder (drop rest-types (remainder extra-arg-count
(length rest-types))))
(values (make-Rest rest-types)
(-Tuple* rest-remainder
(make-CyclicListof rest-types)))])]))
(tc-lambda-body
arg-list
arg-types
#:rest-id+type+body-type (and rst-type (list rest-id rst-type rest-body-type))
#:expected ret-ty
body)))
;; typecheck a single lambda, with argument list and body
drest - ty and drest - bound are both false or not false
(define/cond-contract (tc/lambda-clause/check f body arg-tys ret-ty rst)
(-> formals?
syntax?
(listof Type?)
(or/c tc-results/c #f)
(or/c #f Rest? RestDots?)
Arrow?)
(check-clause (formals-positional f)
(formals-rest f)
body
arg-tys
rst
ret-ty))
;; typecheck a single opt-lambda clause with argument list and body
(define/cond-contract (tc/opt-lambda-clause arg-list body aux-table)
(-> (listof identifier?) syntax? free-id-table?
(listof Arrow?))
arg - types : Listof[Type ? ]
(define arg-types
(for/list ([a (in-list arg-list)])
(get-type a #:default (lambda ()
(define id (free-id-table-ref aux-table a #f))
(cond
[id
(define ty (get-type id #:default Univ))
(if (optional-non-immediate-arg-property id)
(Un -Unsafe-Undefined ty)
ty)]
[else Univ])))))
(list
(match (and (not (null? arg-list))
(free-id-table-ref aux-table (last arg-list) #f))
[id #:when (and id (rst-arg-property id))
;; when an opt-lambda with a rest parameter is expanded, the resulting
;; expression contains a generated lambda, which is being checked and whose
;; last positional parameter `new-rest` corresponds to the rest parameter of the original
;; function.
;; we use `arg-types` to make the arrow consistent with the original type
;; annotation, i.e. `new-rest` needs to have the same type as the original
;; function's the rest parameter. In order to check the function body, we
;; duplicate `arg-types` with the last element changed from `ty` to
` ( )
(Arrow-update
(tc-lambda-body arg-list
(list-update arg-types (sub1 (length
arg-types))
(lambda (ty) (make-Listof ty)))
body)
dom
(lambda (dom)
arg-types))]
[_ (tc-lambda-body arg-list arg-types body)])))
;; restrict-to-arity : Arrow? nat -> (or/c #f Arrow?)
;; either produces a new arrow which is a subtype of arr with arity n,
;; or #f is that is not possible
(define (restrict-Arrow-to-arity arrow n)
(match arrow
;; currently does not handle rest arguments
[(Arrow: args #f '() _)
#:when (= n (length args))
arrow]
[_ #f]))
(define/cond-contract (tc/lambda-clause f body)
(-> formals? syntax? (listof Arrow?))
(define aux-table
(syntax-parse body
[(b:rebuild-let*) (values (attribute b.mapping))]
[_ (make-immutable-free-id-table)]))
(define arg-list (formals-positional f))
(define rest-id (formals-rest f))
(define eta-expanded?
(syntax-parse body
[(((~literal #%plain-app) fun:id j:id ...)) ;; restricted to ids to avoid re-typechecking
#:when (equal? (length arg-list)
(length (syntax->list #'(j ...))))
#:when (andmap free-identifier=? arg-list (syntax->list #'(j ...)))
#'fun]
[_ #f]))
(cond
[(and (> (free-id-table-count aux-table) 0) (not rest-id))
(tc/opt-lambda-clause arg-list body aux-table)]
[else
(define arg-types (get-types arg-list #:default (lambda () #f)))
(define rest-type
(cond
;; Lambda with poly dotted rest argument
[(and rest-id (dotted? rest-id))
=>
(λ (bound)
(unless (bound-index? bound)
(if (bound-tvar? bound)
(tc-error "Bound on ... type (~a) is not an appropriate type variable" bound)
(tc-error/stx rest-id "Bound on ... type (~a) was not in scope" bound)))
(make-RestDots (extend-tvars (list bound) (get-type rest-id #:default Univ))
bound))]
;; Lambda with regular rest argument
[rest-id (match (get-type rest-id #:default Univ)
[(? Type? t) (make-Rest (list t))]
[(? Rest? rst) rst]
[(? RestDots? rst) rst])]
;; Lambda with no rest argument
[else #f]))
(cond
special case for un - annotated eta - expansions
[(and eta-expanded? (not rest-id) (andmap not arg-types)
;; FIXME: should also handle polymorphic types
;; but we can't return anything but a (listof arr?) here
;; FIXME: misses optimization opportunities in this code
(match (tc-expr eta-expanded?)
[(tc-result1: (Fun: arrows))
(define possibles
(for*/list ([arrow (in-list arrows)]
[restricted (in-value (restrict-Arrow-to-arity
arrow
(length arg-list)))]
#:when restricted)
restricted))
(if (null? possibles)
#f
possibles)]
[_ #f]))
=>
(lambda (x)
(register-ignored! (car (syntax-e body)))
x)]
[else
(define rest-body-type
(match rest-type
[#f #f]
[(Rest: ts) (make-CyclicListof ts)]
[(RestDots: dty dbound) (make-ListDots dty dbound)]))
(list
(tc-lambda-body
arg-list
(map (λ (v) (or v Univ)) arg-types)
#:rest-id+type+body-type (and rest-type (list rest-id rest-type rest-body-type))
body))])]))
;; case-arities
;; a description of the supported arities for a case-lambda
;; we have seen thus far while checking a case-lambda (recall
;; that, for Function types and type checking purposes, all
;; functions are case-lambdas)
fixed - arities : ( listof natural ? )
;; supported unique options so far for fixed argument counts,
;; where for each element n, n < rest-pos, and the list should
;; not contain duplicates
;; (NOTE: once we encounter a rest arg at position rest-pos, we
;; _remove_ arity counts that the rest encompasses (i.e.
;; when n >= rest-pos) -- see example below of
;; checking a case-lambda)
;; rest-pos : (or/c natural? +inf.0)
;; at what position would an argument be in the rest argument
;;
;; We construct these specs _while_ we are parsing and checking
;; case-lambdas to help us know if a clause is dead code,
which Arrow types we should type check a particular case - lambda
;; clause at, etc
;;
;; e.g. after each step of looking at the below case-lambda,
;; we would have the following case-arity-spec (i.e. after
;; using `add-to-arities` to update the case-arities):
;; (case-lambda
[ ( x ) ... ] ; = = > ( case - arities ' ( 1 ) + inf.0 )
[ ( x y ) ... ] ; = = > ( case - arities ' ( 1 2 ) + inf.0 )
;; [(x y z) ...] ; ==> (case-arities '(1 2 3) +inf.0)
[ ( x y . rst ) ... ] ; = = > ( case - arities ' ( 1 2 ) 2 )
;; [l ...]) ; ==> (case-arities '() 0)
(struct case-arities (fixed-arities rest-pos) #:transparent)
;; initially, we have seen no fixed arities and it is impossible for
;; an argument to be in a rest argument from a previous clause
(define initial-case-arities (case-arities '() +inf.0))
;; Adds the arity described by formals 'f' to the arities
;; described by 'arities'. See above example near `case-arities`
;; definition.
(define/cond-contract (add-to-arities arities f)
(-> case-arities? formals? case-arities?)
(match* (arities f)
[((case-arities fixed-arities rest-pos)
(formals positional rst _))
(define arity (length positional))
(define new-rest-pos
(if rst (min rest-pos arity) rest-pos))
(define new-fixed-arities
(cond
[(eqv? +inf.0 new-rest-pos) (cons arity fixed-arities)]
[else (for/list ([i (in-list (cons arity fixed-arities))]
#:when (< i new-rest-pos))
i)]))
(case-arities new-fixed-arities new-rest-pos)]))
;; Determines if the given formals would be
;; covered by a supported arity in arities
(define/cond-contract (in-arities? arities f-or-arrow)
(-> case-arities? (or/c formals? Arrow?) boolean?)
(match* (arities f-or-arrow)
[((case-arities fixed-arities rest-pos)
(or (formals (app length arity) rst _)
(Arrow: (app length arity) rst _ _)))
(or (>= arity rest-pos)
(and (not rst) (memv arity fixed-arities) #t))]))
Returns a list of Arrows where the list contains all the valid Arrows
;; from 'arrows' that could apply to a clause with formals 'f', given we
;; have already seen case-arities 'seen'.
(define/cond-contract (arrows-matching-seen+formals arrows seen f)
(-> (listof Arrow?) case-arities? formals? (listof Arrow?))
(match-define (formals formals-positionals formals-rest? _) f)
(define pos-count (length formals-positionals))
(for*/list ([arrow (in-list arrows)]
[dom (in-value (Arrow-dom arrow))]
[rst (in-value (Arrow-rst arrow))]
#:unless (in-arities? seen arrow)
#:when (cond
[formals-rest?
(or (Rest? rst) (>= (length dom) pos-count))]
[rst (<= (length dom) pos-count)]
[else (= (length dom) pos-count)]))
arrow))
;; For each clause (i.e. each elem in formals+bodies) we figure out which
;; of the expected arrows it needs to type check at and which clauses
;; are dead code.
;;
;; Returns the association list mapping clauses to the arrows they need
;; to type check at.
(define/cond-contract (create-to-check-list formals+bodies expected-arrows)
(-> (listof (cons/c formals? syntax?))
(listof Arrow?)
(listof (cons/c (cons/c formals? syntax?)
(listof Arrow?))))
;; arities we have seen so far while checking case-lambda clauses
(define seen initial-case-arities)
(for*/list ([f+b (in-list formals+bodies)]
[clause-formals (in-value (car f+b))]
[clause-body (in-value (cdr f+b))])
(define matching-arrows
(arrows-matching-seen+formals expected-arrows
seen
clause-formals))
(when (or (in-arities? seen clause-formals)
(null? matching-arrows))
(warn-unreachable clause-body)
(add-dead-lambda-branch (formals-syntax clause-formals)))
(set! seen (add-to-arities seen clause-formals))
(cons f+b matching-arrows)))
;; formals+bodies : formals and bodies to check
;; expected-arrows : expected arrow types for the overall case-lambda
;; orig-arrows : an association list recording if any formals and bodies
have _ already _ been checked at a certain Arrow type
(define/cond-contract
(check-mono-lambda/expected formals+bodies expected-arrows orig-arrows)
(-> (listof (cons/c formals? syntax?))
(listof Arrow?)
(listof (cons/c (cons/c formals? syntax?)
(listof Arrow?)))
(listof Arrow?))
(define to-check-list (create-to-check-list formals+bodies expected-arrows))
(cond
[(and (andmap (λ (f+b+arrows) (null? (cdr f+b+arrows)))
to-check-list)
;; If the empty function is expected, then don't error out
(not (null? expected-arrows)))
TODO improve error message .
(tc-error/expr #:return (list (-Arrow null -Bottom #:rest Univ))
"Expected a function of type ~a, but got a function with the wrong arity"
(make-Fun expected-arrows))]
[else
(for*/list ([(f+b arrows-to-check-against) (in-assoc to-check-list)]
[clause-formals (in-value (car f+b))]
[clause-body (in-value (cdr f+b))]
[orig-arrows (in-value (assoc-ref orig-arrows f+b '()))]
[arrow (in-list arrows-to-check-against)])
;; NOTE!!! checking clauses against all applicable arrows is sound, but
;; less complete than actually intersecting all of the arrow types and
;; then checking clauses against the result
(match arrow
;; if this clause has an orig-arrow, we already checked it once and that
;; was it's arrow type -- we don't want to check it again at the same arrow
[_ #:when (member arrow orig-arrows) arrow]
[(Arrow: dom rst '() rng)
(define expected
(values->tc-results rng (formals->objects clause-formals)))
(tc/lambda-clause/check
clause-formals clause-body dom expected rst)]))]))
;; typecheck a sequence of case-lambda clauses
(define/cond-contract (tc/mono-lambda formals+bodies expected)
(-> (listof (cons/c formals? syntax?))
(or/c #f tc-results/c)
(listof Arrow?))
(define expected-arrows
(match expected
[(tc-result1: t)
(define resolved (resolve t))
(match resolved
[(Fun: arrows)
#:when (for/and ([arr (in-list arrows)])
(null? (Arrow-kws arr)))
arrows]
[_ #f])]
[_ #f]))
(cond
[expected-arrows
if we have expected Arrows , proceed with checking against them
(check-mono-lambda/expected formals+bodies expected-arrows '())]
[else
if we do n't have expected Arrows , we may need to re - check some
of the bodies against Arrow types derived while checking the
bodies for soundness sake , so first we will check their bodies
;; with no expected type and then use check-mono-lambda/expected
;; to perform any re-checking that is needed
;; arities we have seen so far while checking case-lambda clauses
(define seen initial-case-arities)
;; clauses that are unreachable because of the formals and ordering
;; of the case-lambda clauses
(define unreachable-clauses '())
;; now we typecheck the bodies, recording their arrows (resulting-arrows)
and the mapping of which formals+body produced which Arrow ( already - checked ) ,
;; all while updating which arities we have seen and which, if any, case-lambda
;; clauses are in fact unreachable
(define-values (resulting-arrowss already-checked)
(for*/lists (_1 _2)
([f+b (in-list formals+bodies)]
[f (in-value (car f+b))]
[b (in-value (cdr f+b))]
#:unless (let ([unreachable? (in-arities? seen f)])
(when unreachable?
(warn-unreachable b)
(add-dead-lambda-branch (formals-syntax f))
(set! unreachable-clauses
(cons f+b unreachable-clauses)))
unreachable?))
(set! seen (add-to-arities seen f))
(define resulting-arrow (tc/lambda-clause f b))
(values resulting-arrow
(cons f+b resulting-arrow))))
(define resulting-arrows (apply append resulting-arrowss))
if there was more than one live case - lambda clause , we may need
;; to recheck some clauses against some of the arrows generated
;; during checking for soundness sake,
;; e.g.
;; if we naively check (case-lambda
[ ( [ x : ] . [ rst : * ] ) x ]
[ [ rst : * ] 0 ]
we get ( case- > ( - > Num Num * )
( - > Num * Zero ) )
;; which is unsound (i.e. we can upcast an intersection to either
type , namely in this case to ( - > Num * Zero ) , and then call
;; it as the identity function on any number, which does not
;; always produce the constant 0). In other words, our `case->`
;; is really an UNORDERED intersection that we just don't work
;; super hard to check function application with, it is not
;; truly an ordered intersection, and thus if some function `f`
;; has type `A ∧ B` it must be checked at both `A` and `B`.
(cond
[(> (- (length formals+bodies)
(length unreachable-clauses))
1)
(check-mono-lambda/expected (remove* unreachable-clauses
formals+bodies)
resulting-arrows
already-checked)]
[else
resulting-arrows])]))
(define (tc/dep-lambda formalss-stx bodies-stx dep-fun-ty)
(parameterize ([with-refinements? #t])
(match-define (DepFun: raw-dom raw-pre raw-rng) dep-fun-ty)
(define formalss (stx-map make-formals formalss-stx))
(define bodies (syntax->list bodies-stx))
(match* (formalss bodies)
[((list fs) (list body))
(cond
[(not (= (length (formals-positional fs))
(length raw-dom)))
(tc-error/expr #:return dep-fun-ty
(format "Expected ~a positional arguments, given ~a."
(length raw-dom)
(length (formals-positional fs))))]
[(formals-rest fs)
(tc-error/expr #:return dep-fun-ty
"Dependent functions do not currently support rest arguments.")]
[else
(define arg-names (formals-positional fs))
(define dom (for/list ([d (in-list raw-dom)])
(instantiate-obj d arg-names)))
(define pre (instantiate-obj raw-pre arg-names))
(with-naively-extended-lexical-env
[#:identifiers arg-names
#:types dom
#:props (list pre)]
(tc-body/check body (values->tc-results raw-rng (map -id-path arg-names))))
dep-fun-ty])]
[(fs bs)
(tc-error/expr #:return dep-fun-ty
"Dependent functions must have a single arity.")])))
(define (tc/mono-lambda/type formalss bodies expected [not-in-poly #t])
(match expected
[(tc-result1:(? DepFun? dep-fun-ty))
(tc/dep-lambda formalss bodies dep-fun-ty)]
[_ (make-Fun
(tc/mono-lambda
(for/list ([f (in-syntax formalss)]
[b (in-syntax bodies)])
(cons (make-formals f not-in-poly) b))
expected))]))
;; tc/plambda syntax tvarss-list syntax-list syntax-list type -> Poly
;; formals and bodies must by syntax-lists
(define/cond-contract (tc/plambda form tvarss-list formals bodies expected)
(syntax? (listof list?) syntax? syntax? (or/c tc-results/c #f) . -> . Type?)
(define/cond-contract (maybe-loop form formals bodies expected)
(syntax? syntax? syntax? (or/c tc-results/c #f) . -> . Type?)
(match expected
[(tc-result1: (app resolve (or (? Poly?) (? PolyDots?) (? PolyRow?))))
(tc/plambda form (remove-poly-layer tvarss-list) formals bodies expected)]
[_
(define remaining-layers (remove-poly-layer tvarss-list))
(if (null? remaining-layers)
(tc/mono-lambda/type formals bodies expected #f)
(tc/plambda form remaining-layers formals bodies expected))]))
;; check the bodies appropriately
;; and make both annotated and declared type variables point to the
;; same actual type variables (the fresh names)
(define (extend-and-loop form ns formals bodies expected)
(let loop ([tvarss tvarss])
(match tvarss
[(list) (maybe-loop form formals bodies expected)]
[(cons (list (list tvars ...) dotted) rest-tvarss)
(extend-indexes dotted
(extend-tvars/new tvars ns
(loop rest-tvarss)))]
[(cons tvars rest-tvarss)
(extend-tvars/new tvars ns
(loop rest-tvarss))])))
(define tvarss (get-poly-layer tvarss-list))
(match expected
[(tc-result1: (app resolve (and t (Poly-fresh: ns fresh-ns expected*))))
;; make sure the declared and annotated type variable arities match up
;; with the expected type variable arity
(for ([tvars (in-list tvarss)])
(when (and (pair? tvars) (list? (car tvars)))
(tc-error
"Expected a polymorphic function without ..., but given function/annotation had ..."))
(unless (= (length tvars) (length fresh-ns))
(tc-error "Expected ~a type variables, but given ~a"
(length fresh-ns) (length tvars))))
(make-Poly #:original-names ns fresh-ns (extend-and-loop form fresh-ns formals bodies (ret expected*)))]
[(tc-result1: (app resolve (and t (PolyDots-names: (list ns ... dvar) expected*))))
;; make sure the declared and annotated type variable arities match up
;; with the expected type variable arity
(for ((tvars (in-list tvarss)))
(match tvars
[(list (list vars ...) dotted)
(unless (= (length vars) (length ns))
(tc-error "Expected ~a non-dotted type variables, but given ~a"
(length ns) (length vars)))]
[else
(tc-error "Expected a polymorphic function with ..., but function/annotation had no ...")]))
(make-PolyDots (append ns (list dvar)) (extend-and-loop form ns formals bodies (ret expected*)))]
[(tc-result1: (app resolve (and t (PolyRow-fresh: ns fresh-ns expected* constraints))))
(for ((tvars (in-list tvarss)))
(when (and (pair? tvars) (list? (car tvars)))
(tc-error
"Expected a polymorphic function without ..., but given function/annotation had ..."))
(unless (= (length tvars) 1)
(tc-error "Expected ~a type variable, but given ~a"
1 (length tvars))))
(make-PolyRow
#:original-names ns
fresh-ns
(extend-and-loop form fresh-ns
formals bodies (ret expected*))
constraints)]
[_
(define lengths
(remove-duplicates
(for/list ([tvars (in-list tvarss)])
(match tvars
[(list (list vars ...) dotted)
(length vars)]
[(list vars ...)
(length vars)]))))
(define dots
(remove-duplicates
(for/list ([tvars (in-list tvarss)])
(match tvars
[(list (list vars ...) dotted) #t]
[(list vars ...) #f]))))
(unless (= 1 (length lengths))
(tc-error "Expected annotations to have the same number of type variables, but given ~a"
lengths))
(unless (= 1 (length dots))
(tc-error "Expected annotations to all have ... or none to have ..., but given both"))
(define dotted (and (car dots) (second (car tvarss))))
(define ns (build-list (car lengths) (lambda (_) (gensym))))
(define results (extend-and-loop form ns formals bodies expected))
(if dotted
(make-PolyDots (append ns (list dotted)) results)
(make-Poly #:original-names (car tvarss) ns results))]))
;; typecheck a sequence of case-lambda clauses, which is possibly polymorphic
;; tc/lambda : syntax syntax-list syntax-list (or/c tc-results #f) -> tc-results
(define (tc/lambda form formals bodies expected)
(if (or (has-poly-annotation? form)
(match expected
[(tc-result1: (app resolve t)) (or (Poly? t) (PolyDots? t) (PolyRow? t))]
[_ #f]))
(ret (tc/plambda form (get-poly-tvarss form) formals bodies expected) -true-propset)
(ret (tc/mono-lambda/type formals bodies expected) -true-propset)))
;; formals : the formal arguments to the loop
;; body : a block containing the body of the loop
;; name : the name of the loop
;; args : the types of the actual arguments to the loop
ret : the expected return type of the whole expression
;; Returns both the tc-results of the function and of the body
(define (tc/rec-lambda/check formals* body name args return)
(define formals (syntax->list formals*))
(define ft (t:->* args (tc-results->values return)))
(define names (cons name formals))
(with-extended-lexical-env
[#:identifiers (cons name formals)
#:types (cons ft args)]
(values
(erase-identifiers (ret ft) names)
(erase-identifiers (tc-body/check body return) names))))
| null | https://raw.githubusercontent.com/racket/typed-racket/1dde78d165472d67ae682b68622d2b7ee3e15e1e/typed-racket-lib/typed-racket/typecheck/tc-lambda-unit.rkt | racket | rest: id or #f
syntax: syntax? - the improper syntax list of identifiers
(i.e. (append positional (or id '())) but syntax)
When expanding a keyword or optional lambda, Racket adds into the expanded
code more lambdas, where syntax objects for the original lambda's parameters
are reused. Since Typed Racket stores type information stored in the syntax
objects, when the orginal lambda is a polymorphic function, that information
might carry out-of-scope type variables. In this case, we need to remove it
from parameter syntax objects.
not-in-poly is #t if the original TR function is polymorphic.
Currently no support for objects representing the rest argument
tc-lambda-body: Typechecks the body with the given args and names
arg-names: The identifiers of the positional args
arg-types: The types of the positional args
where rest-id is the identifier of the rest arg,
have in the function body
expected: The expected type of the body forms.
body: The body of the lambda to typecheck.
shallow can trust the results of a literal lambda
check-clause: Checks that a lambda clause has arguments and body matching the expected type
arg-list: The identifiers of the positional args in the lambda form
rest-id: The identifier of the rest arg, or #f for no rest arg
body: The body of the lambda to typecheck.
arg-tys: The expected positional argument types.
rst: #f, expected rest arg Rest, or expected RestDots
Check that the number of formal arguments is valid for the expected type.
Thus it must be able to accept the number of arguments that the expected
enough arguments, or if it requires too many arguments.
This allows a form like (lambda args body) to have the type (-> Symbol
Number) with out a rest arg.
rest-body-type - the type the rest argument id has in the body
of the function
e.g. for
(define (foo . rest-strings) ...)
rest spec would be (make-Rest (list -String))
and in the body of the function, the rest argument
there's not a rest ident... easy
a dotted rest spec, so the body has a ListDots
the rest-id is dotted?, lets go get its type
otherwise let's get the sequence of types the rest argument would have
and call it 'rest-types' (i.e. in our above example 'foo', this would
be (list -String)
now that we have the list of types, we need to calculate, based on how many
positional argument identifiers there are, how the rest should look
it gets since its type is coming from the rest type) and the rest id's type
some of the args are _in_ the rst arg (i.e. they
do not have argument names) ...
there are named args whose type came from the rst argument
i.e. we need to pull there types out of the rst arg
typecheck a single lambda, with argument list and body
typecheck a single opt-lambda clause with argument list and body
when an opt-lambda with a rest parameter is expanded, the resulting
expression contains a generated lambda, which is being checked and whose
last positional parameter `new-rest` corresponds to the rest parameter of the original
function.
we use `arg-types` to make the arrow consistent with the original type
annotation, i.e. `new-rest` needs to have the same type as the original
function's the rest parameter. In order to check the function body, we
duplicate `arg-types` with the last element changed from `ty` to
restrict-to-arity : Arrow? nat -> (or/c #f Arrow?)
either produces a new arrow which is a subtype of arr with arity n,
or #f is that is not possible
currently does not handle rest arguments
restricted to ids to avoid re-typechecking
Lambda with poly dotted rest argument
Lambda with regular rest argument
Lambda with no rest argument
FIXME: should also handle polymorphic types
but we can't return anything but a (listof arr?) here
FIXME: misses optimization opportunities in this code
case-arities
a description of the supported arities for a case-lambda
we have seen thus far while checking a case-lambda (recall
that, for Function types and type checking purposes, all
functions are case-lambdas)
supported unique options so far for fixed argument counts,
where for each element n, n < rest-pos, and the list should
not contain duplicates
(NOTE: once we encounter a rest arg at position rest-pos, we
_remove_ arity counts that the rest encompasses (i.e.
when n >= rest-pos) -- see example below of
checking a case-lambda)
rest-pos : (or/c natural? +inf.0)
at what position would an argument be in the rest argument
We construct these specs _while_ we are parsing and checking
case-lambdas to help us know if a clause is dead code,
clause at, etc
e.g. after each step of looking at the below case-lambda,
we would have the following case-arity-spec (i.e. after
using `add-to-arities` to update the case-arities):
(case-lambda
= = > ( case - arities ' ( 1 ) + inf.0 )
= = > ( case - arities ' ( 1 2 ) + inf.0 )
[(x y z) ...] ; ==> (case-arities '(1 2 3) +inf.0)
= = > ( case - arities ' ( 1 2 ) 2 )
[l ...]) ; ==> (case-arities '() 0)
initially, we have seen no fixed arities and it is impossible for
an argument to be in a rest argument from a previous clause
Adds the arity described by formals 'f' to the arities
described by 'arities'. See above example near `case-arities`
definition.
Determines if the given formals would be
covered by a supported arity in arities
from 'arrows' that could apply to a clause with formals 'f', given we
have already seen case-arities 'seen'.
For each clause (i.e. each elem in formals+bodies) we figure out which
of the expected arrows it needs to type check at and which clauses
are dead code.
Returns the association list mapping clauses to the arrows they need
to type check at.
arities we have seen so far while checking case-lambda clauses
formals+bodies : formals and bodies to check
expected-arrows : expected arrow types for the overall case-lambda
orig-arrows : an association list recording if any formals and bodies
If the empty function is expected, then don't error out
NOTE!!! checking clauses against all applicable arrows is sound, but
less complete than actually intersecting all of the arrow types and
then checking clauses against the result
if this clause has an orig-arrow, we already checked it once and that
was it's arrow type -- we don't want to check it again at the same arrow
typecheck a sequence of case-lambda clauses
with no expected type and then use check-mono-lambda/expected
to perform any re-checking that is needed
arities we have seen so far while checking case-lambda clauses
clauses that are unreachable because of the formals and ordering
of the case-lambda clauses
now we typecheck the bodies, recording their arrows (resulting-arrows)
all while updating which arities we have seen and which, if any, case-lambda
clauses are in fact unreachable
to recheck some clauses against some of the arrows generated
during checking for soundness sake,
e.g.
if we naively check (case-lambda
which is unsound (i.e. we can upcast an intersection to either
it as the identity function on any number, which does not
always produce the constant 0). In other words, our `case->`
is really an UNORDERED intersection that we just don't work
super hard to check function application with, it is not
truly an ordered intersection, and thus if some function `f`
has type `A ∧ B` it must be checked at both `A` and `B`.
tc/plambda syntax tvarss-list syntax-list syntax-list type -> Poly
formals and bodies must by syntax-lists
check the bodies appropriately
and make both annotated and declared type variables point to the
same actual type variables (the fresh names)
make sure the declared and annotated type variable arities match up
with the expected type variable arity
make sure the declared and annotated type variable arities match up
with the expected type variable arity
typecheck a sequence of case-lambda clauses, which is possibly polymorphic
tc/lambda : syntax syntax-list syntax-list (or/c tc-results #f) -> tc-results
formals : the formal arguments to the loop
body : a block containing the body of the loop
name : the name of the loop
args : the types of the actual arguments to the loop
Returns both the tc-results of the function and of the body | #lang racket/unit
(require "../utils/utils.rkt"
"../utils/plambda-utils.rkt"
racket/list syntax/parse syntax/stx
racket/match syntax/private/id-table
racket/sequence
(contract-req)
"../rep/type-rep.rkt"
"../rep/object-rep.rkt"
"../rep/rep-utils.rkt"
(rename-in (combine-in "../types/abbrev.rkt"
"../types/utils.rkt")
[-> t:->]
[->* t:->*]
[one-of/c t:one-of/c])
"../private/type-annotation.rkt"
"../private/syntax-properties.rkt"
"../types/resolve.rkt"
"../types/type-table.rkt"
"signatures.rkt"
"tc-metafunctions.rkt"
"tc-subst.rkt"
"../env/lexical-env.rkt"
"../env/tvar-env.rkt"
"../env/index-env.rkt"
"../env/scoped-tvar-env.rkt"
"../utils/tc-utils.rkt"
(for-template racket/base)
(for-syntax racket/base))
(import tc-expr^)
(export tc-lambda^)
(define-syntax-class cl-rhs
#:literal-sets (kernel-literals)
#:attributes (i cond)
[pattern i:id #:attr cond #f]
[pattern (if cond:expr e:expr i:id)])
(define-syntax-class rebuild-let*
#:literal-sets (kernel-literals)
#:attributes (mapping)
(pattern (#%expression :rebuild-let*))
(pattern (let-values ([(new-id) e:cl-rhs]) body:rebuild-let*)
#:attr mapping (free-id-table-set (attribute body.mapping) #'e.i #'new-id))
(pattern body:expr
#:attr mapping (make-immutable-free-id-table)))
;; positional : (listof identifier?)
(struct formals (positional rest syntax) #:transparent)
;; make-formals : parse a formals syntax object (a proper or improper list
;; of identifiers) into a `formals` struct of positional args plus an
;; optional rest arg. Per the original sources, when Racket expands a
;; keyword/optional lambda it reuses the original parameters' syntax
;; objects, which may carry 'type-label annotations mentioning type
;; variables that are out of scope here; `not-in-poly` is #t when the
;; original TR function is polymorphic, and in that case the stale
;; annotation is stripped.
(define (make-formals stx [not-in-poly #t])
;; drop a stale type annotation from an identifier reused by the expander
(define (maybe-remove a)
(if (and not-in-poly (from-plambda-property a))
(syntax-property-remove a 'type-label)
a))
;; walk the (possibly improper, possibly syntax-wrapped) formals list,
;; accumulating positional identifiers in reverse order
(let loop ([s stx] [acc null])
(cond
[(pair? s) (loop (cdr s) (cons (maybe-remove (car s)) acc))]
[(null? s) (formals (reverse acc) #f stx)]
[(pair? (syntax-e s)) (loop (stx-cdr s) (cons (maybe-remove (stx-car s)) acc))]
[(null? (syntax-e s)) (formals (reverse acc) #f stx)]
;; an improper tail is the rest argument
[else (formals (reverse acc) (maybe-remove s) stx)])))
;; formals->objects : build, for each positional argument identifier of
;; formals struct `f`, the object naming it (an empty access path rooted
;; at that identifier).
(define (formals->objects f)
  (map (lambda (arg-id) (make-Path null arg-id))
       (formals-positional f)))
;; expected-str : render the arity-mismatch error message, comparing the
;; expected argument count / rest arg against the actual lambda's.
(define (expected-str tys-len rst arg-len rest-id)
  ;; "" or "s", to pluralize "argument"
  (define (plural n) (if (= n 1) "" "s"))
  ;; mention a rest arg only when one is present/expected
  (define (rest-note r) (if r " and a rest arg" ""))
  (format "Expected function with ~a argument~a~a, but got function with ~a argument~a~a"
          tys-len (plural tys-len) (rest-note rst)
          arg-len (plural arg-len) (rest-note rest-id)))
and returns the resulting Arrow ? .
rest - arg+type : Either # f for no rest argument or ( list rest - id rest - type ( rest - type ) )
and ( ListOf rest - type ) is the type that identifier would
(define/cond-contract
(tc-lambda-body arg-names arg-types
#:rest-id+type+body-type [rest-id+type+body-type #f]
#:expected [expected #f] body)
(->* ((listof identifier?) (listof Type?) syntax?)
(#:rest-id+type+body-type (or/c #f (list/c identifier? (or/c Rest? RestDots?) Type?))
#:expected (or/c #f tc-results/c))
Arrow?)
(define-values (rst-id rst-type names types)
(match rest-id+type+body-type
[(list id rst body-type)
(values id rst (cons id arg-names) (cons body-type arg-types))]
[_ (values #f #f arg-names arg-types)]))
(-Arrow
arg-types
(abstract-results
(with-extended-lexical-env
[#:identifiers names
#:types types]
(tc-body/check body expected))
arg-names #:rest-id rst-id)
#:rest rst-type))
ret - ty : The expected type of the body of the lambda .
(define/cond-contract (check-clause arg-list rest-id body arg-tys rst ret-ty)
((listof identifier?)
(or/c #f identifier?) syntax? (listof Type?) (or/c #f Rest? RestDots?)
tc-results/c
. -> .
Arrow?)
(let* ([arg-len (length arg-list)]
[arg-tys-len (length arg-tys)]
[extra-arg-count (- arg-len arg-tys-len)]
[arg-types
(cond
[(andmap type-annotation arg-list)
(get-types arg-list #:default Univ)]
[(zero? extra-arg-count)
(map (lambda (a t) (get-type a #:default t)) arg-list arg-tys)]
[(negative? extra-arg-count) (take arg-tys arg-len)]
[else
(define tail-tys (match rst
[(Rest: rst-tys)
(define rst-len (length rst-tys))
(for/list ([_ (in-range extra-arg-count)]
[rst-t (in-list-cycle rst-tys)])
rst-t)]
[_ (for/list ([_ (in-range extra-arg-count)])
-Bottom)]))
(append arg-tys tail-tys)])])
type has . So we check for two cases : if the function does n't accept
(when (or (and (< arg-len arg-tys-len) (not rest-id))
(and (> arg-len arg-tys-len) (not rst)))
(tc-error/delayed (expected-str arg-tys-len rst arg-len rest-id)))
rst - type - the type of the rest argument in the Arrow type
(: foo ( - > * ( ) ( ) # : rest String Number ) )
the caller can provide 0 or more Strings , so the Arrow 's
identifier ( rest - strings ) have type ( )
(define-values (rst-type rest-body-type)
(cond
[(not rest-id) (values #f #f)]
[(RestDots? rst)
(match-define (RestDots: dty dbound) rst)
(values rst (make-ListDots dty dbound))]
[(dotted? rest-id)
=> (λ (dbound)
(define ty (extend-tvars (list dbound) (get-type rest-id #:default Univ)))
(values (make-RestDots ty dbound)
(make-ListDots ty dbound)))]
[else
(define rest-types
(cond
[(type-annotation rest-id) (list (get-type rest-id #:default Univ))]
[else
(match rst
[#f (list -Bottom)]
[(? Type? t) (list t)]
[(Rest: rst-ts) rst-ts]
[_ (list Univ)])]))
i.e. if our rest was ( ) * ( i.e. an even length rest arg of numbers
followed by strings ) and there was 1 more positional argument that positional
domain types , then that extra positional arg would be type ( i.e. the type
in the body of the function would ( Pair Str ( ) * ) ( i.e. the rest arg
would _ have _ to have a in it , and then would have 0 or more
(cond
[(= arg-len arg-tys-len)
(values (make-Rest rest-types)
(make-CyclicListof rest-types))]
[(<= arg-len arg-tys-len)
(define extra-types (drop arg-tys arg-len))
(define rst-type (apply Un (append extra-types rest-types)))
(values (make-Rest (list rst-type))
(make-Listof rst-type))]
[else
(define rest-remainder (drop rest-types (remainder extra-arg-count
(length rest-types))))
(values (make-Rest rest-types)
(-Tuple* rest-remainder
(make-CyclicListof rest-types)))])]))
(tc-lambda-body
arg-list
arg-types
#:rest-id+type+body-type (and rst-type (list rest-id rst-type rest-body-type))
#:expected ret-ty
body)))
drest - ty and drest - bound are both false or not false
(define/cond-contract (tc/lambda-clause/check f body arg-tys ret-ty rst)
(-> formals?
syntax?
(listof Type?)
(or/c tc-results/c #f)
(or/c #f Rest? RestDots?)
Arrow?)
(check-clause (formals-positional f)
(formals-rest f)
body
arg-tys
rst
ret-ty))
(define/cond-contract (tc/opt-lambda-clause arg-list body aux-table)
(-> (listof identifier?) syntax? free-id-table?
(listof Arrow?))
arg - types : Listof[Type ? ]
(define arg-types
(for/list ([a (in-list arg-list)])
(get-type a #:default (lambda ()
(define id (free-id-table-ref aux-table a #f))
(cond
[id
(define ty (get-type id #:default Univ))
(if (optional-non-immediate-arg-property id)
(Un -Unsafe-Undefined ty)
ty)]
[else Univ])))))
(list
(match (and (not (null? arg-list))
(free-id-table-ref aux-table (last arg-list) #f))
[id #:when (and id (rst-arg-property id))
` ( )
(Arrow-update
(tc-lambda-body arg-list
(list-update arg-types (sub1 (length
arg-types))
(lambda (ty) (make-Listof ty)))
body)
dom
(lambda (dom)
arg-types))]
[_ (tc-lambda-body arg-list arg-types body)])))
;; restrict-Arrow-to-arity : Arrow? nat -> (or/c #f Arrow?)
;; Return `arrow` itself when it is a plain arrow — no rest arg, no keyword
;; args — of exactly arity n; otherwise #f. Per the original sources, this
;; produces an arrow that is a subtype of `arrow` with arity n when possible;
;; rest arguments are not currently handled.
(define (restrict-Arrow-to-arity arrow n)
(match arrow
[(Arrow: args #f '() _)
#:when (= n (length args))
arrow]
[_ #f]))
(define/cond-contract (tc/lambda-clause f body)
(-> formals? syntax? (listof Arrow?))
(define aux-table
(syntax-parse body
[(b:rebuild-let*) (values (attribute b.mapping))]
[_ (make-immutable-free-id-table)]))
(define arg-list (formals-positional f))
(define rest-id (formals-rest f))
(define eta-expanded?
(syntax-parse body
#:when (equal? (length arg-list)
(length (syntax->list #'(j ...))))
#:when (andmap free-identifier=? arg-list (syntax->list #'(j ...)))
#'fun]
[_ #f]))
(cond
[(and (> (free-id-table-count aux-table) 0) (not rest-id))
(tc/opt-lambda-clause arg-list body aux-table)]
[else
(define arg-types (get-types arg-list #:default (lambda () #f)))
(define rest-type
(cond
[(and rest-id (dotted? rest-id))
=>
(λ (bound)
(unless (bound-index? bound)
(if (bound-tvar? bound)
(tc-error "Bound on ... type (~a) is not an appropriate type variable" bound)
(tc-error/stx rest-id "Bound on ... type (~a) was not in scope" bound)))
(make-RestDots (extend-tvars (list bound) (get-type rest-id #:default Univ))
bound))]
[rest-id (match (get-type rest-id #:default Univ)
[(? Type? t) (make-Rest (list t))]
[(? Rest? rst) rst]
[(? RestDots? rst) rst])]
[else #f]))
(cond
special case for un - annotated eta - expansions
[(and eta-expanded? (not rest-id) (andmap not arg-types)
(match (tc-expr eta-expanded?)
[(tc-result1: (Fun: arrows))
(define possibles
(for*/list ([arrow (in-list arrows)]
[restricted (in-value (restrict-Arrow-to-arity
arrow
(length arg-list)))]
#:when restricted)
restricted))
(if (null? possibles)
#f
possibles)]
[_ #f]))
=>
(lambda (x)
(register-ignored! (car (syntax-e body)))
x)]
[else
(define rest-body-type
(match rest-type
[#f #f]
[(Rest: ts) (make-CyclicListof ts)]
[(RestDots: dty dbound) (make-ListDots dty dbound)]))
(list
(tc-lambda-body
arg-list
(map (λ (v) (or v Univ)) arg-types)
#:rest-id+type+body-type (and rest-type (list rest-id rest-type rest-body-type))
body))])]))
fixed - arities : ( listof natural ? )
which Arrow types we should type check a particular case - lambda
(struct case-arities (fixed-arities rest-pos) #:transparent)
(define initial-case-arities (case-arities '() +inf.0))
;; add-to-arities : fold the arity described by formals `f` into the
;; case-arities summary `arities` (the arities covered by the case-lambda
;; clauses seen so far).
(define/cond-contract (add-to-arities arities f)
(-> case-arities? formals? case-arities?)
(match* (arities f)
[((case-arities fixed-arities rest-pos)
(formals positional rst _))
(define arity (length positional))
;; a rest arg at position `arity` covers every argument count >= arity
(define new-rest-pos
(if rst (min rest-pos arity) rest-pos))
;; keep only the fixed arities not already subsumed by the rest position
(define new-fixed-arities
(cond
[(eqv? +inf.0 new-rest-pos) (cons arity fixed-arities)]
[else (for/list ([i (in-list (cons arity fixed-arities))]
#:when (< i new-rest-pos))
i)]))
(case-arities new-fixed-arities new-rest-pos)]))
(define/cond-contract (in-arities? arities f-or-arrow)
(-> case-arities? (or/c formals? Arrow?) boolean?)
(match* (arities f-or-arrow)
[((case-arities fixed-arities rest-pos)
(or (formals (app length arity) rst _)
(Arrow: (app length arity) rst _ _)))
(or (>= arity rest-pos)
(and (not rst) (memv arity fixed-arities) #t))]))
Returns a list of Arrows where the list contains all the valid Arrows
(define/cond-contract (arrows-matching-seen+formals arrows seen f)
(-> (listof Arrow?) case-arities? formals? (listof Arrow?))
(match-define (formals formals-positionals formals-rest? _) f)
(define pos-count (length formals-positionals))
(for*/list ([arrow (in-list arrows)]
[dom (in-value (Arrow-dom arrow))]
[rst (in-value (Arrow-rst arrow))]
#:unless (in-arities? seen arrow)
#:when (cond
[formals-rest?
(or (Rest? rst) (>= (length dom) pos-count))]
[rst (<= (length dom) pos-count)]
[else (= (length dom) pos-count)]))
arrow))
(define/cond-contract (create-to-check-list formals+bodies expected-arrows)
(-> (listof (cons/c formals? syntax?))
(listof Arrow?)
(listof (cons/c (cons/c formals? syntax?)
(listof Arrow?))))
(define seen initial-case-arities)
(for*/list ([f+b (in-list formals+bodies)]
[clause-formals (in-value (car f+b))]
[clause-body (in-value (cdr f+b))])
(define matching-arrows
(arrows-matching-seen+formals expected-arrows
seen
clause-formals))
(when (or (in-arities? seen clause-formals)
(null? matching-arrows))
(warn-unreachable clause-body)
(add-dead-lambda-branch (formals-syntax clause-formals)))
(set! seen (add-to-arities seen clause-formals))
(cons f+b matching-arrows)))
have _ already _ been checked at a certain Arrow type
(define/cond-contract
(check-mono-lambda/expected formals+bodies expected-arrows orig-arrows)
(-> (listof (cons/c formals? syntax?))
(listof Arrow?)
(listof (cons/c (cons/c formals? syntax?)
(listof Arrow?)))
(listof Arrow?))
(define to-check-list (create-to-check-list formals+bodies expected-arrows))
(cond
[(and (andmap (λ (f+b+arrows) (null? (cdr f+b+arrows)))
to-check-list)
(not (null? expected-arrows)))
TODO improve error message .
(tc-error/expr #:return (list (-Arrow null -Bottom #:rest Univ))
"Expected a function of type ~a, but got a function with the wrong arity"
(make-Fun expected-arrows))]
[else
(for*/list ([(f+b arrows-to-check-against) (in-assoc to-check-list)]
[clause-formals (in-value (car f+b))]
[clause-body (in-value (cdr f+b))]
[orig-arrows (in-value (assoc-ref orig-arrows f+b '()))]
[arrow (in-list arrows-to-check-against)])
(match arrow
[_ #:when (member arrow orig-arrows) arrow]
[(Arrow: dom rst '() rng)
(define expected
(values->tc-results rng (formals->objects clause-formals)))
(tc/lambda-clause/check
clause-formals clause-body dom expected rst)]))]))
(define/cond-contract (tc/mono-lambda formals+bodies expected)
(-> (listof (cons/c formals? syntax?))
(or/c #f tc-results/c)
(listof Arrow?))
(define expected-arrows
(match expected
[(tc-result1: t)
(define resolved (resolve t))
(match resolved
[(Fun: arrows)
#:when (for/and ([arr (in-list arrows)])
(null? (Arrow-kws arr)))
arrows]
[_ #f])]
[_ #f]))
(cond
[expected-arrows
if we have expected Arrows , proceed with checking against them
(check-mono-lambda/expected formals+bodies expected-arrows '())]
[else
if we do n't have expected Arrows , we may need to re - check some
of the bodies against Arrow types derived while checking the
bodies for soundness sake , so first we will check their bodies
(define seen initial-case-arities)
(define unreachable-clauses '())
and the mapping of which formals+body produced which Arrow ( already - checked ) ,
(define-values (resulting-arrowss already-checked)
(for*/lists (_1 _2)
([f+b (in-list formals+bodies)]
[f (in-value (car f+b))]
[b (in-value (cdr f+b))]
#:unless (let ([unreachable? (in-arities? seen f)])
(when unreachable?
(warn-unreachable b)
(add-dead-lambda-branch (formals-syntax f))
(set! unreachable-clauses
(cons f+b unreachable-clauses)))
unreachable?))
(set! seen (add-to-arities seen f))
(define resulting-arrow (tc/lambda-clause f b))
(values resulting-arrow
(cons f+b resulting-arrow))))
(define resulting-arrows (apply append resulting-arrowss))
if there was more than one live case - lambda clause , we may need
[ ( [ x : ] . [ rst : * ] ) x ]
[ [ rst : * ] 0 ]
we get ( case- > ( - > Num Num * )
( - > Num * Zero ) )
type , namely in this case to ( - > Num * Zero ) , and then call
(cond
[(> (- (length formals+bodies)
(length unreachable-clauses))
1)
(check-mono-lambda/expected (remove* unreachable-clauses
formals+bodies)
resulting-arrows
already-checked)]
[else
resulting-arrows])]))
(define (tc/dep-lambda formalss-stx bodies-stx dep-fun-ty)
(parameterize ([with-refinements? #t])
(match-define (DepFun: raw-dom raw-pre raw-rng) dep-fun-ty)
(define formalss (stx-map make-formals formalss-stx))
(define bodies (syntax->list bodies-stx))
(match* (formalss bodies)
[((list fs) (list body))
(cond
[(not (= (length (formals-positional fs))
(length raw-dom)))
(tc-error/expr #:return dep-fun-ty
(format "Expected ~a positional arguments, given ~a."
(length raw-dom)
(length (formals-positional fs))))]
[(formals-rest fs)
(tc-error/expr #:return dep-fun-ty
"Dependent functions do not currently support rest arguments.")]
[else
(define arg-names (formals-positional fs))
(define dom (for/list ([d (in-list raw-dom)])
(instantiate-obj d arg-names)))
(define pre (instantiate-obj raw-pre arg-names))
(with-naively-extended-lexical-env
[#:identifiers arg-names
#:types dom
#:props (list pre)]
(tc-body/check body (values->tc-results raw-rng (map -id-path arg-names))))
dep-fun-ty])]
[(fs bs)
(tc-error/expr #:return dep-fun-ty
"Dependent functions must have a single arity.")])))
(define (tc/mono-lambda/type formalss bodies expected [not-in-poly #t])
(match expected
[(tc-result1:(? DepFun? dep-fun-ty))
(tc/dep-lambda formalss bodies dep-fun-ty)]
[_ (make-Fun
(tc/mono-lambda
(for/list ([f (in-syntax formalss)]
[b (in-syntax bodies)])
(cons (make-formals f not-in-poly) b))
expected))]))
(define/cond-contract (tc/plambda form tvarss-list formals bodies expected)
(syntax? (listof list?) syntax? syntax? (or/c tc-results/c #f) . -> . Type?)
(define/cond-contract (maybe-loop form formals bodies expected)
(syntax? syntax? syntax? (or/c tc-results/c #f) . -> . Type?)
(match expected
[(tc-result1: (app resolve (or (? Poly?) (? PolyDots?) (? PolyRow?))))
(tc/plambda form (remove-poly-layer tvarss-list) formals bodies expected)]
[_
(define remaining-layers (remove-poly-layer tvarss-list))
(if (null? remaining-layers)
(tc/mono-lambda/type formals bodies expected #f)
(tc/plambda form remaining-layers formals bodies expected))]))
(define (extend-and-loop form ns formals bodies expected)
(let loop ([tvarss tvarss])
(match tvarss
[(list) (maybe-loop form formals bodies expected)]
[(cons (list (list tvars ...) dotted) rest-tvarss)
(extend-indexes dotted
(extend-tvars/new tvars ns
(loop rest-tvarss)))]
[(cons tvars rest-tvarss)
(extend-tvars/new tvars ns
(loop rest-tvarss))])))
(define tvarss (get-poly-layer tvarss-list))
(match expected
[(tc-result1: (app resolve (and t (Poly-fresh: ns fresh-ns expected*))))
(for ([tvars (in-list tvarss)])
(when (and (pair? tvars) (list? (car tvars)))
(tc-error
"Expected a polymorphic function without ..., but given function/annotation had ..."))
(unless (= (length tvars) (length fresh-ns))
(tc-error "Expected ~a type variables, but given ~a"
(length fresh-ns) (length tvars))))
(make-Poly #:original-names ns fresh-ns (extend-and-loop form fresh-ns formals bodies (ret expected*)))]
[(tc-result1: (app resolve (and t (PolyDots-names: (list ns ... dvar) expected*))))
(for ((tvars (in-list tvarss)))
(match tvars
[(list (list vars ...) dotted)
(unless (= (length vars) (length ns))
(tc-error "Expected ~a non-dotted type variables, but given ~a"
(length ns) (length vars)))]
[else
(tc-error "Expected a polymorphic function with ..., but function/annotation had no ...")]))
(make-PolyDots (append ns (list dvar)) (extend-and-loop form ns formals bodies (ret expected*)))]
[(tc-result1: (app resolve (and t (PolyRow-fresh: ns fresh-ns expected* constraints))))
(for ((tvars (in-list tvarss)))
(when (and (pair? tvars) (list? (car tvars)))
(tc-error
"Expected a polymorphic function without ..., but given function/annotation had ..."))
(unless (= (length tvars) 1)
(tc-error "Expected ~a type variable, but given ~a"
1 (length tvars))))
(make-PolyRow
#:original-names ns
fresh-ns
(extend-and-loop form fresh-ns
formals bodies (ret expected*))
constraints)]
[_
(define lengths
(remove-duplicates
(for/list ([tvars (in-list tvarss)])
(match tvars
[(list (list vars ...) dotted)
(length vars)]
[(list vars ...)
(length vars)]))))
(define dots
(remove-duplicates
(for/list ([tvars (in-list tvarss)])
(match tvars
[(list (list vars ...) dotted) #t]
[(list vars ...) #f]))))
(unless (= 1 (length lengths))
(tc-error "Expected annotations to have the same number of type variables, but given ~a"
lengths))
(unless (= 1 (length dots))
(tc-error "Expected annotations to all have ... or none to have ..., but given both"))
(define dotted (and (car dots) (second (car tvarss))))
(define ns (build-list (car lengths) (lambda (_) (gensym))))
(define results (extend-and-loop form ns formals bodies expected))
(if dotted
(make-PolyDots (append ns (list dotted)) results)
(make-Poly #:original-names (car tvarss) ns results))]))
(define (tc/lambda form formals bodies expected)
(if (or (has-poly-annotation? form)
(match expected
[(tc-result1: (app resolve t)) (or (Poly? t) (PolyDots? t) (PolyRow? t))]
[_ #f]))
(ret (tc/plambda form (get-poly-tvarss form) formals bodies expected) -true-propset)
(ret (tc/mono-lambda/type formals bodies expected) -true-propset)))
ret : the expected return type of the whole expression
(define (tc/rec-lambda/check formals* body name args return)
(define formals (syntax->list formals*))
(define ft (t:->* args (tc-results->values return)))
(define names (cons name formals))
(with-extended-lexical-env
[#:identifiers (cons name formals)
#:types (cons ft args)]
(values
(erase-identifiers (ret ft) names)
(erase-identifiers (tc-body/check body return) names))))
|
46402dea8e3c75a1278a89e4302bbcba46fc0ff3b64fd65db94c2febd54f039a | rd--/hsc3 | Command.hs | -- | Collection of standard /command/ modules.
module Sound.Sc3.Server.Command (module S) where
import Sound.Sc3.Server.Command.Enum as S
import Sound.Sc3.Server.Command.Plain as S
| null | https://raw.githubusercontent.com/rd--/hsc3/7dc748106639999947548d0b3205a468cfc55fed/Sound/Sc3/Server/Command.hs | haskell | | Collection of standard /command/ modules. | module Sound.Sc3.Server.Command (module S) where
import Sound.Sc3.Server.Command.Enum as S
import Sound.Sc3.Server.Command.Plain as S
|
97b847294e1b2c5c0d7fa46ae113ac87815ec4fe601c07aecf9bbb31f05820c2 | racket/eopl | interp-tests.rkt | #lang eopl
(require eopl/tests/private/utils)
(require "data-structures.rkt") ; for expval constructors
(require "interp.rkt") ; for value-of-program
(require "cps-out-lang.rkt") ; for cps-program->string, cps-out-scan&parse
(require (only-in racket pretty-print))
(define instrument-cps (make-parameter #f))
;; run : String -> ExpVal
;; Parse a CPS-output-language program string and evaluate it to an
;; expressed value.
(define run
(lambda (string)
(value-of-program (cps-out-scan&parse string))))
;; equal-answer? : does the interpreter result `actual` match the expected
;; sloppy (bare Scheme number/boolean) answer, once the latter is wrapped
;; as an expressed value?
(define equal-answer?
  (lambda (actual expected)
    (equal? actual (sloppy->expval expected))))
;; sloppy->expval : wrap a bare Scheme number or boolean as the
;; corresponding expressed value; reject any other value with an error.
(define sloppy->expval
  (lambda (v)
    (cond
      ((boolean? v) (bool-val v))
      ((number? v) (num-val v))
      (else
       (eopl:error 'sloppy->expval
                   "Can't convert sloppy value to expval: ~s"
                   v)))))
;; check-run : test macro. For each (name str res) clause: run the program
;; text `str`; when res is the literal symbol `error`, the run must raise
;; an exception, otherwise the result must satisfy equal-answer? against
;; the quoted expected sloppy value, reported under `name`.
(define-syntax-rule (check-run (name str res) ...)
(begin
(cond [(eqv? 'res 'error)
(check-exn always? (lambda () (run str)))]
[else
(check equal-answer? (run str) 'res (symbol->string 'name))])
...))
;; this consists entirely of expressions that are already in cps.
;; exercise: for each expression that is marked "not in cps",
;; explain why it is not cps.
;;;;;;;;;;;;;;;; tests ;;;;;;;;;;;;;;;;
(check-run
;; simple arithmetic
(positive-const "11" 11)
(negative-const "-33" -33)
(simple-arith-1 "-(44,33)" 11)
;; nested arithmetic
(nested-arith-left "-(-(44,33),22)" -11)
(nested-arith-right "-(55, -(22,11))" 44)
(cps-nested-arith-left "let x = -(44,33) in -(x,22)" -11)
(cps-nested-arith-right "let y = -(22,11) in -(55, y)" 44)
;; simple variables
(test-var-1 "x" 10)
(test-var-2 "-(x,1)" 9)
(test-var-3 "-(1,x)" -9)
;; simple unbound variables
(test-unbound-var-1 "foo" error)
(test-unbound-var-2 "-(x,foo)" error)
;; simple conditionals
(if-true "if zero?(0) then 3 else 4" 3)
(if-false "if zero?(1) then 3 else 4" 4)
;; test dynamic typechecking
(no-bool-to-diff-1 "-(zero?(0),1)" error)
(no-bool-to-diff-2 "-(1,zero?(0))" error)
(no-int-to-if "if 1 then 2 else 3" error)
;; make sure that the test and both arms get evaluated
;; properly.
(if-eval-test-true "let x = -(11,11) in if zero?(x) then 3 else 4" 3)
(if-eval-test-false "let x = -(11,12)in if zero?(x) then 3 else 4" 4)
;; and make sure the other arm doesn't get evaluated.
(if-eval-test-true-2 "if zero?(-(11, 11)) then 3 else foo" 3)
(if-eval-test-false-2 "if zero?(-(11,12)) then foo else 4" 4)
;; simple let
(simple-let-1 "let x = 3 in x" 3)
make sure the body and rhs get evaluated
(eval-let-body "let x = 3 in -(x,1)" 2)
(eval-let-rhs "let x = -(4,1) in -(x,1)" 2)
;; check nested let and shadowing
(simple-nested-let "let x = 3 in let y = 4 in -(x,y)" -1)
(check-shadowing-in-body "let x = 3 in let x = 4 in x" 4)
(check-shadowing-in-rhs "let x = 3 in let x = -(x,1) in x" 2)
;; simple applications
(apply-proc-in-rator-pos "(proc(x) -(x,1) 30)" 29)
(apply-simple-proc "let f = proc (x) -(x,1) in (f 30)" 29)
(let-to-proc-1 "(proc(f)(f 30) proc(x)-(x,1))" 29)
( nested - procs - not - in - cps " ( ( proc ( x ) proc ( y ) -(x , y ) 5 ) 6 ) " -1 )
(nested-procs-in-tf "(proc (x y) -(x,y) 5 6)" -1)
(nested-procs2 "let f = proc(x y) -(x,y) in (f -(10,5) 6)"
-1)
;; (y-combinator-1-not-in-tf "
;; let fix = proc (f)
;; let d = proc (x) proc (z) ((f (x x)) z)
;; in proc (n) ((f (d d)) n)
;; in let
t4 m = proc ( f ) proc(x ) if ) then 0 else -((f -(x,1)),-4 )
in let = ( fix m )
in ( times4 3 ) " 12 )
;; ;; this one is not in cps
;; (twice "
;; (proc (twice)
( ( twice proc ( z ) -(z,1 ) ) 11 )
;; proc (f) proc (x) (f (f x)))"
9 )
(twice-in-cps "
let twice = proc(f x k)
(f x proc (z) (f z k))
in (twice
proc (x k) (k -(x,1))
11
proc(z) z)"
9)
(cps-both-simple "
let f = proc (x) -(x,1)
in (f 27)"
26)
(sum-test-1 "+()" 0)
(sum-test-2 "+(2,3,4)" 9)
(letrec-test-1 "letrec f(x) = 17 in 34" 34)
(letrec-test-2 "letrec f(x y) = -(x,y) in -(34, 2)" 32)
(letrec-test-3 "
letrec even(x) = if zero?(x) then zero?(0) else (odd -(x,1))
odd (x) = if zero?(x) then zero?(1) else (even -(x,1))
in (even 5)"
#f)
;; not in cps
;; (cps-simple-rator "
;; let f = proc (x) -(x,1)
;; in (f (f 27))"
25 )
;; (cps-simple-rand "
;; let f = proc (x) proc (y) -(x,y)
in ( ( f 27 ) 4 ) "
23 )
;; (cps-neither-simple "
;; let f = proc (x) proc (y) -(x, y)
in let g = proc ( z ) -(z , 1 )
;; in ((f 27) (g 11))"
17 )
( cps - serious - zero - test "
let f = proc ( x ) -(x , 1 )
in if zero?((f 1 ) ) then 11 else 22 "
11 )
(print-test-1
"let x = 3 in printk(-(x,1)); 33"
33)
(store-test-0
"newrefk(33, proc (loc1) 44)"
44)
(store-test-1
"newrefk(33, proc (loc1)
newrefk(44, proc (loc2)
derefk(loc1, proc(ans)ans)))"
33)
(store-test-2 "
newrefk(33, proc (loc1)
newrefk(44, proc (loc2)
setrefk(loc1, 22);
derefk(loc1, proc(ans)ans)))"
22)
(store-test-2a "
newrefk(33, proc (loc1)
newrefk(44, proc (loc2)
setrefk(loc1, 22);
derefk(loc1, proc (ans) -(ans,1))))"
21)
(store-test-3 "
newrefk(33, proc (loc1)
newrefk(44, proc (loc2)
setrefk(loc2, 22);
derefk(loc1, proc(ans)ans)))"
33)
(gensym-cps "
newrefk(0,
proc(ctr)
let g = proc(k) derefk(ctr,
proc(v) setrefk(ctr, -(v,-1)); (k v))
in (g
proc (x) (g
proc (y) -(x,y))))"
-1)
in the example above , ctr is public . Here it is local .
(gensym-cps-2 "
let makeg = proc (k1)
newrefk(0, proc (ctr)
(k1 proc (k)
derefk(ctr,
proc (v)
setrefk(ctr,-(v,-1));(k v))))
in (makeg
proc(g)
(g
proc (x) (g
proc (y) -(x,y))))"
-1)
)
| null | https://raw.githubusercontent.com/racket/eopl/43575d6e95dc34ca6e49b305180f696565e16e0f/tests/chapter6/cps-side-effects-lang/interp-tests.rkt | racket | for expval constructors
for value-of-program
for cps-program->string, cps-out-scan&parse
this consists entirely of expressions that are already in cps.
exercise: for each expression that is marked "not in cps",
explain why it is not cps.
tests ;;;;;;;;;;;;;;;;
simple arithmetic
nested arithmetic
simple variables
simple unbound variables
simple conditionals
test dynamic typechecking
make sure that the test and both arms get evaluated
properly.
and make sure the other arm doesn't get evaluated.
simple let
check nested let and shadowing
simple applications
(y-combinator-1-not-in-tf "
let fix = proc (f)
let d = proc (x) proc (z) ((f (x x)) z)
in proc (n) ((f (d d)) n)
in let
;; this one is not in cps
(twice "
(proc (twice)
proc (f) proc (x) (f (f x)))"
not in cps
(cps-simple-rator "
let f = proc (x) -(x,1)
in (f (f 27))"
(cps-simple-rand "
let f = proc (x) proc (y) -(x,y)
(cps-neither-simple "
let f = proc (x) proc (y) -(x, y)
in ((f 27) (g 11))"
(k v))
(k v)))) | #lang eopl
(require eopl/tests/private/utils)
(require (only-in racket pretty-print))
(define instrument-cps (make-parameter #f))
run : String - > ExpVal
(define run
(lambda (string)
(value-of-program (cps-out-scan&parse string))))
(define equal-answer?
(lambda (ans correct-ans)
(equal? ans (sloppy->expval correct-ans))))
(define sloppy->expval
(lambda (sloppy-val)
(cond
((number? sloppy-val) (num-val sloppy-val))
((boolean? sloppy-val) (bool-val sloppy-val))
(else
(eopl:error 'sloppy->expval
"Can't convert sloppy value to expval: ~s"
sloppy-val)))))
(define-syntax-rule (check-run (name str res) ...)
(begin
(cond [(eqv? 'res 'error)
(check-exn always? (lambda () (run str)))]
[else
(check equal-answer? (run str) 'res (symbol->string 'name))])
...))
(check-run
(positive-const "11" 11)
(negative-const "-33" -33)
(simple-arith-1 "-(44,33)" 11)
(nested-arith-left "-(-(44,33),22)" -11)
(nested-arith-right "-(55, -(22,11))" 44)
(cps-nested-arith-left "let x = -(44,33) in -(x,22)" -11)
(cps-nested-arith-right "let y = -(22,11) in -(55, y)" 44)
(test-var-1 "x" 10)
(test-var-2 "-(x,1)" 9)
(test-var-3 "-(1,x)" -9)
(test-unbound-var-1 "foo" error)
(test-unbound-var-2 "-(x,foo)" error)
(if-true "if zero?(0) then 3 else 4" 3)
(if-false "if zero?(1) then 3 else 4" 4)
(no-bool-to-diff-1 "-(zero?(0),1)" error)
(no-bool-to-diff-2 "-(1,zero?(0))" error)
(no-int-to-if "if 1 then 2 else 3" error)
(if-eval-test-true "let x = -(11,11) in if zero?(x) then 3 else 4" 3)
(if-eval-test-false "let x = -(11,12)in if zero?(x) then 3 else 4" 4)
(if-eval-test-true-2 "if zero?(-(11, 11)) then 3 else foo" 3)
(if-eval-test-false-2 "if zero?(-(11,12)) then foo else 4" 4)
(simple-let-1 "let x = 3 in x" 3)
make sure the body and rhs get evaluated
(eval-let-body "let x = 3 in -(x,1)" 2)
(eval-let-rhs "let x = -(4,1) in -(x,1)" 2)
(simple-nested-let "let x = 3 in let y = 4 in -(x,y)" -1)
(check-shadowing-in-body "let x = 3 in let x = 4 in x" 4)
(check-shadowing-in-rhs "let x = 3 in let x = -(x,1) in x" 2)
(apply-proc-in-rator-pos "(proc(x) -(x,1) 30)" 29)
(apply-simple-proc "let f = proc (x) -(x,1) in (f 30)" 29)
(let-to-proc-1 "(proc(f)(f 30) proc(x)-(x,1))" 29)
( nested - procs - not - in - cps " ( ( proc ( x ) proc ( y ) -(x , y ) 5 ) 6 ) " -1 )
(nested-procs-in-tf "(proc (x y) -(x,y) 5 6)" -1)
(nested-procs2 "let f = proc(x y) -(x,y) in (f -(10,5) 6)"
-1)
t4 m = proc ( f ) proc(x ) if ) then 0 else -((f -(x,1)),-4 )
in let = ( fix m )
in ( times4 3 ) " 12 )
( ( twice proc ( z ) -(z,1 ) ) 11 )
9 )
(twice-in-cps "
let twice = proc(f x k)
(f x proc (z) (f z k))
in (twice
proc (x k) (k -(x,1))
11
proc(z) z)"
9)
(cps-both-simple "
let f = proc (x) -(x,1)
in (f 27)"
26)
(sum-test-1 "+()" 0)
(sum-test-2 "+(2,3,4)" 9)
(letrec-test-1 "letrec f(x) = 17 in 34" 34)
(letrec-test-2 "letrec f(x y) = -(x,y) in -(34, 2)" 32)
(letrec-test-3 "
letrec even(x) = if zero?(x) then zero?(0) else (odd -(x,1))
odd (x) = if zero?(x) then zero?(1) else (even -(x,1))
in (even 5)"
#f)
25 )
in ( ( f 27 ) 4 ) "
23 )
in let g = proc ( z ) -(z , 1 )
17 )
( cps - serious - zero - test "
let f = proc ( x ) -(x , 1 )
in if zero?((f 1 ) ) then 11 else 22 "
11 )
(print-test-1
"let x = 3 in printk(-(x,1)); 33"
33)
(store-test-0
"newrefk(33, proc (loc1) 44)"
44)
(store-test-1
"newrefk(33, proc (loc1)
newrefk(44, proc (loc2)
derefk(loc1, proc(ans)ans)))"
33)
(store-test-2 "
newrefk(33, proc (loc1)
newrefk(44, proc (loc2)
derefk(loc1, proc(ans)ans)))"
22)
(store-test-2a "
newrefk(33, proc (loc1)
newrefk(44, proc (loc2)
derefk(loc1, proc (ans) -(ans,1))))"
21)
(store-test-3 "
newrefk(33, proc (loc1)
newrefk(44, proc (loc2)
derefk(loc1, proc(ans)ans)))"
33)
(gensym-cps "
newrefk(0,
proc(ctr)
let g = proc(k) derefk(ctr,
in (g
proc (x) (g
proc (y) -(x,y))))"
-1)
in the example above , ctr is public . Here it is local .
(gensym-cps-2 "
let makeg = proc (k1)
newrefk(0, proc (ctr)
(k1 proc (k)
derefk(ctr,
proc (v)
in (makeg
proc(g)
(g
proc (x) (g
proc (y) -(x,y))))"
-1)
)
|
b68fe7dc92510b6bc4203d424a40cdc2d77ebfd11df4d4ecf1e424d312477071 | dmitryvk/sbcl-win32-threads | debug.lisp | (in-package "SB!VM")
(define-vop (debug-cur-sp)
(:translate sb!di::current-sp)
(:policy :fast-safe)
(:results (res :scs (sap-reg)))
(:result-types system-area-pointer)
(:generator 1
(move csp-tn res)))
(define-vop (debug-cur-fp)
(:translate sb!di::current-fp)
(:policy :fast-safe)
(:results (res :scs (sap-reg)))
(:result-types system-area-pointer)
(:generator 1
(move cfp-tn res)))
(define-vop (read-control-stack)
(:translate sb!kernel:stack-ref)
(:policy :fast-safe)
(:args (object :scs (sap-reg))
(offset :scs (any-reg)))
(:arg-types system-area-pointer positive-fixnum)
(:results (result :scs (descriptor-reg)))
(:result-types *)
(:generator 5
(inst ldwx offset object result)))
(define-vop (read-control-stack-c)
(:translate sb!kernel:stack-ref)
(:policy :fast-safe)
(:args (object :scs (sap-reg)))
(:info offset)
make room for multiply by limiting to 12 bits
(:arg-types system-area-pointer (:constant (signed-byte 12)))
(:results (result :scs (descriptor-reg)))
(:result-types *)
(:generator 4
(inst ldw (* offset n-word-bytes) object result)))
(define-vop (write-control-stack)
(:translate sb!kernel:%set-stack-ref)
(:policy :fast-safe)
(:args (object :scs (sap-reg) :target sap)
(offset :scs (any-reg))
(value :scs (descriptor-reg) :target result))
(:arg-types system-area-pointer positive-fixnum *)
(:results (result :scs (descriptor-reg)))
(:result-types *)
(:temporary (:scs (sap-reg) :from (:argument 1)) sap)
(:generator 2
(inst add object offset sap)
(inst stw value 0 sap)
(move value result)))
(define-vop (write-control-stack-c)
(:translate %set-stack-ref)
(:policy :fast-safe)
(:args (sap :scs (sap-reg))
(value :scs (descriptor-reg) :target result))
(:info offset)
(:arg-types system-area-pointer (:constant (signed-byte 12)) *)
(:results (result :scs (descriptor-reg)))
(:result-types *)
(:generator 1
(inst stw value (* offset n-word-bytes) sap)
(move value result)))
(define-vop (code-from-mumble)
(:policy :fast-safe)
(:args (thing :scs (descriptor-reg)))
(:results (code :scs (descriptor-reg)))
(:temporary (:scs (non-descriptor-reg)) temp)
(:variant-vars lowtag)
(:generator 5
(let ((bogus (gen-label))
(done (gen-label)))
(loadw temp thing 0 lowtag)
(inst srl temp n-widetag-bits temp)
(inst comb := zero-tn temp bogus)
(inst sll temp (1- (integer-length n-word-bytes)) temp)
(unless (= lowtag other-pointer-lowtag)
(inst addi (- lowtag other-pointer-lowtag) temp temp))
(inst sub thing temp code)
(emit-label done)
(assemble (*elsewhere*)
(emit-label bogus)
(inst b done)
(move null-tn code t)))))
(define-vop (code-from-lra code-from-mumble)
(:translate sb!di::lra-code-header)
(:variant other-pointer-lowtag))
(define-vop (code-from-fun code-from-mumble)
(:translate sb!di::fun-code-header)
(:variant fun-pointer-lowtag))
(define-vop (%make-lisp-obj)
(:policy :fast-safe)
(:translate %make-lisp-obj)
(:args (value :scs (unsigned-reg) :target result))
(:arg-types unsigned-num)
(:results (result :scs (descriptor-reg)))
(:generator 1
(move value result)))
(define-vop (get-lisp-obj-address)
(:policy :fast-safe)
(:translate sb!di::get-lisp-obj-address)
(:args (thing :scs (descriptor-reg) :target result))
(:results (result :scs (unsigned-reg)))
(:result-types unsigned-num)
(:generator 1
(move thing result)))
(define-vop (fun-word-offset)
(:policy :fast-safe)
(:translate sb!di::fun-word-offset)
(:args (fun :scs (descriptor-reg)))
(:results (res :scs (unsigned-reg)))
(:result-types positive-fixnum)
(:generator 5
(loadw res fun 0 fun-pointer-lowtag)
(inst srl res n-widetag-bits res)))
| null | https://raw.githubusercontent.com/dmitryvk/sbcl-win32-threads/5abfd64b00a0937ba2df2919f177697d1d91bde4/src/compiler/hppa/debug.lisp | lisp | (in-package "SB!VM")
(define-vop (debug-cur-sp)
(:translate sb!di::current-sp)
(:policy :fast-safe)
(:results (res :scs (sap-reg)))
(:result-types system-area-pointer)
(:generator 1
(move csp-tn res)))
(define-vop (debug-cur-fp)
(:translate sb!di::current-fp)
(:policy :fast-safe)
(:results (res :scs (sap-reg)))
(:result-types system-area-pointer)
(:generator 1
(move cfp-tn res)))
(define-vop (read-control-stack)
(:translate sb!kernel:stack-ref)
(:policy :fast-safe)
(:args (object :scs (sap-reg))
(offset :scs (any-reg)))
(:arg-types system-area-pointer positive-fixnum)
(:results (result :scs (descriptor-reg)))
(:result-types *)
(:generator 5
(inst ldwx offset object result)))
(define-vop (read-control-stack-c)
(:translate sb!kernel:stack-ref)
(:policy :fast-safe)
(:args (object :scs (sap-reg)))
(:info offset)
make room for multiply by limiting to 12 bits
(:arg-types system-area-pointer (:constant (signed-byte 12)))
(:results (result :scs (descriptor-reg)))
(:result-types *)
(:generator 4
(inst ldw (* offset n-word-bytes) object result)))
(define-vop (write-control-stack)
(:translate sb!kernel:%set-stack-ref)
(:policy :fast-safe)
(:args (object :scs (sap-reg) :target sap)
(offset :scs (any-reg))
(value :scs (descriptor-reg) :target result))
(:arg-types system-area-pointer positive-fixnum *)
(:results (result :scs (descriptor-reg)))
(:result-types *)
(:temporary (:scs (sap-reg) :from (:argument 1)) sap)
(:generator 2
(inst add object offset sap)
(inst stw value 0 sap)
(move value result)))
(define-vop (write-control-stack-c)
(:translate %set-stack-ref)
(:policy :fast-safe)
(:args (sap :scs (sap-reg))
(value :scs (descriptor-reg) :target result))
(:info offset)
(:arg-types system-area-pointer (:constant (signed-byte 12)) *)
(:results (result :scs (descriptor-reg)))
(:result-types *)
(:generator 1
(inst stw value (* offset n-word-bytes) sap)
(move value result)))
(define-vop (code-from-mumble)
(:policy :fast-safe)
(:args (thing :scs (descriptor-reg)))
(:results (code :scs (descriptor-reg)))
(:temporary (:scs (non-descriptor-reg)) temp)
(:variant-vars lowtag)
(:generator 5
(let ((bogus (gen-label))
(done (gen-label)))
(loadw temp thing 0 lowtag)
(inst srl temp n-widetag-bits temp)
(inst comb := zero-tn temp bogus)
(inst sll temp (1- (integer-length n-word-bytes)) temp)
(unless (= lowtag other-pointer-lowtag)
(inst addi (- lowtag other-pointer-lowtag) temp temp))
(inst sub thing temp code)
(emit-label done)
(assemble (*elsewhere*)
(emit-label bogus)
(inst b done)
(move null-tn code t)))))
(define-vop (code-from-lra code-from-mumble)
(:translate sb!di::lra-code-header)
(:variant other-pointer-lowtag))
(define-vop (code-from-fun code-from-mumble)
(:translate sb!di::fun-code-header)
(:variant fun-pointer-lowtag))
(define-vop (%make-lisp-obj)
(:policy :fast-safe)
(:translate %make-lisp-obj)
(:args (value :scs (unsigned-reg) :target result))
(:arg-types unsigned-num)
(:results (result :scs (descriptor-reg)))
(:generator 1
(move value result)))
(define-vop (get-lisp-obj-address)
(:policy :fast-safe)
(:translate sb!di::get-lisp-obj-address)
(:args (thing :scs (descriptor-reg) :target result))
(:results (result :scs (unsigned-reg)))
(:result-types unsigned-num)
(:generator 1
(move thing result)))
(define-vop (fun-word-offset)
(:policy :fast-safe)
(:translate sb!di::fun-word-offset)
(:args (fun :scs (descriptor-reg)))
(:results (res :scs (unsigned-reg)))
(:result-types positive-fixnum)
(:generator 5
(loadw res fun 0 fun-pointer-lowtag)
(inst srl res n-widetag-bits res)))
| |
83e89bb7cea97a4f9799be30620b3ff7a497fb680c3f6f8cff7d0b3990a87521 | auser/hermes | thrift_server.erl | %%%-------------------------------------------------------------------
%%% File : thrift_server.erl
%%% Author : <>
%%% Description :
%%%
Created : 28 Jan 2008 by < >
%%%-------------------------------------------------------------------
-module(thrift_server).
-behaviour(gen_server).
%% API
-export([start_link/3, stop/1, take_socket/2]).
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-define(SERVER, ?MODULE).
-record(state, {listen_socket, acceptor_ref, service, handler}).
%%====================================================================
%% API
%%====================================================================
%%--------------------------------------------------------------------
Function : start_link ( ) - > { ok , Pid } | ignore | { error , Error }
%% Description: Starts the server
%%--------------------------------------------------------------------
start_link(Port, Service, HandlerModule) when is_integer(Port), is_atom(HandlerModule) ->
gen_server:start_link({local, ?SERVER}, ?MODULE, {Port, Service, HandlerModule}, []).
%%--------------------------------------------------------------------
%% Function: stop(Pid) -> ok, {error, Reason}
%% Description: Stops the server.
%%--------------------------------------------------------------------
stop(Pid) when is_pid(Pid) ->
gen_server:call(Pid, stop).
take_socket(Server, Socket) ->
gen_server:call(Server, {take_socket, Socket}).
%%====================================================================
%% gen_server callbacks
%%====================================================================
%%--------------------------------------------------------------------
%% Function: init(Args) -> {ok, State} |
{ ok , State , Timeout } |
%% ignore |
%% {stop, Reason}
%% Description: Initiates the server
%%--------------------------------------------------------------------
init({Port, Service, Handler}) ->
{ok, Socket} = gen_tcp:listen(Port,
[binary,
{packet, 0},
{active, false},
{nodelay, true},
{reuseaddr, true}]),
{ok, Ref} = prim_inet:async_accept(Socket, -1),
{ok, #state{listen_socket = Socket,
acceptor_ref = Ref,
service = Service,
handler = Handler}}.
%%--------------------------------------------------------------------
Function : % % handle_call(Request , From , State ) - > { reply , Reply , State } |
{ reply , Reply , State , Timeout } |
{ noreply , State } |
{ noreply , State , Timeout } |
%% {stop, Reason, Reply, State} |
%% {stop, Reason, State}
%% Description: Handling call messages
%%--------------------------------------------------------------------
handle_call(stop, _From, State) ->
{stop, stopped, ok, State};
handle_call({take_socket, Socket}, {FromPid, _Tag}, State) ->
Result = gen_tcp:controlling_process(Socket, FromPid),
{reply, Result, State}.
%%--------------------------------------------------------------------
Function : handle_cast(Msg , State ) - > { noreply , State } |
{ noreply , State , Timeout } |
%% {stop, Reason, State}
%% Description: Handling cast messages
%%--------------------------------------------------------------------
handle_cast(_Msg, State) ->
{noreply, State}.
%%--------------------------------------------------------------------
Function : handle_info(Info , State ) - > { noreply , State } |
{ noreply , State , Timeout } |
%% {stop, Reason, State}
%% Description: Handling all non call/cast messages
%%--------------------------------------------------------------------
handle_info({inet_async, ListenSocket, Ref, {ok, ClientSocket}},
State = #state{listen_socket = ListenSocket,
acceptor_ref = Ref,
service = Service,
handler = Handler}) ->
case set_sockopt(ListenSocket, ClientSocket) of
ok ->
%% New client connected - start processor
start_processor(ClientSocket, Service, Handler),
{ok, NewRef} = prim_inet:async_accept(ListenSocket, -1),
{noreply, State#state{acceptor_ref = NewRef}};
{error, Reason} ->
error_logger:error_msg("Couldn't set socket opts: ~p~n",
[Reason]),
{stop, Reason, State}
end;
handle_info({inet_async, ListenSocket, Ref, Error}, State) ->
error_logger:error_msg("Error in acceptor: ~p~n", [Error]),
{stop, Error, State};
handle_info(_Info, State) ->
{noreply, State}.
%%--------------------------------------------------------------------
%% Function: terminate(Reason, State) -> void()
%% Description: This function is called by a gen_server when it is about to
%% terminate. It should be the opposite of Module:init/1 and do any necessary
%% cleaning up. When it returns, the gen_server terminates with Reason.
%% The return value is ignored.
%%--------------------------------------------------------------------
terminate(_Reason, _State) ->
ok.
%%--------------------------------------------------------------------
Func : code_change(OldVsn , State , Extra ) - > { ok , NewState }
%% Description: Convert process state when code is changed
%%--------------------------------------------------------------------
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%--------------------------------------------------------------------
Internal functions
%%--------------------------------------------------------------------
set_sockopt(ListenSocket, ClientSocket) ->
true = inet_db:register_socket(ClientSocket, inet_tcp),
case prim_inet:getopts(ListenSocket,
[active, nodelay, keepalive, delay_send, priority, tos]) of
{ok, Opts} ->
case prim_inet:setopts(ClientSocket, Opts) of
ok -> ok;
Error -> gen_tcp:close(ClientSocket),
Error
end;
Error ->
gen_tcp:close(ClientSocket),
Error
end.
start_processor(Socket, Service, Handler) ->
Server = self(),
ProtoGen = fun() ->
% Become the controlling process
ok = take_socket(Server, Socket),
{ok, SocketTransport} = thrift_socket_transport:new(Socket),
{ok, BufferedTransport} = thrift_buffered_transport:new(SocketTransport),
{ok, Protocol} = thrift_binary_protocol:new(BufferedTransport),
{ok, Protocol, Protocol}
end,
spawn(thrift_processor, init, [{Server, ProtoGen, Service, Handler}]).
| null | https://raw.githubusercontent.com/auser/hermes/32741eb75398ebbcbf640e2c73dfd2a54f0d1241/deps/thrift/src/thrift_server.erl | erlang | -------------------------------------------------------------------
File : thrift_server.erl
Author : <>
Description :
-------------------------------------------------------------------
API
gen_server callbacks
====================================================================
API
====================================================================
--------------------------------------------------------------------
Description: Starts the server
--------------------------------------------------------------------
--------------------------------------------------------------------
Function: stop(Pid) -> ok, {error, Reason}
Description: Stops the server.
--------------------------------------------------------------------
====================================================================
gen_server callbacks
====================================================================
--------------------------------------------------------------------
Function: init(Args) -> {ok, State} |
ignore |
{stop, Reason}
Description: Initiates the server
--------------------------------------------------------------------
--------------------------------------------------------------------
% handle_call(Request , From , State ) - > { reply , Reply , State } |
{stop, Reason, Reply, State} |
{stop, Reason, State}
Description: Handling call messages
--------------------------------------------------------------------
--------------------------------------------------------------------
{stop, Reason, State}
Description: Handling cast messages
--------------------------------------------------------------------
--------------------------------------------------------------------
{stop, Reason, State}
Description: Handling all non call/cast messages
--------------------------------------------------------------------
New client connected - start processor
--------------------------------------------------------------------
Function: terminate(Reason, State) -> void()
Description: This function is called by a gen_server when it is about to
terminate. It should be the opposite of Module:init/1 and do any necessary
cleaning up. When it returns, the gen_server terminates with Reason.
The return value is ignored.
--------------------------------------------------------------------
--------------------------------------------------------------------
Description: Convert process state when code is changed
--------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
Become the controlling process | Created : 28 Jan 2008 by < >
-module(thrift_server).
-behaviour(gen_server).
-export([start_link/3, stop/1, take_socket/2]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-define(SERVER, ?MODULE).
-record(state, {listen_socket, acceptor_ref, service, handler}).
Function : start_link ( ) - > { ok , Pid } | ignore | { error , Error }
start_link(Port, Service, HandlerModule) when is_integer(Port), is_atom(HandlerModule) ->
gen_server:start_link({local, ?SERVER}, ?MODULE, {Port, Service, HandlerModule}, []).
stop(Pid) when is_pid(Pid) ->
gen_server:call(Pid, stop).
take_socket(Server, Socket) ->
gen_server:call(Server, {take_socket, Socket}).
{ ok , State , Timeout } |
init({Port, Service, Handler}) ->
{ok, Socket} = gen_tcp:listen(Port,
[binary,
{packet, 0},
{active, false},
{nodelay, true},
{reuseaddr, true}]),
{ok, Ref} = prim_inet:async_accept(Socket, -1),
{ok, #state{listen_socket = Socket,
acceptor_ref = Ref,
service = Service,
handler = Handler}}.
{ reply , Reply , State , Timeout } |
{ noreply , State } |
{ noreply , State , Timeout } |
handle_call(stop, _From, State) ->
{stop, stopped, ok, State};
handle_call({take_socket, Socket}, {FromPid, _Tag}, State) ->
Result = gen_tcp:controlling_process(Socket, FromPid),
{reply, Result, State}.
Function : handle_cast(Msg , State ) - > { noreply , State } |
{ noreply , State , Timeout } |
handle_cast(_Msg, State) ->
{noreply, State}.
Function : handle_info(Info , State ) - > { noreply , State } |
{ noreply , State , Timeout } |
handle_info({inet_async, ListenSocket, Ref, {ok, ClientSocket}},
State = #state{listen_socket = ListenSocket,
acceptor_ref = Ref,
service = Service,
handler = Handler}) ->
case set_sockopt(ListenSocket, ClientSocket) of
ok ->
start_processor(ClientSocket, Service, Handler),
{ok, NewRef} = prim_inet:async_accept(ListenSocket, -1),
{noreply, State#state{acceptor_ref = NewRef}};
{error, Reason} ->
error_logger:error_msg("Couldn't set socket opts: ~p~n",
[Reason]),
{stop, Reason, State}
end;
handle_info({inet_async, ListenSocket, Ref, Error}, State) ->
error_logger:error_msg("Error in acceptor: ~p~n", [Error]),
{stop, Error, State};
handle_info(_Info, State) ->
{noreply, State}.
terminate(_Reason, _State) ->
ok.
Func : code_change(OldVsn , State , Extra ) - > { ok , NewState }
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
Internal functions
set_sockopt(ListenSocket, ClientSocket) ->
true = inet_db:register_socket(ClientSocket, inet_tcp),
case prim_inet:getopts(ListenSocket,
[active, nodelay, keepalive, delay_send, priority, tos]) of
{ok, Opts} ->
case prim_inet:setopts(ClientSocket, Opts) of
ok -> ok;
Error -> gen_tcp:close(ClientSocket),
Error
end;
Error ->
gen_tcp:close(ClientSocket),
Error
end.
start_processor(Socket, Service, Handler) ->
Server = self(),
ProtoGen = fun() ->
ok = take_socket(Server, Socket),
{ok, SocketTransport} = thrift_socket_transport:new(Socket),
{ok, BufferedTransport} = thrift_buffered_transport:new(SocketTransport),
{ok, Protocol} = thrift_binary_protocol:new(BufferedTransport),
{ok, Protocol, Protocol}
end,
spawn(thrift_processor, init, [{Server, ProtoGen, Service, Handler}]).
|
7dfd7f3f21ad54df0b1ebc481c649fd07021b20cdc3b6ad845f8f224909713ce | greglook/blocks | project.clj | (defproject mvxcvi/blocks-tests "2.0.3"
:description "Generative tests for block storage implementations."
:url ""
:license {:name "Public Domain"
:url "/"}
:deploy-branches ["master"]
:pedantic? :abort
:dependencies
[[org.clojure/clojure "1.10.1"]
[org.clojure/test.check "0.10.0"]
[mvxcvi/blocks "2.0.3"]
[mvxcvi/test.carly "0.4.1"]
[mvxcvi/puget "1.2.0"]])
| null | https://raw.githubusercontent.com/greglook/blocks/c962d4431c05ac686a0cd1145e05bc0a0e0c4d7d/blocks-tests/project.clj | clojure | (defproject mvxcvi/blocks-tests "2.0.3"
:description "Generative tests for block storage implementations."
:url ""
:license {:name "Public Domain"
:url "/"}
:deploy-branches ["master"]
:pedantic? :abort
:dependencies
[[org.clojure/clojure "1.10.1"]
[org.clojure/test.check "0.10.0"]
[mvxcvi/blocks "2.0.3"]
[mvxcvi/test.carly "0.4.1"]
[mvxcvi/puget "1.2.0"]])
| |
329be9c1dffbe466362acfbd88a615e943182b7192f3c704589066cdd986ab54 | dwincort/UISF | Examples.hs | {-# LANGUAGE Arrows #-}
Last modified by :
Last modified on : 5/25/2013
-- This file is a set of various UI examples showing off the features
of the various widgets in UISF .
module FRP.UISF.Examples.Examples where
import FRP.UISF
import FRP.UISF.Graphics
import Numeric (showHex)
-- | This example displays the time from the start of the GUI application.
timeEx :: UISF () ()
timeEx = title "Time" $ accumTime >>> display <<< spacer
-- | This example shows off 'button's and state by presenting a plus and
-- minus button with a counter that is adjusted by them.
buttonEx :: UISF () ()
buttonEx = title "Buttons" $ topDown $ proc _ -> do
(x,y) <- leftRight (proc _ -> do
x <- edge <<< button "+" -< ()
y <- edge <<< button "-" -< ()
returnA -< (x, y)) -< ()
rec v <- delay 0 -< (case (x,y) of
(Just _, Nothing) -> v+1
(Nothing, Just _) -> v-1
_ -> v)
display -< v
spacer -< ()
-- | This example shows off the 'checkbox' widgets.
checkboxEx :: UISF () ()
checkboxEx = title "Checkboxes" $ topDown $ proc _ -> do
x <- checkbox "Monday" False -< ()
y <- checkbox "Tuesday" True -< ()
z <- checkbox "Wednesday" True -< ()
let v = bin x ++ bin y ++ bin z
displayStr -< v
spacer -< ()
where
bin True = "1"
bin False = "0"
-- | This example shows off the 'radio' button widget.
radioButtonEx :: UISF () ()
radioButtonEx = title "Radio Buttons" $ topDown $ radio list 0 >>> arr (list!!) >>> displayStr >>> spacer
where
list = ["apple", "orange", "banana"]
-- | This example shows off integral sliders (horizontal 'hiSlider's in
-- this case).
shoppinglist :: UISF () ()
shoppinglist = title "Shopping List" $ topDown $ proc _ -> do
a <- spacer <<< title "apples" (hiSlider 1 (0,10) 3) -< ()
b <- spacer <<< title "bananas" (hiSlider 1 (0,10) 7) -< ()
title "total" display -< (a + b)
-- | This example shows off both vertical sliders as well as the 'canvas'
-- widget. The canvas widget can be used to easily create custom graphics
-- in the GUI. Here, it is used to make a color swatch that is
-- controllable with RGB values by the sliders.
colorDemo :: UISF () ()
colorDemo = title "Color" $ leftRight $ proc _ -> do
r <- newColorSlider (coloredUIText Red "R") -< ()
g <- newColorSlider (coloredUIText Green "G") -< ()
b <- newColorSlider (coloredUIText Blue "B") -< ()
changed <- unique -< (r,g,b)
pad (4,8,0,0) $ canvas' layout rect -< changed
where
layout = makeLayout (Stretchy 10) (Stretchy 10)
newColorSlider l = title l $ topDown $ proc _ -> do
v <- viSlider 16 (0,255) 0 -< ()
_ <- setSize (22,22) displayStr -< showHex v ""
returnA -< v
rect (r,g,b) d = withColor' (rgbE r g b) (rectangleFilled ((0,0),d))
-- | This example shows off the 'textbox' widget. Text can be typed in, and
-- that text is transferred to the 'display' widget below when the button
-- is pressed.
textboxdemo :: UISF () ()
textboxdemo = title "Saving Text" $ topDown $ proc _ -> do
str <- leftRight $ label "Text: " >>> textbox "" -< Nothing
b <- button "Save text to below" -< ()
rec str' <- delay "" -< if b then str else str'
leftRight $ label "Saved value: " >>> displayStr -< str'
uitext :: UIText
uitext = coloredUIText Red "H" `appendUIText`
coloredUIText Yellow "e" `appendUIText`
coloredUIText Green "l" `appendUIText`
coloredUIText Cyan "l" `appendUIText`
coloredUIText Blue "o" `appendUIText`
coloredUIText Magenta " W" `appendUIText`
coloredUIText Red "o" `appendUIText`
coloredUIText Yellow "r" `appendUIText`
coloredUIText Green "l" `appendUIText`
coloredUIText Cyan "d" `appendUIText`
coloredUIText Blue "!"
uitext' = fontUIText Helvetica18 uitext
uitextdemo = title "Color and Fonts" $ constA Nothing >>> textField CharWrap uitext' >>> constA ()
-- | This is the main demo that incorporates all of the other examples
-- together. In addition to demonstrating how
-- different widgets can connect, it also shows off the tabbing
-- behavior built in to the GUI. Pressing tab cycles through focusable
-- elements, and pressing shift-tab cycles in reverse.
main :: IO ()
main = runUI (defaultUIParams {uiSize=(500, 520), uiCloseOnEsc=True}) $
(leftRight $ (bottomUp $ timeEx >>> buttonEx) >>> checkboxEx >>> radioButtonEx) >>>
(leftRight $ shoppinglist >>> colorDemo) >>> textboxdemo >>> uitextdemo
linesWith s = cons (case break (== '\n') s of
(l, "") -> (l,[])
(l, s') -> (l++"\n", case s' of
[] -> []
_:s'' -> linesWith s''))
where
cons ~(h, t) = h : t | null | https://raw.githubusercontent.com/dwincort/UISF/291a6155e41f351d938daed3d2be6f8baf832a19/FRP/UISF/Examples/Examples.hs | haskell | # LANGUAGE Arrows #
This file is a set of various UI examples showing off the features
| This example displays the time from the start of the GUI application.
| This example shows off 'button's and state by presenting a plus and
minus button with a counter that is adjusted by them.
| This example shows off the 'checkbox' widgets.
| This example shows off the 'radio' button widget.
| This example shows off integral sliders (horizontal 'hiSlider's in
this case).
| This example shows off both vertical sliders as well as the 'canvas'
widget. The canvas widget can be used to easily create custom graphics
in the GUI. Here, it is used to make a color swatch that is
controllable with RGB values by the sliders.
| This example shows off the 'textbox' widget. Text can be typed in, and
that text is transferred to the 'display' widget below when the button
is pressed.
| This is the main demo that incorporates all of the other examples
together. In addition to demonstrating how
different widgets can connect, it also shows off the tabbing
behavior built in to the GUI. Pressing tab cycles through focusable
elements, and pressing shift-tab cycles in reverse. |
Last modified by :
Last modified on : 5/25/2013
of the various widgets in UISF .
module FRP.UISF.Examples.Examples where
import FRP.UISF
import FRP.UISF.Graphics
import Numeric (showHex)
-- | Display the running time accumulated by 'accumTime' under a "Time"
-- title.  The spacer only affects layout, not the value displayed.
-- NOTE(review): the unit of 'accumTime' (seconds vs. ticks) is defined by
-- FRP.UISF, not visible here — confirm before documenting further.
timeEx :: UISF () ()
timeEx = title "Time" $ accumTime >>> display <<< spacer
buttonEx :: UISF () ()
buttonEx = title "Buttons" $ topDown $ proc _ -> do
(x,y) <- leftRight (proc _ -> do
x <- edge <<< button "+" -< ()
y <- edge <<< button "-" -< ()
returnA -< (x, y)) -< ()
rec v <- delay 0 -< (case (x,y) of
(Just _, Nothing) -> v+1
(Nothing, Just _) -> v-1
_ -> v)
display -< v
spacer -< ()
-- | Three labelled checkboxes whose combined state is shown as a
-- 3-character bit string ("1" = checked); the initial state renders "011".
checkboxEx :: UISF () ()
checkboxEx = title "Checkboxes" $ topDown $ proc _ -> do
  x <- checkbox "Monday" False -< ()
  y <- checkbox "Tuesday" True -< ()
  z <- checkbox "Wednesday" True -< ()
  -- Encode each Bool as "1"/"0" and concatenate in widget order.
  let v = bin x ++ bin y ++ bin z
  displayStr -< v
  spacer -< ()
  where
    bin True = "1"
    bin False = "0"
radioButtonEx :: UISF () ()
radioButtonEx = title "Radio Buttons" $ topDown $ radio list 0 >>> arr (list!!) >>> displayStr >>> spacer
where
list = ["apple", "orange", "banana"]
shoppinglist :: UISF () ()
shoppinglist = title "Shopping List" $ topDown $ proc _ -> do
a <- spacer <<< title "apples" (hiSlider 1 (0,10) 3) -< ()
b <- spacer <<< title "bananas" (hiSlider 1 (0,10) 7) -< ()
title "total" display -< (a + b)
colorDemo :: UISF () ()
colorDemo = title "Color" $ leftRight $ proc _ -> do
r <- newColorSlider (coloredUIText Red "R") -< ()
g <- newColorSlider (coloredUIText Green "G") -< ()
b <- newColorSlider (coloredUIText Blue "B") -< ()
changed <- unique -< (r,g,b)
pad (4,8,0,0) $ canvas' layout rect -< changed
where
layout = makeLayout (Stretchy 10) (Stretchy 10)
newColorSlider l = title l $ topDown $ proc _ -> do
v <- viSlider 16 (0,255) 0 -< ()
_ <- setSize (22,22) displayStr -< showHex v ""
returnA -< v
rect (r,g,b) d = withColor' (rgbE r g b) (rectangleFilled ((0,0),d))
textboxdemo :: UISF () ()
textboxdemo = title "Saving Text" $ topDown $ proc _ -> do
str <- leftRight $ label "Text: " >>> textbox "" -< Nothing
b <- button "Save text to below" -< ()
rec str' <- delay "" -< if b then str else str'
leftRight $ label "Saved value: " >>> displayStr -< str'
uitext :: UIText
uitext = coloredUIText Red "H" `appendUIText`
coloredUIText Yellow "e" `appendUIText`
coloredUIText Green "l" `appendUIText`
coloredUIText Cyan "l" `appendUIText`
coloredUIText Blue "o" `appendUIText`
coloredUIText Magenta " W" `appendUIText`
coloredUIText Red "o" `appendUIText`
coloredUIText Yellow "r" `appendUIText`
coloredUIText Green "l" `appendUIText`
coloredUIText Cyan "d" `appendUIText`
coloredUIText Blue "!"
uitext' = fontUIText Helvetica18 uitext
uitextdemo = title "Color and Fonts" $ constA Nothing >>> textField CharWrap uitext' >>> constA ()
main :: IO ()
main = runUI (defaultUIParams {uiSize=(500, 520), uiCloseOnEsc=True}) $
(leftRight $ (bottomUp $ timeEx >>> buttonEx) >>> checkboxEx >>> radioButtonEx) >>>
(leftRight $ shoppinglist >>> colorDemo) >>> textboxdemo >>> uitextdemo
linesWith s = cons (case break (== '\n') s of
(l, "") -> (l,[])
(l, s') -> (l++"\n", case s' of
[] -> []
_:s'' -> linesWith s''))
where
cons ~(h, t) = h : t |
226d8b415bd922cddc02cac8258242d51f431a1e8a75711abe99ec15bacf57a1 | Hendekagon/iiiiioiooooo | event.cljs | Copyright ( c ) . All rights reserved .
;; The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 ( -1.0.php )
;; which can be found in the file epl-v10.html at the root of this distribution.
;; By using this software in any fashion, you are agreeing to be bound by
;; the terms of this license.
;; You must not remove this notice, or any other, from this software.
(ns ^{:doc "This namespace contains functions to work with browser
events. It is based on the Google Closure Library event system."
:author "Bobby Calderwood"}
clojure.browser.event
(:require [goog.events :as events])
(:import (goog.events EventTarget EventType)))
;; Protocol: enumerate the event types an object supports, as a map from
;; lower-cased keyword (e.g. :click) to the Closure event-type string.
(defprotocol IEventType
  (event-types [this]))

;; goog.events.EventTarget supports the full goog.events.EventType enum.
(extend-protocol IEventType
  EventTarget
  (event-types
    [this]
    ;; Build {:click "click", ...} from the EventType enum object.
    (into {}
          (map
           (fn [[k v]]
             [(keyword (.toLowerCase k))
              v])
           (merge
            (js->clj EventType))))))

;; DOM elements get the same keyword->string mapping, but only in
;; environments where js/Element exists (i.e. a real browser DOM).
(when (exists? js/Element)
  (extend-protocol IEventType
    js/Element
    (event-types
      [this]
      (into {}
            (map
             (fn [[k v]]
               [(keyword (.toLowerCase k))
                v])
             (merge
              (js->clj EventType)))))))
;; Register `fn` for events of `type` on `src`.  `type` may be a keyword
;; (translated through the src's event-type map) or a raw event-type
;; string; `capture?` selects the capture phase and defaults to false.
;; Returns the listener key used by goog.events (usable with
;; unlisten-by-key).
(defn listen
  ([src type fn]
   (listen src type fn false))
  ([src type fn capture?]
   (events/listen src
                  (get (event-types src) type type)
                  fn
                  capture?)))

;; Like `listen`, but the handler is removed after its first invocation.
(defn listen-once
  ([src type fn]
   (listen-once src type fn false))
  ([src type fn capture?]
   (events/listenOnce src
                      (get (event-types src) type type)
                      fn
                      capture?)))

;; Remove a listener added with `listen`; the arguments must match the
;; original registration, including the capture phase.
(defn unlisten
  ([src type fn]
   (unlisten src type fn false))
  ([src type fn capture?]
   (events/unlisten src
                    (get (event-types src) type type)
                    fn
                    capture?)))
;; Remove a listener using the key returned by goog.events/listen.
(defn unlisten-by-key
  [key]
  (events/unlistenByKey key))

;; Fire `event` on `src`, invoking its listeners synchronously.
(defn dispatch-event
  [src event]
  (events/dispatchEvent src event))

(defn expose [e]
  (events/expose e))

;; NOTE(review): stub — the body is empty, so calling this is a no-op that
;; always returns nil.
(defn fire-listeners
  [obj type capture event])

;; Total number of listeners currently registered through goog.events.
(defn total-listener-count []
  (events/getTotalListenerCount))
TODO
(defn get-listener [src type listener opt_capt opt_handler]); ⇒ ?Listener
(defn all-listeners [obj type capture]); ⇒ Array.<Listener>
(defn unique-event-id [event-type]); ⇒ string
(defn has-listener [obj opt_type opt_capture]); ⇒ boolean
TODO ? ( defn listen - with - wrapper [ src wrapper listener opt_capt opt_handler ] )
TODO ? ( defn protect - browser - event - entry - point [ errorHandler ] )
(defn remove-all [opt_obj opt_type opt_capt]); ⇒ number
TODO ? ( - with - wrapper [ src wrapper listener opt_capt opt_handler ] )
| null | https://raw.githubusercontent.com/Hendekagon/iiiiioiooooo/f7c2ab0468798b2065bdff9c564ca10a966cfa79/public/js/clojure/browser/event.cljs | clojure | The use and distribution terms for this software are covered by the
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software.
⇒ ?Listener
⇒ Array.<Listener>
⇒ string
⇒ boolean
⇒ number | Copyright ( c ) . All rights reserved .
Eclipse Public License 1.0 ( -1.0.php )
(ns ^{:doc "This namespace contains functions to work with browser
events. It is based on the Google Closure Library event system."
:author "Bobby Calderwood"}
clojure.browser.event
(:require [goog.events :as events])
(:import (goog.events EventTarget EventType)))
(defprotocol IEventType
(event-types [this]))
(extend-protocol IEventType
EventTarget
(event-types
[this]
(into {}
(map
(fn [[k v]]
[(keyword (.toLowerCase k))
v])
(merge
(js->clj EventType))))))
(when (exists? js/Element)
(extend-protocol IEventType
js/Element
(event-types
[this]
(into {}
(map
(fn [[k v]]
[(keyword (.toLowerCase k))
v])
(merge
(js->clj EventType)))))))
(defn listen
([src type fn]
(listen src type fn false))
([src type fn capture?]
(events/listen src
(get (event-types src) type type)
fn
capture?)))
(defn listen-once
([src type fn]
(listen-once src type fn false))
([src type fn capture?]
(events/listenOnce src
(get (event-types src) type type)
fn
capture?)))
(defn unlisten
([src type fn]
(unlisten src type fn false))
([src type fn capture?]
(events/unlisten src
(get (event-types src) type type)
fn
capture?)))
(defn unlisten-by-key
[key]
(events/unlistenByKey key))
(defn dispatch-event
[src event]
(events/dispatchEvent src event))
(defn expose [e]
(events/expose e))
(defn fire-listeners
[obj type capture event])
(defn total-listener-count []
(events/getTotalListenerCount))
TODO
TODO ? ( defn listen - with - wrapper [ src wrapper listener opt_capt opt_handler ] )
TODO ? ( defn protect - browser - event - entry - point [ errorHandler ] )
TODO ? ( - with - wrapper [ src wrapper listener opt_capt opt_handler ] )
|
ab1ceac9816c53d6b21cee6956f816ffdb01ee8755aebd7de524c6e087e4bd12 | Zeta611/L | value_analyzer.mli | module HoleCoeffs : sig
type t = int list
val hole_count : 'a list -> int
end
type value =
| VNum of HoleCoeffs.t
| VPair of value * value
val value_of_plain_value :
([< `Num of int | `Pair of 'a * 'a ] as 'a) -> hole_cnt:int -> value
val count_holes : Shape_analyzer.ty -> L.number
val value_of_hole_type : Shape_analyzer.ty -> value
val string_of_value : value -> string
type id = string
type env = id -> value
exception TypeError of string
exception RunError of string
exception PathError of string
val ( @: ) : 'a * 'b -> ('a -> 'b) -> 'a -> 'b
val eval :
(L.id -> value) ->
L.expr ->
Path.path ->
Shape_analyzer.ty ->
L.plain_value ->
bool * value
| null | https://raw.githubusercontent.com/Zeta611/L/5a2f6625bfab897946a56369665a11ef65fc5b92/lib/value_analyzer.mli | ocaml | module HoleCoeffs : sig
type t = int list
val hole_count : 'a list -> int
end
type value =
| VNum of HoleCoeffs.t
| VPair of value * value
val value_of_plain_value :
([< `Num of int | `Pair of 'a * 'a ] as 'a) -> hole_cnt:int -> value
val count_holes : Shape_analyzer.ty -> L.number
val value_of_hole_type : Shape_analyzer.ty -> value
val string_of_value : value -> string
type id = string
type env = id -> value
exception TypeError of string
exception RunError of string
exception PathError of string
val ( @: ) : 'a * 'b -> ('a -> 'b) -> 'a -> 'b
val eval :
(L.id -> value) ->
L.expr ->
Path.path ->
Shape_analyzer.ty ->
L.plain_value ->
bool * value
| |
2530482e290ad8195399a6d20ec8316fee984dd5f07bf8686f73f19534a44a4a | wdanilo/haskell-logger | Drop.hs | # LANGUAGE TemplateHaskell #
# LANGUAGE FunctionalDependencies #
# LANGUAGE TypeFamilies #
# LANGUAGE UndecidableInstances #
# LANGUAGE NoMonomorphismRestriction #
{-# LANGUAGE OverloadedStrings #-}
-----------------------------------------------------------------------------
-- |
-- Module : System.Log.Logger.Drop
Copyright : ( C ) 2015 Flowbox
-- License : Apache-2.0
Maintainer : < >
-- Stability : stable
-- Portability : portable
-----------------------------------------------------------------------------
module System.Log.Logger.Drop where
import Data.Monoid
import Control.Applicative
import System.Log.Data (MonadRecord(appendRecord), LogBuilder, LookupDataSet, Msg, Lvl)
import Control.Lens hiding (children)
import System.Log.Log (Log, MonadLogger(appendLog), LogFormat, LogFormat)
import Control.Monad.Trans (lift)
import Control.Monad.State (StateT, runStateT)
import qualified Control.Monad.State as State
import Control.Monad.IO.Class (MonadIO, liftIO)
import Text.PrettyPrint.ANSI.Leijen (Doc, putDoc)
import System.Log.Logger.Handler (MonadLoggerHandler, addHandler)
import System.Log.Logger.Priority (MonadPriorityLogger, getPriority, setPriority)
import Control.Monad.Trans (MonadTrans)
----------------------------------------------------------------------
DropLogger
----------------------------------------------------------------------
-- | A logger transformer that discards every logging operation.  Running a
-- computation through 'DropLoggerT' silences all logging beneath it while
-- leaving the underlying monad untouched.
newtype DropLoggerT m a = DropLoggerT { runDropLoggerT :: m a } deriving (Monad, MonadIO, Applicative, Functor)

instance MonadTrans DropLoggerT where
    lift = DropLoggerT

-- The log format is inherited unchanged from the underlying monad.
type instance LogFormat (DropLoggerT m) = LogFormat m

-- Every logging-related class method is implemented as a no-op:
instance (Monad m, Applicative m) => MonadLogger (DropLoggerT m) where
    appendLog _ = return ()

instance Monad m => MonadRecord d (DropLoggerT m) where
    appendRecord _ = return ()

instance (Monad m, MonadLoggerHandler h m) => MonadLoggerHandler h (DropLoggerT m) where
    addHandler _ = return ()

instance Monad m => MonadPriorityLogger (DropLoggerT m) where
    -- NOTE(review): 'return undefined' is only safe while no caller ever
    -- forces the priority of a drop logger — confirm before relying on it.
    getPriority = return undefined
    setPriority _ = return ()
| null | https://raw.githubusercontent.com/wdanilo/haskell-logger/bdf3b64f50c0a8e26bd44fdb882e72ffbe19fd3f/src/System/Log/Logger/Drop.hs | haskell | # LANGUAGE OverloadedStrings #
---------------------------------------------------------------------------
|
Module : System.Log.Logger.Drop
License : Apache-2.0
Stability : stable
Portability : portable
---------------------------------------------------------------------------
--------------------------------------------------------------------
-------------------------------------------------------------------- | # LANGUAGE TemplateHaskell #
# LANGUAGE FunctionalDependencies #
# LANGUAGE TypeFamilies #
# LANGUAGE UndecidableInstances #
# LANGUAGE NoMonomorphismRestriction #
Copyright : ( C ) 2015 Flowbox
Maintainer : < >
module System.Log.Logger.Drop where
import Data.Monoid
import Control.Applicative
import System.Log.Data (MonadRecord(appendRecord), LogBuilder, LookupDataSet, Msg, Lvl)
import Control.Lens hiding (children)
import System.Log.Log (Log, MonadLogger(appendLog), LogFormat, LogFormat)
import Control.Monad.Trans (lift)
import Control.Monad.State (StateT, runStateT)
import qualified Control.Monad.State as State
import Control.Monad.IO.Class (MonadIO, liftIO)
import Text.PrettyPrint.ANSI.Leijen (Doc, putDoc)
import System.Log.Logger.Handler (MonadLoggerHandler, addHandler)
import System.Log.Logger.Priority (MonadPriorityLogger, getPriority, setPriority)
import Control.Monad.Trans (MonadTrans)
DropLogger
newtype DropLoggerT m a = DropLoggerT { runDropLoggerT :: m a } deriving (Monad, MonadIO, Applicative, Functor)
instance MonadTrans DropLoggerT where
lift = DropLoggerT
type instance LogFormat (DropLoggerT m) = LogFormat m
instance (Monad m, Applicative m) => MonadLogger (DropLoggerT m) where
appendLog _ = return ()
instance Monad m => MonadRecord d (DropLoggerT m) where
appendRecord _ = return ()
instance (Monad m, MonadLoggerHandler h m) => MonadLoggerHandler h (DropLoggerT m) where
addHandler _ = return ()
instance Monad m => MonadPriorityLogger (DropLoggerT m) where
getPriority = return undefined
setPriority _ = return ()
|
f77cd2e28d6e5956c94f07ddb3194d40d1b183a6521911c45c81cc044ff351eb | OCamlPro/opam-bin | commandInstall.ml | (**************************************************************************)
(* *)
Copyright 2020 OCamlPro & Origin Labs
(* *)
(* All rights reserved. This file is distributed under the terms of the *)
GNU Lesser General Public License version 2.1 , with the special
(* exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
open Ezcmd.TYPES
open Ez_file.V1
open EzFile.OP
open EzConfig.OP
open Ez_opam_file.V1
module OpamParserTypes = OpamParserTypes.FullPos
let cmd_name = "install"
let add_repo ~repo ~url =
if not ( Sys.file_exists (Globals.opam_repo_dir // repo ) ) then
Misc.call ~nvo:None
[| "opam"; "remote" ; "add" ; repo ; "--dont-select"; url |]
let install_exe () =
let s = EzFile.read_file Sys.executable_name in
if Sys.file_exists Globals.opambin_bin then
Sys.remove Globals.opambin_bin;
EzFile.write_file Globals.opambin_bin s;
Unix.chmod Globals.opambin_bin 0o755;
Printf.eprintf "Executable copied as %s\n%!" Globals.opambin_bin;
EzFile.make_dir ~p:true Globals.opam_plugins_bin_dir ;
Misc.call ~nvo:None [|
"ln"; "-sf" ;
".." // Globals.command // Globals.command_exe ;
Globals.opam_plugins_bin_dir // Globals.command
|]
let hooks = [
"pre-session-commands",
Printf.sprintf {| ["%s" "pre-session"] |}
Globals.opambin_bin ;
"pre-build-commands",
Printf.sprintf
{| ["%s" "pre-build" name version "--opamfile=%%{opamfile}%%" {?opamfile} depends] |}
Globals.opambin_bin ;
"wrap-build-commands",
Printf.sprintf {| ["%s" "wrap-build" name version depends "--"] |}
Globals.opambin_bin ;
"pre-install-commands",
Printf.sprintf {| ["%s" "pre-install" name version depends] |}
Globals.opambin_bin ;
"wrap-install-commands",
Printf.sprintf {| ["%s" "wrap-install" name version depends "--"] |}
Globals.opambin_bin ;
"post-install-commands",
Printf.sprintf
{| ["%s" "post-install" name version "--opamfile=%%{opamfile}%%" {?opamfile} depends installed-files] { error-code = 0} |}
Globals.opambin_bin ;
"post-session-commands",
Printf.sprintf {| ["%s" "post-session"] |}
Globals.opambin_bin ;
"pre-remove-commands",
Printf.sprintf {| ["%s" "pre-remove" name version depends] |}
Globals.opambin_bin ;
]
(* [remove_opam_hooks file_contents] strips from an opam config file every
   variable whose name appears in [hooks].  Returns [Some items] (the
   remaining items, original order) when at least one hook was removed,
   and [None] when nothing matched.  Progress is reported on stderr. *)
let remove_opam_hooks file_contents =
  (* [iter items found rev]: walk [items] accumulating kept items in
     [rev] (reversed); [found] records whether any hook was dropped. *)
  let rec iter items found rev =
    match items with
    | [] ->
      if found then begin
        Printf.eprintf "Found hooks to remove\n%!";
        Some ( List.rev rev )
      end
      else begin
        Printf.eprintf "No hooks to remove\n%!";
        None
      end
    | item :: items ->
      let open OpamParserTypes in
      match item.pelem with
      | Variable (name, _) ->
        if List.mem_assoc name.pelem hooks then
          (* Drop this hook variable and remember we changed something. *)
          iter items true rev
        else
          iter items found ( item :: rev )
      | _ ->
        iter items found ( item :: rev )
  in
  iter file_contents false []
let install_hooks () =
Misc.change_opam_config (fun file_contents ->
let file_contents =
match remove_opam_hooks file_contents with
| None -> file_contents
| Some file_contents -> file_contents
in
Printf.eprintf "Adding %s hooks\n%!" Globals.command;
Some (
List.rev @@
(
(
List.map (fun (hook_name, hook) ->
Misc.opam_variable hook_name "%s" hook) hooks
)
@
List.rev file_contents
)
)
)
(* Create (if needed) the on-disk repository of generated binary packages
   and register it with opam under [Globals.opam_opambin_repo]. *)
let install_repos () =
  EzFile.make_dir ~p:true Globals.opambin_store_repo_packages_dir;
  (* Minimal repo metadata; the archive cache lives two levels up. *)
  EzFile.write_file ( Globals.opambin_store_repo_dir // "repo" )
    {|
opam-version: "2.0"
archive-mirrors: "../../cache"
|};
  EzFile.write_file ( Globals.opambin_store_repo_dir // "version" )
    "0.9.0";
  (* FIX: the format string was "file" — a zero-placeholder format applied
     to an extra argument (ill-typed, and no usable URL).  The repository
     URL must be file://<store-repo-dir>. *)
  add_repo ~repo:Globals.opam_opambin_repo
    ~url:( Printf.sprintf "file://%s"
             Globals.opambin_store_repo_dir )
let install_patches () =
let patches_url = !!Config.patches_url in
if EzString.starts_with patches_url ~prefix:"file://" then
(* nothing to do *)
Printf.eprintf "Using %s for patches\n%!" patches_url
else
let opambin_patches_dir = Globals.opambin_patches_dir in
let tmp_dir = opambin_patches_dir ^ ".tmp" in
if EzString.starts_with patches_url ~prefix:"git@" then begin
Misc.call ~nvo:None [| "rm"; "-rf"; tmp_dir |];
Misc.call ~nvo:None [| "git"; "clone" ; patches_url ; tmp_dir |];
Misc.call ~nvo:None [| "rm"; "-rf"; opambin_patches_dir |];
Misc.call ~nvo:None [| "mv"; tmp_dir; opambin_patches_dir |]
end else
if EzString.starts_with patches_url ~prefix:"https://"
|| EzString.starts_with patches_url ~prefix:"http://" then begin
let output = Globals.opambin_dir // "relocation-patches.tar.gz" in
Printf.eprintf "Downloading patches...\n%!";
match Misc.wget ~nvo:None ~url:patches_url ~output with
| None ->
Printf.kprintf failwith "Could not retrieve archive at %s" patches_url
| Some output ->
Misc.call ~nvo:None [| "rm"; "-rf"; tmp_dir |];
EzFile.make_dir ~p:true tmp_dir ;
Unix.chdir tmp_dir ;
Misc.call ~nvo:None [| "tar" ; "zxf" ; output |] ;
Unix.chdir Globals.curdir;
let patches_subdir =
let patches_subdir = tmp_dir // "patches" in
match
if Sys.file_exists patches_subdir then
Some patches_subdir
else
match EzFile.readdir tmp_dir with
| [| dir |] ->
let patches_subdir = tmp_dir // dir // "patches" in
if Sys.file_exists patches_subdir then
Some patches_subdir
else
None
| _ -> None
with
| Some patches_subdir -> patches_subdir
| None ->
Printf.kprintf failwith
"archive %s does not contain 'patches/' subdir" patches_url
in
Misc.call ~nvo:None [| "rm"; "-rf"; opambin_patches_dir |];
EzFile.make_dir ~p:true opambin_patches_dir;
Sys.rename patches_subdir (opambin_patches_dir // "patches");
Misc.call ~nvo:None [| "rm"; "-rf"; tmp_dir |];
Sys.remove output
end
else
begin
Printf.eprintf
"Error: patches_url '%s' should either be local (file://) or git (git@, http[s]://)\n%!" patches_url;
exit 2
end
let action args =
Printf.eprintf "%s\n\n%!" Globals.about ;
EzFile.make_dir ~p:true Globals.opambin_dir ;
Config.save ();
EzFile.make_dir ~p:true Globals.opambin_cache_dir;
match args with
| [] ->
install_exe ();
install_hooks ();
install_repos ();
install_patches ()
| _ ->
List.iter (function
| "exe" -> install_exe ()
| "hooks" -> install_hooks ()
| "repos" -> install_repos ()
| "patches" -> install_patches ()
| s ->
Printf.eprintf "Error: unexpected argument %S" s;
exit 2)
args
let cmd =
let anon_args = ref [] in
{
cmd_name ;
cmd_action = (fun () -> action !anon_args) ;
cmd_args = [
[], Anons (fun list -> anon_args := list),
Ezcmd.info "No args = all, otherwise 'exe', 'hooks' and/or 'repos'";
];
cmd_man = [
`S "DESCRIPTION" ;
`Blocks [
`P {|Here are the names of the actions performed by the command, that can be passed as arguments:|};
`I ("[exe]",
{|Install the current executable as an opam plugin, so that it is possible to use "opam bin" instead of "opam-bin" from anywhere;|});
`I ("[hooks]",
{|Modify the hooks of opam to call opam-bin everytime a package is installed;|});
`I ("[repos]",
{|Add the repository of generated binary packages as one of the default opam repository|});
`I ("[patches]", {|Download/upgrade the set of relocation patches|});
];
`P {|Without argument, all the actions are performed.|}
];
cmd_doc = {|
installs opam-bin in opam and download the set of relocation patches.
|}
}
| null | https://raw.githubusercontent.com/OCamlPro/opam-bin/322e46827280af0fdf6a24ac65893b67d3b9f269/src/opam_bin_lib/commandInstall.ml | ocaml | ************************************************************************
All rights reserved. This file is distributed under the terms of the
exception on linking described in the file LICENSE.
************************************************************************
nothing to do | Copyright 2020 OCamlPro & Origin Labs
GNU Lesser General Public License version 2.1 , with the special
open Ezcmd.TYPES
open Ez_file.V1
open EzFile.OP
open EzConfig.OP
open Ez_opam_file.V1
module OpamParserTypes = OpamParserTypes.FullPos
let cmd_name = "install"
let add_repo ~repo ~url =
if not ( Sys.file_exists (Globals.opam_repo_dir // repo ) ) then
Misc.call ~nvo:None
[| "opam"; "remote" ; "add" ; repo ; "--dont-select"; url |]
let install_exe () =
let s = EzFile.read_file Sys.executable_name in
if Sys.file_exists Globals.opambin_bin then
Sys.remove Globals.opambin_bin;
EzFile.write_file Globals.opambin_bin s;
Unix.chmod Globals.opambin_bin 0o755;
Printf.eprintf "Executable copied as %s\n%!" Globals.opambin_bin;
EzFile.make_dir ~p:true Globals.opam_plugins_bin_dir ;
Misc.call ~nvo:None [|
"ln"; "-sf" ;
".." // Globals.command // Globals.command_exe ;
Globals.opam_plugins_bin_dir // Globals.command
|]
let hooks = [
"pre-session-commands",
Printf.sprintf {| ["%s" "pre-session"] |}
Globals.opambin_bin ;
"pre-build-commands",
Printf.sprintf
{| ["%s" "pre-build" name version "--opamfile=%%{opamfile}%%" {?opamfile} depends] |}
Globals.opambin_bin ;
"wrap-build-commands",
Printf.sprintf {| ["%s" "wrap-build" name version depends "--"] |}
Globals.opambin_bin ;
"pre-install-commands",
Printf.sprintf {| ["%s" "pre-install" name version depends] |}
Globals.opambin_bin ;
"wrap-install-commands",
Printf.sprintf {| ["%s" "wrap-install" name version depends "--"] |}
Globals.opambin_bin ;
"post-install-commands",
Printf.sprintf
{| ["%s" "post-install" name version "--opamfile=%%{opamfile}%%" {?opamfile} depends installed-files] { error-code = 0} |}
Globals.opambin_bin ;
"post-session-commands",
Printf.sprintf {| ["%s" "post-session"] |}
Globals.opambin_bin ;
"pre-remove-commands",
Printf.sprintf {| ["%s" "pre-remove" name version depends] |}
Globals.opambin_bin ;
]
let remove_opam_hooks file_contents =
let rec iter items found rev =
match items with
| [] ->
if found then begin
Printf.eprintf "Found hooks to remove\n%!";
Some ( List.rev rev )
end
else begin
Printf.eprintf "No hooks to remove\n%!";
None
end
| item :: items ->
let open OpamParserTypes in
match item.pelem with
| Variable (name, _) ->
if List.mem_assoc name.pelem hooks then
iter items true rev
else
iter items found ( item :: rev )
| _ ->
iter items found ( item :: rev )
in
iter file_contents false []
let install_hooks () =
Misc.change_opam_config (fun file_contents ->
let file_contents =
match remove_opam_hooks file_contents with
| None -> file_contents
| Some file_contents -> file_contents
in
Printf.eprintf "Adding %s hooks\n%!" Globals.command;
Some (
List.rev @@
(
(
List.map (fun (hook_name, hook) ->
Misc.opam_variable hook_name "%s" hook) hooks
)
@
List.rev file_contents
)
)
)
let install_repos () =
EzFile.make_dir ~p:true Globals.opambin_store_repo_packages_dir;
EzFile.write_file ( Globals.opambin_store_repo_dir // "repo" )
{|
opam-version: "2.0"
archive-mirrors: "../../cache"
|};
EzFile.write_file ( Globals.opambin_store_repo_dir // "version" )
"0.9.0";
add_repo ~repo:Globals.opam_opambin_repo
~url:( Printf.sprintf "file"
Globals.opambin_store_repo_dir )
let install_patches () =
let patches_url = !!Config.patches_url in
if EzString.starts_with patches_url ~prefix:"file://" then
Printf.eprintf "Using %s for patches\n%!" patches_url
else
let opambin_patches_dir = Globals.opambin_patches_dir in
let tmp_dir = opambin_patches_dir ^ ".tmp" in
if EzString.starts_with patches_url ~prefix:"git@" then begin
Misc.call ~nvo:None [| "rm"; "-rf"; tmp_dir |];
Misc.call ~nvo:None [| "git"; "clone" ; patches_url ; tmp_dir |];
Misc.call ~nvo:None [| "rm"; "-rf"; opambin_patches_dir |];
Misc.call ~nvo:None [| "mv"; tmp_dir; opambin_patches_dir |]
end else
if EzString.starts_with patches_url ~prefix:"https://"
|| EzString.starts_with patches_url ~prefix:"http://" then begin
let output = Globals.opambin_dir // "relocation-patches.tar.gz" in
Printf.eprintf "Downloading patches...\n%!";
match Misc.wget ~nvo:None ~url:patches_url ~output with
| None ->
Printf.kprintf failwith "Could not retrieve archive at %s" patches_url
| Some output ->
Misc.call ~nvo:None [| "rm"; "-rf"; tmp_dir |];
EzFile.make_dir ~p:true tmp_dir ;
Unix.chdir tmp_dir ;
Misc.call ~nvo:None [| "tar" ; "zxf" ; output |] ;
Unix.chdir Globals.curdir;
let patches_subdir =
let patches_subdir = tmp_dir // "patches" in
match
if Sys.file_exists patches_subdir then
Some patches_subdir
else
match EzFile.readdir tmp_dir with
| [| dir |] ->
let patches_subdir = tmp_dir // dir // "patches" in
if Sys.file_exists patches_subdir then
Some patches_subdir
else
None
| _ -> None
with
| Some patches_subdir -> patches_subdir
| None ->
Printf.kprintf failwith
"archive %s does not contain 'patches/' subdir" patches_url
in
Misc.call ~nvo:None [| "rm"; "-rf"; opambin_patches_dir |];
EzFile.make_dir ~p:true opambin_patches_dir;
Sys.rename patches_subdir (opambin_patches_dir // "patches");
Misc.call ~nvo:None [| "rm"; "-rf"; tmp_dir |];
Sys.remove output
end
else
begin
Printf.eprintf
"Error: patches_url '%s' should either be local (file://) or git (git@, http[s]://)\n%!" patches_url;
exit 2
end
let action args =
Printf.eprintf "%s\n\n%!" Globals.about ;
EzFile.make_dir ~p:true Globals.opambin_dir ;
Config.save ();
EzFile.make_dir ~p:true Globals.opambin_cache_dir;
match args with
| [] ->
install_exe ();
install_hooks ();
install_repos ();
install_patches ()
| _ ->
List.iter (function
| "exe" -> install_exe ()
| "hooks" -> install_hooks ()
| "repos" -> install_repos ()
| "patches" -> install_patches ()
| s ->
Printf.eprintf "Error: unexpected argument %S" s;
exit 2)
args
let cmd =
let anon_args = ref [] in
{
cmd_name ;
cmd_action = (fun () -> action !anon_args) ;
cmd_args = [
[], Anons (fun list -> anon_args := list),
Ezcmd.info "No args = all, otherwise 'exe', 'hooks' and/or 'repos'";
];
cmd_man = [
`S "DESCRIPTION" ;
`Blocks [
`P {|Here are the names of the actions performed by the command, that can be passed as arguments:|};
`I ("[exe]",
{|Install the current executable as an opam plugin, so that it is possible to use "opam bin" instead of "opam-bin" from anywhere;|});
`I ("[hooks]",
{|Modify the hooks of opam to call opam-bin everytime a package is installed;|});
`I ("[repos]",
{|Add the repository of generated binary packages as one of the default opam repository|});
`I ("[patches]", {|Download/upgrade the set of relocation patches|});
];
`P {|Without argument, all the actions are performed.|}
];
cmd_doc = {|
installs opam-bin in opam and download the set of relocation patches.
|}
}
|
54cf0eface93c1d2b9a35594b801e86394ec8b79db718f981d77d3cfbce4059e | Bogdanp/deta | operator.rkt | #lang racket/base
(require (for-syntax racket/base
racket/syntax)
racket/match
syntax/parse
syntax/parse/define)
(provide
define-ops)
;; (define-ops kind [op maybe-str] ...) generates, for an operator family
;; `kind`:
;;   * write-<kind>-operator — writes the SQL spelling of an operator id
;;     (the given string, or the upcased symbol name when none is given);
;;   * <kind>-operator       — a match expander matching any op of the
;;     family, optionally binding the matched id.
;; Both identifiers are provided from the defining module.
(define-syntax (define-ops stx)
  (syntax-parse stx
    [(_ kind:id [op:id (~optional maybe-op-str:str)] ...)
     ;; Default spelling: upcase the operator symbol when no string given.
     #:with (op-str ...) (for/list ([op (in-list (syntax-e #'(op ...)))]
                                    [maybe-str (in-list (syntax-e #'((~? maybe-op-str #f) ...)))])
                           (cond
                             [(syntax->datum maybe-str) maybe-str]
                             [else (datum->syntax op (string-upcase (symbol->string (syntax->datum op))))]))
     #:with write-operator-id (format-id stx "write-~a-operator" #'kind)
     #:with match-expander-id (format-id stx "~a-operator" #'kind)
     (syntax/loc stx
       (begin
         (provide
          write-operator-id
          match-expander-id)
         ;; Dispatch on the operator symbol and emit its SQL spelling.
         (define (write-operator-id id)
           (write-string
            (case id
              [(op) op-str] ...)))
         ;; (kind-operator)     matches any op in the family;
         ;; (kind-operator out) additionally binds the matched symbol.
         (define-match-expander match-expander-id
           (lambda (stx)
             (syntax-parse stx
               [(_) #'(or 'op ...)]
               [(_ out) #'(and (or 'op ...) out)])))))]))
(define-ops unary
[bitwise-not "~"]
[date]
[interval]
[json]
[jsonb]
[not]
[time]
[timestamp])
(define-ops binary
[=] [>] [<] [>=] [<=] [<>] [!=]
[array-contains? "@>"]
[array-overlap? "&&"]
[array-ref]
[cast]
[extract]
[ilike]
[in]
[is-distinct "IS DISTINCT"]
[is]
[json-check-path "@@"]
[json-contains-all? "?&"]
[json-contains-any? "?|"]
[json-contains-path? "@?"]
[json-contains? "?"]
[json-ref-text "->>"]
[json-ref-text/path "#>>"]
[json-subset? "<@"]
[json-superset? "@>"]
[like]
[position]
[similar-to "SIMILAR TO"])
(define-ops ternary
[array-slice]
[between]
[trim])
(define-ops variadic
[+] [-] [*] [/] [%]
[<<] [>>]
[and]
[array-concat "||"]
[bitwise-and "&"]
[bitwise-or "|"]
[bitwise-xor "#"]
[json-concat "||"]
[json-ref "->"]
[json-ref/path "#>"]
[json-remove "-"]
[json-remove/path "#-"]
[or]
[string-concat "||"])
| null | https://raw.githubusercontent.com/Bogdanp/deta/503860156f5cb1dbecb4339e299ee86a10b66d32/deta-lib/private/dialect/operator.rkt | racket | #lang racket/base
(require (for-syntax racket/base
racket/syntax)
racket/match
syntax/parse
syntax/parse/define)
(provide
define-ops)
(define-syntax (define-ops stx)
(syntax-parse stx
[(_ kind:id [op:id (~optional maybe-op-str:str)] ...)
#:with (op-str ...) (for/list ([op (in-list (syntax-e #'(op ...)))]
[maybe-str (in-list (syntax-e #'((~? maybe-op-str #f) ...)))])
(cond
[(syntax->datum maybe-str) maybe-str]
[else (datum->syntax op (string-upcase (symbol->string (syntax->datum op))))]))
#:with write-operator-id (format-id stx "write-~a-operator" #'kind)
#:with match-expander-id (format-id stx "~a-operator" #'kind)
(syntax/loc stx
(begin
(provide
write-operator-id
match-expander-id)
(define (write-operator-id id)
(write-string
(case id
[(op) op-str] ...)))
(define-match-expander match-expander-id
(lambda (stx)
(syntax-parse stx
[(_) #'(or 'op ...)]
[(_ out) #'(and (or 'op ...) out)])))))]))
(define-ops unary
[bitwise-not "~"]
[date]
[interval]
[json]
[jsonb]
[not]
[time]
[timestamp])
(define-ops binary
[=] [>] [<] [>=] [<=] [<>] [!=]
[array-contains? "@>"]
[array-overlap? "&&"]
[array-ref]
[cast]
[extract]
[ilike]
[in]
[is-distinct "IS DISTINCT"]
[is]
[json-check-path "@@"]
[json-contains-all? "?&"]
[json-contains-any? "?|"]
[json-contains-path? "@?"]
[json-contains? "?"]
[json-ref-text "->>"]
[json-ref-text/path "#>>"]
[json-subset? "<@"]
[json-superset? "@>"]
[like]
[position]
[similar-to "SIMILAR TO"])
(define-ops ternary
[array-slice]
[between]
[trim])
(define-ops variadic
[+] [-] [*] [/] [%]
[<<] [>>]
[and]
[array-concat "||"]
[bitwise-and "&"]
[bitwise-or "|"]
[bitwise-xor "#"]
[json-concat "||"]
[json-ref "->"]
[json-ref/path "#>"]
[json-remove "-"]
[json-remove/path "#-"]
[or]
[string-concat "||"])
| |
0ab8756726336205200a0f011d30155f977aa3d21e193f7f55ddfc42bb3f3eb7 | jiangpengnju/htdp2e | ex146.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-beginner-reader.ss" "lang")((modname ex146) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
Design sorted > ? . The function consumes a NEList - of - temperatures . It produces # t
if the temperatures are sorted in descending order , that is , if the second is
smaller than the first , the third smaller than the second , and so on .
Otherwise it produces # f.
A NEList - of - temperatures is one of :
- ( cons CTemperature ' ( ) )
- ( cons CTemperature NEList - of - temperatures )
; interpretation: non-empty lists of measured temperatures.
A CTemperature is a Number greater or equal to -273 .
(define ex2 (cons 1 '()))
(define ex3 (cons 1 (cons 2 (cons 3 '()))))
(define ex4 (cons 3 (cons 2 (cons 1 '()))))
NEList - of - temperatures - > Boolean
are the items of anelot sorted in descending order
(check-expect (sorted>? ex2) #t)
(check-expect (sorted>? ex3) #f)
(check-expect (sorted>? ex4) #t)
(define (sorted>? anelot)
(cond
[(empty? (rest anelot)) #t]
[(cons? (rest anelot))
(and (> (first anelot) (first (rest anelot)))
(sorted>? (rest anelot)))])) | null | https://raw.githubusercontent.com/jiangpengnju/htdp2e/d41555519fbb378330f75c88141f72b00a9ab1d3/arbitrarily-large-data/designing-with-self-referential-data-dafinitions/ex146.rkt | racket | about the language level of this file in a form that our tools can easily process.
interpretation: non-empty lists of measured temperatures. | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-beginner-reader.ss" "lang")((modname ex146) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
Design sorted > ? . The function consumes a NEList - of - temperatures . It produces # t
if the temperatures are sorted in descending order , that is , if the second is
smaller than the first , the third smaller than the second , and so on .
Otherwise it produces # f.
A NEList - of - temperatures is one of :
- ( cons CTemperature ' ( ) )
- ( cons CTemperature NEList - of - temperatures )
A CTemperature is a Number greater or equal to -273 .
(define ex2 (cons 1 '()))
(define ex3 (cons 1 (cons 2 (cons 3 '()))))
(define ex4 (cons 3 (cons 2 (cons 1 '()))))
NEList - of - temperatures - > Boolean
are the items of anelot sorted in descending order
(check-expect (sorted>? ex2) #t)
(check-expect (sorted>? ex3) #f)
(check-expect (sorted>? ex4) #t)
(define (sorted>? anelot)
(cond
[(empty? (rest anelot)) #t]
[(cons? (rest anelot))
(and (> (first anelot) (first (rest anelot)))
(sorted>? (rest anelot)))])) |
89a646bd2c808d52fb4f7bcdeb728763fc5cbf86eccfa2bf77467940969ec670 | merlin-lang/merlin | Merlin_TC.ml | open Merlin_Types
open Merlin_Util
open Merlin_Error
open Frenetic_OpenFlow
open Frenetic_Packet
module NK = Frenetic_NetKAT
let major = ref 0
let fresh_major () : int =
incr major ; !major
module TC = struct
type tcdev = string
type tchandle = int * int
type tcrate = rate
type tcprio = TCPrio of int
type tcqdisc = TCQdisc of tchandle option * tchandle * int option
type tcclass = TCClass of tchandle * tchandle * tcrate * tcprio
type tcfilter = Merlin_Types.pred
type tccmd =
| TCQdiscCmd of tcdev * tcqdisc
| TCClassCmd of tcdev * tcclass
| TCFilterCmd of tcdev * tchandle * tcfilter * tchandle
(* Utility functions *)
let get_byte (n:int64) (i:int) : int =
if i < 0 || i > 5 then
raise (Invalid_argument "Int64.get_byte index out of range");
Int64.to_int (Int64.logand 0xFFL (Int64.shift_right_logical n (8 * i)))
let string32_of_dlAddr x =
let fst = Format.sprintf "0x%02x%02x%02x%02x"
(get_byte x 5) (get_byte x 4) (get_byte x 3) (get_byte x 2) in
let snd = Format.sprintf "0x%02x:%02x0000" (get_byte x 1) (get_byte x 0) in
(fst,snd)
let rate_to_mbps_string n =
let mbps = Int64.div n 131072L in
Printf.sprintf "%Ld mbps" mbps
Convert TC types to command strings
let string_of_tchandle (h1,h2) =
Printf.sprintf "%d:%d" h1 h2
let string_of_tcqdisc qd =
match qd with TCQdisc(p, c, d) ->
let parent = match p with
| None -> "root"
| Some((h1,h2)) -> Printf.sprintf "%d:%d" h1 h2 in
let handle, _ = c in (* Qdisc handle only has a major number *)
let default = match d with
| None -> ""
| Some(i) -> Printf.sprintf "default %d" i in
Printf.sprintf "parent %s handle %d: htb %s" parent handle default
let string_of_tcclass c =
match c with TCClass(p, i, r, pr) ->
let (p1,p2) = p in
let (i1,i2) = i in
let Rate(min,max) = r in
let rate = rate_to_mbps_string min in
let r = if rate = "" then rate else "rate " ^ rate in
let ceiling = rate_to_mbps_string max in
let c = if ceiling = "" then ceiling else "ceil " ^ ceiling in
let TCPrio(p) = pr in
Printf.sprintf "parent %d:%d classid %d:%d %s %s prio %d"
p1 p2 i1 i2 r c p
let string_of_test t =
match t with
| NK.EthSrc n ->
let fst,snd = string32_of_dlAddr n in
Printf.sprintf
"protocol 802_3 u32 match u32 %s 0xffffffff at 6 match u32 %s 0xffff0000 at 10"
fst snd
| NK.EthDst n ->
let fst,snd = string32_of_dlAddr n in
Printf.sprintf
"protocol 802_3 u32 match u32 %s 0xffffffff at 0 match u32 %s 0xffff0000 at 4"
fst snd
| NK.IP4Src (n,_) ->
Printf.sprintf "protocol ip prio 1 u32 match ip src %s"
(string_of_nwAddr n)
| NK.IP4Dst (n,_) ->
Printf.sprintf "protocol ip prio 1 u32 match ip dst %s"
(string_of_nwAddr n)
| NK.IPProto n ->
Printf.sprintf "protocol ip prio 1 u32 match ip protocol %s"
(string_of_int n)
| NK.TCPSrcPort n ->
Printf.sprintf "protocol ip prio 1 u32 match ip sport %s"
(string_of_int n)
| NK.TCPDstPort n ->
Printf.sprintf "protocol ip prio 1 u32 match ip dport %s"
(string_of_int n)
| _ ->
""(* raise (Unimplementable_tc_test f) *)
let rec string_of_tcfilter (p:tcfilter) : string = match p with
| Test (t) -> string_of_test t
| And (p,p') ->
Printf.sprintf "%s %s"
(string_of_tcfilter p)
(string_of_tcfilter p')
This is a hack . tc really needs separate filters attached to the same class
to implement an or . So whatever 's calling this function should ideally pull
the Or apart and issue separate calls . The is to allow the string to
be split after - the - fact , though that should not be relied on
to implement an or. So whatever's calling this function should ideally pull
the Or apart and issue separate calls. The \n is to allow the string to
be split after-the-fact, though that should not be relied on *)
| Or (p, p') -> Printf.sprintf "%s\n%s"
(string_of_tcfilter p)
(string_of_tcfilter p')
| Nothing -> ""
| Everything -> ""
| p -> raise (Unimplementable_tc_predicate p)
(* Only support adding new control settings for now *)
let string_of_tccmd cmd =
match cmd with
| TCQdiscCmd(dev, qd) -> Printf.sprintf "tc qdisc add dev %s %s" dev
(string_of_tcqdisc qd)
| TCClassCmd(dev, cl) -> Printf.sprintf "tc class add dev %s %s" dev
(string_of_tcclass cl)
| TCFilterCmd(dev, parent, pr, h) ->
Printf.sprintf "tc filter add dev %s parent %s %s flowid %s" dev
(string_of_tchandle parent)
(string_of_tcfilter pr)
(string_of_tchandle h)
let tccmds_of_spec (pr:tcfilter) (r:tcrate) =
let dev = "eth0" in
let prio = TCPrio(1) in
let major = (fresh_major ()) in
let qhandle = (major,0) in
let chandle = (major, 1) in
let qd = TCQdisc(None, qhandle, None) in
let cl = TCClass(qhandle,chandle,r,prio) in
[TCQdiscCmd(dev, qd) ; TCClassCmd(dev, cl) ; TCFilterCmd(dev, qhandle, pr, chandle)]
end
| null | https://raw.githubusercontent.com/merlin-lang/merlin/35a88bce024a8b8be858c796f1cd718e4a660529/lib/Merlin_TC.ml | ocaml | Utility functions
Qdisc handle only has a major number
raise (Unimplementable_tc_test f)
Only support adding new control settings for now | open Merlin_Types
open Merlin_Util
open Merlin_Error
open Frenetic_OpenFlow
open Frenetic_Packet
module NK = Frenetic_NetKAT
let major = ref 0
let fresh_major () : int =
incr major ; !major
module TC = struct
type tcdev = string
type tchandle = int * int
type tcrate = rate
type tcprio = TCPrio of int
type tcqdisc = TCQdisc of tchandle option * tchandle * int option
type tcclass = TCClass of tchandle * tchandle * tcrate * tcprio
type tcfilter = Merlin_Types.pred
type tccmd =
| TCQdiscCmd of tcdev * tcqdisc
| TCClassCmd of tcdev * tcclass
| TCFilterCmd of tcdev * tchandle * tcfilter * tchandle
let get_byte (n:int64) (i:int) : int =
if i < 0 || i > 5 then
raise (Invalid_argument "Int64.get_byte index out of range");
Int64.to_int (Int64.logand 0xFFL (Int64.shift_right_logical n (8 * i)))
let string32_of_dlAddr x =
let fst = Format.sprintf "0x%02x%02x%02x%02x"
(get_byte x 5) (get_byte x 4) (get_byte x 3) (get_byte x 2) in
let snd = Format.sprintf "0x%02x:%02x0000" (get_byte x 1) (get_byte x 0) in
(fst,snd)
let rate_to_mbps_string n =
let mbps = Int64.div n 131072L in
Printf.sprintf "%Ld mbps" mbps
Convert TC types to command strings
let string_of_tchandle (h1,h2) =
Printf.sprintf "%d:%d" h1 h2
let string_of_tcqdisc qd =
match qd with TCQdisc(p, c, d) ->
let parent = match p with
| None -> "root"
| Some((h1,h2)) -> Printf.sprintf "%d:%d" h1 h2 in
let default = match d with
| None -> ""
| Some(i) -> Printf.sprintf "default %d" i in
Printf.sprintf "parent %s handle %d: htb %s" parent handle default
let string_of_tcclass c =
match c with TCClass(p, i, r, pr) ->
let (p1,p2) = p in
let (i1,i2) = i in
let Rate(min,max) = r in
let rate = rate_to_mbps_string min in
let r = if rate = "" then rate else "rate " ^ rate in
let ceiling = rate_to_mbps_string max in
let c = if ceiling = "" then ceiling else "ceil " ^ ceiling in
let TCPrio(p) = pr in
Printf.sprintf "parent %d:%d classid %d:%d %s %s prio %d"
p1 p2 i1 i2 r c p
let string_of_test t =
match t with
| NK.EthSrc n ->
let fst,snd = string32_of_dlAddr n in
Printf.sprintf
"protocol 802_3 u32 match u32 %s 0xffffffff at 6 match u32 %s 0xffff0000 at 10"
fst snd
| NK.EthDst n ->
let fst,snd = string32_of_dlAddr n in
Printf.sprintf
"protocol 802_3 u32 match u32 %s 0xffffffff at 0 match u32 %s 0xffff0000 at 4"
fst snd
| NK.IP4Src (n,_) ->
Printf.sprintf "protocol ip prio 1 u32 match ip src %s"
(string_of_nwAddr n)
| NK.IP4Dst (n,_) ->
Printf.sprintf "protocol ip prio 1 u32 match ip dst %s"
(string_of_nwAddr n)
| NK.IPProto n ->
Printf.sprintf "protocol ip prio 1 u32 match ip protocol %s"
(string_of_int n)
| NK.TCPSrcPort n ->
Printf.sprintf "protocol ip prio 1 u32 match ip sport %s"
(string_of_int n)
| NK.TCPDstPort n ->
Printf.sprintf "protocol ip prio 1 u32 match ip dport %s"
(string_of_int n)
| _ ->
let rec string_of_tcfilter (p:tcfilter) : string = match p with
| Test (t) -> string_of_test t
| And (p,p') ->
Printf.sprintf "%s %s"
(string_of_tcfilter p)
(string_of_tcfilter p')
This is a hack . tc really needs separate filters attached to the same class
to implement an or . So whatever 's calling this function should ideally pull
the Or apart and issue separate calls . The is to allow the string to
be split after - the - fact , though that should not be relied on
to implement an or. So whatever's calling this function should ideally pull
the Or apart and issue separate calls. The \n is to allow the string to
be split after-the-fact, though that should not be relied on *)
| Or (p, p') -> Printf.sprintf "%s\n%s"
(string_of_tcfilter p)
(string_of_tcfilter p')
| Nothing -> ""
| Everything -> ""
| p -> raise (Unimplementable_tc_predicate p)
let string_of_tccmd cmd =
match cmd with
| TCQdiscCmd(dev, qd) -> Printf.sprintf "tc qdisc add dev %s %s" dev
(string_of_tcqdisc qd)
| TCClassCmd(dev, cl) -> Printf.sprintf "tc class add dev %s %s" dev
(string_of_tcclass cl)
| TCFilterCmd(dev, parent, pr, h) ->
Printf.sprintf "tc filter add dev %s parent %s %s flowid %s" dev
(string_of_tchandle parent)
(string_of_tcfilter pr)
(string_of_tchandle h)
let tccmds_of_spec (pr:tcfilter) (r:tcrate) =
let dev = "eth0" in
let prio = TCPrio(1) in
let major = (fresh_major ()) in
let qhandle = (major,0) in
let chandle = (major, 1) in
let qd = TCQdisc(None, qhandle, None) in
let cl = TCClass(qhandle,chandle,r,prio) in
[TCQdiscCmd(dev, qd) ; TCClassCmd(dev, cl) ; TCFilterCmd(dev, qhandle, pr, chandle)]
end
|
15f6aaf54568050169c4810dac0c88e613319e8aade7ec930f01c88fa49f8eec | rtoy/cmucl | composite.lisp | -*- Mode : Lisp ; Syntax : Common - Lisp ; Package : XLIB ; -*-
;;; ---------------------------------------------------------------------------
;;; Title: Composite Extension
Created : 2014 - 11 - 17
Author : < >
;;; ---------------------------------------------------------------------------
;;;
( c ) copyright 2014 by
;;;
;;; Permission is granted to any individual or institution to use,
;;; copy, modify, and distribute this software, provided that this
;;; complete copyright and permission notice is maintained, intact, in
;;; all copies and supporting documentation.
;;;
;;; This program is distributed in the hope that it will be useful,
;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
;;;
(in-package :xlib)
(export '(composite-query-version
composite-redirect-window
composite-redirect-subwindows
composite-unredirect-window
composite-unredirect-subwindows
composite-get-overlay-window
composite-release-overlay-window))
(define-extension "Composite")
(defconstant +composite-major+ 0
"Major version.")
(defconstant +composite-minor+ 4
"Minor version.")
(defconstant +redirect-automatic+ 0
"The automatic update type automatically updates the parent window.")
(defconstant +redirect-manual+ 1
"Prevents some activities that would otherwise be automatic.")
;; xrequests
(defconstant +composite-QueryVersion+ 0
"Query for the version of composite.")
(defconstant +composite-RedirectWindow+ 1
"Store this hierarchy off-screen.")
(defconstant +composite-RedirectSubwindows+ 2
"Store only the sub-hierarchy.")
(defconstant +composite-UnredirectWindow+ 3
"Stop storing the window and subwindows.")
(defconstant +composite-UnredirectSubwindows+ 4
"Stop storing the sub-hierarchy.")
(defconstant +composite-CreateRegionFromBorderClip+ 5
"The region clinpped against the surrounding windows.")
(defconstant +composite-NameWindowPixmap+ 6
"The off-screen pixmap for the window.")
(defconstant +composite-GetOverlayWindow+ 7
"Get a surface to draw on.")
(defconstant +composite-ReleaseOverlayWindow+ 8
"Release the overlay surface.")
(defmacro composite-opcode (display)
`(extension-opcode ,display "Composite"))
;; types
(deftype update-type () '(card8))
;; x requests
(defun composite-query-version (display)
"Query for the version. All clients are expected to query!"
(declare (type display display))
(with-buffer-request-and-reply (display (composite-opcode display) nil :sizes (32))
((data +composite-QueryVersion+)
(card32 +composite-major+)
(card32 +composite-minor+))
(values
(card32-get 8)
(card32-get 12))))
(defun composite-redirect-window (window update-type)
"Store window and its children off-screen, using update-type for whether to
sync those or not."
(let ((display (window-display window)))
(declare (type display display)
(type window window)
(type update-type update-type))
(with-buffer-request (display (composite-opcode display))
(data +composite-redirectwindow+)
(window window)
(card8 update-type)
(card8 0)
(card16 0))))
(defun composite-redirect-subwindows (window update-type)
"Store the subwindows of the window (but not the window itself).
update-type determines if syncing is allowed."
(let ((display (window-display window)))
(declare (type display display)
(type window window)
(type update-type update-type))
(with-buffer-request (display (composite-opcode display))
(data +composite-redirectsubwindows+)
(window window)
(card8 update-type)
(card8 0)
(card16 0))))
(defun composite-unredirect-window (window)
"Terminates the redirection."
(let ((display (window-display window)))
(declare (type display display)
(type window window))
(with-buffer-request (display (composite-opcode display))
(data +composite-unredirectwindow+)
(window window))))
(defun composite-unredirect-subwindows (window)
"Terminates the redirection of the child hierarchies of window."
(let ((display (window-display window)))
(declare (type display display)
(type window window))
(with-buffer-request (display (composite-opcode display))
(data +composite-unredirectsubwindows+)
(window window))))
(defun composite-create-region-from-border-clip (window region)
"Region clipped on surrounding windows."
(let ((display (window-display window)))
(declare (type display display)
(type window window))
(with-buffer-request (display (composite-opcode display))
(data +composite-createregionfromborderclip+)
(card32 region)
(window window))))
(defun composite-name-window-pixmap (window drawable)
"Refer to an off-screen pixmap for the window."
(let ((display (window-display window)))
(declare (type display display)
(type window window))
(with-buffer-request (display (composite-opcode display))
(data +composite-namewindowpixmap+)
(window window)
(drawable drawable))))
(defun composite-get-overlay-window (window)
"Take control of the window for composite use. A place to draw things without
interference. Requires a compositing window manager to be running in order to
use the overlay. Release it with COMPOSITE-RELEASE-OVERLAY-WINDOW."
(let ((display (window-display window)))
(declare (type display display)
(type window window))
(with-buffer-request-and-reply (display (composite-opcode display) nil :sizes (32))
((data +composite-getoverlaywindow+)
(window window))
(values (card32-get 8)))))
(defun composite-release-overlay-window (window)
"Release a window which was controlled by COMPOSITE-GET-OVERLAY-WINDOW."
(let ((display (window-display window)))
(declare (type display display)
(type window window))
(with-buffer-request (display (composite-opcode display))
(data +composite-releaseoverlaywindow+)
(window window))))
| null | https://raw.githubusercontent.com/rtoy/cmucl/263e93982390688b10caead45e494825d10d5966/src/clx/extensions/composite.lisp | lisp | Syntax : Common - Lisp ; Package : XLIB ; -*-
---------------------------------------------------------------------------
Title: Composite Extension
---------------------------------------------------------------------------
Permission is granted to any individual or institution to use,
copy, modify, and distribute this software, provided that this
complete copyright and permission notice is maintained, intact, in
all copies and supporting documentation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
xrequests
types
x requests | Created : 2014 - 11 - 17
Author : < >
( c ) copyright 2014 by
(in-package :xlib)
(export '(composite-query-version
composite-redirect-window
composite-redirect-subwindows
composite-unredirect-window
composite-unredirect-subwindows
composite-get-overlay-window
composite-release-overlay-window))
(define-extension "Composite")
(defconstant +composite-major+ 0
"Major version.")
(defconstant +composite-minor+ 4
"Minor version.")
(defconstant +redirect-automatic+ 0
"The automatic update type automatically updates the parent window.")
(defconstant +redirect-manual+ 1
"Prevents some activities that would otherwise be automatic.")
(defconstant +composite-QueryVersion+ 0
"Query for the version of composite.")
(defconstant +composite-RedirectWindow+ 1
"Store this hierarchy off-screen.")
(defconstant +composite-RedirectSubwindows+ 2
"Store only the sub-hierarchy.")
(defconstant +composite-UnredirectWindow+ 3
"Stop storing the window and subwindows.")
(defconstant +composite-UnredirectSubwindows+ 4
"Stop storing the sub-hierarchy.")
(defconstant +composite-CreateRegionFromBorderClip+ 5
"The region clinpped against the surrounding windows.")
(defconstant +composite-NameWindowPixmap+ 6
"The off-screen pixmap for the window.")
(defconstant +composite-GetOverlayWindow+ 7
"Get a surface to draw on.")
(defconstant +composite-ReleaseOverlayWindow+ 8
"Release the overlay surface.")
(defmacro composite-opcode (display)
`(extension-opcode ,display "Composite"))
(deftype update-type () '(card8))
(defun composite-query-version (display)
"Query for the version. All clients are expected to query!"
(declare (type display display))
(with-buffer-request-and-reply (display (composite-opcode display) nil :sizes (32))
((data +composite-QueryVersion+)
(card32 +composite-major+)
(card32 +composite-minor+))
(values
(card32-get 8)
(card32-get 12))))
(defun composite-redirect-window (window update-type)
"Store window and its children off-screen, using update-type for whether to
sync those or not."
(let ((display (window-display window)))
(declare (type display display)
(type window window)
(type update-type update-type))
(with-buffer-request (display (composite-opcode display))
(data +composite-redirectwindow+)
(window window)
(card8 update-type)
(card8 0)
(card16 0))))
(defun composite-redirect-subwindows (window update-type)
"Store the subwindows of the window (but not the window itself).
update-type determines if syncing is allowed."
(let ((display (window-display window)))
(declare (type display display)
(type window window)
(type update-type update-type))
(with-buffer-request (display (composite-opcode display))
(data +composite-redirectsubwindows+)
(window window)
(card8 update-type)
(card8 0)
(card16 0))))
(defun composite-unredirect-window (window)
"Terminates the redirection."
(let ((display (window-display window)))
(declare (type display display)
(type window window))
(with-buffer-request (display (composite-opcode display))
(data +composite-unredirectwindow+)
(window window))))
(defun composite-unredirect-subwindows (window)
"Terminates the redirection of the child hierarchies of window."
(let ((display (window-display window)))
(declare (type display display)
(type window window))
(with-buffer-request (display (composite-opcode display))
(data +composite-unredirectsubwindows+)
(window window))))
(defun composite-create-region-from-border-clip (window region)
"Region clipped on surrounding windows."
(let ((display (window-display window)))
(declare (type display display)
(type window window))
(with-buffer-request (display (composite-opcode display))
(data +composite-createregionfromborderclip+)
(card32 region)
(window window))))
(defun composite-name-window-pixmap (window drawable)
"Refer to an off-screen pixmap for the window."
(let ((display (window-display window)))
(declare (type display display)
(type window window))
(with-buffer-request (display (composite-opcode display))
(data +composite-namewindowpixmap+)
(window window)
(drawable drawable))))
(defun composite-get-overlay-window (window)
"Take control of the window for composite use. A place to draw things without
interference. Requires a compositing window manager to be running in order to
use the overlay. Release it with COMPOSITE-RELEASE-OVERLAY-WINDOW."
(let ((display (window-display window)))
(declare (type display display)
(type window window))
(with-buffer-request-and-reply (display (composite-opcode display) nil :sizes (32))
((data +composite-getoverlaywindow+)
(window window))
(values (card32-get 8)))))
(defun composite-release-overlay-window (window)
"Release a window which was controlled by COMPOSITE-GET-OVERLAY-WINDOW."
(let ((display (window-display window)))
(declare (type display display)
(type window window))
(with-buffer-request (display (composite-opcode display))
(data +composite-releaseoverlaywindow+)
(window window))))
|
5b22ff43c0ece1d19ccdb5d5164f9e0521bf5547181ddbe6208734d7bab9033b | xavierleroy/ocamlmpi | mpi.mli | (***********************************************************************)
(* *)
(* The Caml/MPI interface *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1998 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the GNU Library General Public License , with
(* the special exception on linking described in file LICENSE. *)
(* *)
(***********************************************************************)
* { 1 Caml bindings for the Message Passing Interface ( MPI ) library }
* { 2 Error reporting }
exception Error of string
(* Raised when an operation of the [Mpi] module encounters an error.
The string argument describes the error. *)
* { 2 Basic operations on communicators }
type communicator
(** The type of communicators. Communicators are groups of
nodes (processing elements) that can exchange data. *)
type rank = int
* The type of ranks of nodes . Nodes in a given communicator
are assigned integer ranks [ 0 , 1 , ... , N-1 ] where [ N ]
is the size of the communicator .
are assigned integer ranks [0, 1, ..., N-1] where [N]
is the size of the communicator. *)
val comm_world: communicator
(** The global communicator. *)
external comm_size: communicator -> int = "caml_mpi_comm_size"
(** Return the size (number of nodes) in the given communicator. *)
external comm_rank: communicator -> rank = "caml_mpi_comm_rank"
(** Return the rank of the calling node in the given communicator.
The rank [Mpi.comm_rank c] is between 0 (inclusive) and
[Mpi.comm_size c] (exclusive). *)
* { 2 Point - to - point communication }
type tag = int
(** The type of tags associated with messages in point-to-point
communications. Tags are positive integers in the range
[0...32767]. *)
val send: 'a -> rank -> tag -> communicator -> unit
* [ Mpi.send d dst tag comm ] sends a message containing data [ d ]
to the node that has rank [ dst ] in communicator [ comm ] .
The message is sent with tag [ tag ] . Depending on the
underlying MPI implementation , message sending can be
synchronous or asynchronous ; that is , [ Mpi.send ] can block
until the target node receives the message , or [ Mpi.send ]
can return before the target node has received the message .
to the node that has rank [dst] in communicator [comm].
The message is sent with tag [tag]. Depending on the
underlying MPI implementation, message sending can be
synchronous or asynchronous; that is, [Mpi.send] can block
until the target node receives the message, or [Mpi.send]
can return before the target node has received the message. *)
val receive: rank -> tag -> communicator -> 'a
* [ Mpi.receive src tag comm ] blocks until a message is available ,
and returns the data contained in that message .
The [ src ] argument selects the desired source for the message :
if [ src ] is [ Mpi.any_source ] , messages from any node in communicator
[ comm ] are accepted ; otherwise , only messages sent by the node
having rank [ src ] in [ comm ] are accepted .
Similarly , the [ tag ] argument selects messages by their tag :
if [ tag ] is [ Mpi.any_tag ] , messages are accepted regardless of
their tags ; otherwise , only messages with tag equal to [ tag ]
are accepted .
Warning : just like the [ Marshal.from _ * ] functions ,
[ Mpi.receive ] is not type - safe . The value returned by
[ Mpi.receive ] does not possess type [ ' a ]
for all [ ' a ] ; it has one , unique type which can not be determined
at compile - type . The programmer should be careful about using
the returned value with the right type .
and returns the data contained in that message.
The [src] argument selects the desired source for the message:
if [src] is [Mpi.any_source], messages from any node in communicator
[comm] are accepted; otherwise, only messages sent by the node
having rank [src] in [comm] are accepted.
Similarly, the [tag] argument selects messages by their tag:
if [tag] is [Mpi.any_tag], messages are accepted regardless of
their tags; otherwise, only messages with tag equal to [tag]
are accepted.
Warning: just like the [Marshal.from_*] functions,
[Mpi.receive] is not type-safe. The Caml value returned by
[Mpi.receive] does not possess type ['a]
for all ['a]; it has one, unique type which cannot be determined
at compile-type. The programmer should be careful about using
the returned value with the right type. *)
val receive_status: rank -> tag -> communicator -> 'a * rank * tag
(** Same as [Mpi.receive], but returns a triple [(d, src, tag)]
where [d] is the data associated with the message,
[src] the rank of the node that sent the message,
and [tag] the actual tag attached to the message. *)
val probe: rank -> tag -> communicator -> rank * tag
* [ Mpi.probe src tag comm ] blocks until a message is available
on communicator [ comm ] , with source and tag matching the
[ src ] and [ tag ] arguments as described in [ Mpi.receive ] .
It then returns the rank of the node that sent the message
and the actual tag attached to the message . The message itself
is not read , and can be retrieved later with [ Mpi.receive ]
or [ Mpi.receive_status ] .
on communicator [comm], with source and tag matching the
[src] and [tag] arguments as described in [Mpi.receive].
It then returns the rank of the node that sent the message
and the actual tag attached to the message. The message itself
is not read, and can be retrieved later with [Mpi.receive]
or [Mpi.receive_status]. *)
val iprobe: rank -> tag -> communicator -> (rank * tag) option
(** [Mpi.iprobe src tag comm] is a non-blocking counterpart to
{!probe}. If there is no matching message waiting it returns
[None]. Otherwise, it returns [Some (rank, tag)] like
{!probe}. *)
val any_tag: tag
val any_source: rank
* The special values of the [ tag ] and [ src ] arguments of
[ Mpi.receive ] , [ Mpi.receive_status ] and [ Mpi.probe ] ,
indicating that any message tag is acceptable ( for [ Mpi.any_tag ] )
or any message source is acceptable ( for [ Mpi.any_source ] ) .
[Mpi.receive], [Mpi.receive_status] and [Mpi.probe],
indicating that any message tag is acceptable (for [Mpi.any_tag])
or any message source is acceptable (for [Mpi.any_source]). *)
val send_int: int -> rank -> tag -> communicator -> unit
val receive_int: rank -> tag -> communicator -> int
val send_float: float -> rank -> tag -> communicator -> unit
val receive_float: rank -> tag -> communicator -> float
val send_int_array: int array -> rank -> tag -> communicator -> unit
val receive_int_array: int array -> rank -> tag -> communicator -> unit
val send_float_array: float array -> rank -> tag -> communicator -> unit
val receive_float_array: float array -> rank -> tag -> communicator -> unit
val send_bigarray:
('a, 'b, 'c) Bigarray.Genarray.t -> rank -> tag -> communicator -> unit
val send_bigarray0:
('a, 'b, 'c) Bigarray.Array0.t -> rank -> tag -> communicator -> unit
val send_bigarray1:
('a, 'b, 'c) Bigarray.Array1.t -> rank -> tag -> communicator -> unit
val send_bigarray2:
('a, 'b, 'c) Bigarray.Array2.t -> rank -> tag -> communicator -> unit
val send_bigarray3:
('a, 'b, 'c) Bigarray.Array3.t -> rank -> tag -> communicator -> unit
val receive_bigarray:
('a, 'b, 'c) Bigarray.Genarray.t -> rank -> tag -> communicator -> unit
val receive_bigarray0:
('a, 'b, 'c) Bigarray.Array0.t -> rank -> tag -> communicator -> unit
val receive_bigarray1:
('a, 'b, 'c) Bigarray.Array1.t -> rank -> tag -> communicator -> unit
val receive_bigarray2:
('a, 'b, 'c) Bigarray.Array2.t -> rank -> tag -> communicator -> unit
val receive_bigarray3:
('a, 'b, 'c) Bigarray.Array3.t -> rank -> tag -> communicator -> unit
* Specialized versions of [ Mpi.send ] and [ Mpi.receive ]
for communicating integers , floating - point numbers ,
arrays of integers , arrays of floating - point numbers and
bigarrays .
These specialized versions are more efficient than
[ Mpi.send ] and [ Mpi.receive ] since less copying is involved .
The arguments to the [ Mpi.send _ * ] functions have the same
meaning as for [ Mpi.send ] .
The arguments to [ Mpi.receive_int ] and [ Mpi.receive_float ]
have the same meaning as for [ Mpi.receive ] .
[ Mpi.receive_int_array ] , [ Mpi.receive_float_array ] and
[ Mpi.receive_bigarray * ]
have one extra argument , which is the array in which the data
of the received message is stored . The caller is responsible
for pre - allocating an array large enough to hold the incoming data .
It is an error to send a message using one of the specialized
[ Mpi.send _ * ] functions and receive it with the generic
[ Mpi.receive ] function , and conversely .
It is possible to receive a bigarray with different dimensions
than those used to send it ; only the total number of elements must
match .
for communicating integers, floating-point numbers,
arrays of integers, arrays of floating-point numbers and
bigarrays.
These specialized versions are more efficient than
[Mpi.send] and [Mpi.receive] since less copying is involved.
The arguments to the [Mpi.send_*] functions have the same
meaning as for [Mpi.send].
The arguments to [Mpi.receive_int] and [Mpi.receive_float]
have the same meaning as for [Mpi.receive].
[Mpi.receive_int_array], [Mpi.receive_float_array] and
[Mpi.receive_bigarray*]
have one extra argument, which is the array in which the data
of the received message is stored. The caller is responsible
for pre-allocating an array large enough to hold the incoming data.
It is an error to send a message using one of the specialized
[Mpi.send_*] functions and receive it with the generic
[Mpi.receive] function, and conversely.
It is possible to receive a bigarray with different dimensions
than those used to send it; only the total number of elements must
match. *)
* { 2 Non - blocking communication }
type request
(** Encapsulates MPI Request object, also contains the
associated send/recv buffer in the wrapper object *)
val null_request: request
val isend: 'a -> rank -> tag -> communicator -> request
val isend_varlength: 'a -> rank -> tag -> communicator -> request * request
(** Post non-blocking send operation.
    [Mpi.send d dst tag comm] posts a send operation for data [d]
    to the node that has rank [dst] in communicator [comm]
    with tag [tag].
    Same parameters as [Mpi.send], but returns immediately with
    a pair of [Mpi.request] objects after posting two send operations for
    transmission of message length and the message itself
    buffer.  The request objects can be used to wait for the
    completion of the send operation. *)
val ireceive: int -> rank -> tag -> communicator -> request
val ireceive_varlength: rank -> tag -> communicator -> request
(** Post non-blocking receive operation.
Same parameters as [Mpi.receive], but returns with received
buffer length and an Mpi.request object, which can be used to
wait for the completion of the receive operation.
This call currently blocks until the buffer length has been received,
therefore it has to follow the asynchronous send operation in
call sequence.
*)
val wait: request -> unit
(** Wait for the completion of a non-blocking operation *)
val wait_pair: request * request -> unit
(** Wait for the completion of an ordered pair of non-blocking
operations *)
val wait_receive: request -> 'a
(** Wait for the completion of a non-blocking receive operation
and return the received object *)
(** {2 Group communication} *)
val barrier: communicator -> unit
(** [Mpi.barrier comm] suspends the calling process until all
nodes in communicator [comm] are executing [Mpi.barrier comm].
Then all nodes return from [Mpi.barrier] and continue executing. *)
(** {3 Broadcast} *)
val broadcast: 'a -> rank -> communicator -> 'a
(** [Mpi.broadcast d root comm] broadcasts data [d] from node
with rank [root] in [comm] to all other nodes in [comm].
All nodes in [comm] must call [Mpi.broadcast] with the same
[root] and [comm] arguments. The [d] argument is significant
only at node [root]; it is ignored at other nodes.
[Mpi.broadcast] returns the broadcast data. *)
val broadcast_opt: 'a option -> rank -> communicator -> 'a
(** Same as [Mpi.broadcast], except that the data (first argument)
    is provided as an option type.  The root node must provide a
    first argument of the form [Some d] where [d] is the data to
    broadcast.  The other nodes provide [None] as their first
    argument. *)
val broadcast_int: int -> rank -> communicator -> int
val broadcast_float: float -> rank -> communicator -> float
val broadcast_int_array: int array -> rank -> communicator -> unit
val broadcast_float_array: float array -> rank -> communicator -> unit
val broadcast_bigarray:
('a, 'b, 'c) Bigarray.Genarray.t -> rank -> communicator -> unit
val broadcast_bigarray0:
('a, 'b, 'c) Bigarray.Array0.t -> rank -> communicator -> unit
val broadcast_bigarray1:
('a, 'b, 'c) Bigarray.Array1.t -> rank -> communicator -> unit
val broadcast_bigarray2:
('a, 'b, 'c) Bigarray.Array2.t -> rank -> communicator -> unit
val broadcast_bigarray3:
('a, 'b, 'c) Bigarray.Array3.t -> rank -> communicator -> unit
(** Specialized versions of [Mpi.broadcast] for integers, floats,
    arrays of integers, arrays of floats and bigarrays.  For
    [Mpi.broadcast_int] and [Mpi.broadcast_float], the broadcast
    value is returned as result, and the first argument is significant
    only at the root node.
    For [Mpi.broadcast_int_array], [Mpi.broadcast_float_array] and
    [Mpi.broadcast_bigarray*], the broadcast value is stored in the
    array passed as first argument; thus, the first argument is
    significant at all nodes. *)
(** {3 Scatter} *)
val scatter: 'a array -> rank -> communicator -> 'a
(** [Mpi.scatter a root comm] scatters the elements of array [a]
    from node [root] to all nodes in [comm].  The node with rank [i]
    in [comm] receives the element [a.(i)] and returns it as result
    of [Mpi.scatter].  The [a] argument is significant only at node
    [root]; an empty array [[||]] can be given as first argument
    at other nodes. *)
val scatter_int: int array -> rank -> communicator -> int
val scatter_float: float array -> rank -> communicator -> float
val scatter_from_bigarray:
('a, 'b, 'c) Bigarray.Genarray.t -> rank -> communicator -> 'a
val scatter_from_bigarray1:
('a, 'b, 'c) Bigarray.Array1.t -> rank -> communicator -> 'a
val scatter_from_bigarray2:
('a, 'b, 'c) Bigarray.Array2.t -> rank -> communicator -> 'a
val scatter_from_bigarray3:
('a, 'b, 'c) Bigarray.Array3.t -> rank -> communicator -> 'a
(** Specialized versions of [Mpi.scatter] for integers, floats and
values from bigarrays. *)
val scatter_int_array: int array -> int array -> rank -> communicator -> unit
val scatter_float_array:
float array -> float array -> rank -> communicator -> unit
val scatter_bigarray:
('a, 'b, 'c) Bigarray.Genarray.t -> ('a, 'b, 'c) Bigarray.Genarray.t
-> rank -> communicator -> unit
val scatter_bigarray1:
('a, 'b, 'c) Bigarray.Array1.t -> ('a, 'b, 'c) Bigarray.Array1.t
-> rank -> communicator -> unit
(** Specialized versions of [Mpi.scatter] for arrays of integers,
    arrays of floats and bigarrays.
    [Mpi.scatter_int_array src dst root comm]
    splits the array [src] at node [root] into [Mpi.comm_size comm]
    chunks of size [Array.length dst], and sends the chunks to
    each node, storing them into array [dst] at each node.
    The [src] argument is significant only at node [root].
    [Mpi.scatter_float_array] and [Mpi.scatter_bigarray*] are similar.
    Use the [Bigarray.genarray_of_array*] functions to, for example,
    scatter from [n] dimensions to [n-1] dimensions.  In any case,
    only the total number of elements matters. *)
(** {3 Gather} *)
val gather: 'a -> rank -> communicator -> 'a array
(** [Mpi.gather d root comm] gathers the values of the [d] argument
at all nodes onto node [root], and returns those values as an
array. At node [root], [Mpi.gather] returns an array of
size [Mpi.comm_size comm]; element number [i] is the value
provided for argument [d] by node [i]. At other nodes,
the empty array [[||]] is returned. *)
val gather_int: int -> int array -> rank -> communicator -> unit
val gather_float: float -> float array -> rank -> communicator -> unit
val gather_to_bigarray:
'a -> ('a, 'b, 'c) Bigarray.Genarray.t -> rank -> communicator -> unit
val gather_to_bigarray1:
'a -> ('a, 'b, 'c) Bigarray.Array1.t -> rank -> communicator -> unit
val gather_to_bigarray2:
'a -> ('a, 'b, 'c) Bigarray.Array2.t -> rank -> communicator -> unit
val gather_to_bigarray3:
'a -> ('a, 'b, 'c) Bigarray.Array3.t -> rank -> communicator -> unit
(** Specialized versions of [Mpi.gather] for integers, floats and
values to bigarrays. *)
val gather_int_array: int array -> int array -> rank -> communicator -> unit
val gather_float_array:
float array -> float array -> rank -> communicator -> unit
val gather_bigarray:
('a, 'b, 'c) Bigarray.Genarray.t -> ('a, 'b, 'c) Bigarray.Genarray.t
-> rank -> communicator -> unit
val gather_bigarray1:
('a, 'b, 'c) Bigarray.Array1.t -> ('a, 'b, 'c) Bigarray.Array1.t
-> rank -> communicator -> unit
(** Specialized versions of [Mpi.gather] for arrays of integers,
    arrays of floats and bigarrays.
    [Mpi.gather_int_array src dst root comm]
    sends the arrays [src] at each node to the node [root].
    At node [root], the arrays are concatenated and stored in the
    argument [dst].  [dst] is significant only at node [root].
    [Mpi.gather_float_array] and [Mpi.gather_bigarray*] are similar.
    Use the [Bigarray.genarray_of_array*] functions to, for example,
    gather from [n-1] dimensions to [n] dimensions.  In any case,
    only the total number of elements matters. *)
(** {3 Gather to all} *)
val allgather: 'a -> communicator -> 'a array
val allgather_int: int -> int array -> communicator -> unit
val allgather_float: float -> float array -> communicator -> unit
val allgather_to_bigarray:
'a -> ('a, 'b, 'c) Bigarray.Genarray.t -> communicator -> unit
val allgather_to_bigarray1:
'a -> ('a, 'b, 'c) Bigarray.Array1.t -> communicator -> unit
val allgather_to_bigarray2:
'a -> ('a, 'b, 'c) Bigarray.Array2.t -> communicator -> unit
val allgather_to_bigarray3:
'a -> ('a, 'b, 'c) Bigarray.Array3.t -> communicator -> unit
val allgather_int_array: int array -> int array -> communicator -> unit
val allgather_float_array:
float array -> float array -> communicator -> unit
val allgather_bigarray:
('a, 'b, 'c) Bigarray.Genarray.t -> ('a, 'b, 'c) Bigarray.Genarray.t
-> communicator -> unit
val allgather_bigarray1:
('a, 'b, 'c) Bigarray.Array1.t -> ('a, 'b, 'c) Bigarray.Array1.t
-> communicator -> unit
(** The [Mpi.allgather*] functions behave like the corresponding
[Mpi.gather*] functions, except that the result of the gather
operation is available at all nodes, not only at the root node.
In other terms, [Mpi.allgather] is equivalent to [Mpi.gather]
at root [r] followed by a broadcast of the result from node [r]. *)
(** {3 All to all} *)
val alltoall: 'a array -> communicator -> 'a array
val alltoall_int_array: int array -> int array -> communicator -> unit
val alltoall_float_array: float array -> float array -> communicator -> unit
val alltoall_bigarray:
('a, 'b, 'c) Bigarray.Genarray.t -> ('a, 'b, 'c) Bigarray.Genarray.t
-> communicator -> unit
val alltoall_bigarray1:
('a, 'b, 'c) Bigarray.Array1.t -> ('a, 'b, 'c) Bigarray.Array1.t
-> communicator -> unit
val alltoall_bigarray2:
('a, 'b, 'c) Bigarray.Array2.t -> ('a, 'b, 'c) Bigarray.Array2.t
-> communicator -> unit
val alltoall_bigarray3:
('a, 'b, 'c) Bigarray.Array3.t -> ('a, 'b, 'c) Bigarray.Array3.t
-> communicator -> unit
(** Using the [Mpi.alltoall*] functions, each process effectively does
an [Mpi.scatter*] followed by an [Mpi.gather*]. They can also be
seen as an extension to [Mpi.allgather*] where each process sends
distinct data to each of the receivers.
Both send and receive arrays must have the same size at all
nodes. *)
(** {3 Reduce} *)
(* GADT of reduction/scan operators.  The phantom type parameter records
   which element kinds ([`Int], [`Float]) each operator supports, so the
   [reduce_*]/[allreduce_*]/[scan_*] signatures below can statically reject
   e.g. a bitwise operator applied to floats. *)
type _ op =
    Max : [< `Int | `Float ] op          (** maximum *)
  | Min : [< `Int | `Float ] op          (** minimum *)
  | Sum : [< `Int | `Float ] op          (** summation ([+]) *)
  | Prod : [< `Int | `Float ] op         (** product ([*]) *)
  | Land : [< `Int ] op                  (** bitwise and; integers only *)
  | Lor : [< `Int ] op                   (** bitwise or; integers only *)
  | Xor : [< `Int ] op                   (** bitwise exclusive-or; integers only *)
  | Int_max : [< `Int ] op               (** backward-compatible [Max] specialized to integers *)
  | Int_min : [< `Int ] op               (** backward-compatible [Min] specialized to integers *)
  | Int_sum : [< `Int ] op               (** backward-compatible [Sum] specialized to integers *)
  | Int_prod : [< `Int ] op              (** backward-compatible [Prod] specialized to integers *)
  | Int_land : [< `Int ] op              (** backward-compatible variant of [Land] *)
  | Int_lor : [< `Int ] op               (** backward-compatible variant of [Lor] *)
  | Int_xor : [< `Int ] op               (** backward-compatible variant of [Xor] *)
  | Float_max : [< `Float ] op           (** backward-compatible [Max] specialized to floats *)
  | Float_min : [< `Float ] op           (** backward-compatible [Min] specialized to floats *)
  | Float_sum : [< `Float ] op           (** backward-compatible [Sum] specialized to floats *)
  | Float_prod : [< `Float ] op (** *)
(** The operations that can be performed by a reduce or scan; some of
    them are only valid for integers.  [Max] and [Min]
    are maximum and minimum; [Sum] and [Prod]
    are summation ([+]) and product ([*]).
    [Land], [Lor] and [Xor] are logical (bit-per-bit) and,
    or and exclusive-or.
    The constructors prefixed by [Int_] or [Float_]
    (e.g. [Int_max], [Float_sum]) are type-specialized variants of
    the non-prefixed constructors.  For example, [Int_max] is [Max]
    specialized to integer values, and [Float_sum] is [Sum]
    specialized to floating-point values.  These specialized
    constructors are included for backward compatibility with earlier
    versions of this library.  They will be deprecated in the future. *)
val reduce_int: int -> [`Int] op -> rank -> communicator -> int
val reduce_float: float -> [`Float] op -> rank -> communicator -> float
(** [Mpi.reduce_int d op root comm] computes the value of
    [d0 op d1 op ... op dN], where [d0 ... dN] are the values of
    the [d] argument at every node in [comm].  The result value
    is returned at node with rank [root].  A meaningless integer
    is returned at other nodes.  [Mpi.reduce_float] is similar
    except for the use of floating-point operations instead of
    integer operations. *)
val reduce_int_array:
int array -> int array -> [`Int] op -> rank -> communicator -> unit
val reduce_float_array:
float array -> float array -> [`Float] op -> rank -> communicator -> unit
val reduce_bigarray:
('a, 'b, 'c) Bigarray.Genarray.t -> ('a, 'b, 'c) Bigarray.Genarray.t
-> 'any op -> rank -> communicator -> unit
val reduce_bigarray0:
('a, 'b, 'c) Bigarray.Array0.t -> ('a, 'b, 'c) Bigarray.Array0.t
-> 'any op -> rank -> communicator -> unit
val reduce_bigarray1:
('a, 'b, 'c) Bigarray.Array1.t -> ('a, 'b, 'c) Bigarray.Array1.t
-> 'any op -> rank -> communicator -> unit
val reduce_bigarray2:
('a, 'b, 'c) Bigarray.Array2.t -> ('a, 'b, 'c) Bigarray.Array2.t
-> 'any op -> rank -> communicator -> unit
val reduce_bigarray3:
('a, 'b, 'c) Bigarray.Array3.t -> ('a, 'b, 'c) Bigarray.Array3.t
-> 'any op -> rank -> communicator -> unit
(** [Mpi.reduce_int_array d res op root comm] computes
    [Array.length d] reductions by operation [op] simultaneously.
    For every [i], the values of [d.(i)] at every node
    are combined using [op] and the result is stored into [res.(i)]
    at node [root].  For [Mpi.reduce_bigarray*] applied to an array
    of floating-point values, an exception is raised for the
    [Land], [Lor] and [Xor] operations and the others
    are interpreted as floating-point operations. *)
(** {3 Reduce to all} *)
val allreduce_int: int -> [`Int] op -> communicator -> int
val allreduce_float: float -> [`Float] op -> communicator -> float
val allreduce_int_array:
int array -> int array -> [`Int] op -> communicator -> unit
val allreduce_float_array:
float array -> float array -> [`Float] op -> communicator -> unit
val allreduce_bigarray:
('a, 'b, 'c) Bigarray.Genarray.t -> ('a, 'b, 'c) Bigarray.Genarray.t
-> 'any op -> communicator -> unit
val allreduce_bigarray0:
('a, 'b, 'c) Bigarray.Array0.t -> ('a, 'b, 'c) Bigarray.Array0.t
-> 'any op -> communicator -> unit
val allreduce_bigarray1:
('a, 'b, 'c) Bigarray.Array1.t -> ('a, 'b, 'c) Bigarray.Array1.t
-> 'any op -> communicator -> unit
val allreduce_bigarray2:
('a, 'b, 'c) Bigarray.Array2.t -> ('a, 'b, 'c) Bigarray.Array2.t
-> 'any op -> communicator -> unit
val allreduce_bigarray3:
('a, 'b, 'c) Bigarray.Array3.t -> ('a, 'b, 'c) Bigarray.Array3.t
-> 'any op -> communicator -> unit
(** The [Mpi.allreduce_*] operations are similar to the
    corresponding [Mpi.reduce_*] operations, except that the result
    of the reduction is made available at all nodes.
    For [Mpi.allreduce_bigarray*] applied to an array of floating-point
    values, an exception is raised for the [Land], [Lor]
    and [Xor] operations and the others are interpreted as
    floating-point operations. *)
(** {3 Scan} *)
val scan_int: int -> [`Int] op -> communicator -> int
val scan_float: float -> [`Float] op -> communicator -> float
(** [Mpi.scan_int d res op comm] performs a scan operation over
the integers [d] at every node. Let [d0 ... dN] be the
values of the [d] at every node in [comm]. At node with rank [R],
[Mpi.scan_int d res op comm] returns [d0 op ... op dR].
[Mpi.scan_float] is similar. *)
val scan_int_array:
int array -> int array -> [`Int] op -> communicator -> unit
val scan_float_array:
float array -> float array -> [`Float] op -> communicator -> unit
val scan_bigarray:
('a, 'b, 'c) Bigarray.Genarray.t -> ('a, 'b, 'c) Bigarray.Genarray.t
-> 'any op -> communicator -> unit
val scan_bigarray0:
('a, 'b, 'c) Bigarray.Array0.t -> ('a, 'b, 'c) Bigarray.Array0.t
-> 'any op -> communicator -> unit
val scan_bigarray1:
('a, 'b, 'c) Bigarray.Array1.t -> ('a, 'b, 'c) Bigarray.Array1.t
-> 'any op -> communicator -> unit
val scan_bigarray2:
('a, 'b, 'c) Bigarray.Array2.t -> ('a, 'b, 'c) Bigarray.Array2.t
-> 'any op -> communicator -> unit
val scan_bigarray3:
('a, 'b, 'c) Bigarray.Array3.t -> ('a, 'b, 'c) Bigarray.Array3.t
-> 'any op -> communicator -> unit
(** Same as [Mpi.scan_int] and [Mpi.scan_float], but perform several
    scanning operations on the elements of the input array (first
    argument).  The result is stored in the array passed as second
    argument at the root node.  For [Mpi.scan_bigarray*] applied to
    an array of floating-point values, an exception is raised for
    the [Land], [Lor] and [Xor] operations and the
    others are interpreted as floating-point operations. *)
(** {2 Advanced operations on communicators} *)
val comm_compare: communicator -> communicator -> bool
(** Compare two communicators and return [true] if they are the same,
    [false] otherwise. *)
type color = int
val comm_split: communicator -> color -> int -> communicator
(** [Mpi.comm_split comm col key] splits the communicator into
several communicators based on the values of [col] and
[key] at every node. For each distinct value of the [col]
argument, a new communicator is created. It contains all
nodes of [comm] that have presented that particular value of
[key] to [Mpi.comm_split]. The ordering of nodes in the
new communicator is determined by the [key] argument:
nodes are ordered by increasing values of [key], and in case
of ties, by their original order in [comm]. Thus, to preserve
the same ordering as in [comm], it suffices that all nodes
present [0] as the [key] argument. In each node, the communicator
returned is the one that corresponds to the [color] argument
of that node. *)
val color_none: color
(** In [Mpi.comm_split], a node can pass [Mpi.color_none] as the
[col] argument to indicate that it does not want to be part
of any of the new communicators. [Mpi.comm_split] then
returns a null communicator (allowing no communications) in
that node. *)
(** {3 Cartesian topologies} *)
val cart_create:
communicator -> int array -> bool array -> bool -> communicator
(** [Mpi.cart_create comm dims periodic reorder] embeds a cartesian
topology (multi-dimensional grid) on the nodes of
communicator [comm], and return a
new communicator with that information attached.
The length of [dims] determines the number of dimensions of
the topology. For each dimension [d], [dims.(d)] specifies
the number of nodes in that dimension, and [periodic.(d)]
says whether that dimension is periodic (wraps around) or not.
[reorder] determines whether the ranks of nodes in the new
communicator can be reordered for better efficiency ([true])
or must remain the same as in [comm] ([false]).
The initial communicator [comm] must contain at least as many
nodes as specified by [dims]. *)
val dims_create: int -> int array -> int array
(** [Mpi.dims_create numnodes hints] helps determining a
    suitable [dims] argument to [Mpi.cart_create]
    given a number of nodes [numnodes], the number of
    dimensions required, and optional constraints.
    The length of the [hints] array determines the number of
    dimensions.  For each dimension [d], [hints.(d)], if not null,
    is the number of nodes required along this dimension.  If null,
    [Mpi.dims_create] figures out a suitable number.
    For instance, [Mpi.dims_create 24 [|0;0|]] returns reasonable
    dimensions for a two-dimensional grid containing 24 nodes. *)
val cart_rank: communicator -> int array -> rank
(** [Mpi.cart_rank comm coords] returns the rank of the node in
    the cartesian topology [comm] that is at coordinates [coords].
    The [coords] array must have one element per dimension of the
    cartesian topology.  Individual coordinates range between [0]
    (inclusive) and the corresponding dimension (exclusive). *)
val cart_coords: communicator -> rank -> int array
(** The inverse operation of [Mpi.cart_rank].
    [Mpi.cart_coords comm r] returns the cartesian coordinates
    of the node having rank [r] in [comm]. *)
(** {3 Process group management} *)
type group
(** The type of groups.  Groups represent sets of nodes
    (processing elements).  Unlike communicators, they cannot
    be used directly for communication.  Instead, one constructs
    a group representing the desired set of nodes, then builds
    a communicator for this group. *)
val comm_create: communicator -> group -> communicator
(** [Mpi.comm_create comm group] creates a communicator
whose nodes are those described in [group]. [comm] is
the initial communicator; the nodes in [group] must be
a subset of those in [comm]. The null communicator is
returned to the nodes that are not part of [group]. *)
val group_size: group -> int
(** Return the size (number of nodes) in the given group. *)
val group_rank: group -> rank
(** Return the rank of the calling node in the given group. *)
val group_translate_ranks: group -> rank array -> group -> rank array
(** [Mpi.group_translate_ranks g1 ranks g2] translates the ranks
    of a number of nodes from one group to another.  [ranks]
    is an array of node ranks relative to group [g1].  The
    returned array contains the ranks for the same nodes, relative
    to group [g2]. *)
val comm_group: communicator -> group
(** [Mpi.comm_group comm] returns the group of all nodes belonging
to the communicator [comm], with the same ranks as in [comm]. *)
val group_union: group -> group -> group
val group_intersection: group -> group -> group
val group_difference: group -> group -> group
(** Union, intersection and set difference over groups. *)
val group_incl: group -> rank array -> group
(** [Mpi.group_incl group ranks] returns the subset of [group]
    containing the nodes whose ranks are given in the array [ranks]. *)
val group_excl: group -> rank array -> group
(** [Mpi.group_excl group ranks] returns the subset of [group]
containing the nodes whose ranks are not given in the array
[ranks]. *)
type group_range = { range_first: int; range_last: int; range_stride: int }
(** A group range represents the set of nodes whose ranks are
([range_first]; [range_first + range_stride]; ...; [range_last]). *)
val group_range_incl: group -> group_range array -> group
(** [Mpi.group_range_incl group ranges] returns the subset of [group]
containing the nodes whose ranks belong to the ranges
listed in [ranges]. *)
val group_range_excl: group -> group_range array -> group
(** [Mpi.group_range_excl group ranges] returns the subset of [group]
containing the nodes whose ranks do not belong to the ranges
listed in [ranges]. *)
(** {2 Miscellaneous} *)
external wtime: unit -> float = "caml_mpi_wtime"
(** Return the wall-clock time elapsed at the calling node
since the beginning of the program execution. *)
| null | https://raw.githubusercontent.com/xavierleroy/ocamlmpi/29eae81935a39dd866edf088f1d61e780b3156c8/mpi.mli | ocaml | *********************************************************************
The Caml/MPI interface
the special exception on linking described in file LICENSE.
*********************************************************************
Raised when an operation of the [Mpi] module encounters an error.
The string argument describes the error.
* The type of communicators. Communicators are groups of
nodes (processing elements) that can exchange data.
* The global communicator.
* Return the size (number of nodes) in the given communicator.
* Return the rank of the calling node in the given communicator.
The rank [Mpi.comm_rank c] is between 0 (inclusive) and
[Mpi.comm_size c] (exclusive).
* The type of tags associated with messages in point-to-point
communications. Tags are positive integers in the range
[0...32767].
* Same as [Mpi.receive], but returns a triple [(d, src, tag)]
where [d] is the data associated with the message,
[src] the rank of the node that sent the message,
and [tag] the actual tag attached to the message.
* [Mpi.iprobe src tag comm] is a non-blocking counterpart to
{!probe}. If there is no matching message waiting it returns
[None]. Otherwise, it returns [Some (rank, tag)] like
{!probe}.
* Encapsulates MPI Request object, also contains the
associated send/recv buffer in the wrapper object
* Post non-blocking receive operation.
Same parameters as [Mpi.receive], but returns with received
buffer length and an Mpi.request object, which can be used to
wait for the completion of the receive operation.
This call currently blocks until the buffer length has been received,
therefore it has to follow the asynchronous send operation in
call sequence.
* Wait for the completion of a non-blocking operation
* Wait for the completion of an ordered pair of non-blocking
operations
* Wait for the completion of a non-blocking receive operation
and return the received object
* [Mpi.barrier comm] suspends the calling process until all
nodes in communicator [comm] are executing [Mpi.barrier comm].
Then all nodes return from [Mpi.barrier] and continue executing.
* [Mpi.broadcast d root comm] broadcasts data [d] from node
with rank [root] in [comm] to all other nodes in [comm].
All nodes in [comm] must call [Mpi.broadcast] with the same
[root] and [comm] arguments. The [d] argument is significant
only at node [root]; it is ignored at other nodes.
[Mpi.broadcast] returns the broadcast data.
* Specialized versions of [Mpi.scatter] for integers, floats and
values from bigarrays.
* [Mpi.gather d root comm] gathers the values of the [d] argument
at all nodes onto node [root], and returns those values as an
array. At node [root], [Mpi.gather] returns an array of
size [Mpi.comm_size comm]; element number [i] is the value
provided for argument [d] by node [i]. At other nodes,
the empty array [[||]] is returned.
* Specialized versions of [Mpi.gather] for integers, floats and
values to bigarrays.
* The [Mpi.allgather*] functions behave like the corresponding
[Mpi.gather*] functions, except that the result of the gather
operation is available at all nodes, not only at the root node.
In other terms, [Mpi.allgather] is equivalent to [Mpi.gather]
at root [r] followed by a broadcast of the result from node [r].
* Using the [Mpi.alltoall*] functions, each process effectively does
an [Mpi.scatter*] followed by an [Mpi.gather*]. They can also be
seen as an extension to [Mpi.allgather*] where each process sends
distinct data to each of the receivers.
Both send and receive arrays must have the same size at all
nodes.
*
* [Mpi.scan_int d res op comm] performs a scan operation over
the integers [d] at every node. Let [d0 ... dN] be the
values of the [d] at every node in [comm]. At node with rank [R],
[Mpi.scan_int d res op comm] returns [d0 op ... op dR].
[Mpi.scan_float] is similar.
* [Mpi.comm_split comm col key] splits the communicator into
several communicators based on the values of [col] and
[key] at every node. For each distinct value of the [col]
argument, a new communicator is created. It contains all
nodes of [comm] that have presented that particular value of
[key] to [Mpi.comm_split]. The ordering of nodes in the
new communicator is determined by the [key] argument:
nodes are ordered by increasing values of [key], and in case
of ties, by their original order in [comm]. Thus, to preserve
the same ordering as in [comm], it suffices that all nodes
present [0] as the [key] argument. In each node, the communicator
returned is the one that corresponds to the [color] argument
of that node.
* In [Mpi.comm_split], a node can pass [Mpi.color_none] as the
[col] argument to indicate that it does not want to be part
of any of the new communicators. [Mpi.comm_split] then
returns a null communicator (allowing no communications) in
that node.
* [Mpi.cart_create comm dims periodic reorder] embeds a cartesian
topology (multi-dimensional grid) on the nodes of
communicator [comm], and return a
new communicator with that information attached.
The length of [dims] determines the number of dimensions of
the topology. For each dimension [d], [dims.(d)] specifies
the number of nodes in that dimension, and [periodic.(d)]
says whether that dimension is periodic (wraps around) or not.
[reorder] determines whether the ranks of nodes in the new
communicator can be reordered for better efficiency ([true])
or must remain the same as in [comm] ([false]).
The initial communicator [comm] must contain at least as many
nodes as specified by [dims].
* [Mpi.comm_create comm group] creates a communicator
whose nodes are those described in [group]. [comm] is
the initial communicator; the nodes in [group] must be
a subset of those in [comm]. The null communicator is
returned to the nodes that are not part of [group].
* Return the size (number of nodes) in the given group.
* Return the rank of the calling node in the given group.
* [Mpi.comm_group comm] returns the group of all nodes belonging
to the communicator [comm], with the same ranks as in [comm].
* Union, intersection and set difference over groups.
* [Mpi.group_excl group ranks] returns the subset of [group]
containing the nodes whose ranks are not given in the array
[ranks].
* A group range represents the set of nodes whose ranks are
([range_first]; [range_first + range_stride]; ...; [range_last]).
* [Mpi.group_range_incl group ranges] returns the subset of [group]
containing the nodes whose ranks belong to the ranges
listed in [ranges].
* [Mpi.group_range_excl group ranges] returns the subset of [group]
containing the nodes whose ranks do not belong to the ranges
listed in [ranges].
* Return the wall-clock time elapsed at the calling node
since the beginning of the program execution. | , projet Cristal , INRIA Rocquencourt
Copyright 1998 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the GNU Library General Public License , with
* { 1 Caml bindings for the Message Passing Interface ( MPI ) library }
* { 2 Error reporting }
exception Error of string
* { 2 Basic operations on communicators }
type communicator
type rank = int
* The type of ranks of nodes . Nodes in a given communicator
are assigned integer ranks [ 0 , 1 , ... , N-1 ] where [ N ]
is the size of the communicator .
are assigned integer ranks [0, 1, ..., N-1] where [N]
is the size of the communicator. *)
val comm_world: communicator
external comm_size: communicator -> int = "caml_mpi_comm_size"
external comm_rank: communicator -> rank = "caml_mpi_comm_rank"
* { 2 Point - to - point communication }
type tag = int
val send: 'a -> rank -> tag -> communicator -> unit
* [ Mpi.send d dst tag comm ] sends a message containing data [ d ]
to the node that has rank [ dst ] in communicator [ comm ] .
The message is sent with tag [ tag ] . Depending on the
underlying MPI implementation , message sending can be
synchronous or asynchronous ; that is , [ Mpi.send ] can block
until the target node receives the message , or [ Mpi.send ]
can return before the target node has received the message .
to the node that has rank [dst] in communicator [comm].
The message is sent with tag [tag]. Depending on the
underlying MPI implementation, message sending can be
synchronous or asynchronous; that is, [Mpi.send] can block
until the target node receives the message, or [Mpi.send]
can return before the target node has received the message. *)
val receive: rank -> tag -> communicator -> 'a
* [ Mpi.receive src tag comm ] blocks until a message is available ,
and returns the data contained in that message .
The [ src ] argument selects the desired source for the message :
if [ src ] is [ Mpi.any_source ] , messages from any node in communicator
[ comm ] are accepted ; otherwise , only messages sent by the node
having rank [ src ] in [ comm ] are accepted .
Similarly , the [ tag ] argument selects messages by their tag :
if [ tag ] is [ Mpi.any_tag ] , messages are accepted regardless of
their tags ; otherwise , only messages with tag equal to [ tag ]
are accepted .
Warning : just like the [ Marshal.from _ * ] functions ,
[ Mpi.receive ] is not type - safe . The value returned by
[ Mpi.receive ] does not possess type [ ' a ]
for all [ ' a ] ; it has one , unique type which can not be determined
at compile - type . The programmer should be careful about using
the returned value with the right type .
and returns the data contained in that message.
The [src] argument selects the desired source for the message:
if [src] is [Mpi.any_source], messages from any node in communicator
[comm] are accepted; otherwise, only messages sent by the node
having rank [src] in [comm] are accepted.
Similarly, the [tag] argument selects messages by their tag:
if [tag] is [Mpi.any_tag], messages are accepted regardless of
their tags; otherwise, only messages with tag equal to [tag]
are accepted.
Warning: just like the [Marshal.from_*] functions,
[Mpi.receive] is not type-safe. The Caml value returned by
[Mpi.receive] does not possess type ['a]
for all ['a]; it has one, unique type which cannot be determined
at compile-type. The programmer should be careful about using
the returned value with the right type. *)
val receive_status: rank -> tag -> communicator -> 'a * rank * tag
val probe: rank -> tag -> communicator -> rank * tag
* [ Mpi.probe src tag comm ] blocks until a message is available
on communicator [ comm ] , with source and tag matching the
[ src ] and [ tag ] arguments as described in [ Mpi.receive ] .
It then returns the rank of the node that sent the message
and the actual tag attached to the message . The message itself
is not read , and can be retrieved later with [ Mpi.receive ]
or [ Mpi.receive_status ] .
on communicator [comm], with source and tag matching the
[src] and [tag] arguments as described in [Mpi.receive].
It then returns the rank of the node that sent the message
and the actual tag attached to the message. The message itself
is not read, and can be retrieved later with [Mpi.receive]
or [Mpi.receive_status]. *)
val iprobe: rank -> tag -> communicator -> (rank * tag) option
val any_tag: tag
val any_source: rank
* The special values of the [ tag ] and [ src ] arguments of
[ Mpi.receive ] , [ Mpi.receive_status ] and [ Mpi.probe ] ,
indicating that any message tag is acceptable ( for [ Mpi.any_tag ] )
or any message source is acceptable ( for [ Mpi.any_source ] ) .
[Mpi.receive], [Mpi.receive_status] and [Mpi.probe],
indicating that any message tag is acceptable (for [Mpi.any_tag])
or any message source is acceptable (for [Mpi.any_source]). *)
val send_int: int -> rank -> tag -> communicator -> unit
val receive_int: rank -> tag -> communicator -> int
val send_float: float -> rank -> tag -> communicator -> unit
val receive_float: rank -> tag -> communicator -> float
val send_int_array: int array -> rank -> tag -> communicator -> unit
val receive_int_array: int array -> rank -> tag -> communicator -> unit
val send_float_array: float array -> rank -> tag -> communicator -> unit
val receive_float_array: float array -> rank -> tag -> communicator -> unit
val send_bigarray:
('a, 'b, 'c) Bigarray.Genarray.t -> rank -> tag -> communicator -> unit
val send_bigarray0:
('a, 'b, 'c) Bigarray.Array0.t -> rank -> tag -> communicator -> unit
val send_bigarray1:
('a, 'b, 'c) Bigarray.Array1.t -> rank -> tag -> communicator -> unit
val send_bigarray2:
('a, 'b, 'c) Bigarray.Array2.t -> rank -> tag -> communicator -> unit
val send_bigarray3:
('a, 'b, 'c) Bigarray.Array3.t -> rank -> tag -> communicator -> unit
val receive_bigarray:
('a, 'b, 'c) Bigarray.Genarray.t -> rank -> tag -> communicator -> unit
val receive_bigarray0:
('a, 'b, 'c) Bigarray.Array0.t -> rank -> tag -> communicator -> unit
val receive_bigarray1:
('a, 'b, 'c) Bigarray.Array1.t -> rank -> tag -> communicator -> unit
val receive_bigarray2:
('a, 'b, 'c) Bigarray.Array2.t -> rank -> tag -> communicator -> unit
val receive_bigarray3:
('a, 'b, 'c) Bigarray.Array3.t -> rank -> tag -> communicator -> unit
* Specialized versions of [ Mpi.send ] and [ Mpi.receive ]
for communicating integers , floating - point numbers ,
arrays of integers , arrays of floating - point numbers and
bigarrays .
These specialized versions are more efficient than
[ Mpi.send ] and [ Mpi.receive ] since less copying is involved .
The arguments to the [ Mpi.send _ * ] functions have the same
meaning as for [ Mpi.send ] .
The arguments to [ Mpi.receive_int ] and [ Mpi.receive_float ]
have the same meaning as for [ Mpi.receive ] .
[ Mpi.receive_int_array ] , [ Mpi.receive_float_array ] and
[ Mpi.receive_bigarray * ]
have one extra argument , which is the array in which the data
of the received message is stored . The caller is responsible
for pre - allocating an array large enough to hold the incoming data .
It is an error to send a message using one of the specialized
[ Mpi.send _ * ] functions and receive it with the generic
[ Mpi.receive ] function , and conversely .
It is possible to receive a bigarray with different dimensions
than those used to send it ; only the total number of elements must
match .
for communicating integers, floating-point numbers,
arrays of integers, arrays of floating-point numbers and
bigarrays.
These specialized versions are more efficient than
[Mpi.send] and [Mpi.receive] since less copying is involved.
The arguments to the [Mpi.send_*] functions have the same
meaning as for [Mpi.send].
The arguments to [Mpi.receive_int] and [Mpi.receive_float]
have the same meaning as for [Mpi.receive].
[Mpi.receive_int_array], [Mpi.receive_float_array] and
[Mpi.receive_bigarray*]
have one extra argument, which is the array in which the data
of the received message is stored. The caller is responsible
for pre-allocating an array large enough to hold the incoming data.
It is an error to send a message using one of the specialized
[Mpi.send_*] functions and receive it with the generic
[Mpi.receive] function, and conversely.
It is possible to receive a bigarray with different dimensions
than those used to send it; only the total number of elements must
match. *)
* { 2 Non - blocking communication }
type request
val null_request: request
val isend: 'a -> rank -> tag -> communicator -> request
val isend_varlength: 'a -> rank -> tag -> communicator -> request * request
* Post non - blocking send operation .
[ Mpi.send d dst tag comm ] posts a send operation for data [ d ]
to the node that has rank [ dst ] in communicator [ comm ]
with tag [ tag ] .
Same parameters as [ Mpi.send ] , but returns immediately with
a pair of [ Mpi.request ] objects after posting two send operations for
transmission of message length and the message itself
buffer . The request objects can be used to wait for the
completion of the send operation .
[Mpi.send d dst tag comm] posts a send operation for data [d]
to the node that has rank [dst] in communicator [comm]
with tag [tag].
Same parameters as [Mpi.send], but returns immediately with
a pair of [Mpi.request] objects after posting two send operations for
transmission of message length and the message itself
buffer. The request objects can be used to wait for the
completion of the send operation. *)
val ireceive: int -> rank -> tag -> communicator -> request
val ireceive_varlength: rank -> tag -> communicator -> request
val wait: request -> unit
val wait_pair: request * request -> unit
val wait_receive: request -> 'a
* { 2 Group communication }
val barrier: communicator -> unit
* { 3 Broadcast }
val broadcast: 'a -> rank -> communicator -> 'a
val broadcast_opt: 'a option -> rank -> communicator -> 'a
* Same as [ Mpi.broadcast ] , except that the data ( first argument )
is provided as an option type . The root node must provide a
first argument of the form [ Some d ] where [ d ] is the data to
broadcast . The other node provide [ None ] as their first
argument .
is provided as an option type. The root node must provide a
first argument of the form [Some d] where [d] is the data to
broadcast. The other node provide [None] as their first
argument. *)
val broadcast_int: int -> rank -> communicator -> int
val broadcast_float: float -> rank -> communicator -> float
val broadcast_int_array: int array -> rank -> communicator -> unit
val broadcast_float_array: float array -> rank -> communicator -> unit
val broadcast_bigarray:
('a, 'b, 'c) Bigarray.Genarray.t -> rank -> communicator -> unit
val broadcast_bigarray0:
('a, 'b, 'c) Bigarray.Array0.t -> rank -> communicator -> unit
val broadcast_bigarray1:
('a, 'b, 'c) Bigarray.Array1.t -> rank -> communicator -> unit
val broadcast_bigarray2:
('a, 'b, 'c) Bigarray.Array2.t -> rank -> communicator -> unit
val broadcast_bigarray3:
('a, 'b, 'c) Bigarray.Array3.t -> rank -> communicator -> unit
* Specialized versions of [ Mpi.broadcast ] for integers , floats ,
arrays of integers , arrays of floats and bigarrays . For
[ Mpi.broadcast_int ] and [ Mpi.broadcast_float ] , the broadcast
value is returned as result , and the first argument is significant
only at the root node .
For [ Mpi.broadcast_int_array ] , [ Mpi.broadcast_float_array ] and
[ Mpi.broadcast_bigarray * ] , the broadcast value is stored in the
array passed as first argument ; thus , the first argument is
significant at all nodes .
arrays of integers, arrays of floats and bigarrays. For
[Mpi.broadcast_int] and [Mpi.broadcast_float], the broadcast
value is returned as result, and the first argument is significant
only at the root node.
For [Mpi.broadcast_int_array], [Mpi.broadcast_float_array] and
[Mpi.broadcast_bigarray*], the broadcast value is stored in the
array passed as first argument; thus, the first argument is
significant at all nodes. *)
* { 3 Scatter }
val scatter: 'a array -> rank -> communicator -> 'a
* [ Mpi.scatter a root comm ] scatters the elements of array [ a ]
from node [ root ] to all nodes in [ comm ] . The node with rank [ i ]
in [ comm ] receives the element [ a.(i ) ] and returns it as result
of [ Mpi.scatter ] . The [ a ] argument is significant only at node
[ root ] ; an empty array [ [ || ] ] can be given as first argument
at other nodes .
from node [root] to all nodes in [comm]. The node with rank [i]
in [comm] receives the element [a.(i)] and returns it as result
of [Mpi.scatter]. The [a] argument is significant only at node
[root]; an empty array [[||]] can be given as first argument
at other nodes. *)
val scatter_int: int array -> rank -> communicator -> int
val scatter_float: float array -> rank -> communicator -> float
val scatter_from_bigarray:
('a, 'b, 'c) Bigarray.Genarray.t -> rank -> communicator -> 'a
val scatter_from_bigarray1:
('a, 'b, 'c) Bigarray.Array1.t -> rank -> communicator -> 'a
val scatter_from_bigarray2:
('a, 'b, 'c) Bigarray.Array2.t -> rank -> communicator -> 'a
val scatter_from_bigarray3:
('a, 'b, 'c) Bigarray.Array3.t -> rank -> communicator -> 'a
val scatter_int_array: int array -> int array -> rank -> communicator -> unit
val scatter_float_array:
float array -> float array -> rank -> communicator -> unit
val scatter_bigarray:
('a, 'b, 'c) Bigarray.Genarray.t -> ('a, 'b, 'c) Bigarray.Genarray.t
-> rank -> communicator -> unit
val scatter_bigarray1:
('a, 'b, 'c) Bigarray.Array1.t -> ('a, 'b, 'c) Bigarray.Array1.t
-> rank -> communicator -> unit
* Specialized versions of [ Mpi.scatter ] for arrays of integers ,
arrays of floats and bigarrays .
[ Mpi.scatter_int_array src dst root comm ]
splits the array [ src ] at node [ root ] into [ Mpi.comm_size comm ]
chunks of size [ Array.length dst ] , and sends the chunks to
each node , storing them into array [ dst ] at each node .
The [ src ] argument is significant only at node [ root ] .
[ Mpi.scatter_float_array ] and [ Mpi.scatter_bigarray * ] are similar .
Use the [ Bigarray.genarray_of_array * ] functions to , for example ,
scatter from [ n ] dimensions to [ n-1 ] dimensions . In any case ,
only the total number of elements matters .
arrays of floats and bigarrays.
[Mpi.scatter_int_array src dst root comm]
splits the array [src] at node [root] into [Mpi.comm_size comm]
chunks of size [Array.length dst], and sends the chunks to
each node, storing them into array [dst] at each node.
The [src] argument is significant only at node [root].
[Mpi.scatter_float_array] and [Mpi.scatter_bigarray*] are similar.
Use the [Bigarray.genarray_of_array*] functions to, for example,
scatter from [n] dimensions to [n-1] dimensions. In any case,
only the total number of elements matters. *)
* { 3 Gather }
val gather: 'a -> rank -> communicator -> 'a array
val gather_int: int -> int array -> rank -> communicator -> unit
val gather_float: float -> float array -> rank -> communicator -> unit
val gather_to_bigarray:
'a -> ('a, 'b, 'c) Bigarray.Genarray.t -> rank -> communicator -> unit
val gather_to_bigarray1:
'a -> ('a, 'b, 'c) Bigarray.Array1.t -> rank -> communicator -> unit
val gather_to_bigarray2:
'a -> ('a, 'b, 'c) Bigarray.Array2.t -> rank -> communicator -> unit
val gather_to_bigarray3:
'a -> ('a, 'b, 'c) Bigarray.Array3.t -> rank -> communicator -> unit
val gather_int_array: int array -> int array -> rank -> communicator -> unit
val gather_float_array:
float array -> float array -> rank -> communicator -> unit
val gather_bigarray:
('a, 'b, 'c) Bigarray.Genarray.t -> ('a, 'b, 'c) Bigarray.Genarray.t
-> rank -> communicator -> unit
val gather_bigarray1:
('a, 'b, 'c) Bigarray.Array1.t -> ('a, 'b, 'c) Bigarray.Array1.t
-> rank -> communicator -> unit
* Specialized versions of [ Mpi.gather ] for arrays of integers ,
arrays of floats and bigarrays .
[ Mpi.gather_int_array src dst root comm ]
sends the arrays [ src ] at each node to the node [ root ] .
At node [ root ] , the arrays are concatenated and stored in the
argument [ dst ] . [ dst ] is significant only at node [ root ] .
[ Mpi.gather_float_array ] and [ Mpi.gather_bigarray * ] are similar .
Use the [ Bigarray.genarray_of_array * ] functions to , for example ,
gather from [ n-1 ] dimensions to [ n ] dimensions . In any case ,
only the total number of elements matters .
arrays of floats and bigarrays.
[Mpi.gather_int_array src dst root comm]
sends the arrays [src] at each node to the node [root].
At node [root], the arrays are concatenated and stored in the
argument [dst]. [dst] is significant only at node [root].
[Mpi.gather_float_array] and [Mpi.gather_bigarray*] are similar.
Use the [Bigarray.genarray_of_array*] functions to, for example,
gather from [n-1] dimensions to [n] dimensions. In any case,
only the total number of elements matters. *)
* { 3 Gather to all }
val allgather: 'a -> communicator -> 'a array
val allgather_int: int -> int array -> communicator -> unit
val allgather_float: float -> float array -> communicator -> unit
val allgather_to_bigarray:
'a -> ('a, 'b, 'c) Bigarray.Genarray.t -> communicator -> unit
val allgather_to_bigarray1:
'a -> ('a, 'b, 'c) Bigarray.Array1.t -> communicator -> unit
val allgather_to_bigarray2:
'a -> ('a, 'b, 'c) Bigarray.Array2.t -> communicator -> unit
val allgather_to_bigarray3:
'a -> ('a, 'b, 'c) Bigarray.Array3.t -> communicator -> unit
val allgather_int_array: int array -> int array -> communicator -> unit
val allgather_float_array:
float array -> float array -> communicator -> unit
val allgather_bigarray:
('a, 'b, 'c) Bigarray.Genarray.t -> ('a, 'b, 'c) Bigarray.Genarray.t
-> communicator -> unit
val allgather_bigarray1:
('a, 'b, 'c) Bigarray.Array1.t -> ('a, 'b, 'c) Bigarray.Array1.t
-> communicator -> unit
* { 3 All to all }
val alltoall: 'a array -> communicator -> 'a array
val alltoall_int_array: int array -> int array -> communicator -> unit
val alltoall_float_array: float array -> float array -> communicator -> unit
val alltoall_bigarray:
('a, 'b, 'c) Bigarray.Genarray.t -> ('a, 'b, 'c) Bigarray.Genarray.t
-> communicator -> unit
val alltoall_bigarray1:
('a, 'b, 'c) Bigarray.Array1.t -> ('a, 'b, 'c) Bigarray.Array1.t
-> communicator -> unit
val alltoall_bigarray2:
('a, 'b, 'c) Bigarray.Array2.t -> ('a, 'b, 'c) Bigarray.Array2.t
-> communicator -> unit
val alltoall_bigarray3:
('a, 'b, 'c) Bigarray.Array3.t -> ('a, 'b, 'c) Bigarray.Array3.t
-> communicator -> unit
* { 3 Reduce }
type _ op =
Max : [< `Int | `Float ] op
| Min : [< `Int | `Float ] op
| Sum : [< `Int | `Float ] op
| Prod : [< `Int | `Float ] op
| Land : [< `Int ] op
| Lor : [< `Int ] op
| Xor : [< `Int ] op
| Int_max : [< `Int ] op
| Int_min : [< `Int ] op
| Int_sum : [< `Int ] op
| Int_prod : [< `Int ] op
| Int_land : [< `Int ] op
| Int_lor : [< `Int ] op
| Int_xor : [< `Int ] op
| Float_max : [< `Float ] op
| Float_min : [< `Float ] op
| Float_sum : [< `Float ] op
* The operations that can be performed by a reduce or scan ; some of
them are only valid for integers . [ Max ] and [ Min ]
are maximum and minimum ; [ Sum ] and [ Prod ]
are summation ( [ + ] ) and product ( [ * ] ) .
[ Land ] , [ Lor ] and [ Xor ] are logical ( bit - per - bit ) and ,
or and exclusive - or .
The constructors prefixed by [ Int _ ] or [ Float _ ]
( e.g. [ Int_max ] , [ Float_sum ] ) are type - specialized variants of
the non - prefixed constructors . For example , [ Int_max ] is [ ]
specialized to integer values , and [ Float_sum ] is [ Sum ]
specialized to floating - point values . These specialized
constructors are included for backward compatibility with earlier
versions of this library . They will be deprecated in the future .
them are only valid for integers. [Max] and [Min]
are maximum and minimum; [Sum] and [Prod]
are summation ([+]) and product ([*]).
[Land], [Lor] and [Xor] are logical (bit-per-bit) and,
or and exclusive-or.
The constructors prefixed by [Int_] or [Float_]
(e.g. [Int_max], [Float_sum]) are type-specialized variants of
the non-prefixed constructors. For example, [Int_max] is [Max]
specialized to integer values, and [Float_sum] is [Sum]
specialized to floating-point values. These specialized
constructors are included for backward compatibility with earlier
versions of this library. They will be deprecated in the future. *)
val reduce_int: int -> [`Int] op -> rank -> communicator -> int
val reduce_float: float -> [`Float] op -> rank -> communicator -> float
* [ Mpi.reduce_int d op root comm ] computes the value of
[ d0 op d1 op ... op dN ] , where [ d0 ... dN ] are the values of
the [ d ] argument at every node in [ comm ] . The result value
is returned at node with rank [ root ] . A meaningless integer
is returned at other nodes . [ Mpi.reduce_float ] is similar
except for the use of floating - point operations instead of
integer operations .
[d0 op d1 op ... op dN], where [d0 ... dN] are the values of
the [d] argument at every node in [comm]. The result value
is returned at node with rank [root]. A meaningless integer
is returned at other nodes. [Mpi.reduce_float] is similar
except for the use of floating-point operations instead of
integer operations. *)
val reduce_int_array:
int array -> int array -> [`Int] op -> rank -> communicator -> unit
val reduce_float_array:
float array -> float array -> [`Float] op -> rank -> communicator -> unit
val reduce_bigarray:
('a, 'b, 'c) Bigarray.Genarray.t -> ('a, 'b, 'c) Bigarray.Genarray.t
-> 'any op -> rank -> communicator -> unit
val reduce_bigarray0:
('a, 'b, 'c) Bigarray.Array0.t -> ('a, 'b, 'c) Bigarray.Array0.t
-> 'any op -> rank -> communicator -> unit
val reduce_bigarray1:
('a, 'b, 'c) Bigarray.Array1.t -> ('a, 'b, 'c) Bigarray.Array1.t
-> 'any op -> rank -> communicator -> unit
val reduce_bigarray2:
('a, 'b, 'c) Bigarray.Array2.t -> ('a, 'b, 'c) Bigarray.Array2.t
-> 'any op -> rank -> communicator -> unit
val reduce_bigarray3:
('a, 'b, 'c) Bigarray.Array3.t -> ('a, 'b, 'c) Bigarray.Array3.t
-> 'any op -> rank -> communicator -> unit
* [ Mpi.reduce_int_array d res op root comm ] computes
[ Array.length d ] reductions by operation [ op ] simultaneously .
For every [ i ] , the values of [ d.(i ) ] at every node
are combined using [ op ] and the result is stored into [ dst.(i ) ]
at node [ root ] . For [ Mpi.reduce_bigarray * ] applied to an array
of floating - point values , an exception is raised for the
[ Land ] , [ Lor ] and [ Xor ] operations and the others
are interpreted as floating - point operations .
[Array.length d] reductions by operation [op] simultaneously.
For every [i], the values of [d.(i)] at every node
are combined using [op] and the result is stored into [dst.(i)]
at node [root]. For [Mpi.reduce_bigarray*] applied to an array
of floating-point values, an exception is raised for the
[Land], [Lor] and [Xor] operations and the others
are interpreted as floating-point operations. *)
* { 3 Reduce to all }
val allreduce_int: int -> [`Int] op -> communicator -> int
val allreduce_float: float -> [`Float] op -> communicator -> float
val allreduce_int_array:
int array -> int array -> [`Int] op -> communicator -> unit
val allreduce_float_array:
float array -> float array -> [`Float] op -> communicator -> unit
val allreduce_bigarray:
('a, 'b, 'c) Bigarray.Genarray.t -> ('a, 'b, 'c) Bigarray.Genarray.t
-> 'any op -> communicator -> unit
val allreduce_bigarray0:
('a, 'b, 'c) Bigarray.Array0.t -> ('a, 'b, 'c) Bigarray.Array0.t
-> 'any op -> communicator -> unit
val allreduce_bigarray1:
('a, 'b, 'c) Bigarray.Array1.t -> ('a, 'b, 'c) Bigarray.Array1.t
-> 'any op -> communicator -> unit
val allreduce_bigarray2:
('a, 'b, 'c) Bigarray.Array2.t -> ('a, 'b, 'c) Bigarray.Array2.t
-> 'any op -> communicator -> unit
val allreduce_bigarray3:
('a, 'b, 'c) Bigarray.Array3.t -> ('a, 'b, 'c) Bigarray.Array3.t
-> 'any op -> communicator -> unit
* The [ Mpi.allreduce _ * ] operations are similar to the
corresponding [ Mpi.reduce _ * ] operations , except that the result
of the reduction is made available at all nodes .
For [ Mpi.allreduce_bigarray* ] applied to an array of floating - point
values , an exception is raised for the [ Land ] , [ Lor ]
and [ Xor ] operations and the others are interpreted as
floating - point operations .
corresponding [Mpi.reduce_*] operations, except that the result
of the reduction is made available at all nodes.
For [Mpi.allreduce_bigarray*] applied to an array of floating-point
values, an exception is raised for the [Land], [Lor]
and [Xor] operations and the others are interpreted as
floating-point operations. *)
* { 3 Scan }
val scan_int: int -> [`Int] op -> communicator -> int
val scan_float: float -> [`Float] op -> communicator -> float
val scan_int_array:
int array -> int array -> [`Int] op -> communicator -> unit
val scan_float_array:
float array -> float array -> [`Float] op -> communicator -> unit
val scan_bigarray:
('a, 'b, 'c) Bigarray.Genarray.t -> ('a, 'b, 'c) Bigarray.Genarray.t
-> 'any op -> communicator -> unit
val scan_bigarray0:
('a, 'b, 'c) Bigarray.Array0.t -> ('a, 'b, 'c) Bigarray.Array0.t
-> 'any op -> communicator -> unit
val scan_bigarray1:
('a, 'b, 'c) Bigarray.Array1.t -> ('a, 'b, 'c) Bigarray.Array1.t
-> 'any op -> communicator -> unit
val scan_bigarray2:
('a, 'b, 'c) Bigarray.Array2.t -> ('a, 'b, 'c) Bigarray.Array2.t
-> 'any op -> communicator -> unit
val scan_bigarray3:
('a, 'b, 'c) Bigarray.Array3.t -> ('a, 'b, 'c) Bigarray.Array3.t
-> 'any op -> communicator -> unit
* Same as [ Mpi.scan_int ] and [ Mpi.scan_float ] , but perform several
scanning operations on the elements of the input array ( first
argument ) . The result is stored in the array passed as second
argument at the root node . For [ Mpi.scan_bigarray * ] applied to
an array of floating - point values , an exception is raised for
the [ Land ] , [ Lor ] and [ Xor ] operations and the
others are interpreted as floating - point operations .
scanning operations on the elements of the input array (first
argument). The result is stored in the array passed as second
argument at the root node. For [Mpi.scan_bigarray*] applied to
an array of floating-point values, an exception is raised for
the [Land], [Lor] and [Xor] operations and the
others are interpreted as floating-point operations. *)
* { 2 Advanced operations on communicators }
val comm_compare: communicator -> communicator -> bool
* Compare two communicators and return [ true ] if they are the same ,
[ false ] otherwise .
[false] otherwise. *)
type color = int
val comm_split: communicator -> color -> int -> communicator
val color_none: color
* { 3 Cartesian topologies }
val cart_create:
communicator -> int array -> bool array -> bool -> communicator
val dims_create: int -> int array -> int array
* [ Mpi.dims_create numnodes hints ] helps determining a
suitable [ dims ] argument to [ Mpi.cart_create ]
given a number of nodes [ numnodes ] , the number of
dimensions required , and optional constraints .
The length of the [ hints ] array determines the number of
dimensions . For each dimension [ d ] , [ hints.(d ) ] , if not null ,
is the number of nodes required along this dimension . If null ,
[ Mpi.dims_create ] figures out a suitable number .
For instance , [ Mpi.dims_create 24 [ |0;0| ] ] returns reasonable
dimensions for a two - dimensional grid containing 24 nodes .
suitable [dims] argument to [Mpi.cart_create]
given a number of nodes [numnodes], the number of
dimensions required, and optional constraints.
The length of the [hints] array determines the number of
dimensions. For each dimension [d], [hints.(d)], if not null,
is the number of nodes required along this dimension. If null,
[Mpi.dims_create] figures out a suitable number.
For instance, [Mpi.dims_create 24 [|0;0|]] returns reasonable
dimensions for a two-dimensional grid containing 24 nodes. *)
val cart_rank: communicator -> int array -> rank
* [ Mpi.cart_rank comm coords ] return the rank of the node in
the cartesian topology [ comm ] that is at coordinates [ coords ] .
The [ coords ] array must have one element per dimension of the
cartesian topology . Individual coordinates range between [ 0 ]
( inclusive ) and the corresponding dimension ( exclusive ) .
the cartesian topology [comm] that is at coordinates [coords].
The [coords] array must have one element per dimension of the
cartesian topology. Individual coordinates range between [0]
(inclusive) and the corresponding dimension (exclusive). *)
val cart_coords: communicator -> rank -> int array
* The inverse operation of [ Mpi.cart_rank ] .
[ Mpi.cart_coords comm r ] returns the cartesian coordinates
of the node having rank [ r ] in [ comm ] .
[Mpi.cart_coords comm r] returns the cartesian coordinates
of the node having rank [r] in [comm]. *)
* { 3 Process group management }
type group
* The type of groups . Groups represent sets of nodes
( processing elements ) . Unlike communicators , they can not
be used directly for communication . Instead , one constructs
a group representing the desired set of nodes , then build
a communicator for this group .
(processing elements). Unlike communicators, they cannot
be used directly for communication. Instead, one constructs
a group representing the desired set of nodes, then build
a communicator for this group. *)
val comm_create: communicator -> group -> communicator
val group_size: group -> int
val group_rank: group -> rank
val group_translate_ranks: group -> rank array -> group -> rank array
* [ Mpi.group_translate_ranks g1 ranks ] translates the ranks
of a number of nodes from one group to another . [ rank ]
is an array of node ranks relative to group [ g1 ] . The
returned array contains the ranks for the same nodes , relative
to group [ g2 ] .
of a number of nodes from one group to another. [rank]
is an array of node ranks relative to group [g1]. The
returned array contains the ranks for the same nodes, relative
to group [g2]. *)
val comm_group: communicator -> group
val group_union: group -> group -> group
val group_intersection: group -> group -> group
val group_difference: group -> group -> group
val group_incl: group -> rank array -> group
* [ Mpi.group_incl group ranks ] returns the subset of [ group ]
containing the nodes whose ranks are given in the array [ ranks ] .
containing the nodes whose ranks are given in the array [ranks]. *)
val group_excl: group -> rank array -> group
type group_range = { range_first: int; range_last: int; range_stride: int }
val group_range_incl: group -> group_range array -> group
val group_range_excl: group -> group_range array -> group
* { 2 Miscellaneous }
external wtime: unit -> float = "caml_mpi_wtime"
|
f1759c3eb19d535b99ff59de1fd1a1a0b4ce115b9214910634c688c1a45f0cd1 | jordanthayer/ocaml-search | sharedq.ml | $ I d : sharedq.ml , v 1.1 2003/06/20 16:30:46 ruml Exp $
shared queues
shared queues
*)
(* A FIFO queue that may be shared between threads.  Every access to
   the underlying [Queue.t] is serialized through [mod_rights], and
   [addition] lets consumers block until a producer enqueues data. *)
type 'a t = {
  q : 'a Queue.t;
  (* the right to access or modify q *)
  mod_rights : Mutex.t;
  (* signaled when something is added to q *)
  addition : Condition.t;
}
(* Build a fresh, empty shared queue with its own mutex and condition
   variable. *)
let create () =
  let lock = Mutex.create () in
  let cond = Condition.create () in
  { q = Queue.create (); mod_rights = lock; addition = cond }
(* [mutexing m f] runs [f ()] with mutex [m] held — alias for
   [Wrthreads.with_mutex]; every accessor below funnels through it. *)
let mutexing = Wrthreads.with_mutex
(* Enqueue [elt], then wake one consumer that may be blocked in
   [take_next].  Signaling after the mutex is released is safe here
   because the queue itself was updated under the lock. *)
let add shq elt =
  let () = mutexing shq.mod_rights (fun () -> Queue.add elt shq.q) in
  Condition.signal shq.addition
(* True when the queue currently holds no elements (checked under the
   lock; the answer may be stale by the time the caller acts on it). *)
let is_empty shq =
  mutexing shq.mod_rights (fun () -> Queue.length shq.q = 0)
(* Apply [f] to every queued element, oldest first, holding the lock
   for the whole traversal. *)
let iter f shq =
  let visit () = Queue.iter f shq.q in
  mutexing shq.mod_rights visit
(* Fold [f] over the queued elements, oldest first, starting from
   [init], holding the lock for the whole traversal. *)
let fold f init shq =
  let run () = Queue.fold f init shq.q in
  mutexing shq.mod_rights run
(* Raised by [take] when the queue holds no element. *)
exception Empty

(* Remove and return the oldest element without blocking; raise
   [Empty] if the queue is empty.  The test-then-take is atomic
   because both happen inside the same critical section. *)
let take shq =
  mutexing shq.mod_rights
    (fun () ->
       if Queue.is_empty shq.q then raise Empty
       else Queue.take shq.q)
(* Block until an element is available, then remove and return it.
   [Condition.wait] atomically releases the mutex while blocked and
   re-acquires it before returning; the recursive re-test guards
   against spurious wakeups. *)
let take_next shq =
  mutexing shq.mod_rights
    (fun () ->
       let rec await () =
         if Queue.is_empty shq.q then begin
           Condition.wait shq.addition shq.mod_rights;
           await ()
         end
       in
       await ();
       Queue.take shq.q)
EOF
| null | https://raw.githubusercontent.com/jordanthayer/ocaml-search/57cfc85417aa97ee5d8fbcdb84c333aae148175f/wrthreads/sharedq.ml | ocaml | the right to access or modify q
signaled when something is added to q
unlocks mutex and blocks
mutex now locked again | $ I d : sharedq.ml , v 1.1 2003/06/20 16:30:46 ruml Exp $
shared queues
shared queues
*)
type 'a t = {
q : 'a Queue.t;
mod_rights : Mutex.t;
addition : Condition.t;
}
let create () =
{ mod_rights = Mutex.create ();
q = Queue.create ();
addition = Condition.create (); }
let mutexing = Wrthreads.with_mutex
let add q x =
mutexing q.mod_rights (fun () -> Queue.add x q.q);
Condition.signal q.addition
let is_empty q =
mutexing q.mod_rights (fun () -> Queue.is_empty q.q)
let iter f q =
mutexing q.mod_rights (fun () -> Queue.iter f q.q)
let fold f a q =
mutexing q.mod_rights (fun () -> Queue.fold f a q.q)
exception Empty
let take q =
mutexing q.mod_rights (fun () ->
try
Queue.take q.q
with Queue.Empty -> raise Empty)
let take_next q =
mutexing q.mod_rights (fun () ->
while Queue.is_empty q.q do
Condition.wait q.addition q.mod_rights
done;
Queue.take q.q)
EOF
|
63407d7cd2d9e1a9b1c8baf13612b79d18a2ff585423d27e3133a755df30ddd1 | NorfairKing/smos | Report.hs | {-# LANGUAGE OverloadedStrings #-}
module Smos.Actions.Report where
import Smos.Actions.Browser
import Smos.Actions.File
import Smos.Actions.Report.Next
import Smos.Actions.Report.Stuck
import Smos.Actions.Report.Timestamps
import Smos.Actions.Report.Waiting
import Smos.Actions.Report.Work
import Smos.Types
allPlainReportActions :: [Action]
allPlainReportActions =
concat
[ allPlainReportExitActions,
allPlainReportNextActions,
allPlainReportWaitingActions,
allPlainReportTimestampsActions,
allPlainReportStuckActions,
allPlainReportWorkActions
]
allPlainReportExitActions :: [Action]
allPlainReportExitActions =
[ exitReport
]
allReportUsingActions :: [ActionUsing Char]
allReportUsingActions =
concat
[ allReportNextActionsUsingActions,
allReportWaitingUsingActions,
allReportTimestampsUsingActions,
allReportStuckUsingActions,
allReportWorkUsingActions
]
-- Exit a Report
-- If there is a file open, go to it (this already works via another Action?)
-- If there is no file open, go to the browser in the workflow dir
exitReport :: Action
exitReport =
Action
{ actionName = "exitReport",
actionDescription = "Exit any smos report, back to open file or browser",
actionFunc = do
ec <- gets smosStateCursor
case editorCursorLastOpenedFile ec of
Just fp -> switchToFile fp
Nothing -> actionFunc selectBrowserWorkflow
}
| null | https://raw.githubusercontent.com/NorfairKing/smos/f72b26c2e66ab4f3ec879a1bedc6c0e8eeb18a01/smos/src/Smos/Actions/Report.hs | haskell | # LANGUAGE OverloadedStrings #
Exit a Report
If there is a file open, go to it (this already works via another Action?)
If there is no file open, go to the browser in the workflow dir |
module Smos.Actions.Report where
import Smos.Actions.Browser
import Smos.Actions.File
import Smos.Actions.Report.Next
import Smos.Actions.Report.Stuck
import Smos.Actions.Report.Timestamps
import Smos.Actions.Report.Waiting
import Smos.Actions.Report.Work
import Smos.Types
allPlainReportActions :: [Action]
allPlainReportActions =
concat
[ allPlainReportExitActions,
allPlainReportNextActions,
allPlainReportWaitingActions,
allPlainReportTimestampsActions,
allPlainReportStuckActions,
allPlainReportWorkActions
]
allPlainReportExitActions :: [Action]
allPlainReportExitActions =
[ exitReport
]
allReportUsingActions :: [ActionUsing Char]
allReportUsingActions =
concat
[ allReportNextActionsUsingActions,
allReportWaitingUsingActions,
allReportTimestampsUsingActions,
allReportStuckUsingActions,
allReportWorkUsingActions
]
exitReport :: Action
exitReport =
Action
{ actionName = "exitReport",
actionDescription = "Exit any smos report, back to open file or browser",
actionFunc = do
ec <- gets smosStateCursor
case editorCursorLastOpenedFile ec of
Just fp -> switchToFile fp
Nothing -> actionFunc selectBrowserWorkflow
}
|
206f8c5d37d23b3825cc05b125cd122c1d8ecbd883b90d73f7110e39f4757c49 | rescript-association/genType | oprint.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
Projet Cristal , INRIA Rocquencourt
(* *)
Copyright 2002 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
open Format
open Outcometree
exception Ellipsis
let cautious f ppf arg =
try f ppf arg with
Ellipsis -> fprintf ppf "..."
let out_ident = ref pp_print_string
let map_primitive_name = ref (fun x -> x)
let print_lident ppf = function
| "::" -> !out_ident ppf "(::)"
| s -> !out_ident ppf s
let rec print_ident ppf =
function
Oide_ident s -> print_lident ppf s
| Oide_dot (id, s) ->
print_ident ppf id; pp_print_char ppf '.'; print_lident ppf s
| Oide_apply (id1, id2) ->
fprintf ppf "%a(%a)" print_ident id1 print_ident id2
let parenthesized_ident name =
(List.mem name ["or"; "mod"; "land"; "lor"; "lxor"; "lsl"; "lsr"; "asr"])
||
(match name.[0] with
'a'..'z' | 'A'..'Z' | '\223'..'\246' | '\248'..'\255' | '_' ->
false
| _ -> true)
let value_ident ppf name =
if parenthesized_ident name then
fprintf ppf "( %s )" name
else
pp_print_string ppf name
(* Values *)
let valid_float_lexeme s =
let l = String.length s in
let rec loop i =
if i >= l then s ^ "." else
match s.[i] with
| '0' .. '9' | '-' -> loop (i+1)
| _ -> s
in loop 0
let float_repres f =
match classify_float f with
FP_nan -> "nan"
| FP_infinite ->
if f < 0.0 then "neg_infinity" else "infinity"
| _ ->
let float_val =
let s1 = Printf.sprintf "%.12g" f in
if f = float_of_string s1 then s1 else
let s2 = Printf.sprintf "%.15g" f in
if f = float_of_string s2 then s2 else
Printf.sprintf "%.18g" f
in valid_float_lexeme float_val
let parenthesize_if_neg ppf fmt v isneg =
if isneg then pp_print_char ppf '(';
fprintf ppf fmt v;
if isneg then pp_print_char ppf ')'
let escape_string s =
(* Escape only C0 control characters (bytes <= 0x1F), DEL(0x7F), '\\' and '"' *)
let n = ref 0 in
for i = 0 to String.length s - 1 do
n := !n +
(match String.unsafe_get s i with
| '\"' | '\\' | '\n' | '\t' | '\r' | '\b' -> 2
| '\x00' .. '\x1F'
| '\x7F' -> 4
| _ -> 1)
done;
if !n = String.length s then s else begin
let s' = Bytes.create !n in
n := 0;
for i = 0 to String.length s - 1 do
begin match String.unsafe_get s i with
| ('\"' | '\\') as c ->
Bytes.unsafe_set s' !n '\\'; incr n; Bytes.unsafe_set s' !n c
| '\n' ->
Bytes.unsafe_set s' !n '\\'; incr n; Bytes.unsafe_set s' !n 'n'
| '\t' ->
Bytes.unsafe_set s' !n '\\'; incr n; Bytes.unsafe_set s' !n 't'
| '\r' ->
Bytes.unsafe_set s' !n '\\'; incr n; Bytes.unsafe_set s' !n 'r'
| '\b' ->
Bytes.unsafe_set s' !n '\\'; incr n; Bytes.unsafe_set s' !n 'b'
| '\x00' .. '\x1F' | '\x7F' as c ->
let a = Char.code c in
Bytes.unsafe_set s' !n '\\';
incr n;
Bytes.unsafe_set s' !n (Char.chr (48 + a / 100));
incr n;
Bytes.unsafe_set s' !n (Char.chr (48 + (a / 10) mod 10));
incr n;
Bytes.unsafe_set s' !n (Char.chr (48 + a mod 10));
| c -> Bytes.unsafe_set s' !n c
end;
incr n
done;
Bytes.to_string s'
end
let print_out_string ppf s =
let not_escaped =
(* let the user dynamically choose if strings should be escaped: *)
match Sys.getenv_opt "OCAMLTOP_UTF_8" with
| None -> true
| Some x ->
match bool_of_string_opt x with
| None -> true
| Some f -> f in
if not_escaped then
fprintf ppf "\"%s\"" (escape_string s)
else
fprintf ppf "%S" s
let print_out_value ppf tree =
let rec print_tree_1 ppf =
function
| Oval_constr (name, [param]) ->
fprintf ppf "@[<1>%a@ %a@]" print_ident name print_constr_param param
| Oval_constr (name, (_ :: _ as params)) ->
fprintf ppf "@[<1>%a@ (%a)@]" print_ident name
(print_tree_list print_tree_1 ",") params
| Oval_variant (name, Some param) ->
fprintf ppf "@[<2>`%s@ %a@]" name print_constr_param param
| tree -> print_simple_tree ppf tree
and print_constr_param ppf = function
| Oval_int i -> parenthesize_if_neg ppf "%i" i (i < 0)
| Oval_int32 i -> parenthesize_if_neg ppf "%lil" i (i < 0l)
| Oval_int64 i -> parenthesize_if_neg ppf "%LiL" i (i < 0L)
| Oval_nativeint i -> parenthesize_if_neg ppf "%nin" i (i < 0n)
| Oval_float f -> parenthesize_if_neg ppf "%s" (float_repres f) (f < 0.0)
| Oval_string (_,_, Ostr_bytes) as tree ->
pp_print_char ppf '(';
print_simple_tree ppf tree;
pp_print_char ppf ')';
| tree -> print_simple_tree ppf tree
and print_simple_tree ppf =
function
Oval_int i -> fprintf ppf "%i" i
| Oval_int32 i -> fprintf ppf "%lil" i
| Oval_int64 i -> fprintf ppf "%LiL" i
| Oval_nativeint i -> fprintf ppf "%nin" i
| Oval_float f -> pp_print_string ppf (float_repres f)
| Oval_char c -> fprintf ppf "%C" c
| Oval_string (s, maxlen, kind) ->
begin try
let len = String.length s in
let s = if len > maxlen then String.sub s 0 maxlen else s in
begin match kind with
| Ostr_bytes -> fprintf ppf "Bytes.of_string %S" s
| Ostr_string -> print_out_string ppf s
end;
(if len > maxlen then
fprintf ppf
"... (* string length %d; truncated *)" len
)
with
Invalid_argument _ (* "String.create" *)-> fprintf ppf "<huge string>"
end
| Oval_list tl ->
fprintf ppf "@[<1>[%a]@]" (print_tree_list print_tree_1 ";") tl
| Oval_array tl ->
fprintf ppf "@[<2>[|%a|]@]" (print_tree_list print_tree_1 ";") tl
| Oval_constr (name, []) -> print_ident ppf name
| Oval_variant (name, None) -> fprintf ppf "`%s" name
| Oval_stuff s -> pp_print_string ppf s
| Oval_record fel ->
fprintf ppf "@[<1>{%a}@]" (cautious (print_fields true)) fel
| Oval_ellipsis -> raise Ellipsis
| Oval_printer f -> f ppf
| Oval_tuple tree_list ->
fprintf ppf "@[<1>(%a)@]" (print_tree_list print_tree_1 ",") tree_list
| tree -> fprintf ppf "@[<1>(%a)@]" (cautious print_tree_1) tree
and print_fields first ppf =
function
[] -> ()
| (name, tree) :: fields ->
if not first then fprintf ppf ";@ ";
fprintf ppf "@[<1>%a@ =@ %a@]" print_ident name (cautious print_tree_1)
tree;
print_fields false ppf fields
and print_tree_list print_item sep ppf tree_list =
let rec print_list first ppf =
function
[] -> ()
| tree :: tree_list ->
if not first then fprintf ppf "%s@ " sep;
print_item ppf tree;
print_list false ppf tree_list
in
cautious (print_list true) ppf tree_list
in
cautious print_tree_1 ppf tree
let out_value = ref print_out_value
(* Types *)
let rec print_list_init pr sep ppf =
function
[] -> ()
| a :: l -> sep ppf; pr ppf a; print_list_init pr sep ppf l
let rec print_list pr sep ppf =
function
[] -> ()
| [a] -> pr ppf a
| a :: l -> pr ppf a; sep ppf; print_list pr sep ppf l
let pr_present =
print_list (fun ppf s -> fprintf ppf "`%s" s) (fun ppf -> fprintf ppf "@ ")
let pr_vars =
print_list (fun ppf s -> fprintf ppf "'%s" s) (fun ppf -> fprintf ppf "@ ")
let rec print_out_type ppf =
function
| Otyp_alias (ty, s) ->
fprintf ppf "@[%a@ as '%s@]" print_out_type ty s
| Otyp_poly (sl, ty) ->
fprintf ppf "@[<hov 2>%a.@ %a@]"
pr_vars sl
print_out_type ty
| ty ->
print_out_type_1 ppf ty
and print_out_type_1 ppf =
function
Otyp_arrow (lab, ty1, ty2) ->
pp_open_box ppf 0;
if lab <> "" then (pp_print_string ppf lab; pp_print_char ppf ':');
print_out_type_2 ppf ty1;
pp_print_string ppf " ->";
pp_print_space ppf ();
print_out_type_1 ppf ty2;
pp_close_box ppf ()
| ty -> print_out_type_2 ppf ty
and print_out_type_2 ppf =
function
Otyp_tuple tyl ->
fprintf ppf "@[<0>%a@]" (print_typlist print_simple_out_type " *") tyl
| ty -> print_simple_out_type ppf ty
and print_simple_out_type ppf =
function
Otyp_class (ng, id, tyl) ->
fprintf ppf "@[%a%s#%a@]" print_typargs tyl (if ng then "_" else "")
print_ident id
| Otyp_constr (Oide_dot (Oide_dot (Oide_ident "Js", "Fn"), name ),
[tyl])
->
let res =
if name = "arity0" then
Otyp_arrow ("", Otyp_constr (Oide_ident "unit", []),tyl)
else tyl
in
fprintf ppf "@[<0>(%a@ [@bs])@]" print_out_type_1 res
| Otyp_constr (Oide_dot (Oide_dot (Oide_ident "Js_OO", "Meth" ),name),
[tyl])
->
let res =
if name = "arity0" then
Otyp_arrow ("", Otyp_constr (Oide_ident "unit", []),tyl)
else tyl
in
fprintf ppf "@[<0>(%a@ [@bs.meth])@]" print_out_type_1 res
| Otyp_constr (Oide_dot (Oide_dot (Oide_ident "Js_OO", "Callback" ), _),
[tyl])
->
fprintf ppf "@[<0>(%a@ [@bs.this])@]" print_out_type_1 tyl
| Otyp_constr (id, tyl) ->
pp_open_box ppf 0;
print_typargs ppf tyl;
print_ident ppf id;
pp_close_box ppf ()
| Otyp_object (fields, rest) ->
fprintf ppf "@[<2>< %a >@]" (print_fields rest) fields
| Otyp_stuff s -> pp_print_string ppf s
| Otyp_var (ng, s) -> fprintf ppf "'%s%s" (if ng then "_" else "") s
| Otyp_variant (non_gen, row_fields, closed, tags) ->
let print_present ppf =
function
None | Some [] -> ()
| Some l -> fprintf ppf "@;<1 -2>> @[<hov>%a@]" pr_present l
in
let print_fields ppf =
function
Ovar_fields fields ->
print_list print_row_field (fun ppf -> fprintf ppf "@;<1 -2>| ")
ppf fields
| Ovar_typ typ ->
print_simple_out_type ppf typ
in
fprintf ppf "%s[%s@[<hv>@[<hv>%a@]%a ]@]" (if non_gen then "_" else "")
(if closed then if tags = None then " " else "< "
else if tags = None then "> " else "? ")
print_fields row_fields
print_present tags
| Otyp_alias _ | Otyp_poly _ | Otyp_arrow _ | Otyp_tuple _ as ty ->
pp_open_box ppf 1;
pp_print_char ppf '(';
print_out_type ppf ty;
pp_print_char ppf ')';
pp_close_box ppf ()
| Otyp_abstract | Otyp_open
| Otyp_sum _ | Otyp_manifest (_, _) -> ()
| Otyp_record lbls -> print_record_decl ppf lbls
| Otyp_module (p, n, tyl) ->
fprintf ppf "@[<1>(module %s" p;
let first = ref true in
List.iter2
(fun s t ->
let sep = if !first then (first := false; "with") else "and" in
fprintf ppf " %s type %s = %a" sep s print_out_type t
)
n tyl;
fprintf ppf ")@]"
| Otyp_attribute (t, attr) ->
fprintf ppf "@[<1>(%a [@@%s])@]" print_out_type t attr.oattr_name
and print_record_decl ppf lbls =
fprintf ppf "{%a@;<1 -2>}"
(print_list_init print_out_label (fun ppf -> fprintf ppf "@ ")) lbls
and print_fields rest ppf =
function
[] ->
begin match rest with
Some non_gen -> fprintf ppf "%s.." (if non_gen then "_" else "")
| None -> ()
end
| [s, t] ->
fprintf ppf "%s : %a" s print_out_type t;
begin match rest with
Some _ -> fprintf ppf ";@ "
| None -> ()
end;
print_fields rest ppf []
| (s, t) :: l ->
fprintf ppf "%s : %a;@ %a" s print_out_type t (print_fields rest) l
and print_row_field ppf (l, opt_amp, tyl) =
let pr_of ppf =
if opt_amp then fprintf ppf " of@ &@ "
else if tyl <> [] then fprintf ppf " of@ "
else fprintf ppf ""
in
fprintf ppf "@[<hv 2>`%s%t%a@]" l pr_of (print_typlist print_out_type " &")
tyl
and print_typlist print_elem sep ppf =
function
[] -> ()
| [ty] -> print_elem ppf ty
| ty :: tyl ->
print_elem ppf ty;
pp_print_string ppf sep;
pp_print_space ppf ();
print_typlist print_elem sep ppf tyl
and print_typargs ppf =
function
[] -> ()
| [ty1] -> print_simple_out_type ppf ty1; pp_print_space ppf ()
| tyl ->
pp_open_box ppf 1;
pp_print_char ppf '(';
print_typlist print_out_type "," ppf tyl;
pp_print_char ppf ')';
pp_close_box ppf ();
pp_print_space ppf ()
and print_out_label ppf (name, mut, arg) =
fprintf ppf "@[<2>%s%s :@ %a@];" (if mut then "mutable " else "") name
print_out_type arg
let out_type = ref print_out_type
(* Class types *)
let type_parameter ppf (ty, (co, cn)) =
fprintf ppf "%s%s"
(if not cn then "+" else if not co then "-" else "")
(if ty = "_" then ty else "'"^ty)
let print_out_class_params ppf =
function
[] -> ()
| tyl ->
fprintf ppf "@[<1>[%a]@]@ "
(print_list type_parameter (fun ppf -> fprintf ppf ", "))
tyl
let rec print_out_class_type ppf =
function
Octy_constr (id, tyl) ->
let pr_tyl ppf =
function
[] -> ()
| tyl ->
fprintf ppf "@[<1>[%a]@]@ " (print_typlist !out_type ",") tyl
in
fprintf ppf "@[%a%a@]" pr_tyl tyl print_ident id
| Octy_arrow (lab, ty, cty) ->
fprintf ppf "@[%s%a ->@ %a@]" (if lab <> "" then lab ^ ":" else "")
print_out_type_2 ty print_out_class_type cty
| Octy_signature (self_ty, csil) ->
let pr_param ppf =
function
Some ty -> fprintf ppf "@ @[(%a)@]" !out_type ty
| None -> ()
in
fprintf ppf "@[<hv 2>@[<2>object%a@]@ %a@;<1 -2>end@]" pr_param self_ty
(print_list print_out_class_sig_item (fun ppf -> fprintf ppf "@ "))
csil
and print_out_class_sig_item ppf =
function
Ocsg_constraint (ty1, ty2) ->
fprintf ppf "@[<2>constraint %a =@ %a@]" !out_type ty1
!out_type ty2
| Ocsg_method (name, priv, virt, ty) ->
fprintf ppf "@[<2>method %s%s%s :@ %a@]"
(if priv then "private " else "") (if virt then "virtual " else "")
name !out_type ty
| Ocsg_value (name, mut, vr, ty) ->
fprintf ppf "@[<2>val %s%s%s :@ %a@]"
(if mut then "mutable " else "")
(if vr then "virtual " else "")
name !out_type ty
let out_class_type = ref print_out_class_type
Signature
let out_module_type = ref (fun _ -> failwith "Oprint.out_module_type")
let out_sig_item = ref (fun _ -> failwith "Oprint.out_sig_item")
let out_signature = ref (fun _ -> failwith "Oprint.out_signature")
let out_type_extension = ref (fun _ -> failwith "Oprint.out_type_extension")
let rec print_out_functor funct ppf =
function
Omty_functor (_, None, mty_res) ->
if funct then fprintf ppf "() %a" (print_out_functor true) mty_res
else fprintf ppf "functor@ () %a" (print_out_functor true) mty_res
| Omty_functor (name, Some mty_arg, mty_res) -> begin
match name, funct with
| "_", true ->
fprintf ppf "->@ %a ->@ %a"
print_out_module_type mty_arg (print_out_functor false) mty_res
| "_", false ->
fprintf ppf "%a ->@ %a"
print_out_module_type mty_arg (print_out_functor false) mty_res
| name, true ->
fprintf ppf "(%s : %a) %a" name
print_out_module_type mty_arg (print_out_functor true) mty_res
| name, false ->
fprintf ppf "functor@ (%s : %a) %a" name
print_out_module_type mty_arg (print_out_functor true) mty_res
end
| m ->
if funct then fprintf ppf "->@ %a" print_out_module_type m
else print_out_module_type ppf m
and print_out_module_type ppf =
function
Omty_abstract -> ()
| Omty_functor _ as t ->
fprintf ppf "@[<2>%a@]" (print_out_functor false) t
| Omty_ident id -> fprintf ppf "%a" print_ident id
| Omty_signature sg ->
fprintf ppf "@[<hv 2>sig@ %a@;<1 -2>end@]" !out_signature sg
| Omty_alias id -> fprintf ppf "(module %a)" print_ident id
and print_out_signature ppf =
function
[] -> ()
| [item] -> !out_sig_item ppf item
| Osig_typext(ext, Oext_first) :: items ->
(* Gather together the extension constructors *)
let rec gather_extensions acc items =
match items with
Osig_typext(ext, Oext_next) :: items ->
gather_extensions
((ext.oext_name, ext.oext_args, ext.oext_ret_type) :: acc)
items
| _ -> (List.rev acc, items)
in
let exts, items =
gather_extensions
[(ext.oext_name, ext.oext_args, ext.oext_ret_type)]
items
in
let te =
{ otyext_name = ext.oext_type_name;
otyext_params = ext.oext_type_params;
otyext_constructors = exts;
otyext_private = ext.oext_private }
in
fprintf ppf "%a@ %a" !out_type_extension te print_out_signature items
| item :: items ->
fprintf ppf "%a@ %a" !out_sig_item item print_out_signature items
and print_out_sig_item ppf =
function
Osig_class (vir_flag, name, params, clt, rs) ->
fprintf ppf "@[<2>%s%s@ %a%s@ :@ %a@]"
(if rs = Orec_next then "and" else "class")
(if vir_flag then " virtual" else "") print_out_class_params params
name !out_class_type clt
| Osig_class_type (vir_flag, name, params, clt, rs) ->
fprintf ppf "@[<2>%s%s@ %a%s@ =@ %a@]"
(if rs = Orec_next then "and" else "class type")
(if vir_flag then " virtual" else "") print_out_class_params params
name !out_class_type clt
| Osig_typext (ext, Oext_exception) ->
fprintf ppf "@[<2>exception %a@]"
print_out_constr (ext.oext_name, ext.oext_args, ext.oext_ret_type)
| Osig_typext (ext, _es) ->
print_out_extension_constructor ppf ext
| Osig_modtype (name, Omty_abstract) ->
fprintf ppf "@[<2>module type %s@]" name
| Osig_modtype (name, mty) ->
fprintf ppf "@[<2>module type %s =@ %a@]" name !out_module_type mty
| Osig_module (name, Omty_alias id, _) ->
fprintf ppf "@[<2>module %s =@ %a@]" name print_ident id
| Osig_module (name, mty, rs) ->
fprintf ppf "@[<2>%s %s :@ %a@]"
(match rs with Orec_not -> "module"
| Orec_first -> "module rec"
| Orec_next -> "and")
name !out_module_type mty
| Osig_type(td, rs) ->
print_out_type_decl
(match rs with
| Orec_not -> "type nonrec"
| Orec_first -> "type"
| Orec_next -> "and")
ppf td
| Osig_value vd ->
let kwd = if vd.oval_prims = [] then "val" else "external" in
let pr_prims ppf =
function
[] -> ()
| s :: sl ->
fprintf ppf "@ = \"%s\"" s;
List.iter (fun s ->
(* TODO: in general, we should print bs attributes, some attributes like
bs.splice does need it *)
fprintf ppf "@ \"%s\"" (!map_primitive_name s)
) sl
in
fprintf ppf "@[<2>%s %a :@ %a%a%a@]" kwd value_ident vd.oval_name
!out_type vd.oval_type pr_prims vd.oval_prims
(fun ppf -> List.iter (fun a -> fprintf ppf "@ [@@@@%s]" a.oattr_name))
vd.oval_attributes
| Osig_ellipsis ->
fprintf ppf "..."
and print_out_type_decl kwd ppf td =
let print_constraints ppf =
List.iter
(fun (ty1, ty2) ->
fprintf ppf "@ @[<2>constraint %a =@ %a@]" !out_type ty1
!out_type ty2)
td.otype_cstrs
in
let type_defined ppf =
match td.otype_params with
[] -> pp_print_string ppf td.otype_name
| [param] -> fprintf ppf "@[%a@ %s@]" type_parameter param td.otype_name
| _ ->
fprintf ppf "@[(@[%a)@]@ %s@]"
(print_list type_parameter (fun ppf -> fprintf ppf ",@ "))
td.otype_params
td.otype_name
in
let print_manifest ppf =
function
Otyp_manifest (ty, _) -> fprintf ppf " =@ %a" !out_type ty
| _ -> ()
in
let print_name_params ppf =
fprintf ppf "%s %t%a" kwd type_defined print_manifest td.otype_type
in
let ty =
match td.otype_type with
Otyp_manifest (_, ty) -> ty
| _ -> td.otype_type
in
let print_private ppf = function
Asttypes.Private -> fprintf ppf " private"
| Asttypes.Public -> ()
in
let print_immediate ppf =
if td.otype_immediate then fprintf ppf " [%@%@immediate]" else ()
in
let print_unboxed ppf =
if td.otype_unboxed then fprintf ppf " [%@%@unboxed]" else ()
in
let print_out_tkind ppf = function
| Otyp_abstract -> ()
| Otyp_record lbls ->
fprintf ppf " =%a %a"
print_private td.otype_private
print_record_decl lbls
| Otyp_sum constrs ->
fprintf ppf " =%a@;<1 2>%a"
print_private td.otype_private
(print_list print_out_constr (fun ppf -> fprintf ppf "@ | ")) constrs
| Otyp_open ->
fprintf ppf " =%a .."
print_private td.otype_private
| ty ->
fprintf ppf " =%a@;<1 2>%a"
print_private td.otype_private
!out_type ty
in
fprintf ppf "@[<2>@[<hv 2>%t%a@]%t%t%t@]"
print_name_params
print_out_tkind ty
print_constraints
print_immediate
print_unboxed
and print_out_constr ppf (name, tyl,ret_type_opt) =
let name =
match name with
# 7200
| s -> s
in
match ret_type_opt with
| None ->
begin match tyl with
| [] ->
pp_print_string ppf name
| _ ->
fprintf ppf "@[<2>%s of@ %a@]" name
(print_typlist print_simple_out_type " *") tyl
end
| Some ret_type ->
begin match tyl with
| [] ->
fprintf ppf "@[<2>%s :@ %a@]" name print_simple_out_type ret_type
| _ ->
fprintf ppf "@[<2>%s :@ %a -> %a@]" name
(print_typlist print_simple_out_type " *")
tyl print_simple_out_type ret_type
end
and print_out_extension_constructor ppf ext =
let print_extended_type ppf =
let print_type_parameter ppf ty =
fprintf ppf "%s"
(if ty = "_" then ty else "'"^ty)
in
match ext.oext_type_params with
[] -> fprintf ppf "%s" ext.oext_type_name
| [ty_param] ->
fprintf ppf "@[%a@ %s@]"
print_type_parameter
ty_param
ext.oext_type_name
| _ ->
fprintf ppf "@[(@[%a)@]@ %s@]"
(print_list print_type_parameter (fun ppf -> fprintf ppf ",@ "))
ext.oext_type_params
ext.oext_type_name
in
fprintf ppf "@[<hv 2>type %t +=%s@;<1 2>%a@]"
print_extended_type
(if ext.oext_private = Asttypes.Private then " private" else "")
print_out_constr (ext.oext_name, ext.oext_args, ext.oext_ret_type)
and print_out_type_extension ppf te =
let print_extended_type ppf =
let print_type_parameter ppf ty =
fprintf ppf "%s"
(if ty = "_" then ty else "'"^ty)
in
match te.otyext_params with
[] -> fprintf ppf "%s" te.otyext_name
| [param] ->
fprintf ppf "@[%a@ %s@]"
print_type_parameter param
te.otyext_name
| _ ->
fprintf ppf "@[(@[%a)@]@ %s@]"
(print_list print_type_parameter (fun ppf -> fprintf ppf ",@ "))
te.otyext_params
te.otyext_name
in
fprintf ppf "@[<hv 2>type %t +=%s@;<1 2>%a@]"
print_extended_type
(if te.otyext_private = Asttypes.Private then " private" else "")
(print_list print_out_constr (fun ppf -> fprintf ppf "@ | "))
te.otyext_constructors
let _ = out_module_type := print_out_module_type
let _ = out_signature := print_out_signature
let _ = out_sig_item := print_out_sig_item
let _ = out_type_extension := print_out_type_extension
(* Phrases *)
let print_out_exception ppf exn outv =
match exn with
Sys.Break -> fprintf ppf "Interrupted.@."
| Out_of_memory -> fprintf ppf "Out of memory during evaluation.@."
| Stack_overflow ->
fprintf ppf "Stack overflow during evaluation (looping recursion?).@."
| _ -> fprintf ppf "@[Exception:@ %a.@]@." !out_value outv
let rec print_items ppf =
function
[] -> ()
| (Osig_typext(ext, Oext_first), None) :: items ->
(* Gather together extension constructors *)
let rec gather_extensions acc items =
match items with
(Osig_typext(ext, Oext_next), None) :: items ->
gather_extensions
((ext.oext_name, ext.oext_args, ext.oext_ret_type) :: acc)
items
| _ -> (List.rev acc, items)
in
let exts, items =
gather_extensions
[(ext.oext_name, ext.oext_args, ext.oext_ret_type)]
items
in
let te =
{ otyext_name = ext.oext_type_name;
otyext_params = ext.oext_type_params;
otyext_constructors = exts;
otyext_private = ext.oext_private }
in
fprintf ppf "@[%a@]" !out_type_extension te;
if items <> [] then fprintf ppf "@ %a" print_items items
| (tree, valopt) :: items ->
begin match valopt with
Some v ->
fprintf ppf "@[<2>%a =@ %a@]" !out_sig_item tree
!out_value v
| None -> fprintf ppf "@[%a@]" !out_sig_item tree
end;
if items <> [] then fprintf ppf "@ %a" print_items items
let print_out_phrase ppf =
function
Ophr_eval (outv, ty) ->
fprintf ppf "@[- : %a@ =@ %a@]@." !out_type ty !out_value outv
| Ophr_signature [] -> ()
| Ophr_signature items -> fprintf ppf "@[<v>%a@]@." print_items items
| Ophr_exception (exn, outv) -> print_out_exception ppf exn outv
let out_phrase = ref print_out_phrase
| null | https://raw.githubusercontent.com/rescript-association/genType/c44251e969fb10d27a38d2bdeff6a5f4d778594f/src/compiler-libs-406/oprint.ml | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Values
Escape only C0 control characters (bytes <= 0x1F), DEL(0x7F), '\\' and '"'
let the user dynamically choose if strings should be escaped:
"String.create"
Types
Class types
Gather together the extension constructors
TODO: in general, we should print bs attributes, some attributes like
bs.splice does need it
Phrases
Gather together extension constructors | Projet Cristal , INRIA Rocquencourt
Copyright 2002 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
open Format
open Outcometree
exception Ellipsis
let cautious f ppf arg =
try f ppf arg with
Ellipsis -> fprintf ppf "..."
let out_ident = ref pp_print_string
let map_primitive_name = ref (fun x -> x)
let print_lident ppf = function
| "::" -> !out_ident ppf "(::)"
| s -> !out_ident ppf s
let rec print_ident ppf =
function
Oide_ident s -> print_lident ppf s
| Oide_dot (id, s) ->
print_ident ppf id; pp_print_char ppf '.'; print_lident ppf s
| Oide_apply (id1, id2) ->
fprintf ppf "%a(%a)" print_ident id1 print_ident id2
let parenthesized_ident name =
(List.mem name ["or"; "mod"; "land"; "lor"; "lxor"; "lsl"; "lsr"; "asr"])
||
(match name.[0] with
'a'..'z' | 'A'..'Z' | '\223'..'\246' | '\248'..'\255' | '_' ->
false
| _ -> true)
let value_ident ppf name =
if parenthesized_ident name then
fprintf ppf "( %s )" name
else
pp_print_string ppf name
let valid_float_lexeme s =
let l = String.length s in
let rec loop i =
if i >= l then s ^ "." else
match s.[i] with
| '0' .. '9' | '-' -> loop (i+1)
| _ -> s
in loop 0
let float_repres f =
match classify_float f with
FP_nan -> "nan"
| FP_infinite ->
if f < 0.0 then "neg_infinity" else "infinity"
| _ ->
let float_val =
let s1 = Printf.sprintf "%.12g" f in
if f = float_of_string s1 then s1 else
let s2 = Printf.sprintf "%.15g" f in
if f = float_of_string s2 then s2 else
Printf.sprintf "%.18g" f
in valid_float_lexeme float_val
let parenthesize_if_neg ppf fmt v isneg =
if isneg then pp_print_char ppf '(';
fprintf ppf fmt v;
if isneg then pp_print_char ppf ')'
let escape_string s =
let n = ref 0 in
for i = 0 to String.length s - 1 do
n := !n +
(match String.unsafe_get s i with
| '\"' | '\\' | '\n' | '\t' | '\r' | '\b' -> 2
| '\x00' .. '\x1F'
| '\x7F' -> 4
| _ -> 1)
done;
if !n = String.length s then s else begin
let s' = Bytes.create !n in
n := 0;
for i = 0 to String.length s - 1 do
begin match String.unsafe_get s i with
| ('\"' | '\\') as c ->
Bytes.unsafe_set s' !n '\\'; incr n; Bytes.unsafe_set s' !n c
| '\n' ->
Bytes.unsafe_set s' !n '\\'; incr n; Bytes.unsafe_set s' !n 'n'
| '\t' ->
Bytes.unsafe_set s' !n '\\'; incr n; Bytes.unsafe_set s' !n 't'
| '\r' ->
Bytes.unsafe_set s' !n '\\'; incr n; Bytes.unsafe_set s' !n 'r'
| '\b' ->
Bytes.unsafe_set s' !n '\\'; incr n; Bytes.unsafe_set s' !n 'b'
| '\x00' .. '\x1F' | '\x7F' as c ->
let a = Char.code c in
Bytes.unsafe_set s' !n '\\';
incr n;
Bytes.unsafe_set s' !n (Char.chr (48 + a / 100));
incr n;
Bytes.unsafe_set s' !n (Char.chr (48 + (a / 10) mod 10));
incr n;
Bytes.unsafe_set s' !n (Char.chr (48 + a mod 10));
| c -> Bytes.unsafe_set s' !n c
end;
incr n
done;
Bytes.to_string s'
end
let print_out_string ppf s =
let not_escaped =
match Sys.getenv_opt "OCAMLTOP_UTF_8" with
| None -> true
| Some x ->
match bool_of_string_opt x with
| None -> true
| Some f -> f in
if not_escaped then
fprintf ppf "\"%s\"" (escape_string s)
else
fprintf ppf "%S" s
let print_out_value ppf tree =
let rec print_tree_1 ppf =
function
| Oval_constr (name, [param]) ->
fprintf ppf "@[<1>%a@ %a@]" print_ident name print_constr_param param
| Oval_constr (name, (_ :: _ as params)) ->
fprintf ppf "@[<1>%a@ (%a)@]" print_ident name
(print_tree_list print_tree_1 ",") params
| Oval_variant (name, Some param) ->
fprintf ppf "@[<2>`%s@ %a@]" name print_constr_param param
| tree -> print_simple_tree ppf tree
and print_constr_param ppf = function
| Oval_int i -> parenthesize_if_neg ppf "%i" i (i < 0)
| Oval_int32 i -> parenthesize_if_neg ppf "%lil" i (i < 0l)
| Oval_int64 i -> parenthesize_if_neg ppf "%LiL" i (i < 0L)
| Oval_nativeint i -> parenthesize_if_neg ppf "%nin" i (i < 0n)
| Oval_float f -> parenthesize_if_neg ppf "%s" (float_repres f) (f < 0.0)
| Oval_string (_,_, Ostr_bytes) as tree ->
pp_print_char ppf '(';
print_simple_tree ppf tree;
pp_print_char ppf ')';
| tree -> print_simple_tree ppf tree
and print_simple_tree ppf =
function
Oval_int i -> fprintf ppf "%i" i
| Oval_int32 i -> fprintf ppf "%lil" i
| Oval_int64 i -> fprintf ppf "%LiL" i
| Oval_nativeint i -> fprintf ppf "%nin" i
| Oval_float f -> pp_print_string ppf (float_repres f)
| Oval_char c -> fprintf ppf "%C" c
| Oval_string (s, maxlen, kind) ->
begin try
let len = String.length s in
let s = if len > maxlen then String.sub s 0 maxlen else s in
begin match kind with
| Ostr_bytes -> fprintf ppf "Bytes.of_string %S" s
| Ostr_string -> print_out_string ppf s
end;
(if len > maxlen then
fprintf ppf
"... (* string length %d; truncated *)" len
)
with
end
| Oval_list tl ->
fprintf ppf "@[<1>[%a]@]" (print_tree_list print_tree_1 ";") tl
| Oval_array tl ->
fprintf ppf "@[<2>[|%a|]@]" (print_tree_list print_tree_1 ";") tl
| Oval_constr (name, []) -> print_ident ppf name
| Oval_variant (name, None) -> fprintf ppf "`%s" name
| Oval_stuff s -> pp_print_string ppf s
| Oval_record fel ->
fprintf ppf "@[<1>{%a}@]" (cautious (print_fields true)) fel
| Oval_ellipsis -> raise Ellipsis
| Oval_printer f -> f ppf
| Oval_tuple tree_list ->
fprintf ppf "@[<1>(%a)@]" (print_tree_list print_tree_1 ",") tree_list
| tree -> fprintf ppf "@[<1>(%a)@]" (cautious print_tree_1) tree
and print_fields first ppf =
function
[] -> ()
| (name, tree) :: fields ->
if not first then fprintf ppf ";@ ";
fprintf ppf "@[<1>%a@ =@ %a@]" print_ident name (cautious print_tree_1)
tree;
print_fields false ppf fields
and print_tree_list print_item sep ppf tree_list =
let rec print_list first ppf =
function
[] -> ()
| tree :: tree_list ->
if not first then fprintf ppf "%s@ " sep;
print_item ppf tree;
print_list false ppf tree_list
in
cautious (print_list true) ppf tree_list
in
cautious print_tree_1 ppf tree
let out_value = ref print_out_value
let rec print_list_init pr sep ppf =
function
[] -> ()
| a :: l -> sep ppf; pr ppf a; print_list_init pr sep ppf l
let rec print_list pr sep ppf =
function
[] -> ()
| [a] -> pr ppf a
| a :: l -> pr ppf a; sep ppf; print_list pr sep ppf l
let pr_present =
print_list (fun ppf s -> fprintf ppf "`%s" s) (fun ppf -> fprintf ppf "@ ")
let pr_vars =
print_list (fun ppf s -> fprintf ppf "'%s" s) (fun ppf -> fprintf ppf "@ ")
let rec print_out_type ppf =
function
| Otyp_alias (ty, s) ->
fprintf ppf "@[%a@ as '%s@]" print_out_type ty s
| Otyp_poly (sl, ty) ->
fprintf ppf "@[<hov 2>%a.@ %a@]"
pr_vars sl
print_out_type ty
| ty ->
print_out_type_1 ppf ty
and print_out_type_1 ppf =
function
Otyp_arrow (lab, ty1, ty2) ->
pp_open_box ppf 0;
if lab <> "" then (pp_print_string ppf lab; pp_print_char ppf ':');
print_out_type_2 ppf ty1;
pp_print_string ppf " ->";
pp_print_space ppf ();
print_out_type_1 ppf ty2;
pp_close_box ppf ()
| ty -> print_out_type_2 ppf ty
and print_out_type_2 ppf =
function
Otyp_tuple tyl ->
fprintf ppf "@[<0>%a@]" (print_typlist print_simple_out_type " *") tyl
| ty -> print_simple_out_type ppf ty
and print_simple_out_type ppf =
function
Otyp_class (ng, id, tyl) ->
fprintf ppf "@[%a%s#%a@]" print_typargs tyl (if ng then "_" else "")
print_ident id
| Otyp_constr (Oide_dot (Oide_dot (Oide_ident "Js", "Fn"), name ),
[tyl])
->
let res =
if name = "arity0" then
Otyp_arrow ("", Otyp_constr (Oide_ident "unit", []),tyl)
else tyl
in
fprintf ppf "@[<0>(%a@ [@bs])@]" print_out_type_1 res
| Otyp_constr (Oide_dot (Oide_dot (Oide_ident "Js_OO", "Meth" ),name),
[tyl])
->
let res =
if name = "arity0" then
Otyp_arrow ("", Otyp_constr (Oide_ident "unit", []),tyl)
else tyl
in
fprintf ppf "@[<0>(%a@ [@bs.meth])@]" print_out_type_1 res
| Otyp_constr (Oide_dot (Oide_dot (Oide_ident "Js_OO", "Callback" ), _),
[tyl])
->
fprintf ppf "@[<0>(%a@ [@bs.this])@]" print_out_type_1 tyl
| Otyp_constr (id, tyl) ->
pp_open_box ppf 0;
print_typargs ppf tyl;
print_ident ppf id;
pp_close_box ppf ()
| Otyp_object (fields, rest) ->
fprintf ppf "@[<2>< %a >@]" (print_fields rest) fields
| Otyp_stuff s -> pp_print_string ppf s
| Otyp_var (ng, s) -> fprintf ppf "'%s%s" (if ng then "_" else "") s
| Otyp_variant (non_gen, row_fields, closed, tags) ->
let print_present ppf =
function
None | Some [] -> ()
| Some l -> fprintf ppf "@;<1 -2>> @[<hov>%a@]" pr_present l
in
let print_fields ppf =
function
Ovar_fields fields ->
print_list print_row_field (fun ppf -> fprintf ppf "@;<1 -2>| ")
ppf fields
| Ovar_typ typ ->
print_simple_out_type ppf typ
in
fprintf ppf "%s[%s@[<hv>@[<hv>%a@]%a ]@]" (if non_gen then "_" else "")
(if closed then if tags = None then " " else "< "
else if tags = None then "> " else "? ")
print_fields row_fields
print_present tags
| Otyp_alias _ | Otyp_poly _ | Otyp_arrow _ | Otyp_tuple _ as ty ->
pp_open_box ppf 1;
pp_print_char ppf '(';
print_out_type ppf ty;
pp_print_char ppf ')';
pp_close_box ppf ()
| Otyp_abstract | Otyp_open
| Otyp_sum _ | Otyp_manifest (_, _) -> ()
| Otyp_record lbls -> print_record_decl ppf lbls
| Otyp_module (p, n, tyl) ->
fprintf ppf "@[<1>(module %s" p;
let first = ref true in
List.iter2
(fun s t ->
let sep = if !first then (first := false; "with") else "and" in
fprintf ppf " %s type %s = %a" sep s print_out_type t
)
n tyl;
fprintf ppf ")@]"
| Otyp_attribute (t, attr) ->
fprintf ppf "@[<1>(%a [@@%s])@]" print_out_type t attr.oattr_name
and print_record_decl ppf lbls =
fprintf ppf "{%a@;<1 -2>}"
(print_list_init print_out_label (fun ppf -> fprintf ppf "@ ")) lbls
and print_fields rest ppf =
function
[] ->
begin match rest with
Some non_gen -> fprintf ppf "%s.." (if non_gen then "_" else "")
| None -> ()
end
| [s, t] ->
fprintf ppf "%s : %a" s print_out_type t;
begin match rest with
Some _ -> fprintf ppf ";@ "
| None -> ()
end;
print_fields rest ppf []
| (s, t) :: l ->
fprintf ppf "%s : %a;@ %a" s print_out_type t (print_fields rest) l
and print_row_field ppf (l, opt_amp, tyl) =
let pr_of ppf =
if opt_amp then fprintf ppf " of@ &@ "
else if tyl <> [] then fprintf ppf " of@ "
else fprintf ppf ""
in
fprintf ppf "@[<hv 2>`%s%t%a@]" l pr_of (print_typlist print_out_type " &")
tyl
and print_typlist print_elem sep ppf =
function
[] -> ()
| [ty] -> print_elem ppf ty
| ty :: tyl ->
print_elem ppf ty;
pp_print_string ppf sep;
pp_print_space ppf ();
print_typlist print_elem sep ppf tyl
and print_typargs ppf =
function
[] -> ()
| [ty1] -> print_simple_out_type ppf ty1; pp_print_space ppf ()
| tyl ->
pp_open_box ppf 1;
pp_print_char ppf '(';
print_typlist print_out_type "," ppf tyl;
pp_print_char ppf ')';
pp_close_box ppf ();
pp_print_space ppf ()
and print_out_label ppf (name, mut, arg) =
fprintf ppf "@[<2>%s%s :@ %a@];" (if mut then "mutable " else "") name
print_out_type arg
let out_type = ref print_out_type
let type_parameter ppf (ty, (co, cn)) =
fprintf ppf "%s%s"
(if not cn then "+" else if not co then "-" else "")
(if ty = "_" then ty else "'"^ty)
let print_out_class_params ppf =
function
[] -> ()
| tyl ->
fprintf ppf "@[<1>[%a]@]@ "
(print_list type_parameter (fun ppf -> fprintf ppf ", "))
tyl
let rec print_out_class_type ppf =
function
Octy_constr (id, tyl) ->
let pr_tyl ppf =
function
[] -> ()
| tyl ->
fprintf ppf "@[<1>[%a]@]@ " (print_typlist !out_type ",") tyl
in
fprintf ppf "@[%a%a@]" pr_tyl tyl print_ident id
| Octy_arrow (lab, ty, cty) ->
fprintf ppf "@[%s%a ->@ %a@]" (if lab <> "" then lab ^ ":" else "")
print_out_type_2 ty print_out_class_type cty
| Octy_signature (self_ty, csil) ->
let pr_param ppf =
function
Some ty -> fprintf ppf "@ @[(%a)@]" !out_type ty
| None -> ()
in
fprintf ppf "@[<hv 2>@[<2>object%a@]@ %a@;<1 -2>end@]" pr_param self_ty
(print_list print_out_class_sig_item (fun ppf -> fprintf ppf "@ "))
csil
and print_out_class_sig_item ppf =
function
Ocsg_constraint (ty1, ty2) ->
fprintf ppf "@[<2>constraint %a =@ %a@]" !out_type ty1
!out_type ty2
| Ocsg_method (name, priv, virt, ty) ->
fprintf ppf "@[<2>method %s%s%s :@ %a@]"
(if priv then "private " else "") (if virt then "virtual " else "")
name !out_type ty
| Ocsg_value (name, mut, vr, ty) ->
fprintf ppf "@[<2>val %s%s%s :@ %a@]"
(if mut then "mutable " else "")
(if vr then "virtual " else "")
name !out_type ty
let out_class_type = ref print_out_class_type
Signature
let out_module_type = ref (fun _ -> failwith "Oprint.out_module_type")
let out_sig_item = ref (fun _ -> failwith "Oprint.out_sig_item")
let out_signature = ref (fun _ -> failwith "Oprint.out_signature")
let out_type_extension = ref (fun _ -> failwith "Oprint.out_type_extension")
let rec print_out_functor funct ppf =
function
Omty_functor (_, None, mty_res) ->
if funct then fprintf ppf "() %a" (print_out_functor true) mty_res
else fprintf ppf "functor@ () %a" (print_out_functor true) mty_res
| Omty_functor (name, Some mty_arg, mty_res) -> begin
match name, funct with
| "_", true ->
fprintf ppf "->@ %a ->@ %a"
print_out_module_type mty_arg (print_out_functor false) mty_res
| "_", false ->
fprintf ppf "%a ->@ %a"
print_out_module_type mty_arg (print_out_functor false) mty_res
| name, true ->
fprintf ppf "(%s : %a) %a" name
print_out_module_type mty_arg (print_out_functor true) mty_res
| name, false ->
fprintf ppf "functor@ (%s : %a) %a" name
print_out_module_type mty_arg (print_out_functor true) mty_res
end
| m ->
if funct then fprintf ppf "->@ %a" print_out_module_type m
else print_out_module_type ppf m
and print_out_module_type ppf =
function
Omty_abstract -> ()
| Omty_functor _ as t ->
fprintf ppf "@[<2>%a@]" (print_out_functor false) t
| Omty_ident id -> fprintf ppf "%a" print_ident id
| Omty_signature sg ->
fprintf ppf "@[<hv 2>sig@ %a@;<1 -2>end@]" !out_signature sg
| Omty_alias id -> fprintf ppf "(module %a)" print_ident id
and print_out_signature ppf =
function
[] -> ()
| [item] -> !out_sig_item ppf item
| Osig_typext(ext, Oext_first) :: items ->
let rec gather_extensions acc items =
match items with
Osig_typext(ext, Oext_next) :: items ->
gather_extensions
((ext.oext_name, ext.oext_args, ext.oext_ret_type) :: acc)
items
| _ -> (List.rev acc, items)
in
let exts, items =
gather_extensions
[(ext.oext_name, ext.oext_args, ext.oext_ret_type)]
items
in
let te =
{ otyext_name = ext.oext_type_name;
otyext_params = ext.oext_type_params;
otyext_constructors = exts;
otyext_private = ext.oext_private }
in
fprintf ppf "%a@ %a" !out_type_extension te print_out_signature items
| item :: items ->
fprintf ppf "%a@ %a" !out_sig_item item print_out_signature items
and print_out_sig_item ppf =
function
Osig_class (vir_flag, name, params, clt, rs) ->
fprintf ppf "@[<2>%s%s@ %a%s@ :@ %a@]"
(if rs = Orec_next then "and" else "class")
(if vir_flag then " virtual" else "") print_out_class_params params
name !out_class_type clt
| Osig_class_type (vir_flag, name, params, clt, rs) ->
fprintf ppf "@[<2>%s%s@ %a%s@ =@ %a@]"
(if rs = Orec_next then "and" else "class type")
(if vir_flag then " virtual" else "") print_out_class_params params
name !out_class_type clt
| Osig_typext (ext, Oext_exception) ->
fprintf ppf "@[<2>exception %a@]"
print_out_constr (ext.oext_name, ext.oext_args, ext.oext_ret_type)
| Osig_typext (ext, _es) ->
print_out_extension_constructor ppf ext
| Osig_modtype (name, Omty_abstract) ->
fprintf ppf "@[<2>module type %s@]" name
| Osig_modtype (name, mty) ->
fprintf ppf "@[<2>module type %s =@ %a@]" name !out_module_type mty
| Osig_module (name, Omty_alias id, _) ->
fprintf ppf "@[<2>module %s =@ %a@]" name print_ident id
| Osig_module (name, mty, rs) ->
fprintf ppf "@[<2>%s %s :@ %a@]"
(match rs with Orec_not -> "module"
| Orec_first -> "module rec"
| Orec_next -> "and")
name !out_module_type mty
| Osig_type(td, rs) ->
print_out_type_decl
(match rs with
| Orec_not -> "type nonrec"
| Orec_first -> "type"
| Orec_next -> "and")
ppf td
| Osig_value vd ->
let kwd = if vd.oval_prims = [] then "val" else "external" in
let pr_prims ppf =
function
[] -> ()
| s :: sl ->
fprintf ppf "@ = \"%s\"" s;
List.iter (fun s ->
fprintf ppf "@ \"%s\"" (!map_primitive_name s)
) sl
in
fprintf ppf "@[<2>%s %a :@ %a%a%a@]" kwd value_ident vd.oval_name
!out_type vd.oval_type pr_prims vd.oval_prims
(fun ppf -> List.iter (fun a -> fprintf ppf "@ [@@@@%s]" a.oattr_name))
vd.oval_attributes
| Osig_ellipsis ->
fprintf ppf "..."
and print_out_type_decl kwd ppf td =
let print_constraints ppf =
List.iter
(fun (ty1, ty2) ->
fprintf ppf "@ @[<2>constraint %a =@ %a@]" !out_type ty1
!out_type ty2)
td.otype_cstrs
in
let type_defined ppf =
match td.otype_params with
[] -> pp_print_string ppf td.otype_name
| [param] -> fprintf ppf "@[%a@ %s@]" type_parameter param td.otype_name
| _ ->
fprintf ppf "@[(@[%a)@]@ %s@]"
(print_list type_parameter (fun ppf -> fprintf ppf ",@ "))
td.otype_params
td.otype_name
in
let print_manifest ppf =
function
Otyp_manifest (ty, _) -> fprintf ppf " =@ %a" !out_type ty
| _ -> ()
in
let print_name_params ppf =
fprintf ppf "%s %t%a" kwd type_defined print_manifest td.otype_type
in
let ty =
match td.otype_type with
Otyp_manifest (_, ty) -> ty
| _ -> td.otype_type
in
let print_private ppf = function
Asttypes.Private -> fprintf ppf " private"
| Asttypes.Public -> ()
in
let print_immediate ppf =
if td.otype_immediate then fprintf ppf " [%@%@immediate]" else ()
in
let print_unboxed ppf =
if td.otype_unboxed then fprintf ppf " [%@%@unboxed]" else ()
in
let print_out_tkind ppf = function
| Otyp_abstract -> ()
| Otyp_record lbls ->
fprintf ppf " =%a %a"
print_private td.otype_private
print_record_decl lbls
| Otyp_sum constrs ->
fprintf ppf " =%a@;<1 2>%a"
print_private td.otype_private
(print_list print_out_constr (fun ppf -> fprintf ppf "@ | ")) constrs
| Otyp_open ->
fprintf ppf " =%a .."
print_private td.otype_private
| ty ->
fprintf ppf " =%a@;<1 2>%a"
print_private td.otype_private
!out_type ty
in
fprintf ppf "@[<2>@[<hv 2>%t%a@]%t%t%t@]"
print_name_params
print_out_tkind ty
print_constraints
print_immediate
print_unboxed
and print_out_constr ppf (name, tyl,ret_type_opt) =
let name =
match name with
# 7200
| s -> s
in
match ret_type_opt with
| None ->
begin match tyl with
| [] ->
pp_print_string ppf name
| _ ->
fprintf ppf "@[<2>%s of@ %a@]" name
(print_typlist print_simple_out_type " *") tyl
end
| Some ret_type ->
begin match tyl with
| [] ->
fprintf ppf "@[<2>%s :@ %a@]" name print_simple_out_type ret_type
| _ ->
fprintf ppf "@[<2>%s :@ %a -> %a@]" name
(print_typlist print_simple_out_type " *")
tyl print_simple_out_type ret_type
end
and print_out_extension_constructor ppf ext =
let print_extended_type ppf =
let print_type_parameter ppf ty =
fprintf ppf "%s"
(if ty = "_" then ty else "'"^ty)
in
match ext.oext_type_params with
[] -> fprintf ppf "%s" ext.oext_type_name
| [ty_param] ->
fprintf ppf "@[%a@ %s@]"
print_type_parameter
ty_param
ext.oext_type_name
| _ ->
fprintf ppf "@[(@[%a)@]@ %s@]"
(print_list print_type_parameter (fun ppf -> fprintf ppf ",@ "))
ext.oext_type_params
ext.oext_type_name
in
fprintf ppf "@[<hv 2>type %t +=%s@;<1 2>%a@]"
print_extended_type
(if ext.oext_private = Asttypes.Private then " private" else "")
print_out_constr (ext.oext_name, ext.oext_args, ext.oext_ret_type)
and print_out_type_extension ppf te =
let print_extended_type ppf =
let print_type_parameter ppf ty =
fprintf ppf "%s"
(if ty = "_" then ty else "'"^ty)
in
match te.otyext_params with
[] -> fprintf ppf "%s" te.otyext_name
| [param] ->
fprintf ppf "@[%a@ %s@]"
print_type_parameter param
te.otyext_name
| _ ->
fprintf ppf "@[(@[%a)@]@ %s@]"
(print_list print_type_parameter (fun ppf -> fprintf ppf ",@ "))
te.otyext_params
te.otyext_name
in
fprintf ppf "@[<hv 2>type %t +=%s@;<1 2>%a@]"
print_extended_type
(if te.otyext_private = Asttypes.Private then " private" else "")
(print_list print_out_constr (fun ppf -> fprintf ppf "@ | "))
te.otyext_constructors
let _ = out_module_type := print_out_module_type
let _ = out_signature := print_out_signature
let _ = out_sig_item := print_out_sig_item
let _ = out_type_extension := print_out_type_extension
let print_out_exception ppf exn outv =
match exn with
Sys.Break -> fprintf ppf "Interrupted.@."
| Out_of_memory -> fprintf ppf "Out of memory during evaluation.@."
| Stack_overflow ->
fprintf ppf "Stack overflow during evaluation (looping recursion?).@."
| _ -> fprintf ppf "@[Exception:@ %a.@]@." !out_value outv
let rec print_items ppf =
function
[] -> ()
| (Osig_typext(ext, Oext_first), None) :: items ->
let rec gather_extensions acc items =
match items with
(Osig_typext(ext, Oext_next), None) :: items ->
gather_extensions
((ext.oext_name, ext.oext_args, ext.oext_ret_type) :: acc)
items
| _ -> (List.rev acc, items)
in
let exts, items =
gather_extensions
[(ext.oext_name, ext.oext_args, ext.oext_ret_type)]
items
in
let te =
{ otyext_name = ext.oext_type_name;
otyext_params = ext.oext_type_params;
otyext_constructors = exts;
otyext_private = ext.oext_private }
in
fprintf ppf "@[%a@]" !out_type_extension te;
if items <> [] then fprintf ppf "@ %a" print_items items
| (tree, valopt) :: items ->
begin match valopt with
Some v ->
fprintf ppf "@[<2>%a =@ %a@]" !out_sig_item tree
!out_value v
| None -> fprintf ppf "@[%a@]" !out_sig_item tree
end;
if items <> [] then fprintf ppf "@ %a" print_items items
let print_out_phrase ppf =
function
Ophr_eval (outv, ty) ->
fprintf ppf "@[- : %a@ =@ %a@]@." !out_type ty !out_value outv
| Ophr_signature [] -> ()
| Ophr_signature items -> fprintf ppf "@[<v>%a@]@." print_items items
| Ophr_exception (exn, outv) -> print_out_exception ppf exn outv
let out_phrase = ref print_out_phrase
|
8ea3521600e09028d3425de7830b454248d1dd2b9944f6e9ca574ca9d767eb7b | hbr/fmlib | source_extractor.ml | open Fmlib_pretty
module Pretty = Fmlib_pretty.Print
type t = {
range: Position.range;
extra: int;
number_width: int;
pos: Position.t;
line: string;
doc: Print.doc;
}
let of_range
(extra: int)
(range: Position.range)
: t
=
assert (0 <= extra);
assert (Position.is_valid_range range);
let number_width =
(Position.line (snd range) + 1)
|> string_of_int
|> String.length
in
{
range;
extra;
number_width;
pos = Position.start;
line = "";
doc = Pretty.empty;
}
let of_position
(extra: int)
(pos: Position.t)
: t
=
of_range extra (pos, pos)
let needs_more (ext: t): bool =
let _, p2 = ext.range in
Position.(line ext.pos <= line p2)
let is_in_range (p: t): bool =
let open Position in
let pos1, pos2 = p.range in
line pos1 <= line p.pos + p.extra
&&
line p.pos <= line pos2
let is_start_line (p: t): bool =
let open Position in
let pos1, pos2 = p.range in
line p.pos = line pos1
&&
line pos1 < line pos2
let is_end_line (p: t): bool =
let open Position in
let pos1, pos2 = p.range in
line pos1 < line p.pos
&&
line p.pos = line pos2
let is_one_line (p: t): bool =
let open Position in
let pos1, pos2 = p.range in
line p.pos = line pos1
&&
line pos1 = line pos2
let source_separator: string =
" | "
let source_indent (p: t): int =
p.number_width
+
String.length source_separator
let source_line (p: t): Pretty.doc =
let str =
Position.line p.pos + 1 |> string_of_int
in
let n = p.number_width - String.length str
in
assert (0 <= n);
Pretty.(
fill n ' '
<+>
text str
<+>
text source_separator
<+>
text p.line
<+>
cut
)
let start_line_marker (p: t): Pretty.doc =
let col = Position.column (fst p.range) in
Pretty.(
fill (source_indent p + col) ' '
<+>
char 'v'
<+>
fill 10 '-'
<+>
cut
)
let end_line_marker (p: t): Pretty.doc =
let col = Position.column (snd p.range)
and ind = source_indent p
in
Pretty.(
fill ind ' '
<+>
fill (col - ind) '-'
<+>
char '^'
<+>
cut
)
let one_line_marker (is_last: bool) (p: t): Pretty.doc =
let open Position in
let pos1, pos2 = p.range in
let c1 = Position.column pos1
and c2 = Position.column pos2
in
assert (line pos1 = line pos2);
assert (c1 <= c2);
let len = c2 - c1 in
let len = max len 1 in
let open Pretty
in
let annotation =
if len = 1 && c1 < String.length p.line
then
let ch = p.line.[c1] in
if ch < ' ' || Char.chr 127 <= ch then
text (" nonprintable '" ^ Char.escaped ch ^ "'")
else
empty
else if len = 1 && c1 = String.length p.line then
if is_last then
text " end of input"
else
text " end of line"
else
Pretty.empty
in
Pretty.(
fill (source_indent p + c1) ' '
<+>
fill len '^'
<+>
annotation
<+>
cut
)
let receive_char (is_last: bool) (c: char) (p: t): t =
let pos = Position.next c p.pos in
if c <> '\n' then
{
p with
pos;
line = p.line ^ String.make 1 c;
}
else if is_in_range p then
let open Pretty in
let doc =
if is_start_line p then
start_line_marker p <+> source_line p
else if is_one_line p then
source_line p <+> one_line_marker is_last p
else if is_end_line p then
source_line p <+> end_line_marker p
else
source_line p
in
{
p with
pos;
line = "";
doc = p.doc <+> doc;
}
else
{
p with
pos;
line = "";
}
let put: char -> t -> t =
receive_char false
let put_end: t -> t =
receive_char true '\n'
let document (p: t): Pretty.doc =
p.doc
let run_on_string = Run_on.string needs_more put put_end
let run_on_channel = Run_on.channel needs_more put put_end
| null | https://raw.githubusercontent.com/hbr/fmlib/0c7b923605a211e9c706d427fb33c5ba40248321/src/parse/source_extractor.ml | ocaml | open Fmlib_pretty
module Pretty = Fmlib_pretty.Print
type t = {
range: Position.range;
extra: int;
number_width: int;
pos: Position.t;
line: string;
doc: Print.doc;
}
let of_range
(extra: int)
(range: Position.range)
: t
=
assert (0 <= extra);
assert (Position.is_valid_range range);
let number_width =
(Position.line (snd range) + 1)
|> string_of_int
|> String.length
in
{
range;
extra;
number_width;
pos = Position.start;
line = "";
doc = Pretty.empty;
}
let of_position
(extra: int)
(pos: Position.t)
: t
=
of_range extra (pos, pos)
let needs_more (ext: t): bool =
let _, p2 = ext.range in
Position.(line ext.pos <= line p2)
let is_in_range (p: t): bool =
let open Position in
let pos1, pos2 = p.range in
line pos1 <= line p.pos + p.extra
&&
line p.pos <= line pos2
let is_start_line (p: t): bool =
let open Position in
let pos1, pos2 = p.range in
line p.pos = line pos1
&&
line pos1 < line pos2
let is_end_line (p: t): bool =
let open Position in
let pos1, pos2 = p.range in
line pos1 < line p.pos
&&
line p.pos = line pos2
let is_one_line (p: t): bool =
let open Position in
let pos1, pos2 = p.range in
line p.pos = line pos1
&&
line pos1 = line pos2
let source_separator: string =
" | "
let source_indent (p: t): int =
p.number_width
+
String.length source_separator
let source_line (p: t): Pretty.doc =
let str =
Position.line p.pos + 1 |> string_of_int
in
let n = p.number_width - String.length str
in
assert (0 <= n);
Pretty.(
fill n ' '
<+>
text str
<+>
text source_separator
<+>
text p.line
<+>
cut
)
let start_line_marker (p: t): Pretty.doc =
let col = Position.column (fst p.range) in
Pretty.(
fill (source_indent p + col) ' '
<+>
char 'v'
<+>
fill 10 '-'
<+>
cut
)
let end_line_marker (p: t): Pretty.doc =
let col = Position.column (snd p.range)
and ind = source_indent p
in
Pretty.(
fill ind ' '
<+>
fill (col - ind) '-'
<+>
char '^'
<+>
cut
)
let one_line_marker (is_last: bool) (p: t): Pretty.doc =
let open Position in
let pos1, pos2 = p.range in
let c1 = Position.column pos1
and c2 = Position.column pos2
in
assert (line pos1 = line pos2);
assert (c1 <= c2);
let len = c2 - c1 in
let len = max len 1 in
let open Pretty
in
let annotation =
if len = 1 && c1 < String.length p.line
then
let ch = p.line.[c1] in
if ch < ' ' || Char.chr 127 <= ch then
text (" nonprintable '" ^ Char.escaped ch ^ "'")
else
empty
else if len = 1 && c1 = String.length p.line then
if is_last then
text " end of input"
else
text " end of line"
else
Pretty.empty
in
Pretty.(
fill (source_indent p + c1) ' '
<+>
fill len '^'
<+>
annotation
<+>
cut
)
let receive_char (is_last: bool) (c: char) (p: t): t =
let pos = Position.next c p.pos in
if c <> '\n' then
{
p with
pos;
line = p.line ^ String.make 1 c;
}
else if is_in_range p then
let open Pretty in
let doc =
if is_start_line p then
start_line_marker p <+> source_line p
else if is_one_line p then
source_line p <+> one_line_marker is_last p
else if is_end_line p then
source_line p <+> end_line_marker p
else
source_line p
in
{
p with
pos;
line = "";
doc = p.doc <+> doc;
}
else
{
p with
pos;
line = "";
}
let put: char -> t -> t =
receive_char false
let put_end: t -> t =
receive_char true '\n'
let document (p: t): Pretty.doc =
p.doc
let run_on_string = Run_on.string needs_more put put_end
let run_on_channel = Run_on.channel needs_more put put_end
| |
b275420f14d5df5bfe0fadc4e01ab17ed40c968d37b1494a7c939004d4e8f43c | abcdw/rde | ssh.scm | ;;; rde --- Reproducible development environment.
;;;
Copyright © 2021 , 2022 < >
Copyright © 2022 < >
;;;
;;; This file is part of rde.
;;;
;;; rde is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 3 of the License , or ( at
;;; your option) any later version.
;;;
;;; rde is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
;;; along with rde. If not, see </>.
(define-module (rde features ssh)
#:use-module (rde features)
#:use-module (rde features predicates)
#:use-module (gnu home-services ssh)
#:use-module (gnu home services)
#:use-module (gnu home services shepherd)
#:use-module (gnu services)
#:use-module (gnu packages)
#:use-module (gnu packages ssh)
#:use-module (guix gexp)
#:export (feature-ssh)
#:re-export (home-ssh-configuration
ssh-host
ssh-match))
(define* (feature-ssh
#:key
(ssh openssh)
(ssh-configuration (home-ssh-configuration))
(ssh-agent? #f))
"Setup and configure ssh and ssh-agent."
(ensure-pred file-like? ssh)
(ensure-pred home-ssh-configuration? ssh-configuration)
(ensure-pred boolean? ssh-agent?)
(define (ssh-home-services config)
"Returns home services related to SSH."
(append
(if ssh-agent?
(let* ((sock "ssh-agent.sock"))
(list
(simple-service
'start-ssh-agent-at-startup
home-shepherd-service-type
(list
(shepherd-service
(documentation "Run the ssh-agent at startup.")
(provision '(ssh-agent))
(requirement '())
(start
#~(make-forkexec-constructor
(list (string-append
#$(get-value 'ssh config)
"/bin/ssh-agent")
"-d" "-a"
(string-append (getenv "XDG_RUNTIME_DIR") "/" #$sock))
#:log-file (string-append
(or (getenv "XDG_LOG_HOME")
(format #f "~a/.local/var/log"
(getenv "HOME")))
"/ssh-agent.log")))
(stop #~(make-kill-destructor)))))
(simple-service
'ssh-auth-socket-env-export
home-environment-variables-service-type
`(("SSH_AUTH_SOCK" . ,(string-append "$XDG_RUNTIME_DIR/" sock))))))
'())
(list (service home-ssh-service-type
ssh-configuration))))
(feature
(name 'ssh)
(values `((ssh . ,ssh)
,@(if ssh-agent?
'((ssh-agent? . #t))
'())))
(home-services-getter ssh-home-services)))
| null | https://raw.githubusercontent.com/abcdw/rde/5b8605f421d0b8a9569e43cb6f7e651e7a8f7218/src/rde/features/ssh.scm | scheme | rde --- Reproducible development environment.
This file is part of rde.
rde is free software; you can redistribute it and/or modify it
either version 3 of the License , or ( at
your option) any later version.
rde is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with rde. If not, see </>. | Copyright © 2021 , 2022 < >
Copyright © 2022 < >
under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
(define-module (rde features ssh)
#:use-module (rde features)
#:use-module (rde features predicates)
#:use-module (gnu home-services ssh)
#:use-module (gnu home services)
#:use-module (gnu home services shepherd)
#:use-module (gnu services)
#:use-module (gnu packages)
#:use-module (gnu packages ssh)
#:use-module (guix gexp)
#:export (feature-ssh)
#:re-export (home-ssh-configuration
ssh-host
ssh-match))
(define* (feature-ssh
#:key
(ssh openssh)
(ssh-configuration (home-ssh-configuration))
(ssh-agent? #f))
"Setup and configure ssh and ssh-agent."
(ensure-pred file-like? ssh)
(ensure-pred home-ssh-configuration? ssh-configuration)
(ensure-pred boolean? ssh-agent?)
(define (ssh-home-services config)
"Returns home services related to SSH."
(append
(if ssh-agent?
(let* ((sock "ssh-agent.sock"))
(list
(simple-service
'start-ssh-agent-at-startup
home-shepherd-service-type
(list
(shepherd-service
(documentation "Run the ssh-agent at startup.")
(provision '(ssh-agent))
(requirement '())
(start
#~(make-forkexec-constructor
(list (string-append
#$(get-value 'ssh config)
"/bin/ssh-agent")
"-d" "-a"
(string-append (getenv "XDG_RUNTIME_DIR") "/" #$sock))
#:log-file (string-append
(or (getenv "XDG_LOG_HOME")
(format #f "~a/.local/var/log"
(getenv "HOME")))
"/ssh-agent.log")))
(stop #~(make-kill-destructor)))))
(simple-service
'ssh-auth-socket-env-export
home-environment-variables-service-type
`(("SSH_AUTH_SOCK" . ,(string-append "$XDG_RUNTIME_DIR/" sock))))))
'())
(list (service home-ssh-service-type
ssh-configuration))))
(feature
(name 'ssh)
(values `((ssh . ,ssh)
,@(if ssh-agent?
'((ssh-agent? . #t))
'())))
(home-services-getter ssh-home-services)))
|
1b6131966a61cb6ac93f908988960f314470c8d08d8100078f21193fc4f92988 | openbadgefactory/salava | routes.clj | (ns salava.oauth.routes
(:require [clojure.pprint :refer [pprint]]
[clojure.string :as string]
[compojure.api.sweet :refer :all]
[ring.util.http-response :refer :all]
[ring.util.response :refer [redirect]]
[salava.core.layout :as layout]
[schema.core :as s]
[salava.core.util :refer [get-base-path get-full-path get-plugins]]
[salava.oauth.db :as d]
[salava.oauth.facebook :as f]
[salava.oauth.google :as g]
[salava.oauth.linkedin :as l]
[salava.user.db :as u]
[salava.core.helper :refer [dump private?]]
[salava.core.access :as access]
salava.core.restructure))
(defn route-def [ctx]
(routes
(context "/user" []
(layout/main ctx "/oauth/facebook")
(layout/main ctx "/oauth/google")
(layout/main ctx "/oauth/linkedin")
#_(layout/main ctx "/terms")
(GET "/oauth2/authorize" req
:no-doc true
:summary ""
:query-params [response_type :- s/Str
client_id :- s/Str
redirect_uri :- s/Str
state :- s/Str
code_challenge :- s/Str
code_challenge_method :- s/Str]
:current-user current-user
:flash-message flash-message
(let [client (get-in ctx [:config :oauth :client client_id])
redirect-to (str "/user/oauth2/authorize?" (:query-string req))
login-redirect {:value (if (:id current-user) nil redirect-to) :max-age 1200 :http-only true :path "/"}]
(if (and (= response_type "code") (= (:redirect_uri client) redirect_uri) (= code_challenge_method "S256"))
(-> (layout/main-response ctx current-user flash-message nil)
(assoc-in [:cookies "login_redirect"] login-redirect))
(bad-request {:message "400 Bad Request"}))))
(POST "/oauth2/authorize" []
:no-doc true
:summary ""
:form-params [client_id :- s/Str
state :- s/Str
code_challenge :- s/Str]
:auth-rules access/signed
:current-user current-user
(if-let [client (get-in ctx [:config :oauth :client client_id])]
(-> (redirect (str (:redirect_uri client)
"?code=" (d/authorization-code ctx client_id (:id current-user) code_challenge)
"&host=" (get-full-path ctx)
"&state=" state))
(assoc-in [:session :identity] nil))
(bad-request {:message "400 Bad Request"})))
(POST "/oauth2/unauthorize" []
:no-doc true
:summary ""
:form-params [client_id :- s/Str]
:auth-rules access/signed
:current-user current-user
(ok (d/unauthorize-client ctx client_id (:id current-user))))
(POST "/oauth2/token" []
:no-doc true
:summary ""
:form-params [grant_type :- s/Str
client_id :- s/Str
{redirect_uri :- s/Str nil}
{code :- s/Str nil}
{refresh_token :- s/Str nil}
{code_verifier :- s/Str nil}]
(let [e400 (bad-request {:message "400 Bad Request"})
client (get-in ctx [:config :oauth :client client_id])]
(cond
(and (= grant_type "code")
(not (nil? code))
(= (:redirect_uri client) redirect_uri))
(if-let [out (d/new-access-token ctx client_id code code_verifier)] (ok out) e400)
(and (= grant_type "refresh_token")
(not (nil? refresh_token)))
(if-let [out (d/refresh-access-token ctx client_id (string/split refresh_token #"-" 2))] (ok out) e400)
:else e400)))
(PUT "/oauth2/firebase_token" []
:no-doc true
:summary ""
:body-params [client_id :- s/Str
id :- s/Int
token :- s/Str]
:auth-rules access/signed
:current-user current-user
(if (get-in ctx [:config :oauth :client client_id])
(ok (d/set-firebase-token ctx client_id (:id current-user) id token))
(bad-request {:message "400 Bad Request"})))
)
(context "/oauth" []
:tags ["oauth"]
(GET "/google" req
:no-doc true
:query-params [{code :- s/Str nil}
{state :- s/Str nil}
{error :- s/Str nil}]
:current-user current-user
(let [r (g/google-login ctx code (:id current-user) error)
{:keys [status user-id message new-user]} r
_ (if (= true (get-in req [:session :seen-terms])) (d/insert-user-terms ctx user-id "accepted"))
accepted-terms? (u/get-accepted-terms-by-id ctx user-id)
invitation (get-in req [:session :invitation] nil)]
(if (= status "success")
(if (and (not= accepted-terms? "accepted") (not= false accepted-terms?))
(if current-user
(redirect (str (get-base-path ctx) "/user/terms/" (:id current-user)))
(u/finalize-login ctx (assoc (redirect (str (get-base-path ctx) "/user/terms?service=google&new-user=" new-user)) :invitation invitation) user-id (get-in req [:session :pending :user-badge-id]) false))
(if current-user
(redirect (str (get-base-path ctx) "/user/oauth/google"))
(if new-user
(u/finalize-login ctx (assoc (redirect (str (get-base-path ctx) "/social?service=google&new-user=" new-user)) :invitation invitation) user-id (get-in req [:session :pending :user-badge-id]) false)
(u/finalize-login ctx (assoc (redirect (str (get-base-path ctx) "/social")) :invitation invitation) user-id (get-in req [:session :pending :user-badge-id]) false))))
(if current-user
(assoc (redirect (str (get-base-path ctx) "/user/oauth/google")) :flash message)
(assoc (redirect (str (get-base-path ctx) "/user/login")) :flash message)))))
(GET "/google/deauthorize" []
:no-doc true
:query-params [code :- s/Str]
:return {:status (s/enum "success" "error")
(s/optional-key :message) s/Str}
:auth-rules access/signed
:current-user current-user
(let [{:keys [status message]} (g/google-deauthorize ctx code (:id current-user))]
(if (= status "success")
(redirect (str (get-base-path ctx) "/user/oauth/google"))
(assoc (redirect (str (get-base-path ctx) "/user/oauth/google")) :flash message))))
(GET "/facebook" req
:no-doc true
:query-params [{code :- s/Str nil}
{error :- s/Str nil}]
:current-user current-user
(let [{:keys [status user-id message role private new-user]} (f/facebook-login ctx code (:id current-user) error)
_ (if (= true (get-in req [:session :seen-terms])) (d/insert-user-terms ctx user-id "accepted"))
accepted-terms? (u/get-accepted-terms-by-id ctx user-id)
invitation (get-in req [:session :invitation] nil)]
(if (= status "success")
(if (and (not= accepted-terms? "accepted") (not= false accepted-terms?))
(if current-user
(redirect (str (get-base-path ctx) "/user/terms/"))
(u/finalize-login ctx (assoc (redirect (str (get-base-path ctx) "/user/terms?service=facebook&new-user=" new-user)) :invitation invitation) user-id (get-in req [:session :pending :user-badge-id]) false))
(if current-user
(redirect (str (get-base-path ctx) "/user/oauth/facebook"))
(if new-user
(u/finalize-login ctx (assoc (redirect (str (get-base-path ctx) "/social?service=facebook&new-user=" new-user)) :invitation invitation) user-id (get-in req [:session :pending :user-badge-id]) false)
(u/finalize-login ctx (assoc (redirect (str (get-base-path ctx) "/social")) :invitation invitation) user-id (get-in req [:session :pending :user-badge-id]) false))))
(if current-user
(assoc (redirect (str (get-base-path ctx) "/user/oauth/facebook")) :flash message)
(assoc (redirect (str (get-base-path ctx) "/user/login")) :flash message)))))
(GET "/facebook/deauthorize" []
:no-doc true
:query-params [code :- s/Str]
:auth-rules access/signed
:current-user current-user
(let [{:keys [status message]} (f/facebook-deauthorize ctx code (:id current-user))]
(if (= status "success")
(redirect (str (get-base-path ctx) "/user/oauth/facebook"))
(assoc (redirect (str (get-base-path ctx) "/user/oauth/facebook")) :flash message))))
(GET "/linkedin" req
:no-doc true
:query-params [{code :- s/Str nil}
{state :- s/Str nil}
{error :- s/Str nil}]
:current-user current-user
(let [r (l/linkedin-login ctx code state (:id current-user) error)
{:keys [status user-id message new-user]} r
_ (if (= true (get-in req [:session :seen-terms])) (d/insert-user-terms ctx user-id "accepted"))
accepted-terms? (u/get-accepted-terms-by-id ctx user-id)
invitation (get-in req [:session :invitation] nil)]
(if (= status "success")
(if (and (not= accepted-terms? "accepted") (not= false accepted-terms?))
(if current-user
(redirect (str (get-base-path ctx) "/user/terms/" (:id current-user)))
(u/finalize-login ctx (assoc (redirect (str (get-base-path ctx) "/user/terms?service=linkedin&new-user=" new-user)) :invitation invitation) user-id (get-in req [:session :pending :user-badge-id]) false))
(if current-user
(redirect (str (get-base-path ctx) "/user/oauth/linkedin"))
(if new-user
(u/finalize-login ctx (assoc (redirect (str (get-base-path ctx) "/social?service=linkedin&new-user=" new-user)) :invitation invitation) user-id (get-in req [:session :pending :user-badge-id]) false)
(u/finalize-login ctx (assoc (redirect (str (get-base-path ctx) "/social")) :invitation invitation) user-id (get-in req [:session :pending :user-badge-id]) false))))
(if current-user
(assoc (redirect (str (get-base-path ctx) "/user/oauth/linkedin")) :flash message)
(assoc (redirect (str (get-base-path ctx) "/user/login")) :flash message)))))
(GET "/linkedin/deauthorize" []
:no-doc true
:return {:status (s/enum "success" "error")
(s/optional-key :message) s/Str}
:auth-rules access/signed
:current-user current-user
(ok (l/linkedin-deauthorize ctx (:id current-user)))))
(context "/obpv1/oauth" []
:tags ["oauth"]
(GET "/status/:service" []
:return {:active s/Bool :no-password? s/Bool}
:summary "Get user's remote service login status"
:path-params [service :- (s/enum "facebook" "linkedin" "google")]
:auth-rules access/signed
:current-user current-user
(ok (d/login-status ctx (:id current-user) service))))))
| null | https://raw.githubusercontent.com/openbadgefactory/salava/97f05992406e4dcbe3c4bff75c04378d19606b61/src/clj/salava/oauth/routes.clj | clojure | (ns salava.oauth.routes
(:require [clojure.pprint :refer [pprint]]
[clojure.string :as string]
[compojure.api.sweet :refer :all]
[ring.util.http-response :refer :all]
[ring.util.response :refer [redirect]]
[salava.core.layout :as layout]
[schema.core :as s]
[salava.core.util :refer [get-base-path get-full-path get-plugins]]
[salava.oauth.db :as d]
[salava.oauth.facebook :as f]
[salava.oauth.google :as g]
[salava.oauth.linkedin :as l]
[salava.user.db :as u]
[salava.core.helper :refer [dump private?]]
[salava.core.access :as access]
salava.core.restructure))
(defn route-def [ctx]
(routes
(context "/user" []
(layout/main ctx "/oauth/facebook")
(layout/main ctx "/oauth/google")
(layout/main ctx "/oauth/linkedin")
#_(layout/main ctx "/terms")
(GET "/oauth2/authorize" req
:no-doc true
:summary ""
:query-params [response_type :- s/Str
client_id :- s/Str
redirect_uri :- s/Str
state :- s/Str
code_challenge :- s/Str
code_challenge_method :- s/Str]
:current-user current-user
:flash-message flash-message
(let [client (get-in ctx [:config :oauth :client client_id])
redirect-to (str "/user/oauth2/authorize?" (:query-string req))
login-redirect {:value (if (:id current-user) nil redirect-to) :max-age 1200 :http-only true :path "/"}]
(if (and (= response_type "code") (= (:redirect_uri client) redirect_uri) (= code_challenge_method "S256"))
(-> (layout/main-response ctx current-user flash-message nil)
(assoc-in [:cookies "login_redirect"] login-redirect))
(bad-request {:message "400 Bad Request"}))))
(POST "/oauth2/authorize" []
:no-doc true
:summary ""
:form-params [client_id :- s/Str
state :- s/Str
code_challenge :- s/Str]
:auth-rules access/signed
:current-user current-user
(if-let [client (get-in ctx [:config :oauth :client client_id])]
(-> (redirect (str (:redirect_uri client)
"?code=" (d/authorization-code ctx client_id (:id current-user) code_challenge)
"&host=" (get-full-path ctx)
"&state=" state))
(assoc-in [:session :identity] nil))
(bad-request {:message "400 Bad Request"})))
(POST "/oauth2/unauthorize" []
:no-doc true
:summary ""
:form-params [client_id :- s/Str]
:auth-rules access/signed
:current-user current-user
(ok (d/unauthorize-client ctx client_id (:id current-user))))
(POST "/oauth2/token" []
:no-doc true
:summary ""
:form-params [grant_type :- s/Str
client_id :- s/Str
{redirect_uri :- s/Str nil}
{code :- s/Str nil}
{refresh_token :- s/Str nil}
{code_verifier :- s/Str nil}]
(let [e400 (bad-request {:message "400 Bad Request"})
client (get-in ctx [:config :oauth :client client_id])]
(cond
(and (= grant_type "code")
(not (nil? code))
(= (:redirect_uri client) redirect_uri))
(if-let [out (d/new-access-token ctx client_id code code_verifier)] (ok out) e400)
(and (= grant_type "refresh_token")
(not (nil? refresh_token)))
(if-let [out (d/refresh-access-token ctx client_id (string/split refresh_token #"-" 2))] (ok out) e400)
:else e400)))
(PUT "/oauth2/firebase_token" []
:no-doc true
:summary ""
:body-params [client_id :- s/Str
id :- s/Int
token :- s/Str]
:auth-rules access/signed
:current-user current-user
(if (get-in ctx [:config :oauth :client client_id])
(ok (d/set-firebase-token ctx client_id (:id current-user) id token))
(bad-request {:message "400 Bad Request"})))
)
(context "/oauth" []
:tags ["oauth"]
(GET "/google" req
:no-doc true
:query-params [{code :- s/Str nil}
{state :- s/Str nil}
{error :- s/Str nil}]
:current-user current-user
(let [r (g/google-login ctx code (:id current-user) error)
{:keys [status user-id message new-user]} r
_ (if (= true (get-in req [:session :seen-terms])) (d/insert-user-terms ctx user-id "accepted"))
accepted-terms? (u/get-accepted-terms-by-id ctx user-id)
invitation (get-in req [:session :invitation] nil)]
(if (= status "success")
(if (and (not= accepted-terms? "accepted") (not= false accepted-terms?))
(if current-user
(redirect (str (get-base-path ctx) "/user/terms/" (:id current-user)))
(u/finalize-login ctx (assoc (redirect (str (get-base-path ctx) "/user/terms?service=google&new-user=" new-user)) :invitation invitation) user-id (get-in req [:session :pending :user-badge-id]) false))
(if current-user
(redirect (str (get-base-path ctx) "/user/oauth/google"))
(if new-user
(u/finalize-login ctx (assoc (redirect (str (get-base-path ctx) "/social?service=google&new-user=" new-user)) :invitation invitation) user-id (get-in req [:session :pending :user-badge-id]) false)
(u/finalize-login ctx (assoc (redirect (str (get-base-path ctx) "/social")) :invitation invitation) user-id (get-in req [:session :pending :user-badge-id]) false))))
(if current-user
(assoc (redirect (str (get-base-path ctx) "/user/oauth/google")) :flash message)
(assoc (redirect (str (get-base-path ctx) "/user/login")) :flash message)))))
(GET "/google/deauthorize" []
:no-doc true
:query-params [code :- s/Str]
:return {:status (s/enum "success" "error")
(s/optional-key :message) s/Str}
:auth-rules access/signed
:current-user current-user
(let [{:keys [status message]} (g/google-deauthorize ctx code (:id current-user))]
(if (= status "success")
(redirect (str (get-base-path ctx) "/user/oauth/google"))
(assoc (redirect (str (get-base-path ctx) "/user/oauth/google")) :flash message))))
(GET "/facebook" req
:no-doc true
:query-params [{code :- s/Str nil}
{error :- s/Str nil}]
:current-user current-user
(let [{:keys [status user-id message role private new-user]} (f/facebook-login ctx code (:id current-user) error)
_ (if (= true (get-in req [:session :seen-terms])) (d/insert-user-terms ctx user-id "accepted"))
accepted-terms? (u/get-accepted-terms-by-id ctx user-id)
invitation (get-in req [:session :invitation] nil)]
(if (= status "success")
(if (and (not= accepted-terms? "accepted") (not= false accepted-terms?))
(if current-user
(redirect (str (get-base-path ctx) "/user/terms/"))
(u/finalize-login ctx (assoc (redirect (str (get-base-path ctx) "/user/terms?service=facebook&new-user=" new-user)) :invitation invitation) user-id (get-in req [:session :pending :user-badge-id]) false))
(if current-user
(redirect (str (get-base-path ctx) "/user/oauth/facebook"))
(if new-user
(u/finalize-login ctx (assoc (redirect (str (get-base-path ctx) "/social?service=facebook&new-user=" new-user)) :invitation invitation) user-id (get-in req [:session :pending :user-badge-id]) false)
(u/finalize-login ctx (assoc (redirect (str (get-base-path ctx) "/social")) :invitation invitation) user-id (get-in req [:session :pending :user-badge-id]) false))))
(if current-user
(assoc (redirect (str (get-base-path ctx) "/user/oauth/facebook")) :flash message)
(assoc (redirect (str (get-base-path ctx) "/user/login")) :flash message)))))
(GET "/facebook/deauthorize" []
:no-doc true
:query-params [code :- s/Str]
:auth-rules access/signed
:current-user current-user
(let [{:keys [status message]} (f/facebook-deauthorize ctx code (:id current-user))]
(if (= status "success")
(redirect (str (get-base-path ctx) "/user/oauth/facebook"))
(assoc (redirect (str (get-base-path ctx) "/user/oauth/facebook")) :flash message))))
(GET "/linkedin" req
:no-doc true
:query-params [{code :- s/Str nil}
{state :- s/Str nil}
{error :- s/Str nil}]
:current-user current-user
(let [r (l/linkedin-login ctx code state (:id current-user) error)
{:keys [status user-id message new-user]} r
_ (if (= true (get-in req [:session :seen-terms])) (d/insert-user-terms ctx user-id "accepted"))
accepted-terms? (u/get-accepted-terms-by-id ctx user-id)
invitation (get-in req [:session :invitation] nil)]
(if (= status "success")
(if (and (not= accepted-terms? "accepted") (not= false accepted-terms?))
(if current-user
(redirect (str (get-base-path ctx) "/user/terms/" (:id current-user)))
(u/finalize-login ctx (assoc (redirect (str (get-base-path ctx) "/user/terms?service=linkedin&new-user=" new-user)) :invitation invitation) user-id (get-in req [:session :pending :user-badge-id]) false))
(if current-user
(redirect (str (get-base-path ctx) "/user/oauth/linkedin"))
(if new-user
(u/finalize-login ctx (assoc (redirect (str (get-base-path ctx) "/social?service=linkedin&new-user=" new-user)) :invitation invitation) user-id (get-in req [:session :pending :user-badge-id]) false)
(u/finalize-login ctx (assoc (redirect (str (get-base-path ctx) "/social")) :invitation invitation) user-id (get-in req [:session :pending :user-badge-id]) false))))
(if current-user
(assoc (redirect (str (get-base-path ctx) "/user/oauth/linkedin")) :flash message)
(assoc (redirect (str (get-base-path ctx) "/user/login")) :flash message)))))
(GET "/linkedin/deauthorize" []
:no-doc true
:return {:status (s/enum "success" "error")
(s/optional-key :message) s/Str}
:auth-rules access/signed
:current-user current-user
(ok (l/linkedin-deauthorize ctx (:id current-user)))))
(context "/obpv1/oauth" []
:tags ["oauth"]
(GET "/status/:service" []
:return {:active s/Bool :no-password? s/Bool}
:summary "Get user's remote service login status"
:path-params [service :- (s/enum "facebook" "linkedin" "google")]
:auth-rules access/signed
:current-user current-user
(ok (d/login-status ctx (:id current-user) service))))))
| |
4f64dd3aaf18fed282878eb675cd470ba5bfc368fb860fe7b7bb32cac091c731 | pallet/alembic | profiles.clj | {:clojure-1.6.0 {:dependencies [[org.clojure/clojure "1.6.0"]]}
:clojure-1.5.1 {:dependencies [[org.clojure/clojure "1.5.1"]]}
:clojure-1.5.0 {:dependencies [[org.clojure/clojure "1.5.0"]]}
:clojure-1.4.0 {:dependencies [[org.clojure/clojure "1.4.0"]]}
:dev {:plugins [[lein-pallet-release "RELEASE"]]}}
| null | https://raw.githubusercontent.com/pallet/alembic/bff5478c1ddb990c03e41c03cd19fb21353fd064/profiles.clj | clojure | {:clojure-1.6.0 {:dependencies [[org.clojure/clojure "1.6.0"]]}
:clojure-1.5.1 {:dependencies [[org.clojure/clojure "1.5.1"]]}
:clojure-1.5.0 {:dependencies [[org.clojure/clojure "1.5.0"]]}
:clojure-1.4.0 {:dependencies [[org.clojure/clojure "1.4.0"]]}
:dev {:plugins [[lein-pallet-release "RELEASE"]]}}
| |
a76a88d15e332e0931b46872524ca4b9ab22680c4586b759a79696dc70879382 | mfelleisen/Evolution | cards.rkt | #lang racket
;; ===================================================================================================
;; equips cards from ../ with serialization for GUI and dist. impl.
;; SERVICES
(provide
(all-from-out "../cards.rkt")
;; JSexpr -> Card
json->card
;; Card -> JSexpr
card->json)
;; ===================================================================================================
;; DEPENDENCIES
(require "../traits.rkt" "../cards.rkt")
(module+ test
(require rackunit))
;; ===================================================================================================
;; IMPLEMENTATION
(define (card->json c)
`[,(card-food-points c) ,(trait->string (card-trait c))])
(define (json->card j)
(match j
[`(,(? integer? food) ,trait)
(define candidate (card food (string->trait trait)))
(cond
[(member candidate all-cards) => first]
[else (error 'json->card "~e does not specify an Evolution card")])]))
;; ===================================================================================================
(module+ test
(define a-card (list-ref all-cards (random (length all-cards))))
(check-equal? (json->card (card->json a-card)) a-card "json->card is left inverse to card->json"))
| null | https://raw.githubusercontent.com/mfelleisen/Evolution/cf5ef02696339f8e83cc831932206c6a4ead689e/Xternal/cards.rkt | racket | ===================================================================================================
equips cards from ../ with serialization for GUI and dist. impl.
SERVICES
JSexpr -> Card
Card -> JSexpr
===================================================================================================
DEPENDENCIES
===================================================================================================
IMPLEMENTATION
=================================================================================================== | #lang racket
(provide
(all-from-out "../cards.rkt")
json->card
card->json)
(require "../traits.rkt" "../cards.rkt")
(module+ test
(require rackunit))
(define (card->json c)
`[,(card-food-points c) ,(trait->string (card-trait c))])
(define (json->card j)
(match j
[`(,(? integer? food) ,trait)
(define candidate (card food (string->trait trait)))
(cond
[(member candidate all-cards) => first]
[else (error 'json->card "~e does not specify an Evolution card")])]))
(module+ test
(define a-card (list-ref all-cards (random (length all-cards))))
(check-equal? (json->card (card->json a-card)) a-card "json->card is left inverse to card->json"))
|
1d7d1482a491463d3b2cb97f4c8662d37b2160f76e07b704bbad41d7f6d24277 | MaskRay/OJHaskell | 73.hs | import Data.Ratio
maxn = 12000
traverse a b c d
| b > maxn || d > maxn || c%d <= 1%3 || 1%2 <= a%b = 0
| otherwise = traverse a b m n + traverse m n c d + fromEnum (1%3 < m%n && m%n < 1%2 && n <= maxn)
where m = a+c
n = b+d
main = print $ traverse 0 1 1 1 | null | https://raw.githubusercontent.com/MaskRay/OJHaskell/ba24050b2480619f10daa7d37fca558182ba006c/Project%20Euler/73.hs | haskell | import Data.Ratio
maxn = 12000
traverse a b c d
| b > maxn || d > maxn || c%d <= 1%3 || 1%2 <= a%b = 0
| otherwise = traverse a b m n + traverse m n c d + fromEnum (1%3 < m%n && m%n < 1%2 && n <= maxn)
where m = a+c
n = b+d
main = print $ traverse 0 1 1 1 | |
1708533fb47fccf446c066fd6929ddb181a845f050aed5e281e489a76228d63a | fendor/hsimport | Utils.hs |
module HsImport.Utils
( firstSrcLine
, lastSrcLine
, srcSpan
, declSrcLoc
, importDecls
) where
import qualified Language.Haskell.Exts as HS
import HsImport.Types
declSrcLoc :: Decl -> SrcLoc
declSrcLoc decl = HS.SrcLoc srcFile srcLine srcCol
where
declSrcSpan = srcSpan . HS.ann $ decl
srcFile = HS.srcSpanFilename declSrcSpan
srcLine = HS.srcSpanStartLine declSrcSpan
srcCol = HS.srcSpanStartColumn declSrcSpan
importDecls :: Module -> [ImportDecl]
importDecls (HS.Module _ _ _ imports _) = imports
importDecls (HS.XmlPage _ _ _ _ _ _ _) = []
importDecls (HS.XmlHybrid _ _ _ imports _ _ _ _ _) = imports
| null | https://raw.githubusercontent.com/fendor/hsimport/9be9918b06545cfd7282e4db08c2b88f1d8162cd/lib/HsImport/Utils.hs | haskell |
module HsImport.Utils
( firstSrcLine
, lastSrcLine
, srcSpan
, declSrcLoc
, importDecls
) where
import qualified Language.Haskell.Exts as HS
import HsImport.Types
declSrcLoc :: Decl -> SrcLoc
declSrcLoc decl = HS.SrcLoc srcFile srcLine srcCol
where
declSrcSpan = srcSpan . HS.ann $ decl
srcFile = HS.srcSpanFilename declSrcSpan
srcLine = HS.srcSpanStartLine declSrcSpan
srcCol = HS.srcSpanStartColumn declSrcSpan
importDecls :: Module -> [ImportDecl]
importDecls (HS.Module _ _ _ imports _) = imports
importDecls (HS.XmlPage _ _ _ _ _ _ _) = []
importDecls (HS.XmlHybrid _ _ _ imports _ _ _ _ _) = imports
| |
c2185901a97dcd35e76018ee9f424593cc0338fac9402998a2ab3f0f0e94b8a7 | RichiH/git-annex | Cost.hs | Remote costs .
-
- Copyright 2011 - 2013 < >
-
- Licensed under the GNU GPL version 3 or higher .
-
- Copyright 2011-2013 Joey Hess <>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Config.Cost where
We use a float for a cost to ensure that there is a cost in
- between any two other costs .
- between any two other costs. -}
type Cost = Float
{- Some predefined default costs.
- Users setting costs in config files can be aware of these,
- and pick values relative to them. So don't change. -}
cheapRemoteCost :: Cost
cheapRemoteCost = 100
nearlyCheapRemoteCost :: Cost
nearlyCheapRemoteCost = 110
semiExpensiveRemoteCost :: Cost
semiExpensiveRemoteCost = 175
expensiveRemoteCost :: Cost
expensiveRemoteCost = 200
veryExpensiveRemoteCost :: Cost
veryExpensiveRemoteCost = 1000
{- Adjusts a remote's cost to reflect it being encrypted. -}
encryptedRemoteCostAdj :: Cost
encryptedRemoteCostAdj = 50
Given an ordered list of costs , and the position of one of the items
- the list , inserts a new cost into the list , in between the item
- and the item after it .
-
- If two or move items have the same cost , their costs are adjusted
- to make room . The costs of other items in the list are left
- unchanged .
-
- To insert the new cost before any other in the list , specify a negative
- position . To insert the new cost at the end of the list , specify a
- position longer than the list .
- the list, inserts a new cost into the list, in between the item
- and the item after it.
-
- If two or move items have the same cost, their costs are adjusted
- to make room. The costs of other items in the list are left
- unchanged.
-
- To insert the new cost before any other in the list, specify a negative
- position. To insert the new cost at the end of the list, specify a
- position longer than the list.
-}
insertCostAfter :: [Cost] -> Int -> [Cost]
insertCostAfter [] _ = []
insertCostAfter l pos
| pos < 0 = costBetween 0 (l !! 0) : l
| nextpos > maxpos = l ++ [1 + l !! maxpos]
| item == nextitem =
let (_dup:new:l') = insertCostAfter lastsegment 0
in firstsegment ++ [costBetween item new, new] ++ l'
| otherwise =
firstsegment ++ [costBetween item nextitem ] ++ lastsegment
where
nextpos = pos + 1
maxpos = length l - 1
item = l !! pos
nextitem = l !! nextpos
(firstsegment, lastsegment) = splitAt (pos + 1) l
costBetween :: Cost -> Cost -> Cost
costBetween x y
| x == y = x
| x > y = -- avoid fractions unless needed
let mid = y + (x - y) / 2
mid' = fromIntegral (floor mid :: Int)
in if mid' > y then mid' else mid
| otherwise = costBetween y x
{- Make sure the remote cost numbers work out. -}
prop_cost_sane :: Bool
prop_cost_sane = False `notElem`
[ expensiveRemoteCost > 0
, cheapRemoteCost < nearlyCheapRemoteCost
, nearlyCheapRemoteCost < semiExpensiveRemoteCost
, semiExpensiveRemoteCost < expensiveRemoteCost
, cheapRemoteCost + encryptedRemoteCostAdj > nearlyCheapRemoteCost
, nearlyCheapRemoteCost + encryptedRemoteCostAdj < semiExpensiveRemoteCost
, nearlyCheapRemoteCost + encryptedRemoteCostAdj < expensiveRemoteCost
]
| null | https://raw.githubusercontent.com/RichiH/git-annex/bbcad2b0af8cd9264d0cb86e6ca126ae626171f3/Config/Cost.hs | haskell | Some predefined default costs.
- Users setting costs in config files can be aware of these,
- and pick values relative to them. So don't change.
Adjusts a remote's cost to reflect it being encrypted.
avoid fractions unless needed
Make sure the remote cost numbers work out. | Remote costs .
-
- Copyright 2011 - 2013 < >
-
- Licensed under the GNU GPL version 3 or higher .
-
- Copyright 2011-2013 Joey Hess <>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Config.Cost where
We use a float for a cost to ensure that there is a cost in
- between any two other costs .
- between any two other costs. -}
type Cost = Float
cheapRemoteCost :: Cost
cheapRemoteCost = 100
nearlyCheapRemoteCost :: Cost
nearlyCheapRemoteCost = 110
semiExpensiveRemoteCost :: Cost
semiExpensiveRemoteCost = 175
expensiveRemoteCost :: Cost
expensiveRemoteCost = 200
veryExpensiveRemoteCost :: Cost
veryExpensiveRemoteCost = 1000
encryptedRemoteCostAdj :: Cost
encryptedRemoteCostAdj = 50
Given an ordered list of costs , and the position of one of the items
- the list , inserts a new cost into the list , in between the item
- and the item after it .
-
- If two or move items have the same cost , their costs are adjusted
- to make room . The costs of other items in the list are left
- unchanged .
-
- To insert the new cost before any other in the list , specify a negative
- position . To insert the new cost at the end of the list , specify a
- position longer than the list .
- the list, inserts a new cost into the list, in between the item
- and the item after it.
-
- If two or move items have the same cost, their costs are adjusted
- to make room. The costs of other items in the list are left
- unchanged.
-
- To insert the new cost before any other in the list, specify a negative
- position. To insert the new cost at the end of the list, specify a
- position longer than the list.
-}
insertCostAfter :: [Cost] -> Int -> [Cost]
insertCostAfter [] _ = []
insertCostAfter l pos
| pos < 0 = costBetween 0 (l !! 0) : l
| nextpos > maxpos = l ++ [1 + l !! maxpos]
| item == nextitem =
let (_dup:new:l') = insertCostAfter lastsegment 0
in firstsegment ++ [costBetween item new, new] ++ l'
| otherwise =
firstsegment ++ [costBetween item nextitem ] ++ lastsegment
where
nextpos = pos + 1
maxpos = length l - 1
item = l !! pos
nextitem = l !! nextpos
(firstsegment, lastsegment) = splitAt (pos + 1) l
costBetween :: Cost -> Cost -> Cost
costBetween x y
| x == y = x
let mid = y + (x - y) / 2
mid' = fromIntegral (floor mid :: Int)
in if mid' > y then mid' else mid
| otherwise = costBetween y x
prop_cost_sane :: Bool
prop_cost_sane = False `notElem`
[ expensiveRemoteCost > 0
, cheapRemoteCost < nearlyCheapRemoteCost
, nearlyCheapRemoteCost < semiExpensiveRemoteCost
, semiExpensiveRemoteCost < expensiveRemoteCost
, cheapRemoteCost + encryptedRemoteCostAdj > nearlyCheapRemoteCost
, nearlyCheapRemoteCost + encryptedRemoteCostAdj < semiExpensiveRemoteCost
, nearlyCheapRemoteCost + encryptedRemoteCostAdj < expensiveRemoteCost
]
|
f778b39e1e9eb7a4f3948bb30f019ea177c5b2ccb4372accffdc03de731e74dd | nineties/Choco | ArrayOpt.hs | -------------------------------------------------
Choco --
Chikadzume Oriented Compiler --
Copyright 2007 - 2008 by Basement fairy --
-------------------------------------------------
module ArrayOpt (
arrayOpt
) where
import Choco
import Const
import LamSyn
import Outputable
import Var
import Types
import Control.Monad.State
import qualified Data.Set as S
simplif :: Lambda -> ChocoM Lambda
simplif lam = case lam of
Lvar _ -> return lam
Llit _ -> return lam
Lapp f args p -> do
f' <- simplif f
args' <- mapM simplif args
return $ Lapp f' args' p
Lfun args body -> do
body' <- simplif body
return $ Lfun args body'
Llet Strict (ary, Lprim PcreateArray args) body -> do
args' <- mapM simplif args
case head args' of
Llit (IntC n) -> do
ids <- replicateM n newUniq
let names = map (\i -> var_name ary ++ show i) [0..]
let vars = zipWith
(\i n -> mkVar n i (var_type ary) (isGlobal ary)) ids names
case eliminateArray ary vars body of
Just body' | isSimpleArgument (args' !! 1)
-> do
putLog (text "\tchange array" <+> ppr ary <+> text "to variable(s)")
body'' <- simplif body'
return $ foldr
(\v cont -> Llet Variable (v, args'!!1) cont)
body'' vars
Just body' -> do
putLog (text "\tchange array" <+> ppr ary <+> text "to variable(s)")
v' <- copyVar ary
body'' <- simplif body'
return$ Llet Strict (v', args' !! 1) $
foldr (\v cont -> Llet Variable (v, Lvar v') cont)
body'' vars
Nothing -> do
body' <- simplif body
return $ Llet Strict (ary, Lprim PcreateArray args') body'
_ -> do
body' <- simplif body
return $ Llet Strict (ary, Lprim PcreateArray args') body'
Llet str (v, e) cont -> do
e' <- simplif e
cont' <- simplif cont
return $ Llet str (v, e') cont'
Lletrec (v, e) cont -> do
e' <- simplif e
cont' <- simplif cont
return $ Lletrec (v, e') cont'
Lprim p args -> do
args' <- mapM simplif args
return $ Lprim p args'
Lcond e1 e2 e3 -> do
e1' <- simplif e1
e2' <- simplif e2
e3' <- simplif e3
return $ Lcond e1' e2' e3'
Lseq e1 e2 -> do
e1' <- simplif e1
e2' <- simplif e2
return $ Lseq e1' e2'
Lwhile e1 e2 -> do
e1' <- simplif e1
e2' <- simplif e2
return $ Lwhile e1' e2'
Lfor v e1 e2 e3 -> do
e1' <- simplif e1
e2' <- simplif e2
e3' <- simplif e3
return $ Lfor v e1' e2' e3'
Lassign v e -> do
e' <- simplif e
return $ Lassign v e'
eliminateArray :: Var -> [Var] -> Lambda -> Maybe Lambda
eliminateArray ary elems lam = case lam of
Lvar v | v == ary -> Nothing
| otherwise -> return $ Lvar v
Llit c -> return $ Llit c
Lapp f args p -> do
f' <- eliminateArray ary elems f
args' <- mapM (eliminateArray ary elems) args
return $ Lapp f' args' p
Lfun params body
| isGlobal ary -> do
body' <- eliminateArray ary elems body
return $ Lfun params body'
| ary `S.member` (freeVars lam) -> Nothing
| otherwise -> return lam
Llet str (v, e1) e2 -> do
e1' <- eliminateArray ary elems e1
e2' <- eliminateArray ary elems e2
return $ Llet str (v, e1') e2'
Lletrec (v, e1) e2 -> do
e1' <- eliminateArray ary elems e1
e2' <- eliminateArray ary elems e2
return $ Lletrec (v, e1') e2'
Lprim ParraySet [Lvar v, Llit (IntC n), e] | v == ary
-> do e' <- eliminateArray ary elems e
return $ Lassign (elems!!n) e'
Lprim ParrayRef [Lvar v, Llit (IntC n)] | v == ary
-> return $ Lvar (elems!!n)
Lprim p args -> do
args' <- mapM (eliminateArray ary elems) args
return $ Lprim p args'
Lcond e1 e2 e3 -> do
e1' <- eliminateArray ary elems e1
e2' <- eliminateArray ary elems e2
e3' <- eliminateArray ary elems e3
return $ Lcond e1' e2' e3'
Lseq e1 e2 -> do
e1' <- eliminateArray ary elems e1
e2' <- eliminateArray ary elems e2
return $ Lseq e1' e2'
Lwhile e1 e2 -> do
e1' <- eliminateArray ary elems e1
e2' <- eliminateArray ary elems e2
return $ Lwhile e1' e2'
Lfor v e1 e2 e3 -> do
e1' <- eliminateArray ary elems e1
e2' <- eliminateArray ary elems e2
e3' <- eliminateArray ary elems e3
return $ Lfor v e1' e2' e3'
Lassign v e -> do
e' <- eliminateArray ary elems e
return $ Lassign v e'
arrayOpt = simplif
| null | https://raw.githubusercontent.com/nineties/Choco/0081351d0b556ff74f096accb65c9ab45d29ddfe/src/ArrayOpt.hs | haskell | -----------------------------------------------
----------------------------------------------- | module ArrayOpt (
arrayOpt
) where
import Choco
import Const
import LamSyn
import Outputable
import Var
import Types
import Control.Monad.State
import qualified Data.Set as S
simplif :: Lambda -> ChocoM Lambda
simplif lam = case lam of
Lvar _ -> return lam
Llit _ -> return lam
Lapp f args p -> do
f' <- simplif f
args' <- mapM simplif args
return $ Lapp f' args' p
Lfun args body -> do
body' <- simplif body
return $ Lfun args body'
Llet Strict (ary, Lprim PcreateArray args) body -> do
args' <- mapM simplif args
case head args' of
Llit (IntC n) -> do
ids <- replicateM n newUniq
let names = map (\i -> var_name ary ++ show i) [0..]
let vars = zipWith
(\i n -> mkVar n i (var_type ary) (isGlobal ary)) ids names
case eliminateArray ary vars body of
Just body' | isSimpleArgument (args' !! 1)
-> do
putLog (text "\tchange array" <+> ppr ary <+> text "to variable(s)")
body'' <- simplif body'
return $ foldr
(\v cont -> Llet Variable (v, args'!!1) cont)
body'' vars
Just body' -> do
putLog (text "\tchange array" <+> ppr ary <+> text "to variable(s)")
v' <- copyVar ary
body'' <- simplif body'
return$ Llet Strict (v', args' !! 1) $
foldr (\v cont -> Llet Variable (v, Lvar v') cont)
body'' vars
Nothing -> do
body' <- simplif body
return $ Llet Strict (ary, Lprim PcreateArray args') body'
_ -> do
body' <- simplif body
return $ Llet Strict (ary, Lprim PcreateArray args') body'
Llet str (v, e) cont -> do
e' <- simplif e
cont' <- simplif cont
return $ Llet str (v, e') cont'
Lletrec (v, e) cont -> do
e' <- simplif e
cont' <- simplif cont
return $ Lletrec (v, e') cont'
Lprim p args -> do
args' <- mapM simplif args
return $ Lprim p args'
Lcond e1 e2 e3 -> do
e1' <- simplif e1
e2' <- simplif e2
e3' <- simplif e3
return $ Lcond e1' e2' e3'
Lseq e1 e2 -> do
e1' <- simplif e1
e2' <- simplif e2
return $ Lseq e1' e2'
Lwhile e1 e2 -> do
e1' <- simplif e1
e2' <- simplif e2
return $ Lwhile e1' e2'
Lfor v e1 e2 e3 -> do
e1' <- simplif e1
e2' <- simplif e2
e3' <- simplif e3
return $ Lfor v e1' e2' e3'
Lassign v e -> do
e' <- simplif e
return $ Lassign v e'
eliminateArray :: Var -> [Var] -> Lambda -> Maybe Lambda
eliminateArray ary elems lam = case lam of
Lvar v | v == ary -> Nothing
| otherwise -> return $ Lvar v
Llit c -> return $ Llit c
Lapp f args p -> do
f' <- eliminateArray ary elems f
args' <- mapM (eliminateArray ary elems) args
return $ Lapp f' args' p
Lfun params body
| isGlobal ary -> do
body' <- eliminateArray ary elems body
return $ Lfun params body'
| ary `S.member` (freeVars lam) -> Nothing
| otherwise -> return lam
Llet str (v, e1) e2 -> do
e1' <- eliminateArray ary elems e1
e2' <- eliminateArray ary elems e2
return $ Llet str (v, e1') e2'
Lletrec (v, e1) e2 -> do
e1' <- eliminateArray ary elems e1
e2' <- eliminateArray ary elems e2
return $ Lletrec (v, e1') e2'
Lprim ParraySet [Lvar v, Llit (IntC n), e] | v == ary
-> do e' <- eliminateArray ary elems e
return $ Lassign (elems!!n) e'
Lprim ParrayRef [Lvar v, Llit (IntC n)] | v == ary
-> return $ Lvar (elems!!n)
Lprim p args -> do
args' <- mapM (eliminateArray ary elems) args
return $ Lprim p args'
Lcond e1 e2 e3 -> do
e1' <- eliminateArray ary elems e1
e2' <- eliminateArray ary elems e2
e3' <- eliminateArray ary elems e3
return $ Lcond e1' e2' e3'
Lseq e1 e2 -> do
e1' <- eliminateArray ary elems e1
e2' <- eliminateArray ary elems e2
return $ Lseq e1' e2'
Lwhile e1 e2 -> do
e1' <- eliminateArray ary elems e1
e2' <- eliminateArray ary elems e2
return $ Lwhile e1' e2'
Lfor v e1 e2 e3 -> do
e1' <- eliminateArray ary elems e1
e2' <- eliminateArray ary elems e2
e3' <- eliminateArray ary elems e3
return $ Lfor v e1' e2' e3'
Lassign v e -> do
e' <- eliminateArray ary elems e
return $ Lassign v e'
arrayOpt = simplif
|
8bef7026be7647b996796110f086b09f0de8a3fa059a93b69d834894769a266c | mikesperber/icfp2020 | PolysemyUtil.hs | # LANGUAGE DataKinds , FlexibleContexts , GADTs , LambdaCase , PolyKinds , RankNTypes , ScopedTypeVariables , TypeApplications , TypeOperators , TypeFamilies , UnicodeSyntax #
# LANGUAGE FlexibleInstances #
# LANGUAGE MonoLocalBinds #
# LANGUAGE UndecidableInstances #
module PolysemyUtil where
import Polysemy
import Polysemy.Internal
import Polysemy.Internal.Union
-- | Introduce a new effect @e3@ in the /third/ position of the effect
-- row, i.e. lift a computation over @e1 ': e2 ': r@ into
-- @e1 ': e2 ': e3 ': r@ without disturbing uses of @e1@ and @e2@.
-- This is the two-effects-deep analogue of polysemy's @raiseUnder@.
raise2Under :: ∀ e3 e2 e1 r a. Sem (e1 ': e2 ': r) a -> Sem (e1 ': e2 ': e3 ': r) a
raise2Under = hoistSem $ hoist raise2Under . weakenUnder
  where
    -- Re-index a 'Union' value: effects at positions 0 and 1 stay put,
    -- anything deeper moves one 'There' further to skip over @e3@.
    weakenUnder :: ∀ m x. Union (e1 ': e2 ': r) m x -> Union (e1 ': e2 ': e3 ': r) m x
    weakenUnder (Union Here a) = Union Here a
    weakenUnder (Union (There Here) a) = Union (There Here) a
    weakenUnder (Union (There (There n)) a) = Union (There (There (There n))) a
    {-# INLINE weakenUnder #-}
{-# INLINE raise2Under #-}
| null | https://raw.githubusercontent.com/mikesperber/icfp2020/974b8b7ca7ee8811f43398551c97c8ac7ae1cbb1/hearts/src/PolysemyUtil.hs | haskell | # LANGUAGE DataKinds , FlexibleContexts , GADTs , LambdaCase , PolyKinds , RankNTypes , ScopedTypeVariables , TypeApplications , TypeOperators , TypeFamilies , UnicodeSyntax #
# LANGUAGE FlexibleInstances #
# LANGUAGE MonoLocalBinds #
# LANGUAGE UndecidableInstances #
module PolysemyUtil where
import Polysemy
import Polysemy.Internal
import Polysemy.Internal.Union
raise2Under :: ∀ e3 e2 e1 r a. Sem (e1 ': e2 ': r) a -> Sem (e1 ': e2 ': e3 ': r) a
raise2Under = hoistSem $ hoist raise2Under . weakenUnder
where
weakenUnder :: ∀ m x. Union (e1 ': e2 ': r) m x -> Union (e1 ': e2 ': e3 ': r) m x
weakenUnder (Union Here a) = Union Here a
weakenUnder (Union (There Here) a) = Union (There Here) a
weakenUnder (Union (There (There n)) a) = Union (There (There (There n))) a
# INLINE weakenUnder #
# INLINE raise2Under #
| |
d0a4a8feacbf67c599293e72667b63b660011520e79d530525b0f2b44941074d | xmonad/xmonad-contrib | AvoidFloats.hs | # LANGUAGE PatternGuards , FlexibleContexts , FlexibleInstances , MultiParamTypeClasses , TupleSections #
-----------------------------------------------------------------------------
-- |
Module : XMonad . Layout . AvoidFloats
-- Description : Avoid floats when placing tiled windows.
Copyright : ( c ) 2014 < >
-- License : BSD3-style (see LICENSE)
--
Maintainer : ( c ) < >
-- Stability : unstable
-- Portability : unportable
--
-- Find a maximum empty rectangle around floating windows and use that area
-- to display non-floating windows.
--
-----------------------------------------------------------------------------
module XMonad.Layout.AvoidFloats (
-- * Usage
-- $usage
avoidFloats,
avoidFloats',
AvoidFloatMsg(..),
AvoidFloatItemMsg(..),
) where
import XMonad
import XMonad.Layout.LayoutModifier
import XMonad.Prelude (fi, mapMaybe, maximumBy, sortOn)
import qualified XMonad.StackSet as W
import Data.Ord
import qualified Data.Map as M
import qualified Data.Set as S
-- $usage
You can use this module with the following in your file :
--
> import XMonad . Layout . AvoidFloats
--
and modify the layouts to call avoidFloats on the layouts where you want the
-- non-floating windows to not be behind floating windows.
--
-- > layoutHook = ... ||| avoidFloats Full ||| ...
--
-- For more detailed instructions on editing the layoutHook see
-- <#customizing-xmonad the tutorial> and
-- "XMonad.Doc.Extending#Editing_the_layout_hook".
--
-- Then add appropriate key bindings, for example:
--
> , ( ( modm .| . shiftMask , xK_b ) , )
> , ( ( modm .| . controlMask , xK_b ) , withFocused $ sendMessage . AvoidFloatToggleItem )
> , ( ( modm .| . shiftMask .| . controlMask , xK_b ) , sendMessage ( AvoidFloatSet False ) > > sendMessage AvoidFloatClearItems )
--
-- For detailed instructions on editing your key bindings, see
-- <#customizing-xmonad the tutorial>.
--
-- Note that this module is incompatible with an old way of configuring
" XMonad . Actions . FloatSnap " . If you are having problems , please update your
-- configuration.
-- | Avoid floating windows unless the resulting area for windows would be too small.
-- In that case, use the whole screen as if this layout modifier wasn't there.
-- No windows are avoided by default, they need to be added using signals.
--
-- Equivalent to @'avoidFloats'' 100 100 False@: a minimum usable area of
-- 100x100 pixels, with nothing avoided until requested via messages.
avoidFloats
    :: l a -- ^ Layout to modify.
    -> ModifiedLayout AvoidFloats l a
avoidFloats = avoidFloats' 100 100 False
-- | Avoid floating windows unless the resulting area for windows would be too small.
-- In that case, use the whole screen as if this layout modifier wasn't there.
avoidFloats'
    :: Int -- ^ Minimum width of the area used for non-floating windows.
    -> Int -- ^ Minimum height of the area used for non-floating windows.
    -> Bool -- ^ If floating windows should be avoided by default.
    -> l a -- ^ Layout to modify.
    -> ModifiedLayout AvoidFloats l a
-- Starts with an empty cache and no explicitly chosen windows.
avoidFloats' w h act = ModifiedLayout (AvoidFloats Nothing S.empty w h act)
-- | Internal state of the modifier.
data AvoidFloats a = AvoidFloats
    { cache :: Maybe ((M.Map a W.RationalRect, Rectangle), Rectangle)
      -- ^ Memoised result: the (floating windows, screen rectangle) key
      --   it was computed for, and the rectangle chosen for tiling.
    , chosen :: S.Set a
      -- ^ Windows explicitly marked to be avoided.
    , minw :: Int
      -- ^ Minimum acceptable width of the tiling area.
    , minh :: Int
      -- ^ Minimum acceptable height of the tiling area.
    , avoidAll :: Bool
      -- ^ If 'True', avoid every floating window, not just 'chosen'.
    } deriving (Read, Show)
-- | Change the state of the whole avoid float layout modifier.
data AvoidFloatMsg
    = AvoidFloatToggle -- ^ Toggle between avoiding all or only selected.
    | AvoidFloatSet Bool -- ^ Set if all floating windows should be avoided.
    | AvoidFloatClearItems -- ^ Clear the set of windows to specifically avoid.
-- | Change the state of the avoid float layout modifier concerning a specific window.
data AvoidFloatItemMsg a
    = AvoidFloatAddItem a -- ^ Add a window to always avoid.
    | AvoidFloatRemoveItem a -- ^ Stop always avoiding selected window.
    | AvoidFloatToggleItem a -- ^ Toggle between always avoiding selected window.

instance Message AvoidFloatMsg
instance Typeable a => Message (AvoidFloatItemMsg a)
instance LayoutModifier AvoidFloats Window where
    -- Lay the managed windows out inside the largest empty rectangle not
    -- covered by the avoided floating windows.  The chosen rectangle is
    -- cached together with the (floating set, screen rectangle) key it
    -- was derived from, so the geometry search only reruns when either
    -- changes.
    modifyLayoutWithUpdate lm w r = withDisplay $ \d -> do
        floating <- gets $ W.floating . windowset
        case cache lm of
            Just (key, mer) | key == (floating,r) -> (, Nothing) <$> runLayout w mer
            _ -> do rs <- io $ map toRect <$> mapM (getWindowAttributes d) (filter shouldAvoid $ M.keys floating)
                    -- Fix: when no empty rectangle satisfies the minimum
                    -- size, fall back to the whole screen (as documented
                    -- for 'avoidFloats') instead of letting 'maximumBy'
                    -- crash on an empty list.
                    let candidates = filter bigEnough $ maxEmptyRectangles r rs
                        mer = if null candidates
                                then r
                                else maximumBy (comparing area) candidates
                    (, Just $ pruneWindows $ lm { cache = Just ((floating,r),mer) }) <$> runLayout w mer
        where
            -- Outer geometry of a window, borders included.
            toRect :: WindowAttributes -> Rectangle
            toRect wa = let b = fi $ wa_border_width wa
                        in Rectangle (fi $ wa_x wa) (fi $ wa_y wa) (fi $ wa_width wa + 2*b) (fi $ wa_height wa + 2*b)

            -- Does the rectangle meet the configured minimum dimensions?
            bigEnough :: Rectangle -> Bool
            bigEnough rect = rect_width rect >= fi (minw lm) && rect_height rect >= fi (minh lm)

            -- Avoid everything when 'avoidAll' is set, otherwise only
            -- explicitly chosen windows.
            shouldAvoid a = avoidAll lm || a `S.member` chosen lm

    -- Interpret the control messages; every state change invalidates the
    -- cached rectangle.  Messages that would not change anything return
    -- 'Nothing' so the layout is not rerun needlessly.
    pureMess lm m
        | Just AvoidFloatToggle <- fromMessage m = Just $ lm { avoidAll = not (avoidAll lm), cache = Nothing }
        | Just (AvoidFloatSet s) <- fromMessage m, s /= avoidAll lm = Just $ lm { avoidAll = s, cache = Nothing }
        | Just AvoidFloatClearItems <- fromMessage m = Just $ lm { chosen = S.empty, cache = Nothing }
        | Just (AvoidFloatAddItem a) <- fromMessage m, a `S.notMember` chosen lm = Just $ lm { chosen = S.insert a (chosen lm), cache = Nothing }
        | Just (AvoidFloatRemoveItem a) <- fromMessage m, a `S.member` chosen lm = Just $ lm { chosen = S.delete a (chosen lm), cache = Nothing }
        | Just (AvoidFloatToggleItem a) <- fromMessage m = let op = if a `S.member` chosen lm then S.delete else S.insert
                                                           in Just $ lm { chosen = op a (chosen lm), cache = Nothing }
        | otherwise = Nothing
-- Drop chosen windows that are no longer floating (according to the
-- floating map recorded in the cache), so the set cannot grow without
-- bound as windows come and go.
pruneWindows :: AvoidFloats Window -> AvoidFloats Window
pruneWindows lm = case cache lm of
    Nothing -> lm
    Just ((floating,_),_) -> lm { chosen = S.filter (`M.member` floating) (chosen lm) }
-- | Find all maximum empty rectangles (MERs) that are axis aligned. This is
-- done in O(n^2) time using a modified version of the algorithm MERAlg 1
-- described in \"On the maximum empty rectangle problem\" by A. Naamad,
-- D.T. Lee and W.-L. Hsu. Published in Discrete Applied Mathematics 8 (1984.)
maxEmptyRectangles :: Rectangle -> [Rectangle] -> [Rectangle]
maxEmptyRectangles br rectangles = filter (\a -> area a > 0) $ upAndDownEdge ++ noneOrUpEdge ++ downEdge
  where
    -- Gaps spanning the full height of the bounding rectangle.
    upAndDownEdge = findGaps br rectangles
    -- Candidates bounded by obstacle edges (and the top of the screen).
    noneOrUpEdge = concatMap (everyLower br bottoms) bottoms
    -- Candidates reaching the bottom edge of the bounding rectangle.
    downEdge = mapMaybe (bottomEdge br bottoms) bottoms
    -- All obstacles, pre-split and ordered by their bottom edge.
    bottoms = sortOn bottom $ splitContainers rectangles
-- Candidate MERs whose bottom edge is the top edge of @r@: one per
-- obstacle processed by 'everyUpper', plus a final candidate reaching
-- the top of the bounding rectangle with the fully narrowed bounds.
everyLower :: Rectangle -> [Rectangle] -> Rectangle -> [Rectangle]
everyLower br bottoms r = let (rs, boundLeft, boundRight, boundRects) = foldr (everyUpper r) ([], left br, right br, reverse bottoms) bottoms
                              (boundLeft', boundRight', _) = shrinkBounds boundLeft boundRight boundRects r (top br)
                          in mkRect boundLeft' boundRight' (top br) (top r) ?: rs
everyUpper
    :: Rectangle -- ^ The current rectangle where the top edge is used.
    -> Rectangle -- ^ The current rectangle where the bottom edge is used.
    -> ([Rectangle],Int,Int,[Rectangle]) -- ^ List of MERs found so far, left bound, right bound and list of rectangles used for bounds.
    -> ([Rectangle],Int,Int,[Rectangle])
everyUpper lower upper (rs, boundLeft, boundRight, boundRects) = (r?:rs, boundLeft', boundRight', boundRects')
  where
    -- Candidate spanning from the bottom of @upper@ down to the top of
    -- @lower@, within the horizontal bounds narrowed so far.
    r = mkRect boundLeft' boundRight' (bottom upper) (top lower)
    (boundLeft', boundRight', boundRects') = shrinkBounds boundLeft boundRight boundRects lower (bottom upper)
-- Narrow the horizontal (left, right) bounds using every obstacle whose
-- bottom edge lies beyond @upperLimit@; obstacles not yet relevant are
-- handed back for later iterations.
shrinkBounds :: Int -> Int -> [Rectangle] -> Rectangle -> Int -> (Int, Int, [Rectangle])
shrinkBounds boundLeft boundRight boundRects lower upperLimit = (boundLeft', boundRight', boundRects')
  where
    (shrinkers, boundRects') = span (\a -> bottom a > upperLimit) boundRects
    -- Only obstacles that actually overlap the candidate's vertical
    -- span can narrow the bounds.
    (boundLeft', boundRight') = foldr (shrinkBounds' lower) (boundLeft, boundRight) $ filter (\a -> top a < top lower) shrinkers
-- Narrow the (left, right) bounds around @mr@ by a single obstacle @r@,
-- keeping @mr@ inside the bounds.
shrinkBounds' :: Rectangle -> Rectangle -> (Int, Int) -> (Int, Int)
shrinkBounds' mr r (boundLeft, boundRight)
    | right r < right mr = (max boundLeft $ right r, boundRight)
    | left r > left mr = (boundLeft, min boundRight $ left r)
    | otherwise = (right r, left r) -- r is horizontally covering all of mr; make sure the area of this rectangle will always be 0.
-- The candidate MER between the bottom edge of @r@ and the bottom of
-- the bounding rectangle, horizontally bounded by the obstacles below;
-- 'Nothing' when some obstacle below covers @r@'s full width.
bottomEdge :: Rectangle -> [Rectangle] -> Rectangle -> Maybe Rectangle
bottomEdge br bottoms r = let rs = filter (\a -> bottom r < bottom a && top a < bottom br) bottoms
                              boundLeft = maximum $ left br : filter (< right r) (map right rs)
                              boundRight = minimum $ right br : filter (> left r) (map left rs)
                          in if any (\a -> left a <= left r && right r <= right a) rs
                             then Nothing
                             else mkRect boundLeft boundRight (bottom r) (bottom br)
-- | Split rectangles that horizontally fully contains another rectangle
-- without sharing either the left or right side.
splitContainers :: [Rectangle] -> [Rectangle]
splitContainers rects = splitContainers' [] $ sortOn rect_width rects
  where
    -- Narrowest rectangles first, so each processed rectangle can act
    -- as a guide that splits any wider rectangle strictly containing it.
    splitContainers' :: [Rectangle] -> [Rectangle] -> [Rectangle]
    splitContainers' res [] = res
    splitContainers' res (r:rs) = splitContainers' (r:res) $ concatMap (doSplit r) rs

    -- Cut @r@ in two through the horizontal middle of @guide@ when
    -- @guide@ sits strictly inside @r@; otherwise keep @r@ whole.
    doSplit :: Rectangle -> Rectangle -> [Rectangle]
    doSplit guide r
        | left guide <= left r || right r <= right guide = [r]
        | otherwise = let w0 = fi (rect_x guide - rect_x r) + (rect_width guide `div` 2)
                          w1 = rect_width r - w0
                      in [ Rectangle (rect_x r) (rect_y r) w0 (rect_height r)
                         , Rectangle (rect_x r + fi w0) (rect_y r) w1 (rect_height r)
                         ]
-- | Find all horizontal gaps that are left empty from top to bottom of screen.
findGaps
    :: Rectangle -- ^ Bounding rectangle.
    -> [Rectangle] -- ^ List of all rectangles that can cover areas in the bounding rectangle.
    -> [Rectangle]
findGaps br rs = let (gaps,end) = foldr findGaps' ([], left br) $ sortOn (Down . left) $ filter inBounds rs
                     lastgap = mkRect end (right br) (top br) (bottom br)
                 in lastgap?:gaps
  where
    -- Sweep obstacles left to right (foldr over a right-to-left sorted
    -- list), emitting the gap before each one and advancing the cursor
    -- past its right edge.
    findGaps' :: Rectangle -> ([Rectangle], Int) -> ([Rectangle], Int)
    findGaps' r (gaps, end) = let gap = mkRect end (left r) (top br) (bottom br)
                              in (gap?:gaps, max end (right r))
    -- Keep only obstacles overlapping the bounding rectangle horizontally.
    inBounds :: Rectangle -> Bool
    inBounds r = left r < right br && left br < right r
-- | Prepend the value inside a 'Just' onto the list; 'Nothing' leaves
-- the list unchanged.
(?:) :: Maybe a -> [a] -> [a]
mx ?: xs = maybe xs (: xs) mx
-- Pixel coordinates of a rectangle's edges as 'Int's (X11 coordinates:
-- y grows downwards, so 'bottom' is the larger y), plus its area.
left, right, top, bottom, area :: Rectangle -> Int
left r = fi (rect_x r)
right r = fi (rect_x r) + fi (rect_width r)
top r = fi (rect_y r)
bottom r = fi (rect_y r) + fi (rect_height r)
area r = fi (rect_width r * rect_height r)
-- Build a rectangle from its left/right/top/bottom edges, clamping
-- negative extents to zero; 'Nothing' when the result would be empty.
mkRect :: Int -> Int -> Int -> Int -> Maybe Rectangle
mkRect l r t b = let rect = Rectangle (fi l) (fi t) (fi $ max 0 $ r-l) (fi $ max 0 $ b-t)
                 in if area rect > 0
                    then Just rect
                    else Nothing
| null | https://raw.githubusercontent.com/xmonad/xmonad-contrib/571d017b8259340971db1736eedc992a54e9022c/XMonad/Layout/AvoidFloats.hs | haskell | ---------------------------------------------------------------------------
|
Description : Avoid floats when placing tiled windows.
License : BSD3-style (see LICENSE)
Stability : unstable
Portability : unportable
Find a maximum empty rectangle around floating windows and use that area
to display non-floating windows.
---------------------------------------------------------------------------
* Usage
$usage
$usage
non-floating windows to not be behind floating windows.
> layoutHook = ... ||| avoidFloats Full ||| ...
For more detailed instructions on editing the layoutHook see
<#customizing-xmonad the tutorial> and
"XMonad.Doc.Extending#Editing_the_layout_hook".
Then add appropriate key bindings, for example:
For detailed instructions on editing your key bindings, see
<#customizing-xmonad the tutorial>.
Note that this module is incompatible with an old way of configuring
configuration.
| Avoid floating windows unless the resulting area for windows would be too small.
In that case, use the whole screen as if this layout modifier wasn't there.
No windows are avoided by default, they need to be added using signals.
^ Layout to modify.
| Avoid floating windows unless the resulting area for windows would be too small.
In that case, use the whole screen as if this layout modifier wasn't there.
^ Minimum width of the area used for non-floating windows.
^ Minimum height of the area used for non-floating windows.
^ If floating windows should be avoided by default.
^ Layout to modify.
| Change the state of the whole avoid float layout modifier.
^ Toggle between avoiding all or only selected.
^ Set if all all floating windows should be avoided.
^ Clear the set of windows to specifically avoid.
| Change the state of the avoid float layout modifier conserning a specific window.
^ Add a window to always avoid.
^ Stop always avoiding selected window.
^ Toggle between always avoiding selected window.
^ The current rectangle where the top edge is used.
^ The current rectangle where the bottom edge is used.
^ List of MERs found so far, left bound, right bound and list of rectangles used for bounds.
r is horizontally covering all of mr; make sure the area of this rectangle will always be 0.
| Split rectangles that horizontally fully contains another rectangle
without sharing either the left or right side.
| Find all horizontal gaps that are left empty from top to bottom of screen.
^ Bounding rectangle.
^ List of all rectangles that can cover areas in the bounding rectangle. | # LANGUAGE PatternGuards , FlexibleContexts , FlexibleInstances , MultiParamTypeClasses , TupleSections #
Module : XMonad . Layout . AvoidFloats
Copyright : ( c ) 2014 < >
Maintainer : ( c ) < >
module XMonad.Layout.AvoidFloats (
avoidFloats,
avoidFloats',
AvoidFloatMsg(..),
AvoidFloatItemMsg(..),
) where
import XMonad
import XMonad.Layout.LayoutModifier
import XMonad.Prelude (fi, mapMaybe, maximumBy, sortOn)
import qualified XMonad.StackSet as W
import Data.Ord
import qualified Data.Map as M
import qualified Data.Set as S
You can use this module with the following in your file :
> import XMonad . Layout . AvoidFloats
and modify the layouts to call avoidFloats on the layouts where you want the
> , ( ( modm .| . shiftMask , xK_b ) , )
> , ( ( modm .| . controlMask , xK_b ) , withFocused $ sendMessage . AvoidFloatToggleItem )
> , ( ( modm .| . shiftMask .| . controlMask , xK_b ) , sendMessage ( AvoidFloatSet False ) > > sendMessage AvoidFloatClearItems )
" XMonad . Actions . FloatSnap " . If you are having problems , please update your
avoidFloats
-> ModifiedLayout AvoidFloats l a
avoidFloats = avoidFloats' 100 100 False
avoidFloats'
-> ModifiedLayout AvoidFloats l a
avoidFloats' w h act = ModifiedLayout (AvoidFloats Nothing S.empty w h act)
data AvoidFloats a = AvoidFloats
{ cache :: Maybe ((M.Map a W.RationalRect, Rectangle), Rectangle)
, chosen :: S.Set a
, minw :: Int
, minh :: Int
, avoidAll :: Bool
} deriving (Read, Show)
data AvoidFloatMsg
data AvoidFloatItemMsg a
instance Message AvoidFloatMsg
instance Typeable a => Message (AvoidFloatItemMsg a)
instance LayoutModifier AvoidFloats Window where
modifyLayoutWithUpdate lm w r = withDisplay $ \d -> do
floating <- gets $ W.floating . windowset
case cache lm of
Just (key, mer) | key == (floating,r) -> (, Nothing) <$> runLayout w mer
_ -> do rs <- io $ map toRect <$> mapM (getWindowAttributes d) (filter shouldAvoid $ M.keys floating)
let mer = maximumBy (comparing area) $ filter bigEnough $ maxEmptyRectangles r rs
(, Just $ pruneWindows $ lm { cache = Just ((floating,r),mer) }) <$> runLayout w mer
where
toRect :: WindowAttributes -> Rectangle
toRect wa = let b = fi $ wa_border_width wa
in Rectangle (fi $ wa_x wa) (fi $ wa_y wa) (fi $ wa_width wa + 2*b) (fi $ wa_height wa + 2*b)
bigEnough :: Rectangle -> Bool
bigEnough rect = rect_width rect >= fi (minw lm) && rect_height rect >= fi (minh lm)
shouldAvoid a = avoidAll lm || a `S.member` chosen lm
pureMess lm m
| Just AvoidFloatToggle <- fromMessage m = Just $ lm { avoidAll = not (avoidAll lm), cache = Nothing }
| Just (AvoidFloatSet s) <- fromMessage m, s /= avoidAll lm = Just $ lm { avoidAll = s, cache = Nothing }
| Just AvoidFloatClearItems <- fromMessage m = Just $ lm { chosen = S.empty, cache = Nothing }
| Just (AvoidFloatAddItem a) <- fromMessage m, a `S.notMember` chosen lm = Just $ lm { chosen = S.insert a (chosen lm), cache = Nothing }
| Just (AvoidFloatRemoveItem a) <- fromMessage m, a `S.member` chosen lm = Just $ lm { chosen = S.delete a (chosen lm), cache = Nothing }
| Just (AvoidFloatToggleItem a) <- fromMessage m = let op = if a `S.member` chosen lm then S.delete else S.insert
in Just $ lm { chosen = op a (chosen lm), cache = Nothing }
| otherwise = Nothing
pruneWindows :: AvoidFloats Window -> AvoidFloats Window
pruneWindows lm = case cache lm of
Nothing -> lm
Just ((floating,_),_) -> lm { chosen = S.filter (`M.member` floating) (chosen lm) }
| Find all maximum empty rectangles ( ) that are axis aligned . This is
done in O(n^2 ) time using a modified version of the algoprithm MERAlg 1
described in \"On the maximum empty rectangle problem\ " by , D.T.
and W.-L HSU . Published in Discrete Applied Mathematics 8 ( 1984 . )
maxEmptyRectangles :: Rectangle -> [Rectangle] -> [Rectangle]
maxEmptyRectangles br rectangles = filter (\a -> area a > 0) $ upAndDownEdge ++ noneOrUpEdge ++ downEdge
where
upAndDownEdge = findGaps br rectangles
noneOrUpEdge = concatMap (everyLower br bottoms) bottoms
downEdge = mapMaybe (bottomEdge br bottoms) bottoms
bottoms = sortOn bottom $ splitContainers rectangles
everyLower :: Rectangle -> [Rectangle] -> Rectangle -> [Rectangle]
everyLower br bottoms r = let (rs, boundLeft, boundRight, boundRects) = foldr (everyUpper r) ([], left br, right br, reverse bottoms) bottoms
(boundLeft', boundRight', _) = shrinkBounds boundLeft boundRight boundRects r (top br)
in mkRect boundLeft' boundRight' (top br) (top r) ?: rs
everyUpper
-> ([Rectangle],Int,Int,[Rectangle])
everyUpper lower upper (rs, boundLeft, boundRight, boundRects) = (r?:rs, boundLeft', boundRight', boundRects')
where
r = mkRect boundLeft' boundRight' (bottom upper) (top lower)
(boundLeft', boundRight', boundRects') = shrinkBounds boundLeft boundRight boundRects lower (bottom upper)
shrinkBounds :: Int -> Int -> [Rectangle] -> Rectangle -> Int -> (Int, Int, [Rectangle])
shrinkBounds boundLeft boundRight boundRects lower upperLimit = (boundLeft', boundRight', boundRects')
where
(shrinkers, boundRects') = span (\a -> bottom a > upperLimit) boundRects
(boundLeft', boundRight') = foldr (shrinkBounds' lower) (boundLeft, boundRight) $ filter (\a -> top a < top lower) shrinkers
shrinkBounds' :: Rectangle -> Rectangle -> (Int, Int) -> (Int, Int)
shrinkBounds' mr r (boundLeft, boundRight)
| right r < right mr = (max boundLeft $ right r, boundRight)
| left r > left mr = (boundLeft, min boundRight $ left r)
bottomEdge :: Rectangle -> [Rectangle] -> Rectangle -> Maybe Rectangle
bottomEdge br bottoms r = let rs = filter (\a -> bottom r < bottom a && top a < bottom br) bottoms
boundLeft = maximum $ left br : filter (< right r) (map right rs)
boundRight = minimum $ right br : filter (> left r) (map left rs)
in if any (\a -> left a <= left r && right r <= right a) rs
then Nothing
else mkRect boundLeft boundRight (bottom r) (bottom br)
splitContainers :: [Rectangle] -> [Rectangle]
splitContainers rects = splitContainers' [] $ sortOn rect_width rects
where
splitContainers' :: [Rectangle] -> [Rectangle] -> [Rectangle]
splitContainers' res [] = res
splitContainers' res (r:rs) = splitContainers' (r:res) $ concatMap (doSplit r) rs
doSplit :: Rectangle -> Rectangle -> [Rectangle]
doSplit guide r
| left guide <= left r || right r <= right guide = [r]
| otherwise = let w0 = fi (rect_x guide - rect_x r) + (rect_width guide `div` 2)
w1 = rect_width r - w0
in [ Rectangle (rect_x r) (rect_y r) w0 (rect_height r)
, Rectangle (rect_x r + fi w0) (rect_y r) w1 (rect_height r)
]
findGaps
-> [Rectangle]
findGaps br rs = let (gaps,end) = foldr findGaps' ([], left br) $ sortOn (Down . left) $ filter inBounds rs
lastgap = mkRect end (right br) (top br) (bottom br)
in lastgap?:gaps
where
findGaps' :: Rectangle -> ([Rectangle], Int) -> ([Rectangle], Int)
findGaps' r (gaps, end) = let gap = mkRect end (left r) (top br) (bottom br)
in (gap?:gaps, max end (right r))
inBounds :: Rectangle -> Bool
inBounds r = left r < right br && left br < right r
(?:) :: Maybe a -> [a] -> [a]
Just x ?: xs = x:xs
_ ?: xs = xs
left, right, top, bottom, area :: Rectangle -> Int
left r = fi (rect_x r)
right r = fi (rect_x r) + fi (rect_width r)
top r = fi (rect_y r)
bottom r = fi (rect_y r) + fi (rect_height r)
area r = fi (rect_width r * rect_height r)
mkRect :: Int -> Int -> Int -> Int -> Maybe Rectangle
mkRect l r t b = let rect = Rectangle (fi l) (fi t) (fi $ max 0 $ r-l) (fi $ max 0 $ b-t)
in if area rect > 0
then Just rect
else Nothing
|
9ddcd6789066ae7bf730ba765e9cc726d90c16db5b68e5b282e4dd56c25b6246 | janestreet/bonsai | util.mli | open! Core
open! Js_of_ocaml
(** [am_running_how] provides information on how the code is currently being run:
- [`Node_test] means that the code is being run using node as part of an expect_test
- [`Node_benchmark] means that the code is being run using node as part of a benchmark
- [`Node] means that the code is being run using node, but not as part of an
expect_test or a benchmark
- [`Browser_benchmark] means that the code is being run in the browser as part of a
benchmark
- [`Browser] means that the code is being run in a browser but not as part of a
benchmark
*)
val am_running_how
: [ `Browser | `Browser_benchmark | `Node | `Node_benchmark | `Node_test ]
(** [am_within_disabled_fieldset] traverses up the DOM to see whether an event occurred
    within a fieldset element with the disabled attribute. As this function requires DOM
    interaction, it will return [false] if the code is not running in the browser.

    Note: because this function bubbles up from the target of the event, it's possible
    that the event occurs within a disabled fieldset, but the form element which performs
    this check is not within a disabled fieldset (or vice versa).

    For example, mousemove events will originate from the element under the mouse, so if
    the mouse is over a different disabled form, [am_within_disabled_fieldset] will be
    [true], even if the component which performs this check is not.
*)
val am_within_disabled_fieldset : #Dom_html.event Js.t -> bool
| null | https://raw.githubusercontent.com/janestreet/bonsai/4baeedc75bf73a0915e04dc02d8a49b78779e9b0/web/util.mli | ocaml | * [am_running_how] provides information on how the code is currently being run:
- [`Node_test] means that the code is being run using node as part of an expect_test
- [`Node_benchmark] means that the code is being run using node as part of a benchmark
- [`Node] means that the code is being run using node, but not as part of an
expect_test or a benchmark
- [`Browser_benchmark] means that the code is being run in the browser as part of a
benchmark
- [`Browser] means that the code is being run in a browser but not as part of a
benchmark
| open! Core
open! Js_of_ocaml
val am_running_how
: [ `Browser | `Browser_benchmark | `Node | `Node_benchmark | `Node_test ]
* [ am_within_disabled_fieldset ] traverses up the DOM to see whether an event occurred
within a fieldset element with the disabled attribute . As this function requires DOM
interaction , it will return [ false ] if the code is not running in the browser .
Note : because this function bubbles up from the target of the event , it 's possible
that the event occurs within a disabled fieldset , but the form element which performs
this check is not within a disabled fieldset ( or vice versa ) .
For example , mousemove events will originate from the element under the mouse , so if
the mouse is over a different disabled form , [ am_within_disabled_fieldset ] will be
[ true ] , even if the component which performs this check is not .
within a fieldset element with the disabled attribute. As this function requires DOM
interaction, it will return [false] if the code is not running in the browser.
Note: because this function bubbles up from the target of the event, it's possible
that the event occurs within a disabled fieldset, but the form element which performs
this check is not within a disabled fieldset (or vice versa).
For example, mousemove events will originate from the element under the mouse, so if
the mouse is over a different disabled form, [am_within_disabled_fieldset] will be
[true], even if the component which performs this check is not.
*)
val am_within_disabled_fieldset : #Dom_html.event Js.t -> bool
|
83ad4aee06d7f650bb2bfa7943f17bad78bca8b944c1a52f75e23b30e9e13da7 | databrary/databrary | Document.hs | # LANGUAGE GeneralizedNewtypeDeriving , TemplateHaskell , OverloadedStrings #
module Solr.Document
( SolrDocument(..)
, SolrRecordMeasures(..)
, SolrSegment(..)
, metricField
) where
import qualified Data.Aeson as JSON
import qualified Data.Aeson.TH as JTH
import qualified Data.ByteString as BS
import Data.Char (isAlphaNum)
import Data.Int (Int16)
import qualified Data.HashMap.Strict as HM
import Data.Monoid ((<>))
import qualified Data.Text as T
import Model.Id.Types
import Model.Permission.Types
import Model.Release.Types
import Model.Party.Types
import Model.Volume.Types
import Model.Container.Types
import Model.Offset
import Model.Segment
import Model.Format.Types
import Model.Asset.Types
import Model.Time
import Model.Age
import Model.Record.Types
import Model.Category.Types
import Model.Metric
import Model.Tag.Types
import Model.Comment.Types
import StringUtil
-- | Sanitise a field-name fragment for Solr: every character that is
-- not alphanumeric is replaced by an underscore.
safeField :: T.Text -> T.Text
safeField = T.map (\c -> if isAlphaNum c then c else '_')
newtype SolrRecordMeasures = SolrRecordMeasures [(Metric, MeasureDatum)]
-- | Type-label component of a metric's Solr field name: text metrics
-- with a fixed option list are "enum", long-form text is "long", other
-- text is "text"; numeric, date and void metrics use like-named labels.
metricLabel :: Metric -> T.Text
metricLabel Metric{ metricType = MeasureTypeText, metricOptions = _:_ } = "enum"
metricLabel m@Metric{ metricType = MeasureTypeText }
  | metricLong m = "long"
  | otherwise = "text"
metricLabel Metric{ metricType = MeasureTypeNumeric } = "numeric"
metricLabel Metric{ metricType = MeasureTypeDate } = "date"
metricLabel Metric{ metricType = MeasureTypeVoid } = "void"
-- | Solr dynamic-field name for a metric: @record_@ plus the type label
-- plus the sanitised metric name, e.g. @record_numeric_age@-style names.
metricField :: Metric -> T.Text
metricField m = "record_" <> metricLabel m <> ('_' `T.cons` safeField (metricName m))
-- | Convert a raw measure datum to its indexed JSON form.
-- Slight hack because we actually index dates as datetimes: a fixed
-- noon-UTC time is appended to the stored date string.  Void measures
-- index as boolean 'True' (presence only).
metricDatum :: Metric -> MeasureDatum -> JSON.Value
metricDatum Metric{ metricType = MeasureTypeDate } d = JSON.toJSON $ d <> "T12:00:00Z"
metricDatum Metric{ metricType = MeasureTypeVoid } _ = JSON.toJSON True
metricDatum _ d = JSON.toJSON d
-- | Render one (metric, datum) pair as the JSON key/value for its Solr field.
measureKeyValue :: JSON.KeyValue kv => (Metric, MeasureDatum) -> kv
measureKeyValue (m, d) = metricField m JSON..= metricDatum m d
-- Serialise the measure list as a flat JSON object keyed by Solr field name.
instance JSON.ToJSON SolrRecordMeasures where
  toJSON (SolrRecordMeasures ms) =
    JSON.object $ map measureKeyValue ms
  toEncoding (SolrRecordMeasures ms) =
    JSON.pairs $ foldr ((<>) . measureKeyValue) mempty ms
-- | Wrapper around 'Segment' whose textual form renders offsets in
-- milliseconds (via 'showSegmentWith').
newtype SolrSegment = SolrSegment Segment deriving (JSON.FromJSON)

instance Show SolrSegment where
  showsPrec _ (SolrSegment s) = showSegmentWith (shows . offsetMillis) s

-- Indexed as the 'Show' text of the segment.
instance JSON.ToJSON SolrSegment where
  toJSON s = JSON.String $ T.pack $ show s
  toEncoding s = JSON.toEncoding $ show s
-- | One document in the Solr index.  Each constructor corresponds to a
-- distinct @content_type@ value in the index (see 'solrToJSON' for the
-- field-name mangling); 'solrId' is the unique document key shared by
-- all constructors.
data SolrDocument
  = SolrParty
    { solrId :: !BS.ByteString
    , solrPartyId :: Id Party
    , solrPartySortName :: T.Text
    , solrPartyPreName :: Maybe T.Text
    , solrPartyAffiliation :: Maybe T.Text
    , solrPartyIsInstitution :: Bool
    , solrPartyAuthorization :: Maybe Permission
    }
  | SolrVolume
    { solrId :: !BS.ByteString
    , solrVolumeId :: Id Volume
    , solrName :: Maybe T.Text
    , solrBody :: Maybe T.Text -- body
    , solrVolumeOwnerIds :: [Id Party]
    , solrVolumeOwnerNames :: [T.Text]
    , solrCitation :: Maybe T.Text
    , solrCitationYear :: Maybe Int16
    }
  | SolrContainer
    { solrId :: !BS.ByteString
    , solrVolumeId :: Id Volume
    , solrContainerId :: Id Container
    , solrName :: Maybe T.Text
    , solrContainerTop :: Bool
    , solrContainerDate :: Maybe MaskedDate
    , solrRelease :: Maybe Release
    }
  | SolrAsset -- Slot
    { solrId :: !BS.ByteString
    , solrVolumeId :: Id Volume
    , solrContainerId :: Id Container
    , solrSegment :: SolrSegment
    , solrSegmentDuration :: Maybe Offset
    , solrAssetId :: Id Asset
    , solrName :: Maybe T.Text
    , solrFormatId :: Id Format
    , solrRelease :: Maybe Release
    }
  | SolrExcerpt
    { solrId :: !BS.ByteString
    , solrVolumeId :: Id Volume
    , solrContainerId :: Id Container
    , solrSegment :: SolrSegment
    , solrSegmentDuration :: Maybe Offset
    , solrAssetId :: Id Asset
    , solrRelease :: Maybe Release
    }
  | SolrRecord -- Slot
    { solrId :: !BS.ByteString
    , solrVolumeId :: Id Volume
    , solrContainerId :: Id Container
    , solrSegment :: SolrSegment
    , solrSegmentDuration :: Maybe Offset
    , solrRecordId :: Id Record
    , solrRecordCategoryId :: Id Category
    , solrRecordMeasures :: SolrRecordMeasures
    , solrRecordAge :: Maybe Age
    }
  | SolrTagId
    { solrId :: !BS.ByteString
    , solrTagId :: Id Tag
    , solrTagName :: TagName
    }
  | SolrTag -- Use
    { solrId :: !BS.ByteString
    , solrVolumeId :: Id Volume
    , solrContainerId :: Id Container
    , solrSegment :: SolrSegment
    , solrSegmentDuration :: Maybe Offset
    , solrTagId :: Id Tag
    , solrTagName :: TagName
    , solrKeyword :: Maybe TagName
    , solrPartyId :: Id Party
    }
  | SolrComment
    { solrId :: !BS.ByteString
    , solrVolumeId :: Id Volume
    , solrContainerId :: Id Container
    , solrSegment :: SolrSegment
    , solrSegmentDuration :: Maybe Offset
    , solrCommentId :: Id Comment
    , solrPartyId :: Id Party
    , solrBody :: Maybe T.Text
    }
$(return []) -- force new decl group for splice:
-- | Template-Haskell-derived serialiser: the "solr"/"Solr" prefixes are
-- stripped and the remainder converted from CamelCase with 'fromCamel';
-- the constructor tag is stored in the "content_type" field and
-- 'Nothing' fields are omitted.  The prefix-stripping lambdas are
-- deliberately partial: they run while the splice is evaluated, so a
-- misnamed field or constructor fails the build rather than the program.
solrToJSON :: SolrDocument -> JSON.Value
solrToJSON =
  $(JTH.mkToJSON JTH.defaultOptions
    { JTH.fieldLabelModifier = \('s':'o':'l':'r':s) -> fromCamel s
    , JTH.constructorTagModifier = \('S':'o':'l':'r':s) -> fromCamel s
    , JTH.omitNothingFields = True
    , JTH.sumEncoding = JTH.TaggedObject
      { JTH.tagFieldName = "content_type"
      , JTH.contentsFieldName = error "solrToJSON: contentsFieldName"
      }
    } ''SolrDocument)
-- | Post-process the derived JSON: for 'SolrRecord' documents, splice
-- the keys of the nested "record_measures" object into the top-level
-- object (Solr expects a flat document).  If that key is missing or not
-- an object, the value is left untouched.
fixToJSON :: SolrDocument -> JSON.Value -> JSON.Value
fixToJSON SolrRecord{} (JSON.Object o) = JSON.Object $
  maybe o (HM.union $ HM.delete k o) $ do
    JSON.Object m <- HM.lookup k o
    return m
  where k = "record_measures"
fixToJSON _ j = j
instance JSON.ToJSON SolrDocument where
  toJSON s = fixToJSON s $ solrToJSON s
  -- TODO: toEncoding (and cleanup fixToJSON)
| null | https://raw.githubusercontent.com/databrary/databrary/685f3c625b960268f5d9b04e3d7c6146bea5afda/src/Solr/Document.hs | haskell | slight hack because we actually index dates as datetimes
body
Slot
Slot
Use
force new decl group for splice: | # LANGUAGE GeneralizedNewtypeDeriving , TemplateHaskell , OverloadedStrings #
module Solr.Document
( SolrDocument(..)
, SolrRecordMeasures(..)
, SolrSegment(..)
, metricField
) where
import qualified Data.Aeson as JSON
import qualified Data.Aeson.TH as JTH
import qualified Data.ByteString as BS
import Data.Char (isAlphaNum)
import Data.Int (Int16)
import qualified Data.HashMap.Strict as HM
import Data.Monoid ((<>))
import qualified Data.Text as T
import Model.Id.Types
import Model.Permission.Types
import Model.Release.Types
import Model.Party.Types
import Model.Volume.Types
import Model.Container.Types
import Model.Offset
import Model.Segment
import Model.Format.Types
import Model.Asset.Types
import Model.Time
import Model.Age
import Model.Record.Types
import Model.Category.Types
import Model.Metric
import Model.Tag.Types
import Model.Comment.Types
import StringUtil
-- | Sanitize a name for use inside a Solr field identifier: every
-- non-alphanumeric character is replaced with an underscore.
safeField :: T.Text -> T.Text
safeField = T.map (\c -> if isAlphaNum c then c else '_')
-- | A record's measures, kept as raw (metric, datum) pairs until encoding.
newtype SolrRecordMeasures = SolrRecordMeasures [(Metric, MeasureDatum)]
-- | The Solr type label used as part of a metric's dynamic-field name.
metricLabel :: Metric -> T.Text
metricLabel Metric{ metricType = MeasureTypeText, metricOptions = _:_ } = "enum"
metricLabel m@Metric{ metricType = MeasureTypeText }
  | metricLong m = "long"
  | otherwise = "text"
metricLabel Metric{ metricType = MeasureTypeNumeric } = "numeric"
metricLabel Metric{ metricType = MeasureTypeDate } = "date"
metricLabel Metric{ metricType = MeasureTypeVoid } = "void"
-- | Full Solr field name for a metric: @record_\<label\>_\<sanitized name\>@.
metricField :: Metric -> T.Text
metricField m = "record_" <> metricLabel m <> ('_' `T.cons` safeField (metricName m))
-- | Encode one measure datum as a JSON value.
metricDatum :: Metric -> MeasureDatum -> JSON.Value
-- slight hack: dates are indexed as datetimes, so a noon timestamp is appended
metricDatum Metric{ metricType = MeasureTypeDate } d = JSON.toJSON $ d <> "T12:00:00Z"
metricDatum Metric{ metricType = MeasureTypeVoid } _ = JSON.toJSON True
metricDatum _ d = JSON.toJSON d
-- | Render one (metric, datum) pair as an aeson key/value.
measureKeyValue :: JSON.KeyValue kv => (Metric, MeasureDatum) -> kv
measureKeyValue (m, d) = metricField m JSON..= metricDatum m d
-- Measures encode as a flat JSON object keyed by 'metricField'.
instance JSON.ToJSON SolrRecordMeasures where
  toJSON (SolrRecordMeasures ms) =
    JSON.object $ map measureKeyValue ms
  toEncoding (SolrRecordMeasures ms) =
    JSON.pairs $ foldr ((<>) . measureKeyValue) mempty ms
-- | A 'Segment' wrapper whose textual form uses millisecond offsets.
newtype SolrSegment = SolrSegment Segment deriving (JSON.FromJSON)
instance Show SolrSegment where
  showsPrec _ (SolrSegment s) = showSegmentWith (shows . offsetMillis) s
-- Segments are indexed as their 'Show' string.
instance JSON.ToJSON SolrSegment where
  toJSON s = JSON.String $ T.pack $ show s
  toEncoding s = JSON.toEncoding $ show s
-- | One document in the Solr index.  Each constructor becomes a distinct
-- @content_type@ tag in the generated JSON (see the derived encoder below).
--
-- NOTE(review): this copy of the declaration was corrupted -- the
-- constructor lines for 'SolrAsset', 'SolrRecord' and 'SolrTag' were
-- missing, leaving unnamed field groups.  They are restored here:
-- 'SolrRecord' is confirmed by the @fixToJSON SolrRecord{}@ clause below
-- and 'SolrTag' by the duplicate declaration earlier in the file; the name
-- 'SolrAsset' is reconstructed from its fields -- confirm against upstream.
data SolrDocument
  = SolrParty
  { solrId :: !BS.ByteString
  , solrPartyId :: Id Party
  , solrPartySortName :: T.Text
  , solrPartyPreName :: Maybe T.Text
  , solrPartyAffiliation :: Maybe T.Text
  , solrPartyIsInstitution :: Bool
  , solrPartyAuthorization :: Maybe Permission
  }
  | SolrVolume
  { solrId :: !BS.ByteString
  , solrVolumeId :: Id Volume
  , solrName :: Maybe T.Text
  , solrVolumeOwnerIds :: [Id Party]
  , solrVolumeOwnerNames :: [T.Text]
  , solrCitation :: Maybe T.Text
  , solrCitationYear :: Maybe Int16
  }
  | SolrContainer
  { solrId :: !BS.ByteString
  , solrVolumeId :: Id Volume
  , solrContainerId :: Id Container
  , solrName :: Maybe T.Text
  , solrContainerTop :: Bool
  , solrContainerDate :: Maybe MaskedDate
  , solrRelease :: Maybe Release
  }
  | SolrAsset -- an asset within a slot
  { solrId :: !BS.ByteString
  , solrVolumeId :: Id Volume
  , solrContainerId :: Id Container
  , solrSegment :: SolrSegment
  , solrSegmentDuration :: Maybe Offset
  , solrAssetId :: Id Asset
  , solrName :: Maybe T.Text
  , solrFormatId :: Id Format
  , solrRelease :: Maybe Release
  }
  | SolrExcerpt
  { solrId :: !BS.ByteString
  , solrVolumeId :: Id Volume
  , solrContainerId :: Id Container
  , solrSegment :: SolrSegment
  , solrSegmentDuration :: Maybe Offset
  , solrAssetId :: Id Asset
  , solrRelease :: Maybe Release
  }
  | SolrRecord -- a record attached to a slot
  { solrId :: !BS.ByteString
  , solrVolumeId :: Id Volume
  , solrContainerId :: Id Container
  , solrSegment :: SolrSegment
  , solrSegmentDuration :: Maybe Offset
  , solrRecordId :: Id Record
  , solrRecordCategoryId :: Id Category
  , solrRecordMeasures :: SolrRecordMeasures
  , solrRecordAge :: Maybe Age
  }
  | SolrTagId
  { solrId :: !BS.ByteString
  , solrTagId :: Id Tag
  , solrTagName :: TagName
  }
  | SolrTag -- a tag use on a slot
  { solrId :: !BS.ByteString
  , solrVolumeId :: Id Volume
  , solrContainerId :: Id Container
  , solrSegment :: SolrSegment
  , solrSegmentDuration :: Maybe Offset
  , solrTagId :: Id Tag
  , solrTagName :: TagName
  , solrKeyword :: Maybe TagName
  , solrPartyId :: Id Party
  }
  | SolrComment
  { solrId :: !BS.ByteString
  , solrVolumeId :: Id Volume
  , solrContainerId :: Id Container
  , solrSegment :: SolrSegment
  , solrSegmentDuration :: Maybe Offset
  , solrCommentId :: Id Comment
  , solrPartyId :: Id Party
  , solrBody :: Maybe T.Text
  }
-- | TH-derived encoder: strips the @solr@/@Solr@ prefixes from field and
-- constructor names (de-camelCased), omits 'Nothing' fields, and tags each
-- document with its constructor in @content_type@.  NOTE(review): the label
-- modifiers are partial lambdas and crash on names without the prefix.
solrToJSON :: SolrDocument -> JSON.Value
solrToJSON =
  $(JTH.mkToJSON JTH.defaultOptions
    { JTH.fieldLabelModifier = \('s':'o':'l':'r':s) -> fromCamel s
    , JTH.constructorTagModifier = \('S':'o':'l':'r':s) -> fromCamel s
    , JTH.omitNothingFields = True
    , JTH.sumEncoding = JTH.TaggedObject
      { JTH.tagFieldName = "content_type"
      , JTH.contentsFieldName = error "solrToJSON: contentsFieldName"
      }
    } ''SolrDocument)
-- | Flatten a 'SolrRecord' document: lift the keys of the nested
-- @record_measures@ object to the top level; other documents (and records
-- whose @record_measures@ is absent or not an object) pass through.
fixToJSON :: SolrDocument -> JSON.Value -> JSON.Value
fixToJSON SolrRecord{} (JSON.Object o) = JSON.Object $
  maybe o (HM.union $ HM.delete k o) $ do
    JSON.Object m <- HM.lookup k o
    return m
  where k = "record_measures"
fixToJSON _ j = j
-- Encode generically, then apply the record flattening fix-up.
instance JSON.ToJSON SolrDocument where
  toJSON s = fixToJSON s $ solrToJSON s
-- TODO: toEncoding (and cleanup fixToJSON)
|
07f949b34fd61809f624f90873ceefe012acfd144bf842765dd6d082f314977b | xmonad/xmonad-contrib | Spiral.hs | # LANGUAGE FlexibleInstances , MultiParamTypeClasses #
-----------------------------------------------------------------------------
-- |
-- Module : XMonad.Layout.Spiral
-- Description : A spiral tiling layout.
Copyright : ( c ) < >
-- License : BSD3-style (see LICENSE)
--
Maintainer : < >
-- Stability : stable
-- Portability : portable
--
-- A spiral tiling layout.
--
-----------------------------------------------------------------------------
module XMonad.Layout.Spiral (
-- * Usage
-- $usage
spiral
, spiralWithDir
, Rotation (..)
, Direction (..)
, SpiralWithDir
) where
import Data.Ratio
import XMonad hiding ( Rotation )
import XMonad.StackSet ( integrate )
-- $usage
-- You can use this module with the following in your @~\/.xmonad\/xmonad.hs@:
--
> import XMonad . Layout . Spiral
--
Then edit your @layoutHook@ by adding the Spiral layout :
--
> myLayout = spiral ( 6/7 ) ||| etc ..
-- > main = xmonad def { layoutHook = myLayout }
--
-- For more detailed instructions on editing the layoutHook see
-- <#customizing-xmonad the tutorial> and
-- "XMonad.Doc.Extending#Editing_the_layout_hook".
fibs :: [Integer]
fibs = 1 : 1 : zipWith (+) fibs (tail fibs)
mkRatios :: [Integer] -> [Rational]
mkRatios (x1:x2:xs) = (x1 % x2) : mkRatios (x2:xs)
mkRatios _ = []
data Rotation = CW | CCW deriving (Read, Show)
data Direction = East | South | West | North deriving (Eq, Enum, Read, Show)
blend :: Rational -> [Rational] -> [Rational]
blend scale ratios = zipWith (+) ratios scaleFactors
where
len = length ratios
step = (scale - (1 % 1)) / fromIntegral len
scaleFactors = map (* step) . reverse . take len $ [0..]
-- | A spiral layout. The parameter controls the size ratio between
successive windows in the spiral . Sensible values range from 0
up to the aspect ratio of your monitor ( often 4\/3 ) .
--
-- By default, the spiral is counterclockwise, starting to the east.
-- See also 'spiralWithDir'.
spiral :: Rational -> SpiralWithDir a
spiral = spiralWithDir East CW
-- | Create a spiral layout, specifying the starting cardinal direction,
-- the spiral direction (clockwise or counterclockwise), and the
-- size ratio.
spiralWithDir :: Direction -> Rotation -> Rational -> SpiralWithDir a
spiralWithDir = SpiralWithDir
data SpiralWithDir a = SpiralWithDir Direction Rotation Rational
deriving ( Read, Show )
instance LayoutClass SpiralWithDir a where
pureLayout (SpiralWithDir dir rot scale) sc stack = zip ws rects
where ws = integrate stack
ratios = blend scale . reverse . take (length ws - 1) . mkRatios $ tail fibs
rects = divideRects (zip ratios dirs) sc
dirs = dropWhile (/= dir) $ case rot of
CW -> cycle [East .. North]
CCW -> cycle [North, West, South, East]
handleMessage (SpiralWithDir dir rot scale) = return . fmap resize . fromMessage
where resize Expand = spiralWithDir dir rot $ (21 % 20) * scale
resize Shrink = spiralWithDir dir rot $ (20 % 21) * scale
description _ = "Spiral"
This will produce one more rectangle than there are splits details
divideRects :: [(Rational, Direction)] -> Rectangle -> [Rectangle]
divideRects [] r = [r]
divideRects ((r,d):xs) rect = case divideRect r d rect of
(r1, r2) -> r1 : divideRects xs r2
-- It's much simpler if we work with all Integers and convert to
-- Rectangle at the end.
data Rect = Rect Integer Integer Integer Integer
fromRect :: Rect -> Rectangle
fromRect (Rect x y w h) = Rectangle (fromIntegral x) (fromIntegral y) (fromIntegral w) (fromIntegral h)
toRect :: Rectangle -> Rect
toRect (Rectangle x y w h) = Rect (fromIntegral x) (fromIntegral y) (fromIntegral w) (fromIntegral h)
divideRect :: Rational -> Direction -> Rectangle -> (Rectangle, Rectangle)
divideRect r d rect = let (r1, r2) = divideRect' r d $ toRect rect in
(fromRect r1, fromRect r2)
divideRect' :: Rational -> Direction -> Rect -> (Rect, Rect)
divideRect' ratio dir (Rect x y w h) =
case dir of
East -> let (w1, w2) = chop ratio w in (Rect x y w1 h, Rect (x + w1) y w2 h)
South -> let (h1, h2) = chop ratio h in (Rect x y w h1, Rect x (y + h1) w h2)
West -> let (w1, w2) = chop (1 - ratio) w in (Rect (x + w1) y w2 h, Rect x y w1 h)
North -> let (h1, h2) = chop (1 - ratio) h in (Rect x (y + h1) w h2, Rect x y w h1)
chop :: Rational -> Integer -> (Integer, Integer)
chop rat n = let f = (fromIntegral n * numerator rat) `div` denominator rat in
(f, n - f)
| null | https://raw.githubusercontent.com/xmonad/xmonad-contrib/571d017b8259340971db1736eedc992a54e9022c/XMonad/Layout/Spiral.hs | haskell | ---------------------------------------------------------------------------
|
Module : XMonad.Layout.Spiral
Description : A spiral tiling layout.
License : BSD3-style (see LICENSE)
Stability : stable
Portability : portable
A spiral tiling layout.
---------------------------------------------------------------------------
* Usage
$usage
$usage
You can use this module with the following in your @~\/.xmonad\/xmonad.hs@:
> main = xmonad def { layoutHook = myLayout }
For more detailed instructions on editing the layoutHook see
<#customizing-xmonad the tutorial> and
"XMonad.Doc.Extending#Editing_the_layout_hook".
| A spiral layout. The parameter controls the size ratio between
By default, the spiral is counterclockwise, starting to the east.
See also 'spiralWithDir'.
| Create a spiral layout, specifying the starting cardinal direction,
the spiral direction (clockwise or counterclockwise), and the
size ratio.
It's much simpler if we work with all Integers and convert to
Rectangle at the end. | # LANGUAGE FlexibleInstances , MultiParamTypeClasses #
Copyright : ( c ) < >
Maintainer : < >
module XMonad.Layout.Spiral (
spiral
, spiralWithDir
, Rotation (..)
, Direction (..)
, SpiralWithDir
) where
import Data.Ratio
import XMonad hiding ( Rotation )
import XMonad.StackSet ( integrate )
> import XMonad . Layout . Spiral
Then edit your @layoutHook@ by adding the Spiral layout :
> myLayout = spiral ( 6/7 ) ||| etc ..
fibs :: [Integer]
fibs = 1 : 1 : zipWith (+) fibs (tail fibs)
mkRatios :: [Integer] -> [Rational]
mkRatios (x1:x2:xs) = (x1 % x2) : mkRatios (x2:xs)
mkRatios _ = []
data Rotation = CW | CCW deriving (Read, Show)
data Direction = East | South | West | North deriving (Eq, Enum, Read, Show)
blend :: Rational -> [Rational] -> [Rational]
blend scale ratios = zipWith (+) ratios scaleFactors
where
len = length ratios
step = (scale - (1 % 1)) / fromIntegral len
scaleFactors = map (* step) . reverse . take len $ [0..]
successive windows in the spiral . Sensible values range from 0
up to the aspect ratio of your monitor ( often 4\/3 ) .
spiral :: Rational -> SpiralWithDir a
spiral = spiralWithDir East CW
spiralWithDir :: Direction -> Rotation -> Rational -> SpiralWithDir a
spiralWithDir = SpiralWithDir
data SpiralWithDir a = SpiralWithDir Direction Rotation Rational
deriving ( Read, Show )
instance LayoutClass SpiralWithDir a where
pureLayout (SpiralWithDir dir rot scale) sc stack = zip ws rects
where ws = integrate stack
ratios = blend scale . reverse . take (length ws - 1) . mkRatios $ tail fibs
rects = divideRects (zip ratios dirs) sc
dirs = dropWhile (/= dir) $ case rot of
CW -> cycle [East .. North]
CCW -> cycle [North, West, South, East]
handleMessage (SpiralWithDir dir rot scale) = return . fmap resize . fromMessage
where resize Expand = spiralWithDir dir rot $ (21 % 20) * scale
resize Shrink = spiralWithDir dir rot $ (20 % 21) * scale
description _ = "Spiral"
This will produce one more rectangle than there are splits details
divideRects :: [(Rational, Direction)] -> Rectangle -> [Rectangle]
divideRects [] r = [r]
divideRects ((r,d):xs) rect = case divideRect r d rect of
(r1, r2) -> r1 : divideRects xs r2
data Rect = Rect Integer Integer Integer Integer
fromRect :: Rect -> Rectangle
fromRect (Rect x y w h) = Rectangle (fromIntegral x) (fromIntegral y) (fromIntegral w) (fromIntegral h)
toRect :: Rectangle -> Rect
toRect (Rectangle x y w h) = Rect (fromIntegral x) (fromIntegral y) (fromIntegral w) (fromIntegral h)
divideRect :: Rational -> Direction -> Rectangle -> (Rectangle, Rectangle)
divideRect r d rect = let (r1, r2) = divideRect' r d $ toRect rect in
(fromRect r1, fromRect r2)
divideRect' :: Rational -> Direction -> Rect -> (Rect, Rect)
divideRect' ratio dir (Rect x y w h) =
case dir of
East -> let (w1, w2) = chop ratio w in (Rect x y w1 h, Rect (x + w1) y w2 h)
South -> let (h1, h2) = chop ratio h in (Rect x y w h1, Rect x (y + h1) w h2)
West -> let (w1, w2) = chop (1 - ratio) w in (Rect (x + w1) y w2 h, Rect x y w1 h)
North -> let (h1, h2) = chop (1 - ratio) h in (Rect x (y + h1) w h2, Rect x y w h1)
chop :: Rational -> Integer -> (Integer, Integer)
chop rat n = let f = (fromIntegral n * numerator rat) `div` denominator rat in
(f, n - f)
|
2b266d65209d378f27ba8e04bd31de0be436357b0359bb3eff5f578b67bbc97a | adnelson/nixfromnpm | Common.hs | # LANGUAGE LambdaCase #
# LANGUAGE CPP #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE NoMonomorphismRestriction #
# LANGUAGE FlexibleContexts #
{-# LANGUAGE TypeSynonymInstances #-}
# LANGUAGE FlexibleInstances #
# LANGUAGE ViewPatterns #
# LANGUAGE NoImplicitPrelude #
# LANGUAGE TypeFamilies #
module NixFromNpm.Common (
module ClassyPrelude,
module Control.Applicative,
module Control.Exception.Lifted,
module Control.Monad,
module Control.Monad.Catch,
module Control.Monad.Except,
module Control.Monad.Identity,
module Control.Monad.Reader,
module Control.Monad.State.Strict,
module Control.Monad.Trans,
module Control.Monad.RWS.Strict,
module Data.Char,
module Data.Default,
module Data.Either,
module Data.HashMap.Strict,
module Data.List,
module Data.Map.Strict,
module Data.Maybe,
module Data.String.Utils,
module Filesystem.Path.CurrentOS,
module GHC.Exts,
module Network.URI,
module Filesystem.Path.Wrappers,
module Text.Render,
module Text.Printf,
module Control.Monad.Trans.Control,
module System.Console.ANSI,
Name, AuthToken, Record, (//), (<>),
uriToText, uriToString, putStrsLn, putStrs, maybeIf, failC,
errorC, joinBy, mapJoinBy, getEnv, modifyMap, unsafeParseURI,
parseURIText, withColor, withUL, warn, warns, assert, fatal, fatalC,
partitionEither, throw, eitherToMaybe
#if !MIN_VERSION_mono_traversable(1,0,7)
, dropSuffix
#endif
) where
import ClassyPrelude hiding (assert, asList, find, FilePath, bracket,
maximum, maximumBy, (</>), (<>),
minimum, try, stripPrefix, ioError,
mapM_, sequence_, foldM, forM_, throw, throwIO,
filterM, replicateM, writeFile, readFile,
writeFileUtf8, readFileUtf8, catch, catches,
Handler)
import Control.Exception (throw)
import Control.Monad.Catch (catch, catches, Handler(..))
import qualified Prelude as P
import Control.Monad.RWS.Strict hiding (Any, (<>))
import Control.Monad (when)
import Control.Monad.Trans (MonadIO(..), lift)
import Control.Monad.Reader (ReaderT(..), MonadReader(..), (<=<), (>=>), ask,
asks)
import Control.Monad.State.Strict (MonadState, StateT, State, get, gets,
modify, put, liftM, liftIO, runState,
runStateT, execState, execStateT,
evalState, evalStateT)
import Control.Monad.Except (ExceptT, MonadError(..), throwError, runExceptT)
import Control.Exception.Lifted () -- hiding (assert, )
import Control.Monad.Identity (Identity(..))
import Control.Monad.Trans.Control
import Control.Applicative hiding (empty, optional)
import Data.Char (isDigit, isAlpha)
import Data.Default
import Data.List (maximum, maximumBy)
import Data.HashMap.Strict (HashMap, (!))
import qualified Data.HashMap.Strict as H
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import Data.Maybe (fromJust, isJust, isNothing)
import Data.Monoid ((<>))
import Data.Either (isRight, isLeft)
import Data.String.Utils hiding (join)
import qualified Data.Text as T
import Filesystem.Path.CurrentOS hiding (concat, null, (<.>), empty)
import GHC.Exts (IsList)
import Text.Render hiding (renderParens)
import Text.Printf (printf)
import Network.URI (URI(..), URIAuth(..), parseURI, parseAbsoluteURI,
parseRelativeReference, relativeTo)
import qualified Network.URI as NU
import Shelly hiding (get, relativeTo)
import System.Console.ANSI
import Filesystem.Path.Wrappers
-- | Indicates that the text is some identifier.
type Name = Text
-- | Used to indicate something is meant for authentication.
type AuthToken = ByteString
-- | A record is a lookup table with string keys.
type Record = HashMap Name
-- | An unrecoverable error; thrown by 'fatal' and friends below.
newtype FatalError = Fatal Text deriving (Show, Eq, Typeable)
instance Exception FatalError
-- | Creates a new hashmap by applying a function to every key in it.
alterKeys :: (Eq k, Hashable k, Eq k', Hashable k') =>
(k -> k') -> HashMap k v -> HashMap k' v
alterKeys f mp = do
let pairs = H.toList mp
newPairs = P.map (\(k, v) -> (f k, v)) pairs
newMap = H.fromList newPairs
newMap
-- | Create a hashmap by applying a test to everything in the existing
-- map. If the test returns Just, put it in the result, and otherwise leave
-- it out.
modifyHashMap :: (Eq k, Hashable k)
=> (a -> Maybe b) -> HashMap k a -> HashMap k b
modifyHashMap test inputMap = foldl' step mempty $ H.toList inputMap where
step result (k, elem) = case test elem of
Nothing -> result
Just newElem -> H.insert k newElem result
-- | Same as modifyHashMap, but for Data.Maps: apply the test to every
-- value, keeping a (transformed) entry only where it returns 'Just'.
-- This is exactly 'M.mapMaybe' from containers, which avoids the
-- intermediate association list that the hand-rolled fold built.
modifyMap :: Ord k => (a -> Maybe b) -> Map k a -> Map k b
modifyMap = M.mapMaybe
-- | Convert a URI into Text.
uriToText :: URI -> Text
uriToText = pack . uriToString
-- | Convert a URI into String.
uriToString :: URI -> String
uriToString uri = NU.uriToString id uri ""
-- | Concatenate text and print it to stdout with a newline.
putStrsLn :: MonadIO m => [Text] -> m ()
putStrsLn = putStrLn . concat
-- | Concatenate text and print it to stdout.
putStrs :: MonadIO m => [Text] -> m ()
putStrs = putStr . concat
#if !MIN_VERSION_mono_traversable(1,0,7)
-- | Strip the given suffix from the given string.
dropSuffix :: Text -> Text -> Text
dropSuffix suffix input = case T.stripSuffix suffix input of
Nothing -> input
Just stripped -> stripped
#endif
-- | Return the value wrapped in 'Just' when the condition holds,
-- and 'Nothing' otherwise.
maybeIf :: Bool -> a -> Maybe a
maybeIf cond val = if cond then Just val else Nothing
-- | Synonym for intercalate.
joinBy :: Text -> [Text] -> Text
joinBy = T.intercalate
-- | Map a function and intercalate the results.
mapJoinBy :: Text -> (a -> Text) -> [a] -> Text
mapJoinBy sep func = joinBy sep . map func
-- | Reads an environment variable.
getEnv :: MonadIO m => Text -> m (Maybe Text)
getEnv = shelly . silently . get_env
-- | Call the monadic fail function, concatenating a list of Text.
failC :: Monad m => [Text] -> m a
failC = fail . unpack . concat
-- | Throw an error after concatenation a list of Text.
errorC :: [Text] -> a
errorC = error . unpack . concat
-- | Appends text to a URI with a slash.  Ex: foo.com // bar ==
-- foo.com/bar.
(//) :: URI -> Text -> URI
-- Ensure the base URI ends in a slash, then resolve the (relative) text
-- against it; a non-parseable relative reference is a caller error.
-- NOTE(review): 'T.last' is partial -- this crashes if the rendered URI is
-- empty; presumably a parsed URI always renders non-empty, but confirm.
uri // txt = do
  let fixedUri = unsafeParseURI $ case T.last (uriToText uri) of
        '/' -> uriToText uri
        _ -> uriToText uri <> "/"
  case parseRelativeReference (unpack txt) of
    Nothing -> errorC ["Invalid appending URI: ", tshow txt]
    Just uri' -> uri' `relativeTo` fixedUri
unsafeParseURI :: Text -> URI
unsafeParseURI txt = case parseURIText txt of
Nothing -> errorC ["Invalid URI text: ", tshow txt]
Just uri -> uri
parseURIText :: Text -> Maybe URI
parseURIText = parseURI . unpack
-- | Run an action with terminal foreground set to the given vivid color,
-- resetting all SGR attributes afterwards.
-- NOTE(review): the reset is not exception-safe -- if the action throws,
-- the terminal is left colored; consider a bracket if that matters.
withColor :: MonadIO io => Color -> io a -> io a
withColor color action = do
  liftIO $ setSGR [SetColor Foreground Vivid color]
  result <- action
  liftIO $ setSGR [Reset]
  return result
-- | Run an action with terminal underlining enabled (same caveat as
-- 'withColor' about exceptions skipping the restore).
withUL :: MonadIO io => io a -> io a
withUL action = do
  liftIO $ setSGR [SetUnderlining SingleUnderline]
  result <- action
  liftIO $ setSGR [SetUnderlining NoUnderline]
  return result
-- | Print a warning string in red, prefixed with @WARNING:@.
warn :: MonadIO io => Text -> io ()
warn msg = withColor Red $ putStrsLn ["WARNING: ", msg]
-- | Like 'warn', but concatenates its pieces first.
warns :: MonadIO io => [Text] -> io ()
warns = warn . concat
-- | Run the monadic test and throw the given exception if it fails.
assert :: (Monad m, Exception e) => m Bool -> e -> m ()
assert test err = test >>= \case
  True -> return ()
  False -> throw err
-- | Throw a 'FatalError' (imprecise exception from pure code).
fatal :: Text -> a
fatal = throw . Fatal
-- | Like `fatal` but takes a list which it concatenates.
fatalC :: [Text] -> a
fatalC = fatal . concat
-- | Split a list in two by classifying every element as 'Left' or 'Right'
-- and collecting the two sides separately (order preserved).
partitionEither :: (a -> Either b c) -> [a] -> ([b], [c])
partitionEither f xs = partitionEithers (map f xs)
-- | Convert an `Either` to a `Maybe`, discarding any 'Left' value.
eitherToMaybe :: Either a b -> Maybe b
eitherToMaybe = either (const Nothing) Just
| null | https://raw.githubusercontent.com/adnelson/nixfromnpm/4ab773cdead920d2312e864857fabaf5f739a80e/src/NixFromNpm/Common.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE TypeSynonymInstances #
hiding (assert, )
| Indicates that the text is some identifier.
| Used to indicate something is meant for authentication.
| A record is a lookup table with string keys.
| Creates a new hashmap by applying a function to every key in it.
| Create a hashmap by applying a test to everything in the existing
map. If the test returns Just, put it in the result, and otherwise leave
it out.
| Same as modifyHashMap, but for Data.Maps.
| Convert a URI into Text.
| Convert a URI into String.
| Concatenate text and print it to stdout with a newline.
| Concatenate text and print it to stdout.
| Strip the given suffix from the given string.
| Return a Just value if the argument is True, else Nothing.
| Synonym for intercalate.
| Map a function and intercalate the results.
| Reads an environment variable.
| Call the monadic fail function, concatenating a list of Text.
| Throw an error after concatenation a list of Text.
foo.com/bar.
| Print a warning string in red.
| Print a warning string by concatenating strings.
| Throws the given exception if the test fails.
| Throw a fatal error.
| Like `fatal` but takes a list which it concatenates.
| Split up a list based on a predicate.
| Convert an `Either` to a `Maybe`. | # LANGUAGE LambdaCase #
# LANGUAGE CPP #
# LANGUAGE NoMonomorphismRestriction #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE ViewPatterns #
# LANGUAGE NoImplicitPrelude #
# LANGUAGE TypeFamilies #
module NixFromNpm.Common (
module ClassyPrelude,
module Control.Applicative,
module Control.Exception.Lifted,
module Control.Monad,
module Control.Monad.Catch,
module Control.Monad.Except,
module Control.Monad.Identity,
module Control.Monad.Reader,
module Control.Monad.State.Strict,
module Control.Monad.Trans,
module Control.Monad.RWS.Strict,
module Data.Char,
module Data.Default,
module Data.Either,
module Data.HashMap.Strict,
module Data.List,
module Data.Map.Strict,
module Data.Maybe,
module Data.String.Utils,
module Filesystem.Path.CurrentOS,
module GHC.Exts,
module Network.URI,
module Filesystem.Path.Wrappers,
module Text.Render,
module Text.Printf,
module Control.Monad.Trans.Control,
module System.Console.ANSI,
Name, AuthToken, Record, (//), (<>),
uriToText, uriToString, putStrsLn, putStrs, maybeIf, failC,
errorC, joinBy, mapJoinBy, getEnv, modifyMap, unsafeParseURI,
parseURIText, withColor, withUL, warn, warns, assert, fatal, fatalC,
partitionEither, throw, eitherToMaybe
#if !MIN_VERSION_mono_traversable(1,0,7)
, dropSuffix
#endif
) where
import ClassyPrelude hiding (assert, asList, find, FilePath, bracket,
maximum, maximumBy, (</>), (<>),
minimum, try, stripPrefix, ioError,
mapM_, sequence_, foldM, forM_, throw, throwIO,
filterM, replicateM, writeFile, readFile,
writeFileUtf8, readFileUtf8, catch, catches,
Handler)
import Control.Exception (throw)
import Control.Monad.Catch (catch, catches, Handler(..))
import qualified Prelude as P
import Control.Monad.RWS.Strict hiding (Any, (<>))
import Control.Monad (when)
import Control.Monad.Trans (MonadIO(..), lift)
import Control.Monad.Reader (ReaderT(..), MonadReader(..), (<=<), (>=>), ask,
asks)
import Control.Monad.State.Strict (MonadState, StateT, State, get, gets,
modify, put, liftM, liftIO, runState,
runStateT, execState, execStateT,
evalState, evalStateT)
import Control.Monad.Except (ExceptT, MonadError(..), throwError, runExceptT)
import Control.Monad.Identity (Identity(..))
import Control.Monad.Trans.Control
import Control.Applicative hiding (empty, optional)
import Data.Char (isDigit, isAlpha)
import Data.Default
import Data.List (maximum, maximumBy)
import Data.HashMap.Strict (HashMap, (!))
import qualified Data.HashMap.Strict as H
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import Data.Maybe (fromJust, isJust, isNothing)
import Data.Monoid ((<>))
import Data.Either (isRight, isLeft)
import Data.String.Utils hiding (join)
import qualified Data.Text as T
import Filesystem.Path.CurrentOS hiding (concat, null, (<.>), empty)
import GHC.Exts (IsList)
import Text.Render hiding (renderParens)
import Text.Printf (printf)
import Network.URI (URI(..), URIAuth(..), parseURI, parseAbsoluteURI,
parseRelativeReference, relativeTo)
import qualified Network.URI as NU
import Shelly hiding (get, relativeTo)
import System.Console.ANSI
import Filesystem.Path.Wrappers
type Name = Text
type AuthToken = ByteString
type Record = HashMap Name
newtype FatalError = Fatal Text deriving (Show, Eq, Typeable)
instance Exception FatalError
alterKeys :: (Eq k, Hashable k, Eq k', Hashable k') =>
(k -> k') -> HashMap k v -> HashMap k' v
alterKeys f mp = do
let pairs = H.toList mp
newPairs = P.map (\(k, v) -> (f k, v)) pairs
newMap = H.fromList newPairs
newMap
modifyHashMap :: (Eq k, Hashable k)
=> (a -> Maybe b) -> HashMap k a -> HashMap k b
modifyHashMap test inputMap = foldl' step mempty $ H.toList inputMap where
step result (k, elem) = case test elem of
Nothing -> result
Just newElem -> H.insert k newElem result
modifyMap :: Ord k => (a -> Maybe b) -> Map k a -> Map k b
modifyMap test inputMap = foldl' step mempty $ M.toList inputMap where
step result (k, elem) = case test elem of
Nothing -> result
Just newElem -> M.insert k newElem result
uriToText :: URI -> Text
uriToText = pack . uriToString
uriToString :: URI -> String
uriToString uri = NU.uriToString id uri ""
putStrsLn :: MonadIO m => [Text] -> m ()
putStrsLn = putStrLn . concat
putStrs :: MonadIO m => [Text] -> m ()
putStrs = putStr . concat
#if !MIN_VERSION_mono_traversable(1,0,7)
dropSuffix :: Text -> Text -> Text
dropSuffix suffix input = case T.stripSuffix suffix input of
Nothing -> input
Just stripped -> stripped
#endif
maybeIf :: Bool -> a -> Maybe a
maybeIf True x = Just x
maybeIf False _ = Nothing
joinBy :: Text -> [Text] -> Text
joinBy = T.intercalate
mapJoinBy :: Text -> (a -> Text) -> [a] -> Text
mapJoinBy sep func = joinBy sep . map func
getEnv :: MonadIO m => Text -> m (Maybe Text)
getEnv = shelly . silently . get_env
failC :: Monad m => [Text] -> m a
failC = fail . unpack . concat
errorC :: [Text] -> a
errorC = error . unpack . concat
| Appends text to URI with a slash . Ex : foo.com // bar = =
(//) :: URI -> Text -> URI
uri // txt = do
let fixedUri = unsafeParseURI $ case T.last (uriToText uri) of
'/' -> uriToText uri
_ -> uriToText uri <> "/"
case parseRelativeReference (unpack txt) of
Nothing -> errorC ["Invalid appending URI: ", tshow txt]
Just uri' -> uri' `relativeTo` fixedUri
unsafeParseURI :: Text -> URI
unsafeParseURI txt = case parseURIText txt of
Nothing -> errorC ["Invalid URI text: ", tshow txt]
Just uri -> uri
parseURIText :: Text -> Maybe URI
parseURIText = parseURI . unpack
withColor :: MonadIO io => Color -> io a -> io a
withColor color action = do
liftIO $ setSGR [SetColor Foreground Vivid color]
result <- action
liftIO $ setSGR [Reset]
return result
withUL :: MonadIO io => io a -> io a
withUL action = do
liftIO $ setSGR [SetUnderlining SingleUnderline]
result <- action
liftIO $ setSGR [SetUnderlining NoUnderline]
return result
warn :: MonadIO io => Text -> io ()
warn msg = withColor Red $ putStrsLn ["WARNING: ", msg]
warns :: MonadIO io => [Text] -> io ()
warns = warn . concat
assert :: (Monad m, Exception e) => m Bool -> e -> m ()
assert test err = test >>= \case
True -> return ()
False -> throw err
fatal :: Text -> a
fatal = throw . Fatal
fatalC :: [Text] -> a
fatalC = fatal . concat
partitionEither :: (a -> Either b c) -> [a] -> ([b], [c])
partitionEither f = partitionEithers . map f
eitherToMaybe :: Either a b -> Maybe b
eitherToMaybe (Left _) = Nothing
eitherToMaybe (Right x) = Just x
|
1bf0864e0bab720ac4167e336f39c8db9cd20c38a0e33be9b1b001a20f346a20 | liqula/react-hs | TestClient.hs | {-# LANGUAGE BangPatterns #-}
# LANGUAGE CPP #
# LANGUAGE DataKinds #
# LANGUAGE DeriveGeneric #
{-# LANGUAGE EmptyDataDecls #-}
# LANGUAGE ExistentialQuantification #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE FunctionalDependencies #
# LANGUAGE LambdaCase #
# LANGUAGE MagicHash #
# LANGUAGE MultiParamTypeClasses #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE PolyKinds #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TupleSections #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeFamilyDependencies #
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
# LANGUAGE ViewPatterns #
# LANGUAGE TemplateHaskell #
# OPTIONS_GHC -fno - warn - orphans #
module Main (main) where
import Control.Monad
import Data.Monoid ((<>))
import Data.Typeable (Typeable, Proxy(..))
import Debug.Trace
import GHC.Generics (Generic)
import Data.Time (UTCTime(..), fromGregorian)
import React.Flux
import React.Flux.Addons.Intl
import qualified Data.Text as T
import GHCJS.Types (JSVal, JSString)
import JavaScript.Array (JSArray)
import qualified Data.JSString.Text as JSS
-- | The output store carries no state; it exists only so that actions
-- dispatched to it perform logging as a side effect in 'transform'.
data OutputStoreData = OutputStoreData
  deriving (Eq, Show, Typeable)
instance StoreData OutputStoreData where
  type StoreAction OutputStoreData = [T.Text]
  -- log both to the console and to js_output
  transform ss OutputStoreData = do
    mapM_ (js_output . JSS.textToJSString) ss
    trace (unlines $ map T.unpack ss) $ return OutputStoreData
-- | Register the (empty) output store with react-flux.
initOutputStore :: IO ()
initOutputStore = registerInitialStore OutputStoreData
-- | Wrap a batch of log lines as a dispatchable store-action list.
output :: [T.Text] -> [SomeStoreAction]
output s = [action @OutputStoreData s]
--------------------------------------------------------------------------------
--- Events
--------------------------------------------------------------------------------
-- | Render whether the Alt modifier is active, according to the supplied
-- modifier-state query function.
logM :: (T.Text -> Bool) -> T.Text
logM query = "alt modifier: " <> T.pack (show (query "Alt"))

-- | Render an event target by its DOM @id@ property.
logT :: EventTarget -> T.Text
logT target = eventTargetProp target "id"

-- | 'show' a value directly to 'T.Text'.
tshow :: Show a => a -> T.Text
tshow x = T.pack (show x)
-- | A trivial view that renders its 'Int' prop via its 'Show' instance.
rawShowView :: View Int
rawShowView = mkView "raw show view" $ \n -> elemShow n
-- | Exercises the event-handler API: key/focus events, mouse and touch
-- events with modifier-state queries, 'preventDefault', 'stopPropagation',
-- capture-phase handlers, and a raw 'Show'-based child view.  Every handler
-- reports what happened through the output store.
eventsView :: View ()
eventsView = mkView "events" $ \() ->
    div_ $ do
      p_ ["key" $= "text"] $
        input_ [ "type" $= "text"
               , "id" $= "keyinput"
               , "placeholder" $= "onKeyDown"
               , onKeyDown $ \e k -> simpleHandler $ output
                   [ "keydown"
                   , tshow e
                   , tshow k
                   , logM (keyGetModifierState k)
                   , logT (evtTarget e)
                   , logT (evtCurrentTarget e)
                   ]
               , onFocus $ \e _ -> simpleHandler $ output
                   [ "focus"
                   , tshow e
                   --, logT $ focusRelatedTarget f
                   ]
               ]
      p_ ["key" $= "click"] $
        label_ [ "id" $= "clickinput"
               , onClick $ \e m -> simpleHandler $ output
                   [ "click"
                   , tshow e
                   , tshow m
                   , logM (mouseGetModifierState m)
                   , logT ( mouseRelatedTarget m )
                   ]
               ]
               "onClick"
      p_ ["key" $= "touch"] $
        label_ [ "id" $= "touchinput"
               , onTouchStart $ \e t -> simpleHandler $ output
                   [ "touchstart"
                   , tshow e
                   , tshow t
                   , logM (touchGetModifierState t)
                   -- NOTE(review): 'head' is partial; assumes the touch event
                   -- always carries at least one target — confirm.
                   , logT (touchTarget $ head $ touchTargets t)
                   , "endtouch"
                   ]
               ]
               "onTouchStart"
      -- preventDefault: clicking the link must log but not navigate.
      p_ ["key" $= "prevent"] $
        a_ [ "id" $= "some-link"
           , "href" $= ""
           , onClick $ \_ _ -> preventDefault $ output ["Click some-link"]
           ]
           "Testing preventDefault"
      -- stopPropagation: the inner span's click must not reach the outer
      -- div; the capture-phase double-click on the outer div stops the
      -- event before the inner handler sees it.
      div_ ["key" $= "prop"] $
        div_ [ "id" $= "outer-div"
             , onClick $ \_ _ -> simpleHandler $ output ["Click on outer div"]
             , capturePhase $ onDoubleClick $ \_ _ -> stopPropagation $ output ["Double click outer div"]
             ] $ do
          span_ [ "id" $= "inner-span"
                , onClick $ \e _ -> stopPropagation e `seq` simpleHandler (output ["Click inner span"])
                , onDoubleClick $ \_ _ -> simpleHandler $ output ["Double click inner span"]
                ]
                "Testing stopPropagation"
      p_ [ "id" $= "raw-show-view", "key" $= "raw"] $ view_ rawShowView "raw" 42
--------------------------------------------------------------------------------
--- Stores and should component update
--------------------------------------------------------------------------------
-- | A test character: numeric id plus a name.
data Character = Character !Int !String
  deriving (Typeable, Eq)
instance Show Character where
  show (Character i s) = "C" ++ show i ++ " - " ++ s
-- | Two characters grouped together; each store holds two pairs.
data CharacterPair = CharacterPair {
    c1 :: !Character
  , c2 :: !Character
} deriving (Typeable, Eq)
instance Show CharacterPair where
  show (CharacterPair x1 x2) = show x1 ++ ", " ++ show x2
-- | Store of human characters (two pairs).
data Humans = Humans
  { h1 :: !CharacterPair
  , h2 :: !CharacterPair
  } deriving (Typeable, Eq, Show)
-- HasField instances let StoreField-based controller views select a single
-- field of the store.
instance HasField "h1" Humans CharacterPair where
  getField = h1
instance HasField "h2" Humans CharacterPair where
  getField = h2
-- | Store of Tiste characters (two pairs).
data Tiste = Tiste
  { t1 :: !CharacterPair
  , t2 :: !CharacterPair
  } deriving (Typeable, Eq, Show)
-- | Selects which of a store's four characters an action targets
-- (pair 1/2, character 1/2).
data CharacterIndex = P1_C1 | P1_C2 | P2_C1 | P2_C2
  deriving (Show, Eq, Typeable, Generic, Bounded, Enum)
-- | Action set shared by both character stores; the no-op action is used
-- to check that views are not re-rendered when the data is unchanged.
data TestStoreAction = IncrementCharacter CharacterIndex
                     | NoChangeToCharacters
  deriving (Show, Typeable, Generic)
-- | Bump a character's numeric id by one, leaving the name untouched.
incrChar :: Character -> Character
incrChar (Character n name) = Character (n + 1) name
-- | Humans store: each action increments one of the four characters, or
-- does nothing at all.
instance StoreData Humans where
  type StoreAction Humans = TestStoreAction
  transform NoChangeToCharacters cg = return cg
  -- normally would use lenses to update part of the store
  transform (IncrementCharacter P1_C1) cg = return $ cg { h1 = (h1 cg) { c1 = incrChar (c1 $ h1 cg) }}
  transform (IncrementCharacter P1_C2) cg = return $ cg { h1 = (h1 cg) { c2 = incrChar (c2 $ h1 cg) }}
  transform (IncrementCharacter P2_C1) cg = return $ cg { h2 = (h2 cg) { c1 = incrChar (c1 $ h2 cg) }}
  transform (IncrementCharacter P2_C2) cg = return $ cg { h2 = (h2 cg) { c2 = incrChar (c2 $ h2 cg) }}
-- | Tiste store: same action set, mirrored over the @t1@/@t2@ pairs.
instance StoreData Tiste where
  type StoreAction Tiste = TestStoreAction
  transform NoChangeToCharacters cg = return cg
  -- normally would use lenses to update part of the store
  transform (IncrementCharacter P1_C1) cg = return $ cg { t1 = (t1 cg) { c1 = incrChar (c1 $ t1 cg) }}
  transform (IncrementCharacter P1_C2) cg = return $ cg { t1 = (t1 cg) { c2 = incrChar (c2 $ t1 cg) }}
  transform (IncrementCharacter P2_C1) cg = return $ cg { t2 = (t2 cg) { c1 = incrChar (c1 $ t2 cg) }}
  transform (IncrementCharacter P2_C2) cg = return $ cg { t2 = (t2 cg) { c2 = incrChar (c2 $ t2 cg) }}
-- | Register both character stores with their initial contents; must run
-- once at startup, before any store-backed view renders.
initCharacterStore :: IO ()
initCharacterStore = do
    registerInitialStore $
      Humans
        { h1 = CharacterPair
            { c1 = Character 10 "Quick Ben"
            , c2 = Character 20 "Whiskeyjack"
            }
        , h2 = CharacterPair
            { c1 = Character 30 "Fiddler"
            , c2 = Character 40 "Kruppe"
            }
        }
    registerInitialStore $
      Tiste
        { t1 = CharacterPair
            { c1 = Character 100 "Andarist"
            , c2 = Character 110 "Osseric"
            }
        , t2 = CharacterPair
            { c1 = Character 120 "Anomander Rake"
            , c2 = Character 130 "Korlot"
            }
        }
-- | Render a foreign component that logs its message every time it is
-- (re-)rendered — used to observe which views React actually updates.
logWhenUpdated_ :: String -> ReactElementM handler ()
logWhenUpdated_ m = foreign_ "hsreact$log_when_updated" ["key" $= "log", "message" &= m] mempty
-- | Logs whenever the single character prop changes.
singleCharacterView :: View Character
singleCharacterView = mkView "single-character" $ \c ->
    logWhenUpdated_ $ "Single character " ++ show c
-- | Logs whenever either of the two character props changes.
twoCharacterView :: View (Character, Character)
twoCharacterView = mkView "two-character" $ \(ch1, ch2) ->
    logWhenUpdated_ $ "Two characters " ++ show ch1 ++ " and " ++ show ch2
-- | Logs whenever the character-pair prop changes.
pairCharacterView :: View CharacterPair
pairCharacterView = mkView "pair-characters" $ \p ->
    logWhenUpdated_ $ "Pair of characters " ++ show p
-- | A stateful view: local 'Int' state starting at -100, incremented by
-- its button; also logs when the character prop causes a re-render.
statefulCharacterView :: View Character
statefulCharacterView = mkStatefulView "stateful-char" (-100 :: Int) $ \s c ->
    p_ $ do
      logWhenUpdated_ ("Stateful character " ++ show c)
      span_ ["className" $= "state", "key" $= "cur state"] $ elemShow s
      button_ [ "className" $= "incr-state"
              , onClick $ \_ _ -> simpleHandler $ \s' -> ([], Just $ s' + 1)
              , "key" $= "btn"
              ]
              "Incr"
-- | Controller view over the whole 'Humans' store, plus two extra
-- characters passed down as props by the parent.
fullHumanView :: View (Character, Character)
fullHumanView = mkControllerView @'[StoreArg Humans] "full humans" $ \humans (extra1, extra2) ->
    ul_ ["id" $= "full-humans-view"] $ do
      li_ ["key" $= "header"] $ logWhenUpdated_ "All the humans, plus Andarist and Rake"
      li_ ["key" $= "11"] $ view_ singleCharacterView "11" (c1 $ h1 humans)
      li_ ["key" $= "12"] $ view_ singleCharacterView "12" (c2 $ h1 humans)
      li_ ["key" $= "21"] $ view_ singleCharacterView "21" (c1 $ h2 humans)
      li_ ["key" $= "22"] $ view_ singleCharacterView "22" (c2 $ h2 humans)
      li_ ["key" $= "112"] $ view_ twoCharacterView "112" (c1 $ h1 humans, c2 $ h1 humans)
      li_ ["key" $= "212"] $ view_ pairCharacterView "212" (h2 $ humans)
      li_ ["key" $= "extra1"] $ view_ singleCharacterView "extra1" extra1
      li_ ["key" $= "extra2"] $ view_ singleCharacterView "extra2" extra2
-- | Controller view over the 'Tiste' store that embeds 'fullHumanView',
-- passing two Tiste characters down as the extra props.
tisteAndHumansView :: View ()
tisteAndHumansView = mkControllerView @'[StoreArg Tiste] "tiste-and-humans" $ \tiste () ->
    div_ ["id" $= "tiste-view"] $ do
      ul_ ["id" $= "tiste-sub-view", "key" $= "tiste-sub-view"] $ do
        li_ ["key" $= "header"] $ logWhenUpdated_ "All the tiste"
        li_ ["key" $= "11"] $ view_ singleCharacterView "11" (c1 $ t1 tiste)
        li_ ["key" $= "12"] $ view_ singleCharacterView "12" (c2 $ t1 tiste)
        li_ ["key" $= "21"] $ view_ singleCharacterView "21" (c1 $ t2 tiste)
        li_ ["key" $= "22"] $ view_ singleCharacterView "22" (c2 $ t2 tiste)
      view_ fullHumanView "humans" (c1 $ t1 tiste, c1 $ t2 tiste)
-- | Controller view subscribed to BOTH stores at once.
dualCharacterView :: View ()
dualCharacterView = mkControllerView @'[StoreArg Humans, StoreArg Tiste] "dual-characters" $ \humans tiste () ->
    ul_ ["id" $= "dual-character-view"] $ do
      li_ ["key" $= "header"] $ logWhenUpdated_ "Quick Ben and Andarist"
      li_ ["key" $= "human11"] $ view_ singleCharacterView "11" (c1 $ h1 humans)
      li_ ["key" $= "tiste11"] $ view_ singleCharacterView "11" (c1 $ t1 tiste)
      li_ ["key" $= "state"] $ view_ statefulCharacterView "state" (c1 $ t2 tiste)
-- TODO: 'StoreField' has internal errors (crashes on browser console).
-- NOTE(review): the defining equation of this view was destroyed when block
-- comments were stripped from the file (only the commented-out fragment
-- @, StoreField Humans "h1" CharacterPair@ survived).  Reconstructed below
-- using only @StoreArg Tiste@, which matches the @View ()@ signature and the
-- use site in 'storeSpec'; the human rows stay disabled until StoreField
-- works.
tisteAndSomeHumansView :: View ()
tisteAndSomeHumansView = mkControllerView @'[StoreArg Tiste] "tiste-and-some-humans" $ \tiste () ->
    ul_ ["id" $= "tiste-and-some-humans"] $ do
      li_ ["key" $= "header"] $ logWhenUpdated_ "Just Rake, Korlot, Quick Ben, and Whiskeyjack"
      li_ ["key" $= "t21"] $ view_ singleCharacterView "21" (c1 $ t2 tiste)
      li_ ["key" $= "t22"] $ view_ singleCharacterView "22" (c2 $ t2 tiste)
      -- li_ ["key" $= "h11"] $ view_ singleCharacterView "11" (c1 humanPair)
      -- li_ ["key" $= "h12"] $ view_ singleCharacterView "12" (c2 humanPair)
-- | Render one "no change" button plus one increment button per
-- 'CharacterIndex', dispatching to the store chosen by the type argument.
-- Button DOM ids are derived from the label so tests can click them.
buttons_ :: forall s. (StoreData s, TestStoreAction ~ StoreAction s) => Proxy s -> T.Text -> ReactElementM 'EventHandlerCode ()
buttons_ _ lbl =
    ul_ ["id" &= lbl, "key" &= lbl] $ do
      li_ ["key" $= "none"] $
        button_
          [ "id" &= (lbl <> "-none")
          , onClick $ \_ _ -> simpleHandler [action @s NoChangeToCharacters]
          ]
          (elemText $ lbl <> " No Change")
      forM_ [minBound..maxBound] $ \idx ->
        li_ ["key" &= (lbl <> "-change-" <> tshow idx)] $
          button_
            [ "id" &= (lbl <> "-" <> tshow idx)
            , onClick $ \_ _ -> simpleHandler [action @s $ IncrementCharacter idx]
            ] (elemText $ lbl <> tshow idx)
-- | Top of the store test page: action buttons for both stores plus the
-- three controller views that should (or should not) re-render.
storeSpec :: View ()
storeSpec = mkView "store spec" $ \() ->
    div_ ["id" $= "store-spec"] $ do
      buttons_ (Proxy :: Proxy Humans) "Humans"
      buttons_ (Proxy :: Proxy Tiste) "Tiste"
      view_ tisteAndHumansView "tiste-and-human" ()
      view_ dualCharacterView "dual" ()
      view_ tisteAndSomeHumansView "tiste-and-some" ()
--------------------------------------------------------------------------------
--- Callback returning view
--------------------------------------------------------------------------------
-- | Haskell view whose props are supplied from JavaScript via a callback.
callbackViewTest :: View (Int, String)
callbackViewTest = mkView "callback view props test" $ \(i, s) ->
    p_ [ "id" $= "callback-view-props-test"] $
      elemString $ "Props are " ++ show i ++ " and " ++ s
-- | Wraps a foreign component, handing it 'callbackViewTest' as a
-- render-callback property.
callbackViewWrapper :: View ()
callbackViewWrapper = mkView "callback view wrapper" $ \() ->
    div_ ["id" $= "callback-view-wrapper"] $
      foreign_ "hsreact$callback_wrapper" [ callbackRenderingView "foo" callbackViewTest ] mempty
-- | Wraps the intl test page in an IntlProvider with English locale and the
-- translation table from 'js_translations'.
intlSpec :: View ()
intlSpec = mkView "intl" $ \() ->
    intlProvider_ "en" (Just js_translations) Nothing $
      view_ intlSpecBody "intl-body" ()
-- | Body of the intl test page: one list item per react-intl feature under
-- test (number, date, time, relative-time, plural, and message formatting).
-- NOTE(review): the @let@ binding for 'step' was destroyed by comment
-- stripping (only the fragment "1969-7-20 02:56 UTC" and the dangling
-- @fullT =@ continuation survived); it is reconstructed below so the
-- remaining time-formatting items compile again.
intlSpecBody :: View ()
intlSpecBody = mkView "intl body" $ \() -> div_ ["id" $= "intl-spec"] $
    ul_ $ do
      -- number formatting
      li_ ["id" $= "f-number", "key" $= "f-number"] $
        formattedNumber_ [ "value" @= (0.9 :: Double), "style" $= "percent" ]
      li_ ["id" $= "f-int", "key" $= "f-int"] $ int_ 100000
      li_ ["id" $= "f-double", "key" $= "f-double"] $ double_ 40000.2
      li_ ["id" $= "f-number-prop", "key" $= "f-number-prop"] $
        input_ [formattedNumberProp "placeholder" (123456 :: Int) []]
      -- date formatting (the moon landing)
      let moon = fromGregorian 1969 7 20
          fullDayF = DayFormat { weekdayF = Just "long", eraF = Just "short", yearF = Just "2-digit", monthF = Just "long", dayF = Just "2-digit" }
      li_ ["id" $= "f-shortday", "key" $= "f-shortday"] $ day_ shortDate moon
      li_ ["id" $= "f-fullday", "key" $= "f-fullday"] $ day_ fullDayF moon
      li_ ["id" $= "f-date", "key" $= "f-date"] $ formattedDate_ (Left moon)
        [ "weekday" $= "short", "month" $= "short", "day" $= "numeric", "year" $= "2-digit" ]
      li_ ["id" $= "f-date-prop", "key" $= "f-date-prop"] $
        input_ [formattedDateProp "placeholder" (Left moon) []]
      -- time formatting; reconstructed timestamp: 1969-7-20 02:56 UTC
      let step = UTCTime moon (2*60*60 + 56*60)
          fullT = ( fullDayF
                  , TimeFormat { hourF = Just "numeric", minuteF = Just "2-digit", secondF = Just "numeric", timeZoneNameF = Just "long" }
                  )
      li_ ["id" $= "f-shorttime", "key" $= "f-shorttime"] $ utcTime_ shortDateTime step
      li_ ["id" $= "f-fulltime", "key" $= "f-fulltime"] $ utcTime_ fullT step
      li_ ["id" $= "f-time", "key" $= "f-time"] $ formattedDate_ (Right step)
        [ "year" $= "2-digit", "month" $= "short", "day" $= "numeric"
        , "hour" $= "numeric", "minute" $= "2-digit", "second" $= "numeric"
        , "timeZoneName" $= "short"
        , "timeZone" $= "Pacific/Tahiti"
        ]
      li_ ["id" $= "f-time-prop", "key" $= "f-time-prop"] $
        input_ [formattedDateProp "placeholder" (Right step)
                 [ "year" `iprop` ("2-digit" :: String)
                 , "month" `iprop` ("short" :: String)
                 , "day" `iprop` ("2-digit" :: String)
                 , "hour" `iprop` ("numeric" :: String)
                 , "timeZone" `iprop` ("Pacific/Tahiti" :: String)
                 ]
               ]
      -- relative time
      li_ ["id" $= "f-relative", "key" $= "f-relative"] $ relativeTo_ step
      li_ ["id" $= "f-relative-days", "key" $= "f-relative-days"] $ formattedRelative_ step [ "units" $= "day" ]
      -- plurals
      li_ ["id" $= "f-plural", "key" $= "f-plural"] $ plural_ [ "value" @= (100 :: Int), "one" $= "plural one", "other" $= "plural other"]
      li_ ["id" $= "f-plural-prop", "key" $= "f-plural-prop"] $
        input_ [pluralProp "placeholder" (100 :: Int) ["one" `iprop` ("plural one" :: String), "other" `iprop` ("plural other" :: String)]]
      -- messages (Template Haskell splices; message ids must be unique)
      li_ ["id" $= "f-msg", "key" $= "f-msg"] $
        $(message "photos" "{name} took {numPhotos, plural, =0 {no photos} =1 {one photo} other {# photos}} {takenAgo}.")
          [ "name" $= "Neil Armstrong"
          , "numPhotos" @= (100 :: Int)
          , elementProperty "takenAgo" $ span_ ["id" $= "takenAgoSpan"] "years ago"
          ]
      li_ ["id" $= "f-msg-prop", "key" $= "f-msg-prop"] $
        input_ [ $(messageProp "placeholder" "photosprop" "{name} took {numPhotos, plural, =0 {no photos} =1 {one photo} other {# photos}}")
                 [ "name" `iprop` ("Neil Armstrong" :: String)
                 , "numPhotos" `iprop` (100 :: Int)
                 ]
               ]
      li_ ["id" $= "f-msg-with-trans", "key" $= "f-msg-with-trans"] $
        $(message "with_trans" "this is not used {abc}") ["abc" $= "xxx"]
      li_ ["id" $= "f-msg-with-descr", "key" $= "f-msg-with-descr"] $
        $(message' "photos2" "How many photos?" "{name} took {numPhotos, plural, =0 {no photos} =1 {one photo} other {# photos}}.")
          [ "name" $= "Neil Armstrong"
          , "numPhotos" @= (0 :: Int)
          ]
      li_ ["id" $= "f-msg-prop-with-descr", "key" $= "f-msg-prop-with-descr"] $
        input_ [$(messageProp' "placeholder" "photosprop2" "How many photos?" "{name} took {numPhotos, number} photos")
                 [ "name" `iprop` ("Neil Armstrong" :: String)
                 , "numPhotos" `iprop` (0 :: Int)
                 ]
               ]
      li_ ["id" $= "f-html-msg", "key" $= "f-html-msg"] $
        $(htmlMsg "html1" "<b>{num}</b> is the answer to life, the universe, and everything")
          [ "num" @= (42 :: Int) ]
      li_ ["id" $= "f-html-msg-with-descr", "key" $= "f-html-msg-with-descr"] $
        $(htmlMsg' "html2" "Hitchhiker's Guide" "{num} is the <b>answer</b> to life, the universe, and everything")
          [ "num" @= (42 :: Int) ]
--------------------------------------------------------------------------------
--- Main
--------------------------------------------------------------------------------
-- | Root view of the test client, composing all the test sections:
-- events, stores, intl, JS callbacks, and raw JavaScript rendering.
testClient :: View ()
testClient = mkView "app" $ \() ->
    div_ $ do
      view_ eventsView "events" ()
      view_ storeSpec "store" ()
      view_ intlSpec "intl" ()
      view_ callbackViewWrapper "callback" ()
      div_ ["key" $= "raw"] $
        rawJsRendering js_testRawJs $
          span_ ["id" $= "test-raw-js-body", "key" $= "raw-body"]
            "Raw Javascript Render Body"
-- | Entry point: register both stores, then mount the app at the DOM
-- element with id "app".  Store registration must happen before rendering.
main :: IO ()
main = do
    initOutputStore
    initCharacterStore
    reactRenderView "app" testClient
#ifdef __GHCJS__
-- Real FFI bindings, available only when compiling with GHCJS.
foreign import javascript unsafe
  "hsreact$log_message($1)"
  js_output :: JSString -> IO ()
foreign import javascript unsafe
  "React['createElement']('p', {'id': 'test-raw-js-para', 'key': 'test-raw-para'}, $2)"
  js_testRawJs :: JSVal -> JSArray -> IO JSVal
foreign import javascript unsafe
  "{'with_trans': 'message from translation {abc}'}"
  js_translations :: JSVal
#else
-- Native (non-GHCJS) stubs so the module still type-checks with GHC.
js_output :: JSString -> IO ()
js_output _ = error "js_output only works with GHCJS"
js_testRawJs :: JSVal -> JSArray -> IO JSVal
js_testRawJs _ _ = error "js_testRawJs only works with GHCJS"
js_translations :: JSVal
js_translations = error "js_translations only works with GHCJS"
#endif
-- NOTE(review): the three calls below appeared as bare top-level
-- expressions, which is not valid Haskell; they almost certainly lived
-- inside a block comment that was stripped during extraction.  Restored as
-- comments: run them (e.g. from ghci) to dump the collected intl messages.
-- writeIntlMessages (intlFormatJson "msgs/jsonmsgs.json")
-- writeIntlMessages (intlFormatJsonWithoutDescription "msgs/jsonnodescr.json")
-- writeIntlMessages (intlFormatAndroidXML "msgs/android.xml")
| null | https://raw.githubusercontent.com/liqula/react-hs/204c96ee3514b5ddec65378d37872266b05e8954/react-hs/test/client/TestClient.hs | haskell | # LANGUAGE BangPatterns #
# LANGUAGE EmptyDataDecls #
# LANGUAGE OverloadedStrings #
log both to the console and to js_output
------------------------------------------------------------------------------
- Events
------------------------------------------------------------------------------
, logT $ focusRelatedTarget f
------------------------------------------------------------------------------
- Stores and should component update
------------------------------------------------------------------------------
normally would use lenses to update part of the store
normally would use lenses to update part of the store
li_ ["key" $= "h12"] $ view_ singleCharacterView "12" (c2 humanPair)
------------------------------------------------------------------------------
- Callback returning view
------------------------------------------------------------------------------
------------------------------------------------------------------------------
- Intl
------------------------------------------------------------------------------
------------------------------------------------------------------------------
- Main
------------------------------------------------------------------------------
| Test a lifecycle view with all lifecycle methods nothing | # LANGUAGE CPP #
# LANGUAGE DataKinds #
# LANGUAGE DeriveGeneric #
# LANGUAGE ExistentialQuantification #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE FunctionalDependencies #
# LANGUAGE LambdaCase #
# LANGUAGE MagicHash #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE PolyKinds #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TupleSections #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeFamilyDependencies #
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
# LANGUAGE ViewPatterns #
# LANGUAGE TemplateHaskell #
# OPTIONS_GHC -fno - warn - orphans #
module Main (main) where
import Control.Monad
import Data.Monoid ((<>))
import Data.Typeable (Typeable, Proxy(..))
import Debug.Trace
import GHC.Generics (Generic)
import Data.Time (UTCTime(..), fromGregorian)
import React.Flux
import React.Flux.Addons.Intl
import qualified Data.Text as T
import GHCJS.Types (JSVal, JSString)
import JavaScript.Array (JSArray)
import qualified Data.JSString.Text as JSS
data OutputStoreData = OutputStoreData
deriving (Eq, Show, Typeable)
instance StoreData OutputStoreData where
type StoreAction OutputStoreData = [T.Text]
transform ss OutputStoreData = do
mapM_ (js_output . JSS.textToJSString) ss
trace (unlines $ map T.unpack ss) $ return OutputStoreData
initOutputStore :: IO ()
initOutputStore = registerInitialStore OutputStoreData
output :: [T.Text] -> [SomeStoreAction]
output s = [action @OutputStoreData s]
logM :: (T.Text -> Bool) -> T.Text
logM f = "alt modifier: " <> (T.pack $ show (f "Alt"))
logT :: EventTarget -> T.Text
logT t = eventTargetProp t "id"
tshow :: Show a => a -> T.Text
tshow = T.pack . show
rawShowView :: View Int
rawShowView = mkView "raw show view" elemShow
eventsView :: View ()
eventsView = mkView "events" $ \() ->
div_ $ do
p_ ["key" $= "text"] $
input_ [ "type" $= "text"
, "id" $= "keyinput"
, "placeholder" $= "onKeyDown"
, onKeyDown $ \e k -> simpleHandler $ output
[ "keydown"
, tshow e
, tshow k
, logM (keyGetModifierState k)
, logT (evtTarget e)
, logT (evtCurrentTarget e)
]
, onFocus $ \e _ -> simpleHandler $ output
[ "focus"
, tshow e
]
]
p_ ["key" $= "click"] $
label_ [ "id" $= "clickinput"
, onClick $ \e m -> simpleHandler $ output
[ "click"
, tshow e
, tshow m
, logM (mouseGetModifierState m)
, logT ( mouseRelatedTarget m )
]
]
"onClick"
p_ ["key" $= "touch"] $
label_ [ "id" $= "touchinput"
, onTouchStart $ \e t -> simpleHandler $ output
[ "touchstart"
, tshow e
, tshow t
, logM (touchGetModifierState t)
, logT (touchTarget $ head $ touchTargets t)
, "endtouch"
]
]
"onTouchStart"
p_ ["key" $= "prevent"] $
a_ [ "id" $= "some-link"
, "href" $= ""
, onClick $ \_ _ -> preventDefault $ output ["Click some-link"]
]
"Testing preventDefault"
div_ ["key" $= "prop"] $
div_ [ "id" $= "outer-div"
, onClick $ \_ _ -> simpleHandler $ output ["Click on outer div"]
, capturePhase $ onDoubleClick $ \_ _ -> stopPropagation $ output ["Double click outer div"]
] $ do
span_ [ "id" $= "inner-span"
, onClick $ \e _ -> stopPropagation e `seq` simpleHandler (output ["Click inner span"])
, onDoubleClick $ \_ _ -> simpleHandler $ output ["Double click inner span"]
]
"Testing stopPropagation"
p_ [ "id" $= "raw-show-view", "key" $= "raw"] $ view_ rawShowView "raw" 42
data Character = Character !Int !String
deriving (Typeable, Eq)
instance Show Character where
show (Character i s) = "C" ++ show i ++ " - " ++ s
data CharacterPair = CharacterPair {
c1 :: !Character
, c2 :: !Character
} deriving (Typeable, Eq)
instance Show CharacterPair where
show (CharacterPair x1 x2) = show x1 ++ ", " ++ show x2
data Humans = Humans
{ h1 :: !CharacterPair
, h2 :: !CharacterPair
} deriving (Typeable, Eq, Show)
instance HasField "h1" Humans CharacterPair where
getField = h1
instance HasField "h2" Humans CharacterPair where
getField = h2
data Tiste = Tiste
{ t1 :: !CharacterPair
, t2 :: !CharacterPair
} deriving (Typeable, Eq, Show)
data CharacterIndex = P1_C1 | P1_C2 | P2_C1 | P2_C2
deriving (Show, Eq, Typeable, Generic, Bounded, Enum)
data TestStoreAction = IncrementCharacter CharacterIndex
| NoChangeToCharacters
deriving (Show, Typeable, Generic)
incrChar :: Character -> Character
incrChar (Character i s) = Character (i+1) s
instance StoreData Humans where
type StoreAction Humans = TestStoreAction
transform NoChangeToCharacters cg = return cg
transform (IncrementCharacter P1_C1) cg = return $ cg { h1 = (h1 cg) { c1 = incrChar (c1 $ h1 cg) }}
transform (IncrementCharacter P1_C2) cg = return $ cg { h1 = (h1 cg) { c2 = incrChar (c2 $ h1 cg) }}
transform (IncrementCharacter P2_C1) cg = return $ cg { h2 = (h2 cg) { c1 = incrChar (c1 $ h2 cg) }}
transform (IncrementCharacter P2_C2) cg = return $ cg { h2 = (h2 cg) { c2 = incrChar (c2 $ h2 cg) }}
instance StoreData Tiste where
type StoreAction Tiste = TestStoreAction
transform NoChangeToCharacters cg = return cg
transform (IncrementCharacter P1_C1) cg = return $ cg { t1 = (t1 cg) { c1 = incrChar (c1 $ t1 cg) }}
transform (IncrementCharacter P1_C2) cg = return $ cg { t1 = (t1 cg) { c2 = incrChar (c2 $ t1 cg) }}
transform (IncrementCharacter P2_C1) cg = return $ cg { t2 = (t2 cg) { c1 = incrChar (c1 $ t2 cg) }}
transform (IncrementCharacter P2_C2) cg = return $ cg { t2 = (t2 cg) { c2 = incrChar (c2 $ t2 cg) }}
initCharacterStore :: IO ()
initCharacterStore = do
registerInitialStore $
Humans
{ h1 = CharacterPair
{ c1 = Character 10 "Quick Ben"
, c2 = Character 20 "Whiskeyjack"
}
, h2 = CharacterPair
{ c1 = Character 30 "Fiddler"
, c2 = Character 40 "Kruppe"
}
}
registerInitialStore $
Tiste
{ t1 = CharacterPair
{ c1 = Character 100 "Andarist"
, c2 = Character 110 "Osseric"
}
, t2 = CharacterPair
{ c1 = Character 120 "Anomander Rake"
, c2 = Character 130 "Korlot"
}
}
logWhenUpdated_ :: String -> ReactElementM handler ()
logWhenUpdated_ m = foreign_ "hsreact$log_when_updated" ["key" $= "log", "message" &= m] mempty
singleCharacterView :: View Character
singleCharacterView = mkView "single-character" $ \c ->
logWhenUpdated_ $ "Single character " ++ show c
twoCharacterView :: View (Character, Character)
twoCharacterView = mkView "two-character" $ \(ch1, ch2) ->
logWhenUpdated_ $ "Two characters " ++ show ch1 ++ " and " ++ show ch2
pairCharacterView :: View CharacterPair
pairCharacterView = mkView "pair-characters" $ \p ->
logWhenUpdated_ $ "Pair of characters " ++ show p
statefulCharacterView :: View Character
statefulCharacterView = mkStatefulView "stateful-char" (-100 :: Int) $ \s c ->
p_ $ do
logWhenUpdated_ ("Stateful character " ++ show c)
span_ ["className" $= "state", "key" $= "cur state"] $ elemShow s
button_ [ "className" $= "incr-state"
, onClick $ \_ _ -> simpleHandler $ \s' -> ([], Just $ s' + 1)
, "key" $= "btn"
]
"Incr"
fullHumanView :: View (Character, Character)
fullHumanView = mkControllerView @'[StoreArg Humans] "full humans" $ \humans (extra1, extra2) ->
ul_ ["id" $= "full-humans-view"] $ do
li_ ["key" $= "header"] $ logWhenUpdated_ "All the humans, plus Andarist and Rake"
li_ ["key" $= "11"] $ view_ singleCharacterView "11" (c1 $ h1 humans)
li_ ["key" $= "12"] $ view_ singleCharacterView "12" (c2 $ h1 humans)
li_ ["key" $= "21"] $ view_ singleCharacterView "21" (c1 $ h2 humans)
li_ ["key" $= "22"] $ view_ singleCharacterView "22" (c2 $ h2 humans)
li_ ["key" $= "112"] $ view_ twoCharacterView "112" (c1 $ h1 humans, c2 $ h1 humans)
li_ ["key" $= "212"] $ view_ pairCharacterView "212" (h2 $ humans)
li_ ["key" $= "extra1"] $ view_ singleCharacterView "extra1" extra1
li_ ["key" $= "extra2"] $ view_ singleCharacterView "extra2" extra2
tisteAndHumansView :: View ()
tisteAndHumansView = mkControllerView @'[StoreArg Tiste] "tiste-and-humans" $ \tiste () ->
div_ ["id" $= "tiste-view"] $ do
ul_ ["id" $= "tiste-sub-view", "key" $= "tiste-sub-view"] $ do
li_ ["key" $= "header"] $ logWhenUpdated_ "All the tiste"
li_ ["key" $= "11"] $ view_ singleCharacterView "11" (c1 $ t1 tiste)
li_ ["key" $= "12"] $ view_ singleCharacterView "12" (c2 $ t1 tiste)
li_ ["key" $= "21"] $ view_ singleCharacterView "21" (c1 $ t2 tiste)
li_ ["key" $= "22"] $ view_ singleCharacterView "22" (c2 $ t2 tiste)
view_ fullHumanView "humans" (c1 $ t1 tiste, c1 $ t2 tiste)
dualCharacterView :: View ()
dualCharacterView = mkControllerView @'[StoreArg Humans, StoreArg Tiste] "dual-characters" $ \humans tiste () ->
ul_ ["id" $= "dual-character-view"] $ do
li_ ["key" $= "header"] $ logWhenUpdated_ "Quick Ben and Andarist"
li_ ["key" $= "human11"] $ view_ singleCharacterView "11" (c1 $ h1 humans)
li_ ["key" $= "tiste11"] $ view_ singleCharacterView "11" (c1 $ t1 tiste)
li_ ["key" $= "state"] $ view_ statefulCharacterView "state" (c1 $ t2 tiste)
TODO : ' StoreField ' has internal errors ( crashes on browser console ) .
tisteAndSomeHumansView :: View ()
, StoreField Humans " h1 " CharacterPair
ul_ ["id" $= "tiste-and-some-humans"] $ do
li_ ["key" $= "header"] $ logWhenUpdated_ "Just Rake, Korlot, Quick Ben, and Whiskeyjack"
li_ ["key" $= "t21"] $ view_ singleCharacterView "21" (c1 $ t2 tiste)
li_ ["key" $= "t22"] $ view_ singleCharacterView "22" (c2 $ t2 tiste)
li _ [ " key " $ = " h11 " ] $ view _ singleCharacterView " 11 " ( c1 humanPair )
buttons_ :: forall s. (StoreData s, TestStoreAction ~ StoreAction s) => Proxy s -> T.Text -> ReactElementM 'EventHandlerCode ()
buttons_ _ lbl =
ul_ ["id" &= lbl, "key" &= lbl] $ do
li_ ["key" $= "none"] $
button_
[ "id" &= (lbl <> "-none")
, onClick $ \_ _ -> simpleHandler [action @s NoChangeToCharacters]
]
(elemText $ lbl <> " No Change")
forM_ [minBound..maxBound] $ \idx ->
li_ ["key" &= (lbl <> "-change-" <> tshow idx)] $
button_
[ "id" &= (lbl <> "-" <> tshow idx)
, onClick $ \_ _ -> simpleHandler [action @s $ IncrementCharacter idx]
] (elemText $ lbl <> tshow idx)
storeSpec :: View ()
storeSpec = mkView "store spec" $ \() ->
div_ ["id" $= "store-spec"] $ do
buttons_ (Proxy :: Proxy Humans) "Humans"
buttons_ (Proxy :: Proxy Tiste) "Tiste"
view_ tisteAndHumansView "tiste-and-human" ()
view_ dualCharacterView "dual" ()
view_ tisteAndSomeHumansView "tiste-and-some" ()
callbackViewTest :: View (Int, String)
callbackViewTest = mkView "callback view props test" $ \(i, s) ->
p_ [ "id" $= "callback-view-props-test"] $
elemString $ "Props are " ++ show i ++ " and " ++ s
callbackViewWrapper :: View ()
callbackViewWrapper = mkView "callback view wrapper" $ \() ->
div_ ["id" $= "callback-view-wrapper"] $
foreign_ "hsreact$callback_wrapper" [ callbackRenderingView "foo" callbackViewTest ] mempty
intlSpec :: View ()
intlSpec = mkView "intl" $ \() ->
intlProvider_ "en" (Just js_translations) Nothing $
view_ intlSpecBody "intl-body" ()
intlSpecBody :: View ()
intlSpecBody = mkView "intl body" $ \() -> div_ ["id" $= "intl-spec"] $
ul_ $ do
li_ ["id" $= "f-number", "key" $= "f-number"] $
formattedNumber_ [ "value" @= (0.9 :: Double), "style" $= "percent" ]
li_ ["id" $= "f-int", "key" $= "f-int"] $ int_ 100000
li_ ["id" $= "f-double", "key" $= "f-double"] $ double_ 40000.2
li_ ["id" $= "f-number-prop", "key" $= "f-number-prop"] $
input_ [formattedNumberProp "placeholder" (123456 :: Int) []]
let moon = fromGregorian 1969 7 20
fullDayF = DayFormat { weekdayF = Just "long", eraF = Just "short", yearF = Just "2-digit", monthF = Just "long", dayF = Just "2-digit" }
li_ ["id" $= "f-shortday", "key" $= "f-shortday"] $ day_ shortDate moon
li_ ["id" $= "f-fullday", "key" $= "f-fullday"] $ day_ fullDayF moon
li_ ["id" $= "f-date", "key" $= "f-date"] $ formattedDate_ (Left moon)
[ "weekday" $= "short", "month" $= "short", "day" $= "numeric", "year" $= "2-digit" ]
li_ ["id" $= "f-date-prop", "key" $= "f-date-prop"] $
input_ [formattedDateProp "placeholder" (Left moon) []]
1969 - 7 - 20 02:56 UTC
fullT = ( fullDayF
, TimeFormat { hourF = Just "numeric", minuteF = Just "2-digit", secondF = Just "numeric", timeZoneNameF = Just "long" }
)
li_ ["id" $= "f-shorttime", "key" $= "f-shorttime"] $ utcTime_ shortDateTime step
li_ ["id" $= "f-fulltime", "key" $= "f-fulltime"] $ utcTime_ fullT step
li_ ["id" $= "f-time", "key" $= "f-time"] $ formattedDate_ (Right step)
[ "year" $= "2-digit", "month" $= "short", "day" $= "numeric"
, "hour" $= "numeric", "minute" $= "2-digit", "second" $= "numeric"
, "timeZoneName" $= "short"
, "timeZone" $= "Pacific/Tahiti"
]
li_ ["id" $= "f-time-prop", "key" $= "f-time-prop"] $
input_ [formattedDateProp "placeholder" (Right step)
[ "year" `iprop` ("2-digit" :: String)
, "month" `iprop` ("short" :: String)
, "day" `iprop` ("2-digit" :: String)
, "hour" `iprop` ("numeric" :: String)
, "timeZone" `iprop` ("Pacific/Tahiti" :: String)
]
]
li_ ["id" $= "f-relative", "key" $= "f-relative"] $ relativeTo_ step
li_ ["id" $= "f-relative-days", "key" $= "f-relative-days"] $ formattedRelative_ step [ "units" $= "day" ]
li_ ["id" $= "f-plural", "key" $= "f-plural"] $ plural_ [ "value" @= (100 :: Int), "one" $= "plural one", "other" $= "plural other"]
li_ ["id" $= "f-plural-prop", "key" $= "f-plural-prop"] $
input_ [pluralProp "placeholder" (100 :: Int) ["one" `iprop` ("plural one" :: String), "other" `iprop` ("plural other" :: String)]]
li_ ["id" $= "f-msg", "key" $= "f-msg"] $
$(message "photos" "{name} took {numPhotos, plural, =0 {no photos} =1 {one photo} other {# photos}} {takenAgo}.")
[ "name" $= "Neil Armstrong"
, "numPhotos" @= (100 :: Int)
, elementProperty "takenAgo" $ span_ ["id" $= "takenAgoSpan"] "years ago"
]
li_ ["id" $= "f-msg-prop", "key" $= "f-msg-prop"] $
input_ [ $(messageProp "placeholder" "photosprop" "{name} took {numPhotos, plural, =0 {no photos} =1 {one photo} other {# photos}}")
[ "name" `iprop` ("Neil Armstrong" :: String)
, "numPhotos" `iprop` (100 :: Int)
]
]
li_ ["id" $= "f-msg-with-trans", "key" $= "f-msg-with-trans"] $
$(message "with_trans" "this is not used {abc}") ["abc" $= "xxx"]
li_ ["id" $= "f-msg-with-descr", "key" $= "f-msg-with-descr"] $
$(message' "photos2" "How many photos?" "{name} took {numPhotos, plural, =0 {no photos} =1 {one photo} other {# photos}}.")
[ "name" $= "Neil Armstrong"
, "numPhotos" @= (0 :: Int)
]
li_ ["id" $= "f-msg-prop-with-descr", "key" $= "f-msg-prop-with-descr"] $
input_ [$(messageProp' "placeholder" "photosprop2" "How many photos?" "{name} took {numPhotos, number} photos")
[ "name" `iprop` ("Neil Armstrong" :: String)
, "numPhotos" `iprop` (0 :: Int)
]
]
li_ ["id" $= "f-html-msg", "key" $= "f-html-msg"] $
$(htmlMsg "html1" "<b>{num}</b> is the answer to life, the universe, and everything")
[ "num" @= (42 :: Int) ]
li_ ["id" $= "f-html-msg-with-descr", "key" $= "f-html-msg-with-descr"] $
$(htmlMsg' "html2" "Hitchhiker's Guide" "{num} is the <b>answer</b> to life, the universe, and everything")
[ "num" @= (42 :: Int) ]
testClient :: View ()
testClient = mkView "app" $ \() ->
div_ $ do
view_ eventsView "events" ()
view_ storeSpec "store" ()
view_ intlSpec "intl" ()
view_ callbackViewWrapper "callback" ()
div_ ["key" $= "raw"] $
rawJsRendering js_testRawJs $
span_ ["id" $= "test-raw-js-body", "key" $= "raw-body"]
"Raw Javascript Render Body"
main :: IO ()
main = do
initOutputStore
initCharacterStore
reactRenderView "app" testClient
#ifdef __GHCJS__
foreign import javascript unsafe
"hsreact$log_message($1)"
js_output :: JSString -> IO ()
foreign import javascript unsafe
"React['createElement']('p', {'id': 'test-raw-js-para', 'key': 'test-raw-para'}, $2)"
js_testRawJs :: JSVal -> JSArray -> IO JSVal
foreign import javascript unsafe
"{'with_trans': 'message from translation {abc}'}"
js_translations :: JSVal
#else
js_output :: JSString -> IO ()
js_output _ = error "js_output only works with GHCJS"
js_testRawJs :: JSVal -> JSArray -> IO JSVal
js_testRawJs _ _ = error "js_testRawJs only works with GHCJS"
js_translations :: JSVal
js_translations = error "js_translations only works with GHCJS"
#endif
writeIntlMessages (intlFormatJson "msgs/jsonmsgs.json")
writeIntlMessages (intlFormatJsonWithoutDescription "msgs/jsonnodescr.json")
writeIntlMessages (intlFormatAndroidXML "msgs/android.xml")
|
4c08eb554dbcaf60ab5a90d72653017d1c195acb648d8a2c8108c82d017e8296 | Beluga-lang/Beluga | session.ml | open Support
open Beluga
open Syntax.Int
module CompS = Store.Cid.Comp
module F = Fun
module P = Pretty.Int.DefaultPrinter
let dprintf, _, _ = Debug.(makeFunctions' (toFlags [15]))
open Debug.Fmt
type t =
{ theorems : Theorem.t DynArray.t
; finished_theorems: (Theorem.t * Comp.exp option) DynArray.t
; mutual_group : Id.cid_mutual_group
}
let make mutual_group thms =
{ theorems = DynArray.of_list thms
; finished_theorems = DynArray.make 32
; mutual_group
}
(** Gets the list of mutual declarations corresponding to the
currently loaded theorems in the active session.
*)
let get_mutual_decs (s : t) : Comp.total_dec list =
CompS.lookup_mutual_group s.mutual_group
(** Constructs a list of all theorems in this session, both
incomplete and finished.
The incomplete theorems come before the complete theorems.
*)
let full_theorem_list c =
DynArray.to_list c.theorems
@ List.map Pair.fst (DynArray.to_list c.finished_theorems)
let remove_current_theorem s =
DynArray.delete s.theorems 0
let mark_current_theorem_as_proven s e_trans =
let t = DynArray.get s.theorems 0 in
remove_current_theorem s;
DynArray.add s.finished_theorems (t, e_trans)
let defer_theorem s =
let t = DynArray.get s.theorems 0 in
remove_current_theorem s;
DynArray.add s.theorems t
(** Gets the next theorem from the interpreter state.
Returns `None` if there are no theorems left,
i.e. all theorems in the mutual block have been proven.
*)
let next_theorem s : Theorem.t option =
DynArray.head s.theorems
(** Decides whether a given `cid` is in among the currently being
proven theorems. *)
let cid_is_in_current_theorem_set s c =
List.exists (F.flip Theorem.has_cid_of c) (DynArray.to_list s.theorems)
(** Infer invocation kind based on `exp_syn` and the current theorem
*)
let infer_invocation_kind s (i : Comp.exp) : Comp.invoke_kind =
match Comp.head_of_application i with
| Comp.Const (_, c) when cid_is_in_current_theorem_set s c -> `ih
| _ -> `lemma
let lookup_theorem c name =
let open Option in
DynArray.rfind_opt_idx
c.theorems
F.(flip Theorem.has_name_of name)
$> Pair.snd
(** Selects a theorem by name in the current session.
Returns whether the selection succeeded. (A theorem by such name could be found.)
*)
let select_theorem c name =
match
DynArray.rfind_opt_idx
c.theorems
(F.flip Theorem.has_name_of name)
with
| None -> false
| Some (i, t) ->
DynArray.delete c.theorems i;
DynArray.insert c.theorems 0 t;
true
let get_session_kind c : [`introduced | `loaded] =
let existing_holes = Holes.get_harpoon_subgoals () in
(* If the theorems in the session do not have
* any predefined holes in the loaded files,
* that session is newly defined in this harpoon process,
*)
let is_loaded =
full_theorem_list c
|> List.exists
begin fun thm ->
existing_holes
|> List.map F.(Pair.fst ++ Pair.snd)
|> List.exists (Theorem.has_cid_of thm)
end
in
if is_loaded
then `loaded
else `introduced
let prepare_translated_proofs tes total_decs =
let trans_name name =
Name.(mk_name (SomeString ("_" ^ show name ^ "_trans")))
in
(* create the totality declarations for the translated
proofs, and allocate the mutual group with them. *)
let total_decs =
List.map
(fun dec ->
let open Comp in
{ dec with name = trans_name dec.name })
total_decs
in
let mutual_group_id =
CompS.add_mutual_group total_decs
in
(* map from old cids to new cids *)
let h = Hashtbl.create 8 in
let etaus =
List.map
begin fun (t, e) ->
let open CompS in
let cid, entry = Theorem.get_entry' t in
let tau = entry.Entry.typ in
let _ =
(* the type to store for the newly allocated must be without
inductive stars, so we obtain it directly from the store entry
for the proof. *)
add
begin fun cid' ->
(* associate the cid of this theorem to the newly allocated
cid for the translated proof *)
Hashtbl.add h cid cid';
mk_entry
None
(trans_name entry.Entry.name)
tau
entry.Entry.implicit_arguments
mutual_group_id
None
(* We use None for the declaration number here.
This technically means that these definitions are
considered floating. However, this will not be a problem
during late scopechecking because definitions belonging
to the same mutual group skip late scopechecking. *)
end
in
we need to check the translated proof against the type * with *
inductive stars , so we obtain it from the initial subgoal of the
theorem
inductive stars, so we obtain it from the initial subgoal of the
theorem *)
let tau_ann = Theorem.get_statement t |> Whnf.cnormCTyp in
(e, tau_ann)
end
tes
in
Now h is populated , so we can rewrite the programs with the
new cids .
First , convert the hashtable to a function sending unmapped
entries to themselves .
new cids.
First, convert the hashtable to a function sending unmapped
entries to themselves.
*)
let cid_map k =
Hashtbl.find_opt h k |> Option.value ~default:k
in
let etaus =
List.map
(fun (e, tau) -> (CidProgRewrite.exp cid_map e, tau))
etaus
in
(etaus, total_decs)
type translation_check_result =
[ `some_translations_failed
| `check_error of exn
| `ok
]
* Checks the translated proofs in the session .
All theorems in the session must be finished .
This function will allocate one new mutual group , and for each
theorem , a new cid for the translated proof .
Next , it will rewrite the cids in each translated proof to
refer to the new cids .
Finally , it checks each program .
All theorems in the session must be finished.
This function will allocate one new mutual group, and for each
theorem, a new cid for the translated proof.
Next, it will rewrite the cids in each translated proof to
refer to the new cids.
Finally, it checks each program.
*)
let check_translated_proofs c : translation_check_result =
match
DynArray.to_list c.finished_theorems
|> List.traverse (fun (t, e) -> let open Option in e $> fun e -> (t, e))
with
| None -> `some_translations_failed
| Some tes ->
let ettaus, total_decs =
prepare_translated_proofs tes (get_mutual_decs c)
in
dprintf begin fun p ->
let open Format in
p.fmt "[check_translated_proofs] @[<v>total_decs:\
@,@[%a@]@]"
(pp_print_list ~pp_sep: pp_print_cut
P.fmt_ppr_cmp_total_dec)
total_decs
end;
try
List.iter
(fun (e, tau) ->
dprintf begin fun p ->
p.fmt "[check_translated_proofs] statement @[%a@]"
P.(fmt_ppr_cmp_typ LF.Empty l0) tau
end;
Check.Comp.check None LF.Empty LF.Empty total_decs
e (tau, Whnf.m_id))
ettaus;
`ok
with
| exc -> `check_error exc
(** Runs the theorem configuration prompt to construct a mutual
group of theorems.
*)
let configuration_wizard' io automation_state : Id.cid_mutual_group * Theorem.t list =
let rec do_prompts i : Theorem.Conf.t list =
IO.printf io "Configuring theorem #%d@." i;
(* prompt for name, and allow using empty to signal we're done. *)
match
IO.parsed_prompt io " Name of theorem (:quit or empty to finish): "
None
Parser.(maybe next_theorem)
with
| None | Some `quit -> []
| Some (`next name) ->
let tau, k =
(* XXX These calls are sketchy as hell.
There must be a better place to put them -je
*)
Reconstruct.reset_fvarCnstr ();
Store.FCVar.clear ();
(* Now prompt for the statement, and disallow empty to signal we're done. *)
IO.parsed_prompt io " Statement of theorem: " None
Parser.(cmp_typ $> Interactive.elaborate_typ LF.Empty)
in
dprintf begin fun p ->
p.fmt "@[<v 2>[harpoon] [configuration_wizard] elaborated type\
@,@[%a@]\
@,with %d implicit parameters@]"
P.(fmt_ppr_cmp_typ LF.Empty l0) tau
k
end;
let order =
let p =
let open Parser in
alt
(trust_order $> Either.left)
(total_order numeric_total_order $> Either.right)
$> begin function
| Either.Right no ->
let order = Reconstruct.numeric_order tau no in
dprintf begin fun p ->
p.fmt "[configuration_wizard] @[<v>elaborated numeric order\
@, @[%a@]\
@,considering %d implicit arguments.@]"
P.(fmt_ppr_cmp_numeric_order) order
k
end;
Either.right order
| trust -> trust
(* TODO we should check that the order is legit
here so that we can right away prompt the user
for a correct one; currently this check only
happens very late when the theorem set is
configured. *)
end
in
IO.parsed_prompt io " Induction order (empty for none): " None
(Parser.maybe p)
in
IO.printf io "@]";
let total_dec_kind =
match order with
| Some (Either.Right no) -> `inductive no
| Some (Either.Left (Synext.Comp.Trust _)) -> `trust
| None -> `not_recursive
in
let conf = Theorem.Conf.make name total_dec_kind tau k in
conf :: do_prompts (i + 1)
in
let confs = do_prompts 1 in
Theorem.configure_set (IO.formatter io) automation_state confs
let configuration_wizard io automation_state : t option =
let mutual_group, thms = configuration_wizard' io automation_state in
(* c will be populated with theorems; if there are none it's
because the session is over. *)
match thms with
| _ :: _ ->
Some (make mutual_group thms)
| [] -> None
let fmt_ppr_theorem_list ppf c =
let open Format in
let theorem_list = full_theorem_list c in
let fmt_ppr_theorem_completeness ppf t =
match Theorem.completeness t with
| `complete -> fprintf ppf " (finished)"
| _ -> ()
in
let fmt_ppr_indexed_theorem ppf (i, t) =
fprintf ppf "%d. %a%a" (i + 1)
Name.pp (Theorem.get_name t)
fmt_ppr_theorem_completeness t
in
let fmt_ppr_indexed_theorems =
Format.pp_print_list ~pp_sep: Format.pp_print_cut fmt_ppr_indexed_theorem
in
(* It may be better to add the current session name to this message *)
fprintf ppf
"@[<v>%a@]"
fmt_ppr_indexed_theorems (List.index theorem_list)
let materialize_theorems c =
if DynArray.length c.theorems > 0 then
Error.violation
"[materialize_theorems] not all theorems are complete";
DynArray.iter F.(Theorem.materialize ++ Pair.fst) c.finished_theorems
| null | https://raw.githubusercontent.com/Beluga-lang/Beluga/520c41534820e12a52d640f3151480e790baa17e/src/harpoon/session.ml | ocaml | * Gets the list of mutual declarations corresponding to the
currently loaded theorems in the active session.
* Constructs a list of all theorems in this session, both
incomplete and finished.
The incomplete theorems come before the complete theorems.
* Gets the next theorem from the interpreter state.
Returns `None` if there are no theorems left,
i.e. all theorems in the mutual block have been proven.
* Decides whether a given `cid` is in among the currently being
proven theorems.
* Infer invocation kind based on `exp_syn` and the current theorem
* Selects a theorem by name in the current session.
Returns whether the selection succeeded. (A theorem by such name could be found.)
If the theorems in the session do not have
* any predefined holes in the loaded files,
* that session is newly defined in this harpoon process,
create the totality declarations for the translated
proofs, and allocate the mutual group with them.
map from old cids to new cids
the type to store for the newly allocated must be without
inductive stars, so we obtain it directly from the store entry
for the proof.
associate the cid of this theorem to the newly allocated
cid for the translated proof
We use None for the declaration number here.
This technically means that these definitions are
considered floating. However, this will not be a problem
during late scopechecking because definitions belonging
to the same mutual group skip late scopechecking.
* Runs the theorem configuration prompt to construct a mutual
group of theorems.
prompt for name, and allow using empty to signal we're done.
XXX These calls are sketchy as hell.
There must be a better place to put them -je
Now prompt for the statement, and disallow empty to signal we're done.
TODO we should check that the order is legit
here so that we can right away prompt the user
for a correct one; currently this check only
happens very late when the theorem set is
configured.
c will be populated with theorems; if there are none it's
because the session is over.
It may be better to add the current session name to this message | open Support
open Beluga
open Syntax.Int
module CompS = Store.Cid.Comp
module F = Fun
module P = Pretty.Int.DefaultPrinter
let dprintf, _, _ = Debug.(makeFunctions' (toFlags [15]))
open Debug.Fmt
type t =
{ theorems : Theorem.t DynArray.t
; finished_theorems: (Theorem.t * Comp.exp option) DynArray.t
; mutual_group : Id.cid_mutual_group
}
let make mutual_group thms =
{ theorems = DynArray.of_list thms
; finished_theorems = DynArray.make 32
; mutual_group
}
let get_mutual_decs (s : t) : Comp.total_dec list =
CompS.lookup_mutual_group s.mutual_group
let full_theorem_list c =
DynArray.to_list c.theorems
@ List.map Pair.fst (DynArray.to_list c.finished_theorems)
let remove_current_theorem s =
DynArray.delete s.theorems 0
let mark_current_theorem_as_proven s e_trans =
let t = DynArray.get s.theorems 0 in
remove_current_theorem s;
DynArray.add s.finished_theorems (t, e_trans)
let defer_theorem s =
let t = DynArray.get s.theorems 0 in
remove_current_theorem s;
DynArray.add s.theorems t
let next_theorem s : Theorem.t option =
DynArray.head s.theorems
let cid_is_in_current_theorem_set s c =
List.exists (F.flip Theorem.has_cid_of c) (DynArray.to_list s.theorems)
let infer_invocation_kind s (i : Comp.exp) : Comp.invoke_kind =
match Comp.head_of_application i with
| Comp.Const (_, c) when cid_is_in_current_theorem_set s c -> `ih
| _ -> `lemma
let lookup_theorem c name =
let open Option in
DynArray.rfind_opt_idx
c.theorems
F.(flip Theorem.has_name_of name)
$> Pair.snd
let select_theorem c name =
match
DynArray.rfind_opt_idx
c.theorems
(F.flip Theorem.has_name_of name)
with
| None -> false
| Some (i, t) ->
DynArray.delete c.theorems i;
DynArray.insert c.theorems 0 t;
true
let get_session_kind c : [`introduced | `loaded] =
let existing_holes = Holes.get_harpoon_subgoals () in
let is_loaded =
full_theorem_list c
|> List.exists
begin fun thm ->
existing_holes
|> List.map F.(Pair.fst ++ Pair.snd)
|> List.exists (Theorem.has_cid_of thm)
end
in
if is_loaded
then `loaded
else `introduced
let prepare_translated_proofs tes total_decs =
let trans_name name =
Name.(mk_name (SomeString ("_" ^ show name ^ "_trans")))
in
let total_decs =
List.map
(fun dec ->
let open Comp in
{ dec with name = trans_name dec.name })
total_decs
in
let mutual_group_id =
CompS.add_mutual_group total_decs
in
let h = Hashtbl.create 8 in
let etaus =
List.map
begin fun (t, e) ->
let open CompS in
let cid, entry = Theorem.get_entry' t in
let tau = entry.Entry.typ in
let _ =
add
begin fun cid' ->
Hashtbl.add h cid cid';
mk_entry
None
(trans_name entry.Entry.name)
tau
entry.Entry.implicit_arguments
mutual_group_id
None
end
in
we need to check the translated proof against the type * with *
inductive stars , so we obtain it from the initial subgoal of the
theorem
inductive stars, so we obtain it from the initial subgoal of the
theorem *)
let tau_ann = Theorem.get_statement t |> Whnf.cnormCTyp in
(e, tau_ann)
end
tes
in
Now h is populated , so we can rewrite the programs with the
new cids .
First , convert the hashtable to a function sending unmapped
entries to themselves .
new cids.
First, convert the hashtable to a function sending unmapped
entries to themselves.
*)
let cid_map k =
Hashtbl.find_opt h k |> Option.value ~default:k
in
let etaus =
List.map
(fun (e, tau) -> (CidProgRewrite.exp cid_map e, tau))
etaus
in
(etaus, total_decs)
type translation_check_result =
[ `some_translations_failed
| `check_error of exn
| `ok
]
* Checks the translated proofs in the session .
All theorems in the session must be finished .
This function will allocate one new mutual group , and for each
theorem , a new cid for the translated proof .
Next , it will rewrite the cids in each translated proof to
refer to the new cids .
Finally , it checks each program .
All theorems in the session must be finished.
This function will allocate one new mutual group, and for each
theorem, a new cid for the translated proof.
Next, it will rewrite the cids in each translated proof to
refer to the new cids.
Finally, it checks each program.
*)
let check_translated_proofs c : translation_check_result =
match
DynArray.to_list c.finished_theorems
|> List.traverse (fun (t, e) -> let open Option in e $> fun e -> (t, e))
with
| None -> `some_translations_failed
| Some tes ->
let ettaus, total_decs =
prepare_translated_proofs tes (get_mutual_decs c)
in
dprintf begin fun p ->
let open Format in
p.fmt "[check_translated_proofs] @[<v>total_decs:\
@,@[%a@]@]"
(pp_print_list ~pp_sep: pp_print_cut
P.fmt_ppr_cmp_total_dec)
total_decs
end;
try
List.iter
(fun (e, tau) ->
dprintf begin fun p ->
p.fmt "[check_translated_proofs] statement @[%a@]"
P.(fmt_ppr_cmp_typ LF.Empty l0) tau
end;
Check.Comp.check None LF.Empty LF.Empty total_decs
e (tau, Whnf.m_id))
ettaus;
`ok
with
| exc -> `check_error exc
let configuration_wizard' io automation_state : Id.cid_mutual_group * Theorem.t list =
let rec do_prompts i : Theorem.Conf.t list =
IO.printf io "Configuring theorem #%d@." i;
match
IO.parsed_prompt io " Name of theorem (:quit or empty to finish): "
None
Parser.(maybe next_theorem)
with
| None | Some `quit -> []
| Some (`next name) ->
let tau, k =
Reconstruct.reset_fvarCnstr ();
Store.FCVar.clear ();
IO.parsed_prompt io " Statement of theorem: " None
Parser.(cmp_typ $> Interactive.elaborate_typ LF.Empty)
in
dprintf begin fun p ->
p.fmt "@[<v 2>[harpoon] [configuration_wizard] elaborated type\
@,@[%a@]\
@,with %d implicit parameters@]"
P.(fmt_ppr_cmp_typ LF.Empty l0) tau
k
end;
let order =
let p =
let open Parser in
alt
(trust_order $> Either.left)
(total_order numeric_total_order $> Either.right)
$> begin function
| Either.Right no ->
let order = Reconstruct.numeric_order tau no in
dprintf begin fun p ->
p.fmt "[configuration_wizard] @[<v>elaborated numeric order\
@, @[%a@]\
@,considering %d implicit arguments.@]"
P.(fmt_ppr_cmp_numeric_order) order
k
end;
Either.right order
| trust -> trust
end
in
IO.parsed_prompt io " Induction order (empty for none): " None
(Parser.maybe p)
in
IO.printf io "@]";
let total_dec_kind =
match order with
| Some (Either.Right no) -> `inductive no
| Some (Either.Left (Synext.Comp.Trust _)) -> `trust
| None -> `not_recursive
in
let conf = Theorem.Conf.make name total_dec_kind tau k in
conf :: do_prompts (i + 1)
in
let confs = do_prompts 1 in
Theorem.configure_set (IO.formatter io) automation_state confs
let configuration_wizard io automation_state : t option =
let mutual_group, thms = configuration_wizard' io automation_state in
match thms with
| _ :: _ ->
Some (make mutual_group thms)
| [] -> None
let fmt_ppr_theorem_list ppf c =
let open Format in
let theorem_list = full_theorem_list c in
let fmt_ppr_theorem_completeness ppf t =
match Theorem.completeness t with
| `complete -> fprintf ppf " (finished)"
| _ -> ()
in
let fmt_ppr_indexed_theorem ppf (i, t) =
fprintf ppf "%d. %a%a" (i + 1)
Name.pp (Theorem.get_name t)
fmt_ppr_theorem_completeness t
in
let fmt_ppr_indexed_theorems =
Format.pp_print_list ~pp_sep: Format.pp_print_cut fmt_ppr_indexed_theorem
in
fprintf ppf
"@[<v>%a@]"
fmt_ppr_indexed_theorems (List.index theorem_list)
let materialize_theorems c =
if DynArray.length c.theorems > 0 then
Error.violation
"[materialize_theorems] not all theorems are complete";
DynArray.iter F.(Theorem.materialize ++ Pair.fst) c.finished_theorems
|
94a011f7877cba0b67679ed8b659cf7a428d9c8eb70e761be3a9fcf06a5199eb | clojure-interop/java-jdk | SynthButtonUI.clj | (ns javax.swing.plaf.synth.SynthButtonUI
"Provides the Synth L&F UI delegate for
JButton."
(:refer-clojure :only [require comment defn ->])
(:import [javax.swing.plaf.synth SynthButtonUI]))
(defn ->synth-button-ui
"Constructor."
(^SynthButtonUI []
(new SynthButtonUI )))
(defn *create-ui
"Creates a new UI object for the given component.
c - component to create UI object for - `javax.swing.JComponent`
returns: the UI object - `javax.swing.plaf.ComponentUI`"
(^javax.swing.plaf.ComponentUI [^javax.swing.JComponent c]
(SynthButtonUI/createUI c)))
(defn get-minimum-size
"Returns the specified component's minimum size appropriate for
the look and feel. If null is returned, the minimum
size will be calculated by the component's layout manager instead
(this is the preferred approach for any component with a specific
layout manager installed). The default implementation of this
method invokes getPreferredSize and returns that value.
this argument is often ignored , but might be used if the UI object is stateless and shared by multiple components - ` javax.swing . JComponent `
returns: a Dimension object or null - `java.awt.Dimension`"
(^java.awt.Dimension [^SynthButtonUI this ^javax.swing.JComponent c]
(-> this (.getMinimumSize c))))
(defn get-maximum-size
"Returns the specified component's maximum size appropriate for
the look and feel. If null is returned, the maximum
size will be calculated by the component's layout manager instead
(this is the preferred approach for any component with a specific
layout manager installed). The default implementation of this
method invokes getPreferredSize and returns that value.
this argument is often ignored , but might be used if the UI object is stateless and shared by multiple components - ` javax.swing . JComponent `
returns: a Dimension object or null - `java.awt.Dimension`"
(^java.awt.Dimension [^SynthButtonUI this ^javax.swing.JComponent c]
(-> this (.getMaximumSize c))))
(defn paint-border
"Paints the border.
context - a component context - `javax.swing.plaf.synth.SynthContext`
g - Graphics to paint on - `java.awt.Graphics`
x - the X coordinate - `int`
y - the Y coordinate - `int`
w - width of the border - `int`
h - height of the border - `int`"
([^SynthButtonUI this ^javax.swing.plaf.synth.SynthContext context ^java.awt.Graphics g ^Integer x ^Integer y ^Integer w ^Integer h]
(-> this (.paintBorder context g x y w h))))
(defn update
"Notifies this UI delegate to repaint the specified component.
This method paints the component background, then calls
the paint(SynthContext,Graphics) method.
In general, this method does not need to be overridden by subclasses.
All Look and Feel rendering code should reside in the paint method.
g - the Graphics object used for painting - `java.awt.Graphics`
c - the component being painted - `javax.swing.JComponent`"
([^SynthButtonUI this ^java.awt.Graphics g ^javax.swing.JComponent c]
(-> this (.update g c))))
(defn property-change
"This method gets called when a bound property is changed.
e - A PropertyChangeEvent object describing the event source and the property that has changed. - `java.beans.PropertyChangeEvent`"
([^SynthButtonUI this ^java.beans.PropertyChangeEvent e]
(-> this (.propertyChange e))))
(defn paint
"Paints the specified component according to the Look and Feel.
This method is not used by Synth Look and Feel.
Painting is handled by the paint(SynthContext,Graphics) method.
g - the Graphics object used for painting - `java.awt.Graphics`
c - the component being painted - `javax.swing.JComponent`"
([^SynthButtonUI this ^java.awt.Graphics g ^javax.swing.JComponent c]
(-> this (.paint g c))))
(defn get-context
"Returns the Context for the specified component.
c - Component requesting SynthContext. - `javax.swing.JComponent`
returns: SynthContext describing component. - `javax.swing.plaf.synth.SynthContext`"
(^javax.swing.plaf.synth.SynthContext [^SynthButtonUI this ^javax.swing.JComponent c]
(-> this (.getContext c))))
(defn get-baseline
"Returns the baseline.
c - JComponent baseline is being requested for - `javax.swing.JComponent`
width - the width to get the baseline for - `int`
height - the height to get the baseline for - `int`
returns: baseline or a value < 0 indicating there is no reasonable
baseline - `int`"
(^Integer [^SynthButtonUI this ^javax.swing.JComponent c ^Integer width ^Integer height]
(-> this (.getBaseline c width height))))
(defn get-preferred-size
"Returns the specified component's preferred size appropriate for
the look and feel. If null is returned, the preferred
size will be calculated by the component's layout manager instead
(this is the preferred approach for any component with a specific
layout manager installed). The default implementation of this
method returns null.
this argument is often ignored , but might be used if the UI object is stateless and shared by multiple components - ` javax.swing . JComponent `
returns: `java.awt.Dimension`"
(^java.awt.Dimension [^SynthButtonUI this ^javax.swing.JComponent c]
(-> this (.getPreferredSize c))))
| null | https://raw.githubusercontent.com/clojure-interop/java-jdk/8d7a223e0f9a0965eb0332fad595cf7649d9d96e/javax.swing/src/javax/swing/plaf/synth/SynthButtonUI.clj | clojure | (ns javax.swing.plaf.synth.SynthButtonUI
"Provides the Synth L&F UI delegate for
JButton."
(:refer-clojure :only [require comment defn ->])
(:import [javax.swing.plaf.synth SynthButtonUI]))
(defn ->synth-button-ui
"Constructor."
(^SynthButtonUI []
(new SynthButtonUI )))
(defn *create-ui
"Creates a new UI object for the given component.
c - component to create UI object for - `javax.swing.JComponent`
returns: the UI object - `javax.swing.plaf.ComponentUI`"
(^javax.swing.plaf.ComponentUI [^javax.swing.JComponent c]
(SynthButtonUI/createUI c)))
(defn get-minimum-size
"Returns the specified component's minimum size appropriate for
the look and feel. If null is returned, the minimum
size will be calculated by the component's layout manager instead
(this is the preferred approach for any component with a specific
layout manager installed). The default implementation of this
method invokes getPreferredSize and returns that value.
this argument is often ignored , but might be used if the UI object is stateless and shared by multiple components - ` javax.swing . JComponent `
returns: a Dimension object or null - `java.awt.Dimension`"
(^java.awt.Dimension [^SynthButtonUI this ^javax.swing.JComponent c]
(-> this (.getMinimumSize c))))
(defn get-maximum-size
"Returns the specified component's maximum size appropriate for
the look and feel. If null is returned, the maximum
size will be calculated by the component's layout manager instead
(this is the preferred approach for any component with a specific
layout manager installed). The default implementation of this
method invokes getPreferredSize and returns that value.
this argument is often ignored , but might be used if the UI object is stateless and shared by multiple components - ` javax.swing . JComponent `
returns: a Dimension object or null - `java.awt.Dimension`"
(^java.awt.Dimension [^SynthButtonUI this ^javax.swing.JComponent c]
(-> this (.getMaximumSize c))))
(defn paint-border
"Paints the border.
context - a component context - `javax.swing.plaf.synth.SynthContext`
g - Graphics to paint on - `java.awt.Graphics`
x - the X coordinate - `int`
y - the Y coordinate - `int`
w - width of the border - `int`
h - height of the border - `int`"
([^SynthButtonUI this ^javax.swing.plaf.synth.SynthContext context ^java.awt.Graphics g ^Integer x ^Integer y ^Integer w ^Integer h]
(-> this (.paintBorder context g x y w h))))
(defn update
"Notifies this UI delegate to repaint the specified component.
This method paints the component background, then calls
the paint(SynthContext,Graphics) method.
In general, this method does not need to be overridden by subclasses.
All Look and Feel rendering code should reside in the paint method.
g - the Graphics object used for painting - `java.awt.Graphics`
c - the component being painted - `javax.swing.JComponent`"
([^SynthButtonUI this ^java.awt.Graphics g ^javax.swing.JComponent c]
(-> this (.update g c))))
(defn property-change
"This method gets called when a bound property is changed.
e - A PropertyChangeEvent object describing the event source and the property that has changed. - `java.beans.PropertyChangeEvent`"
([^SynthButtonUI this ^java.beans.PropertyChangeEvent e]
(-> this (.propertyChange e))))
(defn paint
"Paints the specified component according to the Look and Feel.
This method is not used by Synth Look and Feel.
Painting is handled by the paint(SynthContext,Graphics) method.
g - the Graphics object used for painting - `java.awt.Graphics`
c - the component being painted - `javax.swing.JComponent`"
([^SynthButtonUI this ^java.awt.Graphics g ^javax.swing.JComponent c]
(-> this (.paint g c))))
(defn get-context
"Returns the Context for the specified component.
c - Component requesting SynthContext. - `javax.swing.JComponent`
returns: SynthContext describing component. - `javax.swing.plaf.synth.SynthContext`"
(^javax.swing.plaf.synth.SynthContext [^SynthButtonUI this ^javax.swing.JComponent c]
(-> this (.getContext c))))
(defn get-baseline
"Returns the baseline.
c - JComponent baseline is being requested for - `javax.swing.JComponent`
width - the width to get the baseline for - `int`
height - the height to get the baseline for - `int`
returns: baseline or a value < 0 indicating there is no reasonable
baseline - `int`"
(^Integer [^SynthButtonUI this ^javax.swing.JComponent c ^Integer width ^Integer height]
(-> this (.getBaseline c width height))))
(defn get-preferred-size
"Returns the specified component's preferred size appropriate for
the look and feel. If null is returned, the preferred
size will be calculated by the component's layout manager instead
(this is the preferred approach for any component with a specific
layout manager installed). The default implementation of this
method returns null.
this argument is often ignored , but might be used if the UI object is stateless and shared by multiple components - ` javax.swing . JComponent `
returns: `java.awt.Dimension`"
(^java.awt.Dimension [^SynthButtonUI this ^javax.swing.JComponent c]
(-> this (.getPreferredSize c))))
| |
b3845fa73401a645150277d5d8f75b2790571e29d35293ae82f9f18cc0bacbe7 | blajzer/dib | CDepScanner.hs | # LANGUAGE GeneralizedNewtypeDeriving , ExistentialQuantification , KindSignatures , FlexibleContexts #
Copyright ( c ) 2010 - 2018
-- See LICENSE for license information.
-- | C dependency scanner. Runs a stripped-down pre-processor to scan for
-- include files (recursively).
module Dib.Scanners.CDepScanner (
cDepScanner
) where
import Dib.Types
import qualified Data.List as L
import qualified Data.Set as S
import qualified Data.Text as T
import qualified System.Directory as Dir
import qualified System.FilePath as F
import Control.Applicative()
import Control.Monad.State.Lazy
-- | A dependency on a single include file: the file's basename paired
-- with the full path it was resolved to.  Equality and ordering consider
-- only the basename, so the same header resolved via two different
-- search paths counts as one dependency.
data Dependency = Dependency String String
  deriving (Show)

-- Compare by basename only (first field); the resolved path is ignored.
instance Eq Dependency where
  (Dependency f1 _) == (Dependency f2 _) = f1 == f2

-- Order by basename only, consistent with the Eq instance above.
instance Ord Dependency where
  compare (Dependency f1 _) (Dependency f2 _) = compare f1 f2

-- | Extract the resolved on-disk path from a 'Dependency'.
getPathFromDep :: Dependency -> String
getPathFromDep (Dependency _ path) = path

-- Scanner state: the set of already-read includes, and the list of
-- include directories to search.
data ParseState = PS {
    currentDeps :: S.Set Dependency,
    searchPaths :: [String] }
  deriving (Show)

-- | Monad for gathering dependencies: 'ParseState' threaded over IO.
newtype DepGatherer a = DepGatherer {
    runDepGatherer :: StateT ParseState IO a
  } deriving (Functor, Applicative, Monad, MonadIO, MonadState ParseState)
-- | Strip every carriage-return character, normalising Windows line
-- endings to Unix ones before further processing.
removeCR :: String -> String
removeCR str = [ c | c <- str, c /= '\r' ]
-- | Drop leading spaces and tabs from a line.
removeLeadingWS :: String -> String
removeLeadingWS = dropWhile (`elem` " \t")
-- | Skip characters until the terminating @*/@ of a block comment, then
-- resume normal scanning.  A block comment left open at end of input is
-- an error.
removeBlockComment :: String -> String
removeBlockComment ('*':'/':xs) = removeComments xs
removeBlockComment (_:xs) = removeBlockComment xs
removeBlockComment [] = error "Unterminated block comment."

-- | Skip characters until the end of a @//@ line comment; note the
-- terminating newline is consumed along with the comment.
removeLineComment :: String -> String
removeLineComment ('\n':xs) = removeComments xs
removeLineComment (_:xs) = removeLineComment xs
removeLineComment [] = []
-- | Copy the body of a character literal through verbatim so that
-- comment-like sequences inside it (e.g. a literal '/') are not
-- misinterpreted.  Handles an optional backslash escape; any other
-- shape falls straight back to normal scanning.
processCharLiteral :: String -> String
processCharLiteral ('\\':x:'\'':xs) = '\\' : x : '\'' : removeComments xs
processCharLiteral (x:'\'':xs) = x : '\'' : removeComments xs
processCharLiteral (x:xs) = x : removeComments xs
processCharLiteral [] = []

-- | Copy a string literal through verbatim up to its closing quote,
-- honouring backslash-escaped quotes.  An unterminated literal is an
-- error.
processStringLiteral :: String -> String
processStringLiteral ('\\':'"':xs) = '\\' : '"' : processStringLiteral xs
processStringLiteral ('"':xs) = '"' : removeComments xs
processStringLiteral (x:xs) = x : processStringLiteral xs
processStringLiteral [] = error "Unterminated string literal."
-- | Copy a preprocessor directive through to its terminating newline,
-- honouring backslash-newline line continuations, then resume normal
-- scanning.
processDirective :: String -> String
processDirective ('\\':'\n':xs) = '\\' : '\n' : processDirective xs
processDirective ('\n':xs) = '\n' : removeComments xs
processDirective (x:xs) = x : processDirective xs
processDirective [] = []

-- | Main scanning loop: strips C/C++ comments while leaving preprocessor
-- directives, character literals, and string literals intact.  Mutually
-- recursive with the process*/remove* helpers above.
removeComments :: String -> String
removeComments ('#':xs) = '#' : processDirective xs
removeComments ('/':'*':xs) = removeBlockComment xs
removeComments ('/':'/':xs) = removeLineComment xs
removeComments ('\'':xs) = '\'' : processCharLiteral xs
removeComments ('"':xs) = '"' : processStringLiteral xs
removeComments (x:xs) = x : removeComments xs
removeComments [] = []
-- | Discard entries that are empty or consist of a lone newline.
filterBlank :: [String] -> [String]
filterBlank strs = [ s | s <- strs, s /= "\n", s /= "" ]
-- | Keep only the lines whose first space-delimited word is exactly
-- \"#include\".
extractIncludes :: [String] -> [String]
extractIncludes = filter isInclude
  where isInclude line = takeWhile (/= ' ') line == "#include"
-- | Pull the filename out of an include directive, handling both the
-- quoted (\"file.h\") and angle-bracket (<file.h>) forms.  Returns the
-- empty string when no opening delimiter is present.
dequoteInclude :: String -> String
dequoteInclude line =
  case dropWhile (\c -> c /= '"' && c /= '<') line of
    [] -> []
    (_open:rest) -> takeWhile (\c -> c /= '"' && c /= '>') rest
-- | Initial pass: removes comments and leading whitespace, then filters
-- out blank lines.
pass1 :: String -> [String]
pass1 s = filterBlank $ map removeLeadingWS $ lines $ removeComments (removeCR s)

-- | Second pass: keeps only #include lines and strips their delimiters,
-- leaving bare include filenames.
pass2 :: [String] -> [String]
pass2 l = filterBlank $ map dequoteInclude $ extractIncludes l

-- | The files included (directly) by the given source text.
gatherDependencies :: String -> [String]
gatherDependencies = pass2.pass1
-- | All candidate locations for an include file: the filename joined
-- onto each search directory, normalised.
possibleFilenames :: FilePath -> [FilePath] -> [FilePath]
possibleFilenames file searchDirs =
  [ F.normalise (F.combine dir file) | dir <- searchDirs ]
-- | Build a 'Dependency' for a resolved path: basename plus full path.
pathToDependency :: FilePath -> Dependency
pathToDependency path = Dependency (F.takeFileName path) path
-- | Recursively scan one include filename: resolve it against the search
-- paths, drop candidates that do not exist on disk or have already been
-- visited, then process the first remaining candidate.
spider :: forall (m :: * -> *).(MonadIO m, MonadState ParseState m) => String -> m ()
spider file = do
  state <- get
  paths <- filterM (includeFilter $ currentDeps state) $ possibleFilenames file (searchPaths state)
  spiderHelper paths
  return ()
  where
    -- Keep only candidate paths that exist and are not already recorded
    -- in the dependency set.  (NOTE: the inner 'file' shadows the outer.)
    includeFilter deps file = do
      exists <- liftIO $ Dir.doesFileExist file
      return $ exists && not (S.member (pathToDependency file) deps)
-- | Process only the first resolved candidate path (search-path order
-- wins, mirroring first-match include resolution): record it as a
-- dependency, then recurse into its own includes.
spiderHelper :: forall (m :: * -> *).(MonadIO m, MonadState ParseState m) => [FilePath] -> m ()
spiderHelper [] = return ()
spiderHelper (file:_) = do
  contents <- liftIO $ readFile file
  let deps = gatherDependencies contents
  state <- get
  put $ state {currentDeps = S.insert (pathToDependency file) (currentDeps state) }
  mapM_ spider deps
  return ()
-- | Kick off the scan from a root source file.  The root itself is not
-- recorded as a dependency; only the files it (transitively) includes
-- are.
spiderLauncher :: forall (m :: * -> *).(MonadIO m, MonadState ParseState m) => FilePath -> m ()
spiderLauncher file = do
  contents <- liftIO $ readFile file
  let deps = gatherDependencies contents
  mapM_ spider deps
  return ()
-- | Compute the sorted list of transitive include dependencies for one
-- file.  The search path is the file's own directory, the current
-- directory, and then the supplied include directories, in that order.
getDepsForFile :: [FilePath] -> FilePath -> IO [T.Text]
getDepsForFile includeDirs file = do
  (_, state) <- runStateT (runDepGatherer $ spiderLauncher file) PS {currentDeps=S.empty, searchPaths=[F.dropFileName file, "."] ++ includeDirs }
  return $ L.sort $ map (T.pack.getPathFromDep) (S.toList (currentDeps state))
-- | Takes in a list of include directories and a 'SrcTransform', and
-- returns a new 'SrcTransform' with the scanned dependencies injected
-- into the source side.  Only one-to-one and one-to-many transforms are
-- supported; anything else is an error.
cDepScanner :: [FilePath] -> SrcTransform -> IO SrcTransform
cDepScanner includeDirs (OneToOne input output) = getDepsForFile includeDirs (T.unpack input) >>= \deps -> return $ ManyToOne (input:deps) output
cDepScanner includeDirs (OneToMany input output) = getDepsForFile includeDirs (T.unpack input) >>= \deps -> return $ ManyToMany (input:deps) output
cDepScanner _ _ = error "Unimplemented. Implement this if it is a valid relationship."
| null | https://raw.githubusercontent.com/blajzer/dib/750253c972668bb0d849239f94b96050bae74f2a/src/Dib/Scanners/CDepScanner.hs | haskell | See LICENSE for license information.
| C dependency scanner. Runs a stripped-down pre-processor to scan for
include files (recursively).
already read includes, list of include paths
intial pass, removes comments and leading whitespace, then filters out extra lines
side. | # LANGUAGE GeneralizedNewtypeDeriving , ExistentialQuantification , KindSignatures , FlexibleContexts #
Copyright ( c ) 2010 - 2018
module Dib.Scanners.CDepScanner (
cDepScanner
) where
import Dib.Types
import qualified Data.List as L
import qualified Data.Set as S
import qualified Data.Text as T
import qualified System.Directory as Dir
import qualified System.FilePath as F
import Control.Applicative()
import Control.Monad.State.Lazy
data Dependency = Dependency String String
deriving (Show)
instance Eq Dependency where
(Dependency f1 _) == (Dependency f2 _) = f1 == f2
instance Ord Dependency where
compare (Dependency f1 _) (Dependency f2 _) = compare f1 f2
getPathFromDep :: Dependency -> String
getPathFromDep (Dependency _ path) = path
data ParseState = PS {
currentDeps :: S.Set Dependency,
searchPaths :: [String] }
deriving (Show)
newtype DepGatherer a = DepGatherer {
runDepGatherer :: StateT ParseState IO a
} deriving (Functor, Applicative, Monad, MonadIO, MonadState ParseState)
removeCR :: String -> String
removeCR = filter (/= '\r')
removeLeadingWS :: String -> String
removeLeadingWS = dropWhile (\x -> x == ' ' || x == '\t')
removeBlockComment :: String -> String
removeBlockComment ('*':'/':xs) = removeComments xs
removeBlockComment (_:xs) = removeBlockComment xs
removeBlockComment [] = error "Unterminated block comment."
removeLineComment :: String -> String
removeLineComment ('\n':xs) = removeComments xs
removeLineComment (_:xs) = removeLineComment xs
removeLineComment [] = []
processCharLiteral :: String -> String
processCharLiteral ('\\':x:'\'':xs) = '\\' : x : '\'' : removeComments xs
processCharLiteral (x:'\'':xs) = x : '\'' : removeComments xs
processCharLiteral (x:xs) = x : removeComments xs
processCharLiteral [] = []
processStringLiteral :: String -> String
processStringLiteral ('\\':'"':xs) = '\\' : '"' : processStringLiteral xs
processStringLiteral ('"':xs) = '"' : removeComments xs
processStringLiteral (x:xs) = x : processStringLiteral xs
processStringLiteral [] = error "Unterminated string literal."
processDirective :: String -> String
processDirective ('\\':'\n':xs) = '\\' : '\n' : processDirective xs
processDirective ('\n':xs) = '\n' : removeComments xs
processDirective (x:xs) = x : processDirective xs
processDirective [] = []
removeComments :: String -> String
removeComments ('#':xs) = '#' : processDirective xs
removeComments ('/':'*':xs) = removeBlockComment xs
removeComments ('/':'/':xs) = removeLineComment xs
removeComments ('\'':xs) = '\'' : processCharLiteral xs
removeComments ('"':xs) = '"' : processStringLiteral xs
removeComments (x:xs) = x : removeComments xs
removeComments [] = []
filterBlank :: [String] -> [String]
filterBlank = filter (\x -> x /= "\n" && x /= [])
extractIncludes :: [String] -> [String]
extractIncludes = filter (\x -> "#include" == takeWhile (/= ' ') x)
dequoteInclude :: String -> String
dequoteInclude s =
let endPortion = dropWhile (\x -> x /= '\"' && x /= '<') s
endLen = length endPortion
in if endLen > 0 then takeWhile (\x -> x /= '\"' && x /= '>') $ tail endPortion else []
pass1 :: String -> [String]
pass1 s = filterBlank $ map removeLeadingWS $ lines $ removeComments (removeCR s)
second pass , cleans up includes
pass2 :: [String] -> [String]
pass2 l = filterBlank $ map dequoteInclude $ extractIncludes l
gatherDependencies :: String -> [String]
gatherDependencies = pass2.pass1
possibleFilenames :: FilePath -> [FilePath] -> [FilePath]
possibleFilenames file = map (\p -> F.normalise $ F.combine p file)
pathToDependency :: FilePath -> Dependency
pathToDependency path = Dependency (F.takeFileName path) path
spider :: forall (m :: * -> *).(MonadIO m, MonadState ParseState m) => String -> m ()
spider file = do
state <- get
paths <- filterM (includeFilter $ currentDeps state) $ possibleFilenames file (searchPaths state)
spiderHelper paths
return ()
where
includeFilter deps file = do
exists <- liftIO $ Dir.doesFileExist file
return $ exists && not (S.member (pathToDependency file) deps)
spiderHelper :: forall (m :: * -> *).(MonadIO m, MonadState ParseState m) => [FilePath] -> m ()
spiderHelper [] = return ()
spiderHelper (file:_) = do
contents <- liftIO $ readFile file
let deps = gatherDependencies contents
state <- get
put $ state {currentDeps = S.insert (pathToDependency file) (currentDeps state) }
mapM_ spider deps
return ()
spiderLauncher :: forall (m :: * -> *).(MonadIO m, MonadState ParseState m) => FilePath -> m ()
spiderLauncher file = do
contents <- liftIO $ readFile file
let deps = gatherDependencies contents
mapM_ spider deps
return ()
getDepsForFile :: [FilePath] -> FilePath -> IO [T.Text]
getDepsForFile includeDirs file = do
(_, state) <- runStateT (runDepGatherer $ spiderLauncher file) PS {currentDeps=S.empty, searchPaths=[F.dropFileName file, "."] ++ includeDirs }
return $ L.sort $ map (T.pack.getPathFromDep) (S.toList (currentDeps state))
| Takes in a list of include directories , extra dependencies , a ' SrcTransform ' ,
and returns a new ' SrcTransform ' with the dependencies injected into the source
cDepScanner :: [FilePath] -> SrcTransform -> IO SrcTransform
cDepScanner includeDirs (OneToOne input output) = getDepsForFile includeDirs (T.unpack input) >>= \deps -> return $ ManyToOne (input:deps) output
cDepScanner includeDirs (OneToMany input output) = getDepsForFile includeDirs (T.unpack input) >>= \deps -> return $ ManyToMany (input:deps) output
cDepScanner _ _ = error "Unimplemented. Implement this if it is a valid relationship."
|
cf0951a2e102a3fb7abbcb5f235f27c3945dc0cbc258e846b0e3191566489666 | msp-strath/TypOS | Pretty.hs | {-|
Description: The internals of pretty-printing.
-}
{-# LANGUAGE OverloadedStrings #-}
module Pretty
( module Text.PrettyPrint.Compact
, module Doc.Annotations
, Pretty(..)
, Collapse(..)
, BracesList(..)
, asBlock
, indent
, keyword
, escape
, parenthesise
, pipe
) where
import Data.Void (Void, absurd)
import ANSI hiding (withANSI)
import Bwd (Bwd(..),Cursor(..),(<>>))
import Doc.Annotations (Annotations,withANSI,toANSIs) -- will be re-exported?
import Text.PrettyPrint.Compact hiding (Options) -- will be re-exported from here
-- | Class Pretty lets us declare what things are (nicely) printable.
-- Instances may define either 'pretty' or 'prettyPrec'; each has a
-- default in terms of the other (the default precedence is 0).
class Pretty a where
  pretty :: a -> Doc Annotations
  pretty = prettyPrec 0
  prettyPrec :: Int -> a -> Doc Annotations
  prettyPrec _ = pretty
-- | Indent a document by prefixing @n@ spaces.
indent :: Int -> Doc Annotations -> Doc Annotations
indent n d = string (replicate n ' ') <> d

-- | @asBlock n header lines@ renders a titled block: the @header@ line
-- followed by each of @lines@ indented by @n@ spaces.  With no body
-- lines, just the header.
asBlock :: Int -> Doc Annotations -> [Doc Annotations] -> Doc Annotations
asBlock n header [] = header
asBlock n header lines = header $$ vcat (map (indent n) lines)

-- | Wrap a document in parentheses when the flag is 'True'.
parenthesise :: Bool -> Doc Annotations -> Doc Annotations
parenthesise True = parens
parenthesise False = id

-- | Keywords are rendered underlined.
keyword :: Doc Annotations -> Doc Annotations
keyword = withANSI [ SetUnderlining Single ]

-- | The 'pipe' symbol.
pipe :: Doc Annotations
pipe = "|"
-- | 'escape' makes control characters in a 'String' visible: newlines
-- and tabs are replaced by their two-character escape sequences
-- (backslash-n and backslash-t); every other character passes through
-- unchanged.  (The old comment claimed carriage returns were escaped;
-- the code has always handled newline and tab.)
escape :: String -> String
escape = concatMap go where
  go :: Char -> String
  go '\n' = "\\n"
  go '\t' = "\\t"
  go c = [c]
-- Instances for some common types
instance Pretty String where
  pretty s = text s

instance Pretty () where
  pretty _ = text "()"

-- 'Void' has no values, so this instance is vacuously total.
instance Pretty Void where
  pretty = absurd
------------------------------------------------------------------
-- | A 't'-shaped container of 'Doc's can be 'Collapse'd if it can be
-- flattened to a single 'Doc'.
class Collapse t where
  collapse :: t (Doc Annotations) -> Doc Annotations

-- | Print snoc lists as "[<a, b, c]", and the empty one as "[<]"
instance Collapse Bwd where
  collapse ds = encloseSep "[<" "]" ", " (ds <>> [])

-- | Print lists as usual
instance Collapse [] where
  collapse ds = encloseSep lbracket rbracket ", " ds

-- | Print 'Cursor' with a Bold Red ":<+>:" between the two sides
instance Collapse Cursor where
  collapse (lstrs :<+>: rstrs) =
    sep [ collapse lstrs
        , withANSI [SetColour Foreground Red, SetWeight Bold] ":<+>:"
        , collapse rstrs
        ]
------------------------------------------------------------------
-- | 'BracesList' is a marker for printing something in braces
newtype BracesList t = BracesList { unBracesList :: [t] }

-- | Print 'BracesList' as a semicolon-separated list inside braces
instance Collapse BracesList where
  collapse (BracesList ds) = encloseSep "{" "}" "; " ds

-- | Can 'show' a 'Doc' via 'render'
instance Show (Doc Annotations) where show = render
| null | https://raw.githubusercontent.com/msp-strath/TypOS/f5345779b886eeadc8bd9d84b33cf9bf0bfba244/Src/Pretty.hs | haskell | |
Description: The internals of pretty-printing.
# LANGUAGE OverloadedStrings #
will be re-exported?
will be re-exported from here
| Class Pretty lets us declare what things are (nicely) printable.
| Indent by 'n' spaces
| asBlock n header lines
| @ n the indentation for the block's line
| @ header the title line of the block
| @ lines the block's lines
| maybe 'parenthesize' a document
| keywords are underlined
| 'pipe' symbol
| 'escape' goes through a 'String' and escape carriage return and tab
Instances for some common types
----------------------------------------------------------------
| a 't's worth of |Doc| can be 'Collapse'd if it can be flattened to a 'Doc'
| print lists as usual
----------------------------------------------------------------
| Can 'show' a 'Doc' via 'render' | module Pretty
( module Text.PrettyPrint.Compact
, module Doc.Annotations
, Pretty(..)
, Collapse(..)
, BracesList(..)
, asBlock
, indent
, keyword
, escape
, parenthesise
, pipe
) where
import Data.Void (Void, absurd)
import ANSI hiding (withANSI)
import Bwd (Bwd(..),Cursor(..),(<>>))
class Pretty a where
pretty :: a -> Doc Annotations
pretty = prettyPrec 0
prettyPrec :: Int -> a -> Doc Annotations
prettyPrec _ = pretty
indent :: Int -> Doc Annotations -> Doc Annotations
indent n d = string (replicate n ' ') <> d
asBlock :: Int -> Doc Annotations -> [Doc Annotations] -> Doc Annotations
asBlock n header [] = header
asBlock n header lines = header $$ vcat (map (indent n) lines)
parenthesise :: Bool -> Doc Annotations -> Doc Annotations
parenthesise True = parens
parenthesise False = id
keyword :: Doc Annotations -> Doc Annotations
keyword = withANSI [ SetUnderlining Single ]
pipe :: Doc Annotations
pipe = "|"
escape :: String -> String
escape = concatMap go where
go :: Char -> String
go '\n' = "\\n"
go '\t' = "\\t"
go c = [c]
instance Pretty String where
pretty s = text s
instance Pretty () where
pretty _ = text "()"
instance Pretty Void where
pretty = absurd
class Collapse t where
collapse :: t (Doc Annotations) -> Doc Annotations
| print snoc lists as " [ < a , b , c ] " , and the empty one as " [ < ] "
instance Collapse Bwd where
collapse ds = encloseSep "[<" "]" ", " (ds <>> [])
instance Collapse [] where
collapse ds = encloseSep lbracket rbracket ", " ds
| print ' Cursor ' with a Bold Red " : < + > : " in the middle
instance Collapse Cursor where
collapse (lstrs :<+>: rstrs) =
sep [ collapse lstrs
, withANSI [SetColour Foreground Red, SetWeight Bold] ":<+>:"
, collapse rstrs
]
' BracesList ' is a marker for printing something in braces
newtype BracesList t = BracesList { unBracesList :: [t] }
| print ' BracesList ' as lists with braces ...
instance Collapse BracesList where
collapse (BracesList ds) = encloseSep "{" "}" "; " ds
instance Show (Doc Annotations) where show = render
|
f010b9350f3715277b77687b2ac4389141e64a1b9e56612eab9c80543433676a | BillHallahan/G2 | KnownValues.hs | {-# LANGUAGE DeriveDataTypeable #-}
# LANGUAGE DeriveGeneric #
We define a datatype to hold the names of other data types we know should
-- exist, and that we care about for some special reason
( for example : the type )
Try to avoid imports from G2 other than G2.Internal . Language . Syntax here !
module G2.Language.KnownValues where
import G2.Language.Syntax
import Data.Data (Data, Typeable)
import Data.Hashable
import GHC.Generics (Generic)
-- | Names of the base-library types, data constructors, type classes,
-- and functions that G2 must be able to refer to directly.
data KnownValues = KnownValues {
              -- Base types and their data constructors
                tyInt :: Name
              , dcInt :: Name

              , tyFloat :: Name
              , dcFloat :: Name

              , tyDouble :: Name
              , dcDouble :: Name

              , tyInteger :: Name
              , dcInteger :: Name

              , tyChar :: Name
              , dcChar :: Name

              , tyBool :: Name
              , dcTrue :: Name
              , dcFalse :: Name

              , tyRational :: Name

              , tyList :: Name
              , dcCons :: Name
              , dcEmpty :: Name

              , tyMaybe :: Name
              , dcJust :: Name
              , dcNothing :: Name

              , tyUnit :: Name
              , dcUnit :: Name

              -- Typeclasses
              , eqTC :: Name
              , numTC :: Name
              , ordTC :: Name
              , integralTC :: Name
              , realTC :: Name
              , fractionalTC :: Name

              -- Typeclass superclass extractors
              -- (note: 'integralExtactReal' is a long-standing typo kept
              -- for compatibility)
              , integralExtactReal :: Name
              , realExtractNum :: Name
              , realExtractOrd :: Name
              , ordExtractEq :: Name

              -- Comparison, arithmetic, and conversion functions
              , eqFunc :: Name
              , neqFunc :: Name

              , plusFunc :: Name
              , minusFunc :: Name
              , timesFunc :: Name
              , divFunc :: Name
              , negateFunc :: Name
              , modFunc :: Name

              , fromIntegerFunc :: Name
              , toIntegerFunc :: Name

              , toRatioFunc :: Name
              , fromRationalFunc :: Name

              , geFunc :: Name
              , gtFunc :: Name
              , ltFunc :: Name
              , leFunc :: Name

              , impliesFunc :: Name
              , iffFunc :: Name

              -- Structural equality
              , structEqTC :: Name
              , structEqFunc :: Name

              -- Boolean operators
              , andFunc :: Name
              , orFunc :: Name
              , notFunc :: Name

              -- Error-raising functions
              , errorFunc :: Name
              , errorWithoutStackTraceFunc :: Name
              , errorEmptyListFunc :: Name
              , patErrorFunc :: Name
              } deriving (Show, Eq, Read, Typeable, Data, Generic)
instance Hashable KnownValues
-- | Is the given 'Name' one of the known error-raising functions?
isErrorFunc :: KnownValues -> Name -> Bool
isErrorFunc kv n =
  n `elem` [ errorFunc kv
           , errorEmptyListFunc kv
           , errorWithoutStackTraceFunc kv
           , patErrorFunc kv
           ]
| null | https://raw.githubusercontent.com/BillHallahan/G2/90c1f07c167d8b20dd4c591b138b8a427be559e3/src/G2/Language/KnownValues.hs | haskell | # LANGUAGE DeriveDataTypeable #
exist, and that we care about for some special reason | # LANGUAGE DeriveGeneric #
We define a datatype to hol the names of other data types we know should
( for example : the type )
Try to avoid imports from G2 other than G2.Internal . Language . Syntax here !
module G2.Language.KnownValues where
import G2.Language.Syntax
import Data.Data (Data, Typeable)
import Data.Hashable
import GHC.Generics (Generic)
data KnownValues = KnownValues {
tyInt :: Name
, dcInt :: Name
, tyFloat :: Name
, dcFloat :: Name
, tyDouble :: Name
, dcDouble :: Name
, tyInteger :: Name
, dcInteger :: Name
, tyChar :: Name
, dcChar :: Name
, tyBool :: Name
, dcTrue :: Name
, dcFalse :: Name
, tyRational :: Name
, tyList :: Name
, dcCons :: Name
, dcEmpty :: Name
, tyMaybe :: Name
, dcJust :: Name
, dcNothing :: Name
, tyUnit :: Name
, dcUnit :: Name
Typeclasses
, eqTC :: Name
, numTC :: Name
, ordTC :: Name
, integralTC :: Name
, realTC :: Name
, fractionalTC :: Name
Typeclass superclass extractors
, integralExtactReal :: Name
, realExtractNum :: Name
, realExtractOrd :: Name
, ordExtractEq :: Name
, eqFunc :: Name
, neqFunc :: Name
, plusFunc :: Name
, minusFunc :: Name
, timesFunc :: Name
, divFunc :: Name
, negateFunc :: Name
, modFunc :: Name
, fromIntegerFunc :: Name
, toIntegerFunc :: Name
, toRatioFunc :: Name
, fromRationalFunc :: Name
, geFunc :: Name
, gtFunc :: Name
, ltFunc :: Name
, leFunc :: Name
, impliesFunc :: Name
, iffFunc :: Name
, structEqTC :: Name
, structEqFunc :: Name
, andFunc :: Name
, orFunc :: Name
, notFunc :: Name
, errorFunc :: Name
, errorWithoutStackTraceFunc :: Name
, errorEmptyListFunc :: Name
, patErrorFunc :: Name
} deriving (Show, Eq, Read, Typeable, Data, Generic)
instance Hashable KnownValues
isErrorFunc :: KnownValues -> Name -> Bool
isErrorFunc kv n = n == errorFunc kv
|| n == errorEmptyListFunc kv
|| n == errorWithoutStackTraceFunc kv
|| n == patErrorFunc kv
|
d131d52b2207d279411483f6144f4035e12acdb25f2638d596fd6c1ba6c2c2b8 | cse-bristol/110-thermos-ui | monitoring.clj | This file is part of THERMOS , copyright © Centre for Sustainable Energy , 2017 - 2021
Licensed under the Reciprocal Public License v1.5 . See LICENSE for licensing details .
(ns thermos-backend.monitoring
"Some http endpoints for monitoring THERMOS using prometheus"
(:require [clojure.string :as string]
[thermos-backend.queue :as queue]
[thermos-backend.db.users :as users]
[thermos-backend.db.projects :as projects]
[thermos-backend.db :as db]))
(defn- clean-name
  "Turn a keyword/symbol/string into a Prometheus-safe name by replacing
  every dash with an underscore."
  [k]
  (apply str (map (fn [ch] (if (= ch \-) \_ ch)) (name k))))
(defn- format-labels
  "Render the label set of a metric map in Prometheus exposition syntax,
  e.g. {queue_name = \"jobs\", task_state = \"ready\"}.  The :name and
  :value entries are metric data rather than labels, so they are removed
  first; returns nil when no labels remain."
  [m]
  (let [m (dissoc m :name :value)]
    (when (seq m)
      (str "{"
           (string/join ", "
                        (for [[k v] m]
                          (str (clean-name k) " = " (pr-str v))))
           "}")
      )))
(defn- format-metrics
  "Format some metrics into the Prometheus exposition format (see
  https://prometheus.io/docs/instrumenting/exposition_formats/).

  Each metric is a map having :name and :value at least; any other
  entries are emitted as labels.  Metrics sharing a :name are grouped
  under a single # HELP / # TYPE header.  :types maps metric name to a
  Prometheus type (defaulting to :gauge) and :doc maps metric name to
  its help text."
  [metrics & {:keys [types doc]}]
  (str (string/join
        "\n"
        (flatten
         (for [[metric-name metrics] (group-by :name metrics)]
           (let [type-info (get types metric-name :gauge)
                 type-doc (get doc metric-name "Undocumented")
                 metric-name (str "thermos_" (clean-name metric-name))]
             [(str "# HELP " metric-name " " type-doc)
              (str "# TYPE " metric-name " " (name type-info))
              (for [metric metrics]
                (str metric-name (format-labels metric) " " (:value metric))
                )]))))
       "\n"))
(defn- earlier
  "Return the earlier of two java.sql.Timestamps; a nil argument loses
  to the other one."
  [^java.sql.Timestamp a ^java.sql.Timestamp b]
  (if (nil? a)
    b
    (if (nil? b)
      a
      (if (.before a b) a b))))
(defn- system-metrics
  "Gather some metrics about the system in a format suitable for
  format-metrics to use: queue depths and oldest-task ages per
  queue/state pair from the job queue, plus row counts for key database
  tables."
  []
  (let [tasks (queue/list-tasks)
        queue-states (frequencies
                      (map (juxt :queue-name :state) tasks))
        oldest-task (reduce
                     (fn [a {:keys [queue-name state queued]}]
                       ;; we want to know how old the oldest unfinished task is
                       (update a [queue-name state] earlier queued))
                     {}
                     tasks)
        ]
    (concat
     ;; enqueue time (unix seconds) of the oldest task per queue/state
     (for [[[queue-name state] max-age] oldest-task]
       {:name :oldest-task
        :value (int (/ (.getTime max-age) 1000))
        :queue-name queue-name
        :task-state state})
     ;; number of tasks per queue/state
     (for [[[queue state] n] queue-states]
       {:name :queue-count
        :value n
        :queue-name queue
        :task-state state})
     ;; row counts for the main database tables
     (for [table [:users :projects :maps :candidates :networks]]
       {:name :object-count
        :value (db/count-rows table)
        :object (name table)}))))
(defn formatted-metrics
  "Render the current system metrics as a Prometheus exposition-format
  string, attaching help text for each metric family."
  []
  (format-metrics
   (system-metrics)
   :doc {:queue-count "The number of entries in each queue"
         :object-count "The number of rows in database tables"
         :oldest-task "The age of the oldest task in each state / queue"
         }))
| null | https://raw.githubusercontent.com/cse-bristol/110-thermos-ui/c6ae2781ac10df1d02e0d7589ba0f8148ebbe48b/src/thermos_backend/monitoring.clj | clojure | we want to know how old the oldest unfinished task is | This file is part of THERMOS , copyright © Centre for Sustainable Energy , 2017 - 2021
Licensed under the Reciprocal Public License v1.5 . See LICENSE for licensing details .
(ns thermos-backend.monitoring
"Some http endpoints for monitoring THERMOS using prometheus"
(:require [clojure.string :as string]
[thermos-backend.queue :as queue]
[thermos-backend.db.users :as users]
[thermos-backend.db.projects :as projects]
[thermos-backend.db :as db]))
(defn- clean-name [k]
(.replaceAll (name k) "-" "_"))
(defn- format-labels [m]
(let [m (dissoc m :name :value)]
(when (seq m)
(str "{"
(string/join ", "
(for [[k v] m]
(str (clean-name k) " = " (pr-str v))))
"}")
)))
(defn- format-metrics
"Format some metrics into the prometheus exposition format.
See / for the format.
Each metric is a map having :name and :value at least.
If the metric has other kvs they are put in as labels.
"
[metrics & {:keys [types doc]}]
(str (string/join
"\n"
(flatten
(for [[metric-name metrics] (group-by :name metrics)]
(let [type-info (get types metric-name :gauge)
type-doc (get doc metric-name "Undocumented")
metric-name (str "thermos_" (clean-name metric-name))]
[(str "# HELP " metric-name " " type-doc)
(str "# TYPE " metric-name " " (name type-info))
(for [metric metrics]
(str metric-name (format-labels metric) " " (:value metric))
)]))))
"\n"))
(defn- earlier [^java.sql.Timestamp a ^java.sql.Timestamp b]
(cond
(nil? a) b
(nil? b) a
:else (if (.before a b) a b)))
(defn- system-metrics
"Gather some metrics about the system in a format suitable for format-metrics to use."
[]
(let [tasks (queue/list-tasks)
queue-states (frequencies
(map (juxt :queue-name :state) tasks))
oldest-task (reduce
(fn [a {:keys [queue-name state queued]}]
(update a [queue-name state] earlier queued))
{}
tasks)
]
(concat
(for [[[queue-name state] max-age] oldest-task]
{:name :oldest-task
:value (int (/ (.getTime max-age) 1000))
:queue-name queue-name
:task-state state})
(for [[[queue state] n] queue-states]
{:name :queue-count
:value n
:queue-name queue
:task-state state})
(for [table [:users :projects :maps :candidates :networks]]
{:name :object-count
:value (db/count-rows table)
:object (name table)}))))
(defn formatted-metrics []
(format-metrics
(system-metrics)
:doc {:queue-count "The number of entries in each queue"
:object-count "The number of rows in database tables"
:oldest-task "The age of the oldest task in each state / queue"
}))
|
d90cc3f3f615842b960eb42ae1dee7309a3ab85b94361abfe1f9e77d5e0417ac | haskell/hackage-server | State.hs | # LANGUAGE TemplateHaskell , StandaloneDeriving , GeneralizedNewtypeDeriving ,
DeriveDataTypeable , TypeFamilies , FlexibleInstances ,
MultiParamTypeClasses , BangPatterns #
DeriveDataTypeable, TypeFamilies, FlexibleInstances,
MultiParamTypeClasses, BangPatterns #-}
module Distribution.Server.Features.DownloadCount.State where
import Data.Time.Calendar (Day(..))
import Data.Typeable (Typeable)
import Data.Foldable (forM_)
import Control.Arrow (first)
import Control.Monad (liftM)
import Data.List (foldl', groupBy)
import Data.Function (on)
import Control.Monad.Reader (ask, asks)
import Control.Monad.State (get, put)
import qualified Data.Map.Lazy as Map
import System.FilePath ((</>))
import System.Directory (
getDirectoryContents
, createDirectoryIfMissing
)
import qualified Data.ByteString.Lazy as BSL
import System.IO (withFile, IOMode (..))
import System.IO.Unsafe (unsafeInterleaveIO)
import Text.CSV (printCSV)
import Control.Exception (evaluate)
import Data.Acid (Update, Query, makeAcidic)
import Data.SafeCopy (base, deriveSafeCopy, safeGet, safePut)
import Data.Serialize.Get (runGetLazy)
import Data.Serialize.Put (runPutLazy)
import Distribution.Version (Version)
import Distribution.Package (
PackageId
, PackageName
, packageName
, packageVersion
)
import Distribution.Text (simpleParse, display)
import Distribution.Simple.Utils (writeFileAtomic)
import Distribution.Server.Framework.Instances ()
import Distribution.Server.Framework.MemSize
import Distribution.Server.Util.CountingMap
{------------------------------------------------------------------------------
Data types
------------------------------------------------------------------------------}
-- | Download counts accumulated in memory for a single day, keyed by
-- package-id.
data InMemStats = InMemStats {
    inMemToday :: !Day
  , inMemCounts :: !(SimpleCountingMap PackageId)
  }
  deriving (Show, Eq, Typeable)
-- | Complete on-disk download history: a nested counting map from
-- package name to per-package (day, version) counts.
newtype OnDiskStats = OnDiskStats {
    onDiskStats :: NestedCountingMap PackageName OnDiskPerPkg
  }
  deriving (Show, Eq, MemSize)

-- Lift every CountingMap operation through the OnDiskStats wrapper.
instance CountingMap (PackageName, (Day, Version)) OnDiskStats where
  cmEmpty = OnDiskStats cmEmpty
  cmTotal (OnDiskStats ncm) = cmTotal ncm
  cmInsert kl n (OnDiskStats ncm) = OnDiskStats $ cmInsert kl n ncm
  cmFind k (OnDiskStats ncm) = cmFind k ncm
  cmUnion (OnDiskStats a)
          (OnDiskStats b) = OnDiskStats (cmUnion a b)
  cmToList (OnDiskStats ncm) = cmToList ncm
  cmToCSV (OnDiskStats ncm) = cmToCSV ncm
  cmInsertRecord r (OnDiskStats ncm) = first OnDiskStats `liftM` cmInsertRecord r ncm
-- | On-disk download history for a single package: counts nested by day,
-- then by version.
newtype OnDiskPerPkg = OnDiskPerPkg {
    onDiskPerPkgCounts :: NestedCountingMap Day (SimpleCountingMap Version)
  }
  deriving (Show, Eq, Ord, MemSize)

-- Lift every CountingMap operation through the OnDiskPerPkg wrapper.
instance CountingMap (Day, Version) OnDiskPerPkg where
  cmEmpty = OnDiskPerPkg cmEmpty
  cmTotal (OnDiskPerPkg ncm) = cmTotal ncm
  cmInsert kl n (OnDiskPerPkg ncm) = OnDiskPerPkg $ cmInsert kl n ncm
  cmFind k (OnDiskPerPkg ncm) = cmFind k ncm
  cmUnion (OnDiskPerPkg a) (OnDiskPerPkg b) = OnDiskPerPkg (cmUnion a b)
  cmToList (OnDiskPerPkg ncm) = cmToList ncm
  cmToCSV (OnDiskPerPkg ncm) = cmToCSV ncm
  cmInsertRecord r (OnDiskPerPkg ncm) = first OnDiskPerPkg `liftM` cmInsertRecord r ncm
-- | Per-package download counts restricted to a recent day range.
newtype RecentDownloads = RecentDownloads {
    recentDownloads :: SimpleCountingMap PackageName
  }
  deriving (Show, Eq, MemSize)

-- Lift every CountingMap operation through the RecentDownloads wrapper.
instance CountingMap PackageName RecentDownloads where
  cmEmpty = RecentDownloads cmEmpty
  cmTotal (RecentDownloads ncm) = cmTotal ncm
  cmInsert kl n (RecentDownloads ncm) = RecentDownloads $ cmInsert kl n ncm
  cmFind k (RecentDownloads ncm) = cmFind k ncm
  cmUnion (RecentDownloads a) (RecentDownloads b) = RecentDownloads (cmUnion a b)
  cmToList (RecentDownloads ncm) = cmToList ncm
  cmToCSV (RecentDownloads ncm) = cmToCSV ncm
  cmInsertRecord r (RecentDownloads ncm) = first RecentDownloads `liftM` cmInsertRecord r ncm
-- | All-time per-package download counts.
newtype TotalDownloads = TotalDownloads {
    totalDownloads :: SimpleCountingMap PackageName
  }
  deriving (Show, Eq, MemSize)

-- Lift every CountingMap operation through the TotalDownloads wrapper.
instance CountingMap PackageName TotalDownloads where
  cmEmpty = TotalDownloads cmEmpty
  cmTotal (TotalDownloads ncm) = cmTotal ncm
  cmInsert kl n (TotalDownloads ncm) = TotalDownloads $ cmInsert kl n ncm
  cmFind k (TotalDownloads ncm) = cmFind k ncm
  cmUnion (TotalDownloads a) (TotalDownloads b) = TotalDownloads (cmUnion a b)
  cmToList (TotalDownloads ncm) = cmToList ncm
  cmToCSV (TotalDownloads ncm) = cmToCSV ncm
  cmInsertRecord r (TotalDownloads ncm) = first TotalDownloads `liftM` cmInsertRecord r ncm
{------------------------------------------------------------------------------
Initial instances
------------------------------------------------------------------------------}
-- | Fresh in-memory stats for the given day, with an empty counting map.
initInMemStats :: Day -> InMemStats
initInMemStats day = InMemStats {
    inMemToday = day
  , inMemCounts = cmEmpty
  }
-- | Inclusive (first, last) day range.
type DayRange = (Day, Day)

-- | Fold once over the on-disk per-package stats, accumulating both the
-- recent downloads (restricted to the day range) and the all-time
-- totals.
initRecentAndTotalDownloads :: DayRange -> OnDiskStats
                            -> (RecentDownloads, TotalDownloads)
initRecentAndTotalDownloads dayRange (OnDiskStats (NCM _ m)) =
    foldl' (\(recent, total) (pname, pstats) ->
             -- strict accumulators avoid building thunks over all packages
             let !recent' = accumRecentDownloads dayRange pname pstats recent
                 !total' = accumTotalDownloads pname pstats total
             in (recent', total'))
           (emptyRecentDownloads, emptyTotalDownloads)
           (Map.toList m)
emptyRecentDownloads :: RecentDownloads
emptyRecentDownloads = RecentDownloads cmEmpty

-- | Add one package's downloads within the day range to the
-- recent-download totals; a package with zero downloads in the range is
-- left out entirely.
accumRecentDownloads :: DayRange
                     -> PackageName -> OnDiskPerPkg
                     -> RecentDownloads -> RecentDownloads
accumRecentDownloads dayRange pkgName (OnDiskPerPkg (NCM _ perDay))
  | let rangeTotal = sum (map cmTotal (lookupRange dayRange perDay))
  , rangeTotal > 0
  = cmInsert pkgName rangeTotal
  | otherwise = id
-- | All values whose keys fall within the inclusive range @(lo, hi)@,
-- returned in ascending key order.
lookupRange :: Ord k => (k,k) -> Map.Map k a -> [a]
lookupRange (lo, hi) m =
  let (_, atLo, aboveLo) = Map.splitLookup lo m
      (between, atHi, _) = Map.splitLookup hi aboveLo
      single = maybe [] (: [])
  in single atLo ++ Map.elems between ++ single atHi
emptyTotalDownloads :: TotalDownloads
emptyTotalDownloads = TotalDownloads cmEmpty
accumTotalDownloads :: PackageName -> OnDiskPerPkg
-> TotalDownloads -> TotalDownloads
accumTotalDownloads pkgName (OnDiskPerPkg perPkg) =
cmInsert pkgName (cmTotal perPkg)
{------------------------------------------------------------------------------
Pure updates/queries
------------------------------------------------------------------------------}
updateHistory :: InMemStats -> OnDiskStats -> OnDiskStats
updateHistory (InMemStats day perPkg) (OnDiskStats (NCM _ m)) =
OnDiskStats (NCM 0 (Map.unionWith cmUnion m updatesMap))
where
updatesMap :: Map.Map PackageName OnDiskPerPkg
updatesMap = Map.fromList
[ (pkgname, applyUpdates pkgs)
| pkgs <- groupBy ((==) `on` (packageName . fst))
(cmToList perPkg :: [(PackageId, Int)])
, let pkgname = packageName (fst (head pkgs))
]
applyUpdates :: [(PackageId, Int)] -> OnDiskPerPkg
applyUpdates pkgs = foldr (.) id
[ cmInsert (day, packageVersion pkgId) count
| (pkgId, count) <- pkgs ]
cmEmpty
-----------------------------------------------------------------------------
MemSize
-----------------------------------------------------------------------------
MemSize
------------------------------------------------------------------------------}
instance MemSize InMemStats where
memSize (InMemStats a b) = memSize2 a b
{------------------------------------------------------------------------------
Serializing on-disk stats
------------------------------------------------------------------------------}
deriveSafeCopy 0 'base ''InMemStats
deriveSafeCopy 0 'base ''OnDiskPerPkg
readOnDiskStats :: FilePath -> IO OnDiskStats
readOnDiskStats stateDir = do
createDirectoryIfMissing True stateDir
pkgStrs <- getDirectoryContents stateDir
OnDiskStats . NCM 0 . Map.fromList <$> sequence
[ do onDiskPerPkg <- unsafeInterleaveIO $
either (const cmEmpty) id
<$> readOnDiskPerPkg pkgFile
return (pkgName, onDiskPerPkg)
| Just pkgName <- map simpleParse pkgStrs
, let pkgFile = stateDir </> display pkgName ]
readOnDiskPerPkg :: FilePath -> IO (Either String OnDiskPerPkg)
readOnDiskPerPkg pkgFile =
withFile pkgFile ReadMode $ \h ->
-- By evaluating the Either result from the parser we force
-- all contents to be read
evaluate =<< (runGetLazy safeGet <$> BSL.hGetContents h)
writeOnDiskStats :: FilePath -> OnDiskStats -> IO ()
writeOnDiskStats stateDir (OnDiskStats (NCM _ onDisk)) = do
createDirectoryIfMissing True stateDir
forM_ (Map.toList onDisk) $ \(pkgName, perPkg) -> do
let pkgFile = stateDir </> display pkgName
writeFileAtomic pkgFile $ runPutLazy (safePut perPkg)
{------------------------------------------------------------------------------
The append-only all-time log
------------------------------------------------------------------------------}
appendToLog :: FilePath -> InMemStats -> IO ()
appendToLog stateDir (InMemStats _ inMemStats) =
appendFile (stateDir </> "log") $ printCSV (cmToCSV inMemStats)
reconstructLog :: FilePath -> OnDiskStats -> IO ()
reconstructLog stateDir onDisk =
writeFile (stateDir </> "log") $ printCSV (cmToCSV onDisk)
{------------------------------------------------------------------------------
ACID stuff
------------------------------------------------------------------------------}
getInMemStats :: Query InMemStats InMemStats
getInMemStats = ask
replaceInMemStats :: InMemStats -> Update InMemStats ()
replaceInMemStats = put
recordedToday :: Query InMemStats Day
recordedToday = asks inMemToday
registerDownload :: PackageId -> Update InMemStats ()
registerDownload pkgId = do
InMemStats day counts <- get
put $ InMemStats day (cmInsert pkgId 1 counts)
makeAcidic ''InMemStats [ 'getInMemStats
, 'replaceInMemStats
, 'recordedToday
, 'registerDownload
]
| null | https://raw.githubusercontent.com/haskell/hackage-server/8ec64a1a30b6d53e4e86235c50e398ec803f50f1/src/Distribution/Server/Features/DownloadCount/State.hs | haskell | -----------------------------------------------------------------------------
Data types
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
Initial instances
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
Pure updates/queries
-----------------------------------------------------------------------------
---------------------------------------------------------------------------
---------------------------------------------------------------------------
----------------------------------------------------------------------------}
-----------------------------------------------------------------------------
Serializing on-disk stats
-----------------------------------------------------------------------------
By evaluating the Either result from the parser we force
all contents to be read
-----------------------------------------------------------------------------
The append-only all-time log
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
ACID stuff
----------------------------------------------------------------------------- | # LANGUAGE TemplateHaskell , StandaloneDeriving , GeneralizedNewtypeDeriving ,
DeriveDataTypeable , TypeFamilies , FlexibleInstances ,
MultiParamTypeClasses , BangPatterns #
DeriveDataTypeable, TypeFamilies, FlexibleInstances,
MultiParamTypeClasses, BangPatterns #-}
module Distribution.Server.Features.DownloadCount.State where
import Data.Time.Calendar (Day(..))
import Data.Typeable (Typeable)
import Data.Foldable (forM_)
import Control.Arrow (first)
import Control.Monad (liftM)
import Data.List (foldl', groupBy)
import Data.Function (on)
import Control.Monad.Reader (ask, asks)
import Control.Monad.State (get, put)
import qualified Data.Map.Lazy as Map
import System.FilePath ((</>))
import System.Directory (
getDirectoryContents
, createDirectoryIfMissing
)
import qualified Data.ByteString.Lazy as BSL
import System.IO (withFile, IOMode (..))
import System.IO.Unsafe (unsafeInterleaveIO)
import Text.CSV (printCSV)
import Control.Exception (evaluate)
import Data.Acid (Update, Query, makeAcidic)
import Data.SafeCopy (base, deriveSafeCopy, safeGet, safePut)
import Data.Serialize.Get (runGetLazy)
import Data.Serialize.Put (runPutLazy)
import Distribution.Version (Version)
import Distribution.Package (
PackageId
, PackageName
, packageName
, packageVersion
)
import Distribution.Text (simpleParse, display)
import Distribution.Simple.Utils (writeFileAtomic)
import Distribution.Server.Framework.Instances ()
import Distribution.Server.Framework.MemSize
import Distribution.Server.Util.CountingMap
data InMemStats = InMemStats {
inMemToday :: !Day
, inMemCounts :: !(SimpleCountingMap PackageId)
}
deriving (Show, Eq, Typeable)
newtype OnDiskStats = OnDiskStats {
onDiskStats :: NestedCountingMap PackageName OnDiskPerPkg
}
deriving (Show, Eq, MemSize)
instance CountingMap (PackageName, (Day, Version)) OnDiskStats where
cmEmpty = OnDiskStats cmEmpty
cmTotal (OnDiskStats ncm) = cmTotal ncm
cmInsert kl n (OnDiskStats ncm) = OnDiskStats $ cmInsert kl n ncm
cmFind k (OnDiskStats ncm) = cmFind k ncm
cmUnion (OnDiskStats a)
(OnDiskStats b) = OnDiskStats (cmUnion a b)
cmToList (OnDiskStats ncm) = cmToList ncm
cmToCSV (OnDiskStats ncm) = cmToCSV ncm
cmInsertRecord r (OnDiskStats ncm) = first OnDiskStats `liftM` cmInsertRecord r ncm
newtype OnDiskPerPkg = OnDiskPerPkg {
onDiskPerPkgCounts :: NestedCountingMap Day (SimpleCountingMap Version)
}
deriving (Show, Eq, Ord, MemSize)
instance CountingMap (Day, Version) OnDiskPerPkg where
cmEmpty = OnDiskPerPkg cmEmpty
cmTotal (OnDiskPerPkg ncm) = cmTotal ncm
cmInsert kl n (OnDiskPerPkg ncm) = OnDiskPerPkg $ cmInsert kl n ncm
cmFind k (OnDiskPerPkg ncm) = cmFind k ncm
cmUnion (OnDiskPerPkg a) (OnDiskPerPkg b) = OnDiskPerPkg (cmUnion a b)
cmToList (OnDiskPerPkg ncm) = cmToList ncm
cmToCSV (OnDiskPerPkg ncm) = cmToCSV ncm
cmInsertRecord r (OnDiskPerPkg ncm) = first OnDiskPerPkg `liftM` cmInsertRecord r ncm
newtype RecentDownloads = RecentDownloads {
recentDownloads :: SimpleCountingMap PackageName
}
deriving (Show, Eq, MemSize)
instance CountingMap PackageName RecentDownloads where
cmEmpty = RecentDownloads cmEmpty
cmTotal (RecentDownloads ncm) = cmTotal ncm
cmInsert kl n (RecentDownloads ncm) = RecentDownloads $ cmInsert kl n ncm
cmFind k (RecentDownloads ncm) = cmFind k ncm
cmUnion (RecentDownloads a) (RecentDownloads b) = RecentDownloads (cmUnion a b)
cmToList (RecentDownloads ncm) = cmToList ncm
cmToCSV (RecentDownloads ncm) = cmToCSV ncm
cmInsertRecord r (RecentDownloads ncm) = first RecentDownloads `liftM` cmInsertRecord r ncm
newtype TotalDownloads = TotalDownloads {
totalDownloads :: SimpleCountingMap PackageName
}
deriving (Show, Eq, MemSize)
instance CountingMap PackageName TotalDownloads where
cmEmpty = TotalDownloads cmEmpty
cmTotal (TotalDownloads ncm) = cmTotal ncm
cmInsert kl n (TotalDownloads ncm) = TotalDownloads $ cmInsert kl n ncm
cmFind k (TotalDownloads ncm) = cmFind k ncm
cmUnion (TotalDownloads a) (TotalDownloads b) = TotalDownloads (cmUnion a b)
cmToList (TotalDownloads ncm) = cmToList ncm
cmToCSV (TotalDownloads ncm) = cmToCSV ncm
cmInsertRecord r (TotalDownloads ncm) = first TotalDownloads `liftM` cmInsertRecord r ncm
initInMemStats :: Day -> InMemStats
initInMemStats day = InMemStats {
inMemToday = day
, inMemCounts = cmEmpty
}
type DayRange = (Day, Day)
initRecentAndTotalDownloads :: DayRange -> OnDiskStats
-> (RecentDownloads, TotalDownloads)
initRecentAndTotalDownloads dayRange (OnDiskStats (NCM _ m)) =
foldl' (\(recent, total) (pname, pstats) ->
let !recent' = accumRecentDownloads dayRange pname pstats recent
!total' = accumTotalDownloads pname pstats total
in (recent', total'))
(emptyRecentDownloads, emptyTotalDownloads)
(Map.toList m)
emptyRecentDownloads :: RecentDownloads
emptyRecentDownloads = RecentDownloads cmEmpty
accumRecentDownloads :: DayRange
-> PackageName -> OnDiskPerPkg
-> RecentDownloads -> RecentDownloads
accumRecentDownloads dayRange pkgName (OnDiskPerPkg (NCM _ perDay))
| let rangeTotal = sum (map cmTotal (lookupRange dayRange perDay))
, rangeTotal > 0
= cmInsert pkgName rangeTotal
| otherwise = id
lookupRange :: Ord k => (k,k) -> Map.Map k a -> [a]
lookupRange (l,u) m =
let (_,ml,above) = Map.splitLookup l m
(middle,mu,_) = Map.splitLookup u above
in maybe [] (\x->[x]) ml
++ Map.elems middle
++ maybe [] (\x->[x]) mu
emptyTotalDownloads :: TotalDownloads
emptyTotalDownloads = TotalDownloads cmEmpty
accumTotalDownloads :: PackageName -> OnDiskPerPkg
-> TotalDownloads -> TotalDownloads
accumTotalDownloads pkgName (OnDiskPerPkg perPkg) =
cmInsert pkgName (cmTotal perPkg)
updateHistory :: InMemStats -> OnDiskStats -> OnDiskStats
updateHistory (InMemStats day perPkg) (OnDiskStats (NCM _ m)) =
OnDiskStats (NCM 0 (Map.unionWith cmUnion m updatesMap))
where
updatesMap :: Map.Map PackageName OnDiskPerPkg
updatesMap = Map.fromList
[ (pkgname, applyUpdates pkgs)
| pkgs <- groupBy ((==) `on` (packageName . fst))
(cmToList perPkg :: [(PackageId, Int)])
, let pkgname = packageName (fst (head pkgs))
]
applyUpdates :: [(PackageId, Int)] -> OnDiskPerPkg
applyUpdates pkgs = foldr (.) id
[ cmInsert (day, packageVersion pkgId) count
| (pkgId, count) <- pkgs ]
cmEmpty
MemSize
MemSize
instance MemSize InMemStats where
memSize (InMemStats a b) = memSize2 a b
deriveSafeCopy 0 'base ''InMemStats
deriveSafeCopy 0 'base ''OnDiskPerPkg
readOnDiskStats :: FilePath -> IO OnDiskStats
readOnDiskStats stateDir = do
createDirectoryIfMissing True stateDir
pkgStrs <- getDirectoryContents stateDir
OnDiskStats . NCM 0 . Map.fromList <$> sequence
[ do onDiskPerPkg <- unsafeInterleaveIO $
either (const cmEmpty) id
<$> readOnDiskPerPkg pkgFile
return (pkgName, onDiskPerPkg)
| Just pkgName <- map simpleParse pkgStrs
, let pkgFile = stateDir </> display pkgName ]
readOnDiskPerPkg :: FilePath -> IO (Either String OnDiskPerPkg)
readOnDiskPerPkg pkgFile =
withFile pkgFile ReadMode $ \h ->
evaluate =<< (runGetLazy safeGet <$> BSL.hGetContents h)
writeOnDiskStats :: FilePath -> OnDiskStats -> IO ()
writeOnDiskStats stateDir (OnDiskStats (NCM _ onDisk)) = do
createDirectoryIfMissing True stateDir
forM_ (Map.toList onDisk) $ \(pkgName, perPkg) -> do
let pkgFile = stateDir </> display pkgName
writeFileAtomic pkgFile $ runPutLazy (safePut perPkg)
appendToLog :: FilePath -> InMemStats -> IO ()
appendToLog stateDir (InMemStats _ inMemStats) =
appendFile (stateDir </> "log") $ printCSV (cmToCSV inMemStats)
reconstructLog :: FilePath -> OnDiskStats -> IO ()
reconstructLog stateDir onDisk =
writeFile (stateDir </> "log") $ printCSV (cmToCSV onDisk)
getInMemStats :: Query InMemStats InMemStats
getInMemStats = ask
replaceInMemStats :: InMemStats -> Update InMemStats ()
replaceInMemStats = put
recordedToday :: Query InMemStats Day
recordedToday = asks inMemToday
registerDownload :: PackageId -> Update InMemStats ()
registerDownload pkgId = do
InMemStats day counts <- get
put $ InMemStats day (cmInsert pkgId 1 counts)
makeAcidic ''InMemStats [ 'getInMemStats
, 'replaceInMemStats
, 'recordedToday
, 'registerDownload
]
|
34433f31df1fb042704b26e4af50fcd34a7483bdf3b9db563c97eb76323a260d | rescript-lang/rescript-compiler | belt_MapDict.mli | Copyright ( C ) 2017 Authors of ReScript
*
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , either version 3 of the License , or
* ( at your option ) any later version .
*
* In addition to the permissions granted to you by the LGPL , you may combine
* or link a " work that uses the Library " with a publicly distributed version
* of this file to produce a combined library or application , then distribute
* that combined work under the terms of your choosing , with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 ( or the corresponding section of a later version of the LGPL
* should you choose to use a later version ) .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In addition to the permissions granted to you by the LGPL, you may combine
* or link a "work that uses the Library" with a publicly distributed version
* of this file to produce a combined library or application, then distribute
* that combined work under the terms of your choosing, with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 (or the corresponding section of a later version of the LGPL
* should you choose to use a later version).
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
(** This module separates identity from data, it is a bit more verbose but
slightly more efficient due to the fact that there is no need to pack
identity and data back after each operation.
**_Advanced usage only_**
*)
(* ```res prelude
type t<'key, 'value, 'id>
type cmp<'key, 'id> = Belt_Id.cmp<'key, 'id>
```
*)
type ('key, 'value, 'id) t
type ('key, 'id) cmp = ('key, 'id) Belt_Id.cmp
val empty: ('k, 'v, 'id) t
val isEmpty: ('k, 'v, 'id) t -> bool
val has:
('k, 'a, 'id) t -> 'k ->
cmp:('k, 'id) cmp ->
bool
val cmpU:
('k, 'v, 'id) t ->
('k, 'v, 'id) t ->
kcmp:('k, 'id) cmp ->
vcmp:('v -> 'v -> int [@bs]) ->
int
val cmp:
('k, 'v, 'id) t ->
('k, 'v, 'id) t ->
kcmp:('k, 'id) cmp ->
vcmp:('v -> 'v -> int) ->
int
val eqU:
('k, 'a, 'id) t ->
('k, 'a, 'id) t ->
kcmp:('k, 'id) cmp ->
veq:('a -> 'a -> bool [@bs]) ->
bool
val eq:
('k, 'a, 'id) t ->
('k, 'a, 'id) t ->
kcmp:('k, 'id) cmp ->
veq:('a -> 'a -> bool) ->
bool
* ` eq(m1 , , cmp ) ` tests whether the maps ` m1 ` and ` m2 ` are equal , that is ,
contain equal keys and associate them with equal data . ` cmp ` is the
equality predicate used to compare the data associated with the keys .
contain equal keys and associate them with equal data. `cmp` is the
equality predicate used to compare the data associated with the keys. *)
val findFirstByU : ('k, 'v, 'id) t -> ('k -> 'v -> bool [@bs]) -> ('k * 'v) option
val findFirstBy : ('k, 'v, 'id) t -> ('k -> 'v -> bool ) -> ('k * 'v) option
* ` findFirstBy(m , p ) ` uses function ` f ` to find the first key value pair to
match predicate ` p ` .
` ` ` res example
module IntCmp = Belt . Id. MakeComparable ( {
type t = int
let = Pervasives.compare
} )
let s0 = Belt . Map . , " 4 " ) , ( 1 , " 1 " ) , ( 2 , " 2 " ) , ( 3 , " 3 " ) ] , ~cmp = IntCmp.cmp )
Belt . Map . Dict.findFirstBy(s0 , ( k , _ ) = > k = = 4 ) = = Some((4 , " 4 " ) )
` ` `
match predicate `p`.
```res example
module IntCmp = Belt.Id.MakeComparable({
type t = int
let cmp = Pervasives.compare
})
let s0 = Belt.Map.Dict.fromArray([(4, "4"), (1, "1"), (2, "2"), (3, "3")], ~cmp=IntCmp.cmp)
Belt.Map.Dict.findFirstBy(s0, (k, _) => k == 4) == Some((4, "4"))
```
*)
val forEachU: ('k, 'a, 'id) t -> ('k -> 'a -> unit [@bs]) -> unit
val forEach: ('k, 'a, 'id) t -> ('k -> 'a -> unit) -> unit
* ` forEach(m , f ) ` applies ` f ` to all bindings in map ` m ` . ` f ` receives the
key as first argument , and the associated value as second argument . The
bindings are passed to ` f ` in increasing order with respect to the ordering
over the type of the keys .
key as first argument, and the associated value as second argument. The
bindings are passed to `f` in increasing order with respect to the ordering
over the type of the keys. *)
val reduceU: ('k, 'a, 'id) t -> 'b -> ('b -> 'k -> 'a -> 'b [@bs]) -> 'b
val reduce: ('k, 'a, 'id) t -> 'b -> ('b -> 'k -> 'a -> 'b) -> 'b
* ` reduce(m , a , f ) ` computes ` f(kN , dN ... f(k1 , d1 , a ) ... ) ` , where ` k1 ...
kN ` are the keys of all bindings in ` m ` ( in increasing order ) , and ` d1 ...
dN ` are the associated data .
kN` are the keys of all bindings in `m` (in increasing order), and `d1 ...
dN` are the associated data. *)
val everyU: ('k, 'a, 'id) t -> ('k -> 'a -> bool [@bs]) -> bool
val every: ('k, 'a, 'id) t -> ('k -> 'a -> bool) -> bool
(** `every(m, p)` checks if all the bindings of the map satisfy the predicate
`p`. Order unspecified *)
val someU: ('k, 'a, 'id) t -> ('k -> 'a -> bool [@bs]) -> bool
val some: ('k, 'a, 'id) t -> ('k -> 'a -> bool) -> bool
* ` some(m , p ) ` checks if at least one binding of the map satisfy the
predicate ` p ` . Order unspecified
predicate `p`. Order unspecified *)
val size: ('k, 'a, 'id) t -> int
val toList: ('k, 'a, 'id) t -> ('k * 'a) list
(** In increasing order. *)
val toArray: ('k, 'a, 'id) t -> ('k * 'a) array
val fromArray: ('k * 'a) array -> cmp:('k,'id) cmp -> ('k,'a,'id) t
val keysToArray: ('k, 'a, 'id) t -> 'k array
val valuesToArray: ('k, 'a, 'id) t -> 'a array
val minKey: ('k, _, _) t -> 'k option
val minKeyUndefined: ('k, _, _) t -> 'k Js.undefined
val maxKey: ('k, _, _) t -> 'k option
val maxKeyUndefined: ('k, _, _) t -> 'k Js.undefined
val minimum: ('k, 'a, _) t -> ('k * 'a) option
val minUndefined: ('k, 'a, _) t -> ('k * 'a) Js.undefined
val maximum: ('k, 'a, _) t -> ('k * 'a) option
val maxUndefined:('k, 'a, _) t -> ('k * 'a) Js.undefined
val get:
('k, 'a, 'id) t -> 'k ->
cmp:('k, 'id) cmp ->
'a option
val getUndefined:
('k, 'a, 'id) t -> 'k ->
cmp:('k, 'id) cmp ->
'a Js.undefined
val getWithDefault:
('k, 'a, 'id) t -> 'k -> 'a ->
cmp:('k, 'id) cmp ->
'a
val getExn:
('k, 'a, 'id) t -> 'k ->
cmp:('k, 'id) cmp ->
'a
val checkInvariantInternal: _ t -> unit
val remove:
('a, 'b, 'id) t -> 'a ->
cmp:('a, 'id) cmp ->
('a, 'b, 'id) t
(** `remove(m, x)` returns a map containing the same bindings as `m`, except
for `x` which is unbound in the returned map. *)
val removeMany:
('a, 'b, 'id) t ->
'a array ->
cmp:('a, 'id) cmp ->
('a, 'b, 'id) t
val set:
('a, 'b, 'id) t -> 'a -> 'b ->
cmp:('a, 'id) cmp ->
('a, 'b, 'id) t
* ` set(m , x , y ) ` returns a map containing the same bindings as ` m ` , plus a
binding of ` x ` to ` y ` . If ` x ` was already bound in ` m ` , its previous
binding disappears .
binding of `x` to `y`. If `x` was already bound in `m`, its previous
binding disappears. *)
val updateU:
('a, 'b, 'id) t ->
'a ->
('b option -> 'b option [@bs]) ->
cmp:('a, 'id) cmp ->
('a, 'b, 'id) t
val update:
('a, 'b, 'id) t ->
'a ->
('b option -> 'b option) ->
cmp:('a, 'id) cmp ->
('a, 'b, 'id) t
val mergeU:
('a, 'b, 'id) t ->
('a, 'c, 'id) t ->
('a -> 'b option -> 'c option -> 'd option [@bs]) ->
cmp:('a, 'id) cmp -> ('a, 'd, 'id) t
val merge:
('a, 'b, 'id) t ->
('a, 'c, 'id) t ->
('a -> 'b option -> 'c option -> 'd option) ->
cmp:('a, 'id) cmp -> ('a, 'd, 'id) t
* ` merge(m1 , , f ) ` computes a map whose keys is a subset of keys of ` m1 `
and of ` m2 ` . The presence of each such binding , and the corresponding
value , is determined with the function ` f ` .
and of `m2`. The presence of each such binding, and the corresponding
value, is determined with the function `f`. *)
val mergeMany:
('a, 'b, 'id) t ->
('a * 'b) array ->
cmp:('a, 'id) cmp ->
('a, 'b, 'id) t
val keepU:
('k, 'a, 'id) t ->
('k -> 'a -> bool [@bs]) ->
('k, 'a, 'id) t
val keep:
('k, 'a, 'id) t ->
('k -> 'a -> bool) ->
('k, 'a, 'id) t
(** `keep(m, p)` returns the map with all the bindings in `m` that satisfy
predicate `p`. *)
val partitionU:
('k, 'a, 'id) t ->
('k -> 'a -> bool [@bs]) ->
('k, 'a, 'id) t * ('k, 'a, 'id) t
val partition:
('k, 'a, 'id) t ->
('k -> 'a -> bool) ->
('k, 'a, 'id) t * ('k, 'a, 'id) t
* ` partition(m , p ) ` returns a pair of maps ` ( m1 , m2 ) ` , where ` m1 ` contains
all the bindings of ` s ` that satisfy the predicate ` p ` , and ` m2 ` is the map
with all the bindings of ` s ` that do not satisfy ` p ` .
all the bindings of `s` that satisfy the predicate `p`, and `m2` is the map
with all the bindings of `s` that do not satisfy `p`. *)
val split:
('a, 'b, 'id) t ->
'a ->
cmp:('a, 'id) cmp ->
(('a,'b,'id) t * ('a, 'b, 'id) t) * 'b option
(** `split(x, m)` returns a triple `(l, data, r)`, where `l` is the map with
all the bindings of `m` whose key is strictly less than `x`; `r` is the map
with all the bindings of `m` whose key is strictly greater than `x`; `data`
is `None` if `m` contains no binding for `x`, or `Some(v)` if `m` binds `v`
to `x`. *)
val mapU: ('k, 'a, 'id) t -> ('a -> 'b [@bs]) -> ('k ,'b,'id) t
val map: ('k, 'a, 'id) t -> ('a -> 'b) -> ('k ,'b,'id) t
(** `map(m, f)` returns a map with same domain as `m`, where the associated
value `a` of all bindings of `m` has been replaced by the result of the
application of `f` to `a`. The bindings are passed to `f` in increasing
order with respect to the ordering over the type of the keys. *)
val mapWithKeyU: ('k, 'a, 'id) t -> ('k -> 'a -> 'b [@bs]) -> ('k, 'b, 'id) t
val mapWithKey: ('k, 'a, 'id) t -> ('k -> 'a -> 'b) -> ('k, 'b, 'id) t
| null | https://raw.githubusercontent.com/rescript-lang/rescript-compiler/c3fcc430360079546a6aabd2b2770303480f8486/jscomp/others/belt_MapDict.mli | ocaml | * This module separates identity from data, it is a bit more verbose but
slightly more efficient due to the fact that there is no need to pack
identity and data back after each operation.
**_Advanced usage only_**
```res prelude
type t<'key, 'value, 'id>
type cmp<'key, 'id> = Belt_Id.cmp<'key, 'id>
```
* `every(m, p)` checks if all the bindings of the map satisfy the predicate
`p`. Order unspecified
* In increasing order.
* `remove(m, x)` returns a map containing the same bindings as `m`, except
for `x` which is unbound in the returned map.
* `keep(m, p)` returns the map with all the bindings in `m` that satisfy
predicate `p`.
* `split(x, m)` returns a triple `(l, data, r)`, where `l` is the map with
all the bindings of `m` whose key is strictly less than `x`; `r` is the map
with all the bindings of `m` whose key is strictly greater than `x`; `data`
is `None` if `m` contains no binding for `x`, or `Some(v)` if `m` binds `v`
to `x`.
* `map(m, f)` returns a map with same domain as `m`, where the associated
value `a` of all bindings of `m` has been replaced by the result of the
application of `f` to `a`. The bindings are passed to `f` in increasing
order with respect to the ordering over the type of the keys. | Copyright ( C ) 2017 Authors of ReScript
*
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , either version 3 of the License , or
* ( at your option ) any later version .
*
* In addition to the permissions granted to you by the LGPL , you may combine
* or link a " work that uses the Library " with a publicly distributed version
* of this file to produce a combined library or application , then distribute
* that combined work under the terms of your choosing , with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 ( or the corresponding section of a later version of the LGPL
* should you choose to use a later version ) .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In addition to the permissions granted to you by the LGPL, you may combine
* or link a "work that uses the Library" with a publicly distributed version
* of this file to produce a combined library or application, then distribute
* that combined work under the terms of your choosing, with no requirement
* to comply with the obligations normally placed on you by section 4 of the
* LGPL version 3 (or the corresponding section of a later version of the LGPL
* should you choose to use a later version).
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *)
type ('key, 'value, 'id) t
type ('key, 'id) cmp = ('key, 'id) Belt_Id.cmp
val empty: ('k, 'v, 'id) t
val isEmpty: ('k, 'v, 'id) t -> bool
val has:
('k, 'a, 'id) t -> 'k ->
cmp:('k, 'id) cmp ->
bool
val cmpU:
('k, 'v, 'id) t ->
('k, 'v, 'id) t ->
kcmp:('k, 'id) cmp ->
vcmp:('v -> 'v -> int [@bs]) ->
int
val cmp:
('k, 'v, 'id) t ->
('k, 'v, 'id) t ->
kcmp:('k, 'id) cmp ->
vcmp:('v -> 'v -> int) ->
int
val eqU:
('k, 'a, 'id) t ->
('k, 'a, 'id) t ->
kcmp:('k, 'id) cmp ->
veq:('a -> 'a -> bool [@bs]) ->
bool
val eq:
('k, 'a, 'id) t ->
('k, 'a, 'id) t ->
kcmp:('k, 'id) cmp ->
veq:('a -> 'a -> bool) ->
bool
* ` eq(m1 , , cmp ) ` tests whether the maps ` m1 ` and ` m2 ` are equal , that is ,
contain equal keys and associate them with equal data . ` cmp ` is the
equality predicate used to compare the data associated with the keys .
contain equal keys and associate them with equal data. `cmp` is the
equality predicate used to compare the data associated with the keys. *)
val findFirstByU : ('k, 'v, 'id) t -> ('k -> 'v -> bool [@bs]) -> ('k * 'v) option
val findFirstBy : ('k, 'v, 'id) t -> ('k -> 'v -> bool ) -> ('k * 'v) option
* ` findFirstBy(m , p ) ` uses function ` f ` to find the first key value pair to
match predicate ` p ` .
` ` ` res example
module IntCmp = Belt . Id. MakeComparable ( {
type t = int
let = Pervasives.compare
} )
let s0 = Belt . Map . , " 4 " ) , ( 1 , " 1 " ) , ( 2 , " 2 " ) , ( 3 , " 3 " ) ] , ~cmp = IntCmp.cmp )
Belt . Map . Dict.findFirstBy(s0 , ( k , _ ) = > k = = 4 ) = = Some((4 , " 4 " ) )
` ` `
match predicate `p`.
```res example
module IntCmp = Belt.Id.MakeComparable({
type t = int
let cmp = Pervasives.compare
})
let s0 = Belt.Map.Dict.fromArray([(4, "4"), (1, "1"), (2, "2"), (3, "3")], ~cmp=IntCmp.cmp)
Belt.Map.Dict.findFirstBy(s0, (k, _) => k == 4) == Some((4, "4"))
```
*)
val forEachU: ('k, 'a, 'id) t -> ('k -> 'a -> unit [@bs]) -> unit
val forEach: ('k, 'a, 'id) t -> ('k -> 'a -> unit) -> unit
* ` forEach(m , f ) ` applies ` f ` to all bindings in map ` m ` . ` f ` receives the
key as first argument , and the associated value as second argument . The
bindings are passed to ` f ` in increasing order with respect to the ordering
over the type of the keys .
key as first argument, and the associated value as second argument. The
bindings are passed to `f` in increasing order with respect to the ordering
over the type of the keys. *)
val reduceU: ('k, 'a, 'id) t -> 'b -> ('b -> 'k -> 'a -> 'b [@bs]) -> 'b
val reduce: ('k, 'a, 'id) t -> 'b -> ('b -> 'k -> 'a -> 'b) -> 'b
* ` reduce(m , a , f ) ` computes ` f(kN , dN ... f(k1 , d1 , a ) ... ) ` , where ` k1 ...
kN ` are the keys of all bindings in ` m ` ( in increasing order ) , and ` d1 ...
dN ` are the associated data .
kN` are the keys of all bindings in `m` (in increasing order), and `d1 ...
dN` are the associated data. *)
val everyU: ('k, 'a, 'id) t -> ('k -> 'a -> bool [@bs]) -> bool
val every: ('k, 'a, 'id) t -> ('k -> 'a -> bool) -> bool
val someU: ('k, 'a, 'id) t -> ('k -> 'a -> bool [@bs]) -> bool
val some: ('k, 'a, 'id) t -> ('k -> 'a -> bool) -> bool
* ` some(m , p ) ` checks if at least one binding of the map satisfy the
predicate ` p ` . Order unspecified
predicate `p`. Order unspecified *)
val size: ('k, 'a, 'id) t -> int
val toList: ('k, 'a, 'id) t -> ('k * 'a) list
val toArray: ('k, 'a, 'id) t -> ('k * 'a) array
val fromArray: ('k * 'a) array -> cmp:('k,'id) cmp -> ('k,'a,'id) t
val keysToArray: ('k, 'a, 'id) t -> 'k array
val valuesToArray: ('k, 'a, 'id) t -> 'a array
val minKey: ('k, _, _) t -> 'k option
val minKeyUndefined: ('k, _, _) t -> 'k Js.undefined
val maxKey: ('k, _, _) t -> 'k option
val maxKeyUndefined: ('k, _, _) t -> 'k Js.undefined
val minimum: ('k, 'a, _) t -> ('k * 'a) option
val minUndefined: ('k, 'a, _) t -> ('k * 'a) Js.undefined
val maximum: ('k, 'a, _) t -> ('k * 'a) option
val maxUndefined:('k, 'a, _) t -> ('k * 'a) Js.undefined
val get:
('k, 'a, 'id) t -> 'k ->
cmp:('k, 'id) cmp ->
'a option
val getUndefined:
('k, 'a, 'id) t -> 'k ->
cmp:('k, 'id) cmp ->
'a Js.undefined
val getWithDefault:
('k, 'a, 'id) t -> 'k -> 'a ->
cmp:('k, 'id) cmp ->
'a
val getExn:
('k, 'a, 'id) t -> 'k ->
cmp:('k, 'id) cmp ->
'a
val checkInvariantInternal: _ t -> unit
val remove:
('a, 'b, 'id) t -> 'a ->
cmp:('a, 'id) cmp ->
('a, 'b, 'id) t
val removeMany:
('a, 'b, 'id) t ->
'a array ->
cmp:('a, 'id) cmp ->
('a, 'b, 'id) t
val set:
('a, 'b, 'id) t -> 'a -> 'b ->
cmp:('a, 'id) cmp ->
('a, 'b, 'id) t
* ` set(m , x , y ) ` returns a map containing the same bindings as ` m ` , plus a
binding of ` x ` to ` y ` . If ` x ` was already bound in ` m ` , its previous
binding disappears .
binding of `x` to `y`. If `x` was already bound in `m`, its previous
binding disappears. *)
val updateU:
('a, 'b, 'id) t ->
'a ->
('b option -> 'b option [@bs]) ->
cmp:('a, 'id) cmp ->
('a, 'b, 'id) t
val update:
('a, 'b, 'id) t ->
'a ->
('b option -> 'b option) ->
cmp:('a, 'id) cmp ->
('a, 'b, 'id) t
val mergeU:
('a, 'b, 'id) t ->
('a, 'c, 'id) t ->
('a -> 'b option -> 'c option -> 'd option [@bs]) ->
cmp:('a, 'id) cmp -> ('a, 'd, 'id) t
val merge:
('a, 'b, 'id) t ->
('a, 'c, 'id) t ->
('a -> 'b option -> 'c option -> 'd option) ->
cmp:('a, 'id) cmp -> ('a, 'd, 'id) t
* ` merge(m1 , , f ) ` computes a map whose keys is a subset of keys of ` m1 `
and of ` m2 ` . The presence of each such binding , and the corresponding
value , is determined with the function ` f ` .
and of `m2`. The presence of each such binding, and the corresponding
value, is determined with the function `f`. *)
val mergeMany:
('a, 'b, 'id) t ->
('a * 'b) array ->
cmp:('a, 'id) cmp ->
('a, 'b, 'id) t
val keepU:
('k, 'a, 'id) t ->
('k -> 'a -> bool [@bs]) ->
('k, 'a, 'id) t
val keep:
('k, 'a, 'id) t ->
('k -> 'a -> bool) ->
('k, 'a, 'id) t
val partitionU:
('k, 'a, 'id) t ->
('k -> 'a -> bool [@bs]) ->
('k, 'a, 'id) t * ('k, 'a, 'id) t
val partition:
('k, 'a, 'id) t ->
('k -> 'a -> bool) ->
('k, 'a, 'id) t * ('k, 'a, 'id) t
* ` partition(m , p ) ` returns a pair of maps ` ( m1 , m2 ) ` , where ` m1 ` contains
all the bindings of ` s ` that satisfy the predicate ` p ` , and ` m2 ` is the map
with all the bindings of ` s ` that do not satisfy ` p ` .
all the bindings of `s` that satisfy the predicate `p`, and `m2` is the map
with all the bindings of `s` that do not satisfy `p`. *)
val split:
('a, 'b, 'id) t ->
'a ->
cmp:('a, 'id) cmp ->
(('a,'b,'id) t * ('a, 'b, 'id) t) * 'b option
val mapU: ('k, 'a, 'id) t -> ('a -> 'b [@bs]) -> ('k ,'b,'id) t
val map: ('k, 'a, 'id) t -> ('a -> 'b) -> ('k ,'b,'id) t
val mapWithKeyU: ('k, 'a, 'id) t -> ('k -> 'a -> 'b [@bs]) -> ('k, 'b, 'id) t
val mapWithKey: ('k, 'a, 'id) t -> ('k -> 'a -> 'b) -> ('k, 'b, 'id) t
|
3023c9eceb7790ec077e67ad3de94b9fa4f37ce0889254d58971cba0d8b2d7fa | saltlang/saltlang | Monad.hs | Copyright ( c ) 2015 . All rights reserved .
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
--
1 . Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
2 . Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
3 . Neither the name of the author nor the names of any contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ` ` AS IS ''
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
-- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT
LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF
-- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY ,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
module Tests.Control.Monad(tests) where
import Test.HUnitPlus.Base
tests :: Test
tests = "Monad" ~: ([] :: [Test])
| null | https://raw.githubusercontent.com/saltlang/saltlang/f3478904139f19373f23824f25d28e8be745dc60/test/library/Tests/Control/Monad.hs | haskell |
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
notice, this list of conditions and the following disclaimer.
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
may be used to endorse or promote products derived from this software
without specific prior written permission.
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE. | Copyright ( c ) 2015 . All rights reserved .
1 . Redistributions of source code must retain the above copyright
2 . Redistributions in binary form must reproduce the above copyright
3 . Neither the name of the author nor the names of any contributors
THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ` ` AS IS ''
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT
LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF
ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY ,
module Tests.Control.Monad(tests) where
import Test.HUnitPlus.Base
tests :: Test
tests = "Monad" ~: ([] :: [Test])
|
bb00a3d89c4c9cfbf99d526fecf400a907bb4caa5cdeb33ecafd7d6f70310b03 | mogenslund/liquid | tty_output.cljc | (ns liq.tty-output
(:require [liq.buffer :as buffer]
#?(:clj [clojure.java.io :as io])
#?(:clj [clojure.java.shell :as shell])
[liq.tty-shared :as shared]
; :cljs [lumo.io :as io]
[clojure.string :as str]))
(def settings (atom {::cursor-draw-hack false})) ;; cursor-draw-hack draws a block when cursors moves to avoid flicker
(def ^:private last-buffer (atom nil))
(def esc "\033[")
(defn rows
[]
#?(:clj (loop [shellinfo ((shell/sh "/bin/sh" "-c" "stty size </dev/tty") :out) n 0]
(if (or (re-find #"^\d+" shellinfo) (> n 10))
(Integer/parseInt (re-find #"^\d+" shellinfo))
(do
(shared/tty-println n)
(Thread/sleep 100)
(recur ((shell/sh "/bin/sh" "-c" "stty size </dev/tty") :out) (inc n)))))
:cljs (or 40 (aget (js/process.stdout.getWindowSize) 0))))
(defn cols
[]
#?(:clj (loop [shellinfo ((shell/sh "/bin/sh" "-c" "stty size </dev/tty") :out) n 0]
(if (or (re-find #"\d+$" shellinfo) (> n 10))
(dec (Integer/parseInt (re-find #"\d+$" shellinfo)))
(do
(shared/tty-println n)
(Thread/sleep 100)
(recur ((shell/sh "/bin/sh" "-c" "stty size </dev/tty") :out) (inc n)))))
:cljs (or 120 (aget (js/process.stdout.getWindowSize) 0))))
(defn get-dimensions
[]
{:rows (rows) :cols (cols)})
(defn buffer-footprint
[buf]
[(buf ::buffer/window) (buf ::buffer/name) (buf ::buffer/file-name)])
(def theme
{:string "38;5;131"
:keyword "38;5;117"
:comment "38;5;105"
:special "38;5;11"
:green "38;5;40"
:yellow "38;5;11"
:red "38;5;196"
:definition "38;5;40"
nil "0"})
(def char-cache (atom {}))
(def countdown-cache (atom 0))
(defn- draw-char
[ch row col color bgcolor]
(let [k (str row "-" col)
footprint (str ch row col color bgcolor)]
(when (not= (@char-cache k) footprint)
(reset! countdown-cache 9))
(when (> @countdown-cache 0)
(swap! countdown-cache dec)
(shared/tty-print esc color "m")
(shared/tty-print esc bgcolor "m")
(shared/tty-print esc row ";" col "H" esc "s" ch)
(swap! char-cache assoc k footprint))))
(defn invalidate-cache
[]
(shared/tty-print esc "2J")
(reset! char-cache {}))
(defn double-width?
[c]
(cond (re-matches #"[A-ÿ]" c) false
Japanese
Hiragana
( full - width )
( half - width )
(re-matches #"[\u3400-\u4DB5\u4E00-\u9FCB\uF900-\uFA6A]" c) true ;; Kanji
(re-matches #"[\u2E80-\u2FD5]" c) true ;; Kanji Radicals
Alphanumeric and Punctuation ( full - width )
(re-matches #"[\u3000-\u303F]" c) true ;; Symbols and Punctuation
:else false))
(defn print-buffer
[buf]
(let [cache-id (buffer-footprint buf)
tw (or (buf ::buffer/tabwidth) 8)
w (buf ::buffer/window)
top (w ::buffer/top) ; Window top margin
left (w ::buffer/left) ; Window left margin
rows (w ::buffer/rows) ; Window rows
cols (w ::buffer/cols) ; Window cols
tow (buf ::buffer/tow) ; Top of window
Cursor row
Cursor col
(when (and (@settings ::cursor-draw-hack) (= cache-id @last-buffer))
(shared/tty-print "█")) ; To make it look like the cursor is still there while drawing.
(shared/tty-print esc "?25l") ; Hide cursor
(when-let [statusline (and (not= (buf ::buffer/name) "*minibuffer*") (buf :status-line))]
(print-buffer statusline))
;; Looping over the rows and cols in buffer window in the terminal
(loop [trow top ; Terminal row
tcol left ; Terminal col
row (tow ::buffer/row)
col (tow ::buffer/col)
cursor-row nil
cursor-col nil
ccolor "0"]
(if (< trow (+ rows top))
(do
;; Check if row has changed...
Char map like { : : buffer / char \x : : buffer / style : string }
c-width (cond (= (cm ::buffer/char) \tab) (- tw (mod (- tcol left) tw))
(double-width? (str (cm ::buffer/char))) 2
true 1) ; Width of the char
cursor-match (or (and (= row crow) (= col ccol))
(and (= row crow) (not cursor-col) (> col ccol))
(and (not cursor-row) (> row crow)))
c (cond (and (@settings ::cursor-draw-hack) cursor-match (buf :status-line)) "█"
(= (cm ::buffer/char) \tab) (str/join (repeat c-width " "))
(= (cm ::buffer/char) \return) (char 633)
(cm ::buffer/char) (cm ::buffer/char)
(and (= col (inc (buffer/col-count buf row))) (> (buffer/next-visible-row buf row) (+ row 1))) "…"
(and (= col 1) (> row (buffer/line-count buf))) (str esc "36m~" esc "0m")
true \space)
new-cursor-row (if cursor-match trow cursor-row)
new-cursor-col (if cursor-match tcol cursor-col)
color (theme (cm ::buffer/style))
bgcolor (if (buffer/selected? buf row col) "48;5;17" "49")
last-col (+ cols left -1)
n-trow (if (< last-col tcol) (inc trow) trow)
;n-tcol (if (< last-col tcol) left (inc tcol))
n-tcol (if (< last-col tcol) left (+ tcol c-width))
n-row (cond (and (< last-col tcol) (> col (buffer/col-count buf row))) (buffer/next-visible-row buf row)
true row)
n-col (cond (and (< last-col tcol) (> col (buffer/col-count buf row))) 1
true (inc col))]
(draw-char c trow tcol color bgcolor)
(recur n-trow n-tcol n-row n-col new-cursor-row new-cursor-col (if cursor-match color ccolor))))
(do
(when-let [c (w ::buffer/bottom-border)]
(doseq [co (range left (+ left cols))]
(draw-char c (+ top rows) co "38;5;11" "49")))
(when (buf :status-line)
(shared/tty-print esc ccolor "m" esc cursor-row ";" cursor-col "H" esc "s" (or (and (not= (buffer/get-char buf) \tab) (buffer/get-char buf)) \space))
( draw - char ( or ( and ( not= ( buffer / get - char buf ) \tab ) ( buffer / get - char buf ) ) ) cursor - row cursor - col ccolor " 49 " )
(shared/tty-print esc "?25h" esc cursor-row ";" cursor-col "H" esc "s")
(shared/flush-output)
(reset! last-buffer cache-id)))))))
(def ^:private updater (atom nil))
(def ^:private queue (atom []))
(def ^:private next-buffer (atom nil))
#?(:cljs (js/setInterval
#(when-let [buf @next-buffer]
(reset! next-buffer nil)
(print-buffer buf))
20))
(defn printer
[buf]
#?(:clj (let [fp (buffer-footprint buf)]
Replace outdated versions of buf
(swap! queue
(fn [q] (conj
(filterv #(not= (buffer-footprint %) fp) q)
buf)))
(when (not @updater) (reset! updater (future nil)))
(when (future-done? @updater)
(reset! updater
(future
(while (not (empty? @queue))
(when-let [b (first @queue)]
(swap! queue #(subvec % 1))
(print-buffer b)))))))
:cljs (reset! next-buffer buf)))
(def output-handler
{:printer printer
:invalidate invalidate-cache
:dimensions get-dimensions})
| null | https://raw.githubusercontent.com/mogenslund/liquid/bc90dcf83a30c3b7a4ad50a1c5d440a86a0f999f/src/liq/tty_output.cljc | clojure | :cljs [lumo.io :as io]
cursor-draw-hack draws a block when cursors moves to avoid flicker
Kanji
Kanji Radicals
Symbols and Punctuation
Window top margin
Window left margin
Window rows
Window cols
Top of window
To make it look like the cursor is still there while drawing.
Hide cursor
Looping over the rows and cols in buffer window in the terminal
Terminal row
Terminal col
Check if row has changed...
Width of the char
n-tcol (if (< last-col tcol) left (inc tcol)) | (ns liq.tty-output
(:require [liq.buffer :as buffer]
#?(:clj [clojure.java.io :as io])
#?(:clj [clojure.java.shell :as shell])
[liq.tty-shared :as shared]
[clojure.string :as str]))
(def ^:private last-buffer (atom nil))
(def esc "\033[")
(defn rows
[]
#?(:clj (loop [shellinfo ((shell/sh "/bin/sh" "-c" "stty size </dev/tty") :out) n 0]
(if (or (re-find #"^\d+" shellinfo) (> n 10))
(Integer/parseInt (re-find #"^\d+" shellinfo))
(do
(shared/tty-println n)
(Thread/sleep 100)
(recur ((shell/sh "/bin/sh" "-c" "stty size </dev/tty") :out) (inc n)))))
:cljs (or 40 (aget (js/process.stdout.getWindowSize) 0))))
(defn cols
[]
#?(:clj (loop [shellinfo ((shell/sh "/bin/sh" "-c" "stty size </dev/tty") :out) n 0]
(if (or (re-find #"\d+$" shellinfo) (> n 10))
(dec (Integer/parseInt (re-find #"\d+$" shellinfo)))
(do
(shared/tty-println n)
(Thread/sleep 100)
(recur ((shell/sh "/bin/sh" "-c" "stty size </dev/tty") :out) (inc n)))))
:cljs (or 120 (aget (js/process.stdout.getWindowSize) 0))))
(defn get-dimensions
[]
{:rows (rows) :cols (cols)})
(defn buffer-footprint
[buf]
[(buf ::buffer/window) (buf ::buffer/name) (buf ::buffer/file-name)])
(def theme
{:string "38;5;131"
:keyword "38;5;117"
:comment "38;5;105"
:special "38;5;11"
:green "38;5;40"
:yellow "38;5;11"
:red "38;5;196"
:definition "38;5;40"
nil "0"})
(def char-cache (atom {}))
(def countdown-cache (atom 0))
(defn- draw-char
[ch row col color bgcolor]
(let [k (str row "-" col)
footprint (str ch row col color bgcolor)]
(when (not= (@char-cache k) footprint)
(reset! countdown-cache 9))
(when (> @countdown-cache 0)
(swap! countdown-cache dec)
(shared/tty-print esc color "m")
(shared/tty-print esc bgcolor "m")
(shared/tty-print esc row ";" col "H" esc "s" ch)
(swap! char-cache assoc k footprint))))
(defn invalidate-cache
[]
(shared/tty-print esc "2J")
(reset! char-cache {}))
(defn double-width?
[c]
(cond (re-matches #"[A-ÿ]" c) false
Japanese
Hiragana
( full - width )
( half - width )
Alphanumeric and Punctuation ( full - width )
:else false))
(defn print-buffer
[buf]
(let [cache-id (buffer-footprint buf)
tw (or (buf ::buffer/tabwidth) 8)
w (buf ::buffer/window)
Cursor row
Cursor col
(when (and (@settings ::cursor-draw-hack) (= cache-id @last-buffer))
(when-let [statusline (and (not= (buf ::buffer/name) "*minibuffer*") (buf :status-line))]
(print-buffer statusline))
row (tow ::buffer/row)
col (tow ::buffer/col)
cursor-row nil
cursor-col nil
ccolor "0"]
(if (< trow (+ rows top))
(do
Char map like { : : buffer / char \x : : buffer / style : string }
c-width (cond (= (cm ::buffer/char) \tab) (- tw (mod (- tcol left) tw))
(double-width? (str (cm ::buffer/char))) 2
cursor-match (or (and (= row crow) (= col ccol))
(and (= row crow) (not cursor-col) (> col ccol))
(and (not cursor-row) (> row crow)))
c (cond (and (@settings ::cursor-draw-hack) cursor-match (buf :status-line)) "█"
(= (cm ::buffer/char) \tab) (str/join (repeat c-width " "))
(= (cm ::buffer/char) \return) (char 633)
(cm ::buffer/char) (cm ::buffer/char)
(and (= col (inc (buffer/col-count buf row))) (> (buffer/next-visible-row buf row) (+ row 1))) "…"
(and (= col 1) (> row (buffer/line-count buf))) (str esc "36m~" esc "0m")
true \space)
new-cursor-row (if cursor-match trow cursor-row)
new-cursor-col (if cursor-match tcol cursor-col)
color (theme (cm ::buffer/style))
bgcolor (if (buffer/selected? buf row col) "48;5;17" "49")
last-col (+ cols left -1)
n-trow (if (< last-col tcol) (inc trow) trow)
n-tcol (if (< last-col tcol) left (+ tcol c-width))
n-row (cond (and (< last-col tcol) (> col (buffer/col-count buf row))) (buffer/next-visible-row buf row)
true row)
n-col (cond (and (< last-col tcol) (> col (buffer/col-count buf row))) 1
true (inc col))]
(draw-char c trow tcol color bgcolor)
(recur n-trow n-tcol n-row n-col new-cursor-row new-cursor-col (if cursor-match color ccolor))))
(do
(when-let [c (w ::buffer/bottom-border)]
(doseq [co (range left (+ left cols))]
(draw-char c (+ top rows) co "38;5;11" "49")))
(when (buf :status-line)
(shared/tty-print esc ccolor "m" esc cursor-row ";" cursor-col "H" esc "s" (or (and (not= (buffer/get-char buf) \tab) (buffer/get-char buf)) \space))
( draw - char ( or ( and ( not= ( buffer / get - char buf ) \tab ) ( buffer / get - char buf ) ) ) cursor - row cursor - col ccolor " 49 " )
(shared/tty-print esc "?25h" esc cursor-row ";" cursor-col "H" esc "s")
(shared/flush-output)
(reset! last-buffer cache-id)))))))
(def ^:private updater (atom nil))
(def ^:private queue (atom []))
(def ^:private next-buffer (atom nil))
#?(:cljs (js/setInterval
#(when-let [buf @next-buffer]
(reset! next-buffer nil)
(print-buffer buf))
20))
(defn printer
[buf]
#?(:clj (let [fp (buffer-footprint buf)]
Replace outdated versions of buf
(swap! queue
(fn [q] (conj
(filterv #(not= (buffer-footprint %) fp) q)
buf)))
(when (not @updater) (reset! updater (future nil)))
(when (future-done? @updater)
(reset! updater
(future
(while (not (empty? @queue))
(when-let [b (first @queue)]
(swap! queue #(subvec % 1))
(print-buffer b)))))))
:cljs (reset! next-buffer buf)))
(def output-handler
{:printer printer
:invalidate invalidate-cache
:dimensions get-dimensions})
|
a05417548b25bad4dba5405c1c8560440fa379679096c9789f61470a6086b023 | haskell-opengl/OpenGLRaw | SemaphoreFd.hs | # LANGUAGE PatternSynonyms #
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.GL.EXT.SemaphoreFd
Copyright : ( c ) 2019
-- License : BSD3
--
Maintainer : < >
-- Stability : stable
-- Portability : portable
--
--------------------------------------------------------------------------------
module Graphics.GL.EXT.SemaphoreFd (
-- * Extension Support
glGetEXTSemaphoreFd,
gl_EXT_semaphore_fd,
-- * Enums
pattern GL_HANDLE_TYPE_OPAQUE_FD_EXT,
-- * Functions
glImportSemaphoreFdEXT
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
import Graphics.GL.Functions
| null | https://raw.githubusercontent.com/haskell-opengl/OpenGLRaw/57e50c9d28dfa62d6a87ae9b561af28f64ce32a0/src/Graphics/GL/EXT/SemaphoreFd.hs | haskell | ------------------------------------------------------------------------------
|
Module : Graphics.GL.EXT.SemaphoreFd
License : BSD3
Stability : stable
Portability : portable
------------------------------------------------------------------------------
* Extension Support
* Enums
* Functions | # LANGUAGE PatternSynonyms #
Copyright : ( c ) 2019
Maintainer : < >
module Graphics.GL.EXT.SemaphoreFd (
glGetEXTSemaphoreFd,
gl_EXT_semaphore_fd,
pattern GL_HANDLE_TYPE_OPAQUE_FD_EXT,
glImportSemaphoreFdEXT
) where
import Graphics.GL.ExtensionPredicates
import Graphics.GL.Tokens
import Graphics.GL.Functions
|
8d3c6c9285883437c0ff36008ffa83d86ab0f8d426ac38118aa28393d66fb785 | Clozure/ccl-tests | oneminus.lsp | ;-*- Mode: Lisp -*-
Author :
Created : Mon Sep 1 20:14:34 2003
;;;; Contains: Tests of 1-
(in-package :cl-test)
(compile-and-load "numbers-aux.lsp")
;;; Error tests
(deftest 1-.error.1
(signals-error (1-) program-error)
t)
(deftest 1-.error.2
(signals-error (1- 0 0) program-error)
t)
(deftest 1-.error.3
(signals-error (1- 0 nil nil) program-error)
t)
;;; Non-error tests
(deftest 1-.1
(loop for x = (random-fixnum)
for y = (1- x)
for z = (- x 1)
repeat 1000
unless (eql y z)
collect (list x y z))
nil)
(deftest 1-.2
(loop for x = (random-from-interval (ash 1 1000))
for y = (1- x)
for z = (- x 1)
repeat 1000
unless (eql y z)
collect (list x y z))
nil)
(deftest 1-.3
(loop for x = (random (1- most-positive-short-float))
for y = (1- x)
for z = (- x 1.0s0)
repeat 1000
unless (eql y z)
collect (list x y z))
nil)
(deftest 1-.4
(loop for x = (random (1- most-positive-single-float))
for y = (1- x)
for z = (- x 1.0f0)
repeat 1000
unless (eql y z)
collect (list x y z))
nil)
(deftest 1-.5
(loop for x = (random (1- most-positive-double-float))
for y = (1- x)
for z = (- x 1.0d0)
repeat 1000
unless (eql y z)
collect (list x y z))
nil)
(deftest 1-.6
(loop for x = (random (1- most-positive-long-float))
for y = (1- x)
for z = (- x 1.0l0)
repeat 1000
unless (eql y z)
collect (list x y z))
nil)
(deftest 1-.7
(loop for x = (random-fixnum)
for y = (random-fixnum)
for y2 = (if (zerop y) 1 y)
for r = (/ x y2)
for r1 = (1- r)
for r2 = (- r 1)
repeat 1000
unless (eql r1 r2)
collect (list x y2 r1 r2))
nil)
(deftest 1-.8
(let ((bound (ash 1 200)))
(loop for x = (random-from-interval bound)
for y = (random-from-interval bound)
for y2 = (if (zerop y) 1 y)
for r = (/ x y2)
for r1 = (1- r)
for r2 = (- r 1)
repeat 1000
unless (eql r1 r2)
collect (list x y2 r1 r2)))
nil)
;;; Complex numbers
(deftest 1-.9
(loop for xr = (random-fixnum)
for xi = (random-fixnum)
for xc = (complex xr xi)
for xc1 = (1- xc)
repeat 1000
unless (eql xc1 (complex (- xr 1) xi))
collect (list xr xi xc xc1))
nil)
(deftest 1-.10
(let ((bound (ash 1 100)))
(loop for xr = (random-from-interval bound)
for xi = (random-from-interval bound)
for xc = (complex xr xi)
for xc1 = (1- xc)
repeat 1000
unless (eql xc1 (complex (- xr 1) xi))
collect (list xr xi xc xc1)))
nil)
(deftest 1-.11
(let ((bound (1- most-positive-short-float)))
(loop for xr = (random bound)
for xi = (random bound)
for xc = (complex xr xi)
for xc1 = (1- xc)
repeat 1000
unless (eql xc1 (complex (- xr 1) xi))
collect (list xr xi xc xc1)))
nil)
(deftest 1-.12
(let ((bound (1- most-positive-single-float)))
(loop for xr = (random bound)
for xi = (random bound)
for xc = (complex xr xi)
for xc1 = (1- xc)
repeat 1000
unless (eql xc1 (complex (- xr 1) xi))
collect (list xr xi xc xc1)))
nil)
(deftest 1-.13
(let ((bound (1- most-positive-double-float)))
(loop for xr = (random bound)
for xi = (random bound)
for xc = (complex xr xi)
for xc1 = (1- xc)
repeat 1000
unless (eql xc1 (complex (- xr 1) xi))
collect (list xr xi xc xc1)))
nil)
(deftest 1-.14
(let ((bound (1- most-positive-long-float)))
(loop for xr = (random bound)
for xi = (random bound)
for xc = (complex xr xi)
for xc1 = (1- xc)
repeat 1000
unless (eql xc1 (complex (- xr 1) xi))
collect (list xr xi xc xc1)))
nil)
(deftest 1-.15
(macrolet ((%m (z) z)) (1- (expand-in-current-env (%m 2))))
1)
| null | https://raw.githubusercontent.com/Clozure/ccl-tests/0478abddb34dbc16487a1975560d8d073a988060/ansi-tests/oneminus.lsp | lisp | -*- Mode: Lisp -*-
Contains: Tests of 1-
Error tests
Non-error tests
Complex numbers | Author :
Created : Mon Sep 1 20:14:34 2003
(in-package :cl-test)
(compile-and-load "numbers-aux.lsp")
(deftest 1-.error.1
(signals-error (1-) program-error)
t)
(deftest 1-.error.2
(signals-error (1- 0 0) program-error)
t)
(deftest 1-.error.3
(signals-error (1- 0 nil nil) program-error)
t)
(deftest 1-.1
(loop for x = (random-fixnum)
for y = (1- x)
for z = (- x 1)
repeat 1000
unless (eql y z)
collect (list x y z))
nil)
(deftest 1-.2
(loop for x = (random-from-interval (ash 1 1000))
for y = (1- x)
for z = (- x 1)
repeat 1000
unless (eql y z)
collect (list x y z))
nil)
(deftest 1-.3
(loop for x = (random (1- most-positive-short-float))
for y = (1- x)
for z = (- x 1.0s0)
repeat 1000
unless (eql y z)
collect (list x y z))
nil)
(deftest 1-.4
(loop for x = (random (1- most-positive-single-float))
for y = (1- x)
for z = (- x 1.0f0)
repeat 1000
unless (eql y z)
collect (list x y z))
nil)
(deftest 1-.5
(loop for x = (random (1- most-positive-double-float))
for y = (1- x)
for z = (- x 1.0d0)
repeat 1000
unless (eql y z)
collect (list x y z))
nil)
(deftest 1-.6
(loop for x = (random (1- most-positive-long-float))
for y = (1- x)
for z = (- x 1.0l0)
repeat 1000
unless (eql y z)
collect (list x y z))
nil)
(deftest 1-.7
(loop for x = (random-fixnum)
for y = (random-fixnum)
for y2 = (if (zerop y) 1 y)
for r = (/ x y2)
for r1 = (1- r)
for r2 = (- r 1)
repeat 1000
unless (eql r1 r2)
collect (list x y2 r1 r2))
nil)
(deftest 1-.8
(let ((bound (ash 1 200)))
(loop for x = (random-from-interval bound)
for y = (random-from-interval bound)
for y2 = (if (zerop y) 1 y)
for r = (/ x y2)
for r1 = (1- r)
for r2 = (- r 1)
repeat 1000
unless (eql r1 r2)
collect (list x y2 r1 r2)))
nil)
(deftest 1-.9
(loop for xr = (random-fixnum)
for xi = (random-fixnum)
for xc = (complex xr xi)
for xc1 = (1- xc)
repeat 1000
unless (eql xc1 (complex (- xr 1) xi))
collect (list xr xi xc xc1))
nil)
(deftest 1-.10
(let ((bound (ash 1 100)))
(loop for xr = (random-from-interval bound)
for xi = (random-from-interval bound)
for xc = (complex xr xi)
for xc1 = (1- xc)
repeat 1000
unless (eql xc1 (complex (- xr 1) xi))
collect (list xr xi xc xc1)))
nil)
(deftest 1-.11
(let ((bound (1- most-positive-short-float)))
(loop for xr = (random bound)
for xi = (random bound)
for xc = (complex xr xi)
for xc1 = (1- xc)
repeat 1000
unless (eql xc1 (complex (- xr 1) xi))
collect (list xr xi xc xc1)))
nil)
(deftest 1-.12
(let ((bound (1- most-positive-single-float)))
(loop for xr = (random bound)
for xi = (random bound)
for xc = (complex xr xi)
for xc1 = (1- xc)
repeat 1000
unless (eql xc1 (complex (- xr 1) xi))
collect (list xr xi xc xc1)))
nil)
(deftest 1-.13
(let ((bound (1- most-positive-double-float)))
(loop for xr = (random bound)
for xi = (random bound)
for xc = (complex xr xi)
for xc1 = (1- xc)
repeat 1000
unless (eql xc1 (complex (- xr 1) xi))
collect (list xr xi xc xc1)))
nil)
(deftest 1-.14
(let ((bound (1- most-positive-long-float)))
(loop for xr = (random bound)
for xi = (random bound)
for xc = (complex xr xi)
for xc1 = (1- xc)
repeat 1000
unless (eql xc1 (complex (- xr 1) xi))
collect (list xr xi xc xc1)))
nil)
(deftest 1-.15
(macrolet ((%m (z) z)) (1- (expand-in-current-env (%m 2))))
1)
|
77e9f27e01b56d08aeafec64579a1d4c621881ff3cbf05f39aebc96a2c6cefe6 | pjones/playlists | PLSSpec.hs |
This file is part of the package playlists . It is subject to
the license terms in the LICENSE file found in the top - level directory
of this distribution and at git . No part
of playlists package , including this file , may be copied , modified ,
propagated , or distributed except according to the terms contained in
the LICENSE file .
This file is part of the Haskell package playlists. It is subject to
the license terms in the LICENSE file found in the top-level directory
of this distribution and at git. No part
of playlists package, including this file, may be copied, modified,
propagated, or distributed except according to the terms contained in
the LICENSE file.
-}
--------------------------------------------------------------------------------
module PLSSpec (spec) where
--------------------------------------------------------------------------------
import Examples (secretAgent, pigRadio, utf8Radio)
import Helper (playlistFromFile, roundTrip)
import Test.Hspec
import Text.Playlist
--------------------------------------------------------------------------------
spec :: Spec
spec = do
describe "Parsing" $ do
it "Secret Agent" $ playlistFromFile' "sa" `shouldReturn` secretAgent
it "Pig Radio" $ playlistFromFile' "pig" `shouldReturn` pigRadio
it "UTF8 Radio" $ playlistFromFile' "utf8" `shouldReturn` utf8Radio
describe "Generating" $ do
it "Secret Agent" $ roundTrip' secretAgent `shouldReturn` secretAgent
it "Pig Radio" $ roundTrip' pigRadio `shouldReturn` pigRadio
it "UTF8 Radio" $ roundTrip' utf8Radio `shouldReturn` utf8Radio
--------------------------------------------------------------------------------
playlistFromFile' :: FilePath -> IO Playlist
playlistFromFile' = playlistFromFile PLS
--------------------------------------------------------------------------------
roundTrip' :: Playlist -> IO Playlist
roundTrip' = roundTrip PLS
| null | https://raw.githubusercontent.com/pjones/playlists/35e4a91e599eac4a1c6f24500d120692e709fd1f/test/PLSSpec.hs | haskell | ------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------ |
This file is part of the package playlists . It is subject to
the license terms in the LICENSE file found in the top - level directory
of this distribution and at git . No part
of playlists package , including this file , may be copied , modified ,
propagated , or distributed except according to the terms contained in
the LICENSE file .
This file is part of the Haskell package playlists. It is subject to
the license terms in the LICENSE file found in the top-level directory
of this distribution and at git. No part
of playlists package, including this file, may be copied, modified,
propagated, or distributed except according to the terms contained in
the LICENSE file.
-}
module PLSSpec (spec) where
import Examples (secretAgent, pigRadio, utf8Radio)
import Helper (playlistFromFile, roundTrip)
import Test.Hspec
import Text.Playlist
spec :: Spec
spec = do
describe "Parsing" $ do
it "Secret Agent" $ playlistFromFile' "sa" `shouldReturn` secretAgent
it "Pig Radio" $ playlistFromFile' "pig" `shouldReturn` pigRadio
it "UTF8 Radio" $ playlistFromFile' "utf8" `shouldReturn` utf8Radio
describe "Generating" $ do
it "Secret Agent" $ roundTrip' secretAgent `shouldReturn` secretAgent
it "Pig Radio" $ roundTrip' pigRadio `shouldReturn` pigRadio
it "UTF8 Radio" $ roundTrip' utf8Radio `shouldReturn` utf8Radio
playlistFromFile' :: FilePath -> IO Playlist
playlistFromFile' = playlistFromFile PLS
roundTrip' :: Playlist -> IO Playlist
roundTrip' = roundTrip PLS
|
4bc09fb638d9986a553db5edf3200d59baaec15d40229179ec73cbf0c8044155 | erlang/erlide_kernel | ttb_integration.erl | Author :
Created : Jul 29 , 2010
-module(ttb_integration).
%%
%% Exported Functions
%%
-export([start/3, stop/0, load/3, load_data/3, get_file_info/1, str2ms/1]).
start(NodesAndCookies, FileName, NetTicktime)->
ttbe:stop(),
net_kernel:set_net_ticktime(NetTicktime),
Nodes = set_cookies(NodesAndCookies),
ttbe:tracer(Nodes, [{file,{local, FileName}}]).
set_cookies(NodesAndCookies) ->
F = fun({Node, Cookie}) ->
case Cookie of
'' ->
ok;
_ ->
erlang:set_cookie(Node, Cookie)
end,
Node
end,
lists:map(F, NodesAndCookies).
stop() ->
{stopped, Dir} = ttbe:stop([return]),
spawn(?MODULE, get_file_info, [Dir]).
get_file_info(Path) ->
Result = (catch ttbe:format(Path, [{handler, {create_info_handler(Path), {true, 0, '_', '_'}}}])),
case Result of
ok -> erlide_jrpc:event(trace_event, stop_tracing);
{error, Reason} -> erlide_jrpc:event(trace_event, {error_loading, Reason});
_ -> erlide_jrpc:event(trace_event, {error_loading, "Can not load data"})
end.
load(Path, Start, Stop) ->
spawn(?MODULE, load_data, [Path, Start, Stop]).
load_data(Path, Start, Stop) ->
Result = (catch ttbe:format(Path, [{handler, {create_load_handler(Start, Stop), 1}}])),
case Result of
ok -> erlide_jrpc:event(trace_event, stop_tracing);
{error, Reason} -> erlide_jrpc:event(trace_event, {error_loading, Reason});
_ -> erlide_jrpc:event(trace_event, {error_loading, "Can not load data"})
end.
create_load_handler(Start, Stop) ->
fun(_Fd, Trace, _TraceInfo, State) ->
if
State >= Start, State =< Stop ->
case Trace of
{trace_ts, Pid, call, {Mod, Fun, Args}, Time} ->
erlide_jrpc:event(trace_event, {trace_ts, Pid, call, {Mod, Fun,[avoid_interpreting_as_string] ++ Args}, calendar:now_to_local_time(Time)});
{trace_ts, Pid, spawn, Pid2, {M, F, Args}, Time} ->
erlide_jrpc:event(trace_event, {trace_ts, Pid, spawn, Pid2, {M, F, [avoid_interpreting_as_string] ++ Args}, calendar:now_to_local_time(Time)});
{trace_ts, _, _, _, Time} ->
T = calendar:now_to_local_time(Time),
erlide_jrpc:event(trace_event, setelement(tuple_size(Trace), Trace, T));
{trace_ts, _, _, _, _, Time} ->
T = calendar:now_to_local_time(Time),
erlide_jrpc:event(trace_event, setelement(tuple_size(Trace), Trace, T));
_ ->
erlide_jrpc:event(trace_event, Trace)
end;
true -> ok
end,
State + 1
end.
create_info_handler(Path) ->
fun(_Fd, Trace, _TraceInfo, State) ->
{First, Count, Start_date, End_date} = State,
case Trace of
end_of_trace ->
case First of
true ->
erlide_jrpc:event(trace_event, {file_info, empty});
false ->
erlide_jrpc:event(trace_event, {file_info, calendar:now_to_local_time(Start_date), calendar:now_to_local_time(End_date), Path, Count})
end;
{trace_ts, _, _, _, Time} ->
case First of
true -> {false, Count + 1, Time, Time};
_ -> {false, Count + 1, Start_date, Time}
end;
{trace_ts, _, _, _, _, Time} ->
case First of
true -> {false, Count + 1, Time, Time};
_ -> {false, Count + 1, Start_date, Time}
end;
_ -> {First, Count + 1, Start_date, End_date}
end
end.
str2fun(S) ->
case erl_scan:string(S) of
{error, ErrorInfo, _} ->
{error, ErrorInfo};
{ok, Tokens, _} ->
case erl_parse:parse_exprs(Tokens) of
{error, ErrorInfo} ->
{error, ErrorInfo};
{ok, Expr_list} ->
{value, Value, _} = erl_eval:exprs(Expr_list, erl_eval:new_bindings()),
{ok, Value}
end
end.
str2ms(S) ->
try (str2fun(S)) of
{error, ErrorInfo} ->
{error, standard_info, ErrorInfo};
{ok, Fun} ->
try dbg:fun2ms(Fun) of
{error, ErrorInfo} -> {error, standard_info, ErrorInfo};
Result -> {ok, Result}
catch error:function_clause -> {error, not_fun} end
catch
error:{unbound_var, X} -> {error, unbound_var, X}
end.
| null | https://raw.githubusercontent.com/erlang/erlide_kernel/763a7fe47213f374b59862fd5a17d5dcc2811c7b/common/apps/erlide_tools/src/ttb_integration.erl | erlang |
Exported Functions
| Author :
Created : Jul 29 , 2010
-module(ttb_integration).
-export([start/3, stop/0, load/3, load_data/3, get_file_info/1, str2ms/1]).
start(NodesAndCookies, FileName, NetTicktime)->
ttbe:stop(),
net_kernel:set_net_ticktime(NetTicktime),
Nodes = set_cookies(NodesAndCookies),
ttbe:tracer(Nodes, [{file,{local, FileName}}]).
set_cookies(NodesAndCookies) ->
F = fun({Node, Cookie}) ->
case Cookie of
'' ->
ok;
_ ->
erlang:set_cookie(Node, Cookie)
end,
Node
end,
lists:map(F, NodesAndCookies).
stop() ->
{stopped, Dir} = ttbe:stop([return]),
spawn(?MODULE, get_file_info, [Dir]).
get_file_info(Path) ->
Result = (catch ttbe:format(Path, [{handler, {create_info_handler(Path), {true, 0, '_', '_'}}}])),
case Result of
ok -> erlide_jrpc:event(trace_event, stop_tracing);
{error, Reason} -> erlide_jrpc:event(trace_event, {error_loading, Reason});
_ -> erlide_jrpc:event(trace_event, {error_loading, "Can not load data"})
end.
load(Path, Start, Stop) ->
spawn(?MODULE, load_data, [Path, Start, Stop]).
load_data(Path, Start, Stop) ->
Result = (catch ttbe:format(Path, [{handler, {create_load_handler(Start, Stop), 1}}])),
case Result of
ok -> erlide_jrpc:event(trace_event, stop_tracing);
{error, Reason} -> erlide_jrpc:event(trace_event, {error_loading, Reason});
_ -> erlide_jrpc:event(trace_event, {error_loading, "Can not load data"})
end.
create_load_handler(Start, Stop) ->
fun(_Fd, Trace, _TraceInfo, State) ->
if
State >= Start, State =< Stop ->
case Trace of
{trace_ts, Pid, call, {Mod, Fun, Args}, Time} ->
erlide_jrpc:event(trace_event, {trace_ts, Pid, call, {Mod, Fun,[avoid_interpreting_as_string] ++ Args}, calendar:now_to_local_time(Time)});
{trace_ts, Pid, spawn, Pid2, {M, F, Args}, Time} ->
erlide_jrpc:event(trace_event, {trace_ts, Pid, spawn, Pid2, {M, F, [avoid_interpreting_as_string] ++ Args}, calendar:now_to_local_time(Time)});
{trace_ts, _, _, _, Time} ->
T = calendar:now_to_local_time(Time),
erlide_jrpc:event(trace_event, setelement(tuple_size(Trace), Trace, T));
{trace_ts, _, _, _, _, Time} ->
T = calendar:now_to_local_time(Time),
erlide_jrpc:event(trace_event, setelement(tuple_size(Trace), Trace, T));
_ ->
erlide_jrpc:event(trace_event, Trace)
end;
true -> ok
end,
State + 1
end.
create_info_handler(Path) ->
fun(_Fd, Trace, _TraceInfo, State) ->
{First, Count, Start_date, End_date} = State,
case Trace of
end_of_trace ->
case First of
true ->
erlide_jrpc:event(trace_event, {file_info, empty});
false ->
erlide_jrpc:event(trace_event, {file_info, calendar:now_to_local_time(Start_date), calendar:now_to_local_time(End_date), Path, Count})
end;
{trace_ts, _, _, _, Time} ->
case First of
true -> {false, Count + 1, Time, Time};
_ -> {false, Count + 1, Start_date, Time}
end;
{trace_ts, _, _, _, _, Time} ->
case First of
true -> {false, Count + 1, Time, Time};
_ -> {false, Count + 1, Start_date, Time}
end;
_ -> {First, Count + 1, Start_date, End_date}
end
end.
str2fun(S) ->
case erl_scan:string(S) of
{error, ErrorInfo, _} ->
{error, ErrorInfo};
{ok, Tokens, _} ->
case erl_parse:parse_exprs(Tokens) of
{error, ErrorInfo} ->
{error, ErrorInfo};
{ok, Expr_list} ->
{value, Value, _} = erl_eval:exprs(Expr_list, erl_eval:new_bindings()),
{ok, Value}
end
end.
str2ms(S) ->
try (str2fun(S)) of
{error, ErrorInfo} ->
{error, standard_info, ErrorInfo};
{ok, Fun} ->
try dbg:fun2ms(Fun) of
{error, ErrorInfo} -> {error, standard_info, ErrorInfo};
Result -> {ok, Result}
catch error:function_clause -> {error, not_fun} end
catch
error:{unbound_var, X} -> {error, unbound_var, X}
end.
|
890d4c8d5599291a1a0b06f1e5215cb5e9b95c2ccc05f97b4cc2cdfa68a1c08f | suttonshire/ocaml-xsk | xsk.ml | type buffer = (char, Bigarray.int8_unsigned_elt, Bigarray.c_layout) Bigarray.Array1.t
module Desc = struct
type t =
{ mutable addr : int
; mutable len : int
; mutable options : int
}
let create () = { addr = 0; len = 0; options = 0 }
end
exception Xsk_C_Failure of int
let _ = Callback.register_exception "xsk exception" (Xsk_C_Failure 0)
(* src/libbpf_c/libbpf/include/uapi/linux/if_xdp.h *)
module Umem_flag = struct
type t = XDP_UMEM_UNALIGNED_CHUNK_FLAG
let to_int = function
| XDP_UMEM_UNALIGNED_CHUNK_FLAG -> 1 lsl 0
;;
let to_bitfield l = List.fold_left (fun bitfield flag -> bitfield lor to_int flag) 0 l
end
(* src/libbpf_c/libbpf/include/uapi/linux/if_link.h *)
module Xdp_flag = struct
type t =
| XDP_FLAGS_UPDATE_IF_NOEXIST
| XDP_FLAGS_SKB_MODE
| XDP_FLAGS_DRV_MODE
| XDP_FLAGS_HW_MODE
| XDP_FLAGS_REPLACE
let to_int = function
| XDP_FLAGS_UPDATE_IF_NOEXIST -> 1 lsl 0
| XDP_FLAGS_SKB_MODE -> 1 lsl 1
| XDP_FLAGS_DRV_MODE -> 1 lsl 2
| XDP_FLAGS_HW_MODE -> 1 lsl 3
| XDP_FLAGS_REPLACE -> 1 lsl 4
;;
let to_bitfield l = List.fold_left (fun bitfield flag -> bitfield lor to_int flag) 0 l
end
(* src/libbpf_c/libbpf/src/xsk.h *)
module Libbpf_flag = struct
type t = XSK_LIBBPF_FLAGS__INHIBIT_PROG_LOAD
let to_int = function
| XSK_LIBBPF_FLAGS__INHIBIT_PROG_LOAD -> 1 lsl 0
;;
let to_bitfield l = List.fold_left (fun bitfield flag -> bitfield lor to_int flag) 0 l
end
(* src/libbpf_c/libbpf/include/uapi/linux/if_xdp.h *)
module Bind_flag = struct
type t =
| XDP_SHARED_UMEM
| XDP_COPY
| XDP_ZEROCOPY
| XDP_USE_NEED_WAKEUP
let to_int = function
| XDP_SHARED_UMEM -> 1 lsl 0
| XDP_COPY -> 1 lsl 1
| XDP_ZEROCOPY -> 1 lsl 2
| XDP_USE_NEED_WAKEUP -> 1 lsl 3
;;
let to_bitfield l = List.fold_left (fun bitfield flag -> bitfield lor to_int flag) 0 l
end
type ring_cons
type ring_prod
type umem
type socket
external needs_wakeup_stub : ring_prod -> bool = "ring_prod_needs_wakeup" [@@noalloc]
external socket_sendto_stub
: (int[@untagged])
-> unit
= "socket_sendto" "socket_sendto_nat"
[@@noalloc]
external socket_pollin_stub
: (int[@untagged])
-> (int[@untagged])
-> bool
= "socket_pollin" "socket_pollin_nat"
[@@noalloc]
external socket_pollout_stub
: (int[@untagged])
-> (int[@untagged])
-> bool
= "socket_pollout" "socket_pollout_nat"
[@@noalloc]
module Comp_queue = struct
type t = ring_cons
let create ring = ring
external consume_stub
: ring_cons
-> int array
-> (int[@untagged])
-> (int[@untagged])
-> (int[@untagged])
= "comp_queue_cons" "comp_queue_cons_nat"
[@@noalloc]
let consume t arr ~pos ~nb =
ignore (arr.(pos + nb - 1) : int);
consume_stub t arr pos nb
;;
end
module Fill_queue = struct
type t = ring_prod
let create ring = ring
let needs_wakeup t = needs_wakeup_stub t
external produce_stub
: ring_prod
-> int array
-> (int[@untagged])
-> (int[@untagged])
-> (int[@untagged])
= "fill_queue_produce" "fill_queue_produce_nat"
[@@noalloc]
external produce_and_wakeup_stub
: ring_prod
-> (int[@untagged])
-> (int[@untagged])
-> int array
-> (int[@untagged])
-> (int[@untagged])
-> (int[@untagged])
= "fill_queue_produce_and_wakeup" "fill_queue_produce_and_wakeup_nat"
[@@noalloc]
let produce t arr ~pos ~nb =
ignore (arr.(pos + nb - 1) : int);
produce_stub t arr pos nb
;;
let produce_and_wakeup_kernel t (fd : Unix.file_descr) arr ~pos ~nb =
ignore (arr.(pos + nb - 1) : int);
produce_and_wakeup_stub t (Obj.magic fd) 1000 arr pos nb
;;
end
module Tx_queue = struct
type t = ring_prod
let create ring = ring
let needs_wakeup t = needs_wakeup_stub t
external produce_stub
: ring_prod
-> Desc.t array
-> (int[@untagged])
-> (int[@untagged])
-> (int[@untagged])
= "tx_queue_produce" "tx_queue_produce_nat"
[@@noalloc]
let produce t arr ~pos ~nb =
ignore (arr.(pos + nb - 1) : Desc.t);
produce_stub t arr pos nb
;;
let produce_and_wakeup_kernel t (fd : Unix.file_descr) arr ~pos ~nb =
ignore (arr.(pos + nb - 1) : Desc.t);
let ret = produce_stub t arr pos nb in
if ret >= 0 && needs_wakeup t then socket_sendto_stub (Obj.magic fd);
ret
;;
end
module Rx_queue = struct
type t = ring_cons
let create ring = ring
external consume_stub
: t
-> Desc.t array
-> (int[@untagged])
-> (int[@untagged])
-> (int[@untagged])
= "rx_queue_cons" "rx_queue_cons_nat"
[@@noalloc]
external poll_and_consume_stub
: t
-> (int[@untagged])
-> (int[@untagged])
-> Desc.t array
-> (int[@untagged])
-> (int[@untagged])
-> (int[@untagged])
= "rx_queue_poll_cons" "rx_queue_poll_cons_nat"
[@@noalloc]
let consume t arr ~pos ~nb =
ignore (arr.(pos + nb - 1) : Desc.t);
consume_stub t arr pos nb
;;
let poll_and_consume t (fd : Unix.file_descr) timeout arr ~pos ~nb =
ignore (arr.(pos + nb - 1) : Desc.t);
let ret = poll_and_consume_stub t (Obj.magic fd) timeout arr pos nb in
if ret < 0 then None else Some ret
;;
end
module Umem = struct
type t =
{ umem : umem
; fill : ring_prod
; comp : ring_cons
}
module Config = struct
type t =
{ fill_size : int
; comp_size : int
; frame_size : int
; frame_headroom : int
; flags : Umem_flag.t list
}
type t_internal =
{ fill_size : int
; comp_size : int
; frame_size : int
; frame_headroom : int
; flags_bitfield : int
}
let t_internal_of_t t =
let flags_bitfield = Umem_flag.to_bitfield t.flags in
{ fill_size = t.fill_size
; comp_size = t.comp_size
; frame_size = t.frame_size
; frame_headroom = t.frame_headroom
; flags_bitfield
}
;;
let default_fill_queue_num_desc = 2048
let default_comp_queue_num_desc = 2048
let default_frame_size = 4096
let default_frame_headroom = 0
let default_flags = []
let default =
{ fill_size = default_fill_queue_num_desc
; comp_size = default_comp_queue_num_desc
; frame_size = default_frame_size
; frame_headroom = default_frame_headroom
; flags = default_flags
}
;;
end
external create_stub
: buffer
-> int
-> Config.t_internal
-> umem * ring_prod * ring_cons
= "umem_create"
let create mem size config =
let config = Config.t_internal_of_t config in
let umem, fill, comp = create_stub mem size config in
{ umem; fill; comp }, Fill_queue.create fill, Comp_queue.create comp
;;
external delete_stub : umem -> unit = "umem_delete"
let delete t = delete_stub t.umem
end
module Socket = struct
type t =
{ sock : socket
; rx : ring_cons
; tx : ring_prod
}
module Config = struct
type t =
{ rx_size : int
; tx_size : int
; libbpf_flags : Libbpf_flag.t list
; xdp_flags : Xdp_flag.t list
; bind_flags : Bind_flag.t list
}
type t_internal =
{ rx_size : int
; tx_size : int
; libbpf_flags_bitfield : int
; xdp_flags_bitfield : int
; bind_flags_bitfield : int
}
let t_internal_of_t t =
let libbpf_flags_bitfield = Libbpf_flag.to_bitfield t.libbpf_flags in
let xdp_flags_bitfield = Xdp_flag.to_bitfield t.xdp_flags in
let bind_flags_bitfield = Bind_flag.to_bitfield t.bind_flags in
{ tx_size = t.tx_size
; rx_size = t.rx_size
; libbpf_flags_bitfield
; xdp_flags_bitfield
; bind_flags_bitfield
}
;;
let default_rx_queue_num_desc = 2048
let default_tx_queue_num_desc = 2048
let default_libbpf_flags = []
let default_xdp_flags = []
let default_bind_flags = []
let default =
{ rx_size = default_rx_queue_num_desc
; tx_size = default_tx_queue_num_desc
; libbpf_flags = default_libbpf_flags
; xdp_flags = default_xdp_flags
; bind_flags = default_bind_flags
}
;;
end
external create_stub
: string
-> int
-> umem
-> Config.t_internal
-> socket * ring_cons * ring_prod
= "socket_create"
let create ifname queue_id umem config =
let config = Config.t_internal_of_t config in
let sock, rx, tx = create_stub ifname queue_id umem.Umem.umem config in
{ sock; rx; tx }, Rx_queue.create rx, Tx_queue.create tx
;;
external delete_stub : socket -> unit = "socket_delete"
let delete t = delete_stub t.sock
external fd_stub : socket -> Unix.file_descr = "socket_fd"
let fd t = fd_stub t.sock
let wakeup_kernel_with_sendto t = socket_sendto_stub (Obj.magic (fd t))
let pollin t timeout = socket_pollin_stub (Obj.magic (fd t)) timeout
let pollout t timeout = socket_pollout_stub (Obj.magic (fd t)) timeout
end
| null | https://raw.githubusercontent.com/suttonshire/ocaml-xsk/3b0de5e3ea92f3f6fb1856a429ae39c2dbd01490/src/xsk.ml | ocaml | src/libbpf_c/libbpf/include/uapi/linux/if_xdp.h
src/libbpf_c/libbpf/include/uapi/linux/if_link.h
src/libbpf_c/libbpf/src/xsk.h
src/libbpf_c/libbpf/include/uapi/linux/if_xdp.h | type buffer = (char, Bigarray.int8_unsigned_elt, Bigarray.c_layout) Bigarray.Array1.t
module Desc = struct
type t =
{ mutable addr : int
; mutable len : int
; mutable options : int
}
let create () = { addr = 0; len = 0; options = 0 }
end
exception Xsk_C_Failure of int
let _ = Callback.register_exception "xsk exception" (Xsk_C_Failure 0)
module Umem_flag = struct
type t = XDP_UMEM_UNALIGNED_CHUNK_FLAG
let to_int = function
| XDP_UMEM_UNALIGNED_CHUNK_FLAG -> 1 lsl 0
;;
let to_bitfield l = List.fold_left (fun bitfield flag -> bitfield lor to_int flag) 0 l
end
module Xdp_flag = struct
type t =
| XDP_FLAGS_UPDATE_IF_NOEXIST
| XDP_FLAGS_SKB_MODE
| XDP_FLAGS_DRV_MODE
| XDP_FLAGS_HW_MODE
| XDP_FLAGS_REPLACE
let to_int = function
| XDP_FLAGS_UPDATE_IF_NOEXIST -> 1 lsl 0
| XDP_FLAGS_SKB_MODE -> 1 lsl 1
| XDP_FLAGS_DRV_MODE -> 1 lsl 2
| XDP_FLAGS_HW_MODE -> 1 lsl 3
| XDP_FLAGS_REPLACE -> 1 lsl 4
;;
let to_bitfield l = List.fold_left (fun bitfield flag -> bitfield lor to_int flag) 0 l
end
module Libbpf_flag = struct
type t = XSK_LIBBPF_FLAGS__INHIBIT_PROG_LOAD
let to_int = function
| XSK_LIBBPF_FLAGS__INHIBIT_PROG_LOAD -> 1 lsl 0
;;
let to_bitfield l = List.fold_left (fun bitfield flag -> bitfield lor to_int flag) 0 l
end
module Bind_flag = struct
type t =
| XDP_SHARED_UMEM
| XDP_COPY
| XDP_ZEROCOPY
| XDP_USE_NEED_WAKEUP
let to_int = function
| XDP_SHARED_UMEM -> 1 lsl 0
| XDP_COPY -> 1 lsl 1
| XDP_ZEROCOPY -> 1 lsl 2
| XDP_USE_NEED_WAKEUP -> 1 lsl 3
;;
let to_bitfield l = List.fold_left (fun bitfield flag -> bitfield lor to_int flag) 0 l
end
type ring_cons
type ring_prod
type umem
type socket
external needs_wakeup_stub : ring_prod -> bool = "ring_prod_needs_wakeup" [@@noalloc]
external socket_sendto_stub
: (int[@untagged])
-> unit
= "socket_sendto" "socket_sendto_nat"
[@@noalloc]
external socket_pollin_stub
: (int[@untagged])
-> (int[@untagged])
-> bool
= "socket_pollin" "socket_pollin_nat"
[@@noalloc]
external socket_pollout_stub
: (int[@untagged])
-> (int[@untagged])
-> bool
= "socket_pollout" "socket_pollout_nat"
[@@noalloc]
module Comp_queue = struct
type t = ring_cons
let create ring = ring
external consume_stub
: ring_cons
-> int array
-> (int[@untagged])
-> (int[@untagged])
-> (int[@untagged])
= "comp_queue_cons" "comp_queue_cons_nat"
[@@noalloc]
let consume t arr ~pos ~nb =
ignore (arr.(pos + nb - 1) : int);
consume_stub t arr pos nb
;;
end
module Fill_queue = struct
type t = ring_prod
let create ring = ring
let needs_wakeup t = needs_wakeup_stub t
external produce_stub
: ring_prod
-> int array
-> (int[@untagged])
-> (int[@untagged])
-> (int[@untagged])
= "fill_queue_produce" "fill_queue_produce_nat"
[@@noalloc]
external produce_and_wakeup_stub
: ring_prod
-> (int[@untagged])
-> (int[@untagged])
-> int array
-> (int[@untagged])
-> (int[@untagged])
-> (int[@untagged])
= "fill_queue_produce_and_wakeup" "fill_queue_produce_and_wakeup_nat"
[@@noalloc]
let produce t arr ~pos ~nb =
ignore (arr.(pos + nb - 1) : int);
produce_stub t arr pos nb
;;
let produce_and_wakeup_kernel t (fd : Unix.file_descr) arr ~pos ~nb =
ignore (arr.(pos + nb - 1) : int);
produce_and_wakeup_stub t (Obj.magic fd) 1000 arr pos nb
;;
end
module Tx_queue = struct
type t = ring_prod
let create ring = ring
let needs_wakeup t = needs_wakeup_stub t
external produce_stub
: ring_prod
-> Desc.t array
-> (int[@untagged])
-> (int[@untagged])
-> (int[@untagged])
= "tx_queue_produce" "tx_queue_produce_nat"
[@@noalloc]
let produce t arr ~pos ~nb =
ignore (arr.(pos + nb - 1) : Desc.t);
produce_stub t arr pos nb
;;
let produce_and_wakeup_kernel t (fd : Unix.file_descr) arr ~pos ~nb =
ignore (arr.(pos + nb - 1) : Desc.t);
let ret = produce_stub t arr pos nb in
if ret >= 0 && needs_wakeup t then socket_sendto_stub (Obj.magic fd);
ret
;;
end
module Rx_queue = struct
type t = ring_cons
let create ring = ring
external consume_stub
: t
-> Desc.t array
-> (int[@untagged])
-> (int[@untagged])
-> (int[@untagged])
= "rx_queue_cons" "rx_queue_cons_nat"
[@@noalloc]
external poll_and_consume_stub
: t
-> (int[@untagged])
-> (int[@untagged])
-> Desc.t array
-> (int[@untagged])
-> (int[@untagged])
-> (int[@untagged])
= "rx_queue_poll_cons" "rx_queue_poll_cons_nat"
[@@noalloc]
let consume t arr ~pos ~nb =
ignore (arr.(pos + nb - 1) : Desc.t);
consume_stub t arr pos nb
;;
let poll_and_consume t (fd : Unix.file_descr) timeout arr ~pos ~nb =
ignore (arr.(pos + nb - 1) : Desc.t);
let ret = poll_and_consume_stub t (Obj.magic fd) timeout arr pos nb in
if ret < 0 then None else Some ret
;;
end
module Umem = struct
type t =
{ umem : umem
; fill : ring_prod
; comp : ring_cons
}
module Config = struct
type t =
{ fill_size : int
; comp_size : int
; frame_size : int
; frame_headroom : int
; flags : Umem_flag.t list
}
type t_internal =
{ fill_size : int
; comp_size : int
; frame_size : int
; frame_headroom : int
; flags_bitfield : int
}
let t_internal_of_t t =
let flags_bitfield = Umem_flag.to_bitfield t.flags in
{ fill_size = t.fill_size
; comp_size = t.comp_size
; frame_size = t.frame_size
; frame_headroom = t.frame_headroom
; flags_bitfield
}
;;
let default_fill_queue_num_desc = 2048
let default_comp_queue_num_desc = 2048
let default_frame_size = 4096
let default_frame_headroom = 0
let default_flags = []
let default =
{ fill_size = default_fill_queue_num_desc
; comp_size = default_comp_queue_num_desc
; frame_size = default_frame_size
; frame_headroom = default_frame_headroom
; flags = default_flags
}
;;
end
external create_stub
: buffer
-> int
-> Config.t_internal
-> umem * ring_prod * ring_cons
= "umem_create"
let create mem size config =
let config = Config.t_internal_of_t config in
let umem, fill, comp = create_stub mem size config in
{ umem; fill; comp }, Fill_queue.create fill, Comp_queue.create comp
;;
external delete_stub : umem -> unit = "umem_delete"
let delete t = delete_stub t.umem
end
module Socket = struct
type t =
{ sock : socket
; rx : ring_cons
; tx : ring_prod
}
module Config = struct
type t =
{ rx_size : int
; tx_size : int
; libbpf_flags : Libbpf_flag.t list
; xdp_flags : Xdp_flag.t list
; bind_flags : Bind_flag.t list
}
type t_internal =
{ rx_size : int
; tx_size : int
; libbpf_flags_bitfield : int
; xdp_flags_bitfield : int
; bind_flags_bitfield : int
}
let t_internal_of_t t =
let libbpf_flags_bitfield = Libbpf_flag.to_bitfield t.libbpf_flags in
let xdp_flags_bitfield = Xdp_flag.to_bitfield t.xdp_flags in
let bind_flags_bitfield = Bind_flag.to_bitfield t.bind_flags in
{ tx_size = t.tx_size
; rx_size = t.rx_size
; libbpf_flags_bitfield
; xdp_flags_bitfield
; bind_flags_bitfield
}
;;
let default_rx_queue_num_desc = 2048
let default_tx_queue_num_desc = 2048
let default_libbpf_flags = []
let default_xdp_flags = []
let default_bind_flags = []
let default =
{ rx_size = default_rx_queue_num_desc
; tx_size = default_tx_queue_num_desc
; libbpf_flags = default_libbpf_flags
; xdp_flags = default_xdp_flags
; bind_flags = default_bind_flags
}
;;
end
external create_stub
: string
-> int
-> umem
-> Config.t_internal
-> socket * ring_cons * ring_prod
= "socket_create"
let create ifname queue_id umem config =
let config = Config.t_internal_of_t config in
let sock, rx, tx = create_stub ifname queue_id umem.Umem.umem config in
{ sock; rx; tx }, Rx_queue.create rx, Tx_queue.create tx
;;
external delete_stub : socket -> unit = "socket_delete"
let delete t = delete_stub t.sock
external fd_stub : socket -> Unix.file_descr = "socket_fd"
let fd t = fd_stub t.sock
let wakeup_kernel_with_sendto t = socket_sendto_stub (Obj.magic (fd t))
let pollin t timeout = socket_pollin_stub (Obj.magic (fd t)) timeout
let pollout t timeout = socket_pollout_stub (Obj.magic (fd t)) timeout
end
|
7292f7570bc725cddb15bef43ed34cbd0d03dfa02665d2301b4cf0919b9b6bf5 | jesperes/aoc_erlang | aoc2015_day12.erl | -module(aoc2015_day12).
-behavior(aoc_puzzle).
-export([parse/1, solve1/1, solve2/1, info/0]).
-include("aoc_puzzle.hrl").
-spec info() -> aoc_puzzle().
info() ->
#aoc_puzzle{module = ?MODULE,
year = 2015,
day = 12,
name = "JSAbacusFramework.io",
expected = {119433, 68466},
has_input_file = true}.
-type input_type() :: jsone:json_object().
-type result1_type() :: integer().
-type result2_type() :: result1_type().
-spec parse(Input :: binary()) -> input_type().
parse(Input) ->
jsone:decode(Input).
-spec solve1(Input :: input_type()) -> result1_type().
solve1(Input) ->
count(Input).
-spec solve2(Input :: input_type()) -> result2_type().
solve2(Input) ->
count_nored(Input).
count(X) when is_number(X) ->
X;
count(X) when is_binary(X) ->
0;
count(X) when is_map(X) ->
maps:fold(fun(_, V, Acc) -> count(V) + Acc end, 0, X);
count(X) when is_list(X) ->
lists:foldl(fun(V, Acc) -> count(V) + Acc end, 0, X).
count_nored(X) when is_number(X) ->
X;
count_nored(X) when is_binary(X) ->
0;
count_nored(X) when is_map(X) ->
{IsRed, Sum} =
maps:fold(fun (_, <<"red">>, {_, _Acc}) ->
{true, 0};
(_, V, {IsRed, Acc}) ->
{IsRed, Acc + count_nored(V)}
end,
{false, 0},
X),
case IsRed of
true ->
0;
false ->
Sum
end;
count_nored(X) when is_list(X) ->
lists:foldl(fun(V, Acc) -> count_nored(V) + Acc end, 0, X).
| null | https://raw.githubusercontent.com/jesperes/aoc_erlang/ec0786088fb9ab886ee57e17ea0149ba3e91810a/src/2015/aoc2015_day12.erl | erlang | -module(aoc2015_day12).
-behavior(aoc_puzzle).
-export([parse/1, solve1/1, solve2/1, info/0]).
-include("aoc_puzzle.hrl").
-spec info() -> aoc_puzzle().
info() ->
#aoc_puzzle{module = ?MODULE,
year = 2015,
day = 12,
name = "JSAbacusFramework.io",
expected = {119433, 68466},
has_input_file = true}.
-type input_type() :: jsone:json_object().
-type result1_type() :: integer().
-type result2_type() :: result1_type().
-spec parse(Input :: binary()) -> input_type().
parse(Input) ->
jsone:decode(Input).
-spec solve1(Input :: input_type()) -> result1_type().
solve1(Input) ->
count(Input).
-spec solve2(Input :: input_type()) -> result2_type().
solve2(Input) ->
count_nored(Input).
count(X) when is_number(X) ->
X;
count(X) when is_binary(X) ->
0;
count(X) when is_map(X) ->
maps:fold(fun(_, V, Acc) -> count(V) + Acc end, 0, X);
count(X) when is_list(X) ->
lists:foldl(fun(V, Acc) -> count(V) + Acc end, 0, X).
count_nored(X) when is_number(X) ->
X;
count_nored(X) when is_binary(X) ->
0;
count_nored(X) when is_map(X) ->
{IsRed, Sum} =
maps:fold(fun (_, <<"red">>, {_, _Acc}) ->
{true, 0};
(_, V, {IsRed, Acc}) ->
{IsRed, Acc + count_nored(V)}
end,
{false, 0},
X),
case IsRed of
true ->
0;
false ->
Sum
end;
count_nored(X) when is_list(X) ->
lists:foldl(fun(V, Acc) -> count_nored(V) + Acc end, 0, X).
| |
da2338fd44ee9150d9c1cd7ed3febf79521731283f34cff121cc195423367cd8 | janestreet/core | comparable_intf.ml | open! Import
module type Infix = Base.Comparable.Infix
module type Comparisons = Base.Comparable.Comparisons
module type With_compare = Base.Comparable.With_compare
module type Validate = sig
type t
val validate_lbound : min:t Maybe_bound.t -> t Validate.check
val validate_ubound : max:t Maybe_bound.t -> t Validate.check
val validate_bound : min:t Maybe_bound.t -> max:t Maybe_bound.t -> t Validate.check
end
module type Validate_with_zero = sig
type t
include Validate with type t := t
val validate_positive : t Validate.check
val validate_non_negative : t Validate.check
val validate_negative : t Validate.check
val validate_non_positive : t Validate.check
end
module type With_zero = sig
type t
include Base.Comparable.With_zero with type t := t
include Validate_with_zero with type t := t
end
module type S_common = sig
include Base.Comparable.S
include Validate with type t := t
module Replace_polymorphic_compare : Comparisons with type t := t
end
* Usage example :
{ [
module : sig
type t = ...
include Comparable . S with type t : = t
end
] }
Then use [ Comparable . Make ] in the struct ( see comparable.mli for an example ) .
{[
module Foo : sig
type t = ...
include Comparable.S with type t := t
end
]}
Then use [Comparable.Make] in the struct (see comparable.mli for an example). *)
module type S_plain = sig
include S_common
module Map :
Map.S_plain with type Key.t = t with type Key.comparator_witness = comparator_witness
module Set :
Set.S_plain with type Elt.t = t with type Elt.comparator_witness = comparator_witness
end
module type S = sig
include S_common
module Map :
Map.S with type Key.t = t with type Key.comparator_witness = comparator_witness
module Set :
Set.S with type Elt.t = t with type Elt.comparator_witness = comparator_witness
end
module type Map_and_set_binable = sig
type t
include Comparator.S with type t := t
module Map :
Map.S_binable
with type Key.t = t
with type Key.comparator_witness = comparator_witness
module Set :
Set.S_binable
with type Elt.t = t
with type Elt.comparator_witness = comparator_witness
end
module type S_binable = sig
include S_common
include
Map_and_set_binable
with type t := t
with type comparator_witness := comparator_witness
end
module type Comparable = sig
* Comparable extends { { ! Base . Comparable}[Base . Comparable ] } and provides functions for
comparing like types .
Usage example :
{ [
module = struct
module T = struct
type t = ... [ @@deriving compare , sexp ]
end
include T
include Comparable . Make ( T )
end
] }
Then include [ Comparable . S ] in the signature ( see { ! Comparable_intf } for an
example ) .
To add an [ Infix ] submodule :
{ [
module C = Comparable . Make ( T )
include C
module Infix = ( C : Comparable . Infix with type t : = t )
] }
Common pattern : Define a module [ O ] with a restricted signature . It aims to be
( locally ) opened to bring useful operators into scope without shadowing unexpected
variable names . E.g. in the [ Date ] module :
{ [
module O = struct
include ( C : Comparable . Infix with type t : = t )
let to_string t = ..
end
] }
Opening [ Date ] would shadow [ now ] , but opening [ Date . O ] does n't :
{ [
let now = .. in
let someday = .. in
Date . O.(now > someday )
] }
comparing like types.
Usage example:
{[
module Foo = struct
module T = struct
type t = ... [@@deriving compare, sexp]
end
include T
include Comparable.Make (T)
end
]}
Then include [Comparable.S] in the signature (see {!Comparable_intf} for an
example).
To add an [Infix] submodule:
{[
module C = Comparable.Make (T)
include C
module Infix = (C : Comparable.Infix with type t := t)
]}
Common pattern: Define a module [O] with a restricted signature. It aims to be
(locally) opened to bring useful operators into scope without shadowing unexpected
variable names. E.g. in the [Date] module:
{[
module O = struct
include (C : Comparable.Infix with type t := t)
let to_string t = ..
end
]}
Opening [Date] would shadow [now], but opening [Date.O] doesn't:
{[
let now = .. in
let someday = .. in
Date.O.(now > someday)
]}
*)
module type Infix = Infix
module type Map_and_set_binable = Map_and_set_binable
module type Comparisons = Comparisons
module type S_plain = S_plain
module type S = S
module type S_binable = S_binable
module type S_common = S_common
module type Validate = Validate
module type Validate_with_zero = Validate_with_zero
module type With_compare = With_compare
module type With_zero = With_zero
include With_compare
(** Inherit comparability from a component. *)
module Inherit (C : sig
type t [@@deriving compare]
end) (T : sig
type t [@@deriving sexp]
val component : t -> C.t
end) : S with type t := T.t
* { 2 Comparison - only Functors }
These functors require only [ type t ] and [ val compare ] . They do not require [ val
sexp_of_t ] , and do not generate container datatypes .
These functors require only [type t] and [val compare]. They do not require [val
sexp_of_t], and do not generate container datatypes.
*)
module Infix (T : sig
type t [@@deriving compare]
end) : Infix with type t := T.t
module Comparisons (T : sig
type t [@@deriving compare]
end) : Comparisons with type t := T.t
* { 2 Make Functors }
The Comparable Make functor family allows users to choose among the following
attributes :
- [ * _ using_comparator ] or not
- [ * _ binable ] or not
- [ * _ plain ] or not
Thus there are functors like [ Make_plain ] or [ Make_binable_using_comparator ] , etc .
The Comparable Make functor family allows users to choose among the following
attributes:
- [*_using_comparator] or not
- [*_binable] or not
- [*_plain] or not
Thus there are functors like [Make_plain] or [Make_binable_using_comparator], etc.
*)
module Make_plain (T : sig
type t [@@deriving compare, sexp_of]
end) : S_plain with type t := T.t
module Make (T : sig
type t [@@deriving compare, sexp]
end) : S with type t := T.t
module Make_plain_using_comparator (T : sig
type t [@@deriving sexp_of]
include Comparator.S with type t := t
end) : S_plain with type t := T.t with type comparator_witness := T.comparator_witness
module Make_using_comparator (T : sig
type t [@@deriving sexp]
include Comparator.S with type t := t
end) : S with type t := T.t with type comparator_witness := T.comparator_witness
module Make_binable (T : sig
type t [@@deriving bin_io, compare, sexp]
end) : S_binable with type t := T.t
module Make_binable_using_comparator (T : sig
type t [@@deriving bin_io, sexp]
include Comparator.S with type t := t
end) : S_binable with type t := T.t with type comparator_witness := T.comparator_witness
module Extend
(M : Base.Comparable.S) (X : sig
type t = M.t [@@deriving sexp]
end) : S with type t := M.t with type comparator_witness := M.comparator_witness
module Extend_binable
(M : Base.Comparable.S) (X : sig
type t = M.t [@@deriving bin_io, sexp]
end) :
S_binable with type t := M.t with type comparator_witness := M.comparator_witness
module Map_and_set_binable (T : sig
type t [@@deriving bin_io, compare, sexp]
end) : Map_and_set_binable with type t := T.t
module Map_and_set_binable_using_comparator (T : sig
type t [@@deriving bin_io, compare, sexp]
include Comparator.S with type t := t
end) :
Map_and_set_binable
with type t := T.t
with type comparator_witness := T.comparator_witness
module Poly (T : sig
type t [@@deriving sexp]
end) : S with type t := T.t
module Validate (T : sig
type t [@@deriving compare, sexp_of]
end) : Validate with type t := T.t
module Validate_with_zero (T : sig
type t [@@deriving compare, sexp_of]
val zero : t
end) : Validate_with_zero with type t := T.t
module With_zero (T : sig
type t [@@deriving compare, sexp_of]
val zero : t
end) : With_zero with type t := T.t
(** The following module types and functors may be used to define stable modules: *)
module Stable : sig
module V1 : sig
module type S = sig
type comparable
type comparator_witness
module Map :
Map.Stable.V1.S
with type key := comparable
with type comparator_witness := comparator_witness
module Set :
Set.Stable.V1.S
with type elt := comparable
with type elt_comparator_witness := comparator_witness
end
module Make (X : Stable_module_types.S0) :
S with type comparable := X.t with type comparator_witness := X.comparator_witness
module With_stable_witness : sig
module type S = sig
type comparable
type comparator_witness
module Map :
Map.Stable.V1.With_stable_witness.S
with type key := comparable
with type comparator_witness := comparator_witness
module Set :
Set.Stable.V1.With_stable_witness.S
with type elt := comparable
with type elt_comparator_witness := comparator_witness
end
module Make (X : Stable_module_types.With_stable_witness.S0) :
S
with type comparable := X.t
with type comparator_witness := X.comparator_witness
end
end
end
end
| null | https://raw.githubusercontent.com/janestreet/core/f382131ccdcb4a8cd21ebf9a49fa42dcf8183de6/core/src/comparable_intf.ml | ocaml | * Inherit comparability from a component.
* The following module types and functors may be used to define stable modules: | open! Import
module type Infix = Base.Comparable.Infix
module type Comparisons = Base.Comparable.Comparisons
module type With_compare = Base.Comparable.With_compare
module type Validate = sig
type t
val validate_lbound : min:t Maybe_bound.t -> t Validate.check
val validate_ubound : max:t Maybe_bound.t -> t Validate.check
val validate_bound : min:t Maybe_bound.t -> max:t Maybe_bound.t -> t Validate.check
end
module type Validate_with_zero = sig
type t
include Validate with type t := t
val validate_positive : t Validate.check
val validate_non_negative : t Validate.check
val validate_negative : t Validate.check
val validate_non_positive : t Validate.check
end
module type With_zero = sig
type t
include Base.Comparable.With_zero with type t := t
include Validate_with_zero with type t := t
end
module type S_common = sig
include Base.Comparable.S
include Validate with type t := t
module Replace_polymorphic_compare : Comparisons with type t := t
end
* Usage example :
{ [
module : sig
type t = ...
include Comparable . S with type t : = t
end
] }
Then use [ Comparable . Make ] in the struct ( see comparable.mli for an example ) .
{[
module Foo : sig
type t = ...
include Comparable.S with type t := t
end
]}
Then use [Comparable.Make] in the struct (see comparable.mli for an example). *)
module type S_plain = sig
include S_common
module Map :
Map.S_plain with type Key.t = t with type Key.comparator_witness = comparator_witness
module Set :
Set.S_plain with type Elt.t = t with type Elt.comparator_witness = comparator_witness
end
module type S = sig
include S_common
module Map :
Map.S with type Key.t = t with type Key.comparator_witness = comparator_witness
module Set :
Set.S with type Elt.t = t with type Elt.comparator_witness = comparator_witness
end
module type Map_and_set_binable = sig
type t
include Comparator.S with type t := t
module Map :
Map.S_binable
with type Key.t = t
with type Key.comparator_witness = comparator_witness
module Set :
Set.S_binable
with type Elt.t = t
with type Elt.comparator_witness = comparator_witness
end
module type S_binable = sig
include S_common
include
Map_and_set_binable
with type t := t
with type comparator_witness := comparator_witness
end
module type Comparable = sig
* Comparable extends { { ! Base . Comparable}[Base . Comparable ] } and provides functions for
comparing like types .
Usage example :
{ [
module = struct
module T = struct
type t = ... [ @@deriving compare , sexp ]
end
include T
include Comparable . Make ( T )
end
] }
Then include [ Comparable . S ] in the signature ( see { ! Comparable_intf } for an
example ) .
To add an [ Infix ] submodule :
{ [
module C = Comparable . Make ( T )
include C
module Infix = ( C : Comparable . Infix with type t : = t )
] }
Common pattern : Define a module [ O ] with a restricted signature . It aims to be
( locally ) opened to bring useful operators into scope without shadowing unexpected
variable names . E.g. in the [ Date ] module :
{ [
module O = struct
include ( C : Comparable . Infix with type t : = t )
let to_string t = ..
end
] }
Opening [ Date ] would shadow [ now ] , but opening [ Date . O ] does n't :
{ [
let now = .. in
let someday = .. in
Date . O.(now > someday )
] }
comparing like types.
Usage example:
{[
module Foo = struct
module T = struct
type t = ... [@@deriving compare, sexp]
end
include T
include Comparable.Make (T)
end
]}
Then include [Comparable.S] in the signature (see {!Comparable_intf} for an
example).
To add an [Infix] submodule:
{[
module C = Comparable.Make (T)
include C
module Infix = (C : Comparable.Infix with type t := t)
]}
Common pattern: Define a module [O] with a restricted signature. It aims to be
(locally) opened to bring useful operators into scope without shadowing unexpected
variable names. E.g. in the [Date] module:
{[
module O = struct
include (C : Comparable.Infix with type t := t)
let to_string t = ..
end
]}
Opening [Date] would shadow [now], but opening [Date.O] doesn't:
{[
let now = .. in
let someday = .. in
Date.O.(now > someday)
]}
*)
module type Infix = Infix
module type Map_and_set_binable = Map_and_set_binable
module type Comparisons = Comparisons
module type S_plain = S_plain
module type S = S
module type S_binable = S_binable
module type S_common = S_common
module type Validate = Validate
module type Validate_with_zero = Validate_with_zero
module type With_compare = With_compare
module type With_zero = With_zero
include With_compare
module Inherit (C : sig
type t [@@deriving compare]
end) (T : sig
type t [@@deriving sexp]
val component : t -> C.t
end) : S with type t := T.t
* { 2 Comparison - only Functors }
These functors require only [ type t ] and [ val compare ] . They do not require [ val
sexp_of_t ] , and do not generate container datatypes .
These functors require only [type t] and [val compare]. They do not require [val
sexp_of_t], and do not generate container datatypes.
*)
module Infix (T : sig
type t [@@deriving compare]
end) : Infix with type t := T.t
module Comparisons (T : sig
type t [@@deriving compare]
end) : Comparisons with type t := T.t
* { 2 Make Functors }
The Comparable Make functor family allows users to choose among the following
attributes :
- [ * _ using_comparator ] or not
- [ * _ binable ] or not
- [ * _ plain ] or not
Thus there are functors like [ Make_plain ] or [ Make_binable_using_comparator ] , etc .
The Comparable Make functor family allows users to choose among the following
attributes:
- [*_using_comparator] or not
- [*_binable] or not
- [*_plain] or not
Thus there are functors like [Make_plain] or [Make_binable_using_comparator], etc.
*)
module Make_plain (T : sig
type t [@@deriving compare, sexp_of]
end) : S_plain with type t := T.t
module Make (T : sig
type t [@@deriving compare, sexp]
end) : S with type t := T.t
module Make_plain_using_comparator (T : sig
type t [@@deriving sexp_of]
include Comparator.S with type t := t
end) : S_plain with type t := T.t with type comparator_witness := T.comparator_witness
module Make_using_comparator (T : sig
type t [@@deriving sexp]
include Comparator.S with type t := t
end) : S with type t := T.t with type comparator_witness := T.comparator_witness
module Make_binable (T : sig
type t [@@deriving bin_io, compare, sexp]
end) : S_binable with type t := T.t
module Make_binable_using_comparator (T : sig
type t [@@deriving bin_io, sexp]
include Comparator.S with type t := t
end) : S_binable with type t := T.t with type comparator_witness := T.comparator_witness
module Extend
(M : Base.Comparable.S) (X : sig
type t = M.t [@@deriving sexp]
end) : S with type t := M.t with type comparator_witness := M.comparator_witness
module Extend_binable
(M : Base.Comparable.S) (X : sig
type t = M.t [@@deriving bin_io, sexp]
end) :
S_binable with type t := M.t with type comparator_witness := M.comparator_witness
module Map_and_set_binable (T : sig
type t [@@deriving bin_io, compare, sexp]
end) : Map_and_set_binable with type t := T.t
module Map_and_set_binable_using_comparator (T : sig
type t [@@deriving bin_io, compare, sexp]
include Comparator.S with type t := t
end) :
Map_and_set_binable
with type t := T.t
with type comparator_witness := T.comparator_witness
module Poly (T : sig
type t [@@deriving sexp]
end) : S with type t := T.t
module Validate (T : sig
type t [@@deriving compare, sexp_of]
end) : Validate with type t := T.t
module Validate_with_zero (T : sig
type t [@@deriving compare, sexp_of]
val zero : t
end) : Validate_with_zero with type t := T.t
module With_zero (T : sig
type t [@@deriving compare, sexp_of]
val zero : t
end) : With_zero with type t := T.t
module Stable : sig
module V1 : sig
module type S = sig
type comparable
type comparator_witness
module Map :
Map.Stable.V1.S
with type key := comparable
with type comparator_witness := comparator_witness
module Set :
Set.Stable.V1.S
with type elt := comparable
with type elt_comparator_witness := comparator_witness
end
module Make (X : Stable_module_types.S0) :
S with type comparable := X.t with type comparator_witness := X.comparator_witness
module With_stable_witness : sig
module type S = sig
type comparable
type comparator_witness
module Map :
Map.Stable.V1.With_stable_witness.S
with type key := comparable
with type comparator_witness := comparator_witness
module Set :
Set.Stable.V1.With_stable_witness.S
with type elt := comparable
with type elt_comparator_witness := comparator_witness
end
module Make (X : Stable_module_types.With_stable_witness.S0) :
S
with type comparable := X.t
with type comparator_witness := X.comparator_witness
end
end
end
end
|
322d357d19c9cdf623f703110fb12da7025ddd208a781d30df4f6d00f86ad70c | grin-compiler/ghc-wpc-sample-programs | Image.hs | {-# LANGUAGE OverloadedStrings, ScopedTypeVariables, CPP #-}
{-# LANGUAGE ViewPatterns #-}
|
Module : Text . Pandoc . Image
Copyright : Copyright ( C ) 2020
License : GNU GPL , version 2 or above
Maintainer : < >
Stability : alpha
Portability : portable
Functions for converting images .
Module : Text.Pandoc.Image
Copyright : Copyright (C) 2020 John MacFarlane
License : GNU GPL, version 2 or above
Maintainer : John MacFarlane <>
Stability : alpha
Portability : portable
Functions for converting images.
-}
module Text.Pandoc.Image ( svgToPng ) where
import Text.Pandoc.Options (WriterOptions(..))
import Text.Pandoc.Process (pipeProcess)
import qualified Data.ByteString.Lazy as L
import System.Exit
import Data.Text (Text)
import Text.Pandoc.Shared (tshow)
import qualified Control.Exception as E
-- | Convert svg image to png. rsvg-convert
-- is used and must be available on the path.
svgToPng :: WriterOptions
-> L.ByteString -- ^ Input image as bytestring
-> IO (Either Text L.ByteString)
svgToPng opts bs = do
let dpi = show $ writerDpi opts
E.catch
(do (exit, out) <- pipeProcess Nothing "rsvg-convert"
["-f","png","-a","--dpi-x",dpi,"--dpi-y",dpi]
bs
return $ if exit == ExitSuccess
then Right out
else Left "conversion from SVG failed")
(\(e :: E.SomeException) -> return $ Left $
"check that rsvg-convert is in path.\n" <> tshow e)
| null | https://raw.githubusercontent.com/grin-compiler/ghc-wpc-sample-programs/0e3a9b8b7cc3fa0da7c77fb7588dd4830fb087f7/pandoc-11df2a3c0f2b1b8e351ad8caaa7cdf583e1b3b2e/src/Text/Pandoc/Image.hs | haskell | # LANGUAGE OverloadedStrings, ScopedTypeVariables, CPP #
# LANGUAGE ViewPatterns #
| Convert svg image to png. rsvg-convert
is used and must be available on the path.
^ Input image as bytestring | |
Module : Text . Pandoc . Image
Copyright : Copyright ( C ) 2020
License : GNU GPL , version 2 or above
Maintainer : < >
Stability : alpha
Portability : portable
Functions for converting images .
Module : Text.Pandoc.Image
Copyright : Copyright (C) 2020 John MacFarlane
License : GNU GPL, version 2 or above
Maintainer : John MacFarlane <>
Stability : alpha
Portability : portable
Functions for converting images.
-}
module Text.Pandoc.Image ( svgToPng ) where
import Text.Pandoc.Options (WriterOptions(..))
import Text.Pandoc.Process (pipeProcess)
import qualified Data.ByteString.Lazy as L
import System.Exit
import Data.Text (Text)
import Text.Pandoc.Shared (tshow)
import qualified Control.Exception as E
svgToPng :: WriterOptions
-> IO (Either Text L.ByteString)
svgToPng opts bs = do
let dpi = show $ writerDpi opts
E.catch
(do (exit, out) <- pipeProcess Nothing "rsvg-convert"
["-f","png","-a","--dpi-x",dpi,"--dpi-y",dpi]
bs
return $ if exit == ExitSuccess
then Right out
else Left "conversion from SVG failed")
(\(e :: E.SomeException) -> return $ Left $
"check that rsvg-convert is in path.\n" <> tshow e)
|
015251b2448ec42f995857274963bab3736f8f9b3eb22eb3a4d95e72ed43d125 | srid/emanote.obelisk | Pipeline.hs | {-# LANGUAGE GADTs #-}
# LANGUAGE TypeApplications #
module Emanote.Pipeline (run, runNoMonitor) where
import qualified Algebra.Graph.Labelled.AdjacencyMap as AM
import qualified Algebra.Graph.Labelled.AdjacencyMap.Patch as G
import qualified Commonmark.Syntax as CM
import Data.Conflict (Conflict (..))
import qualified Data.Conflict as Conflict
import qualified Data.Conflict.Patch as Conflict
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.Tagged (Tagged (..), untag)
import Emanote.FileSystem (PathContent (..))
import qualified Emanote.FileSystem as FS
import qualified Emanote.Graph as G
import qualified Emanote.Markdown as M
import qualified Emanote.Markdown.WikiLink as M
import qualified Emanote.Markdown.WikiLink.Parser as M
import Emanote.Zk (Zk (Zk))
import Reflex hiding (mapMaybe)
import Reflex.Host.Headless (MonadHeadlessApp)
import qualified Reflex.TIncremental as TInc
import Relude
import System.FilePath (dropExtension, takeExtension, takeFileName)
import qualified Text.Megaparsec as M
import qualified Text.Megaparsec.Char as M
import Text.Pandoc.Definition (Pandoc)
import qualified Text.Pandoc.LinkContext as LC
-- | Like `run`, but stops observing for file changes after the initial read
runNoMonitor :: MonadHeadlessApp t m => FilePath -> m Zk
runNoMonitor x = do
liftIO $ putStrLn "Running pipeline in read-only mode"
run' False x
run :: MonadHeadlessApp t m => FilePath -> m Zk
run x = do
liftIO $ putStrLn "Running pipeline in monitor mode"
run' True x
run' :: MonadHeadlessApp t m => Bool -> FilePath -> m Zk
run' monitor inputDir = do
input' <-
if monitor
then FS.directoryTreeIncremental [".*/**"] inputDir
else flip holdIncremental never =<< FS.directoryTree [".*/**"] inputDir
-- TODO: Deal with directory events sensibly, instead of ignoring them.
let input = input' & pipeFilesOnly
logInputChanges input
let pandocOut =
input
& pipeFilterFilename (\fn -> takeExtension fn == ".md")
& pipeFlattenFsTree (Tagged . toText . dropExtension . takeFileName)
& pipeParseMarkdown (M.wikiLinkSpec <> M.markdownSpec)
graphOut =
pandocOut
& pipeExtractLinks
& pipeGraph
& pipeCreateCalendar
Zk
<$> TInc.mirrorIncremental pandocOut
<*> TInc.mirrorIncremental graphOut
<*> newTVarIO 0
pipeFilesOnly :: Reflex t => Incremental t (PatchMap FilePath PathContent) -> Incremental t (PatchMap FilePath ByteString)
pipeFilesOnly =
unsafeMapIncremental
(Map.mapMaybe getFileContent)
(PatchMap . Map.mapMaybe (traverse getFileContent) . unPatchMap)
where
getFileContent = \case
PathContent_File s -> Just s
_ -> Nothing
logInputChanges :: (PerformEvent t m, MonadIO (Performable m)) => Incremental t (PatchMap FilePath a) -> m ()
logInputChanges input =
performEvent_ $
ffor (updatedIncremental input) $ \(void -> m) ->
forM_ (Map.toList $ unPatchMap m) $ \(fp, mval) -> do
let mark = maybe "-" (const "*") mval
liftIO $ putStr $ mark <> " "
liftIO $ putStrLn fp
pipeFilterFilename ::
Reflex t =>
(FilePath -> Bool) ->
Incremental t (PatchMap FilePath v) ->
Incremental t (PatchMap FilePath v)
pipeFilterFilename selectFile =
let f :: FilePath -> v -> Maybe v
f = \fs x -> guard (selectFile fs) >> pure x
in unsafeMapIncremental
(Map.mapMaybeWithKey f)
(PatchMap . Map.mapMaybeWithKey f . unPatchMap)
pipeFlattenFsTree ::
forall t v.
(Reflex t) =>
-- | How to flatten the file path.
(FilePath -> M.WikiLinkID) ->
Incremental t (PatchMap FilePath v) ->
Incremental t (PatchMap M.WikiLinkID (Either (Conflict FilePath v) (FilePath, v)))
pipeFlattenFsTree toKey = do
unsafeMapIncrementalWithOldValue
(Conflict.resolveConflicts toKey)
(Conflict.applyPatch toKey)
pipeParseMarkdown ::
(Reflex t, Functor f, Functor g, M.MarkdownSyntaxSpec m il bl) =>
CM.SyntaxSpec m il bl ->
Incremental t (PatchMap M.WikiLinkID (f (g ByteString))) ->
Incremental t (PatchMap M.WikiLinkID (f (g (Either M.ParserError Pandoc))))
pipeParseMarkdown spec =
unsafeMapIncremental
(Map.mapWithKey $ \fID -> (fmap . fmap) (parse fID))
(PatchMap . Map.mapWithKey ((fmap . fmap . fmap) . parse) . unPatchMap)
where
parse :: M.WikiLinkID -> ByteString -> Either M.ParserError Pandoc
parse (Tagged (toString -> fn)) = M.parseMarkdown spec fn . decodeUtf8
pipeExtractLinks ::
forall t f g h.
(Reflex t, Functor f, Functor g, Functor h, Foldable f, Foldable g, Foldable h) =>
Incremental t (PatchMap M.WikiLinkID (f (g (h Pandoc)))) ->
Incremental t (PatchMap M.WikiLinkID [(M.WikiLink, M.WikiLinkID)])
pipeExtractLinks = do
unsafeMapIncremental
(Map.map $ (concatMap . concatMap . concatMap) f)
(PatchMap . Map.map ((fmap . concatMap . concatMap . concatMap) f) . unPatchMap)
where
f doc =
let linkMap = LC.queryLinksWithContext doc
getTitleAttr =
Map.lookup "title" . Map.fromList
in concat $
ffor (Map.toList linkMap) $ \(url, urlLinks) -> do
fforMaybe (toList urlLinks) $ \(getTitleAttr -> tit, ctx) -> do
(lbl, wId) <- M.parseWikiLinkUrl tit url
pure (M.mkWikiLink lbl ctx, wId)
pipeGraph ::
forall t.
(Reflex t) =>
Incremental t (PatchMap M.WikiLinkID [(M.WikiLink, M.WikiLinkID)]) ->
Incremental t (G.PatchGraph G.E G.V)
pipeGraph = do
unsafeMapIncremental
(fromMaybe AM.empty . flip apply AM.empty . f . PatchMap . fmap Just)
f
where
f ::
PatchMap M.WikiLinkID [(M.WikiLink, M.WikiLinkID)] ->
G.PatchGraph G.E G.V
f p =
let pairs = Map.toList $ unPatchMap p
in G.PatchGraph $
pairs <&> \(k, mes) ->
case mes of
Nothing ->
G.ModifyGraph_RemoveVertexWithSuccessors k
Just es ->
G.ModifyGraph_ReplaceVertexWithSuccessors k (first one <$> es)
| Tag daily notes with month zettels ( " 2020 - 02 " ) , which are tagged further
with year zettels ( " 2020 " ) .
pipeCreateCalendar ::
Reflex t =>
Incremental t (G.PatchGraph G.E G.V) ->
Incremental t (G.PatchGraph G.E G.V)
pipeCreateCalendar =
unsafeMapIncremental
(fromMaybe AM.empty . flip apply AM.empty . f . G.asPatchGraph)
f
where
f :: G.PatchGraph G.E G.V -> G.PatchGraph G.E G.V
f diff =
let liftNote :: M.Parsec Void Text Text -> G.PatchGraph G.E G.V -> G.PatchGraph G.E G.V
liftNote wIdParser d =
G.PatchGraph $
flip mapMaybe (Set.toList $ G.modifiedOrAddedVertices d) $ \wId -> do
(Tagged -> parent) <- parse wIdParser (untag wId)
pure $ G.ModifyGraph_AddEdge (one $ (M.WikiLink M.WikiLinkLabel_Tag mempty)) wId parent
monthDiff = liftNote monthFromDate diff
-- Include monthDiff here, so as to 'lift' those ghost month zettels further.
yearDiff = liftNote yearFromMonth (diff <> monthDiff)
in mconcat
[ diff,
monthDiff,
yearDiff
]
yearFromMonth :: M.Parsec Void Text Text
yearFromMonth = do
year <- num 4 <* dash
_month <- num 2
pure year
monthFromDate :: M.Parsec Void Text Text
monthFromDate = do
year <- num 4 <* dash
month <- num 2 <* dash
_day <- num 2
pure $ year <> "-" <> month
dash = M.char '-'
num n =
toText <$> M.count n M.digitChar
parse :: forall e s r. (M.Stream s, Ord e) => M.Parsec e s r -> s -> Maybe r
parse p =
M.parseMaybe (p <* M.eof)
| Like ` unsafeMapIncremental ` but the patch function also takes the old
-- target.
unsafeMapIncrementalWithOldValue ::
(Reflex t, Patch p, Patch p') =>
(PatchTarget p -> PatchTarget p') ->
(PatchTarget p -> p -> p') ->
Incremental t p ->
Incremental t p'
unsafeMapIncrementalWithOldValue f g x =
let x0 = currentIncremental x
xE = updatedIncremental x
in unsafeBuildIncremental (f <$> sample x0) $ uncurry g <$> attach x0 xE
| null | https://raw.githubusercontent.com/srid/emanote.obelisk/de935cff82fc57085adef1904094c7129c02d995/lib/emanote/src/Emanote/Pipeline.hs | haskell | # LANGUAGE GADTs #
| Like `run`, but stops observing for file changes after the initial read
TODO: Deal with directory events sensibly, instead of ignoring them.
| How to flatten the file path.
Include monthDiff here, so as to 'lift' those ghost month zettels further.
target. | # LANGUAGE TypeApplications #
module Emanote.Pipeline (run, runNoMonitor) where
import qualified Algebra.Graph.Labelled.AdjacencyMap as AM
import qualified Algebra.Graph.Labelled.AdjacencyMap.Patch as G
import qualified Commonmark.Syntax as CM
import Data.Conflict (Conflict (..))
import qualified Data.Conflict as Conflict
import qualified Data.Conflict.Patch as Conflict
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.Tagged (Tagged (..), untag)
import Emanote.FileSystem (PathContent (..))
import qualified Emanote.FileSystem as FS
import qualified Emanote.Graph as G
import qualified Emanote.Markdown as M
import qualified Emanote.Markdown.WikiLink as M
import qualified Emanote.Markdown.WikiLink.Parser as M
import Emanote.Zk (Zk (Zk))
import Reflex hiding (mapMaybe)
import Reflex.Host.Headless (MonadHeadlessApp)
import qualified Reflex.TIncremental as TInc
import Relude
import System.FilePath (dropExtension, takeExtension, takeFileName)
import qualified Text.Megaparsec as M
import qualified Text.Megaparsec.Char as M
import Text.Pandoc.Definition (Pandoc)
import qualified Text.Pandoc.LinkContext as LC
runNoMonitor :: MonadHeadlessApp t m => FilePath -> m Zk
runNoMonitor x = do
liftIO $ putStrLn "Running pipeline in read-only mode"
run' False x
run :: MonadHeadlessApp t m => FilePath -> m Zk
run x = do
liftIO $ putStrLn "Running pipeline in monitor mode"
run' True x
run' :: MonadHeadlessApp t m => Bool -> FilePath -> m Zk
run' monitor inputDir = do
input' <-
if monitor
then FS.directoryTreeIncremental [".*/**"] inputDir
else flip holdIncremental never =<< FS.directoryTree [".*/**"] inputDir
let input = input' & pipeFilesOnly
logInputChanges input
let pandocOut =
input
& pipeFilterFilename (\fn -> takeExtension fn == ".md")
& pipeFlattenFsTree (Tagged . toText . dropExtension . takeFileName)
& pipeParseMarkdown (M.wikiLinkSpec <> M.markdownSpec)
graphOut =
pandocOut
& pipeExtractLinks
& pipeGraph
& pipeCreateCalendar
Zk
<$> TInc.mirrorIncremental pandocOut
<*> TInc.mirrorIncremental graphOut
<*> newTVarIO 0
pipeFilesOnly :: Reflex t => Incremental t (PatchMap FilePath PathContent) -> Incremental t (PatchMap FilePath ByteString)
pipeFilesOnly =
unsafeMapIncremental
(Map.mapMaybe getFileContent)
(PatchMap . Map.mapMaybe (traverse getFileContent) . unPatchMap)
where
getFileContent = \case
PathContent_File s -> Just s
_ -> Nothing
logInputChanges :: (PerformEvent t m, MonadIO (Performable m)) => Incremental t (PatchMap FilePath a) -> m ()
logInputChanges input =
performEvent_ $
ffor (updatedIncremental input) $ \(void -> m) ->
forM_ (Map.toList $ unPatchMap m) $ \(fp, mval) -> do
let mark = maybe "-" (const "*") mval
liftIO $ putStr $ mark <> " "
liftIO $ putStrLn fp
pipeFilterFilename ::
Reflex t =>
(FilePath -> Bool) ->
Incremental t (PatchMap FilePath v) ->
Incremental t (PatchMap FilePath v)
pipeFilterFilename selectFile =
let f :: FilePath -> v -> Maybe v
f = \fs x -> guard (selectFile fs) >> pure x
in unsafeMapIncremental
(Map.mapMaybeWithKey f)
(PatchMap . Map.mapMaybeWithKey f . unPatchMap)
pipeFlattenFsTree ::
forall t v.
(Reflex t) =>
(FilePath -> M.WikiLinkID) ->
Incremental t (PatchMap FilePath v) ->
Incremental t (PatchMap M.WikiLinkID (Either (Conflict FilePath v) (FilePath, v)))
pipeFlattenFsTree toKey = do
unsafeMapIncrementalWithOldValue
(Conflict.resolveConflicts toKey)
(Conflict.applyPatch toKey)
pipeParseMarkdown ::
(Reflex t, Functor f, Functor g, M.MarkdownSyntaxSpec m il bl) =>
CM.SyntaxSpec m il bl ->
Incremental t (PatchMap M.WikiLinkID (f (g ByteString))) ->
Incremental t (PatchMap M.WikiLinkID (f (g (Either M.ParserError Pandoc))))
pipeParseMarkdown spec =
unsafeMapIncremental
(Map.mapWithKey $ \fID -> (fmap . fmap) (parse fID))
(PatchMap . Map.mapWithKey ((fmap . fmap . fmap) . parse) . unPatchMap)
where
parse :: M.WikiLinkID -> ByteString -> Either M.ParserError Pandoc
parse (Tagged (toString -> fn)) = M.parseMarkdown spec fn . decodeUtf8
pipeExtractLinks ::
forall t f g h.
(Reflex t, Functor f, Functor g, Functor h, Foldable f, Foldable g, Foldable h) =>
Incremental t (PatchMap M.WikiLinkID (f (g (h Pandoc)))) ->
Incremental t (PatchMap M.WikiLinkID [(M.WikiLink, M.WikiLinkID)])
pipeExtractLinks = do
unsafeMapIncremental
(Map.map $ (concatMap . concatMap . concatMap) f)
(PatchMap . Map.map ((fmap . concatMap . concatMap . concatMap) f) . unPatchMap)
where
f doc =
let linkMap = LC.queryLinksWithContext doc
getTitleAttr =
Map.lookup "title" . Map.fromList
in concat $
ffor (Map.toList linkMap) $ \(url, urlLinks) -> do
fforMaybe (toList urlLinks) $ \(getTitleAttr -> tit, ctx) -> do
(lbl, wId) <- M.parseWikiLinkUrl tit url
pure (M.mkWikiLink lbl ctx, wId)
pipeGraph ::
forall t.
(Reflex t) =>
Incremental t (PatchMap M.WikiLinkID [(M.WikiLink, M.WikiLinkID)]) ->
Incremental t (G.PatchGraph G.E G.V)
pipeGraph = do
unsafeMapIncremental
(fromMaybe AM.empty . flip apply AM.empty . f . PatchMap . fmap Just)
f
where
f ::
PatchMap M.WikiLinkID [(M.WikiLink, M.WikiLinkID)] ->
G.PatchGraph G.E G.V
f p =
let pairs = Map.toList $ unPatchMap p
in G.PatchGraph $
pairs <&> \(k, mes) ->
case mes of
Nothing ->
G.ModifyGraph_RemoveVertexWithSuccessors k
Just es ->
G.ModifyGraph_ReplaceVertexWithSuccessors k (first one <$> es)
| Tag daily notes with month zettels ( " 2020 - 02 " ) , which are tagged further
with year zettels ( " 2020 " ) .
pipeCreateCalendar ::
Reflex t =>
Incremental t (G.PatchGraph G.E G.V) ->
Incremental t (G.PatchGraph G.E G.V)
pipeCreateCalendar =
unsafeMapIncremental
(fromMaybe AM.empty . flip apply AM.empty . f . G.asPatchGraph)
f
where
f :: G.PatchGraph G.E G.V -> G.PatchGraph G.E G.V
f diff =
let liftNote :: M.Parsec Void Text Text -> G.PatchGraph G.E G.V -> G.PatchGraph G.E G.V
liftNote wIdParser d =
G.PatchGraph $
flip mapMaybe (Set.toList $ G.modifiedOrAddedVertices d) $ \wId -> do
(Tagged -> parent) <- parse wIdParser (untag wId)
pure $ G.ModifyGraph_AddEdge (one $ (M.WikiLink M.WikiLinkLabel_Tag mempty)) wId parent
monthDiff = liftNote monthFromDate diff
yearDiff = liftNote yearFromMonth (diff <> monthDiff)
in mconcat
[ diff,
monthDiff,
yearDiff
]
yearFromMonth :: M.Parsec Void Text Text
yearFromMonth = do
year <- num 4 <* dash
_month <- num 2
pure year
monthFromDate :: M.Parsec Void Text Text
monthFromDate = do
year <- num 4 <* dash
month <- num 2 <* dash
_day <- num 2
pure $ year <> "-" <> month
dash = M.char '-'
num n =
toText <$> M.count n M.digitChar
parse :: forall e s r. (M.Stream s, Ord e) => M.Parsec e s r -> s -> Maybe r
parse p =
M.parseMaybe (p <* M.eof)
| Like ` unsafeMapIncremental ` but the patch function also takes the old
unsafeMapIncrementalWithOldValue ::
(Reflex t, Patch p, Patch p') =>
(PatchTarget p -> PatchTarget p') ->
(PatchTarget p -> p -> p') ->
Incremental t p ->
Incremental t p'
unsafeMapIncrementalWithOldValue f g x =
let x0 = currentIncremental x
xE = updatedIncremental x
in unsafeBuildIncremental (f <$> sample x0) $ uncurry g <$> attach x0 xE
|
ee7435a0fb4cee49cf32bc0d5e66559ea4a12f292fb49d2b19b38992fe7a3c5f | clj-kafka/franzy | deserializers.clj | (ns franzy.serialization.fressian.deserializers
(:require [clojure.data.fressian :as fressian])
(:import (org.apache.kafka.common.serialization Deserializer)))
(deftype FressianDeserializer [opts]
Deserializer
(configure [_ _ _])
(deserialize [_ _ data]
(when data
(with-open [bais (clojure.java.io/input-stream data)]
(apply fressian/read bais opts))))
(close [_]))
(defn fressian-deserializer
"Fressian deserializer for Apache Kafka.
Use for serializing Kafka values.
> Notes: You may pass any of the built-in Fressian options via the opts map, using
the 1-arity version of this function."
(^FressianDeserializer [] (fressian-deserializer nil))
(^FressianDeserializer [opts]
(FressianDeserializer. opts)))
| null | https://raw.githubusercontent.com/clj-kafka/franzy/6c2e2e65ad137d2bcbc04ff6e671f97ea8c0e380/fressian/src/franzy/serialization/fressian/deserializers.clj | clojure | (ns franzy.serialization.fressian.deserializers
(:require [clojure.data.fressian :as fressian])
(:import (org.apache.kafka.common.serialization Deserializer)))
(deftype FressianDeserializer [opts]
Deserializer
(configure [_ _ _])
(deserialize [_ _ data]
(when data
(with-open [bais (clojure.java.io/input-stream data)]
(apply fressian/read bais opts))))
(close [_]))
(defn fressian-deserializer
"Fressian deserializer for Apache Kafka.
Use for serializing Kafka values.
> Notes: You may pass any of the built-in Fressian options via the opts map, using
the 1-arity version of this function."
(^FressianDeserializer [] (fressian-deserializer nil))
(^FressianDeserializer [opts]
(FressianDeserializer. opts)))
| |
242c5fd9c12915fc63d2dfee6eed78debd0351f673e23b239f4f84c2e63bf3c9 | ceramic/ceramic | ql-patch.lisp | ;;;; Okay, so. So libraries use the sharpsign plus/minus macros to determine
whether Quicklisp is in the environment , and decide whether to use
;;;; `ql:quickload` or `asdf:load-system` to load a library. Libraries are
;;;; compiled with :quicklisp in *features* into fasl files. Then, when
;;;; compiling a Ceramic app, it loads those.
;;;;
;;;; Then it fails with "package `quicklisp-client` not found". Why can't we
;;;; make a list of those libraries and recompile them? Well, because that's
;;;; horrible, and also recompiling some libraries (looking at you,
;;;; clack-v1-compat) causes compilation errors due to package redefinition. So
what we do here is , we define the Quicklisp package and ` quickload ` to do
;;;; nothing. Why not make it call `asdf:load-system`? Because there's no ASDF
;;;; in a compiled application.
(defpackage quicklisp-client
(:use :cl)
(:nicknames :ql)
(:export :quickload)
(:documentation "Quicklisp look-alike fake package."))
(in-package :quicklisp-client)
(defun quickload (&rest args)
"This is a load-bearing hack."
t)
| null | https://raw.githubusercontent.com/ceramic/ceramic/5d81e2bd954440a6adebde31fac9c730a698c74b/src/ql-patch.lisp | lisp | Okay, so. So libraries use the sharpsign plus/minus macros to determine
`ql:quickload` or `asdf:load-system` to load a library. Libraries are
compiled with :quicklisp in *features* into fasl files. Then, when
compiling a Ceramic app, it loads those.
Then it fails with "package `quicklisp-client` not found". Why can't we
make a list of those libraries and recompile them? Well, because that's
horrible, and also recompiling some libraries (looking at you,
clack-v1-compat) causes compilation errors due to package redefinition. So
nothing. Why not make it call `asdf:load-system`? Because there's no ASDF
in a compiled application. | whether Quicklisp is in the environment , and decide whether to use
what we do here is , we define the Quicklisp package and ` quickload ` to do
(defpackage quicklisp-client
(:use :cl)
(:nicknames :ql)
(:export :quickload)
(:documentation "Quicklisp look-alike fake package."))
(in-package :quicklisp-client)
(defun quickload (&rest args)
"This is a load-bearing hack."
t)
|
e37fdc987ff30f5c6cdabbc3b2d7b93cfe96c92ad858aa042cf5aa7bd22897de | yogthos/components-example | dev_middleware.clj | (ns components-example.dev-middleware
(:require [ring.middleware.reload :refer [wrap-reload]]
[selmer.middleware :refer [wrap-error-page]]
[prone.middleware :refer [wrap-exceptions]]))
(defn wrap-dev [handler]
(-> handler
wrap-reload
wrap-error-page
wrap-exceptions))
| null | https://raw.githubusercontent.com/yogthos/components-example/c369bde7916b6d15cb6a8b4ab05c562e16755b2a/env/dev/clj/components_example/dev_middleware.clj | clojure | (ns components-example.dev-middleware
(:require [ring.middleware.reload :refer [wrap-reload]]
[selmer.middleware :refer [wrap-error-page]]
[prone.middleware :refer [wrap-exceptions]]))
(defn wrap-dev [handler]
(-> handler
wrap-reload
wrap-error-page
wrap-exceptions))
| |
a1200bd201019748c522c43b48ccea8473a7db8c1640887f6777b87f704c4fb1 | exercism/erlang | example.erl | -module(example).
-export([rebase/3]).
rebase(_Input, SrcBase, _DstBase) when SrcBase < 2 ->
{error, "input base must be >= 2"};
rebase(_Input, _SrcBase, DstBase) when DstBase < 2 ->
{error, "output base must be >= 2"};
rebase(Input, SrcBase, DstBase) ->
case lists:any(fun (D) -> D<0 orelse D>=SrcBase end, Input) of
true -> {error, "all digits must satisfy 0 <= d < input base"};
false -> output(to_base(to_dec(Input, SrcBase), DstBase))
end.
output([]) ->
{ok, [0]};
output(Digits) ->
{ok, Digits}.
to_dec(Digits, Base) ->
to_dec(Digits, Base, length(Digits)-1, 0).
to_dec([], _, _, Acc) ->
Acc;
to_dec([D|More], Base, N, Acc) ->
to_dec(More, Base, N-1, Acc+D*trunc(math:pow(Base, N))).
to_base(Num, Base) ->
to_base(Num, Base, []).
to_base(0, _, Acc) ->
Acc;
to_base(Num, Base, Acc) ->
to_base(Num div Base, Base, [Num rem Base|Acc]).
| null | https://raw.githubusercontent.com/exercism/erlang/57ac2707dae643682950715e74eb271f732e2100/exercises/practice/all-your-base/.meta/example.erl | erlang | -module(example).
-export([rebase/3]).
rebase(_Input, SrcBase, _DstBase) when SrcBase < 2 ->
{error, "input base must be >= 2"};
rebase(_Input, _SrcBase, DstBase) when DstBase < 2 ->
{error, "output base must be >= 2"};
rebase(Input, SrcBase, DstBase) ->
case lists:any(fun (D) -> D<0 orelse D>=SrcBase end, Input) of
true -> {error, "all digits must satisfy 0 <= d < input base"};
false -> output(to_base(to_dec(Input, SrcBase), DstBase))
end.
output([]) ->
{ok, [0]};
output(Digits) ->
{ok, Digits}.
to_dec(Digits, Base) ->
to_dec(Digits, Base, length(Digits)-1, 0).
to_dec([], _, _, Acc) ->
Acc;
to_dec([D|More], Base, N, Acc) ->
to_dec(More, Base, N-1, Acc+D*trunc(math:pow(Base, N))).
to_base(Num, Base) ->
to_base(Num, Base, []).
to_base(0, _, Acc) ->
Acc;
to_base(Num, Base, Acc) ->
to_base(Num div Base, Base, [Num rem Base|Acc]).
| |
d364468176344aaaa237052d624c9e88627b0bfadb1b8918ec412bccf124d596 | iu-parfunc/haskell_dsl_tour | Parser.hs | Copyright 2008 < >
Portions of this were derived from haskell
tutorial " Write yourself a scheme in 48 hours " and are thus
Copyright ( but there is n't much of his stuff left ) .
This file is part of haskeem .
haskeem is free software ; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
( at your option ) any later version .
haskeem is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with ; if not , write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
$ I d : parser.hs , v 1.17 2010 - 01 - 05 05:23:34 uwe Exp $
Portions of this were derived from Jonathan Tang's haskell
tutorial "Write yourself a scheme in 48 hours" and are thus
Copyright Jonathan Tang (but there isn't much of his stuff left).
This file is part of haskeem.
haskeem is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
haskeem is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with haskeem; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
$Id: parser.hs,v 1.17 2010-01-05 05:23:34 uwe Exp $ -}
module FrameworkHs.SExpReader.Parser (readExpr, readExprList, readNumber, specialSymbolChars, lexFile) where
import Prelude
import Data.Char
import Data.Ratio
import Text.ParserCombinators.Parsec as TPCP hiding (spaces)
import Control.Monad.Error as CME
import qualified Data.IntMap as DIM
import Control.Monad (mapM_)
import System.Environment
import FrameworkHs.SExpReader.LispData
lexFile :: FilePath -> IO (ThrowsError [LispVal])
lexFile f = do
c <- readFile f
return $ readExprList c
Parsers for the various kinds of LispVal
-- "#!/some/path/to/executable" at the top of the file, to enable
-- scheme "shell" scripts: for the rest of the program, it's a comment
hashbang :: Parser Char
hashbang = char '#' >> char '!' >> many (noneOf "\r\n") >> return ' '
-- semicolon to end-of-line, the oldest style of lisp comment
comment :: Parser Char
comment =
do char ';'
many (noneOf "\r\n")
return ' '
spaces :: Parser ()
spaces = skipMany1 (comment <|> space)
-- This is not quite R6RS-compliant: R6RS allows '.'
specialSymbolChars :: String
specialSymbolChars = "!$%&*+-/:<=>?@^_~."
symbol :: Parser Char
symbol = oneOf specialSymbolChars
-- This is a small extension to R6RS
controlChar :: Parser Char
controlChar =
do char '^'
c <- oneOf (['A' .. 'Z'] ++ "[\\]^_")
return (chr (ord c + 1 - ord 'A'))
namedChar :: Parser Char
namedChar =
do name <- string "alarm"
<|> string "backspace"
<|> string "delete"
<|> string "esc"
<|> string "linefeed"
<|> TPCP.try (string "newline")
<|> string "nul"
<|> string "page"
<|> string "return"
<|> string "space"
<|> string "tab"
<|> string "vtab"
case name of
"nul" -> return (chr 0)
"alarm" -> return (chr 7)
"backspace" -> return (chr 8)
"tab" -> return '\t'
"linefeed" -> return '\n'
"newline" -> return '\n'
"vtab" -> return (chr 11)
"page" -> return (chr 12)
"return" -> return '\r'
"esc" -> return (chr 27)
"space" -> return ' '
"delete" -> return (chr 127)
_ -> progError "namedChar"
parseChar :: Parser LispVal
parseChar =
do char '#'
char '\\'
c <- TPCP.try controlChar <|> TPCP.try namedChar <|> anyChar
return (Char c)
-- This is not quite R6RS-compliant: R6RS requires a hex escape spec,
-- and it forbids the "otherwise" clause below. oh well... later maybe
escChar :: Parser Char
escChar =
do char '\\'
c <- anyChar
return (case c of
'a' -> chr 7
'b' -> chr 8
't' -> '\t'
'n' -> '\n'
'v' -> chr 11
'f' -> chr 12
'r' -> '\r'
_ -> c)
parseBool :: Parser LispVal
parseBool =
do char '#'
v <- oneOf "tTfF"
return (case v of
't' -> lispTrue
'T' -> lispTrue
'f' -> lispFalse
'F' -> lispFalse
_ -> progError "parseBool")
parseString :: Parser LispVal
parseString =
do char '"'
x <- many (escChar <|> noneOf "\"")
char '"'
return (String x)
parseSymbol :: Parser LispVal
parseSymbol =
do first <- letter <|> symbol
rest <- many (letter <|> digit <|> symbol)
return (Symbol (first:rest))
readBaseInt :: Integer -> String -> Integer
readBaseInt b s = foldl ma 0 s
where ma v1 v2 = b*v1 + toInteger (digitToInt v2)
readBaseFrac :: Integer -> String -> Double
readBaseFrac _ [] = 0.0
readBaseFrac b s = r * foldr ma 0 s where
r = 1.0 / fromInteger b
ma v1 v2 = fromIntegral (digitToInt v1) + r*v2
parseHdr :: Parser (Char, Integer)
parseHdr =
do b <- option 'd' (char '#' >> oneOf "bodxBODX")
s <- option '+' (oneOf "+-")
let base = case b of
'b' -> 2
'B' -> 2
'o' -> 8
'O' -> 8
'd' -> 10
'D' -> 10
'x' -> 16
'X' -> 16
_ -> progError "parseHdr"
return (s, base)
baseDigits :: Integer -> String
baseDigits 2 = "01"
baseDigits 8 = "01234567"
baseDigits 10 = "0123456789"
baseDigits 16 = "0123456789abcdefABCDEF"
baseDigits _ = progError "baseDigits"
int :: String
int = "int"
-- The fact that this parser can deal with floating-point numbers
in bases 2 , 8 , and 16 as well as 10 is an extension of R6RS .
Parse first alternative for floating - point numbers : \d+(\.\d * ) ?
parseF1 :: Integer -> Parser (String,String)
parseF1 b =
do ip <- many1 (oneOf (baseDigits b))
fp <- option int (char '.' >> many (oneOf (baseDigits b)))
return (ip,fp)
Parse second alternative for floating - point numbers : \.\d+
parseF2 :: Integer -> Parser (String,String)
parseF2 b =
do char '.'
fp <- many1 (oneOf (baseDigits b))
return ("0",fp)
-- Parse the exponent
parseExp :: Integer -> Parser Integer
parseExp b =
do oneOf (if b == 16 then "xX" else "eExX")
s <- option '+' (oneOf "+-")
num <- many1 (oneOf (baseDigits b))
let e = readBaseInt b num
return (if s == '-' then (-e) else e)
powi :: Integer -> Integer -> Integer
powi b e | e == 0 = 1
| e < 0 = error "negative exponent in powi"
| even e = powi (b*b) (e `quot` 2)
| otherwise = b * powi b (e - 1)
pow :: Integer -> Integer -> Double
pow b e =
if e >= 0 then fromInteger (powi b e) else recip (fromInteger (powi b (-e)))
-- Parse an integer or a floating-point number. This parser will return
-- numbers written as aaaEbb (with no decimal point) as integers, if the
-- exponent bb is non-negative.
parseIntOrFlt :: Parser LispVal
parseIntOrFlt =
do (s, b) <- parseHdr
(ip, fp) <- parseF1 b <|> parseF2 b
e <- option 0 (parseExp b)
let fpi = if fp == int then "0" else fp
vf = pow b e * (fromInteger (readBaseInt b ip) + readBaseFrac b fpi)
vi = powi b e * readBaseInt b ip
if fp == int && e >= 0
then return (IntNumber (if s == '-' then (-vi) else vi))
else return (FltNumber (if s == '-' then (-vf) else vf))
Parse a rational number written as numerator / denominator . This parser
-- accepts and understands rational infinity, both positive and negative,
and rational not - a - number : + infinity is written as 1/0 , -infinity as
-1/0 , and not - a - number as 0/0 . That 's an incompatible extension of R6RS .
parseRat :: Parser LispVal
parseRat =
do (s, b) <- parseHdr
nstr <- many1 (oneOf (baseDigits b))
char '/'
dstr <- many1 (oneOf (baseDigits b))
let num = readBaseInt b nstr
den = readBaseInt b dstr
ns = if s == '-' then (-num) else num
val = if den /= 0
then ns % den
else if ns > 0
then myRatPInf
else if ns < 0
then myRatNInf
else myRatNaN
if denominator val == 1
then return (IntNumber (numerator val))
else return (RatNumber val)
a couple of special floating - point numbers mandated by R6RS
parseNaNInf :: Parser LispVal
parseNaNInf =
do val <- TPCP.try (string "+nan.0")
<|> TPCP.try (string "-nan.0")
<|> TPCP.try (string "+inf.0")
<|> TPCP.try (string "-inf.0")
case val of
"+nan.0" -> return (FltNumber myFltNaN)
"-nan.0" -> return (FltNumber myFltNaN)
"+inf.0" -> return (FltNumber myFltPInf)
"-inf.0" -> return (FltNumber myFltNInf)
_ -> progError "parseNaNInf"
parseNumber :: Parser LispVal
parseNumber = TPCP.try parseNaNInf <|> TPCP.try parseRat <|> parseIntOrFlt
Parsers for the abbreviations for the various kinds of quoting entities :
-- '<datum> => (quote <datum>)
-- `<datum> => (quasiquote <datum>)
-- ,<datum> => (unquote <datum>)
-- ,@<datum> => (unquote-splicing <datum>)
parseQQ :: Parser LispVal
parseQQ =
do char '`'
x <- parseExpr
return (List [Symbol "quasiquote", x])
parseQ :: Parser LispVal
parseQ =
do char '\''
x <- parseExpr
return (List [Symbol "quote", x])
parseUQ :: Parser LispVal
parseUQ =
do char ','
x <- parseExpr
return (List [Symbol "unquote", x])
parseUQS :: Parser LispVal
parseUQS =
do char ','
char '@'
x <- parseExpr
return (List [Symbol "unquote-splicing", x])
parseQuoted :: Parser LispVal
parseQuoted = TPCP.try parseUQS
<|> TPCP.try parseUQ
<|> TPCP.try parseQQ
<|> parseQ
Parser for a dotted - list or a regular list . Due to the representation of
-- scheme lists as haskell lists rather than as dotted-pairs, it's slightly
-- tricky to get the case of (a . (b . (c . ()))) and similar forms to come
-- out right; however, that is explicitly described as exactly identical to
the list ( a b c ) according to the RnRS standard , so it has to be treated
-- correctly.
parseList :: String -> Parser LispVal
parseList [o,c] =
do char o
skipMany space
hd <- sepEndBy parseExpr spaces
tl <- option (List []) (TPCP.try (char '.' >> spaces >> parseExpr))
skipMany space
char c
if isl tl
then return (List (hd ++ unpl tl))
else if isdl tl
then return (DottedList (hd ++ unpdlh tl) (unpdlt tl))
else return (DottedList hd tl)
where isl (List ((Symbol sym):_)) =
not (sym == "unquote" || sym == "unquote-splicing")
isl (List _) = True
isl _ = False
unpl (List l) = l
unpl _ = progError "parseDottedList/unpl"
isdl (DottedList _ _) = True
isdl _ = False
unpdlh (DottedList h _) = h
unpdlh _ = progError "parseDottedList/unpdlh"
unpdlt (DottedList _ t) = t
unpdlt _ = progError "parseDottedList/unpdlt"
parseParList :: Parser LispVal
parseParList = parseList "()"
parseBraList :: Parser LispVal
parseBraList = parseList "[]"
Parser for a vector : this is similar to a list ( but not a dotted - list ) ,
-- except that R6RS says access times are generally faster than for lists.
-- It would seem that haskell Arrays would be the natural way to go, but
the documentation for those is ... well , crappy . Data . IntMap is much
-- better documented, and pretty close to what we want. Access times aren't
O(1 ) , but they are O(min(n , W ) ) , where n is the size of the vector and
W is the size in bits of a machine word : either 32 or 64 usually . This
is due to the implementation of Data . IntMap : internally , it 's a PATRICIA
-- tree. That should be fast enough for the moment; if it becomes an issue,
-- I can always change later. Data.IntMaps are extensible, so I could in
-- principle have extensible vectors, which would mean I'd not need to store
-- the length, but bounds-checked arrays seem like a nice feature to have;
-- I can add an explicit grow-vector routine, which as a result of the
-- extensibility of Data.IntMaps will be very easy to write.
parseVector :: Parser LispVal
parseVector =
do char '#'
char '('
skipMany space
vals <- sepBy parseExpr spaces
skipMany space
char ')'
return (Vector (toInteger (length vals))
(DIM.fromAscList (addkey 0 vals)))
where addkey _ [] = []
addkey n (v:vs) = (n, v) : addkey (n+1) vs
parseExpr :: Parser LispVal
parseExpr = parseString
<|> TPCP.try parseBool
<|> TPCP.try parseChar
<|> TPCP.try parseNumber
<|> TPCP.try parseVector
<|> TPCP.try parseSymbol
<|> parseQuoted
<|> parseParList
<|> parseBraList
readOrThrow :: Parser a -> String -> ThrowsError a
readOrThrow parser input =
case parse parser "lisp" input of
Left err -> throwError (Parser err)
Right val -> return val
readExpr :: String -> ThrowsError LispVal
readExpr = readOrThrow parseExpr
readExprList :: String -> ThrowsError [LispVal]
readExprList =
readOrThrow (optional hashbang >> skipMany spaces >>
endBy parseExpr (spaces <|> eof))
Parser for just numbers , for internally converting strings to numbers ;
-- it's just a little more lenient than only and exactly a number: allow
-- whitespace on either side, that doesn't harm anything and seems polite
parseJustNumber :: Parser LispVal
parseJustNumber =
do skipMany space
num <- parseNumber
skipMany space
eof
return num
readNumber :: String -> ThrowsError LispVal
readNumber input =
case parse parseJustNumber "number" input of
Left _ -> return lispFalse
Right val -> return val
| null | https://raw.githubusercontent.com/iu-parfunc/haskell_dsl_tour/f75a7e492a1e5d219a77fb128f70441d54a706eb/middle_end/nanopass/course_example/FrameworkHs/SExpReader/Parser.hs | haskell | "#!/some/path/to/executable" at the top of the file, to enable
scheme "shell" scripts: for the rest of the program, it's a comment
semicolon to end-of-line, the oldest style of lisp comment
This is not quite R6RS-compliant: R6RS allows '.'
This is a small extension to R6RS
This is not quite R6RS-compliant: R6RS requires a hex escape spec,
and it forbids the "otherwise" clause below. oh well... later maybe
The fact that this parser can deal with floating-point numbers
Parse the exponent
Parse an integer or a floating-point number. This parser will return
numbers written as aaaEbb (with no decimal point) as integers, if the
exponent bb is non-negative.
accepts and understands rational infinity, both positive and negative,
'<datum> => (quote <datum>)
`<datum> => (quasiquote <datum>)
,<datum> => (unquote <datum>)
,@<datum> => (unquote-splicing <datum>)
scheme lists as haskell lists rather than as dotted-pairs, it's slightly
tricky to get the case of (a . (b . (c . ()))) and similar forms to come
out right; however, that is explicitly described as exactly identical to
correctly.
except that R6RS says access times are generally faster than for lists.
It would seem that haskell Arrays would be the natural way to go, but
better documented, and pretty close to what we want. Access times aren't
tree. That should be fast enough for the moment; if it becomes an issue,
I can always change later. Data.IntMaps are extensible, so I could in
principle have extensible vectors, which would mean I'd not need to store
the length, but bounds-checked arrays seem like a nice feature to have;
I can add an explicit grow-vector routine, which as a result of the
extensibility of Data.IntMaps will be very easy to write.
it's just a little more lenient than only and exactly a number: allow
whitespace on either side, that doesn't harm anything and seems polite | Copyright 2008 < >
Portions of this were derived from haskell
tutorial " Write yourself a scheme in 48 hours " and are thus
Copyright ( but there is n't much of his stuff left ) .
This file is part of haskeem .
haskeem is free software ; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
( at your option ) any later version .
haskeem is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details .
You should have received a copy of the GNU General Public License
along with ; if not , write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
$ I d : parser.hs , v 1.17 2010 - 01 - 05 05:23:34 uwe Exp $
Portions of this were derived from Jonathan Tang's haskell
tutorial "Write yourself a scheme in 48 hours" and are thus
Copyright Jonathan Tang (but there isn't much of his stuff left).
This file is part of haskeem.
haskeem is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
haskeem is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with haskeem; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
$Id: parser.hs,v 1.17 2010-01-05 05:23:34 uwe Exp $ -}
module FrameworkHs.SExpReader.Parser (readExpr, readExprList, readNumber, specialSymbolChars, lexFile) where
import Prelude
import Data.Char
import Data.Ratio
import Text.ParserCombinators.Parsec as TPCP hiding (spaces)
import Control.Monad.Error as CME
import qualified Data.IntMap as DIM
import Control.Monad (mapM_)
import System.Environment
import FrameworkHs.SExpReader.LispData
lexFile :: FilePath -> IO (ThrowsError [LispVal])
lexFile f = do
c <- readFile f
return $ readExprList c
Parsers for the various kinds of LispVal
hashbang :: Parser Char
hashbang = char '#' >> char '!' >> many (noneOf "\r\n") >> return ' '
comment :: Parser Char
comment =
do char ';'
many (noneOf "\r\n")
return ' '
spaces :: Parser ()
spaces = skipMany1 (comment <|> space)
specialSymbolChars :: String
specialSymbolChars = "!$%&*+-/:<=>?@^_~."
symbol :: Parser Char
symbol = oneOf specialSymbolChars
controlChar :: Parser Char
controlChar =
do char '^'
c <- oneOf (['A' .. 'Z'] ++ "[\\]^_")
return (chr (ord c + 1 - ord 'A'))
namedChar :: Parser Char
namedChar =
do name <- string "alarm"
<|> string "backspace"
<|> string "delete"
<|> string "esc"
<|> string "linefeed"
<|> TPCP.try (string "newline")
<|> string "nul"
<|> string "page"
<|> string "return"
<|> string "space"
<|> string "tab"
<|> string "vtab"
case name of
"nul" -> return (chr 0)
"alarm" -> return (chr 7)
"backspace" -> return (chr 8)
"tab" -> return '\t'
"linefeed" -> return '\n'
"newline" -> return '\n'
"vtab" -> return (chr 11)
"page" -> return (chr 12)
"return" -> return '\r'
"esc" -> return (chr 27)
"space" -> return ' '
"delete" -> return (chr 127)
_ -> progError "namedChar"
parseChar :: Parser LispVal
parseChar =
do char '#'
char '\\'
c <- TPCP.try controlChar <|> TPCP.try namedChar <|> anyChar
return (Char c)
escChar :: Parser Char
escChar =
do char '\\'
c <- anyChar
return (case c of
'a' -> chr 7
'b' -> chr 8
't' -> '\t'
'n' -> '\n'
'v' -> chr 11
'f' -> chr 12
'r' -> '\r'
_ -> c)
parseBool :: Parser LispVal
parseBool =
do char '#'
v <- oneOf "tTfF"
return (case v of
't' -> lispTrue
'T' -> lispTrue
'f' -> lispFalse
'F' -> lispFalse
_ -> progError "parseBool")
parseString :: Parser LispVal
parseString =
do char '"'
x <- many (escChar <|> noneOf "\"")
char '"'
return (String x)
parseSymbol :: Parser LispVal
parseSymbol =
do first <- letter <|> symbol
rest <- many (letter <|> digit <|> symbol)
return (Symbol (first:rest))
readBaseInt :: Integer -> String -> Integer
readBaseInt b s = foldl ma 0 s
where ma v1 v2 = b*v1 + toInteger (digitToInt v2)
readBaseFrac :: Integer -> String -> Double
readBaseFrac _ [] = 0.0
readBaseFrac b s = r * foldr ma 0 s where
r = 1.0 / fromInteger b
ma v1 v2 = fromIntegral (digitToInt v1) + r*v2
parseHdr :: Parser (Char, Integer)
parseHdr =
do b <- option 'd' (char '#' >> oneOf "bodxBODX")
s <- option '+' (oneOf "+-")
let base = case b of
'b' -> 2
'B' -> 2
'o' -> 8
'O' -> 8
'd' -> 10
'D' -> 10
'x' -> 16
'X' -> 16
_ -> progError "parseHdr"
return (s, base)
baseDigits :: Integer -> String
baseDigits 2 = "01"
baseDigits 8 = "01234567"
baseDigits 10 = "0123456789"
baseDigits 16 = "0123456789abcdefABCDEF"
baseDigits _ = progError "baseDigits"
int :: String
int = "int"
in bases 2 , 8 , and 16 as well as 10 is an extension of R6RS .
Parse first alternative for floating - point numbers : \d+(\.\d * ) ?
parseF1 :: Integer -> Parser (String,String)
parseF1 b =
do ip <- many1 (oneOf (baseDigits b))
fp <- option int (char '.' >> many (oneOf (baseDigits b)))
return (ip,fp)
Parse second alternative for floating - point numbers : \.\d+
parseF2 :: Integer -> Parser (String,String)
parseF2 b =
do char '.'
fp <- many1 (oneOf (baseDigits b))
return ("0",fp)
parseExp :: Integer -> Parser Integer
parseExp b =
do oneOf (if b == 16 then "xX" else "eExX")
s <- option '+' (oneOf "+-")
num <- many1 (oneOf (baseDigits b))
let e = readBaseInt b num
return (if s == '-' then (-e) else e)
powi :: Integer -> Integer -> Integer
powi b e | e == 0 = 1
| e < 0 = error "negative exponent in powi"
| even e = powi (b*b) (e `quot` 2)
| otherwise = b * powi b (e - 1)
pow :: Integer -> Integer -> Double
pow b e =
if e >= 0 then fromInteger (powi b e) else recip (fromInteger (powi b (-e)))
parseIntOrFlt :: Parser LispVal
parseIntOrFlt =
do (s, b) <- parseHdr
(ip, fp) <- parseF1 b <|> parseF2 b
e <- option 0 (parseExp b)
let fpi = if fp == int then "0" else fp
vf = pow b e * (fromInteger (readBaseInt b ip) + readBaseFrac b fpi)
vi = powi b e * readBaseInt b ip
if fp == int && e >= 0
then return (IntNumber (if s == '-' then (-vi) else vi))
else return (FltNumber (if s == '-' then (-vf) else vf))
Parse a rational number written as numerator / denominator . This parser
and rational not - a - number : + infinity is written as 1/0 , -infinity as
-1/0 , and not - a - number as 0/0 . That 's an incompatible extension of R6RS .
parseRat :: Parser LispVal
parseRat =
do (s, b) <- parseHdr
nstr <- many1 (oneOf (baseDigits b))
char '/'
dstr <- many1 (oneOf (baseDigits b))
let num = readBaseInt b nstr
den = readBaseInt b dstr
ns = if s == '-' then (-num) else num
val = if den /= 0
then ns % den
else if ns > 0
then myRatPInf
else if ns < 0
then myRatNInf
else myRatNaN
if denominator val == 1
then return (IntNumber (numerator val))
else return (RatNumber val)
a couple of special floating - point numbers mandated by R6RS
parseNaNInf :: Parser LispVal
parseNaNInf =
do val <- TPCP.try (string "+nan.0")
<|> TPCP.try (string "-nan.0")
<|> TPCP.try (string "+inf.0")
<|> TPCP.try (string "-inf.0")
case val of
"+nan.0" -> return (FltNumber myFltNaN)
"-nan.0" -> return (FltNumber myFltNaN)
"+inf.0" -> return (FltNumber myFltPInf)
"-inf.0" -> return (FltNumber myFltNInf)
_ -> progError "parseNaNInf"
parseNumber :: Parser LispVal
parseNumber = TPCP.try parseNaNInf <|> TPCP.try parseRat <|> parseIntOrFlt
Parsers for the abbreviations for the various kinds of quoting entities :
parseQQ :: Parser LispVal
parseQQ =
do char '`'
x <- parseExpr
return (List [Symbol "quasiquote", x])
parseQ :: Parser LispVal
parseQ =
do char '\''
x <- parseExpr
return (List [Symbol "quote", x])
parseUQ :: Parser LispVal
parseUQ =
do char ','
x <- parseExpr
return (List [Symbol "unquote", x])
parseUQS :: Parser LispVal
parseUQS =
do char ','
char '@'
x <- parseExpr
return (List [Symbol "unquote-splicing", x])
parseQuoted :: Parser LispVal
parseQuoted = TPCP.try parseUQS
<|> TPCP.try parseUQ
<|> TPCP.try parseQQ
<|> parseQ
Parser for a dotted - list or a regular list . Due to the representation of
the list ( a b c ) according to the RnRS standard , so it has to be treated
parseList :: String -> Parser LispVal
parseList [o,c] =
do char o
skipMany space
hd <- sepEndBy parseExpr spaces
tl <- option (List []) (TPCP.try (char '.' >> spaces >> parseExpr))
skipMany space
char c
if isl tl
then return (List (hd ++ unpl tl))
else if isdl tl
then return (DottedList (hd ++ unpdlh tl) (unpdlt tl))
else return (DottedList hd tl)
where isl (List ((Symbol sym):_)) =
not (sym == "unquote" || sym == "unquote-splicing")
isl (List _) = True
isl _ = False
unpl (List l) = l
unpl _ = progError "parseDottedList/unpl"
isdl (DottedList _ _) = True
isdl _ = False
unpdlh (DottedList h _) = h
unpdlh _ = progError "parseDottedList/unpdlh"
unpdlt (DottedList _ t) = t
unpdlt _ = progError "parseDottedList/unpdlt"
parseParList :: Parser LispVal
parseParList = parseList "()"
parseBraList :: Parser LispVal
parseBraList = parseList "[]"
Parser for a vector : this is similar to a list ( but not a dotted - list ) ,
the documentation for those is ... well , crappy . Data . IntMap is much
O(1 ) , but they are O(min(n , W ) ) , where n is the size of the vector and
W is the size in bits of a machine word : either 32 or 64 usually . This
is due to the implementation of Data . IntMap : internally , it 's a PATRICIA
-- | Parse a vector literal "#( ... )" into a 'Vector' holding its length
-- and a Data.IntMap from element position (0-based) to element.
parseVector :: Parser LispVal
parseVector = do
  _ <- char '#'
  _ <- char '('
  skipMany space
  vals <- sepBy parseExpr spaces
  skipMany space
  _ <- char ')'
  -- zip [0..] produces the same ascending (index, value) pairs the old
  -- addkey helper built, so fromAscList's precondition still holds.
  return (Vector (toInteger (length vals))
                 (DIM.fromAscList (zip [0 ..] vals)))
-- | Parse any single expression. Order matters: alternatives that can
-- consume input before failing are wrapped in 'TPCP.try' so the next
-- alternative still sees the whole input (e.g. '#' begins booleans,
-- characters and vectors; ',' begins both unquote forms).
parseExpr :: Parser LispVal
parseExpr = parseString
        <|> TPCP.try parseBool
        <|> TPCP.try parseChar
        <|> TPCP.try parseNumber
        <|> TPCP.try parseVector
        <|> TPCP.try parseSymbol
        <|> parseQuoted
        <|> parseParList
        <|> parseBraList
-- | Run a parser over an input string, lifting a parse failure into the
-- ThrowsError monad as a 'Parser' error.
readOrThrow :: Parser a -> String -> ThrowsError a
readOrThrow parser input =
  either (throwError . Parser) return (parse parser "lisp" input)
-- | Parse exactly one expression from the given string.
readExpr :: String -> ThrowsError LispVal
readExpr = readOrThrow parseExpr
-- | Parse a whole program: an optional leading hashbang line, optional
-- leading whitespace, then expressions terminated by whitespace or eof.
readExprList :: String -> ThrowsError [LispVal]
readExprList =
  readOrThrow (optional hashbang >> skipMany spaces >>
               endBy parseExpr (spaces <|> eof))
-- | Parser for just numbers, for internally converting strings to numbers;
-- | Accept exactly one number, optionally surrounded by whitespace, and
-- nothing else ('eof' enforces that the whole input was consumed).
parseJustNumber :: Parser LispVal
parseJustNumber = do
  skipMany space
  n <- parseNumber
  skipMany space
  eof
  return n
-- | Convert a string to a number LispVal; any parse failure yields the
-- Scheme false value instead of an error (internal string->number use).
readNumber :: String -> ThrowsError LispVal
readNumber input =
  case parse parseJustNumber "number" input of
    Right val -> return val
    Left _    -> return lispFalse
|
4ae63423217487315b194b53aee92520fbda37b2b00de153d62dd6cfc3dbfe2d | jeffshrager/biobike | package.lisp | ;;; -*- mode: Lisp; Syntax: Common-Lisp; Package: user; -*-
(in-package :cl-user)
;;; +=========================================================================+
;;; | Copyright (c) 2002, 2003, 2004 JP Massar, Jeff Shrager, Mike Travers    |
;;; |                                                                         |
;;; | Permission is hereby granted, free of charge, to any person obtaining   |
;;; | a copy of this software and associated documentation files (the         |
;;; | "Software"), to deal in the Software without restriction, including     |
;;; | without limitation the rights to use, copy, modify, merge, publish,     |
;;; | distribute, sublicense, and/or sell copies of the Software, and to      |
;;; | permit persons to whom the Software is furnished to do so, subject to   |
;;; | the following conditions:                                               |
;;; |                                                                         |
;;; | The above copyright notice and this permission notice shall be included |
;;; | in all copies or substantial portions of the Software.                  |
;;; |                                                                         |
;;; | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,         |
;;; | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF      |
;;; | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  |
;;; | IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY    |
;;; | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,    |
;;; | TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE       |
;;; | SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.                  |
;;; +=========================================================================+
;;; Packages & miscellaneous setup for the Weblistener.
;;; Ensure the AllegroServe web server -- and, on Allegro CL only, the
;;; SMTP client -- are loaded at compile, load, and execution time.
(eval-when (:compile-toplevel :load-toplevel :execute)
  (require :aserve)
  ;; #+:allegro restricts the SMTP require to Allegro CL.
  #+:allegro
  (require :smtp)
  )
;;; The main Weblistener package (nickname WB). The export list is the
;;; public API; the :shadowing-import-from clause is computed at READ
;;; time (#.) so every configuration variable named in
;;; *configuration-variables* is shadowing-imported from CL-USER.
;;; NOTE(review): config-varname and *configuration-variables* are
;;; defined elsewhere and must exist when this file is read.
(defpackage :weblistener
  (:nicknames :wb)
  (:use
   :cl-user
   :frames
   :genericdb
   :net.aserve
   :net.html.generator
   :utils
   :wlisp)
  (:export
   "START-BIOWEBLISTENER" "START-WEBLISTENER"
   "STOP-BIOWEBLISTENER" "STOP-WEBLISTENER"
   "PARSE-XML" "WEB-PAGE-CONTENTS" "WITH-OPEN-WEB-PAGE"
   "WEBPAGE" "*USERNAME*" "*SESSIONID*")
  #.`(:shadowing-import-from :cl-user
      cl-user::in
      cl-user::os?
      ,@(mapcar 'config-varname *configuration-variables*)))
;;; Per-request special variable; its docstring is assembled at read
;;; time (#.) with utils:one-string-nl.
(defparameter wb:*username* nil
  #.(utils:one-string-nl
     "Login name symbol and package name symbol of user currently executing."
     "Everything keys off this symbol and it needs to be rebound in every"
     "toplevel published URL. Its value is obtained from the PKG argument"
     "passed through to (almost) every URL."
     ))
;;; Per-session special variable; docstring assembled at read time.
(defparameter wb:*sessionid* nil
  #.(utils:one-string-nl
     "Session ID of user's current session. This replaces wb:*username*"
     "as the symbol many things key off of. Its value is obtained from"
     "the PKG argument passed through to (almost) every URL, and"
     "the value of wb:*username* is obtained now by lookup from"
     "this variable. *** This is not implemented yet. *** "
     ))
;; This is bound to T whenever the VPL itself receives a message from the
;; client and starts to process it.
(defvar wb::*vpl-executing?* nil)
;; This is bound to T when the VPL is executing user code.
(defvar wb::*vpl-evaluating?* nil)
;; What used to be VISTOOLS
;; Package for web-user code; pulls in the Weblistener's exported API.
(defpackage :webuser
  (:use :wlisp :utils :weblistener))
;; Package for published protocols, nicknamed PP.
(defpackage published-protocols
  (:nicknames :pp)
  (:use :wlisp :webuser)
  )
;; For persistent '$$'
(defpackage $$)
| null | https://raw.githubusercontent.com/jeffshrager/biobike/5313ec1fe8e82c21430d645e848ecc0386436f57/BioLisp/Webdefs/package.lisp | lisp | -*- mode: Lisp; Syntax: Common-Lisp; Package: user; -*-
+=========================================================================+
| |
| Permission is hereby granted, free of charge, to any person obtaining |
| a copy of this software and associated documentation files (the |
| without limitation the rights to use, copy, modify, merge, publish, |
| the following conditions: |
| |
| The above copyright notice and this permission notice shall be included |
| |
| EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF |
| MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. |
| IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY |
| TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE |
| SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. |
+=========================================================================+
client and starts to process it.
What used to be VISTOOLS |
(in-package :cl-user)
| Copyright ( c ) 2002 , 2003 , 2004 JP , , |
| " Software " ) , to deal in the Software without restriction , including |
| distribute , sublicense , and/or sell copies of the Software , and to |
| permit persons to whom the Software is furnished to do so , subject to |
| in all copies or substantial portions of the Software . |
| THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , |
| CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT , |
Packages & miscellaneous setup for
(eval-when (:compile-toplevel :load-toplevel :execute)
(require :aserve)
#+:allegro
(require :smtp)
)
(defpackage :weblistener
(:nicknames :wb)
(:use
:cl-user
:frames
:genericdb
:net.aserve
:net.html.generator
:utils
:wlisp)
(:export
"START-BIOWEBLISTENER" "START-WEBLISTENER"
"STOP-BIOWEBLISTENER" "STOP-WEBLISTENER"
"PARSE-XML" "WEB-PAGE-CONTENTS" "WITH-OPEN-WEB-PAGE"
"WEBPAGE" "*USERNAME*" "*SESSIONID*")
#.`(:shadowing-import-from :cl-user
cl-user::in
cl-user::os?
,@(mapcar 'config-varname *configuration-variables*)))
(defparameter wb:*username* nil
#.(utils:one-string-nl
"Login name symbol and package name symbol of user currently executing."
"Everything keys off this symbol and it needs to be rebound in every"
"toplevel published URL. Its value is obtained from the PKG argument"
"passed through to (almost) every URL."
))
(defparameter wb:*sessionid* nil
#.(utils:one-string-nl
"Session ID of user's current session. This replaces wb:*username*"
"as the symbol many things key off of. Its value is obtained from"
"the PKG argument passed through to (almost) every URL, and"
"the value of wb:*username* is obtained now by lookup from"
"this variable. *** This is not implemented yet. *** "
))
This is bound to T whenever the VPL itself receives a message from the
(defvar wb::*vpl-executing?* nil)
This is bound to T when the VPL is executing user code .
(defvar wb::*vpl-evaluating?* nil)
(defpackage :webuser
(:use :wlisp :utils :weblistener))
(defpackage published-protocols
(:nicknames :pp)
(:use :wlisp :webuser)
)
For persistent ' '
(defpackage $$)
|
68a564de130a70b9255fbce193197604b032ca66858f1e7f8152eb1568544e08 | mpickering/apply-refact | Structure17.hs | {-# LANGUAGE BangPatterns #-}
foo = case v of !True -> x
| null | https://raw.githubusercontent.com/mpickering/apply-refact/a4343ea0f4f9d8c2e16d6b16b9068f321ba4f272/tests/examples/Structure17.hs | haskell | # LANGUAGE BangPatterns # | foo = case v of !True -> x
|
d09514b918eef6fa26eb8179aec0eccc9652f1f2cbca682e3a6e374c361cb4d8 | stefanrusek/spdy_proxy | http_proxy.erl | -module(http_proxy).
-include("deps/espdy/include/espdy.hrl").
-export([preprocess_opts/1]).
-export([init/3, closed/2, headers_updated/3, handle_data/2, handle_info/2]). %% API
-define(HOST_RE, "(:\\d+)?$").
%% Precompile the host-port-stripping regex once and stash the COMPILED
%% pattern in the option list under http_proxy_host_re, so init/3 does
%% not recompile it per stream.
%% BUG FIX: re:compile/1 returns {ok, MP}; the original stored that whole
%% tuple, which is not a valid pattern argument for the re:replace/3 call
%% in init/3 (badarg). Unwrap the mp() before storing it.
preprocess_opts(Opts) ->
    {ok, HostRe} = re:compile(?HOST_RE),
    [{http_proxy_host_re, HostRe} | Opts].
%% header name for spdy version
%% Host header key differs between SPDY v2 ("host") and v3 (":host").
%% Returns undefined when the header is absent.
host(2, Hs) -> proplists:get_value(<<"host">>, Hs);
host(3, Hs) -> proplists:get_value(<<":host">>, Hs).
%% Request-method header key per SPDY version (v3 prefixes with ":").
method(2, Hs) -> proplists:get_value(<<"method">>, Hs);
method(3, Hs) -> proplists:get_value(<<":method">>, Hs).
%% Request-path header key per SPDY version (v2 calls it "url").
path(2, Hs) -> proplists:get_value(<<"url">>, Hs);
path(3, Hs) -> proplists:get_value(<<":path">>, Hs).
%% Build the response status header pair for the given SPDY version.
status(2, Value) -> {<<"status">>, Value};
status(3, Value) -> {<<":status">>, Value}.
%% Dual-purpose: with a header LIST, look up the HTTP-version header;
%% with any other value, build the version header pair for a response.
version(2, Hs) when is_list(Hs) -> proplists:get_value(<<"version">>, Hs);
version(3, Hs) when is_list(Hs) -> proplists:get_value(<<":version">>, Hs);
version(2, Value) -> {<<"version">>, Value};
version(3, Value) -> {<<":version">>, Value}.
%% Render {Name, Value} header pairs as "Name: Value\n" lines and
%% flatten the resulting iolist into a single binary.
format_http_headers(Headers) ->
    Render = fun({Name, Value}) -> [Name, <<": ">>, Value, <<"\n">>] end,
    binary:list_to_bin(lists:map(Render, Headers)).
%% espdy stream callback: a new SPDY stream (one HTTP request) opened.
%% Translates the SPDY request headers into an HTTP/1.x request and sends
%% it to the configured upstream proxy over a fresh TCP socket. Returns
%% {ok, noreply, StreamState} on success; the response is produced
%% asynchronously in handle_info/2. Connect failure refuses the stream;
%% send failure closes the socket and reports an internal error.
init(_Id, Headers, SpdyOpts) ->
    SpdyVersion = proplists:get_value(spdy_version, SpdyOpts),
    HostAndPort = host(SpdyVersion, Headers),
    %% Strip any trailing ":port" from the host header value.
    %% NOTE(review): the value stored under http_proxy_host_re by
    %% preprocess_opts/1 must be a compiled mp() (or a plain pattern);
    %% re:replace/3 rejects the raw {ok, MP} tuple from re:compile/1.
    Host = lists:flatten(re:replace(HostAndPort, proplists:get_value(http_proxy_host_re, SpdyOpts, ?HOST_RE), "")),
    Method = method(SpdyVersion, Headers),
    Path = path(SpdyVersion, Headers),
    HttpVersion = version(SpdyVersion, Headers),
    ProxyPort = proplists:get_value(port, SpdyOpts),
    ProxyServer = proplists:get_value(server, SpdyOpts),
    %% Line-oriented, {active, once} socket: the upstream status line and
    %% headers are consumed one line at a time in handle_info/2.
    case gen_tcp:connect(ProxyServer, ProxyPort, [{mode, list}, {active, once}, {packet, line}]) of
        {ok, Sock} ->
            SpdyHeaderValue = list_to_binary(io_lib:format("SPDY/~b", [SpdyVersion])),
            %% Rewrite hop-by-hop headers: force Connection: close, tag
            %% the request with x-spdy, pin accept-encoding.
            NewHeaders = [
                {<<"host">>, Host},
                {<<"connection">>, <<"close">>},
                {<<"x-spdy">>, SpdyHeaderValue},
                {<<"accept-encoding">>, <<"gzip, deflate">>}
                | proplists:delete(<<"accept-encoding">>, proplists:delete(<<"host">>, Headers))],
            FirstPacket = io_lib:format("~s ~s ~s~n~s~n", [Method, Path, HttpVersion, format_http_headers(NewHeaders)]),
            case gen_tcp:send(Sock, FirstPacket) of
                ok -> {ok, noreply, [{spdy_version, SpdyVersion}, {sock, Sock}, {response_headers, []}]};
                _ ->
                    gen_tcp:close(Sock),
                    {error, ?INTERNAL_ERROR}
            end;
        _ -> {error, ?REFUSED_STREAM}
    end.
%% Called when the SPDY session terminates
%% espdy callback: the SPDY stream/session went away. Release the
%% upstream HTTP socket, if one was ever opened, and log the reason.
closed(Reason, State) ->
    close_upstream(proplists:get_value(sock, State)),
    ?LOG("CLOSED! ~p\n", [Reason]).

%% No socket was stored yet -> nothing to release.
close_upstream(undefined) -> ok;
close_upstream(Sock) -> gen_tcp:close(Sock).
%% Called when a HEADERS frame updated the headers
%% espdy callback: a HEADERS frame changed the stream's header set.
%% The proxy only logs the delta; the merged set is not used here.
headers_updated(Delta, _NewMergedHeaders, State) ->
    ?LOG("headers updated with ~p", [Delta]),
    {ok, State}.
%% Called when we recieve a DATA frame
%% espdy callback: DATA frame received from the SPDY client; relay the
%% payload to the upstream HTTP socket unchanged (send result ignored,
%% matching the original best-effort behaviour).
handle_data(Chunk, State) ->
    ?LOG("DATA on stream ~p", [Chunk]),
    Upstream = proplists:get_value(sock, State),
    gen_tcp:send(Upstream, Chunk),
    {ok, State}.
%% Messages from the upstream TCP socket: one header line at a time while
%% the socket is in {packet, line} mode, then raw binary chunks once the
%% body starts.
%%
%% A bare CRLF ends the header block: treat it like the empty line.
handle_info({tcp, Sock, "\r\n"}, State) ->
    handle_info({tcp, Sock, ""}, State);
%% End of upstream response headers: emit SYN_REPLY with the accumulated
%% (reverse-ordered, duplicate-merged) headers and switch the socket to
%% raw binary streaming for the body.
handle_info({tcp, Sock, ""}, State) ->
    OutHeaders = join_duplicates(process_x_spdy_headers(proplists:get_value(response_headers, State), State)),
    ?LOG("Response Headers ~p~n", [OutHeaders]),
    espdy_stream:send_frame(self(), #spdy_syn_reply{ headers = OutHeaders }),
    inet:setopts(Sock, [{packet, raw}, {active, true}, {mode, binary}]),
    {noreply, State};
%% A status or header line from the upstream (still in line mode).
handle_info({tcp, Sock, Line}, State) when is_list(Line) ->
    Headers = case proplists:get_value(response_headers, State) of
        %% First line: the HTTP status line. Any other HTTP version
        %% crashes the stream process (deliberate let-it-crash).
        [] ->
            SpdyVersion = proplists:get_value(spdy_version, State),
            SpdyHeaderValue = list_to_binary(io_lib:format("SPDY/~b", [SpdyVersion])),
            case Line of
                "HTTP/1.1 " ++ Status1 -> [status(SpdyVersion, strip_crlf(Status1)), version(SpdyVersion, <<"HTTP/1.1">>), {<<"x-spdy-response">>, SpdyHeaderValue}];
                "HTTP/1.0 " ++ Status2 -> [status(SpdyVersion, strip_crlf(Status2)), version(SpdyVersion, <<"HTTP/1.0">>), {<<"x-spdy-response">>, SpdyHeaderValue}]
            end;
        PL ->
            case Line of
                %% Leading space = obsolete HTTP header folding: append the
                %% continuation to the most recently accumulated value.
                %% (Tab-folded continuations are not handled -- verify.)
                " " ++ Continue ->
                    [{K, V} | Tail] = PL,
                    [{K, << V/binary, <<" ">>/binary, (clean_spaces(Continue))/binary >>} | Tail];
                %% 'Line' is already bound, so this pattern re-matches the
                %% same value: effectively the catch-all branch.
                Line ->
                    [split_header(Line) | PL]
            end
    end,
    %% Re-arm {active, once} to receive the next header line.
    inet:setopts(Sock, [{active, once}]),
    {noreply, [{response_headers, Headers} | proplists:delete(response_headers, State)]};
%% Body bytes (socket now in binary mode): forward as SPDY DATA.
handle_info({tcp, _Sock, Bin}, State) ->
    espdy_stream:send_data(self(), Bin),
    {noreply, State};
%% Upstream closed or errored: finish the SPDY data stream.
handle_info({tcp_closed, _Sock}, State) ->
    espdy_stream:send_data_fin(self()),
    {noreply, State};
handle_info({tcp_error, _Sock, _Err}, State) ->
    espdy_stream:send_data_fin(self()),
    {noreply, State};
%% Drain anything unexpected, logging it, so the mailbox cannot grow.
handle_info(_M, State) ->
    ?LOG("~p", [_M]),
    {noreply, State}.
%% Entry point: scan the response headers for SPDY control headers,
%% starting with an empty accumulator (see process_x_spdy_headers/3).
process_x_spdy_headers(Headers, State) ->
    process_x_spdy_headers(Headers, State, []).
%% Walk the upstream response headers, accumulating (then reversing) them,
%% and intercept the control header "x-spdy-max-streams": when its value
%% parses as an integer N, push a SETTINGS frame advertising N concurrent
%% streams to the SPDY client. The control header itself is dropped from
%% the response either way; all other headers pass through untouched.
process_x_spdy_headers([], _State, Acc) ->
    lists:reverse(Acc);
process_x_spdy_headers([{<<"x-spdy-max-streams">>, Value} | Headers], State, Acc) ->
    %% try/catch instead of old-style `catch`: binary_to_integer/1 can only
    %% fail with badarg, so this is behaviour-equivalent while no longer
    %% swallowing unrelated throws/exits or losing stacktraces.
    try binary_to_integer(Value) of
        N ->
            Settings = [#spdy_setting_pair{id=?SETTINGS_MAX_CONCURRENT_STREAMS, flags=?SETTINGS_FLAG_PERSIST_VALUE, value=N} ],
            F = #spdy_settings{settings=Settings},
            espdy_stream:send_frame(self(), F)
    catch
        error:badarg -> ignored
    end,
    process_x_spdy_headers(Headers, State, Acc);
process_x_spdy_headers([Pair | Headers], State, Acc) ->
    process_x_spdy_headers(Headers, State, [Pair | Acc]).
%% Split a raw "Name: Value" header line into {LowercasedNameBin, ValueBin}.
%% Thin entry point for split_header/2, starting in `start` (name) mode.
split_header(Line) ->
    split_header(Line, {start, []}).
%% Strip leading spaces and trailing CR/LF from a header continuation
%% line. Reuses split_header/2 in `tail` mode; the accumulated "name"
%% (here the empty list) is discarded.
clean_spaces(Line) ->
    {_, Cleaned} = split_header(Line, {tail, []}),
    Cleaned.
%% Collapse repeated header names into a single entry whose value is the
%% individual values joined with a NUL byte (the SPDY multi-value
%% encoding). The accumulator is built by prepending, so distinct keys
%% end up in reverse input order, and a merged key moves to the front --
%% identical to the original behaviour.
join_duplicates(Headers) ->
    join_duplicates(Headers, []).

join_duplicates([], Acc) ->
    Acc;
join_duplicates([{Key, Value} | Rest], Acc) ->
    Merged =
        case proplists:get_value(Key, Acc) of
            undefined ->
                [{Key, Value} | Acc];
            Previous ->
                [{Key, <<Previous/binary, 0, Value/binary>>}
                 | proplists:delete(Key, Acc)]
        end,
    join_duplicates(Rest, Merged).
%% Character-level state machine over a header line.
%% {start, NameAcc}: accumulate the (reversed) header name until ':',
%% then reverse, lowercase and binarise the name and switch to `tail`.
%% CLAUSE ORDER MATTERS: the [C | Rest] clause matches ANY character
%% (including spaces) while in `start` mode, so the space-skipping clause
%% below it only ever fires in `tail` mode, trimming spaces after ':'.
split_header(":" ++ Value, {start, Name}) -> split_header(Value, {tail, list_to_binary(string:to_lower(lists:reverse(Name)))});
split_header([C | Rest], {start, Name}) -> split_header(Rest, {start, [C | Name]});
split_header(" " ++ Value, Acc) -> split_header(Value, Acc);
%% `tail` mode: the rest of the line (minus CR/LF) is the header value.
split_header(Value, {tail, Name}) -> {Name, strip_crlf(Value)}.
%% Drop every carriage return and line feed from a character list and
%% return the remaining characters, in order, as a binary. (Note: like
%% the original, this removes CR/LF anywhere in the line, not just at
%% the end.)
strip_crlf(Line) ->
    list_to_binary([C || C <- Line, C =/= $\r, C =/= $\n]).
| null | https://raw.githubusercontent.com/stefanrusek/spdy_proxy/86eb796a7e48df5a2d56fdcae0eaa827b6440056/src/http_proxy.erl | erlang | API
Called when a HEADERS frame updated the headers
Called when we recieve a DATA frame | -module(http_proxy).
-include("deps/espdy/include/espdy.hrl").
-export([preprocess_opts/1]).
-define(HOST_RE, "(:\\d+)?$").
preprocess_opts(Opts) ->
[{http_proxy_host_re, re:compile(?HOST_RE)} | Opts].
header name for spdy version
host(_Version = 2, Headers) -> proplists:get_value(<<"host">>, Headers);
host(_Version = 3, Headers) -> proplists:get_value(<<":host">>, Headers).
method(_Version = 2, Headers) -> proplists:get_value(<<"method">>, Headers);
method(_Version = 3, Headers) -> proplists:get_value(<<":method">>, Headers).
path(_Version = 2, Headers) -> proplists:get_value(<<"url">>, Headers);
path(_Version = 3, Headers) -> proplists:get_value(<<":path">>, Headers).
status(_Version = 2, Value) -> {<<"status">>, Value};
status(_Version = 3, Value) -> {<<":status">>, Value}.
version(_Version = 2, Headers) when is_list(Headers) -> proplists:get_value(<<"version">>, Headers);
version(_Version = 3, Headers) when is_list(Headers) -> proplists:get_value(<<":version">>, Headers);
version(_Version = 2, Value) -> {<<"version">>, Value};
version(_Version = 3, Value) -> {<<":version">>, Value}.
format_http_headers(Headers) ->
binary:list_to_bin([[K, <<": ">>, V, <<"\n">>] || {K, V} <- Headers]).
init(_Id, Headers, SpdyOpts) ->
SpdyVersion = proplists:get_value(spdy_version, SpdyOpts),
HostAndPort = host(SpdyVersion, Headers),
Host = lists:flatten(re:replace(HostAndPort, proplists:get_value(http_proxy_host_re, SpdyOpts, ?HOST_RE), "")),
Method = method(SpdyVersion, Headers),
Path = path(SpdyVersion, Headers),
HttpVersion = version(SpdyVersion, Headers),
ProxyPort = proplists:get_value(port, SpdyOpts),
ProxyServer = proplists:get_value(server, SpdyOpts),
case gen_tcp:connect(ProxyServer, ProxyPort, [{mode, list}, {active, once}, {packet, line}]) of
{ok, Sock} ->
SpdyHeaderValue = list_to_binary(io_lib:format("SPDY/~b", [SpdyVersion])),
NewHeaders = [
{<<"host">>, Host},
{<<"connection">>, <<"close">>},
{<<"x-spdy">>, SpdyHeaderValue},
{<<"accept-encoding">>, <<"gzip, deflate">>}
| proplists:delete(<<"accept-encoding">>, proplists:delete(<<"host">>, Headers))],
FirstPacket = io_lib:format("~s ~s ~s~n~s~n", [Method, Path, HttpVersion, format_http_headers(NewHeaders)]),
case gen_tcp:send(Sock, FirstPacket) of
ok -> {ok, noreply, [{spdy_version, SpdyVersion}, {sock, Sock}, {response_headers, []}]};
_ ->
gen_tcp:close(Sock),
{error, ?INTERNAL_ERROR}
end;
_ -> {error, ?REFUSED_STREAM}
end.
Called when the SPDY session terminates
closed(Reason, State) ->
case proplists:get_value(sock, State) of
undefined -> ok;
Sock -> gen_tcp:close(Sock)
end,
?LOG("CLOSED! ~p\n",[Reason]).
headers_updated(_Delta, _NewMergedHeaders, State) ->
?LOG("headers updated with ~p",[_Delta]),
{ok, State}.
handle_data(Data, State) ->
?LOG("DATA on stream ~p",[Data]),
gen_tcp:send(proplists:get_value(sock, State), Data),
{ok, State}.
handle_info({tcp, Sock, "\r\n"}, State) ->
handle_info({tcp, Sock, ""}, State);
handle_info({tcp, Sock, ""}, State) ->
OutHeaders = join_duplicates(process_x_spdy_headers(proplists:get_value(response_headers, State), State)),
?LOG("Response Headers ~p~n", [OutHeaders]),
espdy_stream:send_frame(self(), #spdy_syn_reply{ headers = OutHeaders }),
inet:setopts(Sock, [{packet, raw}, {active, true}, {mode, binary}]),
{noreply, State};
handle_info({tcp, Sock, Line}, State) when is_list(Line) ->
Headers = case proplists:get_value(response_headers, State) of
[] ->
SpdyVersion = proplists:get_value(spdy_version, State),
SpdyHeaderValue = list_to_binary(io_lib:format("SPDY/~b", [SpdyVersion])),
case Line of
"HTTP/1.1 " ++ Status1 -> [status(SpdyVersion, strip_crlf(Status1)), version(SpdyVersion, <<"HTTP/1.1">>), {<<"x-spdy-response">>, SpdyHeaderValue}];
"HTTP/1.0 " ++ Status2 -> [status(SpdyVersion, strip_crlf(Status2)), version(SpdyVersion, <<"HTTP/1.0">>), {<<"x-spdy-response">>, SpdyHeaderValue}]
end;
PL ->
case Line of
" " ++ Continue ->
[{K, V} | Tail] = PL,
[{K, << V/binary, <<" ">>/binary, (clean_spaces(Continue))/binary >>} | Tail];
Line ->
[split_header(Line) | PL]
end
end,
inet:setopts(Sock, [{active, once}]),
{noreply, [{response_headers, Headers} | proplists:delete(response_headers, State)]};
handle_info({tcp, _Sock, Bin}, State) ->
espdy_stream:send_data(self(), Bin),
{noreply, State};
handle_info({tcp_closed, _Sock}, State) ->
espdy_stream:send_data_fin(self()),
{noreply, State};
handle_info({tcp_error, _Sock, _Err}, State) ->
espdy_stream:send_data_fin(self()),
{noreply, State};
handle_info(_M, State) ->
?LOG("~p", [_M]),
{noreply, State}.
process_x_spdy_headers(Headers, State) ->
process_x_spdy_headers(Headers, State, []).
process_x_spdy_headers([], _State, Acc) ->
lists:reverse(Acc);
process_x_spdy_headers([{<<"x-spdy-max-streams">>, Value} | Headers], State, Acc) ->
case catch binary_to_integer(Value) of
N when is_number(N) ->
Settings = [#spdy_setting_pair{id=?SETTINGS_MAX_CONCURRENT_STREAMS, flags=?SETTINGS_FLAG_PERSIST_VALUE, value=N} ],
F = #spdy_settings{settings=Settings},
espdy_stream:send_frame(self(), F);
_ -> ignored
end,
process_x_spdy_headers(Headers, State, Acc);
process_x_spdy_headers([Pair | Headers], State, Acc) ->
process_x_spdy_headers(Headers, State, [Pair | Acc]).
split_header(Line) ->
split_header(Line, {start, []}).
clean_spaces(Line) ->
{_, Cleaned} = split_header(Line, {tail, []}),
Cleaned.
join_duplicates(Headers) ->
join_duplicates(Headers, []).
join_duplicates([], Acc) -> Acc;
join_duplicates([{K, V} | Tail], Acc) ->
case proplists:get_value(K, Acc) of
undefined -> join_duplicates(Tail, [{K,V} | Acc]);
PrevV ->
join_duplicates(Tail,
[
{K, << PrevV/binary, 0, V/binary >>}
| proplists:delete(K, Acc)
])
end.
split_header(":" ++ Value, {start, Name}) -> split_header(Value, {tail, list_to_binary(string:to_lower(lists:reverse(Name)))});
split_header([C | Rest], {start, Name}) -> split_header(Rest, {start, [C | Name]});
split_header(" " ++ Value, Acc) -> split_header(Value, Acc);
split_header(Value, {tail, Name}) -> {Name, strip_crlf(Value)}.
strip_crlf(Line) -> strip_crlf(Line, []).
strip_crlf([], Acc) -> list_to_binary(lists:reverse(Acc));
strip_crlf("\r" ++ Line, Acc) -> strip_crlf(Line, Acc);
strip_crlf("\n" ++ Line, Acc) -> strip_crlf(Line, Acc);
strip_crlf([C | Line], Acc) -> strip_crlf(Line, [C | Acc]).
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.