_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
672d9c350fcae7707e0e75dd576ba6b73389e3cd1fe62fd77472588a63fc8357 | kupl/LearnML | original.ml | type formula =
| True
| False
| Not of formula
| AndAlso of (formula * formula)
| OrElse of (formula * formula)
| Imply of (formula * formula)
| Equal of (exp * exp)
and exp = Num of int | Plus of (exp * exp) | Minus of (exp * exp)
let rec eval (f : formula) : bool =
match f with
| True -> true
| False -> false
| Not a -> if a = True then false else true
| OrElse (a, b) -> if a = True || b = True then true else false
| AndAlso (a, b) -> if a = True && b = True then true else false
| Imply (a, b) -> if a = True && b = False then false else true
| Equal (a, b) -> if a = b then true else false
| null | https://raw.githubusercontent.com/kupl/LearnML/c98ef2b95ef67e657b8158a2c504330e9cfb7700/result/cafe2/formula/sub75/original.ml | ocaml | type formula =
| True
| False
| Not of formula
| AndAlso of (formula * formula)
| OrElse of (formula * formula)
| Imply of (formula * formula)
| Equal of (exp * exp)
and exp = Num of int | Plus of (exp * exp) | Minus of (exp * exp)
let rec eval (f : formula) : bool =
match f with
| True -> true
| False -> false
| Not a -> if a = True then false else true
| OrElse (a, b) -> if a = True || b = True then true else false
| AndAlso (a, b) -> if a = True && b = True then true else false
| Imply (a, b) -> if a = True && b = False then false else true
| Equal (a, b) -> if a = b then true else false
| |
55c8a3e9ac414a532f2987774ee4cd469ab7c286ff563793fe8c7d98e26cbed6 | digitallyinduced/ihp | Main.hs | module Main where
import IHP.Prelude
import IHP.Environment
import IHP.FrameworkConfig
import qualified IHP.Server
import IHP.RouterSupport
import IHP.ControllerPrelude
import IHP.Mail
--import IHP.GenericController
data DemoController = DemoAction deriving (Eq, Show, Data)
instance AutoRoute DemoController
instance InitControllerContext RootApplication
instance FrontController RootApplication where
controllers =
[ parseRoute @DemoController
, startPage DemoAction
]
instance Controller DemoController where
action DemoAction = renderPlain "Hello World!"
instance Worker RootApplication where
workers _ = []
config :: ConfigBuilder
config = do
option Development
option $ AppHostname "localhost"
main :: IO ()
main = IHP.Server.run config
| null | https://raw.githubusercontent.com/digitallyinduced/ihp/46c6c1299feb9a1361cd79a75c093df5e3819a24/Main.hs | haskell | import IHP.GenericController | module Main where
import IHP.Prelude
import IHP.Environment
import IHP.FrameworkConfig
import qualified IHP.Server
import IHP.RouterSupport
import IHP.ControllerPrelude
import IHP.Mail
data DemoController = DemoAction deriving (Eq, Show, Data)
instance AutoRoute DemoController
instance InitControllerContext RootApplication
instance FrontController RootApplication where
controllers =
[ parseRoute @DemoController
, startPage DemoAction
]
instance Controller DemoController where
action DemoAction = renderPlain "Hello World!"
instance Worker RootApplication where
workers _ = []
config :: ConfigBuilder
config = do
option Development
option $ AppHostname "localhost"
main :: IO ()
main = IHP.Server.run config
|
900cd63dcc67a2814615c464c1f98577837e1c2fa73f4432c0339e2842615a14 | shop-planner/shop3 | p15.lisp |
(IN-PACKAGE :SHOP-USER)
(DEFPROBLEM STRIPS-SAT-X-1
((SATELLITE SATELLITE0) (INSTRUMENT INSTRUMENT0)
(INSTRUMENT INSTRUMENT1) (SATELLITE SATELLITE1)
(INSTRUMENT INSTRUMENT2) (INSTRUMENT INSTRUMENT3)
(SATELLITE SATELLITE2) (INSTRUMENT INSTRUMENT4)
(INSTRUMENT INSTRUMENT5) (SATELLITE SATELLITE3)
(INSTRUMENT INSTRUMENT6) (SATELLITE SATELLITE4)
(INSTRUMENT INSTRUMENT7) (INSTRUMENT INSTRUMENT8)
(INSTRUMENT INSTRUMENT9) (SATELLITE SATELLITE5)
(INSTRUMENT INSTRUMENT10) (INSTRUMENT INSTRUMENT11)
(INSTRUMENT INSTRUMENT12) (SATELLITE SATELLITE6)
(INSTRUMENT INSTRUMENT13) (INSTRUMENT INSTRUMENT14)
(INSTRUMENT INSTRUMENT15) (SATELLITE SATELLITE7)
(INSTRUMENT INSTRUMENT16) (INSTRUMENT INSTRUMENT17)
(INSTRUMENT INSTRUMENT18) (MODE IMAGE1) (MODE INFRARED0)
(MODE THERMOGRAPH3) (MODE SPECTROGRAPH2) (MODE THERMOGRAPH4)
(DIRECTION STAR3) (DIRECTION GROUNDSTATION0)
(DIRECTION GROUNDSTATION2) (DIRECTION STAR1) (DIRECTION STAR4)
(DIRECTION PHENOMENON5) (DIRECTION PLANET6) (DIRECTION PLANET7)
(DIRECTION STAR8) (DIRECTION PHENOMENON9) (DIRECTION PHENOMENON10)
(DIRECTION PLANET11) (DIRECTION STAR12) (DIRECTION STAR13)
(DIRECTION PLANET14) (DIRECTION STAR15) (DIRECTION PHENOMENON16)
(DIRECTION PLANET17) (DIRECTION STAR18) (DIRECTION STAR19)
(DIRECTION PLANET20) (DIRECTION PLANET21) (DIRECTION PLANET22)
(DIRECTION PLANET23) (DIRECTION PLANET24)
(SUPPORTS INSTRUMENT0 THERMOGRAPH4) (SUPPORTS INSTRUMENT0 IMAGE1)
(CALIBRATION_TARGET INSTRUMENT0 GROUNDSTATION0)
(SUPPORTS INSTRUMENT1 SPECTROGRAPH2)
(SUPPORTS INSTRUMENT1 THERMOGRAPH3)
(CALIBRATION_TARGET INSTRUMENT1 STAR3)
(ON_BOARD INSTRUMENT0 SATELLITE0) (ON_BOARD INSTRUMENT1 SATELLITE0)
(POWER_AVAIL SATELLITE0) (POINTING SATELLITE0 STAR19)
(SUPPORTS INSTRUMENT2 SPECTROGRAPH2)
(CALIBRATION_TARGET INSTRUMENT2 STAR4) (SUPPORTS INSTRUMENT3 IMAGE1)
(SUPPORTS INSTRUMENT3 SPECTROGRAPH2)
(CALIBRATION_TARGET INSTRUMENT3 GROUNDSTATION2)
(ON_BOARD INSTRUMENT2 SATELLITE1) (ON_BOARD INSTRUMENT3 SATELLITE1)
(POWER_AVAIL SATELLITE1) (POINTING SATELLITE1 STAR18)
(SUPPORTS INSTRUMENT4 THERMOGRAPH3)
(SUPPORTS INSTRUMENT4 THERMOGRAPH4)
(SUPPORTS INSTRUMENT4 SPECTROGRAPH2)
(CALIBRATION_TARGET INSTRUMENT4 STAR1)
(SUPPORTS INSTRUMENT5 THERMOGRAPH3) (SUPPORTS INSTRUMENT5 IMAGE1)
(SUPPORTS INSTRUMENT5 INFRARED0)
(CALIBRATION_TARGET INSTRUMENT5 GROUNDSTATION2)
(ON_BOARD INSTRUMENT4 SATELLITE2) (ON_BOARD INSTRUMENT5 SATELLITE2)
(POWER_AVAIL SATELLITE2) (POINTING SATELLITE2 STAR19)
(SUPPORTS INSTRUMENT6 SPECTROGRAPH2) (SUPPORTS INSTRUMENT6 INFRARED0)
(CALIBRATION_TARGET INSTRUMENT6 GROUNDSTATION2)
(ON_BOARD INSTRUMENT6 SATELLITE3) (POWER_AVAIL SATELLITE3)
(POINTING SATELLITE3 STAR4) (SUPPORTS INSTRUMENT7 THERMOGRAPH3)
(SUPPORTS INSTRUMENT7 SPECTROGRAPH2)
(CALIBRATION_TARGET INSTRUMENT7 STAR3) (SUPPORTS INSTRUMENT8 IMAGE1)
(CALIBRATION_TARGET INSTRUMENT8 GROUNDSTATION2)
(SUPPORTS INSTRUMENT9 INFRARED0)
(CALIBRATION_TARGET INSTRUMENT9 STAR3)
(ON_BOARD INSTRUMENT7 SATELLITE4) (ON_BOARD INSTRUMENT8 SATELLITE4)
(ON_BOARD INSTRUMENT9 SATELLITE4) (POWER_AVAIL SATELLITE4)
(POINTING SATELLITE4 PHENOMENON9)
(SUPPORTS INSTRUMENT10 THERMOGRAPH4)
(SUPPORTS INSTRUMENT10 SPECTROGRAPH2)
(SUPPORTS INSTRUMENT10 INFRARED0)
(CALIBRATION_TARGET INSTRUMENT10 GROUNDSTATION0)
(SUPPORTS INSTRUMENT11 INFRARED0)
(CALIBRATION_TARGET INSTRUMENT11 GROUNDSTATION0)
(SUPPORTS INSTRUMENT12 INFRARED0)
(CALIBRATION_TARGET INSTRUMENT12 STAR1)
(ON_BOARD INSTRUMENT10 SATELLITE5) (ON_BOARD INSTRUMENT11 SATELLITE5)
(ON_BOARD INSTRUMENT12 SATELLITE5) (POWER_AVAIL SATELLITE5)
(POINTING SATELLITE5 PLANET6) (SUPPORTS INSTRUMENT13 THERMOGRAPH3)
(SUPPORTS INSTRUMENT13 INFRARED0)
(CALIBRATION_TARGET INSTRUMENT13 STAR3)
(SUPPORTS INSTRUMENT14 SPECTROGRAPH2)
(CALIBRATION_TARGET INSTRUMENT14 GROUNDSTATION2)
(SUPPORTS INSTRUMENT15 THERMOGRAPH4)
(CALIBRATION_TARGET INSTRUMENT15 GROUNDSTATION0)
(ON_BOARD INSTRUMENT13 SATELLITE6) (ON_BOARD INSTRUMENT14 SATELLITE6)
(ON_BOARD INSTRUMENT15 SATELLITE6) (POWER_AVAIL SATELLITE6)
(POINTING SATELLITE6 PLANET17) (SUPPORTS INSTRUMENT16 THERMOGRAPH4)
(CALIBRATION_TARGET INSTRUMENT16 GROUNDSTATION2)
(SUPPORTS INSTRUMENT17 SPECTROGRAPH2)
(CALIBRATION_TARGET INSTRUMENT17 STAR1)
(SUPPORTS INSTRUMENT18 THERMOGRAPH4)
(CALIBRATION_TARGET INSTRUMENT18 STAR4)
(ON_BOARD INSTRUMENT16 SATELLITE7) (ON_BOARD INSTRUMENT17 SATELLITE7)
(ON_BOARD INSTRUMENT18 SATELLITE7) (POWER_AVAIL SATELLITE7)
(POINTING SATELLITE7 PLANET11)
(ORIGINAL-GOAL
(AND (POINTING SATELLITE0 STAR19) (POINTING SATELLITE1 PLANET22)
(POINTING SATELLITE2 STAR13) (POINTING SATELLITE3 PLANET14)
(POINTING SATELLITE5 PLANET24) (POINTING SATELLITE7 STAR3)
(HAVE_IMAGE PHENOMENON5 SPECTROGRAPH2)
(HAVE_IMAGE PLANET6 SPECTROGRAPH2)
(HAVE_IMAGE PLANET7 INFRARED0)
(HAVE_IMAGE PHENOMENON9 INFRARED0)
(HAVE_IMAGE PHENOMENON10 IMAGE1) (HAVE_IMAGE PLANET11 IMAGE1)
(HAVE_IMAGE STAR12 THERMOGRAPH3)
(HAVE_IMAGE STAR13 THERMOGRAPH3)
(HAVE_IMAGE PLANET14 THERMOGRAPH4)
(HAVE_IMAGE STAR15 THERMOGRAPH4)
(HAVE_IMAGE PHENOMENON16 IMAGE1)
(HAVE_IMAGE PLANET17 THERMOGRAPH3) (HAVE_IMAGE STAR18 IMAGE1)
(HAVE_IMAGE PLANET20 IMAGE1) (HAVE_IMAGE PLANET21 INFRARED0)
(HAVE_IMAGE PLANET22 IMAGE1) (HAVE_IMAGE PLANET23 THERMOGRAPH3)
(HAVE_IMAGE PLANET24 INFRARED0)))
(GOAL-POINTING SATELLITE0 STAR19) (GOAL-POINTING SATELLITE1 PLANET22)
(GOAL-POINTING SATELLITE2 STAR13) (GOAL-POINTING SATELLITE3 PLANET14)
(GOAL-POINTING SATELLITE5 PLANET24) (GOAL-POINTING SATELLITE7 STAR3)
(GOAL-HAVE-IMAGE PHENOMENON5 SPECTROGRAPH2)
(GOAL-HAVE-IMAGE PLANET6 SPECTROGRAPH2)
(GOAL-HAVE-IMAGE PLANET7 INFRARED0)
(GOAL-HAVE-IMAGE PHENOMENON9 INFRARED0)
(GOAL-HAVE-IMAGE PHENOMENON10 IMAGE1)
(GOAL-HAVE-IMAGE PLANET11 IMAGE1)
(GOAL-HAVE-IMAGE STAR12 THERMOGRAPH3)
(GOAL-HAVE-IMAGE STAR13 THERMOGRAPH3)
(GOAL-HAVE-IMAGE PLANET14 THERMOGRAPH4)
(GOAL-HAVE-IMAGE STAR15 THERMOGRAPH4)
(GOAL-HAVE-IMAGE PHENOMENON16 IMAGE1)
(GOAL-HAVE-IMAGE PLANET17 THERMOGRAPH3)
(GOAL-HAVE-IMAGE STAR18 IMAGE1) (GOAL-HAVE-IMAGE PLANET20 IMAGE1)
(GOAL-HAVE-IMAGE PLANET21 INFRARED0)
(GOAL-HAVE-IMAGE PLANET22 IMAGE1)
(GOAL-HAVE-IMAGE PLANET23 THERMOGRAPH3)
(GOAL-HAVE-IMAGE PLANET24 INFRARED0))
(MAIN)) | null | https://raw.githubusercontent.com/shop-planner/shop3/ba429cf91a575e88f28b7f0e89065de7b4d666a6/shop3/examples/satellite/strips/p15.lisp | lisp |
(IN-PACKAGE :SHOP-USER)
(DEFPROBLEM STRIPS-SAT-X-1
((SATELLITE SATELLITE0) (INSTRUMENT INSTRUMENT0)
(INSTRUMENT INSTRUMENT1) (SATELLITE SATELLITE1)
(INSTRUMENT INSTRUMENT2) (INSTRUMENT INSTRUMENT3)
(SATELLITE SATELLITE2) (INSTRUMENT INSTRUMENT4)
(INSTRUMENT INSTRUMENT5) (SATELLITE SATELLITE3)
(INSTRUMENT INSTRUMENT6) (SATELLITE SATELLITE4)
(INSTRUMENT INSTRUMENT7) (INSTRUMENT INSTRUMENT8)
(INSTRUMENT INSTRUMENT9) (SATELLITE SATELLITE5)
(INSTRUMENT INSTRUMENT10) (INSTRUMENT INSTRUMENT11)
(INSTRUMENT INSTRUMENT12) (SATELLITE SATELLITE6)
(INSTRUMENT INSTRUMENT13) (INSTRUMENT INSTRUMENT14)
(INSTRUMENT INSTRUMENT15) (SATELLITE SATELLITE7)
(INSTRUMENT INSTRUMENT16) (INSTRUMENT INSTRUMENT17)
(INSTRUMENT INSTRUMENT18) (MODE IMAGE1) (MODE INFRARED0)
(MODE THERMOGRAPH3) (MODE SPECTROGRAPH2) (MODE THERMOGRAPH4)
(DIRECTION STAR3) (DIRECTION GROUNDSTATION0)
(DIRECTION GROUNDSTATION2) (DIRECTION STAR1) (DIRECTION STAR4)
(DIRECTION PHENOMENON5) (DIRECTION PLANET6) (DIRECTION PLANET7)
(DIRECTION STAR8) (DIRECTION PHENOMENON9) (DIRECTION PHENOMENON10)
(DIRECTION PLANET11) (DIRECTION STAR12) (DIRECTION STAR13)
(DIRECTION PLANET14) (DIRECTION STAR15) (DIRECTION PHENOMENON16)
(DIRECTION PLANET17) (DIRECTION STAR18) (DIRECTION STAR19)
(DIRECTION PLANET20) (DIRECTION PLANET21) (DIRECTION PLANET22)
(DIRECTION PLANET23) (DIRECTION PLANET24)
(SUPPORTS INSTRUMENT0 THERMOGRAPH4) (SUPPORTS INSTRUMENT0 IMAGE1)
(CALIBRATION_TARGET INSTRUMENT0 GROUNDSTATION0)
(SUPPORTS INSTRUMENT1 SPECTROGRAPH2)
(SUPPORTS INSTRUMENT1 THERMOGRAPH3)
(CALIBRATION_TARGET INSTRUMENT1 STAR3)
(ON_BOARD INSTRUMENT0 SATELLITE0) (ON_BOARD INSTRUMENT1 SATELLITE0)
(POWER_AVAIL SATELLITE0) (POINTING SATELLITE0 STAR19)
(SUPPORTS INSTRUMENT2 SPECTROGRAPH2)
(CALIBRATION_TARGET INSTRUMENT2 STAR4) (SUPPORTS INSTRUMENT3 IMAGE1)
(SUPPORTS INSTRUMENT3 SPECTROGRAPH2)
(CALIBRATION_TARGET INSTRUMENT3 GROUNDSTATION2)
(ON_BOARD INSTRUMENT2 SATELLITE1) (ON_BOARD INSTRUMENT3 SATELLITE1)
(POWER_AVAIL SATELLITE1) (POINTING SATELLITE1 STAR18)
(SUPPORTS INSTRUMENT4 THERMOGRAPH3)
(SUPPORTS INSTRUMENT4 THERMOGRAPH4)
(SUPPORTS INSTRUMENT4 SPECTROGRAPH2)
(CALIBRATION_TARGET INSTRUMENT4 STAR1)
(SUPPORTS INSTRUMENT5 THERMOGRAPH3) (SUPPORTS INSTRUMENT5 IMAGE1)
(SUPPORTS INSTRUMENT5 INFRARED0)
(CALIBRATION_TARGET INSTRUMENT5 GROUNDSTATION2)
(ON_BOARD INSTRUMENT4 SATELLITE2) (ON_BOARD INSTRUMENT5 SATELLITE2)
(POWER_AVAIL SATELLITE2) (POINTING SATELLITE2 STAR19)
(SUPPORTS INSTRUMENT6 SPECTROGRAPH2) (SUPPORTS INSTRUMENT6 INFRARED0)
(CALIBRATION_TARGET INSTRUMENT6 GROUNDSTATION2)
(ON_BOARD INSTRUMENT6 SATELLITE3) (POWER_AVAIL SATELLITE3)
(POINTING SATELLITE3 STAR4) (SUPPORTS INSTRUMENT7 THERMOGRAPH3)
(SUPPORTS INSTRUMENT7 SPECTROGRAPH2)
(CALIBRATION_TARGET INSTRUMENT7 STAR3) (SUPPORTS INSTRUMENT8 IMAGE1)
(CALIBRATION_TARGET INSTRUMENT8 GROUNDSTATION2)
(SUPPORTS INSTRUMENT9 INFRARED0)
(CALIBRATION_TARGET INSTRUMENT9 STAR3)
(ON_BOARD INSTRUMENT7 SATELLITE4) (ON_BOARD INSTRUMENT8 SATELLITE4)
(ON_BOARD INSTRUMENT9 SATELLITE4) (POWER_AVAIL SATELLITE4)
(POINTING SATELLITE4 PHENOMENON9)
(SUPPORTS INSTRUMENT10 THERMOGRAPH4)
(SUPPORTS INSTRUMENT10 SPECTROGRAPH2)
(SUPPORTS INSTRUMENT10 INFRARED0)
(CALIBRATION_TARGET INSTRUMENT10 GROUNDSTATION0)
(SUPPORTS INSTRUMENT11 INFRARED0)
(CALIBRATION_TARGET INSTRUMENT11 GROUNDSTATION0)
(SUPPORTS INSTRUMENT12 INFRARED0)
(CALIBRATION_TARGET INSTRUMENT12 STAR1)
(ON_BOARD INSTRUMENT10 SATELLITE5) (ON_BOARD INSTRUMENT11 SATELLITE5)
(ON_BOARD INSTRUMENT12 SATELLITE5) (POWER_AVAIL SATELLITE5)
(POINTING SATELLITE5 PLANET6) (SUPPORTS INSTRUMENT13 THERMOGRAPH3)
(SUPPORTS INSTRUMENT13 INFRARED0)
(CALIBRATION_TARGET INSTRUMENT13 STAR3)
(SUPPORTS INSTRUMENT14 SPECTROGRAPH2)
(CALIBRATION_TARGET INSTRUMENT14 GROUNDSTATION2)
(SUPPORTS INSTRUMENT15 THERMOGRAPH4)
(CALIBRATION_TARGET INSTRUMENT15 GROUNDSTATION0)
(ON_BOARD INSTRUMENT13 SATELLITE6) (ON_BOARD INSTRUMENT14 SATELLITE6)
(ON_BOARD INSTRUMENT15 SATELLITE6) (POWER_AVAIL SATELLITE6)
(POINTING SATELLITE6 PLANET17) (SUPPORTS INSTRUMENT16 THERMOGRAPH4)
(CALIBRATION_TARGET INSTRUMENT16 GROUNDSTATION2)
(SUPPORTS INSTRUMENT17 SPECTROGRAPH2)
(CALIBRATION_TARGET INSTRUMENT17 STAR1)
(SUPPORTS INSTRUMENT18 THERMOGRAPH4)
(CALIBRATION_TARGET INSTRUMENT18 STAR4)
(ON_BOARD INSTRUMENT16 SATELLITE7) (ON_BOARD INSTRUMENT17 SATELLITE7)
(ON_BOARD INSTRUMENT18 SATELLITE7) (POWER_AVAIL SATELLITE7)
(POINTING SATELLITE7 PLANET11)
(ORIGINAL-GOAL
(AND (POINTING SATELLITE0 STAR19) (POINTING SATELLITE1 PLANET22)
(POINTING SATELLITE2 STAR13) (POINTING SATELLITE3 PLANET14)
(POINTING SATELLITE5 PLANET24) (POINTING SATELLITE7 STAR3)
(HAVE_IMAGE PHENOMENON5 SPECTROGRAPH2)
(HAVE_IMAGE PLANET6 SPECTROGRAPH2)
(HAVE_IMAGE PLANET7 INFRARED0)
(HAVE_IMAGE PHENOMENON9 INFRARED0)
(HAVE_IMAGE PHENOMENON10 IMAGE1) (HAVE_IMAGE PLANET11 IMAGE1)
(HAVE_IMAGE STAR12 THERMOGRAPH3)
(HAVE_IMAGE STAR13 THERMOGRAPH3)
(HAVE_IMAGE PLANET14 THERMOGRAPH4)
(HAVE_IMAGE STAR15 THERMOGRAPH4)
(HAVE_IMAGE PHENOMENON16 IMAGE1)
(HAVE_IMAGE PLANET17 THERMOGRAPH3) (HAVE_IMAGE STAR18 IMAGE1)
(HAVE_IMAGE PLANET20 IMAGE1) (HAVE_IMAGE PLANET21 INFRARED0)
(HAVE_IMAGE PLANET22 IMAGE1) (HAVE_IMAGE PLANET23 THERMOGRAPH3)
(HAVE_IMAGE PLANET24 INFRARED0)))
(GOAL-POINTING SATELLITE0 STAR19) (GOAL-POINTING SATELLITE1 PLANET22)
(GOAL-POINTING SATELLITE2 STAR13) (GOAL-POINTING SATELLITE3 PLANET14)
(GOAL-POINTING SATELLITE5 PLANET24) (GOAL-POINTING SATELLITE7 STAR3)
(GOAL-HAVE-IMAGE PHENOMENON5 SPECTROGRAPH2)
(GOAL-HAVE-IMAGE PLANET6 SPECTROGRAPH2)
(GOAL-HAVE-IMAGE PLANET7 INFRARED0)
(GOAL-HAVE-IMAGE PHENOMENON9 INFRARED0)
(GOAL-HAVE-IMAGE PHENOMENON10 IMAGE1)
(GOAL-HAVE-IMAGE PLANET11 IMAGE1)
(GOAL-HAVE-IMAGE STAR12 THERMOGRAPH3)
(GOAL-HAVE-IMAGE STAR13 THERMOGRAPH3)
(GOAL-HAVE-IMAGE PLANET14 THERMOGRAPH4)
(GOAL-HAVE-IMAGE STAR15 THERMOGRAPH4)
(GOAL-HAVE-IMAGE PHENOMENON16 IMAGE1)
(GOAL-HAVE-IMAGE PLANET17 THERMOGRAPH3)
(GOAL-HAVE-IMAGE STAR18 IMAGE1) (GOAL-HAVE-IMAGE PLANET20 IMAGE1)
(GOAL-HAVE-IMAGE PLANET21 INFRARED0)
(GOAL-HAVE-IMAGE PLANET22 IMAGE1)
(GOAL-HAVE-IMAGE PLANET23 THERMOGRAPH3)
(GOAL-HAVE-IMAGE PLANET24 INFRARED0))
(MAIN)) | |
e52ea2b0cd1f5380990a2e16e8a2ab822408a07e0fd179a4e06d0821a2b206f7 | robrix/Manifold | Module.hs | # LANGUAGE GeneralizedNewtypeDeriving #
module Manifold.Module where
import Control.Monad ((<=<))
import qualified Data.Map as Map
import Data.Semilattice.Lower
import Manifold.Constraint
import Manifold.Declaration
import Manifold.Name
import Manifold.Pretty
import Manifold.Type
data Module var def = Module
{ moduleName :: Name
, moduleImports :: [Name]
, moduleDeclarations :: [Declaration var def]
}
deriving (Eq, Ord, Show)
moduleExports :: Module var def -> [Constraint var (Type var)]
moduleExports = declarationSignatures <=< moduleDeclarations
instance (Pretty var, Pretty def) => Pretty (Module var def) where
prettyPrec _ (Module name imports decls)
= align . vsep . (>>= each)
$ [ prettyString "module" <+> pretty name <+> prettyString "where" ]
: map ((prettyString "import" <+>) . pretty) imports
: map (pure . pretty) decls
where each :: [Doc ann] -> [Doc ann]
each [] = []
each xs = [ vsep xs <> line ]
newtype ModuleTable var def = ModuleTable { unModuleTable :: Map.Map Name (Module var def) }
deriving (Eq, Lower, Ord, Show)
fromModules :: [Module var def] -> ModuleTable var def
fromModules = ModuleTable . Map.fromList . map ((,) . moduleName <*> id)
insert :: Module var def -> ModuleTable var def -> ModuleTable var def
insert m@(Module name _ _) = ModuleTable . Map.insert name m . unModuleTable
lookup :: Name -> ModuleTable var def -> Maybe (Module var def)
lookup name = Map.lookup name . unModuleTable
| null | https://raw.githubusercontent.com/robrix/Manifold/3cc7a49c90b9cc7d1da61c532d0ee526ccdd3474/src/Manifold/Module.hs | haskell | # LANGUAGE GeneralizedNewtypeDeriving #
module Manifold.Module where
import Control.Monad ((<=<))
import qualified Data.Map as Map
import Data.Semilattice.Lower
import Manifold.Constraint
import Manifold.Declaration
import Manifold.Name
import Manifold.Pretty
import Manifold.Type
data Module var def = Module
{ moduleName :: Name
, moduleImports :: [Name]
, moduleDeclarations :: [Declaration var def]
}
deriving (Eq, Ord, Show)
moduleExports :: Module var def -> [Constraint var (Type var)]
moduleExports = declarationSignatures <=< moduleDeclarations
instance (Pretty var, Pretty def) => Pretty (Module var def) where
prettyPrec _ (Module name imports decls)
= align . vsep . (>>= each)
$ [ prettyString "module" <+> pretty name <+> prettyString "where" ]
: map ((prettyString "import" <+>) . pretty) imports
: map (pure . pretty) decls
where each :: [Doc ann] -> [Doc ann]
each [] = []
each xs = [ vsep xs <> line ]
newtype ModuleTable var def = ModuleTable { unModuleTable :: Map.Map Name (Module var def) }
deriving (Eq, Lower, Ord, Show)
fromModules :: [Module var def] -> ModuleTable var def
fromModules = ModuleTable . Map.fromList . map ((,) . moduleName <*> id)
insert :: Module var def -> ModuleTable var def -> ModuleTable var def
insert m@(Module name _ _) = ModuleTable . Map.insert name m . unModuleTable
lookup :: Name -> ModuleTable var def -> Maybe (Module var def)
lookup name = Map.lookup name . unModuleTable
| |
b97e3c62d5e382118460e072430e9f9fe335602ef3d9dbb64dc645e602372289 | jwarlander/everex | thrift1151_types.erl | %%
Autogenerated by Thrift Compiler ( 1.0.0 - dev )
%%
%% DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
%%
-module(thrift1151_types).
-include("thrift1151_types.hrl").
-export([struct_info/1, struct_info_ext/1]).
struct_info('StructA') ->
{struct, [{1, i16}]}
;
struct_info('StructB') ->
{struct, [{1, i32}]}
;
struct_info('StructC') ->
{struct, [{1, {struct, {'thrift1151_types', 'StructA'}}}]}
;
struct_info(_) -> erlang:error(function_clause).
struct_info_ext('StructA') ->
{struct, [{1, undefined, i16, 'x', undefined}]}
;
struct_info_ext('StructB') ->
{struct, [{1, undefined, i32, 'x', undefined}]}
;
struct_info_ext('StructC') ->
{struct, [{1, undefined, {struct, {'thrift1151_types', 'StructA'}}, 'x', #'StructA'{}}]}
;
struct_info_ext(_) -> erlang:error(function_clause).
| null | https://raw.githubusercontent.com/jwarlander/everex/559a18e45054abd7d2adfeb3ef060f79b58308e9/test/thrift/gen-erl/thrift1151_types.erl | erlang |
DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
| Autogenerated by Thrift Compiler ( 1.0.0 - dev )
-module(thrift1151_types).
-include("thrift1151_types.hrl").
-export([struct_info/1, struct_info_ext/1]).
struct_info('StructA') ->
{struct, [{1, i16}]}
;
struct_info('StructB') ->
{struct, [{1, i32}]}
;
struct_info('StructC') ->
{struct, [{1, {struct, {'thrift1151_types', 'StructA'}}}]}
;
struct_info(_) -> erlang:error(function_clause).
struct_info_ext('StructA') ->
{struct, [{1, undefined, i16, 'x', undefined}]}
;
struct_info_ext('StructB') ->
{struct, [{1, undefined, i32, 'x', undefined}]}
;
struct_info_ext('StructC') ->
{struct, [{1, undefined, {struct, {'thrift1151_types', 'StructA'}}, 'x', #'StructA'{}}]}
;
struct_info_ext(_) -> erlang:error(function_clause).
|
c656306e718f543f97ebe67b85350af5958beec4efbb87358177d4a6fa83a8f9 | logaan/bt.clj | core_test.clj | (ns testing-gloss.core-test
(:require [clojure.test :refer :all]
[testing-gloss.core :refer :all]))
(deftest a-test
(testing "FIXME, I fail."
(is (= 0 0))))
| null | https://raw.githubusercontent.com/logaan/bt.clj/44831af16c4b987db16d0d85eee9cd79cb4fa757/test/testing_gloss/core_test.clj | clojure | (ns testing-gloss.core-test
(:require [clojure.test :refer :all]
[testing-gloss.core :refer :all]))
(deftest a-test
(testing "FIXME, I fail."
(is (= 0 0))))
| |
c725106c5318adabcbbb996b4c71b1a50948062dc28d43c3853670d94b01c516 | osstotalsoft/functional-guy | 04.MindTrick.hs | -- Think of any number.
-- Double the number.
Add 9 with result .
Subtract 3 with the result .
Divide the result by 2 .
Subtract the number with the first number started with .
The answer will always be 3 .
mindTrick :: Integer -> Integer
mindTrick x = (x * 2 + 9 -3) `div` 2 - x
-- rewrite it as a pipeline of functions
thinkOfANumber :: Integer -> Integer
thinkOfANumber = id
doubleTheNumber :: Integer -> Integer
doubleTheNumber = (2 *)
add9 :: Integer -> Integer
add9 = (9 +)
substract3 :: Integer -> Integer
substract3 x = x -3
divideBy2 :: Integer -> Integer
divideBy2 x = x `div` 2
substractTheFirstNumberYouStartedWith :: Integer -> Integer -> Integer
substractTheFirstNumberYouStartedWith x theNumberYouStartedWith = x - theNumberYouStartedWith
(>>>) :: (a -> b) -> (b -> c) -> a -> c
f >>> g = g . f
mindTrick' :: Integer -> Integer -> Integer
mindTrick' = thinkOfANumber >>> doubleTheNumber >>> add9 >>> substract3 >>> divideBy2 >>> substractTheFirstNumberYouStartedWith
mindTrick'' :: Integer -> Integer
mindTrick'' x = thinkOfANumber >>> doubleTheNumber >>> add9 >>> substract3 >>> divideBy2 >>> (`substractTheFirstNumberYouStartedWith` x) $ x
| null | https://raw.githubusercontent.com/osstotalsoft/functional-guy/c02a8b22026c261a9722551f3641228dc02619ba/Chapter6.%20Computational%20effects/Examples/Reader/04.MindTrick.hs | haskell | Think of any number.
Double the number.
rewrite it as a pipeline of functions | Add 9 with result .
Subtract 3 with the result .
Divide the result by 2 .
Subtract the number with the first number started with .
The answer will always be 3 .
mindTrick :: Integer -> Integer
mindTrick x = (x * 2 + 9 -3) `div` 2 - x
thinkOfANumber :: Integer -> Integer
thinkOfANumber = id
doubleTheNumber :: Integer -> Integer
doubleTheNumber = (2 *)
add9 :: Integer -> Integer
add9 = (9 +)
substract3 :: Integer -> Integer
substract3 x = x -3
divideBy2 :: Integer -> Integer
divideBy2 x = x `div` 2
substractTheFirstNumberYouStartedWith :: Integer -> Integer -> Integer
substractTheFirstNumberYouStartedWith x theNumberYouStartedWith = x - theNumberYouStartedWith
(>>>) :: (a -> b) -> (b -> c) -> a -> c
f >>> g = g . f
mindTrick' :: Integer -> Integer -> Integer
mindTrick' = thinkOfANumber >>> doubleTheNumber >>> add9 >>> substract3 >>> divideBy2 >>> substractTheFirstNumberYouStartedWith
mindTrick'' :: Integer -> Integer
mindTrick'' x = thinkOfANumber >>> doubleTheNumber >>> add9 >>> substract3 >>> divideBy2 >>> (`substractTheFirstNumberYouStartedWith` x) $ x
|
7c110f3684135ecc8691e20ac294c87dbea47858e44cc8caa13d47d8b8cfb19b | mRrvz/bmstu-ca | Interpolation.hs | module Interpolation (
interpolation2
) where
import Data.List
import Data.Maybe
type TableXY = [(Double, Double)]
type Matrix = [[Double]]
type ValueTable = [[Double]]
type Point = (Double, Double)
type PolynomDegrees = (Int, Int)
slice :: TableXY -> Int -> Int -> TableXY
slice table n pos = take n $ drop pos table
takeApproximation :: TableXY -> Double -> Int -> TableXY
takeApproximation table x0 n
| (<=) x0 . fst $ head table = take n table
| (>=) x0 . fst $ last table = reverse $ take n $ reverse table
| otherwise = left ++ right
where indexL = fromJust $ findIndex (\x -> fst x >= x0) table
left = slice table (n `div` 2 ) (indexL - n `div` 2)
indexR = fromJust $ findIndex (== last left) table
right = slice table (n - length left) (indexR + 1)
createMatrix :: [Double] -> [Double] -> Int -> Matrix
createMatrix _ (_:[]) _ = []
createMatrix xs ys step = divDiff xs ys step : createMatrix xs (divDiff xs ys step) (step + 1)
where divDiff _ (_:[]) _ = []
divDiff xs ys step = (ys !! 1 - ys !! 0) / (xs !! (1 + step) - xs !! 0) : divDiff (tail xs) (tail ys) step
newtonPolynomial :: TableXY -> Double -> Int -> Double
newtonPolynomial table x0 n = foldl (\x y -> x + fst y * snd y) y0 pairs
where approximation = unzip $ takeApproximation table x0 (n + 1)
matrix = createMatrix (fst approximation) (snd approximation) 0
y0 = head $ snd approximation
xDifference = reverse $ init $ foldl (\x y -> (x0 - y) * head x : x) [1] (fst approximation)
pairs = zip (map head matrix) xDifference
interpolation2 :: ValueTable -> Point -> PolynomDegrees -> Double
interpolation2 table pt n = result
where
xBorder = tail $ head table
xColPairs = map (\x -> zip xBorder $ tail x) $ tail table
xApprox = map (\x -> takeApproximation x (fst pt) $ fst n + 1) xColPairs
yBorder = map head $ tail table
yApprox = reverse $ takeApproximation (zip yBorder yBorder) (snd pt) $ snd n + 1
xyApprox = map (\x -> (fst x, xApprox !! (round $ fst x - 1))) yApprox
finApprox = map (\x -> (fst x, newtonPolynomial (snd x) (fst pt) (length finApprox))) xyApprox
result = newtonPolynomial finApprox (snd pt) (length finApprox)
| null | https://raw.githubusercontent.com/mRrvz/bmstu-ca/866a32b37878d45006ec3c4f99f67983ae681717/lab_02/src/Interpolation.hs | haskell | module Interpolation (
interpolation2
) where
import Data.List
import Data.Maybe
type TableXY = [(Double, Double)]
type Matrix = [[Double]]
type ValueTable = [[Double]]
type Point = (Double, Double)
type PolynomDegrees = (Int, Int)
slice :: TableXY -> Int -> Int -> TableXY
slice table n pos = take n $ drop pos table
takeApproximation :: TableXY -> Double -> Int -> TableXY
takeApproximation table x0 n
| (<=) x0 . fst $ head table = take n table
| (>=) x0 . fst $ last table = reverse $ take n $ reverse table
| otherwise = left ++ right
where indexL = fromJust $ findIndex (\x -> fst x >= x0) table
left = slice table (n `div` 2 ) (indexL - n `div` 2)
indexR = fromJust $ findIndex (== last left) table
right = slice table (n - length left) (indexR + 1)
createMatrix :: [Double] -> [Double] -> Int -> Matrix
createMatrix _ (_:[]) _ = []
createMatrix xs ys step = divDiff xs ys step : createMatrix xs (divDiff xs ys step) (step + 1)
where divDiff _ (_:[]) _ = []
divDiff xs ys step = (ys !! 1 - ys !! 0) / (xs !! (1 + step) - xs !! 0) : divDiff (tail xs) (tail ys) step
newtonPolynomial :: TableXY -> Double -> Int -> Double
newtonPolynomial table x0 n = foldl (\x y -> x + fst y * snd y) y0 pairs
where approximation = unzip $ takeApproximation table x0 (n + 1)
matrix = createMatrix (fst approximation) (snd approximation) 0
y0 = head $ snd approximation
xDifference = reverse $ init $ foldl (\x y -> (x0 - y) * head x : x) [1] (fst approximation)
pairs = zip (map head matrix) xDifference
interpolation2 :: ValueTable -> Point -> PolynomDegrees -> Double
interpolation2 table pt n = result
where
xBorder = tail $ head table
xColPairs = map (\x -> zip xBorder $ tail x) $ tail table
xApprox = map (\x -> takeApproximation x (fst pt) $ fst n + 1) xColPairs
yBorder = map head $ tail table
yApprox = reverse $ takeApproximation (zip yBorder yBorder) (snd pt) $ snd n + 1
xyApprox = map (\x -> (fst x, xApprox !! (round $ fst x - 1))) yApprox
finApprox = map (\x -> (fst x, newtonPolynomial (snd x) (fst pt) (length finApprox))) xyApprox
result = newtonPolynomial finApprox (snd pt) (length finApprox)
| |
675ec0bddd64e76083df9743abc4feafcbfb7ea8fcd4e2783b44208c3adb0a6e | janestreet/noise-wireguard-ocaml | messages.ml | open Core
module Handshake_initiation = struct include Handshake_initiation end
module Handshake_response = struct include Handshake_response end
module Cookie_reply = struct include Cookie_reply end
module Transport = struct include Transport end
type mac_message =
| Handshake_initiation of Handshake_initiation.t
| Handshake_response of Handshake_response.t
| Handshake_initiation_cstruct of Handshake_initiation.t_cstruct
| Handshake_response_cstruct of Handshake_response.t_cstruct
| Dummy_for_cookie_tests of Cstruct.t * bytes * bytes
let get_dummy_msg_beta ~msg_body ~mac1 =
let body_length = Cstruct.len msg_body in
let ret = Cstruct.create (body_length + 16) in
Cstruct.blit msg_body 0 ret 0 body_length ;
Cstruct.blit (Cstruct.of_bytes mac1) 0 ret body_length 16 ;
ret
let get_macs (msg : mac_message) =
let get_macs_init (m : Handshake_initiation.t) =
(m.msg_alpha, !(m.mac1), m.msg_beta, !(m.mac2)) in
let get_macs_resp (m : Handshake_response.t) =
(m.msg_alpha, !(m.mac1), m.msg_beta, !(m.mac2)) in
match msg with
| Handshake_initiation m -> get_macs_init m
| Handshake_response m -> get_macs_resp m
| Handshake_initiation_cstruct m_cstruct ->
Handshake_initiation.cstruct_to_t m_cstruct |> get_macs_init
| Handshake_response_cstruct m_cstruct ->
Handshake_response.cstruct_to_t m_cstruct |> get_macs_resp
| Dummy_for_cookie_tests (msg_body, mac1, mac2) ->
let msg_beta = get_dummy_msg_beta ~msg_body ~mac1 in
(msg_body, mac1, msg_beta, mac2)
let set_macs ~(msg : mac_message) ~mac1 ~mac2 =
match msg with
| Handshake_initiation m ->
m.mac1 := mac1 ;
m.mac2 := mac2
| Handshake_response m ->
m.mac1 := mac1 ;
m.mac2 := mac2
| Handshake_initiation_cstruct m_cstruct ->
Handshake_initiation.set_macs ~msg:m_cstruct ~mac1 ~mac2
| Handshake_response_cstruct m_cstruct ->
Handshake_response.set_macs ~msg:m_cstruct ~mac1 ~mac2
| Dummy_for_cookie_tests (_, old_mac1, old_mac2) ->
Bytes.blit ~src:mac1 ~src_pos:0 ~dst:old_mac1 ~dst_pos:0 ~len:16 ;
Bytes.blit ~src:mac2 ~src_pos:0 ~dst:old_mac2 ~dst_pos:0 ~len:16
let create_dummy bytes =
Dummy_for_cookie_tests
(Cstruct.of_bytes bytes, Bytes.create 16, Bytes.create 16)
let xor_dummy byte =
let byte_int = int_of_char byte in
function
| Dummy_for_cookie_tests (cstruct, _, _) ->
for i = 0 to Cstruct.len cstruct - 1 do
Cstruct.set_uint8 cstruct i (Cstruct.get_uint8 cstruct i lxor byte_int)
done
| _ -> ()
let pretty_print_bytes bytes = bytes |> Cstruct.of_bytes |> Cstruct.hexdump
let hexdump_mac_message = function
| Handshake_initiation m ->
Handshake_initiation.t_to_cstruct m
|> Handshake_initiation.hexdump_t_cstruct
| Handshake_response m ->
Handshake_response.t_to_cstruct m |> Handshake_response.hexdump_t_cstruct
| Handshake_initiation_cstruct m_cstruct ->
Handshake_initiation.hexdump_t_cstruct m_cstruct
| Handshake_response_cstruct m_cstruct ->
Handshake_response.hexdump_t_cstruct m_cstruct
| Dummy_for_cookie_tests (msg, old_mac1, old_mac2) ->
Cstruct.hexdump msg ;
print_string "mac1:" ;
pretty_print_bytes old_mac1 ;
print_string "mac2:" ;
pretty_print_bytes old_mac2
type t =
| Handshake_initiation of Handshake_initiation.t
| Handshake_response of Handshake_response.t
| Cookie_reply of Cookie_reply.t
| Transport of Transport.t
| null | https://raw.githubusercontent.com/janestreet/noise-wireguard-ocaml/4ec7a34d1778ab5c8ec16a6129277d4f77406616/messages/messages.ml | ocaml | open Core
module Handshake_initiation = struct include Handshake_initiation end
module Handshake_response = struct include Handshake_response end
module Cookie_reply = struct include Cookie_reply end
module Transport = struct include Transport end
type mac_message =
| Handshake_initiation of Handshake_initiation.t
| Handshake_response of Handshake_response.t
| Handshake_initiation_cstruct of Handshake_initiation.t_cstruct
| Handshake_response_cstruct of Handshake_response.t_cstruct
| Dummy_for_cookie_tests of Cstruct.t * bytes * bytes
let get_dummy_msg_beta ~msg_body ~mac1 =
let body_length = Cstruct.len msg_body in
let ret = Cstruct.create (body_length + 16) in
Cstruct.blit msg_body 0 ret 0 body_length ;
Cstruct.blit (Cstruct.of_bytes mac1) 0 ret body_length 16 ;
ret
let get_macs (msg : mac_message) =
let get_macs_init (m : Handshake_initiation.t) =
(m.msg_alpha, !(m.mac1), m.msg_beta, !(m.mac2)) in
let get_macs_resp (m : Handshake_response.t) =
(m.msg_alpha, !(m.mac1), m.msg_beta, !(m.mac2)) in
match msg with
| Handshake_initiation m -> get_macs_init m
| Handshake_response m -> get_macs_resp m
| Handshake_initiation_cstruct m_cstruct ->
Handshake_initiation.cstruct_to_t m_cstruct |> get_macs_init
| Handshake_response_cstruct m_cstruct ->
Handshake_response.cstruct_to_t m_cstruct |> get_macs_resp
| Dummy_for_cookie_tests (msg_body, mac1, mac2) ->
let msg_beta = get_dummy_msg_beta ~msg_body ~mac1 in
(msg_body, mac1, msg_beta, mac2)
let set_macs ~(msg : mac_message) ~mac1 ~mac2 =
match msg with
| Handshake_initiation m ->
m.mac1 := mac1 ;
m.mac2 := mac2
| Handshake_response m ->
m.mac1 := mac1 ;
m.mac2 := mac2
| Handshake_initiation_cstruct m_cstruct ->
Handshake_initiation.set_macs ~msg:m_cstruct ~mac1 ~mac2
| Handshake_response_cstruct m_cstruct ->
Handshake_response.set_macs ~msg:m_cstruct ~mac1 ~mac2
| Dummy_for_cookie_tests (_, old_mac1, old_mac2) ->
Bytes.blit ~src:mac1 ~src_pos:0 ~dst:old_mac1 ~dst_pos:0 ~len:16 ;
Bytes.blit ~src:mac2 ~src_pos:0 ~dst:old_mac2 ~dst_pos:0 ~len:16
let create_dummy bytes =
Dummy_for_cookie_tests
(Cstruct.of_bytes bytes, Bytes.create 16, Bytes.create 16)
let xor_dummy byte =
let byte_int = int_of_char byte in
function
| Dummy_for_cookie_tests (cstruct, _, _) ->
for i = 0 to Cstruct.len cstruct - 1 do
Cstruct.set_uint8 cstruct i (Cstruct.get_uint8 cstruct i lxor byte_int)
done
| _ -> ()
let pretty_print_bytes bytes = bytes |> Cstruct.of_bytes |> Cstruct.hexdump
let hexdump_mac_message = function
| Handshake_initiation m ->
Handshake_initiation.t_to_cstruct m
|> Handshake_initiation.hexdump_t_cstruct
| Handshake_response m ->
Handshake_response.t_to_cstruct m |> Handshake_response.hexdump_t_cstruct
| Handshake_initiation_cstruct m_cstruct ->
Handshake_initiation.hexdump_t_cstruct m_cstruct
| Handshake_response_cstruct m_cstruct ->
Handshake_response.hexdump_t_cstruct m_cstruct
| Dummy_for_cookie_tests (msg, old_mac1, old_mac2) ->
Cstruct.hexdump msg ;
print_string "mac1:" ;
pretty_print_bytes old_mac1 ;
print_string "mac2:" ;
pretty_print_bytes old_mac2
type t =
| Handshake_initiation of Handshake_initiation.t
| Handshake_response of Handshake_response.t
| Cookie_reply of Cookie_reply.t
| Transport of Transport.t
| |
3bbd5134e5c0429e6a43c55575537a94e8e35484ec75b2ae4abd80ca7949b3b3 | ghcjs/ghcjs-dom | SVGRectElement.hs | # LANGUAGE PatternSynonyms #
# LANGUAGE ForeignFunctionInterface #
# LANGUAGE JavaScriptFFI #
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
module GHCJS.DOM.JSFFI.Generated.SVGRectElement
(js_getX, getX, js_getY, getY, js_getWidth, getWidth, js_getHeight,
getHeight, js_getRx, getRx, js_getRy, getRy, SVGRectElement(..),
gTypeSVGRectElement)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import qualified Prelude (error)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull, jsUndefined)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad (void)
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import Data.Maybe (fromJust)
import Data.Traversable (mapM)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import GHCJS.DOM.JSFFI.Generated.Enums
foreign import javascript unsafe "$1[\"x\"]" js_getX ::
SVGRectElement -> IO SVGAnimatedLength
| < -US/docs/Web/API/SVGRectElement.x Mozilla SVGRectElement.x documentation >
getX :: (MonadIO m) => SVGRectElement -> m SVGAnimatedLength
getX self = liftIO (js_getX self)
foreign import javascript unsafe "$1[\"y\"]" js_getY ::
SVGRectElement -> IO SVGAnimatedLength
| < -US/docs/Web/API/SVGRectElement.y Mozilla SVGRectElement.y documentation >
getY :: (MonadIO m) => SVGRectElement -> m SVGAnimatedLength
getY self = liftIO (js_getY self)
foreign import javascript unsafe "$1[\"width\"]" js_getWidth ::
SVGRectElement -> IO SVGAnimatedLength
| < -US/docs/Web/API/SVGRectElement.width Mozilla SVGRectElement.width documentation >
getWidth :: (MonadIO m) => SVGRectElement -> m SVGAnimatedLength
getWidth self = liftIO (js_getWidth self)
foreign import javascript unsafe "$1[\"height\"]" js_getHeight ::
SVGRectElement -> IO SVGAnimatedLength
| < -US/docs/Web/API/SVGRectElement.height Mozilla documentation >
getHeight :: (MonadIO m) => SVGRectElement -> m SVGAnimatedLength
getHeight self = liftIO (js_getHeight self)
foreign import javascript unsafe "$1[\"rx\"]" js_getRx ::
SVGRectElement -> IO SVGAnimatedLength
| < -US/docs/Web/API/SVGRectElement.rx Mozilla SVGRectElement.rx documentation >
getRx :: (MonadIO m) => SVGRectElement -> m SVGAnimatedLength
getRx self = liftIO (js_getRx self)
foreign import javascript unsafe "$1[\"ry\"]" js_getRy ::
SVGRectElement -> IO SVGAnimatedLength
| < -US/docs/Web/API/SVGRectElement.ry Mozilla SVGRectElement.ry documentation >
getRy :: (MonadIO m) => SVGRectElement -> m SVGAnimatedLength
getRy self = liftIO (js_getRy self) | null | https://raw.githubusercontent.com/ghcjs/ghcjs-dom/749963557d878d866be2d0184079836f367dd0ea/ghcjs-dom-jsffi/src/GHCJS/DOM/JSFFI/Generated/SVGRectElement.hs | haskell | For HasCallStack compatibility
# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures # | # LANGUAGE PatternSynonyms #
# LANGUAGE ForeignFunctionInterface #
# LANGUAGE JavaScriptFFI #
module GHCJS.DOM.JSFFI.Generated.SVGRectElement
(js_getX, getX, js_getY, getY, js_getWidth, getWidth, js_getHeight,
getHeight, js_getRx, getRx, js_getRy, getRy, SVGRectElement(..),
gTypeSVGRectElement)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import qualified Prelude (error)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull, jsUndefined)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad (void)
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import Data.Maybe (fromJust)
import Data.Traversable (mapM)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import GHCJS.DOM.JSFFI.Generated.Enums
foreign import javascript unsafe "$1[\"x\"]" js_getX ::
SVGRectElement -> IO SVGAnimatedLength
| < -US/docs/Web/API/SVGRectElement.x Mozilla SVGRectElement.x documentation >
getX :: (MonadIO m) => SVGRectElement -> m SVGAnimatedLength
getX self = liftIO (js_getX self)
foreign import javascript unsafe "$1[\"y\"]" js_getY ::
SVGRectElement -> IO SVGAnimatedLength
| < -US/docs/Web/API/SVGRectElement.y Mozilla SVGRectElement.y documentation >
getY :: (MonadIO m) => SVGRectElement -> m SVGAnimatedLength
getY self = liftIO (js_getY self)
foreign import javascript unsafe "$1[\"width\"]" js_getWidth ::
SVGRectElement -> IO SVGAnimatedLength
| < -US/docs/Web/API/SVGRectElement.width Mozilla SVGRectElement.width documentation >
getWidth :: (MonadIO m) => SVGRectElement -> m SVGAnimatedLength
getWidth self = liftIO (js_getWidth self)
foreign import javascript unsafe "$1[\"height\"]" js_getHeight ::
SVGRectElement -> IO SVGAnimatedLength
| < -US/docs/Web/API/SVGRectElement.height Mozilla documentation >
getHeight :: (MonadIO m) => SVGRectElement -> m SVGAnimatedLength
getHeight self = liftIO (js_getHeight self)
foreign import javascript unsafe "$1[\"rx\"]" js_getRx ::
SVGRectElement -> IO SVGAnimatedLength
| < -US/docs/Web/API/SVGRectElement.rx Mozilla SVGRectElement.rx documentation >
getRx :: (MonadIO m) => SVGRectElement -> m SVGAnimatedLength
getRx self = liftIO (js_getRx self)
foreign import javascript unsafe "$1[\"ry\"]" js_getRy ::
SVGRectElement -> IO SVGAnimatedLength
| < -US/docs/Web/API/SVGRectElement.ry Mozilla SVGRectElement.ry documentation >
getRy :: (MonadIO m) => SVGRectElement -> m SVGAnimatedLength
getRy self = liftIO (js_getRy self) |
82b333902d79b3d8852ce4694df952ac5b2f195feb4f309758d1a8eeb5505e8f | protz/mezzo | UntypedMezzo2UntypedOCaml.ml | open SurfaceSyntax
open UntypedMezzo
module O = UntypedOCaml
This is the translation of to .
(* TEMPORARY think about [open]: when we mention a data constructor
or field name in OCaml, is it always in scope? or must we qualify
it? can we use qualified names everywhere? *)
(* ---------------------------------------------------------------------------- *)
(* When printing a (variable, type, field) name, we must make sure that it is
not an OCaml keyword. If it is one, we rename it. *)
let identifier (x : string) =
if Hashtbl.mem OCamlKeywords.keyword_table x then
"__ok_" ^ x
else
x
(* ---------------------------------------------------------------------------- *)
We translate Mezzo module names in such a way that they are unlikely to be
confused with a native OCaml module name . This seems to be required ( I have
not found how to persuade OCaml to ignore its standard library ; -nostdlib
does not suffice ) and it will possibly allow us to link OCaml and Mezzo
code together in the future .
confused with a native OCaml module name. This seems to be required (I have
not found how to persuade OCaml to ignore its standard library; -nostdlib
does not suffice) and it will possibly allow us to link OCaml and Mezzo
code together in the future. *)
The Mezzo module name [ array ] becomes the OCaml module name [ ] .
let translate_module_name m =
"Mz_" ^ Module.print m
(* ---------------------------------------------------------------------------- *)
(* This function maps a field name to a field index. It accounts for the hidden
adopter field. *)
let field_index (info : datacon_info) (f : Field.name) : int =
(* TEMPORARY not pretty *)
(* should we eliminate field names in the earlier pass? *)
if Field.equal f Mezzo2UntypedMezzo.adopter_field then
0
else
1 + Field.Map.find f info.datacon_fields
(* Sorting a list of pairs of an integer and a datum. *)
let sort_by_index ixs =
List.sort (fun (i1, _) (i2, _) ->
Pervasives.compare i1 i2
) ixs
This function extracts the field index that was provided by the type - checker
at field access expressions ( read and write ) . This index does not account
for the hidden adopter field , so we must add 1 .
at field access expressions (read and write). This index does not account
for the hidden adopter field, so we must add 1. *)
let extract_field_index (f : field) : int =
if Field.equal f.field_name Mezzo2UntypedMezzo.adopter_field then
0
else
match f.field_offset with
| Some index ->
1 + index
| None ->
(* The field index has not been filled in by the type-checker!? *)
assert false
(* ---------------------------------------------------------------------------- *)
(* References to data constructors. *)
In principle , this reference to a data constructor should be resolved in the
same way at the OCaml level and at the Mezzo level , so we can print it exactly
as it appeared in the Mezzo program .
same way at the OCaml level and at the Mezzo level, so we can print it exactly
as it appeared in the Mezzo program. *) (* TEMPORARY think about this *)
let print_maybe_qualified f = function
| Unqualified x ->
identifier (f x)
| Qualified (m, x) ->
Printf.sprintf "%s.%s"
(translate_module_name m)
(identifier (f x))
let print_datacon_reference dref =
print_maybe_qualified Datacon.print dref.datacon_unresolved
(* ---------------------------------------------------------------------------- *)
(* A few smart constructors. *)
(* As patterns. *)
let pas p x =
match p with
| O.PAny ->
O.PVar x
| _ ->
O.PAs (p, x)
(* Sequence. *)
let seq e1 e2 =
match e1, e2 with
| O.ETuple [], e
| e, O.ETuple [] ->
e
| _, _ ->
O.ESequence (e1, e2)
Integer comparison in OCaml .
let gtz x =
O.EApply (O.EVar "MezzoLib.gtz", x)
Magic .
let rec magic e =
match e with
| O.EMagic _ ->
Avoid two consecutive magics .
e
| O.EGetField _ ->
We have changed the return type of [ ] to [ ' b ] , so a
magic on top of it is unnecessary .
magic on top of it is unnecessary. *)
e
| O.EApply (e1, e2) ->
(* Push magic into the left-hand side of applications, where it is
just as powerful. This will allow more redundancy elimination. *)
O.EApply (magic e1, e2)
| e ->
(* The default case. *)
O.EMagic e
(* ---------------------------------------------------------------------------- *)
(* Patterns. *)
OCaml does not have type casts within patterns , so we must produce
well - typed patterns , and furthermore , if several patterns are
type - compatible in Mezzo , then their OCaml counterparts must be
type - compatible in OCaml .
well-typed patterns, and furthermore, if several patterns are
type-compatible in Mezzo, then their OCaml counterparts must be
type-compatible in OCaml. *)
The translation of [ PConstruct ] patterns is somewhat tricky . When there
exist multiple tags ( i.e. , the pattern is refutable ) , we must translate it
to a [ PConstruct ] pattern , because that is the only way of examining the
tag within an OCaml pattern . When there exists just one tag , we could
translate to a [ PRecord ] pattern ; but , for simplicity , we will avoid
distinguishing a special case . Now , in OCaml , data constructors carry
anonymous fields , so we are forced to drop the field names and rely purely
on field offsets .
exist multiple tags (i.e., the pattern is refutable), we must translate it
to a [PConstruct] pattern, because that is the only way of examining the
tag within an OCaml pattern. When there exists just one tag, we could
translate to a [PRecord] pattern; but, for simplicity, we will avoid
distinguishing a special case. Now, in OCaml, data constructors carry
anonymous fields, so we are forced to drop the field names and rely purely
on field offsets. *)
For this translation to work , we will have to translate a Mezzo algebraic
data type to a corresponding OCaml algebraic data type , with the same data
constructors , same arity ( plus one , for the adopter field ) , and use a
distinct type variable as the type of each argument .
data type to a corresponding OCaml algebraic data type, with the same data
constructors, same arity (plus one, for the adopter field), and use a
distinct type variable as the type of each argument. *)
let rec translate_pattern (p : pattern) : O.pattern =
match p with
| PVar x ->
O.PVar (identifier (Variable.print x))
| PTuple ps ->
O.PTuple (List.map translate_pattern ps)
| PConstruct (dref, fields) ->
let info : datacon_info = Option.extract dref.datacon_info in
(* Build a list of (field index, pattern) pairs. *)
let fields =
List.map (fun (f, p) ->
field_index info f,
translate_pattern p
) fields
in
(* Sort this list by index. *)
let fields = sort_by_index fields in
(* Complete any missing entries, up to this data constructor's arity,
with wildcard patterns. At the same time, forget the indices. *)
let arity = 1 + info.datacon_arity in
let ps = complete 0 arity fields in
(* Create a data constructor pattern. *)
O.PConstruct (print_datacon_reference dref, ps)
| PLocated (p, _)
| PConstraint (p, _) ->
translate_pattern p
| PAs (p, x) ->
pas (translate_pattern p) (identifier (Variable.print x))
| PAny ->
O.PAny
and complete i arity ips =
if i = arity then
[]
else
match ips with
| (j, p) :: ips when i = j ->
(* We have an entry at index [i]. Use it. *)
p :: complete (i + 1) arity ips
| _ ->
(* We do not have an entry. Insert a wildcard pattern for this field. *)
O.PAny :: complete (i + 1) arity ips
(* ---------------------------------------------------------------------------- *)
(* Expressions. *)
We avoid using [ Obj.field ] and [ Obj.set_field ] , when possible , because they
are less efficient in terms of speed and code size . In particular , they seem
to incorporate a check against the special tag 254 , which represents an array
of values of type double . TEMPORARY not done yet
are less efficient in terms of speed and code size. In particular, they seem
to incorporate a check against the special tag 254, which represents an array
of values of type double. TEMPORARY not done yet *)
let rec transl (e : expression) : O.expression =
match e with
| EVar x ->
O.EVar (print_maybe_qualified Variable.print x)
| EBuiltin b ->
The builtin operations are defined in the OCaml library module
[ MezzoLib ] .
[MezzoLib]. *)
O.EVar (Printf.sprintf "MezzoLib.%s" b)
| ELet (flag, eqs, body) ->
O.ELet (flag, transl_equations eqs, transl body)
| EFun (p, e) ->
O.EFun (translate_pattern p, transl e)
| EAssign (e1, f, e2) ->
O.ESetField (transl e1, extract_field_index f, transl e2)
| EAssignTag (e, dref, info) ->
(* We must use [Obj.set_tag]; there is no other way. *)
As an optimization , if the old and new integer tags are equal ,
there is nothing to do . It is OK , in this case , not to translate
[ e ] at all , because the definition of guarantees
that [ e ] is a value .
there is nothing to do. It is OK, in this case, not to translate
[e] at all, because the definition of Untyped Mezzo guarantees
that [e] is a value. *)
let phantom = Option.extract info.is_phantom_update in
if phantom then
O.ETuple []
else
let info = Option.extract dref.datacon_info in
O.ESetTag (transl e, info.datacon_index)
| EAccess (e, f) ->
O.EGetField (transl e, extract_field_index f)
| EApply (e1, e2) ->
O.EApply (magic (transl e1), transl e2)
| EMatch (e, branches) ->
O.EMatch (magic (transl e), transl_branches branches)
| ETuple es ->
O.ETuple (List.map transl es)
| EConstruct (dref, fields) ->
let info : datacon_info = Option.extract dref.datacon_info in
(* Build a list of (field index, expression) pairs. *)
let fields =
List.map (fun (f, e) ->
field_index info f,
transl e
) fields
in
(* Sort this list by index. *)
let fields = sort_by_index fields in
(* In principle, every field is there. Drop the field names,
and create a data constructor expression. *)
O.EConstruct (print_datacon_reference dref, List.map snd fields)
| EIfThenElse (e, e1, e2) ->
O.EIfThenElse (
gtz (O.EGetTag (transl e)),
transl e1,
magic (transl e2)
)
| EWhile (e1, e2) ->
O.EWhile (
gtz (O.EGetTag (transl e1)),
transl e2
)
| EFor (x, e1, f, e2, e) ->
let mkop s = EVar (Unqualified (Variable.register s)) in
let f, e2 = match f with
| To -> O.To, e2
| Downto -> O.Downto, e2
| Below -> O.To, EApply (mkop "-", ETuple [e2; EInt 1])
| Above -> O.Downto, EApply (mkop "+", ETuple [e2; EInt 1])
in
O.EFor (identifier (Variable.print x), transl e1, f, transl e2, transl e)
| ESequence (e1, e2) ->
seq (transl e1) (transl e2)
| EInt i ->
O.EInt i
| EFail s ->
O.EApply (O.EVar "MezzoLib.failwith", O.EStringLiteral s)
| ENull ->
(* Using the unit value as a representation of [null]. *)
O.ETuple []
and transl_equations eqs =
List.map (fun (p, e) ->
let p = translate_pattern p in
let e = transl e in
(* If [p] is non-trivial, then we must insert a [magic],
because [e] is matched against [p]. We must be careful
not to insert an unnecessary [magic] here, as [magic]
is not allowed on the right-hand side of [let rec]. *)
p,
if is_non_trivial_pattern p then magic e else e
) eqs
and transl_branches branches =
List.map (fun (p, e) ->
(* We insert a [magic] on every branch, because all branches
must ultimately have the same type. *)
translate_pattern p, magic (transl e)
) branches
and is_non_trivial_pattern = function
| O.PTuple _
| O.PConstruct _
| O.PRecord _ ->
true
| O.PAs (p, _) ->
is_non_trivial_pattern p
| O.PVar _
| O.PAny ->
false
(* TEMPORARY if the OCaml inliner is good, an application of a builtin
function to an argument of the appropriate shape should be simplified
to an application of the corresponding OCaml primitive operation.
Check this. If that is not the case, perform this simplification here. *)
(* ---------------------------------------------------------------------------- *)
(* Type variables. *)
let tyvar (i : int) =
Printf.sprintf "'a%d" i
let ty (i : int) =
O.TyVar (tyvar i)
let init (n : int) (f : int -> 'a) : 'a list =
let rec loop (i : int) =
if i = n then
[]
else
let x = f i in
x :: loop (i + 1)
in
loop 0
let tyvars (base : int) (n : int) : string list =
init n (fun i -> tyvar (base + i))
let tys (base : int) (n : int) : O.ty list =
init n (fun i -> ty (base + i))
(* ---------------------------------------------------------------------------- *)
(* For each algebraic data type, we create a sum type. *)
let data_sum_name (typecon : Variable.name) : string =
identifier (Variable.print typecon)
let data_branch ((base : int), (branch : data_type_def_branch)) : O.data_type_def_branch =
let datacon, fields = branch in
(* [base] is the base number for numbering our type variables. *)
let n = List.length fields in
Datacon.print datacon, tys base n
let data_sum (def : data_type_def) =
let typecon, branches = def in
(* We need as many type parameters as there are fields, in total,
in all branches. *)
let n = ref 0 in
let branches =
List.map (fun ((_, fields) as branch) ->
let base = !n in
n := base + List.length fields;
base, branch
) branches
in
let n = !n in
let lhs =
data_sum_name typecon,
tyvars 0 n
in
let rhs =
O.Sum (List.map data_branch branches)
in
O.DataTypeGroup (lhs, rhs)
(* ---------------------------------------------------------------------------- *)
(* Translating top-level items. *)
let translate_item = function
| DataType def ->
[ data_sum def ]
| ValueDefinition (flag, eqs) ->
[ O.ValueDefinition (flag, transl_equations eqs) ]
| ValueDeclaration x ->
[ O.ValueDeclaration (identifier (Variable.print x), O.TyObj) ]
| OpenDirective m ->
[ O.OpenDirective (translate_module_name m) ]
(* ---------------------------------------------------------------------------- *)
(* Translating implementations. *)
let translate_implementation items =
List.flatten (List.map translate_item items)
(* ---------------------------------------------------------------------------- *)
(* Translating interfaces. *)
let translate_interface items =
List.flatten (List.map translate_item items)
(* The values that appear in the interface are published at type [Obj.t], so
they must be re-bound in the implementation; for each such value [x], we
construct the implementation item [let x = Obj.magic x]. *)
let translate_interface_as_implementation_filter = function
| None ->
[]
| Some items ->
List.flatten (List.map (function
| DataType _
| ValueDefinition _
| OpenDirective _ ->
[]
| ValueDeclaration x ->
[ O.ValueDefinition (Nonrecursive, [ translate_pattern (PVar x), magic (transl (EVar (Unqualified x)))]) ]
) items)
| null | https://raw.githubusercontent.com/protz/mezzo/4e9d917558bd96067437116341b7a6ea02ab9c39/compiler/UntypedMezzo2UntypedOCaml.ml | ocaml | TEMPORARY think about [open]: when we mention a data constructor
or field name in OCaml, is it always in scope? or must we qualify
it? can we use qualified names everywhere?
----------------------------------------------------------------------------
When printing a (variable, type, field) name, we must make sure that it is
not an OCaml keyword. If it is one, we rename it.
----------------------------------------------------------------------------
----------------------------------------------------------------------------
This function maps a field name to a field index. It accounts for the hidden
adopter field.
TEMPORARY not pretty
should we eliminate field names in the earlier pass?
Sorting a list of pairs of an integer and a datum.
The field index has not been filled in by the type-checker!?
----------------------------------------------------------------------------
References to data constructors.
TEMPORARY think about this
----------------------------------------------------------------------------
A few smart constructors.
As patterns.
Sequence.
Push magic into the left-hand side of applications, where it is
just as powerful. This will allow more redundancy elimination.
The default case.
----------------------------------------------------------------------------
Patterns.
Build a list of (field index, pattern) pairs.
Sort this list by index.
Complete any missing entries, up to this data constructor's arity,
with wildcard patterns. At the same time, forget the indices.
Create a data constructor pattern.
We have an entry at index [i]. Use it.
We do not have an entry. Insert a wildcard pattern for this field.
----------------------------------------------------------------------------
Expressions.
We must use [Obj.set_tag]; there is no other way.
Build a list of (field index, expression) pairs.
Sort this list by index.
In principle, every field is there. Drop the field names,
and create a data constructor expression.
Using the unit value as a representation of [null].
If [p] is non-trivial, then we must insert a [magic],
because [e] is matched against [p]. We must be careful
not to insert an unnecessary [magic] here, as [magic]
is not allowed on the right-hand side of [let rec].
We insert a [magic] on every branch, because all branches
must ultimately have the same type.
TEMPORARY if the OCaml inliner is good, an application of a builtin
function to an argument of the appropriate shape should be simplified
to an application of the corresponding OCaml primitive operation.
Check this. If that is not the case, perform this simplification here.
----------------------------------------------------------------------------
Type variables.
----------------------------------------------------------------------------
For each algebraic data type, we create a sum type.
[base] is the base number for numbering our type variables.
We need as many type parameters as there are fields, in total,
in all branches.
----------------------------------------------------------------------------
Translating top-level items.
----------------------------------------------------------------------------
Translating implementations.
----------------------------------------------------------------------------
Translating interfaces.
The values that appear in the interface are published at type [Obj.t], so
they must be re-bound in the implementation; for each such value [x], we
construct the implementation item [let x = Obj.magic x]. | open SurfaceSyntax
open UntypedMezzo
module O = UntypedOCaml
This is the translation of to .
let identifier (x : string) =
if Hashtbl.mem OCamlKeywords.keyword_table x then
"__ok_" ^ x
else
x
We translate Mezzo module names in such a way that they are unlikely to be
confused with a native OCaml module name . This seems to be required ( I have
not found how to persuade OCaml to ignore its standard library ; -nostdlib
does not suffice ) and it will possibly allow us to link OCaml and Mezzo
code together in the future .
confused with a native OCaml module name. This seems to be required (I have
not found how to persuade OCaml to ignore its standard library; -nostdlib
does not suffice) and it will possibly allow us to link OCaml and Mezzo
code together in the future. *)
The Mezzo module name [ array ] becomes the OCaml module name [ ] .
let translate_module_name m =
"Mz_" ^ Module.print m
let field_index (info : datacon_info) (f : Field.name) : int =
if Field.equal f Mezzo2UntypedMezzo.adopter_field then
0
else
1 + Field.Map.find f info.datacon_fields
let sort_by_index ixs =
List.sort (fun (i1, _) (i2, _) ->
Pervasives.compare i1 i2
) ixs
This function extracts the field index that was provided by the type - checker
at field access expressions ( read and write ) . This index does not account
for the hidden adopter field , so we must add 1 .
at field access expressions (read and write). This index does not account
for the hidden adopter field, so we must add 1. *)
let extract_field_index (f : field) : int =
if Field.equal f.field_name Mezzo2UntypedMezzo.adopter_field then
0
else
match f.field_offset with
| Some index ->
1 + index
| None ->
assert false
In principle , this reference to a data constructor should be resolved in the
same way at the OCaml level and at the Mezzo level , so we can print it exactly
as it appeared in the Mezzo program .
same way at the OCaml level and at the Mezzo level, so we can print it exactly
let print_maybe_qualified f = function
| Unqualified x ->
identifier (f x)
| Qualified (m, x) ->
Printf.sprintf "%s.%s"
(translate_module_name m)
(identifier (f x))
let print_datacon_reference dref =
print_maybe_qualified Datacon.print dref.datacon_unresolved
let pas p x =
match p with
| O.PAny ->
O.PVar x
| _ ->
O.PAs (p, x)
let seq e1 e2 =
match e1, e2 with
| O.ETuple [], e
| e, O.ETuple [] ->
e
| _, _ ->
O.ESequence (e1, e2)
Integer comparison in OCaml .
let gtz x =
O.EApply (O.EVar "MezzoLib.gtz", x)
Magic .
let rec magic e =
match e with
| O.EMagic _ ->
Avoid two consecutive magics .
e
| O.EGetField _ ->
We have changed the return type of [ ] to [ ' b ] , so a
magic on top of it is unnecessary .
magic on top of it is unnecessary. *)
e
| O.EApply (e1, e2) ->
O.EApply (magic e1, e2)
| e ->
O.EMagic e
OCaml does not have type casts within patterns , so we must produce
well - typed patterns , and furthermore , if several patterns are
type - compatible in Mezzo , then their OCaml counterparts must be
type - compatible in OCaml .
well-typed patterns, and furthermore, if several patterns are
type-compatible in Mezzo, then their OCaml counterparts must be
type-compatible in OCaml. *)
The translation of [ PConstruct ] patterns is somewhat tricky . When there
exist multiple tags ( i.e. , the pattern is refutable ) , we must translate it
to a [ PConstruct ] pattern , because that is the only way of examining the
tag within an OCaml pattern . When there exists just one tag , we could
translate to a [ PRecord ] pattern ; but , for simplicity , we will avoid
distinguishing a special case . Now , in OCaml , data constructors carry
anonymous fields , so we are forced to drop the field names and rely purely
on field offsets .
exist multiple tags (i.e., the pattern is refutable), we must translate it
to a [PConstruct] pattern, because that is the only way of examining the
tag within an OCaml pattern. When there exists just one tag, we could
translate to a [PRecord] pattern; but, for simplicity, we will avoid
distinguishing a special case. Now, in OCaml, data constructors carry
anonymous fields, so we are forced to drop the field names and rely purely
on field offsets. *)
For this translation to work , we will have to translate a Mezzo algebraic
data type to a corresponding OCaml algebraic data type , with the same data
constructors , same arity ( plus one , for the adopter field ) , and use a
distinct type variable as the type of each argument .
data type to a corresponding OCaml algebraic data type, with the same data
constructors, same arity (plus one, for the adopter field), and use a
distinct type variable as the type of each argument. *)
let rec translate_pattern (p : pattern) : O.pattern =
match p with
| PVar x ->
O.PVar (identifier (Variable.print x))
| PTuple ps ->
O.PTuple (List.map translate_pattern ps)
| PConstruct (dref, fields) ->
let info : datacon_info = Option.extract dref.datacon_info in
let fields =
List.map (fun (f, p) ->
field_index info f,
translate_pattern p
) fields
in
let fields = sort_by_index fields in
let arity = 1 + info.datacon_arity in
let ps = complete 0 arity fields in
O.PConstruct (print_datacon_reference dref, ps)
| PLocated (p, _)
| PConstraint (p, _) ->
translate_pattern p
| PAs (p, x) ->
pas (translate_pattern p) (identifier (Variable.print x))
| PAny ->
O.PAny
and complete i arity ips =
if i = arity then
[]
else
match ips with
| (j, p) :: ips when i = j ->
p :: complete (i + 1) arity ips
| _ ->
O.PAny :: complete (i + 1) arity ips
We avoid using [ Obj.field ] and [ Obj.set_field ] , when possible , because they
are less efficient in terms of speed and code size . In particular , they seem
to incorporate a check against the special tag 254 , which represents an array
of values of type double . TEMPORARY not done yet
are less efficient in terms of speed and code size. In particular, they seem
to incorporate a check against the special tag 254, which represents an array
of values of type double. TEMPORARY not done yet *)
let rec transl (e : expression) : O.expression =
match e with
| EVar x ->
O.EVar (print_maybe_qualified Variable.print x)
| EBuiltin b ->
The builtin operations are defined in the OCaml library module
[ MezzoLib ] .
[MezzoLib]. *)
O.EVar (Printf.sprintf "MezzoLib.%s" b)
| ELet (flag, eqs, body) ->
O.ELet (flag, transl_equations eqs, transl body)
| EFun (p, e) ->
O.EFun (translate_pattern p, transl e)
| EAssign (e1, f, e2) ->
O.ESetField (transl e1, extract_field_index f, transl e2)
| EAssignTag (e, dref, info) ->
As an optimization , if the old and new integer tags are equal ,
there is nothing to do . It is OK , in this case , not to translate
[ e ] at all , because the definition of guarantees
that [ e ] is a value .
there is nothing to do. It is OK, in this case, not to translate
[e] at all, because the definition of Untyped Mezzo guarantees
that [e] is a value. *)
let phantom = Option.extract info.is_phantom_update in
if phantom then
O.ETuple []
else
let info = Option.extract dref.datacon_info in
O.ESetTag (transl e, info.datacon_index)
| EAccess (e, f) ->
O.EGetField (transl e, extract_field_index f)
| EApply (e1, e2) ->
O.EApply (magic (transl e1), transl e2)
| EMatch (e, branches) ->
O.EMatch (magic (transl e), transl_branches branches)
| ETuple es ->
O.ETuple (List.map transl es)
| EConstruct (dref, fields) ->
let info : datacon_info = Option.extract dref.datacon_info in
let fields =
List.map (fun (f, e) ->
field_index info f,
transl e
) fields
in
let fields = sort_by_index fields in
O.EConstruct (print_datacon_reference dref, List.map snd fields)
| EIfThenElse (e, e1, e2) ->
O.EIfThenElse (
gtz (O.EGetTag (transl e)),
transl e1,
magic (transl e2)
)
| EWhile (e1, e2) ->
O.EWhile (
gtz (O.EGetTag (transl e1)),
transl e2
)
| EFor (x, e1, f, e2, e) ->
let mkop s = EVar (Unqualified (Variable.register s)) in
let f, e2 = match f with
| To -> O.To, e2
| Downto -> O.Downto, e2
| Below -> O.To, EApply (mkop "-", ETuple [e2; EInt 1])
| Above -> O.Downto, EApply (mkop "+", ETuple [e2; EInt 1])
in
O.EFor (identifier (Variable.print x), transl e1, f, transl e2, transl e)
| ESequence (e1, e2) ->
seq (transl e1) (transl e2)
| EInt i ->
O.EInt i
| EFail s ->
O.EApply (O.EVar "MezzoLib.failwith", O.EStringLiteral s)
| ENull ->
O.ETuple []
and transl_equations eqs =
List.map (fun (p, e) ->
let p = translate_pattern p in
let e = transl e in
p,
if is_non_trivial_pattern p then magic e else e
) eqs
and transl_branches branches =
List.map (fun (p, e) ->
translate_pattern p, magic (transl e)
) branches
and is_non_trivial_pattern = function
| O.PTuple _
| O.PConstruct _
| O.PRecord _ ->
true
| O.PAs (p, _) ->
is_non_trivial_pattern p
| O.PVar _
| O.PAny ->
false
let tyvar (i : int) =
Printf.sprintf "'a%d" i
let ty (i : int) =
O.TyVar (tyvar i)
let init (n : int) (f : int -> 'a) : 'a list =
let rec loop (i : int) =
if i = n then
[]
else
let x = f i in
x :: loop (i + 1)
in
loop 0
let tyvars (base : int) (n : int) : string list =
init n (fun i -> tyvar (base + i))
let tys (base : int) (n : int) : O.ty list =
init n (fun i -> ty (base + i))
let data_sum_name (typecon : Variable.name) : string =
identifier (Variable.print typecon)
let data_branch ((base : int), (branch : data_type_def_branch)) : O.data_type_def_branch =
let datacon, fields = branch in
let n = List.length fields in
Datacon.print datacon, tys base n
let data_sum (def : data_type_def) =
let typecon, branches = def in
let n = ref 0 in
let branches =
List.map (fun ((_, fields) as branch) ->
let base = !n in
n := base + List.length fields;
base, branch
) branches
in
let n = !n in
let lhs =
data_sum_name typecon,
tyvars 0 n
in
let rhs =
O.Sum (List.map data_branch branches)
in
O.DataTypeGroup (lhs, rhs)
let translate_item = function
| DataType def ->
[ data_sum def ]
| ValueDefinition (flag, eqs) ->
[ O.ValueDefinition (flag, transl_equations eqs) ]
| ValueDeclaration x ->
[ O.ValueDeclaration (identifier (Variable.print x), O.TyObj) ]
| OpenDirective m ->
[ O.OpenDirective (translate_module_name m) ]
let translate_implementation items =
List.flatten (List.map translate_item items)
let translate_interface items =
List.flatten (List.map translate_item items)
let translate_interface_as_implementation_filter = function
| None ->
[]
| Some items ->
List.flatten (List.map (function
| DataType _
| ValueDefinition _
| OpenDirective _ ->
[]
| ValueDeclaration x ->
[ O.ValueDefinition (Nonrecursive, [ translate_pattern (PVar x), magic (transl (EVar (Unqualified x)))]) ]
) items)
|
41207de7c89fed2bfa0e092cc0f74e6400507d363d74ff72e4eec29f46dcdfb0 | janestreet/core | ofday_float.mli | open! Import
include Ofday_intf.S with type underlying = float and module Span := Span_float
module Stable : sig
module V1 : sig
type nonrec t = t
[@@deriving bin_io, compare, hash, sexp, sexp_grammar, stable_witness]
end
end
| null | https://raw.githubusercontent.com/janestreet/core/4b6635d206f7adcfac8324820d246299d6f572fe/core/src/ofday_float.mli | ocaml | open! Import
include Ofday_intf.S with type underlying = float and module Span := Span_float
module Stable : sig
module V1 : sig
type nonrec t = t
[@@deriving bin_io, compare, hash, sexp, sexp_grammar, stable_witness]
end
end
| |
1eaadaa69578933db7f684c590f21f2d344a6a8f0dc04659da9bf0c720045a0c | cubicle-model-checker/cubicle | smt.ml | (**************************************************************************)
(* *)
Cubicle
(* *)
Copyright ( C ) 2011 - 2014
(* *)
and
Universite Paris - Sud 11
(* *)
(* *)
This file is distributed under the terms of the Apache Software
(* License version 2.0 *)
(* *)
(**************************************************************************)
open Options
let select_solver =
match smt_solver with
| AltErgo -> (module Alt_ergo : Smt_sig.S)
| Z3 -> (module Z3wrapper : Smt_sig.S)
module Selected_Smt : Smt_sig.S = (val (select_solver))
include Selected_Smt
| null | https://raw.githubusercontent.com/cubicle-model-checker/cubicle/00f09bb2d4bb496549775e770d7ada08bc1e4866/smt/smt.ml | ocaml | ************************************************************************
License version 2.0
************************************************************************ | Cubicle
Copyright ( C ) 2011 - 2014
and
Universite Paris - Sud 11
This file is distributed under the terms of the Apache Software
open Options
let select_solver =
match smt_solver with
| AltErgo -> (module Alt_ergo : Smt_sig.S)
| Z3 -> (module Z3wrapper : Smt_sig.S)
module Selected_Smt : Smt_sig.S = (val (select_solver))
include Selected_Smt
|
0bae79d3804ac90f27120910725d7b75fa5dd13a058dbdd3cad92db20549d0b4 | pavankumarbn/DroneGUIROS | navdata_watchdog.lisp | ; Auto-generated. Do not edit!
(cl:in-package ardrone_autonomy-msg)
// ! \htmlinclude navdata_watchdog.msg.html
(cl:defclass <navdata_watchdog> (roslisp-msg-protocol:ros-message)
((header
:reader header
:initarg :header
:type std_msgs-msg:Header
:initform (cl:make-instance 'std_msgs-msg:Header))
(drone_time
:reader drone_time
:initarg :drone_time
:type cl:float
:initform 0.0)
(tag
:reader tag
:initarg :tag
:type cl:fixnum
:initform 0)
(size
:reader size
:initarg :size
:type cl:fixnum
:initform 0))
)
(cl:defclass navdata_watchdog (<navdata_watchdog>)
())
(cl:defmethod cl:initialize-instance :after ((m <navdata_watchdog>) cl:&rest args)
(cl:declare (cl:ignorable args))
(cl:unless (cl:typep m 'navdata_watchdog)
(roslisp-msg-protocol:msg-deprecation-warning "using old message class name ardrone_autonomy-msg:<navdata_watchdog> is deprecated: use ardrone_autonomy-msg:navdata_watchdog instead.")))
(cl:ensure-generic-function 'header-val :lambda-list '(m))
(cl:defmethod header-val ((m <navdata_watchdog>))
(roslisp-msg-protocol:msg-deprecation-warning "Using old-style slot reader ardrone_autonomy-msg:header-val is deprecated. Use ardrone_autonomy-msg:header instead.")
(header m))
(cl:ensure-generic-function 'drone_time-val :lambda-list '(m))
(cl:defmethod drone_time-val ((m <navdata_watchdog>))
(roslisp-msg-protocol:msg-deprecation-warning "Using old-style slot reader ardrone_autonomy-msg:drone_time-val is deprecated. Use ardrone_autonomy-msg:drone_time instead.")
(drone_time m))
(cl:ensure-generic-function 'tag-val :lambda-list '(m))
(cl:defmethod tag-val ((m <navdata_watchdog>))
(roslisp-msg-protocol:msg-deprecation-warning "Using old-style slot reader ardrone_autonomy-msg:tag-val is deprecated. Use ardrone_autonomy-msg:tag instead.")
(tag m))
(cl:ensure-generic-function 'size-val :lambda-list '(m))
(cl:defmethod size-val ((m <navdata_watchdog>))
(roslisp-msg-protocol:msg-deprecation-warning "Using old-style slot reader ardrone_autonomy-msg:size-val is deprecated. Use ardrone_autonomy-msg:size instead.")
(size m))
(cl:defmethod roslisp-msg-protocol:serialize ((msg <navdata_watchdog>) ostream)
"Serializes a message object of type '<navdata_watchdog>"
(roslisp-msg-protocol:serialize (cl:slot-value msg 'header) ostream)
(cl:let ((bits (roslisp-utils:encode-double-float-bits (cl:slot-value msg 'drone_time))))
(cl:write-byte (cl:ldb (cl:byte 8 0) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 8) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 16) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 24) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 32) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 40) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 48) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 56) bits) ostream))
(cl:write-byte (cl:ldb (cl:byte 8 0) (cl:slot-value msg 'tag)) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 8) (cl:slot-value msg 'tag)) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 0) (cl:slot-value msg 'size)) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 8) (cl:slot-value msg 'size)) ostream)
)
(cl:defmethod roslisp-msg-protocol:deserialize ((msg <navdata_watchdog>) istream)
"Deserializes a message object of type '<navdata_watchdog>"
(roslisp-msg-protocol:deserialize (cl:slot-value msg 'header) istream)
(cl:let ((bits 0))
(cl:setf (cl:ldb (cl:byte 8 0) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 8) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 16) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 24) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 32) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 40) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 48) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 56) bits) (cl:read-byte istream))
(cl:setf (cl:slot-value msg 'drone_time) (roslisp-utils:decode-double-float-bits bits)))
(cl:setf (cl:ldb (cl:byte 8 0) (cl:slot-value msg 'tag)) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 8) (cl:slot-value msg 'tag)) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 0) (cl:slot-value msg 'size)) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 8) (cl:slot-value msg 'size)) (cl:read-byte istream))
msg
)
(cl:defmethod roslisp-msg-protocol:ros-datatype ((msg (cl:eql '<navdata_watchdog>)))
"Returns string type for a message object of type '<navdata_watchdog>"
"ardrone_autonomy/navdata_watchdog")
(cl:defmethod roslisp-msg-protocol:ros-datatype ((msg (cl:eql 'navdata_watchdog)))
"Returns string type for a message object of type 'navdata_watchdog"
"ardrone_autonomy/navdata_watchdog")
(cl:defmethod roslisp-msg-protocol:md5sum ((type (cl:eql '<navdata_watchdog>)))
"Returns md5sum for a message object of type '<navdata_watchdog>"
"9e64269f1f5e463ea4f48c395e917507")
(cl:defmethod roslisp-msg-protocol:md5sum ((type (cl:eql 'navdata_watchdog)))
"Returns md5sum for a message object of type 'navdata_watchdog"
"9e64269f1f5e463ea4f48c395e917507")
(cl:defmethod roslisp-msg-protocol:message-definition ((type (cl:eql '<navdata_watchdog>)))
"Returns full string definition for message of type '<navdata_watchdog>"
(cl:format cl:nil "Header header~%float64 drone_time~%uint16 tag~%uint16 size~%~%================================================================================~%MSG: std_msgs/Header~%# Standard metadata for higher-level stamped data types.~%# This is generally used to communicate timestamped data ~%# in a particular coordinate frame.~%# ~%# sequence ID: consecutively increasing ID ~%uint32 seq~%#Two-integer timestamp that is expressed as:~%# * stamp.sec: seconds (stamp_secs) since epoch (in Python the variable is called 'secs')~%# * stamp.nsec: nanoseconds since stamp_secs (in Python the variable is called 'nsecs')~%# time-handling sugar is provided by the client library~%time stamp~%#Frame this data is associated with~%# 0: no frame~%# 1: global frame~%string frame_id~%~%~%"))
(cl:defmethod roslisp-msg-protocol:message-definition ((type (cl:eql 'navdata_watchdog)))
"Returns full string definition for message of type 'navdata_watchdog"
(cl:format cl:nil "Header header~%float64 drone_time~%uint16 tag~%uint16 size~%~%================================================================================~%MSG: std_msgs/Header~%# Standard metadata for higher-level stamped data types.~%# This is generally used to communicate timestamped data ~%# in a particular coordinate frame.~%# ~%# sequence ID: consecutively increasing ID ~%uint32 seq~%#Two-integer timestamp that is expressed as:~%# * stamp.sec: seconds (stamp_secs) since epoch (in Python the variable is called 'secs')~%# * stamp.nsec: nanoseconds since stamp_secs (in Python the variable is called 'nsecs')~%# time-handling sugar is provided by the client library~%time stamp~%#Frame this data is associated with~%# 0: no frame~%# 1: global frame~%string frame_id~%~%~%"))
(cl:defmethod roslisp-msg-protocol:serialization-length ((msg <navdata_watchdog>))
(cl:+ 0
(roslisp-msg-protocol:serialization-length (cl:slot-value msg 'header))
8
2
2
))
(cl:defmethod roslisp-msg-protocol:ros-message-to-list ((msg <navdata_watchdog>))
"Converts a ROS message object to a list"
(cl:list 'navdata_watchdog
(cl:cons ':header (header msg))
(cl:cons ':drone_time (drone_time msg))
(cl:cons ':tag (tag msg))
(cl:cons ':size (size msg))
))
| null | https://raw.githubusercontent.com/pavankumarbn/DroneGUIROS/745320d73035bc50ac4fea2699e22586e10be800/devel/share/common-lisp/ros/ardrone_autonomy/msg/navdata_watchdog.lisp | lisp | Auto-generated. Do not edit! |
(cl:in-package ardrone_autonomy-msg)
// ! \htmlinclude navdata_watchdog.msg.html
(cl:defclass <navdata_watchdog> (roslisp-msg-protocol:ros-message)
((header
:reader header
:initarg :header
:type std_msgs-msg:Header
:initform (cl:make-instance 'std_msgs-msg:Header))
(drone_time
:reader drone_time
:initarg :drone_time
:type cl:float
:initform 0.0)
(tag
:reader tag
:initarg :tag
:type cl:fixnum
:initform 0)
(size
:reader size
:initarg :size
:type cl:fixnum
:initform 0))
)
(cl:defclass navdata_watchdog (<navdata_watchdog>)
())
(cl:defmethod cl:initialize-instance :after ((m <navdata_watchdog>) cl:&rest args)
(cl:declare (cl:ignorable args))
(cl:unless (cl:typep m 'navdata_watchdog)
(roslisp-msg-protocol:msg-deprecation-warning "using old message class name ardrone_autonomy-msg:<navdata_watchdog> is deprecated: use ardrone_autonomy-msg:navdata_watchdog instead.")))
(cl:ensure-generic-function 'header-val :lambda-list '(m))
(cl:defmethod header-val ((m <navdata_watchdog>))
(roslisp-msg-protocol:msg-deprecation-warning "Using old-style slot reader ardrone_autonomy-msg:header-val is deprecated. Use ardrone_autonomy-msg:header instead.")
(header m))
(cl:ensure-generic-function 'drone_time-val :lambda-list '(m))
(cl:defmethod drone_time-val ((m <navdata_watchdog>))
(roslisp-msg-protocol:msg-deprecation-warning "Using old-style slot reader ardrone_autonomy-msg:drone_time-val is deprecated. Use ardrone_autonomy-msg:drone_time instead.")
(drone_time m))
(cl:ensure-generic-function 'tag-val :lambda-list '(m))
(cl:defmethod tag-val ((m <navdata_watchdog>))
(roslisp-msg-protocol:msg-deprecation-warning "Using old-style slot reader ardrone_autonomy-msg:tag-val is deprecated. Use ardrone_autonomy-msg:tag instead.")
(tag m))
(cl:ensure-generic-function 'size-val :lambda-list '(m))
(cl:defmethod size-val ((m <navdata_watchdog>))
(roslisp-msg-protocol:msg-deprecation-warning "Using old-style slot reader ardrone_autonomy-msg:size-val is deprecated. Use ardrone_autonomy-msg:size instead.")
(size m))
(cl:defmethod roslisp-msg-protocol:serialize ((msg <navdata_watchdog>) ostream)
"Serializes a message object of type '<navdata_watchdog>"
(roslisp-msg-protocol:serialize (cl:slot-value msg 'header) ostream)
(cl:let ((bits (roslisp-utils:encode-double-float-bits (cl:slot-value msg 'drone_time))))
(cl:write-byte (cl:ldb (cl:byte 8 0) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 8) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 16) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 24) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 32) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 40) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 48) bits) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 56) bits) ostream))
(cl:write-byte (cl:ldb (cl:byte 8 0) (cl:slot-value msg 'tag)) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 8) (cl:slot-value msg 'tag)) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 0) (cl:slot-value msg 'size)) ostream)
(cl:write-byte (cl:ldb (cl:byte 8 8) (cl:slot-value msg 'size)) ostream)
)
(cl:defmethod roslisp-msg-protocol:deserialize ((msg <navdata_watchdog>) istream)
"Deserializes a message object of type '<navdata_watchdog>"
(roslisp-msg-protocol:deserialize (cl:slot-value msg 'header) istream)
(cl:let ((bits 0))
(cl:setf (cl:ldb (cl:byte 8 0) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 8) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 16) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 24) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 32) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 40) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 48) bits) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 56) bits) (cl:read-byte istream))
(cl:setf (cl:slot-value msg 'drone_time) (roslisp-utils:decode-double-float-bits bits)))
(cl:setf (cl:ldb (cl:byte 8 0) (cl:slot-value msg 'tag)) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 8) (cl:slot-value msg 'tag)) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 0) (cl:slot-value msg 'size)) (cl:read-byte istream))
(cl:setf (cl:ldb (cl:byte 8 8) (cl:slot-value msg 'size)) (cl:read-byte istream))
msg
)
(cl:defmethod roslisp-msg-protocol:ros-datatype ((msg (cl:eql '<navdata_watchdog>)))
"Returns string type for a message object of type '<navdata_watchdog>"
"ardrone_autonomy/navdata_watchdog")
(cl:defmethod roslisp-msg-protocol:ros-datatype ((msg (cl:eql 'navdata_watchdog)))
"Returns string type for a message object of type 'navdata_watchdog"
"ardrone_autonomy/navdata_watchdog")
(cl:defmethod roslisp-msg-protocol:md5sum ((type (cl:eql '<navdata_watchdog>)))
"Returns md5sum for a message object of type '<navdata_watchdog>"
"9e64269f1f5e463ea4f48c395e917507")
(cl:defmethod roslisp-msg-protocol:md5sum ((type (cl:eql 'navdata_watchdog)))
"Returns md5sum for a message object of type 'navdata_watchdog"
"9e64269f1f5e463ea4f48c395e917507")
(cl:defmethod roslisp-msg-protocol:message-definition ((type (cl:eql '<navdata_watchdog>)))
"Returns full string definition for message of type '<navdata_watchdog>"
(cl:format cl:nil "Header header~%float64 drone_time~%uint16 tag~%uint16 size~%~%================================================================================~%MSG: std_msgs/Header~%# Standard metadata for higher-level stamped data types.~%# This is generally used to communicate timestamped data ~%# in a particular coordinate frame.~%# ~%# sequence ID: consecutively increasing ID ~%uint32 seq~%#Two-integer timestamp that is expressed as:~%# * stamp.sec: seconds (stamp_secs) since epoch (in Python the variable is called 'secs')~%# * stamp.nsec: nanoseconds since stamp_secs (in Python the variable is called 'nsecs')~%# time-handling sugar is provided by the client library~%time stamp~%#Frame this data is associated with~%# 0: no frame~%# 1: global frame~%string frame_id~%~%~%"))
(cl:defmethod roslisp-msg-protocol:message-definition ((type (cl:eql 'navdata_watchdog)))
"Returns full string definition for message of type 'navdata_watchdog"
(cl:format cl:nil "Header header~%float64 drone_time~%uint16 tag~%uint16 size~%~%================================================================================~%MSG: std_msgs/Header~%# Standard metadata for higher-level stamped data types.~%# This is generally used to communicate timestamped data ~%# in a particular coordinate frame.~%# ~%# sequence ID: consecutively increasing ID ~%uint32 seq~%#Two-integer timestamp that is expressed as:~%# * stamp.sec: seconds (stamp_secs) since epoch (in Python the variable is called 'secs')~%# * stamp.nsec: nanoseconds since stamp_secs (in Python the variable is called 'nsecs')~%# time-handling sugar is provided by the client library~%time stamp~%#Frame this data is associated with~%# 0: no frame~%# 1: global frame~%string frame_id~%~%~%"))
(cl:defmethod roslisp-msg-protocol:serialization-length ((msg <navdata_watchdog>))
(cl:+ 0
(roslisp-msg-protocol:serialization-length (cl:slot-value msg 'header))
8
2
2
))
(cl:defmethod roslisp-msg-protocol:ros-message-to-list ((msg <navdata_watchdog>))
"Converts a ROS message object to a list"
(cl:list 'navdata_watchdog
(cl:cons ':header (header msg))
(cl:cons ':drone_time (drone_time msg))
(cl:cons ':tag (tag msg))
(cl:cons ':size (size msg))
))
|
6d41744523edaa641620e420b41b2cbac3c9caa9e28f630e08bdbc13858d2eab | MLstate/opalang | tree.ml |
Copyright © 2011 MLstate
This file is part of .
is free software : you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License , version 3 , as published by
the Free Software Foundation .
is distributed in the hope that it will be useful , but WITHOUT ANY
WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE . See the GNU Affero General Public License for
more details .
You should have received a copy of the GNU Affero General Public License
along with . If not , see < / > .
Copyright © 2011 MLstate
This file is part of Opa.
Opa is free software: you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License, version 3, as published by
the Free Software Foundation.
Opa is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
more details.
You should have received a copy of the GNU Affero General Public License
along with Opa. If not, see </>.
*)
(** A module for general non empty tree.
If you need trees that may be empty, you can use an option on this type.
*)
module List = BaseList
module Tree : sig
type 'a t = Tree of 'a * 'a t list
val leaf : 'a -> 'a t
val is_leaf : 'a t -> bool
val value : 'a t -> 'a
val children : 'a t -> 'a t list
val to_string : ('a -> string) -> 'a t -> string
val get_path_opt : 'a t -> 'a list -> 'a t option
end = struct
type 'a t = Tree of 'a * 'a t list
let leaf a = Tree(a,[])
let is_leaf (Tree (_,l)) = l = []
let value (Tree (a,_)) = a
let children (Tree (_,l)) = l
let rec to_string f (Tree (s,l)) =
Printf.sprintf "(%s:%s)" (f s) (List.to_string (to_string f) l)
let rec get_path_opt (Tree (_,children) as tree) = function
| [] -> Some tree
| h :: t ->
match List.find_opt (fun tr -> value tr = h) children with
| None -> None
| Some tree -> get_path_opt tree t
end
include Tree
module S = struct
type 'a t = 'b Tree.t constraint 'a = 'b * 'c * 'd
let subs_cons (Tree (x,l)) = (fun l -> Tree(x,l)), l
end
(** defines map, fold, etc. *)
module Walk = Traverse.Make(S)
| null | https://raw.githubusercontent.com/MLstate/opalang/424b369160ce693406cece6ac033d75d85f5df4f/ocamllib/libbase/tree.ml | ocaml | * A module for general non empty tree.
If you need trees that may be empty, you can use an option on this type.
* defines map, fold, etc. |
Copyright © 2011 MLstate
This file is part of .
is free software : you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License , version 3 , as published by
the Free Software Foundation .
is distributed in the hope that it will be useful , but WITHOUT ANY
WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE . See the GNU Affero General Public License for
more details .
You should have received a copy of the GNU Affero General Public License
along with . If not , see < / > .
Copyright © 2011 MLstate
This file is part of Opa.
Opa is free software: you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License, version 3, as published by
the Free Software Foundation.
Opa is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
more details.
You should have received a copy of the GNU Affero General Public License
along with Opa. If not, see </>.
*)
module List = BaseList
module Tree : sig
type 'a t = Tree of 'a * 'a t list
val leaf : 'a -> 'a t
val is_leaf : 'a t -> bool
val value : 'a t -> 'a
val children : 'a t -> 'a t list
val to_string : ('a -> string) -> 'a t -> string
val get_path_opt : 'a t -> 'a list -> 'a t option
end = struct
type 'a t = Tree of 'a * 'a t list
let leaf a = Tree(a,[])
let is_leaf (Tree (_,l)) = l = []
let value (Tree (a,_)) = a
let children (Tree (_,l)) = l
let rec to_string f (Tree (s,l)) =
Printf.sprintf "(%s:%s)" (f s) (List.to_string (to_string f) l)
let rec get_path_opt (Tree (_,children) as tree) = function
| [] -> Some tree
| h :: t ->
match List.find_opt (fun tr -> value tr = h) children with
| None -> None
| Some tree -> get_path_opt tree t
end
include Tree
module S = struct
type 'a t = 'b Tree.t constraint 'a = 'b * 'c * 'd
let subs_cons (Tree (x,l)) = (fun l -> Tree(x,l)), l
end
module Walk = Traverse.Make(S)
|
bb8fc7bf689fa18e365a46e0c1b5d26fbfac014a4ddc362735590cff6a281007 | caradoc-org/caradoc | selectview.ml | (*****************************************************************************)
(* Caradoc: a PDF parser and validator *)
Copyright ( C ) 2017
(* *)
(* This program is free software; you can redistribute it and/or modify *)
it under the terms of the GNU General Public License version 2 as
published by the Free Software Foundation .
(* *)
(* This program is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU General Public License for more details. *)
(* *)
You should have received a copy of the GNU General Public License along
with this program ; if not , write to the Free Software Foundation , Inc. ,
51 Franklin Street , Fifth Floor , Boston , USA .
(*****************************************************************************)
open Listview
open Uiutils
module SelectView = struct
type t = {
view : ListView.t;
title : string;
mutable offset : int;
}
let make (a : string array) (title : string) : t =
{view = ListView.make a; title = title; offset = 0;}
let get_selection (v : t) : int =
v.view.ListView.offset
let move_up (v : t) =
ListView.move_up v.view
let move_down (v : t) =
ListView.move_down v.view
let move_to (v : t) =
ListView.move_to v.view
let move_home (v : t) =
ListView.move_home v.view
let move_end (v : t) =
ListView.move_end v.view
let adjust (v : t) (height : int) : unit =
(* Adjust w.r.t. selection *)
if v.view.ListView.offset >= v.offset + height then
v.offset <- v.view.ListView.offset - height + 1
else if v.view.ListView.offset < v.offset then
v.offset <- v.view.ListView.offset;
(* Adjust w.r.t. window *)
if v.view.ListView.len < v.offset + height then
v.offset <- v.view.ListView.len - height;
if v.offset < 0 then
v.offset <- 0
let draw (v : t) (w : Curses.window) : unit =
let height, width = Curses.getmaxyx w in
adjust v (height - 1);
assert (Curses.waddstr w (inlinestr v.title width));
for i = 1 to height - 1 do
let j = v.offset + i - 1 in
let s = trim_str (if j < v.view.ListView.len then v.view.ListView.buf.(j) else "~") width in
if j == v.view.ListView.offset then
reverse_wadd_inlinestr w i s width
else
assert (Curses.mvwaddstr w i 0 s);
done
end
| null | https://raw.githubusercontent.com/caradoc-org/caradoc/100f53bc55ef682049e10fabf24869bc019dc6ce/src/ui/selectview.ml | ocaml | ***************************************************************************
Caradoc: a PDF parser and validator
This program is free software; you can redistribute it and/or modify
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
***************************************************************************
Adjust w.r.t. selection
Adjust w.r.t. window | Copyright ( C ) 2017
it under the terms of the GNU General Public License version 2 as
published by the Free Software Foundation .
You should have received a copy of the GNU General Public License along
with this program ; if not , write to the Free Software Foundation , Inc. ,
51 Franklin Street , Fifth Floor , Boston , USA .
open Listview
open Uiutils
module SelectView = struct
type t = {
view : ListView.t;
title : string;
mutable offset : int;
}
let make (a : string array) (title : string) : t =
{view = ListView.make a; title = title; offset = 0;}
let get_selection (v : t) : int =
v.view.ListView.offset
let move_up (v : t) =
ListView.move_up v.view
let move_down (v : t) =
ListView.move_down v.view
let move_to (v : t) =
ListView.move_to v.view
let move_home (v : t) =
ListView.move_home v.view
let move_end (v : t) =
ListView.move_end v.view
let adjust (v : t) (height : int) : unit =
if v.view.ListView.offset >= v.offset + height then
v.offset <- v.view.ListView.offset - height + 1
else if v.view.ListView.offset < v.offset then
v.offset <- v.view.ListView.offset;
if v.view.ListView.len < v.offset + height then
v.offset <- v.view.ListView.len - height;
if v.offset < 0 then
v.offset <- 0
let draw (v : t) (w : Curses.window) : unit =
let height, width = Curses.getmaxyx w in
adjust v (height - 1);
assert (Curses.waddstr w (inlinestr v.title width));
for i = 1 to height - 1 do
let j = v.offset + i - 1 in
let s = trim_str (if j < v.view.ListView.len then v.view.ListView.buf.(j) else "~") width in
if j == v.view.ListView.offset then
reverse_wadd_inlinestr w i s width
else
assert (Curses.mvwaddstr w i 0 s);
done
end
|
7f1e7c93897d65f1bfcae1cf1f7a9a8d0553e8bf9164e0d59f73c822b419fc0a | huangjs/cl | mcstep.lisp | ;;; Compiled by f2cl version:
( " f2cl1.l , v 1.215 2009/04/07 22:05:21 rtoy Exp $ "
" f2cl2.l , v 1.37 2008/02/22 22:19:33 rtoy Exp $ "
" f2cl3.l , v 1.6 2008/02/22 22:19:33 rtoy Exp $ "
" f2cl4.l , v 1.7 2008/02/22 22:19:34 rtoy Exp $ "
" f2cl5.l , v 1.200 2009/01/19 02:38:17 rtoy Exp $ "
" f2cl6.l , v 1.48 2008/08/24 00:56:27 rtoy Exp $ "
" macros.l , v 1.112 2009/01/08 12:57:19 " )
Using Lisp CMU Common Lisp 19f ( 19F )
;;;
;;; Options: ((:prune-labels nil) (:auto-save t) (:relaxed-array-decls t)
;;; (:coerce-assigns :as-needed) (:array-type ':array)
;;; (:array-slicing t) (:declare-common nil)
;;; (:float-format double-float))
(in-package :common-lisp-user)
(defun mcstep (stx fx dx sty fy dy stp fp dp brackt stpmin stpmax info)
(declare (type (f2cl-lib:integer4) info)
(type f2cl-lib:logical brackt)
(type (double-float) stpmax stpmin dp fp stp dy fy sty dx fx stx))
(prog ((gamma 0.0) (p 0.0) (q 0.0) (r 0.0) (s 0.0) (sgnd 0.0) (stpc 0.0)
(stpf 0.0) (stpq 0.0) (theta 0.0) (bound nil))
(declare (type f2cl-lib:logical bound)
(type (double-float) theta stpq stpf stpc sgnd s r q p gamma))
(setf info 0)
(if
(or (and brackt (or (<= stp (min stx sty)) (>= stp (max stx sty))))
(>= (* dx (- stp stx)) 0.0)
(< stpmax stpmin))
(go end_label))
(setf sgnd (* dp (/ dx (f2cl-lib:dabs dx))))
(cond
((> fp fx)
(setf info 1)
(setf bound f2cl-lib:%true%)
(setf theta (+ (/ (* 3 (- fx fp)) (- stp stx)) dx dp))
(setf s
(max (f2cl-lib:dabs theta)
(f2cl-lib:dabs dx)
(f2cl-lib:dabs dp)))
(setf gamma
(* s
(f2cl-lib:dsqrt
(- (expt (/ theta s) 2) (* (/ dx s) (/ dp s))))))
(if (< stp stx) (setf gamma (- gamma)))
(setf p (+ (- gamma dx) theta))
(setf q (+ (- gamma dx) gamma dp))
(setf r (/ p q))
(setf stpc (+ stx (* r (- stp stx))))
(setf stpq
(+ stx
(* (/ (/ dx (+ (/ (- fx fp) (- stp stx)) dx)) 2)
(- stp stx))))
(cond
((< (f2cl-lib:dabs (+ stpc (- stx))) (f2cl-lib:dabs (+ stpq (- stx))))
(setf stpf stpc))
(t
(setf stpf (+ stpc (/ (- stpq stpc) 2)))))
(setf brackt f2cl-lib:%true%))
((< sgnd 0.0)
(setf info 2)
(setf bound f2cl-lib:%false%)
(setf theta (+ (/ (* 3 (- fx fp)) (- stp stx)) dx dp))
(setf s
(max (f2cl-lib:dabs theta)
(f2cl-lib:dabs dx)
(f2cl-lib:dabs dp)))
(setf gamma
(* s
(f2cl-lib:dsqrt
(- (expt (/ theta s) 2) (* (/ dx s) (/ dp s))))))
(if (> stp stx) (setf gamma (- gamma)))
(setf p (+ (- gamma dp) theta))
(setf q (+ (- gamma dp) gamma dx))
(setf r (/ p q))
(setf stpc (+ stp (* r (- stx stp))))
(setf stpq (+ stp (* (/ dp (- dp dx)) (- stx stp))))
(cond
((> (f2cl-lib:dabs (+ stpc (- stp))) (f2cl-lib:dabs (+ stpq (- stp))))
(setf stpf stpc))
(t
(setf stpf stpq)))
(setf brackt f2cl-lib:%true%))
((< (f2cl-lib:dabs dp) (f2cl-lib:dabs dx))
(setf info 3)
(setf bound f2cl-lib:%true%)
(setf theta (+ (/ (* 3 (- fx fp)) (- stp stx)) dx dp))
(setf s
(max (f2cl-lib:dabs theta)
(f2cl-lib:dabs dx)
(f2cl-lib:dabs dp)))
(setf gamma
(* s
(f2cl-lib:dsqrt
(max 0.0 (- (expt (/ theta s) 2) (* (/ dx s) (/ dp s)))))))
(if (> stp stx) (setf gamma (- gamma)))
(setf p (+ (- gamma dp) theta))
(setf q (+ gamma (- dx dp) gamma))
(setf r (/ p q))
(cond
((and (< r 0.0) (/= gamma 0.0))
(setf stpc (+ stp (* r (- stx stp)))))
((> stp stx)
(setf stpc stpmax))
(t
(setf stpc stpmin)))
(setf stpq (+ stp (* (/ dp (- dp dx)) (- stx stp))))
(cond
(brackt
(cond
((< (f2cl-lib:dabs (+ stp (- stpc)))
(f2cl-lib:dabs (+ stp (- stpq))))
(setf stpf stpc))
(t
(setf stpf stpq))))
(t
(cond
((> (f2cl-lib:dabs (+ stp (- stpc)))
(f2cl-lib:dabs (+ stp (- stpq))))
(setf stpf stpc))
(t
(setf stpf stpq))))))
(t
(setf info 4)
(setf bound f2cl-lib:%false%)
(cond
(brackt
(setf theta (+ (/ (* 3 (- fp fy)) (- sty stp)) dy dp))
(setf s
(max (f2cl-lib:dabs theta)
(f2cl-lib:dabs dy)
(f2cl-lib:dabs dp)))
(setf gamma
(* s
(f2cl-lib:dsqrt
(- (expt (/ theta s) 2) (* (/ dy s) (/ dp s))))))
(if (> stp sty) (setf gamma (- gamma)))
(setf p (+ (- gamma dp) theta))
(setf q (+ (- gamma dp) gamma dy))
(setf r (/ p q))
(setf stpc (+ stp (* r (- sty stp))))
(setf stpf stpc))
((> stp stx)
(setf stpf stpmax))
(t
(setf stpf stpmin)))))
(cond
((> fp fx)
(setf sty stp)
(setf fy fp)
(setf dy dp))
(t
(cond
((< sgnd 0.0)
(setf sty stx)
(setf fy fx)
(setf dy dx)))
(setf stx stp)
(setf fx fp)
(setf dx dp)))
(setf stpf (min stpmax stpf))
(setf stpf (max stpmin stpf))
(setf stp stpf)
(cond
((and brackt bound)
(cond
((> sty stx)
(setf stp (min (+ stx (* 0.66 (- sty stx))) stp)))
(t
(setf stp (max (+ stx (* 0.66 (- sty stx))) stp))))))
(go end_label)
end_label
(return (values stx fx dx sty fy dy stp nil nil brackt nil nil info))))
(in-package #-gcl #:cl-user #+gcl "CL-USER")
#+#.(cl:if (cl:find-package '#:f2cl) '(and) '(or))
(eval-when (:load-toplevel :compile-toplevel :execute)
(setf (gethash 'fortran-to-lisp::mcstep
fortran-to-lisp::*f2cl-function-info*)
(fortran-to-lisp::make-f2cl-finfo
:arg-types '((double-float) (double-float) (double-float)
(double-float) (double-float) (double-float)
(double-float) (double-float) (double-float)
fortran-to-lisp::logical (double-float) (double-float)
(fortran-to-lisp::integer4))
:return-values '(fortran-to-lisp::stx fortran-to-lisp::fx
fortran-to-lisp::dx fortran-to-lisp::sty
fortran-to-lisp::fy fortran-to-lisp::dy
fortran-to-lisp::stp nil nil
fortran-to-lisp::brackt nil nil
fortran-to-lisp::info)
:calls 'nil)))
| null | https://raw.githubusercontent.com/huangjs/cl/96158b3f82f82a6b7d53ef04b3b29c5c8de2dbf7/lib/maxima/share/lbfgs/mcstep.lisp | lisp | Compiled by f2cl version:
Options: ((:prune-labels nil) (:auto-save t) (:relaxed-array-decls t)
(:coerce-assigns :as-needed) (:array-type ':array)
(:array-slicing t) (:declare-common nil)
(:float-format double-float)) | ( " f2cl1.l , v 1.215 2009/04/07 22:05:21 rtoy Exp $ "
" f2cl2.l , v 1.37 2008/02/22 22:19:33 rtoy Exp $ "
" f2cl3.l , v 1.6 2008/02/22 22:19:33 rtoy Exp $ "
" f2cl4.l , v 1.7 2008/02/22 22:19:34 rtoy Exp $ "
" f2cl5.l , v 1.200 2009/01/19 02:38:17 rtoy Exp $ "
" f2cl6.l , v 1.48 2008/08/24 00:56:27 rtoy Exp $ "
" macros.l , v 1.112 2009/01/08 12:57:19 " )
Using Lisp CMU Common Lisp 19f ( 19F )
(in-package :common-lisp-user)
(defun mcstep (stx fx dx sty fy dy stp fp dp brackt stpmin stpmax info)
(declare (type (f2cl-lib:integer4) info)
(type f2cl-lib:logical brackt)
(type (double-float) stpmax stpmin dp fp stp dy fy sty dx fx stx))
(prog ((gamma 0.0) (p 0.0) (q 0.0) (r 0.0) (s 0.0) (sgnd 0.0) (stpc 0.0)
(stpf 0.0) (stpq 0.0) (theta 0.0) (bound nil))
(declare (type f2cl-lib:logical bound)
(type (double-float) theta stpq stpf stpc sgnd s r q p gamma))
(setf info 0)
(if
(or (and brackt (or (<= stp (min stx sty)) (>= stp (max stx sty))))
(>= (* dx (- stp stx)) 0.0)
(< stpmax stpmin))
(go end_label))
(setf sgnd (* dp (/ dx (f2cl-lib:dabs dx))))
(cond
((> fp fx)
(setf info 1)
(setf bound f2cl-lib:%true%)
(setf theta (+ (/ (* 3 (- fx fp)) (- stp stx)) dx dp))
(setf s
(max (f2cl-lib:dabs theta)
(f2cl-lib:dabs dx)
(f2cl-lib:dabs dp)))
(setf gamma
(* s
(f2cl-lib:dsqrt
(- (expt (/ theta s) 2) (* (/ dx s) (/ dp s))))))
(if (< stp stx) (setf gamma (- gamma)))
(setf p (+ (- gamma dx) theta))
(setf q (+ (- gamma dx) gamma dp))
(setf r (/ p q))
(setf stpc (+ stx (* r (- stp stx))))
(setf stpq
(+ stx
(* (/ (/ dx (+ (/ (- fx fp) (- stp stx)) dx)) 2)
(- stp stx))))
(cond
((< (f2cl-lib:dabs (+ stpc (- stx))) (f2cl-lib:dabs (+ stpq (- stx))))
(setf stpf stpc))
(t
(setf stpf (+ stpc (/ (- stpq stpc) 2)))))
(setf brackt f2cl-lib:%true%))
((< sgnd 0.0)
(setf info 2)
(setf bound f2cl-lib:%false%)
(setf theta (+ (/ (* 3 (- fx fp)) (- stp stx)) dx dp))
(setf s
(max (f2cl-lib:dabs theta)
(f2cl-lib:dabs dx)
(f2cl-lib:dabs dp)))
(setf gamma
(* s
(f2cl-lib:dsqrt
(- (expt (/ theta s) 2) (* (/ dx s) (/ dp s))))))
(if (> stp stx) (setf gamma (- gamma)))
(setf p (+ (- gamma dp) theta))
(setf q (+ (- gamma dp) gamma dx))
(setf r (/ p q))
(setf stpc (+ stp (* r (- stx stp))))
(setf stpq (+ stp (* (/ dp (- dp dx)) (- stx stp))))
(cond
((> (f2cl-lib:dabs (+ stpc (- stp))) (f2cl-lib:dabs (+ stpq (- stp))))
(setf stpf stpc))
(t
(setf stpf stpq)))
(setf brackt f2cl-lib:%true%))
((< (f2cl-lib:dabs dp) (f2cl-lib:dabs dx))
(setf info 3)
(setf bound f2cl-lib:%true%)
(setf theta (+ (/ (* 3 (- fx fp)) (- stp stx)) dx dp))
(setf s
(max (f2cl-lib:dabs theta)
(f2cl-lib:dabs dx)
(f2cl-lib:dabs dp)))
(setf gamma
(* s
(f2cl-lib:dsqrt
(max 0.0 (- (expt (/ theta s) 2) (* (/ dx s) (/ dp s)))))))
(if (> stp stx) (setf gamma (- gamma)))
(setf p (+ (- gamma dp) theta))
(setf q (+ gamma (- dx dp) gamma))
(setf r (/ p q))
(cond
((and (< r 0.0) (/= gamma 0.0))
(setf stpc (+ stp (* r (- stx stp)))))
((> stp stx)
(setf stpc stpmax))
(t
(setf stpc stpmin)))
(setf stpq (+ stp (* (/ dp (- dp dx)) (- stx stp))))
(cond
(brackt
(cond
((< (f2cl-lib:dabs (+ stp (- stpc)))
(f2cl-lib:dabs (+ stp (- stpq))))
(setf stpf stpc))
(t
(setf stpf stpq))))
(t
(cond
((> (f2cl-lib:dabs (+ stp (- stpc)))
(f2cl-lib:dabs (+ stp (- stpq))))
(setf stpf stpc))
(t
(setf stpf stpq))))))
(t
(setf info 4)
(setf bound f2cl-lib:%false%)
(cond
(brackt
(setf theta (+ (/ (* 3 (- fp fy)) (- sty stp)) dy dp))
(setf s
(max (f2cl-lib:dabs theta)
(f2cl-lib:dabs dy)
(f2cl-lib:dabs dp)))
(setf gamma
(* s
(f2cl-lib:dsqrt
(- (expt (/ theta s) 2) (* (/ dy s) (/ dp s))))))
(if (> stp sty) (setf gamma (- gamma)))
(setf p (+ (- gamma dp) theta))
(setf q (+ (- gamma dp) gamma dy))
(setf r (/ p q))
(setf stpc (+ stp (* r (- sty stp))))
(setf stpf stpc))
((> stp stx)
(setf stpf stpmax))
(t
(setf stpf stpmin)))))
(cond
((> fp fx)
(setf sty stp)
(setf fy fp)
(setf dy dp))
(t
(cond
((< sgnd 0.0)
(setf sty stx)
(setf fy fx)
(setf dy dx)))
(setf stx stp)
(setf fx fp)
(setf dx dp)))
(setf stpf (min stpmax stpf))
(setf stpf (max stpmin stpf))
(setf stp stpf)
(cond
((and brackt bound)
(cond
((> sty stx)
(setf stp (min (+ stx (* 0.66 (- sty stx))) stp)))
(t
(setf stp (max (+ stx (* 0.66 (- sty stx))) stp))))))
(go end_label)
end_label
(return (values stx fx dx sty fy dy stp nil nil brackt nil nil info))))
(in-package #-gcl #:cl-user #+gcl "CL-USER")
#+#.(cl:if (cl:find-package '#:f2cl) '(and) '(or))
(eval-when (:load-toplevel :compile-toplevel :execute)
(setf (gethash 'fortran-to-lisp::mcstep
fortran-to-lisp::*f2cl-function-info*)
(fortran-to-lisp::make-f2cl-finfo
:arg-types '((double-float) (double-float) (double-float)
(double-float) (double-float) (double-float)
(double-float) (double-float) (double-float)
fortran-to-lisp::logical (double-float) (double-float)
(fortran-to-lisp::integer4))
:return-values '(fortran-to-lisp::stx fortran-to-lisp::fx
fortran-to-lisp::dx fortran-to-lisp::sty
fortran-to-lisp::fy fortran-to-lisp::dy
fortran-to-lisp::stp nil nil
fortran-to-lisp::brackt nil nil
fortran-to-lisp::info)
:calls 'nil)))
|
19664604ef6ee5428cbee81ad813e9ad45c6c0a95b12dd2291a07e30c7dd7da2 | zellige/zellige | MapnikVectorTile.hs | # LANGUAGE FlexibleContexts #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE RecordWildCards #
module Data.Geometry.MapnikVectorTile where
import qualified Control.Monad.ST as ST
import qualified Data.Aeson as Aeson
import qualified Data.ByteString as ByteString
import qualified Data.ByteString.Char8 as ByteStringChar8
import qualified Data.ByteString.Lazy as ByteStringLazy
import qualified Data.Geometry.VectorTile.Types as VectorTileTypes
import qualified Data.Geometry.VectorTile.VectorTile as VectorTile
import qualified Data.Geospatial as Geospatial
import qualified Data.HashMap.Lazy as HashMapLazy
import Data.Monoid ((<>))
import qualified Data.Sequence as Sequence
import qualified Data.Text as Text
import qualified Data.Geometry.Clip as Clip
import qualified Data.Geometry.GeoJsonToMvt as GeoJsonToMvt
import qualified Data.Geometry.Simplify as Simplify
import qualified Data.Geometry.SphericalMercator as SphericalMercator
import qualified Data.Geometry.Types.Config as TypesConfig
import qualified Data.Geometry.Types.GeoJsonFeatures as TypesGeoJsonFeatures
import qualified Data.Geometry.Types.LayerConfig as TypesLayerConfig
-- Command line
writeLayer :: TypesLayerConfig.LayerConfig -> IO ()
writeLayer lc = do
mvt <- geoJsonFileToMvt (TypesLayerConfig._layerInput lc) (configFromLayerConfig lc)
ByteString.writeFile (TypesLayerConfig._layerOutput lc) (encodeMvt mvt)
configFromLayerConfig :: TypesLayerConfig.LayerConfig -> TypesConfig.Config
configFromLayerConfig TypesLayerConfig.LayerConfig{..} = TypesConfig.mkConfig _layerName _layerZoom (_layerX, _layerY) _layerBuffer _layerExtent _layerQuantizePixels _layerSimplification
geoJsonFileToMvt :: FilePath -> TypesConfig.Config -> IO VectorTileTypes.VectorTile
geoJsonFileToMvt filePath config = do
geoJson <- readGeoJson filePath
createMvt config geoJson
readGeoJson :: FilePath -> IO (Geospatial.GeoFeatureCollection Aeson.Value)
readGeoJson geoJsonFile = do
bs <- ByteStringLazy.readFile geoJsonFile
let ebs = Aeson.eitherDecode' bs :: Either String (Geospatial.GeoFeatureCollection Aeson.Value)
decodeError = error . (("Unable to decode " <> geoJsonFile <> ": ") <>)
pure (either decodeError id ebs)
readMvt :: FilePath -> IO (Either Text.Text VectorTile.VectorTile)
readMvt filePath = do
b <- ByteString.readFile filePath
pure (VectorTile.tile b)
-- Lib
encodeMvt :: VectorTileTypes.VectorTile -> ByteStringChar8.ByteString
encodeMvt = VectorTile.untile
createMvt :: TypesConfig.Config -> Geospatial.GeoFeatureCollection Aeson.Value -> IO VectorTileTypes.VectorTile
createMvt TypesConfig.Config{..} (Geospatial.GeoFeatureCollection geoFeatureBbox geoFeatures) = do
let sphericalMercatorPts = SphericalMercator.convertFeatures _extents _quantizePixels (SphericalMercator.boundingBox _gtc) geoFeatures
clipBb = Clip.createBoundingBox _buffer _extents
clippedFeatures = Clip.clipFeatures clipBb sphericalMercatorPts
simplifiedFeatures = Simplify.simplifyFeatures _simplify clippedFeatures
TypesGeoJsonFeatures.MvtFeatures{..} = ST.runST $ getFeatures (Geospatial.GeoFeatureCollection geoFeatureBbox simplifiedFeatures)
layer = VectorTileTypes.Layer (fromIntegral _version) _name Sequence.empty mvtPoints mvtLines mvtPolygons (fromIntegral _extents)
pure . VectorTileTypes.VectorTile $ HashMapLazy.fromList [(_name, layer)]
getFeatures :: Geospatial.GeoFeatureCollection Aeson.Value -> ST.ST s TypesGeoJsonFeatures.MvtFeatures
getFeatures Geospatial.GeoFeatureCollection{..} = GeoJsonToMvt.geoJsonFeaturesToMvtFeatures TypesGeoJsonFeatures.emptyMvtFeatures _geofeatures
convertClipSimplify :: TypesConfig.Config -> Geospatial.GeospatialGeometry -> Geospatial.GeospatialGeometry
convertClipSimplify TypesConfig.Config{..} feature = simplifiedFeatures
where
sphericalMercatorPts = SphericalMercator.mapFeature _extents _quantizePixels (SphericalMercator.boundingBox _gtc) feature
clipBb = Clip.createBoundingBox _buffer _extents
clippedFeatures = Clip.mapFeature clipBb sphericalMercatorPts
simplifiedFeatures = Simplify.mapFeature _simplify clippedFeatures
| null | https://raw.githubusercontent.com/zellige/zellige/87e6dab11ac4c1843009043580f14422a1d83ebf/src/Data/Geometry/MapnikVectorTile.hs | haskell | # LANGUAGE OverloadedStrings #
Command line
Lib | # LANGUAGE FlexibleContexts #
# LANGUAGE RecordWildCards #
module Data.Geometry.MapnikVectorTile where
import qualified Control.Monad.ST as ST
import qualified Data.Aeson as Aeson
import qualified Data.ByteString as ByteString
import qualified Data.ByteString.Char8 as ByteStringChar8
import qualified Data.ByteString.Lazy as ByteStringLazy
import qualified Data.Geometry.VectorTile.Types as VectorTileTypes
import qualified Data.Geometry.VectorTile.VectorTile as VectorTile
import qualified Data.Geospatial as Geospatial
import qualified Data.HashMap.Lazy as HashMapLazy
import Data.Monoid ((<>))
import qualified Data.Sequence as Sequence
import qualified Data.Text as Text
import qualified Data.Geometry.Clip as Clip
import qualified Data.Geometry.GeoJsonToMvt as GeoJsonToMvt
import qualified Data.Geometry.Simplify as Simplify
import qualified Data.Geometry.SphericalMercator as SphericalMercator
import qualified Data.Geometry.Types.Config as TypesConfig
import qualified Data.Geometry.Types.GeoJsonFeatures as TypesGeoJsonFeatures
import qualified Data.Geometry.Types.LayerConfig as TypesLayerConfig
writeLayer :: TypesLayerConfig.LayerConfig -> IO ()
writeLayer lc = do
mvt <- geoJsonFileToMvt (TypesLayerConfig._layerInput lc) (configFromLayerConfig lc)
ByteString.writeFile (TypesLayerConfig._layerOutput lc) (encodeMvt mvt)
configFromLayerConfig :: TypesLayerConfig.LayerConfig -> TypesConfig.Config
configFromLayerConfig TypesLayerConfig.LayerConfig{..} = TypesConfig.mkConfig _layerName _layerZoom (_layerX, _layerY) _layerBuffer _layerExtent _layerQuantizePixels _layerSimplification
geoJsonFileToMvt :: FilePath -> TypesConfig.Config -> IO VectorTileTypes.VectorTile
geoJsonFileToMvt filePath config = do
geoJson <- readGeoJson filePath
createMvt config geoJson
readGeoJson :: FilePath -> IO (Geospatial.GeoFeatureCollection Aeson.Value)
readGeoJson geoJsonFile = do
bs <- ByteStringLazy.readFile geoJsonFile
let ebs = Aeson.eitherDecode' bs :: Either String (Geospatial.GeoFeatureCollection Aeson.Value)
decodeError = error . (("Unable to decode " <> geoJsonFile <> ": ") <>)
pure (either decodeError id ebs)
readMvt :: FilePath -> IO (Either Text.Text VectorTile.VectorTile)
readMvt filePath = do
b <- ByteString.readFile filePath
pure (VectorTile.tile b)
encodeMvt :: VectorTileTypes.VectorTile -> ByteStringChar8.ByteString
encodeMvt = VectorTile.untile
createMvt :: TypesConfig.Config -> Geospatial.GeoFeatureCollection Aeson.Value -> IO VectorTileTypes.VectorTile
createMvt TypesConfig.Config{..} (Geospatial.GeoFeatureCollection geoFeatureBbox geoFeatures) = do
let sphericalMercatorPts = SphericalMercator.convertFeatures _extents _quantizePixels (SphericalMercator.boundingBox _gtc) geoFeatures
clipBb = Clip.createBoundingBox _buffer _extents
clippedFeatures = Clip.clipFeatures clipBb sphericalMercatorPts
simplifiedFeatures = Simplify.simplifyFeatures _simplify clippedFeatures
TypesGeoJsonFeatures.MvtFeatures{..} = ST.runST $ getFeatures (Geospatial.GeoFeatureCollection geoFeatureBbox simplifiedFeatures)
layer = VectorTileTypes.Layer (fromIntegral _version) _name Sequence.empty mvtPoints mvtLines mvtPolygons (fromIntegral _extents)
pure . VectorTileTypes.VectorTile $ HashMapLazy.fromList [(_name, layer)]
getFeatures :: Geospatial.GeoFeatureCollection Aeson.Value -> ST.ST s TypesGeoJsonFeatures.MvtFeatures
getFeatures Geospatial.GeoFeatureCollection{..} = GeoJsonToMvt.geoJsonFeaturesToMvtFeatures TypesGeoJsonFeatures.emptyMvtFeatures _geofeatures
convertClipSimplify :: TypesConfig.Config -> Geospatial.GeospatialGeometry -> Geospatial.GeospatialGeometry
convertClipSimplify TypesConfig.Config{..} feature = simplifiedFeatures
where
sphericalMercatorPts = SphericalMercator.mapFeature _extents _quantizePixels (SphericalMercator.boundingBox _gtc) feature
clipBb = Clip.createBoundingBox _buffer _extents
clippedFeatures = Clip.mapFeature clipBb sphericalMercatorPts
simplifiedFeatures = Simplify.mapFeature _simplify clippedFeatures
|
2285036a429982af5393083786f218eddeab662b131a0e81807a2e717ffcb3dc | xvw/planet | lib.ml | open Bedrock
open Baremetal
let stamp task action message () =
let filename =
Filename.concat
Glue.(Database.path Task.database)
(Shapes.Task.(task.uuid) ^ ".qube")
in
let open Result.Infix in
let open Shapes.Task in
Glue.Git.stage [ filename ]
>>= fun () ->
Glue.Git.commit ~desc:message (Format.asprintf "%s %s" action task.uuid)
;;
let ensure_task taskname f =
let filename =
Filename.concat
Glue.(Database.path Task.database)
(String.uppercase_ascii taskname ^ ".qube")
in
match
filename
|> File.to_stream (fun _ -> Paperwork.Qexp.from_stream)
|> Validation.from_result
|> Validation.bind Shapes.Task.from_qexp
with
| Error errs -> Prompter.prompt_errors errs
| Ok task -> f task
;;
let ansi_header task =
let open Shapes.Task in
Ansi.[ !"\n" ]
@ Ansi.(box task.name [ [ fg cyan; !(task.description) ] ])
@ Ansi.[ fg magenta; !(task.uuid ^ " ~> " ^ state_to_string task.state) ]
@ Ansi.[ reset; !"\n" ]
;;
let ansi_list title elements =
match elements with
| [] -> []
| _ ->
Ansi.
[ bold
; !(title ^ ": ")
; reset
; fg yellow
; !(String.concat ", " elements)
; reset
; !"\n"
]
;;
let ansi_project task =
ansi_list "Project" (Option.to_list Shapes.Task.(task.project))
;;
let ansi_sectors task = ansi_list "Sectors" Shapes.Task.(task.sectors)
let ansi_tags task = ansi_list "Tags" Shapes.Task.(task.tags)
let ansi_checklist task =
let open Shapes.Task in
match task.checklist with
| [] -> []
| _ ->
Ansi.(
box
"Checklist"
(List.mapi
(fun i (flag, label) ->
[ bold; fg magenta; !(Format.asprintf "%03d:" (succ i)); reset ]
@ (if flag
then
[ fg blue; !"["; fg green; bold; !"X"; reset; fg blue; !"]" ]
else [ fg blue; !"[ ]" ])
@ [ reset; !" "; !label; reset ])
task.checklist))
;;
let ansi_dates task =
let open Shapes.Task in
let open Ansi in
let dates =
[ "Creation date", Some task.date
; "Engagement date", task.engagement_date
; "Opening date", task.opening_date
; "Closing date", task.closing_date
]
|> List.bind (function
| label, Some date ->
[ [ bold
; !(label ^ ": ")
; reset
; !(Paperwork.Timetable.Day.to_string date)
]
]
| _ -> [])
in
[ !"\n" ] @ box "Dates" dates
;;
let display_patch new_state new_opening_date new_closing_date =
let open Ansi in
((match new_state with
| None -> []
| Some x ->
[ bold
; fg blue
; !"New state: "
; reset
; fg yellow
; !(Shapes.Task.state_to_string x)
; !"\n"
])
@ (match new_opening_date with
| None -> []
| Some x ->
[ bold
; fg blue
; !"New Opening Date: "
; reset
; fg yellow
; !(Paperwork.Timetable.Day.to_string x)
; !"\n"
])
@
match new_closing_date with
| None -> []
| Some x ->
[ bold
; fg blue
; !"New Closing Date: "
; reset
; fg yellow
; !(Paperwork.Timetable.Day.to_string x)
; !"\n"
])
|> Ansi.to_string
;;
let display task =
let fragment =
ansi_header task
@ ansi_project task
@ ansi_sectors task
@ ansi_tags task
@ ansi_checklist task
@ ansi_dates task
@ Ansi.[ !"\n" ]
in
fragment |> Ansi.to_string ~scoped:true |> print_endline
;;
let show taskname = ensure_task taskname display
let move taskname new_state =
match Shapes.Task.state_from_string new_state with
| Error errs -> Prompter.prompt_errors errs
| Ok state ->
ensure_task taskname (fun task ->
let open Shapes.Task in
let filename =
Filename.concat
Glue.(Database.path Task.database)
(Shapes.Task.(task.uuid) ^ ".qube")
in
let new_task = { task with state } in
let qexp = to_qexp new_task in
let str = Paperwork.Qexp.to_string qexp in
let open Result.Infix in
let message =
Format.asprintf
"%s to %s"
(state_to_string task.state)
(state_to_string new_task.state)
in
match File.overwrite filename str >>= stamp new_task "move" message with
| Ok () -> display new_task
| Error err -> Prompter.prompt_error err)
;;
let may_update_state task =
let open Shapes.Task in
let open Result.Syntax in
let* day = Glue.Util.day () in
let (new_state, new_opening_date, new_closing_date), need_changement =
need_state_changement day task
in
if need_changement
then (
let valid =
Prompter.yes_no
~answer_style:Ansi.[ fg yellow ]
~title:"Apply patch"
(display_patch new_state new_opening_date new_closing_date)
in
if valid
then (
match new_state with
| None -> Ok task
| Some nstate ->
Ok
{ task with
state = nstate
; opening_date = new_opening_date
; closing_date = new_closing_date
})
else Ok task)
else Ok task
;;
let check taskname =
ensure_task taskname (fun task ->
let () = Ansi.(ansi_header task |> to_string |> print_endline) in
let open Shapes.Task in
let open Result.Infix in
Util.try_until Prompter.repeat_result (fun () ->
Prompter.choose_multiple
~answer_style:Ansi.[ fg yellow ]
~title:"Which task"
(fun (i, _, _) -> i)
(fun (_, flag, label) ->
let f = if flag then "x" else " " in
Format.asprintf "[%s] %s" f label)
(Array.of_list (List.mapi (fun i (f, g) -> i, f, g) task.checklist))
"Toggle task")
>|= (fun indexes ->
let new_check =
List.mapi
(fun i (f, l) -> if List.mem i indexes then not f, l else f, l)
task.checklist
in
{ task with checklist = new_check })
>>= may_update_state
>|= (fun task ->
let () = ansi_checklist task |> Ansi.to_string |> print_endline in
task)
>>= (fun task ->
let qexp = Shapes.Task.to_qexp task in
let filename =
Filename.concat
Glue.(Database.path Task.database)
(Shapes.Task.(task.uuid) ^ ".qube")
in
let task_str = Paperwork.Qexp.to_string qexp in
File.overwrite filename task_str
>>= stamp task "check" "update checklist"
>|= fun () -> task)
|> function
| Error err -> Prompter.prompt_error err
| Ok new_task -> display new_task)
;;
let update_engagement date task =
let open Shapes.Task in
let message =
Format.asprintf
"old: %a \t new: %a"
(Option.pp Paperwork.Timetable.Day.pp)
task.engagement_date
(Option.pp Paperwork.Timetable.Day.pp)
date
in
let valid =
Prompter.yes_no
~answer_style:Ansi.[ fg yellow ]
~title:"Update engagement"
message
in
if valid
then (
let filename =
Filename.concat
Glue.(Database.path Task.database)
(Shapes.Task.(task.uuid) ^ ".qube")
in
let new_task = { task with engagement_date = date } in
let qexp = to_qexp new_task in
let str = Paperwork.Qexp.to_string qexp in
let open Result.Infix in
match
File.overwrite filename str
>>= stamp new_task "update engagement of" message
with
| Ok () -> display new_task
| Error err -> Prompter.prompt_error err)
else ()
;;
let engage taskname date_str =
match Paperwork.Timetable.Day.from_string date_str with
| Error err -> Prompter.prompt_error err
| Ok date -> ensure_task taskname (update_engagement (Some date))
;;
let desengage taskname = ensure_task taskname (update_engagement None)
let create () =
Glue.Ui.ensure_sectors_projects (fun sectors (_ctx, projects) ->
let name = Glue.Ui.get_string "Title?" "Title of the task" in
let description = Glue.Ui.get_string "Description?" "Describe the task" in
let some_project =
Glue.Ui.may_project (List.map (fun (x, _, _) -> x) projects)
|> Option.map (fun x -> Shapes.Project.(x.name))
in
let sectors = Glue.Ui.select_sectors sectors in
let checklist =
Glue.Ui.get_string_opt "Tasks?" "Checklist"
|> Option.to_list
|> List.bind (String.tokenize ',')
in
let tags =
Glue.Ui.get_string_opt "Tags?" "Tags of the task"
|> Option.to_list
|> List.bind (String.tokenize ',')
in
let engagement = Glue.Ui.get_day_opt "Engagement?" "Potential due date" in
let open Result.Infix in
Glue.Task.init
some_project
sectors
name
description
checklist
tags
engagement
>|= (fun task ->
let () = display task in
task)
>>= (fun task ->
let qexp = Shapes.Task.to_qexp task in
let filename =
Filename.concat
Glue.(Database.path Task.database)
(Shapes.Task.(task.uuid) ^ ".qube")
in
let task_str = Paperwork.Qexp.to_string qexp in
let valid =
Prompter.yes_no
~answer_style:Ansi.[ fg yellow ]
~title:"Confirm?"
task_str
in
if valid
then
File.create filename task_str
>>= stamp task "create" "create new task"
>|= fun () ->
Ansi.
[ bold
; fg green
; !filename
; reset
; !" has been dumped\n"
; fg yellow
; !task_str
; reset
; !"\n"
]
|> Ansi.to_string ~scoped:true
|> print_endline
else Ok ())
|> function
| Ok _ -> ()
| Error e -> Prompter.prompt_error e)
;;
| null | https://raw.githubusercontent.com/xvw/planet/c2a77ea66f61cc76df78b9c2ad06d114795f3053/src/bin/todo/lib.ml | ocaml | open Bedrock
open Baremetal
let stamp task action message () =
let filename =
Filename.concat
Glue.(Database.path Task.database)
(Shapes.Task.(task.uuid) ^ ".qube")
in
let open Result.Infix in
let open Shapes.Task in
Glue.Git.stage [ filename ]
>>= fun () ->
Glue.Git.commit ~desc:message (Format.asprintf "%s %s" action task.uuid)
;;
let ensure_task taskname f =
let filename =
Filename.concat
Glue.(Database.path Task.database)
(String.uppercase_ascii taskname ^ ".qube")
in
match
filename
|> File.to_stream (fun _ -> Paperwork.Qexp.from_stream)
|> Validation.from_result
|> Validation.bind Shapes.Task.from_qexp
with
| Error errs -> Prompter.prompt_errors errs
| Ok task -> f task
;;
let ansi_header task =
let open Shapes.Task in
Ansi.[ !"\n" ]
@ Ansi.(box task.name [ [ fg cyan; !(task.description) ] ])
@ Ansi.[ fg magenta; !(task.uuid ^ " ~> " ^ state_to_string task.state) ]
@ Ansi.[ reset; !"\n" ]
;;
let ansi_list title elements =
match elements with
| [] -> []
| _ ->
Ansi.
[ bold
; !(title ^ ": ")
; reset
; fg yellow
; !(String.concat ", " elements)
; reset
; !"\n"
]
;;
let ansi_project task =
ansi_list "Project" (Option.to_list Shapes.Task.(task.project))
;;
let ansi_sectors task = ansi_list "Sectors" Shapes.Task.(task.sectors)
let ansi_tags task = ansi_list "Tags" Shapes.Task.(task.tags)
let ansi_checklist task =
let open Shapes.Task in
match task.checklist with
| [] -> []
| _ ->
Ansi.(
box
"Checklist"
(List.mapi
(fun i (flag, label) ->
[ bold; fg magenta; !(Format.asprintf "%03d:" (succ i)); reset ]
@ (if flag
then
[ fg blue; !"["; fg green; bold; !"X"; reset; fg blue; !"]" ]
else [ fg blue; !"[ ]" ])
@ [ reset; !" "; !label; reset ])
task.checklist))
;;
let ansi_dates task =
let open Shapes.Task in
let open Ansi in
let dates =
[ "Creation date", Some task.date
; "Engagement date", task.engagement_date
; "Opening date", task.opening_date
; "Closing date", task.closing_date
]
|> List.bind (function
| label, Some date ->
[ [ bold
; !(label ^ ": ")
; reset
; !(Paperwork.Timetable.Day.to_string date)
]
]
| _ -> [])
in
[ !"\n" ] @ box "Dates" dates
;;
let display_patch new_state new_opening_date new_closing_date =
let open Ansi in
((match new_state with
| None -> []
| Some x ->
[ bold
; fg blue
; !"New state: "
; reset
; fg yellow
; !(Shapes.Task.state_to_string x)
; !"\n"
])
@ (match new_opening_date with
| None -> []
| Some x ->
[ bold
; fg blue
; !"New Opening Date: "
; reset
; fg yellow
; !(Paperwork.Timetable.Day.to_string x)
; !"\n"
])
@
match new_closing_date with
| None -> []
| Some x ->
[ bold
; fg blue
; !"New Closing Date: "
; reset
; fg yellow
; !(Paperwork.Timetable.Day.to_string x)
; !"\n"
])
|> Ansi.to_string
;;
let display task =
let fragment =
ansi_header task
@ ansi_project task
@ ansi_sectors task
@ ansi_tags task
@ ansi_checklist task
@ ansi_dates task
@ Ansi.[ !"\n" ]
in
fragment |> Ansi.to_string ~scoped:true |> print_endline
;;
let show taskname = ensure_task taskname display
let move taskname new_state =
match Shapes.Task.state_from_string new_state with
| Error errs -> Prompter.prompt_errors errs
| Ok state ->
ensure_task taskname (fun task ->
let open Shapes.Task in
let filename =
Filename.concat
Glue.(Database.path Task.database)
(Shapes.Task.(task.uuid) ^ ".qube")
in
let new_task = { task with state } in
let qexp = to_qexp new_task in
let str = Paperwork.Qexp.to_string qexp in
let open Result.Infix in
let message =
Format.asprintf
"%s to %s"
(state_to_string task.state)
(state_to_string new_task.state)
in
match File.overwrite filename str >>= stamp new_task "move" message with
| Ok () -> display new_task
| Error err -> Prompter.prompt_error err)
;;
let may_update_state task =
let open Shapes.Task in
let open Result.Syntax in
let* day = Glue.Util.day () in
let (new_state, new_opening_date, new_closing_date), need_changement =
need_state_changement day task
in
if need_changement
then (
let valid =
Prompter.yes_no
~answer_style:Ansi.[ fg yellow ]
~title:"Apply patch"
(display_patch new_state new_opening_date new_closing_date)
in
if valid
then (
match new_state with
| None -> Ok task
| Some nstate ->
Ok
{ task with
state = nstate
; opening_date = new_opening_date
; closing_date = new_closing_date
})
else Ok task)
else Ok task
;;
let check taskname =
ensure_task taskname (fun task ->
let () = Ansi.(ansi_header task |> to_string |> print_endline) in
let open Shapes.Task in
let open Result.Infix in
Util.try_until Prompter.repeat_result (fun () ->
Prompter.choose_multiple
~answer_style:Ansi.[ fg yellow ]
~title:"Which task"
(fun (i, _, _) -> i)
(fun (_, flag, label) ->
let f = if flag then "x" else " " in
Format.asprintf "[%s] %s" f label)
(Array.of_list (List.mapi (fun i (f, g) -> i, f, g) task.checklist))
"Toggle task")
>|= (fun indexes ->
let new_check =
List.mapi
(fun i (f, l) -> if List.mem i indexes then not f, l else f, l)
task.checklist
in
{ task with checklist = new_check })
>>= may_update_state
>|= (fun task ->
let () = ansi_checklist task |> Ansi.to_string |> print_endline in
task)
>>= (fun task ->
let qexp = Shapes.Task.to_qexp task in
let filename =
Filename.concat
Glue.(Database.path Task.database)
(Shapes.Task.(task.uuid) ^ ".qube")
in
let task_str = Paperwork.Qexp.to_string qexp in
File.overwrite filename task_str
>>= stamp task "check" "update checklist"
>|= fun () -> task)
|> function
| Error err -> Prompter.prompt_error err
| Ok new_task -> display new_task)
;;
let update_engagement date task =
let open Shapes.Task in
let message =
Format.asprintf
"old: %a \t new: %a"
(Option.pp Paperwork.Timetable.Day.pp)
task.engagement_date
(Option.pp Paperwork.Timetable.Day.pp)
date
in
let valid =
Prompter.yes_no
~answer_style:Ansi.[ fg yellow ]
~title:"Update engagement"
message
in
if valid
then (
let filename =
Filename.concat
Glue.(Database.path Task.database)
(Shapes.Task.(task.uuid) ^ ".qube")
in
let new_task = { task with engagement_date = date } in
let qexp = to_qexp new_task in
let str = Paperwork.Qexp.to_string qexp in
let open Result.Infix in
match
File.overwrite filename str
>>= stamp new_task "update engagement of" message
with
| Ok () -> display new_task
| Error err -> Prompter.prompt_error err)
else ()
;;
let engage taskname date_str =
match Paperwork.Timetable.Day.from_string date_str with
| Error err -> Prompter.prompt_error err
| Ok date -> ensure_task taskname (update_engagement (Some date))
;;
let desengage taskname = ensure_task taskname (update_engagement None)
let create () =
Glue.Ui.ensure_sectors_projects (fun sectors (_ctx, projects) ->
let name = Glue.Ui.get_string "Title?" "Title of the task" in
let description = Glue.Ui.get_string "Description?" "Describe the task" in
let some_project =
Glue.Ui.may_project (List.map (fun (x, _, _) -> x) projects)
|> Option.map (fun x -> Shapes.Project.(x.name))
in
let sectors = Glue.Ui.select_sectors sectors in
let checklist =
Glue.Ui.get_string_opt "Tasks?" "Checklist"
|> Option.to_list
|> List.bind (String.tokenize ',')
in
let tags =
Glue.Ui.get_string_opt "Tags?" "Tags of the task"
|> Option.to_list
|> List.bind (String.tokenize ',')
in
let engagement = Glue.Ui.get_day_opt "Engagement?" "Potential due date" in
let open Result.Infix in
Glue.Task.init
some_project
sectors
name
description
checklist
tags
engagement
>|= (fun task ->
let () = display task in
task)
>>= (fun task ->
let qexp = Shapes.Task.to_qexp task in
let filename =
Filename.concat
Glue.(Database.path Task.database)
(Shapes.Task.(task.uuid) ^ ".qube")
in
let task_str = Paperwork.Qexp.to_string qexp in
let valid =
Prompter.yes_no
~answer_style:Ansi.[ fg yellow ]
~title:"Confirm?"
task_str
in
if valid
then
File.create filename task_str
>>= stamp task "create" "create new task"
>|= fun () ->
Ansi.
[ bold
; fg green
; !filename
; reset
; !" has been dumped\n"
; fg yellow
; !task_str
; reset
; !"\n"
]
|> Ansi.to_string ~scoped:true
|> print_endline
else Ok ())
|> function
| Ok _ -> ()
| Error e -> Prompter.prompt_error e)
;;
| |
6e787ccaccf28774f7d1ce8de9e739eaa0e33023a2db1aeaedeaa933fa17aa8a | bytekid/mkbtt | yices.ml | Copyright 2008 , Christian Sternagel ,
* GNU Lesser General Public License
*
* This file is part of TTT2 .
*
* TTT2 is free software : you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation , either version 3 of the License , or ( at your
* option ) any later version .
*
* TTT2 is distributed in the hope that it will be useful , but WITHOUT
* ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE . See the GNU Lesser General Public
* License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with TTT2 . If not , see < / > .
* GNU Lesser General Public License
*
* This file is part of TTT2.
*
* TTT2 is free software: you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* TTT2 is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with TTT2. If not, see </>.
*)
(*** FUNCTIONS ****************************************************************)
module F = Formula;;
open Yinterface;;
open Monad;;
open Util;;
type ctx = {
con : Yinterface.yices_context;
vars : (int,Yinterface.yices_var_decl * string) Hashtbl.t;
ta_tbl : (F.a, Yinterface.yices_expr) Hashtbl.t;
tf_tbl : (F.p,Yinterface.yices_expr) Hashtbl.t;
};;
(* create and delete context *)
let init () =
yices_enable_type_checker (1);
{
con = yices_mk_context ();
vars = Hashtbl.create 512;
ta_tbl = Hashtbl.create 512;
tf_tbl = Hashtbl.create 512;
}
let finalize ctx =
yices_del_context ctx.con;
Hashtbl.clear ctx.vars;
;;
let decl t ctx x =
let ty = yices_mk_type ctx.con t in
if not (Hashtbl.mem ctx.vars x) then begin
let xdecl = yices_mk_var_decl ctx.con (string_of_int x) ty in
Hashtbl.add ctx.vars x (xdecl, t);
end;
;;
let args x y =
let args = Array.make 2 x in
args.(0) <- x;
args.(1) <- y;
args
;;
(* comparisons *)
let eq ctx a b = yices_mk_eq ctx.con a b;;
let ge ctx a b = yices_mk_ge ctx.con a b;;
let gt ctx a b = yices_mk_gt ctx.con a b;;
let ite ctx c t e = yices_mk_ite ctx.con c t e;;
(* boolean operators *)
let bot ctx = yices_mk_false ctx.con;;
let top ctx = yices_mk_true ctx.con;;
let conj ctx x y = yices_mk_and ctx.con (args x y) 2;;
let disj ctx x y = yices_mk_or ctx.con (args x y) 2;;
let neg ctx x = yices_mk_not ctx.con x;;
let impl c x y = disj c (neg c x) y;;
let iff c x y = conj c (impl c x y) (impl c y x);;
(* arithmetic *)
let add ctx x y = yices_mk_sum ctx.con (args x y) 2;;
let sub ctx x y = yices_mk_sub ctx.con (args x y) 2;;
let mul ctx x y = yices_mk_mul ctx.con (args x y) 2;;
let to_int n = (int_of_string (Number.to_string n))
let of_number ctx r = yices_mk_num ctx.con (to_int r)
;;
let var ?(neg=false) t ctx x =
if not (Hashtbl.mem ctx.vars x) then (
decl t ctx x;
if neg || t = "bool" then () else (
let z = yices_mk_num ctx.con 0 in
let v = yices_mk_var_from_decl ctx.con (fst (Hashtbl.find ctx.vars x)) in
yices_assert ctx.con (ge ctx v z);
);
);
yices_mk_var_from_decl ctx.con (fst (Hashtbl.find ctx.vars x))
;;
let var_p = var "bool";;
let var_a con x =
let t =
if (F.a_spec x).F.rat <> 1 || (F.a_spec x).F.real then "real" else
"int" in
var ~neg:(F.a_spec x).F.neg t con (F.a_id x)
;;
let value ctx m x =
let (decl, t) = Hashtbl.find ctx.vars x in
match t with
| "int" -> Number.of_int (helper_get_int_value m decl)
| "bool" -> Number.of_int (helper_get_value m decl)
| "real" -> (* rat is subsumed by real *)
Number.of_rat (helper_get_num_value m decl)
(helper_get_dnum_value m decl)
| _ -> failwith "unknown variable declaration"
;;
let cache tbl f k =
if not (Hashtbl.mem tbl k) then Hashtbl.add tbl k (f k);
Hashtbl.find tbl k
;;
let rec tf ctx = function
| F.Top -> top ctx
| F.Bot -> bot ctx
| F.P i -> var_p ctx i
| F.Not (x) -> neg ctx (tfc ctx x)
| F.And (x, y) -> conj ctx (tfc ctx x) (tfc ctx y)
| F.Or (x, y) -> disj ctx (tfc ctx x) (tfc ctx y)
(*| F.Implies (x, y) -> impl ctx (tfc ctx x) (tfc ctx y) *)
| F.Iff (x, y) -> iff ctx (tfc ctx x) (tfc ctx y)
| F.Eq (a, b) -> eq ctx (tc ctx a) (tc ctx b)
| F.Ge (a, b) -> ge ctx (tc ctx a) (tc ctx b)
| F.Gt (a, b) -> gt ctx (tc ctx a) (tc ctx b)
and t ctx = function
| F.App ( f , args ) - > app ctx f ( List.map ( t ctx ) args )
| F.A a -> var_a ctx a
| F.C r -> of_number ctx r
| F.Fresh a -> tc ctx a
| F.Add (a, b) -> add ctx (tc ctx a) (tc ctx b)
| F.Sub (a, b) -> sub ctx (tc ctx a) (tc ctx b)
| F.Mul (a, b) -> mul ctx (tc ctx a) (tc ctx b)
(*| F.SMul (i, b) -> mul ctx (of_int ctx i) (t ctx b) *)
| F.Ite ( x , C 1 , C 0 ) - >
| F.Ite (x,a,b) -> ite ctx (tfc ctx x) (tc ctx a) (tc ctx b)
| F.Max (a,b) -> tc ctx (F.Ite (F.Gt (a,b),a,b))
| F.Min (a,b) -> tc ctx (F.Ite (F.Gt (a,b),b,a))
and tfc ctx x = cache ctx.tf_tbl (tf ctx) x
and tc ctx x = cache ctx.ta_tbl (t ctx) x
;;
let solve f =
let t0 = Unix.gettimeofday() in
let t _ = (Unix.gettimeofday () -. t0) in
Format.eprintf " Entering Yices section@\n% ! " ;
let ctx = init () in
(*Format.eprintf "Before transforming %f@\n%!" (t ()); *)
ignore (yices_assert ctx.con (tfc ctx f));
(*Format.eprintf "Before solving %f@\n%!" (t ()); *)
let r = yices_check_aux ctx.con in
(*Format.eprintf "After solving %f@\n%!" (t ()); *)
let assign =
if (r = ~-1) then
None
else
let m = yices_get_model ctx.con in
(*ignore (yices_display_model m); *)
let assign = Hashtbl.fold
(fun k v -> Assignment.add_a (Formula.arith k) (value ctx m k))
ctx.vars
Assignment.empty
in Some assign
in
finalize ctx;
(*Format.eprintf "Leaving Yices sections %f@\n%!" (t ()); *)
return assign
;;
(* monadic caching *)
let cache tbl f k =
if Hashtbl.mem tbl k then return (Hashtbl.find tbl k)
else (f k >>= fun v -> Hashtbl.add tbl k v; return v)
;;
let mite mx ma mb = mx >>= fun x -> if x then ma else mb;;
let rec ea a ass = match a with
| F.A l -> return (try Assignment.find_a a ass with | Not_found -> Number.of_int 0)
| F.C r -> return r
| F.Fresh a -> eval_a a ass
| F.Add (a,b) -> lift2 Number.add (eval_a a ass) (eval_a b ass)
| F.Sub (a,b) -> lift2 Number.sub (eval_a a ass) (eval_a b ass)
| F.Mul (a,b) -> lift2 Number.mul (eval_a a ass) (eval_a b ass)
| F.Ite (x,a,b) -> mite (eval_p x ass) (eval_a a ass) (eval_a b ass)
| F.Min (a,b) -> lift2 Number.min (eval_a a ass) (eval_a b ass)
| F.Max (a,b) -> lift2 Number.max (eval_a a ass) (eval_a b ass)
and ep f ass = match f with
| F.Top -> return true
| F.Bot -> return false
| F.P x ->
let v = try Assignment.find_a (Formula.arith x) ass with | Not_found -> Number.zero in
if v = Number.zero then return false
else if v = Number.one then return true
else failwith "propositional variable not propositional"
| F.Not x -> lift not (eval_p x ass)
| F.And (x,y) -> lift2 (&&) (eval_p x ass) (eval_p y ass)
| F.Or (x,y) -> lift2 (||) (eval_p x ass) (eval_p y ass)
| F.Iff (x,y) -> lift2 (=) (eval_p x ass) (eval_p y ass)
| F.Eq (a,b) -> lift2 Number.eq (eval_a a ass) (eval_a b ass)
| F.Gt (a,b) -> lift2 Number.gt (eval_a a ass) (eval_a b ass)
| F.Ge (a,b) -> lift2 Number.ge (eval_a a ass) (eval_a b ass)
and eval_a a ass = get >>= fun s -> cache s.State.eay_tbl (flip ea ass) a
and eval_p p ass = get >>= fun s -> cache s.State.epy_tbl (flip ep ass) p
;;
| null | https://raw.githubusercontent.com/bytekid/mkbtt/c2f8e0615389b52eabd12655fe48237aa0fe83fd/src/logic/src/yices.ml | ocaml | ** FUNCTIONS ***************************************************************
create and delete context
comparisons
boolean operators
arithmetic
rat is subsumed by real
| F.Implies (x, y) -> impl ctx (tfc ctx x) (tfc ctx y)
| F.SMul (i, b) -> mul ctx (of_int ctx i) (t ctx b)
Format.eprintf "Before transforming %f@\n%!" (t ());
Format.eprintf "Before solving %f@\n%!" (t ());
Format.eprintf "After solving %f@\n%!" (t ());
ignore (yices_display_model m);
Format.eprintf "Leaving Yices sections %f@\n%!" (t ());
monadic caching | Copyright 2008 , Christian Sternagel ,
* GNU Lesser General Public License
*
* This file is part of TTT2 .
*
* TTT2 is free software : you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation , either version 3 of the License , or ( at your
* option ) any later version .
*
* TTT2 is distributed in the hope that it will be useful , but WITHOUT
* ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE . See the GNU Lesser General Public
* License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with TTT2 . If not , see < / > .
* GNU Lesser General Public License
*
* This file is part of TTT2.
*
* TTT2 is free software: you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* TTT2 is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with TTT2. If not, see </>.
*)
module F = Formula;;
open Yinterface;;
open Monad;;
open Util;;
type ctx = {
con : Yinterface.yices_context;
vars : (int,Yinterface.yices_var_decl * string) Hashtbl.t;
ta_tbl : (F.a, Yinterface.yices_expr) Hashtbl.t;
tf_tbl : (F.p,Yinterface.yices_expr) Hashtbl.t;
};;
let init () =
yices_enable_type_checker (1);
{
con = yices_mk_context ();
vars = Hashtbl.create 512;
ta_tbl = Hashtbl.create 512;
tf_tbl = Hashtbl.create 512;
}
let finalize ctx =
yices_del_context ctx.con;
Hashtbl.clear ctx.vars;
;;
let decl t ctx x =
let ty = yices_mk_type ctx.con t in
if not (Hashtbl.mem ctx.vars x) then begin
let xdecl = yices_mk_var_decl ctx.con (string_of_int x) ty in
Hashtbl.add ctx.vars x (xdecl, t);
end;
;;
let args x y =
let args = Array.make 2 x in
args.(0) <- x;
args.(1) <- y;
args
;;
let eq ctx a b = yices_mk_eq ctx.con a b;;
let ge ctx a b = yices_mk_ge ctx.con a b;;
let gt ctx a b = yices_mk_gt ctx.con a b;;
let ite ctx c t e = yices_mk_ite ctx.con c t e;;
let bot ctx = yices_mk_false ctx.con;;
let top ctx = yices_mk_true ctx.con;;
let conj ctx x y = yices_mk_and ctx.con (args x y) 2;;
let disj ctx x y = yices_mk_or ctx.con (args x y) 2;;
let neg ctx x = yices_mk_not ctx.con x;;
let impl c x y = disj c (neg c x) y;;
let iff c x y = conj c (impl c x y) (impl c y x);;
let add ctx x y = yices_mk_sum ctx.con (args x y) 2;;
let sub ctx x y = yices_mk_sub ctx.con (args x y) 2;;
let mul ctx x y = yices_mk_mul ctx.con (args x y) 2;;
let to_int n = (int_of_string (Number.to_string n))
let of_number ctx r = yices_mk_num ctx.con (to_int r)
;;
let var ?(neg=false) t ctx x =
if not (Hashtbl.mem ctx.vars x) then (
decl t ctx x;
if neg || t = "bool" then () else (
let z = yices_mk_num ctx.con 0 in
let v = yices_mk_var_from_decl ctx.con (fst (Hashtbl.find ctx.vars x)) in
yices_assert ctx.con (ge ctx v z);
);
);
yices_mk_var_from_decl ctx.con (fst (Hashtbl.find ctx.vars x))
;;
let var_p = var "bool";;
let var_a con x =
let t =
if (F.a_spec x).F.rat <> 1 || (F.a_spec x).F.real then "real" else
"int" in
var ~neg:(F.a_spec x).F.neg t con (F.a_id x)
;;
let value ctx m x =
let (decl, t) = Hashtbl.find ctx.vars x in
match t with
| "int" -> Number.of_int (helper_get_int_value m decl)
| "bool" -> Number.of_int (helper_get_value m decl)
Number.of_rat (helper_get_num_value m decl)
(helper_get_dnum_value m decl)
| _ -> failwith "unknown variable declaration"
;;
let cache tbl f k =
if not (Hashtbl.mem tbl k) then Hashtbl.add tbl k (f k);
Hashtbl.find tbl k
;;
let rec tf ctx = function
| F.Top -> top ctx
| F.Bot -> bot ctx
| F.P i -> var_p ctx i
| F.Not (x) -> neg ctx (tfc ctx x)
| F.And (x, y) -> conj ctx (tfc ctx x) (tfc ctx y)
| F.Or (x, y) -> disj ctx (tfc ctx x) (tfc ctx y)
| F.Iff (x, y) -> iff ctx (tfc ctx x) (tfc ctx y)
| F.Eq (a, b) -> eq ctx (tc ctx a) (tc ctx b)
| F.Ge (a, b) -> ge ctx (tc ctx a) (tc ctx b)
| F.Gt (a, b) -> gt ctx (tc ctx a) (tc ctx b)
and t ctx = function
| F.App ( f , args ) - > app ctx f ( List.map ( t ctx ) args )
| F.A a -> var_a ctx a
| F.C r -> of_number ctx r
| F.Fresh a -> tc ctx a
| F.Add (a, b) -> add ctx (tc ctx a) (tc ctx b)
| F.Sub (a, b) -> sub ctx (tc ctx a) (tc ctx b)
| F.Mul (a, b) -> mul ctx (tc ctx a) (tc ctx b)
| F.Ite ( x , C 1 , C 0 ) - >
| F.Ite (x,a,b) -> ite ctx (tfc ctx x) (tc ctx a) (tc ctx b)
| F.Max (a,b) -> tc ctx (F.Ite (F.Gt (a,b),a,b))
| F.Min (a,b) -> tc ctx (F.Ite (F.Gt (a,b),b,a))
and tfc ctx x = cache ctx.tf_tbl (tf ctx) x
and tc ctx x = cache ctx.ta_tbl (t ctx) x
;;
let solve f =
let t0 = Unix.gettimeofday() in
let t _ = (Unix.gettimeofday () -. t0) in
Format.eprintf " Entering Yices section@\n% ! " ;
let ctx = init () in
ignore (yices_assert ctx.con (tfc ctx f));
let r = yices_check_aux ctx.con in
let assign =
if (r = ~-1) then
None
else
let m = yices_get_model ctx.con in
let assign = Hashtbl.fold
(fun k v -> Assignment.add_a (Formula.arith k) (value ctx m k))
ctx.vars
Assignment.empty
in Some assign
in
finalize ctx;
return assign
;;
let cache tbl f k =
if Hashtbl.mem tbl k then return (Hashtbl.find tbl k)
else (f k >>= fun v -> Hashtbl.add tbl k v; return v)
;;
let mite mx ma mb = mx >>= fun x -> if x then ma else mb;;
let rec ea a ass = match a with
| F.A l -> return (try Assignment.find_a a ass with | Not_found -> Number.of_int 0)
| F.C r -> return r
| F.Fresh a -> eval_a a ass
| F.Add (a,b) -> lift2 Number.add (eval_a a ass) (eval_a b ass)
| F.Sub (a,b) -> lift2 Number.sub (eval_a a ass) (eval_a b ass)
| F.Mul (a,b) -> lift2 Number.mul (eval_a a ass) (eval_a b ass)
| F.Ite (x,a,b) -> mite (eval_p x ass) (eval_a a ass) (eval_a b ass)
| F.Min (a,b) -> lift2 Number.min (eval_a a ass) (eval_a b ass)
| F.Max (a,b) -> lift2 Number.max (eval_a a ass) (eval_a b ass)
and ep f ass = match f with
| F.Top -> return true
| F.Bot -> return false
| F.P x ->
let v = try Assignment.find_a (Formula.arith x) ass with | Not_found -> Number.zero in
if v = Number.zero then return false
else if v = Number.one then return true
else failwith "propositional variable not propositional"
| F.Not x -> lift not (eval_p x ass)
| F.And (x,y) -> lift2 (&&) (eval_p x ass) (eval_p y ass)
| F.Or (x,y) -> lift2 (||) (eval_p x ass) (eval_p y ass)
| F.Iff (x,y) -> lift2 (=) (eval_p x ass) (eval_p y ass)
| F.Eq (a,b) -> lift2 Number.eq (eval_a a ass) (eval_a b ass)
| F.Gt (a,b) -> lift2 Number.gt (eval_a a ass) (eval_a b ass)
| F.Ge (a,b) -> lift2 Number.ge (eval_a a ass) (eval_a b ass)
and eval_a a ass = get >>= fun s -> cache s.State.eay_tbl (flip ea ass) a
and eval_p p ass = get >>= fun s -> cache s.State.epy_tbl (flip ep ass) p
;;
|
13b1b099623951a76503408f40052f61063ef53c4844de4de1ebdf35dac5d89b | spawnfest/eep49ers | tls_bloom_filter.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 2007 - 2019 . All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
%%----------------------------------------------------------------------
%% Purpose: Bloom Filter implementation for anti-replay protection
in TLS 1.3 ( stateless tickets )
%%----------------------------------------------------------------------
-module(tls_bloom_filter).
-export([add_elem/2,
contains/2,
new/2,
rotate/1]).
%%--------------------------------------------------------------------
%% API ---------------------------------------------------------------
%%--------------------------------------------------------------------
%% Create new Bloom Filter with k hashes, m bits in the filter
new(K, M) ->
Size = round(math:ceil(M / 8)),
BitField = binary:copy(<<0>>, Size),
#{k => K,
m => M,
current => BitField,
old => BitField
}.
Add new element to Bloom Filter
add_elem(#{k := K,
m := M,
current := BitField0} = BloomFilter,
Elem) ->
Hash = hash(Elem, K, M),
BitField = set_bits(BitField0, Hash),
BloomFilter#{current => BitField}.
%% Check if Bloom Filter contains element.
contains(#{k := K,
m := M,
current := BFCurrent,
old := BFOld},
Elem) ->
Hash = hash(Elem, K, M),
lists:all(fun (Pos) -> bit_is_set(BFCurrent, Pos) end, Hash) orelse
lists:all(fun (Pos) -> bit_is_set(BFOld, Pos) end, Hash).
rotate(#{m := M,
current := BFCurrent} = BloomFilter) ->
Size = round(math:ceil(M / 8)),
BFNew = binary:copy(<<0>>, Size),
BloomFilter#{current := BFNew,
old := BFCurrent}.
%%--------------------------------------------------------------------
Internal functions ------------------------------------------------
%%--------------------------------------------------------------------
bit_is_set(<<1:1,_/bitstring>>, 0) ->
true;
bit_is_set(BitField, N) ->
case BitField of
<<_:N,1:1,_/bitstring>> ->
true;
_ ->
false
end.
set_bits(BitField, []) ->
BitField;
set_bits(BitField, [H|T]) ->
set_bits(set_bit(BitField, H), T).
set_bit(BitField, 0) ->
<<_:1,Rest/bitstring>> = BitField,
<<1:1,Rest/bitstring>>;
set_bit(BitField, B) ->
<<Front:B,_:1,Rest/bitstring>> = BitField,
<<Front:B,1:1,Rest/bitstring>>.
hash(Elem, K, M) ->
hash(Elem, K, M, []).
%%
hash(_, 0, _, Acc) ->
Acc;
hash(Elem, K, M, Acc) ->
H = (erlang:phash2({Elem, 0}, M) + (K - 1) * erlang:phash2({Elem, 1}, M)) rem M,
hash(Elem, K - 1, M, [H|Acc]).
| null | https://raw.githubusercontent.com/spawnfest/eep49ers/d1020fd625a0bbda8ab01caf0e1738eb1cf74886/lib/ssl/src/tls_bloom_filter.erl | erlang |
%CopyrightBegin%
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
%CopyrightEnd%
----------------------------------------------------------------------
Purpose: Bloom Filter implementation for anti-replay protection
----------------------------------------------------------------------
--------------------------------------------------------------------
API ---------------------------------------------------------------
--------------------------------------------------------------------
Create new Bloom Filter with k hashes, m bits in the filter
Check if Bloom Filter contains element.
--------------------------------------------------------------------
--------------------------------------------------------------------
| Copyright Ericsson AB 2007 - 2019 . All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
in TLS 1.3 ( stateless tickets )
-module(tls_bloom_filter).
-export([add_elem/2,
contains/2,
new/2,
rotate/1]).
new(K, M) ->
Size = round(math:ceil(M / 8)),
BitField = binary:copy(<<0>>, Size),
#{k => K,
m => M,
current => BitField,
old => BitField
}.
Add new element to Bloom Filter
add_elem(#{k := K,
m := M,
current := BitField0} = BloomFilter,
Elem) ->
Hash = hash(Elem, K, M),
BitField = set_bits(BitField0, Hash),
BloomFilter#{current => BitField}.
contains(#{k := K,
m := M,
current := BFCurrent,
old := BFOld},
Elem) ->
Hash = hash(Elem, K, M),
lists:all(fun (Pos) -> bit_is_set(BFCurrent, Pos) end, Hash) orelse
lists:all(fun (Pos) -> bit_is_set(BFOld, Pos) end, Hash).
rotate(#{m := M,
current := BFCurrent} = BloomFilter) ->
Size = round(math:ceil(M / 8)),
BFNew = binary:copy(<<0>>, Size),
BloomFilter#{current := BFNew,
old := BFCurrent}.
Internal functions ------------------------------------------------
bit_is_set(<<1:1,_/bitstring>>, 0) ->
true;
bit_is_set(BitField, N) ->
case BitField of
<<_:N,1:1,_/bitstring>> ->
true;
_ ->
false
end.
set_bits(BitField, []) ->
BitField;
set_bits(BitField, [H|T]) ->
set_bits(set_bit(BitField, H), T).
set_bit(BitField, 0) ->
<<_:1,Rest/bitstring>> = BitField,
<<1:1,Rest/bitstring>>;
set_bit(BitField, B) ->
<<Front:B,_:1,Rest/bitstring>> = BitField,
<<Front:B,1:1,Rest/bitstring>>.
hash(Elem, K, M) ->
hash(Elem, K, M, []).
hash(_, 0, _, Acc) ->
Acc;
hash(Elem, K, M, Acc) ->
H = (erlang:phash2({Elem, 0}, M) + (K - 1) * erlang:phash2({Elem, 1}, M)) rem M,
hash(Elem, K - 1, M, [H|Acc]).
|
e7540174c09611f909a77205b46f54c810312d0a3c7bd6f1e01b392221c06792 | perf101/rage | html_handler.ml | open! Core.Std
class t = fun ~args ->
object (self)
inherit Handler.t ~args
method private include_javascript =
printf "<script src='rage.js'></script>"
method private write_404 =
printf "Status: 404 Not Found\n";
printf "Content-Type: text/html\n\n";
printf "<h1>404 --- this is not the page you are looking for ...</h1>"
method private javascript_redirect url =
printf "Content-type: text/html\n\n";
printf "<html><head>\n";
printf "<script language='javascript' type='text/javascript'>\n";
let url_fqdn = Str.replace_first (Str.regexp "perf/") "perf.uk.xensource.com" url in
printf "window.location.replace(decodeURIComponent('%s'));\n" url_fqdn;
printf "</script>\n</head><body></body></html>\n"
method private write_header = self#write_html_header
method private write_footer = Utils.cat (base_path ^ "footer.html")
end
| null | https://raw.githubusercontent.com/perf101/rage/e8630659b2754b6621df7c49f3663fa7c4fac5eb/src/html_handler.ml | ocaml | open! Core.Std
class t = fun ~args ->
object (self)
inherit Handler.t ~args
method private include_javascript =
printf "<script src='rage.js'></script>"
method private write_404 =
printf "Status: 404 Not Found\n";
printf "Content-Type: text/html\n\n";
printf "<h1>404 --- this is not the page you are looking for ...</h1>"
method private javascript_redirect url =
printf "Content-type: text/html\n\n";
printf "<html><head>\n";
printf "<script language='javascript' type='text/javascript'>\n";
let url_fqdn = Str.replace_first (Str.regexp "perf/") "perf.uk.xensource.com" url in
printf "window.location.replace(decodeURIComponent('%s'));\n" url_fqdn;
printf "</script>\n</head><body></body></html>\n"
method private write_header = self#write_html_header
method private write_footer = Utils.cat (base_path ^ "footer.html")
end
| |
42b99c6d98c7def3fa13a73a6a835e8bd72ffc6ae127ed3c5fcb0824782c8837 | clojure-interop/google-cloud-clients | MetricsClient.clj | (ns com.google.cloud.logging.v2.MetricsClient
"Service Description: Service for configuring logs-based metrics.
This class provides the ability to make remote calls to the backing service through method
calls that map to API methods. Sample code to get started:
try (MetricsClient metricsClient = MetricsClient.create()) {
MetricName metricName = ProjectMetricName.of(\"[PROJECT]\", \"[METRIC]\");
LogMetric response = metricsClient.getLogMetric(metricName);
}
Note: close() needs to be called on the metricsClient object to clean up resources such as
threads. In the example above, try-with-resources is used, which automatically calls close().
The surface of this class includes several types of Java methods for each of the API's
methods:
A \"flattened\" method. With this type of method, the fields of the request type have been
converted into function parameters. It may be the case that not all fields are available as
parameters, and not every API method will have a flattened method entry point.
A \"request object\" method. This type of method only takes one parameter, a request object,
which must be constructed before the call. Not every API method will have a request object
method.
A \"callable\" method. This type of method takes no parameters and returns an immutable API
callable object, which can be used to initiate calls to the service.
See the individual methods for example code.
Many parameters require resource names to be formatted in a particular way. To assist with
these names, this class includes a format method for each type of name, and additionally a parse
method to extract the individual identifiers contained within names that are returned.
This class can be customized by passing in a custom instance of MetricsSettings to create().
For example:
To customize credentials:
MetricsSettings metricsSettings =
MetricsSettings.newBuilder()
.setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
.build();
MetricsClient metricsClient =
MetricsClient.create(metricsSettings);
To customize the endpoint:
MetricsSettings metricsSettings =
MetricsSettings.newBuilder().setEndpoint(myEndpoint).build();
MetricsClient metricsClient =
MetricsClient.create(metricsSettings);"
(:refer-clojure :only [require comment defn ->])
(:import [com.google.cloud.logging.v2 MetricsClient]))
(defn *create
"Constructs an instance of MetricsClient, using the given settings. The channels are created
based on the settings passed in, or defaults for any settings that are not set.
settings - `com.google.cloud.logging.v2.MetricsSettings`
returns: `com.google.cloud.logging.v2.MetricsClient`
throws: java.io.IOException"
(^com.google.cloud.logging.v2.MetricsClient [^com.google.cloud.logging.v2.MetricsSettings settings]
(MetricsClient/create settings))
(^com.google.cloud.logging.v2.MetricsClient []
(MetricsClient/create )))
(defn update-log-metric
"Creates or updates a logs-based metric.
Sample code:
try (MetricsClient metricsClient = MetricsClient.create()) {
MetricName metricName = ProjectMetricName.of(\"[PROJECT]\", \"[METRIC]\");
LogMetric metric = LogMetric.newBuilder().build();
LogMetric response = metricsClient.updateLogMetric(metricName, metric);
}
metric-name - The resource name of the metric to update: \"projects/[PROJECT_ID]/metrics/[METRIC_ID]\" The updated metric must be provided in the request and it's `name` field must be the same as `[METRIC_ID]` If the metric does not exist in `[PROJECT_ID]`, then a new metric is created. - `com.google.logging.v2.MetricName`
metric - The updated metric. - `com.google.logging.v2.LogMetric`
returns: `com.google.logging.v2.LogMetric`
throws: com.google.api.gax.rpc.ApiException - if the remote call fails"
(^com.google.logging.v2.LogMetric [^MetricsClient this ^com.google.logging.v2.MetricName metric-name ^com.google.logging.v2.LogMetric metric]
(-> this (.updateLogMetric metric-name metric)))
(^com.google.logging.v2.LogMetric [^MetricsClient this ^com.google.logging.v2.UpdateLogMetricRequest request]
(-> this (.updateLogMetric request))))
(defn get-settings
"returns: `com.google.cloud.logging.v2.MetricsSettings`"
(^com.google.cloud.logging.v2.MetricsSettings [^MetricsClient this]
(-> this (.getSettings))))
(defn get-log-metric-callable
"Gets a logs-based metric.
Sample code:
try (MetricsClient metricsClient = MetricsClient.create()) {
MetricName metricName = ProjectMetricName.of(\"[PROJECT]\", \"[METRIC]\");
GetLogMetricRequest request = GetLogMetricRequest.newBuilder()
.setMetricName(metricName.toString())
.build();
ApiFuture<LogMetric> future = metricsClient.getLogMetricCallable().futureCall(request);
// Do something
LogMetric response = future.get();
}
returns: `com.google.api.gax.rpc.UnaryCallable<com.google.logging.v2.GetLogMetricRequest,com.google.logging.v2.LogMetric>`"
(^com.google.api.gax.rpc.UnaryCallable [^MetricsClient this]
(-> this (.getLogMetricCallable))))
(defn create-log-metric
"Creates a logs-based metric.
Sample code:
try (MetricsClient metricsClient = MetricsClient.create()) {
ParentName parent = ProjectName.of(\"[PROJECT]\");
LogMetric metric = LogMetric.newBuilder().build();
LogMetric response = metricsClient.createLogMetric(parent, metric);
}
parent - The resource name of the project in which to create the metric: \"projects/[PROJECT_ID]\" The new metric must be provided in the request. - `com.google.logging.v2.ParentName`
metric - The new logs-based metric, which must not have an identifier that already exists. - `com.google.logging.v2.LogMetric`
returns: `com.google.logging.v2.LogMetric`
throws: com.google.api.gax.rpc.ApiException - if the remote call fails"
(^com.google.logging.v2.LogMetric [^MetricsClient this ^com.google.logging.v2.ParentName parent ^com.google.logging.v2.LogMetric metric]
(-> this (.createLogMetric parent metric)))
(^com.google.logging.v2.LogMetric [^MetricsClient this ^com.google.logging.v2.CreateLogMetricRequest request]
(-> this (.createLogMetric request))))
(defn get-stub
"returns: `(value="A restructuring of stub classes is planned, so this may break in the future") com.google.cloud.logging.v2.stub.MetricsServiceV2Stub`"
([^MetricsClient this]
(-> this (.getStub))))
(defn create-log-metric-callable
"Creates a logs-based metric.
Sample code:
try (MetricsClient metricsClient = MetricsClient.create()) {
ParentName parent = ProjectName.of(\"[PROJECT]\");
LogMetric metric = LogMetric.newBuilder().build();
CreateLogMetricRequest request = CreateLogMetricRequest.newBuilder()
.setParent(parent.toString())
.setMetric(metric)
.build();
ApiFuture<LogMetric> future = metricsClient.createLogMetricCallable().futureCall(request);
// Do something
LogMetric response = future.get();
}
returns: `com.google.api.gax.rpc.UnaryCallable<com.google.logging.v2.CreateLogMetricRequest,com.google.logging.v2.LogMetric>`"
(^com.google.api.gax.rpc.UnaryCallable [^MetricsClient this]
(-> this (.createLogMetricCallable))))
(defn shutdown?
"returns: `boolean`"
(^Boolean [^MetricsClient this]
(-> this (.isShutdown))))
(defn await-termination
"duration - `long`
unit - `java.util.concurrent.TimeUnit`
returns: `boolean`
throws: java.lang.InterruptedException"
(^Boolean [^MetricsClient this ^Long duration ^java.util.concurrent.TimeUnit unit]
(-> this (.awaitTermination duration unit))))
(defn shutdown
""
([^MetricsClient this]
(-> this (.shutdown))))
(defn list-log-metrics-callable
"Lists logs-based metrics.
Sample code:
try (MetricsClient metricsClient = MetricsClient.create()) {
ParentName parent = ProjectName.of(\"[PROJECT]\");
ListLogMetricsRequest request = ListLogMetricsRequest.newBuilder()
.setParent(parent.toString())
.build();
while (true) {
ListLogMetricsResponse response = metricsClient.listLogMetricsCallable().call(request);
for (LogMetric element : response.getMetricsList()) {
// doThingsWith(element);
}
String nextPageToken = response.getNextPageToken();
if (!Strings.isNullOrEmpty(nextPageToken)) {
request = request.toBuilder().setPageToken(nextPageToken).build();
} else {
break;
}
}
}
returns: `com.google.api.gax.rpc.UnaryCallable<com.google.logging.v2.ListLogMetricsRequest,com.google.logging.v2.ListLogMetricsResponse>`"
(^com.google.api.gax.rpc.UnaryCallable [^MetricsClient this]
(-> this (.listLogMetricsCallable))))
(defn close
""
([^MetricsClient this]
(-> this (.close))))
(defn terminated?
"returns: `boolean`"
(^Boolean [^MetricsClient this]
(-> this (.isTerminated))))
(defn delete-log-metric
"Deletes a logs-based metric.
Sample code:
try (MetricsClient metricsClient = MetricsClient.create()) {
MetricName metricName = ProjectMetricName.of(\"[PROJECT]\", \"[METRIC]\");
metricsClient.deleteLogMetric(metricName);
}
metric-name - The resource name of the metric to delete: \"projects/[PROJECT_ID]/metrics/[METRIC_ID]\" - `com.google.logging.v2.MetricName`
throws: com.google.api.gax.rpc.ApiException - if the remote call fails"
([^MetricsClient this ^com.google.logging.v2.MetricName metric-name]
(-> this (.deleteLogMetric metric-name))))
(defn update-log-metric-callable
"Creates or updates a logs-based metric.
Sample code:
try (MetricsClient metricsClient = MetricsClient.create()) {
MetricName metricName = ProjectMetricName.of(\"[PROJECT]\", \"[METRIC]\");
LogMetric metric = LogMetric.newBuilder().build();
UpdateLogMetricRequest request = UpdateLogMetricRequest.newBuilder()
.setMetricName(metricName.toString())
.setMetric(metric)
.build();
ApiFuture<LogMetric> future = metricsClient.updateLogMetricCallable().futureCall(request);
// Do something
LogMetric response = future.get();
}
returns: `com.google.api.gax.rpc.UnaryCallable<com.google.logging.v2.UpdateLogMetricRequest,com.google.logging.v2.LogMetric>`"
(^com.google.api.gax.rpc.UnaryCallable [^MetricsClient this]
(-> this (.updateLogMetricCallable))))
(defn delete-log-metric-callable
"Deletes a logs-based metric.
Sample code:
try (MetricsClient metricsClient = MetricsClient.create()) {
MetricName metricName = ProjectMetricName.of(\"[PROJECT]\", \"[METRIC]\");
DeleteLogMetricRequest request = DeleteLogMetricRequest.newBuilder()
.setMetricName(metricName.toString())
.build();
ApiFuture<Void> future = metricsClient.deleteLogMetricCallable().futureCall(request);
// Do something
future.get();
}
returns: `com.google.api.gax.rpc.UnaryCallable<com.google.logging.v2.DeleteLogMetricRequest,com.google.protobuf.Empty>`"
(^com.google.api.gax.rpc.UnaryCallable [^MetricsClient this]
(-> this (.deleteLogMetricCallable))))
(defn list-log-metrics
"Lists logs-based metrics.
Sample code:
try (MetricsClient metricsClient = MetricsClient.create()) {
ParentName parent = ProjectName.of(\"[PROJECT]\");
for (LogMetric element : metricsClient.listLogMetrics(parent).iterateAll()) {
// doThingsWith(element);
}
}
parent - Required. The name of the project containing the metrics: \"projects/[PROJECT_ID]\" - `com.google.logging.v2.ParentName`
returns: `com.google.cloud.logging.v2.MetricsClient$ListLogMetricsPagedResponse`
throws: com.google.api.gax.rpc.ApiException - if the remote call fails"
(^com.google.cloud.logging.v2.MetricsClient$ListLogMetricsPagedResponse [^MetricsClient this ^com.google.logging.v2.ParentName parent]
(-> this (.listLogMetrics parent))))
(defn shutdown-now
""
([^MetricsClient this]
(-> this (.shutdownNow))))
(defn get-log-metric
"Gets a logs-based metric.
Sample code:
try (MetricsClient metricsClient = MetricsClient.create()) {
MetricName metricName = ProjectMetricName.of(\"[PROJECT]\", \"[METRIC]\");
LogMetric response = metricsClient.getLogMetric(metricName);
}
metric-name - The resource name of the desired metric: \"projects/[PROJECT_ID]/metrics/[METRIC_ID]\" - `com.google.logging.v2.MetricName`
returns: `com.google.logging.v2.LogMetric`
throws: com.google.api.gax.rpc.ApiException - if the remote call fails"
(^com.google.logging.v2.LogMetric [^MetricsClient this ^com.google.logging.v2.MetricName metric-name]
(-> this (.getLogMetric metric-name))))
(defn list-log-metrics-paged-callable
"Lists logs-based metrics.
Sample code:
try (MetricsClient metricsClient = MetricsClient.create()) {
ParentName parent = ProjectName.of(\"[PROJECT]\");
ListLogMetricsRequest request = ListLogMetricsRequest.newBuilder()
.setParent(parent.toString())
.build();
ApiFuture<ListLogMetricsPagedResponse> future = metricsClient.listLogMetricsPagedCallable().futureCall(request);
// Do something
for (LogMetric element : future.get().iterateAll()) {
// doThingsWith(element);
}
}
returns: `com.google.api.gax.rpc.UnaryCallable<com.google.logging.v2.ListLogMetricsRequest,com.google.cloud.logging.v2.MetricsClient$ListLogMetricsPagedResponse>`"
(^com.google.api.gax.rpc.UnaryCallable [^MetricsClient this]
(-> this (.listLogMetricsPagedCallable))))
| null | https://raw.githubusercontent.com/clojure-interop/google-cloud-clients/80852d0496057c22f9cdc86d6f9ffc0fa3cd7904/com.google.cloud.logging/src/com/google/cloud/logging/v2/MetricsClient.clj | clojure |
"
| (ns com.google.cloud.logging.v2.MetricsClient
"Service Description: Service for configuring logs-based metrics.
This class provides the ability to make remote calls to the backing service through method
calls that map to API methods. Sample code to get started:
try (MetricsClient metricsClient = MetricsClient.create()) {
}
Note: close() needs to be called on the metricsClient object to clean up resources such as
threads. In the example above, try-with-resources is used, which automatically calls close().
The surface of this class includes several types of Java methods for each of the API's
methods:
A \"flattened\" method. With this type of method, the fields of the request type have been
converted into function parameters. It may be the case that not all fields are available as
parameters, and not every API method will have a flattened method entry point.
A \"request object\" method. This type of method only takes one parameter, a request object,
which must be constructed before the call. Not every API method will have a request object
method.
A \"callable\" method. This type of method takes no parameters and returns an immutable API
callable object, which can be used to initiate calls to the service.
See the individual methods for example code.
Many parameters require resource names to be formatted in a particular way. To assist with
these names, this class includes a format method for each type of name, and additionally a parse
method to extract the individual identifiers contained within names that are returned.
This class can be customized by passing in a custom instance of MetricsSettings to create().
For example:
To customize credentials:
MetricsSettings metricsSettings =
MetricsSettings.newBuilder()
.setCredentialsProvider(FixedCredentialsProvider.create(myCredentials))
MetricsClient metricsClient =
To customize the endpoint:
MetricsSettings metricsSettings =
MetricsClient metricsClient =
(:refer-clojure :only [require comment defn ->])
(:import [com.google.cloud.logging.v2 MetricsClient]))
(defn *create
"Constructs an instance of MetricsClient, using the given settings. The channels are created
based on the settings passed in, or defaults for any settings that are not set.
settings - `com.google.cloud.logging.v2.MetricsSettings`
returns: `com.google.cloud.logging.v2.MetricsClient`
throws: java.io.IOException"
(^com.google.cloud.logging.v2.MetricsClient [^com.google.cloud.logging.v2.MetricsSettings settings]
(MetricsClient/create settings))
(^com.google.cloud.logging.v2.MetricsClient []
(MetricsClient/create )))
(defn update-log-metric
"Creates or updates a logs-based metric.
Sample code:
try (MetricsClient metricsClient = MetricsClient.create()) {
}
metric-name - The resource name of the metric to update: \"projects/[PROJECT_ID]/metrics/[METRIC_ID]\" The updated metric must be provided in the request and it's `name` field must be the same as `[METRIC_ID]` If the metric does not exist in `[PROJECT_ID]`, then a new metric is created. - `com.google.logging.v2.MetricName`
metric - The updated metric. - `com.google.logging.v2.LogMetric`
returns: `com.google.logging.v2.LogMetric`
throws: com.google.api.gax.rpc.ApiException - if the remote call fails"
(^com.google.logging.v2.LogMetric [^MetricsClient this ^com.google.logging.v2.MetricName metric-name ^com.google.logging.v2.LogMetric metric]
(-> this (.updateLogMetric metric-name metric)))
(^com.google.logging.v2.LogMetric [^MetricsClient this ^com.google.logging.v2.UpdateLogMetricRequest request]
(-> this (.updateLogMetric request))))
(defn get-settings
"returns: `com.google.cloud.logging.v2.MetricsSettings`"
(^com.google.cloud.logging.v2.MetricsSettings [^MetricsClient this]
(-> this (.getSettings))))
(defn get-log-metric-callable
"Gets a logs-based metric.
Sample code:
try (MetricsClient metricsClient = MetricsClient.create()) {
GetLogMetricRequest request = GetLogMetricRequest.newBuilder()
.setMetricName(metricName.toString())
// Do something
}
returns: `com.google.api.gax.rpc.UnaryCallable<com.google.logging.v2.GetLogMetricRequest,com.google.logging.v2.LogMetric>`"
(^com.google.api.gax.rpc.UnaryCallable [^MetricsClient this]
(-> this (.getLogMetricCallable))))
(defn create-log-metric
"Creates a logs-based metric.
Sample code:
try (MetricsClient metricsClient = MetricsClient.create()) {
}
parent - The resource name of the project in which to create the metric: \"projects/[PROJECT_ID]\" The new metric must be provided in the request. - `com.google.logging.v2.ParentName`
metric - The new logs-based metric, which must not have an identifier that already exists. - `com.google.logging.v2.LogMetric`
returns: `com.google.logging.v2.LogMetric`
throws: com.google.api.gax.rpc.ApiException - if the remote call fails"
(^com.google.logging.v2.LogMetric [^MetricsClient this ^com.google.logging.v2.ParentName parent ^com.google.logging.v2.LogMetric metric]
(-> this (.createLogMetric parent metric)))
(^com.google.logging.v2.LogMetric [^MetricsClient this ^com.google.logging.v2.CreateLogMetricRequest request]
(-> this (.createLogMetric request))))
(defn get-stub
"returns: `(value="A restructuring of stub classes is planned, so this may break in the future") com.google.cloud.logging.v2.stub.MetricsServiceV2Stub`"
([^MetricsClient this]
(-> this (.getStub))))
(defn create-log-metric-callable
"Creates a logs-based metric.
Sample code:
try (MetricsClient metricsClient = MetricsClient.create()) {
CreateLogMetricRequest request = CreateLogMetricRequest.newBuilder()
.setParent(parent.toString())
.setMetric(metric)
// Do something
}
returns: `com.google.api.gax.rpc.UnaryCallable<com.google.logging.v2.CreateLogMetricRequest,com.google.logging.v2.LogMetric>`"
(^com.google.api.gax.rpc.UnaryCallable [^MetricsClient this]
(-> this (.createLogMetricCallable))))
(defn shutdown?
"returns: `boolean`"
(^Boolean [^MetricsClient this]
(-> this (.isShutdown))))
(defn await-termination
"duration - `long`
unit - `java.util.concurrent.TimeUnit`
returns: `boolean`
throws: java.lang.InterruptedException"
(^Boolean [^MetricsClient this ^Long duration ^java.util.concurrent.TimeUnit unit]
(-> this (.awaitTermination duration unit))))
(defn shutdown
""
([^MetricsClient this]
(-> this (.shutdown))))
(defn list-log-metrics-callable
"Lists logs-based metrics.
Sample code:
try (MetricsClient metricsClient = MetricsClient.create()) {
ListLogMetricsRequest request = ListLogMetricsRequest.newBuilder()
.setParent(parent.toString())
while (true) {
for (LogMetric element : response.getMetricsList()) {
}
if (!Strings.isNullOrEmpty(nextPageToken)) {
} else {
}
}
}
returns: `com.google.api.gax.rpc.UnaryCallable<com.google.logging.v2.ListLogMetricsRequest,com.google.logging.v2.ListLogMetricsResponse>`"
(^com.google.api.gax.rpc.UnaryCallable [^MetricsClient this]
(-> this (.listLogMetricsCallable))))
(defn close
""
([^MetricsClient this]
(-> this (.close))))
(defn terminated?
"returns: `boolean`"
(^Boolean [^MetricsClient this]
(-> this (.isTerminated))))
(defn delete-log-metric
"Deletes a logs-based metric.
Sample code:
try (MetricsClient metricsClient = MetricsClient.create()) {
}
metric-name - The resource name of the metric to delete: \"projects/[PROJECT_ID]/metrics/[METRIC_ID]\" - `com.google.logging.v2.MetricName`
throws: com.google.api.gax.rpc.ApiException - if the remote call fails"
([^MetricsClient this ^com.google.logging.v2.MetricName metric-name]
(-> this (.deleteLogMetric metric-name))))
(defn update-log-metric-callable
"Creates or updates a logs-based metric.
Sample code:
try (MetricsClient metricsClient = MetricsClient.create()) {
UpdateLogMetricRequest request = UpdateLogMetricRequest.newBuilder()
.setMetricName(metricName.toString())
.setMetric(metric)
// Do something
}
returns: `com.google.api.gax.rpc.UnaryCallable<com.google.logging.v2.UpdateLogMetricRequest,com.google.logging.v2.LogMetric>`"
(^com.google.api.gax.rpc.UnaryCallable [^MetricsClient this]
(-> this (.updateLogMetricCallable))))
(defn delete-log-metric-callable
"Deletes a logs-based metric.
Sample code:
try (MetricsClient metricsClient = MetricsClient.create()) {
DeleteLogMetricRequest request = DeleteLogMetricRequest.newBuilder()
.setMetricName(metricName.toString())
// Do something
}
returns: `com.google.api.gax.rpc.UnaryCallable<com.google.logging.v2.DeleteLogMetricRequest,com.google.protobuf.Empty>`"
(^com.google.api.gax.rpc.UnaryCallable [^MetricsClient this]
(-> this (.deleteLogMetricCallable))))
(defn list-log-metrics
"Lists logs-based metrics.
Sample code:
try (MetricsClient metricsClient = MetricsClient.create()) {
for (LogMetric element : metricsClient.listLogMetrics(parent).iterateAll()) {
}
}
parent - Required. The name of the project containing the metrics: \"projects/[PROJECT_ID]\" - `com.google.logging.v2.ParentName`
returns: `com.google.cloud.logging.v2.MetricsClient$ListLogMetricsPagedResponse`
throws: com.google.api.gax.rpc.ApiException - if the remote call fails"
(^com.google.cloud.logging.v2.MetricsClient$ListLogMetricsPagedResponse [^MetricsClient this ^com.google.logging.v2.ParentName parent]
(-> this (.listLogMetrics parent))))
(defn shutdown-now
""
([^MetricsClient this]
(-> this (.shutdownNow))))
(defn get-log-metric
"Gets a logs-based metric.
Sample code:
try (MetricsClient metricsClient = MetricsClient.create()) {
}
metric-name - The resource name of the desired metric: \"projects/[PROJECT_ID]/metrics/[METRIC_ID]\" - `com.google.logging.v2.MetricName`
returns: `com.google.logging.v2.LogMetric`
throws: com.google.api.gax.rpc.ApiException - if the remote call fails"
(^com.google.logging.v2.LogMetric [^MetricsClient this ^com.google.logging.v2.MetricName metric-name]
(-> this (.getLogMetric metric-name))))
(defn list-log-metrics-paged-callable
"Lists logs-based metrics.
Sample code:
try (MetricsClient metricsClient = MetricsClient.create()) {
ListLogMetricsRequest request = ListLogMetricsRequest.newBuilder()
.setParent(parent.toString())
// Do something
for (LogMetric element : future.get().iterateAll()) {
}
}
returns: `com.google.api.gax.rpc.UnaryCallable<com.google.logging.v2.ListLogMetricsRequest,com.google.cloud.logging.v2.MetricsClient$ListLogMetricsPagedResponse>`"
(^com.google.api.gax.rpc.UnaryCallable [^MetricsClient this]
(-> this (.listLogMetricsPagedCallable))))
|
5816769fa0492301ab54ba8987d2e7923e513ec0beed4c800f3bd49a72e6ec17 | haskell/cabal | Prelude.hs | # LANGUAGE CPP #
# LANGUAGE FlexibleContexts #
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE TypeOperators #-}
#ifdef MIN_VERSION_base
#define MINVER_base_411 MIN_VERSION_base(4,11,0)
#else
#define MINVER_base_411 (__GLASGOW_HASKELL__ >= 804)
#endif
| This module does two things :
--
* Acts as a compatibility layer , like @base - compat@.
--
-- * Provides commonly used imports.
module Distribution.Compat.Prelude (
-- * Prelude
--
Prelude is re - exported , following is hidden :
module BasePrelude,
-- * Common type-classes
Semigroup (..),
gmappend, gmempty,
Typeable, TypeRep, typeRep,
Data,
Generic,
NFData (..), genericRnf,
Binary (..),
Structured,
Alternative (..),
MonadPlus (..),
IsString (..),
-- * Some types
Map,
Set,
NonEmptySet,
Identity (..),
Proxy (..),
Const (..),
Void,
-- * Data.Either
partitionEithers,
-- * Data.Maybe
catMaybes, mapMaybe,
fromMaybe,
maybeToList, listToMaybe,
isNothing, isJust,
-- * Data.List
unfoldr,
isPrefixOf, isSuffixOf,
intercalate, intersperse,
sort, sortBy,
nub, nubBy,
partition,
dropWhileEnd,
* Data . List . NonEmpty
NonEmpty((:|)), nonEmpty, foldl1, foldr1,
head, tail, last, init,
-- * Data.Foldable
Foldable, foldMap, foldr,
null, length,
find, foldl',
traverse_, for_,
any, all,
toList,
* Data .
Traversable, traverse, sequenceA,
for,
-- * Data.Function
on,
-- * Data.Ord
comparing,
-- * Control.Arrow
first,
-- * Control.Monad
liftM, liftM2,
unless, when,
ap, void,
foldM, filterM,
join, guard,
-- * Control.Exception
catch, throwIO, evaluate,
Exception (..), IOException, SomeException (..),
tryIO, catchIO, catchExit,
* Control .
deepseq, force,
* Data .
isSpace, isDigit, isUpper, isAlpha, isAlphaNum,
chr, ord,
toLower, toUpper,
-- * Data.Void
absurd, vacuous,
-- * Data.Word & Data.Int
Word,
Word8, Word16, Word32, Word64,
Int8, Int16, Int32, Int64,
-- * Text.PrettyPrint
(<<>>), (Disp.<+>),
-- * System.Exit
ExitCode (..),
exitWith, exitSuccess, exitFailure,
-- * Text.Read
readMaybe,
-- * Debug.Trace (as deprecated functions)
trace, traceShow, traceShowId, traceM, traceShowM
) where
-- We also could hide few partial function
import Prelude as BasePrelude hiding
( mapM, mapM_, sequence, null, length, foldr, any, all, head, tail, last, init
-- partial functions
, read
, foldr1, foldl1
#if MINVER_base_411
As of base 4.11.0.0 Prelude exports part of Semigroup ( .. ) .
Hide this so we instead rely on Distribution . Compat . Semigroup .
, Semigroup(..)
#endif
, Word
-- We hide them, as we import only some members
, Traversable, traverse, sequenceA
, Foldable, foldMap
)
AMP
import Data.Foldable
( Foldable(toList),
length,
null,
Foldable(foldMap, foldr),
all,
any,
find,
foldl',
for_,
traverse_ )
import Data.Traversable (Traversable (sequenceA, traverse), for)
import qualified Data.Foldable
-- Extra exports
import Control.Applicative (Alternative (..), Const(..))
import Control.Arrow (first)
import Control.DeepSeq (NFData (..), deepseq, force)
import Control.Exception (Exception (..), IOException, SomeException (..), catch, evaluate, throwIO)
import Control.Monad (MonadPlus (..), ap, filterM, foldM, guard, join, liftM, liftM2, unless, void, when)
import Data.Char (chr, isAlpha, isAlphaNum, isDigit, isSpace, isUpper, ord, toLower, toUpper)
import Data.Data (Data)
import Data.Either (partitionEithers)
import Data.Function (on)
import Data.Functor.Identity (Identity (..))
import Data.Int (Int16, Int32, Int64, Int8)
import Data.List (dropWhileEnd, intercalate, intersperse, isPrefixOf, isSuffixOf, nub, nubBy, partition, sort, sortBy, unfoldr)
import Data.List.NonEmpty (NonEmpty ((:|)), nonEmpty, head, init, last, tail)
import Data.Map (Map)
import Data.Maybe (catMaybes, fromMaybe, isJust, isNothing, listToMaybe, mapMaybe, maybeToList)
import Data.Ord (comparing)
import Data.Proxy (Proxy (..))
import Data.Set (Set)
import Data.String (IsString (..))
import Data.Void (Void, absurd, vacuous)
import Data.Word (Word, Word16, Word32, Word64, Word8)
import Distribution.Compat.Binary (Binary (..))
import Distribution.Compat.Semigroup (Semigroup (..), gmappend, gmempty)
import Distribution.Compat.Typeable (TypeRep, Typeable, typeRep)
import GHC.Generics ((:*:) ((:*:)), (:+:) (L1, R1), Generic, K1 (unK1), M1 (unM1), Rep (..), U1 (U1), V1)
import System.Exit (ExitCode (..), exitFailure, exitSuccess, exitWith)
import Text.Read (readMaybe)
import qualified Text.PrettyPrint as Disp
import Distribution.Compat.Exception
import Distribution.Compat.NonEmptySet (NonEmptySet)
import Distribution.Utils.Structured (Structured)
import qualified Debug.Trace
-- | New name for 'Text.PrettyPrint.<>'
(<<>>) :: Disp.Doc -> Disp.Doc -> Disp.Doc
(<<>>) = (Disp.<>)
-- | "GHC.Generics"-based 'rnf' implementation
--
This is needed in order to support @deepseq < 1.4@ which did n't
-- have a 'Generic'-based default 'rnf' implementation yet.
--
-- In order to define instances, use e.g.
--
> instance NFData MyType where rnf = genericRnf
--
The implementation has been taken from @deepseq-1.4.2@ 's default
-- 'rnf' implementation.
genericRnf :: (Generic a, GNFData (Rep a)) => a -> ()
genericRnf = grnf . from
-- | Hidden internal type-class
class GNFData f where
grnf :: f a -> ()
instance GNFData V1 where
grnf = error "Control.DeepSeq.rnf: uninhabited type"
instance GNFData U1 where
grnf U1 = ()
instance NFData a => GNFData (K1 i a) where
grnf = rnf . unK1
# INLINEABLE grnf #
instance GNFData a => GNFData (M1 i c a) where
grnf = grnf . unM1
# INLINEABLE grnf #
instance (GNFData a, GNFData b) => GNFData (a :*: b) where
grnf (x :*: y) = grnf x `seq` grnf y
# INLINEABLE grnf #
instance (GNFData a, GNFData b) => GNFData (a :+: b) where
grnf (L1 x) = grnf x
grnf (R1 x) = grnf x
# INLINEABLE grnf #
TODO : if we want / foldl1 to work on more than NonEmpty , we
-- can define a local typeclass 'Foldable1', e.g.
--
-- @
-- class Foldable f => Foldable1 f
--
instance Foldable1 NonEmpty
--
: : Foldable1 t = > ( a - > a - > a ) - > t a - > a
-- foldr1 = Data.Foldable.foldr1
--
foldl1 : : Foldable1 t = > ( a - > a - > a ) - > t a - > a
foldl1 = Data . Foldable.foldl1
-- @
--
# INLINE foldr1 #
foldr1 :: (a -> a -> a) -> NonEmpty a -> a
foldr1 = Data.Foldable.foldr1
# INLINE foldl1 #
foldl1 :: (a -> a -> a) -> NonEmpty a -> a
foldl1 = Data.Foldable.foldl1
-------------------------------------------------------------------------------
Trace
-------------------------------------------------------------------------------
-- Functions from Debug.Trace
-- but with DEPRECATED pragma, so -Werror will scream on them.
trace :: String -> a -> a
trace = Debug.Trace.trace
{-# DEPRECATED trace "Don't leave me in the code" #-}
traceShowId :: Show a => a -> a
traceShowId x = Debug.Trace.traceShow x x
{-# DEPRECATED traceShowId "Don't leave me in the code" #-}
traceShow :: Show a => a -> b -> b
traceShow = Debug.Trace.traceShow
{-# DEPRECATED traceShow "Don't leave me in the code" #-}
traceM :: Applicative f => String -> f ()
traceM = Debug.Trace.traceM
{-# DEPRECATED traceM "Don't leave me in the code" #-}
traceShowM :: (Show a, Applicative f) => a -> f ()
traceShowM = Debug.Trace.traceShowM
{-# DEPRECATED traceShowM "Don't leave me in the code" #-}
| null | https://raw.githubusercontent.com/haskell/cabal/c53acf5280014ba37337a78967191e1ddd0507cc/Cabal-syntax/src/Distribution/Compat/Prelude.hs | haskell | # LANGUAGE RankNTypes #
# LANGUAGE Trustworthy #
# LANGUAGE TypeOperators #
* Provides commonly used imports.
* Prelude
* Common type-classes
* Some types
* Data.Either
* Data.Maybe
* Data.List
* Data.Foldable
* Data.Function
* Data.Ord
* Control.Arrow
* Control.Monad
* Control.Exception
* Data.Void
* Data.Word & Data.Int
* Text.PrettyPrint
* System.Exit
* Text.Read
* Debug.Trace (as deprecated functions)
We also could hide few partial function
partial functions
We hide them, as we import only some members
Extra exports
| New name for 'Text.PrettyPrint.<>'
| "GHC.Generics"-based 'rnf' implementation
have a 'Generic'-based default 'rnf' implementation yet.
In order to define instances, use e.g.
'rnf' implementation.
| Hidden internal type-class
can define a local typeclass 'Foldable1', e.g.
@
class Foldable f => Foldable1 f
foldr1 = Data.Foldable.foldr1
@
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
Functions from Debug.Trace
but with DEPRECATED pragma, so -Werror will scream on them.
# DEPRECATED trace "Don't leave me in the code" #
# DEPRECATED traceShowId "Don't leave me in the code" #
# DEPRECATED traceShow "Don't leave me in the code" #
# DEPRECATED traceM "Don't leave me in the code" #
# DEPRECATED traceShowM "Don't leave me in the code" # | # LANGUAGE CPP #
# LANGUAGE FlexibleContexts #
#ifdef MIN_VERSION_base
#define MINVER_base_411 MIN_VERSION_base(4,11,0)
#else
#define MINVER_base_411 (__GLASGOW_HASKELL__ >= 804)
#endif
| This module does two things :
* Acts as a compatibility layer , like @base - compat@.
module Distribution.Compat.Prelude (
Prelude is re - exported , following is hidden :
module BasePrelude,
Semigroup (..),
gmappend, gmempty,
Typeable, TypeRep, typeRep,
Data,
Generic,
NFData (..), genericRnf,
Binary (..),
Structured,
Alternative (..),
MonadPlus (..),
IsString (..),
Map,
Set,
NonEmptySet,
Identity (..),
Proxy (..),
Const (..),
Void,
partitionEithers,
catMaybes, mapMaybe,
fromMaybe,
maybeToList, listToMaybe,
isNothing, isJust,
unfoldr,
isPrefixOf, isSuffixOf,
intercalate, intersperse,
sort, sortBy,
nub, nubBy,
partition,
dropWhileEnd,
* Data . List . NonEmpty
NonEmpty((:|)), nonEmpty, foldl1, foldr1,
head, tail, last, init,
Foldable, foldMap, foldr,
null, length,
find, foldl',
traverse_, for_,
any, all,
toList,
* Data .
Traversable, traverse, sequenceA,
for,
on,
comparing,
first,
liftM, liftM2,
unless, when,
ap, void,
foldM, filterM,
join, guard,
catch, throwIO, evaluate,
Exception (..), IOException, SomeException (..),
tryIO, catchIO, catchExit,
* Control .
deepseq, force,
* Data .
isSpace, isDigit, isUpper, isAlpha, isAlphaNum,
chr, ord,
toLower, toUpper,
absurd, vacuous,
Word,
Word8, Word16, Word32, Word64,
Int8, Int16, Int32, Int64,
(<<>>), (Disp.<+>),
ExitCode (..),
exitWith, exitSuccess, exitFailure,
readMaybe,
trace, traceShow, traceShowId, traceM, traceShowM
) where
import Prelude as BasePrelude hiding
( mapM, mapM_, sequence, null, length, foldr, any, all, head, tail, last, init
, read
, foldr1, foldl1
#if MINVER_base_411
As of base 4.11.0.0 Prelude exports part of Semigroup ( .. ) .
Hide this so we instead rely on Distribution . Compat . Semigroup .
, Semigroup(..)
#endif
, Word
, Traversable, traverse, sequenceA
, Foldable, foldMap
)
AMP
import Data.Foldable
( Foldable(toList),
length,
null,
Foldable(foldMap, foldr),
all,
any,
find,
foldl',
for_,
traverse_ )
import Data.Traversable (Traversable (sequenceA, traverse), for)
import qualified Data.Foldable
import Control.Applicative (Alternative (..), Const(..))
import Control.Arrow (first)
import Control.DeepSeq (NFData (..), deepseq, force)
import Control.Exception (Exception (..), IOException, SomeException (..), catch, evaluate, throwIO)
import Control.Monad (MonadPlus (..), ap, filterM, foldM, guard, join, liftM, liftM2, unless, void, when)
import Data.Char (chr, isAlpha, isAlphaNum, isDigit, isSpace, isUpper, ord, toLower, toUpper)
import Data.Data (Data)
import Data.Either (partitionEithers)
import Data.Function (on)
import Data.Functor.Identity (Identity (..))
import Data.Int (Int16, Int32, Int64, Int8)
import Data.List (dropWhileEnd, intercalate, intersperse, isPrefixOf, isSuffixOf, nub, nubBy, partition, sort, sortBy, unfoldr)
import Data.List.NonEmpty (NonEmpty ((:|)), nonEmpty, head, init, last, tail)
import Data.Map (Map)
import Data.Maybe (catMaybes, fromMaybe, isJust, isNothing, listToMaybe, mapMaybe, maybeToList)
import Data.Ord (comparing)
import Data.Proxy (Proxy (..))
import Data.Set (Set)
import Data.String (IsString (..))
import Data.Void (Void, absurd, vacuous)
import Data.Word (Word, Word16, Word32, Word64, Word8)
import Distribution.Compat.Binary (Binary (..))
import Distribution.Compat.Semigroup (Semigroup (..), gmappend, gmempty)
import Distribution.Compat.Typeable (TypeRep, Typeable, typeRep)
import GHC.Generics ((:*:) ((:*:)), (:+:) (L1, R1), Generic, K1 (unK1), M1 (unM1), Rep (..), U1 (U1), V1)
import System.Exit (ExitCode (..), exitFailure, exitSuccess, exitWith)
import Text.Read (readMaybe)
import qualified Text.PrettyPrint as Disp
import Distribution.Compat.Exception
import Distribution.Compat.NonEmptySet (NonEmptySet)
import Distribution.Utils.Structured (Structured)
import qualified Debug.Trace
(<<>>) :: Disp.Doc -> Disp.Doc -> Disp.Doc
(<<>>) = (Disp.<>)
This is needed in order to support @deepseq < 1.4@ which did n't
> instance NFData MyType where rnf = genericRnf
The implementation has been taken from @deepseq-1.4.2@ 's default
genericRnf :: (Generic a, GNFData (Rep a)) => a -> ()
genericRnf = grnf . from
class GNFData f where
grnf :: f a -> ()
instance GNFData V1 where
grnf = error "Control.DeepSeq.rnf: uninhabited type"
instance GNFData U1 where
grnf U1 = ()
instance NFData a => GNFData (K1 i a) where
grnf = rnf . unK1
# INLINEABLE grnf #
instance GNFData a => GNFData (M1 i c a) where
grnf = grnf . unM1
# INLINEABLE grnf #
instance (GNFData a, GNFData b) => GNFData (a :*: b) where
grnf (x :*: y) = grnf x `seq` grnf y
# INLINEABLE grnf #
instance (GNFData a, GNFData b) => GNFData (a :+: b) where
grnf (L1 x) = grnf x
grnf (R1 x) = grnf x
# INLINEABLE grnf #
TODO : if we want / foldl1 to work on more than NonEmpty , we
instance Foldable1 NonEmpty
: : Foldable1 t = > ( a - > a - > a ) - > t a - > a
foldl1 : : Foldable1 t = > ( a - > a - > a ) - > t a - > a
foldl1 = Data . Foldable.foldl1
# INLINE foldr1 #
foldr1 :: (a -> a -> a) -> NonEmpty a -> a
foldr1 = Data.Foldable.foldr1
# INLINE foldl1 #
foldl1 :: (a -> a -> a) -> NonEmpty a -> a
foldl1 = Data.Foldable.foldl1
Trace
trace :: String -> a -> a
trace = Debug.Trace.trace
traceShowId :: Show a => a -> a
traceShowId x = Debug.Trace.traceShow x x
traceShow :: Show a => a -> b -> b
traceShow = Debug.Trace.traceShow
traceM :: Applicative f => String -> f ()
traceM = Debug.Trace.traceM
traceShowM :: (Show a, Applicative f) => a -> f ()
traceShowM = Debug.Trace.traceShowM
|
a185f2b3e38eaf73e540a8549dcacd68a3d945d27135341fd0dd4874f806e547 | kappelmann/engaging-large-scale-functional-programming | Interface.hs | module Interface(main) where
import qualified Exercise12 as E
main :: IO ()
main = E.main
| null | https://raw.githubusercontent.com/kappelmann/engaging-large-scale-functional-programming/28905255605b55353de2d06239f79448c6fe4230/resources/io_mocking/stocks/test/Interface.hs | haskell | module Interface(main) where
import qualified Exercise12 as E
main :: IO ()
main = E.main
| |
5f1ef95bb57421b2aa36f437c7acf16fdfeb56c3e5397cb0ce1b35b3db01a35c | RyanMcG/chic-text | project.clj | (defproject chic-text "0.2.0"
:description "Helpful functions for formatting text into tables."
:url "-text"
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.6.0"]
[jline "2.12"]]
:profiles {:dev {:plugins [[lein-repack "0.2.7"]]
:dependencies [[incise "0.5.0"]
[com.ryanmcg/incise-codox "0.2.0"]
[com.ryanmcg/incise-vm-layout "0.5.0"]]
:aliases {"incise" ^:pass-through-help ["run" "-m" "incise.core"]}}
:test {:dependencies [[org.clojure/tools.cli "0.3.1"]
[fixturex "0.3.0"]]}
:1.4 {:dependencies [[org.clojure/clojure "1.4.0"]]}
:1.5 {:dependencies [[org.clojure/clojure "1.5.1"]]}
:1.7 {:dependencies [[org.clojure/clojure "1.7.0-alpha5"]]}}
:repack [{:type :clojure
:levels 2
:path "src"
:standalone #{"core"}}])
| null | https://raw.githubusercontent.com/RyanMcG/chic-text/2d901b6ddfb6f2703bedef7230160c1c782fc326/project.clj | clojure | (defproject chic-text "0.2.0"
:description "Helpful functions for formatting text into tables."
:url "-text"
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.6.0"]
[jline "2.12"]]
:profiles {:dev {:plugins [[lein-repack "0.2.7"]]
:dependencies [[incise "0.5.0"]
[com.ryanmcg/incise-codox "0.2.0"]
[com.ryanmcg/incise-vm-layout "0.5.0"]]
:aliases {"incise" ^:pass-through-help ["run" "-m" "incise.core"]}}
:test {:dependencies [[org.clojure/tools.cli "0.3.1"]
[fixturex "0.3.0"]]}
:1.4 {:dependencies [[org.clojure/clojure "1.4.0"]]}
:1.5 {:dependencies [[org.clojure/clojure "1.5.1"]]}
:1.7 {:dependencies [[org.clojure/clojure "1.7.0-alpha5"]]}}
:repack [{:type :clojure
:levels 2
:path "src"
:standalone #{"core"}}])
| |
d4c1c862a834bf7fbe638e70fcfd861049d8c218f454479697b421c80ddd6810 | wavejumper/rehook | todo_test.cljs | (ns todo-test
(:require [rehook.test :as rehook.test :refer-macros [defuitest is io initial-render next-render]]
[todomvc.core :as todo]))
(defn test-ctx [component]
{:system todo/ctx
:system-args []
:shutdown-f #(when-let [f (some-> % meta :stop)]
(f))
:ctx-f (fn [ctx _] ctx)
:props-f identity
:component component})
;; This example shows how we can use the 'data layer' -- eg our dispatch and subscribe
fns to write tests against our components .
(defuitest todo-app--data-layer
[[scenes {:keys [dispatch subscribe]}] (test-ctx todo/todo-app)]
(-> (initial-render scenes
(let [items (subscribe [:todos])]
(is "Subscription should contain 5 items"
(= items todo/initial-items)))
(io "Dispatch :complete-all"
(dispatch [:complete-all])))
(next-render
(let [items (subscribe [:todos])]
(is "After dispatching :complete-all, there should be 5 TODO items selected"
(every? :done (vals items))))
(io "Dispatch :clear-done"
(dispatch [:clear-done])))
(next-render
(let [items (subscribe [:todos])]
(is "After dispatching :clear-done, there should be no TODO items"
(empty? items))))))
;; This example shows how we can inspect the output of our components hiccup
;; to write tests against our components
(defuitest todo-app--view-layer
[[scenes _] (test-ctx todo/todo-app)]
(-> (initial-render scenes
(is "Initial render should show 5 active TODO items"
(= (rehook.test/children :items-left)
[[:strong {} 5]
" "
"items"
" left"]))
(io "Click 'Complete all'"
(rehook.test/invoke-prop :complete-all :onChange [{}])))
(next-render
(is "After clicking 'Complete all', there should be 5 TODO items selected"
(= (rehook.test/children :clear-completed) ["Clear completed " 5]))
(io "Invoking 'Clear completed'"
(rehook.test/invoke-prop :clear-completed :onClick [{}])))
(next-render
(is "After clicking 'Clear completed' there should be no TODO items left"
(nil? (rehook.test/children :clear-completed)))
#_(is "A demo of a failing test"
(= true false)))))
;; defuitest isn't limited to just top-level components!
;; we can test child components as well :)
(defuitest todo-app--todo-stats
[[scenes _] (test-ctx (rehook.test/with-props
todo/todo-stats
{:active 1 :done 1}))]
(-> (initial-render scenes
(is "Initial render should show 1 items left"
(= (rehook.test/children :items-left)
[[:strong {} 1]
" "
"item"
" left"]))))) | null | https://raw.githubusercontent.com/wavejumper/rehook/c1a4207918827f4b738cdad9a9645385e5e10ff4/examples/todomvc/src/test/todo_test.cljs | clojure | This example shows how we can use the 'data layer' -- eg our dispatch and subscribe
This example shows how we can inspect the output of our components hiccup
to write tests against our components
defuitest isn't limited to just top-level components!
we can test child components as well :) | (ns todo-test
(:require [rehook.test :as rehook.test :refer-macros [defuitest is io initial-render next-render]]
[todomvc.core :as todo]))
(defn test-ctx [component]
{:system todo/ctx
:system-args []
:shutdown-f #(when-let [f (some-> % meta :stop)]
(f))
:ctx-f (fn [ctx _] ctx)
:props-f identity
:component component})
fns to write tests against our components .
(defuitest todo-app--data-layer
[[scenes {:keys [dispatch subscribe]}] (test-ctx todo/todo-app)]
(-> (initial-render scenes
(let [items (subscribe [:todos])]
(is "Subscription should contain 5 items"
(= items todo/initial-items)))
(io "Dispatch :complete-all"
(dispatch [:complete-all])))
(next-render
(let [items (subscribe [:todos])]
(is "After dispatching :complete-all, there should be 5 TODO items selected"
(every? :done (vals items))))
(io "Dispatch :clear-done"
(dispatch [:clear-done])))
(next-render
(let [items (subscribe [:todos])]
(is "After dispatching :clear-done, there should be no TODO items"
(empty? items))))))
(defuitest todo-app--view-layer
[[scenes _] (test-ctx todo/todo-app)]
(-> (initial-render scenes
(is "Initial render should show 5 active TODO items"
(= (rehook.test/children :items-left)
[[:strong {} 5]
" "
"items"
" left"]))
(io "Click 'Complete all'"
(rehook.test/invoke-prop :complete-all :onChange [{}])))
(next-render
(is "After clicking 'Complete all', there should be 5 TODO items selected"
(= (rehook.test/children :clear-completed) ["Clear completed " 5]))
(io "Invoking 'Clear completed'"
(rehook.test/invoke-prop :clear-completed :onClick [{}])))
(next-render
(is "After clicking 'Clear completed' there should be no TODO items left"
(nil? (rehook.test/children :clear-completed)))
#_(is "A demo of a failing test"
(= true false)))))
(defuitest todo-app--todo-stats
[[scenes _] (test-ctx (rehook.test/with-props
todo/todo-stats
{:active 1 :done 1}))]
(-> (initial-render scenes
(is "Initial render should show 1 items left"
(= (rehook.test/children :items-left)
[[:strong {} 1]
" "
"item"
" left"]))))) |
b89a92879eeebcc28628cecc0ef0327c66a2b49ed8b068c5519eb5969e076224 | jellelicht/guix | pkg-config.scm | ;;; GNU Guix --- Functional package management for GNU
Copyright © 2012 , 2013 , 2014 , 2016 < >
;;;
;;; This file is part of GNU Guix.
;;;
GNU is free software ; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 3 of the License , or ( at
;;; your option) any later version.
;;;
;;; GNU Guix is distributed in the hope that it will be useful, but
;;; WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages pkg-config)
#:use-module (guix licenses)
#:use-module (guix packages)
#:use-module (guix download)
#:use-module (guix build-system gnu)
#:use-module (guix build-system trivial)
#:export (pkg-config))
;; This is the "primitive" pkg-config package. People should use `pkg-config'
;; (see below) rather than `%pkg-config', but we export `%pkg-config' so that
;; `fold-packages' finds it.
(define-public %pkg-config
(package
(name "pkg-config")
(version "0.29")
(source (origin
(method url-fetch)
(uri (list
(string-append
"-config-" version
".tar.gz")
;; FIXME: The following URL redirects to HTTPS, which
;; creates bootstrapping problems:
< > .
(string-append
"-config-"
version ".tar.gz")))
(sha256
(base32
"0sq09a39wj4cxf8l2jvkq067g08ywfma4v6nhprnf351s82pfl68"))))
(build-system gnu-build-system)
(arguments `(#:configure-flags '("--with-internal-glib")))
(native-search-paths
(list (search-path-specification
(variable "PKG_CONFIG_PATH")
(files '("lib/pkgconfig" "lib64/pkgconfig" "share/pkgconfig")))))
(home-page "-config")
(license gpl2+)
(synopsis "Helper tool used when compiling applications and libraries")
(description
"pkg-config is a helper tool used when compiling applications and
libraries. It helps you insert the correct compiler options on the
command line so an application can use gcc -o test test.c `pkg-config
--libs --cflags glib-2.0` for instance, rather than hard-coding values
on where to find glib (or other libraries). It is language-agnostic, so
it can be used for defining the location of documentation tools, for
instance.")))
(define (cross-pkg-config target)
"Return a pkg-config for TARGET, essentially just a wrapper called
`TARGET-pkg-config', as `configure' scripts like it."
;; See <-mythbuster/pkgconfig/cross-compiling.html>
;; for details.
(package (inherit %pkg-config)
(name (string-append (package-name %pkg-config) "-" target))
(build-system trivial-build-system)
(arguments
`(#:modules ((guix build utils))
#:builder (begin
(use-modules (guix build utils))
(let* ((in (assoc-ref %build-inputs "pkg-config"))
(out (assoc-ref %outputs "out"))
(bin (string-append out "/bin"))
(prog (string-append ,target "-pkg-config"))
(native (string-append in "/bin/pkg-config")))
(mkdir-p bin)
;; Create a `TARGET-pkg-config' -> `pkg-config' symlink.
;; This satisfies the pkg.m4 macros, which use
AC_PROG_TOOL to determine the ` pkg - config ' program
;; name.
(symlink native (string-append bin "/" prog))
;; Also make 'pkg.m4' available, some packages might
;; expect it.
(mkdir-p (string-append out "/share"))
(symlink (string-append in "/share/aclocal")
(string-append out "/share/aclocal"))))))
(native-inputs `(("pkg-config" ,%pkg-config)))
;; Ignore native inputs, and set `PKG_CONFIG_PATH' for target inputs.
(native-search-paths '())
(search-paths (package-native-search-paths %pkg-config))))
(define (pkg-config-for-target target)
"Return a pkg-config package for TARGET, which may be either #f for a native
build, or a GNU triplet."
(if target
(cross-pkg-config target)
%pkg-config))
;; This hack allows us to automatically choose the native or the cross
;; `pkg-config' depending on whether it's being used in a cross-build
;; environment or not.
(define-syntax pkg-config
(identifier-syntax (pkg-config-for-target (%current-target-system))))
| null | https://raw.githubusercontent.com/jellelicht/guix/83cfc9414fca3ab57c949e18c1ceb375a179b59c/gnu/packages/pkg-config.scm | scheme | GNU Guix --- Functional package management for GNU
This file is part of GNU Guix.
you can redistribute it and/or modify it
either version 3 of the License , or ( at
your option) any later version.
GNU Guix is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
This is the "primitive" pkg-config package. People should use `pkg-config'
(see below) rather than `%pkg-config', but we export `%pkg-config' so that
`fold-packages' finds it.
FIXME: The following URL redirects to HTTPS, which
creates bootstrapping problems:
See <-mythbuster/pkgconfig/cross-compiling.html>
for details.
Create a `TARGET-pkg-config' -> `pkg-config' symlink.
This satisfies the pkg.m4 macros, which use
name.
Also make 'pkg.m4' available, some packages might
expect it.
Ignore native inputs, and set `PKG_CONFIG_PATH' for target inputs.
This hack allows us to automatically choose the native or the cross
`pkg-config' depending on whether it's being used in a cross-build
environment or not. | Copyright © 2012 , 2013 , 2014 , 2016 < >
under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
along with GNU . If not , see < / > .
(define-module (gnu packages pkg-config)
#:use-module (guix licenses)
#:use-module (guix packages)
#:use-module (guix download)
#:use-module (guix build-system gnu)
#:use-module (guix build-system trivial)
#:export (pkg-config))
(define-public %pkg-config
(package
(name "pkg-config")
(version "0.29")
(source (origin
(method url-fetch)
(uri (list
(string-append
"-config-" version
".tar.gz")
< > .
(string-append
"-config-"
version ".tar.gz")))
(sha256
(base32
"0sq09a39wj4cxf8l2jvkq067g08ywfma4v6nhprnf351s82pfl68"))))
(build-system gnu-build-system)
(arguments `(#:configure-flags '("--with-internal-glib")))
(native-search-paths
(list (search-path-specification
(variable "PKG_CONFIG_PATH")
(files '("lib/pkgconfig" "lib64/pkgconfig" "share/pkgconfig")))))
(home-page "-config")
(license gpl2+)
(synopsis "Helper tool used when compiling applications and libraries")
(description
"pkg-config is a helper tool used when compiling applications and
libraries. It helps you insert the correct compiler options on the
command line so an application can use gcc -o test test.c `pkg-config
--libs --cflags glib-2.0` for instance, rather than hard-coding values
on where to find glib (or other libraries). It is language-agnostic, so
it can be used for defining the location of documentation tools, for
instance.")))
(define (cross-pkg-config target)
"Return a pkg-config for TARGET, essentially just a wrapper called
`TARGET-pkg-config', as `configure' scripts like it."
(package (inherit %pkg-config)
(name (string-append (package-name %pkg-config) "-" target))
(build-system trivial-build-system)
(arguments
`(#:modules ((guix build utils))
#:builder (begin
(use-modules (guix build utils))
(let* ((in (assoc-ref %build-inputs "pkg-config"))
(out (assoc-ref %outputs "out"))
(bin (string-append out "/bin"))
(prog (string-append ,target "-pkg-config"))
(native (string-append in "/bin/pkg-config")))
(mkdir-p bin)
AC_PROG_TOOL to determine the ` pkg - config ' program
(symlink native (string-append bin "/" prog))
(mkdir-p (string-append out "/share"))
(symlink (string-append in "/share/aclocal")
(string-append out "/share/aclocal"))))))
(native-inputs `(("pkg-config" ,%pkg-config)))
(native-search-paths '())
(search-paths (package-native-search-paths %pkg-config))))
(define (pkg-config-for-target target)
"Return a pkg-config package for TARGET, which may be either #f for a native
build, or a GNU triplet."
(if target
(cross-pkg-config target)
%pkg-config))
(define-syntax pkg-config
(identifier-syntax (pkg-config-for-target (%current-target-system))))
|
ff532027260d874bdf885d9879972ac132cce65ddb5492254e78d4ff1d7acc41 | paoloo/blockchainPOC | project.clj | (defproject bcpoc "0.5.0-HOTASHELL"
:description "BCPOC: a proof-of-concept blockchain-based wallet API"
:url ""
:min-lein-version "2.0.0"
:dependencies [[org.clojure/clojure "1.8.0"]
[compojure "1.5.1"]
[com.chain/chain-sdk-java "1.2.1"]
[ring/ring-defaults "0.2.1"]
[ring/ring-json "0.4.0"]
[ring/ring-jetty-adapter "0.3.8"]]
:plugins [[lein-ring "0.9.7"]]
:ring {:handler bcpoc.handler/app}
:profiles
{:dev {:dependencies [[javax.servlet/servlet-api "2.5"]
[ring/ring-mock "0.3.0"]]}}
:main ^{:skip-aot true} bcpoc.handler)
| null | https://raw.githubusercontent.com/paoloo/blockchainPOC/18593423a70ec4294a66a40ada562d7dc23612d8/project.clj | clojure | (defproject bcpoc "0.5.0-HOTASHELL"
:description "BCPOC: a proof-of-concept blockchain-based wallet API"
:url ""
:min-lein-version "2.0.0"
:dependencies [[org.clojure/clojure "1.8.0"]
[compojure "1.5.1"]
[com.chain/chain-sdk-java "1.2.1"]
[ring/ring-defaults "0.2.1"]
[ring/ring-json "0.4.0"]
[ring/ring-jetty-adapter "0.3.8"]]
:plugins [[lein-ring "0.9.7"]]
:ring {:handler bcpoc.handler/app}
:profiles
{:dev {:dependencies [[javax.servlet/servlet-api "2.5"]
[ring/ring-mock "0.3.0"]]}}
:main ^{:skip-aot true} bcpoc.handler)
| |
efc094dd155f915a6cbebbaf031be2438bf903678ca49cf431949898b0e53739 | ruhler/smten | StableNameEq.hs |
-- | Fast equality by pointer comparison.
module Smten.Runtime.StableNameEq (
stableNameEq,
) where
import System.IO.Unsafe
import System.Mem.StableName
| Return true if the two arguments point to the same location in the heap .
-- If this returns True, it means the arguments are one and the same.
stableNameEq :: a -> a -> Bool
stableNameEq x y = unsafeDupablePerformIO $ do
xnm <- makeStableName x
ynm <- makeStableName y
return (xnm == ynm)
| null | https://raw.githubusercontent.com/ruhler/smten/16dd37fb0ee3809408803d4be20401211b6c4027/smten-base/Smten/Runtime/StableNameEq.hs | haskell | | Fast equality by pointer comparison.
If this returns True, it means the arguments are one and the same. |
module Smten.Runtime.StableNameEq (
stableNameEq,
) where
import System.IO.Unsafe
import System.Mem.StableName
| Return true if the two arguments point to the same location in the heap .
stableNameEq :: a -> a -> Bool
stableNameEq x y = unsafeDupablePerformIO $ do
xnm <- makeStableName x
ynm <- makeStableName y
return (xnm == ynm)
|
030461f1776ce4fb75fac7c1cdf8ceea9709d3ed4608a86e36955c95cc6fac0a | fgalassi/cs61a-sp11 | 3.50.scm | (define (stream-map proc . argstreams)
(if (stream-null? (car argstreams))
the-empty-stream
(cons-stream
(apply proc (map stream-car argstreams))
(apply stream-map
(cons proc (map stream-cdr argstreams))))))
| null | https://raw.githubusercontent.com/fgalassi/cs61a-sp11/66df3b54b03ee27f368c716ae314fd7ed85c4dba/homework/3.50.scm | scheme | (define (stream-map proc . argstreams)
(if (stream-null? (car argstreams))
the-empty-stream
(cons-stream
(apply proc (map stream-car argstreams))
(apply stream-map
(cons proc (map stream-cdr argstreams))))))
| |
05fde558e74f47b1963c816959dc8f1fa6abed85d8fca91e68bc703728532e4b | manuel-serrano/bigloo | depend.scm | ;*---------------------------------------------------------------------*/
* serrano / prgm / project / bigloo / examples / Depend / depend.scm * /
;* */
* Author : * /
* Creation : We d Mar 17 10:49:15 1993 * /
* Last change : Thu Sep 26 09:47:28 1996 ( serrano ) * /
;* */
;* On genere des dependances (d'apres les includes). */
;*---------------------------------------------------------------------*/
;*---------------------------------------------------------------------*/
;* Le module */
;*---------------------------------------------------------------------*/
(module afile (main main))
;*---------------------------------------------------------------------*/
;* *print-dependence* ... */
;*---------------------------------------------------------------------*/
(define *print-dependence* print-make-dependence)
;*---------------------------------------------------------------------*/
;* *prefix-dir* */
;*---------------------------------------------------------------------*/
(define *prefix-dir* "")
;*---------------------------------------------------------------------*/
;* main ... */
;*---------------------------------------------------------------------*/
(define (main argv)
(if (or (null? (cdr argv))
(string=? (cadr argv) "-help"))
(usage)
(let loop ((files (cdr argv))
(files-list '())
(output-file '()))
(cond
((null? files)
(output files-list output-file))
((string=? (car files) "-o")
(if (null? (cdr files))
(usage)
(loop (cddr files)
files-list
(cadr files))))
((string=? (car files) "-rmake")
(set! *print-dependence* print-rmake-dependence)
(loop (cdr files)
files-list
output-file))
((string=? (car files) "-make")
(set! *print-dependence* print-make-dependence)
(loop (cdr files)
files-list
output-file))
((string=? (car files) "-dir")
(if (null? (cdr files))
(usage)
(let ((dir (cadr files)))
(if (not (char=? (string-ref dir (-fx (string-length dir)
1))
#\/))
(set! *prefix-dir* (string-append dir "/"))
(set! *prefix-dir* dir))
(loop (cddr files)
files-list
output-file))))
(else
(loop (cdr files)
(cons (car files) files-list)
output-file))))))
;*---------------------------------------------------------------------*/
;* output ... */
;*---------------------------------------------------------------------*/
(define (output files-list output-file)
(let ((port (if (string? output-file)
(begin
(if (file-exists? output-file)
(rename-file output-file
(string-append output-file "~")))
(open-output-file output-file))
(current-output-port))))
(let loop ((files-list files-list))
(if (null? files-list)
(newline port)
(let* ((file-name (car files-list))
(key (gensym))
(includes (find-includes key file-name #t)))
(if (not (null? includes))
(*print-dependence* port file-name includes))
(loop (cdr files-list)))))))
;*---------------------------------------------------------------------*/
;* print-make-dependence ... */
;*---------------------------------------------------------------------*/
(define (print-make-dependence port file-name includes)
(display (string-append (remove-extansion file-name) ".o") port)
(display #\: port)
(for-each (lambda (i)
(display #\space port)
(display (string-append *prefix-dir* i) port))
includes)
(newline port))
;*---------------------------------------------------------------------*/
;* print-rmake-dependence ... */
;*---------------------------------------------------------------------*/
(define (print-rmake-dependence port file-name includes)
(fprint port ";; " file-name)
(display "(set-depend! " port)
(write file-name port)
(display " '" port)
(write (map (lambda (f) (string-append *prefix-dir* f)) includes) port)
(fprint port ")"))
;*---------------------------------------------------------------------*/
;* find-includes ... */
;*---------------------------------------------------------------------*/
(define (find-includes key file w/error)
(cond
((not (file-exists? file))
(fprint (current-error-port) "*** ERROR:depend:" #\Newline
"Can't find file -- " file)
'())
((eq? (getprop (string->symbol file) 'include) key)
'())
(else
on marque que a ete examine
(putprop! (string->symbol file) 'include key)
(let ((port (open-input-file file)))
(if (not (input-port? port))
(begin
(fprint (current-error-port) "*** ERROR:depend:" #\Newline
"Can't open file -- " file)
'())
(let ((exp (read port)))
(match-case exp
((or (module ?- . ?clauses)
(directives . ?clauses))
(let loop ((clauses clauses)
(includes '()))
(if (null? clauses)
(begin
(close-input-port port)
includes)
(if (eq? (car (car clauses)) 'include)
(loop (cdr clauses)
(append (cdr (car clauses))
(apply append
(map
(lambda (f)
(find-includes key
f
#f))
(cdr (car clauses))))
includes))
(loop (cdr clauses)
includes)))))
(else
(close-input-port port)
(if w/error
(begin
(fprint (current-error-port) "*** ERROR:depend:"
#\Newline
"Illegal file format -- " file)
'())
'())))))))))
;*---------------------------------------------------------------------*/
;* usage ... */
;*---------------------------------------------------------------------*/
(define (usage)
(print "usage: depend [-rmake/-make] [-o output] <file1> <file2> ... <filen>")
(exit -1))
;*---------------------------------------------------------------------*/
;* remove-extansion ... */
;*---------------------------------------------------------------------*/
(define (remove-extansion string)
(let ((len (-fx (string-length string) 1)))
(let loop ((e len)
(s len))
(cond
((=fx s 0)
(substring string 0 (+fx 1 e)))
(else
(if (and (eq? (string-ref string s) #\.)
(=fx e len))
(loop (-fx s 1) (- s 1))
(loop e (-fx s 1))))))))
| null | https://raw.githubusercontent.com/manuel-serrano/bigloo/eb650ed4429155f795a32465e009706bbf1b8d74/examples/Depend/depend.scm | scheme | *---------------------------------------------------------------------*/
* */
* */
* On genere des dependances (d'apres les includes). */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* Le module */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* *print-dependence* ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* *prefix-dir* */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* main ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* output ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* print-make-dependence ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* print-rmake-dependence ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* find-includes ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* usage ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* remove-extansion ... */
*---------------------------------------------------------------------*/ | * serrano / prgm / project / bigloo / examples / Depend / depend.scm * /
* Author : * /
* Creation : We d Mar 17 10:49:15 1993 * /
* Last change : Thu Sep 26 09:47:28 1996 ( serrano ) * /
(module afile (main main))
(define *print-dependence* print-make-dependence)
(define *prefix-dir* "")
(define (main argv)
(if (or (null? (cdr argv))
(string=? (cadr argv) "-help"))
(usage)
(let loop ((files (cdr argv))
(files-list '())
(output-file '()))
(cond
((null? files)
(output files-list output-file))
((string=? (car files) "-o")
(if (null? (cdr files))
(usage)
(loop (cddr files)
files-list
(cadr files))))
((string=? (car files) "-rmake")
(set! *print-dependence* print-rmake-dependence)
(loop (cdr files)
files-list
output-file))
((string=? (car files) "-make")
(set! *print-dependence* print-make-dependence)
(loop (cdr files)
files-list
output-file))
((string=? (car files) "-dir")
(if (null? (cdr files))
(usage)
(let ((dir (cadr files)))
(if (not (char=? (string-ref dir (-fx (string-length dir)
1))
#\/))
(set! *prefix-dir* (string-append dir "/"))
(set! *prefix-dir* dir))
(loop (cddr files)
files-list
output-file))))
(else
(loop (cdr files)
(cons (car files) files-list)
output-file))))))
(define (output files-list output-file)
(let ((port (if (string? output-file)
(begin
(if (file-exists? output-file)
(rename-file output-file
(string-append output-file "~")))
(open-output-file output-file))
(current-output-port))))
(let loop ((files-list files-list))
(if (null? files-list)
(newline port)
(let* ((file-name (car files-list))
(key (gensym))
(includes (find-includes key file-name #t)))
(if (not (null? includes))
(*print-dependence* port file-name includes))
(loop (cdr files-list)))))))
(define (print-make-dependence port file-name includes)
(display (string-append (remove-extansion file-name) ".o") port)
(display #\: port)
(for-each (lambda (i)
(display #\space port)
(display (string-append *prefix-dir* i) port))
includes)
(newline port))
(define (print-rmake-dependence port file-name includes)
(fprint port ";; " file-name)
(display "(set-depend! " port)
(write file-name port)
(display " '" port)
(write (map (lambda (f) (string-append *prefix-dir* f)) includes) port)
(fprint port ")"))
(define (find-includes key file w/error)
(cond
((not (file-exists? file))
(fprint (current-error-port) "*** ERROR:depend:" #\Newline
"Can't find file -- " file)
'())
((eq? (getprop (string->symbol file) 'include) key)
'())
(else
on marque que a ete examine
(putprop! (string->symbol file) 'include key)
(let ((port (open-input-file file)))
(if (not (input-port? port))
(begin
(fprint (current-error-port) "*** ERROR:depend:" #\Newline
"Can't open file -- " file)
'())
(let ((exp (read port)))
(match-case exp
((or (module ?- . ?clauses)
(directives . ?clauses))
(let loop ((clauses clauses)
(includes '()))
(if (null? clauses)
(begin
(close-input-port port)
includes)
(if (eq? (car (car clauses)) 'include)
(loop (cdr clauses)
(append (cdr (car clauses))
(apply append
(map
(lambda (f)
(find-includes key
f
#f))
(cdr (car clauses))))
includes))
(loop (cdr clauses)
includes)))))
(else
(close-input-port port)
(if w/error
(begin
(fprint (current-error-port) "*** ERROR:depend:"
#\Newline
"Illegal file format -- " file)
'())
'())))))))))
(define (usage)
(print "usage: depend [-rmake/-make] [-o output] <file1> <file2> ... <filen>")
(exit -1))
(define (remove-extansion string)
(let ((len (-fx (string-length string) 1)))
(let loop ((e len)
(s len))
(cond
((=fx s 0)
(substring string 0 (+fx 1 e)))
(else
(if (and (eq? (string-ref string s) #\.)
(=fx e len))
(loop (-fx s 1) (- s 1))
(loop e (-fx s 1))))))))
|
b0a153beb1924b9f5a1fe8779ba8f3aa7ee188a47451f4d889d3b7cbe43d06f1 | pflanze/chj-schemelib | seq.scm | Copyright 2019 - 2020 by < >
;;; This file is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License ( GPL ) as published
by the Free Software Foundation , either version 2 of the License , or
;;; (at your option) any later version.
(require (cj-functional-2 either)
(lazy FV)
(debuggable-promise possibly-use-debuggable-promise);; ever hack
(scheme-meta homogenous-vector?)
(cj-struct-tag cj-struct#vector?)
(predicates-1 pair-or-null?)
;; test
)
(export iseq?
iseq-of
iseq+-of
seq?
seq-of
char-iseq+?)
"Sequence interface definition."
XX " Note that unlike Clojure 's seq which are called iseq ? here , these include ( number and other ) vectors . "
(include "cj-standarddeclares.scm")
(possibly-use-debuggable-promise)
(define (iseq? v)
(FV (v)
(pair-or-null? v)))
(define (iseq-of pred)
(lambda (v)
(FV (v)
(if (pair? v)
(pred (car v))
(null? v)))))
(define seq#homogenous-vector?
(let ()
(include "scheme-meta-homogenous-vector--include.scm")
homogenous-vector?))
(define seq? (either iseq?
cj-struct#vector?
seq#homogenous-vector?))
;;XX what is this again, for?
(define (iseq+-of pred)
(lambda (v)
(FV (v)
(and (pair? v)
(pred (car v))))))
(define char-iseq+? (iseq+-of char?))
| null | https://raw.githubusercontent.com/pflanze/chj-schemelib/59ff8476e39f207c2f1d807cfc9670581c8cedd3/seq.scm | scheme | This file is free software; you can redistribute it and/or modify
(at your option) any later version.
ever hack
test
XX what is this again, for? | Copyright 2019 - 2020 by < >
it under the terms of the GNU General Public License ( GPL ) as published
by the Free Software Foundation , either version 2 of the License , or
(require (cj-functional-2 either)
(lazy FV)
(scheme-meta homogenous-vector?)
(cj-struct-tag cj-struct#vector?)
(predicates-1 pair-or-null?)
)
(export iseq?
iseq-of
iseq+-of
seq?
seq-of
char-iseq+?)
"Sequence interface definition."
XX " Note that unlike Clojure 's seq which are called iseq ? here , these include ( number and other ) vectors . "
(include "cj-standarddeclares.scm")
(possibly-use-debuggable-promise)
(define (iseq? v)
(FV (v)
(pair-or-null? v)))
(define (iseq-of pred)
(lambda (v)
(FV (v)
(if (pair? v)
(pred (car v))
(null? v)))))
(define seq#homogenous-vector?
(let ()
(include "scheme-meta-homogenous-vector--include.scm")
homogenous-vector?))
(define seq? (either iseq?
cj-struct#vector?
seq#homogenous-vector?))
(define (iseq+-of pred)
(lambda (v)
(FV (v)
(and (pair? v)
(pred (car v))))))
(define char-iseq+? (iseq+-of char?))
|
b1a1a178e2acac18dbcb59f2417c7dceb111f60bb8dffa98365772ba4e82a938 | jonase/eastwood | clojure_1_11.clj | (ns testcases.clojure-1-11
"A namespace that exercises all new features in Clojure 1.11.0."
(:require
[clojure.math :as math]
[clojure.test :refer [deftest is]]
[totally-does-not-exist :as-alias does-not-exist]))
;; -serving-people-and-programs
(defn destr [& {:keys [a b] :as opts}]
[a b opts])
(defn uses-destr []
[(destr :a 1)
(destr {:a 1 :b 2})])
(defn uses-non-existing []
::does-not-exist/foo)
(defn uses-misc []
[(abs -1)
(random-uuid)
(update-keys (fn [v]
(+ v v))
{1 2
3 4})
(update-vals (fn [v]
(+ v v))
{1 2
3 4})
(parse-long "1")
(parse-double "1.1")
(parse-uuid "fail")
(parse-uuid "true")
(NaN? :not-a-nan)
(infinite? 42)
(math/random)
(math/round 2.4)])
(deftest test-iteration
;; equivalence to line-seq
(let [readme #(java.nio.file.Files/newBufferedReader (.toPath (java.io.File. "project.clj")))]
(is (= (with-open [^java.io.BufferedReader r (readme)]
(vec (iteration (fn [_] (.readLine r)))))
(with-open [^java.io.BufferedReader r (readme)]
(doall (line-seq r))))))
;; paginated API
(let [items 12 pgsize 5
src (vec (repeatedly items #(java.util.UUID/randomUUID)))
api (fn [tok]
(let [tok (or tok 0)]
(when (< tok items)
{:tok (+ tok pgsize)
:ret (subvec src tok (min (+ tok pgsize) items))})))]
(is (= src
(mapcat identity (iteration api :kf :tok :vf :ret))
(into [] cat (iteration api :kf :tok :vf :ret)))))
(let [src [:a :b :c :d :e]
api (fn [k]
(let [k (or k 0)]
(if (< k (count src))
{:item (nth src k)
:k (inc k)})))]
(is (= [:a :b :c]
(vec (iteration api
:some? (comp #{:a :b :c} :item)
:kf :k
:vf :item))
(vec (iteration api
:kf #(some-> % :k #{0 1 2})
:vf :item))))))
| null | https://raw.githubusercontent.com/jonase/eastwood/a983a04045ac9e0c4540693eade5a2e47193fc3c/cases/testcases/clojure_1_11.clj | clojure | -serving-people-and-programs
equivalence to line-seq
paginated API | (ns testcases.clojure-1-11
"A namespace that exercises all new features in Clojure 1.11.0."
(:require
[clojure.math :as math]
[clojure.test :refer [deftest is]]
[totally-does-not-exist :as-alias does-not-exist]))
(defn destr [& {:keys [a b] :as opts}]
[a b opts])
(defn uses-destr []
[(destr :a 1)
(destr {:a 1 :b 2})])
(defn uses-non-existing []
::does-not-exist/foo)
(defn uses-misc []
[(abs -1)
(random-uuid)
(update-keys (fn [v]
(+ v v))
{1 2
3 4})
(update-vals (fn [v]
(+ v v))
{1 2
3 4})
(parse-long "1")
(parse-double "1.1")
(parse-uuid "fail")
(parse-uuid "true")
(NaN? :not-a-nan)
(infinite? 42)
(math/random)
(math/round 2.4)])
(deftest test-iteration
(let [readme #(java.nio.file.Files/newBufferedReader (.toPath (java.io.File. "project.clj")))]
(is (= (with-open [^java.io.BufferedReader r (readme)]
(vec (iteration (fn [_] (.readLine r)))))
(with-open [^java.io.BufferedReader r (readme)]
(doall (line-seq r))))))
(let [items 12 pgsize 5
src (vec (repeatedly items #(java.util.UUID/randomUUID)))
api (fn [tok]
(let [tok (or tok 0)]
(when (< tok items)
{:tok (+ tok pgsize)
:ret (subvec src tok (min (+ tok pgsize) items))})))]
(is (= src
(mapcat identity (iteration api :kf :tok :vf :ret))
(into [] cat (iteration api :kf :tok :vf :ret)))))
(let [src [:a :b :c :d :e]
api (fn [k]
(let [k (or k 0)]
(if (< k (count src))
{:item (nth src k)
:k (inc k)})))]
(is (= [:a :b :c]
(vec (iteration api
:some? (comp #{:a :b :c} :item)
:kf :k
:vf :item))
(vec (iteration api
:kf #(some-> % :k #{0 1 2})
:vf :item))))))
|
8647882ce6cb4c4f4ae6ea76f4194b0e7d036abbbafc5dbe3c41ae971d841f38 | coord-e/mlml | bundler_dune.ml | let () =
(* TODO: Remove hardcoded path *)
let current_dir = "../../../test/" in
Tester.file (Filename.concat current_dir "./bundler_dune/bin/exec.ml") "18490"
;;
| null | https://raw.githubusercontent.com/coord-e/mlml/ec34b1fe8766901fab6842b790267f32b77a2861/test/bundler_dune.ml | ocaml | TODO: Remove hardcoded path | let () =
let current_dir = "../../../test/" in
Tester.file (Filename.concat current_dir "./bundler_dune/bin/exec.ml") "18490"
;;
|
f146ee4c3b295ff08e7e3cbd687af8c7357ffcd5128fc185cc04a9e8bd7db841 | blitz/stumpwm | kmap.lisp | Copyright ( C ) 2003 - 2008
;;
This file is part of stumpwm .
;;
stumpwm is free software ; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 , or ( at your option )
;; any later version.
stumpwm is distributed in the hope that it will be useful ,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;; GNU General Public License for more details.
You should have received a copy of the GNU General Public License
;; along with this software; see the file COPYING. If not, write to
the Free Software Foundation , Inc. , 59 Temple Place , Suite 330 ,
Boston , MA 02111 - 1307 USA
;; Commentary:
;;
This file handles keymaps
;;
;; Code:
(in-package stumpwm)
(export '(*top-map*
define-key
kbd
lookup-command
lookup-key
make-sparse-keymap
undefine-key))
(defvar *top-map* nil
"The top level key map. This is where you'll find the binding for the
@dfn{prefix map}.")
(defstruct key
keysym shift control meta alt hyper super)
(defstruct kmap
bindings)
(defstruct binding
key command)
(defun make-sparse-keymap ()
"Create an empty keymap. If you want to create a new list of bindings
in the key binding tree, this is where you start. To hang frame
related bindings off @kbd{C-t C-f} one might use the following code:
@example
\(defvar *my-frame-bindings*
(let ((m (stumpwm:make-sparse-keymap)))
(stumpwm:define-key m (stumpwm:kbd \"f\") \"curframe\")
(stumpwm:define-key m (stumpwm:kbd \"M-b\") \"move-focus left\")
m ; NOTE: this is important
))
\(stumpwm:define-key stumpwm:*root-map* (stumpwm:kbd \"C-f\") '*my-frame-bindings*)
@end example"
(make-kmap))
(defun lookup-command (keymap command)
"Return a list of keys that are bound to command"
(loop for i in (kmap-bindings keymap)
when (equal command (binding-command i))
collect (binding-key i)))
(defun lookup-key (keymap key &optional accept-default)
(labels ((retcmd (key)
(when key (binding-command key))))
(or (retcmd (find key (kmap-bindings keymap) :key 'binding-key :test 'equalp))
(and accept-default
(retcmd (find t (kmap-bindings keymap) :key 'binding-key))))))
(defun key-mods-p (key)
(or (key-shift key)
(key-control key)
(key-meta key)
(key-alt key)
(key-hyper key)
(key-super key)))
(defun x11-mods (key &optional with-numlock with-capslock)
"Return the modifiers for key in a format that clx understands. if
WITH-NUMLOCK is non-nil then include the numlock modifier. if
WITH-CAPSLOCK is non-nil then include the capslock modifier. Most of
the time these just gets in the way."
(let (mods)
(when (key-shift key) (push :shift mods))
(when (key-control key) (push :control mods))
(when (key-meta key) (setf mods (append (modifiers-meta *modifiers*) mods)))
(when (key-alt key) (setf mods (append (modifiers-alt *modifiers*) mods)))
(when (key-hyper key) (setf mods (append (modifiers-hyper *modifiers*) mods)))
(when (key-super key) (setf mods (append (modifiers-super *modifiers*) mods)))
(when with-numlock (setf mods (append (modifiers-numlock *modifiers*) mods)))
(when with-capslock (push :lock mods))
(apply 'xlib:make-state-mask mods)))
(defun report-kbd-parse-error (c stream)
(format stream "Failed to parse key string: ~s" (slot-value c 'string)))
(define-condition kbd-parse-error (stumpwm-error)
((string :initarg :string))
(:report report-kbd-parse-error)
(:documentation "Raised when a kbd string failed to parse."))
(defun parse-mods (mods end)
"MODS is a sequence of <MOD CHAR> #\- pairs. Return a list suitable
for passing as the last argument to (apply #'make-key ...)"
(unless (evenp end)
(signal 'kbd-parse-error :string mods))
(apply #'nconc (loop for i from 0 below end by 2
if (char/= (char mods (1+ i)) #\-)
do (signal 'kbd-parse)
collect (case (char mods i)
(#\M (list :meta t))
(#\A (list :alt t))
(#\C (list :control t))
(#\H (list :hyper t))
(#\s (list :super t))
(#\S (list :shift t))
(t (signal 'kbd-parse-error :string mods))))))
(defun parse-key (string)
"Parse STRING and return a key structure. Raise an error of type
kbd-parse if the key failed to parse."
(let* ((p (when (> (length string) 2)
(position #\- string :from-end t :end (- (length string) 1))))
(mods (parse-mods string (if p (1+ p) 0)))
(keysym (stumpwm-name->keysym (subseq string (if p (1+ p) 0)))))
(if keysym
(apply 'make-key :keysym keysym mods)
(signal 'kbd-parse-error :string string))))
(defun parse-key-seq (keys)
"KEYS is a key sequence. Parse it and return the list of keys."
(mapcar 'parse-key (split-string keys)))
(defun kbd (keys)
"This compiles a key string into a key structure used by
`define-key', `undefine-key', `set-prefix-key' and
others."
;; XXX: define-key needs to be fixed to handle a list of keys
(first (parse-key-seq keys)))
(defun copy-key-into (from to)
"copy the contents of TO into FROM."
(setf (key-keysym to) (key-keysym from)
(key-shift to) (key-shift from)
(key-control to) (key-control from)
(key-meta to) (key-meta from)
(key-alt to) (key-alt from)
(key-hyper to) (key-hyper from)
(key-super to) (key-super from)))
(defun print-mods (key)
(concatenate 'string
(when (key-control key) "C-")
(when (key-meta key) "M-")
(when (key-alt key) "A-")
(when (key-shift key) "S-")
(when (key-super key) "s-")
(when (key-hyper key) "H-")))
(defun print-key (key)
(format nil "~a~a"
(print-mods key)
(keysym->stumpwm-name (key-keysym key))))
(defun print-key-seq (seq)
(format nil "^5*~{~a~^ ~}^n" (mapcar 'print-key seq)))
(defun define-key (map key command)
"Add a keybinding mapping for the key, @var{key}, to the command,
@var{command}, in the specified keymap. If @var{command} is nil, remove an
exising binding. For example,
@example
\(stumpwm:define-key stumpwm:*root-map* (stumpwm:kbd \"C-z\") \"echo Zzzzz...\")
@end example
Now when you type C-t C-z, you'll see the text ``Zzzzz...'' pop up."
(declare (type kmap map) (type (or key (eql t)) key))
(let ((binding (find key (kmap-bindings map) :key 'binding-key :test 'equalp)))
(if command
(setf (kmap-bindings map)
(append (if binding
(delete binding (kmap-bindings map))
(kmap-bindings map))
(list (make-binding :key key :command command))))
(setf (kmap-bindings map) (delete binding (kmap-bindings map))))
;; We need to tell the X server when changing the top-map bindings.
(when (eq map *top-map*)
(sync-keys))))
;; Not really needed. Keep it for backward compatibility.
(defun undefine-key (map key)
"Clear the key binding in the specified keybinding."
(define-key map key nil))
(defun lookup-key-sequence (kmap key-seq)
"Return the command bound to the key sequenc, KEY-SEQ, in keymap KMAP."
(when (kmap-symbol-p kmap)
(setf kmap (symbol-value kmap)))
(check-type kmap kmap)
(let* ((key (car key-seq))
(cmd (lookup-key kmap key)))
(cond ((null (cdr key-seq))
cmd)
(cmd
(if (kmap-or-kmap-symbol-p cmd)
(lookup-key-sequence cmd (cdr key-seq))
cmd))
(t nil))))
(defun kmap-symbol-p (x)
(and (symbolp x)
(boundp x)
(kmap-p (symbol-value x))))
(defun kmap-or-kmap-symbol-p (x)
(or (kmap-p x)
(kmap-symbol-p x)))
(defun dereference-kmaps (kmaps)
(mapcar (lambda (m)
(if (kmap-symbol-p m)
(symbol-value m)
m))
kmaps))
(defun search-kmap (command keymap &key (test 'equal))
"Search the keymap for the specified binding. Return the key
sequences that run binding."
(labels ((search-it (cmd kmap key-seq)
(when (kmap-symbol-p kmap)
(setf kmap (symbol-value kmap)))
(check-type kmap kmap)
(loop for i in (kmap-bindings kmap)
if (funcall test (binding-command i) cmd)
collect (cons (binding-key i) key-seq)
else if (kmap-or-kmap-symbol-p (binding-command i))
append (search-it cmd (binding-command i) (cons (binding-key i) key-seq)))))
(mapcar 'reverse (search-it command keymap nil))))
;;; The Top Map
(defvar *top-map-list* nil)
(defun push-top-map (new-top)
(push *top-map* *top-map-list*)
(setf *top-map* new-top)
(sync-keys))
(defun pop-top-map ()
(when *top-map-list*
(setf *top-map* (pop *top-map-list*))
(sync-keys)
t))
| null | https://raw.githubusercontent.com/blitz/stumpwm/439180985920a628b18d4426f1a29b1c36576531/kmap.lisp | lisp |
you can redistribute it and/or modify
either version 2 , or ( at your option )
any later version.
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this software; see the file COPYING. If not, write to
Commentary:
Code:
NOTE: this is important
XXX: define-key needs to be fixed to handle a list of keys
We need to tell the X server when changing the top-map bindings.
Not really needed. Keep it for backward compatibility.
The Top Map | Copyright ( C ) 2003 - 2008
This file is part of stumpwm .
it under the terms of the GNU General Public License as published by
stumpwm is distributed in the hope that it will be useful ,
You should have received a copy of the GNU General Public License
the Free Software Foundation , Inc. , 59 Temple Place , Suite 330 ,
Boston , MA 02111 - 1307 USA
This file handles keymaps
(in-package stumpwm)
(export '(*top-map*
define-key
kbd
lookup-command
lookup-key
make-sparse-keymap
undefine-key))
(defvar *top-map* nil
"The top level key map. This is where you'll find the binding for the
@dfn{prefix map}.")
(defstruct key
keysym shift control meta alt hyper super)
(defstruct kmap
bindings)
(defstruct binding
key command)
(defun make-sparse-keymap ()
"Create an empty keymap. If you want to create a new list of bindings
in the key binding tree, this is where you start. To hang frame
related bindings off @kbd{C-t C-f} one might use the following code:
@example
\(defvar *my-frame-bindings*
(let ((m (stumpwm:make-sparse-keymap)))
(stumpwm:define-key m (stumpwm:kbd \"f\") \"curframe\")
(stumpwm:define-key m (stumpwm:kbd \"M-b\") \"move-focus left\")
))
\(stumpwm:define-key stumpwm:*root-map* (stumpwm:kbd \"C-f\") '*my-frame-bindings*)
@end example"
(make-kmap))
(defun lookup-command (keymap command)
"Return a list of keys that are bound to command"
(loop for i in (kmap-bindings keymap)
when (equal command (binding-command i))
collect (binding-key i)))
(defun lookup-key (keymap key &optional accept-default)
(labels ((retcmd (key)
(when key (binding-command key))))
(or (retcmd (find key (kmap-bindings keymap) :key 'binding-key :test 'equalp))
(and accept-default
(retcmd (find t (kmap-bindings keymap) :key 'binding-key))))))
(defun key-mods-p (key)
(or (key-shift key)
(key-control key)
(key-meta key)
(key-alt key)
(key-hyper key)
(key-super key)))
(defun x11-mods (key &optional with-numlock with-capslock)
"Return the modifiers for key in a format that clx understands. if
WITH-NUMLOCK is non-nil then include the numlock modifier. if
WITH-CAPSLOCK is non-nil then include the capslock modifier. Most of
the time these just gets in the way."
(let (mods)
(when (key-shift key) (push :shift mods))
(when (key-control key) (push :control mods))
(when (key-meta key) (setf mods (append (modifiers-meta *modifiers*) mods)))
(when (key-alt key) (setf mods (append (modifiers-alt *modifiers*) mods)))
(when (key-hyper key) (setf mods (append (modifiers-hyper *modifiers*) mods)))
(when (key-super key) (setf mods (append (modifiers-super *modifiers*) mods)))
(when with-numlock (setf mods (append (modifiers-numlock *modifiers*) mods)))
(when with-capslock (push :lock mods))
(apply 'xlib:make-state-mask mods)))
(defun report-kbd-parse-error (c stream)
(format stream "Failed to parse key string: ~s" (slot-value c 'string)))
(define-condition kbd-parse-error (stumpwm-error)
((string :initarg :string))
(:report report-kbd-parse-error)
(:documentation "Raised when a kbd string failed to parse."))
(defun parse-mods (mods end)
"MODS is a sequence of <MOD CHAR> #\- pairs. Return a list suitable
for passing as the last argument to (apply #'make-key ...)"
(unless (evenp end)
(signal 'kbd-parse-error :string mods))
(apply #'nconc (loop for i from 0 below end by 2
if (char/= (char mods (1+ i)) #\-)
do (signal 'kbd-parse)
collect (case (char mods i)
(#\M (list :meta t))
(#\A (list :alt t))
(#\C (list :control t))
(#\H (list :hyper t))
(#\s (list :super t))
(#\S (list :shift t))
(t (signal 'kbd-parse-error :string mods))))))
(defun parse-key (string)
"Parse STRING and return a key structure. Raise an error of type
kbd-parse if the key failed to parse."
(let* ((p (when (> (length string) 2)
(position #\- string :from-end t :end (- (length string) 1))))
(mods (parse-mods string (if p (1+ p) 0)))
(keysym (stumpwm-name->keysym (subseq string (if p (1+ p) 0)))))
(if keysym
(apply 'make-key :keysym keysym mods)
(signal 'kbd-parse-error :string string))))
(defun parse-key-seq (keys)
"KEYS is a key sequence. Parse it and return the list of keys."
(mapcar 'parse-key (split-string keys)))
(defun kbd (keys)
"This compiles a key string into a key structure used by
`define-key', `undefine-key', `set-prefix-key' and
others."
(first (parse-key-seq keys)))
(defun copy-key-into (from to)
"copy the contents of TO into FROM."
(setf (key-keysym to) (key-keysym from)
(key-shift to) (key-shift from)
(key-control to) (key-control from)
(key-meta to) (key-meta from)
(key-alt to) (key-alt from)
(key-hyper to) (key-hyper from)
(key-super to) (key-super from)))
(defun print-mods (key)
(concatenate 'string
(when (key-control key) "C-")
(when (key-meta key) "M-")
(when (key-alt key) "A-")
(when (key-shift key) "S-")
(when (key-super key) "s-")
(when (key-hyper key) "H-")))
(defun print-key (key)
(format nil "~a~a"
(print-mods key)
(keysym->stumpwm-name (key-keysym key))))
(defun print-key-seq (seq)
(format nil "^5*~{~a~^ ~}^n" (mapcar 'print-key seq)))
(defun define-key (map key command)
"Add a keybinding mapping for the key, @var{key}, to the command,
@var{command}, in the specified keymap. If @var{command} is nil, remove an
exising binding. For example,
@example
\(stumpwm:define-key stumpwm:*root-map* (stumpwm:kbd \"C-z\") \"echo Zzzzz...\")
@end example
Now when you type C-t C-z, you'll see the text ``Zzzzz...'' pop up."
(declare (type kmap map) (type (or key (eql t)) key))
(let ((binding (find key (kmap-bindings map) :key 'binding-key :test 'equalp)))
(if command
(setf (kmap-bindings map)
(append (if binding
(delete binding (kmap-bindings map))
(kmap-bindings map))
(list (make-binding :key key :command command))))
(setf (kmap-bindings map) (delete binding (kmap-bindings map))))
(when (eq map *top-map*)
(sync-keys))))
(defun undefine-key (map key)
"Clear the key binding in the specified keybinding."
(define-key map key nil))
(defun lookup-key-sequence (kmap key-seq)
"Return the command bound to the key sequenc, KEY-SEQ, in keymap KMAP."
(when (kmap-symbol-p kmap)
(setf kmap (symbol-value kmap)))
(check-type kmap kmap)
(let* ((key (car key-seq))
(cmd (lookup-key kmap key)))
(cond ((null (cdr key-seq))
cmd)
(cmd
(if (kmap-or-kmap-symbol-p cmd)
(lookup-key-sequence cmd (cdr key-seq))
cmd))
(t nil))))
(defun kmap-symbol-p (x)
(and (symbolp x)
(boundp x)
(kmap-p (symbol-value x))))
(defun kmap-or-kmap-symbol-p (x)
(or (kmap-p x)
(kmap-symbol-p x)))
(defun dereference-kmaps (kmaps)
(mapcar (lambda (m)
(if (kmap-symbol-p m)
(symbol-value m)
m))
kmaps))
(defun search-kmap (command keymap &key (test 'equal))
"Search the keymap for the specified binding. Return the key
sequences that run binding."
(labels ((search-it (cmd kmap key-seq)
(when (kmap-symbol-p kmap)
(setf kmap (symbol-value kmap)))
(check-type kmap kmap)
(loop for i in (kmap-bindings kmap)
if (funcall test (binding-command i) cmd)
collect (cons (binding-key i) key-seq)
else if (kmap-or-kmap-symbol-p (binding-command i))
append (search-it cmd (binding-command i) (cons (binding-key i) key-seq)))))
(mapcar 'reverse (search-it command keymap nil))))
(defvar *top-map-list* nil)
(defun push-top-map (new-top)
(push *top-map* *top-map-list*)
(setf *top-map* new-top)
(sync-keys))
(defun pop-top-map ()
(when *top-map-list*
(setf *top-map* (pop *top-map-list*))
(sync-keys)
t))
|
2a138538f3694f7a0143a05ad6d231ef3f08ce73cc9e4a29ca6b1b11a50c27cd | haskell-tools/haskell-tools | Renamed_res.hs | module Refactor.OrganizeImports.MakeExplicit.Renamed where
import Refactor.OrganizeImports.MakeExplicit.Source as Src (A(..))
x = B
| null | https://raw.githubusercontent.com/haskell-tools/haskell-tools/b1189ab4f63b29bbf1aa14af4557850064931e32/src/builtin-refactorings/examples/Refactor/OrganizeImports/MakeExplicit/Renamed_res.hs | haskell | module Refactor.OrganizeImports.MakeExplicit.Renamed where
import Refactor.OrganizeImports.MakeExplicit.Source as Src (A(..))
x = B
| |
f1b6641c6d4867081aa3cfc1c501024b0c305b8fe6177a331e034128d2d437b0 | chetmurthy/poly-protobuf | test24_ml.ml | module T = Test24_types
module Pb = Test24_pb
module Pp = Test24_pp
let decode_ref_data () = (T.Value "value" : T.a)
let () =
let mode = Test_util.parse_args () in
match mode with
| Test_util.Decode ->
Test_util.decode "test24.c2ml.data" Pb.decode_a Pp.pp_a (decode_ref_data ())
| Test_util.Encode ->
Test_util.encode "test24.ml2c.data" Pb.encode_a (decode_ref_data ())
| null | https://raw.githubusercontent.com/chetmurthy/poly-protobuf/1f80774af6472fa30ee2fb10d0ef91905a13a144/tests/testdata/integration-tests/test24_ml.ml | ocaml | module T = Test24_types
module Pb = Test24_pb
module Pp = Test24_pp
let decode_ref_data () = (T.Value "value" : T.a)
let () =
let mode = Test_util.parse_args () in
match mode with
| Test_util.Decode ->
Test_util.decode "test24.c2ml.data" Pb.decode_a Pp.pp_a (decode_ref_data ())
| Test_util.Encode ->
Test_util.encode "test24.ml2c.data" Pb.encode_a (decode_ref_data ())
| |
e60b43c8d50e74f3a13dd2f8f00e07acf77bf74824a09d43f1d367d2ac3dfcf4 | alura-cursos/datomic-introducao | db.clj | (ns ecommerce.db
(:use clojure.pprint)
(:require [datomic.api :as d]))
(def db-uri "datomic:dev:4334/ecommerce")
(defn abre-conexao []
(d/create-database db-uri)
(d/connect db-uri))
(defn apaga-banco []
(d/delete-database db-uri))
; Produtos
; id?
nome String 1 = = > Computador Novo
; slug String 1 ==> /computador_novo
preco ponto flutuante 1 = = > 3500.10
; id_entidade atributo valor
15 : produto / nome Computador Novo ID_TX operacao
15 : produto / slug /computador_novo ID_TX operacao
15 : produto / preco 3500.10 ID_TX operacao
17 : produto / nome Telefone Caro ID_TX operacao
17 : produto / slug /telefone ID_TX operacao
17 : produto / preco 8888.88 ID_TX operacao
(def schema [{:db/ident :produto/nome
:db/valueType :db.type/string
:db/cardinality :db.cardinality/one
:db/doc "O nome de um produto"}
{:db/ident :produto/slug
:db/valueType :db.type/string
:db/cardinality :db.cardinality/one
:db/doc "O caminho para acessar esse produto via http"}
{:db/ident :produto/preco
:db/valueType :db.type/bigdec
:db/cardinality :db.cardinality/one
:db/doc "O preço de um produto com precisão monetária"}])
(defn cria-schema [conn]
(d/transact conn schema))
| null | https://raw.githubusercontent.com/alura-cursos/datomic-introducao/cfb214135fed0670ee90090b63a38d8287673ac6/aula2.1/ecommerce/src/ecommerce/db.clj | clojure | Produtos
id?
slug String 1 ==> /computador_novo
id_entidade atributo valor | (ns ecommerce.db
(:use clojure.pprint)
(:require [datomic.api :as d]))
(def db-uri "datomic:dev:4334/ecommerce")
(defn abre-conexao []
(d/create-database db-uri)
(d/connect db-uri))
(defn apaga-banco []
(d/delete-database db-uri))
nome String 1 = = > Computador Novo
preco ponto flutuante 1 = = > 3500.10
15 : produto / nome Computador Novo ID_TX operacao
15 : produto / slug /computador_novo ID_TX operacao
15 : produto / preco 3500.10 ID_TX operacao
17 : produto / nome Telefone Caro ID_TX operacao
17 : produto / slug /telefone ID_TX operacao
17 : produto / preco 8888.88 ID_TX operacao
(def schema [{:db/ident :produto/nome
:db/valueType :db.type/string
:db/cardinality :db.cardinality/one
:db/doc "O nome de um produto"}
{:db/ident :produto/slug
:db/valueType :db.type/string
:db/cardinality :db.cardinality/one
:db/doc "O caminho para acessar esse produto via http"}
{:db/ident :produto/preco
:db/valueType :db.type/bigdec
:db/cardinality :db.cardinality/one
:db/doc "O preço de um produto com precisão monetária"}])
(defn cria-schema [conn]
(d/transact conn schema))
|
3e55bd75cef2542a9c81269d6d469e0c18d5d5fcbd530fbc1869b6c84e0aa514 | karamellpelle/grid | FriendsObject.hs | grid is a game written in Haskell
Copyright ( C ) 2018
--
-- This file is part of grid.
--
-- grid is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
-- (at your option) any later version.
--
-- grid is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
You should have received a copy of the GNU General Public License
-- along with grid. If not, see </>.
--
module MEnv.Env.FriendsObject
(
-- tmp
module MEnv.Env.FriendsObject.GLFW,
--
) where
-- tmp
import MEnv.Env.FriendsObject.GLFW
--
| null | https://raw.githubusercontent.com/karamellpelle/grid/56729e63ed6404fd6cfd6d11e73fa358f03c386f/designer/source/MEnv/Env/FriendsObject.hs | haskell |
This file is part of grid.
grid is free software: you can redistribute it and/or modify
(at your option) any later version.
grid is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with grid. If not, see </>.
tmp
tmp
| grid is a game written in Haskell
Copyright ( C ) 2018
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
module MEnv.Env.FriendsObject
(
module MEnv.Env.FriendsObject.GLFW,
) where
import MEnv.Env.FriendsObject.GLFW
|
2322631cc4c34179d455fc9668dc3dae548d7e76bdbef456ff954b9237c854a1 | yuriy-chumak/ol | srfi-87.scm | (define-library (scheme srfi-87)
(import (scheme core) (srfi 87))
(export
(exports (srfi 87))) )
| null | https://raw.githubusercontent.com/yuriy-chumak/ol/0a38b3f9dbb720aa3fbc3e8219429ebe240c86bf/libraries/scheme/srfi-87.scm | scheme | (define-library (scheme srfi-87)
(import (scheme core) (srfi 87))
(export
(exports (srfi 87))) )
| |
15adf356ada2541eba555eb4a8e5d89ecebf3914046a229c85de2391771dc8e8 | herd/herdtools7 | LISALang.ml | (****************************************************************************)
(* the diy toolsuite *)
(* *)
, University College London , UK .
, INRIA Paris - Rocquencourt , France .
(* *)
Copyright 2017 - present Institut National de Recherche en Informatique et
(* en Automatique and the authors. All rights reserved. *)
(* *)
This software is governed by the CeCILL - B license under French law and
(* abiding by the rules of distribution of free software. You can use, *)
modify and/ or redistribute the software under the terms of the CeCILL - B
license as circulated by CEA , CNRS and INRIA at the following URL
" " . We also give a copy in LICENSE.txt .
(****************************************************************************)
module Make(V:Constant.S) = struct
open Printf
module A = LISAArch_litmus.Make(V)
type arch_reg = A.reg
type t = A.Out.t
module Tmpl = A.Out
let checkVal f v = f v
module RegSet = A.Out.RegSet
module RegMap = A.Out.RegMap
let debug = false
let do_dump compile_val _compile_addr compile_out_reg
chan indent env proc t =
let rec dump_ins k ts = match ts with
| [] -> ()
| t::ts ->
fprintf chan "%s%s\n" indent (Tmpl.to_string t) ;
(*
fprintf chan "\"%-20s%c_litmus_P%i_%i\\n\\t\"\n"
(to_string t) A.comment proc k ;
*)
dump_ins (k+1) ts in
(* Prefix *)
let reg_env = Tmpl.get_reg_env A.I.error A.I.warn t in
let all_regs = Tmpl.all_regs_in_tmpl t in
let init =
List.fold_left (fun m (r,v) -> RegMap.add r v m)
RegMap.empty
t.Tmpl.init in
RegSet.iter
(fun r ->
let ty =
try RegMap.find r env
with Not_found ->
try RegMap.find r reg_env with
| Not_found -> Compile.base in
if debug then
eprintf "%i:%s -> %s\n" proc (Tmpl.tag_reg r) (CType.dump ty) ;
fprintf chan "%s%s %s%s;\n" indent
(CType.dump ty)
(Tmpl.tag_reg r)
(try
let v = RegMap.find r init in
sprintf " = %s" (compile_val v)
with Not_found -> ""))
all_regs ;
(* Code *)
begin match t.Tmpl.code with
| [] -> ()
| code -> dump_ins 0 code
end ;
(* Postfix *)
fprintf chan "%sbarrier();\n" indent ;
List.iter
(fun reg ->
fprintf chan "%s%s = %s;\n" indent
(compile_out_reg proc reg) (Tmpl.tag_reg reg))
t.Tmpl.final ;
()
(*****************)
(* As a function *)
(*****************)
let compile_val_fun v =
let open Constant in
match v with
| Symbolic sym -> sprintf "%s" (Constant.as_address sym)
| Concrete _|ConcreteVector _ -> Tmpl.dump_v v
| Label _ -> Warn.user_error "No label value in LISA"
| Tag _ -> Warn.user_error "No tag in LISA"
| PteVal _ -> Warn.user_error "No pteval in LISA"
| Instruction _ -> Warn.user_error "No instruction value in LISA"
and compile_addr_fun x = sprintf "*%s" x
and compile_out_reg_fun p r = sprintf "*%s" (Tmpl.dump_out_reg p r)
let dump_fun ?user chan _args0 env globEnv _volatileEnv proc t =
assert (Misc.is_none user) ;
let addrs_proc = A.Out.get_addrs_only t in
let addrs =
List.map
(fun x ->
let ty =
try List.assoc x globEnv
with Not_found -> Compile.base in
let ty = SkelUtil.dump_global_type x ty in
sprintf "%s *%s" ty x)
addrs_proc in
let outs =
List.map
(fun x ->
let ty =
try RegMap.find x env
with Not_found -> assert false in
let x = Tmpl.dump_out_reg proc x in
sprintf "%s *%s" (CType.dump ty) x) t.Tmpl.final in
let params =
let p = addrs@outs in
match p with
| [] -> "void"
| _::_ -> String.concat "," p in
LangUtils.dump_code_def chan false Mode.Std proc params ;
do_dump
(checkVal compile_val_fun)
compile_addr_fun
(fun p r -> sprintf "*%s" (Tmpl.dump_out_reg p r))
chan " " env proc t ;
fprintf chan "}\n\n" ;
()
let compile_addr_call x = sprintf "&_a->%s[_i]" x
let compile_out_reg_call proc reg =
sprintf "&_a->%s" (Tmpl.compile_out_reg proc reg)
let dump_call f_id args0
_tr_idx chan indent _env _globEnv _volatileEnv proc t =
let addrs_proc = Tmpl.get_addrs_only t in
let addrs = List.map compile_addr_call addrs_proc
and outs = List.map (compile_out_reg_call proc) t.Tmpl.final in
let args = String.concat "," (args0@addrs@outs) in
LangUtils.dump_code_call chan indent f_id args
let dump _chan _indent _env _globEnv _volatileEnv _proc _t = ()
end
| null | https://raw.githubusercontent.com/herd/herdtools7/aef181181408f40dbdb4ed35c5338335a83d983f/litmus/LISALang.ml | ocaml | **************************************************************************
the diy toolsuite
en Automatique and the authors. All rights reserved.
abiding by the rules of distribution of free software. You can use,
**************************************************************************
fprintf chan "\"%-20s%c_litmus_P%i_%i\\n\\t\"\n"
(to_string t) A.comment proc k ;
Prefix
Code
Postfix
***************
As a function
*************** | , University College London , UK .
, INRIA Paris - Rocquencourt , France .
Copyright 2017 - present Institut National de Recherche en Informatique et
This software is governed by the CeCILL - B license under French law and
modify and/ or redistribute the software under the terms of the CeCILL - B
license as circulated by CEA , CNRS and INRIA at the following URL
" " . We also give a copy in LICENSE.txt .
module Make(V:Constant.S) = struct
open Printf
module A = LISAArch_litmus.Make(V)
type arch_reg = A.reg
type t = A.Out.t
module Tmpl = A.Out
let checkVal f v = f v
module RegSet = A.Out.RegSet
module RegMap = A.Out.RegMap
let debug = false
let do_dump compile_val _compile_addr compile_out_reg
chan indent env proc t =
let rec dump_ins k ts = match ts with
| [] -> ()
| t::ts ->
fprintf chan "%s%s\n" indent (Tmpl.to_string t) ;
dump_ins (k+1) ts in
let reg_env = Tmpl.get_reg_env A.I.error A.I.warn t in
let all_regs = Tmpl.all_regs_in_tmpl t in
let init =
List.fold_left (fun m (r,v) -> RegMap.add r v m)
RegMap.empty
t.Tmpl.init in
RegSet.iter
(fun r ->
let ty =
try RegMap.find r env
with Not_found ->
try RegMap.find r reg_env with
| Not_found -> Compile.base in
if debug then
eprintf "%i:%s -> %s\n" proc (Tmpl.tag_reg r) (CType.dump ty) ;
fprintf chan "%s%s %s%s;\n" indent
(CType.dump ty)
(Tmpl.tag_reg r)
(try
let v = RegMap.find r init in
sprintf " = %s" (compile_val v)
with Not_found -> ""))
all_regs ;
begin match t.Tmpl.code with
| [] -> ()
| code -> dump_ins 0 code
end ;
fprintf chan "%sbarrier();\n" indent ;
List.iter
(fun reg ->
fprintf chan "%s%s = %s;\n" indent
(compile_out_reg proc reg) (Tmpl.tag_reg reg))
t.Tmpl.final ;
()
let compile_val_fun v =
let open Constant in
match v with
| Symbolic sym -> sprintf "%s" (Constant.as_address sym)
| Concrete _|ConcreteVector _ -> Tmpl.dump_v v
| Label _ -> Warn.user_error "No label value in LISA"
| Tag _ -> Warn.user_error "No tag in LISA"
| PteVal _ -> Warn.user_error "No pteval in LISA"
| Instruction _ -> Warn.user_error "No instruction value in LISA"
and compile_addr_fun x = sprintf "*%s" x
and compile_out_reg_fun p r = sprintf "*%s" (Tmpl.dump_out_reg p r)
let dump_fun ?user chan _args0 env globEnv _volatileEnv proc t =
assert (Misc.is_none user) ;
let addrs_proc = A.Out.get_addrs_only t in
let addrs =
List.map
(fun x ->
let ty =
try List.assoc x globEnv
with Not_found -> Compile.base in
let ty = SkelUtil.dump_global_type x ty in
sprintf "%s *%s" ty x)
addrs_proc in
let outs =
List.map
(fun x ->
let ty =
try RegMap.find x env
with Not_found -> assert false in
let x = Tmpl.dump_out_reg proc x in
sprintf "%s *%s" (CType.dump ty) x) t.Tmpl.final in
let params =
let p = addrs@outs in
match p with
| [] -> "void"
| _::_ -> String.concat "," p in
LangUtils.dump_code_def chan false Mode.Std proc params ;
do_dump
(checkVal compile_val_fun)
compile_addr_fun
(fun p r -> sprintf "*%s" (Tmpl.dump_out_reg p r))
chan " " env proc t ;
fprintf chan "}\n\n" ;
()
let compile_addr_call x = sprintf "&_a->%s[_i]" x
let compile_out_reg_call proc reg =
sprintf "&_a->%s" (Tmpl.compile_out_reg proc reg)
let dump_call f_id args0
_tr_idx chan indent _env _globEnv _volatileEnv proc t =
let addrs_proc = Tmpl.get_addrs_only t in
let addrs = List.map compile_addr_call addrs_proc
and outs = List.map (compile_out_reg_call proc) t.Tmpl.final in
let args = String.concat "," (args0@addrs@outs) in
LangUtils.dump_code_call chan indent f_id args
let dump _chan _indent _env _globEnv _volatileEnv _proc _t = ()
end
|
7c1993094a0671f3a7e7dfa18326c1bff6d494ece4a1e2abea2e115ec18dadf1 | avelino/awesome-racket | main.rkt | #lang racket
(require markdown
racket/file
2htdp/batch-io)
(define readmemd (parse-markdown (file->string "./README.md")))
(define markdown2html (xexpr->string `
(html ()
(head () (title "A curated list of awesome Racket frameworks, libraries and software, maintained by Community - Awesome Racket"))
(body () ,@readmemd))))
(write-file "tmp/index.html" markdown2html)
| null | https://raw.githubusercontent.com/avelino/awesome-racket/a9a58baf31484d555d986301d725c6a0eef70d44/main.rkt | racket | #lang racket
(require markdown
racket/file
2htdp/batch-io)
(define readmemd (parse-markdown (file->string "./README.md")))
(define markdown2html (xexpr->string `
(html ()
(head () (title "A curated list of awesome Racket frameworks, libraries and software, maintained by Community - Awesome Racket"))
(body () ,@readmemd))))
(write-file "tmp/index.html" markdown2html)
| |
6cfde331926640a41ffceb81f226a83bddddc47e9bf3442dd88b971821972b3d | static-analysis-engineering/codehawk | bCHBCFunDeclarations.mli | = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Binary Analyzer C Parser using CIL
Author : ------------------------------------------------------------------------------
The MIT License ( MIT )
Copyright ( c ) 2021 - 2022 Aarno Labs LLC
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Binary Analyzer C Parser using CIL
Author: Henny Sipma
------------------------------------------------------------------------------
The MIT License (MIT)
Copyright (c) 2021-2022 Aarno Labs LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================================= *)
bchcil
open BCHCilTypes
val mk_bcfundeclarations: unit -> bcfundeclarations_int
| null | https://raw.githubusercontent.com/static-analysis-engineering/codehawk/418c95f77e6ece464df2ade519f27d95aa0119d6/CodeHawk/CHB/bchcil/bCHBCFunDeclarations.mli | ocaml | = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Binary Analyzer C Parser using CIL
Author : ------------------------------------------------------------------------------
The MIT License ( MIT )
Copyright ( c ) 2021 - 2022 Aarno Labs LLC
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Binary Analyzer C Parser using CIL
Author: Henny Sipma
------------------------------------------------------------------------------
The MIT License (MIT)
Copyright (c) 2021-2022 Aarno Labs LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================================= *)
bchcil
open BCHCilTypes
val mk_bcfundeclarations: unit -> bcfundeclarations_int
| |
1c1deb1d713fb0d2121c77373d128b527afd99e1317730a9bd57eff2cc8569f1 | manu291/dypgen | parse_tree.ml | (* parse_tree.ml *)
type expr =
| Lident of string
| Int of int
| Pair of (expr * expr)
| Cons of string * (int * (expr list))
type rhs = Token of string | Nt of (string * string)
let rec str_expr exp = match exp with
| Int i -> string_of_int i
| Pair (a,b) -> "("^(str_expr a)^","^(str_expr b)^")"
| Cons (cons,(0,_)) -> cons
| Cons (cons,(1,[o])) ->
cons^"("^(str_expr o)^")"
| Cons (cons,(2,[o1;o2])) ->
cons^"("^(str_expr o1)^","^(str_expr o2)^")"
| Lident x -> x
| _ -> failwith "str_expr"
module Ordered_string =
struct
type t = string
let compare = Pervasives.compare
end
module String_map = Map.Make(Ordered_string)
let rec substitute env expr = match expr with
| Int i -> Int i
| Lident s ->
begin try String_map.find s env
with Not_found -> Lident s end
| Pair (a,b) -> Pair (substitute env a,substitute env b)
| Cons (c,(n,l)) ->
Cons (c,(n,(List.map (substitute env) l))) | null | https://raw.githubusercontent.com/manu291/dypgen/59d11b4c70b8d0971348ca6913839ff1fb4f9b5f/demos/demo_ocamllex/parse_tree.ml | ocaml | parse_tree.ml |
type expr =
| Lident of string
| Int of int
| Pair of (expr * expr)
| Cons of string * (int * (expr list))
type rhs = Token of string | Nt of (string * string)
let rec str_expr exp = match exp with
| Int i -> string_of_int i
| Pair (a,b) -> "("^(str_expr a)^","^(str_expr b)^")"
| Cons (cons,(0,_)) -> cons
| Cons (cons,(1,[o])) ->
cons^"("^(str_expr o)^")"
| Cons (cons,(2,[o1;o2])) ->
cons^"("^(str_expr o1)^","^(str_expr o2)^")"
| Lident x -> x
| _ -> failwith "str_expr"
module Ordered_string =
struct
type t = string
let compare = Pervasives.compare
end
module String_map = Map.Make(Ordered_string)
let rec substitute env expr = match expr with
| Int i -> Int i
| Lident s ->
begin try String_map.find s env
with Not_found -> Lident s end
| Pair (a,b) -> Pair (substitute env a,substitute env b)
| Cons (c,(n,l)) ->
Cons (c,(n,(List.map (substitute env) l))) |
a90001da6be96fb23272750441420f7efa12639fff230834d4908233b339d530 | haskell/text | Lazy.hs | # LANGUAGE BangPatterns , DeriveDataTypeable #
# OPTIONS_HADDOCK not - home #
-- |
-- Module : Data.Text.Internal.Lazy
Copyright : ( c ) 2009 , 2010
--
-- License : BSD-style
-- Maintainer :
-- Stability : experimental
Portability : GHC
--
-- /Warning/: this is an internal module, and does not have a stable
-- API or name. Functions in this module may not check or enforce
-- preconditions expected by public modules. Use at your own risk!
--
-- A module containing private 'Text' internals. This exposes the
-- 'Text' representation and low level construction functions.
-- Modules which extend the 'Text' system may need to use this module.
module Data.Text.Internal.Lazy
(
Text(..)
, chunk
, empty
, foldrChunks
, foldlChunks
-- * Data type invariant and abstraction functions
-- $invariant
, strictInvariant
, lazyInvariant
, showStructure
-- * Chunk allocation sizes
, defaultChunkSize
, smallChunkSize
, chunkOverhead
, equal
) where
import Data.Bits (shiftL)
import Data.Text ()
import Data.Typeable (Typeable)
import Foreign.Storable (sizeOf)
import qualified Data.Text.Array as A
import qualified Data.Text.Internal as T
data Text = Empty
| Chunk {-# UNPACK #-} !T.Text Text
deriving (Typeable)
-- $invariant
--
-- The data type invariant for lazy 'Text': Every 'Text' is either 'Empty' or
-- consists of non-null 'T.Text's. All functions must preserve this,
-- and the QC properties must check this.
-- | Check the invariant strictly.
strictInvariant :: Text -> Bool
strictInvariant Empty = True
strictInvariant x@(Chunk (T.Text _ _ len) cs)
| len > 0 = strictInvariant cs
| otherwise = error $ "Data.Text.Lazy: invariant violation: "
++ showStructure x
-- | Check the invariant lazily.
lazyInvariant :: Text -> Text
lazyInvariant Empty = Empty
lazyInvariant x@(Chunk c@(T.Text _ _ len) cs)
| len > 0 = Chunk c (lazyInvariant cs)
| otherwise = error $ "Data.Text.Lazy: invariant violation: "
++ showStructure x
-- | Display the internal structure of a lazy 'Text'.
showStructure :: Text -> String
showStructure Empty = "Empty"
showStructure (Chunk t Empty) = "Chunk " ++ show t ++ " Empty"
showStructure (Chunk t ts) =
"Chunk " ++ show t ++ " (" ++ showStructure ts ++ ")"
-- | Smart constructor for 'Chunk'. Guarantees the data type invariant.
chunk :: T.Text -> Text -> Text
# INLINE chunk #
chunk t@(T.Text _ _ len) ts | len == 0 = ts
| otherwise = Chunk t ts
-- | Smart constructor for 'Empty'.
empty :: Text
{-# INLINE [0] empty #-}
empty = Empty
-- | Consume the chunks of a lazy 'Text' with a natural right fold.
foldrChunks :: (T.Text -> a -> a) -> a -> Text -> a
foldrChunks f z = go
where go Empty = z
go (Chunk c cs) = f c (go cs)
# INLINE foldrChunks #
-- | Consume the chunks of a lazy 'Text' with a strict, tail-recursive,
-- accumulating left fold.
foldlChunks :: (a -> T.Text -> a) -> a -> Text -> a
foldlChunks f z = go z
where go !a Empty = a
go !a (Chunk c cs) = go (f a c) cs
# INLINE foldlChunks #
| Currently set to 16 KiB , less the memory management overhead .
defaultChunkSize :: Int
defaultChunkSize = 16384 - chunkOverhead
# INLINE defaultChunkSize #
| Currently set to 128 bytes , less the memory management overhead .
smallChunkSize :: Int
smallChunkSize = 128 - chunkOverhead
# INLINE smallChunkSize #
| The memory management overhead . Currently this is tuned for GHC only .
chunkOverhead :: Int
chunkOverhead = sizeOf (undefined :: Int) `shiftL` 1
# INLINE chunkOverhead #
equal :: Text -> Text -> Bool
equal Empty Empty = True
equal Empty _ = False
equal _ Empty = False
equal (Chunk (T.Text arrA offA lenA) as) (Chunk (T.Text arrB offB lenB) bs) =
case compare lenA lenB of
LT -> A.equal arrA offA arrB offB lenA &&
as `equal` Chunk (T.Text arrB (offB + lenA) (lenB - lenA)) bs
EQ -> A.equal arrA offA arrB offB lenA &&
as `equal` bs
GT -> A.equal arrA offA arrB offB lenB &&
Chunk (T.Text arrA (offA + lenB) (lenA - lenB)) as `equal` bs
| null | https://raw.githubusercontent.com/haskell/text/3488190f25e1ee5a5dea6b1a593d8a5819e76a1e/src/Data/Text/Internal/Lazy.hs | haskell | |
Module : Data.Text.Internal.Lazy
License : BSD-style
Maintainer :
Stability : experimental
/Warning/: this is an internal module, and does not have a stable
API or name. Functions in this module may not check or enforce
preconditions expected by public modules. Use at your own risk!
A module containing private 'Text' internals. This exposes the
'Text' representation and low level construction functions.
Modules which extend the 'Text' system may need to use this module.
* Data type invariant and abstraction functions
$invariant
* Chunk allocation sizes
# UNPACK #
$invariant
The data type invariant for lazy 'Text': Every 'Text' is either 'Empty' or
consists of non-null 'T.Text's. All functions must preserve this,
and the QC properties must check this.
| Check the invariant strictly.
| Check the invariant lazily.
| Display the internal structure of a lazy 'Text'.
| Smart constructor for 'Chunk'. Guarantees the data type invariant.
| Smart constructor for 'Empty'.
# INLINE [0] empty #
| Consume the chunks of a lazy 'Text' with a natural right fold.
| Consume the chunks of a lazy 'Text' with a strict, tail-recursive,
accumulating left fold. | # LANGUAGE BangPatterns , DeriveDataTypeable #
# OPTIONS_HADDOCK not - home #
Copyright : ( c ) 2009 , 2010
Portability : GHC
module Data.Text.Internal.Lazy
(
Text(..)
, chunk
, empty
, foldrChunks
, foldlChunks
, strictInvariant
, lazyInvariant
, showStructure
, defaultChunkSize
, smallChunkSize
, chunkOverhead
, equal
) where
import Data.Bits (shiftL)
import Data.Text ()
import Data.Typeable (Typeable)
import Foreign.Storable (sizeOf)
import qualified Data.Text.Array as A
import qualified Data.Text.Internal as T
data Text = Empty
deriving (Typeable)
strictInvariant :: Text -> Bool
strictInvariant Empty = True
strictInvariant x@(Chunk (T.Text _ _ len) cs)
| len > 0 = strictInvariant cs
| otherwise = error $ "Data.Text.Lazy: invariant violation: "
++ showStructure x
lazyInvariant :: Text -> Text
lazyInvariant Empty = Empty
lazyInvariant x@(Chunk c@(T.Text _ _ len) cs)
| len > 0 = Chunk c (lazyInvariant cs)
| otherwise = error $ "Data.Text.Lazy: invariant violation: "
++ showStructure x
showStructure :: Text -> String
showStructure Empty = "Empty"
showStructure (Chunk t Empty) = "Chunk " ++ show t ++ " Empty"
showStructure (Chunk t ts) =
"Chunk " ++ show t ++ " (" ++ showStructure ts ++ ")"
chunk :: T.Text -> Text -> Text
# INLINE chunk #
chunk t@(T.Text _ _ len) ts | len == 0 = ts
| otherwise = Chunk t ts
empty :: Text
empty = Empty
foldrChunks :: (T.Text -> a -> a) -> a -> Text -> a
foldrChunks f z = go
where go Empty = z
go (Chunk c cs) = f c (go cs)
# INLINE foldrChunks #
foldlChunks :: (a -> T.Text -> a) -> a -> Text -> a
foldlChunks f z = go z
where go !a Empty = a
go !a (Chunk c cs) = go (f a c) cs
# INLINE foldlChunks #
| Currently set to 16 KiB , less the memory management overhead .
defaultChunkSize :: Int
defaultChunkSize = 16384 - chunkOverhead
# INLINE defaultChunkSize #
| Currently set to 128 bytes , less the memory management overhead .
smallChunkSize :: Int
smallChunkSize = 128 - chunkOverhead
# INLINE smallChunkSize #
| The memory management overhead . Currently this is tuned for GHC only .
chunkOverhead :: Int
chunkOverhead = sizeOf (undefined :: Int) `shiftL` 1
# INLINE chunkOverhead #
equal :: Text -> Text -> Bool
equal Empty Empty = True
equal Empty _ = False
equal _ Empty = False
equal (Chunk (T.Text arrA offA lenA) as) (Chunk (T.Text arrB offB lenB) bs) =
case compare lenA lenB of
LT -> A.equal arrA offA arrB offB lenA &&
as `equal` Chunk (T.Text arrB (offB + lenA) (lenB - lenA)) bs
EQ -> A.equal arrA offA arrB offB lenA &&
as `equal` bs
GT -> A.equal arrA offA arrB offB lenB &&
Chunk (T.Text arrA (offA + lenB) (lenA - lenB)) as `equal` bs
|
cd05ffb193a0f2e93a9c3c0c5680c08f2ddceeed575ca5a715903c483e02b756 | reborg/clojure-essential-reference | 1.clj | < 1 >
(def document
(xml/parse
< 2 >
(keys document) ; <3>
(: tag : attrs : content ) | null | https://raw.githubusercontent.com/reborg/clojure-essential-reference/c37fa19d45dd52b2995a191e3e96f0ebdc3f6d69/TheToolbox/clojure.xml/1.clj | clojure | <3> | < 1 >
(def document
(xml/parse
< 2 >
(: tag : attrs : content ) |
f1ca43bdbc2ba7f6a9c8ce6cc528bed104c4d1930f1b63022f915850a788d5a5 | Bogdanp/racket-component | run-all-tests.rkt | #lang racket/base
(require rackunit
rackunit/text-ui)
(require "graph-tests.rkt"
"system-tests.rkt"
"testing-tests.rkt")
(define all-component-tests
(test-suite
"component"
graph-tests
system-tests
testing-tests))
(module+ main
(run-tests all-component-tests))
| null | https://raw.githubusercontent.com/Bogdanp/racket-component/3ca0ce9f27f7d2f0bc4e71434b558e3088d7da84/component-test/tests/component/run-all-tests.rkt | racket | #lang racket/base
(require rackunit
rackunit/text-ui)
(require "graph-tests.rkt"
"system-tests.rkt"
"testing-tests.rkt")
(define all-component-tests
(test-suite
"component"
graph-tests
system-tests
testing-tests))
(module+ main
(run-tests all-component-tests))
| |
9cd8c28fb11e555d3466527d8812b6bbeff43b07e7f514fb5d317dd2528b7b13 | rmculpepper/crypto | age.rkt | #lang racket/base
(require rackunit
racket/match
racket/file
racket/port
racket/class
racket/system
crypto
crypto/libcrypto
crypto/util/age
(submod crypto/util/age private-for-testing))
(crypto-factories libcrypto-factory)
(define sk1 (generate-private-key 'ecx '((curve x25519))))
(define sk2 (generate-private-key 'ecx '((curve x25519))))
(define pk1 (pk-key->public-only-key sk1))
(define pk2 (pk-key->public-only-key sk2))
(define pass #"wakawaka")
(define msg #"When you come to a fork in the road, take it.")
(define enc1 (age-encrypt (list pk1) msg))
(define enc2 (age-encrypt (list pk2) msg))
(define enc12 (age-encrypt (list pk1 pk2) msg))
(define encp (age-encrypt (list `(scrypt ,pass)) msg))
(check-equal? (age-decrypt (list sk1) enc1) msg)
(check-equal? (age-decrypt (list sk2) enc2) msg)
(check-equal? (age-decrypt (list sk1 sk2) enc1) msg)
(check-equal? (age-decrypt (list sk1) enc12) msg)
(check-exn #rx"age-decrypt: no identity matched"
(lambda () (age-decrypt (list sk1) enc2)))
(check-exn #rx"age-decrypt: no identity matched"
(lambda () (age-decrypt (list `(scrypt ,pass)) enc12)))
(check-equal? (age-decrypt (list `(scrypt ,pass)) encp) msg)
(check-exn #rx"age-encrypt: scrypt recipient "
(lambda () (age-encrypt (list sk1 `(scrypt ,pass)) msg)))
(check-exn #rx"age-encrypt: scrypt recipient "
(lambda () (age-encrypt (list `(scrypt ,pass) `(scrypt #"other")) msg)))
;; ----------------------------------------
;; Fixed key generated by age-keygen:
(define skastr "AGE-SECRET-KEY-1NENQKDH6A5HPG5ZNVE9YYD9XY57UHKDNRTRGM0PY4KEKRFDJ22RQSVGXJ2")
(define pkastr "age1qpjdzg0gss09w7ddsrjy43zlqxs7avr4gxegzd7tgrts3fngry2smsmrwe")
$ echo -n fork in the road | /opt / r / age / age --encrypt -r $ pkastr
(define enca
#"age-encryption.org/v1\n-> X25519 Pey5gFv/w6UEFs5tNDAcNX1O7THABKZKNjdFmTGCrEI\nWIRv6dMu/JCukaH07nsNsJoXtGSON4q2XT5S8Rbado0\n--- hOlj4iNBnFSWIeCZ3mM2RD59APU+iCFpQjXCNWR+rWw\n\325\263\3416\310g\340\231\264\\\254\311\330\312\203i\370}\271c\273C@>\267\364qB\260+\216|\262\273\35\215\355\2710\232\255m\366?:m\211z")
(define ska (datum->pk-key skastr 'age/v1-private))
(define pka (datum->pk-key pkastr 'age/v1-public))
(check-equal? (age-decrypt (list ska) enca) #"fork in the road")
;; ----------------------------------------
;; Compatibility with age and age-keygen:
(define age-keygen-exe (find-executable-path "age-keygen"))
(define age-exe (find-executable-path "age"))
(unless age-exe (printf "Cannot find `age` executable. Skipping compat tests.\n"))
(define-values (pks3 sks3 pk3 sk3)
(cond [age-keygen-exe
(define keygen-err (open-output-bytes))
(define keygen-out (with-output-to-string
(lambda ()
;; Apparently age-keygen prints public key to
;; stderr if stdout is not terminal.
(parameterize ((current-error-port keygen-err))
(system* age-keygen-exe)))))
(define keygen-out-rx
#rx"^# created: [^\n]*\n# public key: (age1[a-z0-9]+)\n(AGE-SECRET-KEY-1[A-Z0-9]+)\n")
(match (regexp-match keygen-out-rx keygen-out)
[(list _ pks sks)
(define pk (datum->pk-key pks 'age/v1-public))
(define sk (datum->pk-key sks 'age/v1-private))
(values pks sks pk sk)]
[_ (error 'keygen "can't parse keygen output\n out: ~s\n err: ~s"
keygen-out (get-output-string keygen-err))])]
[else
(values (pk-key->datum pk1 'age/v1-public) (pk-key->datum sk1 'age/v1-private) pk1 sk1)]))
(when age-exe
(define enc3
(with-output-to-bytes
(lambda ()
(parameterize ((current-input-port (open-input-bytes msg)))
(system* age-exe "--encrypt" "--recipient" pks3)))))
(check-equal? (age-decrypt (list sk3) enc3) msg))
(when age-exe
(define enc3 (age-encrypt (list pk3) msg))
(define ident-file (make-temporary-file))
(with-output-to-file ident-file #:exists 'append
(lambda ()
(printf "# temp secret key generated by Racket crypto/tests/age\n")
(printf "~a\n" sks3)))
(check-equal? (with-output-to-bytes
(lambda ()
(parameterize ((current-input-port (open-input-bytes enc3)))
(system* age-exe "--decrypt" "--identity" ident-file))))
msg)
(delete-file ident-file))
;; ------------------------------------------------------------
;; Reject low scrypt work factors
(let ([low-work-age (new age% (scrypt-ln 3))])
(define enc
(let ([out (open-output-bytes)])
(send low-work-age age-encrypt (list `(scrypt ,pass)) (open-input-bytes msg) out)
(get-output-bytes out)))
(check-exn #rx"age-decrypt: bad scrypt stanza.*work factor"
(lambda () (age-decrypt (list `(scrypt ,pass)) enc))))
;; ------------------------------------------------------------
Error if no impls
(check-exn #rx"age-maker: no [a-z]* implementation found"
(lambda ()
(parameterize ((crypto-factories null))
(new age% (init-who 'age-maker)))))
(check-exn #rx"age-encrypt: no [a-z]* implementation found"
(lambda ()
(parameterize ((crypto-factories null))
(age-encrypt (list pk1) msg))))
| null | https://raw.githubusercontent.com/rmculpepper/crypto/63e131c06d54756c3f36833ad0b700d56d6a75c8/crypto-test/tests/age.rkt | racket | ----------------------------------------
Fixed key generated by age-keygen:
----------------------------------------
Compatibility with age and age-keygen:
Apparently age-keygen prints public key to
stderr if stdout is not terminal.
------------------------------------------------------------
Reject low scrypt work factors
------------------------------------------------------------ | #lang racket/base
(require rackunit
racket/match
racket/file
racket/port
racket/class
racket/system
crypto
crypto/libcrypto
crypto/util/age
(submod crypto/util/age private-for-testing))
(crypto-factories libcrypto-factory)
(define sk1 (generate-private-key 'ecx '((curve x25519))))
(define sk2 (generate-private-key 'ecx '((curve x25519))))
(define pk1 (pk-key->public-only-key sk1))
(define pk2 (pk-key->public-only-key sk2))
(define pass #"wakawaka")
(define msg #"When you come to a fork in the road, take it.")
(define enc1 (age-encrypt (list pk1) msg))
(define enc2 (age-encrypt (list pk2) msg))
(define enc12 (age-encrypt (list pk1 pk2) msg))
(define encp (age-encrypt (list `(scrypt ,pass)) msg))
(check-equal? (age-decrypt (list sk1) enc1) msg)
(check-equal? (age-decrypt (list sk2) enc2) msg)
(check-equal? (age-decrypt (list sk1 sk2) enc1) msg)
(check-equal? (age-decrypt (list sk1) enc12) msg)
(check-exn #rx"age-decrypt: no identity matched"
(lambda () (age-decrypt (list sk1) enc2)))
(check-exn #rx"age-decrypt: no identity matched"
(lambda () (age-decrypt (list `(scrypt ,pass)) enc12)))
(check-equal? (age-decrypt (list `(scrypt ,pass)) encp) msg)
(check-exn #rx"age-encrypt: scrypt recipient "
(lambda () (age-encrypt (list sk1 `(scrypt ,pass)) msg)))
(check-exn #rx"age-encrypt: scrypt recipient "
(lambda () (age-encrypt (list `(scrypt ,pass) `(scrypt #"other")) msg)))
(define skastr "AGE-SECRET-KEY-1NENQKDH6A5HPG5ZNVE9YYD9XY57UHKDNRTRGM0PY4KEKRFDJ22RQSVGXJ2")
(define pkastr "age1qpjdzg0gss09w7ddsrjy43zlqxs7avr4gxegzd7tgrts3fngry2smsmrwe")
$ echo -n fork in the road | /opt / r / age / age --encrypt -r $ pkastr
(define enca
#"age-encryption.org/v1\n-> X25519 Pey5gFv/w6UEFs5tNDAcNX1O7THABKZKNjdFmTGCrEI\nWIRv6dMu/JCukaH07nsNsJoXtGSON4q2XT5S8Rbado0\n--- hOlj4iNBnFSWIeCZ3mM2RD59APU+iCFpQjXCNWR+rWw\n\325\263\3416\310g\340\231\264\\\254\311\330\312\203i\370}\271c\273C@>\267\364qB\260+\216|\262\273\35\215\355\2710\232\255m\366?:m\211z")
(define ska (datum->pk-key skastr 'age/v1-private))
(define pka (datum->pk-key pkastr 'age/v1-public))
(check-equal? (age-decrypt (list ska) enca) #"fork in the road")
(define age-keygen-exe (find-executable-path "age-keygen"))
(define age-exe (find-executable-path "age"))
(unless age-exe (printf "Cannot find `age` executable. Skipping compat tests.\n"))
(define-values (pks3 sks3 pk3 sk3)
(cond [age-keygen-exe
(define keygen-err (open-output-bytes))
(define keygen-out (with-output-to-string
(lambda ()
(parameterize ((current-error-port keygen-err))
(system* age-keygen-exe)))))
(define keygen-out-rx
#rx"^# created: [^\n]*\n# public key: (age1[a-z0-9]+)\n(AGE-SECRET-KEY-1[A-Z0-9]+)\n")
(match (regexp-match keygen-out-rx keygen-out)
[(list _ pks sks)
(define pk (datum->pk-key pks 'age/v1-public))
(define sk (datum->pk-key sks 'age/v1-private))
(values pks sks pk sk)]
[_ (error 'keygen "can't parse keygen output\n out: ~s\n err: ~s"
keygen-out (get-output-string keygen-err))])]
[else
(values (pk-key->datum pk1 'age/v1-public) (pk-key->datum sk1 'age/v1-private) pk1 sk1)]))
(when age-exe
(define enc3
(with-output-to-bytes
(lambda ()
(parameterize ((current-input-port (open-input-bytes msg)))
(system* age-exe "--encrypt" "--recipient" pks3)))))
(check-equal? (age-decrypt (list sk3) enc3) msg))
(when age-exe
(define enc3 (age-encrypt (list pk3) msg))
(define ident-file (make-temporary-file))
(with-output-to-file ident-file #:exists 'append
(lambda ()
(printf "# temp secret key generated by Racket crypto/tests/age\n")
(printf "~a\n" sks3)))
(check-equal? (with-output-to-bytes
(lambda ()
(parameterize ((current-input-port (open-input-bytes enc3)))
(system* age-exe "--decrypt" "--identity" ident-file))))
msg)
(delete-file ident-file))
(let ([low-work-age (new age% (scrypt-ln 3))])
(define enc
(let ([out (open-output-bytes)])
(send low-work-age age-encrypt (list `(scrypt ,pass)) (open-input-bytes msg) out)
(get-output-bytes out)))
(check-exn #rx"age-decrypt: bad scrypt stanza.*work factor"
(lambda () (age-decrypt (list `(scrypt ,pass)) enc))))
Error if no impls
(check-exn #rx"age-maker: no [a-z]* implementation found"
(lambda ()
(parameterize ((crypto-factories null))
(new age% (init-who 'age-maker)))))
(check-exn #rx"age-encrypt: no [a-z]* implementation found"
(lambda ()
(parameterize ((crypto-factories null))
(age-encrypt (list pk1) msg))))
|
4ffcea7ac2938fbb0c5be2447294aeb3f7dc04f5780aafe1c8944c01e9a6812f | 2600hz-archive/whistle | rebar_rel_utils.erl | -*- erlang - indent - level : 4;indent - tabs - mode : nil -*-
%% ex: ts=4 sw=4 et
%% -------------------------------------------------------------------
%%
rebar : Erlang Build Tools
%%
Copyright ( c ) 2009 ( )
%%
%% Permission is hereby granted, free of charge, to any person obtaining a copy
%% of this software and associated documentation files (the "Software"), to deal
in the Software without restriction , including without limitation the rights
%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software , and to permit persons to whom the Software is
%% furnished to do so, subject to the following conditions:
%%
%% The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software .
%%
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
%% THE SOFTWARE.
%% -------------------------------------------------------------------
-module(rebar_rel_utils).
-export([is_rel_dir/0,
is_rel_dir/1,
get_reltool_release_info/1,
get_rel_release_info/1,
get_rel_release_info/2,
get_rel_apps/1,
get_rel_apps/2,
get_previous_release_path/0,
get_rel_file_path/2,
load_config/1,
get_sys_tuple/1,
get_target_dir/1,
get_target_parent_dir/1]).
-include("rebar.hrl").
is_rel_dir() ->
is_rel_dir(rebar_utils:get_cwd()).
is_rel_dir(Dir) ->
Fname = filename:join([Dir, "reltool.config"]),
case filelib:is_regular(Fname) of
true ->
{true, Fname};
false ->
false
end.
%% Get release name and version from a reltool.config
get_reltool_release_info([{sys, Config}| _]) ->
{rel, Name, Ver, _} = proplists:lookup(rel, Config),
{Name, Ver};
get_reltool_release_info(ReltoolFile) when is_list(ReltoolFile) ->
case file:consult(ReltoolFile) of
{ok, ReltoolConfig} ->
get_reltool_release_info(ReltoolConfig);
_ ->
?ABORT("Failed to parse ~s~n", [ReltoolFile])
end.
%% Get release name and version from a rel file
get_rel_release_info(RelFile) ->
case file:consult(RelFile) of
{ok, [{release, {Name, Ver}, _, _}]} ->
{Name, Ver};
_ ->
?ABORT("Failed to parse ~s~n", [RelFile])
end.
%% Get release name and version from a name and a path
get_rel_release_info(Name, Path) ->
RelPath = get_rel_file_path(Name, Path),
get_rel_release_info(RelPath).
%% Get list of apps included in a release from a rel file
get_rel_apps(RelFile) ->
case file:consult(RelFile) of
{ok, [{release, _, _, Apps}]} ->
make_proplist(Apps, []);
_ ->
?ABORT("Failed to parse ~s~n", [RelFile])
end.
%% Get list of apps included in a release from a name and a path
get_rel_apps(Name, Path) ->
RelPath = get_rel_file_path(Name, Path),
get_rel_apps(RelPath).
%% Get rel file path from name and path
get_rel_file_path(Name, Path) ->
[RelFile] = filelib:wildcard(filename:join([Path, "releases", "*",
Name ++ ".rel"])),
[BinDir|_] = re:replace(RelFile, Name ++ "\\.rel", ""),
filename:join([binary_to_list(BinDir), Name ++ ".rel"]).
%% Get the previous release path from a global variable
get_previous_release_path() ->
case rebar_config:get_global(previous_release, false) of
false ->
?ABORT("previous_release=PATH is required to "
"create upgrade package~n", []);
OldVerPath ->
OldVerPath
end.
%%
%% Load terms from reltool.config
%%
load_config(ReltoolFile) ->
case file:consult(ReltoolFile) of
{ok, Terms} ->
Terms;
Other ->
?ABORT("Failed to load expected config from ~s: ~p\n",
[ReltoolFile, Other])
end.
%%
%% Look for the {sys, [...]} tuple in the reltool.config file.
Without this present , we ca n't run reltool .
%%
get_sys_tuple(ReltoolConfig) ->
case lists:keyfind(sys, 1, ReltoolConfig) of
{sys, _} = SysTuple ->
SysTuple;
false ->
?ABORT("Failed to find {sys, [...]} tuple in reltool.config.", [])
end.
%%
Look for { target_dir , TargetDir } in the reltool config file ; if none is
%% found, use the name of the release as the default target directory.
%%
get_target_dir(ReltoolConfig) ->
case rebar_config:get_global(target_dir, undefined) of
undefined ->
case lists:keyfind(target_dir, 1, ReltoolConfig) of
{target_dir, TargetDir} ->
filename:absname(TargetDir);
false ->
{sys, SysInfo} = get_sys_tuple(ReltoolConfig),
case lists:keyfind(rel, 1, SysInfo) of
{rel, Name, _Vsn, _Apps} ->
filename:absname(Name);
false ->
filename:absname("target")
end
end;
TargetDir ->
filename:absname(TargetDir)
end.
get_target_parent_dir(ReltoolConfig) ->
case lists:reverse(tl(lists:reverse(filename:split(get_target_dir(ReltoolConfig))))) of
[] -> ".";
Components -> filename:join(Components)
end.
%% ===================================================================
Internal functions
%% ===================================================================
make_proplist([{_,_}=H|T], Acc) ->
make_proplist(T, [H|Acc]);
make_proplist([H|T], Acc) ->
App = element(1, H),
Ver = element(2, H),
make_proplist(T, [{App,Ver}|Acc]);
make_proplist([], Acc) ->
Acc. | null | https://raw.githubusercontent.com/2600hz-archive/whistle/1a256604f0d037fac409ad5a55b6b17e545dcbf9/utils/rebar/src/rebar_rel_utils.erl | erlang | ex: ts=4 sw=4 et
-------------------------------------------------------------------
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
-------------------------------------------------------------------
Get release name and version from a reltool.config
Get release name and version from a rel file
Get release name and version from a name and a path
Get list of apps included in a release from a rel file
Get list of apps included in a release from a name and a path
Get rel file path from name and path
Get the previous release path from a global variable
Load terms from reltool.config
Look for the {sys, [...]} tuple in the reltool.config file.
found, use the name of the release as the default target directory.
===================================================================
=================================================================== | -*- erlang - indent - level : 4;indent - tabs - mode : nil -*-
rebar : Erlang Build Tools
Copyright ( c ) 2009 ( )
in the Software without restriction , including without limitation the rights
copies of the Software , and to permit persons to whom the Software is
all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
-module(rebar_rel_utils).
-export([is_rel_dir/0,
is_rel_dir/1,
get_reltool_release_info/1,
get_rel_release_info/1,
get_rel_release_info/2,
get_rel_apps/1,
get_rel_apps/2,
get_previous_release_path/0,
get_rel_file_path/2,
load_config/1,
get_sys_tuple/1,
get_target_dir/1,
get_target_parent_dir/1]).
-include("rebar.hrl").
is_rel_dir() ->
is_rel_dir(rebar_utils:get_cwd()).
is_rel_dir(Dir) ->
Fname = filename:join([Dir, "reltool.config"]),
case filelib:is_regular(Fname) of
true ->
{true, Fname};
false ->
false
end.
get_reltool_release_info([{sys, Config}| _]) ->
{rel, Name, Ver, _} = proplists:lookup(rel, Config),
{Name, Ver};
get_reltool_release_info(ReltoolFile) when is_list(ReltoolFile) ->
case file:consult(ReltoolFile) of
{ok, ReltoolConfig} ->
get_reltool_release_info(ReltoolConfig);
_ ->
?ABORT("Failed to parse ~s~n", [ReltoolFile])
end.
get_rel_release_info(RelFile) ->
case file:consult(RelFile) of
{ok, [{release, {Name, Ver}, _, _}]} ->
{Name, Ver};
_ ->
?ABORT("Failed to parse ~s~n", [RelFile])
end.
get_rel_release_info(Name, Path) ->
RelPath = get_rel_file_path(Name, Path),
get_rel_release_info(RelPath).
get_rel_apps(RelFile) ->
case file:consult(RelFile) of
{ok, [{release, _, _, Apps}]} ->
make_proplist(Apps, []);
_ ->
?ABORT("Failed to parse ~s~n", [RelFile])
end.
get_rel_apps(Name, Path) ->
RelPath = get_rel_file_path(Name, Path),
get_rel_apps(RelPath).
get_rel_file_path(Name, Path) ->
[RelFile] = filelib:wildcard(filename:join([Path, "releases", "*",
Name ++ ".rel"])),
[BinDir|_] = re:replace(RelFile, Name ++ "\\.rel", ""),
filename:join([binary_to_list(BinDir), Name ++ ".rel"]).
get_previous_release_path() ->
case rebar_config:get_global(previous_release, false) of
false ->
?ABORT("previous_release=PATH is required to "
"create upgrade package~n", []);
OldVerPath ->
OldVerPath
end.
load_config(ReltoolFile) ->
case file:consult(ReltoolFile) of
{ok, Terms} ->
Terms;
Other ->
?ABORT("Failed to load expected config from ~s: ~p\n",
[ReltoolFile, Other])
end.
Without this present , we ca n't run reltool .
get_sys_tuple(ReltoolConfig) ->
case lists:keyfind(sys, 1, ReltoolConfig) of
{sys, _} = SysTuple ->
SysTuple;
false ->
?ABORT("Failed to find {sys, [...]} tuple in reltool.config.", [])
end.
Look for { target_dir , TargetDir } in the reltool config file ; if none is
get_target_dir(ReltoolConfig) ->
case rebar_config:get_global(target_dir, undefined) of
undefined ->
case lists:keyfind(target_dir, 1, ReltoolConfig) of
{target_dir, TargetDir} ->
filename:absname(TargetDir);
false ->
{sys, SysInfo} = get_sys_tuple(ReltoolConfig),
case lists:keyfind(rel, 1, SysInfo) of
{rel, Name, _Vsn, _Apps} ->
filename:absname(Name);
false ->
filename:absname("target")
end
end;
TargetDir ->
filename:absname(TargetDir)
end.
get_target_parent_dir(ReltoolConfig) ->
case lists:reverse(tl(lists:reverse(filename:split(get_target_dir(ReltoolConfig))))) of
[] -> ".";
Components -> filename:join(Components)
end.
Internal functions
make_proplist([{_,_}=H|T], Acc) ->
make_proplist(T, [H|Acc]);
make_proplist([H|T], Acc) ->
App = element(1, H),
Ver = element(2, H),
make_proplist(T, [{App,Ver}|Acc]);
make_proplist([], Acc) ->
Acc. |
241078823e81541bf4aeae0edb3bafb95d582977cf685b8e027f2d2556ac185a | kadena-io/chainweb-node | MerkleLog.hs | # LANGUAGE AllowAmbiguousTypes #
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE ConstraintKinds #-}
# LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE FunctionalDependencies #
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE PolyKinds #
{-# LANGUAGE RankNTypes #-}
# LANGUAGE RoleAnnotations #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
-- |
Module : Chainweb . Crypto . MerkleLog
Copyright : Copyright © 2018 - 2020 Kadena LLC .
License : MIT
Maintainer : < >
-- Stability: experimental
--
-- This module provides a framework for hashing structured data and creating
-- inclusion proofs for hashed data.
--
-- An example of how to use this module can be found in the file
-- ./docs/merklelog-example.md. The impatient reader is encouraged to skip right
-- to the example.
--
-- == Background: The Problem of Hashing Structured Data
--
-- When authenticating structured data it is common practice to create hashes
-- by using some binary encoding of the whole structure or of the individual
-- components. In the latter case, the serialized components are usually
-- concatenated and hashed. This ad-hoc approach to hashing binary data is error
-- prone and makes API specifications more complicated.
--
-- Moreover, the resulting hashes can't be used for compact self-contained
-- inclusion proofs. Usually a lot of unrelated additional data has to be
-- included in the proof to make it self-contained. Another way is to define a
separate structure as input for a tree which requires to maintain an
-- additional index for the roots of the trees and possibly also for querying
-- the relevant input in the right order.
--
-- Finally, authenticated values in block chains and values for content-
-- addressed key value stores in general, often include the key in the data
-- structure, which creates a cyclic dependency of a value on itself. When
-- creating such a structure this is usually dealt with by lazily tying a knot,
-- instantiating the key with a default value (e.g. 'Nothing') and replacing it
later with the actual key ( e.g. ' Just key ' ) , or by defining two functions for
-- creating the hash and another for creating the structure that take the same
-- data as input plus the hash. Often those functions take a large number of
-- parameters and it can be hard to keep both in sync when maintaining the code.
-- It is easy to add a parameter to the structure, but forget to update the hash
-- computation to include the new value, which is difficult to detect in tests and can lead
-- to flawed security.
--
-- == What this Module Offers
--
-- This module defines types and idioms for a unified way to compute hashes on
-- structured data. This makes it easier to maintain the consistency of the hash
computations and generates hashes in a way that supports compact Merkle
-- proofs. The module introduces a syntax for defining the structure of hashes
-- that makes it obvious how a hash is created and helps to to make sure that
-- the code complies with the specification.
--
-- The tools in the module also automate some of the tedious and error prone
-- work of creating and verifying inclusion proofs. The precise specification of
-- the construction of these proofs is key for a block chain application, and
-- this module should help to ensure that the specification is uniform across
-- all proofs.
--
= = Log
--
-- A merkle log represents a view of a data structure that
--
1 . defines how a hash is computed and
2 . supports efficient creation of compact , self - contained inclusion proofs .
--
Entries in a merkle log must be
--
1 . serializable to a binary representation and
2 . arranged in a arbitrary but fixed order .
--
The types of entries that can be included as leaves of the same MerkleTree must
-- live in a closed universe. Each universe allows fixes the hash algorithm that
is used for all trees over the universe . Using a closed universe
-- prevents attacks that are based on using hashes of data from different
-- domains with equal representation.
--
In our implementation we assume that each value for which a Merkle Log is
-- created can be represented as a short, finite length header and a body that
-- is a sequence of values of the same type. The 'HasMerkleLog' class provides
the representation of a type as a Merkle log and the functions to
-- transform a value to and from that representation.
--
module Chainweb.Crypto.MerkleLog
(
-- $inputs
--
* Log Universe
MerkleUniverse(..)
, tagVal
, MerkleHashAlgorithm
, MerkleHashAlgorithmName(..)
* Log Entries
, IsMerkleLogEntry(..)
, MerkleLogEntries(..)
, emptyBody
, mapLogEntries
, entriesHeaderSize
, entriesBody
* Log
, MerkleLog(..)
, HasMerkleLog(..)
, MkLogType
, merkleLog
, newMerkleLog
, HasHeader
, HasHeader_(..)
, body
, headerSize
, bodySize
, computeMerkleLogRoot
* Log Proofs
, headerProof
, headerTree
, headerTree_
, bodyProof
, bodyTree
, bodyTree_
, proofSubject
-- * Utils for Defining Instances
, decodeMerkleInputNode
, encodeMerkleInputNode
, decodeMerkleTreeNode
, encodeMerkleTreeNode
* * IsMerkleLogEntry instance for use with @deriving via@
, ByteArrayMerkleLogEntry(..)
, MerkleRootLogEntry(..)
, Word8MerkleLogEntry(..)
, Word16BeMerkleLogEntry(..)
, Word32BeMerkleLogEntry(..)
, Word64BeMerkleLogEntry(..)
-- * Exceptions
, MerkleLogException(..)
, expectedInputNodeException
, expectedTreeNodeException
) where
import Control.Monad.Catch
import Crypto.Hash.Algorithms
import qualified Data.ByteArray as BA
import qualified Data.ByteString as B
import Data.Coerce
import Data.Foldable
import Data.Kind
import Data.Memory.Endian
import qualified Data.Memory.Endian as BA
import Data.MerkleLog hiding (Expected, Actual)
import Data.Proxy
import qualified Data.Text as T
import qualified Data.Vector as V
import Data.Word
import Foreign.Storable
import GHC.TypeNats
import System.IO.Unsafe
-- internal modules
import Data.Singletons
import Chainweb.Utils
import Chainweb.Utils.Serialization
-- -------------------------------------------------------------------------- --
-- Exceptions
data MerkleLogException
= MerkleLogWrongNodeTypeException (Expected T.Text) (Actual T.Text)
| MerkleLogWrongTagException (Expected T.Text) (Actual T.Text)
| MerkleLogDecodeException T.Text
deriving (Show)
instance Exception MerkleLogException
expectedInputNodeException :: MerkleLogException
expectedInputNodeException = MerkleLogWrongNodeTypeException
(Expected "InputNode")
(Actual "TreeNode")
expectedTreeNodeException :: MerkleLogException
expectedTreeNodeException = MerkleLogWrongNodeTypeException
(Expected "TreeNode")
(Actual "InputNode")
-- -------------------------------------------------------------------------- --
Internal Utils
uncurry3 :: (t1 -> t2 -> t3 -> t4) -> (t1, t2, t3) -> t4
uncurry3 f (a,b,c) = f a b c
fromWordBE :: forall w b . BA.ByteArray b => ByteSwap w => w -> b
fromWordBE w = BA.allocAndFreeze (sizeOf (undefined :: w)) $ \ptr -> poke ptr (BA.toBE w)
unsafeToWordBE :: BA.ByteSwap w => BA.ByteArrayAccess b => b -> w
unsafeToWordBE bytes = BA.fromBE . unsafeDupablePerformIO $ BA.withByteArray bytes peek
toWordBE
:: forall w b m
. MonadThrow m
=> BA.ByteSwap w
=> BA.ByteArrayAccess b
=> b
-> m w
toWordBE bytes
| BA.length bytes < sizeOf (undefined :: w) = throwM
$ MerkleLogDecodeException "failed to parse Word from bytes: not enough bytes"
| otherwise = return $! unsafeToWordBE bytes
-- -------------------------------------------------------------------------- --
-- $inputs
= Inputs
--
-- A framework for computing hashes from structures in a well-defined way.
--
-- A structure that can be hashed is called a log. The elements of the structure
-- that are inputs to the hash are called log entries. In order to use a type as
a log one has to define two things :
--
1 . For each log entry a serialization and deserialization method .
2 . A decomposition of a log type into an ordered sequence of log entries .
--
A is created as follows :
--
1 . Define a data kind with a nullary type constructor for each type in the
universe .
2 . Define an instance for MerkleUniverse for the new kind and assign each
type constructor in the universe a typelevel ' ' value that represents a
' Word16 ' value .
--
-- A log entry is defined as follows:
--
1 . Consider creating a specific newtype wrapper for the entry type , in
-- particular, if the entry is of a generic type, like, for instance, 'Int'
-- or 'B.ByteString'.
2 . Define an ' IsMerkleLogEntry ' instance or derive it using the ' deriving via '
-- extension if available. For the 'Tag' associated type family pick the
value from the universe type that corresponds to the entry type .
--
-- A log type is defines as follows:
--
1 . Define all constructor fields as log entries .
2 . Define a ' HasMerkleLogEntry ' instance for the type .
-- -------------------------------------------------------------------------- --
-- | A Kind that represents a closed universe of types that can be included as
leaves of the same MerkleTree .
--
-- The 'MerkleLogHash' type family defines the hash function that is used for
trees within the universe .
--
-- The 'MerkleTagVal' type family is used to assing each type-constructor in the
universe a type - level ' ' that represents a ' Word16 ' value .
--
class MerkleUniverse k where
type MerkleTagVal k (a :: k) :: Nat
| Term level representation of the ' MerkleTagVal ' of a type in a
-- universe.
--
tagVal :: forall u (t :: u) . KnownNat (MerkleTagVal u t) => Word16
tagVal = fromIntegral $ natVal (Proxy @(MerkleTagVal u t))
-- -------------------------------------------------------------------------- --
-- Hash Algorithms
type MerkleHashAlgorithm = HashAlgorithm
class MerkleHashAlgorithmName a where
merkleHashAlgorithmName :: T.Text
instance MerkleHashAlgorithmName SHA512t_256 where
merkleHashAlgorithmName = "SHA512t_256"
# INLINE merkleHashAlgorithmName #
instance MerkleHashAlgorithmName Keccak_256 where
merkleHashAlgorithmName = "Keccak_256"
# INLINE merkleHashAlgorithmName #
-- -------------------------------------------------------------------------- --
-- Merkle Log Entries
| A constraint that claims that a type is a universe and that its
' MerkleTagVal ' has a termlevel representation at runtime .
--
type InUniverse u (t :: u) = (MerkleUniverse u, KnownNat (MerkleTagVal u t))
-- | Class of types that can be used as entries in a merkle tree.
--
-- The 'Tag' associated type family tags each type that is an instance of
' IsMerkleLogEntry ' with a type from the respective universe .
--
-- The functions of the type class specify whether the entry corresponds to the
root of a nested tree or corresponds to an input node .
--
class (MerkleHashAlgorithm a, InUniverse u (Tag b)) => IsMerkleLogEntry a u b | b -> u where
type Tag b :: u
toMerkleNode
:: b
-> MerkleNodeType a B.ByteString
fromMerkleNode
:: MerkleNodeType a B.ByteString
-> Either SomeException b
fromMerkleNodeM
:: forall a u b m
. MonadThrow m
=> IsMerkleLogEntry a u b
=> MerkleNodeType a B.ByteString
-> m b
fromMerkleNodeM = either throwM return . fromMerkleNode @a
# INLINE fromMerkleNodeM #
-- -------------------------------------------------------------------------- --
-- Merkle Log Entries
| A data type that represents the log for a structure as
--
-- * a fixed size polymorphic list of header values and
-- * a monomorphic sequence of body values.
--
-- Both the header and the body may possibly be empty. The type of the former is
-- represented by an empty type-level list, while the latter is represented by
-- 'Void'.
--
data MerkleLogEntries
:: Type
-- Hash Algorithm
-> Type
Universe
-> [Type]
-- HeaderTypes
-> Type
-- BodyType
-> Type
where
MerkleLogBody
:: IsMerkleLogEntry a u b
=> V.Vector b
-> MerkleLogEntries a u '[] b
(:+:)
:: IsMerkleLogEntry a u h
=> h
-> MerkleLogEntries a u t b
-> MerkleLogEntries a u (h ': t) b
-- TODO: should we support lazy lists/streams in the body or more
-- generally abstracting a store the may be backed by a persisted
-- database? All we need is the ability to enumerate items in order. We
-- may also consider support to lookup the index of an item for creating
-- proof.
infixr 5 :+:
emptyBody :: IsMerkleLogEntry a u b => MerkleLogEntries a u '[] b
emptyBody = MerkleLogBody mempty
# INLINE emptyBody #
mapLogEntries
:: forall a u h s b
. (forall x . IsMerkleLogEntry a u x => x -> b)
-> MerkleLogEntries a u h s
-> V.Vector b
mapLogEntries f m = V.concat $ go m
where
go :: forall h' . MerkleLogEntries a u h' s -> [V.Vector b]
go (MerkleLogBody s) = [V.map f s]
go (h :+: t) = V.singleton (f h) : go t
# INLINE mapLogEntries #
entriesHeaderSize :: MerkleLogEntries a u l s -> Int
entriesHeaderSize MerkleLogBody{} = 0
entriesHeaderSize (_ :+: t) = succ $ entriesHeaderSize t
entriesBody :: MerkleLogEntries a u l s -> V.Vector s
entriesBody (MerkleLogBody s) = s
entriesBody (_ :+: t) = entriesBody t
# INLINE entriesBody #
-- -------------------------------------------------------------------------- --
-- Merkle Log
-- | A merkle log represents values of 'IsMerkleLog' types in a generic way that
-- supports computing the root hash and a merkle tree.
--
data MerkleLog a u (h :: [Type]) (b :: Type) = MerkleLog
{ _merkleLogRoot :: {-# UNPACK #-} !(MerkleRoot a)
^ The root hash of the tree of the log .
, _merkleLogEntries :: !(MerkleLogEntries a u h b)
^ The entries of the log .
--
-- Note: For creating proofs this isn't needed. Should we make this
-- lazy, too? For large entries it may be somewhat costly to pull it
-- from the store.
, _merkleLogTree :: {- Lazy -} MerkleTree a
-- ^ The merkle tree for the entries of the log. It is required to
-- compute the root and for creating a merkle proofs.
--
-- For some types, computing the merkle tree can be expensive. Therefore
-- it is instantiated lazily and only forced if actually required.
}
| Class of types which can be represented as a tree log .
--
-- An instance of 'HasMerkleLog' can be encoded as
--
1 . a header that consists of a fixed size polymorphic list ' IsMerkleEntry '
-- instances, and
2 . a body that consists of a monomorphic sequence of ' IsMerkleEntry '
-- instances.
--
class (MerkleUniverse u, MerkleHashAlgorithm a) => HasMerkleLog a u b | b -> u where
type MerkleLogHeader b :: [Type]
The header of the log representation of the type .
type MerkleLogBody b :: Type
the body of the log representation of the type .
toLog :: b -> MerkleLog a u (MerkleLogHeader b) (MerkleLogBody b)
^ Transform a value into a Merkle log .
--
Often the root of the tree is used as identifier and is stored
-- as part of the input structure. In those cases the function
' merkleLog ' can be used to create the ' MerkleLog ' from the root and
the entries without forcing the computation of the tree field .
fromLog :: MerkleLog a u (MerkleLogHeader b) (MerkleLogBody b) -> b
^ Recover a value from a Merkle log .
type MkLogType a u b = MerkleLog a u (MerkleLogHeader b) (MerkleLogBody b)
| Create a ' MerkleLog ' from a ' MerkleRoot ' and a sequence of ' MerkleLogEntry 's .
-- The '_merkleLogTree' fields is instantiated lazily.
--
merkleLog
:: forall a u h b
. MerkleHashAlgorithm a
=> MerkleRoot a
-> MerkleLogEntries a u h b
-> MerkleLog a u h b
merkleLog root entries = MerkleLog
{ _merkleLogRoot = root
, _merkleLogEntries = entries
, _merkleLogTree = {- Lazy -} merkleTree
$ toList
$ mapLogEntries (toMerkleNodeTagged @a) entries
}
| /Internal:/ Create a representation nodes that are tagged with the
respedtive type from the universe .
--
toMerkleNodeTagged
:: forall a u b
. IsMerkleLogEntry a u b
=> b
-> MerkleNodeType a B.ByteString
toMerkleNodeTagged b = case toMerkleNode @a @u @b b of
InputNode bytes -> InputNode @a @B.ByteString
$ fromWordBE @Word16 tag <> bytes
TreeNode r -> TreeNode @a r
where
tag :: Word16
tag = tagVal @u @(Tag b)
| /Internal:/ Decode nodes that are tagged with the respedtive type
from the universe .
--
fromMerkleNodeTagged
:: forall a u b m
. MonadThrow m
=> IsMerkleLogEntry a u b
=> MerkleNodeType a B.ByteString
-> m b
fromMerkleNodeTagged (InputNode bytes) = do
w16 <- toWordBE @Word16 bytes
if w16 /= tag
then throwM
$ MerkleLogWrongTagException (Expected (sshow tag)) (Actual (sshow w16))
else fromMerkleNodeM @a $ InputNode (B.drop 2 bytes)
where
tag = tagVal @u @(Tag b)
fromMerkleNodeTagged r = fromMerkleNodeM @a r
-- | 'IsMerkleLog' values often include a hash of the value itself, which
-- represents cyclic dependency of a value on itself. This function allows to
-- create such an value from its representation as a sequence of merkle log
-- entries.
--
newMerkleLog
:: forall a u h b
. MerkleUniverse u
=> MerkleHashAlgorithm a
=> MerkleLogEntries a u h b
-> MerkleLog a u h b
newMerkleLog entries = MerkleLog
{ _merkleLogRoot = merkleRoot tree
, _merkleLogEntries = entries
, _merkleLogTree = tree
}
where
tree = merkleTree $ toList $ mapLogEntries (toMerkleNodeTagged @a) entries
| /Internal:/ Get ( first ) header entry of given type from a Merkle log .
--
-- TODO:
-- * check that the header value is unique (requires traversing the list until
-- then end after a value is found)
--
class Index c (Hdr b) ~ i => HasHeader_ a u c b i | i b -> c where
type Hdr b :: [Type]
header :: b -> c
headerPos :: Int
headerDict :: b -> Dict (IsMerkleLogEntry a u c) c
instance HasHeader_ a u c (MerkleLog a u (c ': t) s) 'Z where
type Hdr (MerkleLog a u (c ': t) s) = (c ': t)
header (MerkleLog _ (c :+: _) _) = c
headerPos = 0
headerDict (MerkleLog _ (c :+: _) _) = Dict c
-- TODO:
--
-- * recurse only on entries
--
instance
( HasHeader_ a u c (MerkleLog a u t s) i
, 'S i ~ Index c (x ': t) -- this effectively asserts that c and x are different
)
=> HasHeader_ a u c (MerkleLog a u (x ': t) s) ('S i)
where
type Hdr (MerkleLog a u (x ': t) s) = (x ': t)
header (MerkleLog x (_ :+: t) y) = header @a @u (MerkleLog @a x t y)
headerPos = succ $ headerPos @a @u @c @(MerkleLog a u t s)
headerDict (MerkleLog x (_ :+: t) y) = headerDict @a @u (MerkleLog @a x t y)
type HasHeader a u c b = HasHeader_ a u c b (Index c (Hdr b))
-- | Get the body sequence of a Merkle log.
--
body :: MerkleLog a u l s -> V.Vector s
body l = entriesBody (_merkleLogEntries l)
{-# INLINE body #-}
-- | Get the number of entries in the header of a Merkle log.
--
headerSize :: MerkleLog a u l s -> Int
headerSize l = entriesHeaderSize (_merkleLogEntries l)
{-# INLINE headerSize #-}
-- | Get the number of entries in the body of a Merkle log.
--
bodySize :: MerkleLog a u l s -> Int
bodySize l = V.length (body l)
{-# INLINE bodySize #-}
-- | Compute the root hash for an instance of 'HasMerkleLog'.
--
-- This computes the merkle log and forces the merkle tree, which is linear in
-- the size of @b@. For large logs the hash or the full 'MerkleTree' should
-- be cached.
--
computeMerkleLogRoot
    :: forall a u b
    . HasMerkleLog a u b
    => b
    -> MerkleRoot a
computeMerkleLogRoot b = merkleRoot $ _merkleLogTree $ toLog @a b
{-# INLINE computeMerkleLogRoot #-}
-- -------------------------------------------------------------------------- --
-- Proofs
-- | Create an inclusion proof for a value in the header of a log.
--
-- NOTE: The call to 'toLog' can be potentially expensive if the body of the
-- log is large and the tree isn't memoized. However, for most applications,
-- either the header is empty or the body is small and recomputing it is cheap.
--
-- We could consider placing the header at the end of the tree. In that case we
-- could cache (all but at most logarithmically many hashes of) the body tree
-- and recompute just the part of the tree that depends on the header. However,
-- for large trees, we store a full cached version of the tree in any case, so
-- we should use that instead. Another option would be to use two separate
-- trees, but, again, our current use case wouldn't justify the overhead.
--
headerProof
    :: forall c a u b m
    . MonadThrow m
    => HasHeader a u c (MkLogType a u b)
    => HasMerkleLog a u b
    => b
    -> m (MerkleProof a)
headerProof b = case headerTree @c @a b of
    (subj, pos, tree) -> merkleProof subj pos tree
{-# INLINE headerProof #-}
-- | Create the parameters for creating nested inclusion proofs with the
-- 'merkleProof_' of the merkle-log package.
--
-- This function returns the proof subject, the position of the subject, and
-- the tree, and should be used for the leaf tree in the nested proof.
--
headerTree
    :: forall c a u b
    . HasHeader a u c (MkLogType a u b)
    => HasMerkleLog a u b
    => b
    -> (MerkleNodeType a B.ByteString, Int, MerkleTree a)
headerTree b = (node, p, _merkleLogTree @a mlog)
  where
    mlog = toLog @a b
    -- Position of the @c@ entry within the header of the log type.
    p = headerPos @a @u @c @(MkLogType a u b)
    -- Matching on 'Dict' brings the 'IsMerkleLogEntry' instance for the
    -- header value into scope, which 'toMerkleNodeTagged' requires.
    node = case headerDict @a @u @c mlog of
        Dict hdr -> toMerkleNodeTagged @a hdr
-- | Create the parameters for creating nested inclusion proofs with the
-- 'merkleProof_' of the merkle-log package.
--
-- This function returns the position of the input and the tree, but not
-- the subject. It should be used for inner trees in the nested proof.
--
headerTree_
    :: forall c a u b
    . HasHeader a u c (MkLogType a u b)
    => HasMerkleLog a u b
    => b
    -> (Int, MerkleTree a)
headerTree_ b =
    let mlog = toLog @a b
    in (headerPos @a @u @c @(MkLogType a u b), _merkleLogTree @a mlog)
-- | Create an inclusion proof for a value in the body of a Merkle log.
--
-- TODO: it is not clear if the result of 'toLog' (which contains the cached
-- tree) is memoized on the heap (i.e. depends on where @b@ is coming from). We
-- should either make sure that it is memoized or provide a version that takes
-- a 'MerkleLog' value.
--
bodyProof
    :: forall a u b m
    . MonadThrow m
    => HasMerkleLog a u b
    => b
    -> Int
        -- ^ the index in the body of the log
    -> m (MerkleProof a)
bodyProof b i = case bodyTree @a b i of
    (subj, pos, tree) -> merkleProof subj pos tree
{-# INLINE bodyProof #-}
-- | Create the parameters for creating nested inclusion proofs with the
-- 'merkleProof_' of the merkle-log package.
--
-- This function returns the proof subject, the position of the subject, and
-- the tree, and should be used for the leaf tree in the nested proof.
--
bodyTree
    :: forall a u b
    . HasMerkleLog a u b
    => b
    -> Int
        -- ^ the index in the body of the log
    -> (MerkleNodeType a B.ByteString, Int, MerkleTree a)
bodyTree b i =
    let mlog = toLog @a b
        -- The body follows the header in the tree, so the tree position is
        -- offset by the header size.
        pos = i + headerSize mlog
        subj = mapLogEntries (toMerkleNodeTagged @a) (_merkleLogEntries mlog) V.! pos
    in (subj, pos, _merkleLogTree @a mlog)
-- | Create the parameters for creating nested inclusion proofs with the
-- 'merkleProof_' of the merkle-log package.
--
-- This function returns the position of the input and the tree, but not
-- the subject. It should be used for inner trees in the nested proof.
--
bodyTree_
    :: forall a u b
    . HasMerkleLog a u b
    => b
    -> Int
        -- ^ the index in the body of the log
    -> (Int, MerkleTree a)
bodyTree_ b i =
    let mlog = toLog @a b
    -- The body follows the header in the tree, so the tree position is
    -- offset by the header size.
    in (i + headerSize mlog, _merkleLogTree @a mlog)
-- | Extract the proof subject from a 'MerkleProof' value.
--
proofSubject
    :: forall a u b m
    . MonadThrow m
    => IsMerkleLogEntry a u b
    => MerkleProof a
    -> m b
proofSubject p = case _merkleProofSubject p of
    MerkleProofSubject subj -> fromMerkleNodeTagged @a subj
{-# INLINE proofSubject #-}
-- -------------------------------------------------------------------------- --
-- Tools Defining Instances
-- | Helper for defining 'toMerkleNode' for types with a binary encoding:
-- serialize the value and wrap it as an 'InputNode'.
encodeMerkleInputNode
    :: (b -> Put)
    -> b
    -> MerkleNodeType a B.ByteString
encodeMerkleInputNode encode b = InputNode $ runPutS $ encode b
-- | Helper for defining 'fromMerkleNode' for types with a binary encoding:
-- deserialize an 'InputNode'; a 'TreeNode' is rejected with an exception.
decodeMerkleInputNode
    :: MonadThrow m
    => Get b
    -> MerkleNodeType a B.ByteString
    -> m b
decodeMerkleInputNode decode node = case node of
    InputNode bytes -> runGetS decode bytes
    TreeNode _ -> throwM expectedInputNodeException
-- | Helper for defining 'toMerkleNode' for newtype wrappers of 'MerkleRoot':
-- the root of a nested tree becomes a 'TreeNode'.
encodeMerkleTreeNode :: Coercible a (MerkleRoot alg) => a -> MerkleNodeType alg x
encodeMerkleTreeNode a = TreeNode (coerce a)
-- | Helper for defining 'fromMerkleNode' for newtype wrappers of
-- 'MerkleRoot': unwrap a 'TreeNode'; an 'InputNode' is rejected with an
-- exception.
decodeMerkleTreeNode
    :: MonadThrow m
    => Coercible (MerkleRoot alg) a
    => MerkleNodeType alg x
    -> m a
decodeMerkleTreeNode node = case node of
    TreeNode r -> return $! coerce r
    InputNode _ -> throwM expectedTreeNodeException
-- -------------------------------------------------------------------------- --
Support for Deriving Via
-- | Support for deriving IsMerkleLogEntry for types that are an instance of
-- 'BA.ByteArray' via the @DerivingVia@ extension.
--
newtype ByteArrayMerkleLogEntry u (t :: u) b = ByteArrayMerkleLogEntry b

instance
    (MerkleHashAlgorithm a, InUniverse u t, BA.ByteArray b)
    => IsMerkleLogEntry a u (ByteArrayMerkleLogEntry u (t :: u) b)
  where
    type Tag (ByteArrayMerkleLogEntry u t b) = t
    -- The raw bytes of the value are the input of the tree leaf.
    toMerkleNode (ByteArrayMerkleLogEntry b) = InputNode $ BA.convert b
    fromMerkleNode (InputNode x) = return $! ByteArrayMerkleLogEntry $! BA.convert x
    fromMerkleNode (TreeNode _) = throwM expectedInputNodeException
    {-# INLINE toMerkleNode #-}
    {-# INLINE fromMerkleNode #-}
-- | Support for deriving IsMerkleLogEntry for types that are newtype wrappers
-- of 'MerkleRoot' via the @DerivingVia@ extension.
--
newtype MerkleRootLogEntry a (t :: u) = MerkleRootLogEntry (MerkleRoot a)

instance (MerkleHashAlgorithm a, InUniverse u t) => IsMerkleLogEntry a u (MerkleRootLogEntry a (t :: u)) where
    type Tag (MerkleRootLogEntry a t) = t
    -- A root of a nested tree is represented as a 'TreeNode', not as input.
    toMerkleNode (MerkleRootLogEntry r) = TreeNode r
    fromMerkleNode (TreeNode !x) = return $! MerkleRootLogEntry x
    fromMerkleNode (InputNode _) = throwM expectedTreeNodeException
    {-# INLINE toMerkleNode #-}
    {-# INLINE fromMerkleNode #-}
-- | Support for deriving IsMerkleLogEntry for types that are newtype wrappers
-- of 'Word8' via the @DerivingVia@ extension.
--
newtype Word8MerkleLogEntry (t :: u) = Word8MerkleLogEntry { _getWord8LogEntry :: Word8 }

instance
    (MerkleHashAlgorithm a, InUniverse u t) => IsMerkleLogEntry a u (Word8MerkleLogEntry (t :: u))
  where
    type Tag (Word8MerkleLogEntry t) = t
    -- Encoded as a single byte.
    toMerkleNode = InputNode . B.singleton . _getWord8LogEntry
    -- Decoding requires exactly one byte; both empty and over-long inputs
    -- are rejected.
    fromMerkleNode (InputNode x) = case B.uncons x of
        Nothing -> throwM
            $ MerkleLogDecodeException "failed to deserialize Word8 from empty ByteString"
        Just (!c,"") -> return $! Word8MerkleLogEntry c
        Just _ -> throwM
            $ MerkleLogDecodeException "failed to deserialize Word8. Pending bytes in input"
    fromMerkleNode (TreeNode _) = throwM expectedInputNodeException
    {-# INLINE toMerkleNode #-}
    {-# INLINE fromMerkleNode #-}
-- | Support for deriving IsMerkleLogEntry for types that are newtype wrappers
-- of 'Word16' via the @DerivingVia@ extension.
--
newtype Word16BeMerkleLogEntry (t :: u) = Word16BeMerkleLogEntry
    { _getWord16BeLogEntry :: Word16 }

instance
    (MerkleHashAlgorithm a, InUniverse u t) => IsMerkleLogEntry a u (Word16BeMerkleLogEntry (t :: u))
  where
    type Tag (Word16BeMerkleLogEntry t) = t
    -- Encoded in big-endian byte order.
    toMerkleNode = InputNode . fromWordBE . _getWord16BeLogEntry
    fromMerkleNode (InputNode x) = Word16BeMerkleLogEntry <$> toWordBE x
    fromMerkleNode (TreeNode _) = throwM expectedInputNodeException
    {-# INLINE toMerkleNode #-}
    {-# INLINE fromMerkleNode #-}
-- | Support for deriving IsMerkleLogEntry for types that are newtype wrappers
-- of 'Word32' via the @DerivingVia@ extension.
--
newtype Word32BeMerkleLogEntry (t :: u) = Word32BeMerkleLogEntry
    { _getWord32BeLogEntry :: Word32 }

instance
    (MerkleHashAlgorithm a, InUniverse u t) => IsMerkleLogEntry a u (Word32BeMerkleLogEntry (t :: u))
  where
    type Tag (Word32BeMerkleLogEntry t) = t
    -- Encoded in big-endian byte order.
    toMerkleNode = InputNode . fromWordBE . _getWord32BeLogEntry
    fromMerkleNode (InputNode x) = Word32BeMerkleLogEntry <$> toWordBE x
    fromMerkleNode (TreeNode _) = throwM expectedInputNodeException
    {-# INLINE toMerkleNode #-}
    {-# INLINE fromMerkleNode #-}
-- | Support for deriving IsMerkleLogEntry for types that are newtype wrappers
-- of 'Word64' via the @DerivingVia@ extension.
--
newtype Word64BeMerkleLogEntry (t :: u) = Word64BeMerkleLogEntry
    { _getWord64BeLogEntry :: Word64 }

instance
    (MerkleHashAlgorithm a, InUniverse u t) => IsMerkleLogEntry a u (Word64BeMerkleLogEntry (t :: u))
  where
    type Tag (Word64BeMerkleLogEntry t) = t
    -- Encoded in big-endian byte order.
    toMerkleNode = InputNode . fromWordBE . _getWord64BeLogEntry
    fromMerkleNode (InputNode x) = Word64BeMerkleLogEntry <$> toWordBE x
    fromMerkleNode (TreeNode _) = throwM expectedInputNodeException
    {-# INLINE toMerkleNode #-}
    {-# INLINE fromMerkleNode #-}
| null | https://raw.githubusercontent.com/kadena-io/chainweb-node/3358be570b3552aad8c75d685f6e250fd360feda/src/Chainweb/Crypto/MerkleLog.hs | haskell | # LANGUAGE BangPatterns #
# LANGUAGE ConstraintKinds #
# LANGUAGE GADTs #
# LANGUAGE OverloadedStrings #
# LANGUAGE RankNTypes #
|
Stability: experimental
This module provides a framework for hashing structured data and creating
inclusion proofs for hashed data.
An example of how to use this module can be found in the file
./docs/merklelog-example.md. The impatient reader is encouraged to skip right
to the example.
== Background: The Problem of Hashing Structured Data
When authenticating structured data it is common practice to create hashes
by using some binary encoding of the whole structure or of the individual
components. In the latter case, the serialized components are usually
concatenated and hashed. This ad-hoc approach to hashing binary data is error
prone and makes API specifications more complicated.
Moreover, the resulting hashes can't be used for compact self-contained
inclusion proofs. Usually a lot of unrelated additional data has to be
included in the proof to make it self-contained. Another way is to define a
additional index for the roots of the trees and possibly also for querying
the relevant input in the right order.
Finally, authenticated values in block chains and values for content-
addressed key value stores in general, often include the key in the data
structure, which creates a cyclic dependency of a value on itself. When
creating such a structure this is usually dealt with by lazily tying a knot,
instantiating the key with a default value (e.g. 'Nothing') and replacing it
creating the hash and another for creating the structure that take the same
data as input plus the hash. Often those functions take a large number of
parameters and it can be hard to keep both in sync when maintaining the code.
It is easy to add a parameter to the structure, but forget to update the hash
computation to include the new value, which is difficult to detect in tests and can lead
to flawed security.
== What this Module Offers
This module defines types and idioms for a unified way to compute hashes on
structured data. This makes it easier to maintain the consistency of the hash
proofs. The module introduces a syntax for defining the structure of hashes
that makes it obvious how a hash is created and helps to to make sure that
the code complies with the specification.
The tools in the module also automate some of the tedious and error prone
work of creating and verifying inclusion proofs. The precise specification of
the construction of these proofs is key for a block chain application, and
this module should help to ensure that the specification is uniform across
all proofs.
A merkle log represents a view of a data structure that
live in a closed universe. Each universe allows fixes the hash algorithm that
prevents attacks that are based on using hashes of data from different
domains with equal representation.
created can be represented as a short, finite length header and a body that
is a sequence of values of the same type. The 'HasMerkleLog' class provides
transform a value to and from that representation.
$inputs
* Utils for Defining Instances
* Exceptions
internal modules
-------------------------------------------------------------------------- --
Exceptions
-------------------------------------------------------------------------- --
-------------------------------------------------------------------------- --
$inputs
A framework for computing hashes from structures in a well-defined way.
A structure that can be hashed is called a log. The elements of the structure
that are inputs to the hash are called log entries. In order to use a type as
A log entry is defined as follows:
particular, if the entry is of a generic type, like, for instance, 'Int'
or 'B.ByteString'.
extension if available. For the 'Tag' associated type family pick the
A log type is defines as follows:
-------------------------------------------------------------------------- --
| A Kind that represents a closed universe of types that can be included as
The 'MerkleLogHash' type family defines the hash function that is used for
The 'MerkleTagVal' type family is used to assing each type-constructor in the
universe.
-------------------------------------------------------------------------- --
Hash Algorithms
-------------------------------------------------------------------------- --
Merkle Log Entries
| Class of types that can be used as entries in a merkle tree.
The 'Tag' associated type family tags each type that is an instance of
The functions of the type class specify whether the entry corresponds to the
-------------------------------------------------------------------------- --
Merkle Log Entries
* a fixed size polymorphic list of header values and
* a monomorphic sequence of body values.
Both the header and the body may possibly be empty. The type of the former is
represented by an empty type-level list, while the latter is represented by
'Void'.
Hash Algorithm
HeaderTypes
BodyType
TODO: should we support lazy lists/streams in the body or more
generally abstracting a store the may be backed by a persisted
database? All we need is the ability to enumerate items in order. We
may also consider support to lookup the index of an item for creating
proof.
-------------------------------------------------------------------------- --
Merkle Log
| A merkle log represents values of 'IsMerkleLog' types in a generic way that
supports computing the root hash and a merkle tree.
# UNPACK #
Note: For creating proofs this isn't needed. Should we make this
lazy, too? For large entries it may be somewhat costly to pull it
from the store.
Lazy
^ The merkle tree for the entries of the log. It is required to
compute the root and for creating a merkle proofs.
For some types, computing the merkle tree can be expensive. Therefore
it is instantiated lazily and only forced if actually required.
An instance of 'HasMerkleLog' can be encoded as
instances, and
instances.
as part of the input structure. In those cases the function
The '_merkleLogTree' fields is instantiated lazily.
Lazy
| 'IsMerkleLog' values often include a hash of the value itself, which
represents cyclic dependency of a value on itself. This function allows to
create such an value from its representation as a sequence of merkle log
entries.
TODO:
* check that the header value is unique (requires traversing the list until
then end after a value is found)
TODO:
* recurse only on entries
this effectively asserts that c and x are different
This computes the merkle log and forces the merkle tree, which is linear in
be cached.
-------------------------------------------------------------------------- --
Proofs
NOTE: The call to 'toLog' can be potentially expensive if the b body of the
log is large and the tree isn't memoized. However, for most applications,
either the header is empty or the body is small and recomputing it is cheap.
We could consider placing the header at the end of the tree. In that case we
could cache (all but at most logarithmic many hashes) of the body tree and
recompute just the part of the tree that depends on the header. However, for
large trees, we store a full cached version of the tree in any case, so we
but, again, our current use case wouldn't justify the overhead.
| Create the parameters for creating nested inclusion proofs with the
'merkleProof_' of the merkle-log package.
This function returns the proof subject, the position of the subject, and the
| Create the parameters for creating nested inclusion proofs with the
'merkleProof_' of the merkle-log package.
the subject. It should be used for inner trees in the nested proof.
TODO: it is not clear if the result of 'toLog' (which contains the cached
should either make sure that it is memoized or provide a version that takes a
^ the index in the body of the log
| Create the parameters for creating nested inclusion proofs with the
'merkleProof_' of the merkle-log package.
This function returns the proof subject, the position of the subject, and the
^ the index in the body of the log
| Create the parameters for creating nested inclusion proofs with the
'merkleProof_' of the merkle-log package.
the subject. It should be used for inner trees in the nested proof.
^ the index in the body of the log
-------------------------------------------------------------------------- --
Tools Defining Instances
-------------------------------------------------------------------------- --
| Support for deriving IsMerkleLogEntry for types that are an instance of
| Support for deriving IsMerkleLogEntry for types that are newtype wrappers of
| Support for deriving IsMerkleLogEntry for types that are newtype wrappers of
| Support for deriving IsMerkleLogEntry for types that are newtype wrappers of
| Support for deriving IsMerkleLogEntry for types that are newtype wrappers of
| Support for deriving IsMerkleLogEntry for types that are newtype wrappers of
| # LANGUAGE AllowAmbiguousTypes #
# LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE FunctionalDependencies #
# LANGUAGE PolyKinds #
# LANGUAGE RoleAnnotations #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
Module : Chainweb . Crypto . MerkleLog
Copyright : Copyright © 2018 - 2020 Kadena LLC .
License : MIT
Maintainer : < >
separate structure as input for a tree which requires to maintain an
later with the actual key ( e.g. ' Just key ' ) , or by defining two functions for
computations and generates hashes in a way that supports compact Merkle
= = Log
1 . defines how a hash is computed and
2 . supports efficient creation of compact , self - contained inclusion proofs .
Entries in a merkle log must be
1 . serializable to a binary representation and
2 . arranged in a arbitrary but fixed order .
The types of entries that can be included as leaves of the same MerkleTree must
is used for all trees over the universe . Using a closed universe
In our implementation we assume that each value for which a Merkle Log is
the representation of a type as a Merkle log and the functions to
module Chainweb.Crypto.MerkleLog
(
* Log Universe
MerkleUniverse(..)
, tagVal
, MerkleHashAlgorithm
, MerkleHashAlgorithmName(..)
* Log Entries
, IsMerkleLogEntry(..)
, MerkleLogEntries(..)
, emptyBody
, mapLogEntries
, entriesHeaderSize
, entriesBody
* Log
, MerkleLog(..)
, HasMerkleLog(..)
, MkLogType
, merkleLog
, newMerkleLog
, HasHeader
, HasHeader_(..)
, body
, headerSize
, bodySize
, computeMerkleLogRoot
* Log Proofs
, headerProof
, headerTree
, headerTree_
, bodyProof
, bodyTree
, bodyTree_
, proofSubject
, decodeMerkleInputNode
, encodeMerkleInputNode
, decodeMerkleTreeNode
, encodeMerkleTreeNode
* * IsMerkleLogEntry instance for use with @deriving via@
, ByteArrayMerkleLogEntry(..)
, MerkleRootLogEntry(..)
, Word8MerkleLogEntry(..)
, Word16BeMerkleLogEntry(..)
, Word32BeMerkleLogEntry(..)
, Word64BeMerkleLogEntry(..)
, MerkleLogException(..)
, expectedInputNodeException
, expectedTreeNodeException
) where
import Control.Monad.Catch
import Crypto.Hash.Algorithms
import qualified Data.ByteArray as BA
import qualified Data.ByteString as B
import Data.Coerce
import Data.Foldable
import Data.Kind
import Data.Memory.Endian
import qualified Data.Memory.Endian as BA
import Data.MerkleLog hiding (Expected, Actual)
import Data.Proxy
import qualified Data.Text as T
import qualified Data.Vector as V
import Data.Word
import Foreign.Storable
import GHC.TypeNats
import System.IO.Unsafe
import Data.Singletons
import Chainweb.Utils
import Chainweb.Utils.Serialization
data MerkleLogException
= MerkleLogWrongNodeTypeException (Expected T.Text) (Actual T.Text)
| MerkleLogWrongTagException (Expected T.Text) (Actual T.Text)
| MerkleLogDecodeException T.Text
deriving (Show)
instance Exception MerkleLogException
expectedInputNodeException :: MerkleLogException
expectedInputNodeException = MerkleLogWrongNodeTypeException
(Expected "InputNode")
(Actual "TreeNode")
expectedTreeNodeException :: MerkleLogException
expectedTreeNodeException = MerkleLogWrongNodeTypeException
(Expected "TreeNode")
(Actual "InputNode")
Internal Utils
uncurry3 :: (t1 -> t2 -> t3 -> t4) -> (t1, t2, t3) -> t4
uncurry3 f (a,b,c) = f a b c
fromWordBE :: forall w b . BA.ByteArray b => ByteSwap w => w -> b
fromWordBE w = BA.allocAndFreeze (sizeOf (undefined :: w)) $ \ptr -> poke ptr (BA.toBE w)
unsafeToWordBE :: BA.ByteSwap w => BA.ByteArrayAccess b => b -> w
unsafeToWordBE bytes = BA.fromBE . unsafeDupablePerformIO $ BA.withByteArray bytes peek
toWordBE
:: forall w b m
. MonadThrow m
=> BA.ByteSwap w
=> BA.ByteArrayAccess b
=> b
-> m w
toWordBE bytes
| BA.length bytes < sizeOf (undefined :: w) = throwM
$ MerkleLogDecodeException "failed to parse Word from bytes: not enough bytes"
| otherwise = return $! unsafeToWordBE bytes
= Inputs
a log one has to define two things :
1 . For each log entry a serialization and deserialization method .
2 . A decomposition of a log type into an ordered sequence of log entries .
A is created as follows :
1 . Define a data kind with a nullary type constructor for each type in the
universe .
2 . Define an instance for MerkleUniverse for the new kind and assign each
type constructor in the universe a typelevel ' ' value that represents a
' Word16 ' value .
1 . Consider creating a specific newtype wrapper for the entry type , in
2 . Define an ' IsMerkleLogEntry ' instance or derive it using the ' deriving via '
value from the universe type that corresponds to the entry type .
1 . Define all constructor fields as log entries .
2 . Define a ' HasMerkleLogEntry ' instance for the type .
leaves of the same MerkleTree .
trees within the universe .
universe a type - level ' ' that represents a ' Word16 ' value .
class MerkleUniverse k where
type MerkleTagVal k (a :: k) :: Nat
| Term level representation of the ' MerkleTagVal ' of a type in a
tagVal :: forall u (t :: u) . KnownNat (MerkleTagVal u t) => Word16
tagVal = fromIntegral $ natVal (Proxy @(MerkleTagVal u t))
type MerkleHashAlgorithm = HashAlgorithm
class MerkleHashAlgorithmName a where
merkleHashAlgorithmName :: T.Text
instance MerkleHashAlgorithmName SHA512t_256 where
merkleHashAlgorithmName = "SHA512t_256"
# INLINE merkleHashAlgorithmName #
instance MerkleHashAlgorithmName Keccak_256 where
merkleHashAlgorithmName = "Keccak_256"
# INLINE merkleHashAlgorithmName #
| A constraint that claims that a type is a universe and that its
' MerkleTagVal ' has a termlevel representation at runtime .
type InUniverse u (t :: u) = (MerkleUniverse u, KnownNat (MerkleTagVal u t))
' IsMerkleLogEntry ' with a type from the respective universe .
root of a nested tree or corresponds to an input node .
class (MerkleHashAlgorithm a, InUniverse u (Tag b)) => IsMerkleLogEntry a u b | b -> u where
type Tag b :: u
toMerkleNode
:: b
-> MerkleNodeType a B.ByteString
fromMerkleNode
:: MerkleNodeType a B.ByteString
-> Either SomeException b
fromMerkleNodeM
:: forall a u b m
. MonadThrow m
=> IsMerkleLogEntry a u b
=> MerkleNodeType a B.ByteString
-> m b
fromMerkleNodeM = either throwM return . fromMerkleNode @a
# INLINE fromMerkleNodeM #
| A data type that represents the log for a structure as
data MerkleLogEntries
:: Type
-> Type
Universe
-> [Type]
-> Type
-> Type
where
MerkleLogBody
:: IsMerkleLogEntry a u b
=> V.Vector b
-> MerkleLogEntries a u '[] b
(:+:)
:: IsMerkleLogEntry a u h
=> h
-> MerkleLogEntries a u t b
-> MerkleLogEntries a u (h ': t) b
infixr 5 :+:
emptyBody :: IsMerkleLogEntry a u b => MerkleLogEntries a u '[] b
emptyBody = MerkleLogBody mempty
# INLINE emptyBody #
mapLogEntries
:: forall a u h s b
. (forall x . IsMerkleLogEntry a u x => x -> b)
-> MerkleLogEntries a u h s
-> V.Vector b
mapLogEntries f m = V.concat $ go m
where
go :: forall h' . MerkleLogEntries a u h' s -> [V.Vector b]
go (MerkleLogBody s) = [V.map f s]
go (h :+: t) = V.singleton (f h) : go t
# INLINE mapLogEntries #
entriesHeaderSize :: MerkleLogEntries a u l s -> Int
entriesHeaderSize MerkleLogBody{} = 0
entriesHeaderSize (_ :+: t) = succ $ entriesHeaderSize t
entriesBody :: MerkleLogEntries a u l s -> V.Vector s
entriesBody (MerkleLogBody s) = s
entriesBody (_ :+: t) = entriesBody t
# INLINE entriesBody #
data MerkleLog a u (h :: [Type]) (b :: Type) = MerkleLog
^ The root hash of the tree of the log .
, _merkleLogEntries :: !(MerkleLogEntries a u h b)
^ The entries of the log .
}
| Class of types which can be represented as a tree log .
1 . a header that consists of a fixed size polymorphic list ' IsMerkleEntry '
2 . a body that consists of a monomorphic sequence of ' IsMerkleEntry '
class (MerkleUniverse u, MerkleHashAlgorithm a) => HasMerkleLog a u b | b -> u where
type MerkleLogHeader b :: [Type]
The header of the log representation of the type .
type MerkleLogBody b :: Type
the body of the log representation of the type .
toLog :: b -> MerkleLog a u (MerkleLogHeader b) (MerkleLogBody b)
^ Transform a value into a Merkle log .
Often the root of the tree is used as identifier and is stored
' merkleLog ' can be used to create the ' MerkleLog ' from the root and
the entries without forcing the computation of the tree field .
fromLog :: MerkleLog a u (MerkleLogHeader b) (MerkleLogBody b) -> b
^ Recover a value from a Merkle log .
type MkLogType a u b = MerkleLog a u (MerkleLogHeader b) (MerkleLogBody b)
| Create a ' MerkleLog ' from a ' MerkleRoot ' and a sequence of ' MerkleLogEntry 's .
merkleLog
:: forall a u h b
. MerkleHashAlgorithm a
=> MerkleRoot a
-> MerkleLogEntries a u h b
-> MerkleLog a u h b
merkleLog root entries = MerkleLog
{ _merkleLogRoot = root
, _merkleLogEntries = entries
$ toList
$ mapLogEntries (toMerkleNodeTagged @a) entries
}
| /Internal:/ Create a representation nodes that are tagged with the
respedtive type from the universe .
toMerkleNodeTagged
:: forall a u b
. IsMerkleLogEntry a u b
=> b
-> MerkleNodeType a B.ByteString
toMerkleNodeTagged b = case toMerkleNode @a @u @b b of
InputNode bytes -> InputNode @a @B.ByteString
$ fromWordBE @Word16 tag <> bytes
TreeNode r -> TreeNode @a r
where
tag :: Word16
tag = tagVal @u @(Tag b)
| /Internal:/ Decode nodes that are tagged with the respedtive type
from the universe .
fromMerkleNodeTagged
:: forall a u b m
. MonadThrow m
=> IsMerkleLogEntry a u b
=> MerkleNodeType a B.ByteString
-> m b
fromMerkleNodeTagged (InputNode bytes) = do
w16 <- toWordBE @Word16 bytes
if w16 /= tag
then throwM
$ MerkleLogWrongTagException (Expected (sshow tag)) (Actual (sshow w16))
else fromMerkleNodeM @a $ InputNode (B.drop 2 bytes)
where
tag = tagVal @u @(Tag b)
fromMerkleNodeTagged r = fromMerkleNodeM @a r
newMerkleLog
:: forall a u h b
. MerkleUniverse u
=> MerkleHashAlgorithm a
=> MerkleLogEntries a u h b
-> MerkleLog a u h b
newMerkleLog entries = MerkleLog
{ _merkleLogRoot = merkleRoot tree
, _merkleLogEntries = entries
, _merkleLogTree = tree
}
where
tree = merkleTree $ toList $ mapLogEntries (toMerkleNodeTagged @a) entries
| /Internal:/ Get ( first ) header entry of given type from a Merkle log .
class Index c (Hdr b) ~ i => HasHeader_ a u c b i | i b -> c where
type Hdr b :: [Type]
header :: b -> c
headerPos :: Int
headerDict :: b -> Dict (IsMerkleLogEntry a u c) c
instance HasHeader_ a u c (MerkleLog a u (c ': t) s) 'Z where
type Hdr (MerkleLog a u (c ': t) s) = (c ': t)
header (MerkleLog _ (c :+: _) _) = c
headerPos = 0
headerDict (MerkleLog _ (c :+: _) _) = Dict c
instance
( HasHeader_ a u c (MerkleLog a u t s) i
)
=> HasHeader_ a u c (MerkleLog a u (x ': t) s) ('S i)
where
type Hdr (MerkleLog a u (x ': t) s) = (x ': t)
header (MerkleLog x (_ :+: t) y) = header @a @u (MerkleLog @a x t y)
headerPos = succ $ headerPos @a @u @c @(MerkleLog a u t s)
headerDict (MerkleLog x (_ :+: t) y) = headerDict @a @u (MerkleLog @a x t y)
type HasHeader a u c b = HasHeader_ a u c b (Index c (Hdr b))
| Get the body sequence of a Merkle log .
body :: MerkleLog a u l s -> V.Vector s
body = entriesBody . _merkleLogEntries
# INLINE body #
| Get the number of entries in the header of a log .
headerSize :: MerkleLog a u l s -> Int
headerSize = entriesHeaderSize . _merkleLogEntries
# INLINE headerSize #
| Get the number of entries in the body of a Merkle log .
bodySize :: MerkleLog a u l s -> Int
bodySize = V.length . body
# INLINE bodySize #
| Compute the root hash for an instance of ' HasMerkleLog ' .
the size of the @b@. For large logs the hash or the full ' MerkleTree ' should
computeMerkleLogRoot
:: forall a u b
. HasMerkleLog a u b
=> b
-> MerkleRoot a
computeMerkleLogRoot = merkleRoot . _merkleLogTree . toLog @a
# INLINE computeMerkleLogRoot #
| Create an inclusion proof for a value in the header of a log .
should use that instead . Another option would be to use two separate trees ,
headerProof
:: forall c a u b m
. MonadThrow m
=> HasHeader a u c (MkLogType a u b)
=> HasMerkleLog a u b
=> b
-> m (MerkleProof a)
headerProof = uncurry3 merkleProof . headerTree @c @a
# INLINE headerProof #
tree and should be used for the leaf tree in the nested proof .
headerTree
:: forall c a u b
. HasHeader a u c (MkLogType a u b)
=> HasMerkleLog a u b
=> b
-> (MerkleNodeType a B.ByteString, Int, MerkleTree a)
headerTree b = (node, p, _merkleLogTree @a mlog)
where
mlog = toLog @a b
p = headerPos @a @u @c @(MkLogType a u b)
node = case headerDict @a @u @c mlog of
Dict hdr -> toMerkleNodeTagged @a hdr
This function returns the position of the input and the tree , but not
headerTree_
:: forall c a u b
. HasHeader a u c (MkLogType a u b)
=> HasMerkleLog a u b
=> b
-> (Int, MerkleTree a)
headerTree_ b = (p, _merkleLogTree @a mlog)
where
mlog = toLog @a b
p = headerPos @a @u @c @(MkLogType a u b)
| Create an inclusion proof for a value in the body of a Merkle log .
tree ) is memoized on the heap . ( i.e. depends on where @b@ is coming from ) . We
' MerkleLog ' value .
bodyProof
:: forall a u b m
. MonadThrow m
=> HasMerkleLog a u b
=> b
-> Int
-> m (MerkleProof a)
bodyProof b = uncurry3 merkleProof . bodyTree @a b
# INLINE bodyProof #
tree and should be used for the leaf tree in the nested proof .
bodyTree
:: forall a u b
. HasMerkleLog a u b
=> b
-> Int
-> (MerkleNodeType a B.ByteString, Int, MerkleTree a)
bodyTree b i = (node, i_, _merkleLogTree @a mlog)
where
mlog = toLog @a b
i_ = i + headerSize mlog
node = mapLogEntries (toMerkleNodeTagged @a) (_merkleLogEntries mlog) V.! i_
This function returns the position of the input and the tree , but not
bodyTree_
:: forall a u b
. HasMerkleLog a u b
=> b
-> Int
-> (Int, MerkleTree a)
bodyTree_ b i = (i_, _merkleLogTree @a mlog)
where
mlog = toLog @a b
i_ = i + headerSize mlog
| Extract the proof subject from a ' MerkleProof ' value .
proofSubject
:: forall a u b m
. MonadThrow m
=> IsMerkleLogEntry a u b
=> MerkleProof a
-> m b
proofSubject p = fromMerkleNodeTagged @a subj
where
MerkleProofSubject subj = _merkleProofSubject p
# INLINE proofSubject #
encodeMerkleInputNode
:: (b -> Put)
-> b
-> MerkleNodeType a B.ByteString
encodeMerkleInputNode encode = InputNode . runPutS . encode
decodeMerkleInputNode
:: MonadThrow m
=> Get b
-> MerkleNodeType a B.ByteString
-> m b
decodeMerkleInputNode decode (InputNode bytes) = runGetS decode bytes
decodeMerkleInputNode _ (TreeNode _) = throwM expectedInputNodeException
encodeMerkleTreeNode :: Coercible a (MerkleRoot alg) => a -> MerkleNodeType alg x
encodeMerkleTreeNode = TreeNode . coerce
decodeMerkleTreeNode
:: MonadThrow m
=> Coercible (MerkleRoot alg) a
=> MerkleNodeType alg x
-> m a
decodeMerkleTreeNode (TreeNode bytes) = return $! coerce bytes
decodeMerkleTreeNode (InputNode _) = throwM expectedTreeNodeException
Support for Deriving Via
' BA.ByteArray ' via the @DerivingVia@ extension .
newtype ByteArrayMerkleLogEntry u (t :: u) b = ByteArrayMerkleLogEntry b
instance
(MerkleHashAlgorithm a, InUniverse u t, BA.ByteArray b)
=> IsMerkleLogEntry a u (ByteArrayMerkleLogEntry u (t :: u) b)
where
type Tag (ByteArrayMerkleLogEntry u t b) = t
toMerkleNode (ByteArrayMerkleLogEntry b) = InputNode $ BA.convert b
fromMerkleNode (InputNode x) = return $! ByteArrayMerkleLogEntry $! BA.convert x
fromMerkleNode (TreeNode _) = throwM expectedInputNodeException
# INLINE toMerkleNode #
# INLINE fromMerkleNode #
' MerkleRoot ' via the @DerivingVia@ extension .
newtype MerkleRootLogEntry a (t :: u) = MerkleRootLogEntry (MerkleRoot a)
instance (MerkleHashAlgorithm a, InUniverse u t) => IsMerkleLogEntry a u (MerkleRootLogEntry a (t :: u)) where
type Tag (MerkleRootLogEntry a t) = t
toMerkleNode (MerkleRootLogEntry r) = TreeNode r
fromMerkleNode (TreeNode !x) = return $! MerkleRootLogEntry x
fromMerkleNode (InputNode _) = throwM expectedTreeNodeException
# INLINE toMerkleNode #
# INLINE fromMerkleNode #
' Word8 ' via the @DerivingVia@ extension .
newtype Word8MerkleLogEntry (t :: u) = Word8MerkleLogEntry { _getWord8LogEntry :: Word8 }
instance
(MerkleHashAlgorithm a, InUniverse u t) => IsMerkleLogEntry a u (Word8MerkleLogEntry (t :: u))
where
type Tag (Word8MerkleLogEntry t) = t
toMerkleNode = InputNode . B.singleton . _getWord8LogEntry
fromMerkleNode (InputNode x) = case B.uncons x of
Nothing -> throwM
$ MerkleLogDecodeException "failed to deserialize Word8 from empty ByteString"
Just (!c,"") -> return $! Word8MerkleLogEntry c
Just _ -> throwM
$ MerkleLogDecodeException "failed to deserialize Word8. Pending bytes in input"
fromMerkleNode (TreeNode _) = throwM expectedInputNodeException
# INLINE toMerkleNode #
# INLINE fromMerkleNode #
' Word16 ' via the @DerivingVia@ extension .
newtype Word16BeMerkleLogEntry (t :: u) = Word16BeMerkleLogEntry
{ _getWord16BeLogEntry :: Word16 }
instance
(MerkleHashAlgorithm a, InUniverse u t) => IsMerkleLogEntry a u (Word16BeMerkleLogEntry (t :: u))
where
type Tag (Word16BeMerkleLogEntry t) = t
toMerkleNode = InputNode . fromWordBE . _getWord16BeLogEntry
fromMerkleNode (InputNode x) = Word16BeMerkleLogEntry <$> toWordBE x
fromMerkleNode (TreeNode _) = throwM expectedInputNodeException
# INLINE toMerkleNode #
# INLINE fromMerkleNode #
' ' via the @DerivingVia@ extension .
newtype Word32BeMerkleLogEntry (t :: u) = Word32BeMerkleLogEntry
{ _getWord32BeLogEntry :: Word32 }
instance
(MerkleHashAlgorithm a, InUniverse u t) => IsMerkleLogEntry a u (Word32BeMerkleLogEntry (t :: u))
where
type Tag (Word32BeMerkleLogEntry t) = t
toMerkleNode = InputNode . fromWordBE . _getWord32BeLogEntry
fromMerkleNode (InputNode x) = Word32BeMerkleLogEntry <$> toWordBE x
fromMerkleNode (TreeNode _) = throwM expectedInputNodeException
# INLINE toMerkleNode #
# INLINE fromMerkleNode #
' Word64 ' via the @DerivingVia@ extension .
newtype Word64BeMerkleLogEntry (t :: u) = Word64BeMerkleLogEntry
{ _getWord64BeLogEntry :: Word64 }
instance
(MerkleHashAlgorithm a, InUniverse u t) => IsMerkleLogEntry a u (Word64BeMerkleLogEntry (t :: u))
where
type Tag (Word64BeMerkleLogEntry t) = t
toMerkleNode = InputNode . fromWordBE . _getWord64BeLogEntry
fromMerkleNode (InputNode x) = Word64BeMerkleLogEntry <$> toWordBE x
fromMerkleNode (TreeNode _) = throwM expectedInputNodeException
# INLINE toMerkleNode #
# INLINE fromMerkleNode #
|
2648eedcc9e2ee76c192f566831985d65aacd15bcc6b102edff3b527f8d53c4f | zmaril/callcongressnow | index.cljs | (ns callcongressnow.index
(:require-macros [cljs.core.async.macros :refer [go]])
(:require [callcongressnow.legislators :refer [legislator-box]]
[cljs.core.async :refer [<!]]
[om.core :as om :include-macros true]
[om.dom :as dom :include-macros true]
[callcongressnow.state :refer
[app-state find-local find-all find-query router]]
[callcongressnow.app :refer
[navbar]]
[callcongressnow.utils :refer [jsonp]]))
(enable-console-print!)
(defn jumbotron [app opts]
(om/component
(dom/div #js {:id "jumbocontain" :className "row"}
(dom/div
#js {:className "col-lg-12"}
(dom/div
#js {:className "jumbotron"}
(dom/p nil (dom/h1 nil
"Give Congress a piece of your mind."))
(dom/p #js {:className "lead"}
"Use the buttons or search bar to find legislators to call from your browser (for free!)"
)
(dom/p nil
(dom/a #js {:onClick (partial find-local app)
:className "btn btn-lg btn-primary"}
"Find your legislators"
)
" "
(dom/a #js {:onClick (partial find-all app)
:className "btn btn-lg btn-primary"}
"View all legislators"))
(dom/p nil
"Search by name: "
(dom/input #js {:type "text"
:onChange #(find-query
app
(.-value (.-target %)))})))))))
(defn index-app [app]
(reify
om/IRender
(render [_ owner]
(dom/div nil
(om/build navbar app)
(dom/div #js {:id "content"}
(dom/div #js {:ref "query"}
(om/build jumbotron app)
(om/build legislator-box app)))))))
(when-let [root (.getElementById js/document "indexcontainer")]
(om/root app-state index-app root))
| null | https://raw.githubusercontent.com/zmaril/callcongressnow/c6a9eef9fd02b8141e87f34ff73a128c0d9cdc7f/src/cljs/callcongressnow/index.cljs | clojure | (ns callcongressnow.index
(:require-macros [cljs.core.async.macros :refer [go]])
(:require [callcongressnow.legislators :refer [legislator-box]]
[cljs.core.async :refer [<!]]
[om.core :as om :include-macros true]
[om.dom :as dom :include-macros true]
[callcongressnow.state :refer
[app-state find-local find-all find-query router]]
[callcongressnow.app :refer
[navbar]]
[callcongressnow.utils :refer [jsonp]]))
(enable-console-print!)
(defn jumbotron [app opts]
(om/component
(dom/div #js {:id "jumbocontain" :className "row"}
(dom/div
#js {:className "col-lg-12"}
(dom/div
#js {:className "jumbotron"}
(dom/p nil (dom/h1 nil
"Give Congress a piece of your mind."))
(dom/p #js {:className "lead"}
"Use the buttons or search bar to find legislators to call from your browser (for free!)"
)
(dom/p nil
(dom/a #js {:onClick (partial find-local app)
:className "btn btn-lg btn-primary"}
"Find your legislators"
)
" "
(dom/a #js {:onClick (partial find-all app)
:className "btn btn-lg btn-primary"}
"View all legislators"))
(dom/p nil
"Search by name: "
(dom/input #js {:type "text"
:onChange #(find-query
app
(.-value (.-target %)))})))))))
(defn index-app [app]
(reify
om/IRender
(render [_ owner]
(dom/div nil
(om/build navbar app)
(dom/div #js {:id "content"}
(dom/div #js {:ref "query"}
(om/build jumbotron app)
(om/build legislator-box app)))))))
(when-let [root (.getElementById js/document "indexcontainer")]
(om/root app-state index-app root))
| |
83771f5a2d8bd59706f7b2af4b722042a3b777b57f958ccfea9ea49969eb2e2d | BranchTaken/Hemlock | test_is_pow2.ml | open! Basis.Rudiments
open! Basis
open Zint
let test () =
let rec test = function
| [] -> ()
| u :: us' -> begin
File.Fmt.stdout
|> Fmt.fmt "is_pow2 "
|> fmt ~alt:true ~radix:Radix.Hex u
|> Fmt.fmt " -> "
|> Bool.fmt (is_pow2 u)
|> Fmt.fmt "\n"
|> ignore;
test us'
end
in
let us = [
of_string "0";
of_string "1";
of_string "2";
of_string "3";
of_string
"0x8000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000";
of_string
"0xffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff"
] in
test us
let _ = test ()
| null | https://raw.githubusercontent.com/BranchTaken/Hemlock/a07e362d66319108c1478a4cbebab765c1808b1a/bootstrap/test/basis/zint/test_is_pow2.ml | ocaml | open! Basis.Rudiments
open! Basis
open Zint
let test () =
let rec test = function
| [] -> ()
| u :: us' -> begin
File.Fmt.stdout
|> Fmt.fmt "is_pow2 "
|> fmt ~alt:true ~radix:Radix.Hex u
|> Fmt.fmt " -> "
|> Bool.fmt (is_pow2 u)
|> Fmt.fmt "\n"
|> ignore;
test us'
end
in
let us = [
of_string "0";
of_string "1";
of_string "2";
of_string "3";
of_string
"0x8000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000";
of_string
"0xffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff"
] in
test us
let _ = test ()
| |
80893d86e09e5602c2f7d89e22e4a2496c5c1b81ab34614d631eb6efa536d73c | cram-code/cram_core | equate-notification-mixin.lisp | Copyright ( c ) 2012 , < >
;;; All rights reserved.
;;;
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions are met:
;;;
;;; * Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;; * Redistributions in binary form must reproduce the above copyright
;;; notice, this list of conditions and the following disclaimer in the
;;; documentation and/or other materials provided with the distribution.
* Neither the name of the Intelligent Autonomous Systems Group/
;;; Technische Universitaet Muenchen nor the names of its contributors
;;; may be used to endorse or promote products derived from this software
;;; without specific prior written permission.
;;;
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS "
;;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER OR
LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
;;; CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
;;; SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN
;;; CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
;;; ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
;;; POSSIBILITY OF SUCH DAMAGE.
(in-package :cram-designators)
(defclass equate-notification-mixin ()
((equate-notification-callbacks
:initform nil
:documentation "List of notification callbacks that are executed
whenever the designator is equated with another one.")))
(defgeneric register-equate-callback (designator callback)
(:documentation "Registers the callback function object `callback'
at `designator'. The callback will be executed whenever the
designator is equated other designators. `callback' is a function
object that takes one parameter, the designator the parameter
`designator' has been equated to.")
(:method ((designator equate-notification-mixin) callback)
(declare (type function callback))
(with-slots (equate-notification-callbacks) designator
(pushnew callback equate-notification-callbacks))))
(defgeneric unregister-equate-callback (designator callback)
(:documentation "Removes `callback' from the list of equate
callbacks in `designator'.")
(:method ((designator equate-notification-mixin) callback)
(declare (type function callback))
(with-slots (equate-notification-callbacks) designator
(setf equate-notification-callbacks
(remove callback equate-notification-callbacks)))))
(defgeneric execute-equate-callbacks (designator other)
(:documentation "Executes all equation callbacks of
`designator'. `other' specifies the designator this designator has
been equated to.")
(:method ((designator equate-notification-mixin) (other designator))
(with-slots (equate-notification-callbacks) designator
(dolist (callback equate-notification-callbacks)
(funcall callback other))))
;; Note(moesenle): We need to specialize on type T here and not on
type DESIGNATOR because in the inheritance tree of the more
;; specific designator types (location, action, object), designator
;; and the mixins are on the same level. That means specializations
on mixins are only executed after specializations on DESIGNATOR .
(:method ((designator t) (other t))
nil))
(defmacro with-equate-callback ((designator callback) &body body)
(with-gensyms (evaluated-designator evaluated-callback)
`(let ((,evaluated-designator ,designator)
(,evaluated-callback ,callback))
(unwind-protect
(progn
(register-equate-callback ,evaluated-designator ,evaluated-callback)
,@body)
(unregister-equate-callback ,evaluated-designator ,evaluated-callback)))))
(defun execute-all-equated-callbacks (designator other)
(declare (type equate-notification-mixin designator)
(type designator other))
(labels ((iterate (designator)
(when designator
(execute-equate-callbacks designator other)
(iterate (successor designator)))))
(iterate (first-desig designator))))
(defmethod equate :after ((parent equate-notification-mixin) successor)
(execute-all-equated-callbacks parent successor))
| null | https://raw.githubusercontent.com/cram-code/cram_core/984046abe2ec9e25b63e52007ed3b857c3d9a13c/cram_designators/src/cram-designators/equate-notification-mixin.lisp | lisp | All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
Technische Universitaet Muenchen nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
Note(moesenle): We need to specialize on type T here and not on
specific designator types (location, action, object), designator
and the mixins are on the same level. That means specializations | Copyright ( c ) 2012 , < >
* Neither the name of the Intelligent Autonomous Systems Group/
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS "
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT OWNER OR
LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN
(in-package :cram-designators)
(defclass equate-notification-mixin ()
((equate-notification-callbacks
:initform nil
:documentation "List of notification callbacks that are executed
whenever the designator is equated with another one.")))
(defgeneric register-equate-callback (designator callback)
(:documentation "Registers the callback function object `callback'
at `designator'. The callback will be executed whenever the
designator is equated other designators. `callback' is a function
object that takes one parameter, the designator the parameter
`designator' has been equated to.")
(:method ((designator equate-notification-mixin) callback)
(declare (type function callback))
(with-slots (equate-notification-callbacks) designator
(pushnew callback equate-notification-callbacks))))
(defgeneric unregister-equate-callback (designator callback)
(:documentation "Removes `callback' from the list of equate
callbacks in `designator'.")
(:method ((designator equate-notification-mixin) callback)
(declare (type function callback))
(with-slots (equate-notification-callbacks) designator
(setf equate-notification-callbacks
(remove callback equate-notification-callbacks)))))
(defgeneric execute-equate-callbacks (designator other)
(:documentation "Executes all equation callbacks of
`designator'. `other' specifies the designator this designator has
been equated to.")
(:method ((designator equate-notification-mixin) (other designator))
(with-slots (equate-notification-callbacks) designator
(dolist (callback equate-notification-callbacks)
(funcall callback other))))
type DESIGNATOR because in the inheritance tree of the more
on mixins are only executed after specializations on DESIGNATOR .
(:method ((designator t) (other t))
nil))
(defmacro with-equate-callback ((designator callback) &body body)
(with-gensyms (evaluated-designator evaluated-callback)
`(let ((,evaluated-designator ,designator)
(,evaluated-callback ,callback))
(unwind-protect
(progn
(register-equate-callback ,evaluated-designator ,evaluated-callback)
,@body)
(unregister-equate-callback ,evaluated-designator ,evaluated-callback)))))
(defun execute-all-equated-callbacks (designator other)
(declare (type equate-notification-mixin designator)
(type designator other))
(labels ((iterate (designator)
(when designator
(execute-equate-callbacks designator other)
(iterate (successor designator)))))
(iterate (first-desig designator))))
(defmethod equate :after ((parent equate-notification-mixin) successor)
(execute-all-equated-callbacks parent successor))
|
8f90d8757834f9f28ae153fda426fef7b273120a9cc3e282eb11016c11ac5df9 | Frama-C/headache | skip.ml | (**************************************************************************)
(* *)
(* Headache *)
(* *)
, Projet Cristal , INRIA Rocquencourt
(* *)
Copyright 2002
Institut National de Recherche en Informatique et en Automatique .
(* All rights reserved. This file is distributed under the terms of *)
the GNU Library General Public License .
(* *)
/~simonet/
(* *)
(**************************************************************************)
type regexp_filename = Str.regexp
;;
type regexp_skip = Str.regexp
;;
type param_skip = bool * regexp_skip
;;
let skip ~verbose skip_lst ic oc =
let multiple_skip_lst,simple_skip_lst =
List.partition (fun (_,(multiple,_)) -> multiple) skip_lst
in
let rec skip_aux () =
let initial_pos =
LargeFile.pos_in ic
in
try
let line =
input_line ic
in
let match_line skip_lst =
let (_,(multiple,_)) =
List.find
(fun (_, (_,rg_skip)) -> Str.string_match rg_skip line 0)
skip_lst
in multiple
in
try
let multiple =
try
match_line multiple_skip_lst;
with Not_found ->
match_line simple_skip_lst;
in
if verbose then
prerr_endline
("Line : "^line^" skipped");
(match oc with
| None -> ()
| Some oc ->
output_string oc line;
output_string oc "\n");
if multiple then skip_aux ()
with Not_found ->
LargeFile.seek_in ic initial_pos
with End_of_file ->
()
in
skip_aux ()
;;
| null | https://raw.githubusercontent.com/Frama-C/headache/e9f5e89041e9949198e096bb8ca764731109b6a0/skip.ml | ocaml | ************************************************************************
Headache
All rights reserved. This file is distributed under the terms of
************************************************************************ | , Projet Cristal , INRIA Rocquencourt
Copyright 2002
Institut National de Recherche en Informatique et en Automatique .
the GNU Library General Public License .
/~simonet/
type regexp_filename = Str.regexp
;;
type regexp_skip = Str.regexp
;;
type param_skip = bool * regexp_skip
;;
let skip ~verbose skip_lst ic oc =
let multiple_skip_lst,simple_skip_lst =
List.partition (fun (_,(multiple,_)) -> multiple) skip_lst
in
let rec skip_aux () =
let initial_pos =
LargeFile.pos_in ic
in
try
let line =
input_line ic
in
let match_line skip_lst =
let (_,(multiple,_)) =
List.find
(fun (_, (_,rg_skip)) -> Str.string_match rg_skip line 0)
skip_lst
in multiple
in
try
let multiple =
try
match_line multiple_skip_lst;
with Not_found ->
match_line simple_skip_lst;
in
if verbose then
prerr_endline
("Line : "^line^" skipped");
(match oc with
| None -> ()
| Some oc ->
output_string oc line;
output_string oc "\n");
if multiple then skip_aux ()
with Not_found ->
LargeFile.seek_in ic initial_pos
with End_of_file ->
()
in
skip_aux ()
;;
|
94c123a254e9cc0035c40461337e1bae1ad7863943431fdb499b5269c91e7ac0 | snmsts/cl-langserver | listeners.lisp | (in-package :ls-base)
;;; Listeners
(defclass listener ()
((out :initarg :out :type stream :reader listener-out)
(in :initarg :in :type stream :reader listener-in)
(env)))
(defmacro listeners () `(connection-listeners *emacs-connection*))
(defmethod initialize-instance :after ((l listener) &key initial-env)
(with-slots (out in env) l
(let ((io (make-two-way-stream in out)))
(setf env
(append
initial-env
`((cl:*standard-output* . ,out)
(cl:*standard-input* . ,in)
(cl:*trace-output* . ,out)
(cl:*error-output* . ,out)
(cl:*debug-io* . ,io)
(cl:*query-io* . ,io)
(cl:*terminal-io* . ,io)))))
(assert out nil "Must have an OUT stream")
(assert in nil "Must have an IN stream")
(assert env nil "Must have an ENV"))
(setf (listeners) (nconc (listeners)
(list l))))
(defun call-with-listener (listener fn &optional saving)
(with-slots (env) listener
(with-bindings env
(unwind-protect (funcall fn)
(when saving
(loop for binding in env
do (setf (cdr binding) (symbol-value (car binding)))))))))
(defmacro with-listener-bindings (listener &body body)
"Execute BODY inside LISTENER's environment"
`(call-with-listener ,listener (lambda () ,@body)))
(defmacro saving-listener-bindings (listener &body body)
"Execute BODY inside LISTENER's environment, update it afterwards."
`(call-with-listener ,listener (lambda () ,@body) 'saving))
(defmacro with-default-listener ((connection) &body body)
"Execute BODY with in CONNECTION's default listener."
(let ((listener-sym (gensym))
(body-fn-sym (gensym)))
`(let ((,listener-sym (default-listener ,connection))
(,body-fn-sym #'(lambda () ,@body)))
(if ,listener-sym
(with-listener-bindings ,listener-sym
(funcall ,body-fn-sym))
(funcall ,body-fn-sym)))))
(defun default-listener (connection)
(first (connection-listeners connection)))
(defun flush-listener-streams (listener)
(with-slots (in out) listener
(force-output out)
(clear-input in)))
(defmethod close-listener (l)
TODO : investigate why SBCL complains when we close IN and OUT
;; here.
(setf (listeners) (delete l (listeners))))
| null | https://raw.githubusercontent.com/snmsts/cl-langserver/3b1246a5d0bd58459e7a64708f820bf718cf7175/src/helitage/listeners.lisp | lisp | Listeners
here. | (in-package :ls-base)
(defclass listener ()
((out :initarg :out :type stream :reader listener-out)
(in :initarg :in :type stream :reader listener-in)
(env)))
(defmacro listeners () `(connection-listeners *emacs-connection*))
(defmethod initialize-instance :after ((l listener) &key initial-env)
(with-slots (out in env) l
(let ((io (make-two-way-stream in out)))
(setf env
(append
initial-env
`((cl:*standard-output* . ,out)
(cl:*standard-input* . ,in)
(cl:*trace-output* . ,out)
(cl:*error-output* . ,out)
(cl:*debug-io* . ,io)
(cl:*query-io* . ,io)
(cl:*terminal-io* . ,io)))))
(assert out nil "Must have an OUT stream")
(assert in nil "Must have an IN stream")
(assert env nil "Must have an ENV"))
(setf (listeners) (nconc (listeners)
(list l))))
(defun call-with-listener (listener fn &optional saving)
(with-slots (env) listener
(with-bindings env
(unwind-protect (funcall fn)
(when saving
(loop for binding in env
do (setf (cdr binding) (symbol-value (car binding)))))))))
(defmacro with-listener-bindings (listener &body body)
"Execute BODY inside LISTENER's environment"
`(call-with-listener ,listener (lambda () ,@body)))
(defmacro saving-listener-bindings (listener &body body)
"Execute BODY inside LISTENER's environment, update it afterwards."
`(call-with-listener ,listener (lambda () ,@body) 'saving))
(defmacro with-default-listener ((connection) &body body)
"Execute BODY with in CONNECTION's default listener."
(let ((listener-sym (gensym))
(body-fn-sym (gensym)))
`(let ((,listener-sym (default-listener ,connection))
(,body-fn-sym #'(lambda () ,@body)))
(if ,listener-sym
(with-listener-bindings ,listener-sym
(funcall ,body-fn-sym))
(funcall ,body-fn-sym)))))
(defun default-listener (connection)
(first (connection-listeners connection)))
(defun flush-listener-streams (listener)
(with-slots (in out) listener
(force-output out)
(clear-input in)))
(defmethod close-listener (l)
TODO : investigate why SBCL complains when we close IN and OUT
(setf (listeners) (delete l (listeners))))
|
947e32e7ae7f72089c80763e30e8cd91fdd76a5ded5ae777f93c3e2dbf4f3a9d | silky/quipper | Unboxing.hs | This file is part of Quipper . Copyright ( C ) 2011 - 2016 . Please see the
-- file COPYRIGHT for a list of authors, copyright holders, licensing,
-- and other details. All rights reserved.
--
-- ======================================================================
{-# LANGUAGE Rank2Types #-}
-- | This library provides functions for “unboxing” hierarchical circuits,
-- replacing calls to named subroutines by inlined copies of the subroutines
-- themselves.
module QuipperLib.Unboxing where
import Quipper
import Quipper.Internal
import Quipper.Circuit (BoxId (..), RepeatFlag (..))
import Quipper.Monad (endpoints_of_wires_in_arity)
import Quipper.Generic (inline_subroutine, transform_unary)
| A transformer to peel away one level of boxing . Transforms any
-- top-level subroutine gate into its corresponding circuit.
unbox_transformer :: Transformer Circ Qubit Bit
unbox_transformer (T_Subroutine name inv ncf _ _ _ ws2 a2 (RepeatFlag reps) f) = f $
\namespace ws c -> do
outputs <- loopM reps ws
((without_controls_if ncf) .
(with_controls c) .
((if inv then flip reverse_generic (endpoints_of_wires_in_arity a2 ws2) else id)
(inline_subroutine name namespace)))
return (outputs, c)
unbox_transformer x = identity_transformer x
| Peel away one level of boxing from a circuit . Transforms any
-- top-level subroutine gate into its corresponding circuit.
unbox_unary :: (QCData x, QCData y) => (x -> Circ y) -> (x -> Circ y)
unbox_unary circ = transform_unary unbox_transformer circ
| Peel away one level of boxing from a circuit . Transforms any
-- top-level subroutine gate into its corresponding circuit.
--
-- The type of this heavily overloaded function is difficult to
-- read. In more readable form, it has all of the following types:
--
> unbox : : ( QCData x ) = > Circ x - > Circ x
> unbox : : ( QCData x , QCData y ) = > ( x - > Circ y ) - > ( x - > Circ y )
> unbox : : ( QCData x , QCData y , QCData z ) = > ( x - > y - > Circ z ) - > ( x - > y - > Circ z )
--
-- and so forth.
unbox :: (QCData x, QCData y, QCurry qfun x y) => qfun -> qfun
unbox = qcurry . unbox_unary . quncurry
-- | A transformer to recursively unbox some specified class of boxed subroutines.
unbox_recursive_filtered_transformer :: (BoxId -> Bool) -> Transformer Circ Qubit Bit
unbox_recursive_filtered_transformer p b@(T_Subroutine boxid inv ncf _ _ _ ws2 a2 (RepeatFlag reps) f) =
if not (p boxid)
then identity_transformer b
else f $
\namespace ws c -> do
outputs <- loopM reps ws
((without_controls_if ncf) .
(with_controls c) .
((if inv then flip reverse_generic (endpoints_of_wires_in_arity a2 ws2) else id) $
(unbox_recursive_filtered p) $
(inline_subroutine boxid namespace)))
return (outputs, c)
unbox_recursive_filtered_transformer _ x = identity_transformer x
-- | Recursively unbox all subroutines satisfying a given predicate.
unbox_recursive_filtered_unary :: (QCData x, QCData y) => (BoxId -> Bool) -> (x -> Circ y) -> (x -> Circ y)
unbox_recursive_filtered_unary p = transform_unary (unbox_recursive_filtered_transformer p)
-- | Recursively unbox all subroutines satisfying a given predicate.
--
-- The type of this heavily overloaded function is difficult to
-- read. In more readable form, it has all of the following types:
--
> unbox_recursive_filtered : : ( QCData x ) = > ( BoxId - > Bool ) - > Circ x - > Circ x
> unbox_recursive_filtered : : ( QCData x , QCData y ) = > ( BoxId - > Bool ) - > ( x - > Circ y ) - > ( x - > Circ y )
--
-- and so forth.
unbox_recursive_filtered :: (QCData x, QCData y, QCurry qfun x y) => (BoxId -> Bool) -> qfun -> qfun
unbox_recursive_filtered p = qcurry . (unbox_recursive_filtered_unary p) . quncurry
-- | Recursively unbox all subroutines of a circuit.
--
-- The type of this heavily overloaded function is difficult to
-- read. In more readable form, it has all of the following types:
--
> unbox_recursive : : ( QCData x ) = > Circ x - > Circ x
> unbox_recursive : : ( QCData x , QCData y ) = > ( x - > Circ y ) - > ( x - > Circ y )
> unbox_recursive : : ( QCData x , QCData y , QCData z ) = > ( x - > y - > Circ z ) - > ( x - > y - > Circ z )
--
-- and so forth.
unbox_recursive :: (QCData x, QCData y, QCurry qfun x y) => qfun -> qfun
unbox_recursive = unbox_recursive_filtered (const True)
| null | https://raw.githubusercontent.com/silky/quipper/1ef6d031984923d8b7ded1c14f05db0995791633/QuipperLib/Unboxing.hs | haskell | file COPYRIGHT for a list of authors, copyright holders, licensing,
and other details. All rights reserved.
======================================================================
# LANGUAGE Rank2Types #
| This library provides functions for “unboxing” hierarchical circuits,
replacing calls to named subroutines by inlined copies of the subroutines
themselves.
top-level subroutine gate into its corresponding circuit.
top-level subroutine gate into its corresponding circuit.
top-level subroutine gate into its corresponding circuit.
The type of this heavily overloaded function is difficult to
read. In more readable form, it has all of the following types:
and so forth.
| A transformer to recursively unbox some specified class of boxed subroutines.
| Recursively unbox all subroutines satisfying a given predicate.
| Recursively unbox all subroutines satisfying a given predicate.
The type of this heavily overloaded function is difficult to
read. In more readable form, it has all of the following types:
and so forth.
| Recursively unbox all subroutines of a circuit.
The type of this heavily overloaded function is difficult to
read. In more readable form, it has all of the following types:
and so forth. | This file is part of Quipper . Copyright ( C ) 2011 - 2016 . Please see the
module QuipperLib.Unboxing where
import Quipper
import Quipper.Internal
import Quipper.Circuit (BoxId (..), RepeatFlag (..))
import Quipper.Monad (endpoints_of_wires_in_arity)
import Quipper.Generic (inline_subroutine, transform_unary)
| A transformer to peel away one level of boxing . Transforms any
unbox_transformer :: Transformer Circ Qubit Bit
unbox_transformer (T_Subroutine name inv ncf _ _ _ ws2 a2 (RepeatFlag reps) f) = f $
\namespace ws c -> do
outputs <- loopM reps ws
((without_controls_if ncf) .
(with_controls c) .
((if inv then flip reverse_generic (endpoints_of_wires_in_arity a2 ws2) else id)
(inline_subroutine name namespace)))
return (outputs, c)
unbox_transformer x = identity_transformer x
| Peel away one level of boxing from a circuit . Transforms any
unbox_unary :: (QCData x, QCData y) => (x -> Circ y) -> (x -> Circ y)
unbox_unary circ = transform_unary unbox_transformer circ
| Peel away one level of boxing from a circuit . Transforms any
> unbox : : ( QCData x ) = > Circ x - > Circ x
> unbox : : ( QCData x , QCData y ) = > ( x - > Circ y ) - > ( x - > Circ y )
> unbox : : ( QCData x , QCData y , QCData z ) = > ( x - > y - > Circ z ) - > ( x - > y - > Circ z )
unbox :: (QCData x, QCData y, QCurry qfun x y) => qfun -> qfun
unbox = qcurry . unbox_unary . quncurry
unbox_recursive_filtered_transformer :: (BoxId -> Bool) -> Transformer Circ Qubit Bit
unbox_recursive_filtered_transformer p b@(T_Subroutine boxid inv ncf _ _ _ ws2 a2 (RepeatFlag reps) f) =
if not (p boxid)
then identity_transformer b
else f $
\namespace ws c -> do
outputs <- loopM reps ws
((without_controls_if ncf) .
(with_controls c) .
((if inv then flip reverse_generic (endpoints_of_wires_in_arity a2 ws2) else id) $
(unbox_recursive_filtered p) $
(inline_subroutine boxid namespace)))
return (outputs, c)
unbox_recursive_filtered_transformer _ x = identity_transformer x
unbox_recursive_filtered_unary :: (QCData x, QCData y) => (BoxId -> Bool) -> (x -> Circ y) -> (x -> Circ y)
unbox_recursive_filtered_unary p = transform_unary (unbox_recursive_filtered_transformer p)
> unbox_recursive_filtered : : ( QCData x ) = > ( BoxId - > Bool ) - > Circ x - > Circ x
> unbox_recursive_filtered : : ( QCData x , QCData y ) = > ( BoxId - > Bool ) - > ( x - > Circ y ) - > ( x - > Circ y )
unbox_recursive_filtered :: (QCData x, QCData y, QCurry qfun x y) => (BoxId -> Bool) -> qfun -> qfun
unbox_recursive_filtered p = qcurry . (unbox_recursive_filtered_unary p) . quncurry
> unbox_recursive : : ( QCData x ) = > Circ x - > Circ x
> unbox_recursive : : ( QCData x , QCData y ) = > ( x - > Circ y ) - > ( x - > Circ y )
> unbox_recursive : : ( QCData x , QCData y , QCData z ) = > ( x - > y - > Circ z ) - > ( x - > y - > Circ z )
unbox_recursive :: (QCData x, QCData y, QCurry qfun x y) => qfun -> qfun
unbox_recursive = unbox_recursive_filtered (const True)
|
3732ec9fefa9b87019285258c64582775639f89ea05ff302f195d53250aa41ec | eamsden/Animas | TestElevatorMain.hs |
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* A F R P *
* *
* Example : Elevator *
* Purpose : Testing of the Elevator simulator . *
* Authors : *
* *
* Copyright ( c ) The University of Nottingham , 2004 *
* *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
******************************************************************************
* A F R P *
* *
* Example: Elevator *
* Purpose: Testing of the Elevator simulator. *
* Authors: Henrik Nilsson *
* *
* Copyright (c) The University of Nottingham, 2004 *
* *
******************************************************************************
-}
module Main where
import Data.List (sortBy, intersperse)
import Data.Maybe (catMaybes)
import FRP.Yampa
import FRP.Yampa.Utilities
import FRP.Yampa.Internals -- Just for testing purposes.
import Elevator
smplPer = 0.01
-- Left-button press event source, driven by 'afterEach' with the given
-- (delay, event) pairs.
lbps :: SF a (Event ())
lbps = afterEach (map (\dt -> (dt, ())) [3.0, 2.0, 50.0])
-- Right-button press event source, driven by 'afterEach' with the given
-- (delay, event) pairs.
rbps :: SF a (Event ())
rbps = afterEach (map (\dt -> (dt, ())) [20.0, 2.0, 18.0, 15.001])
-- Looks for interesting events by inspecting the input events
-- and the elevator position over the interval [0, t_max].
data State = Stopped | GoingUp | GoingDown deriving Eq
-- | Sample the elevator simulation and return, for every sample instant
-- up to @t_max@, the local time together with the button events fed in
-- and the resulting elevator position.
-- Fix: the where-clause annotation below had lost its comment marker
-- ("Time , Input , and Output" appeared as bare text), which is a syntax
-- error; the "--" is restored.
testElevator :: Time -> [(Time, ((Event (), Event ()), Position))]
testElevator t_max = takeWhile ((<= t_max) . fst) tios
  where
    -- Time, Input, and Output
    tios = embed (localTime &&& ((lbps &&& rbps >>^ dup)
                 >>> second elevator))
                 (deltaEncode smplPer (repeat ()))
-- Scan the sampled trace, tracking the motion state between consecutive
-- samples, and emit a (time, position, description) triple at every
-- instant where the state changes or a button event occurs.
findEvents :: [(Time, ((Event (), Event ()), Position))]
              -> [(Time, Position, String)]
findEvents [] = []
findEvents tios@((_, (_, y)) : _) = feAux Stopped y tios
  where
    -- feAux carries the previous state and previous position.
    feAux _ _ [] = []
    feAux sPre yPre ((t, ((lbp, rbp), y)) : tios') =
      if not (null message) then
        (t, y, message) : feAux s y tios'
      else
        feAux s y tios'
      where
        -- Motion state over the last sampling interval.
        s = if y == yPre then
              Stopped
            else if yPre < y then
              GoingUp
            else
              GoingDown
        -- Message for a state change, if any.
        ms = if s /= sPre then
               case s of
                 Stopped -> Just "elevator stopped"
                 GoingUp -> Just "elevator started going up"
                 GoingDown -> Just "elevator started going down"
             else
               Nothing
        -- Messages for button presses occurring at this instant.
        mu = if isEvent lbp then
               Just "up button pressed"
             else
               Nothing
        md = if isEvent rbp then
               Just "down button pressed"
             else
               Nothing
        -- All messages for this instant, comma-separated.
        message = concat (intersperse ", " (catMaybes [ms, mu, md]))
-- Render one event as a line "t = <time>,\ty = <pos>:\t<msg>", with both
-- numbers rounded to two decimal places.
formatEvent :: (Time, Position, String) -> String
formatEvent (t, y, m) =
    "t = " ++ round2 t ++ ",\ty = " ++ round2 y ++ ":\t" ++ m
  where
    round2 v = show (fromIntegral (round (v * 100)) / 100)
-- Print each detected event, one formatted line per event.
ppEvents es = mapM_ (putStrLn . formatEvent) es
main = ppEvents (findEvents (testElevator 100))
| null | https://raw.githubusercontent.com/eamsden/Animas/2404d1de20982a337109fc6032cb77b022514f9d/examples/Elevator/TestElevatorMain.hs | haskell | Just for testing purposes.
Looks for interesting events by inspecting the input events
and the elevator position over the interval [0, t_max]. |
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* A F R P *
* *
* Example : Elevator *
* Purpose : Testing of the Elevator simulator . *
* Authors : *
* *
* Copyright ( c ) The University of Nottingham , 2004 *
* *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
******************************************************************************
* A F R P *
* *
* Example: Elevator *
* Purpose: Testing of the Elevator simulator. *
* Authors: Henrik Nilsson *
* *
* Copyright (c) The University of Nottingham, 2004 *
* *
******************************************************************************
-}
module Main where
import Data.List (sortBy, intersperse)
import Data.Maybe (catMaybes)
import FRP.Yampa
import FRP.Yampa.Utilities
import Elevator
smplPer = 0.01
-- Left-button press event source, driven by 'afterEach' with the given
-- (delay, event) pairs.
lbps :: SF a (Event ())
lbps = afterEach (map (\dt -> (dt, ())) [3.0, 2.0, 50.0])
-- Right-button press event source, driven by 'afterEach' with the given
-- (delay, event) pairs.
rbps :: SF a (Event ())
rbps = afterEach (map (\dt -> (dt, ())) [20.0, 2.0, 18.0, 15.001])
data State = Stopped | GoingUp | GoingDown deriving Eq
-- | Sample the elevator simulation and return, for every sample instant
-- up to @t_max@, the local time together with the button events fed in
-- and the resulting elevator position.
-- Fix: the where-clause annotation below had lost its comment marker
-- ("Time , Input , and Output" appeared as bare text), which is a syntax
-- error; the "--" is restored.
testElevator :: Time -> [(Time, ((Event (), Event ()), Position))]
testElevator t_max = takeWhile ((<= t_max) . fst) tios
  where
    -- Time, Input, and Output
    tios = embed (localTime &&& ((lbps &&& rbps >>^ dup)
                 >>> second elevator))
                 (deltaEncode smplPer (repeat ()))
-- Scan the sampled trace, tracking the motion state between consecutive
-- samples, and emit a (time, position, description) triple at every
-- instant where the state changes or a button event occurs.
findEvents :: [(Time, ((Event (), Event ()), Position))]
              -> [(Time, Position, String)]
findEvents [] = []
findEvents tios@((_, (_, y)) : _) = feAux Stopped y tios
  where
    -- feAux carries the previous state and previous position.
    feAux _ _ [] = []
    feAux sPre yPre ((t, ((lbp, rbp), y)) : tios') =
      if not (null message) then
        (t, y, message) : feAux s y tios'
      else
        feAux s y tios'
      where
        -- Motion state over the last sampling interval.
        s = if y == yPre then
              Stopped
            else if yPre < y then
              GoingUp
            else
              GoingDown
        -- Message for a state change, if any.
        ms = if s /= sPre then
               case s of
                 Stopped -> Just "elevator stopped"
                 GoingUp -> Just "elevator started going up"
                 GoingDown -> Just "elevator started going down"
             else
               Nothing
        -- Messages for button presses occurring at this instant.
        mu = if isEvent lbp then
               Just "up button pressed"
             else
               Nothing
        md = if isEvent rbp then
               Just "down button pressed"
             else
               Nothing
        -- All messages for this instant, comma-separated.
        message = concat (intersperse ", " (catMaybes [ms, mu, md]))
-- Render one event as a line "t = <time>,\ty = <pos>:\t<msg>", with both
-- numbers rounded to two decimal places.
formatEvent :: (Time, Position, String) -> String
formatEvent (t, y, m) =
    "t = " ++ round2 t ++ ",\ty = " ++ round2 y ++ ":\t" ++ m
  where
    round2 v = show (fromIntegral (round (v * 100)) / 100)
-- Print each detected event, one formatted line per event.
ppEvents es = mapM_ (putStrLn . formatEvent) es
main = ppEvents (findEvents (testElevator 100))
|
06aeb411f0da1f1f740fccc0cc5eb3af86728fb029967f2881f54c1398726325 | Fresheyeball/Shpadoinkle | Example.hs | {-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DerivingStrategies #-}
module Shpadoinkle.Website.Types.Example where
import Data.Aeson (FromJSON, ToJSON)
import GHC.Generics (Generic)
import Shpadoinkle (NFData)
import Shpadoinkle.Isreal.Types (Code, SnowNonce,
SnowToken)
import Shpadoinkle.Website.Types.ExampleState (ExampleState)
-- | One live code example on the site: the user's Haskell source plus
-- the identifiers and state of its remote compile/run session (types
-- from 'Shpadoinkle.Isreal.Types').
data Example = Example
  { inputHaskell :: Code -- ^ Haskell source currently shown/edited
  , snowToken :: SnowToken -- ^ session token for the Isreal compile service
  , snowNonce :: SnowNonce -- ^ nonce sent with compile requests (semantics per Isreal)
  , state :: ExampleState -- ^ current compile\/run state of this example
  }
  deriving stock (Eq, Ord, Show, Read, Generic)
  deriving anyclass (FromJSON, ToJSON, NFData)
| null | https://raw.githubusercontent.com/Fresheyeball/Shpadoinkle/5e5fb636fb0b0e99f04bae0d75cff722a10463ae/website/Shpadoinkle/Website/Types/Example.hs | haskell | # LANGUAGE DeriveAnyClass # | # LANGUAGE DeriveGeneric #
{-# LANGUAGE DerivingStrategies #-}
module Shpadoinkle.Website.Types.Example where
import Data.Aeson (FromJSON, ToJSON)
import GHC.Generics (Generic)
import Shpadoinkle (NFData)
import Shpadoinkle.Isreal.Types (Code, SnowNonce,
SnowToken)
import Shpadoinkle.Website.Types.ExampleState (ExampleState)
-- | One live code example on the site: the user's Haskell source plus
-- the identifiers and state of its remote compile/run session (types
-- from 'Shpadoinkle.Isreal.Types').
data Example = Example
  { inputHaskell :: Code -- ^ Haskell source currently shown/edited
  , snowToken :: SnowToken -- ^ session token for the Isreal compile service
  , snowNonce :: SnowNonce -- ^ nonce sent with compile requests (semantics per Isreal)
  , state :: ExampleState -- ^ current compile\/run state of this example
  }
  deriving stock (Eq, Ord, Show, Read, Generic)
  deriving anyclass (FromJSON, ToJSON, NFData)
|
5a3bf5fcc191b73884f10a7bb23621a684f079f8266d3d013d2e2f971a230649 | ksaaskil/functional-programming-examples | Sphere.hs | module Geometry.Sphere
( volume
, area
)
where
-- | Volume of a sphere of the given radius: (4/3) * pi * r^3.
volume :: Float -> Float
volume r = (4.0 / 3.0) * pi * (r ^ 3)
-- | Surface area of a sphere of the given radius: 4 * pi * r^2.
area :: Float -> Float
area r = 4 * pi * (r ^ 2)
| null | https://raw.githubusercontent.com/ksaaskil/functional-programming-examples/cbeb90f200d42e4a464dfb02fa5d7424bc12dd00/learn-you-a-haskell/src/Geometry/Sphere.hs | haskell | module Geometry.Sphere
( volume
, area
)
where
-- | Volume of a sphere of the given radius: (4/3) * pi * r^3.
volume :: Float -> Float
volume r = (4.0 / 3.0) * pi * (r ^ 3)
-- | Surface area of a sphere of the given radius: 4 * pi * r^2.
area :: Float -> Float
area r = 4 * pi * (r ^ 2)
| |
312a5f73c64c2c6942baa75b3e18a22733cc076d31f33c0070689d374bd1d20f | herd/herdtools7 | key.ml | (****************************************************************************)
(* the diy toolsuite *)
(* *)
, University College London , UK .
, INRIA Paris - Rocquencourt , France .
(* *)
Copyright 2010 - present Institut National de Recherche en Informatique et
(* en Automatique and the authors. All rights reserved. *)
(* *)
This software is governed by the CeCILL - B license under French law and
(* abiding by the rules of distribution of free software. You can use, *)
modify and/ or redistribute the software under the terms of the CeCILL - B
license as circulated by CEA , CNRS and INRIA at the following URL
" " . We also give a copy in LICENSE.txt .
(****************************************************************************)
(* Internal keys for mcompare *)
open LogState
(* A key is the reference for one row of the comparison table: the name
   of the test plus extra per-key information of abstract type ['a]. *)
type 'a t = { name : string ; info : 'a }
(* Configuration for [Make]: verbosity level plus user-supplied tables
   overriding, per test name, the kind and the condition. *)
module type Config = sig
  val verbose : int
  val kinds : LogState.kind TblRename.t
  val conds : LogConstr.cond TblRename.t
end
(* Functor building the key-construction helpers from a configuration. *)
module Make(Opt:Config) = struct
  module W = Warn.Make(Opt)
  module LS = LogState.Make(Opt)
  (* Keys carrying no extra information. *)
  module None = struct
    type info = unit
    (* Keys from an array of test names. *)
    let add_names ts = Array.map (fun n -> { name = n; info = (); }) ts
    (* Keys from the tests recorded in one log. *)
    let add_log t =
      Array.map (fun n -> { name = n.tname; info = (); }) t.tests
  end
  (* Keys annotated with a kind (Allow/Forbid/...) and a loop flag. *)
  module Kind = struct
    type info = { kind : LogState.kind ; loop : bool }
    (* Two-pointer merge of the name-sorted accumulator [nks] of
       (name, kind option, loop) triples with the name-sorted tests [ts].
       For names present on both sides, kinds are combined: disagreeing
       kinds degrade to NoKind with a warning, and loop flags are OR-ed. *)
    let add_kind_tests nks ts =
      let sz_nks = Array.length nks in
      let sz_ts = Array.length ts in
      let tout = ExtArray.create () in
      let out x = ExtArray.add tout x in
      let rec do_rec idx_nks idx_ts =
        if idx_nks >= sz_nks || idx_ts >= sz_ts then
          (* One side exhausted: keep the remaining accumulator entries. *)
          ExtArray.blit tout nks idx_nks (sz_nks-idx_nks)
        else
          let (n,k,loop as nk) = nks.(idx_nks)
          and t = ts.(idx_ts) in
          let c = String.compare n t.tname in
          if c < 0 then begin
            out nk ; do_rec (idx_nks+1) idx_ts
          end else if c > 0 then
            do_rec idx_nks (idx_ts+1)
          else (* c=0 *) match k with
          | None ->
              out (n,Some t.LogState.kind,t.LogState.loop) ;
              do_rec (idx_nks+1) (idx_ts+1)
          | Some k1 ->
              let k2 = t.LogState.kind in
              if k1=k2 then
                out (n,Some k1,loop || t.LogState.loop)
              else begin
                W.warn "Kind variation for test %s" n ;
                out (n,Some NoKind,loop || t.LogState.loop)
              end ;
              do_rec (idx_nks+1) (idx_ts+1) in
      do_rec 0 0 ;
      ExtArray.to_array tout
    (* Kind forced by the user-supplied table, if any. *)
    let default_kind name =
      try
        Some (TblRename.find_value Opt.kinds name)
      with Not_found -> None
    (* Build keys for [names], folding the kinds found in the logs [ts];
       a kind from the user table takes precedence over the logs. *)
    let add names ts =
      let nks_init = Array.map (fun x -> x,None,false) names in
      let nks =
        List.fold_left
          (fun nks t -> add_kind_tests nks t.tests)
          nks_init ts in
      Array.map
        (fun (n,k,loop) ->
          let k =
            let kdefault = default_kind n in
            match kdefault with
            | None -> k
            | Some _ -> kdefault in
          match k with
          | None ->
              { name = n ;
                info = { kind = NoKind ; loop = false}}
          | Some k ->
              { name = n ;
                info = { kind = k ; loop = loop}})
        nks
    (* Pretty-print each key's kind, appending "Loop" when flagged. *)
    let pps =
      Array.map
        (fun key ->
          let k = key.info.kind
          and loop = key.info.loop in
          let k = LS.pp_kind k in
          if loop then [k;"Loop"] else [k])
  end
  (* complete information: ie test result for first column *)
  (* Keys carrying the complete test record of the first column. *)
  module Full = struct
    type info = LogState.test
    (* One key per test of the given log, keeping the whole record. *)
    let add log =
      Array.map
        (fun t -> { name = t.tname ; info = t; })
        log.tests
  end
(* Condition *)
  (* Keys annotated with the (optional) condition of each test. *)
  module Cond = struct
    type info =
      { cond : LogConstr.cond option ; unsure : bool ; kind : LogState.kind;}
    (* [sure] tags conditions coming from the user tables, [unsure]
       conditions merely read back from a log. *)
    let sure k c = { cond = c ; unsure = false; kind=k; }
    let unsure k c = { cond = c ; unsure = true; kind=k; }
    (* Rebuild the quantifier of condition [c] so that it agrees with
       the (user-forced) kind [k]. *)
    let change_condition k c = match c with
    | None -> sure k None
    | Some c ->
        let p = ConstrGen.prop_of c in
        let sure c = sure k (Some c) in
        match k with
        | Forbid -> sure (ConstrGen.NotExistsState p)
        | Allow -> sure (ConstrGen.ExistsState p)
        | Require -> sure (ConstrGen.ForallStates p)
        | Undefined -> sure (ConstrGen.ExistsState p)
        | NoKind -> unsure k (Some c)
        | ErrorKind -> assert false
    (* Keys for one log column; conditions/kinds from the user tables
       in [Opt] take precedence over what the log recorded. *)
    let add_col t1 =
      Array.map
        (fun t ->
          let _k,c =
            let c,from_log =
              try
                Some (TblRename.find_value Opt.conds t.tname),false
              with Not_found -> t.condition,true in
            try
              let k = TblRename.find_value Opt.kinds t.tname in
              k,change_condition k c
            with Not_found ->
              (* No user kind: infer it from the condition's quantifier. *)
              let k = match c with
              | None -> NoKind
              | Some c ->
                  let open ConstrGen in
                  begin match c with
                  | NotExistsState _ -> LogState.Forbid
                  | ExistsState _ -> LogState.Allow
                  | ForallStates _ -> LogState.Require
                  end in
              k,if from_log then unsure k c else sure k c in
          { name = t.tname ; info = c; })
        t1.tests
    let add = add_col
    (* Prefer whichever side actually carries a condition (left wins). *)
    let merge_cond x y = match x.cond,y.cond with
    | Some _,_ -> x
    | _,Some _ -> y
    | _,_ -> x
    (* Two-pointer merge of two name-sorted key arrays, combining the
       info of keys present on both sides. *)
    let merge_info xs ys =
      let sz_xs = Array.length xs
      and sz_ys = Array.length ys in
      let tout = ExtArray.create () in
      let out x = ExtArray.add tout x in
      let rec loop i_xs i_ys =
        if i_xs >= sz_xs then
          ExtArray.blit tout ys i_ys (sz_ys-i_ys)
        else if i_ys >= sz_ys then
          ExtArray.blit tout xs i_xs (sz_xs-i_xs)
        else
          let x = xs.(i_xs) and y = ys.(i_ys) in
          let c = String.compare x.name y.name in
          if c < 0 then begin
            out x ; loop (i_xs+1) i_ys
          end else if c > 0 then begin
            out y ; loop i_xs (i_ys+1)
          end else begin
            out { x with info = merge_cond x.info y.info; } ;
            loop (i_xs+1) (i_ys+1)
          end in
      loop 0 0 ;
      ExtArray.to_array tout
    let rec merge_infos xs = function
      | [] -> xs
      | ys::rem -> merge_infos (merge_info xs ys) rem
    (* Keys for several log columns, merged pairwise. *)
    let adds tss =
      let ess = List.map add_col tss in
      match ess with
      | [] -> [| |]
      | xs::rem -> merge_infos xs rem
  end
end
| null | https://raw.githubusercontent.com/herd/herdtools7/b86aec8db64f8812e19468893deb1cdf5bbcfb83/tools/key.ml | ocaml | **************************************************************************
the diy toolsuite
en Automatique and the authors. All rights reserved.
abiding by the rules of distribution of free software. You can use,
**************************************************************************
Internal keys for mcompare
A key is a reference for a row, it features the name of
the test, plus information left abstract
c=0
Condition | , University College London , UK .
, INRIA Paris - Rocquencourt , France .
Copyright 2010 - present Institut National de Recherche en Informatique et
This software is governed by the CeCILL - B license under French law and
modify and/ or redistribute the software under the terms of the CeCILL - B
license as circulated by CEA , CNRS and INRIA at the following URL
" " . We also give a copy in LICENSE.txt .
open LogState
type 'a t = { name : string ; info : 'a }
module type Config = sig
val verbose : int
val kinds : LogState.kind TblRename.t
val conds : LogConstr.cond TblRename.t
end
module Make(Opt:Config) = struct
module W = Warn.Make(Opt)
module LS = LogState.Make(Opt)
module None = struct
type info = unit
let add_names ts = Array.map (fun n -> { name = n; info = (); }) ts
let add_log t =
Array.map (fun n -> { name = n.tname; info = (); }) t.tests
end
module Kind = struct
type info = { kind : LogState.kind ; loop : bool }
let add_kind_tests nks ts =
let sz_nks = Array.length nks in
let sz_ts = Array.length ts in
let tout = ExtArray.create () in
let out x = ExtArray.add tout x in
let rec do_rec idx_nks idx_ts =
if idx_nks >= sz_nks || idx_ts >= sz_ts then
ExtArray.blit tout nks idx_nks (sz_nks-idx_nks)
else
let (n,k,loop as nk) = nks.(idx_nks)
and t = ts.(idx_ts) in
let c = String.compare n t.tname in
if c < 0 then begin
out nk ; do_rec (idx_nks+1) idx_ts
end else if c > 0 then
do_rec idx_nks (idx_ts+1)
| None ->
out (n,Some t.LogState.kind,t.LogState.loop) ;
do_rec (idx_nks+1) (idx_ts+1)
| Some k1 ->
let k2 = t.LogState.kind in
if k1=k2 then
out (n,Some k1,loop || t.LogState.loop)
else begin
W.warn "Kind variation for test %s" n ;
out (n,Some NoKind,loop || t.LogState.loop)
end ;
do_rec (idx_nks+1) (idx_ts+1) in
do_rec 0 0 ;
ExtArray.to_array tout
let default_kind name =
try
Some (TblRename.find_value Opt.kinds name)
with Not_found -> None
let add names ts =
let nks_init = Array.map (fun x -> x,None,false) names in
let nks =
List.fold_left
(fun nks t -> add_kind_tests nks t.tests)
nks_init ts in
Array.map
(fun (n,k,loop) ->
let k =
let kdefault = default_kind n in
match kdefault with
| None -> k
| Some _ -> kdefault in
match k with
| None ->
{ name = n ;
info = { kind = NoKind ; loop = false}}
| Some k ->
{ name = n ;
info = { kind = k ; loop = loop}})
nks
let pps =
Array.map
(fun key ->
let k = key.info.kind
and loop = key.info.loop in
let k = LS.pp_kind k in
if loop then [k;"Loop"] else [k])
end
  (* complete information: ie test result for first column *)
module Full = struct
type info = LogState.test
let add log =
Array.map
(fun t -> { name = t.tname ; info = t; })
log.tests
end
module Cond = struct
type info =
{ cond : LogConstr.cond option ; unsure : bool ; kind : LogState.kind;}
let sure k c = { cond = c ; unsure = false; kind=k; }
let unsure k c = { cond = c ; unsure = true; kind=k; }
let change_condition k c = match c with
| None -> sure k None
| Some c ->
let p = ConstrGen.prop_of c in
let sure c = sure k (Some c) in
match k with
| Forbid -> sure (ConstrGen.NotExistsState p)
| Allow -> sure (ConstrGen.ExistsState p)
| Require -> sure (ConstrGen.ForallStates p)
| Undefined -> sure (ConstrGen.ExistsState p)
| NoKind -> unsure k (Some c)
| ErrorKind -> assert false
let add_col t1 =
Array.map
(fun t ->
let _k,c =
let c,from_log =
try
Some (TblRename.find_value Opt.conds t.tname),false
with Not_found -> t.condition,true in
try
let k = TblRename.find_value Opt.kinds t.tname in
k,change_condition k c
with Not_found ->
let k = match c with
| None -> NoKind
| Some c ->
let open ConstrGen in
begin match c with
| NotExistsState _ -> LogState.Forbid
| ExistsState _ -> LogState.Allow
| ForallStates _ -> LogState.Require
end in
k,if from_log then unsure k c else sure k c in
{ name = t.tname ; info = c; })
t1.tests
let add = add_col
let merge_cond x y = match x.cond,y.cond with
| Some _,_ -> x
| _,Some _ -> y
| _,_ -> x
let merge_info xs ys =
let sz_xs = Array.length xs
and sz_ys = Array.length ys in
let tout = ExtArray.create () in
let out x = ExtArray.add tout x in
let rec loop i_xs i_ys =
if i_xs >= sz_xs then
ExtArray.blit tout ys i_ys (sz_ys-i_ys)
else if i_ys >= sz_ys then
ExtArray.blit tout xs i_xs (sz_xs-i_xs)
else
let x = xs.(i_xs) and y = ys.(i_ys) in
let c = String.compare x.name y.name in
if c < 0 then begin
out x ; loop (i_xs+1) i_ys
end else if c > 0 then begin
out y ; loop i_xs (i_ys+1)
end else begin
out { x with info = merge_cond x.info y.info; } ;
loop (i_xs+1) (i_ys+1)
end in
loop 0 0 ;
ExtArray.to_array tout
let rec merge_infos xs = function
| [] -> xs
| ys::rem -> merge_infos (merge_info xs ys) rem
let adds tss =
let ess = List.map add_col tss in
match ess with
| [] -> [| |]
| xs::rem -> merge_infos xs rem
end
end
|
2df148be5a841628a0058c2087707145e1711649336f4773e202af1132d4dd99 | clojure-interop/java-jdk | AttributeSet$CharacterAttribute.clj | (ns javax.swing.text.AttributeSet$CharacterAttribute
"This interface is the type signature that is expected
to be present on any attribute key that contributes to
character level presentation. This would be any attribute
that applies to a so-called run of
style."
(:refer-clojure :only [require comment defn ->])
(:import [javax.swing.text AttributeSet$CharacterAttribute]))
| null | https://raw.githubusercontent.com/clojure-interop/java-jdk/8d7a223e0f9a0965eb0332fad595cf7649d9d96e/javax.swing/src/javax/swing/text/AttributeSet%24CharacterAttribute.clj | clojure | (ns javax.swing.text.AttributeSet$CharacterAttribute
"This interface is the type signature that is expected
to be present on any attribute key that contributes to
character level presentation. This would be any attribute
that applies to a so-called run of
style."
(:refer-clojure :only [require comment defn ->])
(:import [javax.swing.text AttributeSet$CharacterAttribute]))
| |
916dcc4a29407daf647005662ed172aa3109923f798bac798c8e5c0446ecac2c | russmatney/reframe-games | views.cljs | (ns games.tetris.views
(:require
[re-frame.core :as rf]
[games.grid.views :as grid.views]
[games.subs :as subs]
[games.color :as color]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Cells
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn cell->style
  "Build the inline style map for one grid cell: any per-game :cell-style
  is kept, and the background is the piece color when the cell has a
  :color, otherwise the plain background color."
  [game-opts {:keys [color] :as cell}]
  (let [base (or (:cell-style game-opts) {})
        bg   (if color
               (color/cell->piece-color cell)
               (color/cell->background cell))]
    (merge base {:background bg})))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Grid
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn matrix
  "Render the grid as rows of cells via grid.views/matrix, styling each
  cell with cell->style for the given game options."
  [grid game-opts]
  (grid.views/matrix grid
                     {:cell->style (fn [cell] (cell->style game-opts cell))}))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Select game
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defn select-game
  "Mini tetris board intended for the choose-a-game screen."
  []
  ;; NOTE: game-opts is intentionally shadowed — the literal map is used
  ;; to subscribe to the grid, then replaced by the subscribed game opts.
  (let [game-opts {:name :tetris-select-game}
        grid @(rf/subscribe [::subs/game-grid game-opts])
        game-opts @(rf/subscribe [::subs/game-opts game-opts])]
    [:div
     [matrix grid game-opts]]))
| null | https://raw.githubusercontent.com/russmatney/reframe-games/ff05f6ad4794e4505b6231522af0c90c3e212631/src/games/tetris/views.cljs | clojure |
Cells
Grid
Select game
| (ns games.tetris.views
(:require
[re-frame.core :as rf]
[games.grid.views :as grid.views]
[games.subs :as subs]
[games.color :as color]))
(defn cell->style
  "Build the inline style map for one grid cell: any per-game :cell-style
  is kept, and the background is the piece color when the cell has a
  :color, otherwise the plain background color."
  [game-opts {:keys [color] :as cell}]
  (let [base (or (:cell-style game-opts) {})
        bg   (if color
               (color/cell->piece-color cell)
               (color/cell->background cell))]
    (merge base {:background bg})))
(defn matrix
  "Render the grid as rows of cells via grid.views/matrix, styling each
  cell with cell->style for the given game options."
  [grid game-opts]
  (grid.views/matrix grid
                     {:cell->style (fn [cell] (cell->style game-opts cell))}))
(defn select-game
  "Mini tetris board intended for the choose-a-game screen."
  []
  ;; NOTE: game-opts is intentionally shadowed — the literal map is used
  ;; to subscribe to the grid, then replaced by the subscribed game opts.
  (let [game-opts {:name :tetris-select-game}
        grid @(rf/subscribe [::subs/game-grid game-opts])
        game-opts @(rf/subscribe [::subs/game-opts game-opts])]
    [:div
     [matrix grid game-opts]]))
|
b1248311dcb812056dc97e0b39df85c5752dac3984331baa497aeac57d5221d6 | ocaml/dune | client.ml | open Import
(* Thin wrapper around [Client.client] that always enables the private
   [build] and [status] requests in the RPC menu. *)
let client ?handler connection init ~f =
  let private_menu = [ Request Decl.build; Request Decl.status ] in
  Client.client ?handler ~private_menu connection init ~f
| null | https://raw.githubusercontent.com/ocaml/dune/20180d12149343d073cdea5860d01dc181702e6a/src/dune_rpc_impl/client.ml | ocaml | open Import
(* Thin wrapper around [Client.client] that always enables the private
   [build] and [status] requests in the RPC menu. *)
let client ?handler connection init ~f =
  let private_menu = [ Request Decl.build; Request Decl.status ] in
  Client.client ?handler ~private_menu connection init ~f
| |
0a7772e825e97b14b61107e060bcd9bd68380d62748be1721720652f9313c62e | borkdude/jet | project.clj | (defproject borkdude/jet
#=(clojure.string/trim
#=(slurp "resources/JET_VERSION"))
:description "jet"
:url ""
:scm {:name "git"
:url ""}
:license {:name "Eclipse Public License 1.0"
:url "-1.0.php"}
:source-paths ["src"]
:dependencies [[org.clojure/clojure "1.11.1"]
[com.cognitect/transit-clj "1.0.329"]
[cheshire "5.11.0"]
[clj-commons/clj-yaml "1.0.26"]
[mvxcvi/puget "1.3.2"]
[commons-io/commons-io "2.11.0"]
[org.babashka/sci "0.5.34"]
[org.babashka/cli "0.6.45"]
[camel-snake-kebab "0.4.3"]
[com.rpl/specter "1.1.4"]]
:profiles {:test {:dependencies [[clj-commons/conch "0.9.2"]]}
:uberjar {:dependencies [[com.github.clj-easy/graal-build-time "0.1.4"]]
:global-vars {*assert* false}
:jvm-opts [#_"-Dclojure.compiler.direct-linking=true"
#_"-Dclojure.spec.skip-macros=true"]
:aot [jet.main]
:main jet.main}
:native-image {:jvm-opts ["-Djet.native=true"]
:java-source-paths ["src-java"]}}
:aliases {"jet" ["run" "-m" "jet.main"]}
:deploy-repositories [["clojars" {:url ""
:username :env/clojars_user
:password :env/clojars_pass
:sign-releases false}]])
| null | https://raw.githubusercontent.com/borkdude/jet/fea03680f50846a808afbb22b00dc732f98ebaeb/project.clj | clojure | (defproject borkdude/jet
#=(clojure.string/trim
#=(slurp "resources/JET_VERSION"))
:description "jet"
:url ""
:scm {:name "git"
:url ""}
:license {:name "Eclipse Public License 1.0"
:url "-1.0.php"}
:source-paths ["src"]
:dependencies [[org.clojure/clojure "1.11.1"]
[com.cognitect/transit-clj "1.0.329"]
[cheshire "5.11.0"]
[clj-commons/clj-yaml "1.0.26"]
[mvxcvi/puget "1.3.2"]
[commons-io/commons-io "2.11.0"]
[org.babashka/sci "0.5.34"]
[org.babashka/cli "0.6.45"]
[camel-snake-kebab "0.4.3"]
[com.rpl/specter "1.1.4"]]
:profiles {:test {:dependencies [[clj-commons/conch "0.9.2"]]}
:uberjar {:dependencies [[com.github.clj-easy/graal-build-time "0.1.4"]]
:global-vars {*assert* false}
:jvm-opts [#_"-Dclojure.compiler.direct-linking=true"
#_"-Dclojure.spec.skip-macros=true"]
:aot [jet.main]
:main jet.main}
:native-image {:jvm-opts ["-Djet.native=true"]
:java-source-paths ["src-java"]}}
:aliases {"jet" ["run" "-m" "jet.main"]}
:deploy-repositories [["clojars" {:url ""
:username :env/clojars_user
:password :env/clojars_pass
:sign-releases false}]])
| |
45fcadcd812d15ae9a400da90a3fe6eacb0a262514c7a319125f38ee5d166f1f | gklijs/bkes-demo | project.clj | (defproject nl.openweb/projector "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:plugins [[walmartlabs/shared-deps "0.2.8"]]
:dependencies [[org.apache.logging.log4j/log4j-slf4j-impl "2.14.0"]]
:dependency-sets [:clojure nl.openweb/topology]
:main nl.openweb.projector.core
:profiles {:uberjar {:omit-source true
:aot :all
:uberjar-name "projector-docker.jar"}})
| null | https://raw.githubusercontent.com/gklijs/bkes-demo/43d78683b41c2c8a1d06ab9fe4e6bea0c67cf16b/projector/project.clj | clojure | (defproject nl.openweb/projector "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:plugins [[walmartlabs/shared-deps "0.2.8"]]
:dependencies [[org.apache.logging.log4j/log4j-slf4j-impl "2.14.0"]]
:dependency-sets [:clojure nl.openweb/topology]
:main nl.openweb.projector.core
:profiles {:uberjar {:omit-source true
:aot :all
:uberjar-name "projector-docker.jar"}})
| |
b51c70aee52748c80f72b45beae0549a5557f0d5d6a0853ed3989cfcc1547007 | gsakkas/rite | 0031.ml | BopG VarG VarG
depth - num
a + x
x * x
wList = wReverse
length2 > length1
pi *. x
a * a
pi *. y
i + j
a < b
a * x
| null | https://raw.githubusercontent.com/gsakkas/rite/958a0ad2460e15734447bc07bd181f5d35956d3b/data/sp14/clusters/0031.ml | ocaml | BopG VarG VarG
depth - num
a + x
x * x
wList = wReverse
length2 > length1
pi *. x
a * a
pi *. y
i + j
a < b
a * x
| |
1941ff9417df1e4ac153f1828486b59c8e745adcb99fb1e927393bdb68a56f32 | GaloisInc/ivory | Control.hs | # LANGUAGE DataKinds #
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeFamilies #-}
module Ivory.Stdlib.Control
( ifte
, when
, unless
, cond_, cond, (==>), Cond()
) where
import Ivory.Language
-- | Expression-level if-then-else: runs the branch selected by the
-- condition and returns its result via a zero-initialised temporary.
ifte :: ( IvoryStore a
        , IvoryZero ('Stored a)
        , GetAlloc eff ~ 'Scope s
        ) => IBool
          -> Ivory eff a
          -> Ivory eff a
          -> Ivory eff a
ifte b onTrue onFalse = do
  result <- local izero
  let run branch = branch >>= store result
  ifte_ b (run onTrue) (run onFalse)
  deref result
-- | Run the action only when the condition holds; no-op otherwise.
when :: IBool -> Ivory eff () -> Ivory eff ()
when b body = ifte_ b body (return ())
-- | Run the action only when the condition is false; no-op otherwise.
unless :: IBool -> Ivory eff () -> Ivory eff ()
unless b body = ifte_ b (return ()) body
data Cond eff a = Cond IBool (Ivory eff a)
-- | Attach a branch body to its guard condition (used with 'cond_'/'cond').
(==>) :: IBool -> Ivory eff a -> Cond eff a
b ==> m = Cond b m
infix 0 ==>
-- | A multi-way if. This is useful for avoiding an explosion of
-- nesting and parentheses in complex conditionals.
--
-- Instead of writing nested chains of ifs:
--
-- > ifte_ (x >? 100)
-- >   (store result 10)
-- >   (ifte_ (x >? 50)
-- >     (store result 5)
-- > (ifte_ (x >? 0)
-- >       (store result 1)
-- > (store result 0)))
--
-- You can write:
--
-- > cond_
-- > [ x >? 100 ==> store result 10
-- > , x >? 50  ==> store result 5
-- > , x >? 0   ==> store result 1
-- > , true ==> store result 0
-- > ]
--
-- Note that "==>" is non-associative and has precedence 0, so you
-- will need parentheses to call functions with "$" on the left-hand
-- side:
--
-- > cond_ [ (f $ g x) ==> y ]
--
-- rather than:
--
-- > cond_ [ f $ g x ==> y ]
-- | Multi-way if for unit bodies: executes the body of the first true
-- condition; does nothing when none holds.
cond_ :: [Cond eff ()] -> Ivory eff ()
cond_ = foldr (\(Cond b f) rest -> ifte_ b f rest) (return ())
-- | Multi-way conditional returning a value: the body of the first true
-- condition is run and its result returned; when no condition holds the
-- zero-initialised default ('izero') is returned instead.
cond :: ( IvoryStore a
        , IvoryZero ('Stored a)
        , GetAlloc eff ~ 'Scope s
        ) => [Cond eff a] -> Ivory eff a
cond as = do
  r <- local izero
  aux as r
  deref r
  where
  -- Chain of ifte_ trying each condition in order, storing the first hit.
  aux [] _ = return ()
  aux ((Cond b f):cs) r = ifte_ b (f >>= store r) (aux cs r)
| null | https://raw.githubusercontent.com/GaloisInc/ivory/53a0795b4fbeb0b7da0f6cdaccdde18849a78cd6/ivory-stdlib/src/Ivory/Stdlib/Control.hs | haskell | | A multi-way if. This is useful for avoiding an explosion of
nesting and parentheses in complex conditionals.
Instead of writing nested chains of ifs:
> (ifte_ (x >? 0)
> (store result 0)))
You can write:
> cond_
> , true ==> store result 0
> ]
Note that "==>" is non-associative and has precedence 0, so you
will need parentheses to call functions with "$" on the left-hand
side:
> cond_ [ (f $ g x) ==> y ]
rather than:
> cond_ [ f $ g x ==> y ] | # LANGUAGE DataKinds #
# LANGUAGE TypeOperators #
# LANGUAGE FlexibleContexts #
# LANGUAGE TypeFamilies #
module Ivory.Stdlib.Control
( ifte
, when
, unless
, cond_, cond, (==>), Cond()
) where
import Ivory.Language
-- | Expression-level if-then-else: runs the branch selected by the
-- condition and returns its result via a zero-initialised temporary.
ifte :: ( IvoryStore a
        , IvoryZero ('Stored a)
        , GetAlloc eff ~ 'Scope s
        ) => IBool
          -> Ivory eff a
          -> Ivory eff a
          -> Ivory eff a
ifte b onTrue onFalse = do
  result <- local izero
  let run branch = branch >>= store result
  ifte_ b (run onTrue) (run onFalse)
  deref result
-- | Run the action only when the condition holds; no-op otherwise.
when :: IBool -> Ivory eff () -> Ivory eff ()
when b body = ifte_ b body (return ())
-- | Run the action only when the condition is false; no-op otherwise.
unless :: IBool -> Ivory eff () -> Ivory eff ()
unless b body = ifte_ b (return ()) body
data Cond eff a = Cond IBool (Ivory eff a)
-- | Attach a branch body to its guard condition (used with 'cond_'/'cond').
(==>) :: IBool -> Ivory eff a -> Cond eff a
b ==> m = Cond b m
infix 0 ==>
-- > ifte_ (x >? 100)
-- >   (store result 10)
-- >   (ifte_ (x >? 50)
-- >     (store result 5)
-- >     (store result 1)
-- > [ x >? 100 ==> store result 10
-- > , x >? 50  ==> store result 5
-- > , x >? 0   ==> store result 1
-- | Multi-way if for unit bodies: executes the body of the first true
-- condition; does nothing when none holds.
cond_ :: [Cond eff ()] -> Ivory eff ()
cond_ = foldr (\(Cond b f) rest -> ifte_ b f rest) (return ())
-- | Multi-way conditional returning a value: the body of the first true
-- condition is run and its result returned; when no condition holds the
-- zero-initialised default ('izero') is returned instead.
cond :: ( IvoryStore a
        , IvoryZero ('Stored a)
        , GetAlloc eff ~ 'Scope s
        ) => [Cond eff a] -> Ivory eff a
cond as = do
  r <- local izero
  aux as r
  deref r
  where
  -- Chain of ifte_ trying each condition in order, storing the first hit.
  aux [] _ = return ()
  aux ((Cond b f):cs) r = ifte_ b (f >>= store r) (aux cs r)
|
d82aa3d830f257a4bfc0f0b50678d2487544e383e14c450cd10c5b60659e40a9 | gildor478/ocaml-fileutils | FileUtilWHICH.ml | (******************************************************************************)
(* ocaml-fileutils: files and filenames common operations *)
(* *)
Copyright ( C ) 2003 - 2014 ,
(* *)
(* This library is free software; you can redistribute it and/or modify it *)
(* under the terms of the GNU Lesser General Public License as published by *)
the Free Software Foundation ; either version 2.1 of the License , or ( at
(* your option) any later version, with the OCaml static compilation *)
(* exception. *)
(* *)
(* This library is distributed in the hope that it will be useful, but *)
(* WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the file *)
(* COPYING for more details. *)
(* *)
You should have received a copy of the GNU Lesser General Public License
along with this library ; if not , write to the Free Software Foundation ,
Inc. , 51 Franklin St , Fifth Floor , Boston , MA 02110 - 1301 USA
(******************************************************************************)
open FileUtilTypes
open FilePath
open FileUtilTEST
(* [which ?path fln] searches [path] (default: the directories of the
   PATH environment variable) for an executable regular file named
   [fln] and returns its full filename.
   On Win32 the candidate name is tried with each extension listed in
   the PATHEXT environment variable; elsewhere the name is used as is.
   Fix: the annotation "Extract possible extension from PATHEXT" had
   lost its [(* *)] delimiters and sat as bare text inside the
   expression, a syntax error; the comment markers are restored.
   @raise Not_found when no matching executable exists. *)
let which ?(path) fln =
  let real_path =
    match path with
    | None ->
        path_of_string
          (try
            Sys.getenv "PATH"
          with Not_found ->
            "")
    | Some x ->
        x
  in
  let exec_test = test (And(Is_exec, Is_file)) in
  let which_path =
    match Sys.os_type with
    | "Win32" ->
        begin
          let real_ext =
            List.map
              (fun dot_ext ->
                (* Remove leading "." if it exists *)
                if (String.length dot_ext) >= 1 && dot_ext.[0] = '.' then
                  String.sub dot_ext 1 ((String.length dot_ext) - 1)
                else
                  dot_ext)
              (* Extract possible extensions from PATHEXT *)
              (path_of_string
                 (try
                   Sys.getenv "PATHEXT"
                 with Not_found ->
                   ""))
          in
          let to_filename dirname ext = add_extension (concat dirname fln) ext in
          let ctst dirname ext = exec_test (to_filename dirname ext) in
          (* First directory (in PATH order) providing a matching
             executable with any of the candidate extensions wins. *)
          List.fold_left
            (fun found dirname ->
              if found = None then begin
                try
                  let ext = List.find (ctst dirname) real_ext in
                  Some (to_filename dirname ext)
                with Not_found ->
                  None
              end else
                found)
            None
            real_path
        end
    | _ ->
        begin
          let to_filename dirname = concat dirname fln in
          try
            Some
              (to_filename
                 (List.find
                    (fun dirname ->
                      exec_test (to_filename dirname)) real_path))
          with Not_found ->
            None
        end
  in
  match which_path with
  | Some fn -> fn
  | None -> raise Not_found
| null | https://raw.githubusercontent.com/gildor478/ocaml-fileutils/9ad8d2ee342c551391f2a9873de01982d24b36d5/src/lib/fileutils/FileUtilWHICH.ml | ocaml | ****************************************************************************
ocaml-fileutils: files and filenames common operations
This library is free software; you can redistribute it and/or modify it
under the terms of the GNU Lesser General Public License as published by
your option) any later version, with the OCaml static compilation
exception.
This library is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the file
COPYING for more details.
****************************************************************************
Remove leading "." if it exists | Copyright ( C ) 2003 - 2014 ,
the Free Software Foundation ; either version 2.1 of the License , or ( at
You should have received a copy of the GNU Lesser General Public License
along with this library ; if not , write to the Free Software Foundation ,
Inc. , 51 Franklin St , Fifth Floor , Boston , MA 02110 - 1301 USA
open FileUtilTypes
open FilePath
open FileUtilTEST
(** [which ?path fln] searches the directories of [path] (defaulting to
    the [PATH] environment variable, or an empty search path when unset)
    for an executable regular file named [fln] and returns the first
    matching filename.

    On Win32, each candidate is additionally tried with every extension
    listed in the [PATHEXT] environment variable, mirroring the lookup
    performed by the Windows shell.

    @raise Not_found when no matching executable exists. *)
let which ?(path) fln =
  (* Directories to search: the explicit [path] argument, or $PATH split
     into components; an unset $PATH behaves like an empty path. *)
  let real_path =
    match path with
    | None ->
      path_of_string
        (try Sys.getenv "PATH" with Not_found -> "")
    | Some x ->
      x
  in
  (* A candidate matches when it is a regular file with execute permission. *)
  let exec_test = test (And (Is_exec, Is_file)) in
  let which_path =
    match Sys.os_type with
    | "Win32" ->
      begin
        (* Extract possible extensions from PATHEXT, dropping any leading
           dot so they can be fed to [add_extension].
           NOTE(review): the extraction had stripped the comment delimiters
           here, leaving bare prose in the code; restored. *)
        let real_ext =
          List.map
            (fun dot_ext ->
               if String.length dot_ext >= 1 && dot_ext.[0] = '.' then
                 String.sub dot_ext 1 (String.length dot_ext - 1)
               else
                 dot_ext)
            (path_of_string
               (try Sys.getenv "PATHEXT" with Not_found -> ""))
        in
        let to_filename dirname ext = add_extension (concat dirname fln) ext in
        let ctst dirname ext = exec_test (to_filename dirname ext) in
        (* First directory containing [fln] with any accepted extension wins. *)
        List.fold_left
          (fun found dirname ->
             if found = None then begin
               try
                 let ext = List.find (ctst dirname) real_ext in
                 Some (to_filename dirname ext)
               with Not_found ->
                 None
             end else
               found)
          None
          real_path
      end
    | _ ->
      begin
        let to_filename dirname = concat dirname fln in
        try
          Some
            (to_filename
               (List.find
                  (fun dirname -> exec_test (to_filename dirname))
                  real_path))
        with Not_found ->
          None
      end
  in
  match which_path with
  | Some fn -> fn
  | None -> raise Not_found
add0d420475e622cdb3ae43cdd1f6444a25400da5507580bf3a3baa955e95358 | jackrusher/sparkledriver | project.clj | (defproject sparkledriver "0.2.4"
:description "A clojure wrapper for jBrowserDriver, which is a Selenium-compatible wrapper around JFX embedded WebKit."
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.9.0"]
[com.machinepublishers/jbrowserdriver "1.1.1"]
[org.clojure/math.numeric-tower "0.0.4"]]
:profiles {:dev {:dependencies [[org.slf4j/slf4j-simple "1.7.25"]
[http-kit "2.3.0"]
[compojure "1.6.1"]
[hiccup "1.0.5"]]}
:codox {:dependencies [[codox-theme-rdash "0.1.2"]]
:plugins [[lein-codox "0.10.3"]]
:codox {:project {:name "sparkledriver"}
:metadata {:doc/format :markdown}
:themes [:rdash]
:output-path "gh-pages"}}}
:aliases {"codox" ["with-profile" "codox,dev" "codox"]}
:deploy-repositories [["releases" :clojars]])
| null | https://raw.githubusercontent.com/jackrusher/sparkledriver/375885dcad85e37ae2e0d624bc005016ada2982e/project.clj | clojure | (defproject sparkledriver "0.2.4"
:description "A clojure wrapper for jBrowserDriver, which is a Selenium-compatible wrapper around JFX embedded WebKit."
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.9.0"]
[com.machinepublishers/jbrowserdriver "1.1.1"]
[org.clojure/math.numeric-tower "0.0.4"]]
:profiles {:dev {:dependencies [[org.slf4j/slf4j-simple "1.7.25"]
[http-kit "2.3.0"]
[compojure "1.6.1"]
[hiccup "1.0.5"]]}
:codox {:dependencies [[codox-theme-rdash "0.1.2"]]
:plugins [[lein-codox "0.10.3"]]
:codox {:project {:name "sparkledriver"}
:metadata {:doc/format :markdown}
:themes [:rdash]
:output-path "gh-pages"}}}
:aliases {"codox" ["with-profile" "codox,dev" "codox"]}
:deploy-repositories [["releases" :clojars]])
| |
ac49aaf49f2d7a2714bf42c86dbf9ef9ebfa606050d63f35dd6d79e3f2b10014 | racket/gui | gui.rkt | (module gui scheme
(require scheme/gui/base)
(provide (all-from-out scheme)
(all-from-out scheme/gui/base)))
| null | https://raw.githubusercontent.com/racket/gui/d1fef7a43a482c0fdd5672be9a6e713f16d8be5c/gui-lib/scheme/gui.rkt | racket | (module gui scheme
(require scheme/gui/base)
(provide (all-from-out scheme)
(all-from-out scheme/gui/base)))
| |
7c0b094b032f5bfefe41e051e750385d51759ed761686a969d2f6beba5c7e309 | freckle/stackctl | Deploy.hs | module Stackctl.Spec.Deploy
( DeployOptions(..)
, DeployConfirmation(..)
, parseDeployOptions
, runDeploy
) where
import Stackctl.Prelude
import Blammo.Logging.Logger (pushLoggerLn)
import qualified Data.Text as T
import Data.Time (defaultTimeLocale, formatTime, utcToLocalZonedTime)
import Options.Applicative
import Stackctl.Action
import Stackctl.AWS hiding (action)
import Stackctl.AWS.Scope
import Stackctl.Colors
import Stackctl.Config (HasConfig)
import Stackctl.DirectoryOption (HasDirectoryOption)
import Stackctl.FilterOption (HasFilterOption)
import Stackctl.ParameterOption
import Stackctl.Prompt
import Stackctl.RemovedStack
import Stackctl.Spec.Changes.Format
import Stackctl.Spec.Discover
import Stackctl.StackSpec
import Stackctl.TagOption
import UnliftIO.Directory (createDirectoryIfMissing)
-- | Options controlling a @deploy@ run, as parsed by 'parseDeployOptions'.
data DeployOptions = DeployOptions
  { sdoParameters :: [Parameter] -- ^ Template parameters given on the command line
  , sdoTags :: [Tag] -- ^ Stack tags given on the command line
  , sdoSaveChangeSets :: Maybe FilePath -- ^ Directory in which to record executed changesets, if any
  , sdoDeployConfirmation :: DeployConfirmation -- ^ Whether to prompt before executing changes
  , sdoRemovals :: Bool -- ^ Whether to delete Stacks whose specs were removed
  , sdoClean :: Bool -- ^ Whether to delete all changesets from the Stack after deploy
  }
-- brittany-disable-next-binding

-- | Command-line parser for 'DeployOptions'. The applicative chain is in
-- record-field order so each combinator lines up with its field.
parseDeployOptions :: Parser DeployOptions
parseDeployOptions = DeployOptions
  <$> many parameterOption
  <*> many tagOption
  <*> optional (strOption
        (  long "save-change-sets"
        <> metavar "DIRECTORY"
        <> help "Save executed changesets to DIRECTORY"
        <> action "directory"
        ))
  <*> flag DeployWithConfirmation DeployWithoutConfirmation
        (  long "no-confirm"
        <> help "Don't confirm changes before executing"
        )
  <*> (not <$> switch
        (  long "no-remove"
        <> help "Don't delete removed Stacks"
        ))
  <*> switch
        (  long "clean"
        <> help "Remove all changesets from Stack after deploy"
        )
-- | Top-level @deploy@ action: for every discovered spec, create a
-- changeset and (if non-empty) execute it, then optionally delete Stacks
-- whose specs were removed.
runDeploy
  :: ( MonadMask m
     , MonadUnliftIO m
     , MonadResource m
     , MonadLogger m
     , MonadReader env m
     , HasLogger env
     , HasAwsScope env
     , HasAwsEnv env
     , HasConfig env
     , HasDirectoryOption env
     , HasFilterOption env
     )
  => DeployOptions
  -> m ()
runDeploy DeployOptions {..} = do
  specs <- discoverSpecs
  for_ specs $ \spec -> do
    withThreadContext ["stackName" .= stackSpecStackName spec] $ do
      -- A Stack stuck in a failed state has to be deleted before a new
      -- changeset can be applied.
      checkIfStackRequiresDeletion sdoDeployConfirmation
        $ stackSpecStackName spec
      emChangeSet <- createChangeSet spec sdoParameters sdoTags
      case emChangeSet of
        Left err -> do
          logError $ "Error creating ChangeSet" :# ["error" .= err]
          exitFailure
        -- A Nothing changeset means no changes were detected.
        Right Nothing -> logInfo "Stack is up to date"
        Right (Just changeSet) -> do
          let stackName = stackSpecStackName spec
          -- Optionally record the changeset JSON before executing it.
          for_ sdoSaveChangeSets $ \dir -> do
            let out = dir </> unpack (unStackName stackName) <.> "json"
            logInfo $ "Recording changeset" :# ["path" .= out]
            createDirectoryIfMissing True dir
            writeFileUtf8 out $ changeSetJSON changeSet
          deployChangeSet sdoDeployConfirmation changeSet
          runActions stackName PostDeploy $ stackSpecActions spec
          when sdoClean $ awsCloudFormationDeleteAllChangeSets stackName
  when sdoRemovals $ do
    removed <- inferRemovedStacks
    traverse_ (deleteRemovedStack sdoDeployConfirmation) removed
-- | Show the removed Stack to the operator and delete it, prompting
-- first when confirmation is required.
deleteRemovedStack
  :: ( MonadMask m
     , MonadResource m
     , MonadLogger m
     , MonadReader env m
     , HasLogger env
     , HasAwsEnv env
     )
  => DeployConfirmation
  -> Stack
  -> m ()
deleteRemovedStack confirmation stack = do
  withThreadContext ["stack" .= stackName] $ do
    colors <- getColorsLogger
    -- Print what is about to be removed before asking for confirmation.
    pushLoggerLn $ formatRemovedStack colors FormatTTY stack
    case confirmation of
      DeployWithConfirmation -> do
        promptContinue
        logInfo "Deleting Stack"
      DeployWithoutConfirmation -> pure ()
    deleteStack stackName
  where stackName = StackName $ stack ^. stack_stackName
-- | Whether destructive operations (executing a changeset, deleting a
-- Stack) prompt the user first.
data DeployConfirmation
  = DeployWithConfirmation
  | DeployWithoutConfirmation
  deriving stock Eq
-- | If the Stack exists in a status that blocks further deploys, warn,
-- (optionally) prompt, and delete it before continuing.
checkIfStackRequiresDeletion
  :: ( MonadUnliftIO m
     , MonadResource m
     , MonadLogger m
     , MonadReader env m
     , HasLogger env
     , HasAwsEnv env
     )
  => DeployConfirmation
  -> StackName
  -> m ()
checkIfStackRequiresDeletion confirmation stackName = do
  mStack <- awsCloudFormationDescribeStackMaybe stackName
  -- Only statuses that require deletion reach the body of this for_.
  for_ (stackStatusRequiresDeletion =<< mStack) $ \status -> do
    logWarn $ "Stack must be deleted before proceeding" :# ["status" .= status]
    when (status == StackStatus_ROLLBACK_FAILED)
      $ logWarn
          "Stack is in ROLLBACK_FAILED. This may require elevated permissions for the delete to succeed"
    case confirmation of
      DeployWithConfirmation -> promptContinue
      -- Deleting a Stack is destructive, so never do it silently.
      DeployWithoutConfirmation -> do
        logError "Refusing to delete without confirmation"
        exitFailure
    logInfo "Deleting Stack"
    deleteStack stackName
-- | Delete the named Stack, logging the outcome at info level on success
-- and warn level on failure.
deleteStack
  :: (MonadResource m, MonadLogger m, MonadReader env m, HasAwsEnv env)
  => StackName
  -> m ()
deleteStack stackName = do
  outcome <- awsCloudFormationDeleteStack stackName
  let message = prettyStackDeleteResult outcome :# []
  case outcome of
    StackDeleteSuccess -> logInfo message
    StackDeleteFailure {} -> logWarn message
-- | Print the changeset, confirm if required, then execute it while a
-- background thread tails and prints the Stack's events; exits the
-- process when the deploy fails.
deployChangeSet
  :: ( MonadUnliftIO m
     , MonadResource m
     , MonadLogger m
     , MonadReader env m
     , HasLogger env
     , HasAwsEnv env
     )
  => DeployConfirmation
  -> ChangeSet
  -> m ()
deployChangeSet confirmation changeSet = do
  colors <- getColorsLogger
  pushLoggerLn $ formatTTY colors (unStackName stackName) $ Just changeSet
  case confirmation of
    DeployWithConfirmation -> promptContinue
    DeployWithoutConfirmation -> pure ()
  -- It can take a minute to get this batch of events to work out where we're
  -- tailing from, so do that part synchronously
  -- NOTE(review): the comment delimiter on the line above had been lost,
  -- leaving bare prose in the code; restored.
  mLastId <- awsCloudFormationGetMostRecentStackEventId stackName
  asyncTail <- async $ tailStackEventsSince stackName mLastId
  logInfo $ "Executing ChangeSet" :# ["changeSetId" .= changeSetId]
  result <- do
    awsCloudFormationExecuteChangeSet changeSetId
    awsCloudFormationWait stackName
  cancel asyncTail
  let
    onSuccess = logInfo $ prettyStackDeployResult result :# []
    onFailure = do
      logError $ prettyStackDeployResult result :# []
      exitFailure
  case result of
    StackCreateSuccess -> onSuccess
    StackCreateFailure{} -> onFailure
    StackUpdateSuccess -> onSuccess
    StackUpdateFailure{} -> onFailure
 where
  stackName = csStackName changeSet
  changeSetId = csChangeSetId changeSet
-- | Poll the Stack's events forever (note the fully-polymorphic return
-- type @m a@ — this never returns), printing each new event. Meant to be
-- run with 'async' and cancelled by the caller.
tailStackEventsSince
  :: ( MonadResource m
     , MonadLogger m
     , MonadReader env m
     , HasLogger env
     , HasAwsEnv env
     )
  => StackName
  -> Maybe Text -- ^ StackEventId to resume tailing from, if any
  -> m a
tailStackEventsSince stackName mLastId = do
  colors <- getColorsLogger
  events <- awsCloudFormationDescribeStackEvents stackName mLastId
  -- Events arrive newest-first from the API call; reverse so they print
  -- in chronological order. (Ordering assumption — confirm against
  -- awsCloudFormationDescribeStackEvents.)
  traverse_ (pushLoggerLn <=< formatStackEvent colors) $ reverse events
  -- Without this small delay before looping, our requests seem to hang
  -- intermittently (without errors) and often we miss events.
  threadDelay $ 1 * 1000000
  -- Tail from the next "last id". If we got no events, be sure to pass along
  -- any last-id we were given
  tailStackEventsSince stackName $ getLastEventId events <|> mLastId
-- | Render one Stack event as a single line:
-- @timestamp | STATUS LogicalId (reason)@, with the status colorized.
formatStackEvent :: MonadIO m => Colors -> StackEvent -> m Text
formatStackEvent Colors {..} e = do
  -- Timestamps are converted to the local zone for display.
  timestamp <-
    liftIO $ formatTime defaultTimeLocale "%F %T %Z" <$> utcToLocalZonedTime
      (e ^. stackEvent_timestamp)
  pure $ mconcat
    [ fromString timestamp
    , " | "
    , maybe "" colorStatus $ e ^. stackEvent_resourceStatus
    , maybe "" (magenta . (" " <>)) $ e ^. stackEvent_logicalResourceId
    , maybe "" ((\x -> " (" <> x <> ")") . T.strip)
      $ e
      ^. stackEvent_resourceStatusReason
    ]
 where
  -- Color by outcome: red for rollback/failure, green for completion,
  -- blue for in-progress, yellow for skipped; anything else unstyled.
  colorStatus = \case
    ResourceStatus' x
      | "ROLLBACK" `T.isInfixOf` x -> red x
      | "COMPLETE" `T.isSuffixOf` x -> green x
      | "FAILED" `T.isSuffixOf` x -> red x
      | "IN_PROGRESS" `T.isSuffixOf` x -> blue x
      | "SKIPPED" `T.isSuffixOf` x -> yellow x
      | otherwise -> x
-- | The event id of the first event in the batch, if any.
getLastEventId :: [StackEvent] -> Maybe Text
getLastEventId events =
  case events of
    [] -> Nothing
    e : _ -> Just (e ^. stackEvent_eventId)
| null | https://raw.githubusercontent.com/freckle/stackctl/fdb99c911dd118dee4a46358da98ceedcebc2e09/src/Stackctl/Spec/Deploy.hs | haskell | brittany-disable-next-binding
tailing from, so do that part synchronously
^ StackEventId
Without this small delay before looping, our requests seem to hang
intermittently (without errors) and often we miss events.
Tail from the next "last id". If we got no events, be sure to pass along
any last-id we were given | module Stackctl.Spec.Deploy
( DeployOptions(..)
, DeployConfirmation(..)
, parseDeployOptions
, runDeploy
) where
import Stackctl.Prelude
import Blammo.Logging.Logger (pushLoggerLn)
import qualified Data.Text as T
import Data.Time (defaultTimeLocale, formatTime, utcToLocalZonedTime)
import Options.Applicative
import Stackctl.Action
import Stackctl.AWS hiding (action)
import Stackctl.AWS.Scope
import Stackctl.Colors
import Stackctl.Config (HasConfig)
import Stackctl.DirectoryOption (HasDirectoryOption)
import Stackctl.FilterOption (HasFilterOption)
import Stackctl.ParameterOption
import Stackctl.Prompt
import Stackctl.RemovedStack
import Stackctl.Spec.Changes.Format
import Stackctl.Spec.Discover
import Stackctl.StackSpec
import Stackctl.TagOption
import UnliftIO.Directory (createDirectoryIfMissing)
data DeployOptions = DeployOptions
{ sdoParameters :: [Parameter]
, sdoTags :: [Tag]
, sdoSaveChangeSets :: Maybe FilePath
, sdoDeployConfirmation :: DeployConfirmation
, sdoRemovals :: Bool
, sdoClean :: Bool
}
parseDeployOptions :: Parser DeployOptions
parseDeployOptions = DeployOptions
<$> many parameterOption
<*> many tagOption
<*> optional (strOption
( long "save-change-sets"
<> metavar "DIRECTORY"
<> help "Save executed changesets to DIRECTORY"
<> action "directory"
))
<*> flag DeployWithConfirmation DeployWithoutConfirmation
( long "no-confirm"
<> help "Don't confirm changes before executing"
)
<*> (not <$> switch
( long "no-remove"
<> help "Don't delete removed Stacks"
))
<*> switch
( long "clean"
<> help "Remove all changesets from Stack after deploy"
)
runDeploy
:: ( MonadMask m
, MonadUnliftIO m
, MonadResource m
, MonadLogger m
, MonadReader env m
, HasLogger env
, HasAwsScope env
, HasAwsEnv env
, HasConfig env
, HasDirectoryOption env
, HasFilterOption env
)
=> DeployOptions
-> m ()
runDeploy DeployOptions {..} = do
specs <- discoverSpecs
for_ specs $ \spec -> do
withThreadContext ["stackName" .= stackSpecStackName spec] $ do
checkIfStackRequiresDeletion sdoDeployConfirmation
$ stackSpecStackName spec
emChangeSet <- createChangeSet spec sdoParameters sdoTags
case emChangeSet of
Left err -> do
logError $ "Error creating ChangeSet" :# ["error" .= err]
exitFailure
Right Nothing -> logInfo "Stack is up to date"
Right (Just changeSet) -> do
let stackName = stackSpecStackName spec
for_ sdoSaveChangeSets $ \dir -> do
let out = dir </> unpack (unStackName stackName) <.> "json"
logInfo $ "Recording changeset" :# ["path" .= out]
createDirectoryIfMissing True dir
writeFileUtf8 out $ changeSetJSON changeSet
deployChangeSet sdoDeployConfirmation changeSet
runActions stackName PostDeploy $ stackSpecActions spec
when sdoClean $ awsCloudFormationDeleteAllChangeSets stackName
when sdoRemovals $ do
removed <- inferRemovedStacks
traverse_ (deleteRemovedStack sdoDeployConfirmation) removed
deleteRemovedStack
:: ( MonadMask m
, MonadResource m
, MonadLogger m
, MonadReader env m
, HasLogger env
, HasAwsEnv env
)
=> DeployConfirmation
-> Stack
-> m ()
deleteRemovedStack confirmation stack = do
withThreadContext ["stack" .= stackName] $ do
colors <- getColorsLogger
pushLoggerLn $ formatRemovedStack colors FormatTTY stack
case confirmation of
DeployWithConfirmation -> do
promptContinue
logInfo "Deleting Stack"
DeployWithoutConfirmation -> pure ()
deleteStack stackName
where stackName = StackName $ stack ^. stack_stackName
data DeployConfirmation
= DeployWithConfirmation
| DeployWithoutConfirmation
deriving stock Eq
checkIfStackRequiresDeletion
:: ( MonadUnliftIO m
, MonadResource m
, MonadLogger m
, MonadReader env m
, HasLogger env
, HasAwsEnv env
)
=> DeployConfirmation
-> StackName
-> m ()
checkIfStackRequiresDeletion confirmation stackName = do
mStack <- awsCloudFormationDescribeStackMaybe stackName
for_ (stackStatusRequiresDeletion =<< mStack) $ \status -> do
logWarn $ "Stack must be deleted before proceeding" :# ["status" .= status]
when (status == StackStatus_ROLLBACK_FAILED)
$ logWarn
"Stack is in ROLLBACK_FAILED. This may require elevated permissions for the delete to succeed"
case confirmation of
DeployWithConfirmation -> promptContinue
DeployWithoutConfirmation -> do
logError "Refusing to delete without confirmation"
exitFailure
logInfo "Deleting Stack"
deleteStack stackName
deleteStack
:: (MonadResource m, MonadLogger m, MonadReader env m, HasAwsEnv env)
=> StackName
-> m ()
deleteStack stackName = do
result <- awsCloudFormationDeleteStack stackName
case result of
StackDeleteSuccess -> logInfo $ prettyStackDeleteResult result :# []
StackDeleteFailure{} -> logWarn $ prettyStackDeleteResult result :# []
deployChangeSet
:: ( MonadUnliftIO m
, MonadResource m
, MonadLogger m
, MonadReader env m
, HasLogger env
, HasAwsEnv env
)
=> DeployConfirmation
-> ChangeSet
-> m ()
deployChangeSet confirmation changeSet = do
colors <- getColorsLogger
pushLoggerLn $ formatTTY colors (unStackName stackName) $ Just changeSet
case confirmation of
DeployWithConfirmation -> promptContinue
DeployWithoutConfirmation -> pure ()
It can take a minute to get this batch of events to work out where we 're
mLastId <- awsCloudFormationGetMostRecentStackEventId stackName
asyncTail <- async $ tailStackEventsSince stackName mLastId
logInfo $ "Executing ChangeSet" :# ["changeSetId" .= changeSetId]
result <- do
awsCloudFormationExecuteChangeSet changeSetId
awsCloudFormationWait stackName
cancel asyncTail
let
onSuccess = logInfo $ prettyStackDeployResult result :# []
onFailure = do
logError $ prettyStackDeployResult result :# []
exitFailure
case result of
StackCreateSuccess -> onSuccess
StackCreateFailure{} -> onFailure
StackUpdateSuccess -> onSuccess
StackUpdateFailure{} -> onFailure
where
stackName = csStackName changeSet
changeSetId = csChangeSetId changeSet
tailStackEventsSince
:: ( MonadResource m
, MonadLogger m
, MonadReader env m
, HasLogger env
, HasAwsEnv env
)
=> StackName
-> m a
tailStackEventsSince stackName mLastId = do
colors <- getColorsLogger
events <- awsCloudFormationDescribeStackEvents stackName mLastId
traverse_ (pushLoggerLn <=< formatStackEvent colors) $ reverse events
threadDelay $ 1 * 1000000
tailStackEventsSince stackName $ getLastEventId events <|> mLastId
formatStackEvent :: MonadIO m => Colors -> StackEvent -> m Text
formatStackEvent Colors {..} e = do
timestamp <-
liftIO $ formatTime defaultTimeLocale "%F %T %Z" <$> utcToLocalZonedTime
(e ^. stackEvent_timestamp)
pure $ mconcat
[ fromString timestamp
, " | "
, maybe "" colorStatus $ e ^. stackEvent_resourceStatus
, maybe "" (magenta . (" " <>)) $ e ^. stackEvent_logicalResourceId
, maybe "" ((\x -> " (" <> x <> ")") . T.strip)
$ e
^. stackEvent_resourceStatusReason
]
where
colorStatus = \case
ResourceStatus' x
| "ROLLBACK" `T.isInfixOf` x -> red x
| "COMPLETE" `T.isSuffixOf` x -> green x
| "FAILED" `T.isSuffixOf` x -> red x
| "IN_PROGRESS" `T.isSuffixOf` x -> blue x
| "SKIPPED" `T.isSuffixOf` x -> yellow x
| otherwise -> x
getLastEventId :: [StackEvent] -> Maybe Text
getLastEventId = fmap (^. stackEvent_eventId) . listToMaybe
|
99ddcdb33707eed0c8b1dc700868e233c8986e7acff5bf055447d6a6ca586248 | hdbc/hdbc-odbc | ODBC.hs | |
Module : Database . HDBC.ODBC
Copyright : Copyright ( C ) 2005
License : BSD3
Maintainer : < >
Stability : provisional
Portability : portable
HDBC driver interface for ODBC 3.x
Written by , jgoerzen\@complete.org
Module : Database.HDBC.ODBC
Copyright : Copyright (C) 2005 John Goerzen
License : BSD3
Maintainer : John Goerzen <>
Stability : provisional
Portability: portable
HDBC driver interface for ODBC 3.x
Written by John Goerzen, jgoerzen\@complete.org
-}
module Database.HDBC.ODBC
(
connectODBC, Connection(), getQueryInfo, setAutoCommit
)
where
import Database.HDBC.ODBC.Connection(connectODBC, Connection())
import Database.HDBC.ODBC.ConnectionImpl(getQueryInfo, setAutoCommit)
| null | https://raw.githubusercontent.com/hdbc/hdbc-odbc/06833d77799f16634d2038bcdc308c35d4752cdd/Database/HDBC/ODBC.hs | haskell | |
Module : Database . HDBC.ODBC
Copyright : Copyright ( C ) 2005
License : BSD3
Maintainer : < >
Stability : provisional
Portability : portable
HDBC driver interface for ODBC 3.x
Written by , jgoerzen\@complete.org
Module : Database.HDBC.ODBC
Copyright : Copyright (C) 2005 John Goerzen
License : BSD3
Maintainer : John Goerzen <>
Stability : provisional
Portability: portable
HDBC driver interface for ODBC 3.x
Written by John Goerzen, jgoerzen\@complete.org
-}
-- | HDBC driver interface for ODBC 3.x: re-exports the connection entry
-- point and the ODBC-specific helpers from the implementation modules.
module Database.HDBC.ODBC
	(
	 connectODBC, Connection(), getQueryInfo, setAutoCommit
	)
	where
import Database.HDBC.ODBC.Connection(connectODBC, Connection())
import Database.HDBC.ODBC.ConnectionImpl(getQueryInfo, setAutoCommit)
| |
be5f6c03802ea99f13bd8d0e32d76bfe14d943352ddde663b6f6e2057c79692c | skanev/playground | 45-tests.scm | (require rackunit rackunit/text-ui)
(load "../45.scm")
(define sicp-1.45-tests
(test-suite
"Tests for SICP exercise 1.45"
(check-= (nth-root 2 4) 2 0.000001)
(check-= (nth-root 3 8) 2 0.000001)
(check-= (nth-root 4 16) 2 0.000001)
(check-= (nth-root 5 32) 2 0.000001)
(check-= (nth-root 8 256) 2 0.000001)
(check-= (nth-root 9 512) 2 0.000001)
))
(run-tests sicp-1.45-tests)
| null | https://raw.githubusercontent.com/skanev/playground/d88e53a7f277b35041c2f709771a0b96f993b310/scheme/sicp/01/tests/45-tests.scm | scheme | (require rackunit rackunit/text-ui)
(load "../45.scm")
;; Test suite for SICP exercise 1.45: checks nth-root (defined in
;; ../45.scm, loaded above) against known integer roots, within a
;; tolerance of 1e-6.
(define sicp-1.45-tests
  (test-suite
   "Tests for SICP exercise 1.45"
   (check-= (nth-root 2 4) 2 0.000001)
   (check-= (nth-root 3 8) 2 0.000001)
   (check-= (nth-root 4 16) 2 0.000001)
   (check-= (nth-root 5 32) 2 0.000001)
   (check-= (nth-root 8 256) 2 0.000001)
   (check-= (nth-root 9 512) 2 0.000001)
   ))
(run-tests sicp-1.45-tests)
| |
1e06d87a6295b1213adcf411f5fb8c5e2e361d9df72586e611df8bcadcc5ad37 | ghilesZ/geoml | constraint.ml | (**The linear constraint module*)
(** Comparison operator of a linear constraint: strict/weak less-than or
    greater-than relative to zero. *)
type comp = Lt | Gt | Leq | Geq
(** [neg c] is the comparison selecting the complement half-space of [c]:
    direction reversed and strictness flipped. *)
let neg c =
  match c with
  | Lt -> Geq
  | Gt -> Leq
  | Leq -> Gt
  | Geq -> Lt
(** [compf c] is the polymorphic comparison function denoted by [c]. *)
let compf cmp =
  match cmp with
  | Lt -> ( < )
  | Gt -> ( > )
  | Leq -> ( <= )
  | Geq -> ( >= )
(** A linear constraint: a boundary line (coefficients a, b, c of
    [a*x + b*y + c]) together with the comparison relating it to zero. *)
type t = Line.t * comp
(** [print fmt c] pretty-prints constraint [c] on [fmt] in the form
    "ax + by + c OP 0". *)
let print fmt ((l,cmp):t) =
  (* Render the comparison operator as its usual mathematical symbol. *)
  let comp_to_string (c:comp) =
    match c with
    | Lt -> "<"
    | Gt -> ">"
    | Leq -> "<="
    | Geq -> ">="
  in
  let (a,b,c) = Line.get_coeff l in
  Format.fprintf fmt "%fx + %fy + %f %s 0" a b c (comp_to_string cmp)
(** [make l comp] builds the constraint [l OP 0]. *)
let make l comp : t = (l,comp)

(** [get_border c] is the boundary line of [c]. *)
let get_border (l,_) = l

(** [get_comp c] is the comparison operator of [c]. *)
let get_comp (_,c) = c
(** [contains c p] tests whether the point [p] satisfies constraint [c],
    i.e. lies in the half-space it defines. *)
let contains ((line, cmp) : t) p =
  let a, b, c = Line.get_coeff line in
  (* Evaluate the linear form at p and compare it against zero. *)
  let lhs = (a *. p.Point.x) +. (b *. p.Point.y) +. c in
  compf cmp lhs 0.
(** [contains c p] returns true if the point [p] is in the half-space defined by [c] *)
(** [translate dx dy c] shifts the boundary of [c] by the vector
    (dx, dy), keeping the same comparison. *)
let translate dx dy ((line, cmp) : t) =
  let moved = Line.translate dx dy line in
  make moved cmp
(** [complementary c] is the constraint describing the complement
    half-space of [c]: same boundary line, negated comparison. *)
let complementary (l,comp) = l,(neg comp)
(** [intersects c1 c2] tests whether the half-spaces of [c1] and [c2]
    overlap. *)
let intersects (((l1,_)as c1):t) (((l2,_) as c2) :t) =
  (* Non-parallel boundary lines always cross, so the half-spaces share
     points; for parallel boundaries, check whether a point of either
     boundary lies inside the other constraint. *)
  not(Line.parallel l1 l2)
  || Line.arbitrary_point l1 |> contains c2
  || Line.arbitrary_point l2 |> contains c1
| null | https://raw.githubusercontent.com/ghilesZ/geoml/a239499dabbbfbf74bae73a105ed567546f43312/src/constraint.ml | ocaml | *The linear constraint module | type comp = Lt | Gt | Leq | Geq
let neg = function
| Lt -> Geq
| Leq -> Gt
| Gt -> Leq
| Geq -> Lt
let compf = function
| Lt -> ( < )
| Leq -> ( <= )
| Gt -> ( > )
| Geq -> ( >= )
type t = Line.t * comp
let print fmt ((l,cmp):t) =
let comp_to_string (c:comp) =
match c with
| Lt -> "<"
| Gt -> ">"
| Leq -> "<="
| Geq -> ">="
in
let (a,b,c) = Line.get_coeff l in
Format.fprintf fmt "%fx + %fy + %f %s 0" a b c (comp_to_string cmp)
let make l comp : t = (l,comp)
let get_border (l,_) = l
let get_comp (_,c) = c
let contains ((l,comp):t) p =
let (a,b,c) = Line.get_coeff l in
let value =
let open Point in
a *. p.x +. b *. p.y +. c
in
(compf comp) value 0.
* contains c p returns true if the point p is in the half - space defined by c
let translate dx dy ((l,comp):t) =
make (Line.translate dx dy l) comp
let complementary (l,comp) = l,(neg comp)
let intersects (((l1,_)as c1):t) (((l2,_) as c2) :t) =
not(Line.parallel l1 l2)
|| Line.arbitrary_point l1 |> contains c2
|| Line.arbitrary_point l2 |> contains c1
|
9e5796d96d2da12fe4e5b3c9fc1312250fae7b1ed2841b9fcc55a41718726979 | inhabitedtype/ocaml-aws | getMaintenanceWindowTask.mli | open Types
type input = GetMaintenanceWindowTaskRequest.t
type output = GetMaintenanceWindowTaskResult.t
type error = Errors_internal.t
include
Aws.Call with type input := input and type output := output and type error := error
| null | https://raw.githubusercontent.com/inhabitedtype/ocaml-aws/3bc554af7ae7ef9e2dcea44a1b72c9e687435fa9/libraries/ssm/lib/getMaintenanceWindowTask.mli | ocaml | open Types
(** Request payload for the GetMaintenanceWindowTask call. *)
type input = GetMaintenanceWindowTaskRequest.t

(** Response payload for the GetMaintenanceWindowTask call. *)
type output = GetMaintenanceWindowTaskResult.t

(** Errors this call can produce. *)
type error = Errors_internal.t

(* Instantiate the generic Aws.Call interface with this call's types. *)
include
  Aws.Call with type input := input and type output := output and type error := error
| |
631a2d5f2778c18a29ddc4289c5bf8227abe03f6cab1c75121cfd97c973489e5 | stackbuilders/tutorials | Main.hs | module Main (main) where
import Shortener (shortener)
-- | Entry point: run the URL-shortener web application.
main :: IO ()
main =
  shortener
| null | https://raw.githubusercontent.com/stackbuilders/tutorials/d80de4b31407754c2a01eff7eeed65550d9baac8/tutorials/haskell/getting-started-with-haskell-projects-using-scotty/code/app/Main.hs | haskell | module Main (main) where
import Shortener (shortener)
main :: IO ()
main =
shortener
| |
09765f9c6705e839a7fea389abd7710cadd7d63974bb932fc3b98181566b812b | valerauko/kitsune-alpha | common.clj | (ns kitsune.spec.common
(:require [clojure.spec.alpha :as s]
[org.bovinegenius [exploding-fish :refer [absolute?]]]))
(s/def ::id pos-int?)
(s/def ::url
(s/with-gen
absolute?
#(s/gen uri?)))
(s/def ::bool
(s/or :bool boolean?
:str #{"true" "false"}))
(s/def ::created-at inst?)
| null | https://raw.githubusercontent.com/valerauko/kitsune-alpha/34ca08e4013541aeaf1a5a5786f3189ff17c1a58/src/kitsune/spec/common.clj | clojure | (ns kitsune.spec.common
(:require [clojure.spec.alpha :as s]
[org.bovinegenius [exploding-fish :refer [absolute?]]]))
(s/def ::id pos-int?)
(s/def ::url
(s/with-gen
absolute?
#(s/gen uri?)))
(s/def ::bool
(s/or :bool boolean?
:str #{"true" "false"}))
(s/def ::created-at inst?)
| |
3cff6b03994c880793785b319577e05959583383fc1e9cd6ca3a24136b6d382b | chrisdone/prana | Eval.hs | # LANGUAGE NamedFieldPuns #
{-# LANGUAGE BangPatterns #-}
# LANGUAGE TemplateHaskell #
# LANGUAGE UnboxedTuples #
# LANGUAGE MagicHash #
# LANGUAGE RecursiveDo #
# LANGUAGE FlexibleInstances #
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE StandaloneDeriving #
-- |
module Prana.Interpreter.Eval where
import Control.Monad.Reader
import qualified Data.ByteString.Char8 as S8
import Data.IORef
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import Foreign.C.Types
import qualified Foreign.LibFFI as LibFFI
import Prana.Interpreter.Binding
import Prana.Interpreter.Boxing
import Prana.Interpreter.PrimOps
import Prana.Interpreter.Types
import Prana.Pretty
import Prana.Types
import qualified System.Posix.DynamicLinker as Posix
evalExpr ::
ReverseIndex
-> Map GlobalVarId Box
-> Map LocalVarId Box
-> Expr
-> IO Whnf
evalExpr index globals locals0 toplevelexpr = do
go locals0 toplevelexpr
where
go locals expr = do
whnf <- go' locals expr
pure whnf
go' locals =
\case
OpAppExpr expOp args ->
case expOp of
PrimOp primOp typ ->
evalPrimOp
index
(evalSomeVarId index globals locals)
(evalBox index globals)
locals
primOp
args
typ
ForeignOp cCallSpec ffiReturnType ->
evalCCallSpec index globals locals cCallSpec args ffiReturnType
OtherOp ->
error "Unimplemented op type (either custom primop or FFI)."
LetExpr localBinding expr -> do
locals' <- bindLocal localBinding locals
( unlines
[ " Evaluating let form : "
, " Bindings : " + + show localBinding
, " Expression : " + + show expr
] )
(unlines
[ "Evaluating let form:"
, " Bindings: " ++ show localBinding
, " Expression: " ++ show expr
])-}
go locals' expr
LitExpr lit -> pure (LitWhnf lit)
ConAppExpr dataConId args _types -> evalCon locals (Con dataConId args)
AppExpr someVarId args ->
let loop [] whnf = pure whnf
loop args0 whnf = do
case whnf of
FunWhnf localsClosure funParams funBody -> do
let (closureArgs, remainderArgs) =
splitAt (length funParams) args0
if length args0 < length funParams
then putStrLn ( unlines [ " Not enough arguments to function : "
, " Putting these in scope : " + + show ( zip funParams closureArgs ) ] )
else pure ( )
then putStrLn (unlines ["Not enough arguments to function:"
," Putting these in scope: " ++ show (zip funParams closureArgs)])
else pure ()-}
locals' <-
foldM
(\locals' (param, arg) -> do
box <- boxArg locals arg
pure (M.insert param box locals'))
(localsClosure <> locals)
(zip funParams closureArgs)
putStrLn
( unlines
[ " Entering function : "
, " Params : " + + show funParams
, " Arguments : " + + show closureArgs
, " Body : " + + show funBody
, " Scope : " + + show locals '
] )
(unlines
[ "Entering function:"
, " Params: " ++ show funParams
, " Arguments: " ++ show closureArgs
, " Body: " ++ show funBody
, " Scope: " ++ show locals'
])-}
if length args0 < length funParams
then do
pure
(FunWhnf
locals'
(drop (length closureArgs) funParams)
funBody)
else do
whnf' <- go locals' funBody
loop remainderArgs whnf'
ConWhnf {} ->
if null args
then pure whnf
else error
("Unexpected arguments for already-saturated data constructor: " ++
show whnf ++ ", args were: " ++ show args)
_ -> error ("Expected function, but got: " <> show whnf)
in do whnf <- evalSomeVarId index globals locals someVarId
putStrLn
( unlines
[ " Applying function : "
, " Function : " + + show whnf
, " Arguments : " + + show args
] )
(unlines
[ "Applying function:"
, " Function: " ++ show whnf
, " Arguments: " ++ show args
])-}
loop args whnf
caseE@(CaseExpr expr caseExprVarId dataAlts)
putStrLn ( unlines [ " Case expression " , " Scrutinee : " + + show expr ] )
-> do
case dataAlts of
DataAlts _tyCon alts mdefaultExpr -> do
(dataConId, boxes) <-
evalExprToCon "DataAlts" index globals locals expr
caseExprBox <- boxWhnf (ConWhnf dataConId boxes)
let locals1 = M.insert caseExprVarId caseExprBox locals
let loop (DataAlt altDataConId localVarIds rhsExpr:rest) =
if dataConId == altDataConId
then if length boxes == length localVarIds
then do
locals' <-
foldM
(\locals' (box, localVarId) -> do
pure (M.insert localVarId box locals'))
locals1
(zip boxes localVarIds)
go locals' rhsExpr
else error
"Mismatch between number of slots in constructor and pattern."
else loop rest
loop [] =
case mdefaultExpr of
Nothing ->
error
("(DataAlts) Inexhaustive pattern match: " ++
show alts)
Just defaultExpr -> go locals1 defaultExpr
in loop alts
PrimAlts _primRep litAlts mdefaultExpr -> do
whnf <- go locals expr
caseExprBox <- boxWhnf whnf
let locals1 = M.insert caseExprVarId caseExprBox locals
case whnf of
LitWhnf lit ->
let loop [] =
case mdefaultExpr of
Nothing ->
error "Inexhaustive primitive pattern match..."
Just defaultExpr -> go locals1 defaultExpr
loop (litAlt:rest) =
if litAltLit litAlt == lit
then go locals1 (litAltExpr litAlt)
else loop rest
in loop litAlts
_ ->
error
("Unexpected whnf for PrimAlts (I'm sure ClosureWhnf will come up here): " ++
show whnf)
MultiValAlts size alts mdefaultExpr -> do
(dataConId, boxes) <-
evalExprToCon
("MultiValAlts:\n" ++ prettyExpr index toplevelexpr)
index
globals
locals
expr
( " Scrutinee : " + + prettyLocalVar index caseExprVarId + + " < - " + + show ( ConWhnf dataConId boxes ) )
caseExprBox <- boxWhnf (ConWhnf dataConId boxes)
-- Yes, this is a function, and it's weird. I've
-- observed a case where the scrutinee binding is
-- shadowed by a pattern binding. Weird, but
perhaps GHC uses this some kind of correctness
-- guarantee. In any case, rewrite this
-- later. There is now a test case for this code,
-- so that's OK.
let locals1 :: Map LocalVarId Box -> Map LocalVarId Box
locals1 = M.insert caseExprVarId caseExprBox
let loop (DataAlt altDataConId localVarIds rhsExpr:rest) = do
-- putStrLn ("altDataConId=" ++ show altDataConId)
if dataConId == altDataConId
then if length boxes == length localVarIds &&
length boxes == size
then do
locals' <-
foldM
(\locals' (box, localVarId)
( " Pattern : " + + prettyLocalVar index caseExprVarId + + " < - " + + show ( ConWhnf dataConId boxes ) )
-> do
do pure (M.insert localVarId box locals'))
locals
(zip boxes localVarIds)
go (locals1 locals') rhsExpr
else error
"Mismatch between number of slots in constructor and pattern."
else do
loop rest
loop [] =
case mdefaultExpr of
Nothing ->
error
("(MultiValAlts) Inexhaustive pattern match! " ++
show size ++
" \n" ++
prettyAlts index dataAlts ++
"\nFrom:\n" ++ prettyExpr index caseE)
Just defaultExpr -> go (locals1 locals) defaultExpr
in loop alts
PolymorphicAlt rhsExpr -> do
(dataConId, boxes) <-
evalExprToCon "PolymorphicAlt" index globals locals expr
caseExprBox <- boxWhnf (ConWhnf dataConId boxes)
let locals1 = M.insert caseExprVarId caseExprBox locals
go locals1 rhsExpr
evalExprToCon :: String -> ReverseIndex -> Map GlobalVarId Box -> Map LocalVarId Box -> Expr -> IO (DataConId, [Box])
evalExprToCon label index globals locals0 expr = do
whnf <- evalExpr index globals locals0 expr
case whnf of
ConWhnf dataConId boxes -> pure (dataConId, boxes)
FunWhnf{} ->
error "Unexpected function for data alt case scrutinee."
LitWhnf {} -> error "Unexpected literal for data alt case scrutinee."
_ -> error ("TODO: evalExprToCon: "++label ++ ": "++ show whnf)
evalBox :: ReverseIndex -> Map GlobalVarId Box -> Box -> IO Whnf
evalBox index globals box = do
thunk <- readIORef (boxIORef box)
whnf <-
case thunk of
WhnfThunk whnf -> pure whnf
VariableThunk locals someVarId ->
evalSomeVarId index globals locals someVarId
ExpressionThunk locals expr -> evalExpr index globals locals expr
writeIORef (boxIORef box) (WhnfThunk whnf)
pure whnf
evalSomeVarId ::
ReverseIndex -> Map GlobalVarId Box -> Map LocalVarId Box -> SomeVarId -> IO Whnf
evalSomeVarId index globals locals someVarId = do
whnf <-
case someVarId of
SomeLocalVarId localVarId ->
case M.lookup localVarId locals of
Nothing ->
error
("Couldn't find local " ++
show localVarId ++
", " ++
(case M.lookup localVarId (reverseIndexLocals index) of
Nothing -> error "Couldn't find name! BUG!"
Just name -> displayName name) ++
"\nIn scope: " ++ show locals)
Just box -> evalBox index globals box
SomeGlobalVarId globalVarId ->
case M.lookup globalVarId globals of
Nothing ->
error
("Couldn't find global " ++
case M.lookup globalVarId (reverseIndexGlobals index) of
Nothing -> error "Couldn't find name! BUG!"
Just name -> displayName name)
Just box -> evalBox index globals box
w@WiredInVal {} -> error ("TODO: evalSomeVarId: Wired in: " ++ show w)
-- putStrLn (prettySomeVarId index someVarId ++ " = " ++ show whnf)
putStrLn ( unlines [ " Looking up i d : "
, " I d : " + + show someVarId
, " : " + + show whnf ] )
," Id: " ++ show someVarId
," Whnf: " ++ show whnf])-}
pure whnf
evalCon :: Map LocalVarId Box -> Con -> IO Whnf
evalCon locals (Con dataConId args) =
ConWhnf dataConId <$> traverse (boxArg locals) args
-- | Apply a foreign C call.
--
-- Currently will link the function every time with
-- dlsym(). Obviously, we can improve this in future.
--
-- The S8.unpack on the function isn't good either. Fix that later. I
just ca n't be bothered writing the zero - termination part .
evalCCallSpec ::
ReverseIndex
-> Map GlobalVarId Box
-> Map LocalVarId Box
-> CCallSpec
-> [Arg]
-> FFIReturnType
-> IO Whnf
evalCCallSpec index globals locals CCallSpec { cCallTarget
, cCallConv
, safety
, unique
} args ffiReturnType =
case cCallTarget of
DynamicTarget -> error "TODO: Dynamic foreign functions."
StaticTarget StaticCallTarget {byteString, functionOrValue} -> do
funPtr <- Posix.dlsym Posix.Default (S8.unpack byteString)
The init skips the State # RealWorld arg .
libffiArgs <- mapM (evalFFIArg index globals locals) (init args)
case ffiReturnType of
FFIUnboxedTupleOfStateRealWorldAnd Nothing -> do
LibFFI.callFFI funPtr LibFFI.retVoid libffiArgs
pure (ConWhnf (UnboxedTupleConId 0) [])
FFIUnboxedTupleOfStateRealWorldAnd (Just ty) ->
-- TODO: flesh out other cases.
case ty of
FFI_Double -> do
CDouble ret <- LibFFI.callFFI funPtr LibFFI.retCDouble libffiArgs
retBox <- boxWhnf (LitWhnf (DoubleLit ret))
pure (ConWhnf (UnboxedTupleConId 1) [retBox])
-- | Evaluate the argument, if necessary, and produce, if possible, an
FFI argument . Anything else is an exception .
evalFFIArg :: ReverseIndex -> Map GlobalVarId Box -> Map LocalVarId Box -> Arg -> IO LibFFI.Arg
evalFFIArg index globals locals =
\case
LitArg lit -> litToFFIArg lit
VarArg someVarId -> evalSomeVarId index globals locals someVarId >>= whnfToFFIArg
whnfToFFIArg :: Whnf -> IO LibFFI.Arg
whnfToFFIArg =
\case
LitWhnf lit -> litToFFIArg lit
AddrWhnf {} -> error "TODO: whnfToFFIArg: AddrWhnf"
ConWhnf {} -> error "TODO: whnfToFFIArg: ConWhnf"
FunWhnf {} -> error "TODO: whnfToFFIArg: FunWhnf"
StateWhnf {} -> error "TODO: whnfToFFIArg: StateWhnf"
ArrayWhnf {} -> error "TODO: whnfToFFIArg: ArrayWhnf"
MutableArrayWhnf {} -> error "TODO: whnfToFFIArg: MutableArrayWhnf"
MutableByteArrayWhnf {} -> error "TODO: whnfToFFIArg: MutableByteArrayWhnf"
SmallMutableArrayWhnf {} -> error "TODO: whnfToFFIArg: SmallMutableArrayWhnf"
litToFFIArg :: Lit -> IO LibFFI.Arg
litToFFIArg =
\case
CharLit !_ -> error "TODO: CharLit for FFIArg"
StringLit !_ -> error "TODO: StringLit for FFIArg"
NullAddrLit -> error "TODO: NullAddrLit for FFIArg"
IntLit !_ -> error "TODO: IntLit for FFIArg"
Int64Lit !_ -> error "TODO: Int64Lit for FFIArg"
WordLit !_ -> error "TODO: WordLit for FFIArg"
Word64Lit !_ -> error "TODO: Word64Lit for FFIArg"
FloatLit !_ -> error "TODO: FloatLit for FFIArg"
DoubleLit !d -> pure (LibFFI.argCDouble (CDouble d))
IntegerLit !_ -> error "TODO: IntegerLit for FFIArg"
LabelLit -> error "TODO: LabelLit for FFIArg"
| null | https://raw.githubusercontent.com/chrisdone/prana/f2e45538937d326aff562b6d49296eaedd015662/prana-interpreter/src/Prana/Interpreter/Eval.hs | haskell | # LANGUAGE BangPatterns #
# LANGUAGE OverloadedStrings #
|
Yes, this is a function, and it's weird. I've
observed a case where the scrutinee binding is
shadowed by a pattern binding. Weird, but
guarantee. In any case, rewrite this
later. There is now a test case for this code,
so that's OK.
putStrLn ("altDataConId=" ++ show altDataConId)
putStrLn (prettySomeVarId index someVarId ++ " = " ++ show whnf)
| Apply a foreign C call.
Currently will link the function every time with
dlsym(). Obviously, we can improve this in future.
The S8.unpack on the function isn't good either. Fix that later. I
TODO: flesh out other cases.
| Evaluate the argument, if necessary, and produce, if possible, an | # LANGUAGE NamedFieldPuns #
# LANGUAGE TemplateHaskell #
# LANGUAGE UnboxedTuples #
# LANGUAGE MagicHash #
# LANGUAGE RecursiveDo #
# LANGUAGE FlexibleInstances #
# LANGUAGE LambdaCase #
# LANGUAGE StandaloneDeriving #
module Prana.Interpreter.Eval where
import Control.Monad.Reader
import qualified Data.ByteString.Char8 as S8
import Data.IORef
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import Foreign.C.Types
import qualified Foreign.LibFFI as LibFFI
import Prana.Interpreter.Binding
import Prana.Interpreter.Boxing
import Prana.Interpreter.PrimOps
import Prana.Interpreter.Types
import Prana.Pretty
import Prana.Types
import qualified System.Posix.DynamicLinker as Posix
evalExpr ::
ReverseIndex
-> Map GlobalVarId Box
-> Map LocalVarId Box
-> Expr
-> IO Whnf
evalExpr index globals locals0 toplevelexpr = do
go locals0 toplevelexpr
where
go locals expr = do
whnf <- go' locals expr
pure whnf
go' locals =
\case
OpAppExpr expOp args ->
case expOp of
PrimOp primOp typ ->
evalPrimOp
index
(evalSomeVarId index globals locals)
(evalBox index globals)
locals
primOp
args
typ
ForeignOp cCallSpec ffiReturnType ->
evalCCallSpec index globals locals cCallSpec args ffiReturnType
OtherOp ->
error "Unimplemented op type (either custom primop or FFI)."
LetExpr localBinding expr -> do
locals' <- bindLocal localBinding locals
( unlines
[ " Evaluating let form : "
, " Bindings : " + + show localBinding
, " Expression : " + + show expr
] )
(unlines
[ "Evaluating let form:"
, " Bindings: " ++ show localBinding
, " Expression: " ++ show expr
])-}
go locals' expr
LitExpr lit -> pure (LitWhnf lit)
ConAppExpr dataConId args _types -> evalCon locals (Con dataConId args)
AppExpr someVarId args ->
let loop [] whnf = pure whnf
loop args0 whnf = do
case whnf of
FunWhnf localsClosure funParams funBody -> do
let (closureArgs, remainderArgs) =
splitAt (length funParams) args0
if length args0 < length funParams
then putStrLn ( unlines [ " Not enough arguments to function : "
, " Putting these in scope : " + + show ( zip funParams closureArgs ) ] )
else pure ( )
then putStrLn (unlines ["Not enough arguments to function:"
," Putting these in scope: " ++ show (zip funParams closureArgs)])
else pure ()-}
locals' <-
foldM
(\locals' (param, arg) -> do
box <- boxArg locals arg
pure (M.insert param box locals'))
(localsClosure <> locals)
(zip funParams closureArgs)
putStrLn
( unlines
[ " Entering function : "
, " Params : " + + show funParams
, " Arguments : " + + show closureArgs
, " Body : " + + show funBody
, " Scope : " + + show locals '
] )
(unlines
[ "Entering function:"
, " Params: " ++ show funParams
, " Arguments: " ++ show closureArgs
, " Body: " ++ show funBody
, " Scope: " ++ show locals'
])-}
if length args0 < length funParams
then do
pure
(FunWhnf
locals'
(drop (length closureArgs) funParams)
funBody)
else do
whnf' <- go locals' funBody
loop remainderArgs whnf'
ConWhnf {} ->
if null args
then pure whnf
else error
("Unexpected arguments for already-saturated data constructor: " ++
show whnf ++ ", args were: " ++ show args)
_ -> error ("Expected function, but got: " <> show whnf)
in do whnf <- evalSomeVarId index globals locals someVarId
putStrLn
( unlines
[ " Applying function : "
, " Function : " + + show whnf
, " Arguments : " + + show args
] )
(unlines
[ "Applying function:"
, " Function: " ++ show whnf
, " Arguments: " ++ show args
])-}
loop args whnf
caseE@(CaseExpr expr caseExprVarId dataAlts)
putStrLn ( unlines [ " Case expression " , " Scrutinee : " + + show expr ] )
-> do
case dataAlts of
DataAlts _tyCon alts mdefaultExpr -> do
(dataConId, boxes) <-
evalExprToCon "DataAlts" index globals locals expr
caseExprBox <- boxWhnf (ConWhnf dataConId boxes)
let locals1 = M.insert caseExprVarId caseExprBox locals
let loop (DataAlt altDataConId localVarIds rhsExpr:rest) =
if dataConId == altDataConId
then if length boxes == length localVarIds
then do
locals' <-
foldM
(\locals' (box, localVarId) -> do
pure (M.insert localVarId box locals'))
locals1
(zip boxes localVarIds)
go locals' rhsExpr
else error
"Mismatch between number of slots in constructor and pattern."
else loop rest
loop [] =
case mdefaultExpr of
Nothing ->
error
("(DataAlts) Inexhaustive pattern match: " ++
show alts)
Just defaultExpr -> go locals1 defaultExpr
in loop alts
PrimAlts _primRep litAlts mdefaultExpr -> do
whnf <- go locals expr
caseExprBox <- boxWhnf whnf
let locals1 = M.insert caseExprVarId caseExprBox locals
case whnf of
LitWhnf lit ->
let loop [] =
case mdefaultExpr of
Nothing ->
error "Inexhaustive primitive pattern match..."
Just defaultExpr -> go locals1 defaultExpr
loop (litAlt:rest) =
if litAltLit litAlt == lit
then go locals1 (litAltExpr litAlt)
else loop rest
in loop litAlts
_ ->
error
("Unexpected whnf for PrimAlts (I'm sure ClosureWhnf will come up here): " ++
show whnf)
MultiValAlts size alts mdefaultExpr -> do
(dataConId, boxes) <-
evalExprToCon
("MultiValAlts:\n" ++ prettyExpr index toplevelexpr)
index
globals
locals
expr
( " Scrutinee : " + + prettyLocalVar index caseExprVarId + + " < - " + + show ( ConWhnf dataConId boxes ) )
caseExprBox <- boxWhnf (ConWhnf dataConId boxes)
perhaps GHC uses this some kind of correctness
let locals1 :: Map LocalVarId Box -> Map LocalVarId Box
locals1 = M.insert caseExprVarId caseExprBox
let loop (DataAlt altDataConId localVarIds rhsExpr:rest) = do
if dataConId == altDataConId
then if length boxes == length localVarIds &&
length boxes == size
then do
locals' <-
foldM
(\locals' (box, localVarId)
( " Pattern : " + + prettyLocalVar index caseExprVarId + + " < - " + + show ( ConWhnf dataConId boxes ) )
-> do
do pure (M.insert localVarId box locals'))
locals
(zip boxes localVarIds)
go (locals1 locals') rhsExpr
else error
"Mismatch between number of slots in constructor and pattern."
else do
loop rest
loop [] =
case mdefaultExpr of
Nothing ->
error
("(MultiValAlts) Inexhaustive pattern match! " ++
show size ++
" \n" ++
prettyAlts index dataAlts ++
"\nFrom:\n" ++ prettyExpr index caseE)
Just defaultExpr -> go (locals1 locals) defaultExpr
in loop alts
PolymorphicAlt rhsExpr -> do
(dataConId, boxes) <-
evalExprToCon "PolymorphicAlt" index globals locals expr
caseExprBox <- boxWhnf (ConWhnf dataConId boxes)
let locals1 = M.insert caseExprVarId caseExprBox locals
go locals1 rhsExpr
evalExprToCon :: String -> ReverseIndex -> Map GlobalVarId Box -> Map LocalVarId Box -> Expr -> IO (DataConId, [Box])
evalExprToCon label index globals locals0 expr = do
whnf <- evalExpr index globals locals0 expr
case whnf of
ConWhnf dataConId boxes -> pure (dataConId, boxes)
FunWhnf{} ->
error "Unexpected function for data alt case scrutinee."
LitWhnf {} -> error "Unexpected literal for data alt case scrutinee."
_ -> error ("TODO: evalExprToCon: "++label ++ ": "++ show whnf)
evalBox :: ReverseIndex -> Map GlobalVarId Box -> Box -> IO Whnf
evalBox index globals box = do
thunk <- readIORef (boxIORef box)
whnf <-
case thunk of
WhnfThunk whnf -> pure whnf
VariableThunk locals someVarId ->
evalSomeVarId index globals locals someVarId
ExpressionThunk locals expr -> evalExpr index globals locals expr
writeIORef (boxIORef box) (WhnfThunk whnf)
pure whnf
evalSomeVarId ::
ReverseIndex -> Map GlobalVarId Box -> Map LocalVarId Box -> SomeVarId -> IO Whnf
evalSomeVarId index globals locals someVarId = do
whnf <-
case someVarId of
SomeLocalVarId localVarId ->
case M.lookup localVarId locals of
Nothing ->
error
("Couldn't find local " ++
show localVarId ++
", " ++
(case M.lookup localVarId (reverseIndexLocals index) of
Nothing -> error "Couldn't find name! BUG!"
Just name -> displayName name) ++
"\nIn scope: " ++ show locals)
Just box -> evalBox index globals box
SomeGlobalVarId globalVarId ->
case M.lookup globalVarId globals of
Nothing ->
error
("Couldn't find global " ++
case M.lookup globalVarId (reverseIndexGlobals index) of
Nothing -> error "Couldn't find name! BUG!"
Just name -> displayName name)
Just box -> evalBox index globals box
w@WiredInVal {} -> error ("TODO: evalSomeVarId: Wired in: " ++ show w)
putStrLn ( unlines [ " Looking up i d : "
, " I d : " + + show someVarId
, " : " + + show whnf ] )
," Id: " ++ show someVarId
," Whnf: " ++ show whnf])-}
pure whnf
evalCon :: Map LocalVarId Box -> Con -> IO Whnf
evalCon locals (Con dataConId args) =
ConWhnf dataConId <$> traverse (boxArg locals) args
just ca n't be bothered writing the zero - termination part .
evalCCallSpec ::
ReverseIndex
-> Map GlobalVarId Box
-> Map LocalVarId Box
-> CCallSpec
-> [Arg]
-> FFIReturnType
-> IO Whnf
evalCCallSpec index globals locals CCallSpec { cCallTarget
, cCallConv
, safety
, unique
} args ffiReturnType =
case cCallTarget of
DynamicTarget -> error "TODO: Dynamic foreign functions."
StaticTarget StaticCallTarget {byteString, functionOrValue} -> do
funPtr <- Posix.dlsym Posix.Default (S8.unpack byteString)
The init skips the State # RealWorld arg .
libffiArgs <- mapM (evalFFIArg index globals locals) (init args)
case ffiReturnType of
FFIUnboxedTupleOfStateRealWorldAnd Nothing -> do
LibFFI.callFFI funPtr LibFFI.retVoid libffiArgs
pure (ConWhnf (UnboxedTupleConId 0) [])
FFIUnboxedTupleOfStateRealWorldAnd (Just ty) ->
case ty of
FFI_Double -> do
CDouble ret <- LibFFI.callFFI funPtr LibFFI.retCDouble libffiArgs
retBox <- boxWhnf (LitWhnf (DoubleLit ret))
pure (ConWhnf (UnboxedTupleConId 1) [retBox])
FFI argument . Anything else is an exception .
evalFFIArg :: ReverseIndex -> Map GlobalVarId Box -> Map LocalVarId Box -> Arg -> IO LibFFI.Arg
evalFFIArg index globals locals =
\case
LitArg lit -> litToFFIArg lit
VarArg someVarId -> evalSomeVarId index globals locals someVarId >>= whnfToFFIArg
whnfToFFIArg :: Whnf -> IO LibFFI.Arg
whnfToFFIArg =
\case
LitWhnf lit -> litToFFIArg lit
AddrWhnf {} -> error "TODO: whnfToFFIArg: AddrWhnf"
ConWhnf {} -> error "TODO: whnfToFFIArg: ConWhnf"
FunWhnf {} -> error "TODO: whnfToFFIArg: FunWhnf"
StateWhnf {} -> error "TODO: whnfToFFIArg: StateWhnf"
ArrayWhnf {} -> error "TODO: whnfToFFIArg: ArrayWhnf"
MutableArrayWhnf {} -> error "TODO: whnfToFFIArg: MutableArrayWhnf"
MutableByteArrayWhnf {} -> error "TODO: whnfToFFIArg: MutableByteArrayWhnf"
SmallMutableArrayWhnf {} -> error "TODO: whnfToFFIArg: SmallMutableArrayWhnf"
litToFFIArg :: Lit -> IO LibFFI.Arg
litToFFIArg =
\case
CharLit !_ -> error "TODO: CharLit for FFIArg"
StringLit !_ -> error "TODO: StringLit for FFIArg"
NullAddrLit -> error "TODO: NullAddrLit for FFIArg"
IntLit !_ -> error "TODO: IntLit for FFIArg"
Int64Lit !_ -> error "TODO: Int64Lit for FFIArg"
WordLit !_ -> error "TODO: WordLit for FFIArg"
Word64Lit !_ -> error "TODO: Word64Lit for FFIArg"
FloatLit !_ -> error "TODO: FloatLit for FFIArg"
DoubleLit !d -> pure (LibFFI.argCDouble (CDouble d))
IntegerLit !_ -> error "TODO: IntegerLit for FFIArg"
LabelLit -> error "TODO: LabelLit for FFIArg"
|
28e5695c2bc43a001d423bd22143f6331de86eff038f0f427f7b042211c0b38c | racket/typed-racket | tc-app-eq.rkt | #lang racket/unit
(require "../../utils/utils.rkt"
"signatures.rkt"
"utils.rkt"
(only-in "../../infer/infer.rkt" intersect)
syntax/parse syntax/stx racket/match racket/unsafe/undefined
"../signatures.rkt"
"../tc-funapp.rkt"
"../../types/abbrev.rkt"
"../../types/prop-ops.rkt"
"../../types/utils.rkt"
"../../types/match-expanders.rkt"
"../../rep/type-rep.rkt"
"../../rep/object-rep.rkt"
(for-label racket/base racket/bool))
(import tc-expr^)
(export tc-app-eq^)
(define-literal-set eq-literals
#:for-label
(eq? equal? eqv? string=? symbol=? memq member memv))
;; comparators that inform the type system
;; `=' is not included. Its type is more useful than this typing rule.
(define-syntax-class comparator
#:literal-sets (eq-literals)
(pattern (~or eq? equal? eqv? string=? symbol=? member memq memv)))
(define-tc/app-syntax-class (tc/app-eq expected)
(pattern (eq?:comparator v1 v2)
;; make sure the whole expression is type correct
(match* ((tc/funapp #'eq? #'(v1 v2) (tc-expr/t #'eq?)
(stx-map single-value #'(v1 v2)) expected)
check thn and els with the eq ? info
(tc/eq #'eq? #'v1 #'v2))
[((tc-result1: t) (tc-result1: t* f o))
(ret t f o)])))
;; typecheck eq? applications
;; identifier expr expr -> tc-results
(define (tc/eq comparator v1 v2)
(define (eq?-able e) (or (boolean? e) (keyword? e) (symbol? e) (eof-object? e) (eq? e unsafe-undefined)))
(define (eqv?-able e) (or (eq?-able e) (number? e) (char? e)))
(define (equal?-able e) #t)
(define (id=? a b)
(free-identifier=? a b #f (syntax-local-phase-level)))
(define (ok? val)
(define-syntax-rule (alt nm pred ...)
(and (id=? #'nm comparator)
(or (pred val) ...)))
(or (alt symbol=? symbol?)
(alt string=? string?)
(alt eq? eq?-able)
(alt eqv? eqv?-able)
(alt equal? equal?-able)))
(match* ((single-value v1) (single-value v2))
[((tc-result1: (Val-able: (? ok? val1)) _ o1)
(tc-result1: (Val-able: (? ok? val2)) _ o2))
(ret -Boolean (-PS (-and (-is-type o1 (-val val2))
(-is-type o2 (-val val1)))
(-and (-not-type o1 (-val val2))
(-not-type o2 (-val val1)))))]
[((tc-result1: t _ o) (tc-result1: (Val-able: (? ok? val))))
(ret -Boolean (-PS (-is-type o (-val val)) (-not-type o (-val val))))]
[((tc-result1: (Val-able: (? ok? val))) (tc-result1: t _ o))
(ret -Boolean (-PS (-is-type o (-val val)) (-not-type o (-val val))))]
;; In this case, try to find there is an overlap between t1 and t2
[((tc-result1: t1 _ o1) (tc-result1: t2 _ o2))
#:when (not (ormap (lambda (a) (id=? comparator a))
(list #'member #'memv #'memq)))
(define result-t (intersect t1 t2))
(if (Bottom? result-t)
;; the overlap doesn't exist so we fall back to other cases.
(failure-cont)
;; put the type refinements of o1 and o2 in the true proposition
(ret -Boolean (-PS (make-AndProp (list (-is-type o1 result-t) (-is-type o2 result-t)))
-tt)))]
[((tc-result1: t _ o)
(or (and (? (lambda _ (id=? #'member comparator)))
(tc-result1: (List: (list (and ts (Val-able: _)) ...))))
(and (? (lambda _ (id=? #'memv comparator)))
(tc-result1: (List: (list (and ts (Val-able: (? eqv?-able))) ...))))
(and (? (lambda _ (id=? #'memq comparator)))
(tc-result1: (List: (list (and ts (Val-able: (? eq?-able))) ...))))))
(let ([ty (apply Un ts)])
(ret (Un (-val #f) t)
(-PS (-is-type o ty)
(-not-type o ty))))]
[(_ _) (ret -Boolean)]))
| null | https://raw.githubusercontent.com/racket/typed-racket/6ea20bec8d41e1a188d7f831c35423293a89c98e/typed-racket-lib/typed-racket/typecheck/tc-app/tc-app-eq.rkt | racket | comparators that inform the type system
`=' is not included. Its type is more useful than this typing rule.
make sure the whole expression is type correct
typecheck eq? applications
identifier expr expr -> tc-results
In this case, try to find there is an overlap between t1 and t2
the overlap doesn't exist so we fall back to other cases.
put the type refinements of o1 and o2 in the true proposition | #lang racket/unit
(require "../../utils/utils.rkt"
"signatures.rkt"
"utils.rkt"
(only-in "../../infer/infer.rkt" intersect)
syntax/parse syntax/stx racket/match racket/unsafe/undefined
"../signatures.rkt"
"../tc-funapp.rkt"
"../../types/abbrev.rkt"
"../../types/prop-ops.rkt"
"../../types/utils.rkt"
"../../types/match-expanders.rkt"
"../../rep/type-rep.rkt"
"../../rep/object-rep.rkt"
(for-label racket/base racket/bool))
(import tc-expr^)
(export tc-app-eq^)
(define-literal-set eq-literals
#:for-label
(eq? equal? eqv? string=? symbol=? memq member memv))
(define-syntax-class comparator
#:literal-sets (eq-literals)
(pattern (~or eq? equal? eqv? string=? symbol=? member memq memv)))
(define-tc/app-syntax-class (tc/app-eq expected)
(pattern (eq?:comparator v1 v2)
(match* ((tc/funapp #'eq? #'(v1 v2) (tc-expr/t #'eq?)
(stx-map single-value #'(v1 v2)) expected)
check thn and els with the eq ? info
(tc/eq #'eq? #'v1 #'v2))
[((tc-result1: t) (tc-result1: t* f o))
(ret t f o)])))
(define (tc/eq comparator v1 v2)
(define (eq?-able e) (or (boolean? e) (keyword? e) (symbol? e) (eof-object? e) (eq? e unsafe-undefined)))
(define (eqv?-able e) (or (eq?-able e) (number? e) (char? e)))
(define (equal?-able e) #t)
(define (id=? a b)
(free-identifier=? a b #f (syntax-local-phase-level)))
(define (ok? val)
(define-syntax-rule (alt nm pred ...)
(and (id=? #'nm comparator)
(or (pred val) ...)))
(or (alt symbol=? symbol?)
(alt string=? string?)
(alt eq? eq?-able)
(alt eqv? eqv?-able)
(alt equal? equal?-able)))
(match* ((single-value v1) (single-value v2))
[((tc-result1: (Val-able: (? ok? val1)) _ o1)
(tc-result1: (Val-able: (? ok? val2)) _ o2))
(ret -Boolean (-PS (-and (-is-type o1 (-val val2))
(-is-type o2 (-val val1)))
(-and (-not-type o1 (-val val2))
(-not-type o2 (-val val1)))))]
[((tc-result1: t _ o) (tc-result1: (Val-able: (? ok? val))))
(ret -Boolean (-PS (-is-type o (-val val)) (-not-type o (-val val))))]
[((tc-result1: (Val-able: (? ok? val))) (tc-result1: t _ o))
(ret -Boolean (-PS (-is-type o (-val val)) (-not-type o (-val val))))]
[((tc-result1: t1 _ o1) (tc-result1: t2 _ o2))
#:when (not (ormap (lambda (a) (id=? comparator a))
(list #'member #'memv #'memq)))
(define result-t (intersect t1 t2))
(if (Bottom? result-t)
(failure-cont)
(ret -Boolean (-PS (make-AndProp (list (-is-type o1 result-t) (-is-type o2 result-t)))
-tt)))]
[((tc-result1: t _ o)
(or (and (? (lambda _ (id=? #'member comparator)))
(tc-result1: (List: (list (and ts (Val-able: _)) ...))))
(and (? (lambda _ (id=? #'memv comparator)))
(tc-result1: (List: (list (and ts (Val-able: (? eqv?-able))) ...))))
(and (? (lambda _ (id=? #'memq comparator)))
(tc-result1: (List: (list (and ts (Val-able: (? eq?-able))) ...))))))
(let ([ty (apply Un ts)])
(ret (Un (-val #f) t)
(-PS (-is-type o ty)
(-not-type o ty))))]
[(_ _) (ret -Boolean)]))
|
f9af15a81ccf534aa8ac841b79aaea791fe5a4bf3c3ef2cae3cce51b9cfd0a31 | hypernumbers/hypernumbers | factory.erl | %%%-------------------------------------------------------------------
@author
( C ) 2011 - 2014 , Hypernumbers.com
@doc provisions Hypernumbers websites
%%% @end
%%% Created : by
%%%-------------------------------------------------------------------
%%%-------------------------------------------------------------------
%%%
%%% LICENSE
%%%
%%% This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation version 3
%%%
%%% This program is distributed in the hope that it will be useful,
%%% but WITHOUT ANY WARRANTY; without even the implied warranty of
%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
%%% GNU Affero General Public License for more details.
%%%
You should have received a copy of the GNU Affero General Public License
%%% along with this program. If not, see </>.
%%%-------------------------------------------------------------------
-module(factory).
%% API
-export([provision_site/7
]).
Extract existing uid from anonymous users , otherwise , generate a
fresh uid .
provision_site(Zone, Email, From, Sig, SiteType, [$_|SuggestedUid], Data) ->
provision_site_(Zone, Email, From, Sig, SiteType, SuggestedUid, Data);
provision_site(Zone, Email, From, Sig, SiteType, _, Data) ->
SuggestedUid = passport:create_uid(),
provision_site_(Zone, Email, From, Sig, SiteType, SuggestedUid, Data).
-spec provision_site(string(), string(), string(), string(), atom(),
auth_srv:uid(), list()) -> {ok , new | existing, string(),
atom(),
auth_srv:uid(), string()}
| {error, invalid_email}.
provision_site_(Zone, Email, From, Sig, Type, SuggestedUid, Data) ->
case hn_util:valid_email(Email) of
false ->
{error, invalid_email};
true ->
{ok, {Host, {_Ip, Port, Node}}} = hns:link_resource(Zone),
{ok, NE, Uid} = passport:get_or_create_user(Email, SuggestedUid),
Name = hn_util:extract_name_from_email(Email),
Site = lists:flatten(io_lib:format("http://~s:~b", [Host,Port])),
{initial_view, IView} = rpc:call(Node, hn_setup, site,
[Site, Type, [{creator, Uid},
{email, Email},
{name, Name}]]),
post_provision(NE, Site, Uid, Email, From, Sig, Name, Data),
{ok, NE, Site, Node, Uid, Name, IView}
end.
-spec post_provision(new | existing, string(), string(), string(), auth_srv:uid(),
string(), string(), list()) -> ok.
%% User does not have any existing sites, log them into their new site
%% directly.
post_provision(NE, Site, Uid, Email, From, Sig, Name, UserData) ->
IsValid = passport:is_valid_uid(Uid),
Recs = make_recs(UserData, Site, []),
ok = new_db_api:write_attributes(Recs),
case {NE, IsValid} of
{existing, true} ->
emailer:send(new_site_existing, Email, "", From, Site,
[{sig, Sig}]);
{_, _} ->
Path = ["_validate", Name],
Data = [{emailed, true}],
HT = passport:create_hypertag_url(Site, Path, Uid, Email,
Data, "never"),
emailer:send(new_site_validate, Email, "", From, Site,
[{sig, Sig}, {hypertag, HT}])
end.
make_recs([], _Site, Acc) ->
lists:reverse(Acc);
make_recs([{Ref, Val} | T], Site, Acc) ->
URL = case Ref of
"/" ++ _Rest -> Site ++ Ref;
_ -> Site ++ "/" ++ Ref
end,
RefX = hn_util:url_to_refX(URL),
make_recs(T, Site, [{RefX, [{"formula", Val}]} | Acc]).
| null | https://raw.githubusercontent.com/hypernumbers/hypernumbers/281319f60c0ac60fb009ee6d1e4826f4f2d51c4e/lib/hypernumbers-1.0/src/factory.erl | erlang | -------------------------------------------------------------------
@end
Created : by
-------------------------------------------------------------------
-------------------------------------------------------------------
LICENSE
This program is free software: you can redistribute it and/or modify
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
along with this program. If not, see </>.
-------------------------------------------------------------------
API
User does not have any existing sites, log them into their new site
directly. | @author
( C ) 2011 - 2014 , Hypernumbers.com
@doc provisions Hypernumbers websites
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation version 3
You should have received a copy of the GNU Affero General Public License
-module(factory).
-export([provision_site/7
]).
Extract existing uid from anonymous users , otherwise , generate a
fresh uid .
provision_site(Zone, Email, From, Sig, SiteType, [$_|SuggestedUid], Data) ->
provision_site_(Zone, Email, From, Sig, SiteType, SuggestedUid, Data);
provision_site(Zone, Email, From, Sig, SiteType, _, Data) ->
SuggestedUid = passport:create_uid(),
provision_site_(Zone, Email, From, Sig, SiteType, SuggestedUid, Data).
-spec provision_site(string(), string(), string(), string(), atom(),
auth_srv:uid(), list()) -> {ok , new | existing, string(),
atom(),
auth_srv:uid(), string()}
| {error, invalid_email}.
provision_site_(Zone, Email, From, Sig, Type, SuggestedUid, Data) ->
case hn_util:valid_email(Email) of
false ->
{error, invalid_email};
true ->
{ok, {Host, {_Ip, Port, Node}}} = hns:link_resource(Zone),
{ok, NE, Uid} = passport:get_or_create_user(Email, SuggestedUid),
Name = hn_util:extract_name_from_email(Email),
Site = lists:flatten(io_lib:format("http://~s:~b", [Host,Port])),
{initial_view, IView} = rpc:call(Node, hn_setup, site,
[Site, Type, [{creator, Uid},
{email, Email},
{name, Name}]]),
post_provision(NE, Site, Uid, Email, From, Sig, Name, Data),
{ok, NE, Site, Node, Uid, Name, IView}
end.
-spec post_provision(new | existing, string(), string(), string(), auth_srv:uid(),
string(), string(), list()) -> ok.
post_provision(NE, Site, Uid, Email, From, Sig, Name, UserData) ->
IsValid = passport:is_valid_uid(Uid),
Recs = make_recs(UserData, Site, []),
ok = new_db_api:write_attributes(Recs),
case {NE, IsValid} of
{existing, true} ->
emailer:send(new_site_existing, Email, "", From, Site,
[{sig, Sig}]);
{_, _} ->
Path = ["_validate", Name],
Data = [{emailed, true}],
HT = passport:create_hypertag_url(Site, Path, Uid, Email,
Data, "never"),
emailer:send(new_site_validate, Email, "", From, Site,
[{sig, Sig}, {hypertag, HT}])
end.
make_recs([], _Site, Acc) ->
lists:reverse(Acc);
make_recs([{Ref, Val} | T], Site, Acc) ->
URL = case Ref of
"/" ++ _Rest -> Site ++ Ref;
_ -> Site ++ "/" ++ Ref
end,
RefX = hn_util:url_to_refX(URL),
make_recs(T, Site, [{RefX, [{"formula", Val}]} | Acc]).
|
cc38ed5ade1ed64540dc747d89fd1598a0fc0860a997e80344439be9c50a82e6 | scrintal/heroicons-reagent | folder.cljs | (ns com.scrintal.heroicons.outline.folder)
(defn render []
[:svg {:xmlns ""
:fill "none"
:viewBox "0 0 24 24"
:strokeWidth "1.5"
:stroke "currentColor"
:aria-hidden "true"}
[:path {:strokeLinecap "round"
:strokeLinejoin "round"
:d "M2.25 12.75V12A2.25 2.25 0 014.5 9.75h15A2.25 2.25 0 0121.75 12v.75m-8.69-6.44l-2.12-2.12a1.5 1.5 0 00-1.061-.44H4.5A2.25 2.25 0 002.25 6v12a2.25 2.25 0 002.25 2.25h15A2.25 2.25 0 0021.75 18V9a2.25 2.25 0 00-2.25-2.25h-5.379a1.5 1.5 0 01-1.06-.44z"}]]) | null | https://raw.githubusercontent.com/scrintal/heroicons-reagent/572f51d2466697ec4d38813663ee2588960365b6/src/com/scrintal/heroicons/outline/folder.cljs | clojure | (ns com.scrintal.heroicons.outline.folder)
(defn render []
[:svg {:xmlns ""
:fill "none"
:viewBox "0 0 24 24"
:strokeWidth "1.5"
:stroke "currentColor"
:aria-hidden "true"}
[:path {:strokeLinecap "round"
:strokeLinejoin "round"
:d "M2.25 12.75V12A2.25 2.25 0 014.5 9.75h15A2.25 2.25 0 0121.75 12v.75m-8.69-6.44l-2.12-2.12a1.5 1.5 0 00-1.061-.44H4.5A2.25 2.25 0 002.25 6v12a2.25 2.25 0 002.25 2.25h15A2.25 2.25 0 0021.75 18V9a2.25 2.25 0 00-2.25-2.25h-5.379a1.5 1.5 0 01-1.06-.44z"}]]) | |
812dc65c3eee9dd62fadeba4452d0840c6bad8ec879f498343ef7e4546411ed2 | sheyll/newtype-zoo | Wanted.hs | -- | Indicate that something is `Wanted`.
module NewtypeZoo.Wanted
( Wanted(Wanted)
, _theWanted
, theWanted
) where
import Control.DeepSeq (NFData)
import Control.Monad.Fix (MonadFix)
import Control.Monad.Zip (MonadZip)
import Data.Bits (Bits,FiniteBits)
import Data.Copointed (Copointed)
import Data.Default (Default)
import Data.Functor.Classes (Eq1, Ord1, Read1, Show1)
import Data.Functor.Identity
import Data.Ix (Ix)
import Data.Profunctor (Profunctor, dimap)
import Data.Pointed (Pointed)
import Data.String (IsString)
import Data.Typeable (Typeable)
import Foreign.Storable (Storable)
import GHC.Generics (Generic, Generic1)
import System.Random (Random)
import Test.QuickCheck (Arbitrary)
-- | Newtype that tags a value of type @a@ as \"wanted\".  The runtime
-- representation is identical to the wrapped value.
newtype Wanted a = Wanted a
  -- The first group of classes is lifted directly from the wrapped type
  -- (newtype deriving); note numeric classes like 'Num' are included, so
  -- a @Wanted Int@ behaves like an 'Int'.
  deriving ( Eq
           , Ord
           , Read
           , Show
           , NFData
           , Foldable
           , Traversable
           , Functor
           , Default
           , Monoid
           , Semigroup
           , Typeable
           , Generic
           , Generic1
           , Random
           , Arbitrary
           , Bounded
           , Enum
           , Floating
           , Fractional
           , Integral
           , Num
           , Real
           , RealFloat
           , RealFrac
           , Ix
           , IsString
           , Bits
           , FiniteBits
           )
  -- The second group is derived via 'Identity', i.e. 'Wanted' acts as a
  -- one-element container/monad (DerivingVia).
  deriving ( Eq1
           , Ord1
           , Read1
           , Show1
           , Pointed
           , Copointed
           , Applicative
           , MonadFix
           , Monad
           , MonadZip
           )
           via Identity
-- | Strictly unwrap a 'Wanted' value (the bang pattern forces the payload).
_theWanted :: Wanted x -> x
_theWanted (Wanted !x) = x
{-# INLINE _theWanted #-}
-- | Profunctor-encoded accessor (iso-style, usable with optics libraries)
-- focusing on the value inside a 'Wanted'.
theWanted :: forall a b p f. (Profunctor p, Functor f) => p a (f b) -> p (Wanted a) (f (Wanted b))
theWanted = dimap _theWanted (fmap Wanted)
{-# INLINE theWanted #-}
| null | https://raw.githubusercontent.com/sheyll/newtype-zoo/0e67717cbcd9233d9c26b6aacb4c6f8bba6ef5f7/src/NewtypeZoo/Wanted.hs | haskell | | Indicate that something is `Wanted`. | module NewtypeZoo.Wanted
( Wanted(Wanted)
, _theWanted
, theWanted
) where
import Control.DeepSeq (NFData)
import Control.Monad.Fix (MonadFix)
import Control.Monad.Zip (MonadZip)
import Data.Bits (Bits,FiniteBits)
import Data.Copointed (Copointed)
import Data.Default (Default)
import Data.Functor.Classes (Eq1, Ord1, Read1, Show1)
import Data.Functor.Identity
import Data.Ix (Ix)
import Data.Profunctor (Profunctor, dimap)
import Data.Pointed (Pointed)
import Data.String (IsString)
import Data.Typeable (Typeable)
import Foreign.Storable (Storable)
import GHC.Generics (Generic, Generic1)
import System.Random (Random)
import Test.QuickCheck (Arbitrary)
newtype Wanted a = Wanted a
deriving ( Eq
, Ord
, Read
, Show
, NFData
, Foldable
, Traversable
, Functor
, Default
, Monoid
, Semigroup
, Typeable
, Generic
, Generic1
, Random
, Arbitrary
, Bounded
, Enum
, Floating
, Fractional
, Integral
, Num
, Real
, RealFloat
, RealFrac
, Ix
, IsString
, Bits
, FiniteBits
)
deriving ( Eq1
, Ord1
, Read1
, Show1
, Pointed
, Copointed
, Applicative
, MonadFix
, Monad
, MonadZip
)
via Identity
_theWanted :: Wanted x -> x
_theWanted (Wanted !x) = x
# INLINE _ theWanted #
theWanted :: forall a b p f. (Profunctor p, Functor f) => p a (f b) -> p (Wanted a) (f (Wanted b))
theWanted = dimap _theWanted (fmap Wanted)
# INLINE theWanted #
|
16ea9c7eeead9449266452811cbb9c683f96661cf7187194e7216d65905e73b3 | jwiegley/notes | Delta.hs | # LANGUAGE TypeFamilies #
module Delta where
-- Sketch of a change-tracking ("delta") encoding: a 'Delta' instance
-- associates a type with the representation of its changes, and the
-- *Change types below show what a code generator would emit for a
-- record type (Product) and a sum type (Sum).
class Delta a where
  type Change a
-- A value either did not change, or changed as described by @Change a@.
-- (Uses a datatype context, which is deprecated; this is a scratch note.)
data Delta a => Changed a = Unchanged | Changed (Change a)
-- Test helper: assert that diffing the two values yields the given
-- change description.  Left as a stub ('undefined') in this note.
assertChanges :: Delta a => a -> a -> Changed a -> IO ()
assertChanges = undefined
-- A change to an Int: carries the old and new value (see 'main', where
-- the field going 2 -> 10 is encoded as @IntChange 2 10@).
data IntChange = IntChange Int Int
data Product = Product
  { foo :: Int,
    bar :: Int,
    baz :: Int
  }
-- A record's change set is a list of per-field changes.
instance Delta Product where
  type Change Product = [ProductChange]
-- GENERATED
-- One constructor per field of 'Product'.
data ProductChange
  = ProductFoo IntChange
  | ProductBar IntChange
  | ProductBaz IntChange
data Sum
  = Foo Int Int Int
  | Bar Int Int Int
  | Baz Int Int Int
instance Delta Sum where
  type Change Sum = SumChange
-- GENERATED
-- One constructor per constructor of 'Sum'.
data SumChange
  = SumFoo [SumFooChange]
  | SumBar [SumBarChange]
  | SumBaz [SumBazChange]
-- GENERATED
-- One constructor per positional field of 'Foo'.
data SumFooChange
  = SumFoo0 IntChange
  | SumFoo1 IntChange
  | SumFoo2 IntChange
-- GENERATED
data SumBarChange
  = SumBar0 IntChange
  | SumBar1 IntChange
  | SumBar2 IntChange
-- GENERATED
data SumBazChange
  = SumBaz0 IntChange
  | SumBaz1 IntChange
  | SumBaz2 IntChange
-- Worked examples of the intended API (would run once 'assertChanges'
-- is implemented).
main :: IO ()
main = do
  assertChanges (Product 1 2 3) (Product 1 10 3) $
    Changed [ProductBar (IntChange 2 10)]
  assertChanges (Bar 1 2 3) (Bar 1 10 3) $
    Changed (SumBar [SumBar1 (IntChange 2 10)])
GENERATED
GENERATED
GENERATED
GENERATED | # LANGUAGE TypeFamilies #
module Delta where
class Delta a where
type Change a
data Delta a => Changed a = Unchanged | Changed (Change a)
assertChanges :: Delta a => a -> a -> Changed a -> IO ()
assertChanges = undefined
data IntChange = IntChange Int Int
data Product = Product
{ foo :: Int,
bar :: Int,
baz :: Int
}
instance Delta Product where
type Change Product = [ProductChange]
data ProductChange
= ProductFoo IntChange
| ProductBar IntChange
| ProductBaz IntChange
data Sum
= Foo Int Int Int
| Bar Int Int Int
| Baz Int Int Int
instance Delta Sum where
type Change Sum = SumChange
data SumChange
= SumFoo [SumFooChange]
| SumBar [SumBarChange]
| SumBaz [SumBazChange]
data SumFooChange
= SumFoo0 IntChange
| SumFoo1 IntChange
| SumFoo2 IntChange
data SumBarChange
= SumBar0 IntChange
| SumBar1 IntChange
| SumBar2 IntChange
data SumBazChange
= SumBaz0 IntChange
| SumBaz1 IntChange
| SumBaz2 IntChange
main :: IO ()
main = do
assertChanges (Product 1 2 3) (Product 1 10 3) $
Changed [ProductBar (IntChange 2 10)]
assertChanges (Bar 1 2 3) (Bar 1 10 3) $
Changed (SumBar [SumBar1 (IntChange 2 10)])
|
27f6767eaf741fca4fc32e48a11be241662468a9ed84940a914a21dfe2cdaa8e | B-Lang-org/bsc | ACleanup.hs | module ACleanup(aCleanup) where
import ASyntax
import ASyntaxUtil
import Prim
import DisjointTest(DisjointTestState, initDisjointTestState,
addADefToDisjointTestState, checkDisjointExprWithCtx)
import Data.Maybe
import Flags(Flags)
import Control.Monad.State
import FStringCompat(mkFString)
import Position(noPosition)
import Id
import Util
import IOUtil(progArgs)
import PPrint(ppReadable, ppString)
import Error(ErrorHandle)
-- | True when "-trace-disjoint-tests" was given on the command line;
-- enables the debug output in 'checkDisjointCond'.
trace_disjoint_tests :: Bool
trace_disjoint_tests = "-trace-disjoint-tests" `elem` progArgs
-- =====
-- Naming conventions
-- | Prefix for every identifier generated by this pass.
acleanupPref :: String
acleanupPref = "_dfoo"
-- =====
-- A state monad for generating identifiers and capturing definitions during the cleanup pass
data CState = CState Integer [ADef] DisjointTestState
-- Identifiers are _ prefix # and the first is _
-- prefix is acleanupPref
-- | Initial monad state: counter at 1, no generated defs.
initCState :: DisjointTestState -> CState
initCState dts = CState 1 [] dts
-- the state monad itself
type CMonad = StateT CState IO
-- | Record a newly generated definition and teach it to the
-- disjointness tester.
addDef :: ADef -> CMonad ()
addDef d = do
    (CState i ds dts) <- get
    dts' <- liftIO $ addADefToDisjointTestState dts [d]
    put (CState i (d:ds) dts')
-- | All generated definitions, in generation order.
getDefs :: CMonad [ADef]
getDefs = do
    (CState i ds dts) <- get
    return (reverse ds)
-- | Read the current disjointness-test state.
getDisjointTestState :: CMonad DisjointTestState
getDisjointTestState = do
    (CState i ds dts) <- get
    return dts
-- | Replace the disjointness-test state (after a solver call).
updateDisjointTestState :: DisjointTestState -> CMonad ()
updateDisjointTestState dts = do
    (CState i ds _) <- get
    put (CState i ds dts)
-- | Produce a fresh identifier (_dfoo1, _dfoo2, ...).
newName :: CMonad AId
newName = do
    (CState i ds dts) <- get
    put (CState (i+1) ds dts)
    return $ mkId noPosition (mkFString (acleanupPref ++ itos i))
-- | Entry point of the cleanup pass: cleans every user rule and every
-- action(-value) method body, then appends any definitions generated
-- while merging to the package's local definitions.
aCleanup :: ErrorHandle -> Flags -> APackage -> IO APackage
aCleanup errh flags apkg = do
    let userDefs = apkg_local_defs apkg
        state = apkg_state_instances apkg
        userARules = apkg_rules apkg
        ifs = apkg_interface apkg
        -- definitions of the value methods in the interface
        ifcDefs = [d | (AIDef { aif_value = d }) <- ifs] ++
                  [d | (AIActionValue { aif_value = d }) <- ifs]
    let str = "cleanup_" ++ ppString (apkg_name apkg)
    dts <- initDisjointTestState str errh flags (userDefs ++ ifcDefs) state []
    let initstate = initCState dts
    evalStateT (do
        userARules' <- mapM (cleanupRule flags Nothing) userARules
        ifs' <- mapM (cleanupIfc flags) ifs
        newDefs <- getDefs
        -- NOTE(review): 'traceM' is not provided by any import shown in
        -- this module; in the upstream source this debug line was most
        -- likely commented out -- confirm before building.
        traceM ( show newDefs )
        let defs' = userDefs ++ newDefs
        return (apkg { apkg_local_defs = defs', apkg_rules = userARules', apkg_interface = ifs' }))
      initstate
-- | Clean up one rule's action list.  For a rule that is the body of a
-- method, the method's ready condition ('Just' argument) is conjoined
-- with the rule predicate so disjointness tests see the full condition.
-- (Repair: the comment below had lost its "--" marker in extraction and
-- the equation its "do", leaving invalid syntax; both restored.)
cleanupRule :: Flags -> Maybe AExpr -> ARule -> CMonad ARule
cleanupRule flags mrdy (ARule id rps descr wp pred actions asmps splitorig) = do
    -- if the rule is the Action part of a method (possibly a split method)
    -- then we need to include the method's ready condition
    let pred' = case mrdy of
                  Nothing -> pred
                  Just rdy -> aAnd rdy pred
    actions' <- cleanupActions flags pred' actions
    -- don't cleanup assumption actions because they include no methods
    return (ARule id rps descr wp pred actions' asmps splitorig)
-- | Clean the rule bodies of action and action-value interface methods,
-- threading each method's ready condition into 'cleanupRule'; any other
-- interface element is returned untouched.
cleanupIfc :: Flags -> AIFace -> CMonad AIFace
cleanupIfc flags ifc =
    case ifc of
      AIAction { aif_pred = rdy, aif_body = rules } -> cleanBody rdy rules
      AIActionValue { aif_pred = rdy, aif_body = rules } -> cleanBody rdy rules
      _ -> return ifc
  where
    cleanBody rdy rules = do
        rules' <- mapM (cleanupRule flags (Just rdy)) rules
        return ifc { aif_body = rules' }
-- merge mutually exclusive calls to the same action method
-- really a later pass lifting (so I suppose no-lift no longer breaks anything)
-- | For each pair of calls to the same method (same instance, method and
-- arity) whose conditions are provably disjoint under @pred@, replace the
-- pair with a single call whose condition is the OR of the two and whose
-- arguments are muxed on the first condition.
-- (Repair: two lines below had lost their comment markers in extraction;
-- one was prose, the other a commented-out duplicate of the
-- @loopR scanned []@ clause, which as live code was unreachable.)
cleanupActions :: Flags -> AExpr -> [AAction] -> CMonad [AAction]
cleanupActions flags pred as =
    let
        loop :: [AAction] -> [AAction] -> CMonad [AAction]
        loop merged [] = return (reverse merged)
        -- Found an action method
        loop merged (first@(ACall id methodid (cond:args)):rest) =
          -- Internal loop to scan for matching actions that might be ME
          let loopR :: [AAction] -> [AAction] -> CMonad [AAction]
              loopR scanned [] =
                  return ((reverse merged) ++ [first] ++ (reverse scanned))
              -- not necessary - rest has been cleaned already
              -- loopR scanned [] = loop (first:merged) (reverse scanned)
              loopR scanned (firstR@(ACall id' methodid' (cond':args')):restR)
                | (id == id') &&
                  (methodid == methodid') &&
                  ((length args) == (length args')) =
                    do
                      dtState <- getDisjointTestState
                      (isDisjoint,newstate) <-
                          liftIO $ checkDisjointCond dtState pred cond cond'
                      updateDisjointTestState newstate
                      if (isDisjoint) then
                        do
                          -- condition of the merged call: cond OR cond'
                          newid <- newName
                          addDef (ADef newid aTBool
                                    (APrim newid aTBool PrimBOr [cond, cond']) [])
                          -- each argument becomes (if cond then arg else arg')
                          newargs <-
                              (mapM (\ (arg, arg') ->
                                       do
                                         argid <- newName
                                         let argtyp = (aType arg)
                                         addDef (ADef argid argtyp
                                                   (APrim argid argtyp PrimIf [cond, arg, arg']) [])
                                         return (ASDef argtyp argid))
                                    (zip args args'))
                          let newcall = (ACall id methodid
                                           ((ASDef aTBool newid):newargs))
                          -- restR is guaranteed merged amongst itself (see below)
                          -- so no more work need be done...
                          return ((reverse merged)
                                  ++ [newcall]
                                  ++ (reverse scanned)
                                  ++ restR)
                        else loopR (firstR:scanned) restR
              loopR scanned (firstR:restR) = loopR (firstR:scanned) restR
          -- aggressively merge the rest
          -- which allows the shortcuts in loopR above
          in do rest' <- (loop [] rest)
                (loopR [] rest')
        -- don't try to merge foreign function calls since
        -- those shouldn't get muxed anyway
        loop merged (first:rest) = loop (first:merged) rest
    in (loop [] as)
-- | Ask the solver whether two call conditions can ever hold at the same
-- time under the rule predicate @pred@.  An inconclusive answer
-- ('Nothing') is conservatively treated as \"not disjoint\".  Returns the
-- result together with the (possibly updated) solver state.
checkDisjointCond :: DisjointTestState -> AExpr -> AExpr -> AExpr ->
                     IO (Bool, DisjointTestState)
checkDisjointCond dtState pred cond1 cond2 =
    do
      (mres, dtState') <- checkDisjointExprWithCtx dtState pred pred cond1 cond2
      -- fromMaybe states the "unknown => False" default directly, instead
      -- of the isNothing/fromJust pair (fromJust is partial).
      let res = fromMaybe False mres
      when(trace_disjoint_tests) $
          putStrLn("checkDisjoint(ACleanup)\n" ++
                   -- "pred: " ++ (ppReadable pred) ++
                   "cond1: " ++ (ppReadable cond1) ++
                   "cond2: " ++ (ppReadable cond2) ++
                   "Result: " ++ (show mres))
      return (res, dtState')
| null | https://raw.githubusercontent.com/B-Lang-org/bsc/bd141b505394edc5a4bdd3db442a9b0a8c101f0f/src/comp/ACleanup.hs | haskell | =====
Naming conventions
=====
A state monad for generating identifiers and capturing definitions during the cleanup pass
prefix is acleanupPref
the state monad itself
definitions of the value methods in the interface
then we need to include the method's ready condiion
don't cleanup assumption actions because they include no methods
merge mutually exclusive calls to the same action method
really a later pass lifting (so I suppose no-lift no longer breaks anything)
Found an action method
not necessary - rest has been cleaned already
restR is guaranteed merged amongst itself (see below)
so no more work need be done...
aggressively merge the rest
which allows the shortcuts in loopR above
don't try to merge foreign function calls since
those shouldn't get muxed anyway
"pred: " ++ (ppReadable pred) ++ | module ACleanup(aCleanup) where
import ASyntax
import ASyntaxUtil
import Prim
import DisjointTest(DisjointTestState, initDisjointTestState,
addADefToDisjointTestState, checkDisjointExprWithCtx)
import Data.Maybe
import Flags(Flags)
import Control.Monad.State
import FStringCompat(mkFString)
import Position(noPosition)
import Id
import Util
import IOUtil(progArgs)
import PPrint(ppReadable, ppString)
import Error(ErrorHandle)
trace_disjoint_tests :: Bool
trace_disjoint_tests = "-trace-disjoint-tests" `elem` progArgs
acleanupPref :: String
acleanupPref = "_dfoo"
data CState = CState Integer [ADef] DisjointTestState
Identifiers are _ prefix # and the first is _
initCState :: DisjointTestState -> CState
initCState dts = CState 1 [] dts
type CMonad = StateT CState IO
addDef :: ADef -> CMonad ()
addDef d = do
(CState i ds dts) <- get
dts' <- liftIO $ addADefToDisjointTestState dts [d]
put (CState i (d:ds) dts')
getDefs :: CMonad [ADef]
getDefs = do
(CState i ds dts) <- get
return (reverse ds)
getDisjointTestState :: CMonad DisjointTestState
getDisjointTestState = do
(CState i ds dts) <- get
return dts
updateDisjointTestState :: DisjointTestState -> CMonad ()
updateDisjointTestState dts = do
(CState i ds _) <- get
put (CState i ds dts)
newName :: CMonad AId
newName = do
(CState i ds dts) <- get
put (CState (i+1) ds dts)
return $ mkId noPosition (mkFString (acleanupPref ++ itos i))
aCleanup :: ErrorHandle -> Flags -> APackage -> IO APackage
aCleanup errh flags apkg = do
let userDefs = apkg_local_defs apkg
state = apkg_state_instances apkg
userARules = apkg_rules apkg
ifs = apkg_interface apkg
ifcDefs = [d | (AIDef { aif_value = d }) <- ifs] ++
[d | (AIActionValue { aif_value = d }) <- ifs]
let str = "cleanup_" ++ ppString (apkg_name apkg)
dts <- initDisjointTestState str errh flags (userDefs ++ ifcDefs) state []
let initstate = initCState dts
evalStateT (do
userARules' <- mapM (cleanupRule flags Nothing) userARules
ifs' <- mapM (cleanupIfc flags) ifs
newDefs <- getDefs
traceM ( show newDefs )
let defs' = userDefs ++ newDefs
return (apkg { apkg_local_defs = defs', apkg_rules = userARules', apkg_interface = ifs' }))
initstate
cleanupRule :: Flags -> Maybe AExpr -> ARule -> CMonad ARule
cleanupRule flags mrdy (ARule id rps descr wp pred actions asmps splitorig) =
if the rule is the Action part of a method ( possible a split method )
let pred' = case mrdy of
Nothing -> pred
Just rdy -> aAnd rdy pred
actions' <- cleanupActions flags pred' actions
return (ARule id rps descr wp pred actions' asmps splitorig)
cleanupIfc :: Flags -> AIFace -> CMonad AIFace
cleanupIfc flags a@(AIAction { aif_pred = rdy, aif_body = rs }) =
do rs' <- mapM (cleanupRule flags (Just rdy)) rs
return a { aif_body = rs' }
cleanupIfc flags a@(AIActionValue { aif_pred = rdy, aif_body = rs }) =
do rs' <- mapM (cleanupRule flags (Just rdy)) rs
return a { aif_body = rs' }
cleanupIfc _ ifc = return ifc
cleanupActions :: Flags -> AExpr -> [AAction] -> CMonad [AAction]
cleanupActions flags pred as =
let
loop :: [AAction] -> [AAction] -> CMonad [AAction]
loop merged [] = return (reverse merged)
loop merged (first@(ACall id methodid (cond:args)):rest) =
Internal loop to scan for matching actions that might be ME
let loopR :: [AAction] -> [AAction] -> CMonad [AAction]
loopR scanned [] =
return ((reverse merged) ++ [first] ++ (reverse scanned))
loopR scanned [ ] = loop ( first : merged ) ( reverse scanned )
loopR scanned (firstR@(ACall id' methodid' (cond':args')):restR)
| (id == id') &&
(methodid == methodid') &&
((length args) == (length args')) =
do
dtState <- getDisjointTestState
(isDisjoint,newstate) <-
liftIO $ checkDisjointCond dtState pred cond cond'
updateDisjointTestState newstate
if (isDisjoint) then
do
newid <- newName
addDef (ADef newid aTBool
(APrim newid aTBool PrimBOr [cond, cond']) [])
newargs <-
(mapM (\ (arg, arg') ->
do
argid <- newName
let argtyp = (aType arg)
addDef (ADef argid argtyp
(APrim argid argtyp PrimIf [cond, arg, arg']) [])
return (ASDef argtyp argid))
(zip args args'))
let newcall = (ACall id methodid
((ASDef aTBool newid):newargs))
return ((reverse merged)
++ [newcall]
++ (reverse scanned)
++ restR)
else loopR (firstR:scanned) restR
loopR scanned (firstR:restR) = loopR (firstR:scanned) restR
in do rest' <- (loop [] rest)
(loopR [] rest')
loop merged (first:rest) = loop (first:merged) rest
in (loop [] as)
checkDisjointCond :: DisjointTestState -> AExpr -> AExpr -> AExpr ->
IO (Bool, DisjointTestState)
checkDisjointCond dtState pred cond1 cond2 =
do
(mres, dtState') <- checkDisjointExprWithCtx dtState pred pred cond1 cond2
let res = if isNothing mres then False else fromJust mres
when(trace_disjoint_tests) $
putStrLn("checkDisjoint(ACleanup)\n" ++
"cond1: " ++ (ppReadable cond1) ++
"cond2: " ++ (ppReadable cond2) ++
"Result: " ++ (show mres))
return (res, dtState')
|
c74832701727648dacddbf589378e89c1ff949d887c04b7ffdd3e7430ca9e3be | vaclavsvejcar/headroom | Readers.hs | {-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeApplications #-}
{-# LANGUAGE NoImplicitPrelude #-}
-- |
-- Module : Headroom.Command.Readers
-- Description : Custom readers for /optparse-applicative/ library
-- Copyright   : (c) 2019-2022
-- License : BSD-3-Clause
-- Maintainer :
-- Stability : experimental
-- Portability : POSIX
--
-- This module contains custom readers required by the /optparse-applicative/
-- library to parse data types such as 'LicenseType' or 'FileType'.
module Headroom.Command.Readers
( licenseReader
, licenseTypeReader
, regexReader
, templateRefReader
, parseLicense
)
where
import Data.Either.Combinators (maybeToRight)
import Headroom.Config.Types (LicenseType)
import Headroom.Data.EnumExtra (EnumExtra (..))
import Headroom.Data.Regex
( Regex (..)
, compile
)
import Headroom.FileType.Types (FileType (..))
import Headroom.Template.TemplateRef
( TemplateRef (..)
, mkTemplateRef
)
import Options.Applicative
import RIO
import qualified RIO.Text as T
import qualified RIO.Text.Partial as TP
-- | Reader for tuple of 'LicenseType' and 'FileType'.
licenseReader :: ReadM (LicenseType, FileType)
licenseReader = eitherReader parseLicense'
  where
    -- Delegates to 'parseLicense'; on failure the message lists every
    -- known license type and file type (derived via 'allValuesToText').
    parseLicense' raw = maybeToRight errMsg (parseLicense $ T.pack raw)
    errMsg =
        T.unpack $
            mconcat
                [ "invalid license/file type, must be in format 'licenseType:fileType' "
                , "(e.g. bsd3:haskell)"
                , "\nAvailable license types: "
                , T.toLower (allValuesToText @LicenseType)
                , "\nAvailable file types: "
                , T.toLower (allValuesToText @FileType)
                ]
-- | Reader for 'LicenseType'.
licenseTypeReader :: ReadM LicenseType
licenseTypeReader = eitherReader parseLicenseType
  where
    -- 'textToEnum' does the case-insensitive lookup; the error message
    -- enumerates all valid license types.
    parseLicenseType raw = maybeToRight errMsg (textToEnum $ T.pack raw)
    errMsg =
        T.unpack $
            mconcat
                [ "invalid license type, available options: "
                , T.toLower (allValuesToText @LicenseType)
                ]
-- | Reader for 'Regex'.
-- Compile the raw option string as a 'Regex', rendering any compilation
-- error through 'displayException' for optparse-applicative.
regexReader :: ReadM Regex
regexReader = eitherReader (mapLeft displayException . compile . T.pack)
-- | Reader for 'TemplateRef'.
-- Parse the raw option string as a 'TemplateRef', rendering any parse
-- error through 'displayException' for optparse-applicative.
templateRefReader :: ReadM TemplateRef
templateRefReader = eitherReader (mapLeft displayException . mkTemplateRef . T.pack)
-- | Parses 'LicenseType' and 'FileType' from the input string,
-- formatted as @licenseType:fileType@.
--
-- >>> parseLicense "bsd3:haskell"
-- Just (BSD3,Haskell)
-- | Split the input on @\":\"@ and convert both halves to their enums;
-- 'Nothing' when the shape is wrong or either component is unknown.
parseLicense :: Text -> Maybe (LicenseType, FileType)
parseLicense raw =
    case TP.splitOn ":" raw of
        [lt, ft] -> (,) <$> textToEnum lt <*> textToEnum ft
        _ -> Nothing
| null | https://raw.githubusercontent.com/vaclavsvejcar/headroom/3b20a89568248259d59f83f274f60f6e13d16f93/src/Headroom/Command/Readers.hs | haskell | # LANGUAGE OverloadedStrings #
|
Module : Headroom.Command.Readers
Description : Custom readers for /optparse-applicative/ library
License : BSD-3-Clause
Maintainer :
Stability : experimental
Portability : POSIX
This module contains custom readers required by the /optparse-applicative/
| Reader for 'Regex'.
| Reader for 'TemplateRef'.
>>> parseLicense "bsd3:haskell" | # LANGUAGE TypeApplications #
# LANGUAGE NoImplicitPrelude #
Copyright : ( c ) 2019 - 2022
library to parse data types such as ' LicenseType ' or ' FileType ' .
module Headroom.Command.Readers
( licenseReader
, licenseTypeReader
, regexReader
, templateRefReader
, parseLicense
)
where
import Data.Either.Combinators (maybeToRight)
import Headroom.Config.Types (LicenseType)
import Headroom.Data.EnumExtra (EnumExtra (..))
import Headroom.Data.Regex
( Regex (..)
, compile
)
import Headroom.FileType.Types (FileType (..))
import Headroom.Template.TemplateRef
( TemplateRef (..)
, mkTemplateRef
)
import Options.Applicative
import RIO
import qualified RIO.Text as T
import qualified RIO.Text.Partial as TP
| Reader for tuple of ' LicenseType ' and ' FileType ' .
licenseReader :: ReadM (LicenseType, FileType)
licenseReader = eitherReader parseLicense'
where
parseLicense' raw = maybeToRight errMsg (parseLicense $ T.pack raw)
errMsg =
T.unpack $
mconcat
[ "invalid license/file type, must be in format 'licenseType:fileType' "
, "(e.g. bsd3:haskell)"
, "\nAvailable license types: "
, T.toLower (allValuesToText @LicenseType)
, "\nAvailable file types: "
, T.toLower (allValuesToText @FileType)
]
| Reader for ' LicenseType ' .
licenseTypeReader :: ReadM LicenseType
licenseTypeReader = eitherReader parseLicenseType
where
parseLicenseType raw = maybeToRight errMsg (textToEnum $ T.pack raw)
errMsg =
T.unpack $
mconcat
[ "invalid license type, available options: "
, T.toLower (allValuesToText @LicenseType)
]
regexReader :: ReadM Regex
regexReader =
let parse input = mapLeft displayException (compile . T.pack $ input)
in eitherReader parse
templateRefReader :: ReadM TemplateRef
templateRefReader =
let parse input = mapLeft displayException (mkTemplateRef . T.pack $ input)
in eitherReader parse
| Parses ' LicenseType ' and ' FileType ' from the input string ,
formatted as @licenseType : fileType@.
Just ( BSD3,Haskell )
parseLicense :: Text -> Maybe (LicenseType, FileType)
parseLicense raw
| [lt, ft] <- TP.splitOn ":" raw = (,) <$> textToEnum lt <*> textToEnum ft
| otherwise = Nothing
|
0d689c1a139cefdedf8fe16ca8cd98ec73f27c57f0183cfcfe62d6bfa22e7cce | josefs/Gradualizer | map_update_with_record_field.erl | -module(map_update_with_record_field).
-export([mapup3/2]).
%% When record `r' is defined or included and used directly in the map type definition,
%% then `mapup3' passes typechecking.
%-record(r, {}).
%-type m() :: #{rec := #r{}}.
%% However, when the actual remote type and corresponding remote record is used,
%% then `mapup3' fails to typecheck, although it still should.
%% The map type under test: the `rec' key holds a remote record type.
-type m() :: #{rec := user_types:my_empty_record()}.
%% Updating the `rec' key with `=>' should typecheck against m()
%% (this file is a Gradualizer should_pass test case).
-spec mapup3(m(), user_types:my_empty_record()) -> m().
mapup3(TypedMap, R) ->
    TypedMap#{rec => R}.
| null | https://raw.githubusercontent.com/josefs/Gradualizer/208f5816b0157f282212fc036ba7560f0822f9fc/test/should_pass/map_update_with_record_field.erl | erlang | When record `r' is defined or included and used directly in the map type definition,
-record(r, {}).
-type m() :: #{rec := #r{}}.
However, when the actual remote type and corresponding remote record is used,
then `mapup3' fails to typecheck, although it still should. | -module(map_update_with_record_field).
-export([mapup3/2]).
then ` mapup3 ' passes typechecking .
-type m() :: #{rec := user_types:my_empty_record()}.
-spec mapup3(m(), user_types:my_empty_record()) -> m().
mapup3(TypedMap, R) ->
TypedMap#{rec => R}.
|
0835efdc83cedd91ebef9b68146759698b0703f0c496678b0f448a5e0d2872e6 | kmi/irs | new-ontology.lisp | ;;; Mode: Lisp; Package: web-onto
;;; Author:
;;; The Open University
(in-package "WEB-ONTO")
(defun get-undefined-ontology-uses (ontologies)
  "Return the subset of ONTOLOGIES for which no ontology object is
registered (GET-ONTOLOGY answers NIL), preserving order."
  (remove-if #'get-ontology ontologies))
(defun new-ontology (stream request-string)
  "Handle a 'new ontology' request.  REQUEST-STRING carries, after one
discarded leading token, the new ontology's name, type, used ontologies,
author and allowed editors.  Replies on STREAM with an error message when
the name is taken or a used ontology is unknown; otherwise delegates to
DEFINE-NEW-ONTOLOGY."
  (with-input-from-string (request-string-stream request-string)
    ;; skip the leading request token
    (read request-string-stream)
    (let* ((*package* (find-package "OCML"))
           (ontology-name (read request-string-stream))
           (ontology-type (read request-string-stream))
           (ontology-uses (read request-string-stream))
           (ontology-author (read request-string-stream))
           (allowed-editors (read request-string-stream))
           (ontology (get-ontology ontology-name))
           (undefined-ontologies (get-undefined-ontology-uses ontology-uses)))
      ;; an empty editors string means "no restriction"
      (when (string= allowed-editors "")
        (setf allowed-editors nil))
      (cond (ontology
             (when stream
               (http::princ-to-binary-stream
                (format nil "Sorry, the ontology ~a already exists~%" ontology-name)
                stream)))
            (undefined-ontologies
             (when stream
               (http::princ-to-binary-stream
                (format nil "Sorry, the ontolog~@p~{ ~(~a~)~} ~:[do~;does~] not exist~%"
                        (length undefined-ontologies) undefined-ontologies
                        (= (length undefined-ontologies) 1))
                stream)))
            (t (define-new-ontology stream ontology-name ontology-type
                 ontology-uses ontology-author allowed-editors))))))
(defun define-new-ontology (stream ontology-name ontology-type ontology-uses ontology-author
                            allowed-editors)
  "Create the on-disk files for a new ontology, load it, register its
name, and acknowledge with \"OK\" on STREAM.  Rejects unregistered
authors with an error message instead."
  (cond ((not (registered-user ontology-author))
         (http::princ-to-binary-stream
          (format nil "Sorry, ~a is not a registered user~%" ontology-author)
          stream))
        (t (add-ontology-files ontology-name ontology-type ontology-uses ontology-author allowed-editors)
           (load-new-ontology ontology-name)
           (add-to-list-of-ontology-names ontology-name)
           ;;(add-to-list-of-ontology-home-pathnames ontology-name ontology-type)
           (http::princ-to-binary-stream
            (format nil "OK~%")
            stream))))
(defun add-ontology-files (ontology-name ontology-type ontology-uses ontology-author allowed-editors)
  "Create the ontology's directory plus its load file and the two
(initially empty) definition files."
  (let* ((logical-ontology-directory
          (ontology-directory-name-from-type ontology-type ontology-name))
         (ontology-directory (translate-logical-pathname logical-ontology-directory)))
    (create-directory ontology-directory)
    (create-load-file ontology-directory
                      ontology-name ontology-type ontology-uses ontology-author allowed-editors)
    (create-new-definitions-file ontology-directory ontology-name)
    (create-second-definitions-file ontology-directory ontology-name)))
(defun create-directory (directory)
  "Ensure DIRECTORY exists on disk, using the running Lisp's native
facility.  (Repair: the alternative implementations below had lost their
\";;\" comment markers during extraction, which left unbalanced parens
and calls into nonexistent packages; the markers are restored.)"
  (unless (probe-file directory)
    #+lispworks
    (ensure-directories-exist directory)
    ;;(foreign::call-system (format nil "mkdir ~a" directory))
    ;;(io::io-mkdir
    ;; (if (pathnamep directory)
    ;;     (namestring directory)
    ;;     directory))
    #+allegro (excl::run-shell-command (format nil "mkdir ~a" directory))))
(defun create-new-definitions-file (ontology-directory ontology-name)
  "Create the web-edits definition file (name from *WEBONTO-EDITS-FILENAME*)."
  (create-definitions-file ontology-directory ontology-name *webonto-edits-filename*))
(defun create-second-definitions-file (ontology-directory ontology-name)
  "Create the <ontology-name>.lisp definition file."
  (create-definitions-file ontology-directory ontology-name (format nil "~(~a~).lisp" ontology-name)))
(defun create-definitions-file (ontology-directory ontology-name filename)
  "Write a fresh definitions file containing the standard header and an
IN-ONTOLOGY form; any existing file of the same name is deleted first."
  (let ((edits-file (merge-pathnames ontology-directory filename)))
    (when (probe-file edits-file)
      (delete-file edits-file))
    (with-open-file (ostream edits-file :direction :output :if-does-not-exist :create)
      (format ostream *edits-file-header*)
      (format ostream "~%~%(in-ontology ~(~a~))~%~%" ontology-name))))
;; Alist mapping an ontology name to its non-default load file name.
(defvar *load-files*
  '((ocml::sisyphus1 . "load2.lisp"))
  "All the ontology load files which are not load.lisp are stored here")
;(defun setup-load-files ()
; (mapc #'(lambda (ontology-and-load-file)
;    (setf (ocml::ontology-load-filename (get-ontology (car ontology-and-load-file)))
; (cdr ontology-and-load-file)))
; *load-files*))
(defun create-load-file (ontology-directory ontology-name
                         ontology-type ontology-uses ontology-author allowed-editors)
  "Write the ontology's load file (name from OCML:*LOAD-FILENAME*): the
standard header, ENSURE-ONTOLOGY forms for every used ontology, and the
DEF-ONTOLOGY form itself.  Any existing load file is deleted first."
  (let ((load-file (merge-pathnames ontology-directory ocml:*load-filename*))
        (parsed-allowed-editors
         (parse-allowed-editors allowed-editors)))
    (when (probe-file load-file)
      (delete-file load-file))
    (with-open-file (ostream load-file :direction :output :if-does-not-exist :create)
      (format ostream *load-file-header*)
      (cond (ontology-uses
             ;; make sure every used ontology is loaded before DEF-ONTOLOGY
             (format ostream "~%~%(eval-when (eval load)")
             (mapc #'(lambda (used-ontology)
                       (let ((ontology-structure (ocml::get-ontology used-ontology)))
                         (format ostream "~% (ensure-ontology ~(~a~) ~(~a~) \"~(~a~)\" )"
                                 used-ontology
                                 (ocml::ontology-type ontology-structure)
                                 (ocml::ontology-logical-load-filename
                                  ontology-structure))))
                   ontology-uses)
             (format ostream ")")
             (format ostream
                     "~%~%(def-ontology ~(~a~) :includes ~(~a~)~
                      ~%~14t:type ~(~s~) ~
                      ~%~14t:author ~s ~:[~*~;:allowed-editors (~{~s ~}~s)~])"
                     ontology-name
                     ontology-uses
                     (make-ontology-type-name ontology-type)
                     ontology-author allowed-editors
                     (butlast parsed-allowed-editors)
                     (car (last parsed-allowed-editors))))
            ;; no used ontologies: emit the DEF-ONTOLOGY form alone
            (t (format ostream
                       "~%~%(def-ontology ~(~a~) :type ~(~s~)~
                        ~%~14t:author ~s ~:[~*~;:allowed-editors (~{~s ~}~s)~])"
                       ontology-name
                       (make-ontology-type-name ontology-type)
                       ontology-author allowed-editors
                       (butlast parsed-allowed-editors)
                       (car (last parsed-allowed-editors))))))))
(defun parse-allowed-editors (string)
  "Split the space-separated editors STRING into a list of names."
  (multiple-value-list (parse-string-by-spaces string)))
(defun load-new-ontology (ontology-name)
  "Load the freshly created ontology from its files."
  (internal-load-single-ontology ontology-name))
(defun add-to-list-of-ontology-names (ontology-name)
  "Register ONTOLOGY-NAME in *ONTOLOGIES*, append it to the cached
names string, and persist the string."
  (push ontology-name *ontologies*)
  (setf *ontologies-string*
        (concatenate 'string *ontologies-string*
                     (format nil "~%~(~a~)" ontology-name)))
  (save-ontologies-string))
;(defun add-to-list-of-ontology-home-pathnames (ontology-name ontology-type)
; (let* ((logical-directory (ontology-directory-name-from-type ontology-type ontology-name))
; (new-source-location (pathname
; (format nil "~a~a" (translate-logical-pathname logical-directory)
; *webonto-edits-filename*))))
; (setf *ontology-home-pathnames-string*
; (concatenate 'string *ontology-home-pathnames-string*
; (format nil "~%~(~a~)~% ~s~% ~s ~%~%" ontology-name
; logical-directory
; *webonto-edits-filename*)))
; (setf *ontology-home-pathnames*
; (cons (list ontology-name new-source-location) *ontology-home-pathnames*))
; (setf (ocml::ontology-new-source-location (get-ontology ontology-name))
; new-source-location)
; (save-ontology-home-pathnames-string)))
(defun basic-type-p (ontology-type)
  "True when ONTOLOGY-TYPE is the :BASIC keyword."
  (eq ontology-type :basic))
(defun ontology-directory-name-from-type (ontology-type
                                          ontology-name
                                          &optional
                                          (root "ocml:library;"))
  "Logical pathname of the ontology's directory under ROOT.  Basic
ontologies live directly in their library directory; every other type
gets a per-ontology subdirectory."
  (if (basic-type-p ontology-type)
      (format nil "~a~(~a~);"
              root
              (ontology-type-to-library-directory ontology-type))
      (format nil "~a~(~a~);~(~a~);"
              root
              (ontology-type-to-library-directory ontology-type)
              ontology-name)))
(defun ontology-type-to-library-directory (ontology-type)
  "Map an ontology type (accepted as keyword, bare symbol or OCML
symbol) to its library directory name."
  (case ontology-type
    ((application :application ocml::application) 'applications)
    ((domain :domain ocml::domain) 'domains)
    ((method :method ocml::method) 'methods)
    ((task :task ocml::task) 'tasks)
    ((goal :goal ocml::goal) 'goals)
    ((web-service :web-service ocml::web-service) 'web-services)
    ((mediator :mediator ocml::mediator) 'mediators)
    ((:basic basic ocml::basic) 'basic)))
(defun make-ontology-type-name (ontology-type)
(intern (symbol-name ontology-type) (find-package "KEYWORD"))) | null | https://raw.githubusercontent.com/kmi/irs/e1b8d696f61c6b6878c0e92d993ed549fee6e7dd/src/webonto/new-ontology.lisp | lisp | Mode: Lisp; Package: web-onto
(add-to-list-of-ontology-home-pathnames ontology-name ontology-type)
(io::io-mkdir
(namestring directory)
directory))
(defun setup-load-files ()
(mapc #'(lambda (ontology-and-load-file)
(cdr ontology-and-load-file)))
*load-files*))
:allowed-editors (~{~s ~}~s)~])"
:allowed-editors (~{~s ~}~s)~])"
(defun add-to-list-of-ontology-home-pathnames (ontology-name ontology-type)
(let* ((logical-directory (ontology-directory-name-from-type ontology-type ontology-name))
(new-source-location (pathname
(format nil "~a~a" (translate-logical-pathname logical-directory)
*webonto-edits-filename*))))
(concatenate 'string *ontology-home-pathnames-string*
logical-directory
*webonto-edits-filename*)))
(cons (list ontology-name new-source-location) *ontology-home-pathnames*))
new-source-location)
(save-ontology-home-pathnames-string))) |
Author :
The Open University
(in-package "WEB-ONTO")
(defun get-undefined-ontology-uses (ontologies)
(mapcan #'(lambda (ontology)
(unless (get-ontology ontology)
(list ontology)))
ontologies))
(defun new-ontology (stream request-string)
(with-input-from-string (request-string-stream request-string)
(read request-string-stream)
(let* ((*package* (find-package "OCML"))
(ontology-name (read request-string-stream))
(ontology-type (read request-string-stream))
(ontology-uses (read request-string-stream))
(ontology-author (read request-string-stream))
(allowed-editors (read request-string-stream))
(ontology (get-ontology ontology-name))
(undefined-ontologies (get-undefined-ontology-uses ontology-uses)))
(when (string= allowed-editors "")
(setf allowed-editors nil))
(cond (ontology
(when stream
(http::princ-to-binary-stream
(format nil "Sorry, the ontology ~a already exists~%" ontology-name)
stream)))
(undefined-ontologies
(when stream
(http::princ-to-binary-stream
(format nil "Sorry, the ontolog~@p~{ ~(~a~)~} ~:[do~;does~] not exist~%"
(length undefined-ontologies) undefined-ontologies
(= (length undefined-ontologies) 1))
stream)))
(t (define-new-ontology stream ontology-name ontology-type
ontology-uses ontology-author allowed-editors))))))
(defun define-new-ontology (stream ontology-name ontology-type ontology-uses ontology-author
allowed-editors)
(cond ((not (registered-user ontology-author))
(http::princ-to-binary-stream
(format nil "Sorry, ~a is not a registered user~%" ontology-author)
stream))
(t (add-ontology-files ontology-name ontology-type ontology-uses ontology-author allowed-editors)
(load-new-ontology ontology-name)
(add-to-list-of-ontology-names ontology-name)
(http::princ-to-binary-stream
(format nil "OK~%")
stream))))
(defun add-ontology-files (ontology-name ontology-type ontology-uses ontology-author allowed-editors)
(let* ((logical-ontology-directory
(ontology-directory-name-from-type ontology-type ontology-name))
(ontology-directory (translate-logical-pathname logical-ontology-directory)))
(create-directory ontology-directory)
(create-load-file ontology-directory
ontology-name ontology-type ontology-uses ontology-author allowed-editors)
(create-new-definitions-file ontology-directory ontology-name)
(create-second-definitions-file ontology-directory ontology-name)))
(defun create-directory (directory)
(unless (probe-file directory)
#+lispworks
(ensure-directories-exist directory)
( foreign::call - system ( format nil " mkdir ~a " directory ) )
( if ( pathnamep directory )
#+allegro (excl::run-shell-command (format nil "mkdir ~a" directory))))
(defun create-new-definitions-file (ontology-directory ontology-name)
(create-definitions-file ontology-directory ontology-name *webonto-edits-filename*))
(defun create-second-definitions-file (ontology-directory ontology-name)
(create-definitions-file ontology-directory ontology-name (format nil "~(~a~).lisp" ontology-name)))
(defun create-definitions-file (ontology-directory ontology-name filename)
(let ((edits-file (merge-pathnames ontology-directory filename)))
(when (probe-file edits-file)
(delete-file edits-file))
(with-open-file (ostream edits-file :direction :output :if-does-not-exist :create)
(format ostream *edits-file-header*)
(format ostream "~%~%(in-ontology ~(~a~))~%~%" ontology-name))))
(defvar *load-files*
'((ocml::sisyphus1 . "load2.lisp"))
"All the ontology load files which are not load.lisp are stored here")
( setf ( ocml::ontology - load - filename ( get - ontology ( car ontology - and - load - file ) ) )
(defun create-load-file (ontology-directory ontology-name
ontology-type ontology-uses ontology-author allowed-editors)
(let ((load-file (merge-pathnames ontology-directory ocml:*load-filename*))
(parsed-allowed-editors
(parse-allowed-editors allowed-editors)))
(when (probe-file load-file)
(delete-file load-file))
(with-open-file (ostream load-file :direction :output :if-does-not-exist :create)
(format ostream *load-file-header*)
(cond (ontology-uses
(format ostream "~%~%(eval-when (eval load)")
(mapc #'(lambda (used-ontology)
(let ((ontology-structure (ocml::get-ontology used-ontology)))
(format ostream "~% (ensure-ontology ~(~a~) ~(~a~) \"~(~a~)\" )"
used-ontology
(ocml::ontology-type ontology-structure)
(ocml::ontology-logical-load-filename
ontology-structure))))
ontology-uses)
(format ostream ")")
(format ostream
"~%~%(def-ontology ~(~a~) :includes ~(~a~)~
~%~14t:type ~(~s~) ~
ontology-name
ontology-uses
(make-ontology-type-name ontology-type)
ontology-author allowed-editors
(butlast parsed-allowed-editors)
(car (last parsed-allowed-editors))))
(t (format ostream
"~%~%(def-ontology ~(~a~) :type ~(~s~)~
ontology-name
(make-ontology-type-name ontology-type)
ontology-author allowed-editors
(butlast parsed-allowed-editors)
(car (last parsed-allowed-editors))))))))
(defun parse-allowed-editors (string)
(multiple-value-list (parse-string-by-spaces string)))
(defun load-new-ontology (ontology-name)
(internal-load-single-ontology ontology-name))
(defun add-to-list-of-ontology-names (ontology-name)
(push ontology-name *ontologies*)
(setf *ontologies-string*
(concatenate 'string *ontologies-string*
(format nil "~%~(~a~)" ontology-name)))
(save-ontologies-string))
( setf * ontology - home - pathnames - string *
( format nil " ~%~(~a~)~% ~s~% ~s ~%~% " ontology - name
( setf * ontology - home - pathnames *
( setf ( ocml::ontology - new - source - location ( get - ontology ontology - name ) )
(defun basic-type-p (ontology-type)
(eq ontology-type :basic))
(defun ontology-directory-name-from-type (ontology-type
ontology-name
&optional
(root "ocml:library;"))
(if (basic-type-p ontology-type)
(format nil "~a~(~a~);"
root
(ontology-type-to-library-directory ontology-type))
(format nil "~a~(~a~);~(~a~);"
root
(ontology-type-to-library-directory ontology-type)
ontology-name)))
(defun ontology-type-to-library-directory (ontology-type)
(case ontology-type
((application :application ocml::application) 'applications)
((domain :domain ocml::domain) 'domains)
((method :method ocml::method) 'methods)
((task :task ocml::task) 'tasks)
((goal :goal ocml::goal) 'goals)
((web-service :web-service ocml::web-service) 'web-services)
((mediator :mediator ocml::mediator) 'mediators)
((:basic basic ocml::basic) 'basic)))
(defun make-ontology-type-name (ontology-type)
(intern (symbol-name ontology-type) (find-package "KEYWORD"))) |
0ec7cc6db9b8da9cdb5a2a355038bcd8cce743b9821698ad414e8c4d1194930f | fumieval/extensible | Bits.hs | # LANGUAGE UndecidableInstances , ScopedTypeVariables , MultiParamTypeClasses , TypeFamilies #
# LANGUAGE GeneralizedNewtypeDeriving , DeriveGeneric #
#if __GLASGOW_HASKELL__ < 806
{-# LANGUAGE TypeInType #-}
#endif
-----------------------------------------------------------------------
-- |
-- Module : Data.Extensible.Bits
Copyright : ( c ) 2018
-- License : BSD3
--
Maintainer : < >
--
-- Bit-packed records
-----------------------------------------------------------------------
module Data.Extensible.Bits (BitProd(..)
, FromBits(..)
, TotalBits
, BitFields
, blookup
, bupdate
, toBitProd
, fromBitProd
, BitRecordOf
, BitRecord) where
import Control.Applicative
import Control.Comonad
import Data.Bits
import Data.Extensible.Class
import Data.Extensible.Dictionary
import Data.Extensible.Product
import Data.Extensible.Field
import Data.Functor.Identity
import Data.Hashable
import Data.Ix
import Data.Kind (Type)
import Data.Profunctor.Rep
import Data.Profunctor.Sieve
import Data.Proxy
import Data.Word
import Data.Int
import Foreign.Storable (Storable)
import GHC.Generics (Generic)
import GHC.TypeLits
| Bit - vector product . It has similar interface as @(:*)@ but fields are packed into @r@.
newtype BitProd r (xs :: [k]) (h :: k -> Type) = BitProd { unBitProd :: r }
deriving (Eq, Ord, Enum, Bounded, Ix, Generic, Hashable, Storable)
instance (Forall (Instance1 Show h) xs, BitFields r xs h) => Show (BitProd r xs h) where
showsPrec d x = showParen (d > 10)
$ showString "toBitProd " . showsPrec 11 (fromBitProd x)
| Total ' BitWidth '
type family TotalBits h xs where
TotalBits h '[] = 0
TotalBits h (x ': xs) = BitWidth (h x) + TotalBits h xs
-- | Conversion between a value and a bit representation.
--
-- Instances of `FromBits` must satisfy the following laws:
--
-- > fromBits (x `shiftL` W .|. toBits a) ≡ a
-- > toBits a `shiftR` W == zeroBits
--
where W is the ' BitWidth ' .
class (Bits r, KnownNat (BitWidth a)) => FromBits r a where
type BitWidth a :: Nat
fromBits :: r -> a
toBits :: a -> r
instance Bits r => FromBits r () where
type BitWidth () = 0
fromBits _ = ()
toBits _ = zeroBits
instance Bits r => FromBits r (Proxy a) where
type BitWidth (Proxy a) = 0
fromBits _ = Proxy
toBits _ = zeroBits
instance FromBits Word64 Word64 where
type BitWidth Word64 = 64
fromBits = id
toBits = id
instance FromBits Word64 Bool where
type BitWidth Bool = 1
fromBits = flip testBit 0
toBits False = 0
toBits True = 1
instance FromBits Word64 Word8 where
type BitWidth Word8 = 8
fromBits = fromIntegral
toBits = fromIntegral
instance FromBits Word64 Word16 where
type BitWidth Word16 = 16
fromBits = fromIntegral
toBits = fromIntegral
instance FromBits Word64 Word32 where
type BitWidth Word32 = 32
fromBits = fromIntegral
toBits = fromIntegral
instance FromBits Word64 Int8 where
type BitWidth Int8 = 8
fromBits = fromIntegral
toBits = fromIntegral . (fromIntegral :: Int8 -> Word8)
instance FromBits Word64 Int16 where
type BitWidth Int16 = 16
fromBits = fromIntegral
toBits = fromIntegral . (fromIntegral :: Int16 -> Word16)
instance FromBits Word64 Int32 where
type BitWidth Int32 = 32
fromBits = fromIntegral
toBits = fromIntegral . (fromIntegral :: Int32 -> Word32)
instance FromBits r a => FromBits r (Identity a) where
type BitWidth (Identity a) = BitWidth a
fromBits = Identity . fromBits
toBits = toBits . runIdentity
instance (FromBits r a, FromBits r b, n ~ (BitWidth a + BitWidth b), n <= BitWidth r, KnownNat n) => FromBits r (a, b) where
type BitWidth (a, b) = BitWidth a + BitWidth b
fromBits r = (fromBits (unsafeShiftR r width), fromBits r) where
width = fromInteger $ natVal (Proxy :: Proxy (BitWidth b))
toBits (a, b) = unsafeShiftL (toBits a) width .|. toBits b where
width = fromInteger $ natVal (Proxy :: Proxy (BitWidth b))
instance FromBits r a => FromBits r (Const a b) where
type BitWidth (Const a b) = BitWidth a
fromBits = Const . fromBits
toBits = toBits . getConst
instance (Bits r, FromBits r (h (TargetOf x))) => FromBits r (Field h x) where
type BitWidth (Field h x) = BitWidth (h (TargetOf x))
fromBits = Field . fromBits
toBits = toBits . getField
instance (Bits r, KnownNat (TotalBits h xs)) => FromBits r (BitProd r xs h) where
type BitWidth (BitProd r xs h) = TotalBits h xs
fromBits = BitProd
toBits = unBitProd
-- | Fields are instances of 'FromBits' and fit in the representation.
type BitFields r xs h = (FromBits r r
, TotalBits h xs <= BitWidth r
, Forall (Instance1 (FromBits r) h) xs)
-- | Convert a normal extensible record into a bit record.
toBitProd :: forall r xs h. BitFields r xs h => xs :& h -> BitProd r xs h
toBitProd p = hfoldrWithIndexFor (Proxy :: Proxy (Instance1 (FromBits r) h))
(\i v f r -> f $! bupdate i r v) id p (BitProd zeroBits)
# INLINE toBitProd #
-- | Convert a normal extensible record into a bit record.
fromBitProd :: forall r xs h. BitFields r xs h => BitProd r xs h -> xs :& h
fromBitProd p = htabulateFor (Proxy :: Proxy (Instance1 (FromBits r) h))
$ flip blookup p
# INLINE fromBitProd #
| ' hlookup ' for ' BitProd '
blookup :: forall x r xs h.
(BitFields r xs h, FromBits r (h x))
=> Membership xs x -> BitProd r xs h -> h x
blookup i (BitProd r) = fromBits $ unsafeShiftR r
$ bitOffsetAt (Proxy :: Proxy r) (Proxy :: Proxy h) (Proxy :: Proxy xs)
$ getMemberId i
# INLINE blookup #
| Update a field of a ' BitProd ' .
bupdate :: forall x r xs h.
(BitFields r xs h, FromBits r (h x))
=> Membership xs x -> BitProd r xs h -> h x -> BitProd r xs h
bupdate i (BitProd r) a = BitProd $ r .&. mask
.|. unsafeShiftL (toBits a) offset
where
mask = unsafeShiftL (complement zeroBits) width `rotateL` offset
width = fromInteger $ natVal (Proxy :: Proxy (BitWidth (h x)))
offset = bitOffsetAt (Proxy :: Proxy r) (Proxy :: Proxy h) (Proxy :: Proxy xs) $ getMemberId i
# INLINE bupdate #
bitOffsetAt :: forall k r h xs. Forall (Instance1 (FromBits r) h) xs
=> Proxy (r :: Type) -> Proxy (h :: k -> Type) -> Proxy (xs :: [k]) -> Int -> Int
bitOffsetAt _ ph _ = henumerateFor
(Proxy :: Proxy (Instance1 (FromBits r) h))
(Proxy :: Proxy xs)
(\m r o i -> if i == 0
then o
else r (fromInteger (natVal (proxyBitWidth ph m)) + o) (i - 1))
(error "Impossible") 0
# INLINE bitOffsetAt #
proxyBitWidth :: Proxy h -> proxy x -> Proxy (BitWidth (h x))
proxyBitWidth _ _ = Proxy
-- | Bit-packed record
type BitRecordOf r h xs = BitProd r xs (Field h)
-- | Bit-packed record
type BitRecord r xs = BitRecordOf r Identity xs
instance (Corepresentable p, Comonad (Corep p), Functor f) => Extensible f p (BitProd r) where
type ExtensibleConstr (BitProd r) xs h x
= (BitFields r xs h, FromBits r (h x))
pieceAt i pafb = cotabulate $ \ws -> bupdate i (extract ws) <$> cosieve pafb (blookup i <$> ws)
# INLINE pieceAt #
| null | https://raw.githubusercontent.com/fumieval/extensible/6b119e922c1c109c2812d4ad3a2153838528c39d/src/Data/Extensible/Bits.hs | haskell | # LANGUAGE TypeInType #
---------------------------------------------------------------------
|
Module : Data.Extensible.Bits
License : BSD3
Bit-packed records
---------------------------------------------------------------------
| Conversion between a value and a bit representation.
Instances of `FromBits` must satisfy the following laws:
> fromBits (x `shiftL` W .|. toBits a) ≡ a
> toBits a `shiftR` W == zeroBits
| Fields are instances of 'FromBits' and fit in the representation.
| Convert a normal extensible record into a bit record.
| Convert a normal extensible record into a bit record.
| Bit-packed record
| Bit-packed record | # LANGUAGE UndecidableInstances , ScopedTypeVariables , MultiParamTypeClasses , TypeFamilies #
# LANGUAGE GeneralizedNewtypeDeriving , DeriveGeneric #
#if __GLASGOW_HASKELL__ < 806
#endif
Copyright : ( c ) 2018
Maintainer : < >
module Data.Extensible.Bits (BitProd(..)
, FromBits(..)
, TotalBits
, BitFields
, blookup
, bupdate
, toBitProd
, fromBitProd
, BitRecordOf
, BitRecord) where
import Control.Applicative
import Control.Comonad
import Data.Bits
import Data.Extensible.Class
import Data.Extensible.Dictionary
import Data.Extensible.Product
import Data.Extensible.Field
import Data.Functor.Identity
import Data.Hashable
import Data.Ix
import Data.Kind (Type)
import Data.Profunctor.Rep
import Data.Profunctor.Sieve
import Data.Proxy
import Data.Word
import Data.Int
import Foreign.Storable (Storable)
import GHC.Generics (Generic)
import GHC.TypeLits
| Bit - vector product . It has similar interface as @(:*)@ but fields are packed into @r@.
newtype BitProd r (xs :: [k]) (h :: k -> Type) = BitProd { unBitProd :: r }
deriving (Eq, Ord, Enum, Bounded, Ix, Generic, Hashable, Storable)
instance (Forall (Instance1 Show h) xs, BitFields r xs h) => Show (BitProd r xs h) where
showsPrec d x = showParen (d > 10)
$ showString "toBitProd " . showsPrec 11 (fromBitProd x)
| Total ' BitWidth '
type family TotalBits h xs where
TotalBits h '[] = 0
TotalBits h (x ': xs) = BitWidth (h x) + TotalBits h xs
where W is the ' BitWidth ' .
class (Bits r, KnownNat (BitWidth a)) => FromBits r a where
type BitWidth a :: Nat
fromBits :: r -> a
toBits :: a -> r
instance Bits r => FromBits r () where
type BitWidth () = 0
fromBits _ = ()
toBits _ = zeroBits
instance Bits r => FromBits r (Proxy a) where
type BitWidth (Proxy a) = 0
fromBits _ = Proxy
toBits _ = zeroBits
instance FromBits Word64 Word64 where
type BitWidth Word64 = 64
fromBits = id
toBits = id
instance FromBits Word64 Bool where
type BitWidth Bool = 1
fromBits = flip testBit 0
toBits False = 0
toBits True = 1
instance FromBits Word64 Word8 where
type BitWidth Word8 = 8
fromBits = fromIntegral
toBits = fromIntegral
instance FromBits Word64 Word16 where
type BitWidth Word16 = 16
fromBits = fromIntegral
toBits = fromIntegral
instance FromBits Word64 Word32 where
type BitWidth Word32 = 32
fromBits = fromIntegral
toBits = fromIntegral
instance FromBits Word64 Int8 where
type BitWidth Int8 = 8
fromBits = fromIntegral
toBits = fromIntegral . (fromIntegral :: Int8 -> Word8)
instance FromBits Word64 Int16 where
type BitWidth Int16 = 16
fromBits = fromIntegral
toBits = fromIntegral . (fromIntegral :: Int16 -> Word16)
instance FromBits Word64 Int32 where
type BitWidth Int32 = 32
fromBits = fromIntegral
toBits = fromIntegral . (fromIntegral :: Int32 -> Word32)
instance FromBits r a => FromBits r (Identity a) where
type BitWidth (Identity a) = BitWidth a
fromBits = Identity . fromBits
toBits = toBits . runIdentity
instance (FromBits r a, FromBits r b, n ~ (BitWidth a + BitWidth b), n <= BitWidth r, KnownNat n) => FromBits r (a, b) where
type BitWidth (a, b) = BitWidth a + BitWidth b
fromBits r = (fromBits (unsafeShiftR r width), fromBits r) where
width = fromInteger $ natVal (Proxy :: Proxy (BitWidth b))
toBits (a, b) = unsafeShiftL (toBits a) width .|. toBits b where
width = fromInteger $ natVal (Proxy :: Proxy (BitWidth b))
instance FromBits r a => FromBits r (Const a b) where
type BitWidth (Const a b) = BitWidth a
fromBits = Const . fromBits
toBits = toBits . getConst
instance (Bits r, FromBits r (h (TargetOf x))) => FromBits r (Field h x) where
type BitWidth (Field h x) = BitWidth (h (TargetOf x))
fromBits = Field . fromBits
toBits = toBits . getField
instance (Bits r, KnownNat (TotalBits h xs)) => FromBits r (BitProd r xs h) where
type BitWidth (BitProd r xs h) = TotalBits h xs
fromBits = BitProd
toBits = unBitProd
type BitFields r xs h = (FromBits r r
, TotalBits h xs <= BitWidth r
, Forall (Instance1 (FromBits r) h) xs)
toBitProd :: forall r xs h. BitFields r xs h => xs :& h -> BitProd r xs h
toBitProd p = hfoldrWithIndexFor (Proxy :: Proxy (Instance1 (FromBits r) h))
(\i v f r -> f $! bupdate i r v) id p (BitProd zeroBits)
# INLINE toBitProd #
fromBitProd :: forall r xs h. BitFields r xs h => BitProd r xs h -> xs :& h
fromBitProd p = htabulateFor (Proxy :: Proxy (Instance1 (FromBits r) h))
$ flip blookup p
# INLINE fromBitProd #
| ' hlookup ' for ' BitProd '
blookup :: forall x r xs h.
(BitFields r xs h, FromBits r (h x))
=> Membership xs x -> BitProd r xs h -> h x
blookup i (BitProd r) = fromBits $ unsafeShiftR r
$ bitOffsetAt (Proxy :: Proxy r) (Proxy :: Proxy h) (Proxy :: Proxy xs)
$ getMemberId i
# INLINE blookup #
| Update a field of a ' BitProd ' .
bupdate :: forall x r xs h.
(BitFields r xs h, FromBits r (h x))
=> Membership xs x -> BitProd r xs h -> h x -> BitProd r xs h
bupdate i (BitProd r) a = BitProd $ r .&. mask
.|. unsafeShiftL (toBits a) offset
where
mask = unsafeShiftL (complement zeroBits) width `rotateL` offset
width = fromInteger $ natVal (Proxy :: Proxy (BitWidth (h x)))
offset = bitOffsetAt (Proxy :: Proxy r) (Proxy :: Proxy h) (Proxy :: Proxy xs) $ getMemberId i
# INLINE bupdate #
bitOffsetAt :: forall k r h xs. Forall (Instance1 (FromBits r) h) xs
=> Proxy (r :: Type) -> Proxy (h :: k -> Type) -> Proxy (xs :: [k]) -> Int -> Int
bitOffsetAt _ ph _ = henumerateFor
(Proxy :: Proxy (Instance1 (FromBits r) h))
(Proxy :: Proxy xs)
(\m r o i -> if i == 0
then o
else r (fromInteger (natVal (proxyBitWidth ph m)) + o) (i - 1))
(error "Impossible") 0
# INLINE bitOffsetAt #
proxyBitWidth :: Proxy h -> proxy x -> Proxy (BitWidth (h x))
proxyBitWidth _ _ = Proxy
type BitRecordOf r h xs = BitProd r xs (Field h)
type BitRecord r xs = BitRecordOf r Identity xs
instance (Corepresentable p, Comonad (Corep p), Functor f) => Extensible f p (BitProd r) where
type ExtensibleConstr (BitProd r) xs h x
= (BitFields r xs h, FromBits r (h x))
pieceAt i pafb = cotabulate $ \ws -> bupdate i (extract ws) <$> cosieve pafb (blookup i <$> ws)
# INLINE pieceAt #
|
6cc28dc7369d9948ef974b2eaf740fef13a5d39d37426ddee0d848662ed58de3 | orchid-hybrid/microKanren-sagittarius | t1.scm | (import (scheme base)
(test-check)
(miruKanren mk-basic)
(examples closure))
(test-check "record #1"
(run* (lambda (q) (== q 'x)))
'((x where)))
(test-check "record #2"
(run* (lambda (q) (== (closure q) 'x)))
'())
(test-check "record #3"
(run* (lambda (q) (== (closure q) q)))
'())
It 's impossible to check if records are EQUAL ?
because does not specify what happens ....
;;
( test - check " record # 4 "
;; (run* (lambda (q) (== (closure 'x) q)))
;; `((,(closure 'x) where)))
(test-check "record #5"
(run* (lambda (q) (== (closure q) (closure 'y))))
'((y where)))
| null | https://raw.githubusercontent.com/orchid-hybrid/microKanren-sagittarius/9e740bbf94ed2930f88bbcf32636d3480934cfbb/t/records/t1.scm | scheme |
(run* (lambda (q) (== (closure 'x) q)))
`((,(closure 'x) where))) | (import (scheme base)
(test-check)
(miruKanren mk-basic)
(examples closure))
(test-check "record #1"
(run* (lambda (q) (== q 'x)))
'((x where)))
(test-check "record #2"
(run* (lambda (q) (== (closure q) 'x)))
'())
(test-check "record #3"
(run* (lambda (q) (== (closure q) q)))
'())
It 's impossible to check if records are EQUAL ?
because does not specify what happens ....
( test - check " record # 4 "
(test-check "record #5"
(run* (lambda (q) (== (closure q) (closure 'y))))
'((y where)))
|
34919915a5ed994b8c3ebe4cab13967006c53e89c67b2415983dc7aed64f9559 | hyperfiddle/electric | reactor1.clj | (ns dustin.reactor1
(:require
[minitest :refer [tests]]
[missionary.core :as m :refer [? ?? ?! ap]]))
(defn sleep-emit [delays]
(ap (let [n (?? (m/enumerate delays))]
(? (m/sleep n n)))))
(do
(def !x (atom 0))
(def effect (partial println 'effect))
(def process (m/reactor
(let [>a (m/signal! (m/watch !x))
>b (m/stream! (m/sample #(first %&)
>a
(sleep-emit (take 10 (repeat 100)))))]
(m/stream!
(ap (effect (?! >b)))))))
(process
(partial println 'finished 'success)
(partial println 'finished 'failure))
(swap! !x inc)
)
(comment
(def !x (atom 0))
(def effect (partial println 'effect))
(def process
(m/reactor
; reactors produce effects
(try
(let [>a (m/signal! (m/watch !x))
#_#_
>z (m/signal! (m/latest vector
(m/relieve {} (ap (inc (?! >a))))
(m/relieve {} (ap (dec (?! >a))))))]
(ap (effect (m/sample first >a (sleep-emit [100 100 100 100 100]))))
#_(ap (effect (?! >z)))
#_(declare !z)
#_(def !z (>z #(prn :ready) #(prn :done))))
(catch Exception e (println 'error)))))
(def ctx (process
(partial println 'finished 'success)
(partial println 'finished 'failure)))
((m/sp (let [>z (m/? process)
!z (>z #(prn :ready) #(prn :done))]
))
#(prn :ready1) #(prn :ready2))
(swap! !x inc)
(effect :z)
;(def !out (>out #(prn :ready) #(prn :done)))
@!out
;(def process-cancel (process #(prn :process/success) #()))
)
(comment
; cancelled
(def r (m/reactor (m/signal! (m/ap))))
(m/? r)
(m/reactor
(let [r (atom [])
i (m/signal! (m/watch (atom 1)))]
(m/stream! (m/ap (m/?? i) (swap! r conj (m/?? i)))) r))
)
| null | https://raw.githubusercontent.com/hyperfiddle/electric/1c6c3891cbf13123fef8d33e6555d300f0dac134/scratch/dustin/y2021/missionary/reactor1.clj | clojure | reactors produce effects
(def !out (>out #(prn :ready) #(prn :done)))
(def process-cancel (process #(prn :process/success) #()))
cancelled | (ns dustin.reactor1
(:require
[minitest :refer [tests]]
[missionary.core :as m :refer [? ?? ?! ap]]))
(defn sleep-emit [delays]
(ap (let [n (?? (m/enumerate delays))]
(? (m/sleep n n)))))
(do
(def !x (atom 0))
(def effect (partial println 'effect))
(def process (m/reactor
(let [>a (m/signal! (m/watch !x))
>b (m/stream! (m/sample #(first %&)
>a
(sleep-emit (take 10 (repeat 100)))))]
(m/stream!
(ap (effect (?! >b)))))))
(process
(partial println 'finished 'success)
(partial println 'finished 'failure))
(swap! !x inc)
)
(comment
(def !x (atom 0))
(def effect (partial println 'effect))
(def process
(m/reactor
(try
(let [>a (m/signal! (m/watch !x))
#_#_
>z (m/signal! (m/latest vector
(m/relieve {} (ap (inc (?! >a))))
(m/relieve {} (ap (dec (?! >a))))))]
(ap (effect (m/sample first >a (sleep-emit [100 100 100 100 100]))))
#_(ap (effect (?! >z)))
#_(declare !z)
#_(def !z (>z #(prn :ready) #(prn :done))))
(catch Exception e (println 'error)))))
(def ctx (process
(partial println 'finished 'success)
(partial println 'finished 'failure)))
((m/sp (let [>z (m/? process)
!z (>z #(prn :ready) #(prn :done))]
))
#(prn :ready1) #(prn :ready2))
(swap! !x inc)
(effect :z)
@!out
)
(comment
(def r (m/reactor (m/signal! (m/ap))))
(m/? r)
(m/reactor
(let [r (atom [])
i (m/signal! (m/watch (atom 1)))]
(m/stream! (m/ap (m/?? i) (swap! r conj (m/?? i)))) r))
)
|
110056419a5a96cd43dadd05b6e5d0d344b07bd9f01d8cbfb882ec8576413103 | lisp/de.setf.xml | namespace.lisp | -*- Mode : lisp ; Syntax : ansi - common - lisp ; Base : 10 ; Package : xml - query - data - model ; -*-
(in-package :xml-query-data-model)
(setq xml-query-data-model:*namespace*
(xml-query-data-model:defnamespace "#"
(:use)
(:nicknames)
(:export
"altLabel"
"broader"
"broaderTransitive"
"broadMatch"
"changeNote"
"closeMatch"
"Collection"
"Concept"
"ConceptScheme"
"definition"
"editorialNote"
"exactMatch"
"example"
"hasTopConcept"
"hiddenLabel"
"historyNote"
"inScheme"
"mappingRelation"
"member"
"memberList"
"narrower"
"narrowerTransitive"
"narrowMatch"
"notation"
"note"
"OrderedCollection"
"prefLabel"
"related"
"relatedMatch"
"scopeNote"
"semanticRelation"
"topConceptOf")
(:documentation nil)))
(let ((xml-query-data-model::p
(or (find-package "#")
(make-package "#"
:use
nil
:nicknames
'nil))))
(dolist (xml-query-data-model::s
'("altLabel" "broader" "broaderTransitive" "broadMatch" "changeNote"
"closeMatch" "Collection" "Concept" "ConceptScheme" "definition"
"editorialNote" "exactMatch" "example" "hasTopConcept"
"hiddenLabel" "historyNote" "inScheme" "mappingRelation" "member"
"memberList" "narrower" "narrowerTransitive" "narrowMatch"
"notation" "note" "OrderedCollection" "prefLabel" "related"
"relatedMatch" "scopeNote" "semanticRelation" "topConceptOf"))
(export (intern xml-query-data-model::s xml-query-data-model::p)
xml-query-data-model::p)))
;;; (xqdm:find-namespace "#" :if-does-not-exist :load)
| null | https://raw.githubusercontent.com/lisp/de.setf.xml/827681c969342096c3b95735d84b447befa69fa6/namespaces/www-w3-org/2004/02/skos/core/namespace.lisp | lisp | Syntax : ansi - common - lisp ; Base : 10 ; Package : xml - query - data - model ; -*-
(xqdm:find-namespace "#" :if-does-not-exist :load) |
(in-package :xml-query-data-model)
(setq xml-query-data-model:*namespace*
(xml-query-data-model:defnamespace "#"
(:use)
(:nicknames)
(:export
"altLabel"
"broader"
"broaderTransitive"
"broadMatch"
"changeNote"
"closeMatch"
"Collection"
"Concept"
"ConceptScheme"
"definition"
"editorialNote"
"exactMatch"
"example"
"hasTopConcept"
"hiddenLabel"
"historyNote"
"inScheme"
"mappingRelation"
"member"
"memberList"
"narrower"
"narrowerTransitive"
"narrowMatch"
"notation"
"note"
"OrderedCollection"
"prefLabel"
"related"
"relatedMatch"
"scopeNote"
"semanticRelation"
"topConceptOf")
(:documentation nil)))
(let ((xml-query-data-model::p
(or (find-package "#")
(make-package "#"
:use
nil
:nicknames
'nil))))
(dolist (xml-query-data-model::s
'("altLabel" "broader" "broaderTransitive" "broadMatch" "changeNote"
"closeMatch" "Collection" "Concept" "ConceptScheme" "definition"
"editorialNote" "exactMatch" "example" "hasTopConcept"
"hiddenLabel" "historyNote" "inScheme" "mappingRelation" "member"
"memberList" "narrower" "narrowerTransitive" "narrowMatch"
"notation" "note" "OrderedCollection" "prefLabel" "related"
"relatedMatch" "scopeNote" "semanticRelation" "topConceptOf"))
(export (intern xml-query-data-model::s xml-query-data-model::p)
xml-query-data-model::p)))
|
8e57cf5f4efcd83085b761a7a3cf27d6830b44d22feeb8b62168e4cb1e079003 | benjaminselfridge/logix | Main.hs | |
Module : Main
Description : Command line logix tool .
Copyright : ( c ) , 2017
License : :
Stability : experimental
Module : Main
Description : Command line logix tool.
Copyright : (c) Ben Selfridge, 2017
License : BSD3
Maintainer :
Stability : experimental
-}
module Main where
import Calculus
import Calculi
import Parse
import PPCalculus
import Utils
import Data.Char
import Data.List
import Data.List.Split
import Data.Maybe
import System.IO
version = "0.2.1"
data Env = Env { goal :: Derivation
, subgoal :: GoalSpec
, calculus :: Calculus
, quitFlag :: Bool
, pretty :: Bool
, unicode :: Bool
, history :: [String]
}
getCurrentGoal :: Env -> Derivation
getCurrentGoal env = case getGoal (subgoal env) (goal env) of
Nothing -> error $ "current subgoal non-existent: " ++ show (subgoal env)
Just der -> der
-- TODO: Add utility to hide/show left-hand assumptions, and another utility to
-- specifically list them
-- TODO: make help more readable by splitting it into sections
TODO : add " up " command to go up one level of the proof tree
TODO : when printing out a particular rule with a NoFreePat ( or several ) , instead
-- of printing it as [no free], just add a little qualifier string at the end.
TODO : add option to write " rule L & " or whatever , which only displays the
possibilities for L &
-- TODO: add "assume" command, maintaining a list of formulas as assumptions that get
-- prepended to every top-level goal. Ultimately want to be able to abbreviate
-- formulas.
-- TODO: maybe a manual mode, where the user can input the substitution for a
-- particular rule manually? "use" command might be cool
TODO : " examples " command that spits out examples of how to write formulas
commands :: [(String, (String, [String], Env -> String -> IO Env))]
commands = [ ("help", ("Print all commands.",
[],
help))
, ("top", ("Change top-level goal. If given no argument, " ++
"just prints the top-level goal.",
["<goal>"],
setTopGoal))
, ("rule", ("Apply a rule to the current subgoal. If given no argument, " ++
"just prints all applicable rules.",
["<ruleid>"],
rule))
, ("axiom", ("Apply an axiom to the current subgoal. If given no argument, " ++
"just prints all applicable axioms.",
["<axiomid>"],
axiom))
, ("goals", ("List all open subgoals.",
[],
listGoals))
, ("goal", ("Change current subgoal. If given no argument, " ++
"just prints the current subgoal.",
["<subgoal id>"],
changeSubgoal))
, ("history", ("Print out history of all commands you've entered.",
[],
showHistory))
, ("clear", ("Clear the derivation at a particular subgoal.",
["<subgoal>"],
clear))
, ("check", ("Check that each step in a derivation is valid.",
[],
check))
, ("tree", ("Print current proof tree.",
[],
printProofTree))
, ("pretty", ("Toggle pretty printing for proof tree.",
[],
togglePretty))
, ("unicode", ("Toggle unicode printing.",
[],
toggleUnicode))
, ("calc", ("Change current calculus. If given no argument, " ++
"just prints the current calculus.",
["<calcName>"],
changeCalculus))
, ("ruleinfo", ("List a particular rule or axiom.",
["<ruleName>"],
listRule))
, ("calcs", ("List all available calculi.",
[],
listCalculi))
, ("quit", ("Quit.",
[],
quit))
]
help :: Env -> String -> IO Env
help env _ = do mapM_ showCommand commands
return env
where showCommand (name, (desc, args, _)) = do
putStrLn $ name ++ " " ++ intercalate " " args
putStrLn $ " " ++ desc
setTopGoal :: Env -> String -> IO Env
setTopGoal env arg =
if null goalString
then do putStrLn $ ppSequent (unicode env) (calculus env) $ conclusion (goal env)
return env
else case parse (spaces *> sequent (calculus env) <* spaces <* end) goalString of
[] -> do putStrLn $ "Couldn't parse sequent \"" ++ goalString ++ "\"."
return env
-- TODO: Figure out why there might be multiple parses here (I know why but look
-- into fixing it)
((sequent,_):_) -> do
putStrLn $ "Changing goal to \"" ++ ppSequent (unicode env) (calculus env) sequent ++ "\"."
return $ env { goal = Stub sequent,
subgoal = [],
history = ["top " ++ goalString, "calc " ++ calcName (calculus env)]
-- clear history because we are starting a new
-- proof
}
where goalString = dropWhile (==' ') arg
listGoals :: Env -> String -> IO Env
listGoals env _ = do
putStrLn "Current open subgoals:"
mapM_ printGoal (stubs (goal env))
return env
where printGoal ([], sequent) = do
putStr $ if [] == (subgoal env) then " *" else " "
putStrLn $ "top: " ++ ppSequent (unicode env) (calculus env) sequent
printGoal (spec, sequent) = do
putStr $ if spec == (subgoal env) then " *" else " "
putStr $ ppGoalSpec spec
putStrLn $ ": " ++ ppSequent (unicode env) (calculus env) sequent
changeSubgoal :: Env -> String -> IO Env
changeSubgoal env arg =
if null subgoalString
then do let der = getCurrentGoal env
putStr $ "Current subgoal: " ++ ppSequent (unicode env) (calculus env) (conclusion der)
putStrLn $ " [" ++ ppGoalSpec (subgoal env) ++ "]"
return env
else case getGoal subgoalSpec (goal env) of
Nothing -> do putStrLn $ "Nonexistent subgoal: " ++ subgoalString
return env
Just der -> do
putStr $ "Current subgoal: " ++ ppSequent (unicode env) (calculus env) (conclusion der)
putStrLn $ " [" ++ ppGoalSpec subgoalSpec ++ "]"
let newHistory = case (history env) of
(comm:cs) | "goal " `isPrefixOf` comm -> ("goal " ++ subgoalString) : cs
otherwise -> ("goal " ++ subgoalString) : (history env)
return $ env { subgoal = subgoalSpec, history = newHistory }
where subgoalString = dropWhile (== ' ') arg
subgoalSpec = if subgoalString == "top"
then []
else case sequence $ map readMaybe (splitOn "." subgoalString) of
Just spec -> spec
Nothing -> []
showHistory :: Env -> String -> IO Env
showHistory env _ = mapM_ putStrLn (reverse (history env)) *> return env
clear :: Env -> String -> IO Env
clear env arg =
if null subgoalString
then case clearSubgoal (subgoal env) (goal env) of
Nothing -> do putStrLn $ "Nonexistent subgoal: " ++ subgoalString
return env
Just newGoal -> do
putStr $ "Current subgoal: "
putStr $ ppSequent (unicode env) (calculus env) (conclusion $ fromJust $ getGoal (subgoal env) newGoal)
putStrLn $ " [" ++ ppGoalSpec (subgoal env) ++ "]"
let newHistory = ("clear " ++ subgoalString) : (history env)
return $ env { goal = newGoal, history = newHistory }
else case clearSubgoal subgoalSpec (goal env) of
Nothing -> do putStrLn $ "Nonexistent subgoal: " ++ subgoalString
return env
Just newGoal -> do
putStr $ "Current subgoal: "
putStr $ ppSequent (unicode env) (calculus env) (conclusion $ fromJust $ getGoal subgoalSpec newGoal)
putStrLn $ " [" ++ ppGoalSpec subgoalSpec ++ "]"
let newHistory = ("clear " ++ subgoalString) : (history env)
return $ env { goal = newGoal, subgoal = subgoalSpec, history = newHistory }
where subgoalString = dropWhile (== ' ') arg
subgoalSpec = if subgoalString == "top"
then []
else case sequence $ map readMaybe (splitOn "." subgoalString) of
Just spec -> spec
Nothing -> []
check :: Env -> String -> IO Env
check env _ = do
case checkDerivation (calculus env) (goal env) of
Left d -> do
putStrLn "Error in subderivation: "
putStrLn $ ppDerivation (unicode env) (calculus env) d
Right () -> do
putStrLn $ "Valid derivation in " ++ calcName (calculus env)
return env
-- TODO: figure out why we can get multiple identical parses
getFormBindings :: Bool -> Calculus -> [FormulaPat] -> IO FormulaAssignment
getFormBindings unicode _ [] = return []
getFormBindings unicode calc (PredPat p:pats) = do
putStr $ "Need binding for atom " ++ p ++ ":\n " ++ p ++ " ::= "
hFlush stdout
str <- getLine
let fs = parse (spaces *> atomFormula <* end) str
case fs of
[] -> do putStrLn $ "Couldn't parse. Please enter a single atom identifier."
getFormBindings unicode calc (PredPat p:pats)
[(f,_)] -> do rest <- getFormBindings unicode calc pats
return $ (p, [f]) : rest
((f,_):_) -> do rest <- getFormBindings unicode calc pats
return $ (p, [f]) : rest
-- x -> error $ "multiple parses for atom: " ++ ppFormulaList unicode calc (map fst x)
getFormBindings unicode calc (FormPat a:pats) = do
putStr $ "Need binding for variable " ++ a ++ ":\n " ++ a ++ " ::= "
hFlush stdout
str <- getLine
let fs = parse (spaces *> formula calc <* end) str
case fs of
[] -> do putStrLn $ "Couldn't parse. Please enter a single formula."
getFormBindings unicode calc (FormPat a:pats)
[(f,_)] -> do rest <- getFormBindings unicode calc pats
return $ (a, [f]) : rest
((f,_):_) -> do rest <- getFormBindings unicode calc pats
return $ (a, [f]) : rest
-- x -> error $ "multiple parses for atom: " ++ ppFormulaList unicode calc (map fst x)
getFormBindings unicode calc (SetPat gamma:pats) = do
putStr $ "Need binding for formula list " ++ gamma ++ ":\n " ++ gamma ++ " ::= "
hFlush stdout
str <- getLine
let fs = parse (spaces *> formulaList calc <* end) str
case fs of
[] -> do putStrLn $ "Couldn't parse. Please enter a comma-separated list of formulas."
getFormBindings unicode calc (SetPat gamma:pats)
[(fs,_)] -> do rest <- getFormBindings unicode calc pats
return $ (gamma, fs) : rest
((fs,_):_) -> do rest <- getFormBindings unicode calc pats
return $ (gamma, fs) : rest
-- x -> error $ "multiple parses for atom: " ++ intercalate ", " (map (ppFormulaList unicode calc) (map fst x))
getFormBindings unicode calc (pat:_) = error $ "can't bind pattern " ++ ppFormulaPat unicode pat
getTermBindings :: Bool -> [TermPat] -> IO TermAssignment
getTermBindings unicode [] = return []
getTermBindings unicode (VarPat x:pats) = do
putStr $ "Need binding for variable <" ++ x ++ ">:\n <" ++ x ++ "> ::= "
hFlush stdout
str <- getLine
let xs = parse (spaces *> many1 alphaNum <* end) str
case xs of
[] -> do putStrLn $ "Couldn't parse. Please enter a single variable identifier (like 'x')."
getTermBindings unicode (VarPat x:pats)
[(y,_)] -> do rest <- getTermBindings unicode pats
return $ (x, VarTerm y) : rest
_ -> error $ "multiple parses for variable term: " ++ show x
getTermBindings unicode (TermPat t:pats) = do
putStr $ "Need binding for term <" ++ t ++ ">:\n <" ++ t ++ "> ::= "
hFlush stdout
str <- getLine
let ts = parse (spaces *> term <* end) str
case ts of
[] -> do putStrLn $ "Couldn't parse. Please enter a term."
getTermBindings unicode (TermPat t:pats)
[(t',_)] -> do rest <- getTermBindings unicode pats
return $ (t, t') : rest
_ -> error $ "multiple parses for variable term: " ++ show t
getFirstSubgoal :: Derivation -> GoalSpec
getFirstSubgoal der = case stubs der of
[] -> []
((subgoal, _):_) -> subgoal
getNextSubgoal :: Derivation -> GoalSpec -> GoalSpec
getNextSubgoal der spec = getNextSubgoal' (map fst $ stubs der) where
getNextSubgoal' [] = getFirstSubgoal der
getNextSubgoal' (stubSpec:specs) | spec <= stubSpec = stubSpec
| otherwise = getNextSubgoal' specs
rule :: Env -> String -> IO Env
rule env arg =
if null ruleString
then do putStrLn "Applicable rules: "
let rules = applicableRules (calculus env) $ conclusion $ getCurrentGoal env
let zRules = zipRules rules
mapM_ putStrLn (showZipRules zRules)
return env
else do let rules = applicableRules (calculus env) $ conclusion $ getCurrentGoal env
case rules !!! (ruleNum-1) of
Nothing -> do let rules = applicableRules (calculus env) $ conclusion $ getCurrentGoal env
let zRules = filter (\(_,(name,_,_)) -> name == ruleString) $ zipRules rules
mapM_ putStrLn (showZipRules zRules)
return env
Just (name, formBinding, termBinding) -> do
TODO : fix this . tryRule returns a list of unbound terms as well .
let (unboundForms, unboundTerms) = tryRule (calculus env) name formBinding termBinding
extraFormBindings <- getFormBindings (unicode env) (calculus env) unboundForms
extraTermBindings <- getTermBindings (unicode env) unboundTerms
-- TODO: get term bindings for unbound terms
case instRule (calculus env) name
(extraFormBindings ++ formBinding)
(extraTermBindings ++ termBinding)
(subgoal env)
(goal env) of
Just newGoal -> do
putStrLn $ "Applying " ++ name ++ "."
let nextSubgoal = getNextSubgoal newGoal (subgoal env)
putStrLn $ "Setting active subgoal to " ++ ppGoalSpec nextSubgoal ++
": " ++ ppSequent (unicode env) (calculus env) (conclusion (fromJust (getGoal nextSubgoal newGoal)))
let newHistory = ("rule " ++ ruleString) : (history env)
return env { goal = newGoal, subgoal = nextSubgoal, history = newHistory }
Nothing -> do
putStrLn "Invalid instantiation."
return env
where ruleString = dropWhile (== ' ') arg
-- TODO: fix this kludge; we really just need to make ruleNum a maybe, and
-- handle it above.
ruleNum = case readMaybe ruleString of
Just num -> num
Nothing -> 0
showRule (n, (name, formBinding, termBinding)) =
case prems of
[] ->
" " ++ show n ++ ". " ++ name ++ " with no obligations"
[prem] ->
" " ++ show n ++ ". " ++ name ++ " with obligation: " ++
ppSequentInst (unicode env) (calculus env) formBinding termBinding prem
_ ->
" " ++ show n ++ ". " ++ name ++ " with obligations:\n " ++
intercalate "\n " (map (ppSequentInst (unicode env) (calculus env) formBinding termBinding) prems)
where Just (prems, _) = lookup name (rules (calculus env))
zipRules rules = zip [1..] rules
showZipRules rules = map showRule rules
-- TODO: add term binding machinery for rules to axioms as well.
axiom :: Env -> String -> IO Env
axiom env arg =
if null axiomString
then do putStrLn "Applicable axioms: "
let axioms = applicableAxioms (calculus env) $ conclusion $ getCurrentGoal env
mapM_ putStrLn (showAxioms 1 axioms)
return env
else do let axioms = applicableAxioms (calculus env) $ conclusion $ getCurrentGoal env
case axioms !!! (axiomNum-1) of
Nothing -> do putStrLn $ "No axiom corresponding to " ++ axiomString
return env
Just (name, formBindings, termBindings) -> do
-- we should never have any unbound variables for an axiom, but we
-- provide this just for the sake of completeness.
TODO : fix this . tryAxiom returns a list of unbound terms as well .
-- TODO: We're never really using the extra bindings here...
let unboundVars = fst $ tryAxiom (calculus env) name formBindings termBindings
-- extraBindings <- getFormBindings (unicode env) unboundVars
putStrLn $ "Applying " ++ name ++ "."
-- TODO: if we add extra bindings, we need to update this line.
let Just newGoal = instAxiom (calculus env) name formBindings termBindings (subgoal env) (goal env)
let nextSubgoal = getNextSubgoal newGoal (subgoal env)
putStrLn $ "Setting active subgoal to " ++ ppGoalSpec nextSubgoal ++
": " ++ ppSequent (unicode env) (calculus env) (conclusion (fromJust (getGoal nextSubgoal newGoal)))
let newHistory = ("axiom " ++ axiomString) : (history env)
return env { goal = newGoal, subgoal = nextSubgoal, history = newHistory }
where axiomString = dropWhile (== ' ') arg
axiomNum = case readMaybe axiomString of
Just num -> num
Nothing -> 0
showAxiom n (name, formBindings, termBindings) = " " ++ show n ++ ". " ++ name ++ " with " ++ ppFormulaAssignment formBindings
showAxioms n [] = []
showAxioms n (x:xs) = showAxiom n x : showAxioms (n+1) xs
ppFormulaAssignment bindings = intercalate ", " (map showBindings bindings)
showBindings (var, [f]) = var ++ " := " ++ ppFormula (unicode env) (calculus env) f
showBindings (var, fs) = var ++ " := [" ++ ppFormulaList (unicode env) (calculus env) fs ++ "]"
printProofTree :: Env -> String -> IO Env
printProofTree env _ =
case (pretty env) of
True -> do putStr $ ppDerivationTree (unicode env) (calculus env) (goal env) (subgoal env)
return env
_ -> do putStr $ ppDerivation (unicode env) (calculus env) (goal env)
return env
togglePretty :: Env -> String -> IO Env
togglePretty env _ =
case (pretty env) of
True -> do putStrLn "Disabling pretty printing."
return env { pretty = False }
_ -> do putStrLn "Enabling pretty printing."
return env { pretty = True }
toggleUnicode :: Env -> String -> IO Env
toggleUnicode env _ =
case (unicode env) of
True -> do putStrLn "Disabling unicode."
return env { unicode = False }
_ -> do putStrLn "Enabling unicode."
return env { unicode = True }
changeCalculus :: Env -> String -> IO Env
changeCalculus env arg =
if null name
then do putStrLn $ ppCalculus (unicode env) $ calculus env
return env
else
case find (\calc -> calcName calc == name) calculi of
Nothing -> do putStrLn $ "No calculus named \"" ++ name ++ "\"."
return env
Just calc -> do putStrLn $ "Changing calculus to " ++ name ++ "."
let newHistory = ("calc " ++ name) : (history env)
return $ env { calculus = calc, history = newHistory }
where name = dropWhile (==' ') arg
-- TODO: fix spacing for axiom
listRule :: Env -> String -> IO Env
listRule env arg =
case (lookup ruleStr $ axioms (calculus env), lookup ruleStr $ rules (calculus env)) of
(Just axiomPat,_) -> do putStrLn (ppSequentPat (unicode env) axiomPat ++ " (" ++ ruleStr ++ ")")
return env
(_,Just rulePat) -> do putStrLn (ppRulePat (unicode env) "" (ruleStr, rulePat))
return env
_ -> do putStrLn $ "Couldn't find axiom/rule " ++ ruleStr
return env
where ruleStr = dropWhile (==' ') arg
listCalculi :: Env -> String -> IO Env
listCalculi env _ = do mapM_ (\calc -> putStrLn $ calcName calc) calculi
return env
quit :: Env -> String -> IO Env
quit env _ = do { putStrLn "Bye."; return env {quitFlag = True} }
repl :: Env -> IO ()
repl env = do
putStr "> "
hFlush stdout
s <- getLine
let (com, arg) = break isSpace (dropWhile (==' ') s)
case lookup com commands of
Nothing -> do putStrLn $ "Invalid command: " ++ com
repl env
Just (_, _, f) -> do env' <- f env arg
case quitFlag env' of
True -> return ()
False -> repl env'
introMessage :: String
introMessage =
"LogiX (Logic Explorer) v" ++ version ++ "\n" ++
"a customizable proof construction tool for sequent calculi\n\n" ++
"Type \"help\" for a list of commands.\n"
main :: IO ()
main = do
putStr introMessage
repl $ Env { goal = Stub ([] :=> [impliesForm (Pred "P" []) (Pred "P" [])])
, subgoal = []
, calculus = head calculi
, quitFlag = False
, pretty = True
, unicode = False
, history = ["top => P -> P", "calc " ++ calcName (head calculi)]
}
| null | https://raw.githubusercontent.com/benjaminselfridge/logix/4c19c1cf036b1d4ceb4fe05e7d43c9cff32e80a1/src/Main.hs | haskell | TODO: Add utility to hide/show left-hand assumptions, and another utility to
specifically list them
TODO: make help more readable by splitting it into sections
of printing it as [no free], just add a little qualifier string at the end.
TODO: add "assume" command, maintaining a list of formulas as assumptions that get
prepended to every top-level goal. Ultimately want to be able to abbreviate
formulas.
TODO: maybe a manual mode, where the user can input the substitution for a
particular rule manually? "use" command might be cool
TODO: Figure out why there might be multiple parses here (I know why but look
into fixing it)
clear history because we are starting a new
proof
TODO: figure out why we can get multiple identical parses
x -> error $ "multiple parses for atom: " ++ ppFormulaList unicode calc (map fst x)
x -> error $ "multiple parses for atom: " ++ ppFormulaList unicode calc (map fst x)
x -> error $ "multiple parses for atom: " ++ intercalate ", " (map (ppFormulaList unicode calc) (map fst x))
TODO: get term bindings for unbound terms
TODO: fix this kludge; we really just need to make ruleNum a maybe, and
handle it above.
TODO: add term binding machinery for rules to axioms as well.
we should never have any unbound variables for an axiom, but we
provide this just for the sake of completeness.
TODO: We're never really using the extra bindings here...
extraBindings <- getFormBindings (unicode env) unboundVars
TODO: if we add extra bindings, we need to update this line.
TODO: fix spacing for axiom | |
Module : Main
Description : Command line logix tool .
Copyright : ( c ) , 2017
License : :
Stability : experimental
Module : Main
Description : Command line logix tool.
Copyright : (c) Ben Selfridge, 2017
License : BSD3
Maintainer :
Stability : experimental
-}
module Main where
import Calculus
import Calculi
import Parse
import PPCalculus
import Utils
import Data.Char
import Data.List
import Data.List.Split
import Data.Maybe
import System.IO
version = "0.2.1"
data Env = Env { goal :: Derivation
, subgoal :: GoalSpec
, calculus :: Calculus
, quitFlag :: Bool
, pretty :: Bool
, unicode :: Bool
, history :: [String]
}
getCurrentGoal :: Env -> Derivation
getCurrentGoal env = case getGoal (subgoal env) (goal env) of
Nothing -> error $ "current subgoal non-existent: " ++ show (subgoal env)
Just der -> der
TODO : add " up " command to go up one level of the proof tree
TODO : when printing out a particular rule with a NoFreePat ( or several ) , instead
TODO : add option to write " rule L & " or whatever , which only displays the
possibilities for L &
TODO : " examples " command that spits out examples of how to write formulas
commands :: [(String, (String, [String], Env -> String -> IO Env))]
commands = [ ("help", ("Print all commands.",
[],
help))
, ("top", ("Change top-level goal. If given no argument, " ++
"just prints the top-level goal.",
["<goal>"],
setTopGoal))
, ("rule", ("Apply a rule to the current subgoal. If given no argument, " ++
"just prints all applicable rules.",
["<ruleid>"],
rule))
, ("axiom", ("Apply an axiom to the current subgoal. If given no argument, " ++
"just prints all applicable axioms.",
["<axiomid>"],
axiom))
, ("goals", ("List all open subgoals.",
[],
listGoals))
, ("goal", ("Change current subgoal. If given no argument, " ++
"just prints the current subgoal.",
["<subgoal id>"],
changeSubgoal))
, ("history", ("Print out history of all commands you've entered.",
[],
showHistory))
, ("clear", ("Clear the derivation at a particular subgoal.",
["<subgoal>"],
clear))
, ("check", ("Check that each step in a derivation is valid.",
[],
check))
, ("tree", ("Print current proof tree.",
[],
printProofTree))
, ("pretty", ("Toggle pretty printing for proof tree.",
[],
togglePretty))
, ("unicode", ("Toggle unicode printing.",
[],
toggleUnicode))
, ("calc", ("Change current calculus. If given no argument, " ++
"just prints the current calculus.",
["<calcName>"],
changeCalculus))
, ("ruleinfo", ("List a particular rule or axiom.",
["<ruleName>"],
listRule))
, ("calcs", ("List all available calculi.",
[],
listCalculi))
, ("quit", ("Quit.",
[],
quit))
]
help :: Env -> String -> IO Env
help env _ = do mapM_ showCommand commands
return env
where showCommand (name, (desc, args, _)) = do
putStrLn $ name ++ " " ++ intercalate " " args
putStrLn $ " " ++ desc
setTopGoal :: Env -> String -> IO Env
setTopGoal env arg =
if null goalString
then do putStrLn $ ppSequent (unicode env) (calculus env) $ conclusion (goal env)
return env
else case parse (spaces *> sequent (calculus env) <* spaces <* end) goalString of
[] -> do putStrLn $ "Couldn't parse sequent \"" ++ goalString ++ "\"."
return env
((sequent,_):_) -> do
putStrLn $ "Changing goal to \"" ++ ppSequent (unicode env) (calculus env) sequent ++ "\"."
return $ env { goal = Stub sequent,
subgoal = [],
history = ["top " ++ goalString, "calc " ++ calcName (calculus env)]
}
where goalString = dropWhile (==' ') arg
listGoals :: Env -> String -> IO Env
listGoals env _ = do
putStrLn "Current open subgoals:"
mapM_ printGoal (stubs (goal env))
return env
where printGoal ([], sequent) = do
putStr $ if [] == (subgoal env) then " *" else " "
putStrLn $ "top: " ++ ppSequent (unicode env) (calculus env) sequent
printGoal (spec, sequent) = do
putStr $ if spec == (subgoal env) then " *" else " "
putStr $ ppGoalSpec spec
putStrLn $ ": " ++ ppSequent (unicode env) (calculus env) sequent
changeSubgoal :: Env -> String -> IO Env
changeSubgoal env arg =
if null subgoalString
then do let der = getCurrentGoal env
putStr $ "Current subgoal: " ++ ppSequent (unicode env) (calculus env) (conclusion der)
putStrLn $ " [" ++ ppGoalSpec (subgoal env) ++ "]"
return env
else case getGoal subgoalSpec (goal env) of
Nothing -> do putStrLn $ "Nonexistent subgoal: " ++ subgoalString
return env
Just der -> do
putStr $ "Current subgoal: " ++ ppSequent (unicode env) (calculus env) (conclusion der)
putStrLn $ " [" ++ ppGoalSpec subgoalSpec ++ "]"
let newHistory = case (history env) of
(comm:cs) | "goal " `isPrefixOf` comm -> ("goal " ++ subgoalString) : cs
otherwise -> ("goal " ++ subgoalString) : (history env)
return $ env { subgoal = subgoalSpec, history = newHistory }
where subgoalString = dropWhile (== ' ') arg
subgoalSpec = if subgoalString == "top"
then []
else case sequence $ map readMaybe (splitOn "." subgoalString) of
Just spec -> spec
Nothing -> []
showHistory :: Env -> String -> IO Env
showHistory env _ = mapM_ putStrLn (reverse (history env)) *> return env
clear :: Env -> String -> IO Env
clear env arg =
if null subgoalString
then case clearSubgoal (subgoal env) (goal env) of
Nothing -> do putStrLn $ "Nonexistent subgoal: " ++ subgoalString
return env
Just newGoal -> do
putStr $ "Current subgoal: "
putStr $ ppSequent (unicode env) (calculus env) (conclusion $ fromJust $ getGoal (subgoal env) newGoal)
putStrLn $ " [" ++ ppGoalSpec (subgoal env) ++ "]"
let newHistory = ("clear " ++ subgoalString) : (history env)
return $ env { goal = newGoal, history = newHistory }
else case clearSubgoal subgoalSpec (goal env) of
Nothing -> do putStrLn $ "Nonexistent subgoal: " ++ subgoalString
return env
Just newGoal -> do
putStr $ "Current subgoal: "
putStr $ ppSequent (unicode env) (calculus env) (conclusion $ fromJust $ getGoal subgoalSpec newGoal)
putStrLn $ " [" ++ ppGoalSpec subgoalSpec ++ "]"
let newHistory = ("clear " ++ subgoalString) : (history env)
return $ env { goal = newGoal, subgoal = subgoalSpec, history = newHistory }
where subgoalString = dropWhile (== ' ') arg
subgoalSpec = if subgoalString == "top"
then []
else case sequence $ map readMaybe (splitOn "." subgoalString) of
Just spec -> spec
Nothing -> []
check :: Env -> String -> IO Env
check env _ = do
case checkDerivation (calculus env) (goal env) of
Left d -> do
putStrLn "Error in subderivation: "
putStrLn $ ppDerivation (unicode env) (calculus env) d
Right () -> do
putStrLn $ "Valid derivation in " ++ calcName (calculus env)
return env
getFormBindings :: Bool -> Calculus -> [FormulaPat] -> IO FormulaAssignment
getFormBindings unicode _ [] = return []
getFormBindings unicode calc (PredPat p:pats) = do
putStr $ "Need binding for atom " ++ p ++ ":\n " ++ p ++ " ::= "
hFlush stdout
str <- getLine
let fs = parse (spaces *> atomFormula <* end) str
case fs of
[] -> do putStrLn $ "Couldn't parse. Please enter a single atom identifier."
getFormBindings unicode calc (PredPat p:pats)
[(f,_)] -> do rest <- getFormBindings unicode calc pats
return $ (p, [f]) : rest
((f,_):_) -> do rest <- getFormBindings unicode calc pats
return $ (p, [f]) : rest
getFormBindings unicode calc (FormPat a:pats) = do
putStr $ "Need binding for variable " ++ a ++ ":\n " ++ a ++ " ::= "
hFlush stdout
str <- getLine
let fs = parse (spaces *> formula calc <* end) str
case fs of
[] -> do putStrLn $ "Couldn't parse. Please enter a single formula."
getFormBindings unicode calc (FormPat a:pats)
[(f,_)] -> do rest <- getFormBindings unicode calc pats
return $ (a, [f]) : rest
((f,_):_) -> do rest <- getFormBindings unicode calc pats
return $ (a, [f]) : rest
getFormBindings unicode calc (SetPat gamma:pats) = do
putStr $ "Need binding for formula list " ++ gamma ++ ":\n " ++ gamma ++ " ::= "
hFlush stdout
str <- getLine
let fs = parse (spaces *> formulaList calc <* end) str
case fs of
[] -> do putStrLn $ "Couldn't parse. Please enter a comma-separated list of formulas."
getFormBindings unicode calc (SetPat gamma:pats)
[(fs,_)] -> do rest <- getFormBindings unicode calc pats
return $ (gamma, fs) : rest
((fs,_):_) -> do rest <- getFormBindings unicode calc pats
return $ (gamma, fs) : rest
getFormBindings unicode calc (pat:_) = error $ "can't bind pattern " ++ ppFormulaPat unicode pat
getTermBindings :: Bool -> [TermPat] -> IO TermAssignment
getTermBindings unicode [] = return []
getTermBindings unicode (VarPat x:pats) = do
putStr $ "Need binding for variable <" ++ x ++ ">:\n <" ++ x ++ "> ::= "
hFlush stdout
str <- getLine
let xs = parse (spaces *> many1 alphaNum <* end) str
case xs of
[] -> do putStrLn $ "Couldn't parse. Please enter a single variable identifier (like 'x')."
getTermBindings unicode (VarPat x:pats)
[(y,_)] -> do rest <- getTermBindings unicode pats
return $ (x, VarTerm y) : rest
_ -> error $ "multiple parses for variable term: " ++ show x
getTermBindings unicode (TermPat t:pats) = do
putStr $ "Need binding for term <" ++ t ++ ">:\n <" ++ t ++ "> ::= "
hFlush stdout
str <- getLine
let ts = parse (spaces *> term <* end) str
case ts of
[] -> do putStrLn $ "Couldn't parse. Please enter a term."
getTermBindings unicode (TermPat t:pats)
[(t',_)] -> do rest <- getTermBindings unicode pats
return $ (t, t') : rest
_ -> error $ "multiple parses for variable term: " ++ show t
getFirstSubgoal :: Derivation -> GoalSpec
getFirstSubgoal der = case stubs der of
[] -> []
((subgoal, _):_) -> subgoal
getNextSubgoal :: Derivation -> GoalSpec -> GoalSpec
getNextSubgoal der spec = getNextSubgoal' (map fst $ stubs der) where
getNextSubgoal' [] = getFirstSubgoal der
getNextSubgoal' (stubSpec:specs) | spec <= stubSpec = stubSpec
| otherwise = getNextSubgoal' specs
rule :: Env -> String -> IO Env
rule env arg =
if null ruleString
then do putStrLn "Applicable rules: "
let rules = applicableRules (calculus env) $ conclusion $ getCurrentGoal env
let zRules = zipRules rules
mapM_ putStrLn (showZipRules zRules)
return env
else do let rules = applicableRules (calculus env) $ conclusion $ getCurrentGoal env
case rules !!! (ruleNum-1) of
Nothing -> do let rules = applicableRules (calculus env) $ conclusion $ getCurrentGoal env
let zRules = filter (\(_,(name,_,_)) -> name == ruleString) $ zipRules rules
mapM_ putStrLn (showZipRules zRules)
return env
Just (name, formBinding, termBinding) -> do
TODO : fix this . tryRule returns a list of unbound terms as well .
let (unboundForms, unboundTerms) = tryRule (calculus env) name formBinding termBinding
extraFormBindings <- getFormBindings (unicode env) (calculus env) unboundForms
extraTermBindings <- getTermBindings (unicode env) unboundTerms
case instRule (calculus env) name
(extraFormBindings ++ formBinding)
(extraTermBindings ++ termBinding)
(subgoal env)
(goal env) of
Just newGoal -> do
putStrLn $ "Applying " ++ name ++ "."
let nextSubgoal = getNextSubgoal newGoal (subgoal env)
putStrLn $ "Setting active subgoal to " ++ ppGoalSpec nextSubgoal ++
": " ++ ppSequent (unicode env) (calculus env) (conclusion (fromJust (getGoal nextSubgoal newGoal)))
let newHistory = ("rule " ++ ruleString) : (history env)
return env { goal = newGoal, subgoal = nextSubgoal, history = newHistory }
Nothing -> do
putStrLn "Invalid instantiation."
return env
where ruleString = dropWhile (== ' ') arg
ruleNum = case readMaybe ruleString of
Just num -> num
Nothing -> 0
showRule (n, (name, formBinding, termBinding)) =
case prems of
[] ->
" " ++ show n ++ ". " ++ name ++ " with no obligations"
[prem] ->
" " ++ show n ++ ". " ++ name ++ " with obligation: " ++
ppSequentInst (unicode env) (calculus env) formBinding termBinding prem
_ ->
" " ++ show n ++ ". " ++ name ++ " with obligations:\n " ++
intercalate "\n " (map (ppSequentInst (unicode env) (calculus env) formBinding termBinding) prems)
where Just (prems, _) = lookup name (rules (calculus env))
zipRules rules = zip [1..] rules
showZipRules rules = map showRule rules
axiom :: Env -> String -> IO Env
axiom env arg =
if null axiomString
then do putStrLn "Applicable axioms: "
let axioms = applicableAxioms (calculus env) $ conclusion $ getCurrentGoal env
mapM_ putStrLn (showAxioms 1 axioms)
return env
else do let axioms = applicableAxioms (calculus env) $ conclusion $ getCurrentGoal env
case axioms !!! (axiomNum-1) of
Nothing -> do putStrLn $ "No axiom corresponding to " ++ axiomString
return env
Just (name, formBindings, termBindings) -> do
TODO : fix this . tryAxiom returns a list of unbound terms as well .
let unboundVars = fst $ tryAxiom (calculus env) name formBindings termBindings
putStrLn $ "Applying " ++ name ++ "."
let Just newGoal = instAxiom (calculus env) name formBindings termBindings (subgoal env) (goal env)
let nextSubgoal = getNextSubgoal newGoal (subgoal env)
putStrLn $ "Setting active subgoal to " ++ ppGoalSpec nextSubgoal ++
": " ++ ppSequent (unicode env) (calculus env) (conclusion (fromJust (getGoal nextSubgoal newGoal)))
let newHistory = ("axiom " ++ axiomString) : (history env)
return env { goal = newGoal, subgoal = nextSubgoal, history = newHistory }
where axiomString = dropWhile (== ' ') arg
axiomNum = case readMaybe axiomString of
Just num -> num
Nothing -> 0
showAxiom n (name, formBindings, termBindings) = " " ++ show n ++ ". " ++ name ++ " with " ++ ppFormulaAssignment formBindings
showAxioms n [] = []
showAxioms n (x:xs) = showAxiom n x : showAxioms (n+1) xs
ppFormulaAssignment bindings = intercalate ", " (map showBindings bindings)
showBindings (var, [f]) = var ++ " := " ++ ppFormula (unicode env) (calculus env) f
showBindings (var, fs) = var ++ " := [" ++ ppFormulaList (unicode env) (calculus env) fs ++ "]"
printProofTree :: Env -> String -> IO Env
printProofTree env _ =
case (pretty env) of
True -> do putStr $ ppDerivationTree (unicode env) (calculus env) (goal env) (subgoal env)
return env
_ -> do putStr $ ppDerivation (unicode env) (calculus env) (goal env)
return env
togglePretty :: Env -> String -> IO Env
togglePretty env _ =
case (pretty env) of
True -> do putStrLn "Disabling pretty printing."
return env { pretty = False }
_ -> do putStrLn "Enabling pretty printing."
return env { pretty = True }
toggleUnicode :: Env -> String -> IO Env
toggleUnicode env _ =
case (unicode env) of
True -> do putStrLn "Disabling unicode."
return env { unicode = False }
_ -> do putStrLn "Enabling unicode."
return env { unicode = True }
changeCalculus :: Env -> String -> IO Env
changeCalculus env arg =
if null name
then do putStrLn $ ppCalculus (unicode env) $ calculus env
return env
else
case find (\calc -> calcName calc == name) calculi of
Nothing -> do putStrLn $ "No calculus named \"" ++ name ++ "\"."
return env
Just calc -> do putStrLn $ "Changing calculus to " ++ name ++ "."
let newHistory = ("calc " ++ name) : (history env)
return $ env { calculus = calc, history = newHistory }
where name = dropWhile (==' ') arg
listRule :: Env -> String -> IO Env
listRule env arg =
case (lookup ruleStr $ axioms (calculus env), lookup ruleStr $ rules (calculus env)) of
(Just axiomPat,_) -> do putStrLn (ppSequentPat (unicode env) axiomPat ++ " (" ++ ruleStr ++ ")")
return env
(_,Just rulePat) -> do putStrLn (ppRulePat (unicode env) "" (ruleStr, rulePat))
return env
_ -> do putStrLn $ "Couldn't find axiom/rule " ++ ruleStr
return env
where ruleStr = dropWhile (==' ') arg
listCalculi :: Env -> String -> IO Env
listCalculi env _ = do mapM_ (\calc -> putStrLn $ calcName calc) calculi
return env
quit :: Env -> String -> IO Env
quit env _ = do { putStrLn "Bye."; return env {quitFlag = True} }
repl :: Env -> IO ()
repl env = do
putStr "> "
hFlush stdout
s <- getLine
let (com, arg) = break isSpace (dropWhile (==' ') s)
case lookup com commands of
Nothing -> do putStrLn $ "Invalid command: " ++ com
repl env
Just (_, _, f) -> do env' <- f env arg
case quitFlag env' of
True -> return ()
False -> repl env'
introMessage :: String
introMessage =
"LogiX (Logic Explorer) v" ++ version ++ "\n" ++
"a customizable proof construction tool for sequent calculi\n\n" ++
"Type \"help\" for a list of commands.\n"
main :: IO ()
main = do
putStr introMessage
repl $ Env { goal = Stub ([] :=> [impliesForm (Pred "P" []) (Pred "P" [])])
, subgoal = []
, calculus = head calculi
, quitFlag = False
, pretty = True
, unicode = False
, history = ["top => P -> P", "calc " ++ calcName (head calculi)]
}
|
389167be2df25c0f00f77e6b5abc220dbe98a1b8867d7560566850de011eb5a3 | facebook/flow | modulename.ml |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
Signature for module names . Such names are assumed to be " resolved " , and have
global scope in the sense that they may be used to identify modules uniquely
in a code base . In contrast , module references ( strings that are used to
refer to modules in require / import statements ) have local scope in the sense
that their meaning is relative to the files they appear in .
There are two ways to construct a module name :
* A module name may be a String that is declared in a file : e.g. , in the
Haste module system such module names are declared via @providesModule .
* A module name may be a Filename : e.g. , in the Node module system a module
is simply known by its path in the file system .
global scope in the sense that they may be used to identify modules uniquely
in a code base. In contrast, module references (strings that are used to
refer to modules in require/import statements) have local scope in the sense
that their meaning is relative to the files they appear in.
There are two ways to construct a module name:
* A module name may be a String that is declared in a file: e.g., in the
Haste module system such module names are declared via @providesModule.
* A module name may be a Filename: e.g., in the Node module system a module
is simply known by its path in the file system.
*)
type t =
| String of string
| Filename of File_key.t
[@@deriving show, ord]
let to_string = function
| String m -> m
| Filename f -> File_key.to_string f
module Key = struct
type nonrec t = t
let pp = pp
let to_string = to_string
let compare : t -> t -> int = compare
end
module Set = struct
include Flow_set.Make (Key)
let pp = make_pp Key.pp
let show x = Format.asprintf "%a" pp x
end
module Map = struct
include WrappedMap.Make (Key)
let pp pp_data = make_pp Key.pp pp_data
let show pp_data x = Format.asprintf "%a" (pp pp_data) x
end
| null | https://raw.githubusercontent.com/facebook/flow/741104e69c43057ebd32804dd6bcc1b5e97548ea/src/common/modulename/modulename.ml | ocaml |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
Signature for module names . Such names are assumed to be " resolved " , and have
global scope in the sense that they may be used to identify modules uniquely
in a code base . In contrast , module references ( strings that are used to
refer to modules in require / import statements ) have local scope in the sense
that their meaning is relative to the files they appear in .
There are two ways to construct a module name :
* A module name may be a String that is declared in a file : e.g. , in the
Haste module system such module names are declared via @providesModule .
* A module name may be a Filename : e.g. , in the Node module system a module
is simply known by its path in the file system .
global scope in the sense that they may be used to identify modules uniquely
in a code base. In contrast, module references (strings that are used to
refer to modules in require/import statements) have local scope in the sense
that their meaning is relative to the files they appear in.
There are two ways to construct a module name:
* A module name may be a String that is declared in a file: e.g., in the
Haste module system such module names are declared via @providesModule.
* A module name may be a Filename: e.g., in the Node module system a module
is simply known by its path in the file system.
*)
type t =
| String of string
| Filename of File_key.t
[@@deriving show, ord]
let to_string = function
| String m -> m
| Filename f -> File_key.to_string f
module Key = struct
type nonrec t = t
let pp = pp
let to_string = to_string
let compare : t -> t -> int = compare
end
module Set = struct
include Flow_set.Make (Key)
let pp = make_pp Key.pp
let show x = Format.asprintf "%a" pp x
end
module Map = struct
include WrappedMap.Make (Key)
let pp pp_data = make_pp Key.pp pp_data
let show pp_data x = Format.asprintf "%a" (pp pp_data) x
end
| |
eba8ab543936f0f580bef4e2b9600c0a32b41a00abfeb5a7dfae32f0039cbebb | kfish/const-math-ghc-plugin | 4383.hs | main = print (0.5 ^ 1030)
| null | https://raw.githubusercontent.com/kfish/const-math-ghc-plugin/c1d269e0ddc72a782c73cca233ec9488f69112a9/tests/ghc-7.4/4383.hs | haskell | main = print (0.5 ^ 1030)
| |
c7cb74a4d91be510052ecf97bb79a7edc7fb99a9d34a62087258b7b1b01a46c4 | runa-labs/clj-hazelcast | project.clj | (defproject org.clojars.runa/clj-hazelcast "1.2.1"
:description "Clojure library for the Hazelcast p2p cluster"
:dependencies [[org.clojure/clojure "1.6.0"]
[org.clojars.runa/clj-kryo "1.5.0"]
[com.hazelcast/hazelcast "3.4"]]
:profiles {:provided {:dependencies [[org.clojure/tools.logging "0.2.6"]]}}
:global-vars {*warn-on-reflection* true}
provided
:java-source-paths ["src-java"])
| null | https://raw.githubusercontent.com/runa-labs/clj-hazelcast/ba577984796e6e8e876554b10c6ebbb763ca5b44/project.clj | clojure | (defproject org.clojars.runa/clj-hazelcast "1.2.1"
:description "Clojure library for the Hazelcast p2p cluster"
:dependencies [[org.clojure/clojure "1.6.0"]
[org.clojars.runa/clj-kryo "1.5.0"]
[com.hazelcast/hazelcast "3.4"]]
:profiles {:provided {:dependencies [[org.clojure/tools.logging "0.2.6"]]}}
:global-vars {*warn-on-reflection* true}
provided
:java-source-paths ["src-java"])
| |
c08938f352149058d05802352e6bb807277ecd8c28cf94dd79d09b981a0ae6ca | bobatkey/CS316-17 | Lec15.hs | module Lec15 where
import Control.Applicative hiding (some, many)
import Control.Monad
import Data.Char
LECTURE 15 : MORE MONADIC PARSING
First we 'll recap the ' Parser ' type we defined in the last lecture .
Parsers of things are functions from strings to the possibility of
pairs of things and strings :
Parsers of things are functions from strings to the possibility of
pairs of things and strings: -}
newtype Parser a = MkParser (String -> Maybe (a,String))
We apply a ' Parser ' to a string using ' runParser ' , which returns
either ' Nothing ' if the parser fails to extract anything from the
string , or ' Just ( a , s ) ' if it extracted ' a ' with leftover string
's ' . We 'll see more examples of this below .
either 'Nothing' if the parser fails to extract anything from the
string, or 'Just (a, s)' if it extracted 'a' with leftover string
's'. We'll see more examples of this below. -}
runParser :: Parser a -> String -> Maybe (a, String)
runParser (MkParser p) input = p input
What makes ' 's so useful is that they support lots of
structure : ' Functor ' , ' Applicative ' , ' Alternative ' , and ' ' . We
introduced this structure in the previous lecture , so we briefly
reintroduce it here .
The ' Functor ' typeclass includes the ' fmap ' function , which allows
us to post - process the results of a ' Parser ' , as we saw last time .
structure: 'Functor', 'Applicative', 'Alternative', and 'Monad'. We
introduced this structure in the previous lecture, so we briefly
reintroduce it here.
The 'Functor' typeclass includes the 'fmap' function, which allows
us to post-process the results of a 'Parser', as we saw last time. -}
instance Functor Parser where
-- fmap :: (a -> b) -> Parser a -> Parser b
fmap f (MkParser p) =
MkParser (\input -> fmap (\(a,rest) -> (f a,rest)) (p input))
The ' Alternative ' instance for ' Parser 's allows us to try one parser
and then another if the first one fails ( ' p1 < | > p2 ' ) , or to write
a parser that always fails ( ' empty ' ) .
and then another if the first one fails ('p1 <|> p2'), or to write
a parser that always fails ('empty'). -}
orElse :: Parser a -> Parser a -> Parser a
orElse (MkParser p1) (MkParser p2) =
MkParser (\input -> case p1 input of
Nothing -> p2 input
Just (a,s) -> Just (a,s))
failure :: Parser a
failure = MkParser (\input -> Nothing)
instance Alternative Parser where
empty = failure
(<|>) = orElse
The ' Applicative ' instance for ' Parser 's allows us to run one parser
and then another one afterwards . The leftover input from the first
parser is fed into the second one .
and then another one afterwards. The leftover input from the first
parser is fed into the second one. -}
andThen :: Parser a -> Parser b -> Parser (a,b)
andThen (MkParser p1) (MkParser p2) =
MkParser (\input -> case p1 input of
Nothing -> Nothing
Just (a, input2) ->
case p2 input2 of
Nothing -> Nothing
Just (b, rest) ->
Just ((a,b), rest))
nothing :: Parser ()
nothing = MkParser (\input -> Just ((), input))
instance Applicative Parser where
-- pure :: a -> Parser a
pure x = const x <$> nothing
( < * > ) : : ( a - > b ) - > Parser a - > Parser b
pf <*> pa = (\(f, a) -> f a) <$> (pf `andThen` pa)
The ' Monad ' instance allows us to run a ' Parser ' and then choose
another ' Parser ' based on the data parsed by the first . This is
useful for making decisions based on the input , such as filtering
out certain things , as we saw in the previous lecture .
another 'Parser' based on the data parsed by the first. This is
useful for making decisions based on the input, such as filtering
out certain things, as we saw in the previous lecture. -}
instance Monad Parser where
MkParser p >>= f =
MkParser (\input -> case p input of
Nothing -> Nothing
Just (a,rest) ->
let MkParser p2 = f a in
p2 rest)
{- So far, we haven't yet defined anything that actually consumes any
input. We do this by defining the 'char' parser. This reads a
single character from the input, or fails if that is not possible. -}
char :: Parser Char
char = MkParser (\input -> case input of
"" -> Nothing
c:cs -> Just (c,cs))
Finally , we have a ' Parser ' that only succeeds when we are at the end
of input ( eoi ) .
of input (eoi). -}
eoi :: Parser ()
eoi = MkParser (\input -> case input of
"" -> Just ((), "")
_:_ -> Nothing)
{--------------------------------------------------------------------}
Part 1 . Building parsers
{--------------------------------------------------------------------}
We have now defined all of the basic functions we need to build more
complex parsers . Everything to do with parsers from this point on
is done in terms of :
1 . The Functor interface ( fmap )
2 . The Monad interface ( return , > > =)
3 . The Applicative interface ( pure , < * > )
4 . The Alternative interface ( empty , < | > )
5 . ' char '
6 . ' eoi '
And , of course , to actually use parsers we need the ' runParser '
function .
Let 's see now how to build up more complex parsers to recognise
more complex kinds of input .
The ' char ' parser accepts any input , and returns it . Sometimes we
will want to make sure that the character read matches some
criteria . For example , we might want to make sure it is an opening
parenthesis ' ( ' , or it is a digit .
We define ' satisfies ' in terms of ' char ' , the Monad structure ' > > = '
and ' return ' , and part of the Alternative structure ' empty ' :
complex parsers. Everything to do with parsers from this point on
is done in terms of:
1. The Functor interface (fmap)
2. The Monad interface (return, >>=)
3. The Applicative interface (pure, <*>)
4. The Alternative interface (empty, <|>)
5. 'char'
6. 'eoi'
And, of course, to actually use parsers we need the 'runParser'
function.
Let's see now how to build up more complex parsers to recognise
more complex kinds of input.
The 'char' parser accepts any input, and returns it. Sometimes we
will want to make sure that the character read matches some
criteria. For example, we might want to make sure it is an opening
parenthesis '(', or it is a digit.
We define 'satisfies' in terms of 'char', the Monad structure '>>='
and 'return', and part of the Alternative structure 'empty': -}
satisfies :: (Char -> Bool) -> Parser Char
satisfies p = do
c <- char
if p c then return c else empty
{- We'll also use the function 'ignore', which uses 'fmap' to
post-process the result of a parser than returns an 'a' to throw it
away and just return '()' on success. -}
ignore :: Parser a -> Parser ()
ignore p = fmap (\_ -> ()) p
{- Using 'satisfies' and 'ignore', we can write a parser than recognises
left parentheses '(': -}
leftParen :: Parser ()
leftParen = ignore (satisfies (\x -> x == '('))
Let 's see it working :
λ > " ( "
Just ( ( ) , " " )
λ > " ) "
Nothing
λ > " ( abc "
Just ( ( ) , " abc " )
In the same way , we can write a parser that only recognises left
square brackets ' [ ' :
λ> runParser leftParen "("
Just ((),"")
λ> runParser leftParen ")"
Nothing
λ> runParser leftParen "(abc"
Just ((),"abc")
In the same way, we can write a parser that only recognises left
square brackets '[': -}
leftSqBracket :: Parser ()
leftSqBracket = ignore (satisfies (\x -> x == '['))
Using the same pattern , we can write many useful parsers . Here 's one
that recognises whitespace : spaces , newlines , tabs , and carriage
returns :
that recognises whitespace: spaces, newlines, tabs, and carriage
returns: -}
space :: Parser ()
space = ignore (satisfies (\x -> x == ' ' || x == '\n' || x == '\t' || x == '\r'))
Alternatively , we can use the built - in function ' isSpace ' from the ' Data . ' module ( imported above ):
space = ignore ( satisfies isSpace )
Another useful little parser is the one that is given a specific
character ' c ' , and succeeds only if the first character in the
input is ' c ' :
space = ignore (satisfies isSpace)
Another useful little parser is the one that is given a specific
character 'c', and succeeds only if the first character in the
input is 'c': -}
isChar :: Char -> Parser ()
isChar c = ignore (satisfies (\x -> x == c))
{- Using 'isChar', we can use it repeatedly to build a parser that
recognises a given string: -}
string :: String -> Parser ()
string [] = nothing -- pure ()
string (e:es) = (\() () -> ()) <$> isChar e <*> string es
For example :
λ > runParser ( string " hello " ) " hello ! ! ! "
Just ( ( ) , " ! ! ! " )
The function ' string ' parses more than one character by recursing
over the sequence of characters it has been told to look for . But
what if we do n't know what we are looking for in advance . How can
we run a parser repeatedly until it fails ?
λ> runParser (string "hello") "hello!!!"
Just ((),"!!!")
The function 'string' parses more than one character by recursing
over the sequence of characters it has been told to look for. But
what if we don't know what we are looking for in advance. How can
we run a parser repeatedly until it fails? -}
{- PART II. Repeated Parsing
-}
zero or more
many :: Parser a -> Parser [a]
many p = (\a as -> a:as) <$> p <*> many p
<|> pure []
-- fmap f s = pure f <*> s
-- = f <$> s
Examples using ' many ' :
λ > ( many space ) " "
Just ( [ ] , " " )
λ > ( many space ) " "
Just ( [ ( ) , ( ) , ( ) , ( ) ] , " " )
λ > ( many char ) " "
Just ( " " , " " )
λ > ( many char ) " sdhgsjfhksdh "
Just ( " sdhgsjfhksdh " , " " )
λ> runParser (many space) ""
Just ([], "")
λ> runParser (many space) " "
Just ([(),(),(),()], "")
λ> runParser (many char) " "
Just (" ","")
λ> runParser (many char) " sdhgsjfhksdh "
Just (" sdhgsjfhksdh ","")
-}
one or more
some :: Parser a -> Parser [a]
some p = (\ a as -> a:as) <$> p <*> many p
Examples using ' some '
λ > runParser ( some space ) " "
Nothing
λ > ( some space ) " "
Just ( [ ( ) ] , " " )
λ> runParser (some space) ""
Nothing
λ> runParser (some space) " "
Just ([()], "")
-}
{- Things separated by other things -}
sepBy :: Parser () -> Parser a -> Parser [a]
sepBy separator thing =
(:) <$> thing
<*> many ((\ () t -> t) <$> separator <*> thing)
<|> pure []
{- Extended example: Writing a CSV parser -}
field :: Parser String
field = (\_ s _ -> s) <$> isChar '\"'
<*> many (satisfies (\x -> x /= '\"' && x /= '\n'))
<*> isChar '\"'
<|> many (satisfies (\x -> x /= ':' && x /= '\n'))
unescapedChar :: Parser Char
unescapedChar =
satisfies (\x -> x /= '\"' && x /= '\n' && x /= '\\')
escapedChar :: Parser Char
escapedChar =
(\_ c -> c) <$> isChar '\\' <*> char
field2 :: Parser String
field2 = (\_ s _ -> s) <$> isChar '\"'
<*> many (unescapedChar <|> escapedChar)
<*> isChar '\"'
<|> many (satisfies (\x -> x /= ':' && x /= '\n'))
1 , 2 , 3
-- a,b,c
sepBy ( isChar ' , ' ) char
Parsing comma separated values :
λ > runParser ( sepBy ( isChar ' , ' ) char ) " 1,2,3 "
[ " 123 " ]
λ > runParser ( sepBy ( isChar ' , ' ) char ) " 1,2,,3 "
[ ]
λ > runParser ( sepBy ( isChar ' , ' ) char ) " , 1,2,,3 "
[ ]
λ > runParser ( sepBy ( isChar ' , ' ) char ) " , 1,2,3 "
[ ]
λ > runParser ( sepBy ( isChar ' , ' ) char ) " 1,2 , , , "
[ ]
λ > runParser ( sepBy ( isChar ' , ' ) char ) " 1,2 , , "
[ " 12 , " ]
λ > runParser ( sepBy ( isChar ' , ' ) char ) " 1,2,,,3 "
[ " 12,3 " ]
λ > runParser ( sepBy ( isChar ' , ' ) ( satisfies ( /= ' , ' ) ) ) " 1,2,,,3 "
[ ]
λ > runParser ( sepBy ( isChar ' , ' ) ( satisfies ( /= ' , ' ) ) ) " 1,2,3,3 "
[ " 1233 " ]
λ > runParser ( sepBy ( isChar ' , ' ) ( some ( satisfies ( /= ' , ' ) ) ) ) " 1,2,3,3 "
[ [ " 1","2","3","3 " ] ]
λ > runParser ( sepBy ( isChar ' , ' ) ( some ( satisfies ( /= ' , ' ) < | > const ' , ' < $ > string " \\ , " ) ) ) " 1,2,3,3 "
[ [ " 1","2","3","3 " ] ]
λ > runParser ( sepBy ( isChar ' , ' ) ( some ( satisfies ( /= ' , ' ) < | > const ' , ' < $ > string " \\ , " ) ) ) " 1,2,3\\,3 "
[ [ " 1","2","3\\","3"],["1","2","3,3 " ] ]
λ > runParser ( sepBy ( isChar ' , ' ) ( some ( satisfies ( \c - > c/=','||c/='\\ ' ) < | > const ' , ' < $ > string " \\ , " ) ) ) " 1,2,3\\,3 "
[ [ " 1","2","3\\","3"],["1","2","3\\,3"],["1","2","3,3"],["1","2,3\\","3"],["1","2,3\\,3"],["1","2,3,3"],["1,2","3\\","3"],["1,2","3\\,3"],["1,2","3,3"],["1,2,3\\","3"],["1,2,3\\,3"],["1,2,3,3 " ] ]
λ > runParser ( sepBy ( isChar ' , ' ) ( some ( satisfies ( \c - > c/=','&&c/='\\ ' ) < | > const ' , ' < $ > string " \\ , " ) ) ) " 1,2,3\\,3 "
[ [ " 1","2","3,3 " ] ]
λ> runParser (sepBy (isChar ',') char) "1,2,3"
["123"]
λ> runParser (sepBy (isChar ',') char) "1,2,,3"
[]
λ> runParser (sepBy (isChar ',') char) ",1,2,,3"
[]
λ> runParser (sepBy (isChar ',') char) ",1,2,3"
[]
λ> runParser (sepBy (isChar ',') char) "1,2,,,"
[]
λ> runParser (sepBy (isChar ',') char) "1,2,,"
["12,"]
λ> runParser (sepBy (isChar ',') char) "1,2,,,3"
["12,3"]
λ> runParser (sepBy (isChar ',') (satisfies (/=','))) "1,2,,,3"
[]
λ> runParser (sepBy (isChar ',') (satisfies (/=','))) "1,2,3,3"
["1233"]
λ> runParser (sepBy (isChar ',') (some (satisfies (/=',')))) "1,2,3,3"
[["1","2","3","3"]]
λ> runParser (sepBy (isChar ',') (some (satisfies (/=',') <|> const ',' <$> string "\\,"))) "1,2,3,3"
[["1","2","3","3"]]
λ> runParser (sepBy (isChar ',') (some (satisfies (/=',') <|> const ',' <$> string "\\,"))) "1,2,3\\,3"
[["1","2","3\\","3"],["1","2","3,3"]]
λ> runParser (sepBy (isChar ',') (some (satisfies (\c -> c/=','||c/='\\') <|> const ',' <$> string "\\,"))) "1,2,3\\,3"
[["1","2","3\\","3"],["1","2","3\\,3"],["1","2","3,3"],["1","2,3\\","3"],["1","2,3\\,3"],["1","2,3,3"],["1,2","3\\","3"],["1,2","3\\,3"],["1,2","3,3"],["1,2,3\\","3"],["1,2,3\\,3"],["1,2,3,3"]]
λ> runParser (sepBy (isChar ',') (some (satisfies (\c -> c/=','&&c/='\\') <|> const ',' <$> string "\\,"))) "1,2,3\\,3"
[["1","2","3,3"]]
-}
| null | https://raw.githubusercontent.com/bobatkey/CS316-17/36eb67c335cd0e6f5b7a4b8eafdea3cd4b715e0c/lectures/Lec15.hs | haskell | fmap :: (a -> b) -> Parser a -> Parser b
pure :: a -> Parser a
So far, we haven't yet defined anything that actually consumes any
input. We do this by defining the 'char' parser. This reads a
single character from the input, or fails if that is not possible.
------------------------------------------------------------------
------------------------------------------------------------------
We'll also use the function 'ignore', which uses 'fmap' to
post-process the result of a parser than returns an 'a' to throw it
away and just return '()' on success.
Using 'satisfies' and 'ignore', we can write a parser than recognises
left parentheses '(':
Using 'isChar', we can use it repeatedly to build a parser that
recognises a given string:
pure ()
PART II. Repeated Parsing
fmap f s = pure f <*> s
= f <$> s
Things separated by other things
Extended example: Writing a CSV parser
a,b,c | module Lec15 where
import Control.Applicative hiding (some, many)
import Control.Monad
import Data.Char
LECTURE 15 : MORE MONADIC PARSING
First we 'll recap the ' Parser ' type we defined in the last lecture .
Parsers of things are functions from strings to the possibility of
pairs of things and strings :
Parsers of things are functions from strings to the possibility of
pairs of things and strings: -}
newtype Parser a = MkParser (String -> Maybe (a,String))
We apply a ' Parser ' to a string using ' runParser ' , which returns
either ' Nothing ' if the parser fails to extract anything from the
string , or ' Just ( a , s ) ' if it extracted ' a ' with leftover string
's ' . We 'll see more examples of this below .
either 'Nothing' if the parser fails to extract anything from the
string, or 'Just (a, s)' if it extracted 'a' with leftover string
's'. We'll see more examples of this below. -}
runParser :: Parser a -> String -> Maybe (a, String)
runParser (MkParser p) input = p input
What makes ' 's so useful is that they support lots of
structure : ' Functor ' , ' Applicative ' , ' Alternative ' , and ' ' . We
introduced this structure in the previous lecture , so we briefly
reintroduce it here .
The ' Functor ' typeclass includes the ' fmap ' function , which allows
us to post - process the results of a ' Parser ' , as we saw last time .
structure: 'Functor', 'Applicative', 'Alternative', and 'Monad'. We
introduced this structure in the previous lecture, so we briefly
reintroduce it here.
The 'Functor' typeclass includes the 'fmap' function, which allows
us to post-process the results of a 'Parser', as we saw last time. -}
instance Functor Parser where
fmap f (MkParser p) =
MkParser (\input -> fmap (\(a,rest) -> (f a,rest)) (p input))
The ' Alternative ' instance for ' Parser 's allows us to try one parser
and then another if the first one fails ( ' p1 < | > p2 ' ) , or to write
a parser that always fails ( ' empty ' ) .
and then another if the first one fails ('p1 <|> p2'), or to write
a parser that always fails ('empty'). -}
orElse :: Parser a -> Parser a -> Parser a
orElse (MkParser p1) (MkParser p2) =
MkParser (\input -> case p1 input of
Nothing -> p2 input
Just (a,s) -> Just (a,s))
failure :: Parser a
failure = MkParser (\input -> Nothing)
instance Alternative Parser where
empty = failure
(<|>) = orElse
The ' Applicative ' instance for ' Parser 's allows us to run one parser
and then another one afterwards . The leftover input from the first
parser is fed into the second one .
and then another one afterwards. The leftover input from the first
parser is fed into the second one. -}
andThen :: Parser a -> Parser b -> Parser (a,b)
andThen (MkParser p1) (MkParser p2) =
MkParser (\input -> case p1 input of
Nothing -> Nothing
Just (a, input2) ->
case p2 input2 of
Nothing -> Nothing
Just (b, rest) ->
Just ((a,b), rest))
nothing :: Parser ()
nothing = MkParser (\input -> Just ((), input))
instance Applicative Parser where
pure x = const x <$> nothing
( < * > ) : : ( a - > b ) - > Parser a - > Parser b
pf <*> pa = (\(f, a) -> f a) <$> (pf `andThen` pa)
The ' Monad ' instance allows us to run a ' Parser ' and then choose
another ' Parser ' based on the data parsed by the first . This is
useful for making decisions based on the input , such as filtering
out certain things , as we saw in the previous lecture .
another 'Parser' based on the data parsed by the first. This is
useful for making decisions based on the input, such as filtering
out certain things, as we saw in the previous lecture. -}
instance Monad Parser where
MkParser p >>= f =
MkParser (\input -> case p input of
Nothing -> Nothing
Just (a,rest) ->
let MkParser p2 = f a in
p2 rest)
char :: Parser Char
char = MkParser (\input -> case input of
"" -> Nothing
c:cs -> Just (c,cs))
Finally , we have a ' Parser ' that only succeeds when we are at the end
of input ( eoi ) .
of input (eoi). -}
eoi :: Parser ()
eoi = MkParser (\input -> case input of
"" -> Just ((), "")
_:_ -> Nothing)
Part 1 . Building parsers
We have now defined all of the basic functions we need to build more
complex parsers . Everything to do with parsers from this point on
is done in terms of :
1 . The Functor interface ( fmap )
2 . The Monad interface ( return , > > =)
3 . The Applicative interface ( pure , < * > )
4 . The Alternative interface ( empty , < | > )
5 . ' char '
6 . ' eoi '
And , of course , to actually use parsers we need the ' runParser '
function .
Let 's see now how to build up more complex parsers to recognise
more complex kinds of input .
The ' char ' parser accepts any input , and returns it . Sometimes we
will want to make sure that the character read matches some
criteria . For example , we might want to make sure it is an opening
parenthesis ' ( ' , or it is a digit .
We define ' satisfies ' in terms of ' char ' , the Monad structure ' > > = '
and ' return ' , and part of the Alternative structure ' empty ' :
complex parsers. Everything to do with parsers from this point on
is done in terms of:
1. The Functor interface (fmap)
2. The Monad interface (return, >>=)
3. The Applicative interface (pure, <*>)
4. The Alternative interface (empty, <|>)
5. 'char'
6. 'eoi'
And, of course, to actually use parsers we need the 'runParser'
function.
Let's see now how to build up more complex parsers to recognise
more complex kinds of input.
The 'char' parser accepts any input, and returns it. Sometimes we
will want to make sure that the character read matches some
criteria. For example, we might want to make sure it is an opening
parenthesis '(', or it is a digit.
We define 'satisfies' in terms of 'char', the Monad structure '>>='
and 'return', and part of the Alternative structure 'empty': -}
satisfies :: (Char -> Bool) -> Parser Char
satisfies p = do
c <- char
if p c then return c else empty
ignore :: Parser a -> Parser ()
ignore p = fmap (\_ -> ()) p
leftParen :: Parser ()
leftParen = ignore (satisfies (\x -> x == '('))
Let 's see it working :
λ > " ( "
Just ( ( ) , " " )
λ > " ) "
Nothing
λ > " ( abc "
Just ( ( ) , " abc " )
In the same way , we can write a parser that only recognises left
square brackets ' [ ' :
λ> runParser leftParen "("
Just ((),"")
λ> runParser leftParen ")"
Nothing
λ> runParser leftParen "(abc"
Just ((),"abc")
In the same way, we can write a parser that only recognises left
square brackets '[': -}
leftSqBracket :: Parser ()
leftSqBracket = ignore (satisfies (\x -> x == '['))
Using the same pattern , we can write many useful parsers . Here 's one
that recognises whitespace : spaces , newlines , tabs , and carriage
returns :
that recognises whitespace: spaces, newlines, tabs, and carriage
returns: -}
space :: Parser ()
space = ignore (satisfies (\x -> x == ' ' || x == '\n' || x == '\t' || x == '\r'))
Alternatively , we can use the built - in function ' isSpace ' from the ' Data . ' module ( imported above ):
space = ignore ( satisfies isSpace )
Another useful little parser is the one that is given a specific
character ' c ' , and succeeds only if the first character in the
input is ' c ' :
space = ignore (satisfies isSpace)
Another useful little parser is the one that is given a specific
character 'c', and succeeds only if the first character in the
input is 'c': -}
isChar :: Char -> Parser ()
isChar c = ignore (satisfies (\x -> x == c))
string :: String -> Parser ()
string (e:es) = (\() () -> ()) <$> isChar e <*> string es
For example :
λ > runParser ( string " hello " ) " hello ! ! ! "
Just ( ( ) , " ! ! ! " )
The function ' string ' parses more than one character by recursing
over the sequence of characters it has been told to look for . But
what if we do n't know what we are looking for in advance . How can
we run a parser repeatedly until it fails ?
λ> runParser (string "hello") "hello!!!"
Just ((),"!!!")
The function 'string' parses more than one character by recursing
over the sequence of characters it has been told to look for. But
what if we don't know what we are looking for in advance. How can
we run a parser repeatedly until it fails? -}
zero or more
many :: Parser a -> Parser [a]
many p = (\a as -> a:as) <$> p <*> many p
<|> pure []
Examples using ' many ' :
λ > ( many space ) " "
Just ( [ ] , " " )
λ > ( many space ) " "
Just ( [ ( ) , ( ) , ( ) , ( ) ] , " " )
λ > ( many char ) " "
Just ( " " , " " )
λ > ( many char ) " sdhgsjfhksdh "
Just ( " sdhgsjfhksdh " , " " )
λ> runParser (many space) ""
Just ([], "")
λ> runParser (many space) " "
Just ([(),(),(),()], "")
λ> runParser (many char) " "
Just (" ","")
λ> runParser (many char) " sdhgsjfhksdh "
Just (" sdhgsjfhksdh ","")
-}
one or more
some :: Parser a -> Parser [a]
some p = (\ a as -> a:as) <$> p <*> many p
Examples using ' some '
λ > runParser ( some space ) " "
Nothing
λ > ( some space ) " "
Just ( [ ( ) ] , " " )
λ> runParser (some space) ""
Nothing
λ> runParser (some space) " "
Just ([()], "")
-}
sepBy :: Parser () -> Parser a -> Parser [a]
sepBy separator thing =
(:) <$> thing
<*> many ((\ () t -> t) <$> separator <*> thing)
<|> pure []
field :: Parser String
field = (\_ s _ -> s) <$> isChar '\"'
<*> many (satisfies (\x -> x /= '\"' && x /= '\n'))
<*> isChar '\"'
<|> many (satisfies (\x -> x /= ':' && x /= '\n'))
unescapedChar :: Parser Char
unescapedChar =
satisfies (\x -> x /= '\"' && x /= '\n' && x /= '\\')
escapedChar :: Parser Char
escapedChar =
(\_ c -> c) <$> isChar '\\' <*> char
field2 :: Parser String
field2 = (\_ s _ -> s) <$> isChar '\"'
<*> many (unescapedChar <|> escapedChar)
<*> isChar '\"'
<|> many (satisfies (\x -> x /= ':' && x /= '\n'))
1 , 2 , 3
sepBy ( isChar ' , ' ) char
Parsing comma separated values :
λ > runParser ( sepBy ( isChar ' , ' ) char ) " 1,2,3 "
[ " 123 " ]
λ > runParser ( sepBy ( isChar ' , ' ) char ) " 1,2,,3 "
[ ]
λ > runParser ( sepBy ( isChar ' , ' ) char ) " , 1,2,,3 "
[ ]
λ > runParser ( sepBy ( isChar ' , ' ) char ) " , 1,2,3 "
[ ]
λ > runParser ( sepBy ( isChar ' , ' ) char ) " 1,2 , , , "
[ ]
λ > runParser ( sepBy ( isChar ' , ' ) char ) " 1,2 , , "
[ " 12 , " ]
λ > runParser ( sepBy ( isChar ' , ' ) char ) " 1,2,,,3 "
[ " 12,3 " ]
λ > runParser ( sepBy ( isChar ' , ' ) ( satisfies ( /= ' , ' ) ) ) " 1,2,,,3 "
[ ]
λ > runParser ( sepBy ( isChar ' , ' ) ( satisfies ( /= ' , ' ) ) ) " 1,2,3,3 "
[ " 1233 " ]
λ > runParser ( sepBy ( isChar ' , ' ) ( some ( satisfies ( /= ' , ' ) ) ) ) " 1,2,3,3 "
[ [ " 1","2","3","3 " ] ]
λ > runParser ( sepBy ( isChar ' , ' ) ( some ( satisfies ( /= ' , ' ) < | > const ' , ' < $ > string " \\ , " ) ) ) " 1,2,3,3 "
[ [ " 1","2","3","3 " ] ]
λ > runParser ( sepBy ( isChar ' , ' ) ( some ( satisfies ( /= ' , ' ) < | > const ' , ' < $ > string " \\ , " ) ) ) " 1,2,3\\,3 "
[ [ " 1","2","3\\","3"],["1","2","3,3 " ] ]
λ > runParser ( sepBy ( isChar ' , ' ) ( some ( satisfies ( \c - > c/=','||c/='\\ ' ) < | > const ' , ' < $ > string " \\ , " ) ) ) " 1,2,3\\,3 "
[ [ " 1","2","3\\","3"],["1","2","3\\,3"],["1","2","3,3"],["1","2,3\\","3"],["1","2,3\\,3"],["1","2,3,3"],["1,2","3\\","3"],["1,2","3\\,3"],["1,2","3,3"],["1,2,3\\","3"],["1,2,3\\,3"],["1,2,3,3 " ] ]
λ > runParser ( sepBy ( isChar ' , ' ) ( some ( satisfies ( \c - > c/=','&&c/='\\ ' ) < | > const ' , ' < $ > string " \\ , " ) ) ) " 1,2,3\\,3 "
[ [ " 1","2","3,3 " ] ]
λ> runParser (sepBy (isChar ',') char) "1,2,3"
["123"]
λ> runParser (sepBy (isChar ',') char) "1,2,,3"
[]
λ> runParser (sepBy (isChar ',') char) ",1,2,,3"
[]
λ> runParser (sepBy (isChar ',') char) ",1,2,3"
[]
λ> runParser (sepBy (isChar ',') char) "1,2,,,"
[]
λ> runParser (sepBy (isChar ',') char) "1,2,,"
["12,"]
λ> runParser (sepBy (isChar ',') char) "1,2,,,3"
["12,3"]
λ> runParser (sepBy (isChar ',') (satisfies (/=','))) "1,2,,,3"
[]
λ> runParser (sepBy (isChar ',') (satisfies (/=','))) "1,2,3,3"
["1233"]
λ> runParser (sepBy (isChar ',') (some (satisfies (/=',')))) "1,2,3,3"
[["1","2","3","3"]]
λ> runParser (sepBy (isChar ',') (some (satisfies (/=',') <|> const ',' <$> string "\\,"))) "1,2,3,3"
[["1","2","3","3"]]
λ> runParser (sepBy (isChar ',') (some (satisfies (/=',') <|> const ',' <$> string "\\,"))) "1,2,3\\,3"
[["1","2","3\\","3"],["1","2","3,3"]]
λ> runParser (sepBy (isChar ',') (some (satisfies (\c -> c/=','||c/='\\') <|> const ',' <$> string "\\,"))) "1,2,3\\,3"
[["1","2","3\\","3"],["1","2","3\\,3"],["1","2","3,3"],["1","2,3\\","3"],["1","2,3\\,3"],["1","2,3,3"],["1,2","3\\","3"],["1,2","3\\,3"],["1,2","3,3"],["1,2,3\\","3"],["1,2,3\\,3"],["1,2,3,3"]]
λ> runParser (sepBy (isChar ',') (some (satisfies (\c -> c/=','&&c/='\\') <|> const ',' <$> string "\\,"))) "1,2,3\\,3"
[["1","2","3,3"]]
-}
|
d2bb9d652c23453f4f8d1583cc6304513abc8e0fed1564ea4b13df3acb95d4f3 | tolysz/ghcjs-stack | ConfiguredConversion.hs | module Distribution.Client.Dependency.Modular.ConfiguredConversion
( convCP
) where
import Data.Maybe
import Prelude hiding (pi)
import Distribution.Package (UnitId)
import Distribution.Client.Types
import Distribution.Client.Dependency.Types (ResolverPackage(..))
import qualified Distribution.Client.PackageIndex as CI
import qualified Distribution.Simple.PackageIndex as SI
import Distribution.Client.Dependency.Modular.Configured
import Distribution.Client.Dependency.Modular.Package
import Distribution.Client.ComponentDeps (ComponentDeps)
-- | Converts from the solver specific result @CP QPN@ into
-- a 'ResolverPackage', which can then be converted into
-- the install plan.
convCP :: SI.InstalledPackageIndex ->
CI.PackageIndex SourcePackage ->
CP QPN -> ResolverPackage
convCP iidx sidx (CP qpi fa es ds) =
case convPI qpi of
Left pi -> PreExisting
(fromJust $ SI.lookupUnitId iidx pi)
Right pi -> Configured $ ConfiguredPackage
srcpkg
fa
es
ds'
where
Just srcpkg = CI.lookupPackageId sidx pi
where
ds' :: ComponentDeps [ConfiguredId]
ds' = fmap (map convConfId) ds
convPI :: PI QPN -> Either UnitId PackageId
convPI (PI _ (I _ (Inst pi))) = Left pi
convPI qpi = Right $ confSrcId $ convConfId qpi
convConfId :: PI QPN -> ConfiguredId
convConfId (PI (Q _ pn) (I v loc)) = ConfiguredId {
confSrcId = sourceId
, confInstId = installedId
}
where
sourceId = PackageIdentifier pn v
installedId = case loc of
Inst pi -> pi
_otherwise -> fakeUnitId sourceId
| null | https://raw.githubusercontent.com/tolysz/ghcjs-stack/83d5be83e87286d984e89635d5926702c55b9f29/special/cabal/cabal-install/Distribution/Client/Dependency/Modular/ConfiguredConversion.hs | haskell | | Converts from the solver specific result @CP QPN@ into
a 'ResolverPackage', which can then be converted into
the install plan. | module Distribution.Client.Dependency.Modular.ConfiguredConversion
( convCP
) where
import Data.Maybe
import Prelude hiding (pi)
import Distribution.Package (UnitId)
import Distribution.Client.Types
import Distribution.Client.Dependency.Types (ResolverPackage(..))
import qualified Distribution.Client.PackageIndex as CI
import qualified Distribution.Simple.PackageIndex as SI
import Distribution.Client.Dependency.Modular.Configured
import Distribution.Client.Dependency.Modular.Package
import Distribution.Client.ComponentDeps (ComponentDeps)
convCP :: SI.InstalledPackageIndex ->
CI.PackageIndex SourcePackage ->
CP QPN -> ResolverPackage
convCP iidx sidx (CP qpi fa es ds) =
case convPI qpi of
Left pi -> PreExisting
(fromJust $ SI.lookupUnitId iidx pi)
Right pi -> Configured $ ConfiguredPackage
srcpkg
fa
es
ds'
where
Just srcpkg = CI.lookupPackageId sidx pi
where
ds' :: ComponentDeps [ConfiguredId]
ds' = fmap (map convConfId) ds
convPI :: PI QPN -> Either UnitId PackageId
convPI (PI _ (I _ (Inst pi))) = Left pi
convPI qpi = Right $ confSrcId $ convConfId qpi
convConfId :: PI QPN -> ConfiguredId
convConfId (PI (Q _ pn) (I v loc)) = ConfiguredId {
confSrcId = sourceId
, confInstId = installedId
}
where
sourceId = PackageIdentifier pn v
installedId = case loc of
Inst pi -> pi
_otherwise -> fakeUnitId sourceId
|
a48b5a8a2959b1a74b4937a9d586929bafe8e8625a3114ffd26abe7ddf17b9ea | lulf/hcoap | Main.hs | import Network.CoAP.Client
import Network.CoAP.Transport
import Network.Socket
import Network.URI
import System.Environment
import qualified Data.ByteString.Char8 as B
main :: IO ()
main = do
args <- getArgs
if length args < 2
then printUsage
else runClient args
runClient :: [String] -> IO ()
runClient (method:uriStr:_) = do
let request = Request { requestMethod = read method :: Method
, requestOptions = []
, requestPayload = Nothing
, requestReliable = True }
withSocketsDo $ do
sock <- socket AF_INET6 Datagram defaultProtocol
bind sock (SockAddrInet6 0 0 (0, 0, 0, 0) 0)
let transport = createUDPTransport sock
client <- createClient transport
let (Just uri) = parseURI uriStr
response <- doRequest client uri request
putStrLn ("Got response: " ++ show response)
return ()
printUsage :: IO ()
printUsage = do
pname <- getProgName
putStrLn ("Usage: " ++ pname ++ " <method> <uri>")
| null | https://raw.githubusercontent.com/lulf/hcoap/8127520b767430b5513be6cfe646894ae7a2e616/example-client/Main.hs | haskell | import Network.CoAP.Client
import Network.CoAP.Transport
import Network.Socket
import Network.URI
import System.Environment
import qualified Data.ByteString.Char8 as B
main :: IO ()
main = do
args <- getArgs
if length args < 2
then printUsage
else runClient args
runClient :: [String] -> IO ()
runClient (method:uriStr:_) = do
let request = Request { requestMethod = read method :: Method
, requestOptions = []
, requestPayload = Nothing
, requestReliable = True }
withSocketsDo $ do
sock <- socket AF_INET6 Datagram defaultProtocol
bind sock (SockAddrInet6 0 0 (0, 0, 0, 0) 0)
let transport = createUDPTransport sock
client <- createClient transport
let (Just uri) = parseURI uriStr
response <- doRequest client uri request
putStrLn ("Got response: " ++ show response)
return ()
printUsage :: IO ()
printUsage = do
pname <- getProgName
putStrLn ("Usage: " ++ pname ++ " <method> <uri>")
| |
0e7993983fbc3c8253acc6fc3439ef21e8540fb13420fe911caa266783676f36 | septract/jstar-old | unused.ml | * Finds stuff declared in mli that is not used in * other *
open Format
type file_info = {
path : string;
declarations : string list; (* ids appearing in mli *)
uses : string list (* ids appearing in ml *)
}
let list_of_hashtbl h = Hashtbl.fold (fun x () xs -> x :: xs) h []
let get_ids fn lexer =
let h = Hashtbl.create 101 in
try
let c = open_in fn in
let lb = Lexing.from_channel (open_in fn) in
lexer h lb;
let r = list_of_hashtbl h in
close_in c;
r
with Sys_error _ ->
printf "@[Warning: missing %s@." fn; []
let parse fn = {
path = fn;
declarations = get_ids (fn ^ "i") Id_extractor.id_decl;
uses = get_ids fn Id_extractor.id
}
module StringSet = Set.Make (String)
let _ =
let fis =
Utils.files_map (fun x->Filename.check_suffix x ".ml") parse "src" in
let fis =
Utils.files_map
(fun x->Filename.check_suffix x ".mly" || Filename.check_suffix x ".mll")
(fun fn->{path=fn;declarations=[];uses=get_ids fn Id_extractor.id})
"src"
@ fis in
let h1 = Hashtbl.create 10007 in
let h2 = Hashtbl.create 10007 in
let add_use u =
if not (Hashtbl.mem h2 u) then
begin
if not (Hashtbl.mem h1 u) then
Hashtbl.add h1 u ()
else
begin
Hashtbl.remove h1 u;
Hashtbl.add h2 u ()
end
end in
List.iter (fun {uses=uses;declarations=_;path=_} -> List.iter add_use uses) fis;
let process {path=path; declarations=declarations; uses=_} =
let bd = List.fold_left
(fun s d -> if Hashtbl.mem h1 d then StringSet.add d s else s)
StringSet.empty
declarations in
if not (StringSet.is_empty bd) then
begin
printf "@\n@[<4>%si@\n" path;
StringSet.iter (fun x -> printf "%s@\n" x) bd;
printf "@]"
end in
printf "@[";
List.iter process fis;
printf "@."
| null | https://raw.githubusercontent.com/septract/jstar-old/c3b4fc6c1efc098efcdb864edbf0c666130f5fe5/scripts/unused.ml | ocaml | ids appearing in mli
ids appearing in ml | * Finds stuff declared in mli that is not used in * other *
open Format
type file_info = {
path : string;
}
let list_of_hashtbl h = Hashtbl.fold (fun x () xs -> x :: xs) h []
let get_ids fn lexer =
let h = Hashtbl.create 101 in
try
let c = open_in fn in
let lb = Lexing.from_channel (open_in fn) in
lexer h lb;
let r = list_of_hashtbl h in
close_in c;
r
with Sys_error _ ->
printf "@[Warning: missing %s@." fn; []
let parse fn = {
path = fn;
declarations = get_ids (fn ^ "i") Id_extractor.id_decl;
uses = get_ids fn Id_extractor.id
}
module StringSet = Set.Make (String)
let _ =
let fis =
Utils.files_map (fun x->Filename.check_suffix x ".ml") parse "src" in
let fis =
Utils.files_map
(fun x->Filename.check_suffix x ".mly" || Filename.check_suffix x ".mll")
(fun fn->{path=fn;declarations=[];uses=get_ids fn Id_extractor.id})
"src"
@ fis in
let h1 = Hashtbl.create 10007 in
let h2 = Hashtbl.create 10007 in
let add_use u =
if not (Hashtbl.mem h2 u) then
begin
if not (Hashtbl.mem h1 u) then
Hashtbl.add h1 u ()
else
begin
Hashtbl.remove h1 u;
Hashtbl.add h2 u ()
end
end in
List.iter (fun {uses=uses;declarations=_;path=_} -> List.iter add_use uses) fis;
let process {path=path; declarations=declarations; uses=_} =
let bd = List.fold_left
(fun s d -> if Hashtbl.mem h1 d then StringSet.add d s else s)
StringSet.empty
declarations in
if not (StringSet.is_empty bd) then
begin
printf "@\n@[<4>%si@\n" path;
StringSet.iter (fun x -> printf "%s@\n" x) bd;
printf "@]"
end in
printf "@[";
List.iter process fis;
printf "@."
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.