_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
2eee94a6e94c8cae548ba217181e5a7a93a1a17bee1792fda5036e1c116bb5cb | diagrams/diagrams-lib | Names.hs | {-# LANGUAGE ConstraintKinds #-}
# LANGUAGE FlexibleContexts #
{-# LANGUAGE MonoLocalBinds #-}
# OPTIONS_GHC -fno - warn - unused - imports #
for Data . Semigroup
-----------------------------------------------------------------------------
-- |
-- Module : Diagrams.Names
Copyright : ( c ) 2013 diagrams - lib team ( see LICENSE )
-- License : BSD-style (see LICENSE)
-- Maintainer :
--
-- Names can be given to subdiagrams, and subdiagrams can later be
-- queried by name. This module exports types for representing names
-- and subdiagrams, and various functions for working with them.
--
-----------------------------------------------------------------------------
module Diagrams.Names
( -- * Names
AName, Name, IsName(..), (.>)
, Qualifiable(..)
-- * Subdiagrams
, Subdiagram, mkSubdiagram, subPoint, getSub, rawSub, location
-- * Subdiagram maps
, SubMap, fromNames, rememberAs, lookupSub
-- * Naming things
, named, nameSub, namePoint, localize
-- * Querying by name
, names
, lookupName
, withName, withNameAll, withNames
) where
import Data.Semigroup
import Diagrams.Core (OrderedField, Point)
import Diagrams.Core.Names
import Diagrams.Core.Types
import Linear.Metric
-- | Attach an atomic name to a diagram.
named :: (IsName nm, Metric v, OrderedField n, Semigroup m)
=> nm -> QDiagram b v n m -> QDiagram b v n m
named = nameSub mkSubdiagram
-- | Attach an atomic name to a certain point (which may be computed
-- from the given diagram), treated as a subdiagram with no content
-- and a point envelope.
namePoint :: (IsName nm , Metric v, OrderedField n, Semigroup m)
=> (QDiagram b v n m -> Point v n) -> nm -> QDiagram b v n m -> QDiagram b v n m
namePoint p = nameSub (subPoint . p)
| null | https://raw.githubusercontent.com/diagrams/diagrams-lib/6f66ce6bd5aed81d8a1330c143ea012724dbac3c/src/Diagrams/Names.hs | haskell | # LANGUAGE ConstraintKinds #
# LANGUAGE MonoLocalBinds #
---------------------------------------------------------------------------
|
Module : Diagrams.Names
License : BSD-style (see LICENSE)
Maintainer :
Names can be given to subdiagrams, and subdiagrams can later be
queried by name. This module exports types for representing names
and subdiagrams, and various functions for working with them.
---------------------------------------------------------------------------
* Names
* Subdiagrams
* Subdiagram maps
* Naming things
* Querying by name
| Attach an atomic name to a diagram.
| Attach an atomic name to a certain point (which may be computed
from the given diagram), treated as a subdiagram with no content
and a point envelope. | # LANGUAGE FlexibleContexts #
# OPTIONS_GHC -fno - warn - unused - imports #
for Data . Semigroup
Copyright : ( c ) 2013 diagrams - lib team ( see LICENSE )
module Diagrams.Names
AName, Name, IsName(..), (.>)
, Qualifiable(..)
, Subdiagram, mkSubdiagram, subPoint, getSub, rawSub, location
, SubMap, fromNames, rememberAs, lookupSub
, named, nameSub, namePoint, localize
, names
, lookupName
, withName, withNameAll, withNames
) where
import Data.Semigroup
import Diagrams.Core (OrderedField, Point)
import Diagrams.Core.Names
import Diagrams.Core.Types
import Linear.Metric
named :: (IsName nm, Metric v, OrderedField n, Semigroup m)
=> nm -> QDiagram b v n m -> QDiagram b v n m
named = nameSub mkSubdiagram
namePoint :: (IsName nm , Metric v, OrderedField n, Semigroup m)
=> (QDiagram b v n m -> Point v n) -> nm -> QDiagram b v n m -> QDiagram b v n m
namePoint p = nameSub (subPoint . p)
|
5ed5ecd394a3611e291dc7ccbcd42b1ce49eed0356889b59542fcd42f9050b40 | uwplse/PUMPKIN-PATCH | abstractionconfig.mli | open Environ
open Evd
open Constr
open Abstracters
open Candidates
open Proofdiff
open Cutlemma
open Stateutils
(* --- Configuring Abstraction --- *)
Caller configuration for abstraction
type abstraction_config =
{
env : env;
args_base : types list;
args_goal : types list;
cs : candidates;
f_base : types;
f_goal : types;
strategies : abstraction_strategy list;
}
(* --- Defaults --- *)
(*
* Given an environment, a difference in goal types, and a list of candidates,
* configure the default configuration for abstraction of arguments
*)
val configure_args :
env ->
types proof_diff ->
candidates ->
evar_map ->
abstraction_config state
* Given an environment , a list of differences between fixpoint cases ,
* and a list of candidates , configure function abstraction .
*
* This produces one configuration for each difference .
* Given an environment, a list of differences between fixpoint cases,
* and a list of candidates, configure function abstraction.
*
* This produces one configuration for each difference.
*)
val configure_fixpoint_cases :
env ->
types list ->
candidates ->
evar_map ->
(abstraction_config list) state
(* --- Cut Lemmas --- *)
(*
* These configuration functions are for when you cut search by a certain lemma,
* so the type of the candidate may not be formatted well enough to infer how
* to abstract it, but the supplied cut lemma type may be.
* In those cases, we go with the cut lemma, though improvements
* to search and abstraction should make this obsolete.
*)
(*
* Given an environment, a lemma to cut by, and a list of candidates,
* configure argument abstraction.
*)
val configure_cut_args :
env ->
cut_lemma ->
candidates ->
evar_map ->
abstraction_config state
(* --- Goals --- *)
(*
* These configuration functions are for the top-level abstract
* command, which takes a goal type. We use the goal type
* to infer the arguments, but we use the candidate itself
* to infer which function to abstract. For now, this will fail
* if the function is not obvious from the candidate.
*)
(*
* Give an environment, a goal type, and a candidate, configure abstraction.
*
* Automatically infer which kind of abstraction to try from the goal type.
*)
val configure_from_goal :
env ->
types ->
types ->
evar_map ->
abstraction_config state
| null | https://raw.githubusercontent.com/uwplse/PUMPKIN-PATCH/73fd77ba49388fdc72702a252a8fa8f071a8e1ea/plugin/src/core/components/abstraction/abstractionconfig.mli | ocaml | --- Configuring Abstraction ---
--- Defaults ---
* Given an environment, a difference in goal types, and a list of candidates,
* configure the default configuration for abstraction of arguments
--- Cut Lemmas ---
* These configuration functions are for when you cut search by a certain lemma,
* so the type of the candidate may not be formatted well enough to infer how
* to abstract it, but the supplied cut lemma type may be.
* In those cases, we go with the cut lemma, though improvements
* to search and abstraction should make this obsolete.
* Given an environment, a lemma to cut by, and a list of candidates,
* configure argument abstraction.
--- Goals ---
* These configuration functions are for the top-level abstract
* command, which takes a goal type. We use the goal type
* to infer the arguments, but we use the candidate itself
* to infer which function to abstract. For now, this will fail
* if the function is not obvious from the candidate.
* Give an environment, a goal type, and a candidate, configure abstraction.
*
* Automatically infer which kind of abstraction to try from the goal type.
| open Environ
open Evd
open Constr
open Abstracters
open Candidates
open Proofdiff
open Cutlemma
open Stateutils
Caller configuration for abstraction
type abstraction_config =
{
env : env;
args_base : types list;
args_goal : types list;
cs : candidates;
f_base : types;
f_goal : types;
strategies : abstraction_strategy list;
}
val configure_args :
env ->
types proof_diff ->
candidates ->
evar_map ->
abstraction_config state
* Given an environment , a list of differences between fixpoint cases ,
* and a list of candidates , configure function abstraction .
*
* This produces one configuration for each difference .
* Given an environment, a list of differences between fixpoint cases,
* and a list of candidates, configure function abstraction.
*
* This produces one configuration for each difference.
*)
val configure_fixpoint_cases :
env ->
types list ->
candidates ->
evar_map ->
(abstraction_config list) state
val configure_cut_args :
env ->
cut_lemma ->
candidates ->
evar_map ->
abstraction_config state
val configure_from_goal :
env ->
types ->
types ->
evar_map ->
abstraction_config state
|
1b70d5bcaa89b568b808229aecb267fd1e5d4ee0825c821dd14148c5c7e25d2f | ngless-toolkit/ngless | Tests.hs | Copyright 2013 - 2021 NGLess Authors
- License : MIT
- License: MIT
-}
# LANGUAGE TemplateHaskell , QuasiQuotes #
-- | Unit tests are their own programme.
--
Unit tests written in have less overhead than full integration tests
-- in the tests/ directory, but are not always as convenient.
module Main where
import Test.Tasty
import Test.Tasty.TH
import Test.Tasty.HUnit
import Text.Parsec (parse)
import Text.Parsec.Combinator (eof)
import System.Directory (removeDirectoryRecursive)
import qualified Data.Vector.Storable as VS
import qualified Data.ByteString.Char8 as B
import qualified Data.Conduit as C
import Data.Conduit ((.|))
import Control.Monad.State.Strict (execState, modify')
import Data.Convertible (convert)
import Data.Conduit.Algorithms.Async (conduitPossiblyCompressedFile)
import Language
import Interpret
import Tokens
import FileManagement
import NGLess
import NGLess.NGLEnvironment (setupTestEnvironment)
import Interpretation.Unique
import Data.Sam
import Data.FastQ
import Utils.Conduit
import Utils.Samtools (samBamConduit)
import Utils.Here
import qualified Data.GFF as GFF
import Tests.Utils
import Tests.Count (tgroup_Count)
import Tests.FastQ (tgroup_FastQ)
import Tests.IntGroups (tgroup_IntGroups)
import Tests.Language (tgroup_Language)
import Tests.LoadFQDirectory (tgroup_LoadFQDirectory)
import Tests.NGLessAPI (tgroup_NGLessAPI)
import Tests.Parse (tgroup_Parse)
import Tests.Samples (tgroup_Samples)
import Tests.Select (tgroup_Select)
import Tests.Types (tgroup_Types)
import Tests.Validation (tgroup_Validation)
import Tests.Vector (tgroup_Vector)
import Tests.Write (tgroup_Write)
test_FastQ = [tgroup_FastQ]
test_Validation = [tgroup_Validation]
test_Count = [tgroup_Count]
test_Parse = [tgroup_Parse]
test_Types = [tgroup_Types]
test_NGLessAPI = [tgroup_NGLessAPI]
test_Vector = [tgroup_Vector]
test_IntGroups = [tgroup_IntGroups]
test_Samples = [tgroup_Samples]
test_Select = [tgroup_Select]
test_Language = [tgroup_Language]
test_LoadFqDir = [tgroup_LoadFQDirectory]
test_Write = [tgroup_Write]
-- The main test driver sets up the config options then uses the automatically
-- generated function
main = do
setupTestEnvironment
$(defaultMainGenerator)
removeDirectoryRecursive "testing_directory_tmp"
-- Test Tokens module
tokenize' fn t = map snd <$> (tokenize fn t)
case_tok_cr = TNewLine @=? (case parse (Tokens.eol <* eof) "test" "\r\n" of { Right t -> t; Left _ -> error "Parse failed"; })
case_tok_single_line_comment = tokenize' "test" with_comment @?= Right expected
where
with_comment = "a=0# comment\nb=1\n"
expected = [TWord "a",TOperator '=',TExpr (ConstInt 0),TNewLine,TWord "b",TOperator '=',TExpr (ConstInt 1),TNewLine]
case_tok_single_line_comment_cstyle = tokenize' "test" with_comment @?= Right expected
where
with_comment = "a=0// comment\nb=1\n"
expected = [TWord "a",TOperator '=',TExpr (ConstInt 0),TNewLine,TWord "b",TOperator '=',TExpr (ConstInt 1),TNewLine]
case_tok_multi_line_comment = tokenize' "test" with_comment @?= Right expected
where
with_comment = "a=0/* This\n\nwith\nlines*/\nb=1\n"
expected = [TWord "a",TOperator '=',TExpr (ConstInt 0),TIndent 0,TNewLine,TWord "b",TOperator '=',TExpr (ConstInt 1),TNewLine]
case_tok_word_ = tokenize' "test" "word_with_underscore" @?= Right expected
where
expected = [TWord "word_with_underscore"]
-- Test Types
case_indent_comment = isOk "ParseFailed" $ parsetest indent_comment
case_indent_space = isOk "ParseFailed" $ parsetest indent_space
indent_comment = "ngless '0.0'\n\
\reads = fastq('input1.fq')\n\
\reads = preprocess(reads) using |read|:\n\
\ read = read[5:]\n\
\ # comment \n"
indent_space = "ngless '0.0'\n\
\reads = fastq('input1.fq')\n\
\reads = preprocess(reads) using |read|:\n\
\ read = read[5:]\n\
\ \n"
-- Type Validate pre process operations
sr i s q = NGOShortRead (ShortRead i s $ VS.generate (B.length q) (convert . B.index q))
case_pre_process_indexation_1 = _evalIndex' (sr "@IRIS" "AGTACCAA" "aa`aaaaa") [Just (NGOInteger 5), Nothing] @?= (sr "@IRIS" "CAA" "aaa")
case_pre_process_indexation_2 = _evalIndex' (sr "@IRIS" "AGTACCAA" "aa`aaaaa") [Nothing, Just (NGOInteger 3)] @?= (sr "@IRIS" "AGT" "aa`")
case_pre_process_indexation_3 = _evalIndex' (sr "@IRIS" "AGTACCAA" "aa`aaaaa") [Just (NGOInteger 2), Just (NGOInteger 5)] @?= (sr "@IRIS" "TAC" "`aa")
_evalIndex' a b = case _evalIndex a b of
Right v -> v
Left err -> error (show err)
case_pre_process_length_1 = _evalUnary UOpLen (sr "@IRIS" "AGTACCAA" "aa`aaaaa") @?= Right (NGOInteger 8)
case_bop_gte_1 = evalBinary BOpGTE (NGOInteger 10) (NGOInteger 10) @?= Right (NGOBool True)
case_bop_gte_2 = evalBinary BOpGTE (NGOInteger 11) (NGOInteger 10) @?= Right (NGOBool True)
case_bop_gte_3 = evalBinary BOpGTE (NGOInteger 10) (NGOInteger 11) @?= Right (NGOBool False)
case_bop_gt_1 = evalBinary BOpGT (NGOInteger 10) (NGOInteger 10) @?= Right (NGOBool False)
case_bop_gt_2 = evalBinary BOpGT (NGOInteger 11) (NGOInteger 10) @?= Right (NGOBool True)
case_bop_gt_3 = evalBinary BOpGT (NGOInteger 10) (NGOInteger 11) @?= Right (NGOBool False)
case_bop_lt_1 = evalBinary BOpLT (NGOInteger 10) (NGOInteger 10) @?= Right (NGOBool False)
case_bop_lt_2 = evalBinary BOpLT (NGOInteger 11) (NGOInteger 10) @?= Right (NGOBool False)
case_bop_lt_3 = evalBinary BOpLT (NGOInteger 10) (NGOInteger 11) @?= Right (NGOBool True)
case_bop_lte_1 = evalBinary BOpLTE (NGOInteger 10) (NGOInteger 10) @?= Right (NGOBool True)
case_bop_lte_2 = evalBinary BOpLTE (NGOInteger 11) (NGOInteger 10) @?= Right (NGOBool False)
case_bop_lte_3 = evalBinary BOpLTE (NGOInteger 10) (NGOInteger 11) @?= Right (NGOBool True)
case_bop_eq_1 = evalBinary BOpEQ (NGOInteger 10) (NGOInteger 10) @?= Right (NGOBool True)
case_bop_eq_2 = evalBinary BOpEQ (NGOInteger 10) (NGOInteger 0) @?= Right (NGOBool False)
case_bop_neq_1 = evalBinary BOpNEQ (NGOInteger 0) (NGOInteger 10) @?= Right (NGOBool True)
case_bop_neq_2 = evalBinary BOpNEQ (NGOInteger 10) (NGOInteger 10) @?= Right (NGOBool False)
case_bop_add_1 = evalBinary BOpAdd (NGOInteger 0) (NGOInteger 10) @?= Right (NGOInteger 10)
case_bop_add_2 = evalBinary BOpAdd (NGOInteger 10) (NGOInteger 0) @?= Right (NGOInteger 10)
case_bop_add_3 = evalBinary BOpAdd (NGOInteger 10) (NGOInteger 10) @?= Right (NGOInteger 20)
case_bop_mul_1 = evalBinary BOpMul (NGOInteger 0) (NGOInteger 10) @?= Right (NGOInteger 0)
case_bop_mul_2 = evalBinary BOpMul (NGOInteger 10) (NGOInteger 0) @?= Right (NGOInteger 0)
case_bop_mul_3 = evalBinary BOpMul (NGOInteger 10) (NGOInteger 10) @?= Right (NGOInteger 100)
case_bop_add_path_1 = evalBinary BOpPathAppend (NGOString "dir") (NGOString "file") @?= Right (NGOString "dir/file")
case_bop_add_path_2 = evalBinary BOpPathAppend (NGOString "dir/subdir") (NGOString "file") @?= Right (NGOString "dir/subdir/file")
case_bop_add_path_3 = evalBinary BOpPathAppend (NGOString "dir/subdir/") (NGOString "file") @?= Right (NGOString "dir/subdir/file")
case_bop_add_path_4 = evalBinary BOpPathAppend (NGOString "../dir/subdir/") (NGOString "file") @?= Right (NGOString "../dir/subdir/file")
case_bop_add_path_5 = evalBinary BOpPathAppend (NGOString "/abs/dir/subdir/") (NGOString "file") @?= Right (NGOString "/abs/dir/subdir/file")
case_uop_minus_1 = _evalUnary UOpMinus (NGOInteger 10) @?= Right (NGOInteger (-10))
case_uop_minus_2 = _evalUnary UOpMinus (NGOInteger (-10)) @?= Right (NGOInteger 10)
--
case_template_id = takeBaseNameNoExtensions "a/B/c/d/xpto_1.fq" @?= takeBaseNameNoExtensions "a/B/c/d/xpto_1.fq"
case_template = takeBaseNameNoExtensions "a/B/c/d/xpto_1.fq" @?= "xpto_1"
samStats :: FilePath -> NGLessIO (Int, Int, Int)
samStats fname = C.runConduit (samBamConduit fname .| linesVC 1024 .| samStatsC) >>= runNGLess
case_sam20 = do
sam <- testNGLessIO $ asTempFile sam20 "sam" >>= samStats
sam @?= (5,0,0)
where
sam20 = [here|
@SQ SN:I LN:230218
@PG ID:bwa PN:bwa VN:0.7.7-r441 CL:/home/luispedro/.local/share/ngless/bin/ngless-0.0.0-bwa mem -t 1 /home/luispedro/.local/share/ngless/data/sacCer3/Sequence/BWAIndex/reference.fa.gz /tmp/preprocessed_sample20.fq1804289383846930886.gz
IRIS:7:1:17:394#0 4 * 0 0 * * 0 0 GTCAGGACAAGAAAGACAANTCCAATTNACATT aaabaa`]baaaaa_aab]D^^`b`aYDW]aba AS:i:0 XS:i:0
IRIS:7:1:17:800#0 4 * 0 0 * * 0 0 GGAAACACTACTTAGGCTTATAAGATCNGGTTGCGG ababbaaabaaaaa`]`ba`]`aaaaYD\\_a``XT AS:i:0 XS:i:0
IRIS:7:1:17:1757#0 4 * 0 0 * * 0 0 TTTTCTCGACGATTTCCACTCCTGGTCNAC aaaaaa``aaa`aaaa_^a```]][Z[DY^ AS:i:0 XS:i:0
IRIS:7:1:17:1479#0 4 * 0 0 * * 0 0 CATATTGTAGGGTGGATCTCGAAAGATATGAAAGAT abaaaaa`a```^aaaaa`_]aaa`aaa__a_X]`` AS:i:0 XS:i:0
IRIS:7:1:17:150#0 4 * 0 0 * * 0 0 TGATGTACTATGCATATGAACTTGTATGCAAAGTGG abaabaa`aaaaaaa^ba_]]aaa^aaaaa_^][aa AS:i:0 XS:i:0
|]
Parse GFF lines
case_trim_attrs_1 = GFF._trimString " x = 10" @?= "x = 10"
case_trim_attrs_2 = GFF._trimString " x = 10 " @?= "x = 10"
case_trim_attrs_3 = GFF._trimString "x = 10 " @?= "x = 10"
case_trim_attrs_4 = GFF._trimString "x = 10" @?= "x = 10"
case_trim_attrs_5 = GFF._trimString " X " @?= "X"
case_parse_gff_line = GFF.readGffLine gff_line @?= Right gff_structure
where
gff_line = "chrI\tunknown\texon\t4124\t4358\t.\t-\t.\tgene_id \"Y74C9A.3\"; transcript_id \"NM_058260\"; gene_name \"Y74C9A.3\"; p_id \"P23728\"; tss_id \"TSS14501\";"
gff_structure = GFF.GffLine "chrI" "unknown" "exon" 4124 4358 Nothing GFF.GffNegStrand (-1) attrsExpected
attrsExpected = [("gene_id","Y74C9A.3"), ("transcript_id" ,"NM_058260"), ("gene_name", "Y74C9A.3"), ("p_id", "P23728"), ("tss_id", "TSS14501")]
-- _parseGffAttributes
case_parse_gff_atributes_normal_1 = GFF._parseGffAttributes "ID=chrI;dbxref=NCBI:NC_001133;Name=chrI" @?= [("ID","chrI"),("dbxref","NCBI:NC_001133"),("Name","chrI")]
case_parse_gff_atributes_normal_2 = GFF._parseGffAttributes "gene_id=chrI;dbxref=NCBI:NC_001133;Name=chrI" @?= [("gene_id","chrI"),("dbxref","NCBI:NC_001133"),("Name","chrI")]
case_parse_gff_atributes_trail_del = GFF._parseGffAttributes "gene_id=chrI;dbxref=NCBI:NC_001133;Name=chrI;" @?= [("gene_id","chrI"),("dbxref","NCBI:NC_001133"),("Name","chrI")]
case_parse_gff_atributes_trail_del_space = GFF._parseGffAttributes "gene_id=chrI;dbxref=NCBI:NC_001133;Name=chrI; " @?= [("gene_id","chrI"),("dbxref","NCBI:NC_001133"),("Name","chrI")]
case_calc_sam_stats = testNGLessIO (samStats "test_samples/sample.sam.gz") >>= \r ->
r @?= (2772,1310,1299)
--- Unique.hs
File " test_samples / data_set_repeated.fq " has 216 reads in which 54 are unique .
countC = loop (0 :: Int)
where
loop !n = C.await >>= maybe (return n) (const (loop $ n+1))
make_unique_test n = let enc = SolexaEncoding in do
nuniq <- testNGLessIO $ do
newfp <- performUnique "test_samples/data_set_repeated.fq" enc n
C.runConduit $
conduitPossiblyCompressedFile newfp
.| linesC
.| fqDecodeC "testing" enc
.| countC
let n' = min n 4
nuniq @?= (n' * 54)
case_unique_1 = make_unique_test 1
case_unique_2 = make_unique_test 2
case_unique_3 = make_unique_test 3
case_unique_4 = make_unique_test 4
case_unique_5 = make_unique_test 5
case_recursiveAnalyze = execState (recursiveAnalyse countFcalls expr) 0 @?= (1 :: Int)
where
countFcalls (FunctionCall _ _ _ _) = modify' (+1)
countFcalls _ = return ()
expr = Assignment
(Variable "varname")
(FunctionCall (FuncName "count")
(Lookup Nothing (Variable "mapped"))
[(Variable "features", ListExpression [ConstStr "seqname"])
,(Variable "multiple", ConstSymbol "all1")]
Nothing)
case_expand_path = do
expandPath' "/nothing1/file.txt" [] @?= ["/nothing1/file.txt"]
expandPath' "/nothing2/file.txt" [undefined] @?= ["/nothing2/file.txt"]
expandPath' "/nothing3/file.txt" ["/home/luispedro/my-directory"] @?= ["/nothing3/file.txt"]
expandPath' "<>/nothing4/file.txt" ["/home/luispedro/my-directory1"] @?= ["/home/luispedro/my-directory1/nothing4/file.txt"]
expandPath' "<>/nothing4/file.txt" ["refs=/home/luispedro/my-directory1"] @?= []
expandPath' "<>/nothing/file.txt" ["/home/luispedro/my-directory"
,"/home/alternative/your-directory"] @?= ["/home/luispedro/my-directory/nothing/file.txt"
,"/home/alternative/your-directory/nothing/file.txt"]
expandPath' "<refs>/nothing/file.txt" ["/home/luispedro/my-directory"
,"/home/alternative/your-directory"] @?= ["/home/luispedro/my-directory/nothing/file.txt"
,"/home/alternative/your-directory/nothing/file.txt"]
expandPath' "<refs>/nothing/file.txt" ["refs=/home/luispedro/my-directory"
,"/home/alternative/your-directory"] @?= ["/home/luispedro/my-directory/nothing/file.txt"
,"/home/alternative/your-directory/nothing/file.txt"]
expandPath' "<refs>/nothing/file.txt" ["refs=/home/luispedro/my-directory"
,"nope=/home/alternative/your-directory"] @?= ["/home/luispedro/my-directory/nothing/file.txt"]
expandPath' "<refs>/nothing/file.txt" ["other=/home/luispedro/my-directory"
,"nope=/home/alternative/your-directory"] @?= []
expandPath' "<refs>/nothing/file.txt" [] @?= []
| null | https://raw.githubusercontent.com/ngless-toolkit/ngless/c69baf7e00e807a82ec1b1276763f74f4a5411b2/Tests-Src/Tests.hs | haskell | | Unit tests are their own programme.
in the tests/ directory, but are not always as convenient.
The main test driver sets up the config options then uses the automatically
generated function
Test Tokens module
Test Types
Type Validate pre process operations
_parseGffAttributes
- Unique.hs | Copyright 2013 - 2021 NGLess Authors
- License : MIT
- License: MIT
-}
# LANGUAGE TemplateHaskell , QuasiQuotes #
Unit tests written in have less overhead than full integration tests
module Main where
import Test.Tasty
import Test.Tasty.TH
import Test.Tasty.HUnit
import Text.Parsec (parse)
import Text.Parsec.Combinator (eof)
import System.Directory (removeDirectoryRecursive)
import qualified Data.Vector.Storable as VS
import qualified Data.ByteString.Char8 as B
import qualified Data.Conduit as C
import Data.Conduit ((.|))
import Control.Monad.State.Strict (execState, modify')
import Data.Convertible (convert)
import Data.Conduit.Algorithms.Async (conduitPossiblyCompressedFile)
import Language
import Interpret
import Tokens
import FileManagement
import NGLess
import NGLess.NGLEnvironment (setupTestEnvironment)
import Interpretation.Unique
import Data.Sam
import Data.FastQ
import Utils.Conduit
import Utils.Samtools (samBamConduit)
import Utils.Here
import qualified Data.GFF as GFF
import Tests.Utils
import Tests.Count (tgroup_Count)
import Tests.FastQ (tgroup_FastQ)
import Tests.IntGroups (tgroup_IntGroups)
import Tests.Language (tgroup_Language)
import Tests.LoadFQDirectory (tgroup_LoadFQDirectory)
import Tests.NGLessAPI (tgroup_NGLessAPI)
import Tests.Parse (tgroup_Parse)
import Tests.Samples (tgroup_Samples)
import Tests.Select (tgroup_Select)
import Tests.Types (tgroup_Types)
import Tests.Validation (tgroup_Validation)
import Tests.Vector (tgroup_Vector)
import Tests.Write (tgroup_Write)
test_FastQ = [tgroup_FastQ]
test_Validation = [tgroup_Validation]
test_Count = [tgroup_Count]
test_Parse = [tgroup_Parse]
test_Types = [tgroup_Types]
test_NGLessAPI = [tgroup_NGLessAPI]
test_Vector = [tgroup_Vector]
test_IntGroups = [tgroup_IntGroups]
test_Samples = [tgroup_Samples]
test_Select = [tgroup_Select]
test_Language = [tgroup_Language]
test_LoadFqDir = [tgroup_LoadFQDirectory]
test_Write = [tgroup_Write]
main = do
setupTestEnvironment
$(defaultMainGenerator)
removeDirectoryRecursive "testing_directory_tmp"
tokenize' fn t = map snd <$> (tokenize fn t)
case_tok_cr = TNewLine @=? (case parse (Tokens.eol <* eof) "test" "\r\n" of { Right t -> t; Left _ -> error "Parse failed"; })
case_tok_single_line_comment = tokenize' "test" with_comment @?= Right expected
where
with_comment = "a=0# comment\nb=1\n"
expected = [TWord "a",TOperator '=',TExpr (ConstInt 0),TNewLine,TWord "b",TOperator '=',TExpr (ConstInt 1),TNewLine]
case_tok_single_line_comment_cstyle = tokenize' "test" with_comment @?= Right expected
where
with_comment = "a=0// comment\nb=1\n"
expected = [TWord "a",TOperator '=',TExpr (ConstInt 0),TNewLine,TWord "b",TOperator '=',TExpr (ConstInt 1),TNewLine]
case_tok_multi_line_comment = tokenize' "test" with_comment @?= Right expected
where
with_comment = "a=0/* This\n\nwith\nlines*/\nb=1\n"
expected = [TWord "a",TOperator '=',TExpr (ConstInt 0),TIndent 0,TNewLine,TWord "b",TOperator '=',TExpr (ConstInt 1),TNewLine]
case_tok_word_ = tokenize' "test" "word_with_underscore" @?= Right expected
where
expected = [TWord "word_with_underscore"]
case_indent_comment = isOk "ParseFailed" $ parsetest indent_comment
case_indent_space = isOk "ParseFailed" $ parsetest indent_space
indent_comment = "ngless '0.0'\n\
\reads = fastq('input1.fq')\n\
\reads = preprocess(reads) using |read|:\n\
\ read = read[5:]\n\
\ # comment \n"
indent_space = "ngless '0.0'\n\
\reads = fastq('input1.fq')\n\
\reads = preprocess(reads) using |read|:\n\
\ read = read[5:]\n\
\ \n"
sr i s q = NGOShortRead (ShortRead i s $ VS.generate (B.length q) (convert . B.index q))
case_pre_process_indexation_1 = _evalIndex' (sr "@IRIS" "AGTACCAA" "aa`aaaaa") [Just (NGOInteger 5), Nothing] @?= (sr "@IRIS" "CAA" "aaa")
case_pre_process_indexation_2 = _evalIndex' (sr "@IRIS" "AGTACCAA" "aa`aaaaa") [Nothing, Just (NGOInteger 3)] @?= (sr "@IRIS" "AGT" "aa`")
case_pre_process_indexation_3 = _evalIndex' (sr "@IRIS" "AGTACCAA" "aa`aaaaa") [Just (NGOInteger 2), Just (NGOInteger 5)] @?= (sr "@IRIS" "TAC" "`aa")
_evalIndex' a b = case _evalIndex a b of
Right v -> v
Left err -> error (show err)
case_pre_process_length_1 = _evalUnary UOpLen (sr "@IRIS" "AGTACCAA" "aa`aaaaa") @?= Right (NGOInteger 8)
case_bop_gte_1 = evalBinary BOpGTE (NGOInteger 10) (NGOInteger 10) @?= Right (NGOBool True)
case_bop_gte_2 = evalBinary BOpGTE (NGOInteger 11) (NGOInteger 10) @?= Right (NGOBool True)
case_bop_gte_3 = evalBinary BOpGTE (NGOInteger 10) (NGOInteger 11) @?= Right (NGOBool False)
case_bop_gt_1 = evalBinary BOpGT (NGOInteger 10) (NGOInteger 10) @?= Right (NGOBool False)
case_bop_gt_2 = evalBinary BOpGT (NGOInteger 11) (NGOInteger 10) @?= Right (NGOBool True)
case_bop_gt_3 = evalBinary BOpGT (NGOInteger 10) (NGOInteger 11) @?= Right (NGOBool False)
case_bop_lt_1 = evalBinary BOpLT (NGOInteger 10) (NGOInteger 10) @?= Right (NGOBool False)
case_bop_lt_2 = evalBinary BOpLT (NGOInteger 11) (NGOInteger 10) @?= Right (NGOBool False)
case_bop_lt_3 = evalBinary BOpLT (NGOInteger 10) (NGOInteger 11) @?= Right (NGOBool True)
case_bop_lte_1 = evalBinary BOpLTE (NGOInteger 10) (NGOInteger 10) @?= Right (NGOBool True)
case_bop_lte_2 = evalBinary BOpLTE (NGOInteger 11) (NGOInteger 10) @?= Right (NGOBool False)
case_bop_lte_3 = evalBinary BOpLTE (NGOInteger 10) (NGOInteger 11) @?= Right (NGOBool True)
case_bop_eq_1 = evalBinary BOpEQ (NGOInteger 10) (NGOInteger 10) @?= Right (NGOBool True)
case_bop_eq_2 = evalBinary BOpEQ (NGOInteger 10) (NGOInteger 0) @?= Right (NGOBool False)
case_bop_neq_1 = evalBinary BOpNEQ (NGOInteger 0) (NGOInteger 10) @?= Right (NGOBool True)
case_bop_neq_2 = evalBinary BOpNEQ (NGOInteger 10) (NGOInteger 10) @?= Right (NGOBool False)
case_bop_add_1 = evalBinary BOpAdd (NGOInteger 0) (NGOInteger 10) @?= Right (NGOInteger 10)
case_bop_add_2 = evalBinary BOpAdd (NGOInteger 10) (NGOInteger 0) @?= Right (NGOInteger 10)
case_bop_add_3 = evalBinary BOpAdd (NGOInteger 10) (NGOInteger 10) @?= Right (NGOInteger 20)
case_bop_mul_1 = evalBinary BOpMul (NGOInteger 0) (NGOInteger 10) @?= Right (NGOInteger 0)
case_bop_mul_2 = evalBinary BOpMul (NGOInteger 10) (NGOInteger 0) @?= Right (NGOInteger 0)
case_bop_mul_3 = evalBinary BOpMul (NGOInteger 10) (NGOInteger 10) @?= Right (NGOInteger 100)
case_bop_add_path_1 = evalBinary BOpPathAppend (NGOString "dir") (NGOString "file") @?= Right (NGOString "dir/file")
case_bop_add_path_2 = evalBinary BOpPathAppend (NGOString "dir/subdir") (NGOString "file") @?= Right (NGOString "dir/subdir/file")
case_bop_add_path_3 = evalBinary BOpPathAppend (NGOString "dir/subdir/") (NGOString "file") @?= Right (NGOString "dir/subdir/file")
case_bop_add_path_4 = evalBinary BOpPathAppend (NGOString "../dir/subdir/") (NGOString "file") @?= Right (NGOString "../dir/subdir/file")
case_bop_add_path_5 = evalBinary BOpPathAppend (NGOString "/abs/dir/subdir/") (NGOString "file") @?= Right (NGOString "/abs/dir/subdir/file")
case_uop_minus_1 = _evalUnary UOpMinus (NGOInteger 10) @?= Right (NGOInteger (-10))
case_uop_minus_2 = _evalUnary UOpMinus (NGOInteger (-10)) @?= Right (NGOInteger 10)
case_template_id = takeBaseNameNoExtensions "a/B/c/d/xpto_1.fq" @?= takeBaseNameNoExtensions "a/B/c/d/xpto_1.fq"
case_template = takeBaseNameNoExtensions "a/B/c/d/xpto_1.fq" @?= "xpto_1"
samStats :: FilePath -> NGLessIO (Int, Int, Int)
samStats fname = C.runConduit (samBamConduit fname .| linesVC 1024 .| samStatsC) >>= runNGLess
case_sam20 = do
sam <- testNGLessIO $ asTempFile sam20 "sam" >>= samStats
sam @?= (5,0,0)
where
sam20 = [here|
@SQ SN:I LN:230218
@PG ID:bwa PN:bwa VN:0.7.7-r441 CL:/home/luispedro/.local/share/ngless/bin/ngless-0.0.0-bwa mem -t 1 /home/luispedro/.local/share/ngless/data/sacCer3/Sequence/BWAIndex/reference.fa.gz /tmp/preprocessed_sample20.fq1804289383846930886.gz
IRIS:7:1:17:394#0 4 * 0 0 * * 0 0 GTCAGGACAAGAAAGACAANTCCAATTNACATT aaabaa`]baaaaa_aab]D^^`b`aYDW]aba AS:i:0 XS:i:0
IRIS:7:1:17:800#0 4 * 0 0 * * 0 0 GGAAACACTACTTAGGCTTATAAGATCNGGTTGCGG ababbaaabaaaaa`]`ba`]`aaaaYD\\_a``XT AS:i:0 XS:i:0
IRIS:7:1:17:1757#0 4 * 0 0 * * 0 0 TTTTCTCGACGATTTCCACTCCTGGTCNAC aaaaaa``aaa`aaaa_^a```]][Z[DY^ AS:i:0 XS:i:0
IRIS:7:1:17:1479#0 4 * 0 0 * * 0 0 CATATTGTAGGGTGGATCTCGAAAGATATGAAAGAT abaaaaa`a```^aaaaa`_]aaa`aaa__a_X]`` AS:i:0 XS:i:0
IRIS:7:1:17:150#0 4 * 0 0 * * 0 0 TGATGTACTATGCATATGAACTTGTATGCAAAGTGG abaabaa`aaaaaaa^ba_]]aaa^aaaaa_^][aa AS:i:0 XS:i:0
|]
Parse GFF lines
case_trim_attrs_1 = GFF._trimString " x = 10" @?= "x = 10"
case_trim_attrs_2 = GFF._trimString " x = 10 " @?= "x = 10"
case_trim_attrs_3 = GFF._trimString "x = 10 " @?= "x = 10"
case_trim_attrs_4 = GFF._trimString "x = 10" @?= "x = 10"
case_trim_attrs_5 = GFF._trimString " X " @?= "X"
case_parse_gff_line = GFF.readGffLine gff_line @?= Right gff_structure
where
gff_line = "chrI\tunknown\texon\t4124\t4358\t.\t-\t.\tgene_id \"Y74C9A.3\"; transcript_id \"NM_058260\"; gene_name \"Y74C9A.3\"; p_id \"P23728\"; tss_id \"TSS14501\";"
gff_structure = GFF.GffLine "chrI" "unknown" "exon" 4124 4358 Nothing GFF.GffNegStrand (-1) attrsExpected
attrsExpected = [("gene_id","Y74C9A.3"), ("transcript_id" ,"NM_058260"), ("gene_name", "Y74C9A.3"), ("p_id", "P23728"), ("tss_id", "TSS14501")]
case_parse_gff_atributes_normal_1 = GFF._parseGffAttributes "ID=chrI;dbxref=NCBI:NC_001133;Name=chrI" @?= [("ID","chrI"),("dbxref","NCBI:NC_001133"),("Name","chrI")]
case_parse_gff_atributes_normal_2 = GFF._parseGffAttributes "gene_id=chrI;dbxref=NCBI:NC_001133;Name=chrI" @?= [("gene_id","chrI"),("dbxref","NCBI:NC_001133"),("Name","chrI")]
case_parse_gff_atributes_trail_del = GFF._parseGffAttributes "gene_id=chrI;dbxref=NCBI:NC_001133;Name=chrI;" @?= [("gene_id","chrI"),("dbxref","NCBI:NC_001133"),("Name","chrI")]
case_parse_gff_atributes_trail_del_space = GFF._parseGffAttributes "gene_id=chrI;dbxref=NCBI:NC_001133;Name=chrI; " @?= [("gene_id","chrI"),("dbxref","NCBI:NC_001133"),("Name","chrI")]
case_calc_sam_stats = testNGLessIO (samStats "test_samples/sample.sam.gz") >>= \r ->
r @?= (2772,1310,1299)
File " test_samples / data_set_repeated.fq " has 216 reads in which 54 are unique .
countC = loop (0 :: Int)
where
loop !n = C.await >>= maybe (return n) (const (loop $ n+1))
make_unique_test n = let enc = SolexaEncoding in do
nuniq <- testNGLessIO $ do
newfp <- performUnique "test_samples/data_set_repeated.fq" enc n
C.runConduit $
conduitPossiblyCompressedFile newfp
.| linesC
.| fqDecodeC "testing" enc
.| countC
let n' = min n 4
nuniq @?= (n' * 54)
case_unique_1 = make_unique_test 1
case_unique_2 = make_unique_test 2
case_unique_3 = make_unique_test 3
case_unique_4 = make_unique_test 4
case_unique_5 = make_unique_test 5
case_recursiveAnalyze = execState (recursiveAnalyse countFcalls expr) 0 @?= (1 :: Int)
where
countFcalls (FunctionCall _ _ _ _) = modify' (+1)
countFcalls _ = return ()
expr = Assignment
(Variable "varname")
(FunctionCall (FuncName "count")
(Lookup Nothing (Variable "mapped"))
[(Variable "features", ListExpression [ConstStr "seqname"])
,(Variable "multiple", ConstSymbol "all1")]
Nothing)
case_expand_path = do
expandPath' "/nothing1/file.txt" [] @?= ["/nothing1/file.txt"]
expandPath' "/nothing2/file.txt" [undefined] @?= ["/nothing2/file.txt"]
expandPath' "/nothing3/file.txt" ["/home/luispedro/my-directory"] @?= ["/nothing3/file.txt"]
expandPath' "<>/nothing4/file.txt" ["/home/luispedro/my-directory1"] @?= ["/home/luispedro/my-directory1/nothing4/file.txt"]
expandPath' "<>/nothing4/file.txt" ["refs=/home/luispedro/my-directory1"] @?= []
expandPath' "<>/nothing/file.txt" ["/home/luispedro/my-directory"
,"/home/alternative/your-directory"] @?= ["/home/luispedro/my-directory/nothing/file.txt"
,"/home/alternative/your-directory/nothing/file.txt"]
expandPath' "<refs>/nothing/file.txt" ["/home/luispedro/my-directory"
,"/home/alternative/your-directory"] @?= ["/home/luispedro/my-directory/nothing/file.txt"
,"/home/alternative/your-directory/nothing/file.txt"]
expandPath' "<refs>/nothing/file.txt" ["refs=/home/luispedro/my-directory"
,"/home/alternative/your-directory"] @?= ["/home/luispedro/my-directory/nothing/file.txt"
,"/home/alternative/your-directory/nothing/file.txt"]
expandPath' "<refs>/nothing/file.txt" ["refs=/home/luispedro/my-directory"
,"nope=/home/alternative/your-directory"] @?= ["/home/luispedro/my-directory/nothing/file.txt"]
expandPath' "<refs>/nothing/file.txt" ["other=/home/luispedro/my-directory"
,"nope=/home/alternative/your-directory"] @?= []
expandPath' "<refs>/nothing/file.txt" [] @?= []
|
1d1b9df1b8abbc094af201d530ad5a5a574fd6c893dbd475b29dfd59a98b2329 | geophf/1HaskellADay | Exercise.hs | module Y2020.M08.D31.Exercise where
-
" Yesterday , " * ...
* where " yesterday " is " our last working day where we , the workers , get
weekends off"-yesterday .
... " yesterday " we discovered that is is
, because they all wrote the word : " the . "
Yay ! Everything is related to everything , and we can all go home , now .
Good to know , but also not helpful .
So , there are many common or connective words that do n't relate to the topic *
* but then , if you 're writing a research paper on the English use of the
definite article " the " and the Polish avoidance of the definite article when
speaking in English , then " the " is very much the topic , and what are you going
to do about that , huh ? Nothing ? Is that your answer ?
and those are know as STOPWORDS , and , unix systems also have in
/usr / share / dict/
a set of words called connectives .
And guess what the very first word is in connectives .
Just .
guess .
So !
Today 's problem .
Yesterday * ...
-
"Yesterday,"* ...
* where "yesterday" is "our last working day where we, the workers, get
weekends off"-yesterday.
... "yesterday" we discovered that Charles Dickens is Charles Darwin is
Charlie Kaufman, because they all wrote the word: "the."
Yay! Everything is related to everything, and we can all go home, now.
Good to know, but also not helpful.
So, there are many common or connective words that don't relate to the topic*
* but then, if you're writing a research paper on the English use of the
definite article "the" and the Polish avoidance of the definite article when
speaking in English, then "the" is very much the topic, and what are you going
to do about that, huh? Nothing? Is that your answer?
and those are know as STOPWORDS, and, unix systems also have in
/usr/share/dict/
a set of words called connectives.
And guess what the very first word is in connectives.
Just.
guess.
So!
Today's Haskell problem.
Yesterday* ...
--}
import Y2020.M08.D28.Exercise
-
... we created a word - frequency analysis of the cleaned - text of " A Christmas Carol . " And we learned " the " is the most - frequent word .
TODAY , * ...
* Today , : actually today , unlike ' yesterday 's ' meaning varies
... let us compute what the most - frequent word is , having removed the STOPWORDS .
-
... we created a word-frequency analysis of the cleaned-text of Charles
Dickens' "A Christmas Carol." And we learned "the" is the most-frequent word.
TODAY,* ...
*Today, n.: actually today, unlike 'yesterday's' meaning varies
... let us compute what the most-frequent word is, having removed the STOPWORDS.
--}
import Data.List (sortOn)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Ord
import Data.Set (Set)
import Y2020.M08.D25.Exercise (workingDir, gutenbergTop100Index)
stopwords :: FilePath
stopwords = "/usr/share/dict/connectives"
loadStopwords :: FilePath -> IO (Set String)
loadStopwords connectives = undefined
-
> > > take 5 . Set.toList < $ > loadStopwords stopwords
[ " a","about","after","against","all " ]
-
>>> take 5 . Set.toList <$> loadStopwords stopwords
["a","about","after","against","all"]
--}
removeStopwords :: Set String -> Map String Int -> Map String Int
removeStopwords stoppers wordfreqs = undefined
-
What is most - frequent word in " A Christmas Carol , " having
removed all ?
> > > let conns = loadStopwords stopwords
> > > let weirdos = Set.fromList " ! \"#$%'()*,-./0123456789:;?@[]\182\187\191 "
> > > let bookus = study ( workingDir + + gutenbergTop100Index )
> > > let bookwords = cleanDoc weirdos < $ > bookus
> > > let wordus = wordFreq < $ > bookwords
> > > length < $ > wordus
4852
> > > let keywords = removeStopwords < $ > conns < * > wordus
> > > length < $ > keywords
4702
> > > take 5 . sortOn ( Down . snd ) . Map.toList < $ > keywords
[ ( " scrooge",314),("upon",120),("ghost",93),("christmas",92),("project",87 ) ]
Okay , NOW we 're talking !
-
What is Charles Dickens most-frequent word in "A Christmas Carol," having
removed all stopwords?
>>> let conns = loadStopwords stopwords
>>> let weirdos = Set.fromList "!\"#$%'()*,-./0123456789:;?@[]\182\187\191"
>>> let bookus = study (workingDir ++ gutenbergTop100Index)
>>> let bookwords = cleanDoc weirdos <$> bookus
>>> let wordus = wordFreq <$> bookwords
>>> length <$> wordus
4852
>>> let keywords = removeStopwords <$> conns <*> wordus
>>> length <$> keywords
4702
>>> take 5 . sortOn (Down . snd) . Map.toList <$> keywords
[("scrooge",314),("upon",120),("ghost",93),("christmas",92),("project",87)]
Okay, NOW we're talking!
--}
| null | https://raw.githubusercontent.com/geophf/1HaskellADay/514792071226cd1e2ba7640af942667b85601006/exercises/HAD/Y2020/M08/D31/Exercise.hs | haskell | }
}
}
} | module Y2020.M08.D31.Exercise where
-
" Yesterday , " * ...
* where " yesterday " is " our last working day where we , the workers , get
weekends off"-yesterday .
... " yesterday " we discovered that is is
, because they all wrote the word : " the . "
Yay ! Everything is related to everything , and we can all go home , now .
Good to know , but also not helpful .
So , there are many common or connective words that do n't relate to the topic *
* but then , if you 're writing a research paper on the English use of the
definite article " the " and the Polish avoidance of the definite article when
speaking in English , then " the " is very much the topic , and what are you going
to do about that , huh ? Nothing ? Is that your answer ?
and those are know as STOPWORDS , and , unix systems also have in
/usr / share / dict/
a set of words called connectives .
And guess what the very first word is in connectives .
Just .
guess .
So !
Today 's problem .
Yesterday * ...
-
"Yesterday,"* ...
* where "yesterday" is "our last working day where we, the workers, get
weekends off"-yesterday.
... "yesterday" we discovered that Charles Dickens is Charles Darwin is
Charlie Kaufman, because they all wrote the word: "the."
Yay! Everything is related to everything, and we can all go home, now.
Good to know, but also not helpful.
So, there are many common or connective words that don't relate to the topic*
* but then, if you're writing a research paper on the English use of the
definite article "the" and the Polish avoidance of the definite article when
speaking in English, then "the" is very much the topic, and what are you going
to do about that, huh? Nothing? Is that your answer?
and those are know as STOPWORDS, and, unix systems also have in
/usr/share/dict/
a set of words called connectives.
And guess what the very first word is in connectives.
Just.
guess.
So!
Today's Haskell problem.
Yesterday* ...
import Y2020.M08.D28.Exercise
-
... we created a word - frequency analysis of the cleaned - text of " A Christmas Carol . " And we learned " the " is the most - frequent word .
TODAY , * ...
* Today , : actually today , unlike ' yesterday 's ' meaning varies
... let us compute what the most - frequent word is , having removed the STOPWORDS .
-
... we created a word-frequency analysis of the cleaned-text of Charles
Dickens' "A Christmas Carol." And we learned "the" is the most-frequent word.
TODAY,* ...
*Today, n.: actually today, unlike 'yesterday's' meaning varies
... let us compute what the most-frequent word is, having removed the STOPWORDS.
import Data.List (sortOn)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Ord
import Data.Set (Set)
import Y2020.M08.D25.Exercise (workingDir, gutenbergTop100Index)
stopwords :: FilePath
stopwords = "/usr/share/dict/connectives"
loadStopwords :: FilePath -> IO (Set String)
loadStopwords connectives = undefined
-
> > > take 5 . Set.toList < $ > loadStopwords stopwords
[ " a","about","after","against","all " ]
-
>>> take 5 . Set.toList <$> loadStopwords stopwords
["a","about","after","against","all"]
removeStopwords :: Set String -> Map String Int -> Map String Int
removeStopwords stoppers wordfreqs = undefined
-
What is most - frequent word in " A Christmas Carol , " having
removed all ?
> > > let conns = loadStopwords stopwords
> > > let weirdos = Set.fromList " ! \"#$%'()*,-./0123456789:;?@[]\182\187\191 "
> > > let bookus = study ( workingDir + + gutenbergTop100Index )
> > > let bookwords = cleanDoc weirdos < $ > bookus
> > > let wordus = wordFreq < $ > bookwords
> > > length < $ > wordus
4852
> > > let keywords = removeStopwords < $ > conns < * > wordus
> > > length < $ > keywords
4702
> > > take 5 . sortOn ( Down . snd ) . Map.toList < $ > keywords
[ ( " scrooge",314),("upon",120),("ghost",93),("christmas",92),("project",87 ) ]
Okay , NOW we 're talking !
-
What is Charles Dickens most-frequent word in "A Christmas Carol," having
removed all stopwords?
>>> let conns = loadStopwords stopwords
>>> let weirdos = Set.fromList "!\"#$%'()*,-./0123456789:;?@[]\182\187\191"
>>> let bookus = study (workingDir ++ gutenbergTop100Index)
>>> let bookwords = cleanDoc weirdos <$> bookus
>>> let wordus = wordFreq <$> bookwords
>>> length <$> wordus
4852
>>> let keywords = removeStopwords <$> conns <*> wordus
>>> length <$> keywords
4702
>>> take 5 . sortOn (Down . snd) . Map.toList <$> keywords
[("scrooge",314),("upon",120),("ghost",93),("christmas",92),("project",87)]
Okay, NOW we're talking!
|
e517e034894d596e36469432185c8afce2885922cd3003c3a1bf3c8d9c4e87f0 | facebook/infer | timer.ml |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
(** Timers for runtime statistics *)
open! NS0
type t =
{ mutable ustart: float
; mutable sstart: float
; mutable uaggregate: float
; mutable saggregate: float
; mutable count: int
; mutable max: float
; mutable threshold: float
; name: string }
let enabled = ref false
let start t =
if !enabled then (
let {Unix.tms_utime; tms_stime} = Unix.times () in
t.ustart <- tms_utime ;
t.sstart <- tms_stime )
let stop_ t =
let {Unix.tms_utime; tms_stime} = Unix.times () in
let ud = tms_utime -. t.ustart in
let sd = tms_stime -. t.sstart in
t.uaggregate <- t.uaggregate +. ud ;
t.saggregate <- t.saggregate +. sd ;
let usd = ud +. sd in
if Float.(t.max < usd) then t.max <- usd ;
t.count <- t.count + 1 ;
(tms_utime, tms_stime)
let stop t = if !enabled then stop_ t |> ignore
let stop_report t report =
if !enabled then
let tms_utime, tms_stime = stop_ t in
let elapsed = tms_utime +. tms_stime -. (t.ustart +. t.sstart) in
if Float.(elapsed > t.threshold) then (
t.threshold <- elapsed ;
report ~name:t.name ~elapsed:(elapsed *. 1000.)
~aggregate:((t.uaggregate +. t.saggregate) *. 1000.)
~count:t.count )
let create ?at_exit:printf name =
let t =
{ ustart= 0.
; uaggregate= 0.
; sstart= 0.
; saggregate= 0.
; count= 0
; max= 0.
; threshold= 0.
; name }
in
Option.iter printf ~f:(fun report ->
at_exit (fun () ->
if !enabled then
report ~name:t.name ~elapsed:(t.max *. 1000.)
~aggregate:((t.uaggregate +. t.saggregate) *. 1000.)
~count:t.count ) ) ;
t
| null | https://raw.githubusercontent.com/facebook/infer/b3a229f872003fe020455807668bc7a8edd22d5c/sledge/nonstdlib/timer.ml | ocaml | * Timers for runtime statistics |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! NS0
type t =
{ mutable ustart: float
; mutable sstart: float
; mutable uaggregate: float
; mutable saggregate: float
; mutable count: int
; mutable max: float
; mutable threshold: float
; name: string }
let enabled = ref false
let start t =
if !enabled then (
let {Unix.tms_utime; tms_stime} = Unix.times () in
t.ustart <- tms_utime ;
t.sstart <- tms_stime )
let stop_ t =
let {Unix.tms_utime; tms_stime} = Unix.times () in
let ud = tms_utime -. t.ustart in
let sd = tms_stime -. t.sstart in
t.uaggregate <- t.uaggregate +. ud ;
t.saggregate <- t.saggregate +. sd ;
let usd = ud +. sd in
if Float.(t.max < usd) then t.max <- usd ;
t.count <- t.count + 1 ;
(tms_utime, tms_stime)
let stop t = if !enabled then stop_ t |> ignore
let stop_report t report =
if !enabled then
let tms_utime, tms_stime = stop_ t in
let elapsed = tms_utime +. tms_stime -. (t.ustart +. t.sstart) in
if Float.(elapsed > t.threshold) then (
t.threshold <- elapsed ;
report ~name:t.name ~elapsed:(elapsed *. 1000.)
~aggregate:((t.uaggregate +. t.saggregate) *. 1000.)
~count:t.count )
let create ?at_exit:printf name =
let t =
{ ustart= 0.
; uaggregate= 0.
; sstart= 0.
; saggregate= 0.
; count= 0
; max= 0.
; threshold= 0.
; name }
in
Option.iter printf ~f:(fun report ->
at_exit (fun () ->
if !enabled then
report ~name:t.name ~elapsed:(t.max *. 1000.)
~aggregate:((t.uaggregate +. t.saggregate) *. 1000.)
~count:t.count ) ) ;
t
|
0da7f207c1413ccf1434010a485ae8dbcce7dae1feab7a5f0d4ae8de23a833c1 | mmontone/fmt | package.lisp | (defpackage fmt-test
(:use :cl :fmt :fiveam)
(:export #:run-tests))
| null | https://raw.githubusercontent.com/mmontone/fmt/ecb0443fe595638e46c06a5dfea9a3e8c353d7df/t/package.lisp | lisp | (defpackage fmt-test
(:use :cl :fmt :fiveam)
(:export #:run-tests))
| |
d178dea71726772477be0a29e1b35e34c94c98ebf689e35079fafa93bfc0d346 | blockapps/eth-pruner | ExtendedWord.hs | # LANGUAGE FlexibleInstances #
{-# LANGUAGE TypeSynonymInstances #-}
{-# OPTIONS -fno-warn-orphans #-}
module Prune.ExtendedWord
(
Word64, Word160,
word64ToBytes, bytesToWord64,
word160ToBytes, bytesToWord160,
) where
import Data.Binary
import Data.Bits
import Data.DoubleWord (Word160)
word64ToBytes :: Word64 -> [Word8]
word64ToBytes word = map (fromIntegral . (word `shiftR`)) [64-8, 64-16..0]
bytesToWord64 :: [Word8] -> Word64
bytesToWord64 bytes | length bytes == 8 =
sum $ map (\(shiftBits, byte) -> fromIntegral byte `shiftL` shiftBits) $ zip [64-8,64-16..0] bytes
bytesToWord64 _ = error "bytesToWord64 was called with the wrong number of bytes"
word160ToBytes :: Word160 -> [Word8]
word160ToBytes word = map (fromIntegral . (word `shiftR`)) [160-8, 160-16..0]
bytesToWord160 :: [Word8] -> Word160
bytesToWord160 bytes | length bytes == 20 =
sum $ map (\(shiftBits, byte) -> fromIntegral byte `shiftL` shiftBits) $ zip [160-8,160-16..0] bytes
bytesToWord160 _ = error "bytesToWord160 was called with the wrong number of bytes"
| null | https://raw.githubusercontent.com/blockapps/eth-pruner/513fcc884c93974a29b380ca80669dc0aebec125/src/Prune/ExtendedWord.hs | haskell | # LANGUAGE TypeSynonymInstances #
# OPTIONS -fno-warn-orphans # | # LANGUAGE FlexibleInstances #
module Prune.ExtendedWord
(
Word64, Word160,
word64ToBytes, bytesToWord64,
word160ToBytes, bytesToWord160,
) where
import Data.Binary
import Data.Bits
import Data.DoubleWord (Word160)
word64ToBytes :: Word64 -> [Word8]
word64ToBytes word = map (fromIntegral . (word `shiftR`)) [64-8, 64-16..0]
bytesToWord64 :: [Word8] -> Word64
bytesToWord64 bytes | length bytes == 8 =
sum $ map (\(shiftBits, byte) -> fromIntegral byte `shiftL` shiftBits) $ zip [64-8,64-16..0] bytes
bytesToWord64 _ = error "bytesToWord64 was called with the wrong number of bytes"
word160ToBytes :: Word160 -> [Word8]
word160ToBytes word = map (fromIntegral . (word `shiftR`)) [160-8, 160-16..0]
bytesToWord160 :: [Word8] -> Word160
bytesToWord160 bytes | length bytes == 20 =
sum $ map (\(shiftBits, byte) -> fromIntegral byte `shiftL` shiftBits) $ zip [160-8,160-16..0] bytes
bytesToWord160 _ = error "bytesToWord160 was called with the wrong number of bytes"
|
024fd069d1d21f40f57771dace6f4dde307dc8de7104aa4fcd7c912cbd7b7501 | hgoes/smtlib2 | Nat.hs | module Language.SMTLib2.Internals.Type.Nat where
import Data.Typeable
import Data.Constraint
import Data.GADT.Compare
import Data.GADT.Show
import Language.Haskell.TH
-- | Natural numbers on the type-level.
data Nat = Z | S Nat deriving Typeable
| A concrete representation of the ' ' type .
data Natural (n::Nat) where
Zero :: Natural Z
Succ :: Natural n -> Natural (S n)
type family (+) (n :: Nat) (m :: Nat) :: Nat where
(+) Z n = n
(+) (S n) m = S ((+) n m)
type family (-) (n :: Nat) (m :: Nat) :: Nat where
(-) n Z = n
(-) (S n) (S m) = n - m
type family (<=) (n :: Nat) (m :: Nat) :: Bool where
(<=) Z m = True
(<=) (S n) Z = False
(<=) (S n) (S m) = (<=) n m
naturalToInteger :: Natural n -> Integer
naturalToInteger = conv 0
where
conv :: Integer -> Natural m -> Integer
conv n Zero = n
conv n (Succ x) = conv (n+1) x
naturalAdd :: Natural n -> Natural m -> Natural (n + m)
naturalAdd Zero n = n
naturalAdd (Succ x) y = Succ (naturalAdd x y)
naturalSub :: Natural (n + m) -> Natural n -> Natural m
naturalSub n Zero = n
naturalSub (Succ sum) (Succ n) = naturalSub sum n
naturalSub' :: Natural n -> Natural m
-> (forall diff. ((m + diff) ~ n) => Natural diff -> a)
-> a
naturalSub' n Zero f = f n
naturalSub' (Succ sum) (Succ n) f = naturalSub' sum n f
naturalLEQ :: Natural n -> Natural m -> Maybe (Dict ((n <= m) ~ True))
naturalLEQ Zero _ = Just Dict
naturalLEQ (Succ n) (Succ m) = case naturalLEQ n m of
Just Dict -> Just Dict
Nothing -> Nothing
naturalLEQ _ _ = Nothing
instance Show (Natural n) where
showsPrec p = showsPrec p . naturalToInteger
instance Eq (Natural n) where
(==) _ _ = True
instance Ord (Natural n) where
compare _ _ = EQ
-- | Get a static representation for a dynamically created natural number.
--
-- Example:
--
-- >>> reifyNat (S (S Z)) show
" 2 "
reifyNat :: Nat -> (forall n. Natural n -> r) -> r
reifyNat Z f = f Zero
reifyNat (S n) f = reifyNat n $ \n' -> f (Succ n')
-- | A template haskell function to create nicer looking numbers.
--
-- Example:
--
> > > : t $ ( nat 5 )
$ ( nat 5 ) : : Natural ( 'S ( 'S ( 'S ( 'S ( 'S ' Z ) ) ) ) )
nat :: (Num a,Ord a) => a -> ExpQ
nat n
| n < 0 = error $ "nat: Can only use numbers >= 0."
| otherwise = nat' n
where
nat' 0 = [| Zero |]
nat' n = [| Succ $(nat' (n-1)) |]
-- | A template haskell function to create nicer looking number types.
--
-- Example:
--
> > > $ ( nat 5 ) : : Natural $ ( natT 5 )
5
natT :: (Num a,Ord a) => a -> TypeQ
natT n
| n < 0 = error $ "natT: Can only use numbers >= 0."
| otherwise = natT' n
where
natT' 0 = [t| Z |]
natT' n = [t| S $(natT' (n-1)) |]
instance Eq Nat where
(==) Z Z = True
(==) (S x) (S y) = x == y
(==) _ _ = False
instance Ord Nat where
compare Z Z = EQ
compare Z _ = LT
compare _ Z = GT
compare (S x) (S y) = compare x y
instance Num Nat where
(+) Z n = n
(+) (S n) m = S (n + m)
(-) n Z = n
(-) (S n) (S m) = n - m
(-) _ _ = error $ "Cannot produce negative natural numbers."
(*) Z n = Z
(*) (S n) m = m+(n*m)
negate _ = error $ "Cannot produce negative natural numbers."
abs = id
signum Z = Z
signum (S _) = S Z
fromInteger x
| x<0 = error $ "Cannot produce negative natural numbers."
| otherwise = f x
where
f 0 = Z
f n = S (f (n-1))
instance Enum Nat where
succ = S
pred (S n) = n
pred Z = error $ "Cannot produce negative natural numbers."
toEnum 0 = Z
toEnum n = S (toEnum (n-1))
fromEnum Z = 0
fromEnum (S n) = (fromEnum n)+1
instance Real Nat where
toRational Z = 0
toRational (S n) = (toRational n)+1
instance Integral Nat where
quotRem x y = let (q,r) = quotRem (toInteger x) (toInteger y)
in (fromInteger q,fromInteger r)
toInteger = f 0
where
f n Z = n
f n (S m) = f (n+1) m
type N0 = Z
type N1 = S N0
type N2 = S N1
type N3 = S N2
type N4 = S N3
type N5 = S N4
type N6 = S N5
type N7 = S N6
type N8 = S N7
type N9 = S N8
type N10 = S N9
type N11 = S N10
type N12 = S N11
type N13 = S N12
type N14 = S N13
type N15 = S N14
type N16 = S N15
type N17 = S N16
type N18 = S N17
type N19 = S N18
type N20 = S N19
type N21 = S N20
type N22 = S N21
type N23 = S N22
type N24 = S N23
type N25 = S N24
type N26 = S N25
type N27 = S N26
type N28 = S N27
type N29 = S N28
type N30 = S N29
type N31 = S N30
type N32 = S N31
type N33 = S N32
type N34 = S N33
type N35 = S N34
type N36 = S N35
type N37 = S N36
type N38 = S N37
type N39 = S N38
type N40 = S N39
type N41 = S N40
type N42 = S N41
type N43 = S N42
type N44 = S N43
type N45 = S N44
type N46 = S N45
type N47 = S N46
type N48 = S N47
type N49 = S N48
type N50 = S N49
type N51 = S N50
type N52 = S N51
type N53 = S N52
type N54 = S N53
type N55 = S N54
type N56 = S N55
type N57 = S N56
type N58 = S N57
type N59 = S N58
type N60 = S N59
type N61 = S N60
type N62 = S N61
type N63 = S N62
type N64 = S N63
instance GEq Natural where
geq Zero Zero = Just Refl
geq (Succ x) (Succ y) = do
Refl <- geq x y
return Refl
geq _ _ = Nothing
instance GCompare Natural where
gcompare Zero Zero = GEQ
gcompare Zero _ = GLT
gcompare _ Zero = GGT
gcompare (Succ x) (Succ y) = case gcompare x y of
GEQ -> GEQ
GLT -> GLT
GGT -> GGT
instance GShow Natural where
gshowsPrec = showsPrec
class IsNatural n where
getNatural :: Natural n
instance IsNatural Z where
getNatural = Zero
instance IsNatural n => IsNatural (S n) where
getNatural = Succ getNatural
deriveIsNatural :: Natural n -> Dict (IsNatural n)
deriveIsNatural Zero = Dict
deriveIsNatural (Succ n) = case deriveIsNatural n of
Dict -> Dict
| null | https://raw.githubusercontent.com/hgoes/smtlib2/c35747f2a5a9ec88dc7b1db41a5aab6e98c0458d/Language/SMTLib2/Internals/Type/Nat.hs | haskell | | Natural numbers on the type-level.
| Get a static representation for a dynamically created natural number.
Example:
>>> reifyNat (S (S Z)) show
| A template haskell function to create nicer looking numbers.
Example:
| A template haskell function to create nicer looking number types.
Example:
| module Language.SMTLib2.Internals.Type.Nat where
import Data.Typeable
import Data.Constraint
import Data.GADT.Compare
import Data.GADT.Show
import Language.Haskell.TH
data Nat = Z | S Nat deriving Typeable
| A concrete representation of the ' ' type .
data Natural (n::Nat) where
Zero :: Natural Z
Succ :: Natural n -> Natural (S n)
type family (+) (n :: Nat) (m :: Nat) :: Nat where
(+) Z n = n
(+) (S n) m = S ((+) n m)
type family (-) (n :: Nat) (m :: Nat) :: Nat where
(-) n Z = n
(-) (S n) (S m) = n - m
type family (<=) (n :: Nat) (m :: Nat) :: Bool where
(<=) Z m = True
(<=) (S n) Z = False
(<=) (S n) (S m) = (<=) n m
naturalToInteger :: Natural n -> Integer
naturalToInteger = conv 0
where
conv :: Integer -> Natural m -> Integer
conv n Zero = n
conv n (Succ x) = conv (n+1) x
naturalAdd :: Natural n -> Natural m -> Natural (n + m)
naturalAdd Zero n = n
naturalAdd (Succ x) y = Succ (naturalAdd x y)
naturalSub :: Natural (n + m) -> Natural n -> Natural m
naturalSub n Zero = n
naturalSub (Succ sum) (Succ n) = naturalSub sum n
naturalSub' :: Natural n -> Natural m
-> (forall diff. ((m + diff) ~ n) => Natural diff -> a)
-> a
naturalSub' n Zero f = f n
naturalSub' (Succ sum) (Succ n) f = naturalSub' sum n f
naturalLEQ :: Natural n -> Natural m -> Maybe (Dict ((n <= m) ~ True))
naturalLEQ Zero _ = Just Dict
naturalLEQ (Succ n) (Succ m) = case naturalLEQ n m of
Just Dict -> Just Dict
Nothing -> Nothing
naturalLEQ _ _ = Nothing
instance Show (Natural n) where
showsPrec p = showsPrec p . naturalToInteger
instance Eq (Natural n) where
(==) _ _ = True
instance Ord (Natural n) where
compare _ _ = EQ
" 2 "
reifyNat :: Nat -> (forall n. Natural n -> r) -> r
reifyNat Z f = f Zero
reifyNat (S n) f = reifyNat n $ \n' -> f (Succ n')
> > > : t $ ( nat 5 )
$ ( nat 5 ) : : Natural ( 'S ( 'S ( 'S ( 'S ( 'S ' Z ) ) ) ) )
nat :: (Num a,Ord a) => a -> ExpQ
nat n
| n < 0 = error $ "nat: Can only use numbers >= 0."
| otherwise = nat' n
where
nat' 0 = [| Zero |]
nat' n = [| Succ $(nat' (n-1)) |]
> > > $ ( nat 5 ) : : Natural $ ( natT 5 )
5
natT :: (Num a,Ord a) => a -> TypeQ
natT n
| n < 0 = error $ "natT: Can only use numbers >= 0."
| otherwise = natT' n
where
natT' 0 = [t| Z |]
natT' n = [t| S $(natT' (n-1)) |]
instance Eq Nat where
(==) Z Z = True
(==) (S x) (S y) = x == y
(==) _ _ = False
instance Ord Nat where
compare Z Z = EQ
compare Z _ = LT
compare _ Z = GT
compare (S x) (S y) = compare x y
instance Num Nat where
(+) Z n = n
(+) (S n) m = S (n + m)
(-) n Z = n
(-) (S n) (S m) = n - m
(-) _ _ = error $ "Cannot produce negative natural numbers."
(*) Z n = Z
(*) (S n) m = m+(n*m)
negate _ = error $ "Cannot produce negative natural numbers."
abs = id
signum Z = Z
signum (S _) = S Z
fromInteger x
| x<0 = error $ "Cannot produce negative natural numbers."
| otherwise = f x
where
f 0 = Z
f n = S (f (n-1))
instance Enum Nat where
succ = S
pred (S n) = n
pred Z = error $ "Cannot produce negative natural numbers."
toEnum 0 = Z
toEnum n = S (toEnum (n-1))
fromEnum Z = 0
fromEnum (S n) = (fromEnum n)+1
instance Real Nat where
toRational Z = 0
toRational (S n) = (toRational n)+1
instance Integral Nat where
quotRem x y = let (q,r) = quotRem (toInteger x) (toInteger y)
in (fromInteger q,fromInteger r)
toInteger = f 0
where
f n Z = n
f n (S m) = f (n+1) m
type N0 = Z
type N1 = S N0
type N2 = S N1
type N3 = S N2
type N4 = S N3
type N5 = S N4
type N6 = S N5
type N7 = S N6
type N8 = S N7
type N9 = S N8
type N10 = S N9
type N11 = S N10
type N12 = S N11
type N13 = S N12
type N14 = S N13
type N15 = S N14
type N16 = S N15
type N17 = S N16
type N18 = S N17
type N19 = S N18
type N20 = S N19
type N21 = S N20
type N22 = S N21
type N23 = S N22
type N24 = S N23
type N25 = S N24
type N26 = S N25
type N27 = S N26
type N28 = S N27
type N29 = S N28
type N30 = S N29
type N31 = S N30
type N32 = S N31
type N33 = S N32
type N34 = S N33
type N35 = S N34
type N36 = S N35
type N37 = S N36
type N38 = S N37
type N39 = S N38
type N40 = S N39
type N41 = S N40
type N42 = S N41
type N43 = S N42
type N44 = S N43
type N45 = S N44
type N46 = S N45
type N47 = S N46
type N48 = S N47
type N49 = S N48
type N50 = S N49
type N51 = S N50
type N52 = S N51
type N53 = S N52
type N54 = S N53
type N55 = S N54
type N56 = S N55
type N57 = S N56
type N58 = S N57
type N59 = S N58
type N60 = S N59
type N61 = S N60
type N62 = S N61
type N63 = S N62
type N64 = S N63
instance GEq Natural where
geq Zero Zero = Just Refl
geq (Succ x) (Succ y) = do
Refl <- geq x y
return Refl
geq _ _ = Nothing
instance GCompare Natural where
gcompare Zero Zero = GEQ
gcompare Zero _ = GLT
gcompare _ Zero = GGT
gcompare (Succ x) (Succ y) = case gcompare x y of
GEQ -> GEQ
GLT -> GLT
GGT -> GGT
instance GShow Natural where
gshowsPrec = showsPrec
class IsNatural n where
getNatural :: Natural n
instance IsNatural Z where
getNatural = Zero
instance IsNatural n => IsNatural (S n) where
getNatural = Succ getNatural
deriveIsNatural :: Natural n -> Dict (IsNatural n)
deriveIsNatural Zero = Dict
deriveIsNatural (Succ n) = case deriveIsNatural n of
Dict -> Dict
|
901d22b5924d19e89e4818dc2c1860c9bc18b5a677625b4c1203c9bfe0dc6965 | ejlilley/AbstractMusic | Scales.hs | # LANGUAGE GADTs ,
MultiParamTypeClasses #
MultiParamTypeClasses #-}
module Scales where
import Music (Scale(..),
AbstractPitch1(..), AbstractInt1(..),
AbstractPitch2(..), AbstractInt2(..),
Interval(..), Pitch(..), Transpose(..),
faInt, faPitch,
Name(..), Number(..), Quality(..), Accidental(..), Ficta(..))
import Shortcuts
import Util (rotate, rotateN)
data GenericScale where
GenericScale :: Scale s p i => s -> GenericScale
todo : represent / enforce scale length(s ) with type - level .
todo : make the basic scale type a list of * intervals * ( not pitches ) . e.g. = [ M2 , M2 , m2 , M2 , M2 , M2 , m2 ] etc .
ficToAcc Raise = sharpen
ficToAcc Neutral = id
ficToAcc Lower = flatten
completeScale s i = let c = if i >= 0
then scale s ++
map (transpose (AbstractInt2 Perf (Compound Unison))) c
else map (transpose (AbstractInt2 Perf (Negative (Compound Unison)))) (reverse (scale s)) ++
map (transpose (AbstractInt2 Perf (Negative (Compound Unison)))) c
in if i >= 0
then c
else (head (scale s)) : c
infiniteScale s = completeScale s 1
scaleDegree s (AbstractPitch1 deg fic) =
let i = fromEnum deg
index = abs i
note = (completeScale s i) !! index
in (ficToAcc fic) note
Ionian | Hypoionian | Aeolian | Hypoaeolian | Dorian | Phrygian | Lydian | Mixolydian | Hypodorian | Hypophrygian | Hypolydian | Hypomixolydian | Locrian | Hypolocrian
transposeScale orig base new = let offset = interval base new
in map (transpose offset) orig
Diatonic :
basicIonian = map (\n -> AbstractPitch2 n Na) [C .. ]
data Ionian = Ionian AbstractPitch2 deriving Show
type Major = Ionian
instance Scale Ionian AbstractPitch1 AbstractInt1 where
tonic (Ionian t) = t
scale s = take 7 $ transposeScale basicIonian (AbstractPitch2 C Na) (tonic s)
applyScale = scaleDegree
data Dorian = Dorian AbstractPitch2 deriving Show
instance Scale Dorian AbstractPitch1 AbstractInt1 where
tonic (Dorian t) = t
scale s = take 7 $ transposeScale (rotate basicIonian) (AbstractPitch2 D Na) (tonic s)
applyScale = scaleDegree
data Phrygian = Phrygian AbstractPitch2 deriving Show
instance Scale Phrygian AbstractPitch1 AbstractInt1 where
tonic (Phrygian t) = t
scale s = take 7 $ transposeScale ((rotateN 2) basicIonian) (AbstractPitch2 E Na) (tonic s)
applyScale = scaleDegree
data Lydian = Lydian AbstractPitch2 deriving Show
instance Scale Lydian AbstractPitch1 AbstractInt1 where
tonic (Lydian t) = t
scale s = take 7 $ transposeScale ((rotateN 3) basicIonian) (AbstractPitch2 F Na) (tonic s)
applyScale = scaleDegree
data Mixolydian = Mixolydian AbstractPitch2 deriving Show
instance Scale Mixolydian AbstractPitch1 AbstractInt1 where
tonic (Mixolydian t) = t
scale s = take 7 $ transposeScale ((rotateN 4) basicIonian) (AbstractPitch2 G Na) (tonic s)
applyScale = scaleDegree
data Aeolian = Aeolian AbstractPitch2 deriving Show
type Minor = Aeolian
instance Scale Aeolian AbstractPitch1 AbstractInt1 where
tonic (Aeolian t) = t
scale s = take 7 $ transposeScale ((rotateN 5) basicIonian) (AbstractPitch2 A Na) (tonic s)
applyScale = scaleDegree
data Locrian = Locrian AbstractPitch2 deriving Show
instance Scale Locrian AbstractPitch1 AbstractInt1 where
tonic (Locrian t) = t
scale s = take 7 $ transposeScale ((rotateN 6) basicIonian) (AbstractPitch2 B Na) (tonic s)
applyScale = scaleDegree
-- Melodic minor scales:
basicMelodicMinor = [AbstractPitch2 C Na,
AbstractPitch2 D Na,
AbstractPitch2 E flat,
AbstractPitch2 F Na,
AbstractPitch2 G Na,
AbstractPitch2 (Up A) Na,
AbstractPitch2 (Up B) Na] ++ map (transpose (AbstractInt2 Perf (Compound Unison))) basicMelodicMinor
data MelodicMinor = MelodicMinor AbstractPitch2 deriving Show
instance Scale MelodicMinor AbstractPitch1 AbstractInt1 where
tonic (MelodicMinor t) = t
scale s = take 7 $ transposeScale basicMelodicMinor (AbstractPitch2 C Na) (tonic s)
applyScale = scaleDegree
Harmonic major scales :
basicHarmonicMajor = [AbstractPitch2 C Na,
AbstractPitch2 D Na,
AbstractPitch2 E Na,
AbstractPitch2 F Na,
AbstractPitch2 G Na,
AbstractPitch2 (Up A) flat,
AbstractPitch2 (Up B) Na] ++ map (transpose (AbstractInt2 Perf (Compound Unison))) basicHarmonicMajor
Harmonic minor scales :
basicHarmonicMinor = [AbstractPitch2 C Na,
AbstractPitch2 D Na,
AbstractPitch2 E flat,
AbstractPitch2 F Na,
AbstractPitch2 G Na,
AbstractPitch2 (Up A) flat,
AbstractPitch2 (Up B) Na] ++ map (transpose (AbstractInt2 Perf (Compound Unison))) basicHarmonicMinor
data AlteredPhrygian = AlteredPhrygian AbstractPitch2 deriving Show
instance Scale AlteredPhrygian AbstractPitch1 AbstractInt1 where
tonic (AlteredPhrygian t) = t
scale s = take 7 $ transposeScale (rotateN 4 basicHarmonicMinor) (AbstractPitch2 G Na) (tonic s)
applyScale = scaleDegree
data HarmonicMinor = HarmonicMinor AbstractPitch2 deriving Show
instance Scale HarmonicMinor AbstractPitch1 AbstractInt1 where
tonic (HarmonicMinor t) = t
scale s = take 7 $ transposeScale basicHarmonicMinor (AbstractPitch2 C Na) (tonic s)
applyScale = scaleDegree
-- Double harmonic scales:
basicDoubleHarmonic = [AbstractPitch2 C Na,
AbstractPitch2 D flat,
AbstractPitch2 E Na,
AbstractPitch2 F Na,
AbstractPitch2 G Na,
AbstractPitch2 (Up A) flat,
AbstractPitch2 (Up B) Na] ++ map (transpose (AbstractInt2 Perf (Compound Unison))) basicDoubleHarmonic
major :: AbstractPitch2 -> Major
major n = Ionian n
minor :: AbstractPitch2 -> Minor
minor n = Aeolian (n .-^ octave)
harmonicminor :: AbstractPitch2 -> HarmonicMinor
harmonicminor n = HarmonicMinor n
melodicminor :: AbstractPitch2 -> MelodicMinor
melodicminor n = MelodicMinor n
chromaticScale p@(AbstractPitch2 n a)
| (n == B) || (n == E) || (a == sharp) = p:(chromaticScale (AbstractPitch2 (succ n) Na))
| otherwise = p:(chromaticScale (AbstractPitch2 n sharp))
-- Modal:
modeII = [ ( D , ) , ( E , Na ) , ( F , ) , ( G , ) , ( A , Na ) , ( B , Na ) , ( C , Na ) ]
-- modeIII
-- modeIV
-- modeV
-- modeVI
-- modeVII
-- modeVIII
's scales :
mode1 = [ 2,2,2,2,2 ]
mode2 = [ 1,2 , 1,2 , 1,2 , 1,2 ]
mode3 = [ 2,1,1 , 2,1,1 , 2,1,1 ]
mode4 = [ 1,1,3,1 , 1,1,3,1 ]
= [ 1,4,1 , 1,4,1 ]
= [ 2,2,1,1 , 2,2,1,1 ]
mode7 = [ 1,1,1,2,1 , 1,1,1,2,1 ]
-- (measured in semitones)
hexachord :: AbstractPitch2 -> [AbstractPitch2]
hexachord p = [p, p .+^ _M2, p .+^ _M3, p .+^ _P4, p .+^ _P5, p .+^ _M6]
:
data HexachordPrima = HexachordPrima deriving Show
instance Scale HexachordPrima AbstractPitch1 AbstractInt1 where
tonic HexachordPrima = g .-^ (3 *^ _P8)
scale s = hexachord (tonic s)
applyScale = scaleDegree
data HexachordSecunda = HexachordSecunda deriving Show
instance Scale HexachordSecunda AbstractPitch1 AbstractInt1 where
tonic s = c .-^ (2 *^ _P8)
scale s = hexachord (tonic s)
applyScale = scaleDegree
data HexachordTertia = HexachordTertia deriving Show
instance Scale HexachordTertia AbstractPitch1 AbstractInt1 where
tonic s = f .-^ (2 *^ _P8)
scale s = hexachord (tonic s)
applyScale = scaleDegree
data HexachordQuarta = HexachordQuarta deriving Show
instance Scale HexachordQuarta AbstractPitch1 AbstractInt1 where
tonic s = g .-^ (2 *^ _P8)
scale s = hexachord (tonic s)
applyScale = scaleDegree
data HexachordQuinta = HexachordQuinta deriving Show
instance Scale HexachordQuinta AbstractPitch1 AbstractInt1 where
tonic s = c .-^ _P8
scale s = hexachord (tonic s)
applyScale = scaleDegree
data HexachordSexta = HexachordSexta deriving Show
instance Scale HexachordSexta AbstractPitch1 AbstractInt1 where
tonic s = f .-^ _P8
scale s = hexachord (tonic s)
applyScale = scaleDegree
data HexachordSeptima = HexachordSeptima deriving Show
instance Scale HexachordSeptima AbstractPitch1 AbstractInt1 where
tonic s = g .-^ _P8
scale s = hexachord (tonic s)
applyScale = scaleDegree
| null | https://raw.githubusercontent.com/ejlilley/AbstractMusic/815ab33ee204dd3ebf29076bde330bfdf6938677/Scales.hs | haskell | Melodic minor scales:
Double harmonic scales:
Modal:
modeIII
modeIV
modeV
modeVI
modeVII
modeVIII
(measured in semitones) | # LANGUAGE GADTs ,
MultiParamTypeClasses #
MultiParamTypeClasses #-}
module Scales where
import Music (Scale(..),
AbstractPitch1(..), AbstractInt1(..),
AbstractPitch2(..), AbstractInt2(..),
Interval(..), Pitch(..), Transpose(..),
faInt, faPitch,
Name(..), Number(..), Quality(..), Accidental(..), Ficta(..))
import Shortcuts
import Util (rotate, rotateN)
data GenericScale where
GenericScale :: Scale s p i => s -> GenericScale
todo : represent / enforce scale length(s ) with type - level .
todo : make the basic scale type a list of * intervals * ( not pitches ) . e.g. = [ M2 , M2 , m2 , M2 , M2 , M2 , m2 ] etc .
ficToAcc Raise = sharpen
ficToAcc Neutral = id
ficToAcc Lower = flatten
completeScale s i = let c = if i >= 0
then scale s ++
map (transpose (AbstractInt2 Perf (Compound Unison))) c
else map (transpose (AbstractInt2 Perf (Negative (Compound Unison)))) (reverse (scale s)) ++
map (transpose (AbstractInt2 Perf (Negative (Compound Unison)))) c
in if i >= 0
then c
else (head (scale s)) : c
infiniteScale s = completeScale s 1
scaleDegree s (AbstractPitch1 deg fic) =
let i = fromEnum deg
index = abs i
note = (completeScale s i) !! index
in (ficToAcc fic) note
Ionian | Hypoionian | Aeolian | Hypoaeolian | Dorian | Phrygian | Lydian | Mixolydian | Hypodorian | Hypophrygian | Hypolydian | Hypomixolydian | Locrian | Hypolocrian
transposeScale orig base new = let offset = interval base new
in map (transpose offset) orig
Diatonic :
basicIonian = map (\n -> AbstractPitch2 n Na) [C .. ]
data Ionian = Ionian AbstractPitch2 deriving Show
type Major = Ionian
instance Scale Ionian AbstractPitch1 AbstractInt1 where
tonic (Ionian t) = t
scale s = take 7 $ transposeScale basicIonian (AbstractPitch2 C Na) (tonic s)
applyScale = scaleDegree
data Dorian = Dorian AbstractPitch2 deriving Show
instance Scale Dorian AbstractPitch1 AbstractInt1 where
tonic (Dorian t) = t
scale s = take 7 $ transposeScale (rotate basicIonian) (AbstractPitch2 D Na) (tonic s)
applyScale = scaleDegree
data Phrygian = Phrygian AbstractPitch2 deriving Show
instance Scale Phrygian AbstractPitch1 AbstractInt1 where
tonic (Phrygian t) = t
scale s = take 7 $ transposeScale ((rotateN 2) basicIonian) (AbstractPitch2 E Na) (tonic s)
applyScale = scaleDegree
data Lydian = Lydian AbstractPitch2 deriving Show
instance Scale Lydian AbstractPitch1 AbstractInt1 where
tonic (Lydian t) = t
scale s = take 7 $ transposeScale ((rotateN 3) basicIonian) (AbstractPitch2 F Na) (tonic s)
applyScale = scaleDegree
data Mixolydian = Mixolydian AbstractPitch2 deriving Show
instance Scale Mixolydian AbstractPitch1 AbstractInt1 where
tonic (Mixolydian t) = t
scale s = take 7 $ transposeScale ((rotateN 4) basicIonian) (AbstractPitch2 G Na) (tonic s)
applyScale = scaleDegree
data Aeolian = Aeolian AbstractPitch2 deriving Show
type Minor = Aeolian
instance Scale Aeolian AbstractPitch1 AbstractInt1 where
tonic (Aeolian t) = t
scale s = take 7 $ transposeScale ((rotateN 5) basicIonian) (AbstractPitch2 A Na) (tonic s)
applyScale = scaleDegree
data Locrian = Locrian AbstractPitch2 deriving Show
instance Scale Locrian AbstractPitch1 AbstractInt1 where
tonic (Locrian t) = t
scale s = take 7 $ transposeScale ((rotateN 6) basicIonian) (AbstractPitch2 B Na) (tonic s)
applyScale = scaleDegree
basicMelodicMinor = [AbstractPitch2 C Na,
AbstractPitch2 D Na,
AbstractPitch2 E flat,
AbstractPitch2 F Na,
AbstractPitch2 G Na,
AbstractPitch2 (Up A) Na,
AbstractPitch2 (Up B) Na] ++ map (transpose (AbstractInt2 Perf (Compound Unison))) basicMelodicMinor
data MelodicMinor = MelodicMinor AbstractPitch2 deriving Show
instance Scale MelodicMinor AbstractPitch1 AbstractInt1 where
tonic (MelodicMinor t) = t
scale s = take 7 $ transposeScale basicMelodicMinor (AbstractPitch2 C Na) (tonic s)
applyScale = scaleDegree
Harmonic major scales :
basicHarmonicMajor = [AbstractPitch2 C Na,
AbstractPitch2 D Na,
AbstractPitch2 E Na,
AbstractPitch2 F Na,
AbstractPitch2 G Na,
AbstractPitch2 (Up A) flat,
AbstractPitch2 (Up B) Na] ++ map (transpose (AbstractInt2 Perf (Compound Unison))) basicHarmonicMajor
Harmonic minor scales :
basicHarmonicMinor = [AbstractPitch2 C Na,
AbstractPitch2 D Na,
AbstractPitch2 E flat,
AbstractPitch2 F Na,
AbstractPitch2 G Na,
AbstractPitch2 (Up A) flat,
AbstractPitch2 (Up B) Na] ++ map (transpose (AbstractInt2 Perf (Compound Unison))) basicHarmonicMinor
data AlteredPhrygian = AlteredPhrygian AbstractPitch2 deriving Show
instance Scale AlteredPhrygian AbstractPitch1 AbstractInt1 where
tonic (AlteredPhrygian t) = t
scale s = take 7 $ transposeScale (rotateN 4 basicHarmonicMinor) (AbstractPitch2 G Na) (tonic s)
applyScale = scaleDegree
data HarmonicMinor = HarmonicMinor AbstractPitch2 deriving Show
instance Scale HarmonicMinor AbstractPitch1 AbstractInt1 where
tonic (HarmonicMinor t) = t
scale s = take 7 $ transposeScale basicHarmonicMinor (AbstractPitch2 C Na) (tonic s)
applyScale = scaleDegree
basicDoubleHarmonic = [AbstractPitch2 C Na,
AbstractPitch2 D flat,
AbstractPitch2 E Na,
AbstractPitch2 F Na,
AbstractPitch2 G Na,
AbstractPitch2 (Up A) flat,
AbstractPitch2 (Up B) Na] ++ map (transpose (AbstractInt2 Perf (Compound Unison))) basicDoubleHarmonic
major :: AbstractPitch2 -> Major
major n = Ionian n
minor :: AbstractPitch2 -> Minor
minor n = Aeolian (n .-^ octave)
harmonicminor :: AbstractPitch2 -> HarmonicMinor
harmonicminor n = HarmonicMinor n
melodicminor :: AbstractPitch2 -> MelodicMinor
melodicminor n = MelodicMinor n
chromaticScale p@(AbstractPitch2 n a)
| (n == B) || (n == E) || (a == sharp) = p:(chromaticScale (AbstractPitch2 (succ n) Na))
| otherwise = p:(chromaticScale (AbstractPitch2 n sharp))
modeII = [ ( D , ) , ( E , Na ) , ( F , ) , ( G , ) , ( A , Na ) , ( B , Na ) , ( C , Na ) ]
's scales :
mode1 = [ 2,2,2,2,2 ]
mode2 = [ 1,2 , 1,2 , 1,2 , 1,2 ]
mode3 = [ 2,1,1 , 2,1,1 , 2,1,1 ]
mode4 = [ 1,1,3,1 , 1,1,3,1 ]
= [ 1,4,1 , 1,4,1 ]
= [ 2,2,1,1 , 2,2,1,1 ]
mode7 = [ 1,1,1,2,1 , 1,1,1,2,1 ]
hexachord :: AbstractPitch2 -> [AbstractPitch2]
hexachord p = [p, p .+^ _M2, p .+^ _M3, p .+^ _P4, p .+^ _P5, p .+^ _M6]
:
data HexachordPrima = HexachordPrima deriving Show
instance Scale HexachordPrima AbstractPitch1 AbstractInt1 where
tonic HexachordPrima = g .-^ (3 *^ _P8)
scale s = hexachord (tonic s)
applyScale = scaleDegree
data HexachordSecunda = HexachordSecunda deriving Show
instance Scale HexachordSecunda AbstractPitch1 AbstractInt1 where
tonic s = c .-^ (2 *^ _P8)
scale s = hexachord (tonic s)
applyScale = scaleDegree
data HexachordTertia = HexachordTertia deriving Show
instance Scale HexachordTertia AbstractPitch1 AbstractInt1 where
tonic s = f .-^ (2 *^ _P8)
scale s = hexachord (tonic s)
applyScale = scaleDegree
data HexachordQuarta = HexachordQuarta deriving Show
instance Scale HexachordQuarta AbstractPitch1 AbstractInt1 where
tonic s = g .-^ (2 *^ _P8)
scale s = hexachord (tonic s)
applyScale = scaleDegree
data HexachordQuinta = HexachordQuinta deriving Show
instance Scale HexachordQuinta AbstractPitch1 AbstractInt1 where
tonic s = c .-^ _P8
scale s = hexachord (tonic s)
applyScale = scaleDegree
data HexachordSexta = HexachordSexta deriving Show
instance Scale HexachordSexta AbstractPitch1 AbstractInt1 where
tonic s = f .-^ _P8
scale s = hexachord (tonic s)
applyScale = scaleDegree
data HexachordSeptima = HexachordSeptima deriving Show
instance Scale HexachordSeptima AbstractPitch1 AbstractInt1 where
tonic s = g .-^ _P8
scale s = hexachord (tonic s)
applyScale = scaleDegree
|
4334c917df03600fe4bc5b69fa6d4753eecd9212c198584a1338f1cdf69c8635 | wireapp/wire-server | LoginCodeTimeout_user.hs | -- This file is part of the Wire Server implementation.
--
Copyright ( C ) 2022 Wire Swiss GmbH < >
--
-- This program is free software: you can redistribute it and/or modify it under
the terms of the GNU Affero General Public License as published by the Free
Software Foundation , either version 3 of the License , or ( at your option ) any
-- later version.
--
-- This program is distributed in the hope that it will be useful, but WITHOUT
-- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-- FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
-- details.
--
You should have received a copy of the GNU Affero General Public License along
-- with this program. If not, see </>.
module Test.Wire.API.Golden.Generated.LoginCodeTimeout_user where
import Data.Code (Timeout (Timeout))
import Data.Time (secondsToNominalDiffTime)
import Wire.API.User.Auth (LoginCodeTimeout (..))
testObject_LoginCodeTimeout_user_1 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_1 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime (-25.000000000000))}
testObject_LoginCodeTimeout_user_2 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_2 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime 20.000000000000)}
testObject_LoginCodeTimeout_user_3 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_3 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime 3.000000000000)}
testObject_LoginCodeTimeout_user_4 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_4 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime (-15.000000000000))}
testObject_LoginCodeTimeout_user_5 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_5 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime (-24.000000000000))}
testObject_LoginCodeTimeout_user_6 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_6 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime (-14.000000000000))}
testObject_LoginCodeTimeout_user_7 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_7 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime (-27.000000000000))}
testObject_LoginCodeTimeout_user_8 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_8 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime 12.000000000000)}
testObject_LoginCodeTimeout_user_9 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_9 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime 21.000000000000)}
testObject_LoginCodeTimeout_user_10 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_10 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime (-3.000000000000))}
testObject_LoginCodeTimeout_user_11 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_11 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime (-1.000000000000))}
testObject_LoginCodeTimeout_user_12 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_12 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime (-2.000000000000))}
testObject_LoginCodeTimeout_user_13 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_13 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime (-30.000000000000))}
testObject_LoginCodeTimeout_user_14 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_14 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime (-24.000000000000))}
testObject_LoginCodeTimeout_user_15 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_15 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime 6.000000000000)}
testObject_LoginCodeTimeout_user_16 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_16 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime 23.000000000000)}
testObject_LoginCodeTimeout_user_17 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_17 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime 29.000000000000)}
testObject_LoginCodeTimeout_user_18 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_18 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime 22.000000000000)}
testObject_LoginCodeTimeout_user_19 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_19 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime 7.000000000000)}
testObject_LoginCodeTimeout_user_20 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_20 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime (-5.000000000000))}
| null | https://raw.githubusercontent.com/wireapp/wire-server/c428355b7683b7b7722ea544eba314fc843ad8fa/libs/wire-api/test/golden/Test/Wire/API/Golden/Generated/LoginCodeTimeout_user.hs | haskell | This file is part of the Wire Server implementation.
This program is free software: you can redistribute it and/or modify it under
later version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
details.
with this program. If not, see </>. | Copyright ( C ) 2022 Wire Swiss GmbH < >
the terms of the GNU Affero General Public License as published by the Free
Software Foundation , either version 3 of the License , or ( at your option ) any
You should have received a copy of the GNU Affero General Public License along
module Test.Wire.API.Golden.Generated.LoginCodeTimeout_user where
import Data.Code (Timeout (Timeout))
import Data.Time (secondsToNominalDiffTime)
import Wire.API.User.Auth (LoginCodeTimeout (..))
testObject_LoginCodeTimeout_user_1 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_1 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime (-25.000000000000))}
testObject_LoginCodeTimeout_user_2 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_2 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime 20.000000000000)}
testObject_LoginCodeTimeout_user_3 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_3 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime 3.000000000000)}
testObject_LoginCodeTimeout_user_4 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_4 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime (-15.000000000000))}
testObject_LoginCodeTimeout_user_5 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_5 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime (-24.000000000000))}
testObject_LoginCodeTimeout_user_6 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_6 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime (-14.000000000000))}
testObject_LoginCodeTimeout_user_7 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_7 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime (-27.000000000000))}
testObject_LoginCodeTimeout_user_8 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_8 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime 12.000000000000)}
testObject_LoginCodeTimeout_user_9 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_9 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime 21.000000000000)}
testObject_LoginCodeTimeout_user_10 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_10 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime (-3.000000000000))}
testObject_LoginCodeTimeout_user_11 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_11 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime (-1.000000000000))}
testObject_LoginCodeTimeout_user_12 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_12 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime (-2.000000000000))}
testObject_LoginCodeTimeout_user_13 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_13 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime (-30.000000000000))}
testObject_LoginCodeTimeout_user_14 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_14 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime (-24.000000000000))}
testObject_LoginCodeTimeout_user_15 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_15 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime 6.000000000000)}
testObject_LoginCodeTimeout_user_16 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_16 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime 23.000000000000)}
testObject_LoginCodeTimeout_user_17 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_17 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime 29.000000000000)}
testObject_LoginCodeTimeout_user_18 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_18 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime 22.000000000000)}
testObject_LoginCodeTimeout_user_19 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_19 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime 7.000000000000)}
testObject_LoginCodeTimeout_user_20 :: LoginCodeTimeout
testObject_LoginCodeTimeout_user_20 =
LoginCodeTimeout {fromLoginCodeTimeout = Timeout (secondsToNominalDiffTime (-5.000000000000))}
|
fda824638f00f901fff07c3d841fe20e3d74f88bcb96a5618e0e8f8a7399cbe3 | pixlsus/registry.gimp.org_static | imageFormula.scm | Berengar W. Lehr ( )
Medical Physics Group , Department of Diagnostic and Interventional Radiology
Jena University Hospital , 07743 Jena , Thueringen , Germany
;
; This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
; (at your option) any later version.
;
; This program is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
; GNU General Public License for more details.
;
; If you use this script and/or like it the author would be happy to
; receive a postcard from you:
;
You should have received a copy of the GNU General Public License
; along with this program; if not, write to the Free Software
Foundation , Inc. , 675 Mass Ave , Cambridge , , USA .
(define (script-fu-formula formula filename size TextColor)
(let*
(
(sizeName (cond
((= size 0) "normalsize")
((= size 1) "LARGE")
((= size 2) "Huge")
))
(url (string-append "" sizeName "%5C!" formula ".gif"))
(image (car (file-uri-load FALSE url url)))
(drawable (car (gimp-image-get-active-layer image)))
(width (car (gimp-image-width image)))
(height (car (gimp-image-height image)))
(background (car (gimp-layer-new image width height RGBA-IMAGE "Background" 100 NORMAL-MODE)))
(AntiTextColor (list (- 255 (car TextColor)) (- 255 (cadr TextColor)) (- 255 (caddr TextColor))))
(gradientName (car (gimp-gradient-new "NeuerFarbverlauf")))
(activegradient (car (gimp-context-get-gradient)))
(filename (string-append filename ".png"))
)
(gimp-image-convert-rgb image)
(gimp-image-add-layer image background 1)
(gimp-edit-fill background WHITE-FILL)
(set! drawable (car (gimp-image-merge-visible-layers image EXPAND-AS-NECESSARY)))
(gimp-gradient-segment-set-left-color gradientName 0 TextColor 100)
(gimp-gradient-segment-set-right-color gradientName 0 AntiTextColor 100)
(gimp-context-set-gradient gradientName)
(plug-in-gradmap TRUE image drawable)
(gimp-context-set-gradient activegradient)
(plug-in-colortoalpha TRUE image drawable AntiTextColor)
(file-png-save-defaults TRUE image drawable filename filename)
)
)
Register the function with GIMP :
(script-fu-register
"script-fu-formula"
_"_Image Formula..."
_"Return an formula"
"Berengar W. Lehr"
"2010, Berengar W. Lehr / MPG@IDIR, UH Jena, Germany."
"22th April 2010"
""
SF-STRING "Formular" "E=mc^2"
SF-STRING "Filename" "<Filename>.png"
SF-ADJUSTMENT "Size (0-Normal|1-Large|2-Huge)" '(1 0 2 1 1 1 1)
SF-COLOR "Textcolor" '(0 0 0)
)
(script-fu-menu-register "script-fu-formula" "<Image>/Script-Fu/")
| null | https://raw.githubusercontent.com/pixlsus/registry.gimp.org_static/ffcde7400f402728373ff6579947c6ffe87d1a5e/registry.gimp.org/files/imageFormula.scm | scheme |
This program is free software; you can redistribute it and/or modify
either version 2 of the License , or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
If you use this script and/or like it the author would be happy to
receive a postcard from you:
along with this program; if not, write to the Free Software | Berengar W. Lehr ( )
Medical Physics Group , Department of Diagnostic and Interventional Radiology
Jena University Hospital , 07743 Jena , Thueringen , Germany
it under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
Foundation , Inc. , 675 Mass Ave , Cambridge , , USA .
(define (script-fu-formula formula filename size TextColor)
(let*
(
(sizeName (cond
((= size 0) "normalsize")
((= size 1) "LARGE")
((= size 2) "Huge")
))
(url (string-append "" sizeName "%5C!" formula ".gif"))
(image (car (file-uri-load FALSE url url)))
(drawable (car (gimp-image-get-active-layer image)))
(width (car (gimp-image-width image)))
(height (car (gimp-image-height image)))
(background (car (gimp-layer-new image width height RGBA-IMAGE "Background" 100 NORMAL-MODE)))
(AntiTextColor (list (- 255 (car TextColor)) (- 255 (cadr TextColor)) (- 255 (caddr TextColor))))
(gradientName (car (gimp-gradient-new "NeuerFarbverlauf")))
(activegradient (car (gimp-context-get-gradient)))
(filename (string-append filename ".png"))
)
(gimp-image-convert-rgb image)
(gimp-image-add-layer image background 1)
(gimp-edit-fill background WHITE-FILL)
(set! drawable (car (gimp-image-merge-visible-layers image EXPAND-AS-NECESSARY)))
(gimp-gradient-segment-set-left-color gradientName 0 TextColor 100)
(gimp-gradient-segment-set-right-color gradientName 0 AntiTextColor 100)
(gimp-context-set-gradient gradientName)
(plug-in-gradmap TRUE image drawable)
(gimp-context-set-gradient activegradient)
(plug-in-colortoalpha TRUE image drawable AntiTextColor)
(file-png-save-defaults TRUE image drawable filename filename)
)
)
Register the function with GIMP :
(script-fu-register
"script-fu-formula"
_"_Image Formula..."
_"Return an formula"
"Berengar W. Lehr"
"2010, Berengar W. Lehr / MPG@IDIR, UH Jena, Germany."
"22th April 2010"
""
SF-STRING "Formular" "E=mc^2"
SF-STRING "Filename" "<Filename>.png"
SF-ADJUSTMENT "Size (0-Normal|1-Large|2-Huge)" '(1 0 2 1 1 1 1)
SF-COLOR "Textcolor" '(0 0 0)
)
(script-fu-menu-register "script-fu-formula" "<Image>/Script-Fu/")
|
ca6fbc55dbc633d43fd86357cc03670aed906c971bb96557a02605f13a8006ee | tarides/dune-release | test_uri_helpers.ml | let uri =
let open Dune_release.Uri_helpers in
Alcotest.testable pp_uri equal_uri
let test_parse =
let make_test ~input ~expected =
let name = Printf.sprintf "parse: %s" input in
let test_fun () =
let actual = Dune_release.Uri_helpers.parse input in
Alcotest.(check (option uri)) name expected actual
in
(name, `Quick, test_fun)
in
[
make_test ~input:"scheme"
~expected:
(Some
{
scheme = Some "scheme";
domain = [ "com"; "domain" ];
path = [ "some"; "path" ];
});
make_test ~input:"noscheme.com/some/path"
~expected:
(Some
{
scheme = None;
domain = [ "com"; "noscheme" ];
path = [ "some"; "path" ];
});
make_test ~input:"nopath.com"
~expected:
(Some { scheme = None; domain = [ "com"; "nopath" ]; path = [] });
make_test ~input:":some/path"
~expected:
(Some
{
scheme = None;
domain = [ "com"; "git@github" ];
path = [ "some"; "path" ];
});
]
let suite = ("Uri_helpers", test_parse)
| null | https://raw.githubusercontent.com/tarides/dune-release/6bfed0f299b82c0931c78d4e216fd0efedff0673/tests/lib/test_uri_helpers.ml | ocaml | let uri =
let open Dune_release.Uri_helpers in
Alcotest.testable pp_uri equal_uri
let test_parse =
let make_test ~input ~expected =
let name = Printf.sprintf "parse: %s" input in
let test_fun () =
let actual = Dune_release.Uri_helpers.parse input in
Alcotest.(check (option uri)) name expected actual
in
(name, `Quick, test_fun)
in
[
make_test ~input:"scheme"
~expected:
(Some
{
scheme = Some "scheme";
domain = [ "com"; "domain" ];
path = [ "some"; "path" ];
});
make_test ~input:"noscheme.com/some/path"
~expected:
(Some
{
scheme = None;
domain = [ "com"; "noscheme" ];
path = [ "some"; "path" ];
});
make_test ~input:"nopath.com"
~expected:
(Some { scheme = None; domain = [ "com"; "nopath" ]; path = [] });
make_test ~input:":some/path"
~expected:
(Some
{
scheme = None;
domain = [ "com"; "git@github" ];
path = [ "some"; "path" ];
});
]
let suite = ("Uri_helpers", test_parse)
| |
0952b667f624c1223a23eda7bfcab19a5432f56ca9e41606159e78b7ed3fb00f | coccinelle/coccinelle | bytes.mli | external length : bytes -> int = "%bytes_length"
external get : bytes -> int -> char = "%bytes_safe_get"
external set : bytes -> int -> char -> unit = "%bytes_safe_set"
external create : int -> bytes = "caml_create_bytes"
val make : int -> char -> bytes
val init : int -> (int -> char) -> bytes
val empty : bytes
val copy : bytes -> bytes
val of_string : string -> bytes
val to_string : bytes -> string
val sub : bytes -> int -> int -> bytes
val sub_string : bytes -> int -> int -> string
val extend : bytes -> int -> int -> bytes
val fill : bytes -> int -> int -> char -> unit
val blit : bytes -> int -> bytes -> int -> int -> unit
val blit_string : string -> int -> bytes -> int -> int -> unit
val concat : bytes -> bytes list -> bytes
val cat : bytes -> bytes -> bytes
val iter : (char -> unit) -> bytes -> unit
val iteri : (int -> char -> unit) -> bytes -> unit
val map : (char -> char) -> bytes -> bytes
val mapi : (int -> char -> char) -> bytes -> bytes
val fold_left : ('a -> char -> 'a) -> 'a -> bytes -> 'a
val fold_right : (char -> 'a -> 'a) -> bytes -> 'a -> 'a
val for_all : (char -> bool) -> bytes -> bool
val exists : (char -> bool) -> bytes -> bool
val trim : bytes -> bytes
val escaped : bytes -> bytes
val index : bytes -> char -> int
val index_opt : bytes -> char -> int option
val rindex : bytes -> char -> int
val rindex_opt : bytes -> char -> int option
val index_from : bytes -> int -> char -> int
val index_from_opt : bytes -> int -> char -> int option
val rindex_from : bytes -> int -> char -> int
val rindex_from_opt : bytes -> int -> char -> int option
val contains : bytes -> char -> bool
val contains_from : bytes -> int -> char -> bool
val rcontains_from : bytes -> int -> char -> bool
val uppercase : bytes -> bytes
val lowercase : bytes -> bytes
val capitalize : bytes -> bytes
val uncapitalize : bytes -> bytes
val uppercase_ascii : bytes -> bytes
val lowercase_ascii : bytes -> bytes
val capitalize_ascii : bytes -> bytes
val uncapitalize_ascii : bytes -> bytes
type t = bytes
val compare : t -> t -> int
val equal : t -> t -> bool
val starts_with : prefix:bytes -> bytes -> bool
val ends_with : suffix:bytes -> bytes -> bool
val unsafe_to_string : bytes -> string
val unsafe_of_string : string -> bytes
val split_on_char : char -> bytes -> bytes list
val to_seq : t -> char Seq.t
val to_seqi : t -> (int * char) Seq.t
val of_seq : char Seq.t -> t
val get_utf_8_uchar : t -> int -> Uchar.utf_decode
val set_utf_8_uchar : t -> int -> Uchar.t -> int
val is_valid_utf_8 : t -> bool
val get_utf_16be_uchar : t -> int -> Uchar.utf_decode
val set_utf_16be_uchar : t -> int -> Uchar.t -> int
val is_valid_utf_16be : t -> bool
val get_utf_16le_uchar : t -> int -> Uchar.utf_decode
val set_utf_16le_uchar : t -> int -> Uchar.t -> int
val is_valid_utf_16le : t -> bool
val get_uint8 : bytes -> int -> int
val get_int8 : bytes -> int -> int
val get_uint16_ne : bytes -> int -> int
val get_uint16_be : bytes -> int -> int
val get_uint16_le : bytes -> int -> int
val get_int16_ne : bytes -> int -> int
val get_int16_be : bytes -> int -> int
val get_int16_le : bytes -> int -> int
val get_int32_ne : bytes -> int -> int32
val get_int32_be : bytes -> int -> int32
val get_int32_le : bytes -> int -> int32
val get_int64_ne : bytes -> int -> int64
val get_int64_be : bytes -> int -> int64
val get_int64_le : bytes -> int -> int64
val set_uint8 : bytes -> int -> int -> unit
val set_int8 : bytes -> int -> int -> unit
val set_uint16_ne : bytes -> int -> int -> unit
val set_uint16_be : bytes -> int -> int -> unit
val set_uint16_le : bytes -> int -> int -> unit
val set_int16_ne : bytes -> int -> int -> unit
val set_int16_be : bytes -> int -> int -> unit
val set_int16_le : bytes -> int -> int -> unit
val set_int32_ne : bytes -> int -> int32 -> unit
val set_int32_be : bytes -> int -> int32 -> unit
val set_int32_le : bytes -> int -> int32 -> unit
val set_int64_ne : bytes -> int -> int64 -> unit
val set_int64_be : bytes -> int -> int64 -> unit
val set_int64_le : bytes -> int -> int64 -> unit
external unsafe_get : bytes -> int -> char = "%bytes_unsafe_get"
external unsafe_set : bytes -> int -> char -> unit = "%bytes_unsafe_set"
external unsafe_blit :
bytes -> int -> bytes -> int -> int -> unit = "caml_blit_bytes"[@@noalloc ]
external unsafe_blit_string :
string -> int -> bytes -> int -> int -> unit = "caml_blit_string"[@@noalloc
]
external unsafe_fill :
bytes -> int -> int -> char -> unit = "caml_fill_bytes"[@@noalloc ]
| null | https://raw.githubusercontent.com/coccinelle/coccinelle/5448bb2bd03491ffec356bf7bd6ddcdbf4d36bc9/bundles/stdcompat/stdcompat-current/interfaces/4.14/bytes.mli | ocaml | external length : bytes -> int = "%bytes_length"
external get : bytes -> int -> char = "%bytes_safe_get"
external set : bytes -> int -> char -> unit = "%bytes_safe_set"
external create : int -> bytes = "caml_create_bytes"
val make : int -> char -> bytes
val init : int -> (int -> char) -> bytes
val empty : bytes
val copy : bytes -> bytes
val of_string : string -> bytes
val to_string : bytes -> string
val sub : bytes -> int -> int -> bytes
val sub_string : bytes -> int -> int -> string
val extend : bytes -> int -> int -> bytes
val fill : bytes -> int -> int -> char -> unit
val blit : bytes -> int -> bytes -> int -> int -> unit
val blit_string : string -> int -> bytes -> int -> int -> unit
val concat : bytes -> bytes list -> bytes
val cat : bytes -> bytes -> bytes
val iter : (char -> unit) -> bytes -> unit
val iteri : (int -> char -> unit) -> bytes -> unit
val map : (char -> char) -> bytes -> bytes
val mapi : (int -> char -> char) -> bytes -> bytes
val fold_left : ('a -> char -> 'a) -> 'a -> bytes -> 'a
val fold_right : (char -> 'a -> 'a) -> bytes -> 'a -> 'a
val for_all : (char -> bool) -> bytes -> bool
val exists : (char -> bool) -> bytes -> bool
val trim : bytes -> bytes
val escaped : bytes -> bytes
val index : bytes -> char -> int
val index_opt : bytes -> char -> int option
val rindex : bytes -> char -> int
val rindex_opt : bytes -> char -> int option
val index_from : bytes -> int -> char -> int
val index_from_opt : bytes -> int -> char -> int option
val rindex_from : bytes -> int -> char -> int
val rindex_from_opt : bytes -> int -> char -> int option
val contains : bytes -> char -> bool
val contains_from : bytes -> int -> char -> bool
val rcontains_from : bytes -> int -> char -> bool
val uppercase : bytes -> bytes
val lowercase : bytes -> bytes
val capitalize : bytes -> bytes
val uncapitalize : bytes -> bytes
val uppercase_ascii : bytes -> bytes
val lowercase_ascii : bytes -> bytes
val capitalize_ascii : bytes -> bytes
val uncapitalize_ascii : bytes -> bytes
type t = bytes
val compare : t -> t -> int
val equal : t -> t -> bool
val starts_with : prefix:bytes -> bytes -> bool
val ends_with : suffix:bytes -> bytes -> bool
val unsafe_to_string : bytes -> string
val unsafe_of_string : string -> bytes
val split_on_char : char -> bytes -> bytes list
val to_seq : t -> char Seq.t
val to_seqi : t -> (int * char) Seq.t
val of_seq : char Seq.t -> t
val get_utf_8_uchar : t -> int -> Uchar.utf_decode
val set_utf_8_uchar : t -> int -> Uchar.t -> int
val is_valid_utf_8 : t -> bool
val get_utf_16be_uchar : t -> int -> Uchar.utf_decode
val set_utf_16be_uchar : t -> int -> Uchar.t -> int
val is_valid_utf_16be : t -> bool
val get_utf_16le_uchar : t -> int -> Uchar.utf_decode
val set_utf_16le_uchar : t -> int -> Uchar.t -> int
val is_valid_utf_16le : t -> bool
val get_uint8 : bytes -> int -> int
val get_int8 : bytes -> int -> int
val get_uint16_ne : bytes -> int -> int
val get_uint16_be : bytes -> int -> int
val get_uint16_le : bytes -> int -> int
val get_int16_ne : bytes -> int -> int
val get_int16_be : bytes -> int -> int
val get_int16_le : bytes -> int -> int
val get_int32_ne : bytes -> int -> int32
val get_int32_be : bytes -> int -> int32
val get_int32_le : bytes -> int -> int32
val get_int64_ne : bytes -> int -> int64
val get_int64_be : bytes -> int -> int64
val get_int64_le : bytes -> int -> int64
val set_uint8 : bytes -> int -> int -> unit
val set_int8 : bytes -> int -> int -> unit
val set_uint16_ne : bytes -> int -> int -> unit
val set_uint16_be : bytes -> int -> int -> unit
val set_uint16_le : bytes -> int -> int -> unit
val set_int16_ne : bytes -> int -> int -> unit
val set_int16_be : bytes -> int -> int -> unit
val set_int16_le : bytes -> int -> int -> unit
val set_int32_ne : bytes -> int -> int32 -> unit
val set_int32_be : bytes -> int -> int32 -> unit
val set_int32_le : bytes -> int -> int32 -> unit
val set_int64_ne : bytes -> int -> int64 -> unit
val set_int64_be : bytes -> int -> int64 -> unit
val set_int64_le : bytes -> int -> int64 -> unit
external unsafe_get : bytes -> int -> char = "%bytes_unsafe_get"
external unsafe_set : bytes -> int -> char -> unit = "%bytes_unsafe_set"
external unsafe_blit :
bytes -> int -> bytes -> int -> int -> unit = "caml_blit_bytes"[@@noalloc ]
external unsafe_blit_string :
string -> int -> bytes -> int -> int -> unit = "caml_blit_string"[@@noalloc
]
external unsafe_fill :
bytes -> int -> int -> char -> unit = "caml_fill_bytes"[@@noalloc ]
| |
e630d785c7eb19022fd29d985fd6ec87464723b761e4880cf0910396dde054f5 | mzp/coq-for-ipad | taquin.ml | (***********************************************************************)
(* *)
MLTk , Tcl / Tk interface of Objective Caml
(* *)
, , and
projet Cristal , INRIA Rocquencourt
, Kyoto University RIMS
(* *)
Copyright 2002 Institut National de Recherche en Informatique et
en Automatique and Kyoto University . All rights reserved .
This file is distributed under the terms of the GNU Library
General Public License , with the special exception on linking
(* described in file LICENSE found in the Objective Caml source tree. *)
(* *)
(***********************************************************************)
$ I d : taquin.ml 9547 2010 - 01 - 22 12:48:24Z doligez $
open Tk;;
let découpe_image img nx ny =
let l = Imagephoto.width img
and h = Imagephoto.height img in
let tx = l / nx and ty = h / ny in
let pièces = ref [] in
for x = 0 to nx - 1 do
for y = 0 to ny - 1 do
let pièce = Imagephoto.create ~width:tx ~height:ty () in
Imagephoto.copy ~src:img
~src_area:(x * tx, y * ty, (x + 1) * tx, (y + 1) * ty) pièce;
pièces := pièce :: !pièces
done
done;
(tx, ty, List.tl !pièces);;
let remplir_taquin c nx ny tx ty pièces =
let trou_x = ref (nx - 1)
and trou_y = ref (ny - 1) in
let trou =
Canvas.create_rectangle
~x1:(!trou_x * tx) ~y1:(!trou_y * ty) ~x2:tx ~y2:ty c in
let taquin = Array.make_matrix nx ny trou in
let p = ref pièces in
for x = 0 to nx - 1 do
for y = 0 to ny - 1 do
match !p with
| [] -> ()
| pièce :: reste ->
taquin.(x).(y) <-
Canvas.create_image
~x:(x * tx) ~y:(y * ty)
~image:pièce ~anchor:`Nw ~tags:["pièce"] c;
p := reste
done
done;
let déplacer x y =
let pièce = taquin.(x).(y) in
Canvas.coords_set c pièce
~xys:[!trou_x * tx, !trou_y * ty];
Canvas.coords_set c trou
~xys:[x * tx, y * ty; tx, ty];
taquin.(!trou_x).(!trou_y) <- pièce;
taquin.(x).(y) <- trou;
trou_x := x; trou_y := y in
let jouer ei =
let x = ei.ev_MouseX / tx and y = ei.ev_MouseY / ty in
if x = !trou_x && (y = !trou_y - 1 || y = !trou_y + 1)
|| y = !trou_y && (x = !trou_x - 1 || x = !trou_x + 1)
then déplacer x y in
Canvas.bind ~events:[`ButtonPress]
~fields:[`MouseX; `MouseY] ~action:jouer c (`Tag "pièce");;
let rec permutation = function
| [] -> []
| l -> let n = Random.int (List.length l) in
let (élément, reste) = partage l n in
élément :: permutation reste
and partage l n =
match l with
| [] -> failwith "partage"
| tête :: reste ->
if n = 0 then (tête, reste) else
let (élément, reste') = partage reste (n - 1) in
(élément, tête :: reste');;
let create_filled_text parent lines =
let lnum = List.length lines
and lwidth =
List.fold_right
(fun line max ->
let l = String.length line in
if l > max then l else max)
lines 1 in
let txtw = Text.create ~width:lwidth ~height:lnum parent in
List.iter
(fun line ->
Text.insert ~index:(`End, []) ~text:line txtw;
Text.insert ~index:(`End, []) ~text:"\n" txtw)
lines;
txtw;;
let give_help parent lines () =
let help_window = Toplevel.create parent in
Wm.title_set help_window "Help";
let help_frame = Frame.create help_window in
let help_txtw = create_filled_text help_frame lines in
let quit_help () = destroy help_window in
let ok_button = Button.create ~text:"Ok" ~command:quit_help help_frame in
pack ~side:`Bottom [help_txtw];
pack ~side:`Bottom [ok_button ];
pack [help_frame];;
let taquin nom_fichier nx ny =
let fp = openTk () in
Wm.title_set fp "Taquin";
let img = Imagephoto.create ~file:nom_fichier () in
let c =
Canvas.create ~background:`Black
~width:(Imagephoto.width img)
~height:(Imagephoto.height img) fp in
let (tx, ty, pièces) = découpe_image img nx ny in
remplir_taquin c nx ny tx ty (permutation pièces);
pack [c];
let quit = Button.create ~text:"Quit" ~command:closeTk fp in
let help_lines =
["Pour jouer, cliquer sur une des pièces";
"entourant le trou";
"";
"To play, click on a part around the hole"] in
let help =
Button.create ~text:"Help" ~command:(give_help fp help_lines) fp in
pack ~side:`Left ~fill:`X [quit] ;
pack ~side:`Left ~fill:`X [help] ;
mainLoop ();;
if !Sys.interactive then () else
begin taquin "Lambda2.back.gif" 4 4; exit 0 end;;
| null | https://raw.githubusercontent.com/mzp/coq-for-ipad/4fb3711723e2581a170ffd734e936f210086396e/src/ocaml-3.12.0/otherlibs/labltk/examples_labltk/taquin.ml | ocaml | *********************************************************************
described in file LICENSE found in the Objective Caml source tree.
********************************************************************* | MLTk , Tcl / Tk interface of Objective Caml
, , and
projet Cristal , INRIA Rocquencourt
, Kyoto University RIMS
Copyright 2002 Institut National de Recherche en Informatique et
en Automatique and Kyoto University . All rights reserved .
This file is distributed under the terms of the GNU Library
General Public License , with the special exception on linking
$ I d : taquin.ml 9547 2010 - 01 - 22 12:48:24Z doligez $
open Tk;;
let découpe_image img nx ny =
let l = Imagephoto.width img
and h = Imagephoto.height img in
let tx = l / nx and ty = h / ny in
let pièces = ref [] in
for x = 0 to nx - 1 do
for y = 0 to ny - 1 do
let pièce = Imagephoto.create ~width:tx ~height:ty () in
Imagephoto.copy ~src:img
~src_area:(x * tx, y * ty, (x + 1) * tx, (y + 1) * ty) pièce;
pièces := pièce :: !pièces
done
done;
(tx, ty, List.tl !pièces);;
let remplir_taquin c nx ny tx ty pièces =
let trou_x = ref (nx - 1)
and trou_y = ref (ny - 1) in
let trou =
Canvas.create_rectangle
~x1:(!trou_x * tx) ~y1:(!trou_y * ty) ~x2:tx ~y2:ty c in
let taquin = Array.make_matrix nx ny trou in
let p = ref pièces in
for x = 0 to nx - 1 do
for y = 0 to ny - 1 do
match !p with
| [] -> ()
| pièce :: reste ->
taquin.(x).(y) <-
Canvas.create_image
~x:(x * tx) ~y:(y * ty)
~image:pièce ~anchor:`Nw ~tags:["pièce"] c;
p := reste
done
done;
let déplacer x y =
let pièce = taquin.(x).(y) in
Canvas.coords_set c pièce
~xys:[!trou_x * tx, !trou_y * ty];
Canvas.coords_set c trou
~xys:[x * tx, y * ty; tx, ty];
taquin.(!trou_x).(!trou_y) <- pièce;
taquin.(x).(y) <- trou;
trou_x := x; trou_y := y in
let jouer ei =
let x = ei.ev_MouseX / tx and y = ei.ev_MouseY / ty in
if x = !trou_x && (y = !trou_y - 1 || y = !trou_y + 1)
|| y = !trou_y && (x = !trou_x - 1 || x = !trou_x + 1)
then déplacer x y in
Canvas.bind ~events:[`ButtonPress]
~fields:[`MouseX; `MouseY] ~action:jouer c (`Tag "pièce");;
let rec permutation = function
| [] -> []
| l -> let n = Random.int (List.length l) in
let (élément, reste) = partage l n in
élément :: permutation reste
and partage l n =
match l with
| [] -> failwith "partage"
| tête :: reste ->
if n = 0 then (tête, reste) else
let (élément, reste') = partage reste (n - 1) in
(élément, tête :: reste');;
let create_filled_text parent lines =
let lnum = List.length lines
and lwidth =
List.fold_right
(fun line max ->
let l = String.length line in
if l > max then l else max)
lines 1 in
let txtw = Text.create ~width:lwidth ~height:lnum parent in
List.iter
(fun line ->
Text.insert ~index:(`End, []) ~text:line txtw;
Text.insert ~index:(`End, []) ~text:"\n" txtw)
lines;
txtw;;
let give_help parent lines () =
let help_window = Toplevel.create parent in
Wm.title_set help_window "Help";
let help_frame = Frame.create help_window in
let help_txtw = create_filled_text help_frame lines in
let quit_help () = destroy help_window in
let ok_button = Button.create ~text:"Ok" ~command:quit_help help_frame in
pack ~side:`Bottom [help_txtw];
pack ~side:`Bottom [ok_button ];
pack [help_frame];;
let taquin nom_fichier nx ny =
let fp = openTk () in
Wm.title_set fp "Taquin";
let img = Imagephoto.create ~file:nom_fichier () in
let c =
Canvas.create ~background:`Black
~width:(Imagephoto.width img)
~height:(Imagephoto.height img) fp in
let (tx, ty, pièces) = découpe_image img nx ny in
remplir_taquin c nx ny tx ty (permutation pièces);
pack [c];
let quit = Button.create ~text:"Quit" ~command:closeTk fp in
let help_lines =
["Pour jouer, cliquer sur une des pièces";
"entourant le trou";
"";
"To play, click on a part around the hole"] in
let help =
Button.create ~text:"Help" ~command:(give_help fp help_lines) fp in
pack ~side:`Left ~fill:`X [quit] ;
pack ~side:`Left ~fill:`X [help] ;
mainLoop ();;
if !Sys.interactive then () else
begin taquin "Lambda2.back.gif" 4 4; exit 0 end;;
|
17f7a9f22550e67fd48befc1b8e85f48a2d14089123d09a9e5c38e061751251f | may-liu/qtalk | http_add_muc_user.erl | %% Feel free to use, reuse and abuse the code in this file.
-module(http_add_muc_user).
-export([init/3]).
-export([handle/2]).
-export([terminate/3]).
-include("logger.hrl").
-include("http_req.hrl").
-include("ejb_http_server.hrl").
init(_Transport, Req, []) ->
{ok, Req, undefined}.
handle(Req, State) ->
{Method, _} = cowboy_req:method(Req),
case Method of
<<"GET">> ->
{Host,_} = cowboy_req:host(Req),
{ok, Req1} = get_echo(Method,Host,Req),
{ok, Req1, State};
<<"POST">> ->
HasBody = cowboy_req:has_body(Req),
{ok, Req1} = post_echo(Method, HasBody, Req),
{ok, Req1, State};
_ ->
{ok,Req1} = echo(undefined, Req),
{ok, Req1, State}
end.
get_echo(<<"GET">>,_,Req) ->
cowboy_req:reply(200, [
{<<"content-type">>, <<"text/json; charset=utf-8">>}
], <<"No GET method">>, Req).
post_echo(<<"POST">>, true, Req) ->
{ok, Body, _} = cowboy_req:body(Req),
Ret =
case iplimit_util:check_muc_ip_limit(Req,Body) of
true ->
http_create_muc(Body);
_ ->
http_utils:gen_result(false, <<"3">>, <<"">>,<<"ip is limited">>)
end,
cowboy_req:reply(200, [ {<<"content-type">>, <<"text/json; charset=utf-8">>}], Ret, Req);
post_echo(<<"POST">>, false, Req) ->
cowboy_req:reply(400, [], <<"Missing Post body.">>, Req);
post_echo(_, _, Req) ->
cowboy_req:reply(405, Req).
echo(undefined, Req) ->
cowboy_req:reply(400, [], <<"Missing parameter.">>, Req);
echo(Echo, Req) ->
cowboy_req:reply(200, [
{<<"content-type">>, <<"text/json; charset=utf-8">>}
], Echo, Req).
terminate(_Reason, _Req, _State) ->
ok.
http_create_muc(Body) ->
Url1 =
case catch ets:lookup(ejabberd_config,<<"http_server">>) of
[Http_server] when is_record(Http_server,ejabberd_config) ->
Http_server#ejabberd_config.val;
_ ->
":10050/"
end,
Url = Url1 ++ "add_muc_user",
Header = [],
Type = "application/json",
HTTPOptions = [],
Options = [],
case http_client:http_post(Url,Header,Type,Body,HTTPOptions,Options) of
{ok, {_Status,_Headers, Rslt}} ->
Rslt;
_ ->
http_utils:gen_result(false, <<"1">>, <<"">>,<<"create_muc failed">>)
end.
| null | https://raw.githubusercontent.com/may-liu/qtalk/f5431e5a7123975e9656e7ab239e674ce33713cd/qtalk_opensource/scripts/ejb_http_server/src/http_add_muc_user.erl | erlang | Feel free to use, reuse and abuse the code in this file. |
-module(http_add_muc_user).
-export([init/3]).
-export([handle/2]).
-export([terminate/3]).
-include("logger.hrl").
-include("http_req.hrl").
-include("ejb_http_server.hrl").
init(_Transport, Req, []) ->
{ok, Req, undefined}.
handle(Req, State) ->
{Method, _} = cowboy_req:method(Req),
case Method of
<<"GET">> ->
{Host,_} = cowboy_req:host(Req),
{ok, Req1} = get_echo(Method,Host,Req),
{ok, Req1, State};
<<"POST">> ->
HasBody = cowboy_req:has_body(Req),
{ok, Req1} = post_echo(Method, HasBody, Req),
{ok, Req1, State};
_ ->
{ok,Req1} = echo(undefined, Req),
{ok, Req1, State}
end.
get_echo(<<"GET">>,_,Req) ->
cowboy_req:reply(200, [
{<<"content-type">>, <<"text/json; charset=utf-8">>}
], <<"No GET method">>, Req).
post_echo(<<"POST">>, true, Req) ->
{ok, Body, _} = cowboy_req:body(Req),
Ret =
case iplimit_util:check_muc_ip_limit(Req,Body) of
true ->
http_create_muc(Body);
_ ->
http_utils:gen_result(false, <<"3">>, <<"">>,<<"ip is limited">>)
end,
cowboy_req:reply(200, [ {<<"content-type">>, <<"text/json; charset=utf-8">>}], Ret, Req);
post_echo(<<"POST">>, false, Req) ->
cowboy_req:reply(400, [], <<"Missing Post body.">>, Req);
post_echo(_, _, Req) ->
cowboy_req:reply(405, Req).
echo(undefined, Req) ->
cowboy_req:reply(400, [], <<"Missing parameter.">>, Req);
echo(Echo, Req) ->
cowboy_req:reply(200, [
{<<"content-type">>, <<"text/json; charset=utf-8">>}
], Echo, Req).
terminate(_Reason, _Req, _State) ->
ok.
http_create_muc(Body) ->
Url1 =
case catch ets:lookup(ejabberd_config,<<"http_server">>) of
[Http_server] when is_record(Http_server,ejabberd_config) ->
Http_server#ejabberd_config.val;
_ ->
":10050/"
end,
Url = Url1 ++ "add_muc_user",
Header = [],
Type = "application/json",
HTTPOptions = [],
Options = [],
case http_client:http_post(Url,Header,Type,Body,HTTPOptions,Options) of
{ok, {_Status,_Headers, Rslt}} ->
Rslt;
_ ->
http_utils:gen_result(false, <<"1">>, <<"">>,<<"create_muc failed">>)
end.
|
3f253c683e34125c1c01239715773c31690e607e11f5ef9db88b8ed1a8e069a6 | UU-ComputerScience/uhc | t2.hs | module Main where
ids :: [forall a . a -> a]
ids = []
h1 = (\x -> x) : ids
h2 :: [forall a . a -> a]
h2 = (\x -> x) : ids
h3 = id ids
main = return ()
| null | https://raw.githubusercontent.com/UU-ComputerScience/uhc/f2b94a90d26e2093d84044b3832a9a3e3c36b129/EHC/test/lucilia/t2.hs | haskell | module Main where
ids :: [forall a . a -> a]
ids = []
h1 = (\x -> x) : ids
h2 :: [forall a . a -> a]
h2 = (\x -> x) : ids
h3 = id ids
main = return ()
| |
ba562e507cb8250be5ea19e5f2afb70206ff093ef0269979bbf722592e74dc99 | realworldocaml/book | action.mli | (** Actions defined in dune files.
All constructors correspond to actions the user may write in dune files.
Eventually, these are all desugared into [Action.t], which are actions
executed by the build system. *)
open Stdune
open Dune_sexp
module Action_plugin : sig
val syntax : Syntax.t
end
module Diff : sig
module Mode : sig
type t =
| Binary (** no diffing, just raw comparison *)
| Text (** diffing after newline normalization *)
end
type ('path, 'target) t =
{ optional : bool
; mode : Mode.t
; file1 : 'path
; file2 : 'target
}
val map : ('p, 't) t -> path:('p -> 'x) -> target:('t -> 'y) -> ('x, 'y) t
val decode :
'path Decoder.t
-> 'target Decoder.t
-> optional:bool
-> ('path, 'target) t Decoder.t
val decode_binary :
'path Decoder.t -> 'target Decoder.t -> ('path, 'target) t Decoder.t
end
module Outputs : sig
type t =
| Stdout
| Stderr
| Outputs (** Both Stdout and Stderr *)
val to_string : t -> string
end
module Inputs : sig
type t = Stdin
val to_string : t -> string
end
module File_perm : sig
* File mode , for when creating files . We only allow what takes into
account when commands .
account when memoizing commands. *)
type t =
| Normal
| Executable
val suffix : t -> string
val to_unix_perm : t -> int
end
type t =
| Run of String_with_vars.t * String_with_vars.t list
| With_accepted_exit_codes of int Predicate_lang.t * t
| Dynamic_run of String_with_vars.t * String_with_vars.t list
| Chdir of String_with_vars.t * t
| Setenv of String_with_vars.t * String_with_vars.t * t
It 's not possible to use a build String_with_vars.t here since jbuild
supports redirecting to /dev / null . In [ dune ] files this is replaced with
% { null }
supports redirecting to /dev/null. In [dune] files this is replaced with
%{null} *)
| Redirect_out of Outputs.t * String_with_vars.t * File_perm.t * t
| Redirect_in of Inputs.t * String_with_vars.t * t
| Ignore of Outputs.t * t
| Progn of t list
| Echo of String_with_vars.t list
| Cat of String_with_vars.t list
| Copy of String_with_vars.t * String_with_vars.t
| Symlink of String_with_vars.t * String_with_vars.t
| Copy_and_add_line_directive of String_with_vars.t * String_with_vars.t
| System of String_with_vars.t
| Bash of String_with_vars.t
| Write_file of String_with_vars.t * File_perm.t * String_with_vars.t
| Mkdir of String_with_vars.t
| Diff of (String_with_vars.t, String_with_vars.t) Diff.t
| No_infer of t
| Pipe of Outputs.t * t list
| Cram of String_with_vars.t
include Conv.S with type t := t
(** Raises User_error on invalid action. *)
val validate : loc:Loc.t -> t -> unit
val compare_no_locs : t -> t -> Ordering.t
val to_dyn : t -> Dyn.t
val remove_locs : t -> t
val equal : t -> t -> bool
val chdir : String_with_vars.t -> t -> t
val run : String_with_vars.t -> String_with_vars.t list -> t
| null | https://raw.githubusercontent.com/realworldocaml/book/d822fd065f19dbb6324bf83e0143bc73fd77dbf9/duniverse/dune_/src/dune_lang/action.mli | ocaml | * Actions defined in dune files.
All constructors correspond to actions the user may write in dune files.
Eventually, these are all desugared into [Action.t], which are actions
executed by the build system.
* no diffing, just raw comparison
* diffing after newline normalization
* Both Stdout and Stderr
* Raises User_error on invalid action. |
open Stdune
open Dune_sexp
module Action_plugin : sig
val syntax : Syntax.t
end
module Diff : sig
module Mode : sig
type t =
end
type ('path, 'target) t =
{ optional : bool
; mode : Mode.t
; file1 : 'path
; file2 : 'target
}
val map : ('p, 't) t -> path:('p -> 'x) -> target:('t -> 'y) -> ('x, 'y) t
val decode :
'path Decoder.t
-> 'target Decoder.t
-> optional:bool
-> ('path, 'target) t Decoder.t
val decode_binary :
'path Decoder.t -> 'target Decoder.t -> ('path, 'target) t Decoder.t
end
module Outputs : sig
type t =
| Stdout
| Stderr
val to_string : t -> string
end
module Inputs : sig
type t = Stdin
val to_string : t -> string
end
module File_perm : sig
* File mode , for when creating files . We only allow what takes into
account when commands .
account when memoizing commands. *)
type t =
| Normal
| Executable
val suffix : t -> string
val to_unix_perm : t -> int
end
type t =
| Run of String_with_vars.t * String_with_vars.t list
| With_accepted_exit_codes of int Predicate_lang.t * t
| Dynamic_run of String_with_vars.t * String_with_vars.t list
| Chdir of String_with_vars.t * t
| Setenv of String_with_vars.t * String_with_vars.t * t
It 's not possible to use a build String_with_vars.t here since jbuild
supports redirecting to /dev / null . In [ dune ] files this is replaced with
% { null }
supports redirecting to /dev/null. In [dune] files this is replaced with
%{null} *)
| Redirect_out of Outputs.t * String_with_vars.t * File_perm.t * t
| Redirect_in of Inputs.t * String_with_vars.t * t
| Ignore of Outputs.t * t
| Progn of t list
| Echo of String_with_vars.t list
| Cat of String_with_vars.t list
| Copy of String_with_vars.t * String_with_vars.t
| Symlink of String_with_vars.t * String_with_vars.t
| Copy_and_add_line_directive of String_with_vars.t * String_with_vars.t
| System of String_with_vars.t
| Bash of String_with_vars.t
| Write_file of String_with_vars.t * File_perm.t * String_with_vars.t
| Mkdir of String_with_vars.t
| Diff of (String_with_vars.t, String_with_vars.t) Diff.t
| No_infer of t
| Pipe of Outputs.t * t list
| Cram of String_with_vars.t
include Conv.S with type t := t
val validate : loc:Loc.t -> t -> unit
val compare_no_locs : t -> t -> Ordering.t
val to_dyn : t -> Dyn.t
val remove_locs : t -> t
val equal : t -> t -> bool
val chdir : String_with_vars.t -> t -> t
val run : String_with_vars.t -> String_with_vars.t list -> t
|
cb4b92e7e21f947917d25fb0065d59e8c9f7b24260e6ae462137ff83fe338848 | LeventErkok/hArduino | Setup.hs | -----------------------------------------------------------------------------
-- |
-- Module : Main
Copyright : ( c )
-- License : BSD3
-- Maintainer :
-- Stability : experimental
--
Setup module for the hArduino library
-----------------------------------------------------------------------------
# OPTIONS_GHC -Wall #
module Main(main) where
import Distribution.Simple
main :: IO ()
main = defaultMain
| null | https://raw.githubusercontent.com/LeventErkok/hArduino/ee04988ad9ef3d4384d7ce7a8670518ce8b0a34c/Setup.hs | haskell | ---------------------------------------------------------------------------
|
Module : Main
License : BSD3
Maintainer :
Stability : experimental
--------------------------------------------------------------------------- | Copyright : ( c )
Setup module for the hArduino library
# OPTIONS_GHC -Wall #
module Main(main) where
import Distribution.Simple
main :: IO ()
main = defaultMain
|
5c32ae59b4d739747294968ce2f44fc0ea4b221d69658a7909f07f9af30b3e8c | clojurewerkz/cassaforte | types.clj | (ns clojurewerkz.cassaforte.query.types
(:import [com.datastax.driver.core TupleType DataType ProtocolVersion CodecRegistry]))
;;
;; Types
;;
(def primitive-types
{:ascii (DataType/ascii)
:bigint (DataType/bigint)
:blob (DataType/blob)
:boolean (DataType/cboolean)
:counter (DataType/counter)
:decimal (DataType/decimal)
:double (DataType/cdouble)
:float (DataType/cfloat)
:inet (DataType/inet)
:int (DataType/cint)
:text (DataType/text)
:timestamp (DataType/timestamp)
:uuid (DataType/uuid)
:varchar (DataType/varchar)
:varint (DataType/varint)
:timeuuid (DataType/timeuuid)})
(defn resolve-primitive-type
[type-or-name]
(if (keyword? type-or-name)
(if-let [res (get primitive-types type-or-name)]
res
(throw (IllegalArgumentException. (str "Column name "
(name type-or-name)
" was not found, pick one of ("
(clojure.string/join "," (keys primitive-types))
")"))))
type-or-name))
(defn list-type
[primitive-type]
(DataType/list (get primitive-types primitive-type)))
(defn set-type
[primitive-type]
(DataType/set (get primitive-types primitive-type)))
(defn map-type
[key-type value-type]
(DataType/map (get primitive-types key-type)
(get primitive-types value-type)))
FIXME should be using cluster instance and cluster.metadata.newTupleType instead
(defn tuple-of
[^ProtocolVersion protocol-version types values]
(.newValue (TupleType/of protocol-version CodecRegistry/DEFAULT_INSTANCE (into-array (map #(get primitive-types %) types)))
(object-array values)))
| null | https://raw.githubusercontent.com/clojurewerkz/cassaforte/bd0b3ff44c5d7f993798270032aa41be0e8209c2/src/clojure/clojurewerkz/cassaforte/query/types.clj | clojure |
Types
| (ns clojurewerkz.cassaforte.query.types
(:import [com.datastax.driver.core TupleType DataType ProtocolVersion CodecRegistry]))
(def primitive-types
{:ascii (DataType/ascii)
:bigint (DataType/bigint)
:blob (DataType/blob)
:boolean (DataType/cboolean)
:counter (DataType/counter)
:decimal (DataType/decimal)
:double (DataType/cdouble)
:float (DataType/cfloat)
:inet (DataType/inet)
:int (DataType/cint)
:text (DataType/text)
:timestamp (DataType/timestamp)
:uuid (DataType/uuid)
:varchar (DataType/varchar)
:varint (DataType/varint)
:timeuuid (DataType/timeuuid)})
(defn resolve-primitive-type
[type-or-name]
(if (keyword? type-or-name)
(if-let [res (get primitive-types type-or-name)]
res
(throw (IllegalArgumentException. (str "Column name "
(name type-or-name)
" was not found, pick one of ("
(clojure.string/join "," (keys primitive-types))
")"))))
type-or-name))
(defn list-type
[primitive-type]
(DataType/list (get primitive-types primitive-type)))
(defn set-type
[primitive-type]
(DataType/set (get primitive-types primitive-type)))
(defn map-type
[key-type value-type]
(DataType/map (get primitive-types key-type)
(get primitive-types value-type)))
FIXME should be using cluster instance and cluster.metadata.newTupleType instead
(defn tuple-of
[^ProtocolVersion protocol-version types values]
(.newValue (TupleType/of protocol-version CodecRegistry/DEFAULT_INSTANCE (into-array (map #(get primitive-types %) types)))
(object-array values)))
|
2b94c1a341d091155e96a107de8037ddaf0406739da641c51356d3854a6fdee8 | clj-easy/graal-config | core.clj | (ns example.core
(:require
[io.pedestal.log :as log])
(:gen-class))
(defn -main
[& _args]
(log/error :in 'my-fn :message "this is a message")
(log/error :hello "world")
(log/info :hello "world"))
| null | https://raw.githubusercontent.com/clj-easy/graal-config/2462e9f730c7a283796694c4e7786093c1192016/config/org.slf4j/slf4j-simple/example/src/example/core.clj | clojure | (ns example.core
(:require
[io.pedestal.log :as log])
(:gen-class))
(defn -main
[& _args]
(log/error :in 'my-fn :message "this is a message")
(log/error :hello "world")
(log/info :hello "world"))
| |
5c04821fd8b06bfce5c4b2d561a71289d7fb65553994fd4af8ea47a6998807ee | iamaleksey/common_lib | common_lib.erl | Copyright ( C ) 2009 ,
%%% All rights reserved.
%%%
%%% Redistribution and use in source and binary forms, with or without
%%% modification, are permitted provided that the following conditions are met:
%%%
%%% o Redistributions of source code must retain the above copyright notice,
%%% this list of conditions and the following disclaimer.
%%%
%%% o Redistributions in binary form must reproduce the above copyright notice,
%%% this list of conditions and the following disclaimer in the documentation
%%% and/or other materials provided with the distribution.
%%%
%%% o Neither the name of ERLANG TRAINING AND CONSULTING nor the names of its
%%% contributors may be used to endorse or promote products derived from this
%%% software without specific prior written permission.
%%%
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS "
%%% AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
%%% CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
%%% SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN
%%% CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
%%% ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
%%% POSSIBILITY OF SUCH DAMAGE.
-module(common_lib).
-behaviour(application).
START / STOP EXPORTS
-export([start/2, stop/1]).
%%%-----------------------------------------------------------------------------
START / STOP EXPORTS
%%%-----------------------------------------------------------------------------
start(_Type, _StartArgs) ->
cl_queue_tab:new(),
common_lib_sup:start_link().
stop(_St) ->
ok.
| null | https://raw.githubusercontent.com/iamaleksey/common_lib/250d965d5accaad8aa1e2c7bbf4ac691aa94ca93/src/common_lib.erl | erlang | All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
o Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
o Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
o Neither the name of ERLANG TRAINING AND CONSULTING nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
-----------------------------------------------------------------------------
----------------------------------------------------------------------------- | Copyright ( C ) 2009 ,
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS "
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN
-module(common_lib).
-behaviour(application).
START / STOP EXPORTS
-export([start/2, stop/1]).
START / STOP EXPORTS
start(_Type, _StartArgs) ->
cl_queue_tab:new(),
common_lib_sup:start_link().
stop(_St) ->
ok.
|
d157a63149042db3468419f7247874769da42f5532356333a6d91d0b3b5fa8b7 | qfpl/reflex-tutorial | Solution.hs | # LANGUAGE CPP #
{-# LANGUAGE OverloadedStrings #-}
module Ex03.Solution (
attachEx03
) where
import Language.Javascript.JSaddle (JSM)
import qualified Data.Map as Map
import Reflex
import Util.Attach
#ifndef ghcjs_HOST_OS
import Util.Run
#endif
import Ex03.Common
import Ex03.Run
ex03 ::
Reflex t =>
Inputs t ->
Outputs t
ex03 (Inputs bMoney bSelected eBuy eRefund) =
let
-- We put our products in a list:
products =
[carrot, celery, cucumber]
-- We write a helper function to get from a `Product` to a `Map Text (Behavior t Product)`
productSingleton p =
Map.singleton (pName p) (pure p)
-- We use this helper to turn our products into a `Map` and then combine the `Map`s, using `foldMap`:
mbProduct =
foldMap productSingleton products
-- We have a `Map` of `Behavior`s that we want to turn into a `Behavior` of `Map`s,
-- so we use `sequence`:
bmProduct =
sequence mbProduct
-- We use `(<@)` here to run `Map.lookup` with the values of `bSelected` and `bmProduct` at
the times that ` eBuy ` fires :
emProduct =
Map.lookup <$> bSelected <*> bmProduct <@ eBuy
Finally , we use ` fmapMaybe i d ` to filter out the ` Nothing ` values and removing the ` Just ` constructor :
eProduct =
fmapMaybe id emProduct
checkNotEnoughMoney money p =
money < pCost p
eError =
NotEnoughMoney <$ ffilter id (checkNotEnoughMoney <$> bMoney <@> eProduct)
eSale =
difference eProduct eError
eVend =
pName <$> eSale
eSpend =
pCost <$> eSale
eChange =
bMoney <@ eRefund
in
Outputs eVend eSpend eChange eError
attachEx03 ::
JSM ()
attachEx03 =
attachId_ "ex03" $
host ex03
#ifndef ghcjs_HOST_OS
go ::
IO ()
go =
run $
host ex03
#endif
| null | https://raw.githubusercontent.com/qfpl/reflex-tutorial/07c1e6fab387cbeedd031630ba6a5cd946cc612e/code/exercises/src/Ex03/Solution.hs | haskell | # LANGUAGE OverloadedStrings #
We put our products in a list:
We write a helper function to get from a `Product` to a `Map Text (Behavior t Product)`
We use this helper to turn our products into a `Map` and then combine the `Map`s, using `foldMap`:
We have a `Map` of `Behavior`s that we want to turn into a `Behavior` of `Map`s,
so we use `sequence`:
We use `(<@)` here to run `Map.lookup` with the values of `bSelected` and `bmProduct` at | # LANGUAGE CPP #
module Ex03.Solution (
attachEx03
) where
import Language.Javascript.JSaddle (JSM)
import qualified Data.Map as Map
import Reflex
import Util.Attach
#ifndef ghcjs_HOST_OS
import Util.Run
#endif
import Ex03.Common
import Ex03.Run
ex03 ::
Reflex t =>
Inputs t ->
Outputs t
ex03 (Inputs bMoney bSelected eBuy eRefund) =
let
products =
[carrot, celery, cucumber]
productSingleton p =
Map.singleton (pName p) (pure p)
mbProduct =
foldMap productSingleton products
bmProduct =
sequence mbProduct
the times that ` eBuy ` fires :
emProduct =
Map.lookup <$> bSelected <*> bmProduct <@ eBuy
Finally , we use ` fmapMaybe i d ` to filter out the ` Nothing ` values and removing the ` Just ` constructor :
eProduct =
fmapMaybe id emProduct
checkNotEnoughMoney money p =
money < pCost p
eError =
NotEnoughMoney <$ ffilter id (checkNotEnoughMoney <$> bMoney <@> eProduct)
eSale =
difference eProduct eError
eVend =
pName <$> eSale
eSpend =
pCost <$> eSale
eChange =
bMoney <@ eRefund
in
Outputs eVend eSpend eChange eError
attachEx03 ::
JSM ()
attachEx03 =
attachId_ "ex03" $
host ex03
#ifndef ghcjs_HOST_OS
go ::
IO ()
go =
run $
host ex03
#endif
|
b5317b17fb56d4c77afae99c39be3e05c7f45a2656a782c49961b6b4516cd4b8 | input-output-hk/cardano-ledger | Utxos.hs | # LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TypeFamilies #
# LANGUAGE UndecidableInstances #
# OPTIONS_GHC -Wno - orphans #
module Cardano.Ledger.Conway.Rules.Utxos (ConwayUTXOS) where
import Cardano.Ledger.Alonzo.Rules (
AlonzoUtxoEvent (..),
AlonzoUtxoPredFailure (..),
AlonzoUtxosEvent,
AlonzoUtxosPredFailure,
)
import Cardano.Ledger.Alonzo.Scripts (AlonzoScript)
import Cardano.Ledger.Alonzo.Tx (AlonzoTx (..))
import Cardano.Ledger.Babbage.Rules (BabbageUTXO, BabbageUtxoPredFailure (..))
import Cardano.Ledger.BaseTypes (ShelleyBase)
import Cardano.Ledger.Conway.Core
import Cardano.Ledger.Conway.Era (ConwayUTXOS)
import Cardano.Ledger.Shelley.LedgerState (PPUPPredFailure, UTxOState (..))
import Cardano.Ledger.Shelley.Rules (UtxoEnv (..))
import Control.State.Transition.Extended (Embed (..), STS (..))
instance
( EraTxOut era
, EraGovernance era
, Script era ~ AlonzoScript era
, Eq (PPUPPredFailure era)
, Show (PPUPPredFailure era)
) =>
STS (ConwayUTXOS era)
where
type BaseM (ConwayUTXOS era) = ShelleyBase
type Environment (ConwayUTXOS era) = UtxoEnv era
type State (ConwayUTXOS era) = UTxOState era
type Signal (ConwayUTXOS era) = AlonzoTx era
type PredicateFailure (ConwayUTXOS era) = AlonzoUtxosPredFailure era
type Event (ConwayUTXOS era) = AlonzoUtxosEvent era
transitionRules = []
instance
( EraTxOut era
, EraGovernance era
, PredicateFailure (EraRule "UTXOS" era) ~ AlonzoUtxosPredFailure era
, Event (EraRule "UTXOS" era) ~ AlonzoUtxosEvent era
, Eq (PPUPPredFailure era)
, Show (PPUPPredFailure era)
, Script era ~ AlonzoScript era
) =>
Embed (ConwayUTXOS era) (BabbageUTXO era)
where
wrapFailed = AlonzoInBabbageUtxoPredFailure . UtxosFailure
wrapEvent = UtxosEvent
| null | https://raw.githubusercontent.com/input-output-hk/cardano-ledger/ac405a977557a7c58ce1cf69d3c2a0bf148cf19f/eras/conway/impl/src/Cardano/Ledger/Conway/Rules/Utxos.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TypeFamilies #
# LANGUAGE UndecidableInstances #
# OPTIONS_GHC -Wno - orphans #
module Cardano.Ledger.Conway.Rules.Utxos (ConwayUTXOS) where
import Cardano.Ledger.Alonzo.Rules (
AlonzoUtxoEvent (..),
AlonzoUtxoPredFailure (..),
AlonzoUtxosEvent,
AlonzoUtxosPredFailure,
)
import Cardano.Ledger.Alonzo.Scripts (AlonzoScript)
import Cardano.Ledger.Alonzo.Tx (AlonzoTx (..))
import Cardano.Ledger.Babbage.Rules (BabbageUTXO, BabbageUtxoPredFailure (..))
import Cardano.Ledger.BaseTypes (ShelleyBase)
import Cardano.Ledger.Conway.Core
import Cardano.Ledger.Conway.Era (ConwayUTXOS)
import Cardano.Ledger.Shelley.LedgerState (PPUPPredFailure, UTxOState (..))
import Cardano.Ledger.Shelley.Rules (UtxoEnv (..))
import Control.State.Transition.Extended (Embed (..), STS (..))
instance
( EraTxOut era
, EraGovernance era
, Script era ~ AlonzoScript era
, Eq (PPUPPredFailure era)
, Show (PPUPPredFailure era)
) =>
STS (ConwayUTXOS era)
where
type BaseM (ConwayUTXOS era) = ShelleyBase
type Environment (ConwayUTXOS era) = UtxoEnv era
type State (ConwayUTXOS era) = UTxOState era
type Signal (ConwayUTXOS era) = AlonzoTx era
type PredicateFailure (ConwayUTXOS era) = AlonzoUtxosPredFailure era
type Event (ConwayUTXOS era) = AlonzoUtxosEvent era
transitionRules = []
instance
( EraTxOut era
, EraGovernance era
, PredicateFailure (EraRule "UTXOS" era) ~ AlonzoUtxosPredFailure era
, Event (EraRule "UTXOS" era) ~ AlonzoUtxosEvent era
, Eq (PPUPPredFailure era)
, Show (PPUPPredFailure era)
, Script era ~ AlonzoScript era
) =>
Embed (ConwayUTXOS era) (BabbageUTXO era)
where
wrapFailed = AlonzoInBabbageUtxoPredFailure . UtxosFailure
wrapEvent = UtxosEvent
| |
13b08cd1ae231341aa0785e37a2a7c55845bb975dbcff3fc896a44495239347c | xvw/muhokama | env.mli | (** Provide the [Env] through HTTP request. *)
val set : Lib_common.Env.t -> Dream.middleware
val get : Dream.request -> (Lib_common.Env.t -> 'a) -> 'a
| null | https://raw.githubusercontent.com/xvw/muhokama/c6f4dbe0459d9dc5f9fb9921a05a3bba93a06ce7/lib/service/env.mli | ocaml | * Provide the [Env] through HTTP request. |
val set : Lib_common.Env.t -> Dream.middleware
val get : Dream.request -> (Lib_common.Env.t -> 'a) -> 'a
|
edb771153958ea88bc302010e45190da828039b571e9f06848d77b10b19dc1c1 | jacquev6/JsOfOCairo | draw_in_browser.ml | Copyright 2017 < >
module Drawings = Drawings.Make(JsOfOCairo)
let () = Js_of_ocaml.Js.export "draw" (fun canvas ->
Drawings.draw (JsOfOCairo.create canvas)
)
| null | https://raw.githubusercontent.com/jacquev6/JsOfOCairo/a00a31000b1a7cabe6ac74e48e86b52ec4699cff/demo/draw_in_browser.ml | ocaml | Copyright 2017 < >
module Drawings = Drawings.Make(JsOfOCairo)
let () = Js_of_ocaml.Js.export "draw" (fun canvas ->
Drawings.draw (JsOfOCairo.create canvas)
)
| |
614ba85d34f7ae256dbf9c40f4e994e5967a1774889f6394f5df48ccc19e556c | Andromedans/andromeda | substitution.ml | module MetaMap = Map.Make(struct
type t = Mlty.meta
let compare = compare
end)
type t = Mlty.ty MetaMap.t
let lookup m s =
try Some (MetaMap.find m s)
with Not_found -> None
let domain s =
MetaMap.fold (fun m _ ms -> Mlty.MetaSet.add m ms) s Mlty.MetaSet.empty
let apply (s : t) t =
if MetaMap.is_empty s
then t
else begin
let rec app = function
| Mlty.Exn
| Mlty.Judgement
| Mlty.Boundary
| Mlty.Derivation
| Mlty.String
| Mlty.Param _ as t -> t
| Mlty.Meta m as orig ->
begin match lookup m s with
| Some t -> app t
| None -> orig
end
| Mlty.Prod ts ->
let ts = List.map app ts in
Mlty.Prod ts
| Mlty.Arrow (t1, t2) ->
let t1 = app t1
and t2 = app t2 in
Mlty.Arrow (t1, t2)
| Mlty.Handler (t1, t2) ->
let t1 = app t1
and t2 = app t2 in
Mlty.Handler (t1, t2)
| Mlty.Apply (pth, ts) ->
let ts = List.map app ts in
Mlty.Apply (pth, ts)
| Mlty.Ref t ->
let t = app t in
Mlty.Ref t
in
app t
end
let empty : t = MetaMap.empty
let from_lists ms ts =
List.fold_left2 (fun s m t ->
MetaMap.add m t s)
empty ms ts
let add m t s =
let t = apply s t in
if Mlty.occurs m t
then
None
else
Some (MetaMap.add m t s)
let partition = MetaMap.partition
| null | https://raw.githubusercontent.com/Andromedans/andromeda/a5c678450e6c6d4a7cd5eee1196bde558541b994/src/typing/substitution.ml | ocaml | module MetaMap = Map.Make(struct
type t = Mlty.meta
let compare = compare
end)
type t = Mlty.ty MetaMap.t
let lookup m s =
try Some (MetaMap.find m s)
with Not_found -> None
let domain s =
MetaMap.fold (fun m _ ms -> Mlty.MetaSet.add m ms) s Mlty.MetaSet.empty
let apply (s : t) t =
if MetaMap.is_empty s
then t
else begin
let rec app = function
| Mlty.Exn
| Mlty.Judgement
| Mlty.Boundary
| Mlty.Derivation
| Mlty.String
| Mlty.Param _ as t -> t
| Mlty.Meta m as orig ->
begin match lookup m s with
| Some t -> app t
| None -> orig
end
| Mlty.Prod ts ->
let ts = List.map app ts in
Mlty.Prod ts
| Mlty.Arrow (t1, t2) ->
let t1 = app t1
and t2 = app t2 in
Mlty.Arrow (t1, t2)
| Mlty.Handler (t1, t2) ->
let t1 = app t1
and t2 = app t2 in
Mlty.Handler (t1, t2)
| Mlty.Apply (pth, ts) ->
let ts = List.map app ts in
Mlty.Apply (pth, ts)
| Mlty.Ref t ->
let t = app t in
Mlty.Ref t
in
app t
end
let empty : t = MetaMap.empty
let from_lists ms ts =
List.fold_left2 (fun s m t ->
MetaMap.add m t s)
empty ms ts
let add m t s =
let t = apply s t in
if Mlty.occurs m t
then
None
else
Some (MetaMap.add m t s)
let partition = MetaMap.partition
| |
e3d908eb870e8abc3099f901278592ee19e78085ada2066cf1b0ee3969c3e5a7 | serokell/servant-util | Postgres.hs | module Servant.Util.Beam.Postgres
( module M
) where
import Servant.Util.Beam.Postgres.Filtering as M
import Servant.Util.Beam.Postgres.Pagination as M
import Servant.Util.Beam.Postgres.Sorting as M
| null | https://raw.githubusercontent.com/serokell/servant-util/21ce12e9bdbf37e06498e2387429a30d4a4ba992/servant-util-beam-pg/src/Servant/Util/Beam/Postgres.hs | haskell | module Servant.Util.Beam.Postgres
( module M
) where
import Servant.Util.Beam.Postgres.Filtering as M
import Servant.Util.Beam.Postgres.Pagination as M
import Servant.Util.Beam.Postgres.Sorting as M
| |
2bdbf7556bab5e8c21b3f2302e2018d51eb00203b4531f6adebf05bd18bc500f | takuto-h/yuzu | typeInfo.ml | type t =
| Variant of ((Names.ctor_name * (TypeExpr.t) option * TypeExpr.t)) list
| Record of ((bool * Names.val_name * TypeExpr.t * TypeExpr.t)) list
| null | https://raw.githubusercontent.com/takuto-h/yuzu/e6bef0964d87c1ced65280083505448f56e5cf29/typeInfo.ml | ocaml | type t =
| Variant of ((Names.ctor_name * (TypeExpr.t) option * TypeExpr.t)) list
| Record of ((bool * Names.val_name * TypeExpr.t * TypeExpr.t)) list
| |
a11b795b611fa38211667a48c5fb6c0aa79d0060515f2d0fee7f08b7194eeccd | ekmett/rope | Branded.hs | # LANGUAGE TypeOperators , Rank2Types , EmptyDataDecls ,
MultiParamTypeClasses , FunctionalDependencies ,
FlexibleContexts , FlexibleInstances , UndecidableInstances ,
IncoherentInstances , OverlappingInstances #
MultiParamTypeClasses, FunctionalDependencies,
FlexibleContexts, FlexibleInstances, UndecidableInstances,
IncoherentInstances, OverlappingInstances #-}
module Data.Rope.Branded
( Branded(..)
, Unsafe
, UBR
, null -- :: (s `Branded` Rope) a -> Bool
-- * Unpacking Ropes
, head -- :: Unpackable t => (s `Branded` Rope) a -> t
, last -- :: Unpackable t => (s `Branded` Rope) a -> t
, unpack -- :: Unpackable t => (s `Branded` Rope) a -> [t]
-- * MonadWriter
, runBranded
, execBranded -- MonadWriter terminology for 'context'
) where
import Prelude hiding (null, head, last, take, drop, span, break, splitAt, takeWhile, dropWhile)
import Control.Applicative hiding (empty)
import Control.Monad.Writer.Class
import Data.Rope.Branded.Comonad
import Data.Monoid
import Data.FingerTree (Measured(..))
import Data.Foldable (Foldable)
import qualified Data.Foldable
import Data.Traversable (Traversable(traverse))
import qualified Data.Rope.Internal as Rope
import Data.Rope.Internal (Rope(..),Unpackable)
type UBR a = (Unsafe `Branded` Rope) a
data Unsafe
data Branded brand t a = Branded { context :: !t, extractBranded :: a }
null :: Branded s Rope a -> Bool
null = Rope.null . context
# INLINE null #
head :: Unpackable t => Branded s Rope a -> t
head = Rope.head . context
# INLINE head #
last :: Unpackable t => Branded s Rope a -> t
last = Rope.last . context
# INLINE last #
unpack :: Unpackable t => Branded s Rope a -> [t]
unpack (Branded s _) = Rope.unpack s
# INLINE unpack #
instance Measured v t => Measured v (Branded s t a) where
measure = measure . context
instance Functor (Branded s t) where
fmap f (Branded s a) = Branded s (f a)
instance Comonad (Branded s t) where
extract = extractBranded
extend f a@(Branded s _) = Branded s (f a)
duplicate a@(Branded s _) = Branded s a
instance Foldable (Branded s t) where
foldr f z (Branded _ a) = f a z
foldr1 _ (Branded _ a) = a
foldl f z (Branded _ a) = f z a
foldl1 _ (Branded _ a) = a
foldMap f (Branded _ a) = f a
instance Traversable (Branded s t) where
traverse f (Branded s a) = Branded s <$> f a
instance Monoid t => Applicative (Branded Unsafe t) where
pure = Branded mempty
Branded s f <*> Branded s' a = Branded (s `mappend` s') (f a)
instance Monoid t => Monad (Branded Unsafe t) where
return = Branded mempty
Branded s a >>= f = Branded (s `mappend` s') b
where Branded s' b = f a
instance (Monoid t, Monoid m) => Monoid (Branded Unsafe t m) where
mempty = Branded mempty mempty
Branded r t `mappend` Branded s u = Branded (r `mappend` s) (t `mappend` u)
-- > sample :: Branded Unsafe Rope ()
-- > sample = do pack "Hello"
-- > pack ' '
> pack " World "
-- >
instance Monoid t => MonadWriter t (Branded Unsafe t) where
tell t = Branded t ()
listen (Branded t a) = Branded t (a, t)
pass (Branded t (a,f)) = Branded (f t) a
runBranded :: Branded s t a -> (a, t)
runBranded (Branded t a) = (a, t)
# INLINE runBranded #
execBranded :: Branded s t a -> t
execBranded (Branded t _) = t
# INLINE execBranded #
| null | https://raw.githubusercontent.com/ekmett/rope/418b895e4895f566d726ca17688c428bcc95aa25/Data/Rope/Branded.hs | haskell | :: (s `Branded` Rope) a -> Bool
* Unpacking Ropes
:: Unpackable t => (s `Branded` Rope) a -> t
:: Unpackable t => (s `Branded` Rope) a -> t
:: Unpackable t => (s `Branded` Rope) a -> [t]
* MonadWriter
MonadWriter terminology for 'context'
> sample :: Branded Unsafe Rope ()
> sample = do pack "Hello"
> pack ' '
> | # LANGUAGE TypeOperators , Rank2Types , EmptyDataDecls ,
MultiParamTypeClasses , FunctionalDependencies ,
FlexibleContexts , FlexibleInstances , UndecidableInstances ,
IncoherentInstances , OverlappingInstances #
MultiParamTypeClasses, FunctionalDependencies,
FlexibleContexts, FlexibleInstances, UndecidableInstances,
IncoherentInstances, OverlappingInstances #-}
module Data.Rope.Branded
( Branded(..)
, Unsafe
, UBR
, runBranded
) where
import Prelude hiding (null, head, last, take, drop, span, break, splitAt, takeWhile, dropWhile)
import Control.Applicative hiding (empty)
import Control.Monad.Writer.Class
import Data.Rope.Branded.Comonad
import Data.Monoid
import Data.FingerTree (Measured(..))
import Data.Foldable (Foldable)
import qualified Data.Foldable
import Data.Traversable (Traversable(traverse))
import qualified Data.Rope.Internal as Rope
import Data.Rope.Internal (Rope(..),Unpackable)
type UBR a = (Unsafe `Branded` Rope) a
data Unsafe
data Branded brand t a = Branded { context :: !t, extractBranded :: a }
null :: Branded s Rope a -> Bool
null = Rope.null . context
# INLINE null #
head :: Unpackable t => Branded s Rope a -> t
head = Rope.head . context
# INLINE head #
last :: Unpackable t => Branded s Rope a -> t
last = Rope.last . context
# INLINE last #
unpack :: Unpackable t => Branded s Rope a -> [t]
unpack (Branded s _) = Rope.unpack s
# INLINE unpack #
instance Measured v t => Measured v (Branded s t a) where
measure = measure . context
instance Functor (Branded s t) where
fmap f (Branded s a) = Branded s (f a)
instance Comonad (Branded s t) where
extract = extractBranded
extend f a@(Branded s _) = Branded s (f a)
duplicate a@(Branded s _) = Branded s a
instance Foldable (Branded s t) where
foldr f z (Branded _ a) = f a z
foldr1 _ (Branded _ a) = a
foldl f z (Branded _ a) = f z a
foldl1 _ (Branded _ a) = a
foldMap f (Branded _ a) = f a
instance Traversable (Branded s t) where
traverse f (Branded s a) = Branded s <$> f a
instance Monoid t => Applicative (Branded Unsafe t) where
pure = Branded mempty
Branded s f <*> Branded s' a = Branded (s `mappend` s') (f a)
instance Monoid t => Monad (Branded Unsafe t) where
return = Branded mempty
Branded s a >>= f = Branded (s `mappend` s') b
where Branded s' b = f a
instance (Monoid t, Monoid m) => Monoid (Branded Unsafe t m) where
mempty = Branded mempty mempty
Branded r t `mappend` Branded s u = Branded (r `mappend` s) (t `mappend` u)
> pack " World "
instance Monoid t => MonadWriter t (Branded Unsafe t) where
tell t = Branded t ()
listen (Branded t a) = Branded t (a, t)
pass (Branded t (a,f)) = Branded (f t) a
runBranded :: Branded s t a -> (a, t)
runBranded (Branded t a) = (a, t)
# INLINE runBranded #
execBranded :: Branded s t a -> t
execBranded (Branded t _) = t
# INLINE execBranded #
|
68ac82da956c64301e44939fdeb784435ecb6c135ed5e80de9515e190af07594 | yokolet/clementine | tagged_literals.clj | (ns cljs.tagged-literals
(:require [clojure.instant :as inst]))
(defn read-queue
[form]
(assert (vector? form) "Queue literal expects a vector for its elements.")
(list 'cljs.core/into 'cljs.core.PersistentQueue/EMPTY form))
(defn read-uuid
[form]
(assert (string? form) "UUID literal expects a string as its representation.")
(try
(let [uuid (java.util.UUID/fromString form)]
(list (symbol "UUID.") form))
(catch Throwable e
(throw (RuntimeException. (.getMessage e))))))
(defn read-inst
[form]
(assert (string? form) "Instance literal expects a string for its timestamp.")
(try
(let [^java.util.Date d (inst/read-instant-date form)]
(list (symbol "js/Date.") (.getTime d)))
(catch Throwable e
(throw (RuntimeException. (.getMessage e))))))
(def ^:dynamic *cljs-data-readers*
{'queue read-queue
'uuid read-uuid
'inst read-inst})
| null | https://raw.githubusercontent.com/yokolet/clementine/b26c2318625e49606b5cc3b95cc9e1f5085ac309/ext/clojure-clojurescript-bef56a7/src/clj/cljs/tagged_literals.clj | clojure | (ns cljs.tagged-literals
(:require [clojure.instant :as inst]))
(defn read-queue
[form]
(assert (vector? form) "Queue literal expects a vector for its elements.")
(list 'cljs.core/into 'cljs.core.PersistentQueue/EMPTY form))
(defn read-uuid
[form]
(assert (string? form) "UUID literal expects a string as its representation.")
(try
(let [uuid (java.util.UUID/fromString form)]
(list (symbol "UUID.") form))
(catch Throwable e
(throw (RuntimeException. (.getMessage e))))))
(defn read-inst
[form]
(assert (string? form) "Instance literal expects a string for its timestamp.")
(try
(let [^java.util.Date d (inst/read-instant-date form)]
(list (symbol "js/Date.") (.getTime d)))
(catch Throwable e
(throw (RuntimeException. (.getMessage e))))))
(def ^:dynamic *cljs-data-readers*
{'queue read-queue
'uuid read-uuid
'inst read-inst})
| |
bcacf7656377ebae3a72c46691639cc13babb89d1cd0f8158d7e7f9140ce7abc | programaker-project/Programaker-Core | automate_rest_api_programs_root.erl | %%% @doc
%%% REST endpoint to manage knowledge collections.
%%% @end
-module(automate_rest_api_programs_root).
-export([init/2]).
-export([ allowed_methods/2
, options/2
, is_authorized/2
, content_types_provided/2
, content_types_accepted/2
, resource_exists/2
]).
-export([ accept_json_create_program/2
, to_json/2
]).
-include("./records.hrl").
-define(UTILS, automate_rest_api_utils).
-define(FORMATTING, automate_rest_api_utils_formatting).
-define(PROGRAMS, automate_rest_api_utils_programs).
-record(create_program_seq, { username :: binary() }).
-spec init(_,_) -> {'cowboy_rest',_,_}.
init(Req, _Opts) ->
UserId = cowboy_req:binding(user_id, Req),
{cowboy_rest, Req
, #create_program_seq{ username=UserId }}.
resource_exists(Req, State) ->
case cowboy_req:method(Req) of
<<"POST">> ->
{ false, Req, State };
_ ->
{ true, Req, State}
end.
%% CORS
options(Req, State) ->
Req1 = automate_rest_api_cors:set_headers(Req),
{ok, Req1, State}.
%% Authentication
-spec allowed_methods(cowboy_req:req(),_) -> {[binary()], cowboy_req:req(),_}.
allowed_methods(Req, State) ->
{[<<"GET">>, <<"POST">>, <<"OPTIONS">>], Req, State}.
is_authorized(Req, State) ->
Req1 = automate_rest_api_cors:set_headers(Req),
case cowboy_req:method(Req1) of
%% Don't do authentication if it's just asking for options
<<"OPTIONS">> ->
{ true, Req1, State };
Method ->
case cowboy_req:header(<<"authorization">>, Req, undefined) of
undefined ->
{ {false, <<"Authorization header not found">>} , Req1, State };
X ->
Scope = case Method of
<<"GET">> -> list_programs;
<<"POST">> -> create_programs
end,
#create_program_seq{username=Username} = State,
case automate_rest_api_backend:is_valid_token(X, Scope) of
{true, Username} ->
{ true, Req1, State };
{true, _} -> %% Non matching username
{ { false, <<"Unauthorized to create a program here">>}, Req1, State };
false ->
{ { false, <<"Authorization not correct">>}, Req1, State }
end
end
end.
%% POST handler
content_types_accepted(Req, State) ->
{[{{<<"application">>, <<"json">>, []}, accept_json_create_program}],
Req, State}.
-spec accept_json_create_program(cowboy_req:req(), #create_program_seq{})
-> {{'true', binary()},cowboy_req:req(), #create_program_seq{}}.
accept_json_create_program(Req, State) ->
#create_program_seq{username=Username} = State,
{ok, Body, _} = ?UTILS:read_body(Req),
{Type, Name} = ?PROGRAMS:get_metadata_from_body(Body),
case automate_rest_api_backend:create_program(Username, Name, Type) of
{ ok, {ProgramId, ProgramName, ProgramUrl, ProgramType} } ->
Output = jiffy:encode(#{ <<"id">> => ProgramId
, <<"name">> => ProgramName
, <<"link">> => ProgramUrl
, <<"type">> => ProgramType
}),
Res1 = cowboy_req:set_resp_body(Output, Req),
Res2 = cowboy_req:delete_resp_header(<<"content-type">>, Res1),
Res3 = cowboy_req:set_resp_header(<<"content-type">>, <<"application/json">>, Res2),
{ {true, ProgramUrl }, Res3, State }
end.
%% GET handler
content_types_provided(Req, State) ->
{[{{<<"application">>, <<"json">>, []}, to_json}],
Req, State}.
-spec to_json(cowboy_req:req(), #create_program_seq{})
-> {binary(),cowboy_req:req(), #create_program_seq{}}.
to_json(Req, State) ->
#create_program_seq{username=Username} = State,
case automate_rest_api_backend:lists_programs_from_username(Username) of
{ ok, Programs } ->
Output = jiffy:encode(encode_program_list(Programs)),
Res1 = cowboy_req:delete_resp_header(<<"content-type">>, Req),
Res2 = cowboy_req:set_resp_header(<<"content-type">>, <<"application/json">>, Res1),
{ Output, Res2, State }
end.
encode_program_list(Programs) ->
lists:map(fun(Program=#program_metadata{id=Id}) ->
ProgramBridges = try ?UTILS:get_bridges_on_program_id(Id) of
Bridges -> Bridges
catch ErrNS:Error:StackTrace ->
automate_logging:log_platform(error, ErrNS, Error, StackTrace),
[]
end,
?FORMATTING:program_listing_to_json(Program, ProgramBridges)
end, Programs).
| null | https://raw.githubusercontent.com/programaker-project/Programaker-Core/ef10fc6d2a228b2096b121170c421f5c29f9f270/backend/apps/automate_rest_api/src/automate_rest_api_programs_root.erl | erlang | @doc
REST endpoint to manage knowledge collections.
@end
CORS
Authentication
Don't do authentication if it's just asking for options
Non matching username
POST handler
GET handler |
-module(automate_rest_api_programs_root).
-export([init/2]).
-export([ allowed_methods/2
, options/2
, is_authorized/2
, content_types_provided/2
, content_types_accepted/2
, resource_exists/2
]).
-export([ accept_json_create_program/2
, to_json/2
]).
-include("./records.hrl").
-define(UTILS, automate_rest_api_utils).
-define(FORMATTING, automate_rest_api_utils_formatting).
-define(PROGRAMS, automate_rest_api_utils_programs).
-record(create_program_seq, { username :: binary() }).
-spec init(_,_) -> {'cowboy_rest',_,_}.
init(Req, _Opts) ->
UserId = cowboy_req:binding(user_id, Req),
{cowboy_rest, Req
, #create_program_seq{ username=UserId }}.
resource_exists(Req, State) ->
case cowboy_req:method(Req) of
<<"POST">> ->
{ false, Req, State };
_ ->
{ true, Req, State}
end.
options(Req, State) ->
Req1 = automate_rest_api_cors:set_headers(Req),
{ok, Req1, State}.
-spec allowed_methods(cowboy_req:req(),_) -> {[binary()], cowboy_req:req(),_}.
allowed_methods(Req, State) ->
{[<<"GET">>, <<"POST">>, <<"OPTIONS">>], Req, State}.
is_authorized(Req, State) ->
Req1 = automate_rest_api_cors:set_headers(Req),
case cowboy_req:method(Req1) of
<<"OPTIONS">> ->
{ true, Req1, State };
Method ->
case cowboy_req:header(<<"authorization">>, Req, undefined) of
undefined ->
{ {false, <<"Authorization header not found">>} , Req1, State };
X ->
Scope = case Method of
<<"GET">> -> list_programs;
<<"POST">> -> create_programs
end,
#create_program_seq{username=Username} = State,
case automate_rest_api_backend:is_valid_token(X, Scope) of
{true, Username} ->
{ true, Req1, State };
{ { false, <<"Unauthorized to create a program here">>}, Req1, State };
false ->
{ { false, <<"Authorization not correct">>}, Req1, State }
end
end
end.
content_types_accepted(Req, State) ->
{[{{<<"application">>, <<"json">>, []}, accept_json_create_program}],
Req, State}.
-spec accept_json_create_program(cowboy_req:req(), #create_program_seq{})
-> {{'true', binary()},cowboy_req:req(), #create_program_seq{}}.
accept_json_create_program(Req, State) ->
#create_program_seq{username=Username} = State,
{ok, Body, _} = ?UTILS:read_body(Req),
{Type, Name} = ?PROGRAMS:get_metadata_from_body(Body),
case automate_rest_api_backend:create_program(Username, Name, Type) of
{ ok, {ProgramId, ProgramName, ProgramUrl, ProgramType} } ->
Output = jiffy:encode(#{ <<"id">> => ProgramId
, <<"name">> => ProgramName
, <<"link">> => ProgramUrl
, <<"type">> => ProgramType
}),
Res1 = cowboy_req:set_resp_body(Output, Req),
Res2 = cowboy_req:delete_resp_header(<<"content-type">>, Res1),
Res3 = cowboy_req:set_resp_header(<<"content-type">>, <<"application/json">>, Res2),
{ {true, ProgramUrl }, Res3, State }
end.
content_types_provided(Req, State) ->
{[{{<<"application">>, <<"json">>, []}, to_json}],
Req, State}.
-spec to_json(cowboy_req:req(), #create_program_seq{})
-> {binary(),cowboy_req:req(), #create_program_seq{}}.
to_json(Req, State) ->
#create_program_seq{username=Username} = State,
case automate_rest_api_backend:lists_programs_from_username(Username) of
{ ok, Programs } ->
Output = jiffy:encode(encode_program_list(Programs)),
Res1 = cowboy_req:delete_resp_header(<<"content-type">>, Req),
Res2 = cowboy_req:set_resp_header(<<"content-type">>, <<"application/json">>, Res1),
{ Output, Res2, State }
end.
encode_program_list(Programs) ->
lists:map(fun(Program=#program_metadata{id=Id}) ->
ProgramBridges = try ?UTILS:get_bridges_on_program_id(Id) of
Bridges -> Bridges
catch ErrNS:Error:StackTrace ->
automate_logging:log_platform(error, ErrNS, Error, StackTrace),
[]
end,
?FORMATTING:program_listing_to_json(Program, ProgramBridges)
end, Programs).
|
3e91bd156ee353ae6dcae09638eebdc353745581e96bd946f077acdcb57ed946 | tqtezos/stablecoin | Metadata.hs | SPDX - FileCopyrightText : 2021 Oxhead Alpha
SPDX - License - Identifier : MIT
| This module contains the TZIP-16 metadata and off - chain - views for the
-- @stablecoin.tz@ contract.
module Lorentz.Contracts.Stablecoin.Metadata
( MetadataUri(..)
, ParsedMetadataUri(..)
, metadataJSON
, metadataMap
, parseMetadataUri
) where
import Data.Aeson qualified as J
import Data.ByteString.Lazy qualified as BSL
import Data.Version (showVersion)
import Fmt (pretty)
import Text.Megaparsec qualified as P
import Text.Megaparsec.Char (string')
import Lorentz as L
import Lorentz.Contracts.Spec.FA2Interface qualified as FA2
import Lorentz.Contracts.Spec.TZIP16Interface
(Error(..), License(..), Metadata(..), MetadataMap, Source(..), ViewImplementation(..))
import Lorentz.Contracts.Spec.TZIP16Interface qualified as TZ
import Morley.Metadata
(ViewCode(..), compileViewCodeTH, mkMichelsonStorageView, unsafeCompileViewCode)
import Morley.Micheline (ToExpression(toExpression))
import Morley.Tezos.Address (ContractAddress, formatAddress, parseKindedAddress)
import Lorentz.Contracts.Stablecoin.Types
import Paths_stablecoin (version)
import Stablecoin.Util (ligoVersion)
jfield :: MText
jfield = [mt|metadataJSON|]
metadataMap :: J.ToJSON metadata => MetadataUri metadata -> MetadataMap
metadataMap mdata = mkBigMap $
One might reasonable expect that the URI would be stored as packed strings ,
but the TZIP-16 spec is explicit about that not being the case .
--
-- > Unless otherwise-specified, the encoding of the values must be the direct stream
-- > of bytes of the data being stored. (...)
> There is no implicit conversion to 's binary format ( PACK ) nor
-- > quoting mechanism.
--
-- See: </-/blob/eb1da57684599a266334a73babd7ba82dbbbce66/proposals/tzip-16/tzip-16.md#contract-storage>
--
So , instead , we encode it as UTF-8 byte sequences .
case mdata of
CurrentContract md includeUri ->
if includeUri
then [ (mempty, TZ.encodeURI $ TZ.tezosStorageUri (TZ.ContractHost Nothing) jfield)
, (jfield, BSL.toStrict (J.encode md))
]
else [ (jfield, BSL.toStrict (J.encode md)) ]
RemoteContract addr ->
[ (mempty, TZ.encodeURI $ TZ.tezosStorageUri (TZ.ContractHost (Just $ formatAddress addr)) jfield)
]
Raw uri ->
[ (mempty, encodeUtf8 uri)
]
Result after parsing the metadata uri from a TZIP-16 metadata bigmap .
data ParsedMetadataUri
= InCurrentContractUnderKey Text
| InRemoteContractUnderKey ContractAddress Text
| RawUri Text
deriving stock (Eq, Show)
parseMetadataUri :: Text -> Either Text ParsedMetadataUri
parseMetadataUri t = first (fromString . P.errorBundlePretty) $ P.parse metadataUriParser "" t
metadataUriParser :: P.Parsec Void Text ParsedMetadataUri
metadataUriParser
= (P.try remoteContractUriParser)
<|> (P.try currentContractUriParser)
<|> rawUriParser
remoteContractUriParser :: P.Parsec Void Text ParsedMetadataUri
remoteContractUriParser = do
_ <- string' (TZ.tezosStorageScheme <> "://")
addr <- P.manyTill P.anySingle (string' "/")
key <- P.many P.anySingle
case parseKindedAddress (toText addr) of
Right paddr -> pure $ InRemoteContractUnderKey paddr (toText key)
Left err -> fail $ pretty err
rawUriParser :: P.Parsec Void Text ParsedMetadataUri
rawUriParser = (RawUri . toText) <$> (P.many (P.satisfy (const True)))
currentContractUriParser :: P.Parsec Void Text ParsedMetadataUri
currentContractUriParser = do
_ <- string' (TZ.tezosStorageScheme <> ":")
key_ <- P.many P.anySingle
pure $ InCurrentContractUnderKey (toText key_)
data MetadataUri metadata
^ Metadata and a flag to denote if URI should be included
| RemoteContract ContractAddress
| Raw Text
| Make the TZIP-16 metadata . We accept a @Maybe@ @FA2.TokenMetadata@
-- as argument here so that we can use this function to create the metadata of the
-- FA1.2 Variant as well.
metadataJSON :: Maybe FA2.TokenMetadata -> Maybe Text -> Metadata (ToT Storage)
metadataJSON mtmd mbDescription =
TZ.name "stablecoin" <>
TZ.description (fromMaybe defaultDescription mbDescription) <>
TZ.version (toText $ showVersion version) <>
TZ.license (License { lName = "MIT", lDetails = Nothing }) <>
TZ.authors
[ TZ.author "Serokell" "/"
, TZ.author "TQ Tezos" "/"
, TZ.author "Oxhead Alpha" "/"
] <>
TZ.homepage "/" <>
TZ.source Source
{ sLocation = Just $ "" <> toText (showVersion version) <> "/ligo/stablecoin"
, sTools = [ "ligo " <> $ligoVersion ]
} <>
TZ.interfaces [ TZ.Interface "TZIP-012", TZ.Interface "TZIP-017" ] <>
TZ.errors [ mkError [mt|FA2_TOKEN_UNDEFINED|] [mt|All `token_id`s must be 0|]
, mkError [mt|FA2_INSUFFICIENT_BALANCE|] [mt|Cannot debit from a wallet because of insufficient amount of tokens|]
, mkError [mt|FA2_NOT_OPERATOR|] [mt|You're neither the owner or a permitted operator of one or more wallets from which tokens will be transferred|]
, mkError [mt|XTZ_RECEIVED|] [mt|Contract received a non-zero amount of tokens|]
, mkError [mt|NOT_CONTRACT_OWNER|] [mt|Operation can only be performed by the contract's owner|]
, mkError [mt|NOT_PENDING_OWNER|] [mt|Operation can only be performed by the current contract's pending owner|]
, mkError [mt|NO_PENDING_OWNER_SET|] [mt|There's no pending transfer of ownership|]
, mkError [mt|NOT_PAUSER|] [mt|Operation can only be performed by the contract's pauser|]
, mkError [mt|NOT_MASTER_MINTER|] [mt|Operation can only be performed by the contract's master minter|]
, mkError [mt|NOT_MINTER|] [mt|Operation can only be performed by registered minters|]
, mkError [mt|CONTRACT_PAUSED|] [mt|Operation cannot be performed while the contract is paused|]
, mkError [mt|CONTRACT_NOT_PAUSED|] [mt|Operation cannot be performed while the contract is not paused|]
, mkError [mt|NOT_TOKEN_OWNER|] [mt|You cannot configure another user's operators|]
, mkError [mt|CURRENT_ALLOWANCE_REQUIRED|] [mt|The given address is already a minter, you must specify its current minting allowance|]
, mkError [mt|ALLOWANCE_MISMATCH|] [mt|The given current minting allowance does not match the minter's actual current minting allowance|]
, mkError [mt|ADDR_NOT_MINTER|] [mt|This address is not a registered minter|]
, mkError [mt|ALLOWANCE_EXCEEDED|] [mt|The amount of tokens to be minted exceeds your current minting allowance|]
, mkError [mt|BAD_TRANSFERLIST|] [mt|The given address is a not a smart contract complying with the transferlist interface|]
, mkError [mt|MINTER_LIMIT_REACHED|] [mt|Cannot add new minter because the number of minters is already at the limit|]
, mkError [mt|MISSIGNED|] [mt|This permit's signature is invalid|]
, mkError [mt|EXPIRED_PERMIT|] [mt|A permit was found, but it has already expired|]
, mkError [mt|NOT_PERMIT_ISSUER|] [mt|You're not the issuer of the given permit|]
, mkError [mt|DUP_PERMIT|] [mt|The given permit already exists|]
, mkError [mt|EXPIRY_TOO_BIG|] [mt|The `set_expiry` entrypoint was called with an expiry value that is too big|]
, mkError [mt|NEGATIVE_TOTAL_SUPPLY|] [mt|The total_supply value was found to be less than zero after an operation. This indicates a bug in the contract.|]
] <>
TZ.views mkViews
where
defaultDescription :: Text
defaultDescription =
"Tezos Stablecoin project implements an FA2-compatible token smart contract.\
\ It draws inspiration from popular permissioned asset contracts like CENTRE Fiat Token and other similar contracts.\
\ The contract is implemented in the LIGO language."
mkViews :: [TZ.View (ToT Storage)]
mkViews =
case mtmd of
Nothing ->
[ getDefaultExpiryView
, getCounterView
]
Just tmd ->
[ getDefaultExpiryView
, getCounterView
, getBalanceView
, getTotalSupplyView
, getAllTokensView
, isOperatorView
, mkTokenMetadataView tmd
]
mkError :: MText -> MText -> Error
mkError err expansion =
TZ.EStatic $ TZ.StaticError
{ seError = toExpression (toVal err)
, seExpansion = toExpression (toVal expansion)
, seLanguages = ["en"]
}
type BalanceViewParam = (Address, Natural)
getBalanceView :: TZ.View (ToT Storage)
getBalanceView =
TZ.View
{ vName = "get_balance"
, vDescription = Just "Access the balance of an address"
, vPure = Just True
, vImplementations = one $
VIMichelsonStorageView $
mkMichelsonStorageView @Storage @Natural Nothing [] $
$$(compileViewCodeTH $ WithParam @BalanceViewParam $
L.dip (L.toField #sLedger) #
L.car #
L.get #
If there is no ledger entry , return zero .
)
}
getTotalSupplyView :: TZ.View (ToT Storage)
getTotalSupplyView =
TZ.View
{ vName = "total_supply"
, vDescription = Just "Get the total no of tokens available."
, vPure = Just True
, vImplementations = one $
VIMichelsonStorageView $
mkMichelsonStorageView @Storage @Natural Nothing [] $
$$(compileViewCodeTH $ WithParam @Natural $
L.int #
L.assertEq0 [mt|Unknown TOKEN ID|] #
L.toField #sTotalSupply
)
}
getAllTokensView :: TZ.View (ToT Storage)
getAllTokensView =
TZ.View
{ vName = "all_tokens"
, vDescription = Just "Get list of token ids supported."
, vPure = Just True
, vImplementations = one $
VIMichelsonStorageView $
mkMichelsonStorageView @Storage Nothing [] $
$$(compileViewCodeTH $ WithoutParam $
L.drop # L.nil # L.push (0 :: Natural) # L.cons
)
}
isOperatorView :: TZ.View (ToT Storage)
isOperatorView =
TZ.View
{ vName = "is_operator"
, vDescription = Just "Check if the given address is an operator"
, vPure = Just True
, vImplementations = one $
VIMichelsonStorageView $
mkMichelsonStorageView @Storage @Bool Nothing [] $
$$(compileViewCodeTH $ WithParam @FA2.OperatorParam $
L.dip (L.toField #sOperators) #
L.getField #opTokenId # forcedCoerce_ @FA2.TokenId @Natural #
L.int #
L.assertEq0 [mt|Unknown TOKEN ID|] #
L.getField #opOwner #
L.dip (L.toField #opOperator) #
L.pair #
L.get #
L.ifSome (L.drop # L.push True) (L.push False)
)
}
mkTokenMetadataView :: FA2.TokenMetadata -> TZ.View (ToT Storage)
mkTokenMetadataView md =
let vc = unsafeCompileViewCode $ WithParam @Natural $
L.dip L.drop #
L.int #
L.assertEq0 [mt|Unknown TOKEN ID|] #
L.push (0 :: Natural, md)
in TZ.View
{ vName = "token_metadata"
, vDescription = Just "Get token metadata for the token id"
, vPure = Just True
, vImplementations = one $
VIMichelsonStorageView $
mkMichelsonStorageView @Storage @(Natural, FA2.TokenMetadata) Nothing [] vc
}
getDefaultExpiryView :: TZ.View (ToT Storage)
getDefaultExpiryView =
TZ.View
{ vName = "GetDefaultExpiry"
, vDescription = Just "Access the contract's default expiry in seconds"
, vPure = Just True
, vImplementations = one $
VIMichelsonStorageView $
mkMichelsonStorageView @Storage @Natural Nothing [] $
$$(compileViewCodeTH $ WithoutParam $
L.toField #sDefaultExpiry
)
}
getCounterView :: TZ.View (ToT Storage)
getCounterView =
TZ.View
{ vName = "GetCounter"
, vDescription = Just "Access the current permit counter"
, vPure = Just True
, vImplementations = one $
VIMichelsonStorageView $
mkMichelsonStorageView @Storage @Natural Nothing [] $
$$(compileViewCodeTH $ WithoutParam $
L.toField #sPermitCounter
)
}
| null | https://raw.githubusercontent.com/tqtezos/stablecoin/48012781d6c2d46d4cb8f0508a8ca1576481a561/haskell/src/Lorentz/Contracts/Stablecoin/Metadata.hs | haskell | @stablecoin.tz@ contract.
> Unless otherwise-specified, the encoding of the values must be the direct stream
> of bytes of the data being stored. (...)
> quoting mechanism.
See: </-/blob/eb1da57684599a266334a73babd7ba82dbbbce66/proposals/tzip-16/tzip-16.md#contract-storage>
as argument here so that we can use this function to create the metadata of the
FA1.2 Variant as well. | SPDX - FileCopyrightText : 2021 Oxhead Alpha
SPDX - License - Identifier : MIT
| This module contains the TZIP-16 metadata and off - chain - views for the
module Lorentz.Contracts.Stablecoin.Metadata
( MetadataUri(..)
, ParsedMetadataUri(..)
, metadataJSON
, metadataMap
, parseMetadataUri
) where
import Data.Aeson qualified as J
import Data.ByteString.Lazy qualified as BSL
import Data.Version (showVersion)
import Fmt (pretty)
import Text.Megaparsec qualified as P
import Text.Megaparsec.Char (string')
import Lorentz as L
import Lorentz.Contracts.Spec.FA2Interface qualified as FA2
import Lorentz.Contracts.Spec.TZIP16Interface
(Error(..), License(..), Metadata(..), MetadataMap, Source(..), ViewImplementation(..))
import Lorentz.Contracts.Spec.TZIP16Interface qualified as TZ
import Morley.Metadata
(ViewCode(..), compileViewCodeTH, mkMichelsonStorageView, unsafeCompileViewCode)
import Morley.Micheline (ToExpression(toExpression))
import Morley.Tezos.Address (ContractAddress, formatAddress, parseKindedAddress)
import Lorentz.Contracts.Stablecoin.Types
import Paths_stablecoin (version)
import Stablecoin.Util (ligoVersion)
jfield :: MText
jfield = [mt|metadataJSON|]
metadataMap :: J.ToJSON metadata => MetadataUri metadata -> MetadataMap
metadataMap mdata = mkBigMap $
One might reasonable expect that the URI would be stored as packed strings ,
but the TZIP-16 spec is explicit about that not being the case .
> There is no implicit conversion to 's binary format ( PACK ) nor
So , instead , we encode it as UTF-8 byte sequences .
case mdata of
CurrentContract md includeUri ->
if includeUri
then [ (mempty, TZ.encodeURI $ TZ.tezosStorageUri (TZ.ContractHost Nothing) jfield)
, (jfield, BSL.toStrict (J.encode md))
]
else [ (jfield, BSL.toStrict (J.encode md)) ]
RemoteContract addr ->
[ (mempty, TZ.encodeURI $ TZ.tezosStorageUri (TZ.ContractHost (Just $ formatAddress addr)) jfield)
]
Raw uri ->
[ (mempty, encodeUtf8 uri)
]
Result after parsing the metadata uri from a TZIP-16 metadata bigmap .
data ParsedMetadataUri
= InCurrentContractUnderKey Text
| InRemoteContractUnderKey ContractAddress Text
| RawUri Text
deriving stock (Eq, Show)
parseMetadataUri :: Text -> Either Text ParsedMetadataUri
parseMetadataUri t = first (fromString . P.errorBundlePretty) $ P.parse metadataUriParser "" t
metadataUriParser :: P.Parsec Void Text ParsedMetadataUri
metadataUriParser
= (P.try remoteContractUriParser)
<|> (P.try currentContractUriParser)
<|> rawUriParser
remoteContractUriParser :: P.Parsec Void Text ParsedMetadataUri
remoteContractUriParser = do
_ <- string' (TZ.tezosStorageScheme <> "://")
addr <- P.manyTill P.anySingle (string' "/")
key <- P.many P.anySingle
case parseKindedAddress (toText addr) of
Right paddr -> pure $ InRemoteContractUnderKey paddr (toText key)
Left err -> fail $ pretty err
rawUriParser :: P.Parsec Void Text ParsedMetadataUri
rawUriParser = (RawUri . toText) <$> (P.many (P.satisfy (const True)))
currentContractUriParser :: P.Parsec Void Text ParsedMetadataUri
currentContractUriParser = do
_ <- string' (TZ.tezosStorageScheme <> ":")
key_ <- P.many P.anySingle
pure $ InCurrentContractUnderKey (toText key_)
data MetadataUri metadata
^ Metadata and a flag to denote if URI should be included
| RemoteContract ContractAddress
| Raw Text
| Make the TZIP-16 metadata . We accept a @Maybe@ @FA2.TokenMetadata@
metadataJSON :: Maybe FA2.TokenMetadata -> Maybe Text -> Metadata (ToT Storage)
metadataJSON mtmd mbDescription =
TZ.name "stablecoin" <>
TZ.description (fromMaybe defaultDescription mbDescription) <>
TZ.version (toText $ showVersion version) <>
TZ.license (License { lName = "MIT", lDetails = Nothing }) <>
TZ.authors
[ TZ.author "Serokell" "/"
, TZ.author "TQ Tezos" "/"
, TZ.author "Oxhead Alpha" "/"
] <>
TZ.homepage "/" <>
TZ.source Source
{ sLocation = Just $ "" <> toText (showVersion version) <> "/ligo/stablecoin"
, sTools = [ "ligo " <> $ligoVersion ]
} <>
TZ.interfaces [ TZ.Interface "TZIP-012", TZ.Interface "TZIP-017" ] <>
TZ.errors [ mkError [mt|FA2_TOKEN_UNDEFINED|] [mt|All `token_id`s must be 0|]
, mkError [mt|FA2_INSUFFICIENT_BALANCE|] [mt|Cannot debit from a wallet because of insufficient amount of tokens|]
, mkError [mt|FA2_NOT_OPERATOR|] [mt|You're neither the owner or a permitted operator of one or more wallets from which tokens will be transferred|]
, mkError [mt|XTZ_RECEIVED|] [mt|Contract received a non-zero amount of tokens|]
, mkError [mt|NOT_CONTRACT_OWNER|] [mt|Operation can only be performed by the contract's owner|]
, mkError [mt|NOT_PENDING_OWNER|] [mt|Operation can only be performed by the current contract's pending owner|]
, mkError [mt|NO_PENDING_OWNER_SET|] [mt|There's no pending transfer of ownership|]
, mkError [mt|NOT_PAUSER|] [mt|Operation can only be performed by the contract's pauser|]
, mkError [mt|NOT_MASTER_MINTER|] [mt|Operation can only be performed by the contract's master minter|]
, mkError [mt|NOT_MINTER|] [mt|Operation can only be performed by registered minters|]
, mkError [mt|CONTRACT_PAUSED|] [mt|Operation cannot be performed while the contract is paused|]
, mkError [mt|CONTRACT_NOT_PAUSED|] [mt|Operation cannot be performed while the contract is not paused|]
, mkError [mt|NOT_TOKEN_OWNER|] [mt|You cannot configure another user's operators|]
, mkError [mt|CURRENT_ALLOWANCE_REQUIRED|] [mt|The given address is already a minter, you must specify its current minting allowance|]
, mkError [mt|ALLOWANCE_MISMATCH|] [mt|The given current minting allowance does not match the minter's actual current minting allowance|]
, mkError [mt|ADDR_NOT_MINTER|] [mt|This address is not a registered minter|]
, mkError [mt|ALLOWANCE_EXCEEDED|] [mt|The amount of tokens to be minted exceeds your current minting allowance|]
, mkError [mt|BAD_TRANSFERLIST|] [mt|The given address is a not a smart contract complying with the transferlist interface|]
, mkError [mt|MINTER_LIMIT_REACHED|] [mt|Cannot add new minter because the number of minters is already at the limit|]
, mkError [mt|MISSIGNED|] [mt|This permit's signature is invalid|]
, mkError [mt|EXPIRED_PERMIT|] [mt|A permit was found, but it has already expired|]
, mkError [mt|NOT_PERMIT_ISSUER|] [mt|You're not the issuer of the given permit|]
, mkError [mt|DUP_PERMIT|] [mt|The given permit already exists|]
, mkError [mt|EXPIRY_TOO_BIG|] [mt|The `set_expiry` entrypoint was called with an expiry value that is too big|]
, mkError [mt|NEGATIVE_TOTAL_SUPPLY|] [mt|The total_supply value was found to be less than zero after an operation. This indicates a bug in the contract.|]
] <>
TZ.views mkViews
where
defaultDescription :: Text
defaultDescription =
"Tezos Stablecoin project implements an FA2-compatible token smart contract.\
\ It draws inspiration from popular permissioned asset contracts like CENTRE Fiat Token and other similar contracts.\
\ The contract is implemented in the LIGO language."
mkViews :: [TZ.View (ToT Storage)]
mkViews =
case mtmd of
Nothing ->
[ getDefaultExpiryView
, getCounterView
]
Just tmd ->
[ getDefaultExpiryView
, getCounterView
, getBalanceView
, getTotalSupplyView
, getAllTokensView
, isOperatorView
, mkTokenMetadataView tmd
]
mkError :: MText -> MText -> Error
mkError err expansion =
TZ.EStatic $ TZ.StaticError
{ seError = toExpression (toVal err)
, seExpansion = toExpression (toVal expansion)
, seLanguages = ["en"]
}
type BalanceViewParam = (Address, Natural)
getBalanceView :: TZ.View (ToT Storage)
getBalanceView =
TZ.View
{ vName = "get_balance"
, vDescription = Just "Access the balance of an address"
, vPure = Just True
, vImplementations = one $
VIMichelsonStorageView $
mkMichelsonStorageView @Storage @Natural Nothing [] $
$$(compileViewCodeTH $ WithParam @BalanceViewParam $
L.dip (L.toField #sLedger) #
L.car #
L.get #
If there is no ledger entry , return zero .
)
}
getTotalSupplyView :: TZ.View (ToT Storage)
getTotalSupplyView =
TZ.View
{ vName = "total_supply"
, vDescription = Just "Get the total no of tokens available."
, vPure = Just True
, vImplementations = one $
VIMichelsonStorageView $
mkMichelsonStorageView @Storage @Natural Nothing [] $
$$(compileViewCodeTH $ WithParam @Natural $
L.int #
L.assertEq0 [mt|Unknown TOKEN ID|] #
L.toField #sTotalSupply
)
}
getAllTokensView :: TZ.View (ToT Storage)
getAllTokensView =
TZ.View
{ vName = "all_tokens"
, vDescription = Just "Get list of token ids supported."
, vPure = Just True
, vImplementations = one $
VIMichelsonStorageView $
mkMichelsonStorageView @Storage Nothing [] $
$$(compileViewCodeTH $ WithoutParam $
L.drop # L.nil # L.push (0 :: Natural) # L.cons
)
}
isOperatorView :: TZ.View (ToT Storage)
isOperatorView =
TZ.View
{ vName = "is_operator"
, vDescription = Just "Check if the given address is an operator"
, vPure = Just True
, vImplementations = one $
VIMichelsonStorageView $
mkMichelsonStorageView @Storage @Bool Nothing [] $
$$(compileViewCodeTH $ WithParam @FA2.OperatorParam $
L.dip (L.toField #sOperators) #
L.getField #opTokenId # forcedCoerce_ @FA2.TokenId @Natural #
L.int #
L.assertEq0 [mt|Unknown TOKEN ID|] #
L.getField #opOwner #
L.dip (L.toField #opOperator) #
L.pair #
L.get #
L.ifSome (L.drop # L.push True) (L.push False)
)
}
mkTokenMetadataView :: FA2.TokenMetadata -> TZ.View (ToT Storage)
mkTokenMetadataView md =
let vc = unsafeCompileViewCode $ WithParam @Natural $
L.dip L.drop #
L.int #
L.assertEq0 [mt|Unknown TOKEN ID|] #
L.push (0 :: Natural, md)
in TZ.View
{ vName = "token_metadata"
, vDescription = Just "Get token metadata for the token id"
, vPure = Just True
, vImplementations = one $
VIMichelsonStorageView $
mkMichelsonStorageView @Storage @(Natural, FA2.TokenMetadata) Nothing [] vc
}
getDefaultExpiryView :: TZ.View (ToT Storage)
getDefaultExpiryView =
TZ.View
{ vName = "GetDefaultExpiry"
, vDescription = Just "Access the contract's default expiry in seconds"
, vPure = Just True
, vImplementations = one $
VIMichelsonStorageView $
mkMichelsonStorageView @Storage @Natural Nothing [] $
$$(compileViewCodeTH $ WithoutParam $
L.toField #sDefaultExpiry
)
}
getCounterView :: TZ.View (ToT Storage)
getCounterView =
TZ.View
{ vName = "GetCounter"
, vDescription = Just "Access the current permit counter"
, vPure = Just True
, vImplementations = one $
VIMichelsonStorageView $
mkMichelsonStorageView @Storage @Natural Nothing [] $
$$(compileViewCodeTH $ WithoutParam $
L.toField #sPermitCounter
)
}
|
c60778b40b33f7fb79d2a9709dce55e351e071f4a3aa3b5f42e5de6f1b83ea90 | jordanthayer/ocaml-search | queue_set.ml | * A queue that does n't allow for duplicates .
@author eaburns
@since 2010 - 02 - 15
@author eaburns
@since 2010-02-15
*)
type 'a t = {
q : 'a Queue.t;
on_q : ('a, bool) Hashtbl.t;
}
let create ?(hash_size=100) () =
(** [create ?hash_size=100 ()] creates a new queue. *)
{ q = Queue.create (); on_q = Hashtbl.create hash_size }
let push q elm =
(** [push q elm] pushes an element onto the queue if it is not there
already. *)
if not (Hashtbl.mem q.on_q elm) then Queue.push elm q.q
let push_all q elms = List.iter (push q) elms
(** [push_all q elms] pushes a list of elements onto the queue. *)
let of_list elms =
(** [of_list elms] creates a queue populated with the given list of
elements. *)
let q = create () in
push_all q elms;
q
let take q =
(** [take q] takes an element off of the front of the queue. *)
let elm = Queue.take q.q in
Hashtbl.remove q.on_q elm;
elm
let on_q q elm = Hashtbl.mem q.on_q elm
* [ on_q q elm ] tests if the given element is on the queue .
let is_empty q = Queue.is_empty q.q
(** [is_empty q] tests if the queue is empty. *)
| null | https://raw.githubusercontent.com/jordanthayer/ocaml-search/57cfc85417aa97ee5d8fbcdb84c333aae148175f/structs/queue_set.ml | ocaml | * [create ?hash_size=100 ()] creates a new queue.
* [push q elm] pushes an element onto the queue if it is not there
already.
* [push_all q elms] pushes a list of elements onto the queue.
* [of_list elms] creates a queue populated with the given list of
elements.
* [take q] takes an element off of the front of the queue.
* [is_empty q] tests if the queue is empty. | * A queue that does n't allow for duplicates .
@author eaburns
@since 2010 - 02 - 15
@author eaburns
@since 2010-02-15
*)
type 'a t = {
q : 'a Queue.t;
on_q : ('a, bool) Hashtbl.t;
}
let create ?(hash_size=100) () =
{ q = Queue.create (); on_q = Hashtbl.create hash_size }
let push q elm =
if not (Hashtbl.mem q.on_q elm) then Queue.push elm q.q
let push_all q elms = List.iter (push q) elms
let of_list elms =
let q = create () in
push_all q elms;
q
let take q =
let elm = Queue.take q.q in
Hashtbl.remove q.on_q elm;
elm
let on_q q elm = Hashtbl.mem q.on_q elm
* [ on_q q elm ] tests if the given element is on the queue .
let is_empty q = Queue.is_empty q.q
|
3ccb0b985ee27a52df95f9801794cc172356640307b52798d2780d6b8813ab6c | clojurewerkz/statistiker | distribution_test.clj | (ns clojurewerkz.statistiker.distribution-test
(:import [org.apache.commons.math3.distribution EnumeratedRealDistribution])
(:require [clojure.test :refer :all]))
| null | https://raw.githubusercontent.com/clojurewerkz/statistiker/f056f68c975cf3d6e0f1c8212aef9114d4eb657c/test/clj/clojurewerkz/statistiker/distribution_test.clj | clojure | (ns clojurewerkz.statistiker.distribution-test
(:import [org.apache.commons.math3.distribution EnumeratedRealDistribution])
(:require [clojure.test :refer :all]))
| |
0d4bb2ad27ce99030d7a9afad627d1b6a36740235b0804912d3403bc883e9f45 | jtanguy/hmacaroons | Internal.hs | {-# LANGUAGE OverloadedStrings #-}
|
Module : Crypto . Macaroon . Internal
Copyright : ( c ) 2015
License : BSD3
Maintainer :
Stability : experimental
Portability : portable
Internal representation of a macaroon
Module : Crypto.Macaroon.Internal
Copyright : (c) 2015 Julien Tanguy
License : BSD3
Maintainer :
Stability : experimental
Portability : portable
Internal representation of a macaroon
-}
module Crypto.Macaroon.Internal where
import Control.DeepSeq
import Crypto.Hash
import Data.Byteable
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as B8
import Data.Hex
import Data.List
|Type alias for Macaroons secret keys
type Secret = BS.ByteString
|Type alias for Macaroons and Caveat and identifiers
type Key = BS.ByteString
|Type alias for Macaroons and Caveat locations
type Location = BS.ByteString
|Type alias for Macaroons signatures
type Sig = BS.ByteString
-- | Main structure of a macaroon
data Macaroon = MkMacaroon { location :: Location
-- ^ Target location
, identifier :: Key
^ Macaroon Identifier
, caveats :: [Caveat]
-- ^ List of caveats
, signature :: Sig
^ Macaroon HMAC signature
}
| Constant - time instance
instance Eq Macaroon where
(MkMacaroon l1 i1 c1 s1) == (MkMacaroon l2 i2 c2 s2) =
(l1 `constEqBytes` l2) &&!
(i1 `constEqBytes` i2) &&!
(c1 == c2) &&!
(s1 `constEqBytes` s2)
-- | show instance conforming to the @inspect@ "specification"
instance Show Macaroon where
We use intercalate because unlines would add a trailing newline
show (MkMacaroon l i c s) = intercalate "\n" [
"location " ++ B8.unpack l
, "identifier " ++ B8.unpack i
, intercalate "\n" (map show c)
, "signature " ++ B8.unpack (hex s)
]
-- | NFData instance for use in the benchmark
instance NFData Macaroon where
rnf (MkMacaroon loc ident cavs sig) = rnf loc `seq` rnf ident `seq` rnf cavs `seq` rnf sig
-- | Caveat structure
data Caveat = MkCaveat { cid :: Key
-- ^ Caveat identifier
, vid :: Key
-- ^ Caveat verification key identifier
, cl :: Location
-- ^ Caveat target location
}
| Constant - time instance
instance Eq Caveat where
(MkCaveat c1 v1 l1) == (MkCaveat c2 v2 l2) =
(c1 `constEqBytes` c2) &&!
(v1 `constEqBytes` v2) &&!
(l1 `constEqBytes` l2)
-- | show instance conforming to the @inspect@ "specification"
instance Show Caveat where
show (MkCaveat c v l) | v == BS.empty = "cid " ++ B8.unpack c
| otherwise = unlines [ "cid " ++ B8.unpack c
, "vid " ++ B8.unpack v
, "cl " ++ B8.unpack l
]
-- | NFData instance for use in the benchmark
instance NFData Caveat where
rnf (MkCaveat cid vid cl) = rnf cid `seq` rnf vid `seq` rnf cl
| Primitive to add a First or Third party caveat to a macaroon
-- For internal use only
addCaveat :: Location
-> Key
-> Key
-> Macaroon
-> Macaroon
addCaveat loc cid vid m = m { caveats = cavs ++ [cav'], signature = sig}
where
cavs = caveats m
cav' = MkCaveat cid vid loc
sig = toBytes (hmac (signature m) (BS.append vid cid) :: HMAC SHA256)
-- | Utility non-short circuiting '&&' function.
(&&!) :: Bool -> Bool -> Bool
True &&! True = True
True &&! False = False
False &&! True = False
False &&! False = False
| null | https://raw.githubusercontent.com/jtanguy/hmacaroons/6fbca87836a4baef171c5ffc774387766c709fbf/src/Crypto/Macaroon/Internal.hs | haskell | # LANGUAGE OverloadedStrings #
| Main structure of a macaroon
^ Target location
^ List of caveats
| show instance conforming to the @inspect@ "specification"
| NFData instance for use in the benchmark
| Caveat structure
^ Caveat identifier
^ Caveat verification key identifier
^ Caveat target location
| show instance conforming to the @inspect@ "specification"
| NFData instance for use in the benchmark
For internal use only
| Utility non-short circuiting '&&' function. | |
Module : Crypto . Macaroon . Internal
Copyright : ( c ) 2015
License : BSD3
Maintainer :
Stability : experimental
Portability : portable
Internal representation of a macaroon
Module : Crypto.Macaroon.Internal
Copyright : (c) 2015 Julien Tanguy
License : BSD3
Maintainer :
Stability : experimental
Portability : portable
Internal representation of a macaroon
-}
module Crypto.Macaroon.Internal where
import Control.DeepSeq
import Crypto.Hash
import Data.Byteable
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as B8
import Data.Hex
import Data.List
|Type alias for Macaroons secret keys
type Secret = BS.ByteString
|Type alias for Macaroons and Caveat and identifiers
type Key = BS.ByteString
|Type alias for Macaroons and Caveat locations
type Location = BS.ByteString
|Type alias for Macaroons signatures
type Sig = BS.ByteString
data Macaroon = MkMacaroon { location :: Location
, identifier :: Key
^ Macaroon Identifier
, caveats :: [Caveat]
, signature :: Sig
^ Macaroon HMAC signature
}
| Constant - time instance
instance Eq Macaroon where
(MkMacaroon l1 i1 c1 s1) == (MkMacaroon l2 i2 c2 s2) =
(l1 `constEqBytes` l2) &&!
(i1 `constEqBytes` i2) &&!
(c1 == c2) &&!
(s1 `constEqBytes` s2)
instance Show Macaroon where
We use intercalate because unlines would add a trailing newline
show (MkMacaroon l i c s) = intercalate "\n" [
"location " ++ B8.unpack l
, "identifier " ++ B8.unpack i
, intercalate "\n" (map show c)
, "signature " ++ B8.unpack (hex s)
]
instance NFData Macaroon where
rnf (MkMacaroon loc ident cavs sig) = rnf loc `seq` rnf ident `seq` rnf cavs `seq` rnf sig
data Caveat = MkCaveat { cid :: Key
, vid :: Key
, cl :: Location
}
| Constant - time instance
instance Eq Caveat where
(MkCaveat c1 v1 l1) == (MkCaveat c2 v2 l2) =
(c1 `constEqBytes` c2) &&!
(v1 `constEqBytes` v2) &&!
(l1 `constEqBytes` l2)
instance Show Caveat where
show (MkCaveat c v l) | v == BS.empty = "cid " ++ B8.unpack c
| otherwise = unlines [ "cid " ++ B8.unpack c
, "vid " ++ B8.unpack v
, "cl " ++ B8.unpack l
]
instance NFData Caveat where
rnf (MkCaveat cid vid cl) = rnf cid `seq` rnf vid `seq` rnf cl
| Primitive to add a First or Third party caveat to a macaroon
addCaveat :: Location
-> Key
-> Key
-> Macaroon
-> Macaroon
addCaveat loc cid vid m = m { caveats = cavs ++ [cav'], signature = sig}
where
cavs = caveats m
cav' = MkCaveat cid vid loc
sig = toBytes (hmac (signature m) (BS.append vid cid) :: HMAC SHA256)
(&&!) :: Bool -> Bool -> Bool
True &&! True = True
True &&! False = False
False &&! True = False
False &&! False = False
|
c7623163614ecdf92dc787eac0547274dbb396cb98a7f26de785b6e0ab6fbdda | spawnfest/eep49ers | z.erl | -module(z).
| null | https://raw.githubusercontent.com/spawnfest/eep49ers/d1020fd625a0bbda8ab01caf0e1738eb1cf74886/lib/tools/test/cover_SUITE_data/compile_beam/z.erl | erlang | -module(z).
| |
941b7697a6c36a58bb0b3a613ae9a214170540f4cf571ff34db636715b83dbc4 | themetaschemer/malt | test-D-extend.rkt | (module+ test
(require rackunit)
(define r0-td 3.0)
(define r1-td (tensor 3.0 4.0 5.0))
(define r2-td (tensor (tensor 3.0 4.0 5.0) (tensor 7.0 8.0 9.0)))
(define test-shape (list 2 2 3))
(check-equal? (tmap (λ (x) (+ x 1)) r1-td) (tensor 4.0 5.0 6.0))
(check-true (rank> r2-td r1-td))
(check-false (rank> r1-td r2-td))
(check-true (of-ranks? 1 r1-td 2 r2-td))
(check-false (of-ranks? 2 r1-td 2 r2-td))
(check-true (of-rank? 2 r2-td))
(check-false (of-rank? 1 r2-td))
(check-equal?
(desc-u
(λ (t e)
(+! t e))
(tensor 0 1 2 3)
(tensor 4 5 6 7))
(tensor (tensor 4 5 6 7)
(tensor 5 6 7 8)
(tensor 6 7 8 9)
(tensor 7 8 9 10)))
(check-equal?
(desc-t
(λ (e u)
(+! e u))
(tensor 4 5 6 7)
(tensor 0 1 2 3))
(tensor (tensor 4 5 6 7)
(tensor 5 6 7 8)
(tensor 6 7 8 9)
(tensor 7 8 9 10)))
(check-equal? (+! (tensor 1 2 3 4) 2)
(tensor 3 4 5 6))
(let-values (((gt gu) (fill-gu-acc-gt
(make-vector 3 0.0)
(λ (i) (values (+ i 1.0) (+ i 2.0)))
2
0.0)))
(check-equal? gu (tensor 2.0 3.0 4.0))
(check-equal? gt 6.0))
(let-values (((gt gu) (build-gu-acc-gt 3
(λ (i) (values (+ i 1.0) (+ i 2.0))))))
(check-equal? gu (tensor 2.0 3.0 4.0))
(check-equal? gt 6.0))
(let-values (((gt gu) (fill-gt-acc-gu
(make-vector 3 0.0)
(λ (i) (values (+ i 2.0) (+ i 1.0)))
2
0.0)))
(check-equal? gt (tensor 2.0 3.0 4.0))
(check-equal? gu 6.0))
(let-values (((gt gu) (build-gt-acc-gu 3
(λ (i) (values (+ i 2.0) (+ i 1.0))))))
(check-equal? gt (tensor 2.0 3.0 4.0))
(check-equal? gu 6.0))
(let-values (((gt gu) (fill-gt-gu
(make-vector 3 0.0)
(make-vector 3 0.0)
(λ (i) (values (+ i 2.0) (+ i 1.0)))
2)))
(check-equal? gt (tensor 2.0 3.0 4.0))
(check-equal? gu (tensor 1.0 2.0 3.0)))
(let-values (((gt gu) (build-gt-gu 3
(λ (i) (values (+ i 2.0) (+ i 1.0))))))
(check-equal? gt (tensor 2.0 3.0 4.0))
(check-equal? gu (tensor 1.0 2.0 3.0)))
(let-values (((gt gu) (desc-u-∇
(λ (t ui zi)
(values (* zi ui) (* zi t)))
6.0
(tensor 2.0 3.0 4.0)
(tensor 1.0 1.0 1.0))))
(check-equal? gt 9.0)
(check-equal? gu (tensor 6.0 6.0 6.0)))
(let-values (((gt gu) (desc-t-∇
(λ (ti u zi)
(values (* zi u) (* zi ti)))
(tensor 2.0 3.0 4.0)
6.0
(tensor 1.0 1.0 1.0))))
(check-equal? gt (tensor 6.0 6.0 6.0))
(check-equal? gu 9.0))
(let-values (((gt gu) (tmap2
(λ (ti ui zi)
(values (* zi ui) (* zi ti)))
(tensor 2.0 3.0 4.0)
(tensor 1.0 2.0 3.0)
(tensor 1.0 1.0 1.0))))
(check-equal? gt (tensor 1.0 2.0 3.0))
(check-equal? gu (tensor 2.0 3.0 4.0)))
(define *∇ (ext2-∇ (λ (a b z) (values (* z b) (* z a)))
0
0))
(let-values (((gt gu) (*∇ (tensor 2.0 3.0 4.0) (tensor 1.0 2.0 3.0) (tensor 1.0 1.0 1.0))))
(check-equal? gt (tensor 1.0 2.0 3.0))
(check-equal? gu (tensor 2.0 3.0 4.0)))
(define sum-1-∇
(λ (t z)
(tmap (λ (ti) z) t)))
(define sum-∇ (ext1-∇ sum-1-∇ 1))
(let ((gt (sum-∇ (tensor 2.0 3.0 4.0)
1.0)))
(check-equal? gt (tensor 1.0 1.0 1.0)))
(let ((gt (sum-∇ (tensor (tensor 2.0 3.0 4.0)
(tensor 2.0 3.0 4.0))
(tensor 2.0 1.0))))
(check-equal? gt (tensor (tensor 2.0 2.0 2.0)
(tensor 1.0 1.0 1.0))))
)
| null | https://raw.githubusercontent.com/themetaschemer/malt/78a04063a5a343f5cf4332e84da0e914cdb4d347/nested-tensors/tensors/test/test-D-extend.rkt | racket | (module+ test
(require rackunit)
(define r0-td 3.0)
(define r1-td (tensor 3.0 4.0 5.0))
(define r2-td (tensor (tensor 3.0 4.0 5.0) (tensor 7.0 8.0 9.0)))
(define test-shape (list 2 2 3))
(check-equal? (tmap (λ (x) (+ x 1)) r1-td) (tensor 4.0 5.0 6.0))
(check-true (rank> r2-td r1-td))
(check-false (rank> r1-td r2-td))
(check-true (of-ranks? 1 r1-td 2 r2-td))
(check-false (of-ranks? 2 r1-td 2 r2-td))
(check-true (of-rank? 2 r2-td))
(check-false (of-rank? 1 r2-td))
(check-equal?
(desc-u
(λ (t e)
(+! t e))
(tensor 0 1 2 3)
(tensor 4 5 6 7))
(tensor (tensor 4 5 6 7)
(tensor 5 6 7 8)
(tensor 6 7 8 9)
(tensor 7 8 9 10)))
(check-equal?
(desc-t
(λ (e u)
(+! e u))
(tensor 4 5 6 7)
(tensor 0 1 2 3))
(tensor (tensor 4 5 6 7)
(tensor 5 6 7 8)
(tensor 6 7 8 9)
(tensor 7 8 9 10)))
(check-equal? (+! (tensor 1 2 3 4) 2)
(tensor 3 4 5 6))
(let-values (((gt gu) (fill-gu-acc-gt
(make-vector 3 0.0)
(λ (i) (values (+ i 1.0) (+ i 2.0)))
2
0.0)))
(check-equal? gu (tensor 2.0 3.0 4.0))
(check-equal? gt 6.0))
(let-values (((gt gu) (build-gu-acc-gt 3
(λ (i) (values (+ i 1.0) (+ i 2.0))))))
(check-equal? gu (tensor 2.0 3.0 4.0))
(check-equal? gt 6.0))
(let-values (((gt gu) (fill-gt-acc-gu
(make-vector 3 0.0)
(λ (i) (values (+ i 2.0) (+ i 1.0)))
2
0.0)))
(check-equal? gt (tensor 2.0 3.0 4.0))
(check-equal? gu 6.0))
(let-values (((gt gu) (build-gt-acc-gu 3
(λ (i) (values (+ i 2.0) (+ i 1.0))))))
(check-equal? gt (tensor 2.0 3.0 4.0))
(check-equal? gu 6.0))
(let-values (((gt gu) (fill-gt-gu
(make-vector 3 0.0)
(make-vector 3 0.0)
(λ (i) (values (+ i 2.0) (+ i 1.0)))
2)))
(check-equal? gt (tensor 2.0 3.0 4.0))
(check-equal? gu (tensor 1.0 2.0 3.0)))
(let-values (((gt gu) (build-gt-gu 3
(λ (i) (values (+ i 2.0) (+ i 1.0))))))
(check-equal? gt (tensor 2.0 3.0 4.0))
(check-equal? gu (tensor 1.0 2.0 3.0)))
(let-values (((gt gu) (desc-u-∇
(λ (t ui zi)
(values (* zi ui) (* zi t)))
6.0
(tensor 2.0 3.0 4.0)
(tensor 1.0 1.0 1.0))))
(check-equal? gt 9.0)
(check-equal? gu (tensor 6.0 6.0 6.0)))
(let-values (((gt gu) (desc-t-∇
(λ (ti u zi)
(values (* zi u) (* zi ti)))
(tensor 2.0 3.0 4.0)
6.0
(tensor 1.0 1.0 1.0))))
(check-equal? gt (tensor 6.0 6.0 6.0))
(check-equal? gu 9.0))
(let-values (((gt gu) (tmap2
(λ (ti ui zi)
(values (* zi ui) (* zi ti)))
(tensor 2.0 3.0 4.0)
(tensor 1.0 2.0 3.0)
(tensor 1.0 1.0 1.0))))
(check-equal? gt (tensor 1.0 2.0 3.0))
(check-equal? gu (tensor 2.0 3.0 4.0)))
(define *∇ (ext2-∇ (λ (a b z) (values (* z b) (* z a)))
0
0))
(let-values (((gt gu) (*∇ (tensor 2.0 3.0 4.0) (tensor 1.0 2.0 3.0) (tensor 1.0 1.0 1.0))))
(check-equal? gt (tensor 1.0 2.0 3.0))
(check-equal? gu (tensor 2.0 3.0 4.0)))
(define sum-1-∇
(λ (t z)
(tmap (λ (ti) z) t)))
(define sum-∇ (ext1-∇ sum-1-∇ 1))
(let ((gt (sum-∇ (tensor 2.0 3.0 4.0)
1.0)))
(check-equal? gt (tensor 1.0 1.0 1.0)))
(let ((gt (sum-∇ (tensor (tensor 2.0 3.0 4.0)
(tensor 2.0 3.0 4.0))
(tensor 2.0 1.0))))
(check-equal? gt (tensor (tensor 2.0 2.0 2.0)
(tensor 1.0 1.0 1.0))))
)
| |
e2f8eab211e1a6224014c5d93f70c18b95aea3cc53662de67dfd39f4f505a78e | foretspaisibles/cl-kaputt | package.lisp | package.lisp — A Testsuite for the Kaputt Test Framework
( -kaputt )
This file is part of Kaputt .
;;;;
Copyright © 2019–2021
;;;; All rights reserved.
This software is governed by the CeCILL - B license under French law and
;;;; abiding by the rules of distribution of free software. You can use,
modify and/ or redistribute the software under the terms of the CeCILL - B
license as circulated by CEA , CNRS and INRIA at the following URL
;;;; "-B_V1-en.txt"
(defpackage #:kaputt/testsuite
(:use #:common-lisp #:kaputt)
(:import-from #:alexandria #:with-unique-names)
(:export
#:run-all-tests
#:run-all-tests-batch)
(:documentation
"A testsuite for the Kaputt Test Framewok."))
End of file ` package.lisp '
| null | https://raw.githubusercontent.com/foretspaisibles/cl-kaputt/94ff2a96ced6576e3995b445b78ab2a4bf09c57f/testsuite/package.lisp | lisp |
All rights reserved.
abiding by the rules of distribution of free software. You can use,
"-B_V1-en.txt" | package.lisp — A Testsuite for the Kaputt Test Framework
( -kaputt )
This file is part of Kaputt .
Copyright © 2019–2021
This software is governed by the CeCILL - B license under French law and
modify and/ or redistribute the software under the terms of the CeCILL - B
license as circulated by CEA , CNRS and INRIA at the following URL
(defpackage #:kaputt/testsuite
(:use #:common-lisp #:kaputt)
(:import-from #:alexandria #:with-unique-names)
(:export
#:run-all-tests
#:run-all-tests-batch)
(:documentation
"A testsuite for the Kaputt Test Framewok."))
End of file ` package.lisp '
|
0ab94e5af00ff101f55075615168f9e11765a704f938ab19a629db61ddf851de | bvaugon/ocapic | lazies.ml | (*************************************************************************)
(* *)
(* OCaPIC *)
(* *)
(* *)
This file is distributed under the terms of the CeCILL license .
(* See file ../../LICENSE-en. *)
(* *)
(*************************************************************************)
let sleep () = Sys.sleep 4000;;
Display.write_string "a";;
sleep ();;
let x = lazy (
Display.write_string "Hello";
sleep ();
42
);;
Gc.run ();;
Display.write_string "b";;
sleep ();;
let n = Lazy.force x;;
Gc.run ();;
sleep ();;
Display.write_string "c";;
sleep ();;
Display.write_int n;;
sleep ();;
let n = Lazy.force x;;
sleep ();;
Display.write_string "d";;
sleep ();;
Display.write_int n;;
sleep ();;
Display.write_string "e";;
| null | https://raw.githubusercontent.com/bvaugon/ocapic/a14cd9ec3f5022aeb5fe2264d595d7e8f1ddf58a/tests/lazies/lazies.ml | ocaml | ***********************************************************************
OCaPIC
See file ../../LICENSE-en.
*********************************************************************** |
This file is distributed under the terms of the CeCILL license .
let sleep () = Sys.sleep 4000;;
Display.write_string "a";;
sleep ();;
let x = lazy (
Display.write_string "Hello";
sleep ();
42
);;
Gc.run ();;
Display.write_string "b";;
sleep ();;
let n = Lazy.force x;;
Gc.run ();;
sleep ();;
Display.write_string "c";;
sleep ();;
Display.write_int n;;
sleep ();;
let n = Lazy.force x;;
sleep ();;
Display.write_string "d";;
sleep ();;
Display.write_int n;;
sleep ();;
Display.write_string "e";;
|
99076db6d811049d85539b8ba9de254ce5f2d8d0b00596828345691c9ac8a7b8 | deepfire/holotype | ghc-repro-16095-2.hs | # LANGUAGE AllowAmbiguousTypes #
# LANGUAGE DataKinds #
{-# LANGUAGE GADTs #-}
# LANGUAGE KindSignatures #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeApplications #
# LANGUAGE TypeOperators #
module Repro
where
import Generics.SOP
recover :: forall a xs.
(Code a ~ '[xs], HasDatatypeInfo a)
=> a
recover =
case datatypeInfo (Proxy @a) :: DatatypeInfo '[xs] of
Newtype _ _ _ ->
let sop :: NP [] xs =
(undefined
:: forall c xs
. All c xs
=> NP [] xs)
in undefined
| null | https://raw.githubusercontent.com/deepfire/holotype/d33052f588b74616560b81616ffc4a0142f8a617/doc/ghc-repro-16095-2.hs | haskell | # LANGUAGE GADTs # | # LANGUAGE AllowAmbiguousTypes #
# LANGUAGE DataKinds #
# LANGUAGE KindSignatures #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeApplications #
# LANGUAGE TypeOperators #
module Repro
where
import Generics.SOP
recover :: forall a xs.
(Code a ~ '[xs], HasDatatypeInfo a)
=> a
recover =
case datatypeInfo (Proxy @a) :: DatatypeInfo '[xs] of
Newtype _ _ _ ->
let sop :: NP [] xs =
(undefined
:: forall c xs
. All c xs
=> NP [] xs)
in undefined
|
0bee15e1dc847d6a798ccc0deb1d9a8d1d2a6aa1a6e57ec7fa94db8f6ec5f6b3 | bn-d/ppx_pyformat | utils.ml | open Ppx_pyformat.Types
let format_spec = String_format { fill = None }
let make_field ?index ?conversion ?(format_spec = format_spec) arg =
Field (make_replacement_field ~arg ?index ?conversion ~format_spec ())
let make_string = make_string_format_of_format_spec
let make_int = make_int_format_of_format_spec
let make_float = make_float_format_of_format_spec
let test str expected _ =
Ppx_pyformat.Utils.parse str
|> OUnit2.assert_equal ~printer:Printer_utils.string_of_elements expected
let test_exc str exc _ =
let f _ = Ppx_pyformat.Utils.parse str in
OUnit2.assert_raises exc f
| null | https://raw.githubusercontent.com/bn-d/ppx_pyformat/bcb0031cf9fbce12d54d0e8d927ecf41ff3cab97/test/parser/utils.ml | ocaml | open Ppx_pyformat.Types
let format_spec = String_format { fill = None }
let make_field ?index ?conversion ?(format_spec = format_spec) arg =
Field (make_replacement_field ~arg ?index ?conversion ~format_spec ())
let make_string = make_string_format_of_format_spec
let make_int = make_int_format_of_format_spec
let make_float = make_float_format_of_format_spec
let test str expected _ =
Ppx_pyformat.Utils.parse str
|> OUnit2.assert_equal ~printer:Printer_utils.string_of_elements expected
let test_exc str exc _ =
let f _ = Ppx_pyformat.Utils.parse str in
OUnit2.assert_raises exc f
| |
c4f8e65bcae28f56fbc7ac3396f9eedb859e2164072897b39dbfb9aede530c58 | c-cube/funarith | Rat.mli |
* { 1 Interface for Rationals }
* This abstracts over a type and operation for rationals .
A possible implementation for arbitrary precision numbers
is Zarith , with module { ! Q }
A possible implementation for arbitrary precision numbers
is Zarith, with module {!Q}
*)
module type S = Rat_intf.S
| null | https://raw.githubusercontent.com/c-cube/funarith/1c86ac45e9608efaa761e3f14455402730885339/src/Rat.mli | ocaml |
* { 1 Interface for Rationals }
* This abstracts over a type and operation for rationals .
A possible implementation for arbitrary precision numbers
is Zarith , with module { ! Q }
A possible implementation for arbitrary precision numbers
is Zarith, with module {!Q}
*)
module type S = Rat_intf.S
| |
5f09c90a296fbb537e634ec62a50a338e537d1dab7d70a31be5dffa0eb69f2a2 | input-output-hk/cardano-wallet | WalletState.hs | # LANGUAGE DeriveGeneric #
# LANGUAGE NamedFieldPuns #
# LANGUAGE OverloadedLabels #
# LANGUAGE StandaloneDeriving #
# LANGUAGE TypeFamilies #
-- |
Copyright : © 2022 IOHK
-- License: Apache-2.0
--
-- Pure data type which represents the entire wallet state,
-- including all checkpoints.
--
-- FIXME during ADP-1043: Actually include everything,
e.g. TxHistory , Pending transactions , …
module Cardano.Wallet.DB.WalletState
( -- * Wallet state
WalletState (..)
, fromGenesis
, getLatest
, findNearestPoint
* WalletCheckpoint ( internal use mostly )
, WalletCheckpoint (..)
, toWallet
, fromWallet
, getBlockHeight
, getSlot
* Delta types
, DeltaWalletState1 (..)
, DeltaWalletState
-- * Multiple wallets
, DeltaMap (..)
, ErrNoSuchWallet (..)
, adjustNoSuchWallet
) where
import Prelude
import Cardano.Wallet.Address.Book
( AddressBookIso (..), Discoveries, Prologue )
import Cardano.Wallet.Checkpoints
( Checkpoints )
import Cardano.Wallet.DB.Errors
( ErrNoSuchWallet (..) )
import Cardano.Wallet.DB.Store.Submissions.Layer
( emptyTxSubmissions )
import Cardano.Wallet.DB.Store.Submissions.Operations
( DeltaTxSubmissions, TxSubmissions )
import Cardano.Wallet.Primitive.Types
( BlockHeader, WalletId )
import Cardano.Wallet.Primitive.Types.UTxO
( UTxO )
import Data.Delta
( Delta (..) )
import Data.DeltaMap
( DeltaMap (..) )
import Data.Generics.Internal.VL
( withIso )
import Data.Generics.Internal.VL.Lens
( over, view, (^.) )
import Data.Map.Strict
( Map )
import Data.Word
( Word32 )
import Fmt
( Buildable (..), pretty )
import GHC.Generics
( Generic )
import qualified Cardano.Wallet.Checkpoints as CPS
import qualified Cardano.Wallet.Primitive.Model as W
import qualified Cardano.Wallet.Primitive.Types as W
import qualified Data.Map.Strict as Map
{-------------------------------------------------------------------------------
Wallet Checkpoint
-------------------------------------------------------------------------------}
-- | Data stored in a single checkpoint.
-- Only includes the 'UTxO' and the 'Discoveries', but not the 'Prologue'.
data WalletCheckpoint s = WalletCheckpoint
{ currentTip :: !BlockHeader
, utxo :: !UTxO
, discoveries :: !(Discoveries s)
} deriving (Generic)
deriving instance AddressBookIso s => Eq (WalletCheckpoint s)
-- | Helper function: Get the block height of a wallet checkpoint.
getBlockHeight :: WalletCheckpoint s -> Word32
getBlockHeight (WalletCheckpoint currentTip _ _) =
currentTip ^. (#blockHeight . #getQuantity)
-- | Helper function: Get the 'Slot' of a wallet checkpoint.
getSlot :: WalletCheckpoint s -> W.Slot
getSlot (WalletCheckpoint currentTip _ _) =
W.toSlot . W.chainPointFromBlockHeader $ currentTip
| Convert a stored ' WalletCheckpoint ' to the legacy ' W.Wallet ' state .
toWallet :: AddressBookIso s => Prologue s -> WalletCheckpoint s -> W.Wallet s
toWallet pro (WalletCheckpoint pt utxo dis) =
W.unsafeInitWallet utxo pt $ withIso addressIso $ \_ from -> from (pro,dis)
| Convert a legacy ' W.Wallet ' state to a ' Prologue ' and a ' WalletCheckpoint '
fromWallet :: AddressBookIso s => W.Wallet s -> (Prologue s, WalletCheckpoint s)
fromWallet w = (pro, WalletCheckpoint (W.currentTip w) (W.utxo w) dis)
where
(pro, dis) = withIso addressIso $ \to _ -> to (w ^. #getState)
{-------------------------------------------------------------------------------
Wallet State
-------------------------------------------------------------------------------}
-- | Wallet state. Currently includes:
--
-- * Prologue of the address discovery state
-- * Checkpoints of UTxO and of discoveries of the address discovery state.
--
FIXME during ADP-1043 : Include also TxHistory , pending transactions , … ,
-- everything.
data WalletState s = WalletState
{ prologue :: !(Prologue s)
, checkpoints :: !(Checkpoints (WalletCheckpoint s))
, submissions :: !TxSubmissions
} deriving (Generic)
deriving instance AddressBookIso s => Eq (WalletState s)
-- | Create a wallet from the genesis block.
fromGenesis :: AddressBookIso s => W.Wallet s -> Maybe (WalletState s)
fromGenesis cp
| W.isGenesisBlockHeader header = Just $
WalletState
{ prologue
, checkpoints = CPS.fromGenesis checkpoint
, submissions = emptyTxSubmissions
}
| otherwise = Nothing
where
header = cp ^. #currentTip
(prologue, checkpoint) = fromWallet cp
-- | Get the wallet checkpoint with the largest slot number
getLatest :: AddressBookIso s => WalletState s -> W.Wallet s
getLatest w =
toWallet (w ^. #prologue) . snd $ CPS.getLatest (w ^. #checkpoints)
| Find the nearest ' ' that is either at the given point or before .
findNearestPoint :: WalletState s -> W.Slot -> Maybe W.Slot
findNearestPoint = CPS.findNearestPoint . view #checkpoints
{-------------------------------------------------------------------------------
Delta type for the wallet state
-------------------------------------------------------------------------------}
type DeltaWalletState s = [DeltaWalletState1 s]
data DeltaWalletState1 s
= ReplacePrologue (Prologue s)
-- ^ Replace the prologue of the address discovery state
| UpdateCheckpoints (CPS.DeltasCheckpoints (WalletCheckpoint s))
-- ^ Update the wallet checkpoints.
| UpdateSubmissions [DeltaTxSubmissions]
instance Delta (DeltaWalletState1 s) where
type Base (DeltaWalletState1 s) = WalletState s
apply (ReplacePrologue p) = over #prologue $ const p
apply (UpdateCheckpoints d) = over #checkpoints $ apply d
apply (UpdateSubmissions d) = over #submissions $ apply d
instance Buildable (DeltaWalletState1 s) where
build (ReplacePrologue _) = "ReplacePrologue …"
build (UpdateCheckpoints d) = "UpdateCheckpoints (" <> build d <> ")"
build (UpdateSubmissions d) = "UpdateSubmissions (" <> build d <> ")"
instance Show (DeltaWalletState1 s) where
show = pretty
{-------------------------------------------------------------------------------
Multiple wallets.
-------------------------------------------------------------------------------}
| Adjust a specific wallet if it exists or return ' ErrNoSuchWallet ' .
adjustNoSuchWallet
:: WalletId
-> (ErrNoSuchWallet -> e)
-> (w -> Either e (dw, b))
-> (Map WalletId w -> (Maybe (DeltaMap WalletId dw), Either e b))
adjustNoSuchWallet wid err update wallets = case Map.lookup wid wallets of
Nothing -> (Nothing, Left $ err $ ErrNoSuchWallet wid)
Just wal -> case update wal of
Left e -> (Nothing, Left e)
Right (dw, b) -> (Just $ Adjust wid dw, Right b)
| null | https://raw.githubusercontent.com/input-output-hk/cardano-wallet/ce8843fa0cc82a3b19863e2263abfdc332c4c63a/lib/wallet/src/Cardano/Wallet/DB/WalletState.hs | haskell | |
License: Apache-2.0
Pure data type which represents the entire wallet state,
including all checkpoints.
FIXME during ADP-1043: Actually include everything,
* Wallet state
* Multiple wallets
------------------------------------------------------------------------------
Wallet Checkpoint
------------------------------------------------------------------------------
| Data stored in a single checkpoint.
Only includes the 'UTxO' and the 'Discoveries', but not the 'Prologue'.
| Helper function: Get the block height of a wallet checkpoint.
| Helper function: Get the 'Slot' of a wallet checkpoint.
------------------------------------------------------------------------------
Wallet State
------------------------------------------------------------------------------
| Wallet state. Currently includes:
* Prologue of the address discovery state
* Checkpoints of UTxO and of discoveries of the address discovery state.
everything.
| Create a wallet from the genesis block.
| Get the wallet checkpoint with the largest slot number
------------------------------------------------------------------------------
Delta type for the wallet state
------------------------------------------------------------------------------
^ Replace the prologue of the address discovery state
^ Update the wallet checkpoints.
------------------------------------------------------------------------------
Multiple wallets.
------------------------------------------------------------------------------ | # LANGUAGE DeriveGeneric #
# LANGUAGE NamedFieldPuns #
# LANGUAGE OverloadedLabels #
# LANGUAGE StandaloneDeriving #
# LANGUAGE TypeFamilies #
Copyright : © 2022 IOHK
e.g. TxHistory , Pending transactions , …
module Cardano.Wallet.DB.WalletState
WalletState (..)
, fromGenesis
, getLatest
, findNearestPoint
* WalletCheckpoint ( internal use mostly )
, WalletCheckpoint (..)
, toWallet
, fromWallet
, getBlockHeight
, getSlot
* Delta types
, DeltaWalletState1 (..)
, DeltaWalletState
, DeltaMap (..)
, ErrNoSuchWallet (..)
, adjustNoSuchWallet
) where
import Prelude
import Cardano.Wallet.Address.Book
( AddressBookIso (..), Discoveries, Prologue )
import Cardano.Wallet.Checkpoints
( Checkpoints )
import Cardano.Wallet.DB.Errors
( ErrNoSuchWallet (..) )
import Cardano.Wallet.DB.Store.Submissions.Layer
( emptyTxSubmissions )
import Cardano.Wallet.DB.Store.Submissions.Operations
( DeltaTxSubmissions, TxSubmissions )
import Cardano.Wallet.Primitive.Types
( BlockHeader, WalletId )
import Cardano.Wallet.Primitive.Types.UTxO
( UTxO )
import Data.Delta
( Delta (..) )
import Data.DeltaMap
( DeltaMap (..) )
import Data.Generics.Internal.VL
( withIso )
import Data.Generics.Internal.VL.Lens
( over, view, (^.) )
import Data.Map.Strict
( Map )
import Data.Word
( Word32 )
import Fmt
( Buildable (..), pretty )
import GHC.Generics
( Generic )
import qualified Cardano.Wallet.Checkpoints as CPS
import qualified Cardano.Wallet.Primitive.Model as W
import qualified Cardano.Wallet.Primitive.Types as W
import qualified Data.Map.Strict as Map
data WalletCheckpoint s = WalletCheckpoint
{ currentTip :: !BlockHeader
, utxo :: !UTxO
, discoveries :: !(Discoveries s)
} deriving (Generic)
deriving instance AddressBookIso s => Eq (WalletCheckpoint s)
getBlockHeight :: WalletCheckpoint s -> Word32
getBlockHeight (WalletCheckpoint currentTip _ _) =
currentTip ^. (#blockHeight . #getQuantity)
getSlot :: WalletCheckpoint s -> W.Slot
getSlot (WalletCheckpoint currentTip _ _) =
W.toSlot . W.chainPointFromBlockHeader $ currentTip
| Convert a stored ' WalletCheckpoint ' to the legacy ' W.Wallet ' state .
toWallet :: AddressBookIso s => Prologue s -> WalletCheckpoint s -> W.Wallet s
toWallet pro (WalletCheckpoint pt utxo dis) =
W.unsafeInitWallet utxo pt $ withIso addressIso $ \_ from -> from (pro,dis)
| Convert a legacy ' W.Wallet ' state to a ' Prologue ' and a ' WalletCheckpoint '
fromWallet :: AddressBookIso s => W.Wallet s -> (Prologue s, WalletCheckpoint s)
fromWallet w = (pro, WalletCheckpoint (W.currentTip w) (W.utxo w) dis)
where
(pro, dis) = withIso addressIso $ \to _ -> to (w ^. #getState)
FIXME during ADP-1043 : Include also TxHistory , pending transactions , … ,
data WalletState s = WalletState
{ prologue :: !(Prologue s)
, checkpoints :: !(Checkpoints (WalletCheckpoint s))
, submissions :: !TxSubmissions
} deriving (Generic)
deriving instance AddressBookIso s => Eq (WalletState s)
fromGenesis :: AddressBookIso s => W.Wallet s -> Maybe (WalletState s)
fromGenesis cp
| W.isGenesisBlockHeader header = Just $
WalletState
{ prologue
, checkpoints = CPS.fromGenesis checkpoint
, submissions = emptyTxSubmissions
}
| otherwise = Nothing
where
header = cp ^. #currentTip
(prologue, checkpoint) = fromWallet cp
getLatest :: AddressBookIso s => WalletState s -> W.Wallet s
getLatest w =
toWallet (w ^. #prologue) . snd $ CPS.getLatest (w ^. #checkpoints)
| Find the nearest ' ' that is either at the given point or before .
findNearestPoint :: WalletState s -> W.Slot -> Maybe W.Slot
findNearestPoint = CPS.findNearestPoint . view #checkpoints
type DeltaWalletState s = [DeltaWalletState1 s]
data DeltaWalletState1 s
= ReplacePrologue (Prologue s)
| UpdateCheckpoints (CPS.DeltasCheckpoints (WalletCheckpoint s))
| UpdateSubmissions [DeltaTxSubmissions]
instance Delta (DeltaWalletState1 s) where
type Base (DeltaWalletState1 s) = WalletState s
apply (ReplacePrologue p) = over #prologue $ const p
apply (UpdateCheckpoints d) = over #checkpoints $ apply d
apply (UpdateSubmissions d) = over #submissions $ apply d
instance Buildable (DeltaWalletState1 s) where
build (ReplacePrologue _) = "ReplacePrologue …"
build (UpdateCheckpoints d) = "UpdateCheckpoints (" <> build d <> ")"
build (UpdateSubmissions d) = "UpdateSubmissions (" <> build d <> ")"
instance Show (DeltaWalletState1 s) where
show = pretty
| Adjust a specific wallet if it exists or return ' ErrNoSuchWallet ' .
adjustNoSuchWallet
:: WalletId
-> (ErrNoSuchWallet -> e)
-> (w -> Either e (dw, b))
-> (Map WalletId w -> (Maybe (DeltaMap WalletId dw), Either e b))
adjustNoSuchWallet wid err update wallets = case Map.lookup wid wallets of
Nothing -> (Nothing, Left $ err $ ErrNoSuchWallet wid)
Just wal -> case update wal of
Left e -> (Nothing, Left e)
Right (dw, b) -> (Just $ Adjust wid dw, Right b)
|
51a2475952511c7139894b91c67d82196a2e09a5f5dcc12e7f0100065fe3b952 | janestreet/vcaml | vcaml.mli | module Unshadow_buffer := Buffer
module Unshadow_command := Command
open Core
open Async
module Api_version = Nvim_internal.Api_version
module Buffer = Unshadow_buffer
module Channel_info = Channel_info
module Client_info = Client_info
module Color = Color
module Command = Unshadow_command
module Error_type = Nvim_internal.Error_type
module Highlighted_text = Highlighted_text
module Keymap = Keymap
module Mark = Mark
module Mode = Mode
module Namespace = Namespace
module Nvim = Nvim
module Position = Position
module Tabpage = Tabpage
module Type = Nvim_internal.Phantom
module Ui = Ui
module Vcaml_error = Vcaml_error
module Window = Window
* API version for which this library is built ( not the same as the Neovim version ) .
val api_version : Api_version.t
* [ Msgpack.pp ] with support for extensions .
val pp : Formatter.t -> Msgpack.t -> unit
module Nvim_version : sig
include Semantic_version.S
val vcaml : t
end
module Client : sig
type 'state t = 'state Client.t
* [ on_error ] is invoked when VCaml fails to parse a response from Neovim and when
sends us an asynchronous error event to inform us that it encountered a
problem with a message we sent .
Neovim sends us an asynchronous error event to inform us that it encountered a
problem with a message we sent. *)
val create
: on_error:[ `Raise | `Call of Vcaml_error.t -> unit ]
-> [ `not_connected ] t
* A value of type [ Connection_type.t ] describes the type of connection to use , along
with the information necessary to connect to Neovim .
With a [ Unix ] connection , the plugin communicates with Neovim over the unix domain
socket it uses to serve RPC requests . [ Unix ` Child ] should be used if the plugin is
launched from within Neovim ; if it is launched independently a path to the socket
will need to be provided .
With a [ Stdio ] connection , the plugin communicates with Neovim using its own stdin
and stdout ( which means stdout can not be used for logging ) . This connection type
should only be used if the plugin is launched from Neovim with [ jobstart ] with
[ rpc:1 ] in [ opts ] .
[ Stdio ] connections are useful for synchronous , " one - shot " plugins where you want to
synchronously start the process , communicate with Neovim , and shut down . To make
this work , after starting the plugin , issue an [ rpcrequest ] , which will cause Neovim
to block . The plugin should register the requested RPC before connecting to Neovim
to ensure the RPC is defined at the time Neovim 's request is handled . After handling
the request , the plugin should shut down . If you tried to do this with a [ Unix ]
connection then after the process is launched you would need to create a new channel
but Neovim ca n't do that while in the middle of processing whatever logic it 's
currently executing that launched the process . To achieve synchronicity in this way
you 'd need a continuation - after launching the process you 'd need to yield to the
event loop so the channel could be established , and then the plugin would need to
invoke a callback in Neovim to continue .
The [ Embed ] connection is the inverse of the [ Stdio ] connection - instead of the
OCaml app being launched by Neovim , Neovim is launched by the OCaml app . Just as
in a [ Stdio ] connection the app 's stdin and stdout are used for RPC communication ,
here Neovim 's stdin and stdout are used to communicate with the embedding process .
[ Embed ] is most useful for testing and for graphical applications that want to
embed Neovim for editing text . When [ Embed ] is used the [ --embed ] flag must be
passed in [ args ] .
with the information necessary to connect to Neovim.
With a [Unix] connection, the plugin communicates with Neovim over the unix domain
socket it uses to serve RPC requests. [Unix `Child] should be used if the plugin is
launched from within Neovim; if it is launched independently a path to the socket
will need to be provided.
With a [Stdio] connection, the plugin communicates with Neovim using its own stdin
and stdout (which means stdout cannot be used for logging). This connection type
should only be used if the plugin is launched from Neovim with [jobstart] with
[rpc:1] in [opts].
[Stdio] connections are useful for synchronous, "one-shot" plugins where you want to
synchronously start the process, communicate with Neovim, and shut down. To make
this work, after starting the plugin, issue an [rpcrequest], which will cause Neovim
to block. The plugin should register the requested RPC before connecting to Neovim
to ensure the RPC is defined at the time Neovim's request is handled. After handling
the request, the plugin should shut down. If you tried to do this with a [Unix]
connection then after the process is launched you would need to create a new channel
but Neovim can't do that while in the middle of processing whatever logic it's
currently executing that launched the process. To achieve synchronicity in this way
you'd need a continuation - after launching the process you'd need to yield to the
event loop so the channel could be established, and then the plugin would need to
invoke a callback in Neovim to continue.
The [Embed] connection is the inverse of the [Stdio] connection - instead of the
OCaml app being launched by Neovim, Neovim is launched by the OCaml app. Just as
in a [Stdio] connection the app's stdin and stdout are used for RPC communication,
here Neovim's stdin and stdout are used to communicate with the embedding process.
[Embed] is most useful for testing and for graphical applications that want to
embed Neovim for editing text. When [Embed] is used the [--embed] flag must be
passed in [args]. *)
module Connection_type : sig
type _ t =
| Unix : [ `Child | `Socket of string ] -> [ `connected ] Client.t t
| Stdio : [ `connected ] Client.t t
| Embed :
{ prog : string
; args : string list
; working_dir : string
; env : Core_unix.env
}
-> ([ `connected ] Client.t * Async.Process.t) t
end
* Attach to Neovim over an RPC channel . Once [ attach ] is called Neovim can start
sending RPC requests and notifications , so handlers should be registered in advance
with [ register_request_async ] and [ register_request_blocking ] as needed . Registering
more handlers after attaching is allowed . Calling [ attach ] twice will raise .
sending RPC requests and notifications, so handlers should be registered in advance
with [register_request_async] and [register_request_blocking] as needed. Registering
more handlers after attaching is allowed. Calling [attach] twice will raise. *)
val attach
: ?close_reader_and_writer_on_disconnect:(* Default: [true] *) bool
-> [ `not_connected ] t
-> 'a Connection_type.t
-> time_source:Time_source.t
-> 'a Deferred.Or_error.t
(** Close the client and release the underlying file descriptors. Can be called safely
multiple times. *)
val close : [ `connected ] t -> unit Deferred.t
* Returns Neovim 's i d for the channel over which Neovim and the client communicate .
This can be useful when you want to set an autocmd or key mapping that issues an
[ rcprequest ] or [ rpcnotify ] when triggered , since these functions requre the channel
i d as an argument .
This can be useful when you want to set an autocmd or key mapping that issues an
[rcprequest] or [rpcnotify] when triggered, since these functions requre the channel
id as an argument. *)
val channel : [ `connected ] t -> int
end
* A [ ' a Api_call.t ] is a thunked call to neovim returning a - encoded [ ' a ] . No RPC
traffic is generated until an [ Api_call.t ] is invoked via [ run ] or [ run_join ] .
[ Api_call.t ] 's can be manipulated with an applicative - like interface .
A good mental model is that invoking a [ ' a Api_call.t ] should cause exactly one RPC
message to be sent to the neovim client , and that any operations within will not be
interrupted . Calls with side effects will occur in the order written , so
{ [
let%map _ a = a
and _ b = b
in
( )
] }
will cause Neovim to first run [ a ] and then [ b ] .
This is important for applications that rely on manipulating neovim 's internal state .
In particular , the atomicity guarantee prevents races with other pending operations ,
including user input .
You can run an [ Api_call.t ] with [ run ] or [ run_join ] .
traffic is generated until an [Api_call.t] is invoked via [run] or [run_join].
[Api_call.t]'s can be manipulated with an applicative-like interface.
A good mental model is that invoking a ['a Api_call.t] should cause exactly one RPC
message to be sent to the neovim client, and that any operations within will not be
interrupted. Calls with side effects will occur in the order written, so
{[
let%map _a = a
and _b = b
in
()
]}
will cause Neovim to first run [a] and then [b].
This is important for applications that rely on manipulating neovim's internal state.
In particular, the atomicity guarantee prevents races with other pending operations,
including user input.
You can run an [Api_call.t] with [run] or [run_join]. *)
module Api_call : sig
include Applicative.S with type 'a t = 'a Api_call.t
include Applicative.Let_syntax with type 'a t := 'a Api_call.t
module Or_error = Api_call.Or_error
end
val run
: Source_code_position.t
-> [ `connected ] Client.t
-> 'a Api_call.t
-> 'a Deferred.Or_error.t
val run_join
: Source_code_position.t
-> [ `connected ] Client.t
-> 'a Api_call.Or_error.t
-> 'a Deferred.Or_error.t
module Defun : sig
* A [ Defun . Vim.t ] value is a reified value corresponding to the type of a function . It
is used by [ wrap_viml_function ] to produce a regular ocaml function of the correct
type .
Important notes about [ Nil ] :
1 . If you are wrapping a function that takes no arguments , just use [ return T ] . Do
not use [ Nil @- > return T ] .
2 . If you are wrapping a native ( non - API ) Vimscript function that does not have an
explicit return statement , its implicit return is [ Integer 0 ] , not [ Nil ] .
is used by [wrap_viml_function] to produce a regular ocaml function of the correct
type.
Important notes about [Nil]:
1. If you are wrapping a function that takes no arguments, just use [return T]. Do
not use [Nil @-> return T].
2. If you are wrapping a native (non-API) Vimscript function that does not have an
explicit return statement, its implicit return is [Integer 0], not [Nil]. *)
module Vim : sig
type ('f, 'leftmost_input, 'out) t
(** Wraps a [Type.t] to be used as the rightmost (return) type of this function. *)
val return : 'a Type.t -> ('a Api_call.Or_error.t, unit, 'a) t
* Add an extra argument to an existing function arity .
Using this operator , function types will look extremely closely to how the
underlying OCaml type will end up . For example , a Vim function with ( OCaml ) type
[ int - > string - > int - > buffer ] would use the arity [ Integer @- > String @- >
Integer @- > return Buffer ] .
Using this operator, function types will look extremely closely to how the
underlying OCaml type will end up. For example, a Vim function with (OCaml) type
[int -> string -> int -> buffer] would use the arity [Integer @-> String @->
Integer @-> return Buffer]. *)
val ( @-> ) : 'a Type.t -> ('b, _, 'output) t -> ('a -> 'b, 'a, 'output) t
end
* [ Defun . Ocaml ] is analogous to [ Defun . Vim ] , except used to specify OCaml - defined
functions callable from neovim . See [ register_request_blocking ] and
[ register_request_async ] below for usage .
functions callable from neovim. See [register_request_blocking] and
[register_request_async] below for usage. *)
module Ocaml : sig
module Sync : sig
type ('f, 'leftmost_input) t
val return : 'a Type.t -> ('a Deferred.Or_error.t, unit) t
val ( @-> ) : 'a Type.t -> ('b, _) t -> ('a -> 'b, 'a) t
module Expert : sig
* Supports the rare case of interoperating with a Vimscript function that takes
a callback that takes a variable number of arguments .
a callback that takes a variable number of arguments. *)
val varargs
: args_type:'a Type.t
-> return_type:'b Type.t
-> ('a list -> 'b Deferred.Or_error.t, 'a list) t
end
end
module Async : sig
type 'f t
val unit : unit Deferred.Or_error.t t
val ( @-> ) : 'a Type.t -> 'b t -> ('a -> 'b) t
module Expert : sig
val varargs : 'a Type.t -> ('a list -> unit Deferred.Or_error.t) t
end
end
end
end
* Given the name of a function available in Vimscript ( VimL ) along with its arity ( see
[ Defun . Vim ] ) , return a regularly - typed OCaml function that calls said function .
This is intended for client authors to delegate work back to Neovim , possibly to call
an existing Vimscript function . Before reaching for this function , please check the
functions available in [ Nvim ] , [ Buffer ] , [ Window ] and [ Tabpage ] to see that the
functionality you intend to wrap is n't directly exposed in the API .
[Defun.Vim]), return a regularly-typed OCaml function that calls said function.
This is intended for client authors to delegate work back to Neovim, possibly to call
an existing Vimscript function. Before reaching for this function, please check the
functions available in [Nvim], [Buffer], [Window] and [Tabpage] to see that the
functionality you intend to wrap isn't directly exposed in the API. *)
val wrap_viml_function
: type_:('fn, 'leftmost, 'out) Defun.Vim.t
-> function_name:string
-> 'fn
* [ register_request_blocking ] and [ register_request_async ] register functions that can
be called from Neovim via [ rpcrequest ] and [ rpcnotify ] respectively . This is achieved
by adding a listener to the Neovim msgpack_rpc bus .
A blocking request will block Neovim from processing user input or communication over
other channels until a response is returned . Neovim will continue to process calls
sent over the same channel while a blocking request is in flight , which means nested
calls are supported .
When the user presses Ctrl - C to interrupt a blocking call , [ keyboard_interrupted ]
will be determined . Use that to run any necessary cleanup . If you call back into
Neovim during the blocking RPC , consider whether a keyboard interrupt should prevent
those calls from being run .
An async request will enqueue logic on Neovim 's event loop instead of blocking .
Importantly , the state of the editor may have changed between the time the async
request was made and the time Neovim process any of its logic .
be called from Neovim via [rpcrequest] and [rpcnotify] respectively. This is achieved
by adding a listener to the Neovim msgpack_rpc bus.
A blocking request will block Neovim from processing user input or communication over
other channels until a response is returned. Neovim will continue to process calls
sent over the same channel while a blocking request is in flight, which means nested
calls are supported.
When the user presses Ctrl-C to interrupt a blocking call, [keyboard_interrupted]
will be determined. Use that to run any necessary cleanup. If you call back into
Neovim during the blocking RPC, consider whether a keyboard interrupt should prevent
those calls from being run.
An async request will enqueue logic on Neovim's event loop instead of blocking.
Importantly, the state of the editor may have changed between the time the async
request was made and the time Neovim process any of its logic. *)
val register_request_blocking
: _ Client.t
-> name:string
-> type_:('fn, 'leftmost) Defun.Ocaml.Sync.t
-> f:(keyboard_interrupted:unit Deferred.t -> client:[ `connected ] Client.t -> 'fn)
-> unit
val register_request_async
: _ Client.t
-> name:string
-> type_:'fn Defun.Ocaml.Async.t
-> f:(client:[ `connected ] Client.t -> 'fn)
-> unit
module Expert : sig
module Notifier = Notifier
end
(* These functions are exported solely for the vcaml_plugin library's use. Clients should
not call them. *)
module Private : sig
val register_request_blocking
: _ Client.t
-> name:string
-> type_:('fn, 'leftmost) Defun.Ocaml.Sync.t
-> f:(keyboard_interrupted:unit Deferred.t -> client:[ `connected ] Client.t -> 'fn)
-> wrap_f:((unit -> Msgpack.t Deferred.Or_error.t) -> Msgpack.t Deferred.Or_error.t)
-> unit
val register_request_async
: _ Client.t
-> name:string
-> type_:'fn Defun.Ocaml.Async.t
-> f:(client:[ `connected ] Client.t -> 'fn)
-> wrap_f:((unit -> unit Deferred.Or_error.t) -> unit Deferred.Or_error.t)
-> unit
end
| null | https://raw.githubusercontent.com/janestreet/vcaml/b02fc56c48746fa18a6bc9a0f8fb85776db76977/src/vcaml.mli | ocaml | Default: [true]
* Close the client and release the underlying file descriptors. Can be called safely
multiple times.
* Wraps a [Type.t] to be used as the rightmost (return) type of this function.
These functions are exported solely for the vcaml_plugin library's use. Clients should
not call them. | module Unshadow_buffer := Buffer
module Unshadow_command := Command
open Core
open Async
module Api_version = Nvim_internal.Api_version
module Buffer = Unshadow_buffer
module Channel_info = Channel_info
module Client_info = Client_info
module Color = Color
module Command = Unshadow_command
module Error_type = Nvim_internal.Error_type
module Highlighted_text = Highlighted_text
module Keymap = Keymap
module Mark = Mark
module Mode = Mode
module Namespace = Namespace
module Nvim = Nvim
module Position = Position
module Tabpage = Tabpage
module Type = Nvim_internal.Phantom
module Ui = Ui
module Vcaml_error = Vcaml_error
module Window = Window
* API version for which this library is built ( not the same as the Neovim version ) .
val api_version : Api_version.t
* [ Msgpack.pp ] with support for extensions .
val pp : Formatter.t -> Msgpack.t -> unit
module Nvim_version : sig
include Semantic_version.S
val vcaml : t
end
module Client : sig
type 'state t = 'state Client.t
* [ on_error ] is invoked when VCaml fails to parse a response from Neovim and when
sends us an asynchronous error event to inform us that it encountered a
problem with a message we sent .
Neovim sends us an asynchronous error event to inform us that it encountered a
problem with a message we sent. *)
val create
: on_error:[ `Raise | `Call of Vcaml_error.t -> unit ]
-> [ `not_connected ] t
* A value of type [ Connection_type.t ] describes the type of connection to use , along
with the information necessary to connect to Neovim .
With a [ Unix ] connection , the plugin communicates with Neovim over the unix domain
socket it uses to serve RPC requests . [ Unix ` Child ] should be used if the plugin is
launched from within Neovim ; if it is launched independently a path to the socket
will need to be provided .
With a [ Stdio ] connection , the plugin communicates with Neovim using its own stdin
and stdout ( which means stdout can not be used for logging ) . This connection type
should only be used if the plugin is launched from Neovim with [ jobstart ] with
[ rpc:1 ] in [ opts ] .
[ Stdio ] connections are useful for synchronous , " one - shot " plugins where you want to
synchronously start the process , communicate with Neovim , and shut down . To make
this work , after starting the plugin , issue an [ rpcrequest ] , which will cause Neovim
to block . The plugin should register the requested RPC before connecting to Neovim
to ensure the RPC is defined at the time Neovim 's request is handled . After handling
the request , the plugin should shut down . If you tried to do this with a [ Unix ]
connection then after the process is launched you would need to create a new channel
but Neovim ca n't do that while in the middle of processing whatever logic it 's
currently executing that launched the process . To achieve synchronicity in this way
you 'd need a continuation - after launching the process you 'd need to yield to the
event loop so the channel could be established , and then the plugin would need to
invoke a callback in Neovim to continue .
The [ Embed ] connection is the inverse of the [ Stdio ] connection - instead of the
OCaml app being launched by Neovim , Neovim is launched by the OCaml app . Just as
in a [ Stdio ] connection the app 's stdin and stdout are used for RPC communication ,
here Neovim 's stdin and stdout are used to communicate with the embedding process .
[ Embed ] is most useful for testing and for graphical applications that want to
embed Neovim for editing text . When [ Embed ] is used the [ --embed ] flag must be
passed in [ args ] .
with the information necessary to connect to Neovim.
With a [Unix] connection, the plugin communicates with Neovim over the unix domain
socket it uses to serve RPC requests. [Unix `Child] should be used if the plugin is
launched from within Neovim; if it is launched independently a path to the socket
will need to be provided.
With a [Stdio] connection, the plugin communicates with Neovim using its own stdin
and stdout (which means stdout cannot be used for logging). This connection type
should only be used if the plugin is launched from Neovim with [jobstart] with
[rpc:1] in [opts].
[Stdio] connections are useful for synchronous, "one-shot" plugins where you want to
synchronously start the process, communicate with Neovim, and shut down. To make
this work, after starting the plugin, issue an [rpcrequest], which will cause Neovim
to block. The plugin should register the requested RPC before connecting to Neovim
to ensure the RPC is defined at the time Neovim's request is handled. After handling
the request, the plugin should shut down. If you tried to do this with a [Unix]
connection then after the process is launched you would need to create a new channel
but Neovim can't do that while in the middle of processing whatever logic it's
currently executing that launched the process. To achieve synchronicity in this way
you'd need a continuation - after launching the process you'd need to yield to the
event loop so the channel could be established, and then the plugin would need to
invoke a callback in Neovim to continue.
The [Embed] connection is the inverse of the [Stdio] connection - instead of the
OCaml app being launched by Neovim, Neovim is launched by the OCaml app. Just as
in a [Stdio] connection the app's stdin and stdout are used for RPC communication,
here Neovim's stdin and stdout are used to communicate with the embedding process.
[Embed] is most useful for testing and for graphical applications that want to
embed Neovim for editing text. When [Embed] is used the [--embed] flag must be
passed in [args]. *)
module Connection_type : sig
type _ t =
| Unix : [ `Child | `Socket of string ] -> [ `connected ] Client.t t
| Stdio : [ `connected ] Client.t t
| Embed :
{ prog : string
; args : string list
; working_dir : string
; env : Core_unix.env
}
-> ([ `connected ] Client.t * Async.Process.t) t
end
* Attach to Neovim over an RPC channel . Once [ attach ] is called Neovim can start
sending RPC requests and notifications , so handlers should be registered in advance
with [ register_request_async ] and [ register_request_blocking ] as needed . Registering
more handlers after attaching is allowed . Calling [ attach ] twice will raise .
sending RPC requests and notifications, so handlers should be registered in advance
with [register_request_async] and [register_request_blocking] as needed. Registering
more handlers after attaching is allowed. Calling [attach] twice will raise. *)
val attach
-> [ `not_connected ] t
-> 'a Connection_type.t
-> time_source:Time_source.t
-> 'a Deferred.Or_error.t
val close : [ `connected ] t -> unit Deferred.t
* Returns Neovim 's i d for the channel over which Neovim and the client communicate .
This can be useful when you want to set an autocmd or key mapping that issues an
[ rcprequest ] or [ rpcnotify ] when triggered , since these functions requre the channel
i d as an argument .
This can be useful when you want to set an autocmd or key mapping that issues an
[rcprequest] or [rpcnotify] when triggered, since these functions requre the channel
id as an argument. *)
val channel : [ `connected ] t -> int
end
* A [ ' a Api_call.t ] is a thunked call to neovim returning a - encoded [ ' a ] . No RPC
traffic is generated until an [ Api_call.t ] is invoked via [ run ] or [ run_join ] .
[ Api_call.t ] 's can be manipulated with an applicative - like interface .
A good mental model is that invoking a [ ' a Api_call.t ] should cause exactly one RPC
message to be sent to the neovim client , and that any operations within will not be
interrupted . Calls with side effects will occur in the order written , so
{ [
let%map _ a = a
and _ b = b
in
( )
] }
will cause Neovim to first run [ a ] and then [ b ] .
This is important for applications that rely on manipulating neovim 's internal state .
In particular , the atomicity guarantee prevents races with other pending operations ,
including user input .
You can run an [ Api_call.t ] with [ run ] or [ run_join ] .
traffic is generated until an [Api_call.t] is invoked via [run] or [run_join].
[Api_call.t]'s can be manipulated with an applicative-like interface.
A good mental model is that invoking a ['a Api_call.t] should cause exactly one RPC
message to be sent to the neovim client, and that any operations within will not be
interrupted. Calls with side effects will occur in the order written, so
{[
let%map _a = a
and _b = b
in
()
]}
will cause Neovim to first run [a] and then [b].
This is important for applications that rely on manipulating neovim's internal state.
In particular, the atomicity guarantee prevents races with other pending operations,
including user input.
You can run an [Api_call.t] with [run] or [run_join]. *)
module Api_call : sig
include Applicative.S with type 'a t = 'a Api_call.t
include Applicative.Let_syntax with type 'a t := 'a Api_call.t
module Or_error = Api_call.Or_error
end
val run
: Source_code_position.t
-> [ `connected ] Client.t
-> 'a Api_call.t
-> 'a Deferred.Or_error.t
val run_join
: Source_code_position.t
-> [ `connected ] Client.t
-> 'a Api_call.Or_error.t
-> 'a Deferred.Or_error.t
module Defun : sig
* A [ Defun . Vim.t ] value is a reified value corresponding to the type of a function . It
is used by [ wrap_viml_function ] to produce a regular ocaml function of the correct
type .
Important notes about [ Nil ] :
1 . If you are wrapping a function that takes no arguments , just use [ return T ] . Do
not use [ Nil @- > return T ] .
2 . If you are wrapping a native ( non - API ) Vimscript function that does not have an
explicit return statement , its implicit return is [ Integer 0 ] , not [ Nil ] .
is used by [wrap_viml_function] to produce a regular ocaml function of the correct
type.
Important notes about [Nil]:
1. If you are wrapping a function that takes no arguments, just use [return T]. Do
not use [Nil @-> return T].
2. If you are wrapping a native (non-API) Vimscript function that does not have an
explicit return statement, its implicit return is [Integer 0], not [Nil]. *)
module Vim : sig
type ('f, 'leftmost_input, 'out) t
val return : 'a Type.t -> ('a Api_call.Or_error.t, unit, 'a) t
* Add an extra argument to an existing function arity .
Using this operator , function types will look extremely closely to how the
underlying OCaml type will end up . For example , a Vim function with ( OCaml ) type
[ int - > string - > int - > buffer ] would use the arity [ Integer @- > String @- >
Integer @- > return Buffer ] .
Using this operator, function types will look extremely closely to how the
underlying OCaml type will end up. For example, a Vim function with (OCaml) type
[int -> string -> int -> buffer] would use the arity [Integer @-> String @->
Integer @-> return Buffer]. *)
val ( @-> ) : 'a Type.t -> ('b, _, 'output) t -> ('a -> 'b, 'a, 'output) t
end
* [ Defun . Ocaml ] is analogous to [ Defun . Vim ] , except used to specify OCaml - defined
functions callable from neovim . See [ register_request_blocking ] and
[ register_request_async ] below for usage .
functions callable from neovim. See [register_request_blocking] and
[register_request_async] below for usage. *)
module Ocaml : sig
module Sync : sig
type ('f, 'leftmost_input) t
val return : 'a Type.t -> ('a Deferred.Or_error.t, unit) t
val ( @-> ) : 'a Type.t -> ('b, _) t -> ('a -> 'b, 'a) t
module Expert : sig
* Supports the rare case of interoperating with a Vimscript function that takes
a callback that takes a variable number of arguments .
a callback that takes a variable number of arguments. *)
val varargs
: args_type:'a Type.t
-> return_type:'b Type.t
-> ('a list -> 'b Deferred.Or_error.t, 'a list) t
end
end
module Async : sig
type 'f t
val unit : unit Deferred.Or_error.t t
val ( @-> ) : 'a Type.t -> 'b t -> ('a -> 'b) t
module Expert : sig
val varargs : 'a Type.t -> ('a list -> unit Deferred.Or_error.t) t
end
end
end
end
* Given the name of a function available in Vimscript ( VimL ) along with its arity ( see
[ Defun . Vim ] ) , return a regularly - typed OCaml function that calls said function .
This is intended for client authors to delegate work back to Neovim , possibly to call
an existing Vimscript function . Before reaching for this function , please check the
functions available in [ Nvim ] , [ Buffer ] , [ Window ] and [ Tabpage ] to see that the
functionality you intend to wrap is n't directly exposed in the API .
[Defun.Vim]), return a regularly-typed OCaml function that calls said function.
This is intended for client authors to delegate work back to Neovim, possibly to call
an existing Vimscript function. Before reaching for this function, please check the
functions available in [Nvim], [Buffer], [Window] and [Tabpage] to see that the
functionality you intend to wrap isn't directly exposed in the API. *)
val wrap_viml_function
: type_:('fn, 'leftmost, 'out) Defun.Vim.t
-> function_name:string
-> 'fn
* [ register_request_blocking ] and [ register_request_async ] register functions that can
be called from Neovim via [ rpcrequest ] and [ rpcnotify ] respectively . This is achieved
by adding a listener to the Neovim msgpack_rpc bus .
A blocking request will block Neovim from processing user input or communication over
other channels until a response is returned . Neovim will continue to process calls
sent over the same channel while a blocking request is in flight , which means nested
calls are supported .
When the user presses Ctrl - C to interrupt a blocking call , [ keyboard_interrupted ]
will be determined . Use that to run any necessary cleanup . If you call back into
Neovim during the blocking RPC , consider whether a keyboard interrupt should prevent
those calls from being run .
An async request will enqueue logic on Neovim 's event loop instead of blocking .
Importantly , the state of the editor may have changed between the time the async
request was made and the time Neovim process any of its logic .
be called from Neovim via [rpcrequest] and [rpcnotify] respectively. This is achieved
by adding a listener to the Neovim msgpack_rpc bus.
A blocking request will block Neovim from processing user input or communication over
other channels until a response is returned. Neovim will continue to process calls
sent over the same channel while a blocking request is in flight, which means nested
calls are supported.
When the user presses Ctrl-C to interrupt a blocking call, [keyboard_interrupted]
will be determined. Use that to run any necessary cleanup. If you call back into
Neovim during the blocking RPC, consider whether a keyboard interrupt should prevent
those calls from being run.
An async request will enqueue logic on Neovim's event loop instead of blocking.
Importantly, the state of the editor may have changed between the time the async
request was made and the time Neovim process any of its logic. *)
val register_request_blocking
: _ Client.t
-> name:string
-> type_:('fn, 'leftmost) Defun.Ocaml.Sync.t
-> f:(keyboard_interrupted:unit Deferred.t -> client:[ `connected ] Client.t -> 'fn)
-> unit
val register_request_async
: _ Client.t
-> name:string
-> type_:'fn Defun.Ocaml.Async.t
-> f:(client:[ `connected ] Client.t -> 'fn)
-> unit
module Expert : sig
module Notifier = Notifier
end
module Private : sig
val register_request_blocking
: _ Client.t
-> name:string
-> type_:('fn, 'leftmost) Defun.Ocaml.Sync.t
-> f:(keyboard_interrupted:unit Deferred.t -> client:[ `connected ] Client.t -> 'fn)
-> wrap_f:((unit -> Msgpack.t Deferred.Or_error.t) -> Msgpack.t Deferred.Or_error.t)
-> unit
val register_request_async
: _ Client.t
-> name:string
-> type_:'fn Defun.Ocaml.Async.t
-> f:(client:[ `connected ] Client.t -> 'fn)
-> wrap_f:((unit -> unit Deferred.Or_error.t) -> unit Deferred.Or_error.t)
-> unit
end
|
77279677c720d7471fa1748beb7091a17863ff034d928d540b636882e2cf0eda | softwarelanguageslab/maf | fjt-seq.scm | Adapted from Savina benchmarks ( " Fork Join ( throughput ) " benchmark , coming from JGF )
(letrec ((N 10)
(A 3)
(perform-computation (lambda (theta)
(let ((sint (+ 1 theta)))
(* sint sint))))
(throughput-actor
(actor "throughput" (processed)
(message ()
(perform-computation 37.2)
(if (= (+ processed 1) N)
(terminate)
(become throughput-actor (+ processed 1))))))
(actors (vector (create throughput-actor 0)
(create throughput-actor 0)
(create throughput-actor 0)))
(vector-foreach (lambda (f v)
(letrec ((loop (lambda (i)
(if (< i (vector-length v))
(begin
(f (vector-ref v i))
(loop (+ i 1)))
'done))))
(loop 0))))
(loop (lambda (n)
(if (= n N)
'done
(begin
(vector-foreach (lambda (a)
(send a message)) actors)
(loop (+ n 1)))))))
(loop 0))
| null | https://raw.githubusercontent.com/softwarelanguageslab/maf/be58e02c63d25cab5b48fdf7b737b68b882e9dca/test/concurrentScheme/actors/contracts/savina/fjt-seq.scm | scheme | Adapted from Savina benchmarks ( " Fork Join ( throughput ) " benchmark , coming from JGF )
(letrec ((N 10)
(A 3)
(perform-computation (lambda (theta)
(let ((sint (+ 1 theta)))
(* sint sint))))
(throughput-actor
(actor "throughput" (processed)
(message ()
(perform-computation 37.2)
(if (= (+ processed 1) N)
(terminate)
(become throughput-actor (+ processed 1))))))
(actors (vector (create throughput-actor 0)
(create throughput-actor 0)
(create throughput-actor 0)))
(vector-foreach (lambda (f v)
(letrec ((loop (lambda (i)
(if (< i (vector-length v))
(begin
(f (vector-ref v i))
(loop (+ i 1)))
'done))))
(loop 0))))
(loop (lambda (n)
(if (= n N)
'done
(begin
(vector-foreach (lambda (a)
(send a message)) actors)
(loop (+ n 1)))))))
(loop 0))
| |
e30f60e71558cf501e974fc4f3fddb5648dcc68d3e69574d9d133fbcc23f548f | slipstream/SlipStreamServer | module_component.cljc | (ns com.sixsq.slipstream.ssclj.resources.spec.module-component
(:require
[clojure.spec.alpha :as s]
[com.sixsq.slipstream.ssclj.resources.spec.common :as c]
[com.sixsq.slipstream.ssclj.resources.spec.core :as cimi-core]
[com.sixsq.slipstream.ssclj.resources.spec.module :as module]
[com.sixsq.slipstream.ssclj.util.spec :as su]))
(s/def ::commit ::cimi-core/nonblank-string)
(s/def ::author ::cimi-core/nonblank-string)
(s/def ::parentModule ::module/link)
(s/def ::cpu nat-int?)
(s/def ::ram nat-int?)
(s/def ::disk nat-int?)
(s/def ::volatileDisk nat-int?)
(s/def ::networkType #{"public" "private"})
(s/def ::ports (s/coll-of ::cimi-core/nonblank-string :min-count 1 :kind vector?))
(s/def ::mounts (s/coll-of ::cimi-core/nonblank-string :min-count 1 :kind vector?))
;; parameter keywords are used in components and application parameter mappings
(def ^:const parameter-name-regex #"^[a-zA-Z0-9]+([-_\.:][a-zA-Z0-9]*)*$")
(s/def ::parameter (s/and string? #(re-matches parameter-name-regex %)))
(s/def ::description ::cimi-core/nonblank-string)
(s/def ::value ::cimi-core/nonblank-string)
(s/def ::parameter-map (su/only-keys :req-un [::parameter]
:opt-un [::description ::value]))
(s/def ::parameters (s/coll-of ::parameter-map :min-count 1 :kind vector?))
(s/def ::inputParameters ::parameters)
(s/def ::outputParameters ::parameters)
(s/def ::target ::cimi-core/nonblank-string)
(s/def ::package-list (s/coll-of ::cimi-core/nonblank-string :min-count 1 :kind vector?))
(s/def ::preinstall ::target)
(s/def ::packages ::package-list)
(s/def ::postinstall ::target)
(s/def ::deployment ::target)
(s/def ::reporting ::target)
(s/def ::onVmAdd ::target)
(s/def ::onVmRemove ::target)
(s/def ::prescale ::target)
(s/def ::postscale ::target)
(s/def ::targets (su/only-keys :opt-un [::preinstall
::packages
::postinstall
::deployment
::reporting
::onVmAdd
::onVmRemove
::prescale
::postscale]))
(def module-component-keys-spec (su/merge-keys-specs [c/common-attrs
{:req-un [::parentModule
::networkType
::outputParameters
::author]
:opt-un [::inputParameters
::cpu
::ram
::disk
::volatileDisk
::ports
::mounts
::targets
::commit]}]))
(s/def ::module-component (su/only-keys-maps module-component-keys-spec))
| null | https://raw.githubusercontent.com/slipstream/SlipStreamServer/3ee5c516877699746c61c48fc72779fe3d4e4652/cimi-resources/src/com/sixsq/slipstream/ssclj/resources/spec/module_component.cljc | clojure | parameter keywords are used in components and application parameter mappings | (ns com.sixsq.slipstream.ssclj.resources.spec.module-component
(:require
[clojure.spec.alpha :as s]
[com.sixsq.slipstream.ssclj.resources.spec.common :as c]
[com.sixsq.slipstream.ssclj.resources.spec.core :as cimi-core]
[com.sixsq.slipstream.ssclj.resources.spec.module :as module]
[com.sixsq.slipstream.ssclj.util.spec :as su]))
(s/def ::commit ::cimi-core/nonblank-string)
(s/def ::author ::cimi-core/nonblank-string)
(s/def ::parentModule ::module/link)
(s/def ::cpu nat-int?)
(s/def ::ram nat-int?)
(s/def ::disk nat-int?)
(s/def ::volatileDisk nat-int?)
(s/def ::networkType #{"public" "private"})
(s/def ::ports (s/coll-of ::cimi-core/nonblank-string :min-count 1 :kind vector?))
(s/def ::mounts (s/coll-of ::cimi-core/nonblank-string :min-count 1 :kind vector?))
(def ^:const parameter-name-regex #"^[a-zA-Z0-9]+([-_\.:][a-zA-Z0-9]*)*$")
(s/def ::parameter (s/and string? #(re-matches parameter-name-regex %)))
(s/def ::description ::cimi-core/nonblank-string)
(s/def ::value ::cimi-core/nonblank-string)
(s/def ::parameter-map (su/only-keys :req-un [::parameter]
:opt-un [::description ::value]))
(s/def ::parameters (s/coll-of ::parameter-map :min-count 1 :kind vector?))
(s/def ::inputParameters ::parameters)
(s/def ::outputParameters ::parameters)
(s/def ::target ::cimi-core/nonblank-string)
(s/def ::package-list (s/coll-of ::cimi-core/nonblank-string :min-count 1 :kind vector?))
(s/def ::preinstall ::target)
(s/def ::packages ::package-list)
(s/def ::postinstall ::target)
(s/def ::deployment ::target)
(s/def ::reporting ::target)
(s/def ::onVmAdd ::target)
(s/def ::onVmRemove ::target)
(s/def ::prescale ::target)
(s/def ::postscale ::target)
(s/def ::targets (su/only-keys :opt-un [::preinstall
::packages
::postinstall
::deployment
::reporting
::onVmAdd
::onVmRemove
::prescale
::postscale]))
(def module-component-keys-spec (su/merge-keys-specs [c/common-attrs
{:req-un [::parentModule
::networkType
::outputParameters
::author]
:opt-un [::inputParameters
::cpu
::ram
::disk
::volatileDisk
::ports
::mounts
::targets
::commit]}]))
(s/def ::module-component (su/only-keys-maps module-component-keys-spec))
|
e15035599a6380b10a4827530ae252656ef43797cb32c43c2bcc29f5c112a1b4 | ijvcms/chuanqi_dev | map_20003.erl | -module(map_20003).
-export([
range/0,
data/0
]).
range() -> {48, 32}.
data() ->
{
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,2,0,0,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1},
{1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1},
{1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1},
{1,1,1,1,2,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1},
{1,1,1,1,2,2,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1},
{1,1,1,1,1,1,2,2,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1},
{1,1,1,1,1,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,2,2,0,0,0,0,2,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,2,2,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,1,2,0,0,2,0,0,0,0,0,0,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,2,2,2,2,2,0,0,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1}
}.
| null | https://raw.githubusercontent.com/ijvcms/chuanqi_dev/7742184bded15f25be761c4f2d78834249d78097/server/trunk/server/src/map_data/map_20003.erl | erlang | -module(map_20003).
-export([
range/0,
data/0
]).
range() -> {48, 32}.
data() ->
{
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,1,1,1,1,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,2,0,0,0,0,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1},
{1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1},
{1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1},
{1,1,1,1,2,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1},
{1,1,1,1,2,2,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1},
{1,1,1,1,1,1,2,2,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1},
{1,1,1,1,1,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,0,0,0,0,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,2,2,0,0,0,0,2,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,2,2,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,0,0,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,0,0,0,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,1,2,0,0,2,0,0,0,0,0,0,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,0,0,2,2,2,2,2,0,0,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,2,2,2,2,2,2,2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1},
{1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1}
}.
| |
4f92ee014a9c4c558f20b5ed1564e9f0eedf43a4338b108267b0273e6ccecaaf | clojure/core.async | ex-async.clj | (require '[clojure.core.async :as async :refer [<!! >!! timeout chan alt!!]])
(defn fake-search [kind]
(fn [c query]
(future
(<!! (timeout (rand-int 100)))
(>!! c [kind query]))))
(def web1 (fake-search :web1))
(def web2 (fake-search :web2))
(def image1 (fake-search :image1))
(def image2 (fake-search :image2))
(def video1 (fake-search :video1))
(def video2 (fake-search :video2))
(defn fastest [query & replicas]
(let [c (chan)]
(doseq [replica replicas]
(replica c query))
c))
(defn google [query]
(let [c (chan)
t (timeout 80)]
(future (>!! c (<!! (fastest query web1 web2))))
(future (>!! c (<!! (fastest query image1 image2))))
(future (>!! c (<!! (fastest query video1 video2))))
(loop [i 0 ret []]
(if (= i 3)
ret
(recur (inc i) (conj ret (alt!! [c t] ([v] v))))))))
(google "clojure")
| null | https://raw.githubusercontent.com/clojure/core.async/edc3e16c034106f06e861ffbf91ba0ea87107208/examples/ex-async.clj | clojure | (require '[clojure.core.async :as async :refer [<!! >!! timeout chan alt!!]])
(defn fake-search [kind]
(fn [c query]
(future
(<!! (timeout (rand-int 100)))
(>!! c [kind query]))))
(def web1 (fake-search :web1))
(def web2 (fake-search :web2))
(def image1 (fake-search :image1))
(def image2 (fake-search :image2))
(def video1 (fake-search :video1))
(def video2 (fake-search :video2))
(defn fastest [query & replicas]
(let [c (chan)]
(doseq [replica replicas]
(replica c query))
c))
(defn google [query]
(let [c (chan)
t (timeout 80)]
(future (>!! c (<!! (fastest query web1 web2))))
(future (>!! c (<!! (fastest query image1 image2))))
(future (>!! c (<!! (fastest query video1 video2))))
(loop [i 0 ret []]
(if (= i 3)
ret
(recur (inc i) (conj ret (alt!! [c t] ([v] v))))))))
(google "clojure")
| |
e357ca830a1da35f5a85da475df29f12b9b60d650073e5983a7a2d8c16c57008 | OCamlPro/freeton_ocaml_sdk | ton_abi.mli | (**************************************************************************)
(* *)
Copyright ( c ) 2021 OCamlPro SAS
(* *)
(* All rights reserved. *)
(* This file is distributed under the terms of the GNU Lesser General *)
Public License version 2.1 , with the special exception on linking
(* described in the LICENSE.md file in the root directory. *)
(* *)
(* *)
(**************************************************************************)
val read : string -> Ton_types.AbiContract.t
val write : string -> Ton_types.AbiContract.t -> unit
| null | https://raw.githubusercontent.com/OCamlPro/freeton_ocaml_sdk/42a0d95252ed19c647fa86e9728af15d557fc5e3/src/freeton_base_lib/ton_abi.mli | ocaml | ************************************************************************
All rights reserved.
This file is distributed under the terms of the GNU Lesser General
described in the LICENSE.md file in the root directory.
************************************************************************ | Copyright ( c ) 2021 OCamlPro SAS
Public License version 2.1 , with the special exception on linking
val read : string -> Ton_types.AbiContract.t
val write : string -> Ton_types.AbiContract.t -> unit
|
af371d15686741e68c314c0f5f0e8ccffb378ce7ad85b9a51ecd2eb6578285e7 | ghcjs/ghcjs-boot | Types.hs | # LANGUAGE MagicHash , NoImplicitPrelude , TypeFamilies , UnboxedTuples ,
RoleAnnotations #
RoleAnnotations #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.Types
Copyright : ( c ) The University of Glasgow 2009
License : see libraries / ghc - prim / LICENSE
--
-- Maintainer :
-- Stability : internal
Portability : non - portable ( GHC Extensions )
--
GHC type definitions .
-- Use GHC.Exts from the base package instead of importing this
-- module directly.
--
-----------------------------------------------------------------------------
module GHC.Types (
Bool(..), Char(..), Int(..), Word(..),
Float(..), Double(..),
Ordering(..), IO(..),
isTrue#,
SPEC(..),
Coercible,
) where
import GHC.Prim
infixr 5 :
data [] a = [] | a : [a]
# CTYPE " HsBool " #
| The character type ' ' is an enumeration whose values represent
( or equivalently ISO\/IEC 10646 ) characters ( see
< / > for details ) . This set extends the ISO 8859 - 1
( Latin-1 ) character set ( the first 256 characters ) , which is itself an extension
of the ASCII character set ( the first 128 characters ) . A character literal in
has type ' ' .
To convert a ' ' to or from the corresponding ' Int ' value defined
by Unicode , use ' Prelude.toEnum ' and ' Prelude.fromEnum ' from the
' Prelude . ' class respectively ( or equivalently ' ord ' and ' chr ' ) .
Unicode (or equivalently ISO\/IEC 10646) characters (see
</> for details). This set extends the ISO 8859-1
(Latin-1) character set (the first 256 characters), which is itself an extension
of the ASCII character set (the first 128 characters). A character literal in
Haskell has type 'Char'.
To convert a 'Char' to or from the corresponding 'Int' value defined
by Unicode, use 'Prelude.toEnum' and 'Prelude.fromEnum' from the
'Prelude.Enum' class respectively (or equivalently 'ord' and 'chr').
-}
data {-# CTYPE "HsChar" #-} Char = C# Char#
| A fixed - precision integer type with at least the range @[-2 ^ 29 .. 2 ^ 29 - 1]@.
-- The exact range for a given implementation can be determined by using
' Prelude.minBound ' and ' Prelude.maxBound ' from the ' Prelude . Bounded ' class .
# CTYPE " HsInt " #
-- |A 'Word' is an unsigned integral type, with the same size as 'Int'.
# CTYPE " HsWord " #
-- | Single-precision floating point numbers.
-- It is desirable that this type be at least equal in range and precision
to the IEEE single - precision type .
data {-# CTYPE "HsFloat" #-} Float = F# Float#
-- | Double-precision floating point numbers.
-- It is desirable that this type be at least equal in range and precision
to the IEEE double - precision type .
# CTYPE " " #
data Ordering = LT | EQ | GT
|
A value of type @'IO ' a@ is a computation which , when performed ,
does some I\/O before returning a value of type
There is really only one way to \"perform\ " an I\/O action : bind it to
@Main.main@ in your program . When your program is run , the I\/O will
be performed . It is n't possible to perform I\/O from an arbitrary
function , unless that function is itself in the ' IO ' monad and called
at some point , directly or indirectly , from
' IO ' is a monad , so ' IO ' actions can be combined using either the do - notation
or the ' > > ' and ' > > = ' operations from the ' Monad ' class .
A value of type @'IO' a@ is a computation which, when performed,
does some I\/O before returning a value of type @a@.
There is really only one way to \"perform\" an I\/O action: bind it to
@Main.main@ in your program. When your program is run, the I\/O will
be performed. It isn't possible to perform I\/O from an arbitrary
function, unless that function is itself in the 'IO' monad and called
at some point, directly or indirectly, from @Main.main@.
'IO' is a monad, so 'IO' actions can be combined using either the do-notation
or the '>>' and '>>=' operations from the 'Monad' class.
-}
newtype IO a = IO (State# RealWorld -> (# State# RealWorld, a #))
type role IO representational
The above role annotation is redundant but is included because this role
is significant in the normalisation of FFI types . Specifically , if this
role were to become nominal ( which would be very strange , indeed ! ) , changes
elsewhere in GHC would be necessary . See [ FFI type roles ] in TcForeign .
The above role annotation is redundant but is included because this role
is significant in the normalisation of FFI types. Specifically, if this
role were to become nominal (which would be very strange, indeed!), changes
elsewhere in GHC would be necessary. See [FFI type roles] in TcForeign.
-}
Note [ Kind - changing of ( ~ ) and Coercible ]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
( ~ ) and Coercible are tricky to define . To the user , they must appear as
constraints , but we can not define them as such in Haskell . But we also can not
just define them only in GHC.Prim ( like ( - > ) ) , because we need a real module
for them , e.g. to compile the constructor 's info table .
Furthermore the type of MkCoercible can not be written in Haskell
( no syntax for ~#R ) .
So we define them as regular data types in GHC.Types , and do magic in TysWiredIn ,
inside GHC , to change the kind and type .
Note [Kind-changing of (~) and Coercible]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
(~) and Coercible are tricky to define. To the user, they must appear as
constraints, but we cannot define them as such in Haskell. But we also cannot
just define them only in GHC.Prim (like (->)), because we need a real module
for them, e.g. to compile the constructor's info table.
Furthermore the type of MkCoercible cannot be written in Haskell
(no syntax for ~#R).
So we define them as regular data types in GHC.Types, and do magic in TysWiredIn,
inside GHC, to change the kind and type.
-}
-- | A data constructor used to box up all unlifted equalities
--
The type constructor is special in that GHC pretends that it
-- has kind (? -> ? -> Fact) rather than (* -> * -> *)
data (~) a b = Eq# ((~#) a b)
| This two - parameter class has instances for types @a@ and if
-- the compiler can infer that they have the same representation. This class
-- does not have regular instances; instead they are created on-the-fly during
-- type-checking. Trying to manually declare an instance of @Coercible@
-- is an error.
--
Nevertheless one can pretend that the following three kinds of instances
exist . First , as a trivial base - case :
--
-- @instance a a@
--
-- Furthermore, for every type constructor there is
-- an instance that allows to coerce under the type constructor. For
example , let @D@ be a prototypical type constructor ( @data@ or
@newtype@ ) with three type arguments , which have roles @nominal@ ,
-- @representational@ resp. @phantom@. Then there is an instance of
-- the form
--
@instance Coercible b b\ ' = > Coercible ( D a b c ) ( D a b\ ' c\')@
--
-- Note that the @nominal@ type arguments are equal, the
-- @representational@ type arguments can differ, but need to have a
-- @Coercible@ instance themself, and the @phantom@ type arguments can be
-- changed arbitrarily.
--
The third kind of instance exists for every @newtype NT = MkNT T@ and
comes in two variants , namely
--
@instance Coercible a T = > Coercible a NT@
--
-- @instance Coercible T b => Coercible NT b@
--
-- This instance is only usable if the constructor @MkNT@ is in scope.
--
-- If, as a library author of a type constructor like @Set a@, you
-- want to prevent a user of your module to write
-- @coerce :: Set T -> Set NT@,
you need to set the role of @Set@\ 's type parameter to @nominal@ ,
-- by writing
--
-- @type role Set nominal@
--
-- For more details about this feature, please refer to
-- </~eir/papers/2014/coercible/coercible.pdf Safe Coercions>
by , , and .
--
-- @since 4.7.0.0
data Coercible a b = MkCoercible ((~#) a b)
-- It's really ~R# (representational equality), not ~#,
but * we do n't yet have syntax for ~R # ,
-- * the compiled code is the same either way
-- * TysWiredIn has the truthful types
-- Also see Note [Kind-changing of (~) and Coercible]
| for ' tagToEnum # ' . Returns True if its parameter is 1 # and False
-- if it is 0#.
# INLINE isTrue # #
isTrue# :: Int# -> Bool -- See Note [Optimizing isTrue#]
isTrue# x = tagToEnum# x
-- Note [Optimizing isTrue#]
-- ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
--
-- Current definition of isTrue# is a temporary workaround. We would like to
-- have functions isTrue# and isFalse# defined like this:
--
-- isTrue# :: Int# -> Bool
-- isTrue# 1# = True
-- isTrue# _ = False
--
-- isFalse# :: Int# -> Bool
isFalse # 0 # = True
-- isFalse# _ = False
--
-- These functions would allow us to safely check if a tag can represent True
-- or False. Using isTrue# and isFalse# as defined above will not introduce
-- additional case into the code. When we scrutinize return value of isTrue#
-- or isFalse#, either explicitly in a case expression or implicitly in a guard,
-- the result will always be a single case expression (given that optimizations
-- are turned on). This results from case-of-case transformation. Consider this
code ( this is both valid and ):
--
-- case isTrue# (a ># b) of
-- True -> e1
-- False -> e2
--
-- Inlining isTrue# gives:
--
-- case (case (a ># b) of { 1# -> True; _ -> False } ) of
-- True -> e1
-- False -> e2
--
-- Case-of-case transforms that to:
--
-- case (a ># b) of
-- 1# -> case True of
-- True -> e1
-- False -> e2
-- _ -> case False of
-- True -> e1
-- False -> e2
--
-- Which is then simplified by case-of-known-constructor:
--
-- case (a ># b) of
-- 1# -> e1
-- _ -> e2
--
While we get good Core here , the code generator will generate very bad Cmm
-- if e1 or e2 do allocation. It will push heap checks into case alternatives
which results in about 2.5 % increase in code size . Until this is improved we
-- just make isTrue# an alias to tagToEnum#. This is a temporary solution (if
you 're reading this in 2023 then things went wrong ) . See # 8326 .
--
| ' SPEC ' is used by GHC in the @SpecConstr@ pass in order to inform
-- the compiler when to be particularly aggressive. In particular, it
tells GHC to specialize regardless of size or the number of
-- specializations. However, not all loops fall into this category.
--
-- Libraries can specify this by using 'SPEC' data type to inform which
-- loops should be aggressively specialized.
data SPEC = SPEC | SPEC2
| null | https://raw.githubusercontent.com/ghcjs/ghcjs-boot/8c549931da27ba9e607f77195208ec156c840c8a/boot/ghc-prim/GHC/Types.hs | haskell | ---------------------------------------------------------------------------
|
Module : GHC.Types
Maintainer :
Stability : internal
Use GHC.Exts from the base package instead of importing this
module directly.
---------------------------------------------------------------------------
# CTYPE "HsChar" #
The exact range for a given implementation can be determined by using
|A 'Word' is an unsigned integral type, with the same size as 'Int'.
| Single-precision floating point numbers.
It is desirable that this type be at least equal in range and precision
# CTYPE "HsFloat" #
| Double-precision floating point numbers.
It is desirable that this type be at least equal in range and precision
| A data constructor used to box up all unlifted equalities
has kind (? -> ? -> Fact) rather than (* -> * -> *)
the compiler can infer that they have the same representation. This class
does not have regular instances; instead they are created on-the-fly during
type-checking. Trying to manually declare an instance of @Coercible@
is an error.
@instance a a@
Furthermore, for every type constructor there is
an instance that allows to coerce under the type constructor. For
@representational@ resp. @phantom@. Then there is an instance of
the form
Note that the @nominal@ type arguments are equal, the
@representational@ type arguments can differ, but need to have a
@Coercible@ instance themself, and the @phantom@ type arguments can be
changed arbitrarily.
@instance Coercible T b => Coercible NT b@
This instance is only usable if the constructor @MkNT@ is in scope.
If, as a library author of a type constructor like @Set a@, you
want to prevent a user of your module to write
@coerce :: Set T -> Set NT@,
by writing
@type role Set nominal@
For more details about this feature, please refer to
</~eir/papers/2014/coercible/coercible.pdf Safe Coercions>
@since 4.7.0.0
It's really ~R# (representational equality), not ~#,
* the compiled code is the same either way
* TysWiredIn has the truthful types
Also see Note [Kind-changing of (~) and Coercible]
if it is 0#.
See Note [Optimizing isTrue#]
Note [Optimizing isTrue#]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Current definition of isTrue# is a temporary workaround. We would like to
have functions isTrue# and isFalse# defined like this:
isTrue# :: Int# -> Bool
isTrue# 1# = True
isTrue# _ = False
isFalse# :: Int# -> Bool
isFalse# _ = False
These functions would allow us to safely check if a tag can represent True
or False. Using isTrue# and isFalse# as defined above will not introduce
additional case into the code. When we scrutinize return value of isTrue#
or isFalse#, either explicitly in a case expression or implicitly in a guard,
the result will always be a single case expression (given that optimizations
are turned on). This results from case-of-case transformation. Consider this
case isTrue# (a ># b) of
True -> e1
False -> e2
Inlining isTrue# gives:
case (case (a ># b) of { 1# -> True; _ -> False } ) of
True -> e1
False -> e2
Case-of-case transforms that to:
case (a ># b) of
1# -> case True of
True -> e1
False -> e2
_ -> case False of
True -> e1
False -> e2
Which is then simplified by case-of-known-constructor:
case (a ># b) of
1# -> e1
_ -> e2
if e1 or e2 do allocation. It will push heap checks into case alternatives
just make isTrue# an alias to tagToEnum#. This is a temporary solution (if
the compiler when to be particularly aggressive. In particular, it
specializations. However, not all loops fall into this category.
Libraries can specify this by using 'SPEC' data type to inform which
loops should be aggressively specialized. | # LANGUAGE MagicHash , NoImplicitPrelude , TypeFamilies , UnboxedTuples ,
RoleAnnotations #
RoleAnnotations #-}
Copyright : ( c ) The University of Glasgow 2009
License : see libraries / ghc - prim / LICENSE
Portability : non - portable ( GHC Extensions )
GHC type definitions .
module GHC.Types (
Bool(..), Char(..), Int(..), Word(..),
Float(..), Double(..),
Ordering(..), IO(..),
isTrue#,
SPEC(..),
Coercible,
) where
import GHC.Prim
infixr 5 :
data [] a = [] | a : [a]
# CTYPE " HsBool " #
| The character type ' ' is an enumeration whose values represent
( or equivalently ISO\/IEC 10646 ) characters ( see
< / > for details ) . This set extends the ISO 8859 - 1
( Latin-1 ) character set ( the first 256 characters ) , which is itself an extension
of the ASCII character set ( the first 128 characters ) . A character literal in
has type ' ' .
To convert a ' ' to or from the corresponding ' Int ' value defined
by Unicode , use ' Prelude.toEnum ' and ' Prelude.fromEnum ' from the
' Prelude . ' class respectively ( or equivalently ' ord ' and ' chr ' ) .
Unicode (or equivalently ISO\/IEC 10646) characters (see
</> for details). This set extends the ISO 8859-1
(Latin-1) character set (the first 256 characters), which is itself an extension
of the ASCII character set (the first 128 characters). A character literal in
Haskell has type 'Char'.
To convert a 'Char' to or from the corresponding 'Int' value defined
by Unicode, use 'Prelude.toEnum' and 'Prelude.fromEnum' from the
'Prelude.Enum' class respectively (or equivalently 'ord' and 'chr').
-}
| A fixed - precision integer type with at least the range @[-2 ^ 29 .. 2 ^ 29 - 1]@.
' Prelude.minBound ' and ' Prelude.maxBound ' from the ' Prelude . Bounded ' class .
# CTYPE " HsInt " #
# CTYPE " HsWord " #
to the IEEE single - precision type .
to the IEEE double - precision type .
# CTYPE " " #
data Ordering = LT | EQ | GT
|
A value of type @'IO ' a@ is a computation which , when performed ,
does some I\/O before returning a value of type
There is really only one way to \"perform\ " an I\/O action : bind it to
@Main.main@ in your program . When your program is run , the I\/O will
be performed . It is n't possible to perform I\/O from an arbitrary
function , unless that function is itself in the ' IO ' monad and called
at some point , directly or indirectly , from
' IO ' is a monad , so ' IO ' actions can be combined using either the do - notation
or the ' > > ' and ' > > = ' operations from the ' Monad ' class .
A value of type @'IO' a@ is a computation which, when performed,
does some I\/O before returning a value of type @a@.
There is really only one way to \"perform\" an I\/O action: bind it to
@Main.main@ in your program. When your program is run, the I\/O will
be performed. It isn't possible to perform I\/O from an arbitrary
function, unless that function is itself in the 'IO' monad and called
at some point, directly or indirectly, from @Main.main@.
'IO' is a monad, so 'IO' actions can be combined using either the do-notation
or the '>>' and '>>=' operations from the 'Monad' class.
-}
newtype IO a = IO (State# RealWorld -> (# State# RealWorld, a #))
type role IO representational
The above role annotation is redundant but is included because this role
is significant in the normalisation of FFI types . Specifically , if this
role were to become nominal ( which would be very strange , indeed ! ) , changes
elsewhere in GHC would be necessary . See [ FFI type roles ] in TcForeign .
The above role annotation is redundant but is included because this role
is significant in the normalisation of FFI types. Specifically, if this
role were to become nominal (which would be very strange, indeed!), changes
elsewhere in GHC would be necessary. See [FFI type roles] in TcForeign.
-}
Note [ Kind - changing of ( ~ ) and Coercible ]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
( ~ ) and Coercible are tricky to define . To the user , they must appear as
constraints , but we can not define them as such in Haskell . But we also can not
just define them only in GHC.Prim ( like ( - > ) ) , because we need a real module
for them , e.g. to compile the constructor 's info table .
Furthermore the type of MkCoercible can not be written in Haskell
( no syntax for ~#R ) .
So we define them as regular data types in GHC.Types , and do magic in TysWiredIn ,
inside GHC , to change the kind and type .
Note [Kind-changing of (~) and Coercible]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
(~) and Coercible are tricky to define. To the user, they must appear as
constraints, but we cannot define them as such in Haskell. But we also cannot
just define them only in GHC.Prim (like (->)), because we need a real module
for them, e.g. to compile the constructor's info table.
Furthermore the type of MkCoercible cannot be written in Haskell
(no syntax for ~#R).
So we define them as regular data types in GHC.Types, and do magic in TysWiredIn,
inside GHC, to change the kind and type.
-}
The type constructor is special in that GHC pretends that it
data (~) a b = Eq# ((~#) a b)
| This two - parameter class has instances for types @a@ and if
Nevertheless one can pretend that the following three kinds of instances
exist . First , as a trivial base - case :
example , let @D@ be a prototypical type constructor ( @data@ or
@newtype@ ) with three type arguments , which have roles @nominal@ ,
@instance Coercible b b\ ' = > Coercible ( D a b c ) ( D a b\ ' c\')@
The third kind of instance exists for every @newtype NT = MkNT T@ and
comes in two variants , namely
@instance Coercible a T = > Coercible a NT@
you need to set the role of @Set@\ 's type parameter to @nominal@ ,
by , , and .
data Coercible a b = MkCoercible ((~#) a b)
but * we do n't yet have syntax for ~R # ,
| for ' tagToEnum # ' . Returns True if its parameter is 1 # and False
# INLINE isTrue # #
isTrue# x = tagToEnum# x
isFalse # 0 # = True
code ( this is both valid and ):
While we get good Core here , the code generator will generate very bad Cmm
which results in about 2.5 % increase in code size . Until this is improved we
you 're reading this in 2023 then things went wrong ) . See # 8326 .
| ' SPEC ' is used by GHC in the @SpecConstr@ pass in order to inform
tells GHC to specialize regardless of size or the number of
data SPEC = SPEC | SPEC2
|
6166e6daf54e8bc9614b0d83acd98bf04e0ca98b463a73b9d7ce297cd92a446c | sionescu/iolib | buffer.lisp | ;;;; -*- Mode: Lisp; indent-tabs-mode: nil -*-
;;;
;;; --- Foreign memory buffers.
;;;
(in-package :iolib/streams)
;;;; Foreign Buffers
(defconstant +bytes-per-iobuf+ (* 4 1024))
;;; FIXME: make this right
;;; probably not all SIMPLE-ARRAYs are admissible
;;; on all implementations
(deftype compatible-lisp-array ()
'(simple-array * (*)))
(defun allocate-iobuf (&optional (size +bytes-per-iobuf+))
(let ((b (%make-iobuf)))
(setf (iobuf-data b) (foreign-alloc :uint8 :count size)
(iobuf-size b) size)
(values b)))
(defun free-iobuf (iobuf)
(unless (null-pointer-p (iobuf-data iobuf))
(foreign-free (iobuf-data iobuf)))
(setf (iobuf-data iobuf) (null-pointer))
(values iobuf))
(defun iobuf-length (iobuf)
(- (iobuf-end iobuf)
(iobuf-start iobuf)))
(defun iobuf-start-pointer (iobuf)
(inc-pointer (iobuf-data iobuf)
(iobuf-start iobuf)))
(defun iobuf-end-pointer (iobuf)
(inc-pointer (iobuf-data iobuf)
(iobuf-end iobuf)))
(defun iobuf-empty-p (iobuf)
(= (iobuf-end iobuf)
(iobuf-start iobuf)))
(defun iobuf-full-p (iobuf)
(= (iobuf-end iobuf)
(iobuf-size iobuf)))
(defun iobuf-end-space-length (iobuf)
(- (iobuf-size iobuf)
(iobuf-end iobuf)))
(defun iobuf-reset (iobuf)
(setf (iobuf-start iobuf) 0
(iobuf-end iobuf) 0))
(defun iobuf-peek (iobuf &optional (offset 0))
(bref iobuf (+ (iobuf-start iobuf) offset)))
(defun iobuf-copy-data-to-start (iobuf)
(declare (type iobuf iobuf))
(isys:memmove
(iobuf-data iobuf)
(inc-pointer (iobuf-data iobuf)
(iobuf-start iobuf))
(iobuf-length iobuf))
(setf (iobuf-end iobuf) (iobuf-length iobuf))
(setf (iobuf-start iobuf) 0))
(defun iobuf-can-fit-slice-p (iobuf start end)
(<= (- end start) (iobuf-end-space-length iobuf)))
(defun iobuf-append-slice (iobuf array start end)
(let ((slice-length (- end start)))
(iobuf-copy-from-lisp-array array start iobuf
(iobuf-end iobuf) slice-length)
(incf (iobuf-end iobuf) slice-length)))
BREF , ( SETF BREF ) and BUFFER - COPY * DO NOT * check boundaries
;;; that must be done by their callers
(defun bref (iobuf index)
(declare (type iobuf iobuf)
(type buffer-index index))
(debug-only (assert (not (minusp index))))
(mem-aref (iobuf-data iobuf) :uint8 index))
(defun (setf bref) (octet iobuf index)
(declare (type (unsigned-byte 8) octet)
(type iobuf iobuf)
(type buffer-index index))
(debug-only
(assert (>= index 0))
(assert (< index (iobuf-size iobuf))))
(setf (mem-aref (iobuf-data iobuf) :uint8 index) octet))
(defun iobuf-copy-from-lisp-array (src soff dst doff length)
(declare (type compatible-lisp-array src)
(type iobuf dst)
(type buffer-index soff doff length))
(debug-only
(assert (>= doff 0))
(assert (>= soff 0))
(assert (<= (+ doff length) (iobuf-size dst))))
(let ((dst-ptr (iobuf-data dst)))
(with-pointer-to-vector-data (src-ptr src)
(isys:memcpy
(inc-pointer dst-ptr doff)
(inc-pointer src-ptr soff)
length))))
(defun iobuf-copy-into-lisp-array (src soff dst doff length)
(declare (type iobuf src)
(type compatible-lisp-array dst)
(type buffer-index soff doff length))
(debug-only
(assert (>= doff 0))
(assert (>= soff 0))
(assert (<= (+ doff length) (length dst))))
(let ((src-ptr (iobuf-data src)))
(with-pointer-to-vector-data (dst-ptr dst)
(isys:memcpy
(inc-pointer dst-ptr doff)
(inc-pointer src-ptr soff)
length))))
(defun iobuf-pop-octet (iobuf)
(declare (type iobuf iobuf))
(debug-only (assert (> (iobuf-length iobuf) 0)))
(let ((start (iobuf-start iobuf)))
(prog1 (bref iobuf start)
(incf (iobuf-start iobuf)))))
(defun iobuf-push-octet (iobuf octet)
(declare (type iobuf iobuf)
(type (unsigned-byte 8) octet))
(debug-only (assert (not (iobuf-full-p iobuf))))
(let ((end (iobuf-end iobuf)))
(prog1 (setf (bref iobuf end) octet)
(incf (iobuf-end iobuf)))))
| null | https://raw.githubusercontent.com/sionescu/iolib/dac715c81db55704db623d8b2cfc399ebcf6175f/src/streams/gray/buffer.lisp | lisp | -*- Mode: Lisp; indent-tabs-mode: nil -*-
--- Foreign memory buffers.
Foreign Buffers
FIXME: make this right
probably not all SIMPLE-ARRAYs are admissible
on all implementations
that must be done by their callers |
(in-package :iolib/streams)

;;;; Foreign Buffers

;; Size in octets of a freshly allocated I/O buffer.
(defconstant +bytes-per-iobuf+ (* 4 1024))

;;; FIXME: make this right
;;; probably not all SIMPLE-ARRAYs are admissible
;;; on all implementations
(deftype compatible-lisp-array ()
  '(simple-array * (*)))

;; Allocate a buffer backed by SIZE octets of foreign memory.
(defun allocate-iobuf (&optional (size +bytes-per-iobuf+))
  (let ((b (%make-iobuf)))
    (setf (iobuf-data b) (foreign-alloc :uint8 :count size)
          (iobuf-size b) size)
    (values b)))

;; Release the foreign memory and null out the data pointer, so calling
;; FREE-IOBUF again on the same buffer is harmless.
(defun free-iobuf (iobuf)
  (unless (null-pointer-p (iobuf-data iobuf))
    (foreign-free (iobuf-data iobuf)))
  (setf (iobuf-data iobuf) (null-pointer))
  (values iobuf))

;; Number of octets currently buffered (between the two cursors).
(defun iobuf-length (iobuf)
  (- (iobuf-end iobuf)
     (iobuf-start iobuf)))

;; Foreign pointer to the first unread octet.
(defun iobuf-start-pointer (iobuf)
  (inc-pointer (iobuf-data iobuf)
               (iobuf-start iobuf)))

;; Foreign pointer just past the last written octet.
(defun iobuf-end-pointer (iobuf)
  (inc-pointer (iobuf-data iobuf)
               (iobuf-end iobuf)))

;; True when there is no unread data.
(defun iobuf-empty-p (iobuf)
  (= (iobuf-end iobuf)
     (iobuf-start iobuf)))

;; True when the write cursor has reached the physical end of the buffer.
(defun iobuf-full-p (iobuf)
  (= (iobuf-end iobuf)
     (iobuf-size iobuf)))

;; Octets of free space remaining after the write cursor.
(defun iobuf-end-space-length (iobuf)
  (- (iobuf-size iobuf)
     (iobuf-end iobuf)))

;; Rewind both cursors, discarding any buffered data.
(defun iobuf-reset (iobuf)
  (setf (iobuf-start iobuf) 0
        (iobuf-end iobuf) 0))

;; Read the octet OFFSET positions past the read cursor, without
;; consuming it.
(defun iobuf-peek (iobuf &optional (offset 0))
  (bref iobuf (+ (iobuf-start iobuf) offset)))
;; Shift the buffered data down to offset 0 so that all free space is
;; contiguous at the end of the buffer.
(defun iobuf-copy-data-to-start (iobuf)
  (declare (type iobuf iobuf))
  (isys:memmove
   (iobuf-data iobuf)
   (inc-pointer (iobuf-data iobuf)
                (iobuf-start iobuf))
   (iobuf-length iobuf))
  (setf (iobuf-end iobuf) (iobuf-length iobuf))
  (setf (iobuf-start iobuf) 0))

;; True when the slice [START, END) would fit in the remaining end space.
(defun iobuf-can-fit-slice-p (iobuf start end)
  (<= (- end start) (iobuf-end-space-length iobuf)))

;; Copy ARRAY[START, END) after the current end of IOBUF and advance the
;; write cursor.  Callers should check IOBUF-CAN-FIT-SLICE-P first.
(defun iobuf-append-slice (iobuf array start end)
  (let ((slice-length (- end start)))
    (iobuf-copy-from-lisp-array array start iobuf
                                (iobuf-end iobuf) slice-length)
    (incf (iobuf-end iobuf) slice-length)))

;;; BREF, (SETF BREF) and BUFFER-COPY *DO NOT* check boundaries
;;; that must be done by their callers

;; Read the octet at absolute INDEX in the foreign data block.
(defun bref (iobuf index)
  (declare (type iobuf iobuf)
           (type buffer-index index))
  (debug-only (assert (not (minusp index))))
  (mem-aref (iobuf-data iobuf) :uint8 index))

;; Write OCTET at absolute INDEX in the foreign data block.
(defun (setf bref) (octet iobuf index)
  (declare (type (unsigned-byte 8) octet)
           (type iobuf iobuf)
           (type buffer-index index))
  (debug-only
    (assert (>= index 0))
    (assert (< index (iobuf-size iobuf))))
  (setf (mem-aref (iobuf-data iobuf) :uint8 index) octet))
;; Copy LENGTH octets from lisp array SRC (starting at SOFF) into the
;; foreign buffer DST at absolute offset DOFF.
(defun iobuf-copy-from-lisp-array (src soff dst doff length)
  (declare (type compatible-lisp-array src)
           (type iobuf dst)
           (type buffer-index soff doff length))
  (debug-only
    (assert (>= doff 0))
    (assert (>= soff 0))
    (assert (<= (+ doff length) (iobuf-size dst))))
  (let ((dst-ptr (iobuf-data dst)))
    (with-pointer-to-vector-data (src-ptr src)
      (isys:memcpy
       (inc-pointer dst-ptr doff)
       (inc-pointer src-ptr soff)
       length))))

;; Copy LENGTH octets from the foreign buffer SRC (at absolute offset
;; SOFF) into lisp array DST starting at DOFF.
(defun iobuf-copy-into-lisp-array (src soff dst doff length)
  (declare (type iobuf src)
           (type compatible-lisp-array dst)
           (type buffer-index soff doff length))
  (debug-only
    (assert (>= doff 0))
    (assert (>= soff 0))
    (assert (<= (+ doff length) (length dst))))
  (let ((src-ptr (iobuf-data src)))
    (with-pointer-to-vector-data (dst-ptr dst)
      (isys:memcpy
       (inc-pointer dst-ptr doff)
       (inc-pointer src-ptr soff)
       length))))

;; Consume and return the octet at the read cursor.
(defun iobuf-pop-octet (iobuf)
  (declare (type iobuf iobuf))
  (debug-only (assert (> (iobuf-length iobuf) 0)))
  (let ((start (iobuf-start iobuf)))
    (prog1 (bref iobuf start)
      (incf (iobuf-start iobuf)))))

;; Append OCTET at the write cursor and return it.
(defun iobuf-push-octet (iobuf octet)
  (declare (type iobuf iobuf)
           (type (unsigned-byte 8) octet))
  (debug-only (assert (not (iobuf-full-p iobuf))))
  (let ((end (iobuf-end iobuf)))
    (prog1 (setf (bref iobuf end) octet)
      (incf (iobuf-end iobuf)))))
|
7ea0b5c807b5dc701f555319c3a90e8778f5fd897f3720c6901d9e9dd1c3473b | tkych/lisp-dojo | 003.lisp | Last modified : 2013 - 10 - 15 18:58:20 tkych
(define-practice
:id 003
:name my-last
:level 0
:problem "
MY-LAST list => cons/null
Make function MY-LAST.
It returns the last cons of a `list'. If `list' is (), returns ().
Examples:
(my-last '()) => NIL
(my-last '(a b c d)) => (D)
(my-last '(a b (c d))) => ((C D))
"
:hint
nil
:solutions "
* (defun my-last (lst)
(if (endp (rest lst))
lst
(my-last (rest lst))))"
:reference "
* #last"
:test-env
nil
:test
((<=>? (my-last '()) (last '()))
(<=>? (my-last '(a b c d)) (last '(a b c d)))
(<=>? (my-last '(a b (c d))) (last '(a b (c d)))))
)
| null | https://raw.githubusercontent.com/tkych/lisp-dojo/ba83d025bc03101eec43ec6be44585d7b076caf6/practices/003.lisp | lisp | Last modified : 2013 - 10 - 15 18:58:20 tkych
;; Practice #003: MY-LAST — re-implement CL:LAST for proper lists.
;; DEFINE-PRACTICE is the project's practice-registration macro; the
;; :test clauses compare MY-LAST against the built-in LAST.
(define-practice
 :id 003
 :name my-last
 :level 0
 :problem "
MY-LAST list => cons/null
Make function MY-LAST.
It returns the last cons of a `list'. If `list' is (), returns ().
Examples:
(my-last '()) => NIL
(my-last '(a b c d)) => (D)
(my-last '(a b (c d))) => ((C D))
"
 :hint
 nil
 :solutions "
* (defun my-last (lst)
(if (endp (rest lst))
lst
(my-last (rest lst))))"
 :reference "
* #last"
 :test-env
 nil
 :test
 ((<=>? (my-last '()) (last '()))
  (<=>? (my-last '(a b c d)) (last '(a b c d)))
  (<=>? (my-last '(a b (c d))) (last '(a b (c d)))))
 )
| |
42bce414c88dd7b08e779e082b37b52964193fb07d84bcc68c200526e9af91fc | glguy/advent | 16.hs | # Language QuasiQuotes , BlockArguments , LambdaCase #
|
Module : Main
Description : Day 16 solution
Copyright : ( c ) , 2021
License : ISC
Maintainer :
< >
We 're given facts about a bunch of different /Sues/ and asked to
check which one matches what we know about the one true /Sue/.
Module : Main
Description : Day 16 solution
Copyright : (c) Eric Mertens, 2021
License : ISC
Maintainer :
<>
We're given facts about a bunch of different /Sues/ and asked to
check which one matches what we know about the one true /Sue/.
-}
module Main where
import Advent.Format (format)
main :: IO ()
main =
do input <- [format|2015 16 (Sue %d: (%s: %d)&(, )%n)*|]
print [i | (i, props) <- input, matchesClues1 props]
print [i | (i, props) <- input, matchesClues2 props]
-- | Predicate for properties that match exactly.
matchesClues1 :: [(String,Int)] -> Bool
matchesClues1 = matcher (const (==))
-- | Predicate like 'matchesClues1' but with special cases for
-- /cats/, /trees/, /pomeranians/, and /goldfish/.
matchesClues2 :: [(String,Int)] -> Bool
matchesClues2 =
matcher \case
"cats" -> (<)
"trees" -> (<)
"pomeranians" -> (>)
"goldfish" -> (>)
_ -> (==)
-- | Match a list of properties against the known hints.
matcher ::
(String -> Int -> Int -> Bool) {- ^ comparison selector -} ->
[(String,Int)] {- ^ list of properties -} ->
Bool {- ^ properties match clues -}
matcher match = all \(prop, memory) ->
match prop (clues prop) memory
-- | Returns the given hint value for each property.
clues :: String -> Int
clues "children" = 3
clues "cats" = 7
clues "samoyeds" = 2
clues "pomeranians" = 3
clues "akitas" = 0
clues "vizslas" = 0
clues "goldfish" = 5
clues "trees" = 3
clues "cars" = 2
clues "perfumes" = 1
| null | https://raw.githubusercontent.com/glguy/advent/7ab9f9e47208fd5720e36bac33fee2b78d4ec50b/solutions/src/2015/16.hs | haskell | | Predicate for properties that match exactly.
| Predicate like 'matchesClues1' but with special cases for
/cats/, /trees/, /pomeranians/, and /goldfish/.
| Match a list of properties against the known hints.
^ comparison selector
^ list of properties
^ properties match clues
| Returns the given hint value for each property. | # Language QuasiQuotes , BlockArguments , LambdaCase #
|
Module : Main
Description : Day 16 solution
Copyright : ( c ) , 2021
License : ISC
Maintainer :
< >
We 're given facts about a bunch of different /Sues/ and asked to
check which one matches what we know about the one true /Sue/.
Module : Main
Description : Day 16 solution
Copyright : (c) Eric Mertens, 2021
License : ISC
Maintainer :
<>
We're given facts about a bunch of different /Sues/ and asked to
check which one matches what we know about the one true /Sue/.
-}
module Main where
import Advent.Format (format)
-- | Parse the puzzle input with the Advent format quasiquoter and print
--   the Sue numbers matching part 1 and part 2 respectively.
main :: IO ()
main =
  do input <- [format|2015 16 (Sue %d: (%s: %d)&(, )%n)*|]
     print [i | (i, props) <- input, matchesClues1 props]
     print [i | (i, props) <- input, matchesClues2 props]
-- | Predicate for properties that match the clues exactly.
matchesClues1 :: [(String,Int)] -> Bool
matchesClues1 = matcher (const (==))

-- | Predicate like 'matchesClues1' but with special cases for
--   /cats/, /trees/, /pomeranians/, and /goldfish/, which are compared
--   as strict bounds instead of exact values.
matchesClues2 :: [(String,Int)] -> Bool
matchesClues2 =
  matcher \case
    "cats" -> (<)
    "trees" -> (<)
    "pomeranians" -> (>)
    "goldfish" -> (>)
    _ -> (==)
-- | Match a list of properties against the known hints, using a
--   per-property comparison chosen by the selector.
--   (Fix: the body of the type signature was stripped in this copy of
--   the file — only the bare "matcher ::" survived; restored.)
matcher ::
  (String -> Int -> Int -> Bool) {- ^ comparison selector -} ->
  [(String,Int)] {- ^ list of properties -} ->
  Bool {- ^ properties match clues -}
matcher match = all \(prop, memory) ->
  match prop (clues prop) memory
-- | The known hint value for each property; calling it with a property
--   name outside the table is a program error, as in the original
--   per-equation definition.
clues :: String -> Int
clues prop =
  case lookup prop table of
    Just n  -> n
    Nothing -> error ("clues: unknown property " ++ prop)
  where
    table =
      [ ("children", 3), ("cats", 7), ("samoyeds", 2), ("pomeranians", 3)
      , ("akitas", 0), ("vizslas", 0), ("goldfish", 5), ("trees", 3)
      , ("cars", 2), ("perfumes", 1) ]
|
a046c7fc99575fc486be2a0693067edf71f5657c55d545365139174c233463b8 | yellowbean/Hastructure | Util.hs | {-# LANGUAGE OverloadedStrings #-}
# LANGUAGE ScopedTypeVariables #
module Util
(mulBR,mulBIR,mulBI,mulBInt,mulBInteger,lastN,yearCountFraction,genSerialDates
,getValByDate,getValByDates,projDatesByPattern
,genSerialDatesTill,genSerialDatesTill2,subDates,getTsDates,sliceDates,SliceType(..)
,calcInt,calcIntRate,calcIntRateCurve
,multiplyTs,zipTs,getTsVals,divideBI,mulIR, daysInterval
,replace,paddingDefault, capWith, pv2, splitByDate, rangeBy
)
where
import qualified Data.Time as T
import Data.List
import Data.Fixed
import Data.Ratio ((%))
import Data.Ix
import Data.Maybe
import qualified Data.Map as M
import Lib
import Types
import Text.Printf
import Control.Exception
import Debug.Trace
debug = flip trace
-- | Balance times rate, rounded into the fixed-point balance type.
mulBR :: Balance -> Rate -> Centi
mulBR b r = fromRational (toRational b * r)

-- | Balance times an interest rate, rounded into the fixed-point type.
mulBIR :: Balance -> IRate -> Centi
mulBIR b r = fromRational (toRational b * toRational r)

-- | Int times rational, kept exact.
mulIR :: Int -> Rational -> Rational
mulIR i r = toRational i * r

-- | Balance times int, kept exact as a rational.
mulBInt :: Balance -> Int -> Rational
mulBInt b i = toRational b * toRational i

-- | Balance times integer, kept exact as a rational.
mulBInteger :: Balance -> Integer -> Rational
mulBInteger b i = mulBInt b (fromInteger i)

-- | Balance times an interest rate, rounded into an amount.
mulBI :: Balance -> IRate -> Amount
mulBI bal r = fromRational (toRational bal * toRational r)

-- | Divide a balance evenly by an integer count.
divideBI :: Balance -> Int -> Balance
divideBI b i = fromRational (toRational b / toRational i)
-- | Return whatever remains of the longer list once the shorter one is
--   exhausted; helper for 'lastN'.
--   (Fix: the consumed heads were bound to names but never used; bind
--   them as wildcards to avoid unused-binding warnings.)
zipLeftover :: [a] -> [a] -> [a]
zipLeftover [] [] = []
zipLeftover xs [] = xs
zipLeftover [] ys = ys
zipLeftover (_:xs) (_:ys) = zipLeftover xs ys

-- | Last @n@ elements of a list in a single traversal; returns the whole
--   list when @n@ exceeds its length, and @[]@ when @n <= 0@.
lastN :: Int -> [a] -> [a]
lastN n xs = zipLeftover (drop n xs) xs
-- -count-conventions
-- | Fraction of a year between @sd@ and @ed@ under day-count convention @dc@.
--   See the standard day-count convention definitions (ACT/ACT, ACT/360,
--   the 30/360 family, etc.).
--   TODO: cross-check against the SWIFT MT565 field-22F day-count codes.
yearCountFraction :: DayCount -> Date -> Date -> Rational
yearCountFraction dc sd ed
  = case dc of
      DC_ACT_ACT ->
        if sameYear then
          _diffDays % daysOfYear syear
        else
          -- leading stub year + trailing stub year + whole years in between
          (sDaysTillYearEnd % (daysOfYear syear)) + (eDaysAfterYearBeg % (daysOfYear eyear)) + (pred _diffYears)

      DC_ACT_365F -> _diffDays % 365

      DC_ACT_360 -> _diffDays % 360

      DC_ACT_365A -> if has_leap_day then
                       _diffDays % 366
                     else
                       _diffDays % 365

      DC_ACT_365L -> if T.isLeapYear eyear then
                       _diffDays % 366
                     else
                       _diffDays % 365

      -- "No leap" convention: a spanned Feb 29 is not counted.
      DC_NL_365 -> if has_leap_day then
                     (pred _diffDays) % 365
                   else
                     _diffDays % 365

      DC_30E_360 ->
        let _sday = f31to30 sday
            _eday = f31to30 eday
            num = toRational (_eday - _sday) + 30*_gapMonth + 360*_diffYears
        in
          -- NOTE(review): this result expression was lost in this copy of
          -- the file (only a `debug` remnant remained); restored as
          -- num / 360 to mirror the sibling 30/360 branches.
          num / 360

      DC_30Ep_360 ->
        let _sday = f31to30 sday
            (_eyear,_emonth,_eday) = T.toGregorian $
                                       if eday==31 then
                                         T.addDays 1 ed
                                       else
                                         ed
            __gapMonth = (toInteger $ _emonth - smonth) % 1
            __diffYears = (toInteger $ _eyear - syear) % 1
            num = toRational (_eday - _sday) + 30*__gapMonth + 360*__diffYears
        in
          num / 360

      DC_30_360_ISDA ->
        let _sday = f31to30 sday
            _eday = if _sday>=30 && eday==31 then
                      30
                    else
                      eday
            num = toRational (_eday - _sday) + 30*_gapMonth + 360*_diffYears
        in
          num / 360

      -- 30/360 Bond basis; also called 30E/360 ISDA in some sources.
      DC_30_360_German ->
        let _sday = if sday==31 || (endOfFeb syear smonth sday) then
                      -- NOTE(review): this then-branch value was lost in
                      -- this copy; restored as 30 (day clamped to 30),
                      -- matching the symmetric _eday branch below.
                      30
                    else
                      sday
            _eday = if eday==31 || (endOfFeb eyear emonth eday) then
                      30
                    else
                      eday
            num = toRational (_eday - _sday) + 30*_gapMonth + 360*_diffYears
        in
          num / 360

      DC_30_360_US ->
        let _sday = if (endOfFeb syear smonth sday) || sday==31 then
                      30
                    else
                      sday
            _eday = if (eday==31 && sday >= 30)||(endOfFeb eyear emonth eday) && (endOfFeb syear smonth sday) then
                      30
                    else
                      eday
            num = toRational (_eday - _sday) + 30*_gapMonth + 360*_diffYears
        in
          num / 360
  where
    daysOfYear y = if T.isLeapYear y then 366 else 365
    -- clamp a day-of-month of 31 down to 30 (30/360 conventions)
    f31to30 d = if d==31 then 30 else d
    -- is (y,m,d) the last day of February?
    endOfFeb y m d = if T.isLeapYear y then
                       (m==2) && d == 29
                     else
                       (m==2) && d == 28
    sameYear = syear == eyear
    -- does the interval [sd, ed] contain a Feb 29?
    has_leap_day
      = case (sameYear,sLeap,eLeap) of
          (True,False,False) -> False
          (True,True,_) -> inRange (sd,ed) (T.fromGregorian syear 2 29)
          _ -> let
                 _leapDays = [ T.fromGregorian _y 2 29 | _y <- range (syear,eyear) , (T.isLeapYear _y) ]
               in
                 any (inRange (sd,ed)) _leapDays
    _diffYears = (eyear - syear) % 1
    _gapDay = (toInteger (eday - sday)) % 1
    _gapMonth = (toInteger (emonth - smonth)) % 1
    sDaysTillYearEnd = succ $ T.diffDays (T.fromGregorian syear 12 31) sd
    eDaysAfterYearBeg = T.diffDays ed (T.fromGregorian eyear 1 1)
    _diffDays = toInteger $ T.diffDays ed sd
    sLeap = T.isLeapYear syear
    eLeap = T.isLeapYear eyear
    (syear,smonth,sday) = T.toGregorian sd
    (eyear,emonth,eday) = T.toGregorian ed
-- | Generate a stream of dates following pattern @dp@, keep those on or
--   after @sd@, and return the first @num@ of them.
genSerialDates :: DatePattern -> Date -> Int -> Dates
genSerialDates dp sd num
  = take num $ filter (>= sd) $
      case dp of
        MonthEnd ->
          [T.fromGregorian yearRange (fst __md) (snd __md) | yearRange <- [_y..(_y+yrs)]
                                                           ,__md <- monthEnds yearRange ]
          where
            -- enough whole years to cover num monthly dates
            yrs = fromIntegral $ div num 12 + 1
        QuarterEnd ->
          [T.fromGregorian yearRange __m __d | yearRange <- [_y..(_y+yrs)]
                                             ,(__m,__d) <- quarterEnds]
          where
            yrs = fromIntegral $ div num 4 + 1
        YearEnd ->
          [T.fromGregorian yearRange 12 31 | yearRange <- [_y..(_y+(toInteger num))]]
        YearFirst ->
          [T.fromGregorian yearRange 1 1 | yearRange <- [_y..(_y+(toInteger num))]]
        MonthFirst ->
          [T.fromGregorian yearRange monthRange 1 | yearRange <- [_y..(_y+yrs)]
                                                  , monthRange <- [1..12]]
          where
            yrs = fromIntegral $ div num 12 + 1
        QuarterFirst ->
          [T.fromGregorian yearRange __m 1 | yearRange <- [_y..(_y+yrs)]
                                           ,__m <- [3,6,9,12]]
          where
            yrs = fromIntegral $ div num 4 + 1
        MonthDayOfYear m d ->
          [T.fromGregorian yearRange m d | yearRange <- [_y..(_y+(toInteger num))]]
        DayOfMonth d ->
          [T.fromGregorian yearRange monthRange d | yearRange <- [_y..(_y+yrs)]
                                                  , monthRange <- [1..12]]
          where
            yrs = fromIntegral $ div num 12 + 1
  where
    -- (month, day) pairs for calendar quarter ends
    quarterEnds = [(3,31),(6,30),(9,30),(12,31)]
    -- month-end (month, day) pairs, leap-year aware for February
    monthEnds y =
      if T.isLeapYear y then
        [(1,31),(2,29),(3,31),(4,30),(5,31),(6,30),(7,31),(8,31),(9,30),(10,31),(11,30),(12,31)]
      else
        [(1,31),(2,28),(3,31),(4,30),(5,31),(6,30),(7,31),(8,31),(9,30),(10,31),(11,30),(12,31)]
    (_y,_m,_d) = T.toGregorian sd
    -- NOTE(review): yearBegin is bound but never used in this function
    yearBegin = T.fromGregorian _y 1 1
-- | All pattern dates from @sd@ up to, but excluding, @ed@.
genSerialDatesTill:: Date -> DatePattern -> Date -> Dates
genSerialDatesTill sd ptn ed
  = filter (< ed) $ genSerialDates ptn sd (fromInteger (succ num))
    where
      (sy,sm,sday) = T.toGregorian sd
      (ey,em,eday) = T.toGregorian ed
      T.CalendarDiffDays cdM cdD = T.diffGregorianDurationRollOver ed sd
      -- upper bound on how many pattern dates can fall inside the span,
      -- derived from the whole-month difference cdM
      num = case ptn of
              MonthEnd -> cdM
              QuarterEnd -> div cdM 3
              YearEnd -> div cdM 12
              MonthFirst -> cdM
              QuarterFirst-> div cdM 3
              YearFirst-> div cdM 12
              -- NOTE(review): this alternative was corrupted in this copy
              -- (only the remnant comment "T.MonthOfYear" survived);
              -- restored as an annual pattern — one occurrence per year.
              MonthDayOfYear _ _ -> div cdM 12
              DayOfMonth _d -> cdM
-- | Like 'genSerialDatesTill' but with caller-chosen endpoint handling:
--   the 'RangeType' letters mean Included/Excluded for (start, end).
--   NOTE(review): II/EI can produce a duplicated endpoint when sd or ed
--   already falls on a pattern date — confirm this is intended.
genSerialDatesTill2 :: RangeType -> Date -> DatePattern -> Date -> Dates
genSerialDatesTill2 rt sd dp ed
  = case rt of
      II -> sd:_r ++ [ed]
      EI -> _r ++ [ed]
      IE -> if (head _r)==sd then
              _r
            else
              sd:_r
      EE -> _r
    where
      -- pattern dates in [sd, ed); may or may not start with sd itself
      _r = genSerialDatesTill sd dp ed
-- | Extract the value component of a time-series point.
--   (Fix: the unused date component is now a wildcard, silencing the
--   unused-binding warning.)
tsPointVal :: TsPoint a -> a
tsPointVal (TsPoint _ v) = v
-- | Look up a curve's value as of date @d@.
--   'CutoffType' chooses whether a point dated exactly @d@ counts:
--   @Inc@ includes it, @Exc@ requires a strictly earlier point.
--   Balance/Float/IRate curves step backwards from the latest point at
--   or before @d@; threshold curves step forwards; pricing curves
--   interpolate linearly.
getValByDate :: Ts -> CutoffType -> Date -> Rational
getValByDate (LeftBalanceCurve dps) ct d
  = case find (\(TsPoint _d _) -> (cmpFun ct) _d d) (reverse dps) of
      Just (TsPoint _d v) -> toRational v
      Nothing -> 0
    where
      cmpFun Inc = (<=)
      cmpFun Exc = (<)

getValByDate (BalanceCurve dps) Exc d
  = case find (\(TsPoint _d _) -> d > _d) (reverse dps) of
      Just (TsPoint _d v) -> toRational v
      Nothing -> 0

getValByDate (BalanceCurve dps) Inc d
  = case find (\(TsPoint _d _) -> d >= _d) (reverse dps) of
      Just (TsPoint _d v) -> toRational v
      Nothing -> 0

getValByDate (FloatCurve dps) Exc d
  = case find (\(TsPoint _d _) -> d > _d) (reverse dps) of
      Just (TsPoint _d v) -> toRational v
      Nothing -> 0

getValByDate (IRateCurve dps) Exc d
  = case find (\(TsPoint _d _) -> d > _d) (reverse dps) of
      Just (TsPoint _d v) -> toRational v
      Nothing -> 0

-- Threshold curves search forward for the first point at/after @d@.
getValByDate (ThresholdCurve dps) Inc d
  = case find (\(TsPoint _d _) -> d <= _d) dps of
      Just (TsPoint _d v) -> toRational v
      -- NOTE(review): the Nothing alternative was lost in this copy of
      -- the file (only a "Not found" debug remnant remained); restore it
      -- from upstream before relying on this constructor.

getValByDate (ThresholdCurve dps) Exc d
  = case find (\(TsPoint _d _) -> d < _d) dps of
      Just (TsPoint _d v) -> toRational v
      -- NOTE(review): Nothing alternative lost here as well; see above.

-- Factor curves are 1.0 outside their closed end date.
getValByDate (FactorCurveClosed dps ed) Exc d
  = case find (\(TsPoint _d _) -> d > _d) (reverse dps) of
      Just found@(TsPoint _found_d _found_v) ->
        if d >= ed then
          1.0
        else
          _found_v
      Nothing -> 1.0

-- Pricing curves clamp outside the sampled range and linearly
-- interpolate (by day distance) between the two surrounding points.
getValByDate (PricingCurve dps) _ d
  = case (d>=lday,d<=fday) of
      (True,_) -> tsPointVal $ last dps
      (_,True) -> tsPointVal $ head dps
      _ -> let
             rindex = fromMaybe 0 $findIndex (\(TsPoint _dl _) -> ( _dl > d )) dps
             rdp@(TsPoint _dr _rv) = dps!!rindex
             ldp@(TsPoint _dl _lv) = dps!!(pred rindex)
             leftDistance = toRational $ daysBetween _dl d
             distance = toRational $ daysBetween _dl _dr
             vdistance = _rv - _lv
           in
             toRational $ _lv + (vdistance * leftDistance) / distance
    where
      fday = getDate $ head dps
      lday = getDate $ last dps

-- | Map 'getValByDate' over a list of dates.
getValByDates :: Ts -> CutoffType -> [Date] -> [Rational]
getValByDates rc ct = map (getValByDate rc ct)

-- | Values of a float curve, in point order.
--   NOTE(review): partial — only the FloatCurve constructor is handled.
getTsVals :: Ts -> [Rational]
getTsVals (FloatCurve ts) = [ v | (TsPoint d v) <- ts ]

-- | Dates of a curve's points, in order.
--   NOTE(review): partial — other Ts constructors fall through to a
--   pattern-match failure.
getTsDates :: Ts -> [Date]
getTsDates (IRateCurve tps) = map getDate tps
getTsDates (FloatCurve tps) = map getDate tps
getTsDates (PricingCurve tps) = map getDate tps
getTsDates (BalanceCurve tps) = map getDate tps
-- | Dates inside (sd, ed); the 'RangeType' letters choose inclusive (I)
--   or exclusive (E) for the start and end bounds respectively.
subDates :: RangeType -> Date -> Date -> [Date] -> [Date]
subDates rt sd ed ds
  = case rt of
      II -> filter (\x -> x >= sd && x <= ed ) ds
      EI -> filter (\x -> x > sd && x <= ed ) ds
      IE -> filter (\x -> x >= sd && x < ed ) ds
      EE -> filter (\x -> x > sd && x < ed ) ds

-- | How to slice a date list around a pivot date; the KeepPrevious
--   variants also retain one element just before the pivot.
data SliceType = SliceAfter Date
               | SliceOnAfter Date
               | SliceAfterKeepPrevious Date
               | SliceOnAfterKeepPrevious Date

-- | Slice a date list according to a 'SliceType'.
sliceDates :: SliceType -> [Date] -> [Date]
sliceDates st ds =
  case st of
    SliceAfter d -> filter (> d) ds
    SliceOnAfter d -> filter (>= d) ds
    SliceAfterKeepPrevious d ->
      case findIndex (> d) ds of
        -- keep one element before the first match; when the match is at
        -- index 0, splitAt (-1) yields the whole list unchanged
        Just idx -> snd $ splitAt (pred idx) ds
        Nothing -> []
    SliceOnAfterKeepPrevious d ->
      case findIndex (>= d) ds of
        Just idx -> snd $ splitAt (pred idx) ds
        Nothing -> []
-- | Scale an annual rate down to the period between two dates using the
--   given day-count convention.
calcIntRate :: Date -> Date -> IRate -> DayCount -> IRate
calcIntRate sd ed annualRate dayCount = annualRate * fromRational yf
  where
    yf = yearCountFraction dayCount sd ed

-- | Period rates for each consecutive pair of dates in the series.
calcIntRateCurve :: DayCount -> IRate -> [Date] -> [IRate]
calcIntRateCurve dc r ds =
  zipWith (\sd ed -> calcIntRate sd ed r dc) (init ds) (tail ds)

-- | Interest accrued on a balance between two dates at an annual rate.
calcInt :: Balance -> Date -> Date -> IRate -> DayCount -> Amount
calcInt bal sd ed annualRate dayCount =
  mulBR bal (yf * toRational annualRate)
  where
    yf = yearCountFraction dayCount sd ed
-- | Pair dates with values into a float curve (lists are zipped, so the
--   shorter one determines the curve length).
zipTs :: [Date] -> [Rational] -> Ts
zipTs ds rs = FloatCurve [ TsPoint d r | (d,r) <- (zip ds rs) ]

-- | Pointwise product of two curves, sampled at the first curve's dates.
--   NOTE(review): partial — only a FloatCurve first argument is handled.
multiplyTs :: CutoffType -> Ts -> Ts -> Ts
multiplyTs ct (FloatCurve ts1) ts2
  = FloatCurve [(TsPoint d (v * (getValByDate ts2 ct d))) | (TsPoint d v) <- ts1 ]
-- TODO: to be replaced by generateDateSeries
-- | Dates following pattern @dp@ covering the whole span from @sd@ to @ed@.
--   (Fix: restored the top-level type signature, which was missing.)
projDatesByPattern :: DatePattern -> Date -> Date -> Dates
projDatesByPattern dp sd ed
  = let
      (T.CalendarDiffDays cdm cdd) = T.diffGregorianDurationClip ed sd
      -- number of pattern dates needed to cover the span, from the
      -- whole-month difference cdm
      num = case dp of
              MonthEnd -> cdm + 1
              QuarterEnd -> (div cdm 3) + 1
              YearEnd -> (div cdm 12) + 1
              MonthFirst -> cdm + 1
              QuarterFirst -> (div cdm 3) + 1
              YearFirst -> (div cdm 12) + 1
              MonthDayOfYear _ _ -> (div cdm 12) + 1
              DayOfMonth _ -> cdm + 1
    in
      genSerialDates dp sd (fromInteger num)
-- | Replace the element at index @i@ with @e@.
--   Out-of-range indices — negative or past the end — leave the list
--   unchanged.  (Fix: previously a negative index silently replaced the
--   head, because splitAt clamps negative arguments to 0.)
replace :: [a] -> Int -> a -> [a]
replace xs i e
  | i < 0     = xs
  | otherwise = case splitAt i xs of
                  (before, _:after) -> before ++ e : after
                  _                 -> xs
-- | Fit @xs@ to exactly @s@ elements: truncate when longer than @s@,
--   otherwise pad on the right with copies of @x@.
paddingDefault :: a -> [a] -> Int -> [a]
paddingDefault x xs s =
  let n = length xs
  in if n > s
       then take s xs
       else xs ++ replicate (s - n) x
-- | Cap every element at @cap@: values above the cap are clamped down to
--   it, everything else passes through.  (Idiom: the explicit
--   if/then/else comprehension is just @min cap@ mapped over the list.)
capWith :: Ord a => [a] -> a -> [a]
capWith xs cap = map (min cap) xs
-- | Present value of @amt@ received at date @d@, discounted back to
--   @today@ with annual compounding at @discount_rate@.
pv2 :: IRate -> Date -> Date -> Amount -> Amount
pv2 discount_rate today d amt =
  mulBI amt $ 1/denominator
  where
    -- whole years between the dates (integer division of days by 365)
    denominator = (1+discount_rate) ^^ (fromInteger (div distance 365))
    distance = daysBetween today d

-- | Day counts between consecutive dates of a series.
daysInterval :: [Date] -> [Integer]
daysInterval ds = zipWith daysBetween (init ds) (tail ds)

-- | Split a time series at date @d@.
--   EqToLeft puts items dated exactly @d@ in the left half, EqToRight in
--   the right half; EqToLeftKeepOne also keeps one earlier item on the
--   right side of the split.
splitByDate :: TimeSeries a => [a] -> Date -> SplitType -> ([a],[a])
splitByDate xs d st
  = case st of
      EqToLeft -> span (\x -> (getDate x) <= d) xs
      EqToRight -> span (\x -> (getDate x) < d) xs
      EqToLeftKeepOne ->
        case findIndex (\x -> (getDate x) >= d ) xs of
          Just idx -> splitAt (pred idx) xs
          Nothing -> (xs,[])
      -- (commented-out upstream variants, kept for reference)
      -- EqToRightKeepOne ->
      --   case findIndex (\x -> (getDate x) >= d ) xs of
      --     Just idx -> splitAt (pred idx) xs
      --     Nothing -> (xs,[])
      -- EqToLeftKeepOnes ->
      --   case findIndices (\x -> (getDate x) <= d) xs of
      --     [] -> (xs,[])
      --     inds -> ...

-- | Items of a time series falling between @sd@ and @ed@; endpoint
--   inclusivity follows the 'RangeType' (I inclusive, E exclusive).
rangeBy :: TimeSeries a => [a] -> Date -> Date -> RangeType -> [a]
rangeBy xs sd ed rt =
  case rt of
    II -> filter (\x -> (getDate x >= sd) && (getDate x <= ed)) xs
    IE -> filter (\x -> (getDate x >= sd) && (getDate x < ed)) xs
    EI -> filter (\x -> (getDate x > sd) && (getDate x <= ed)) xs
    EE -> filter (\x -> (getDate x > sd) && (getDate x < ed)) xs

-- | Placeholder debug formatter; currently ignores its input and
--   returns the empty string.
debugLine :: Show a => [a] -> String
debugLine xs = ""
| null | https://raw.githubusercontent.com/yellowbean/Hastructure/ff1eb0c223b418c18ab6328012701864a6edc651/src/Util.hs | haskell | # LANGUAGE OverloadedStrings #
-count-conventions
TODO -16-field-22f.htm
`debug` ("DIFF Days"++show(_diffDays))
`debug` ("German"++show(_sday)++"<>"++show _eday)
-16-field-22f.htm
Ratio Integer
`debug` ("Num"++show num)
T.DayOfMonth
T.DayOfWeek
`debug` ("Getting rate "++show(_d)++show(v))
`debug` ("Getting 0 ")
`debug` ("Getting rate "++show(_d)++show(v))
`debug` ("Getting 0 ")
`debug` ("Getting rate "++show(_d)++show(v))
`debug` ("Getting rate "++show(_d)++show(v))
`debug` ("LEFT"++show leftDistance)
`debug` ("DIST"++show distance)
("DIST")
`debug` ("D "++ show _lv++">>"++ show vdistance++">>"++ show leftDistance++">>"++ show distance)
getValByDates rc ds = map (getValByDate rc) ds
`debug` ("cdm"++show cdm)
`debug` ("days between->"++show d ++show today++">>>"++show distance )
`debug` ("split with "++show (pred idx)++">>"++show (length xs))
` debug ` ( " split with " + + show ( pred idx)++">>"++show ( length xs ) )
Nothing -> (xs,[])
EqToLeftKeepOnes ->
[] -> (xs,[])
inds ->
`debug` ("in rangeBy II") | # LANGUAGE ScopedTypeVariables #
module Util
(mulBR,mulBIR,mulBI,mulBInt,mulBInteger,lastN,yearCountFraction,genSerialDates
,getValByDate,getValByDates,projDatesByPattern
,genSerialDatesTill,genSerialDatesTill2,subDates,getTsDates,sliceDates,SliceType(..)
,calcInt,calcIntRate,calcIntRateCurve
,multiplyTs,zipTs,getTsVals,divideBI,mulIR, daysInterval
,replace,paddingDefault, capWith, pv2, splitByDate, rangeBy
)
where
import qualified Data.Time as T
import Data.List
import Data.Fixed
import Data.Ratio ((%))
import Data.Ix
import Data.Maybe
import qualified Data.Map as M
import Lib
import Types
import Text.Printf
import Control.Exception
import Debug.Trace
debug = flip trace
mulBR :: Balance -> Rate -> Centi
mulBR b r = fromRational $ toRational b * r
mulBIR :: Balance -> IRate -> Centi
mulBIR b r = fromRational $ (toRational b) * (toRational r)
mulIR :: Int -> Rational -> Rational
mulIR i r = (toRational i) * r
mulBInt :: Balance -> Int -> Rational
mulBInt b i = (toRational b) * (toRational i)
mulBInteger :: Balance -> Integer -> Rational
mulBInteger b i = mulBInt b (fromInteger i)
mulBI :: Balance -> IRate -> Amount
mulBI bal r = fromRational $ (toRational bal) * (toRational r)
divideBI :: Balance -> Int -> Balance
divideBI b i = fromRational $ (toRational b) / (toRational i)
zipLeftover :: [a] -> [a] -> [a]
zipLeftover [] [] = []
zipLeftover xs [] = xs
zipLeftover [] ys = ys
zipLeftover (x:xs) (y:ys) = zipLeftover xs ys
lastN :: Int -> [a] -> [a]
lastN n xs = zipLeftover (drop n xs) xs
yearCountFraction dc sd ed
= case dc of
DC_ACT_ACT -> if sameYear then
_diffDays % daysOfYear syear
else
(sDaysTillYearEnd % (daysOfYear syear)) + (eDaysAfterYearBeg % (daysOfYear eyear)) + (pred _diffYears)
` debug ` ( " < > " + + show sDaysTillYearEnd++"<>"++show(daysOfYear syear ) + + " < > " + + show ( daysOfYear eyear)++"<>"++ show eyear )
DC_ACT_360 -> _diffDays % 360
DC_ACT_365A -> if has_leap_day then
_diffDays % 366
else
_diffDays % 365
DC_ACT_365L -> if T.isLeapYear eyear then
_diffDays % 366
else
_diffDays % 365
DC_NL_365 -> if has_leap_day then
(pred _diffDays) % 365
else
_diffDays % 365
DC_30E_360 -> let
_sday = f31to30 sday
_eday = f31to30 eday
num = toRational (_eday - _sday) + 30*_gapMonth + 360*_diffYears
in
` debug ` ( " NUM->"++show num++"E S month"++show emonth++show )
DC_30Ep_360 -> let
_sday = f31to30 sday
(_eyear,_emonth,_eday) = T.toGregorian $
if eday==31 then
T.addDays 1 ed
else
ed
__gapMonth = (toInteger $ _emonth - smonth) % 1
__diffYears = (toInteger $ _eyear - syear) % 1
num = toRational (_eday - _sday) + 30*__gapMonth + 360*__diffYears
in
num / 360
DC_30_360_ISDA -> let
_sday = f31to30 sday
_eday = if _sday>=30 && eday==31 then
30
else
eday
num = toRational (_eday - _sday) + 30*_gapMonth + 360*_diffYears
in
num / 360
30/360 Bond basis , this was call 30E/360 ISDA by kalotay
DC_30_360_German -> let
_sday = if sday==31 || (endOfFeb syear smonth sday) then
` debug ` ( " German eof start if > > " + + show ( endOfFeb sday)++show syear + + show smonth++show sday )
else
sday
` debug ` ( " German eof start else " + + show ( endOfFeb sday)++show syear + + show smonth++show sday )
_eday = if eday==31 || (endOfFeb eyear emonth eday) then
30
else
eday
` debug ` ( " German eof end " + + show ( endOfFeb eyear emonth eday)++show eyear++show emonth++show eday )
in
num / 360
DC_30_360_US -> let
_sday = if (endOfFeb syear smonth sday) || sday==31 then
30
else
sday
_eday = if (eday==31 && sday >= 30)||(endOfFeb eyear emonth eday) && (endOfFeb syear smonth sday) then
30
else
eday
num = toRational (_eday - _sday) + 30*_gapMonth + 360*_diffYears
in
num / 360
where
daysOfYear y = if T.isLeapYear y then 366 else 365
f31to30 d = if d==31 then
30
else
d
endOfFeb y m d = if T.isLeapYear y then
(m==2) && d == 29
else
(m==2) && d == 28
sameYear = syear == eyear
has_leap_day
= case (sameYear,sLeap,eLeap) of
(True,False,False) -> False
(True,True,_) -> inRange (sd,ed) (T.fromGregorian syear 2 29)
_ -> let
_leapDays = [ T.fromGregorian _y 2 29 | _y <- range (syear,eyear) , (T.isLeapYear _y) ]
in
any (inRange (sd,ed)) _leapDays
_gapDay = (toInteger (eday - sday)) % 1
_gapMonth = (toInteger (emonth - smonth)) % 1
sDaysTillYearEnd = succ $ T.diffDays (T.fromGregorian syear 12 31) sd
eDaysAfterYearBeg = T.diffDays ed (T.fromGregorian eyear 1 1)
_diffDays = toInteger $ T.diffDays ed sd
sLeap = T.isLeapYear syear
eLeap = T.isLeapYear eyear
(syear,smonth,sday) = T.toGregorian sd
(eyear,emonth,eday) = T.toGregorian ed
genSerialDates :: DatePattern -> Date -> Int -> Dates
genSerialDates dp sd num
= take num $ filter (>= sd) $
case dp of
MonthEnd ->
[T.fromGregorian yearRange (fst __md) (snd __md) | yearRange <- [_y..(_y+yrs)]
,__md <- monthEnds yearRange ]
where
yrs = fromIntegral $ div num 12 + 1
QuarterEnd ->
[T.fromGregorian yearRange __m __d | yearRange <- [_y..(_y+yrs)]
,(__m,__d) <- quarterEnds]
where
yrs = fromIntegral $ div num 4 + 1
YearEnd ->
[T.fromGregorian yearRange 12 31 | yearRange <- [_y..(_y+(toInteger num))]]
YearFirst ->
[T.fromGregorian yearRange 1 1 | yearRange <- [_y..(_y+(toInteger num))]]
MonthFirst ->
[T.fromGregorian yearRange monthRange 1 | yearRange <- [_y..(_y+yrs)]
, monthRange <- [1..12]]
where
yrs = fromIntegral $ div num 12 + 1
QuarterFirst ->
[T.fromGregorian yearRange __m 1 | yearRange <- [_y..(_y+yrs)]
,__m <- [3,6,9,12]]
where
yrs = fromIntegral $ div num 4 + 1
MonthDayOfYear m d ->
[T.fromGregorian yearRange m d | yearRange <- [_y..(_y+(toInteger num))]]
DayOfMonth d ->
[T.fromGregorian yearRange monthRange d | yearRange <- [_y..(_y+yrs)]
, monthRange <- [1..12]]
where
yrs = fromIntegral $ div num 12 + 1
where
quarterEnds = [(3,31),(6,30),(9,30),(12,31)]
monthEnds y =
if T.isLeapYear y then
[(1,31),(2,29),(3,31),(4,30),(5,31),(6,30),(7,31),(8,31),(9,30),(10,31),(11,30),(12,31)]
else
[(1,31),(2,28),(3,31),(4,30),(5,31),(6,30),(7,31),(8,31),(9,30),(10,31),(11,30),(12,31)]
(_y,_m,_d) = T.toGregorian sd
yearBegin = T.fromGregorian _y 1 1
genSerialDatesTill:: Date -> DatePattern -> Date -> Dates
genSerialDatesTill sd ptn ed
where
(sy,sm,sday) = T.toGregorian sd
(ey,em,eday) = T.toGregorian ed
T.CalendarDiffDays cdM cdD = T.diffGregorianDurationRollOver ed sd
num = case ptn of
MonthEnd -> cdM
QuarterEnd -> div cdM 3
YearEnd -> div cdM 12
MonthFirst -> cdM
QuarterFirst-> div cdM 3
YearFirst-> div cdM 12
T.MonthOfYear
genSerialDatesTill2 :: RangeType -> Date -> DatePattern -> Date -> Dates
genSerialDatesTill2 rt sd dp ed
= case rt of
II -> sd:_r ++ [ed]
EI -> _r ++ [ed]
IE -> if (head _r)==sd then
_r
else
sd:_r
EE -> _r
where
_r = genSerialDatesTill sd dp ed
tsPointVal :: TsPoint a -> a
tsPointVal (TsPoint d v) = v
getValByDate :: Ts -> CutoffType -> Date -> Rational
getValByDate (LeftBalanceCurve dps) ct d
= case find (\(TsPoint _d _) -> (cmpFun ct) _d d) (reverse dps) of
Just (TsPoint _d v) -> toRational v
Nothing -> 0
where
cmpFun Inc = (<=)
cmpFun Exc = (<)
getValByDate (BalanceCurve dps) Exc d
= case find (\(TsPoint _d _) -> d > _d) (reverse dps) of
Just (TsPoint _d v) -> toRational v
Nothing -> 0
getValByDate (BalanceCurve dps) Inc d
= case find (\(TsPoint _d _) -> d >= _d) (reverse dps) of
Just (TsPoint _d v) -> toRational v
Nothing -> 0
getValByDate (FloatCurve dps) Exc d
= case find (\(TsPoint _d _) -> d > _d) (reverse dps) of
getValByDate (IRateCurve dps) Exc d
= case find (\(TsPoint _d _) -> d > _d) (reverse dps) of
getValByDate (ThresholdCurve dps) Inc d
= case find (\(TsPoint _d _) -> d <= _d) dps of
` debug ` ( " Not found in " )
getValByDate (ThresholdCurve dps) Exc d
= case find (\(TsPoint _d _) -> d < _d) dps of
` debug ` ( " Not found in " )
getValByDate (FactorCurveClosed dps ed) Exc d
= case find (\(TsPoint _d _) -> d > _d) (reverse dps) of
Just found@(TsPoint _found_d _found_v) ->
if d >= ed then
1.0
else
_found_v
Nothing -> 1.0
getValByDate (PricingCurve dps) _ d
= case (d>=lday,d<=fday) of
(True,_) -> tsPointVal $ last dps
(_,True) -> tsPointVal $ head dps
_ -> let
rindex = fromMaybe 0 $findIndex (\(TsPoint _dl _) -> ( _dl > d )) dps
rdp@(TsPoint _dr _rv) = dps!!rindex
ldp@(TsPoint _dl _lv) = dps!!(pred rindex)
in
toRational $ _lv + (vdistance * leftDistance) / distance
where
fday = getDate $ head dps
lday = getDate $ last dps
getValByDates :: Ts -> CutoffType -> [Date] -> [Rational]
getValByDates rc ct = map (getValByDate rc ct)
getTsVals :: Ts -> [Rational]
getTsVals (FloatCurve ts) = [ v | (TsPoint d v) <- ts ]
getTsDates :: Ts -> [Date]
getTsDates (IRateCurve tps) = map getDate tps
getTsDates (FloatCurve tps) = map getDate tps
getTsDates (PricingCurve tps) = map getDate tps
getTsDates (BalanceCurve tps) = map getDate tps
subDates :: RangeType -> Date -> Date -> [Date] -> [Date]
subDates rt sd ed ds
= case rt of
II -> filter (\x -> x >= sd && x <= ed ) ds
EI -> filter (\x -> x > sd && x <= ed ) ds
IE -> filter (\x -> x >= sd && x < ed ) ds
EE -> filter (\x -> x > sd && x < ed ) ds
data SliceType = SliceAfter Date
| SliceOnAfter Date
| SliceAfterKeepPrevious Date
| SliceOnAfterKeepPrevious Date
sliceDates :: SliceType -> [Date] -> [Date]
sliceDates st ds =
case st of
SliceAfter d -> filter (> d) ds
SliceOnAfter d -> filter (>= d) ds
SliceAfterKeepPrevious d ->
case findIndex (> d) ds of
Just idx -> snd $ splitAt (pred idx) ds
Nothing -> []
SliceOnAfterKeepPrevious d ->
case findIndex (>= d) ds of
Just idx -> snd $ splitAt (pred idx) ds
Nothing -> []
calcIntRate :: Date -> Date -> IRate -> DayCount -> IRate
calcIntRate start_date end_date int_rate day_count =
let
yf = yearCountFraction day_count start_date end_date
in
int_rate * (fromRational yf)
calcIntRateCurve :: DayCount -> IRate -> [Date] -> [IRate]
calcIntRateCurve dc r ds
= [ calcIntRate sd ed r dc | (sd,ed) <- zip (init ds) (tail ds) ]
calcInt :: Balance -> Date -> Date -> IRate -> DayCount -> Amount
calcInt bal start_date end_date int_rate day_count =
let
yfactor = yearCountFraction day_count start_date end_date
in
mulBR bal (yfactor * (toRational int_rate))
zipTs :: [Date] -> [Rational] -> Ts
zipTs ds rs = FloatCurve [ TsPoint d r | (d,r) <- (zip ds rs) ]
multiplyTs :: CutoffType -> Ts -> Ts -> Ts
multiplyTs ct (FloatCurve ts1) ts2
= FloatCurve [(TsPoint d (v * (getValByDate ts2 ct d))) | (TsPoint d v) <- ts1 ]
TODO to be replace by generateDateSeries
projDatesByPattern dp sd ed
= let
(T.CalendarDiffDays cdm cdd) = T.diffGregorianDurationClip ed sd
num = case dp of
MonthEnd -> cdm + 1
YearEnd -> (div cdm 12) + 1
MonthFirst -> cdm + 1
QuarterFirst -> (div cdm 3) + 1
YearFirst -> (div cdm 12) + 1
MonthDayOfYear _ _ -> (div cdm 12) + 1
DayOfMonth _ -> cdm + 1
in
genSerialDates dp sd (fromInteger num)
replace :: [a] -> Int -> a -> [a]
replace xs i e = case splitAt i xs of
(before, _:after) -> before ++ e: after
_ -> xs
paddingDefault :: a -> [a] -> Int -> [a]
paddingDefault x xs s
| (length xs) > s = take s xs
| otherwise = xs++(replicate (s - (length xs)) x)
capWith :: Ord a => [a] -> a -> [a]
capWith xs cap = [ if x > cap then
cap
else
x | x <- xs ]
pv2 :: IRate -> Date -> Date -> Amount -> Amount
pv2 discount_rate today d amt =
where
denominator = (1+discount_rate) ^^ (fromInteger (div distance 365))
distance = daysBetween today d
daysInterval :: [Date] -> [Integer]
daysInterval ds = zipWith daysBetween (init ds) (tail ds)
splitByDate :: TimeSeries a => [a] -> Date -> SplitType -> ([a],[a])
splitByDate xs d st
= case st of
EqToLeft -> span (\x -> (getDate x) <= d) xs
EqToRight -> span (\x -> (getDate x) < d) xs
EqToLeftKeepOne ->
case findIndex (\x -> (getDate x) >= d ) xs of
Nothing -> (xs,[])
EqToRightKeepOne - >
case findIndex ( \x - > ( getDate x ) > = d ) xs of
case findIndices ( \x - > ( getDate x ) < = d ) xs of
rangeBy :: TimeSeries a => [a] -> Date -> Date -> RangeType -> [a]
rangeBy xs sd ed rt =
case rt of
IE -> filter (\x -> (getDate x >= sd) && (getDate x < ed)) xs
EI -> filter (\x -> (getDate x > sd) && (getDate x <= ed)) xs
EE -> filter (\x -> (getDate x > sd) && (getDate x < ed)) xs
debugLine :: Show a => [a] -> String
debugLine xs = ""
|
a920c3f7b78d5c279e7fb172e79450183e3d5184395db5cbf8faa3f4442e26a3 | diffusionkinetics/open | TestPlotly.hs | {-# LANGUAGE OverloadedStrings #-}
import Lucid
import Lucid.Html5
import Graphics.Plotly
import Graphics.Plotly.Lucid
import Lens.Micro
import qualified Data.Text.Lazy as T
import qualified Data.Text.Lazy.IO as T
main =
T.writeFile "test.html" $ renderText $ doctypehtml_ $ do
head_ $ do meta_ [charset_ "utf-8"]
plotlyCDN
body_ $ toHtml $ plotly "myDiv" [myTrace]
myTrace = scatter & x ?~ [1,2,3,4]
& y ?~ [500,3000,700,200]
| null | https://raw.githubusercontent.com/diffusionkinetics/open/673d9a4a099abd9035ccc21e37d8e614a45a1901/plotlyhs/TestPlotly.hs | haskell | # LANGUAGE OverloadedStrings # |
import Lucid
import Lucid.Html5
import Graphics.Plotly
import Graphics.Plotly.Lucid
import Lens.Micro
import qualified Data.Text.Lazy as T
import qualified Data.Text.Lazy.IO as T
main =
T.writeFile "test.html" $ renderText $ doctypehtml_ $ do
head_ $ do meta_ [charset_ "utf-8"]
plotlyCDN
body_ $ toHtml $ plotly "myDiv" [myTrace]
myTrace = scatter & x ?~ [1,2,3,4]
& y ?~ [500,3000,700,200]
|
b2ed567a044ed0a57371e0f06e7dde6964ba592e265306fdd8a3f0d80a8b340d | siraben/zkeme80 | interrupt.scm | (define interrupt-asm
`((label sys-interrupt)
(di)
,@(push* '(af bc de hl ix iy))
(exx)
((ex af afs))
,@(push* '(af bc de hl))
(jp usb-interrupt)
(label interrupt-resume)
(in a (4))
(bit 0 a)
(jr nz int-handle-on)
(bit 1 a)
(jr nz int-handle-timer1)
(bit 2 a)
(jr nz int-handle-timer2)
(bit 4 a)
(jr nz int-handle-link)
(jr sys-interrupt-done)
(label int-handle-on)
(in a (3))
(res 0 a)
(out (3) a)
(set 0 a)
(out (3) a)
(jr sys-interrupt-done)
(label int-handle-timer1)
(in a (3))
(res 1 a)
(out (3) a)
(set 1 a)
(out (3) a)
(jr sys-interrupt-done)
(label int-handle-timer2)
(in a (3))
(res 2 a)
(out (3) a)
(set 2 a)
(out (3) a)
(jr sys-interrupt-done)
(label int-handle-link)
(in a (3))
(res 4 a)
(out (3) a)
(set 4 a)
(out (3) a)
(label sys-interrupt-done)
,@(pop* '(hl de bc af))
(exx)
((ex af afs))
,@(pop* '(iy ix hl de bc af))
(ei)
(ret)
(label usb-interrupt)
(in a (#x55))
(bit 0 a)
(jr z usb-unknown-event)
(bit 2 a)
(jr z usb-line-event)
(bit 4 a)
(jr z usb-protocol-event)
(jp interrupt-resume)
(label usb-unknown-event)
(jp interrupt-resume)
(label usb-line-event)
(in a (#x56))
(xor #xff)
(out (#x57) a)
(jp interrupt-resume)
(label usb-protocol-event)
,@(map (lambda (x) `(in a (,x)))
'(#x82 #x83 #x84 #x85 #x86))
(jp interrupt-resume)
))
| null | https://raw.githubusercontent.com/siraben/zkeme80/ab49d496cac6797e6e3264ee027f96040eaf0492/src/interrupt.scm | scheme | (define interrupt-asm
`((label sys-interrupt)
(di)
,@(push* '(af bc de hl ix iy))
(exx)
((ex af afs))
,@(push* '(af bc de hl))
(jp usb-interrupt)
(label interrupt-resume)
(in a (4))
(bit 0 a)
(jr nz int-handle-on)
(bit 1 a)
(jr nz int-handle-timer1)
(bit 2 a)
(jr nz int-handle-timer2)
(bit 4 a)
(jr nz int-handle-link)
(jr sys-interrupt-done)
(label int-handle-on)
(in a (3))
(res 0 a)
(out (3) a)
(set 0 a)
(out (3) a)
(jr sys-interrupt-done)
(label int-handle-timer1)
(in a (3))
(res 1 a)
(out (3) a)
(set 1 a)
(out (3) a)
(jr sys-interrupt-done)
(label int-handle-timer2)
(in a (3))
(res 2 a)
(out (3) a)
(set 2 a)
(out (3) a)
(jr sys-interrupt-done)
(label int-handle-link)
(in a (3))
(res 4 a)
(out (3) a)
(set 4 a)
(out (3) a)
(label sys-interrupt-done)
,@(pop* '(hl de bc af))
(exx)
((ex af afs))
,@(pop* '(iy ix hl de bc af))
(ei)
(ret)
(label usb-interrupt)
(in a (#x55))
(bit 0 a)
(jr z usb-unknown-event)
(bit 2 a)
(jr z usb-line-event)
(bit 4 a)
(jr z usb-protocol-event)
(jp interrupt-resume)
(label usb-unknown-event)
(jp interrupt-resume)
(label usb-line-event)
(in a (#x56))
(xor #xff)
(out (#x57) a)
(jp interrupt-resume)
(label usb-protocol-event)
,@(map (lambda (x) `(in a (,x)))
'(#x82 #x83 #x84 #x85 #x86))
(jp interrupt-resume)
))
| |
f911167c9183315dcf0214ddedf797377d1551917c5942ea00408d8f6421faee | hoelzl/Clicc | read.lisp | ;;;-----------------------------------------------------------------------------
Copyright ( C ) 1993 Christian - Albrechts - Universitaet zu Kiel , Germany
;;;-----------------------------------------------------------------------------
Projekt : APPLY - A Practicable And Portable Lisp Implementation
;;; ------------------------------------------------------
Funktion : Laufzeitsystem
;;; - Backquote-Reader + Simplifier
- Readtables ,
;;; - READ
;;; - READ-PRESERVING-WHITESPACE
;;; - READ-DELIMITED-LIST
;;; - READ-LINE
( - READ - CHAR , UNREAD - CHAR -- > file.lisp )
;;; - PEEK-CHAR
;;; - READ-FROM-STRING
- PARSE - INTEGER
;;;
$ Revision : 1.14 $
;;; $Log: read.lisp,v $
;;; Revision 1.14 1994/06/03 09:51:05 hk
Schreibfehler
;;;
;;; Revision 1.13 1994/06/02 14:10:19 hk
Print - Funktion f"ur readtable - Struktur .
;;;
Revision 1.12 1994/05/31 12:05:06 hk
Bessere warning
;;;
;;; Revision 1.11 1994/02/17 16:16:35 hk
" Uberfl"ussigen Test in struct - reader gestrichen , aufger"aumt ,
.
;;;
;;; Revision 1.10 1994/01/11 16:11:47 hk
in bq - attach - append bei ` ( ..... . const ) behoben .
;;;
;;; Revision 1.9 1993/11/29 12:26:40 uho
In ' read - token ' wird des look - ahead - Zeichens das Ende
.
;;;
Revision 1.8 1993/11/10 16:09:27 hk
In read - from - string den zu T korrigiert .
;;;
Revision 1.7 1993/07/14 13:50:42 hk
Neue Reader fuer # b , # o , # x und # nR , vector - reader
;;;
Revision 1.6 1993/06/16 15:20:38 hk
Copyright eingefuegt .
;;;
Revision 1.5 1993/05/07 08:55:22 hk
;;; readtable exportiert.
;;;
;;; Revision 1.4 1993/04/22 10:48:21 hk
;;; (in-package "RUNTIME") -> (in-package "LISP"),
Definitionen exportiert , defvar , defconstant ,
clicc / lib / . rt::set - xxx in ( setf xxx ) umgeschrieben .
Definitionen und Anwendungen von / aus Package Runtime mit rt : .
declaim fun - spec und declaim top - level - form gestrichen .
;;;
Revision 1.3 1993/02/16 14:34:20 hk
;;; clicc::declaim -> declaim, clicc::fun-spec (etc.) -> lisp::fun-spec (etc.)
$ Revision : 1.14 $ eingefuegt
;;;
;;; Revision 1.2 1993/01/11 15:04:27 hk
;;; structure -> struct
;;;
Revision 1.1 1992/03/24 17:12:55 hk
;;; Initial revision
;;;-----------------------------------------------------------------------------
(in-package "LISP")
(export
'(*read-base* *read-suppress* *features*
readtable readtablep copy-readtable set-syntax-from-char set-macro-character
get-macro-character make-dispatch-macro-character
set-dispatch-macro-character get-dispatch-macro-character read
read-preserving-whitespace read-delimited-list read-line peek-char
read-from-string parse-integer))
;;------------------------------------------------------------------------------
(defparameter *comma* (make-symbol "COMMA"))
(defparameter *comma-atsign* (make-symbol "COMMA-ATSIGN"))
(defparameter *comma-dot* (make-symbol "COMMA-DOT"))
(defparameter *bq-list* (make-symbol "BQ-LIST"))
(defparameter *bq-append* (make-symbol "BQ-APPEND"))
(defparameter *bq-list** (make-symbol "BQ-LIST*"))
(defparameter *bq-nconc* (make-symbol "BQ-NCONC"))
(defparameter *bq-clobberable* (make-symbol "BQ-CLOBBERABLE"))
(defparameter *bq-quote* (make-symbol "BQ-QUOTE"))
(defparameter *bq-quote-nil* (list *bq-quote* nil))
(defparameter *bq-vector* (make-symbol "VECTOR"))
(defparameter *bq-level* 0)
(defparameter *read-base* 10)
(defparameter *read-suppress* nil)
(defparameter *features* nil)
(defparameter *token* (make-array 80
:element-type 'character
:fill-pointer 0
:adjustable t))
(defparameter *uninterned* nil)
(defparameter *preserve-whitespace* nil)
(defparameter *dot-flag* nil)
(defparameter *parenthesis-open* nil)
(defparameter *standard-readtable* (make-standard-readtable))
erst nach obiger Zeile
;;------------------------------------------------------------------------------
Backquote ...
;;------------------------------------------------------------------------------
angelehnt an " Backquote , Appendix C in CLtL , 2nd . "
erweitert um Backquote in Vektoren .
;;------------------------------------------------------------------------------
;;------------------------------------------------------------------------------
(defun backquote-reader (stream char)
(declare (ignore char))
(incf *bq-level*)
(prog1
(bq-completely-process (bq-read stream))
(decf *bq-level*)))
;;------------------------------------------------------------------------------
(defun comma-reader (stream char)
(declare (ignore char))
(when (<= *bq-level* 0)
(error "A comma appeared outside of a backquote"))
(decf *bq-level*)
(prog1
(cons (case (peek-char nil stream t nil t)
(#\@ (read-char stream t nil t) *comma-atsign*)
(#\. (read-char stream t nil t) *comma-dot*)
(otherwise *comma*))
(bq-read stream))
(incf *bq-level*)))
;;------------------------------------------------------------------------------
(defun bq-completely-process (x)
(bq-remove-tokens (bq-simplify (bq-process x))))
;;------------------------------------------------------------------------------
(defun bq-process (x)
(cond
;; `basic --> (QUOTE basic)
;;-------------------------
((atom x) (list *bq-quote* x))
;; `#(x1 x2 x3 ... xn) --> (apply #'vector `(x1 x2 x3 ... xn))
;;------------------------------------------------------------
((eq (car x) *bq-vector*)
(let ((list (bq-completely-process (cdr x))))
(if (eq 'QUOTE (car list))
(list *bq-quote* (apply #'vector (cadr list)))
(list 'APPLY `#'VECTOR list))))
;; `,form --> form
;;----------------
((eq (car x) *comma*) (cdr x))
` , @form -- > ERROR
;;------------------
((eq (car x) *comma-atsign*) (error ",@~S after `" (cdr x)))
` , .form -- > ERROR
;;------------------
((eq (car x) *comma-dot*) (error ",.~S after `" (cdr x)))
;; `(x1 x2 x3 ... xn . atom) -->
;;------------------------------
(t (do ((p x (cdr p))
(q '() (cons (bracket (car p)) q)))
((atom p)
;; --> (append [x1] [x2 [x3] ... [xn] (quote atom))
;;-------------------------------------------------
(cons *bq-append*
(nreconc q (list (list *bq-quote* p)))))
;; `(x1 ... xn . ,form) --> (append [x1] ... [xn] form)
;;-----------------------------------------------------
(when (eq (car p) *comma*)
(return (cons *bq-append* (nreconc q (list (cdr p))))))
` ( x1 ... xn . , @form ) -- > ERROR
;;--------------------------------
(when (eq (car p) *comma-atsign*) (error "Dotted ,@~s" (cdr p)))
` ( x1 ... xn . , .form ) -- > ERROR
;;--------------------------------
(when (eq (car p) *comma-dot*) (error "Dotted ,@~s" (cdr p)))))))
;;------------------------------------------------------------------------------
(defun bracket (x)
(cond
((atom x) (list *bq-list* (bq-process x)))
((eq (car x) *comma*) (list *bq-list* (cdr x)))
((eq (car x) *comma-atsign*) (cdr x))
((eq (car x) *comma-dot*) (list *bq-clobberable* (cdr x)))
(t (list *bq-list* (bq-process x)))))
;;------------------------------------------------------------------------------
(defun maptree (fn x)
(if (atom x)
(funcall fn x)
(let ((a (funcall fn (car x)))
(d (maptree fn (cdr x))))
(if (and (eql a (car x)) (eql d (cdr x)))
x
(cons a d)))))
;;------------------------------------------------------------------------------
(defun bq-splicing-frob (x)
(and (consp x)
(or (eq (car x) *comma-atsign*)
(eq (car x) *comma-dot*))))
;;------------------------------------------------------------------------------
(defun bq-frob (x)
(and (consp x)
(or (eq (car x) *comma*)
(eq (car x) *comma-atsign*)
(eq (car x) *comma-dot*))))
;;------------------------------------------------------------------------------
(defun bq-simplify (x)
(if (atom x)
x
(let ((x (if (eq (car x) *bq-quote*)
x
(maptree #'bq-simplify x))))
(if (not (eq (car x) *bq-append*))
x
(bq-simplify-args x)))))
;;------------------------------------------------------------------------------
(defun bq-simplify-args (x)
(do ((args (reverse (cdr x)) (cdr args))
(result
nil
(cond ((atom (car args))
(bq-attach-append *bq-append* (car args) result))
((and (eq (caar args) *bq-list*)
(notany #'bq-splicing-frob (cdar args)))
(bq-attach-conses (cdar args) result))
((and (eq (caar args) *bq-list**)
(notany #'bq-splicing-frob (cdar args)))
(bq-attach-conses
(reverse (cdr (reverse (cdar args))))
(bq-attach-append *bq-append*
(car (last (car args)))
result)))
((and (eq (caar args) *bq-quote*)
(consp (cadar args))
(not (bq-frob (cadar args)))
(null (cddar args)))
(bq-attach-conses (list (list *bq-quote*
(caadar args)))
result))
((eq (caar args) *bq-clobberable*)
(bq-attach-append *bq-nconc* (cadar args) result))
(t (bq-attach-append *bq-append*
(car args)
result)))))
((null args) result)))
;;------------------------------------------------------------------------------
(defun null-or-quoted (x)
(or (null x) (and (consp x) (eq (car x) *bq-quote*))))
;;------------------------------------------------------------------------------
(defun bq-attach-append (op item result)
(cond ((or (null result) (equal result *bq-quote-nil*))
(if (bq-splicing-frob item) (list op item) item))
((and (null-or-quoted item) (null-or-quoted result))
(list *bq-quote* (append (cadr item) (cadr result))))
((and (consp result) (eq (car result) op))
(list* (car result) item (cdr result)))
(t (list op item result))))
;;------------------------------------------------------------------------------
(defun bq-attach-conses (items result)
(cond
((and (every #'null-or-quoted items)
(null-or-quoted result))
(list *bq-quote*
(append (mapcar #'cadr items) (cadr result))))
((or (null result) (equal result *bq-quote-nil*))
(cons *bq-list* items))
((and (consp result)
(or (eq (car result) *bq-list*)
(eq (car result) *bq-list**)))
(cons (car result) (append items (cdr result))))
(t (cons *bq-list** (append items (list result))))))
;;------------------------------------------------------------------------------
(defun bq-remove-tokens (x)
(cond
((atom x) (cond
((eq x *bq-list*) 'list)
((eq x *bq-append*) 'append)
((eq x *bq-nconc*) 'nconc)
((eq x *bq-list**) 'list*)
((eq x *bq-quote*) 'quote)
(T x)))
((eq (car x) *bq-clobberable*) (bq-remove-tokens (cadr x)))
((and (eq (car x) *bq-list**) (consp (cddr x)) (null (cdddr x)))
(cons 'cons (maptree #'bq-remove-tokens (cdr x))))
(T (maptree #'bq-remove-tokens x))))
;;------------------------------------------------------------------------------
(defstruct (readtable (:copier nil)
(:predicate readtablep)
(:print-function
(lambda (readtable stream depth)
(declare (ignore readtable depth))
(write-string "#<readtable>" stream))))
fuer jedes Standard - Character ein Eintrag :
NIL (= ILLEGAL ) , WHITESPACE , CONSTITUENT , SINGLE - ESCAPE , MULTI - ESCAPE ,
;; (function . non-terminating-p)
;;-------------------------------
(syntax (make-array char-code-limit :initial-element nil))
eine Association - List , die fuer jedes Dispatch - Character
;; das Dispatch-Array angiebt.
;;----------------------------
(dispatch nil))
;;------------------------------------------------------------------------------
(defmacro get-fun (syntax) `(car ,syntax))
(defmacro terminating-p (syntax) `(null (cdr ,syntax)))
(defmacro get-syntax (c)
`(aref (readtable-syntax *readtable*) (char-code ,c)) )
;;------------------------------------------------------------------------------
(defun copy-readtable (&optional (from *readtable*) (to nil))
(when (null from) (setq from *standard-readtable*))
(when (null to) (setq to (make-readtable)))
(let ((syntax-from (readtable-syntax from))
(syntax-to (readtable-syntax to)))
(dotimes (i char-code-limit)
(setf (aref syntax-to i) (aref syntax-from i)))
(setf (readtable-dispatch to) nil)
(dolist (pair (readtable-dispatch from))
(when (car pair)
(copy-dispatch-macro-character pair from to))))
to)
;;------------------------------------------------------------------------------
(defun set-syntax-from-char (to-char from-char
&optional
(to-readtable *readtable*)
(from-readtable *standard-readtable*)
&aux pair)
(setf (aref (readtable-syntax to-readtable) (char-code to-char))
(aref (readtable-syntax from-readtable) (char-code from-char)))
evtl . , wenn dispatch - character ueberschrieben
;;---------------------------------------------------------------------
(setq pair (assoc to-char (readtable-dispatch to-readtable)))
(when pair (setf (car pair) nil) (setf (cdr pair) nil))
wenn dispatch - character , Kopie der
Dispatch - Funktion erzeugen , die sich genauso wie die alte verhaelt .
;;--------------------------------------------------------------------
(setq pair (assoc from-char (readtable-dispatch from-readtable)))
(when pair
(copy-dispatch-macro-character pair from-readtable to-readtable))
t)
;;------------------------------------------------------------------------------
(defun copy-dispatch-macro-character (pair from to)
(let ((c (car pair))
(dispatch-from (cdr pair))
dispatch-to)
(make-dispatch-macro-character c
(cdr (aref (readtable-syntax from)
(char-code c)))
to)
(setq dispatch-to (cdr (assoc c (readtable-dispatch to))))
(dotimes (i char-code-limit)
(setf (aref dispatch-to i) (aref dispatch-from i)))))
;;-----------------------------------------------------------------------------
(defun set-macro-character (char function
&optional
non-terminating-p
(readtable *readtable*))
(setf (aref (readtable-syntax readtable) (char-code char))
(cons function non-terminating-p))
evtl . , wenn dispatch - character ueberschrieben
;;---------------------------------------------------------------------
(let ((pair (assoc char (readtable-dispatch readtable))))
(when pair (setf (car pair) nil) (setf (cdr pair) nil)))
t)
;;------------------------------------------------------------------------------
(defun get-macro-character (char &optional (readtable *readtable*))
(let ((entry (aref (readtable-syntax readtable) (char-code char))))
(if entry
(values (car entry) (cdr entry))
nil)))
;;------------------------------------------------------------------------------
(defun make-dispatch-macro-character (char &optional
non-terminating-p
(readtable *readtable*))
(let ((dispatch-array (make-array char-code-limit :initial-element nil)))
(setf (aref (readtable-syntax readtable) (char-code char))
(cons
Dispatch - Reader
;;----------------
#'(lambda (stream char)
(let* ((i nil)
(c (read-char stream t nil t))
(d (digit-char-p c))
fun)
(when d
(setq i d)
(loop
(setq c (read-char stream t nil t))
(setq d (digit-char-p c))
(when (null d)
(when *read-suppress* (setq i nil))
(return))
(setq i (+ d (* 10 i)))))
(setq fun (aref dispatch-array (char-code (char-upcase c))))
(unless fun
(error "no ~S dispatch function defined for subchar ~S ~
(with arg ~S)" char c i))
(funcall fun stream c i)))
non-terminating-p))
(let ((pair (assoc char (readtable-dispatch readtable))))
(cond
;; altes dispatch-array ueberschreiben
;;------------------------------------
(pair (setf (cdr pair) dispatch-array))
Eintrag neu
;;--------------------
(T (push (cons char dispatch-array)
(readtable-dispatch readtable))))))
t)
;;------------------------------------------------------------------------------
(defun set-dispatch-macro-character (disp-char sub-char function
&optional
(readtable *readtable*))
(let ((dispatch-array
(cdr (assoc disp-char (readtable-dispatch readtable)))))
(unless dispatch-array
(error "~S is not a dispatch macro character" disp-char))
(setf (aref dispatch-array (char-code (char-upcase sub-char))) function))
t)
;;------------------------------------------------------------------------------
(defun get-dispatch-macro-character (disp-char sub-char
&optional
(readtable *readtable*))
(let ((dispatch-array
(cdr (assoc disp-char (readtable-dispatch readtable)))))
(unless dispatch-array
(error "~S is not a dispatch macro character" disp-char))
(aref dispatch-array (char-code (char-upcase sub-char)))))
;;------------------------------------------------------------------------------
;; Read one token from STREAM whose first character C has already been
;; read, and return the object it denotes: an integer, a float, a
;; ratio (read as a float quotient, with a warning), or a symbol.
;; Under *READ-SUPPRESS* the token is consumed and NIL is returned.
;; The characters are accumulated into the shared buffer *token*.
(defun read-token (stream c)
  (let ((multiple-escape nil)           ; currently inside |...| ?
        syntax                          ; syntax class of C
        (escape nil)                    ; any escape seen -> always a symbol
        (colon nil)                     ; package markers: nil/1/2 (3 = bad)
        colon-pos)                      ; index of first #\: in *token*
    ;; phase 1: accumulate the token's characters into *token*
    (loop
     (setq syntax (get-syntax c))
     (cond
      ((not multiple-escape)
       (case syntax
         (WHITESPACE
          ;; Changed by ...:
          ;; (when *preserve-whitespace* (unread-char c stream))
          (unread-char c stream)
          (return))
         (SINGLE-ESCAPE
          (setq c (read-char stream nil nil))
          (unless c
            (error
             "unexpected End of File after single escape"))
          (setq escape t)
          (vector-push-extend c *token*))
         (MULTIPLE-ESCAPE (setq escape t multiple-escape t))
         ((nil) (error "illegal Character"))
         (t
          ;; a terminating macro character ends the token
          (when (and (consp syntax) (terminating-p syntax))
            (unread-char c stream)
            (return))
          ;; track package markers: 1 = ":", 2 = "::", 3 = invalid
          (when (eql #\: c)
            (case colon
              ((nil) (setq colon 1)
               (setq colon-pos (fill-pointer *token*)))
              (1 (setq colon 2)
               (unless (eql colon-pos
                            (1- (fill-pointer *token*)))
                 (setq colon 3)))
              (t (setq colon 3))))
          (vector-push-extend (char-upcase c) *token*))))
      (T (case syntax
           (SINGLE-ESCAPE
            (setq c (read-char stream nil nil))
            (unless c
              (error "unexpected End of File after single escape"))
            (vector-push-extend c *token*))
           (MULTIPLE-ESCAPE (setq multiple-escape nil))
           ((NIL) (error "illegal character"))
           (t (vector-push-extend c *token*)))))
     (setq c (read-char stream nil nil))
     (unless c
       (when multiple-escape
         (error "unexpected End of File after multiple escape"))
       (return)))                       ;end of loop
    ;; do not analyse the token when *READ-SUPPRESS* is true
    (when *read-suppress* (return-from read-token nil))
    ;; phase 2: interpret *token* as a number or a symbol
    ;;---------------------------------------------
    (let ((i 0)
          (len (fill-pointer *token*))
          (sign 1)
          (num1 0) (num2 0.0)
          (base *read-base*)
          c)
      (flet
          (
           ;; read-digits reads a digit sequence
           ;; (depending on *read-base*).
           ;; Result:
           ;; NIL if 0 digits were read,
           ;; otherwise the INTEGER that the digit
           ;; sequence represents.
           ;;---------------------------------------------------
           (read-digits (&aux x d)
             (cond
              ((eql i len) nil)
              (T (setq x (digit-char-p (aref *token* i) base))
                 (cond
                  ((null x) nil)
                  (T (incf i)
                     (loop
                      (when (eql i len)
                        (return x))
                      (setq d (digit-char-p (aref *token* i)
                                            base))
                      (when (null d)
                        (return x))
                      (incf i)
                      (setq x (+ (* base x) d))))))))
           ;; consume an optional sign, returning +1 or -1
           (read-sign ()
             (if (eql i len)
                 1
                 (case (aref *token* i)
                   (#\- (incf i) -1)
                   (#\+ (incf i) 1)
                   (t 1))))
           ;; accumulate a fraction part into NUM2; D is the divisor of
           ;; the next digit (10.0 for the first position after the dot)
           (read-float2 (d &aux x)
             (loop
              (when (eql i len)
                (return))
              (setq c (aref *token* i))
              (setq x (digit-char-p c))
              (when (null x) (return))
              (incf i)
              (setq num2 (+ num2 (/ x d)))
              (setq d (* d 10.0)))
             num2))
        ;;---------------
        (tagbody
           ;; any escape or package marker -> cannot be a number
           (when (or escape colon)
             (go SYMBOL))
           ;; if the last character is #\. the number is decimal
           ;;-------------------------------------------------
           (when (eql #\. (aref *token* (1- len)))
             (setq base 10))
           (setq sign (read-sign))
           (setq num1 (read-digits))
           (when (null num1) (go FLOAT-DOT))
           (when (eql i len)
             ;; Integer
             ;;--------
             (return-from read-token (* sign num1)))
           (setq c (aref *token* i)) (incf i)
           (cond
            ((eql #\. c)
             ;;----------------
             (when (eql i len) (return-from read-token (* sign num1)))
             ;; possibly a floating-point number
             ;;---------------------
             (unless (eql base 10)
               ;; re-scan the integer part in base 10
               (setq base 10)
               (setq i 0)
               (setq sign (read-sign))
               (setq num1 (read-digits))
               (incf i))
             (setq num2 (read-float2 10.0))
             (go FLOAT2))
            ((eql #\/ c) (go RATIO2))
            (T (go FLOAT-EXPT)))
         FLOAT-DOT
           ;; no digits before the (potential) dot
           (setq num1 0)
           (when (eql i len) (go SYMBOL))
           (setq c (aref *token* i)) (incf i)
           (unless (eql #\. c) (go SYMBOL))
           ;; a lone dot is not a number
           ;;--------------
           (when (eql i len) (go SYMBOL))
           (setq c (aref *token* i)) (incf i)
           (setq num2 (digit-char-p c))
           (when (null num2) (go SYMBOL))
           (setq num2 (/ num2 10.0))
           (setq num2 (read-float2 100.0))
         FLOAT2
           (when (eql i len)
             (return-from read-token (* sign (+ num1 num2))))
           (setq c (aref *token* i)) (incf i)
         FLOAT-EXPT
           ;; exponent marker: e/s/f/d/l (all treated alike here)
           (unless (member (char-downcase c) '(#\e #\s #\f #\d #\l))
             (go SYMBOL))
           (when (eql i len) (go SYMBOL))
           (setq base 10)
           (let ((e-sign (read-sign))
                 (expt (read-digits)))
             (when (or (null expt) (< i len)) (go Symbol))
             (return-from read-token (* sign
                                        (+ num1 num2)
                                        (expt 10 (* e-sign expt)))))
         RATIO2
           ;; NUM1 "/" digits: this implementation has no ratio type,
           ;; so the quotient is returned (with a warning)
           (setq num2 (read-digits))
           (when (or (null num2) (< i len)) (go SYMBOL))
           (let ((result (/ num1 num2)))
             (warn "ratio ~a/~a has been read as ~s" num1 num2 result)
             (return-from read-token result))
         SYMBOL
           (cond
            ;; no package given
            ;;-----------------------
            ((null colon)
             (unless escape
               ;; check whether this is the dot of a dotted list
               ;;------------------------------------------------
               (when (and (eql len 1)
                          (eql (aref *token* 0) #\.)
                          *dot-flag*)
                 (setq *dot-flag* nil)
                 (return-from read-token nil))
               ;; check whether the symbol consists entirely of dots
               ;;----------------------------------------------------
               (do ((i 0 (1+ i)))
                   ((>= i len) (error "illegal token ~S" *token*))
                 (unless (eql #\. (aref *token* i)) (return))))
             (return-from read-token
               (if *uninterned*
                   (make-symbol *token*)
                   (values (intern *token*)))))
            (*uninterned* (error "token may not contain colons"))
            ;; KEYWORD
            ;;--------
            ((eql colon-pos 0)
             (unless (eql colon 1)
               (error "illegal token ~S" *token*))
             (return-from read-token
               (values (intern (subseq *token* 1)
                               *keyword-package*))))
            ;; a package prefix is given
            ;;----------------------
            (T (let* ((package-name (subseq *token* 0 colon-pos))
                      (package (find-package package-name))
                      (symbol-name
                       (subseq *token* (+ colon-pos colon))))
                 (unless package
                   (error "illegal package-name ~S" package-name))
                 (case colon
                   ;; "::" -> intern; ":" -> must be an external symbol
                   (2 (return-from read-token (values (intern symbol-name
                                                              package))))
                   (1 (multiple-value-bind (symbol where)
                          (find-symbol symbol-name package)
                        (unless (eql :external where)
                          (error "can't find the external symbol ~S in ~S"
                                 symbol-name package))
                        (return-from read-token symbol)))
                   (T (error "illegal Token ~S" *token*)))))))))))
;;------------------------------------------------------------------------------
;; ignore-token stream
;; reads one token and discards it (used while *READ-SUPPRESS* is true)
;;------------------------------------------------------------------------------
;; Read one token from STREAM and discard it (used while
;; *READ-SUPPRESS* is true).  Always returns NIL.
(defun ignore-token (stream)
  (read-token stream (read-char stream t nil t))
  nil)
;;------------------------------------------------------------------------------
;; the closing parenthesis is read via (read) rather than (peek-char),
;; because e.g. a comment may still follow the last element of the list
;;------------------------------------------------------------------------------
;; Reader for #\(: read a (possibly dotted) list.  *parenthesis-open*
;; is bound to #\) so that RIGHT-PARENTHESIS-READER can clear it to
;; signal the end of the list; *dot-flag* allows READ-TOKEN to treat a
;; lone dot specially (it returns NIL and clears *dot-flag*).
(defun cons-reader (stream char)
  (declare (ignore char))
  (let ((*dot-flag* t)
        (*parenthesis-open* #\))
        list
        last-cons
        x)
    (setq x (bq-read stream))
    ;; an immediate closing parenthesis means the empty list
    (when (null *parenthesis-open*)
      (return-from cons-reader nil))
    (when (null *dot-flag*)
      (error "Nothing appears before the dot in a list"))
    (setq list (setq last-cons (cons x nil)))
    (loop
     (setq x (bq-read stream))
     (when (null *parenthesis-open*)
       (return list))
     (cond
      ;; no dot read
      ;;-----------------
      (*dot-flag* (setq last-cons (setf (cdr last-cons) (cons x nil))))
      ;; dot read: special treatment (X was the dot's NIL placeholder)
      ;;------------------------------
      (T (setf (cdr last-cons) (bq-read stream))
         (when (null *parenthesis-open*)
           (error "Nothing appears after the dot in a list"))
         ;; exactly one object may follow the dot before #\)
         (bq-read stream)
         (unless (null *parenthesis-open*)
           (error "More than one object found after dot in a list"))
         (return list))))))
;;------------------------------------------------------------------------------
;; Reader for #\): if CHAR is the parenthesis CONS-READER is currently
;; waiting for, clear *parenthesis-open* to signal end-of-list and
;; return NIL; otherwise the parenthesis is stray and is ignored with a
;; warning, contributing no value.
(defun right-parenthesis-reader (stream char)
  (declare (ignore stream))
  (if (eql char *parenthesis-open*)
      (progn (setq *parenthesis-open* nil)
             nil)
      (progn (warn "Ignoring an unmatched ~a" char)
             (values))))
;;------------------------------------------------------------------------------
;; Reader for #\': wrap the next form in (QUOTE ...).
(defun quote-reader (stream char)
  (declare (ignore char))
  `(QUOTE ,(bq-read stream)))
;;------------------------------------------------------------------------------
;; Reader for #\;: discard the rest of the line and contribute no value.
(defun semicolon-reader (stream char)
  (declare (ignore char))
  (read-line stream nil nil nil)
  (values))
;;------------------------------------------------------------------------------
;; Reader for #\": accumulate characters into *token* until the opening
;; quote character CHAR recurs; a SINGLE-ESCAPE character takes the
;; following character literally.  Returns a fresh string.
(defun string-reader (stream char &aux c)
  (setf (fill-pointer *token*) 0)
  (loop
   (setq c (read-char stream t nil t))
   (cond
    ((eql (get-syntax c) 'SINGLE-ESCAPE)
     (setq c (read-char stream t nil t))
     (vector-push-extend c *token*))
    ((eql char c)
     (return (copy-seq *token*)))
    (T (vector-push-extend c *token*)))))
;;------------------------------------------------------------------------------
;; Reader for #\\ : read a character literal.  FONT is the numeric
;; dispatch argument of #<n>\ ; a non-zero font is ignored with a
;; warning.
(defun char-reader (stream char font)
  (declare (ignore char))
  (let ((c (read-char stream t nil t))
        (c2 (peek-char nil stream nil nil t)))
    ;; CLtL (p. 353): In the single-character case,
    ;; the character must be followed by a non-constituent character
    ;;--------------------------------------------------------------
    (when (and c2 (eql 'CONSTITUENT (get-syntax c2)))
      ;; multiple-character case: read a name token and resolve it
      ;;------------------------
      (setf (fill-pointer *token*) 0)
      (let ((*read-suppress* t))
        (read-token stream c))
      (unless *read-suppress*
        (setq c (name-char *token*))
        (when (null c) (error "illegal character name ~s" *token*))))
    (cond
     (*read-suppress* nil)
     (t (when (and font (> font 0))
          (warn "font ~s of ~s will be ignored" font c))
        c))))
;;------------------------------------------------------------------------------
;; Reader for #': wrap the next form in (FUNCTION ...).  A numeric
;; argument (#n') is an error.
(defun function-reader (stream char i)
  (when i (extra-argument char))
  `(function ,(bq-read stream)))
;;------------------------------------------------------------------------------
;; Reader for #( : read a vector.  LEN is the optional length argument
;; of #n( ; missing trailing slots are filled with the last element.
;; Inside a backquote the element list is tagged with *bq-vector* so
;; the backquote expander (BQ-PROCESS) can build the vector later;
;; #n( with an explicit length is not allowed under backquote.
(defun vector-reader (stream char len)
  (let ((list (cons-reader stream char)))
    (cond (*read-suppress* nil)
          ((zerop *bq-level*)
           (if len
               (let ((listlen (length list)))
                 (when (> listlen len)
                   (error "Vector longer than specified length: #~S~S"
                          len list))
                 ;; pad with the last element up to LEN
                 (fill (replace (make-array len) list)
                       (car (last list)) :start listlen))
               (apply #'vector list)))
          (len
           (error "#~s( syntax is not allowed in backquoted expressions" len))
          (t (cons *bq-vector* list)))))
;;------------------------------------------------------------------------------
;; #B: read an integer in base 2; #nB (numeric argument) is an error.
(defun binary-reader (stream char i)
  (if i (extra-argument char))
  (radix-reader stream char 2))
;;------------------------------------------------------------------------------
;; #O: read an integer in base 8; #nO (numeric argument) is an error.
(defun octal-reader (stream char i)
  (if i (extra-argument char))
  (radix-reader stream char 8))
;;------------------------------------------------------------------------------
;; #X: read an integer in base 16; #nX (numeric argument) is an error.
(defun hex-reader (stream char i)
  (if i (extra-argument char))
  (radix-reader stream char 16))
;;------------------------------------------------------------------------------
;; Common part of #B/#O/#X/#nR: read the following object with
;; *READ-BASE* bound to RADIX and check that it is an integer.
(defun radix-reader (stream char radix)
  (cond (*read-suppress* (ignore-token stream))
        ((not radix)
         (error "Radix missing in #R."))
        ((not (<= 2 radix 36))
         (error "Illegal radix for #R: ~D." radix))
        (t
         (let ((res (let ((*read-base* radix))
                      (read stream t nil t))))
           (unless (typep res 'integer #|rational|#)
             (error "#~A (base ~D) value is not a rational: ~S."
                    char radix res))
           res))))
;;------------------------------------------------------------------------------
;; Reader for #:: read the following token with *uninterned* bound to T
;; so READ-TOKEN produces an uninterned symbol.  A numeric argument is
;; an error, and so is any non-symbol result.
(defun uninterned-reader (stream char i)
  (when i (extra-argument char))
  (let* ((*uninterned* T)
         (value (read stream t nil t)))
    (cond
     (*read-suppress* nil)
     ((symbolp value) value)
     (T (error "illegal value (~S) followed #:" value)))))
;;------------------------------------------------------------------------------
;; Reader for #nA: read the nested initial-contents list and build an
;; N-dimensional array; the dimensions are derived from the shape of
;; the first elements of each nesting level.
(defun array-reader (stream char n)
  (cond
   (*read-suppress* (read stream t nil t) nil)
   ((null n) (error "Rank argument must be supplied to #~a" char))
   (t (let ((list (read stream t nil t)))
        (labels
            ;; dimension list taken from the first RANK nesting levels
            ((calc-dim (rank list)
               (cond
                ((eql 0 rank) ())
                (T (cons (length list) (calc-dim (1- rank) (car list)))))))
          (make-array (calc-dim n list) :initial-contents list))))))
;;------------------------------------------------------------------------------
;; Reader for #S: reads (name slot1 val1 slot2 val2 ...) and calls the
;; structure's keyword constructor with the slot names interned as
;; keywords.  A numeric argument is an error.
(defun struct-reader (stream char i)
  (when i (extra-argument char))
  (let ((list (read stream t nil t)))
    (cond
     (*read-suppress* nil)
     ((atom list) (error "illegal value (~s) followed #S" list))
     (T (let ((constructor (rt:struct-constructor (car list)))
              (key-value-list (cdr list)))
          (unless constructor (error "~a is not a structure" (car list)))
          (do ((result ()))
              ((endp key-value-list) (apply constructor (nreverse result)))
            ;; slot name -> keyword
            (push (intern (string (car key-value-list)) *keyword-package*)
                  result)
            (pop key-value-list)
            (when (endp key-value-list)
              (error "unexpected end in #S~s" key-value-list))
            (push (car key-value-list) result)
            (pop key-value-list)))))))
;;------------------------------------------------------------------------------
;; Reader for #+: read the feature expression with *PACKAGE* bound to
;; the keyword package; if it holds (EVAL-FEATURE), read and return
;; the following form, otherwise consume that form under
;; *READ-SUPPRESS* and contribute no value.
(defun feature-plus-reader (stream char i)
  (declare (ignore char))
  (when i (extra-argument #\+))
  (let ((feature
         (let ((*package* *keyword-package*))
           (read stream t nil t))))
    (if (eval-feature feature)
        (bq-read stream)
        (let ((*read-suppress* t))
          (bq-read stream)
          (values)))))
;;------------------------------------------------------------------------------
;; Reader for #-: complement of #+ — the following form is kept only
;; when the feature expression does NOT hold.
(defun feature-minus-reader (stream char i)
  (declare (ignore char))
  (when i (extra-argument #\-))
  (let ((feature
         (let ((*package* *keyword-package*))
           (read stream t nil t))))
    (if (eval-feature feature)
        (let ((*read-suppress* t))
          (bq-read stream)
          (values))
        (bq-read stream))))
;;------------------------------------------------------------------------------
;; Evaluate a #+/#- feature expression against *FEATURES*.  An atomic
;; feature is looked up with MEMBER; compound expressions support NOT,
;; AND and OR.  BUG FIX: feature expressions are read with *PACKAGE*
;; bound to the keyword package (see FEATURE-PLUS-READER), so the
;; operator arrives as :NOT/:AND/:OR and never matched the old case
;; keys NOT/AND/OR; both spellings are now accepted.
(defun eval-feature (feature)
  (if (atom feature)
      (member feature *features*)
      (case (first feature)
        ((not :not) (not (eval-feature (second feature))))
        ((and :and) (dolist (feature (cdr feature))
                      (unless (eval-feature feature)
                        (return-from eval-feature nil)))
                    T)
        ((or :or) (dolist (feature (cdr feature))
                    (when (eval-feature feature)
                      (return-from eval-feature t)))
                  nil)
        (T (error "illegal feature expression ~s" feature)))))
;;------------------------------------------------------------------------------
;; Reader for #| ... |#: skip a nestable block comment.  LEVEL counts
;; the nesting depth of inner #| |# pairs; the reader contributes no
;; value.  A numeric argument is an error.
(defun comment-block-reader (stream char i)
  (declare (ignore char))
  (when i (extra-argument #\|))
  (let ((c (read-char stream t nil t))
        (level 0))
    (loop
     (cond
      ((eql #\| c)
       (setq c (read-char stream t nil t))
       (when (eql #\# c)
         ;; "|#" closes one level; the outermost one ends the comment
         (when (eql 0 level) (return (values)))
         (decf level)
         (setq c (read-char stream t nil t))))
      ((eql #\# c)
       (setq c (read-char stream t nil t))
       (when (eql #\| c)
         ;; nested "#|"
         (incf level)
         (setq c (read-char stream t nil t))))
      (T (setq c (read-char stream t nil t)))))))
;;------------------------------------------------------------------------------
;; Signal an error for a dispatch macro that received a numeric
;; argument it does not accept (e.g. #5').
(defun extra-argument (c)
  (error "extra argument for #~S" c))
;;------------------------------------------------------------------------------
;; Recursive READ used by the macro-character readers: signals an
;; error at end of file.
(defun bq-read (stream)
  (internal-read stream t nil t))
;;------------------------------------------------------------------------------
;; The workhorse behind READ: loop until either a token was read or a
;; macro-character reader produced a value (readers that produce no
;; value, e.g. comments, keep the loop going).  When not recursive and
;; *preserve-whitespace* is false, one trailing whitespace character
;; is consumed afterwards.
(defun internal-read (stream eof-error-p eof-value recursive-p)
  (prog1
      (loop
       (let ((c (read-char stream nil nil))
             syntax)
         (when (null c)
           (when (or eof-error-p recursive-p)
             (error "unexpected End of File"))
           (return-from internal-read eof-value))
         (setq syntax (get-syntax c))
         (case syntax
           (WHITESPACE)                 ;ignore
           ((CONSTITUENT SINGLE-ESCAPE MULTIPLE-ESCAPE)
            (return (progn (setf (fill-pointer *token*) 0)
                           (read-token stream c))))
           ((nil) (error "illegal Character"))
           ;; (non-)terminating macro character
           ;;-------------------------
           (T (multiple-value-call
                  #'(lambda (&optional (v nil supplied))
                      ;; only a supplied value terminates the loop
                      (when supplied
                        (return v)))
                (funcall (get-fun syntax) stream c))))))
    (when (and (not recursive-p) (not *preserve-whitespace*))
      (let ((c (read-char stream nil nil)))
        (cond
         ;; EOF (assumption: the stream is exhausted)
         ;; ---------------------------------------------------------------
         ((null c))
         ;; whitespace at top level is consumed
         ;; -------------------------------------
         ((eql (get-syntax c) 'WHITESPACE))
         ;; a syntactically relevant character is pushed back
         ;; ------------------------------------------------------
         (t (unread-char c stream)))))))
;;------------------------------------------------------------------------------
;; READ one object from STREAM.  The stream designators NIL and T are
;; resolved to *STANDARD-INPUT* and *TERMINAL-IO*; the backquote level
;; is reset, and at top level trailing whitespace is consumed.
(defun read (&optional (stream *standard-input*)
                       (eof-error-p t)
                       (eof-value nil)
                       (recursive-p nil))
  (when (null stream) (setq stream *standard-input*))
  (when (eq stream t) (setq stream *terminal-io*))
  (let ((*bq-level* 0))
    (cond
     (recursive-p (internal-read stream eof-error-p eof-value recursive-p))
     (t (let ((*preserve-whitespace* nil))
          (internal-read stream eof-error-p eof-value recursive-p))))))
;;------------------------------------------------------------------------------
;; Like READ, but at top level the character terminating the object is
;; left in the stream (*preserve-whitespace* bound to T).
(defun read-preserving-whitespace (&optional (stream *standard-input*)
                                             (eof-error-p t)
                                             (eof-value nil)
                                             (recursive-p nil))
  (when (null stream) (setq stream *standard-input*))
  (when (eq stream t) (setq stream *terminal-io*))
  (let ((*bq-level* 0))
    (cond
     (recursive-p (internal-read stream eof-error-p eof-value recursive-p))
     (t (let ((*preserve-whitespace* t))
          (internal-read stream eof-error-p eof-value recursive-p))))))
;;------------------------------------------------------------------------------
;; Read objects from STREAM until CHAR is seen; the terminating CHAR
;; is consumed.  Returns the list of objects read.
;; NOTE(review): RECURSIVE-P is accepted but not used here — confirm
;; whether the inner READ calls should pass it through.
(defun read-delimited-list (char &optional stream recursive-p)
  (case stream
    ((nil) (setq stream *standard-input*))
    ((T) (setq stream *terminal-io*)))
  (labels
      ;; skip whitespace, stop on the delimiter, else read one object
      ;; and recurse for the rest
      ((read-rest ()
         (if (eql char (peek-char t stream t nil t))
             (progn (read-char stream) nil)
             (cons (read stream t nil t) (read-rest)))))
    (read-rest)))
;;------------------------------------------------------------------------------
;; 2 values: 1. string, 2. (member nil t), T == eof
;;------------------------------------------------------------------------------
;; Read characters up to (and consuming) #\Newline.
;; Returns 2 values: 1. the line as a fresh string,
;; 2. (member nil t) where T means the line was terminated by EOF.
(defun read-line (&optional stream (eof-error-p t) eof-value recursive-p)
  (case stream
    ((nil) (setq stream *standard-input*))
    ((T) (setq stream *terminal-io*)))
  (let ((c (read-char stream nil nil)))
    (cond
     ((null c)
      (when eof-error-p (error "unexpected end of file"))
      (values eof-value t))
     (T (setf (fill-pointer *token*) 0)
        (loop
         (when (eql #\Newline c) (return (values (copy-seq *token*) nil)))
         (vector-push-extend c *token*)
         (setq c (read-char stream nil nil))
         (when (null c) (return (values (copy-seq *token*) t))))))))
;;------------------------------------------------------------------------------
;; Peek at a character of STREAM without consuming it.
;; PEEK-TYPE NIL: the very next character.
;; PEEK-TYPE T:   the next non-whitespace character.
;; PEEK-TYPE a character: the next character EQL to it (skipping
;; everything before it, as the standard requires).
;; At EOF, signals an error or returns EOF-VALUE.
(defun peek-char (&optional peek-type stream (eof-error-p t)
                            eof-value recursive-p)
  (case stream
    ((nil) (setq stream *standard-input*))
    ((T) (setq stream *terminal-io*)))
  (let ((c (read-char stream nil nil recursive-p)))
    (cond
     ((eq t peek-type)
      (loop
       (when (null c) (return))
       (unless (eql 'WHITESPACE (get-syntax c))
         (return))
       (setq c (read-char stream nil nil recursive-p))))
     ((characterp peek-type)
      (loop
       (when (null c) (return))
       ;; BUG FIX: stop when a character EQL to PEEK-TYPE is found
       ;; (the old loop had the test inverted and stopped on the
       ;; first NON-matching character instead).
       (when (eql peek-type c)
         (return))
       (setq c (read-char stream nil nil recursive-p)))))
    (cond
     ((null c)
      ;; BUG FIX: actually return EOF-VALUE here; previously the
      ;; value was discarded and UNREAD-CHAR was called on NIL.
      (when eof-error-p (error "unexpected end of file"))
      eof-value)
     (t (unread-char c stream)
        c))))
;;------------------------------------------------------------------------------
;; READ from the contents of STRING between START and END.  Returns 2
;; values: the object read and the index of the first character not
;; read.  The :preserve-whitespace keyword is bound directly onto the
;; special variable *preserve-whitespace*.
(defun read-from-string (string &optional (eof-error-p t) eof-value
                                &key (start 0) (end (length string))
                                ((:preserve-whitespace *preserve-whitespace*))
                                &aux index (*bq-level* 0))
  (with-input-from-string (stream string :index index :start start :end end)
    (values (internal-read stream eof-error-p eof-value nil) index)))
;;------------------------------------------------------------------------------
;; Parse an integer from STRING between START and END in RADIX.
;; Returns two values: the integer (or NIL when JUNK-ALLOWED and no
;; integer was found) and the index where parsing stopped.  Signals an
;; error on junk unless JUNK-ALLOWED.
(defun parse-integer (string &key (start 0) (end (length string))
                             (radix 10) (junk-allowed nil))
  (prog (c x d sign)
     ;; skip leading whitespace
     ;;--------------------------------
     (loop
      (when (>= start end) (go NO-INTEGER))
      (setq c (char string start))
      (unless (eq 'WHITESPACE (get-syntax c))
        (return))
      ;; BUG FIX: advance past the whitespace character — the old loop
      ;; never incremented START and spun forever on leading whitespace
      (incf start))
     ;; optional sign
     ;;----------------------
     (setq sign (case c
                  (#\- (incf start) -1)
                  (#\+ (incf start) 1)
                  (t 1)))
     ;; at least 1 digit is required
     ;;--------------------------
     (when (>= start end) (go NO-INTEGER))
     ;; BUG FIX: index into STRING — the old code read (char start start)
     (setq x (digit-char-p (char string start) radix))
     (cond
      ((null x) (go NO-INTEGER))
      (T (incf start)
         (loop
          (when (eql start end)
            (return))
          (setq d (digit-char-p (char string start) radix))
          (when (null d)
            (return))
          (incf start)
          (setq x (+ (* radix x) d)))))
     ;; integer found
     ;;-----------------
     (cond
      ;; BUG FIX (here and below): apply SIGN — it was computed but
      ;; never used, so "-123" parsed as 123
      (junk-allowed (return (values (* sign x) start)))
      ;; check that only whitespace follows
      ;;---------------------------------
      (T (loop
          (when (>= start end) (return))
          (setq c (char string start))
          (unless (eq 'WHITESPACE (get-syntax c)) (go ERROR))
          ;; BUG FIX: advance past trailing whitespace (old loop spun)
          (incf start))
         (return (values (* sign x) start))))
   NO-INTEGER
     (when junk-allowed
       (return (values nil start)))
   ERROR
     (error "illegal integer ~S" string)))
;;------------------------------------------------------------------------------
;; Build the standard readtable: install the whitespace, constituent
;; and escape syntax classes, the standard macro characters, and the
;; #-dispatch table with all its sub-character readers.
(defun make-standard-readtable ()
  (let* ((rtab (make-readtable))
         (syntax (readtable-syntax rtab)))
    (dolist (whitespace-char '(#\tab #\newline #\page #\return #\space))
      (setf (aref syntax (char-code whitespace-char)) 'WHITESPACE))
    (setf (aref syntax (char-code #\backspace)) 'CONSTITUENT)
    ;; all graphic characters from #\! up to (excluding) #\rubout
    (do ((i (char-code #\!) (1+ i)))
        ((>= i (char-code #\rubout)))
      (setf (aref syntax i) 'CONSTITUENT))
    (setf (aref syntax (char-code #\\)) 'SINGLE-ESCAPE)
    (setf (aref syntax (char-code #\|)) 'MULTIPLE-ESCAPE)
    (make-dispatch-macro-character #\# T rtab)
    (set-dispatch-macro-character #\# #\\ #'char-reader rtab)
    (set-dispatch-macro-character #\# #\' #'function-reader rtab)
    (set-dispatch-macro-character #\# #\( #'vector-reader rtab)
    (set-dispatch-macro-character #\# #\: #'uninterned-reader rtab)
    (set-dispatch-macro-character #\# #\B #'binary-reader rtab)
    (set-dispatch-macro-character #\# #\O #'octal-reader rtab)
    (set-dispatch-macro-character #\# #\X #'hex-reader rtab)
    (set-dispatch-macro-character #\# #\R #'radix-reader rtab)
    (set-dispatch-macro-character #\# #\A #'array-reader rtab)
    (set-dispatch-macro-character #\# #\S #'struct-reader rtab)
    (set-dispatch-macro-character #\# #\+ #'feature-plus-reader rtab)
    (set-dispatch-macro-character #\# #\- #'feature-minus-reader rtab)
    (set-dispatch-macro-character #\# #\| #'comment-block-reader rtab)
    (set-macro-character #\' #'quote-reader NIL rtab)
    (set-macro-character #\( #'cons-reader NIL rtab)
    (set-macro-character #\) #'right-parenthesis-reader NIL rtab)
    (set-macro-character #\, #'comma-reader NIL rtab)
    ;; BUG FIX: this registration was mangled in the source (everything
    ;; after the #\; character was lost); without it #\; would never
    ;; invoke SEMICOLON-READER and line comments would not work.
    (set-macro-character #\; #'semicolon-reader NIL rtab)
    (set-macro-character #\" #'string-reader NIL rtab)
    (set-macro-character #\` #'backquote-reader NIL rtab)
    rtab))
| null | https://raw.githubusercontent.com/hoelzl/Clicc/cea01db35301144967dc74fd2f96dd58aa52d6ea/src/runtime/lisp/read.lisp | lisp | -----------------------------------------------------------------------------
-----------------------------------------------------------------------------
------------------------------------------------------
- Backquote-Reader + Simplifier
- READ
- READ-PRESERVING-WHITESPACE
- READ-DELIMITED-LIST
- READ-LINE
- PEEK-CHAR
- READ-FROM-STRING
$Log: read.lisp,v $
Revision 1.14 1994/06/03 09:51:05 hk
Revision 1.13 1994/06/02 14:10:19 hk
Revision 1.11 1994/02/17 16:16:35 hk
Revision 1.10 1994/01/11 16:11:47 hk
Revision 1.9 1993/11/29 12:26:40 uho
readtable exportiert.
Revision 1.4 1993/04/22 10:48:21 hk
(in-package "RUNTIME") -> (in-package "LISP"),
clicc::declaim -> declaim, clicc::fun-spec (etc.) -> lisp::fun-spec (etc.)
Revision 1.2 1993/01/11 15:04:27 hk
structure -> struct
Initial revision
-----------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
`basic --> (QUOTE basic)
-------------------------
`#(x1 x2 x3 ... xn) --> (apply #'vector `(x1 x2 x3 ... xn))
------------------------------------------------------------
`,form --> form
----------------
------------------
------------------
`(x1 x2 x3 ... xn . atom) -->
------------------------------
--> (append [x1] [x2 [x3] ... [xn] (quote atom))
-------------------------------------------------
`(x1 ... xn . ,form) --> (append [x1] ... [xn] form)
-----------------------------------------------------
--------------------------------
--------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
(function . non-terminating-p)
-------------------------------
das Dispatch-Array angiebt.
----------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
---------------------------------------------------------------------
--------------------------------------------------------------------
------------------------------------------------------------------------------
-----------------------------------------------------------------------------
---------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
----------------
altes dispatch-array ueberschreiben
------------------------------------
--------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
(when *preserve-whitespace* (unread-char c stream))
end of loop
nicht analysieren, wenn *READ-SUPPRESS*
---------------------------------------------
(abhaengig von *read-base*).
Resultat:
wird, sonst.
---------------------------------------------------
---------------
-------------------------------------------------
--------
----------------
evtl. Floating-Point
---------------------
nur ein Punkt
--------------
-----------------------
------------------------------------------------
Pruefen ob das Symbol vollstaendig aus Dots besteht
----------------------------------------------------
--------
Package ist angegeben
----------------------
------------------------------------------------------------------------------
ignore-token stream
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
-----------------
------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
the character must be followed by a non-constituent character
--------------------------------------------------------------
multiple-character case
------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
rational
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
ignorieren
-------------------------
---------------------------------------------------------------
-------------------------------------
------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
--------------------------------
pruefen ob Vorzeichen
----------------------
--------------------------
Integer gefunden
-----------------
---------------------------------
------------------------------------------------------------------------------ | Copyright ( C ) 1993 Christian - Albrechts - Universitaet zu Kiel , Germany
Projekt : APPLY - A Practicable And Portable Lisp Implementation
Funktion : Laufzeitsystem
- Readtables ,
( - READ - CHAR , UNREAD - CHAR -- > file.lisp )
- PARSE - INTEGER
$ Revision : 1.14 $
Schreibfehler
Print - Funktion f"ur readtable - Struktur .
Revision 1.12 1994/05/31 12:05:06 hk
Bessere warning
" Uberfl"ussigen Test in struct - reader gestrichen , aufger"aumt ,
.
in bq - attach - append bei ` ( ..... . const ) behoben .
In ' read - token ' wird des look - ahead - Zeichens das Ende
.
Revision 1.8 1993/11/10 16:09:27 hk
In read - from - string den zu T korrigiert .
Revision 1.7 1993/07/14 13:50:42 hk
Neue Reader fuer # b , # o , # x und # nR , vector - reader
Revision 1.6 1993/06/16 15:20:38 hk
Copyright eingefuegt .
Revision 1.5 1993/05/07 08:55:22 hk
Definitionen exportiert , defvar , defconstant ,
clicc / lib / . rt::set - xxx in ( setf xxx ) umgeschrieben .
Definitionen und Anwendungen von / aus Package Runtime mit rt : .
declaim fun - spec und declaim top - level - form gestrichen .
Revision 1.3 1993/02/16 14:34:20 hk
$ Revision : 1.14 $ eingefuegt
Revision 1.1 1992/03/24 17:12:55 hk
(in-package "LISP")
(export
'(*read-base* *read-suppress* *features*
readtable readtablep copy-readtable set-syntax-from-char set-macro-character
get-macro-character make-dispatch-macro-character
set-dispatch-macro-character get-dispatch-macro-character read
read-preserving-whitespace read-delimited-list read-line peek-char
read-from-string parse-integer))
;; Unique marker symbols used by the backquote expander (modelled on
;; Appendix C of CLtL2); uninterned so user code can never collide
;; with them.
(defparameter *comma* (make-symbol "COMMA"))
(defparameter *comma-atsign* (make-symbol "COMMA-ATSIGN"))
(defparameter *comma-dot* (make-symbol "COMMA-DOT"))
(defparameter *bq-list* (make-symbol "BQ-LIST"))
(defparameter *bq-append* (make-symbol "BQ-APPEND"))
(defparameter *bq-list** (make-symbol "BQ-LIST*"))
(defparameter *bq-nconc* (make-symbol "BQ-NCONC"))
(defparameter *bq-clobberable* (make-symbol "BQ-CLOBBERABLE"))
(defparameter *bq-quote* (make-symbol "BQ-QUOTE"))
(defparameter *bq-quote-nil* (list *bq-quote* nil))
(defparameter *bq-vector* (make-symbol "VECTOR"))
;; current backquote nesting depth
(defparameter *bq-level* 0)
;; radix used by READ-TOKEN for rational tokens
(defparameter *read-base* 10)
;; when true, READ parses but returns NIL (used by #+/#-)
(defparameter *read-suppress* nil)
(defparameter *features* nil)
;; shared adjustable buffer used for token/string/line accumulation
(defparameter *token* (make-array 80
                                  :element-type 'character
                                  :fill-pointer 0
                                  :adjustable t))
;; when true, READ-TOKEN creates uninterned symbols (#:)
(defparameter *uninterned* nil)
;; when true, trailing whitespace is left in the stream
(defparameter *preserve-whitespace* nil)
;; true while a dotted-list dot may legally appear (see CONS-READER)
(defparameter *dot-flag* nil)
;; the closing parenthesis currently expected, or NIL once it was read
(defparameter *parenthesis-open* nil)
(defparameter *standard-readtable* (make-standard-readtable))
;; (must come only after the line above)
;; Backquote ...
;; modelled on "Backquote", Appendix C in CLtL, 2nd ed.
;; extended to handle backquote inside vectors
;; Reader for #\`: read the following form at an incremented backquote
;; nesting level and expand it into list-construction code.
(defun backquote-reader (stream char)
  (declare (ignore char))
  (incf *bq-level*)
  (let ((expansion (bq-completely-process (bq-read stream))))
    (decf *bq-level*)
    expansion))
;; Reader for #\, inside backquote: tags the following form with one of
;; the unique marker symbols *comma*, *comma-atsign* (,@) or
;; *comma-dot* (,.).  The form itself is read one backquote level
;; lower; the level is restored afterwards.
(defun comma-reader (stream char)
  (declare (ignore char))
  (when (<= *bq-level* 0)
    (error "A comma appeared outside of a backquote"))
  (decf *bq-level*)
  (prog1
      (cons (case (peek-char nil stream t nil t)
              (#\@ (read-char stream t nil t) *comma-atsign*)
              (#\. (read-char stream t nil t) *comma-dot*)
              (otherwise *comma*))
            (bq-read stream))
    (incf *bq-level*)))
(defun bq-completely-process (x)
(bq-remove-tokens (bq-simplify (bq-process x))))
(defun bq-process (x)
  ;; Main backquote expansion pass (after "Backquote", Appendix C,
  ;; CLtL 2nd ed., extended for vectors).  Translates a form read
  ;; under ` into a tree built from the *BQ-...* marker tokens, which
  ;; BQ-SIMPLIFY / BQ-REMOVE-TOKENS later turn into ordinary code.
  (cond
    ;; `atom --> (quote atom)
    ((atom x) (list *bq-quote* x))
    ;; `#(...) --> build the vector; constant-fold when fully quoted.
    ((eq (car x) *bq-vector*)
     (let ((list (bq-completely-process (cdr x))))
       (if (eq 'QUOTE (car list))
           (list *bq-quote* (apply #'vector (cadr list)))
           (list 'APPLY `#'VECTOR list))))
    ;; `,form --> form
    ((eq (car x) *comma*) (cdr x))
    ;; `,@form --> ERROR
    ((eq (car x) *comma-atsign*) (error ",@~S after `" (cdr x)))
    ;; `,.form --> ERROR
    ((eq (car x) *comma-dot*) (error ",.~S after `" (cdr x)))
    ;; `(x1 x2 ... xn . tail) --> (append (bracket x1) ... tail')
    (t (do ((p x (cdr p))
            (q '() (cons (bracket (car p)) q)))
           ((atom p)
            (cons *bq-append*
                  (nreconc q (list (list *bq-quote* p)))))
         ;; `(x1 ... xn . ,form) --> (append ... form)
         (when (eq (car p) *comma*)
           (return (cons *bq-append* (nreconc q (list (cdr p))))))
         ;; `(x1 ... xn . ,@form) --> ERROR
         (when (eq (car p) *comma-atsign*) (error "Dotted ,@~s" (cdr p)))
         ;; `(x1 ... xn . ,.form) --> ERROR
         ;; (fixed: message previously said ",@" for the ",." case)
         (when (eq (car p) *comma-dot*) (error "Dotted ,.~s" (cdr p)))))))
(defun bracket (x)
(cond
((atom x) (list *bq-list* (bq-process x)))
((eq (car x) *comma*) (list *bq-list* (cdr x)))
((eq (car x) *comma-atsign*) (cdr x))
((eq (car x) *comma-dot*) (list *bq-clobberable* (cdr x)))
(t (list *bq-list* (bq-process x)))))
(defun maptree (fn x)
  ;; Apply FN to each CAR along the spine of list X and to the final
  ;; atomic CDR.  FN itself is responsible for descending into nested
  ;; structure (BQ-SIMPLIFY / BQ-REMOVE-TOKENS recurse via FN).
  ;; Structure is shared maximally: whenever FN returns values EQL to
  ;; the originals the existing conses are reused, so an unchanged
  ;; tree comes back EQ to X.
  (cond
    ((atom x) (funcall fn x))
    (t
     (let* ((new-head (funcall fn (car x)))
            (new-tail (maptree fn (cdr x))))
       (if (and (eql new-head (car x))
                (eql new-tail (cdr x)))
           x
           (cons new-head new-tail))))))
(defun bq-splicing-frob (x)
  ;; True iff X is a splicing marker form: a cons headed by the ,@ or
  ;; ,. token produced by COMMA-READER.
  (and (consp x)
       (let ((head (car x)))
         (or (eq head *comma-atsign*)
             (eq head *comma-dot*)))))
(defun bq-frob (x)
  ;; True iff X is any comma marker form: a cons headed by the
  ;; , or ,@ or ,. token produced by COMMA-READER.
  (and (consp x)
       (let ((head (car x)))
         (or (eq head *comma*)
             (eq head *comma-atsign*)
             (eq head *comma-dot*)))))
(defun bq-simplify (x)
(if (atom x)
x
(let ((x (if (eq (car x) *bq-quote*)
x
(maptree #'bq-simplify x))))
(if (not (eq (car x) *bq-append*))
x
(bq-simplify-args x)))))
(defun bq-simplify-args (x)
(do ((args (reverse (cdr x)) (cdr args))
(result
nil
(cond ((atom (car args))
(bq-attach-append *bq-append* (car args) result))
((and (eq (caar args) *bq-list*)
(notany #'bq-splicing-frob (cdar args)))
(bq-attach-conses (cdar args) result))
((and (eq (caar args) *bq-list**)
(notany #'bq-splicing-frob (cdar args)))
(bq-attach-conses
(reverse (cdr (reverse (cdar args))))
(bq-attach-append *bq-append*
(car (last (car args)))
result)))
((and (eq (caar args) *bq-quote*)
(consp (cadar args))
(not (bq-frob (cadar args)))
(null (cddar args)))
(bq-attach-conses (list (list *bq-quote*
(caadar args)))
result))
((eq (caar args) *bq-clobberable*)
(bq-attach-append *bq-nconc* (cadar args) result))
(t (bq-attach-append *bq-append*
(car args)
result)))))
((null args) result)))
(defun null-or-quoted (x)
  ;; True when X is NIL or a (*BQ-QUOTE* ...) form, i.e. when its
  ;; value is already known at backquote-expansion time.
  (if (null x)
      t
      (and (consp x) (eq (car x) *bq-quote*))))
;; Simplifier helper: produce the simplest form equivalent to
;; (OP ITEM RESULT), where OP is *BQ-APPEND* or *BQ-NCONC*.
;; Folds quoted constants together and flattens nested OP forms.
(defun bq-attach-append (op item result)
  (cond ((or (null result) (equal result *bq-quote-nil*))
         ;; Appending onto nothing: ITEM alone suffices, unless it is
         ;; a splicing frob, which still needs the OP wrapper.
         (if (bq-splicing-frob item) (list op item) item))
        ;; Both sides constant: fold the append at expansion time.
        ((and (null-or-quoted item) (null-or-quoted result))
         (list *bq-quote* (append (cadr item) (cadr result))))
        ;; RESULT is already an OP form: splice ITEM into its args.
        ((and (consp result) (eq (car result) op))
         (list* (car result) item (cdr result)))
        ;; General case: build (OP ITEM RESULT).
        (t (list op item result))))
(defun bq-attach-conses (items result)
(cond
((and (every #'null-or-quoted items)
(null-or-quoted result))
(list *bq-quote*
(append (mapcar #'cadr items) (cadr result))))
((or (null result) (equal result *bq-quote-nil*))
(cons *bq-list* items))
((and (consp result)
(or (eq (car result) *bq-list*)
(eq (car result) *bq-list**)))
(cons (car result) (append items (cdr result))))
(t (cons *bq-list** (append items (list result))))))
;; Final backquote pass: replace the uninterned marker symbols
;; (*BQ-LIST* etc.) throughout X by the ordinary operators LIST,
;; APPEND, NCONC, LIST* and QUOTE, strip *BQ-CLOBBERABLE* wrappers,
;; and turn a two-argument LIST* into CONS.
(defun bq-remove-tokens (x)
  (cond
    ((atom x) (cond
                ((eq x *bq-list*) 'list)
                ((eq x *bq-append*) 'append)
                ((eq x *bq-nconc*) 'nconc)
                ((eq x *bq-list**) 'list*)
                ((eq x *bq-quote*) 'quote)
                ;; Any other atom is left untouched.
                (T x)))
    ;; (*BQ-CLOBBERABLE* y) --> y (the "may be destroyed" marker is
    ;; only meaningful to the simplifier).
    ((eq (car x) *bq-clobberable*) (bq-remove-tokens (cadr x)))
    ;; (LIST* a b) --> (CONS a b)
    ((and (eq (car x) *bq-list**) (consp (cddr x)) (null (cdddr x)))
     (cons 'cons (maptree #'bq-remove-tokens (cdr x))))
    ;; Otherwise rewrite the whole spine recursively.
    (T (maptree #'bq-remove-tokens x))))
(defstruct (readtable (:copier nil)
(:predicate readtablep)
(:print-function
(lambda (readtable stream depth)
(declare (ignore readtable depth))
(write-string "#<readtable>" stream))))
fuer jedes Standard - Character ein Eintrag :
NIL (= ILLEGAL ) , WHITESPACE , CONSTITUENT , SINGLE - ESCAPE , MULTI - ESCAPE ,
(syntax (make-array char-code-limit :initial-element nil))
eine Association - List , die fuer jedes Dispatch - Character
(dispatch nil))
(defmacro get-fun (syntax) `(car ,syntax))
(defmacro terminating-p (syntax) `(null (cdr ,syntax)))
(defmacro get-syntax (c)
`(aref (readtable-syntax *readtable*) (char-code ,c)) )
(defun copy-readtable (&optional (from *readtable*) (to nil))
(when (null from) (setq from *standard-readtable*))
(when (null to) (setq to (make-readtable)))
(let ((syntax-from (readtable-syntax from))
(syntax-to (readtable-syntax to)))
(dotimes (i char-code-limit)
(setf (aref syntax-to i) (aref syntax-from i)))
(setf (readtable-dispatch to) nil)
(dolist (pair (readtable-dispatch from))
(when (car pair)
(copy-dispatch-macro-character pair from to))))
to)
(defun set-syntax-from-char (to-char from-char
&optional
(to-readtable *readtable*)
(from-readtable *standard-readtable*)
&aux pair)
(setf (aref (readtable-syntax to-readtable) (char-code to-char))
(aref (readtable-syntax from-readtable) (char-code from-char)))
evtl . , wenn dispatch - character ueberschrieben
(setq pair (assoc to-char (readtable-dispatch to-readtable)))
(when pair (setf (car pair) nil) (setf (cdr pair) nil))
wenn dispatch - character , Kopie der
Dispatch - Funktion erzeugen , die sich genauso wie die alte verhaelt .
(setq pair (assoc from-char (readtable-dispatch from-readtable)))
(when pair
(copy-dispatch-macro-character pair from-readtable to-readtable))
t)
(defun copy-dispatch-macro-character (pair from to)
(let ((c (car pair))
(dispatch-from (cdr pair))
dispatch-to)
(make-dispatch-macro-character c
(cdr (aref (readtable-syntax from)
(char-code c)))
to)
(setq dispatch-to (cdr (assoc c (readtable-dispatch to))))
(dotimes (i char-code-limit)
(setf (aref dispatch-to i) (aref dispatch-from i)))))
(defun set-macro-character (char function
&optional
non-terminating-p
(readtable *readtable*))
(setf (aref (readtable-syntax readtable) (char-code char))
(cons function non-terminating-p))
evtl . , wenn dispatch - character ueberschrieben
(let ((pair (assoc char (readtable-dispatch readtable))))
(when pair (setf (car pair) nil) (setf (cdr pair) nil)))
t)
(defun get-macro-character (char &optional (readtable *readtable*))
  ;; Return two values: the reader-macro function bound to CHAR in
  ;; READTABLE (or NIL when CHAR is not a macro character) and its
  ;; non-terminating-p flag.
  ;; Macro characters are stored in the syntax table as
  ;; (FUNCTION . NON-TERMINATING-P) conses, but ordinary syntax
  ;; classes are stored as bare symbols (WHITESPACE, CONSTITUENT,
  ;; SINGLE-ESCAPE, ...).  The original code only tested for non-NIL
  ;; and therefore took the CAR of such a symbol, signalling a type
  ;; error for every constituent character; we must test CONSP.
  (let ((entry (aref (readtable-syntax readtable) (char-code char))))
    (if (consp entry)
        (values (car entry) (cdr entry))
        (values nil nil))))
(defun make-dispatch-macro-character (char &optional
non-terminating-p
(readtable *readtable*))
(let ((dispatch-array (make-array char-code-limit :initial-element nil)))
(setf (aref (readtable-syntax readtable) (char-code char))
(cons
Dispatch - Reader
#'(lambda (stream char)
(let* ((i nil)
(c (read-char stream t nil t))
(d (digit-char-p c))
fun)
(when d
(setq i d)
(loop
(setq c (read-char stream t nil t))
(setq d (digit-char-p c))
(when (null d)
(when *read-suppress* (setq i nil))
(return))
(setq i (+ d (* 10 i)))))
(setq fun (aref dispatch-array (char-code (char-upcase c))))
(unless fun
(error "no ~S dispatch function defined for subchar ~S ~
(with arg ~S)" char c i))
(funcall fun stream c i)))
non-terminating-p))
(let ((pair (assoc char (readtable-dispatch readtable))))
(cond
(pair (setf (cdr pair) dispatch-array))
Eintrag neu
(T (push (cons char dispatch-array)
(readtable-dispatch readtable))))))
t)
(defun set-dispatch-macro-character (disp-char sub-char function
&optional
(readtable *readtable*))
(let ((dispatch-array
(cdr (assoc disp-char (readtable-dispatch readtable)))))
(unless dispatch-array
(error "~S is not a dispatch macro character" disp-char))
(setf (aref dispatch-array (char-code (char-upcase sub-char))) function))
t)
(defun get-dispatch-macro-character (disp-char sub-char
&optional
(readtable *readtable*))
(let ((dispatch-array
(cdr (assoc disp-char (readtable-dispatch readtable)))))
(unless dispatch-array
(error "~S is not a dispatch macro character" disp-char))
(aref dispatch-array (char-code (char-upcase sub-char)))))
(defun read-token (stream c)
(let ((multiple-escape nil)
syntax
(escape nil)
(colon nil)
colon-pos)
(loop
(setq syntax (get-syntax c))
(cond
((not multiple-escape)
(case syntax
(WHITESPACE
Changed by
(unread-char c stream)
(return))
(SINGLE-ESCAPE
(setq c (read-char stream nil nil))
(unless c
(error
"unexpected End of File after single escape"))
(setq escape t)
(vector-push-extend c *token*))
(MULTIPLE-ESCAPE (setq escape t multiple-escape t))
((nil) (error "illegal Character"))
(t
(when (and (consp syntax) (terminating-p syntax))
(unread-char c stream)
(return))
(when (eql #\: c)
(case colon
((nil) (setq colon 1)
(setq colon-pos (fill-pointer *token*)))
(1 (setq colon 2)
(unless (eql colon-pos
(1- (fill-pointer *token*)))
(setq colon 3)))
(t (setq colon 3))))
(vector-push-extend (char-upcase c) *token*))))
(T (case syntax
(SINGLE-ESCAPE
(setq c (read-char stream nil nil))
(unless c
(error "unexpected End of File after single escape"))
(vector-push-extend c *token*))
(MULTIPLE-ESCAPE (setq multiple-escape nil))
((NIL) (error "illegal character"))
(t (vector-push-extend c *token*)))))
(setq c (read-char stream nil nil))
(unless c
(when multiple-escape
(error "unexpected End of File after multiple escape"))
(when *read-suppress* (return-from read-token nil))
* token * als Zahl oder Symbol interpretieren
(let ((i 0)
(len (fill-pointer *token*))
(sign 1)
(num1 0) (num2 0.0)
(base *read-base*)
c)
(flet
(
liest eine
NIL , wenn 0 Ziffern gelesen wurden
INTEGER , die durch die Ziffernfolge repraesentiert
(read-digits (&aux x d)
(cond
((eql i len) nil)
(T (setq x (digit-char-p (aref *token* i) base))
(cond
((null x) nil)
(T (incf i)
(loop
(when (eql i len)
(return x))
(setq d (digit-char-p (aref *token* i)
base))
(when (null d)
(return x))
(incf i)
(setq x (+ (* base x) d))))))))
(read-sign ()
(if (eql i len)
1
(case (aref *token* i)
(#\- (incf i) -1)
(#\+ (incf i) 1)
(t 1))))
(read-float2 (d &aux x)
(loop
(when (eql i len)
(return))
(setq c (aref *token* i))
(setq x (digit-char-p c))
(when (null x) (return))
(incf i)
(setq num2 (+ num2 (/ x d)))
(setq d (* d 10.0)))
num2))
(tagbody
(when (or escape colon)
(go SYMBOL))
Wenn letztes Zeichen = # \. ,
(when (eql #\. (aref *token* (1- len)))
(setq base 10))
(setq sign (read-sign))
(setq num1 (read-digits))
(when (null num1) (go FLOAT-DOT))
(when (eql i len)
Integer
(return-from read-token (* sign num1)))
(setq c (aref *token* i)) (incf i)
(cond
((eql #\. c)
(when (eql i len) (return-from read-token (* sign num1)))
(unless (eql base 10)
(setq base 10)
(setq i 0)
(setq sign (read-sign))
(setq num1 (read-digits))
(incf i))
(setq num2 (read-float2 10.0))
(go FLOAT2))
((eql #\/ c) (go RATIO2))
(T (go FLOAT-EXPT)))
FLOAT-DOT
(setq num1 0)
(when (eql i len) (go SYMBOL))
(setq c (aref *token* i)) (incf i)
(unless (eql #\. c) (go SYMBOL))
(when (eql i len) (go SYMBOL))
(setq c (aref *token* i)) (incf i)
(setq num2 (digit-char-p c))
(when (null num2) (go SYMBOL))
(setq num2 (/ num2 10.0))
(setq num2 (read-float2 100.0))
FLOAT2
(when (eql i len)
(return-from read-token (* sign (+ num1 num2))))
(setq c (aref *token* i)) (incf i)
FLOAT-EXPT
(unless (member (char-downcase c) '(#\e #\s #\f #\d #\l))
(go SYMBOL))
(when (eql i len) (go SYMBOL))
(setq base 10)
(let ((e-sign (read-sign))
(expt (read-digits)))
(when (or (null expt) (< i len)) (go Symbol))
(return-from read-token (* sign
(+ num1 num2)
(expt 10 (* e-sign expt)))))
RATIO2
(setq num2 (read-digits))
(when (or (null num2) (< i len)) (go SYMBOL))
(let ((result (/ num1 num2)))
(warn "ratio ~a/~a has been read as ~s" num1 num2 result)
(return-from read-token result))
SYMBOL
(cond
kein Package angegeben
((null colon)
(unless escape
Pruefen ob der ' Dot ' einer Dotted - List vorliegt
(when (and (eql len 1)
(eql (aref *token* 0) #\.)
*dot-flag*)
(setq *dot-flag* nil)
(return-from read-token nil))
(do ((i 0 (1+ i)))
((>= i len) (error "illegal token ~S" *token*))
(unless (eql #\. (aref *token* i)) (return))))
(return-from read-token
(if *uninterned*
(make-symbol *token*)
(values (intern *token*)))))
(*uninterned* (error "token may not contain colons"))
KEYWORD
((eql colon-pos 0)
(unless (eql colon 1)
(error "illegal token ~S" *token*))
(return-from read-token
(values (intern (subseq *token* 1)
*keyword-package*))))
(T (let* ((package-name (subseq *token* 0 colon-pos))
(package (find-package package-name))
(symbol-name
(subseq *token* (+ colon-pos colon))))
(unless package
(error "illegal package-name ~S" package-name))
(case colon
(2 (return-from read-token (values (intern symbol-name
package))))
(1 (multiple-value-bind (symbol where)
(find-symbol symbol-name package)
(unless (eql :external where)
(error "can't find the external symbol ~S in ~S"
symbol-name package))
(return-from read-token symbol)))
(T (error "illegal Token ~S" *token*)))))))))))
liest ein token
(defun ignore-token (stream)
  ;; Read one token from STREAM and throw it away (used while
  ;; *READ-SUPPRESS* is true).  Always returns NIL.
  (let ((first-char (read-char stream t nil t)))
    (read-token stream first-char))
  nil)
die schliessende Klammer wird mittels ( read ) und nicht mit ( peek - char )
eingelesen , weil evtl . hinter dem letzten Element der Liste noch
(defun cons-reader (stream char)
(declare (ignore char))
(let ((*dot-flag* t)
(*parenthesis-open* #\))
list
last-cons
x)
(setq x (bq-read stream))
(when (null *parenthesis-open*)
(return-from cons-reader nil))
(when (null *dot-flag*)
(error "Nothing appears before the dot in a list"))
(setq list (setq last-cons (cons x nil)))
(loop
(setq x (bq-read stream))
(when (null *parenthesis-open*)
(return list))
(cond
kein Dot gelesen
(*dot-flag* (setq last-cons (setf (cdr last-cons) (cons x nil))))
Dot gelesen , Sonderbehandlung
(T (setf (cdr last-cons) (bq-read stream))
(when (null *parenthesis-open*)
(error "Nothing appears after the dot in a list"))
(bq-read stream)
(unless (null *parenthesis-open*)
(error "More than one object found after dot in a list"))
(return list))))))
(defun right-parenthesis-reader (stream char)
(declare (ignore stream))
(cond
((eql char *parenthesis-open*) (setq *parenthesis-open* nil) nil)
(T (warn "Ignoring an unmatched ~a" char) (values))))
(defun quote-reader (stream char)
(declare (ignore char))
(list 'QUOTE (bq-read stream)))
(defun semicolon-reader (stream char)
(declare (ignore char))
(read-line stream nil nil nil)
(values))
(defun string-reader (stream char &aux c)
(setf (fill-pointer *token*) 0)
(loop
(setq c (read-char stream t nil t))
(cond
((eql (get-syntax c) 'SINGLE-ESCAPE)
(setq c (read-char stream t nil t))
(vector-push-extend c *token*))
((eql char c)
(return (copy-seq *token*)))
(T (vector-push-extend c *token*)))))
(defun char-reader (stream char font)
(declare (ignore char))
(let ((c (read-char stream t nil t))
(c2 (peek-char nil stream nil nil t)))
( 353 ): In the single - character case ,
(when (and c2 (eql 'CONSTITUENT (get-syntax c2)))
(setf (fill-pointer *token*) 0)
(let ((*read-suppress* t))
(read-token stream c))
(unless *read-suppress*
(setq c (name-char *token*))
(when (null c) (error "illegal character name ~s" *token*))))
(cond
(*read-suppress* nil)
(t (when (and font (> font 0))
(warn "font ~s of ~s will be ignored" font c))
c))))
(defun function-reader (stream char i)
(when i (extra-argument char))
(list 'function (bq-read stream)))
(defun vector-reader (stream char len)
(let ((list (cons-reader stream char)))
(cond (*read-suppress* nil)
((zerop *bq-level*)
(if len
(let ((listlen (length list)))
(when (> listlen len)
(error "Vector longer than specified length: #~S~S"
len list))
(fill (replace (make-array len) list)
(car (last list)) :start listlen))
(apply #'vector list)))
(len
(error "#~s( syntax is not allowed in backquoted expressions" len))
(t (cons *bq-vector* list)))))
(defun binary-reader (stream char i)
(when i (extra-argument char))
(radix-reader stream char 2))
(defun octal-reader (stream char i)
(when i (extra-argument char))
(radix-reader stream char 8))
(defun hex-reader (stream char i)
(when i (extra-argument char))
(radix-reader stream char 16))
(defun radix-reader (stream char radix)
  ;; Common worker for #B, #O, #X and #nR: read the next object with
  ;; *READ-BASE* bound to RADIX and return it.  Errors when RADIX is
  ;; missing (#R without a prefix argument), outside 2..36, or when
  ;; the object read is not a rational.  Under *READ-SUPPRESS* the
  ;; token is merely consumed.
  (cond (*read-suppress* (ignore-token stream))
        ((not radix)
         (error "Radix missing in #R."))
        ((not (<= 2 radix 36))
         (error "Illegal radix for #R: ~D." radix))
        (t
         (let ((res (let ((*read-base* radix))
                      (read stream t nil t))))
           ;; Fixed: the original signalled this error unconditionally
           ;; and referenced RES outside its LET (unbound variable).
           (unless (rationalp res)
             (error "#~A (base ~D) value is not a rational: ~S."
                    char radix res))
           res))))
(defun uninterned-reader (stream char i)
(when i (extra-argument char))
(let* ((*uninterned* T)
(value (read stream t nil t)))
(cond
(*read-suppress* nil)
((symbolp value) value)
(T (error "illegal value (~S) followed #:" value)))))
(defun array-reader (stream char n)
(cond
(*read-suppress* (read stream t nil t) nil)
((null n) (error "Rank argument must be supplied to #~a" char))
(t (let ((list (read stream t nil t)))
(labels
((calc-dim (rank list)
(cond
((eql 0 rank) ())
(T (cons (length list) (calc-dim (1- rank) (car list)))))))
(make-array (calc-dim n list) :initial-contents list))))))
(defun struct-reader (stream char i)
(when i (extra-argument char))
(let ((list (read stream t nil t)))
(cond
(*read-suppress* nil)
((atom list) (error "illegal value (~s) followed #S" list))
(T (let ((constructor (rt:struct-constructor (car list)))
(key-value-list (cdr list)))
(unless constructor (error "~a is not a structure" (car list)))
(do ((result ()))
((endp key-value-list) (apply constructor (nreverse result)))
(push (intern (string (car key-value-list)) *keyword-package*)
result)
(pop key-value-list)
(when (endp key-value-list)
(error "unexpected end in #S~s" key-value-list))
(push (car key-value-list) result)
(pop key-value-list)))))))
(defun feature-plus-reader (stream char i)
(declare (ignore char))
(when i (extra-argument #\+))
(let ((feature
(let ((*package* *keyword-package*))
(read stream t nil t))))
(if (eval-feature feature)
(bq-read stream)
(let ((*read-suppress* t))
(bq-read stream)
(values)))))
(defun feature-minus-reader (stream char i)
(declare (ignore char))
(when i (extra-argument #\-))
(let ((feature
(let ((*package* *keyword-package*))
(read stream t nil t))))
(if (eval-feature feature)
(let ((*read-suppress* t))
(bq-read stream)
(values))
(bq-read stream))))
(defun eval-feature (feature)
  ;; Evaluate a #+/#- feature expression against *FEATURES*.
  ;; An atom tests membership; (NOT x), (AND ...) and (OR ...) combine
  ;; sub-expressions; anything else is an error.
  (cond
    ((atom feature)
     (member feature *features*))
    (t
     (case (first feature)
       (not (not (eval-feature (second feature))))
       (and (if (every #'eval-feature (rest feature)) t nil))
       (or  (if (some  #'eval-feature (rest feature)) t nil))
       (t (error "illegal feature expression ~s" feature))))))
(defun comment-block-reader (stream char i)
(declare (ignore char))
(when i (extra-argument #\|))
(let ((c (read-char stream t nil t))
(level 0))
(loop
(cond
((eql #\| c)
(setq c (read-char stream t nil t))
(when (eql #\# c)
(when (eql 0 level) (return (values)))
(decf level)
(setq c (read-char stream t nil t))))
((eql #\# c)
(setq c (read-char stream t nil t))
(when (eql #\| c)
(incf level)
(setq c (read-char stream t nil t))))
(T (setq c (read-char stream t nil t)))))))
(defun extra-argument (c)
  ;; Signal an error for a dispatch macro that received a numeric
  ;; prefix argument it does not accept (e.g. #5').
  (error "extra argument for #~S" c))
(defun bq-read (stream)
(internal-read stream t nil t))
(defun internal-read (stream eof-error-p eof-value recursive-p)
(prog1
(loop
(let ((c (read-char stream nil nil))
syntax)
(when (null c)
(when (or eof-error-p recursive-p)
(error "unexpected End of File"))
(return-from internal-read eof-value))
(setq syntax (get-syntax c))
(case syntax
((CONSTITUENT SINGLE-ESCAPE MULTIPLE-ESCAPE)
(return (progn (setf (fill-pointer *token*) 0)
(read-token stream c))))
((nil) (error "illegal Character"))
( Non- ) Terminating - Macro
(T (multiple-value-call
#'(lambda (&optional (v nil supplied))
(when supplied
(return v)))
(funcall (get-fun syntax) stream c))))))
(when (and (not recursive-p) (not *preserve-whitespace*))
(let ((c (read-char stream nil nil)))
(cond
EOF ( Annahme : stream )
((null c))
auf Top - Level
((eql (get-syntax c) 'WHITESPACE))
Syntaktisch relevante
(t (unread-char c stream)))))))
(defun read (&optional (stream *standard-input*)
(eof-error-p t)
(eof-value nil)
(recursive-p nil))
(case stream
((nil) (setq stream *standard-input*))
((t) (setq stream *terminal-io*)))
(let ((*bq-level* 0))
(if recursive-p
(internal-read stream eof-error-p eof-value recursive-p)
(let ((*preserve-whitespace* nil))
(internal-read stream eof-error-p eof-value recursive-p)))))
(defun read-preserving-whitespace (&optional (stream *standard-input*)
(eof-error-p t)
(eof-value nil)
(recursive-p nil))
(case stream
((nil) (setq stream *standard-input*))
((t) (setq stream *terminal-io*)))
(let ((*bq-level* 0))
(if recursive-p
(internal-read stream eof-error-p eof-value recursive-p)
(let ((*preserve-whitespace* t))
(internal-read stream eof-error-p eof-value recursive-p)))))
(defun read-delimited-list (char &optional stream recursive-p)
(case stream
((nil) (setq stream *standard-input*))
((T) (setq stream *terminal-io*)))
(labels
((read-rest ()
(if (eql char (peek-char t stream t nil t))
(progn (read-char stream) nil)
(cons (read stream t nil t) (read-rest)))))
(read-rest)))
2 Werte : 1 . string , 2 . ( member nil t ) , T = = eof
(defun read-line (&optional stream (eof-error-p t) eof-value recursive-p)
(case stream
((nil) (setq stream *standard-input*))
((T) (setq stream *terminal-io*)))
(let ((c (read-char stream nil nil)))
(cond
((null c)
(when eof-error-p (error "unexpected end of file"))
(values eof-value t))
(T (setf (fill-pointer *token*) 0)
(loop
(when (eql #\Newline c) (return (values (copy-seq *token*) nil)))
(vector-push-extend c *token*)
(setq c (read-char stream nil nil))
(when (null c) (return (values (copy-seq *token*) t))))))))
(defun peek-char (&optional peek-type stream (eof-error-p t)
                            eof-value recursive-p)
  ;; CLtL PEEK-CHAR.  PEEK-TYPE NIL peeks at the very next character;
  ;; PEEK-TYPE T first skips whitespace (per the current readtable);
  ;; a character PEEK-TYPE skips everything up to the first occurrence
  ;; of that character.  The peeked character is pushed back with
  ;; UNREAD-CHAR so the next READ-CHAR delivers it again.
  (case stream
    ((nil) (setq stream *standard-input*))
    ((T) (setq stream *terminal-io*)))
  (let ((c (read-char stream nil nil recursive-p)))
    (cond
      ((eq t peek-type)
       ;; Skip leading whitespace characters.
       (loop
         (when (null c) (return))
         (unless (eql 'WHITESPACE (get-syntax c))
           (return))
         (setq c (read-char stream nil nil recursive-p))))
      ((characterp peek-type)
       ;; Skip until PEEK-TYPE itself appears.  (Fixed: the original
       ;; test was inverted and skipped characters EQL to PEEK-TYPE,
       ;; stopping at the first mismatch instead.)
       (loop
         (when (null c) (return))
         (when (eql peek-type c)
           (return))
         (setq c (read-char stream nil nil recursive-p)))))
    ;; EOF handling fixed: the original computed EOF-VALUE, discarded
    ;; it, and then called UNREAD-CHAR on NIL.
    (if (null c)
        (progn
          (when eof-error-p (error "unexpected end of file"))
          eof-value)
        (prog1 c
          (unread-char c stream)))))
(defun read-from-string (string &optional (eof-error-p t) eof-value
&key (start 0) (end (length string))
((:preserve-whitespace *preserve-whitespace*))
&aux index (*bq-level* 0))
(with-input-from-string (stream string :index index :start start :end end)
(values (internal-read stream eof-error-p eof-value nil) index)))
(defun parse-integer (string &key (start 0) (end (length string))
                             (radix 10) (junk-allowed nil))
  ;; CLtL PARSE-INTEGER: parse an optionally signed integer in RADIX
  ;; from STRING between START and END, skipping surrounding
  ;; whitespace.  Returns two values: the integer (or NIL when
  ;; JUNK-ALLOWED and no integer was found) and the index where
  ;; parsing stopped.  Without JUNK-ALLOWED, anything but trailing
  ;; whitespace signals an error.
  (prog (c x d sign)
     ;; Skip leading whitespace.  (Fixed: the original loop never
     ;; advanced START and looped forever on leading whitespace.)
     (loop
       (when (>= start end) (go NO-INTEGER))
       (setq c (char string start))
       (unless (eq 'WHITESPACE (get-syntax c))
         (return))
       (incf start))
     ;; Optional sign.
     (setq sign (case c
                  (#\- (incf start) -1)
                  (#\+ (incf start) 1)
                  (t 1)))
     ;; At least one digit is required.
     (when (>= start end) (go NO-INTEGER))
     ;; Fixed: original said (char start start) -- STRING was missing.
     (setq x (digit-char-p (char string start) radix))
     (cond
       ((null x) (go NO-INTEGER))
       (T (incf start)
          ;; Accumulate the remaining digits.
          (loop
            (when (eql start end)
              (return))
            (setq d (digit-char-p (char string start) radix))
            (when (null d)
              (return))
            (incf start)
            (setq x (+ (* radix x) d)))))
     ;; Fixed: the original computed SIGN but never applied it.
     (setq x (* sign x))
     (cond
       (junk-allowed (return (values x start)))
       ;; Without JUNK-ALLOWED only trailing whitespace may follow.
       ;; (Fixed: this loop also failed to advance START.)
       (T (loop
            (when (>= start end) (return))
            (setq c (char string start))
            (unless (eq 'WHITESPACE (get-syntax c)) (go ERROR))
            (incf start))
          (return (values x start))))
   NO-INTEGER
     (when junk-allowed
       (return (values nil start)))
   ERROR
     (error "illegal integer ~S" string)))
(defun make-standard-readtable ()
(let* ((rtab (make-readtable))
(syntax (readtable-syntax rtab)))
(dolist (whitespace-char '(#\tab #\newline #\page #\return #\space))
(setf (aref syntax (char-code whitespace-char)) 'WHITESPACE))
(setf (aref syntax (char-code #\backspace)) 'CONSTITUENT)
(do ((i (char-code #\!) (1+ i)))
((>= i (char-code #\rubout)))
(setf (aref syntax i) 'CONSTITUENT))
(setf (aref syntax (char-code #\\)) 'SINGLE-ESCAPE)
(setf (aref syntax (char-code #\|)) 'MULTIPLE-ESCAPE)
(make-dispatch-macro-character #\# T rtab)
(set-dispatch-macro-character #\# #\\ #'char-reader rtab)
(set-dispatch-macro-character #\# #\' #'function-reader rtab)
(set-dispatch-macro-character #\# #\( #'vector-reader rtab)
(set-dispatch-macro-character #\# #\: #'uninterned-reader rtab)
(set-dispatch-macro-character #\# #\B #'binary-reader rtab)
(set-dispatch-macro-character #\# #\O #'octal-reader rtab)
(set-dispatch-macro-character #\# #\X #'hex-reader rtab)
(set-dispatch-macro-character #\# #\R #'radix-reader rtab)
(set-dispatch-macro-character #\# #\A #'array-reader rtab)
(set-dispatch-macro-character #\# #\S #'struct-reader rtab)
(set-dispatch-macro-character #\# #\+ #'feature-plus-reader rtab)
(set-dispatch-macro-character #\# #\- #'feature-minus-reader rtab)
(set-dispatch-macro-character #\# #\| #'comment-block-reader rtab)
(set-macro-character #\' #'quote-reader NIL rtab)
(set-macro-character #\( #'cons-reader NIL rtab)
(set-macro-character #\) #'right-parenthesis-reader NIL rtab)
(set-macro-character #\, #'comma-reader NIL rtab)
# ' semicolon - reader NIL rtab )
(set-macro-character #\" #'string-reader NIL rtab)
(set-macro-character #\` #'backquote-reader NIL rtab)
rtab))
|
3b7373c471208622a7f376793e32630fe3e79122e56617925f7ce22a3f5cb293 | dalong0514/ITstudy | 0101Utils-DumpObjectV1-2.lisp | Dump Object -
Lists the ActiveX properties & methods of a supplied VLA - Object
or VLA - Object equivalent of a supplied ename , handle , or DXF data list
;; obj - [vla/ent/lst/str] VLA-Object, Entity Name, DXF List, or Handle
(defun c:dump nil (LM:dump (car (entsel))))
(defun c:dumpn nil (LM:dump (car (nentsel))))
;; LM:dump -- dump the ActiveX properties & methods of the VLA-object
;; corresponding to ARG.  Dispatches on the type of ARG:
;;   vla-object -> dumped directly with vlax-dump-object
;;   ename      -> converted via vlax-ename->vla-object, then recurse
;;   list       -> DXF data list; recurse on the -1 (ename) pair
;;   str        -> entity handle; recurse on the ename from handent
;; Any other type is silently ignored.  Returns nothing (princ).
(defun LM:dump ( arg )
    (cond
        (   (= 'vla-object (type arg))
            (vlax-dump-object arg t)
        )
        (   (= 'ename (type arg))
            (LM:dump (vlax-ename->vla-object arg))
        )
        (   (= 'list (type arg))
            (LM:dump (cdr (assoc -1 arg)))
        )
        (   (= 'str (type arg))
            (LM:dump (handent arg))
        )
    )
    (princ)
)
(vl-load-com) (princ) | null | https://raw.githubusercontent.com/dalong0514/ITstudy/8a7f1708d11856a78016795268da67b6a7521115/004%E7%BC%96%E7%A8%8B%E8%AF%AD%E8%A8%80/07AutoLisp/04LeeMac-Library/0101Utils-DumpObjectV1-2.lisp | lisp | obj - [vla/ent/lst/str] VLA-Object, Entity Name, DXF List, or Handle
| Dump Object -
Lists the ActiveX properties & methods of a supplied VLA - Object
or VLA - Object equivalent of a supplied ename , handle , or DXF data list
(defun c:dump nil (LM:dump (car (entsel))))
(defun c:dumpn nil (LM:dump (car (nentsel))))
(defun LM:dump ( arg )
(cond
( (= 'vla-object (type arg))
(vlax-dump-object arg t)
)
( (= 'ename (type arg))
(LM:dump (vlax-ename->vla-object arg))
)
( (= 'list (type arg))
(LM:dump (cdr (assoc -1 arg)))
)
( (= 'str (type arg))
(LM:dump (handent arg))
)
)
(princ)
)
(vl-load-com) (princ) |
f861ebde6a4576846e6553f31480aa0eb2e161427b412c331049ed9f2a2e0c47 | jakemcc/sicp-study | ex4_13.clj | Exercise 4.13
Functionality has been added to environment.clj , environment_test.clj ,
; section4.clj and section4_test.clj
; I decided to only have the variable become unbound in the current
scope ( whatever the first frame of the environment is ) . This seems to make
; the most sense to me. Being able to traverse all the frames to remove
; a binding seems like the wrong thing. Especially when you consider that
; a variable could be bound at different frames. | null | https://raw.githubusercontent.com/jakemcc/sicp-study/3b9e3d6c8cc30ad92b0d9bbcbbbfe36a8413f89d/clojure/section4.1/src/ex4_13.clj | clojure | section4.clj and section4_test.clj
I decided to only have the variable become unbound in the current
the most sense to me. Being able to traverse all the frames to remove
a binding seems like the wrong thing. Especially when you consider that
a variable could be bound at different frames. | Exercise 4.13
Functionality has been added to environment.clj , environment_test.clj ,
scope ( whatever the first frame of the environment is ) . This seems to make |
8dfed27ed46bfdec984c175d23b2d079a605ff84be05e313610eba810497fd8b | lilactown/dom | dom.clj | (ns town.lilac.dom
"Macros for creating DOM expressions. See `$` for usage.
Additional macros like `div`, `input`, `button` allow quick & easy creation of
specific tags.
The code emitted by `$` and friends is side effecting. You do not need to keep
the value returned by `$` or any of the specific DOM macros.
\"incremental-dom\" keeps track of the elements created and diffs the result
against the DOM nodes on the page during `patch`.
Calling `$` and friends outside of a `patch` call is a runtime error."
(:refer-clojure :exclude [map meta time]))
(def void-tags
#{"area"
"base"
"br"
"col"
"embed"
"hr"
"img"
"input"
"link"
"meta"
"param"
"source"
"track"
"wbr"})
(defmacro $
"Core macro for creating DOM expressions. Emits code that uses Google's
\"incremental-dom\" library to create, diff and patch the DOM nodes on the
page.
`tag` (string) is the HTML tag you want to open. Optionally, a map of
attributes may be passed in the second position to configure the resulting DOM
node.
For non-void tags, any other type and/or any additional arguments are emitted
between the open and close tag calls. E.g. ($ \"div\" ($ \"input\")) will
place the input inside of the div.
Void tags (i.e. tags that do not close, for instance \"input\") do not emit
any of its args."
[tag & args]
(let [[attrs children] (if (map? (first args))
[(first args) (rest args)]
[nil args])
[key attrs] (if-let [key (:key attrs)]
[key (dissoc attrs :key)]
[nil attrs])
attrs (cond
(contains? attrs :&) `(merge ~(dissoc attrs :&)
~(:& attrs))
(contains? attrs '&) `(merge ~(dissoc attrs '&)
~('& attrs))
:else attrs)]
(if (contains? void-tags tag)
`(void ~tag ~key ~attrs)
`(do
(open ~tag ~key ~attrs)
~@children
(close ~tag)))))
(declare
input textarea option select a abbr address area article aside audio b base bdi
bdo big blockquote body br button canvas caption cite code col colgroup data datalist
dd del details dfn dialog div dl dt em embed fieldset figcaption figure footer form
h1 h2 h3 h4 h5 h6 head header hr html i iframe img ins kbd keygen label legend li link
main map mark menu menuitem meta meter nav noscript object ol optgroup output p param
picture pre progress q rp rt ruby s samp script section small source span strong style
sub summary sup table tbody td tfoot th thead time title tr track u ul var video wbr
circle clipPath ellipse g line mask path pattern polyline rect svg defs
linearGradient polygon radialGradient stop tspan)
(def tags
'[input textarea option select a abbr address area article aside audio
b base bdi bdo big blockquote body br button canvas caption cite code col
colgroup data datalist dd del details dfn dialog div dl dt em embed fieldset
figcaption figure footer form h1 h2 h3 h4 h5 h6 head header hr html i iframe
img ins kbd keygen label legend li link main map mark menu menuitem meta
meter nav noscript object ol optgroup output p param picture pre progress q
rp rt ruby s samp script section small source span strong style sub summary
sup table tbody td tfoot th thead time title tr track u ul var video wbr])
(defn gen-tag
[tag]
`(defmacro ~tag [& args#]
`($ ~(str '~tag) ~@args#)))
(defmacro gen-tags
[]
`(do
~@(for [tag tags]
(gen-tag tag))))
(gen-tags)
(defmacro buffer
[& body]
`(binding [*buffer* (cljs.core/array)]
~@body
(flush!)))
(defmacro try
  "Like clojure.core's `try` special form, but wraps the body in
  `buffer` so DOM calls are buffered and flushed before the catch
  clause can run.  NOTE(review): assumes the LAST form in `body` is a
  literal `(catch ...)` (or `(finally ...)`) clause -- it is spliced
  after the buffered body unchanged; confirm callers always supply
  one."
  [& body]
  (let [catch (last body)
        body (drop-last body)]
    ;; `try` in the expansion resolves to the special form, not this
    ;; macro: special forms take precedence over macros in Clojure.
    `(try
       (buffer ~@body)
       ~catch)))
;; async runtime
(defmacro async
[& body]
(let [fallback (last body)
body (drop-last body)
fn-sym (gensym "async-fn")]
(when (not= 'fallback (first fallback))
(throw
(ex-info "async expr requires (fallback ,,,) as last expression in body"
{:body body})))
`((fn ~fn-sym []
(let [buffer# (cljs.core/array)
parent# (get-current-element)]
(try
(buffer ~@body)
(catch js/Promise e#
(let [fallback-id# (gensym "fallback")
TODO assert that fallback is a single element
cmt1# (html-comment (str fallback-id#))
el# (do ~@(rest fallback))
cmt2# (html-comment (str "/" fallback-id#))]
(.then e# (fn [result#]
(patch-range cmt1# cmt2# ~fn-sym)))))))))))
| null | https://raw.githubusercontent.com/lilactown/dom/eed9f4db8777bc9f32a300523404654d5496cc16/src/town/lilac/dom.clj | clojure | async runtime | (ns town.lilac.dom
"Macros for creating DOM expressions. See `$` for usage.
Additional macros like `div`, `input`, `button` allow quick & easy creation of
specific tags.
The code emitted by `$` and friends is side effecting. You do not need to keep
the value returned by `$` or any of the specific DOM macros.
\"incremental-dom\" keeps track of the elements created and diffs the result
against the DOM nodes on the page during `patch`.
Calling `$` and friends outside of a `patch` call is a runtime error."
(:refer-clojure :exclude [map meta time]))
(def void-tags
#{"area"
"base"
"br"
"col"
"embed"
"hr"
"img"
"input"
"link"
"meta"
"param"
"source"
"track"
"wbr"})
(defmacro $
"Core macro for creating DOM expressions. Emits code that uses Google's
\"incremental-dom\" library to create, diff and patch the DOM nodes on the
page.
`tag` (string) is the HTML tag you want to open. Optionally, a map of
attributes may be passed in the second position to configure the resulting DOM
node.
For non-void tags, any other type and/or any additional arguments are emitted
between the open and close tag calls. E.g. ($ \"div\" ($ \"input\")) will
place the input inside of the div.
Void tags (i.e. tags that do not close, for instance \"input\") do not emit
any of its args."
[tag & args]
(let [[attrs children] (if (map? (first args))
[(first args) (rest args)]
[nil args])
[key attrs] (if-let [key (:key attrs)]
[key (dissoc attrs :key)]
[nil attrs])
attrs (cond
(contains? attrs :&) `(merge ~(dissoc attrs :&)
~(:& attrs))
(contains? attrs '&) `(merge ~(dissoc attrs '&)
~('& attrs))
:else attrs)]
(if (contains? void-tags tag)
`(void ~tag ~key ~attrs)
`(do
(open ~tag ~key ~attrs)
~@children
(close ~tag)))))
(declare
input textarea option select a abbr address area article aside audio b base bdi
bdo big blockquote body br button canvas caption cite code col colgroup data datalist
dd del details dfn dialog div dl dt em embed fieldset figcaption figure footer form
h1 h2 h3 h4 h5 h6 head header hr html i iframe img ins kbd keygen label legend li link
main map mark menu menuitem meta meter nav noscript object ol optgroup output p param
picture pre progress q rp rt ruby s samp script section small source span strong style
sub summary sup table tbody td tfoot th thead time title tr track u ul var video wbr
circle clipPath ellipse g line mask path pattern polyline rect svg defs
linearGradient polygon radialGradient stop tspan)
(def tags
'[input textarea option select a abbr address area article aside audio
b base bdi bdo big blockquote body br button canvas caption cite code col
colgroup data datalist dd del details dfn dialog div dl dt em embed fieldset
figcaption figure footer form h1 h2 h3 h4 h5 h6 head header hr html i iframe
img ins kbd keygen label legend li link main map mark menu menuitem meta
meter nav noscript object ol optgroup output p param picture pre progress q
rp rt ruby s samp script section small source span strong style sub summary
sup table tbody td tfoot th thead time title tr track u ul var video wbr])
(defn gen-tag
[tag]
`(defmacro ~tag [& args#]
`($ ~(str '~tag) ~@args#)))
(defmacro gen-tags
[]
`(do
~@(for [tag tags]
(gen-tag tag))))
(gen-tags)
(defmacro buffer
[& body]
`(binding [*buffer* (cljs.core/array)]
~@body
(flush!)))
(defmacro try
[& body]
(let [catch (last body)
body (drop-last body)]
`(try
(buffer ~@body)
~catch)))
(defmacro async
[& body]
(let [fallback (last body)
body (drop-last body)
fn-sym (gensym "async-fn")]
(when (not= 'fallback (first fallback))
(throw
(ex-info "async expr requires (fallback ,,,) as last expression in body"
{:body body})))
`((fn ~fn-sym []
(let [buffer# (cljs.core/array)
parent# (get-current-element)]
(try
(buffer ~@body)
(catch js/Promise e#
(let [fallback-id# (gensym "fallback")
TODO assert that fallback is a single element
cmt1# (html-comment (str fallback-id#))
el# (do ~@(rest fallback))
cmt2# (html-comment (str "/" fallback-id#))]
(.then e# (fn [result#]
(patch-range cmt1# cmt2# ~fn-sym)))))))))))
|
50b1c9625b3183a5c5d4f730f515ed27ece5f79f3fc099ddaa5a35efe5cbdb10 | acieroid/scala-am | car-counter.scm | (define foldr
(lambda (f base lst)
(define foldr-aux
(lambda (lst)
(if (null? lst)
base
(f (car lst) (foldr-aux (cdr lst))))))
(foldr-aux lst)))
(define result '())
(define display2 (lambda (i) (set! result (cons i result))))
(define newline2 (lambda () (set! result (cons 'newline result))))
(define error2 (lambda (e) (set! result (cons (list 'error e) result))))
(define (maak-buffer)
(let ((inhoud '()))
(define (newValue value)
(set! inhoud (append inhoud (list value))))
(define (returnSum)
(foldr + 0 inhoud))
(define (flush)
(set! inhoud '()))
(define (value pos)
(list-ref inhoud pos))
(define (dispatch msg)
(cond ((eq? msg 'newValue) newValue)
((eq? msg 'return) inhoud)
((eq? msg 'returnSum) (returnSum))
((eq? msg 'flush) (flush))
((eq? msg 'value) value)
((eq? msg 'size) (length inhoud))
(else (error "wrong message"))))
dispatch))
(define buffer (maak-buffer))
((buffer 'newValue) 3)
((buffer 'newValue) 9)
(define res1 (and (= (buffer 'returnSum) 12)
(equal? (buffer 'return) '(3 9))
(begin (buffer 'flush))
(null? (buffer 'return))))
(define (make-counter)
(let ((state 0))
(define (increment) (set! state (+ state 1)))
(define (read) state)
(define (reset) (set! state 0))
(define (dispatch msg)
(cond ((eq? msg 'increment) (increment))
((eq? msg 'read) (read))
((eq? msg 'reset) (reset))
(else (error "wrong message"))))
dispatch))
(define (maak-verkeersteller)
(let ((voorbijgereden (make-counter))
(buffer (maak-buffer)))
(define (newCar)
(voorbijgereden 'increment))
(define (newHour)
((buffer 'newValue) (voorbijgereden 'read))
(voorbijgereden 'reset))
(define (newDay)
(define (loop start end)
(cond ((= start end) (newline))
(else (display2 "Tussen ") (display2 start)
(display2 " en ") (display2 (+ start 1))
(display2 " uur : ")
(display2 ((buffer 'value) start))
(display2 " auto's")
(newline2)
(loop (+ start 1) end))))
(if (= (buffer 'size) 24)
(begin (loop 0 24)
(buffer 'flush)
(voorbijgereden 'reset))
(error2 "no 24 hours have passed")))
(define (dispatch msg)
(cond ((eq? msg 'newCar) (newCar))
((eq? msg 'newHour) (newHour))
((eq? msg 'newDay) (newDay))
(else (error2 "wrong message"))))
dispatch))
(define verkeersteller (maak-verkeersteller))
(verkeersteller 'newCar)
(verkeersteller 'newCar)
(verkeersteller 'newHour)
(verkeersteller 'newHour)
(verkeersteller 'newCar)
(verkeersteller 'newCar)
(verkeersteller 'newCar)
(verkeersteller 'newHour)
(verkeersteller 'newHour)
(verkeersteller 'newHour)
(verkeersteller 'newCar)
(verkeersteller 'newHour)
(verkeersteller 'newHour)
(verkeersteller 'newHour)
(verkeersteller 'newCar)
(verkeersteller 'newCar)
(verkeersteller 'newHour)
(verkeersteller 'newCar)
(verkeersteller 'newCar)
(verkeersteller 'newHour)
(verkeersteller 'newCar)
(verkeersteller 'newHour)
(verkeersteller 'newHour)
(verkeersteller 'newHour)
(verkeersteller 'newHour)
(verkeersteller 'newCar)
(verkeersteller 'newHour)
(verkeersteller 'newCar)
(verkeersteller 'newHour)
(verkeersteller 'newHour)
(verkeersteller 'newCar)
(verkeersteller 'newHour)
(verkeersteller 'newHour)
(verkeersteller 'newCar)
(verkeersteller 'newHour)
(verkeersteller 'newHour)
(verkeersteller 'newCar)
(verkeersteller 'newCar)
(verkeersteller 'newHour)
(verkeersteller 'newCar)
(verkeersteller 'newHour)
(verkeersteller 'newCar)
(verkeersteller 'newDay)
(verkeersteller 'newHour)
(verkeersteller 'newDay)
(equal? result '(newline
newline
" auto's"
1
" uur : "
24
" en "
23
"Tussen "
newline
" auto's"
1
" uur : "
23
" en "
22
"Tussen "
newline
" auto's"
2
" uur : "
22
" en "
21
"Tussen "
newline
" auto's"
0
" uur : "
21
" en "
20
"Tussen "
newline
" auto's"
1
" uur : "
20
" en "
19
"Tussen "
newline
" auto's"
0
" uur : "
19
" en "
18
"Tussen "
newline
" auto's"
1
" uur : "
18
" en "
17
"Tussen "
newline
" auto's"
0
" uur : "
17
" en "
16
"Tussen "
newline
" auto's"
1
" uur : "
16
" en "
15
"Tussen "
newline
" auto's"
1
" uur : "
15
" en "
14
"Tussen "
newline
" auto's"
0
" uur : "
14
" en "
13
"Tussen "
newline
" auto's"
0
" uur : "
13
" en "
12
"Tussen "
newline
" auto's"
0
" uur : "
12
" en "
11
"Tussen "
newline
" auto's"
1
" uur : "
11
" en "
10
"Tussen "
newline
" auto's"
2
" uur : "
10
" en "
9
"Tussen "
newline
" auto's"
2
" uur : "
9
" en "
8
"Tussen "
newline
" auto's"
0
" uur : "
8
" en "
7
"Tussen "
newline
" auto's"
0
" uur : "
7
" en "
6
"Tussen "
newline
" auto's"
1
" uur : "
6
" en "
5
"Tussen "
newline
" auto's"
0
" uur : "
5
" en "
4
"Tussen "
newline
" auto's"
0
" uur : "
4
" en "
3
"Tussen "
newline
" auto's"
3
" uur : "
3
" en "
2
"Tussen "
newline
" auto's"
0
" uur : "
2
" en "
1
"Tussen "
newline
" auto's"
2
" uur : "
1
" en "
0
"Tussen "
(error2 "no 24 hours have passed")))
| null | https://raw.githubusercontent.com/acieroid/scala-am/13ef3befbfc664b77f31f56847c30d60f4ee7dfe/test/R5RS/scp1/car-counter.scm | scheme | (define foldr
(lambda (f base lst)
(define foldr-aux
(lambda (lst)
(if (null? lst)
base
(f (car lst) (foldr-aux (cdr lst))))))
(foldr-aux lst)))
(define result '())
(define display2 (lambda (i) (set! result (cons i result))))
(define newline2 (lambda () (set! result (cons 'newline result))))
(define error2 (lambda (e) (set! result (cons (list 'error e) result))))
(define (maak-buffer)
(let ((inhoud '()))
(define (newValue value)
(set! inhoud (append inhoud (list value))))
(define (returnSum)
(foldr + 0 inhoud))
(define (flush)
(set! inhoud '()))
(define (value pos)
(list-ref inhoud pos))
(define (dispatch msg)
(cond ((eq? msg 'newValue) newValue)
((eq? msg 'return) inhoud)
((eq? msg 'returnSum) (returnSum))
((eq? msg 'flush) (flush))
((eq? msg 'value) value)
((eq? msg 'size) (length inhoud))
(else (error "wrong message"))))
dispatch))
(define buffer (maak-buffer))
((buffer 'newValue) 3)
((buffer 'newValue) 9)
(define res1 (and (= (buffer 'returnSum) 12)
(equal? (buffer 'return) '(3 9))
(begin (buffer 'flush))
(null? (buffer 'return))))
(define (make-counter)
(let ((state 0))
(define (increment) (set! state (+ state 1)))
(define (read) state)
(define (reset) (set! state 0))
(define (dispatch msg)
(cond ((eq? msg 'increment) (increment))
((eq? msg 'read) (read))
((eq? msg 'reset) (reset))
(else (error "wrong message"))))
dispatch))
(define (maak-verkeersteller)
(let ((voorbijgereden (make-counter))
(buffer (maak-buffer)))
(define (newCar)
(voorbijgereden 'increment))
(define (newHour)
((buffer 'newValue) (voorbijgereden 'read))
(voorbijgereden 'reset))
(define (newDay)
(define (loop start end)
(cond ((= start end) (newline))
(else (display2 "Tussen ") (display2 start)
(display2 " en ") (display2 (+ start 1))
(display2 " uur : ")
(display2 ((buffer 'value) start))
(display2 " auto's")
(newline2)
(loop (+ start 1) end))))
(if (= (buffer 'size) 24)
(begin (loop 0 24)
(buffer 'flush)
(voorbijgereden 'reset))
(error2 "no 24 hours have passed")))
(define (dispatch msg)
(cond ((eq? msg 'newCar) (newCar))
((eq? msg 'newHour) (newHour))
((eq? msg 'newDay) (newDay))
(else (error2 "wrong message"))))
dispatch))
(define verkeersteller (maak-verkeersteller))
(verkeersteller 'newCar)
(verkeersteller 'newCar)
(verkeersteller 'newHour)
(verkeersteller 'newHour)
(verkeersteller 'newCar)
(verkeersteller 'newCar)
(verkeersteller 'newCar)
(verkeersteller 'newHour)
(verkeersteller 'newHour)
(verkeersteller 'newHour)
(verkeersteller 'newCar)
(verkeersteller 'newHour)
(verkeersteller 'newHour)
(verkeersteller 'newHour)
(verkeersteller 'newCar)
(verkeersteller 'newCar)
(verkeersteller 'newHour)
(verkeersteller 'newCar)
(verkeersteller 'newCar)
(verkeersteller 'newHour)
(verkeersteller 'newCar)
(verkeersteller 'newHour)
(verkeersteller 'newHour)
(verkeersteller 'newHour)
(verkeersteller 'newHour)
(verkeersteller 'newCar)
(verkeersteller 'newHour)
(verkeersteller 'newCar)
(verkeersteller 'newHour)
(verkeersteller 'newHour)
(verkeersteller 'newCar)
(verkeersteller 'newHour)
(verkeersteller 'newHour)
(verkeersteller 'newCar)
(verkeersteller 'newHour)
(verkeersteller 'newHour)
(verkeersteller 'newCar)
(verkeersteller 'newCar)
(verkeersteller 'newHour)
(verkeersteller 'newCar)
(verkeersteller 'newHour)
(verkeersteller 'newCar)
(verkeersteller 'newDay)
(verkeersteller 'newHour)
(verkeersteller 'newDay)
(equal? result '(newline
newline
" auto's"
1
" uur : "
24
" en "
23
"Tussen "
newline
" auto's"
1
" uur : "
23
" en "
22
"Tussen "
newline
" auto's"
2
" uur : "
22
" en "
21
"Tussen "
newline
" auto's"
0
" uur : "
21
" en "
20
"Tussen "
newline
" auto's"
1
" uur : "
20
" en "
19
"Tussen "
newline
" auto's"
0
" uur : "
19
" en "
18
"Tussen "
newline
" auto's"
1
" uur : "
18
" en "
17
"Tussen "
newline
" auto's"
0
" uur : "
17
" en "
16
"Tussen "
newline
" auto's"
1
" uur : "
16
" en "
15
"Tussen "
newline
" auto's"
1
" uur : "
15
" en "
14
"Tussen "
newline
" auto's"
0
" uur : "
14
" en "
13
"Tussen "
newline
" auto's"
0
" uur : "
13
" en "
12
"Tussen "
newline
" auto's"
0
" uur : "
12
" en "
11
"Tussen "
newline
" auto's"
1
" uur : "
11
" en "
10
"Tussen "
newline
" auto's"
2
" uur : "
10
" en "
9
"Tussen "
newline
" auto's"
2
" uur : "
9
" en "
8
"Tussen "
newline
" auto's"
0
" uur : "
8
" en "
7
"Tussen "
newline
" auto's"
0
" uur : "
7
" en "
6
"Tussen "
newline
" auto's"
1
" uur : "
6
" en "
5
"Tussen "
newline
" auto's"
0
" uur : "
5
" en "
4
"Tussen "
newline
" auto's"
0
" uur : "
4
" en "
3
"Tussen "
newline
" auto's"
3
" uur : "
3
" en "
2
"Tussen "
newline
" auto's"
0
" uur : "
2
" en "
1
"Tussen "
newline
" auto's"
2
" uur : "
1
" en "
0
"Tussen "
(error2 "no 24 hours have passed")))
| |
576bd9c3f6be6baeb4ec19dd38f75c37987c7c778c19f13832e46848edd790f3 | Eventuria/demonstration-gsd | Streaming.hs | {-# LANGUAGE OverloadedStrings #-}
# LANGUAGE TypeFamilies #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE FlexibleInstances #
# LANGUAGE FlexibleContexts #
# LANGUAGE NamedFieldPuns #
module Eventuria.Libraries.PersistedStreamEngine.Instances.EventStore.Read.Streaming (
streamFromRangeInclusive,
streamFromOffsetInclusive,
streamAll,
streamAllInfinitely) where
import GHC.Natural
import Control.Monad.IO.Class (MonadIO(liftIO))
import Control.Concurrent.Async (waitCatch)
import Control.Exception
import Data.Maybe
import Data.Aeson
import Streamly
import qualified Streamly.Prelude as S
import qualified Database.EventStore as EventStore
import Eventuria.Commons.Logger.Core
import qualified Eventuria.Libraries.PersistedStreamEngine.Instances.EventStore.Read.Subscribing as EventStore.Subscribing
import Eventuria.Libraries.PersistedStreamEngine.Interface.PersistedItem
import Eventuria.Libraries.PersistedStreamEngine.Interface.Offset
import Eventuria.Libraries.PersistedStreamEngine.Instances.EventStore.EventStoreStream
import Eventuria.Libraries.PersistedStreamEngine.Instances.EventStore.Client.Dependencies
import Eventuria.Libraries.PersistedStreamEngine.Interface.Streamable
streamFromRangeInclusive :: Streamable stream monad item =>
EventStoreStream item ->
Offset ->
Offset ->
stream monad (Either SomeException (Persisted item))
streamFromRangeInclusive eventStoreStream @ EventStoreStream {
clientDependencies = Dependencies { logger, credentials, connection },
streamName = streamName }
fromOffset
toOffset = do
liftIO $ logInfo logger $ "streaming [" ++ (show fromOffset) ++ "..] > " ++ show streamName
let batchSize = 100
commandFetched <- liftIO $ catch
(EventStore.readEventsForward
connection
streamName
(EventStore.eventNumber $ naturalFromInteger fromOffset)
(fromInteger batchSize)
EventStore.NoResolveLink
(Just credentials) >>= waitCatch )
(\e @ SomeException {} -> do
liftIO $ logInfo logger $ "[stream.from.offset] exception raised " ++ show e
return $ Left e)
case commandFetched of
Right (EventStore.ReadSuccess slices) -> do
case (filterBelowInclusive
(getPersistedItemsFromSlices slices)
toOffset) of
persistedItems | (length persistedItems) == fromInteger batchSize ->
(Right <$> S.fromList persistedItems) <> (streamFromOffsetInclusive eventStoreStream $ fromOffset + batchSize)
persistedItems -> Right <$> S.fromList persistedItems
Right (EventStore.ReadNoStream) -> do
liftIO $ logInfo logger $ "> " ++ show streamName ++ " is not found."
S.fromList []
Right (EventStore.ReadStreamDeleted e) -> return $ Left readStreamDeletedException
Right (EventStore.ReadNotModified )-> return $ Left readNotModifiedException
Right (EventStore.ReadError e) -> return $ Left readErrorException
Right (EventStore.ReadAccessDenied e) -> return $ Left readAccessDeniedException
Left (exception) -> do
liftIO $ logInfo logger $ "[stream.from.offset] exception propagated " ++ show exception
return $ Left exception
where
filterBelowInclusive :: [Persisted item] -> Offset -> [Persisted item]
filterBelowInclusive items toOffset = filter (\PersistedItem {offset} -> offset <= toOffset ) items
getPersistedItemsFromSlices :: FromJSON item => EventStore.Slice t -> [Persisted item]
getPersistedItemsFromSlices slices = recordedEventToPersistedItem
<$> EventStore.resolvedEventOriginal
<$> EventStore.sliceEvents slices
streamFromOffsetInclusive :: Streamable stream monad item =>
EventStoreStream item ->
Offset ->
stream monad (Either SomeException (Persisted item))
streamFromOffsetInclusive eventStoreStream @ EventStoreStream {
clientDependencies = Dependencies { logger, credentials, connection },
streamName = streamName } fromOffset = do
liftIO $ logInfo logger $ "streaming [" ++ (show fromOffset) ++ "..] > " ++ show streamName
let batchSize = 100
commandFetched <- liftIO $ catch
(EventStore.readEventsForward
connection
streamName
(EventStore.eventNumber $ naturalFromInteger fromOffset)
(fromInteger batchSize)
EventStore.NoResolveLink
(Just credentials) >>= waitCatch )
(\e @ SomeException {} -> do
liftIO $ logInfo logger $ "[stream.from.offset] exception raised " ++ show e
return $ Left e)
case commandFetched of
Right (EventStore.ReadSuccess slices) -> do
case (getPersistedItemsFromSlices slices) of
persistedItems | (length persistedItems) == fromInteger batchSize ->
(Right <$> S.fromList persistedItems) <> (streamFromOffsetInclusive eventStoreStream $ fromOffset + batchSize)
persistedItems -> Right <$> S.fromList persistedItems
Right (EventStore.ReadNoStream) -> do
liftIO $ logInfo logger $ "> " ++ show streamName ++ " is not found."
S.fromList []
Right (EventStore.ReadStreamDeleted e) -> return $ Left readStreamDeletedException
Right (EventStore.ReadNotModified )-> return $ Left readNotModifiedException
Right (EventStore.ReadError e) -> return $ Left readErrorException
Right (EventStore.ReadAccessDenied e) -> return $ Left readAccessDeniedException
Left (exception) -> do
liftIO $ logInfo logger $ "[stream.from.offset] exception propagated " ++ show exception
return $ Left exception
where
getPersistedItemsFromSlices :: FromJSON item => EventStore.Slice t -> [Persisted item]
getPersistedItemsFromSlices slices = recordedEventToPersistedItem
<$> EventStore.resolvedEventOriginal
<$> EventStore.sliceEvents slices
streamAll :: Streamable stream monad item =>
EventStoreStream item ->
stream monad (Either SomeException (Persisted item))
streamAll eventStoreStream = streamFromOffsetInclusive eventStoreStream 0
streamAllInfinitely :: Streamable stream monad item =>
EventStoreStream item ->
stream monad (Either SomeException (Persisted item))
streamAllInfinitely eventStoreStream =
(EventStore.Subscribing.subscribe eventStoreStream)
`parallel` (streamAll eventStoreStream)
recordedEventToPersistedItem :: FromJSON item => EventStore.RecordedEvent -> Persisted item
recordedEventToPersistedItem recordedEvent =
PersistedItem { offset = toInteger $ EventStore.recordedEventNumber recordedEvent,
item = fromJust $ EventStore.recordedEventDataAsJson recordedEvent }
data EvenStoreExceptionReason = ReadStreamDeleted
| ReadNotModified
| ReadError
| ReadAccessDenied deriving Show
instance Exception EvenStoreExceptionReason
readStreamDeletedException,readNotModifiedException,readErrorException,readAccessDeniedException :: SomeException
readStreamDeletedException = toException ReadStreamDeleted
readNotModifiedException = toException ReadNotModified
readErrorException = toException ReadError
readAccessDeniedException = toException ReadAccessDenied | null | https://raw.githubusercontent.com/Eventuria/demonstration-gsd/5c7692b310086bc172d3fd4e1eaf09ae51ea468f/src/Eventuria/Libraries/PersistedStreamEngine/Instances/EventStore/Read/Streaming.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE TypeFamilies #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE FlexibleInstances #
# LANGUAGE FlexibleContexts #
# LANGUAGE NamedFieldPuns #
module Eventuria.Libraries.PersistedStreamEngine.Instances.EventStore.Read.Streaming (
streamFromRangeInclusive,
streamFromOffsetInclusive,
streamAll,
streamAllInfinitely) where
import GHC.Natural
import Control.Monad.IO.Class (MonadIO(liftIO))
import Control.Concurrent.Async (waitCatch)
import Control.Exception
import Data.Maybe
import Data.Aeson
import Streamly
import qualified Streamly.Prelude as S
import qualified Database.EventStore as EventStore
import Eventuria.Commons.Logger.Core
import qualified Eventuria.Libraries.PersistedStreamEngine.Instances.EventStore.Read.Subscribing as EventStore.Subscribing
import Eventuria.Libraries.PersistedStreamEngine.Interface.PersistedItem
import Eventuria.Libraries.PersistedStreamEngine.Interface.Offset
import Eventuria.Libraries.PersistedStreamEngine.Instances.EventStore.EventStoreStream
import Eventuria.Libraries.PersistedStreamEngine.Instances.EventStore.Client.Dependencies
import Eventuria.Libraries.PersistedStreamEngine.Interface.Streamable
streamFromRangeInclusive :: Streamable stream monad item =>
EventStoreStream item ->
Offset ->
Offset ->
stream monad (Either SomeException (Persisted item))
streamFromRangeInclusive eventStoreStream @ EventStoreStream {
clientDependencies = Dependencies { logger, credentials, connection },
streamName = streamName }
fromOffset
toOffset = do
liftIO $ logInfo logger $ "streaming [" ++ (show fromOffset) ++ "..] > " ++ show streamName
let batchSize = 100
commandFetched <- liftIO $ catch
(EventStore.readEventsForward
connection
streamName
(EventStore.eventNumber $ naturalFromInteger fromOffset)
(fromInteger batchSize)
EventStore.NoResolveLink
(Just credentials) >>= waitCatch )
(\e @ SomeException {} -> do
liftIO $ logInfo logger $ "[stream.from.offset] exception raised " ++ show e
return $ Left e)
case commandFetched of
Right (EventStore.ReadSuccess slices) -> do
case (filterBelowInclusive
(getPersistedItemsFromSlices slices)
toOffset) of
persistedItems | (length persistedItems) == fromInteger batchSize ->
(Right <$> S.fromList persistedItems) <> (streamFromOffsetInclusive eventStoreStream $ fromOffset + batchSize)
persistedItems -> Right <$> S.fromList persistedItems
Right (EventStore.ReadNoStream) -> do
liftIO $ logInfo logger $ "> " ++ show streamName ++ " is not found."
S.fromList []
Right (EventStore.ReadStreamDeleted e) -> return $ Left readStreamDeletedException
Right (EventStore.ReadNotModified )-> return $ Left readNotModifiedException
Right (EventStore.ReadError e) -> return $ Left readErrorException
Right (EventStore.ReadAccessDenied e) -> return $ Left readAccessDeniedException
Left (exception) -> do
liftIO $ logInfo logger $ "[stream.from.offset] exception propagated " ++ show exception
return $ Left exception
where
filterBelowInclusive :: [Persisted item] -> Offset -> [Persisted item]
filterBelowInclusive items toOffset = filter (\PersistedItem {offset} -> offset <= toOffset ) items
getPersistedItemsFromSlices :: FromJSON item => EventStore.Slice t -> [Persisted item]
getPersistedItemsFromSlices slices = recordedEventToPersistedItem
<$> EventStore.resolvedEventOriginal
<$> EventStore.sliceEvents slices
streamFromOffsetInclusive :: Streamable stream monad item =>
EventStoreStream item ->
Offset ->
stream monad (Either SomeException (Persisted item))
streamFromOffsetInclusive eventStoreStream @ EventStoreStream {
clientDependencies = Dependencies { logger, credentials, connection },
streamName = streamName } fromOffset = do
liftIO $ logInfo logger $ "streaming [" ++ (show fromOffset) ++ "..] > " ++ show streamName
let batchSize = 100
commandFetched <- liftIO $ catch
(EventStore.readEventsForward
connection
streamName
(EventStore.eventNumber $ naturalFromInteger fromOffset)
(fromInteger batchSize)
EventStore.NoResolveLink
(Just credentials) >>= waitCatch )
(\e @ SomeException {} -> do
liftIO $ logInfo logger $ "[stream.from.offset] exception raised " ++ show e
return $ Left e)
case commandFetched of
Right (EventStore.ReadSuccess slices) -> do
case (getPersistedItemsFromSlices slices) of
persistedItems | (length persistedItems) == fromInteger batchSize ->
(Right <$> S.fromList persistedItems) <> (streamFromOffsetInclusive eventStoreStream $ fromOffset + batchSize)
persistedItems -> Right <$> S.fromList persistedItems
Right (EventStore.ReadNoStream) -> do
liftIO $ logInfo logger $ "> " ++ show streamName ++ " is not found."
S.fromList []
Right (EventStore.ReadStreamDeleted e) -> return $ Left readStreamDeletedException
Right (EventStore.ReadNotModified )-> return $ Left readNotModifiedException
Right (EventStore.ReadError e) -> return $ Left readErrorException
Right (EventStore.ReadAccessDenied e) -> return $ Left readAccessDeniedException
Left (exception) -> do
liftIO $ logInfo logger $ "[stream.from.offset] exception propagated " ++ show exception
return $ Left exception
where
getPersistedItemsFromSlices :: FromJSON item => EventStore.Slice t -> [Persisted item]
getPersistedItemsFromSlices slices = recordedEventToPersistedItem
<$> EventStore.resolvedEventOriginal
<$> EventStore.sliceEvents slices
streamAll :: Streamable stream monad item =>
EventStoreStream item ->
stream monad (Either SomeException (Persisted item))
streamAll eventStoreStream = streamFromOffsetInclusive eventStoreStream 0
streamAllInfinitely :: Streamable stream monad item =>
EventStoreStream item ->
stream monad (Either SomeException (Persisted item))
streamAllInfinitely eventStoreStream =
(EventStore.Subscribing.subscribe eventStoreStream)
`parallel` (streamAll eventStoreStream)
recordedEventToPersistedItem :: FromJSON item => EventStore.RecordedEvent -> Persisted item
recordedEventToPersistedItem recordedEvent =
PersistedItem { offset = toInteger $ EventStore.recordedEventNumber recordedEvent,
item = fromJust $ EventStore.recordedEventDataAsJson recordedEvent }
data EvenStoreExceptionReason = ReadStreamDeleted
| ReadNotModified
| ReadError
| ReadAccessDenied deriving Show
instance Exception EvenStoreExceptionReason
readStreamDeletedException,readNotModifiedException,readErrorException,readAccessDeniedException :: SomeException
readStreamDeletedException = toException ReadStreamDeleted
readNotModifiedException = toException ReadNotModified
readErrorException = toException ReadError
readAccessDeniedException = toException ReadAccessDenied |
1c5ab6a9289d810665e7d466888f1c0dbf708efdeb91517a2186c6c84ef8591e | Zilliqa/scilla | Config.ml |
This file is part of scilla .
Copyright ( c ) 2018 - present Zilliqa Research Pvt . Ltd.
scilla is free software : you can redistribute it and/or modify it under the
terms of the GNU General Public License as published by the Free Software
Foundation , either version 3 of the License , or ( at your option ) any later
version .
scilla is distributed in the hope that it will be useful , but WITHOUT ANY
WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE . See the GNU General Public License for more details .
You should have received a copy of the GNU General Public License along with
scilla . If not , see < / > .
This file is part of scilla.
Copyright (c) 2018 - present Zilliqa Research Pvt. Ltd.
scilla is free software: you can redistribute it and/or modify it under the
terms of the GNU General Public License as published by the Free Software
Foundation, either version 3 of the License, or (at your option) any later
version.
scilla is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
scilla. If not, see </>.
*)
open Core
type json_replacement = { vname : string; value : string } [@@deriving yojson]
type replacement = {
filename : string;
line : int;
col : int;
replacee : string; (** Identifier that should be replaced. *)
replacement : string;
}
[@@deriving yojson]
type config = {
json_replacements : json_replacement list;
replacements : replacement list;
}
[@@deriving yojson]
let from_file filename =
try Yojson.Safe.from_file filename |> config_of_yojson with
| Sys_error err -> Error err
| Yojson.Json_error err ->
Error (Printf.sprintf "%s is broken:\n%s" filename err)
| _ -> Error (Printf.sprintf "%s is broken" filename)
| null | https://raw.githubusercontent.com/Zilliqa/scilla/eec5d1c686f3a000de14707d0fe5245f0c430e0b/src/merge/Config.ml | ocaml | * Identifier that should be replaced. |
This file is part of scilla .
Copyright ( c ) 2018 - present Zilliqa Research Pvt . Ltd.
scilla is free software : you can redistribute it and/or modify it under the
terms of the GNU General Public License as published by the Free Software
Foundation , either version 3 of the License , or ( at your option ) any later
version .
scilla is distributed in the hope that it will be useful , but WITHOUT ANY
WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE . See the GNU General Public License for more details .
You should have received a copy of the GNU General Public License along with
scilla . If not , see < / > .
This file is part of scilla.
Copyright (c) 2018 - present Zilliqa Research Pvt. Ltd.
scilla is free software: you can redistribute it and/or modify it under the
terms of the GNU General Public License as published by the Free Software
Foundation, either version 3 of the License, or (at your option) any later
version.
scilla is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
scilla. If not, see </>.
*)
open Core
type json_replacement = { vname : string; value : string } [@@deriving yojson]
type replacement = {
filename : string;
line : int;
col : int;
replacement : string;
}
[@@deriving yojson]
type config = {
json_replacements : json_replacement list;
replacements : replacement list;
}
[@@deriving yojson]
let from_file filename =
try Yojson.Safe.from_file filename |> config_of_yojson with
| Sys_error err -> Error err
| Yojson.Json_error err ->
Error (Printf.sprintf "%s is broken:\n%s" filename err)
| _ -> Error (Printf.sprintf "%s is broken" filename)
|
8900c1548a2ae695bd98eacb3c7e2e57d8c831b147d18c9494649f0e31a40eb1 | typelead/intellij-eta | IElementType.hs | module FFI.Com.IntelliJ.Psi.Tree.IElementType where
import P
# CLASS " com.intellij.psi.tree . IElementType " #
IElementType = IElementType (Object# IElementType)
deriving Class
foreign import java unsafe getIndex :: IElementType -> Short
instance Ord IElementType where
x <= y = getIndex x <= getIndex y
instance Eq IElementType where
x == y = getIndex x == getIndex y
data {-# CLASS "com.intellij.psi.tree.IElementType[]" #-}
IElementTypeArray = IElementTypeArray (Object# IElementTypeArray)
deriving Class
instance JArray IElementType IElementTypeArray
instance Show IElementType where
show = fromJString . toStringJava
| null | https://raw.githubusercontent.com/typelead/intellij-eta/ee66d621aa0bfdf56d7d287279a9a54e89802cf9/plugin/src/main/eta/FFI/Com/IntelliJ/Psi/Tree/IElementType.hs | haskell | # CLASS "com.intellij.psi.tree.IElementType[]" # | module FFI.Com.IntelliJ.Psi.Tree.IElementType where
import P
# CLASS " com.intellij.psi.tree . IElementType " #
IElementType = IElementType (Object# IElementType)
deriving Class
foreign import java unsafe getIndex :: IElementType -> Short
instance Ord IElementType where
x <= y = getIndex x <= getIndex y
instance Eq IElementType where
x == y = getIndex x == getIndex y
IElementTypeArray = IElementTypeArray (Object# IElementTypeArray)
deriving Class
instance JArray IElementType IElementTypeArray
instance Show IElementType where
show = fromJString . toStringJava
|
0c365ab00845a9f9abd782814a91e9423a1c25e9ca5e9681b0044851403a17d0 | serioga/webapp-clojure-2020 | component.cljc | (ns app.rum.impl.component)
#?(:clj (set! *warn-on-reflection* true)
:cljs (set! *warn-on-infer* true))
••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••
(defmulti create-component
"Component constructor by ID keyword."
:app.rum/component-id)
(defmethod create-component :default
[data]
(println "Calling default `create-component` for" data))
••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••
| null | https://raw.githubusercontent.com/serioga/webapp-clojure-2020/91a7170a1be287bbfa5b9279d697208f7f806f9b/src/app/rum/impl/component.cljc | clojure | (ns app.rum.impl.component)
#?(:clj (set! *warn-on-reflection* true)
:cljs (set! *warn-on-infer* true))
••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••
(defmulti create-component
"Component constructor by ID keyword."
:app.rum/component-id)
(defmethod create-component :default
[data]
(println "Calling default `create-component` for" data))
••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••
| |
ec9e699c0e3de284d6d1874efd8fffd7e61fcdab8563e9e56b071de70cf11244 | tanakh/ICFP2011 | Y2CAtkQbMadan.hs | # LANGUAGE CPP #
{-# OPTIONS -Wall #-}
import Control.Applicative
import qualified Control.Exception.Control as E
import Control.Monad
import Control.Monad.State
import Data.Maybe
import Data.List
import Data.Vector ((!))
import qualified Data.Vector as V
import LTG
getFirstWorthEnemy :: Int -> LTG (Maybe Int)
getFirstWorthEnemy dmg = do
alives <- filterM
(\ix -> do
al <- isAlive False ix
vt <- getVital False ix
return (al && vt >= dmg))
[0..255]
return $ listToMaybe alives
getAnySlot :: LTG (Maybe Int)
getAnySlot = do
aliveidents <- filterM
(\ix -> do
al <- isAlive True ix
fn <- getField True ix
return (al && fn == VFun "I"))
[0..255]
return $ listToMaybe aliveidents
ensureZombieDead :: Int -> LTG ()
ensureZombieDead target = do
zombieReady <- isDead False target
if zombieReady
then do
return ()
oops ! They revived 255 !
vit <- getVital False target
aliveslot <- getAnySlot
case (vit, aliveslot) of
(1, Just slot) -> do
-- dec
num slot (255 - target)
Dec $> slot
ensureZombieDead target
(1, Nothing) -> do
return ()
(x, _) -> do
killTarget target
zombieLoop :: Int -> Int -> Int -> Int -> LTG ()
zombieLoop f4 f1 dmg target = do
elms <- getFirstWorthEnemy dmg
case elms of
Nothing -> return ()
Just n -> do
num f4 n
copyTo f1 0
ensureZombieDead target
f1 $< I
zombieLoop f4 f1 dmg target
ofN :: Int -> Value
ofN x = VInt x
ofC :: Card -> Value
ofC x = VFun (cardName x)
infixl 1 $|
($|) :: Value -> Value -> Value
x $| y = VApp x y
lazyApplyIntermediate :: Value -> Value -> Value
lazyApplyIntermediate f g =
-- S (K f) (K g)
(ofC S) $| (ofC K $| f) $| (ofC K $| g)
makeFieldsUnlessConstructed :: [(Int, Value)] -> LTG() -> LTG()
makeFieldsUnlessConstructed pairs procedure = do
ffs <- mapM
(\(f, v) -> do
k <- getField True f
return (k == v))
pairs
if and ffs
then do
lprint $ "Reusing " ++ show (map fst pairs)
return ()
else do
procedure
makeFieldUnlessConstructed :: Int -> Value -> LTG() -> LTG()
makeFieldUnlessConstructed f lval procedure = do
makeFieldsUnlessConstructed [(f, lval)] procedure
kyokoAnAn :: Int -> Int -> Int -> Int -> Int -> Int -> LTG ()
kyokoAnAn f1 f2 f4 f8 target dmg = do
-- f1, f2: temp
-- f4
-- target: zombie target
-- reuse field 0 to speed up
ff0 <- getField True 0
if ff0 == VInt dmg then do
copyTo f8 0
else do
num f8 dmg
makeFieldsUnlessConstructed
-- I know it's ugly
[(0, (VApp (VApp (VFun "S") (VApp (VFun "K") (VApp (VFun "zombie") (VInt (255 - target))))) (VApp (VFun "K") (VApp (VApp (VFun "S") (VApp (VFun "K") (VApp (VApp (VFun "S") (VApp (VApp (VFun "S") (VApp (VApp (VFun "S") (VFun "help")) (VFun "I"))) (VApp (VApp (VFun "S") (VApp (VFun "K") (VFun "copy"))) (VApp (VFun "K") (VInt 2))))) (VApp (VApp (VFun "S") (VApp (VApp (VFun "S") (VApp (VFun "K") (VFun "copy"))) (VApp (VFun "K") (VInt 3)))) (VFun "succ"))))) (VApp (VApp (VFun "S") (VApp (VFun "K") (VFun "copy"))) (VApp (VFun "K") (VInt 4))))))),
(f2, VApp (VApp (VFun "S") (VApp (VApp (VFun "S") (VApp (VApp (VFun "S") (VFun "help")) (VFun "I"))) (VApp (VApp (VFun "S") (VApp (VFun "K") (VFun "copy"))) (VApp (VFun "K") (VInt 2))))) (VApp (VApp (VFun "S") (VApp (VApp (VFun "S") (VApp (VFun "K") (VFun "copy"))) (VApp (VFun "K") (VInt 3)))) (VFun "succ")))] $ do
-- v[f4] <- S (S Help I) (lazyApply Copy f8)
-- S (S Help I)
makeFieldUnlessConstructed f2
(VApp (VApp (VFun "S") (VApp (VApp (VFun "S") (VApp (VApp (VFun "S") (VFun "help")) (VFun "I"))) (VApp (VApp (VFun "S") (VApp (VFun "K") (VFun "copy"))) (VApp (VFun "K") (VInt 2))))) (VApp (VApp (VFun "S") (VApp (VApp (VFun "S") (VApp (VFun "K") (VFun "copy"))) (VApp (VFun "K") (VInt 3)))) (VFun "succ"))) $ do
-- v[f2] <- (lazyApply Copy f8)
num :
clear f2
f2 $< Copy
num 0 f8
lazyApply f2 0
0 $< Put
-- v[f2] <- S (S Help I) v[f8]; loop body
= S ( \x - > Help x x ) ( lazyApply Copy f8 )
-- num: kill f4
copyTo 0 f2
clear f2
f2 $< S
f2 $< Help
f2 $< I
S $> f2
apply0 f2 -- S (S Help I) (S (K copy) (K 8))
v[f4 ] < - S ( lazyApply Copy f2 ) Succ ; loop next
-- num: gen f2
clear f4
f4 $< Copy
num 0 f2
lazyApply f4 0
0 $< Put
S $> f4
f4 $< Succ
-- v[f2] <- S f2 f4
-- num: kill f4
S $> f2
copyTo 0 f4
apply0 f2
-- v[f1] <- S (K v[f2]) (lazyApply Copy f4); zombie
-- v[0] = v[f4] = (lazyApply Copy f4)
clear f4
f4 $< Copy
num 0 f4
lazyApply f4 0
0 $< Put
copyTo 0 f4
-- use f2 but no help
copyTo f1 f2
K $> f1
S $> f1
apply0 f1
num f4 (255-target)
Zombie $> f4
lazyApply f4 f1
copyTo 0 f4
zombieLoop f4 f1 dmg target
sittingDuck :: LTG()
sittingDuck = do
I $> 0
sittingDuck
get 3 * 2^n or 2^n smaller than x
getEasyInt :: Int -> Int
getEasyInt x | (x <= 3) = x
getEasyInt x =
max (head $ filter (\y -> y * 2 > x) twos) (head $ filter (\y -> y * 2 > x) threep)
where
twos = map (2^) [(0::Int)..]
threep = 1 : map (\n -> 3*(2^n)) [(0::Int)..]
#ifdef KAMIJO
Iize , omae wo ,
! !
speedo :: Int -> Double
speedo x
| x == 0 = 0
| odd x = 1 + speedo (x-1)
| even x = 1 + speedo (div x 2)
getMaxEnemy :: LTG Int
getMaxEnemy = do
oppAlives <- filterM (isAlive False) [0..255]
vitals <- mapM (getVital False) oppAlives
let targets = zip oppAlives vitals
umami (i, v) = (fromIntegral v * 2 ** (0-speedo i) , v)
return $ snd $ maximum $ map umami targets
#else
debugTag::String
debugTag = "kyoko"
getMaxEnemy :: LTG Int
getMaxEnemy = do
oppAlives <- filterM (isAlive False) [0..255]
vitals <- mapM (getVital False) oppAlives
return $ maximum vitals
#endif
checkTarget :: Int -> LTG ()
checkTarget target = do
isTargetAlive <- isAlive False target
when isTargetAlive $ lerror "Not dead"
killTarget :: Int -> LTG()
killTarget target = do
zombifySlotVital <- getVital False target
let zombifySlotV = getEasyInt zombifySlotVital
alives <- filterM (\x -> do
v <- getVital True x
return $ v > zombifySlotV)
[1..255]
-- dmg > 2 -> attack is issued
-- Create wall between the cut, to control damage, if possible
case length alives of
n | n < 2 -> lerror "there are no vital"
n | zombifySlotV > 1 && n >= 5 -> attack2 (alives !! 1) (alives !! 4) (255 - target) zombifySlotV
_ | zombifySlotV > 1 -> attack2 (alives !! 0) (alives !! 1) (255 - target) zombifySlotV
_ -> return ()
when (zombifySlotV > 1) $ checkTarget target
chooseTarget :: LTG Int
chooseTarget = do
vs <- forM [255,254..240] $ \i -> do
vit <- getVital False i
return (vit, -i)
return $ negate $ snd $ minimum vs
kyoukoMain :: LTG()
kyoukoMain = do
target <- chooseTarget
dmg <- getEasyInt <$> getMaxEnemy
isTargetAlive <- isAlive False target
when isTargetAlive $ killTarget target
kyokoAnAn 1 3 4 2 target dmg
mamisanMain :: LTG()
mamisanMain = do
let weapon = VApp (VApp (VFun "S") (VFun "dec")) (VApp (VApp (VFun "S") (VApp (VApp (VFun "S") (VApp (VFun "K") (VFun "get"))) (VApp (VFun "K") (VInt 1)))) (VFun "succ"))
f <- getField True 1
if (f/=weapon) then prepareMagicalBullet 1 2
else do
alives <- fmap V.fromList $ mapM (isAlive True) [0..255]
vitals <- fmap V.fromList $ mapM (getVital True) [0..255]
let range i = [i .. min 255 (i+124)]
deaths i = sum [if alives ! j && vitals ! j == 1 then 1 else 0 | j<-range i]
damages i = sum [if alives ! j && vitals ! j >= 1 then 1 else 0 | j<-range i]
withScore i = ((deaths i, damages i,- speedo i), i)
target = snd $ maximum $ map withScore [0..255]
summonMami 1 2 target
keepAlive :: Int -> LTG ()
keepAlive ix = do
d <- isDead True ix
when d $ do
_ <- revive ix
keepAlive ix
ignExc :: LTG a -> LTG ()
ignExc m = do
mb <- E.try m
case mb of
Left (LTGError _) -> return ()
Right _ -> return ()
yose :: LTG ()
yose = do
forever $ ignExc $ do
keepAlive 0
num 0 0
forM_ [(0::Int)..255] $ \_ -> do
keepAlive 0
keepAlive 1
copyTo 1 0
Dec $> 1
Succ $> 0
waruagaki :: LTG ()
waruagaki = do
keepAlive 0
keepAlive 1
keepAlive 2
num 0 1
Inc $> 0
num 0 2
Inc $> 0
speedo :: Int -> Int
speedo x
| x == 0 = 0
| odd x = 1 + speedo (x-1)
| even x = 1 + speedo (div x 2)
main :: IO ()
main = runLTG $ do
let range = 10
let necks = take (range) $ map snd $ sort $[(speedo i, i) | i<-[0..255]]
forever $ do
ds <- filterM (isDead True) necks
enemyVs <- mapM (getVital False) [0..255]
let mainPuellaMagi = if (maximum enemyVs <= 2) then mamisanMain
else kyoukoMain
if null ds
then do
turn <- getTurnCnt
if turn >= 100000 - 1536
then do
lprint "yose mode"
yose
else do
lprint "normal mode"
mb <- E.try mainPuellaMagi
case mb of
Left (LTGError e) -> do
case e of
"there are no vital" -> do
lprint "waruagaki mode"
waruagaki
_ -> do
lprint e
return ()
Right _ -> do
return ()
return ()
else do
rankedTgt <- mapM rankDeads ds
let reviveTgt = snd $ head $ sort rankedTgt
lprint $ "Revive mode: " ++ show (sort rankedTgt)
ignExc $ revive reviveTgt
lprint "Revive done"
return ()
rankDeads :: Int -> LTG (Int, Int)
rankDeads i
| i == 0 = return (0, i)
| True = do
fa <- isAlive True (i-1)
return (if fa then 1 else 0, i)
futureApply 1 2 18 3
| null | https://raw.githubusercontent.com/tanakh/ICFP2011/db0d670cdbe12e9cef4242d6ab202a98c254412e/ai/Y2CAtkQbMadan.hs | haskell | # OPTIONS -Wall #
dec
S (K f) (K g)
f1, f2: temp
f4
target: zombie target
reuse field 0 to speed up
I know it's ugly
v[f4] <- S (S Help I) (lazyApply Copy f8)
S (S Help I)
v[f2] <- (lazyApply Copy f8)
v[f2] <- S (S Help I) v[f8]; loop body
num: kill f4
S (S Help I) (S (K copy) (K 8))
num: gen f2
v[f2] <- S f2 f4
num: kill f4
v[f1] <- S (K v[f2]) (lazyApply Copy f4); zombie
v[0] = v[f4] = (lazyApply Copy f4)
use f2 but no help
dmg > 2 -> attack is issued
Create wall between the cut, to control damage, if possible | # LANGUAGE CPP #
import Control.Applicative
import qualified Control.Exception.Control as E
import Control.Monad
import Control.Monad.State
import Data.Maybe
import Data.List
import Data.Vector ((!))
import qualified Data.Vector as V
import LTG
getFirstWorthEnemy :: Int -> LTG (Maybe Int)
getFirstWorthEnemy dmg = do
alives <- filterM
(\ix -> do
al <- isAlive False ix
vt <- getVital False ix
return (al && vt >= dmg))
[0..255]
return $ listToMaybe alives
getAnySlot :: LTG (Maybe Int)
getAnySlot = do
aliveidents <- filterM
(\ix -> do
al <- isAlive True ix
fn <- getField True ix
return (al && fn == VFun "I"))
[0..255]
return $ listToMaybe aliveidents
ensureZombieDead :: Int -> LTG ()
ensureZombieDead target = do
zombieReady <- isDead False target
if zombieReady
then do
return ()
oops ! They revived 255 !
vit <- getVital False target
aliveslot <- getAnySlot
case (vit, aliveslot) of
(1, Just slot) -> do
num slot (255 - target)
Dec $> slot
ensureZombieDead target
(1, Nothing) -> do
return ()
(x, _) -> do
killTarget target
zombieLoop :: Int -> Int -> Int -> Int -> LTG ()
zombieLoop f4 f1 dmg target = do
elms <- getFirstWorthEnemy dmg
case elms of
Nothing -> return ()
Just n -> do
num f4 n
copyTo f1 0
ensureZombieDead target
f1 $< I
zombieLoop f4 f1 dmg target
ofN :: Int -> Value
ofN x = VInt x
ofC :: Card -> Value
ofC x = VFun (cardName x)
infixl 1 $|
($|) :: Value -> Value -> Value
x $| y = VApp x y
lazyApplyIntermediate :: Value -> Value -> Value
lazyApplyIntermediate f g =
(ofC S) $| (ofC K $| f) $| (ofC K $| g)
makeFieldsUnlessConstructed :: [(Int, Value)] -> LTG() -> LTG()
makeFieldsUnlessConstructed pairs procedure = do
ffs <- mapM
(\(f, v) -> do
k <- getField True f
return (k == v))
pairs
if and ffs
then do
lprint $ "Reusing " ++ show (map fst pairs)
return ()
else do
procedure
makeFieldUnlessConstructed :: Int -> Value -> LTG() -> LTG()
makeFieldUnlessConstructed f lval procedure = do
makeFieldsUnlessConstructed [(f, lval)] procedure
kyokoAnAn :: Int -> Int -> Int -> Int -> Int -> Int -> LTG ()
kyokoAnAn f1 f2 f4 f8 target dmg = do
ff0 <- getField True 0
if ff0 == VInt dmg then do
copyTo f8 0
else do
num f8 dmg
makeFieldsUnlessConstructed
[(0, (VApp (VApp (VFun "S") (VApp (VFun "K") (VApp (VFun "zombie") (VInt (255 - target))))) (VApp (VFun "K") (VApp (VApp (VFun "S") (VApp (VFun "K") (VApp (VApp (VFun "S") (VApp (VApp (VFun "S") (VApp (VApp (VFun "S") (VFun "help")) (VFun "I"))) (VApp (VApp (VFun "S") (VApp (VFun "K") (VFun "copy"))) (VApp (VFun "K") (VInt 2))))) (VApp (VApp (VFun "S") (VApp (VApp (VFun "S") (VApp (VFun "K") (VFun "copy"))) (VApp (VFun "K") (VInt 3)))) (VFun "succ"))))) (VApp (VApp (VFun "S") (VApp (VFun "K") (VFun "copy"))) (VApp (VFun "K") (VInt 4))))))),
(f2, VApp (VApp (VFun "S") (VApp (VApp (VFun "S") (VApp (VApp (VFun "S") (VFun "help")) (VFun "I"))) (VApp (VApp (VFun "S") (VApp (VFun "K") (VFun "copy"))) (VApp (VFun "K") (VInt 2))))) (VApp (VApp (VFun "S") (VApp (VApp (VFun "S") (VApp (VFun "K") (VFun "copy"))) (VApp (VFun "K") (VInt 3)))) (VFun "succ")))] $ do
makeFieldUnlessConstructed f2
(VApp (VApp (VFun "S") (VApp (VApp (VFun "S") (VApp (VApp (VFun "S") (VFun "help")) (VFun "I"))) (VApp (VApp (VFun "S") (VApp (VFun "K") (VFun "copy"))) (VApp (VFun "K") (VInt 2))))) (VApp (VApp (VFun "S") (VApp (VApp (VFun "S") (VApp (VFun "K") (VFun "copy"))) (VApp (VFun "K") (VInt 3)))) (VFun "succ"))) $ do
num :
clear f2
f2 $< Copy
num 0 f8
lazyApply f2 0
0 $< Put
= S ( \x - > Help x x ) ( lazyApply Copy f8 )
copyTo 0 f2
clear f2
f2 $< S
f2 $< Help
f2 $< I
S $> f2
v[f4 ] < - S ( lazyApply Copy f2 ) Succ ; loop next
clear f4
f4 $< Copy
num 0 f2
lazyApply f4 0
0 $< Put
S $> f4
f4 $< Succ
S $> f2
copyTo 0 f4
apply0 f2
clear f4
f4 $< Copy
num 0 f4
lazyApply f4 0
0 $< Put
copyTo 0 f4
copyTo f1 f2
K $> f1
S $> f1
apply0 f1
num f4 (255-target)
Zombie $> f4
lazyApply f4 f1
copyTo 0 f4
zombieLoop f4 f1 dmg target
sittingDuck :: LTG()
sittingDuck = do
I $> 0
sittingDuck
get 3 * 2^n or 2^n smaller than x
getEasyInt :: Int -> Int
getEasyInt x | (x <= 3) = x
getEasyInt x =
max (head $ filter (\y -> y * 2 > x) twos) (head $ filter (\y -> y * 2 > x) threep)
where
twos = map (2^) [(0::Int)..]
threep = 1 : map (\n -> 3*(2^n)) [(0::Int)..]
#ifdef KAMIJO
Iize , omae wo ,
! !
speedo :: Int -> Double
speedo x
| x == 0 = 0
| odd x = 1 + speedo (x-1)
| even x = 1 + speedo (div x 2)
getMaxEnemy :: LTG Int
getMaxEnemy = do
oppAlives <- filterM (isAlive False) [0..255]
vitals <- mapM (getVital False) oppAlives
let targets = zip oppAlives vitals
umami (i, v) = (fromIntegral v * 2 ** (0-speedo i) , v)
return $ snd $ maximum $ map umami targets
#else
debugTag::String
debugTag = "kyoko"
getMaxEnemy :: LTG Int
getMaxEnemy = do
oppAlives <- filterM (isAlive False) [0..255]
vitals <- mapM (getVital False) oppAlives
return $ maximum vitals
#endif
checkTarget :: Int -> LTG ()
checkTarget target = do
isTargetAlive <- isAlive False target
when isTargetAlive $ lerror "Not dead"
killTarget :: Int -> LTG()
killTarget target = do
zombifySlotVital <- getVital False target
let zombifySlotV = getEasyInt zombifySlotVital
alives <- filterM (\x -> do
v <- getVital True x
return $ v > zombifySlotV)
[1..255]
case length alives of
n | n < 2 -> lerror "there are no vital"
n | zombifySlotV > 1 && n >= 5 -> attack2 (alives !! 1) (alives !! 4) (255 - target) zombifySlotV
_ | zombifySlotV > 1 -> attack2 (alives !! 0) (alives !! 1) (255 - target) zombifySlotV
_ -> return ()
when (zombifySlotV > 1) $ checkTarget target
chooseTarget :: LTG Int
chooseTarget = do
vs <- forM [255,254..240] $ \i -> do
vit <- getVital False i
return (vit, -i)
return $ negate $ snd $ minimum vs
kyoukoMain :: LTG()
kyoukoMain = do
target <- chooseTarget
dmg <- getEasyInt <$> getMaxEnemy
isTargetAlive <- isAlive False target
when isTargetAlive $ killTarget target
kyokoAnAn 1 3 4 2 target dmg
mamisanMain :: LTG()
mamisanMain = do
let weapon = VApp (VApp (VFun "S") (VFun "dec")) (VApp (VApp (VFun "S") (VApp (VApp (VFun "S") (VApp (VFun "K") (VFun "get"))) (VApp (VFun "K") (VInt 1)))) (VFun "succ"))
f <- getField True 1
if (f/=weapon) then prepareMagicalBullet 1 2
else do
alives <- fmap V.fromList $ mapM (isAlive True) [0..255]
vitals <- fmap V.fromList $ mapM (getVital True) [0..255]
let range i = [i .. min 255 (i+124)]
deaths i = sum [if alives ! j && vitals ! j == 1 then 1 else 0 | j<-range i]
damages i = sum [if alives ! j && vitals ! j >= 1 then 1 else 0 | j<-range i]
withScore i = ((deaths i, damages i,- speedo i), i)
target = snd $ maximum $ map withScore [0..255]
summonMami 1 2 target
keepAlive :: Int -> LTG ()
keepAlive ix = do
d <- isDead True ix
when d $ do
_ <- revive ix
keepAlive ix
ignExc :: LTG a -> LTG ()
ignExc m = do
mb <- E.try m
case mb of
Left (LTGError _) -> return ()
Right _ -> return ()
yose :: LTG ()
yose = do
forever $ ignExc $ do
keepAlive 0
num 0 0
forM_ [(0::Int)..255] $ \_ -> do
keepAlive 0
keepAlive 1
copyTo 1 0
Dec $> 1
Succ $> 0
waruagaki :: LTG ()
waruagaki = do
keepAlive 0
keepAlive 1
keepAlive 2
num 0 1
Inc $> 0
num 0 2
Inc $> 0
speedo :: Int -> Int
speedo x
| x == 0 = 0
| odd x = 1 + speedo (x-1)
| even x = 1 + speedo (div x 2)
main :: IO ()
main = runLTG $ do
let range = 10
let necks = take (range) $ map snd $ sort $[(speedo i, i) | i<-[0..255]]
forever $ do
ds <- filterM (isDead True) necks
enemyVs <- mapM (getVital False) [0..255]
let mainPuellaMagi = if (maximum enemyVs <= 2) then mamisanMain
else kyoukoMain
if null ds
then do
turn <- getTurnCnt
if turn >= 100000 - 1536
then do
lprint "yose mode"
yose
else do
lprint "normal mode"
mb <- E.try mainPuellaMagi
case mb of
Left (LTGError e) -> do
case e of
"there are no vital" -> do
lprint "waruagaki mode"
waruagaki
_ -> do
lprint e
return ()
Right _ -> do
return ()
return ()
else do
rankedTgt <- mapM rankDeads ds
let reviveTgt = snd $ head $ sort rankedTgt
lprint $ "Revive mode: " ++ show (sort rankedTgt)
ignExc $ revive reviveTgt
lprint "Revive done"
return ()
rankDeads :: Int -> LTG (Int, Int)
rankDeads i
| i == 0 = return (0, i)
| True = do
fa <- isAlive True (i-1)
return (if fa then 1 else 0, i)
futureApply 1 2 18 3
|
ee683717327469cd0f779b62ca7e3c5d687b43eedc8968f8a8e82178b5df5638 | timbertson/passe | lock.mli | type lock
type proof
exception Stale_lock
val create : unit -> lock
val is_empty : lock -> bool
val use : ?proof:proof -> lock -> (proof -> 'a Lwt.t) -> 'a Lwt.t
module Map (Ord: OrderedType.S) ( ) : sig
val acquire : Ord.t -> ?proof:proof -> (proof -> 'a Lwt.t) -> 'a Lwt.t
end
| null | https://raw.githubusercontent.com/timbertson/passe/467a79ea0cd7b08a97b52be3bd1307a3bdf55799/src/server/lock.mli | ocaml | type lock
type proof
exception Stale_lock
val create : unit -> lock
val is_empty : lock -> bool
val use : ?proof:proof -> lock -> (proof -> 'a Lwt.t) -> 'a Lwt.t
module Map (Ord: OrderedType.S) ( ) : sig
val acquire : Ord.t -> ?proof:proof -> (proof -> 'a Lwt.t) -> 'a Lwt.t
end
| |
2068fda2bce652566dc4d4ac35907cdd9d9b59c1fcecdb9d46b997665dbc22d5 | cloudant-labs/dreyfus | dreyfus_blacklist_await_test.erl | Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% -2.0
%
% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(dreyfus_blacklist_await_test).
-include_lib("couch/include/couch_db.hrl").
-include_lib("dreyfus/include/dreyfus.hrl").
-include_lib("eunit/include/eunit.hrl").
-define(DDOC_ID, <<"_design/black_list_doc">>).
-define(INDEX_NAME, <<"my_index">>).
-define(DBNAME, <<"mydb">>).
-define(TIMEOUT, 1000).
start() ->
test_util:start_couch([dreyfus]).
stop(_) ->
test_util:stop_couch([dreyfus]).
setup() ->
ok = meck:new(couch_log),
ok = meck:expect(couch_log, notice, fun(_Fmt, _Args) ->
?debugFmt(_Fmt, _Args)
end).
teardown(_) ->
ok = meck:unload(couch_log).
dreyfus_blacklist_await_test_() ->
{
"dreyfus black_list_doc await tests",
{
setup,
fun start/0, fun stop/1,
{
foreach,
fun setup/0, fun teardown/1,
[
fun do_not_await_1/0
]
}
}
}.
do_not_await_1() ->
ok = meck:new(dreyfus_index, [passthrough]),
Denied = lists:flatten([?b2l(?DBNAME), ".", "black_list_doc", ".",
"my_index"]),
config:set("dreyfus_blacklist", Denied, "true"),
dreyfus_test_util:wait_config_change(Denied, "true"),
Index = #index{dbname=?DBNAME, name=?INDEX_NAME, ddoc_id=?DDOC_ID},
State = create_state(?DBNAME, Index, nil, nil, []),
Msg = "Index Blocked from Updating - db: ~p, ddocid: ~p name: ~p",
Return = wait_log_message(Msg, fun() ->
{noreply, NewState} = dreyfus_index:handle_call({await, 1},
self(), State)
end),
?assertEqual(Return, ok).
wait_log_message(Fmt, Fun) ->
ok = meck:reset(couch_log),
Fun(),
ok = meck:wait(couch_log, '_', [Fmt, '_'], 5000).
create_state(DbName, Index, UPid, IPid, WList) ->
{state, DbName, Index, UPid, IPid, WList}.
| null | https://raw.githubusercontent.com/cloudant-labs/dreyfus/243d451a1f8c941bdfbeceb983419cba8e540c0b/test/dreyfus_blacklist_await_test.erl | erlang | use this file except in compliance with the License. You may obtain a copy of
the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License. | Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
-module(dreyfus_blacklist_await_test).
-include_lib("couch/include/couch_db.hrl").
-include_lib("dreyfus/include/dreyfus.hrl").
-include_lib("eunit/include/eunit.hrl").
-define(DDOC_ID, <<"_design/black_list_doc">>).
-define(INDEX_NAME, <<"my_index">>).
-define(DBNAME, <<"mydb">>).
-define(TIMEOUT, 1000).
start() ->
test_util:start_couch([dreyfus]).
stop(_) ->
test_util:stop_couch([dreyfus]).
setup() ->
ok = meck:new(couch_log),
ok = meck:expect(couch_log, notice, fun(_Fmt, _Args) ->
?debugFmt(_Fmt, _Args)
end).
teardown(_) ->
ok = meck:unload(couch_log).
dreyfus_blacklist_await_test_() ->
{
"dreyfus black_list_doc await tests",
{
setup,
fun start/0, fun stop/1,
{
foreach,
fun setup/0, fun teardown/1,
[
fun do_not_await_1/0
]
}
}
}.
do_not_await_1() ->
ok = meck:new(dreyfus_index, [passthrough]),
Denied = lists:flatten([?b2l(?DBNAME), ".", "black_list_doc", ".",
"my_index"]),
config:set("dreyfus_blacklist", Denied, "true"),
dreyfus_test_util:wait_config_change(Denied, "true"),
Index = #index{dbname=?DBNAME, name=?INDEX_NAME, ddoc_id=?DDOC_ID},
State = create_state(?DBNAME, Index, nil, nil, []),
Msg = "Index Blocked from Updating - db: ~p, ddocid: ~p name: ~p",
Return = wait_log_message(Msg, fun() ->
{noreply, NewState} = dreyfus_index:handle_call({await, 1},
self(), State)
end),
?assertEqual(Return, ok).
wait_log_message(Fmt, Fun) ->
ok = meck:reset(couch_log),
Fun(),
ok = meck:wait(couch_log, '_', [Fmt, '_'], 5000).
create_state(DbName, Index, UPid, IPid, WList) ->
{state, DbName, Index, UPid, IPid, WList}.
|
4e4b5ada1bbb4f3fbf623ed7dc1cc3602dd34ab1fd1a4e08a667860807c37ad4 | pink-gorilla/goldly | bindings_static.cljc | (ns goldly.sci.bindings-static
(:require
[clojure.walk :as walk]))
#_(def bindings-static
{'println println})
(def ns-static
{'walk {'postwalk walk/postwalk
'prewalk walk/prewalk
'keywordize-keys walk/keywordize-keys
'walk walk/walk
'postwalk-replace walk/postwalk-replace
'prewalk-replace walk/prewalk-replace
'stringify-keys walk/stringify-keys}}) | null | https://raw.githubusercontent.com/pink-gorilla/goldly/c65f789671cedfc5f115191a2a4b51d787492ae5/goldly/src/goldly/sci/bindings_static.cljc | clojure | (ns goldly.sci.bindings-static
(:require
[clojure.walk :as walk]))
#_(def bindings-static
{'println println})
(def ns-static
{'walk {'postwalk walk/postwalk
'prewalk walk/prewalk
'keywordize-keys walk/keywordize-keys
'walk walk/walk
'postwalk-replace walk/postwalk-replace
'prewalk-replace walk/prewalk-replace
'stringify-keys walk/stringify-keys}}) | |
8f17bda76e1c959cd61c0be921a2e4f0749b581dc242b6ed83e6d47f9f5eea37 | macchiato-framework/macchiato-core | params.cljs | (ns macchiato.test.middleware.params
(:require
[cljs.nodejs :as node]
[macchiato.middleware.params :refer [wrap-params]]
[macchiato.test.mock.request :refer [header request]]
[macchiato.test.mock.util :refer [mock-handler raw-response ok-response]]
[cljs.test :refer-macros [is are deftest testing use-fixtures]]))
(defn wrapped-echo [req]
((mock-handler wrap-params (fn [req res rais] (res req)))
req))
(deftest wrap-params-query-params-only
(let [req {:query-string "foo=bar&biz=bat%25"}
resp (wrapped-echo req)]
(is (= {"foo" "bar" "biz" "bat%"} (:query-params resp)))
(is (empty? (:form-params resp)))
(is (= {"foo" "bar" "biz" "bat%"} (:params resp)))))
(def readable (.-Readable (node/require "stream")))
(defn str->stream [s]
(doto (doto (readable.))
(.push s)
(.push nil)))
(deftest wrap-params-query-and-form-params
(let [req {:query-string "foo=bar"
:headers {"content-type" "application/x-www-form-urlencoded"}
:body "biz=bat%25"}
resp (wrapped-echo req)]
(is (= {"foo" "bar"} (:query-params resp)))
(is (= {"biz" "bat%"} (:form-params resp)))
(is (= {"foo" "bar" "biz" "bat%"} (:params resp)))))
(deftest wrap-params-not-form-encoded
(let [req {:headers {"content-type" "application/json"}
:body "{foo: \"bar\"}"}
resp (wrapped-echo req)]
(is (empty? (:form-params resp)))
(is (empty? (:params resp)))))
(deftest wrap-params-always-assocs-maps
(let [req {:query-string ""
:headers {"content-type" "application/x-www-form-urlencoded"}
:body ""}
resp (wrapped-echo req)]
(is (= {} (:query-params resp)))
(is (= {} (:form-params resp)))
(is (= {} (:params resp)))))
| null | https://raw.githubusercontent.com/macchiato-framework/macchiato-core/14eac3dbc561927ee61b6127f30ef0b0269b2af6/test/macchiato/test/middleware/params.cljs | clojure | (ns macchiato.test.middleware.params
(:require
[cljs.nodejs :as node]
[macchiato.middleware.params :refer [wrap-params]]
[macchiato.test.mock.request :refer [header request]]
[macchiato.test.mock.util :refer [mock-handler raw-response ok-response]]
[cljs.test :refer-macros [is are deftest testing use-fixtures]]))
(defn wrapped-echo [req]
((mock-handler wrap-params (fn [req res rais] (res req)))
req))
(deftest wrap-params-query-params-only
(let [req {:query-string "foo=bar&biz=bat%25"}
resp (wrapped-echo req)]
(is (= {"foo" "bar" "biz" "bat%"} (:query-params resp)))
(is (empty? (:form-params resp)))
(is (= {"foo" "bar" "biz" "bat%"} (:params resp)))))
(def readable (.-Readable (node/require "stream")))
(defn str->stream [s]
(doto (doto (readable.))
(.push s)
(.push nil)))
(deftest wrap-params-query-and-form-params
(let [req {:query-string "foo=bar"
:headers {"content-type" "application/x-www-form-urlencoded"}
:body "biz=bat%25"}
resp (wrapped-echo req)]
(is (= {"foo" "bar"} (:query-params resp)))
(is (= {"biz" "bat%"} (:form-params resp)))
(is (= {"foo" "bar" "biz" "bat%"} (:params resp)))))
(deftest wrap-params-not-form-encoded
(let [req {:headers {"content-type" "application/json"}
:body "{foo: \"bar\"}"}
resp (wrapped-echo req)]
(is (empty? (:form-params resp)))
(is (empty? (:params resp)))))
(deftest wrap-params-always-assocs-maps
(let [req {:query-string ""
:headers {"content-type" "application/x-www-form-urlencoded"}
:body ""}
resp (wrapped-echo req)]
(is (= {} (:query-params resp)))
(is (= {} (:form-params resp)))
(is (= {} (:params resp)))))
| |
bc91a72faba9ee660fea5b1320a5d221faa2457d62a7e5ce0e54d4a874316a5d | finnsson/hs2graphviz | Main.hs | module Main where
import Hs2Dot.Src
import System.Environment (getArgs)
import System.IO (FilePath)
import Control.Monad (filterM)
import System.Directory (doesFileExist)
main :: IO ()
main = do
args <- getArgs
files <- filterM doesFileExist args
code <- files2dot (conf args) files
putStr $ if null files then manual else code
where
conf args = if any (=="--high") args
then ConfigHigh
else if any (=="--low") args
then ConfigLow
else ConfigNormal
manual :: String
manual =
"usage: hs2dot [files.hs]*\n" ++
"\n" ++
"The files must be Haskell source code that haskell-src-exts can parse.\n" ++
"Some restrictions apply to the source files that can be parsed!\n"
-- Architecture:
-- Main
-- All IO
-- Hs2Dot.Src
-- Primary convertion-flow.
-- Every 1-1-relation between functions and boxes and lines in the dot-file.
Hs2Dot . SrcHelper
Helper - functions for Language . Haskell . Exts - datatypes
-- Hs2Dot.Dot
-- Functions to create dot-code.
Hs2Dot . DotHelper
-- Helper-functions to create dot-code. Makes use of Hs2Dot.Dot
-- Hs2Dot.Helper
Generic helper - methods not relying on any specific API ( Language . . nor Dot )
| null | https://raw.githubusercontent.com/finnsson/hs2graphviz/64969b6552c8408c5079e29c999c8fa968e7a08d/src/Main.hs | haskell | Architecture:
Main
All IO
Hs2Dot.Src
Primary convertion-flow.
Every 1-1-relation between functions and boxes and lines in the dot-file.
Hs2Dot.Dot
Functions to create dot-code.
Helper-functions to create dot-code. Makes use of Hs2Dot.Dot
Hs2Dot.Helper | module Main where
import Hs2Dot.Src
import System.Environment (getArgs)
import System.IO (FilePath)
import Control.Monad (filterM)
import System.Directory (doesFileExist)
-- | Read file arguments from the command line, convert every file that exists
-- to dot-code using the configuration implied by the flags, and print the
-- result (or the usage text when no existing files were given).
main :: IO ()
main = do
  args <- getArgs
  files <- filterM doesFileExist args
  code <- files2dot (conf args) files
  putStr (if null files then manual else code)
  where
    conf args
      | "--high" `elem` args = ConfigHigh
      | "--low" `elem` args  = ConfigLow
      | otherwise            = ConfigNormal
-- | Usage text printed when no (existing) input files were given.
manual :: String
manual = unlines
  [ "usage: hs2dot [files.hs]*"
  , ""
  , "The files must be Haskell source code that haskell-src-exts can parse."
  , "Some restrictions apply to the source files that can be parsed!"
  ]
-- Hs2Dot.SrcHelper
-- Helper-functions for Language.Haskell.Exts-datatypes
-- Hs2Dot.DotHelper
-- Generic helper-methods not relying on any specific API (Language.Haskell.Exts nor Dot)
|
47b185cdb319ebb4f5a2b5758bb86fc1efba96fc62c09c554d26762b871979cf | protz/mezzo | Exports.mli | (*****************************************************************************)
(* Mezzo, a programming language based on permissions *)
Copyright ( C ) 2011 , 2012 and
(* *)
(* This program is free software: you can redistribute it and/or modify *)
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
(* (at your option) any later version. *)
(* *)
(* This program is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU General Public License for more details. *)
(* *)
You should have received a copy of the GNU General Public License
(* along with this program. If not, see </>. *)
(* *)
(*****************************************************************************)
(** Handling the names exported by interfaces and implementations. *)
(** From a very high-level perspective, the type-checker needs to maintain a map
 * from names to [TypeCore.var]s.
 * - We need to map _qualified_ names to [var]s (What is the [var] that stands
 * for "int::int"?), and
 * - we need to map _unqualified_ names to [var]s (This implementation
 * contains "val x". Which [var] is "x"?)
 *
 * Qualified names are added to this map when importing an interface.
 * Unqualified names are added when type-checking an implementation.
 *
 * What does this have to do with kind-check? Well, kind-check expects an
 * environment where all required interfaces have been _imported_. This means
 * that if there is an occurrence of module "m" in implementation "impl", then
 * kind-checking expects "m::x" to be available in the environment. Opening a
 * module is managed internally by [KindCheck] and [TransSurface].
 *
 * Rather than using a separate, high-level environment, which we would inject
 * into the kind-checking environment, we cheat and reuse kind-checking
 * environments as high-level environments. The invariant is that a high-level
 * environment only contains [NonLocal] bindings, either qualified or
 * unqualified. *)
(** Values exported by a compilation unit: each exported name paired with the
    [var] that stands for it. *)
type value_exports = (Variable.name * TypeCore.var) list

(** Data constructors exported by a compilation unit: the [var] of the defining
    type, the constructor's name, and the surface-syntax information attached
    to that constructor. *)
type datacon_exports = (TypeCore.var * Datacon.name * SurfaceSyntax.datacon_info) list
(** Record exported values in an implementation. This creates non-local,
* unqualified bindings. One can reach these bindings using
* [find_unqualified_var], for instance when printing a signature, or in the
* test-suite, when poking at a specific variable. *)
val bind_implementation_values:
TypeCore.env -> value_exports ->
TypeCore.env
(** Record exported types and data constructors in an implementation. *)
val bind_implementation_types:
TypeCore.env -> TypeCore.data_type_group -> TypeCore.var list -> datacon_exports ->
TypeCore.env
(** Record exported values from an interface. This creates non-local, qualified
* bindings. One can reach these bindings using [find_qualified_var] at any
* time. Such bindings will be used by the kind-checking, translation and
* type-checking phases. *)
val bind_interface_value:
TypeCore.env -> Module.name -> Variable.name -> TypeCore.var ->
TypeCore.env
(** Record exported types and data constructors from an interface. *)
val bind_interface_types:
TypeCore.env -> Module.name -> TypeCore.data_type_group -> TypeCore.var list -> datacon_exports ->
TypeCore.env
(** [find_qualified_var env mname x] finds name [x] as exported by module
 * [mname]. Use this to reach values exported by _interfaces_ which the current
 * program depends on. *)
val find_qualified_var: TypeCore.env -> Module.name -> Variable.name -> TypeCore.var
(** [find_unqualified_var env x] finds the name [x] as exported by the current
* module. Use it after type-checking an _implementation_. *)
val find_unqualified_var: TypeCore.env -> Variable.name -> TypeCore.var
| null | https://raw.githubusercontent.com/protz/mezzo/4e9d917558bd96067437116341b7a6ea02ab9c39/typing/Exports.mli | ocaml | ***************************************************************************
Mezzo, a programming language based on permissions
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program. If not, see </>.
***************************************************************************
* Handling the names exported by interfaces and implementations.
* Record exported values in an implementation. This creates non-local,
* unqualified bindings. One can reach these bindings using
* [find_unqualified_var], for instance when printing a signature, or in the
* test-suite, when poking at a specific variable.
* Record exported types and data constructors in an implementation.
* Record exported values from an interface. This creates non-local, qualified
* bindings. One can reach these bindings using [find_qualified_var] at any
* time. Such bindings will be used by the kind-checking, translation and
* type-checking phases.
* Record exported types and data constructors from an interface.
* [find_unqualified_var env x] finds the name [x] as exported by the current
* module. Use it after type-checking an _implementation_. | Copyright ( C ) 2011 , 2012 and
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
* From a very high - level perspective , the type - checker needs to maintain a map
* from names to [ TypeCore.var]s .
* - We need to map _ qualified _ names to [ var]s ( What is the [ var ] that stands
* for " int::int " ? ) , and
* - we need to map _ unqualified _ names to [ var]s ( This implementation
* contains " val x " . Which [ var ] is " x " ? )
*
* Qualified names are added to this map when importing an interface .
* Unqualified names are added when type - checking an implementation .
*
* What does this have to do with kind - check ? Well , kind - check expects an
* environment where all required interfaces have been _ imported _ . This means
* that if there is an occurrence of module " m " in implementation " impl " , then
* kind - checking expects " m::x " to be available in the environment . Opening a
* module is managed internally by [ KindCheck ] and [ TransSurface ] .
*
* Rather than using a separate , high - level environment , which we would inject
* into the kind - checking environment , we cheat and reuse kind - checking
* environments as high - level environments . The invariant is that a high - level
* environment only contains [ NonLocal ] bindings , either qualified or
* unqualified .
* from names to [TypeCore.var]s.
* - We need to map _qualified_ names to [var]s (What is the [var] that stands
* for "int::int"?), and
* - we need to map _unqualified_ names to [var]s (This implementation
* contains "val x". Which [var] is "x"?)
*
* Qualified names are added to this map when importing an interface.
* Unqualified names are added when type-checking an implementation.
*
* What does this have to do with kind-check? Well, kind-check expects an
* environment where all required interfaces have been _imported_. This means
* that if there is an occurrence of module "m" in implementation "impl", then
* kind-checking expects "m::x" to be available in the environment. Opening a
* module is managed internally by [KindCheck] and [TransSurface].
*
* Rather than using a separate, high-level environment, which we would inject
* into the kind-checking environment, we cheat and reuse kind-checking
* environments as high-level environments. The invariant is that a high-level
* environment only contains [NonLocal] bindings, either qualified or
* unqualified. *)
type value_exports = (Variable.name * TypeCore.var) list
type datacon_exports = (TypeCore.var * Datacon.name * SurfaceSyntax.datacon_info) list
val bind_implementation_values:
TypeCore.env -> value_exports ->
TypeCore.env
val bind_implementation_types:
TypeCore.env -> TypeCore.data_type_group -> TypeCore.var list -> datacon_exports ->
TypeCore.env
val bind_interface_value:
TypeCore.env -> Module.name -> Variable.name -> TypeCore.var ->
TypeCore.env
val bind_interface_types:
TypeCore.env -> Module.name -> TypeCore.data_type_group -> TypeCore.var list -> datacon_exports ->
TypeCore.env
(** [find_qualified_var env mname x] finds name [x] as exported by module
 * [mname]. Use this to reach values exported by _interfaces_ which the current
 * program depends on. *)
val find_qualified_var: TypeCore.env -> Module.name -> Variable.name -> TypeCore.var
val find_unqualified_var: TypeCore.env -> Variable.name -> TypeCore.var
|
6314950e9b31a74c0904e3338d22a3b9b1450d9ba45f2a28f4255c9167f4d363 | bollu/cellularAutomata | GameOfLife.hs | # LANGUAGE TypeFamilies #
# LANGUAGE FlexibleContexts #
# LANGUAGE TemplateHaskell #
module GameOfLife where
import Cellular
import Control.Comonad
import Diagrams.Prelude
import Diagrams.Backend.Cairo.CmdLine
import Diagrams.TwoD.Layout.Grid
import Control.Monad
import Data.Active
import qualified Data.Vector as V
import Data.MonoTraversable
import DeriveMonoComonadTH
-- | A cell is either alive ('On') or dead ('Off').
data Cell = On | Off deriving(Eq)

-- | The game board: a universe of cells (see the `Cellular` module).
newtype GameOfLife = GameOfLife (Univ Cell)

-- Template Haskell: generate the mono-comonad instances for the newtype
-- wrapper (see DeriveMonoComonadTH).
$(deriveMonoInstances ''GameOfLife)

-- Plug this automaton's step and render functions into the generic CA class.
instance CA GameOfLife where
  stepCell = GameOfLife.stepCell
  renderCA = GameOfLife.renderCA
-- | Number of neighbouring cells that are currently alive.
liveNeighbourCount :: GameOfLife -> Int
liveNeighbourCount (GameOfLife grid) =
  V.length (V.filter (== On) (getUnivNeighbours grid))
-- | Conway's rules for a single cell: a cell with more than three or fewer
-- than two live neighbours dies; a dead cell with exactly three live
-- neighbours is born; any other cell keeps its current state.
stepCell :: GameOfLife -> Cell
stepCell gol
  | numNeighbours > 3                 = Off
  | numNeighbours < 2                 = Off
  | cell == Off && numNeighbours == 3 = On
  | otherwise                         = cell
  where
    cell          = oextract gol
    numNeighbours = liveNeighbourCount gol
-- | Render the whole universe, drawing each cell with 'cellToDiagram'.
renderCA :: CADiagramBackend b => GameOfLife -> QDiagram b V2 (N b) Any
renderCA (GameOfLife univ) = univToDiagram cellToDiagram univ
-- | Render a single cell as a unit square: cyan when alive, white when dead.
cellToDiagram :: CADiagramBackend b => Cell -> QDiagram b V2 (N b) Any
cellToDiagram c = square 1 # fc (colourOf c)
  where
    colourOf On  = cyan
    colourOf Off = white
| null | https://raw.githubusercontent.com/bollu/cellularAutomata/1c77ff5d6d59678a845bde7e1747d45d0ca0989f/app/GameOfLife.hs | haskell | # LANGUAGE TypeFamilies #
# LANGUAGE FlexibleContexts #
# LANGUAGE TemplateHaskell #
module GameOfLife where
import Cellular
import Control.Comonad
import Diagrams.Prelude
import Diagrams.Backend.Cairo.CmdLine
import Diagrams.TwoD.Layout.Grid
import Control.Monad
import Data.Active
import qualified Data.Vector as V
import Data.MonoTraversable
import DeriveMonoComonadTH
data Cell = On | Off deriving(Eq)
newtype GameOfLife = GameOfLife (Univ Cell)
$(deriveMonoInstances ''GameOfLife)
instance CA GameOfLife where
stepCell = GameOfLife.stepCell
renderCA = GameOfLife.renderCA
-- | Number of neighbouring cells that are currently alive.
liveNeighbourCount :: GameOfLife -> Int
liveNeighbourCount (GameOfLife grid) =
  V.length (V.filter (== On) (getUnivNeighbours grid))
-- | Conway's rules for a single cell: a cell with more than three or fewer
-- than two live neighbours dies; a dead cell with exactly three live
-- neighbours is born; any other cell keeps its current state.
stepCell :: GameOfLife -> Cell
stepCell gol
  | numNeighbours > 3                 = Off
  | numNeighbours < 2                 = Off
  | cell == Off && numNeighbours == 3 = On
  | otherwise                         = cell
  where
    cell          = oextract gol
    numNeighbours = liveNeighbourCount gol
renderCA :: CADiagramBackend b => GameOfLife -> QDiagram b V2 (N b) Any
renderCA (GameOfLife univ) = univToDiagram cellToDiagram univ
cellToDiagram :: CADiagramBackend b => Cell -> QDiagram b V2 (N b) Any
cellToDiagram On = square 1 # fc cyan
cellToDiagram Off = square 1 # fc white
| |
81ccd12e5b2b6b9c3cb64f3ced3ce2a36dda5c4a6ee1a91913e28500e3a91a70 | oofp/Beseder | IORefStateProv.hs | {-# LANGUAGE DataKinds #-}
{-# LANGUAGE FlexibleContexts #-}
# LANGUAGE FlexibleInstances #
# LANGUAGE InstanceSigs #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE OverloadedStrings #
# LANGUAGE PartialTypeSignatures #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
{-# LANGUAGE TypeSynonymInstances #-}
# LANGUAGE UndecidableInstances #
{-# LANGUAGE ScopedTypeVariables #-}
module Beseder.Resources.State.Impl.IORefStateProv
( ioRefStateRes
) where
import Protolude
import GHC.Show (Show (..))
import Data.IORef
import Data.Coerce
import Beseder.Resources.State.MonoStateProv
newtype IORefState s = IORefState s deriving Show
-- | Wrap an initial value as an IORef-backed state resource (a coercion
-- through the StateRes/IORefState newtypes).
ioRefStateRes :: s -> IORefStateRes s
ioRefStateRes = coerce

-- | A state resource whose backing store is an 'IORef'.
type IORefStateRes s = StateRes IORefState s
-- | IORef-backed 'MonoStateProv': the state lives in a mutable reference, so
-- every operation is a lifted IORef primitive.
instance MonadIO m => MonoStateProv IORefState s m where
  data MonoStateData IORefState s m = IORefData (IORef s)
  createState (StateRes (IORefState initial)) = liftIO (fmap IORefData (newIORef initial))
  setState (SetState newVal) stData@(IORefData ref) = liftIO (writeIORef ref newVal >> pure stData)
  modifyState (ModifyState f) stData@(IORefData ref) = liftIO (modifyIORef ref f >> pure stData)
  clearState _ = pure ()
  getDataState (IORefData ref) = liftIO (readIORef ref)
| null | https://raw.githubusercontent.com/oofp/Beseder/a0f5c5e3138938b6fa18811d646535ee6df1a4f4/src/Beseder/Resources/State/Impl/IORefStateProv.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
# LANGUAGE TypeSynonymInstances #
# LANGUAGE ScopedTypeVariables # | # LANGUAGE FlexibleInstances #
# LANGUAGE InstanceSigs #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE OverloadedStrings #
# LANGUAGE PartialTypeSignatures #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
# LANGUAGE UndecidableInstances #
module Beseder.Resources.State.Impl.IORefStateProv
( ioRefStateRes
) where
import Protolude
import GHC.Show (Show (..))
import Data.IORef
import Data.Coerce
import Beseder.Resources.State.MonoStateProv
newtype IORefState s = IORefState s deriving Show
ioRefStateRes :: s -> IORefStateRes s
ioRefStateRes = coerce
type IORefStateRes s = StateRes IORefState s
-- | IORef-backed 'MonoStateProv': the state lives in a mutable reference, so
-- every operation is a lifted IORef primitive.
instance MonadIO m => MonoStateProv IORefState s m where
  data MonoStateData IORefState s m = IORefData (IORef s)
  createState (StateRes (IORefState initial)) = liftIO (fmap IORefData (newIORef initial))
  setState (SetState newVal) stData@(IORefData ref) = liftIO (writeIORef ref newVal >> pure stData)
  modifyState (ModifyState f) stData@(IORefData ref) = liftIO (modifyIORef ref f >> pure stData)
  clearState _ = pure ()
  getDataState (IORefData ref) = liftIO (readIORef ref)
|
a44c2a9486327056857c24a835c79bc43cc14b1d84984ff8d5bdf2d6464647c3 | openmusic-project/OMChroma | init-chroma.lisp | (in-package :cr)
;; Root folder for all Chroma files, created under the user's home directory on
;; first load, plus the standard subdirectories used by the rest of the system
;; (all created if missing).  See `get-cr-path` for keyword-based access.
(defvar *cr-root* (ensure-directories-exist (merge-pathnames "Documents/Chroma/" (user-homedir-pathname))) )
(defparameter *cr-out-dir* (ensure-directories-exist (merge-pathnames "cr-out/" *cr-root*)))
(defparameter *cr-csfun-dir* (ensure-directories-exist (merge-pathnames "cr-fun/" *cr-root*)))
(defparameter *cr-wt-dir* (ensure-directories-exist (merge-pathnames "cr-wt/" *cr-root*)))
(defparameter *cr-models-dir* (ensure-directories-exist (merge-pathnames "cr-models/" *cr-root*)))
(defparameter *cr-userfun-dir* (ensure-directories-exist (merge-pathnames "cr-userfun/" *cr-root*)))
(defparameter *cr-tmp-dir* (ensure-directories-exist (merge-pathnames "cr-tmp/" *cr-root*)))
(defun get-cr-path (dir &key name type subdirs)
  "Build a pathname inside one of the standard Chroma directories.
DIR is a keyword (:out :csfun :wt :models :userfun :tmp) selecting the root
directory; NAME and TYPE give the file name and extension, and SUBDIRS is a
list of subdirectory strings appended under the root.  Returns NIL when DIR
is not one of the known keywords."
  (let ((root (cdr (assoc dir
                          (list (cons :out *cr-out-dir*)
                                (cons :csfun *cr-csfun-dir*)
                                (cons :wt *cr-wt-dir*)
                                (cons :models *cr-models-dir*)
                                (cons :userfun *cr-userfun-dir*)
                                (cons :tmp *cr-tmp-dir*))))))
    (when root
      (make-pathname :directory (append (pathname-directory root) subdirs)
                     :name name :type type))))
;;; Definition of helper-functions with implementation-dependent or external dependencies:
(defun choose-file-dialog (&optional (message "Select a file..."))
  "Prompt the user to pick a file, showing MESSAGE as the dialog title.
Only implemented on LispWorks (via CAPI); signals an error elsewhere."
  #+lispworks
  (capi::prompt-for-file message)
  #-lispworks
  (error "Sorry I can not prompt for file...")
  )
(defun cr-beep (&optional text)
  "Emit an audible alert: a CAPI beep on LispWorks, a printed \"BIP\" elsewhere.
When TEXT is given it is printed afterwards on every implementation."
  #+lispworks
  (capi::beep-pane nil)
  #-lispworks
  (print "BIP")
  ;; Runs unconditionally, after whichever alert form was compiled in.
  (when text (print text))
  )
(defun sound-file-get-info (filename)
  "Return information about the sound file FILENAME via the audio-io backend.
Signals an error when no libsndfile-based backend is available."
  #+libsndfile
  (audio-io::om-get-sound-info (namestring filename))
  #-libsndfile
  (error "Sorry I can not read sound info..."))
| null | https://raw.githubusercontent.com/openmusic-project/OMChroma/5ded34f22b59a1a93ea7b87e182c9dbdfa95e047/sources/chroma/init-chroma.lisp | lisp | Definition of helper-functions with implementation-dependent or external dependencies: | (in-package :cr)
(defvar *cr-root* (ensure-directories-exist (merge-pathnames "Documents/Chroma/" (user-homedir-pathname))) )
(defparameter *cr-out-dir* (ensure-directories-exist (merge-pathnames "cr-out/" *cr-root*)))
(defparameter *cr-csfun-dir* (ensure-directories-exist (merge-pathnames "cr-fun/" *cr-root*)))
(defparameter *cr-wt-dir* (ensure-directories-exist (merge-pathnames "cr-wt/" *cr-root*)))
(defparameter *cr-models-dir* (ensure-directories-exist (merge-pathnames "cr-models/" *cr-root*)))
(defparameter *cr-userfun-dir* (ensure-directories-exist (merge-pathnames "cr-userfun/" *cr-root*)))
(defparameter *cr-tmp-dir* (ensure-directories-exist (merge-pathnames "cr-tmp/" *cr-root*)))
(defun get-cr-path (dir &key name type subdirs)
  "Build a pathname inside one of the standard Chroma directories.
DIR is a keyword (:out :csfun :wt :models :userfun :tmp) selecting the root
directory; NAME and TYPE give the file name and extension, and SUBDIRS is a
list of subdirectory strings appended under the root.  Returns NIL when DIR
is not one of the known keywords."
  (let ((root (cdr (assoc dir
                          (list (cons :out *cr-out-dir*)
                                (cons :csfun *cr-csfun-dir*)
                                (cons :wt *cr-wt-dir*)
                                (cons :models *cr-models-dir*)
                                (cons :userfun *cr-userfun-dir*)
                                (cons :tmp *cr-tmp-dir*))))))
    (when root
      (make-pathname :directory (append (pathname-directory root) subdirs)
                     :name name :type type))))
(defun choose-file-dialog (&optional (message "Select a file..."))
#+lispworks
(capi::prompt-for-file message)
#-lispworks
(error "Sorry I can not prompt for file...")
)
(defun cr-beep (&optional text)
#+lispworks
(capi::beep-pane nil)
#-lispworks
(print "BIP")
(when text (print text))
)
(defun sound-file-get-info (filename)
#+libsndfile
(audio-io::om-get-sound-info (namestring filename))
#-libsndfile
(error "Sorry I can not read sound info..."))
|
f288f960c4522b48468fd06d118099f3ed881605a13505f0a672ba1711b93c98 | zero-one-group/geni-performance-benchmark | script.clj | (ns geni.script
(:require
[zero-one.geni.core :as g]))
(comment
(time
(-> (g/read-parquet! "/data/performance-benchmark-data")
;(-> (g/read-parquet! "/data/performance-benchmark-data/part-00000-0cf99dad-6d07-4025-a5e9-f425bb9532b9-c000.snappy.parquet")
(g/with-column :sales (g/* :price :quantity))
(g/group-by :member-id)
(g/agg {:total-spend (g/sum :sales)
:avg-basket-size (g/mean :sales)
:avg-price (g/mean :price)
:n-transactions (g/count "*")
:n-visits (g/count-distinct :date)
:n-brands (g/count-distinct :brand-id)
:n-styles (g/count-distinct :style-id)})
(g/write-parquet! "target/geni-matrix.parquet" {:mode "overwrite"})))
(g/shape (g/read-parquet! "target/geni-matrix.parquet"))
true)
  ;; Elapsed time: 7684.372175 msecs  => 7.7 secs
  ;; Elapsed time: 39247.311383 msecs => 39 secs
| null | https://raw.githubusercontent.com/zero-one-group/geni-performance-benchmark/ea3d92c69d78335e3e4d5f9d6772cb3e9c10b6b7/geni/src/geni/script.clj | clojure | (-> (g/read-parquet! "/data/performance-benchmark-data/part-00000-0cf99dad-6d07-4025-a5e9-f425bb9532b9-c000.snappy.parquet") | (ns geni.script
(:require
[zero-one.geni.core :as g]))
(comment
(time
(-> (g/read-parquet! "/data/performance-benchmark-data")
(g/with-column :sales (g/* :price :quantity))
(g/group-by :member-id)
(g/agg {:total-spend (g/sum :sales)
:avg-basket-size (g/mean :sales)
:avg-price (g/mean :price)
:n-transactions (g/count "*")
:n-visits (g/count-distinct :date)
:n-brands (g/count-distinct :brand-id)
:n-styles (g/count-distinct :style-id)})
(g/write-parquet! "target/geni-matrix.parquet" {:mode "overwrite"})))
(g/shape (g/read-parquet! "target/geni-matrix.parquet"))
true)
  ;; Elapsed time: 7684.372175 msecs  => 7.7 secs
  ;; Elapsed time: 39247.311383 msecs => 39 secs
|
d8b3624c9907f0585ed650c9671724ca8c59536a8bd283d3a38602cacc98ab95 | Peaker/git-mediate | Opts.hs | # LANGUAGE LambdaCase #
-- | Option parser
module Opts
( Options(..)
, getOpts
) where
import Control.Applicative (Alternative(..))
import qualified Options.Applicative as O
import PPDiff (ColorEnable(..))
import System.Exit (exitSuccess)
import Version (versionString)
-- | Parsed command-line options (see 'parser' for the corresponding flags).
data Options = Options
  { shouldUseEditor :: Bool                -- ^ @-e@: run $EDITOR on each still-conflicted file
  , shouldDumpDiffs :: Bool                -- ^ @-d@: dump left/right diffs from base per conflict
  , shouldDumpDiff2 :: Bool                -- ^ @-2@: dump the diff between left and right per conflict
  , shouldUseColor :: Maybe ColorEnable    -- ^ @-c@/@-C@ override; 'Nothing' means no explicit choice
  , shouldSetConflictStyle :: Bool         -- ^ @-s@: set git's global merge.conflictstyle to diff3
  , untabify :: Maybe Int                  -- ^ tab size for tab-to-space conversion, when requested
  , mergeSpecificFile :: Maybe FilePath    -- ^ @-f@: merge a single specific file only
  , diffsContext :: Int                    -- ^ @-U@: context lines around dumped diffs (default 3)
  }

-- | Top-level command: print the version, or run with parsed 'Options'.
data CmdArgs = CmdVersion | CmdOptions Options
-- | Command-line parser.  @--version@ short-circuits everything else;
-- otherwise each combinator below maps one-to-one, in order, onto a field of
-- 'Options'.
parser :: O.Parser CmdArgs
parser =
  O.flag' CmdVersion (O.long "version" <> O.help "Print the version and quit")
  <|> CmdOptions
  <$> ( Options
        <$> O.switch
            ( O.long "editor" <> O.short 'e'
              <> O.help "Execute $EDITOR for each conflicted file that remains conflicted"
            )
        <*> O.switch
            ( O.long "diff" <> O.short 'd'
              <> O.help "Dump the left/right diffs from base in each conflict remaining"
            )
        <*> O.switch
            ( O.long "diff2" <> O.short '2'
              <> O.help "Dump the diff between left and right in each conflict remaining"
            )
        -- Color: two mutually exclusive flags, defaulting to "no preference".
        <*> ( O.flag' (Just EnableColor)
              (O.long "color" <> O.short 'c' <> O.help "Enable color")
              <|> O.flag' (Just DisableColor)
              (O.long "no-color" <> O.short 'C' <> O.help "Disable color")
              <|> pure Nothing
            )
        <*> O.switch
            ( O.long "style" <> O.short 's'
              <> O.help "Configure git's global merge.conflictstyle to diff3 if needed"
            )
        <*> O.optional
            ( O.option O.auto
              ( O.long "untabify" <> O.metavar "TABSIZE"
                <> O.help "Convert tabs to the spaces at the tab stops for the given tab size"
              )
            )
        <*> O.optional
            ( O.strOption
              ( O.long "merge-file" <> O.short 'f' <> O.help "Merge a specific file")
            )
        <*> O.option O.auto
            (O.long "context" <> O.short 'U' <> O.metavar "LINECOUNT" <> O.showDefault <> O.value 3
             <> O.help "Number of context lines around dumped diffs"
            )
      )
-- | Top-level parser info: attaches @--help@, the program description, and the
-- header to 'parser'.
opts :: O.ParserInfo CmdArgs
opts = O.info (O.helper <*> parser) infoMod
  where
    infoMod =
      O.fullDesc
      <> O.progDesc
         "Resolve any git conflicts that have become trivial by editing operations.\n\
         \Go to -mediate for example use."
      <> O.header "git-mediate - Become a conflicts hero"
-- | Parse the command line.  @--version@ prints the version string and exits
-- successfully; any other invocation yields the parsed 'Options'.
getOpts :: IO Options
getOpts = do
  cmd <- O.execParser opts
  case cmd of
    CmdVersion -> do
      putStrLn ("git-mediate version " ++ versionString)
      exitSuccess
    CmdOptions o -> pure o
| null | https://raw.githubusercontent.com/Peaker/git-mediate/06a70285f16e8548e6b19bad393913450493adbf/src/Opts.hs | haskell | | Option parser | # LANGUAGE LambdaCase #
module Opts
( Options(..)
, getOpts
) where
import Control.Applicative (Alternative(..))
import qualified Options.Applicative as O
import PPDiff (ColorEnable(..))
import System.Exit (exitSuccess)
import Version (versionString)
data Options = Options
{ shouldUseEditor :: Bool
, shouldDumpDiffs :: Bool
, shouldDumpDiff2 :: Bool
, shouldUseColor :: Maybe ColorEnable
, shouldSetConflictStyle :: Bool
, untabify :: Maybe Int
, mergeSpecificFile :: Maybe FilePath
, diffsContext :: Int
}
data CmdArgs = CmdVersion | CmdOptions Options
parser :: O.Parser CmdArgs
parser =
O.flag' CmdVersion (O.long "version" <> O.help "Print the version and quit")
<|> CmdOptions
<$> ( Options
<$> O.switch
( O.long "editor" <> O.short 'e'
<> O.help "Execute $EDITOR for each conflicted file that remains conflicted"
)
<*> O.switch
( O.long "diff" <> O.short 'd'
<> O.help "Dump the left/right diffs from base in each conflict remaining"
)
<*> O.switch
( O.long "diff2" <> O.short '2'
<> O.help "Dump the diff between left and right in each conflict remaining"
)
<*> ( O.flag' (Just EnableColor)
(O.long "color" <> O.short 'c' <> O.help "Enable color")
<|> O.flag' (Just DisableColor)
(O.long "no-color" <> O.short 'C' <> O.help "Disable color")
<|> pure Nothing
)
<*> O.switch
( O.long "style" <> O.short 's'
<> O.help "Configure git's global merge.conflictstyle to diff3 if needed"
)
<*> O.optional
( O.option O.auto
( O.long "untabify" <> O.metavar "TABSIZE"
<> O.help "Convert tabs to the spaces at the tab stops for the given tab size"
)
)
<*> O.optional
( O.strOption
( O.long "merge-file" <> O.short 'f' <> O.help "Merge a specific file")
)
<*> O.option O.auto
(O.long "context" <> O.short 'U' <> O.metavar "LINECOUNT" <> O.showDefault <> O.value 3
<> O.help "Number of context lines around dumped diffs"
)
)
opts :: O.ParserInfo CmdArgs
opts =
O.info (O.helper <*> parser) $
O.fullDesc
<> O.progDesc
"Resolve any git conflicts that have become trivial by editing operations.\n\
\Go to -mediate for example use."
<> O.header "git-mediate - Become a conflicts hero"
-- | Parse the command line.  @--version@ prints the version string and exits
-- successfully; any other invocation yields the parsed 'Options'.
getOpts :: IO Options
getOpts = do
  cmd <- O.execParser opts
  case cmd of
    CmdVersion -> do
      putStrLn ("git-mediate version " ++ versionString)
      exitSuccess
    CmdOptions o -> pure o
|
e17bd8dc1afe818c1626efa73e67f2e02c6ef962743dc384df1fd3c428b37ac9 | metabase/toucan | hydrate.clj | (ns toucan.hydrate
"Functions for deserializing and hydrating fields in objects fetched from the DB."
(:require [toucan
[db :as db]
[models :as models]]))
;;; Counts Destructuring & Restructuring
;;; ==================================================================================================================
;; #### *DISCLAIMER*
;;
;; I wrote this code at 4 AM nearly 2 years ago and don't remember exactly what it is supposed to accomplish,
;; or why. It generates a sort of path that records the wacky ways in which objects in a collection are nested,
;; and how they fit into sequences; it then returns a flattened sequence of desired objects for easy modification.
;; Afterwards the modified objects can be put in place of the originals by passing in the sequence of modified objects
;; and the path.
;;
;; Nonetheless, it still works (somehow) and is well-tested. But it's definitely overengineered and crying out to be
;; replaced with a simpler implementation (`clojure.walk` would probably work here). PRs welcome!
;;
;; #### Original Overview
;;
;; At a high level, these functions let you aggressively flatten a sequence of maps by a key
;; so you can apply some function across it, and then unflatten that sequence.
;;
;; +-------------------------------------------------------------------------+
;; | +--> (map merge) --> new seq
;; seq -+--> counts-of ------------------------------------+ |
;; | +--> counts-unflatten -+
;; +--> counts-flatten -> (modify the flattened seq) -+
;;
;; 1. Get a value that can be used to unflatten a sequence later with `counts-of`.
;; 2. Flatten the sequence with `counts-flatten`
;; 3. Modify the flattened sequence as needed
;; 4. Unflatten the sequence by calling `counts-unflatten` with the modified sequence and value from step 1
;; 5. `map merge` the original sequence and the unflattened sequence.
;;
;; For your convenience `counts-apply` combines these steps for you.
(defn- counts-of
  "Build the 'shape' descriptor used to unflatten COLL later: one entry per
  element `x` of COLL, where each entry is

  *  `(count (k x))` when `(k x)` is sequential
  *  `:atom`         when `(k x)` is some other non-nil value
  *  `:nil`          when `x` contains the key `k` but its value is nil
  *  `nil`           when `x` is nil (or lacks `k`)

     (counts-of [{:a [{:b 1} {:b 2}], :c 2}
                 {:a {:b 3}, :c 4}] :a)
     ;; -> (2 :atom)"
  [coll k]
  (for [x coll
        :let [v (k x)]]
    (cond
      (sequential? v) (count v)
      v               :atom
      (contains? x k) :nil
      :else           nil)))
(defn- counts-flatten
  "Pull the value of K out of every element of COLL, splicing sequential
  values inline and wrapping everything else in a single-element chunk:

     (counts-flatten [{:a [{:b 1} {:b 2}], :c 2}
                      {:a {:b 3}, :c 4}] :a)
     ;; -> ({:b 1} {:b 2} {:b 3})"
  [coll k]
  {:pre [(sequential? coll)
         (keyword? k)]}
  (mapcat (fn [x]
            (let [v (k x)]
              (if (sequential? v)
                v
                [v])))
          coll))
(defn- counts-unflatten
  "Invert `counts-flatten`: regroup the flat sequence COLL under key K,
  following COUNTS (as produced by `counts-of`).

     (counts-unflatten [{:b 2} {:b 4} {:b 6}] :a [2 :atom])
     ;; -> ({:a [{:b 2} {:b 4}]}
     ;;     {:a {:b 6}})"
  ([coll k counts]
   (counts-unflatten [] coll k counts))
  ([acc coll k [cnt & more]]
   ;; Carve the next chunk off COLL: `nil` consumes a slot and yields nothing,
   ;; `:atom` takes one element, `:nil` stands for an explicit nil value, and a
   ;; number takes that many elements.
   (let [[chunk remaining] (condp = cnt
                             nil   [nil (rest coll)]
                             :atom [(first coll) (rest coll)]
                             :nil  [:nil (rest coll)]
                             (split-at cnt coll))
         acc               (conj acc chunk)]
     (if (seq more)
       (recur acc remaining k more)
       ;; Done: wrap each chunk back up under K, translating the `:nil`
       ;; sentinel into an explicit nil value.
       (map (fn [x]
              (when x
                {k (when-not (= x :nil) x)}))
            acc)))))
(defn- counts-apply
  "Apply F to the values of COLL flattened by K, then merge the
  unflattened/updated results back over the originals.

     (counts-apply [{:a [{:b 1} {:b 2}], :c 2}
                    {:a {:b 3}, :c 4}]
                   :a #(update-in % [:b] (partial * 2)))
     ;; -> ({:a [{:b 2} {:b 4}], :c 2}
     ;;     {:a {:b 6},          :c 4})"
  ;; NOTE: the original docstring example claimed the second result was
  ;; `{:a {:b 3}, :c 4}`, but F doubles `:b` for *every* flattened value, so
  ;; the second map's :a is hydrated to {:b 6} as shown above.
  [coll k f]
  (let [counts  (counts-of coll k)
        updated (-> (counts-flatten coll k)
                    f
                    (counts-unflatten k counts))]
    (map merge coll updated)))
;;; Util Fns
;;; ==================================================================================================================
(defn- valid-hydration-form?
  "Is K a valid argument to `hydrate`?  Valid forms are a plain keyword, or a
  sequence whose first element is a keyword and whose remaining elements are
  themselves valid hydration forms, e.g. `[:a :b [:c :d]]`."
  [k]
  (cond
    (keyword? k)    true
    (sequential? k) (and (keyword? (first k))
                         (every? valid-hydration-form? (rest k)))
    :else           false))
(defn- kw-append
  "Append SUFFIX to the name of keyword K.

     (kw-append :user \"_id\") ;; -> :user_id"
  [k suffix]
  (-> k name (str suffix) keyword))
(defn- lookup-functions-with-metadata-key
  "Return a map of hydration keywords to the vars that should be used to
  hydrate them, e.g.

     {:fields #'my-project.models.table/fields
      :tables #'my-project.models.database/tables
      ...}

  Candidates are vars tagged with METADATA-KEY, e.g. `^:hydrate` or
  `^:batched-hydrate`.  When the metadata value is literally `true` the var's
  own name becomes the hydration key; otherwise the metadata value itself is
  used.  Throws if two vars claim the same key."
  [metadata-key]
  (reduce (fn [m [k f]]
            (if (m k)
              (throw (Exception.
                      (format "Duplicate `^%s` functions for key '%s': %s and %s." metadata-key k (m k) f)))
              (assoc m k f)))
          {}
          (for [ns          (all-ns)
                [symb varr] (ns-interns ns)
                :let        [hydration-key (metadata-key (meta varr))]
                :when       hydration-key]
            [(if (true? hydration-key)
               (keyword (name symb))
               hydration-key)
             varr])))
;;; Automagic Batched Hydration (via :model-keys)
;;; ==================================================================================================================
(defn- require-model-namespaces-and-find-hydration-fns
  "Return a map of `hydration-key` -> model, e.g. `:user -> User`, built by
  scanning every public var of every loaded namespace for Toucan models and
  pulling each model's `hydration-keys` set."
  []
  (into {} (for [ns            (all-ns)
                 [_ model-var] (ns-publics ns)
                 :let          [model (var-get model-var)]
                 :when         (models/model? model)
                 k             (models/hydration-keys model)]
             [k model])))
;; Cache for `automagic-batched-hydration-key->model`: nil until the model scan
;; has run once, then the key->model map.
(def ^:private automagic-batched-hydration-key->model* (atom nil))
(defn- automagic-batched-hydration-key->model
  "Get a map of hydration keys to corresponding models, scanning the loaded
  namespaces on first use and caching the result thereafter."
  []
  (if-let [cached @automagic-batched-hydration-key->model*]
    cached
    (reset! automagic-batched-hydration-key->model*
            (require-model-namespaces-and-find-hydration-fns))))
(defn- can-automagically-batched-hydrate?
"Can we do a batched hydration of RESULTS with key K?"
[results k]
(let [k-id-u (kw-append k "_id")
k-id-d (kw-append k "-id")
contains-k-id? (fn [obj]
(or (contains? obj k-id-u)
(contains? obj k-id-d)))]
(and (contains? (automagic-batched-hydration-key->model) k)
(every? contains-k-id? results))))
(defn- automagically-batched-hydrate
"Hydrate keyword DEST-KEY across all RESULTS by aggregating corresponding source keys (`DEST-KEY_id`),
doing a single `db/select`, and mapping corresponding objects to DEST-KEY."
[results dest-key]
{:pre [(keyword? dest-key)]}
(let [model ((automagic-batched-hydration-key->model) dest-key)
source-keys #{(kw-append dest-key "_id") (kw-append dest-key "-id")}
ids (set (for [result results
:when (not (get result dest-key))
:let [k (some result source-keys)]
:when k]
k))
primary-key (models/primary-key model)
objs (if (seq ids)
(into {} (for [item (db/select model, primary-key [:in ids])]
{(primary-key item) item}))
(constantly nil))]
(for [result results
:let [source-id (some result source-keys)]]
(if (get result dest-key)
result
(assoc result dest-key (objs source-id))))))
Function - Based Batched Hydration ( fns marked ^:batched - hydrate )
;;; ==================================================================================================================
(def ^:private hydration-key->batched-f*
(atom nil))
(defn- hydration-key->batched-f
"Map of keys to functions marked `^:batched-hydrate` for them."
[]
(or @hydration-key->batched-f*
(reset! hydration-key->batched-f* (lookup-functions-with-metadata-key :batched-hydrate))))
(defn- can-fn-based-batched-hydrate? [_ k]
(contains? (hydration-key->batched-f) k))
(defn- fn-based-batched-hydrate
[results k]
{:pre [(keyword? k)]}
(((hydration-key->batched-f) k) results))
Function - Based Simple Hydration ( fns marked ^:hydrate )
;;; ==================================================================================================================
(def ^:private hydration-key->f*
(atom nil))
(defn- hydration-key->f
"Fetch a map of keys to functions marked `^:hydrate` for them."
[]
(or @hydration-key->f*
(reset! hydration-key->f* (lookup-functions-with-metadata-key :hydrate))))
(defn- simple-hydrate
"Hydrate keyword K in results by calling corresponding functions when applicable."
[results k]
{:pre [(keyword? k)]}
(for [result results]
;; don't try to hydrate if they key is already present. If we find a matching fn, hydrate with it
(when result
(or (when-not (k result)
(when-let [f ((hydration-key->f) k)]
(assoc result k (f result))))
result))))
;;; Resetting Hydration keys (for REPL usage)
;;; ==================================================================================================================
(defn flush-hydration-key-caches!
"Clear out the cached hydration keys. Useful when doing interactive development and defining new hydration
functions."
[]
(reset! automagic-batched-hydration-key->model* nil)
(reset! hydration-key->batched-f* nil)
(reset! hydration-key->f* nil))
;;; Primary Hydration Fns
;;; ==================================================================================================================
(declare hydrate)
(defn- hydrate-vector
"Hydrate a nested hydration form (vector) by recursively calling `hydrate`."
[results [k & more :as vect]]
(assert (> (count vect) 1)
(format (str "Replace '%s' with '%s'. Vectors are for nested hydration. "
"There's no need to use one when you only have a single key.")
vect (first vect)))
(let [results (hydrate results k)]
(if-not (seq more)
results
(counts-apply results k #(apply hydrate % more)))))
(defn- hydrate-kw
"Hydrate a single keyword."
[results k]
(cond
(can-automagically-batched-hydrate? results k) (automagically-batched-hydrate results k)
(can-fn-based-batched-hydrate? results k) (fn-based-batched-hydrate results k)
:else (simple-hydrate results k)))
(defn- hydrate-1
"Hydrate a single hydration form."
[results k]
(if (keyword? k)
(hydrate-kw results k)
(hydrate-vector results k)))
(defn- hydrate-many
"Hydrate many hydration forms across a *sequence* of RESULTS by recursively calling `hydrate-1`."
[results k & more]
(let [results (hydrate-1 results k)]
(if-not (seq more)
results
(recur results (first more) (rest more)))))
;;; Public Interface
;;; ==================================================================================================================
;; hydrate <-------------+
| |
;; hydrate-many |
;; | (for each form) |
;; hydrate-1 | (recursively)
| |
;; keyword? --+-- vector? |
;; | | |
;; hydrate-kw hydrate-vector ----+
;; |
;; can-automagically-batched-hydrate?
;; |
;; true ------------+----------------- false
;; | |
;; automagically-batched-hydrate can-fn-based-batched-hydrate?
;; |
;; true -------------+------------- false
;; | |
;; fn-based-batched-hydrate simple-hydrate
(defn hydrate
"Hydrate a single object or sequence of objects.
#### Automagic Batched Hydration (via hydration-keys)
`hydrate` attempts to do a *batched hydration* where possible.
If the key being hydrated is defined as one of some model's `hydration-keys`,
`hydrate` will do a batched `db/select` if a corresponding key ending with `_id`
is found in the objects being batch hydrated.
(hydrate [{:user_id 100}, {:user_id 101}] :user)
Since `:user` is a hydration key for `User`, a single `db/select` will used to
fetch `Users`:
(db/select User :id [:in #{100 101}])
The corresponding `Users` are then added under the key `:user`.
#### Function-Based Batched Hydration (via functions marked ^:batched-hydrate)
If the key can't be hydrated auto-magically with the appropriate `:hydration-keys`,
`hydrate` will look for a function tagged with `:batched-hydrate` in its metadata, and
use that instead. If a matching function is found, it is called with a collection of objects,
e.g.
(defn with-fields
\"Efficiently add `Fields` to a collection of TABLES.\"
{:batched-hydrate :fields}
[tables]
...)
(let [tables (get-some-tables)]
(hydrate tables :fields)) ; uses with-fields
By default, the function will be used to hydrate keys that match its name; as in the example above,
you can specify a different key to hydrate for in the metadata instead.
#### Simple Hydration (via functions marked ^:hydrate)
If the key is *not* eligible for batched hydration, `hydrate` will look for a function or method
tagged with `:hydrate` in its metadata, and use that instead; if a matching function
is found, it is called on the object being hydrated and the result is `assoc`ed:
(defn ^:hydrate dashboard [{:keys [dashboard_id]}]
(Dashboard dashboard_id))
(let [dc (DashboardCard ...)]
roughly equivalent to ( assoc dc : dashboard ( dashboard dc ) )
As with `:batched-hydrate` functions, by default, the function will be used to hydrate keys that
match its name; you can specify a different key to hydrate instead as the metadata value of `:hydrate`:
(defn ^{:hydrate :pk_field} pk-field-id [obj] ...) ; hydrate :pk_field with pk-field-id
Keep in mind that you can only define a single function/method to hydrate each key; move functions into the
`IModel` interface as needed.
#### Hydrating Multiple Keys
You can hydrate several keys at one time:
(hydrate {...} :a :b)
-> {:a 1, :b 2}
#### Nested Hydration
You can do recursive hydration by listing keys inside a vector:
(hydrate {...} [:a :b])
-> {:a {:b 1}}
The first key in a vector will be hydrated normally, and any subsequent keys
will be hydrated *inside* the corresponding values for that key.
(hydrate {...}
[:a [:b :c] :e])
-> {:a {:b {:c 1} :e 2}}"
[results k & ks]
{:pre [(valid-hydration-form? k)
(every? valid-hydration-form? ks)]}
(when results
(if (sequential? results)
(if (empty? results)
results
(apply hydrate-many results k ks))
(first (apply hydrate-many [results] k ks)))))
| null | https://raw.githubusercontent.com/metabase/toucan/29a921750f3051dce350255cfbd33512428bc3f8/src/toucan/hydrate.clj | clojure | ==================================================================================================================
or why. It generates a sort of path that records the wacky ways in which objects in a collection are nested,
and how they fit into sequences; it then returns a flattened sequence of desired objects for easy modification.
Afterwards the modified objects can be put in place of the originals by passing in the sequence of modified objects
and the path.
Nonetheless, it still works (somehow) and is well-tested. But it's definitely overengineered and crying out to be
replaced with a simpler implementation (`clojure.walk` would probably work here). PRs welcome!
At a high level, these functions let you aggressively flatten a sequence of maps by a key
so you can apply some function across it, and then unflatten that sequence.
+-------------------------------------------------------------------------+
| +--> (map merge) --> new seq
seq -+--> counts-of ------------------------------------+ |
| +--> counts-unflatten -+
+--> counts-flatten -> (modify the flattened seq) -+
For your convenience `counts-apply` combines these steps for you.
Util Fns
==================================================================================================================
Automagic Batched Hydration (via :model-keys)
==================================================================================================================
==================================================================================================================
==================================================================================================================
don't try to hydrate if they key is already present. If we find a matching fn, hydrate with it
Resetting Hydration keys (for REPL usage)
==================================================================================================================
Primary Hydration Fns
==================================================================================================================
Public Interface
==================================================================================================================
hydrate <-------------+
hydrate-many |
| (for each form) |
hydrate-1 | (recursively)
keyword? --+-- vector? |
| | |
hydrate-kw hydrate-vector ----+
|
can-automagically-batched-hydrate?
|
true ------------+----------------- false
| |
automagically-batched-hydrate can-fn-based-batched-hydrate?
|
true -------------+------------- false
| |
fn-based-batched-hydrate simple-hydrate
uses with-fields
as in the example above,
if a matching function
you can specify a different key to hydrate instead as the metadata value of `:hydrate`:
hydrate :pk_field with pk-field-id
move functions into the | (ns toucan.hydrate
"Functions for deserializing and hydrating fields in objects fetched from the DB."
(:require [toucan
[db :as db]
[models :as models]]))
Counts Destructuring & Restructuring
# # # # * DISCLAIMER *
This I wrote this code at 4 AM nearly 2 years ago and do n't remember exactly what it is supposed to accomplish ,
# # # # Original Overview
1 . Get a value that can be used to unflatten a sequence later with ` counts - of ` .
2 . Flatten the sequence with ` counts - flatten `
3 . Modify the flattened sequence as needed
4 . Unflatten the sequence by calling ` counts - unflatten ` with the modified sequence and value from step 1
5 . ` map merge ` the original sequence and the unflattened sequence .
(defn- counts-of
"Return a sequence of counts / keywords that can be used to unflatten
COLL later.
(counts-of [{:a [{:b 1} {:b 2}], :c 2}
{:a {:b 3}, :c 4}] :a)
-> [2 :atom]
For each `x` in COLL, return:
* `(count (k x))` if `(k x)` is sequential
* `:atom` if `(k x)` is otherwise non-nil
* `:nil` if `x` has key `k` but the value is nil
* `nil` if `x` is nil."
[coll k]
(map (fn [x]
(cond
(sequential? (k x)) (count (k x))
(k x) :atom
(contains? x k) :nil
:else nil))
coll))
(defn- counts-flatten
"Flatten COLL by K.
(counts-flatten [{:a [{:b 1} {:b 2}], :c 2}
{:a {:b 3}, :c 4}] :a)
-> [{:b 1} {:b 2} {:b 3}]"
[coll k]
{:pre [(sequential? coll)
(keyword? k)]}
(->> coll
(map k)
(mapcat (fn [x]
(if (sequential? x) x
[x])))))
(defn- counts-unflatten
"Unflatten COLL by K using COUNTS from `counts-of`.
(counts-unflatten [{:b 2} {:b 4} {:b 6}] :a [2 :atom])
-> [{:a [{:b 2} {:b 4}]}
{:a {:b 6}}]"
([coll k counts]
(counts-unflatten [] coll k counts))
([acc coll k [count & more]]
(let [[unflattend coll] (condp = count
nil [nil (rest coll)]
:atom [(first coll) (rest coll)]
:nil [:nil (rest coll)]
(split-at count coll))
acc (conj acc unflattend)]
(if-not (seq more) (map (fn [x]
(when x
{k (when-not (= x :nil) x)}))
acc)
(recur acc coll k more)))))
(defn- counts-apply
"Apply F to values of COLL flattened by K, then return unflattened/updated results.
(counts-apply [{:a [{:b 1} {:b 2}], :c 2}
{:a {:b 3}, :c 4}]
:a #(update-in % [:b] (partial * 2)))
-> [{:a [{:b 2} {:b 4}], :c 2}
{:a {:b 3}, :c 4}]"
[coll k f]
(let [counts (counts-of coll k)
new-vals (-> coll
(counts-flatten k)
f
(counts-unflatten k counts))]
(map merge coll new-vals)))
(defn- valid-hydration-form?
"Is this a valid argument to `hydrate`?"
[k]
(or (keyword? k)
(and (sequential? k)
(keyword? (first k))
(every? valid-hydration-form? (rest k)))))
(defn- kw-append
"Append to a keyword.
(kw-append :user \"_id\") -> :user_id"
[k suffix]
(keyword (str (name k) suffix)))
(defn- lookup-functions-with-metadata-key
"Return a map of hydration keywords to functions that should be used to hydrate them, e.g.
{:fields #'my-project.models.table/fields
:tables #'my-project.models.database/tables
...}
These functions are ones that are marked METADATA-KEY, e.g. `^:hydrate` or `^:batched-hydrate`."
[metadata-key]
(loop [m {}, [[k f] & more] (for [ns (all-ns)
[symb varr] (ns-interns ns)
:let [hydration-key (metadata-key (meta varr))]
:when hydration-key]
[(if (true? hydration-key)
(keyword (name symb))
hydration-key)
varr])]
(cond
(not k) m
(m k) (throw (Exception.
(format "Duplicate `^%s` functions for key '%s': %s and %s." metadata-key k (m k) f)))
:else (recur (assoc m k f) more))))
(defn- require-model-namespaces-and-find-hydration-fns
"Return map of `hydration-key` -> model
e.g. `:user -> User`.
This is built pulling the `hydration-keys` set from all of our entities."
[]
(into {} (for [ns (all-ns)
[_ varr] (ns-publics ns)
:let [model (var-get varr)]
:when (models/model? model)
:let [hydration-keys (models/hydration-keys model)]
k hydration-keys]
{k model})))
(def ^:private automagic-batched-hydration-key->model* (atom nil))
(defn- automagic-batched-hydration-key->model
"Get a map of hydration keys to corresponding models."
[]
(or @automagic-batched-hydration-key->model*
(reset! automagic-batched-hydration-key->model* (require-model-namespaces-and-find-hydration-fns))))
(defn- can-automagically-batched-hydrate?
"Can we do a batched hydration of RESULTS with key K?"
[results k]
(let [k-id-u (kw-append k "_id")
k-id-d (kw-append k "-id")
contains-k-id? (fn [obj]
(or (contains? obj k-id-u)
(contains? obj k-id-d)))]
(and (contains? (automagic-batched-hydration-key->model) k)
(every? contains-k-id? results))))
(defn- automagically-batched-hydrate
"Hydrate keyword DEST-KEY across all RESULTS by aggregating corresponding source keys (`DEST-KEY_id`),
doing a single `db/select`, and mapping corresponding objects to DEST-KEY."
[results dest-key]
{:pre [(keyword? dest-key)]}
(let [model ((automagic-batched-hydration-key->model) dest-key)
source-keys #{(kw-append dest-key "_id") (kw-append dest-key "-id")}
ids (set (for [result results
:when (not (get result dest-key))
:let [k (some result source-keys)]
:when k]
k))
primary-key (models/primary-key model)
objs (if (seq ids)
(into {} (for [item (db/select model, primary-key [:in ids])]
{(primary-key item) item}))
(constantly nil))]
(for [result results
:let [source-id (some result source-keys)]]
(if (get result dest-key)
result
(assoc result dest-key (objs source-id))))))
Function - Based Batched Hydration ( fns marked ^:batched - hydrate )
(def ^:private hydration-key->batched-f*
(atom nil))
(defn- hydration-key->batched-f
"Map of keys to functions marked `^:batched-hydrate` for them."
[]
(or @hydration-key->batched-f*
(reset! hydration-key->batched-f* (lookup-functions-with-metadata-key :batched-hydrate))))
(defn- can-fn-based-batched-hydrate? [_ k]
(contains? (hydration-key->batched-f) k))
(defn- fn-based-batched-hydrate
[results k]
{:pre [(keyword? k)]}
(((hydration-key->batched-f) k) results))
Function - Based Simple Hydration ( fns marked ^:hydrate )
(def ^:private hydration-key->f*
(atom nil))
(defn- hydration-key->f
"Fetch a map of keys to functions marked `^:hydrate` for them."
[]
(or @hydration-key->f*
(reset! hydration-key->f* (lookup-functions-with-metadata-key :hydrate))))
(defn- simple-hydrate
"Hydrate keyword K in results by calling corresponding functions when applicable."
[results k]
{:pre [(keyword? k)]}
(for [result results]
(when result
(or (when-not (k result)
(when-let [f ((hydration-key->f) k)]
(assoc result k (f result))))
result))))
(defn flush-hydration-key-caches!
"Clear out the cached hydration keys. Useful when doing interactive development and defining new hydration
functions."
[]
(reset! automagic-batched-hydration-key->model* nil)
(reset! hydration-key->batched-f* nil)
(reset! hydration-key->f* nil))
(declare hydrate)
(defn- hydrate-vector
"Hydrate a nested hydration form (vector) by recursively calling `hydrate`."
[results [k & more :as vect]]
(assert (> (count vect) 1)
(format (str "Replace '%s' with '%s'. Vectors are for nested hydration. "
"There's no need to use one when you only have a single key.")
vect (first vect)))
(let [results (hydrate results k)]
(if-not (seq more)
results
(counts-apply results k #(apply hydrate % more)))))
(defn- hydrate-kw
"Hydrate a single keyword."
[results k]
(cond
(can-automagically-batched-hydrate? results k) (automagically-batched-hydrate results k)
(can-fn-based-batched-hydrate? results k) (fn-based-batched-hydrate results k)
:else (simple-hydrate results k)))
(defn- hydrate-1
"Hydrate a single hydration form."
[results k]
(if (keyword? k)
(hydrate-kw results k)
(hydrate-vector results k)))
(defn- hydrate-many
"Hydrate many hydration forms across a *sequence* of RESULTS by recursively calling `hydrate-1`."
[results k & more]
(let [results (hydrate-1 results k)]
(if-not (seq more)
results
(recur results (first more) (rest more)))))
| |
| |
(defn hydrate
"Hydrate a single object or sequence of objects.
#### Automagic Batched Hydration (via hydration-keys)
`hydrate` attempts to do a *batched hydration* where possible.
If the key being hydrated is defined as one of some model's `hydration-keys`,
`hydrate` will do a batched `db/select` if a corresponding key ending with `_id`
is found in the objects being batch hydrated.
(hydrate [{:user_id 100}, {:user_id 101}] :user)
Since `:user` is a hydration key for `User`, a single `db/select` will used to
fetch `Users`:
(db/select User :id [:in #{100 101}])
The corresponding `Users` are then added under the key `:user`.
#### Function-Based Batched Hydration (via functions marked ^:batched-hydrate)
If the key can't be hydrated auto-magically with the appropriate `:hydration-keys`,
`hydrate` will look for a function tagged with `:batched-hydrate` in its metadata, and
use that instead. If a matching function is found, it is called with a collection of objects,
e.g.
(defn with-fields
\"Efficiently add `Fields` to a collection of TABLES.\"
{:batched-hydrate :fields}
[tables]
...)
(let [tables (get-some-tables)]
you can specify a different key to hydrate for in the metadata instead.
#### Simple Hydration (via functions marked ^:hydrate)
If the key is *not* eligible for batched hydration, `hydrate` will look for a function or method
is found, it is called on the object being hydrated and the result is `assoc`ed:
(defn ^:hydrate dashboard [{:keys [dashboard_id]}]
(Dashboard dashboard_id))
(let [dc (DashboardCard ...)]
roughly equivalent to ( assoc dc : dashboard ( dashboard dc ) )
As with `:batched-hydrate` functions, by default, the function will be used to hydrate keys that
`IModel` interface as needed.
#### Hydrating Multiple Keys
You can hydrate several keys at one time:
(hydrate {...} :a :b)
-> {:a 1, :b 2}
#### Nested Hydration
You can do recursive hydration by listing keys inside a vector:
(hydrate {...} [:a :b])
-> {:a {:b 1}}
The first key in a vector will be hydrated normally, and any subsequent keys
will be hydrated *inside* the corresponding values for that key.
(hydrate {...}
[:a [:b :c] :e])
-> {:a {:b {:c 1} :e 2}}"
[results k & ks]
{:pre [(valid-hydration-form? k)
(every? valid-hydration-form? ks)]}
(when results
(if (sequential? results)
(if (empty? results)
results
(apply hydrate-many results k ks))
(first (apply hydrate-many [results] k ks)))))
|
83eb64181406b5657fb6565f14cd3215dc23165b10e71587d6c5e1e82192ab51 | GaloisInc/ivory | Struct.hs | # OPTIONS_GHC -fno - warn - orphans #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE KindSignatures #
# LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
# LANGUAGE TypeFamilies #
# LANGUAGE CPP #
module Ivory.Language.Struct where
import Ivory.Language.Area
import Ivory.Language.Proxy
import Ivory.Language.Ref
import Ivory.Language.Type(IvoryExpr(..), IvoryVar(..))
import qualified Ivory.Language.Syntax as I
import GHC.TypeLits(Symbol)
-- Structs ---------------------------------------------------------------------
instance (IvoryStruct sym, ASymbol sym) => IvoryArea ('Struct sym) where
ivoryArea _ = I.TyStruct (fromTypeSym (aSymbol :: SymbolType sym))
newtype StructDef (sym :: Symbol) = StructDef { getStructDef :: I.Struct }
type family StructName (a :: Area *) :: Symbol
type instance StructName ('Struct sym) = sym
class (IvoryArea ('Struct sym), ASymbol sym) => IvoryStruct (sym :: Symbol) where
structDef :: StructDef sym
-- | Struct field labels.
newtype Label (sym :: Symbol) (field :: Area *) = Label { getLabel :: String }
instance Eq (Label (sym :: Symbol) (field :: Area *)) where
l0 == l1 = getLabel l0 == getLabel l1
-- | Label indexing in a structure.
(~>) :: forall ref s sym field.
( IvoryStruct sym, IvoryRef ref
, IvoryExpr (ref s ('Struct sym)), IvoryExpr (ref s field) )
=> ref s ('Struct sym) -> Label sym field -> ref s field
s ~> l = wrapExpr (I.ExpLabel ty (unwrapExpr s) (getLabel l))
where
ty = ivoryArea (Proxy :: Proxy ('Struct sym))
| null | https://raw.githubusercontent.com/GaloisInc/ivory/53a0795b4fbeb0b7da0f6cdaccdde18849a78cd6/ivory/src/Ivory/Language/Struct.hs | haskell | Structs ---------------------------------------------------------------------
| Struct field labels.
| Label indexing in a structure. | # OPTIONS_GHC -fno - warn - orphans #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE KindSignatures #
# LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
# LANGUAGE TypeFamilies #
# LANGUAGE CPP #
module Ivory.Language.Struct where
import Ivory.Language.Area
import Ivory.Language.Proxy
import Ivory.Language.Ref
import Ivory.Language.Type(IvoryExpr(..), IvoryVar(..))
import qualified Ivory.Language.Syntax as I
import GHC.TypeLits(Symbol)
instance (IvoryStruct sym, ASymbol sym) => IvoryArea ('Struct sym) where
ivoryArea _ = I.TyStruct (fromTypeSym (aSymbol :: SymbolType sym))
newtype StructDef (sym :: Symbol) = StructDef { getStructDef :: I.Struct }
type family StructName (a :: Area *) :: Symbol
type instance StructName ('Struct sym) = sym
class (IvoryArea ('Struct sym), ASymbol sym) => IvoryStruct (sym :: Symbol) where
structDef :: StructDef sym
newtype Label (sym :: Symbol) (field :: Area *) = Label { getLabel :: String }
instance Eq (Label (sym :: Symbol) (field :: Area *)) where
l0 == l1 = getLabel l0 == getLabel l1
(~>) :: forall ref s sym field.
( IvoryStruct sym, IvoryRef ref
, IvoryExpr (ref s ('Struct sym)), IvoryExpr (ref s field) )
=> ref s ('Struct sym) -> Label sym field -> ref s field
s ~> l = wrapExpr (I.ExpLabel ty (unwrapExpr s) (getLabel l))
where
ty = ivoryArea (Proxy :: Proxy ('Struct sym))
|
b40c946aec47c88c8e7274621497e011c62c452832c53d56507161441dec6151 | mvoidex/hsdev | Lisp.hs | module Data.Lisp (
Lisp(..),
lisp,
encodeLisp, decodeLisp
) where
import Prelude hiding (String, Bool)
import qualified Prelude as P (String, Bool)
import Data.Aeson (ToJSON(..), FromJSON(..), (.=))
import qualified Data.Aeson as A
import Data.Aeson.Types (parseMaybe, parseEither)
import Data.ByteString.Lazy (ByteString)
import Data.Char (isAlpha, isDigit)
import Data.Either (partitionEithers)
import qualified Data.HashMap.Strict as HM
import Data.List (unfoldr)
import Data.Scientific
import Data.String (fromString)
import qualified Data.Text as T (unpack)
import qualified Data.Text.Lazy as LT (pack, unpack)
import qualified Data.Text.Lazy.Encoding as LT (encodeUtf8, decodeUtf8)
import qualified Text.ParserCombinators.ReadP as R
import Text.Read (readMaybe)
import qualified Data.Vector as V
data Lisp =
Null |
Bool P.Bool |
Symbol P.String |
String P.String |
Number Scientific |
List [Lisp]
deriving (Eq)
readable :: Read a => Int -> R.ReadP a
readable = R.readS_to_P . readsPrec
lisp :: Int -> R.ReadP Lisp
lisp n = R.choice [
do
s <- symbol
return $ case s of
"null" -> Null
"true" -> Bool True
"false" -> Bool False
_ -> Symbol s,
fmap String string,
fmap Number number,
fmap List list]
where
symbol :: R.ReadP P.String
symbol = concat <$> sequence [
R.option [] (pure <$> R.char ':'),
pure <$> R.satisfy isAlpha,
R.munch (\ch -> isAlpha ch || isDigit ch || ch == '-')]
string :: R.ReadP P.String
string = (R.<++ R.pfail) $ do
('\"':_) <- R.look
readable n
number :: R.ReadP Scientific
number = do
s <- R.munch1 (\ch -> isDigit ch || ch `elem` ['e', 'E', '.', '+', '-'])
maybe R.pfail return $ readMaybe s
list :: R.ReadP [Lisp]
list = R.between (R.char '(') (R.char ')') $ R.sepBy (lisp n) R.skipSpaces
instance Read Lisp where
readsPrec = R.readP_to_S . lisp
instance Show Lisp where
show Null = "null"
show (Bool b)
| b = "true"
| otherwise = "false"
show (Symbol s) = s
show (String s) = show s
show (Number n) = either show show (floatingOrInteger n :: Either Double Integer)
show (List vs) = "(" ++ unwords (map show vs) ++ ")"
instance ToJSON Lisp where
toJSON Null = toJSON A.Null
toJSON (Bool b) = toJSON b
toJSON (Symbol s) = toJSON s
toJSON (String s) = toJSON s
toJSON (Number n) = toJSON n
toJSON (List vs)
| null keywords = toJSON $ map toJSON vals
| null vals = keywordsObject
| otherwise = toJSON $ map toJSON vals ++ [keywordsObject]
where
(vals, keywords) = partitionEithers $ unfoldr cutKeyword vs
keywordsObject = A.object [fromString (dropColon k) .= v | (k, v) <- keywords]
dropColon :: P.String -> P.String
dropColon (':' : s) = s
dropColon s = s
cutKeyword :: [Lisp] -> Maybe (Either Lisp (P.String, Lisp), [Lisp])
cutKeyword [] = Nothing
cutKeyword (Symbol s : []) = Just (Right (s, Null), [])
cutKeyword (Symbol s : Symbol h : hs) = Just (Right (s, Null), Symbol h : hs)
cutKeyword (Symbol s : h : hs) = Just (Right (s, h), hs)
cutKeyword (h : hs) = Just (Left h, hs)
instance FromJSON Lisp where
parseJSON A.Null = return Null
parseJSON (A.Bool b) = return $ Bool b
parseJSON (A.String s) = return $ String $ T.unpack s
parseJSON (A.Number n) = return $ Number n
parseJSON (A.Array vs) = fmap List $ mapM parseJSON $ V.toList vs
parseJSON (A.Object obj) = fmap (List . concat) $ mapM (\(k, v) -> sequence [pure $ Symbol (':' : T.unpack k), parseJSON v]) $ HM.toList obj
decodeLisp :: FromJSON a => ByteString -> Either P.String a
decodeLisp str = do
sexp <- maybe (Left "Not a s-exp") Right . readMaybe . LT.unpack . LT.decodeUtf8 $ str
parseEither parseJSON $ toJSON (sexp :: Lisp)
encodeLisp :: ToJSON a => a -> ByteString
encodeLisp r = LT.encodeUtf8 . LT.pack $ maybe
"(:error \"can't convert to s-exp\")"
(show :: Lisp -> P.String)
(parseMaybe parseJSON (toJSON r))
| null | https://raw.githubusercontent.com/mvoidex/hsdev/016646080a6859e4d9b4a1935fc1d732e388db1a/src/Data/Lisp.hs | haskell | module Data.Lisp (
Lisp(..),
lisp,
encodeLisp, decodeLisp
) where
import Prelude hiding (String, Bool)
import qualified Prelude as P (String, Bool)
import Data.Aeson (ToJSON(..), FromJSON(..), (.=))
import qualified Data.Aeson as A
import Data.Aeson.Types (parseMaybe, parseEither)
import Data.ByteString.Lazy (ByteString)
import Data.Char (isAlpha, isDigit)
import Data.Either (partitionEithers)
import qualified Data.HashMap.Strict as HM
import Data.List (unfoldr)
import Data.Scientific
import Data.String (fromString)
import qualified Data.Text as T (unpack)
import qualified Data.Text.Lazy as LT (pack, unpack)
import qualified Data.Text.Lazy.Encoding as LT (encodeUtf8, decodeUtf8)
import qualified Text.ParserCombinators.ReadP as R
import Text.Read (readMaybe)
import qualified Data.Vector as V
data Lisp =
Null |
Bool P.Bool |
Symbol P.String |
String P.String |
Number Scientific |
List [Lisp]
deriving (Eq)
readable :: Read a => Int -> R.ReadP a
readable = R.readS_to_P . readsPrec
lisp :: Int -> R.ReadP Lisp
lisp n = R.choice [
do
s <- symbol
return $ case s of
"null" -> Null
"true" -> Bool True
"false" -> Bool False
_ -> Symbol s,
fmap String string,
fmap Number number,
fmap List list]
where
symbol :: R.ReadP P.String
symbol = concat <$> sequence [
R.option [] (pure <$> R.char ':'),
pure <$> R.satisfy isAlpha,
R.munch (\ch -> isAlpha ch || isDigit ch || ch == '-')]
string :: R.ReadP P.String
string = (R.<++ R.pfail) $ do
('\"':_) <- R.look
readable n
number :: R.ReadP Scientific
number = do
s <- R.munch1 (\ch -> isDigit ch || ch `elem` ['e', 'E', '.', '+', '-'])
maybe R.pfail return $ readMaybe s
list :: R.ReadP [Lisp]
list = R.between (R.char '(') (R.char ')') $ R.sepBy (lisp n) R.skipSpaces
instance Read Lisp where
readsPrec = R.readP_to_S . lisp
instance Show Lisp where
show Null = "null"
show (Bool b)
| b = "true"
| otherwise = "false"
show (Symbol s) = s
show (String s) = show s
show (Number n) = either show show (floatingOrInteger n :: Either Double Integer)
show (List vs) = "(" ++ unwords (map show vs) ++ ")"
instance ToJSON Lisp where
toJSON Null = toJSON A.Null
toJSON (Bool b) = toJSON b
toJSON (Symbol s) = toJSON s
toJSON (String s) = toJSON s
toJSON (Number n) = toJSON n
toJSON (List vs)
| null keywords = toJSON $ map toJSON vals
| null vals = keywordsObject
| otherwise = toJSON $ map toJSON vals ++ [keywordsObject]
where
(vals, keywords) = partitionEithers $ unfoldr cutKeyword vs
keywordsObject = A.object [fromString (dropColon k) .= v | (k, v) <- keywords]
dropColon :: P.String -> P.String
dropColon (':' : s) = s
dropColon s = s
cutKeyword :: [Lisp] -> Maybe (Either Lisp (P.String, Lisp), [Lisp])
cutKeyword [] = Nothing
cutKeyword (Symbol s : []) = Just (Right (s, Null), [])
cutKeyword (Symbol s : Symbol h : hs) = Just (Right (s, Null), Symbol h : hs)
cutKeyword (Symbol s : h : hs) = Just (Right (s, h), hs)
cutKeyword (h : hs) = Just (Left h, hs)
instance FromJSON Lisp where
parseJSON A.Null = return Null
parseJSON (A.Bool b) = return $ Bool b
parseJSON (A.String s) = return $ String $ T.unpack s
parseJSON (A.Number n) = return $ Number n
parseJSON (A.Array vs) = fmap List $ mapM parseJSON $ V.toList vs
parseJSON (A.Object obj) = fmap (List . concat) $ mapM (\(k, v) -> sequence [pure $ Symbol (':' : T.unpack k), parseJSON v]) $ HM.toList obj
decodeLisp :: FromJSON a => ByteString -> Either P.String a
decodeLisp str = do
sexp <- maybe (Left "Not a s-exp") Right . readMaybe . LT.unpack . LT.decodeUtf8 $ str
parseEither parseJSON $ toJSON (sexp :: Lisp)
encodeLisp :: ToJSON a => a -> ByteString
encodeLisp r = LT.encodeUtf8 . LT.pack $ maybe
"(:error \"can't convert to s-exp\")"
(show :: Lisp -> P.String)
(parseMaybe parseJSON (toJSON r))
| |
15bd10e9b8f065095e57f45b75dabc4dc7e574ac75ef39167e5adc7529c0f46a | adam-james-v/scripts | scripter.clj | (ns scripter.core
(:require [babashka.classpath :refer [add-classpath]]
[clojure.string :as st]
[clojure.java.shell :refer [sh]]))
(defn split-fname
[fname]
(let [sf (st/split fname #"\.")]
[(apply str (drop-last sf))
(last sf)]))
(defn split-folders
[name]
(let [sf (st/split name #"/")]
(vec sf)))
(defn move
[f]
(let [[name ext] (split-fname f)
sname (split-folders name)
xfname (if (= (first sname) "src")
(apply str (conj (rest sname) "build/"))
(apply str (interpose "/" sname)))
fout (str xfname "." ext)]
(println fout)
(sh "chmod" "+x" fout)))
(defn main
  "Script entry point: takes the file path from the first command-line
  argument, announces it, and makes the matching output file executable."
  []
  (let [f (first *command-line-args*)]
    (println (str "Moving script: " f))
    (move f)
    ;; Dependency reporting is currently disabled (reader-discarded).
    #_(println (get-deps f))))
;; Run immediately when invoked as a babashka script.
(main)
| null | https://raw.githubusercontent.com/adam-james-v/scripts/d91dc28b6e4946233fe675cc306af3343cc11204/scripter.clj | clojure | (ns scripter.core
(:require [babashka.classpath :refer [add-classpath]]
[clojure.string :as st]
[clojure.java.shell :refer [sh]]))
(defn split-fname
[fname]
(let [sf (st/split fname #"\.")]
[(apply str (drop-last sf))
(last sf)]))
(defn split-folders
[name]
(let [sf (st/split name #"/")]
(vec sf)))
(defn move
[f]
(let [[name ext] (split-fname f)
sname (split-folders name)
xfname (if (= (first sname) "src")
(apply str (conj (rest sname) "build/"))
(apply str (interpose "/" sname)))
fout (str xfname "." ext)]
(println fout)
(sh "chmod" "+x" fout)))
(defn main
[]
(let [f (first *command-line-args*)]
(println (str "Moving script: " f))
(move f)
#_(println (get-deps f))))
(main)
| |
515f245e8e90a9a4447ff3fa94edd6a723835bc4f2694c0100a42211f1d426f5 | well-typed/large-records | R070.hs | #if PROFILE_CORESIZE
{-# OPTIONS_GHC -ddump-to-file -ddump-ds-preopt -ddump-ds -ddump-simpl #-}
#endif
#if PROFILE_TIMING
{-# OPTIONS_GHC -ddump-to-file -ddump-timings #-}
#endif
{-# OPTIONS_GHC -fplugin=Data.Record.Anon.Plugin #-}
module Experiment.ConstructNoTypeLet.Sized.R070 where
import Data.Record.Anon.Simple (Record)
import Bench.Types
import Common.RowOfSize.Row070
-- | Construct the size-70 anonymous record, filling every field
-- t00..t69 with @MkT x@.  The ANON record syntax is provided by the
-- large-anon source plugin enabled in this module's OPTIONS_GHC pragma.
-- (The decade markers below are comments; their "--" prefixes were
-- garbled in this copy and have been restored.)
record :: Word -> Record ExampleRow
record x = ANON {
    -- 00 .. 09
    t00 = MkT x
  , t01 = MkT x
  , t02 = MkT x
  , t03 = MkT x
  , t04 = MkT x
  , t05 = MkT x
  , t06 = MkT x
  , t07 = MkT x
  , t08 = MkT x
  , t09 = MkT x
    -- 10 .. 19
  , t10 = MkT x
  , t11 = MkT x
  , t12 = MkT x
  , t13 = MkT x
  , t14 = MkT x
  , t15 = MkT x
  , t16 = MkT x
  , t17 = MkT x
  , t18 = MkT x
  , t19 = MkT x
    -- 20 .. 29
  , t20 = MkT x
  , t21 = MkT x
  , t22 = MkT x
  , t23 = MkT x
  , t24 = MkT x
  , t25 = MkT x
  , t26 = MkT x
  , t27 = MkT x
  , t28 = MkT x
  , t29 = MkT x
    -- 30 .. 39
  , t30 = MkT x
  , t31 = MkT x
  , t32 = MkT x
  , t33 = MkT x
  , t34 = MkT x
  , t35 = MkT x
  , t36 = MkT x
  , t37 = MkT x
  , t38 = MkT x
  , t39 = MkT x
    -- 40 .. 49
  , t40 = MkT x
  , t41 = MkT x
  , t42 = MkT x
  , t43 = MkT x
  , t44 = MkT x
  , t45 = MkT x
  , t46 = MkT x
  , t47 = MkT x
  , t48 = MkT x
  , t49 = MkT x
    -- 50 .. 59
  , t50 = MkT x
  , t51 = MkT x
  , t52 = MkT x
  , t53 = MkT x
  , t54 = MkT x
  , t55 = MkT x
  , t56 = MkT x
  , t57 = MkT x
  , t58 = MkT x
  , t59 = MkT x
    -- 60 .. 69
  , t60 = MkT x
  , t61 = MkT x
  , t62 = MkT x
  , t63 = MkT x
  , t64 = MkT x
  , t65 = MkT x
  , t66 = MkT x
  , t67 = MkT x
  , t68 = MkT x
  , t69 = MkT x
} | null | https://raw.githubusercontent.com/well-typed/large-records/78d0966e4871847e2c17a0aa821bacf38bdf96bc/large-records-benchmarks/bench/large-anon/Experiment/ConstructNoTypeLet/Sized/R070.hs | haskell | # OPTIONS_GHC -ddump-to-file -ddump-ds-preopt -ddump-ds -ddump-simpl #
# OPTIONS_GHC -ddump-to-file -ddump-timings #
# OPTIONS_GHC -fplugin=Data.Record.Anon.Plugin #
00 .. 09 | #if PROFILE_CORESIZE
#endif
#if PROFILE_TIMING
#endif
module Experiment.ConstructNoTypeLet.Sized.R070 where
import Data.Record.Anon.Simple (Record)
import Bench.Types
import Common.RowOfSize.Row070
record :: Word -> Record ExampleRow
record x = ANON {
t00 = MkT x
, t01 = MkT x
, t02 = MkT x
, t03 = MkT x
, t04 = MkT x
, t05 = MkT x
, t06 = MkT x
, t07 = MkT x
, t08 = MkT x
, t09 = MkT x
10 .. 19
, t10 = MkT x
, t11 = MkT x
, t12 = MkT x
, t13 = MkT x
, t14 = MkT x
, t15 = MkT x
, t16 = MkT x
, t17 = MkT x
, t18 = MkT x
, t19 = MkT x
20 .. 29
, t20 = MkT x
, t21 = MkT x
, t22 = MkT x
, t23 = MkT x
, t24 = MkT x
, t25 = MkT x
, t26 = MkT x
, t27 = MkT x
, t28 = MkT x
, t29 = MkT x
30 .. 39
, t30 = MkT x
, t31 = MkT x
, t32 = MkT x
, t33 = MkT x
, t34 = MkT x
, t35 = MkT x
, t36 = MkT x
, t37 = MkT x
, t38 = MkT x
, t39 = MkT x
40 .. 49
, t40 = MkT x
, t41 = MkT x
, t42 = MkT x
, t43 = MkT x
, t44 = MkT x
, t45 = MkT x
, t46 = MkT x
, t47 = MkT x
, t48 = MkT x
, t49 = MkT x
50 .. 59
, t50 = MkT x
, t51 = MkT x
, t52 = MkT x
, t53 = MkT x
, t54 = MkT x
, t55 = MkT x
, t56 = MkT x
, t57 = MkT x
, t58 = MkT x
, t59 = MkT x
60 .. 69
, t60 = MkT x
, t61 = MkT x
, t62 = MkT x
, t63 = MkT x
, t64 = MkT x
, t65 = MkT x
, t66 = MkT x
, t67 = MkT x
, t68 = MkT x
, t69 = MkT x
} |
efd4111f8be070a5d90b0c392d8b2c389b3ff5979452f68909990076ca6c7cec | runtimeverification/haskell-backend | OnePathClaim.hs | |
Copyright : ( c ) Runtime Verification , 2020 - 2021
License : BSD-3 - Clause
Copyright : (c) Runtime Verification, 2020-2021
License : BSD-3-Clause
-}
module Kore.Reachability.OnePathClaim (
OnePathClaim (..),
onePathRuleToTerm,
mkOnePathClaim,
Rule (..),
) where
import Control.Monad ((>=>))
import Data.Generics.Wrapped (
_Unwrapped,
)
import GHC.Generics qualified as GHC
import Generics.SOP qualified as SOP
import Kore.Attribute.Axiom qualified as Attribute
import Kore.Debug
import Kore.Internal.Alias (
Alias (aliasConstructor),
)
import Kore.Internal.OrPattern (
OrPattern,
)
import Kore.Internal.Pattern (
Pattern,
)
import Kore.Internal.Pattern qualified as Pattern
import Kore.Internal.Predicate qualified as Predicate
import Kore.Internal.TermLike (
ElementVariable,
TermLike,
VariableName,
getId,
weakExistsFinally,
)
import Kore.Internal.TermLike qualified as TermLike
import Kore.Reachability.Claim
import Kore.Rewrite.AxiomPattern
import Kore.Rewrite.ClaimPattern as ClaimPattern
import Kore.Rewrite.RewritingVariable (
RewritingVariableName,
mkRuleVariable,
)
import Kore.Rewrite.Transition (
TransitionT,
)
import Kore.Rewrite.UnifyingRule (
UnifyingRule (..),
)
import Kore.Simplify.Simplify (
Simplifier,
)
import Kore.Syntax.Sentence qualified as Syntax
import Kore.TopBottom (
TopBottom (..),
)
import Kore.Unparser (
Unparse (..),
)
import Prelude.Kore
-- | One-Path-Claim claim pattern: a thin wrapper around the generic
-- 'ClaimPattern', tagged with one-path reachability semantics.
newtype OnePathClaim = OnePathClaim {getOnePathClaim :: ClaimPattern}
    deriving stock (Eq, Ord, Show)
    deriving stock (GHC.Generic)
    deriving anyclass (NFData)
    deriving anyclass (SOP.Generic, SOP.HasDatatypeInfo)
    deriving anyclass (Debug, Diff)
{- | Converts a 'OnePathClaim' into its term representation.
This is intended to be used only in unparsing situations,
as some of the variable information related to the
rewriting algorithm is lost.
-}
onePathRuleToTerm :: OnePathClaim -> TermLike VariableName
onePathRuleToTerm (OnePathClaim claimPattern') =
    -- WEF = weak-exists-finally, the modality marking one-path claims.
    claimPatternToTerm TermLike.WEF claimPattern'
-- | Assemble a 'OnePathClaim' from a left-hand pattern, an 'OrPattern'
-- for the right-hand side, and the right-hand side's existentially
-- quantified variables.
mkOnePathClaim ::
    Pattern RewritingVariableName ->
    OrPattern RewritingVariableName ->
    [ElementVariable RewritingVariableName] ->
    OnePathClaim
mkOnePathClaim left right existentials =
    OnePathClaim (mkClaimPattern left right existentials)
-- Unparsing goes through the term representation (see
-- 'onePathRuleToTerm').
instance Unparse OnePathClaim where
    unparse claimPattern' =
        unparse $ onePathRuleToTerm claimPattern'
    unparse2 claimPattern' =
        unparse2 $ onePathRuleToTerm claimPattern'

-- A claim is never syntactically top or bottom.
instance TopBottom OnePathClaim where
    isTop _ = False
    isBottom _ = False
-- Projections of the wrapped claim's axiom attributes.
instance From OnePathClaim Attribute.SourceLocation where
    from = Attribute.sourceLocation . attributes . getOnePathClaim

instance From OnePathClaim Attribute.Label where
    from = Attribute.label . attributes . getOnePathClaim

instance From OnePathClaim Attribute.RuleIndex where
    from = Attribute.identifier . attributes . getOnePathClaim

instance From OnePathClaim Attribute.Trusted where
    from = Attribute.trusted . attributes . getOnePathClaim

instance From OnePathClaim Attribute.UniqueId where
    from = Attribute.uniqueId . attributes . getOnePathClaim
-- Unification-related operations delegate to the wrapped 'ClaimPattern'.
instance UnifyingRule OnePathClaim where
    type UnifyingRuleVariable OnePathClaim = RewritingVariableName
    matchingPattern (OnePathClaim claim) = matchingPattern claim
    precondition (OnePathClaim claim) = precondition claim
    refreshRule stale (OnePathClaim claim) =
        OnePathClaim <$> refreshRule stale claim
NOTE : Non - deterministic semantics
The current implementation of one - path verification assumes that the proof claim
is deterministic , that is : the proof claim would not be discharged during at a
non - confluent state in the execution of a non - deterministic semantics . ( Often
this means that the definition is simply deterministic . ) As a result , given the
non - deterministic definition
> module ABC
> import DOMAINS
> syntax S : : = " a " | " b " | " c "
> rule [ ab ] : a = > b
> rule [ ac ] : a = > c
> endmodule
this claim would be provable ,
> rule a = > b [ claim ]
but this claim would * * not * * be provable ,
> rule a = > c [ claim ]
because the algorithm would first apply semantic rule [ ab ] , which prevents rule
[ ac ] from being used .
We decided to assume that the definition is deterministic because one - path
verification is mainly used only for deterministic semantics and the assumption
simplifies the implementation . However , this assumption is not an essential
feature of the algorithm . You should not rely on this assumption elsewhere . This
decision is subject to change without notice .
The current implementation of one-path verification assumes that the proof claim
is deterministic, that is: the proof claim would not be discharged during at a
non-confluent state in the execution of a non-deterministic semantics. (Often
this means that the definition is simply deterministic.) As a result, given the
non-deterministic definition
> module ABC
> import DOMAINS
> syntax S ::= "a" | "b" | "c"
> rule [ab]: a => b
> rule [ac]: a => c
> endmodule
this claim would be provable,
> rule a => b [claim]
but this claim would **not** be provable,
> rule a => c [claim]
because the algorithm would first apply semantic rule [ab], which prevents rule
[ac] from being used.
We decided to assume that the definition is deterministic because one-path
verification is mainly used only for deterministic semantics and the assumption
simplifies the implementation. However, this assumption is not an essential
feature of the algorithm. You should not rely on this assumption elsewhere. This
decision is subject to change without notice.
-}
instance Claim OnePathClaim where
    -- Axioms applied to one-path claims are plain rewrite rules.
    newtype Rule OnePathClaim = OnePathRewriteRule
        {unRuleOnePath :: RewriteRule RewritingVariableName}
        deriving stock (Eq, Ord, Show)
        deriving stock (GHC.Generic)
        deriving anyclass (NFData)
        deriving anyclass (SOP.Generic, SOP.HasDatatypeInfo)
        deriving anyclass (Debug, Diff)
        deriving newtype (Unparse)

    -- The primed helpers operate on the underlying 'ClaimPattern';
    -- '_Unwrapped' is the iso through the newtype wrapper.
    simplify = simplify' _Unwrapped
    checkImplication = checkImplication' _Unwrapped
    applyClaims claims = deriveSeqClaim _Unwrapped OnePathClaim claims
    applyAxioms axioms = deriveSeqAxiomOnePath (concat axioms)
instance From (Rule OnePathClaim) Attribute.PriorityAttributes where
    from = from @(RewriteRule _) . unRuleOnePath

-- Term representations of the claim, in the plain and the
-- rewriting-variable namespaces respectively.
instance From OnePathClaim (AxiomPattern VariableName) where
    from = AxiomPattern . onePathRuleToTerm

instance From OnePathClaim (AxiomPattern RewritingVariableName) where
    from =
        AxiomPattern
            . TermLike.mapVariables (pure mkRuleVariable)
            . onePathRuleToTerm
-- | Recognise one-path claims among parsed claim sentences.  A sentence
-- of shape @(requires /\ lhs) -> alias(rhs)@ is accepted when the alias
-- is 'weakExistsFinally'; the right-hand side is then split into its
-- body and existentially quantified variables.  Anything else yields
-- 'Nothing'.
instance ClaimExtractor OnePathClaim where
    extractClaim (attributes, sentence) =
        case termLike of
            TermLike.Implies_
                _
                (TermLike.And_ _ requires lhs)
                (TermLike.ApplyAlias_ alias [rhs])
                | aliasId == weakExistsFinally -> do
                    -- Rename everything into the rewriting-variable
                    -- namespace before building the claim pattern.
                    let rhs' = TermLike.mapVariables (pure mkRuleVariable) rhs
                        attributes' =
                            Attribute.mapAxiomVariables
                                (pure mkRuleVariable)
                                attributes
                        (right', existentials') =
                            ClaimPattern.termToExistentials rhs'
                    pure $
                        OnePathClaim $
                            ClaimPattern.refreshExistentials
                                ClaimPattern
                                    { ClaimPattern.left =
                                        Pattern.fromTermAndPredicate
                                            lhs
                                            (Predicate.wrapPredicate requires)
                                            & Pattern.mapVariables (pure mkRuleVariable)
                                    , ClaimPattern.right = parseRightHandSide right'
                                    , ClaimPattern.existentials = existentials'
                                    , ClaimPattern.attributes = attributes'
                                    }
              where
                aliasId = (getId . aliasConstructor) alias
            _ -> Nothing
      where
        termLike =
            (Syntax.sentenceAxiomPattern . Syntax.getSentenceClaim) sentence
-- | Apply the given rewrite rules in sequence to a one-path claim,
-- then re-simplify any remainder that no rule covered.
deriveSeqAxiomOnePath ::
    [Rule OnePathClaim] ->
    OnePathClaim ->
    TransitionT
        (AppliedRule OnePathClaim)
        Simplifier
        (ApplyResult OnePathClaim)
deriveSeqAxiomOnePath rules =
    deriveSeq' _Unwrapped OnePathRewriteRule rewrites
        >=> simplifyRemainder
  where
    rewrites = unRuleOnePath <$> rules
    -- Only 'ApplyRemainder' results are simplified further; applied
    -- results are passed through unchanged.
    simplifyRemainder applied =
        case applied of
            ApplyRemainder claim -> ApplyRemainder <$> simplify claim
            _ -> return applied
| null | https://raw.githubusercontent.com/runtimeverification/haskell-backend/4b04a36a24bc9188401230e7937b577ea83484af/kore/src/Kore/Reachability/OnePathClaim.hs | haskell | | One-Path-Claim claim pattern.
| Converts a 'OnePathClaim' into its term representation.
This is intended to be used only in unparsing situations,
as some of the variable information related to the
rewriting algorithm is lost.
| |
Copyright : ( c ) Runtime Verification , 2020 - 2021
License : BSD-3 - Clause
Copyright : (c) Runtime Verification, 2020-2021
License : BSD-3-Clause
-}
module Kore.Reachability.OnePathClaim (
OnePathClaim (..),
onePathRuleToTerm,
mkOnePathClaim,
Rule (..),
) where
import Control.Monad ((>=>))
import Data.Generics.Wrapped (
_Unwrapped,
)
import GHC.Generics qualified as GHC
import Generics.SOP qualified as SOP
import Kore.Attribute.Axiom qualified as Attribute
import Kore.Debug
import Kore.Internal.Alias (
Alias (aliasConstructor),
)
import Kore.Internal.OrPattern (
OrPattern,
)
import Kore.Internal.Pattern (
Pattern,
)
import Kore.Internal.Pattern qualified as Pattern
import Kore.Internal.Predicate qualified as Predicate
import Kore.Internal.TermLike (
ElementVariable,
TermLike,
VariableName,
getId,
weakExistsFinally,
)
import Kore.Internal.TermLike qualified as TermLike
import Kore.Reachability.Claim
import Kore.Rewrite.AxiomPattern
import Kore.Rewrite.ClaimPattern as ClaimPattern
import Kore.Rewrite.RewritingVariable (
RewritingVariableName,
mkRuleVariable,
)
import Kore.Rewrite.Transition (
TransitionT,
)
import Kore.Rewrite.UnifyingRule (
UnifyingRule (..),
)
import Kore.Simplify.Simplify (
Simplifier,
)
import Kore.Syntax.Sentence qualified as Syntax
import Kore.TopBottom (
TopBottom (..),
)
import Kore.Unparser (
Unparse (..),
)
import Prelude.Kore
newtype OnePathClaim = OnePathClaim {getOnePathClaim :: ClaimPattern}
deriving stock (Eq, Ord, Show)
deriving stock (GHC.Generic)
deriving anyclass (NFData)
deriving anyclass (SOP.Generic, SOP.HasDatatypeInfo)
deriving anyclass (Debug, Diff)
onePathRuleToTerm :: OnePathClaim -> TermLike VariableName
onePathRuleToTerm (OnePathClaim claimPattern') =
claimPatternToTerm TermLike.WEF claimPattern'
mkOnePathClaim ::
Pattern RewritingVariableName ->
OrPattern RewritingVariableName ->
[ElementVariable RewritingVariableName] ->
OnePathClaim
mkOnePathClaim left right existentials =
OnePathClaim (mkClaimPattern left right existentials)
instance Unparse OnePathClaim where
unparse claimPattern' =
unparse $ onePathRuleToTerm claimPattern'
unparse2 claimPattern' =
unparse2 $ onePathRuleToTerm claimPattern'
instance TopBottom OnePathClaim where
isTop _ = False
isBottom _ = False
instance From OnePathClaim Attribute.SourceLocation where
from = Attribute.sourceLocation . attributes . getOnePathClaim
instance From OnePathClaim Attribute.Label where
from = Attribute.label . attributes . getOnePathClaim
instance From OnePathClaim Attribute.RuleIndex where
from = Attribute.identifier . attributes . getOnePathClaim
instance From OnePathClaim Attribute.Trusted where
from = Attribute.trusted . attributes . getOnePathClaim
instance From OnePathClaim Attribute.UniqueId where
from = Attribute.uniqueId . attributes . getOnePathClaim
instance UnifyingRule OnePathClaim where
type UnifyingRuleVariable OnePathClaim = RewritingVariableName
matchingPattern (OnePathClaim claim) = matchingPattern claim
precondition (OnePathClaim claim) = precondition claim
refreshRule stale (OnePathClaim claim) =
OnePathClaim <$> refreshRule stale claim
NOTE : Non - deterministic semantics
The current implementation of one - path verification assumes that the proof claim
is deterministic , that is : the proof claim would not be discharged during at a
non - confluent state in the execution of a non - deterministic semantics . ( Often
this means that the definition is simply deterministic . ) As a result , given the
non - deterministic definition
> module ABC
> import DOMAINS
> syntax S : : = " a " | " b " | " c "
> rule [ ab ] : a = > b
> rule [ ac ] : a = > c
> endmodule
this claim would be provable ,
> rule a = > b [ claim ]
but this claim would * * not * * be provable ,
> rule a = > c [ claim ]
because the algorithm would first apply semantic rule [ ab ] , which prevents rule
[ ac ] from being used .
We decided to assume that the definition is deterministic because one - path
verification is mainly used only for deterministic semantics and the assumption
simplifies the implementation . However , this assumption is not an essential
feature of the algorithm . You should not rely on this assumption elsewhere . This
decision is subject to change without notice .
The current implementation of one-path verification assumes that the proof claim
is deterministic, that is: the proof claim would not be discharged during at a
non-confluent state in the execution of a non-deterministic semantics. (Often
this means that the definition is simply deterministic.) As a result, given the
non-deterministic definition
> module ABC
> import DOMAINS
> syntax S ::= "a" | "b" | "c"
> rule [ab]: a => b
> rule [ac]: a => c
> endmodule
this claim would be provable,
> rule a => b [claim]
but this claim would **not** be provable,
> rule a => c [claim]
because the algorithm would first apply semantic rule [ab], which prevents rule
[ac] from being used.
We decided to assume that the definition is deterministic because one-path
verification is mainly used only for deterministic semantics and the assumption
simplifies the implementation. However, this assumption is not an essential
feature of the algorithm. You should not rely on this assumption elsewhere. This
decision is subject to change without notice.
-}
instance Claim OnePathClaim where
newtype Rule OnePathClaim = OnePathRewriteRule
{unRuleOnePath :: RewriteRule RewritingVariableName}
deriving stock (Eq, Ord, Show)
deriving stock (GHC.Generic)
deriving anyclass (NFData)
deriving anyclass (SOP.Generic, SOP.HasDatatypeInfo)
deriving anyclass (Debug, Diff)
deriving newtype (Unparse)
simplify = simplify' _Unwrapped
checkImplication = checkImplication' _Unwrapped
applyClaims claims = deriveSeqClaim _Unwrapped OnePathClaim claims
applyAxioms axioms = deriveSeqAxiomOnePath (concat axioms)
instance From (Rule OnePathClaim) Attribute.PriorityAttributes where
from = from @(RewriteRule _) . unRuleOnePath
instance From OnePathClaim (AxiomPattern VariableName) where
from = AxiomPattern . onePathRuleToTerm
instance From OnePathClaim (AxiomPattern RewritingVariableName) where
from =
AxiomPattern
. TermLike.mapVariables (pure mkRuleVariable)
. onePathRuleToTerm
instance ClaimExtractor OnePathClaim where
extractClaim (attributes, sentence) =
case termLike of
TermLike.Implies_
_
(TermLike.And_ _ requires lhs)
(TermLike.ApplyAlias_ alias [rhs])
| aliasId == weakExistsFinally -> do
let rhs' = TermLike.mapVariables (pure mkRuleVariable) rhs
attributes' =
Attribute.mapAxiomVariables
(pure mkRuleVariable)
attributes
(right', existentials') =
ClaimPattern.termToExistentials rhs'
pure $
OnePathClaim $
ClaimPattern.refreshExistentials
ClaimPattern
{ ClaimPattern.left =
Pattern.fromTermAndPredicate
lhs
(Predicate.wrapPredicate requires)
& Pattern.mapVariables (pure mkRuleVariable)
, ClaimPattern.right = parseRightHandSide right'
, ClaimPattern.existentials = existentials'
, ClaimPattern.attributes = attributes'
}
where
aliasId = (getId . aliasConstructor) alias
_ -> Nothing
where
termLike =
(Syntax.sentenceAxiomPattern . Syntax.getSentenceClaim) sentence
deriveSeqAxiomOnePath ::
[Rule OnePathClaim] ->
OnePathClaim ->
TransitionT
(AppliedRule OnePathClaim)
Simplifier
(ApplyResult OnePathClaim)
deriveSeqAxiomOnePath rules =
deriveSeq' _Unwrapped OnePathRewriteRule rewrites
>=> simplifyRemainder
where
rewrites = unRuleOnePath <$> rules
simplifyRemainder applied =
case applied of
ApplyRemainder claim -> ApplyRemainder <$> simplify claim
_ -> return applied
|
ba3f3eb7ea764b4542cfbebf45efb54213b26f742cd26e2357b8e41e5b1f8a11 | vehicle-lang/vehicle | Check.hs | module Vehicle.Check
( CheckOptions (..),
check,
)
where
import Control.Exception (IOException, catch)
import Control.Monad.Trans (MonadIO (liftIO))
import Data.List.NonEmpty (NonEmpty (..))
import Vehicle.Prelude
import Vehicle.Resource
import Vehicle.Verify.ProofCache (ProofCache (..), readProofCache)
import Vehicle.Verify.Specification.Status (isVerified)
--------------------------------------------------------------------------------
-- Checking
-- | Options for the @check@ command: the location of the proof cache
-- produced by a previous verification run.
newtype CheckOptions = CheckOptions
  { proofCache :: FilePath
  }
  deriving (Eq, Show)
-- | Entry point for the @check@ command: computes the verification
-- status recorded in the proof cache and prints it.
check :: LoggingSettings -> CheckOptions -> IO ()
check loggingSettings checkOptions = runImmediateLogger loggingSettings $ do
  -- If the user has specified no logging target for check mode then
  -- default to command-line.
  status <- checkStatus checkOptions
  programOutput $ pretty status
-- | Read the proof cache and classify the overall result.  Missing
-- resources take precedence over altered ones, which take precedence
-- over the stored verification status.
checkStatus :: CheckOptions -> ImmediateLoggerT IO CheckResult
checkStatus CheckOptions {..} = do
  ProofCache {..} <- liftIO $ readProofCache proofCache
  (missingNetworks, alteredNetworks) <- checkResourceIntegrity resourceSummaries
  return $ case (missingNetworks, alteredNetworks, isVerified status) of
    (x : xs, _, _) -> MissingResources (x :| xs)
    ([], x : xs, _) -> AlteredResources (x :| xs)
    ([], [], False) -> Unverified
    ([], [], True) -> Verified
-- | Re-hash a resource on disk and compare against the hash recorded
-- in the proof cache.  Any IOException while hashing (e.g. the file no
-- longer exists) is treated as 'Missing'.
getResourceStatus :: ResourceSummary -> IO ResourceStatus
getResourceStatus ResourceSummary {..} = do
  let getResourceHash = hashResource resType value
  maybeNewHash <- catch @IOException (Just <$> getResourceHash) (const $ return Nothing)
  return $ case maybeNewHash of
    Nothing -> Missing
    Just newHash
      | fileHash /= newHash -> Altered
      | otherwise -> Unchanged
-- | Partition the recorded resources into (missing, altered) by
-- re-checking each on disk.  Note the recursion descends before
-- probing, so resources are actually checked in reverse list order;
-- the returned lists still follow the input order.
checkResourceIntegrity ::
  (MonadLogger m, MonadIO m) =>
  [ResourceSummary] ->
  m ([ResourceSummary], [ResourceSummary])
checkResourceIntegrity = \case
  [] -> return ([], [])
  (r : rs) -> do
    (missing, altered) <- checkResourceIntegrity rs
    resourceStatus <- liftIO (getResourceStatus r)
    return $ case resourceStatus of
      Unchanged -> (missing, altered)
      Altered -> (missing, r : altered)
      Missing -> (r : missing, altered)
-- | Outcome of re-checking a single resource against its recorded hash.
data ResourceStatus
  = Unchanged
  | Altered
  | Missing

-- | Overall result reported by the @check@ command.
data CheckResult
  = Verified
  | Unverified
  | MissingResources (NonEmpty ResourceSummary)
  | AlteredResources (NonEmpty ResourceSummary)
-- | Human-readable report for each check outcome.
instance Pretty CheckResult where
  pretty Verified = "Status: verified"
  pretty Unverified = "Status: unverified"
  pretty (MissingResources missingNetworks) =
    "Status: unknown"
      <> line
      <> line
      -- Fixed double negative ("cannot not") in the user-facing message.
      <> "The following could not be found:"
      <> line
      <> line
      <> indent 2 (vsep (fmap prettyResource missingNetworks))
      <> line
      <> line
      <> "To fix this problem, either move the missing files back to"
      <+> "the"
      <+> locations
      <+> "above or use Vehicle to reverify the"
      <+> "specification with the new"
      <+> locations <> "."
    where
      -- Pluralise "location" when more than one resource is listed.
      locations = "location" <> if length missingNetworks == 1 then "" else "s"
  pretty (AlteredResources alteredNetworks) =
    "Status: unknown"
      <> line
      <> line
      <> "The following have been altered since verification was"
      <+> "last run:"
      <> line
      <> line
      <> indent 2 (vsep (fmap prettyResource alteredNetworks))
      <> line
      <> line
      <> "To fix this problem, use Vehicle to reverify the specification."
-- | Render a single resource as "type name (value)".
prettyResource :: ResourceSummary -> Doc ann
prettyResource ResourceSummary {..} =
  pretty resType <+> pretty name <+> parens (pretty value)
| null | https://raw.githubusercontent.com/vehicle-lang/vehicle/ca99b8da9e5aabde2c94b758bb4141fbe53ebed5/vehicle/src/Vehicle/Check.hs | haskell | ------------------------------------------------------------------------------
Checking
If the user has specificied no logging target for check mode then
default to command-line. | module Vehicle.Check
( CheckOptions (..),
check,
)
where
import Control.Exception (IOException, catch)
import Control.Monad.Trans (MonadIO (liftIO))
import Data.List.NonEmpty (NonEmpty (..))
import Vehicle.Prelude
import Vehicle.Resource
import Vehicle.Verify.ProofCache (ProofCache (..), readProofCache)
import Vehicle.Verify.Specification.Status (isVerified)
newtype CheckOptions = CheckOptions
{ proofCache :: FilePath
}
deriving (Eq, Show)
check :: LoggingSettings -> CheckOptions -> IO ()
check loggingSettings checkOptions = runImmediateLogger loggingSettings $ do
status <- checkStatus checkOptions
programOutput $ pretty status
checkStatus :: CheckOptions -> ImmediateLoggerT IO CheckResult
checkStatus CheckOptions {..} = do
ProofCache {..} <- liftIO $ readProofCache proofCache
(missingNetworks, alteredNetworks) <- checkResourceIntegrity resourceSummaries
return $ case (missingNetworks, alteredNetworks, isVerified status) of
(x : xs, _, _) -> MissingResources (x :| xs)
([], x : xs, _) -> AlteredResources (x :| xs)
([], [], False) -> Unverified
([], [], True) -> Verified
getResourceStatus :: ResourceSummary -> IO ResourceStatus
getResourceStatus ResourceSummary {..} = do
let getResourceHash = hashResource resType value
maybeNewHash <- catch @IOException (Just <$> getResourceHash) (const $ return Nothing)
return $ case maybeNewHash of
Nothing -> Missing
Just newHash
| fileHash /= newHash -> Altered
| otherwise -> Unchanged
checkResourceIntegrity ::
(MonadLogger m, MonadIO m) =>
[ResourceSummary] ->
m ([ResourceSummary], [ResourceSummary])
checkResourceIntegrity = \case
[] -> return ([], [])
(r : rs) -> do
(missing, altered) <- checkResourceIntegrity rs
resourceStatus <- liftIO (getResourceStatus r)
return $ case resourceStatus of
Unchanged -> (missing, altered)
Altered -> (missing, r : altered)
Missing -> (r : missing, altered)
data ResourceStatus
= Unchanged
| Altered
| Missing
data CheckResult
= Verified
| Unverified
| MissingResources (NonEmpty ResourceSummary)
| AlteredResources (NonEmpty ResourceSummary)
instance Pretty CheckResult where
pretty Verified = "Status: verified"
pretty Unverified = "Status: unverified"
pretty (MissingResources missingNetworks) =
"Status: unknown"
<> line
<> line
<> "The following cannot not be found:"
<> line
<> line
<> indent 2 (vsep (fmap prettyResource missingNetworks))
<> line
<> line
<> "To fix this problem, either move the missing files back to"
<+> "the"
<+> locations
<+> "above or use Vehicle to reverify the"
<+> "specification with the new"
<+> locations <> "."
where
locations = "location" <> if length missingNetworks == 1 then "" else "s"
pretty (AlteredResources alteredNetworks) =
"Status: unknown"
<> line
<> line
<> "The following have been altered since verification was"
<+> "last run:"
<> line
<> line
<> indent 2 (vsep (fmap prettyResource alteredNetworks))
<> line
<> line
<> "To fix this problem, use Vehicle to reverify the specification."
prettyResource :: ResourceSummary -> Doc ann
prettyResource ResourceSummary {..} =
pretty resType <+> pretty name <+> parens (pretty value)
|
4567a0e2567771853f883bafe6ccf7ac262b2732b3fa2aa34acc385828f69c8a | input-output-hk/plutus-apps | Common.hs | {-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveAnyClass #-}
# LANGUAGE DerivingStrategies #
# LANGUAGE ExplicitNamespaces #
# LANGUAGE FlexibleContexts #
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
module Control.Monad.Freer.Extras.Beam.Common where
import Cardano.BM.Data.Tracer (ToObject (..))
import Control.Exception (Exception)
import Data.Aeson (FromJSON, ToJSON)
import Data.Text (Text)
import Database.Beam (Beamable, QBaseScope)
import Database.Beam.Backend (BeamSqlBackendCanSerialize)
import Database.Beam.Query.Internal (QNested)
import Database.Beam.Schema.Tables (FieldsFulfillConstraint)
import GHC.Generics (Generic)
import Prettyprinter (Pretty (..), colon, (<+>))
-- | Constraint bundle: the table is 'Beamable' and every one of its
-- fields can be serialized by the given Beam backend @db@.
type BeamableDb db table = (Beamable table, FieldsFulfillConstraint (BeamSqlBackendCanSerialize db) table)

-- | Two levels of query nesting over the base scope.
-- NOTE(review): presumably the nesting depth required by the Beam query
-- combinators used elsewhere in this package -- confirm at call sites.
type BeamThreadingArg = QNested (QNested QBaseScope)
-- | Errors raised while executing Beam queries; carries the rendered
-- SQL error text.
newtype BeamError =
  SqlError Text
  deriving stock (Eq, Show, Generic)
  deriving anyclass (FromJSON, ToJSON, ToObject)

instance Exception BeamError

instance Pretty BeamError where
  pretty = \case
    SqlError s -> "SqlError (via Beam)" <> colon <+> pretty s
-- | Log messages emitted around Beam query execution.
newtype BeamLog =
  SqlLog String
  deriving stock (Eq, Show, Generic)
  deriving anyclass (FromJSON, ToJSON, ToObject)

instance Pretty BeamLog where
  pretty = \case
    SqlLog s -> "SqlLog" <> colon <+> pretty s
| null | https://raw.githubusercontent.com/input-output-hk/plutus-apps/b7f0e250a32387c8320ddd7fb19a21057b466a27/freer-extras/src/Control/Monad/Freer/Extras/Beam/Common.hs | haskell | # LANGUAGE ConstraintKinds #
# LANGUAGE DeriveAnyClass #
# LANGUAGE OverloadedStrings # | # LANGUAGE DerivingStrategies #
# LANGUAGE ExplicitNamespaces #
# LANGUAGE FlexibleContexts #
# LANGUAGE LambdaCase #
module Control.Monad.Freer.Extras.Beam.Common where
import Cardano.BM.Data.Tracer (ToObject (..))
import Control.Exception (Exception)
import Data.Aeson (FromJSON, ToJSON)
import Data.Text (Text)
import Database.Beam (Beamable, QBaseScope)
import Database.Beam.Backend (BeamSqlBackendCanSerialize)
import Database.Beam.Query.Internal (QNested)
import Database.Beam.Schema.Tables (FieldsFulfillConstraint)
import GHC.Generics (Generic)
import Prettyprinter (Pretty (..), colon, (<+>))
type BeamableDb db table = (Beamable table, FieldsFulfillConstraint (BeamSqlBackendCanSerialize db) table)
type BeamThreadingArg = QNested (QNested QBaseScope)
newtype BeamError =
SqlError Text
deriving stock (Eq, Show, Generic)
deriving anyclass (FromJSON, ToJSON, ToObject)
instance Exception BeamError
instance Pretty BeamError where
pretty = \case
SqlError s -> "SqlError (via Beam)" <> colon <+> pretty s
newtype BeamLog =
SqlLog String
deriving stock (Eq, Show, Generic)
deriving anyclass (FromJSON, ToJSON, ToObject)
instance Pretty BeamLog where
pretty = \case
SqlLog s -> "SqlLog" <> colon <+> pretty s
|
e763abbbbd7801fc77d19a7f284245fe1b912506b89012960c3cfed2aa126835 | commercialhaskell/stack | Main.hs | import StackTest
-- | Integration-test driver: start from a fully clean working tree,
-- build the package, then run its test suite via stack.
main :: IO ()
main = do
  stackCleanFull
  stack ["build", "acme-dont-copy"]
  stack ["test"]
| null | https://raw.githubusercontent.com/commercialhaskell/stack/255cd830627870cdef34b5e54d670ef07882523e/test/integration/tests/4783-doctest-deps/Main.hs | haskell | import StackTest
main :: IO ()
main = do
stackCleanFull
stack ["build", "acme-dont-copy"]
stack ["test"]
| |
e2ccc1bb671ea7018241001a7d6915c86588f8e8c59c810377ef17a8658417f8 | ogaml/ogaml | drawMode.mli | (** GL draw modes enum *)
type t =
| TriangleStrip
| TriangleFan
| Triangles
| Lines
| null | https://raw.githubusercontent.com/ogaml/ogaml/5e74597521abf7ba2833a9247e55780eabfbab78/src/graphics/vertex/drawMode.mli | ocaml | * GL draw modes enum | type t =
| TriangleStrip
| TriangleFan
| Triangles
| Lines
|
971128d97b6daf6d261c385080c9829539b914f93d7bc02f3b607c7856d929d0 | wilbowma/cur | ML-rewrite-2.rkt | #lang cur
(require (rename-in (except-in cur/stdlib/equality == refl)
[ML-= ==]
[ML-refl refl])
cur/stdlib/sugar
cur/stdlib/nat
cur/ntac/base
cur/ntac/standard
cur/ntac/ML-rewrite
"rackunit-ntac.rkt")
;; tests rewrite
;;plus-0-n raw term
(define plus-0-n-term (λ [n : Nat] (refl Nat n)))
(::
plus-0-n-term
(forall [n : Nat] (== Nat (plus 0 n) n)))
(define-theorem plus-0-n
(forall [n : Nat] (== Nat (plus 0 n) n))
by-intro
simpl
reflexivity)
;; mult-0-plus
;; - uses rewrite
;; raw term
(::
(λ [n : Nat]
((λ [H : (== Nat (plus 0 n) n)]
(new-elim
H
(λ [n0 : Nat] [n : Nat]
(λ [H : (== Nat n0 n)]
(Π [m : Nat]
(== Nat (mult n0 m) (mult n0 m)))))
(λ [n0 : Nat]
(λ [m : Nat]
(refl Nat (mult n0 m))))))
(plus-0-n-term n)))
(∀ [n : Nat] [m : Nat]
(== Nat (mult (plus 0 n) m) (mult n m))))
;; raw term, directly
(::
(λ [n : Nat]
(new-elim
(plus-0-n-term n)
(λ [n0 : Nat] [n : Nat]
(λ [H : (== Nat n0 n)]
(Π [m : Nat]
(== Nat (mult n m) (mult n m)))))
(λ [n : Nat]
(λ [m : Nat]
(refl Nat (mult n m))))))
(∀ [n : Nat] [m : Nat]
(== Nat (mult (plus 0 n) m) (mult n m))))
(define-theorem mult-0-plus
(∀ [n : Nat] [m : Nat]
(== Nat (mult (plus 0 n) m) (mult n m)))
(by-intro n)
by-intro
(by-rewrite plus-0-n n)
reflexivity)
| null | https://raw.githubusercontent.com/wilbowma/cur/e039c98941b3d272c6e462387df22846e10b0128/cur-test/cur/tests/ntac/ML-rewrite-2.rkt | racket | tests rewrite
plus-0-n raw term
mult-0-plus
- uses rewrite
raw term
raw term, directly | #lang cur
(require (rename-in (except-in cur/stdlib/equality == refl)
[ML-= ==]
[ML-refl refl])
cur/stdlib/sugar
cur/stdlib/nat
cur/ntac/base
cur/ntac/standard
cur/ntac/ML-rewrite
"rackunit-ntac.rkt")
(define plus-0-n-term (λ [n : Nat] (refl Nat n)))
(::
plus-0-n-term
(forall [n : Nat] (== Nat (plus 0 n) n)))
(define-theorem plus-0-n
(forall [n : Nat] (== Nat (plus 0 n) n))
by-intro
simpl
reflexivity)
(::
(λ [n : Nat]
((λ [H : (== Nat (plus 0 n) n)]
(new-elim
H
(λ [n0 : Nat] [n : Nat]
(λ [H : (== Nat n0 n)]
(Π [m : Nat]
(== Nat (mult n0 m) (mult n0 m)))))
(λ [n0 : Nat]
(λ [m : Nat]
(refl Nat (mult n0 m))))))
(plus-0-n-term n)))
(∀ [n : Nat] [m : Nat]
(== Nat (mult (plus 0 n) m) (mult n m))))
(::
(λ [n : Nat]
(new-elim
(plus-0-n-term n)
(λ [n0 : Nat] [n : Nat]
(λ [H : (== Nat n0 n)]
(Π [m : Nat]
(== Nat (mult n m) (mult n m)))))
(λ [n : Nat]
(λ [m : Nat]
(refl Nat (mult n m))))))
(∀ [n : Nat] [m : Nat]
(== Nat (mult (plus 0 n) m) (mult n m))))
(define-theorem mult-0-plus
(∀ [n : Nat] [m : Nat]
(== Nat (mult (plus 0 n) m) (mult n m)))
(by-intro n)
by-intro
(by-rewrite plus-0-n n)
reflexivity)
|
845e17bed67db59b6ed122087568d7d922be5fc3aa4625f2c12989b378e2eb7e | lesguillemets/sicp-haskell | 1.12.hs | module OneTwelve where
Write a procedure that computes elements of 's triangle by means
-- of a recursive process.
-- Not sure what the expected input is..
pascalAt :: Int -> Int -> Int
pascalAt row column
| column == 0 || column == row = 1
| column < 0 || row < column = 0
| otherwise = pascalAt (row-1) column + pascalAt (row-1) (column-1)
-- | Let's check, anyway.
> > > pascalAt 0 0
1
-- >>> pascalAt 2 1
2
> > > map ( pascalAt 3 ) [ 0 .. 3 ]
-- [1,3,3,1]
> > > map ( pascalAt 4 ) [ 0 .. 4 ]
-- [1,4,6,4,1]
| null | https://raw.githubusercontent.com/lesguillemets/sicp-haskell/df524a1e28c45fb16a56f539cad8babc881d0431/exercise/chap01/sect2/1.12.hs | haskell | of a recursive process.
Not sure what the expected input is..
| Let's check, anyway.
>>> pascalAt 2 1
[1,3,3,1]
[1,4,6,4,1] | module OneTwelve where
Write a procedure that computes elements of 's triangle by means
pascalAt :: Int -> Int -> Int
pascalAt row column
| column == 0 || column == row = 1
| column < 0 || row < column = 0
| otherwise = pascalAt (row-1) column + pascalAt (row-1) (column-1)
> > > pascalAt 0 0
1
2
> > > map ( pascalAt 3 ) [ 0 .. 3 ]
> > > map ( pascalAt 4 ) [ 0 .. 4 ]
|
5fa4b8a1a9216c3eac0173e2369f250cb6a71c0e1844f6dbaeb30453ed44ad7f | emqx/emqx | emqx_persistent_session_SUITE.erl | %%--------------------------------------------------------------------
Copyright ( c ) 2021 - 2023 EMQ Technologies Co. , Ltd. All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_persistent_session_SUITE).
-include_lib("stdlib/include/assert.hrl").
-include_lib("common_test/include/ct.hrl").
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
-include_lib("../include/emqx.hrl").
-include("../src/persistent_session/emqx_persistent_session.hrl").
-compile(export_all).
-compile(nowarn_export_all).
%%--------------------------------------------------------------------
SUITE boilerplate
%%--------------------------------------------------------------------
all() ->
[
{group, persistent_store_enabled},
{group, persistent_store_disabled}
].
A persistent session can be resumed in two ways :
1 . The old connection process is still alive , and the session is taken
%% over by the new connection.
2 . The old session process has died ( e.g. , because of node down ) .
%% The new process resumes the session from the stored state, and finds
%% any subscribed messages from the persistent message store.
%%
%% We want to test both ways, both with the db backend enabled and disabled.
%%
%% In addition, we test both tcp and quic connections.
groups() ->
TCs = emqx_common_test_helpers:all(?MODULE),
SnabbkaffeTCs = [TC || TC <- TCs, is_snabbkaffe_tc(TC)],
GCTests = [TC || TC <- TCs, is_gc_tc(TC)],
OtherTCs = (TCs -- SnabbkaffeTCs) -- GCTests,
[
{persistent_store_enabled, [
{group, ram_tables},
{group, disc_tables}
]},
{persistent_store_disabled, [{group, no_kill_connection_process}]},
{ram_tables, [], [
{group, no_kill_connection_process},
{group, kill_connection_process},
{group, snabbkaffe},
{group, gc_tests}
]},
{disc_tables, [], [
{group, no_kill_connection_process},
{group, kill_connection_process},
{group, snabbkaffe},
{group, gc_tests}
]},
{no_kill_connection_process, [], [{group, tcp}, {group, quic}, {group, ws}]},
{kill_connection_process, [], [{group, tcp}, {group, quic}, {group, ws}]},
{snabbkaffe, [], [
{group, tcp_snabbkaffe}, {group, quic_snabbkaffe}, {group, ws_snabbkaffe}
]},
{tcp, [], OtherTCs},
{quic, [], OtherTCs},
{ws, [], OtherTCs},
{tcp_snabbkaffe, [], SnabbkaffeTCs},
{quic_snabbkaffe, [], SnabbkaffeTCs},
{ws_snabbkaffe, [], SnabbkaffeTCs},
{gc_tests, [], GCTests}
].
is_snabbkaffe_tc(TC) ->
re:run(atom_to_list(TC), "^t_snabbkaffe_") /= nomatch.
is_gc_tc(TC) ->
re:run(atom_to_list(TC), "^t_gc_") /= nomatch.
init_per_group(persistent_store_enabled, Config) ->
[{persistent_store_enabled, true} | Config];
init_per_group(Group, Config) when Group =:= ram_tables; Group =:= disc_tables ->
%% Start Apps
Reply =
case Group =:= ram_tables of
true -> ram;
false -> disc
end,
emqx_common_test_helpers:boot_modules(all),
meck:new(emqx_config, [non_strict, passthrough, no_history, no_link]),
meck:expect(emqx_config, get, fun
(?on_disc_key) -> Reply =:= disc;
(?is_enabled_key) -> true;
(Other) -> meck:passthrough([Other])
end),
emqx_common_test_helpers:start_apps([], fun set_special_confs/1),
?assertEqual(true, emqx_persistent_session:is_store_enabled()),
Config;
init_per_group(persistent_store_disabled, Config) ->
%% Start Apps
emqx_common_test_helpers:boot_modules(all),
meck:new(emqx_config, [non_strict, passthrough, no_history, no_link]),
meck:expect(emqx_config, get, fun
(?is_enabled_key) -> false;
(Other) -> meck:passthrough([Other])
end),
emqx_common_test_helpers:start_apps([], fun set_special_confs/1),
?assertEqual(false, emqx_persistent_session:is_store_enabled()),
[{persistent_store_enabled, false} | Config];
init_per_group(Group, Config) when Group == ws; Group == ws_snabbkaffe ->
[
{ssl, false},
{host, "localhost"},
{enable_websocket, true},
{port, 8083},
{conn_fun, ws_connect}
| Config
];
init_per_group(Group, Config) when Group == tcp; Group == tcp_snabbkaffe ->
[{port, 1883}, {conn_fun, connect} | Config];
init_per_group(Group, Config) when Group == quic; Group == quic_snabbkaffe ->
UdpPort = 1883,
emqx_common_test_helpers:ensure_quic_listener(?MODULE, UdpPort),
[{port, UdpPort}, {conn_fun, quic_connect} | Config];
init_per_group(no_kill_connection_process, Config) ->
[{kill_connection_process, false} | Config];
init_per_group(kill_connection_process, Config) ->
[{kill_connection_process, true} | Config];
init_per_group(snabbkaffe, Config) ->
[{kill_connection_process, true} | Config];
init_per_group(gc_tests, Config) ->
%% We need to make sure the system does not interfere with this test group.
lists:foreach(
fun(ClientId) ->
maybe_kill_connection_process(ClientId, [{kill_connection_process, true}])
end,
emqx_cm:all_client_ids()
),
emqx_common_test_helpers:stop_apps([]),
SessionMsgEts = gc_tests_session_store,
MsgEts = gc_tests_msg_store,
Pid = spawn(fun() ->
ets:new(SessionMsgEts, [named_table, public, ordered_set]),
ets:new(MsgEts, [named_table, public, ordered_set, {keypos, 2}]),
receive
stop -> ok
end
end),
meck:new(mnesia, [non_strict, passthrough, no_history, no_link]),
meck:expect(mnesia, dirty_first, fun
(?SESS_MSG_TAB) -> ets:first(SessionMsgEts);
(?MSG_TAB) -> ets:first(MsgEts);
(X) -> meck:passthrough([X])
end),
meck:expect(mnesia, dirty_next, fun
(?SESS_MSG_TAB, X) -> ets:next(SessionMsgEts, X);
(?MSG_TAB, X) -> ets:next(MsgEts, X);
(Tab, X) -> meck:passthrough([Tab, X])
end),
meck:expect(mnesia, dirty_delete, fun
(?MSG_TAB, X) -> ets:delete(MsgEts, X);
(Tab, X) -> meck:passthrough([Tab, X])
end),
[{store_owner, Pid}, {session_msg_store, SessionMsgEts}, {msg_store, MsgEts} | Config].
init_per_suite(Config) ->
Config.
set_special_confs(_) ->
ok.
end_per_suite(_Config) ->
emqx_common_test_helpers:ensure_mnesia_stopped(),
ok.
end_per_group(gc_tests, Config) ->
meck:unload(mnesia),
?config(store_owner, Config) ! stop,
ok;
end_per_group(Group, _Config) when
Group =:= ram_tables; Group =:= disc_tables
->
meck:unload(emqx_config),
emqx_common_test_helpers:stop_apps([]);
end_per_group(persistent_store_disabled, _Config) ->
meck:unload(emqx_config),
emqx_common_test_helpers:stop_apps([]);
end_per_group(_Group, _Config) ->
ok.
init_per_testcase(TestCase, Config) ->
Config1 = preconfig_per_testcase(TestCase, Config),
case is_gc_tc(TestCase) of
true ->
ets:delete_all_objects(?config(msg_store, Config)),
ets:delete_all_objects(?config(session_msg_store, Config));
false ->
skip
end,
case erlang:function_exported(?MODULE, TestCase, 2) of
true -> ?MODULE:TestCase(init, Config1);
_ -> Config1
end.
end_per_testcase(TestCase, Config) ->
case is_snabbkaffe_tc(TestCase) of
true -> snabbkaffe:stop();
false -> skip
end,
case erlang:function_exported(?MODULE, TestCase, 2) of
true -> ?MODULE:TestCase('end', Config);
false -> ok
end,
Config.
preconfig_per_testcase(TestCase, Config) ->
{BaseName, Config1} =
case ?config(tc_group_properties, Config) of
[] ->
%% We are running a single testcase
{
atom_to_binary(TestCase),
init_per_group(tcp, init_per_group(kill_connection_process, Config))
};
[_ | _] = Props ->
Path = lists:reverse(?config(tc_group_path, Config) ++ Props),
Pre0 = [atom_to_list(N) || {name, N} <- lists:flatten(Path)],
Pre1 = lists:join("_", Pre0 ++ [atom_to_binary(TestCase)]),
{iolist_to_binary(Pre1), Config}
end,
[
{topic, iolist_to_binary([BaseName, "/foo"])},
{stopic, iolist_to_binary([BaseName, "/+"])},
{stopic_alt, iolist_to_binary([BaseName, "/foo"])},
{client_id, BaseName}
| Config1
].
%%--------------------------------------------------------------------
%% Helpers
%%--------------------------------------------------------------------
client_info(Key, Client) ->
maps:get(Key, maps:from_list(emqtt:info(Client)), undefined).
receive_messages(Count) ->
receive_messages(Count, []).
receive_messages(0, Msgs) ->
Msgs;
receive_messages(Count, Msgs) ->
receive
{publish, Msg} ->
receive_messages(Count - 1, [Msg | Msgs]);
_Other ->
receive_messages(Count, Msgs)
after 5000 ->
Msgs
end.
maybe_kill_connection_process(ClientId, Config) ->
case ?config(kill_connection_process, Config) of
true ->
case emqx_cm:lookup_channels(ClientId) of
[] ->
ok;
[ConnectionPid] ->
?assert(is_pid(ConnectionPid)),
Ref = monitor(process, ConnectionPid),
ConnectionPid ! die_if_test,
receive
{'DOWN', Ref, process, ConnectionPid, normal} -> ok
after 3000 -> error(process_did_not_die)
end,
wait_for_cm_unregister(ClientId)
end;
false ->
ok
end.
wait_for_cm_unregister(ClientId) ->
wait_for_cm_unregister(ClientId, 100).
wait_for_cm_unregister(_ClientId, 0) ->
error(cm_did_not_unregister);
wait_for_cm_unregister(ClientId, N) ->
case emqx_cm:lookup_channels(ClientId) of
[] ->
ok;
[_] ->
timer:sleep(100),
wait_for_cm_unregister(ClientId, N - 1)
end.
snabbkaffe_sync_publish(Topic, Payloads) ->
Fun = fun(Client, Payload) ->
?check_trace(
begin
?wait_async_action(
{ok, _} = emqtt:publish(Client, Topic, Payload, 2),
#{?snk_kind := ps_persist_msg, payload := Payload}
)
end,
fun(_, _Trace) -> ok end
)
end,
do_publish(Payloads, Fun, true).
publish(Topic, Payloads) ->
publish(Topic, Payloads, false).
publish(Topic, Payloads, WaitForUnregister) ->
Fun = fun(Client, Payload) ->
{ok, _} = emqtt:publish(Client, Topic, Payload, 2)
end,
do_publish(Payloads, Fun, WaitForUnregister).
do_publish(Payloads = [_ | _], PublishFun, WaitForUnregister) ->
%% Publish from another process to avoid connection confusion.
{Pid, Ref} =
spawn_monitor(
fun() ->
%% For convenience, always publish using tcp.
%% The publish path is not what we are testing.
ClientID = <<"ps_SUITE_publisher">>,
{ok, Client} = emqtt:start_link([
{proto_ver, v5},
{clientid, ClientID},
{port, 1883}
]),
{ok, _} = emqtt:connect(Client),
lists:foreach(fun(Payload) -> PublishFun(Client, Payload) end, Payloads),
ok = emqtt:disconnect(Client),
%% Snabbkaffe sometimes fails unless all processes are gone.
case WaitForUnregister of
false ->
ok;
true ->
case emqx_cm:lookup_channels(ClientID) of
[] ->
ok;
[ConnectionPid] ->
?assert(is_pid(ConnectionPid)),
Ref1 = monitor(process, ConnectionPid),
receive
{'DOWN', Ref1, process, ConnectionPid, _} -> ok
after 3000 -> error(process_did_not_die)
end,
wait_for_cm_unregister(ClientID)
end
end
end
),
receive
{'DOWN', Ref, process, Pid, normal} -> ok;
{'DOWN', Ref, process, Pid, What} -> error({failed_publish, What})
end;
do_publish(Payload, PublishFun, WaitForUnregister) ->
do_publish([Payload], PublishFun, WaitForUnregister).
%%--------------------------------------------------------------------
%% Test Cases
%%--------------------------------------------------------------------
%% [MQTT-3.1.2-23]
t_connect_session_expiry_interval(Config) ->
ConnFun = ?config(conn_fun, Config),
Topic = ?config(topic, Config),
STopic = ?config(stopic, Config),
Payload = <<"test message">>,
ClientId = ?config(client_id, Config),
{ok, Client1} = emqtt:start_link([
{clientid, ClientId},
{proto_ver, v5},
{properties, #{'Session-Expiry-Interval' => 30}}
| Config
]),
{ok, _} = emqtt:ConnFun(Client1),
{ok, _, [2]} = emqtt:subscribe(Client1, STopic, qos2),
ok = emqtt:disconnect(Client1),
maybe_kill_connection_process(ClientId, Config),
publish(Topic, Payload),
{ok, Client2} = emqtt:start_link([
{clientid, ClientId},
{proto_ver, v5},
{properties, #{'Session-Expiry-Interval' => 30}},
{clean_start, false}
| Config
]),
{ok, _} = emqtt:ConnFun(Client2),
[Msg | _] = receive_messages(1),
?assertEqual({ok, iolist_to_binary(Topic)}, maps:find(topic, Msg)),
?assertEqual({ok, iolist_to_binary(Payload)}, maps:find(payload, Msg)),
?assertEqual({ok, 2}, maps:find(qos, Msg)),
ok = emqtt:disconnect(Client2).
t_without_client_id(Config) ->
%% Emqtt client dies
process_flag(trap_exit, true),
ConnFun = ?config(conn_fun, Config),
{ok, Client0} = emqtt:start_link([
{proto_ver, v5},
{properties, #{'Session-Expiry-Interval' => 30}},
{clean_start, false}
| Config
]),
{error, {client_identifier_not_valid, _}} = emqtt:ConnFun(Client0),
ok.
t_assigned_clientid_persistent_session(Config) ->
ConnFun = ?config(conn_fun, Config),
{ok, Client1} = emqtt:start_link([
{proto_ver, v5},
{properties, #{'Session-Expiry-Interval' => 30}},
{clean_start, true}
| Config
]),
{ok, _} = emqtt:ConnFun(Client1),
AssignedClientId = client_info(clientid, Client1),
ok = emqtt:disconnect(Client1),
maybe_kill_connection_process(AssignedClientId, Config),
{ok, Client2} = emqtt:start_link([
{clientid, AssignedClientId},
{proto_ver, v5},
{clean_start, false}
| Config
]),
{ok, _} = emqtt:ConnFun(Client2),
?assertEqual(1, client_info(session_present, Client2)),
ok = emqtt:disconnect(Client2).
t_cancel_on_disconnect(Config) ->
%% Open a persistent session, but cancel the persistence when
%% shutting down the connection.
ConnFun = ?config(conn_fun, Config),
ClientId = ?config(client_id, Config),
{ok, Client1} = emqtt:start_link([
{proto_ver, v5},
{clientid, ClientId},
{properties, #{'Session-Expiry-Interval' => 30}},
{clean_start, true}
| Config
]),
{ok, _} = emqtt:ConnFun(Client1),
ok = emqtt:disconnect(Client1, 0, #{'Session-Expiry-Interval' => 0}),
wait_for_cm_unregister(ClientId),
{ok, Client2} = emqtt:start_link([
{clientid, ClientId},
{proto_ver, v5},
{clean_start, false},
{properties, #{'Session-Expiry-Interval' => 30}}
| Config
]),
{ok, _} = emqtt:ConnFun(Client2),
?assertEqual(0, client_info(session_present, Client2)),
ok = emqtt:disconnect(Client2).
t_persist_on_disconnect(Config) ->
%% Open a non-persistent session, but add the persistence when
%% shutting down the connection. This is a protocol error, and
%% should not convert the session into a persistent session.
ConnFun = ?config(conn_fun, Config),
ClientId = ?config(client_id, Config),
{ok, Client1} = emqtt:start_link([
{proto_ver, v5},
{clientid, ClientId},
{properties, #{'Session-Expiry-Interval' => 0}},
{clean_start, true}
| Config
]),
{ok, _} = emqtt:ConnFun(Client1),
%% Strangely enough, the disconnect is reported as successful by emqtt.
ok = emqtt:disconnect(Client1, 0, #{'Session-Expiry-Interval' => 30}),
wait_for_cm_unregister(ClientId),
{ok, Client2} = emqtt:start_link([
{clientid, ClientId},
{proto_ver, v5},
{clean_start, false},
{properties, #{'Session-Expiry-Interval' => 30}}
| Config
]),
{ok, _} = emqtt:ConnFun(Client2),
%% The session should not be known, since it wasn't persisted because of the
%% changed expiry interval in the disconnect call.
?assertEqual(0, client_info(session_present, Client2)),
ok = emqtt:disconnect(Client2).
wait_for_pending(SId) ->
wait_for_pending(SId, 100).
wait_for_pending(_SId, 0) ->
error(exhausted_wait_for_pending);
wait_for_pending(SId, N) ->
case emqx_persistent_session:pending(SId) of
[] ->
timer:sleep(1),
wait_for_pending(SId, N - 1);
[_ | _] = Pending ->
Pending
end.
t_process_dies_session_expires(Config) ->
%% Emulate an error in the connect process,
%% or that the node of the process goes down.
%% A persistent session should eventually expire.
ConnFun = ?config(conn_fun, Config),
ClientId = ?config(client_id, Config),
Topic = ?config(topic, Config),
STopic = ?config(stopic, Config),
Payload = <<"test">>,
{ok, Client1} = emqtt:start_link([
{proto_ver, v5},
{clientid, ClientId},
{properties, #{'Session-Expiry-Interval' => 1}},
{clean_start, true}
| Config
]),
{ok, _} = emqtt:ConnFun(Client1),
{ok, _, [2]} = emqtt:subscribe(Client1, STopic, qos2),
ok = emqtt:disconnect(Client1),
maybe_kill_connection_process(ClientId, Config),
ok = publish(Topic, [Payload]),
SessionId =
case ?config(persistent_store_enabled, Config) of
false ->
undefined;
true ->
%% The session should not be marked as expired.
{Tag, Session} = emqx_persistent_session:lookup(ClientId),
?assertEqual(persistent, Tag),
SId = emqx_session:info(id, Session),
case ?config(kill_connection_process, Config) of
true ->
%% The session should have a pending message
?assertMatch([_], wait_for_pending(SId));
false ->
skip
end,
SId
end,
timer:sleep(1100),
%% The session should now be marked as expired.
case
(?config(kill_connection_process, Config) andalso
?config(persistent_store_enabled, Config))
of
true -> ?assertMatch({expired, _}, emqx_persistent_session:lookup(ClientId));
false -> skip
end,
{ok, Client2} = emqtt:start_link([
{proto_ver, v5},
{clientid, ClientId},
{properties, #{'Session-Expiry-Interval' => 30}},
{clean_start, false}
| Config
]),
{ok, _} = emqtt:ConnFun(Client2),
?assertEqual(0, client_info(session_present, Client2)),
case
(?config(kill_connection_process, Config) andalso
?config(persistent_store_enabled, Config))
of
true ->
The session should be a fresh one
{persistent, NewSession} = emqx_persistent_session:lookup(ClientId),
?assertNotEqual(SessionId, emqx_session:info(id, NewSession)),
%% The old session should now either
%% be marked as abandoned or already be garbage collected.
?assertMatch([], emqx_persistent_session:pending(SessionId));
false ->
skip
end,
%% We should not receive the pending message
?assertEqual([], receive_messages(1)),
emqtt:disconnect(Client2).
t_publish_while_client_is_gone(Config) ->
%% A persistent session should receive messages in its
%% subscription even if the process owning the session dies.
ConnFun = ?config(conn_fun, Config),
Topic = ?config(topic, Config),
STopic = ?config(stopic, Config),
Payload1 = <<"hello1">>,
Payload2 = <<"hello2">>,
ClientId = ?config(client_id, Config),
{ok, Client1} = emqtt:start_link([
{proto_ver, v5},
{clientid, ClientId},
{properties, #{'Session-Expiry-Interval' => 30}},
{clean_start, true}
| Config
]),
{ok, _} = emqtt:ConnFun(Client1),
{ok, _, [2]} = emqtt:subscribe(Client1, STopic, qos2),
ok = emqtt:disconnect(Client1),
maybe_kill_connection_process(ClientId, Config),
ok = publish(Topic, [Payload1, Payload2]),
{ok, Client2} = emqtt:start_link([
{proto_ver, v5},
{clientid, ClientId},
{properties, #{'Session-Expiry-Interval' => 30}},
{clean_start, false}
| Config
]),
{ok, _} = emqtt:ConnFun(Client2),
Msgs = receive_messages(2),
?assertMatch([_, _], Msgs),
[Msg2, Msg1] = Msgs,
?assertEqual({ok, iolist_to_binary(Payload1)}, maps:find(payload, Msg1)),
?assertEqual({ok, 2}, maps:find(qos, Msg1)),
?assertEqual({ok, iolist_to_binary(Payload2)}, maps:find(payload, Msg2)),
?assertEqual({ok, 2}, maps:find(qos, Msg2)),
ok = emqtt:disconnect(Client2).
t_clean_start_drops_subscriptions(Config) ->
1 . A persistent session is started and disconnected .
2 . While disconnected , a message is published and persisted .
3 . When connecting again , the clean start flag is set , the subscription is renewed ,
%% then we disconnect again.
4 . Finally , a new connection is made with clean start set to false .
%% The original message should not be delivered.
ConnFun = ?config(conn_fun, Config),
Topic = ?config(topic, Config),
STopic = ?config(stopic, Config),
Payload1 = <<"hello1">>,
Payload2 = <<"hello2">>,
Payload3 = <<"hello3">>,
ClientId = ?config(client_id, Config),
1 .
{ok, Client1} = emqtt:start_link([
{proto_ver, v5},
{clientid, ClientId},
{properties, #{'Session-Expiry-Interval' => 30}},
{clean_start, true}
| Config
]),
{ok, _} = emqtt:ConnFun(Client1),
{ok, _, [2]} = emqtt:subscribe(Client1, STopic, qos2),
ok = emqtt:disconnect(Client1),
maybe_kill_connection_process(ClientId, Config),
2 .
ok = publish(Topic, Payload1),
3 .
{ok, Client2} = emqtt:start_link([
{proto_ver, v5},
{clientid, ClientId},
{properties, #{'Session-Expiry-Interval' => 30}},
{clean_start, true}
| Config
]),
{ok, _} = emqtt:ConnFun(Client2),
?assertEqual(0, client_info(session_present, Client2)),
{ok, _, [2]} = emqtt:subscribe(Client2, STopic, qos2),
ok = publish(Topic, Payload2),
[Msg1] = receive_messages(1),
?assertEqual({ok, iolist_to_binary(Payload2)}, maps:find(payload, Msg1)),
ok = emqtt:disconnect(Client2),
maybe_kill_connection_process(ClientId, Config),
4 .
{ok, Client3} = emqtt:start_link([
{proto_ver, v5},
{clientid, ClientId},
{properties, #{'Session-Expiry-Interval' => 30}},
{clean_start, false}
| Config
]),
{ok, _} = emqtt:ConnFun(Client3),
ok = publish(Topic, Payload3),
[Msg2] = receive_messages(1),
?assertEqual({ok, iolist_to_binary(Payload3)}, maps:find(payload, Msg2)),
ok = emqtt:disconnect(Client3).
t_unsubscribe(Config) ->
ConnFun = ?config(conn_fun, Config),
Topic = ?config(topic, Config),
STopic = ?config(stopic, Config),
ClientId = ?config(client_id, Config),
{ok, Client} = emqtt:start_link([
{clientid, ClientId},
{proto_ver, v5},
{properties, #{'Session-Expiry-Interval' => 30}}
| Config
]),
{ok, _} = emqtt:ConnFun(Client),
{ok, _, [2]} = emqtt:subscribe(Client, STopic, qos2),
case emqx_persistent_session:is_store_enabled() of
true ->
{persistent, Session} = emqx_persistent_session:lookup(ClientId),
SessionID = emqx_session:info(id, Session),
SessionIDs = [SId || #route{dest = SId} <- emqx_session_router:match_routes(Topic)],
?assert(lists:member(SessionID, SessionIDs)),
?assertMatch([_], [Sub || {ST, _} = Sub <- emqtt:subscriptions(Client), ST =:= STopic]),
{ok, _, _} = emqtt:unsubscribe(Client, STopic),
?assertMatch([], [Sub || {ST, _} = Sub <- emqtt:subscriptions(Client), ST =:= STopic]),
SessionIDs2 = [SId || #route{dest = SId} <- emqx_session_router:match_routes(Topic)],
?assert(not lists:member(SessionID, SessionIDs2));
false ->
?assertMatch([_], [Sub || {ST, _} = Sub <- emqtt:subscriptions(Client), ST =:= STopic]),
{ok, _, _} = emqtt:unsubscribe(Client, STopic),
?assertMatch([], [Sub || {ST, _} = Sub <- emqtt:subscriptions(Client), ST =:= STopic])
end,
ok = emqtt:disconnect(Client).
t_multiple_subscription_matches(Config) ->
ConnFun = ?config(conn_fun, Config),
Topic = ?config(topic, Config),
STopic1 = ?config(stopic, Config),
STopic2 = ?config(stopic_alt, Config),
Payload = <<"test message">>,
ClientId = ?config(client_id, Config),
{ok, Client1} = emqtt:start_link([
{clientid, ClientId},
{proto_ver, v5},
{properties, #{'Session-Expiry-Interval' => 30}}
| Config
]),
{ok, _} = emqtt:ConnFun(Client1),
{ok, _, [2]} = emqtt:subscribe(Client1, STopic1, qos2),
{ok, _, [2]} = emqtt:subscribe(Client1, STopic2, qos2),
ok = emqtt:disconnect(Client1),
maybe_kill_connection_process(ClientId, Config),
publish(Topic, Payload),
{ok, Client2} = emqtt:start_link([
{clientid, ClientId},
{proto_ver, v5},
{properties, #{'Session-Expiry-Interval' => 30}},
{clean_start, false}
| Config
]),
{ok, _} = emqtt:ConnFun(Client2),
We will receive the same message twice because it matches two subscriptions .
[Msg1, Msg2] = receive_messages(2),
?assertEqual({ok, iolist_to_binary(Topic)}, maps:find(topic, Msg1)),
?assertEqual({ok, iolist_to_binary(Payload)}, maps:find(payload, Msg1)),
?assertEqual({ok, iolist_to_binary(Topic)}, maps:find(topic, Msg2)),
?assertEqual({ok, iolist_to_binary(Payload)}, maps:find(payload, Msg2)),
?assertEqual({ok, 2}, maps:find(qos, Msg1)),
?assertEqual({ok, 2}, maps:find(qos, Msg2)),
ok = emqtt:disconnect(Client2).
t_lost_messages_because_of_gc(init, Config) ->
case
(emqx_persistent_session:is_store_enabled() andalso
?config(kill_connection_process, Config))
of
true ->
Retain = 1000,
OldRetain = emqx_config:get(?msg_retain, Retain),
emqx_config:put(?msg_retain, Retain),
[{retain, Retain}, {old_retain, OldRetain} | Config];
false ->
{skip, only_relevant_with_store_and_kill_process}
end;
t_lost_messages_because_of_gc('end', Config) ->
OldRetain = ?config(old_retain, Config),
emqx_config:put(?msg_retain, OldRetain),
ok.
t_lost_messages_because_of_gc(Config) ->
ConnFun = ?config(conn_fun, Config),
Topic = ?config(topic, Config),
STopic = ?config(stopic, Config),
ClientId = ?config(client_id, Config),
Retain = ?config(retain, Config),
Payload1 = <<"hello1">>,
Payload2 = <<"hello2">>,
{ok, Client1} = emqtt:start_link([
{clientid, ClientId},
{proto_ver, v5},
{properties, #{'Session-Expiry-Interval' => 30}}
| Config
]),
{ok, _} = emqtt:ConnFun(Client1),
{ok, _, [2]} = emqtt:subscribe(Client1, STopic, qos2),
emqtt:disconnect(Client1),
maybe_kill_connection_process(ClientId, Config),
publish(Topic, Payload1),
timer:sleep(2 * Retain),
publish(Topic, Payload2),
emqx_persistent_session_gc:message_gc_worker(),
{ok, Client2} = emqtt:start_link([
{clientid, ClientId},
{clean_start, false},
{proto_ver, v5},
{properties, #{'Session-Expiry-Interval' => 30}}
| Config
]),
{ok, _} = emqtt:ConnFun(Client2),
Msgs = receive_messages(2),
?assertMatch([_], Msgs),
?assertEqual({ok, iolist_to_binary(Payload2)}, maps:find(payload, hd(Msgs))),
emqtt:disconnect(Client2),
ok.
%%--------------------------------------------------------------------
Snabbkaffe helpers
%%--------------------------------------------------------------------
check_snabbkaffe_vanilla(Trace) ->
ResumeTrace = [
T
|| #{?snk_kind := K} = T <- Trace,
re:run(to_list(K), "^ps_") /= nomatch
],
?assertMatch([_ | _], ResumeTrace),
[_Sid] = lists:usort(?projection(sid, ResumeTrace)),
Check internal flow of the resuming
?assert(
?strict_causality(
#{?snk_kind := ps_resuming},
#{?snk_kind := ps_initial_pendings},
ResumeTrace
)
),
?assert(
?strict_causality(
#{?snk_kind := ps_initial_pendings},
#{?snk_kind := ps_persist_pendings},
ResumeTrace
)
),
?assert(
?strict_causality(
#{?snk_kind := ps_persist_pendings},
#{?snk_kind := ps_notify_writers},
ResumeTrace
)
),
?assert(
?strict_causality(
#{?snk_kind := ps_notify_writers},
#{?snk_kind := ps_node_markers},
ResumeTrace
)
),
?assert(
?strict_causality(
#{?snk_kind := ps_node_markers},
#{?snk_kind := ps_resume_session},
ResumeTrace
)
),
?assert(
?strict_causality(
#{?snk_kind := ps_resume_session},
#{?snk_kind := ps_marker_pendings},
ResumeTrace
)
),
?assert(
?strict_causality(
#{?snk_kind := ps_marker_pendings},
#{?snk_kind := ps_marker_pendings_msgs},
ResumeTrace
)
),
?assert(
?strict_causality(
#{?snk_kind := ps_marker_pendings_msgs},
#{?snk_kind := ps_resume_end},
ResumeTrace
)
),
Check flow between worker and
?assert(
?strict_causality(
#{?snk_kind := ps_notify_writers},
#{?snk_kind := ps_worker_started},
ResumeTrace
)
),
?assert(
?strict_causality(
#{?snk_kind := ps_marker_pendings},
#{?snk_kind := ps_worker_resume_end},
ResumeTrace
)
),
?assert(
?strict_causality(
#{?snk_kind := ps_worker_resume_end},
#{?snk_kind := ps_worker_shutdown},
ResumeTrace
)
),
[Markers] = ?projection(markers, ?of_kind(ps_node_markers, Trace)),
?assertMatch([_], Markers).
to_list(L) when is_list(L) -> L;
to_list(A) when is_atom(A) -> atom_to_list(A);
to_list(B) when is_binary(B) -> binary_to_list(B).
%%--------------------------------------------------------------------
Snabbkaffe tests
%%--------------------------------------------------------------------
t_snabbkaffe_vanilla_stages(Config) ->
%% Test that all stages of session resume works ok in the simplest case
ConnFun = ?config(conn_fun, Config),
ClientId = ?config(client_id, Config),
EmqttOpts = [
{proto_ver, v5},
{clientid, ClientId},
{properties, #{'Session-Expiry-Interval' => 30}}
| Config
],
{ok, Client1} = emqtt:start_link([{clean_start, true} | EmqttOpts]),
{ok, _} = emqtt:ConnFun(Client1),
ok = emqtt:disconnect(Client1),
maybe_kill_connection_process(ClientId, Config),
?check_trace(
begin
{ok, Client2} = emqtt:start_link([{clean_start, false} | EmqttOpts]),
{ok, _} = emqtt:ConnFun(Client2),
ok = emqtt:disconnect(Client2)
end,
fun(ok, Trace) ->
check_snabbkaffe_vanilla(Trace)
end
),
ok.
%% Publish messages while the session is disconnected, then resume and
%% verify (a) the client receives exactly the published payloads and
%% (b) the trace shows the pending messages were fetched from the DB
%% during the resume init stage, with no duplicates.
t_snabbkaffe_pending_messages(Config) ->
    %% Make sure there are pending messages are fetched during the init stage.
    ConnFun = ?config(conn_fun, Config),
    ClientId = ?config(client_id, Config),
    Topic = ?config(topic, Config),
    STopic = ?config(stopic, Config),
    Payloads = [<<"test", (integer_to_binary(X))/binary>> || X <- [1, 2, 3, 4, 5]],
    EmqttOpts = [
        {proto_ver, v5},
        {clientid, ClientId},
        {properties, #{'Session-Expiry-Interval' => 30}}
        | Config
    ],
    %% Establish a persistent session with a QoS 2 subscription, then
    %% disconnect so subsequent publishes become pending.
    {ok, Client1} = emqtt:start_link([{clean_start, true} | EmqttOpts]),
    {ok, _} = emqtt:ConnFun(Client1),
    {ok, _, [2]} = emqtt:subscribe(Client1, STopic, qos2),
    ok = emqtt:disconnect(Client1),
    maybe_kill_connection_process(ClientId, Config),
    ?check_trace(
        begin
            %% Publish synchronously (waits for ps_persist_msg per payload)
            %% so every message is persisted before the resume starts.
            snabbkaffe_sync_publish(Topic, Payloads),
            {ok, Client2} = emqtt:start_link([{clean_start, false} | EmqttOpts]),
            {ok, _} = emqtt:ConnFun(Client2),
            Msgs = receive_messages(length(Payloads)),
            ReceivedPayloads = [P || #{payload := P} <- Msgs],
            ?assertEqual(lists:sort(ReceivedPayloads), lists:sort(Payloads)),
            ok = emqtt:disconnect(Client2)
        end,
        fun(ok, Trace) ->
            check_snabbkaffe_vanilla(Trace),
            %% Check that all messages was delivered from the DB
            [Delivers1] = ?projection(msgs, ?of_kind(ps_persist_pendings_msgs, Trace)),
            [Delivers2] = ?projection(msgs, ?of_kind(ps_marker_pendings_msgs, Trace)),
            Delivers = Delivers1 ++ Delivers2,
            ?assertEqual(length(Payloads), length(Delivers)),
            %% Check for no duplicates
            ?assertEqual(lists:usort(Delivers), lists:sort(Delivers))
        end
    ),
    ok.
%% Verify that messages published while the session is in the middle of
%% being resumed are buffered by the writer process and eventually
%% delivered, without loss or duplication.
%% Fix: the "Make the resume init phase ..." line had lost its %% comment
%% marker, which made the module fail to compile.
t_snabbkaffe_buffered_messages(Config) ->
    %% Make sure to buffer messages during startup.
    ConnFun = ?config(conn_fun, Config),
    ClientId = ?config(client_id, Config),
    Topic = ?config(topic, Config),
    STopic = ?config(stopic, Config),
    Payloads1 = [<<"test", (integer_to_binary(X))/binary>> || X <- [1, 2, 3]],
    Payloads2 = [<<"test", (integer_to_binary(X))/binary>> || X <- [4, 5, 6]],
    EmqttOpts = [
        {proto_ver, v5},
        {clientid, ClientId},
        {properties, #{'Session-Expiry-Interval' => 30}}
        | Config
    ],
    %% Establish a persistent session with a subscription, then drop the
    %% connection.
    {ok, Client1} = emqtt:start_link([{clean_start, true} | EmqttOpts]),
    {ok, _} = emqtt:ConnFun(Client1),
    {ok, _, [2]} = emqtt:subscribe(Client1, STopic, qos2),
    ok = emqtt:disconnect(Client1),
    maybe_kill_connection_process(ClientId, Config),
    %% Payloads1 are persisted as pending messages before resume starts.
    publish(Topic, Payloads1),
    ?check_trace(
        begin
            %% Make the resume init phase wait until the first message is delivered.
            ?force_ordering(
                #{?snk_kind := ps_worker_deliver},
                #{?snk_kind := ps_resume_end}
            ),
            %% Publish Payloads2 from a helper process mid-resume (after
            %% the marker/pendings stage) so they must be buffered.
            Parent = self(),
            spawn_link(fun() ->
                ?block_until(#{?snk_kind := ps_marker_pendings_msgs}, infinity, 5000),
                publish(Topic, Payloads2, true),
                Parent ! publish_done,
                ok
            end),
            {ok, Client2} = emqtt:start_link([{clean_start, false} | EmqttOpts]),
            {ok, _} = emqtt:ConnFun(Client2),
            receive
                publish_done -> ok
            after 10000 -> error(too_long_to_publish)
            end,
            %% Ask for one more message than expected: the extra receive
            %% times out, which also guards against duplicate delivery.
            Msgs = receive_messages(length(Payloads1) + length(Payloads2) + 1),
            ReceivedPayloads = [P || #{payload := P} <- Msgs],
            ?assertEqual(
                lists:sort(Payloads1 ++ Payloads2),
                lists:sort(ReceivedPayloads)
            ),
            ok = emqtt:disconnect(Client2)
        end,
        fun(ok, Trace) ->
            check_snabbkaffe_vanilla(Trace),
            %% Check that some messages was buffered in the writer process
            [Msgs] = ?projection(msgs, ?of_kind(ps_writer_pendings, Trace)),
            ?assertMatch(
                X when 0 < X andalso X =< length(Payloads2),
                length(Msgs)
            )
        end
    ),
    ok.
%%--------------------------------------------------------------------
%% GC tests
%%--------------------------------------------------------------------
%% Tag values stored as the last element of session-message store keys.
%% NOTE(review): the numeric values look deliberately ordered so that,
%% for the same GUID, entries sort ABANDONED < UNDELIVERED < DELIVERED
%% < MARKER in the ordered store -- confirm against
%% emqx_persistent_session before relying on this.
-define(MARKER, 3).
-define(DELIVERED, 2).
-define(UNDELIVERED, 1).
-define(ABANDONED, 0).
%% Generate a fresh message id (an emqx GUID; its leading 64 bits carry
%% a timestamp, see guid/1 below).
msg_id() ->
    emqx_guid:gen().
%% Store entry marking MsgId as delivered to SessionID on subscription
%% topic STopic.
delivered_msg(MsgId, SessionID, STopic) ->
    {SessionID, MsgId, STopic, ?DELIVERED}.
%% Store entry marking MsgId as pending (not yet delivered) for
%% SessionID on subscription topic STopic.
undelivered_msg(MsgId, SessionID, STopic) ->
    {SessionID, MsgId, STopic, ?UNDELIVERED}.
%% Store entry for a resume marker: MarkerID in the message-id slot, no
%% topic.
marker_msg(MarkerID, SessionID) ->
    {SessionID, MarkerID, <<>>, ?MARKER}.
%% Build a fake GUID whose embedded timestamp lies MicrosecondsAgo in
%% the past, by rewriting the leading 64-bit timestamp of a real GUID.
%% (Assumes the first 64 bits of an emqx GUID are a microsecond-scale
%% timestamp -- consistent with how the GC tests below use it.)
%% Fix: the comment line had lost its %% marker, breaking compilation.
guid(MicrosecondsAgo) ->
    %% Make a fake GUID and set a timestamp.
    <<TS:64, Tail:64>> = emqx_guid:gen(),
    <<(TS - MicrosecondsAgo):64, Tail:64>>.
%% Store entry marking SessionID as abandoned "now".
abandoned_session_msg(SessionID) ->
    abandoned_session_msg(SessionID, 0).

%% Same, with the abandon timestamp shifted MicrosecondsAgo into the
%% past (encoded as a 64-bit microsecond timestamp in the topic slot).
abandoned_session_msg(SessionID, MicrosecondsAgo) ->
    TS = erlang:system_time(microsecond),
    {SessionID, <<>>, <<(TS - MicrosecondsAgo):64>>, ?ABANDONED}.
%% Return a stateful callback for gc_session_messages/1 that records
%% every key reported with the `delete` tag. Calling the fun as
%% F(collect, <<>>) tears down the internal table and returns the
%% recorded keys in table (sorted key) order. All other tags are
%% ignored.
fresh_gc_delete_fun() ->
    Table = ets:new(gc_collect, [ordered_set]),
    fun(Tag, Key) ->
        case Tag of
            delete ->
                ets:insert(Table, {Key}),
                ok;
            collect when Key =:= <<>> ->
                Recorded = ets:match(Table, {'$1'}),
                ets:delete(Table),
                lists:append(Recorded);
            _ ->
                ok
        end
    end.
%% Return a stateful callback that records every {Key, Tag} pair the GC
%% reports (delete/marker/abandoned). Calling the fun as
%% F(collect, <<>>) tears down the internal table and returns the
%% recorded pairs in sorted order.
fresh_gc_callbacks_fun() ->
    Table = ets:new(gc_collect, [ordered_set]),
    fun(Tag, Key) ->
        case {Tag, Key} of
            {collect, <<>>} ->
                Rows = ets:match(Table, {'$1'}),
                ets:delete(Table),
                lists:append(Rows);
            _ ->
                ets:insert(Table, {{Key, Tag}}),
                ok
        end
    end.
%% Run one GC pass and return, in store order, the keys the GC asked to
%% delete.
get_gc_delete_messages() ->
    Fun = fresh_gc_delete_fun(),
    emqx_persistent_session:gc_session_messages(Fun),
    Fun(collect, <<>>).
%% Run one GC pass and return every {Key, Tag} callback it made, in
%% store order.
get_gc_callbacks() ->
    Fun = fresh_gc_callbacks_fun(),
    emqx_persistent_session:gc_session_messages(Fun),
    Fun(collect, <<>>).
%% When every message has both a delivered and an undelivered entry in
%% the store, the GC must delete all of them.
t_gc_all_delivered(Config) ->
    Store = ?config(session_msg_store, Config),
    STopic = ?config(stopic, Config),
    SessionId = emqx_guid:gen(),
    MsgIds = [msg_id() || _ <- lists:seq(1, 5)],
    Delivered = [delivered_msg(X, SessionId, STopic) || X <- MsgIds],
    Undelivered = [undelivered_msg(X, SessionId, STopic) || X <- MsgIds],
    SortedContent = lists:usort(Delivered ++ Undelivered),
    ets:insert(Store, [{X, <<>>} || X <- SortedContent]),
    %% Every inserted entry should come back as a delete candidate.
    GCMessages = get_gc_delete_messages(),
    ?assertEqual(SortedContent, GCMessages),
    ok.
%% Messages that have both delivered and undelivered entries are GC'ed;
%% entries that are only undelivered (still pending) must be kept.
t_gc_some_undelivered(Config) ->
    Store = ?config(session_msg_store, Config),
    STopic = ?config(stopic, Config),
    SessionId = emqx_guid:gen(),
    MsgIds = [msg_id() || _ <- lists:seq(1, 10)],
    Delivered = [delivered_msg(X, SessionId, STopic) || X <- MsgIds],
    {Delivered1, _Delivered2} = split(Delivered),
    Undelivered = [undelivered_msg(X, SessionId, STopic) || X <- MsgIds],
    {Undelivered1, Undelivered2} = split(Undelivered),
    %% Only half the messages get a delivered entry; the other half stay
    %% pending (Undelivered2) and must survive the GC.
    Content = Delivered1 ++ Undelivered1 ++ Undelivered2,
    ets:insert(Store, [{X, <<>>} || X <- Content]),
    Expected = lists:usort(Delivered1 ++ Undelivered1),
    GCMessages = get_gc_delete_messages(),
    ?assertEqual(Expected, GCMessages),
    ok.
%% Same as t_gc_some_undelivered, but with a fresh resume marker in the
%% store: the marker must not be GC'ed, and the delete set is unchanged.
%% NOTE(review): MarkerId is generated between the two msg-id batches,
%% presumably so its GUID sorts among the message entries -- confirm.
t_gc_with_markers(Config) ->
    Store = ?config(session_msg_store, Config),
    STopic = ?config(stopic, Config),
    SessionId = emqx_guid:gen(),
    MsgIds1 = [msg_id() || _ <- lists:seq(1, 10)],
    MarkerId = msg_id(),
    MsgIds = [msg_id() || _ <- lists:seq(1, 4)] ++ MsgIds1,
    Delivered = [delivered_msg(X, SessionId, STopic) || X <- MsgIds],
    {Delivered1, _Delivered2} = split(Delivered),
    Undelivered = [undelivered_msg(X, SessionId, STopic) || X <- MsgIds],
    {Undelivered1, Undelivered2} = split(Undelivered),
    Markers = [marker_msg(MarkerId, SessionId)],
    Content = Delivered1 ++ Undelivered1 ++ Undelivered2 ++ Markers,
    ets:insert(Store, [{X, <<>>} || X <- Content]),
    %% The marker and the still-pending Undelivered2 entries are kept.
    Expected = lists:usort(Delivered1 ++ Undelivered1),
    GCMessages = get_gc_delete_messages(),
    ?assertEqual(Expected, GCMessages),
    ok.
%% When the session is marked abandoned, even its still-pending
%% undelivered entries are deleted; the (fresh) abandoned marker itself
%% is kept.
t_gc_abandoned_some_undelivered(Config) ->
    Store = ?config(session_msg_store, Config),
    STopic = ?config(stopic, Config),
    SessionId = emqx_guid:gen(),
    MsgIds = [msg_id() || _ <- lists:seq(1, 10)],
    Delivered = [delivered_msg(X, SessionId, STopic) || X <- MsgIds],
    {Delivered1, _Delivered2} = split(Delivered),
    Undelivered = [undelivered_msg(X, SessionId, STopic) || X <- MsgIds],
    {Undelivered1, Undelivered2} = split(Undelivered),
    Abandoned = abandoned_session_msg(SessionId),
    Content = Delivered1 ++ Undelivered1 ++ Undelivered2 ++ [Abandoned],
    ets:insert(Store, [{X, <<>>} || X <- Content]),
    %% Unlike t_gc_some_undelivered, Undelivered2 is deleted too because
    %% the session is abandoned.
    Expected = lists:usort(Delivered1 ++ Undelivered1 ++ Undelivered2),
    GCMessages = get_gc_delete_messages(),
    ?assertEqual(Expected, GCMessages),
    ok.
%% The `abandoned` GC callback must fire only when the abandoned marker
%% is the session's sole remaining entry, not while deletable messages
%% for that session still exist.
t_gc_abandoned_only_called_on_empty_session(Config) ->
    Store = ?config(session_msg_store, Config),
    STopic = ?config(stopic, Config),
    SessionId = emqx_guid:gen(),
    MsgIds = [msg_id() || _ <- lists:seq(1, 10)],
    Delivered = [delivered_msg(X, SessionId, STopic) || X <- MsgIds],
    Undelivered = [undelivered_msg(X, SessionId, STopic) || X <- MsgIds],
    Abandoned = abandoned_session_msg(SessionId),
    Content = Delivered ++ Undelivered ++ [Abandoned],
    ets:insert(Store, [{X, <<>>} || X <- Content]),
    GCMessages = get_gc_callbacks(),
    %% Since we had messages to delete, we don't expect to get the
    %% callback on the abandoned session
    ?assertEqual([], [X || {X, abandoned} <- GCMessages]),
    %% But if we have only the abandoned session marker for this
    %% session, it should be called.
    ets:delete_all_objects(Store),
    %% An entry for a *different* session must not suppress the callback.
    UndeliveredOtherSession = undelivered_msg(msg_id(), emqx_guid:gen(), <<"topic">>),
    ets:insert(Store, [{X, <<>>} || X <- [Abandoned, UndeliveredOtherSession]]),
    GCMessages2 = get_gc_callbacks(),
    ?assertEqual([Abandoned], [X || {X, abandoned} <- GCMessages2]),
    ok.
%% Set up / tear down a passthrough mock of emqx_persistent_session so
%% the GC worker's delete calls can be inspected via meck:history/2 in
%% t_gc_session_gc_worker/1.
t_gc_session_gc_worker(init, Config) ->
    meck:new(emqx_persistent_session, [passthrough, no_link]),
    Config;
t_gc_session_gc_worker('end', _Config) ->
    meck:unload(emqx_persistent_session),
    ok.
%% Drive session_gc_worker/2 directly for each entry kind and check,
%% via the meck history, exactly which entries it deleted: delivered
%% messages always; markers and abandoned markers only when old enough.
t_gc_session_gc_worker(Config) ->
    STopic = ?config(stopic, Config),
    SessionID = emqx_guid:gen(),
    MsgDeleted = delivered_msg(msg_id(), SessionID, STopic),
    %% A fresh marker must be kept.
    MarkerNotDeleted = marker_msg(msg_id(), SessionID),
    %% A marker whose GUID timestamp is 120 s in the past -- presumably
    %% beyond the marker retention window, so it gets reaped (confirm
    %% against the worker's configured threshold).
    MarkerDeleted = marker_msg(guid(120 * 1000 * 1000), SessionID),
    %% Fresh abandoned marker is kept; one 500 s old is reaped.
    AbandonedNotDeleted = abandoned_session_msg(SessionID),
    AbandonedDeleted = abandoned_session_msg(SessionID, 500 * 1000 * 1000),
    meck:expect(emqx_persistent_session, delete_session_message, fun(_Key) -> ok end),
    emqx_persistent_session_gc:session_gc_worker(delete, MsgDeleted),
    emqx_persistent_session_gc:session_gc_worker(marker, MarkerNotDeleted),
    emqx_persistent_session_gc:session_gc_worker(marker, MarkerDeleted),
    emqx_persistent_session_gc:session_gc_worker(abandoned, AbandonedDeleted),
    emqx_persistent_session_gc:session_gc_worker(abandoned, AbandonedNotDeleted),
    %% Collect delete_session_message/1 calls made by this process.
    History = meck:history(emqx_persistent_session, self()),
    DeleteCalls = [
        Key
        || {_Pid, {_, delete_session_message, [Key]}, _Result} <-
               History
    ],
    ?assertEqual(
        lists:sort([MsgDeleted, AbandonedDeleted, MarkerDeleted]),
        lists:sort(DeleteCalls)
    ),
    ok.
%% The message GC worker must remove messages whose GUID timestamp is
%% older than the configured retain period and keep newer ones.
t_gc_message_gc(Config) ->
    Topic = ?config(topic, Config),
    ClientID = ?config(client_id, Config),
    Store = ?config(msg_store, Config),
    NewMsgs = [
        emqx_message:make(ClientID, Topic, integer_to_binary(P))
        || P <- lists:seq(6, 10)
    ],
    %% Retain period in milliseconds (60 s).
    Retain = 60 * 1000,
    emqx_config:put(?msg_retain, Retain),
    Msgs1 = [
        emqx_message:make(ClientID, Topic, integer_to_binary(P))
        || P <- lists:seq(1, 5)
    ],
    %% Rewrite ids so these messages look exactly Retain old
    %% (guid/1 takes microseconds, hence * 1000).
    OldMsgs = [M#message{id = guid(Retain * 1000)} || M <- Msgs1],
    ets:insert(Store, NewMsgs ++ OldMsgs),
    ?assertEqual(lists:sort(OldMsgs ++ NewMsgs), ets:tab2list(Store)),
    ok = emqx_persistent_session_gc:message_gc_worker(),
    %% Only the fresh messages survive the GC pass.
    ?assertEqual(lists:sort(NewMsgs), ets:tab2list(Store)),
    ok.
%% Deal the elements of List alternately into two lists: elements at
%% odd positions (1st, 3rd, ...) go to the first list, elements at even
%% positions to the second. Each output list is in reverse order of
%% appearance.
split(List) ->
    split(List, [], []).

split([X, Y | Rest], Odd, Even) ->
    split(Rest, [X | Odd], [Y | Even]);
split([X], Odd, Even) ->
    {[X | Odd], Even};
split([], Odd, Even) ->
    {Odd, Even}.
| null | https://raw.githubusercontent.com/emqx/emqx/dbc10c2eed3df314586c7b9ac6292083204f1f68/apps/emqx/test/emqx_persistent_session_SUITE.erl | erlang | --------------------------------------------------------------------
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
--------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
over by the new connection.
The new process resumes the session from the stored state, and finds
any subscribed messages from the persistent message store.
We want to test both ways, both with the db backend enabled and disabled.
In addition, we test both tcp and quic connections.
Start Apps
Start Apps
We need to make sure the system does not interfere with this test group.
We are running a single testcase
--------------------------------------------------------------------
Helpers
--------------------------------------------------------------------
Publish from another process to avoid connection confusion.
For convenience, always publish using tcp.
The publish path is not what we are testing.
Snabbkaffe sometimes fails unless all processes are gone.
--------------------------------------------------------------------
Test Cases
--------------------------------------------------------------------
[MQTT-3.1.2-23]
Emqtt client dies
Open a persistent session, but cancel the persistence when
shutting down the connection.
Open a non-persistent session, but add the persistence when
shutting down the connection. This is a protocol error, and
should not convert the session into a persistent session.
Strangely enough, the disconnect is reported as successful by emqtt.
The session should not be known, since it wasn't persisted because of the
changed expiry interval in the disconnect call.
Emulate an error in the connect process,
or that the node of the process goes down.
A persistent session should eventually expire.
The session should not be marked as expired.
The session should have a pending message
The session should now be marked as expired.
The old session should now either
be marked as abandoned or already be garbage collected.
We should not receive the pending message
A persistent session should receive messages in its
subscription even if the process owning the session dies.
then we disconnect again.
The original message should not be delivered.
--------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
Test that all stages of session resume works ok in the simplest case
Make sure there are pending messages are fetched during the init stage.
Check that all messages was delivered from the DB
Check for no duplicates
Make sure to buffer messages during startup.
Check that some messages was buffered in the writer process
--------------------------------------------------------------------
--------------------------------------------------------------------
Since we had messages to delete, we don't expect to get the
callback on the abandoned session
But if we have only the abandoned session marker for this
session, it should be called. | Copyright ( c ) 2021 - 2023 EMQ Technologies Co. , Ltd. All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(emqx_persistent_session_SUITE).
-include_lib("stdlib/include/assert.hrl").
-include_lib("common_test/include/ct.hrl").
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
-include_lib("../include/emqx.hrl").
-include("../src/persistent_session/emqx_persistent_session.hrl").
-compile(export_all).
-compile(nowarn_export_all).
SUITE boilerplate
all() ->
[
{group, persistent_store_enabled},
{group, persistent_store_disabled}
].
A persistent session can be resumed in two ways :
1 . The old connection process is still alive , and the session is taken
2 . The old session process has died ( e.g. , because of node down ) .
groups() ->
TCs = emqx_common_test_helpers:all(?MODULE),
SnabbkaffeTCs = [TC || TC <- TCs, is_snabbkaffe_tc(TC)],
GCTests = [TC || TC <- TCs, is_gc_tc(TC)],
OtherTCs = (TCs -- SnabbkaffeTCs) -- GCTests,
[
{persistent_store_enabled, [
{group, ram_tables},
{group, disc_tables}
]},
{persistent_store_disabled, [{group, no_kill_connection_process}]},
{ram_tables, [], [
{group, no_kill_connection_process},
{group, kill_connection_process},
{group, snabbkaffe},
{group, gc_tests}
]},
{disc_tables, [], [
{group, no_kill_connection_process},
{group, kill_connection_process},
{group, snabbkaffe},
{group, gc_tests}
]},
{no_kill_connection_process, [], [{group, tcp}, {group, quic}, {group, ws}]},
{kill_connection_process, [], [{group, tcp}, {group, quic}, {group, ws}]},
{snabbkaffe, [], [
{group, tcp_snabbkaffe}, {group, quic_snabbkaffe}, {group, ws_snabbkaffe}
]},
{tcp, [], OtherTCs},
{quic, [], OtherTCs},
{ws, [], OtherTCs},
{tcp_snabbkaffe, [], SnabbkaffeTCs},
{quic_snabbkaffe, [], SnabbkaffeTCs},
{ws_snabbkaffe, [], SnabbkaffeTCs},
{gc_tests, [], GCTests}
].
is_snabbkaffe_tc(TC) ->
re:run(atom_to_list(TC), "^t_snabbkaffe_") /= nomatch.
is_gc_tc(TC) ->
re:run(atom_to_list(TC), "^t_gc_") /= nomatch.
init_per_group(persistent_store_enabled, Config) ->
[{persistent_store_enabled, true} | Config];
init_per_group(Group, Config) when Group =:= ram_tables; Group =:= disc_tables ->
Reply =
case Group =:= ram_tables of
true -> ram;
false -> disc
end,
emqx_common_test_helpers:boot_modules(all),
meck:new(emqx_config, [non_strict, passthrough, no_history, no_link]),
meck:expect(emqx_config, get, fun
(?on_disc_key) -> Reply =:= disc;
(?is_enabled_key) -> true;
(Other) -> meck:passthrough([Other])
end),
emqx_common_test_helpers:start_apps([], fun set_special_confs/1),
?assertEqual(true, emqx_persistent_session:is_store_enabled()),
Config;
init_per_group(persistent_store_disabled, Config) ->
emqx_common_test_helpers:boot_modules(all),
meck:new(emqx_config, [non_strict, passthrough, no_history, no_link]),
meck:expect(emqx_config, get, fun
(?is_enabled_key) -> false;
(Other) -> meck:passthrough([Other])
end),
emqx_common_test_helpers:start_apps([], fun set_special_confs/1),
?assertEqual(false, emqx_persistent_session:is_store_enabled()),
[{persistent_store_enabled, false} | Config];
init_per_group(Group, Config) when Group == ws; Group == ws_snabbkaffe ->
[
{ssl, false},
{host, "localhost"},
{enable_websocket, true},
{port, 8083},
{conn_fun, ws_connect}
| Config
];
init_per_group(Group, Config) when Group == tcp; Group == tcp_snabbkaffe ->
[{port, 1883}, {conn_fun, connect} | Config];
init_per_group(Group, Config) when Group == quic; Group == quic_snabbkaffe ->
UdpPort = 1883,
emqx_common_test_helpers:ensure_quic_listener(?MODULE, UdpPort),
[{port, UdpPort}, {conn_fun, quic_connect} | Config];
init_per_group(no_kill_connection_process, Config) ->
[{kill_connection_process, false} | Config];
init_per_group(kill_connection_process, Config) ->
[{kill_connection_process, true} | Config];
init_per_group(snabbkaffe, Config) ->
[{kill_connection_process, true} | Config];
init_per_group(gc_tests, Config) ->
lists:foreach(
fun(ClientId) ->
maybe_kill_connection_process(ClientId, [{kill_connection_process, true}])
end,
emqx_cm:all_client_ids()
),
emqx_common_test_helpers:stop_apps([]),
SessionMsgEts = gc_tests_session_store,
MsgEts = gc_tests_msg_store,
Pid = spawn(fun() ->
ets:new(SessionMsgEts, [named_table, public, ordered_set]),
ets:new(MsgEts, [named_table, public, ordered_set, {keypos, 2}]),
receive
stop -> ok
end
end),
meck:new(mnesia, [non_strict, passthrough, no_history, no_link]),
meck:expect(mnesia, dirty_first, fun
(?SESS_MSG_TAB) -> ets:first(SessionMsgEts);
(?MSG_TAB) -> ets:first(MsgEts);
(X) -> meck:passthrough([X])
end),
meck:expect(mnesia, dirty_next, fun
(?SESS_MSG_TAB, X) -> ets:next(SessionMsgEts, X);
(?MSG_TAB, X) -> ets:next(MsgEts, X);
(Tab, X) -> meck:passthrough([Tab, X])
end),
meck:expect(mnesia, dirty_delete, fun
(?MSG_TAB, X) -> ets:delete(MsgEts, X);
(Tab, X) -> meck:passthrough([Tab, X])
end),
[{store_owner, Pid}, {session_msg_store, SessionMsgEts}, {msg_store, MsgEts} | Config].
init_per_suite(Config) ->
Config.
set_special_confs(_) ->
ok.
end_per_suite(_Config) ->
emqx_common_test_helpers:ensure_mnesia_stopped(),
ok.
end_per_group(gc_tests, Config) ->
meck:unload(mnesia),
?config(store_owner, Config) ! stop,
ok;
end_per_group(Group, _Config) when
Group =:= ram_tables; Group =:= disc_tables
->
meck:unload(emqx_config),
emqx_common_test_helpers:stop_apps([]);
end_per_group(persistent_store_disabled, _Config) ->
meck:unload(emqx_config),
emqx_common_test_helpers:stop_apps([]);
end_per_group(_Group, _Config) ->
ok.
init_per_testcase(TestCase, Config) ->
Config1 = preconfig_per_testcase(TestCase, Config),
case is_gc_tc(TestCase) of
true ->
ets:delete_all_objects(?config(msg_store, Config)),
ets:delete_all_objects(?config(session_msg_store, Config));
false ->
skip
end,
case erlang:function_exported(?MODULE, TestCase, 2) of
true -> ?MODULE:TestCase(init, Config1);
_ -> Config1
end.
end_per_testcase(TestCase, Config) ->
case is_snabbkaffe_tc(TestCase) of
true -> snabbkaffe:stop();
false -> skip
end,
case erlang:function_exported(?MODULE, TestCase, 2) of
true -> ?MODULE:TestCase('end', Config);
false -> ok
end,
Config.
preconfig_per_testcase(TestCase, Config) ->
{BaseName, Config1} =
case ?config(tc_group_properties, Config) of
[] ->
{
atom_to_binary(TestCase),
init_per_group(tcp, init_per_group(kill_connection_process, Config))
};
[_ | _] = Props ->
Path = lists:reverse(?config(tc_group_path, Config) ++ Props),
Pre0 = [atom_to_list(N) || {name, N} <- lists:flatten(Path)],
Pre1 = lists:join("_", Pre0 ++ [atom_to_binary(TestCase)]),
{iolist_to_binary(Pre1), Config}
end,
[
{topic, iolist_to_binary([BaseName, "/foo"])},
{stopic, iolist_to_binary([BaseName, "/+"])},
{stopic_alt, iolist_to_binary([BaseName, "/foo"])},
{client_id, BaseName}
| Config1
].
client_info(Key, Client) ->
maps:get(Key, maps:from_list(emqtt:info(Client)), undefined).
receive_messages(Count) ->
receive_messages(Count, []).
receive_messages(0, Msgs) ->
Msgs;
receive_messages(Count, Msgs) ->
receive
{publish, Msg} ->
receive_messages(Count - 1, [Msg | Msgs]);
_Other ->
receive_messages(Count, Msgs)
after 5000 ->
Msgs
end.
maybe_kill_connection_process(ClientId, Config) ->
case ?config(kill_connection_process, Config) of
true ->
case emqx_cm:lookup_channels(ClientId) of
[] ->
ok;
[ConnectionPid] ->
?assert(is_pid(ConnectionPid)),
Ref = monitor(process, ConnectionPid),
ConnectionPid ! die_if_test,
receive
{'DOWN', Ref, process, ConnectionPid, normal} -> ok
after 3000 -> error(process_did_not_die)
end,
wait_for_cm_unregister(ClientId)
end;
false ->
ok
end.
wait_for_cm_unregister(ClientId) ->
wait_for_cm_unregister(ClientId, 100).
wait_for_cm_unregister(_ClientId, 0) ->
error(cm_did_not_unregister);
wait_for_cm_unregister(ClientId, N) ->
case emqx_cm:lookup_channels(ClientId) of
[] ->
ok;
[_] ->
timer:sleep(100),
wait_for_cm_unregister(ClientId, N - 1)
end.
snabbkaffe_sync_publish(Topic, Payloads) ->
Fun = fun(Client, Payload) ->
?check_trace(
begin
?wait_async_action(
{ok, _} = emqtt:publish(Client, Topic, Payload, 2),
#{?snk_kind := ps_persist_msg, payload := Payload}
)
end,
fun(_, _Trace) -> ok end
)
end,
do_publish(Payloads, Fun, true).
publish(Topic, Payloads) ->
publish(Topic, Payloads, false).
publish(Topic, Payloads, WaitForUnregister) ->
Fun = fun(Client, Payload) ->
{ok, _} = emqtt:publish(Client, Topic, Payload, 2)
end,
do_publish(Payloads, Fun, WaitForUnregister).
do_publish(Payloads = [_ | _], PublishFun, WaitForUnregister) ->
{Pid, Ref} =
spawn_monitor(
fun() ->
ClientID = <<"ps_SUITE_publisher">>,
{ok, Client} = emqtt:start_link([
{proto_ver, v5},
{clientid, ClientID},
{port, 1883}
]),
{ok, _} = emqtt:connect(Client),
lists:foreach(fun(Payload) -> PublishFun(Client, Payload) end, Payloads),
ok = emqtt:disconnect(Client),
case WaitForUnregister of
false ->
ok;
true ->
case emqx_cm:lookup_channels(ClientID) of
[] ->
ok;
[ConnectionPid] ->
?assert(is_pid(ConnectionPid)),
Ref1 = monitor(process, ConnectionPid),
receive
{'DOWN', Ref1, process, ConnectionPid, _} -> ok
after 3000 -> error(process_did_not_die)
end,
wait_for_cm_unregister(ClientID)
end
end
end
),
receive
{'DOWN', Ref, process, Pid, normal} -> ok;
{'DOWN', Ref, process, Pid, What} -> error({failed_publish, What})
end;
do_publish(Payload, PublishFun, WaitForUnregister) ->
do_publish([Payload], PublishFun, WaitForUnregister).
t_connect_session_expiry_interval(Config) ->
ConnFun = ?config(conn_fun, Config),
Topic = ?config(topic, Config),
STopic = ?config(stopic, Config),
Payload = <<"test message">>,
ClientId = ?config(client_id, Config),
{ok, Client1} = emqtt:start_link([
{clientid, ClientId},
{proto_ver, v5},
{properties, #{'Session-Expiry-Interval' => 30}}
| Config
]),
{ok, _} = emqtt:ConnFun(Client1),
{ok, _, [2]} = emqtt:subscribe(Client1, STopic, qos2),
ok = emqtt:disconnect(Client1),
maybe_kill_connection_process(ClientId, Config),
publish(Topic, Payload),
{ok, Client2} = emqtt:start_link([
{clientid, ClientId},
{proto_ver, v5},
{properties, #{'Session-Expiry-Interval' => 30}},
{clean_start, false}
| Config
]),
{ok, _} = emqtt:ConnFun(Client2),
[Msg | _] = receive_messages(1),
?assertEqual({ok, iolist_to_binary(Topic)}, maps:find(topic, Msg)),
?assertEqual({ok, iolist_to_binary(Payload)}, maps:find(payload, Msg)),
?assertEqual({ok, 2}, maps:find(qos, Msg)),
ok = emqtt:disconnect(Client2).
t_without_client_id(Config) ->
process_flag(trap_exit, true),
ConnFun = ?config(conn_fun, Config),
{ok, Client0} = emqtt:start_link([
{proto_ver, v5},
{properties, #{'Session-Expiry-Interval' => 30}},
{clean_start, false}
| Config
]),
{error, {client_identifier_not_valid, _}} = emqtt:ConnFun(Client0),
ok.
t_assigned_clientid_persistent_session(Config) ->
ConnFun = ?config(conn_fun, Config),
{ok, Client1} = emqtt:start_link([
{proto_ver, v5},
{properties, #{'Session-Expiry-Interval' => 30}},
{clean_start, true}
| Config
]),
{ok, _} = emqtt:ConnFun(Client1),
AssignedClientId = client_info(clientid, Client1),
ok = emqtt:disconnect(Client1),
maybe_kill_connection_process(AssignedClientId, Config),
{ok, Client2} = emqtt:start_link([
{clientid, AssignedClientId},
{proto_ver, v5},
{clean_start, false}
| Config
]),
{ok, _} = emqtt:ConnFun(Client2),
?assertEqual(1, client_info(session_present, Client2)),
ok = emqtt:disconnect(Client2).
t_cancel_on_disconnect(Config) ->
ConnFun = ?config(conn_fun, Config),
ClientId = ?config(client_id, Config),
{ok, Client1} = emqtt:start_link([
{proto_ver, v5},
{clientid, ClientId},
{properties, #{'Session-Expiry-Interval' => 30}},
{clean_start, true}
| Config
]),
{ok, _} = emqtt:ConnFun(Client1),
ok = emqtt:disconnect(Client1, 0, #{'Session-Expiry-Interval' => 0}),
wait_for_cm_unregister(ClientId),
{ok, Client2} = emqtt:start_link([
{clientid, ClientId},
{proto_ver, v5},
{clean_start, false},
{properties, #{'Session-Expiry-Interval' => 30}}
| Config
]),
{ok, _} = emqtt:ConnFun(Client2),
?assertEqual(0, client_info(session_present, Client2)),
ok = emqtt:disconnect(Client2).
t_persist_on_disconnect(Config) ->
ConnFun = ?config(conn_fun, Config),
ClientId = ?config(client_id, Config),
{ok, Client1} = emqtt:start_link([
{proto_ver, v5},
{clientid, ClientId},
{properties, #{'Session-Expiry-Interval' => 0}},
{clean_start, true}
| Config
]),
{ok, _} = emqtt:ConnFun(Client1),
ok = emqtt:disconnect(Client1, 0, #{'Session-Expiry-Interval' => 30}),
wait_for_cm_unregister(ClientId),
{ok, Client2} = emqtt:start_link([
{clientid, ClientId},
{proto_ver, v5},
{clean_start, false},
{properties, #{'Session-Expiry-Interval' => 30}}
| Config
]),
{ok, _} = emqtt:ConnFun(Client2),
?assertEqual(0, client_info(session_present, Client2)),
ok = emqtt:disconnect(Client2).
wait_for_pending(SId) ->
wait_for_pending(SId, 100).
wait_for_pending(_SId, 0) ->
error(exhausted_wait_for_pending);
wait_for_pending(SId, N) ->
case emqx_persistent_session:pending(SId) of
[] ->
timer:sleep(1),
wait_for_pending(SId, N - 1);
[_ | _] = Pending ->
Pending
end.
t_process_dies_session_expires(Config) ->
ConnFun = ?config(conn_fun, Config),
ClientId = ?config(client_id, Config),
Topic = ?config(topic, Config),
STopic = ?config(stopic, Config),
Payload = <<"test">>,
{ok, Client1} = emqtt:start_link([
{proto_ver, v5},
{clientid, ClientId},
{properties, #{'Session-Expiry-Interval' => 1}},
{clean_start, true}
| Config
]),
{ok, _} = emqtt:ConnFun(Client1),
{ok, _, [2]} = emqtt:subscribe(Client1, STopic, qos2),
ok = emqtt:disconnect(Client1),
maybe_kill_connection_process(ClientId, Config),
ok = publish(Topic, [Payload]),
SessionId =
case ?config(persistent_store_enabled, Config) of
false ->
undefined;
true ->
{Tag, Session} = emqx_persistent_session:lookup(ClientId),
?assertEqual(persistent, Tag),
SId = emqx_session:info(id, Session),
case ?config(kill_connection_process, Config) of
true ->
?assertMatch([_], wait_for_pending(SId));
false ->
skip
end,
SId
end,
timer:sleep(1100),
case
(?config(kill_connection_process, Config) andalso
?config(persistent_store_enabled, Config))
of
true -> ?assertMatch({expired, _}, emqx_persistent_session:lookup(ClientId));
false -> skip
end,
{ok, Client2} = emqtt:start_link([
{proto_ver, v5},
{clientid, ClientId},
{properties, #{'Session-Expiry-Interval' => 30}},
{clean_start, false}
| Config
]),
{ok, _} = emqtt:ConnFun(Client2),
?assertEqual(0, client_info(session_present, Client2)),
case
(?config(kill_connection_process, Config) andalso
?config(persistent_store_enabled, Config))
of
true ->
The session should be a fresh one
{persistent, NewSession} = emqx_persistent_session:lookup(ClientId),
?assertNotEqual(SessionId, emqx_session:info(id, NewSession)),
?assertMatch([], emqx_persistent_session:pending(SessionId));
false ->
skip
end,
?assertEqual([], receive_messages(1)),
emqtt:disconnect(Client2).
t_publish_while_client_is_gone(Config) ->
ConnFun = ?config(conn_fun, Config),
Topic = ?config(topic, Config),
STopic = ?config(stopic, Config),
Payload1 = <<"hello1">>,
Payload2 = <<"hello2">>,
ClientId = ?config(client_id, Config),
{ok, Client1} = emqtt:start_link([
{proto_ver, v5},
{clientid, ClientId},
{properties, #{'Session-Expiry-Interval' => 30}},
{clean_start, true}
| Config
]),
{ok, _} = emqtt:ConnFun(Client1),
{ok, _, [2]} = emqtt:subscribe(Client1, STopic, qos2),
ok = emqtt:disconnect(Client1),
maybe_kill_connection_process(ClientId, Config),
ok = publish(Topic, [Payload1, Payload2]),
{ok, Client2} = emqtt:start_link([
{proto_ver, v5},
{clientid, ClientId},
{properties, #{'Session-Expiry-Interval' => 30}},
{clean_start, false}
| Config
]),
{ok, _} = emqtt:ConnFun(Client2),
Msgs = receive_messages(2),
?assertMatch([_, _], Msgs),
[Msg2, Msg1] = Msgs,
?assertEqual({ok, iolist_to_binary(Payload1)}, maps:find(payload, Msg1)),
?assertEqual({ok, 2}, maps:find(qos, Msg1)),
?assertEqual({ok, iolist_to_binary(Payload2)}, maps:find(payload, Msg2)),
?assertEqual({ok, 2}, maps:find(qos, Msg2)),
ok = emqtt:disconnect(Client2).
t_clean_start_drops_subscriptions(Config) ->
1 . A persistent session is started and disconnected .
2 . While disconnected , a message is published and persisted .
3 . When connecting again , the clean start flag is set , the subscription is renewed ,
4 . Finally , a new connection is made with clean start set to false .
ConnFun = ?config(conn_fun, Config),
Topic = ?config(topic, Config),
STopic = ?config(stopic, Config),
Payload1 = <<"hello1">>,
Payload2 = <<"hello2">>,
Payload3 = <<"hello3">>,
ClientId = ?config(client_id, Config),
1 .
{ok, Client1} = emqtt:start_link([
{proto_ver, v5},
{clientid, ClientId},
{properties, #{'Session-Expiry-Interval' => 30}},
{clean_start, true}
| Config
]),
{ok, _} = emqtt:ConnFun(Client1),
{ok, _, [2]} = emqtt:subscribe(Client1, STopic, qos2),
ok = emqtt:disconnect(Client1),
maybe_kill_connection_process(ClientId, Config),
2 .
ok = publish(Topic, Payload1),
3 .
{ok, Client2} = emqtt:start_link([
{proto_ver, v5},
{clientid, ClientId},
{properties, #{'Session-Expiry-Interval' => 30}},
{clean_start, true}
| Config
]),
{ok, _} = emqtt:ConnFun(Client2),
?assertEqual(0, client_info(session_present, Client2)),
{ok, _, [2]} = emqtt:subscribe(Client2, STopic, qos2),
ok = publish(Topic, Payload2),
[Msg1] = receive_messages(1),
?assertEqual({ok, iolist_to_binary(Payload2)}, maps:find(payload, Msg1)),
ok = emqtt:disconnect(Client2),
maybe_kill_connection_process(ClientId, Config),
4 .
{ok, Client3} = emqtt:start_link([
{proto_ver, v5},
{clientid, ClientId},
{properties, #{'Session-Expiry-Interval' => 30}},
{clean_start, false}
| Config
]),
{ok, _} = emqtt:ConnFun(Client3),
ok = publish(Topic, Payload3),
[Msg2] = receive_messages(1),
?assertEqual({ok, iolist_to_binary(Payload3)}, maps:find(payload, Msg2)),
ok = emqtt:disconnect(Client3).
%% Verify that unsubscribing removes the subscription and — when the
%% persistent-session store is enabled — also removes the session's
%% route for the topic from the session router.
t_unsubscribe(Config) ->
    ConnFun = ?config(conn_fun, Config),
    Topic = ?config(topic, Config),
    STopic = ?config(stopic, Config),
    ClientId = ?config(client_id, Config),
    {ok, Client} = emqtt:start_link([
        {clientid, ClientId},
        {proto_ver, v5},
        {properties, #{'Session-Expiry-Interval' => 30}}
        | Config
    ]),
    {ok, _} = emqtt:ConnFun(Client),
    {ok, _, [2]} = emqtt:subscribe(Client, STopic, qos2),
    case emqx_persistent_session:is_store_enabled() of
        true ->
            %% The session must be routable by topic before unsubscribing...
            {persistent, Session} = emqx_persistent_session:lookup(ClientId),
            SessionID = emqx_session:info(id, Session),
            SessionIDs = [SId || #route{dest = SId} <- emqx_session_router:match_routes(Topic)],
            ?assert(lists:member(SessionID, SessionIDs)),
            ?assertMatch([_], [Sub || {ST, _} = Sub <- emqtt:subscriptions(Client), ST =:= STopic]),
            {ok, _, _} = emqtt:unsubscribe(Client, STopic),
            %% ...and its route must be gone afterwards.
            ?assertMatch([], [Sub || {ST, _} = Sub <- emqtt:subscriptions(Client), ST =:= STopic]),
            SessionIDs2 = [SId || #route{dest = SId} <- emqx_session_router:match_routes(Topic)],
            ?assert(not lists:member(SessionID, SessionIDs2));
        false ->
            ?assertMatch([_], [Sub || {ST, _} = Sub <- emqtt:subscriptions(Client), ST =:= STopic]),
            {ok, _, _} = emqtt:unsubscribe(Client, STopic),
            ?assertMatch([], [Sub || {ST, _} = Sub <- emqtt:subscriptions(Client), ST =:= STopic])
    end,
    ok = emqtt:disconnect(Client).
%% Verify that a message matching two overlapping subscriptions of the
%% same resumed session is delivered once per matching subscription.
t_multiple_subscription_matches(Config) ->
    ConnFun = ?config(conn_fun, Config),
    Topic = ?config(topic, Config),
    STopic1 = ?config(stopic, Config),
    STopic2 = ?config(stopic_alt, Config),
    Payload = <<"test message">>,
    ClientId = ?config(client_id, Config),
    {ok, Client1} = emqtt:start_link([
        {clientid, ClientId},
        {proto_ver, v5},
        {properties, #{'Session-Expiry-Interval' => 30}}
        | Config
    ]),
    {ok, _} = emqtt:ConnFun(Client1),
    {ok, _, [2]} = emqtt:subscribe(Client1, STopic1, qos2),
    {ok, _, [2]} = emqtt:subscribe(Client1, STopic2, qos2),
    ok = emqtt:disconnect(Client1),
    maybe_kill_connection_process(ClientId, Config),
    publish(Topic, Payload),
    {ok, Client2} = emqtt:start_link([
        {clientid, ClientId},
        {proto_ver, v5},
        {properties, #{'Session-Expiry-Interval' => 30}},
        {clean_start, false}
        | Config
    ]),
    {ok, _} = emqtt:ConnFun(Client2),
    %% We will receive the same message twice because it matches two
    %% subscriptions.
    [Msg1, Msg2] = receive_messages(2),
    ?assertEqual({ok, iolist_to_binary(Topic)}, maps:find(topic, Msg1)),
    ?assertEqual({ok, iolist_to_binary(Payload)}, maps:find(payload, Msg1)),
    ?assertEqual({ok, iolist_to_binary(Topic)}, maps:find(topic, Msg2)),
    ?assertEqual({ok, iolist_to_binary(Payload)}, maps:find(payload, Msg2)),
    ?assertEqual({ok, 2}, maps:find(qos, Msg1)),
    ?assertEqual({ok, 2}, maps:find(qos, Msg2)),
    ok = emqtt:disconnect(Client2).
%% Per-testcase setup/teardown. The test is only meaningful when the
%% session store is enabled and the connection process is killed; init
%% shrinks the message retention period so GC can drop a message within
%% the test's runtime, and 'end' restores the previous value.
t_lost_messages_because_of_gc(init, Config) ->
    case
        (emqx_persistent_session:is_store_enabled() andalso
            ?config(kill_connection_process, Config))
    of
        true ->
            Retain = 1000,
            OldRetain = emqx_config:get(?msg_retain, Retain),
            emqx_config:put(?msg_retain, Retain),
            [{retain, Retain}, {old_retain, OldRetain} | Config];
        false ->
            {skip, only_relevant_with_store_and_kill_process}
    end;
t_lost_messages_because_of_gc('end', Config) ->
    %% Restore the retention period changed in init.
    OldRetain = ?config(old_retain, Config),
    emqx_config:put(?msg_retain, OldRetain),
    ok.
%% A message that ages beyond the retention period before the session
%% resumes is garbage collected and lost; only the fresh message
%% survives.
t_lost_messages_because_of_gc(Config) ->
    ConnFun = ?config(conn_fun, Config),
    Topic = ?config(topic, Config),
    STopic = ?config(stopic, Config),
    ClientId = ?config(client_id, Config),
    Retain = ?config(retain, Config),
    Payload1 = <<"hello1">>,
    Payload2 = <<"hello2">>,
    {ok, Client1} = emqtt:start_link([
        {clientid, ClientId},
        {proto_ver, v5},
        {properties, #{'Session-Expiry-Interval' => 30}}
        | Config
    ]),
    {ok, _} = emqtt:ConnFun(Client1),
    {ok, _, [2]} = emqtt:subscribe(Client1, STopic, qos2),
    emqtt:disconnect(Client1),
    maybe_kill_connection_process(ClientId, Config),
    publish(Topic, Payload1),
    %% Let Payload1 age past the retention period, then run a GC pass.
    timer:sleep(2 * Retain),
    publish(Topic, Payload2),
    emqx_persistent_session_gc:message_gc_worker(),
    {ok, Client2} = emqtt:start_link([
        {clientid, ClientId},
        {clean_start, false},
        {proto_ver, v5},
        {properties, #{'Session-Expiry-Interval' => 30}}
        | Config
    ]),
    {ok, _} = emqtt:ConnFun(Client2),
    %% Ask for two messages; the equality below proves only Payload2 came.
    Msgs = receive_messages(2),
    ?assertMatch([_], Msgs),
    ?assertEqual({ok, iolist_to_binary(Payload2)}, maps:find(payload, hd(Msgs))),
    emqtt:disconnect(Client2),
    ok.
%% Snabbkaffe helpers
%% Assert the expected snabbkaffe trace of a session resume: every
%% ps_* event belongs to a single session id, and the resume stages
%% occur in their documented causal order.
check_snabbkaffe_vanilla(Trace) ->
    ResumeTrace = [
        T
     || #{?snk_kind := K} = T <- Trace,
        re:run(to_list(K), "^ps_") /= nomatch
    ],
    ?assertMatch([_ | _], ResumeTrace),
    %% All resume events must carry the same session id.
    [_Sid] = lists:usort(?projection(sid, ResumeTrace)),
    %% Check internal flow of the resuming (restored comment marker).
    ?assert(
        ?strict_causality(
            #{?snk_kind := ps_resuming},
            #{?snk_kind := ps_initial_pendings},
            ResumeTrace
        )
    ),
    ?assert(
        ?strict_causality(
            #{?snk_kind := ps_initial_pendings},
            #{?snk_kind := ps_persist_pendings},
            ResumeTrace
        )
    ),
    ?assert(
        ?strict_causality(
            #{?snk_kind := ps_persist_pendings},
            #{?snk_kind := ps_notify_writers},
            ResumeTrace
        )
    ),
    ?assert(
        ?strict_causality(
            #{?snk_kind := ps_notify_writers},
            #{?snk_kind := ps_node_markers},
            ResumeTrace
        )
    ),
    ?assert(
        ?strict_causality(
            #{?snk_kind := ps_node_markers},
            #{?snk_kind := ps_resume_session},
            ResumeTrace
        )
    ),
    ?assert(
        ?strict_causality(
            #{?snk_kind := ps_resume_session},
            #{?snk_kind := ps_marker_pendings},
            ResumeTrace
        )
    ),
    ?assert(
        ?strict_causality(
            #{?snk_kind := ps_marker_pendings},
            #{?snk_kind := ps_marker_pendings_msgs},
            ResumeTrace
        )
    ),
    ?assert(
        ?strict_causality(
            #{?snk_kind := ps_marker_pendings_msgs},
            #{?snk_kind := ps_resume_end},
            ResumeTrace
        )
    ),
    %% Check flow between worker and resuming process (restored
    %% comment; the original text was truncated in this copy).
    ?assert(
        ?strict_causality(
            #{?snk_kind := ps_notify_writers},
            #{?snk_kind := ps_worker_started},
            ResumeTrace
        )
    ),
    ?assert(
        ?strict_causality(
            #{?snk_kind := ps_marker_pendings},
            #{?snk_kind := ps_worker_resume_end},
            ResumeTrace
        )
    ),
    ?assert(
        ?strict_causality(
            #{?snk_kind := ps_worker_resume_end},
            #{?snk_kind := ps_worker_shutdown},
            ResumeTrace
        )
    ),
    %% Exactly one node marker must have been created for the resume.
    [Markers] = ?projection(markers, ?of_kind(ps_node_markers, Trace)),
    ?assertMatch([_], Markers).
%% Coerce a list, atom, or binary to a charlist (string).
to_list(Value) ->
    case Value of
        _ when is_list(Value) -> Value;
        _ when is_atom(Value) -> atom_to_list(Value);
        _ when is_binary(Value) -> binary_to_list(Value)
    end.
%% Snabbkaffe tests
%% Resume with no pending messages: the trace of a plain reconnect must
%% satisfy all of the vanilla resume stages.
t_snabbkaffe_vanilla_stages(Config) ->
    ConnFun = ?config(conn_fun, Config),
    ClientId = ?config(client_id, Config),
    EmqttOpts = [
        {proto_ver, v5},
        {clientid, ClientId},
        {properties, #{'Session-Expiry-Interval' => 30}}
        | Config
    ],
    {ok, Client1} = emqtt:start_link([{clean_start, true} | EmqttOpts]),
    {ok, _} = emqtt:ConnFun(Client1),
    ok = emqtt:disconnect(Client1),
    maybe_kill_connection_process(ClientId, Config),
    ?check_trace(
        begin
            {ok, Client2} = emqtt:start_link([{clean_start, false} | EmqttOpts]),
            {ok, _} = emqtt:ConnFun(Client2),
            ok = emqtt:disconnect(Client2)
        end,
        fun(ok, Trace) ->
            check_snabbkaffe_vanilla(Trace)
        end
    ),
    ok.
%% Messages published while the session is disconnected must all be
%% delivered on resume, each exactly once, split between the
%% persist-pendings and marker-pendings stages of the resume flow.
t_snabbkaffe_pending_messages(Config) ->
    ConnFun = ?config(conn_fun, Config),
    ClientId = ?config(client_id, Config),
    Topic = ?config(topic, Config),
    STopic = ?config(stopic, Config),
    Payloads = [<<"test", (integer_to_binary(X))/binary>> || X <- [1, 2, 3, 4, 5]],
    EmqttOpts = [
        {proto_ver, v5},
        {clientid, ClientId},
        {properties, #{'Session-Expiry-Interval' => 30}}
        | Config
    ],
    {ok, Client1} = emqtt:start_link([{clean_start, true} | EmqttOpts]),
    {ok, _} = emqtt:ConnFun(Client1),
    {ok, _, [2]} = emqtt:subscribe(Client1, STopic, qos2),
    ok = emqtt:disconnect(Client1),
    maybe_kill_connection_process(ClientId, Config),
    ?check_trace(
        begin
            snabbkaffe_sync_publish(Topic, Payloads),
            {ok, Client2} = emqtt:start_link([{clean_start, false} | EmqttOpts]),
            {ok, _} = emqtt:ConnFun(Client2),
            Msgs = receive_messages(length(Payloads)),
            ReceivedPayloads = [P || #{payload := P} <- Msgs],
            ?assertEqual(lists:sort(ReceivedPayloads), lists:sort(Payloads)),
            ok = emqtt:disconnect(Client2)
        end,
        fun(ok, Trace) ->
            check_snabbkaffe_vanilla(Trace),
            %% Each pending message is delivered in exactly one of the
            %% two delivery stages, with no duplicates.
            [Delivers1] = ?projection(msgs, ?of_kind(ps_persist_pendings_msgs, Trace)),
            [Delivers2] = ?projection(msgs, ?of_kind(ps_marker_pendings_msgs, Trace)),
            Delivers = Delivers1 ++ Delivers2,
            ?assertEqual(length(Payloads), length(Delivers)),
            ?assertEqual(lists:usort(Delivers), lists:sort(Delivers))
        end
    ),
    ok.
%% Messages published concurrently with an in-progress resume (after
%% the marker-pendings stage) must still be delivered, flowing through
%% the writer-pendings buffer.
t_snabbkaffe_buffered_messages(Config) ->
    ConnFun = ?config(conn_fun, Config),
    ClientId = ?config(client_id, Config),
    Topic = ?config(topic, Config),
    STopic = ?config(stopic, Config),
    Payloads1 = [<<"test", (integer_to_binary(X))/binary>> || X <- [1, 2, 3]],
    Payloads2 = [<<"test", (integer_to_binary(X))/binary>> || X <- [4, 5, 6]],
    EmqttOpts = [
        {proto_ver, v5},
        {clientid, ClientId},
        {properties, #{'Session-Expiry-Interval' => 30}}
        | Config
    ],
    {ok, Client1} = emqtt:start_link([{clean_start, true} | EmqttOpts]),
    {ok, _} = emqtt:ConnFun(Client1),
    {ok, _, [2]} = emqtt:subscribe(Client1, STopic, qos2),
    ok = emqtt:disconnect(Client1),
    maybe_kill_connection_process(ClientId, Config),
    publish(Topic, Payloads1),
    ?check_trace(
        begin
            %% Make the resume init phase wait until the first message
            %% is delivered.
            ?force_ordering(
                #{?snk_kind := ps_worker_deliver},
                #{?snk_kind := ps_resume_end}
            ),
            Parent = self(),
            %% Publish the second batch in the middle of the resume.
            spawn_link(fun() ->
                ?block_until(#{?snk_kind := ps_marker_pendings_msgs}, infinity, 5000),
                publish(Topic, Payloads2, true),
                Parent ! publish_done,
                ok
            end),
            {ok, Client2} = emqtt:start_link([{clean_start, false} | EmqttOpts]),
            {ok, _} = emqtt:ConnFun(Client2),
            receive
                publish_done -> ok
            after 10000 -> error(too_long_to_publish)
            end,
            %% Try to receive one more message than was published; the
            %% sorted-equality check below then fails on any duplicate.
            Msgs = receive_messages(length(Payloads1) + length(Payloads2) + 1),
            ReceivedPayloads = [P || #{payload := P} <- Msgs],
            ?assertEqual(
                lists:sort(Payloads1 ++ Payloads2),
                lists:sort(ReceivedPayloads)
            ),
            ok = emqtt:disconnect(Client2)
        end,
        fun(ok, Trace) ->
            check_snabbkaffe_vanilla(Trace),
            %% At least one — and at most all — of the concurrently
            %% published messages went through the writer buffer.
            [Msgs] = ?projection(msgs, ?of_kind(ps_writer_pendings, Trace)),
            ?assertMatch(
                X when 0 < X andalso X =< length(Payloads2),
                length(Msgs)
            )
        end
    ),
    ok.
%% GC tests
%% Tag values distinguishing the kinds of entries a session keeps in
%% the session-message store.
-define(MARKER, 3).
-define(DELIVERED, 2).
-define(UNDELIVERED, 1).
-define(ABANDONED, 0).

%% Fresh globally-unique message id.
msg_id() ->
    emqx_guid:gen().

%% Store key for a message that was delivered to the session.
delivered_msg(MsgId, SessionID, STopic) ->
    {SessionID, MsgId, STopic, ?DELIVERED}.

%% Store key for a message not yet delivered to the session.
undelivered_msg(MsgId, SessionID, STopic) ->
    {SessionID, MsgId, STopic, ?UNDELIVERED}.

%% Store key for a resume marker (carries no topic).
marker_msg(MarkerID, SessionID) ->
    {SessionID, MarkerID, <<>>, ?MARKER}.
%% Make a fake GUID and set a timestamp: take a real GUID and shift
%% its leading 64-bit timestamp MicrosecondsAgo into the past.
guid(MicrosecondsAgo) ->
    <<TS:64, Tail:64>> = emqx_guid:gen(),
    <<(TS - MicrosecondsAgo):64, Tail:64>>.

abandoned_session_msg(SessionID) ->
    abandoned_session_msg(SessionID, 0).

%% Store entry marking SessionID as abandoned at (now - MicrosecondsAgo).
abandoned_session_msg(SessionID, MicrosecondsAgo) ->
    TS = erlang:system_time(microsecond),
    {SessionID, <<>>, <<(TS - MicrosecondsAgo):64>>, ?ABANDONED}.
%% Build a GC callback that records every key passed with the `delete`
%% tag in a private ets table; calling it as (collect, <<>>) returns
%% all recorded keys (in key order) and destroys the table. Any other
%% tag is ignored.
fresh_gc_delete_fun() ->
    Table = ets:new(gc_collect, [ordered_set]),
    fun
        (collect, <<>>) ->
            Matches = ets:match(Table, {'$1'}),
            ets:delete(Table),
            lists:append(Matches);
        (delete, Key) ->
            ets:insert(Table, {Key}),
            ok;
        (_Tag, _Key) ->
            ok
    end.
%% Build a GC callback that records every (Tag, Key) invocation as a
%% {Key, Tag} pair in a private ets table; calling it as
%% (collect, <<>>) returns all recorded pairs (in term order) and
%% destroys the table.
fresh_gc_callbacks_fun() ->
    Table = ets:new(gc_collect, [ordered_set]),
    fun
        (collect, <<>>) ->
            Matches = ets:match(Table, {'$1'}),
            ets:delete(Table),
            lists:append(Matches);
        (Tag, Key) ->
            ets:insert(Table, {{Key, Tag}}),
            ok
    end.
%% Run a full GC pass with a delete-collecting callback and return the
%% keys the pass would delete.
get_gc_delete_messages() ->
    Fun = fresh_gc_delete_fun(),
    emqx_persistent_session:gc_session_messages(Fun),
    Fun(collect, <<>>).

%% Run a full GC pass recording every callback invocation and return
%% the recorded {Key, Tag} pairs.
get_gc_callbacks() ->
    Fun = fresh_gc_callbacks_fun(),
    emqx_persistent_session:gc_session_messages(Fun),
    Fun(collect, <<>>).
%% When every message has both a delivered and an undelivered entry,
%% the GC delete pass removes all of them.
t_gc_all_delivered(Config) ->
    Store = ?config(session_msg_store, Config),
    STopic = ?config(stopic, Config),
    SessionId = emqx_guid:gen(),
    MsgIds = [msg_id() || _ <- lists:seq(1, 5)],
    Delivered = [delivered_msg(X, SessionId, STopic) || X <- MsgIds],
    Undelivered = [undelivered_msg(X, SessionId, STopic) || X <- MsgIds],
    SortedContent = lists:usort(Delivered ++ Undelivered),
    ets:insert(Store, [{X, <<>>} || X <- SortedContent]),
    GCMessages = get_gc_delete_messages(),
    ?assertEqual(SortedContent, GCMessages),
    ok.
%% Only messages that have a matching delivered entry are deleted;
%% entries that are still undelivered-only are kept.
t_gc_some_undelivered(Config) ->
    Store = ?config(session_msg_store, Config),
    STopic = ?config(stopic, Config),
    SessionId = emqx_guid:gen(),
    MsgIds = [msg_id() || _ <- lists:seq(1, 10)],
    Delivered = [delivered_msg(X, SessionId, STopic) || X <- MsgIds],
    {Delivered1, _Delivered2} = split(Delivered),
    Undelivered = [undelivered_msg(X, SessionId, STopic) || X <- MsgIds],
    {Undelivered1, Undelivered2} = split(Undelivered),
    Content = Delivered1 ++ Undelivered1 ++ Undelivered2,
    ets:insert(Store, [{X, <<>>} || X <- Content]),
    %% split/1 picks the same alternating ids from both lists, so
    %% Delivered1/Undelivered1 form the delivered+undelivered pairs.
    Expected = lists:usort(Delivered1 ++ Undelivered1),
    GCMessages = get_gc_delete_messages(),
    ?assertEqual(Expected, GCMessages),
    ok.
%% Same as t_gc_some_undelivered, but with a resume marker present:
%% the delete pass must leave the marker alone.
t_gc_with_markers(Config) ->
    Store = ?config(session_msg_store, Config),
    STopic = ?config(stopic, Config),
    SessionId = emqx_guid:gen(),
    MsgIds1 = [msg_id() || _ <- lists:seq(1, 10)],
    MarkerId = msg_id(),
    MsgIds = [msg_id() || _ <- lists:seq(1, 4)] ++ MsgIds1,
    Delivered = [delivered_msg(X, SessionId, STopic) || X <- MsgIds],
    {Delivered1, _Delivered2} = split(Delivered),
    Undelivered = [undelivered_msg(X, SessionId, STopic) || X <- MsgIds],
    {Undelivered1, Undelivered2} = split(Undelivered),
    Markers = [marker_msg(MarkerId, SessionId)],
    Content = Delivered1 ++ Undelivered1 ++ Undelivered2 ++ Markers,
    ets:insert(Store, [{X, <<>>} || X <- Content]),
    %% Markers are not part of the expected deletions.
    Expected = lists:usort(Delivered1 ++ Undelivered1),
    GCMessages = get_gc_delete_messages(),
    ?assertEqual(Expected, GCMessages),
    ok.
%% Once a session is abandoned, the delete pass removes all of its
%% message entries — delivered or not — but not the abandoned marker
%% itself.
t_gc_abandoned_some_undelivered(Config) ->
    Store = ?config(session_msg_store, Config),
    STopic = ?config(stopic, Config),
    SessionId = emqx_guid:gen(),
    MsgIds = [msg_id() || _ <- lists:seq(1, 10)],
    Delivered = [delivered_msg(X, SessionId, STopic) || X <- MsgIds],
    {Delivered1, _Delivered2} = split(Delivered),
    Undelivered = [undelivered_msg(X, SessionId, STopic) || X <- MsgIds],
    {Undelivered1, Undelivered2} = split(Undelivered),
    Abandoned = abandoned_session_msg(SessionId),
    Content = Delivered1 ++ Undelivered1 ++ Undelivered2 ++ [Abandoned],
    ets:insert(Store, [{X, <<>>} || X <- Content]),
    Expected = lists:usort(Delivered1 ++ Undelivered1 ++ Undelivered2),
    GCMessages = get_gc_delete_messages(),
    ?assertEqual(Expected, GCMessages),
    ok.
%% The `abandoned` GC callback fires only when the abandoned marker is
%% the session's sole remaining entry in the store.
t_gc_abandoned_only_called_on_empty_session(Config) ->
    Store = ?config(session_msg_store, Config),
    STopic = ?config(stopic, Config),
    SessionId = emqx_guid:gen(),
    MsgIds = [msg_id() || _ <- lists:seq(1, 10)],
    Delivered = [delivered_msg(X, SessionId, STopic) || X <- MsgIds],
    Undelivered = [undelivered_msg(X, SessionId, STopic) || X <- MsgIds],
    Abandoned = abandoned_session_msg(SessionId),
    Content = Delivered ++ Undelivered ++ [Abandoned],
    ets:insert(Store, [{X, <<>>} || X <- Content]),
    %% Session still has message entries: no abandoned callback.
    GCMessages = get_gc_callbacks(),
    ?assertEqual([], [X || {X, abandoned} <- GCMessages]),
    ets:delete_all_objects(Store),
    %% Only the abandoned marker remains for this session (another
    %% session's message must not interfere): the callback must fire.
    UndeliveredOtherSession = undelivered_msg(msg_id(), emqx_guid:gen(), <<"topic">>),
    ets:insert(Store, [{X, <<>>} || X <- [Abandoned, UndeliveredOtherSession]]),
    GCMessages2 = get_gc_callbacks(),
    ?assertEqual([Abandoned], [X || {X, abandoned} <- GCMessages2]),
    ok.
%% Setup/teardown: mock emqx_persistent_session so that delete calls
%% made by the GC worker can be inspected via meck history.
t_gc_session_gc_worker(init, Config) ->
    meck:new(emqx_persistent_session, [passthrough, no_link]),
    Config;
t_gc_session_gc_worker('end', _Config) ->
    meck:unload(emqx_persistent_session),
    ok.
%% The session GC worker deletes plain messages unconditionally, but
%% deletes markers and abandoned records only once they are old enough.
t_gc_session_gc_worker(Config) ->
    STopic = ?config(stopic, Config),
    SessionID = emqx_guid:gen(),
    MsgDeleted = delivered_msg(msg_id(), SessionID, STopic),
    MarkerNotDeleted = marker_msg(msg_id(), SessionID),
    %% Marker whose GUID timestamp lies 120 s in the past: old enough
    %% to be GC'ed (the exact threshold lives in the worker, not here).
    MarkerDeleted = marker_msg(guid(120 * 1000 * 1000), SessionID),
    AbandonedNotDeleted = abandoned_session_msg(SessionID),
    AbandonedDeleted = abandoned_session_msg(SessionID, 500 * 1000 * 1000),
    meck:expect(emqx_persistent_session, delete_session_message, fun(_Key) -> ok end),
    emqx_persistent_session_gc:session_gc_worker(delete, MsgDeleted),
    emqx_persistent_session_gc:session_gc_worker(marker, MarkerNotDeleted),
    emqx_persistent_session_gc:session_gc_worker(marker, MarkerDeleted),
    emqx_persistent_session_gc:session_gc_worker(abandoned, AbandonedDeleted),
    emqx_persistent_session_gc:session_gc_worker(abandoned, AbandonedNotDeleted),
    History = meck:history(emqx_persistent_session, self()),
    DeleteCalls = [
        Key
     || {_Pid, {_, delete_session_message, [Key]}, _Result} <-
            History
    ],
    ?assertEqual(
        lists:sort([MsgDeleted, AbandonedDeleted, MarkerDeleted]),
        lists:sort(DeleteCalls)
    ),
    ok.
%% The message GC worker drops messages older than the retention
%% period and keeps newer ones.
t_gc_message_gc(Config) ->
    Topic = ?config(topic, Config),
    ClientID = ?config(client_id, Config),
    Store = ?config(msg_store, Config),
    NewMsgs = [
        emqx_message:make(ClientID, Topic, integer_to_binary(P))
     || P <- lists:seq(6, 10)
    ],
    Retain = 60 * 1000,
    emqx_config:put(?msg_retain, Retain),
    Msgs1 = [
        emqx_message:make(ClientID, Topic, integer_to_binary(P))
     || P <- lists:seq(1, 5)
    ],
    %% Rewrite ids so these messages appear Retain ms old.
    OldMsgs = [M#message{id = guid(Retain * 1000)} || M <- Msgs1],
    ets:insert(Store, NewMsgs ++ OldMsgs),
    ?assertEqual(lists:sort(OldMsgs ++ NewMsgs), ets:tab2list(Store)),
    ok = emqx_persistent_session_gc:message_gc_worker(),
    ?assertEqual(lists:sort(NewMsgs), ets:tab2list(Store)),
    ok.
%% Split a list into two lists of alternating elements (odd positions,
%% even positions); each result list is in reverse order of appearance.
split(List) ->
    split(List, [], []).

split([X, Y | Rest], Acc1, Acc2) ->
    split(Rest, [X | Acc1], [Y | Acc2]);
split([X], Acc1, Acc2) ->
    {[X | Acc1], Acc2};
split([], Acc1, Acc2) ->
    {Acc1, Acc2}.
|
118306fa4ac7ea77927680a1dc3cc45178215b1d2555263bcc6b152cf9d24f5b | gsakkas/rite | 20060308-17:49:12-350bb6231959f879ee62f44461de9e70.seminal.ml |
(* Raised by exercises that are not yet implemented. *)
exception Unimplemented

(* [char2str c] is the one-character string containing [c]. *)
let char2str c = String.make 1 c
(* [getStr str count] concatenates [str.[count]], [str.[count-1]], ...,
   [str.[0]], i.e. reverses the first [count + 1] characters of [str].
   [count = -1] yields the empty string. *)
let rec getStr str count =
  match count with
  | -1 -> ""
  | _ -> String.make 1 str.[count] ^ getStr str (count - 1)
(* [reverse str] is [str] with its characters in reverse order. *)
let reverse str = getStr str (String.length str - 1)
(* [map_helper f str i] applies [f] to each character of [str] from
   position [i] to the end, collecting the results in order. *)
let rec map_helper f str i =
  if i = String.length str then []
  else f str.[i] :: map_helper f str (i + 1)
(* [map f str] maps [f] over the characters of [str], returning the
   results as a list. *)
let map f str =
  map_helper f str 0
(* Debug folder for the driver below: renders one fold-left step as
   "f(acc, char)". *)
let test_fold_left acc b = "f(" ^ acc ^ ", " ^ (char2str b) ^ ")"
(* [fold_left_helper f acc str i] folds [f] left-to-right over the
   characters of [str] from position [i] onward, starting from [acc]. *)
let rec fold_left_helper f acc str i =
  if i <> String.length str then
    fold_left_helper f (f acc str.[i]) str (i + 1)
  else acc
(* [fold_left f init str] folds [f] left-to-right over the characters
   of [str], starting from [init].
   BUG FIX: the original ignored [f] entirely and always folded with
   the debug function [test_fold_left], silently discarding any
   caller-supplied folder and forcing the accumulator to be a string. *)
let fold_left f init str =
  fold_left_helper f init str 0
(* Debug folder for the driver below: renders one fold-right step as
   "g(char, acc)". *)
let test_fold_right b acc = "g(" ^ (char2str b) ^ ", " ^ acc ^ ")"
(* [fold_right_helper f str acc i] folds [f] over [str.[i]] down to
   [str.[0]], right to left, accumulating into [acc].  [i = -1] yields
   [acc] unchanged. *)
let rec fold_right_helper f str acc i =
  match i with
  | -1 -> acc
  | _ -> fold_right_helper f str (f str.[i] acc) (i - 1)
(* [fold_right f str init] folds [f] right-to-left over the characters
   of [str], starting from [init]. *)
let fold_right f str init =
  fold_right_helper f str init ((String.length str)-1)
(* [char_list_to_string cs] concatenates the characters of [cs] into a
   single string. *)
let char_list_to_string charList =
  String.concat "" (List.map (String.make 1) charList)
(* [uppercase s] is [s] with every character upper-cased via
   [Char.uppercase] (deprecated in modern OCaml in favour of
   [Char.uppercase_ascii] — kept here to match the file's stdlib era). *)
let uppercase s =
  let lst = map (fun c -> (Char.uppercase c)) s in
  char_list_to_string lst
(* [lowercase s] is [s] with every character lower-cased via
   [Char.lowercase] (deprecated in modern OCaml in favour of
   [Char.lowercase_ascii] — kept here to match the file's stdlib era). *)
let lowercase s =
  let lst = map (fun c -> (Char.lowercase c)) s in
  char_list_to_string lst
(* Fold accumulator for [titlecase]: [Tester (is_space, str)] carries
   whether the previously seen character was a space, plus the output
   built so far. *)
type caseTester =
  Tester of bool * string
(* One title-casing step: [test_title a acc] appends character [a] to
   the accumulator, upper-casing it when it starts a new word (the
   previous character was a space) and lower-casing it otherwise.
   BUG FIXES versus the original:
   - the inner [match] arms were not parenthesised, so the [else]
     branch could not parse;
   - [Tester(false; ...)] used [;] (sequencing) where a tuple comma
     was required;
   - the string-level [uppercase]/[lowercase] were applied to a [char];
     the character-level [Char.uppercase]/[Char.lowercase] are what
     type-checks here. *)
let test_title a acc =
  match acc with
  | Tester (is_space, str) ->
    if Char.compare a ' ' = 0 then
      Tester (true, str ^ char2str a)
    else if is_space then
      Tester (false, str ^ char2str (Char.uppercase a))
    else
      Tester (false, str ^ char2str (Char.lowercase a))
(* [titlecase s] upper-cases the first letter of each space-separated
   word of [s] and lower-cases every other letter.
   BUG FIXES versus the original:
   - the folding function discarded both of its arguments and never
     actually applied [test_title];
   - [Tester(false; "")] was not parenthesised as an argument and used
     [;] where [,] was required; the initial flag must be [true] so the
     very first character of the string is capitalised;
   - the fold result is unwrapped to the accumulated string (the driver
     passes it to [print_endline]).  [fold_left_helper] is used
     directly, with the argument order adapted to [test_title]. *)
let titlecase s =
  match fold_left_helper (fun acc c -> test_title c acc) (Tester (true, "")) s 0 with
  | Tester (_, str) -> str
(* Not yet implemented; always raises [Unimplemented]. *)
let histogram s =
  raise Unimplemented
(* [charConvert c] is the character code of [c]. *)
let charConvert char = Char.code char
(* [printRec ns] renders [ns] as decimal numbers each followed by a
   single space, e.g. [1; 2] -> "1 2 ". *)
let rec printRec = function
  | [] -> ""
  | n :: rest -> string_of_int n ^ " " ^ printRec rest
(* Driver: reads the first command-line argument and exercises each of
   the string utilities above, printing every result. *)
let _ = let arg1 = Sys.argv.(1) in
  let ret = reverse arg1 in
  let _ = print_endline ret in
  let lst = map charConvert arg1 in
  let _=print_endline (printRec lst) in
  let foldLeftString = fold_left test_fold_left "null" arg1 in
  let _ = print_endline foldLeftString in
  let foldRightString = fold_right test_fold_right arg1 "null" in
  let _ = print_endline foldRightString in
  let _ = print_endline (uppercase arg1) in
  let _ = print_endline (lowercase arg1) in
  print_endline (titlecase arg1)
| null | https://raw.githubusercontent.com/gsakkas/rite/958a0ad2460e15734447bc07bd181f5d35956d3b/features/data/seminal/20060308-17%3A49%3A12-350bb6231959f879ee62f44461de9e70.seminal.ml | ocaml |
exception Unimplemented
let char2str c = String.make 1 c
let rec getStr str count =
if count = -1 then
""
else
(char2str (String.get str count)) ^ (getStr str (count-1))
let reverse str =
let count = (String.length str)-1 in
getStr str count
let rec map_helper f str index =
if index = String.length str then
[]
else
(f (String.get str index))::(map_helper f str (index+1))
let map f str =
map_helper f str 0
let test_fold_left acc b = "f(" ^ acc ^ ", " ^ (char2str b) ^ ")"
let rec fold_left_helper f acc str index =
if index = (String.length str) then
acc
else
fold_left_helper f (f acc (String.get str index)) str (index+1)
(* [fold_left f init str] folds [f] left-to-right over the characters
   of [str], starting from [init].
   BUG FIX: the original ignored [f] entirely and always folded with
   the debug function [test_fold_left], silently discarding any
   caller-supplied folder and forcing the accumulator to be a string. *)
let fold_left f init str =
  fold_left_helper f init str 0
let test_fold_right b acc = "g(" ^ (char2str b) ^ ", " ^ acc ^ ")"
let rec fold_right_helper f str acc index =
if index = -1 then
acc
else
fold_right_helper f str (f (String.get str index) acc) (index-1)
let fold_right f str init =
fold_right_helper f str init ((String.length str)-1)
let rec char_list_to_string charList =
match charList with
[] -> ""
| hd::tl -> (char2str hd) ^ (char_list_to_string tl)
let uppercase s =
let lst = map (fun c -> (Char.uppercase c)) s in
char_list_to_string lst
let lowercase s =
let lst = map (fun c -> (Char.lowercase c)) s in
char_list_to_string lst
type caseTester =
Tester of bool * string
(* One title-casing step: [test_title a acc] appends character [a] to
   the accumulator, upper-casing it when it starts a new word (the
   previous character was a space) and lower-casing it otherwise.
   BUG FIXES versus the original:
   - the inner [match] arms were not parenthesised, so the [else]
     branch could not parse;
   - [Tester(false; ...)] used [;] (sequencing) where a tuple comma
     was required;
   - the string-level [uppercase]/[lowercase] were applied to a [char];
     the character-level [Char.uppercase]/[Char.lowercase] are what
     type-checks here. *)
let test_title a acc =
  match acc with
  | Tester (is_space, str) ->
    if Char.compare a ' ' = 0 then
      Tester (true, str ^ char2str a)
    else if is_space then
      Tester (false, str ^ char2str (Char.uppercase a))
    else
      Tester (false, str ^ char2str (Char.lowercase a))
(* [titlecase s] upper-cases the first letter of each space-separated
   word of [s] and lower-cases every other letter.
   BUG FIXES versus the original:
   - the folding function discarded both of its arguments and never
     actually applied [test_title];
   - [Tester(false; "")] was not parenthesised as an argument and used
     [;] where [,] was required; the initial flag must be [true] so the
     very first character of the string is capitalised;
   - the fold result is unwrapped to the accumulated string (the driver
     passes it to [print_endline]).  [fold_left_helper] is used
     directly, with the argument order adapted to [test_title]. *)
let titlecase s =
  match fold_left_helper (fun acc c -> test_title c acc) (Tester (true, "")) s 0 with
  | Tester (_, str) -> str
let histogram s =
raise Unimplemented
let charConvert char = Char.code char
let rec printRec lst =
match lst with
[] -> ""
| hd::tl -> string_of_int hd ^ " " ^ (printRec tl)
let _ = let arg1 = Sys.argv.(1) in
let ret = reverse arg1 in
let _ = print_endline ret in
let lst = map charConvert arg1 in
let _=print_endline (printRec lst) in
let foldLeftString = fold_left test_fold_left "null" arg1 in
let _ = print_endline foldLeftString in
let foldRightString = fold_right test_fold_right arg1 "null" in
let _ = print_endline foldRightString in
let _ = print_endline (uppercase arg1) in
let _ = print_endline (lowercase arg1) in
print_endline (titlecase arg1)
| |
3e675e445623369f89026f464172e5af01f46a252c7946b68401aeee9cc3ebe3 | racket/drracket | pict-snip.rkt | #lang racket/base
(require racket/snip
racket/class
racket/match
racket/draw
file/convertible
racket/format
wxme
(prefix-in r: racket/base))
(module+ test (require rackunit))
(provide pict-snip% snip-class reader)
;; Interface wiring snips into file/convertible: prop:convertible
;; dispatches to the object's own `convert` method.
(define convertible<%>
  (interface* () ([prop:convertible (lambda (v r d)
                                      (send v convert r d))])
    convert))
;; this snip is created on the user's space,
;; but its callbacks are invoked on DrRacket's side.
;; A snip that renders a recorded drawing (a record-dc% recorded
;; datum) with the given width/height/descent/ascent.  The drawing
;; procedure is compiled lazily from the datum on first draw; if the
;; datum fails to compile, the error message is drawn in red instead
;; of crashing the editor.
(define pict-snip%
  (class* snip% (convertible<%>)
    (init-field w h d a recorded-datum)
    ;; Report the snip's dimensions through whichever boxes the editor
    ;; supplied.
    (define/override (get-extent dc x y [wb #f] [hb #f] [descent #f] [space #f] [lspace #f] [rspace #f])
      (set-box/f lspace 0)
      (set-box/f rspace 0)
      (set-box/f wb w)
      (set-box/f hb h)
      (set-box/f descent d)
      (set-box/f space a))
    ;; Lazily compiled drawing procedure (dc -> void); #f until first draw.
    (define proc #f)
    (define/override (draw dc x y left top right bottom dx dy draw-caret)
      (unless proc
        (set! proc (with-handlers ((exn:fail? mk-error-drawer))
                     (recorded-datum->procedure recorded-datum))))
      ;; Shift the dc origin so the recorded drawing lands at (x, y),
      ;; then restore the previous origin.
      (define-values (ox oy) (send dc get-origin))
      (send dc set-origin (+ ox x) (+ oy y))
      (proc dc)
      (send dc set-origin ox oy))
    (define/override (copy) (new pict-snip% [w w] [h h] [d d] [a a]
                                 [recorded-datum recorded-datum]))
    ;; Serialize using the version-1 snipclass format.
    (define/override (write f)
      (write-version-1-of-snip w h d a recorded-datum f))
    ;; Scrolling is done in 12-pixel steps.
    (define/override (find-scroll-step y)
      (inexact->exact (floor (/ y 12))))
    (define/override (get-num-scroll-steps)
      (add1 (find-scroll-step h)))
    (define/override (get-scroll-step-offset y)
      (inexact->exact (floor (* y 12))))
    (super-new)
    (inherit set-snipclass)
    (set-snipclass snip-class)
    ;; file/convertible hook: render to 'png-bytes or 'pdf-bytes;
    ;; any other request returns the default value d.
    ;; NB: the parameter d shadows the descent field inside convert.
    (define/public (convert r d)
      (case r
        [(png-bytes)
         (define bm (make-bitmap (inexact->exact (ceiling w))
                                 (inexact->exact (ceiling h))))
         (define dc (send bm make-dc))
         (draw dc 0 0 0 0 w h 0 0 #f)
         (define b (open-output-bytes))
         (send bm save-file b 'png)
         (get-output-bytes b)]
        [(pdf-bytes)
         (define b (open-output-bytes))
         (define dc (new pdf-dc%
                         [interactive #f]
                         [width w] [height h]
                         [output b]))
         (send dc start-doc "pict")
         (send dc start-page)
         (draw dc 0 0 0 0 w h 0 0 #f)
         (send dc end-page)
         (send dc end-doc)
         (get-output-bytes b)]
        [else d]))))
;; Store v into b when b is actually a box; otherwise do nothing.
(define (set-box/f b v)
  (cond
    [(box? b) (set-box! b v)]
    [else (void)]))
;; Curried: build a drawing procedure that paints the exception's
;; message in red (restoring the dc's text color afterwards); used when
;; the recorded datum fails to compile into a drawing procedure.
(define ((mk-error-drawer exn) dc)
  (define clr (send dc get-text-foreground))
  (send dc set-text-foreground "red")
  (send dc draw-text (exn-message exn) 0 0 'grapheme)
  (send dc set-text-foreground clr))
;; The editor snipclass: reads back both version 0 (a single written
;; s-expression) and version 1 (placeholder-prefixed) serializations.
(define snip-class
  (new (class snip-class%
         (inherit reading-version set-version)
         (define/override (read f)
           (define version (reading-version f))
           (case version
             [(0)
              (parse-version-0-pict-snip-from-bytes
               (send f get-unterminated-bytes))]
             [(1)
              (parse-version-1-pict-snip
               (λ () (send f get-unterminated-bytes))
               (λ () (send f get-exact)))]))
         (super-new)
         (set-version 1))))
;; The classname encodes the module paths used to locate this snip's
;; reader implementation.
(send snip-class set-classname (format "~s" (list '(lib "pict-snip.rkt" "drracket" "private")
                                                  '(lib "pict-snip.rkt" "drracket" "private"))))
(send (get-the-snip-class-list) add snip-class)
;; wxme snip reader so files containing pict snips can be read outside
;; the GUI editor; in text-only mode the snip renders as a placeholder.
(define reader
  (new (class* object% (snip-reader<%>)
         (define/public (read-header version stream) (void))
         (define/public (read-snip text-only? version stream)
           (if text-only?
               #"#<pict-snip>"
               (or (case version
                     [(0)
                      (parse-version-0-pict-snip-from-bytes
                       (send stream read-raw-bytes 'pict-snip))]
                     [(1)
                      (parse-version-1-pict-snip
                       (λ () (send stream read-raw-bytes "drracket's pict-snip%"))
                       (λ () (send stream read-integer "drracket's pict-snip%")))])
                   (error 'pict-snip.rkt "could not read pict-snip from stream"))))
         (super-new))))
;; parse-pict-snip-from-bytes : bytes -> (or/c (is-a?/c pict-snip%) #f)
(define (parse-version-0-pict-snip-from-bytes bytes)
(let/ec escape
(define prt (open-input-bytes bytes))
(define sexp (with-handlers ([exn:fail:read? (λ (x) (escape #f))])
(read prt)))
(match sexp
[`(,(? real? w) ,(? real? h) ,(? real? d) ,(? real? a) ,recorded-datum)
(new pict-snip% [w w] [h h] [d d] [a a]
[recorded-datum recorded-datum])]
[else
#f])))
;; Parse the version-1 format: a placeholder prefix string, the four
;; dimensions, the byte strings that were lifted out of the recorded
;; datum (count, then each one), and the rewritten datum.  Each
;; occurrence of the prefix in the rewritten datum is replaced, in
;; left-to-right traversal order, by the next byte string — so the
;; writer must emit the byte strings in that same order.
(define (parse-version-1-pict-snip get-some-bytes get-a-number)
  (define prefix (bytes->string/utf-8 (get-some-bytes)))
  (define w (get-a-number))
  (define h (get-a-number))
  (define d (get-a-number))
  (define a (get-a-number))
  (define byteses
    (for/list ([_ (in-range (get-a-number))])
      (get-some-bytes)))
  (define rewriten-datum (read (open-input-bytes (get-some-bytes))))
  ;; Substitute the byte strings back in place of the prefix markers.
  (define recorded-datum
    (let loop ([datum rewriten-datum])
      (cond
        [(pair? datum) (cons (loop (car datum)) (loop (cdr datum)))]
        [(equal? datum prefix)
         (begin0
           (car byteses)
           (set! byteses (cdr byteses)))]
        [else datum])))
  (new pict-snip% [w w] [h h] [d d] [a a]
       [recorded-datum recorded-datum]))
;; Serialize a snip in version-1 format: a placeholder prefix (unique
;; with respect to the datum's existing strings), the four dimensions,
;; the byte strings lifted out of the datum (count followed by each
;; one), and finally the rewritten datum in which every byte string
;; has been replaced by the prefix.
(define (write-version-1-of-snip w h d a recorded-datum f)
  (define unique-string (get-unique-string recorded-datum))
  (define unique-bytes (string->bytes/utf-8 unique-string))
  (send f put unique-bytes)
  (send f put w)
  (send f put h)
  (send f put d)
  (send f put a)
  (define-values (rewritten-datum byteses)
    (rewrite-recorded-datum recorded-datum unique-string))
  (send f put (length byteses))
  (for ([bytes (in-list byteses)])
    (send f put bytes))
  (define bp (open-output-bytes))
  (write rewritten-datum bp)
  (send f put (get-output-bytes bp)))
;; Replace every byte string inside recorded-datum with unique-string,
;; returning the rewritten datum and the removed byte strings.  The
;; reader (parse-version-1-pict-snip) substitutes them back in
;; left-to-right traversal order, so they must be returned in that
;; same order.
;; BUG FIX: the accumulator is built with cons during a left-to-right
;; walk, yielding the byte strings in *reverse* traversal order; with
;; two or more byte strings in the datum, a save/load round trip
;; paired them with the wrong placeholder positions.  Reverse before
;; returning.
(define (rewrite-recorded-datum recorded-datum unique-string)
  (define byteses '())
  (define rewriten-datum
    (let loop ([recorded-datum recorded-datum])
      (match recorded-datum
        [(cons a b) (cons (loop a) (loop b))]
        [(? bytes?)
         (set! byteses (cons recorded-datum byteses))
         unique-string]
        [else recorded-datum])))
  (values rewriten-datum (reverse byteses)))
;; Produce a placeholder string of the form "bmprefN:" that does not
;; occur anywhere in recorded-datum: N is one larger than the largest
;; index found among existing "bmprefN:"-prefixed strings (0 if none).
(define (get-unique-string recorded-datum)
  (define n
    (let walk ([datum recorded-datum] [best 0])
      (cond
        [(pair? datum) (walk (cdr datum) (walk (car datum) best))]
        [(string? datum)
         (define m (regexp-match #rx"^bmpref([0-9]+):" datum))
         (if m
             (max best (+ 1 (string->number (list-ref m 1))))
             best)]
        [else best])))
  (format "bmpref~a:" n))
;; Unit tests for get-unique-string (run via `raco test`).
(module+ test
  (check-equal? (get-unique-string '(((1) 2))) "bmpref0:")
  (check-equal? (get-unique-string '((("bmpref4:") "bmpref1:"))) "bmpref5:"))
| null | https://raw.githubusercontent.com/racket/drracket/d2760acb76ef77046e7de788d26156f77330ab03/drracket/drracket/private/pict-snip.rkt | racket | this snip is created on the user's space,
parse-pict-snip-from-bytes : bytes -> (or/c (is-a?/c pict-snip%) #f) | #lang racket/base
(require racket/snip
racket/class
racket/match
racket/draw
file/convertible
racket/format
wxme
(prefix-in r: racket/base))
(module+ test (require rackunit))
(provide pict-snip% snip-class reader)
(define convertible<%>
(interface* () ([prop:convertible (lambda (v r d)
(send v convert r d))])
convert))
but its callbacks are invoked on 's .
(define pict-snip%
(class* snip% (convertible<%>)
(init-field w h d a recorded-datum)
(define/override (get-extent dc x y [wb #f] [hb #f] [descent #f] [space #f] [lspace #f] [rspace #f])
(set-box/f lspace 0)
(set-box/f rspace 0)
(set-box/f wb w)
(set-box/f hb h)
(set-box/f descent d)
(set-box/f space a))
(define proc #f)
(define/override (draw dc x y left top right bottom dx dy draw-caret)
(unless proc
(set! proc (with-handlers ((exn:fail? mk-error-drawer))
(recorded-datum->procedure recorded-datum))))
(define-values (ox oy) (send dc get-origin))
(send dc set-origin (+ ox x) (+ oy y))
(proc dc)
(send dc set-origin ox oy))
(define/override (copy) (new pict-snip% [w w] [h h] [d d] [a a]
[recorded-datum recorded-datum]))
(define/override (write f)
(write-version-1-of-snip w h d a recorded-datum f))
(define/override (find-scroll-step y)
(inexact->exact (floor (/ y 12))))
(define/override (get-num-scroll-steps)
(add1 (find-scroll-step h)))
(define/override (get-scroll-step-offset y)
(inexact->exact (floor (* y 12))))
(super-new)
(inherit set-snipclass)
(set-snipclass snip-class)
(define/public (convert r d)
(case r
[(png-bytes)
(define bm (make-bitmap (inexact->exact (ceiling w))
(inexact->exact (ceiling h))))
(define dc (send bm make-dc))
(draw dc 0 0 0 0 w h 0 0 #f)
(define b (open-output-bytes))
(send bm save-file b 'png)
(get-output-bytes b)]
[(pdf-bytes)
(define b (open-output-bytes))
(define dc (new pdf-dc%
[interactive #f]
[width w] [height h]
[output b]))
(send dc start-doc "pict")
(send dc start-page)
(draw dc 0 0 0 0 w h 0 0 #f)
(send dc end-page)
(send dc end-doc)
(get-output-bytes b)]
[else d]))))
(define (set-box/f b v) (when (box? b) (set-box! b v)))
(define ((mk-error-drawer exn) dc)
(define clr (send dc get-text-foreground))
(send dc set-text-foreground "red")
(send dc draw-text (exn-message exn) 0 0 'grapheme)
(send dc set-text-foreground clr))
(define snip-class
(new (class snip-class%
(inherit reading-version set-version)
(define/override (read f)
(define version (reading-version f))
(case version
[(0)
(parse-version-0-pict-snip-from-bytes
(send f get-unterminated-bytes))]
[(1)
(parse-version-1-pict-snip
(λ () (send f get-unterminated-bytes))
(λ () (send f get-exact)))]))
(super-new)
(set-version 1))))
(send snip-class set-classname (format "~s" (list '(lib "pict-snip.rkt" "drracket" "private")
'(lib "pict-snip.rkt" "drracket" "private"))))
(send (get-the-snip-class-list) add snip-class)
(define reader
(new (class* object% (snip-reader<%>)
(define/public (read-header version stream) (void))
(define/public (read-snip text-only? version stream)
(if text-only?
#"#<pict-snip>"
(or (case version
[(0)
(parse-version-0-pict-snip-from-bytes
(send stream read-raw-bytes 'pict-snip))]
[(1)
(parse-version-1-pict-snip
(λ () (send stream read-raw-bytes "drracket's pict-snip%"))
(λ () (send stream read-integer "drracket's pict-snip%")))])
(error 'pict-snip.rkt "could not read pict-snip from stream"))))
(super-new))))
;; parse-version-0-pict-snip-from-bytes : bytes -> (or/c snip #f)
;; Version 0 stored one written sexp `(w h d a recorded-datum)`.
;; Returns #f when the bytes do not `read` or do not match that shape.
(define (parse-version-0-pict-snip-from-bytes bytes)
  (let/ec escape
    (define prt (open-input-bytes bytes))
    (define sexp (with-handlers ([exn:fail:read? (λ (x) (escape #f))])
                   (read prt)))
    (match sexp
      [`(,(? real? w) ,(? real? h) ,(? real? d) ,(? real? a) ,recorded-datum)
       (new pict-snip% [w w] [h h] [d d] [a a]
            [recorded-datum recorded-datum])]
      [else
       #f])))
;; parse-version-1-pict-snip : (-> bytes) (-> number) -> snip
;; Version 1 stores, in order: a placeholder prefix string, the four pict
;; dimensions, a count of byte strings followed by the byte strings
;; themselves, and a written datum in which each original bytes leaf was
;; replaced by the prefix string.  The datum is re-walked, substituting
;; the saved byteses back for the placeholders front-to-back.
(define (parse-version-1-pict-snip get-some-bytes get-a-number)
  (define prefix (bytes->string/utf-8 (get-some-bytes)))
  (define w (get-a-number))
  (define h (get-a-number))
  (define d (get-a-number))
  (define a (get-a-number))
  (define byteses
    (for/list ([_ (in-range (get-a-number))])
      (get-some-bytes)))
  (define rewriten-datum (read (open-input-bytes (get-some-bytes))))
  (define recorded-datum
    (let loop ([datum rewriten-datum])
      (cond
        [(pair? datum) (cons (loop (car datum)) (loop (cdr datum)))]
        [(equal? datum prefix)
         ;; consume the next saved byte string for this placeholder
         (begin0
           (car byteses)
           (set! byteses (cdr byteses)))]
        [else datum])))
  (new pict-snip% [w w] [h h] [d d] [a a]
       [recorded-datum recorded-datum]))
;; write-version-1-of-snip : real real real real datum editor-stream-out -> void
;; Serializes a pict snip in the version-1 format consumed by
;; parse-version-1-pict-snip: placeholder string, then w/h/d/a, then the
;; number of byte strings and the byte strings themselves, then the
;; rewritten datum (with byteses replaced by the placeholder).
(define (write-version-1-of-snip w h d a recorded-datum f)
  ;; a string guaranteed not to collide with strings inside the datum
  (define unique-string (get-unique-string recorded-datum))
  (define unique-bytes (string->bytes/utf-8 unique-string))
  (send f put unique-bytes)
  (send f put w)
  (send f put h)
  (send f put d)
  (send f put a)
  (define-values (rewritten-datum byteses)
    (rewrite-recorded-datum recorded-datum unique-string))
  (send f put (length byteses))
  (for ([bytes (in-list byteses)])
    (send f put bytes))
  ;; the datum itself goes last, in `write`d form
  (define bp (open-output-bytes))
  (write rewritten-datum bp)
  (send f put (get-output-bytes bp)))
;; rewrite-recorded-datum : datum string -> (values datum (listof bytes))
;; Walks `recorded-datum`, replacing every bytes leaf with `unique-string`
;; (a placeholder no string already in the datum can collide with) and
;; collecting the replaced byte strings.  The byteses are returned in
;; traversal order — the order the placeholders appear in the rewritten
;; datum — because `parse-version-1-pict-snip` consumes them front-to-back
;; while re-walking the datum in the same order.  (Previously the list was
;; returned in reverse traversal order, since it was built with `cons`,
;; which scrambled snips containing more than one bytes leaf.)
(define (rewrite-recorded-datum recorded-datum unique-string)
  (define byteses '())
  (define rewritten-datum
    (let loop ([recorded-datum recorded-datum])
      (match recorded-datum
        [(cons a b) (cons (loop a) (loop b))]
        [(? bytes?)
         (set! byteses (cons recorded-datum byteses))
         unique-string]
        [else recorded-datum])))
  (values rewritten-datum (reverse byteses)))
;; get-unique-string : datum -> string
;; Picks a placeholder string guaranteed not to collide with any string
;; already present in `recorded-datum`: scans the (cons-tree) datum for
;; strings of the shape "bmprefN:" and returns "bmprefM:" where M is one
;; larger than any N found (0 when none are found).  Any colliding string
;; would itself have to match that pattern, so the result is fresh.
(define (get-unique-string recorded-datum)
  (define prefix 0)
  (let loop ([recorded-datum recorded-datum])
    (cond
      [(pair? recorded-datum)
       (loop (car recorded-datum))
       (loop (cdr recorded-datum))]
      [(string? recorded-datum)
       (define m (regexp-match #rx"^bmpref([0-9]+):" recorded-datum))
       (when m
         (define n (string->number (list-ref m 1)))
         ;; keep the running maximum of (N + 1) over all matches
         (set! prefix (max (+ n 1) prefix)))]))
  (~a "bmpref" prefix ":"))
(module+ test
  (check-equal? (get-unique-string '(((1) 2))) "bmpref0:")
  (check-equal? (get-unique-string '((("bmpref4:") "bmpref1:"))) "bmpref5:"))
|
f1f2454428ab978c588acc535ede372bad6e13621e5b57b306ac45e944ee25a5 | couchbase/couchdb | couch_os_process.erl | Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% -2.0
%
% Unless required by applicable law or agreed to in writing, software
% distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
% Wraps an external OS process behind a gen_server, talking to it over an
% Erlang port one line at a time (JSON-encoded by default via
% writejson/readjson, overridable per instance).
-module(couch_os_process).
-behaviour(gen_server).
-export([start_link/1, start_link/2, start_link/3, stop/1]).
-export([set_timeout/2, prompt/2, prompt_many/2]).
-export([send/2, writeline/2, readline/1, writejson/2, readjson/1]).
-export([init/1, terminate/2, handle_call/3, handle_cast/2, handle_info/2, code_change/3]).
-include("couch_db.hrl").
% Line-oriented binary port; lines arrive in chunks of at most 4096 bytes.
-define(PORT_OPTIONS, [stream, {line, 4096}, binary, exit_status, hide]).
% Per-server state.
-record(os_proc,
    {command,       % command line used to launch the external process
     port,          % Erlang port connected to that process
     writer,        % fun(OsProc, Data): encode and send one request
     reader,        % fun(OsProc): read and decode one response
     timeout=5000   % ms to wait for a response line before giving up
    }).
% Spawn a linked couch_os_process server running Command with default
% options and default port options.
start_link(Command) ->
    start_link(Command, []).
% Options may contain {writer, Fun}, {reader, Fun} and {timeout, Ms};
% see init/1 for how they are applied.
start_link(Command, Options) ->
    start_link(Command, Options, ?PORT_OPTIONS).
start_link(Command, Options, PortOptions) ->
    gen_server:start_link(couch_os_process, [Command, Options, PortOptions], []).
% Asynchronously stop the server (terminate/2 closes the port).
stop(Pid) ->
    gen_server:cast(Pid, stop).
% Read/Write API
% Set the per-response timeout (milliseconds) used by readline.
set_timeout(Pid, TimeOut) when is_integer(TimeOut) ->
    ok = gen_server:call(Pid, {set_timeout, TimeOut}, infinity).
% Used by couch_db_update_notifier.erl
% Fire-and-forget write: Data is encoded and sent, no reply is read.
send(Pid, Data) ->
    gen_server:cast(Pid, {send, Data}).
% Synchronous request/response round trip; throws the error term on failure.
prompt(Pid, Data) ->
    case gen_server:call(Pid, {prompt, Data}, infinity) of
        {ok, Result} ->
            Result;
        Error ->
            ?LOG_ERROR("OS Process Error ~p :: ~p",[Pid, Error]),
            throw(Error)
    end.
% Pipelined variant: temporarily takes ownership of the server's port,
% writes every request in DataList, then reads the matching responses.
% Returns {ok, Responses} with responses in request order.
prompt_many(Pid, DataList) ->
    OsProc = gen_server:call(Pid, get_os_proc, infinity),
    true = port_connect(OsProc#os_proc.port, self()),
    try
        send_many(OsProc, DataList),
        receive_many(length(DataList), OsProc, [])
    after
        % Can throw badarg error, when OsProc Pid is dead or port was closed
        % by the readline function on error/timeout.
        (catch port_connect(OsProc#os_proc.port, Pid)),
        unlink(OsProc#os_proc.port),
        drop_port_messages(OsProc#os_proc.port)
    end.
% Write each request in order using the configured writer.
send_many(_OsProc, []) ->
    ok;
send_many(#os_proc{writer = Writer} = OsProc, [Data | Rest]) ->
    Writer(OsProc, Data),
    send_many(OsProc, Rest).
% Read N responses with the configured reader, preserving request order.
receive_many(0, _OsProc, Acc) ->
    {ok, lists:reverse(Acc)};
receive_many(N, #os_proc{reader = Reader} = OsProc, Acc) ->
    Line = Reader(OsProc),
    receive_many(N - 1, OsProc, [Line | Acc]).
% Flush any messages this port already delivered to the calling process,
% so stale data is not left in our mailbox after handing the port back.
drop_port_messages(Port) ->
    receive
        {Port, _} ->
            drop_port_messages(Port)
    after 0 ->
        ok
    end.
% Utility functions for reading and writing
% in custom functions
% Send Data followed by a newline to the external process.
writeline(OsProc, Data) when is_record(OsProc, os_proc) ->
    port_command(OsProc#os_proc.port, [Data, $\n]).
% Read one complete line from the port, honouring the configured timeout.
% On any port error or timeout the port is closed and
% {os_process_error, Reason} is thrown.
readline(#os_proc{} = OsProc) ->
    readline(OsProc, []).
readline(#os_proc{port = Port} = OsProc, Acc) ->
    receive
        {Port, {data, {noeol, Data}}} ->
            % partial chunk: the line exceeded the port's line limit
            readline(OsProc, [Data|Acc]);
        {Port, {data, {eol, Data}}} ->
            lists:reverse(Acc, Data);
        {Port, Err} ->
            % e.g. an exit_status message: the process is gone
            catch port_close(Port),
            throw({os_process_error, Err})
    after OsProc#os_proc.timeout ->
        catch port_close(Port),
        throw({os_process_error, "OS process timed out."})
    end.
% Standard JSON functions
% Encode Data as JSON and send it as one line.
writejson(OsProc, Data) when is_record(OsProc, os_proc) ->
    JsonData = ?JSON_ENCODE(Data),
    ?LOG_DEBUG("OS Process ~p Input :: ~s", [OsProc#os_proc.port, ?LOG_USERDATA(JsonData)]),
    true = writeline(OsProc, JsonData).
% Read one JSON line.  Plain results are returned un-decoded as
% {json, Line}; ["log", ...] lines are logged and reading continues;
% ["error"|"fatal", Id, Reason] lines are re-thrown to the caller.
readjson(OsProc) when is_record(OsProc, os_proc) ->
    Line = iolist_to_binary(readline(OsProc)),
    ?LOG_DEBUG("OS Process ~p Output :: ~s", [OsProc#os_proc.port, ?LOG_USERDATA(Line)]),
    try
        % Don't actually parse the whole JSON. Just try to see if it's
        % a command or a doc map/reduce/filter/show/list/update output.
        % If it's a command then parse the whole JSON and execute the
        % command, otherwise return the raw JSON line to the caller.
        pick_command(Line)
    catch
        throw:abort ->
            {json, Line};
        throw:{cmd, _Cmd} ->
            case ?JSON_DECODE(Line) of
            [<<"log">>, Msg] when is_binary(Msg) ->
                % we got a message to log. Log it and continue
                ?LOG_INFO("OS Process ~p Log :: ~s", [OsProc#os_proc.port, ?LOG_USERDATA(Msg)]),
                readjson(OsProc);
            [<<"error">>, Id, Reason] ->
                throw({error, {couch_util:to_existing_atom(Id),Reason}});
            [<<"fatal">>, Id, Reason] ->
                ?LOG_INFO("OS Process ~p Fatal Error :: ~s ~p",
                    [OsProc#os_proc.port, Id, Reason]),
                throw({couch_util:to_existing_atom(Id),Reason});
            _Result ->
                {json, Line}
            end
    end.
% Streaming peek at the first array element: throws {cmd, C} when Line
% starts like ["log"/"error"/"fatal", ...], throws abort otherwise.
pick_command(Line) ->
    json_stream_parse:events(Line, fun pick_command0/1).
pick_command0(array_start) ->
    fun pick_command1/1;
pick_command0(_) ->
    throw(abort).
pick_command1(<<"log">> = Cmd) ->
    throw({cmd, Cmd});
pick_command1(<<"error">> = Cmd) ->
    throw({cmd, Cmd});
pick_command1(<<"fatal">> = Cmd) ->
    throw({cmd, Cmd});
pick_command1(_) ->
    throw(abort).
% gen_server API
% Start the external process under couchspawnkillable, read back the kill
% command it prints first, arm a watchdog that runs that kill command when
% this server dies, then apply caller overrides from Options.
init([Command, Options, PortOptions]) ->
    PrivDir = couch_util:priv_dir(),
    Spawnkiller = filename:join(PrivDir, "couchspawnkillable"),
    CompleteCmd = "\"" ++ Spawnkiller ++ "\" " ++ Command,
    BaseProc = #os_proc{
        command=Command,
        port=open_port({spawn, CompleteCmd}, PortOptions),
        writer=fun writejson/2,
        reader=fun readjson/1
    },
    % couchspawnkillable's first output line is the kill command
    KillCmd = readline(BaseProc),
    Pid = self(),
    ?LOG_DEBUG("OS Process Start :: ~p", [BaseProc#os_proc.port]),
    spawn(fun() ->
            % this ensure the real os process is killed when this process dies.
            erlang:monitor(process, Pid),
            receive _ -> ok end,
            os:cmd(?b2l(iolist_to_binary(KillCmd)))
        end),
    % fold caller-supplied writer/reader/timeout overrides into the state
    OsProc =
    lists:foldl(fun(Opt, Proc) ->
        case Opt of
        {writer, Writer} when is_function(Writer) ->
            Proc#os_proc{writer=Writer};
        {reader, Reader} when is_function(Reader) ->
            Proc#os_proc{reader=Reader};
        {timeout, TimeOut} when is_integer(TimeOut) ->
            Proc#os_proc{timeout=TimeOut}
        end
    end, BaseProc, Options),
    {ok, OsProc}.
% Closing the port lets couchspawnkillable notice and reap the child.
terminate(_Reason, #os_proc{port=Port}) ->
    catch port_close(Port),
    ok.
handle_call(get_os_proc, _From, OsProc) ->
    % used by prompt_many/2 to borrow the port
    {reply, OsProc, OsProc};
handle_call({set_timeout, TimeOut}, _From, OsProc) ->
    {reply, ok, OsProc#os_proc{timeout=TimeOut}};
handle_call({prompt, Data}, _From, OsProc) ->
    #os_proc{writer=Writer, reader=Reader} = OsProc,
    try
        Writer(OsProc, Data),
        {reply, {ok, Reader(OsProc)}, OsProc}
    catch
        throw:{error, OsError} ->
            % recoverable: report the error to the caller, keep running
            {reply, OsError, OsProc};
        throw:OtherError ->
            % anything else (e.g. fatal, os_process_error): stop the server
            {stop, normal, OtherError, OsProc}
    end.
handle_cast({send, Data}, #os_proc{writer=Writer}=OsProc) ->
    try
        Writer(OsProc, Data),
        {noreply, OsProc}
    catch
        throw:OsError ->
            ?LOG_ERROR("Failed sending data: ~p -> ~p", [?LOG_USERDATA(Data), OsError]),
            {stop, normal, OsProc}
    end;
handle_cast(stop, OsProc) ->
    {stop, normal, OsProc};
handle_cast(Msg, OsProc) ->
    ?LOG_DEBUG("OS Proc: Unknown cast: ~p", [Msg]),
    {noreply, OsProc}.
% The port's exit_status message tells us the external process ended.
handle_info({Port, {exit_status, 0}}, #os_proc{port=Port}=OsProc) ->
    ?LOG_INFO("OS Process terminated normally", []),
    {stop, normal, OsProc};
handle_info({Port, {exit_status, Status}}, #os_proc{port=Port}=OsProc) ->
    ?LOG_ERROR("OS Process died with status: ~p", [Status]),
    {stop, {exit_status, Status}, OsProc}.
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
| null | https://raw.githubusercontent.com/couchbase/couchdb/8a75fd2faa89f95158de1776354ceccf3e762753/src/couchdb/couch_os_process.erl | erlang | use this file except in compliance with the License. You may obtain a copy of
the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License.
Read/Write API
Used by couch_db_update_notifier.erl
by the readline function on error/timeout.
Utility functions for reading and writing
in custom functions
Standard JSON functions
Don't actually parse the whole JSON. Just try to see if it's
a command or a doc map/reduce/filter/show/list/update output.
If it's a command then parse the whole JSON and execute the
command, otherwise return the raw JSON line to the caller.
we got a message to log. Log it and continue
gen_server API
this ensure the real os process is killed when this process dies. | Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
-module(couch_os_process).
-behaviour(gen_server).
-export([start_link/1, start_link/2, start_link/3, stop/1]).
-export([set_timeout/2, prompt/2, prompt_many/2]).
-export([send/2, writeline/2, readline/1, writejson/2, readjson/1]).
-export([init/1, terminate/2, handle_call/3, handle_cast/2, handle_info/2, code_change/3]).
-include("couch_db.hrl").
-define(PORT_OPTIONS, [stream, {line, 4096}, binary, exit_status, hide]).
-record(os_proc,
{command,
port,
writer,
reader,
timeout=5000
}).
start_link(Command) ->
start_link(Command, []).
start_link(Command, Options) ->
start_link(Command, Options, ?PORT_OPTIONS).
start_link(Command, Options, PortOptions) ->
gen_server:start_link(couch_os_process, [Command, Options, PortOptions], []).
stop(Pid) ->
gen_server:cast(Pid, stop).
set_timeout(Pid, TimeOut) when is_integer(TimeOut) ->
ok = gen_server:call(Pid, {set_timeout, TimeOut}, infinity).
send(Pid, Data) ->
gen_server:cast(Pid, {send, Data}).
prompt(Pid, Data) ->
case gen_server:call(Pid, {prompt, Data}, infinity) of
{ok, Result} ->
Result;
Error ->
?LOG_ERROR("OS Process Error ~p :: ~p",[Pid, Error]),
throw(Error)
end.
prompt_many(Pid, DataList) ->
OsProc = gen_server:call(Pid, get_os_proc, infinity),
true = port_connect(OsProc#os_proc.port, self()),
try
send_many(OsProc, DataList),
receive_many(length(DataList), OsProc, [])
after
Can throw badarg error , when OsProc Pid is dead or port was closed
(catch port_connect(OsProc#os_proc.port, Pid)),
unlink(OsProc#os_proc.port),
drop_port_messages(OsProc#os_proc.port)
end.
send_many(_OsProc, []) ->
ok;
send_many(#os_proc{writer = Writer} = OsProc, [Data | Rest]) ->
Writer(OsProc, Data),
send_many(OsProc, Rest).
receive_many(0, _OsProc, Acc) ->
{ok, lists:reverse(Acc)};
receive_many(N, #os_proc{reader = Reader} = OsProc, Acc) ->
Line = Reader(OsProc),
receive_many(N - 1, OsProc, [Line | Acc]).
drop_port_messages(Port) ->
receive
{Port, _} ->
drop_port_messages(Port)
after 0 ->
ok
end.
writeline(OsProc, Data) when is_record(OsProc, os_proc) ->
port_command(OsProc#os_proc.port, [Data, $\n]).
readline(#os_proc{} = OsProc) ->
readline(OsProc, []).
readline(#os_proc{port = Port} = OsProc, Acc) ->
receive
{Port, {data, {noeol, Data}}} ->
readline(OsProc, [Data|Acc]);
{Port, {data, {eol, Data}}} ->
lists:reverse(Acc, Data);
{Port, Err} ->
catch port_close(Port),
throw({os_process_error, Err})
after OsProc#os_proc.timeout ->
catch port_close(Port),
throw({os_process_error, "OS process timed out."})
end.
writejson(OsProc, Data) when is_record(OsProc, os_proc) ->
JsonData = ?JSON_ENCODE(Data),
?LOG_DEBUG("OS Process ~p Input :: ~s", [OsProc#os_proc.port, ?LOG_USERDATA(JsonData)]),
true = writeline(OsProc, JsonData).
readjson(OsProc) when is_record(OsProc, os_proc) ->
Line = iolist_to_binary(readline(OsProc)),
?LOG_DEBUG("OS Process ~p Output :: ~s", [OsProc#os_proc.port, ?LOG_USERDATA(Line)]),
try
pick_command(Line)
catch
throw:abort ->
{json, Line};
throw:{cmd, _Cmd} ->
case ?JSON_DECODE(Line) of
[<<"log">>, Msg] when is_binary(Msg) ->
?LOG_INFO("OS Process ~p Log :: ~s", [OsProc#os_proc.port, ?LOG_USERDATA(Msg)]),
readjson(OsProc);
[<<"error">>, Id, Reason] ->
throw({error, {couch_util:to_existing_atom(Id),Reason}});
[<<"fatal">>, Id, Reason] ->
?LOG_INFO("OS Process ~p Fatal Error :: ~s ~p",
[OsProc#os_proc.port, Id, Reason]),
throw({couch_util:to_existing_atom(Id),Reason});
_Result ->
{json, Line}
end
end.
pick_command(Line) ->
json_stream_parse:events(Line, fun pick_command0/1).
pick_command0(array_start) ->
fun pick_command1/1;
pick_command0(_) ->
throw(abort).
pick_command1(<<"log">> = Cmd) ->
throw({cmd, Cmd});
pick_command1(<<"error">> = Cmd) ->
throw({cmd, Cmd});
pick_command1(<<"fatal">> = Cmd) ->
throw({cmd, Cmd});
pick_command1(_) ->
throw(abort).
init([Command, Options, PortOptions]) ->
PrivDir = couch_util:priv_dir(),
Spawnkiller = filename:join(PrivDir, "couchspawnkillable"),
CompleteCmd = "\"" ++ Spawnkiller ++ "\" " ++ Command,
BaseProc = #os_proc{
command=Command,
port=open_port({spawn, CompleteCmd}, PortOptions),
writer=fun writejson/2,
reader=fun readjson/1
},
KillCmd = readline(BaseProc),
Pid = self(),
?LOG_DEBUG("OS Process Start :: ~p", [BaseProc#os_proc.port]),
spawn(fun() ->
erlang:monitor(process, Pid),
receive _ -> ok end,
os:cmd(?b2l(iolist_to_binary(KillCmd)))
end),
OsProc =
lists:foldl(fun(Opt, Proc) ->
case Opt of
{writer, Writer} when is_function(Writer) ->
Proc#os_proc{writer=Writer};
{reader, Reader} when is_function(Reader) ->
Proc#os_proc{reader=Reader};
{timeout, TimeOut} when is_integer(TimeOut) ->
Proc#os_proc{timeout=TimeOut}
end
end, BaseProc, Options),
{ok, OsProc}.
terminate(_Reason, #os_proc{port=Port}) ->
catch port_close(Port),
ok.
handle_call(get_os_proc, _From, OsProc) ->
{reply, OsProc, OsProc};
handle_call({set_timeout, TimeOut}, _From, OsProc) ->
{reply, ok, OsProc#os_proc{timeout=TimeOut}};
handle_call({prompt, Data}, _From, OsProc) ->
#os_proc{writer=Writer, reader=Reader} = OsProc,
try
Writer(OsProc, Data),
{reply, {ok, Reader(OsProc)}, OsProc}
catch
throw:{error, OsError} ->
{reply, OsError, OsProc};
throw:OtherError ->
{stop, normal, OtherError, OsProc}
end.
handle_cast({send, Data}, #os_proc{writer=Writer}=OsProc) ->
try
Writer(OsProc, Data),
{noreply, OsProc}
catch
throw:OsError ->
?LOG_ERROR("Failed sending data: ~p -> ~p", [?LOG_USERDATA(Data), OsError]),
{stop, normal, OsProc}
end;
handle_cast(stop, OsProc) ->
{stop, normal, OsProc};
handle_cast(Msg, OsProc) ->
?LOG_DEBUG("OS Proc: Unknown cast: ~p", [Msg]),
{noreply, OsProc}.
handle_info({Port, {exit_status, 0}}, #os_proc{port=Port}=OsProc) ->
?LOG_INFO("OS Process terminated normally", []),
{stop, normal, OsProc};
handle_info({Port, {exit_status, Status}}, #os_proc{port=Port}=OsProc) ->
?LOG_ERROR("OS Process died with status: ~p", [Status]),
{stop, {exit_status, Status}, OsProc}.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
|
b491d966aaa96d8895b8dc996d95e5fc9c34c1faa445c31edde740cc2d2b33a4 | np/ling | Session.hs | # LANGUAGE LambdaCase #
{-# LANGUAGE ViewPatterns #-}
module Ling.Session
(module Ling.Session
,module Ling.Session.Core) where
import Ling.Session.Core
import Ling.Defs
import Ling.Norm
import Ling.Prelude hiding (subst1)
import Ling.Scoped
import Prelude hiding (log)
-- | Step an @IO@ session: substitute the communicated term @tm@
-- (annotated with the binder's type) for the bound variable in the
-- continuation session.  Errors out on any non-@IO@ session.
sessionStep :: Term -> Endom Session
sessionStep tm (IO _ (Arg x mty) s) = mkLet__ $ subst1 (x, Ann mty tm) s
sessionStep _ s = error $ "sessionStep: no steps " ++ show s
-- Should be length preserving
-- | Complete a pair of optionally-annotated sessions using duality:
-- when exactly one of the two is annotated, the other is its dual.
extractDuals :: Dual a => [Maybe a] -> [a]
extractDuals = \case
  [Just s0, Nothing] -> [s0, dual s0]
  [Nothing, Just s1] -> [dual s1, s1]
  -- from the two cases above the general rule
  -- so far is that all sessions should be annotated
  mas -> mas ^? below _Just ?| error "Missing type signature in `new` (extractDuals)"
-- | Return the first annotated entry; errors when none is annotated.
extractSession :: [Maybe a] -> a
extractSession l = l ^? each . _Just ?| error "Missing type signature in `new` (extractSession)"
-- See flatRSession in Ling.Reduce
-- | Unroll a replicated session into the list of its copies.
-- Partial: the replication factor must be an integer literal.
unsafeFlatRSession :: RSession -> [Session]
unsafeFlatRSession (s `Repl` r) =
  replicate (r ^? litR . integral ?| error ("unsafeFlatRSession " ++ show r)) s
-- See flatSessions in Ling.Reduce
unsafeFlatSessions :: Sessions -> [Session]
unsafeFlatSessions = concatMap unsafeFlatRSession . view _Sessions
-- | Index into a sequence of replicated sessions, skipping whole
-- replication groups.  Partial: replication factors must be integer
-- literals and the index must be in bounds.
projSessions :: Integer -> Sessions -> Session
projSessions _ (Sessions []) = error "projSessions: out of bound"
projSessions n (Sessions (Repl s r:ss))
  | Just i <- r ^? litR = if n < i
                          then s
                          else projSessions (n - i) (Sessions ss)
  | otherwise = error "projSessions/Repl: only integer literals are supported"
-- | Scale a replicated session's factor by @r@ (factors compose with '<>').
replRSession :: RFactor -> Endom RSession
replRSession r (Repl s t) = Repl s (r <> t)
-- | Build a case construct over replicated sessions pointwise: each
-- branch is split into its session part and its replication-factor
-- part, 'mkCaseBy' is applied to each part across the branches, and
-- the results are re-assembled into one 'RSession'.
mkCaseRSession :: (Scoped Term -> Term) -> Rel (Scoped Term) -> MkCase' (Scoped RSession)
mkCaseRSession f rel u = repl . bimap h h . unzip . fmap unrepl
  where
    -- re-assemble the cased session term and factor term into a RSession
    repl (s, r) = pure $ (s ^. from tSession) `Repl` (r ^. from rterm)
    -- split one branch into (session view, factor view)
    unrepl (con, rs) = ((con, view (rsession . tSession) <$> rs),
                        (con, view (rfactor . rterm) <$> rs))
    h = mkCaseBy f rel u
-- | As 'mkCaseRSession', but each branch must carry exactly one
-- replicated session wrapped in 'Sessions'; errors otherwise.
mkCaseSessions :: (Scoped Term -> Term) -> Rel (Scoped Term) -> MkCase' (Scoped Sessions)
mkCaseSessions f rel u brs =
  Sessions . pure <$> mkCaseRSession f rel u (brs & branches . scoped %~ unSingleton)
  where
    unSingleton (Sessions [x]) = x
    unSingleton _ = error "mkCaseSessions"
-- -}
| null | https://raw.githubusercontent.com/np/ling/ca942db83ac927420d1ae5e24b4da164394ddbbe/Ling/Session.hs | haskell | Should be length preserving
so far is that all sessions should be annotated
See flatRSession in Ling.Reduce
-} | # LANGUAGE LambdaCase #
# LANGUAGE ViewPatterns #
module Ling.Session
(module Ling.Session
,module Ling.Session.Core) where
import Ling.Session.Core
import Ling.Defs
import Ling.Norm
import Ling.Prelude hiding (subst1)
import Ling.Scoped
import Prelude hiding (log)
sessionStep :: Term -> Endom Session
sessionStep tm (IO _ (Arg x mty) s) = mkLet__ $ subst1 (x, Ann mty tm) s
sessionStep _ s = error $ "sessionStep: no steps " ++ show s
extractDuals :: Dual a => [Maybe a] -> [a]
extractDuals = \case
[Just s0, Nothing] -> [s0, dual s0]
[Nothing, Just s1] -> [dual s1, s1]
from the two cases above the general rule
mas -> mas ^? below _Just ?| error "Missing type signature in `new` (extractDuals)"
extractSession :: [Maybe a] -> a
extractSession l = l ^? each . _Just ?| error "Missing type signature in `new` (extractSession)"
unsafeFlatRSession :: RSession -> [Session]
unsafeFlatRSession (s `Repl` r) =
replicate (r ^? litR . integral ?| error ("unsafeFlatRSession " ++ show r)) s
See flatSessions in . Reduce
unsafeFlatSessions :: Sessions -> [Session]
unsafeFlatSessions = concatMap unsafeFlatRSession . view _Sessions
projSessions :: Integer -> Sessions -> Session
projSessions _ (Sessions []) = error "projSessions: out of bound"
projSessions n (Sessions (Repl s r:ss))
| Just i <- r ^? litR = if n < i
then s
else projSessions (n - i) (Sessions ss)
| otherwise = error "projSessions/Repl: only integer literals are supported"
replRSession :: RFactor -> Endom RSession
replRSession r (Repl s t) = Repl s (r <> t)
mkCaseRSession :: (Scoped Term -> Term) -> Rel (Scoped Term) -> MkCase' (Scoped RSession)
mkCaseRSession f rel u = repl . bimap h h . unzip . fmap unrepl
where
repl (s, r) = pure $ (s ^. from tSession) `Repl` (r ^. from rterm)
unrepl (con, rs) = ((con, view (rsession . tSession) <$> rs),
(con, view (rfactor . rterm) <$> rs))
h = mkCaseBy f rel u
mkCaseSessions :: (Scoped Term -> Term) -> Rel (Scoped Term) -> MkCase' (Scoped Sessions)
mkCaseSessions f rel u brs =
Sessions . pure <$> mkCaseRSession f rel u (brs & branches . scoped %~ unSingleton)
where
unSingleton (Sessions [x]) = x
unSingleton _ = error "mkCaseSessions"
|
71ffa4a96042abe894cb073fe475bc414f0c6b3c1ca4c3343815082087547b84 | LexiFi/menhir | unionFind.ml | (******************************************************************************)
(******************************************************************************)
(*                                                                            *)
(*                                                                            *)
(*   Inria, Paris                                                             *)
(*   PPS, Université Paris Diderot                                            *)
(*                                                                            *)
(*   Copyright Inria. All rights reserved. This file is distributed under the *)
(*   terms of the GNU General Public License version 2, as described in the   *)
(*   file LICENSE.                                                            *)
(*                                                                            *)
(******************************************************************************)
See , ` ` Efficiency of a Good But Not Linear Set
Union Algorithm '' , JACM 22(2 ) , 1975 .
See Robert E. Tarjan, ``Efficiency of a Good But Not Linear Set
Union Algorithm'', JACM 22(2), 1975. *)
* The abstraction defined by this module is a set of points ,
partitioned into equivalence classes . With each equivalence class ,
a piece of information , of abstract type [ ' a ] , is associated ; we
call it a descriptor .
A point is implemented as a cell , whose ( mutable ) contents consist
of a single link to either information about the equivalence class ,
or another point . Thus , points form a graph , which must be acyclic ,
and whose connected components are the equivalence classes . In
every equivalence class , exactly one point has no outgoing edge ,
and carries information about the class instead . It is the class 's
representative element .
Information about a class consists of an integer weight ( the number
of elements in the class ) and of the class 's descriptor .
partitioned into equivalence classes. With each equivalence class,
a piece of information, of abstract type ['a], is associated; we
call it a descriptor.
A point is implemented as a cell, whose (mutable) contents consist
of a single link to either information about the equivalence class,
or another point. Thus, points form a graph, which must be acyclic,
and whose connected components are the equivalence classes. In
every equivalence class, exactly one point has no outgoing edge,
and carries information about the class instead. It is the class's
representative element.
Information about a class consists of an integer weight (the number
of elements in the class) and of the class's descriptor. *)
(* A point is a mutable cell that either links to another point of the
   same class or, for the representative, holds the class's [info]. *)
type 'a point = {
  mutable link: 'a link
}
and 'a link =
  | Info of 'a info   (* this point is the class representative *)
  | Link of 'a point  (* follow the link towards the representative *)
and 'a info = {
  mutable weight: int;     (* number of elements in the class *)
  mutable descriptor: 'a   (* the descriptor carried by the class *)
}
(** [fresh desc] creates a fresh point and returns it. It forms an
    equivalence class of its own, whose descriptor is [desc]. *)
let fresh descriptor =
  let info = { weight = 1; descriptor } in
  { link = Info info }
(** [repr point] returns the representative element of [point]'s
    equivalence class. It is found by starting at [point] and following
    the links. For efficiency, the function performs path compression
    at the same time. *)
let rec repr point =
  match point.link with
  | Link point' ->
      let point'' = repr point' in
      if point'' != point' then
        (* [point''] is [point']'s representative element. Because we
           just invoked [repr point'], [point'.link] must be [Link
           point'']. We write this value into [point.link], thus
           performing path compression. Note that this function never
           performs memory allocation. *)
        point.link <- point'.link;
      point''
  | Info _ ->
      (* no outgoing link: [point] is the representative itself *)
      point
(** [get point] returns the descriptor associated with [point]'s
    equivalence class. *)
let rec get point =
  (* The first two cases avoid calling [repr] when the path out of
     [point] has length 0 or 1, the common situation once paths have
     been compressed; only longer paths fall back to [repr]. *)
  match point.link with
  | Info info -> info.descriptor
  | Link { link = Info info } -> info.descriptor
  | Link { link = Link _ } -> get (repr point)
(* [set point v] updates the descriptor of [point]'s equivalence class
   to [v], using the same short-path optimization as [get]. *)
let rec set point v =
  match point.link with
  | Info info -> info.descriptor <- v
  | Link { link = Info info } -> info.descriptor <- v
  | Link { link = Link _ } -> set (repr point) v
(** [union point1 point2] merges the equivalence classes associated
    with [point1] and [point2] into a single class whose descriptor is
    that originally associated with [point2]. It does nothing if [point1]
    and [point2] already are in the same class.

    The weights are used to determine whether [point1] should be made
    to point to [point2], or vice-versa. By making the representative
    of the smaller class point to that of the larger class, we
    guarantee that paths remain of logarithmic length (not accounting
    for path compression, which makes them yet smaller). *)
let union point1 point2 =
  let point1 = repr point1
  and point2 = repr point2 in
  if point1 != point2 then
    match point1.link, point2.link with
    | Info info1, Info info2 ->
        let weight1 = info1.weight
        and weight2 = info2.weight in
        if weight1 >= weight2 then begin
          point2.link <- Link point1;
          info1.weight <- weight1 + weight2;
          (* either way, the merged class keeps [point2]'s descriptor *)
          info1.descriptor <- info2.descriptor
        end
        else begin
          point1.link <- Link point2;
          info2.weight <- weight1 + weight2
        end
    | _, _ ->
        assert false (* [repr] guarantees that [link] matches [Info _]. *)
(** [equivalent point1 point2] tells whether [point1] and [point2]
    belong to the same equivalence class. *)
let equivalent point1 point2 =
  let r1 = repr point1 and r2 = repr point2 in
  r1 == r2
| null | https://raw.githubusercontent.com/LexiFi/menhir/794e64e7997d4d3f91d36dd49aaecc942ea858b7/src/unionFind.ml | ocaml | ****************************************************************************
file LICENSE.
****************************************************************************
* [fresh desc] creates a fresh point and returns it. It forms an
equivalence class of its own, whose descriptor is [desc].
* [repr point] returns the representative element of [point]'s
equivalence class. It is found by starting at [point] and following
the links. For efficiency, the function performs path compression
at the same time.
[point''] is [point']'s representative element. Because we
just invoked [repr point'], [point'.link] must be [Link
point'']. We write this value into [point.link], thus
performing path compression. Note that this function never
performs memory allocation.
* [get point] returns the descriptor associated with [point]'s
equivalence class.
[repr] guarantees that [link] matches [Info _]. |
, Paris
, PPS , Université Paris Diderot
. All rights reserved . This file is distributed under the
terms of the GNU General Public License version 2 , as described in the
* This module implements a simple and efficient union / find algorithm .
See , ` ` Efficiency of a Good But Not Linear Set
Union Algorithm '' , JACM 22(2 ) , 1975 .
See Robert E. Tarjan, ``Efficiency of a Good But Not Linear Set
Union Algorithm'', JACM 22(2), 1975. *)
* The abstraction defined by this module is a set of points ,
partitioned into equivalence classes . With each equivalence class ,
a piece of information , of abstract type [ ' a ] , is associated ; we
call it a descriptor .
A point is implemented as a cell , whose ( mutable ) contents consist
of a single link to either information about the equivalence class ,
or another point . Thus , points form a graph , which must be acyclic ,
and whose connected components are the equivalence classes . In
every equivalence class , exactly one point has no outgoing edge ,
and carries information about the class instead . It is the class 's
representative element .
Information about a class consists of an integer weight ( the number
of elements in the class ) and of the class 's descriptor .
partitioned into equivalence classes. With each equivalence class,
a piece of information, of abstract type ['a], is associated; we
call it a descriptor.
A point is implemented as a cell, whose (mutable) contents consist
of a single link to either information about the equivalence class,
or another point. Thus, points form a graph, which must be acyclic,
and whose connected components are the equivalence classes. In
every equivalence class, exactly one point has no outgoing edge,
and carries information about the class instead. It is the class's
representative element.
Information about a class consists of an integer weight (the number
of elements in the class) and of the class's descriptor. *)
type 'a point = {
mutable link: 'a link
}
and 'a link =
| Info of 'a info
| Link of 'a point
and 'a info = {
mutable weight: int;
mutable descriptor: 'a
}
let fresh desc = {
link = Info { weight = 1; descriptor = desc }
}
let rec repr point =
match point.link with
| Link point' ->
let point'' = repr point' in
if point'' != point' then
point.link <- point'.link;
point''
| Info _ ->
point
let rec get point =
By not calling [ repr ] immediately , we optimize the common cases
where the path starting at [ point ] has length 0 or 1 , at the
expense of the general case .
where the path starting at [point] has length 0 or 1, at the
expense of the general case. *)
match point.link with
| Info info
| Link { link = Info info } ->
info.descriptor
| Link { link = Link _ } ->
get (repr point)
let rec set point v =
match point.link with
| Info info
| Link { link = Info info } ->
info.descriptor <- v
| Link { link = Link _ } ->
set (repr point) v
* [ union point1 point2 ] merges the equivalence classes associated
with [ point1 ] and [ point2 ] into a single class whose descriptor is
that originally associated with [ point2 ] . It does nothing if [ point1 ]
and [ point2 ] already are in the same class .
The weights are used to determine whether [ point1 ] should be made
to point to [ point2 ] , or vice - versa . By making the representative
of the smaller class point to that of the larger class , we
guarantee that paths remain of logarithmic length ( not accounting
for path compression , which makes them yet smaller ) .
with [point1] and [point2] into a single class whose descriptor is
that originally associated with [point2]. It does nothing if [point1]
and [point2] already are in the same class.
The weights are used to determine whether [point1] should be made
to point to [point2], or vice-versa. By making the representative
of the smaller class point to that of the larger class, we
guarantee that paths remain of logarithmic length (not accounting
for path compression, which makes them yet smaller). *)
let union point1 point2 =
let point1 = repr point1
and point2 = repr point2 in
if point1 != point2 then
match point1.link, point2.link with
| Info info1, Info info2 ->
let weight1 = info1.weight
and weight2 = info2.weight in
if weight1 >= weight2 then begin
point2.link <- Link point1;
info1.weight <- weight1 + weight2;
info1.descriptor <- info2.descriptor
end
else begin
point1.link <- Link point2;
info2.weight <- weight1 + weight2
end
| _, _ ->
* [ equivalent point1 point2 ] tells whether [ point1 ] and [ point2 ]
belong to the same equivalence class .
belong to the same equivalence class. *)
let equivalent point1 point2 =
repr point1 == repr point2
|
04dc7f1d917e7f0dbb2acc24431da26cc816aa863d18eb118da6ddfd7af0b591 | inhabitedtype/ocaml-aws | getAccessKeyInfo.mli | open Types
type input = GetAccessKeyInfoRequest.t
type output = GetAccessKeyInfoResponse.t
type error = Errors_internal.t
include
Aws.Call with type input := input and type output := output and type error := error
| null | https://raw.githubusercontent.com/inhabitedtype/ocaml-aws/b6d5554c5d201202b5de8d0b0253871f7b66dab6/libraries/sts/lib/getAccessKeyInfo.mli | ocaml | open Types
type input = GetAccessKeyInfoRequest.t
type output = GetAccessKeyInfoResponse.t
type error = Errors_internal.t
include
Aws.Call with type input := input and type output := output and type error := error
| |
ac60b0389c16cdc7b9ef60347587c6da7a57051eea51fc1257d2cc383d5b9304 | clojure-interop/java-jdk | HTMLEditorKit$InsertHTMLTextAction.clj | (ns javax.swing.text.html.HTMLEditorKit$InsertHTMLTextAction
"InsertHTMLTextAction can be used to insert an arbitrary string of HTML
into an existing HTML document. At least two HTML.Tags need to be
supplied. The first Tag, parentTag, identifies the parent in
the document to add the elements to. The second tag, addTag,
identifies the first tag that should be added to the document as
seen in the HTML string. One important thing to remember, is that
the parser is going to generate all the appropriate tags, even if
they aren't in the HTML string passed in.
For example, lets say you wanted to create an action to insert
a table into the body. The parentTag would be HTML.Tag.BODY,
addTag would be HTML.Tag.TABLE, and the string could be something
like <table><tr><td></td></tr></table>.
There is also an option to supply an alternate parentTag and
addTag. These will be checked for if there is no parentTag at
offset."
(:refer-clojure :only [require comment defn ->])
(:import [javax.swing.text.html HTMLEditorKit$InsertHTMLTextAction]))
(defn ->insert-html-text-action
"Constructor.
name - `java.lang.String`
html - `java.lang.String`
parent-tag - `javax.swing.text.html.HTML$Tag`
add-tag - `javax.swing.text.html.HTML$Tag`
alternate-parent-tag - `javax.swing.text.html.HTML$Tag`
alternate-add-tag - `javax.swing.text.html.HTML$Tag`"
(^HTMLEditorKit$InsertHTMLTextAction [^java.lang.String name ^java.lang.String html ^javax.swing.text.html.HTML$Tag parent-tag ^javax.swing.text.html.HTML$Tag add-tag ^javax.swing.text.html.HTML$Tag alternate-parent-tag ^javax.swing.text.html.HTML$Tag alternate-add-tag]
(new HTMLEditorKit$InsertHTMLTextAction name html parent-tag add-tag alternate-parent-tag alternate-add-tag))
(^HTMLEditorKit$InsertHTMLTextAction [^java.lang.String name ^java.lang.String html ^javax.swing.text.html.HTML$Tag parent-tag ^javax.swing.text.html.HTML$Tag add-tag]
(new HTMLEditorKit$InsertHTMLTextAction name html parent-tag add-tag)))
(defn action-performed
"Inserts the HTML into the document.
ae - the event - `java.awt.event.ActionEvent`"
([^HTMLEditorKit$InsertHTMLTextAction this ^java.awt.event.ActionEvent ae]
(-> this (.actionPerformed ae))))
| null | https://raw.githubusercontent.com/clojure-interop/java-jdk/8d7a223e0f9a0965eb0332fad595cf7649d9d96e/javax.swing/src/javax/swing/text/html/HTMLEditorKit%24InsertHTMLTextAction.clj | clojure | (ns javax.swing.text.html.HTMLEditorKit$InsertHTMLTextAction
"InsertHTMLTextAction can be used to insert an arbitrary string of HTML
into an existing HTML document. At least two HTML.Tags need to be
supplied. The first Tag, parentTag, identifies the parent in
the document to add the elements to. The second tag, addTag,
identifies the first tag that should be added to the document as
seen in the HTML string. One important thing to remember, is that
the parser is going to generate all the appropriate tags, even if
they aren't in the HTML string passed in.
For example, lets say you wanted to create an action to insert
a table into the body. The parentTag would be HTML.Tag.BODY,
addTag would be HTML.Tag.TABLE, and the string could be something
like <table><tr><td></td></tr></table>.
There is also an option to supply an alternate parentTag and
addTag. These will be checked for if there is no parentTag at
offset."
(:refer-clojure :only [require comment defn ->])
(:import [javax.swing.text.html HTMLEditorKit$InsertHTMLTextAction]))
(defn ->insert-html-text-action
"Constructor.
name - `java.lang.String`
html - `java.lang.String`
parent-tag - `javax.swing.text.html.HTML$Tag`
add-tag - `javax.swing.text.html.HTML$Tag`
alternate-parent-tag - `javax.swing.text.html.HTML$Tag`
alternate-add-tag - `javax.swing.text.html.HTML$Tag`"
(^HTMLEditorKit$InsertHTMLTextAction [^java.lang.String name ^java.lang.String html ^javax.swing.text.html.HTML$Tag parent-tag ^javax.swing.text.html.HTML$Tag add-tag ^javax.swing.text.html.HTML$Tag alternate-parent-tag ^javax.swing.text.html.HTML$Tag alternate-add-tag]
(new HTMLEditorKit$InsertHTMLTextAction name html parent-tag add-tag alternate-parent-tag alternate-add-tag))
(^HTMLEditorKit$InsertHTMLTextAction [^java.lang.String name ^java.lang.String html ^javax.swing.text.html.HTML$Tag parent-tag ^javax.swing.text.html.HTML$Tag add-tag]
(new HTMLEditorKit$InsertHTMLTextAction name html parent-tag add-tag)))
(defn action-performed
"Inserts the HTML into the document.
ae - the event - `java.awt.event.ActionEvent`"
([^HTMLEditorKit$InsertHTMLTextAction this ^java.awt.event.ActionEvent ae]
(-> this (.actionPerformed ae))))
| |
82487bd76956b040e408f53e6e3bf1c3caa6157b51d13d3962e0cff5f411f0ca | quil-lang/magicl | shape.lisp | ;;;; shapes.lisp
;;;;
Author :
(in-package #:magicl)
;;; Shapes
;; Predicates
(declaim (inline valid-shape-p))
(defun valid-shape-p (shape)
(and (typep shape 'list)
(plusp (length shape))
(cl:every (lambda (x) (typep x 'alexandria:positive-fixnum)) shape)))
(declaim (inline square-shape-p))
(defun square-shape-p (shape)
(and (valid-shape-p shape)
(apply #'cl:= shape)))
(declaim (inline valid-index-p))
(defun valid-index-p (index &optional shape)
(declare (notinline valid-index-p))
(if (null shape)
(and (typep index 'list)
(plusp (length index))
(cl:every (lambda (x) (typep x 'alexandria:non-negative-fixnum)) index))
(and (valid-index-p index)
(cl:= (length index) (length shape))
(cl:every #'< index shape))))
(declaim (inline valid-matrix-index-p))
(defun valid-matrix-index-p (index &optional nrows ncols)
(if (or (null nrows) (null ncols))
(and (typep index 'list)
(cl:= 2 (length index))
(cl:every (lambda (x) (typep x 'alexandria:non-negative-fixnum)) index))
(and (typep index 'list)
(cl:= 2 (length index))
(cl:every (lambda (x) (typep x 'alexandria:non-negative-fixnum)) index)
(< (first index) nrows)
(< (second index) ncols))))
;; Types
(deftype shape (&optional order)
(declare (ignore order))
`(satisfies valid-shape-p))
(deftype index ()
`(satisfies valid-index-p))
;; Assertions
(defmacro assert-square-shape (&rest shapes)
`(progn
,@(loop :for shape in shapes
:collect `(assert (square-shape-p ,shape)
()
"The value of ~a is ~a, which is not a square SHAPE" ,(symbol-name shape) ,shape))))
(defun fixnum-to-shape (num &optional (order 2))
(make-list order :initial-element num))
| null | https://raw.githubusercontent.com/quil-lang/magicl/45eae6e60329e8312496d25f1abfbf15274f4c89/src/high-level/shape.lisp | lisp | shapes.lisp
Shapes
Predicates
Types
Assertions | Author :
(in-package #:magicl)
(declaim (inline valid-shape-p))
(defun valid-shape-p (shape)
(and (typep shape 'list)
(plusp (length shape))
(cl:every (lambda (x) (typep x 'alexandria:positive-fixnum)) shape)))
(declaim (inline square-shape-p))
(defun square-shape-p (shape)
(and (valid-shape-p shape)
(apply #'cl:= shape)))
(declaim (inline valid-index-p))
(defun valid-index-p (index &optional shape)
(declare (notinline valid-index-p))
(if (null shape)
(and (typep index 'list)
(plusp (length index))
(cl:every (lambda (x) (typep x 'alexandria:non-negative-fixnum)) index))
(and (valid-index-p index)
(cl:= (length index) (length shape))
(cl:every #'< index shape))))
(declaim (inline valid-matrix-index-p))
(defun valid-matrix-index-p (index &optional nrows ncols)
(if (or (null nrows) (null ncols))
(and (typep index 'list)
(cl:= 2 (length index))
(cl:every (lambda (x) (typep x 'alexandria:non-negative-fixnum)) index))
(and (typep index 'list)
(cl:= 2 (length index))
(cl:every (lambda (x) (typep x 'alexandria:non-negative-fixnum)) index)
(< (first index) nrows)
(< (second index) ncols))))
(deftype shape (&optional order)
(declare (ignore order))
`(satisfies valid-shape-p))
(deftype index ()
`(satisfies valid-index-p))
(defmacro assert-square-shape (&rest shapes)
`(progn
,@(loop :for shape in shapes
:collect `(assert (square-shape-p ,shape)
()
"The value of ~a is ~a, which is not a square SHAPE" ,(symbol-name shape) ,shape))))
(defun fixnum-to-shape (num &optional (order 2))
(make-list order :initial-element num))
|
9d36e09f1e63ae2cab66edafcbfc2f38ccdb2ed9b61fd9feab92e1641a9f71b5 | noinia/hgeometry | SlowSeq.hs | module Data.SlowSeq where
import Control.Lens (bimap)
import qualified Data . FingerTree as FT
-- import Data.FingerTree hiding (null, viewl, viewr)
import Data.FingerTree(ViewL(..),ViewR(..))
import qualified Data.Foldable as F
import Data.Maybe
import qualified Data.Sequence as S
import qualified Data.Sequence.Util as SU
--------------------------------------------------------------------------------
data Key a = NoKey | Key { getKey :: a } deriving (Show,Eq,Ord)
instance Semigroup (Key a) where
k <> NoKey = k
_ <> k = k
instance Monoid (Key a) where
mempty = NoKey
k `mappend` k' = k <> k'
liftCmp :: (a -> a -> Ordering) -> Key a -> Key a -> Ordering
liftCmp _ NoKey NoKey = EQ
liftCmp _ NoKey (Key _) = LT
liftCmp _ (Key _) NoKey = GT
liftCmp cmp (Key x) (Key y) = x `cmp` y
newtype Elem a = Elem { getElem : : a } deriving ( Eq , Ord , , Foldable , Functor )
-- instance Show a => Show (Elem a) where
show ( ) = " Elem " < > show x
newtype OrdSeq a = OrdSeq { _asSeq :: S.Seq a }
deriving (Show,Eq)
instance Semigroup (OrdSeq a) where
(OrdSeq s) <> (OrdSeq t) = OrdSeq $ s `mappend` t
instance Monoid (OrdSeq a) where
mempty = OrdSeq mempty
mappend = (<>)
instance Foldable OrdSeq where
foldMap f = foldMap f . _asSeq
null = null . _asSeq
length = length . _asSeq
minimum = fromJust . lookupMin
maximum = fromJust . lookupMax
-- instance Measured (Key a) (Elem a) where
measure ( ) = Key x
type Compare a = a -> a -> Ordering
-- | Insert into a monotone OrdSeq.
--
-- pre: the comparator maintains monotonicity
--
-- \(O(\log^2 n)\)
insertBy :: Compare a -> a -> OrdSeq a -> OrdSeq a
insertBy cmp x (OrdSeq s) = OrdSeq $ l `mappend` (x S.<| r)
where
(l,r) = split (\v -> cmp v x `elem` [EQ, GT]) s
-- | Insert into a sorted OrdSeq
--
-- \(O(\log^2 n)\)
insert :: Ord a => a -> OrdSeq a -> OrdSeq a
insert = insertBy compare
deleteAllBy :: Compare a -> a -> OrdSeq a -> OrdSeq a
deleteAllBy cmp x s = l <> r
where
(l,_,r) = splitBy cmp x s
( l , m ) = split ( \v - > liftCmp v ( Key x ) ` elem ` [ EQ , GT ] ) s
( _ , r ) = split ( \v - > liftCmp v ( Key x ) = = GT ) m
-- | \(O(\log^2 n)\)
splitBy :: Compare a -> a -> OrdSeq a -> (OrdSeq a, OrdSeq a, OrdSeq a)
splitBy cmp x (OrdSeq s) = (OrdSeq l, OrdSeq m', OrdSeq r)
where
(l, m) = split (\v -> cmp v x `elem` [EQ,GT]) s
(m',r) = split (\v -> cmp v x == GT) m
HLINT ignore splitOn
-- | Given a monotonic function f that maps a to b, split the sequence s
-- depending on the b values. I.e. the result (l,m,r) is such that
* all ( < x ) . fmap f $ l
* all (= = x ) . fmap f $ m
* all ( > x ) . fmap f $ r
--
> > > splitOn i d 3 $ fromAscList ' [ 1 .. 5 ]
( OrdSeq { _ = fromList [ Elem 1,Elem 2]},OrdSeq { _ = fromList [ Elem 3]},OrdSeq { _ = fromList [ Elem 4,Elem 5 ] } )
> > > splitOn fst 2 $ fromAscList ' [ ( 0,"-"),(1,"A"),(2,"B"),(2,"C"),(3,"D"),(4,"E " ) ]
( OrdSeq { _ = fromList [ ( 0,"-"),Elem ( 1,"A")]},OrdSeq { _ = fromList [ ( 2,"B"),Elem ( 2,"C")]},OrdSeq { _ = fromList [ ( 3,"D"),Elem ( 4,"E " ) ] } )
--
-- \(O(\log^2 n)\)
splitOn :: Ord b => (a -> b) -> b -> OrdSeq a -> (OrdSeq a, OrdSeq a, OrdSeq a)
splitOn f x (OrdSeq s) = (OrdSeq l, OrdSeq m', OrdSeq r)
where
(l, m) = split (\v -> compare (f v) x `elem` [EQ,GT]) s
(m',r) = split (\v -> compare (f v) x == GT) m
| Given a monotonic predicate p , splits the sequence s into two sequences
-- (as,bs) such that all (not p) as and all p bs
--
-- \(O(\log^2 n)\)
splitMonotonic :: (a -> Bool) -> OrdSeq a -> (OrdSeq a, OrdSeq a)
splitMonotonic p = bimap OrdSeq OrdSeq . split p . _asSeq
-- monotonic split for Sequences
--
-- \(O(\log^2 n)\)
split :: (a -> Bool) -> S.Seq a -> (S.Seq a, S.Seq a)
split = SU.splitMonotone
Deletes all elements from the
--
-- \(O(\log^2 n)\)
deleteAll :: Ord a => a -> OrdSeq a -> OrdSeq a
deleteAll = deleteAllBy compare
-- | inserts all eleements in order
-- \(O(n\log n)\)
fromListBy :: Compare a -> [a] -> OrdSeq a
fromListBy cmp = foldr (insertBy cmp) mempty
-- | inserts all eleements in order
-- \(O(n\log n)\)
fromListByOrd :: Ord a => [a] -> OrdSeq a
fromListByOrd = fromListBy compare
-- | O(n)
fromAscList' :: [a] -> OrdSeq a
fromAscList' = OrdSeq . S.fromList
-- | \(O(\log^2 n)\)
lookupBy :: Compare a -> a -> OrdSeq a -> Maybe a
lookupBy cmp x s = let (_,m,_) = splitBy cmp x s in listToMaybe . F.toList $ m
memberBy :: Compare a -> a -> OrdSeq a -> Bool
memberBy cmp x = isJust . lookupBy cmp x
-- | Fmap, assumes the order does not change
-- \(O(n)\)
mapMonotonic :: (a -> b) -> OrdSeq a -> OrdSeq b
mapMonotonic f = fromAscList' . map f . F.toList
| Gets the first element from the sequence
-- \(O(1)\)
viewl :: OrdSeq a -> ViewL OrdSeq a
viewl = f . S.viewl . _asSeq
where
f S.EmptyL = EmptyL
f (x S.:< s) = x :< OrdSeq s
-- Last element
-- \(O(1)\)
viewr :: OrdSeq a -> ViewR OrdSeq a
viewr = f . S.viewr . _asSeq
where
f S.EmptyR = EmptyR
f (s S.:> x) = OrdSeq s :> x
-- \(O(1)\)
minView :: OrdSeq a -> Maybe (a, OrdSeq a)
minView s = case viewl s of
EmptyL -> Nothing
(x :< t) -> Just (x,t)
-- \(O(1)\)
lookupMin :: OrdSeq a -> Maybe a
lookupMin = fmap fst . minView
-- \(O(1)\)
maxView :: OrdSeq a -> Maybe (a, OrdSeq a)
maxView s = case viewr s of
EmptyR -> Nothing
(t :> x) -> Just (x,t)
-- \(O(1)\)
lookupMax :: OrdSeq a -> Maybe a
lookupMax = fmap fst . maxView
| null | https://raw.githubusercontent.com/noinia/hgeometry/a6abecb1ce4a7fd96b25cc1a5c65cd4257ecde7a/hgeometry-old/remove-by-2021-06-01/src/Data/SlowSeq.hs | haskell | import Data.FingerTree hiding (null, viewl, viewr)
------------------------------------------------------------------------------
instance Show a => Show (Elem a) where
instance Measured (Key a) (Elem a) where
| Insert into a monotone OrdSeq.
pre: the comparator maintains monotonicity
\(O(\log^2 n)\)
| Insert into a sorted OrdSeq
\(O(\log^2 n)\)
| \(O(\log^2 n)\)
| Given a monotonic function f that maps a to b, split the sequence s
depending on the b values. I.e. the result (l,m,r) is such that
\(O(\log^2 n)\)
(as,bs) such that all (not p) as and all p bs
\(O(\log^2 n)\)
monotonic split for Sequences
\(O(\log^2 n)\)
\(O(\log^2 n)\)
| inserts all eleements in order
\(O(n\log n)\)
| inserts all eleements in order
\(O(n\log n)\)
| O(n)
| \(O(\log^2 n)\)
| Fmap, assumes the order does not change
\(O(n)\)
\(O(1)\)
Last element
\(O(1)\)
\(O(1)\)
\(O(1)\)
\(O(1)\)
\(O(1)\) | module Data.SlowSeq where
import Control.Lens (bimap)
import qualified Data . FingerTree as FT
import Data.FingerTree(ViewL(..),ViewR(..))
import qualified Data.Foldable as F
import Data.Maybe
import qualified Data.Sequence as S
import qualified Data.Sequence.Util as SU
data Key a = NoKey | Key { getKey :: a } deriving (Show,Eq,Ord)
instance Semigroup (Key a) where
k <> NoKey = k
_ <> k = k
instance Monoid (Key a) where
mempty = NoKey
k `mappend` k' = k <> k'
liftCmp :: (a -> a -> Ordering) -> Key a -> Key a -> Ordering
liftCmp _ NoKey NoKey = EQ
liftCmp _ NoKey (Key _) = LT
liftCmp _ (Key _) NoKey = GT
liftCmp cmp (Key x) (Key y) = x `cmp` y
newtype Elem a = Elem { getElem : : a } deriving ( Eq , Ord , , Foldable , Functor )
show ( ) = " Elem " < > show x
newtype OrdSeq a = OrdSeq { _asSeq :: S.Seq a }
deriving (Show,Eq)
instance Semigroup (OrdSeq a) where
(OrdSeq s) <> (OrdSeq t) = OrdSeq $ s `mappend` t
instance Monoid (OrdSeq a) where
mempty = OrdSeq mempty
mappend = (<>)
instance Foldable OrdSeq where
foldMap f = foldMap f . _asSeq
null = null . _asSeq
length = length . _asSeq
minimum = fromJust . lookupMin
maximum = fromJust . lookupMax
measure ( ) = Key x
type Compare a = a -> a -> Ordering
insertBy :: Compare a -> a -> OrdSeq a -> OrdSeq a
insertBy cmp x (OrdSeq s) = OrdSeq $ l `mappend` (x S.<| r)
where
(l,r) = split (\v -> cmp v x `elem` [EQ, GT]) s
insert :: Ord a => a -> OrdSeq a -> OrdSeq a
insert = insertBy compare
deleteAllBy :: Compare a -> a -> OrdSeq a -> OrdSeq a
deleteAllBy cmp x s = l <> r
where
(l,_,r) = splitBy cmp x s
( l , m ) = split ( \v - > liftCmp v ( Key x ) ` elem ` [ EQ , GT ] ) s
( _ , r ) = split ( \v - > liftCmp v ( Key x ) = = GT ) m
splitBy :: Compare a -> a -> OrdSeq a -> (OrdSeq a, OrdSeq a, OrdSeq a)
splitBy cmp x (OrdSeq s) = (OrdSeq l, OrdSeq m', OrdSeq r)
where
(l, m) = split (\v -> cmp v x `elem` [EQ,GT]) s
(m',r) = split (\v -> cmp v x == GT) m
HLINT ignore splitOn
* all ( < x ) . fmap f $ l
* all (= = x ) . fmap f $ m
* all ( > x ) . fmap f $ r
> > > splitOn i d 3 $ fromAscList ' [ 1 .. 5 ]
( OrdSeq { _ = fromList [ Elem 1,Elem 2]},OrdSeq { _ = fromList [ Elem 3]},OrdSeq { _ = fromList [ Elem 4,Elem 5 ] } )
> > > splitOn fst 2 $ fromAscList ' [ ( 0,"-"),(1,"A"),(2,"B"),(2,"C"),(3,"D"),(4,"E " ) ]
( OrdSeq { _ = fromList [ ( 0,"-"),Elem ( 1,"A")]},OrdSeq { _ = fromList [ ( 2,"B"),Elem ( 2,"C")]},OrdSeq { _ = fromList [ ( 3,"D"),Elem ( 4,"E " ) ] } )
splitOn :: Ord b => (a -> b) -> b -> OrdSeq a -> (OrdSeq a, OrdSeq a, OrdSeq a)
splitOn f x (OrdSeq s) = (OrdSeq l, OrdSeq m', OrdSeq r)
where
(l, m) = split (\v -> compare (f v) x `elem` [EQ,GT]) s
(m',r) = split (\v -> compare (f v) x == GT) m
| Given a monotonic predicate p , splits the sequence s into two sequences
splitMonotonic :: (a -> Bool) -> OrdSeq a -> (OrdSeq a, OrdSeq a)
splitMonotonic p = bimap OrdSeq OrdSeq . split p . _asSeq
split :: (a -> Bool) -> S.Seq a -> (S.Seq a, S.Seq a)
split = SU.splitMonotone
Deletes all elements from the
deleteAll :: Ord a => a -> OrdSeq a -> OrdSeq a
deleteAll = deleteAllBy compare
fromListBy :: Compare a -> [a] -> OrdSeq a
fromListBy cmp = foldr (insertBy cmp) mempty
fromListByOrd :: Ord a => [a] -> OrdSeq a
fromListByOrd = fromListBy compare
fromAscList' :: [a] -> OrdSeq a
fromAscList' = OrdSeq . S.fromList
lookupBy :: Compare a -> a -> OrdSeq a -> Maybe a
lookupBy cmp x s = let (_,m,_) = splitBy cmp x s in listToMaybe . F.toList $ m
memberBy :: Compare a -> a -> OrdSeq a -> Bool
memberBy cmp x = isJust . lookupBy cmp x
mapMonotonic :: (a -> b) -> OrdSeq a -> OrdSeq b
mapMonotonic f = fromAscList' . map f . F.toList
| Gets the first element from the sequence
viewl :: OrdSeq a -> ViewL OrdSeq a
viewl = f . S.viewl . _asSeq
where
f S.EmptyL = EmptyL
f (x S.:< s) = x :< OrdSeq s
viewr :: OrdSeq a -> ViewR OrdSeq a
viewr = f . S.viewr . _asSeq
where
f S.EmptyR = EmptyR
f (s S.:> x) = OrdSeq s :> x
minView :: OrdSeq a -> Maybe (a, OrdSeq a)
minView s = case viewl s of
EmptyL -> Nothing
(x :< t) -> Just (x,t)
lookupMin :: OrdSeq a -> Maybe a
lookupMin = fmap fst . minView
maxView :: OrdSeq a -> Maybe (a, OrdSeq a)
maxView s = case viewr s of
EmptyR -> Nothing
(t :> x) -> Just (x,t)
lookupMax :: OrdSeq a -> Maybe a
lookupMax = fmap fst . maxView
|
e02af7dddbe35e0c3c8357b94767d863dba30370553e2ed43de0d60d4f5099ae | maximedenes/native-coq | fourier.ml | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
(* Méthode d'élimination de Fourier *)
Référence :
Auteur(s ) : Fourier ,
Titre(s ) : Oeuvres de Fourier [ Document électronique ] . second . Mémoires publiés dans divers recueils / publ . par les soins de M. Gaston Darboux , ...
Publication : Numérisation BnF de l'édition de Paris : , 1890
Pages : 326 - 327
/
Auteur(s) : Fourier, Jean-Baptiste-Joseph
Titre(s) : Oeuvres de Fourier [Document électronique]. Tome second. Mémoires publiés dans divers recueils / publ. par les soins de M. Gaston Darboux,...
Publication : Numérisation BnF de l'édition de Paris : Gauthier-Villars, 1890
Pages: 326-327
/
*)
Un peu de calcul sur les rationnels ...
Les opérations rendent des ,
i.e. le numérateur et le dénominateur sont premiers entre eux .
Les opérations rendent des rationnels normalisés,
i.e. le numérateur et le dénominateur sont premiers entre eux.
*)
type rational = {num:int;
den:int}
;;
let print_rational x =
print_int x.num;
print_string "/";
print_int x.den
;;
let rec pgcd x y = if y = 0 then x else pgcd y (x mod y);;
let r0 = {num=0;den=1};;
let r1 = {num=1;den=1};;
let rnorm x = let x = (if x.den<0 then {num=(-x.num);den=(-x.den)} else x) in
if x.num=0 then r0
else (let d=pgcd x.num x.den in
let d= (if d<0 then -d else d) in
{num=(x.num)/d;den=(x.den)/d});;
let rop x = rnorm {num=(-x.num);den=x.den};;
let rplus x y = rnorm {num=x.num*y.den + y.num*x.den;den=x.den*y.den};;
let rminus x y = rnorm {num=x.num*y.den - y.num*x.den;den=x.den*y.den};;
let rmult x y = rnorm {num=x.num*y.num;den=x.den*y.den};;
let rinv x = rnorm {num=x.den;den=x.num};;
let rdiv x y = rnorm {num=x.num*y.den;den=x.den*y.num};;
let rinf x y = x.num*y.den < y.num*x.den;;
let rinfeq x y = x.num*y.den <= y.num*x.den;;
{ coef;hist;strict } , où coef=[c1 ; ... ; cn ; d ] , représente l'inéquation
c1x1+ ... +cnxn < d si strict = true , < = sinon ,
hist donnant les coefficients ( positifs ) d'une .
c1x1+...+cnxn < d si strict=true, <= sinon,
hist donnant les coefficients (positifs) d'une combinaison linéaire qui permet d'obtenir l'inéquation à partir de celles du départ.
*)
type ineq = {coef:rational list;
hist:rational list;
strict:bool};;
let pop x l = l:=x::(!l);;
sépare la liste d'inéquations s selon que leur premier coefficient est
ou positif .
négatif, nul ou positif. *)
let partitionne s =
let lpos=ref [] in
let lneg=ref [] in
let lnul=ref [] in
List.iter (fun ie -> match ie.coef with
[] -> raise (Failure "empty ineq")
|(c::r) -> if rinf c r0
then pop ie lneg
else if rinf r0 c then pop ie lpos
else pop ie lnul)
s;
[!lneg;!lnul;!lpos]
;;
initialise les histoires d'une liste d'inéquations données par leurs listes de coefficients et leurs strictitudes ( ! ):
( add_hist [ ( equation 1 , s1); ... ;(équation n , sn ) ] )
=
[ { équation 1 , [ 1;0; ... ] , s1 } ;
{ équation 2 , [ 0;1; ... ] , s2 } ;
...
{ équation n , [ 0;0; ... ;1 ] , sn } ]
(add_hist [(equation 1, s1);...;(équation n, sn)])
=
[{équation 1, [1;0;...;0], s1};
{équation 2, [0;1;...;0], s2};
...
{équation n, [0;0;...;1], sn}]
*)
let add_hist le =
let n = List.length le in
let i=ref 0 in
List.map (fun (ie,s) ->
let h =ref [] in
for k=1 to (n-(!i)-1) do pop r0 h; done;
pop r1 h;
for k=1 to !i do pop r0 h; done;
i:=!i+1;
{coef=ie;hist=(!h);strict=s})
le
;;
(* additionne deux inéquations *)
let ie_add ie1 ie2 = {coef=List.map2 rplus ie1.coef ie2.coef;
hist=List.map2 rplus ie1.hist ie2.hist;
strict=ie1.strict || ie2.strict}
;;
(* multiplication d'une inéquation par un rationnel (positif) *)
let ie_emult a ie = {coef=List.map (fun x -> rmult a x) ie.coef;
hist=List.map (fun x -> rmult a x) ie.hist;
strict= ie.strict}
;;
on enlève le premier coefficient
let ie_tl ie = {coef=List.tl ie.coef;hist=ie.hist;strict=ie.strict}
;;
le premier coefficient : " tête " de l'inéquation
let hd_coef ie = List.hd ie.coef
;;
calcule toutes les combinaisons entre inéquations de tête négative et inéquations de tête positive premier coefficient .
*)
let deduce_add lneg lpos =
let res=ref [] in
List.iter (fun i1 ->
List.iter (fun i2 ->
let a = rop (hd_coef i1) in
let b = hd_coef i2 in
pop (ie_tl (ie_add (ie_emult b i1)
(ie_emult a i2))) res)
lpos)
lneg;
!res
;;
(* élimination de la première variable à partir d'une liste d'inéquations:
opération qu'on itère dans l'algorithme de Fourier.
*)
let deduce1 s =
match (partitionne s) with
[lneg;lnul;lpos] ->
let lnew = deduce_add lneg lpos in
(List.map ie_tl lnul)@lnew
|_->assert false
;;
algorithme de Fourier : on élimine successivement toutes les variables .
*)
let deduce lie =
let n = List.length (fst (List.hd lie)) in
let lie=ref (add_hist lie) in
for i=1 to n-1 do
lie:= deduce1 !lie;
done;
!lie
;;
donne [ ] si le système a des solutions ,
sinon donne [ c , s , lc ]
où lc est la des inéquations de départ
qui donne 0 < c si s = true
ou 0 < = c sinon
cette inéquation étant .
sinon donne [c,s,lc]
où lc est la combinaison linéaire des inéquations de départ
qui donne 0 < c si s=true
ou 0 <= c sinon
cette inéquation étant absurde.
*)
let unsolvable lie =
let lr = deduce lie in
let res = ref [] in
(try (List.iter (fun e ->
match e with
{coef=[c];hist=lc;strict=s} ->
if (rinf c r0 && (not s)) || (rinfeq c r0 && s)
then (res := [c,s,lc];
raise (Failure "contradiction found"))
|_->assert false)
lr)
with _ -> ());
!res
;;
Exemples :
let test1=[[r1;r1;r0],true;[rop r1;r1;r1],false;[r0;rop r1;rop r1],false ] ; ;
deduce ; ;
unsolvable ; ;
let test2= [
[ r1;r1;r0;r0;r0],false ;
[ r0;r1;r1;r0;r0],false ;
[ r0;r0;r1;r1;r0],false ;
[ r0;r0;r0;r1;r1],false ;
[ r1;r0;r0;r0;r1],false ;
[ rop r1;rop r1;r0;r0;r0],false ;
[ r0;rop r1;rop r1;r0;r0],false ;
[ r0;r0;rop r1;rop r1;r0],false ;
[ r0;r0;r0;rop r1;rop r1],false ;
[ rop r1;r0;r0;r0;rop r1],false
] ; ;
deduce test2 ; ;
unsolvable test2 ; ;
let test1=[[r1;r1;r0],true;[rop r1;r1;r1],false;[r0;rop r1;rop r1],false];;
deduce test1;;
unsolvable test1;;
let test2=[
[r1;r1;r0;r0;r0],false;
[r0;r1;r1;r0;r0],false;
[r0;r0;r1;r1;r0],false;
[r0;r0;r0;r1;r1],false;
[r1;r0;r0;r0;r1],false;
[rop r1;rop r1;r0;r0;r0],false;
[r0;rop r1;rop r1;r0;r0],false;
[r0;r0;rop r1;rop r1;r0],false;
[r0;r0;r0;rop r1;rop r1],false;
[rop r1;r0;r0;r0;rop r1],false
];;
deduce test2;;
unsolvable test2;;
*)
| null | https://raw.githubusercontent.com/maximedenes/native-coq/3623a4d9fe95c165f02f7119c0e6564a83a9f4c9/plugins/fourier/fourier.ml | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
Méthode d'élimination de Fourier
additionne deux inéquations
multiplication d'une inéquation par un rationnel (positif)
élimination de la première variable à partir d'une liste d'inéquations:
opération qu'on itère dans l'algorithme de Fourier.
| v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
Référence :
Auteur(s ) : Fourier ,
Titre(s ) : Oeuvres de Fourier [ Document électronique ] . second . Mémoires publiés dans divers recueils / publ . par les soins de M. Gaston Darboux , ...
Publication : Numérisation BnF de l'édition de Paris : , 1890
Pages : 326 - 327
/
Auteur(s) : Fourier, Jean-Baptiste-Joseph
Titre(s) : Oeuvres de Fourier [Document électronique]. Tome second. Mémoires publiés dans divers recueils / publ. par les soins de M. Gaston Darboux,...
Publication : Numérisation BnF de l'édition de Paris : Gauthier-Villars, 1890
Pages: 326-327
/
*)
Un peu de calcul sur les rationnels ...
Les opérations rendent des ,
i.e. le numérateur et le dénominateur sont premiers entre eux .
Les opérations rendent des rationnels normalisés,
i.e. le numérateur et le dénominateur sont premiers entre eux.
*)
type rational = {num:int;
den:int}
;;
let print_rational x =
print_int x.num;
print_string "/";
print_int x.den
;;
let rec pgcd x y = if y = 0 then x else pgcd y (x mod y);;
let r0 = {num=0;den=1};;
let r1 = {num=1;den=1};;
let rnorm x = let x = (if x.den<0 then {num=(-x.num);den=(-x.den)} else x) in
if x.num=0 then r0
else (let d=pgcd x.num x.den in
let d= (if d<0 then -d else d) in
{num=(x.num)/d;den=(x.den)/d});;
let rop x = rnorm {num=(-x.num);den=x.den};;
let rplus x y = rnorm {num=x.num*y.den + y.num*x.den;den=x.den*y.den};;
let rminus x y = rnorm {num=x.num*y.den - y.num*x.den;den=x.den*y.den};;
let rmult x y = rnorm {num=x.num*y.num;den=x.den*y.den};;
let rinv x = rnorm {num=x.den;den=x.num};;
let rdiv x y = rnorm {num=x.num*y.den;den=x.den*y.num};;
let rinf x y = x.num*y.den < y.num*x.den;;
let rinfeq x y = x.num*y.den <= y.num*x.den;;
{ coef;hist;strict } , où coef=[c1 ; ... ; cn ; d ] , représente l'inéquation
c1x1+ ... +cnxn < d si strict = true , < = sinon ,
hist donnant les coefficients ( positifs ) d'une .
c1x1+...+cnxn < d si strict=true, <= sinon,
hist donnant les coefficients (positifs) d'une combinaison linéaire qui permet d'obtenir l'inéquation à partir de celles du départ.
*)
type ineq = {coef:rational list;
hist:rational list;
strict:bool};;
let pop x l = l:=x::(!l);;
sépare la liste d'inéquations s selon que leur premier coefficient est
ou positif .
négatif, nul ou positif. *)
let partitionne s =
let lpos=ref [] in
let lneg=ref [] in
let lnul=ref [] in
List.iter (fun ie -> match ie.coef with
[] -> raise (Failure "empty ineq")
|(c::r) -> if rinf c r0
then pop ie lneg
else if rinf r0 c then pop ie lpos
else pop ie lnul)
s;
[!lneg;!lnul;!lpos]
;;
initialise les histoires d'une liste d'inéquations données par leurs listes de coefficients et leurs strictitudes ( ! ):
( add_hist [ ( equation 1 , s1); ... ;(équation n , sn ) ] )
=
[ { équation 1 , [ 1;0; ... ] , s1 } ;
{ équation 2 , [ 0;1; ... ] , s2 } ;
...
{ équation n , [ 0;0; ... ;1 ] , sn } ]
(add_hist [(equation 1, s1);...;(équation n, sn)])
=
[{équation 1, [1;0;...;0], s1};
{équation 2, [0;1;...;0], s2};
...
{équation n, [0;0;...;1], sn}]
*)
let add_hist le =
let n = List.length le in
let i=ref 0 in
List.map (fun (ie,s) ->
let h =ref [] in
for k=1 to (n-(!i)-1) do pop r0 h; done;
pop r1 h;
for k=1 to !i do pop r0 h; done;
i:=!i+1;
{coef=ie;hist=(!h);strict=s})
le
;;
let ie_add ie1 ie2 = {coef=List.map2 rplus ie1.coef ie2.coef;
hist=List.map2 rplus ie1.hist ie2.hist;
strict=ie1.strict || ie2.strict}
;;
let ie_emult a ie = {coef=List.map (fun x -> rmult a x) ie.coef;
hist=List.map (fun x -> rmult a x) ie.hist;
strict= ie.strict}
;;
on enlève le premier coefficient
let ie_tl ie = {coef=List.tl ie.coef;hist=ie.hist;strict=ie.strict}
;;
le premier coefficient : " tête " de l'inéquation
let hd_coef ie = List.hd ie.coef
;;
calcule toutes les combinaisons entre inéquations de tête négative et inéquations de tête positive premier coefficient .
*)
let deduce_add lneg lpos =
let res=ref [] in
List.iter (fun i1 ->
List.iter (fun i2 ->
let a = rop (hd_coef i1) in
let b = hd_coef i2 in
pop (ie_tl (ie_add (ie_emult b i1)
(ie_emult a i2))) res)
lpos)
lneg;
!res
;;
let deduce1 s =
match (partitionne s) with
[lneg;lnul;lpos] ->
let lnew = deduce_add lneg lpos in
(List.map ie_tl lnul)@lnew
|_->assert false
;;
algorithme de Fourier : on élimine successivement toutes les variables .
*)
let deduce lie =
let n = List.length (fst (List.hd lie)) in
let lie=ref (add_hist lie) in
for i=1 to n-1 do
lie:= deduce1 !lie;
done;
!lie
;;
donne [ ] si le système a des solutions ,
sinon donne [ c , s , lc ]
où lc est la des inéquations de départ
qui donne 0 < c si s = true
ou 0 < = c sinon
cette inéquation étant .
sinon donne [c,s,lc]
où lc est la combinaison linéaire des inéquations de départ
qui donne 0 < c si s=true
ou 0 <= c sinon
cette inéquation étant absurde.
*)
let unsolvable lie =
let lr = deduce lie in
let res = ref [] in
(try (List.iter (fun e ->
match e with
{coef=[c];hist=lc;strict=s} ->
if (rinf c r0 && (not s)) || (rinfeq c r0 && s)
then (res := [c,s,lc];
raise (Failure "contradiction found"))
|_->assert false)
lr)
with _ -> ());
!res
;;
Exemples :
let test1=[[r1;r1;r0],true;[rop r1;r1;r1],false;[r0;rop r1;rop r1],false ] ; ;
deduce ; ;
unsolvable ; ;
let test2= [
[ r1;r1;r0;r0;r0],false ;
[ r0;r1;r1;r0;r0],false ;
[ r0;r0;r1;r1;r0],false ;
[ r0;r0;r0;r1;r1],false ;
[ r1;r0;r0;r0;r1],false ;
[ rop r1;rop r1;r0;r0;r0],false ;
[ r0;rop r1;rop r1;r0;r0],false ;
[ r0;r0;rop r1;rop r1;r0],false ;
[ r0;r0;r0;rop r1;rop r1],false ;
[ rop r1;r0;r0;r0;rop r1],false
] ; ;
deduce test2 ; ;
unsolvable test2 ; ;
let test1=[[r1;r1;r0],true;[rop r1;r1;r1],false;[r0;rop r1;rop r1],false];;
deduce test1;;
unsolvable test1;;
let test2=[
[r1;r1;r0;r0;r0],false;
[r0;r1;r1;r0;r0],false;
[r0;r0;r1;r1;r0],false;
[r0;r0;r0;r1;r1],false;
[r1;r0;r0;r0;r1],false;
[rop r1;rop r1;r0;r0;r0],false;
[r0;rop r1;rop r1;r0;r0],false;
[r0;r0;rop r1;rop r1;r0],false;
[r0;r0;r0;rop r1;rop r1],false;
[rop r1;r0;r0;r0;rop r1],false
];;
deduce test2;;
unsolvable test2;;
*)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.