_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
dfcb17bb9a3342b06041fcc2a9c7c2831dd37ca4ffea52b769d83630997c59e6 | hdbc/hdbc-odbc | Testbasics.hs | module Testbasics(tests) where
import Test.HUnit
import Database.HDBC
import TestUtils
import System.IO
import Control.Exception
openClosedb = sqlTestCase $
do dbh <- connectDB
disconnect dbh
multiFinish = dbTestCase (\dbh ->
do sth <- prepare dbh "SELECT 1 + 1"
r <- execute sth []
assertEqual "basic count" 0 r
finish sth
finish sth
finish sth
)
basicQueries = dbTestCase (\dbh ->
do sth <- prepare dbh "SELECT 1 + 1"
execute sth [] >>= (0 @=?)
r <- fetchAllRows sth
assertEqual "converted from" [["2"]] (map (map fromSql) r)
assertEqual "int32 compare" [[SqlInt32 2]] r
assertEqual "iToSql compare" [[iToSql 2]] r
assertEqual "num compare" [[toSql (2::Int)]] r
assertEqual "nToSql compare" [[nToSql (2::Int)]] r
assertEqual "string compare" [[SqlString "2"]] r
)
createTable = dbTestCase (\dbh ->
do run dbh "CREATE TABLE hdbctest1 (testname VARCHAR(20), testid INTEGER, testint INTEGER, testtext TEXT)" []
commit dbh
)
dropTable = dbTestCase (\dbh ->
do run dbh "DROP TABLE hdbctest1" []
commit dbh
)
runReplace = dbTestCase (\dbh ->
do r <- run dbh "INSERT INTO hdbctest1 VALUES (?, ?, ?, ?)" r1
assertEqual "insert retval" 1 r
run dbh "INSERT INTO hdbctest1 VALUES (?, ?, ?, ?)" r2
commit dbh
sth <- prepare dbh "SELECT * FROM hdbctest1 WHERE testname = 'runReplace' ORDER BY testid"
rv2 <- execute sth []
assertEqual "select retval" 0 rv2
r <- fetchAllRows sth
assertEqual "" [r1, r2] r
)
where r1 = [toSql "runReplace", iToSql 1, iToSql 1234, SqlString "testdata"]
r2 = [toSql "runReplace", iToSql 2, iToSql 2, SqlNull]
executeReplace = dbTestCase (\dbh ->
do sth <- prepare dbh "INSERT INTO hdbctest1 VALUES ('executeReplace',?,?,?)"
execute sth [iToSql 1, iToSql 1234, toSql "Foo"]
execute sth [SqlInt32 2, SqlNull, toSql "Bar"]
commit dbh
sth <- prepare dbh "SELECT * FROM hdbctest1 WHERE testname = ? ORDER BY testid"
execute sth [SqlString "executeReplace"]
r <- fetchAllRows sth
assertEqual "result"
[[toSql "executeReplace", iToSql 1, toSql "1234",
toSql "Foo"],
[toSql "executeReplace", iToSql 2, SqlNull,
toSql "Bar"]]
r
)
testExecuteMany = dbTestCase (\dbh ->
do sth <- prepare dbh "INSERT INTO hdbctest1 VALUES ('multi',?,?,?)"
executeMany sth rows
commit dbh
sth <- prepare dbh "SELECT testid, testint, testtext FROM hdbctest1 WHERE testname = 'multi'"
execute sth []
r <- fetchAllRows sth
assertEqual "" rows r
)
where rows = [map toSql ["1", "1234", "foo"],
map toSql ["2", "1341", "bar"],
[toSql "3", SqlNull, SqlNull]]
testFetchAllRows = dbTestCase (\dbh ->
do sth <- prepare dbh "INSERT INTO hdbctest1 VALUES ('sFetchAllRows', ?, NULL, NULL)"
executeMany sth rows
commit dbh
sth <- prepare dbh "SELECT testid FROM hdbctest1 WHERE testname = 'sFetchAllRows' ORDER BY testid"
execute sth []
results <- fetchAllRows sth
assertEqual "" rows results
)
where rows = map (\x -> [iToSql x]) [1..9]
testFetchAllRows' = dbTestCase (\dbh ->
do sth <- prepare dbh "INSERT INTO hdbctest1 VALUES ('sFetchAllRows2', ?, NULL, NULL)"
executeMany sth rows
commit dbh
sth <- prepare dbh "SELECT testid FROM hdbctest1 WHERE testname = 'sFetchAllRows2' ORDER BY testid"
execute sth []
results <- fetchAllRows' sth
assertEqual "" rows results
)
where rows = map (\x -> [iToSql x]) [1..9]
basicTransactions = dbTestCase (\dbh ->
do assertBool "Connected database does not support transactions; skipping transaction test" (dbTransactionSupport dbh)
sth <- prepare dbh "INSERT INTO hdbctest1 VALUES ('basicTransactions', ?, NULL, NULL)"
execute sth [iToSql 0]
commit dbh
qrysth <- prepare dbh "SELECT testid FROM hdbctest1 WHERE testname = 'basicTransactions' ORDER BY testid"
execute qrysth []
fetchAllRows qrysth >>= (assertEqual "initial commit" [[toSql "0"]])
-- Now try a rollback
executeMany sth rows
rollback dbh
execute qrysth []
fetchAllRows qrysth >>= (assertEqual "rollback" [[toSql "0"]])
-- Now try another commit
executeMany sth rows
commit dbh
execute qrysth []
fetchAllRows qrysth >>= (assertEqual "final commit" ([SqlString "0"]:rows))
)
where rows = map (\x -> [iToSql $ x]) [1..9]
testWithTransaction = dbTestCase (\dbh ->
do assertBool "Connected database does not support transactions; skipping transaction test" (dbTransactionSupport dbh)
sth <- prepare dbh "INSERT INTO hdbctest1 VALUES ('withTransaction', ?, NULL, NULL)"
execute sth [toSql "0"]
commit dbh
qrysth <- prepare dbh "SELECT testid FROM hdbctest1 WHERE testname = 'withTransaction' ORDER BY testid"
execute qrysth []
fetchAllRows qrysth >>= (assertEqual "initial commit" [[toSql "0"]])
-- Let's try a rollback.
catch (withTransaction dbh (\_ -> do executeMany sth rows
fail "Foo"))
(\(_ :: SomeException) -> return ())
execute qrysth []
fetchAllRows qrysth >>= (assertEqual "rollback" [[SqlString "0"]])
-- And now a commit.
withTransaction dbh (\_ -> executeMany sth rows)
execute qrysth []
fetchAllRows qrysth >>= (assertEqual "final commit" ([iToSql 0]:rows))
)
where rows = map (\x -> [iToSql x]) [1..9]
tests = TestList
[
TestLabel "openClosedb" openClosedb,
TestLabel "multiFinish" multiFinish,
TestLabel "basicQueries" basicQueries,
TestLabel "createTable" createTable,
TestLabel "runReplace" runReplace,
TestLabel "executeReplace" executeReplace,
TestLabel "executeMany" testExecuteMany,
TestLabel "fetchAllRows" testFetchAllRows,
TestLabel "fetchAllRows'" testFetchAllRows',
TestLabel "basicTransactions" basicTransactions,
TestLabel "withTransaction" testWithTransaction,
TestLabel "dropTable" dropTable
]
| null | https://raw.githubusercontent.com/hdbc/hdbc-odbc/06833d77799f16634d2038bcdc308c35d4752cdd/testsrc/Testbasics.hs | haskell | Now try a rollback
Now try another commit
Let's try a rollback.
And now a commit. | module Testbasics(tests) where
import Test.HUnit
import Database.HDBC
import TestUtils
import System.IO
import Control.Exception
openClosedb = sqlTestCase $
do dbh <- connectDB
disconnect dbh
multiFinish = dbTestCase (\dbh ->
do sth <- prepare dbh "SELECT 1 + 1"
r <- execute sth []
assertEqual "basic count" 0 r
finish sth
finish sth
finish sth
)
basicQueries = dbTestCase (\dbh ->
do sth <- prepare dbh "SELECT 1 + 1"
execute sth [] >>= (0 @=?)
r <- fetchAllRows sth
assertEqual "converted from" [["2"]] (map (map fromSql) r)
assertEqual "int32 compare" [[SqlInt32 2]] r
assertEqual "iToSql compare" [[iToSql 2]] r
assertEqual "num compare" [[toSql (2::Int)]] r
assertEqual "nToSql compare" [[nToSql (2::Int)]] r
assertEqual "string compare" [[SqlString "2"]] r
)
createTable = dbTestCase (\dbh ->
do run dbh "CREATE TABLE hdbctest1 (testname VARCHAR(20), testid INTEGER, testint INTEGER, testtext TEXT)" []
commit dbh
)
dropTable = dbTestCase (\dbh ->
do run dbh "DROP TABLE hdbctest1" []
commit dbh
)
runReplace = dbTestCase (\dbh ->
do r <- run dbh "INSERT INTO hdbctest1 VALUES (?, ?, ?, ?)" r1
assertEqual "insert retval" 1 r
run dbh "INSERT INTO hdbctest1 VALUES (?, ?, ?, ?)" r2
commit dbh
sth <- prepare dbh "SELECT * FROM hdbctest1 WHERE testname = 'runReplace' ORDER BY testid"
rv2 <- execute sth []
assertEqual "select retval" 0 rv2
r <- fetchAllRows sth
assertEqual "" [r1, r2] r
)
where r1 = [toSql "runReplace", iToSql 1, iToSql 1234, SqlString "testdata"]
r2 = [toSql "runReplace", iToSql 2, iToSql 2, SqlNull]
executeReplace = dbTestCase (\dbh ->
do sth <- prepare dbh "INSERT INTO hdbctest1 VALUES ('executeReplace',?,?,?)"
execute sth [iToSql 1, iToSql 1234, toSql "Foo"]
execute sth [SqlInt32 2, SqlNull, toSql "Bar"]
commit dbh
sth <- prepare dbh "SELECT * FROM hdbctest1 WHERE testname = ? ORDER BY testid"
execute sth [SqlString "executeReplace"]
r <- fetchAllRows sth
assertEqual "result"
[[toSql "executeReplace", iToSql 1, toSql "1234",
toSql "Foo"],
[toSql "executeReplace", iToSql 2, SqlNull,
toSql "Bar"]]
r
)
testExecuteMany = dbTestCase (\dbh ->
do sth <- prepare dbh "INSERT INTO hdbctest1 VALUES ('multi',?,?,?)"
executeMany sth rows
commit dbh
sth <- prepare dbh "SELECT testid, testint, testtext FROM hdbctest1 WHERE testname = 'multi'"
execute sth []
r <- fetchAllRows sth
assertEqual "" rows r
)
where rows = [map toSql ["1", "1234", "foo"],
map toSql ["2", "1341", "bar"],
[toSql "3", SqlNull, SqlNull]]
testFetchAllRows = dbTestCase (\dbh ->
do sth <- prepare dbh "INSERT INTO hdbctest1 VALUES ('sFetchAllRows', ?, NULL, NULL)"
executeMany sth rows
commit dbh
sth <- prepare dbh "SELECT testid FROM hdbctest1 WHERE testname = 'sFetchAllRows' ORDER BY testid"
execute sth []
results <- fetchAllRows sth
assertEqual "" rows results
)
where rows = map (\x -> [iToSql x]) [1..9]
testFetchAllRows' = dbTestCase (\dbh ->
do sth <- prepare dbh "INSERT INTO hdbctest1 VALUES ('sFetchAllRows2', ?, NULL, NULL)"
executeMany sth rows
commit dbh
sth <- prepare dbh "SELECT testid FROM hdbctest1 WHERE testname = 'sFetchAllRows2' ORDER BY testid"
execute sth []
results <- fetchAllRows' sth
assertEqual "" rows results
)
where rows = map (\x -> [iToSql x]) [1..9]
basicTransactions = dbTestCase (\dbh ->
do assertBool "Connected database does not support transactions; skipping transaction test" (dbTransactionSupport dbh)
sth <- prepare dbh "INSERT INTO hdbctest1 VALUES ('basicTransactions', ?, NULL, NULL)"
execute sth [iToSql 0]
commit dbh
qrysth <- prepare dbh "SELECT testid FROM hdbctest1 WHERE testname = 'basicTransactions' ORDER BY testid"
execute qrysth []
fetchAllRows qrysth >>= (assertEqual "initial commit" [[toSql "0"]])
executeMany sth rows
rollback dbh
execute qrysth []
fetchAllRows qrysth >>= (assertEqual "rollback" [[toSql "0"]])
executeMany sth rows
commit dbh
execute qrysth []
fetchAllRows qrysth >>= (assertEqual "final commit" ([SqlString "0"]:rows))
)
where rows = map (\x -> [iToSql $ x]) [1..9]
testWithTransaction = dbTestCase (\dbh ->
do assertBool "Connected database does not support transactions; skipping transaction test" (dbTransactionSupport dbh)
sth <- prepare dbh "INSERT INTO hdbctest1 VALUES ('withTransaction', ?, NULL, NULL)"
execute sth [toSql "0"]
commit dbh
qrysth <- prepare dbh "SELECT testid FROM hdbctest1 WHERE testname = 'withTransaction' ORDER BY testid"
execute qrysth []
fetchAllRows qrysth >>= (assertEqual "initial commit" [[toSql "0"]])
catch (withTransaction dbh (\_ -> do executeMany sth rows
fail "Foo"))
(\(_ :: SomeException) -> return ())
execute qrysth []
fetchAllRows qrysth >>= (assertEqual "rollback" [[SqlString "0"]])
withTransaction dbh (\_ -> executeMany sth rows)
execute qrysth []
fetchAllRows qrysth >>= (assertEqual "final commit" ([iToSql 0]:rows))
)
where rows = map (\x -> [iToSql x]) [1..9]
tests = TestList
[
TestLabel "openClosedb" openClosedb,
TestLabel "multiFinish" multiFinish,
TestLabel "basicQueries" basicQueries,
TestLabel "createTable" createTable,
TestLabel "runReplace" runReplace,
TestLabel "executeReplace" executeReplace,
TestLabel "executeMany" testExecuteMany,
TestLabel "fetchAllRows" testFetchAllRows,
TestLabel "fetchAllRows'" testFetchAllRows',
TestLabel "basicTransactions" basicTransactions,
TestLabel "withTransaction" testWithTransaction,
TestLabel "dropTable" dropTable
]
|
0a2449d969c92f4dd6f297bad3ca4c617a5be0143444f6790771c12e8d2370c3 | google/ormolu | transform-multi-line1.hs | # LANGUAGE TransformListComp #
foo' xs ys = [
(x,
y) |
x <- xs,
y <- ys,
First comment
Second comment
]
| null | https://raw.githubusercontent.com/google/ormolu/ffdf145bbdf917d54a3ef4951fc2655e35847ff0/data/examples/declaration/value/function/comprehension/transform-multi-line1.hs | haskell | # LANGUAGE TransformListComp #
foo' xs ys = [
(x,
y) |
x <- xs,
y <- ys,
First comment
Second comment
]
| |
6bfd515bc20055313f449ab7ba062b64b47ed3cc0969e315754a318919df1a32 | NoRedInk/haskell-libraries | Kafka.hs | {-# LANGUAGE GADTs #-}
| A module for creating great logs in code using .
module Log.Kafka
( emptyDetails,
Details,
topic,
partitionId,
key,
contents,
createTime,
logAppendTime,
processAttempt,
assignedPartitions,
pausedPartitions,
timeSinceLastRebalance,
requestId,
mkContents,
Contents,
)
where
import qualified Data.Aeson as Aeson
import qualified Data.Time.Clock as Clock
import qualified Platform
-- | A type describing a kafka message being processed by a consumer.
--
-- > emptyDetails
-- > { topic = Just "kafka-topic"
> , partitionId = Just 1
> , contents = Just ( " This message is a JSON string ! " )
-- > }
data Details = Details
{ -- | The topic name of the message.
topic :: Maybe Text,
-- | The partition id of the message.
partitionId :: Maybe Int,
-- | The key of the message (if it has one). If a key is provided by a
-- message producer it is used to determine the partition id, in such a way
-- that messages with the same key are guaranteed to end up in the same
-- partition.
key :: Maybe Text,
-- | The contents of the message.
contents :: Maybe Contents,
-- | The time at which this message was created by a producer.
-- Whether this property is available for a message depends on the
-- `log.message.timestamp.type` configuration option.
-- More context: #latency-measurement
createTime :: Maybe Clock.UTCTime,
-- | The time at which this message was added to a log by a broker.
-- Whether this property is available for a message depends on the
-- `log.message.timestamp.type` configuration option.
-- More context: #latency-measurement
logAppendTime :: Maybe Clock.UTCTime,
-- | Zero-based counter indicating the how-manyth time it is we're attemping
-- to process this message.
processAttempt :: Maybe Int,
-- | The amount of partitions for this topic the consumer is responsible
-- for.
assignedPartitions :: Maybe Int,
-- | The amount of partitions this consumer currently has paused, because
-- it's behing processing this partition.
pausedPartitions :: Maybe Int,
-- | Time since last rebalance in s
timeSinceLastRebalance :: Maybe Float,
-- | The request id of the http request that resulted in the enqueueing of
-- the message that is now being processed by a worker.
requestId :: Maybe Text
}
deriving (Generic)
-- | An empty details value to be modified by you.
emptyDetails :: Details
emptyDetails =
Details
{ topic = Nothing,
partitionId = Nothing,
key = Nothing,
contents = Nothing,
createTime = Nothing,
logAppendTime = Nothing,
processAttempt = Nothing,
assignedPartitions = Nothing,
pausedPartitions = Nothing,
timeSinceLastRebalance = Nothing,
requestId = Nothing
}
instance Aeson.ToJSON Details where
toJSON = Aeson.genericToJSON options
toEncoding = Aeson.genericToEncoding options
options :: Aeson.Options
options =
Aeson.defaultOptions
{ Aeson.fieldLabelModifier = Aeson.camelTo2 '_',
Aeson.omitNothingFields = True
}
instance Platform.TracingSpanDetails Details
| The contents of a message . Use ' mkContents ' to create one of these .
data Contents where
Contents :: (Aeson.ToJSON a) => a -> Contents
instance Aeson.ToJSON Contents where
toJSON (Contents x) = Aeson.toJSON x
toEncoding (Contents x) = Aeson.toEncoding x
-- | Create a 'Contents' value.
--
The type wrapped needs to have an . ToJSON instance , so we can present it
-- nicely in observability tools.
--
-- > data MyMessagePayload { counter :: Int } deriving (Generic)
> instance . ToJSON MyMessagePayload
-- >
> contents = mkContents MyMessagePayload { counter = 5 }
mkContents :: Aeson.ToJSON a => a -> Contents
mkContents = Contents
| null | https://raw.githubusercontent.com/NoRedInk/haskell-libraries/7af1e05549e09d519b08ab49dff956b5a97d4f7e/nri-observability/src/Log/Kafka.hs | haskell | # LANGUAGE GADTs #
| A type describing a kafka message being processed by a consumer.
> emptyDetails
> { topic = Just "kafka-topic"
> }
| The topic name of the message.
| The partition id of the message.
| The key of the message (if it has one). If a key is provided by a
message producer it is used to determine the partition id, in such a way
that messages with the same key are guaranteed to end up in the same
partition.
| The contents of the message.
| The time at which this message was created by a producer.
Whether this property is available for a message depends on the
`log.message.timestamp.type` configuration option.
More context: #latency-measurement
| The time at which this message was added to a log by a broker.
Whether this property is available for a message depends on the
`log.message.timestamp.type` configuration option.
More context: #latency-measurement
| Zero-based counter indicating the how-manyth time it is we're attemping
to process this message.
| The amount of partitions for this topic the consumer is responsible
for.
| The amount of partitions this consumer currently has paused, because
it's behing processing this partition.
| Time since last rebalance in s
| The request id of the http request that resulted in the enqueueing of
the message that is now being processed by a worker.
| An empty details value to be modified by you.
| Create a 'Contents' value.
nicely in observability tools.
> data MyMessagePayload { counter :: Int } deriving (Generic)
> |
| A module for creating great logs in code using .
module Log.Kafka
( emptyDetails,
Details,
topic,
partitionId,
key,
contents,
createTime,
logAppendTime,
processAttempt,
assignedPartitions,
pausedPartitions,
timeSinceLastRebalance,
requestId,
mkContents,
Contents,
)
where
import qualified Data.Aeson as Aeson
import qualified Data.Time.Clock as Clock
import qualified Platform
> , partitionId = Just 1
> , contents = Just ( " This message is a JSON string ! " )
data Details = Details
topic :: Maybe Text,
partitionId :: Maybe Int,
key :: Maybe Text,
contents :: Maybe Contents,
createTime :: Maybe Clock.UTCTime,
logAppendTime :: Maybe Clock.UTCTime,
processAttempt :: Maybe Int,
assignedPartitions :: Maybe Int,
pausedPartitions :: Maybe Int,
timeSinceLastRebalance :: Maybe Float,
requestId :: Maybe Text
}
deriving (Generic)
emptyDetails :: Details
emptyDetails =
Details
{ topic = Nothing,
partitionId = Nothing,
key = Nothing,
contents = Nothing,
createTime = Nothing,
logAppendTime = Nothing,
processAttempt = Nothing,
assignedPartitions = Nothing,
pausedPartitions = Nothing,
timeSinceLastRebalance = Nothing,
requestId = Nothing
}
instance Aeson.ToJSON Details where
toJSON = Aeson.genericToJSON options
toEncoding = Aeson.genericToEncoding options
options :: Aeson.Options
options =
Aeson.defaultOptions
{ Aeson.fieldLabelModifier = Aeson.camelTo2 '_',
Aeson.omitNothingFields = True
}
instance Platform.TracingSpanDetails Details
| The contents of a message . Use ' mkContents ' to create one of these .
data Contents where
Contents :: (Aeson.ToJSON a) => a -> Contents
instance Aeson.ToJSON Contents where
toJSON (Contents x) = Aeson.toJSON x
toEncoding (Contents x) = Aeson.toEncoding x
The type wrapped needs to have an . ToJSON instance , so we can present it
> instance . ToJSON MyMessagePayload
> contents = mkContents MyMessagePayload { counter = 5 }
mkContents :: Aeson.ToJSON a => a -> Contents
mkContents = Contents
|
156337f0f55286062146b3720d82d8fc1d36242a4999a1f86e158fa782911996 | haskell-lisp/liskell | GHCAPICompat.hs | -- -*-haskell-*-
-- ---------------------------------------------------------------------------
Liskell
--
-- Author(s):
-- ---------------------------------------------------------------------------
module GHCAPICompat where
import HsSyn
import RdrHsSyn
cHsModule a b c d e = HsModule a b c d e emptyHaddockModInfo Nothing
cmkClassDecl a b c d = mkClassDecl a b c d [] []
ccvBindsAndSigs x = let (binds, sigs, _, _ ) = cvBindsAndSigs x
in (binds, sigs)
cConDecl a b c d e f = ConDecl a b c d e f Nothing
cTySynonym a b c = TySynonym a b Nothing c
cmkTyData a (b, c, d) e f g = mkTyData a (b, c, d, Nothing) e f g
cInstDecl a b c = InstDecl a b c
| null | https://raw.githubusercontent.com/haskell-lisp/liskell/6fed2294bde7852fac7fd805bff6b7e4dde5aa81/GHCAPICompat.hs | haskell | -*-haskell-*-
---------------------------------------------------------------------------
Author(s):
--------------------------------------------------------------------------- | Liskell
module GHCAPICompat where
import HsSyn
import RdrHsSyn
cHsModule a b c d e = HsModule a b c d e emptyHaddockModInfo Nothing
cmkClassDecl a b c d = mkClassDecl a b c d [] []
ccvBindsAndSigs x = let (binds, sigs, _, _ ) = cvBindsAndSigs x
in (binds, sigs)
cConDecl a b c d e f = ConDecl a b c d e f Nothing
cTySynonym a b c = TySynonym a b Nothing c
cmkTyData a (b, c, d) e f g = mkTyData a (b, c, d, Nothing) e f g
cInstDecl a b c = InstDecl a b c
|
7354f9223fa91f426faf3c8e8980e8d4fca5e2f9169190217c2cd1ff8aa97c2a | rudolph-miller/cl-gists | user.lisp | (in-package :cl-user)
(defpackage cl-gists-test.user
(:use :cl
:prove
:cl-gists-test.init
:cl-gists)
(:import-from :cl-gists.user
:make-user))
(in-package :cl-gists-test.user)
(plan nil)
(subtest "user"
(let ((user (make-user :login "octocat"
:id 1
:avatar-url ""
:gravatar-id "abc"
:url ""
:html-url ""
:followers-url ""
:following-url "{/other_user}"
:gists-url "{/gist_id}"
:starred-url "{/owner}{/repo}"
:subscriptions-url ""
:organizations-url ""
:repos-url ""
:events-url "{/privacy}"
:received-events-url ""
:type "User"
:site-admin nil)))
(is-type user
'user
"can make-user.")
(test-user user)))
(finalize)
| null | https://raw.githubusercontent.com/rudolph-miller/cl-gists/bcf3687f0af8b2eb5acaeda5db94d67446e56daf/t/user.lisp | lisp | (in-package :cl-user)
(defpackage cl-gists-test.user
(:use :cl
:prove
:cl-gists-test.init
:cl-gists)
(:import-from :cl-gists.user
:make-user))
(in-package :cl-gists-test.user)
(plan nil)
(subtest "user"
(let ((user (make-user :login "octocat"
:id 1
:avatar-url ""
:gravatar-id "abc"
:url ""
:html-url ""
:followers-url ""
:following-url "{/other_user}"
:gists-url "{/gist_id}"
:starred-url "{/owner}{/repo}"
:subscriptions-url ""
:organizations-url ""
:repos-url ""
:events-url "{/privacy}"
:received-events-url ""
:type "User"
:site-admin nil)))
(is-type user
'user
"can make-user.")
(test-user user)))
(finalize)
| |
36e3f22cf6039cb640e414c53eb312c4690937340a0264090d3bdfe408e3192e | quan-nh/adventofcode | day8.clj | (ns adventofcode.day8
(:require [clojure.string :as str]))
(defn decrease [s]
(- (count s)
(-> s
(str/replace #"^\"" "")
(str/replace #"\"$" "")
(str/replace #"\\\"" "\"")
(str/replace #"\\\\" "q")
(str/replace #"\\x[0-9a-f]{2}" "q")
count)))
(defn increase [s]
(- (-> s
(str/replace #"^\"" "qqq")
(str/replace #"\"$" "qqq")
(str/replace #"\\\"" "qqqq")
(str/replace #"\\\\" "qqqq")
(str/replace #"\\x[0-9a-f]{2}" "qqqqq")
count)
(count s)))
(defn total [s f]
(->> (str/split s #"\n")
(map f)
(reduce +))) | null | https://raw.githubusercontent.com/quan-nh/adventofcode/98542934a121d1df85dd271d28fedc1f25b98206/src/adventofcode2015/day8.clj | clojure | (ns adventofcode.day8
(:require [clojure.string :as str]))
(defn decrease [s]
(- (count s)
(-> s
(str/replace #"^\"" "")
(str/replace #"\"$" "")
(str/replace #"\\\"" "\"")
(str/replace #"\\\\" "q")
(str/replace #"\\x[0-9a-f]{2}" "q")
count)))
(defn increase [s]
(- (-> s
(str/replace #"^\"" "qqq")
(str/replace #"\"$" "qqq")
(str/replace #"\\\"" "qqqq")
(str/replace #"\\\\" "qqqq")
(str/replace #"\\x[0-9a-f]{2}" "qqqqq")
count)
(count s)))
(defn total [s f]
(->> (str/split s #"\n")
(map f)
(reduce +))) | |
086c39c260a154f0dc83bddbe66282dd273e189f4c16b59268569b89d49f3b14 | google/mlir-hs | NativeSpec.hs | Copyright 2021 Google LLC
--
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- -2.0
--
-- Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
module MLIR.NativeSpec where
import Test.Hspec hiding (shouldContain, shouldStartWith)
import Text.RawString.QQ
import Data.Int
import Data.Maybe
import Data.Char (ord)
import qualified Data.ByteString as BS
import Control.Monad
import Foreign.Storable
import qualified MLIR.Native as MLIR
import qualified MLIR.Native.Pass as MLIR
import qualified MLIR.Native.ExecutionEngine as MLIR
exampleModuleStr :: BS.ByteString
exampleModuleStr = pack $ [r|module {
func.func @add(%arg0: i32) -> i32 attributes {llvm.emit_c_interface} {
%0 = arith.addi %arg0, %arg0 : i32
return %0 : i32
}
}
|]
-- XXX: Only valid for ASCII strings
pack :: String -> BS.ByteString
pack = BS.pack . fmap (fromIntegral . ord)
-- TODO(apaszke): Clean up
prepareContext :: IO MLIR.Context
prepareContext = do
ctx <- MLIR.createContext
MLIR.registerAllDialects ctx
return ctx
Helper matcher as shouldContain requires the same type both sides and here
-- we are predominantly checking if a BS contains some String.
shouldContain :: BS.ByteString -> BS.ByteString -> Expectation
shouldContain str sub = str `shouldSatisfy` BS.isInfixOf sub
shouldStartWith :: BS.ByteString -> BS.ByteString -> Expectation
shouldStartWith str sub = str `shouldSatisfy` BS.isPrefixOf sub
spec :: Spec
spec = do
describe "Basics" $ do
it "Can create a context" $ MLIR.withContext $ const $ return ()
it "Can load dialects" $ do
MLIR.withContext \ctx -> do
MLIR.registerAllDialects ctx
numDialects <- MLIR.getNumLoadedDialects ctx
numDialects `shouldSatisfy` (> 1)
describe "Modules" $ beforeAll prepareContext $ do
it "Can create an empty module" $ \ctx -> do
loc <- MLIR.getUnknownLocation ctx
m <- MLIR.createEmptyModule loc
str <- MLIR.showModule m
MLIR.destroyModule m
str `shouldBe` "module {\n}\n"
it "Can parse an example module" $ \ctx -> do
exampleModule <- liftM fromJust $
MLIR.withStringRef exampleModuleStr $ MLIR.parseModule ctx
exampleModuleStr' <- MLIR.showModule exampleModule
exampleModuleStr' `shouldBe` exampleModuleStr
MLIR.destroyModule exampleModule
it "Fails to parse garbage" $ \ctx -> do
maybeModule <- MLIR.withStringRef "asdf" $ MLIR.parseModule ctx
(isNothing maybeModule) `shouldBe` True
it "Can create an empty module with location" $ \ctx -> do
MLIR.withStringRef "test.cc" $ \nameRef -> do
loc <- MLIR.getFileLineColLocation ctx nameRef 21 45
m <- MLIR.createEmptyModule loc
str <- (MLIR.moduleAsOperation >=> MLIR.showOperationWithLocation) m
MLIR.destroyModule m
str `shouldContain` "loc(\"test.cc\":21:45)"
it "Can create an empty module with name location" $ \ctx -> do
MLIR.withStringRef "WhatIamCalled" $ \nameRef -> do
loc <- MLIR.getNameLocation ctx nameRef =<< MLIR.getUnknownLocation ctx
m <- MLIR.createEmptyModule loc
str <- (MLIR.moduleAsOperation >=> MLIR.showOperationWithLocation) m
MLIR.destroyModule m
str `shouldContain` "loc(\"WhatIamCalled\")"
it "Can extract first operation (Function) of module" $ \ctx -> do
exampleModule <- liftM fromJust $
MLIR.withStringRef exampleModuleStr $ MLIR.parseModule ctx
operations <- (MLIR.getModuleBody >=> MLIR.getBlockOperations) exampleModule
functionStr' <- MLIR.showOperation $ head operations
functionStr' `shouldStartWith` "func.func @add(%arg0: i32) -> i32"
MLIR.destroyModule exampleModule
it "Can show operations inside region of function" $ \ctx -> do
exampleModule <- liftM fromJust $
MLIR.withStringRef exampleModuleStr $ MLIR.parseModule ctx
operations <- (MLIR.getModuleBody >=> MLIR.getBlockOperations) exampleModule
regions <- MLIR.getOperationRegions (head operations)
blocks <- MLIR.getRegionBlocks (head regions)
ops <- MLIR.getBlockOperations $ head blocks
opStrs <- sequence $ map MLIR.showOperation ops
(BS.intercalate " ; " opStrs) `shouldBe` "%0 = arith.addi %arg0, %arg0 : i32 ; func.return %0 : i32"
MLIR.destroyModule exampleModule
describe "Evaluation engine" $ beforeAll prepareContext $ do
it "Can evaluate the example module" $ \ctx -> do
m <- liftM fromJust $
MLIR.withStringRef exampleModuleStr $ MLIR.parseModule ctx
lowerToLLVM m
result <- run @Int32 m "add" [MLIR.SomeStorable (123 :: Int32)]
result `shouldBe` 246
MLIR.destroyModule m
where
lowerToLLVM :: MLIR.Module -> IO ()
lowerToLLVM m = do
ctx <- MLIR.getContext m
MLIR.withPassManager ctx \pm -> do
MLIR.addConvertFuncToLLVMPass pm
MLIR.addConvertReconcileUnrealizedCastsPass pm
result <- MLIR.runPasses pm m
when (result == MLIR.Failure) $ error "Failed to lower to LLVM!"
run :: forall result. Storable result
=> MLIR.Module -> BS.ByteString -> [MLIR.SomeStorable] -> IO result
run m name args = do
MLIR.withExecutionEngine m \maybeEng -> do
let eng = fromMaybe (error "Failed to compile the module") maybeEng
MLIR.withStringRef name $ \nameRef -> do
maybeValue <- MLIR.executionEngineInvoke eng nameRef args
case maybeValue of
Just value -> return value
Nothing -> error "Failed to run the example program!"
main :: IO ()
main = hspec spec
| null | https://raw.githubusercontent.com/google/mlir-hs/a1d0a937e691e8d90cb4ab9197abd7da5772e03d/test/MLIR/NativeSpec.hs | haskell |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
XXX: Only valid for ASCII strings
TODO(apaszke): Clean up
we are predominantly checking if a BS contains some String. | Copyright 2021 Google LLC
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
module MLIR.NativeSpec where
import Test.Hspec hiding (shouldContain, shouldStartWith)
import Text.RawString.QQ
import Data.Int
import Data.Maybe
import Data.Char (ord)
import qualified Data.ByteString as BS
import Control.Monad
import Foreign.Storable
import qualified MLIR.Native as MLIR
import qualified MLIR.Native.Pass as MLIR
import qualified MLIR.Native.ExecutionEngine as MLIR
exampleModuleStr :: BS.ByteString
exampleModuleStr = pack $ [r|module {
func.func @add(%arg0: i32) -> i32 attributes {llvm.emit_c_interface} {
%0 = arith.addi %arg0, %arg0 : i32
return %0 : i32
}
}
|]
pack :: String -> BS.ByteString
pack = BS.pack . fmap (fromIntegral . ord)
prepareContext :: IO MLIR.Context
prepareContext = do
ctx <- MLIR.createContext
MLIR.registerAllDialects ctx
return ctx
Helper matcher as shouldContain requires the same type both sides and here
shouldContain :: BS.ByteString -> BS.ByteString -> Expectation
shouldContain str sub = str `shouldSatisfy` BS.isInfixOf sub
shouldStartWith :: BS.ByteString -> BS.ByteString -> Expectation
shouldStartWith str sub = str `shouldSatisfy` BS.isPrefixOf sub
spec :: Spec
spec = do
describe "Basics" $ do
it "Can create a context" $ MLIR.withContext $ const $ return ()
it "Can load dialects" $ do
MLIR.withContext \ctx -> do
MLIR.registerAllDialects ctx
numDialects <- MLIR.getNumLoadedDialects ctx
numDialects `shouldSatisfy` (> 1)
describe "Modules" $ beforeAll prepareContext $ do
it "Can create an empty module" $ \ctx -> do
loc <- MLIR.getUnknownLocation ctx
m <- MLIR.createEmptyModule loc
str <- MLIR.showModule m
MLIR.destroyModule m
str `shouldBe` "module {\n}\n"
it "Can parse an example module" $ \ctx -> do
exampleModule <- liftM fromJust $
MLIR.withStringRef exampleModuleStr $ MLIR.parseModule ctx
exampleModuleStr' <- MLIR.showModule exampleModule
exampleModuleStr' `shouldBe` exampleModuleStr
MLIR.destroyModule exampleModule
it "Fails to parse garbage" $ \ctx -> do
maybeModule <- MLIR.withStringRef "asdf" $ MLIR.parseModule ctx
(isNothing maybeModule) `shouldBe` True
it "Can create an empty module with location" $ \ctx -> do
MLIR.withStringRef "test.cc" $ \nameRef -> do
loc <- MLIR.getFileLineColLocation ctx nameRef 21 45
m <- MLIR.createEmptyModule loc
str <- (MLIR.moduleAsOperation >=> MLIR.showOperationWithLocation) m
MLIR.destroyModule m
str `shouldContain` "loc(\"test.cc\":21:45)"
it "Can create an empty module with name location" $ \ctx -> do
MLIR.withStringRef "WhatIamCalled" $ \nameRef -> do
loc <- MLIR.getNameLocation ctx nameRef =<< MLIR.getUnknownLocation ctx
m <- MLIR.createEmptyModule loc
str <- (MLIR.moduleAsOperation >=> MLIR.showOperationWithLocation) m
MLIR.destroyModule m
str `shouldContain` "loc(\"WhatIamCalled\")"
it "Can extract first operation (Function) of module" $ \ctx -> do
exampleModule <- liftM fromJust $
MLIR.withStringRef exampleModuleStr $ MLIR.parseModule ctx
operations <- (MLIR.getModuleBody >=> MLIR.getBlockOperations) exampleModule
functionStr' <- MLIR.showOperation $ head operations
functionStr' `shouldStartWith` "func.func @add(%arg0: i32) -> i32"
MLIR.destroyModule exampleModule
it "Can show operations inside region of function" $ \ctx -> do
exampleModule <- liftM fromJust $
MLIR.withStringRef exampleModuleStr $ MLIR.parseModule ctx
operations <- (MLIR.getModuleBody >=> MLIR.getBlockOperations) exampleModule
regions <- MLIR.getOperationRegions (head operations)
blocks <- MLIR.getRegionBlocks (head regions)
ops <- MLIR.getBlockOperations $ head blocks
opStrs <- sequence $ map MLIR.showOperation ops
(BS.intercalate " ; " opStrs) `shouldBe` "%0 = arith.addi %arg0, %arg0 : i32 ; func.return %0 : i32"
MLIR.destroyModule exampleModule
describe "Evaluation engine" $ beforeAll prepareContext $ do
it "Can evaluate the example module" $ \ctx -> do
m <- liftM fromJust $
MLIR.withStringRef exampleModuleStr $ MLIR.parseModule ctx
lowerToLLVM m
result <- run @Int32 m "add" [MLIR.SomeStorable (123 :: Int32)]
result `shouldBe` 246
MLIR.destroyModule m
where
lowerToLLVM :: MLIR.Module -> IO ()
lowerToLLVM m = do
ctx <- MLIR.getContext m
MLIR.withPassManager ctx \pm -> do
MLIR.addConvertFuncToLLVMPass pm
MLIR.addConvertReconcileUnrealizedCastsPass pm
result <- MLIR.runPasses pm m
when (result == MLIR.Failure) $ error "Failed to lower to LLVM!"
run :: forall result. Storable result
=> MLIR.Module -> BS.ByteString -> [MLIR.SomeStorable] -> IO result
run m name args = do
MLIR.withExecutionEngine m \maybeEng -> do
let eng = fromMaybe (error "Failed to compile the module") maybeEng
MLIR.withStringRef name $ \nameRef -> do
maybeValue <- MLIR.executionEngineInvoke eng nameRef args
case maybeValue of
Just value -> return value
Nothing -> error "Failed to run the example program!"
main :: IO ()
main = hspec spec
|
74e0c3a4a3878e071c21fdd35003b56e89b6c6b805aebbcf9c8e7d576c83fc63 | CodyReichert/qi | reload.lisp | Copyright ( C ) 2001 , 2003
Copyright ( C ) 2005
" the conditions and ENSURE - SSL - FUNCALL are by . "
;;;
;;; See LICENSE for details.
;;; We do this in an extra file so that it happens
;;; - after the asd file has been loaded, so that users can
;;; customize *libssl-pathname* between loading the asd and LOAD-OPing
;;; the actual sources
;;; - before ssl.lisp is loaded, which needs the library at compilation
;;; time on some implemenations
- but not every time ffi.lisp is re - loaded as would happen if we
put this directly into ffi.lisp
#+xcvb (module (:depends-on ("package")))
(in-package :cl+ssl)
OpenBSD needs to load libcrypto before libssl
#+openbsd
(progn
(cffi:define-foreign-library libcrypto
(:openbsd "libcrypto.so"))
(cffi:use-foreign-library libcrypto))
(cffi:define-foreign-library libssl
(:windows "libssl32.dll")
(:darwin (:or "libssl.dylib" "/usr/lib/libssl.dylib"))
(:solaris (:or "/lib/64/libssl.so"
"libssl.so.0.9.8" "libssl.so" "libssl.so.4"))
;; Unlike some other systems, OpenBSD linker,
;; when passed library name without versions at the end,
;; will locate the library with highest macro.minor version,
;; so we can just use just "libssl.so".
;; More info at -plus-ssl/cl-plus-ssl/pull/2.
(:openbsd "libssl.so")
((and :unix (not :cygwin)) (:or "libssl.so.1.0.2"
"libssl.so.1.0.1l"
"libssl.so.1.0.1e"
"libssl.so.1.0.1j"
"libssl.so.1.0.1"
"libssl.so.1.0.0q"
"libssl.so.1.0.0"
"libssl.so.0.9.8ze"
"libssl.so.0.9.8"
"libssl.so"
"libssl.so.4"))
(:cygwin "cygssl-1.0.0.dll")
(t (:default "libssl3")))
(cffi:use-foreign-library libssl)
(cffi:define-foreign-library libeay32
(:windows "libeay32.dll"))
(cffi:use-foreign-library libeay32)
| null | https://raw.githubusercontent.com/CodyReichert/qi/9cf6d31f40e19f4a7f60891ef7c8c0381ccac66f/dependencies/cl%2Bssl-latest/reload.lisp | lisp |
See LICENSE for details.
We do this in an extra file so that it happens
- after the asd file has been loaded, so that users can
customize *libssl-pathname* between loading the asd and LOAD-OPing
the actual sources
- before ssl.lisp is loaded, which needs the library at compilation
time on some implemenations
Unlike some other systems, OpenBSD linker,
when passed library name without versions at the end,
will locate the library with highest macro.minor version,
so we can just use just "libssl.so".
More info at -plus-ssl/cl-plus-ssl/pull/2. | Copyright ( C ) 2001 , 2003
Copyright ( C ) 2005
" the conditions and ENSURE - SSL - FUNCALL are by . "
- but not every time ffi.lisp is re - loaded as would happen if we
put this directly into ffi.lisp
#+xcvb (module (:depends-on ("package")))
(in-package :cl+ssl)
OpenBSD needs to load libcrypto before libssl
#+openbsd
(progn
(cffi:define-foreign-library libcrypto
(:openbsd "libcrypto.so"))
(cffi:use-foreign-library libcrypto))
(cffi:define-foreign-library libssl
(:windows "libssl32.dll")
(:darwin (:or "libssl.dylib" "/usr/lib/libssl.dylib"))
(:solaris (:or "/lib/64/libssl.so"
"libssl.so.0.9.8" "libssl.so" "libssl.so.4"))
(:openbsd "libssl.so")
((and :unix (not :cygwin)) (:or "libssl.so.1.0.2"
"libssl.so.1.0.1l"
"libssl.so.1.0.1e"
"libssl.so.1.0.1j"
"libssl.so.1.0.1"
"libssl.so.1.0.0q"
"libssl.so.1.0.0"
"libssl.so.0.9.8ze"
"libssl.so.0.9.8"
"libssl.so"
"libssl.so.4"))
(:cygwin "cygssl-1.0.0.dll")
(t (:default "libssl3")))
(cffi:use-foreign-library libssl)
(cffi:define-foreign-library libeay32
(:windows "libeay32.dll"))
(cffi:use-foreign-library libeay32)
|
630b1b4eb3be1c83718daad1d481fd0331b46b2fbebcbbdfd281dcddb50d48fe | albertoruiz/easyVision | crosscorr.hs | import Vision.GUI
import Util.Geometry ( Polyline(polyPts) )
import Image.Processing ( Image, size, crossCorr, copy, grayf, crossCorrLoc )
import Image.ROI ( topLeft, roi2poly )
sel = grayf
fun = crossCorr
main = run $ getTemplate
>>> observe "template" snd
>>> arr (\(x,t) -> ((sel x,t), fun t (sel x)))
>>> observe "cross correlation" snd
>>> observe "best match" (\((x,t),c) -> showMatch t x c)
getTemplate = clickKeep "define region and click to set template" f g Nothing
where
f r = setRegion r . sel
g = Draw . sel . fst
showMatch t img corr | v > 0.5 = Draw [Draw (copy img [(t,topLeft r)])
, color green . lineWd 3 $ p
, text p0 (show v)
]
| otherwise = Draw img
where
(v,r) = crossCorrLoc t corr
p = roi2poly (size img) r
p0 = last (polyPts p)
| null | https://raw.githubusercontent.com/albertoruiz/easyVision/26bb2efaa676c902cecb12047560a09377a969f2/projects/examples/crosscorr.hs | haskell | import Vision.GUI
import Util.Geometry ( Polyline(polyPts) )
import Image.Processing ( Image, size, crossCorr, copy, grayf, crossCorrLoc )
import Image.ROI ( topLeft, roi2poly )
sel = grayf
fun = crossCorr
main = run $ getTemplate
>>> observe "template" snd
>>> arr (\(x,t) -> ((sel x,t), fun t (sel x)))
>>> observe "cross correlation" snd
>>> observe "best match" (\((x,t),c) -> showMatch t x c)
getTemplate = clickKeep "define region and click to set template" f g Nothing
where
f r = setRegion r . sel
g = Draw . sel . fst
showMatch t img corr | v > 0.5 = Draw [Draw (copy img [(t,topLeft r)])
, color green . lineWd 3 $ p
, text p0 (show v)
]
| otherwise = Draw img
where
(v,r) = crossCorrLoc t corr
p = roi2poly (size img) r
p0 = last (polyPts p)
| |
104b1fe19bea64e8d85270b176fa8584e06ad23bea93f8092ce378872a0c1599 | MegaLoler/Music | instrument.lisp | (in-package :music)
(defclass instrument ()
()
(:documentation "Represents an instrument to play musical notes."))
(defclass adsr-instrument (instrument)
((adsr
:initarg :asdr
:initform (make-instance 'adsr)
:type adsr
:accessor adsr))
(:documentation "An instrument with an ADSR envelope."))
(defclass envelope ()
()
(:documentation "An envelope."))
(defgeneric on-value (envelope time)
(:documentation "Get the value of an envelope at a given time since a trigger event."))
(defgeneric off-value (envelope time)
(:documentation "Get the value of an envelope at a given time since an release event."))
(defclass adsr (envelope)
((attack
:initarg :attack
:initform 1/16
:accessor attack
:type (integer 0))
(decay
:initarg :decay
:initform 1/2
:accessor decay
:type (integer 0))
(sustain
:initarg :sustain
:initform 2/3
:accessor sustain
:type (integer 0 1))
(release
:initarg :release
:initform 1/2
:accessor release
:type (integer 0)))
(:documentation "An ADSR envelope."))
(defmethod on-value ((env adsr) time)
"Get the value of an ADSR envelope at a given time since it was triggered."
(cond ((< time (attack env))
(/ time (attack env)))
((< time (+ (attack env)
(decay env)))
(- 1 (* (/ (- time (attack env))
(decay env))
(- 1 (sustain env)))))
(t (sustain env))))
(defmethod off-value ((env adsr) time)
"Get the value of an ADSR envelope at a given time since it was released."
(max 0
(- (sustain env)
(* (sustain env)
(/ time (release env))))))
| null | https://raw.githubusercontent.com/MegaLoler/Music/6d69042f6ed98994d3f2ec474c71c569e0ee06be/src/instrument.lisp | lisp | (in-package :music)
(defclass instrument ()
()
(:documentation "Represents an instrument to play musical notes."))
(defclass adsr-instrument (instrument)
((adsr
:initarg :asdr
:initform (make-instance 'adsr)
:type adsr
:accessor adsr))
(:documentation "An instrument with an ADSR envelope."))
(defclass envelope ()
()
(:documentation "An envelope."))
(defgeneric on-value (envelope time)
(:documentation "Get the value of an envelope at a given time since a trigger event."))
(defgeneric off-value (envelope time)
(:documentation "Get the value of an envelope at a given time since an release event."))
(defclass adsr (envelope)
((attack
:initarg :attack
:initform 1/16
:accessor attack
:type (integer 0))
(decay
:initarg :decay
:initform 1/2
:accessor decay
:type (integer 0))
(sustain
:initarg :sustain
:initform 2/3
:accessor sustain
:type (integer 0 1))
(release
:initarg :release
:initform 1/2
:accessor release
:type (integer 0)))
(:documentation "An ADSR envelope."))
(defmethod on-value ((env adsr) time)
"Get the value of an ADSR envelope at a given time since it was triggered."
(cond ((< time (attack env))
(/ time (attack env)))
((< time (+ (attack env)
(decay env)))
(- 1 (* (/ (- time (attack env))
(decay env))
(- 1 (sustain env)))))
(t (sustain env))))
(defmethod off-value ((env adsr) time)
"Get the value of an ADSR envelope at a given time since it was released."
(max 0
(- (sustain env)
(* (sustain env)
(/ time (release env))))))
| |
cdecbe1b4d5439d87e276810bba652466e8c67085c33e1130ed64f731278f274 | ocaml/dune | seq.mli | type 'a t = 'a Stdlib.Seq.t
and +'a node = 'a Stdlib.Seq.node =
| Nil
| Cons of 'a * 'a t
val empty : 'a t
val return : 'a -> 'a t
val cons : 'a -> 'a t -> 'a t
val append : 'a t -> 'a t -> 'a t
val concat : 'a t t -> 'a t
val map : 'a t -> f:('a -> 'b) -> 'b t
val filter : 'a t -> f:('a -> bool) -> 'a t
val filter_map : 'a t -> f:('a -> 'b option) -> 'b t
val fold_left : 'b t -> init:'a -> f:('a -> 'b -> 'a) -> 'a
val iter : 'a t -> f:('a -> unit) -> unit
| null | https://raw.githubusercontent.com/ocaml/dune/714626f4d408e5c71c24ba91d0d520588702ec52/otherlibs/stdune/src/seq.mli | ocaml | type 'a t = 'a Stdlib.Seq.t
and +'a node = 'a Stdlib.Seq.node =
| Nil
| Cons of 'a * 'a t
val empty : 'a t
val return : 'a -> 'a t
val cons : 'a -> 'a t -> 'a t
val append : 'a t -> 'a t -> 'a t
val concat : 'a t t -> 'a t
val map : 'a t -> f:('a -> 'b) -> 'b t
val filter : 'a t -> f:('a -> bool) -> 'a t
val filter_map : 'a t -> f:('a -> 'b option) -> 'b t
val fold_left : 'b t -> init:'a -> f:('a -> 'b -> 'a) -> 'a
val iter : 'a t -> f:('a -> unit) -> unit
| |
0b82ba133c77c8ba8b7a93cbeec439de1f1d704449737288ff05341fb4b6376e | Tritlo/spectacular | Paths.hs | {-# LANGUAGE OverloadedStrings #-}
| Representations of paths in an FTA , data structures for
-- equality constraints over paths, algorithms for saturating these constraints
module Data.ECTA.Internal.Paths (
Path(.., EmptyPath, ConsPath)
, unPath
, path
, Pathable(..)
, pathHeadUnsafe
, pathTailUnsafe
, isSubpath
, isStrictSubpath
, substSubpath
, smallestNonempty
, largestNonempty
, getMaxNonemptyIndex
, PathTrie(..)
, isEmptyPathTrie
, isTerminalPathTrie
, toPathTrie
, fromPathTrie
, pathTrieDescend
, PathEClass(PathEClass, ..)
, unPathEClass
, hasSubsumingMember
, completedSubsumptionOrdering
, EqConstraints(.., EmptyConstraints)
, rawMkEqConstraints
, unsafeGetEclasses
, hasSubsumingMemberListBased
, isContradicting
, mkEqConstraints
, combineEqConstraints
, eqConstraintsDescend
, constraintsAreContradictory
, constraintsImply
, subsumptionOrderedEclasses
, unsafeSubsumptionOrderedEclasses
) where
import Prelude hiding ( round )
import Data.Function ( on )
import Data.Hashable ( Hashable )
import Data.List ( isSubsequenceOf, nub, sort, sortBy )
import Data.Monoid ( Any(..) )
import Data.Semigroup ( Max(..) )
import qualified Data.Text as Text
import Data.Vector ( Vector )
import qualified Data.Vector as Vector
import Data.Vector.Instances ()
import GHC.Exts ( inline )
import GHC.Generics ( Generic )
import Data.Equivalence.Monad ( runEquivM, equate, desc, classes )
import Data.Memoization ( MemoCacheTag(..), memo2 )
import Data.Text.Extended.Pretty
import Utility.Fixpoint
-------------------------------------------------------
-----------------------------------------------------------------------
--------------------------- Misc / general ----------------------------
-----------------------------------------------------------------------
flipOrdering :: Ordering -> Ordering
flipOrdering GT = LT
flipOrdering LT = GT
flipOrdering EQ = EQ
-----------------------------------------------------------------------
-------------------------------- Paths --------------------------------
-----------------------------------------------------------------------
data Path = Path ![Int]
deriving (Eq, Ord, Show, Generic)
unPath :: Path -> [Int]
unPath (Path p) = p
instance Hashable Path
instance Semigroup Path where
p1 <> p2 = Path (unPath p1 <> unPath p2)
instance Monoid Path where
mempty = EmptyPath
path :: [Int] -> Path
path = Path
{-# COMPLETE EmptyPath, ConsPath #-}
pattern EmptyPath :: Path
pattern EmptyPath = Path []
pattern ConsPath :: Int -> Path -> Path
pattern ConsPath p ps <- Path (p : (Path -> ps)) where
ConsPath p (Path ps) = Path (p : ps)
pathHeadUnsafe :: Path -> Int
pathHeadUnsafe (Path ps) = head ps
pathTailUnsafe :: Path -> Path
pathTailUnsafe (Path ps) = Path (tail ps)
instance Pretty Path where
pretty (Path ps) = Text.intercalate "." (map (Text.pack . show) ps)
isSubpath :: Path -> Path -> Bool
isSubpath EmptyPath _ = True
isSubpath (ConsPath p1 ps1) (ConsPath p2 ps2)
| p1 == p2 = isSubpath ps1 ps2
isSubpath _ _ = False
isStrictSubpath :: Path -> Path -> Bool
isStrictSubpath EmptyPath EmptyPath = False
isStrictSubpath EmptyPath _ = True
isStrictSubpath (ConsPath p1 ps1) (ConsPath p2 ps2)
| p1 == p2 = isStrictSubpath ps1 ps2
isStrictSubpath _ _ = False
-- | Read `substSubpath p1 p2 p3` as `[p1/p2]p3`
--
` substSubpath replacement toReplace target ` takes ` toReplace ` , a prefix of target ,
-- and returns a new path in which `toReplace` has been replaced by `replacement`.
--
Undefined if toReplace is not a prefix of target
substSubpath :: Path -> Path -> Path -> Path
substSubpath replacement toReplace target = Path $ (unPath replacement) ++ drop (length $ unPath toReplace) (unPath target)
--------------------------------------------------------------------------
---------------------------- Using paths ---------------------------------
--------------------------------------------------------------------------
-- | TODO: Should this be redone as a lens-library traversal?
| TODO : I am unhappy about this Emptyable design ; makes one question whether
this should be a typeclass at all . ( Terms / ECTAs differ in that
-- there is always an ECTA Node that represents the value at a path)
class Pathable t t' | t -> t' where
type Emptyable t'
getPath :: Path -> t -> Emptyable t'
getAllAtPath :: Path -> t -> [t']
modifyAtPath :: (t' -> t') -> Path -> t -> t
-----------------------------------------------------------------------
---------------------------- Path tries -------------------------------
-----------------------------------------------------------------------
---------------------
------- Generic-ish utility functions
---------------------
-- | Precondition: A nonempty cell exists
smallestNonempty :: Vector PathTrie -> Int
smallestNonempty v = Vector.ifoldr (\i pt oldMin -> case pt of
EmptyPathTrie -> oldMin
_ -> i)
maxBound
v
-- | Precondition: A nonempty cell exists
largestNonempty :: Vector PathTrie -> Int
largestNonempty v = Vector.ifoldl (\oldMin i pt -> case pt of
EmptyPathTrie -> oldMin
_ -> i)
minBound
v
getMaxNonemptyIndex :: PathTrie -> Maybe Int
getMaxNonemptyIndex EmptyPathTrie = Nothing
getMaxNonemptyIndex TerminalPathTrie = Nothing
getMaxNonemptyIndex (PathTrieSingleChild i _) = Just i
getMaxNonemptyIndex (PathTrie vec) = Just $ largestNonempty vec
---------------------
------- Path tries
---------------------
data PathTrie = EmptyPathTrie
| TerminalPathTrie
| PathTrieSingleChild {-# UNPACK #-} !Int !PathTrie
Invariant : Must have at least two nonempty nodes
deriving ( Eq, Show, Generic )
instance Hashable PathTrie
isEmptyPathTrie :: PathTrie -> Bool
isEmptyPathTrie EmptyPathTrie = True
isEmptyPathTrie _ = False
isTerminalPathTrie :: PathTrie -> Bool
isTerminalPathTrie TerminalPathTrie = True
isTerminalPathTrie _ = False
comparePathTrieVectors :: Vector PathTrie -> Vector PathTrie -> Ordering
comparePathTrieVectors v1 v2 = foldr (\i res -> let (t1, t2) = (v1 `Vector.unsafeIndex` i, v2 `Vector.unsafeIndex` i)
in case (isEmptyPathTrie t1, isEmptyPathTrie t2) of
(False, True) -> LT
(True, False) -> GT
(True, True) -> res
(False, False) -> case compare t1 t2 of
LT -> LT
GT -> GT
EQ -> res)
valueIfComponentsMatch
[0..(min (Vector.length v1) (Vector.length v2) - 1)]
where
valueIfComponentsMatch = compare (Vector.length v1) (Vector.length v2)
instance Ord PathTrie where
compare EmptyPathTrie EmptyPathTrie = EQ
compare EmptyPathTrie _ = LT
compare _ EmptyPathTrie = GT
compare TerminalPathTrie TerminalPathTrie = EQ
compare TerminalPathTrie _ = LT
compare _ TerminalPathTrie = GT
compare (PathTrieSingleChild i1 pt1) (PathTrieSingleChild i2 pt2)
| i1 < i2 = LT
| i1 > i2 = GT
| otherwise = compare pt1 pt2
compare (PathTrieSingleChild i1 pt1) (PathTrie v2) = let i2 = smallestNonempty v2 in
case compare i1 i2 of
LT -> LT
GT -> GT
EQ -> case compare pt1 (v2 `Vector.unsafeIndex` i2) of
LT -> LT
GT -> GT
v2 must have a second nonempty
compare a@(PathTrie _) b@(PathTrieSingleChild _ _) = flipOrdering $ inline compare b a -- TODO: Check whether this inlining is effective
compare (PathTrie v1) (PathTrie v2) = comparePathTrieVectors v1 v2
-- | Precondition: No path in the input is a subpath of another
toPathTrie :: [Path] -> PathTrie
toPathTrie [] = EmptyPathTrie
toPathTrie [EmptyPath] = TerminalPathTrie
toPathTrie ps = if all (\p -> pathHeadUnsafe p == pathHeadUnsafe (head ps)) ps then
PathTrieSingleChild (pathHeadUnsafe $ head ps) (toPathTrie $ map pathTailUnsafe ps)
else
PathTrie vec
where
maxIndex = getMax $ foldMap (Max . pathHeadUnsafe) ps
-- TODO: Inefficient to use this; many passes. over the list.
-- This may not be used in a place where perf matters, though
pathsStartingWith :: Int -> [Path] -> [Path]
pathsStartingWith i = concatMap (\case EmptyPath -> []
ConsPath j p -> if i == j then [p] else [])
vec = Vector.generate (maxIndex + 1) (\i -> toPathTrie $ pathsStartingWith i ps)
fromPathTrie :: PathTrie -> [Path]
fromPathTrie EmptyPathTrie = []
fromPathTrie TerminalPathTrie = [EmptyPath]
fromPathTrie (PathTrieSingleChild i pt) = map (ConsPath i) $ fromPathTrie pt
fromPathTrie (PathTrie v) = Vector.ifoldr (\i pt acc -> map (ConsPath i) (fromPathTrie pt) ++ acc) [] v
pathTrieDescend :: PathTrie -> Int -> PathTrie
pathTrieDescend EmptyPathTrie _ = EmptyPathTrie
pathTrieDescend TerminalPathTrie _ = EmptyPathTrie
pathTrieDescend (PathTrie v) i = if Vector.length v > i then
v `Vector.unsafeIndex` i
else
EmptyPathTrie
pathTrieDescend (PathTrieSingleChild j pt') i
| i == j = pt'
| otherwise = EmptyPathTrie
--------------------------------------------------------------------------
---------------------- Equality constraints over paths -------------------
--------------------------------------------------------------------------
---------------------------
---------- Path E-classes
---------------------------
data PathEClass = PathEClass' { getPathTrie :: !PathTrie
, getOrigPaths :: [Path] -- Intentionally lazy because
not available when calling ` mkPathEClassFromPathTrie `
}
deriving ( Show, Generic )
instance Eq PathEClass where
(==) = (==) `on` getPathTrie
instance Ord PathEClass where
compare = compare `on` getPathTrie
-- | TODO: This pattern (and the caching of the original path list) is a temporary affair
until we convert all clients of PathEclass to fully be based on tries
pattern PathEClass :: [Path] -> PathEClass
pattern PathEClass ps <- PathEClass' _ ps where
PathEClass ps = PathEClass' (toPathTrie $ nub ps) (sort $ nub ps)
unPathEClass :: PathEClass -> [Path]
unPathEClass (PathEClass' _ paths) = paths
instance Pretty PathEClass where
pretty pec = "{" <> (Text.intercalate "=" $ map pretty $ unPathEClass pec) <> "}"
instance Hashable PathEClass
mkPathEClassFromPathTrie :: PathTrie -> PathEClass
mkPathEClassFromPathTrie pt = PathEClass' pt (fromPathTrie pt)
pathEClassDescend :: PathEClass -> Int -> PathEClass
pathEClassDescend (PathEClass' pt _) i = mkPathEClassFromPathTrie $ pathTrieDescend pt i
hasSubsumingMember :: PathEClass -> PathEClass -> Bool
hasSubsumingMember pec1 pec2 = go (getPathTrie pec1) (getPathTrie pec2)
where
go :: PathTrie -> PathTrie -> Bool
go EmptyPathTrie _ = False
go _ EmptyPathTrie = False
go TerminalPathTrie TerminalPathTrie = False
go TerminalPathTrie _ = True
go _ TerminalPathTrie = False
go (PathTrieSingleChild i1 pt1) (PathTrieSingleChild i2 pt2) = if i1 == i2 then
go pt1 pt2
else
False
go (PathTrieSingleChild i1 pt1) (PathTrie v2) = case v2 Vector.!? i1 of
Nothing -> False
Just pt2 -> go pt1 pt2
go (PathTrie v1) (PathTrieSingleChild i2 pt2) = case v1 Vector.!? i2 of
Nothing -> False
Just pt1 -> go pt1 pt2
go (PathTrie v1) (PathTrie v2) = any (\i -> go (v1 `Vector.unsafeIndex` i) (v2 `Vector.unsafeIndex` i))
[0..(min (Vector.length v1) (Vector.length v2) - 1)]
-- | Extends the subsumption ordering to a total ordering by using the default lexicographic
-- comparison for incomparable elements.
| TODO : Optimization opportunity : Redundant work in the hasSubsumingMember calls
completedSubsumptionOrdering :: PathEClass -> PathEClass -> Ordering
completedSubsumptionOrdering pec1 pec2
| hasSubsumingMember pec1 pec2 = LT
| hasSubsumingMember pec2 pec1 = GT
-- This next line is some hacky magic. Basically, it means that for the
workload , where there is no subsumption ,
-- constraints will be evaluated in left-to-right order (instead of the default
-- right-to-left), which for that particular workload produces better
-- constraint-propagation
| otherwise = compare pec2 pec1
--------------------------------
---------- Equality constraints
--------------------------------
data EqConstraints = EqConstraints { getEclasses :: [PathEClass] -- ^ Must be sorted
}
| EqContradiction
deriving ( Eq, Ord, Show, Generic )
instance Hashable EqConstraints
instance Pretty EqConstraints where
pretty ecs = "{" <> (Text.intercalate "," $ map pretty (getEclasses ecs)) <> "}"
instance Semigroup EqConstraints where
(<>) = combineEqConstraints
instance Monoid EqConstraints where
mempty = EmptyConstraints
--------- Destructors and patterns
-- | Unsafe. Internal use only
ecsGetPaths :: EqConstraints -> [[Path]]
ecsGetPaths = map unPathEClass . getEclasses
pattern EmptyConstraints :: EqConstraints
pattern EmptyConstraints = EqConstraints []
unsafeGetEclasses :: EqConstraints -> [PathEClass]
unsafeGetEclasses EqContradiction = error "unsafeGetEclasses: Illegal argument 'EqContradiction'"
unsafeGetEclasses ecs = getEclasses ecs
rawMkEqConstraints :: [[Path]] -> EqConstraints
rawMkEqConstraints = EqConstraints . map PathEClass
constraintsAreContradictory :: EqConstraints -> Bool
constraintsAreContradictory = (== EqContradiction)
--------- Construction
hasSubsumingMemberListBased :: [Path] -> [Path] -> Bool
hasSubsumingMemberListBased ps1 ps2 = getAny $ mconcat [Any (isStrictSubpath p1 p2) | p1 <- ps1
, p2 <- ps2]
-- | The real contradiction condition is a cycle in the subsumption ordering.
-- But, after congruence closure, this will reduce into a self-cycle in the subsumption ordering.
--
TODO ; Prove this .
isContradicting :: [[Path]] -> Bool
isContradicting cs = any (\pec -> hasSubsumingMemberListBased pec pec) cs
-- Contains an inefficient implementation of the congruence closure algorithm
mkEqConstraints :: [[Path]] -> EqConstraints
mkEqConstraints initialConstraints = case completedConstraints of
Nothing -> EqContradiction
Just cs -> EqConstraints $ sort $ map PathEClass cs
where
removeTrivial :: (Eq a) => [[a]] -> [[a]]
removeTrivial = filter (\x -> length x > 1) . map nub
-- Reason for the extra "complete" in this line:
The first simplification done to the constraints is eclass - completion ,
-- to remove redundancy and shrink things before the very inefficienc
addCongruences step ( important in tests ; less so in realistic input ) .
-- The last simplification must also be completion, to give a valid value.
completedConstraints = fixMaybe round $ complete $ removeTrivial initialConstraints
round :: [[Path]] -> Maybe [[Path]]
round cs = let cs' = addCongruences cs
cs'' = complete cs'
in if isContradicting cs'' then
Nothing
else
Just cs''
addCongruences :: [[Path]] -> [[Path]]
addCongruences cs = cs ++ [map (\z -> substSubpath z x y) left | left <- cs, right <- cs, x <- left, y <- right, isStrictSubpath x y]
assertEquivs xs = mapM (\y -> equate (head xs) y) (tail xs)
complete :: (Ord a) => [[a]] -> [[a]]
complete initialClasses = runEquivM (:[]) (++) $ do
mapM_ assertEquivs initialClasses
mapM desc =<< classes
---------- Operations
combineEqConstraints :: EqConstraints -> EqConstraints -> EqConstraints
combineEqConstraints = memo2 (NameTag "combineEqConstraints") go
where
go EqContradiction _ = EqContradiction
go _ EqContradiction = EqContradiction
go ec1 ec2 = mkEqConstraints $ ecsGetPaths ec1 ++ ecsGetPaths ec2
# NOINLINE combineEqConstraints #
eqConstraintsDescend :: EqConstraints -> Int -> EqConstraints
eqConstraintsDescend EqContradiction _ = EqContradiction
eqConstraintsDescend ecs i = EqConstraints $ sort $ map (`pathEClassDescend` i) (getEclasses ecs)
A faster implementation would be : Merge the eclasses of both , run ( or at least do eclass completion ) ,
-- check result equal to ecs2
constraintsImply :: EqConstraints -> EqConstraints -> Bool
constraintsImply EqContradiction _ = True
constraintsImply _ EqContradiction = False
constraintsImply ecs1 ecs2 = all (\cs -> any (isSubsequenceOf cs) (ecsGetPaths ecs1)) (ecsGetPaths ecs2)
subsumptionOrderedEclasses :: EqConstraints -> Maybe [PathEClass]
subsumptionOrderedEclasses ecs = case ecs of
EqContradiction -> Nothing
EqConstraints pecs -> Just $ sortBy completedSubsumptionOrdering pecs
unsafeSubsumptionOrderedEclasses :: EqConstraints -> [PathEClass]
unsafeSubsumptionOrderedEclasses (EqConstraints pecs) = sortBy completedSubsumptionOrdering pecs
unsafeSubsumptionOrderedEclasses EqContradiction = error $ "unsafeSubsumptionOrderedEclasses: unexpected EqContradiction" | null | https://raw.githubusercontent.com/Tritlo/spectacular/00621f4f0f03e5d0afea3c730434a8842e473546/src/Data/ECTA/Internal/Paths.hs | haskell | # LANGUAGE OverloadedStrings #
equality constraints over paths, algorithms for saturating these constraints
-----------------------------------------------------
---------------------------------------------------------------------
------------------------- Misc / general ----------------------------
---------------------------------------------------------------------
---------------------------------------------------------------------
------------------------------ Paths --------------------------------
---------------------------------------------------------------------
# COMPLETE EmptyPath, ConsPath #
| Read `substSubpath p1 p2 p3` as `[p1/p2]p3`
and returns a new path in which `toReplace` has been replaced by `replacement`.
------------------------------------------------------------------------
-------------------------- Using paths ---------------------------------
------------------------------------------------------------------------
| TODO: Should this be redone as a lens-library traversal?
there is always an ECTA Node that represents the value at a path)
---------------------------------------------------------------------
-------------------------- Path tries -------------------------------
---------------------------------------------------------------------
-------------------
----- Generic-ish utility functions
-------------------
| Precondition: A nonempty cell exists
| Precondition: A nonempty cell exists
-------------------
----- Path tries
-------------------
# UNPACK #
TODO: Check whether this inlining is effective
| Precondition: No path in the input is a subpath of another
TODO: Inefficient to use this; many passes. over the list.
This may not be used in a place where perf matters, though
------------------------------------------------------------------------
-------------------- Equality constraints over paths -------------------
------------------------------------------------------------------------
-------------------------
-------- Path E-classes
-------------------------
Intentionally lazy because
| TODO: This pattern (and the caching of the original path list) is a temporary affair
| Extends the subsumption ordering to a total ordering by using the default lexicographic
comparison for incomparable elements.
This next line is some hacky magic. Basically, it means that for the
constraints will be evaluated in left-to-right order (instead of the default
right-to-left), which for that particular workload produces better
constraint-propagation
------------------------------
-------- Equality constraints
------------------------------
^ Must be sorted
------- Destructors and patterns
| Unsafe. Internal use only
------- Construction
| The real contradiction condition is a cycle in the subsumption ordering.
But, after congruence closure, this will reduce into a self-cycle in the subsumption ordering.
Contains an inefficient implementation of the congruence closure algorithm
Reason for the extra "complete" in this line:
to remove redundancy and shrink things before the very inefficienc
The last simplification must also be completion, to give a valid value.
-------- Operations
check result equal to ecs2 |
| Representations of paths in an FTA , data structures for
module Data.ECTA.Internal.Paths (
Path(.., EmptyPath, ConsPath)
, unPath
, path
, Pathable(..)
, pathHeadUnsafe
, pathTailUnsafe
, isSubpath
, isStrictSubpath
, substSubpath
, smallestNonempty
, largestNonempty
, getMaxNonemptyIndex
, PathTrie(..)
, isEmptyPathTrie
, isTerminalPathTrie
, toPathTrie
, fromPathTrie
, pathTrieDescend
, PathEClass(PathEClass, ..)
, unPathEClass
, hasSubsumingMember
, completedSubsumptionOrdering
, EqConstraints(.., EmptyConstraints)
, rawMkEqConstraints
, unsafeGetEclasses
, hasSubsumingMemberListBased
, isContradicting
, mkEqConstraints
, combineEqConstraints
, eqConstraintsDescend
, constraintsAreContradictory
, constraintsImply
, subsumptionOrderedEclasses
, unsafeSubsumptionOrderedEclasses
) where
import Prelude hiding ( round )
import Data.Function ( on )
import Data.Hashable ( Hashable )
import Data.List ( isSubsequenceOf, nub, sort, sortBy )
import Data.Monoid ( Any(..) )
import Data.Semigroup ( Max(..) )
import qualified Data.Text as Text
import Data.Vector ( Vector )
import qualified Data.Vector as Vector
import Data.Vector.Instances ()
import GHC.Exts ( inline )
import GHC.Generics ( Generic )
import Data.Equivalence.Monad ( runEquivM, equate, desc, classes )
import Data.Memoization ( MemoCacheTag(..), memo2 )
import Data.Text.Extended.Pretty
import Utility.Fixpoint
flipOrdering :: Ordering -> Ordering
flipOrdering GT = LT
flipOrdering LT = GT
flipOrdering EQ = EQ
data Path = Path ![Int]
deriving (Eq, Ord, Show, Generic)
unPath :: Path -> [Int]
unPath (Path p) = p
instance Hashable Path
instance Semigroup Path where
p1 <> p2 = Path (unPath p1 <> unPath p2)
instance Monoid Path where
mempty = EmptyPath
path :: [Int] -> Path
path = Path
pattern EmptyPath :: Path
pattern EmptyPath = Path []
pattern ConsPath :: Int -> Path -> Path
pattern ConsPath p ps <- Path (p : (Path -> ps)) where
ConsPath p (Path ps) = Path (p : ps)
pathHeadUnsafe :: Path -> Int
pathHeadUnsafe (Path ps) = head ps
pathTailUnsafe :: Path -> Path
pathTailUnsafe (Path ps) = Path (tail ps)
instance Pretty Path where
pretty (Path ps) = Text.intercalate "." (map (Text.pack . show) ps)
isSubpath :: Path -> Path -> Bool
isSubpath EmptyPath _ = True
isSubpath (ConsPath p1 ps1) (ConsPath p2 ps2)
| p1 == p2 = isSubpath ps1 ps2
isSubpath _ _ = False
isStrictSubpath :: Path -> Path -> Bool
isStrictSubpath EmptyPath EmptyPath = False
isStrictSubpath EmptyPath _ = True
isStrictSubpath (ConsPath p1 ps1) (ConsPath p2 ps2)
| p1 == p2 = isStrictSubpath ps1 ps2
isStrictSubpath _ _ = False
` substSubpath replacement toReplace target ` takes ` toReplace ` , a prefix of target ,
Undefined if toReplace is not a prefix of target
substSubpath :: Path -> Path -> Path -> Path
substSubpath replacement toReplace target = Path $ (unPath replacement) ++ drop (length $ unPath toReplace) (unPath target)
| TODO : I am unhappy about this Emptyable design ; makes one question whether
this should be a typeclass at all . ( Terms / ECTAs differ in that
class Pathable t t' | t -> t' where
type Emptyable t'
getPath :: Path -> t -> Emptyable t'
getAllAtPath :: Path -> t -> [t']
modifyAtPath :: (t' -> t') -> Path -> t -> t
-- | Index of the left-most entry of the vector that is not 'EmptyPathTrie';
--   'maxBound' when every entry is empty.
smallestNonempty :: Vector PathTrie -> Int
smallestNonempty = Vector.ifoldr pick maxBound
  where
    -- Right fold: a nonempty slot discards whatever was found further right,
    -- so the left-most nonempty index wins.
    pick _ EmptyPathTrie acc = acc
    pick i _ _ = i
-- | Index of the right-most entry of the vector that is not 'EmptyPathTrie';
--   'minBound' when every entry is empty.
largestNonempty :: Vector PathTrie -> Int
largestNonempty = Vector.ifoldl pick minBound
  where
    -- Left fold visits indices in increasing order, so the last nonempty
    -- index seen overwrites the accumulator.
    pick acc _ EmptyPathTrie = acc
    pick _ i _ = i
-- | Largest child index present in a path-trie node, or 'Nothing' for nodes
--   without children (empty or terminal).  For the vector case this delegates
--   to 'largestNonempty'.
getMaxNonemptyIndex :: PathTrie -> Maybe Int
getMaxNonemptyIndex EmptyPathTrie = Nothing
getMaxNonemptyIndex TerminalPathTrie = Nothing
getMaxNonemptyIndex (PathTrieSingleChild i _) = Just i
getMaxNonemptyIndex (PathTrie vec) = Just $ largestNonempty vec
data PathTrie = EmptyPathTrie
| TerminalPathTrie
Invariant : Must have at least two nonempty nodes
deriving ( Eq, Show, Generic )
instance Hashable PathTrie
isEmptyPathTrie :: PathTrie -> Bool
isEmptyPathTrie EmptyPathTrie = True
isEmptyPathTrie _ = False
isTerminalPathTrie :: PathTrie -> Bool
isTerminalPathTrie TerminalPathTrie = True
isTerminalPathTrie _ = False
comparePathTrieVectors :: Vector PathTrie -> Vector PathTrie -> Ordering
comparePathTrieVectors v1 v2 = foldr (\i res -> let (t1, t2) = (v1 `Vector.unsafeIndex` i, v2 `Vector.unsafeIndex` i)
in case (isEmptyPathTrie t1, isEmptyPathTrie t2) of
(False, True) -> LT
(True, False) -> GT
(True, True) -> res
(False, False) -> case compare t1 t2 of
LT -> LT
GT -> GT
EQ -> res)
valueIfComponentsMatch
[0..(min (Vector.length v1) (Vector.length v2) - 1)]
where
valueIfComponentsMatch = compare (Vector.length v1) (Vector.length v2)
instance Ord PathTrie where
compare EmptyPathTrie EmptyPathTrie = EQ
compare EmptyPathTrie _ = LT
compare _ EmptyPathTrie = GT
compare TerminalPathTrie TerminalPathTrie = EQ
compare TerminalPathTrie _ = LT
compare _ TerminalPathTrie = GT
compare (PathTrieSingleChild i1 pt1) (PathTrieSingleChild i2 pt2)
| i1 < i2 = LT
| i1 > i2 = GT
| otherwise = compare pt1 pt2
compare (PathTrieSingleChild i1 pt1) (PathTrie v2) = let i2 = smallestNonempty v2 in
case compare i1 i2 of
LT -> LT
GT -> GT
EQ -> case compare pt1 (v2 `Vector.unsafeIndex` i2) of
LT -> LT
GT -> GT
v2 must have a second nonempty
compare (PathTrie v1) (PathTrie v2) = comparePathTrieVectors v1 v2
toPathTrie :: [Path] -> PathTrie
toPathTrie [] = EmptyPathTrie
toPathTrie [EmptyPath] = TerminalPathTrie
toPathTrie ps = if all (\p -> pathHeadUnsafe p == pathHeadUnsafe (head ps)) ps then
PathTrieSingleChild (pathHeadUnsafe $ head ps) (toPathTrie $ map pathTailUnsafe ps)
else
PathTrie vec
where
maxIndex = getMax $ foldMap (Max . pathHeadUnsafe) ps
pathsStartingWith :: Int -> [Path] -> [Path]
pathsStartingWith i = concatMap (\case EmptyPath -> []
ConsPath j p -> if i == j then [p] else [])
vec = Vector.generate (maxIndex + 1) (\i -> toPathTrie $ pathsStartingWith i ps)
-- | Enumerate every 'Path' stored in a trie.  Inverse of 'toPathTrie':
--   each branch index is consed back onto the paths of its subtrie.
fromPathTrie :: PathTrie -> [Path]
fromPathTrie EmptyPathTrie = []
fromPathTrie TerminalPathTrie = [EmptyPath]
fromPathTrie (PathTrieSingleChild i pt) = map (ConsPath i) $ fromPathTrie pt
fromPathTrie (PathTrie v) = Vector.ifoldr (\i pt acc -> map (ConsPath i) (fromPathTrie pt) ++ acc) [] v
-- | Step from a path-trie node to its child at position @i@.
--   Nodes without a child there (empty or terminal nodes, a single-child node
--   keyed on a different index, or an index past the end of the child vector)
--   descend to 'EmptyPathTrie'.
pathTrieDescend :: PathTrie -> Int -> PathTrie
pathTrieDescend trie i = case trie of
  EmptyPathTrie -> EmptyPathTrie
  TerminalPathTrie -> EmptyPathTrie
  PathTrieSingleChild j child
    | i == j -> child
    | otherwise -> EmptyPathTrie
  PathTrie v
    | i < Vector.length v -> v `Vector.unsafeIndex` i
    | otherwise -> EmptyPathTrie
data PathEClass = PathEClass' { getPathTrie :: !PathTrie
not available when calling ` mkPathEClassFromPathTrie `
}
deriving ( Show, Generic )
instance Eq PathEClass where
(==) = (==) `on` getPathTrie
instance Ord PathEClass where
compare = compare `on` getPathTrie
-- until we convert all clients of PathEclass to fully be based on tries
pattern PathEClass :: [Path] -> PathEClass
pattern PathEClass ps <- PathEClass' _ ps where
PathEClass ps = PathEClass' (toPathTrie $ nub ps) (sort $ nub ps)
-- | The member paths of an eclass.  This is the cached list field of
--   'PathEClass'', kept sorted and duplicate-free by the 'PathEClass'
--   pattern / 'mkPathEClassFromPathTrie' constructors.
unPathEClass :: PathEClass -> [Path]
unPathEClass (PathEClass' _ paths) = paths
instance Pretty PathEClass where
pretty pec = "{" <> (Text.intercalate "=" $ map pretty $ unPathEClass pec) <> "}"
instance Hashable PathEClass
mkPathEClassFromPathTrie :: PathTrie -> PathEClass
mkPathEClassFromPathTrie pt = PathEClass' pt (fromPathTrie pt)
pathEClassDescend :: PathEClass -> Int -> PathEClass
pathEClassDescend (PathEClass' pt _) i = mkPathEClassFromPathTrie $ pathTrieDescend pt i
hasSubsumingMember :: PathEClass -> PathEClass -> Bool
hasSubsumingMember pec1 pec2 = go (getPathTrie pec1) (getPathTrie pec2)
where
go :: PathTrie -> PathTrie -> Bool
go EmptyPathTrie _ = False
go _ EmptyPathTrie = False
go TerminalPathTrie TerminalPathTrie = False
go TerminalPathTrie _ = True
go _ TerminalPathTrie = False
go (PathTrieSingleChild i1 pt1) (PathTrieSingleChild i2 pt2) = if i1 == i2 then
go pt1 pt2
else
False
go (PathTrieSingleChild i1 pt1) (PathTrie v2) = case v2 Vector.!? i1 of
Nothing -> False
Just pt2 -> go pt1 pt2
go (PathTrie v1) (PathTrieSingleChild i2 pt2) = case v1 Vector.!? i2 of
Nothing -> False
Just pt1 -> go pt1 pt2
go (PathTrie v1) (PathTrie v2) = any (\i -> go (v1 `Vector.unsafeIndex` i) (v2 `Vector.unsafeIndex` i))
[0..(min (Vector.length v1) (Vector.length v2) - 1)]
-- | TODO: Optimization opportunity: Redundant work in the hasSubsumingMember calls
completedSubsumptionOrdering :: PathEClass -> PathEClass -> Ordering
completedSubsumptionOrdering pec1 pec2
| hasSubsumingMember pec1 pec2 = LT
| hasSubsumingMember pec2 pec1 = GT
workload , where there is no subsumption ,
| otherwise = compare pec2 pec1
}
| EqContradiction
deriving ( Eq, Ord, Show, Generic )
instance Hashable EqConstraints
instance Pretty EqConstraints where
pretty ecs = "{" <> (Text.intercalate "," $ map pretty (getEclasses ecs)) <> "}"
instance Semigroup EqConstraints where
(<>) = combineEqConstraints
instance Monoid EqConstraints where
mempty = EmptyConstraints
ecsGetPaths :: EqConstraints -> [[Path]]
ecsGetPaths = map unPathEClass . getEclasses
pattern EmptyConstraints :: EqConstraints
pattern EmptyConstraints = EqConstraints []
unsafeGetEclasses :: EqConstraints -> [PathEClass]
unsafeGetEclasses EqContradiction = error "unsafeGetEclasses: Illegal argument 'EqContradiction'"
unsafeGetEclasses ecs = getEclasses ecs
rawMkEqConstraints :: [[Path]] -> EqConstraints
rawMkEqConstraints = EqConstraints . map PathEClass
constraintsAreContradictory :: EqConstraints -> Bool
constraintsAreContradictory = (== EqContradiction)
hasSubsumingMemberListBased :: [Path] -> [Path] -> Bool
hasSubsumingMemberListBased ps1 ps2 = getAny $ mconcat [Any (isStrictSubpath p1 p2) | p1 <- ps1
, p2 <- ps2]
-- TODO: Prove this.
isContradicting :: [[Path]] -> Bool
isContradicting cs = any (\pec -> hasSubsumingMemberListBased pec pec) cs
mkEqConstraints :: [[Path]] -> EqConstraints
mkEqConstraints initialConstraints = case completedConstraints of
Nothing -> EqContradiction
Just cs -> EqConstraints $ sort $ map PathEClass cs
where
removeTrivial :: (Eq a) => [[a]] -> [[a]]
removeTrivial = filter (\x -> length x > 1) . map nub
The first simplification done to the constraints is eclass - completion ,
addCongruences step ( important in tests ; less so in realistic input ) .
completedConstraints = fixMaybe round $ complete $ removeTrivial initialConstraints
round :: [[Path]] -> Maybe [[Path]]
round cs = let cs' = addCongruences cs
cs'' = complete cs'
in if isContradicting cs'' then
Nothing
else
Just cs''
addCongruences :: [[Path]] -> [[Path]]
addCongruences cs = cs ++ [map (\z -> substSubpath z x y) left | left <- cs, right <- cs, x <- left, y <- right, isStrictSubpath x y]
assertEquivs xs = mapM (\y -> equate (head xs) y) (tail xs)
complete :: (Ord a) => [[a]] -> [[a]]
complete initialClasses = runEquivM (:[]) (++) $ do
mapM_ assertEquivs initialClasses
mapM desc =<< classes
combineEqConstraints :: EqConstraints -> EqConstraints -> EqConstraints
combineEqConstraints = memo2 (NameTag "combineEqConstraints") go
where
go EqContradiction _ = EqContradiction
go _ EqContradiction = EqContradiction
go ec1 ec2 = mkEqConstraints $ ecsGetPaths ec1 ++ ecsGetPaths ec2
# NOINLINE combineEqConstraints #
eqConstraintsDescend :: EqConstraints -> Int -> EqConstraints
eqConstraintsDescend EqContradiction _ = EqContradiction
eqConstraintsDescend ecs i = EqConstraints $ sort $ map (`pathEClassDescend` i) (getEclasses ecs)
A faster implementation would be : Merge the eclasses of both , run ( or at least do eclass completion ) ,
constraintsImply :: EqConstraints -> EqConstraints -> Bool
constraintsImply EqContradiction _ = True
constraintsImply _ EqContradiction = False
constraintsImply ecs1 ecs2 = all (\cs -> any (isSubsequenceOf cs) (ecsGetPaths ecs1)) (ecsGetPaths ecs2)
subsumptionOrderedEclasses :: EqConstraints -> Maybe [PathEClass]
subsumptionOrderedEclasses ecs = case ecs of
EqContradiction -> Nothing
EqConstraints pecs -> Just $ sortBy completedSubsumptionOrdering pecs
unsafeSubsumptionOrderedEclasses :: EqConstraints -> [PathEClass]
unsafeSubsumptionOrderedEclasses (EqConstraints pecs) = sortBy completedSubsumptionOrdering pecs
unsafeSubsumptionOrderedEclasses EqContradiction = error $ "unsafeSubsumptionOrderedEclasses: unexpected EqContradiction" |
95aa730f744d2ad241f129e2c5061d5eaf32c36de870de01c5f786ffa08f59e3 | wireapp/wire-server | TeamsIntra.hs | -- This file is part of the Wire Server implementation.
--
Copyright ( C ) 2022 Wire Swiss GmbH < >
--
-- This program is free software: you can redistribute it and/or modify it under
the terms of the GNU Affero General Public License as published by the Free
Software Foundation , either version 3 of the License , or ( at your option ) any
-- later version.
--
-- This program is distributed in the hope that it will be useful, but WITHOUT
-- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-- FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
-- details.
--
You should have received a copy of the GNU Affero General Public License along
-- with this program. If not, see </>.
# LANGUAGE DataKinds #
# LANGUAGE DeriveGeneric #
{-# LANGUAGE OverloadedStrings #-}
module Wire.API.Routes.Internal.Galley.TeamsIntra
( TeamStatus (..),
TeamData (..),
TeamStatusUpdate (..),
TeamName (..),
GuardLegalholdPolicyConflicts (..),
)
where
import Control.Lens ((?~))
import Data.Aeson
import qualified Data.Currency as Currency
import Data.Json.Util
import qualified Data.Schema as S
import qualified Data.Swagger as Swagger
import Data.Time (UTCTime)
import Imports
import Test.QuickCheck.Arbitrary (Arbitrary)
import Wire.API.Message (UserClients)
import Wire.API.Team (Team)
import Wire.API.Team.LegalHold (LegalholdProtectee)
import Wire.Arbitrary (GenericUniform (..))
-- | Lifecycle state of a team, as carried over galley's internal API.
--   JSON encoding is defined by the 'S.ToSchema' instance below
--   ("active", "pending_delete", ...).
data TeamStatus
  = Active
  | PendingDelete
  | Deleted
  | Suspended
  | PendingActive
  deriving (Eq, Show, Generic)
  deriving (Arbitrary) via GenericUniform TeamStatus
  deriving (ToJSON, FromJSON, Swagger.ToSchema) via S.Schema TeamStatus
-- Enum wire format for 'TeamStatus'.
instance S.ToSchema TeamStatus where
  schema =
    -- NOTE(review): the schema title "Access" looks copy-pasted from another
    -- enum; "TeamStatus" seems intended.  Renaming only affects the generated
    -- swagger docs, not the JSON values, but confirm before changing.
    S.enum @Text "Access" $
      mconcat
        [ S.element "active" Active,
          S.element "pending_delete" PendingDelete,
          S.element "deleted" Deleted,
          S.element "suspended" Suspended,
          S.element "pending_active" PendingActive
        ]
data TeamData = TeamData
{ tdTeam :: !Team,
tdStatus :: !TeamStatus,
tdStatusTime :: !(Maybe UTCTime) -- This needs to be a Maybe due to backwards compatibility
}
deriving (Eq, Show, Generic)
deriving (Arbitrary) via GenericUniform TeamData
deriving (ToJSON, FromJSON, Swagger.ToSchema) via S.Schema TeamData
instance S.ToSchema TeamData where
schema =
S.object "TeamData" $
TeamData
<$> tdTeam S..= S.field "team" S.schema
<*> tdStatus S..= S.field "status" S.schema
<*> tdStatusTime S..= S.maybe_ (S.optField "status_time" utcTimeSchema)
data TeamStatusUpdate = TeamStatusUpdate
{ tuStatus :: !TeamStatus,
tuCurrency :: !(Maybe Currency.Alpha)
-- TODO: Remove Currency selection once billing supports currency changes after team creation
}
deriving (Eq, Show, Generic)
deriving (Arbitrary) via GenericUniform TeamStatusUpdate
deriving (ToJSON, FromJSON, Swagger.ToSchema) via S.Schema TeamStatusUpdate
instance S.ToSchema TeamStatusUpdate where
schema =
S.object "TeamStatusUpdate" $
TeamStatusUpdate
<$> tuStatus S..= S.field "status" S.schema
<*> tuCurrency S..= S.maybe_ (S.optField "currency" currencyAlphaSchema)
where
currencyAlphaSchema :: S.ValueSchema S.NamedSwaggerDoc Currency.Alpha
currencyAlphaSchema = S.mkSchema docs parseJSON (pure . toJSON)
where
docs =
S.swaggerDoc @Text
& Swagger.schema . Swagger.description ?~ "ISO 4217 alphabetic codes"
& Swagger.schema . Swagger.example ?~ "EUR"
newtype TeamName = TeamName
{tnName :: Text}
deriving (Eq, Show, Generic)
deriving (Arbitrary) via GenericUniform TeamName
deriving (ToJSON, FromJSON, Swagger.ToSchema) via S.Schema TeamName
instance S.ToSchema TeamName where
schema =
S.object "TeamName" $
TeamName
<$> tnName S..= S.field "name" S.schema
data GuardLegalholdPolicyConflicts = GuardLegalholdPolicyConflicts
{ glhProtectee :: LegalholdProtectee,
glhUserClients :: UserClients
}
deriving (Show, Eq, Generic)
deriving (Arbitrary) via (GenericUniform GuardLegalholdPolicyConflicts)
deriving (ToJSON, FromJSON, Swagger.ToSchema) via S.Schema GuardLegalholdPolicyConflicts
instance S.ToSchema GuardLegalholdPolicyConflicts where
schema =
S.object "GuardLegalholdPolicyConflicts" $
GuardLegalholdPolicyConflicts
<$> glhProtectee S..= S.field "glhProtectee" S.schema
<*> glhUserClients S..= S.field "glhUserClients" S.schema
| null | https://raw.githubusercontent.com/wireapp/wire-server/bd9d33f7650f1bc47700601a029df2672d4998a6/libs/wire-api/src/Wire/API/Routes/Internal/Galley/TeamsIntra.hs | haskell | This file is part of the Wire Server implementation.
This program is free software: you can redistribute it and/or modify it under
later version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
details.
with this program. If not, see </>.
# LANGUAGE OverloadedStrings #
This needs to be a Maybe due to backwards compatibility
TODO: Remove Currency selection once billing supports currency changes after team creation | Copyright ( C ) 2022 Wire Swiss GmbH < >
the terms of the GNU Affero General Public License as published by the Free
Software Foundation , either version 3 of the License , or ( at your option ) any
You should have received a copy of the GNU Affero General Public License along
# LANGUAGE DataKinds #
# LANGUAGE DeriveGeneric #
module Wire.API.Routes.Internal.Galley.TeamsIntra
( TeamStatus (..),
TeamData (..),
TeamStatusUpdate (..),
TeamName (..),
GuardLegalholdPolicyConflicts (..),
)
where
import Control.Lens ((?~))
import Data.Aeson
import qualified Data.Currency as Currency
import Data.Json.Util
import qualified Data.Schema as S
import qualified Data.Swagger as Swagger
import Data.Time (UTCTime)
import Imports
import Test.QuickCheck.Arbitrary (Arbitrary)
import Wire.API.Message (UserClients)
import Wire.API.Team (Team)
import Wire.API.Team.LegalHold (LegalholdProtectee)
import Wire.Arbitrary (GenericUniform (..))
data TeamStatus
= Active
| PendingDelete
| Deleted
| Suspended
| PendingActive
deriving (Eq, Show, Generic)
deriving (Arbitrary) via GenericUniform TeamStatus
deriving (ToJSON, FromJSON, Swagger.ToSchema) via S.Schema TeamStatus
instance S.ToSchema TeamStatus where
schema =
S.enum @Text "Access" $
mconcat
[ S.element "active" Active,
S.element "pending_delete" PendingDelete,
S.element "deleted" Deleted,
S.element "suspended" Suspended,
S.element "pending_active" PendingActive
]
data TeamData = TeamData
{ tdTeam :: !Team,
tdStatus :: !TeamStatus,
}
deriving (Eq, Show, Generic)
deriving (Arbitrary) via GenericUniform TeamData
deriving (ToJSON, FromJSON, Swagger.ToSchema) via S.Schema TeamData
instance S.ToSchema TeamData where
schema =
S.object "TeamData" $
TeamData
<$> tdTeam S..= S.field "team" S.schema
<*> tdStatus S..= S.field "status" S.schema
<*> tdStatusTime S..= S.maybe_ (S.optField "status_time" utcTimeSchema)
data TeamStatusUpdate = TeamStatusUpdate
{ tuStatus :: !TeamStatus,
tuCurrency :: !(Maybe Currency.Alpha)
}
deriving (Eq, Show, Generic)
deriving (Arbitrary) via GenericUniform TeamStatusUpdate
deriving (ToJSON, FromJSON, Swagger.ToSchema) via S.Schema TeamStatusUpdate
instance S.ToSchema TeamStatusUpdate where
schema =
S.object "TeamStatusUpdate" $
TeamStatusUpdate
<$> tuStatus S..= S.field "status" S.schema
<*> tuCurrency S..= S.maybe_ (S.optField "currency" currencyAlphaSchema)
where
currencyAlphaSchema :: S.ValueSchema S.NamedSwaggerDoc Currency.Alpha
currencyAlphaSchema = S.mkSchema docs parseJSON (pure . toJSON)
where
docs =
S.swaggerDoc @Text
& Swagger.schema . Swagger.description ?~ "ISO 4217 alphabetic codes"
& Swagger.schema . Swagger.example ?~ "EUR"
newtype TeamName = TeamName
{tnName :: Text}
deriving (Eq, Show, Generic)
deriving (Arbitrary) via GenericUniform TeamName
deriving (ToJSON, FromJSON, Swagger.ToSchema) via S.Schema TeamName
instance S.ToSchema TeamName where
schema =
S.object "TeamName" $
TeamName
<$> tnName S..= S.field "name" S.schema
data GuardLegalholdPolicyConflicts = GuardLegalholdPolicyConflicts
{ glhProtectee :: LegalholdProtectee,
glhUserClients :: UserClients
}
deriving (Show, Eq, Generic)
deriving (Arbitrary) via (GenericUniform GuardLegalholdPolicyConflicts)
deriving (ToJSON, FromJSON, Swagger.ToSchema) via S.Schema GuardLegalholdPolicyConflicts
instance S.ToSchema GuardLegalholdPolicyConflicts where
schema =
S.object "GuardLegalholdPolicyConflicts" $
GuardLegalholdPolicyConflicts
<$> glhProtectee S..= S.field "glhProtectee" S.schema
<*> glhUserClients S..= S.field "glhUserClients" S.schema
|
3b44c2fb5afc7ea367072bf5738e92c62f21fd5f6dbd95170adfd1964754832d | flosell/lambdacd | dev.cljs | (ns ^:figwheel-no-load lambdacd.dev
(:require [lambdacd.ui-core :as core]
[figwheel.client :as figwheel :include-macros true]
[weasel.repl :as weasel]
[lambdacd.testutils :refer [path]]
[lambdacd.styleguide :as styleguide]
[reagent.core :as r]))
(enable-console-print!)
(figwheel/watch-and-reload
:websocket-url "ws:3449/figwheel-ws"
:jsload-callback (fn []
(r/force-update-all)))
(defn- contains
  "True when `substr` occurs anywhere in the string `s` (Java indexOf interop)."
  [s substr]
  (<= 0 (.indexOf s substr)))
(defn initialize-app []
(core/init!))
(if (contains (path) "styleguide")
(styleguide/initialize-styleguide)
(initialize-app))
| null | https://raw.githubusercontent.com/flosell/lambdacd/e9ba3cebb2d5f0070a2e0e1e08fc85fc99ee7135/env/dev/cljs/lambdacd/dev.cljs | clojure | (ns ^:figwheel-no-load lambdacd.dev
(:require [lambdacd.ui-core :as core]
[figwheel.client :as figwheel :include-macros true]
[weasel.repl :as weasel]
[lambdacd.testutils :refer [path]]
[lambdacd.styleguide :as styleguide]
[reagent.core :as r]))
(enable-console-print!)
(figwheel/watch-and-reload
:websocket-url "ws:3449/figwheel-ws"
:jsload-callback (fn []
(r/force-update-all)))
(defn- contains [s substr]
(not= -1 (.indexOf s substr)))
(defn initialize-app []
(core/init!))
(if (contains (path) "styleguide")
(styleguide/initialize-styleguide)
(initialize-app))
| |
0c67a29f097bd68b98bab8ba715f645cd5a44334853c2678a6eb2a7f6b4a2e4d | eeng/mercurius | pub_sub.clj | (ns mercurius.core.adapters.messaging.pub-sub
(:require [clojure.spec.alpha :as s]))
(defprotocol PubSub
(publish [this topic message]
"Sends the `message` to the specified `topic` (string).")
(subscribe [this topic-pattern opts]
"Allows to be notified when a message is sent to a topic matching `topic-pattern`.
`opts` should contain an :on-message callback.
Returns a subscription id that may be used later to unsubscribe.
Multiple subscribers to the same `topic-pattern` are allowed.")
(unsubscribe [this subscription]))
(s/fdef publish
:args (s/cat :pubsub any? :topic string? :message any?))
| null | https://raw.githubusercontent.com/eeng/mercurius/f83778ddde99aa13692e4fe2e70b2e9dc2fd70e9/src/mercurius/core/adapters/messaging/pub_sub.clj | clojure | (ns mercurius.core.adapters.messaging.pub-sub
(:require [clojure.spec.alpha :as s]))
(defprotocol PubSub
(publish [this topic message]
"Sends the `message` to the specified `topic` (string).")
(subscribe [this topic-pattern opts]
"Allows to be notified when a message is sent to a topic matching `topic-pattern`.
`opts` should contain an :on-message callback.
Returns a subscription id that may be used later to unsubscribe.
Multiple subscribers to the same `topic-pattern` are allowed.")
(unsubscribe [this subscription]))
(s/fdef publish
:args (s/cat :pubsub any? :topic string? :message any?))
| |
0f2175f4648755ebe3b663348024a2b011945e8b5b284261df7ddca9089fad70 | unisonweb/unison | UserInfo.hs | module Unison.Auth.UserInfo where
import qualified Data.Aeson as Aeson
import qualified Data.Aeson.Types as Aeson
import qualified Data.ByteString.Lazy.Char8 as BL
import qualified Data.Text as Text
import qualified Data.Text.Encoding as Text
import qualified Network.HTTP.Client as HTTP
import qualified Network.HTTP.Client.TLS as HTTP
import Unison.Auth.Types
import Unison.Prelude
-- | Get user info for an authenticated user.
-- Issues a GET against the userinfo endpoint from the OIDC discovery
-- document, passing the access token as a Bearer authorization header.
getUserInfo :: (MonadIO m) => DiscoveryDoc -> AccessToken -> m (Either CredentialFailure UserInfo)
getUserInfo (DiscoveryDoc {userInfoEndpoint}) accessToken = liftIO $ do
  unauthenticatedHttpClient <- HTTP.getGlobalManager
  req <- HTTP.requestFromURI userInfoEndpoint <&> HTTP.applyBearerAuth (Text.encodeUtf8 accessToken)
  resp <- HTTP.httpLbs req unauthenticatedHttpClient
  -- NOTE(review): the HTTP status code is not inspected here; a non-2xx body
  -- would surface as a decode failure below — confirm that is intended.
  case decodeUserInfo (HTTP.responseBody resp) of
    Left err -> pure . Left $ FailedToFetchUserInfo userInfoEndpoint (Text.pack err)
    Right userInfo -> pure . Right $ userInfo
-- | Parse the userinfo JSON body into a 'UserInfo'.
--   Requires an object with "sub" (mapped to 'userId') and "handle";
--   "name" is optional.  'Left' carries the aeson error string.
decodeUserInfo :: BL.ByteString -> Either String UserInfo
decodeUserInfo bs = do
  obj <- Aeson.eitherDecode bs
  flip Aeson.parseEither obj $
    Aeson.withObject "UserInfo" $ \o -> do
      userId <- o Aeson..: "sub"
      name <- o Aeson..:? "name"
      handle <- o Aeson..: "handle"
      pure
        UserInfo
          { userId,
            name,
            handle
          }
| null | https://raw.githubusercontent.com/unisonweb/unison/69d27be69621072848f7f82ce401ffea311c7a31/unison-cli/src/Unison/Auth/UserInfo.hs | haskell | | Get user info for an authenticated user. | module Unison.Auth.UserInfo where
import qualified Data.Aeson as Aeson
import qualified Data.Aeson.Types as Aeson
import qualified Data.ByteString.Lazy.Char8 as BL
import qualified Data.Text as Text
import qualified Data.Text.Encoding as Text
import qualified Network.HTTP.Client as HTTP
import qualified Network.HTTP.Client.TLS as HTTP
import Unison.Auth.Types
import Unison.Prelude
getUserInfo :: (MonadIO m) => DiscoveryDoc -> AccessToken -> m (Either CredentialFailure UserInfo)
getUserInfo (DiscoveryDoc {userInfoEndpoint}) accessToken = liftIO $ do
unauthenticatedHttpClient <- HTTP.getGlobalManager
req <- HTTP.requestFromURI userInfoEndpoint <&> HTTP.applyBearerAuth (Text.encodeUtf8 accessToken)
resp <- HTTP.httpLbs req unauthenticatedHttpClient
case decodeUserInfo (HTTP.responseBody resp) of
Left err -> pure . Left $ FailedToFetchUserInfo userInfoEndpoint (Text.pack err)
Right userInfo -> pure . Right $ userInfo
decodeUserInfo :: BL.ByteString -> Either String UserInfo
decodeUserInfo bs = do
obj <- Aeson.eitherDecode bs
flip Aeson.parseEither obj $
Aeson.withObject "UserInfo" $ \o -> do
userId <- o Aeson..: "sub"
name <- o Aeson..:? "name"
handle <- o Aeson..: "handle"
pure
UserInfo
{ userId,
name,
handle
}
|
87869f4be441e171608210a2345a9418117e86ad82502a4f73548a5730f7fa91 | aiju/lilith | scope.ml | module H = Hashtbl
open Ast
module Def = struct type t = Symbol.t * Symbol.t let compare = compare end
module Sd = Set.Make(Def)
type blocktype = BSeq | BFix | BLet
type block = {
bid: int;
typ: blocktype;
mutable ast: meta ast list;
up: block option;
mutable sub: block list;
mutable next: block list;
mutable prev: block list;
mutable defs: Sd.t;
mutable scopein: Sd.t;
mutable scopeout: Sd.t
}
and meta = {
id: int;
block: block;
repl: Symbol.t option
}
let idalloc = Util.idAllocator ()
let bidalloc = Util.idAllocator()
(* Allocate a fresh block of kind [typ], optionally nested inside [up].
   The block starts with no AST nodes, no CFG edges, no definitions and empty
   scope sets; when a parent is given, the new block is also registered in the
   parent's [sub] list. *)
let newblock typ up =
  let b = {
    ast=[];
    bid=bidalloc();
    next=[];
    prev=[];
    up;
    typ;
    sub=[];
    defs=Sd.empty;
    scopein=Sd.empty;
    scopeout=Sd.empty
  } in
  (match up with
  | Some b' -> b'.sub <- b::b'.sub
  | None -> ());
  b
let addast b s = b.ast <- s::b.ast
(* Add control-flow edges from every block in [bl] to every block in [bl']
   (cartesian product), keeping the [next]/[prev] adjacency lists in sync. *)
let edge bl bl' = bl |> List.iter (fun b -> bl' |> List.iter (fun b' ->
  b.next <- b'::b.next;
  b'.prev <- b::b'.prev))
let rec processLval up colon lhs =
let node x = let t' = {p=x; pos=lhs.pos; meta={id=idalloc(); block=up; repl=None}} in addast up t'; t' in
match lhs.p with
| TypeIs(a, Some b) ->
let a' = processLval up true a in
let (b', _, _) = blockify up b in
node (TypeIs(a', Some b'))
| TypeIs(a, None) ->
node (TypeIs(processLval up true a, None))
| Sym(s) ->
if colon then (
let temp = Symbol.temp ((Symbol.name s) ^ "$") in
up.defs <- Sd.add (s, temp) up.defs;
let t' = {p=Sym s; pos=lhs.pos; meta={id=idalloc(); block=up; repl=Some temp}} in
addast up t';
t'
)else
node (Sym s)
| Un(OpVar, a) -> node (Un(OpVar, processLval up colon a))
| Tuple l -> node (Tuple(List.map (processLval up colon) l))
| _ -> Util.error(lhs.pos) ("invalid lval "^(Vcg.astname lhs))
and blockify up t : meta ast * block list * block list =
let node x = let t' = {p=x; pos=t.pos; meta={id=idalloc(); block=up; repl=None}} in addast up t'; t' in
let h x = blockify (newblock BSeq (Some up)) x in
match t.p with
| Seq (_::_ as l) ->
let l' = List.map h l in
Util.iterAdj (fun (_, _, a) (_, b, _) -> edge a b) l';
(node (Seq (List.map (fun (a, _, _) -> a) l')), [up], [up])
| Fix l ->
let b = newblock BFix (Some up) in
let l' = List.map (fun x -> let (e, _, _) = blockify b x in e) l in
(node (Fix l'), [up], [up])
| Lambda(a,b) ->
let block = newblock BLet (Some up) in
let lhsblock = newblock BSeq (Some block) in
let a' = processLval lhsblock true a
and (b', bin, _) = blockify (newblock BSeq (Some block)) b in
edge [lhsblock] bin;
(node (Lambda (a', b')), [up], [up])
| Let(a,Some b) ->
let lhs = newblock BLet (Some up) in
let (a', _, bout) = blockify lhs a
and (b', bin, _) = h b in
edge bout bin;
(node (Let(a', Some b')), [up], [up])
| Let(a,None) ->
let block = newblock BLet up.up in
let (a', bin, bout) = blockify block a in
edge bout [up];
(node (Let(a', None)), bin, [up])
| If(a,b,c) ->
let (a', ain, aout) = h a and
(b', bin, bout) = h b and
(c', cin, cout) = h c in
edge aout bin;
edge aout cin;
(node (If(a', b', c')), [up], [up])
| While(a, b) ->
let (a', ain, aout) = h a and
(b', bin, bout) = h b in
edge aout bin;
edge bout ain;
(node (While(a', b')), [up], [up])
| DoWhile(a, b) ->
let (a', ain, aout) = h a and
(b', bin, bout) = h b in
edge aout bin;
edge bout ain;
(node (DoWhile(a', b')), [up], [up])
| For(a, b, c, d) ->
let (a', ain, aout) = h a and
(b', bin, bout) = h b and
(c', cin, cout) = h c and
(d', din, dout) = h d in
edge aout bin;
edge bin din;
edge dout cin;
edge cout bin;
(node (For(a', b', c', d')), [up], [up])
| Assign(a, b) ->
let a' = processLval up false a and
(b', _, _) = blockify up b in
(node (Assign(a', b')), [up], [up])
| Seq []
| Bin(_, _, _)
| Un(_, _)
| Sym _
| IntLit _
| TypeLit _
| Call(_, _)
| Index(_, _)
| Array(_)
| Tuple(_)
| TypeIs(_, _) ->
let f x =
let (e, _, _) = blockify up x in
e in
(node (Astutil.map f t), [up], [up])
let propagate b =
let blocks = (
let ret = ref [b] in
let rec findblocks b =
ret := b.sub @ !ret;
List.iter findblocks b.sub
in findblocks b;
!ret
) in
let changed = ref true in
let chCmp a b =
(if not (Sd.equal a b) then changed := true);
b in
let chUnion a b = chCmp a (Sd.union a b) in
while !changed do
changed := false;
blocks |> List.iter (fun b ->
b.sub |> List.iter (fun b' ->
if b'.typ <> BLet then
b.defs <- chUnion b.defs b'.defs
);
(match b.prev, b.up with
| [], Some b' ->
b.scopein <- b'.scopein
| _ ->
let s = List.fold_left (fun s b' -> Sd.union s b'.scopeout) Sd.empty b.prev in
let s' = (if b.typ = BFix then Sd.union s b.defs else Sd.diff s b.defs) in
b.scopein <- chCmp b.scopein s');
b.scopeout <- chCmp b.scopeout (Sd.union b.scopein b.defs)
);
done
let sdshow s = Sd.fold (fun(a,b) c -> (Symbol.name a ^ ":" ^ Symbol.name b)::c) s [] |> List.sort_uniq compare |> String.concat ","
let debug b =
let g = Vcg.create () in
let rec doast parent t =
let id = string_of_int t.meta.id in
Vcg.node g id (Vcg.astname t) parent;
Astutil.iter (fun t' ->
Vcg.edge g id (string_of_int t'.meta.id)) t;
id
and doblock parent b =
let name = Format.sprintf "G%d" b.bid in
Vcg.node g name "" parent;
Vcg.info g name (Format.sprintf "defs: %s\nscopein: %s\nscopeout: %s" (sdshow b.defs) (sdshow b.scopein) (sdshow b.scopeout));
List.iter (fun x -> ignore (doast name x)) b.ast;
List.iter (doblock name) b.sub;
List.iter (fun x -> Vcg.edge g name (Format.sprintf "G%d" x.bid)) b.next;
Vcg.color g name (match b.typ with
| BSeq -> ""
| BLet -> "lightred"
| BFix -> "lightgreen")
in doblock "" b;
Vcg.pp Format.std_formatter g
(* Walker callback: rewrite each Sym node to its uniquely-scoped name.
   Receives the node payload, its position, and the scope metadata computed
   by [propagate]:
   - a definition site carries its fresh name in [repl] and is rewritten to it;
   - a use site is looked up in the block's incoming scope, where exactly one
     binding must match, otherwise the symbol is undefined or ambiguous.
   Non-symbol nodes pass through unchanged with their metadata erased. *)
let fixnames (p, pos, {block={scopein}; repl}) =
  match p with
  | Sym s ->
    (match repl with
    | Some x -> {p=Sym x; pos; meta=()}
    | None ->
      match Sd.fold (fun (s',t) r -> if s = s' then t::r else r) scopein [] with
      | [x] -> {p=Sym x; pos; meta=()}
      | [] -> Util.error pos ((Symbol.name s) ^ " undefined")
      | _ -> Util.error pos ((Symbol.name s) ^ " ambiguous"))
  | _ -> {p; pos; meta=()}
(* Entry point: scope-resolve an AST.
   Builds the block/CFG structure rooted at a BFix block pre-seeded with the
   builtin variables (each bound to itself), propagates the scope sets to a
   fixed point, then rewrites every symbol to its unique name. *)
let analyse t =
  let b = newblock BFix None in
  b.defs <- List.fold_left (fun l (a,_,_) -> Sd.add (a,a) l) Sd.empty Builtin.vars;
  let (t', _, _) = blockify b t in
  propagate b;
  Astutil.walk fixnames t'
| null | https://raw.githubusercontent.com/aiju/lilith/56a34309abb016d75c7aa25937f9350f337287d7/eva/scope.ml | ocaml | module H = Hashtbl
open Ast
module Def = struct type t = Symbol.t * Symbol.t let compare = compare end
module Sd = Set.Make(Def)
type blocktype = BSeq | BFix | BLet
type block = {
bid: int;
typ: blocktype;
mutable ast: meta ast list;
up: block option;
mutable sub: block list;
mutable next: block list;
mutable prev: block list;
mutable defs: Sd.t;
mutable scopein: Sd.t;
mutable scopeout: Sd.t
}
and meta = {
id: int;
block: block;
repl: Symbol.t option
}
let idalloc = Util.idAllocator ()
let bidalloc = Util.idAllocator()
let newblock typ up =
let b = {
ast=[];
bid=bidalloc();
next=[];
prev=[];
up;
typ;
sub=[];
defs=Sd.empty;
scopein=Sd.empty;
scopeout=Sd.empty
} in
(match up with
| Some b' -> b'.sub <- b::b'.sub
| None -> ());
b
let addast b s = b.ast <- s::b.ast
let edge bl bl' = bl |> List.iter (fun b -> bl' |> List.iter (fun b' ->
b.next <- b'::b.next;
b'.prev <- b::b'.prev))
let rec processLval up colon lhs =
let node x = let t' = {p=x; pos=lhs.pos; meta={id=idalloc(); block=up; repl=None}} in addast up t'; t' in
match lhs.p with
| TypeIs(a, Some b) ->
let a' = processLval up true a in
let (b', _, _) = blockify up b in
node (TypeIs(a', Some b'))
| TypeIs(a, None) ->
node (TypeIs(processLval up true a, None))
| Sym(s) ->
if colon then (
let temp = Symbol.temp ((Symbol.name s) ^ "$") in
up.defs <- Sd.add (s, temp) up.defs;
let t' = {p=Sym s; pos=lhs.pos; meta={id=idalloc(); block=up; repl=Some temp}} in
addast up t';
t'
)else
node (Sym s)
| Un(OpVar, a) -> node (Un(OpVar, processLval up colon a))
| Tuple l -> node (Tuple(List.map (processLval up colon) l))
| _ -> Util.error(lhs.pos) ("invalid lval "^(Vcg.astname lhs))
and blockify up t : meta ast * block list * block list =
let node x = let t' = {p=x; pos=t.pos; meta={id=idalloc(); block=up; repl=None}} in addast up t'; t' in
let h x = blockify (newblock BSeq (Some up)) x in
match t.p with
| Seq (_::_ as l) ->
let l' = List.map h l in
Util.iterAdj (fun (_, _, a) (_, b, _) -> edge a b) l';
(node (Seq (List.map (fun (a, _, _) -> a) l')), [up], [up])
| Fix l ->
let b = newblock BFix (Some up) in
let l' = List.map (fun x -> let (e, _, _) = blockify b x in e) l in
(node (Fix l'), [up], [up])
| Lambda(a,b) ->
let block = newblock BLet (Some up) in
let lhsblock = newblock BSeq (Some block) in
let a' = processLval lhsblock true a
and (b', bin, _) = blockify (newblock BSeq (Some block)) b in
edge [lhsblock] bin;
(node (Lambda (a', b')), [up], [up])
| Let(a,Some b) ->
let lhs = newblock BLet (Some up) in
let (a', _, bout) = blockify lhs a
and (b', bin, _) = h b in
edge bout bin;
(node (Let(a', Some b')), [up], [up])
| Let(a,None) ->
let block = newblock BLet up.up in
let (a', bin, bout) = blockify block a in
edge bout [up];
(node (Let(a', None)), bin, [up])
| If(a,b,c) ->
let (a', ain, aout) = h a and
(b', bin, bout) = h b and
(c', cin, cout) = h c in
edge aout bin;
edge aout cin;
(node (If(a', b', c')), [up], [up])
| While(a, b) ->
let (a', ain, aout) = h a and
(b', bin, bout) = h b in
edge aout bin;
edge bout ain;
(node (While(a', b')), [up], [up])
| DoWhile(a, b) ->
let (a', ain, aout) = h a and
(b', bin, bout) = h b in
edge aout bin;
edge bout ain;
(node (DoWhile(a', b')), [up], [up])
| For(a, b, c, d) ->
let (a', ain, aout) = h a and
(b', bin, bout) = h b and
(c', cin, cout) = h c and
(d', din, dout) = h d in
edge aout bin;
edge bin din;
edge dout cin;
edge cout bin;
(node (For(a', b', c', d')), [up], [up])
| Assign(a, b) ->
let a' = processLval up false a and
(b', _, _) = blockify up b in
(node (Assign(a', b')), [up], [up])
| Seq []
| Bin(_, _, _)
| Un(_, _)
| Sym _
| IntLit _
| TypeLit _
| Call(_, _)
| Index(_, _)
| Array(_)
| Tuple(_)
| TypeIs(_, _) ->
let f x =
let (e, _, _) = blockify up x in
e in
(node (Astutil.map f t), [up], [up])
let propagate b =
let blocks = (
let ret = ref [b] in
let rec findblocks b =
ret := b.sub @ !ret;
List.iter findblocks b.sub
in findblocks b;
!ret
) in
let changed = ref true in
let chCmp a b =
(if not (Sd.equal a b) then changed := true);
b in
let chUnion a b = chCmp a (Sd.union a b) in
while !changed do
changed := false;
blocks |> List.iter (fun b ->
b.sub |> List.iter (fun b' ->
if b'.typ <> BLet then
b.defs <- chUnion b.defs b'.defs
);
(match b.prev, b.up with
| [], Some b' ->
b.scopein <- b'.scopein
| _ ->
let s = List.fold_left (fun s b' -> Sd.union s b'.scopeout) Sd.empty b.prev in
let s' = (if b.typ = BFix then Sd.union s b.defs else Sd.diff s b.defs) in
b.scopein <- chCmp b.scopein s');
b.scopeout <- chCmp b.scopeout (Sd.union b.scopein b.defs)
);
done
let sdshow s = Sd.fold (fun(a,b) c -> (Symbol.name a ^ ":" ^ Symbol.name b)::c) s [] |> List.sort_uniq compare |> String.concat ","
let debug b =
let g = Vcg.create () in
let rec doast parent t =
let id = string_of_int t.meta.id in
Vcg.node g id (Vcg.astname t) parent;
Astutil.iter (fun t' ->
Vcg.edge g id (string_of_int t'.meta.id)) t;
id
and doblock parent b =
let name = Format.sprintf "G%d" b.bid in
Vcg.node g name "" parent;
Vcg.info g name (Format.sprintf "defs: %s\nscopein: %s\nscopeout: %s" (sdshow b.defs) (sdshow b.scopein) (sdshow b.scopeout));
List.iter (fun x -> ignore (doast name x)) b.ast;
List.iter (doblock name) b.sub;
List.iter (fun x -> Vcg.edge g name (Format.sprintf "G%d" x.bid)) b.next;
Vcg.color g name (match b.typ with
| BSeq -> ""
| BLet -> "lightred"
| BFix -> "lightgreen")
in doblock "" b;
Vcg.pp Format.std_formatter g
let fixnames (p, pos, {block={scopein}; repl}) =
match p with
| Sym s ->
(match repl with
| Some x -> {p=Sym x; pos; meta=()}
| None ->
match Sd.fold (fun (s',t) r -> if s = s' then t::r else r) scopein [] with
| [x] -> {p=Sym x; pos; meta=()}
| [] -> Util.error pos ((Symbol.name s) ^ " undefined")
| _ -> Util.error pos ((Symbol.name s) ^ " ambiguous"))
| _ -> {p; pos; meta=()}
let analyse t =
let b = newblock BFix None in
b.defs <- List.fold_left (fun l (a,_,_) -> Sd.add (a,a) l) Sd.empty Builtin.vars;
let (t', _, _) = blockify b t in
propagate b;
Astutil.walk fixnames t'
| |
887d4a7b8b49dc715b26a50717a173ad4a26c570eeeac98fdcdee44b2ae6fa72 | ScrambledEggsOnToast/needle | Parse.hs | |
Module : Control . Arrow . Needle .
Description : Parsing needle diagrams
Copyright : ( c ) 2014
License : MIT
Maintainer :
This module 's main export is ' parseNeedle ' , which parses a needle diagram into a ` NeedleArrow ` .
Module : Control.Arrow.Needle.Parse
Description : Parsing needle diagrams
Copyright : (c) 2014 Josh Kirklin
License : MIT
Maintainer :
This module's main export is 'parseNeedle', which parses a needle diagram into a `NeedleArrow`.
-}
module Control.Arrow.Needle.Parse (
-- * Parsing needles
NeedleArrow (..)
, parseNeedle
-- * Errors
, NeedleError (..)
, presentNeedleError
) where
import qualified Data.Map.Strict as M
import qualified Data.Text as T
import Data.Maybe
import Data.Either
import Data.Monoid
import Text.Parsec as P
import Text.Parsec.Extra (natural)
import Data.Char
import Control.Monad
import Control.Applicative ((<$>), (<*>))
import Control.Monad.State
import Control.Arrow
import Control.Arrow.Needle.Internal.UnevenGrid as G
--------------------------------
-- Types
--------------------------------
-- | The datatype representing a generic needle arrow.
data NeedleArrow = Input Int Int
| Through (Maybe NeedleArrow) T.Text
| Join [NeedleArrow]
deriving (Show, Read, Eq)
| The grid element for the first round of parsing .
data NeedleElem = None
| Track
| In Int Int
| Out
| LabelIn T.Text
| LabelOut T.Text
| ExtArrow T.Text
| Switch Direction
| TunnelEntrance
| TunnelExit
deriving (Show, Read, Eq)
-- | Errors in parsing.
data NeedleError = ParseError String
| ConstructionError String
instance Show NeedleError where
show = presentNeedleError
-- | Present the error.
presentNeedleError :: NeedleError -> String
presentNeedleError (ParseError s) = "Needle parse error:\n"++s
presentNeedleError (ConstructionError s) = "Needle construction error:\n"++s
data Direction = Up | Down
deriving (Show, Read, Eq)
type NeedleGrid = Grid NeedleElem
--------------------------------
-- String -> NeedleArrow
--------------------------------
-- | Parse a string to a needle
parseNeedle :: String -> Either NeedleError NeedleArrow
parseNeedle = parseNeedleGrid >=> gridArrow
--------------------------------
NeedleGrid - > NeedleArrow
--------------------------------
gridArrow :: NeedleGrid -> Either NeedleError NeedleArrow
gridArrow grid = do
os <- mapM (arrowToPosition grid) $ outputPositions grid
maybe (Left $ ConstructionError "No outputs") return $ arrowJoin os
outputPositions :: NeedleGrid -> [GridPosition]
outputPositions = findPositions (== Out)
findLabelOutPosition :: T.Text -> GridExamine NeedleElem (Maybe GridPosition)
findLabelOutPosition t = do
grid <- getGrid
return $ listToMaybe (findPositions (== (LabelOut t)) grid)
arrowJoin :: [NeedleArrow] -> Maybe NeedleArrow
arrowJoin [] = Nothing
arrowJoin [a] = Just a
arrowJoin as = Just $ Join as
arrowToPosition :: NeedleGrid -> GridPosition -> Either NeedleError NeedleArrow
arrowToPosition grid pos = gridExamine grid pos go
where
err = return . Left . ConstructionError
success = return . Right
tryPath path = branch $ do
mp <- path
case mp of
Nothing -> err "Nothing on this path"
Just _ -> go
go = do
mh <- hereGet
case mh of
Nothing -> err "Position not in grid"
Just h -> case h of
None -> err "Arrow from nothing"
Track -> do
w <- fromJust <$> width
ups <- forM [0 .. (w - 1)] $ \n -> tryPath $ do
e <- lUpGet n
return $ mfilter (== (Switch Down)) e
downs <- forM [0 .. (w - 1)] $ \n -> tryPath $ do
e <- lDownGet n
return $ mfilter (== (Switch Up)) e
left <- tryPath leftGet
let paths = rights $ ups ++ [left] ++ downs
mJoint = arrowJoin paths
case mJoint of
Nothing -> do
(n, _) <- G.getPosition
err $ "Track from nowhere on line " ++ show (n + 1)
Just joint -> success joint
In n m -> success $ Input n m
Out -> do
ml <- leftGet
case ml of
Just l -> go
Nothing -> err "An output has no arrow going into it"
LabelIn t -> do
mlo <- findLabelOutPosition t
case mlo of
Just lo -> putPosition lo >> go
Nothing -> err $ "Found label-in '" ++ T.unpack t ++ "' with no label-out"
LabelOut t -> do
ml <- leftGet
case ml of
Just l -> go
Nothing -> err $ "Label-out '" ++ T.unpack t ++ "' has no arrow going into it"
ExtArrow t -> do
left <- tryPath leftGet
up <- tryPath $ do
e <- lUpGet 0
return $ mfilter (== (Switch Down)) e
down <- tryPath $ do
e <- lDownGet 0
return $ mfilter (== (Switch Up)) e
let paths = rights $ [up,left,down]
mJoint = arrowJoin paths
success $ Through mJoint t
Switch d -> do
left <- tryPath leftGet
continuing <- tryPath $ do
e <- case d of
Down -> lUpGet 0
Up -> lDownGet 0
return $ mfilter (== h) e
let paths = rights $ case d of
Down -> [continuing, left]
Up -> [left, continuing]
mJoint = arrowJoin paths
case mJoint of
Nothing -> do
(n, _) <- G.getPosition
err $ "Line switch from nowhere on line " ++ (show n)
Just joint -> success joint
TunnelExit -> do
let tunnel n = if n == 0
then go
else do
ml <- leftGet
case ml of
Nothing -> do
(n,_) <- G.getPosition
err $ "Tunnel from nowhere on line " ++ (show n)
Just TunnelExit -> tunnel (n+1)
Just TunnelEntrance -> tunnel (n-1)
Just _ -> tunnel n
tunnel 1
TunnelEntrance -> do
ml <- leftGet
case ml of
Nothing -> do
(n,_) <- G.getPosition
err $ "Tunnel entrance has no arrow going into it on line " ++ (show n)
Just _ -> go
--------------------------------
String - > NeedleGrid
--------------------------------
-- | Pretty print a needle grid
prettyNeedleGrid :: NeedleGrid -> String
prettyNeedleGrid = prettyGrid prettyElem
where
prettyElem None n = replicate n ' '
prettyElem Track n = replicate n '='
prettyElem (In _ _) n = replicate (n-1) ' ' ++ "}"
prettyElem Out n = ">" ++ replicate (n-1) ' '
prettyElem (LabelIn t) n = replicate (n - 1 - length s) ' ' ++ s ++ ":"
where
s = T.unpack t
prettyElem (LabelOut t) n = ":" ++ s ++ replicate (n - 1 - length s) ' '
where
s = T.unpack t
prettyElem (ExtArrow t) n = "{" ++ s ++ replicate (n - 2 - length s) ' ' ++ "}"
where
s = T.unpack t
prettyElem (Switch Up) n = replicate n '/'
prettyElem (Switch Down) n = replicate n '\\'
prettyElem TunnelEntrance n = replicate n ')'
prettyElem TunnelExit n = replicate n '('
-- | Parse a needle grid
parseNeedleGrid :: String -> Either NeedleError NeedleGrid
parseNeedleGrid s = case result of
Left pe -> Left . ParseError $
"line " ++ (show . sourceLine . errorPos $ pe) ++ ":\n" ++
ls !! ((sourceLine . errorPos $ pe) - 1) ++ "\n" ++
replicate ((sourceColumn . errorPos $ pe) - 1) ' ' ++ "^"
Right x -> Right (grid x)
where
result = zipWithM parseLine ls [1..]
ls = lines s
parseLine l n = runParser (do
p <- P.getPosition
setPosition $ setSourceLine p n
es <- many (withWidth . choice . map try $ elemParsers n)
optional $ try (string "-- " >> many anyChar)
eof
return es) 0 "needle expression" l
withWidth p = do
c1 <- sourceColumn <$> P.getPosition
x <- p
c2 <- sourceColumn <$> P.getPosition
return (x, c2 - c1)
elemParsers n = [
do
many1 space
return None
, do
many1 (char '=')
return Track
, do
void (char '}')
m <- getState
modifyState (+1)
return $ In n m
, do
void (char '>')
return Out
, do
l <- many1 letter
spaces
void (char ':')
return $ LabelIn (T.pack l)
, do
void (char ':')
spaces
l <- many1 letter
return $ LabelOut (T.pack l)
, do
void (char '{')
f <- anyChar
l <- manyTill anyChar (char '}')
return $ ExtArrow (T.pack $ f : l)
, do
void (char '/')
return $ Switch Up
, do
void (char '\\')
return $ Switch Down
, do
void (char ')')
return TunnelEntrance
, do
void (char '(')
return TunnelExit
]
| null | https://raw.githubusercontent.com/ScrambledEggsOnToast/needle/3c0d6a9c106d816871a94ad956955f8bf396db5c/Control/Arrow/Needle/Parse.hs | haskell | * Parsing needles
* Errors
------------------------------
Types
------------------------------
| The datatype representing a generic needle arrow.
| Errors in parsing.
| Present the error.
------------------------------
String -> NeedleArrow
------------------------------
| Parse a string to a needle
------------------------------
------------------------------
------------------------------
------------------------------
| Pretty print a needle grid
| Parse a needle grid | |
Module : Control . Arrow . Needle .
Description : Parsing needle diagrams
Copyright : ( c ) 2014
License : MIT
Maintainer :
This module 's main export is ' parseNeedle ' , which parses a needle diagram into a ` NeedleArrow ` .
Module : Control.Arrow.Needle.Parse
Description : Parsing needle diagrams
Copyright : (c) 2014 Josh Kirklin
License : MIT
Maintainer :
This module's main export is 'parseNeedle', which parses a needle diagram into a `NeedleArrow`.
-}
module Control.Arrow.Needle.Parse (
NeedleArrow (..)
, parseNeedle
, NeedleError (..)
, presentNeedleError
) where
import qualified Data.Map.Strict as M
import qualified Data.Text as T
import Data.Maybe
import Data.Either
import Data.Monoid
import Text.Parsec as P
import Text.Parsec.Extra (natural)
import Data.Char
import Control.Monad
import Control.Applicative ((<$>), (<*>))
import Control.Monad.State
import Control.Arrow
import Control.Arrow.Needle.Internal.UnevenGrid as G
data NeedleArrow = Input Int Int
| Through (Maybe NeedleArrow) T.Text
| Join [NeedleArrow]
deriving (Show, Read, Eq)
| The grid element for the first round of parsing .
data NeedleElem = None
| Track
| In Int Int
| Out
| LabelIn T.Text
| LabelOut T.Text
| ExtArrow T.Text
| Switch Direction
| TunnelEntrance
| TunnelExit
deriving (Show, Read, Eq)
data NeedleError = ParseError String
| ConstructionError String
instance Show NeedleError where
show = presentNeedleError
presentNeedleError :: NeedleError -> String
presentNeedleError (ParseError s) = "Needle parse error:\n"++s
presentNeedleError (ConstructionError s) = "Needle construction error:\n"++s
data Direction = Up | Down
deriving (Show, Read, Eq)
type NeedleGrid = Grid NeedleElem
parseNeedle :: String -> Either NeedleError NeedleArrow
parseNeedle = parseNeedleGrid >=> gridArrow
NeedleGrid - > NeedleArrow
gridArrow :: NeedleGrid -> Either NeedleError NeedleArrow
gridArrow grid = do
os <- mapM (arrowToPosition grid) $ outputPositions grid
maybe (Left $ ConstructionError "No outputs") return $ arrowJoin os
outputPositions :: NeedleGrid -> [GridPosition]
outputPositions = findPositions (== Out)
findLabelOutPosition :: T.Text -> GridExamine NeedleElem (Maybe GridPosition)
findLabelOutPosition t = do
grid <- getGrid
return $ listToMaybe (findPositions (== (LabelOut t)) grid)
arrowJoin :: [NeedleArrow] -> Maybe NeedleArrow
arrowJoin [] = Nothing
arrowJoin [a] = Just a
arrowJoin as = Just $ Join as
arrowToPosition :: NeedleGrid -> GridPosition -> Either NeedleError NeedleArrow
arrowToPosition grid pos = gridExamine grid pos go
where
err = return . Left . ConstructionError
success = return . Right
tryPath path = branch $ do
mp <- path
case mp of
Nothing -> err "Nothing on this path"
Just _ -> go
go = do
mh <- hereGet
case mh of
Nothing -> err "Position not in grid"
Just h -> case h of
None -> err "Arrow from nothing"
Track -> do
w <- fromJust <$> width
ups <- forM [0 .. (w - 1)] $ \n -> tryPath $ do
e <- lUpGet n
return $ mfilter (== (Switch Down)) e
downs <- forM [0 .. (w - 1)] $ \n -> tryPath $ do
e <- lDownGet n
return $ mfilter (== (Switch Up)) e
left <- tryPath leftGet
let paths = rights $ ups ++ [left] ++ downs
mJoint = arrowJoin paths
case mJoint of
Nothing -> do
(n, _) <- G.getPosition
err $ "Track from nowhere on line " ++ show (n + 1)
Just joint -> success joint
In n m -> success $ Input n m
Out -> do
ml <- leftGet
case ml of
Just l -> go
Nothing -> err "An output has no arrow going into it"
LabelIn t -> do
mlo <- findLabelOutPosition t
case mlo of
Just lo -> putPosition lo >> go
Nothing -> err $ "Found label-in '" ++ T.unpack t ++ "' with no label-out"
LabelOut t -> do
ml <- leftGet
case ml of
Just l -> go
Nothing -> err $ "Label-out '" ++ T.unpack t ++ "' has no arrow going into it"
ExtArrow t -> do
left <- tryPath leftGet
up <- tryPath $ do
e <- lUpGet 0
return $ mfilter (== (Switch Down)) e
down <- tryPath $ do
e <- lDownGet 0
return $ mfilter (== (Switch Up)) e
let paths = rights $ [up,left,down]
mJoint = arrowJoin paths
success $ Through mJoint t
Switch d -> do
left <- tryPath leftGet
continuing <- tryPath $ do
e <- case d of
Down -> lUpGet 0
Up -> lDownGet 0
return $ mfilter (== h) e
let paths = rights $ case d of
Down -> [continuing, left]
Up -> [left, continuing]
mJoint = arrowJoin paths
case mJoint of
Nothing -> do
(n, _) <- G.getPosition
err $ "Line switch from nowhere on line " ++ (show n)
Just joint -> success joint
TunnelExit -> do
let tunnel n = if n == 0
then go
else do
ml <- leftGet
case ml of
Nothing -> do
(n,_) <- G.getPosition
err $ "Tunnel from nowhere on line " ++ (show n)
Just TunnelExit -> tunnel (n+1)
Just TunnelEntrance -> tunnel (n-1)
Just _ -> tunnel n
tunnel 1
TunnelEntrance -> do
ml <- leftGet
case ml of
Nothing -> do
(n,_) <- G.getPosition
err $ "Tunnel entrance has no arrow going into it on line " ++ (show n)
Just _ -> go
String - > NeedleGrid
prettyNeedleGrid :: NeedleGrid -> String
prettyNeedleGrid = prettyGrid prettyElem
where
prettyElem None n = replicate n ' '
prettyElem Track n = replicate n '='
prettyElem (In _ _) n = replicate (n-1) ' ' ++ "}"
prettyElem Out n = ">" ++ replicate (n-1) ' '
prettyElem (LabelIn t) n = replicate (n - 1 - length s) ' ' ++ s ++ ":"
where
s = T.unpack t
prettyElem (LabelOut t) n = ":" ++ s ++ replicate (n - 1 - length s) ' '
where
s = T.unpack t
prettyElem (ExtArrow t) n = "{" ++ s ++ replicate (n - 2 - length s) ' ' ++ "}"
where
s = T.unpack t
prettyElem (Switch Up) n = replicate n '/'
prettyElem (Switch Down) n = replicate n '\\'
prettyElem TunnelEntrance n = replicate n ')'
prettyElem TunnelExit n = replicate n '('
parseNeedleGrid :: String -> Either NeedleError NeedleGrid
parseNeedleGrid s = case result of
Left pe -> Left . ParseError $
"line " ++ (show . sourceLine . errorPos $ pe) ++ ":\n" ++
ls !! ((sourceLine . errorPos $ pe) - 1) ++ "\n" ++
replicate ((sourceColumn . errorPos $ pe) - 1) ' ' ++ "^"
Right x -> Right (grid x)
where
result = zipWithM parseLine ls [1..]
ls = lines s
parseLine l n = runParser (do
p <- P.getPosition
setPosition $ setSourceLine p n
es <- many (withWidth . choice . map try $ elemParsers n)
optional $ try (string "-- " >> many anyChar)
eof
return es) 0 "needle expression" l
withWidth p = do
c1 <- sourceColumn <$> P.getPosition
x <- p
c2 <- sourceColumn <$> P.getPosition
return (x, c2 - c1)
elemParsers n = [
do
many1 space
return None
, do
many1 (char '=')
return Track
, do
void (char '}')
m <- getState
modifyState (+1)
return $ In n m
, do
void (char '>')
return Out
, do
l <- many1 letter
spaces
void (char ':')
return $ LabelIn (T.pack l)
, do
void (char ':')
spaces
l <- many1 letter
return $ LabelOut (T.pack l)
, do
void (char '{')
f <- anyChar
l <- manyTill anyChar (char '}')
return $ ExtArrow (T.pack $ f : l)
, do
void (char '/')
return $ Switch Up
, do
void (char '\\')
return $ Switch Down
, do
void (char ')')
return TunnelEntrance
, do
void (char '(')
return TunnelExit
]
|
961a365b8f1f36f5658921e67565ff95ea3737ad0a092939a0073f4d121a3f02 | ghc/packages-dph | Testsuite.hs | module DPH.Testsuite (
module DPH.Testsuite.Preproc,
module DPH.Testsuite.Testcase,
module DPH.Testsuite.Utils,
module Test.QuickCheck
) where
import DPH.Testsuite.Preproc
import DPH.Testsuite.Testcase
import DPH.Testsuite.Utils
import Test.QuickCheck
| null | https://raw.githubusercontent.com/ghc/packages-dph/64eca669f13f4d216af9024474a3fc73ce101793/dph-test/framework/DPH/Testsuite.hs | haskell | module DPH.Testsuite (
module DPH.Testsuite.Preproc,
module DPH.Testsuite.Testcase,
module DPH.Testsuite.Utils,
module Test.QuickCheck
) where
import DPH.Testsuite.Preproc
import DPH.Testsuite.Testcase
import DPH.Testsuite.Utils
import Test.QuickCheck
| |
61c3895c0552a18958e485b9273efc89d3b6f0cb318f0bb958497aa07f74e7f8 | prestancedesign/pingcrm-clojure | users.clj | (ns pingcrm.handlers.users
(:require [crypto.password.bcrypt :as password]
[inertia.middleware :as inertia]
[pingcrm.models.users :as db]
[ring.util.response :as rr]
[struct.core :as st]))
(def user-schema
[[:first_name st/required st/string]
[:last_name st/required st/string]
[:email st/required st/email]
[:owner st/required st/boolean-str]])
(defn validate-unique-user
[db params]
(let [{:keys [email]} params
validation (first (st/validate params user-schema))]
(if (db/get-user-by-email db email)
(assoc validation :email "The email has already been taken.")
validation)))
(defn get-users [db]
(fn [{:keys [params]}]
(let [filters {:search (:search params)
:role (:role params)
:trashed (:trashed params)}
props {:users (db/retrieve-and-filter-users db filters)
:filters filters}]
(inertia/render "Users/Index" props))))
(defn user-form [_]
(inertia/render "Users/Create"))
(defn store-user! [db]
(fn [{:keys [body-params] :as req}]
(if-let [errors (validate-unique-user db body-params)]
(-> (rr/redirect "/users/create")
(assoc :flash {:error errors}))
(let [account-id (-> req :identity :account_id)
user body-params
encrypted-user (update user :password password/encrypt)
user-created? (db/insert-user! db (assoc encrypted-user :account_id account-id))]
(when user-created?
(-> (rr/redirect "/users")
(assoc :flash {:success "User created."})))))))
(defn edit-user! [db]
(fn [{:keys [path-params]}]
(let [props {:user (db/get-user-by-id db (:user-id path-params))}]
(inertia/render "Users/Edit" props))))
(defn update-user! [db]
(fn [{:keys [body-params] :as req}]
(let [id (-> req :path-params :user-id)
url (str (-> req :uri) "/edit")]
(if-let [errors (first (st/validate body-params user-schema))]
(-> (rr/redirect url :see-other)
(assoc :flash {:error errors}))
(let [user-form (select-keys (:body-params req) [:first_name :last_name :email :owner])
user-updated? (db/update-user! db user-form id)]
(when user-updated?
(-> (rr/redirect url :see-other)
(assoc :flash {:success "User updated."}))))))))
(defn delete-user! [db]
(fn [req]
(let [id (-> req :path-params :user-id)
back (get (:headers req) "referer")
user-deleted? (db/soft-delete-user! db id)]
(when user-deleted?
(-> (rr/redirect back :see-other)
(assoc :flash {:success "User deleted."}))))))
(defn restore-user! [db]
(fn [req]
(let [id (-> req :path-params :user-id)
back (get (:headers req) "referer")
user-restored? (db/restore-deleted-user! db id)]
(when user-restored?
(-> (rr/redirect back :see-other)
(assoc :flash {:success "User restored."}))))))
| null | https://raw.githubusercontent.com/prestancedesign/pingcrm-clojure/1a0aa343d7ab1cfa3a042cbd948022b22c491c7d/src/clj/pingcrm/handlers/users.clj | clojure | (ns pingcrm.handlers.users
(:require [crypto.password.bcrypt :as password]
[inertia.middleware :as inertia]
[pingcrm.models.users :as db]
[ring.util.response :as rr]
[struct.core :as st]))
(def user-schema
[[:first_name st/required st/string]
[:last_name st/required st/string]
[:email st/required st/email]
[:owner st/required st/boolean-str]])
(defn validate-unique-user
[db params]
(let [{:keys [email]} params
validation (first (st/validate params user-schema))]
(if (db/get-user-by-email db email)
(assoc validation :email "The email has already been taken.")
validation)))
(defn get-users [db]
(fn [{:keys [params]}]
(let [filters {:search (:search params)
:role (:role params)
:trashed (:trashed params)}
props {:users (db/retrieve-and-filter-users db filters)
:filters filters}]
(inertia/render "Users/Index" props))))
(defn user-form [_]
(inertia/render "Users/Create"))
(defn store-user! [db]
(fn [{:keys [body-params] :as req}]
(if-let [errors (validate-unique-user db body-params)]
(-> (rr/redirect "/users/create")
(assoc :flash {:error errors}))
(let [account-id (-> req :identity :account_id)
user body-params
encrypted-user (update user :password password/encrypt)
user-created? (db/insert-user! db (assoc encrypted-user :account_id account-id))]
(when user-created?
(-> (rr/redirect "/users")
(assoc :flash {:success "User created."})))))))
(defn edit-user! [db]
(fn [{:keys [path-params]}]
(let [props {:user (db/get-user-by-id db (:user-id path-params))}]
(inertia/render "Users/Edit" props))))
(defn update-user! [db]
(fn [{:keys [body-params] :as req}]
(let [id (-> req :path-params :user-id)
url (str (-> req :uri) "/edit")]
(if-let [errors (first (st/validate body-params user-schema))]
(-> (rr/redirect url :see-other)
(assoc :flash {:error errors}))
(let [user-form (select-keys (:body-params req) [:first_name :last_name :email :owner])
user-updated? (db/update-user! db user-form id)]
(when user-updated?
(-> (rr/redirect url :see-other)
(assoc :flash {:success "User updated."}))))))))
(defn delete-user! [db]
(fn [req]
(let [id (-> req :path-params :user-id)
back (get (:headers req) "referer")
user-deleted? (db/soft-delete-user! db id)]
(when user-deleted?
(-> (rr/redirect back :see-other)
(assoc :flash {:success "User deleted."}))))))
(defn restore-user! [db]
(fn [req]
(let [id (-> req :path-params :user-id)
back (get (:headers req) "referer")
user-restored? (db/restore-deleted-user! db id)]
(when user-restored?
(-> (rr/redirect back :see-other)
(assoc :flash {:success "User restored."}))))))
| |
8079805f829141c485742e3a55d7293cc57ea83d2b7d42c268bcb79be8650167 | Hexstream/compatible-metaclasses | package.lisp | (cl:defpackage #:compatible-metaclasses
(:use #:cl)
(:shadowing-import-from #:enhanced-find-class #:find-class)
(:shadow #:class
#:standard-class
#:substitute)
(:export #:class
#:substitute
#:standard-class
#:standard-metaclass
#:validate-as-mixin
#:validate-as
#:metasubstitute-mixin
#:metasubstitute))
| null | https://raw.githubusercontent.com/Hexstream/compatible-metaclasses/7e0e44bc4944c9669adb4eed2539df1b5e3cab14/package.lisp | lisp | (cl:defpackage #:compatible-metaclasses
(:use #:cl)
(:shadowing-import-from #:enhanced-find-class #:find-class)
(:shadow #:class
#:standard-class
#:substitute)
(:export #:class
#:substitute
#:standard-class
#:standard-metaclass
#:validate-as-mixin
#:validate-as
#:metasubstitute-mixin
#:metasubstitute))
| |
08549d5bdc54375a41e15cd15b938e94706e7c2dbc1b3922b128a907fb4e653f | samirose/sicp-compiler-project | or.scm | (define-library
(or)
(export
empty-or-is-false
or-one-false-is-false
or-one-non-false-value-is-value
or-two-non-falses-is-first-value
or-any-non-false-is-non-false
or-with-complex-expressions
or-evaluates-expressions-from-left-to-right
or-true-short-circuits-to-true)
(import (scheme base))
(begin
(define (empty-or-is-false)
(or))
(define (or-one-false-is-false)
(or #f))
(define (or-one-non-false-value-is-value)
(or 42))
(define (or-two-non-falses-is-first-value)
(or 1 2))
(define (or-any-non-false-is-non-false)
(if (or #f 2)
(if (or 1 #f)
#t
#f)
#f))
(define (or-with-complex-expressions)
(or ((lambda (x) (if x #t #f)) #f)
(> (+ 0 1) 2)
(let ((x 1)) (* x 2))))
(define (or-evaluates-expressions-from-left-to-right)
(let ((step 0))
(if (or
(begin (set! step 1) (> step 1))
(begin (set! step 2) (> step 2))
(begin (set! step 3) 3))
step
#f)))
(define (or-true-short-circuits-to-true)
(let ((step 0))
(or (begin (set! step 1) #f)
(begin (set! step 2) 2)
(begin (set! step 3) 3))
step))
)
)
| null | https://raw.githubusercontent.com/samirose/sicp-compiler-project/2bc8a481044b5a653408f8595b2a2396600b8ad6/test-compiler/or.scm | scheme | (define-library
(or)
(export
empty-or-is-false
or-one-false-is-false
or-one-non-false-value-is-value
or-two-non-falses-is-first-value
or-any-non-false-is-non-false
or-with-complex-expressions
or-evaluates-expressions-from-left-to-right
or-true-short-circuits-to-true)
(import (scheme base))
(begin
(define (empty-or-is-false)
(or))
(define (or-one-false-is-false)
(or #f))
(define (or-one-non-false-value-is-value)
(or 42))
(define (or-two-non-falses-is-first-value)
(or 1 2))
(define (or-any-non-false-is-non-false)
(if (or #f 2)
(if (or 1 #f)
#t
#f)
#f))
(define (or-with-complex-expressions)
(or ((lambda (x) (if x #t #f)) #f)
(> (+ 0 1) 2)
(let ((x 1)) (* x 2))))
(define (or-evaluates-expressions-from-left-to-right)
(let ((step 0))
(if (or
(begin (set! step 1) (> step 1))
(begin (set! step 2) (> step 2))
(begin (set! step 3) 3))
step
#f)))
(define (or-true-short-circuits-to-true)
(let ((step 0))
(or (begin (set! step 1) #f)
(begin (set! step 2) 2)
(begin (set! step 3) 3))
step))
)
)
| |
3f466ec93f7196c3497764e7d5793a1f6270038b28e7ea79d52b58ea98fcf273 | FranklinChen/hugs98-plus-Sep2006 | Double.hs |
Double.hs ( adapted from double.c which is ( c ) Silicon Graphics , Inc. )
Copyright ( c ) 2002 - 2005 < >
This file is part of HOpenGL and distributed under a BSD - style license
See the file libraries / GLUT / LICENSE
This is a simple double buffered program .
Pressing the left mouse button rotates the rectangle .
Pressing the middle mouse button stops the rotation .
Double.hs (adapted from double.c which is (c) Silicon Graphics, Inc.)
Copyright (c) Sven Panne 2002-2005 <>
This file is part of HOpenGL and distributed under a BSD-style license
See the file libraries/GLUT/LICENSE
This is a simple double buffered program.
Pressing the left mouse button rotates the rectangle.
Pressing the middle mouse button stops the rotation.
-}
import Data.IORef ( IORef, newIORef )
import System.Exit ( exitWith, ExitCode(ExitSuccess) )
import Graphics.UI.GLUT
data State = State { spin :: IORef GLfloat }
makeState :: IO State
makeState = do
s <- newIORef 0
return $ State { spin = s }
display :: State -> DisplayCallback
display state = do
clear [ ColorBuffer ]
preservingMatrix $ do
s <- get (spin state)
rotate s (Vector3 0 0 1)
color (Color3 1 1 1 :: Color3 GLfloat)
rect (Vertex2 (-25) (-25)) (Vertex2 25 25 :: Vertex2 GLfloat)
swapBuffers
spinDisplay :: State -> IdleCallback
spinDisplay state = do
let wrap n s = if s > n then s - n else s
spin state $~ (wrap 360 . (+ 2))
postRedisplay Nothing
myInit :: IO ()
myInit = do
clearColor $= Color4 0 0 0 0
shadeModel $= Flat
reshape :: ReshapeCallback
reshape size = do
viewport $= (Position 0 0, size)
matrixMode $= Projection
loadIdentity
ortho (-50) 50 (-50) 50 (-1) 1
matrixMode $= Modelview 0
loadIdentity
keyboardMouse :: State -> KeyboardMouseCallback
keyboardMouse state (MouseButton b) Down _ _ =
idleCallback $= case b of
LeftButton -> Just (spinDisplay state)
_ -> Nothing
ESC not handled in the original example , but useful nevertheless
keyboardMouse _ (Char '\27') Down _ _ = exitWith ExitSuccess
keyboardMouse _ _ _ _ _ = return ()
-- Request double buffer display mode.
-- Register mouse input callback functions
main :: IO ()
main = do
(progName, _args) <- getArgsAndInitialize
initialDisplayMode $= [ DoubleBuffered, RGBMode ]
initialWindowSize $= Size 250 250
initialWindowPosition $= Position 100 100
createWindow progName
state <- makeState
myInit
displayCallback $= display state
reshapeCallback $= Just reshape
keyboardMouseCallback $= Just (keyboardMouse state)
mainLoop
| null | https://raw.githubusercontent.com/FranklinChen/hugs98-plus-Sep2006/54ab69bd6313adbbed1d790b46aca2a0305ea67e/packages/GLUT/examples/RedBook/Double.hs | haskell | Request double buffer display mode.
Register mouse input callback functions |
Double.hs ( adapted from double.c which is ( c ) Silicon Graphics , Inc. )
Copyright ( c ) 2002 - 2005 < >
This file is part of HOpenGL and distributed under a BSD - style license
See the file libraries / GLUT / LICENSE
This is a simple double buffered program .
Pressing the left mouse button rotates the rectangle .
Pressing the middle mouse button stops the rotation .
Double.hs (adapted from double.c which is (c) Silicon Graphics, Inc.)
Copyright (c) Sven Panne 2002-2005 <>
This file is part of HOpenGL and distributed under a BSD-style license
See the file libraries/GLUT/LICENSE
This is a simple double buffered program.
Pressing the left mouse button rotates the rectangle.
Pressing the middle mouse button stops the rotation.
-}
import Data.IORef ( IORef, newIORef )
import System.Exit ( exitWith, ExitCode(ExitSuccess) )
import Graphics.UI.GLUT
data State = State { spin :: IORef GLfloat }
makeState :: IO State
makeState = do
s <- newIORef 0
return $ State { spin = s }
display :: State -> DisplayCallback
display state = do
clear [ ColorBuffer ]
preservingMatrix $ do
s <- get (spin state)
rotate s (Vector3 0 0 1)
color (Color3 1 1 1 :: Color3 GLfloat)
rect (Vertex2 (-25) (-25)) (Vertex2 25 25 :: Vertex2 GLfloat)
swapBuffers
spinDisplay :: State -> IdleCallback
spinDisplay state = do
let wrap n s = if s > n then s - n else s
spin state $~ (wrap 360 . (+ 2))
postRedisplay Nothing
myInit :: IO ()
myInit = do
clearColor $= Color4 0 0 0 0
shadeModel $= Flat
reshape :: ReshapeCallback
reshape size = do
viewport $= (Position 0 0, size)
matrixMode $= Projection
loadIdentity
ortho (-50) 50 (-50) 50 (-1) 1
matrixMode $= Modelview 0
loadIdentity
keyboardMouse :: State -> KeyboardMouseCallback
keyboardMouse state (MouseButton b) Down _ _ =
idleCallback $= case b of
LeftButton -> Just (spinDisplay state)
_ -> Nothing
ESC not handled in the original example , but useful nevertheless
keyboardMouse _ (Char '\27') Down _ _ = exitWith ExitSuccess
keyboardMouse _ _ _ _ _ = return ()
main :: IO ()
main = do
(progName, _args) <- getArgsAndInitialize
initialDisplayMode $= [ DoubleBuffered, RGBMode ]
initialWindowSize $= Size 250 250
initialWindowPosition $= Position 100 100
createWindow progName
state <- makeState
myInit
displayCallback $= display state
reshapeCallback $= Just reshape
keyboardMouseCallback $= Just (keyboardMouse state)
mainLoop
|
5cca98ef3b4b04c58a8ddfe2ddd3111bb04f2836410a9d08a3c316ea07c4eb39 | factisresearch/large-hashable | MD5.hs | | An implementation of ' HashAlgorithm ' for ( ) .
{-# LANGUAGE BangPatterns #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE CApiFFI #
module Data.LargeHashable.MD5 (
MD5Hash(..), md5HashAlgorithm, runMD5
) where
keep imports in alphabetic order ( in Emacs , use " M - x sort - lines " )
import Data.LargeHashable.Intern
import Data.LargeHashable.LargeWord
import Data.Word
import Foreign.Marshal.Alloc
import Foreign.Ptr
import Foreign.Storable
import qualified Data.ByteString.Base16 as Base16
import qualified Data.ByteString.Char8 as BSC
newtype MD5Hash = MD5Hash { unMD5Hash :: Word128 }
deriving (Eq, Ord)
instance Show MD5Hash where
show (MD5Hash w) =
BSC.unpack (Base16.encode (w128ToBs w))
foreign import capi unsafe "md5.h md5_init"
c_md5_init :: Ptr RawCtx -> IO ()
foreign import capi unsafe "md5.h md5_update"
c_md5_update :: Ptr RawCtx -> Ptr Word8 -> Int -> IO ()
foreign import capi unsafe "md5.h md5_update_uchar"
c_md5_update_uchar :: Ptr RawCtx -> Word8 -> IO ()
foreign import capi unsafe "md5.h md5_update_ushort"
c_md5_update_ushort :: Ptr RawCtx -> Word16 -> IO ()
foreign import capi unsafe "md5.h md5_update_uint"
c_md5_update_uint :: Ptr RawCtx -> Word32 -> IO ()
foreign import capi unsafe "md5.h md5_update_ulong"
c_md5_update_ulong :: Ptr RawCtx -> Word64 -> IO ()
foreign import capi unsafe "md5.h md5_finalize"
c_md5_finalize :: Ptr RawCtx -> Ptr Word8 -> IO ()
# INLINE digestSize #
digestSize :: Int
digestSize = 16
# INLINE sizeCtx #
sizeCtx :: Int
sizeCtx = 96
data RawCtx -- phantom type argument
newtype Ctx = Ctx { _unCtx :: Ptr RawCtx }
withCtx :: (Ctx -> IO ()) -> IO MD5Hash
withCtx f =
allocaBytes sizeCtx $ \(ptr :: Ptr RawCtx) ->
do c_md5_init ptr
f (Ctx ptr)
allocaBytes digestSize $ \(resPtr :: Ptr Word8) ->
do c_md5_finalize ptr resPtr
let first = castPtr resPtr :: Ptr Word64
w1 <- peek first
let second = castPtr (plusPtr resPtr (sizeOf w1)) :: Ptr Word64
w2 <- peek second
return (MD5Hash (Word128 w1 w2))
md5HashAlgorithm :: HashAlgorithm MD5Hash
md5HashAlgorithm =
HashAlgorithm
{ ha_run = run
, ha_xor = xorMD5
, ha_updateHash = updateHash
}
where
xorMD5 (MD5Hash h1) (MD5Hash h2) = MD5Hash (h1 `xorW128` h2)
updateHash updates (MD5Hash h) =
let f = hu_updateULong updates
in do f (w128_first h)
f (w128_second h)
run f =
withCtx $ \(Ctx ctxPtr) ->
let !updates =
HashUpdates
{ hu_updatePtr = c_md5_update ctxPtr
, hu_updateUChar = c_md5_update_uchar ctxPtr
, hu_updateUShort = c_md5_update_ushort ctxPtr
, hu_updateUInt = c_md5_update_uint ctxPtr
, hu_updateULong = c_md5_update_ulong ctxPtr
}
in f updates
runMD5 :: LH () -> MD5Hash
runMD5 = runLH md5HashAlgorithm
| null | https://raw.githubusercontent.com/factisresearch/large-hashable/823772be60e1314c4cbe7ac14c7633b2b32fbeea/src/Data/LargeHashable/MD5.hs | haskell | # LANGUAGE BangPatterns #
phantom type argument | | An implementation of ' HashAlgorithm ' for ( ) .
# LANGUAGE ScopedTypeVariables #
# LANGUAGE CApiFFI #
module Data.LargeHashable.MD5 (
MD5Hash(..), md5HashAlgorithm, runMD5
) where
keep imports in alphabetic order ( in Emacs , use " M - x sort - lines " )
import Data.LargeHashable.Intern
import Data.LargeHashable.LargeWord
import Data.Word
import Foreign.Marshal.Alloc
import Foreign.Ptr
import Foreign.Storable
import qualified Data.ByteString.Base16 as Base16
import qualified Data.ByteString.Char8 as BSC
newtype MD5Hash = MD5Hash { unMD5Hash :: Word128 }
deriving (Eq, Ord)
instance Show MD5Hash where
show (MD5Hash w) =
BSC.unpack (Base16.encode (w128ToBs w))
foreign import capi unsafe "md5.h md5_init"
c_md5_init :: Ptr RawCtx -> IO ()
foreign import capi unsafe "md5.h md5_update"
c_md5_update :: Ptr RawCtx -> Ptr Word8 -> Int -> IO ()
foreign import capi unsafe "md5.h md5_update_uchar"
c_md5_update_uchar :: Ptr RawCtx -> Word8 -> IO ()
foreign import capi unsafe "md5.h md5_update_ushort"
c_md5_update_ushort :: Ptr RawCtx -> Word16 -> IO ()
foreign import capi unsafe "md5.h md5_update_uint"
c_md5_update_uint :: Ptr RawCtx -> Word32 -> IO ()
foreign import capi unsafe "md5.h md5_update_ulong"
c_md5_update_ulong :: Ptr RawCtx -> Word64 -> IO ()
foreign import capi unsafe "md5.h md5_finalize"
c_md5_finalize :: Ptr RawCtx -> Ptr Word8 -> IO ()
# INLINE digestSize #
digestSize :: Int
digestSize = 16
# INLINE sizeCtx #
sizeCtx :: Int
sizeCtx = 96
newtype Ctx = Ctx { _unCtx :: Ptr RawCtx }
withCtx :: (Ctx -> IO ()) -> IO MD5Hash
withCtx f =
allocaBytes sizeCtx $ \(ptr :: Ptr RawCtx) ->
do c_md5_init ptr
f (Ctx ptr)
allocaBytes digestSize $ \(resPtr :: Ptr Word8) ->
do c_md5_finalize ptr resPtr
let first = castPtr resPtr :: Ptr Word64
w1 <- peek first
let second = castPtr (plusPtr resPtr (sizeOf w1)) :: Ptr Word64
w2 <- peek second
return (MD5Hash (Word128 w1 w2))
md5HashAlgorithm :: HashAlgorithm MD5Hash
md5HashAlgorithm =
HashAlgorithm
{ ha_run = run
, ha_xor = xorMD5
, ha_updateHash = updateHash
}
where
xorMD5 (MD5Hash h1) (MD5Hash h2) = MD5Hash (h1 `xorW128` h2)
updateHash updates (MD5Hash h) =
let f = hu_updateULong updates
in do f (w128_first h)
f (w128_second h)
run f =
withCtx $ \(Ctx ctxPtr) ->
let !updates =
HashUpdates
{ hu_updatePtr = c_md5_update ctxPtr
, hu_updateUChar = c_md5_update_uchar ctxPtr
, hu_updateUShort = c_md5_update_ushort ctxPtr
, hu_updateUInt = c_md5_update_uint ctxPtr
, hu_updateULong = c_md5_update_ulong ctxPtr
}
in f updates
runMD5 :: LH () -> MD5Hash
runMD5 = runLH md5HashAlgorithm
|
39f1f4eeef59e90a775739433cae9b052d4581d3454ede91b6b22e4a2d405bf7 | cyverse-archive/DiscoveryEnvironmentBackend | common.clj | (ns kifshare.common
(:require [clojure.string :as string]
[kifshare.config :as cfg])
(:use [hiccup.core :only [html]]
[hiccup.page :only [include-css include-js html5]]))
(defn parse-accept-headers
"Parses out the accept headers and returns a list
of the acceptable content types."
[request]
(string/split (get-in request [:headers "accept"]) #","))
(defn show-html?
"Checks to see if 'text/html' is in the list of
acceptable content-types in the Accept header."
[request]
(contains? (set (parse-accept-headers request)) "text/html"))
(defn html-head []
(html
[:head
[:title "iPlant Public Downloads"]
(map include-css (cfg/css-files))
(map include-js (cfg/javascript-files))]))
(defn layout [& content]
(html5
(html-head)
[:body
[:div#wrapper {:id "page-wrapper" :class "container_12"}
content]]))
| null | https://raw.githubusercontent.com/cyverse-archive/DiscoveryEnvironmentBackend/7f6177078c1a1cb6d11e62f12cfe2e22d669635b/services/kifshare/src/kifshare/common.clj | clojure | (ns kifshare.common
(:require [clojure.string :as string]
[kifshare.config :as cfg])
(:use [hiccup.core :only [html]]
[hiccup.page :only [include-css include-js html5]]))
(defn parse-accept-headers
"Parses out the accept headers and returns a list
of the acceptable content types."
[request]
(string/split (get-in request [:headers "accept"]) #","))
(defn show-html?
"Checks to see if 'text/html' is in the list of
acceptable content-types in the Accept header."
[request]
(contains? (set (parse-accept-headers request)) "text/html"))
(defn html-head []
(html
[:head
[:title "iPlant Public Downloads"]
(map include-css (cfg/css-files))
(map include-js (cfg/javascript-files))]))
(defn layout [& content]
(html5
(html-head)
[:body
[:div#wrapper {:id "page-wrapper" :class "container_12"}
content]]))
| |
717a15e4aa968e9333f5ce4bd88c274fb5e40b0167094d73737d84c3090cc02d | xapix-io/axel-f | json.cljc | (ns axel-f.excel.json
(:require [axel-f.buddy.codecs.json :as json]))
(defn encode*
"Returns a JSON-encoding String for the given object."
[^{:doc "Object to encode"} to-encode]
(json/generate-string to-encode))
(def encode #'encode*)
(defn decode*
"Returns an object corresponding to the given JSON-encoded string."
[^{:doc "JSON-encoded string to decode"} to-decode]
(json/parse-string to-decode))
(def decode #'decode*)
(def env
{"JSONENCODE" (with-meta encode* (merge {:deprecated true} (meta #'encode*)))
"JSONDECODE" (with-meta decode* (merge {:deprecated true} (meta #'decode*)))
"JSON" {"ENCODE" encode
"DECODE" decode}})
| null | https://raw.githubusercontent.com/xapix-io/axel-f/ec8fca880033e0ae78a8d9f42538d4a71fba29bd/src/axel_f/excel/json.cljc | clojure | (ns axel-f.excel.json
(:require [axel-f.buddy.codecs.json :as json]))
(defn encode*
"Returns a JSON-encoding String for the given object."
[^{:doc "Object to encode"} to-encode]
(json/generate-string to-encode))
(def encode #'encode*)
(defn decode*
"Returns an object corresponding to the given JSON-encoded string."
[^{:doc "JSON-encoded string to decode"} to-decode]
(json/parse-string to-decode))
(def decode #'decode*)
(def env
{"JSONENCODE" (with-meta encode* (merge {:deprecated true} (meta #'encode*)))
"JSONDECODE" (with-meta decode* (merge {:deprecated true} (meta #'decode*)))
"JSON" {"ENCODE" encode
"DECODE" decode}})
| |
e8497ebcdc3d50524a5e2ade13958f859b8de98629a7d8efe3d8442a7f238c87 | exercism/scheme | test.scm | (load "test-util.ss")
(define test-cases
`((test-success "no primes under two" equal? sieve '(1) '()) (test-success "find first prime" equal? sieve '(2) '(2))
(test-success "find primes up to 10" equal? sieve '(10)
'(2 3 5 7))
(test-success "limit is prime" equal? sieve '(13)
'(2 3 5 7 11 13))
(test-success "find primes up to 1000" equal? sieve '(1000)
'(2 3 5 7 11 13 17 19 23 29 31 37 41 43 47 53 59 61 67 71 73
79 83 89 97 101 103 107 109 113 127 131 137 139 149 151 157
163 167 173 179 181 191 193 197 199 211 223 227 229 233 239
241 251 257 263 269 271 277 281 283 293 307 311 313 317 331
337 347 349 353 359 367 373 379 383 389 397 401 409 419 421
431 433 439 443 449 457 461 463 467 479 487 491 499 503 509
521 523 541 547 557 563 569 571 577 587 593 599 601 607 613
617 619 631 641 643 647 653 659 661 673 677 683 691 701 709
719 727 733 739 743 751 757 761 769 773 787 797 809 811 821
823 827 829 839 853 857 859 863 877 881 883 887 907 911 919
929 937 941 947 953 967 971 977 983 991 997))
(test-success "1229 primes below 10000"
(lambda (result n) (= n (length result))) sieve '(10000)
1229)
(test-success "9592 primes below 100000"
(lambda (result n) (= n (length result))) sieve '(100000)
9592)
(test-success "78498 primes below 1000000"
(lambda (result n) (= n (length result))) sieve '(1000000)
78498)))
(run-with-cli "sieve.scm" (list test-cases))
| null | https://raw.githubusercontent.com/exercism/scheme/2064dd5e5d5a03a06417d28c33c5349bec97dad7/exercises/practice/sieve/test.scm | scheme | (load "test-util.ss")
(define test-cases
`((test-success "no primes under two" equal? sieve '(1) '()) (test-success "find first prime" equal? sieve '(2) '(2))
(test-success "find primes up to 10" equal? sieve '(10)
'(2 3 5 7))
(test-success "limit is prime" equal? sieve '(13)
'(2 3 5 7 11 13))
(test-success "find primes up to 1000" equal? sieve '(1000)
'(2 3 5 7 11 13 17 19 23 29 31 37 41 43 47 53 59 61 67 71 73
79 83 89 97 101 103 107 109 113 127 131 137 139 149 151 157
163 167 173 179 181 191 193 197 199 211 223 227 229 233 239
241 251 257 263 269 271 277 281 283 293 307 311 313 317 331
337 347 349 353 359 367 373 379 383 389 397 401 409 419 421
431 433 439 443 449 457 461 463 467 479 487 491 499 503 509
521 523 541 547 557 563 569 571 577 587 593 599 601 607 613
617 619 631 641 643 647 653 659 661 673 677 683 691 701 709
719 727 733 739 743 751 757 761 769 773 787 797 809 811 821
823 827 829 839 853 857 859 863 877 881 883 887 907 911 919
929 937 941 947 953 967 971 977 983 991 997))
(test-success "1229 primes below 10000"
(lambda (result n) (= n (length result))) sieve '(10000)
1229)
(test-success "9592 primes below 100000"
(lambda (result n) (= n (length result))) sieve '(100000)
9592)
(test-success "78498 primes below 1000000"
(lambda (result n) (= n (length result))) sieve '(1000000)
78498)))
(run-with-cli "sieve.scm" (list test-cases))
| |
d1446c30e8c87900d6e006f99f18a53248324c537817bdfb4f69afd17d43465d | wkok/openai-clojure | sse.clj | (ns ^:no-doc wkok.openai-clojure.sse
(:require
[hato.client :as http]
[clojure.core.async :as a]
[clojure.string :as string]
[cheshire.core :as json])
(:import (java.io InputStream)))
(def event-mask (re-pattern (str "(?s).+?\n\n")))
(defn deliver-events
[events {:keys [on-next]}]
(when on-next
(a/go
(loop []
(let [event (a/<! events)]
(when (not= :done event)
(on-next event)
(recur)))))))
(defn- parse-event [raw-event]
(let [data-idx (string/index-of raw-event "{")
done-idx (string/index-of raw-event "[DONE]")]
(if done-idx
:done
(-> (subs raw-event data-idx)
(json/parse-string true)))))
(defn calc-buffer-size
"Buffer size should be at least equal to max_tokens
or 16 (the default in openai as of 2023-02-19)
plus the [DONE] terminator"
[{:keys [max_tokens]
:or {max_tokens 16}}]
(inc max_tokens))
(defn sse-events
"Returns a core.async channel with events as clojure data structures.
Inspiration from "
[{:keys [request params]}]
(let [event-stream ^InputStream (:body (http/request (merge request
params
{:as :stream})))
buffer-size (calc-buffer-size params)
events (a/chan (a/sliding-buffer buffer-size) (map parse-event))]
(a/thread
(loop [data nil]
(let [byte-array (byte-array (max 1 (.available event-stream)))
bytes-read (.read event-stream byte-array)]
(if (neg? bytes-read)
;; Input stream closed, exiting read-loop
(.close event-stream)
(let [data (str data (slurp byte-array))]
(if-let [es (not-empty (re-seq event-mask data))]
(if (every? true? (map #(a/>!! events %) es))
(recur (string/replace data event-mask ""))
;; Output stream closed, exiting read-loop
(.close event-stream))
(recur data)))))))
events))
(defn sse-request
"Process streamed results.
If on-next callback provided, then read from channel and call the callback.
Returns a response with the core.async channel as the body"
[{:keys [params] :as ctx}]
(let [events (sse-events ctx)]
(deliver-events events params)
{:status 200
:body events}))
(def perform-sse-capable-request
{:name ::perform-sse-capable-request
:leave (fn [{:keys [request params] :as ctx}]
(assoc ctx :response (if (:stream params)
(sse-request ctx)
(http/request request))))})
| null | https://raw.githubusercontent.com/wkok/openai-clojure/5d781bae62001aa8d597fb62a3ed3bcc55ab1b66/src/wkok/openai_clojure/sse.clj | clojure | Input stream closed, exiting read-loop
Output stream closed, exiting read-loop | (ns ^:no-doc wkok.openai-clojure.sse
(:require
[hato.client :as http]
[clojure.core.async :as a]
[clojure.string :as string]
[cheshire.core :as json])
(:import (java.io InputStream)))
(def event-mask (re-pattern (str "(?s).+?\n\n")))
(defn deliver-events
[events {:keys [on-next]}]
(when on-next
(a/go
(loop []
(let [event (a/<! events)]
(when (not= :done event)
(on-next event)
(recur)))))))
(defn- parse-event [raw-event]
(let [data-idx (string/index-of raw-event "{")
done-idx (string/index-of raw-event "[DONE]")]
(if done-idx
:done
(-> (subs raw-event data-idx)
(json/parse-string true)))))
(defn calc-buffer-size
"Buffer size should be at least equal to max_tokens
or 16 (the default in openai as of 2023-02-19)
plus the [DONE] terminator"
[{:keys [max_tokens]
:or {max_tokens 16}}]
(inc max_tokens))
(defn sse-events
"Returns a core.async channel with events as clojure data structures.
Inspiration from "
[{:keys [request params]}]
(let [event-stream ^InputStream (:body (http/request (merge request
params
{:as :stream})))
buffer-size (calc-buffer-size params)
events (a/chan (a/sliding-buffer buffer-size) (map parse-event))]
(a/thread
(loop [data nil]
(let [byte-array (byte-array (max 1 (.available event-stream)))
bytes-read (.read event-stream byte-array)]
(if (neg? bytes-read)
(.close event-stream)
(let [data (str data (slurp byte-array))]
(if-let [es (not-empty (re-seq event-mask data))]
(if (every? true? (map #(a/>!! events %) es))
(recur (string/replace data event-mask ""))
(.close event-stream))
(recur data)))))))
events))
(defn sse-request
"Process streamed results.
If on-next callback provided, then read from channel and call the callback.
Returns a response with the core.async channel as the body"
[{:keys [params] :as ctx}]
(let [events (sse-events ctx)]
(deliver-events events params)
{:status 200
:body events}))
(def perform-sse-capable-request
{:name ::perform-sse-capable-request
:leave (fn [{:keys [request params] :as ctx}]
(assoc ctx :response (if (:stream params)
(sse-request ctx)
(http/request request))))})
|
d53392ab1e5b4fc286b97f50e26ed4c2a0b4eb9ee41986a3efb625e2bbb16a1b | archhaskell/cblrepo | Main.hs |
- Copyright 2011 - 2014 Per
-
- Licensed under the Apache License , Version 2.0 ( the " License " ) ;
- you may not use this file except in compliance with the License .
- You may obtain a copy of the License at
-
- -2.0
-
- Unless required by applicable law or agreed to in writing , software
- distributed under the License is distributed on an " AS IS " BASIS ,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
- See the License for the specific language governing permissions and
- limitations under the License .
- Copyright 2011-2014 Per Magnus Therning
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- -2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-}
module Main where
{ { { 1 imports
import Add
import BuildPkgs
import BumpPkgs
import Update
import Versions
import ListPkgs
import Upgrades
import Util.Misc
import PkgBuild
import ConvertDB
import Remove
import Extract
import CreateConfig
import Util.Cfg
import Paths_cblrepo
import Data.Monoid
import Distribution.Text
import Options.Applicative as OA
import System.Directory
-- { { { 1 command line arguments
argAppDir, argDbFile :: Parser String
argAppDir = strOption (long "appdir" <> value "" <> showDefault <> help "Path to application data directory")
argDbFile = strOption (long "db" <> value "cblrepo.db" <> showDefault <> help "Path to package database")
argDryRun :: Parser Bool
argDryRun = switch (short 'n' <> help "Make no changes, (dry run)")
cmdAddPkgOpts :: Parser Cmds
cmdAddPkgOpts = CmdAdd
<$> strOption (long "patchdir" <> value "patches" <> showDefault <> help "Location of patches")
<*> option ghcVersionArgReader (long "ghc-version" <> value ghcDefVersion <> showDefault <> help "GHC version to use")
<*> many (option ghcPkgArgReader (short 'g' <> long "ghc-pkg" <> metavar "PKG,VER" <> help "GHC base package (multiple)"))
<*> many (option distroPkgArgReader (short 'd' <> long "distro-pkg" <> metavar "PKG,VER,XREV,REL" <> help "Distro package (multiple)"))
<*> many (option strCblFileArgReader (short 'f' <> long "cbl-file" <> metavar "FILE[:flag,-flag]" <> help "CABAL file (multiple)"))
<*> many (argument strCblPkgArgReader (metavar "PKGNAME,VERSION[:flag,-flag] ..."))
cmdAddPkgCmd = command "add" (info (helper <*> cmdAddPkgOpts) (fullDesc <> progDesc "Add a package to the database"))
cmdBumpPkgsCmd = command "bump" (info (helper <*> cmdBumpPkgsOpts) (fullDesc <> progDesc "Bump packages that need it after updating the named packages"))
where
cmdBumpPkgsOpts = CmdBumpPkgs
<$> switch (long "inclusive" <> help "Include the listed packages")
<*> some (strArgument (metavar "PKGNAME ..."))
cmdBuildPkgsCmd = command "build" (info (helper <*> cmdBuildPkgsOpts)
(fullDesc <> progDesc "Re-order packages into a good build order"))
where
cmdBuildPkgsOpts = CmdBuildPkgs <$> some (strArgument (metavar "PKGNAME ..."))
cmdUpdateCmd = command "update" (info (helper <*> cmdUpdateOpts) (fullDesc <> progDesc "Update the index"))
where
cmdUpdateOpts = CmdUpdate <$> switch (internal <> hidden)
cmdVersionsCmd = command "versions" (info (helper <*> cmdVersionsOpts) (fullDesc <> progDesc "List available versions of packages"))
where
cmdVersionsOpts = CmdVersions
<$> switch (short 'l' <> long "latest" <> help "List only the latest version of packages")
<*> some (strArgument (metavar "PKGNAME ..."))
cmdUpgradesCmd = command "upgrades" (info (helper <*> cmdUpgradesOpts) (fullDesc <> progDesc "Check for packages that can be upgraded"))
where
cmdUpgradesOpts = CmdUpgrades
<$> switch (short 's' <> help "A shorter output suitable for scripting")
<*> switch (short 'x' <> help "Limit list to packages with new x-revision")
cmdListPkgsCmd = command "list" (info (helper <*> cmdListPkgsOpts) (fullDesc <> progDesc "List packages in repo"))
where
cmdListPkgsOpts = CmdListPkgs
<$> switch (short 'g' <> long "ghc" <> help "List ghc packages")
<*> switch (short 'd' <> long "distro" <> help "List distro packages")
<*> switch (long "no-repo" <> help "Do not list repo packages")
<*> option listFormatReader (short 'f' <> long "format" <> value CmdListNormalFmt <> help "Output format: short, normal, hackage (default: normal)")
<*> many (argument str (metavar "PKGNAME ..."))
cmdPkgBuildCmd = command "pkgbuild" (info (helper <*> cmdPkgBuildOpts) (fullDesc <> progDesc "Create PKGBUILD other files necessary for an Arch package"))
where
cmdPkgBuildOpts = CmdPkgBuild
<$> option ghcVersionArgReader (long "ghc-version" <> value ghcDefVersion <> help "GHC version to use in PKGBUILD (default: 8.0.1)")
<*> option auto (long "ghc-release" <> value ghcDefRelease <> showDefault <> help "GHC release to use in PKGBUILD")
<*> strOption (long "patchdir" <> value "patches" <> showDefault <> help "Location of patches")
<*> some (strArgument (metavar "PKGNAME ..."))
cmdConvertDbCmd = command "convertdb" (info (helper <*> cmdConvertDbOpts) (fullDesc <> progDesc "Convert an old database to the new format"))
where
cmdConvertDbOpts = CmdConvertDb
<$> strOption (short 'i' <> long "indb" <> value "cblrepo.db" <> showDefault <> help "Old database")
<*> strOption (short 'o' <> long "outdb" <> value "new-cblrepo.db" <> showDefault <> help "New database")
cmdRemovePkgCmd = command "rm" (info (helper <*> cmdRemovePkgOpts) (fullDesc <> progDesc "Remove packages"))
where
cmdRemovePkgOpts = CmdRemovePkg <$> some (strArgument (metavar "PKGNAME ..."))
cmdExtractCmd = command "extract" (info (helper <*> cmdExtractOpts) (fullDesc <> progDesc "Extract Cabal file from index"))
where
cmdExtractOpts = CmdExtract <$> many (argument pkgNVersionArgReader (metavar "PKGNAME,VERSION"))
cmdCreateConfigCmd = command "create-config" (info (helper <*> cmdCreateConfigOpts) (fullDesc <> progDesc "Create configuration file with defaults"))
where
cmdCreateConfigOpts = pure CmdCreateConfig
argParser = info (helper <*> opts) (fullDesc <> header (progName ++ " v" ++ display version) <> progDesc "Maintain a database of dependencies of CABAL packages")
where
opts = Opts
<$> argAppDir <*> argDbFile <*> argDryRun
<*> subparser (cmdAddPkgCmd <> cmdBumpPkgsCmd <> cmdBuildPkgsCmd <> cmdUpdateCmd <> cmdVersionsCmd <> cmdUpgradesCmd <>
cmdListPkgsCmd <> cmdPkgBuildCmd <> cmdConvertDbCmd <> cmdRemovePkgCmd <> cmdExtractCmd <> cmdCreateConfigCmd)
{ { { 1 main
main :: IO ()
main = do
defAppDir <- getAppUserDataDirectory progName
execParser argParser >>= \ o -> do
let aD = if null (appDir o) then defAppDir else appDir o
createDirectoryIfMissing True aD
cfg <- readCfg "cblrepo.cfg"
let e = (o { appDir = aD }, cfg)
case optsCmd o of
CmdAdd {} -> runCommand e add
CmdBuildPkgs {} -> runCommand e buildPkgs
CmdBumpPkgs {} -> runCommand e bumpPkgs
CmdUpdate {} -> runCommand e update
CmdVersions {} -> runCommand e versions
CmdListPkgs {} -> runCommand e listPkgs
CmdUpgrades {} -> runCommand e upgrades
CmdPkgBuild {} -> runCommand e pkgBuild
CmdConvertDb {} -> runCommand e convertDb
CmdRemovePkg {} -> runCommand e remove
CmdExtract {} -> runCommand e extract
CmdCreateConfig -> runCommand e createConfig
| null | https://raw.githubusercontent.com/archhaskell/cblrepo/83316afca397b1e5e526a69d360efd9cb260921b/src/Main.hs | haskell | { { { 1 command line arguments |
- Copyright 2011 - 2014 Per
-
- Licensed under the Apache License , Version 2.0 ( the " License " ) ;
- you may not use this file except in compliance with the License .
- You may obtain a copy of the License at
-
- -2.0
-
- Unless required by applicable law or agreed to in writing , software
- distributed under the License is distributed on an " AS IS " BASIS ,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
- See the License for the specific language governing permissions and
- limitations under the License .
- Copyright 2011-2014 Per Magnus Therning
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- -2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-}
module Main where
{ { { 1 imports
import Add
import BuildPkgs
import BumpPkgs
import Update
import Versions
import ListPkgs
import Upgrades
import Util.Misc
import PkgBuild
import ConvertDB
import Remove
import Extract
import CreateConfig
import Util.Cfg
import Paths_cblrepo
import Data.Monoid
import Distribution.Text
import Options.Applicative as OA
import System.Directory
argAppDir, argDbFile :: Parser String
argAppDir = strOption (long "appdir" <> value "" <> showDefault <> help "Path to application data directory")
argDbFile = strOption (long "db" <> value "cblrepo.db" <> showDefault <> help "Path to package database")
argDryRun :: Parser Bool
argDryRun = switch (short 'n' <> help "Make no changes, (dry run)")
cmdAddPkgOpts :: Parser Cmds
cmdAddPkgOpts = CmdAdd
<$> strOption (long "patchdir" <> value "patches" <> showDefault <> help "Location of patches")
<*> option ghcVersionArgReader (long "ghc-version" <> value ghcDefVersion <> showDefault <> help "GHC version to use")
<*> many (option ghcPkgArgReader (short 'g' <> long "ghc-pkg" <> metavar "PKG,VER" <> help "GHC base package (multiple)"))
<*> many (option distroPkgArgReader (short 'd' <> long "distro-pkg" <> metavar "PKG,VER,XREV,REL" <> help "Distro package (multiple)"))
<*> many (option strCblFileArgReader (short 'f' <> long "cbl-file" <> metavar "FILE[:flag,-flag]" <> help "CABAL file (multiple)"))
<*> many (argument strCblPkgArgReader (metavar "PKGNAME,VERSION[:flag,-flag] ..."))
cmdAddPkgCmd = command "add" (info (helper <*> cmdAddPkgOpts) (fullDesc <> progDesc "Add a package to the database"))
cmdBumpPkgsCmd = command "bump" (info (helper <*> cmdBumpPkgsOpts) (fullDesc <> progDesc "Bump packages that need it after updating the named packages"))
where
cmdBumpPkgsOpts = CmdBumpPkgs
<$> switch (long "inclusive" <> help "Include the listed packages")
<*> some (strArgument (metavar "PKGNAME ..."))
cmdBuildPkgsCmd = command "build" (info (helper <*> cmdBuildPkgsOpts)
(fullDesc <> progDesc "Re-order packages into a good build order"))
where
cmdBuildPkgsOpts = CmdBuildPkgs <$> some (strArgument (metavar "PKGNAME ..."))
cmdUpdateCmd = command "update" (info (helper <*> cmdUpdateOpts) (fullDesc <> progDesc "Update the index"))
where
cmdUpdateOpts = CmdUpdate <$> switch (internal <> hidden)
cmdVersionsCmd = command "versions" (info (helper <*> cmdVersionsOpts) (fullDesc <> progDesc "List available versions of packages"))
where
cmdVersionsOpts = CmdVersions
<$> switch (short 'l' <> long "latest" <> help "List only the latest version of packages")
<*> some (strArgument (metavar "PKGNAME ..."))
cmdUpgradesCmd = command "upgrades" (info (helper <*> cmdUpgradesOpts) (fullDesc <> progDesc "Check for packages that can be upgraded"))
where
cmdUpgradesOpts = CmdUpgrades
<$> switch (short 's' <> help "A shorter output suitable for scripting")
<*> switch (short 'x' <> help "Limit list to packages with new x-revision")
cmdListPkgsCmd = command "list" (info (helper <*> cmdListPkgsOpts) (fullDesc <> progDesc "List packages in repo"))
where
cmdListPkgsOpts = CmdListPkgs
<$> switch (short 'g' <> long "ghc" <> help "List ghc packages")
<*> switch (short 'd' <> long "distro" <> help "List distro packages")
<*> switch (long "no-repo" <> help "Do not list repo packages")
<*> option listFormatReader (short 'f' <> long "format" <> value CmdListNormalFmt <> help "Output format: short, normal, hackage (default: normal)")
<*> many (argument str (metavar "PKGNAME ..."))
cmdPkgBuildCmd = command "pkgbuild" (info (helper <*> cmdPkgBuildOpts) (fullDesc <> progDesc "Create PKGBUILD other files necessary for an Arch package"))
where
cmdPkgBuildOpts = CmdPkgBuild
<$> option ghcVersionArgReader (long "ghc-version" <> value ghcDefVersion <> help "GHC version to use in PKGBUILD (default: 8.0.1)")
<*> option auto (long "ghc-release" <> value ghcDefRelease <> showDefault <> help "GHC release to use in PKGBUILD")
<*> strOption (long "patchdir" <> value "patches" <> showDefault <> help "Location of patches")
<*> some (strArgument (metavar "PKGNAME ..."))
cmdConvertDbCmd = command "convertdb" (info (helper <*> cmdConvertDbOpts) (fullDesc <> progDesc "Convert an old database to the new format"))
where
cmdConvertDbOpts = CmdConvertDb
<$> strOption (short 'i' <> long "indb" <> value "cblrepo.db" <> showDefault <> help "Old database")
<*> strOption (short 'o' <> long "outdb" <> value "new-cblrepo.db" <> showDefault <> help "New database")
cmdRemovePkgCmd = command "rm" (info (helper <*> cmdRemovePkgOpts) (fullDesc <> progDesc "Remove packages"))
where
cmdRemovePkgOpts = CmdRemovePkg <$> some (strArgument (metavar "PKGNAME ..."))
cmdExtractCmd = command "extract" (info (helper <*> cmdExtractOpts) (fullDesc <> progDesc "Extract Cabal file from index"))
where
cmdExtractOpts = CmdExtract <$> many (argument pkgNVersionArgReader (metavar "PKGNAME,VERSION"))
cmdCreateConfigCmd = command "create-config" (info (helper <*> cmdCreateConfigOpts) (fullDesc <> progDesc "Create configuration file with defaults"))
where
cmdCreateConfigOpts = pure CmdCreateConfig
argParser = info (helper <*> opts) (fullDesc <> header (progName ++ " v" ++ display version) <> progDesc "Maintain a database of dependencies of CABAL packages")
where
opts = Opts
<$> argAppDir <*> argDbFile <*> argDryRun
<*> subparser (cmdAddPkgCmd <> cmdBumpPkgsCmd <> cmdBuildPkgsCmd <> cmdUpdateCmd <> cmdVersionsCmd <> cmdUpgradesCmd <>
cmdListPkgsCmd <> cmdPkgBuildCmd <> cmdConvertDbCmd <> cmdRemovePkgCmd <> cmdExtractCmd <> cmdCreateConfigCmd)
{ { { 1 main
-- | Program entry point: resolve the application directory, parse the
-- command line, read the configuration file, then dispatch to the
-- handler of the selected sub-command.
main :: IO ()
main = do
    defAppDir <- getAppUserDataDirectory progName
    execParser argParser >>= \ o -> do
        -- Fall back to the per-user application data directory when no
        -- directory was given on the command line.
        let aD = if null (appDir o) then defAppDir else appDir o
        createDirectoryIfMissing True aD
        -- NOTE(review): the config file name is relative, so it is read
        -- from the current working directory rather than aD -- confirm
        -- this is intended.
        cfg <- readCfg "cblrepo.cfg"
        -- Environment for every command: the options (with the resolved
        -- app dir patched in) paired with the parsed configuration.
        let e = (o { appDir = aD }, cfg)
        case optsCmd o of
            CmdAdd {} -> runCommand e add
            CmdBuildPkgs {} -> runCommand e buildPkgs
            CmdBumpPkgs {} -> runCommand e bumpPkgs
            CmdUpdate {} -> runCommand e update
            CmdVersions {} -> runCommand e versions
            CmdListPkgs {} -> runCommand e listPkgs
            CmdUpgrades {} -> runCommand e upgrades
            CmdPkgBuild {} -> runCommand e pkgBuild
            CmdConvertDb {} -> runCommand e convertDb
            CmdRemovePkg {} -> runCommand e remove
            CmdExtract {} -> runCommand e extract
            CmdCreateConfig -> runCommand e createConfig
|
1ca448a4e204d779c6ee3ad4369547ff6311ef7e0a33cddaf3a7215d89239c97 | mmottl/gsl-ocaml | eigen.mli | gsl - ocaml - OCaml interface to GSL
Copyright ( © ) 2002 - 2012 - Olivier Andrieu
Distributed under the terms of the GPL version 3
(** Eigensystems *)
open Vectmat
(** {3 Real Symmetric Matrices} *)
(** Workspace for {!symm}; create with {!make_symm_ws}, sized by the
    matrix order. *)
type symm_ws
val make_symm_ws : int -> symm_ws

(** Low-level binding; prefer the wrapper {!symm}. *)
external _symm : mat -> vec -> symm_ws -> unit
    = "ml_gsl_eigen_symm"

(** [symm ?protect m] returns the eigenvalues of the real symmetric
    matrix [m].  [?protect] presumably controls whether the input is
    copied before the computation -- confirm in the implementation. *)
val symm :
  ?protect:bool ->
  [< `M of Matrix.matrix
   | `MF of Matrix_flat.matrix
   | `A of float array * int * int
   | `AA of float array array] ->
  Vector.vector

(** Workspace for {!symmv}. *)
type symmv_ws
val make_symmv_ws : int -> symmv_ws

(** Low-level binding; prefer the wrapper {!symmv}. *)
external _symmv : mat -> vec -> mat -> symmv_ws -> unit
    = "ml_gsl_eigen_symmv"

(** [symmv ?protect m] returns the eigenvalues of [m] together with the
    matrix of eigenvectors. *)
val symmv :
  ?protect:bool ->
  [< `M of Matrix.matrix
   | `MF of Matrix_flat.matrix
   | `A of float array * int * int
   | `AA of float array array] ->
  Vector.vector * Matrix.matrix

(** Ordering used by the [*_sort] functions: by eigenvalue or by its
    absolute value, ascending or descending. *)
type sort =
  | VAL_ASC
  | VAL_DESC
  | ABS_ASC
  | ABS_DESC

(** Sorts an eigenvalue/eigenvector pair; returns [unit], so the
    arguments are modified in place. *)
external symmv_sort : Vector.vector * Matrix.matrix -> sort -> unit
    = "ml_gsl_eigen_symmv_sort"
(** {3 Complex Hermitian Matrices} *)
(** Workspace for {!herm}. *)
type herm_ws
val make_herm_ws : int -> herm_ws

(** Low-level binding; prefer the wrapper {!herm}. *)
external _herm : cmat -> vec -> herm_ws -> unit
    = "ml_gsl_eigen_herm"

(** [herm ?protect m] returns the (real) eigenvalues of the complex
    Hermitian matrix [m]. *)
val herm :
  ?protect:bool ->
  [< `CM of Matrix_complex.matrix
   | `CMF of Matrix_complex_flat.matrix
   | `CA of Gsl_complex.complex_array * int * int ] ->
  Vector.vector

(** Workspace for {!hermv}. *)
type hermv_ws
val make_hermv_ws : int -> hermv_ws

(** Low-level binding; prefer the wrapper {!hermv}. *)
external _hermv : cmat -> vec -> cmat -> hermv_ws -> unit
    = "ml_gsl_eigen_hermv"

(** [hermv ?protect m] returns the eigenvalues of [m] together with the
    complex matrix of eigenvectors. *)
val hermv :
  ?protect:bool ->
  [< `CM of Matrix_complex.matrix
   | `CMF of Matrix_complex_flat.matrix
   | `CA of Gsl_complex.complex_array * int * int ] ->
  Vector.vector * Matrix_complex.matrix

(** Sorts an eigenvalue/eigenvector pair in place (see {!sort}). *)
external hermv_sort :
  Vector.vector * Matrix_complex.matrix ->
  sort -> unit
    = "ml_gsl_eigen_hermv_sort"
(** {3 Real Nonsymmetric Matrices} *)
(** Workspace for {!nonsymm}. *)
type nonsymm_ws
val make_nonsymm_ws : int -> nonsymm_ws

(** Low-level binding; prefer the wrapper {!nonsymm}. *)
external _nonsymm : mat -> cvec -> nonsymm_ws -> unit
    = "ml_gsl_eigen_nonsymm"

(** Variant that additionally collects the Schur vectors in the extra
    matrix argument (the [_Z] form of the GSL routine). *)
external _nonsymm_Z : mat -> cvec -> mat -> nonsymm_ws -> unit
    = "ml_gsl_eigen_nonsymm_Z"

(** [nonsymm ?protect m] returns the (complex) eigenvalues of the real
    nonsymmetric matrix [m]. *)
val nonsymm :
  ?protect:bool ->
  [< `M of Matrix.matrix
   | `MF of Matrix_flat.matrix
   | `A of float array * int * int
   | `AA of float array array] ->
  Vector_complex.vector

(** Workspace for {!nonsymmv}. *)
type nonsymmv_ws
val make_nonsymmv_ws : int -> nonsymmv_ws

(** Low-level binding; prefer the wrapper {!nonsymmv}. *)
external _nonsymmv : mat -> cvec -> cmat -> nonsymmv_ws -> unit
    = "ml_gsl_eigen_nonsymmv"

(** Variant that additionally collects the Schur vectors. *)
external _nonsymmv_Z : mat -> cvec -> cmat -> mat -> nonsymmv_ws -> unit
    = "ml_gsl_eigen_nonsymmv_Z"

(** [nonsymmv ?protect m] returns the complex eigenvalues of [m]
    together with the complex matrix of eigenvectors. *)
val nonsymmv :
  ?protect:bool ->
  [< `M of Matrix.matrix
   | `MF of Matrix_flat.matrix
   | `A of float array * int * int
   | `AA of float array array] ->
  Vector_complex.vector * Matrix_complex.matrix

(** Sorts an eigenvalue/eigenvector pair in place (see {!sort}). *)
external nonsymmv_sort : Vector_complex.vector * Matrix_complex.matrix -> sort -> unit
    = "ml_gsl_eigen_nonsymmv_sort"
| null | https://raw.githubusercontent.com/mmottl/gsl-ocaml/76f8d93cccc1f23084f4a33d3e0a8f1289450580/src/eigen.mli | ocaml | * Eigensystems | gsl - ocaml - OCaml interface to GSL
Copyright ( © ) 2002 - 2012 - Olivier Andrieu
Distributed under the terms of the GPL version 3
open Vectmat
* { 3 Real Symmetric Matrices }
type symm_ws
val make_symm_ws : int -> symm_ws
external _symm : mat -> vec -> symm_ws -> unit
= "ml_gsl_eigen_symm"
val symm :
?protect:bool ->
[< `M of Matrix.matrix
| `MF of Matrix_flat.matrix
| `A of float array * int * int
| `AA of float array array] ->
Vector.vector
type symmv_ws
val make_symmv_ws : int -> symmv_ws
external _symmv : mat -> vec -> mat -> symmv_ws -> unit
= "ml_gsl_eigen_symmv"
val symmv :
?protect:bool ->
[< `M of Matrix.matrix
| `MF of Matrix_flat.matrix
| `A of float array * int * int
| `AA of float array array] ->
Vector.vector * Matrix.matrix
type sort =
| VAL_ASC
| VAL_DESC
| ABS_ASC
| ABS_DESC
external symmv_sort : Vector.vector * Matrix.matrix -> sort -> unit
= "ml_gsl_eigen_symmv_sort"
* { 3 Complex Hermitian Matrices }
type herm_ws
val make_herm_ws : int -> herm_ws
external _herm : cmat -> vec -> herm_ws -> unit
= "ml_gsl_eigen_herm"
val herm :
?protect:bool ->
[< `CM of Matrix_complex.matrix
| `CMF of Matrix_complex_flat.matrix
| `CA of Gsl_complex.complex_array * int * int ] ->
Vector.vector
type hermv_ws
val make_hermv_ws : int -> hermv_ws
external _hermv : cmat -> vec -> cmat -> hermv_ws -> unit
= "ml_gsl_eigen_hermv"
val hermv :
?protect:bool ->
[< `CM of Matrix_complex.matrix
| `CMF of Matrix_complex_flat.matrix
| `CA of Gsl_complex.complex_array * int * int ] ->
Vector.vector * Matrix_complex.matrix
external hermv_sort :
Vector.vector * Matrix_complex.matrix ->
sort -> unit
= "ml_gsl_eigen_hermv_sort"
* { 3 Real Nonsymmetric Matrices }
type nonsymm_ws
val make_nonsymm_ws : int -> nonsymm_ws
external _nonsymm : mat -> cvec -> nonsymm_ws -> unit
= "ml_gsl_eigen_nonsymm"
external _nonsymm_Z : mat -> cvec -> mat -> nonsymm_ws -> unit
= "ml_gsl_eigen_nonsymm_Z"
val nonsymm :
?protect:bool ->
[< `M of Matrix.matrix
| `MF of Matrix_flat.matrix
| `A of float array * int * int
| `AA of float array array] ->
Vector_complex.vector
type nonsymmv_ws
val make_nonsymmv_ws : int -> nonsymmv_ws
external _nonsymmv : mat -> cvec -> cmat -> nonsymmv_ws -> unit
= "ml_gsl_eigen_nonsymmv"
external _nonsymmv_Z : mat -> cvec -> cmat -> mat -> nonsymmv_ws -> unit
= "ml_gsl_eigen_nonsymmv_Z"
val nonsymmv :
?protect:bool ->
[< `M of Matrix.matrix
| `MF of Matrix_flat.matrix
| `A of float array * int * int
| `AA of float array array] ->
Vector_complex.vector * Matrix_complex.matrix
external nonsymmv_sort : Vector_complex.vector * Matrix_complex.matrix -> sort -> unit
= "ml_gsl_eigen_nonsymmv_sort"
|
9e33342474d0b9105d2adee715a09b22120f6e06ccf9245e806ea85e1ecbb367 | sarabander/p2pu-sicp | 3.70.scm |
;; Takes two streams of pairs and a weight function
(define (merge-weighted weight s1 s2)
(cond ((stream-null? s1) s2)
((stream-null? s2) s1)
(else
(let ((s1car (stream-car s1))
(s2car (stream-car s2)))
(cond ((<= (weight s1car) (weight s2car))
(cons-stream
s1car
(merge-weighted weight (stream-cdr s1) s2)))
(else
(cons-stream
s2car
(merge-weighted weight s1 (stream-cdr s2)))))))))
;; Stream of all pairs (s_i t_j) with i <= j, ordered by the `weight`
;; function (SICP exercise 3.70): the first row (s_0 paired with the
;; rest of t) is merged with the recursively generated remaining rows.
(define (weighted-pairs weight s t)
  (cons-stream
   (list (stream-car s) (stream-car t))
   (merge-weighted
    weight
    (stream-map (lambda (x)
                  (list (stream-car s) x))
                (stream-cdr t))
    (weighted-pairs weight (stream-cdr s) (stream-cdr t)))))
;; 1.
;; First 31 pairs of positive integers, ordered by the sum i + j.
(print-n (weighted-pairs (λ (p) (+ (first p) (second p)))
                         integers
                         integers)
         31)
;; expected: (1 1), (1 2), (1 3), (2 2), (1 4), (2 3), (1 5), (2 4), (3 3), (1 6), (2 5), (3 4), (1 7), (2 6), (3 5), (4 4), (1 8), (2 7), (3 6), (4 5), (1 9), (2 8), (3 7), (4 6), (5 5), (1 10), (2 9), (3 8), (4 7), (5 6), (1 11), ...
;; 2.
;; First 44 pairs (i j) whose components are divisible by neither 2, 3
;; nor 5, ordered by the weight 2i + 3j + 5ij.
(let ((not-divisible-by-2-3-5
       (λ (x) (not (or (divisible? x 2)
                       (divisible? x 3)
                       (divisible? x 5)))))
      (weight-fn
       (λ (p) (let ((i (first p)) (j (second p)))
                (+ (* 2 i) (* 3 j) (* 5 i j))))))
  (let ((our-stream-of-pairs
         (weighted-pairs weight-fn
                         (stream-filter not-divisible-by-2-3-5 integers)
                         (stream-filter not-divisible-by-2-3-5 integers))))
    (print-n our-stream-of-pairs 44)))
;; expected: (1 1), (1 7), (1 11), (1 13), (1 17), (1 19), (1 23), (1 29), (1 31), (7 7), (1 37), (1 41), (1 43), (1 47), (1 49), (1 53), (7 11), (1 59), (1 61), (7 13), (1 67), (1 71), (1 73), (1 77), (1 79), (7 17), (11 11), (1 83), (1 89), (1 91), (7 19), (11 13), (1 97), (1 101), (1 103), (1 107), (1 109), (7 23), (1 113), (13 13), (1 119), (1 121), (11 17), (1 127), ...
| null | https://raw.githubusercontent.com/sarabander/p2pu-sicp/fbc49b67dac717da1487629fb2d7a7d86dfdbe32/3.5/3.70.scm | scheme |
Takes two streams of pairs and a weight function
(define (merge-weighted weight s1 s2)
(cond ((stream-null? s1) s2)
((stream-null? s2) s1)
(else
(let ((s1car (stream-car s1))
(s2car (stream-car s2)))
(cond ((<= (weight s1car) (weight s2car))
(cons-stream
s1car
(merge-weighted weight (stream-cdr s1) s2)))
(else
(cons-stream
s2car
(merge-weighted weight s1 (stream-cdr s2)))))))))
(define (weighted-pairs weight s t)
(cons-stream
(list (stream-car s) (stream-car t))
(merge-weighted
weight
(stream-map (lambda (x)
(list (stream-car s) x))
(stream-cdr t))
(weighted-pairs weight (stream-cdr s) (stream-cdr t)))))
1 .
(print-n (weighted-pairs (λ (p) (+ (first p) (second p)))
integers
integers)
31)
( 1 1 ) , ( 1 2 ) , ( 1 3 ) , ( 2 2 ) , ( 1 4 ) , ( 2 3 ) , ( 1 5 ) , ( 2 4 ) , ( 3 3 ) , ( 1 6 ) , ( 2 5 ) , ( 3 4 ) , ( 1 7 ) , ( 2 6 ) , ( 3 5 ) , ( 4 4 ) , ( 1 8) , ( 2 7 ) , ( 3 6 ) , ( 4 5 ) , ( 1 9 ) , ( 2 8) , ( 3 7 ) , ( 4 6 ) , ( 5 5 ) , ( 1 10 ) , ( 2 9 ) , ( 3 8) , ( 4 7 ) , ( 5 6 ) , ( 1 11 ) , ...
2 .
(let ((not-divisible-by-2-3-5
(λ (x) (not (or (divisible? x 2)
(divisible? x 3)
(divisible? x 5)))))
(weight-fn
(λ (p) (let ((i (first p)) (j (second p)))
(+ (* 2 i) (* 3 j) (* 5 i j))))))
(let ((our-stream-of-pairs
(weighted-pairs weight-fn
(stream-filter not-divisible-by-2-3-5 integers)
(stream-filter not-divisible-by-2-3-5 integers))))
(print-n our-stream-of-pairs 44)))
( 1 1 ) , ( 1 7 ) , ( 1 11 ) , ( 1 13 ) , ( 1 17 ) , ( 1 19 ) , ( 1 23 ) , ( 1 29 ) , ( 1 31 ) , ( 7 7 ) , ( 1 37 ) , ( 1 41 ) , ( 1 43 ) , ( 1 47 ) , ( 1 49 ) , ( 1 53 ) , ( 7 11 ) , ( 1 59 ) , ( 1 61 ) , ( 7 13 ) , ( 1 67 ) , ( 1 71 ) , ( 1 73 ) , ( 1 77 ) , ( 1 79 ) , ( 7 17 ) , ( 11 11 ) , ( 1 83 ) , ( 1 89 ) , ( 1 91 ) , ( 7 19 ) , ( 11 13 ) , ( 1 97 ) , ( 1 101 ) , ( 1 103 ) , ( 1 107 ) , ( 1 109 ) , ( 7 23 ) , ( 1 113 ) , ( 13 13 ) , ( 1 119 ) , ( 1 121 ) , ( 11 17 ) , ( 1 127 ) , ...
| |
c2a8b802ce9ddf412bf28e54a6b594767a026bb2002d95d3ca9f181735b9e278 | 7bridges-eu/clj-odbp | db.clj | Copyright 2017 7bridges s.r.l .
;;
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
;; you may not use this file except in compliance with the License.
;; You may obtain a copy of the License at
;;
;; -2.0
;;
;; Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
(ns clj-odbp.operations.db
  ;; NOTE(review): clj-odbp.constants is required twice, under both the
  ;; `consts` and `const` aliases; both aliases are used below, so
  ;; neither can be dropped without also touching the call sites.
  (:require [clj-odbp
             [constants :as consts]
             [utils :refer [decode encode]]]
            [clj-odbp.network.sessions :as sessions]
            [clj-odbp.operations.specs.db :as specs]
            [clj-odbp.constants :as const])
  (:import java.io.DataInputStream))
;; REQUEST_SHUTDOWN
(defn shutdown-request
  "Encode a REQUEST_SHUTDOWN (opcode 1) message.  Sent outside any
  session (session id -1), authenticated with `username`/`password`."
  [username password]
  (encode
   specs/shutdown-request
   [[:operation 1]
    [:session-id -1]
    [:username username]
    [:password password]]))

(defn shutdown-response
  "A shutdown reply carries no payload; nothing is read from `in`."
  [^DataInputStream in]
  {})
;; REQUEST_CONNECT
(defn connect-request
  "Encode a REQUEST_CONNECT (opcode 2) message: driver and protocol
  identification plus credentials, requesting token-based sessions."
  [username password]
  (encode
   specs/connect-request
   [[:operation 2]
    [:session-id -1]
    [:driver-name const/driver-name]
    [:driver-version const/driver-version]
    [:protocol-version const/protocol-version]
    [:client-id ""]
    [:serialization const/serialization-name]
    [:token-session true]
    [:support-push false]
    [:collect-stats false]
    [:username username]
    [:password password]]))

(defn connect-response
  "Decode a REQUEST_CONNECT reply from `in`."
  [^DataInputStream in]
  (decode
   in
   specs/connect-response))
REQUEST_DB_OPEN
(defn db-open-request
  "Encode a REQUEST_DB_OPEN (opcode 3) message: like connect-request but
  additionally naming the database to open."
  [db-name username password]
  (encode
   specs/db-open-request
   [[:operation 3]
    [:session-id -1]
    [:driver-name const/driver-name]
    [:driver-version const/driver-version]
    [:protocol-version const/protocol-version]
    [:client-id ""]
    [:serialization const/serialization-name]
    [:token-session true]
    [:support-push false]
    [:collect-stats false]
    [:database-name db-name]
    [:username username]
    [:password password]]))

(defn db-open-response
  "Decode a REQUEST_DB_OPEN reply from `in`."
  [^DataInputStream in]
  (decode
   in
   specs/db-open-response))
;; REQUEST_DB_CREATE
(defn db-create-request
  "Encode a REQUEST_DB_CREATE (opcode 4) message on an open `connection`
  ({:session-id .. :token ..}).  Options default to a \"graph\" database
  with \"plocal\" storage and an empty backup path."
  [connection db-name
   {:keys [db-type storage-type backup-path]
    :or {db-type "graph" storage-type "plocal" backup-path ""}}]
  (let [session-id (:session-id connection)
        token (:token connection)]
    (encode
     specs/db-create-request
     [[:operation 4]
      [:session-id session-id]
      [:token token]
      [:database-name db-name]
      [:database-type db-type]
      [:storage-type storage-type]
      [:backup-path backup-path]])))

(defn db-create-response
  "Decode a REQUEST_DB_CREATE reply; when it carries a token, replace
  the cached :db session.  Returns nil when no token is present."
  [^DataInputStream in]
  (let [response (decode in specs/db-create-response)]
    (when-not (empty? (:token response))
      (sessions/reset-session! :db)
      (sessions/put-session! response :db))))
;; REQUEST_DB_CLOSE
(defn db-close-request
  "Encode a REQUEST_DB_CLOSE (opcode 5) message; the close request
  carries no session fields."
  []
  (encode
   specs/db-close-request
   [[:operation 5]]))

(defn db-close-response
  "Decode a REQUEST_DB_CLOSE reply; when it carries a token, replace the
  cached :db session.  Returns nil when no token is present."
  [^DataInputStream in]
  (let [response (decode in specs/db-close-response)]
    (when-not (empty? (:token response))
      (sessions/reset-session! :db)
      (sessions/put-session! response :db))))
;; REQUEST_DB_EXIST
(defn db-exist-request
  "Encode a REQUEST_DB_EXIST (opcode 6) message checking for `db-name`
  on plocal storage."
  [connection db-name]
  (let [session-id (:session-id connection)
        token (:token connection)]
    (encode
     specs/db-exist-request
     [[:operation 6]
      [:session-id session-id]
      [:token token]
      [:database-name db-name]
      [:server-storage-type consts/storage-type-plocal]])))

(defn db-exist-response
  "Decode a REQUEST_DB_EXIST reply, refresh the cached :db session when
  a token is present, and return the decoded response."
  [^DataInputStream in]
  (let [response (decode in specs/db-exist-response)
        session (select-keys response [:session-id :token])]
    (when-not (empty? (:token session))
      (sessions/reset-session! :db)
      (sessions/put-session! session :db))
    response))
;; REQUEST_DB_DROP
(defn db-drop-request
  "Encode a REQUEST_DB_DROP (opcode 7) message deleting `db-name` from
  plocal storage."
  [connection db-name]
  (let [session-id (:session-id connection)
        token (:token connection)]
    (encode
     specs/db-drop-request
     [[:operation 7]
      [:session-id session-id]
      [:token token]
      [:database-name db-name]
      [:storage-type consts/storage-type-plocal]])))

(defn db-drop-response
  "Decode a REQUEST_DB_DROP reply; when it carries a token, replace the
  cached :db session.  Returns nil when no token is present."
  [^DataInputStream in]
  (let [response (decode in specs/db-drop-response)]
    (when-not (empty? (:token response))
      (sessions/reset-session! :db)
      (sessions/put-session! response :db))))
;; REQUEST_DB_SIZE
(defn db-size-request
  "Encode a REQUEST_DB_SIZE (opcode 8) message for an open connection."
  [{:keys [session-id token]}]
  (encode
   specs/db-size-request
   [[:operation 8]
    [:session-id session-id]
    [:token token]]))

(defn db-size-response
  "Decode a REQUEST_DB_SIZE reply, refresh the cached :db session when a
  token is present, and return the decoded response."
  [^DataInputStream in]
  (let [response (decode in specs/db-size-response)
        session  (select-keys response [:session-id :token])]
    (when (seq (:token session))
      (sessions/reset-session! :db)
      (sessions/put-session! session :db))
    response))
REQUEST_DB_COUNTRECORDS
(defn db-countrecords-request
  "Encode a REQUEST_DB_COUNTRECORDS (opcode 9) message."
  [connection]
  (let [session-id (:session-id connection)
        token (:token connection)]
    (encode
     specs/db-countrecords-request
     [[:operation 9]
      [:session-id session-id]
      [:token token]])))

(defn db-countrecords-response
  "Decode a REQUEST_DB_COUNTRECORDS reply, refresh the cached :db
  session when a token is present, and return the decoded response."
  [^DataInputStream in]
  (let [response (decode in specs/db-countrecords-response)
        session (select-keys response [:session-id :token])]
    (when-not (empty? (:token session))
      (sessions/reset-session! :db)
      (sessions/put-session! session :db))
    response))
(defn db-reload-request
  "Encode a REQUEST_DB_RELOAD (opcode 73) message."
  [connection]
  (let [session-id (:session-id connection)
        token (:token connection)]
    (encode
     specs/db-reload-request
     [[:operation 73]
      [:session-id session-id]
      [:token token]])))

(defn db-reload-response
  "Decode a REQUEST_DB_RELOAD reply, refresh the cached :db session when
  a token is present, and return the decoded response."
  [^DataInputStream in]
  (let [response (decode in specs/db-reload-response)
        session (select-keys response [:session-id :token])]
    (when-not (empty? (:token session))
      (sessions/reset-session! :db)
      (sessions/put-session! session :db))
    response))
| null | https://raw.githubusercontent.com/7bridges-eu/clj-odbp/5a92515c2e4c6198bd1093ace83da96e30b90829/src/clj_odbp/operations/db.clj | clojure |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
REQUEST_SHUTDOWN
REQUEST_CONNECT
REQUEST_DB_CREATE
REQUEST_DB_CLOSE
REQUEST_DB_EXIST
REQUEST_DB_DROP
REQUEST_DB_SIZE | Copyright 2017 7bridges s.r.l .
distributed under the License is distributed on an " AS IS " BASIS ,
(ns clj-odbp.operations.db
(:require [clj-odbp
[constants :as consts]
[utils :refer [decode encode]]]
[clj-odbp.network.sessions :as sessions]
[clj-odbp.operations.specs.db :as specs]
[clj-odbp.constants :as const])
(:import java.io.DataInputStream))
(defn shutdown-request
[username password]
(encode
specs/shutdown-request
[[:operation 1]
[:session-id -1]
[:username username]
[:password password]]))
(defn shutdown-response
[^DataInputStream in]
{})
(defn connect-request
[username password]
(encode
specs/connect-request
[[:operation 2]
[:session-id -1]
[:driver-name const/driver-name]
[:driver-version const/driver-version]
[:protocol-version const/protocol-version]
[:client-id ""]
[:serialization const/serialization-name]
[:token-session true]
[:support-push false]
[:collect-stats false]
[:username username]
[:password password]]))
(defn connect-response
[^DataInputStream in]
(decode
in
specs/connect-response))
REQUEST_DB_OPEN
(defn db-open-request
[db-name username password]
(encode
specs/db-open-request
[[:operation 3]
[:session-id -1]
[:driver-name const/driver-name]
[:driver-version const/driver-version]
[:protocol-version const/protocol-version]
[:client-id ""]
[:serialization const/serialization-name]
[:token-session true]
[:support-push false]
[:collect-stats false]
[:database-name db-name]
[:username username]
[:password password]]))
(defn db-open-response
[^DataInputStream in]
(decode
in
specs/db-open-response))
(defn db-create-request
[connection db-name
{:keys [db-type storage-type backup-path]
:or {db-type "graph" storage-type "plocal" backup-path ""}}]
(let [session-id (:session-id connection)
token (:token connection)]
(encode
specs/db-create-request
[[:operation 4]
[:session-id session-id]
[:token token]
[:database-name db-name]
[:database-type db-type]
[:storage-type storage-type]
[:backup-path backup-path]])))
(defn db-create-response
[^DataInputStream in]
(let [response (decode in specs/db-create-response)]
(when-not (empty? (:token response))
(sessions/reset-session! :db)
(sessions/put-session! response :db))))
(defn db-close-request
[]
(encode
specs/db-close-request
[[:operation 5]]))
(defn db-close-response
[^DataInputStream in]
(let [response (decode in specs/db-close-response)]
(when-not (empty? (:token response))
(sessions/reset-session! :db)
(sessions/put-session! response :db))))
(defn db-exist-request
[connection db-name]
(let [session-id (:session-id connection)
token (:token connection)]
(encode
specs/db-exist-request
[[:operation 6]
[:session-id session-id]
[:token token]
[:database-name db-name]
[:server-storage-type consts/storage-type-plocal]])))
(defn db-exist-response
[^DataInputStream in]
(let [response (decode in specs/db-exist-response)
session (select-keys response [:session-id :token])]
(when-not (empty? (:token session))
(sessions/reset-session! :db)
(sessions/put-session! session :db))
response))
(defn db-drop-request
[connection db-name]
(let [session-id (:session-id connection)
token (:token connection)]
(encode
specs/db-drop-request
[[:operation 7]
[:session-id session-id]
[:token token]
[:database-name db-name]
[:storage-type consts/storage-type-plocal]])))
(defn db-drop-response
[^DataInputStream in]
(let [response (decode in specs/db-drop-response)]
(when-not (empty? (:token response))
(sessions/reset-session! :db)
(sessions/put-session! response :db))))
(defn db-size-request
[connection]
(let [session-id (:session-id connection)
token (:token connection)]
(encode
specs/db-size-request
[[:operation 8]
[:session-id session-id]
[:token token]])))
(defn db-size-response
[^DataInputStream in]
(let [response (decode in specs/db-size-response)
session (select-keys response [:session-id :token])]
(when-not (empty? (:token session))
(sessions/reset-session! :db)
(sessions/put-session! session :db))
response))
REQUEST_DB_COUNTRECORDS
(defn db-countrecords-request
[connection]
(let [session-id (:session-id connection)
token (:token connection)]
(encode
specs/db-countrecords-request
[[:operation 9]
[:session-id session-id]
[:token token]])))
(defn db-countrecords-response
[^DataInputStream in]
(let [response (decode in specs/db-countrecords-response)
session (select-keys response [:session-id :token])]
(when-not (empty? (:token session))
(sessions/reset-session! :db)
(sessions/put-session! session :db))
response))
(defn db-reload-request
[connection]
(let [session-id (:session-id connection)
token (:token connection)]
(encode
specs/db-reload-request
[[:operation 73]
[:session-id session-id]
[:token token]])))
(defn db-reload-response
[^DataInputStream in]
(let [response (decode in specs/db-reload-response)
session (select-keys response [:session-id :token])]
(when-not (empty? (:token session))
(sessions/reset-session! :db)
(sessions/put-session! session :db))
response))
|
1b022ff23b27c7249c5ab60c6cff81eaef980229e76460abf07ce12ce8573b55 | erlangbureau/jamdb_oracle | jamdb_oracle.erl | -module(jamdb_oracle).
-vsn("0.5.4").
-behaviour(gen_server).
%% API
-export([start_link/1, start/1]).
-export([stop/1]).
-export([sql_query/2, sql_query/3]).
%% gen_server callbacks
-export([init/1, terminate/2]).
-export([handle_call/3, handle_cast/2, handle_info/2]).
-export([code_change/3]).
-define(default_timeout, 5000).
%% API
-spec start_link(jamdb_oracle_conn:options()) -> {ok, pid()} | {error, term()}.
%% @doc Start a connection process linked to the caller; Opts are handed
%% to jamdb_oracle_conn:connect/1 in init/1.
start_link(Opts) when is_list(Opts) ->
    gen_server:start_link(?MODULE, Opts, []).

-spec start(jamdb_oracle_conn:options()) -> {ok, pid()} | {error, term()}.
%% @doc Like start_link/1 but without linking to the caller.
start(Opts) when is_list(Opts) ->
    gen_server:start(?MODULE, Opts, []).

-spec stop(pid()) -> ok.
%% @doc Disconnect and stop the connection process synchronously.
stop(Pid) ->
    gen_server:call(Pid, stop).
%% @doc Run a SQL query on the connection owned by Pid, waiting at most
%% Tout milliseconds.  Tout is carried both inside the request message
%% and as the gen_server call timeout.
sql_query(Pid, Query, Tout) ->
    gen_server:call(Pid, {sql_query, Query, Tout}, Tout).

%% @doc Like sql_query/3 with the default timeout.  Delegates to
%% sql_query/3 so both clauses stay consistent: previously this clause
%% duplicated the call and left the gen_server call timeout at its
%% implicit default (5000 ms) instead of tying it to ?default_timeout,
%% so the two could silently drift apart if the macro changed.
sql_query(Pid, Query) ->
    sql_query(Pid, Query, ?default_timeout).
%% gen_server callbacks
init(Opts) ->
    %% Open the Oracle connection eagerly; the connection state becomes
    %% the server state.  A {ok, Result, _State} three-tuple from
    %% connect/1 makes the process refuse to start, surfacing Result as
    %% the stop reason (presumably a handshake-level failure report --
    %% confirm against jamdb_oracle_conn:connect/1).
    case jamdb_oracle_conn:connect(Opts) of
        {ok, State} ->
            {ok, State};
        {ok, Result, _State} ->
            {stop, Result};
        {error, Type, Result, _State} ->
            {stop, {Type, Result}}
    end.
%% Error types: socket, remote, local
handle_call({sql_query, Query, Tout}, _From, State) ->
    %% Run the query through the connection module; runtime errors are
    %% turned into an {error, local, Reason} reply instead of crashing
    %% the server.  NOTE(review): only the `error` class is caught -- a
    %% `throw` or `exit` from the connection code still terminates the
    %% process; confirm that is intended.
    try jamdb_oracle_conn:sql_query(State, Query, Tout) of
        {ok, Result, State2} ->
            {reply, {ok, Result}, State2};
        {error, Type, Reason, State2} ->
            {reply, {error, Type, Reason}, State2}
    catch
        error:Reason ->
            {reply, {error, local, Reason}, State}
    end;
handle_call(stop, _From, State) ->
    %% Synchronous stop: close the connection (disconnect mode 1 --
    %% meaning defined by jamdb_oracle_conn), reply ok to the stop/1
    %% caller and terminate normally.
    {ok, _InitOpts} = jamdb_oracle_conn:disconnect(State, 1),
    {stop, normal, ok, State};
handle_call(_Request, _From, State) ->
    %% Unknown calls are ignored with a bare ok reply.
    {reply, ok, State}.
%% Remaining callbacks are inert: casts and info messages are ignored,
%% terminate does no extra cleanup, and hot upgrades keep the state
%% unchanged.
handle_cast(_Msg, State) ->
    {noreply, State}.

handle_info(_Info, State) ->
    {noreply, State}.

terminate(_Reason, _State) ->
    ok.

code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
| null | https://raw.githubusercontent.com/erlangbureau/jamdb_oracle/283ede9dc66a9e8423d510fa2b91c5c48fc371ee/src/jamdb_oracle.erl | erlang | API
gen_server callbacks
API
gen_server callbacks
Error types: socket, remote, local | -module(jamdb_oracle).
-vsn("0.5.4").
-behaviour(gen_server).
-export([start_link/1, start/1]).
-export([stop/1]).
-export([sql_query/2, sql_query/3]).
-export([init/1, terminate/2]).
-export([handle_call/3, handle_cast/2, handle_info/2]).
-export([code_change/3]).
-define(default_timeout, 5000).
-spec start_link(jamdb_oracle_conn:options()) -> {ok, pid()} | {error, term()}.
start_link(Opts) when is_list(Opts) ->
gen_server:start_link(?MODULE, Opts, []).
-spec start(jamdb_oracle_conn:options()) -> {ok, pid()} | {error, term()}.
start(Opts) when is_list(Opts) ->
gen_server:start(?MODULE, Opts, []).
-spec stop(pid()) -> ok.
stop(Pid) ->
gen_server:call(Pid, stop).
sql_query(Pid, Query, Tout) ->
gen_server:call(Pid, {sql_query, Query, Tout}, Tout).
sql_query(Pid, Query) ->
gen_server:call(Pid, {sql_query, Query, ?default_timeout}).
init(Opts) ->
case jamdb_oracle_conn:connect(Opts) of
{ok, State} ->
{ok, State};
{ok, Result, _State} ->
{stop, Result};
{error, Type, Result, _State} ->
{stop, {Type, Result}}
end.
handle_call({sql_query, Query, Tout}, _From, State) ->
try jamdb_oracle_conn:sql_query(State, Query, Tout) of
{ok, Result, State2} ->
{reply, {ok, Result}, State2};
{error, Type, Reason, State2} ->
{reply, {error, Type, Reason}, State2}
catch
error:Reason ->
{reply, {error, local, Reason}, State}
end;
handle_call(stop, _From, State) ->
{ok, _InitOpts} = jamdb_oracle_conn:disconnect(State, 1),
{stop, normal, ok, State};
handle_call(_Request, _From, State) ->
{reply, ok, State}.
handle_cast(_Msg, State) ->
{noreply, State}.
handle_info(_Info, State) ->
{noreply, State}.
terminate(_Reason, _State) ->
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
|
027fe26085bdc0ab207d32fd0e82188e52e3fcb4223582aca828c1ce7076eb47 | stylewarning/quickutil | conses.lisp | (in-package #:quickutil-utilities.utilities)
;; Destructive cons reuse: an allocation-free alternative to CONS when a
;; spare cell is available.  PSETF assigns CAR and CDR in parallel.  The
;; body between #>%%%> and %%% is a quickutil template; %%DOC is
;; replaced by the docstring above.
(defutil recons (:version (1 . 0)
                 :category conses)
  "Reuse the cons cell `old-cons`, replacing its CAR with `a` and CDR
with `b`."
  #>%%%>
  (defun recons (a b old-cons)
    %%DOC
    (psetf (car old-cons) a
           (cdr old-cons) b)
    old-cons)
  %%%)
;; Shallow copy of a single cell: the CAR and CDR values themselves are
;; shared with the original, not copied.
(defutil copy-cons (:version (1 . 0)
                    :category (conses orthogonality))
  "Copy the cons cell `c`."
  #>%%%>
  (defun copy-cons (c)
    %%DOC
    (cons (car c) (cdr c)))
  %%%)
| null | https://raw.githubusercontent.com/stylewarning/quickutil/5adb3463d99095145325c4013117bd08a8f6cac2/quickutil-utilities/utilities/conses.lisp | lisp | (in-package #:quickutil-utilities.utilities)
(defutil recons (:version (1 . 0)
:category conses)
"Reuse the cons cell `old-cons`, replacing its CAR with `a` and CDR
with `b`."
#>%%%>
(defun recons (a b old-cons)
%%DOC
(psetf (car old-cons) a
(cdr old-cons) b)
old-cons)
%%%)
(defutil copy-cons (:version (1 . 0)
:category (conses orthogonality))
"Copy the cons cell `c`."
#>%%%>
(defun copy-cons (c)
%%DOC
(cons (car c) (cdr c)))
%%%)
| |
f9b9059a431144d3e2a4b607a034098d19a0e73fdc55693c0094a465dfa170a1 | saman-pasha/LCC | mode.lisp |
(defun lcc-add-keywords (face-name keyword-rules)
(let* ((keyword-list (mapcar #'(lambda (x)
(symbol-name (cdr x)))
keyword-rules))
(keyword-regexp (concat "(\\("
(regexp-opt keyword-list)
"\\)[ \t\n]*")))
(font-lock-add-keywords 'lisp-mode
`((,keyword-regexp 1 ',face-name))))
(mapc #'(lambda (x)
(put (cdr x)
'lisp-indent-function
(car x)))
keyword-rules))
;; Core LCC forms: fontified with `font-lock-keyword-face', each with an
;; indent spec of 1.
(lcc-add-keywords
 'font-lock-keyword-face
 '((1 . format)
   (1 . code)
   (1 . target)
   (1 . guard)
   (1 . include)
   (1 . variable)
   (1 . function)
   (1 . returns)
   (1 . enum)
   (1 . struct)
   (1 . union)
   (1 . member)
   (1 . method)
   (1 . declares)
   (1 . typedef)
   (1 . set)
   (1 . nth)
   (1 . not)
   (1 . and)
   (1 . or)
   (1 . bitand)
   (1 . bitor)
   (1 . xor)
   (1 . contentof)
   (1 . addressof)
   (1 . sizeof)
   (1 . cast)
   (1 . switch)
   (1 . case)
   (1 . default)
   (1 . while)
   (1 . for)
   (1 . for-each)
   (1 . new)
   (1 . printf)
   (1 . scanf)
   ))
(defun lcc-add-attributes (face-name keyword-rules)
  "Register LCC attribute keywords for `lisp-mode' fontification.
Identical to `lcc-add-keywords' except the keyword is matched after an
opening brace `{' rather than an open paren.  KEYWORD-RULES is a list of
\(INDENT . SYMBOL) pairs; INDENT is stored as SYMBOL's
`lisp-indent-function' property."
  (let* ((keyword-list (mapcar #'(lambda (x)
                                   (symbol-name (cdr x)))
                               keyword-rules))
         (keyword-regexp (concat "{\\("
                                 (regexp-opt keyword-list)
                                 "\\)[ \t\n]*")))
    (font-lock-add-keywords 'lisp-mode
                            `((,keyword-regexp 1 ',face-name))))
  (mapc #'(lambda (x)
            (put (cdr x)
                 'lisp-indent-function
                 (car x)))
        keyword-rules))
;; Storage-class attributes written in braces: fontified with
;; `font-lock-preprocessor-face'.
(lcc-add-attributes
 'font-lock-preprocessor-face
 '((1 . static)
   (1 . declare)
   (1 . inline)
   (1 . extern)
   (1 . register)
   (1 . auto)
   ))
(defun lcc-add-types (face-name keyword-rules)
  "Register LCC type names for `lisp-mode' fontification.
Identical to `lcc-add-keywords' except the keyword is matched when
surrounded by whitespace or parentheses rather than in head position.
KEYWORD-RULES is a list of (INDENT . SYMBOL) pairs; INDENT is stored as
SYMBOL's `lisp-indent-function' property."
  (let* ((keyword-list (mapcar #'(lambda (x)
                                   (symbol-name (cdr x)))
                               keyword-rules))
         (keyword-regexp (concat "[ \t\n(]\\("
                                 (regexp-opt keyword-list)
                                 "\\)[ \t\n)]")))
    (font-lock-add-keywords 'lisp-mode
                            `((,keyword-regexp 1 ',face-name))))
  (mapc #'(lambda (x)
            (put (cdr x)
                 'lisp-indent-function
                 (car x)))
        keyword-rules))
;; Built-in type names and literals: fontified with `font-lock-type-face'.
(lcc-add-types
 'font-lock-type-face
 '((1 . void)
   (1 . unsigned)
   (1 . char)
   (1 . uchar)
   (1 . short)
   (1 . ushort)
   (1 . int)
   (1 . uint)
   (1 . long)
   (1 . ulong)
   (1 . llong)
   (1 . ullong)
   (1 . float)
   (1 . double)
   (1 . real)
   (1 . int8_t)
   (1 . uint8_t)
   (1 . int16_t)
   (1 . uint16_t)
   (1 . int32_t)
   (1 . uint32_t)
   (1 . int64_t)
   (1 . uint64_t)
   (1 . int_least8_t)
   (1 . uint_least8_t)
   (1 . int_least16_t)
   (1 . uint_least16_t)
   (1 . int_least32_t)
   (1 . uint_least32_t)
   (1 . int_least64_t)
   (1 . uint_least64_t)
   (1 . int_fast8_t)
   (1 . uint_fast8_t)
   (1 . int_fast16_t)
   (1 . uint_fast16_t)
   (1 . int_fast32_t)
   (1 . uint_fast32_t)
   (1 . int_fast64_t)
   (1 . uint_fast64_t)
   (1 . __int128)
   (1 . i8)
   (1 . u8)
   (1 . i16)
   (1 . u16)
   (1 . i32)
   (1 . u32)
   (1 . i64)
   (1 . u64)
   (1 . i128)
   (1 . u128)
   (1 . intmax_t)
   (1 . intptr_t)
   (1 . bool)
   (1 . true)
   (1 . false)
   (1 . nil)
   ))
;; `@'-prefixed symbols in head position: preprocessor face.
(font-lock-add-keywords
 'lisp-mode
 '(("(\\(@\\(\\sw\\|\\s_\\)+\\)[ \t\n]*"
    (1 'font-lock-preprocessor-face))))

;; The name following `guard': preprocessor face.
(font-lock-add-keywords
 'lisp-mode
 '(("(guard[ \t\n]+\\(\\(\\sw\\|\\s_\\)+\\)[ \t\n]*"
    (1 'font-lock-preprocessor-face))))

;; The name following `function': function-name face.
(font-lock-add-keywords
 'lisp-mode
 '(("(function[ \t\n]+\\(\\(\\sw\\|\\s_\\)+\\)[ \t\n]*"
    (1 'font-lock-function-name-face))))

;; Names following `enum', `struct' and `union': type face.
(font-lock-add-keywords
 'lisp-mode
 '(("(enum[ \t\n]+\\(\\(\\sw\\|\\s_\\)+\\)[ \t\n]*"
    (1 'font-lock-type-face))))

(font-lock-add-keywords
 'lisp-mode
 '(("(struct[ \t\n]+\\(\\(\\sw\\|\\s_\\)+\\)[ \t\n]*"
    (1 'font-lock-type-face))))

(font-lock-add-keywords
 'lisp-mode
 '(("(union[ \t\n]+\\(\\(\\sw\\|\\s_\\)+\\)[ \t\n]*"
    (1 'font-lock-type-face))))
| null | https://raw.githubusercontent.com/saman-pasha/LCC/00087c057676469fc6d6467de0ea563e1967af21/mode.lisp | lisp |
(defun lcc-add-keywords (face-name keyword-rules)
(let* ((keyword-list (mapcar #'(lambda (x)
(symbol-name (cdr x)))
keyword-rules))
(keyword-regexp (concat "(\\("
(regexp-opt keyword-list)
"\\)[ \t\n]*")))
(font-lock-add-keywords 'lisp-mode
`((,keyword-regexp 1 ',face-name))))
(mapc #'(lambda (x)
(put (cdr x)
'lisp-indent-function
(car x)))
keyword-rules))
(lcc-add-keywords
'font-lock-keyword-face
'((1 . format)
(1 . code)
(1 . target)
(1 . guard)
(1 . include)
(1 . variable)
(1 . function)
(1 . returns)
(1 . enum)
(1 . struct)
(1 . union)
(1 . member)
(1 . method)
(1 . declares)
(1 . typedef)
(1 . set)
(1 . nth)
(1 . not)
(1 . and)
(1 . or)
(1 . bitand)
(1 . bitor)
(1 . xor)
(1 . contentof)
(1 . addressof)
(1 . sizeof)
(1 . cast)
(1 . switch)
(1 . case)
(1 . default)
(1 . while)
(1 . for)
(1 . for-each)
(1 . new)
(1 . printf)
(1 . scanf)
))
(defun lcc-add-attributes (face-name keyword-rules)
(let* ((keyword-list (mapcar #'(lambda (x)
(symbol-name (cdr x)))
keyword-rules))
(keyword-regexp (concat "{\\("
(regexp-opt keyword-list)
"\\)[ \t\n]*")))
(font-lock-add-keywords 'lisp-mode
`((,keyword-regexp 1 ',face-name))))
(mapc #'(lambda (x)
(put (cdr x)
'lisp-indent-function
(car x)))
keyword-rules))
(lcc-add-attributes
'font-lock-preprocessor-face
'((1 . static)
(1 . declare)
(1 . inline)
(1 . extern)
(1 . register)
(1 . auto)
))
(defun lcc-add-types (face-name keyword-rules)
(let* ((keyword-list (mapcar #'(lambda (x)
(symbol-name (cdr x)))
keyword-rules))
(keyword-regexp (concat "[ \t\n(]\\("
(regexp-opt keyword-list)
"\\)[ \t\n)]")))
(font-lock-add-keywords 'lisp-mode
`((,keyword-regexp 1 ',face-name))))
(mapc #'(lambda (x)
(put (cdr x)
'lisp-indent-function
(car x)))
keyword-rules))
(lcc-add-types
'font-lock-type-face
'((1 . void)
(1 . unsigned)
(1 . char)
(1 . uchar)
(1 . short)
(1 . ushort)
(1 . int)
(1 . uint)
(1 . long)
(1 . ulong)
(1 . llong)
(1 . ullong)
(1 . float)
(1 . double)
(1 . real)
(1 . int8_t)
(1 . uint8_t)
(1 . int16_t)
(1 . uint16_t)
(1 . int32_t)
(1 . uint32_t)
(1 . int64_t)
(1 . uint64_t)
(1 . int_least8_t)
(1 . uint_least8_t)
(1 . int_least16_t)
(1 . uint_least16_t)
(1 . int_least32_t)
(1 . uint_least32_t)
(1 . int_least64_t)
(1 . uint_least64_t)
(1 . int_fast8_t)
(1 . uint_fast8_t)
(1 . int_fast16_t)
(1 . uint_fast16_t)
(1 . int_fast32_t)
(1 . uint_fast32_t)
(1 . int_fast64_t)
(1 . uint_fast64_t)
(1 . __int128)
(1 . i8)
(1 . u8)
(1 . i16)
(1 . u16)
(1 . i32)
(1 . u32)
(1 . i64)
(1 . u64)
(1 . i128)
(1 . u128)
(1 . intmax_t)
(1 . intptr_t)
(1 . bool)
(1 . true)
(1 . false)
(1 . nil)
))
(font-lock-add-keywords
'lisp-mode
'(("(\\(@\\(\\sw\\|\\s_\\)+\\)[ \t\n]*"
(1 'font-lock-preprocessor-face))))
(font-lock-add-keywords
'lisp-mode
'(("(guard[ \t\n]+\\(\\(\\sw\\|\\s_\\)+\\)[ \t\n]*"
(1 'font-lock-preprocessor-face))))
(font-lock-add-keywords
'lisp-mode
'(("(function[ \t\n]+\\(\\(\\sw\\|\\s_\\)+\\)[ \t\n]*"
(1 'font-lock-function-name-face))))
(font-lock-add-keywords
'lisp-mode
'(("(enum[ \t\n]+\\(\\(\\sw\\|\\s_\\)+\\)[ \t\n]*"
(1 'font-lock-type-face))))
(font-lock-add-keywords
'lisp-mode
'(("(struct[ \t\n]+\\(\\(\\sw\\|\\s_\\)+\\)[ \t\n]*"
(1 'font-lock-type-face))))
(font-lock-add-keywords
'lisp-mode
'(("(union[ \t\n]+\\(\\(\\sw\\|\\s_\\)+\\)[ \t\n]*"
(1 'font-lock-type-face))))
| |
6dd43426da34c3f1c8a67bfc98f3bbc9b91ce47e762d95b8952d6e7eb8ed2b58 | nuprl/gradual-typing-performance | lib.rkt | #lang racket/base
(provide CONST)
(define CONST 5)
| null | https://raw.githubusercontent.com/nuprl/gradual-typing-performance/35442b3221299a9cadba6810573007736b0d65d4/tools/summarize/test/path-overlap/src/lib.rkt | racket | #lang racket/base
(provide CONST)
(define CONST 5)
| |
005696b7a41acc545f7d354a5002c33c32d66c8f2b48d98b54a0cb2bc5f9d2d0 | dwayne/eopl3 | translator.test.rkt | #lang racket
(require "./parser.rkt")
(require "./translator.rkt")
(require rackunit)
(check-equal?
(translate
(parse
#<<CODE
let x = 37
in proc (y)
let z = -(y, x)
in -(x, y)
CODE
))
(a-program
(nameless-let-exp
(const-exp 37)
(nameless-proc-exp
(nameless-let-exp
(diff-exp
(nameless-var-exp 0)
(nameless-var-exp 1))
(diff-exp
(nameless-var-exp 2)
(nameless-var-exp 1)))))))
(check-equal?
(translate
(parse
#<<CODE
let x = 20
in let y = 30
in let z = 40
in let f = proc (a) v
in (f 1)
CODE
))
(a-program
(nameless-let-exp
(const-exp 20)
(nameless-let-exp
(const-exp 30)
(nameless-let-exp
(const-exp 40)
(nameless-let-exp
(nameless-proc-exp
(nameless-var-exp 1))
(call-exp
(nameless-var-exp 0)
(const-exp 1)))))))) | null | https://raw.githubusercontent.com/dwayne/eopl3/9d5fdb2a8dafac3bc48852d49cda8b83e7a825cf/solutions/03-ch3/interpreters/racket/NAMELESS-PROC-3.42-ATTEMPT-1/translator.test.rkt | racket | #lang racket
(require "./parser.rkt")
(require "./translator.rkt")
(require rackunit)
(check-equal?
(translate
(parse
#<<CODE
let x = 37
in proc (y)
let z = -(y, x)
in -(x, y)
CODE
))
(a-program
(nameless-let-exp
(const-exp 37)
(nameless-proc-exp
(nameless-let-exp
(diff-exp
(nameless-var-exp 0)
(nameless-var-exp 1))
(diff-exp
(nameless-var-exp 2)
(nameless-var-exp 1)))))))
(check-equal?
(translate
(parse
#<<CODE
let x = 20
in let y = 30
in let z = 40
in let f = proc (a) v
in (f 1)
CODE
))
(a-program
(nameless-let-exp
(const-exp 20)
(nameless-let-exp
(const-exp 30)
(nameless-let-exp
(const-exp 40)
(nameless-let-exp
(nameless-proc-exp
(nameless-var-exp 1))
(call-exp
(nameless-var-exp 0)
(const-exp 1)))))))) | |
f393607992748bd13991e05c113cd546d86ade55841f9a934edbbf1d40d23bf3 | backtracking/ocamlgraph | testgraphml.ml | (******************************************************************************)
(* *)
Copyright ( C ) 2012 < >
(* *)
(* This library is free software: you can redistribute it and/or modify *)
(* it under the terms of the GNU Lesser General Public License as *)
published by the Free Software Foundation , either version 3 of the
(* License, or (at your option) any later version. A special linking *)
exception to the GNU Lesser General Public License applies to this
(* library, see the COPYING file for more information. *)
(* *)
(******************************************************************************)
module V = struct
type t = int
let compare = compare
let hash i = i
let equal = (=)
end
module G = Graph.Imperative.Digraph.ConcreteBidirectional(V)
module Gr = struct
include G
let vertex_properties = ["id1","string",None; "id2","string",Some "2"]
let edge_properties = ["ed", "string",Some "3"]
let map_edge e = ["ed", string_of_int (E.dst e)]
let map_vertex v = [ "id1", string_of_int v ; "id2", string_of_int v]
let vertex_uid = G.V.hash
let edge_uid e =
Hashtbl.hash (vertex_uid (G.E.src e), G.E.label e, vertex_uid (G.E.dst e))
end
module GraphPrinter = Graph.Graphml.Print(G)(Gr)
let print g = GraphPrinter.print Format.std_formatter g
let () =
let g = G.create () in
G.add_vertex g 1;
G.add_vertex g 2;
G.add_vertex g 3;
G.add_edge g 1 2;
G.add_edge g 1 3;
print g;;
| null | https://raw.githubusercontent.com/backtracking/ocamlgraph/1c028af097339ca8bc379436f7bd9477fa3a49cd/tests/testgraphml.ml | ocaml | ****************************************************************************
This library is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as
License, or (at your option) any later version. A special linking
library, see the COPYING file for more information.
**************************************************************************** | Copyright ( C ) 2012 < >
published by the Free Software Foundation , either version 3 of the
exception to the GNU Lesser General Public License applies to this
module V = struct
type t = int
let compare = compare
let hash i = i
let equal = (=)
end
module G = Graph.Imperative.Digraph.ConcreteBidirectional(V)
module Gr = struct
include G
let vertex_properties = ["id1","string",None; "id2","string",Some "2"]
let edge_properties = ["ed", "string",Some "3"]
let map_edge e = ["ed", string_of_int (E.dst e)]
let map_vertex v = [ "id1", string_of_int v ; "id2", string_of_int v]
let vertex_uid = G.V.hash
let edge_uid e =
Hashtbl.hash (vertex_uid (G.E.src e), G.E.label e, vertex_uid (G.E.dst e))
end
module GraphPrinter = Graph.Graphml.Print(G)(Gr)
let print g = GraphPrinter.print Format.std_formatter g
let () =
let g = G.create () in
G.add_vertex g 1;
G.add_vertex g 2;
G.add_vertex g 3;
G.add_edge g 1 2;
G.add_edge g 1 3;
print g;;
|
ec1d108711dbe329b1fc4c9650614871f162e9d52fd311c6e9e8187c46652fe1 | mokus0/hs-hdf5 | LCPL.hs | # LANGUAGE GeneralizedNewtypeDeriving #
module Bindings.HDF5.PropertyList.LCPL
( module Bindings.HDF5.PropertyList.STRCPL
, LCPL
, LinkCreationPropertyList(..)
) where
import Bindings.HDF5.Core
import Bindings.HDF5.PropertyList.STRCPL
class StringCreationPropertyList t => LinkCreationPropertyList t where
newtype LCPL = LCPL STRCPL
deriving (Eq, HId, FromHId, HDFResultType, PropertyListOrClass, StringCreationPropertyList)
instance PropertyList LCPL where
staticPlistClass = Tagged linkCreate
instance LinkCreationPropertyList LCPL
-- TODO: implement functions
| null | https://raw.githubusercontent.com/mokus0/hs-hdf5/196e0d714a34e97358be9a907af0b93b4c7922e3/src/Bindings/HDF5/PropertyList/LCPL.hs | haskell | TODO: implement functions | # LANGUAGE GeneralizedNewtypeDeriving #
module Bindings.HDF5.PropertyList.LCPL
( module Bindings.HDF5.PropertyList.STRCPL
, LCPL
, LinkCreationPropertyList(..)
) where
import Bindings.HDF5.Core
import Bindings.HDF5.PropertyList.STRCPL
class StringCreationPropertyList t => LinkCreationPropertyList t where
newtype LCPL = LCPL STRCPL
deriving (Eq, HId, FromHId, HDFResultType, PropertyListOrClass, StringCreationPropertyList)
instance PropertyList LCPL where
staticPlistClass = Tagged linkCreate
instance LinkCreationPropertyList LCPL
|
c16f972c12eb2116bdd5f24f6f0097eb8120f52e3c1d19f08f1764ef5739b8e5 | karimarttila/clojure | session_common.clj | (ns simpleserver.session.session-common
(:require [clojure.tools.logging :as log]
[buddy.sign.jwt :as buddy-jwt]
[clj-time.core :as c-time]
[simpleserver.util.config :as ss-config]
))
(def my-hex-secret
"Creates dynamically a hex secret when the server boots."
((fn []
(let [my-chars (->> (range (int \a) (inc (int \z))) (map char))
my-ints (->> (range (int \0) (inc (int \9))) (map char))
my-set (lazy-cat my-chars my-ints)
hexify (fn [s]
(apply str
(map #(format "%02x" (int %)) s)))]
(hexify (repeatedly 24 #(rand-nth my-set)))))))
(defn create-json-web-token
[email]
(log/debug (str "ENTER create-json-web-token, email: " email))
(let [my-secret my-hex-secret
exp-time (c-time/plus (c-time/now) (c-time/seconds (get-in ss-config/config [:jwt :exp])))
my-claim {:email email :exp exp-time}
json-web-token (buddy-jwt/sign my-claim my-secret)]
json-web-token))
(defn validate-token
[token get-token remove-token]
(log/debug (str "ENTER validate-token, token: " token))
(let [found-token (get-token token)]
Part # 1 of validation .
(if (nil? found-token)
(do
(log/warn (str "Token not found in my sessions - unknown token: " token))
nil)
Part # 2 of validation .
(try
(buddy-jwt/unsign token my-hex-secret)
(catch Exception e
(if (.contains (.getMessage e) "Token is expired")
(do
(log/debug (str "Token is expired, removing it from my sessions and returning nil: " token))
(remove-token token)
nil)
; Some other issue, throw it.
(do
(log/error (str "Some unknown exception when handling expired token, exception: " (.getMessage e)) ", token: " token)
nil)))))))
| null | https://raw.githubusercontent.com/karimarttila/clojure/ee1261b9a8e6be92cb47aeb325f82a278f2c1ed3/webstore-demo/simple-server/src/simpleserver/session/session_common.clj | clojure | Some other issue, throw it. | (ns simpleserver.session.session-common
(:require [clojure.tools.logging :as log]
[buddy.sign.jwt :as buddy-jwt]
[clj-time.core :as c-time]
[simpleserver.util.config :as ss-config]
))
(def my-hex-secret
"Creates dynamically a hex secret when the server boots."
((fn []
(let [my-chars (->> (range (int \a) (inc (int \z))) (map char))
my-ints (->> (range (int \0) (inc (int \9))) (map char))
my-set (lazy-cat my-chars my-ints)
hexify (fn [s]
(apply str
(map #(format "%02x" (int %)) s)))]
(hexify (repeatedly 24 #(rand-nth my-set)))))))
(defn create-json-web-token
[email]
(log/debug (str "ENTER create-json-web-token, email: " email))
(let [my-secret my-hex-secret
exp-time (c-time/plus (c-time/now) (c-time/seconds (get-in ss-config/config [:jwt :exp])))
my-claim {:email email :exp exp-time}
json-web-token (buddy-jwt/sign my-claim my-secret)]
json-web-token))
(defn validate-token
[token get-token remove-token]
(log/debug (str "ENTER validate-token, token: " token))
(let [found-token (get-token token)]
Part # 1 of validation .
(if (nil? found-token)
(do
(log/warn (str "Token not found in my sessions - unknown token: " token))
nil)
Part # 2 of validation .
(try
(buddy-jwt/unsign token my-hex-secret)
(catch Exception e
(if (.contains (.getMessage e) "Token is expired")
(do
(log/debug (str "Token is expired, removing it from my sessions and returning nil: " token))
(remove-token token)
nil)
(do
(log/error (str "Some unknown exception when handling expired token, exception: " (.getMessage e)) ", token: " token)
nil)))))))
|
f1ef11562b85153cebe55695115e1d1f1ef1f963d00c41e124525420d81b63b5 | alexandergunnarson/quantum | form.cljc | (ns quantum.untyped.core.form
(:require
[quantum.untyped.core.core :as ucore
:refer [defalias]]
[quantum.untyped.core.form.evaluate
:refer [case-env*]]
[quantum.untyped.core.form.generate :as ufgen]))
(ucore/log-this-ns)
(defn core-symbol [env sym] (symbol (str (case-env* env :cljs "cljs" "clojure") ".core") (name sym)))
TODO move this code generation code to a different namespace
;; ===== Code quoting ===== ;;
------------- SYNTAX QUOTE ; QUOTE+ -------------
#?(:clj (defalias syntax-quote clojure.tools.reader/syntax-quote))
#?(:clj
(defn unquote-replacement
"Replaces each instance of `(clojure.core/unquote <whatever>)` in `quoted-form` with
the unquoted version of its inner content."
{:examples '{(unquote-replacement {'a 3} '(+ 1 ~a))
'(+ 1 3)}}
[sym-map quoted-form]
(ucore/prewalk
(fn [x]
(if (and (seq? x)
(-> x count (= 2))
(-> x (nth 0) (= 'clojure.core/unquote)))
(if (contains? sym-map (nth x 1))
(get sym-map (nth x 1))
(eval (nth x 1)))
x))
quoted-form)))
#?(:clj
(defmacro quote+
"Normal quoting with unquoting that works as in |syntax-quote|."
{:examples '{(let [a 1]
(quote+ (for [b 2] (inc ~a))))
'(for [a 1] (inc 1))}}
[form]
`(unquote-replacement (locals) '~form)))
#?(:clj
(defmacro $
"Reproducibly, unifiedly syntax quote without messing up the format as a literal
syntax quote might do."
[body]
`(binding [ufgen/*reproducible-gensym* (ufgen/reproducible-gensym|generator)]
(ufgen/unify-gensyms (syntax-quote ~body) true))))
| null | https://raw.githubusercontent.com/alexandergunnarson/quantum/0c655af439734709566110949f9f2f482e468509/src-untyped/quantum/untyped/core/form.cljc | clojure | ===== Code quoting ===== ;;
QUOTE+ ------------- | (ns quantum.untyped.core.form
(:require
[quantum.untyped.core.core :as ucore
:refer [defalias]]
[quantum.untyped.core.form.evaluate
:refer [case-env*]]
[quantum.untyped.core.form.generate :as ufgen]))
(ucore/log-this-ns)
(defn core-symbol [env sym] (symbol (str (case-env* env :cljs "cljs" "clojure") ".core") (name sym)))
TODO move this code generation code to a different namespace
#?(:clj (defalias syntax-quote clojure.tools.reader/syntax-quote))
#?(:clj
(defn unquote-replacement
"Replaces each instance of `(clojure.core/unquote <whatever>)` in `quoted-form` with
the unquoted version of its inner content."
{:examples '{(unquote-replacement {'a 3} '(+ 1 ~a))
'(+ 1 3)}}
[sym-map quoted-form]
(ucore/prewalk
(fn [x]
(if (and (seq? x)
(-> x count (= 2))
(-> x (nth 0) (= 'clojure.core/unquote)))
(if (contains? sym-map (nth x 1))
(get sym-map (nth x 1))
(eval (nth x 1)))
x))
quoted-form)))
#?(:clj
(defmacro quote+
"Normal quoting with unquoting that works as in |syntax-quote|."
{:examples '{(let [a 1]
(quote+ (for [b 2] (inc ~a))))
'(for [a 1] (inc 1))}}
[form]
`(unquote-replacement (locals) '~form)))
#?(:clj
(defmacro $
"Reproducibly, unifiedly syntax quote without messing up the format as a literal
syntax quote might do."
[body]
`(binding [ufgen/*reproducible-gensym* (ufgen/reproducible-gensym|generator)]
(ufgen/unify-gensyms (syntax-quote ~body) true))))
|
711c53389efa31fc70207baab461a28a13dc9456a01fa98c9c25b9fc9be795d0 | binsec/haunted | riscv_arch.mli | (**************************************************************************)
This file is part of BINSEC .
(* *)
Copyright ( C ) 2016 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
(* *)
(**************************************************************************)
module type S = sig
type t
val zero : t
* Zero ( hard - wired ) aka x0
val ra : t
(** Return address / x1 *)
val sp: t
val gp: t
val tp: t
val fp: t (** Same as s0 *)
val a0: t
val a1: t
val a2: t
val a3: t
val a4: t
val a5: t
val a6: t
val a7: t
val t0: t
val t1: t
val t2: t
val t3: t
val t4: t
val t5: t
val t6: t
val s0: t
val s1: t
val s2: t
val s3: t
val s4: t
val s5: t
val s6: t
val s7: t
val s8: t
val s9: t
val s10: t
val s11: t
val name: t -> string
val size: t -> int
val num: t -> int
val bvnum : t -> Bitvector.t
val of_string : string -> t option
val of_int_exn : int -> t
val of_int : int -> t option
val expr: t -> Dba.Expr.t
val lval: t -> Dba.LValue.t
end
module Mode : sig
[@@@warning "-37"]
type t
val m32 : t
val m64 : t
val m128 : t
val is_m32 : t -> bool
val is_m64 : t -> bool
val is_m128 : t -> bool
val size : t -> int
(** size in bits *)
end
module V32 : S
module V64 : S
module V128 : S
| null | https://raw.githubusercontent.com/binsec/haunted/7ffc5f4072950fe138f53fe953ace98fff181c73/src/disasm/riscv/riscv_arch.mli | ocaml | ************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
* Return address / x1
* Same as s0
* size in bits | This file is part of BINSEC .
Copyright ( C ) 2016 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
module type S = sig
type t
val zero : t
* Zero ( hard - wired ) aka x0
val ra : t
val sp: t
val gp: t
val tp: t
val a0: t
val a1: t
val a2: t
val a3: t
val a4: t
val a5: t
val a6: t
val a7: t
val t0: t
val t1: t
val t2: t
val t3: t
val t4: t
val t5: t
val t6: t
val s0: t
val s1: t
val s2: t
val s3: t
val s4: t
val s5: t
val s6: t
val s7: t
val s8: t
val s9: t
val s10: t
val s11: t
val name: t -> string
val size: t -> int
val num: t -> int
val bvnum : t -> Bitvector.t
val of_string : string -> t option
val of_int_exn : int -> t
val of_int : int -> t option
val expr: t -> Dba.Expr.t
val lval: t -> Dba.LValue.t
end
module Mode : sig
[@@@warning "-37"]
type t
val m32 : t
val m64 : t
val m128 : t
val is_m32 : t -> bool
val is_m64 : t -> bool
val is_m128 : t -> bool
val size : t -> int
end
module V32 : S
module V64 : S
module V128 : S
|
3c242ed3a390736308927b64e2953ab216d2f2d39598357a123fdc2c89229d93 | finnishtransportagency/harja | kartta_debug.cljs | (ns harja.ui.kartta-debug)
(defn kartta-layers
[& args]
)
(defn aseta-kartta-debug-sijainti
[& args]
)
(defn nayta-kartan-debug []
)
| null | https://raw.githubusercontent.com/finnishtransportagency/harja/488b1e096f0611e175221d74ba4f2ffed6bea8f1/src/cljs-prod/harja/ui/kartta_debug.cljs | clojure | (ns harja.ui.kartta-debug)
(defn kartta-layers
[& args]
)
(defn aseta-kartta-debug-sijainti
[& args]
)
(defn nayta-kartan-debug []
)
| |
a793146d6c1d5471b63febe91b9aab8edf08af7e6db638b766d999a0edc1722b | simplegeo/erlang | wxGridCellFloatRenderer.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 2009 . All Rights Reserved .
%%
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at /.
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% %CopyrightEnd%
%% This file is generated DO NOT EDIT
%% @doc See external documentation: <a href="">wxGridCellFloatRenderer</a>.
%% <p>This class is derived (and can use functions) from:
%% <br />{@link wxGridCellStringRenderer}
< br />{@link wxGridCellRenderer }
%% </p>
%% @type wxGridCellFloatRenderer(). An object reference, The representation is internal
%% and can be changed without notice. It can't be used for comparsion
%% stored on disc or distributed for use on other nodes.
-module(wxGridCellFloatRenderer).
-include("wxe.hrl").
-export([destroy/1,getPrecision/1,getWidth/1,new/0,new/1,setParameters/2,setPrecision/2,
setWidth/2]).
%% inherited exports
-export([draw/8,getBestSize/6,parent_class/1]).
%% @hidden
parent_class(wxGridCellStringRenderer) -> true;
parent_class(wxGridCellRenderer) -> true;
parent_class(_Class) -> erlang:error({badtype, ?MODULE}).
( ) - > wxGridCellFloatRenderer ( )
%% @equiv new([])
new() ->
new([]).
( [ Option ] ) - > wxGridCellFloatRenderer ( )
%% Option = {width, integer()} | {precision, integer()}
%% @doc See <a href="#wxgridcellfloatrendererwxgridcellfloatrenderer">external documentation</a>.
new(Options)
when is_list(Options) ->
MOpts = fun({width, Width}, Acc) -> [<<1:32/?UI,Width:32/?UI>>|Acc];
({precision, Precision}, Acc) -> [<<2:32/?UI,Precision:32/?UI>>|Acc];
(BadOpt, _) -> erlang:error({badoption, BadOpt}) end,
BinOpt = list_to_binary(lists:foldl(MOpts, [<<0:32>>], Options)),
wxe_util:construct(?wxGridCellFloatRenderer_new,
<<BinOpt/binary>>).
%% @spec (This::wxGridCellFloatRenderer()) -> integer()
%% @doc See <a href="#wxgridcellfloatrenderergetprecision">external documentation</a>.
getPrecision(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxGridCellFloatRenderer),
wxe_util:call(?wxGridCellFloatRenderer_GetPrecision,
<<ThisRef:32/?UI>>).
%% @spec (This::wxGridCellFloatRenderer()) -> integer()
%% @doc See <a href="#wxgridcellfloatrenderergetwidth">external documentation</a>.
getWidth(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxGridCellFloatRenderer),
wxe_util:call(?wxGridCellFloatRenderer_GetWidth,
<<ThisRef:32/?UI>>).
%% @spec (This::wxGridCellFloatRenderer(), Params::string()) -> ok
%% @doc See <a href="#wxgridcellfloatrenderersetparameters">external documentation</a>.
setParameters(#wx_ref{type=ThisT,ref=ThisRef},Params)
when is_list(Params) ->
?CLASS(ThisT,wxGridCellFloatRenderer),
Params_UC = unicode:characters_to_binary([Params,0]),
wxe_util:cast(?wxGridCellFloatRenderer_SetParameters,
<<ThisRef:32/?UI,(byte_size(Params_UC)):32/?UI,(Params_UC)/binary, 0:(((8- ((0+byte_size(Params_UC)) band 16#7)) band 16#7))/unit:8>>).
%% @spec (This::wxGridCellFloatRenderer(), Precision::integer()) -> ok
%% @doc See <a href="#wxgridcellfloatrenderersetprecision">external documentation</a>.
setPrecision(#wx_ref{type=ThisT,ref=ThisRef},Precision)
when is_integer(Precision) ->
?CLASS(ThisT,wxGridCellFloatRenderer),
wxe_util:cast(?wxGridCellFloatRenderer_SetPrecision,
<<ThisRef:32/?UI,Precision:32/?UI>>).
%% @spec (This::wxGridCellFloatRenderer(), Width::integer()) -> ok
%% @doc See <a href="#wxgridcellfloatrenderersetwidth">external documentation</a>.
setWidth(#wx_ref{type=ThisT,ref=ThisRef},Width)
when is_integer(Width) ->
?CLASS(ThisT,wxGridCellFloatRenderer),
wxe_util:cast(?wxGridCellFloatRenderer_SetWidth,
<<ThisRef:32/?UI,Width:32/?UI>>).
%% @spec (This::wxGridCellFloatRenderer()) -> ok
%% @doc Destroys this object, do not use object again
destroy(Obj=#wx_ref{type=Type}) ->
?CLASS(Type,wxGridCellFloatRenderer),
wxe_util:destroy(?wxGridCellFloatRenderer_destroy,Obj),
ok.
%% From wxGridCellStringRenderer
From wxGridCellRenderer
%% @hidden
getBestSize(This,Grid,Attr,Dc,Row,Col) -> wxGridCellRenderer:getBestSize(This,Grid,Attr,Dc,Row,Col).
%% @hidden
draw(This,Grid,Attr,Dc,Rect,Row,Col,IsSelected) -> wxGridCellRenderer:draw(This,Grid,Attr,Dc,Rect,Row,Col,IsSelected).
| null | https://raw.githubusercontent.com/simplegeo/erlang/15eda8de27ba73d176c7eeb3a70a64167f50e2c4/lib/wx/src/gen/wxGridCellFloatRenderer.erl | erlang |
%CopyrightBegin%
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at /.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
%CopyrightEnd%
This file is generated DO NOT EDIT
@doc See external documentation: <a href="">wxGridCellFloatRenderer</a>.
<p>This class is derived (and can use functions) from:
<br />{@link wxGridCellStringRenderer}
</p>
@type wxGridCellFloatRenderer(). An object reference, The representation is internal
and can be changed without notice. It can't be used for comparsion
stored on disc or distributed for use on other nodes.
inherited exports
@hidden
@equiv new([])
Option = {width, integer()} | {precision, integer()}
@doc See <a href="#wxgridcellfloatrendererwxgridcellfloatrenderer">external documentation</a>.
@spec (This::wxGridCellFloatRenderer()) -> integer()
@doc See <a href="#wxgridcellfloatrenderergetprecision">external documentation</a>.
@spec (This::wxGridCellFloatRenderer()) -> integer()
@doc See <a href="#wxgridcellfloatrenderergetwidth">external documentation</a>.
@spec (This::wxGridCellFloatRenderer(), Params::string()) -> ok
@doc See <a href="#wxgridcellfloatrenderersetparameters">external documentation</a>.
@spec (This::wxGridCellFloatRenderer(), Precision::integer()) -> ok
@doc See <a href="#wxgridcellfloatrenderersetprecision">external documentation</a>.
@spec (This::wxGridCellFloatRenderer(), Width::integer()) -> ok
@doc See <a href="#wxgridcellfloatrenderersetwidth">external documentation</a>.
@spec (This::wxGridCellFloatRenderer()) -> ok
@doc Destroys this object, do not use object again
From wxGridCellStringRenderer
@hidden
@hidden | Copyright Ericsson AB 2009 . All Rights Reserved .
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
< br />{@link wxGridCellRenderer }
-module(wxGridCellFloatRenderer).
-include("wxe.hrl").
-export([destroy/1,getPrecision/1,getWidth/1,new/0,new/1,setParameters/2,setPrecision/2,
setWidth/2]).
-export([draw/8,getBestSize/6,parent_class/1]).
parent_class(wxGridCellStringRenderer) -> true;
parent_class(wxGridCellRenderer) -> true;
parent_class(_Class) -> erlang:error({badtype, ?MODULE}).
( ) - > wxGridCellFloatRenderer ( )
new() ->
new([]).
( [ Option ] ) - > wxGridCellFloatRenderer ( )
new(Options)
when is_list(Options) ->
MOpts = fun({width, Width}, Acc) -> [<<1:32/?UI,Width:32/?UI>>|Acc];
({precision, Precision}, Acc) -> [<<2:32/?UI,Precision:32/?UI>>|Acc];
(BadOpt, _) -> erlang:error({badoption, BadOpt}) end,
BinOpt = list_to_binary(lists:foldl(MOpts, [<<0:32>>], Options)),
wxe_util:construct(?wxGridCellFloatRenderer_new,
<<BinOpt/binary>>).
getPrecision(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxGridCellFloatRenderer),
wxe_util:call(?wxGridCellFloatRenderer_GetPrecision,
<<ThisRef:32/?UI>>).
getWidth(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxGridCellFloatRenderer),
wxe_util:call(?wxGridCellFloatRenderer_GetWidth,
<<ThisRef:32/?UI>>).
setParameters(#wx_ref{type=ThisT,ref=ThisRef},Params)
when is_list(Params) ->
?CLASS(ThisT,wxGridCellFloatRenderer),
Params_UC = unicode:characters_to_binary([Params,0]),
wxe_util:cast(?wxGridCellFloatRenderer_SetParameters,
<<ThisRef:32/?UI,(byte_size(Params_UC)):32/?UI,(Params_UC)/binary, 0:(((8- ((0+byte_size(Params_UC)) band 16#7)) band 16#7))/unit:8>>).
setPrecision(#wx_ref{type=ThisT,ref=ThisRef},Precision)
when is_integer(Precision) ->
?CLASS(ThisT,wxGridCellFloatRenderer),
wxe_util:cast(?wxGridCellFloatRenderer_SetPrecision,
<<ThisRef:32/?UI,Precision:32/?UI>>).
setWidth(#wx_ref{type=ThisT,ref=ThisRef},Width)
when is_integer(Width) ->
?CLASS(ThisT,wxGridCellFloatRenderer),
wxe_util:cast(?wxGridCellFloatRenderer_SetWidth,
<<ThisRef:32/?UI,Width:32/?UI>>).
destroy(Obj=#wx_ref{type=Type}) ->
?CLASS(Type,wxGridCellFloatRenderer),
wxe_util:destroy(?wxGridCellFloatRenderer_destroy,Obj),
ok.
From wxGridCellRenderer
getBestSize(This,Grid,Attr,Dc,Row,Col) -> wxGridCellRenderer:getBestSize(This,Grid,Attr,Dc,Row,Col).
draw(This,Grid,Attr,Dc,Rect,Row,Col,IsSelected) -> wxGridCellRenderer:draw(This,Grid,Attr,Dc,Rect,Row,Col,IsSelected).
|
9d5267719f051a0cf7ee3f42f97a8225fcc30c3cd949488d59f664094a82223c | mojombo/yaws | wiki_yaws.erl |
% File: wiki_yaws.erl (~jb/work/wiki/src/wiki_yaws.erl)
Author :
Created : Thu Jun 27 22:26:49 2002
% Purpose: Yaws support utilities
-module('wiki_yaws').
-author('').
-export([get_path_prefix/1, parse_multipost/1, parse_post/2,
call_with_multi/3, call_with_query/3, call_with_post/3,
call_wiki/3, call_with_multiquery/3]).
-include("../../../include/yaws_api.hrl").
get_path_prefix(UrlPath) ->
%% search for initial path part
case string:rchr(UrlPath, $/) of
0 ->
UrlPath;
N ->
lists:sublist(UrlPath, N)
end.
parse_multipost(Arg) ->
case yaws_api:parse_multipart_post(Arg) of
{result, PostList} when Arg#arg.state == undefined->
{done, parse_post(PostList,[])};
{result, PostList} ->
Params = Arg#arg.state++PostList,
{done, parse_post(Params,[])};
{cont, Cont, Res} when Arg#arg.state == undefined ->
{get_more, Cont, Res};
{cont, Cont, Res} ->
{get_more, Cont, Arg#arg.state ++ Res}
end.
parse_post([], Acc) -> Acc;
parse_post([{head, {Name, Opts}}|Rest], Acc) ->
parse_post(Rest, [{to_string(Name), "", Opts}|Acc]);
parse_post([{body, Data}|Rest], [{Name, Value, Opts}|Acc]) ->
parse_post(Rest, [{to_string(Name), Value++Data, Opts}|Acc]);
parse_post([{part_body, Data}|Rest], [{Name, Value, Opts}|Acc]) ->
parse_post(Rest, [{to_string(Name), Value++Data, Opts}|Acc]);
parse_post([{Name, Value}|Rest], Acc) ->
parse_post(Rest, [{to_string(Name), Value, []}|Acc]).
to_string(Atom) when atom(Atom) ->
atom_to_list(Atom);
to_string(String) ->
String.
call_with_multi(M, F, Arg) ->
case parse_multipost(Arg) of
{done, Params} ->
WikiRoot = filename:dirname(Arg#arg.fullpath),
{abs_path, P} = (Arg#arg.req)#http_request.path,
Path = yaws_api:url_decode(P),
Prefix = wiki_yaws:get_path_prefix(Path),
M:F(Params, WikiRoot, Prefix);
{get_more, Cont, State} ->
{get_more, Cont, State}
end.
call_with_multiquery(M, F, Arg) ->
case parse_multipost(Arg) of
{done, Params} ->
WikiRoot = filename:dirname(Arg#arg.fullpath),
{abs_path, P} = (Arg#arg.req)#http_request.path,
Path = yaws_api:url_decode(P),
Prefix = wiki_yaws:get_path_prefix(Path),
QueryArgs = yaws_api:parse_query(Arg),
QParams = [{N,V,[]} || {N,V} <- QueryArgs],
M:F(QParams++Params, WikiRoot, Prefix);
{get_more, Cont, State} ->
{get_more, Cont, State}
end.
call_with_post(M, F, Arg) ->
QueryArgs = yaws_api:parse_post(Arg),
Params = [{N,V,[]} || {N,V} <- QueryArgs],
WikiRoot = filename:dirname(Arg#arg.fullpath),
{abs_path, P} = (Arg#arg.req)#http_request.path,
Path = yaws_api:url_decode(P),
Prefix = wiki_yaws:get_path_prefix(Path),
M:F(Params, WikiRoot, Prefix).
call_with_query(M, F, Arg) ->
QueryArgs = yaws_api:parse_query(Arg),
Params = [{N,V,[]} || {N,V} <- QueryArgs],
WikiRoot = filename:dirname(Arg#arg.fullpath),
{abs_path, P} = (Arg#arg.req)#http_request.path,
Path = yaws_api:url_decode(P),
Prefix = wiki_yaws:get_path_prefix(Path),
M:F(Params, WikiRoot, Prefix).
call_wiki(M, F, Arg) ->
WikiRoot = filename:dirname(Arg#arg.fullpath),
{abs_path, P} = (Arg#arg.req)#http_request.path,
Path = yaws_api:url_decode(P),
Prefix = wiki_yaws:get_path_prefix(Path),
M:F([], WikiRoot, Prefix).
| null | https://raw.githubusercontent.com/mojombo/yaws/f75fde80f1e35a87335f21d0983e3285eb665f17/applications/wiki/src/wiki_yaws.erl | erlang | File: wiki_yaws.erl (~jb/work/wiki/src/wiki_yaws.erl)
Purpose: Yaws support utilities
search for initial path part |
Author :
Created : Thu Jun 27 22:26:49 2002
-module('wiki_yaws').
-author('').
-export([get_path_prefix/1, parse_multipost/1, parse_post/2,
call_with_multi/3, call_with_query/3, call_with_post/3,
call_wiki/3, call_with_multiquery/3]).
-include("../../../include/yaws_api.hrl").
get_path_prefix(UrlPath) ->
case string:rchr(UrlPath, $/) of
0 ->
UrlPath;
N ->
lists:sublist(UrlPath, N)
end.
parse_multipost(Arg) ->
case yaws_api:parse_multipart_post(Arg) of
{result, PostList} when Arg#arg.state == undefined->
{done, parse_post(PostList,[])};
{result, PostList} ->
Params = Arg#arg.state++PostList,
{done, parse_post(Params,[])};
{cont, Cont, Res} when Arg#arg.state == undefined ->
{get_more, Cont, Res};
{cont, Cont, Res} ->
{get_more, Cont, Arg#arg.state ++ Res}
end.
parse_post([], Acc) -> Acc;
parse_post([{head, {Name, Opts}}|Rest], Acc) ->
parse_post(Rest, [{to_string(Name), "", Opts}|Acc]);
parse_post([{body, Data}|Rest], [{Name, Value, Opts}|Acc]) ->
parse_post(Rest, [{to_string(Name), Value++Data, Opts}|Acc]);
parse_post([{part_body, Data}|Rest], [{Name, Value, Opts}|Acc]) ->
parse_post(Rest, [{to_string(Name), Value++Data, Opts}|Acc]);
parse_post([{Name, Value}|Rest], Acc) ->
parse_post(Rest, [{to_string(Name), Value, []}|Acc]).
to_string(Atom) when atom(Atom) ->
atom_to_list(Atom);
to_string(String) ->
String.
call_with_multi(M, F, Arg) ->
case parse_multipost(Arg) of
{done, Params} ->
WikiRoot = filename:dirname(Arg#arg.fullpath),
{abs_path, P} = (Arg#arg.req)#http_request.path,
Path = yaws_api:url_decode(P),
Prefix = wiki_yaws:get_path_prefix(Path),
M:F(Params, WikiRoot, Prefix);
{get_more, Cont, State} ->
{get_more, Cont, State}
end.
call_with_multiquery(M, F, Arg) ->
case parse_multipost(Arg) of
{done, Params} ->
WikiRoot = filename:dirname(Arg#arg.fullpath),
{abs_path, P} = (Arg#arg.req)#http_request.path,
Path = yaws_api:url_decode(P),
Prefix = wiki_yaws:get_path_prefix(Path),
QueryArgs = yaws_api:parse_query(Arg),
QParams = [{N,V,[]} || {N,V} <- QueryArgs],
M:F(QParams++Params, WikiRoot, Prefix);
{get_more, Cont, State} ->
{get_more, Cont, State}
end.
call_with_post(M, F, Arg) ->
QueryArgs = yaws_api:parse_post(Arg),
Params = [{N,V,[]} || {N,V} <- QueryArgs],
WikiRoot = filename:dirname(Arg#arg.fullpath),
{abs_path, P} = (Arg#arg.req)#http_request.path,
Path = yaws_api:url_decode(P),
Prefix = wiki_yaws:get_path_prefix(Path),
M:F(Params, WikiRoot, Prefix).
call_with_query(M, F, Arg) ->
QueryArgs = yaws_api:parse_query(Arg),
Params = [{N,V,[]} || {N,V} <- QueryArgs],
WikiRoot = filename:dirname(Arg#arg.fullpath),
{abs_path, P} = (Arg#arg.req)#http_request.path,
Path = yaws_api:url_decode(P),
Prefix = wiki_yaws:get_path_prefix(Path),
M:F(Params, WikiRoot, Prefix).
call_wiki(M, F, Arg) ->
WikiRoot = filename:dirname(Arg#arg.fullpath),
{abs_path, P} = (Arg#arg.req)#http_request.path,
Path = yaws_api:url_decode(P),
Prefix = wiki_yaws:get_path_prefix(Path),
M:F([], WikiRoot, Prefix).
|
b5293a4baadc22c1ddc5bf66aba1c1d400771b26a1b3216ef1807a552eb8693b | tezos/tezos-mirror | dac_plugin.mli | (*****************************************************************************)
(* *)
(* Open Source License *)
Copyright ( c ) 2023 Trili Tech , < >
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
to deal in the Software without restriction , including without limitation
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
and/or sell copies of the Software , and to permit persons to whom the
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
(** [Protocol.Sc_protocol_reveal_hash.t] is unknown to modules outside the
protocol and only known at runtime. To avoid the proliferation of functors
in the dac node, [hash] hides the dynamic [Protocol.Sc_protocol_reveal_hash.t]
behind an abstract static type. An instance of [Dac_plugin.T] behaviour
of operations on [hash].
*)
type hash
val hash_to_bytes : hash -> bytes
val hash_to_hex : hash -> Hex.t
* FIXME : /-/issues/4856
Fix static supported_hashes type
Fix static supported_hashes type *)
type supported_hashes = Blake2B
module type T = sig
(** The encoding of reveal hashes. *)
val encoding : hash Data_encoding.t
(** [hash_string ~scheme ?key strings] hashes [strings] using the
supported hashing [scheme] given in input. *)
val hash_string :
scheme:supported_hashes -> ?key:string -> string list -> hash
(** [hash_bytes ~scheme ?key strings] hashes [bytes] using the
supported hashing [scheme] given in input. *)
val hash_bytes : scheme:supported_hashes -> ?key:bytes -> bytes list -> hash
(** [scheme_of_hash] hash returns the supported hashing scheme
that was used to obtain [hash]. *)
val scheme_of_hash : hash -> supported_hashes
(** [of_hex hex] decodes a hex into hash. *)
val of_hex : string -> hash option
(** [to_hex hash] encodes hash into hex. *)
val to_hex : hash -> string
* [ size ~scheme ] returns the size of reveal hashes using the [ scheme ]
specified in input .
specified in input. *)
val size : scheme:supported_hashes -> int
(** Hash argument definition for RPC *)
val hash_rpc_arg : hash Tezos_rpc.Arg.arg
module Proto : Registered_protocol.T
end
(** Dac plugin module type *)
type t = (module T)
(** [register make_plugin] derives and registers a new [Dac_plugin.T] given an
[of_bytes]. Implementers of plugin are responsible for providing the
definition of this derivation. Functions that expose
[Protocol.Sc_protocol_reveal_hash.t] can be wrapped into [hash] via
[Dac_hash.to_bytes] and [of_bytes].
*)
val register : ((bytes -> hash) -> (module T)) -> unit
val get : Protocol_hash.Table.key -> (module T) option
| null | https://raw.githubusercontent.com/tezos/tezos-mirror/bbca5502eb430d3915ad697259d3bffc62c2d01d/src/lib_dac_node/dac_plugin.mli | ocaml | ***************************************************************************
Open Source License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
***************************************************************************
* [Protocol.Sc_protocol_reveal_hash.t] is unknown to modules outside the
protocol and only known at runtime. To avoid the proliferation of functors
in the dac node, [hash] hides the dynamic [Protocol.Sc_protocol_reveal_hash.t]
behind an abstract static type. An instance of [Dac_plugin.T] behaviour
of operations on [hash].
* The encoding of reveal hashes.
* [hash_string ~scheme ?key strings] hashes [strings] using the
supported hashing [scheme] given in input.
* [hash_bytes ~scheme ?key strings] hashes [bytes] using the
supported hashing [scheme] given in input.
* [scheme_of_hash] hash returns the supported hashing scheme
that was used to obtain [hash].
* [of_hex hex] decodes a hex into hash.
* [to_hex hash] encodes hash into hex.
* Hash argument definition for RPC
* Dac plugin module type
* [register make_plugin] derives and registers a new [Dac_plugin.T] given an
[of_bytes]. Implementers of plugin are responsible for providing the
definition of this derivation. Functions that expose
[Protocol.Sc_protocol_reveal_hash.t] can be wrapped into [hash] via
[Dac_hash.to_bytes] and [of_bytes].
| Copyright ( c ) 2023 Trili Tech , < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
type hash
val hash_to_bytes : hash -> bytes
val hash_to_hex : hash -> Hex.t
* FIXME : /-/issues/4856
Fix static supported_hashes type
Fix static supported_hashes type *)
type supported_hashes = Blake2B
module type T = sig
val encoding : hash Data_encoding.t
val hash_string :
scheme:supported_hashes -> ?key:string -> string list -> hash
val hash_bytes : scheme:supported_hashes -> ?key:bytes -> bytes list -> hash
val scheme_of_hash : hash -> supported_hashes
val of_hex : string -> hash option
val to_hex : hash -> string
* [ size ~scheme ] returns the size of reveal hashes using the [ scheme ]
specified in input .
specified in input. *)
val size : scheme:supported_hashes -> int
val hash_rpc_arg : hash Tezos_rpc.Arg.arg
module Proto : Registered_protocol.T
end
type t = (module T)
val register : ((bytes -> hash) -> (module T)) -> unit
val get : Protocol_hash.Table.key -> (module T) option
|
1f4c4ddf3447f63aecb4a208189437b56c82470d7f3039a051588c6bc97176a6 | circuithub/haskell-gerber | StepRepeat.hs | module Gerber.StepRepeat ( StepRepeat(..) ) where
data StepRepeat = StepRepeat
{ xRepeats :: !Int
, yRepeats :: !Int
, xStep :: !Float
, yStep :: !Float
}
deriving ( Eq, Show )
| null | https://raw.githubusercontent.com/circuithub/haskell-gerber/c1ef0273e9dcf98d731a63ffc6aeb1b4c884bd8d/gerber/lib/Gerber/StepRepeat.hs | haskell | module Gerber.StepRepeat ( StepRepeat(..) ) where
data StepRepeat = StepRepeat
{ xRepeats :: !Int
, yRepeats :: !Int
, xStep :: !Float
, yStep :: !Float
}
deriving ( Eq, Show )
| |
a4084d425242e554a7ac15c9a74a3754b5a445507ce4759bb6b3a848259645f2 | AndrasKovacs/ELTE-func-lang | Practice03.hs | # LANGUAGE InstanceSigs #
module Practice03 where
data Coord = C
{ x : : , y : : Int
} deriving ( Eq , Ord , Show )
x : : Coord - > Int
x ( C c1 c2 ) = c1
data Coord = C
{ x :: Int
, y :: Int
} deriving (Eq, Ord, Show)
x :: Coord -> Int
x (C c1 c2) = c1
-}
newtype Fun a b = Fun { appF :: a -> b }
-- class C1 a => C2 a where
instance C2 a -- C1 a must be defined
instance C1 a = > C1 ( T a ) where
-- when i use C1 (T a), (C1 a) must be defined
instance Semigroup b => Semigroup (Fun a b) where
(<>) :: Fun a b -> Fun a b -> Fun a b
(<>) (Fun f) (Fun g) = Fun $ \x -> f x <> g x
instance Monoid b => Monoid (Fun a b) where
mempty :: Fun a b
mempty = Fun $ const mempty
data T a = TODO
deriving ( Eq , Ord , Show )
lift : : a - > T a
instance Semigroup a = > Semigroup ( T a ) where
-- TODO : ( < > ) : : T a - > T a - > T a
-- lift law : lift ( x < > y ) = = lift x < > lift y
instance Semigroup a = > Monoid ( T a ) where
-- TODO : : : T a
-- lift unitR : lift x < > = = lift x
-- lift unitL : < > lift y = = lift y
data T a = TODO
deriving (Eq, Ord, Show)
lift :: a -> T a
instance Semigroup a => Semigroup (T a) where
-- TODO: (<>) :: T a -> T a -> T a
-- lift law: lift (x <> y) == lift x <> lift y
instance Semigroup a => Monoid (T a) where
-- TODO: mempty :: T a
-- lift unitR: lift x <> mempty == lift x
-- lift unitL: mempty <> lift y == lift y
-}
data InfiniTree k v
= Nil
| Branch v (k -> InfiniTree k v)
data Void
data Nat = Zero | Suc Nat
type Maybe' a = InfiniTree Void a
type List' a = InfiniTree () a
type BinTree' a = InfiniTree Bool a
type RoseTree' a = InfiniTree Nat a
binTree :: BinTree' Int
binTree = Branch 1 (\b -> if b then Nil else (Branch 2 $ \b2 -> if b2 then Nil else Nil))
| null | https://raw.githubusercontent.com/AndrasKovacs/ELTE-func-lang/88d41930999d6056bdd7bfaa85761a527cce4113/2019-20-1/pentek_gyak/Practice03.hs | haskell | class C1 a => C2 a where
C1 a must be defined
when i use C1 (T a), (C1 a) must be defined
TODO : ( < > ) : : T a - > T a - > T a
lift law : lift ( x < > y ) = = lift x < > lift y
TODO : : : T a
lift unitR : lift x < > = = lift x
lift unitL : < > lift y = = lift y
TODO: (<>) :: T a -> T a -> T a
lift law: lift (x <> y) == lift x <> lift y
TODO: mempty :: T a
lift unitR: lift x <> mempty == lift x
lift unitL: mempty <> lift y == lift y
| # LANGUAGE InstanceSigs #
module Practice03 where
data Coord = C
{ x : : , y : : Int
} deriving ( Eq , Ord , Show )
x : : Coord - > Int
x ( C c1 c2 ) = c1
data Coord = C
{ x :: Int
, y :: Int
} deriving (Eq, Ord, Show)
x :: Coord -> Int
x (C c1 c2) = c1
-}
newtype Fun a b = Fun { appF :: a -> b }
instance C1 a = > C1 ( T a ) where
instance Semigroup b => Semigroup (Fun a b) where
(<>) :: Fun a b -> Fun a b -> Fun a b
(<>) (Fun f) (Fun g) = Fun $ \x -> f x <> g x
instance Monoid b => Monoid (Fun a b) where
mempty :: Fun a b
mempty = Fun $ const mempty
data T a = TODO
deriving ( Eq , Ord , Show )
lift : : a - > T a
instance Semigroup a = > Semigroup ( T a ) where
instance Semigroup a = > Monoid ( T a ) where
data T a = TODO
deriving (Eq, Ord, Show)
lift :: a -> T a
instance Semigroup a => Semigroup (T a) where
instance Semigroup a => Monoid (T a) where
-}
data InfiniTree k v
= Nil
| Branch v (k -> InfiniTree k v)
data Void
data Nat = Zero | Suc Nat
type Maybe' a = InfiniTree Void a
type List' a = InfiniTree () a
type BinTree' a = InfiniTree Bool a
type RoseTree' a = InfiniTree Nat a
binTree :: BinTree' Int
binTree = Branch 1 (\b -> if b then Nil else (Branch 2 $ \b2 -> if b2 then Nil else Nil))
|
0e67840fd855b25c8cd71485a4b741e111dae5c61ee4cc956e7f4995a65df10d | Wavenet/http_proxy | mod_post.erl | mod_post.erl
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@author < >
2013 - 2015 Global Wavenet ( Pty ) Ltd
%%% @end
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%%% you may not use this file except in compliance with the License.
%%% You may obtain a copy of the License at
%%%
%%% -2.0
%%%
%%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%%% See the License for the specific language governing permissions and
%%% limitations under the License.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-module(mod_post).
-export([do/1]).
-include_lib("inets/include/httpd.hrl").
do(#mod{method = "POST", absolute_uri = AbsoluteURI,
request_uri = RequestURI, parsed_header = RequestHeaders,
entity_body = RequestBody, config_db = ConfigDB, data = Data}) ->
case lists:keyfind("content-length", 1, RequestHeaders) of
{"content-length", CL} ->
case list_to_integer(CL) of
ContentLength when ContentLength =:= length(RequestBody) ->
do_post(AbsoluteURI, RequestURI, RequestBody, ConfigDB, Data);
_ContentLength ->
{proceed, [{response, {400, []}} | Data]}
end;
false ->
{proceed, [{response, {411, []}} | Data]}
end.
do_post(AbsoluteURI, RequestURI, Contents, ConfigDB, Data) ->
DocumentRoot = httpd_util:lookup(ConfigDB, document_root),
Reference = base64:encode_to_string(erlang:ref_to_list(make_ref())),
{Path, Location} = case tl(RequestURI) of
$/ ->
{RequestURI ++ Reference, AbsoluteURI ++ Reference};
_ ->
{RequestURI ++ "/" ++ Reference, AbsoluteURI ++ "/" ++ Reference}
end,
FileName = DocumentRoot ++ Path,
case file:write_file(FileName, Contents) of
ok ->
case file:read_file_info(FileName) of
{ok, FileInfo} ->
Etag = httpd_util:create_etag(FileInfo),
{proceed, [{response, {response, [{code, 201},
{location, Location}, {etag, Etag},
{content_length, "0"}], []}} | Data]};
{error, Reason} ->
error_logger:error_report([{method, "POST"},
{absolute_uri, AbsoluteURI},
{request_uri, RequestURI},
{data, Data}, {reason, Reason}]),
{proceed, [{response, {500, []}} | Data]}
end;
{error, enoent} ->
{proceed, [{response, {404, []}} | Data]};
{error, Reason} ->
error_logger:error_report([{method, "POST"},
{absolute_uri, AbsoluteURI},
{request_uri, RequestURI},
{data, Data}, {reason, Reason}]),
{proceed, [{response, {500, []}} | Data]}
end.
| null | https://raw.githubusercontent.com/Wavenet/http_proxy/a66bbf5607fd8610a66355467378c171f8f65ce8/test/mod_post.erl | erlang |
@end
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
| mod_post.erl
@author < >
2013 - 2015 Global Wavenet ( Pty ) Ltd
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(mod_post).
-export([do/1]).
-include_lib("inets/include/httpd.hrl").
do(#mod{method = "POST", absolute_uri = AbsoluteURI,
request_uri = RequestURI, parsed_header = RequestHeaders,
entity_body = RequestBody, config_db = ConfigDB, data = Data}) ->
case lists:keyfind("content-length", 1, RequestHeaders) of
{"content-length", CL} ->
case list_to_integer(CL) of
ContentLength when ContentLength =:= length(RequestBody) ->
do_post(AbsoluteURI, RequestURI, RequestBody, ConfigDB, Data);
_ContentLength ->
{proceed, [{response, {400, []}} | Data]}
end;
false ->
{proceed, [{response, {411, []}} | Data]}
end.
do_post(AbsoluteURI, RequestURI, Contents, ConfigDB, Data) ->
DocumentRoot = httpd_util:lookup(ConfigDB, document_root),
Reference = base64:encode_to_string(erlang:ref_to_list(make_ref())),
{Path, Location} = case tl(RequestURI) of
$/ ->
{RequestURI ++ Reference, AbsoluteURI ++ Reference};
_ ->
{RequestURI ++ "/" ++ Reference, AbsoluteURI ++ "/" ++ Reference}
end,
FileName = DocumentRoot ++ Path,
case file:write_file(FileName, Contents) of
ok ->
case file:read_file_info(FileName) of
{ok, FileInfo} ->
Etag = httpd_util:create_etag(FileInfo),
{proceed, [{response, {response, [{code, 201},
{location, Location}, {etag, Etag},
{content_length, "0"}], []}} | Data]};
{error, Reason} ->
error_logger:error_report([{method, "POST"},
{absolute_uri, AbsoluteURI},
{request_uri, RequestURI},
{data, Data}, {reason, Reason}]),
{proceed, [{response, {500, []}} | Data]}
end;
{error, enoent} ->
{proceed, [{response, {404, []}} | Data]};
{error, Reason} ->
error_logger:error_report([{method, "POST"},
{absolute_uri, AbsoluteURI},
{request_uri, RequestURI},
{data, Data}, {reason, Reason}]),
{proceed, [{response, {500, []}} | Data]}
end.
|
cdb40596553a99e0cabbc0bfbc921bde54640687e9b6a4ce5d27045d8604ece1 | patricoferris/sesame | utils.mli | val date_to_ptime : string -> Ptime.t
(** [date_to_ptime s] converts the string [s] to a [Ptime.t] *)
val get_time : unit -> string
(** [get_time ()] gets the current time... might be removed because it is a UNIX
only thing *)
val html_path : ?dir:Fpath.t option -> Fpath.t -> Fpath.t
(** [html_path ?dir path] turns [a/b/c.md] into [a/b/c.html] *)
val filename_to_html : Fpath.t -> Fpath.t
val title_to_dirname : string -> string
module Fpath_input : S.Encodeable with type t = Fpath.t
(** A useful {!S.Encodeable} compliant module [Fpath.t] *)
* A { ! S.S } compliant module for JSON ( using [ Ezjson.value ] )
module Json : S.S with type Input.t = Fpath.t and type t = Ezjsonm.value
(** Build a module that will build things from a directory of files *)
module Dir (T : S.S with type Input.t = Fpath.t) :
S.S with type Input.t = Fpath.t and type t = T.Output.t list
(** Same as {!Dir} except will recursively search the directory for files*)
module RecDir (T : S.S with type Input.t = Fpath.t) :
S.S with type Input.t = Fpath.t and type t = T.Output.t list
module List (T : S.S) :
S.S with type Input.t = T.Input.t list and type t = T.Output.t list
| null | https://raw.githubusercontent.com/patricoferris/sesame/8521e2a086b49d0bc20f0fca705f07675c52e1ae/src/sesame/utils.mli | ocaml | * [date_to_ptime s] converts the string [s] to a [Ptime.t]
* [get_time ()] gets the current time... might be removed because it is a UNIX
only thing
* [html_path ?dir path] turns [a/b/c.md] into [a/b/c.html]
* A useful {!S.Encodeable} compliant module [Fpath.t]
* Build a module that will build things from a directory of files
* Same as {!Dir} except will recursively search the directory for files | val date_to_ptime : string -> Ptime.t
val get_time : unit -> string
val html_path : ?dir:Fpath.t option -> Fpath.t -> Fpath.t
val filename_to_html : Fpath.t -> Fpath.t
val title_to_dirname : string -> string
module Fpath_input : S.Encodeable with type t = Fpath.t
* A { ! S.S } compliant module for JSON ( using [ Ezjson.value ] )
module Json : S.S with type Input.t = Fpath.t and type t = Ezjsonm.value
module Dir (T : S.S with type Input.t = Fpath.t) :
S.S with type Input.t = Fpath.t and type t = T.Output.t list
module RecDir (T : S.S with type Input.t = Fpath.t) :
S.S with type Input.t = Fpath.t and type t = T.Output.t list
module List (T : S.S) :
S.S with type Input.t = T.Input.t list and type t = T.Output.t list
|
d6ace65c6acc5212a77cffa6920fc39d51301d836ebf843bd4896f06963ae158 | francescoc/scalabilitywitherlangotp | earth.erl | -module(earth).
-export([start/0, init/0]).
start() ->
spawn(?MODULE, init, []).
init() ->
create_earth(),
day().
day() ->
receive
eclipse -> day();
sunset -> night()
end.
night() ->
receive
sunrise ->
make_roosters_crow(),
day()
end.
create_earth() -> ok.
make_roosters_crow() -> ok.
| null | https://raw.githubusercontent.com/francescoc/scalabilitywitherlangotp/961de968f034e55eba22eea9a368fe9f47c608cc/ch6/earth.erl | erlang | -module(earth).
-export([start/0, init/0]).
start() ->
spawn(?MODULE, init, []).
init() ->
create_earth(),
day().
day() ->
receive
eclipse -> day();
sunset -> night()
end.
night() ->
receive
sunrise ->
make_roosters_crow(),
day()
end.
create_earth() -> ok.
make_roosters_crow() -> ok.
| |
d260bb7f0617ee7d7c7b33288fa5dc0981c6cf84d34f1a9128f311fc5791f301 | Clojure2D/clojure2d-examples | ch10_camera2.clj | (ns rt-in-weekend.ch10-camera2
(:require [clojure2d.core :refer :all]
[clojure2d.pixels :as p]
[clojure2d.extra.utils :as u]
[fastmath.vector :as v]
[rt-in-weekend.ray :refer :all]
[rt-in-weekend.hitable :refer :all]
[rt-in-weekend.sphere :refer :all]
[rt-in-weekend.camera :refer :all]
[rt-in-weekend.material :refer :all]
[fastmath.core :as m]
[fastmath.random :as r])
(:import [fastmath.vector Vec3]
[rt_in_weekend.ray Ray]
[rt_in_weekend.hitable HitData]))
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
(m/use-primitive-operators)
(def ^:const v1 (v/vec3 1.0 1.0 1.0))
(def ^:const v2 (v/vec3 0.5 0.7 1.0))
(def ^:const zero (v/vec3 0.0 0.0 0.0))
(def world [(->Sphere (v/vec3 0.0 0.0 -1.0) 0.5 (->Lambertian (v/vec3 0.1 0.2 0.5)))
(->Sphere (v/vec3 0.0 -100.5 -1.0) 100.0 (->Lambertian (v/vec3 0.8 0.8 0.0)))
(->Sphere (v/vec3 1.0 0.0 -1.0) 0.5 (->Metal (v/vec3 0.8 0.6 0.2) 0.0))
(->Sphere (v/vec3 -1.0 0.0 -1.0) 0.5 (->Dielectric 1.5))
(->Sphere (v/vec3 -1.0 0.0 -1.0) -0.45 (->Dielectric 1.5))])
(defn color
([ray world] (color ray world 50))
([^Ray ray world ^long depth]
(if-let [^HitData world-hit (hit-list world ray 0.001 Double/MAX_VALUE)]
(let [[attenuation scattered] (scatter (.material world-hit) ray world-hit)]
(if (and attenuation (pos? depth))
(v/emult attenuation (color scattered world (dec depth)))
zero))
(let [^Vec3 unit (v/normalize (.direction ray))
t (* 0.5 (inc (.y unit)))]
(v/interpolate v1 v2 t)))))
(def ^:const ^int nx 800)
(def ^:const ^int ny 400)
(def ^:const ^int samples 200)
(def img (p/pixels nx ny))
(def camera (positionable-camera (v/vec3 -2 2 1) (v/vec3 0 0 -1) (v/vec3 0 1 0) 90 (/ (double nx) ny)))
( def camera ( positionable - camera ( v / vec3 -2 2 1 ) ( v / vec3 0 0 -1 ) ( v / vec3 0 1 0 ) 20 ( / ( double nx ) ny ) ) )
(time (dotimes [j ny]
(println (str "Line: " j))
(dotimes [i nx]
(let [col (reduce v/add zero
(repeatedly samples #(let [u (/ (+ (r/drand) i) nx)
v (/ (+ (r/drand) j) ny)
r (get-ray camera u v)]
(color r world))))]
(p/set-color! img i (- (dec ny) j) (-> (v/div col samples)
(v/sqrt)
(v/mult 255.0)))))))
(u/show-image img)
;; (save img "results/rt-in-weekend/camera2-fov20.jpg")
| null | https://raw.githubusercontent.com/Clojure2D/clojure2d-examples/9de82f5ac0737b7e78e07a17cf03ac577d973817/src/rt_in_weekend/ch10_camera2.clj | clojure | (save img "results/rt-in-weekend/camera2-fov20.jpg") | (ns rt-in-weekend.ch10-camera2
(:require [clojure2d.core :refer :all]
[clojure2d.pixels :as p]
[clojure2d.extra.utils :as u]
[fastmath.vector :as v]
[rt-in-weekend.ray :refer :all]
[rt-in-weekend.hitable :refer :all]
[rt-in-weekend.sphere :refer :all]
[rt-in-weekend.camera :refer :all]
[rt-in-weekend.material :refer :all]
[fastmath.core :as m]
[fastmath.random :as r])
(:import [fastmath.vector Vec3]
[rt_in_weekend.ray Ray]
[rt_in_weekend.hitable HitData]))
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
(m/use-primitive-operators)
(def ^:const v1 (v/vec3 1.0 1.0 1.0))
(def ^:const v2 (v/vec3 0.5 0.7 1.0))
(def ^:const zero (v/vec3 0.0 0.0 0.0))
(def world [(->Sphere (v/vec3 0.0 0.0 -1.0) 0.5 (->Lambertian (v/vec3 0.1 0.2 0.5)))
(->Sphere (v/vec3 0.0 -100.5 -1.0) 100.0 (->Lambertian (v/vec3 0.8 0.8 0.0)))
(->Sphere (v/vec3 1.0 0.0 -1.0) 0.5 (->Metal (v/vec3 0.8 0.6 0.2) 0.0))
(->Sphere (v/vec3 -1.0 0.0 -1.0) 0.5 (->Dielectric 1.5))
(->Sphere (v/vec3 -1.0 0.0 -1.0) -0.45 (->Dielectric 1.5))])
(defn color
([ray world] (color ray world 50))
([^Ray ray world ^long depth]
(if-let [^HitData world-hit (hit-list world ray 0.001 Double/MAX_VALUE)]
(let [[attenuation scattered] (scatter (.material world-hit) ray world-hit)]
(if (and attenuation (pos? depth))
(v/emult attenuation (color scattered world (dec depth)))
zero))
(let [^Vec3 unit (v/normalize (.direction ray))
t (* 0.5 (inc (.y unit)))]
(v/interpolate v1 v2 t)))))
(def ^:const ^int nx 800)
(def ^:const ^int ny 400)
(def ^:const ^int samples 200)
(def img (p/pixels nx ny))
(def camera (positionable-camera (v/vec3 -2 2 1) (v/vec3 0 0 -1) (v/vec3 0 1 0) 90 (/ (double nx) ny)))
( def camera ( positionable - camera ( v / vec3 -2 2 1 ) ( v / vec3 0 0 -1 ) ( v / vec3 0 1 0 ) 20 ( / ( double nx ) ny ) ) )
(time (dotimes [j ny]
(println (str "Line: " j))
(dotimes [i nx]
(let [col (reduce v/add zero
(repeatedly samples #(let [u (/ (+ (r/drand) i) nx)
v (/ (+ (r/drand) j) ny)
r (get-ray camera u v)]
(color r world))))]
(p/set-color! img i (- (dec ny) j) (-> (v/div col samples)
(v/sqrt)
(v/mult 255.0)))))))
(u/show-image img)
|
5692e9977e9f9231d26a73414dbf84053c24daba91a03dfe70ccc932766dc153 | Lovesan/virgil | proxy.lisp | ;;;; -*- Mode: lisp; indent-tabs-mode: nil -*-
Copyright ( C ) 2010 - 2012 , < >
;;; Permission is hereby granted, free of charge, to any person
;;; obtaining a copy of this software and associated documentation
files ( the " Software " ) , to deal in the Software without
;;; restriction, including without limitation the rights to use, copy,
;;; modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software , and to permit persons to whom the Software is
;;; furnished to do so, subject to the following conditions:
;;; The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
;;; EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
;;; MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
;;; NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
;;; HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
;;; WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
;;; OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
;;; DEALINGS IN THE SOFTWARE
(in-package #:virgil)
(defclass proxy-type (translatable-type)
((proxied-type
:initform (error "Supply proxied type to proxy type")
:initarg :type
:accessor proxied-type)))
(defun proxy-type-p (type)
(typep type 'proxy-type))
(defmethod base-type ((type proxy-type))
(base-type (proxied-type type)))
(defmethod compute-alignment ((type proxy-type))
(compute-alignment (proxied-type type)))
(defmethod lisp-type ((type proxy-type))
(lisp-type (proxied-type type)))
(defmethod prototype ((type proxy-type))
(prototype (proxied-type type)))
(defmethod expand-prototype ((type proxy-type))
(expand-prototype (proxied-type type)))
(defmethod compute-size (value (type proxy-type))
(compute-size value (proxied-type type)))
(defmethod expand-compute-size (value (type proxy-type))
(expand-compute-size value (proxied-type type)))
(defmethod compute-fixed-size ((type proxy-type))
(compute-fixed-size (proxied-type type)))
(defmethod compute-slot-offset (slot-name (type proxy-type))
(compute-slot-offset slot-name (proxied-type type)))
(defmethod expand-compute-slot-offset (slot-name (type proxy-type))
(expand-compute-slot-offset slot-name (proxied-type type)))
(defmethod convert-value (value (type proxy-type))
(convert-value value (proxied-type type)))
(defmethod translate-value (value (type proxy-type))
(translate-value value (proxied-type type)))
(defmethod read-value (value output (type proxy-type))
(read-value value output (proxied-type type)))
(defmethod write-value (value pointer (type proxy-type))
(write-value value pointer (proxied-type type)))
(defmethod allocate-value (value (type proxy-type))
(allocate-value value (proxied-type type)))
(defmethod clean-value (pointer value (type proxy-type))
(clean-value pointer value (proxied-type type)))
(defmethod free-value (pointer (type proxy-type))
(free-value pointer (proxied-type type)))
(defmethod expand-convert-value (value (type proxy-type))
(expand-convert-value value (proxied-type type)))
(defmethod expand-translate-value (value (type proxy-type))
(expand-translate-value value (proxied-type type)))
(defmethod expand-read-value (value output (type proxy-type))
(expand-read-value value output (proxied-type type)))
(defmethod expand-write-value (value pointer (type proxy-type))
(expand-write-value value pointer (proxied-type type)))
(defmethod expand-allocate-value (value (type proxy-type))
(expand-allocate-value value (proxied-type type)))
(defmethod expand-clean-value (pointer value (type proxy-type))
(expand-clean-value pointer value (proxied-type type)))
(defmethod expand-free-value (pointer (type proxy-type))
(expand-free-value pointer (proxied-type type)))
(defmethod expand-dynamic-extent (var value-var body (type proxy-type))
(expand-dynamic-extent var value-var body (proxied-type type)))
(defmethod expand-callback-dynamic-extent (var value body (type proxy-type))
(expand-callback-dynamic-extent var value body (proxied-type type)))
(defmethod expand-reference-dynamic-extent
(var size-var value-var body mode (type proxy-type))
(expand-reference-dynamic-extent
var size-var value-var body mode (proxied-type type)))
| null | https://raw.githubusercontent.com/Lovesan/virgil/ab650955b939fba0c7f5c3fd945d3580fbf756c1/src/proxy.lisp | lisp | -*- Mode: lisp; indent-tabs-mode: nil -*-
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
restriction, including without limitation the rights to use, copy,
modify, merge, publish, distribute, sublicense, and/or sell copies
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE |
Copyright ( C ) 2010 - 2012 , < >
files ( the " Software " ) , to deal in the Software without
of the Software , and to permit persons to whom the Software is
included in all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND ,
(in-package #:virgil)
(defclass proxy-type (translatable-type)
((proxied-type
:initform (error "Supply proxied type to proxy type")
:initarg :type
:accessor proxied-type)))
(defun proxy-type-p (type)
(typep type 'proxy-type))
(defmethod base-type ((type proxy-type))
(base-type (proxied-type type)))
(defmethod compute-alignment ((type proxy-type))
(compute-alignment (proxied-type type)))
(defmethod lisp-type ((type proxy-type))
(lisp-type (proxied-type type)))
(defmethod prototype ((type proxy-type))
(prototype (proxied-type type)))
(defmethod expand-prototype ((type proxy-type))
(expand-prototype (proxied-type type)))
(defmethod compute-size (value (type proxy-type))
(compute-size value (proxied-type type)))
(defmethod expand-compute-size (value (type proxy-type))
(expand-compute-size value (proxied-type type)))
(defmethod compute-fixed-size ((type proxy-type))
(compute-fixed-size (proxied-type type)))
(defmethod compute-slot-offset (slot-name (type proxy-type))
(compute-slot-offset slot-name (proxied-type type)))
(defmethod expand-compute-slot-offset (slot-name (type proxy-type))
(expand-compute-slot-offset slot-name (proxied-type type)))
(defmethod convert-value (value (type proxy-type))
(convert-value value (proxied-type type)))
(defmethod translate-value (value (type proxy-type))
(translate-value value (proxied-type type)))
(defmethod read-value (value output (type proxy-type))
(read-value value output (proxied-type type)))
(defmethod write-value (value pointer (type proxy-type))
(write-value value pointer (proxied-type type)))
(defmethod allocate-value (value (type proxy-type))
(allocate-value value (proxied-type type)))
(defmethod clean-value (pointer value (type proxy-type))
(clean-value pointer value (proxied-type type)))
(defmethod free-value (pointer (type proxy-type))
(free-value pointer (proxied-type type)))
(defmethod expand-convert-value (value (type proxy-type))
(expand-convert-value value (proxied-type type)))
(defmethod expand-translate-value (value (type proxy-type))
(expand-translate-value value (proxied-type type)))
(defmethod expand-read-value (value output (type proxy-type))
(expand-read-value value output (proxied-type type)))
(defmethod expand-write-value (value pointer (type proxy-type))
(expand-write-value value pointer (proxied-type type)))
(defmethod expand-allocate-value (value (type proxy-type))
(expand-allocate-value value (proxied-type type)))
(defmethod expand-clean-value (pointer value (type proxy-type))
(expand-clean-value pointer value (proxied-type type)))
(defmethod expand-free-value (pointer (type proxy-type))
(expand-free-value pointer (proxied-type type)))
(defmethod expand-dynamic-extent (var value-var body (type proxy-type))
(expand-dynamic-extent var value-var body (proxied-type type)))
(defmethod expand-callback-dynamic-extent (var value body (type proxy-type))
(expand-callback-dynamic-extent var value body (proxied-type type)))
(defmethod expand-reference-dynamic-extent
(var size-var value-var body mode (type proxy-type))
(expand-reference-dynamic-extent
var size-var value-var body mode (proxied-type type)))
|
1fe7be48770779cbba3e24690b666e4bc05b9b051052a896e6cf583f1a966c23 | programaker-project/Programaker-Core | automate_rest_api_services_register.erl | %%% @doc
%%% REST endpoint to manage knowledge collections.
%%% @end
%% @doc Cowboy REST handler that registers (configures) a service for a
%% given user. The route carries `user_id' and `service_id' path bindings
%% and accepts a JSON body via POST.
-module(automate_rest_api_services_register).
-export([init/2]).
-export([ allowed_methods/2
        , options/2
        , is_authorized/2
        , content_types_accepted/2
        ]).
-export([ accept_json_register_service/2
        ]).

-define(UTILS, automate_rest_api_utils).

-include("./records.hrl").

%% Per-request state: the owning user and the service being registered.
-record(state, { username :: binary(), service_id :: binary() }).

-spec init(_,_) -> {'cowboy_rest',_,_}.
%% Upgrade the plain cowboy handler to cowboy_rest, capture the path
%% bindings in the handler state, and set CORS headers on the response.
init(Req, _Opts) ->
    UserId = cowboy_req:binding(user_id, Req),
    ServiceId = cowboy_req:binding(service_id, Req),
    Req1 = automate_rest_api_cors:set_headers(Req),
    {cowboy_rest, Req1
    , #state{ username=UserId
            , service_id=ServiceId
            }}.

%% CORS preflight terminates here; the headers were already set in init/2.
options(Req, State) ->
    {ok, Req, State}.

%% Authentication
-spec allowed_methods(cowboy_req:req(),_) -> {[binary()], cowboy_req:req(),_}.
allowed_methods(Req, State) ->
    {[<<"POST">>, <<"OPTIONS">>], Req, State}.

%% Authorize the request: OPTIONS is always allowed (CORS preflight); any
%% other method needs an `authorization' header token that is valid for
%% the `create_services' scope AND belongs to the user named in the URL.
is_authorized(Req, State) ->
    Req1 = automate_rest_api_cors:set_headers(Req),
    case cowboy_req:method(Req1) of
        %% Don't do authentication if it's just asking for options
        <<"OPTIONS">> ->
            { true, Req1, State };
        _ ->
            case cowboy_req:header(<<"authorization">>, Req, undefined) of
                undefined ->
                    { {false, <<"Authorization header not found">>} , Req1, State };
                X ->
                    #state{username=Username} = State,
                    case automate_rest_api_backend:is_valid_token(X, create_services) of
                        {true, Username} ->
                            { true, Req1, State };
                        {true, _} -> %% Non matching username
                            { { false, <<"Unauthorized to register a service here">>}, Req1, State };
                        false ->
                            { { false, <<"Authorization not correct">>}, Req1, State }
                    end
            end
    end.

%% POST handler
content_types_accepted(Req, State) ->
    {[{{<<"application">>, <<"json">>, []},
       accept_json_register_service}],
     Req, State}.

-spec accept_json_register_service(cowboy_req:req(),
                                   #state{}) -> {true, cowboy_req:req(), #state{}}.
%% Decode the JSON body, peel the optional metadata.connection_id field
%% off it, and forward the remaining registration data to the backend.
%% The backend's reply is sent back to the client as JSON.
%% NOTE(review): only the {ok, Data} backend result is matched; any other
%% return crashes this process with a case_clause (cowboy will turn that
%% into a 500). Presumably intentional let-it-crash — worth confirming.
accept_json_register_service(Req, State) ->
    #state{username = Username, service_id = ServiceId} = State,
    {ok, Body, Req1} = ?UTILS:read_body(Req),
    FullRegistrationData = jiffy:decode(Body, [return_maps]),
    %% Split the payload into the registration data proper and the
    %% optional connection id carried under "metadata".
    { RegistrationData, ConnectionId } = case FullRegistrationData of
        #{ <<"metadata">> := #{<<"connection_id">> := ConnId} } ->
            {maps:remove(<<"metadata">>, FullRegistrationData), ConnId};
        #{ <<"metadata">> := #{} } ->
            {maps:remove(<<"metadata">>, FullRegistrationData), undefined};
        _ ->
            {FullRegistrationData, undefined}
    end,
    case automate_rest_api_backend:register_service(Username, ServiceId, RegistrationData, ConnectionId) of
        {ok, Data} ->
            Output = jiffy:encode(Data),
            Res2 = ?UTILS:send_json_output(Output, Req1),
            {true, Res2, State}
    end.
| null | https://raw.githubusercontent.com/programaker-project/Programaker-Core/ef10fc6d2a228b2096b121170c421f5c29f9f270/backend/apps/automate_rest_api/src/automate_rest_api_services_register.erl | erlang | @doc
REST endpoint to manage knowledge collections.
@end
CORS
Authentication
Don't do authentication if it's just asking for options
Non matching username
POST handler |
-module(automate_rest_api_services_register).
-export([init/2]).
-export([ allowed_methods/2
, options/2
, is_authorized/2
, content_types_accepted/2
]).
-export([ accept_json_register_service/2
]).
-define(UTILS, automate_rest_api_utils).
-include("./records.hrl").
-record(state, { username :: binary(), service_id :: binary() }).
-spec init(_,_) -> {'cowboy_rest',_,_}.
init(Req, _Opts) ->
UserId = cowboy_req:binding(user_id, Req),
ServiceId = cowboy_req:binding(service_id, Req),
Req1 = automate_rest_api_cors:set_headers(Req),
{cowboy_rest, Req1
, #state{ username=UserId
, service_id=ServiceId
}}.
options(Req, State) ->
{ok, Req, State}.
-spec allowed_methods(cowboy_req:req(),_) -> {[binary()], cowboy_req:req(),_}.
allowed_methods(Req, State) ->
{[<<"POST">>, <<"OPTIONS">>], Req, State}.
is_authorized(Req, State) ->
Req1 = automate_rest_api_cors:set_headers(Req),
case cowboy_req:method(Req1) of
<<"OPTIONS">> ->
{ true, Req1, State };
_ ->
case cowboy_req:header(<<"authorization">>, Req, undefined) of
undefined ->
{ {false, <<"Authorization header not found">>} , Req1, State };
X ->
#state{username=Username} = State,
case automate_rest_api_backend:is_valid_token(X, create_services) of
{true, Username} ->
{ true, Req1, State };
{ { false, <<"Unauthorized to register a service here">>}, Req1, State };
false ->
{ { false, <<"Authorization not correct">>}, Req1, State }
end
end
end.
content_types_accepted(Req, State) ->
{[{{<<"application">>, <<"json">>, []},
accept_json_register_service}],
Req, State}.
-spec accept_json_register_service(cowboy_req:req(),
#state{}) -> {true, cowboy_req:req(), #state{}}.
accept_json_register_service(Req, State) ->
#state{username = Username, service_id = ServiceId} = State,
{ok, Body, Req1} = ?UTILS:read_body(Req),
FullRegistrationData = jiffy:decode(Body, [return_maps]),
{ RegistrationData, ConnectionId } = case FullRegistrationData of
#{ <<"metadata">> := #{<<"connection_id">> := ConnId} } ->
{maps:remove(<<"metadata">>, FullRegistrationData), ConnId};
#{ <<"metadata">> := #{} } ->
{maps:remove(<<"metadata">>, FullRegistrationData), undefined};
_ ->
{FullRegistrationData, undefined}
end,
case automate_rest_api_backend:register_service(Username, ServiceId, RegistrationData, ConnectionId) of
{ok, Data} ->
Output = jiffy:encode(Data),
Res2 = ?UTILS:send_json_output(Output, Req1),
{true, Res2, State}
end.
|
b8f115a2009a37b9e4a4099415f75b3682a8104d2153e935b707e461be37b660 | D00mch/PWA-clojure | nrepl.clj | (ns pwa.nrepl
(:require
[nrepl.server :as nrepl]
[clojure.tools.logging :as log]))
(defn start
  "Start a network REPL for debugging on the specified `:port`.

  The :bind, :transport-fn, :handler, :ack-port and :greeting-fn options
  are forwarded to nrepl.server/start-server as they are.

  Returns the started server handle; on failure the error is logged and
  the exception is rethrown."
  [{:keys [port bind transport-fn handler ack-port greeting-fn]}]
  (try
    (log/info "starting nREPL server on port" port)
    (nrepl/start-server :port port
                        :bind bind
                        :transport-fn transport-fn
                        :handler handler
                        :ack-port ack-port
                        :greeting-fn greeting-fn)
    (catch Throwable t
      (log/error t "failed to start nREPL")
      (throw t))))
(defn stop
  "Shut down the given nREPL server, then log that it stopped."
  [nrepl-server]
  (nrepl/stop-server nrepl-server)
  (log/info "nREPL server stopped"))
| null | https://raw.githubusercontent.com/D00mch/PWA-clojure/39ab4d3690c7a8ddbdd8095d65a782961f92c183/src/clj/pwa/nrepl.clj | clojure | (ns pwa.nrepl
(:require
[nrepl.server :as nrepl]
[clojure.tools.logging :as log]))
(defn start
"Start a network repl for debugging on specified port followed by
an optional parameters map. The :bind, :transport-fn, :handler,
:ack-port and :greeting-fn will be forwarded to
clojure.tools.nrepl.server/start-server as they are."
[{:keys [port bind transport-fn handler ack-port greeting-fn]}]
(try
(log/info "starting nREPL server on port" port)
(nrepl/start-server :port port
:bind bind
:transport-fn transport-fn
:handler handler
:ack-port ack-port
:greeting-fn greeting-fn)
(catch Throwable t
(log/error t "failed to start nREPL")
(throw t))))
(defn stop [server]
(nrepl/stop-server server)
(log/info "nREPL server stopped"))
| |
126a604d2f245d0c1c1402b9d763b15ad7b3ae6a87f2d5491613994bd38860a5 | Decentralized-Pictures/T4L3NT | protocol_logging.ml | (*****************************************************************************)
(* *)
(* Open Source License *)
Copyright ( c ) 2021 Nomadic Labs . < >
Copyright ( c ) 2021 DaiLambda , Inc. < >
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
to deal in the Software without restriction , including without limitation
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
and/or sell copies of the Software , and to permit persons to whom the
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
open Internal_event.Simple
(* All events declared here live under the "node.protocol" section. *)
let section = ["node"; "protocol"]

(* Declare a simple one-argument event named "<n>_from_protocol" at the
   given level. The single payload field is the raw message string, which
   is also the whole rendered message ("{message}"). *)
let make n level =
  declare_1
    ~section
    ~name:(n ^ "_from_protocol")
    ~msg:"{message}"
    ~pp1:Format.pp_print_string
    ~level
    ("message", Data_encoding.string)
(* One pre-declared event per severity, so a protocol message can be
   forwarded at whatever level the protocol asked for. *)
let debug = make "debug" Debug

let info = make "info" Info

let notice = make "notice" Notice

let warning = make "warning" Warning

let error = make "error" Error

let fatal = make "fatal" Fatal
(* Emitted (at Error level) when forwarding a protocol message to the
   event system itself failed; carries the printed exception. *)
let logging_failure =
  declare_1
    ~section
    ~name:"logging_failure"
    ~msg:"Failure to log a protocol message: {exc}"
    ~pp1:Format.pp_print_string
    ~level:Error
    ("exc", Data_encoding.string)
(* Build a logging callback [level -> string -> unit] that never blocks
   the caller: messages are pushed onto an Lwt stream and emitted by a
   background iterator started here with [Lwt.dont_wait]. *)
let make_asynchronous_log_message_consumer () =
  let (stream, push) = Lwt_stream.create () in
  (* Set to false once the consumer loop has died, so we stop pushing
     onto a stream that nobody reads anymore. *)
  let alive = ref true in
  Lwt.dont_wait
    (fun () ->
      Lwt_stream.iter_s
        (fun (level, s) ->
          let open Lwt_syntax in
          (* Pause to avoid interleaving of execution *)
          let* () = Lwt.pause () in
          Lwt.catch
            (fun () ->
              (* Dispatch on the requested severity; each branch emits
                 through the matching pre-declared event. *)
              match level with
              | Internal_event.Debug -> emit debug s
              | Info -> emit info s
              | Notice -> emit notice s
              | Warning -> emit warning s
              | Error -> emit error s
              | Fatal -> emit fatal s)
            (fun exc -> emit logging_failure (Printexc.to_string exc)))
        stream)
    (fun exc ->
      (* We ignore the exception because it can only be the exception raised
         within the other exception handler which already attempted to log an
         error. *)
      ignore (exc : exn) ;
      (* If the [iter_s] raises, then there are no more listeners on the stream
         and we shouldn't push values on the stream. *)
      alive := false) ;
  fun level s ->
    if !alive then
      (* [push] raises [Lwt_stream.Closed] if the stream was closed
         concurrently; treat that the same as a dead consumer. *)
      try push (Some (level, s)) with Lwt_stream.Closed -> alive := false
    else ()
(* Build a fire-and-forget logging callback that emits each message
   directly (no queueing). If emission fails, the failure is reported
   through [logging_failure]; if even that fails, the error is dropped. *)
let make_log_message_consumer () level s =
  Lwt.dont_wait
    (fun () ->
      match level with
      | Internal_event.Debug -> emit debug s
      | Info -> emit info s
      | Notice -> emit notice s
      | Warning -> emit warning s
      | Error -> emit error s
      | Fatal -> emit fatal s)
    (fun exc ->
      Lwt.dont_wait
        (fun () -> emit logging_failure (Printexc.to_string exc))
        (fun _exn ->
          (* Ignoring: everything went wrong*)
          ()))
| null | https://raw.githubusercontent.com/Decentralized-Pictures/T4L3NT/6d4d3edb2d73575384282ad5a633518cba3d29e3/src/lib_validation/protocol_logging.ml | ocaml | ***************************************************************************
Open Source License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
***************************************************************************
Pause to avoid interleaving of execution
We ignore the exception because it can only be the exception raised
within the other exception handler which already attempted to log an
error.
If the [iter_s] raises, then there are no more listeners on the stream
and we shouldn't push values on the stream.
Ignoring: everything went wrong | Copyright ( c ) 2021 Nomadic Labs . < >
Copyright ( c ) 2021 DaiLambda , Inc. < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
open Internal_event.Simple
let section = ["node"; "protocol"]
let make n level =
declare_1
~section
~name:(n ^ "_from_protocol")
~msg:"{message}"
~pp1:Format.pp_print_string
~level
("message", Data_encoding.string)
let debug = make "debug" Debug
let info = make "info" Info
let notice = make "notice" Notice
let warning = make "warning" Warning
let error = make "error" Error
let fatal = make "fatal" Fatal
let logging_failure =
declare_1
~section
~name:"logging_failure"
~msg:"Failure to log a protocol message: {exc}"
~pp1:Format.pp_print_string
~level:Error
("exc", Data_encoding.string)
let make_asynchronous_log_message_consumer () =
let (stream, push) = Lwt_stream.create () in
let alive = ref true in
Lwt.dont_wait
(fun () ->
Lwt_stream.iter_s
(fun (level, s) ->
let open Lwt_syntax in
let* () = Lwt.pause () in
Lwt.catch
(fun () ->
match level with
| Internal_event.Debug -> emit debug s
| Info -> emit info s
| Notice -> emit notice s
| Warning -> emit warning s
| Error -> emit error s
| Fatal -> emit fatal s)
(fun exc -> emit logging_failure (Printexc.to_string exc)))
stream)
(fun exc ->
ignore (exc : exn) ;
alive := false) ;
fun level s ->
if !alive then
try push (Some (level, s)) with Lwt_stream.Closed -> alive := false
else ()
let make_log_message_consumer () level s =
Lwt.dont_wait
(fun () ->
match level with
| Internal_event.Debug -> emit debug s
| Info -> emit info s
| Notice -> emit notice s
| Warning -> emit warning s
| Error -> emit error s
| Fatal -> emit fatal s)
(fun exc ->
Lwt.dont_wait
(fun () -> emit logging_failure (Printexc.to_string exc))
(fun _exn ->
()))
|
9dbf8892086c5019508e350f9e0f93adccf251e6907661215948de0bfd38bf50 | less-wrong/less-wrong | Parser.hs | module LessWrong.HL.Parser where
import Control.Applicative (many, some, (<|>))
import Data.Text (Text)
import Text.Megaparsec (eof, optional, parse,
parseErrorPretty)
import Text.Megaparsec.Text (Parser)
import LessWrong.COC.Context (Context (..))
import LessWrong.COC.Error (CalculusError (..))
import LessWrong.COC.Parser (parens, symbol, term, variable)
import LessWrong.COC.Type (Name, Term, Var (..))
import LessWrong.HL.Parser.Type
-- | Parse one line of REPL input, requiring the whole input to be
--   consumed; parse failures are wrapped in 'ParsingError'.
parseREPL :: Text -> Either CalculusError Interactive
parseREPL input =
  either (Left . ParsingError . parseErrorPretty) Right $
    parse (repl <* eof) "(repl)" input
-- | Parse a complete top-level declaration from source text;
--   parse failures are wrapped in 'ParsingError'.
parseDeclaration :: Text -> Either CalculusError Declaration
parseDeclaration input =
  either (Left . ParsingError . parseErrorPretty) Right $
    parse (declaration <* eof) "(source)" input
-- | A REPL input is tried as a @:let@ binding first, then as a bare
--   term, then as a declaration.
repl :: Parser Interactive
repl = (uncurry BIND <$> binding) <|> (CODE <$> term) <|> (DECL <$> declaration)

-- | @:let x = t@ — bind the variable @x@ to the term @t@.
binding :: Parser (Var, Term)
binding = (,) <$> (symbol ":let" *> variable <* symbol "=") <*> term

-- | A declaration is an inductive, record or algebraic definition;
--   a parenthesised declaration is also accepted.
declaration :: Parser Declaration
declaration = (Ind <$> inductive) <|> (Rec <$> record) <|> (Alg <$> algebraic) <|> parens declaration
-- | @inductive N (p : T) ... = (c : T) ...@ (see 'decl' for the shape).
inductive :: Parser Inductive
inductive = decl "inductive" Inductive

-- | @record N (p : T) ... = (f : T) ...@ (see 'decl' for the shape).
record :: Parser Record
record = decl "record" Record
-- | @data N p1 p2 ... [= C1 a b | C2 ...]@ — an algebraic datatype
--   with bare (untyped) parameters and zero or more constructors
--   separated by @|@.
algebraic :: Parser Algebraic
algebraic = do symbol "data"
               V dname <- variable
               params <- (extractName <$>) <$> many variable
               -- The constructor list is optional: "data N" alone is valid.
               conses <- (symbol "=" *> constructors) <|> pure []
               pure $ Algebraic dname params (Context conses)
  where constructors = (:) <$> constructor <*> many (symbol "|" *> constructor)
-- Helper parsers shared by the declaration forms above.

-- | A data constructor: a name followed by zero or more argument types,
--   each either a bare type variable or a parenthesised application.
constructor :: Parser (Var, [TypeApp])
constructor = (,) <$> variable <*> many (tvar <|> parens tapp)
  where tvar = TVar . extractName <$> variable
        napp = tvar <|> parens tapp
        tapp = foldl TApp <$> napp <*> many napp
-- | Common shape of @inductive@/@record@ declarations:
--   @\<keyword\> N (p : T) ... = (c : T) ...@, with any number of typed
--   parameters and at least one entry on the right-hand side.
decl :: String -> (Name -> Context Term -> Context Term -> a) -> Parser a
decl keyword cons = do symbol keyword
                       V dname <- variable
                       params <- many contextDecl
                       symbol "="
                       conses <- some contextDecl
                       pure $ cons dname (Context params) (Context conses)

-- | A parenthesised, typed binding: @(x : T)@.
contextDecl :: Parser (Var, Term)
contextDecl = parens ((,) <$> variable <*> (symbol ":" *> term))
-- | Project the raw name out of a variable.
extractName :: Var -> Name
extractName v = case v of
  V name -> name
| null | https://raw.githubusercontent.com/less-wrong/less-wrong/4dd04f396551a7c81438349041335453137c0951/src/LessWrong/HL/Parser.hs | haskell | Helper declaration components functions | module LessWrong.HL.Parser where
import Control.Applicative (many, some, (<|>))
import Data.Text (Text)
import Text.Megaparsec (eof, optional, parse,
parseErrorPretty)
import Text.Megaparsec.Text (Parser)
import LessWrong.COC.Context (Context (..))
import LessWrong.COC.Error (CalculusError (..))
import LessWrong.COC.Parser (parens, symbol, term, variable)
import LessWrong.COC.Type (Name, Term, Var (..))
import LessWrong.HL.Parser.Type
parseREPL :: Text -> Either CalculusError Interactive
parseREPL txt =
case parse (repl <* eof) "(repl)" txt of
Right int -> Right int
Left err -> Left . ParsingError $ parseErrorPretty err
parseDeclaration :: Text -> Either CalculusError Declaration
parseDeclaration txt =
case parse (declaration <* eof) "(source)" txt of
Right decl -> Right decl
Left err -> Left . ParsingError $ parseErrorPretty err
repl :: Parser Interactive
repl = (uncurry BIND <$> binding) <|> (CODE <$> term) <|> (DECL <$> declaration)
binding :: Parser (Var, Term)
binding = (,) <$> (symbol ":let" *> variable <* symbol "=") <*> term
declaration :: Parser Declaration
declaration = (Ind <$> inductive) <|> (Rec <$> record) <|> (Alg <$> algebraic) <|> parens declaration
inductive :: Parser Inductive
inductive = decl "inductive" Inductive
record :: Parser Record
record = decl "record" Record
algebraic :: Parser Algebraic
algebraic = do symbol "data"
V dname <- variable
params <- (extractName <$>) <$> many variable
conses <- (symbol "=" *> constructors) <|> pure []
pure $ Algebraic dname params (Context conses)
where constructors = (:) <$> constructor <*> many (symbol "|" *> constructor)
constructor :: Parser (Var, [TypeApp])
constructor = (,) <$> variable <*> many (tvar <|> parens tapp)
where tvar = TVar . extractName <$> variable
napp = tvar <|> parens tapp
tapp = foldl TApp <$> napp <*> many napp
decl :: String -> (Name -> Context Term -> Context Term -> a) -> Parser a
decl keyword cons = do symbol keyword
V dname <- variable
params <- many contextDecl
symbol "="
conses <- some contextDecl
pure $ cons dname (Context params) (Context conses)
contextDecl :: Parser (Var, Term)
contextDecl = parens ((,) <$> variable <*> (symbol ":" *> term))
extractName :: Var -> Name
extractName (V x) = x
|
fb20da1869f1e85c9837084d83c77d1019849d0eda5f41d0802ebde6a74a5298 | logicblocks/salutem | results.clj | (ns salutem.core.results
"Provides constructors and predicates for check results."
(:require
[tick.alpha.api :as t]))
(defn result
  "Constructs a result with the provided `status`.

   The optional map of extra data is stored with the result for future use.

   Unless overridden in the extra data map, an `:salutem/evaluated-at` field is
   added to the result, set to the current date time in the system default time
   zone."
  ([status] (result status {}))
  ([status {:keys [salutem/evaluated-at]
            :or {evaluated-at (t/now)}
            :as extra-data}]
   ;; extra-data is merged first so that :salutem/status and
   ;; :salutem/evaluated-at always win over same-keyed entries in it.
   (merge extra-data
     {:salutem/status status
      :salutem/evaluated-at evaluated-at})))
(defn healthy
  "Constructs a healthy result; equivalent to `(result :healthy extra-data)`.

   The optional map of extra data is stored with the result for future use.

   Unless overridden in the extra data map, an `:salutem/evaluated-at` field is
   added to the result, set to the current date time in the system default time
   zone."
  ([] (healthy {}))
  ([extra-data]
   (result :healthy extra-data)))
(defn unhealthy
  "Constructs an unhealthy result; equivalent to `(result :unhealthy extra-data)`.

   The optional map of extra data is stored with the result for future use.

   Unless overridden in the extra data map, an `:salutem/evaluated-at` field is
   added to the result, set to the current date time in the system default time
   zone."
  ([] (unhealthy {}))
  ;; Parameter renamed from the typo'd `extra-date` for consistency with
  ;; `healthy`; positional, so this is invisible to callers.
  ([extra-data]
   (result :unhealthy extra-data)))
(defn prepend
  "Adds each entry from the provided map of extra data to the result if no entry
   already exists in the result for the key; entries already present in the
   result always take precedence."
  [result extra-data]
  ;; merge gives precedence to its right-hand argument, so result wins.
  (merge extra-data result))
(defn status
  "Returns the status of the provided result, or `nil` when the result has
   no `:salutem/status` entry."
  [result]
  (:salutem/status result))
(defn healthy?
  "Returns `true` if the result has a `:healthy` status, `false`
   otherwise."
  [result]
  ;; Direct keyword lookup (the `status` accessor inlined).
  (= :healthy (:salutem/status result)))
(defn unhealthy?
  "Returns `true` if the result has an `:unhealthy` status, `false`
   otherwise."
  [result]
  ;; Direct keyword lookup (the `status` accessor inlined).
  (= :unhealthy (:salutem/status result)))
(defn outdated?
  "Returns `true` if the result of the check is outdated, `false`
   otherwise.

   For a realtime check, a result is always considered outdated.

   For a background check, a result is considered outdated when it is
   missing (`nil`) or when the time to re-evaluation of the check has
   passed, i.e., if its evaluation date time is before the current date
   time minus the check's time to re-evaluation.

   If `relative-to` is provided, the calculation is performed relative to that
   date time rather than to the current date time."
  ([result check]
   (outdated? result check (t/now)))
  ([result check relative-to]
   (or
     ;; realtime checks never cache their results
     (= (:salutem/type check) :realtime)
     ;; no result yet -> must be (re)evaluated
     (nil? result)
     (t/< (:salutem/evaluated-at result)
          (t/- relative-to (:salutem/time-to-re-evaluation check))))))
| null | https://raw.githubusercontent.com/logicblocks/salutem/9854c151b69c80c481f48d8be7ee7273831cb79b/core/src/salutem/core/results.clj | clojure | (ns salutem.core.results
"Provides constructors and predicates for check results."
(:require
[tick.alpha.api :as t]))
(defn result
"Constructs a result with the provided `status`.
The optional map of extra data is stored with the result for future use.
Unless overridden in the extra data map, an `:salutem/evaluated-at` field is
added to the result, set to the current date time in the system default time
zone."
([status] (result status {}))
([status {:keys [salutem/evaluated-at]
:or {evaluated-at (t/now)}
:as extra-data}]
(merge extra-data
{:salutem/status status
:salutem/evaluated-at evaluated-at})))
(defn healthy
"Constructs a healthy result.
The optional map of extra data is stored with the result for future use.
Unless overridden in the extra data map, an `:salutem/evaluated-at` field is
added to the result, set to the current date time in the system default time
zone."
([] (healthy {}))
([extra-data]
(result :healthy extra-data)))
(defn unhealthy
"Constructs an unhealthy result.
The optional map of extra data is stored with the result for future use.
Unless overridden in the extra data map, an `:salutem/evaluated-at` field is
added to the result, set to the current date time in the system default time
zone."
([] (unhealthy {}))
([extra-date]
(result :unhealthy extra-date)))
(defn prepend
"Adds each entry from the provided map of extra data to the result if no entry
already exists in the result for the key."
[result extra-data]
(merge extra-data result))
(defn status
"Returns the status of the provided result."
[result]
(:salutem/status result))
(defn healthy?
"Returns `true` if the result has a `:healthy` status, `false`
otherwise."
[result]
(= (status result) :healthy))
(defn unhealthy?
"Returns `true` if the result has an `:unhealthy` status, `false`
otherwise."
[result]
(= (status result) :unhealthy))
(defn outdated?
"Returns `true` if the result of the check is outdated, `false`
otherwise.
For a realtime check, a result is always considered outdated.
For a background check, a result is considered outdated if the
time to re-evaluation of the check has passed, i.e., if its evaluation date
time is before the current date time minus the check's time to re-evaluation.
If `relative-to` is provided, the calculation is performed relative to that
date time rather than to the current date time."
([result check]
(outdated? result check (t/now)))
([result check relative-to]
(or
(= (:salutem/type check) :realtime)
(nil? result)
(t/< (:salutem/evaluated-at result)
(t/- relative-to (:salutem/time-to-re-evaluation check))))))
| |
0f170c152438144687bb65ccce515f7db379c807850cf94fc930d760c37a3073 | paurkedal/batyr | user.mli | Copyright ( C ) 2022 < >
*
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation , either version 3 of the License , or
* ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU General Public License for more details .
*
* You should have received a copy of the GNU General Public License
* along with this program . If not , see < / > .
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see </>.
*)
open Types
(** A user record as decoded from the remote API.
    NOTE(review): field semantics inferred from names — [uid] is
    presumably the server-side unique id and [name] the optional
    display name; confirm against the decoder implementation. *)
type t = {
  uid: string;
  username: string;
  name: string option;
}

(** Decoder for a single user object. *)
val decoder : t decoder

(** Decoder for a possibly-absent user object. *)
val option_decoder : t option decoder
| null | https://raw.githubusercontent.com/paurkedal/batyr/814791b6ce6476b79ecddc12b7d28fa4d23dc591/rockettime/lib/user.mli | ocaml | Copyright ( C ) 2022 < >
*
* This program is free software : you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation , either version 3 of the License , or
* ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU General Public License for more details .
*
* You should have received a copy of the GNU General Public License
* along with this program . If not , see < / > .
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see </>.
*)
open Types
type t = {
uid: string;
username: string;
name: string option;
}
val decoder : t decoder
val option_decoder : t option decoder
| |
740002ac28155bbead173d936ec3c759dc0e0e2959ec0e0dadbce61b979bfe34 | NorfairKing/intray | Foundation.hs | # LANGUAGE FlexibleContexts #
# LANGUAGE LambdaCase #
# LANGUAGE MultiParamTypeClasses #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE QuasiQuotes #
{-# LANGUAGE RankNTypes #-}
# LANGUAGE RecordWildCards #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeFamilies #
# LANGUAGE ViewPatterns #
# OPTIONS_GHC -fno - warn - orphans #
module Intray.Web.Server.Foundation
( module Intray.Web.Server.Foundation,
module Intray.Web.Server.Widget,
module Intray.Web.Server.Static,
module Intray.Web.Server.Constants,
module Intray.Web.Server.DB,
)
where
import Control.Monad.Except
import Control.Monad.Trans.Maybe
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import Database.Persist.Sql
import Database.Persist.Sqlite
import Import
import Intray.Client
import Intray.Web.Server.Constants
import Intray.Web.Server.DB
import Intray.Web.Server.Static
import Intray.Web.Server.Widget
import qualified Network.HTTP.Client as Http
import qualified Network.HTTP.Types as Http
import Text.Hamlet
import Web.Cookie
import Yesod hiding (Header)
import Yesod.Auth
import qualified Yesod.Auth.Message as Msg
import Yesod.AutoReload
import Yesod.EmbeddedStatic
-- | A widget for this site that produces no result.
type IntrayWidget = IntrayWidget' ()

-- | Widget constructor for this site (result type left open).
type IntrayWidget' = WidgetFor App

-- | Handler monad for this site's routes.
type IntrayHandler = HandlerFor App

-- | Handler monad for routes inside the auth subsite.
type IntrayAuthHandler a = AuthHandler App a
-- | The Yesod foundation value: all runtime state shared by the handlers.
data App = App
  { -- | Shared manager for outgoing HTTP requests (calls to the intray API).
    appHttpManager :: Http.Manager,
    -- | Static assets embedded into the binary at compile time.
    appStatic :: EmbeddedStatic,
    -- | Optional tracking snippet id — presumably injected by the page
    -- templates; confirm in default-page.hamlet.
    appTracking :: Maybe Text,
    -- | Optional site-verification token — presumably for search-engine
    -- ownership verification; confirm in the page templates.
    appVerification :: Maybe Text,
    -- | Base URL of the intray API server this web frontend talks to.
    appAPIBaseUrl :: BaseUrl,
    -- | Connection pool for the server's own (SQLite) database.
    appConnectionPool :: ConnectionPool
  }
-- Generate the route datatype and dispatch glue from the routes file;
-- handler definitions live in the per-page modules ("Data" variant).
mkYesodData "App" $(parseRoutesFile "routes")
instance Yesod App where
  -- Wrap every page in the shared body/page templates. In development
  -- builds the body also gets an auto-reload widget driven by ReloadR,
  -- so the browser refreshes when the server restarts.
  defaultLayout widget = do
    let addReloadWidget = if development then (<> autoReloadWidgetFor ReloadR) else id
    pc <- widgetToPageContent $ do
      toWidgetHead [hamlet|<link rel="icon" href=@{StaticR static_favicon_ico} sizes="16x16 24x24 32x32 48x48 64x64" type="image/x-icon">|]
      addReloadWidget $(widgetFile "default-body")
    app <- getYesod
    withUrlRenderer $(hamletFile "templates/default-page.hamlet")
  -- CSRF-protect all form submissions on top of the standard middleware.
  yesodMiddleware = defaultCsrfMiddleware . defaultYesodMiddleware
  -- Unauthenticated visitors get redirected to the login page.
  authRoute _ = Just $ AuthR LoginR
  -- No request-body size limit on AddR; every other route keeps the
  -- default maximum.
  maximumContentLengthIO s =
    \case
      Just AddR -> pure Nothing
      r -> pure $ maximumContentLength s r
  -- Encrypted client-side session cookies; the timeout argument is
  -- 60*24*365*10 minutes, i.e. roughly ten years.
  makeSessionBackend _ =
    Just <$> defaultClientSessionBackend (60 * 24 * 365 * 10) "client_session_key.aes"
  -- Branded 404 page; all other errors fall through to Yesod's defaults.
  errorHandler NotFound =
    fmap toTypedContent $
      withNavBar $
        do
          setTitle "Page not found"
          [whamlet|
            <h1>
              Page not found
          |]
  errorHandler other = defaultErrorHandler other
instance YesodPersist App where
  type YesodPersistBackend App = SqlBackend
  -- Run a database action on the shared pool; retryOnBusy retries
  -- actions that fail because SQLite reports the database as busy.
  runDB func = do
    pool <- getsYesod appConnectionPool
    runSqlPool (retryOnBusy func) pool
instance YesodAuth App where
  -- Authenticated users are identified by their intray username.
  type AuthId App = Username
  loginDest _ = AddR
  logoutDest _ = HomeR
  authHttpManager = getsYesod appHttpManager
  -- Accept only credentials produced by our own auth plugin, and only
  -- when the credentials' ident parses as a valid username.
  authenticate creds =
    if credsPlugin creds == intrayAuthPluginName
      then case parseUsername $ credsIdent creds of
        Nothing -> pure $ UserError Msg.InvalidLogin
        Just un -> pure $ Authenticated un
      else pure $ ServerError $ T.unwords ["Unknown authentication plugin:", credsPlugin creds]
  authPlugins _ = [intrayAuthPlugin]
  -- Read the authenticated username straight out of the session cookie.
  maybeAuthId =
    runMaybeT $ do
      s <- MaybeT $ lookupSession credsKey
      MaybeT $ return $ fromPathPiece s
-- | Name under which this site's credentials plugin is registered.
intrayAuthPluginName :: Text
intrayAuthPluginName = "intray-auth-plugin"

-- | The single auth plugin of this site: login, registration and
-- password change, all backed by the intray API server.
intrayAuthPlugin :: AuthPlugin App
intrayAuthPlugin = AuthPlugin intrayAuthPluginName dispatch loginWidget
  where
    -- Route the auth subsite's (method, path) pairs to our handlers.
    dispatch :: Text -> [Text] -> IntrayAuthHandler TypedContent
    dispatch "POST" ["login"] = postLoginR >>= sendResponse
    dispatch "GET" ["register"] = getNewAccountR >>= sendResponse
    dispatch "POST" ["register"] = postNewAccountR >>= sendResponse
    dispatch "GET" ["change-password"] = getChangePasswordR >>= sendResponse
    dispatch "POST" ["change-password"] = postChangePasswordR >>= sendResponse
    dispatch _ _ = notFound
    -- Login form rendered at AuthR LoginR.
    -- NOTE(review): the description text says "Intray Registration" on
    -- the *login* page — looks like a copy-paste slip; confirm intent
    -- before changing the user-facing string.
    loginWidget :: (Route Auth -> Route App) -> IntrayWidget
    loginWidget _ = do
      token <- genToken
      msgs <- getMessages
      setTitle "Intray Login"
      setDescriptionIdemp "Intray Registration: This is where you sign into your intray account."
      $(widgetFile "auth/login")
-- | What the login form posts: a user key (the username) and a passphrase.
data LoginData = LoginData
  { loginUserkey :: Text,
    loginPassword :: Text
  }
  deriving (Show)

-- | POST target of the login form, inside the auth subsite.
loginFormPostTargetR :: AuthRoute
loginFormPostTargetR = PluginR intrayAuthPluginName ["login"]
{-# ANN postLoginR ("NOCOVER" :: String) #-}

-- | Handle a login form submission: validate the posted fields, log in
-- against the intray API and, on success, store the credentials in the
-- session and redirect to the login destination.
postLoginR :: IntrayAuthHandler TypedContent
postLoginR = do
  let loginInputForm = LoginData <$> ireq textField "userkey" <*> ireq passwordField "passphrase"
  result <- runInputPostResult loginInputForm
  muser <-
    case result of
      FormMissing -> invalidArgs ["Form is missing"]
      FormFailure _ -> return $ Left Msg.InvalidLogin
      FormSuccess (LoginData ukey pwd) ->
        case parseUsername ukey of
          Nothing -> pure $ Left Msg.InvalidUsernamePass
          Just un -> do
            -- 'login' presumably performs the API login call and stores
            -- the resulting token; defined elsewhere in this module
            -- (not visible here).
            liftHandler $ login LoginForm {loginFormUsername = un, loginFormPassword = pwd}
            pure $ Right un
  case muser of
    Left err -> loginErrorMessageI LoginR err
    Right un -> setCredsRedirect $ Creds intrayAuthPluginName (usernameText un) []
-- | Route of the registration page/form inside the auth subsite.
registerR :: AuthRoute
registerR = PluginR intrayAuthPluginName ["register"]

-- | Render the account registration page.
getNewAccountR :: IntrayAuthHandler Html
getNewAccountR = do
  token <- genToken
  msgs <- getMessages
  liftHandler $
    defaultLayout $
      do
        setTitle "Intray Registration"
        setDescriptionIdemp "Intray Registration: This is where you sign up for an intray account."
        $(widgetFile "auth/register")
-- | What the registration form posts: the desired username and the
-- passphrase, entered twice for confirmation.
data NewAccount = NewAccount
  { newAccountUsername :: Username,
    newAccountPassword1 :: Text,
    newAccountPassword2 :: Text
  }
  deriving (Show)
-- | Handle a registration form submission.
--
-- Validates the username, checks that both passphrases match, registers
-- the account through the intray API (an HTTP 409 means the username is
-- already taken) and, on success, immediately logs the new user in.
postNewAccountR :: IntrayAuthHandler TypedContent
postNewAccountR = do
  let newAccountInputForm =
        NewAccount
          -- The username field is validated eagerly via checkMMap so
          -- malformed names are rejected with a specific message.
          <$> ireq
            ( checkMMap
                ( \t ->
                    pure $
                      case parseUsernameWithError t of
                        Left err -> Left (T.pack $ unwords ["Invalid username:", show t ++ ";", err])
                        Right un -> Right un
                )
                usernameText
                textField
            )
            "username"
          <*> ireq passwordField "passphrase"
          <*> ireq passwordField "passphrase-confirm"
  mr <- liftHandler getMessageRender
  result <- liftHandler $ runInputPostResult newAccountInputForm
  -- Either a list of error messages to display, or a ready Registration.
  mdata <-
    case result of
      FormMissing -> invalidArgs ["Form is incomplete"]
      FormFailure msgs -> pure $ Left msgs
      FormSuccess d ->
        pure $
          if newAccountPassword1 d == newAccountPassword2 d
            then
              Right
                Registration
                  { registrationUsername = newAccountUsername d,
                    registrationPassword = newAccountPassword1 d
                  }
            else Left [mr Msg.PassMismatch]
  case mdata of
    Left errs -> do
      setMessage $ toHtml $ T.concat errs
      liftHandler $ redirect $ AuthR registerR
    Right reg -> do
      errOrOk <- liftHandler $ runClient $ clientPostRegister reg
      case errOrOk of
        Left err -> do
          -- Map API failures onto a user-facing message; only the
          -- username-taken case (HTTP 409) is reported specifically.
          case err of
            FailureResponse _ resp ->
              case Http.statusCode $ responseStatusCode resp of
                409 -> setMessage "An account with this username already exists"
                _ -> setMessage "Failed to register for unknown reasons."
            _ -> setMessage "Failed to register for unknown reasons."
          liftHandler $ redirect $ AuthR registerR
        Right NoContent ->
          -- Registration succeeded: log in with the fresh credentials
          -- and store them in the session before redirecting.
          liftHandler $ do
            login
              LoginForm
                { loginFormUsername = registrationUsername reg,
                  loginFormPassword = registrationPassword reg
                }
            setCredsRedirect $
              Creds intrayAuthPluginName (usernameText $ registrationUsername reg) []
changePasswordTargetR :: AuthRoute
changePasswordTargetR = PluginR intrayAuthPluginName ["change-password"]
data ChangePassword = ChangePassword
{ changePasswordOldPassword :: Text,
changePasswordNewPassword1 :: Text,
changePasswordNewPassword2 :: Text
}
deriving (Show)
getChangePasswordR :: IntrayAuthHandler Html
getChangePasswordR = do
token <- genToken
msgs <- getMessages
liftHandler $ defaultLayout $(widgetFile "auth/change-password")
postChangePasswordR :: IntrayAuthHandler Html
postChangePasswordR = do
ChangePassword {..} <-
liftHandler $
runInputPost $
ChangePassword <$> ireq passwordField "old" <*> ireq passwordField "new1"
<*> ireq passwordField "new2"
unless (changePasswordNewPassword1 == changePasswordNewPassword2) $
invalidArgs ["Passwords do not match."]
liftHandler $
withLogin $
\t -> do
let cpp =
ChangePassphrase
{ changePassphraseOld = changePasswordOldPassword,
changePassphraseNew = changePasswordNewPassword1
}
mRes <- runClientOrDisallow $ clientPostChangePassphrase t cpp
case mRes of
Nothing -> invalidArgs ["Old password is not correct"]
Just NoContent -> redirect AccountR
instance RenderMessage App FormMessage where
renderMessage _ _ = defaultFormMessage
instance PathPiece (UUID a) where
fromPathPiece = parseUUIDText
toPathPiece = uuidText
withNavBar :: WidgetFor App () -> HandlerFor App Html
withNavBar widget = do
currentRoute <- getCurrentRoute
mauth <- maybeAuthId
msgs <- getMessages
defaultLayout $(widgetFile "with-nav-bar")
genToken :: MonadHandler m => m Html
genToken = do
alreadyExpired
req <- getRequest
let tokenKey = defaultCsrfParamName
pure $
case reqToken req of
Nothing -> mempty
Just n -> [shamlet|<input type=hidden name=#{tokenKey} value=#{n}>|]
runClient :: ClientM a -> Handler (Either ClientError a)
runClient func = do
man <- getsYesod appHttpManager
burl <- getsYesod appAPIBaseUrl
let cenv = mkClientEnv man burl
liftIO $ runClientM func cenv
runClientOrErr :: ClientM a -> Handler a
runClientOrErr func = do
errOrRes <- runClient func
case errOrRes of
Left err ->
handleStandardServantErrs err $ \resp -> sendResponseStatus Http.status500 $ show resp
Right r -> pure r
runClientOrDisallow :: ClientM a -> Handler (Maybe a)
runClientOrDisallow func = do
errOrRes <- runClient func
case errOrRes of
Left err ->
handleStandardServantErrs err $ \resp ->
if responseStatusCode resp == Http.unauthorized401
then pure Nothing
else sendResponseStatus Http.status500 $ show resp
Right r -> pure $ Just r
handleStandardServantErrs :: ClientError -> (Response -> Handler a) -> Handler a
handleStandardServantErrs err func =
case err of
FailureResponse _ resp -> func resp
ConnectionError e -> sendResponseStatus Http.status500 $ unwords ["Connection error while calling API:", show e]
e -> sendResponseStatus Http.status500 $ unwords ["Error while calling API:", show e]
login :: LoginForm -> Handler ()
login form = do
errOrRes <- runClient $ clientPostLogin form
case errOrRes of
Left err ->
handleStandardServantErrs err $ \resp ->
if responseStatusCode resp == Http.unauthorized401
then do
addMessage "error" "Unable to login"
redirect $ AuthR LoginR
else sendResponseStatus Http.status500 $ show resp
Right (Headers NoContent (HCons sessionHeader HNil)) ->
case sessionHeader of
Header session -> recordLoginToken (loginFormUsername form) session
_ ->
sendResponseStatus Http.status500 $
unwords ["The server responded but with an invalid header for login", show sessionHeader]
withLogin :: (Token -> Handler a) -> Handler a
withLogin func = do
un <- requireAuthId
mLoginToken <- lookupToginToken un
case mLoginToken of
Nothing -> redirect $ AuthR LoginR
Just token -> func token
lookupToginToken :: Username -> Handler (Maybe Token)
lookupToginToken un = runDB $ fmap (userTokenToken . entityVal) <$> getBy (UniqueUserToken un)
recordLoginToken :: Username -> Text -> Handler ()
recordLoginToken un session = do
let token = Token $ setCookieValue $ parseSetCookie $ TE.encodeUtf8 session
void $
runDB $
upsert UserToken {userTokenName = un, userTokenToken = token} [UserTokenToken =. token]
addInfoMessage :: Html -> Handler ()
addInfoMessage = addMessage ""
addNegativeMessage :: Html -> Handler ()
addNegativeMessage = addMessage "danger"
addPositiveMessage :: Html -> Handler ()
addPositiveMessage = addMessage "success"
getReloadR :: Handler ()
getReloadR = getAutoReloadR
| null | https://raw.githubusercontent.com/NorfairKing/intray/8bb58ecf6975adedabdab091f2bb01980635a987/intray-web-server/src/Intray/Web/Server/Foundation.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE RankNTypes # | # LANGUAGE FlexibleContexts #
# LANGUAGE LambdaCase #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE QuasiQuotes #
# LANGUAGE RecordWildCards #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeFamilies #
# LANGUAGE ViewPatterns #
# OPTIONS_GHC -fno - warn - orphans #
module Intray.Web.Server.Foundation
( module Intray.Web.Server.Foundation,
module Intray.Web.Server.Widget,
module Intray.Web.Server.Static,
module Intray.Web.Server.Constants,
module Intray.Web.Server.DB,
)
where
import Control.Monad.Except
import Control.Monad.Trans.Maybe
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import Database.Persist.Sql
import Database.Persist.Sqlite
import Import
import Intray.Client
import Intray.Web.Server.Constants
import Intray.Web.Server.DB
import Intray.Web.Server.Static
import Intray.Web.Server.Widget
import qualified Network.HTTP.Client as Http
import qualified Network.HTTP.Types as Http
import Text.Hamlet
import Web.Cookie
import Yesod hiding (Header)
import Yesod.Auth
import qualified Yesod.Auth.Message as Msg
import Yesod.AutoReload
import Yesod.EmbeddedStatic
type IntrayWidget = IntrayWidget' ()
type IntrayWidget' = WidgetFor App
type IntrayHandler = HandlerFor App
type IntrayAuthHandler a = AuthHandler App a
data App = App
{ appHttpManager :: Http.Manager,
appStatic :: EmbeddedStatic,
appTracking :: Maybe Text,
appVerification :: Maybe Text,
appAPIBaseUrl :: BaseUrl,
appConnectionPool :: ConnectionPool
}
mkYesodData "App" $(parseRoutesFile "routes")
instance Yesod App where
defaultLayout widget = do
let addReloadWidget = if development then (<> autoReloadWidgetFor ReloadR) else id
pc <- widgetToPageContent $ do
toWidgetHead [hamlet|<link rel="icon" href=@{StaticR static_favicon_ico} sizes="16x16 24x24 32x32 48x48 64x64" type="image/x-icon">|]
addReloadWidget $(widgetFile "default-body")
app <- getYesod
withUrlRenderer $(hamletFile "templates/default-page.hamlet")
yesodMiddleware = defaultCsrfMiddleware . defaultYesodMiddleware
authRoute _ = Just $ AuthR LoginR
maximumContentLengthIO s =
\case
Just AddR -> pure Nothing
r -> pure $ maximumContentLength s r
makeSessionBackend _ =
Just <$> defaultClientSessionBackend (60 * 24 * 365 * 10) "client_session_key.aes"
errorHandler NotFound =
fmap toTypedContent $
withNavBar $
do
setTitle "Page not found"
[whamlet|
<h1>
Page not found
|]
errorHandler other = defaultErrorHandler other
instance YesodPersist App where
type YesodPersistBackend App = SqlBackend
runDB func = do
pool <- getsYesod appConnectionPool
runSqlPool (retryOnBusy func) pool
instance YesodAuth App where
type AuthId App = Username
loginDest _ = AddR
logoutDest _ = HomeR
authHttpManager = getsYesod appHttpManager
authenticate creds =
if credsPlugin creds == intrayAuthPluginName
then case parseUsername $ credsIdent creds of
Nothing -> pure $ UserError Msg.InvalidLogin
Just un -> pure $ Authenticated un
else pure $ ServerError $ T.unwords ["Unknown authentication plugin:", credsPlugin creds]
authPlugins _ = [intrayAuthPlugin]
maybeAuthId =
runMaybeT $ do
s <- MaybeT $ lookupSession credsKey
MaybeT $ return $ fromPathPiece s
intrayAuthPluginName :: Text
intrayAuthPluginName = "intray-auth-plugin"
intrayAuthPlugin :: AuthPlugin App
intrayAuthPlugin = AuthPlugin intrayAuthPluginName dispatch loginWidget
where
dispatch :: Text -> [Text] -> IntrayAuthHandler TypedContent
dispatch "POST" ["login"] = postLoginR >>= sendResponse
dispatch "GET" ["register"] = getNewAccountR >>= sendResponse
dispatch "POST" ["register"] = postNewAccountR >>= sendResponse
dispatch "GET" ["change-password"] = getChangePasswordR >>= sendResponse
dispatch "POST" ["change-password"] = postChangePasswordR >>= sendResponse
dispatch _ _ = notFound
loginWidget :: (Route Auth -> Route App) -> IntrayWidget
loginWidget _ = do
token <- genToken
msgs <- getMessages
setTitle "Intray Login"
setDescriptionIdemp "Intray Registration: This is where you sign into your intray account."
$(widgetFile "auth/login")
data LoginData = LoginData
{ loginUserkey :: Text,
loginPassword :: Text
}
deriving (Show)
loginFormPostTargetR :: AuthRoute
loginFormPostTargetR = PluginR intrayAuthPluginName ["login"]
# ANN postLoginR ( " NOCOVER " : : String ) #
postLoginR :: IntrayAuthHandler TypedContent
postLoginR = do
let loginInputForm = LoginData <$> ireq textField "userkey" <*> ireq passwordField "passphrase"
result <- runInputPostResult loginInputForm
muser <-
case result of
FormMissing -> invalidArgs ["Form is missing"]
FormFailure _ -> return $ Left Msg.InvalidLogin
FormSuccess (LoginData ukey pwd) ->
case parseUsername ukey of
Nothing -> pure $ Left Msg.InvalidUsernamePass
Just un -> do
liftHandler $ login LoginForm {loginFormUsername = un, loginFormPassword = pwd}
pure $ Right un
case muser of
Left err -> loginErrorMessageI LoginR err
Right un -> setCredsRedirect $ Creds intrayAuthPluginName (usernameText un) []
registerR :: AuthRoute
registerR = PluginR intrayAuthPluginName ["register"]
getNewAccountR :: IntrayAuthHandler Html
getNewAccountR = do
token <- genToken
msgs <- getMessages
liftHandler $
defaultLayout $
do
setTitle "Intray Registration"
setDescriptionIdemp "Intray Registration: This is where you sign up for an intray account."
$(widgetFile "auth/register")
data NewAccount = NewAccount
{ newAccountUsername :: Username,
newAccountPassword1 :: Text,
newAccountPassword2 :: Text
}
deriving (Show)
postNewAccountR :: IntrayAuthHandler TypedContent
postNewAccountR = do
let newAccountInputForm =
NewAccount
<$> ireq
( checkMMap
( \t ->
pure $
case parseUsernameWithError t of
Left err -> Left (T.pack $ unwords ["Invalid username:", show t ++ ";", err])
Right un -> Right un
)
usernameText
textField
)
"username"
<*> ireq passwordField "passphrase"
<*> ireq passwordField "passphrase-confirm"
mr <- liftHandler getMessageRender
result <- liftHandler $ runInputPostResult newAccountInputForm
mdata <-
case result of
FormMissing -> invalidArgs ["Form is incomplete"]
FormFailure msgs -> pure $ Left msgs
FormSuccess d ->
pure $
if newAccountPassword1 d == newAccountPassword2 d
then
Right
Registration
{ registrationUsername = newAccountUsername d,
registrationPassword = newAccountPassword1 d
}
else Left [mr Msg.PassMismatch]
case mdata of
Left errs -> do
setMessage $ toHtml $ T.concat errs
liftHandler $ redirect $ AuthR registerR
Right reg -> do
errOrOk <- liftHandler $ runClient $ clientPostRegister reg
case errOrOk of
Left err -> do
case err of
FailureResponse _ resp ->
case Http.statusCode $ responseStatusCode resp of
409 -> setMessage "An account with this username already exists"
_ -> setMessage "Failed to register for unknown reasons."
_ -> setMessage "Failed to register for unknown reasons."
liftHandler $ redirect $ AuthR registerR
Right NoContent ->
liftHandler $ do
login
LoginForm
{ loginFormUsername = registrationUsername reg,
loginFormPassword = registrationPassword reg
}
setCredsRedirect $
Creds intrayAuthPluginName (usernameText $ registrationUsername reg) []
changePasswordTargetR :: AuthRoute
changePasswordTargetR = PluginR intrayAuthPluginName ["change-password"]
data ChangePassword = ChangePassword
{ changePasswordOldPassword :: Text,
changePasswordNewPassword1 :: Text,
changePasswordNewPassword2 :: Text
}
deriving (Show)
getChangePasswordR :: IntrayAuthHandler Html
getChangePasswordR = do
token <- genToken
msgs <- getMessages
liftHandler $ defaultLayout $(widgetFile "auth/change-password")
postChangePasswordR :: IntrayAuthHandler Html
postChangePasswordR = do
ChangePassword {..} <-
liftHandler $
runInputPost $
ChangePassword <$> ireq passwordField "old" <*> ireq passwordField "new1"
<*> ireq passwordField "new2"
unless (changePasswordNewPassword1 == changePasswordNewPassword2) $
invalidArgs ["Passwords do not match."]
liftHandler $
withLogin $
\t -> do
let cpp =
ChangePassphrase
{ changePassphraseOld = changePasswordOldPassword,
changePassphraseNew = changePasswordNewPassword1
}
mRes <- runClientOrDisallow $ clientPostChangePassphrase t cpp
case mRes of
Nothing -> invalidArgs ["Old password is not correct"]
Just NoContent -> redirect AccountR
instance RenderMessage App FormMessage where
renderMessage _ _ = defaultFormMessage
instance PathPiece (UUID a) where
fromPathPiece = parseUUIDText
toPathPiece = uuidText
withNavBar :: WidgetFor App () -> HandlerFor App Html
withNavBar widget = do
currentRoute <- getCurrentRoute
mauth <- maybeAuthId
msgs <- getMessages
defaultLayout $(widgetFile "with-nav-bar")
genToken :: MonadHandler m => m Html
genToken = do
alreadyExpired
req <- getRequest
let tokenKey = defaultCsrfParamName
pure $
case reqToken req of
Nothing -> mempty
Just n -> [shamlet|<input type=hidden name=#{tokenKey} value=#{n}>|]
runClient :: ClientM a -> Handler (Either ClientError a)
runClient func = do
man <- getsYesod appHttpManager
burl <- getsYesod appAPIBaseUrl
let cenv = mkClientEnv man burl
liftIO $ runClientM func cenv
runClientOrErr :: ClientM a -> Handler a
runClientOrErr func = do
errOrRes <- runClient func
case errOrRes of
Left err ->
handleStandardServantErrs err $ \resp -> sendResponseStatus Http.status500 $ show resp
Right r -> pure r
runClientOrDisallow :: ClientM a -> Handler (Maybe a)
runClientOrDisallow func = do
errOrRes <- runClient func
case errOrRes of
Left err ->
handleStandardServantErrs err $ \resp ->
if responseStatusCode resp == Http.unauthorized401
then pure Nothing
else sendResponseStatus Http.status500 $ show resp
Right r -> pure $ Just r
handleStandardServantErrs :: ClientError -> (Response -> Handler a) -> Handler a
handleStandardServantErrs err func =
case err of
FailureResponse _ resp -> func resp
ConnectionError e -> sendResponseStatus Http.status500 $ unwords ["Connection error while calling API:", show e]
e -> sendResponseStatus Http.status500 $ unwords ["Error while calling API:", show e]
login :: LoginForm -> Handler ()
login form = do
errOrRes <- runClient $ clientPostLogin form
case errOrRes of
Left err ->
handleStandardServantErrs err $ \resp ->
if responseStatusCode resp == Http.unauthorized401
then do
addMessage "error" "Unable to login"
redirect $ AuthR LoginR
else sendResponseStatus Http.status500 $ show resp
Right (Headers NoContent (HCons sessionHeader HNil)) ->
case sessionHeader of
Header session -> recordLoginToken (loginFormUsername form) session
_ ->
sendResponseStatus Http.status500 $
unwords ["The server responded but with an invalid header for login", show sessionHeader]
withLogin :: (Token -> Handler a) -> Handler a
withLogin func = do
un <- requireAuthId
mLoginToken <- lookupToginToken un
case mLoginToken of
Nothing -> redirect $ AuthR LoginR
Just token -> func token
lookupToginToken :: Username -> Handler (Maybe Token)
lookupToginToken un = runDB $ fmap (userTokenToken . entityVal) <$> getBy (UniqueUserToken un)
recordLoginToken :: Username -> Text -> Handler ()
recordLoginToken un session = do
let token = Token $ setCookieValue $ parseSetCookie $ TE.encodeUtf8 session
void $
runDB $
upsert UserToken {userTokenName = un, userTokenToken = token} [UserTokenToken =. token]
addInfoMessage :: Html -> Handler ()
addInfoMessage = addMessage ""
addNegativeMessage :: Html -> Handler ()
addNegativeMessage = addMessage "danger"
addPositiveMessage :: Html -> Handler ()
addPositiveMessage = addMessage "success"
getReloadR :: Handler ()
getReloadR = getAutoReloadR
|
507ff1e73045d20c5c41184b8e3e7982422f4052ef4a1c4effaf8e7762d233bd | tokenrove/imago | morphology.lisp | ;;; IMAGO library
;;; Morphology functions
;;;
Copyright ( C ) 2021
( ) , ( )
;;;
;;; The authors grant you the rights to distribute
;;; and use this software as governed by the terms
of the Lisp Lesser GNU Public License
;;; (),
;;; known as the LLGPL.
(in-package :imago)
;; ====================
;; Component labeling
;; ====================
(alex:define-constant +cross-pattern+
'((-1 0)
( 0 -1)
( 0 1)
( 1 0))
:test #'equalp
:documentation "Neighborhood pattern for Manhattan distance. Two
pixels are considered neighbors if Manhattan distance between them is
1")
(alex:define-constant +square-pattern+
'((-1 -1)
(-1 0)
(-1 1)
( 0 -1)
( 0 1)
( 1 -1)
( 1 0)
( 1 1))
:test #'equalp
:documentation "Neighborhood pattern for Chebyshev distance. Two
pixels are considered neighbors if Chebyshev distance between them is
1")
(defparameter *structuring-element*
(make-array '(3 3)
:element-type 'bit
:initial-contents '((1 1 1)
(1 1 1)
(1 1 1)))
"Default structuring element for erosion and dilation")
(sera:-> add-indices (list list list) (values list &optional))
(declaim (inline add-indices))
(defun add-indices (x y dimensions)
(declare (optimize (speed 3)))
(mapcar
(lambda (x y max)
(declare (type fixnum x y max))
(clamp (+ x y) 0 (1- max)))
x y dimensions))
(defun label-components (image &key (connectivity +cross-pattern+))
"Perform connected components labeling on binary image. Pixels with
value zero are considered background. Each cluster gets a unique
integer label. The result is returned in an array of fixnums with the
same dimenions as image."
(declare (type binary-image image)
(type list connectivity)
(optimize (speed 3)))
(with-image-definition (image width height pixels)
(declare (type (simple-array bit (* *)) pixels))
(let* ((dimensions (list height width))
(output (make-array dimensions
:element-type 'fixnum
:initial-element -1))
(current-label 1))
(declare (type fixnum current-label))
(do-image-pixels (image color x y)
(let (queue)
(declare (type list queue))
(flet ((push-in-queue (y x)
(cond
;; If an element is a background element, label it as so
((zerop (aref pixels y x))
(setf (aref output y x) 0)
0)
;; If the element does not have a label, assign the current label
((= (aref output y x) -1)
(setf (aref output y x) current-label)
(push (list y x) queue)
1)
(t
;; Already has a label - skip
0))))
(loop with delta fixnum = (push-in-queue y x)
until (null queue) do
(let ((index (pop queue)))
(map 'nil
(lambda (shift)
(apply #'push-in-queue
(add-indices index shift dimensions)))
connectivity))
finally (incf current-label delta)))))
output)))
(defun component-boxes (components)
"Return bounding boxes ((XMIN YMIN) (XMAX YMAX)) for connected
components of an image. COMPONENTS is an array returned by LABEL-COMPONENTS"
(declare (type (simple-array fixnum (* *)) components))
(let ((initial-box '((#.most-positive-fixnum
#.most-positive-fixnum)
(0 0)))
(boxes (make-hash-table)))
(array-operations/utilities:nested-loop (y x)
(array-dimensions components)
(let ((element (aref components y x)))
(destructuring-bind (min max)
(gethash element boxes initial-box)
(setf (gethash element boxes)
(list
(mapcar #'min min (list x y))
(mapcar #'max max (list x y)))))))
(loop for component fixnum from 1 by 1
for box = (gethash component boxes)
while box collect box)))
;; ====================
;; Erode & Dilate
;; ====================
(macrolet ((def-morphological-op (name operation documentation)
`(progn
(sera:-> ,name (binary-image &optional (simple-array bit (* *)))
(values binary-image &optional))
(defun ,name (image &optional (structuring-element *structuring-element*))
,documentation
(declare (optimize (speed 3)))
(let* ((width (image-width image))
(height (image-height image))
(result (make-binary-image width height))
(image-pixels (image-pixels image))
(result-pixels (image-pixels result)))
(declare (type alex:positive-fixnum width height)
(type (simple-array bit (* *)) image-pixels result-pixels))
(destructuring-bind (se-height/2 se-width/2)
(mapcar (the function (alex:rcurry #'floor 2))
(array-dimensions structuring-element))
(declare (type alex:positive-fixnum se-height/2 se-width/2))
(aops:each-index! result-pixels
(i j)
(aops:reduce-index #',operation (k l)
(* (aref structuring-element k l)
(aref image-pixels
(mod (+ i se-height/2 (- k)) height)
(mod (+ j se-width/2 (- l)) width))))))
result)))))
(def-morphological-op erode min
"Erode binary image. STRUCTURING-ELEMENT is an optional 2D simple
array of bits which serves as a structuring element and defaults to
*STRUCTURING-ELEMENT*.")
(def-morphological-op dilate max
"Dilate binary image. STRUCTURING-ELEMENT is an optional 2D simple
array of bits which serves as a structuring element and defaults to
*STRUCTURING-ELEMENT*."))
;; ====================
;; Distance transform
;; ====================
(sera:-> mdt-pass!
((array single-float (*)))
(values (array single-float (*)) &optional))
(defun mdt-pass! (array)
(declare (type (array single-float (*)) array))
(let ((length (length array)))
(loop for i from 1 below length do
(setf (aref array i)
(min (1+ (aref array (1- i)))
(aref array i))))
(loop for i from (- length 2) downto 0 do
(setf (aref array i)
(min (1+ (aref array (1+ i)))
(aref array i))))
array))
(sera:-> edt-pass!
((array single-float (*)))
(values (array single-float (*)) &optional))
(defun edt-pass! (array)
(declare (type (array single-float (*)) array))
(let ((length (length array))
(envelope-minima (list 0))
envelope-crossing)
(loop for i fixnum from 1 below length do
(loop
for current-minima fixnum =
(car envelope-minima)
for crossing single-float =
(/ (- (+ (expt i 2)
(aref array i))
(+ (expt current-minima 2)
(aref array current-minima)))
(* 2.0 (- i current-minima)))
while (and envelope-crossing
(<= crossing (car envelope-crossing)))
do
(pop envelope-crossing)
(pop envelope-minima)
finally
(push i envelope-minima)
(push crossing envelope-crossing)))
(loop
with dist = (copy-seq array)
with envelope-minima = (reverse envelope-minima)
with envelope-crossing = (reverse envelope-crossing)
for i fixnum below length do
(loop while (and envelope-crossing
(< (car envelope-crossing) i))
do
(pop envelope-crossing)
(pop envelope-minima))
(setf (aref array i)
(+ (expt (- i (car envelope-minima)) 2)
(aref dist (car envelope-minima))))))
array)
(declaim (inline distance-transform-pass))
(defun distance-transform-pass (type)
(declare (type (member :mdt :edt) type))
(ecase type
(:mdt #'mdt-pass!)
(:edt #'edt-pass!)))
(sera:-> distance-transform
(image &key (:type symbol))
(values (simple-array single-float (* *)) &optional))
(defun distance-transform (image &key (type :edt))
"Perform distance transform on a binary image. Every 1 is replaced
with 0f0 and every 0 is replaced with distance to the closest 1.
TYPE can be either :MDT (Manhattan distance transform) or :EDT
(squared Euclidean distance transform)."
(declare (type binary-image image))
(with-image-definition (image width height pixels)
(let ((dt-pass (distance-transform-pass type))
Initialize the array with distances
(distances
(let ((max-dim (expt (max width height) 2)))
(aops:vectorize* 'single-float (pixels)
(* (- 1.0 pixels) max-dim)))))
Walk through the rows of the array and calculate MDT for each
;; row separately.
(dotimes (row height)
(funcall dt-pass (make-array width
:element-type 'single-float
:displaced-to distances
:displaced-index-offset (* row width))))
;; Now walk through the columns. Have to permute the array for that :(
(let ((permutation (aops:permute '(1 0) distances)))
(dotimes (column width)
(funcall dt-pass (make-array height
:element-type 'single-float
:displaced-to permutation
:displaced-index-offset (* column height))))
(aops:permute '(1 0) permutation)))))
;; =========
;; Thinning
;; =========
(sera:-> thinning-pass (binary-image boolean)
(values binary-image &optional))
(defun thinning-pass (image odd-iteration-p)
(declare (optimize (speed 3)))
(with-image-definition (image width height pixels)
(declare (type (simple-array bit (* *)) pixels))
(let ((copy (alex:copy-array pixels)))
(declare (type (simple-array bit (* *)) copy))
(do-image-pixels (image pixel x y)
(unless (zerop (aref pixels y x))
(let* ((p1 (aref pixels (mod (+ y -1) height) (mod (+ x -1) width)))
(p2 (aref pixels (mod (+ y -1) height) (mod (+ x 0) width)))
(p3 (aref pixels (mod (+ y -1) height) (mod (+ x +1) width)))
(p4 (aref pixels (mod (+ y 0) height) (mod (+ x +1) width)))
(p5 (aref pixels (mod (+ y +1) height) (mod (+ x +1) width)))
(p6 (aref pixels (mod (+ y +1) height) (mod (+ x 0) width)))
(p7 (aref pixels (mod (+ y +1) height) (mod (+ x -1) width)))
(p8 (aref pixels (mod (+ y 0) height) (mod (+ x -1) width)))
(c (+ (logand (- 1 p2) (logior p3 p4))
(logand (- 1 p4) (logior p5 p6))
(logand (- 1 p6) (logior p7 p8))
(logand (- 1 p8) (logior p1 p2))))
(n1 (+ (logior p1 p2) (logior p3 p4)
(logior p5 p6) (logior p7 p8)))
(n2 (+ (logior p2 p3) (logior p4 p5)
(logior p6 p7) (logior p8 p1)))
(n (min n1 n2))
(o (if odd-iteration-p
(logand p4 (logior (- 1 p5) p3 p2))
(logand p8 (logior (- 1 p1) p7 p6)))))
(when (and (zerop o) (= c 1) (<= 2 n 3))
(setf (aref copy y x) 0)))))
(make-binary-image-from-pixels copy))))
(sera:-> thin (binary-image) (values binary-image &optional))
(defun thin (image)
"Perform thinning (extracting topological skeleton) of binary image."
(loop with current = image
for iteration fixnum from 1 by 1
for next = (thinning-pass current (oddp iteration))
until (equalp (image-pixels current)
(image-pixels next))
do (setq current next)
finally (return current)))
| null | https://raw.githubusercontent.com/tokenrove/imago/22d42e75e4f0dff24eb952f66e36c7f6816f7336/src/morphology.lisp | lisp | IMAGO library
Morphology functions
The authors grant you the rights to distribute
and use this software as governed by the terms
(),
known as the LLGPL.
====================
Component labeling
====================
If an element is a background element, label it as so
If the element does not have a label, assign the current label
Already has a label - skip
====================
Erode & Dilate
====================
====================
Distance transform
====================
row separately.
Now walk through the columns. Have to permute the array for that :(
=========
Thinning
========= | Copyright ( C ) 2021
( ) , ( )
of the Lisp Lesser GNU Public License
(in-package :imago)
(alex:define-constant +cross-pattern+
'((-1 0)
( 0 -1)
( 0 1)
( 1 0))
:test #'equalp
:documentation "Neighborhood pattern for Manhattan distance. Two
pixels are considered neighbors if Manhattan distance between them is
1")
(alex:define-constant +square-pattern+
'((-1 -1)
(-1 0)
(-1 1)
( 0 -1)
( 0 1)
( 1 -1)
( 1 0)
( 1 1))
:test #'equalp
:documentation "Neighborhood pattern for Chebyshev distance. Two
pixels are considered neighbors if Chebyshev distance between them is
1")
(defparameter *structuring-element*
(make-array '(3 3)
:element-type 'bit
:initial-contents '((1 1 1)
(1 1 1)
(1 1 1)))
"Default structuring element for erosion and dilation")
(sera:-> add-indices (list list list) (values list &optional))
(declaim (inline add-indices))
(defun add-indices (x y dimensions)
(declare (optimize (speed 3)))
(mapcar
(lambda (x y max)
(declare (type fixnum x y max))
(clamp (+ x y) 0 (1- max)))
x y dimensions))
(defun label-components (image &key (connectivity +cross-pattern+))
"Perform connected components labeling on binary image. Pixels with
value zero are considered background. Each cluster gets a unique
integer label. The result is returned in an array of fixnums with the
same dimenions as image."
(declare (type binary-image image)
(type list connectivity)
(optimize (speed 3)))
(with-image-definition (image width height pixels)
(declare (type (simple-array bit (* *)) pixels))
(let* ((dimensions (list height width))
(output (make-array dimensions
:element-type 'fixnum
:initial-element -1))
(current-label 1))
(declare (type fixnum current-label))
(do-image-pixels (image color x y)
(let (queue)
(declare (type list queue))
(flet ((push-in-queue (y x)
(cond
((zerop (aref pixels y x))
(setf (aref output y x) 0)
0)
((= (aref output y x) -1)
(setf (aref output y x) current-label)
(push (list y x) queue)
1)
(t
0))))
(loop with delta fixnum = (push-in-queue y x)
until (null queue) do
(let ((index (pop queue)))
(map 'nil
(lambda (shift)
(apply #'push-in-queue
(add-indices index shift dimensions)))
connectivity))
finally (incf current-label delta)))))
output)))
(defun component-boxes (components)
"Return bounding boxes ((XMIN YMIN) (XMAX YMAX)) for connected
components of an image. COMPONENTS is an array returned by LABEL-COMPONENTS"
(declare (type (simple-array fixnum (* *)) components))
(let ((initial-box '((#.most-positive-fixnum
#.most-positive-fixnum)
(0 0)))
(boxes (make-hash-table)))
(array-operations/utilities:nested-loop (y x)
(array-dimensions components)
(let ((element (aref components y x)))
(destructuring-bind (min max)
(gethash element boxes initial-box)
(setf (gethash element boxes)
(list
(mapcar #'min min (list x y))
(mapcar #'max max (list x y)))))))
(loop for component fixnum from 1 by 1
for box = (gethash component boxes)
while box collect box)))
(macrolet ((def-morphological-op (name operation documentation)
`(progn
(sera:-> ,name (binary-image &optional (simple-array bit (* *)))
(values binary-image &optional))
(defun ,name (image &optional (structuring-element *structuring-element*))
,documentation
(declare (optimize (speed 3)))
(let* ((width (image-width image))
(height (image-height image))
(result (make-binary-image width height))
(image-pixels (image-pixels image))
(result-pixels (image-pixels result)))
(declare (type alex:positive-fixnum width height)
(type (simple-array bit (* *)) image-pixels result-pixels))
(destructuring-bind (se-height/2 se-width/2)
(mapcar (the function (alex:rcurry #'floor 2))
(array-dimensions structuring-element))
(declare (type alex:positive-fixnum se-height/2 se-width/2))
(aops:each-index! result-pixels
(i j)
(aops:reduce-index #',operation (k l)
(* (aref structuring-element k l)
(aref image-pixels
(mod (+ i se-height/2 (- k)) height)
(mod (+ j se-width/2 (- l)) width))))))
result)))))
(def-morphological-op erode min
"Erode binary image. STRUCTURING-ELEMENT is an optional 2D simple
array of bits which serves as a structuring element and defaults to
*STRUCTURING-ELEMENT*.")
(def-morphological-op dilate max
"Dilate binary image. STRUCTURING-ELEMENT is an optional 2D simple
array of bits which serves as a structuring element and defaults to
*STRUCTURING-ELEMENT*."))
(sera:-> mdt-pass!
((array single-float (*)))
(values (array single-float (*)) &optional))
(defun mdt-pass! (array)
  "Destructively run one 1D pass of the Manhattan (chamfer) distance
transform over ARRAY: after the two sweeps each cell holds
min_j (|i - j| + old ARRAY(j)).  Returns ARRAY."
  (declare (type (array single-float (*)) array))
  (let ((n (length array)))
    ;; Forward sweep: relax every cell against its left neighbour.
    (loop for i from 1 below n
          for candidate = (1+ (aref array (1- i)))
          when (< candidate (aref array i))
            do (setf (aref array i) candidate))
    ;; Backward sweep: relax every cell against its right neighbour.
    (loop for i from (- n 2) downto 0
          for candidate = (1+ (aref array (1+ i)))
          when (< candidate (aref array i))
            do (setf (aref array i) candidate)))
  array)
(sera:-> edt-pass!
         ((array single-float (*)))
         (values (array single-float (*)) &optional))
(defun edt-pass! (array)
  "Destructively run one 1D pass of the squared Euclidean distance
transform: replace ARRAY(i) with min_j ((i - j)^2 + old ARRAY(j)) by
maintaining the lower envelope of the parabolas rooted at each index
(the Felzenszwalb-Huttenlocher method).  Returns ARRAY."
  (declare (type (array single-float (*)) array))
  (let ((length (length array))
        ;; Roots of the parabolas currently forming the lower envelope,
        ;; most recently added first.
        (envelope-minima (list 0))
        ;; Abscissas where consecutive envelope parabolas intersect
        ;; (same order as ENVELOPE-MINIMA).
        envelope-crossing)
    ;; First sweep: build the lower envelope left to right.
    (loop for i fixnum from 1 below length do
         (loop
            for current-minima fixnum =
              (car envelope-minima)
            ;; Intersection abscissa of the parabola rooted at I with
            ;; the topmost envelope parabola rooted at CURRENT-MINIMA.
            for crossing single-float =
              (/ (- (+ (expt i 2)
                       (aref array i))
                    (+ (expt current-minima 2)
                       (aref array current-minima)))
                 (* 2.0 (- i current-minima)))
            ;; Pop parabolas that the new one dominates to the right of
            ;; the previous crossing point.
            while (and envelope-crossing
                       (<= crossing (car envelope-crossing)))
            do
            (pop envelope-crossing)
            (pop envelope-minima)
            finally
            (push i envelope-minima)
            (push crossing envelope-crossing)))
    ;; Second sweep: evaluate the envelope at every index, advancing
    ;; through the crossings from the left.
    (loop
       with dist = (copy-seq array)
       with envelope-minima = (reverse envelope-minima)
       with envelope-crossing = (reverse envelope-crossing)
       for i fixnum below length do
       (loop while (and envelope-crossing
                        (< (car envelope-crossing) i))
             do
             (pop envelope-crossing)
             (pop envelope-minima))
       (setf (aref array i)
             (+ (expt (- i (car envelope-minima)) 2)
                (aref dist (car envelope-minima))))))
  array)
(declaim (inline distance-transform-pass))
(defun distance-transform-pass (type)
  "Return the destructive 1D pass function implementing the distance
transform designated by TYPE, either :MDT or :EDT."
  (declare (type (member :mdt :edt) type))
  (ecase type
    (:edt #'edt-pass!)
    (:mdt #'mdt-pass!)))
(sera:-> distance-transform
(image &key (:type symbol))
(values (simple-array single-float (* *)) &optional))
(defun distance-transform (image &key (type :edt))
  "Perform distance transform on a binary image. Every 1 is replaced
with 0f0 and every 0 is replaced with distance to the closest 1.
TYPE can be either :MDT (Manhattan distance transform) or :EDT
(squared Euclidean distance transform)."
  (declare (type binary-image image))
  (with-image-definition (image width height pixels)
    (let ((dt-pass (distance-transform-pass type))
          ;; Initialize the array with distances: 0f0 for set pixels
          ;; and a value at least as large as any attainable distance
          ;; for clear ones.  (These two comment lines had lost their
          ;; ';;' markers, which made the LET form unreadable.)
          (distances
           (let ((max-dim (expt (max width height) 2)))
             (aops:vectorize* 'single-float (pixels)
               (* (- 1.0 pixels) max-dim)))))
      ;; Walk through the rows of the array and calculate the 1D pass
      ;; for each one in place; the rows are displaced windows into
      ;; DISTANCES.
      (dotimes (row height)
        (funcall dt-pass (make-array width
                                     :element-type 'single-float
                                     :displaced-to distances
                                     :displaced-index-offset (* row width))))
      ;; Transpose, run the same pass over the columns, and transpose
      ;; the result back to the original orientation.
      (let ((permutation (aops:permute '(1 0) distances)))
        (dotimes (column width)
          (funcall dt-pass (make-array height
                                       :element-type 'single-float
                                       :displaced-to permutation
                                       :displaced-index-offset (* column height))))
        (aops:permute '(1 0) permutation)))))
(sera:-> thinning-pass (binary-image boolean)
         (values binary-image &optional))
(defun thinning-pass (image odd-iteration-p)
  "Perform one sub-iteration of thinning on IMAGE and return the result
as a fresh binary image.  ODD-ITERATION-P selects which of the two
alternating sub-iterations runs.  The deletion conditions match the
Guo-Hall thinning scheme; image borders wrap around (torus indexing)."
  (declare (optimize (speed 3)))
  (with-image-definition (image width height pixels)
    (declare (type (simple-array bit (* *)) pixels))
    (let ((copy (alex:copy-array pixels)))
      (declare (type (simple-array bit (* *)) copy))
      (do-image-pixels (image pixel x y)
        ;; Only set pixels are candidates for deletion.
        (unless (zerop (aref pixels y x))
          ;; The 8-neighbourhood of (X, Y), clockwise from the
          ;; north-west corner:
          ;;   p1 p2 p3
          ;;   p8  .  p4
          ;;   p7 p6 p5
          (let* ((p1 (aref pixels (mod (+ y -1) height) (mod (+ x -1) width)))
                 (p2 (aref pixels (mod (+ y -1) height) (mod (+ x 0) width)))
                 (p3 (aref pixels (mod (+ y -1) height) (mod (+ x +1) width)))
                 (p4 (aref pixels (mod (+ y 0) height) (mod (+ x +1) width)))
                 (p5 (aref pixels (mod (+ y +1) height) (mod (+ x +1) width)))
                 (p6 (aref pixels (mod (+ y +1) height) (mod (+ x 0) width)))
                 (p7 (aref pixels (mod (+ y +1) height) (mod (+ x -1) width)))
                 (p8 (aref pixels (mod (+ y 0) height) (mod (+ x -1) width)))
                 ;; C: connectivity number of the neighbourhood, as in
                 ;; Guo-Hall thinning.
                 (c (+ (logand (- 1 p2) (logior p3 p4))
                       (logand (- 1 p4) (logior p5 p6))
                       (logand (- 1 p6) (logior p7 p8))
                       (logand (- 1 p8) (logior p1 p2))))
                 ;; N1/N2: two neighbour counts over diagonal pairings;
                 ;; N is their minimum.
                 (n1 (+ (logior p1 p2) (logior p3 p4)
                        (logior p5 p6) (logior p7 p8)))
                 (n2 (+ (logior p2 p3) (logior p4 p5)
                        (logior p6 p7) (logior p8 p1)))
                 (n (min n1 n2))
                 ;; O: side condition that alternates between the odd
                 ;; and even sub-iterations.
                 (o (if odd-iteration-p
                        (logand p4 (logior (- 1 p5) p3 p2))
                        (logand p8 (logior (- 1 p1) p7 p6)))))
            ;; Delete the pixel (in the fresh copy) when all deletion
            ;; conditions hold; reads always go to the original PIXELS.
            (when (and (zerop o) (= c 1) (<= 2 n 3))
              (setf (aref copy y x) 0)))))
      (make-binary-image-from-pixels copy))))
(sera:-> thin (binary-image) (values binary-image &optional))
(defun thin (image)
  "Perform thinning (extracting topological skeleton) of binary image.
Alternating sub-iterations are applied until the pixels stop changing."
  (loop for iteration fixnum from 1
        for current = image then next
        for next = (thinning-pass current (oddp iteration))
        when (equalp (image-pixels current)
                     (image-pixels next))
          return current))
|
74fa1b33f6c7fcee47439c79be5a16ed583fad6f6e18d4bfda78f1d241353696 | UU-ComputerScience/uhc | BoundedInt1.hs | {- ----------------------------------------------------------------------------------------
what : Bounded class, for Int
expected: ok
platform: word size dependent
---------------------------------------------------------------------------------------- -}
module BoundedInt1 where

-- Print the smallest and the largest machine 'Int', one per line.
-- (The values are platform word-size dependent.)
main :: IO ()
main = do
  print (minBound :: Int)
  print (maxBound :: Int)
| null | https://raw.githubusercontent.com/UU-ComputerScience/uhc/f2b94a90d26e2093d84044b3832a9a3e3c36b129/EHC/test/regress/99/BoundedInt1.hs | haskell | ----------------------------------------------------------------------------------------
what : Bounded class, for Int
expected: ok
platform: word size dependent
---------------------------------------------------------------------------------------- |
module BoundedInt1 where

-- Print the smallest and the largest machine 'Int', one per line.
-- (The values are platform word-size dependent.)
main :: IO ()
main = do
  print (minBound :: Int)
  print (maxBound :: Int)
|
23b63953c5fc0500f2a45e3b2d4d8f00287a9df7b073ad72b020d36edc1b1698 | locusmath/locus | object.clj | (ns locus.set.tree.two-quiver.core.object
(:require [locus.set.logic.core.set :refer :all]
[locus.set.logic.limit.product :refer :all]
[locus.set.logic.sequence.object :refer :all]
[locus.set.logic.structure.protocols :refer :all]
[locus.set.mapping.general.core.object :refer :all]
[locus.con.core.setpart :refer :all]
[locus.con.core.object :refer [projection]]
[locus.set.quiver.relation.binary.product :refer :all]
[locus.set.quiver.relation.binary.br :refer :all]
[locus.set.quiver.relation.binary.sr :refer :all]
[locus.set.quiver.structure.core.protocols :refer :all]
[locus.set.quiver.binary.core.object :refer :all]
[locus.set.tree.structure.core.protocols :refer :all])
(:import (locus.set.quiver.binary.core.object Quiver)))
;; A 2-quiver is defined by an ordered pair of quivers P, Q : A -> B -> C, in which the two
;; quivers are composable so that the morphism set of one quiver is equal to the object
;; set of another. This is similar to how given a pair of functions f: A -> B and
;; g: B -> C sharing a common input and output, they can form a triangle copresheaf. So
;; these include any pair of quivers that can be chained together, without extra conditions.
;; Special cases include path quivers and two globular sets.
(defprotocol StructuredTwoQuiver
  "A general protocol for describing structures over two-quiver copresheaves."
  (two-morphisms [this]
    "The set of two morphisms of the structured two-quiver.")
  (two-source-fn [this]
    "The function mapping a two morphism to its source one-morphism.")
  (two-target-fn [this]
    "The function mapping a two morphism to its target one-morphism."))
;; A two-quiver copresheaf: TWO-MORPHISMS over MORPHISMS over OBJECTS,
;; with TWO-SOURCE/TWO-TARGET sending 2-morphisms to 1-morphisms and
;; SOURCE/TARGET sending 1-morphisms to objects.
(deftype TwoQuiver [two-morphisms morphisms objects two-source two-target source target]
  StructuredDiset
  (first-set [this] morphisms)
  (second-set [this] objects)
  ConcreteObject
  ;; The underlying set is the disjoint union of the three cell sets.
  (underlying-set [this]
    (->CartesianCoproduct [two-morphisms morphisms objects]))
  StructuredQuiver
  ;; The one-quiver of morphisms between objects.
  (underlying-quiver [this]
    (->Quiver
      morphisms
      objects
      source
      target))
  (source-fn [this] source)
  (target-fn [this] target)
  (transition [this e]
    (list (source e) (target e)))
  StructuredTwoQuiver
  (two-morphisms [this] two-morphisms)
  (two-source-fn [this] two-source)
  (two-target-fn [this] two-target))
;; Classify two-quivers within the copresheaf ontology.
(derive ::two-quiver :locus.set.logic.structure.protocols/copresheaf)
(derive TwoQuiver ::two-quiver)
;; Visualisation of two quivers
;; A two-quiver is visualised by drawing its underlying one-quiver.
(defmethod visualize TwoQuiver
  [quiver]
  (visualize (underlying-quiver quiver)))
;; Component functions for 2-morphisms in 2-quivers
(defn two-morphism-s
  "The source 1-morphism of TWO-MORPHISM in QUIVER."
  [quiver two-morphism]
  ((two-source-fn quiver) two-morphism))
(defn two-morphism-t
  "The target 1-morphism of TWO-MORPHISM in QUIVER."
  [quiver two-morphism]
  ((two-target-fn quiver) two-morphism))
(defn two-morphism-ss
  "The source object of the source 1-morphism of TWO-MORPHISM."
  [quiver two-morphism]
  (source-element quiver (two-morphism-s quiver two-morphism)))
(defn two-morphism-st
  "The source object of the target 1-morphism of TWO-MORPHISM."
  [quiver two-morphism]
  (source-element quiver (two-morphism-t quiver two-morphism)))
(defn two-morphism-ts
  "The target object of the source 1-morphism of TWO-MORPHISM."
  [quiver two-morphism]
  (target-element quiver (two-morphism-s quiver two-morphism)))
(defn two-morphism-tt
  "The target object of the target 1-morphism of TWO-MORPHISM."
  [quiver two-morphism]
  (target-element quiver (two-morphism-t quiver two-morphism)))
;; Corresponding set functions for the component mappings
(defn s-function
  "The map from 2-morphisms to their source 1-morphisms, as a set function."
  [quiver]
  (->SetFunction
    (two-morphisms quiver)
    (morphisms quiver)
    (fn [two-morphism]
      (two-morphism-s quiver two-morphism))))
(defn t-function
  "The map from 2-morphisms to their target 1-morphisms, as a set function."
  [quiver]
  (->SetFunction
    (two-morphisms quiver)
    (morphisms quiver)
    (fn [two-morphism]
      (two-morphism-t quiver two-morphism))))
(defn ss-function
  "The map from a 2-morphism to the source object of its source 1-morphism."
  [quiver]
  (->SetFunction
    (two-morphisms quiver)
    (objects quiver)
    (fn [two-morphism]
      (two-morphism-ss quiver two-morphism))))
(defn st-function
  "The map from a 2-morphism to the source object of its target 1-morphism."
  [quiver]
  (->SetFunction
    (two-morphisms quiver)
    (objects quiver)
    (fn [two-morphism]
      (two-morphism-st quiver two-morphism))))
(defn ts-function
  "The map from a 2-morphism to the target object of its source 1-morphism."
  [quiver]
  (->SetFunction
    (two-morphisms quiver)
    (objects quiver)
    (fn [two-morphism]
      (two-morphism-ts quiver two-morphism))))
(defn tt-function
  "The map from a 2-morphism to the target object of its target 1-morphism."
  [quiver]
  (->SetFunction
    (two-morphisms quiver)
    (objects quiver)
    (fn [two-morphism]
      (two-morphism-tt quiver two-morphism))))
;; The morphic quiver of a two-quiver copresheaf.
;; This contains 1-morphisms as its objects and 2-morphisms as its morphisms.
(defn morphic-quiver
  "The quiver whose objects are the 1-morphisms of QUIVER and whose
  morphisms are its 2-morphisms."
  [quiver]
  (->Quiver
    (two-morphisms quiver)
    (morphisms quiver)
    (two-source-fn quiver)
    (two-target-fn quiver)))
;; Get a successor quiver from a two-quiver
;; The successor quiver at a given cell type: depth 0 (objects) yields
;; the identity function, depth 1 the underlying one-quiver, and depth 2
;; the quiver of 2-morphisms over 1-morphisms.
(defmethod successor-quiver ::two-quiver
  [two-quiver cell-type]
  (case (count cell-type)
    0 (identity-function (objects two-quiver))
    1 (underlying-quiver two-quiver)
    2 (morphic-quiver two-quiver)))
;; Components of two quivers
;; The cell set addressed by a path X: () for objects, (0) for
;; morphisms, (0 0) for two-morphisms.
(defmethod get-set ::two-quiver
  [two-quiver x]
  (case (count x)
    0 (objects two-quiver)
    1 (morphisms two-quiver)
    2 (two-morphisms two-quiver)))
;; The component function addressed by a pair of paths: the identity
;; functions, the source/target maps, and their composites down to
;; objects.
(defmethod get-function ::two-quiver
  [two-quiver coll]
  (cond
    (= coll '(() ())) (identity-function (objects two-quiver))
    (= coll '((0) (0))) (source-function two-quiver)
    (= coll '((0) (1))) (target-function two-quiver)
    (= coll '((0) ())) (identity-function (morphisms two-quiver))
    (= coll '((0 0) ())) (identity-function (two-morphisms two-quiver))
    (= coll '((0 0) (0))) (s-function two-quiver)
    (= coll '((0 0) (1))) (t-function two-quiver)
    (= coll '((0 0) (0 0))) (ss-function two-quiver)
    (= coll '((0 0) (0 1))) (st-function two-quiver)
    (= coll '((0 0) (1 0))) (ts-function two-quiver)
    (= coll '((0 0) (1 1))) (tt-function two-quiver)))
;; All 2-morphisms between a pair of 1-morphisms
(defn two-hom
  "The set of all 2-morphisms of QUIVER whose source 1-morphism is A
  and whose target 1-morphism is B."
  [quiver a b]
  (set
    (for [two-morphism (two-morphisms quiver)
          :when (and (= a (two-morphism-s quiver two-morphism))
                     (= b (two-morphism-t quiver two-morphism)))]
      two-morphism)))
;; Underlying relations and multirelations
;; The relation on objects is that of the underlying one-quiver.
(defmethod underlying-relation TwoQuiver
  [quiver]
  (underlying-relation (underlying-quiver quiver)))
;; The special case of underlying multirelations of two quivers
;; The multirelation on objects counts parallel edges of the underlying
;; one-quiver with multiplicity.
(defmethod underlying-multirelation TwoQuiver
  [quiver]
  (underlying-multirelation (underlying-quiver quiver)))
;; The underlying multirelation of the morphic quiver
(defn underlying-two-multirelation
  "The multirelation on 1-morphisms induced by the 2-morphisms of
  QUIVER, counted with multiplicity."
  [quiver]
  (multiset
    (for [two-morphism (two-morphisms quiver)]
      (list (two-morphism-s quiver two-morphism)
            (two-morphism-t quiver two-morphism)))))
(defn underlying-two-relation
  "The underlying two-multirelation of QUIVER with multiplicities
  forgotten."
  [quiver]
  (set (underlying-two-multirelation quiver)))
;; Combine two quivers in order to create a 2-quiver copresheaf.
;; This is similar to how two functions can be combined to create a triangle copresheaf.
(defn combine-quivers
  "Chain MORPHIC-QUIVER and OBJECT-QUIVER into a two-quiver.  The
  objects of MORPHIC-QUIVER are expected to coincide with the morphisms
  of OBJECT-QUIVER."
  [morphic-quiver object-quiver]
  (->TwoQuiver
    (morphisms morphic-quiver)
    (morphisms object-quiver)
    (objects object-quiver)
    (source-fn morphic-quiver)
    (target-fn morphic-quiver)
    (source-fn object-quiver)
    (target-fn object-quiver)))
;; Two quivers created from special types of quivers naturally associated with 1-quivers based upon
; various relations, including relations of composability, parallelism, source, and target equality.
(defn composability-two-quiver
  "Chain the composability quiver of QUIVER over QUIVER itself."
  [quiver]
  (combine-quivers
    (composability-quiver quiver)
    quiver))
(defn parallelism-two-quiver
  "Chain the parallelism quiver of QUIVER over QUIVER itself."
  [quiver]
  (combine-quivers
    (parallelism-quiver quiver)
    quiver))
(defn source-equivalence-two-quiver
  "Chain the source equivalence quiver of QUIVER over QUIVER itself."
  [quiver]
  (combine-quivers
    (source-equivalence-quiver quiver)
    quiver))
(defn target-equivalence-two-quiver
  "Chain the target equivalence quiver of QUIVER over QUIVER itself."
  [quiver]
  (combine-quivers
    (target-equivalence-quiver quiver)
    quiver))
;; Create a two-quiver with a single object, but any number of morphisms or two morphisms. These types
; of structures include monoidal categories and a number of other constructions like ordered
; monoids and semigroups, which enrich a singular quiver with an ordering on morphisms.
(defn singular-two-quiver
  "A two-quiver with OBJECT as its only object, obtained by placing
  QUIVER over the singular quiver on its morphism set."
  [quiver object]
  (combine-quivers
    quiver
    (singular-quiver (morphisms quiver) object)))
;; A quiver with no two-morphisms
(defn two-morphism-free-quiver
  "Lift QUIVER to a two-quiver with an empty set of 2-morphisms."
  [quiver]
  (combine-quivers
    (->Quiver #{} (morphisms quiver) first second)
    quiver))
;; A relational two-quiver is actually formed from a binary relation on ordered pairs, so each
;; element of the component binary relation should be of the form ((a b) (c d)). This makes
;; for a much nicer relational two-quiver than having to use a quaternary relation, in
;; which case the component edges would have to be produced differently.
(defn relational-two-quiver
  "Create a two-quiver from a binary relation REL on ordered pairs.
  Every element of REL must have the form ((a b) (c d)): the inner
  pairs are the 1-morphisms and the outer pairs the 2-morphisms."
  [rel]
  ;; Fixed: the objects mapper previously destructured its input as two
  ;; separate arguments [[a b] [c d]], but MAP calls it with a single
  ;; element of REL, which signalled an arity error.  The destructuring
  ;; must be nested inside one parameter.
  (let [objects (apply union
                       (map (fn [[[a b] [c d]]]
                              (set (list a b c d)))
                            rel))
        morphisms (apply union (map set rel))]
    (->TwoQuiver
      rel
      morphisms
      objects
      first
      second
      first
      second)))
;; Convert structures into two quivers
(defmulti to-two-quiver
  "Convert a structure into a two-quiver copresheaf."
  type)
(defmethod to-two-quiver TwoQuiver
  [two-quiver] two-quiver)
;; A one-quiver lifts to a two-quiver with no 2-morphisms.
(defmethod to-two-quiver Quiver
  [quiver] (two-morphism-free-quiver quiver))
;; A binary relation is first made into a relational one-quiver.
(defmethod to-two-quiver :locus.set.logic.core.set/universal
  [rel] (two-morphism-free-quiver (relational-quiver rel)))
;; Products and coproducts in the topos of two-quivers
;; Products and coproducts are computed componentwise: on each of the
;; three cell sets and on each of the four structure maps.
(defmethod product TwoQuiver
  [& quivers]
  (->TwoQuiver
    (apply cartesian-product (map two-morphisms quivers))
    (apply cartesian-product (map morphisms quivers))
    (apply cartesian-product (map objects quivers))
    (apply product (map s-function quivers))
    (apply product (map t-function quivers))
    (apply product (map source-function quivers))
    (apply product (map target-function quivers))))
(defmethod coproduct TwoQuiver
  [& quivers]
  (->TwoQuiver
    (apply cartesian-coproduct (map two-morphisms quivers))
    (apply cartesian-coproduct (map morphisms quivers))
    (apply cartesian-coproduct (map objects quivers))
    (apply coproduct (map s-function quivers))
    (apply coproduct (map t-function quivers))
    (apply coproduct (map source-function quivers))
    (apply coproduct (map target-function quivers))))
;; Subobjects in the topos of 2-quivers
(defn two-subquiver?
  "Test whether (NEW-TWO-MORPHISMS, NEW-MORPHISMS, NEW-OBJECTS)
  determines a subobject of QUIVER: the endpoints of every chosen
  1-morphism must be chosen objects, and both the source and the target
  of every chosen 2-morphism must be chosen 1-morphisms."
  [quiver new-two-morphisms new-morphisms new-objects]
  (and
    (superset?
      (list
        (union
          (set-image (source-function quiver) new-morphisms)
          (set-image (target-function quiver) new-morphisms))
        new-objects))
    (superset?
      (list
        (union
          (set-image (s-function quiver) new-two-morphisms)
          ;; Fixed: this previously applied s-function a second time, so
          ;; the targets of the chosen two-morphisms were never checked.
          (set-image (t-function quiver) new-two-morphisms))
        new-morphisms))))
(defn restrict-two-morphisms
  "Replace the 2-morphism set of TWO-QUIVER by NEW-TWO-MORPHISMS,
  keeping all other data unchanged."
  [two-quiver new-two-morphisms]
  (->TwoQuiver
    new-two-morphisms
    (morphisms two-quiver)
    (objects two-quiver)
    (two-source-fn two-quiver)
    (two-target-fn two-quiver)
    (source-fn two-quiver)
    (target-fn two-quiver)))
(defn two-subquiver
  "The subobject of QUIVER on the given 2-morphism, 1-morphism, and
  object sets, with the structure maps restricted accordingly."
  [quiver new-two-morphisms new-morphisms new-objects]
  (->TwoQuiver
    new-two-morphisms
    new-morphisms
    new-objects
    (two-source-fn quiver)
    (two-target-fn quiver)
    (source-fn quiver)
    (target-fn quiver)))
;; Congruences of objects in the topos of 2-quivers
(defn two-quiver-congruence?
  "Test if the given partitions of the 2-morphisms, 1-morphisms, and
  objects of QUIVER form a congruence: each of the four structure maps
  must send related cells to related cells."
  [quiver two-congruence one-congruence zero-congruence]
  (and
    (io-relation? (source-function quiver) one-congruence zero-congruence)
    (io-relation? (target-function quiver) one-congruence zero-congruence)
    (io-relation? (s-function quiver) two-congruence one-congruence)
    (io-relation? (t-function quiver) two-congruence one-congruence)))
(defn two-quiver-quotient
  "Quotient QUIVER by a congruence: the cells are the congruence
  classes, and each structure map acts on a class representative and
  projects the result into the appropriate partition."
  [quiver two-congruence one-congruence zero-congruence]
  (->TwoQuiver
    two-congruence
    one-congruence
    zero-congruence
    (fn [part]
      (projection one-congruence (two-morphism-s quiver (first part))))
    (fn [part]
      (projection one-congruence (two-morphism-t quiver (first part))))
    (fn [part]
      (projection zero-congruence (source-element quiver (first part))))
    (fn [part]
      (projection zero-congruence (target-element quiver (first part))))))
;; Over quivers
(defn target-equal-two-morphism-components?
  "Test if the source and target 1-morphisms of TWO-MORPHISM share a
  common target object."
  [two-quiver two-morphism]
  (target-equal-elements?
    two-quiver
    (two-morphism-s two-quiver two-morphism)
    (two-morphism-t two-quiver two-morphism)))
(defn over-two-morphisms
  "The set of 2-morphisms whose component 1-morphisms share a target."
  [two-quiver]
  (set
    (filter
      (partial target-equal-two-morphism-components? two-quiver)
      (two-morphisms two-quiver))))
(defn over-component
  "Restrict TWO-QUIVER to its over 2-morphisms."
  [two-quiver]
  (restrict-two-morphisms two-quiver (over-two-morphisms two-quiver)))
;; Under quivers
(defn source-equal-two-morphism-components?
  "Test if the source and target 1-morphisms of TWO-MORPHISM share a
  common source object."
  [two-quiver two-morphism]
  (source-equal-elements?
    two-quiver
    (two-morphism-s two-quiver two-morphism)
    (two-morphism-t two-quiver two-morphism)))
(defn under-two-morphisms
  "The set of 2-morphisms whose component 1-morphisms share a source."
  [two-quiver]
  (set
    (filter
      (partial source-equal-two-morphism-components? two-quiver)
      (two-morphisms two-quiver))))
(defn under-component
  "Restrict TWO-QUIVER to its under 2-morphisms."
  [two-quiver]
  (restrict-two-morphisms two-quiver (under-two-morphisms two-quiver)))
;; Globular subobjects of two quivers
(defn globular-two-morphism?
  "Test if the source and target 1-morphisms of TWO-MORPHISM are
  parallel (same source object and same target object)."
  [two-quiver two-morphism]
  (let [source (two-morphism-s two-quiver two-morphism)
        target (two-morphism-t two-quiver two-morphism)]
    (parallel-elements? two-quiver source target)))
(defn globular-two-morphisms
  "The set of all globular 2-morphisms of TWO-QUIVER."
  [two-quiver]
  (set
    (filter
      (partial globular-two-morphism? two-quiver)
      (two-morphisms two-quiver))))
(defn globular-component
  "Restrict TWO-QUIVER to its globular 2-morphisms."
  [two-quiver]
  (restrict-two-morphisms two-quiver (globular-two-morphisms two-quiver)))
;; Ontology of two quivers
(defmulti two-quiver?
  "Ontological predicate testing membership in the class of two-quivers."
  type)
(defmethod two-quiver? ::two-quiver
  [quiver] true)
(defmethod two-quiver? :default
  [obj] false)
(defn over-two-quiver?
  "A two-quiver all of whose 2-morphisms relate 1-morphisms with a
  common target."
  [two-quiver]
  (and
    (two-quiver? two-quiver)
    (every?
      (fn [morphism]
        (target-equal-two-morphism-components? two-quiver morphism))
      (two-morphisms two-quiver))))
(defn under-two-quiver?
  "A two-quiver all of whose 2-morphisms relate 1-morphisms with a
  common source."
  [two-quiver]
  (and
    (two-quiver? two-quiver)
    (every?
      (fn [morphism]
        (source-equal-two-morphism-components? two-quiver morphism))
      (two-morphisms two-quiver))))
(defn globular-two-quiver?
  "A two-quiver all of whose 2-morphisms relate parallel 1-morphisms."
  [two-quiver]
  (and
    (two-quiver? two-quiver)
    (every?
      (fn [morphism]
        (globular-two-morphism? two-quiver morphism))
      (two-morphisms two-quiver))))
(defn one-thin-two-quiver?
  "A two-quiver whose underlying one-multirelation has no repeated
  entries."
  [two-quiver]
  (and
    (two-quiver? two-quiver)
    (universal? (underlying-multirelation two-quiver))))
(defn two-thin-two-quiver?
  "A two-quiver whose underlying two-multirelation has no repeated
  entries."
  [two-quiver]
  (and
    (two-quiver? two-quiver)
    (universal? (underlying-two-multirelation two-quiver))))
(defn dually-thin-two-quiver?
  "A two-quiver that is thin at both the one and the two morphism
  levels."
  [two-quiver]
  (and
    (two-quiver? two-quiver)
    (universal? (underlying-multirelation two-quiver))
    (universal? (underlying-two-multirelation two-quiver))))
| null | https://raw.githubusercontent.com/locusmath/locus/fb6068bd78977b51fd3c5783545a5f9986e4235c/src/clojure/locus/set/tree/two_quiver/core/object.clj | clojure | set of another. This is similar to how given a pair of function f: A -> B and
g: B -> C sharing a common input and output, they can form a triangle copresheaf. So
these include any pair of quivers that can be chained together, without extra conditions.
Corresponding morphisms for the component functions
Underlying relations and multirelations
The underlying multirelation of the morphic quiver
various relations, including relations of composability, parallelism, source, and target equality.
of structures include monoidal categories and a number of other constructions like ordered
monoids and semigroups, which enrich a singular quiver with an ordering on morphisms.
element of the component binary relation should be of the form ((a b) (c d)). This makes
which case the component edges would have to be produced differently.
Over quivers
Under quivers | (ns locus.set.tree.two-quiver.core.object
(:require [locus.set.logic.core.set :refer :all]
[locus.set.logic.limit.product :refer :all]
[locus.set.logic.sequence.object :refer :all]
[locus.set.logic.structure.protocols :refer :all]
[locus.set.mapping.general.core.object :refer :all]
[locus.con.core.setpart :refer :all]
[locus.con.core.object :refer [projection]]
[locus.set.quiver.relation.binary.product :refer :all]
[locus.set.quiver.relation.binary.br :refer :all]
[locus.set.quiver.relation.binary.sr :refer :all]
[locus.set.quiver.structure.core.protocols :refer :all]
[locus.set.quiver.binary.core.object :refer :all]
[locus.set.tree.structure.core.protocols :refer :all])
(:import (locus.set.quiver.binary.core.object Quiver)))
A 2 - quiver is defined by an ordered pair of quivers P , Q : A - > B - > C , in which the two
quivers are composable so that the morphism set of one quiver is equal to the object
Special cases include path quivers and two globular sets .
(defprotocol StructuredTwoQuiver
"A general protocol for describing structures over two-quiver copresheaves."
(two-morphisms [this]
"The two morphisms of the structured quiver.")
(two-source-fn [this]
"The source morphism of a two morphism in a two globular set.")
(two-target-fn [this]
"The target morphism of a two morphism in a two globular set."))
(deftype TwoQuiver [two-morphisms morphisms objects two-source two-target source target]
StructuredDiset
(first-set [this] morphisms)
(second-set [this] objects)
ConcreteObject
(underlying-set [this]
(->CartesianCoproduct [two-morphisms morphisms objects]))
StructuredQuiver
(underlying-quiver [this]
(->Quiver
morphisms
objects
source
target))
(source-fn [this] source)
(target-fn [this] target)
(transition [this e]
(list (source e) (target e)))
StructuredTwoQuiver
(two-morphisms [this] two-morphisms)
(two-source-fn [this] two-source)
(two-target-fn [this] two-target))
(derive ::two-quiver :locus.set.logic.structure.protocols/copresheaf)
(derive TwoQuiver ::two-quiver)
Visualisation of two quivers
(defmethod visualize TwoQuiver
[quiver]
(visualize (underlying-quiver quiver)))
Component functions for 2 - morphisms in 2 - quivers
(defn two-morphism-s
[quiver two-morphism]
((two-source-fn quiver) two-morphism))
(defn two-morphism-t
[quiver two-morphism]
((two-target-fn quiver) two-morphism))
(defn two-morphism-ss
[quiver two-morphism]
(source-element quiver (two-morphism-s quiver two-morphism)))
(defn two-morphism-st
[quiver two-morphism]
(source-element quiver (two-morphism-t quiver two-morphism)))
(defn two-morphism-ts
[quiver two-morphism]
(target-element quiver (two-morphism-s quiver two-morphism)))
(defn two-morphism-tt
[quiver two-morphism]
(target-element quiver (two-morphism-t quiver two-morphism)))
(defn s-function
[quiver]
(->SetFunction
(two-morphisms quiver)
(morphisms quiver)
(fn [two-morphism]
(two-morphism-s quiver two-morphism))))
(defn t-function
[quiver]
(->SetFunction
(two-morphisms quiver)
(morphisms quiver)
(fn [two-morphism]
(two-morphism-t quiver two-morphism))))
(defn ss-function
[quiver]
(->SetFunction
(two-morphisms quiver)
(objects quiver)
(fn [two-morphism]
(two-morphism-ss quiver two-morphism))))
(defn st-function
[quiver]
(->SetFunction
(two-morphisms quiver)
(objects quiver)
(fn [two-morphism]
(two-morphism-st quiver two-morphism))))
(defn ts-function
[quiver]
(->SetFunction
(two-morphisms quiver)
(objects quiver)
(fn [two-morphism]
(two-morphism-ts quiver two-morphism))))
(defn tt-function
[quiver]
(->SetFunction
(two-morphisms quiver)
(objects quiver)
(fn [two-morphism]
(two-morphism-tt quiver two-morphism))))
The morphic quiver of a two quiver copresheaf
This contains 1 - morphisms as its objects and 2 - morphisms as its morphisms .
(defn morphic-quiver
[quiver]
(->Quiver
(two-morphisms quiver)
(morphisms quiver)
(two-source-fn quiver)
(two-target-fn quiver)))
Get a successor quiver from a two - quiver
(defmethod successor-quiver ::two-quiver
[two-quiver cell-type]
(case (count cell-type)
0 (identity-function (objects two-quiver))
1 (underlying-quiver two-quiver)
2 (morphic-quiver two-quiver)))
Components of two quivers
(defmethod get-set ::two-quiver
[two-quiver x]
(case (count x)
0 (objects two-quiver)
1 (morphisms two-quiver)
2 (two-morphisms two-quiver)))
(defmethod get-function ::two-quiver
[two-quiver coll]
(cond
(= coll '(() ())) (identity-function (objects two-quiver))
(= coll '((0) (0))) (source-function two-quiver)
(= coll '((0) (1))) (target-function two-quiver)
(= coll '((0) ())) (identity-function (morphisms two-quiver))
(= coll '((0 0) ())) (identity-function (two-morphisms two-quiver))
(= coll '((0 0) (0))) (s-function two-quiver)
(= coll '((0 0) (1))) (t-function two-quiver)
(= coll '((0 0) (0 0))) (ss-function two-quiver)
(= coll '((0 0) (0 1))) (st-function two-quiver)
(= coll '((0 0) (1 0))) (ts-function two-quiver)
(= coll '((0 0) (1 1))) (tt-function two-quiver)))
All 2 - morphisms between a pair of 1 - morphisms
(defn two-hom
[quiver a b]
(set
(filter
(fn [two-morphism]
(and
(= (two-morphism-s quiver two-morphism) a)
(= (two-morphism-t quiver two-morphism) b)))
(two-morphisms quiver))))
(defmethod underlying-relation TwoQuiver
[quiver]
(underlying-relation (underlying-quiver quiver)))
The special case of underlying multirelations of two quivers
(defmethod underlying-multirelation TwoQuiver
[quiver]
(underlying-multirelation (underlying-quiver quiver)))
(defn underlying-two-multirelation
[quiver]
(multiset
(map
(fn [two-morphism]
(list
(two-morphism-s quiver two-morphism)
(two-morphism-t quiver two-morphism)))
(two-morphisms quiver))))
(defn underlying-two-relation
[quiver]
(set (underlying-two-multirelation quiver)))
Combine two quivers in order to create a 2 - quiver copresheaf
This is similar to how two functions can be combined to create a triangle copreshaef
(defn combine-quivers
[morphic-quiver object-quiver]
(->TwoQuiver
(morphisms morphic-quiver)
(morphisms object-quiver)
(objects object-quiver)
(source-fn morphic-quiver)
(target-fn morphic-quiver)
(source-fn object-quiver)
(target-fn object-quiver)))
Two quivers created from special types of quivers naturally associated with 1 - quivers based upon
(defn composability-two-quiver
[quiver]
(combine-quivers
(composability-quiver quiver)
quiver))
(defn parallelism-two-quiver
[quiver]
(combine-quivers
(parallelism-quiver quiver)
quiver))
(defn source-equivalence-two-quiver
[quiver]
(combine-quivers
(source-equivalence-quiver quiver)
quiver))
(defn target-equivalence-two-quiver
[quiver]
(combine-quivers
(target-equivalence-quiver quiver)
quiver))
Create a two quiver with a single object , but any number of morphisms or two morphisms . These type
(defn singular-two-quiver
[quiver object]
(combine-quivers
quiver
(singular-quiver (morphisms quiver) object)))
A quiver with no two - morphisms
(defn two-morphism-free-quiver
[quiver]
(combine-quivers
(->Quiver #{} (morphisms quiver) first second)
quiver))
A relational two - quiver is actually formed from a binary relation on ordered pairs , so each
for a much nicer relational two quiver then having to use a quaternary relation , in
(defn relational-two-quiver
[rel]
(let [objects (apply union (map (fn [[a b] [c d]] (set (list a b c d))) rel))
morphisms (apply union (map set rel))]
(->TwoQuiver
rel
morphisms
objects
first
second
first
second)))
Convert structures into two quivers
(defmulti to-two-quiver type)
(defmethod to-two-quiver TwoQuiver
[two-quiver] two-quiver)
(defmethod to-two-quiver Quiver
[quiver] (two-morphism-free-quiver quiver))
(defmethod to-two-quiver :locus.set.logic.core.set/universal
[rel] (two-morphism-free-quiver (relational-quiver rel)))
Products and coproducts in the topos of two - quivers
(defmethod product TwoQuiver
[& quivers]
(->TwoQuiver
(apply cartesian-product (map two-morphisms quivers))
(apply cartesian-product (map morphisms quivers))
(apply cartesian-product (map objects quivers))
(apply product (map s-function quivers))
(apply product (map t-function quivers))
(apply product (map source-function quivers))
(apply product (map target-function quivers))))
(defmethod coproduct TwoQuiver
[& quivers]
(->TwoQuiver
(apply cartesian-coproduct (map two-morphisms quivers))
(apply cartesian-coproduct (map morphisms quivers))
(apply cartesian-coproduct (map objects quivers))
(apply coproduct (map s-function quivers))
(apply coproduct (map t-function quivers))
(apply coproduct (map source-function quivers))
(apply coproduct (map target-function quivers))))
Subobjects in the topos of 2 - quivers
(defn two-subquiver?
[quiver new-two-morphisms new-morphisms new-objects]
(and
(superset?
(list
(union
(set-image (source-function quiver) new-morphisms)
(set-image (target-function quiver) new-morphisms))
new-objects))
(superset?
(list
(union
(set-image (s-function quiver) new-two-morphisms)
(set-image (s-function quiver) new-two-morphisms))
new-morphisms))))
(defn restrict-two-morphisms
[two-quiver new-two-morphisms]
(->TwoQuiver
new-two-morphisms
(morphisms two-quiver)
(objects two-quiver)
(two-source-fn two-quiver)
(two-target-fn two-quiver)
(source-fn two-quiver)
(target-fn two-quiver)))
(defn two-subquiver
[quiver new-two-morphisms new-morphisms new-objects]
(->TwoQuiver
new-two-morphisms
new-morphisms
new-objects
(two-source-fn quiver)
(two-target-fn quiver)
(source-fn quiver)
(target-fn quiver)))
Congruences of objects in the topos of 2 - quivers
(defn two-quiver-congruence?
[quiver two-congruence one-congruence zero-congruence]
(and
(io-relation? (source-function quiver) one-congruence zero-congruence)
(io-relation? (target-function quiver) one-congruence zero-congruence)
(io-relation? (s-function quiver) two-congruence one-congruence)
(io-relation? (t-function quiver) two-congruence one-congruence)))
(defn two-quiver-quotient
[quiver two-congruence one-congruence zero-congruence]
(->TwoQuiver
two-congruence
one-congruence
zero-congruence
(fn [part]
(projection one-congruence (two-morphism-s quiver (first part))))
(fn [part]
(projection one-congruence (two-morphism-t quiver (first part))))
(fn [part]
(projection zero-congruence (source-element quiver (first part))))
(fn [part]
(projection zero-congruence (target-element quiver (first part))))))
(defn target-equal-two-morphism-components?
[two-quiver two-morphism]
(target-equal-elements?
two-quiver
(two-morphism-s two-quiver two-morphism)
(two-morphism-t two-quiver two-morphism)))
(defn over-two-morphisms
[two-quiver]
(set
(filter
(partial target-equal-two-morphism-components? two-quiver)
(two-morphisms two-quiver))))
(defn over-component
[two-quiver]
(restrict-two-morphisms two-quiver (over-two-morphisms two-quiver)))
(defn source-equal-two-morphism-components?
[two-quiver two-morphism]
(source-equal-elements?
two-quiver
(two-morphism-s two-quiver two-morphism)
(two-morphism-t two-quiver two-morphism)))
(defn under-two-morphisms
[two-quiver]
(set
(filter
(partial source-equal-two-morphism-components? two-quiver)
(two-morphisms two-quiver))))
(defn under-component
[two-quiver]
(restrict-two-morphisms two-quiver (under-two-morphisms two-quiver)))
Globular subobjects of two quivers
(defn globular-two-morphism?
[two-quiver two-morphism]
(let [source (two-morphism-s two-quiver two-morphism)
target (two-morphism-t two-quiver two-morphism)]
(parallel-elements? two-quiver source target)))
(defn globular-two-morphisms
[two-quiver]
(set
(filter
(partial globular-two-morphism? two-quiver)
(two-morphisms two-quiver))))
(defn globular-component
[two-quiver]
(restrict-two-morphisms two-quiver (globular-two-morphisms two-quiver)))
Ontology of two quivers
(defmulti two-quiver? type)
(defmethod two-quiver? ::two-quiver
[quiver] true)
(defmethod two-quiver? :default
[obj] false)
(defn over-two-quiver?
[two-quiver]
(and
(two-quiver? two-quiver)
(every?
(fn [morphism]
(target-equal-two-morphism-components? two-quiver morphism))
(two-morphisms two-quiver))))
(defn under-two-quiver?
[two-quiver]
(and
(two-quiver? two-quiver)
(every?
(fn [morphism]
(source-equal-two-morphism-components? two-quiver morphism))
(two-morphisms two-quiver))))
(defn globular-two-quiver?
[two-quiver]
(and
(two-quiver? two-quiver)
(every?
(fn [morphism]
(globular-two-morphism? two-quiver morphism))
(two-morphisms two-quiver))))
(defn one-thin-two-quiver?
[two-quiver]
(and
(two-quiver? two-quiver)
(universal? (underlying-multirelation two-quiver))))
(defn two-thin-two-quiver?
[two-quiver]
(and
(two-quiver? two-quiver)
(universal? (underlying-two-multirelation two-quiver))))
(defn dually-thin-two-quiver?
[two-quiver]
(and
(two-quiver? two-quiver)
(universal? (underlying-multirelation two-quiver))
(universal? (underlying-two-multirelation two-quiver))))
|
2683c31aeb7fe051f31549d0254570d527bc7e553e061e61a3a8e484dd9d00b2 | tjammer/raylib-ocaml | ctypes_reexports.ml | Re - exports some ctypes functionality to the module
(* but it's a band-aid *)
module CArray = Ctypes.CArray
type 'a ctyp = 'a Ctypes.structure
type 'a ptr = 'a Ctypes.ptr
let to_ctyp (a : 'a Ctypes.structure) = a
let addr (x : 'a ctyp) = Ctypes.addr x
let to_voidp = Ctypes.to_voidp
let ptr_of_int i = Ctypes.(allocate int i)
let ptr_of_uint i = Ctypes.(allocate uint i)
let void_ptr_of_int i = Ctypes.(to_voidp (ptr_of_int i))
| null | https://raw.githubusercontent.com/tjammer/raylib-ocaml/76955c30d0a776138daeb93bfc73b104aefc6f6d/src/raylib/ctypes_reexports.ml | ocaml | but it's a band-aid | Re - exports some ctypes functionality to the module
module CArray = Ctypes.CArray
type 'a ctyp = 'a Ctypes.structure
type 'a ptr = 'a Ctypes.ptr
let to_ctyp (a : 'a Ctypes.structure) = a
let addr (x : 'a ctyp) = Ctypes.addr x
let to_voidp = Ctypes.to_voidp
let ptr_of_int i = Ctypes.(allocate int i)
let ptr_of_uint i = Ctypes.(allocate uint i)
let void_ptr_of_int i = Ctypes.(to_voidp (ptr_of_int i))
|
3d63e143f78cbab70e45876bea9985673d17c5e6a62a91659cd1c94da993df7d | atgreen/lisp-openshift | session.lisp | -*- Mode : LISP ; Syntax : COMMON - LISP ; Package : HUNCHENTOOT ; Base : 10 -*-
Copyright ( c ) 2004 - 2010 , Dr. . All rights reserved .
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
;;; * Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;; * Redistributions in binary form must reproduce the above
;;; copyright notice, this list of conditions and the following
;;; disclaimer in the documentation and/or other materials
;;; provided with the distribution.
;;; THIS SOFTWARE IS PROVIDED BY THE AUTHOR 'AS IS' AND ANY EXPRESSED
;;; OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
;;; WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
;;; ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
;;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
;;; GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY ,
;;; WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
;;; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
;;; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
(in-package :hunchentoot)
(defgeneric session-db-lock (acceptor &key whole-db-p)
(:documentation "A function which returns a lock that will be used
to prevent concurrent access to sessions. The first argument will be
the acceptor that handles the current request, the second argument is
true if the whole \(current) session database is modified. If it is
NIL, only one existing session in the database is modified.
This function can return NIL which means that sessions or session
databases will be modified without a lock held \(for example for
single-threaded environments). The default is to always return a
global lock \(ignoring the ACCEPTOR argument) for Lisps that support
threads and NIL otherwise."))
(defmethod session-db-lock ((acceptor t) &key (whole-db-p t))
(declare (ignore whole-db-p))
*global-session-db-lock*)
(defmacro with-session-lock-held ((lock) &body body)
"This is like WITH-LOCK-HELD except that it will accept NIL as a
\"lock\" and just execute BODY in this case."
(with-unique-names (thunk)
(with-rebinding (lock)
`(flet ((,thunk () ,@body))
(cond (,lock (with-lock-held (,lock) (,thunk)))
(t (,thunk)))))))
(defgeneric session-db (acceptor)
(:documentation "Returns the current session database which is an
alist where each car is a session's ID and the cdr is the
corresponding SESSION object itself. The default is to use a global
list for all acceptors."))
(defmethod session-db ((acceptor t))
*session-db*)
(defgeneric (setf session-db) (new-value acceptor)
(:documentation "Modifies the current session database. See SESSION-DB."))
(defmethod (setf session-db) (new-value (acceptor t))
(setq *session-db* new-value))
(defgeneric next-session-id (acceptor)
(:documentation "Returns the next sequential session ID, an integer,
which should be unique per session. The default method uses a simple
global counter and isn't guarded by a lock. For a high-performance
production environment you might consider using a more robust
implementation."))
(let ((session-id-counter 0))
(defmethod next-session-id ((acceptor t))
(incf session-id-counter)))
(defclass session ()
((session-id :initform (next-session-id (request-acceptor *request*))
:reader session-id
:type integer
:documentation "The unique ID \(an INTEGER) of the session.")
(session-string :reader session-string
:documentation "The session string encodes enough
data to safely retrieve this session. It is sent to the browser as a
cookie value or as a GET parameter.")
(user-agent :initform (user-agent *request*)
:reader session-user-agent
:documentation "The incoming 'User-Agent' header that
was sent when this session was created.")
(remote-addr :initform (real-remote-addr *request*)
:reader session-remote-addr
:documentation "The remote IP address of the client
when this session was started as returned by REAL-REMOTE-ADDR.")
(session-start :initform (get-universal-time)
:reader session-start
:documentation "The time this session was started.")
(last-click :initform (get-universal-time)
:reader session-last-click
:documentation "The last time this session was used.")
(session-data :initarg :session-data
:initform nil
:reader session-data
:documentation "Data associated with this session -
see SESSION-VALUE.")
(max-time :initarg :max-time
:initform *session-max-time*
:accessor session-max-time
:type fixnum
:documentation "The time \(in seconds) after which this
session expires if it's not used."))
(:documentation "SESSION objects are automatically maintained by
Hunchentoot. They should not be created explicitly with MAKE-INSTANCE
but implicitly with START-SESSION and they should be treated as opaque
objects.
You can ignore Hunchentoot's SESSION objects altogether and implement
your own sessions if you provide corresponding methods for
SESSION-COOKIE-VALUE and SESSION-VERIFY."))
(defun encode-session-string (id user-agent remote-addr start)
"Creates a uniquely encoded session string based on the values ID,
USER-AGENT, REMOTE-ADDR, and START"
(unless (boundp '*session-secret*)
(hunchentoot-warn "Session secret is unbound. Using Lisp's RANDOM function to initialize it.")
(reset-session-secret))
;; *SESSION-SECRET* is used twice due to known theoretical
vulnerabilities of MD5 encoding
(md5-hex (concatenate 'string
*session-secret*
(md5-hex (format nil "~A~A~@[~A~]~@[~A~]~A"
*session-secret*
id
(and *use-user-agent-for-sessions*
user-agent)
(and *use-remote-addr-for-sessions*
remote-addr)
start)))))
(defun stringify-session (session)
"Creates a string representing the SESSION object SESSION. See
ENCODE-SESSION-STRING."
(encode-session-string (session-id session)
(session-user-agent session)
(session-remote-addr session)
(session-start session)))
(defmethod initialize-instance :after ((session session) &rest init-args)
"Set SESSION-STRING slot after the session has been initialized."
(declare (ignore init-args))
(setf (slot-value session 'session-string) (stringify-session session)))
(defun session-gc ()
"Removes sessions from the current session database which are too
old - see SESSION-TOO-OLD-P."
(with-session-lock-held ((session-db-lock *acceptor*))
(setf (session-db *acceptor*)
(loop for id-session-pair in (session-db *acceptor*)
for (nil . session) = id-session-pair
when (session-too-old-p session)
do (acceptor-remove-session *acceptor* session)
else
collect id-session-pair)))
(values))
(defun session-value (symbol &optional (session *session*))
"Returns the value associated with SYMBOL from the session object
SESSION \(the default is the current session) if it exists."
(when session
(let ((found (assoc symbol (session-data session) :test #'eq)))
(values (cdr found) found))))
(defsetf session-value (symbol &optional session)
(new-value)
"Sets the value associated with SYMBOL from the session object
SESSION. If there is already a value associated with SYMBOL it will be
replaced. Will automatically start a session if none was supplied and
there's no session for the current request."
(with-rebinding (symbol)
(with-unique-names (place %session)
`(let ((,%session (or ,session (start-session))))
(with-session-lock-held ((session-db-lock *acceptor* :whole-db-p nil))
(let* ((,place (assoc ,symbol (session-data ,%session) :test #'eq)))
(cond
(,place
(setf (cdr ,place) ,new-value))
(t
(push (cons ,symbol ,new-value)
(slot-value ,%session 'session-data))
,new-value))))))))
(defun delete-session-value (symbol &optional (session *session*))
"Removes the value associated with SYMBOL from SESSION if there is
one."
(when session
(setf (slot-value session 'session-data)
(delete symbol (session-data session)
:key #'car :test #'eq)))
(values))
(defgeneric session-cookie-value (session)
(:documentation "Returns a string which can be used to safely
restore the session SESSION if as session has already been
established. This is used as the value stored in the session cookie
or in the corresponding GET parameter and verified by SESSION-VERIFY.
A default method is provided and there's no reason to change it unless
you want to use your own session objects."))
(defmethod session-cookie-value ((session session))
(and session
(format nil
"~A:~A"
(session-id session)
(session-string session))))
(defgeneric session-cookie-name (acceptor)
(:documentation "Returns the name \(a string) of the cookie \(or the
GET parameter) which is used to store a session on the client side.
The default is to use the string \"hunchentoot-session\", but you can
specialize this function if you want another name."))
(defmethod session-cookie-name ((acceptor t))
"hunchentoot-session")
(defgeneric session-created (acceptor new-session)
(:documentation "This function is called whenever a new session has
been created. There's a default method which might trigger a session
GC based on the value of *SESSION-GC-FREQUENCY*.
The return value is ignored."))
(let ((global-session-usage-counter 0))
(defmethod session-created ((acceptor t) (session t))
"Counts session usage globally and triggers session GC if
necessary."
(when (and *session-gc-frequency*
(zerop (mod (incf global-session-usage-counter)
*session-gc-frequency*)))
(session-gc))))
(defun start-session ()
"Returns the current SESSION object. If there is no current session,
creates one and updates the corresponding data structures. In this
case the function will also send a session cookie to the browser."
(let ((session (session *request*)))
(when session
(return-from start-session session))
(setf session (make-instance 'session)
(session *request*) session)
(with-session-lock-held ((session-db-lock *acceptor*))
(setf (session-db *acceptor*)
(acons (session-id session) session (session-db *acceptor*))))
(set-cookie (session-cookie-name *acceptor*)
:value (session-cookie-value session)
:path "/")
(session-created *acceptor* session)
(setq *session* session)))
(defun remove-session (session)
"Completely removes the SESSION object SESSION from Hunchentoot's
internal session database."
(with-session-lock-held ((session-db-lock *acceptor*))
(acceptor-remove-session *acceptor* session)
(setf (session-db *acceptor*)
(delete (session-id session) (session-db *acceptor*)
:key #'car :test #'=)))
(values))
(defun session-too-old-p (session)
"Returns true if the SESSION object SESSION has not been active in
the last \(SESSION-MAX-TIME SESSION) seconds."
(< (+ (session-last-click session) (session-max-time session))
(get-universal-time)))
(defun get-stored-session (id)
"Returns the SESSION object corresponding to the number ID if the
session has not expired. Will remove the session if it has expired but
will not create a new one."
(let ((session
(cdr (assoc id (session-db *acceptor*) :test #'=))))
(when (and session
(session-too-old-p session))
(when *reply*
(log-message* :info "Session with ID ~A too old" id))
(remove-session session)
(setq session nil))
session))
(defgeneric session-verify (request)
(:documentation "Tries to get a session identifier from the cookies
\(or alternatively from the GET parameters) sent by the client (see
SESSION-COOKIE-NAME and SESSION-COOKIE-VALUE). This identifier is
then checked for validity against the REQUEST object REQUEST. On
success the corresponding session object \(if not too old) is returned
\(and updated). Otherwise NIL is returned.
A default method is provided and you only need to write your own one
if you want to maintain your own sessions."))
(defmethod session-verify ((request request))
(let ((session-identifier (or (when-let (session-cookie (cookie-in (session-cookie-name *acceptor*) request))
(url-decode session-cookie))
(get-parameter (session-cookie-name *acceptor*) request))))
(unless (and session-identifier
(stringp session-identifier)
(plusp (length session-identifier)))
(return-from session-verify nil))
(destructuring-bind (id-string session-string)
(split ":" session-identifier :limit 2)
(let* ((id (parse-integer id-string))
(session (get-stored-session id))
(user-agent (user-agent request))
(remote-addr (remote-addr request)))
(cond
((and session
(string= session-string
(session-string session))
(string= session-string
(encode-session-string id
user-agent
(real-remote-addr request)
(session-start session))))
;; the session key presented by the client is valid
(setf (slot-value session 'last-click) (get-universal-time))
session)
(session
;; the session ID pointed to an existing session, but the
;; session string did not match the expected session string
(log-message* :warning
"Fake session identifier '~A' (User-Agent: '~A', IP: '~A')"
session-identifier user-agent remote-addr)
;; remove the session to make sure that it can't be used
;; again; the original legitimate user will be required to
;; log in again
(remove-session session)
nil)
(t
;; no session was found under the ID given, presumably
;; because it has expired.
(log-message* :info
"No session for session identifier '~A' (User-Agent: '~A', IP: '~A')"
session-identifier user-agent remote-addr)
nil))))))
(defun reset-session-secret ()
"Sets *SESSION-SECRET* to a new random value. All old sessions will
cease to be valid."
(setq *session-secret* (create-random-string 10 36)))
(defun reset-sessions (&optional (acceptor *acceptor*))
"Removes ALL stored sessions of ACCEPTOR."
(with-session-lock-held ((session-db-lock acceptor))
(loop for (nil . session) in (session-db acceptor)
do (acceptor-remove-session acceptor session))
(setq *session-db* nil))
(values))
| null | https://raw.githubusercontent.com/atgreen/lisp-openshift/40235286bd3c6a61cab9f5af883d9ed9befba849/quicklisp/dists/quicklisp/software/hunchentoot-1.2.3/session.lisp | lisp | Syntax : COMMON - LISP ; Package : HUNCHENTOOT ; Base : 10 -*-
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials
provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR 'AS IS' AND ANY EXPRESSED
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*SESSION-SECRET* is used twice due to known theoretical
the session key presented by the client is valid
the session ID pointed to an existing session, but the
session string did not match the expected session string
remove the session to make sure that it can't be used
again; the original legitimate user will be required to
log in again
no session was found under the ID given, presumably
because it has expired. |
Copyright ( c ) 2004 - 2010 , Dr. . All rights reserved .
DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY ,
(in-package :hunchentoot)
(defgeneric session-db-lock (acceptor &key whole-db-p)
(:documentation "A function which returns a lock that will be used
to prevent concurrent access to sessions. The first argument will be
the acceptor that handles the current request, the second argument is
true if the whole \(current) session database is modified. If it is
NIL, only one existing session in the database is modified.
This function can return NIL which means that sessions or session
databases will be modified without a lock held \(for example for
single-threaded environments). The default is to always return a
global lock \(ignoring the ACCEPTOR argument) for Lisps that support
threads and NIL otherwise."))
(defmethod session-db-lock ((acceptor t) &key (whole-db-p t))
(declare (ignore whole-db-p))
*global-session-db-lock*)
(defmacro with-session-lock-held ((lock) &body body)
"This is like WITH-LOCK-HELD except that it will accept NIL as a
\"lock\" and just execute BODY in this case."
(with-unique-names (thunk)
(with-rebinding (lock)
`(flet ((,thunk () ,@body))
(cond (,lock (with-lock-held (,lock) (,thunk)))
(t (,thunk)))))))
(defgeneric session-db (acceptor)
(:documentation "Returns the current session database which is an
alist where each car is a session's ID and the cdr is the
corresponding SESSION object itself. The default is to use a global
list for all acceptors."))
(defmethod session-db ((acceptor t))
*session-db*)
(defgeneric (setf session-db) (new-value acceptor)
(:documentation "Modifies the current session database. See SESSION-DB."))
(defmethod (setf session-db) (new-value (acceptor t))
(setq *session-db* new-value))
(defgeneric next-session-id (acceptor)
(:documentation "Returns the next sequential session ID, an integer,
which should be unique per session. The default method uses a simple
global counter and isn't guarded by a lock. For a high-performance
production environment you might consider using a more robust
implementation."))
(let ((session-id-counter 0))
(defmethod next-session-id ((acceptor t))
(incf session-id-counter)))
(defclass session ()
((session-id :initform (next-session-id (request-acceptor *request*))
:reader session-id
:type integer
:documentation "The unique ID \(an INTEGER) of the session.")
(session-string :reader session-string
:documentation "The session string encodes enough
data to safely retrieve this session. It is sent to the browser as a
cookie value or as a GET parameter.")
(user-agent :initform (user-agent *request*)
:reader session-user-agent
:documentation "The incoming 'User-Agent' header that
was sent when this session was created.")
(remote-addr :initform (real-remote-addr *request*)
:reader session-remote-addr
:documentation "The remote IP address of the client
when this session was started as returned by REAL-REMOTE-ADDR.")
(session-start :initform (get-universal-time)
:reader session-start
:documentation "The time this session was started.")
(last-click :initform (get-universal-time)
:reader session-last-click
:documentation "The last time this session was used.")
(session-data :initarg :session-data
:initform nil
:reader session-data
:documentation "Data associated with this session -
see SESSION-VALUE.")
(max-time :initarg :max-time
:initform *session-max-time*
:accessor session-max-time
:type fixnum
:documentation "The time \(in seconds) after which this
session expires if it's not used."))
(:documentation "SESSION objects are automatically maintained by
Hunchentoot. They should not be created explicitly with MAKE-INSTANCE
but implicitly with START-SESSION and they should be treated as opaque
objects.
You can ignore Hunchentoot's SESSION objects altogether and implement
your own sessions if you provide corresponding methods for
SESSION-COOKIE-VALUE and SESSION-VERIFY."))
(defun encode-session-string (id user-agent remote-addr start)
"Creates a uniquely encoded session string based on the values ID,
USER-AGENT, REMOTE-ADDR, and START"
(unless (boundp '*session-secret*)
(hunchentoot-warn "Session secret is unbound. Using Lisp's RANDOM function to initialize it.")
(reset-session-secret))
vulnerabilities of MD5 encoding
(md5-hex (concatenate 'string
*session-secret*
(md5-hex (format nil "~A~A~@[~A~]~@[~A~]~A"
*session-secret*
id
(and *use-user-agent-for-sessions*
user-agent)
(and *use-remote-addr-for-sessions*
remote-addr)
start)))))
(defun stringify-session (session)
"Creates a string representing the SESSION object SESSION. See
ENCODE-SESSION-STRING."
(encode-session-string (session-id session)
(session-user-agent session)
(session-remote-addr session)
(session-start session)))
(defmethod initialize-instance :after ((session session) &rest init-args)
"Set SESSION-STRING slot after the session has been initialized."
(declare (ignore init-args))
(setf (slot-value session 'session-string) (stringify-session session)))
(defun session-gc ()
"Removes sessions from the current session database which are too
old - see SESSION-TOO-OLD-P."
(with-session-lock-held ((session-db-lock *acceptor*))
(setf (session-db *acceptor*)
(loop for id-session-pair in (session-db *acceptor*)
for (nil . session) = id-session-pair
when (session-too-old-p session)
do (acceptor-remove-session *acceptor* session)
else
collect id-session-pair)))
(values))
(defun session-value (symbol &optional (session *session*))
"Returns the value associated with SYMBOL from the session object
SESSION \(the default is the current session) if it exists."
(when session
(let ((found (assoc symbol (session-data session) :test #'eq)))
(values (cdr found) found))))
(defsetf session-value (symbol &optional session)
(new-value)
"Sets the value associated with SYMBOL from the session object
SESSION. If there is already a value associated with SYMBOL it will be
replaced. Will automatically start a session if none was supplied and
there's no session for the current request."
(with-rebinding (symbol)
(with-unique-names (place %session)
`(let ((,%session (or ,session (start-session))))
(with-session-lock-held ((session-db-lock *acceptor* :whole-db-p nil))
(let* ((,place (assoc ,symbol (session-data ,%session) :test #'eq)))
(cond
(,place
(setf (cdr ,place) ,new-value))
(t
(push (cons ,symbol ,new-value)
(slot-value ,%session 'session-data))
,new-value))))))))
(defun delete-session-value (symbol &optional (session *session*))
"Removes the value associated with SYMBOL from SESSION if there is
one."
(when session
(setf (slot-value session 'session-data)
(delete symbol (session-data session)
:key #'car :test #'eq)))
(values))
(defgeneric session-cookie-value (session)
(:documentation "Returns a string which can be used to safely
restore the session SESSION if as session has already been
established. This is used as the value stored in the session cookie
or in the corresponding GET parameter and verified by SESSION-VERIFY.
A default method is provided and there's no reason to change it unless
you want to use your own session objects."))
(defmethod session-cookie-value ((session session))
(and session
(format nil
"~A:~A"
(session-id session)
(session-string session))))
(defgeneric session-cookie-name (acceptor)
(:documentation "Returns the name \(a string) of the cookie \(or the
GET parameter) which is used to store a session on the client side.
The default is to use the string \"hunchentoot-session\", but you can
specialize this function if you want another name."))
(defmethod session-cookie-name ((acceptor t))
"hunchentoot-session")
(defgeneric session-created (acceptor new-session)
(:documentation "This function is called whenever a new session has
been created. There's a default method which might trigger a session
GC based on the value of *SESSION-GC-FREQUENCY*.
The return value is ignored."))
(let ((global-session-usage-counter 0))
(defmethod session-created ((acceptor t) (session t))
"Counts session usage globally and triggers session GC if
necessary."
(when (and *session-gc-frequency*
(zerop (mod (incf global-session-usage-counter)
*session-gc-frequency*)))
(session-gc))))
(defun start-session ()
"Returns the current SESSION object. If there is no current session,
creates one and updates the corresponding data structures. In this
case the function will also send a session cookie to the browser."
(let ((session (session *request*)))
(when session
(return-from start-session session))
(setf session (make-instance 'session)
(session *request*) session)
(with-session-lock-held ((session-db-lock *acceptor*))
(setf (session-db *acceptor*)
(acons (session-id session) session (session-db *acceptor*))))
(set-cookie (session-cookie-name *acceptor*)
:value (session-cookie-value session)
:path "/")
(session-created *acceptor* session)
(setq *session* session)))
(defun remove-session (session)
"Completely removes the SESSION object SESSION from Hunchentoot's
internal session database."
(with-session-lock-held ((session-db-lock *acceptor*))
(acceptor-remove-session *acceptor* session)
(setf (session-db *acceptor*)
(delete (session-id session) (session-db *acceptor*)
:key #'car :test #'=)))
(values))
(defun session-too-old-p (session)
"Returns true if the SESSION object SESSION has not been active in
the last \(SESSION-MAX-TIME SESSION) seconds."
(< (+ (session-last-click session) (session-max-time session))
(get-universal-time)))
(defun get-stored-session (id)
"Returns the SESSION object corresponding to the number ID if the
session has not expired. Will remove the session if it has expired but
will not create a new one."
(let ((session
(cdr (assoc id (session-db *acceptor*) :test #'=))))
(when (and session
(session-too-old-p session))
(when *reply*
(log-message* :info "Session with ID ~A too old" id))
(remove-session session)
(setq session nil))
session))
(defgeneric session-verify (request)
(:documentation "Tries to get a session identifier from the cookies
\(or alternatively from the GET parameters) sent by the client (see
SESSION-COOKIE-NAME and SESSION-COOKIE-VALUE). This identifier is
then checked for validity against the REQUEST object REQUEST. On
success the corresponding session object \(if not too old) is returned
\(and updated). Otherwise NIL is returned.
A default method is provided and you only need to write your own one
if you want to maintain your own sessions."))
(defmethod session-verify ((request request))
(let ((session-identifier (or (when-let (session-cookie (cookie-in (session-cookie-name *acceptor*) request))
(url-decode session-cookie))
(get-parameter (session-cookie-name *acceptor*) request))))
(unless (and session-identifier
(stringp session-identifier)
(plusp (length session-identifier)))
(return-from session-verify nil))
(destructuring-bind (id-string session-string)
(split ":" session-identifier :limit 2)
(let* ((id (parse-integer id-string))
(session (get-stored-session id))
(user-agent (user-agent request))
(remote-addr (remote-addr request)))
(cond
((and session
(string= session-string
(session-string session))
(string= session-string
(encode-session-string id
user-agent
(real-remote-addr request)
(session-start session))))
(setf (slot-value session 'last-click) (get-universal-time))
session)
(session
(log-message* :warning
"Fake session identifier '~A' (User-Agent: '~A', IP: '~A')"
session-identifier user-agent remote-addr)
(remove-session session)
nil)
(t
(log-message* :info
"No session for session identifier '~A' (User-Agent: '~A', IP: '~A')"
session-identifier user-agent remote-addr)
nil))))))
(defun reset-session-secret ()
"Sets *SESSION-SECRET* to a new random value. All old sessions will
cease to be valid."
(setq *session-secret* (create-random-string 10 36)))
(defun reset-sessions (&optional (acceptor *acceptor*))
"Removes ALL stored sessions of ACCEPTOR."
(with-session-lock-held ((session-db-lock acceptor))
(loop for (nil . session) in (session-db acceptor)
do (acceptor-remove-session acceptor session))
(setq *session-db* nil))
(values))
|
7bbfb52355ccdfc440370af0745ac5a1ba8481d916799d876ccc49d946095866 | lojic/RacketCon2020 | axio-init.rkt | #lang racket/base
(require "./axio-database.rkt"
"./axio-env.rkt"
"./axio-init-structs.rkt"
"./axio-logger.rkt")
(require db
racket/contract)
(provide axio-init)
;; --------------------------------------------------------------------------------------------
;; Public Interface
;; --------------------------------------------------------------------------------------------
(define/contract (axio-init app-env-id #:log-level [ log-level 'warning ])
(->* (symbol?) (#:log-level symbol?) axio-context?)
(axio-init-logger log-level)
(axio-context
app-env-id
(axio-init-db (get-app-env app-env-id))))
;; --------------------------------------------------------------------------------------------
;; Private Implementation
;; --------------------------------------------------------------------------------------------
;; (axio-init-db app-env-obj) -> axio-db-context?
;; app-env-obj : app-env?
(define (axio-init-db app-env-obj)
(virtual-connection
(connection-pool (λ () (db-connect app-env-obj))
#:max-connections 30
#:max-idle-connections 4)))
| null | https://raw.githubusercontent.com/lojic/RacketCon2020/310e0ab01d8c3e1546029720f6bb66e6d1a5fb1e/TodoApp/axio/axio-init.rkt | racket | --------------------------------------------------------------------------------------------
Public Interface
--------------------------------------------------------------------------------------------
--------------------------------------------------------------------------------------------
Private Implementation
--------------------------------------------------------------------------------------------
(axio-init-db app-env-obj) -> axio-db-context?
app-env-obj : app-env? | #lang racket/base
(require "./axio-database.rkt"
"./axio-env.rkt"
"./axio-init-structs.rkt"
"./axio-logger.rkt")
(require db
racket/contract)
(provide axio-init)
(define/contract (axio-init app-env-id #:log-level [ log-level 'warning ])
(->* (symbol?) (#:log-level symbol?) axio-context?)
(axio-init-logger log-level)
(axio-context
app-env-id
(axio-init-db (get-app-env app-env-id))))
(define (axio-init-db app-env-obj)
(virtual-connection
(connection-pool (λ () (db-connect app-env-obj))
#:max-connections 30
#:max-idle-connections 4)))
|
febc3dfd15353fc3e0c84a0d9e5bcc204b24faa1ca7afef2c59f60de0c903440 | tengstrand/lein-polylith | doc.clj | (ns leiningen.polylith.cmd.help.doc)
(defn help []
(println " Generates system documentation to the WS-ROOT/doc directory by using")
(println " the Selmer template engine ().")
(println)
(println " lein polylith doc [TEMPLATE]")
(println " TEMPLATE = (omitted) -> workspace.ftl.")
(println " else -> Name of the template file in WS/ROOT/doc/templates.")
(println)
(println " example:")
(println " lein polylith doc")
(println " lein polylith doc mytemplate.html"))
| null | https://raw.githubusercontent.com/tengstrand/lein-polylith/27bf508a7b4806e4d2dfac01787e99edf2c1c306/src/leiningen/polylith/cmd/help/doc.clj | clojure | (ns leiningen.polylith.cmd.help.doc)
(defn help []
(println " Generates system documentation to the WS-ROOT/doc directory by using")
(println " the Selmer template engine ().")
(println)
(println " lein polylith doc [TEMPLATE]")
(println " TEMPLATE = (omitted) -> workspace.ftl.")
(println " else -> Name of the template file in WS/ROOT/doc/templates.")
(println)
(println " example:")
(println " lein polylith doc")
(println " lein polylith doc mytemplate.html"))
| |
4589ababcf03883d6af68b6b4556195c3c7fd341784eb4a9ee5f7f775fdd21fc | dinosaure/carton | carton_lwt.ml | open Lwt_io
type lwt = Lwt_io.lwt

(* [('a, lwt) Carton.io] and ['a Lwt.t] share the same runtime
   representation, so converting between them is a free identity cast. *)
external inj : 'a Lwt.t -> ('a, lwt) Carton.io = "%identity"
external prj : ('a, lwt) Carton.io -> 'a Lwt.t = "%identity"

let lwt_bind x f = inj (Lwt.bind (prj x) (fun v -> prj (f v))) [@@inline]
let lwt_return v = inj (Lwt.return v) [@@inline]

(* Monad dictionary handed to Carton's scheduler-polymorphic functions. *)
let lwt =
  { Carton.bind= lwt_bind
  ; Carton.return= lwt_return }

module Scheduler = Lwt_scheduler
(* Lwt specialisation of [Carton.Dec] (PACK decoding).  Every [map]/[read]
   callback that returns ['a Lwt.t] is injected into Carton's abstract IO
   type before being passed to the scheduler-polymorphic functions, and the
   result is projected back to a plain Lwt promise. *)
module Dec = struct
  module W = struct
    type 'fd t = 'fd Carton.Dec.W.t
    and slice = Carton.Dec.W.slice =
      { offset : int64
      ; length : int
      ; payload : Bigstringaf.t }
    and 'fd map =
      'fd -> pos:int64 -> int -> Bigstringaf.t Lwt.t
    let make fd = Carton.Dec.W.make fd
  end
  type weight = Carton.Dec.weight
  type 'fd read = 'fd -> bytes -> off:int -> len:int -> int Lwt.t
  module Idx = Carton.Dec.Idx
  (* Lifts an Lwt-returning [map] callback into Carton's IO type; shared by
     all the wrappers below instead of re-defining the same closure in each
     of them. *)
  let inj_map map fd ~pos len = inj (map fd ~pos len)
  module Fp (Uid : Carton.UID) = struct
    include Carton.Dec.Fp (Uid)
    let check_header read fd =
      let read fd buf ~off ~len =
        inj (read fd buf ~off ~len) in
      prj (check_header lwt read fd)
  end
  type ('fd, 'uid) t = ('fd, 'uid) Carton.Dec.t
  let with_z buf t = Carton.Dec.with_z buf t
  let with_w lru t = Carton.Dec.with_w lru t
  let with_allocate ~allocate t = Carton.Dec.with_allocate ~allocate t
  let fd t = Carton.Dec.fd t
  type raw = Carton.Dec.raw
  let make_raw ~weight = Carton.Dec.make_raw ~weight
  type v = Carton.Dec.v
  let v ~kind ?depth buf = Carton.Dec.v ~kind ?depth buf
  let kind v = Carton.Dec.kind v
  let raw v = Carton.Dec.raw v
  let len v = Carton.Dec.len v
  let depth v = Carton.Dec.depth v
  let make fd ~z ~allocate ~uid_ln ~uid_rw where =
    Carton.Dec.make fd ~z ~allocate ~uid_ln ~uid_rw where
  (* XXX(dinosaure): [?visited] disappeared but it's only
   * about internal use. *)
  let weight_of_offset ~map t ~weight cursor =
    prj (Carton.Dec.weight_of_offset lwt ~map:(inj_map map) t ~weight cursor)
  let weight_of_uid ~map t ~weight uid =
    prj (Carton.Dec.weight_of_uid lwt ~map:(inj_map map) t ~weight uid)
  let of_offset ~map t raw ~cursor =
    prj (Carton.Dec.of_offset lwt ~map:(inj_map map) t raw ~cursor)
  let of_uid ~map t raw uid =
    prj (Carton.Dec.of_uid lwt ~map:(inj_map map) t raw uid)
  type path = Carton.Dec.path
  let path_to_list path = Carton.Dec.path_to_list path
  let kind_of_path path = Carton.Dec.kind_of_path path
  let path_of_offset ~map t ~cursor =
    prj (Carton.Dec.path_of_offset lwt ~map:(inj_map map) t ~cursor)
  let path_of_uid ~map t uid =
    prj (Carton.Dec.path_of_uid lwt ~map:(inj_map map) t uid)
  let of_offset_with_path ~map t ~path raw ~cursor =
    prj (Carton.Dec.of_offset_with_path lwt ~map:(inj_map map) t ~path raw ~cursor)
  type 'uid digest = 'uid Carton.Dec.digest
  let uid_of_offset ~map ~digest t raw ~cursor =
    prj (Carton.Dec.uid_of_offset lwt ~map:(inj_map map) ~digest t raw ~cursor)
  let uid_of_offset_with_source ~map ~digest t ~kind raw ~depth ~cursor =
    prj (Carton.Dec.uid_of_offset_with_source lwt ~map:(inj_map map) ~digest t
           ~kind raw ~depth ~cursor)
  type 'uid oracle = 'uid Carton.Dec.oracle
  module Verify (Uid : Carton.UID) = struct
    include Carton.Dec.Verify (Uid) (Lwt_scheduler) (Lwt_io)
    let verify ~threads ~map ~oracle t ~matrix =
      verify ~threads ~map:(inj_map map) ~oracle t ~matrix
  end
  module Ip (Uid : Carton.UID) =
    Carton.Dec.Ip (Lwt_scheduler) (Lwt_io) (Uid)
end
(* Lwt specialisation of [Carton.Enc] (PACK serialisation): Lwt-returning
   [load]/[find] callbacks are injected into Carton's abstract IO type
   before calling the scheduler-polymorphic encoders. *)
module Enc = struct
  type 'uid entry = 'uid Carton.Enc.entry
  type 'uid delta = 'uid Carton.Enc.delta =
    | From of 'uid | Zero
  let make_entry ~kind ~length ?preferred ?delta uid =
    Carton.Enc.make_entry ~kind ~length ?preferred ?delta uid
  let length entry = Carton.Enc.length entry
  type 'uid q = 'uid Carton.Enc.q
  type 'uid p = 'uid Carton.Enc.p
  type 'uid patch = 'uid Carton.Enc.patch
  (* [load] resolves a uid to a fully decoded object; [find] resolves a uid
     to an [int option] (presumably its position in the PACK being written --
     confirm against Carton.Enc's documentation). *)
  type 'uid load = 'uid -> Dec.v Lwt.t
  type 'uid find = 'uid -> int option Lwt.t
  type 'uid uid = 'uid Carton.Enc.uid =
    { uid_ln : int
    ; uid_rw : 'uid -> string }
  let target_to_source target = Carton.Enc.target_to_source target
  let target_uid target = Carton.Enc.target_uid target
  let entry_to_target ~load entry =
    let load uid =
      inj (load uid) in
    prj (Carton.Enc.entry_to_target lwt ~load entry)
  let apply ~load ~uid_ln ~source ~target =
    let load uid =
      inj (load uid) in
    prj (Carton.Enc.apply lwt ~load ~uid_ln ~source ~target)
  module type VERBOSE = Carton.Enc.VERBOSE with type 'a fiber = 'a Lwt.t
  module type UID = Carton.Enc.UID
  module Delta (Uid : UID) (Verbose : VERBOSE) = struct
    include Carton.Enc.Delta (Lwt_scheduler) (Lwt_io) (Uid) (Verbose)
    (* Each worker in [threads] is an Lwt [load] callback; lift every one of
       them into Carton's IO type before delegating. *)
    let delta ~threads ~weight ~uid_ln matrix =
      let threads = List.map (fun load -> (fun uid -> inj (load uid))) threads in
      delta ~threads ~weight ~uid_ln matrix
  end
  module N = struct
    include Carton.Enc.N
    let encoder ~b ~load target =
      let load uid =
        inj (load uid) in
      prj (encoder lwt ~b ~load target)
  end
  (* Scratch buffers used while encoding: input, deflate queue, compression
     window, and output. *)
  type b = Carton.Enc.b =
    { i : Bigstringaf.t
    ; q : De.Queue.t
    ; w : De.window
    ; o : Bigstringaf.t }
  let header_of_pack ~length buf off len =
    Carton.Enc.header_of_pack ~length buf off len
  let encode_target ~b ~find ~load ~uid target ~cursor =
    let load uid = inj (load uid) in
    let find uid = inj (find uid) in
    prj (Carton.Enc.encode_target lwt ~b ~find ~load ~uid target ~cursor)
end
(* Lwt specialisation of the [Thin] module ([Make.canonicalize] rewrites a
   PACK into a self-contained one using the two load callbacks below). *)
module Thin = struct
  (* [light_load] fetches only an object's kind and length;
     [heavy_load] decodes the whole object. *)
  type 'uid light_load = 'uid -> (Carton.kind * int) Lwt.t
  type 'uid heavy_load = 'uid -> Carton.Dec.v Lwt.t
  type optint = Optint.t
  module Make (Uid : Carton.UID) = struct
    include Thin.Make (Lwt_scheduler) (Lwt_io) (Uid)
    let canonicalize ~light_load ~heavy_load ~src ~dst fs n requireds weight =
      let light_load uid = inj (light_load uid) in
      let heavy_load uid = inj (heavy_load uid) in
      canonicalize ~light_load ~heavy_load ~src ~dst fs n requireds weight
  end
end
| null | https://raw.githubusercontent.com/dinosaure/carton/8cb8b3685fab477594b2d83c70b2fc965d1d59b9/lwt/carton_lwt.ml | ocaml | XXX(dinosaure): [?visited] disappeared but it's only
* about internal use. | open Lwt_io
type lwt = Lwt_io.lwt
external inj : 'a Lwt.t -> ('a, lwt) Carton.io = "%identity"
external prj : ('a, lwt) Carton.io -> 'a Lwt.t = "%identity"
let lwt_bind x f =
let open Lwt.Infix in
inj (prj x >>= fun x -> prj (f x))
[@@inline]
let lwt_return x = inj (Lwt.return x) [@@inline]
let lwt =
{ Carton.bind= lwt_bind
; Carton.return= lwt_return }
module Scheduler = Lwt_scheduler
module Dec = struct
module W = struct
type 'fd t = 'fd Carton.Dec.W.t
and slice = Carton.Dec.W.slice =
{ offset : int64
; length : int
; payload : Bigstringaf.t }
and 'fd map =
'fd -> pos:int64 -> int -> Bigstringaf.t Lwt.t
let make fd = Carton.Dec.W.make fd
end
type weight = Carton.Dec.weight
type 'fd read = 'fd -> bytes -> off:int -> len:int -> int Lwt.t
module Idx = Carton.Dec.Idx
module Fp (Uid : Carton.UID) = struct
include Carton.Dec.Fp (Uid)
let check_header read fd =
let read fd buf ~off ~len =
inj (read fd buf ~off ~len) in
prj (check_header lwt read fd)
end
type ('fd, 'uid) t = ('fd, 'uid) Carton.Dec.t
let with_z buf t = Carton.Dec.with_z buf t
let with_w lru t = Carton.Dec.with_w lru t
let with_allocate ~allocate t = Carton.Dec.with_allocate ~allocate t
let fd t = Carton.Dec.fd t
type raw = Carton.Dec.raw
let make_raw ~weight = Carton.Dec.make_raw ~weight
type v = Carton.Dec.v
let v ~kind ?depth buf = Carton.Dec.v ~kind ?depth buf
let kind v = Carton.Dec.kind v
let raw v = Carton.Dec.raw v
let len v = Carton.Dec.len v
let depth v = Carton.Dec.depth v
let make fd ~z ~allocate ~uid_ln ~uid_rw where =
Carton.Dec.make fd ~z ~allocate ~uid_ln ~uid_rw where
let weight_of_offset ~map t ~weight cursor =
let map fd ~pos len =
inj (map fd ~pos len) in
prj (Carton.Dec.weight_of_offset lwt ~map t ~weight cursor)
let weight_of_uid ~map t ~weight uid =
let map fd ~pos len =
inj (map fd ~pos len) in
prj (Carton.Dec.weight_of_uid lwt ~map t ~weight uid)
let of_offset ~map t raw ~cursor =
let map fd ~pos len =
inj (map fd ~pos len) in
prj (Carton.Dec.of_offset lwt ~map t raw ~cursor)
let of_uid ~map t raw uid =
let map fd ~pos len =
inj (map fd ~pos len) in
prj (Carton.Dec.of_uid lwt ~map t raw uid)
type path = Carton.Dec.path
let path_to_list path = Carton.Dec.path_to_list path
let kind_of_path path = Carton.Dec.kind_of_path path
let path_of_offset ~map t ~cursor =
let map fd ~pos len =
inj (map fd ~pos len) in
prj (Carton.Dec.path_of_offset lwt ~map t ~cursor)
let path_of_uid ~map t uid =
let map fd ~pos len =
inj (map fd ~pos len) in
prj (Carton.Dec.path_of_uid lwt ~map t uid)
let of_offset_with_path ~map t ~path raw ~cursor =
let map fd ~pos len =
inj (map fd ~pos len) in
prj (Carton.Dec.of_offset_with_path lwt ~map t ~path raw ~cursor)
type 'uid digest = 'uid Carton.Dec.digest
let uid_of_offset ~map ~digest t raw ~cursor =
let map fd ~pos len =
inj (map fd ~pos len) in
prj (Carton.Dec.uid_of_offset lwt ~map ~digest t raw ~cursor)
let uid_of_offset_with_source ~map ~digest t ~kind raw ~depth ~cursor =
let map fd ~pos len =
inj (map fd ~pos len) in
prj (Carton.Dec.uid_of_offset_with_source lwt ~map ~digest t ~kind raw ~depth ~cursor)
type 'uid oracle = 'uid Carton.Dec.oracle
module Verify (Uid : Carton.UID) = struct
include Carton.Dec.Verify (Uid) (Lwt_scheduler) (Lwt_io)
let verify ~threads ~map ~oracle t ~matrix =
let map fd ~pos len =
inj (map fd ~pos len) in
verify ~threads ~map ~oracle t ~matrix
end
module Ip (Uid : Carton.UID) =
Carton.Dec.Ip (Lwt_scheduler) (Lwt_io) (Uid)
end
module Enc = struct
type 'uid entry = 'uid Carton.Enc.entry
type 'uid delta = 'uid Carton.Enc.delta =
| From of 'uid | Zero
let make_entry ~kind ~length ?preferred ?delta uid =
Carton.Enc.make_entry ~kind ~length ?preferred ?delta uid
let length entry = Carton.Enc.length entry
type 'uid q = 'uid Carton.Enc.q
type 'uid p = 'uid Carton.Enc.p
type 'uid patch = 'uid Carton.Enc.patch
type 'uid load = 'uid -> Dec.v Lwt.t
type 'uid find = 'uid -> int option Lwt.t
type 'uid uid = 'uid Carton.Enc.uid =
{ uid_ln : int
; uid_rw : 'uid -> string }
let target_to_source target = Carton.Enc.target_to_source target
let target_uid target = Carton.Enc.target_uid target
let entry_to_target ~load entry =
let load uid =
inj (load uid) in
prj (Carton.Enc.entry_to_target lwt ~load entry)
let apply ~load ~uid_ln ~source ~target =
let load uid =
inj (load uid) in
prj (Carton.Enc.apply lwt ~load ~uid_ln ~source ~target)
module type VERBOSE = Carton.Enc.VERBOSE with type 'a fiber = 'a Lwt.t
module type UID = Carton.Enc.UID
module Delta (Uid : UID) (Verbose : VERBOSE) = struct
include Carton.Enc.Delta (Lwt_scheduler) (Lwt_io) (Uid) (Verbose)
let delta ~threads ~weight ~uid_ln matrix =
let threads = List.map (fun load -> (fun uid -> inj (load uid))) threads in
delta ~threads ~weight ~uid_ln matrix
end
module N = struct
include Carton.Enc.N
let encoder ~b ~load target =
let load uid =
inj (load uid) in
prj (encoder lwt ~b ~load target)
end
type b = Carton.Enc.b =
{ i : Bigstringaf.t
; q : De.Queue.t
; w : De.window
; o : Bigstringaf.t }
let header_of_pack ~length buf off len =
Carton.Enc.header_of_pack ~length buf off len
let encode_target ~b ~find ~load ~uid target ~cursor =
let load uid = inj (load uid) in
let find uid = inj (find uid) in
prj (Carton.Enc.encode_target lwt ~b ~find ~load ~uid target ~cursor)
end
module Thin = struct
type 'uid light_load = 'uid -> (Carton.kind * int) Lwt.t
type 'uid heavy_load = 'uid -> Carton.Dec.v Lwt.t
type optint = Optint.t
module Make (Uid : Carton.UID) = struct
include Thin.Make (Lwt_scheduler) (Lwt_io) (Uid)
let canonicalize ~light_load ~heavy_load ~src ~dst fs n requireds weight =
let light_load uid = inj (light_load uid) in
let heavy_load uid = inj (heavy_load uid) in
canonicalize ~light_load ~heavy_load ~src ~dst fs n requireds weight
end
end
|
fa71dbf4c6220cf9ba29d0d514717e42ee18367921c67d90f8ed556332a2bd84 | WhatsApp/erlt | calc_core.erl | -file("calc/src/calc_core.erlt", 1).
%% Apparently generated by the erlt compiler from calc/src/calc_core.erlt
%% (per the -file attribute above); hand edits may be lost on regeneration.
-module(calc_core).
-eqwalizer_unchecked([]).
-export_type([expr/0]).
%% One constructor function per variant of expr().
-export([expr_number/1,
         expr_add/2,
         expr_subtr/2,
         expr_mult/2,
         expr_divd/2,
         expr_var/1]).
%% Abstract syntax of arithmetic expressions.  The tuple tags are the
%% erlt-mangled constructor names of the form '$#module:type.variant'.
-type expr() :: {'$#calc_core:expr.number', integer()} |
                {'$#calc_core:expr.add', expr(), expr()} |
                {'$#calc_core:expr.subtr', expr(), expr()} |
                {'$#calc_core:expr.mult', expr(), expr()} |
                {'$#calc_core:expr.divd', expr(), expr()} |
                {'$#calc_core:expr.var', atom()}.
%% Integer literal.
-spec expr_number(integer()) -> expr().
expr_number(N) -> {'$#calc_core:expr.number', N}.
%% Addition node.
-spec expr_add(expr(), expr()) -> expr().
expr_add(E1, E2) -> {'$#calc_core:expr.add', E1, E2}.
%% Subtraction node.
-spec expr_subtr(expr(), expr()) -> expr().
expr_subtr(E1, E2) ->
    {'$#calc_core:expr.subtr', E1, E2}.
%% Multiplication node.
-spec expr_mult(expr(), expr()) -> expr().
expr_mult(E1, E2) -> {'$#calc_core:expr.mult', E1, E2}.
%% Division node.
-spec expr_divd(expr(), expr()) -> expr().
expr_divd(E1, E2) -> {'$#calc_core:expr.divd', E1, E2}.
%% Variable reference.
-spec expr_var(atom()) -> expr().
expr_var(A) -> {'$#calc_core:expr.var', A}.
-module(calc_core).
-eqwalizer_unchecked([]).
-export_type([expr/0]).
-export([expr_number/1,
expr_add/2,
expr_subtr/2,
expr_mult/2,
expr_divd/2,
expr_var/1]).
-type expr() :: {'$#calc_core:expr.number', integer()} |
{'$#calc_core:expr.add', expr(), expr()} |
{'$#calc_core:expr.subtr', expr(), expr()} |
{'$#calc_core:expr.mult', expr(), expr()} |
{'$#calc_core:expr.divd', expr(), expr()} |
{'$#calc_core:expr.var', atom()}.
-spec expr_number(integer()) -> expr().
expr_number(N) -> {'$#calc_core:expr.number', N}.
-spec expr_add(expr(), expr()) -> expr().
expr_add(E1, E2) -> {'$#calc_core:expr.add', E1, E2}.
-spec expr_subtr(expr(), expr()) -> expr().
expr_subtr(E1, E2) ->
{'$#calc_core:expr.subtr', E1, E2}.
-spec expr_mult(expr(), expr()) -> expr().
expr_mult(E1, E2) -> {'$#calc_core:expr.mult', E1, E2}.
-spec expr_divd(expr(), expr()) -> expr().
expr_divd(E1, E2) -> {'$#calc_core:expr.divd', E1, E2}.
-spec expr_var(atom()) -> expr().
expr_var(A) -> {'$#calc_core:expr.var', A}.
| |
eb65badc6823991d618da3c47b34f540e14f41198a1801481b1023204dca77fd | emina/rosette | reals.rkt | #lang rosette
(require "type.rkt" "errors.rkt"
rosette/lib/match (only-in racket/syntax format-symbol)
(for-syntax (only-in racket/syntax format-id))
(only-in rosette/base/core/type subtype? type-cast)
(only-in rosette [void rosette-void]))
(provide real-type? real-type-length
vector-type? (rename-out [has-vector-type? vector-value?]) vector-type
vector-select vector-update
scalar-type? scalar-value? scalar-type
real-type-of common-real-type void
bool int float
int2 int3 int4 int8 int16
float2 float3 float4 float8 float16
convert_int2 convert_int3 convert_int4 convert_int8 convert_int16
convert_float2 convert_float3 convert_float4 convert_float8 convert_float16)
; Each real-type instance represents an OpenCL built-in real type,
; which can be either a scalar or a vector type. A real type has a
; name; length (which is 1 for scalar types); a corresponding base
; (element) type; and a procedure for implicitly converting values
; to that type, and for constructing values of that type from base
; components. The base type of a scalar type is its corresponding
; Rosette type, and the base type of a vector type is the scalar type
; of its components.
;
; Every instance of a real type can be used as a procedure. Every
; procedure t takes two forms: a no-argument form, and a k-argument
; form, where k is (primitive-type-length t). The no-argument form
; returns another procedure, which, when given an argument, implicitly
; converts that argument to a value of type t, or throws an error if
; such a conversion is not possible. Valid implicit conversions are
; described in Ch. 6.2 of opencl-1.2 specification.
;
; When t is a vector type, its k-argument form returns a value of type
; t if the provided arguments are instances of (type-base t). When t
; is a scalar type, the constructor returns a value of type t, only if
; that value can be constructed from the provided argument without type
; conversion.
; Applying a real-type value as a function invokes its stored procedure
; field; a real type prints as its bound name (e.g. int4, float).
(struct real-type (base length procedure)
  #:property prop:procedure
  [struct-field-index procedure]
  #:methods gen:custom-write
  [(define (write-proc self port mode)
     (fprintf port "~a" (object-name self)))]
  #:methods gen:type
  [(define (type-name self) (object-name self))
   (define (type-base self) (real-type-base self))])
; Defines a real type [id] whose value-level procedure dispatches on arity:
; with zero arguments it yields the implicit-conversion routine built from
; the #:convert clauses; with (length) arguments it runs the #:construct
; body.  The inner [id] binding makes (object-name id) report 'id.
(define-syntax-rule (define-real-type id #:base base #:length length
                      #:convert ([pat convert-expr ...] ...)
                      #:construct [(arg ...) construct-expr ...])
  (define id
    (real-type base length
               (let* ([convert (match-lambda [pat convert-expr ...] ...
                                             [v (raise-conversion-error v id)])]
                      [id (case-lambda [() convert]
                                       [(arg ...) construct-expr ...])])
                 id))))
; Returns true if t is a scalar type (a real type of length 1).
(define (scalar-type? t)
  (and (real-type? t) (= (real-type-length t) 1)))

; Returns true if t is a vector type (a real type of length > 1).
(define (vector-type? t)
  (and (real-type? t) (> (real-type-length t) 1)))
; Returns true iff an arithmetic operation on values of types
; t1 and t2 will yield a value of type t2.
(define (real-type<=? t1 t2)
  (cond
    [(not (and (real-type? t1) (real-type? t2))) #f]
    [(equal? t1 t2) #t]
    [(equal? t1 bool) #t]  ; bool promotes to every real type
    [(equal? t1 int) (not (equal? t2 bool))]
    [(equal? t1 float) (equal? (type-base t2) float)]
    [else #f]))
; Returns the common real type of the given types, as specified in
; Ch. 6.2.6 of opencl-1.2 specification.  If there is no common
; real type, returns #f.
(define common-real-type
  (case-lambda
    [(t) (and (real-type? t) t)]
    [(t1 t2) (cond [(real-type<=? t1 t2) t2]
                   [(real-type<=? t2 t1) t1]
                   [else #f])]
    ; Three or more types: reduce pairwise, right to left.
    [ts (common-real-type (car ts) (apply common-real-type (cdr ts)))]))
;----------- SCALAR TYPES -----------
; We represent OpenCL scalar values as concrete or symbolic values.
;
; When given a symbolic value of type (type-base t) as an argument, a
; constructor for a scalar type t simply returns that value.
; When given a concrete value of type t, the constructor checks that the
; concrete value corresponds to type t and returns it. The constructor will throw an
; error if given a symbolic or concrete value that cannot be cast to an instance
; of t without type conversion.
; Defines the scalar type [id]: its constructor accepts a concrete value
; recognized by [primitive], a symbolic term of the Rosette type [base], or
; a symbolic union containing such a value (in which case the union guard is
; asserted before the value is returned).  Anything else is rejected
; without implicit conversion.
(define-syntax-rule
  (define-scalar-type id #:base base #:primitive primitive #:convert convert-clauses)
  (define-real-type id
    #:base base #:length 1 #:convert convert-clauses
    #:construct [(v) (match v
                       [(? primitive) v]
                       [(term _ (== base)) v]
                       [(union (list _ (... ...) (cons g (or (? primitive p)
                                                             (and (term _ (== base)) p)))
                               _ (... ...)) _)
                        ; Commit to the matching union branch.
                        (assert g)
                        p]
                       [_ (raise-argument-error 'id (~a 'id) v)])]))
; Booleans; numeric values convert to (v != 0).
(define-scalar-type bool
  #:base boolean?
  #:primitive boolean?
  #:convert ([(? boolean? v) v]
             [(? number? v) (! (= v 0))]))

; Integers; booleans convert to 1/0 and floats truncate toward zero.
(define-scalar-type int
  #:base integer?
  #:primitive fixnum?
  #:convert ([(? boolean? v) (if v 1 0)]
             [(? fixnum? v) v]
             [(? flonum? v) (exact-truncate v)]
             [v (real->integer v)]))

; Floats; booleans convert to 1.0/0.0 and exact integers are made inexact.
(define-scalar-type float
  #:base real?
  #:primitive flonum?
  #:convert ([(? boolean? v) (if v 1.0 0.0)]
             [(? fixnum? v) (exact->inexact v)]
             [(? flonum? v) v]
             [v (type-cast real? v 'float)]))
; The OpenCL void type: a singleton struct instance that satisfies gen:type
; but raises an error if any value is ever cast to it.
(define void
  (let ()
    (struct void ()
      #:property prop:procedure
      (lambda (self)
        (error 'void "cannot cast a value to void"))
      #:methods gen:type
      [(define (type-name self) 'void)
       (define (type-base self) rosette-void)]
      #:methods gen:custom-write
      [(define (write-proc self port mode)
         (fprintf port "void"))])
    (void)))
;----------- VECTOR TYPES -----------
; We represent OpenCL vector values as Racket immutable vector impersonators,
; with the prop:vector-type property which specifies their OpenCL type.
; The constructor for each vector type t creates an immutable vector value of
; type t when given (real-type-length t) scalars of type (type-base t). Otherwise
; it throws an error.
; The property attached to vector values to indicate their
; OpenCL vector type.
(define-values (prop:vector-type has-vector-type? vector-type)
  (make-impersonator-property 'vector-type))
; Pass-through interceptor used for both ref and set when chaperoning vector
; values; it exists only so the chaperone can carry prop:vector-type.
(define (vector-access v idx val) val)
; Defines the vector real type [id] with the given element type and length,
; plus an explicit convert_<id> routine that converts a vector element-wise.
; Implicit conversion broadcasts: a boolean becomes -1/0 in every lane, and
; a real scalar is replicated into every lane; a value already of type [id]
; passes through unchanged.  Construction chaperones an immutable vector so
; the result carries prop:vector-type.
(define-syntax (define-vector-type stx)
  (syntax-case stx ()
    [(_ id [base length])
     (with-syntax ([(arg ...) (generate-temporaries (make-list (syntax->datum #'length) 'arg))]
                   [(idx ...) (build-list (syntax->datum #'length) values)]
                   [convert_id (format-id #'id "convert_~a" #'id #:source #'id #:props #'id)])
       (syntax/loc stx
         (begin
           (define-real-type id
             #:base base #:length length
             #:convert ([(? boolean? v) (apply id (make-list length (if v ((base) -1) ((base) 0))))]
                        [(? real? v) (apply id (make-list length ((base) v)))]
                        [(and (? has-vector-type?) (app vector-type (== id)) v) v])
             #:construct [(arg ...) (chaperone-vector (vector-immutable (base arg) ...)
                                                      vector-access vector-access
                                                      prop:vector-type id)])
           (define (convert_id vec)
             (id ((base) (vector-ref vec idx)) ...)))))]))
; The built-in OpenCL integer and floating-point vector types.
(define-vector-type int2 [int 2])
(define-vector-type int3 [int 3])
(define-vector-type int4 [int 4])
(define-vector-type int8 [int 8])
(define-vector-type int16 [int 16])
(define-vector-type float2 [float 2])
(define-vector-type float3 [float 3])
(define-vector-type float4 [float 4])
(define-vector-type float8 [float 8])
(define-vector-type float16 [float 16])
; Returns a scalar or vector value obtained by extracting
; the values at the given indices from the given vector.
; This procedure requires that
; (real-type-length type) = (length selector) and
; (vector-type? type) => (type-base type) = (type-base (vector-type vec)).
(define (vector-select vec selector type)
  (define size (length selector))
  (assert (= (real-type-length type) size))
  (if (= size 1)
      (vector-ref vec (car selector))
      (begin
        (assert (equal? (type-base type) (type-base (vector-type vec))))
        (apply type (map (lambda (idx) (vector-ref vec idx)) selector)))))
; Returns a vector obtained by updating the values at the
; given indices of the given vector, with the specified value.
; This procedure requires that
; (type-length (real-type-of val)) = (length selector) and
; (type-base (real-type-of val)) = (type-base (real-type-of vec)).
(define (vector-update vec selector val)
  (define type (vector-type vec))
  ; Work on a mutable copy; the chaperoned source vector is immutable.
  (define out (vector-copy vec))
  (if (= (length selector) 1)
      (vector-set! out (car selector) val)
      ; Multi-index update: val supplies one component per selected index.
      (for ([idx selector] [v val])
        (vector-set! out idx v)))
  ; Rebuild through the type constructor so the result is chaperoned.
  (apply type (vector->list out)))
;----------- VALUES -----------
; Returns the real type of the given value, or #f if the
; value does not have an OpenCL real type.
(define (real-type-of v)
  (match v
    [(? boolean?) bool]
    [(? has-vector-type?) (vector-type v)]
    ; Symbolic terms are classified before the concrete predicates below;
    ; exact integers map to int because integer? is tested before real?.
    [(term _ (== integer?)) int]
    [(term _ (== real?)) float]
    [(? integer?) int]
    [(? real?) float]
    [_ #f]))
; Returns true when given a scalar value, otherwise returns false.
; NOTE(review): this calls type-of while scalar-type below calls
; real-type-of -- confirm type-of is intended to yield a real type here.
(define (scalar-value? v)
  (scalar-type? (type-of v)))
; Returns the scalar type for the given scalar value v; raises an
; argument error when v is not a scalar value.
(define (scalar-type v)
  (let ([t (real-type-of v)])
    (if (scalar-type? t)
        t
        (raise-argument-error 'scalar-type "scalar" v))))
| null | https://raw.githubusercontent.com/emina/rosette/a64e2bccfe5876c5daaf4a17c5a28a49e2fbd501/sdsl/synthcl/model/reals.rkt | racket | which can be either a scalar or a vector type. A real type has a
length ( which is 1 for scalar types ) ; a corresponding base
(element) type; and a procedure for implicitly converting values
to that type, and for constructing values of that type from base
components. The base type of a scalar type is its corresponding
of its components.
Every instance of a real type can be used as a procedure. Every
form, where k is (primitive-type-length t). The no-argument form
returns another procedure, which, when given an argument, implicitly
converts that argument to a value of type t, or throws an error if
such a conversion is not possible. Valid implicit conversions are
When t is a vector type, its k-argument form returns a value of type
t if the provided arguments are instances of (type-base t). When t
is a scalar type, the constructor returns a value of type t, only if
that value can be constructed from the provided argument without type
conversion.
Returns true if t is a scalar type.
Returns true if t is a vector type.
Returns true iff an arithmetic operation on values of types
t1 and t2 will yield a value of type t2.
Returns the common real type of the given types, as specified in
When given a symbolic value of type (type-base t) as an argument, a
constructor for a scalar type t simply returns that value.
When given a concrete value of type t, the constructor checks that the
concrete value corresponds to type t and returns it. The constructor will throw an
error if given a symbolic or concrete value that cannot be cast to an instance
of t without type conversion.
The constructor for each vector type t creates an immutable vector value of
type t when given (real-type-length t) scalars of type (type-base t). Otherwise
it throws an error.
The property attached to vector values to indicate their
Returns a scalar or vector value obtained by extracting
the values at the given indices from the given vector.
This procedure requires that
(real-type-length type) = (length selector) and
(vector-type? type) => (type-base type) = (type-base (vector-type vec)).
Returns a vector obtained by updating the values at the
given indices of the given vector, with the specified value.
This procedure requires that
(type-length (real-type-of val)) = (length selector) and
(type-base (real-type-of val)) = (type-base (real-type-of vec)).
----------- VALUES -----------
Returns true when given a scalar value, otherwise returns false.
Returns the scalar type for the given scalar value v. | #lang rosette
(require "type.rkt" "errors.rkt"
rosette/lib/match (only-in racket/syntax format-symbol)
(for-syntax (only-in racket/syntax format-id))
(only-in rosette/base/core/type subtype? type-cast)
(only-in rosette [void rosette-void]))
(provide real-type? real-type-length
vector-type? (rename-out [has-vector-type? vector-value?]) vector-type
vector-select vector-update
scalar-type? scalar-value? scalar-type
real-type-of common-real-type void
bool int float
int2 int3 int4 int8 int16
float2 float3 float4 float8 float16
convert_int2 convert_int3 convert_int4 convert_int8 convert_int16
convert_float2 convert_float3 convert_float4 convert_float8 convert_float16)
Each real - type instance represents an OpenCL built - in real type ,
Rosette type , and the base type of a vector type is the scalar type
procedure t takes two forms : a no - argument form , and a k - argument
described in Ch . 6.2 of opencl-1.2 specification .
(struct real-type (base length procedure)
#:property prop:procedure
[struct-field-index procedure]
#:methods gen:custom-write
[(define (write-proc self port mode)
(fprintf port "~a" (object-name self)))]
#:methods gen:type
[(define (type-name self) (object-name self))
(define (type-base self) (real-type-base self))])
(define-syntax-rule (define-real-type id #:base base #:length length
#:convert ([pat convert-expr ...] ...)
#:construct [(arg ...) construct-expr ...])
(define id
(real-type base length
(let* ([convert (match-lambda [pat convert-expr ...] ...
[v (raise-conversion-error v id)])]
[id (case-lambda [() convert]
[(arg ...) construct-expr ...])])
id))))
(define (scalar-type? t) (and (real-type? t) (= 1 (real-type-length t))))
(define (vector-type? t) (and (real-type? t) (< 1 (real-type-length t))))
(define (real-type<=? t1 t2)
(and (real-type? t1) (real-type? t2)
(or (equal? t1 t2)
(equal? t1 bool)
(and (equal? t1 int) (not (equal? t2 bool)))
(and (equal? t1 float) (equal? (type-base t2) float)))))
Ch . 6.2.6 of opencl-1.2 specification . If there is no common
real type , returns # f.
(define common-real-type
(case-lambda
[(t) (and (real-type? t) t)]
[(t1 t2) (cond [(real-type<=? t1 t2) t2]
[(real-type<=? t2 t1) t1]
[else #f])]
[ts (common-real-type (car ts) (apply common-real-type (cdr ts)))]))
----------- SCALAR TYPES -----------
We represent OpenCL scalar values as concrete or symbolic values .
(define-syntax-rule
(define-scalar-type id #:base base #:primitive primitive #:convert convert-clauses)
(define-real-type id
#:base base #:length 1 #:convert convert-clauses
#:construct [(v) (match v
[(? primitive) v]
[(term _ (== base)) v]
[(union (list _ (... ...) (cons g (or (? primitive p)
(and (term _ (== base)) p)))
_ (... ...)) _)
(assert g)
p]
[_ (raise-argument-error 'id (~a 'id) v)])]))
(define-scalar-type bool
#:base boolean?
#:primitive boolean?
#:convert ([(? boolean? v) v]
[(? number? v) (! (= v 0))]))
(define-scalar-type int
#:base integer?
#:primitive fixnum?
#:convert ([(? boolean? v) (if v 1 0)]
[(? fixnum? v) v]
[(? flonum? v) (exact-truncate v)]
[v (real->integer v)]))
(define-scalar-type float
#:base real?
#:primitive flonum?
#:convert ([(? boolean? v) (if v 1.0 0.0)]
[(? fixnum? v) (exact->inexact v)]
[(? flonum? v) v]
[v (type-cast real? v 'float)]))
(define void
(let ()
(struct void ()
#:property prop:procedure
(lambda (self)
(error 'void "cannot cast a value to void"))
#:methods gen:type
[(define (type-name self) 'void)
(define (type-base self) rosette-void)]
#:methods gen:custom-write
[(define (write-proc self port mode)
(fprintf port "void"))])
(void)))
----------- VECTOR TYPES -----------
We represent OpenCL vector values as Racket immutable vector impersonators ,
with the prop : vector - type property which specifies their OpenCL type .
OpenCL vector type .
(define-values (prop:vector-type has-vector-type? vector-type)
(make-impersonator-property 'vector-type))
(define (vector-access v idx val) val)
(define-syntax (define-vector-type stx)
(syntax-case stx ()
[(_ id [base length])
(with-syntax ([(arg ...) (generate-temporaries (make-list (syntax->datum #'length) 'arg))]
[(idx ...) (build-list (syntax->datum #'length) values)]
[convert_id (format-id #'id "convert_~a" #'id #:source #'id #:props #'id)])
(syntax/loc stx
(begin
(define-real-type id
#:base base #:length length
#:convert ([(? boolean? v) (apply id (make-list length (if v ((base) -1) ((base) 0))))]
[(? real? v) (apply id (make-list length ((base) v)))]
[(and (? has-vector-type?) (app vector-type (== id)) v) v])
#:construct [(arg ...) (chaperone-vector (vector-immutable (base arg) ...)
vector-access vector-access
prop:vector-type id)])
(define (convert_id vec)
(id ((base) (vector-ref vec idx)) ...)))))]))
(define-vector-type int2 [int 2])
(define-vector-type int3 [int 3])
(define-vector-type int4 [int 4])
(define-vector-type int8 [int 8])
(define-vector-type int16 [int 16])
(define-vector-type float2 [float 2])
(define-vector-type float3 [float 3])
(define-vector-type float4 [float 4])
(define-vector-type float8 [float 8])
(define-vector-type float16 [float 16])
(define (vector-select vec selector type)
(define size (length selector))
(assert (= (real-type-length type) size))
(cond [(= size 1) (vector-ref vec (car selector))]
[else (assert (equal? (type-base type) (type-base (vector-type vec))))
(apply type (for/list ([idx selector]) (vector-ref vec idx)))]))
(define (vector-update vec selector val)
(define size (length selector))
(define type (vector-type vec))
(define len (real-type-length type))
(define out (vector-copy vec))
(if (= size 1)
(vector-set! out (car selector) val)
(for ([idx selector] [v val])
(vector-set! out idx v)))
(apply type (vector->list out)))
Returns the type of the given value or # f if the
value does not have an OpenCL real type .
(define (real-type-of v)
(match v
[(? boolean?) bool]
[(? has-vector-type?) (vector-type v)]
[(term _ (== integer?)) int]
[(term _ (== real?)) float]
[(? integer?) int]
[(? real?) float]
[_ #f]))
(define (scalar-value? v)
(scalar-type? (type-of v)))
(define (scalar-type v)
(match (real-type-of v)
[(? scalar-type? t) t]
[_ (raise-argument-error 'scalar-type "scalar" v)]))
|
e6b32c2fc6e7a689bb8f6b5232a9e0e29ea29e394bdc74f654af8d201dac0bde | jarohen/clidget | app.cljs | (ns todomvc.cljs.app
(:require [cljs.core.async :as a]
[todomvc.cljs.todomvc-widget :refer [make-todomvc]]
[todomvc.cljs.todomvc-model :as model]
[dommy.core :as d])
(:require-macros [dommy.macros :refer [sel1]]
[cljs.core.async.macros :refer [go]]))
(enable-console-print!)
(defn test-todos
  "Builds the initial demo state: a map of five placeholder todos keyed 0-4."
  []
  (into {}
        (map (fn [x] [x {:caption (str "Test todo " x)}]))
        (range 5)))
(defn run-benchmark!
  "Crude stress exercise for the todo state: clears !todos, then inside a
  core.async go block adds 100 todos, marks each one done, removes them all,
  and finally adds a single trailing todo.  Each phase issues one swap! per
  todo, so every intermediate state is observable by watchers of !todos."
  [!todos]
  (reset! !todos {})
  (go
    (let [els 100]
      ;; Phase 1: insert els todos.
      (dotimes [i els]
        (swap! !todos
               assoc i {:caption (str "test" i), :done? false}))
      ;; Phase 2: mark each todo done.
      (dotimes [i els]
        (swap! !todos
               assoc-in [i :done?] true))
      ;; Phase 3: remove them all again.
      (dotimes [i els]
        (swap! !todos
               dissoc i))
      ;; Phase 4: leave one fresh todo behind.
      (swap! !todos
             assoc els {:caption (str "test" els), :done? false}))))
;; Entry point: once the page loads, seed the app state with the demo todos,
;; wire model event handling to a fresh core.async channel, and mount the
;; TodoMVC widget into the #content element.  The benchmark call is kept as
;; ignored code (#_) for manual profiling.
(set! (.-onload js/window)
      (fn []
        (let [!todos (atom (test-todos))
              events-ch (doto (a/chan)
                          (model/watch-events! !todos))]
          (d/replace-contents! (sel1 :#content) (make-todomvc !todos events-ch))
          #_(run-benchmark! !todos))))
| null | https://raw.githubusercontent.com/jarohen/clidget/bd67082ad3819908e2d532151417cab436333ad0/todomvc/src/cljs/todomvc/cljs/app.cljs | clojure | (ns todomvc.cljs.app
(:require [cljs.core.async :as a]
[todomvc.cljs.todomvc-widget :refer [make-todomvc]]
[todomvc.cljs.todomvc-model :as model]
[dommy.core :as d])
(:require-macros [dommy.macros :refer [sel1]]
[cljs.core.async.macros :refer [go]]))
(enable-console-print!)
(defn test-todos []
(->> (for [x (range 5)]
[x {:caption (str "Test todo " x)}])
(into {})))
(defn run-benchmark! [!todos]
(reset! !todos {})
(go
(let [els 100]
(dotimes [i els]
(swap! !todos
assoc i {:caption (str "test" i), :done? false}))
(dotimes [i els]
(swap! !todos
assoc-in [i :done?] true))
(dotimes [i els]
(swap! !todos
dissoc i))
(swap! !todos
assoc els {:caption (str "test" els), :done? false}))))
(set! (.-onload js/window)
(fn []
(let [!todos (atom (test-todos))
events-ch (doto (a/chan)
(model/watch-events! !todos))]
(d/replace-contents! (sel1 :#content) (make-todomvc !todos events-ch))
#_(run-benchmark! !todos))))
| |
5d20e6b58745e113fd7fe006a9dfc774d14bf1e8b7780cbdc788e92ffca52849 | tisnik/clojure-examples | core.clj | ;
; (C) Copyright 2016, 2020, 2021
;
; All rights reserved. This program and the accompanying materials
; are made available under the terms of the Eclipse Public License v1.0
; which accompanies this distribution, and is available at
; http://www.eclipse.org/legal/epl-v10.html
;
; Contributors:
;
(ns consume-messages-2.core
(:require [jackdaw.client :as jc]
[jackdaw.client.log :as jl]
[clojure.pprint :as pp]))
;; Kafka consumer settings: plain-string (de)serialization for keys and
;; values, reading from the earliest available offset, as member of consumer
;; group "group-A" against a local broker.
(def consumer-config
  {"bootstrap.servers" "localhost:9092"
   "key.deserializer" "org.apache.kafka.common.serialization.StringDeserializer"
   "value.deserializer" "org.apache.kafka.common.serialization.StringDeserializer"
   "auto.offset.reset" "earliest"
   "group.id" "group-A"})
(defn -main
  "Opens a consumer subscribed to the `test1` topic, reads up to 10 records
  and prints the interesting fields of each one."
  [& args]
  (with-open [consumer (-> (jc/consumer consumer-config)
                           (jc/subscribe [{:topic-name "test1"}]))]
    (doseq [record (jl/log consumer 10)]
      (let [{:keys [key value partition timestamp offset]} record]
        (println "key: " key)
        (println "value: " value)
        (println "partition: " partition)
        (println "timestamp: " timestamp)
        (println "offset: " offset)))))
| null | https://raw.githubusercontent.com/tisnik/clojure-examples/a5f9d6119b62520b05da64b7929d07b832b957ab/kafka-consume-messages-2/src/consume_messages_2/core.clj | clojure |
All rights reserved. This program and the accompanying materials
are made available under the terms of the Eclipse Public License v1.0
which accompanies this distribution, and is available at
Contributors:
| ( C ) Copyright 2016 , 2020 , 2021
-v10.html
(ns consume-messages-2.core
(:require [jackdaw.client :as jc]
[jackdaw.client.log :as jl]
[clojure.pprint :as pp]))
(def consumer-config
{"bootstrap.servers" "localhost:9092"
"key.deserializer" "org.apache.kafka.common.serialization.StringDeserializer"
"value.deserializer" "org.apache.kafka.common.serialization.StringDeserializer"
"auto.offset.reset" "earliest"
"group.id" "group-A"})
(defn -main
[& args]
(with-open [consumer (-> (jc/consumer consumer-config)
(jc/subscribe [{:topic-name "test1"}]))]
(doseq [{:keys [key value partition timestamp offset]} (jl/log consumer 10)]
(println "key: " key)
(println "value: " value)
(println "partition: " partition)
(println "timestamp: " timestamp)
(println "offset: " offset))))
|
e0f6861172572d332b0bd4fa675c2af7efd90cdfb6d55d7531d6da14770851af | 2600hz/kazoo | crossbar_view.erl | %%%-----------------------------------------------------------------------------
%%% (C) 2010-2020, 2600Hz
%%% @doc
%%% @author
%%% @author Hesaam
%%%
%%% This Source Code Form is subject to the terms of the Mozilla Public
%%% License, v. 2.0. If a copy of the MPL was not distributed with this
%%% file, You can obtain one at https://mozilla.org/MPL/2.0/.
%%%
%%% @end
%%%-----------------------------------------------------------------------------
-module(crossbar_view).
-export([load/2, load/3
,load_time_range/2, load_time_range/3
,load_modb/2, load_modb/3
,load_yodb/2, load_yodb/3
,next_chunk/1
,build_load_params/3
,build_load_time_range_params/3
,build_load_modb_params/3
,build_load_yodb_params/3
,direction/1, direction/2
,start_end_keys/2
,time_range/1, time_range/2
,ranged_start_end_keys/2
,get_page_size/2
,suffix_key_fun/1
,get_doc_fun/0
,get_value_fun/0
,get_key_fun/0
,get_id_fun/0
,high_value_key/0
]).
-include("crossbar.hrl").
-define(CB_SPECIFIC_VIEW_OPTIONS,
['ascending', 'databases', 'mapper', 'no_filter'
%% non-range query
,'end_keymap', 'keymap', 'start_keymap'
%% chunked query
,'chunk_size', 'is_chunked', 'unchunkable'
%% ranged query
,'created_to', 'created_from', 'max_range'
,'range_end_keymap', 'range_key_name', 'range_keymap', 'range_start_keymap'
,'should_paginate'
%% start/end key length fixer
,'key_min_length'
]).
-type direction() :: 'ascending' | 'descending'.
-type page_size() :: kz_term:api_pos_integer() | 'infinity'.
-type time_range() :: {kz_time:gregorian_seconds(), kz_time:gregorian_seconds()}.
%% `{StartTimestamp, EndTimestamp}'.
-type api_range_key() :: 'undefined' | ['undefined'] | kazoo_data:range_key().
-type range_keys() :: {api_range_key(), api_range_key()}.
%% `{StartKey, EndKey}'.
-type keymap_fun() :: fun((cb_context:context()) -> api_range_key()) |
fun((cb_context:context(), kazoo_data:view_options()) -> api_range_key()).
%% Function of arity 1 or 2 to create a customized start/end key.
-type keymap() :: api_range_key() | keymap_fun().
%% A literal CouchDB `startkey' or `endkey', or a {@link keymap_fun()} for non-range requests.
%% See also {@link start_end_keys/3}.
-type range_keymap_fun() :: fun((kz_time:gregorian_seconds()) -> api_range_key()).
%% A function of arity 1. The timestamp from `created_from' or `created_to' will be passed to this function
%% to construct the start or end key.
-type range_keymap() :: 'nil' | api_range_key() | range_keymap_fun().
%% Creates a start/end key for ranged queries. A binary or integer or a list of binary or integer
%% to create start/end key. The timestamp will added to end of it.
%% If `undefined' only the timestamp will be used as the key. If timestamp in the view key is at start of the key,
%% use {@link suffix_key_fun}. If the view doesn't need any start/end key you can set this to `nil' to bypass setting
%% timestamp as key.
-type user_mapper_fun() :: 'undefined' |
fun((kz_json:objects()) -> kz_json:objects() | {'error', any()}) |
fun((kz_json:object(), kz_json:objects()) -> kz_json:objects() | {'error', any()}) |
fun((cb_context:context(), kz_json:object(), kz_json:objects()) -> kz_json:objects() | {'error', any()}).
%% A function to filter/map view results. For use in Crossbar modules calling {@link crossbar_view} functions.
-type mapper_fun() :: 'undefined' |
fun((kz_json:objects()) -> kz_json:objects() | {'error', any()}) |
fun((kz_json:object(), kz_json:objects()) -> kz_json:objects() | {'error', any()}).
%% A function to filter/map view result. Internal to {@link crossbar_view}.
-type options() :: kazoo_data:view_options() |
[{'databases', kz_term:ne_binaries()} |
{'mapper', user_mapper_fun()} |
{'max_range', pos_integer()} |
{'no_filter', boolean()} |
%% for non-ranged query
{'end_keymap', keymap()} |
{'keymap', keymap()} |
{'start_keymap', keymap()} |
%% for chunked query
{'chunk_size', pos_integer()} |
{'is_chunked', boolean()} |
%% for ranged/modb query
{'created_from', pos_integer()} |
{'created_to', pos_integer()} |
{'range_end_keymap', range_keymap()} |
{'range_keymap', range_keymap()} |
{'range_key_name', kz_term:ne_binary()} |
{'range_start_keymap', range_keymap()} |
{'should_paginate', boolean()} |
%% start/end key length fixer
{'key_min_length', pos_integer()}
].
-type load_params() :: #{chunk_size => pos_integer()
,context => cb_context:context()
,databases => kz_term:ne_binaries()
,direction => direction()
,end_key => kazoo_data:range_key()
,end_time => kz_time:gregorian_seconds()
,has_qs_filter => boolean()
,is_chunked => boolean()
,last_key => last_key()
,mapper => mapper_fun()
,page_size => pos_integer()
,previous_chunk_length => non_neg_integer()
,queried_jobjs => kz_json:objects()
,should_paginate => boolean()
,start_key => kazoo_data:range_key()
,start_time => kz_time:gregorian_seconds()
,total_queried => non_neg_integer()
,view => kz_term:ne_binary()
,view_options => kazoo_data:view_options()
}.
-type last_key() :: api_range_key().
%% Last key of the view result from previous iteration, also it is used to set `next_start_key'.
-export_type([range_keys/0, time_range/0
,options/0, direction/0
,mapper_fun/0 ,user_mapper_fun/0
,keymap/0, keymap_fun/0
,range_keymap/0, range_keymap_fun/0
]
).
%% @equiv load(Context, View, [])
-spec load(cb_context:context(), kz_term:ne_binary()) -> cb_context:context().
%% Load the view with default options.
load(Context, View) ->
    load(Context, View, []).
%%------------------------------------------------------------------------------
%% @doc This function attempts to load the context with the results of a view
%% run against the database.
%% @end
%%------------------------------------------------------------------------------
-spec load(cb_context:context(), kz_term:ne_binary(), options()) -> cb_context:context().
%% Builds the non-ranged load parameters and runs the query (or returns the
%% error context produced while building them).
load(Context, View, Options) ->
    LoadParams = build_load_params(Context, View, Options),
    load_view(LoadParams, Context).
%% @equiv load_time_range(Context, View, [])
-spec load_time_range(cb_context:context(), kz_term:ne_binary()) -> cb_context:context().
%% Time-ranged load with default options.
load_time_range(Context, View) ->
    load_time_range(Context, View, []).
%%------------------------------------------------------------------------------
%% @doc This function attempts to load the context with the
%% results of a view run against the database.
%% @end
%%------------------------------------------------------------------------------
-spec load_time_range(cb_context:context(), kz_term:ne_binary(), options()) -> cb_context:context().
%% Builds the time-ranged load parameters and runs the query (or returns the
%% error context produced while building them).
load_time_range(Context, View, Options) ->
    LoadParams = build_load_time_range_params(Context, View, Options),
    load_view(LoadParams, Context).
%% @equiv load_modb(Context, View, [])
-spec load_modb(cb_context:context(), kz_term:ne_binary()) -> cb_context:context().
%% MODB load with default options.
load_modb(Context, View) ->
    load_modb(Context, View, []).
%%------------------------------------------------------------------------------
%% @doc This function attempts to load the context with the results of a view
%% run against the account's MODBs.
%% @end
%%------------------------------------------------------------------------------
-spec load_modb(cb_context:context(), kz_term:ne_binary(), options()) -> cb_context:context().
%% Builds MODB-spanning load parameters and runs the query (or returns the
%% error context produced while building them).
load_modb(Context, View, Options) ->
    load_view(build_load_modb_params(Context, View, Options), Context).
%% @equiv load_yodb(Context, View, [])
-spec load_yodb(cb_context:context(), kz_term:ne_binary()) -> cb_context:context().
%% YODB load with default options.
load_yodb(Context, View) ->
    load_yodb(Context, View, []).
%%------------------------------------------------------------------------------
%% @doc This function attempts to load the context with the results of a view
%% run against the account's YODBs.
%% @end
%%------------------------------------------------------------------------------
-spec load_yodb(cb_context:context(), kz_term:ne_binary(), options()) -> cb_context:context().
%% Builds YODB-spanning load parameters and runs the query (or returns the
%% error context produced while building them).
load_yodb(Context, View, Options) ->
    load_view(build_load_yodb_params(Context, View, Options), Context).
%%------------------------------------------------------------------------------
%% @doc
%% Takes {@link options()} and returns {@link load_params()}, for normal querying
%% of a view.
%% @end
%%------------------------------------------------------------------------------
-spec build_load_params(cb_context:context(), kz_term:ne_binary(), options()) -> load_params() | cb_context:context().
build_load_params(Context, View, Options) ->
    try build_general_load_params(Context, View, Options) of
        #{direction := Direction}=LoadMap ->
            %% Query-string filtering applies unless explicitly disabled via
            %% the 'no_filter' option.
            HasQSFilter = not props:get_is_true('no_filter', Options, 'false')
                andalso crossbar_filter:is_defined(Context),
            lager:debug("has qs filter: ~s", [HasQSFilter]),
            UserMapper = props:get_value('mapper', Options),
            StartEnd = start_end_keys(Context, Options, Direction),
            %% Pad composite keys up to 'key_min_length' so boundary rows are
            %% not collated out of the result set.
            KeyMinLength = props:get_value('key_min_length', Options),
            {StartKey, EndKey} = expand_min_max_keys(StartEnd, Direction, KeyMinLength),
            Params = LoadMap#{has_qs_filter => HasQSFilter
                             ,mapper => crossbar_filter:build_with_mapper(Context, UserMapper, HasQSFilter)
                             ,start_key => StartKey
                             ,view_options => build_view_query(Options, Direction, StartKey, EndKey, HasQSFilter)
                             },
            maybe_set_start_end_keys(Params, StartKey, EndKey);
        %% a context here carries a validation/system error; pass it through
        Ctx -> Ctx
    catch
        %% any crash while assembling options is reported as a datastore_fault
        %% on the context instead of crashing the request
        ?STACKTRACE(_E, _T, ST)
        lager:debug("exception occurred during building view options for ~s", [View]),
        kz_log:log_stacktrace(ST),
        cb_context:add_system_error('datastore_fault', Context)
    end.
%%------------------------------------------------------------------------------
%% @doc
%% Takes {@link options()} and returns {@link load_params()}, for querying
%% of a view over a specified range of time.
%% @end
%%------------------------------------------------------------------------------
-spec build_load_time_range_params(cb_context:context(), kz_term:ne_binary(), options()) ->
          load_params() | cb_context:context().
build_load_time_range_params(Context, View, Options) ->
    try build_general_load_params(Context, View, Options) of
        #{direction := Direction}=LoadMap ->
            %% 'range_key_name' names the timestamp field driving the range
            %% (defaults to "created")
            TimeFilterKey = props:get_ne_binary_value('range_key_name', Options, <<"created">>),
            UserMapper = props:get_value('mapper', Options),
            %% A filter consisting solely of the time-range keys is already
            %% handled by the start/end keys, so it does not count as a
            %% query-string filter.
            HasQSFilter = not props:get_is_true('no_filter', Options, 'false')
                andalso crossbar_filter:is_defined(Context)
                andalso not crossbar_filter:is_only_time_filter(Context, TimeFilterKey),
            lager:debug("has qs filter: ~s", [HasQSFilter]),
            case time_range(Context, Options, TimeFilterKey) of
                {StartTime, EndTime} ->
                    StartEnd = ranged_start_end_keys(Context, Options, Direction, StartTime, EndTime),
                    %% pad composite keys so boundary rows are not collated out
                    KeyMinLength = props:get_value('key_min_length', Options),
                    {StartKey, EndKey} = expand_min_max_keys(StartEnd, Direction, KeyMinLength),
                    Params = LoadMap#{end_time => EndTime
                                     ,has_qs_filter => HasQSFilter
                                     ,mapper => crossbar_filter:build_with_mapper(Context, UserMapper, HasQSFilter)
                                     ,start_key => StartKey
                                     ,start_time => StartTime
                                     ,view_options => build_view_query(Options, Direction, StartKey, EndKey, HasQSFilter)
                                     },
                    maybe_set_start_end_keys(Params, StartKey, EndKey);
                %% time_range/3 returns a context carrying a validation error
                Ctx -> Ctx
            end;
        Ctx -> Ctx
    catch
        ?STACKTRACE(_E, _T, ST)
        lager:debug("exception occurred during building range view options for ~s", [View]),
        kz_log:log_stacktrace(ST),
        cb_context:add_system_error('datastore_fault', Context)
    end.
%%------------------------------------------------------------------------------
%% @doc
%% Takes {@link options()} and returns {@link load_params()}, for querying
%% of a view over a specified range of time in account's MODBs.
%% @end
%%------------------------------------------------------------------------------
-spec build_load_modb_params(cb_context:context(), kz_term:ne_binary(), options()) ->
          load_params() | cb_context:context().
%% Extends the ranged load parameters with the list of MODBs covering the
%% requested time window; an error context passes through untouched.
build_load_modb_params(Context, View, Options) ->
    case build_load_time_range_params(Context, View, Options) of
        #{direction := Direction
         ,start_time := StartTime
         ,end_time := EndTime
         }=LoadMap ->
            Databases = get_range_modbs(Context, Options, Direction, StartTime, EndTime),
            LoadMap#{databases => Databases};
        Ctx -> Ctx
    end.
%%------------------------------------------------------------------------------
%% @doc
%% Takes {@link options()} and returns {@link load_params()}, for querying
%% of a view over a specified range of time in account's YODBs.
%% @end
%%------------------------------------------------------------------------------
-spec build_load_yodb_params(cb_context:context(), kz_term:ne_binary(), options()) ->
          load_params() | cb_context:context().
build_load_yodb_params(Context, View, Options) ->
    case build_load_time_range_params(Context, View, Options) of
        #{direction := Direction
         ,start_time := StartTime
         ,end_time := EndTime
         }=LoadMap ->
            %% NOTE(review): unlike the MODB variant the database list is
            %% deduplicated here -- presumably because several months of a
            %% range map onto the same yearly database; confirm.
            Databases = get_range_yodbs(Context, Options, Direction, StartTime, EndTime),
            LoadMap#{databases => lists:usort(Databases)};
        Ctx -> Ctx
    end.
%%------------------------------------------------------------------------------
%% @doc Build CouchDB view options. It sets start/end keys,
%% direction and `include_docs' (if it's not using reduce) and removes
%% {@link options()}.
%%
%% <div class="notice">Do not set start or end keys in Options, use provided
%% special keys to generate start/end keys based on timestamp.</div>
%% @end
%%------------------------------------------------------------------------------
-spec build_view_query(options(), direction(), api_range_key(), api_range_key(), boolean()) ->
          kazoo_data:view_options().
%% Strips the crossbar-specific options, then layers the computed start/end
%% keys and sort direction on top of whatever CouchDB options remain.
build_view_query(Options, Direction, StartKey, EndKey, HasQSFilter) ->
    CrossbarKeys = ['startkey', 'endkey'
                   ,'descending', 'limit'
                    | ?CB_SPECIFIC_VIEW_OPTIONS
                   ],
    BaseOptions =
        props:filter_undefined(
          [{'startkey', StartKey}
          ,{'endkey', EndKey}
          ,Direction
           | props:delete_keys(CrossbarKeys, Options)
          ]),
    %% query-string filters are evaluated against the raw documents, so
    %% 'include_docs' must be forced on when a filter is present
    WithDocs =
        case HasQSFilter of
            'false' -> BaseOptions;
            'true' -> ['include_docs' | props:delete('include_docs', BaseOptions)]
        end,
    %% reduced/grouped views return computed rows, not documents, so
    %% 'include_docs' must not be sent alongside them
    case props:get_first_defined(['reduce', 'group', 'group_level'], WithDocs) of
        'undefined' -> WithDocs;
        'false' -> WithDocs;
        _Defined -> props:delete('include_docs', WithDocs)
    end.
%%------------------------------------------------------------------------------
%% @doc Returns start/end keys based on direction.
%% Returned tuple is `{start_key, end_key}'.
%%
%% If `start_key' or `end_key' is present in the request (query string or payload)
%% they will be returned instead. Otherwise the keys will built by key map options.
%%
%% <strong>Options description:</strong>
%% <dl>
%% <dt>`keymap'</dt><dd>Use this to map both start/end keys.</dd>
%% <dt>`start_keymap'</dt><dd>Maps start key only.</dd>
%% <dt>`end_keymap'</dt><dd>Maps end key only.</dd>
%% </dl>
%%
%% See also {@link direction/2} for `direction' option explanation.
%%
%% <strong>Keymap description:</strong>
%% <dl>
%% <dt>{@type kazoo_data:range_key()}</dt><dd>A regular CouchDB key to construct
%% keys like `[<<"en">>, <<"us">>]'.</dd>
%% <dt>{@type keymap_fun()}</dt><dd>To customize your own key using a function.</dd>
%% </dl>
%%
%% The keys will be swapped if direction is descending.
%% @see direction/2
%% @end
%%------------------------------------------------------------------------------
-spec start_end_keys(cb_context:context(), options()) -> range_keys().
%% Derives the sort direction from the context/options, then delegates.
start_end_keys(Context, Options) ->
    start_end_keys(Context, Options, direction(Context, Options)).
-spec start_end_keys(cb_context:context(), options(), direction()) -> range_keys().
%% Option-derived keys swap positions when sorting descending; an explicit
%% start_key/end_key supplied in the request always overrides the default on
%% its own side independently.
start_end_keys(Context, Options, Direction) ->
    {OptsStartK, OptsEndK} = get_start_end_keys(Context, Options),
    {DefaultStart, DefaultEnd} =
        case Direction of
            'ascending' -> {OptsStartK, OptsEndK};
            'descending' -> {OptsEndK, OptsStartK}
        end,
    StartKey = case cb_context:req_value(Context, <<"start_key">>) of
                   'undefined' -> DefaultStart;
                   ReqStart -> ReqStart
               end,
    EndKey = case cb_context:req_value(Context, <<"end_key">>) of
                 'undefined' -> DefaultEnd;
                 ReqEnd -> ReqEnd
             end,
    {StartKey, EndKey}.
%%------------------------------------------------------------------------------
%% @doc Returns start/end keys based on direction. Start/end timestamp will be
%% added to keys based on requested time range.
%% Returned tuple is `{start_key, end_key}'.
%%
%% If `start_key' or `end_key' is present in the request (query string or payload)
%% they will be returned instead. Otherwise the keys will built by key map options.
%%
%% <strong>Options description:</strong>
%% <dl>
%% <dt>`range_keymap'</dt><dd>Use this to map both start/end keys.</dd>
%% <dt>`range_start_keymap'</dt><dd>maps start key only.</dd>
%% <dt>`range_end_keymap'</dt><dd>maps end key only.</dd>
%% </dl>
%%
%% See also {@link direction/2} and {@link time_range/2} for explanation of
%% other options.
%%
%% <strong>Keymap description:</strong>
%% <dl>
%% <dt>{@type kz_term:ne_binary()}</dt><dd>Constructs keys like `[<<"account">>, Timestamp]'.</dd>
%% <dt>{@type integer()}</dt><dd>Constructs keys like `[1234, Timestamp]'.</dd>
%% <dt>{@type list()}</dt><dd>Constructs keys like `[<<"en">>, <<"us">>, Timestamp]'.</dd>
%% <dt>{@type range_keymap_fun()}</dt><dd>Customize your own key using a function.</dd>
%% </dl>
%%
%% The keys will be swapped if direction is descending.
%% @end
%%------------------------------------------------------------------------------
-spec ranged_start_end_keys(cb_context:context(), options()) -> range_keys().
%% Resolves the time range and direction from the request before delegating.
%% NOTE(review): time_range/2 can return an error context; that would
%% 'badmatch' here -- callers are expected to have a valid range.
ranged_start_end_keys(Context, Options) ->
    {StartTime, EndTime} = time_range(Context, Options),
    Direction = direction(Context, Options),
    ranged_start_end_keys(Context, Options, Direction, StartTime, EndTime).
-spec ranged_start_end_keys(cb_context:context(), options(), direction(), kz_time:gregorian_seconds(), kz_time:gregorian_seconds()) -> range_keys().
%% The keymaps swap sides for descending sort; each default is wrapped in a
%% thunk so a keymap is only invoked when the request did not supply an
%% explicit start_key/end_key for that side (matching the original's laziness).
ranged_start_end_keys(Context, Options, Direction, StartTime, EndTime) ->
    {StartKeyMap, EndKeyMap} = get_range_key_maps(Options),
    {DefaultStart, DefaultEnd} =
        case Direction of
            'ascending' ->
                {fun() -> StartKeyMap(StartTime) end
                ,fun() -> EndKeyMap(EndTime) end
                };
            'descending' ->
                {fun() -> EndKeyMap(EndTime) end
                ,fun() -> StartKeyMap(StartTime) end
                }
        end,
    StartKey = case cb_context:req_value(Context, <<"start_key">>) of
                   'undefined' -> DefaultStart();
                   ReqStart -> ReqStart
               end,
    EndKey = case cb_context:req_value(Context, <<"end_key">>) of
                 'undefined' -> DefaultEnd();
                 ReqEnd -> ReqEnd
             end,
    {StartKey, EndKey}.
%%------------------------------------------------------------------------------
%% @doc Suffix the Timestamp to the provided key map option. Useful to
%% generate keys like `[Timestamp, InteractionId]' for the end key in
%% {@link cb_cdrs} for example.
%% @end
%%------------------------------------------------------------------------------
-spec suffix_key_fun(range_keymap()) -> range_keymap_fun().
%% 'nil' disables the key entirely; 'undefined' uses the bare timestamp
suffix_key_fun('nil') -> fun(_) -> 'undefined' end;
suffix_key_fun('undefined') -> fun kz_term:identity/1;
suffix_key_fun(['undefined']) -> fun kz_term:identity/1;
%% scalar suffixes produce [Timestamp, Suffix]; list suffixes are appended
%% after the timestamp; an arity-1 fun is used as-is
suffix_key_fun(K) when is_binary(K) -> fun(Ts) -> [Ts, K] end;
suffix_key_fun(K) when is_integer(K) -> fun(Ts) -> [Ts, K] end;
suffix_key_fun(K) when is_list(K) -> fun(Ts) -> [Ts | K] end;
suffix_key_fun(K) when is_function(K, 1) -> K.
%%------------------------------------------------------------------------------
%% @doc Depending on sort direction of the result set, ensure that the length of
%% the startkey/endkey supplied in the query is the same length as that returned
%% by the result set (grouped or not). This ensures that the result at the start
%% or end is not filtered out if it matches on all present keys.
%% @end
%%------------------------------------------------------------------------------
-spec expand_min_max_keys(range_keys(), direction(), kz_term:api_non_neg_integer()) -> range_keys().
%% Pads/expands both boundary keys; the end key is always expanded with the
%% direction opposite to the start key so the sentinels bracket the range.
expand_min_max_keys({StartKey, EndKey}, 'ascending', KeyMinLength) ->
    {expand_min_max_keys2(StartKey, 'ascending', KeyMinLength)
    ,expand_min_max_keys2(EndKey, 'descending', KeyMinLength)
    };
expand_min_max_keys({StartKey, EndKey}, 'descending', KeyMinLength) ->
    {expand_min_max_keys2(StartKey, 'descending', KeyMinLength)
    ,expand_min_max_keys2(EndKey, 'ascending', KeyMinLength)
    }.
-spec expand_min_max_keys2(api_range_key(), direction(), kz_term:api_non_neg_integer()) -> api_range_key().
%% Composite (list) keys are padded to the minimum length and every member is
%% expanded; scalar keys are expanded directly.
expand_min_max_keys2(RangeKey, Direction, KeyMinLength) when is_list(RangeKey) ->
    [expand_min_max_key(Element, Direction)
     || Element <- maybe_min_max_pad(KeyMinLength, RangeKey)
    ];
expand_min_max_keys2(RangeKey, Direction, _KeyMinLength) ->
    expand_min_max_key(RangeKey, Direction).
-spec expand_min_max_key(api_range_key(), direction()) -> api_range_key().
%% Replaces the 'min_max' padding placeholder with a collation sentinel:
%% 'false' collates below typical key members and high_value_key/0 above
%% them in CouchDB view collation, so padded boundary keys still bracket
%% every matching row. Non-placeholder members pass through untouched.
expand_min_max_key('min_max', 'ascending') ->
    lager:debug("padding ascending composite key"),
    'false';
expand_min_max_key('min_max', 'descending') ->
    lager:debug("padding descending composite key"),
    high_value_key();
expand_min_max_key(RangeKey, _) -> RangeKey.
-spec maybe_min_max_pad(kz_term:api_non_neg_integer(), api_range_key()) -> api_range_key() | ['min_max'].
%% Right-pads a composite key with 'min_max' placeholders up to KeyMinLength.
%% 'undefined' disables padding entirely.
maybe_min_max_pad('undefined', RangeKey) -> RangeKey;
maybe_min_max_pad(KeyMinLength, RangeKey) when length(RangeKey) >= KeyMinLength ->
    %% Already at (or beyond) the minimum length: return as-is. Without this
    %% guard a negative pad count was handed to min_max_pad/2, which never
    %% reached its 0 base case and recursed forever.
    RangeKey;
maybe_min_max_pad(KeyMinLength, RangeKey) ->
    lists:reverse(min_max_pad(KeyMinLength - length(RangeKey), lists:reverse(RangeKey))).
-spec min_max_pad(integer(), api_range_key()) -> api_range_key() | ['min_max'].
%% Prepends N 'min_max' placeholders to the (reversed) key. A non-positive N
%% returns the key unchanged; the original only stopped at exactly 0, so a
%% negative count (key longer than the minimum) recursed forever.
min_max_pad(N, RangeKey) when N =< 0 -> RangeKey;
min_max_pad(N, RangeKey) -> min_max_pad(N-1, ['min_max' | RangeKey]).
-spec high_value_key() -> kz_term:ne_binary().
%% A single high Unicode code point (U+FFF0) used as an "effectively
%% greatest" key member -- NOTE(review): assumes no real key member
%% collates above it in the target views.
high_value_key() -> <<16#fff0/utf8>>.
%% @equiv direction(Context, [])
-spec direction(cb_context:context()) -> direction().
%% Sort direction from the request alone; see direction/2.
direction(Context) ->
    direction(Context, []).
%%------------------------------------------------------------------------------
%% @doc Find view sort direction from `Options' or request
%% query string. Default to `descending'.
%% @end
%%------------------------------------------------------------------------------
-spec direction(cb_context:context(), options()) -> direction().
%% Descending wins when requested via the 'descending' option, a truthy
%% "descending" query-string flag, or an explicit "ascending=false" flag
%% (absent "ascending" also defaults to descending).
direction(Context, Options) ->
    QueryString = cb_context:query_string(Context),
    Descending = props:get_value('descending', Options, 'false')
        orelse kz_json:is_true(<<"descending">>, QueryString)
        orelse kz_json:is_false(<<"ascending">>, QueryString, 'true'),
    case Descending of
        'true' -> 'descending';
        'false' -> 'ascending'
    end.
%% @equiv time_range(Context, [])
-spec time_range(cb_context:context()) -> time_range() | cb_context:context().
%% Time range derived purely from the request; see time_range/2.
time_range(Context) -> time_range(Context, []).
%%------------------------------------------------------------------------------
%% @doc Returns a time range for range query based or payload on request or `Options'
%% and default range based on system configuration (maximum range).
%%
%% The start time, `created_from' (default), should always be prior to end time
%% `created_to'.
%%
%% <strong>Options:</strong>
%% <dl>
%% <dt>`max_range'</dt><dd>Maximum range allowed. Default is the value of
%% `crossbar.maximum_range', 31 days.</dd>
%%
%% <dt>`range_key'</dt><dd>The key name in query string to get values
%% from (created, modified or ...). Default is `created'.</dd>
%%
%% <dt>`{RANGE_KEY}_from'</dt><dd>Start time.</dd>
%%
%% <dt>`{RANGE_KEY}_to'</dt><dd>End time.</dd>
%% </dl>
%% @end
%%------------------------------------------------------------------------------
-spec time_range(cb_context:context(), options()) -> time_range() | cb_context:context().
%% Resolves the range-key name (default <<"created">>) before delegating.
time_range(Context, Options) ->
    time_range(Context, Options, props:get_ne_binary_value('range_key_name', Options, <<"created">>)).
-spec time_range(cb_context:context(), options(), kz_term:ne_binary()) -> time_range() | cb_context:context().
%% The end of the window defaults to "now"; the start defaults to one full
%% maximum-range span before the resolved end time.
time_range(Context, Options, Key) ->
    MaxRange = get_max_range(Options),
    Now = kz_time:now_s(),
    RangeTo = get_time_key(Context, <<Key/binary, "_to">>, Options, Now),
    RangeFrom = get_time_key(Context, <<Key/binary, "_from">>, Options, RangeTo - MaxRange),
    time_range(Context, MaxRange, Key, RangeFrom, RangeTo).
%%------------------------------------------------------------------------------
%% @doc Checks whether or not end time is prior to start time. Returns a ranged
%% tuple `{start_time, end_time}' or `context' with validation error.
%% @end
%%------------------------------------------------------------------------------
-spec time_range(cb_context:context(), pos_integer(), kz_term:ne_binary(), pos_integer(), pos_integer()) ->
          time_range() | cb_context:context().
%% Validates RangeFrom =< RangeTo and that the window does not exceed
%% MaxRange; failures attach a date_range validation error to the context
%% (always keyed on the "_from" path).
time_range(Context, MaxRange, Key, RangeFrom, RangeTo) ->
    Path = <<Key/binary, "_from">>,
    case RangeTo - RangeFrom of
        %% end before start: reject
        N when N < 0 ->
            Msg = kz_term:to_binary(io_lib:format("~s_to ~b is prior to ~s ~b", [Key, RangeTo, Path, RangeFrom])),
            JObj = kz_json:from_list([{<<"message">>, Msg}, {<<"cause">>, RangeFrom}]),
            lager:debug("range error: ~s", [Msg]),
            cb_context:add_validation_error(Path, <<"date_range">>, JObj, Context);
        %% window wider than the configured maximum: reject
        N when N > MaxRange ->
            Msg = kz_term:to_binary(io_lib:format("~s_to ~b is more than ~b seconds from ~s ~b", [Key, RangeTo, MaxRange, Path, RangeFrom])),
            JObj = kz_json:from_list([{<<"message">>, Msg}, {<<"cause">>, RangeTo}]),
            lager:debug("range_error: ~s", [Msg]),
            cb_context:add_validation_error(Path, <<"date_range">>, JObj, Context);
        _ ->
            {RangeFrom, RangeTo}
    end.
%%------------------------------------------------------------------------------
%% @doc Returns a function to get `doc' object from each view result.
%% @end
%%------------------------------------------------------------------------------
-spec get_doc_fun() -> mapper_fun().
%% Folds each view row's `doc' object (present when include_docs is set)
%% onto the accumulator.
get_doc_fun() -> fun(JObj, Acc) -> [kz_json:get_json_value(<<"doc">>, JObj)|Acc] end.
%%------------------------------------------------------------------------------
%% @doc Returns a function to get `value' object from each view result.
%% @end
%%------------------------------------------------------------------------------
-spec get_value_fun() -> mapper_fun().
%% Folds each view row's `value' onto the accumulator.
get_value_fun() -> fun(JObj, Acc) -> [kz_json:get_value(<<"value">>, JObj)|Acc] end.
%%------------------------------------------------------------------------------
%% @doc Returns a function to get `key' from each view result.
%% @end
%%------------------------------------------------------------------------------
-spec get_key_fun() -> mapper_fun().
get_key_fun() -> fun(JObj, Acc) -> [kz_json:get_value(<<"key">>, JObj)|Acc] end.
%%------------------------------------------------------------------------------
%% @doc Returns a function to get the document `id' from each view result.
%% @end
%%------------------------------------------------------------------------------
-spec get_id_fun() -> mapper_fun().
get_id_fun() -> fun(JObj, Acc) -> [kz_doc:id(JObj)|Acc] end.
%%------------------------------------------------------------------------------
%% @doc If pagination available, returns page size.
%%
%% <div class="notice">DO NOT ADD ONE (1) TO PAGE_SIZE OR LIMIT YOURSELF!
%% It will be added by this module during querying.</div>
%% If `paginate=false` is explicitly set, still load results in pages but check
%% process' memory usage on each page, terminating if memory exceeds a threshold
%% @end
%%------------------------------------------------------------------------------
-spec get_page_size(cb_context:context(), options()) -> page_size().
%% An explicit 'limit' option wins over the request's page size; when
%% pagination is disabled (option or context) 'infinity' is returned.
get_page_size(Context, Options) ->
    case props:is_true('should_paginate', Options, 'true')
        andalso cb_context:should_paginate(Context)
    of
        'false' ->
            lager:debug("pagination disabled in context or option"),
            'infinity';
        'true' ->
            case props:get_value('limit', Options) of
                'undefined' ->
                    get_page_size_from_request(Context);
                Limit ->
                    lager:debug("got limit from options: ~b", [Limit]),
                    Limit
            end
    end.
%%%=============================================================================
%%% Load view internal functions
%%%=============================================================================
%%------------------------------------------------------------------------------
%% @doc Load view results based on options. If the request is chunked
%% finish the chunk if it's started and set is_chunked or return
%% the {@link cb_cowboy_payload()} back to {@link api_resource} and {@link api_util}.
%% @end
%%------------------------------------------------------------------------------
-spec load_view(load_params() | cb_context:context(), cb_context:context()) -> cb_context:context().
%% Chunked requests defer the actual querying: the load parameters are
%% stashed on the context and next_chunk/1 is registered so the HTTP layer
%% can drive the view page by page.
load_view(#{is_chunked := 'true'
           ,direction := Direction
           ,has_qs_filter := HasQSFilter
           }=LoadMap, Context) ->
    Setters = [{fun cb_context:set_doc/2, []}
              ,{fun cb_context:set_resp_data/2, []}
              ,{fun cb_context:set_resp_status/2, 'success'}
              ,{fun cb_context:store/3, 'is_chunked', 'true'}
              ,{fun cb_context:store/3, 'next_chunk_fun', fun next_chunk/1}
              ,{fun cb_context:store/3, 'chunking_started', 'false'}
              ,{fun cb_context:store/3, 'view_direction', Direction}
              ,{fun cb_context:store/3, 'has_qs_filter', HasQSFilter}
              ,{fun cb_context:store/3, 'load_view_opts', LoadMap}
              ],
    cb_context:setters(Context, Setters);
%% Non-chunked requests run the whole query up front and format the response.
load_view(#{direction := Direction
           ,has_qs_filter := HasQSFilter
           }=LoadMap, Context) ->
    Setters = [{fun cb_context:set_doc/2, []}
              ,{fun cb_context:set_resp_data/2, []}
              ,{fun cb_context:set_resp_status/2, 'success'}
              ,{fun cb_context:store/3, 'view_direction', Direction}
              ,{fun cb_context:store/3, 'has_qs_filter', HasQSFilter}
              ],
    format_response(get_results(LoadMap#{context => cb_context:setters(Context, Setters)}));
%% A bare context here means building the load params already failed.
load_view(ContextError, _) ->
    ContextError.
%%------------------------------------------------------------------------------
%% @doc The function which is called by {@link api_resource} to get the next
%% chunk from the view for a chunked request. It checks whether page size is
%% exhausted or not to continue querying the same database or go to the next
%% database.
%%
%% It sets `chunking_finished' after the database or page size is exhausted.
%% @end
%%------------------------------------------------------------------------------
-spec next_chunk(map()) -> map().
%% no databases left at all: finalize the chunked response
next_chunk(#{options := #{databases := []}
            ,previous_chunk_length := PrevLength
            ,total_queried := TotalQueried
            }=ChunkMap) ->
    lager:debug("(chunked) databases exhausted"),
    ChunkMap#{total_queried => TotalQueried + PrevLength
             ,chunking_finished => 'true'
             };
%% page_size is exhausted when query is limited by page_size
%% Condition: page_size = total_queried + current_db_results
%% and the last key has been found.
next_chunk(#{options := #{page_size := PageSize}
            ,last_key := LastKey
            ,total_queried := TotalQueried
            ,previous_chunk_length := PrevLength
            }=ChunkMap)
  when is_integer(PageSize)
       andalso PageSize > 0
       andalso TotalQueried + PrevLength =:= PageSize
       andalso LastKey =/= 'undefined' ->
    lager:debug("(chunked) page size exhausted: ~b", [PageSize]),
    ChunkMap#{total_queried => TotalQueried + PrevLength
             ,chunking_finished => 'true'
             };
%% query next chunk from same db when query is chunked
%% Condition: the current last_key has been found and it's not equal to the previous last_key
next_chunk(#{options := #{last_key := OldLastKey}=LoadMap
            ,last_key := LastKey
            ,total_queried := TotalQueried
            ,previous_chunk_length := PrevLength
            ,context := Context
            }=ChunkMap)
  when OldLastKey =/= LastKey,
       LastKey =/= 'undefined' ->
    lager:debug("(chunked) db has more chunks to give, querying same db again"),
    chunk_map_roll_in(ChunkMap, get_results(LoadMap#{total_queried => TotalQueried + PrevLength
                                                    ,context => Context
                                                    ,last_key => LastKey
                                                    ,previous_chunk_length => 0
                                                    }));
%% only one database is left and it does not have any more results to give, so the request is completed
next_chunk(#{options := #{databases := [_]}
            ,previous_chunk_length := PrevLength
            ,total_queried := TotalQueried
            }=ChunkMap) ->
    lager:debug("(chunked) databases exhausted"),
    ChunkMap#{total_queried => TotalQueried + PrevLength
             ,chunking_finished => 'true'
             };
%% just query the next database
next_chunk(#{options := #{databases := [_|RestDbs], last_key := LastKey}=LoadMap
            ,total_queried := TotalQueried
            ,previous_chunk_length := PrevLength
            ,context := Context
            }=ChunkMap) ->
    lager:debug("(chunked) querying next db"),
    chunk_map_roll_in(ChunkMap, get_results(LoadMap#{total_queried => TotalQueried + PrevLength
                                                    ,databases => RestDbs
                                                    ,context => Context
                                                    ,last_key => LastKey
                                                    ,previous_chunk_length => 0
                                                    }));
%% starting chunked query
next_chunk(#{context := Context}=ChunkMap) ->
    case cb_context:fetch(Context, 'load_view_opts') of
        #{databases := []} ->
            lager:debug("(chunked) databases exhausted"),
            ChunkMap#{chunking_finished => 'true'};
        #{}=LoadMap ->
            %% clear the stored options so subsequent calls hit the clauses above
            chunk_map_roll_in(ChunkMap
                             ,get_results(LoadMap#{context => cb_context:store(Context, 'load_view_opts', 'undefined')
                                                  ,previous_chunk_length => 0
                                                  }))
    end.
%% Copy the paging state produced by get_results/1 back into the chunk map.
%% The previous last key is stored back into 'options' so the next iteration
%% can tell whether the current database yielded a new key.
-spec chunk_map_roll_in(map(), load_params()) -> map().
chunk_map_roll_in(#{last_key := OldLastKey}=ChunkMap
                 ,#{start_key := StartKey
                   ,last_key := LastKey
                   ,total_queried := TotalQueried
                   ,previous_chunk_length := PrevLength
                   ,context := Context
                   }=LoadMap) ->
    ChunkMap#{start_key => StartKey
             ,last_key => LastKey
             ,total_queried => TotalQueried
             ,previous_chunk_length => PrevLength
             ,context => Context
             ,options => maps:remove(context, LoadMap#{last_key => OldLastKey}) %% to be checked in the next iteration
             }.
%%------------------------------------------------------------------------------
%% @doc Fold over databases, fetch results from each and count total results.
%% If pagination is requested keeps track of last key.
%% If `page_size' is not in the options, make unlimited get_results.
%%
%% Based on chunked, limited or unlimited query, get the correct
%% Limit for this loop (if it's a limited query) and do the query.
%%
%% We use limit (limit + 1) to get an extra object (if available) to
%% get last object's key as the `next_start_key'. If the page size
%% has been satisfied and the last key has been found, return the result;
%% if the last key is not defined, query next DBs until DBs are exhausted.
%%
%% If `chunked_size' is lower than sum of the `total_queried' and
%% `current_db_length', we set the chunk_size as the limit. In this
%% case the db may return up to the limit size result; if the last_key
%% is defined it means the db has more results to give, so we query
%% the same db again, until the page size is satisfied or no last_key is
%% defined. In that case if page size is not exhausted yet we query
%% the next db.
%%
%% @end
%%------------------------------------------------------------------------------
-spec get_results(load_params()) -> load_params().
get_results(#{databases := []}=LoadMap) ->
    lager:debug("databases exhausted"),
    LoadMap;
get_results(#{databases := [Db|RestDbs]
             ,view := View
             ,view_options := ViewOpts
             ,direction := Direction
             ,is_chunked := IsChunked
             ,chunk_size := ChunkSize
             ,total_queried := TotalQueried
             ,context := Context
             ,last_key := LastKey
             ,page_size := PageSize
             ,start_key := StartKey
             }=LoadMap) ->
    %% limit includes one extra row so the next start key can be learned
    LimitWithLast = limit_with_last_key(IsChunked, PageSize, ChunkSize, TotalQueried),

    lager:debug("querying view '~s' from '~s', starting at '~p' with page size ~p and limit ~p in direction ~s"
               ,[View, Db, StartKey, PageSize, LimitWithLast, Direction]
               ),

    %% resume from the last seen key when paging; otherwise the caller's startkey
    NextStartKey = case LastKey of
                       'undefined' -> props:get_value('startkey', ViewOpts);
                       _ -> LastKey
                   end,

    ViewOptions = props:filter_undefined(
                    [{'limit', LimitWithLast}
                    ,{'startkey', NextStartKey}
                     | props:delete('startkey', ViewOpts)
                    ]),

    lager:debug("kz_datamgr:get_results(~p, ~p, ~p)", [Db, View, ViewOptions]),

    case kz_datamgr:get_results(Db, View, ViewOptions) of
        {'error', 'not_found'} when [] =:= RestDbs ->
            lager:debug("either the db ~s or view ~s was not found", [Db, View]),
            LoadMap#{context => crossbar_util:response_missing_view(Context)};
        {'error', 'not_found'} ->
            %% a missing database/view is not fatal while other databases remain
            lager:debug("either the db ~s or view ~s was not found, querying next db...", [Db, View]),
            get_results(LoadMap#{databases => RestDbs});
        {'error', Error} ->
            lager:debug("failed to query view ~s from db ~s: ~p", [View, Db, Error]),
            LoadMap#{context => crossbar_doc:handle_datamgr_errors(Error, View, Context)};
        {'ok', JObjs} ->
            handle_query_results(LoadMap, JObjs, LimitWithLast)
    end.
%% Guard each fetched page against runaway process memory use before
%% processing it; the cap comes from system config and may be unset.
handle_query_results(LoadMap, JObjs, LimitWithLast) ->
    [{'memory', End}] = process_info(self(), ['memory']),
    MemoryLimit = kapps_config:get_integer(?CONFIG_CAT, <<"request_memory_limit">>),
    handle_query_results(LoadMap, JObjs, LimitWithLast, End, MemoryLimit).

%% no memory cap configured: always process the page
handle_query_results(LoadMap, JObjs, LimitWithLast, _End, 'undefined') ->
    process_query_results(LoadMap, JObjs, LimitWithLast);
%% under the cap: process the page
handle_query_results(LoadMap, JObjs, LimitWithLast, MemoryUsed, MemoryLimit) when MemoryUsed < MemoryLimit ->
    lager:debug("under memory cap of ~p: ~p used", [MemoryLimit, MemoryUsed]),
    process_query_results(LoadMap, JObjs, LimitWithLast);
%% over the cap: abort the request with a range-not-satisfiable response
handle_query_results(#{context := Context}=LoadMap, _JObjs, _LimitWithLast, _TooMuch, _Limit) ->
    lager:warning("memory used ~p exceeds limit ~p", [_TooMuch, _Limit]),
    LoadMap#{context => crossbar_util:response_range_not_satisfiable(Context)}.
%% Run the user-supplied mapper over the page inside a try/catch: for chunked
%% requests the HTTP response may already be started, so a crash here must be
%% converted into a datastore_fault instead of propagating.
process_query_results(#{databases := [Db|_]=Dbs
                       ,context := Context
                       ,view := View
                       }=LoadMap
                     ,JObjs
                     ,LimitWithLast
                     ) ->
    %% catching crashes when applying users map functions (filter map)
    %% so we can handle errors when request is chunked and chunk is already started
    try handle_query_result(LoadMap, Dbs, JObjs, LimitWithLast)
    catch
        ?STACKTRACE(_E, _T, ST)
        lager:warning("exception occurred during querying db ~s for view ~s : ~p:~p", [Db, View, _E, _T]),
        kz_log:log_stacktrace(ST),
        LoadMap#{context => cb_context:add_system_error('datastore_fault', Context)}
    end.
%%------------------------------------------------------------------------------
%% @doc Apply filter to result, find last key.
%% Then based on page_size, limit, result length and last key see
%% we're done or shall we continue.
%% @end
%%------------------------------------------------------------------------------
-spec handle_query_result(load_params(), kz_term:ne_binaries(), kz_json:objects(), kz_term:api_pos_integer()) -> load_params().
handle_query_result(#{last_key := LastKey
                     ,mapper := Mapper
                     ,context := Context
                     ,page_size := PageSize
                     }=LoadMap
                   ,Dbs
                   ,Results
                   ,Limit
                   ) ->
    ResultsLength = erlang:length(Results),

    %% peel off the extra (limit+1) row, if present, as the next start key;
    %% note this also reverses the page (see apply_filter/2)
    {NewLastKey, JObjs} = last_key(LastKey, Results, Limit, ResultsLength, PageSize),

    case apply_filter(Mapper, JObjs) of
        {'error', Reason} ->
            %% the mapper asked to abort the whole request
            LoadMap#{context => cb_context:add_system_error('datastore_fault', kz_term:to_binary(Reason), Context)};
        FilteredJObjs when is_list(FilteredJObjs) ->
            FilteredLength = length(FilteredJObjs),
            lager:debug("db_returned: ~b(~p) passed_filter: ~p next_start_key: ~p"
                       ,[ResultsLength, PageSize, FilteredLength, NewLastKey]
                       ),
            handle_query_result(LoadMap, Dbs, FilteredJObjs, FilteredLength, NewLastKey)
    end.
-spec handle_query_result(load_params(), kz_term:ne_binaries(), kz_json:object() | kz_json:objects(), non_neg_integer(), last_key()) -> load_params().
%% chunked: hand this page to the chunk sender via the context and record paging state
handle_query_result(#{is_chunked := 'true'
                     ,context := Context
                     }=LoadMap
                   ,[Db|_]
                   ,FilteredJObjs
                   ,FilteredLength
                   ,NewLastKey
                   ) ->
    Setters = [{fun cb_context:set_resp_data/2, FilteredJObjs}
              ,{fun cb_context:set_db_name/2, Db}
              ],
    Context1 = cb_context:setters(Context, Setters),
    LoadMap#{last_key => NewLastKey
            ,context => Context1
            ,previous_chunk_length => FilteredLength
            };
%% non-chunked: accumulate and decide whether to stop or keep querying
handle_query_result(#{page_size := _PageSize
                     ,last_key := _OldLastKey
                     }=LoadMap
                   ,[_|RestDbs]
                   ,FilteredJObjs
                   ,FilteredLength
                   ,NewLastKey
                   ) ->
    case check_page_size_and_length(LoadMap, FilteredLength, FilteredJObjs, NewLastKey) of
        {'exhausted', LoadMap2} -> LoadMap2;
        {'next_db', LoadMap2} when NewLastKey =/= 'undefined' ->
            %% the current db still has rows: stay on it and advance the key
            lager:debug("updating new last key to ~p from ~p", [NewLastKey, _OldLastKey]),
            get_results(LoadMap2#{last_key => NewLastKey});
        {'next_db', LoadMap2} -> get_results(LoadMap2#{databases => RestDbs})
    end.
%%------------------------------------------------------------------------------
%% @doc Check page size is exhausted or not.
%% @end
%%------------------------------------------------------------------------------
-spec check_page_size_and_length(load_params(), non_neg_integer(), kz_json:objects(), last_key()) ->
          {'exhausted' | 'next_db', load_params()}.
%% pagination disabled: always accumulate and continue with the next database
check_page_size_and_length(#{page_size := 'infinity'
                            ,queried_jobjs := QueriedJObjs
                            ,total_queried := TotalQueried
                            }=LoadMap
                          ,Length
                          ,FilteredJObjs
                          ,LastKey
                          ) ->
    {'next_db', LoadMap#{total_queried => TotalQueried + Length
                        ,queried_jobjs => QueriedJObjs ++ FilteredJObjs
                        ,last_key => LastKey
                        }
    };
%% page_size is exhausted when query is limited by page_size
%% Condition: page_size = total_queried + current_db_results
%% and the last key has been found.
check_page_size_and_length(#{page_size := PageSize
                            ,queried_jobjs := QueriedJObjs
                            ,total_queried := TotalQueried
                            }=LoadMap
                          ,Length
                          ,FilteredJObjs
                          ,LastKey
                          )
  when is_integer(PageSize)
       andalso PageSize > 0
       andalso TotalQueried + Length == PageSize
       andalso LastKey =/= 'undefined' ->
    lager:debug("page size exhausted: ~b", [PageSize]),
    {'exhausted', LoadMap#{total_queried => TotalQueried + Length
                          ,queried_jobjs => QueriedJObjs ++ FilteredJObjs
                          ,last_key => LastKey
                          }
    };
%% page not exhausted yet: accumulate and continue with the next database
check_page_size_and_length(#{total_queried := TotalQueried
                            ,queried_jobjs := QueriedJObjs
                            }=LoadMap
                          ,Length
                          ,FilteredJObjs
                          ,LastKey
                          ) ->
    {'next_db', LoadMap#{total_queried => TotalQueried + Length
                        ,queried_jobjs => QueriedJObjs ++ FilteredJObjs
                        ,last_key => LastKey
                        }
    }.
%%------------------------------------------------------------------------------
%% @doc Find out db request limit to use based on chunk size and remaining
%% amount to satisfy page_size.
%% @end
%%------------------------------------------------------------------------------
-spec limit_with_last_key(boolean(), page_size(), pos_integer(), non_neg_integer()) ->
          kz_term:api_pos_integer().
%% Compute the datastore 'limit' for the next query. One extra row is always
%% requested (the "+ 1") so the last row's key can become the next start key.
limit_with_last_key(IsChunked, PageSize, ChunkSize, TotalQueried) ->
    case {IsChunked, PageSize} of
        %% non-chunked unlimited request => no limit at all
        {'false', 'undefined'} -> 'undefined';
        %% pagination explicitly disabled: still page through by chunk size
        {_, 'infinity'} -> ChunkSize + 1;
        %% non-chunked limited request: fetch whatever remains of the page
        {'false', _} -> PageSize - TotalQueried + 1;
        %% chunked unlimited request: one chunk at a time
        {'true', 'undefined'} -> ChunkSize + 1;
        %% chunked limited request: the smaller of chunk size and what remains
        {'true', _} ->
            Remaining = PageSize - TotalQueried,
            case ChunkSize < Remaining of
                'true' -> ChunkSize + 1;
                'false' -> Remaining + 1
            end
    end.
%%------------------------------------------------------------------------------
%% @doc Apply filter/mapper function if provided while maintaining
%% the result order.
%%
%% Mapper function can be arity 1 (operating on a list of JObjs) or
%% arity 2 of `(JObj, Acc)'.
%%
%% Take note that because of the call to {@link last_key/4} the result set is
%% in reverse order when passed to the mapper/filter function.
%%
%% If you use a mapper function of arity 1, you have to reverse your result
%% before returning, for example you can use {@link lists:foldl/3} to do the
%% folding or {@link lists:reverse/1} when returning the end result.
%%
%% If mapper is an arity 2 function, the output should be in the same order
%% as input; in other words, whatever you're doing, DO NOT change the order!
%%
%% If something goes wrong and you want to stop the request, return an error
%% tuple with a reason (preferred as binary) as the second element.
%% @end
%%------------------------------------------------------------------------------
-spec apply_filter(mapper_fun(), kz_json:objects()) ->
          kz_json:objects() |
          kz_json:object() |
          {'error', any()}.
apply_filter(_Mapper, []) -> [];
apply_filter('undefined', JObjs) ->
    %% no mapper: just undo the reversal introduced by last_key/5
    lists:reverse(JObjs);
apply_filter(Mapper, JObjs) when is_function(Mapper, 1) ->
    %% an arity-1 mapper is responsible for returning a correctly ordered list
    Mapper(JObjs);
apply_filter(Mapper, JObjs) when is_function(Mapper, 2) ->
    filter_foldl(Mapper, JObjs, []).

-spec filter_foldl(mapper_fun(), kz_json:objects(), kz_json:objects()) ->
          kz_json:objects() |
          {'error', any()}.
%% Fold the arity-2 mapper over the objects, stopping early on an error tuple
%% and dropping empty objects from the final accumulator.
filter_foldl(_Mapper, [], Acc) ->
    lists:filter(fun(JObj) -> not kz_term:is_empty(JObj) end, Acc);
filter_foldl(Mapper, [JObj|Rest], Acc) ->
    case Mapper(JObj, Acc) of
        {'error', _}=Error -> Error;
        NewAcc -> filter_foldl(Mapper, Rest, NewAcc)
    end.
%%------------------------------------------------------------------------------
%% @doc Figure out the last key if we have some result and page size is not
%% exhausted yet.
%% @end
%%------------------------------------------------------------------------------
-spec last_key(last_key(), kz_json:objects(), non_neg_integer() | 'undefined', non_neg_integer(), page_size()) ->
          {last_key(), kz_json:objects()}.
%% empty result: keep the previous last key
last_key(LastKey, [], _Limit, _Returned, _PageSize) ->
    lager:debug("no results same last key ~p", [LastKey]),
    {LastKey, []};
%% unlimited query: no extra row was requested, nothing to peel off;
%% note the page is returned reversed (see apply_filter/2)
last_key(LastKey, JObjs, 'undefined', _Returned, _PageSize) ->
    lager:debug("no limit, re-using last key ~p", [LastKey]),
    {LastKey, lists:reverse(JObjs)};
%% exactly 'limit' rows came back (repeated variable in the head): the extra
%% row is present, so its key becomes the next start key
last_key(_LastKey, JObjs, Limit, Limit, _PageSize) ->
    lager:debug("full page fetched, calculating new key"),
    new_last_key(JObjs);
%% fewer rows than the limit: this database is drained, no next key
last_key(_LastKey, JObjs, _Limit, _Returned, _PageSize) ->
    lager:debug("returned page ~p smaller than page limit ~p", [_Returned, _Limit]),
    {'undefined', lists:reverse(JObjs)}.
%% Peel the final row (the extra limit+1 row) off the result list: its view
%% key becomes the next start key; the remaining rows are returned reversed
%% (apply_filter/2 and its mappers expect/undo that reversal).
-spec new_last_key(kz_json:objects()) -> {last_key(), kz_json:objects()}.
new_last_key(JObjs) ->
    [Last|JObjs1] = lists:reverse(JObjs),
    {kz_json:get_value(<<"key">>, Last), JObjs1}.
%%------------------------------------------------------------------------------
%% @doc If the last key is known set as the `next_start_key' in the
%% response envelope.
%% @end
%%------------------------------------------------------------------------------
-spec format_response(load_params()) -> cb_context:context().
%% On success build the paginated envelope; otherwise hand back the errored context.
format_response(#{context := Context}=LoadMap) ->
    case 'success' =:= cb_context:resp_status(Context) of
        'true' -> format_success_response(LoadMap);
        'false' -> Context
    end.
-spec format_success_response(load_params()) -> cb_context:context().
%% Attach pagination fields (start_key, page_size, next_start_key) to the
%% response envelope and finalize the response via crossbar_doc.
format_success_response(#{total_queried := TotalQueried
                         ,queried_jobjs := JObjs
                         ,context := Context
                         ,last_key := NextStartKey
                         ,start_key := StartKey
                         }) ->
    Envelope = add_paging(StartKey, TotalQueried, NextStartKey, cb_context:resp_envelope(Context)),
    crossbar_doc:handle_datamgr_success(JObjs, cb_context:set_resp_envelope(Context, Envelope)).
-spec add_paging(api_range_key(), non_neg_integer(), api_range_key(), kz_json:object()) -> kz_json:object().
%% Replace any stale pagination keys in the envelope with freshly computed ones.
add_paging(StartKey, PageSize, NextStartKey, JObj) ->
    Pagination = [{<<"start_key">>, StartKey}
                 ,{<<"page_size">>, PageSize}
                 ,{<<"next_start_key">>, NextStartKey}
                 ],
    Cleaned = kz_json:delete_keys([Key || {Key, _} <- Pagination], JObj),
    kz_json:set_values(Pagination, Cleaned).
%%%=============================================================================
%%% Build load view parameters internal functions
%%%=============================================================================
%%------------------------------------------------------------------------------
%% @doc Generates general crossbar_view options map for querying view.
%% @end
%%------------------------------------------------------------------------------
-spec build_general_load_params(cb_context:context(), kz_term:ne_binary(), options()) -> load_params() | cb_context:context().
build_general_load_params(Context, View, Options) ->
    Direction = direction(Context, Options),
    %% get_chunk_size/2 and get_page_size/2 throw {'error', Msg} on invalid
    %% client-supplied values; convert that into a faulty_request response
    try maps:from_list(
          [{'chunk_size', get_chunk_size(Context, Options)}
          ,{'databases', props:get_value('databases', Options, [cb_context:db_name(Context)])}
          ,{'direction', Direction}
          ,{'is_chunked', is_chunked(Context, Options)}
          ,{'last_key', 'undefined'}
          ,{'page_size', get_page_size(Context, Options)}
          ,{'queried_jobjs', []}
          ,{'should_paginate', cb_context:should_paginate(Context)}
          ,{'total_queried', 0}
          ,{'view', View}
          ])
    catch
        'throw':{'error', ErrorMsg} ->
            cb_context:add_system_error(404, 'faulty_request', ErrorMsg, Context)
    end.
-spec is_chunked(cb_context:context(), options()) -> boolean().
%% Chunking is on when the query string (or the option default) requests it
%% and the endpoint has not been marked 'unchunkable'.
is_chunked(Context, Options) ->
    Default = props:get_is_true('is_chunked', Options, 'false'),
    Requested = kz_json:is_true(<<"is_chunked">>, cb_context:query_string(Context), Default),
    Unchunkable = props:get_is_true('unchunkable', Options, 'false'),
    Requested
        andalso not Unchunkable.
%%------------------------------------------------------------------------------
%% @doc Create ranged view lookup database list using start/end time and
%% direction.
%% @end
%%------------------------------------------------------------------------------
-spec get_range_modbs(cb_context:context(), options(), direction(), kz_time:gregorian_seconds(), kz_time:gregorian_seconds()) ->
          kz_term:ne_binaries().
%% Build the ascending MODB list (from options or the account's time range),
%% then flip it when the query direction is descending.
get_range_modbs(Context, Options, Direction, StartTime, EndTime) ->
    Ascending =
        case props:get_value('databases', Options) of
            'undefined' ->
                kazoo_modb:get_range(cb_context:account_id(Context), StartTime, EndTime);
            Dbs -> lists:usort(Dbs)
        end,
    case Direction of
        'ascending' -> Ascending;
        'descending' -> lists:reverse(Ascending)
    end.
%%------------------------------------------------------------------------------
%% @doc Create ranged view lookup database list using start/end time and
%% direction.
%% @end
%%------------------------------------------------------------------------------
-spec get_range_yodbs(cb_context:context(), options(), direction(), kz_time:gregorian_seconds(), kz_time:gregorian_seconds()) ->
          kz_term:ne_binaries().
%% Build the ascending YODB list (from options or the account's time range),
%% then flip it when the query direction is descending.
get_range_yodbs(Context, Options, Direction, StartTime, EndTime) ->
    Ascending =
        case props:get_value('databases', Options) of
            'undefined' ->
                kazoo_yodb:get_range(cb_context:account_id(Context), StartTime, EndTime);
            Dbs -> lists:usort(Dbs)
        end,
    case Direction of
        'ascending' -> Ascending;
        'descending' -> lists:reverse(Ascending)
    end.
%% Resolve the chunk size: a query-string value wins but must be a positive
%% integer no larger than the system maximum; otherwise the option/system
%% default is used. Throws {'error', Msg} (caught in
%% build_general_load_params/3) on invalid client input.
-spec get_chunk_size(cb_context:context(), options()) -> kz_term:api_pos_integer().
get_chunk_size(Context, Options) ->
    SystemSize = kapps_config:get_pos_integer(?CONFIG_CAT, <<"load_view_chunk_size">>, 50),
    OptionsSize = props:get_integer_value('chunk_size', Options, SystemSize),
    case kz_json:get_value(<<"chunk_size">>, cb_context:query_string(Context)) of
        'undefined' -> OptionsSize;
        Size ->
            try kz_term:to_integer(Size) of
                ChunkSize when ChunkSize > 0,
                               ChunkSize =< SystemSize ->
                    ChunkSize;
                %% was `ChunkSize < 0': a value of exactly 0 matched no
                %% `of' clause and raised an uncaught try_clause error
                %% (the catch section does not protect the `of' clauses),
                %% crashing the request instead of throwing the intended error
                ChunkSize when ChunkSize =< 0 ->
                    throw({'error', <<"chunk size must be at least 1">>});
                ChunkSize when ChunkSize > SystemSize ->
                    throw({'error', <<"chunk size must be lower than ", (integer_to_binary(SystemSize))/binary>>})
            catch
                'throw':{'error', _}=Error -> throw(Error);
                _:_ ->
                    throw({'error', <<"invalid chunk size">>})
            end
    end.
-spec maybe_set_start_end_keys(load_params(), api_range_key(), api_range_key()) -> load_params().
%% Store only the keys which are actually defined, leaving the map untouched otherwise.
maybe_set_start_end_keys(LoadMap, StartKey, EndKey) ->
    WithStart = case StartKey of
                    'undefined' -> LoadMap;
                    _ -> LoadMap#{start_key => StartKey}
                end,
    case EndKey of
        'undefined' -> WithStart;
        _ -> WithStart#{end_key => EndKey}
    end.
%% Resolve the page size from the request, falling back to the system default.
%% Throws {'error', Msg} (caught in build_general_load_params/3) for invalid
%% or non-positive values; throws from the `of' clauses deliberately escape
%% this try's own catch section.
-spec get_page_size_from_request(cb_context:context()) -> pos_integer().
get_page_size_from_request(Context) ->
    case cb_context:req_value(Context, <<"page_size">>) of
        'undefined' -> cb_context:pagination_page_size();
        Size ->
            try kz_term:to_integer(Size) of
                PageSize when PageSize > 0 -> PageSize;
                _ ->
                    throw({'error', <<"page size must be at least 1">>})
            catch
                _:_ ->
                    throw({'error', <<"invalid page size">>})
            end
    end.
%%------------------------------------------------------------------------------
%% @doc Get time key value from options or request.
%% @end
%%------------------------------------------------------------------------------
-spec get_time_key(cb_context:context(), kz_term:ne_binary(), options(), pos_integer()) -> pos_integer().
%% Resolve a range-boundary timestamp: an integer option value wins; otherwise
%% the request value is cast to an integer, falling back to Default when the
%% value is missing, unparsable or non-positive.
get_time_key(Context, Key, Options, Default) ->
    case props:get_integer_value(Key, Options) of
        'undefined' ->
            case kz_term:safe_cast(cb_context:req_value(Context, Key), Default, fun kz_term:to_integer/1) of
                T when T > 0 -> T;
                _ -> Default
            end;
        Value -> Value
    end.
%%------------------------------------------------------------------------------
%% @doc Get `max_range' from option or system config.
%% @end
%%------------------------------------------------------------------------------
-spec get_max_range(options()) -> pos_integer().
%% Option value wins; otherwise fall back to the system-wide ?MAX_RANGE.
get_max_range(Options) ->
    props:get_integer_value('max_range', Options, ?MAX_RANGE).
%%------------------------------------------------------------------------------
%% @doc Build customized start/end key mapper.
%% @end
%%------------------------------------------------------------------------------
-spec get_start_end_keys(cb_context:context(), options()) -> {api_range_key(), api_range_key()}.
%% A single 'keymap' option maps both keys; otherwise each side resolves from
%% its own options, with explicit 'startkey'/'endkey' taking precedence.
get_start_end_keys(Context, Options) ->
    {StartMapper, EndMapper} =
        case props:get_value('keymap', Options) of
            'undefined' ->
                {props:get_first_defined(['startkey', 'start_keymap'], Options)
                ,props:get_first_defined(['endkey', 'end_keymap'], Options)
                };
            KeyMap -> {KeyMap, KeyMap}
        end,
    {map_keymap(Context, Options, StartMapper)
    ,map_keymap(Context, Options, EndMapper)
    }.
-spec map_keymap(cb_context:context(), options(), keymap()) -> api_range_key().
%% Normalize a keymap option into a concrete range key: funs are invoked
%% (arity 1 gets the context, arity 2 gets options and context); anything
%% else is used as the key literally.
map_keymap(Context, _, Fun) when is_function(Fun, 1) -> Fun(Context);
map_keymap(Context, Options, Fun) when is_function(Fun, 2) -> Fun(Options, Context);
map_keymap(_, _, ApiRangeKey) -> ApiRangeKey.
%%------------------------------------------------------------------------------
%% @doc See {@link ranged_start_end_keys/2} for explaining of options and range_keymap.
%% @end
%%------------------------------------------------------------------------------
-spec get_range_key_maps(options()) -> {range_keymap_fun(), range_keymap_fun()}.
%% A single 'range_keymap' option maps both boundaries; otherwise each side
%% uses its own option, with explicit 'startkey'/'endkey' taking precedence.
get_range_key_maps(Options) ->
    case props:get_value('range_keymap', Options) of
        'undefined' ->
            {map_range_keymap(props:get_first_defined(['startkey', 'range_start_keymap'], Options))
            ,map_range_keymap(props:get_first_defined(['endkey', 'range_end_keymap'], Options))
            };
        KeyMap -> {map_range_keymap(KeyMap), map_range_keymap(KeyMap)}
    end.
-spec map_range_keymap(range_keymap()) -> range_keymap_fun().
%% Normalize the configured range keymap into a fun(Timestamp) -> range key.
map_range_keymap('nil') -> fun(_Timestamp) -> 'undefined' end;
map_range_keymap('undefined') -> fun kz_term:identity/1;
map_range_keymap(['undefined']) -> fun(Timestamp) -> [Timestamp] end;
map_range_keymap(MapFun) when is_function(MapFun, 1) -> MapFun;
map_range_keymap(Prefix) when is_binary(Prefix);
                              is_integer(Prefix) ->
    fun(Timestamp) -> [Prefix, Timestamp] end;
map_range_keymap(Prefix) when is_list(Prefix) ->
    fun(Timestamp) -> Prefix ++ [Timestamp] end.
| null | https://raw.githubusercontent.com/2600hz/kazoo/24519b9af9792caa67f7c09bbb9d27e2418f7ad6/applications/crossbar/src/crossbar_view.erl | erlang | -----------------------------------------------------------------------------
@doc
@end
-----------------------------------------------------------------------------
non-range query
chunked query
ranged query
start/end key length fixer
A literal CouchDB `startkey' or `endkey', or a {@link keymap_fun()} for non-range requests.
See also {@link start_end_keys/3}.
to construct the start or end key.
Creates a start/end key for ranged queries. A binary or integer or a list of binary or integer
to create start/end key. The timestamp will added to end of it.
If `undefined' only the timestamp will be used as the key. If timestamp in the view key is at start of the key,
timestamp as key.
A function to filter/map view result. Internal to {@link crossbar_view}.
for non-ranged query
for chunked query
for ranged/modb query
start/end key length fixer
Last key of the view result from previous iteration, also it is used to set `next_start_key'.
@equiv load(Context, View, [])
------------------------------------------------------------------------------
@doc This function attempts to load the context with the results of a view
run against the database.
@end
------------------------------------------------------------------------------
@equiv load_time_range(Context, View, [])
------------------------------------------------------------------------------
results of a view run against the database.
@end
------------------------------------------------------------------------------
@equiv load_modb(Context, View, [])
------------------------------------------------------------------------------
@doc This function attempts to load the context with the results of a view
@end
------------------------------------------------------------------------------
@equiv load_yodb(Context, View, [])
------------------------------------------------------------------------------
@doc This function attempts to load the context with the results of a view
run against the account's YODBs.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc
Takes {@link options()} and returns {@link load_params()}, for normal querying
of a view.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc
Takes {@link options()} and returns {@link load_params()}, for querying
of a view over a specified range of time.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc
Takes {@link options()} and returns {@link load_params()}, for querying
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc
Takes {@link options()} and returns {@link load_params()}, for querying
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Build CouchDB view options. It sets start/end keys,
{@link options()}.
<div class="notice">Do not set start or end keys in Options, use provided
special keys to generate start/end keys based on timestamp.</div>
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Returns start/end keys based on direction.
Returned tuple is `{start_key, end_key}'.
If `start_key' or `end_key' is present in the request (query string or payload)
they will be returned instead. Otherwise the keys will built by key map options.
<dl>
<dt>`keymap'</dt><dd>Use this to map both start/end keys.</dd>
<dt>`start_keymap'</dt><dd>Maps start key only.</dd>
<dt>`end_keymap'</dt><dd>Maps end key only.</dd>
</dl>
See also {@link direction/2} for `direction' option explanation.
<dl>
keys like `[<<"en">>, <<"us">>]'.</dd>
<dt>{@type keymap_fun()}</dt><dd>To customize your own key using a function.</dd>
</dl>
The keys will be swapped if direction is descending.
@see direction/2
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Returns start/end keys based on direction. Start/end timestamp will be
added to keys based on requested time range.
Returned tuple is `{start_key, end_key}'.
If `start_key' or `end_key' is present in the request (query string or payload)
they will be returned instead. Otherwise the keys will built by key map options.
<dl>
<dt>`range_keymap'</dt><dd>Use this to map both start/end keys.</dd>
<dt>`range_start_keymap'</dt><dd>maps start key only.</dd>
<dt>`range_end_keymap'</dt><dd>maps end key only.</dd>
</dl>
See also {@link direction/2} and {@link time_range/2} for explanation of
other options.
<dl>
<dt>{@type kz_term:ne_binary()}</dt><dd>Constructs keys like `[<<"account">>, Timestamp]'.</dd>
<dt>{@type list()}</dt><dd>Constructs keys like `[<<"en">>, <<"us">>, Timestamp]'.</dd>
<dt>{@type range_keymap_fun()}</dt><dd>Customize your own key using a function.</dd>
</dl>
The keys will be swapped if direction is descending.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Suffix the Timestamp to the provided key map option. Useful to use
{@link cb_cdrs} for example.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Depending on sort direction of the result set, ensure that the length of
the startkey/endkey supplied in the query is the same length as that returned
by the result set (grouped or not). This ensures that the result at the start
or end is not filtered out if it matches on all present keys.
@end
------------------------------------------------------------------------------
@equiv direction(Context, [])
------------------------------------------------------------------------------
@doc Find view sort direction from `Options' or request
query string. Default to `descending'.
@end
------------------------------------------------------------------------------
@equiv time_range(Context, [])
------------------------------------------------------------------------------
@doc Returns a time range for a ranged query based on the request (query string or payload) or `Options'
and default range based on system configuration (maximum range).
The start time, `created_from' (default), should always be prior to end time
`created_to'.
<strong>Options:</strong>
<dl>
<dt>`max_range'</dt><dd>Maximum range allowed. Default is the value of
<dt>`range_key'</dt><dd>The key name in query string to get values
from (created, modified or ...). Default is `created'.</dd>
<dt>`{RANGE_KEY}_from'</dt><dd>Start time.</dd>
<dt>`{RANGE_KEY}_to'</dt><dd>End time.</dd>
</dl>
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Checks whether or not end time is prior to start time. Returns a ranged
tuple `{start_time, end_time}' or `context' with validation error.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Returns a function to get `doc' object from each view result.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Returns a function to get `value' object from each view result.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Returns a function to get `value' object from each view result.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc If pagination available, returns page size.
It will be added by this module during querying.
If `paginate=false` is explicitly set, still load results in pages but check
process' memory usage on each page, terminating if memory exceeds a threshold
@end
------------------------------------------------------------------------------
=============================================================================
Load view internal functions
=============================================================================
------------------------------------------------------------------------------
@doc Load view results based on options. If the request is chunked
finish the chunk if it's started and set is_chunked or return
the {@link cb_cowboy_payload()} back to {@link api_resource} and {@link api_util}.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc The function which is called by {@link api_resource} to get next chunk
from view for chunked request. It checks whether page size is exhausted
or not to continue querying same database or go to next database.
It sets `chunking_finished' after the database or page size is exhausted.
@end
------------------------------------------------------------------------------
page_size is exhausted when query is limited by page_size
Condition: page_size = total_queried + current_db_results
and the last key has been found.
query next chunk from same db when query is chunked
Condition: the current last_key has been found and it's not equal to the previous last_key
starting chunked query
to be checked in the next iteration
------------------------------------------------------------------------------
If pagination is requested keeps track of last key.
If `page_size' is not in the options, make unlimited get_results.
Based on chunked, limited or unlimited query, get the correct
Limit for this loop (if it's limited query) and do the query.
We use limit (limit + 1) to get an extra object (if available) to
get last object's key as the `next_start_key'. If the page size
has been satisfied and the last key has been found, return the result,
If `chunked_size' is lower than sum of the `total_queried' and
`current_db_length', we set the chunk_size as the limit. In this
is defined it means the db has more results to give, so we query
the same db again, until the page size satisfied or no last_key is
defined. In that case if pages size is not exhausted yet we query
the next db.
@end
------------------------------------------------------------------------------
catching crashes when applying users map functions (filter map)
so we can handle errors when request is chunked and chunk is already started
------------------------------------------------------------------------------
@doc Apply filter to result, find last key.
Then based on page_size, limit, result length and last key see
we're done or shall we continue.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Check page size is exhausted or not.
@end
------------------------------------------------------------------------------
page_size is exhausted when query is limited by page_size
Condition: page_size = total_queried + current_db_results
and the last key has been found.
------------------------------------------------------------------------------
@doc Find out db request limit to use based on chunk size and remaining
amount to satisfy page_size.
@end
------------------------------------------------------------------------------
non-chunked unlimited request => no limit
explicitly disabled pagination
non-chunked limited request
chunked unlimited request
same chunk_size and page_size
chunk_size is lower than sum of remaining amount to query and page_size, forcing chunk_size
chunk_size is bigger than page_size, using page_size
------------------------------------------------------------------------------
@doc Apply filter/mapper function if provided while keep maintaining
the result order.
Take note that because of the call to {@link last_key/4} the result set is
in reverse order when passed to the mapper/filter function
returning, for example you can use {@link lists:foldl/3} to do the
folding or {@link lists:reverse/1} when returning the end result.
as input, in other words, whatever you're doing, DO NOT change the order!
If something goes wrong and you want to stop the request return an error tuple
@end
------------------------------------------------------------------------------
Can I trust you to return sorted result in the correct direction?
------------------------------------------------------------------------------
@doc Figure out the last key if we have some result and page size is not
exhausted yet.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc If the last key is known set as the `next_start_key' in the
response envelope.
@end
------------------------------------------------------------------------------
=============================================================================
Build load view parameters internal functions
=============================================================================
------------------------------------------------------------------------------
@doc Generates general crossbar_view options map for querying view.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Create ranged view lookup database list using start/end time and
direction.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Create ranged view lookup database list using start/end time and
direction.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Get time key value from options or request.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Get `max_range' from option or system config.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Build customized start/end key mapper.
@end
------------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc See {@link ranged_start_end_keys/2} for explaining of options and range_keymap.
@end
------------------------------------------------------------------------------ | ( C ) 2010 - 2020 , 2600Hz
@author
@author Hesaam
This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
%%%-----------------------------------------------------------------------------
%%% @doc Helpers to run (optionally ranged, chunked and/or paginated) CouchDB
%%% view queries and load their results into a `cb_context:context()'.
%%% @end
%%%-----------------------------------------------------------------------------
-module(crossbar_view).

%% Loading views into a context.
-export([load/2, load/3
,load_time_range/2, load_time_range/3
,load_modb/2, load_modb/3
,load_yodb/2, load_yodb/3
,next_chunk/1
,build_load_params/3
,build_load_time_range_params/3
,build_load_modb_params/3
,build_load_yodb_params/3
,direction/1, direction/2
,start_end_keys/2
,time_range/1, time_range/2
,ranged_start_end_keys/2
,get_page_size/2
,suffix_key_fun/1
,get_doc_fun/0
,get_value_fun/0
,get_key_fun/0
,get_id_fun/0
,high_value_key/0
]).

-include("crossbar.hrl").
%% Options consumed by this module only. They are removed from the option list
%% before it is handed to the datastore (see build_view_query/5).
-define(CB_SPECIFIC_VIEW_OPTIONS,
['ascending', 'databases', 'mapper', 'no_filter'
,'end_keymap', 'keymap', 'start_keymap'
,'chunk_size', 'is_chunked', 'unchunkable'
,'created_to', 'created_from', 'max_range'
,'range_end_keymap', 'range_key_name', 'range_keymap', 'range_start_keymap'
,'should_paginate'
,'key_min_length'
]).
-type direction() :: 'ascending' | 'descending'.
-type page_size() :: kz_term:api_pos_integer() | 'infinity'.

%% `{StartTimestamp, EndTimestamp}'.
-type time_range() :: {kz_time:gregorian_seconds(), kz_time:gregorian_seconds()}.

-type api_range_key() :: 'undefined' | ['undefined'] | kazoo_data:range_key().
%% `{StartKey, EndKey}'.
-type range_keys() :: {api_range_key(), api_range_key()}.

%% Function of arity 1 or 2 to create a customized start/end key.
-type keymap_fun() :: fun((cb_context:context()) -> api_range_key()) |
fun((cb_context:context(), kazoo_data:view_options()) -> api_range_key()).
-type keymap() :: api_range_key() | keymap_fun().

%% A function of arity 1. The timestamp from `created_from' or `created_to'
%% will be passed to this function.
-type range_keymap_fun() :: fun((kz_time:gregorian_seconds()) -> api_range_key()).
%% To suffix a key onto the timestamp use {@link suffix_key_fun/1}. If the view
%% doesn't need any start/end key you can set this to `nil' to bypass setting keys.
-type range_keymap() :: 'nil' | api_range_key() | range_keymap_fun().

%% A function to filter/map view results. For use by Crossbar modules calling
%% {@link crossbar_view} functions.
-type user_mapper_fun() :: 'undefined' |
fun((kz_json:objects()) -> kz_json:objects() | {'error', any()}) |
fun((kz_json:object(), kz_json:objects()) -> kz_json:objects() | {'error', any()}) |
fun((cb_context:context(), kz_json:object(), kz_json:objects()) -> kz_json:objects() | {'error', any()}).
%% Filter/map function applied per result set or per result object.
-type mapper_fun() :: 'undefined' |
fun((kz_json:objects()) -> kz_json:objects() | {'error', any()}) |
fun((kz_json:object(), kz_json:objects()) -> kz_json:objects() | {'error', any()}).

%% All options understood by this module; anything not listed here (or in
%% ?CB_SPECIFIC_VIEW_OPTIONS) is passed through as a regular view option.
-type options() :: kazoo_data:view_options() |
[{'databases', kz_term:ne_binaries()} |
{'mapper', user_mapper_fun()} |
{'max_range', pos_integer()} |
{'no_filter', boolean()} |
{'end_keymap', keymap()} |
{'keymap', keymap()} |
{'start_keymap', keymap()} |
{'chunk_size', pos_integer()} |
{'is_chunked', boolean()} |
{'created_from', pos_integer()} |
{'created_to', pos_integer()} |
{'range_end_keymap', range_keymap()} |
{'range_keymap', range_keymap()} |
{'range_key_name', kz_term:ne_binary()} |
{'range_start_keymap', range_keymap()} |
{'should_paginate', boolean()} |
{'key_min_length', pos_integer()}
].

%% Internal state threaded through the load/query loop (see get_results/1).
-type load_params() :: #{chunk_size => pos_integer()
,context => cb_context:context()
,databases => kz_term:ne_binaries()
,direction => direction()
,end_key => kazoo_data:range_key()
,end_time => kz_time:gregorian_seconds()
,has_qs_filter => boolean()
,is_chunked => boolean()
,last_key => last_key()
,mapper => mapper_fun()
,page_size => pos_integer()
,previous_chunk_length => non_neg_integer()
,queried_jobjs => kz_json:objects()
,should_paginate => boolean()
,start_key => kazoo_data:range_key()
,start_time => kz_time:gregorian_seconds()
,total_queried => non_neg_integer()
,view => kz_term:ne_binary()
,view_options => kazoo_data:view_options()
}.

%% Key of the last row seen in the previous query iteration ('undefined' when
%% the previous query returned fewer rows than its limit).
-type last_key() :: api_range_key().

-export_type([range_keys/0, time_range/0
,options/0, direction/0
,mapper_fun/0 ,user_mapper_fun/0
,keymap/0, keymap_fun/0
,range_keymap/0, range_keymap_fun/0
]
).
%%------------------------------------------------------------------------------
%% @doc Load view results into the context.
%% @equiv load(Context, View, [])
%% @end
%%------------------------------------------------------------------------------
-spec load(cb_context:context(), kz_term:ne_binary()) -> cb_context:context().
load(Context, View) ->
load(Context, View, []).
%%------------------------------------------------------------------------------
%% @doc Build load parameters from `Options' and the request, then query
%% `View' and load the results into `Context'.
%% @end
%%------------------------------------------------------------------------------
-spec load(cb_context:context(), kz_term:ne_binary(), options()) -> cb_context:context().
load(Context, View, Options) ->
load_view(build_load_params(Context, View, Options), Context).
%%------------------------------------------------------------------------------
%% @doc Load time-ranged view results into the context.
%% @equiv load_time_range(Context, View, [])
%% @end
%%------------------------------------------------------------------------------
-spec load_time_range(cb_context:context(), kz_term:ne_binary()) -> cb_context:context().
load_time_range(Context, View) ->
load_time_range(Context, View, []).

%%------------------------------------------------------------------------------
%% @doc This function attempts to load the context with the results of `View'
%% over the requested time range.
%% @end
%%------------------------------------------------------------------------------
-spec load_time_range(cb_context:context(), kz_term:ne_binary(), options()) -> cb_context:context().
load_time_range(Context, View, Options) ->
load_view(build_load_time_range_params(Context, View, Options), Context).
%%------------------------------------------------------------------------------
%% @doc Load time-ranged view results from the account's MODBs.
%% @equiv load_modb(Context, View, [])
%% @end
%%------------------------------------------------------------------------------
-spec load_modb(cb_context:context(), kz_term:ne_binary()) -> cb_context:context().
load_modb(Context, View) ->
load_modb(Context, View, []).

%%------------------------------------------------------------------------------
%% @doc This function attempts to load the context with the time-ranged results
%% of a view run against the account's MODBs.
%% @end
%%------------------------------------------------------------------------------
-spec load_modb(cb_context:context(), kz_term:ne_binary(), options()) -> cb_context:context().
load_modb(Context, View, Options) ->
LoadParams = build_load_modb_params(Context, View, Options),
load_view(LoadParams, Context).
%%------------------------------------------------------------------------------
%% @doc Load time-ranged view results from the account's year-based databases.
%% @equiv load_yodb(Context, View, [])
%% @end
%%------------------------------------------------------------------------------
-spec load_yodb(cb_context:context(), kz_term:ne_binary()) -> cb_context:context().
load_yodb(Context, View) ->
load_yodb(Context, View, []).

%%------------------------------------------------------------------------------
%% @doc This function attempts to load the context with the time-ranged results
%% of a view run against the account's YODBs.
%% @end
%%------------------------------------------------------------------------------
-spec load_yodb(cb_context:context(), kz_term:ne_binary(), options()) -> cb_context:context().
load_yodb(Context, View, Options) ->
LoadParams = build_load_yodb_params(Context, View, Options),
load_view(LoadParams, Context).
%%------------------------------------------------------------------------------
%% @doc Generate the required view options and load parameters for querying
%% `View'. Returns a `load_params()' map on success; on an exception (e.g.
%% inside a user-supplied keymap) a context with a `datastore_fault' system
%% error is returned instead.
%% @end
%%------------------------------------------------------------------------------
-spec build_load_params(cb_context:context(), kz_term:ne_binary(), options()) -> load_params() | cb_context:context().
build_load_params(Context, View, Options) ->
try build_general_load_params(Context, View, Options) of
#{direction := Direction}=LoadMap ->
%% Filtering is active only when the request defines filter params
%% and the caller did not opt out with 'no_filter'.
HasQSFilter = not props:get_is_true('no_filter', Options, 'false')
andalso crossbar_filter:is_defined(Context),
lager:debug("has qs filter: ~s", [HasQSFilter]),
UserMapper = props:get_value('mapper', Options),
StartEnd = start_end_keys(Context, Options, Direction),
KeyMinLength = props:get_value('key_min_length', Options),
%% Pad composite keys up to 'key_min_length' so boundary rows are not
%% filtered out (see expand_min_max_keys/3).
{StartKey, EndKey} = expand_min_max_keys(StartEnd, Direction, KeyMinLength),
Params = LoadMap#{has_qs_filter => HasQSFilter
,mapper => crossbar_filter:build_with_mapper(Context, UserMapper, HasQSFilter)
,start_key => StartKey
,view_options => build_view_query(Options, Direction, StartKey, EndKey, HasQSFilter)
},
maybe_set_start_end_keys(Params, StartKey, EndKey);
%% build_general_load_params/3 already produced an error context.
Ctx -> Ctx
catch
?STACKTRACE(_E, _T, ST)
lager:debug("exception occurred during building view options for ~s", [View]),
kz_log:log_stacktrace(ST),
cb_context:add_system_error('datastore_fault', Context)
end.
%%------------------------------------------------------------------------------
%% @doc Generate the required view options and load parameters for a
%% time-ranged query of `View'. Adds start/end time (and time-stamped keys) on
%% top of what {@link build_load_params/3} does. Returns a `load_params()' map,
%% or an error context if the requested range is invalid or an exception
%% occurs while building the options.
%% @end
%%------------------------------------------------------------------------------
-spec build_load_time_range_params(cb_context:context(), kz_term:ne_binary(), options()) ->
load_params() | cb_context:context().
build_load_time_range_params(Context, View, Options) ->
try build_general_load_params(Context, View, Options) of
#{direction := Direction}=LoadMap ->
%% The query-string key pair to read the range from, e.g.
%% created_from/created_to (default) or modified_from/modified_to.
TimeFilterKey = props:get_ne_binary_value('range_key_name', Options, <<"created">>),
UserMapper = props:get_value('mapper', Options),
%% A request whose only filter is the time range itself does not
%% need row-by-row filtering.
HasQSFilter = not props:get_is_true('no_filter', Options, 'false')
andalso crossbar_filter:is_defined(Context)
andalso not crossbar_filter:is_only_time_filter(Context, TimeFilterKey),
lager:debug("has qs filter: ~s", [HasQSFilter]),
case time_range(Context, Options, TimeFilterKey) of
{StartTime, EndTime} ->
StartEnd = ranged_start_end_keys(Context, Options, Direction, StartTime, EndTime),
KeyMinLength = props:get_value('key_min_length', Options),
{StartKey, EndKey} = expand_min_max_keys(StartEnd, Direction, KeyMinLength),
Params = LoadMap#{end_time => EndTime
,has_qs_filter => HasQSFilter
,mapper => crossbar_filter:build_with_mapper(Context, UserMapper, HasQSFilter)
,start_key => StartKey
,start_time => StartTime
,view_options => build_view_query(Options, Direction, StartKey, EndKey, HasQSFilter)
},
maybe_set_start_end_keys(Params, StartKey, EndKey);
%% time_range/3 returned a context with a validation error.
Ctx -> Ctx
end;
Ctx -> Ctx
catch
?STACKTRACE(_E, _T, ST)
lager:debug("exception occurred during building range view options for ~s", [View]),
kz_log:log_stacktrace(ST),
cb_context:add_system_error('datastore_fault', Context)
end.
%%------------------------------------------------------------------------------
%% @doc Generate the required view options and load parameters for querying
%% a view over a specified range of time in the account's MODBs.
%% @end
%%------------------------------------------------------------------------------
-spec build_load_modb_params(cb_context:context(), kz_term:ne_binary(), options()) ->
load_params() | cb_context:context().
build_load_modb_params(Context, View, Options) ->
case build_load_time_range_params(Context, View, Options) of
#{direction := Direction
,start_time := StartTime
,end_time := EndTime
}=LoadMap ->
LoadMap#{databases => get_range_modbs(Context, Options, Direction, StartTime, EndTime)};
%% Pass the error context through unchanged.
Ctx -> Ctx
end.
%%------------------------------------------------------------------------------
%% @doc Generate the required view options and load parameters for querying
%% a view over a specified range of time in the account's YODBs.
%% @end
%%------------------------------------------------------------------------------
-spec build_load_yodb_params(cb_context:context(), kz_term:ne_binary(), options()) ->
load_params() | cb_context:context().
build_load_yodb_params(Context, View, Options) ->
case build_load_time_range_params(Context, View, Options) of
#{direction := Direction
,start_time := StartTime
,end_time := EndTime
}=LoadMap ->
%% De-duplicate: several months in range may map to the same year db.
Databases = get_range_yodbs(Context, Options, Direction, StartTime, EndTime),
LoadMap#{databases => lists:usort(Databases)};
%% Pass the error context through unchanged.
Ctx -> Ctx
end.
%%------------------------------------------------------------------------------
%% @doc Build the final datastore view options: sets start/end keys and sort
%% direction, adds `include_docs' when filtering needs full documents (and the
%% view is not using reduce/group), and removes this module's own options plus
%% anything the query loop controls itself (`limit', `descending', keys).
%% @end
%%------------------------------------------------------------------------------
-spec build_view_query(options(), direction(), api_range_key(), api_range_key(), boolean()) ->
kazoo_data:view_options().
build_view_query(Options, Direction, StartKey, EndKey, HasQSFilter) ->
%% These are either crossbar-specific or re-added below in controlled form.
DeleteKeys = ['startkey', 'endkey'
,'descending', 'limit'
| ?CB_SPECIFIC_VIEW_OPTIONS
],
DefaultOptions =
props:filter_undefined(
[{'startkey', StartKey}
,{'endkey', EndKey}
,Direction
| props:delete_keys(DeleteKeys, Options)
]),
%% Query-string filters inspect the full document, so force include_docs
%% (deleting first avoids a duplicate entry).
IncludeOptions =
case HasQSFilter of
'true' -> ['include_docs' | props:delete('include_docs', DefaultOptions)];
'false' -> DefaultOptions
end,
%% Reduced/grouped views have no documents to include.
case props:get_first_defined(['reduce', 'group', 'group_level'], IncludeOptions) of
'undefined' -> IncludeOptions;
'false' -> IncludeOptions;
_V -> props:delete('include_docs', IncludeOptions)
end.
%%------------------------------------------------------------------------------
%% @doc Returns start/end keys based on direction.
%% <strong>Keymap description:</strong>
%% <dl>
%% <dt>{@type kazoo_data:range_key()}</dt><dd>A regular CouchDB key to construct
%% keys.</dd>
%% <dt>{@type keymap_fun()}</dt><dd>To customize your own key using a function.</dd>
%% </dl>
%% @equiv start_end_keys(Context, Options, direction(Context, Options))
%% @end
%%------------------------------------------------------------------------------
-spec start_end_keys(cb_context:context(), options()) -> range_keys().
start_end_keys(Context, Options) ->
start_end_keys(Context, Options, direction(Context, Options)).
%%------------------------------------------------------------------------------
%% @doc Returns start/end keys based on `Direction'. Keys supplied in the
%% request (`start_key'/`end_key') take precedence over the option-derived
%% keys; the option-derived keys are swapped when direction is descending.
%% @end
%%------------------------------------------------------------------------------
-spec start_end_keys(cb_context:context(), options(), direction()) -> range_keys().
start_end_keys(Context, Options, Direction) ->
    {OptsStartK, OptsEndK} = get_start_end_keys(Context, Options),
    %% Option-derived defaults follow the sort direction: a descending query
    %% runs from the "end" option key down to the "start" option key.
    {DefaultStart, DefaultEnd} =
        case Direction of
            'ascending' -> {OptsStartK, OptsEndK};
            'descending' -> {OptsEndK, OptsStartK}
        end,
    StartKey =
        case cb_context:req_value(Context, <<"start_key">>) of
            'undefined' -> DefaultStart;
            ReqStartKey -> ReqStartKey
        end,
    EndKey =
        case cb_context:req_value(Context, <<"end_key">>) of
            'undefined' -> DefaultEnd;
            ReqEndKey -> ReqEndKey
        end,
    {StartKey, EndKey}.
%%------------------------------------------------------------------------------
%% @doc Returns time-ranged start/end keys based on direction.
%% <strong>Keymap description:</strong>
%% <dl>
%% <dt>{@type integer()}</dt><dd>Constructs keys like `[1234, Timestamp]'.</dd>
%% </dl>
%% See {@link ranged_start_end_keys/5} for the full option/keymap description.
%% @end
%%------------------------------------------------------------------------------
-spec ranged_start_end_keys(cb_context:context(), options()) -> range_keys().
ranged_start_end_keys(Context, Options) ->
{StartTime, EndTime} = time_range(Context, Options),
Direction = direction(Context, Options),
ranged_start_end_keys(Context, Options, Direction, StartTime, EndTime).
%%------------------------------------------------------------------------------
%% @doc Returns start/end keys built from the requested time range and
%% direction. Keys supplied in the request (`start_key'/`end_key') take
%% precedence; otherwise the configured range keymaps are applied to the
%% timestamps (swapped when direction is descending). Keymaps are only invoked
%% when the corresponding request key is absent.
%% @end
%%------------------------------------------------------------------------------
-spec ranged_start_end_keys(cb_context:context(), options(), direction(), kz_time:gregorian_seconds(), kz_time:gregorian_seconds()) -> range_keys().
ranged_start_end_keys(Context, Options, Direction, StartTime, EndTime) ->
    {StartKeyMap, EndKeyMap} = get_range_key_maps(Options),
    %% Wrap the keymap applications in thunks so a map is evaluated only when
    %% the request did not already supply that key.
    {DefaultStartFun, DefaultEndFun} =
        case Direction of
            'ascending' ->
                {fun() -> StartKeyMap(StartTime) end
                ,fun() -> EndKeyMap(EndTime) end
                };
            'descending' ->
                {fun() -> EndKeyMap(EndTime) end
                ,fun() -> StartKeyMap(StartTime) end
                }
        end,
    StartKey =
        case cb_context:req_value(Context, <<"start_key">>) of
            'undefined' -> DefaultStartFun();
            ReqStartKey -> ReqStartKey
        end,
    EndKey =
        case cb_context:req_value(Context, <<"end_key">>) of
            'undefined' -> DefaultEndFun();
            ReqEndKey -> ReqEndKey
        end,
    {StartKey, EndKey}.
%%------------------------------------------------------------------------------
%% @doc Build a range keymap function that suffixes the given key onto the
%% timestamp, e.g. to generate keys like `[Timestamp, InteractionId]' for the
%% end key in {@link cb_cdrs}.
%% @end
%%------------------------------------------------------------------------------
-spec suffix_key_fun(range_keymap()) -> range_keymap_fun().
%% 'nil' disables start/end keys entirely.
suffix_key_fun('nil') -> fun(_) -> 'undefined' end;
%% No suffix configured: the key is just the timestamp.
suffix_key_fun('undefined') -> fun kz_term:identity/1;
%% NOTE: this clause must precede the is_list/1 clause below.
suffix_key_fun(['undefined']) -> fun kz_term:identity/1;
suffix_key_fun(K) when is_binary(K) -> fun(Ts) -> [Ts, K] end;
suffix_key_fun(K) when is_integer(K) -> fun(Ts) -> [Ts, K] end;
suffix_key_fun(K) when is_list(K) -> fun(Ts) -> [Ts | K] end;
%% Already a keymap fun; used as-is.
suffix_key_fun(K) when is_function(K, 1) -> K.
%%------------------------------------------------------------------------------
%% @doc Pad both boundary keys up to `KeyMinLength'. The start key is expanded
%% in the query's own direction while the end key is expanded in the opposite
%% direction, so both ends of the range stay inclusive.
%% @end
%%------------------------------------------------------------------------------
-spec expand_min_max_keys(range_keys(), direction(), kz_term:api_non_neg_integer()) -> range_keys().
expand_min_max_keys({StartKey, EndKey}, 'ascending'=Direction, KeyMinLength) ->
    {expand_min_max_keys2(StartKey, Direction, KeyMinLength)
    ,expand_min_max_keys2(EndKey, 'descending', KeyMinLength)
    };
expand_min_max_keys({StartKey, EndKey}, 'descending'=Direction, KeyMinLength) ->
    {expand_min_max_keys2(StartKey, Direction, KeyMinLength)
    ,expand_min_max_keys2(EndKey, 'ascending', KeyMinLength)
    }.
%% @doc Pad a composite (list) key with 'min_max' placeholders up to
%% KeyMinLength, then resolve each placeholder per direction. Non-list keys
%% are resolved directly without padding.
-spec expand_min_max_keys2(api_range_key(), direction(), kz_term:api_non_neg_integer()) -> api_range_key().
expand_min_max_keys2(RangeKey, Direction, KeyMinLength) when is_list(RangeKey) ->
RangeKeyPadded = maybe_min_max_pad(KeyMinLength, RangeKey),
lists:map(fun(Elem) -> expand_min_max_key(Elem, Direction) end, RangeKeyPadded);
expand_min_max_keys2(RangeKey, Direction, _) -> expand_min_max_key(RangeKey, Direction).
%% @doc Resolve a 'min_max' placeholder: 'false' for ascending queries and
%% high_value_key/0 for descending ones. NOTE(review): presumably 'false'
%% sorts below and the high value above any real key element in CouchDB
%% collation -- confirm against the collation spec. Real key elements pass
%% through unchanged.
-spec expand_min_max_key(api_range_key(), direction()) -> api_range_key().
expand_min_max_key('min_max', 'ascending') ->
lager:debug("padding ascending composite key"),
'false';
expand_min_max_key('min_max', 'descending') ->
lager:debug("padding descending composite key"),
high_value_key();
expand_min_max_key(RangeKey, _) -> RangeKey.
%% @doc Append 'min_max' placeholders to the end of the key until it reaches
%% KeyMinLength ('undefined' disables padding). Works on the reversed list so
%% the placeholders end up as the trailing elements.
%% NOTE(review): when the key is already longer than KeyMinLength the computed
%% pad count is negative -- verify min_max_pad/2 handles that case.
-spec maybe_min_max_pad(kz_term:api_non_neg_integer(), api_range_key()) -> api_range_key() | ['min_max'].
maybe_min_max_pad('undefined', RangeKey) -> RangeKey;
maybe_min_max_pad(KeyMinLength, RangeKey) ->
lists:reverse(min_max_pad(KeyMinLength - length(RangeKey), lists:reverse(RangeKey))).
%% @doc Prepend `N' 'min_max' placeholder elements onto `RangeKey'.
%% A non-positive `N' (i.e. the key already meets the requested minimum
%% length) returns the key unchanged; previously a negative `N' -- possible
%% when the key is longer than 'key_min_length' -- recursed forever.
-spec min_max_pad(integer(), api_range_key()) -> api_range_key() | ['min_max'].
min_max_pad(N, RangeKey) when N =< 0 -> RangeKey;
min_max_pad(N, RangeKey) -> min_max_pad(N-1, ['min_max' | RangeKey]).
%% @doc A high-sorting UTF-8 codepoint used when padding descending composite
%% keys. NOTE(review): 16#fff0 sits near the top of the Basic Multilingual
%% Plane; presumably it collates after any practical key element -- confirm
%% against CouchDB collation rules.
-spec high_value_key() -> kz_term:ne_binary().
high_value_key() -> <<16#fff0/utf8>>.
%%------------------------------------------------------------------------------
%% @doc Find view sort direction from the request query string.
%% @equiv direction(Context, [])
%% @end
%%------------------------------------------------------------------------------
-spec direction(cb_context:context()) -> direction().
direction(Context) ->
direction(Context, []).
%%------------------------------------------------------------------------------
%% @doc Find view sort direction from `Options' or the request query string.
%% Defaults to `descending' unless the query string explicitly asks for
%% ascending order.
%% @end
%%------------------------------------------------------------------------------
-spec direction(cb_context:context(), options()) -> direction().
direction(Context, Options) ->
    QueryString = cb_context:query_string(Context),
    IsDescending = props:get_value('descending', Options, 'false')
        orelse kz_json:is_true(<<"descending">>, QueryString)
        orelse kz_json:is_false(<<"ascending">>, QueryString, 'true'),
    case IsDescending of
        'true' -> 'descending';
        'false' -> 'ascending'
    end.
%%------------------------------------------------------------------------------
%% @doc Get the requested time range from the request.
%% @equiv time_range(Context, [])
%% @end
%%------------------------------------------------------------------------------
-spec time_range(cb_context:context()) -> time_range() | cb_context:context().
time_range(Context) -> time_range(Context, []).
%%------------------------------------------------------------------------------
%% @doc Get the requested time range from the request or `Options'. The
%% maximum allowed range defaults to the `crossbar.maximum_range' system
%% config value (31 days). The range key name defaults to `created'.
%% @end
%%------------------------------------------------------------------------------
-spec time_range(cb_context:context(), options()) -> time_range() | cb_context:context().
time_range(Context, Options) ->
time_range(Context, Options, props:get_ne_binary_value('range_key_name', Options, <<"created">>)).
%%------------------------------------------------------------------------------
%% @doc Read `{Key}_to'/`{Key}_from' from the request (or `Options') and
%% validate them. `{Key}_to' defaults to now; `{Key}_from' defaults to
%% `{Key}_to' minus the maximum allowed range.
%% @end
%%------------------------------------------------------------------------------
-spec time_range(cb_context:context(), options(), kz_term:ne_binary()) -> time_range() | cb_context:context().
time_range(Context, Options, Key) ->
MaxRange = get_max_range(Options),
TSTime = kz_time:now_s(),
RangeTo = get_time_key(Context, <<Key/binary, "_to">>, Options, TSTime),
RangeFrom = get_time_key(Context, <<Key/binary, "_from">>, Options, RangeTo - MaxRange),
time_range(Context, MaxRange, Key, RangeFrom, RangeTo).
%%------------------------------------------------------------------------------
%% @doc Checks that the end time is not prior to the start time and that the
%% span does not exceed `MaxRange'. Returns `{StartTime, EndTime}' on success,
%% otherwise a context carrying a `date_range' validation error.
%% @end
%%------------------------------------------------------------------------------
-spec time_range(cb_context:context(), pos_integer(), kz_term:ne_binary(), pos_integer(), pos_integer()) ->
time_range() | cb_context:context().
time_range(Context, MaxRange, Key, RangeFrom, RangeTo) ->
Path = <<Key/binary, "_from">>,
case RangeTo - RangeFrom of
%% End time precedes start time.
N when N < 0 ->
Msg = kz_term:to_binary(io_lib:format("~s_to ~b is prior to ~s ~b", [Key, RangeTo, Path, RangeFrom])),
JObj = kz_json:from_list([{<<"message">>, Msg}, {<<"cause">>, RangeFrom}]),
lager:debug("range error: ~s", [Msg]),
cb_context:add_validation_error(Path, <<"date_range">>, JObj, Context);
%% Requested span is wider than the configured maximum.
N when N > MaxRange ->
Msg = kz_term:to_binary(io_lib:format("~s_to ~b is more than ~b seconds from ~s ~b", [Key, RangeTo, MaxRange, Path, RangeFrom])),
JObj = kz_json:from_list([{<<"message">>, Msg}, {<<"cause">>, RangeTo}]),
lager:debug("range_error: ~s", [Msg]),
cb_context:add_validation_error(Path, <<"date_range">>, JObj, Context);
_ ->
{RangeFrom, RangeTo}
end.
%% @doc Returns a mapper that extracts the included `doc' object from each
%% view result row (result list ends up reversed relative to input).
-spec get_doc_fun() -> mapper_fun().
get_doc_fun() -> fun(JObj, Acc) -> [kz_json:get_json_value(<<"doc">>, JObj)|Acc] end.
%% @doc Returns a mapper that extracts the `value' from each view result row.
-spec get_value_fun() -> mapper_fun().
get_value_fun() -> fun(JObj, Acc) -> [kz_json:get_value(<<"value">>, JObj)|Acc] end.
%% @doc Returns a mapper that extracts the `key' from each view result row.
-spec get_key_fun() -> mapper_fun().
get_key_fun() -> fun(JObj, Acc) -> [kz_json:get_value(<<"key">>, JObj)|Acc] end.
%% @doc Returns a mapper that extracts the document id from each view result row.
-spec get_id_fun() -> mapper_fun().
get_id_fun() -> fun(JObj, Acc) -> [kz_doc:id(JObj)|Acc] end.
%%------------------------------------------------------------------------------
%% @doc If pagination is available, returns the requested page size: the
%% `limit' option wins, otherwise the request's page size; `'infinity'' when
%% pagination is disabled by option or context.
%% <div class="notice">DO NOT ADD ONE (1) TO PAGE_SIZE OR LIMIT YOURSELF!</div>
%% @end
%%------------------------------------------------------------------------------
-spec get_page_size(cb_context:context(), options()) -> page_size().
get_page_size(Context, Options) ->
    case props:is_true('should_paginate', Options, 'true')
        andalso cb_context:should_paginate(Context)
    of
        'false' ->
            lager:debug("pagination disabled in context or option"),
            'infinity';
        'true' ->
            case props:get_value('limit', Options) of
                'undefined' ->
                    get_page_size_from_request(Context);
                Limit ->
                    lager:debug("got limit from options: ~b", [Limit]),
                    Limit
            end
    end.
%%------------------------------------------------------------------------------
%% @doc Load view results based on the built parameters. For chunked requests
%% the load parameters are stashed in the context and the actual querying is
%% deferred to {@link next_chunk/1}; otherwise the query runs immediately and
%% the formatted response is returned. An error context from the build phase
%% is passed through unchanged.
%% @end
%%------------------------------------------------------------------------------
-spec load_view(load_params() | cb_context:context(), cb_context:context()) -> cb_context:context().
%% Chunked request: defer querying; api_resource will drive next_chunk/1.
load_view(#{is_chunked := 'true'
,direction := Direction
,has_qs_filter := HasQSFilter
}=LoadMap, Context) ->
Setters = [{fun cb_context:set_doc/2, []}
,{fun cb_context:set_resp_data/2, []}
,{fun cb_context:set_resp_status/2, 'success'}
,{fun cb_context:store/3, 'is_chunked', 'true'}
,{fun cb_context:store/3, 'next_chunk_fun', fun next_chunk/1}
,{fun cb_context:store/3, 'chunking_started', 'false'}
,{fun cb_context:store/3, 'view_direction', Direction}
,{fun cb_context:store/3, 'has_qs_filter', HasQSFilter}
,{fun cb_context:store/3, 'load_view_opts', LoadMap}
],
cb_context:setters(Context, Setters);
%% Non-chunked request: query now and format the response envelope.
load_view(#{direction := Direction
,has_qs_filter := HasQSFilter
}=LoadMap, Context) ->
Setters = [{fun cb_context:set_doc/2, []}
,{fun cb_context:set_resp_data/2, []}
,{fun cb_context:set_resp_status/2, 'success'}
,{fun cb_context:store/3, 'view_direction', Direction}
,{fun cb_context:store/3, 'has_qs_filter', HasQSFilter}
],
format_response(get_results(LoadMap#{context => cb_context:setters(Context, Setters)}));
%% Build phase already failed; pass the error context along.
load_view(ContextError, _) ->
ContextError.
%%------------------------------------------------------------------------------
%% @doc Called by {@link api_resource} to get the next chunk of a chunked view
%% request. Decides whether to keep querying the same database, advance to the
%% next database, or stop, and sets `chunking_finished' once databases or the
%% page size are exhausted.
%% @end
%%------------------------------------------------------------------------------
-spec next_chunk(map()) -> map().
%% No databases left: fold the last chunk's count in and finish.
next_chunk(#{options := #{databases := []}
,previous_chunk_length := PrevLength
,total_queried := TotalQueried
}=ChunkMap) ->
lager:debug("(chunked) databases exhausted"),
ChunkMap#{total_queried => TotalQueried + PrevLength
,chunking_finished => 'true'
};
%% Page size is exhausted when the query is limited by page_size:
%% page_size =:= total_queried + current chunk, and the last key was found.
next_chunk(#{options := #{page_size := PageSize}
,last_key := LastKey
,total_queried := TotalQueried
,previous_chunk_length := PrevLength
}=ChunkMap)
when is_integer(PageSize)
andalso PageSize > 0
andalso TotalQueried + PrevLength =:= PageSize
andalso LastKey =/= 'undefined' ->
lager:debug("(chunked) page size exhausted: ~b", [PageSize]),
ChunkMap#{total_queried => TotalQueried + PrevLength
,chunking_finished => 'true'
};
%% The current last_key was found and differs from the previous one: the same
%% database still has rows to give, so query it again from last_key.
next_chunk(#{options := #{last_key := OldLastKey}=LoadMap
,last_key := LastKey
,total_queried := TotalQueried
,previous_chunk_length := PrevLength
,context := Context
}=ChunkMap)
when OldLastKey =/= LastKey,
LastKey =/= 'undefined' ->
lager:debug("(chunked) db has more chunks to give, querying same db again"),
chunk_map_roll_in(ChunkMap, get_results(LoadMap#{total_queried => TotalQueried + PrevLength
,context => Context
,last_key => LastKey
,previous_chunk_length => 0
}));
%% Only one database is left and it has no more results to give, so the
%% request is completed.
next_chunk(#{options := #{databases := [_]}
,previous_chunk_length := PrevLength
,total_queried := TotalQueried
}=ChunkMap) ->
lager:debug("(chunked) databases exhausted"),
ChunkMap#{total_queried => TotalQueried + PrevLength
,chunking_finished => 'true'
};
%% Current database is drained; just query the next database.
next_chunk(#{options := #{databases := [_|RestDbs], last_key := LastKey}=LoadMap
,total_queried := TotalQueried
,previous_chunk_length := PrevLength
,context := Context
}=ChunkMap) ->
lager:debug("(chunked) querying next db"),
chunk_map_roll_in(ChunkMap, get_results(LoadMap#{total_queried => TotalQueried + PrevLength
,databases => RestDbs
,context => Context
,last_key => LastKey
,previous_chunk_length => 0
}));
%% Starting the chunked query: fetch the stashed load parameters (stored by
%% load_view/2) and run the first query.
next_chunk(#{context := Context}=ChunkMap) ->
case cb_context:fetch(Context, 'load_view_opts') of
#{databases := []} ->
lager:debug("(chunked) databases exhausted"),
ChunkMap#{chunking_finished => 'true'};
#{}=LoadMap ->
chunk_map_roll_in(ChunkMap
,get_results(LoadMap#{context => cb_context:store(Context, 'load_view_opts', 'undefined')
,previous_chunk_length => 0
}))
end.
%%------------------------------------------------------------------------------
%% @doc Copy the query-progress fields from the `load_params()' returned by
%% {@link get_results/1} back into the chunk state map.
%% @end
%%------------------------------------------------------------------------------
-spec chunk_map_roll_in(map(), load_params()) -> map().
chunk_map_roll_in(#{last_key := _OldLastKey}=ChunkMap
                 ,#{start_key := _
                   ,last_key := _
                   ,total_queried := _
                   ,previous_chunk_length := _
                   ,context := _
                   }=LoadMap) ->
    RollIn = maps:with(['start_key', 'last_key', 'total_queried'
                       ,'previous_chunk_length', 'context'
                       ]
                      ,LoadMap
                      ),
    maps:merge(ChunkMap, RollIn).
%% @doc Fold over databases, fetch results from each and count total results.
%% If the last key is defined, keep querying the same db; otherwise query the
%% next DBs until the DBs are exhausted. Each query may return up to the
%% computed limit of rows; the extra row (if any) supplies the last_key.
%% @doc Query the head database of the fold with a limit derived from the
%% page/chunk size. On `not_found' the next database is tried (or a
%% "missing view" response is returned when it was the last one); any other
%% datastore error becomes an error context. Successful rows are handed to
%% handle_query_results/3 for pagination bookkeeping.
-spec get_results(load_params()) -> load_params().
get_results(#{databases := []}=LoadMap) ->
    lager:debug("databases exhausted"),
    LoadMap;
get_results(#{databases := [Db|RestDbs]
             ,view := View
             ,view_options := ViewOpts
             ,direction := Direction
             ,is_chunked := IsChunked
             ,chunk_size := ChunkSize
             ,total_queried := TotalQueried
             ,context := Context
             ,last_key := LastKey
             ,page_size := PageSize
             ,start_key := StartKey
             }=LoadMap) ->
    %% limit includes one extra row used to compute the next start key
    LimitWithLast = limit_with_last_key(IsChunked, PageSize, ChunkSize, TotalQueried),
    lager:debug("querying view '~s' from '~s', starting at '~p' with page size ~p and limit ~p in direction ~s"
               ,[View, Db, StartKey, PageSize, LimitWithLast, Direction]
               ),
    %% resume from the last seen key when paging, otherwise the requested start key
    NextStartKey = case LastKey of
                       'undefined' -> props:get_value('startkey', ViewOpts);
                       _ -> LastKey
                   end,
    ViewOptions = props:filter_undefined(
                    [{'limit', LimitWithLast}
                    ,{'startkey', NextStartKey}
                     | props:delete('startkey', ViewOpts)
                    ]),
    lager:debug("kz_datamgr:get_results(~p, ~p, ~p)", [Db, View, ViewOptions]),
    case kz_datamgr:get_results(Db, View, ViewOptions) of
        {'error', 'not_found'} when [] =:= RestDbs ->
            lager:debug("either the db ~s or view ~s was not found", [Db, View]),
            LoadMap#{context => crossbar_util:response_missing_view(Context)};
        {'error', 'not_found'} ->
            lager:debug("either the db ~s or view ~s was not found, querying next db...", [Db, View]),
            get_results(LoadMap#{databases => RestDbs});
        {'error', Error} ->
            lager:debug("failed to query view ~s from db ~s: ~p", [View, Db, Error]),
            LoadMap#{context => crossbar_doc:handle_datamgr_errors(Error, View, Context)};
        {'ok', JObjs} ->
            handle_query_results(LoadMap, JObjs, LimitWithLast)
    end.
%% @doc Look up the current process memory footprint and the configured
%% request memory cap, then delegate to handle_query_results/5 which decides
%% whether to keep processing this request or abort it.
handle_query_results(LoadMap, JObjs, LimitWithLast) ->
    [{'memory', MemoryUsed}] = erlang:process_info(self(), ['memory']),
    Cap = kapps_config:get_integer(?CONFIG_CAT, <<"request_memory_limit">>),
    handle_query_results(LoadMap, JObjs, LimitWithLast, MemoryUsed, Cap).
%% No memory limit configured: always process.
handle_query_results(LoadMap, JObjs, LimitWithLast, _End, 'undefined') ->
    process_query_results(LoadMap, JObjs, LimitWithLast);
%% Under the configured cap: safe to keep accumulating results.
handle_query_results(LoadMap, JObjs, LimitWithLast, MemoryUsed, MemoryLimit) when MemoryUsed < MemoryLimit ->
    lager:debug("under memory cap of ~p: ~p used", [MemoryLimit, MemoryUsed]),
    process_query_results(LoadMap, JObjs, LimitWithLast);
%% Over the cap: abort with a "range not satisfiable" response.
handle_query_results(#{context := Context}=LoadMap, _JObjs, _LimitWithLast, _TooMuch, _Limit) ->
    lager:warning("memory used ~p exceeds limit ~p", [_TooMuch, _Limit]),
    LoadMap#{context => crossbar_util:response_range_not_satisfiable(Context)}.
%% @doc Run handle_query_result/4 for the current database, converting any
%% crash during result processing into a 'datastore_fault' system error so
%% the request fails cleanly instead of propagating the exception.
process_query_results(#{databases := [Db|_]=Dbs
                       ,context := Context
                       ,view := View
                       }=LoadMap
                     ,JObjs
                     ,LimitWithLast
                     ) ->
    try handle_query_result(LoadMap, Dbs, JObjs, LimitWithLast)
    catch
        %% ?STACKTRACE expands to the catch-clause head (class:reason:stack)
        ?STACKTRACE(_E, _T, ST)
        lager:warning("exception occurred during querying db ~s for view ~s : ~p:~p", [Db, View, _E, _T]),
        kz_log:log_stacktrace(ST),
        LoadMap#{context => cb_context:add_system_error('datastore_fault', Context)}
    end.
%% @doc Split the next page's key off the raw view results, run the
%% caller-supplied mapper/filter over the remaining rows, then dispatch to
%% handle_query_result/5 with the filtered rows and the new last key.
-spec handle_query_result(load_params(), kz_term:ne_binaries(), kz_json:objects(), kz_term:api_pos_integer()) -> load_params().
handle_query_result(#{last_key := LastKey
                     ,mapper := Mapper
                     ,context := Context
                     ,page_size := PageSize
                     }=LoadMap
                   ,Dbs
                   ,Results
                   ,Limit
                   ) ->
    ResultsLength = erlang:length(Results),
    %% last_key/5 pops the extra row (when the limit was hit) and reverses
    %% the remainder; apply_filter/2 is expected to restore the order
    {NewLastKey, JObjs} = last_key(LastKey, Results, Limit, ResultsLength, PageSize),
    case apply_filter(Mapper, JObjs) of
        {'error', Reason} ->
            LoadMap#{context => cb_context:add_system_error('datastore_fault', kz_term:to_binary(Reason), Context)};
        FilteredJObjs when is_list(FilteredJObjs) ->
            FilteredLength = length(FilteredJObjs),
            lager:debug("db_returned: ~b(~p) passed_filter: ~p next_start_key: ~p"
                       ,[ResultsLength, PageSize, FilteredLength, NewLastKey]
                       ),
            handle_query_result(LoadMap, Dbs, FilteredJObjs, FilteredLength, NewLastKey)
    end.
-spec handle_query_result(load_params(), kz_term:ne_binaries(), kz_json:object() | kz_json:objects(), non_neg_integer(), last_key()) -> load_params().
%% Chunked request: stash this chunk on the context for the caller to send
%% and remember the next start key; the caller drives further queries.
handle_query_result(#{is_chunked := 'true'
                     ,context := Context
                     }=LoadMap
                   ,[Db|_]
                   ,FilteredJObjs
                   ,FilteredLength
                   ,NewLastKey
                   ) ->
    Setters = [{fun cb_context:set_resp_data/2, FilteredJObjs}
              ,{fun cb_context:set_db_name/2, Db}
              ],
    Context1 = cb_context:setters(Context, Setters),
    LoadMap#{last_key => NewLastKey
            ,context => Context1
            ,previous_chunk_length => FilteredLength
            };
%% Non-chunked request: accumulate, then either stop (page exhausted),
%% re-query the same db when a last key remains, or advance to the next db.
handle_query_result(#{page_size := _PageSize
                     ,last_key := _OldLastKey
                     }=LoadMap
                   ,[_|RestDbs]
                   ,FilteredJObjs
                   ,FilteredLength
                   ,NewLastKey
                   ) ->
    case check_page_size_and_length(LoadMap, FilteredLength, FilteredJObjs, NewLastKey) of
        {'exhausted', LoadMap2} -> LoadMap2;
        %% a defined last key means the current db has more rows to give
        {'next_db', LoadMap2} when NewLastKey =/= 'undefined' ->
            lager:debug("updating new last key to ~p from ~p", [NewLastKey, _OldLastKey]),
            get_results(LoadMap2#{last_key => NewLastKey});
        {'next_db', LoadMap2} -> get_results(LoadMap2#{databases => RestDbs})
    end.
%% @doc Fold the filtered rows into the accumulator and decide whether the
%% requested page is now complete ('exhausted') or querying should continue
%% ('next_db'). Clause order matters: the exact-fill clause must be checked
%% before the general accumulate clause.
-spec check_page_size_and_length(load_params(), non_neg_integer(), kz_json:objects(), last_key()) ->
          {'exhausted' | 'next_db', load_params()}.
%% No page size limit: always keep accumulating.
check_page_size_and_length(#{page_size := 'infinity'
                            ,queried_jobjs := QueriedJObjs
                            ,total_queried := TotalQueried
                            }=LoadMap
                          ,Length
                          ,FilteredJObjs
                          ,LastKey
                          ) ->
    {'next_db', LoadMap#{total_queried => TotalQueried + Length
                        ,queried_jobjs => QueriedJObjs ++ FilteredJObjs
                        ,last_key => LastKey
                        }
    };
%% Page filled exactly and there is a key to resume from: stop here.
check_page_size_and_length(#{page_size := PageSize
                            ,queried_jobjs := QueriedJObjs
                            ,total_queried := TotalQueried
                            }=LoadMap
                          ,Length
                          ,FilteredJObjs
                          ,LastKey
                          )
  when is_integer(PageSize)
       andalso PageSize > 0
       andalso TotalQueried + Length == PageSize
       andalso LastKey =/= 'undefined' ->
    lager:debug("page size exhausted: ~b", [PageSize]),
    {'exhausted', LoadMap#{total_queried => TotalQueried + Length
                          ,queried_jobjs => QueriedJObjs ++ FilteredJObjs
                          ,last_key => LastKey
                          }
    };
%% Page not full yet: accumulate and continue querying.
check_page_size_and_length(#{total_queried := TotalQueried
                            ,queried_jobjs := QueriedJObjs
                            }=LoadMap
                          ,Length
                          ,FilteredJObjs
                          ,LastKey
                          ) ->
    {'next_db', LoadMap#{total_queried => TotalQueried + Length
                        ,queried_jobjs => QueriedJObjs ++ FilteredJObjs
                        ,last_key => LastKey
                        }
    }.
%% @doc Compute the view query limit: one row more than actually needed, so
%% the extra row's key can become the next page's start key.
%% Clause order matters - the patterns overlap and earlier clauses win.
-spec limit_with_last_key(boolean(), page_size(), pos_integer(), non_neg_integer()) ->
          kz_term:api_pos_integer().
%% not chunked and no page size: unlimited query
limit_with_last_key('false', 'undefined', _, _) ->
    'undefined';
%% "infinite" page: fetch chunk by chunk
limit_with_last_key(_IsChunked, 'infinity', ChunkSize, _TotalQueried) ->
    1 + ChunkSize;
%% not chunked: fetch whatever remains of the page
limit_with_last_key('false', PageSize, _, TotalQueried) ->
    1 + PageSize - TotalQueried;
%% chunked with no page size: fetch one chunk
limit_with_last_key('true', 'undefined', ChunkSize, _) ->
    1 + ChunkSize;
%% chunk size equals page size: fetch the remainder of the page
limit_with_last_key('true', PageSize, PageSize, TotalQueried) ->
    1 + PageSize - TotalQueried;
%% a full chunk still fits within the page remainder
limit_with_last_key('true', PageSize, ChunkSize, TotalQueried) when ChunkSize < (PageSize - TotalQueried) ->
    1 + ChunkSize;
%% remainder of the page is smaller than a chunk
limit_with_last_key('true', PageSize, _ChunkSize, TotalQueried) ->
    1 + PageSize - TotalQueried.
%% Mapper function can be arity 1 (operating on a whole list of JObjs) or
%% arity 2 of `(JObj, Acc)'.
%% If you use a mapper function of arity 1, you have to reverse your result before
%% returning it (the rows are handed to you in reverse order).
%% If mapper is an arity 2 function, the output should be in the same order.
%% Mappers may also return an `{'error', _}' tuple
%% with a reason (preferred as binary) as a second element.
%% @doc Apply the caller-supplied mapper to the view rows. With no mapper
%% the rows are simply reversed back; an arity-1 mapper receives the whole
%% (reversed) list; an arity-2 mapper is folded over the rows one at a time
%% via filter_foldl/3.
-spec apply_filter(mapper_fun(), kz_json:objects()) ->
          kz_json:objects() |
          kz_json:object() |
          {'error', any()}.
apply_filter(_Fun, []) -> [];
apply_filter('undefined', Objects) ->
    lists:reverse(Objects);
apply_filter(Fun, Objects) when is_function(Fun, 1) ->
    Fun(Objects);
apply_filter(Fun, Objects) when is_function(Fun, 2) ->
    filter_foldl(Fun, Objects, []).
%% @doc Fold an arity-2 mapper over the rows, short-circuiting on the first
%% {'error', _} the mapper returns. Once the input is exhausted, empty
%% objects are dropped from the accumulated result.
-spec filter_foldl(mapper_fun(), kz_json:objects(), kz_json:objects()) ->
          kz_json:objects() |
          {'error', any()}.
filter_foldl(_Fun, [], Acc) ->
    lists:filter(fun(Obj) -> not kz_term:is_empty(Obj) end, Acc);
filter_foldl(Fun, [Obj | Rest], Acc) ->
    case Fun(Obj, Acc) of
        {'error', _}=Error -> Error;
        NewAcc -> filter_foldl(Fun, Rest, NewAcc)
    end.
%% @doc Decide the start key of the next page from a fetched batch of rows.
%% All non-empty clauses return the rows reversed (new_last_key/1 included);
%% downstream filtering restores the original order.
-spec last_key(last_key(), kz_json:objects(), non_neg_integer() | 'undefined', non_neg_integer(), page_size()) ->
          {last_key(), kz_json:objects()}.
%% nothing returned: keep the previous key
last_key(LastKey, [], _Limit, _Returned, _PageSize) ->
    lager:debug("no results same last key ~p", [LastKey]),
    {LastKey, []};
%% no limit was applied: there is no "extra" row to pop
last_key(LastKey, JObjs, 'undefined', _Returned, _PageSize) ->
    lager:debug("no limit, re-using last key ~p", [LastKey]),
    {LastKey, lists:reverse(JObjs)};
%% limit reached exactly: the extra row's key becomes the next start key
last_key(_LastKey, JObjs, Limit, Limit, _PageSize) ->
    lager:debug("full page fetched, calculating new key"),
    new_last_key(JObjs);
%% short page: this db has no more rows for us
last_key(_LastKey, JObjs, _Limit, _Returned, _PageSize) ->
    lager:debug("returned page ~p smaller than page limit ~p", [_Returned, _Limit]),
    {'undefined', lists:reverse(JObjs)}.
%% @doc Pop the extra (last fetched) row off the results; its "key" value
%% becomes the start key of the next page and the remaining rows are
%% returned in reverse order.
-spec new_last_key(kz_json:objects()) -> {last_key(), kz_json:objects()}.
new_last_key(JObjs) ->
    [Extra | Remaining] = lists:reverse(JObjs),
    {kz_json:get_value(<<"key">>, Extra), Remaining}.
%% @doc Turn the accumulated load parameters into the final context: on
%% success build the paginated response envelope, otherwise return the
%% (error) context untouched.
-spec format_response(load_params()) -> cb_context:context().
format_response(#{context := Context}=LoadMap) ->
    case 'success' =:= cb_context:resp_status(Context) of
        'true' -> format_success_response(LoadMap);
        'false' -> Context
    end.
%% @doc Build the success response: stamp pagination fields on the response
%% envelope and hand the accumulated rows to the standard success handler.
%% The last key seen becomes the client's "next_start_key".
-spec format_success_response(load_params()) -> cb_context:context().
format_success_response(#{total_queried := TotalQueried
                         ,queried_jobjs := JObjs
                         ,context := Context
                         ,last_key := NextStartKey
                         ,start_key := StartKey
                         }) ->
    Envelope = add_paging(StartKey, TotalQueried, NextStartKey, cb_context:resp_envelope(Context)),
    crossbar_doc:handle_datamgr_success(JObjs, cb_context:set_resp_envelope(Context, Envelope)).
%% @doc Stamp pagination keys onto the response envelope, replacing any
%% stale values already present.
-spec add_paging(api_range_key(), non_neg_integer(), api_range_key(), kz_json:object()) -> kz_json:object().
add_paging(StartKey, PageSize, NextStartKey, Envelope) ->
    Values = [{<<"start_key">>, StartKey}
             ,{<<"page_size">>, PageSize}
             ,{<<"next_start_key">>, NextStartKey}
             ],
    Stripped = kz_json:delete_keys([Key || {Key, _} <- Values], Envelope),
    kz_json:set_values(Values, Stripped).
%% @doc Assemble the base load-parameter map shared by all view loads.
%% Validation throws from get_chunk_size/2 and get_page_size/2 are converted
%% into a 'faulty_request' error context.
-spec build_general_load_params(cb_context:context(), kz_term:ne_binary(), options()) -> load_params() | cb_context:context().
build_general_load_params(Context, View, Options) ->
    Direction = direction(Context, Options),
    try maps:from_list(
          [{'chunk_size', get_chunk_size(Context, Options)}
          ,{'databases', props:get_value('databases', Options, [cb_context:db_name(Context)])}
          ,{'direction', Direction}
          ,{'is_chunked', is_chunked(Context, Options)}
          ,{'last_key', 'undefined'}
          ,{'page_size', get_page_size(Context, Options)}
          ,{'queried_jobjs', []}
          ,{'should_paginate', cb_context:should_paginate(Context)}
          ,{'total_queried', 0}
          ,{'view', View}
          ])
    catch
        'throw':{'error', ErrorMsg} ->
            cb_context:add_system_error(404, 'faulty_request', ErrorMsg, Context)
    end.
%% @doc A request is chunked when the query string (or, failing that, the
%% 'is_chunked' option) asks for it, unless the endpoint is marked
%% 'unchunkable'.
-spec is_chunked(cb_context:context(), options()) -> boolean().
is_chunked(Context, Options) ->
    kz_json:is_true(<<"is_chunked">>
                   ,cb_context:query_string(Context)
                   ,props:get_is_true('is_chunked', Options, 'false')
                   )
        andalso not props:get_is_true('unchunkable', Options, 'false').
%% @doc The account MODBs covering [StartTime, EndTime], ordered to match
%% the query direction; an explicit 'databases' option overrides the
%% computed range (it is sorted, then reversed for descending queries).
-spec get_range_modbs(cb_context:context(), options(), direction(), kz_time:gregorian_seconds(), kz_time:gregorian_seconds()) ->
          kz_term:ne_binaries().
get_range_modbs(Context, Options, Direction, StartTime, EndTime) ->
    case props:get_value('databases', Options) of
        'undefined' when Direction =:= 'ascending' ->
            kazoo_modb:get_range(cb_context:account_id(Context), StartTime, EndTime);
        'undefined' when Direction =:= 'descending' ->
            lists:reverse(kazoo_modb:get_range(cb_context:account_id(Context), StartTime, EndTime));
        Dbs when Direction =:= 'ascending' ->
            lists:usort(Dbs);
        Dbs when Direction =:= 'descending' ->
            lists:reverse(lists:usort(Dbs))
    end.
%% @doc Mirrors get_range_modbs/5 for yearly (YODB) databases: the account
%% YODBs covering [StartTime, EndTime], ordered to match the direction,
%% with an explicit 'databases' option taking precedence.
-spec get_range_yodbs(cb_context:context(), options(), direction(), kz_time:gregorian_seconds(), kz_time:gregorian_seconds()) ->
          kz_term:ne_binaries().
get_range_yodbs(Context, Options, Direction, StartTime, EndTime) ->
    case props:get_value('databases', Options) of
        'undefined' when Direction =:= 'ascending' ->
            kazoo_yodb:get_range(cb_context:account_id(Context), StartTime, EndTime);
        'undefined' when Direction =:= 'descending' ->
            lists:reverse(kazoo_yodb:get_range(cb_context:account_id(Context), StartTime, EndTime));
        Dbs when Direction =:= 'ascending' ->
            lists:usort(Dbs);
        Dbs when Direction =:= 'descending' ->
            lists:reverse(lists:usort(Dbs))
    end.
%% @doc Read the chunk size from the query string, falling back to the
%% 'chunk_size' option and then the system default. A requested size must
%% be between 1 and the system-configured maximum; anything else is
%% rejected with a descriptive throw.
%%
%% Bug fix: the guard `ChunkSize < 0' did not match a requested size of 0,
%% and since the clauses of `try ... of' are not protected by the catch,
%% chunk_size=0 crashed with an uncaught 'try_clause' error instead of
%% throwing "chunk size must be at least 1". The guard is now `< 1'.
-spec get_chunk_size(cb_context:context(), options()) -> kz_term:api_pos_integer().
get_chunk_size(Context, Options) ->
    SystemSize = kapps_config:get_pos_integer(?CONFIG_CAT, <<"load_view_chunk_size">>, 50),
    OptionsSize = props:get_integer_value('chunk_size', Options, SystemSize),
    case kz_json:get_value(<<"chunk_size">>, cb_context:query_string(Context)) of
        'undefined' -> OptionsSize;
        Size ->
            try kz_term:to_integer(Size) of
                ChunkSize when ChunkSize > 0,
                               ChunkSize =< SystemSize ->
                    ChunkSize;
                ChunkSize when ChunkSize < 1 ->
                    throw({'error', <<"chunk size must be at least 1">>});
                ChunkSize when ChunkSize > SystemSize ->
                    throw({'error', <<"chunk size must be lower than ", (integer_to_binary(SystemSize))/binary>>})
            catch
                _:_ ->
                    throw({'error', <<"invalid chunk size">>})
            end
    end.
%% @doc Record the computed start/end range keys on the load map, leaving
%% either key unset when it is 'undefined'.
-spec maybe_set_start_end_keys(load_params(), api_range_key(), api_range_key()) -> load_params().
maybe_set_start_end_keys(LoadMap, StartKey, EndKey) ->
    WithStart = case StartKey of
                    'undefined' -> LoadMap;
                    _ -> LoadMap#{start_key => StartKey}
                end,
    case EndKey of
        'undefined' -> WithStart;
        _ -> WithStart#{end_key => EndKey}
    end.
%% @doc The requested page size, defaulting to the system pagination page
%% size; throws {'error', Msg} for non-integer or non-positive values
%% (the catch-all clause covers 0 and negatives).
-spec get_page_size_from_request(cb_context:context()) -> pos_integer().
get_page_size_from_request(Context) ->
    case cb_context:req_value(Context, <<"page_size">>) of
        'undefined' -> cb_context:pagination_page_size();
        Size ->
            try kz_term:to_integer(Size) of
                PageSize when PageSize > 0 -> PageSize;
                _ ->
                    throw({'error', <<"page size must be at least 1">>})
            catch
                _:_ ->
                    throw({'error', <<"invalid page size">>})
            end
    end.
%% @doc An integer time value for `Key': the option value when set,
%% otherwise the request value when it casts to a positive integer,
%% otherwise `Default'.
-spec get_time_key(cb_context:context(), kz_term:ne_binary(), options(), pos_integer()) -> pos_integer().
get_time_key(Context, Key, Options, Default) ->
    case props:get_integer_value(Key, Options) of
        'undefined' ->
            case kz_term:safe_cast(cb_context:req_value(Context, Key), Default, fun kz_term:to_integer/1) of
                T when T > 0 -> T;
                _ -> Default
            end;
        Value -> Value
    end.
%% @doc Maximum allowed time range (in seconds) for ranged queries: the
%% 'max_range' option when provided, otherwise the ?MAX_RANGE default.
%% Uses props:get_integer_value/3 with a default instead of the manual
%% case-on-'undefined' the original spelled out.
-spec get_max_range(options()) -> pos_integer().
get_max_range(Options) ->
    props:get_integer_value('max_range', Options, ?MAX_RANGE).
%% @doc Resolve the start and end range keys from the options: a single
%% 'keymap' applies to both ends, otherwise 'startkey'/'start_keymap' and
%% 'endkey'/'end_keymap' are resolved independently via map_keymap/3.
-spec get_start_end_keys(cb_context:context(), options()) -> {api_range_key(), api_range_key()}.
get_start_end_keys(Context, Options) ->
    case props:get_value('keymap', Options) of
        'undefined' ->
            {map_keymap(Context, Options, props:get_first_defined(['startkey', 'start_keymap'], Options))
            ,map_keymap(Context, Options, props:get_first_defined(['endkey', 'end_keymap'], Options))
            };
        KeyMap ->
            {map_keymap(Context, Options, KeyMap)
            ,map_keymap(Context, Options, KeyMap)
            }
    end.
%% @doc Resolve a keymap into an actual range key: arity-1 funs receive the
%% request context, arity-2 funs receive the options and the context, and
%% any other term is already a literal key.
-spec map_keymap(cb_context:context(), options(), keymap()) -> api_range_key().
map_keymap(Context, _Options, Mapper) when is_function(Mapper, 1) ->
    Mapper(Context);
map_keymap(Context, Options, Mapper) when is_function(Mapper, 2) ->
    Mapper(Options, Context);
map_keymap(_Context, _Options, RangeKey) -> RangeKey.
%% @doc Build the pair of timestamp-to-range-key functions from the
%% options: a single 'range_keymap' applies to both ends, otherwise the
%% start and end keymaps are resolved independently.
-spec get_range_key_maps(options()) -> {range_keymap_fun(), range_keymap_fun()}.
get_range_key_maps(Options) ->
    case props:get_value('range_keymap', Options) of
        'undefined' ->
            {map_range_keymap(props:get_first_defined(['startkey', 'range_start_keymap'], Options))
            ,map_range_keymap(props:get_first_defined(['endkey', 'range_end_keymap'], Options))
            };
        KeyMap -> {map_range_keymap(KeyMap), map_range_keymap(KeyMap)}
    end.
%% @doc Build a fun that turns a timestamp into a view range key.
%% 'nil' suppresses the key entirely; 'undefined' passes the timestamp
%% through unchanged; a binary/integer/list prefix is prepended to the
%% timestamp; a custom arity-1 fun is used as-is.
-spec map_range_keymap(range_keymap()) -> range_keymap_fun().
map_range_keymap('nil') ->
    fun(_Ts) -> 'undefined' end;
map_range_keymap('undefined') ->
    fun kz_term:identity/1;
map_range_keymap(['undefined']) ->
    fun(Ts) -> [Ts] end;
map_range_keymap(Prefix) when is_binary(Prefix); is_integer(Prefix) ->
    fun(Ts) -> [Prefix, Ts] end;
map_range_keymap(Prefix) when is_list(Prefix) ->
    fun(Ts) -> Prefix ++ [Ts] end;
map_range_keymap(Fun) when is_function(Fun, 1) ->
    Fun.
|
e1128e2318e0008bd96897405a737f7074aa4a0d98f586d96728cb787d814ae2 | vimus/libmpd-haskell | Util.hs | {-# LANGUAGE OverloadedStrings #-}
-- | Module : Network.MPD.Util
-- Copyright : (c) 2005-2009, Joachim Fasting 2010
-- License : MIT (see LICENSE)
-- Maintainer : Joachim Fasting <>
-- Stability : alpha
--
-- Utilities.
module Network.MPD.Util (
parseDate, parseIso8601, formatIso8601, parseNum, parseFrac,
parseBool, parseSingle, showBool, breakChar, parseTriple,
toAssoc, toAssocList, splitGroups, read
) where
import Control.Arrow
import Data.Time.Format (ParseTime, parseTimeM, FormatTime, formatTime)
import Data.Time.Format (defaultTimeLocale)
import qualified Prelude
import Prelude hiding (break, take, drop, dropWhile, read)
import Data.ByteString.Char8 (break, drop, dropWhile, ByteString)
import qualified Data.ByteString.UTF8 as UTF8
import Data.String
import Control.Applicative
import qualified Data.Attoparsec.ByteString.Char8 as A
-- | Like 'Prelude.read', but works with 'ByteString'.
read :: Read a => ByteString -> a
-- NOTE(review): assumes the ByteString decodes as UTF-8; UTF8.toString
-- replaces invalid bytes rather than failing.
read = Prelude.read . UTF8.toString
-- Break a string by character, removing the separator.
-- The separator char itself is dropped from the second component.
breakChar :: Char -> ByteString -> (ByteString, ByteString)
breakChar c = second (drop 1) . break (== c)
-- Parse a date value.
-- > parseDate "2008"       = Just 2008
-- > parseDate "2008-03-01" = Just 2008
parseDate :: ByteString -> Maybe Int
parseDate = parseMaybe p
    where
        -- keep the leading integer (the year); consume and discard any
        -- trailing "-MM-DD" digits so endOfInput in parseMaybe still succeeds
        p = A.decimal <* A.skipMany (A.char '-' <|> A.digit)
-- Parse a date in ISO 8601 format.
parseIso8601 :: (ParseTime t) => ByteString -> Maybe t
parseIso8601 = parseTimeM True defaultTimeLocale iso8601Format . UTF8.toString
-- Render a time in the same ISO 8601 format accepted by parseIso8601.
formatIso8601 :: FormatTime t => t -> String
formatIso8601 = formatTime defaultTimeLocale iso8601Format
-- "%FT%TZ" == YYYY-MM-DDTHH:MM:SSZ
iso8601Format :: String
iso8601Format = "%FT%TZ"
-- Parse a positive or negative integer value, returning 'Nothing' on failure.
parseNum :: (Read a, Integral a) => ByteString -> Maybe a
-- A.signed accepts an optional leading '+' or '-'.
parseNum = parseMaybe (A.signed A.decimal)
-- Parse C style floating point value, returning 'Nothing' on failure.
parseFrac :: (Fractional a, Read a) => ByteString -> Maybe a
parseFrac = parseMaybe p
    where
        -- map the C-style spellings "nan"/"inf"/"-inf" onto their Haskell
        -- counterparts before falling back to a plain rational number
        p = A.string "nan" *> pure (Prelude.read "NaN")
            <|> A.string "inf" *> pure (Prelude.read "Infinity")
            <|> A.string "-inf" *> pure (Prelude.read "-Infinity")
            <|> A.rational
-- Inverts 'parseBool'.
-- Booleans are encoded as "1"/"0" (matching parseBool).
showBool :: IsString a => Bool -> a
-- FIXME: can we change the type to (Bool -> ByteString)?
-- not without also changing Arg to use bytestrings rather than plain String.
showBool x = if x then "1" else "0"
-- Parse a boolean response value.
parseBool :: ByteString -> Maybe Bool
parseBool = parseMaybe p
    where
        -- exactly "1" or "0"; anything else yields Nothing
        p = A.char '1' *> pure True <|> A.char '0' *> pure False
-- Parse a boolean response value for "single" mode, where "oneshot" also counts as true.
parseSingle :: ByteString -> Maybe Bool
parseSingle = parseMaybe p
    where
        -- like parseBool, but the literal "oneshot" maps to True as well
        p = A.char '1' *> pure True
            <|> A.char '0' *> pure False
            <|> A.string "oneshot" *> pure True
-- Break a string into triple.
-- All three fields must parse with `f` for a Just result.
parseTriple :: Char -> (ByteString -> Maybe a) -> ByteString -> Maybe (a, a, a)
parseTriple c f s = let (u, u') = breakChar c s
                        (v, w) = breakChar c u' in
                    case (f u, f v, f w) of
                        (Just a, Just b, Just c') -> Just (a, b, c')
                        _ -> Nothing
-- Break a string into a key-value pair, separating at the first ':'.
-- Leading spaces after the ':' are stripped from the value.
toAssoc :: ByteString -> (ByteString, ByteString)
toAssoc = second (dropWhile (== ' ') . drop 1) . break (== ':')
-- toAssoc applied to each element.
toAssocList :: [ByteString] -> [(ByteString, ByteString)]
toAssocList = map toAssoc
-- Takes an association list with recurring keys and groups each cycle of keys
-- with their values together. There can be several keys that begin cycles,
-- (the elements of the first parameter).
splitGroups :: [ByteString] -> [(ByteString, ByteString)] -> [[(ByteString, ByteString)]]
splitGroups groupHeads = go
    where
        go [] = []
        go (x:xs) =
            let
                -- everything up to the next group head belongs to x's group
                (ys, zs) = Prelude.break isGroupHead xs
            in
                (x:ys) : go zs
        -- a pair starts a new group when its key is one of groupHeads
        isGroupHead = (`elem` groupHeads) . fst
-- A helper for running a Parser, turning errors into Nothing.
-- The parser must consume the entire input (endOfInput), so trailing
-- garbage makes the whole parse fail.
parseMaybe :: A.Parser a -> ByteString -> Maybe a
parseMaybe p s = either (const Nothing) Just $ A.parseOnly (p <* A.endOfInput) s
| null | https://raw.githubusercontent.com/vimus/libmpd-haskell/1ec02deba33ce2a16012d8f0954e648eb4b5c485/src/Network/MPD/Util.hs | haskell | # LANGUAGE OverloadedStrings #
| Module : Network.MPD.Util
Maintainer : Joachim Fasting <>
Stability : alpha
Break a string by character, removing the separator.
Parse C style floating point value, returning 'Nothing' on failure.
Inverts 'parseBool'.
FIXME: can we change the type to (Bool -> ByteString)?
not without also changing Arg to use bytestrings rather than plain String.
Break a string into triple.
Takes an association list with recurring keys and groups each cycle of keys
with their values together. There can be several keys that begin cycles, | Copyright : ( c ) 2005 - 2009 , Joachim Fasting 2010
License : MIT ( see LICENSE )
Utilities .
module Network.MPD.Util (
parseDate, parseIso8601, formatIso8601, parseNum, parseFrac,
parseBool, parseSingle, showBool, breakChar, parseTriple,
toAssoc, toAssocList, splitGroups, read
) where
import Control.Arrow
import Data.Time.Format (ParseTime, parseTimeM, FormatTime, formatTime)
import Data.Time.Format (defaultTimeLocale)
import qualified Prelude
import Prelude hiding (break, take, drop, dropWhile, read)
import Data.ByteString.Char8 (break, drop, dropWhile, ByteString)
import qualified Data.ByteString.UTF8 as UTF8
import Data.String
import Control.Applicative
import qualified Data.Attoparsec.ByteString.Char8 as A
| Like Prelude.read , but works with ByteString .
read :: Read a => ByteString -> a
read = Prelude.read . UTF8.toString
breakChar :: Char -> ByteString -> (ByteString, ByteString)
breakChar c = second (drop 1) . break (== c)
Parse a date value .
> parseDate " 2008 " = Just 2008
> parseDate " 2008 - 03 - 01 " = Just 2008
parseDate :: ByteString -> Maybe Int
parseDate = parseMaybe p
where
p = A.decimal <* A.skipMany (A.char '-' <|> A.digit)
Parse date in iso 8601 format
parseIso8601 :: (ParseTime t) => ByteString -> Maybe t
parseIso8601 = parseTimeM True defaultTimeLocale iso8601Format . UTF8.toString
formatIso8601 :: FormatTime t => t -> String
formatIso8601 = formatTime defaultTimeLocale iso8601Format
iso8601Format :: String
iso8601Format = "%FT%TZ"
Parse a positive or negative integer value , returning ' Nothing ' on failure .
parseNum :: (Read a, Integral a) => ByteString -> Maybe a
parseNum = parseMaybe (A.signed A.decimal)
parseFrac :: (Fractional a, Read a) => ByteString -> Maybe a
parseFrac = parseMaybe p
where
p = A.string "nan" *> pure (Prelude.read "NaN")
<|> A.string "inf" *> pure (Prelude.read "Infinity")
<|> A.string "-inf" *> pure (Prelude.read "-Infinity")
<|> A.rational
showBool :: IsString a => Bool -> a
showBool x = if x then "1" else "0"
Parse a boolean response value .
parseBool :: ByteString -> Maybe Bool
parseBool = parseMaybe p
where
p = A.char '1' *> pure True <|> A.char '0' *> pure False
Parse a boolean response value .
parseSingle :: ByteString -> Maybe Bool
parseSingle = parseMaybe p
where
p = A.char '1' *> pure True
<|> A.char '0' *> pure False
<|> A.string "oneshot" *> pure True
parseTriple :: Char -> (ByteString -> Maybe a) -> ByteString -> Maybe (a, a, a)
parseTriple c f s = let (u, u') = breakChar c s
(v, w) = breakChar c u' in
case (f u, f v, f w) of
(Just a, Just b, Just c') -> Just (a, b, c')
_ -> Nothing
Break a string into a key - value pair , separating at the first ' : ' .
toAssoc :: ByteString -> (ByteString, ByteString)
toAssoc = second (dropWhile (== ' ') . drop 1) . break (== ':')
toAssocList :: [ByteString] -> [(ByteString, ByteString)]
toAssocList = map toAssoc
( the elements of the first parameter ) .
splitGroups :: [ByteString] -> [(ByteString, ByteString)] -> [[(ByteString, ByteString)]]
splitGroups groupHeads = go
where
go [] = []
go (x:xs) =
let
(ys, zs) = Prelude.break isGroupHead xs
in
(x:ys) : go zs
isGroupHead = (`elem` groupHeads) . fst
A helper for running a Parser , turning errors into Nothing .
parseMaybe :: A.Parser a -> ByteString -> Maybe a
parseMaybe p s = either (const Nothing) Just $ A.parseOnly (p <* A.endOfInput) s
|
945c1e388ad0a2b2baf758e7c53169100970a92ec12dc2c3c05bd612f0e52e90 | rjnw/sham | ctxt.rkt | #lang racket
(require "utils.rkt")
(require sham/sam/runtime)
(provide (all-defined-out))
;; base binding: a name associated with a value in the compiler environment
(struct env-var [name val] #:transparent)
;; a local variable bound by bind or where
(struct env-bind-var env-var [] #:transparent)
(struct env-where-var env-var [] #:transparent)
;; val is wrapped in a function which takes a ctxt to compile
(struct env-lazy-var env-var [ast] #:transparent)
;; primitive value which should compile an app primitively
(struct env-primitive-var env-var [] #:transparent)
(struct env-prelude-var env-var [] #:transparent) ;; prelude value
;; lazy compile returns a specialized value for specific pargs and gensym'd name
(struct env-special-var env-var [type oname otype pargs] #:transparent)
;; First element of `env-vars` projected through `getter`, or #f when empty.
(define (maybe-first-env-vars env-vars (getter env-var-val))
  (match env-vars
    [(list) #f]
    [(cons first-var _) (getter first-var)]))
;; All bindings in `evs` whose name matches `name` (compared with id-datum=?).
(define (lookup-env-vars evs name)
  (filter (λ (v) (id-datum=? (env-var-name v) name)) evs))
;; Builds a lookup procedure over one section of the environment: `f`
;; selects the env list (env-val, env-typeof, ...) and `getter` projects
;; the found env-var. The resulting procedure accepts either a cc (whose
;; env is extracted) or an env directly.
(define ((lookup-in-env f (getter env-var-val)) c/e name)
  (define env (cond
                [(cc? c/e) (cc-env c/e)]
                [(env? c/e) c/e]
                [else (error 'sham/cryptol "unknown ctxt/env ~a" c/e)]))
  ;; (debug (printf "looking-in-env: ~a ~a\n" f name) (print-env env))
  (maybe-first-env-vars (lookup-env-vars (f env) name) getter))
;; Print a one-line summary of an env-var, dispatching on its subtype;
;; primitive and prelude vars print nothing. Returns the env-var.
(define (print-ev ev)
  (match ev
    [(env-primitive-var n v) (void)]
    [(env-prelude-var n v) (void)]
    [(env-special-var name val type oname otype pargs)
     (printf "~a<~a:~a>:~a, " name oname (map pretty-cry pargs) (pretty-cry otype))]
    [(env-lazy-var name val ast) (printf "~a%, " name)]
    [(env-var name val) (printf "~a=~a$\n" name (pretty-cry val))]
    ;; [(env-primitive-var name) (printf "~a:~a\n" name (pretty-cry type))]
    ;; [(env-var name #f)
    ;;  (printf "~a:~a\n" name (pretty-cry type))]
    ;; [(env-var name #f) (printf " ~a?" name)]
    )
  ev)
;; Print each env-var in order; returns the list unchanged.
(define (print-evs evs) (for-each print-ev evs) evs)
;; type stores name type for val and kind for type in bind
;; compiler environment: four association lists of env-vars
;;   val: value bindings, typeof: types of values,
;;   type: type bindings, tvar: type variables
(struct env [val typeof type tvar]
  #:methods gen:custom-write
  [(define (write-proc val port mode)
     (fprintf port "<env>")
     #;(parameterize ([current-output-port port])
         (print-env val)))])
;; Dump all four sections of an env; returns the env.
(define (print-env e)
  (match-define (env vs tos ts tvs) e)
  (printf " vals:\n") (print-evs vs)
  (printf " typofs:\n") (print-evs tos)
  (printf " type:\n") (print-evs ts)
  (printf " tvars:\n") (print-evs tvs)
  e)
;; Section-specific lookups over a ctxt or env (built via lookup-in-env);
;; lookup-val-env returns the whole env-var, the rest return its value.
(define lookup-val-env (lookup-in-env env-val identity))
(define lookup-val (lookup-in-env env-val))
(define lookup-typeof (lookup-in-env env-typeof))
(define lookup-type (lookup-in-env env-type))
(define lookup-tvar (lookup-in-env env-tvar))
;; An environment with no bindings in any section.
(define (empty-env) (env '() '() '() '()))
;; Prepend new bindings onto the corresponding sections of the env carried
;; by `c/e` (a cc, an env, or anything else for a fresh empty env). Each
;; keyword argument may be a single env-var or a list of them. For a cc the
;; updated env is stored back via update-context!.
(define (update-env c/e
                    #:val (vals '())
                    #:typeof (typeofs '())
                    #:type (types '())
                    #:tvar (tvars '()))
  ;; (printf "update-env: ~a ~a ~a ~a\n" vals typeofs types tvars)
  (define (combine news old)
    (append (if (env-var? news) (list news) news) old))
  (define (doe e)
    (match-define (env ovals otypeofs otypes otvars) e)
    (env (combine vals ovals)
         (combine typeofs otypeofs)
         (combine types otypes)
         (combine tvars otvars)))
  (cond [(cc? c/e) (update-context! c/e #:env (doe (cc-env c/e)))]
        [(env? c/e) (doe c/e)]
        [else (doe (empty-env))]))
;; context keeps track of current type, poly type vars currently active, result value and lifts
;; compilation context: current type, environment, active polymorphic type
;; vars, result value, an inner context, and a box of lifted definitions
(struct cc [type env pvars res cctxt lifts]
  #:methods gen:custom-write
  [(define (write-proc val port mode)
     (fprintf port "<ctxt>")
     #;(parameterize ([current-output-port port])
         (print-cc val)))])
;; Dump a cc (type, pvars, res, lift count, env); returns the cc.
(define (print-cc c)
  (match-define (cc t env pvars res icc lifts) c)
  (printf "ctxt:\n type: ~a\n pvars: ~a\n res: ~a\n #lifts: ~a\n" t pvars res (length (unbox lifts)))
  (printf " env:\n")
  (print-env env)
  c)
;; A context with nothing set and an empty lift box.
(define (empty-context) (cc #f (empty-env) '() #f #f (box '())))
;; Functionally update fields of a cc, or build one from scratch when
;; `from` is not a cc. A #f keyword value means "keep the old field";
;; `res` uses void as its "not supplied" marker so #f itself can be stored.
;; New `lifts` are appended destructively into the existing lift box.
;; NOTE(review): for cctxt the old value wins ((or oc cctxt)) unlike the
;; other fields - confirm this asymmetry is intended.
(define (update-context! (from #f)
                         #:type (type #f)
                         #:env (env #f)
                         #:lifts (lifts '())
                         #:pvars (pvars #f)
                         #:res (res (void))
                         #:cc (cctxt #f))
  (cond
    [(cc? from)
     (match-define (cc t oe op os oc ol) from)
     (unless (empty? lifts) (set-box! ol (append lifts (unbox ol))))
     (cc (or type t) (or env oe) (or pvars op) (if (void? res) os res) (or oc cctxt) ol)]
    [else (cc type env pvars (if (void? res) #f res) cctxt (if (box? lifts) lifts (box lifts)))]))
;; Append `lfs` (flattened) onto the context's lift box; returns the context.
(define (add-lifts! c . lfs)
  (define lift-box (cc-lifts c))
  (set-box! lift-box (append (unbox lift-box) (flatten lfs)))
  c)
| null | https://raw.githubusercontent.com/rjnw/sham/6e0524b1eb01bcda83ae7a5be6339da4257c6781/sham-examples/sham/cryptol/compiler/ctxt.rkt | racket | a local variable bound by bind or where
primitive value which should compile an app primitively
prelude value
lazy compile returns a specialized value for specific pargs and gensym'd name
[(env-var name #f)
[(env-var name #f) (printf " ~a?" name)]
type stores name type for val and kind for type in bind
(parameterize ([current-output-port port])
context keeps track of current type, poly type vars currently active, result value and lifts
(parameterize ([current-output-port port]) | #lang racket
(require "utils.rkt")
(require sham/sam/runtime)
(provide (all-defined-out))
(struct env-var [name val] #:transparent)
(struct env-bind-var env-var [] #:transparent)
(struct env-where-var env-var [] #:transparent)
val is wrapped in a function which takes and ctxt to compile
(struct env-lazy-var env-var [ast] #:transparent)
(struct env-primitive-var env-var [] #:transparent)
(struct env-special-var env-var [type oname otype pargs] #:transparent)
(define (maybe-first-env-vars env-vars (getter env-var-val))
(match env-vars
['() #f]
[(cons fst rst) (getter fst)]))
(define (lookup-env-vars evs name)
(define (is-val? v)
(id-datum=? (env-var-name v) name))
(filter is-val? evs))
(define ((lookup-in-env f (getter env-var-val)) c/e name)
(define env (cond
[(cc? c/e) (cc-env c/e)]
[(env? c/e) c/e]
[else (error 'sham/cryptol "unknown ctxt/env ~a" c/e)]))
( debug ( printf " looking - in - env : ~a ~a\n " f name ) ( print - env env ) )
(maybe-first-env-vars (lookup-env-vars (f env) name) getter))
(define (print-ev ev)
(match ev
[(env-primitive-var n v) (void)]
[(env-prelude-var n v) (void)]
[(env-special-var name val type oname otype pargs)
(printf "~a<~a:~a>:~a, " name oname (map pretty-cry pargs) (pretty-cry otype))]
[(env-lazy-var name val ast) (printf "~a%, " name)]
[(env-var name val) (printf "~a=~a$\n" name (pretty-cry val))]
[ ( env - primitive - var name ) ( printf " ~a:~a\n " name ( pretty - cry type ) ) ]
( printf " ~a:~a\n " name ( pretty - cry type ) ) ]
)
ev)
(define (print-evs evs) (for [(ev evs)] (print-ev ev)) evs)
(struct env [val typeof type tvar]
#:methods gen:custom-write
[(define (write-proc val port mode)
(fprintf port "<env>")
(print-env val)))])
(define (print-env e)
(match-define (env vs tos ts tvs) e)
(printf " vals:\n") (print-evs vs)
(printf " typofs:\n") (print-evs tos)
(printf " type:\n") (print-evs ts)
(printf " tvars:\n") (print-evs tvs)
e)
(define lookup-val-env (lookup-in-env env-val identity))
(define lookup-val (lookup-in-env env-val))
(define lookup-typeof (lookup-in-env env-typeof))
(define lookup-type (lookup-in-env env-type))
(define lookup-tvar (lookup-in-env env-tvar))
(define (empty-env) (env '() '() '() '()))
(define (update-env c/e
#:val (vals '())
#:typeof (typeofs '())
#:type (types '())
#:tvar (tvars '()))
( printf " update - env : ~a ~a ~a ~a\n " vals typeofs types tvars )
(define (combine news old)
(append (if (env-var? news) (list news) news) old))
(define (doe e)
(match-define (env ovals otypeofs otypes otvars) e)
(env (combine vals ovals)
(combine typeofs otypeofs)
(combine types otypes)
(combine tvars otvars)))
(cond [(cc? c/e) (update-context! c/e #:env (doe (cc-env c/e)))]
[(env? c/e) (doe c/e)]
[else (doe (empty-env))]))
(struct cc [type env pvars res cctxt lifts]
#:methods gen:custom-write
[(define (write-proc val port mode)
(fprintf port "<ctxt>")
(print-cc val)))])
(define (print-cc c)
(match-define (cc t env pvars res icc lifts) c)
(printf "ctxt:\n type: ~a\n pvars: ~a\n res: ~a\n #lifts: ~a\n" t pvars res (length (unbox lifts)))
(printf " env:\n")
(print-env env)
c)
(define (empty-context) (cc #f (empty-env) '() #f #f (box '())))
(define (update-context! (from #f)
#:type (type #f)
#:env (env #f)
#:lifts (lifts '())
#:pvars (pvars #f)
#:res (res (void))
#:cc (cctxt #f))
(cond
[(cc? from)
(match-define (cc t oe op os oc ol) from)
(unless (empty? lifts) (set-box! ol (append lifts (unbox ol))))
(cc (or type t) (or env oe) (or pvars op) (if (void? res) os res) (or oc cctxt) ol)]
[else (cc type env pvars (if (void? res) #f res) cctxt (if (box? lifts) lifts (box lifts)))]))
(define (add-lifts! c . lfs)
(define lifts (flatten lfs))
(set-box! (cc-lifts c) (append (unbox (cc-lifts c)) lifts))
c)
|
0c78deff46c12e0c8a3a32fbc31e52510436c679f1e9ec574567d34600a97b19 | techascent/tech.datatype | color_gradients.clj | (ns tech.v2.tensor.color-gradients
""
(:require [clojure.java.io :as io]
[tech.v2.tensor :as dtt]
[tech.v2.datatype :as dtype]
[tech.v2.datatype.functional :as dfn]
[tech.v2.datatype.unary-op :as unary-op]
[tech.v2.tensor.typecast :as tens-typecast]
[tech.v2.datatype.typecast :as typecast]
[clojure.edn :as edn]
[tech.parallel.for :as pfor]
[tech.libs.buffered-image :as bufimg])
(:import [java.awt.image BufferedImage]
[tech.v2.datatype ObjectReader]
[clojure.lang IFn]))
(def gradient-map (delay (-> (io/resource "gradients.edn")
(slurp)
(edn/read-string))))
(def gradient-tens (delay (-> (io/resource "gradients.png")
(bufimg/load)
(dtt/ensure-tensor))))
(defn- flp-close
[val desired & [error]]
(< (Math/abs (- (double val) (double desired)))
(double (or error 0.001))))
(defn gradient-name->gradient-line
[gradient-name invert-gradient? gradient-default-n]
(let [gradient-default-n (long gradient-default-n)
gradient-line
(cond
(keyword? gradient-name)
(let [src-gradient-info (get @gradient-map gradient-name)
_ (when-not src-gradient-info
(throw (Exception.
(format "Failed to find gradient %s"
gradient-name))))
gradient-tens @gradient-tens]
(dtt/select gradient-tens
(:tensor-index src-gradient-info)
:all :all))
(dtt/tensor? gradient-name)
gradient-name
(instance? IFn gradient-name)
(dtt/->tensor
(->> (range gradient-default-n)
(map (fn [idx]
(let [p-val (/ (double idx)
(double gradient-default-n))
grad-val (gradient-name p-val)]
(when-not (= 3 (count grad-val))
(throw (Exception. (format
"Gradient fns must return bgr tuples:
function returned: %s"
grad-val))))
grad-val))))))
n-pixels (long (first (dtype/shape gradient-line)))]
;;Gradients are accessed potentially many many times so reversing it here
;;is often wise as opposed to inline reversing in the main loops.
(-> (if invert-gradient?
(-> (dtt/select gradient-line (range (dec n-pixels) -1 -1)
:all)
(dtype/copy! (dtt/reshape (dtype/make-container :typed-buffer :uint8
(dtype/ecount gradient-line))
[n-pixels 3])))
gradient-line)
(dtt/ensure-tensor))))
(defn colorize
"Apply a color gradient to a tensor returning an image. Takes A 1 or 2d tensor.
If data-min, data-max aren't provided they are found in the data.
A buffered image is returned.
src-tens - Source tensor whose shape determines the shape of the final image.
gradient-name - may be a keyword, in which it must be a key in @gradient-map and
these gradients come from:
.
gradient-name may be a tensor of dimensions [n 3].
gradient-name may be a function that takes a value from 0-1 and returns a tuple
of length 3.
Additional arguments:
:data-min :data-max - If provided then the data isn't scanned for min and max. If min
is equal to 0 and max is equal to 1.0 then the data doesn't need to be normalized.
data ranges are clamped to min and max.
:alpha? - If true, an image with an alpha channel is returned. This is useful for
when your data has NAN or INFs as in that case the returned image is transparent
in those sections.
:check-invalid? - If true then the data is scanned for NAN or INF's. Used in
conjunction with :alpha?
:invert-gradient? - When true, reverses the provided gradient.
:gradient-default-n - When an IFn is provided, it is quantized over n steps."
[src-tens gradient-name & {:keys [data-min data-max
alpha?
check-invalid?
invert-gradient?
gradient-default-n]
:or {gradient-default-n 200}}]
(let [src-tens (dtt/ensure-tensor src-tens)
img-shape (dtype/shape src-tens)
{data-min :min
data-max :max
valid-indexes :valid-indexes
src-reader :src-reader}
(if (and data-min data-max)
{:min data-min
:max data-max
:src-reader src-tens}
;;If we have to min/max check then we have to filter out invalid indexes.
;;In that case we prefilter the data and trim it. We always check for
;;invalid data in the main loops below and this is faster than
;;pre-checking and indexing.
(let [valid-indexes (when check-invalid?
(dfn/argfilter dfn/valid?
(dtype/->reader src-tens
:float64)))
src-reader (if (and valid-indexes
(not= (dtype/ecount valid-indexes)
(dtype/ecount src-tens)))
(dtype/indexed-reader valid-indexes src-tens)
src-tens)]
(merge
(dfn/descriptive-stats src-reader [:min :max])
{:src-reader src-reader
:valid-indexes valid-indexes})))
n-pixels (if valid-indexes
(dtype/ecount valid-indexes)
(dtype/ecount src-reader))
data-min (double data-min)
data-max (double data-max)
_ (when (or (dfn/invalid? data-min)
(dfn/invalid? data-max))
(throw (Exception. "NAN or INF in src data detected!")))
data-range (- data-max data-min)
src-reader (if-not (and (flp-close 0.0 data-min)
(flp-close 1.0 data-max))
(unary-op/unary-reader :float64
(-> (- x data-min)
(/ data-range))
src-reader)
src-reader)
src-reader (typecast/datatype->reader :float64 src-reader)
img-type (if alpha?
:byte-abgr
:byte-bgr)
res-image (case (count img-shape)
2 (bufimg/new-image (first img-shape) (second img-shape) img-type)
1 (bufimg/new-image 1 (first img-shape) img-type))
;;Flatten out src-tens and res-tens and make them readers
n-channels (long (if alpha? 4 3))
res-tens (dtt/reshape res-image [n-pixels n-channels])
res-tens (tens-typecast/datatype->tensor-writer
:uint8 res-tens)
gradient-line (gradient-name->gradient-line gradient-name invert-gradient?
gradient-default-n)
n-gradient-increments (long (first (dtype/shape gradient-line)))
gradient-line (tens-typecast/datatype->tensor-reader
:uint8
gradient-line)
line-last-idx (double (dec n-gradient-increments))
n-pixels (long (if valid-indexes
(dtype/ecount valid-indexes)
n-pixels))]
(if alpha?
(pfor/parallel-for
idx
n-pixels
(let [src-val (.read src-reader idx)]
(when (Double/isFinite src-val)
(let [p-value (min 1.0 (max 0.0 src-val))
line-idx (long (Math/round (* p-value line-last-idx)))]
alpha channel first
(.write2d res-tens idx 0 255)
(.write2d res-tens idx 1 (.read2d gradient-line line-idx 0))
(.write2d res-tens idx 2 (.read2d gradient-line line-idx 1))
(.write2d res-tens idx 3 (.read2d gradient-line line-idx 2))))))
(pfor/parallel-for
idx
n-pixels
(let [src-val (.read src-reader idx)]
(when (Double/isFinite src-val)
(let [p-value (min 1.0 (max 0.0 (.read src-reader idx)))
line-idx (long (Math/round (* p-value line-last-idx)))]
(.write2d res-tens idx 0 (.read2d gradient-line line-idx 0))
(.write2d res-tens idx 1 (.read2d gradient-line line-idx 1))
(.write2d res-tens idx 2 (.read2d gradient-line line-idx 2)))))))
res-image))
(defn colorize->clj
"Same as colorize but returns a ND sequence of [b g r] persistent vectors.
For options, see documentation in colorize."
[src-tens gradient-name & options]
(when (seq src-tens)
(let [src-dims (dtype/shape src-tens)]
(-> (apply colorize src-tens gradient-name options)
In case of 1d . colorize always returns buffered image which is always
;;2d.
(dtt/reshape (concat src-dims [3]))
(dtt/->jvm)))))
(defn- update-or-append-gradient
[img-fname gradient-name]
(let [png-img (bufimg/load img-fname)
[height width n-chans] (dtype/shape png-img)
;;ensure known color palette and such
png-img (bufimg/resize png-img 260 1 {:dst-img-type :byte-bgr})
existing-map @gradient-map
existing-tens @gradient-tens
new-entry (get existing-map gradient-name
{:tensor-index (count existing-map)
:gradient-shape [260 3]})
existing-map (assoc existing-map gradient-name new-entry)
new-img (bufimg/new-image (count existing-map) 260 :byte-bgr)
img-tens (dtt/ensure-tensor new-img)]
(doseq [[grad-n {:keys [tensor-index gradient-shape]}] existing-map]
(dtype/copy!
(if (= gradient-name grad-n)
(dtt/select png-img 0 :all :all)
(dtt/select existing-tens tensor-index :all :all))
(dtt/select img-tens tensor-index :all :all)))
(spit "resources/gradients.edn" existing-map)
(bufimg/save! new-img "resources/gradients.png")
:ok))
(comment
(require '[clojure.java.io :as io])
(io/make-parents "gradient-demo/test.txt")
(def test-src-tens (dtt/->tensor (repeat 128 (range 0 512))))
(time (doseq [grad-name (keys @gradient-map)]
(bufimg/save! (colorize test-src-tens grad-name)
"PNG"
(format "gradient-demo/%s.png" (name grad-name)))
))
(defn bad-range
[start end]
(->> (range start end)
(map (fn [item]
(if (> (rand) 0.5)
Double/NaN
item)))))
Sometimes data has NAN 's or INF 's
(def test-nan-tens (dtt/->tensor (repeatedly 128 #(bad-range 0 512))))
(colorize test-nan-tens :temperature-map
:alpha? true
:check-invalid? true
:invert-gradient? true
:data-min 0
:data-max 512)
(bufimg/save! (colorize test-nan-tens :temperature-map
:alpha? true
:check-invalid? true
:invert-gradient? true
:data-min 0
:data-max 512)
"PNG"
(format "gradient-demo/%s-nan.png"
(name :temperature-map)))
(dotimes [iter 100]
(time (doseq [grad-name (keys @gradient-map)]
(comment (bufimg/save! (colorize test-nan-tens grad-name
:alpha? true
:check-invalid? true
:data-min 0
:data-max 512)
"PNG"
(format "gradient-demo/%s-nan.png"
(name grad-name))))
(colorize test-nan-tens grad-name
:alpha? true
:check-invalid? true
:invert-gradient? true
:data-min 0
:data-max 512))))
(def custom-gradient-tens (dtt/->tensor
(->> (range 100)
(map (fn [idx]
(let [p-value (/ (double idx)
(double 100))]
[(* 255 p-value) 0 (* (- 1.0 p-value)
255)]))))))
(bufimg/save! (colorize test-src-tens custom-gradient-tens
:invert-gradient? true)
"PNG"
"gradient-demo/custom-tensor-gradient.png")
(defn custom-gradient-fn
[^double p-value]
(let [one-m-p (- 1.0 p-value)]
[(* 255 one-m-p) (* 255 p-value) (* 255 one-m-p)]))
(bufimg/save! (colorize test-src-tens custom-gradient-fn
:invert-gradient? true)
"PNG"
"gradient-demo/custom-ifn-gradient.png")
)
| null | https://raw.githubusercontent.com/techascent/tech.datatype/8cc83d771d9621d580fd5d4d0625005bd7ab0e0c/src/tech/v2/tensor/color_gradients.clj | clojure | Gradients are accessed potentially many many times so reversing it here
is often wise as opposed to inline reversing in the main loops.
If we have to min/max check then we have to filter out invalid indexes.
In that case we prefilter the data and trim it. We always check for
invalid data in the main loops below and this is faster than
pre-checking and indexing.
Flatten out src-tens and res-tens and make them readers
2d.
ensure known color palette and such | (ns tech.v2.tensor.color-gradients
""
(:require [clojure.java.io :as io]
[tech.v2.tensor :as dtt]
[tech.v2.datatype :as dtype]
[tech.v2.datatype.functional :as dfn]
[tech.v2.datatype.unary-op :as unary-op]
[tech.v2.tensor.typecast :as tens-typecast]
[tech.v2.datatype.typecast :as typecast]
[clojure.edn :as edn]
[tech.parallel.for :as pfor]
[tech.libs.buffered-image :as bufimg])
(:import [java.awt.image BufferedImage]
[tech.v2.datatype ObjectReader]
[clojure.lang IFn]))
(def gradient-map (delay (-> (io/resource "gradients.edn")
(slurp)
(edn/read-string))))
(def gradient-tens (delay (-> (io/resource "gradients.png")
(bufimg/load)
(dtt/ensure-tensor))))
(defn- flp-close
[val desired & [error]]
(< (Math/abs (- (double val) (double desired)))
(double (or error 0.001))))
(defn gradient-name->gradient-line
[gradient-name invert-gradient? gradient-default-n]
(let [gradient-default-n (long gradient-default-n)
gradient-line
(cond
(keyword? gradient-name)
(let [src-gradient-info (get @gradient-map gradient-name)
_ (when-not src-gradient-info
(throw (Exception.
(format "Failed to find gradient %s"
gradient-name))))
gradient-tens @gradient-tens]
(dtt/select gradient-tens
(:tensor-index src-gradient-info)
:all :all))
(dtt/tensor? gradient-name)
gradient-name
(instance? IFn gradient-name)
(dtt/->tensor
(->> (range gradient-default-n)
(map (fn [idx]
(let [p-val (/ (double idx)
(double gradient-default-n))
grad-val (gradient-name p-val)]
(when-not (= 3 (count grad-val))
(throw (Exception. (format
"Gradient fns must return bgr tuples:
function returned: %s"
grad-val))))
grad-val))))))
n-pixels (long (first (dtype/shape gradient-line)))]
(-> (if invert-gradient?
(-> (dtt/select gradient-line (range (dec n-pixels) -1 -1)
:all)
(dtype/copy! (dtt/reshape (dtype/make-container :typed-buffer :uint8
(dtype/ecount gradient-line))
[n-pixels 3])))
gradient-line)
(dtt/ensure-tensor))))
(defn colorize
"Apply a color gradient to a tensor returning an image. Takes A 1 or 2d tensor.
If data-min, data-max aren't provided they are found in the data.
A buffered image is returned.
src-tens - Source tensor whose shape determines the shape of the final image.
gradient-name - may be a keyword, in which it must be a key in @gradient-map and
these gradients come from:
.
gradient-name may be a tensor of dimensions [n 3].
gradient-name may be a function that takes a value from 0-1 and returns a tuple
of length 3.
Additional arguments:
:data-min :data-max - If provided then the data isn't scanned for min and max. If min
is equal to 0 and max is equal to 1.0 then the data doesn't need to be normalized.
data ranges are clamped to min and max.
:alpha? - If true, an image with an alpha channel is returned. This is useful for
when your data has NAN or INFs as in that case the returned image is transparent
in those sections.
:check-invalid? - If true then the data is scanned for NAN or INF's. Used in
conjunction with :alpha?
:invert-gradient? - When true, reverses the provided gradient.
:gradient-default-n - When an IFn is provided, it is quantized over n steps."
[src-tens gradient-name & {:keys [data-min data-max
alpha?
check-invalid?
invert-gradient?
gradient-default-n]
:or {gradient-default-n 200}}]
(let [src-tens (dtt/ensure-tensor src-tens)
img-shape (dtype/shape src-tens)
{data-min :min
data-max :max
valid-indexes :valid-indexes
src-reader :src-reader}
(if (and data-min data-max)
{:min data-min
:max data-max
:src-reader src-tens}
(let [valid-indexes (when check-invalid?
(dfn/argfilter dfn/valid?
(dtype/->reader src-tens
:float64)))
src-reader (if (and valid-indexes
(not= (dtype/ecount valid-indexes)
(dtype/ecount src-tens)))
(dtype/indexed-reader valid-indexes src-tens)
src-tens)]
(merge
(dfn/descriptive-stats src-reader [:min :max])
{:src-reader src-reader
:valid-indexes valid-indexes})))
n-pixels (if valid-indexes
(dtype/ecount valid-indexes)
(dtype/ecount src-reader))
data-min (double data-min)
data-max (double data-max)
_ (when (or (dfn/invalid? data-min)
(dfn/invalid? data-max))
(throw (Exception. "NAN or INF in src data detected!")))
data-range (- data-max data-min)
src-reader (if-not (and (flp-close 0.0 data-min)
(flp-close 1.0 data-max))
(unary-op/unary-reader :float64
(-> (- x data-min)
(/ data-range))
src-reader)
src-reader)
src-reader (typecast/datatype->reader :float64 src-reader)
img-type (if alpha?
:byte-abgr
:byte-bgr)
res-image (case (count img-shape)
2 (bufimg/new-image (first img-shape) (second img-shape) img-type)
1 (bufimg/new-image 1 (first img-shape) img-type))
n-channels (long (if alpha? 4 3))
res-tens (dtt/reshape res-image [n-pixels n-channels])
res-tens (tens-typecast/datatype->tensor-writer
:uint8 res-tens)
gradient-line (gradient-name->gradient-line gradient-name invert-gradient?
gradient-default-n)
n-gradient-increments (long (first (dtype/shape gradient-line)))
gradient-line (tens-typecast/datatype->tensor-reader
:uint8
gradient-line)
line-last-idx (double (dec n-gradient-increments))
n-pixels (long (if valid-indexes
(dtype/ecount valid-indexes)
n-pixels))]
(if alpha?
(pfor/parallel-for
idx
n-pixels
(let [src-val (.read src-reader idx)]
(when (Double/isFinite src-val)
(let [p-value (min 1.0 (max 0.0 src-val))
line-idx (long (Math/round (* p-value line-last-idx)))]
alpha channel first
(.write2d res-tens idx 0 255)
(.write2d res-tens idx 1 (.read2d gradient-line line-idx 0))
(.write2d res-tens idx 2 (.read2d gradient-line line-idx 1))
(.write2d res-tens idx 3 (.read2d gradient-line line-idx 2))))))
(pfor/parallel-for
idx
n-pixels
(let [src-val (.read src-reader idx)]
(when (Double/isFinite src-val)
(let [p-value (min 1.0 (max 0.0 (.read src-reader idx)))
line-idx (long (Math/round (* p-value line-last-idx)))]
(.write2d res-tens idx 0 (.read2d gradient-line line-idx 0))
(.write2d res-tens idx 1 (.read2d gradient-line line-idx 1))
(.write2d res-tens idx 2 (.read2d gradient-line line-idx 2)))))))
res-image))
(defn colorize->clj
"Same as colorize but returns a ND sequence of [b g r] persistent vectors.
For options, see documentation in colorize."
[src-tens gradient-name & options]
(when (seq src-tens)
(let [src-dims (dtype/shape src-tens)]
(-> (apply colorize src-tens gradient-name options)
In case of 1d . colorize always returns buffered image which is always
(dtt/reshape (concat src-dims [3]))
(dtt/->jvm)))))
(defn- update-or-append-gradient
[img-fname gradient-name]
(let [png-img (bufimg/load img-fname)
[height width n-chans] (dtype/shape png-img)
png-img (bufimg/resize png-img 260 1 {:dst-img-type :byte-bgr})
existing-map @gradient-map
existing-tens @gradient-tens
new-entry (get existing-map gradient-name
{:tensor-index (count existing-map)
:gradient-shape [260 3]})
existing-map (assoc existing-map gradient-name new-entry)
new-img (bufimg/new-image (count existing-map) 260 :byte-bgr)
img-tens (dtt/ensure-tensor new-img)]
(doseq [[grad-n {:keys [tensor-index gradient-shape]}] existing-map]
(dtype/copy!
(if (= gradient-name grad-n)
(dtt/select png-img 0 :all :all)
(dtt/select existing-tens tensor-index :all :all))
(dtt/select img-tens tensor-index :all :all)))
(spit "resources/gradients.edn" existing-map)
(bufimg/save! new-img "resources/gradients.png")
:ok))
(comment
(require '[clojure.java.io :as io])
(io/make-parents "gradient-demo/test.txt")
(def test-src-tens (dtt/->tensor (repeat 128 (range 0 512))))
(time (doseq [grad-name (keys @gradient-map)]
(bufimg/save! (colorize test-src-tens grad-name)
"PNG"
(format "gradient-demo/%s.png" (name grad-name)))
))
(defn bad-range
[start end]
(->> (range start end)
(map (fn [item]
(if (> (rand) 0.5)
Double/NaN
item)))))
Sometimes data has NAN 's or INF 's
(def test-nan-tens (dtt/->tensor (repeatedly 128 #(bad-range 0 512))))
(colorize test-nan-tens :temperature-map
:alpha? true
:check-invalid? true
:invert-gradient? true
:data-min 0
:data-max 512)
(bufimg/save! (colorize test-nan-tens :temperature-map
:alpha? true
:check-invalid? true
:invert-gradient? true
:data-min 0
:data-max 512)
"PNG"
(format "gradient-demo/%s-nan.png"
(name :temperature-map)))
(dotimes [iter 100]
(time (doseq [grad-name (keys @gradient-map)]
(comment (bufimg/save! (colorize test-nan-tens grad-name
:alpha? true
:check-invalid? true
:data-min 0
:data-max 512)
"PNG"
(format "gradient-demo/%s-nan.png"
(name grad-name))))
(colorize test-nan-tens grad-name
:alpha? true
:check-invalid? true
:invert-gradient? true
:data-min 0
:data-max 512))))
(def custom-gradient-tens (dtt/->tensor
(->> (range 100)
(map (fn [idx]
(let [p-value (/ (double idx)
(double 100))]
[(* 255 p-value) 0 (* (- 1.0 p-value)
255)]))))))
(bufimg/save! (colorize test-src-tens custom-gradient-tens
:invert-gradient? true)
"PNG"
"gradient-demo/custom-tensor-gradient.png")
(defn custom-gradient-fn
[^double p-value]
(let [one-m-p (- 1.0 p-value)]
[(* 255 one-m-p) (* 255 p-value) (* 255 one-m-p)]))
(bufimg/save! (colorize test-src-tens custom-gradient-fn
:invert-gradient? true)
"PNG"
"gradient-demo/custom-ifn-gradient.png")
)
|
09a3b539d41c52995299fd3820629917a233463c26a42406232074dc7722cc32 | haskell-numerics/hmatrix | root.hs | -- root finding examples
import Numeric.GSL
import Numeric.LinearAlgebra
import Text.Printf(printf)
rosenbrock a b [x,y] = [ a*(1-x), b*(y-x^2) ]
test method = do
print method
let (s,p) = root method 1E-7 30 (rosenbrock 1 10) [-10,-5]
print s -- solution
disp' p -- evolution of the algorithm
jacobian a b [x,y] = [ [-a , 0]
, [-2*b*x, b] ]
testJ method = do
print method
let (s,p) = rootJ method 1E-7 30 (rosenbrock 1 10) (jacobian 1 10) [-10,-5]
print s
disp' p
disp' = putStrLn . format " " (printf "%.3f")
main = do
test Hybrids
test Hybrid
test DNewton
test Broyden
mapM_ testJ [HybridsJ .. GNewton]
| null | https://raw.githubusercontent.com/haskell-numerics/hmatrix/2694f776c7b5034d239acb5d984c489417739225/examples/root.hs | haskell | root finding examples
solution
evolution of the algorithm | import Numeric.GSL
import Numeric.LinearAlgebra
import Text.Printf(printf)
rosenbrock a b [x,y] = [ a*(1-x), b*(y-x^2) ]
test method = do
print method
let (s,p) = root method 1E-7 30 (rosenbrock 1 10) [-10,-5]
jacobian a b [x,y] = [ [-a , 0]
, [-2*b*x, b] ]
testJ method = do
print method
let (s,p) = rootJ method 1E-7 30 (rosenbrock 1 10) (jacobian 1 10) [-10,-5]
print s
disp' p
disp' = putStrLn . format " " (printf "%.3f")
main = do
test Hybrids
test Hybrid
test DNewton
test Broyden
mapM_ testJ [HybridsJ .. GNewton]
|
f1a3ab6d7f7d9bf2db3e6cbe857af485dfeb089708d2acf3b1934177932e39ae | panda-planner-dev/ipc2020-domains | p-16.lisp | (defproblem problem domain
(
(In_City O27 Ulm)
(In_City O28 Ulm)
(In_City Bibliothek Ulm)
(In_City Rathaus Ulm)
(At_Vehicle Pferd Rathaus)
(Connects James_Franck_Ring O27 O28)
(Connects Frauen_Strasse Bibliothek O27)
(Connects Albecker_Strasse Rathaus Bibliothek)
(Available James_Franck_Ring)
(Available Frauen_Strasse)
(Available Albecker_Strasse)
(Available Pferd)
(PV_Compatible Toshiba_Laptops Pferd)
(RV_Compatible James_Franck_Ring Pferd)
(RV_Compatible Frauen_Strasse Pferd)
(RV_Compatible Albecker_Strasse Pferd)
(At_Package Toshiba_Laptops O27)
(type_City Ulm)
(type_City_Location Bibliothek)
(type_City_Location O27)
(type_City_Location O28)
(type_City_Location Rathaus)
(type_Customer_Location Bibliothek)
(type_Customer_Location O27)
(type_Customer_Location O28)
(type_Customer_Location Rathaus)
(type_Equipment_Position Bibliothek)
(type_Equipment_Position O27)
(type_Equipment_Position O28)
(type_Equipment_Position Pferd)
(type_Equipment_Position Rathaus)
(type_Equipment_Position Ulm)
(type_Local_Road_Route Albecker_Strasse)
(type_Local_Road_Route Frauen_Strasse)
(type_Local_Road_Route James_Franck_Ring)
(type_Location Bibliothek)
(type_Location O27)
(type_Location O28)
(type_Location Rathaus)
(type_Location Ulm)
(type_Not_TCenter Bibliothek)
(type_Not_TCenter O27)
(type_Not_TCenter O28)
(type_Not_TCenter Rathaus)
(type_Object Pferd)
(type_Object Toshiba_Laptops)
(type_Package Toshiba_Laptops)
(type_Package_Storage_Position Bibliothek)
(type_Package_Storage_Position O27)
(type_Package_Storage_Position O28)
(type_Package_Storage_Position Pferd)
(type_Package_Storage_Position Rathaus)
(type_Package_Storage_Position Ulm)
(type_Parcels Toshiba_Laptops)
(type_Physical Pferd)
(type_Physical Toshiba_Laptops)
(type_Regular Pferd)
(type_Regular Toshiba_Laptops)
(type_Regular_Truck Pferd)
(type_Regular_Vehicle Pferd)
(type_Road_Route Albecker_Strasse)
(type_Road_Route Frauen_Strasse)
(type_Road_Route James_Franck_Ring)
(type_Route Albecker_Strasse)
(type_Route Frauen_Strasse)
(type_Route James_Franck_Ring)
(type_Thing Albecker_Strasse)
(type_Thing Bibliothek)
(type_Thing Frauen_Strasse)
(type_Thing James_Franck_Ring)
(type_Thing O27)
(type_Thing O28)
(type_Thing Pferd)
(type_Thing Rathaus)
(type_Thing Toshiba_Laptops)
(type_Thing Ulm)
(type_Truck Pferd)
(type_Vehicle Pferd)
(type_Vehicle_Position Bibliothek)
(type_Vehicle_Position O27)
(type_Vehicle_Position O28)
(type_Vehicle_Position Rathaus)
(type_Vehicle_Position Ulm)
(type_sort_for_Albecker_Strasse Albecker_Strasse)
(type_sort_for_Bibliothek Bibliothek)
(type_sort_for_Frauen_Strasse Frauen_Strasse)
(type_sort_for_James_Franck_Ring James_Franck_Ring)
(type_sort_for_O27 O27)
(type_sort_for_O28 O28)
(type_sort_for_Pferd Pferd)
(type_sort_for_Rathaus Rathaus)
(type_sort_for_Toshiba_Laptops Toshiba_Laptops)
(type_sort_for_Ulm Ulm)
)
((__top))
)
| null | https://raw.githubusercontent.com/panda-planner-dev/ipc2020-domains/9adb54325d3df35907adc7115fcc65f0ce5953cc/partial-order/UM-Translog/other/SHOP2/p-16.lisp | lisp | (defproblem problem domain
(
(In_City O27 Ulm)
(In_City O28 Ulm)
(In_City Bibliothek Ulm)
(In_City Rathaus Ulm)
(At_Vehicle Pferd Rathaus)
(Connects James_Franck_Ring O27 O28)
(Connects Frauen_Strasse Bibliothek O27)
(Connects Albecker_Strasse Rathaus Bibliothek)
(Available James_Franck_Ring)
(Available Frauen_Strasse)
(Available Albecker_Strasse)
(Available Pferd)
(PV_Compatible Toshiba_Laptops Pferd)
(RV_Compatible James_Franck_Ring Pferd)
(RV_Compatible Frauen_Strasse Pferd)
(RV_Compatible Albecker_Strasse Pferd)
(At_Package Toshiba_Laptops O27)
(type_City Ulm)
(type_City_Location Bibliothek)
(type_City_Location O27)
(type_City_Location O28)
(type_City_Location Rathaus)
(type_Customer_Location Bibliothek)
(type_Customer_Location O27)
(type_Customer_Location O28)
(type_Customer_Location Rathaus)
(type_Equipment_Position Bibliothek)
(type_Equipment_Position O27)
(type_Equipment_Position O28)
(type_Equipment_Position Pferd)
(type_Equipment_Position Rathaus)
(type_Equipment_Position Ulm)
(type_Local_Road_Route Albecker_Strasse)
(type_Local_Road_Route Frauen_Strasse)
(type_Local_Road_Route James_Franck_Ring)
(type_Location Bibliothek)
(type_Location O27)
(type_Location O28)
(type_Location Rathaus)
(type_Location Ulm)
(type_Not_TCenter Bibliothek)
(type_Not_TCenter O27)
(type_Not_TCenter O28)
(type_Not_TCenter Rathaus)
(type_Object Pferd)
(type_Object Toshiba_Laptops)
(type_Package Toshiba_Laptops)
(type_Package_Storage_Position Bibliothek)
(type_Package_Storage_Position O27)
(type_Package_Storage_Position O28)
(type_Package_Storage_Position Pferd)
(type_Package_Storage_Position Rathaus)
(type_Package_Storage_Position Ulm)
(type_Parcels Toshiba_Laptops)
(type_Physical Pferd)
(type_Physical Toshiba_Laptops)
(type_Regular Pferd)
(type_Regular Toshiba_Laptops)
(type_Regular_Truck Pferd)
(type_Regular_Vehicle Pferd)
(type_Road_Route Albecker_Strasse)
(type_Road_Route Frauen_Strasse)
(type_Road_Route James_Franck_Ring)
(type_Route Albecker_Strasse)
(type_Route Frauen_Strasse)
(type_Route James_Franck_Ring)
(type_Thing Albecker_Strasse)
(type_Thing Bibliothek)
(type_Thing Frauen_Strasse)
(type_Thing James_Franck_Ring)
(type_Thing O27)
(type_Thing O28)
(type_Thing Pferd)
(type_Thing Rathaus)
(type_Thing Toshiba_Laptops)
(type_Thing Ulm)
(type_Truck Pferd)
(type_Vehicle Pferd)
(type_Vehicle_Position Bibliothek)
(type_Vehicle_Position O27)
(type_Vehicle_Position O28)
(type_Vehicle_Position Rathaus)
(type_Vehicle_Position Ulm)
(type_sort_for_Albecker_Strasse Albecker_Strasse)
(type_sort_for_Bibliothek Bibliothek)
(type_sort_for_Frauen_Strasse Frauen_Strasse)
(type_sort_for_James_Franck_Ring James_Franck_Ring)
(type_sort_for_O27 O27)
(type_sort_for_O28 O28)
(type_sort_for_Pferd Pferd)
(type_sort_for_Rathaus Rathaus)
(type_sort_for_Toshiba_Laptops Toshiba_Laptops)
(type_sort_for_Ulm Ulm)
)
((__top))
)
| |
9182816418060e3539105de667efe2f75f1508b493471adbaed8cfae2956eed9 | ghc/testsuite | T1633.hs | -- This just tests what the kind error message looks like
Trac # 1633
module T1633 where
instance Functor Bool
| null | https://raw.githubusercontent.com/ghc/testsuite/998a816ae89c4fd573f4abd7c6abb346cf7ee9af/tests/typecheck/should_fail/T1633.hs | haskell | This just tests what the kind error message looks like | Trac # 1633
module T1633 where
instance Functor Bool
|
14a17967a9938f01272d8855c38bd1999e77e191bd4363fb42b72836a08b5a06 | Cognician/dogstatsd-clj | dogstatsd.clj | (ns ^{:doc
" (configure! \"localhost:8125\")
Total value/rate:
(increment! \"chat.request.count\" 1)
In-the-moment value:
(gauge! \"chat.ws.connections\" 17)
Values distribution (mean, avg, max, percentiles):
(histogram! \"chat.request.time\" 188.17)
Counting unique values:
(set! \"chat.user.email\" \"\")
Supported opts (third argument):
{ :tags => [String+] | { Keyword -> Any | Nil }
:sample-rate => Double[0..1] }
E.g. (increment! \"chat.request.count\" 1
{ :tags { :env \"production\", :chat nil } ;; => |#env:production,chat
:tags [ \"env:production\" \"chat\" ] ;; => |#env:production,chat
Throttling 50 % " }
cognician.dogstatsd
(:require
[clojure.string :as str])
(:import
[java.net InetSocketAddress DatagramSocket DatagramPacket]))
(defonce *state (atom nil))
(defn configure!
"Just pass StatsD server URI:
(configure! \"localhost:8125\")
(configure! \":8125\")
(configure! \"localhost\")
Pass system-wide tags to opts:
(configure! \"localhost:8125\" {:tags {:env \"production\"}})"
([uri] (configure! uri {}))
([uri opts]
(when-let [[_ host port] (and uri (re-matches #"([^:]*)(?:\:(\d+))?" uri))]
(let [host (if (str/blank? host) "localhost" host)
port (if (str/blank? port) 8125 port)
port (if (string? port) (Integer/parseInt port) port)
socket ^java.net.SocketAddress (DatagramSocket.)
addr ^java.net.InetSocketAddress (InetSocketAddress. ^String host ^Long port)]
(reset! *state (merge (select-keys opts [:tags])
{:socket socket
:addr addr}))))))
(defn- send! [^String payload]
;; (println "[ metrics ]" payload)
(if-let [{:keys [socket addr]} @*state]
(let [bytes (.getBytes payload "UTF-8")]
(try
(.send ^DatagramSocket socket
(DatagramPacket. bytes (alength bytes) ^InetSocketAddress addr))
(catch Exception e
(.printStackTrace e))))))
(defn- format-tags [& tag-colls]
(->> tag-colls
(mapcat (fn [tags]
(cond->> tags
(map? tags) (map (fn [[k v]]
(if (nil? v)
(name k)
(str (name k) ":" v)))))))
(str/join ",")))
(defn- format-metric [metric type value tags sample-rate]
(assert (re-matches #"[a-zA-Z][a-zA-Z0-9_.]*" metric) (str "Invalid metric name: " metric))
(assert (< (count metric) 200) (str "Metric name too long: " metric))
(str metric
":" value
"|" type
(when-not (== 1 sample-rate)
(str "|@" sample-rate))
(let [global-tags (:tags @*state)]
(when (or (not-empty tags)
(not-empty global-tags))
(str "|#" (format-tags global-tags tags))))))
(defn- report-fn [type]
(fn report!
([name value] (report! name value {}))
([name value opts]
(let [tags (:tags opts [])
sample-rate (:sample-rate opts 1)]
(when (or (== sample-rate 1)
(< (rand) sample-rate))
(send! (format-metric name type value tags sample-rate)))))))
(def increment! (report-fn "c"))
(def gauge! (report-fn "g"))
(def histogram! (report-fn "h"))
(defmacro measure! [metric opts & body]
`(let [t0# (System/currentTimeMillis)
res# (do ~@body)]
(histogram! ~metric (- (System/currentTimeMillis) t0#) ~opts)
res#))
(def set! (report-fn "s"))
(defn- escape-event-string [s]
(str/replace s "\n" "\\n"))
(defn- format-event [title text opts]
(let [title' (escape-event-string title)
text' (escape-event-string text)
{:keys [tags ^java.util.Date date-happened hostname aggregation-key
priority source-type-name alert-type]} opts]
(str "_e{" (count title') "," (count text') "}:" title' "|" text'
(when date-happened
(assert (instance? java.util.Date date-happened))
(str "|d:" (-> date-happened .getTime (/ 1000) long)))
(when hostname
(str "|h:" hostname))
(when aggregation-key
(str "|k:" aggregation-key))
(when priority
(assert (#{:normal :low} priority))
(str "|p:" (name priority)))
(when source-type-name
(str "|s:" source-type-name))
(when alert-type
(assert (#{:error :warning :info :success} alert-type))
(str "|t:" (name alert-type)))
(let [global-tags (:tags @*state)]
(when (or (not-empty tags)
(not-empty global-tags))
(str "|#" (format-tags global-tags tags)))))))
(defn event!
"title => String
text => String
opts => { :tags => [String+] | { Keyword -> Any | Nil }
:date-happened => #inst
:hostname => String
:aggregation-key => String
:priority => :normal | :low
:source-type=name => String
:alert-type => :error | :warning | :info | :success }"
[title text opts]
(let [payload (format-event title text opts)]
(assert (< (count payload) (* 8 1024)) (str "Payload too big: " title text payload))
(send! payload)))
| null | https://raw.githubusercontent.com/Cognician/dogstatsd-clj/adddac1b2dc44d9e280d79409d89781e8f8584ce/src/cognician/dogstatsd.clj | clojure | => |#env:production,chat
=> |#env:production,chat
(println "[ metrics ]" payload) | (ns ^{:doc
" (configure! \"localhost:8125\")
Total value/rate:
(increment! \"chat.request.count\" 1)
In-the-moment value:
(gauge! \"chat.ws.connections\" 17)
Values distribution (mean, avg, max, percentiles):
(histogram! \"chat.request.time\" 188.17)
Counting unique values:
(set! \"chat.user.email\" \"\")
Supported opts (third argument):
{ :tags => [String+] | { Keyword -> Any | Nil }
:sample-rate => Double[0..1] }
E.g. (increment! \"chat.request.count\" 1
Throttling 50 % " }
cognician.dogstatsd
(:require
[clojure.string :as str])
(:import
[java.net InetSocketAddress DatagramSocket DatagramPacket]))
(defonce *state (atom nil))
(defn configure!
"Just pass StatsD server URI:
(configure! \"localhost:8125\")
(configure! \":8125\")
(configure! \"localhost\")
Pass system-wide tags to opts:
(configure! \"localhost:8125\" {:tags {:env \"production\"}})"
([uri] (configure! uri {}))
([uri opts]
(when-let [[_ host port] (and uri (re-matches #"([^:]*)(?:\:(\d+))?" uri))]
(let [host (if (str/blank? host) "localhost" host)
port (if (str/blank? port) 8125 port)
port (if (string? port) (Integer/parseInt port) port)
socket ^java.net.SocketAddress (DatagramSocket.)
addr ^java.net.InetSocketAddress (InetSocketAddress. ^String host ^Long port)]
(reset! *state (merge (select-keys opts [:tags])
{:socket socket
:addr addr}))))))
(defn- send! [^String payload]
(if-let [{:keys [socket addr]} @*state]
(let [bytes (.getBytes payload "UTF-8")]
(try
(.send ^DatagramSocket socket
(DatagramPacket. bytes (alength bytes) ^InetSocketAddress addr))
(catch Exception e
(.printStackTrace e))))))
(defn- format-tags [& tag-colls]
(->> tag-colls
(mapcat (fn [tags]
(cond->> tags
(map? tags) (map (fn [[k v]]
(if (nil? v)
(name k)
(str (name k) ":" v)))))))
(str/join ",")))
(defn- format-metric [metric type value tags sample-rate]
(assert (re-matches #"[a-zA-Z][a-zA-Z0-9_.]*" metric) (str "Invalid metric name: " metric))
(assert (< (count metric) 200) (str "Metric name too long: " metric))
(str metric
":" value
"|" type
(when-not (== 1 sample-rate)
(str "|@" sample-rate))
(let [global-tags (:tags @*state)]
(when (or (not-empty tags)
(not-empty global-tags))
(str "|#" (format-tags global-tags tags))))))
(defn- report-fn [type]
(fn report!
([name value] (report! name value {}))
([name value opts]
(let [tags (:tags opts [])
sample-rate (:sample-rate opts 1)]
(when (or (== sample-rate 1)
(< (rand) sample-rate))
(send! (format-metric name type value tags sample-rate)))))))
(def increment! (report-fn "c"))
(def gauge! (report-fn "g"))
(def histogram! (report-fn "h"))
(defmacro measure! [metric opts & body]
`(let [t0# (System/currentTimeMillis)
res# (do ~@body)]
(histogram! ~metric (- (System/currentTimeMillis) t0#) ~opts)
res#))
(def set! (report-fn "s"))
(defn- escape-event-string [s]
(str/replace s "\n" "\\n"))
(defn- format-event [title text opts]
(let [title' (escape-event-string title)
text' (escape-event-string text)
{:keys [tags ^java.util.Date date-happened hostname aggregation-key
priority source-type-name alert-type]} opts]
(str "_e{" (count title') "," (count text') "}:" title' "|" text'
(when date-happened
(assert (instance? java.util.Date date-happened))
(str "|d:" (-> date-happened .getTime (/ 1000) long)))
(when hostname
(str "|h:" hostname))
(when aggregation-key
(str "|k:" aggregation-key))
(when priority
(assert (#{:normal :low} priority))
(str "|p:" (name priority)))
(when source-type-name
(str "|s:" source-type-name))
(when alert-type
(assert (#{:error :warning :info :success} alert-type))
(str "|t:" (name alert-type)))
(let [global-tags (:tags @*state)]
(when (or (not-empty tags)
(not-empty global-tags))
(str "|#" (format-tags global-tags tags)))))))
(defn event!
"title => String
text => String
opts => { :tags => [String+] | { Keyword -> Any | Nil }
:date-happened => #inst
:hostname => String
:aggregation-key => String
:priority => :normal | :low
:source-type=name => String
:alert-type => :error | :warning | :info | :success }"
[title text opts]
(let [payload (format-event title text opts)]
(assert (< (count payload) (* 8 1024)) (str "Payload too big: " title text payload))
(send! payload)))
|
8f25551820c982af36cd18e494e6de1ca5ed289c44fb853b858e8d517d751627 | haskus/packages | Map.hs | module Haskus.Utils.Map
( module Data.Map
)
where
import Data.Map
| null | https://raw.githubusercontent.com/haskus/packages/40ea6101cea84e2c1466bc55cdb22bed92f642a2/haskus-utils-data/src/lib/Haskus/Utils/Map.hs | haskell | module Haskus.Utils.Map
( module Data.Map
)
where
import Data.Map
| |
13f9d2be720985c3cb7cd963d7db09fcc995273eca8b24dbcaa534ade654632d | danr/hipspec | ParseDSL.hs | -- | A hacky way of parsing the property language DSL
module HipSpec.ParseDSL where
--import Type
import
--
import Var hiding (Id)
import HipSpec.GHC.Utils
import Data.List
import HipSpec.Id
import HipSpec.Lang.Type
import qualified HipSpec.Lang.CoreToRich as CTR
import TyCon (TyCon)
varWithPropType :: Var -> Bool
varWithPropType x = case CTR.trPolyType (varType x) of
Right (Forall _ t) -> isPropType t
_ -> False
varFromPrelude :: Var -> Bool
varFromPrelude = isInfixOf "HipSpec" . showOutputable . varName
isPropTyCon :: TyCon -> Bool
isPropTyCon = isPropId . idFromTyCon
ghcName :: (String -> Bool) -> Id -> Bool
ghcName k (GHCOrigin n) = k (showOutputable n)
ghcName _ _ = False
isPropId :: Id -> Bool
isPropId = ghcName (isInfixOf "HipSpec.Prop")
isPropType :: Type Id -> Bool
isPropType t =
case res of
TyCon p as -> isPropId p && not (any isPropType as)
_ -> False
where
(_args,res) = collectArrTy t
fromPrelude :: Id -> Bool
fromPrelude = ghcName (isInfixOf "HipSpec")
isMain :: Id -> Bool
isMain = ghcName (isInfixOf "main")
isEquals :: Id -> Bool
isEquals = ghcName (isInfixOfs [":=:","=:="])
isGiven :: Id -> Bool
isGiven = ghcName (isInfixOfs ["Given","given","==>"])
isTotal :: Id -> Bool
isTotal = ghcName (isInfixOfs ["Total","total"])
isGivenBool :: Id -> Bool
isGivenBool = ghcName (isInfixOf "givenBool")
isProveBool :: Id -> Bool
isProveBool = ghcName (isInfixOf "proveBool")
isOops :: Id -> Bool
isOops = ghcName (isInfixOfs ["Oops","oops"])
isInfixOfs :: [String] -> String -> Bool
isInfixOfs ss s = any (`isInfixOf` s) ss
| null | https://raw.githubusercontent.com/danr/hipspec/a114db84abd5fee8ce0b026abc5380da11147aa9/src/HipSpec/ParseDSL.hs | haskell | | A hacky way of parsing the property language DSL
import Type
| module HipSpec.ParseDSL where
import
import Var hiding (Id)
import HipSpec.GHC.Utils
import Data.List
import HipSpec.Id
import HipSpec.Lang.Type
import qualified HipSpec.Lang.CoreToRich as CTR
import TyCon (TyCon)
varWithPropType :: Var -> Bool
varWithPropType x = case CTR.trPolyType (varType x) of
Right (Forall _ t) -> isPropType t
_ -> False
varFromPrelude :: Var -> Bool
varFromPrelude = isInfixOf "HipSpec" . showOutputable . varName
isPropTyCon :: TyCon -> Bool
isPropTyCon = isPropId . idFromTyCon
ghcName :: (String -> Bool) -> Id -> Bool
ghcName k (GHCOrigin n) = k (showOutputable n)
ghcName _ _ = False
isPropId :: Id -> Bool
isPropId = ghcName (isInfixOf "HipSpec.Prop")
isPropType :: Type Id -> Bool
isPropType t =
case res of
TyCon p as -> isPropId p && not (any isPropType as)
_ -> False
where
(_args,res) = collectArrTy t
fromPrelude :: Id -> Bool
fromPrelude = ghcName (isInfixOf "HipSpec")
isMain :: Id -> Bool
isMain = ghcName (isInfixOf "main")
isEquals :: Id -> Bool
isEquals = ghcName (isInfixOfs [":=:","=:="])
isGiven :: Id -> Bool
isGiven = ghcName (isInfixOfs ["Given","given","==>"])
isTotal :: Id -> Bool
isTotal = ghcName (isInfixOfs ["Total","total"])
isGivenBool :: Id -> Bool
isGivenBool = ghcName (isInfixOf "givenBool")
isProveBool :: Id -> Bool
isProveBool = ghcName (isInfixOf "proveBool")
isOops :: Id -> Bool
isOops = ghcName (isInfixOfs ["Oops","oops"])
isInfixOfs :: [String] -> String -> Bool
isInfixOfs ss s = any (`isInfixOf` s) ss
|
b089551f331eb34f33adbdb0ae9c997a34cab8df22c2d24a3d911e803b872429 | seizans/happiage | Application.hs | # OPTIONS_GHC -fno - warn - orphans #
module Application
( getApplication
, getApplicationDev
) where
import Import
import Settings
import Yesod.Auth
import Yesod.Default.Config
import Yesod.Default.Main
import Yesod.Default.Handlers (getRobotsR)
#if DEVELOPMENT
import Yesod.Logger (Logger, logBS)
import Network.Wai.Middleware.RequestLogger (logCallbackDev)
#else
import Yesod.Logger (Logger, logBS, toProduction)
import Network.Wai.Middleware.RequestLogger (logCallback)
#endif
import qualified Database.Persist.Store
import Network.HTTP.Conduit (newManager, def)
-- Import all relevant handler modules here.
import Handler.Welcome
import Handler.Invitation
import Handler.Entry
import Handler.Album
import Handler.Profile
import Handler.Contact
import Handler.Admin
This line actually creates our YesodSite instance . It is the second half
of the call to mkYesodData which occurs in Foundation.hs . Please see
-- the comments there for more details.
mkYesodDispatch "Happiage" resourcesHappiage
-- This function allocates resources (such as a database connection pool),
performs initialization and creates a WAI application . This is also the
-- place to put your migrate statements to have automatic database
migrations handled by Yesod .
getApplication :: AppConfig DefaultEnv Extra -> Logger -> IO Application
getApplication conf logger = do
manager <- newManager def
s <- staticSite
dbconf <- withYamlEnvironment "config/mongoDB.yml" (appEnv conf)
Database.Persist.Store.loadConfig >>=
Database.Persist.Store.applyEnv
p <- Database.Persist.Store.createPoolConfig (dbconf :: Settings.PersistConfig)
let foundation = Happiage conf setLogger s p manager dbconf
app <- toWaiAppPlain foundation
return $ logWare app
where
#ifdef DEVELOPMENT
logWare = logCallbackDev (logBS setLogger)
setLogger = logger
#else
setLogger = toProduction logger -- by default the logger is set for development
logWare = logCallback (logBS setLogger)
#endif
for yesod devel
getApplicationDev :: IO (Int, Application)
getApplicationDev =
defaultDevelApp loader getApplication
where
loader = loadConfig (configSettings Development)
{ csParseExtra = parseExtra
}
| null | https://raw.githubusercontent.com/seizans/happiage/38843c2a8fe3306e4a8e7d85899434c6ed4210d7/src/Application.hs | haskell | Import all relevant handler modules here.
the comments there for more details.
This function allocates resources (such as a database connection pool),
place to put your migrate statements to have automatic database
by default the logger is set for development | # OPTIONS_GHC -fno - warn - orphans #
module Application
( getApplication
, getApplicationDev
) where
import Import
import Settings
import Yesod.Auth
import Yesod.Default.Config
import Yesod.Default.Main
import Yesod.Default.Handlers (getRobotsR)
#if DEVELOPMENT
import Yesod.Logger (Logger, logBS)
import Network.Wai.Middleware.RequestLogger (logCallbackDev)
#else
import Yesod.Logger (Logger, logBS, toProduction)
import Network.Wai.Middleware.RequestLogger (logCallback)
#endif
import qualified Database.Persist.Store
import Network.HTTP.Conduit (newManager, def)
import Handler.Welcome
import Handler.Invitation
import Handler.Entry
import Handler.Album
import Handler.Profile
import Handler.Contact
import Handler.Admin
This line actually creates our YesodSite instance . It is the second half
of the call to mkYesodData which occurs in Foundation.hs . Please see
mkYesodDispatch "Happiage" resourcesHappiage
performs initialization and creates a WAI application . This is also the
migrations handled by Yesod .
getApplication :: AppConfig DefaultEnv Extra -> Logger -> IO Application
getApplication conf logger = do
manager <- newManager def
s <- staticSite
dbconf <- withYamlEnvironment "config/mongoDB.yml" (appEnv conf)
Database.Persist.Store.loadConfig >>=
Database.Persist.Store.applyEnv
p <- Database.Persist.Store.createPoolConfig (dbconf :: Settings.PersistConfig)
let foundation = Happiage conf setLogger s p manager dbconf
app <- toWaiAppPlain foundation
return $ logWare app
where
#ifdef DEVELOPMENT
logWare = logCallbackDev (logBS setLogger)
setLogger = logger
#else
logWare = logCallback (logBS setLogger)
#endif
for yesod devel
getApplicationDev :: IO (Int, Application)
getApplicationDev =
defaultDevelApp loader getApplication
where
loader = loadConfig (configSettings Development)
{ csParseExtra = parseExtra
}
|
43bd908c73941a1f801c3b322ff6b3eebab1716739bd61dcbcc014d192029d21 | mikera/clisk | node.clj | (ns
^{:author "mikera"
:doc "Functions for managing clisk AST nodes. Should not normally be needed by library users"}
clisk.node
(:require [mikera.vectorz.core :as vec])
(:import [clisk Util NodeMarker])
(:import java.awt.image.BufferedImage)
(:import clisk.IRenderFunction)
(:use clisk.util))
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
;; ==============================
;; Image generation constants
(def ^:const DEFAULT-IMAGE-SIZE 256)
(declare node)
(declare img)
(declare constant-node)
(declare vector-node)
(declare scalar-node?)
(declare vec-node)
(declare code-node)
(declare evaluate)
(declare warp)
(declare ZERO-NODE)
(declare node-info)
;; ===========================
;; Node protocols
(defprotocol PCodeGen
(gen-code [node input-syms output-syms inner-code]
"Returns generated code wrapping inner-code
input-syms are the symbols provided
output-syms are the symbols required to be bound for inner-code, as a vector
argument inner-code is the code that should be inserted as the core of the generated code")
(gen-component [node input-syms index]
"Generates code to produce one component of the node as specified by index.")
)
(defprotocol PNodeShape
(node-shape [node]
"Returns the length of the node output vector, or nil if scalar"))
(defprotocol PValidate
(validate [node]
"Returns truthy if node is valid, throws an exception otherwise"))
(defprotocol PNodeComponent
(component [node i]
"Gets the i'th component of a node, as a new scalar node"))
;; =======================================
;; Default protocol implementations
(extend-protocol PNodeComponent
Object
(component [this i]
(let [nd (node this)]
;; (when (identical? nd this)) (error "Get-component problem with: " this)
(component nd i)))
clojure.lang.IPersistentVector
(component [this i]
(node (nth this i 0.0))))
;; =======================================
;; Code generation utility functions
(defn map-symbols
"Returns a sequence of bindings maping old symbols to new symbols"
([new-syms old-syms]
(let [pairs (map vector new-syms old-syms)]
(mapcat (fn [[a b :as pair]] (if (= a b) nil pair)) pairs))))
(defn get-code
"Gets the generated code for a scalar node, assuming [x y z t] as input symbols"
([node]
(when-not (scalar-node? node) (error "get-code requires a scalar node"))
(gen-code node '[x y z t] '[x] 'x)))
(defn get-codes
"Gets the generated codes for a vector node, assuming [x y z t] as input symbols"
([node]
(when (scalar-node? node) (error "get-codes requires a vector node"))
(let [dims (node-shape node)]
(vec (for [i (range dims)]
(gen-component node '[x y z t] i))))))
(defn gen-let-bindings
"Generates a set of let bindings around the given code, if necessary.
Bindings are a sequence of symbols and bindings for each, as with a regular let vector. Must have an even length."
([bindings code]
(let [_ (when (not (even? (count bindings))) (error "bindings must have an even number of elements"))
pairs (partition 2 bindings)
bindings (apply concat (filter (fn [[a b]] (not= a b)) pairs))]
(cond
(empty? bindings)
code
(and (== 2 (count bindings)) (= code (first bindings)))
(second bindings)
:else
`(let [~@bindings] ~code)))))
;; =======================================
Node record type implementing pure code
;; Generally should be a low-level input, not suitable for higher level generation / optimisation
;; since channels are treated as opaque code blocks
;;
;; Code should use the symbols '[x y z t], which will be bound for the execution of the code
;;
;; A scalar node is a node that returns a scalar value. all other nodes return a vector.
(defrecord CodeNode []
clojure.lang.IFn
(invoke [this]
this)
(invoke [this x]
(warp x this))
(applyTo [this args]
(if-let [ss (seq args)]
(warp (first args) (.applyTo this (next ss)))
this))
clisk.NodeMarker
PNodeShape
(node-shape [node]
(if (= :scalar (:type node))
nil
(count (:codes node))))
PValidate
(validate [nd]
(let [nd (node nd)]
(cond
(not (xor (:code nd) (:codes nd)))
(error "AST code node must have :code or :codes")
(and (scalar-node? nd)
(not (:primitive? (node-info nd))))
(error "AST scalar code node must be of primitive type: " (:code nd) " was: [" (:type (node-info nd)) "]")
:else
nd)))
PNodeComponent
(component [node i]
(if (scalar-node? node)
node
(let [my-code (nth (:codes node) i 0.0) ]
(when-not my-code (error "No code for: " (pr-str node)))
(code-node my-code :objects (:objects node)))))
PCodeGen
;; note that code is assumed to use '[x y z t]
(gen-code [node input-syms output-syms inner-code]
(let [scalarnode? (scalar-node? node)]
(if scalarnode?
(let [tsym (first output-syms)
input-bindings (map-symbols '[x y z t] input-syms)
output-bindings (map-symbols (next output-syms) (repeat tsym)) ;; all inner symbols are set to scalar value
bindings (concat input-bindings [tsym (:code node)] output-bindings)
]
(gen-let-bindings bindings inner-code))
(let [codes (:codes node)
input-bindings (map-symbols '[x y z t] input-syms) ;; bind inputs
gsyms (mapv gensym output-syms) ;; generate temp syms for outputs
gcode (mapcat vector gsyms (concat codes (repeat 0.0))) ;; map result of code to each temp symbol
output-bindings (map-symbols output-syms gsyms)
bindings (concat input-bindings gcode output-bindings)]
(gen-let-bindings bindings inner-code)))))
(gen-component [node input-syms index]
(let [scalarnode? (scalar-node? node)
code (if scalarnode? (:code node) (nth (:codes node) index 0.0))
input-bindings (map-symbols '[x y z t] input-syms)]
(gen-let-bindings input-bindings code)))
)
;; =======================================
;; Node implementing a warp : g(f(x))
(defrecord WarpNode [f g]
clojure.lang.IFn
(invoke [this]
this)
(invoke [this x]
(warp x this))
(applyTo [this args]
(if-let [ss (seq args)]
(warp (first args) (.applyTo this (next ss)))
this))
clisk.NodeMarker
PNodeShape
(node-shape [node]
(node-shape g))
PValidate
(validate [nd]
(validate f)
(validate g)
nd)
PNodeComponent
(component [node i]
(warp f (component g i)))
PCodeGen
(gen-code [node input-syms output-syms inner-code]
(gen-code f input-syms '[x y z t]
(gen-code g '[x y z t] output-syms inner-code)))
(gen-component [node input-syms index]
(gen-code f input-syms '[x y z t]
(gen-component g '[x y z t] index))))
;; ==============================
;; Node predicates
(defn node?
"Returns true if the argument is a clisk node"
([x]
(instance? clisk.NodeMarker x)))
(defn constant-node?
"Returns true if x is a constant node"
([x]
(and (node? x) (:constant x))))
(defn vector-node?
"Returns true if the argument is a vector node"
([x]
(boolean (node-shape x))))
(defn scalar-node?
"Returns true if the argument is a scalar node"
([x]
(nil? (node-shape x))))
(defn is-constant
"Returns a function that tests if a node is equivalent to a specified constant value"
([value]
(fn [n]
(let [n (node n)]
(and (constant-node? n)
(== (double value) (double (eval (get-code n)))))))))
(defn constant-form?
"Returns true if a form is constant, i.e. contains none of the symbols x, y, z or t"
([form]
(cond
(symbol? form) false
(vector? form) (every? constant-form? form)
(sequential? form) (every? constant-form? (next form)) ;; ignore initial operator
(number? form) true
:else (error "Unexpected element of form: " (pr-str form)))))
;; standard position vector
(def position-symbol-vector ['x 'y 'z 't])
;; =====================================
basic Node functions
;; these are private but aliased in clisk.functions
(defn dimensions
"Returns the number of dimensions in a vector node, or 1 if scalar"
(^long [a]
(let [a (node a)]
(or (node-shape a) 1))))
(defn components
"Gets the components of a node, as a sequence of scalar nodes. Returns a length 1 vector if the node is scalar."
([a]
(let [a (node a)]
(mapv
(fn [i]
(component a i))
(range (dimensions a))))))
(defn select-components
"Selects a subset of components from a node, returning a new vector node. Duplicates a scalar as necessary to create more components."
([a index-vector]
(when-not (sequential? index-vector) (error "select-components requires a vector of components as second argument"))
(let [a (node a)]
(vec-node
(mapv
(fn [i]
(component a i))
index-vector)))))
(defn ^:private take-components [n a]
"Take the first n components from a vector function. Duplicates a scalar as necessary to create more components."
(let [a (node a)]
(vec-node
(for [i (range n)]
(component a i)))))
;; ========================================
;; Node constructors
(defn value-node
"Create a node that represents a constant value"
([v]
(if
(sequential? v)
(CodeNode. nil
{:type :vector
:codes (mapv double v)
:constant true})
(CodeNode. nil
{:type :scalar
:code (double v)
:constant true}))))
(defn new-node
"Create a new AST node with the given properties"
([props]
(let [n (CodeNode. nil props)]
(if (and (:constant props) (not (number? (:code props))))
(value-node (evaluate n))
n))))
(defn object-node
"Creates a node with an embedded Java object"
([obj]
(let [sym (gensym "obj")]
(CodeNode. nil
{:type :scalar
:code sym
:objects {sym obj}
:constant true}))))
(defn vec-node
"Creates a node from a sequence of scalars. The new node returns each scalar value as a separate component."
([xs]
(let [nodes (map node xs)]
(when-not (every? scalar-node? nodes)
(error "vec-node requires scalar values as input"))
(new-node
{:type :vector
:codes (vec (map get-code nodes))
:objects (apply merge (map :objects nodes))
:constant (every? constant-node? nodes)}))))
(defn vector-node
"Creates a vector node from the given scalars"
([& xs]
(vec-node xs)))
(defn generate-scalar-code
"Creates code that generates a (fn [objects]) which returns a scalar clisk.IFunction"
([n]
(when-not (scalar-node? n) (error "Trying to compile non-scalar node"))
(let [n (node n)
obj-map (:objects n)
obj-syms (keys obj-map)
code (gen-code n '[x y z t] ['x] 'x)]
`(fn [~@obj-syms]
(let []
(reify clisk.IFunction
(calc
[~'this ~'x ~'y ~'z ~'t]
(double ~code))
(calc
[~'this ~'x ~'y ~'z]
(.calc ~'this ~'x ~'y ~'z 0.0))
(calc
[~'this ~'x ~'y]
(.calc ~'this ~'x ~'y 0.0))
(calc
[~'this ~'x]
(.calc ~'this ~'x 0.0))
(calc
[~'this]
(.calc ~'this 0.0))))))))
(defn compile-scalar-node ^clisk.IFunction [n]
"Compile a scalar node to a clisk.IFunction"
(let [n (node n)
obj-map (:objects n)
objs (vals obj-map)]
(if-not (scalar-node? n) (error "Trying to compile non-scalar node"))
(apply
(eval
(generate-scalar-code n))
objs)))
(defn ^:private evaluate
"Evaluates a node at a given position (defaults to zero). Can return either vector or scalar result."
([n] (evaluate n 0.0 0.0 0.0 0.0))
([n x] (evaluate n x 0.0 0.0 0.0))
([n x y] (evaluate n x y 0.0 0.0))
([n x y z] (evaluate n x y z 0.0))
([n x y z t]
(let [n (node n)]
(if (scalar-node? n)
(.calc (compile-scalar-node n) (double x) (double y) (double z) (double t))
(mapv
#(.calc (compile-scalar-node %) (double x) (double y) (double z) (double t))
(components n))))))
(defn constant-node
"Create a node that returns a constant value, can be either a constant vector or scalar value"
([v]
(cond
(vector? v)
(let [node (vec-node v)]
(if (not (:constant node)) (error "Not a constant vector!"))
node)
:else
(value-node (double v)))))
(defn transform-node
"Creates a node containing code based on transforming the other nodes into a new form"
([f & nodes]
(let [nodes (map node nodes)
generated-node (node (apply f nodes))]
(if (every? constant-node? nodes)
(constant-node (evaluate generated-node))
(merge
generated-node
{:objects (apply merge (map :objects nodes))})))))
(defn transform-components
"Calls transform-node separately on each component of a set of nodes. Returns a scalar iff all input nodes are scalar."
([f & nodes]
(let [nodes (map node nodes)]
(if (some vector-node? nodes)
(let [dims (apply max (map dimensions nodes))]
(vec-node
(for [i (range dims)]
(apply transform-node f (map #(component % i) nodes)))))
(apply transform-node f nodes)))))
(defn function-node
"Creates a node which is a scalar function of scalar nodes. Function should be provided as a symbol."
([f & scalars]
(let [scalars (map node scalars)]
(if-let [nd (first (filter (complement scalar-node?) scalars))] (error "Input nodes to function-node must be scalar, got: " (pr-str nd)))
(if-not (symbol? f) (error "Function in function-node must be a symbol, got: " f))
(apply
transform-node
(fn [& xs] `(~f ~@(map get-code xs)))
scalars))))
(defn code-node
"Creates a node from a given code form (may be a vector). Does not preserve objects - must be copied over manually."
[form
& {:keys [objects]
:or {objects nil}}]
(if (vector? form)
(vec-node (map #(code-node % :objects objects) form))
(if (constant-form? form)
(new-node {:type :scalar
:code (eval form)
:constant true
:objects objects
})
(new-node {:type :scalar
:code form
:constant false
:objects objects
}))))
(defmacro texture-bound [v offset width max]
`(let [tv# (double (+ (* (double ~v) ~(double width)) ~(double offset)) )
max# (int (dec ~max)) ]
(if (>= tv# max#)
max#
(if (<= tv# 0)
(int 0)
(int tv#)))))
(defn ^:private texture-map
([image]
(let [^BufferedImage image (cond
(instance? BufferedImage image)
image
(string? image)
(clisk.util/load-image image)
:else
(clisk.node/img image))]
(texture-map image 0 0 (.getWidth image) (.getHeight image))))
([^BufferedImage image x y w h]
(let [texture (object-node image)
tsym (first (keys (:objects texture)))
mw (.getWidth image)
mh (.getHeight image)]
(vec-node
(mapv
(fn [fsym]
(assoc
(code-node
`(let [image# ^java.awt.image.BufferedImage ~tsym
tx# (int (texture-bound ~'x ~x ~w ~mw))
ty# (int (texture-bound ~'y ~y ~h ~mh))]
(~fsym (.getRGB ^BufferedImage image# tx# ty#)) ) )
:objects (:objects texture)) )
[`red-from-argb `green-from-argb `blue-from-argb `alpha-from-argb])))))
(defn vector-function-node
"Creates a vector node from a VectorFunction"
[^mikera.transformz.ATransform vf]
(let [input-dims (.inputDimensions vf)
output-dims (.outputDimensions vf)]
(error "Not yet implemented")))
(defn node [a]
"Creates a node from arbitrary input. Idempotent, can be used to force conversion to node."
(cond
(node? a) a
(number? a) (constant-node a)
(vector? a) (vec-node a)
(vec/vec? a) (vec-node (seq a))
(fn? a) (node (a position-symbol-vector))
(symbol? a) (code-node a)
(keyword? a) (error "Can't convert keyword to node: " a)
(sequential? a) (code-node a)
(instance? java.awt.image.BufferedImage a) (texture-map a)
(instance? mikera.transformz.ATransform a) (vector-function-node a)
:object (object-node a)
:else (error "Unable to build an AST node from: " a)))
(defn ^:private vectorize
"Converts a value into a vector function form. If a is already a vector node, does nothing. If a is a function, apply it to the current position.
If dims are supplied, vectorizes to the given number of dimensions. This duplicates scalars and zero-extends vectors."
([a]
(let [a (node a)]
(cond
(vector-node? a)
a
(scalar-node? a)
(vector-node a)
:else
(error "Should not be possible!"))))
([dims a]
(select-components a (range dims))))
(defn ^:private vlet*
"let one or more values within a vector function"
([bindings form]
(let [form (node form)
binding-pairs (partition 2 bindings)
symbols (map first binding-pairs)
binding-nodes (map (comp node second) binding-pairs)]
;; (if-not (every? scalar-node? binding-nodes) (error "All binding values must be scalar"))
(if (seq bindings)
(apply transform-components
(fn [nd & binds]
`(let [~@(interleave symbols (map get-code binds))]
~(get-code nd)))
(cons form binding-nodes))
form))))
(defn ^:private warp
"Warps the position vector before calculating a vector function"
([new-position f]
(let [new-position (node new-position)
f (node f)]
(WarpNode. (node new-position) (node f) nil {:objects (merge (:objects new-position) (:objects f))}))))
(def ZERO-NODE (node 0.0))
(defn ^clisk.IFunction compile-fn [node]
"Compiles clisk scalar node into an object that extends clisk.Function and clojure.lang.IFn.
For most purposes, compile-render-fn should be used to create an IRenderFunction for efficiency."
(clisk.node/compile-scalar-node node))
(defn code-gen
"Generates code for a given node, using standard x, y, z and t parameters."
[node]
(let [node (clisk.node/node node)] ;; ensure we have a node
(gen-code node '[x y z t] '[x y z] `(Util/toARGB ~'x ~'y ~'z))) ;; rendering only requires x, y, z
)
(defn ^clisk.IRenderFunction compile-render-fn [node]
"Compiles clisk node into an object that implements clisk.IRenderFunction"
we want 4 channel output
obj-map (:objects node)
osyms (keys obj-map)
code (code-gen node)
]
(apply (eval
`(fn [~@osyms]
(reify clisk.IRenderFunction
(^int calc [this ^double x ^double y]
(let [~'z 0.0 ~'t 0.0]
~code)))))
(vals obj-map))))
(defn img
"Creates a BufferedImage from the given node."
(^BufferedImage [node]
(img node DEFAULT-IMAGE-SIZE DEFAULT-IMAGE-SIZE))
(^BufferedImage [node w h]
(img node w h 1.0 (/ (double h) (double w))))
(^BufferedImage [node w h dx dy]
(let [node (clisk.node/node node)
image (clisk.Util/newImage (int w) (int h))
rf (compile-render-fn node)
w (int w)
h (int h)
dx (double dx)
dy (double dy)
dw (double w)
dh (double h)
gen-row! (fn [rownum]
(let [iy (int rownum)]
(dotimes [ix w]
(let [iy (int iy)
x (/ (* dx (+ 0.5 ix)) dw)
y (/ (* dy (+ 0.5 iy)) dh)
argb (.calc rf x y)]
(.setRGB image ix iy argb)))))]
(doall (pmap gen-row! (range h)))
image)))
(defn node-info [node]
(expression-info-internal
`(fn [~@(keys (:objects node))]
(let [~'x 1.0
~'y 1.0
~'z 1.0
~'t 1.0 ]
~(gen-code node '[x y z t] '[x] 'x)))))
| null | https://raw.githubusercontent.com/mikera/clisk/44dd35fabbae68ad44f0e8ac2dec4c580203f7b9/src/main/clojure/clisk/node.clj | clojure | ==============================
Image generation constants
===========================
Node protocols
=======================================
Default protocol implementations
(when (identical? nd this)) (error "Get-component problem with: " this)
=======================================
Code generation utility functions
=======================================
Generally should be a low-level input, not suitable for higher level generation / optimisation
since channels are treated as opaque code blocks
Code should use the symbols '[x y z t], which will be bound for the execution of the code
A scalar node is a node that returns a scalar value. all other nodes return a vector.
note that code is assumed to use '[x y z t]
all inner symbols are set to scalar value
bind inputs
generate temp syms for outputs
map result of code to each temp symbol
=======================================
Node implementing a warp : g(f(x))
==============================
Node predicates
ignore initial operator
standard position vector
=====================================
these are private but aliased in clisk.functions
========================================
Node constructors
(if-not (every? scalar-node? binding-nodes) (error "All binding values must be scalar"))
ensure we have a node
rendering only requires x, y, z | (ns
^{:author "mikera"
:doc "Functions for managing clisk AST nodes. Should not normally be needed by library users"}
clisk.node
(:require [mikera.vectorz.core :as vec])
(:import [clisk Util NodeMarker])
(:import java.awt.image.BufferedImage)
(:import clisk.IRenderFunction)
(:use clisk.util))
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
(def ^:const DEFAULT-IMAGE-SIZE 256)
(declare node)
(declare img)
(declare constant-node)
(declare vector-node)
(declare scalar-node?)
(declare vec-node)
(declare code-node)
(declare evaluate)
(declare warp)
(declare ZERO-NODE)
(declare node-info)
(defprotocol PCodeGen
(gen-code [node input-syms output-syms inner-code]
"Returns generated code wrapping inner-code
input-syms are the symbols provided
output-syms are the symbols required to be bound for inner-code, as a vector
argument inner-code is the code that should be inserted as the core of the generated code")
(gen-component [node input-syms index]
"Generates code to produce one component of the node as specified by index.")
)
(defprotocol PNodeShape
(node-shape [node]
"Returns the length of the node output vector, or nil if scalar"))
(defprotocol PValidate
(validate [node]
"Returns truthy if node is valid, throws an exception otherwise"))
(defprotocol PNodeComponent
(component [node i]
"Gets the i'th component of a node, as a new scalar node"))
(extend-protocol PNodeComponent
Object
(component [this i]
(let [nd (node this)]
(component nd i)))
clojure.lang.IPersistentVector
(component [this i]
(node (nth this i 0.0))))
(defn map-symbols
"Returns a sequence of bindings maping old symbols to new symbols"
([new-syms old-syms]
(let [pairs (map vector new-syms old-syms)]
(mapcat (fn [[a b :as pair]] (if (= a b) nil pair)) pairs))))
(defn get-code
"Gets the generated code for a scalar node, assuming [x y z t] as input symbols"
([node]
(when-not (scalar-node? node) (error "get-code requires a scalar node"))
(gen-code node '[x y z t] '[x] 'x)))
(defn get-codes
"Gets the generated codes for a vector node, assuming [x y z t] as input symbols"
([node]
(when (scalar-node? node) (error "get-codes requires a vector node"))
(let [dims (node-shape node)]
(vec (for [i (range dims)]
(gen-component node '[x y z t] i))))))
(defn gen-let-bindings
"Generates a set of let bindings around the given code, if necessary.
Bindings are a sequence of symbols and bindings for each, as with a regular let vector. Must have an even length."
([bindings code]
(let [_ (when (not (even? (count bindings))) (error "bindings must have an even number of elements"))
pairs (partition 2 bindings)
bindings (apply concat (filter (fn [[a b]] (not= a b)) pairs))]
(cond
(empty? bindings)
code
(and (== 2 (count bindings)) (= code (first bindings)))
(second bindings)
:else
`(let [~@bindings] ~code)))))
Node record type implementing pure code
(defrecord CodeNode []
clojure.lang.IFn
(invoke [this]
this)
(invoke [this x]
(warp x this))
(applyTo [this args]
(if-let [ss (seq args)]
(warp (first args) (.applyTo this (next ss)))
this))
clisk.NodeMarker
PNodeShape
(node-shape [node]
(if (= :scalar (:type node))
nil
(count (:codes node))))
PValidate
(validate [nd]
(let [nd (node nd)]
(cond
(not (xor (:code nd) (:codes nd)))
(error "AST code node must have :code or :codes")
(and (scalar-node? nd)
(not (:primitive? (node-info nd))))
(error "AST scalar code node must be of primitive type: " (:code nd) " was: [" (:type (node-info nd)) "]")
:else
nd)))
PNodeComponent
(component [node i]
(if (scalar-node? node)
node
(let [my-code (nth (:codes node) i 0.0) ]
(when-not my-code (error "No code for: " (pr-str node)))
(code-node my-code :objects (:objects node)))))
PCodeGen
(gen-code [node input-syms output-syms inner-code]
(let [scalarnode? (scalar-node? node)]
(if scalarnode?
(let [tsym (first output-syms)
input-bindings (map-symbols '[x y z t] input-syms)
bindings (concat input-bindings [tsym (:code node)] output-bindings)
]
(gen-let-bindings bindings inner-code))
(let [codes (:codes node)
output-bindings (map-symbols output-syms gsyms)
bindings (concat input-bindings gcode output-bindings)]
(gen-let-bindings bindings inner-code)))))
(gen-component [node input-syms index]
(let [scalarnode? (scalar-node? node)
code (if scalarnode? (:code node) (nth (:codes node) index 0.0))
input-bindings (map-symbols '[x y z t] input-syms)]
(gen-let-bindings input-bindings code)))
)
(defrecord WarpNode [f g]
clojure.lang.IFn
(invoke [this]
this)
(invoke [this x]
(warp x this))
(applyTo [this args]
(if-let [ss (seq args)]
(warp (first args) (.applyTo this (next ss)))
this))
clisk.NodeMarker
PNodeShape
(node-shape [node]
(node-shape g))
PValidate
(validate [nd]
(validate f)
(validate g)
nd)
PNodeComponent
(component [node i]
(warp f (component g i)))
PCodeGen
(gen-code [node input-syms output-syms inner-code]
(gen-code f input-syms '[x y z t]
(gen-code g '[x y z t] output-syms inner-code)))
(gen-component [node input-syms index]
(gen-code f input-syms '[x y z t]
(gen-component g '[x y z t] index))))
(defn node?
"Returns true if the argument is a clisk node"
([x]
(instance? clisk.NodeMarker x)))
(defn constant-node?
"Returns true if x is a constant node"
([x]
(and (node? x) (:constant x))))
(defn vector-node?
"Returns true if the argument is a vector node"
([x]
(boolean (node-shape x))))
(defn scalar-node?
"Returns true if the argument is a scalar node"
([x]
(nil? (node-shape x))))
(defn is-constant
"Returns a function that tests if a node is equivalent to a specified constant value"
([value]
(fn [n]
(let [n (node n)]
(and (constant-node? n)
(== (double value) (double (eval (get-code n)))))))))
(defn constant-form?
"Returns true if a form is constant, i.e. contains none of the symbols x, y, z or t"
([form]
(cond
(symbol? form) false
(vector? form) (every? constant-form? form)
(number? form) true
:else (error "Unexpected element of form: " (pr-str form)))))
(def position-symbol-vector ['x 'y 'z 't])
basic Node functions
(defn dimensions
"Returns the number of dimensions in a vector node, or 1 if scalar"
(^long [a]
(let [a (node a)]
(or (node-shape a) 1))))
(defn components
"Gets the components of a node, as a sequence of scalar nodes. Returns a length 1 vector if the node is scalar."
([a]
(let [a (node a)]
(mapv
(fn [i]
(component a i))
(range (dimensions a))))))
(defn select-components
"Selects a subset of components from a node, returning a new vector node. Duplicates a scalar as necessary to create more components."
([a index-vector]
(when-not (sequential? index-vector) (error "select-components requires a vector of components as second argument"))
(let [a (node a)]
(vec-node
(mapv
(fn [i]
(component a i))
index-vector)))))
(defn ^:private take-components [n a]
"Take the first n components from a vector function. Duplicates a scalar as necessary to create more components."
(let [a (node a)]
(vec-node
(for [i (range n)]
(component a i)))))
(defn value-node
"Create a node that represents a constant value"
([v]
(if
(sequential? v)
(CodeNode. nil
{:type :vector
:codes (mapv double v)
:constant true})
(CodeNode. nil
{:type :scalar
:code (double v)
:constant true}))))
(defn new-node
"Create a new AST node with the given properties"
([props]
(let [n (CodeNode. nil props)]
(if (and (:constant props) (not (number? (:code props))))
(value-node (evaluate n))
n))))
(defn object-node
"Creates a node with an embedded Java object"
([obj]
(let [sym (gensym "obj")]
(CodeNode. nil
{:type :scalar
:code sym
:objects {sym obj}
:constant true}))))
(defn vec-node
"Creates a node from a sequence of scalars. The new node returns each scalar value as a separate component."
([xs]
(let [nodes (map node xs)]
(when-not (every? scalar-node? nodes)
(error "vec-node requires scalar values as input"))
(new-node
{:type :vector
:codes (vec (map get-code nodes))
:objects (apply merge (map :objects nodes))
:constant (every? constant-node? nodes)}))))
(defn vector-node
"Creates a vector node from the given scalars"
([& xs]
(vec-node xs)))
(defn generate-scalar-code
"Creates code that generates a (fn [objects]) which returns a scalar clisk.IFunction"
([n]
(when-not (scalar-node? n) (error "Trying to compile non-scalar node"))
(let [n (node n)
obj-map (:objects n)
obj-syms (keys obj-map)
code (gen-code n '[x y z t] ['x] 'x)]
`(fn [~@obj-syms]
(let []
(reify clisk.IFunction
(calc
[~'this ~'x ~'y ~'z ~'t]
(double ~code))
(calc
[~'this ~'x ~'y ~'z]
(.calc ~'this ~'x ~'y ~'z 0.0))
(calc
[~'this ~'x ~'y]
(.calc ~'this ~'x ~'y 0.0))
(calc
[~'this ~'x]
(.calc ~'this ~'x 0.0))
(calc
[~'this]
(.calc ~'this 0.0))))))))
(defn compile-scalar-node ^clisk.IFunction [n]
"Compile a scalar node to a clisk.IFunction"
(let [n (node n)
obj-map (:objects n)
objs (vals obj-map)]
(if-not (scalar-node? n) (error "Trying to compile non-scalar node"))
(apply
(eval
(generate-scalar-code n))
objs)))
(defn ^:private evaluate
"Evaluates a node at a given position (defaults to zero). Can return either vector or scalar result."
([n] (evaluate n 0.0 0.0 0.0 0.0))
([n x] (evaluate n x 0.0 0.0 0.0))
([n x y] (evaluate n x y 0.0 0.0))
([n x y z] (evaluate n x y z 0.0))
([n x y z t]
(let [n (node n)]
(if (scalar-node? n)
(.calc (compile-scalar-node n) (double x) (double y) (double z) (double t))
(mapv
#(.calc (compile-scalar-node %) (double x) (double y) (double z) (double t))
(components n))))))
(defn constant-node
"Create a node that returns a constant value, can be either a constant vector or scalar value"
([v]
(cond
(vector? v)
(let [node (vec-node v)]
(if (not (:constant node)) (error "Not a constant vector!"))
node)
:else
(value-node (double v)))))
(defn transform-node
"Creates a node containing code based on transforming the other nodes into a new form"
([f & nodes]
(let [nodes (map node nodes)
generated-node (node (apply f nodes))]
(if (every? constant-node? nodes)
(constant-node (evaluate generated-node))
(merge
generated-node
{:objects (apply merge (map :objects nodes))})))))
(defn transform-components
"Calls transform-node separately on each component of a set of nodes. Returns a scalar iff all input nodes are scalar."
([f & nodes]
(let [nodes (map node nodes)]
(if (some vector-node? nodes)
(let [dims (apply max (map dimensions nodes))]
(vec-node
(for [i (range dims)]
(apply transform-node f (map #(component % i) nodes)))))
(apply transform-node f nodes)))))
(defn function-node
"Creates a node which is a scalar function of scalar nodes. Function should be provided as a symbol."
([f & scalars]
(let [scalars (map node scalars)]
(if-let [nd (first (filter (complement scalar-node?) scalars))] (error "Input nodes to function-node must be scalar, got: " (pr-str nd)))
(if-not (symbol? f) (error "Function in function-node must be a symbol, got: " f))
(apply
transform-node
(fn [& xs] `(~f ~@(map get-code xs)))
scalars))))
(defn code-node
"Creates a node from a given code form (may be a vector). Does not preserve objects - must be copied over manually."
[form
& {:keys [objects]
:or {objects nil}}]
(if (vector? form)
(vec-node (map #(code-node % :objects objects) form))
(if (constant-form? form)
(new-node {:type :scalar
:code (eval form)
:constant true
:objects objects
})
(new-node {:type :scalar
:code form
:constant false
:objects objects
}))))
(defmacro texture-bound [v offset width max]
`(let [tv# (double (+ (* (double ~v) ~(double width)) ~(double offset)) )
max# (int (dec ~max)) ]
(if (>= tv# max#)
max#
(if (<= tv# 0)
(int 0)
(int tv#)))))
(defn ^:private texture-map
([image]
(let [^BufferedImage image (cond
(instance? BufferedImage image)
image
(string? image)
(clisk.util/load-image image)
:else
(clisk.node/img image))]
(texture-map image 0 0 (.getWidth image) (.getHeight image))))
([^BufferedImage image x y w h]
(let [texture (object-node image)
tsym (first (keys (:objects texture)))
mw (.getWidth image)
mh (.getHeight image)]
(vec-node
(mapv
(fn [fsym]
(assoc
(code-node
`(let [image# ^java.awt.image.BufferedImage ~tsym
tx# (int (texture-bound ~'x ~x ~w ~mw))
ty# (int (texture-bound ~'y ~y ~h ~mh))]
(~fsym (.getRGB ^BufferedImage image# tx# ty#)) ) )
:objects (:objects texture)) )
[`red-from-argb `green-from-argb `blue-from-argb `alpha-from-argb])))))
(defn vector-function-node
"Creates a vector node from a VectorFunction"
[^mikera.transformz.ATransform vf]
(let [input-dims (.inputDimensions vf)
output-dims (.outputDimensions vf)]
(error "Not yet implemented")))
(defn node [a]
"Creates a node from arbitrary input. Idempotent, can be used to force conversion to node."
(cond
(node? a) a
(number? a) (constant-node a)
(vector? a) (vec-node a)
(vec/vec? a) (vec-node (seq a))
(fn? a) (node (a position-symbol-vector))
(symbol? a) (code-node a)
(keyword? a) (error "Can't convert keyword to node: " a)
(sequential? a) (code-node a)
(instance? java.awt.image.BufferedImage a) (texture-map a)
(instance? mikera.transformz.ATransform a) (vector-function-node a)
:object (object-node a)
:else (error "Unable to build an AST node from: " a)))
(defn ^:private vectorize
"Converts a value into a vector function form. If a is already a vector node, does nothing. If a is a function, apply it to the current position.
If dims are supplied, vectorizes to the given number of dimensions. This duplicates scalars and zero-extends vectors."
([a]
(let [a (node a)]
(cond
(vector-node? a)
a
(scalar-node? a)
(vector-node a)
:else
(error "Should not be possible!"))))
([dims a]
(select-components a (range dims))))
(defn ^:private vlet*
"let one or more values within a vector function"
([bindings form]
(let [form (node form)
binding-pairs (partition 2 bindings)
symbols (map first binding-pairs)
binding-nodes (map (comp node second) binding-pairs)]
(if (seq bindings)
(apply transform-components
(fn [nd & binds]
`(let [~@(interleave symbols (map get-code binds))]
~(get-code nd)))
(cons form binding-nodes))
form))))
(defn ^:private warp
"Warps the position vector before calculating a vector function"
([new-position f]
(let [new-position (node new-position)
f (node f)]
(WarpNode. (node new-position) (node f) nil {:objects (merge (:objects new-position) (:objects f))}))))
(def ZERO-NODE (node 0.0))
(defn ^clisk.IFunction compile-fn [node]
"Compiles clisk scalar node into an object that extends clisk.Function and clojure.lang.IFn.
For most purposes, compile-render-fn should be used to create an IRenderFunction for efficiency."
(clisk.node/compile-scalar-node node))
(defn code-gen
"Generates code for a given node, using standard x, y, z and t parameters."
[node]
)
(defn ^clisk.IRenderFunction compile-render-fn [node]
"Compiles clisk node into an object that implements clisk.IRenderFunction"
we want 4 channel output
obj-map (:objects node)
osyms (keys obj-map)
code (code-gen node)
]
(apply (eval
`(fn [~@osyms]
(reify clisk.IRenderFunction
(^int calc [this ^double x ^double y]
(let [~'z 0.0 ~'t 0.0]
~code)))))
(vals obj-map))))
(defn img
"Creates a BufferedImage from the given node."
(^BufferedImage [node]
(img node DEFAULT-IMAGE-SIZE DEFAULT-IMAGE-SIZE))
(^BufferedImage [node w h]
(img node w h 1.0 (/ (double h) (double w))))
(^BufferedImage [node w h dx dy]
(let [node (clisk.node/node node)
image (clisk.Util/newImage (int w) (int h))
rf (compile-render-fn node)
w (int w)
h (int h)
dx (double dx)
dy (double dy)
dw (double w)
dh (double h)
gen-row! (fn [rownum]
(let [iy (int rownum)]
(dotimes [ix w]
(let [iy (int iy)
x (/ (* dx (+ 0.5 ix)) dw)
y (/ (* dy (+ 0.5 iy)) dh)
argb (.calc rf x y)]
(.setRGB image ix iy argb)))))]
(doall (pmap gen-row! (range h)))
image)))
(defn node-info [node]
(expression-info-internal
`(fn [~@(keys (:objects node))]
(let [~'x 1.0
~'y 1.0
~'z 1.0
~'t 1.0 ]
~(gen-code node '[x y z t] '[x] 'x)))))
|
03df023b7ab99785510ae880539901444eb562d9dd5fea7db5185e109b2d6818 | master/ejabberd | node_hometree.erl | %%% ====================================================================
` ` The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
%%% compliance with the License. You should have received a copy of the
%%% Erlang Public License along with this software. If not, it can be
%%% retrieved via the world wide web at /.
%%%
Software distributed under the License is distributed on an " AS IS "
%%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%%% the License for the specific language governing rights and limitations
%%% under the License.
%%%
The Initial Developer of the Original Code is ProcessOne .
Portions created by ProcessOne are Copyright 2006 - 2012 , ProcessOne
All Rights Reserved . ''
This software is copyright 2006 - 2012 , ProcessOne .
%%%
%%%
2006 - 2012 ProcessOne
@author < >
%%% [-one.net/]
%%% @version {@vsn}, {@date} {@time}
%%% @end
%%% ====================================================================
%%% @todo The item table should be handled by the plugin, but plugin that do
%%% not want to manage it should be able to use the default behaviour.
%%% @todo Plugin modules should be able to register to receive presence update
%%% send to pubsub.
@doc The module < strong>{@module}</strong > is the default PubSub plugin .
< p > It is used as a default for all unknown PubSub node type . It can serve
%%% as a developer basis and reference to build its own custom pubsub node
%%% types.</p>
< p > PubSub plugin nodes are using the { @link gen_node } behaviour.</p >
%%% <p><strong>The API isn't stabilized yet</strong>. The pubsub plugin
%%% development is still a work in progress. However, the system is already
%%% useable and useful as is. Please, send us comments, feedback and
%%% improvements.</p>
-module(node_hometree).
-author('').
-include("pubsub.hrl").
-include("jlib.hrl").
-behaviour(gen_pubsub_node).
%% API definition
-export([init/3, terminate/2,
options/0, features/0,
create_node_permission/6,
create_node/2,
delete_node/1,
purge_node/2,
subscribe_node/8,
unsubscribe_node/4,
publish_item/6,
delete_item/4,
remove_extra_items/3,
get_entity_affiliations/2,
get_node_affiliations/1,
get_affiliation/2,
set_affiliation/3,
get_entity_subscriptions/2,
get_node_subscriptions/1,
get_subscriptions/2,
set_subscriptions/4,
get_pending_nodes/2,
get_states/1,
get_state/2,
set_state/1,
get_items/6,
get_items/2,
get_item/7,
get_item/2,
set_item/1,
get_item_name/3,
node_to_path/1,
path_to_node/1
]).
%% ================
%% API definition
%% ================
%% @spec (Host, ServerHost, Options) -> ok
%% Host = string()
ServerHost = string ( )
%% Options = [{atom(), term()}]
%% @doc <p>Called during pubsub modules initialisation. Any pubsub plugin must
%% implement this function. It can return anything.</p>
%% <p>This function is mainly used to trigger the setup task necessary for the
%% plugin. It can be used for example by the developer to create the specific
module database schema if it does not exists >
init(_Host, _ServerHost, _Options) ->
pubsub_subscription:init(),
mnesia:create_table(pubsub_state,
[{disc_copies, [node()]},
{attributes, record_info(fields, pubsub_state)}]),
mnesia:create_table(pubsub_item,
[{disc_only_copies, [node()]},
{attributes, record_info(fields, pubsub_item)}]),
ItemsFields = record_info(fields, pubsub_item),
case mnesia:table_info(pubsub_item, attributes) of
ItemsFields -> ok;
_ ->
mnesia:transform_table(pubsub_item, ignore, ItemsFields)
end,
ok.
%% @spec (Host, ServerHost) -> ok
%% Host = string()
ServerHost = string ( )
%% @doc <p>Called during pubsub modules termination. Any pubsub plugin must
%% implement this function. It can return anything.</p>
terminate(_Host, _ServerHost) ->
ok.
( ) - > Options
%% Options = [mod_pubsub:nodeOption()]
%% @doc Returns the default pubsub node options.
< p > Example of function return >
%% ```
%% [{deliver_payloads, true},
%% {notify_config, false},
%% {notify_delete, false},
%% {notify_retract, true},
%% {persist_items, true},
{ max_items , 10 } ,
%% {subscribe, true},
%% {access_model, open},
%% {publish_model, publishers},
{ max_payload_size , 100000 } ,
{ send_last_published_item , never } ,
%% {presence_based_delivery, false}]'''
options() ->
[{deliver_payloads, true},
{notify_config, false},
{notify_delete, false},
{notify_retract, true},
{purge_offline, false},
{persist_items, true},
{max_items, ?MAXITEMS},
{subscribe, true},
{access_model, open},
{roster_groups_allowed, []},
{publish_model, publishers},
{notification_type, headline},
{max_payload_size, ?MAX_PAYLOAD_SIZE},
{send_last_published_item, on_sub_and_presence},
{deliver_notifications, true},
{presence_based_delivery, false}].
( ) - > Features
%% Features = [string()]
%% @doc Returns the node features
features() ->
["create-nodes",
"auto-create",
"access-authorize",
"delete-nodes",
"delete-items",
"get-pending",
"instant-nodes",
"manage-subscriptions",
"modify-affiliations",
"multi-subscribe",
"outcast-affiliation",
"persistent-items",
"publish",
"purge-nodes",
"retract-items",
"retrieve-affiliations",
"retrieve-items",
"retrieve-subscriptions",
"subscribe",
"subscription-notifications",
"subscription-options"
].
@spec ( Host , ServerHost , NodeId , ParentNodeId , Owner , Access ) - > { result , Allowed }
Host = mod_pubsub : ( )
ServerHost = string ( )
%% NodeId = mod_pubsub:nodeId()
%% ParentNodeId = mod_pubsub:nodeId()
%% Owner = mod_pubsub:jid()
%% Access = all | atom()
%% Allowed = boolean()
%% @doc Checks if the current user has the permission to create the requested node
%% <p>In {@link node_default}, the permission is decided by the place in the
%% hierarchy where the user is creating the node. The access parameter is also
%% checked in the default module. This parameter depends on the value of the
< tt > access_createnode</tt > ACL value in ejabberd config >
%% <p>This function also check that node can be created a a children of its
%% parent node</p>
< p > PubSub plugins can redefine the PubSub node creation rights as they
%% which. They can simply delegate this check to the {@link node_default}
%% module by implementing this function like this:
` ` ` check_create_user_permission(Host , ServerHost , NodeId , ParentNodeId , Owner , Access ) - >
node_default : check_create_user_permission(Host , ServerHost , NodeId , ParentNodeId , Owner , Access).'''</p >
create_node_permission(Host, ServerHost, NodeId, _ParentNodeId, Owner, Access) ->
LOwner = jlib:jid_tolower(Owner),
{User, Server, _Resource} = LOwner,
Allowed = case LOwner of
{"", Host, ""} ->
true; % pubsub service always allowed
_ ->
case acl:match_rule(ServerHost, Access, LOwner) of
allow ->
case node_to_path(NodeId) of
["home", Server, User | _] -> true;
_ -> false
end;
_ ->
false
end
end,
{result, Allowed}.
@spec ( NodeIdx , Owner ) - > { result , { default , broadcast } }
NodeIdx = mod_pubsub : nodeIdx ( )
%% Owner = mod_pubsub:jid()
%% @doc <p></p>
create_node(NodeIdx, Owner) ->
OwnerKey = jlib:jid_tolower(jlib:jid_remove_resource(Owner)),
set_state(#pubsub_state{stateid = {OwnerKey, NodeIdx}, affiliation = owner}),
{result, {default, broadcast}}.
%% @spec (Nodes) -> {result, {default, broadcast, Reply}}
%% Nodes = [mod_pubsub:pubsubNode()]
%% Reply = [{mod_pubsub:pubsubNode(),
[ { mod_pubsub : ( ) , [ { mod_pubsub : subscription ( ) , mod_pubsub : subId ( ) } ] } ] } ]
%% @doc <p>purge items of deleted nodes after effective deletion.</p>
delete_node(Nodes) ->
Tr = fun(#pubsub_state{stateid = {J, _}, subscriptions = Ss}) ->
lists:map(fun(S) ->
{J, S}
end, Ss)
end,
Reply = lists:map(
fun(#pubsub_node{id = NodeId} = PubsubNode) ->
{result, States} = get_states(NodeId),
lists:foreach(
fun(#pubsub_state{stateid = {LJID, _}, items = Items}) ->
del_items(NodeId, Items),
del_state(NodeId, LJID)
end, States),
{PubsubNode, lists:flatmap(Tr, States)}
end, Nodes),
{result, {default, broadcast, Reply}}.
@spec ( NodeIdx , Sender , Subscriber , AccessModel , SendLast , PresenceSubscription , RosterGroup , Options ) - > { error , Reason } | { result , Result }
NodeIdx : nodeIdx ( )
%% Sender = mod_pubsub:jid()
%% Subscriber = mod_pubsub:jid()
AccessModel = mod_pubsub : ( )
%% SendLast = atom()
%% PresenceSubscription = boolean()
%% RosterGroup = boolean()
%% Options = [mod_pubsub:nodeOption()]
Reason = mod_pubsub : ( )
%% Result = {result, {default, subscribed, mod_pubsub:subId()}}
%% | {result, {default, subscribed, mod_pubsub:subId(), send_last}}
%% | {result, {default, pending, mod_pubsub:subId()}}
%%
@doc < p > Accepts or rejects subcription requests on a PubSub node.</p >
%% <p>The mechanism works as follow:
%% <ul>
< li > The main PubSub module prepares the subscription and passes the
%% result of the preparation as a record.</li>
%% <li>This function gets the prepared record and several other parameters and
%% can decide to:<ul>
%% <li>reject the subscription;</li>
%% <li>allow it as is, letting the main module perform the database
%% persistance;</li>
%% <li>allow it, modifying the record. The main module will store the
modified record;</li >
%% <li>allow it, but perform the needed persistance operations.</li></ul>
%% </li></ul></p>
%% <p>The selected behaviour depends on the return parameter:
%% <ul>
%% <li><tt>{error, Reason}</tt>: an IQ error result will be returned. No
%% subscription will actually be performed.</li>
%% <li><tt>true</tt>: Subscribe operation is allowed, based on the
%% unmodified record passed in parameter <tt>SubscribeResult</tt>. If this
%% parameter contains an error, no subscription will be performed.</li>
< li><tt>{true , PubsubState}</tt > : Subscribe operation is allowed , but
the { @link mod_pubsub : pubsubState ( ) } record returned replaces the value
passed in parameter < tt > >
%% <li><tt>{true, done}</tt>: Subscribe operation is allowed, but the
{ @link mod_pubsub : pubsubState ( ) } will be considered as already stored and
%% no further persistance operation will be performed. This case is used,
%% when the plugin module is doing the persistance by itself or when it want
%% to completly disable persistance.</li></ul>
%% </p>
%% <p>In the default plugin module, the record is unchanged.</p>
subscribe_node(NodeIdx, Sender, Subscriber, AccessModel,
SendLast, PresenceSubscription, RosterGroup, Options) ->
SubKey = jlib:jid_tolower(Subscriber),
GenKey = jlib:jid_remove_resource(SubKey),
Authorized = (jlib:jid_tolower(jlib:jid_remove_resource(Sender)) == GenKey),
GenState = get_state(NodeIdx, GenKey),
SubState = case SubKey of
GenKey -> GenState;
_ -> get_state(NodeIdx, SubKey)
end,
Affiliation = GenState#pubsub_state.affiliation,
Subscriptions = SubState#pubsub_state.subscriptions,
Whitelisted = lists:member(Affiliation, [member, publisher, owner]),
PendingSubscription = lists:any(fun({pending, _}) -> true;
(_) -> false
end, Subscriptions),
if
not Authorized ->
%% JIDs do not match
{error, ?ERR_EXTENDED(?ERR_BAD_REQUEST, "invalid-jid")};
Affiliation == outcast ->
%% Requesting entity is blocked
{error, ?ERR_FORBIDDEN};
PendingSubscription ->
%% Requesting entity has pending subscription
{error, ?ERR_EXTENDED(?ERR_NOT_AUTHORIZED, "pending-subscription")};
(AccessModel == presence) and (not PresenceSubscription) ->
%% Entity is not authorized to create a subscription (presence subscription required)
{error, ?ERR_EXTENDED(?ERR_NOT_AUTHORIZED, "presence-subscription-required")};
(AccessModel == roster) and (not RosterGroup) ->
%% Entity is not authorized to create a subscription (not in roster group)
{error, ?ERR_EXTENDED(?ERR_NOT_AUTHORIZED, "not-in-roster-group")};
(AccessModel == whitelist) and (not Whitelisted) ->
%% Node has whitelist access model and entity lacks required affiliation
{error, ?ERR_EXTENDED(?ERR_NOT_ALLOWED, "closed-node")};
- >
%% % Payment is required for a subscription
%% {error, ?ERR_PAYMENT_REQUIRED};
%%ForbiddenAnonymous ->
%% % Requesting entity is anonymous
%% {error, ?ERR_FORBIDDEN};
true ->
case pubsub_subscription:add_subscription(Subscriber, NodeIdx, Options) of
SubId when is_list(SubId) ->
NewSub = case AccessModel of
authorize -> pending;
_ -> subscribed
end,
set_state(SubState#pubsub_state{subscriptions = [{NewSub, SubId} | Subscriptions]}),
case {NewSub, SendLast} of
{subscribed, never} ->
{result, {default, subscribed, SubId}};
{subscribed, _} ->
{result, {default, subscribed, SubId, send_last}};
{_, _} ->
{result, {default, pending, SubId}}
end;
_ ->
{error, ?ERR_INTERNAL_SERVER_ERROR}
end
end.
%% @spec (NodeIdx, Sender, Subscriber, SubId) -> {error, Reason} | {result, default}
%% NodeIdx = mod_pubsub:nodeIdx()
%% Sender = mod_pubsub:jid()
%% Subscriber = mod_pubsub:jid()
%% SubId = mod_pubsub:subId()
%% Reason = mod_pubsub:stanzaError()
%% @doc <p>Unsubscribe the <tt>Subscriber</tt> from the <tt>Node</tt>.</p>
unsubscribe_node(NodeIdx, Sender, Subscriber, SubId) ->
    SubKey = jlib:jid_tolower(Subscriber),
    GenKey = jlib:jid_remove_resource(SubKey),
    %% Only the bare JID matching Subscriber may perform the unsubscribe.
    Authorized = (jlib:jid_tolower(jlib:jid_remove_resource(Sender)) == GenKey),
    GenState = get_state(NodeIdx, GenKey),
    SubState = case SubKey of
	GenKey -> GenState;
	_ -> get_state(NodeIdx, SubKey)
    end,
    %% Keep only well-formed {Subscription, SubId} pairs; legacy states may
    %% still hold bare subscription atoms.
    Subscriptions = lists:filter(fun({_Sub, _SubId}) -> true;
				    (_SubId)	     -> false
				 end, SubState#pubsub_state.subscriptions),
    %% SubId is considered supplied when it is a non-empty list (string).
    SubIdExists = case SubId of
	[] -> false;
	List when is_list(List) -> true;
	_ -> false
    end,
    if
	%% Requesting entity is prohibited from unsubscribing entity
	not Authorized ->
	    {error, ?ERR_FORBIDDEN};
	%% Entity did not specify SubId
	%%SubId == "", ?? ->
	%%	{error, ?ERR_EXTENDED(?ERR_BAD_REQUEST, "subid-required")};
	%% Invalid subscription identifier
	%%InvalidSubId ->
	%%	{error, ?ERR_EXTENDED(?ERR_NOT_ACCEPTABLE, "invalid-subid")};
	%% Requesting entity is not a subscriber
	Subscriptions == [] ->
	    {error, ?ERR_EXTENDED(?ERR_UNEXPECTED_REQUEST_CANCEL, "not-subscribed")};
	%% Subid supplied, so use that.
	SubIdExists ->
	    Sub = first_in_list(fun(S) ->
					case S of
					    {_Sub, SubId} -> true;
					    _	          -> false
					end
				end, SubState#pubsub_state.subscriptions),
	    case Sub of
		{value, S} ->
		    delete_subscriptions(SubKey, NodeIdx, [S], SubState),
		    {result, default};
		false ->
		    {error, ?ERR_EXTENDED(?ERR_UNEXPECTED_REQUEST_CANCEL, "not-subscribed")}
	    end;
	%% Asking to remove all subscriptions to the given node
	SubId == all ->
	    delete_subscriptions(SubKey, NodeIdx, Subscriptions, SubState),
	    {result, default};
	%% No subid supplied, but there's only one matching subscription
	length(Subscriptions) == 1 ->
	    delete_subscriptions(SubKey, NodeIdx, Subscriptions, SubState),
	    {result, default};
	%% No subid and more than one possible subscription match.
	true ->
	    {error, ?ERR_EXTENDED(?ERR_BAD_REQUEST, "subid-required")}
    end.
%% Remove each {Subscription, SubId} pair in Subscriptions from SubState,
%% deleting the per-SubId options stored by pubsub_subscription as well.
%% When neither an affiliation nor any subscription remains, the whole
%% state row is removed; otherwise the trimmed state is written back.
delete_subscriptions(SubKey, NodeIdx, Subscriptions, SubState) ->
NewSubs = lists:foldl(fun({Subscription, SubId}, Acc) ->
%% drop the stored subscription options before forgetting the pair
pubsub_subscription:delete_subscription(SubKey, NodeIdx, SubId),
Acc -- [{Subscription, SubId}]
end, SubState#pubsub_state.subscriptions, Subscriptions),
case {SubState#pubsub_state.affiliation, NewSubs} of
{none, []} ->
% Just a regular subscriber, and this is final item, so
% delete the state.
del_state(NodeIdx, SubKey);
_ ->
set_state(SubState#pubsub_state{subscriptions = NewSubs})
end.
%% @spec (NodeIdx, Publisher, PublishModel, MaxItems, ItemId, Payload) ->
%%		 {result, {default, broadcast, ItemIds}} | {error, Reason}
%% NodeIdx = mod_pubsub:nodeIdx()
%% Publisher = mod_pubsub:jid()
%% PublishModel = atom()
%% MaxItems = integer()
%% ItemId = mod_pubsub:itemId()
%% Payload = mod_pubsub:payload()
%% ItemIds = [mod_pubsub:itemId()] | []
%% Reason = mod_pubsub:stanzaError()
%% @doc <p>Publishes the item passed as parameter.</p>
%% <p>The mechanism works as follow:
%% <ul>
%% <li>The main PubSub module prepares the item to publish and passes the
%% result of the preparation as a {@link mod_pubsub:pubsubItem()} record.</li>
%% <li>This function gets the prepared record and several other parameters and can decide to:<ul>
%%  <li>reject the publication;</li>
%%  <li>allow the publication as is, letting the main module perform the database persistance;</li>
%%  <li>allow the publication, modifying the record. The main module will store the modified record;</li>
%%  <li>allow it, but perform the needed persistance operations.</li></ul>
%% </li></ul></p>
%% <p>The selected behaviour depends on the return parameter:
%% <ul>
%% <li><tt>{error, Reason}</tt>: an iq error result will be return. No
%% publication is actually performed.</li>
%% <li><tt>true</tt>: Publication operation is allowed, based on the
%% unmodified record passed in parameter <tt>Item</tt>. If the <tt>Item</tt>
%% parameter contains an error, no subscription will actually be
%% performed.</li>
%% <li><tt>{true, Item}</tt>: Publication operation is allowed, but the
%% {@link mod_pubsub:pubsubItem()} record returned replaces the value passed
%% in parameter <tt>Item</tt>. The persistance will be performed by the main
%% module.</li>
%% <li><tt>{true, done}</tt>: Publication operation is allowed, but the
%% {@link mod_pubsub:pubsubItem()} will be considered as already stored and
%% no further persistance operation will be performed. This case is used,
%% when the plugin module is doing the persistance by itself or when it want
%% to completly disable persistance.</li></ul>
%% </p>
%% <p>In the default plugin module, the record is unchanged.</p>
%% Publish ItemId with Payload on NodeIdx on behalf of Publisher, enforcing
%% PublishModel (open | publishers | subscribers) and trimming the stored
%% history to MaxItems. Returns {result, {default, broadcast, OldItemIds}}
%% or {error, ?ERR_FORBIDDEN}.
publish_item(NodeIdx, Publisher, PublishModel, MaxItems, ItemId, Payload) ->
SubKey = jlib:jid_tolower(Publisher),
GenKey = jlib:jid_remove_resource(SubKey),
%% Affiliation lives on the bare-JID state; subscriptions may live on the
%% full-JID state.
GenState = get_state(NodeIdx, GenKey),
SubState = case SubKey of
GenKey -> GenState;
_ -> get_state(NodeIdx, SubKey)
end,
Affiliation = GenState#pubsub_state.affiliation,
%% Only consult subscriptions when the model requires being subscribed.
Subscribed = case PublishModel of
subscribers -> is_subscribed(SubState#pubsub_state.subscriptions);
_ -> undefined
end,
if
not ((PublishModel == open)
or ((PublishModel == publishers)
and ((Affiliation == owner) or (Affiliation == publisher)))
or (Subscribed == true)) ->
%% Entity does not have sufficient privileges to publish to node
{error, ?ERR_FORBIDDEN};
true ->
%% TODO: check creation, presence, roster
if MaxItems > 0 ->
Now = now(),
PubId = {Now, SubKey},
%% Re-publishing an existing ItemId updates it in place, keeping
%% its original creation stamp.
Item = case get_item(NodeIdx, ItemId) of
{result, OldItem} ->
OldItem#pubsub_item{modification = PubId,
payload = Payload};
_ ->
#pubsub_item{itemid = {ItemId, NodeIdx},
creation = {Now, GenKey},
modification = PubId,
payload = Payload}
end,
%% Move ItemId to the head of the history, dropping any duplicate.
Items = [ItemId | GenState#pubsub_state.items--[ItemId]],
{result, {NI, OI}} = remove_extra_items(NodeIdx, MaxItems, Items),
set_item(Item),
set_state(GenState#pubsub_state{items = NI}),
{result, {default, broadcast, OI}};
true ->
%% MaxItems == 0: transient node, nothing is stored.
{result, {default, broadcast, []}}
end
end.
%% @spec (NodeIdx, MaxItems, ItemIds) -> {result, {NewItemIds, OldItemIds}}
%% NodeIdx = mod_pubsub:nodeIdx()
%% MaxItems = integer() | unlimited
%% ItemIds = [mod_pubsub:itemId()]
%% NewItemIds = [mod_pubsub:itemId()]
%% OldItemIds = [mod_pubsub:itemId()] | []
%% @doc <p>This function is used to remove extra items, most notably when the
%% maximum number of items has been reached.</p>
%% <p>This function is used internally by the core PubSub module, as no
%% permission check is performed.</p>
%% <p>In the default plugin module, the oldest items are removed, but other
%% rules can be used.</p>
%% <p>If another PubSub plugin wants to delegate the item removal (and if the
%% plugin is using the default pubsub storage), it can implements this function like this:
%% ```remove_extra_items(NodeIdx, MaxItems, ItemIds) ->
%%	   node_default:remove_extra_items(NodeIdx, MaxItems, ItemIds).'''</p>
%% Trim ItemIds down to at most MaxItems entries; the surplus (oldest,
%% i.e. trailing) entries are deleted from storage and returned as the
%% second element of the result pair.
remove_extra_items(_NodeIdx, unlimited, ItemIds) ->
    {result, {ItemIds, []}};
remove_extra_items(NodeIdx, MaxItems, ItemIds) ->
    {KeptItems, ExtraItems} =
	case length(ItemIds) =< MaxItems of
	    true  -> {ItemIds, []};
	    false -> lists:split(MaxItems, ItemIds)
	end,
    %% Remove extra items:
    del_items(NodeIdx, ExtraItems),
    %% Return the new items list:
    {result, {KeptItems, ExtraItems}}.
%% @spec (NodeIdx, Publisher, PublishModel, ItemId) ->
%%		{result, {default, broadcast}} | {error, Reason}
%% NodeIdx = mod_pubsub:nodeIdx()
%% Publisher = mod_pubsub:jid()
%% PublishModel = atom()
%% ItemId = mod_pubsub:itemId()
%% Reason = mod_pubsub:stanzaError()
%% @doc <p>Triggers item deletion.</p>
%% <p>Default plugin: The user performing the deletion must be the node owner
%% or a publisher, or PublishModel being open.</p>
%% Delete ItemId from NodeIdx on behalf of Publisher. Deletion is allowed
%% for publishers/owners, when the publish model is open, or when the
%% requester created the item. Owners may additionally delete items that
%% are recorded on other entities' states.
delete_item(NodeIdx, Publisher, PublishModel, ItemId) ->
SubKey = jlib:jid_tolower(Publisher),
GenKey = jlib:jid_remove_resource(SubKey),
GenState = get_state(NodeIdx, GenKey),
#pubsub_state{affiliation = Affiliation, items = Items} = GenState,
%% Creator check: the item's creation stamp carries the bare JID.
Allowed = (Affiliation == publisher) orelse (Affiliation == owner)
orelse (PublishModel == open)
orelse case get_item(NodeIdx, ItemId) of
{result, #pubsub_item{creation = {_, GenKey}}} -> true;
_ -> false
end,
if
not Allowed ->
%% Requesting entity does not have sufficient privileges
{error, ?ERR_FORBIDDEN};
true ->
case lists:member(ItemId, Items) of
true ->
del_item(NodeIdx, ItemId),
set_state(GenState#pubsub_state{items = lists:delete(ItemId, Items)}),
{result, {default, broadcast}};
false ->
case Affiliation of
owner ->
%% Owner can delete any items from its own node
%% Scan every state; the fold keeps ?ERR_ITEM_NOT_FOUND
%% unless some state actually held the item.
{result, States} = get_states(NodeIdx),
lists:foldl(
fun(#pubsub_state{items = PI} = S, Res) ->
case lists:member(ItemId, PI) of
true ->
del_item(NodeIdx, ItemId),
set_state(S#pubsub_state{items = lists:delete(ItemId, PI)}),
{result, {default, broadcast}};
false ->
Res
end;
(_, Res) ->
Res
end, {error, ?ERR_ITEM_NOT_FOUND}, States);
_ ->
%% Non-existent node or item
{error, ?ERR_ITEM_NOT_FOUND}
end
end
end.
%% @spec (NodeIdx, Owner) -> {error, Reason} | {result, {default, broadcast}}
%% NodeIdx = mod_pubsub:nodeIdx()
%% Owner = mod_pubsub:jid()
%% Reason = mod_pubsub:stanzaError()
%% Delete every stored item on the node; only an owner may purge.
purge_node(NodeIdx, Owner) ->
    GenKey = jlib:jid_remove_resource(jlib:jid_tolower(Owner)),
    case get_state(NodeIdx, GenKey) of
	#pubsub_state{affiliation = owner} ->
	    {result, States} = get_states(NodeIdx),
	    DropItems =
		fun(#pubsub_state{items = []}) ->
			ok;
		   (#pubsub_state{items = Items} = State) ->
			del_items(NodeIdx, Items),
			set_state(State#pubsub_state{items = []})
		end,
	    lists:foreach(DropItems, States),
	    {result, {default, broadcast}};
	_ ->
	    %% Entity is not owner
	    {error, ?ERR_FORBIDDEN}
    end.
%% @spec (Host, Owner) -> {result, Reply}
%% Host = mod_pubsub:host()
%% Owner = mod_pubsub:jid()
%% Reply = [] | [{mod_pubsub:pubsubNode(), mod_pubsub:affiliation()}]
%% @doc <p>Return the current affiliations for the given user</p>
%% <p>The default module reads affiliations in the main
%% <tt>pubsub_state</tt> table. If a plugin stores its data in the same
%% table, it should return an empty list, as the affiliation will be read by
%% the default PubSub module. Otherwise, it should return its own affiliation,
%% that will be added to the affiliation stored in the main
%% <tt>pubsub_state</tt> table.</p>
%% Collect {Node, Affiliation} pairs for Owner's bare JID, keeping only
%% nodes hosted on Host. The node-tree backend module is read from the
%% per-host config ets table, defaulting to nodetree_tree.
get_entity_affiliations(Host, Owner) ->
SubKey = jlib:jid_tolower(Owner),
GenKey = jlib:jid_remove_resource(SubKey),
States = mnesia:match_object(#pubsub_state{stateid = {GenKey, '_'}, _ = '_'}),
NodeTree = case catch ets:lookup(gen_mod:get_module_proc(Host, config), nodetree) of
[{nodetree, N}] -> N;
_ -> nodetree_tree
end,
Reply = lists:foldl(fun(#pubsub_state{stateid = {_, N}, affiliation = A}, Acc) ->
case NodeTree:get_node(N) of
%% keep only nodes that belong to this Host
#pubsub_node{nodeid = {Host, _}} = Node -> [{Node, A}|Acc];
_ -> Acc
end
end, [], States),
{result, Reply}.
%% List every {JID, Affiliation} pair recorded on the node.
get_node_affiliations(NodeId) ->
    {result, States} = get_states(NodeId),
    ToPair = fun(#pubsub_state{stateid = {JID, _}, affiliation = Aff}) ->
		     {JID, Aff}
	     end,
    {result, lists:map(ToPair, States)}.
%% Return the affiliation stored on the bare-JID state of Owner; a missing
%% state yields the record default from get_state/2.
get_affiliation(NodeId, Owner) ->
    GenKey = jlib:jid_remove_resource(jlib:jid_tolower(Owner)),
    #pubsub_state{affiliation = Affiliation} = get_state(NodeId, GenKey),
    {result, Affiliation}.
%% Set the affiliation of Owner (bare JID) on the node. When the
%% affiliation is reset to none and no subscriptions remain, the
%% now-empty state record is deleted instead of being stored.
set_affiliation(NodeId, Owner, Affiliation) ->
    SubKey = jlib:jid_tolower(Owner),
    GenKey = jlib:jid_remove_resource(SubKey),
    GenState = get_state(NodeId, GenKey),
    case {Affiliation, GenState#pubsub_state.subscriptions} of
	{none, none} ->
	    %% legacy format: subscriptions stored as a bare atom
	    del_state(NodeId, GenKey);
	{none, []} ->
	    %% Fix: subscriptions is normally a list (cf. set_subscriptions'
	    %% {[], none} cleanup), so matching only the atom 'none' left
	    %% empty states behind forever.
	    del_state(NodeId, GenKey);
	_ ->
	    set_state(GenState#pubsub_state{affiliation = Affiliation})
    end.
%% @spec (Host, Owner) ->
%%		{'result', []
%%			 | [{Node, Subscription, SubId, Entity}]
%%			 | [{Node, Subscription, Entity}]}
%% Host = mod_pubsub:host()
%% Owner = mod_pubsub:jid()
%% Node = mod_pubsub:pubsubNode()
%% Subscription = mod_pubsub:subscription()
%% SubId = mod_pubsub:subId()
%% Entity = mod_pubsub:ljid()
%% @doc <p>Return the current subscriptions for the given user</p>
%% <p>The default module reads subscriptions in the main
%% <tt>pubsub_state</tt> table. If a plugin stores its data in the same
%% table, it should return an empty list, as the affiliation will be read by
%% the default PubSub module. Otherwise, it should return its own affiliation,
%% that will be added to the affiliation stored in the main
%% <tt>pubsub_state</tt> table.</p>
%% Collect Owner's subscriptions across all nodes on Host. A bare JID
%% matches every resource's state via the wildcard; a full JID matches
%% both its bare-JID and full-JID states.
get_entity_subscriptions(Host, Owner) ->
{U, D, _} = SubKey = jlib:jid_tolower(Owner),
GenKey = jlib:jid_remove_resource(SubKey),
States = case SubKey of
GenKey -> mnesia:match_object(
#pubsub_state{stateid = {{U, D, '_'}, '_'}, _ = '_'});
_ -> mnesia:match_object(
#pubsub_state{stateid = {GenKey, '_'}, _ = '_'})
++ mnesia:match_object(
#pubsub_state{stateid = {SubKey, '_'}, _ = '_'})
end,
NodeTree = case catch ets:lookup(gen_mod:get_module_proc(Host, config), nodetree) of
[{nodetree, N}] -> N;
_ -> nodetree_tree
end,
%% {Sub, SubId} pairs become 4-tuples; legacy bare atoms become 3-tuples.
Reply = lists:foldl(fun(#pubsub_state{stateid = {J, N}, subscriptions = Ss}, Acc) ->
case NodeTree:get_node(N) of
#pubsub_node{nodeid = {Host, _}} = Node ->
lists:foldl(fun({Sub, SubId}, Acc2) ->
[{Node, Sub, SubId, J} | Acc2];
(S, Acc2) ->
[{Node, S, J} | Acc2]
end, Acc, Ss);
_ -> Acc
end
end, [], States),
{result, Reply}.
%% List every subscription recorded on the node as {JID, Sub, SubId}
%% triples (or {JID, Sub} / {JID, none} for legacy state formats).
get_node_subscriptions(NodeId) ->
{result, States} = get_states(NodeId),
Tr = fun(#pubsub_state{stateid = {J, _}, subscriptions = Subscriptions}) ->
%% TODO: get rid of cases to handle non-list subscriptions
case Subscriptions of
[_|_] ->
lists:foldl(fun({S, SubId}, Acc) ->
[{J, S, SubId} | Acc];
(S, Acc) ->
[{J, S} | Acc]
end, [], Subscriptions);
[] ->
[];
_ ->
%% legacy: subscriptions stored as a bare atom
[{J, none}]
end
end,
{result, lists:flatmap(Tr, States)}.
%% Return the subscription list stored on Owner's full-JID state.
get_subscriptions(NodeId, Owner) ->
    #pubsub_state{subscriptions = Subscriptions} =
	get_state(NodeId, jlib:jid_tolower(Owner)),
    {result, Subscriptions}.
%% Create, replace or remove a subscription for Owner on NodeId.
%% Subscription == none means "remove"; SubId == "" is only unambiguous
%% when at most one subscription exists.
set_subscriptions(NodeId, Owner, Subscription, SubId) ->
SubKey = jlib:jid_tolower(Owner),
SubState = get_state(NodeId, SubKey),
case {SubId, SubState#pubsub_state.subscriptions} of
{_, []} ->
%% no existing subscription: removing is an error, anything else creates
case Subscription of
none -> {error, ?ERR_EXTENDED(?ERR_BAD_REQUEST, "not-subscribed")};
_ -> new_subscription(NodeId, Owner, Subscription, SubState)
end;
{"", [{_, SID}]} ->
%% no SubId given, exactly one subscription: operate on it
case Subscription of
none -> unsub_with_subid(NodeId, SID, SubState);
_ -> replace_subscription({Subscription, SID}, SubState)
end;
{"", [_|_]} ->
%% no SubId given but several subscriptions: ambiguous
{error, ?ERR_EXTENDED(?ERR_BAD_REQUEST, "subid-required")};
_ ->
case Subscription of
none -> unsub_with_subid(NodeId, SubId, SubState);
_ -> replace_subscription({Subscription, SubId}, SubState)
end
end.
%% Rewrite the subscription type on the stored subscriptions of SubState
%% to the one carried by NewSub, then persist the updated state.
replace_subscription(NewSub, SubState) ->
NewSubs = replace_subscription(NewSub,
SubState#pubsub_state.subscriptions, []),
set_state(SubState#pubsub_state{subscriptions = NewSubs}).
%% Helper for replace_subscription/2: map every {_, SubID} pair to
%% {Sub, SubID}, accumulating in reverse order.
%% NOTE(review): the SubId in the first argument is never compared against
%% the stored SubID, so ALL subscriptions receive the new type, and the
%% resulting list is reversed — presumably acceptable to callers; verify.
replace_subscription(_, [], Acc) ->
Acc;
replace_subscription({Sub, SubId}, [{_, SubID} | T], Acc) ->
replace_subscription({Sub, SubId}, T, [{Sub, SubID} | Acc]).
%% Create a fresh subscription of the given type for Owner on NodeId,
%% allocating a SubId via pubsub_subscription, persist it, and return
%% the {Subscription, SubId} pair.
new_subscription(NodeId, Owner, Subscription, SubState) ->
SubId = pubsub_subscription:add_subscription(Owner, NodeId, []),
Subscriptions = SubState#pubsub_state.subscriptions,
set_state(SubState#pubsub_state{subscriptions = [{Subscription, SubId} | Subscriptions]}),
{Subscription, SubId}.
%% Remove the subscription identified by SubId from SubState (including
%% its stored options); delete the whole state row when neither an
%% affiliation nor any subscription remains.
unsub_with_subid(NodeId, SubId, SubState) ->
pubsub_subscription:delete_subscription(SubState#pubsub_state.stateid,
NodeId, SubId),
NewSubs = lists:filter(fun ({_, SID}) -> SubId =/= SID end,
SubState#pubsub_state.subscriptions),
case {NewSubs, SubState#pubsub_state.affiliation} of
{[], none} ->
%% nothing left on this state: drop it entirely
del_state(NodeId, element(1, SubState#pubsub_state.stateid));
_ ->
set_state(SubState#pubsub_state{subscriptions = NewSubs})
end.
%% TODO: doc
%% @spec (Host, Owner) -> {result, Reply} | {error, Reason}
%% Host = mod_pubsub:host()
%% Owner = mod_pubsub:jid()
%% Reply = [] | [mod_pubsub:nodeId()]
%% @doc <p>Returns a list of Owner's nodes on Host with pending
%% subscriptions.</p>
%% Find the nodes owned by Owner (bare JID) that carry at least one
%% pending subscription from any entity.
get_pending_nodes(Host, Owner) ->
GenKey = jlib:jid_remove_resource(jlib:jid_tolower(Owner)),
%% NodeIDs: indexes of all nodes where Owner is recorded as owner.
States = mnesia:match_object(#pubsub_state{stateid = {GenKey, '_'},
affiliation = owner,
_ = '_'}),
NodeIDs = [ID || #pubsub_state{stateid = {_, ID}} <- States],
NodeTree = case catch ets:lookup(gen_mod:get_module_proc(Host, config), nodetree) of
[{nodetree, N}] -> N;
_ -> nodetree_tree
end,
%% Scan the whole pubsub_state table for pending subscriptions on those
%% nodes. NOTE(review): mnesia:foldl/3 requires an enclosing transaction
%% context — callers appear to provide one; confirm.
Reply = mnesia:foldl(fun(#pubsub_state{stateid = {_, NID}} = S, Acc) ->
case lists:member(NID, NodeIDs) of
true ->
case get_nodes_helper(NodeTree, S) of
{value, Node} -> [Node | Acc];
false -> Acc
end;
false ->
Acc
end
end, [], pubsub_state),
{result, Reply}.
%% When the state carries at least one pending subscription (pair or
%% legacy bare atom), resolve its node id via NodeTree and return
%% {value, NodeId}; otherwise false.
get_nodes_helper(NodeTree,
		 #pubsub_state{stateid = {_, N}, subscriptions = Subs}) ->
    IsPending = fun ({pending, _}) -> true;
		    (pending)	   -> true;
		    (_)		   -> false
		end,
    case lists:any(IsPending, Subs) of
	false ->
	    false;
	true ->
	    case NodeTree:get_node(N) of
		#pubsub_node{nodeid = {_, Node}} -> {value, Node};
		_				 -> false
	    end
    end.
%% @spec (NodeIdx) -> {result, States}
%% NodeIdx = mod_pubsub:nodeIdx()
%% States = [] | [mod_pubsub:pubsubState()]
%% @doc Returns the list of stored states for a given node.
%% <p>For the default PubSub module, states are stored in database.</p>
%% <p>We can consider that the pubsub_state table have been created by the main
%% mod_pubsub module.</p>
%% <p>PubSub plugins can store the states where they wants (for example in a
%% relational database).</p>
%% <p>If a PubSub plugin wants to delegate the states storage to the default node,
%% they can implement this function like this:
%% ```get_states(NodeIdx) ->
%%	   node_default:get_states(NodeIdx).'''</p>
%% Read every pubsub_state row attached to the node; failures (e.g. no
%% enclosing mnesia activity) degrade to an empty list.
get_states(NodeIdx) ->
    Pattern = #pubsub_state{stateid = {'_', NodeIdx}, _ = '_'},
    case catch mnesia:match_object(Pattern) of
	Result when is_list(Result) -> {result, Result};
	_			    -> {result, []}
    end.
%% @spec (NodeIdx, JID) -> State
%% NodeIdx = mod_pubsub:nodeIdx()
%% JID = mod_pubsub:jid()
%% State = mod_pubsub:pubsubState()
%% @doc <p>Returns a state (one state list), given its reference.</p>
%% Read the state row keyed {JID, NodeIdx}; when absent (or the read
%% fails), return a fresh default record carrying that key.
get_state(NodeIdx, JID) ->
    StateId = {JID, NodeIdx},
    case catch mnesia:read({pubsub_state, StateId}) of
	[#pubsub_state{} = State] -> State;
	_			  -> #pubsub_state{stateid = StateId}
    end.
%% @spec (State) -> ok | {error, Reason}
%% State = mod_pubsub:pubsubState()
%% Reason = mod_pubsub:stanzaError()
%% @doc <p>Write a state into database.</p>
%% Persist a pubsub_state record; anything else is rejected.
%% NOTE(review): mnesia:write/1 needs an enclosing mnesia activity
%% (transaction) — callers appear to provide one; confirm.
set_state(State) when is_record(State, pubsub_state) ->
mnesia:write(State);
set_state(_) ->
{error, ?ERR_INTERNAL_SERVER_ERROR}.
%% @spec (NodeIdx, JID) -> ok | {error, Reason}
%% NodeIdx = mod_pubsub:nodeIdx()
%% JID = mod_pubsub:jid()
%% Reason = mod_pubsub:stanzaError()
%% @doc <p>Delete a state from database.</p>
%% Remove the state row keyed {JID, NodeIdx} from the pubsub_state table.
del_state(NodeIdx, JID) ->
mnesia:delete({pubsub_state, {JID, NodeIdx}}).
%% @spec (NodeIdx, From) -> {result, Items}
%% NodeIdx = mod_pubsub:nodeIdx()
%% From = mod_pubsub:jid()
%% Items = [] | [mod_pubsub:pubsubItem()]
%% @doc Returns the list of stored items for a given node.
%% <p>For the default PubSub module, items are stored in database.</p>
%% <p>We can consider that the pubsub_item table have been created by the main
%% mod_pubsub module.</p>
%% <p>PubSub plugins can store the items where they wants (for example in a
%% relational database), or they can even decide not to persist any items.</p>
%% <p>If a PubSub plugin wants to delegate the item storage to the default node,
%% they can implement this function like this:
%% ```get_items(NodeIdx, From) ->
%%	   node_default:get_items(NodeIdx, From).'''</p>
%% Return all items of the node, newest first (sorted by modification
%% stamp, then reversed).
get_items(NodeIdx, _From) ->
Items = mnesia:match_object(#pubsub_item{itemid = {'_', NodeIdx}, _ = '_'}),
{result, lists:reverse(lists:keysort(#pubsub_item.modification, Items))}.
%% Fetch the node items on behalf of JID, enforcing the node access model
%% before delegating to get_items/2.
get_items(NodeIdx, JID, AccessModel, PresenceSubscription, RosterGroup, _SubId) ->
    SubKey = jlib:jid_tolower(JID),
    GenKey = jlib:jid_remove_resource(SubKey),
    %% Affiliation is held on the bare-JID state; subscriptions possibly
    %% on the full-JID state.
    GenState = get_state(NodeIdx, GenKey),
    SubState = get_state(NodeIdx, SubKey),
    Affiliation = GenState#pubsub_state.affiliation,
    Subscriptions = SubState#pubsub_state.subscriptions,
    Whitelisted = can_fetch_item(Affiliation, Subscriptions),
    if
	%%SubId == "", ?? ->
	%% Entity has multiple subscriptions to the node but does not specify a subscription ID
	%{error, ?ERR_EXTENDED(?ERR_BAD_REQUEST, "subid-required")};
	%%InvalidSubId ->
	%% Entity is subscribed but specifies an invalid subscription ID
	%{error, ?ERR_EXTENDED(?ERR_NOT_ACCEPTABLE, "invalid-subid")};
	GenState#pubsub_state.affiliation == outcast ->
	    %% Requesting entity is blocked
	    {error, ?ERR_FORBIDDEN};
	(AccessModel == presence) and (not PresenceSubscription) ->
	    %% Entity is not authorized to create a subscription (presence subscription required)
	    {error, ?ERR_EXTENDED(?ERR_NOT_AUTHORIZED, "presence-subscription-required")};
	(AccessModel == roster) and (not RosterGroup) ->
	    %% Entity is not authorized to create a subscription (not in roster group)
	    {error, ?ERR_EXTENDED(?ERR_NOT_AUTHORIZED, "not-in-roster-group")};
	(AccessModel == whitelist) and (not Whitelisted) ->
	    %% Node has whitelist access model and entity lacks required affiliation
	    {error, ?ERR_EXTENDED(?ERR_NOT_ALLOWED, "closed-node")};
	(AccessModel == authorize) and (not Whitelisted) ->
	    %% Node has authorize access model
	    {error, ?ERR_FORBIDDEN};
	%%PaymentRequired ->
	%%	% Payment is required for a subscription
	%%	{error, ?ERR_PAYMENT_REQUIRED};
	true ->
	    get_items(NodeIdx, JID)
    end.
%% @spec (NodeIdx, ItemId) -> {result, Item} | {error, 'item-not-found'}
%% NodeIdx = mod_pubsub:nodeIdx()
%% ItemId = mod_pubsub:itemId()
%% Item = mod_pubsub:pubsubItem()
%% @doc <p>Returns an item (one item list), given its reference.</p>
%% Fetch one stored item by its {ItemId, NodeIdx} primary key.
get_item(NodeIdx, ItemId) ->
    case mnesia:read({pubsub_item, {ItemId, NodeIdx}}) of
	[#pubsub_item{} = Item] ->
	    {result, Item};
	_ ->
	    {error, ?ERR_ITEM_NOT_FOUND}
    end.
%% @spec (NodeIdx, ItemId, JID, AccessModel, PresenceSubscription, RosterGroup, SubId) -> {result, Item} | {error, Reason}
%% NodeIdx = mod_pubsub:nodeIdx()
%% ItemId = mod_pubsub:itemId()
%% JID = mod_pubsub:jid()
%% AccessModel = mod_pubsub:accessModel()
%% PresenceSubscription = boolean()
%% RosterGroup = boolean()
%% SubId = mod_pubsub:subId()
%% Item = mod_pubsub:pubsubItem()
%% Reason = mod_pubsub:stanzaError() | 'item-not-found'
%% Fetch one item on behalf of JID, enforcing the node access model before
%% delegating to get_item/2. Note: unlike get_items/6, the whitelist check
%% here uses the bare-JID state's subscriptions.
get_item(NodeIdx, ItemId, JID, AccessModel, PresenceSubscription, RosterGroup, _SubId) ->
    SubKey = jlib:jid_tolower(JID),
    GenKey = jlib:jid_remove_resource(SubKey),
    GenState = get_state(NodeIdx, GenKey),
    Affiliation = GenState#pubsub_state.affiliation,
    Subscriptions = GenState#pubsub_state.subscriptions,
    Whitelisted = can_fetch_item(Affiliation, Subscriptions),
    if
	%%SubId == "", ?? ->
	%% Entity has multiple subscriptions to the node but does not specify a subscription ID
	%{error, ?ERR_EXTENDED(?ERR_BAD_REQUEST, "subid-required")};
	%%InvalidSubId ->
	%% Entity is subscribed but specifies an invalid subscription ID
	%{error, ?ERR_EXTENDED(?ERR_NOT_ACCEPTABLE, "invalid-subid")};
	GenState#pubsub_state.affiliation == outcast ->
	    %% Requesting entity is blocked
	    {error, ?ERR_FORBIDDEN};
	(AccessModel == presence) and (not PresenceSubscription) ->
	    %% Entity is not authorized to create a subscription (presence subscription required)
	    {error, ?ERR_EXTENDED(?ERR_NOT_AUTHORIZED, "presence-subscription-required")};
	(AccessModel == roster) and (not RosterGroup) ->
	    %% Entity is not authorized to create a subscription (not in roster group)
	    {error, ?ERR_EXTENDED(?ERR_NOT_AUTHORIZED, "not-in-roster-group")};
	(AccessModel == whitelist) and (not Whitelisted) ->
	    %% Node has whitelist access model and entity lacks required affiliation
	    {error, ?ERR_EXTENDED(?ERR_NOT_ALLOWED, "closed-node")};
	(AccessModel == authorize) and (not Whitelisted) ->
	    %% Node has authorize access model
	    {error, ?ERR_FORBIDDEN};
	%%PaymentRequired ->
	%%	% Payment is required for a subscription
	%%	{error, ?ERR_PAYMENT_REQUIRED};
	true ->
	    get_item(NodeIdx, ItemId)
    end.
%% @spec (Item) -> ok | {error, Reason}
%% Item = mod_pubsub:pubsubItem()
%% Reason = mod_pubsub:stanzaError()
%% @doc <p>Write an item into database.</p>
%% Persist a pubsub_item record; anything else is rejected.
%% NOTE(review): mnesia:write/1 needs an enclosing mnesia activity
%% (transaction) — callers appear to provide one; confirm.
set_item(Item) when is_record(Item, pubsub_item) ->
mnesia:write(Item);
set_item(_) ->
{error, ?ERR_INTERNAL_SERVER_ERROR}.
%% @spec (NodeIdx, ItemId) -> ok | {error, Reason}
%% NodeIdx = mod_pubsub:nodeIdx()
%% ItemId = mod_pubsub:itemId()
%% Reason = mod_pubsub:stanzaError()
%% @doc <p>Delete an item from database.</p>
%% Remove the item row keyed {ItemId, NodeIdx} from the pubsub_item table.
del_item(NodeIdx, ItemId) ->
mnesia:delete({pubsub_item, {ItemId, NodeIdx}}).
%% Delete each of the given item ids from node NodeIdx.
del_items(NodeIdx, ItemIds) ->
    Delete = fun(ItemId) -> del_item(NodeIdx, ItemId) end,
    lists:foreach(Delete, ItemIds).
%% @doc <p>Return the name of the node if known: default is to return
%% the item id itself.</p>
%% Default item "name": the item id, unchanged.
get_item_name(_Host, _Node, Id) ->
Id.
%% Split a binary node id such as <<"/home/user">> into its path
%% components (["home", "user"]); empty segments are dropped by tokens/2.
node_to_path(Node) ->
    NodeString = binary_to_list(Node),
    string:tokens(NodeString, "/").
%% Rebuild a binary node id from its path components; the "" prepended
%% before joining yields the leading "/". The empty path maps to <<>>.
path_to_node([]) ->
    <<>>;
path_to_node(Path) ->
    Joined = string:join(["" | Path], "/"),
    list_to_binary(Joined).
%% @spec (Affiliation, Subscriptions) -> true | false
%% Affiliation = owner | member | publisher | outcast | none
%% Subscriptions = [{mod_pubsub:subscription(), mod_pubsub:subId()}]
%% @doc Determines if the combination of Affiliation and Subscriptions
%% are allowed to get items from a node.
%% Owners, members and publishers may always fetch; outcasts never may;
%% unaffiliated entities may fetch only while actively subscribed.
can_fetch_item(Affiliation, _Subscriptions)
  when Affiliation == owner; Affiliation == member; Affiliation == publisher ->
    true;
can_fetch_item(outcast, _Subscriptions) ->
    false;
can_fetch_item(none, Subscriptions) ->
    is_subscribed(Subscriptions);
can_fetch_item(_Affiliation, _Subscriptions) ->
    false.
%% True when at least one {subscribed, SubId} pair is present.
is_subscribed(Subscriptions) ->
    IsActive = fun ({subscribed, _SubId}) -> true;
		   (_Other)		  -> false
	       end,
    lists:any(IsActive, Subscriptions).
%% Returns the first item in List for which Pred(Item) is true
%% Linear scan: {value, Item} for the first element satisfying Pred,
%% false when none does (same contract as lists:search/2).
first_in_list(_Pred, []) ->
    false;
first_in_list(Pred, [Head | Rest]) ->
    case Pred(Head) of
	true -> {value, Head};
	_    -> first_in_list(Pred, Rest)
    end.
| null | https://raw.githubusercontent.com/master/ejabberd/9c31874d5a9d1852ece1b8ae70dd4b7e5eef7cf7/src/mod_pubsub/node_hometree.erl | erlang | ====================================================================
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved via the world wide web at /.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
[-one.net/]
@version {@vsn}, {@date} {@time}
@end
====================================================================
@todo The item table should be handled by the plugin, but plugin that do
not want to manage it should be able to use the default behaviour.
@todo Plugin modules should be able to register to receive presence update
send to pubsub.
as a developer basis and reference to build its own custom pubsub node
types.</p>
<p><strong>The API isn't stabilized yet</strong>. The pubsub plugin
development is still a work in progress. However, the system is already
useable and useful as is. Please, send us comments, feedback and
improvements.</p>
API definition
================
API definition
================
@spec (Host, ServerHost, Options) -> ok
Host = string()
Options = [{atom(), term()}]
@doc <p>Called during pubsub modules initialisation. Any pubsub plugin must
implement this function. It can return anything.</p>
<p>This function is mainly used to trigger the setup task necessary for the
plugin. It can be used for example by the developer to create the specific
@spec (Host, ServerHost) -> ok
Host = string()
@doc <p>Called during pubsub modules termination. Any pubsub plugin must
implement this function. It can return anything.</p>
Options = [mod_pubsub:nodeOption()]
@doc Returns the default pubsub node options.
```
[{deliver_payloads, true},
{notify_config, false},
{notify_delete, false},
{notify_retract, true},
{persist_items, true},
{subscribe, true},
{access_model, open},
{publish_model, publishers},
{presence_based_delivery, false}]'''
Features = [string()]
@doc Returns the node features
NodeId = mod_pubsub:nodeId()
ParentNodeId = mod_pubsub:nodeId()
Owner = mod_pubsub:jid()
Access = all | atom()
Allowed = boolean()
@doc Checks if the current user has the permission to create the requested node
<p>In {@link node_default}, the permission is decided by the place in the
hierarchy where the user is creating the node. The access parameter is also
checked in the default module. This parameter depends on the value of the
<p>This function also check that node can be created a a children of its
parent node</p>
which. They can simply delegate this check to the {@link node_default}
module by implementing this function like this:
pubsub service always allowed
Owner = mod_pubsub:jid()
@doc <p></p>
@spec (Nodes) -> {result, {default, broadcast, Reply}}
Nodes = [mod_pubsub:pubsubNode()]
Reply = [{mod_pubsub:pubsubNode(),
@doc <p>purge items of deleted nodes after effective deletion.</p>
Sender = mod_pubsub:jid()
Subscriber = mod_pubsub:jid()
SendLast = atom()
PresenceSubscription = boolean()
RosterGroup = boolean()
Options = [mod_pubsub:nodeOption()]
Result = {result, {default, subscribed, mod_pubsub:subId()}}
| {result, {default, subscribed, mod_pubsub:subId(), send_last}}
| {result, {default, pending, mod_pubsub:subId()}}
<p>The mechanism works as follow:
<ul>
result of the preparation as a record.</li>
<li>This function gets the prepared record and several other parameters and
can decide to:<ul>
<li>reject the subscription;</li>
<li>allow it as is, letting the main module perform the database
persistance;</li>
<li>allow it, modifying the record. The main module will store the
<li>allow it, but perform the needed persistance operations.</li></ul>
</li></ul></p>
<p>The selected behaviour depends on the return parameter:
<ul>
<li><tt>{error, Reason}</tt>: an IQ error result will be returned. No
subscription will actually be performed.</li>
<li><tt>true</tt>: Subscribe operation is allowed, based on the
unmodified record passed in parameter <tt>SubscribeResult</tt>. If this
parameter contains an error, no subscription will be performed.</li>
<li><tt>{true, done}</tt>: Subscribe operation is allowed, but the
no further persistance operation will be performed. This case is used,
when the plugin module is doing the persistance by itself or when it want
to completly disable persistance.</li></ul>
</p>
<p>In the default plugin module, the record is unchanged.</p>
JIDs do not match
Requesting entity is blocked
Requesting entity has pending subscription
Entity is not authorized to create a subscription (presence subscription required)
Entity is not authorized to create a subscription (not in roster group)
Node has whitelist access model and entity lacks required affiliation
% Payment is required for a subscription
{error, ?ERR_PAYMENT_REQUIRED};
ForbiddenAnonymous ->
% Requesting entity is anonymous
{error, ?ERR_FORBIDDEN};
Sender = mod_pubsub:jid()
Subscriber = mod_pubsub:jid()
SubId = mod_pubsub:subId()
@doc <p>Unsubscribe the <tt>Subscriber</tt> from the <tt>Node</tt>.</p>
Requesting entity is prohibited from unsubscribing entity
Entity did not specify SubId
SubId == "", ?? ->
{error, ?ERR_EXTENDED(?ERR_BAD_REQUEST, "subid-required")};
InvalidSubId ->
{error, ?ERR_EXTENDED(?ERR_NOT_ACCEPTABLE, "invalid-subid")};
Requesting entity is not a subscriber
Subid supplied, so use that.
Asking to remove all subscriptions to the given node
Just a regular subscriber, and this is final item, so
delete the state.
Publisher = mod_pubsub:jid()
PublishModel = atom()
ItemId = mod_pubsub:itemId()
Payload = mod_pubsub:payload()
ItemIds = [mod_pubsub:itemId()] | []
@doc <p>Publishes the item passed as parameter.</p>
<p>The mechanism works as follow:
<ul>
result of the preparation as a {@link mod_pubsub:pubsubItem()} record.</li>
<li>This function gets the prepared record and several other parameters and can decide to:<ul>
<li>allow the publication as is, letting the main module perform the database persistance;</li>
<li>allow the publication, modifying the record. The main module will store the modified record;</li>
<li>allow it, but perform the needed persistance operations.</li></ul>
</li></ul></p>
<p>The selected behaviour depends on the return parameter:
<ul>
<li><tt>{error, Reason}</tt>: an iq error result will be return. No
publication is actually performed.</li>
<li><tt>true</tt>: Publication operation is allowed, based on the
parameter contains an error, no subscription will actually be
performed.</li>
{@link mod_pubsub:pubsubItem()} record returned replaces the value passed
<li><tt>{true, done}</tt>: Publication operation is allowed, but the
{@link mod_pubsub:pubsubItem()} will be considered as already stored and
no further persistance operation will be performed. This case is used,
when the plugin module is doing the persistance by itself or when it want
to completly disable persistance.</li></ul>
</p>
<p>In the default plugin module, the record is unchanged.</p>
Entity does not have sufficient privileges to publish to node
TODO: check creation, presence, roster
ItemIds = [mod_pubsub:itemId()]
NewItemIds = [mod_pubsub:itemId()]
OldItemIds = [mod_pubsub:itemId()] | []
@doc <p>This function is used to remove extra items, most notably when the
maximum number of items has been reached.</p>
permission check is performed.</p>
<p>In the default plugin module, the oldest items are removed, but other
rules can be used.</p>
plugin is using the default pubsub storage), it can implements this function like this:
Remove extra items:
Return the new items list:
{result, {default, broadcast}} | {error, Reason}
Publisher = mod_pubsub:jid()
PublishModel = atom()
ItemId = mod_pubsub:itemId()
@doc <p>Triggers item deletion.</p>
<p>Default plugin: The user performing the deletion must be the node owner
Requesting entity does not have sufficient privileges
Owner can delete any items from its own node
Non-existent node or item
Owner = mod_pubsub:jid()
Entity is not owner
@spec (Host, Owner) -> {result, Reply}
Owner = mod_pubsub:jid()
Reply = [] | [{mod_pubsub:pubsubNode(), mod_pubsub:affiliation()}]
@doc <p>Return the current affiliations for the given user</p>
<tt>pubsub_state</tt> table. If a plugin stores its data in the same
table, it should return an empty list, as the affiliation will be read by
that will be added to the affiliation stored in the main
<tt>pubsub_state</tt> table.</p>
@spec (Host, Owner) ->
{'result', []
| [{Node, Subscription, SubId, Entity}]
| [{Node, Subscription, Entity}]}
Owner = mod_pubsub:jid()
Node = mod_pubsub:pubsubNode()
Subscription = mod_pubsub:subscription()
SubId = mod_pubsub:subId()
@doc <p>Return the current subscriptions for the given user</p>
<tt>pubsub_state</tt> table. If a plugin stores its data in the same
table, it should return an empty list, as the affiliation will be read by
that will be added to the affiliation stored in the main
<tt>pubsub_state</tt> table.</p>
TODO: get rid of cases to handle non-list subscriptions
TODO : doc
@spec (Host, Owner) -> {result, Reply} | {error, Reason}
Owner = mod_pubsub:jid()
Reply = [] | [mod_pubsub:nodeId()]
subscriptions.</p>
@doc Returns the list of stored states for a given node.
<p>We can consider that the pubsub_state table have been created by the main
mod_pubsub module.</p>
<p>PubSub plugins can store the states where they wants (for example in a
relational database).</p>
they can implement this function like this:
```get_states(NodeIdx) ->
node_default:get_states(NodeIdx).'''</p>
@spec (State) -> ok | {error, Reason}
From = mod_pubsub:jid()
Items = [] | [mod_pubsub:pubsubItem()]
@doc Returns the list of stored items for a given node.
<p>We can consider that the pubsub_item table have been created by the main
mod_pubsub module.</p>
<p>PubSub plugins can store the items where they wants (for example in a
relational database), or they can even decide not to persist any items.</p>
they can implement this function like this:
```get_items(NodeIdx, From) ->
node_default:get_items(NodeIdx, From).'''</p>
SubId == "", ?? ->
Entity has multiple subscriptions to the node but does not specify a subscription ID
{error, ?ERR_EXTENDED(?ERR_BAD_REQUEST, "subid-required")};
InvalidSubId ->
Entity is subscribed but specifies an invalid subscription ID
{error, ?ERR_EXTENDED(?ERR_NOT_ACCEPTABLE, "invalid-subid")};
Requesting entity is blocked
Entity is not authorized to create a subscription (presence subscription required)
Entity is not authorized to create a subscription (not in roster group)
Node has whitelist access model and entity lacks required affiliation
Node has authorize access model
% Payment is required for a subscription
{error, ?ERR_PAYMENT_REQUIRED};
Item = mod_pubsub:pubsubItem()
JID = mod_pubsub:jid()
PresenceSubscription = boolean()
RosterGroup = boolean()
SubId = mod_pubsub:subId()
Item = mod_pubsub:pubsubItem()
SubId == "", ?? ->
Entity has multiple subscriptions to the node but does not specify a subscription ID
{error, ?ERR_EXTENDED(?ERR_BAD_REQUEST, "subid-required")};
InvalidSubId ->
Entity is subscribed but specifies an invalid subscription ID
{error, ?ERR_EXTENDED(?ERR_NOT_ACCEPTABLE, "invalid-subid")};
Requesting entity is blocked
Entity is not authorized to create a subscription (presence subscription required)
Entity is not authorized to create a subscription (not in roster group)
Node has whitelist access model and entity lacks required affiliation
Node has authorize access model
% Payment is required for a subscription
{error, ?ERR_PAYMENT_REQUIRED};
@spec (Item) -> ok | {error, Reason}
Item = mod_pubsub:pubsubItem()
Affiliation = owner | member | publisher | outcast | none
Subscription = subscribed | none
are allowed to get items from a node. | ` ` The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
The Initial Developer of the Original Code is ProcessOne .
Portions created by ProcessOne are Copyright 2006 - 2012 , ProcessOne
All Rights Reserved . ''
This software is copyright 2006 - 2012 , ProcessOne .
2006 - 2012 ProcessOne
@author < >
@doc The module < strong>{@module}</strong > is the default PubSub plugin .
< p > It is used as a default for all unknown PubSub node type . It can serve
< p > PubSub plugin nodes are using the { @link gen_node } behaviour.</p >
-module(node_hometree).
-author('').
-include("pubsub.hrl").
-include("jlib.hrl").
-behaviour(gen_pubsub_node).
-export([init/3, terminate/2,
options/0, features/0,
create_node_permission/6,
create_node/2,
delete_node/1,
purge_node/2,
subscribe_node/8,
unsubscribe_node/4,
publish_item/6,
delete_item/4,
remove_extra_items/3,
get_entity_affiliations/2,
get_node_affiliations/1,
get_affiliation/2,
set_affiliation/3,
get_entity_subscriptions/2,
get_node_subscriptions/1,
get_subscriptions/2,
set_subscriptions/4,
get_pending_nodes/2,
get_states/1,
get_state/2,
set_state/1,
get_items/6,
get_items/2,
get_item/7,
get_item/2,
set_item/1,
get_item_name/3,
node_to_path/1,
path_to_node/1
]).
ServerHost = string ( )
module database schema if it does not exists >
init(_Host, _ServerHost, _Options) ->
pubsub_subscription:init(),
mnesia:create_table(pubsub_state,
[{disc_copies, [node()]},
{attributes, record_info(fields, pubsub_state)}]),
mnesia:create_table(pubsub_item,
[{disc_only_copies, [node()]},
{attributes, record_info(fields, pubsub_item)}]),
ItemsFields = record_info(fields, pubsub_item),
case mnesia:table_info(pubsub_item, attributes) of
ItemsFields -> ok;
_ ->
mnesia:transform_table(pubsub_item, ignore, ItemsFields)
end,
ok.
ServerHost = string ( )
terminate(_Host, _ServerHost) ->
ok.
( ) - > Options
< p > Example of function return >
{ max_items , 10 } ,
{ max_payload_size , 100000 } ,
{ send_last_published_item , never } ,
options() ->
[{deliver_payloads, true},
{notify_config, false},
{notify_delete, false},
{notify_retract, true},
{purge_offline, false},
{persist_items, true},
{max_items, ?MAXITEMS},
{subscribe, true},
{access_model, open},
{roster_groups_allowed, []},
{publish_model, publishers},
{notification_type, headline},
{max_payload_size, ?MAX_PAYLOAD_SIZE},
{send_last_published_item, on_sub_and_presence},
{deliver_notifications, true},
{presence_based_delivery, false}].
( ) - > Features
features() ->
["create-nodes",
"auto-create",
"access-authorize",
"delete-nodes",
"delete-items",
"get-pending",
"instant-nodes",
"manage-subscriptions",
"modify-affiliations",
"multi-subscribe",
"outcast-affiliation",
"persistent-items",
"publish",
"purge-nodes",
"retract-items",
"retrieve-affiliations",
"retrieve-items",
"retrieve-subscriptions",
"subscribe",
"subscription-notifications",
"subscription-options"
].
@spec ( Host , ServerHost , NodeId , ParentNodeId , Owner , Access ) - > { result , Allowed }
Host = mod_pubsub : ( )
ServerHost = string ( )
< tt > access_createnode</tt > ACL value in ejabberd config >
< p > PubSub plugins can redefine the PubSub node creation rights as they
` ` ` check_create_user_permission(Host , ServerHost , NodeId , ParentNodeId , Owner , Access ) - >
node_default : check_create_user_permission(Host , ServerHost , NodeId , ParentNodeId , Owner , Access).'''</p >
create_node_permission(Host, ServerHost, NodeId, _ParentNodeId, Owner, Access) ->
LOwner = jlib:jid_tolower(Owner),
{User, Server, _Resource} = LOwner,
Allowed = case LOwner of
{"", Host, ""} ->
_ ->
case acl:match_rule(ServerHost, Access, LOwner) of
allow ->
case node_to_path(NodeId) of
["home", Server, User | _] -> true;
_ -> false
end;
_ ->
false
end
end,
{result, Allowed}.
@spec ( NodeIdx , Owner ) - > { result , { default , broadcast } }
NodeIdx = mod_pubsub : nodeIdx ( )
create_node(NodeIdx, Owner) ->
OwnerKey = jlib:jid_tolower(jlib:jid_remove_resource(Owner)),
set_state(#pubsub_state{stateid = {OwnerKey, NodeIdx}, affiliation = owner}),
{result, {default, broadcast}}.
[ { mod_pubsub : ( ) , [ { mod_pubsub : subscription ( ) , mod_pubsub : subId ( ) } ] } ] } ]
delete_node(Nodes) ->
Tr = fun(#pubsub_state{stateid = {J, _}, subscriptions = Ss}) ->
lists:map(fun(S) ->
{J, S}
end, Ss)
end,
Reply = lists:map(
fun(#pubsub_node{id = NodeId} = PubsubNode) ->
{result, States} = get_states(NodeId),
lists:foreach(
fun(#pubsub_state{stateid = {LJID, _}, items = Items}) ->
del_items(NodeId, Items),
del_state(NodeId, LJID)
end, States),
{PubsubNode, lists:flatmap(Tr, States)}
end, Nodes),
{result, {default, broadcast, Reply}}.
@spec ( NodeIdx , Sender , Subscriber , AccessModel , SendLast , PresenceSubscription , RosterGroup , Options ) - > { error , Reason } | { result , Result }
NodeIdx : nodeIdx ( )
AccessModel = mod_pubsub : ( )
Reason = mod_pubsub : ( )
@doc < p > Accepts or rejects subcription requests on a PubSub node.</p >
< li > The main PubSub module prepares the subscription and passes the
modified record;</li >
< li><tt>{true , PubsubState}</tt > : Subscribe operation is allowed , but
the { @link mod_pubsub : pubsubState ( ) } record returned replaces the value
passed in parameter < tt > >
{ @link mod_pubsub : pubsubState ( ) } will be considered as already stored and
subscribe_node(NodeIdx, Sender, Subscriber, AccessModel,
SendLast, PresenceSubscription, RosterGroup, Options) ->
SubKey = jlib:jid_tolower(Subscriber),
GenKey = jlib:jid_remove_resource(SubKey),
Authorized = (jlib:jid_tolower(jlib:jid_remove_resource(Sender)) == GenKey),
GenState = get_state(NodeIdx, GenKey),
SubState = case SubKey of
GenKey -> GenState;
_ -> get_state(NodeIdx, SubKey)
end,
Affiliation = GenState#pubsub_state.affiliation,
Subscriptions = SubState#pubsub_state.subscriptions,
Whitelisted = lists:member(Affiliation, [member, publisher, owner]),
PendingSubscription = lists:any(fun({pending, _}) -> true;
(_) -> false
end, Subscriptions),
if
not Authorized ->
{error, ?ERR_EXTENDED(?ERR_BAD_REQUEST, "invalid-jid")};
Affiliation == outcast ->
{error, ?ERR_FORBIDDEN};
PendingSubscription ->
{error, ?ERR_EXTENDED(?ERR_NOT_AUTHORIZED, "pending-subscription")};
(AccessModel == presence) and (not PresenceSubscription) ->
{error, ?ERR_EXTENDED(?ERR_NOT_AUTHORIZED, "presence-subscription-required")};
(AccessModel == roster) and (not RosterGroup) ->
{error, ?ERR_EXTENDED(?ERR_NOT_AUTHORIZED, "not-in-roster-group")};
(AccessModel == whitelist) and (not Whitelisted) ->
{error, ?ERR_EXTENDED(?ERR_NOT_ALLOWED, "closed-node")};
- >
true ->
case pubsub_subscription:add_subscription(Subscriber, NodeIdx, Options) of
SubId when is_list(SubId) ->
NewSub = case AccessModel of
authorize -> pending;
_ -> subscribed
end,
set_state(SubState#pubsub_state{subscriptions = [{NewSub, SubId} | Subscriptions]}),
case {NewSub, SendLast} of
{subscribed, never} ->
{result, {default, subscribed, SubId}};
{subscribed, _} ->
{result, {default, subscribed, SubId, send_last}};
{_, _} ->
{result, {default, pending, SubId}}
end;
_ ->
{error, ?ERR_INTERNAL_SERVER_ERROR}
end
end.
@spec ( NodeIdx , Sender , Subscriber , SubId ) - > { error , Reason } | { result , default }
NodeIdx = mod_pubsub : nodeIdx ( )
Reason = mod_pubsub : ( )
unsubscribe_node(NodeIdx, Sender, Subscriber, SubId) ->
SubKey = jlib:jid_tolower(Subscriber),
GenKey = jlib:jid_remove_resource(SubKey),
Authorized = (jlib:jid_tolower(jlib:jid_remove_resource(Sender)) == GenKey),
GenState = get_state(NodeIdx, GenKey),
SubState = case SubKey of
GenKey -> GenState;
_ -> get_state(NodeIdx, SubKey)
end,
Subscriptions = lists:filter(fun({_Sub, _SubId}) -> true;
(_SubId) -> false
end, SubState#pubsub_state.subscriptions),
SubIdExists = case SubId of
[] -> false;
List when is_list(List) -> true;
_ -> false
end,
if
not Authorized ->
{error, ?ERR_FORBIDDEN};
Invalid subscription identifier
Subscriptions == [] ->
{error, ?ERR_EXTENDED(?ERR_UNEXPECTED_REQUEST_CANCEL, "not-subscribed")};
SubIdExists ->
Sub = first_in_list(fun(S) ->
case S of
{_Sub, SubId} -> true;
_ -> false
end
end, SubState#pubsub_state.subscriptions),
case Sub of
{value, S} ->
delete_subscriptions(SubKey, NodeIdx, [S], SubState),
{result, default};
false ->
{error, ?ERR_EXTENDED(?ERR_UNEXPECTED_REQUEST_CANCEL, "not-subscribed")}
end;
SubId == all ->
delete_subscriptions(SubKey, NodeIdx, Subscriptions, SubState),
{result, default};
No subid supplied , but there 's only one matching subscription
length(Subscriptions) == 1 ->
delete_subscriptions(SubKey, NodeIdx, Subscriptions, SubState),
{result, default};
No subid and more than one possible subscription match .
true ->
{error, ?ERR_EXTENDED(?ERR_BAD_REQUEST, "subid-required")}
end.
delete_subscriptions(SubKey, NodeIdx, Subscriptions, SubState) ->
NewSubs = lists:foldl(fun({Subscription, SubId}, Acc) ->
pubsub_subscription:delete_subscription(SubKey, NodeIdx, SubId),
Acc -- [{Subscription, SubId}]
end, SubState#pubsub_state.subscriptions, Subscriptions),
case {SubState#pubsub_state.affiliation, NewSubs} of
{none, []} ->
del_state(NodeIdx, SubKey);
_ ->
set_state(SubState#pubsub_state{subscriptions = NewSubs})
end.
@spec ( NodeIdx , Publisher , PublishModel , MaxItems , ItemId , Payload ) - >
{ result , { default , broadcast , ItemIds } } | { error , Reason }
NodeIdx = mod_pubsub : nodeIdx ( )
MaxItems = integer ( )
Reason = mod_pubsub : ( )
< li > The main PubSub module prepares the item to publish and passes the
< li > reject the publication;</li >
unmodified record passed in parameter < tt > Item</tt > . If the < tt > Item</tt >
< li><tt>{true , Item}</tt > : Publication operation is allowed , but the
in parameter < tt > Item</tt > . The persistance will be performed by the main
>
publish_item(NodeIdx, Publisher, PublishModel, MaxItems, ItemId, Payload) ->
SubKey = jlib:jid_tolower(Publisher),
GenKey = jlib:jid_remove_resource(SubKey),
GenState = get_state(NodeIdx, GenKey),
SubState = case SubKey of
GenKey -> GenState;
_ -> get_state(NodeIdx, SubKey)
end,
Affiliation = GenState#pubsub_state.affiliation,
Subscribed = case PublishModel of
subscribers -> is_subscribed(SubState#pubsub_state.subscriptions);
_ -> undefined
end,
if
not ((PublishModel == open)
or ((PublishModel == publishers)
and ((Affiliation == owner) or (Affiliation == publisher)))
or (Subscribed == true)) ->
{error, ?ERR_FORBIDDEN};
true ->
if MaxItems > 0 ->
Now = now(),
PubId = {Now, SubKey},
Item = case get_item(NodeIdx, ItemId) of
{result, OldItem} ->
OldItem#pubsub_item{modification = PubId,
payload = Payload};
_ ->
#pubsub_item{itemid = {ItemId, NodeIdx},
creation = {Now, GenKey},
modification = PubId,
payload = Payload}
end,
Items = [ItemId | GenState#pubsub_state.items--[ItemId]],
{result, {NI, OI}} = remove_extra_items(NodeIdx, MaxItems, Items),
set_item(Item),
set_state(GenState#pubsub_state{items = NI}),
{result, {default, broadcast, OI}};
true ->
{result, {default, broadcast, []}}
end
end.
@spec ( NodeIdx , MaxItems , ItemIds ) - > { result , { NewItemIds , OldItemIds } }
NodeIdx = mod_pubsub : nodeIdx ( )
MaxItems = integer ( ) | unlimited
< p > This function is used internally by the core PubSub module , as no
< p > If another PubSub plugin wants to delegate the item removal ( and if the
` ` ` , MaxItems , ItemIds ) - >
node_default : , MaxItems , ItemIds).'''</p >
remove_extra_items(_NodeIdx, unlimited, ItemIds) ->
{result, {ItemIds, []}};
remove_extra_items(NodeIdx, MaxItems, ItemIds) ->
NewItems = lists:sublist(ItemIds, MaxItems),
OldItems = lists:nthtail(length(NewItems), ItemIds),
del_items(NodeIdx, OldItems),
{result, {NewItems, OldItems}}.
@spec ( NodeIdx , Publisher , PublishModel , ItemId ) - >
NodeIdx = mod_pubsub : nodeIdx ( )
Reason = mod_pubsub : ( )
or a publisher , or PublishModel being open.</p >
delete_item(NodeIdx, Publisher, PublishModel, ItemId) ->
SubKey = jlib:jid_tolower(Publisher),
GenKey = jlib:jid_remove_resource(SubKey),
GenState = get_state(NodeIdx, GenKey),
#pubsub_state{affiliation = Affiliation, items = Items} = GenState,
Allowed = (Affiliation == publisher) orelse (Affiliation == owner)
orelse (PublishModel == open)
orelse case get_item(NodeIdx, ItemId) of
{result, #pubsub_item{creation = {_, GenKey}}} -> true;
_ -> false
end,
if
not Allowed ->
{error, ?ERR_FORBIDDEN};
true ->
case lists:member(ItemId, Items) of
true ->
del_item(NodeIdx, ItemId),
set_state(GenState#pubsub_state{items = lists:delete(ItemId, Items)}),
{result, {default, broadcast}};
false ->
case Affiliation of
owner ->
{result, States} = get_states(NodeIdx),
lists:foldl(
fun(#pubsub_state{items = PI} = S, Res) ->
case lists:member(ItemId, PI) of
true ->
del_item(NodeIdx, ItemId),
set_state(S#pubsub_state{items = lists:delete(ItemId, PI)}),
{result, {default, broadcast}};
false ->
Res
end;
(_, Res) ->
Res
end, {error, ?ERR_ITEM_NOT_FOUND}, States);
_ ->
{error, ?ERR_ITEM_NOT_FOUND}
end
end
end.
@spec ( NodeIdx , Owner ) - > { error , Reason } | { result , { default , broadcast } }
NodeIdx = mod_pubsub : nodeIdx ( )
Reason = mod_pubsub : ( )
purge_node(NodeIdx, Owner) ->
SubKey = jlib:jid_tolower(Owner),
GenKey = jlib:jid_remove_resource(SubKey),
GenState = get_state(NodeIdx, GenKey),
case GenState of
#pubsub_state{affiliation = owner} ->
{result, States} = get_states(NodeIdx),
lists:foreach(
fun(#pubsub_state{items = []}) ->
ok;
(#pubsub_state{items = Items} = S) ->
del_items(NodeIdx, Items),
set_state(S#pubsub_state{items = []})
end, States),
{result, {default, broadcast}};
_ ->
{error, ?ERR_FORBIDDEN}
end.
Host = mod_pubsub : ( )
< p > The default module reads affiliations in the main
the default PubSub module . Otherwise , it should return its own affiliation ,
get_entity_affiliations(Host, Owner) ->
SubKey = jlib:jid_tolower(Owner),
GenKey = jlib:jid_remove_resource(SubKey),
States = mnesia:match_object(#pubsub_state{stateid = {GenKey, '_'}, _ = '_'}),
NodeTree = case catch ets:lookup(gen_mod:get_module_proc(Host, config), nodetree) of
[{nodetree, N}] -> N;
_ -> nodetree_tree
end,
Reply = lists:foldl(fun(#pubsub_state{stateid = {_, N}, affiliation = A}, Acc) ->
case NodeTree:get_node(N) of
#pubsub_node{nodeid = {Host, _}} = Node -> [{Node, A}|Acc];
_ -> Acc
end
end, [], States),
{result, Reply}.
get_node_affiliations(NodeId) ->
{result, States} = get_states(NodeId),
Tr = fun(#pubsub_state{stateid = {J, _}, affiliation = A}) ->
{J, A}
end,
{result, lists:map(Tr, States)}.
get_affiliation(NodeId, Owner) ->
SubKey = jlib:jid_tolower(Owner),
GenKey = jlib:jid_remove_resource(SubKey),
GenState = get_state(NodeId, GenKey),
{result, GenState#pubsub_state.affiliation}.
set_affiliation(NodeId, Owner, Affiliation) ->
SubKey = jlib:jid_tolower(Owner),
GenKey = jlib:jid_remove_resource(SubKey),
GenState = get_state(NodeId, GenKey),
case {Affiliation, GenState#pubsub_state.subscriptions} of
{none, none} ->
del_state(NodeId, GenKey);
_ ->
set_state(GenState#pubsub_state{affiliation = Affiliation})
end.
Host = mod_pubsub : ( )
Entity = mod_pubsub : ( )
< p > The default module reads subscriptions in the main
the default PubSub module . Otherwise , it should return its own affiliation ,
get_entity_subscriptions(Host, Owner) ->
{U, D, _} = SubKey = jlib:jid_tolower(Owner),
GenKey = jlib:jid_remove_resource(SubKey),
States = case SubKey of
GenKey -> mnesia:match_object(
#pubsub_state{stateid = {{U, D, '_'}, '_'}, _ = '_'});
_ -> mnesia:match_object(
#pubsub_state{stateid = {GenKey, '_'}, _ = '_'})
++ mnesia:match_object(
#pubsub_state{stateid = {SubKey, '_'}, _ = '_'})
end,
NodeTree = case catch ets:lookup(gen_mod:get_module_proc(Host, config), nodetree) of
[{nodetree, N}] -> N;
_ -> nodetree_tree
end,
Reply = lists:foldl(fun(#pubsub_state{stateid = {J, N}, subscriptions = Ss}, Acc) ->
case NodeTree:get_node(N) of
#pubsub_node{nodeid = {Host, _}} = Node ->
lists:foldl(fun({Sub, SubId}, Acc2) ->
[{Node, Sub, SubId, J} | Acc2];
(S, Acc2) ->
[{Node, S, J} | Acc2]
end, Acc, Ss);
_ -> Acc
end
end, [], States),
{result, Reply}.
get_node_subscriptions(NodeId) ->
{result, States} = get_states(NodeId),
Tr = fun(#pubsub_state{stateid = {J, _}, subscriptions = Subscriptions}) ->
case Subscriptions of
[_|_] ->
lists:foldl(fun({S, SubId}, Acc) ->
[{J, S, SubId} | Acc];
(S, Acc) ->
[{J, S} | Acc]
end, [], Subscriptions);
[] ->
[];
_ ->
[{J, none}]
end
end,
{result, lists:flatmap(Tr, States)}.
get_subscriptions(NodeId, Owner) ->
SubKey = jlib:jid_tolower(Owner),
SubState = get_state(NodeId, SubKey),
{result, SubState#pubsub_state.subscriptions}.
set_subscriptions(NodeId, Owner, Subscription, SubId) ->
SubKey = jlib:jid_tolower(Owner),
SubState = get_state(NodeId, SubKey),
case {SubId, SubState#pubsub_state.subscriptions} of
{_, []} ->
case Subscription of
none -> {error, ?ERR_EXTENDED(?ERR_BAD_REQUEST, "not-subscribed")};
_ -> new_subscription(NodeId, Owner, Subscription, SubState)
end;
{"", [{_, SID}]} ->
case Subscription of
none -> unsub_with_subid(NodeId, SID, SubState);
_ -> replace_subscription({Subscription, SID}, SubState)
end;
{"", [_|_]} ->
{error, ?ERR_EXTENDED(?ERR_BAD_REQUEST, "subid-required")};
_ ->
case Subscription of
none -> unsub_with_subid(NodeId, SubId, SubState);
_ -> replace_subscription({Subscription, SubId}, SubState)
end
end.
replace_subscription(NewSub, SubState) ->
NewSubs = replace_subscription(NewSub,
SubState#pubsub_state.subscriptions, []),
set_state(SubState#pubsub_state{subscriptions = NewSubs}).
replace_subscription(_, [], Acc) ->
Acc;
replace_subscription({Sub, SubId}, [{_, SubID} | T], Acc) ->
replace_subscription({Sub, SubId}, T, [{Sub, SubID} | Acc]).
new_subscription(NodeId, Owner, Subscription, SubState) ->
SubId = pubsub_subscription:add_subscription(Owner, NodeId, []),
Subscriptions = SubState#pubsub_state.subscriptions,
set_state(SubState#pubsub_state{subscriptions = [{Subscription, SubId} | Subscriptions]}),
{Subscription, SubId}.
unsub_with_subid(NodeId, SubId, SubState) ->
pubsub_subscription:delete_subscription(SubState#pubsub_state.stateid,
NodeId, SubId),
NewSubs = lists:filter(fun ({_, SID}) -> SubId =/= SID end,
SubState#pubsub_state.subscriptions),
case {NewSubs, SubState#pubsub_state.affiliation} of
{[], none} ->
del_state(NodeId, element(1, SubState#pubsub_state.stateid));
_ ->
set_state(SubState#pubsub_state{subscriptions = NewSubs})
end.
Host = mod_pubsub : ( )
@doc < p > Returns a list of Owner 's nodes on Host with pending
get_pending_nodes(Host, Owner) ->
GenKey = jlib:jid_remove_resource(jlib:jid_tolower(Owner)),
States = mnesia:match_object(#pubsub_state{stateid = {GenKey, '_'},
affiliation = owner,
_ = '_'}),
NodeIDs = [ID || #pubsub_state{stateid = {_, ID}} <- States],
NodeTree = case catch ets:lookup(gen_mod:get_module_proc(Host, config), nodetree) of
[{nodetree, N}] -> N;
_ -> nodetree_tree
end,
Reply = mnesia:foldl(fun(#pubsub_state{stateid = {_, NID}} = S, Acc) ->
case lists:member(NID, NodeIDs) of
true ->
case get_nodes_helper(NodeTree, S) of
{value, Node} -> [Node | Acc];
false -> Acc
end;
false ->
Acc
end
end, [], pubsub_state),
{result, Reply}.
get_nodes_helper(NodeTree,
#pubsub_state{stateid = {_, N}, subscriptions = Subs}) ->
HasPending = fun ({pending, _}) -> true;
(pending) -> true;
(_) -> false
end,
case lists:any(HasPending, Subs) of
true ->
case NodeTree:get_node(N) of
#pubsub_node{nodeid = {_, Node}} ->
{value, Node};
_ ->
false
end;
false ->
false
end.
( NodeIdx ) - > { result , States }
NodeIdx = mod_pubsub : nodeIdx ( )
States = [ ] | [ mod_pubsub : pubsubState ( ) ]
< p > For the default PubSub module , states are stored in database.</p >
< p > If a PubSub plugin wants to delegate the states storage to the default node ,
get_states(NodeIdx) ->
States = case catch mnesia:match_object(
#pubsub_state{stateid = {'_', NodeIdx}, _ = '_'}) of
List when is_list(List) -> List;
_ -> []
end,
{result, States}.
@spec ( NodeIdx , JID ) - > State
NodeIdx = mod_pubsub : nodeIdx ( )
JID = mod_pubsub : jid ( )
State = mod_pubsub : pubsubState ( )
@doc < p > Returns a state ( one state list ) , given its reference.</p >
get_state(NodeIdx, JID) ->
StateId = {JID, NodeIdx},
case catch mnesia:read({pubsub_state, StateId}) of
[State] when is_record(State, pubsub_state) -> State;
_ -> #pubsub_state{stateid=StateId}
end.
State = mod_pubsub : pubsubState ( )
Reason = mod_pubsub : ( )
@doc < p > Write a state into database.</p >
set_state(State) when is_record(State, pubsub_state) ->
mnesia:write(State);
set_state(_) ->
{error, ?ERR_INTERNAL_SERVER_ERROR}.
@spec ( NodeIdx , JID ) - > ok | { error , Reason }
NodeIdx = mod_pubsub : nodeIdx ( )
JID = mod_pubsub : jid ( )
Reason = mod_pubsub : ( )
@doc < p > Delete a state from database.</p >
del_state(NodeIdx, JID) ->
mnesia:delete({pubsub_state, {JID, NodeIdx}}).
@spec ( NodeIdx , From ) - > { result , Items }
NodeIdx = mod_pubsub : nodeIdx ( )
< p > For the default PubSub module , items are stored in database.</p >
< p > If a PubSub plugin wants to delegate the item storage to the default node ,
get_items(NodeIdx, _From) ->
Items = mnesia:match_object(#pubsub_item{itemid = {'_', NodeIdx}, _ = '_'}),
{result, lists:reverse(lists:keysort(#pubsub_item.modification, Items))}.
get_items(NodeIdx, JID, AccessModel, PresenceSubscription, RosterGroup, _SubId) ->
SubKey = jlib:jid_tolower(JID),
GenKey = jlib:jid_remove_resource(SubKey),
GenState = get_state(NodeIdx, GenKey),
SubState = get_state(NodeIdx, SubKey),
Affiliation = GenState#pubsub_state.affiliation,
Subscriptions = SubState#pubsub_state.subscriptions,
Whitelisted = can_fetch_item(Affiliation, Subscriptions),
if
GenState#pubsub_state.affiliation == outcast ->
{error, ?ERR_FORBIDDEN};
(AccessModel == presence) and (not PresenceSubscription) ->
{error, ?ERR_EXTENDED(?ERR_NOT_AUTHORIZED, "presence-subscription-required")};
(AccessModel == roster) and (not RosterGroup) ->
{error, ?ERR_EXTENDED(?ERR_NOT_AUTHORIZED, "not-in-roster-group")};
(AccessModel == whitelist) and (not Whitelisted) ->
{error, ?ERR_EXTENDED(?ERR_NOT_ALLOWED, "closed-node")};
(AccessModel == authorize) and (not Whitelisted) ->
{error, ?ERR_FORBIDDEN};
- >
true ->
get_items(NodeIdx, JID)
end.
@spec ( NodeIdx , ItemId ) - > { result , Item } | { error , ' item - not - found ' }
NodeIdx = mod_pubsub : nodeIdx ( )
ItemId = mod_pubsub : itemId ( )
@doc < p > Returns an item ( one item list ) , given its reference.</p >
get_item(NodeIdx, ItemId) ->
case mnesia:read({pubsub_item, {ItemId, NodeIdx}}) of
[Item] when is_record(Item, pubsub_item) ->
{result, Item};
_ ->
{error, ?ERR_ITEM_NOT_FOUND}
end.
@spec ( NodeIdx , ItemId , JID , AccessModel , PresenceSubscription , RosterGroup , SubId ) - > { result , Item } | { error , Reason }
NodeIdx : nodeIdx ( )
ItemId = mod_pubsub : itemId ( )
AccessModel = mod_pubsub : ( )
Reason = mod_pubsub : ( ) | ' item - not - found '
get_item(NodeIdx, ItemId, JID, AccessModel, PresenceSubscription, RosterGroup, _SubId) ->
SubKey = jlib:jid_tolower(JID),
GenKey = jlib:jid_remove_resource(SubKey),
GenState = get_state(NodeIdx, GenKey),
Affiliation = GenState#pubsub_state.affiliation,
Subscriptions = GenState#pubsub_state.subscriptions,
Whitelisted = can_fetch_item(Affiliation, Subscriptions),
if
GenState#pubsub_state.affiliation == outcast ->
{error, ?ERR_FORBIDDEN};
(AccessModel == presence) and (not PresenceSubscription) ->
{error, ?ERR_EXTENDED(?ERR_NOT_AUTHORIZED, "presence-subscription-required")};
(AccessModel == roster) and (not RosterGroup) ->
{error, ?ERR_EXTENDED(?ERR_NOT_AUTHORIZED, "not-in-roster-group")};
(AccessModel == whitelist) and (not Whitelisted) ->
{error, ?ERR_EXTENDED(?ERR_NOT_ALLOWED, "closed-node")};
(AccessModel == authorize) and (not Whitelisted) ->
{error, ?ERR_FORBIDDEN};
- >
true ->
get_item(NodeIdx, ItemId)
end.
Reason = mod_pubsub : ( )
@doc < p > Write an item into database.</p >
set_item(Item) when is_record(Item, pubsub_item) ->
mnesia:write(Item);
set_item(_) ->
{error, ?ERR_INTERNAL_SERVER_ERROR}.
@spec ( NodeIdx , ItemId ) - > ok | { error , Reason }
NodeIdx = mod_pubsub : nodeIdx ( )
ItemId = mod_pubsub : itemId ( )
Reason = mod_pubsub : ( )
@doc < p > Delete an item from database.</p >
del_item(NodeIdx, ItemId) ->
mnesia:delete({pubsub_item, {ItemId, NodeIdx}}).
del_items(NodeIdx, ItemIds) ->
lists:foreach(fun(ItemId) ->
del_item(NodeIdx, ItemId)
end, ItemIds).
@doc < p > Return the name of the node if known : is to return
node >
get_item_name(_Host, _Node, Id) ->
Id.
node_to_path(Node) ->
string:tokens(binary_to_list(Node), "/").
path_to_node([]) ->
<<>>;
path_to_node(Path) ->
list_to_binary(string:join([""|Path], "/")).
%% @spec (Affiliation, Subscriptions) -> true | false
%% @doc Determines if the combination of Affiliation and Subscriptions
%%      permits fetching an item.
%% Decide whether an entity with the given affiliation and subscription
%% list may fetch items.  Unaffiliated entities may fetch only while
%% they hold an active subscription.
can_fetch_item(Affiliation, Subscriptions) ->
    case Affiliation of
        owner     -> true;
        member    -> true;
        publisher -> true;
        outcast   -> false;
        none      -> is_subscribed(Subscriptions);
        _         -> false
    end.
%% True when at least one entry of the list is an active subscription,
%% i.e. a {subscribed, SubId} pair.
is_subscribed(Subscriptions) ->
    IsActive = fun({subscribed, _SubId}) -> true;
                  (_) -> false
               end,
    lists:any(IsActive, Subscriptions).
%% Returns the first item in List for which Pred(Item) is true.
%% Return {value, Item} for the first element satisfying Pred, or false
%% when no element matches.
first_in_list(_Pred, []) ->
    false;
first_in_list(Pred, [Head | Rest]) ->
    case Pred(Head) of
        true -> {value, Head};
        _ -> first_in_list(Pred, Rest)
    end.
|
1f5ca69f8de2aa471c70ce3f9fda30d5fabeee90559494bda02d20174940a7b9 | bisphone/Toveri | toveri_buffer.erl | -module(toveri_buffer).
-behaviour(gen_server).
%% -----------------------------------------------------------------------------
%% API
%% -----------------------------------------------------------------------------
-export([start_link/2]).
-export([stop/1]).
-export([get_size/1]).
-export([insert/2]).
-export([read_pos/2]).
-export([read_next/1]).
-export([delete_pos/2]).
%% -----------------------------------------------------------------------------
%% gen_server callbacks
%% -----------------------------------------------------------------------------
-export([init/1]).
-export([handle_call/3]).
-export([handle_cast/2]).
-export([handle_info/2]).
-export([terminate/2]).
-export([code_change/3]).
-include("toveri.hrl").
-type pos() :: non_neg_integer().
-record(state, {name :: atom(),
len = 0 :: non_neg_integer(),
w_pos = [] :: [pos()],
r_pos = 0 :: pos()}).
%% -----------------------------------------------------------------------------
%% API
%% -----------------------------------------------------------------------------
-spec start_link(atom(), non_neg_integer()) ->
{ok, pid()} | ignore | {error, term()}.
start_link(Name, Len) ->
gen_server:start_link({local, Name}, ?MODULE, [Name, Len], []).
-spec stop(atom()) -> ok.
stop(Name) ->
gen_server:cast(Name, stop).
-spec get_size(atom()) -> {ok, non_neg_integer()}.
get_size(Name) ->
gen_server:call(Name, get_size).
-spec insert(atom(), pid()) -> ok.
insert(Name, Pid) ->
gen_server:call(Name, {insert, Pid}).
-spec read_pos(atom(), pos()) -> {ok, pid()} | {error, {empty, pos()}}.
read_pos(Name, Pos) ->
gen_server:call(Name, {read_pos, Pos}).
-spec read_next(atom()) -> {ok, pid()} | {error, {empty, pos()}}.
read_next(Name) ->
gen_server:call(Name, read_next).
-spec delete_pos(atom(), pos()) -> ok.
delete_pos(Name, Pos) ->
gen_server:call(Name, {delete_pos, Pos}).
%% -----------------------------------------------------------------------------
%% gen_server callbacks
%% -----------------------------------------------------------------------------
%% Initialise the ring buffer: all slots 1..Len start out writable.
%% trap_exit ensures terminate/2 runs on supervisor shutdown so the
%% ETS entries get cleaned up.
init([Name, Len]) ->
    process_flag(trap_exit, true),
    State = #state{name = Name,
                   len = Len,
                   w_pos = lists:seq(1, Len)},
    {ok, State}.
%% get_size: report the configured capacity (not the fill level).
handle_call(get_size, _From, #state{len=Len}=State) ->
    {reply, {ok, Len}, State};
%% insert: take the next free write slot, store Pid there, monitor it.
handle_call({insert, Pid}, _From, #state{name=Name}=State) ->
    [Pos|WPos] = new_w_pos(State),
    do_insert(Name, Pid, Pos),
    NewState = State#state{w_pos = WPos},
    {reply, ok, NewState};
%% read_pos: look up a specific slot without moving the read cursor.
handle_call({read_pos, Pos}, _From, #state{name=Name}=State) ->
    Reply = do_read_pos(Name, Pos),
    {reply, Reply, State};
%% read_next: advance the ring cursor and read the slot it lands on.
handle_call(read_next, _From, #state{name=Name}=State) ->
    NewRPos = new_r_pos(State),
    Reply = do_read_pos(Name, NewRPos),
    NewState = State#state{r_pos = NewRPos},
    {reply, Reply, NewState};
%% delete_pos: free a slot and return it to the write-position pool.
%% NOTE(review): deleting the same Pos twice pushes it onto w_pos twice,
%% leaving a duplicate free slot — confirm callers never double-delete.
handle_call({delete_pos, Pos}, _From, #state{name=Name}=State) ->
    do_delete_pos(Name, Pos),
    NewState = State#state{w_pos = [Pos|State#state.w_pos]},
    {reply, ok, NewState};
handle_call(_Request, _From, State) ->
    {reply, ok, State}.
%% stop: terminate cleanly; the shutdown reason avoids a crash report.
handle_cast(stop, State) ->
    {stop, shutdown, State};
handle_cast(_Msg, State) ->
    {noreply, State}.
%% A monitored pid died: drop its record and recycle its ring slot.
%% NOTE(review): the {ok, Rbuf} match crashes the server when the pid
%% is not in the table (e.g. already removed via delete_pos while the
%% 'DOWN' message was in flight) — confirm this is intended.
handle_info({'DOWN', _, process, Pid, _}, State) ->
    {ok, Rbuf} = match_pid(State#state.name, Pid),
    true = delete_object(Rbuf),
    NewState = State#state{w_pos = [Rbuf#ringbuf.pos|State#state.w_pos]},
    {noreply, NewState};
handle_info(_Info, State) ->
    {noreply, State}.
%% Remove this buffer's entries from the shared table on shutdown.
%% Assumes ?ETS_TAB is keyed on the buffer name — TODO confirm against
%% the table definition in toveri.hrl.
terminate(_Reason, State) ->
    true = ets:delete(?ETS_TAB, State#state.name),
    ok.
%% No state migration needed between code versions.
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
%% -----------------------------------------------------------------------------
%% internal
%% -----------------------------------------------------------------------------
%% Advance a 1-based ring position: Len wraps back around to 1.
new_pos(Pos, Len) ->
    Wrapped = Pos rem Len,
    Wrapped + 1.
%% Next write positions: once the free list is exhausted, start over
%% from the full 1..Len range (oldest slots get overwritten).
new_w_pos(#state{w_pos=[], len=Len}) ->
    lists:seq(1, Len);
new_w_pos(#state{w_pos=[_|_]=WPos}) ->
    WPos.
%% Position the next read_next call should visit (wraps the ring).
new_r_pos(#state{r_pos=Pos, len=Len}) ->
    new_pos(Pos, Len).
%% Store Pid at slot Pos and monitor it so dead pids are evicted by
%% handle_info/2.  Returns the monitor reference.
do_insert(Name, Pid, Pos) ->
    true = insert(#ringbuf{name = Name, pos = Pos, pid = Pid}),
    erlang:monitor(process, Pid).
%% Fetch the pid stored at Pos, or {error, {empty, Pos}} when the slot
%% holds no entry.
do_read_pos(Name, Pos) ->
    case match_pos(Name, Pos) of
        {ok, Rb} ->
            {ok, Rb#ringbuf.pid};
        _Else ->
            {error, {empty, Pos}}
    end.
%% Remove the entry at Pos if present; deleting an empty slot is a no-op.
do_delete_pos(Name, Pos) ->
    case match_pos(Name, Pos) of
        {ok, Rbuf} ->
            true = delete_object(Rbuf);
        _ ->
            ok
    end.
%% Raw ETS write of a #ringbuf{} record.
insert(Obj) ->
    ets:insert(?ETS_TAB, Obj).
%% Remove one exact #ringbuf{} record from the shared table.
delete_object(Obj) ->
    ets:delete_object(?ETS_TAB, Obj).
%% Find the entry for buffer Name at ring position Pos.
match_pos(Name, Pos) ->
    Pattern = #ringbuf{name = Name, pos = Pos, _ = '_'},
    match(Pattern).
%% Find the entry for buffer Name that stores Pid.
match_pid(Name, Pid) ->
    Pattern = #ringbuf{name = Name, pid = Pid, _ = '_'},
    match(Pattern).
%% Expect exactly zero or one matching record.
%% NOTE(review): more than one match raises case_clause — this relies
%% on (Name, Pos) / (Name, Pid) being unique in ?ETS_TAB; confirm.
match(Pattern) ->
    case ets:match_object(?ETS_TAB, Pattern) of
        [RingBuf] ->
            {ok, RingBuf};
        [] ->
            {error, not_found}
    end.
| null | https://raw.githubusercontent.com/bisphone/Toveri/2c6132447dfeb54a89d0cff11dfd9626f318ed88/src/toveri_buffer.erl | erlang | -----------------------------------------------------------------------------
API
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
gen_server callbacks
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
API
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
gen_server callbacks
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
internal
----------------------------------------------------------------------------- | -module(toveri_buffer).
-behaviour(gen_server).
-export([start_link/2]).
-export([stop/1]).
-export([get_size/1]).
-export([insert/2]).
-export([read_pos/2]).
-export([read_next/1]).
-export([delete_pos/2]).
-export([init/1]).
-export([handle_call/3]).
-export([handle_cast/2]).
-export([handle_info/2]).
-export([terminate/2]).
-export([code_change/3]).
-include("toveri.hrl").
-type pos() :: non_neg_integer().
-record(state, {name :: atom(),
len = 0 :: non_neg_integer(),
w_pos = [] :: [pos()],
r_pos = 0 :: pos()}).
-spec start_link(atom(), non_neg_integer()) ->
{ok, pid()} | ignore | {error, term()}.
start_link(Name, Len) ->
gen_server:start_link({local, Name}, ?MODULE, [Name, Len], []).
-spec stop(atom()) -> ok.
stop(Name) ->
gen_server:cast(Name, stop).
-spec get_size(atom()) -> {ok, non_neg_integer()}.
get_size(Name) ->
gen_server:call(Name, get_size).
-spec insert(atom(), pid()) -> ok.
insert(Name, Pid) ->
gen_server:call(Name, {insert, Pid}).
-spec read_pos(atom(), pos()) -> {ok, pid()} | {error, {empty, pos()}}.
read_pos(Name, Pos) ->
gen_server:call(Name, {read_pos, Pos}).
-spec read_next(atom()) -> {ok, pid()} | {error, {empty, pos()}}.
read_next(Name) ->
gen_server:call(Name, read_next).
-spec delete_pos(atom(), pos()) -> ok.
delete_pos(Name, Pos) ->
gen_server:call(Name, {delete_pos, Pos}).
init([Name, Len]) ->
process_flag(trap_exit, true),
State = #state{name = Name,
len = Len,
w_pos = lists:seq(1, Len)},
{ok, State}.
handle_call(get_size, _From, #state{len=Len}=State) ->
{reply, {ok, Len}, State};
handle_call({insert, Pid}, _From, #state{name=Name}=State) ->
[Pos|WPos] = new_w_pos(State),
do_insert(Name, Pid, Pos),
NewState = State#state{w_pos = WPos},
{reply, ok, NewState};
handle_call({read_pos, Pos}, _From, #state{name=Name}=State) ->
Reply = do_read_pos(Name, Pos),
{reply, Reply, State};
handle_call(read_next, _From, #state{name=Name}=State) ->
NewRPos = new_r_pos(State),
Reply = do_read_pos(Name, NewRPos),
NewState = State#state{r_pos = NewRPos},
{reply, Reply, NewState};
handle_call({delete_pos, Pos}, _From, #state{name=Name}=State) ->
do_delete_pos(Name, Pos),
NewState = State#state{w_pos = [Pos|State#state.w_pos]},
{reply, ok, NewState};
handle_call(_Request, _From, State) ->
{reply, ok, State}.
handle_cast(stop, State) ->
{stop, shutdown, State};
handle_cast(_Msg, State) ->
{noreply, State}.
handle_info({'DOWN', _, process, Pid, _}, State) ->
{ok, Rbuf} = match_pid(State#state.name, Pid),
true = delete_object(Rbuf),
NewState = State#state{w_pos = [Rbuf#ringbuf.pos|State#state.w_pos]},
{noreply, NewState};
handle_info(_Info, State) ->
{noreply, State}.
terminate(_Reason, State) ->
true = ets:delete(?ETS_TAB, State#state.name),
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
new_pos(Pos, Len) ->
(Pos rem Len) + 1.
new_w_pos(#state{w_pos=[], len=Len}) ->
lists:seq(1, Len);
new_w_pos(#state{w_pos=[_|_]=WPos}) ->
WPos.
new_r_pos(#state{r_pos=Pos, len=Len}) ->
new_pos(Pos, Len).
do_insert(Name, Pid, Pos) ->
true = insert(#ringbuf{name = Name, pos = Pos, pid = Pid}),
erlang:monitor(process, Pid).
do_read_pos(Name, Pos) ->
case match_pos(Name, Pos) of
{ok, Rb} ->
{ok, Rb#ringbuf.pid};
_Else ->
{error, {empty, Pos}}
end.
do_delete_pos(Name, Pos) ->
case match_pos(Name, Pos) of
{ok, Rbuf} ->
true = delete_object(Rbuf);
_ ->
ok
end.
insert(Obj) ->
ets:insert(?ETS_TAB, Obj).
delete_object(Obj) ->
ets:delete_object(?ETS_TAB, Obj).
match_pos(Name, Pos) ->
Pattern = #ringbuf{name = Name, pos = Pos, _ = '_'},
match(Pattern).
match_pid(Name, Pid) ->
Pattern = #ringbuf{name = Name, pid = Pid, _ = '_'},
match(Pattern).
match(Pattern) ->
case ets:match_object(?ETS_TAB, Pattern) of
[RingBuf] ->
{ok, RingBuf};
[] ->
{error, not_found}
end.
|
8644b38acaefe6fd0f41adbf87c4518a1644ffd259dc039724881f68f084d7c9 | takikawa/tr-pfds | binomial.rkt | #lang typed/racket
(provide filter remove fold
(rename-out [heap-map map]
[heap-ormap ormap] [heap-andmap andmap])
empty? insert find-min/max delete-min/max
merge sorted-list heap Heap build-heap)
;; A binomial tree node: its rank, the root value, and the child trees.
(struct: (A) Node ([rank : Integer]
                   [val : A]
                   [trees : (Listof (Node A))]))
;; A heap: the ordering predicate plus its forest of binomial trees.
(struct: (A) Heap ([comparer : (A A -> Boolean)]
                   [trees : (Listof (Node A))]))
;; Checks for empty heap
;; True when the heap's forest contains no trees (no elements).
(: empty? : (All (A) ((Heap A) -> Boolean)))
(define (empty? heap)
  (null? (Heap-trees heap)))
;; merges two given nodes
;(: link : (All (A) ((Node A) (Node A) (A A -> Boolean) -> (Node A))))
;; Link two trees of equal rank: the root preferred by func becomes the
;; parent and the rank grows by one.  Written as a macro, so the
;; comparison expands inline at every call site.
(define-syntax-rule (link node1 node2 func)
  (let ([val1 (Node-val node1)]
        [val2 (Node-val node2)]
        [rank1 (add1 (Node-rank node1))])
    (if (func val1 val2)
        (Node rank1 val1 (cons node2 (Node-trees node1)))
        (Node rank1 val2 (cons node1 (Node-trees node2))))))
;; Inserts a node into the tree
;; Insert a tree into a rank-ordered forest, linking equal-rank trees as
;; it walks (binary-addition-style carry propagation).
;; NOTE(review): assumes trees is non-empty — car fails on '(); callers
;; appear to guard with a null? check first; confirm.
(: ins-tree : (All (A) ((Node A) (Listof (Node A)) (A A -> Boolean) -> (Heap A))))
(define (ins-tree node trees comparer)
  (let ([first (car trees)])
    (if (< (Node-rank node) (Node-rank first))
        (Heap comparer (cons node trees))
        (let ([rest (cdr trees)]
              [new (link node first comparer)])
          (if (null? rest)
              (Heap comparer (list new))
              (ins-tree new rest comparer))))))
;; Inserts an element into the heap
;; Insert an element by wrapping it as a fresh rank-0 tree.
(: insert : (All (A) (A (Heap A) -> (Heap A))))
(define (insert val heap)
  (let ([new-node (Node 0 val null)]
        [comparer (Heap-comparer heap)]
        [trees (Heap-trees heap)])
    (if (null? trees)
        (Heap comparer (list new-node))
        (ins-tree new-node trees comparer))))
;; Merges two given heaps
;; Merge two heaps; when both are non-empty the result carries heap1's
;; comparer (the two comparers are assumed compatible).
(: merge : (All (A) ((Heap A) (Heap A) -> (Heap A))))
(define (merge heap1 heap2)
  (let ([heap1-trees (Heap-trees heap1)]
        [heap2-trees (Heap-trees heap2)])
    (cond
      [(null? heap2-trees) heap1]
      [(null? heap1-trees) heap2]
      [else (merge-helper heap1-trees
                          heap2-trees
                          (Heap-comparer heap1))])))
;; Helper for merge
;; Merge two non-empty forests, walking both in increasing rank and
;; linking equal-rank trees — the same carry scheme as binary addition.
(: merge-helper :
   (All (A) ((Listof (Node A)) (Listof (Node A)) (A A -> Boolean) -> (Heap A))))
(define (merge-helper heap1-trees heap2-trees comp)
  (let* ([first-tree1 (car heap1-trees)]
         [first-tree2 (car heap2-trees)]
         [heap1 (Heap comp (cdr heap1-trees))]
         [heap2 (Heap comp (cdr heap2-trees))]
         [rank1 (Node-rank first-tree1)]
         [rank2 (Node-rank first-tree2)])
    (cond
      ;; the smaller-rank tree passes through unchanged
      [(< rank1 rank2)
       (Heap comp (cons first-tree1
                        (Heap-trees (merge heap1 (Heap comp heap2-trees)))))]
      [(> rank1 rank2)
       (Heap comp (cons first-tree2
                        (Heap-trees (merge (Heap comp heap1-trees) heap2))))]
      ;; equal ranks: link the pair and carry it into the merged rest
      [else (let ([rest (Heap-trees (merge heap1 heap2))]
                  [new (link first-tree1 first-tree2 comp)])
              (if (null? rest)
                  (Heap comp (list new))
                  (ins-tree new rest comp)))])))
;; Returns the min element if min-heap else returns the max element
;; Return the extremum element under the heap's comparer (minimum for a
;; min-heap predicate, maximum for a max-heap one) by scanning every
;; root in the forest.  Errors on an empty heap.
(: find-min/max : (All (A) ((Heap A) -> A)))
(define (find-min/max heap)
  (let ([trees (Heap-trees heap)])
    (cond
      [(null? trees) (error 'find-min/max "given heap is empty")]
      [(null? (cdr trees)) (Node-val (car trees))]
      [else (let* ([comparer (Heap-comparer heap)]
                   [x (Node-val (car trees))]
                   [y (find-min/max (Heap comparer (cdr trees)))])
              (if (comparer x y) x y))])))
;; Deletes the min or max element (depends on min or max heap)
;; Remove the extremum: locate the tree whose root wins the comparison,
;; strip that root, and merge its (reversed, so rank-ascending) children
;; back with the rest of the forest.
(: delete-min/max : (All (A) ((Heap A) -> (Heap A))))
(define (delete-min/max heap)
  ;; get-min rotates the tree with the extremal root to the front of the
  ;; forest, leaving the other trees behind it.
  (: get-min : (All (A) ((Listof (Node A)) (A A -> Boolean) -> (Heap A))))
  (define (get-min trees func)
    (let ([first (car trees)]
          [rest (cdr trees)]
          [heap (Heap func trees)])
      (if (null? rest)
          heap
          (let* ([min-trees (Heap-trees (get-min rest func))]
                 [first-tree (car min-trees)])
            (if (func (Node-val first) (Node-val first-tree))
                heap
                (Heap func (cons first-tree (cons first (cdr min-trees)))))))))
  (if (null? (Heap-trees heap))
      (error 'delete-min/max "given heap is empty")
      (let* ([func (Heap-comparer heap)]
             [newpair (get-min (Heap-trees heap) func)]
             [newpair-trees (Heap-trees newpair)])
        (merge (Heap func (reverse (Node-trees (car newpair-trees))))
               (Heap func (cdr newpair-trees))))))
;; Returns a sorted list (sorting order depends on min or max heap)
;; All elements of the heap in comparer order, extremum first.
(: sorted-list : (All (A) ((Heap A) -> (Listof A))))
(define (sorted-list heap)
  (cond
    [(empty? heap) null]
    [else
     (define head (find-min/max heap))
     (cons head (sorted-list (delete-min/max heap)))]))
;; Heap constructor
;; Build a heap ordered by func from the given elements.
(: heap : (All (A) ((A A -> Boolean) A * -> (Heap A))))
(define (heap func . lst)
  (foldl (inst insert A) ((inst Heap A) func empty) lst))
;; similar to list map function. apply is expensive so using case-lambda
;; in order to saperate the more common case
(: heap-map :
(All (A C B ...)
(case-lambda
((C C -> Boolean) (A -> C) (Heap A) -> (Heap C))
((C C -> Boolean)
(A B ... B -> C) (Heap A) (Heap B) ... B -> (Heap C)))))
(define heap-map
(pcase-lambda: (A C B ...)
[([comp : (C C -> Boolean)]
[func : (A -> C)]
[heap : (Heap A)])
(map-single ((inst Heap C) comp empty) func heap)]
[([comp : (C C -> Boolean)]
[func : (A B ... B -> C)]
[heap : (Heap A)] . [heaps : (Heap B) ... B])
(apply map-multiple
((inst Heap C) comp empty)
func heap heaps)]))
;; Drain the heap in comparer order, inserting (func x) into accum.
(: map-single : (All (A C) ((Heap C) (A -> C) (Heap A) -> (Heap C))))
(define (map-single accum func heap)
  (if (empty? heap)
      accum
      (map-single (insert (func (find-min/max heap)) accum) func
                  (delete-min/max heap))))
(: map-multiple :
(All (A C B ...)
((Heap C) (A B ... B -> C) (Heap A) (Heap B) ... B -> (Heap C))))
(define (map-multiple accum func heap . heaps)
(if (or (empty? heap) (ormap empty? heaps))
accum
(apply map-multiple
(insert (apply func (find-min/max heap) (map find-min/max heaps))
accum)
func
(delete-min/max heap)
(map delete-min/max heaps))))
;; similar to list foldr or foldl
;; Fold over the heap's elements in comparer (sorted) order; the
;; accumulator is func's first argument.  With several heaps, folding
;; stops as soon as any of them is exhausted.
(: fold :
   (All (A C B ...)
        (case-lambda ((C A -> C) C (Heap A) -> C)
                     ((C A B ... B -> C) C (Heap A) (Heap B) ... B -> C))))
(define fold
  (pcase-lambda: (A C B ...)
                 [([func : (C A -> C)]
                   [base : C]
                   [heap : (Heap A)])
                  (if (empty? heap)
                      base
                      (fold func (func base (find-min/max heap))
                            (delete-min/max heap)))]
                 [([func : (C A B ... B -> C)]
                   [base : C]
                   [heap : (Heap A)] . [heaps : (Heap B) ... B])
                  (if (or (empty? heap) (ormap empty? heaps))
                      base
                      (apply fold
                             func
                             (apply func base (find-min/max heap)
                                    (map find-min/max heaps))
                             (delete-min/max heap)
                             (map delete-min/max heaps)))]))
;; similar to list filter function
;; Keep only the elements satisfying func, rebuilding a heap that reuses
;; the input heap's comparer.
(: filter : (All (A) ((A -> Boolean) (Heap A) -> (Heap A))))
(define (filter func hep)
  (: inner : (All (A) ((A -> Boolean) (Heap A) (Heap A) -> (Heap A))))
  (define (inner func hep accum)
    (if (empty? hep)
        accum
        (let ([head (find-min/max hep)]
              [tail (delete-min/max hep)])
          (if (func head)
              (inner func tail (insert head accum))
              (inner func tail accum)))))
  (inner func hep ((inst Heap A) (Heap-comparer hep) empty)))
;; similar to list remove function
(: remove : (All (A) ((A -> Boolean) (Heap A) -> (Heap A))))
(define (remove func hep)
(: inner : (All (A) ((A -> Boolean) (Heap A) (Heap A) -> (Heap A))))
(define (inner func hep accum)
(if (empty? hep)
accum
(let ([head (find-min/max hep)]
[tail (delete-min/max hep)])
(if (func head)
(inner func tail accum)
(inner func tail (insert head accum))))))
(inner func hep ((inst Heap A) (Heap-comparer hep) empty)))
;; Similar to build-list
;; Like build-list: a heap of (func 0) .. (func (sub1 size)) ordered by
;; comparer.
(: build-heap : (All (A) (Natural (Natural -> A) (A A -> Boolean) -> (Heap A))))
(define (build-heap size func comparer)
  (let: loop : (Heap A) ([n : Natural size])
        (if (zero? n)
            ((inst Heap A) comparer empty)
            (let ([nsub1 (sub1 n)])
              (insert (func nsub1) (loop nsub1))))))
;; similar to list andmap function
;; Like andmap over heap elements (visited in comparer order);
;; vacuously true for an empty heap.  With several heaps, traversal
;; stops when any heap empties.
(: heap-andmap :
   (All (A B ...)
        (case-lambda ((A -> Boolean) (Heap A) -> Boolean)
                     ((A B ... B -> Boolean) (Heap A) (Heap B) ... B
                                             -> Boolean))))
(define heap-andmap
  (pcase-lambda: (A B ... )
                 [([func : (A -> Boolean)]
                   [heap : (Heap A)])
                  (or (empty? heap)
                      (and (func (find-min/max heap))
                           (heap-andmap func (delete-min/max heap))))]
                 [([func : (A B ... B -> Boolean)]
                   [heap : (Heap A)] . [heaps : (Heap B) ... B])
                  (or (empty? heap) (ormap empty? heaps)
                      (and (apply func (find-min/max heap)
                                  (map find-min/max heaps))
                           (apply heap-andmap func (delete-min/max heap)
                                  (map delete-min/max heaps))))]))
;; Similar to ormap
;; Like ormap over heap elements (visited in comparer order); false for
;; an empty heap.
(: heap-ormap :
   (All (A B ...)
        (case-lambda ((A -> Boolean) (Heap A) -> Boolean)
                     ((A B ... B -> Boolean) (Heap A) (Heap B) ... B
                                             -> Boolean))))
(define heap-ormap
  (pcase-lambda: (A B ... )
                 [([func : (A -> Boolean)]
                   [heap : (Heap A)])
                  (and (not (empty? heap))
                       (or (func (find-min/max heap))
                           (heap-ormap func (delete-min/max heap))))]
                 [([func : (A B ... B -> Boolean)]
                   [heap : (Heap A)] . [heaps : (Heap B) ... B])
                  (and (not (or (empty? heap) (ormap empty? heaps)))
                       (or (apply func (find-min/max heap)
                                  (map find-min/max heaps))
                           (apply heap-ormap func (delete-min/max heap)
                                  (map delete-min/max heaps))))]))
| null | https://raw.githubusercontent.com/takikawa/tr-pfds/a08810bdfc760bb9ed68d08ea222a59135d9a203/pfds/heap/binomial.rkt | racket | Checks for empty heap
(: link : (All (A) ((Node A) (Node A) (A A -> Boolean) -> (Node A))))
Inserts a node into the tree
Inserts an element into the heap
Helper for merge
similar to list map function. apply is expensive so using case-lambda
in order to saperate the more common case
similar to list foldr or foldl
similar to list filter function
similar to list remove function
Similar to build-list
Similar to ormap | #lang typed/racket
(provide filter remove fold
(rename-out [heap-map map]
[heap-ormap ormap] [heap-andmap andmap])
empty? insert find-min/max delete-min/max
merge sorted-list heap Heap build-heap)
(struct: (A) Node ([rank : Integer]
[val : A]
[trees : (Listof (Node A))]))
(struct: (A) Heap ([comparer : (A A -> Boolean)]
[trees : (Listof (Node A))]))
(: empty? : (All (A) ((Heap A) -> Boolean)))
(define (empty? heap)
(null? (Heap-trees heap)))
merges two given nodes
(define-syntax-rule (link node1 node2 func)
(let ([val1 (Node-val node1)]
[val2 (Node-val node2)]
[rank1 (add1 (Node-rank node1))])
(if (func val1 val2)
(Node rank1 val1 (cons node2 (Node-trees node1)))
(Node rank1 val2 (cons node1 (Node-trees node2))))))
(: ins-tree : (All (A) ((Node A) (Listof (Node A)) (A A -> Boolean) -> (Heap A))))
(define (ins-tree node trees comparer)
(let ([first (car trees)])
(if (< (Node-rank node) (Node-rank first))
(Heap comparer (cons node trees))
(let ([rest (cdr trees)]
[new (link node first comparer)])
(if (null? rest)
(Heap comparer (list new))
(ins-tree new rest comparer))))))
(: insert : (All (A) (A (Heap A) -> (Heap A))))
(define (insert val heap)
(let ([new-node (Node 0 val null)]
[comparer (Heap-comparer heap)]
[trees (Heap-trees heap)])
(if (null? trees)
(Heap comparer (list new-node))
(ins-tree new-node trees comparer))))
Merges two given heaps
(: merge : (All (A) ((Heap A) (Heap A) -> (Heap A))))
(define (merge heap1 heap2)
(let ([heap1-trees (Heap-trees heap1)]
[heap2-trees (Heap-trees heap2)])
(cond
[(null? heap2-trees) heap1]
[(null? heap1-trees) heap2]
[else (merge-helper heap1-trees
heap2-trees
(Heap-comparer heap1))])))
(: merge-helper :
(All (A) ((Listof (Node A)) (Listof (Node A)) (A A -> Boolean) -> (Heap A))))
(define (merge-helper heap1-trees heap2-trees comp)
(let* ([first-tree1 (car heap1-trees)]
[first-tree2 (car heap2-trees)]
[heap1 (Heap comp (cdr heap1-trees))]
[heap2 (Heap comp (cdr heap2-trees))]
[rank1 (Node-rank first-tree1)]
[rank2 (Node-rank first-tree2)])
(cond
[(< rank1 rank2)
(Heap comp (cons first-tree1
(Heap-trees (merge heap1 (Heap comp heap2-trees)))))]
[(> rank1 rank2)
(Heap comp (cons first-tree2
(Heap-trees (merge (Heap comp heap1-trees) heap2))))]
[else (let ([rest (Heap-trees (merge heap1 heap2))]
[new (link first-tree1 first-tree2 comp)])
(if (null? rest)
(Heap comp (list new))
(ins-tree new rest comp)))])))
Returns the min element if min - heap else returns the element
(: find-min/max : (All (A) ((Heap A) -> A)))
(define (find-min/max heap)
(let ([trees (Heap-trees heap)])
(cond
[(null? trees) (error 'find-min/max "given heap is empty")]
[(null? (cdr trees)) (Node-val (car trees))]
[else (let* ([comparer (Heap-comparer heap)]
[x (Node-val (car trees))]
[y (find-min/max (Heap comparer (cdr trees)))])
(if (comparer x y) x y))])))
Deletes min or element ( depends on min or heap )
(: delete-min/max : (All (A) ((Heap A) -> (Heap A))))
(define (delete-min/max heap)
(: get-min : (All (A) ((Listof (Node A)) (A A -> Boolean) -> (Heap A))))
(define (get-min trees func)
(let ([first (car trees)]
[rest (cdr trees)]
[heap (Heap func trees)])
(if (null? rest)
heap
(let* ([min-trees (Heap-trees (get-min rest func))]
[first-tree (car min-trees)])
(if (func (Node-val first) (Node-val first-tree))
heap
(Heap func (cons first-tree (cons first (cdr min-trees)))))))))
(if (null? (Heap-trees heap))
(error 'delete-min/max "given heap is empty")
(let* ([func (Heap-comparer heap)]
[newpair (get-min (Heap-trees heap) func)]
[newpair-trees (Heap-trees newpair)])
(merge (Heap func (reverse (Node-trees (car newpair-trees))))
(Heap func (cdr newpair-trees))))))
Returns a sorted list ( sorting depends on min or heap )
(: sorted-list : (All (A) ((Heap A) -> (Listof A))))
(define (sorted-list heap)
(if (empty? heap)
null
(cons (find-min/max heap)
(sorted-list (delete-min/max heap)))))
Heap constructor
(: heap : (All (A) ((A A -> Boolean) A * -> (Heap A))))
(define (heap func . lst)
(foldl (inst insert A) ((inst Heap A) func empty) lst))
(: heap-map :
(All (A C B ...)
(case-lambda
((C C -> Boolean) (A -> C) (Heap A) -> (Heap C))
((C C -> Boolean)
(A B ... B -> C) (Heap A) (Heap B) ... B -> (Heap C)))))
(define heap-map
(pcase-lambda: (A C B ...)
[([comp : (C C -> Boolean)]
[func : (A -> C)]
[heap : (Heap A)])
(map-single ((inst Heap C) comp empty) func heap)]
[([comp : (C C -> Boolean)]
[func : (A B ... B -> C)]
[heap : (Heap A)] . [heaps : (Heap B) ... B])
(apply map-multiple
((inst Heap C) comp empty)
func heap heaps)]))
(: map-single : (All (A C) ((Heap C) (A -> C) (Heap A) -> (Heap C))))
(define (map-single accum func heap)
(if (empty? heap)
accum
(map-single (insert (func (find-min/max heap)) accum) func
(delete-min/max heap))))
(: map-multiple :
(All (A C B ...)
((Heap C) (A B ... B -> C) (Heap A) (Heap B) ... B -> (Heap C))))
(define (map-multiple accum func heap . heaps)
(if (or (empty? heap) (ormap empty? heaps))
accum
(apply map-multiple
(insert (apply func (find-min/max heap) (map find-min/max heaps))
accum)
func
(delete-min/max heap)
(map delete-min/max heaps))))
(: fold :
(All (A C B ...)
(case-lambda ((C A -> C) C (Heap A) -> C)
((C A B ... B -> C) C (Heap A) (Heap B) ... B -> C))))
(define fold
(pcase-lambda: (A C B ...)
[([func : (C A -> C)]
[base : C]
[heap : (Heap A)])
(if (empty? heap)
base
(fold func (func base (find-min/max heap))
(delete-min/max heap)))]
[([func : (C A B ... B -> C)]
[base : C]
[heap : (Heap A)] . [heaps : (Heap B) ... B])
(if (or (empty? heap) (ormap empty? heaps))
base
(apply fold
func
(apply func base (find-min/max heap)
(map find-min/max heaps))
(delete-min/max heap)
(map delete-min/max heaps)))]))
(: filter : (All (A) ((A -> Boolean) (Heap A) -> (Heap A))))
(define (filter func hep)
(: inner : (All (A) ((A -> Boolean) (Heap A) (Heap A) -> (Heap A))))
(define (inner func hep accum)
(if (empty? hep)
accum
(let ([head (find-min/max hep)]
[tail (delete-min/max hep)])
(if (func head)
(inner func tail (insert head accum))
(inner func tail accum)))))
(inner func hep ((inst Heap A) (Heap-comparer hep) empty)))
(: remove : (All (A) ((A -> Boolean) (Heap A) -> (Heap A))))
(define (remove func hep)
(: inner : (All (A) ((A -> Boolean) (Heap A) (Heap A) -> (Heap A))))
(define (inner func hep accum)
(if (empty? hep)
accum
(let ([head (find-min/max hep)]
[tail (delete-min/max hep)])
(if (func head)
(inner func tail accum)
(inner func tail (insert head accum))))))
(inner func hep ((inst Heap A) (Heap-comparer hep) empty)))
(: build-heap : (All (A) (Natural (Natural -> A) (A A -> Boolean) -> (Heap A))))
(define (build-heap size func comparer)
(let: loop : (Heap A) ([n : Natural size])
(if (zero? n)
((inst Heap A) comparer empty)
(let ([nsub1 (sub1 n)])
(insert (func nsub1) (loop nsub1))))))
similar to list andmap function
(: heap-andmap :
(All (A B ...)
(case-lambda ((A -> Boolean) (Heap A) -> Boolean)
((A B ... B -> Boolean) (Heap A) (Heap B) ... B
-> Boolean))))
(define heap-andmap
(pcase-lambda: (A B ... )
[([func : (A -> Boolean)]
[heap : (Heap A)])
(or (empty? heap)
(and (func (find-min/max heap))
(heap-andmap func (delete-min/max heap))))]
[([func : (A B ... B -> Boolean)]
[heap : (Heap A)] . [heaps : (Heap B) ... B])
(or (empty? heap) (ormap empty? heaps)
(and (apply func (find-min/max heap)
(map find-min/max heaps))
(apply heap-andmap func (delete-min/max heap)
(map delete-min/max heaps))))]))
(: heap-ormap :
(All (A B ...)
(case-lambda ((A -> Boolean) (Heap A) -> Boolean)
((A B ... B -> Boolean) (Heap A) (Heap B) ... B
-> Boolean))))
(define heap-ormap
(pcase-lambda: (A B ... )
[([func : (A -> Boolean)]
[heap : (Heap A)])
(and (not (empty? heap))
(or (func (find-min/max heap))
(heap-ormap func (delete-min/max heap))))]
[([func : (A B ... B -> Boolean)]
[heap : (Heap A)] . [heaps : (Heap B) ... B])
(and (not (or (empty? heap) (ormap empty? heaps)))
(or (apply func (find-min/max heap)
(map find-min/max heaps))
(apply heap-ormap func (delete-min/max heap)
(map delete-min/max heaps))))]))
|
d6d3603fd72963a2ad7780c436fb6d31e88387ec0d3ab2dd95c4a0fb310edc36 | ryanpbrewster/haskell | parsec_example_1.hs | -- parsec_example_1.hs
{-
- Do (+), then (*)
-}
import Text.ParserCombinators.Parsec
line = adds
-- | Parse one or more '+'-separated addends into a list.
adds = do first <- add
          next <- moreAdds
          return (first:next)
-- | Either consume a '+' and continue the addend list, or end it.
moreAdds = (char '+' >> adds)
       <|> (return [])
add = prods
-- | Parse one or more '*'-separated factors into a list.
prods = do first <- prod
           next <- moreProds
           return (first:next)
-- | A single factor: an integer literal, optionally surrounded by spaces.
-- NOTE(review): 'many digit' also accepts the empty string, in which
-- case 'read' fails when the result is forced — confirm inputs always
-- contain digits (or switch to 'many1 digit').
prod :: GenParser Char st Int
prod = do spaces
          ans <- many digit
          spaces
          return (read ans)
-- | Either consume a '*' and continue the factor list, or end it.
moreProds = (char '*' >> prods)
        <|> (return [])
parseLine input = parse line "(unknown)" input
evaluate tree = sum $ map product tree
| null | https://raw.githubusercontent.com/ryanpbrewster/haskell/6edd0afe234008a48b4871032dedfd143ca6e412/parsec/parsec_example_1.hs | haskell | parsec_example_1.hs
- Do (+), then (*)
|
import Text.ParserCombinators.Parsec
line = adds
adds = do first <- add
next <- moreAdds
return (first:next)
moreAdds = (char '+' >> adds)
<|> (return [])
add = prods
prods = do first <- prod
next <- moreProds
return (first:next)
prod :: GenParser Char st Int
prod = do spaces
ans <- many digit
spaces
return (read ans)
moreProds = (char '*' >> prods)
<|> (return [])
parseLine input = parse line "(unknown)" input
evaluate tree = sum $ map product tree
|
5d719c752ad95f33353e8cd8884474401275b90a4907068645624e073f88c74f | airbus-seclab/bincat | mapOpt.ml | (***********************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the GNU Library General Public License , with
(* the special exception on linking described in file COPYING-LGPL *)
(* *)
Modifications by Airbus Group - Copyright 2014 - 2017
(* *)
(***********************************************************************)
(* Input signature of the Make functor: a totally ordered key type. *)
module type OrderedType =
  sig
    type t
    val compare: t -> t -> int
  end
module Make(Ord: OrderedType) = struct
    type key = Ord.t
    (* AVL-style balanced binary search tree of key/value bindings,
       with the subtree height cached in each node. *)
    type 'a t =
        Empty
        (* left tree, key, value, right tree, height of whole tree *)
      | Node of ('a t * key * 'a * 'a t * int)
let height = function
Empty -> 0
| Node(_, _, _, _,h) -> h
    (* Smart constructor: rebuilds the cached height from the subtrees.
       Does not rebalance — callers use bal for that. *)
    let create l x d r =
      let hl = height l and hr = height r in
      Node(l, x, d, r, (if hl >= hr then hl + 1 else hr + 1))
    (* Rebalance a node whose subtrees may differ in height after an
       insertion or removal on one side: performs a single or double
       rotation when the difference exceeds 2, otherwise just rebuilds
       the node. *)
    let bal l x d r =
      let hl = height l and hr = height r in
      if hl > hr + 2 then begin
        (* left-heavy: rotate right (single or double) *)
        match l with
          Empty -> invalid_arg "MapOpt.bal"
        | Node(ll, lv, ld, lr, _) ->
            if height ll >= height lr then
              create ll lv ld (create lr x d r)
            else begin
              match lr with
                Empty -> invalid_arg "MapOpt.bal"
              | Node(lrl, lrv, lrd, lrr, _)->
                  create (create ll lv ld lrl) lrv lrd (create lrr x d r)
            end
      end else if hr > hl + 2 then begin
        (* right-heavy: rotate left (single or double) *)
        match r with
          Empty -> invalid_arg "MapOpt.bal"
        | Node(rl, rv, rd, rr, _) ->
            if height rr >= height rl then
              create (create l x d rl) rv rd rr
            else begin
              match rl with
                Empty -> invalid_arg "MapOpt.bal"
              | Node(rll, rlv, rld, rlr, _) ->
                  create (create l x d rll) rlv rld (create rlr rv rd rr)
            end
      end else
        Node(l, x, d, r, (if hl >= hr then hl + 1 else hr + 1))
    (* The empty map. *)
    let empty = Empty
    (* Whether the map holds no binding at all. *)
    let is_empty = function Empty -> true | _ -> false
    (* Insert or overwrite the binding for x, rebalancing on the way up.
       When x is already bound the node is rebuilt in place (shape and
       height unchanged), so no rebalancing is needed on that path. *)
    let rec add x data = function
        Empty ->
          Node (Empty, x, data, Empty, 1)
      | Node(l, v, d, r, h) ->
          let c = Ord.compare x v in
          if c = 0 then
            Node (l, x, data, r, h)
          else if c < 0 then
            bal (add x data l) v d r
          else
            bal l v d (add x data r)
    (* Largest key in the map; raises Not_found when empty. *)
    let rec max_key x =
      match x with
        Empty -> raise Not_found
      | Node (_, v, _, Empty, _) -> v
      | Node (_, _, _, r, _) -> max_key r
    (* Smallest key in the map; raises Not_found when empty. *)
    let rec min_key x =
      match x with
        Empty -> raise Not_found
      | Node (Empty, v, _, _, _) -> v
      | Node (l, _, _, _, _) -> min_key l
    (* Binary search driven by a monotone predicate p over keys:
       p k = 0 selects k, p k < 0 directs the search left, otherwise
       right.  Returns the (key, value) pair or raises Not_found. *)
    let find_key p x =
      let rec find x =
        match x with
        | Empty -> raise Not_found
        | Node (l, k, v, r, _) ->
            if p k = 0 then k, v
            else if p k < 0 then find l
            else find r
      in
      find x
    (* All (key, value) pairs whose key satisfies the boolean predicate
       p, in ascending key order.  Unlike find_key, p does not guide the
       search: the whole tree is scanned (O(n)). *)
    let find_all_keys p x =
      let rec find x =
        match x with
        | Empty -> []
        | Node (l, k, v, r, _) ->
            let l' = find l in
            let r' = find r in
            l' @ (if p k then (k, v)::r' else r')
      in
      find x
    (* Whether key x is bound in the map. *)
    let rec mem x = function
        Empty ->
          false
      | Node(l, v, _, r, _) ->
          let c = Ord.compare x v in
          c = 0 || mem x (if c < 0 then l else r)
    (* Binding with the smallest key; raises Not_found when empty. *)
    let rec min_binding = function
        Empty -> raise Not_found
      | Node (Empty, x, d, _, _) -> (x, d)
      | Node (l, _, _, _, _) -> min_binding l
    (* Drop the smallest-key binding, rebalancing on the way up.
       Internal helper for merge/remove; Empty is a caller error. *)
    let rec remove_min_binding = function
        Empty -> invalid_arg "MapOpt.remove_min_elt"
      | Node (Empty, _, _, r, _) -> r
      | Node (l, x, d, r, _) -> bal (remove_min_binding l) x d r
    (* Internal: fuse two subtrees after their parent was removed.
       Assumes every key of t1 is smaller than every key of t2 (holds at
       the call site in remove); promotes t2's minimum as the new root. *)
    let merge t1 t2 =
      match (t1, t2) with
        (Empty, t) -> t
      | (t, Empty) -> t
      | (_, _) ->
          let (x, d) = min_binding t2 in
          bal t1 x d (remove_min_binding t2)
    (* Remove the binding for x if present (removing an unbound key is
       a no-op), rebalancing along the search path. *)
    let rec remove x = function
        Empty -> Empty
      | Node(l, v, d, r, _) ->
          let c = Ord.compare x v in
          if c = 0 then merge l r
          else if c < 0 then bal (remove x l) v d r
          else bal l v d (remove x r)
    (* Value bound to x; raises Not_found when x is unbound. *)
    let rec find x = function
        Empty ->
          raise Not_found
      | Node(l, v, d, r, _) ->
          let c = Ord.compare x v in
          if c = 0 then d
          else find x (if c < 0 then l else r)
let rec update x f m =
match m with
Empty -> raise Not_found
| Node (l, v, d, r, h) ->
let c = Ord.compare x v in
if c = 0 then Node (l, v, f d, r, h)
else if c < 0 then Node (update x f l, v, d, r, h)
else Node (l, v, d, update x f r, h)
let rec replace x data = function
Empty -> raise Not_found
| Node (l, v, d, r, h) ->
let c = Ord.compare x v in
if c = 0 then Node (l, v, data, r, h)
(* important not: there is no need to balance the tree here, since
we are replacing a node and not changing the structure of the tree at all
*)
else if c < 0 then Node (replace x data l, v, d, r, h)
else Node (l, v, d, replace x data r, h)
let rec iteri f x =
match x with
Empty -> ()
| Node (l, v, d, r, _) ->
iteri f l; f v d; iteri f r
let rec iter f = function
Empty -> ()
| Node(l, _, d, r, _) ->
iter f l; f d; iter f r
let iter_from x f t =
let rec iter_from t =
match t with
Node (l, v, d, r, _) ->
let c = Ord.compare x v in
if c < 0 then iter_from l;
if c <= 0 then f v d;
iter_from r
| Empty -> ()
in
iter_from t
let rec iter2 f m1 m2 =
match (m1,m2) with
| (Empty, Empty) -> ()
| (Node(l1, v1, d1, r1, _), Node(l2, v2, d2, r2, _))
when Ord.compare v1 v2 = 0 ->
iter2 f l1 l2; f d1 d2; iter2 f r1 r2
| _ -> invalid_arg "MapOpt.iter2"
let rec iteri2 f m1 m2 =
match (m1,m2) with
| (Empty, Empty) -> ()
| (Node(l1, v1, d1, r1, _), Node(l2, v2, d2, r2, _))
when Ord.compare v1 v2 = 0 ->
iteri2 f l1 l2; f v1 d1 d2; iteri2 f r1 r2
| _ -> invalid_arg "MapOpt.iter2"
let rec map f = function
Empty -> Empty
| Node(l, v, d, r, h) -> Node(map f l, v, f d, map f r, h)
let rec mapi f = function
Empty -> Empty
| Node(l, v, d, r, h) -> Node(mapi f l, v, f v d, mapi f r, h)
(* Carefull set_root does not preserve the balancing
+ the height may be incorrect *)
let rec set_root k (l, v, d, r, h) =
match (l, r) with
_ when v = k -> (l, v, d, r, h)
| (Node n, _) when Ord.compare k v < 0 ->
let (ll, _, ld, lr, lh) = set_root k n in
(ll, k, ld, Node (lr, v, d, r, h), lh)
| (_, Node n) ->
let (rl, _, rd, rr, rh) = set_root k n in
(Node (l, v, d, rl, h), k, rd, rr, rh)
| _ -> invalid_arg "MapOpt.set_root"
f must be such that f d d = d
m1 and should have the same set of keys
m1 and m2 should have the same set of keys *)
let rec map2 f m1 m2 =
match (m1, m2) with
_ when (m1 == m2) -> m1
| (Node (l1, v1, d1, r1, h1), Node (l2, v2, d2, r2, _))
when (Ord.compare v1 v2 = 0) ->
Node (map2 f l1 l2, v1, f d1 d2, map2 f r1 r2, h1)
| (Node (_, v, _, _, _), Node n) ->
map2 f m1 (Node (set_root v n))
| _ -> invalid_arg "MapOpt.map2_opt"
let rec mapi2 f m1 m2 =
match (m1, m2) with
_ when (m1 == m2) -> m1
| (Node (l1, v1, d1, r1, h1), Node (l2, v2, d2, r2, _))
when (Ord.compare v1 v2 = 0) ->
Node (mapi2 f l1 l2, v1, f v1 d1 d2, mapi2 f r1 r2, h1)
| (Node (_, v, _, _, _), Node n) ->
mapi2 f m1 (Node (set_root v n))
| _ -> invalid_arg "MapOpt.mapi2"
let rec for_all p m =
match m with
Empty -> true
| Node (l, _, d, r, _) -> p d && (for_all p l) && (for_all p r)
(* p must be such that p m m = true *)
let rec for_all2 p m1 m2 =
match (m1,m2) with
_ when m1 == m2 -> true
| (Node(l1, v1, d1, r1, _), Node(l2, v2, d2, r2, _))
when (Ord.compare v1 v2 = 0) ->
(p d1 d2) && (for_all2 p l1 l2) && (for_all2 p r1 r2)
| (Node (_, v, _, _, _), Node n) ->
for_all2 p m1 (Node (set_root v n))
| _ -> invalid_arg "MapOpt.for_all2"
let rec fold f m accu =
match m with
Empty -> accu
| Node(l, v, d, r, _) ->
fold f l (f v d (fold f r accu))
let rec fold2 f m1 m2 accu =
match (m1,m2) with
| (Empty,Empty) -> accu
| (Node(l1, v1, d1, r1, h1),Node(l2, v2, d2, r2, h2))
when (h1 = h2) && (Ord.compare v1 v2 = 0) ->
fold2 f l1 l2 (f v1 d1 d2 (fold2 f r1 r2 accu))
| _ -> invalid_arg "MapOpt.fold2"
let rec exists p m =
match m with
Empty -> false
| Node (l, _, d, r, _) -> (p d) || (exists p l) || (exists p r)
type 'a enumeration = End | More of key * 'a * 'a t * 'a enumeration
let rec cons_enum m e =
match m with
Empty -> e
| Node(l, v, d, r, _) -> cons_enum l (More(v, d, r, e))
let compare cmp m1 m2 =
let rec compare_aux e1 e2 =
match (e1, e2) with
(End, End) -> 0
| (End, _) -> -1
| (_, End) -> 1
| (More(v1, d1, r1, e1), More(v2, d2, r2, e2)) ->
let c = Ord.compare v1 v2 in
if c <> 0 then c else
let c = cmp d1 d2 in
if c <> 0 then c else
compare_aux (cons_enum r1 e1) (cons_enum r2 e2)
in compare_aux (cons_enum m1 End) (cons_enum m2 End)
let equal cmp m1 m2 =
let rec equal_aux e1 e2 =
match (e1, e2) with
(End, End) -> true
| (End, _) -> false
| (_, End) -> false
| (More(v1, d1, r1, e1), More(v2, d2, r2, e2)) ->
Ord.compare v1 v2 = 0 && cmp d1 d2 &&
equal_aux (cons_enum r1 e1) (cons_enum r2 e2)
in equal_aux (cons_enum m1 End) (cons_enum m2 End)
let concat m1 m2 =
let result = ref m1 in
let add key data = result := add key data !result in
iteri add m2;
!result
let rec cardinal m =
match m with
| Node (l, _, _, r, _) -> cardinal l + cardinal r + 1
| Empty -> 0
end
| null | https://raw.githubusercontent.com/airbus-seclab/bincat/493a03890b3b472fd198ce58c7e9280abd0f9f93/ocaml/src/utils/mapOpt.ml | ocaml | *********************************************************************
OCaml
the special exception on linking described in file COPYING-LGPL
*********************************************************************
left tree, key, value, right tree, height of whole tree
important not: there is no need to balance the tree here, since
we are replacing a node and not changing the structure of the tree at all
Carefull set_root does not preserve the balancing
+ the height may be incorrect
p must be such that p m m = true | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the GNU Library General Public License , with
Modifications by Airbus Group - Copyright 2014 - 2017
module type OrderedType =
sig
type t
val compare: t -> t -> int
end
module Make(Ord: OrderedType) = struct
type key = Ord.t
type 'a t =
Empty
| Node of ('a t * key * 'a * 'a t * int)
let height = function
Empty -> 0
| Node(_, _, _, _,h) -> h
let create l x d r =
let hl = height l and hr = height r in
Node(l, x, d, r, (if hl >= hr then hl + 1 else hr + 1))
let bal l x d r =
let hl = height l and hr = height r in
if hl > hr + 2 then begin
match l with
Empty -> invalid_arg "MapOpt.bal"
| Node(ll, lv, ld, lr, _) ->
if height ll >= height lr then
create ll lv ld (create lr x d r)
else begin
match lr with
Empty -> invalid_arg "MapOpt.bal"
| Node(lrl, lrv, lrd, lrr, _)->
create (create ll lv ld lrl) lrv lrd (create lrr x d r)
end
end else if hr > hl + 2 then begin
match r with
Empty -> invalid_arg "MapOpt.bal"
| Node(rl, rv, rd, rr, _) ->
if height rr >= height rl then
create (create l x d rl) rv rd rr
else begin
match rl with
Empty -> invalid_arg "MapOpt.bal"
| Node(rll, rlv, rld, rlr, _) ->
create (create l x d rll) rlv rld (create rlr rv rd rr)
end
end else
Node(l, x, d, r, (if hl >= hr then hl + 1 else hr + 1))
let empty = Empty
let is_empty = function Empty -> true | _ -> false
let rec add x data = function
Empty ->
Node (Empty, x, data, Empty, 1)
| Node(l, v, d, r, h) ->
let c = Ord.compare x v in
if c = 0 then
Node (l, x, data, r, h)
else if c < 0 then
bal (add x data l) v d r
else
bal l v d (add x data r)
let rec max_key x =
match x with
Empty -> raise Not_found
| Node (_, v, _, Empty, _) -> v
| Node (_, _, _, r, _) -> max_key r
let rec min_key x =
match x with
Empty -> raise Not_found
| Node (Empty, v, _, _, _) -> v
| Node (l, _, _, _, _) -> min_key l
let find_key p x =
let rec find x =
match x with
| Empty -> raise Not_found
| Node (l, k, v, r, _) ->
if p k = 0 then k, v
else if p k < 0 then find l
else find r
in
find x
let find_all_keys p x =
let rec find x =
match x with
| Empty -> []
| Node (l, k, v, r, _) ->
let l' = find l in
let r' = find r in
l' @ (if p k then (k, v)::r' else r')
in
find x
let rec mem x = function
Empty ->
false
| Node(l, v, _, r, _) ->
let c = Ord.compare x v in
c = 0 || mem x (if c < 0 then l else r)
let rec min_binding = function
Empty -> raise Not_found
| Node (Empty, x, d, _, _) -> (x, d)
| Node (l, _, _, _, _) -> min_binding l
let rec remove_min_binding = function
Empty -> invalid_arg "MapOpt.remove_min_elt"
| Node (Empty, _, _, r, _) -> r
| Node (l, x, d, r, _) -> bal (remove_min_binding l) x d r
let merge t1 t2 =
match (t1, t2) with
(Empty, t) -> t
| (t, Empty) -> t
| (_, _) ->
let (x, d) = min_binding t2 in
bal t1 x d (remove_min_binding t2)
let rec remove x = function
Empty -> Empty
| Node(l, v, d, r, _) ->
let c = Ord.compare x v in
if c = 0 then merge l r
else if c < 0 then bal (remove x l) v d r
else bal l v d (remove x r)
let rec find x = function
Empty ->
raise Not_found
| Node(l, v, d, r, _) ->
let c = Ord.compare x v in
if c = 0 then d
else find x (if c < 0 then l else r)
let rec update x f m =
match m with
Empty -> raise Not_found
| Node (l, v, d, r, h) ->
let c = Ord.compare x v in
if c = 0 then Node (l, v, f d, r, h)
else if c < 0 then Node (update x f l, v, d, r, h)
else Node (l, v, d, update x f r, h)
let rec replace x data = function
Empty -> raise Not_found
| Node (l, v, d, r, h) ->
let c = Ord.compare x v in
if c = 0 then Node (l, v, data, r, h)
else if c < 0 then Node (replace x data l, v, d, r, h)
else Node (l, v, d, replace x data r, h)
let rec iteri f x =
match x with
Empty -> ()
| Node (l, v, d, r, _) ->
iteri f l; f v d; iteri f r
let rec iter f = function
Empty -> ()
| Node(l, _, d, r, _) ->
iter f l; f d; iter f r
let iter_from x f t =
let rec iter_from t =
match t with
Node (l, v, d, r, _) ->
let c = Ord.compare x v in
if c < 0 then iter_from l;
if c <= 0 then f v d;
iter_from r
| Empty -> ()
in
iter_from t
let rec iter2 f m1 m2 =
match (m1,m2) with
| (Empty, Empty) -> ()
| (Node(l1, v1, d1, r1, _), Node(l2, v2, d2, r2, _))
when Ord.compare v1 v2 = 0 ->
iter2 f l1 l2; f d1 d2; iter2 f r1 r2
| _ -> invalid_arg "MapOpt.iter2"
let rec iteri2 f m1 m2 =
match (m1,m2) with
| (Empty, Empty) -> ()
| (Node(l1, v1, d1, r1, _), Node(l2, v2, d2, r2, _))
when Ord.compare v1 v2 = 0 ->
iteri2 f l1 l2; f v1 d1 d2; iteri2 f r1 r2
| _ -> invalid_arg "MapOpt.iter2"
let rec map f = function
Empty -> Empty
| Node(l, v, d, r, h) -> Node(map f l, v, f d, map f r, h)
let rec mapi f = function
Empty -> Empty
| Node(l, v, d, r, h) -> Node(mapi f l, v, f v d, mapi f r, h)
let rec set_root k (l, v, d, r, h) =
match (l, r) with
_ when v = k -> (l, v, d, r, h)
| (Node n, _) when Ord.compare k v < 0 ->
let (ll, _, ld, lr, lh) = set_root k n in
(ll, k, ld, Node (lr, v, d, r, h), lh)
| (_, Node n) ->
let (rl, _, rd, rr, rh) = set_root k n in
(Node (l, v, d, rl, h), k, rd, rr, rh)
| _ -> invalid_arg "MapOpt.set_root"
f must be such that f d d = d
m1 and should have the same set of keys
m1 and m2 should have the same set of keys *)
let rec map2 f m1 m2 =
match (m1, m2) with
_ when (m1 == m2) -> m1
| (Node (l1, v1, d1, r1, h1), Node (l2, v2, d2, r2, _))
when (Ord.compare v1 v2 = 0) ->
Node (map2 f l1 l2, v1, f d1 d2, map2 f r1 r2, h1)
| (Node (_, v, _, _, _), Node n) ->
map2 f m1 (Node (set_root v n))
| _ -> invalid_arg "MapOpt.map2_opt"
let rec mapi2 f m1 m2 =
match (m1, m2) with
_ when (m1 == m2) -> m1
| (Node (l1, v1, d1, r1, h1), Node (l2, v2, d2, r2, _))
when (Ord.compare v1 v2 = 0) ->
Node (mapi2 f l1 l2, v1, f v1 d1 d2, mapi2 f r1 r2, h1)
| (Node (_, v, _, _, _), Node n) ->
mapi2 f m1 (Node (set_root v n))
| _ -> invalid_arg "MapOpt.mapi2"
let rec for_all p m =
match m with
Empty -> true
| Node (l, _, d, r, _) -> p d && (for_all p l) && (for_all p r)
let rec for_all2 p m1 m2 =
match (m1,m2) with
_ when m1 == m2 -> true
| (Node(l1, v1, d1, r1, _), Node(l2, v2, d2, r2, _))
when (Ord.compare v1 v2 = 0) ->
(p d1 d2) && (for_all2 p l1 l2) && (for_all2 p r1 r2)
| (Node (_, v, _, _, _), Node n) ->
for_all2 p m1 (Node (set_root v n))
| _ -> invalid_arg "MapOpt.for_all2"
let rec fold f m accu =
match m with
Empty -> accu
| Node(l, v, d, r, _) ->
fold f l (f v d (fold f r accu))
let rec fold2 f m1 m2 accu =
match (m1,m2) with
| (Empty,Empty) -> accu
| (Node(l1, v1, d1, r1, h1),Node(l2, v2, d2, r2, h2))
when (h1 = h2) && (Ord.compare v1 v2 = 0) ->
fold2 f l1 l2 (f v1 d1 d2 (fold2 f r1 r2 accu))
| _ -> invalid_arg "MapOpt.fold2"
let rec exists p m =
match m with
Empty -> false
| Node (l, _, d, r, _) -> (p d) || (exists p l) || (exists p r)
type 'a enumeration = End | More of key * 'a * 'a t * 'a enumeration
let rec cons_enum m e =
match m with
Empty -> e
| Node(l, v, d, r, _) -> cons_enum l (More(v, d, r, e))
let compare cmp m1 m2 =
let rec compare_aux e1 e2 =
match (e1, e2) with
(End, End) -> 0
| (End, _) -> -1
| (_, End) -> 1
| (More(v1, d1, r1, e1), More(v2, d2, r2, e2)) ->
let c = Ord.compare v1 v2 in
if c <> 0 then c else
let c = cmp d1 d2 in
if c <> 0 then c else
compare_aux (cons_enum r1 e1) (cons_enum r2 e2)
in compare_aux (cons_enum m1 End) (cons_enum m2 End)
let equal cmp m1 m2 =
let rec equal_aux e1 e2 =
match (e1, e2) with
(End, End) -> true
| (End, _) -> false
| (_, End) -> false
| (More(v1, d1, r1, e1), More(v2, d2, r2, e2)) ->
Ord.compare v1 v2 = 0 && cmp d1 d2 &&
equal_aux (cons_enum r1 e1) (cons_enum r2 e2)
in equal_aux (cons_enum m1 End) (cons_enum m2 End)
let concat m1 m2 =
let result = ref m1 in
let add key data = result := add key data !result in
iteri add m2;
!result
let rec cardinal m =
match m with
| Node (l, _, _, r, _) -> cardinal l + cardinal r + 1
| Empty -> 0
end
|
4675d1544df4697db5f828e521e079f85ff85ac66d553f16941b6e7101036428 | inaka/elvis_core | pass_export_used_types.erl | -module pass_export_used_types.
-type my_type() :: my | type.
-type private_type(X) :: {X}.
-export_type [my_type/0].
-export [my_fun/1].
-spec my_fun(my_type()) -> my_type().
my_fun(my) -> type;
my_fun(type) ->
private_fun(none),
my.
% ignore types only used by private functions
-spec private_fun(X) -> private_type(X).
private_fun(none) -> {none}.
| null | https://raw.githubusercontent.com/inaka/elvis_core/72df3ba9dae03cb070fa7af3b266c38bb3095778/test/examples/pass_export_used_types.erl | erlang | ignore types only used by private functions | -module pass_export_used_types.
-type my_type() :: my | type.
-type private_type(X) :: {X}.
-export_type [my_type/0].
-export [my_fun/1].
-spec my_fun(my_type()) -> my_type().
my_fun(my) -> type;
my_fun(type) ->
private_fun(none),
my.
-spec private_fun(X) -> private_type(X).
private_fun(none) -> {none}.
|
87e70ad69cdf42001dddfa8b21c96aecc7eacb08656c4c5761adc31a2ba8b335 | pflanze/chj-schemelib | read-csv.scm | Copyright 2016 - 2019 by < >
;;; This file is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License ( GPL ) as published
by the Free Software Foundation , either version 2 of the License , or
;;; (at your option) any later version.
(require easy
eol
csv-defaults
(predicates length-=)
jclass
stream
oo-lib-vector
(string-util-3 string.replace-substrings)
error
(spreadsheet-reference (class spreadsheet-reference-absolute))
(cj-path FILE)
(cj-io-util dirname))
(export (class read-csv-error)
(class csv-cell)
csv-cell-of
possibly-csv-cell-of
possibly-csv-cell.value ;; *not* a method
x-csv-cell-of
X-csv-cell-of ;; allows unwrapped inputs, too (if match predicate)
(method csv-cell.xvalue-of)
;; The main use:
csv-file-stream
csv-port-stream
;; Heavy OO:
(class csv-reader)
(interface input-provider
(class file-input-provider)))
;; type error reporting
(defclass (csv-type-error maybe-nested-error ;; force |error?| here instead of BUG msg below?
[csv-cell? cell])
implements: error-interface
(defmethod (string s)
(string-append
(if maybe-nested-error
(if (error? maybe-nested-error)
(string-append (.string maybe-nested-error)
" ")
"(BUG while reporting: invalid type of nested error) ")
"")
"at "
(.error-string cell))))
;;XX this should be in some interface file, once useful methods were
;;added; also, confusion with exception/continuation. Also, implements
;;error ? Also, rename error to error-interface ?
(defclass (error/continuation [continuation? continuation]))
;; read/parse error reporting
(defclass (read-csv-error [(either string? port?) path-or-port]
[fixnum-natural0? line]
;; error_diag values from the perl library
[fixnum-natural0? cde]
[string? message]
[(maybe fixnum-natural0?) column])
extends: error/continuation
implements: error-interface ;; <- put this into error/continuation class?
;; XX .string is old code, instead use .location now internally?
;; And/or introduce a global protocol for errors which are not just
;; location but the whole thing? Well, that's what the error
;; protocol already should be (see note above).
(defmethod (string s)
($ (string.replace-substrings message "QUO character" "quote character")
" in "
(object->string (if (port? path-or-port)
(.name path-or-port)
path-or-port))
" line "
line
(if column
($ " pos $column")
"")))
(defmethod (location s)
(location (if (port? path-or-port)
(.name path-or-port)
path-or-port)
(position line column)))
(defmethod (csv-type-error s maybe-nested-error)
(csv-type-error maybe-nested-error)))
;; location tracking
(defclass (csv-cell [(maybe string?) value]
[(either string? port?) path-or-port]
[fixnum-natural? rowno]
[fixnum-natural? colno])
(defmethod (error-string s)
($ "row "
rowno
" col "
colno
" ("
(.formula-string-fragment (spreadsheet-reference-absolute #f rowno colno))
") in file "
(object->string path-or-port))))
;; evil naming to use a dot if it's not a method; sigh, but I don't
;; want to have to change more than prefix possibly- to change the
;; usage case.
(def (possibly-csv-cell.value v)
(if (csv-cell? v)
(@csv-cell.value v)
v))
(def ((csv-cell-of pred) v)
(if (csv-cell? v)
(let ((w (pred (@csv-cell.value v))))
(if (eq? w #t)
#t
(csv-type-error w v)))
#f))
(def ((possibly-csv-cell-of pred) v)
(if (csv-cell? v)
(let ((w (pred (@csv-cell.value v))))
(if (eq? w #t)
#t
(csv-type-error w v)))
(pred v)))
(def (@x-csv-cell-of v pred msg)
(let* ((val (@csv-cell.value v))
(w (pred val)))
(if (eq? w #t)
val
(error ($ (if msg ($ msg ": ") "")
"expecting a "
;; XX oh, ()almost?) need macro for this,
;; too?
(object->string (try-show pred))
" "
;; XX show actual value? consistency?
(csv-type-error w v))))))
;; could be a method but then order of arguments would be wrong and
;; dunno?; (Should this be a macro to tell the expression like
;; cj-typed does? No, right?)
(def (x-csv-cell-of v pred #!optional msg)
(if (csv-cell? v)
(@x-csv-cell-of v pred msg)
(error "not a csv-cell:" v)))
;; and then, still, too?
(def. csv-cell.xvalue-of x-csv-cell-of)
;; Variant that allows unwrapped values, too:
(def (X-csv-cell-of v pred #!optional msg)
(if (csv-cell? v)
(@x-csv-cell-of v pred msg)
;; (-> pred v) no, since it's not a macro now we have to do
;; runtime:
(let* ((val v)
(w (pred val)))
(if (eq? w #t)
val
(error ($ (if msg ($ msg ": ") "")
"expecting a "
;; XX oh, ()almost?) need macro for this,
;; too?
(object->string (try-show pred))
" "
;; XX show actual value? consistency?
(if w
(.string w)
"")))))))
(TEST
> (def c (csv-cell "hi" "foo.csv" 1039 4))
> (.error-string c)
"row 1039 col 4 (D1039) in file \"foo.csv\""
> ((csv-cell-of string?) c)
#t
> (show ((csv-cell-of nothing?) c))
(csv-type-error #f (csv-cell "hi" "foo.csv" 1039 4))
> (x-csv-cell-of c string?)
"hi"
> (%try (x-csv-cell-of c symbol?))
(exception
text:
"expecting a symbol? at row 1039 col 4 (D1039) in file \"foo.csv\"\n")
> (%try (x-csv-cell-of c number? "expecting the number of beers"))
(exception
text:
"expecting the number of beers: expecting a number? at row 1039 col 4 (D1039) in file \"foo.csv\"\n")
> (%try (X-csv-cell-of c number? "expecting the number of beers"))
(exception
text:
"expecting the number of beers: expecting a number? at row 1039 col 4 (D1039) in file \"foo.csv\"\n")
> (%try (X-csv-cell-of "foo" number? "expecting the number of beers"))
(exception text: "expecting the number of beers: expecting a number? \n")
> (%try (X-csv-cell-of "foo" number?))
(exception text: "expecting a number? \n")
> (X-csv-cell-of 123 number? "expecting the number of beers")
123
)
(def (_csv-port-stream port
maybe-file-or-port
source?
#!optional (tail '()))
(let lp ((rowno 1))
(delay
(let ((line (read-line port)))
(if (eof-object? line)
(begin
(close-port port)
(assert (zero? (process-status port)))
tail)
(let ((vals-or-signal
(xone (call-with-input-string line read-all)))
(rest (lp (inc rowno))))
(xcond
((and (vector? vals-or-signal)
(> (vector-length vals-or-signal) 0))
(let ((signal vals-or-signal))
(xcase (vector-ref signal 0)
((OK)
(if (null? (force rest))
tail
(error "read-csv bug: did get OK signal before end of output")))
((ERROR)
(assert (= (vector-length signal) 6))
(continuation-capture
(lambda (cont)
(read-csv-error cont
;; path-or-port:
( vector - ref signal 1 )
;; ^ OK re SECURITY? alternative:
;; The above is just "-", use this:
(or maybe-file-or-port port)
;; lineno:
(vector-ref signal 2)
;; cde:
(vector-ref signal 3)
;; message:
(vector-ref signal 4)
;; maybe pos:
(vector-ref signal 5))))))))
((ilist? vals-or-signal)
(let ((vals vals-or-signal))
(cons (if (and source? maybe-file-or-port)
(map/iota (lambda (val colno)
(csv-cell val
maybe-file-or-port
rowno
(inc colno)))
vals)
vals)
rest))))))))))
(def (csv-file-stream path
#!key
([char? sep-char] (current-csv-input-sep-char))
([eol-name? eol] (current-csv-input-eol))
(tail '())
source?)
(_csv-port-stream (open-input-process
(list path: "lib/csv2sexpr"
arguments: (list path
"-"
(string sep-char)
(symbol.string eol))
char-encoding: 'UTF-8))
path
source?
tail))
;;XX lib (and lostontie?)
(def (send-file-strings inport outport)
;; XX well.
(let ((str (read-line inport #f)))
(display str outport)))
(def (csv-port-stream port
#!key
([char? sep-char] (current-csv-input-sep-char))
([eol-name? eol] (current-csv-input-eol))
(tail '())
maybe-source
(source? #t))
(let ((p (open-process
(list path: (path-append (dirname (FILE))
"csv2sexpr")
arguments: (list "-"
"-"
(string sep-char)
(symbol.string eol))
WOW , would really need different encoding for
;; input vs. output. If I wanted to process bytes
;; inbetween; breaking down so much. But yeah,
;; char-encoding when opening orig file is done,
;; then we have strings, we write them here as
UTF-8 and read them back as the same , so ,
;; actually fine here. "send-file-strings kinda"
;; does the transcoding.
char-encoding: 'UTF-8
stdout-redirection: #t
stdin-redirection: #t))))
(future
(let lp ()
(send-file-strings port p)
(close-output-port p)
XX btw TODO : check status , do n't even do that in csv - file - stream !
(close-port port)))
(_csv-port-stream p
(or maybe-source port)
(and maybe-source source?)
tail)))
;; XX add more
(def char-encodings '(UTF-8))
(def (char-encoding? v)
(and (memq v char-encodings) #t))
(jinterface input-provider
(method (open) -> input-port?)
(jclass (file-input-provider [path-string? path-string]
[char-encoding? char-encoding])
(def-method (open s)
(open-input-file (list path: path-string
char-encoding: char-encoding)))
;; (def-method (close s port)
;; (close-port port)) unused
))
;;XX lib
(def (filter/snd vals keep?s)
(if (or (null? vals) (null? keep?s))
'()
(let-pair ((v vals) vals)
(let-pair ((k? keep?s) keep?s)
(let ((r (filter/snd vals keep?s)))
(if k?
(cons v r)
r))))))
(TEST
> (filter/snd '(a b c) '(#f #t #f))
(b)
> (filter/snd '(a b c) '(#f #t #t))
(b c)
> (filter/snd '(a b c) '(#f #t))
(b)
> (filter/snd '(a b c) '(#f))
()
> (filter/snd '(a b c) '(#t))
(a)
;; XX hmm what about these, really??:
> (filter/snd '(a b c) '(#f #t #t #f))
(b c)
> (filter/snd '(a b c) '(#f #t #t #t))
(b c))
(jclass (csv-reader [input-provider? input-provider]
#!key
[char? sep-char]
[eol-name? eol]
[(maybe natural0?) maybe-head-skip]
[boolean? skip-last?]
[(maybe (list-of natural0?)) maybe-columns])
(def-method (stream s)
(let* ((s (csv-port-stream (.open input-provider)
sep-char: sep-char
eol: eol))
(s (if maybe-head-skip
(stream-drop s maybe-head-skip)
s))
(s (if skip-last?
(stream-butlast s)
s))
(s (if maybe-columns
(stream-map (lambda (row)
(map (let ((row* (list.vector row)))
(lambda (i)
(vector.ref row* i)))
maybe-columns))
s)
s)))
s)))
| null | https://raw.githubusercontent.com/pflanze/chj-schemelib/59ff8476e39f207c2f1d807cfc9670581c8cedd3/read-csv.scm | scheme | This file is free software; you can redistribute it and/or modify
(at your option) any later version.
*not* a method
allows unwrapped inputs, too (if match predicate)
The main use:
Heavy OO:
type error reporting
force |error?| here instead of BUG msg below?
XX this should be in some interface file, once useful methods were
added; also, confusion with exception/continuation. Also, implements
error ? Also, rename error to error-interface ?
read/parse error reporting
error_diag values from the perl library
<- put this into error/continuation class?
XX .string is old code, instead use .location now internally?
And/or introduce a global protocol for errors which are not just
location but the whole thing? Well, that's what the error
protocol already should be (see note above).
location tracking
evil naming to use a dot if it's not a method; sigh, but I don't
want to have to change more than prefix possibly- to change the
usage case.
XX oh, ()almost?) need macro for this,
too?
XX show actual value? consistency?
could be a method but then order of arguments would be wrong and
dunno?; (Should this be a macro to tell the expression like
cj-typed does? No, right?)
and then, still, too?
Variant that allows unwrapped values, too:
(-> pred v) no, since it's not a macro now we have to do
runtime:
XX oh, ()almost?) need macro for this,
too?
XX show actual value? consistency?
path-or-port:
^ OK re SECURITY? alternative:
The above is just "-", use this:
lineno:
cde:
message:
maybe pos:
XX lib (and lostontie?)
XX well.
input vs. output. If I wanted to process bytes
inbetween; breaking down so much. But yeah,
char-encoding when opening orig file is done,
then we have strings, we write them here as
actually fine here. "send-file-strings kinda"
does the transcoding.
XX add more
(def-method (close s port)
(close-port port)) unused
XX lib
XX hmm what about these, really??: | Copyright 2016 - 2019 by < >
it under the terms of the GNU General Public License ( GPL ) as published
by the Free Software Foundation , either version 2 of the License , or
(require easy
eol
csv-defaults
(predicates length-=)
jclass
stream
oo-lib-vector
(string-util-3 string.replace-substrings)
error
(spreadsheet-reference (class spreadsheet-reference-absolute))
(cj-path FILE)
(cj-io-util dirname))
(export (class read-csv-error)
(class csv-cell)
csv-cell-of
possibly-csv-cell-of
x-csv-cell-of
(method csv-cell.xvalue-of)
csv-file-stream
csv-port-stream
(class csv-reader)
(interface input-provider
(class file-input-provider)))
[csv-cell? cell])
implements: error-interface
(defmethod (string s)
(string-append
(if maybe-nested-error
(if (error? maybe-nested-error)
(string-append (.string maybe-nested-error)
" ")
"(BUG while reporting: invalid type of nested error) ")
"")
"at "
(.error-string cell))))
(defclass (error/continuation [continuation? continuation]))
(defclass (read-csv-error [(either string? port?) path-or-port]
[fixnum-natural0? line]
[fixnum-natural0? cde]
[string? message]
[(maybe fixnum-natural0?) column])
extends: error/continuation
(defmethod (string s)
($ (string.replace-substrings message "QUO character" "quote character")
" in "
(object->string (if (port? path-or-port)
(.name path-or-port)
path-or-port))
" line "
line
(if column
($ " pos $column")
"")))
(defmethod (location s)
(location (if (port? path-or-port)
(.name path-or-port)
path-or-port)
(position line column)))
(defmethod (csv-type-error s maybe-nested-error)
(csv-type-error maybe-nested-error)))
(defclass (csv-cell [(maybe string?) value]
[(either string? port?) path-or-port]
[fixnum-natural? rowno]
[fixnum-natural? colno])
(defmethod (error-string s)
($ "row "
rowno
" col "
colno
" ("
(.formula-string-fragment (spreadsheet-reference-absolute #f rowno colno))
") in file "
(object->string path-or-port))))
(def (possibly-csv-cell.value v)
(if (csv-cell? v)
(@csv-cell.value v)
v))
(def ((csv-cell-of pred) v)
(if (csv-cell? v)
(let ((w (pred (@csv-cell.value v))))
(if (eq? w #t)
#t
(csv-type-error w v)))
#f))
(def ((possibly-csv-cell-of pred) v)
(if (csv-cell? v)
(let ((w (pred (@csv-cell.value v))))
(if (eq? w #t)
#t
(csv-type-error w v)))
(pred v)))
(def (@x-csv-cell-of v pred msg)
(let* ((val (@csv-cell.value v))
(w (pred val)))
(if (eq? w #t)
val
(error ($ (if msg ($ msg ": ") "")
"expecting a "
(object->string (try-show pred))
" "
(csv-type-error w v))))))
(def (x-csv-cell-of v pred #!optional msg)
(if (csv-cell? v)
(@x-csv-cell-of v pred msg)
(error "not a csv-cell:" v)))
(def. csv-cell.xvalue-of x-csv-cell-of)
(def (X-csv-cell-of v pred #!optional msg)
(if (csv-cell? v)
(@x-csv-cell-of v pred msg)
(let* ((val v)
(w (pred val)))
(if (eq? w #t)
val
(error ($ (if msg ($ msg ": ") "")
"expecting a "
(object->string (try-show pred))
" "
(if w
(.string w)
"")))))))
(TEST
> (def c (csv-cell "hi" "foo.csv" 1039 4))
> (.error-string c)
"row 1039 col 4 (D1039) in file \"foo.csv\""
> ((csv-cell-of string?) c)
#t
> (show ((csv-cell-of nothing?) c))
(csv-type-error #f (csv-cell "hi" "foo.csv" 1039 4))
> (x-csv-cell-of c string?)
"hi"
> (%try (x-csv-cell-of c symbol?))
(exception
text:
"expecting a symbol? at row 1039 col 4 (D1039) in file \"foo.csv\"\n")
> (%try (x-csv-cell-of c number? "expecting the number of beers"))
(exception
text:
"expecting the number of beers: expecting a number? at row 1039 col 4 (D1039) in file \"foo.csv\"\n")
> (%try (X-csv-cell-of c number? "expecting the number of beers"))
(exception
text:
"expecting the number of beers: expecting a number? at row 1039 col 4 (D1039) in file \"foo.csv\"\n")
> (%try (X-csv-cell-of "foo" number? "expecting the number of beers"))
(exception text: "expecting the number of beers: expecting a number? \n")
> (%try (X-csv-cell-of "foo" number?))
(exception text: "expecting a number? \n")
> (X-csv-cell-of 123 number? "expecting the number of beers")
123
)
(def (_csv-port-stream port
maybe-file-or-port
source?
#!optional (tail '()))
(let lp ((rowno 1))
(delay
(let ((line (read-line port)))
(if (eof-object? line)
(begin
(close-port port)
(assert (zero? (process-status port)))
tail)
(let ((vals-or-signal
(xone (call-with-input-string line read-all)))
(rest (lp (inc rowno))))
(xcond
((and (vector? vals-or-signal)
(> (vector-length vals-or-signal) 0))
(let ((signal vals-or-signal))
(xcase (vector-ref signal 0)
((OK)
(if (null? (force rest))
tail
(error "read-csv bug: did get OK signal before end of output")))
((ERROR)
(assert (= (vector-length signal) 6))
(continuation-capture
(lambda (cont)
(read-csv-error cont
( vector - ref signal 1 )
(or maybe-file-or-port port)
(vector-ref signal 2)
(vector-ref signal 3)
(vector-ref signal 4)
(vector-ref signal 5))))))))
((ilist? vals-or-signal)
(let ((vals vals-or-signal))
(cons (if (and source? maybe-file-or-port)
(map/iota (lambda (val colno)
(csv-cell val
maybe-file-or-port
rowno
(inc colno)))
vals)
vals)
rest))))))))))
(def (csv-file-stream path
#!key
([char? sep-char] (current-csv-input-sep-char))
([eol-name? eol] (current-csv-input-eol))
(tail '())
source?)
(_csv-port-stream (open-input-process
(list path: "lib/csv2sexpr"
arguments: (list path
"-"
(string sep-char)
(symbol.string eol))
char-encoding: 'UTF-8))
path
source?
tail))
(def (send-file-strings inport outport)
(let ((str (read-line inport #f)))
(display str outport)))
(def (csv-port-stream port
#!key
([char? sep-char] (current-csv-input-sep-char))
([eol-name? eol] (current-csv-input-eol))
(tail '())
maybe-source
(source? #t))
(let ((p (open-process
(list path: (path-append (dirname (FILE))
"csv2sexpr")
arguments: (list "-"
"-"
(string sep-char)
(symbol.string eol))
WOW , would really need different encoding for
UTF-8 and read them back as the same , so ,
char-encoding: 'UTF-8
stdout-redirection: #t
stdin-redirection: #t))))
(future
(let lp ()
(send-file-strings port p)
(close-output-port p)
XX btw TODO : check status , do n't even do that in csv - file - stream !
(close-port port)))
(_csv-port-stream p
(or maybe-source port)
(and maybe-source source?)
tail)))
(def char-encodings '(UTF-8))
(def (char-encoding? v)
(and (memq v char-encodings) #t))
(jinterface input-provider
(method (open) -> input-port?)
(jclass (file-input-provider [path-string? path-string]
[char-encoding? char-encoding])
(def-method (open s)
(open-input-file (list path: path-string
char-encoding: char-encoding)))
))
(def (filter/snd vals keep?s)
(if (or (null? vals) (null? keep?s))
'()
(let-pair ((v vals) vals)
(let-pair ((k? keep?s) keep?s)
(let ((r (filter/snd vals keep?s)))
(if k?
(cons v r)
r))))))
(TEST
> (filter/snd '(a b c) '(#f #t #f))
(b)
> (filter/snd '(a b c) '(#f #t #t))
(b c)
> (filter/snd '(a b c) '(#f #t))
(b)
> (filter/snd '(a b c) '(#f))
()
> (filter/snd '(a b c) '(#t))
(a)
> (filter/snd '(a b c) '(#f #t #t #f))
(b c)
> (filter/snd '(a b c) '(#f #t #t #t))
(b c))
(jclass (csv-reader [input-provider? input-provider]
#!key
[char? sep-char]
[eol-name? eol]
[(maybe natural0?) maybe-head-skip]
[boolean? skip-last?]
[(maybe (list-of natural0?)) maybe-columns])
(def-method (stream s)
(let* ((s (csv-port-stream (.open input-provider)
sep-char: sep-char
eol: eol))
(s (if maybe-head-skip
(stream-drop s maybe-head-skip)
s))
(s (if skip-last?
(stream-butlast s)
s))
(s (if maybe-columns
(stream-map (lambda (row)
(map (let ((row* (list.vector row)))
(lambda (i)
(vector.ref row* i)))
maybe-columns))
s)
s)))
s)))
|
f0a9ba6f9ec5618377a9e03b09719c504795691746595ae7be6428a1fdd0e081 | HunterYIboHu/htdp2-solution | ex268-ir-sort.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-intermediate-reader.ss" "lang")((modname ex268-ir-sort) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
; structs
(define-struct IR [name description acq-price sale-price])
; an inventory record is (make-IR String String Number Number)
; (make-IR n d ap sp) the n means the name of IR, and the d is the description
; and the ap, sp are acquisition price and sale price.
(define IR-1 (make-IR "delicious apple"
"An special kind of apple which come from Malizia"
1000
1500))
(define IR-2 (make-IR "banana"
"Just a banana, a little yellow."
996
1205))
(define IR-3 (make-IR "Salt"
"Come from Hainan, salty."
540
999))
; functions
; [List-of IR] -> [List-of IR]
sort the given l by the difference between the two prices .
the larger one first .
(check-expect (sort-ir `(,IR-1 ,IR-2 ,IR-3)) `(,IR-1 ,IR-3 ,IR-2))
(check-expect (sort-ir `(,IR-2 ,IR-3 ,IR-1)) `(,IR-1 ,IR-3 ,IR-2))
(define (sort-ir l)
(local (; IR -> Number
get the difference between the given IR 's two prices .
(define (get-diff ir)
(- (IR-sale-price ir)
(IR-acq-price ir)))
; IR IR -> Boolean
compare the difference of the two ir and
if the first one larger than the second , return # true .
(define (cmp-diff ir-1 ir-2)
(> (get-diff ir-1) (get-diff ir-2))))
(sort l cmp-diff)))
| null | https://raw.githubusercontent.com/HunterYIboHu/htdp2-solution/6182b4c2ef650ac7059f3c143f639d09cd708516/Chapter3-Abstraction/Section16-Using-Abstraction/ex268-ir-sort.rkt | racket | about the language level of this file in a form that our tools can easily process.
structs
an inventory record is (make-IR String String Number Number)
(make-IR n d ap sp) the n means the name of IR, and the d is the description
and the ap, sp are acquisition price and sale price.
functions
[List-of IR] -> [List-of IR]
IR -> Number
IR IR -> Boolean | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-intermediate-reader.ss" "lang")((modname ex268-ir-sort) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
(define-struct IR [name description acq-price sale-price])
(define IR-1 (make-IR "delicious apple"
"An special kind of apple which come from Malizia"
1000
1500))
(define IR-2 (make-IR "banana"
"Just a banana, a little yellow."
996
1205))
(define IR-3 (make-IR "Salt"
"Come from Hainan, salty."
540
999))
sort the given l by the difference between the two prices .
the larger one first .
(check-expect (sort-ir `(,IR-1 ,IR-2 ,IR-3)) `(,IR-1 ,IR-3 ,IR-2))
(check-expect (sort-ir `(,IR-2 ,IR-3 ,IR-1)) `(,IR-1 ,IR-3 ,IR-2))
(define (sort-ir l)
get the difference between the given IR 's two prices .
(define (get-diff ir)
(- (IR-sale-price ir)
(IR-acq-price ir)))
compare the difference of the two ir and
if the first one larger than the second , return # true .
(define (cmp-diff ir-1 ir-2)
(> (get-diff ir-1) (get-diff ir-2))))
(sort l cmp-diff)))
|
506d39d2c41d3320a3fa3ec7e2aa70f88dbf561f702cad54401f513b2ad316f2 | subttle/regular | TransitionGraph.hs | # LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
module TransitionGraph where
import Algebra.Graph.Relation as Relation (Relation, gmap, transpose)
import Data.Foldable (Foldable (toList))
import Data.Functor.Contravariant (Contravariant (..))
import Common (quoteWith)
import Finite (Finite (..), Q (..), Σ (..))
-- Transition Graph of an automaton
newtype TG q s = TG ( s → Relation q)
-- Transition Graph of an automaton with ε-transitions
newtype ETG q s = ETG (Maybe s → Relation q)
instance (Finite q) ⇒ Q (TG q s) q
instance (Finite s) ⇒ Σ (TG q s) s
instance (Finite q) ⇒ Q (ETG q s) q
instance (Finite s) ⇒ Σ (ETG q s) s
instance Contravariant (TG q) where
contramap ∷ (a → b) → TG q b → TG q a
contramap f (TG g) = TG (g . f)
instance Contravariant (ETG q) where
contramap ∷ (a → b) → ETG q b → ETG q a
contramap f (ETG g) = ETG (g . fmap f)
instance (Show q, Show s, Finite q, Finite s) ⇒ Show (TG q s) where
show ∷ TG q s → String
show (TG m) = unlines (fmap (\s → quoteWith (show s) (show (m s)) " → ") (toList (sigma (TG m))))
instance (Show q, Show s, Finite q, Finite s) ⇒ Show (ETG q s) where
show ∷ ETG q s → String
show (ETG m) = unlines (fmap (\s → quoteWith (show s) (show (m s)) " → ") (toList (sigma_ε (ETG m))))
reverse ∷ (Ord q) ⇒ TG q s → TG q s
reverse (TG g) = TG (Relation.transpose . g)
map ∷ (Ord p) ⇒ (q → p) → TG q s → TG p s
map f (TG g) = TG (gmap f . g)
| null | https://raw.githubusercontent.com/subttle/regular/1a9e71cb1c43cb3215b9331efa98d4dcf21fbf9c/src/TransitionGraph.hs | haskell | Transition Graph of an automaton
Transition Graph of an automaton with ε-transitions | # LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
module TransitionGraph where
import Algebra.Graph.Relation as Relation (Relation, gmap, transpose)
import Data.Foldable (Foldable (toList))
import Data.Functor.Contravariant (Contravariant (..))
import Common (quoteWith)
import Finite (Finite (..), Q (..), Σ (..))
newtype TG q s = TG ( s → Relation q)
newtype ETG q s = ETG (Maybe s → Relation q)
instance (Finite q) ⇒ Q (TG q s) q
instance (Finite s) ⇒ Σ (TG q s) s
instance (Finite q) ⇒ Q (ETG q s) q
instance (Finite s) ⇒ Σ (ETG q s) s
instance Contravariant (TG q) where
contramap ∷ (a → b) → TG q b → TG q a
contramap f (TG g) = TG (g . f)
instance Contravariant (ETG q) where
contramap ∷ (a → b) → ETG q b → ETG q a
contramap f (ETG g) = ETG (g . fmap f)
instance (Show q, Show s, Finite q, Finite s) ⇒ Show (TG q s) where
show ∷ TG q s → String
show (TG m) = unlines (fmap (\s → quoteWith (show s) (show (m s)) " → ") (toList (sigma (TG m))))
instance (Show q, Show s, Finite q, Finite s) ⇒ Show (ETG q s) where
show ∷ ETG q s → String
show (ETG m) = unlines (fmap (\s → quoteWith (show s) (show (m s)) " → ") (toList (sigma_ε (ETG m))))
reverse ∷ (Ord q) ⇒ TG q s → TG q s
reverse (TG g) = TG (Relation.transpose . g)
map ∷ (Ord p) ⇒ (q → p) → TG q s → TG p s
map f (TG g) = TG (gmap f . g)
|
71464312c4efe078a6e78b0cd515028c15b1d9e354340e33951f72626ef16535 | myShoggoth/deckbuilder | LegendarySpec.hs | # LANGUAGE AllowAmbiguousTypes #
# LANGUAGE DataKinds #
# LANGUAGE DeriveGeneric #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE FlexibleContexts #
# LANGUAGE NoMonomorphismRestriction #
# LANGUAGE TypeApplications #
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings #-}
module LegendarySpec
( spec
) where
import Control.Lens
import Control.Monad.RWS
import Data.Generics.Product
import DeckBuilding
import DeckBuilding . Legendary
import DeckBuilding . Legendary . Cards . Base
import DeckBuilding . Legendary . Strategies . Basic
import DeckBuilding . Legendary . Types
import DeckBuilding . Legendary . Utils
import DeckBuilding.Types
import System.Random
import Test.Hspec
spec :: Spec
spec = describe "TODO: fix me" $
it "is a tautology" $
1 `shouldBe` 1
spec : : Spec
spec = do
let g = mkStdGen 45752345316
let c =
[ ( " Player 1 " , dumbStrategy )
, ( " Player 2 " , dumbStrategy )
]
1
[ ]
[ doombots , mastersOfEvil , mastersOfEvil ]
( take 30 $ repeat bystander )
defaultCity
legacyVirus
drdoom
[ g ]
let p0 = PlayerNumber 0
p1 = PlayerNumber 1
let dg = configToGame c g
let afterDeal = fst $ execRWS ( deal 6 p0 ) c dg
let ( Just p1AfterDeal ) = afterDeal ^ ? # players . ix 0
let afterDeal2 = fst $ execRWS ( deal 6 p1 ) c afterDeal
let afterEvaluate = fst $ execRWS ( evaluateHand p0 ) c afterDeal2
let ( Just p1AfterEvaluate ) = afterEvaluate ^ ? # players . ix 0
let afterReset = fst $ execRWS ( resetTurn p0 ) c afterEvaluate
let ( Just p1AfterReset ) = afterReset ^ ? # players . ix 0
describe " Utils.deal " $ do
it " deals the correct number of cards " $ do
length ( p1AfterDeal ^. # hand ) ` shouldBe ` 6
length ( p1AfterDeal ^. # deck ) ` shouldBe ` 6
length ( p1AfterDeal ^. # discard ) ` shouldBe ` 0
it " has a total of eight shield agents " $
length ( filter (= = shieldAgent ) ( ( p1AfterDeal ^. # hand ) + + ( p1AfterDeal ^. # deck ) ) ) ` shouldBe ` 8
it " has a total of four field troopers " $
length ( filter (= = shieldTrooper ) ( ( p1AfterDeal ^. # hand ) + + ( p1AfterDeal ^. # deck ) ) ) ` shouldBe ` 4
describe " evaluateHand " $ do
it " has no more cards in hand " $ do
length ( p1AfterEvaluate ^. # played ) ` shouldBe ` 6
length ( p1AfterEvaluate ^. # hand ) ` shouldBe ` 0
length ( p1AfterDeal ^. # discard ) ` shouldBe ` 0
it " calculates the right amount of money " $
p1AfterEvaluate ^. # unusedMoney ` shouldBe ` length ( filter (= = shieldAgent ) ( p1AfterEvaluate ^. # played ) )
{ - Do n't calculate victory points as we go anymore
it " calculates the right amount of victory " $
p1AfterEvaluate ^. # victory ` shouldBe ` length ( filter (= = estateCard ) ( p1AfterEvaluate ^. # played ) )
spec :: Spec
spec = do
let g = mkStdGen 45752345316
let c = LegendaryConfig
[ ("Player 1", dumbStrategy)
, ("Player 2", dumbStrategy)
]
1
[]
[doombots, mastersOfEvil, mastersOfEvil]
(take 30 $ repeat bystander)
defaultCity
legacyVirus
drdoom
[g]
let p0 = PlayerNumber 0
p1 = PlayerNumber 1
let dg = configToGame c g
let afterDeal = fst $ execRWS (deal 6 p0) c dg
let (Just p1AfterDeal) = afterDeal ^? #players . ix 0
let afterDeal2 = fst $ execRWS (deal 6 p1) c afterDeal
let afterEvaluate = fst $ execRWS (evaluateHand p0) c afterDeal2
let (Just p1AfterEvaluate) = afterEvaluate ^? #players . ix 0
let afterReset = fst $ execRWS (resetTurn p0) c afterEvaluate
let (Just p1AfterReset) = afterReset ^? #players . ix 0
describe "Utils.deal" $ do
it "deals the correct number of cards" $ do
length (p1AfterDeal ^. #hand) `shouldBe` 6
length (p1AfterDeal ^. #deck) `shouldBe` 6
length (p1AfterDeal ^. #discard) `shouldBe` 0
it "has a total of eight shield agents" $
length (filter (== shieldAgent) ((p1AfterDeal ^. #hand) ++ (p1AfterDeal ^. #deck))) `shouldBe` 8
it "has a total of four field troopers" $
length (filter (== shieldTrooper) ((p1AfterDeal ^. #hand) ++ (p1AfterDeal ^. #deck))) `shouldBe` 4
describe "evaluateHand" $ do
it "has no more cards in hand" $ do
length (p1AfterEvaluate ^. #played) `shouldBe` 6
length (p1AfterEvaluate ^. #hand) `shouldBe` 0
length (p1AfterDeal ^. #discard) `shouldBe` 0
it "calculates the right amount of money" $
p1AfterEvaluate ^. #unusedMoney `shouldBe` length (filter (== shieldAgent) (p1AfterEvaluate ^. #played))
{- Don't calculate victory points as we go anymore
it "calculates the right amount of victory" $
p1AfterEvaluate ^. #victory `shouldBe` length (filter (== estateCard) (p1AfterEvaluate ^. #played))
-}
describe "resetTurn" $ do
it "has an empty played pile" $
length (p1AfterReset ^. #played) `shouldBe` 0
it "has zero money" $
(p1AfterReset ^. #unusedMoney) `shouldBe` 0
it "has zero victory" $
(p1AfterReset ^. #victory) `shouldBe` 0
it "has an empty hand" $
length (p1AfterReset ^. #hand) `shouldBe` 0
describe "doTurn" $ do
let afterDoTurn = fst $ execRWS ((runTurn p0) :: LegendaryState Bool) c afterDeal2
it "bought a card" $ do
length ((afterDoTurn ^. #players) !! 0 ^. #discard) `shouldBe` 6
describe "doTurns" $ do
let afterDoTurns = fst $ execRWS ((runTurns (PlayerNumber <$> [0..1]) False) :: LegendaryState Bool) c afterDeal2
it "has players with more cards" $ do
length ((afterDoTurns ^. #players) !! 0 ^. #discard) `shouldBe` 6
-}
| null | https://raw.githubusercontent.com/myShoggoth/deckbuilder/2f2f6613b0edd80610d30405b21192470f9e0bff/test/LegendarySpec.hs | haskell | # LANGUAGE ScopedTypeVariables #
# LANGUAGE OverloadedStrings #
Don't calculate victory points as we go anymore
it "calculates the right amount of victory" $
p1AfterEvaluate ^. #victory `shouldBe` length (filter (== estateCard) (p1AfterEvaluate ^. #played))
| # LANGUAGE AllowAmbiguousTypes #
# LANGUAGE DataKinds #
# LANGUAGE DeriveGeneric #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE FlexibleContexts #
# LANGUAGE NoMonomorphismRestriction #
# LANGUAGE TypeApplications #
module LegendarySpec
( spec
) where
import Control.Lens
import Control.Monad.RWS
import Data.Generics.Product
import DeckBuilding
import DeckBuilding . Legendary
import DeckBuilding . Legendary . Cards . Base
import DeckBuilding . Legendary . Strategies . Basic
import DeckBuilding . Legendary . Types
import DeckBuilding . Legendary . Utils
import DeckBuilding.Types
import System.Random
import Test.Hspec
spec :: Spec
spec = describe "TODO: fix me" $
it "is a tautology" $
1 `shouldBe` 1
spec : : Spec
spec = do
let g = mkStdGen 45752345316
let c =
[ ( " Player 1 " , dumbStrategy )
, ( " Player 2 " , dumbStrategy )
]
1
[ ]
[ doombots , mastersOfEvil , mastersOfEvil ]
( take 30 $ repeat bystander )
defaultCity
legacyVirus
drdoom
[ g ]
let p0 = PlayerNumber 0
p1 = PlayerNumber 1
let dg = configToGame c g
let afterDeal = fst $ execRWS ( deal 6 p0 ) c dg
let ( Just p1AfterDeal ) = afterDeal ^ ? # players . ix 0
let afterDeal2 = fst $ execRWS ( deal 6 p1 ) c afterDeal
let afterEvaluate = fst $ execRWS ( evaluateHand p0 ) c afterDeal2
let ( Just p1AfterEvaluate ) = afterEvaluate ^ ? # players . ix 0
let afterReset = fst $ execRWS ( resetTurn p0 ) c afterEvaluate
let ( Just p1AfterReset ) = afterReset ^ ? # players . ix 0
describe " Utils.deal " $ do
it " deals the correct number of cards " $ do
length ( p1AfterDeal ^. # hand ) ` shouldBe ` 6
length ( p1AfterDeal ^. # deck ) ` shouldBe ` 6
length ( p1AfterDeal ^. # discard ) ` shouldBe ` 0
it " has a total of eight shield agents " $
length ( filter (= = shieldAgent ) ( ( p1AfterDeal ^. # hand ) + + ( p1AfterDeal ^. # deck ) ) ) ` shouldBe ` 8
it " has a total of four field troopers " $
length ( filter (= = shieldTrooper ) ( ( p1AfterDeal ^. # hand ) + + ( p1AfterDeal ^. # deck ) ) ) ` shouldBe ` 4
describe " evaluateHand " $ do
it " has no more cards in hand " $ do
length ( p1AfterEvaluate ^. # played ) ` shouldBe ` 6
length ( p1AfterEvaluate ^. # hand ) ` shouldBe ` 0
length ( p1AfterDeal ^. # discard ) ` shouldBe ` 0
it " calculates the right amount of money " $
p1AfterEvaluate ^. # unusedMoney ` shouldBe ` length ( filter (= = shieldAgent ) ( p1AfterEvaluate ^. # played ) )
{ - Do n't calculate victory points as we go anymore
it " calculates the right amount of victory " $
p1AfterEvaluate ^. # victory ` shouldBe ` length ( filter (= = estateCard ) ( p1AfterEvaluate ^. # played ) )
spec :: Spec
spec = do
let g = mkStdGen 45752345316
let c = LegendaryConfig
[ ("Player 1", dumbStrategy)
, ("Player 2", dumbStrategy)
]
1
[]
[doombots, mastersOfEvil, mastersOfEvil]
(take 30 $ repeat bystander)
defaultCity
legacyVirus
drdoom
[g]
let p0 = PlayerNumber 0
p1 = PlayerNumber 1
let dg = configToGame c g
let afterDeal = fst $ execRWS (deal 6 p0) c dg
let (Just p1AfterDeal) = afterDeal ^? #players . ix 0
let afterDeal2 = fst $ execRWS (deal 6 p1) c afterDeal
let afterEvaluate = fst $ execRWS (evaluateHand p0) c afterDeal2
let (Just p1AfterEvaluate) = afterEvaluate ^? #players . ix 0
let afterReset = fst $ execRWS (resetTurn p0) c afterEvaluate
let (Just p1AfterReset) = afterReset ^? #players . ix 0
describe "Utils.deal" $ do
it "deals the correct number of cards" $ do
length (p1AfterDeal ^. #hand) `shouldBe` 6
length (p1AfterDeal ^. #deck) `shouldBe` 6
length (p1AfterDeal ^. #discard) `shouldBe` 0
it "has a total of eight shield agents" $
length (filter (== shieldAgent) ((p1AfterDeal ^. #hand) ++ (p1AfterDeal ^. #deck))) `shouldBe` 8
it "has a total of four field troopers" $
length (filter (== shieldTrooper) ((p1AfterDeal ^. #hand) ++ (p1AfterDeal ^. #deck))) `shouldBe` 4
describe "evaluateHand" $ do
it "has no more cards in hand" $ do
length (p1AfterEvaluate ^. #played) `shouldBe` 6
length (p1AfterEvaluate ^. #hand) `shouldBe` 0
length (p1AfterDeal ^. #discard) `shouldBe` 0
it "calculates the right amount of money" $
p1AfterEvaluate ^. #unusedMoney `shouldBe` length (filter (== shieldAgent) (p1AfterEvaluate ^. #played))
describe "resetTurn" $ do
it "has an empty played pile" $
length (p1AfterReset ^. #played) `shouldBe` 0
it "has zero money" $
(p1AfterReset ^. #unusedMoney) `shouldBe` 0
it "has zero victory" $
(p1AfterReset ^. #victory) `shouldBe` 0
it "has an empty hand" $
length (p1AfterReset ^. #hand) `shouldBe` 0
describe "doTurn" $ do
let afterDoTurn = fst $ execRWS ((runTurn p0) :: LegendaryState Bool) c afterDeal2
it "bought a card" $ do
length ((afterDoTurn ^. #players) !! 0 ^. #discard) `shouldBe` 6
describe "doTurns" $ do
let afterDoTurns = fst $ execRWS ((runTurns (PlayerNumber <$> [0..1]) False) :: LegendaryState Bool) c afterDeal2
it "has players with more cards" $ do
length ((afterDoTurns ^. #players) !! 0 ^. #discard) `shouldBe` 6
-}
|
ae0cacb97e7c39e52ebb303f9e5a017a66cbc1cd9b69c8f54fa0b353bad26fd6 | BranchTaken/Hemlock | test_add_sub.ml | open! Basis.Rudiments
open! Basis
open U128
let test () =
let rec test_pairs = function
| [] -> ()
| (x, y) :: pairs' -> begin
File.Fmt.stdout
|> fmt ~alt:true ~zpad:true ~width:32L ~radix:Radix.Hex ~pretty:true x
|> Fmt.fmt " +,- "
|> fmt ~alt:true ~zpad:true ~width:32L ~radix:Radix.Hex ~pretty:true y
|> Fmt.fmt " -> "
|> fmt ~alt:true ~zpad:true ~width:32L ~radix:Radix.Hex ~pretty:true (x + y)
|> Fmt.fmt ", "
|> fmt ~alt:true ~zpad:true ~width:32L ~radix:Radix.Hex ~pretty:true (x - y)
|> Fmt.fmt "\n"
|> ignore;
test_pairs pairs'
end
in
let pairs = [
(of_string "0", of_string "0");
(of_string "0", of_string "1");
(of_string "1", of_string "0");
(of_string "1", of_string "0xffff_ffff_ffff_ffff");
(of_string "0xffff_ffff_ffff_ffff", of_string "1");
(of_string "1", of_string "0xffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff");
(of_string "0xffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff", of_string "1");
] in
test_pairs pairs
let _ = test ()
| null | https://raw.githubusercontent.com/BranchTaken/Hemlock/a07e362d66319108c1478a4cbebab765c1808b1a/bootstrap/test/basis/u128/test_add_sub.ml | ocaml | open! Basis.Rudiments
open! Basis
open U128
let test () =
let rec test_pairs = function
| [] -> ()
| (x, y) :: pairs' -> begin
File.Fmt.stdout
|> fmt ~alt:true ~zpad:true ~width:32L ~radix:Radix.Hex ~pretty:true x
|> Fmt.fmt " +,- "
|> fmt ~alt:true ~zpad:true ~width:32L ~radix:Radix.Hex ~pretty:true y
|> Fmt.fmt " -> "
|> fmt ~alt:true ~zpad:true ~width:32L ~radix:Radix.Hex ~pretty:true (x + y)
|> Fmt.fmt ", "
|> fmt ~alt:true ~zpad:true ~width:32L ~radix:Radix.Hex ~pretty:true (x - y)
|> Fmt.fmt "\n"
|> ignore;
test_pairs pairs'
end
in
let pairs = [
(of_string "0", of_string "0");
(of_string "0", of_string "1");
(of_string "1", of_string "0");
(of_string "1", of_string "0xffff_ffff_ffff_ffff");
(of_string "0xffff_ffff_ffff_ffff", of_string "1");
(of_string "1", of_string "0xffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff");
(of_string "0xffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff", of_string "1");
] in
test_pairs pairs
let _ = test ()
| |
54acca16958ae034a52e5724eabb823be1a9ebb71adda04024621338b2dd079a | HunterYIboHu/htdp2-solution | ex521.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-intermediate-lambda-reader.ss" "lang")((modname ex521) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
PuzzleState - > PuzzleState
; is the final state reachable from state0
; generative create a tree of possible boat rides
; termination ???
(check-expect (solve initial-puzzle) final-puzzle)
(define (solve state0)
[ List - of PuzzuleState ] - > PuzzleState
; generative generate the successors of los
(define (solve* los)
(cond [(ormap final? los)
(first (filter final? los))]
[else (solve* (create-next-states los))])
))
(solve* (list state0))))
;; Questions
;; Q1: Because of this systematic way of traversing the tree, solve*
;; cannot go into an infinite loop. Why?
;;
;; A1: Because when some result start over, other way still usable.
This systemcatic way will create all ways , and stop at the first
;; final one. | null | https://raw.githubusercontent.com/HunterYIboHu/htdp2-solution/6182b4c2ef650ac7059f3c143f639d09cd708516/Chapter6/Section33/ex521.rkt | racket | about the language level of this file in a form that our tools can easily process.
is the final state reachable from state0
generative create a tree of possible boat rides
termination ???
generative generate the successors of los
Questions
Q1: Because of this systematic way of traversing the tree, solve*
cannot go into an infinite loop. Why?
A1: Because when some result start over, other way still usable.
final one. | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-intermediate-lambda-reader.ss" "lang")((modname ex521) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
PuzzleState - > PuzzleState
(check-expect (solve initial-puzzle) final-puzzle)
(define (solve state0)
[ List - of PuzzuleState ] - > PuzzleState
(define (solve* los)
(cond [(ormap final? los)
(first (filter final? los))]
[else (solve* (create-next-states los))])
))
(solve* (list state0))))
This systemcatic way will create all ways , and stop at the first |
ce14a31ea81bdb2cb067c5d47db5949736d9b0bda75c5926c9e533fb12347b87 | larsen/wiz | Core.hs | module Wiz.Core (
equal,
car,
cdr,
cons,
list,
nil,
Wiz.Core.not,
Wiz.Core.or,
Wiz.Core.and,
pair,
evalBool
) where
import Wiz.Types
import Wiz.Environment
equal :: Value -> Value -> Expression
equal x y | x == y = Boolean True
| otherwise = Boolean False
car :: Value -> Expression
car (E (List (x:_))) = x
car (E (List [])) = List []
car (E (Quote (List (x:_)))) = x
car _ = error "car applied to non list expression!"
cdr :: Value -> Expression
cdr (E (List (_:xs))) = List xs
cdr (E (List [])) = List []
cdr _ = error "cdr applied to non list expression!"
-- (cons ’a ’()) ⇒ (a)
-- (cons ’(a) ’(b c d)) ⇒ ((a) b c d)
( cons " a " ’ ( b c ) ) ⇒ ( " a " b c )
( cons ’ a 3 ) ⇒ ( a . 3 )
-- (cons ’(a b) ’c) ⇒ ((a b) . c)
cons :: Value -> Value -> Expression
-- cons v1 v2
-- | trace ("-> cons v1:" ++ show v1 ++ ", v2:" ++ show v2) False = undefined
cons (E x) (E (List ys)) = List $ x:ys
cons (E x) (E e) = List [x,e]
list :: [Expression] -> Expression
list = List
nil :: Value -> Expression
nil (E (List [])) = Boolean True
nil _ = Boolean False
not :: Value -> Expression
not (E (Boolean False)) = Boolean True
not _ = Boolean False
evalBool :: Value -> Bool
-- evalBool v | trace ("evalBool " ++ show v) False = undefined
evalBool (E (Boolean b)) = b
evalBool e = error $ "evalBool " ++ show e
or :: [Value] -> Expression
or = Boolean . any evalBool
and :: [Value] -> Expression
and = Boolean . all evalBool
pair :: Value -> Expression
pair (E (List (x:_))) = Boolean True
pair _ = Boolean False
| null | https://raw.githubusercontent.com/larsen/wiz/59b8c5fa5a1bde0a5ed83261599407e2d80efb25/src/Wiz/Core.hs | haskell | (cons ’a ’()) ⇒ (a)
(cons ’(a) ’(b c d)) ⇒ ((a) b c d)
(cons ’(a b) ’c) ⇒ ((a b) . c)
cons v1 v2
| trace ("-> cons v1:" ++ show v1 ++ ", v2:" ++ show v2) False = undefined
evalBool v | trace ("evalBool " ++ show v) False = undefined | module Wiz.Core (
equal,
car,
cdr,
cons,
list,
nil,
Wiz.Core.not,
Wiz.Core.or,
Wiz.Core.and,
pair,
evalBool
) where
import Wiz.Types
import Wiz.Environment
equal :: Value -> Value -> Expression
equal x y | x == y = Boolean True
| otherwise = Boolean False
car :: Value -> Expression
car (E (List (x:_))) = x
car (E (List [])) = List []
car (E (Quote (List (x:_)))) = x
car _ = error "car applied to non list expression!"
cdr :: Value -> Expression
cdr (E (List (_:xs))) = List xs
cdr (E (List [])) = List []
cdr _ = error "cdr applied to non list expression!"
( cons " a " ’ ( b c ) ) ⇒ ( " a " b c )
( cons ’ a 3 ) ⇒ ( a . 3 )
cons :: Value -> Value -> Expression
cons (E x) (E (List ys)) = List $ x:ys
cons (E x) (E e) = List [x,e]
list :: [Expression] -> Expression
list = List
nil :: Value -> Expression
nil (E (List [])) = Boolean True
nil _ = Boolean False
not :: Value -> Expression
not (E (Boolean False)) = Boolean True
not _ = Boolean False
evalBool :: Value -> Bool
evalBool (E (Boolean b)) = b
evalBool e = error $ "evalBool " ++ show e
or :: [Value] -> Expression
or = Boolean . any evalBool
and :: [Value] -> Expression
and = Boolean . all evalBool
pair :: Value -> Expression
pair (E (List (x:_))) = Boolean True
pair _ = Boolean False
|
a6bd2555e0619f0c67b13746bb0a449f66fed16100fe4b2b4a555019cf2a14ea | ekmett/reactive | Future.hs | # LANGUAGE GeneralizedNewtypeDeriving #
# OPTIONS_GHC -Wall -fno - warn - orphans #
----------------------------------------------------------------------
-- |
Module : FRP.Reactive . Future
Copyright : ( c ) 2007 - 2008
-- License : GNU AGPLv3 (see COPYING)
--
-- Maintainer :
-- Stability : experimental
--
-- A simple formulation of functional /futures/, roughly as
-- described at <>.
--
-- A /future/ is a value with an associated time of /arrival/. Typically,
-- neither the time nor the value can be known until the arrival time.
--
-- Primitive futures can be things like /the value of the next key you
press/ , or /the value of LambdaPix stock at noon next Monday/.
--
-- Composition is via standard type classes: 'Functor', 'Applicative',
' Monad ' , and ' Monoid ' . Some comments on the ' Future ' instances of
-- these classes:
--
* Monoid : ' ' is a future that never arrives ( infinite time and
undefined value ) , and @a ` mappend ` b@ is the earlier of @a@ and ,
-- preferring @a@ when simultaneous.
--
-- * 'Functor': apply a function to a future argument. The (future)
-- result arrives simultaneously with the argument.
--
-- * 'Applicative': 'pure' gives value arriving negative infinity.
' ( \<*\ > ) ' applies a future function to a future argument , yielding a
-- future result that arrives once /both/ function and argument have
arrived ( coinciding with the later of the two times ) .
--
* ' ' : ' return ' is the same as ' pure ' ( as usual ) . @(>>=)@ cascades
-- futures. 'join' resolves a future future value into a future value.
--
-- Futures are parametric over /time/ as well as /value/ types. The time
-- parameter can be any ordered type and is particularly useful with time
-- types that have rich partial information structure, such as /improving
-- values/.
----------------------------------------------------------------------
module FRP.Reactive.Future
(
* Time & futures
Time, ftime
, FutureG(..), isNeverF, inFuture, inFuture2, futTime, futVal, future
, withTimeF
-- * Tests
#ifdef TEST
, batch
#endif
) where
import Data.Monoid (Monoid(..))
import Data.Semigroup (Semigroup(..), Max(..))
import Data . AddBounds
import FRP.Reactive.Internal.Future
#ifdef TEST
-- Testing
import Test.QuickCheck
import Test.QuickCheck.Checkers
import Test.QuickCheck.Classes
#endif
{----------------------------------------------------------
Time and futures
----------------------------------------------------------}
-- | Make a finite time
ftime :: t -> Time t
ftime = Max
#ifdef TEST
FutureG representation in Internal . Future
instance (Bounded t, Eq t, EqProp t, EqProp a) => EqProp (FutureG t a) where
u =-= v | isNeverF u && isNeverF v = property True
Future a =-= Future b = a =-= b
#endif
-- I'd rather say:
--
instance ( Bounded t , EqProp t , EqProp a ) = > EqProp ( FutureG t a ) where
-- Future a =-= Future b =
( fst a = -= maxBound & & fst b = -= maxBound ) .| . a = -= b
--
However , I do n't know how to define disjunction on QuickCheck properties .
-- | A future's time
futTime :: FutureG t a -> Time t
futTime = fst . unFuture
-- | A future's value
futVal :: FutureG t a -> a
futVal = snd . unFuture
-- | A future value with given time & value
future :: t -> a -> FutureG t a
future t a = Future (ftime t, a)
-- | Access time of future
withTimeF :: FutureG t a -> FutureG t (Time t, a)
withTimeF = inFuture $ \ (t,a) -> (t,(t,a))
-- withTimeF = inFuture duplicate (with Comonad)
TODO : Eliminate this Monoid instance . Derive Monoid along with all the
other classes . And do n't use mempty and mappend for the operations
below . For one thing , the current instance makes Future a monoid but
-- unFuture not be a monoid morphism.
instance Ord t => Semigroup (FutureG t a) where
Future (s,a) <> Future (t,b) =
Future (s `min` t, if s <= t then a else b)
instance (Ord t, Bounded t) => Monoid (FutureG t a) where
mempty = Future (maxBound, error "Future mempty: it'll never happen, buddy")
-- Pick the earlier future.
Future (s,a) `mappend` Future (t,b) =
Future (s `min` t, if s <= t then a else b)
-- Consider the following simpler definition:
--
-- fa@(Future (s,_)) `mappend` fb@(Future (t,_)) =
-- if s <= t then fa else fb
--
Nothing can be known about the resulting future until @s < = t@ is
-- determined. In particular, we cannot know lower bounds for the time.
-- In contrast, the actual 'mappend' definition can potentially yield
-- useful partial information, such as lower bounds, about the future
-- time, if the type parameter @t@ has rich partial information structure
-- (non-flat).
-- For some choices of @t@, there may be an efficient combination of 'min'
-- and '(<=)', so the 'mappend' definition is sub-optimal. In particular,
-- 'Improving' has 'minI'.
-- -- A future known never to happen (by construction), i.e., infinite time.
isNever : : FutureG t a - > Bool
-- isNever = isMaxBound . futTime
-- where
isMaxBound ( ) = True
-- isMaxBound _ = False
--
-- This function is an abstraction leak. Don't export it to library
-- users.
{----------------------------------------------------------
Tests
----------------------------------------------------------}
-- Represents times at a given instant.
newtype TimeInfo t = TimeInfo (Maybe t)
#ifdef TEST
deriving EqProp
#endif
instance Bounded t => Bounded (TimeInfo t) where
minBound = TimeInfo (Just minBound)
maxBound = TimeInfo Nothing
-- A time at a given instant can be some unknown time in the future
unknownTimeInFuture :: TimeInfo a
unknownTimeInFuture = TimeInfo Nothing
instance Eq a => Eq (TimeInfo a) where
TimeInfo Nothing == TimeInfo Nothing = error "Cannot tell if two unknown times in the future are equal"
TimeInfo (Just _) == TimeInfo Nothing = False
TimeInfo Nothing == TimeInfo (Just _) = False
TimeInfo (Just a) == TimeInfo (Just b) = a == b
instance Ord a => Ord (TimeInfo a) where
The minimum of two unknown times in the future is an unkown time in the
-- future.
TimeInfo Nothing `min` TimeInfo Nothing = unknownTimeInFuture
TimeInfo Nothing `min` b = b
a `min` TimeInfo Nothing = a
TimeInfo (Just a) `min` TimeInfo (Just b) = (TimeInfo . Just) (a `min` b)
TimeInfo Nothing <= TimeInfo Nothing = error "Cannot tell if one unknown time in the future is less than another."
TimeInfo Nothing <= TimeInfo (Just _) = False
TimeInfo (Just _) <= TimeInfo Nothing = True
TimeInfo (Just a) <= TimeInfo (Just b) = a <= b
#ifdef TEST
-- or, a known time in the past. We're ignoring known future times for now.
knownTimeInPast :: a -> TimeInfo a
knownTimeInPast = TimeInfo . Just
-- Move to checkers
type BoundedT = Int
batch :: TestBatch
batch = ( "FRP.Reactive.Future"
, concatMap unbatch
[ monoid (undefined :: FutureG NumT T)
, functorMonoid (undefined :: FutureG NumT
(T,NumT))
-- Checking the semantics here isn't necessary because
-- the implementation is identical to them.
--
Also , Functor , Applicative , and Monad do n't require checking
-- since they are automatically derived.
--
, semanticMonoid ' ( undefined : : FutureG NumT T )
, functor ( undefined : : FutureG NumT ( T , NumT , T ) )
, semanticFunctor ( undefined : : FutureG NumT ( ) )
, applicative ( undefined : : FutureG NumT ( NumT , T , NumT ) )
, semanticApplicative ( undefined : : FutureG NumT ( ) )
, monad ( undefined : : FutureG NumT ( NumT , T , NumT ) )
, semanticMonad ( undefined : : FutureG NumT ( ) )
, ("specifics",
[ ("laziness", property laziness )
])
]
)
where
laziness :: BoundedT -> T -> Property
laziness t a = (uf `mappend` uf) `mappend` kf =-= kf
where
uf = unknownFuture
kf = knownFuture
knownFuture = future (knownTimeInPast t) a
unknownFuture = future unknownTimeInFuture (error "cannot retrieve value at unknown time at the future")
#endif
| null | https://raw.githubusercontent.com/ekmett/reactive/61b20b7a2e92af372b5bd9a2af294db0fbdfa9d8/src/FRP/Reactive/Future.hs | haskell | --------------------------------------------------------------------
|
License : GNU AGPLv3 (see COPYING)
Maintainer :
Stability : experimental
A simple formulation of functional /futures/, roughly as
described at <>.
A /future/ is a value with an associated time of /arrival/. Typically,
neither the time nor the value can be known until the arrival time.
Primitive futures can be things like /the value of the next key you
Composition is via standard type classes: 'Functor', 'Applicative',
these classes:
preferring @a@ when simultaneous.
* 'Functor': apply a function to a future argument. The (future)
result arrives simultaneously with the argument.
* 'Applicative': 'pure' gives value arriving negative infinity.
future result that arrives once /both/ function and argument have
futures. 'join' resolves a future future value into a future value.
Futures are parametric over /time/ as well as /value/ types. The time
parameter can be any ordered type and is particularly useful with time
types that have rich partial information structure, such as /improving
values/.
--------------------------------------------------------------------
* Tests
Testing
---------------------------------------------------------
Time and futures
---------------------------------------------------------
| Make a finite time
I'd rather say:
Future a =-= Future b =
| A future's time
| A future's value
| A future value with given time & value
| Access time of future
withTimeF = inFuture duplicate (with Comonad)
unFuture not be a monoid morphism.
Pick the earlier future.
Consider the following simpler definition:
fa@(Future (s,_)) `mappend` fb@(Future (t,_)) =
if s <= t then fa else fb
determined. In particular, we cannot know lower bounds for the time.
In contrast, the actual 'mappend' definition can potentially yield
useful partial information, such as lower bounds, about the future
time, if the type parameter @t@ has rich partial information structure
(non-flat).
For some choices of @t@, there may be an efficient combination of 'min'
and '(<=)', so the 'mappend' definition is sub-optimal. In particular,
'Improving' has 'minI'.
-- A future known never to happen (by construction), i.e., infinite time.
isNever = isMaxBound . futTime
where
isMaxBound _ = False
This function is an abstraction leak. Don't export it to library
users.
---------------------------------------------------------
Tests
---------------------------------------------------------
Represents times at a given instant.
A time at a given instant can be some unknown time in the future
future.
or, a known time in the past. We're ignoring known future times for now.
Move to checkers
Checking the semantics here isn't necessary because
the implementation is identical to them.
since they are automatically derived.
| # LANGUAGE GeneralizedNewtypeDeriving #
# OPTIONS_GHC -Wall -fno - warn - orphans #
Module : FRP.Reactive . Future
Copyright : ( c ) 2007 - 2008
press/ , or /the value of LambdaPix stock at noon next Monday/.
' Monad ' , and ' Monoid ' . Some comments on the ' Future ' instances of
* Monoid : ' ' is a future that never arrives ( infinite time and
undefined value ) , and @a ` mappend ` b@ is the earlier of @a@ and ,
' ( \<*\ > ) ' applies a future function to a future argument , yielding a
arrived ( coinciding with the later of the two times ) .
* ' ' : ' return ' is the same as ' pure ' ( as usual ) . @(>>=)@ cascades
module FRP.Reactive.Future
(
* Time & futures
Time, ftime
, FutureG(..), isNeverF, inFuture, inFuture2, futTime, futVal, future
, withTimeF
#ifdef TEST
, batch
#endif
) where
import Data.Monoid (Monoid(..))
import Data.Semigroup (Semigroup(..), Max(..))
import Data . AddBounds
import FRP.Reactive.Internal.Future
#ifdef TEST
import Test.QuickCheck
import Test.QuickCheck.Checkers
import Test.QuickCheck.Classes
#endif
ftime :: t -> Time t
ftime = Max
#ifdef TEST
FutureG representation in Internal . Future
instance (Bounded t, Eq t, EqProp t, EqProp a) => EqProp (FutureG t a) where
u =-= v | isNeverF u && isNeverF v = property True
Future a =-= Future b = a =-= b
#endif
instance ( Bounded t , EqProp t , EqProp a ) = > EqProp ( FutureG t a ) where
( fst a = -= maxBound & & fst b = -= maxBound ) .| . a = -= b
However , I do n't know how to define disjunction on QuickCheck properties .
futTime :: FutureG t a -> Time t
futTime = fst . unFuture
futVal :: FutureG t a -> a
futVal = snd . unFuture
future :: t -> a -> FutureG t a
future t a = Future (ftime t, a)
withTimeF :: FutureG t a -> FutureG t (Time t, a)
withTimeF = inFuture $ \ (t,a) -> (t,(t,a))
TODO : Eliminate this Monoid instance . Derive Monoid along with all the
other classes . And do n't use mempty and mappend for the operations
below . For one thing , the current instance makes Future a monoid but
instance Ord t => Semigroup (FutureG t a) where
Future (s,a) <> Future (t,b) =
Future (s `min` t, if s <= t then a else b)
instance (Ord t, Bounded t) => Monoid (FutureG t a) where
mempty = Future (maxBound, error "Future mempty: it'll never happen, buddy")
Future (s,a) `mappend` Future (t,b) =
Future (s `min` t, if s <= t then a else b)
Nothing can be known about the resulting future until @s < = t@ is
isNever : : FutureG t a - > Bool
isMaxBound ( ) = True
newtype TimeInfo t = TimeInfo (Maybe t)
#ifdef TEST
deriving EqProp
#endif
instance Bounded t => Bounded (TimeInfo t) where
minBound = TimeInfo (Just minBound)
maxBound = TimeInfo Nothing
unknownTimeInFuture :: TimeInfo a
unknownTimeInFuture = TimeInfo Nothing
instance Eq a => Eq (TimeInfo a) where
TimeInfo Nothing == TimeInfo Nothing = error "Cannot tell if two unknown times in the future are equal"
TimeInfo (Just _) == TimeInfo Nothing = False
TimeInfo Nothing == TimeInfo (Just _) = False
TimeInfo (Just a) == TimeInfo (Just b) = a == b
instance Ord a => Ord (TimeInfo a) where
The minimum of two unknown times in the future is an unkown time in the
TimeInfo Nothing `min` TimeInfo Nothing = unknownTimeInFuture
TimeInfo Nothing `min` b = b
a `min` TimeInfo Nothing = a
TimeInfo (Just a) `min` TimeInfo (Just b) = (TimeInfo . Just) (a `min` b)
TimeInfo Nothing <= TimeInfo Nothing = error "Cannot tell if one unknown time in the future is less than another."
TimeInfo Nothing <= TimeInfo (Just _) = False
TimeInfo (Just _) <= TimeInfo Nothing = True
TimeInfo (Just a) <= TimeInfo (Just b) = a <= b
#ifdef TEST
knownTimeInPast :: a -> TimeInfo a
knownTimeInPast = TimeInfo . Just
type BoundedT = Int
batch :: TestBatch
batch = ( "FRP.Reactive.Future"
, concatMap unbatch
[ monoid (undefined :: FutureG NumT T)
, functorMonoid (undefined :: FutureG NumT
(T,NumT))
Also , Functor , Applicative , and Monad do n't require checking
, semanticMonoid ' ( undefined : : FutureG NumT T )
, functor ( undefined : : FutureG NumT ( T , NumT , T ) )
, semanticFunctor ( undefined : : FutureG NumT ( ) )
, applicative ( undefined : : FutureG NumT ( NumT , T , NumT ) )
, semanticApplicative ( undefined : : FutureG NumT ( ) )
, monad ( undefined : : FutureG NumT ( NumT , T , NumT ) )
, semanticMonad ( undefined : : FutureG NumT ( ) )
, ("specifics",
[ ("laziness", property laziness )
])
]
)
where
laziness :: BoundedT -> T -> Property
laziness t a = (uf `mappend` uf) `mappend` kf =-= kf
where
uf = unknownFuture
kf = knownFuture
knownFuture = future (knownTimeInPast t) a
unknownFuture = future unknownTimeInFuture (error "cannot retrieve value at unknown time at the future")
#endif
|
29a85d42000ad59554b05f77ba545989846bbb1a676d338f388750346976a1d9 | apinf/proxy42 | vegur_upgrade_middleware.erl | Copyright ( c ) 2013 - 2015 , Heroku Inc < > .
%%% All rights reserved.
%%%
%%% Redistribution and use in source and binary forms, with or without
%%% modification, are permitted provided that the following conditions are
%%% met:
%%%
%%% * Redistributions of source code must retain the above copyright
%%% notice, this list of conditions and the following disclaimer.
%%%
%%% * Redistributions in binary form must reproduce the above copyright
%%% notice, this list of conditions and the following disclaimer in the
%%% documentation and/or other materials provided with the distribution.
%%%
%%% * The names of its contributors may not be used to endorse or promote
%%% products derived from this software without specific prior written
%%% permission.
%%%
%%% THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
%%% LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
%%% A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR ANY DIRECT , INDIRECT , INCIDENTAL ,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT
LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE ,
%%% DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
%%% (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
%%% OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-module(vegur_upgrade_middleware).
-behaviour(cowboyku_middleware).
-export([execute/2]).
execute(Req, Env) ->
match_headers(vegur_utils:parse_header(<<"connection">>, Req), Req, Env).
match_headers({ok,{ConnectionTokens, Req1}}, _, Env) ->
case lists:member(<<"upgrade">>, ConnectionTokens) of
false ->
{ok, Req1, Env};
true ->
%% The connection should be upgraded
case cowboyku_req:parse_header(<<"upgrade">>, Req1) of
{ok, undefined, Req2} ->
{HttpCode, Req3} = vegur_utils:handle_error(bad_request_header, Req2),
{error, HttpCode, Req3};
{ok, Upgrade, Req2} ->
handle_upgrade(Upgrade, Req2, Env);
{undefined, _, Req2} ->
{HttpCode, Req3} = vegur_utils:handle_error(bad_request_header, Req2),
426 ?
_ ->
{HttpCode, Req2} = vegur_utils:handle_error(bad_request_header, Req1),
{error, HttpCode, Req2}
end
end;
match_headers({error,_}, Req, _Env) ->
{HttpCode, Req1} = vegur_utils:handle_error(bad_request_header, Req),
{error, HttpCode, Req1}.
% -sec14.html#sec14.42
-spec handle_upgrade(undefined|[binary()] | {error, term()}, Req, Env) ->
{ok, Req, Env} |
{error, ErrorCode, Req} when
Req :: cowboyku_req:req(),
Env :: cowboyku_middleware:env(),
ErrorCode :: 400.
handle_upgrade(undefined, Req, Env) ->
% No Upgrade header
{ok, Req, Env};
handle_upgrade(UpgradeTokens, Req, Env) when is_list(UpgradeTokens) ->
{Type, Req1} = cowboyku_req:meta(request_type, Req, []),
Req2 = cowboyku_req:set_meta(request_type, [upgrade|Type], Req1),
{ok, Req2, Env};
handle_upgrade({error, _}, Req, _Env) ->
Req1 = vegur_utils:set_request_status(error, Req),
@todo add custom errors
{error, 400, Req1};
handle_upgrade(_, Req, _Env) ->
% The upgrade header can contain other values, those will result in a client error
Req1 = vegur_utils:set_request_status(error, Req),
@todo add custom errors
{error, 400, Req1}.
| null | https://raw.githubusercontent.com/apinf/proxy42/01b483b711881391e8306bf64b83b4df9d0bc832/apps/vegur/src/vegur_upgrade_middleware.erl | erlang | All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* The names of its contributors may not be used to endorse or promote
products derived from this software without specific prior written
permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
The connection should be upgraded
-sec14.html#sec14.42
No Upgrade header
The upgrade header can contain other values, those will result in a client error | Copyright ( c ) 2013 - 2015 , Heroku Inc < > .
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
OWNER OR ANY DIRECT , INDIRECT , INCIDENTAL ,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT
LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE ,
THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
-module(vegur_upgrade_middleware).
-behaviour(cowboyku_middleware).
-export([execute/2]).
execute(Req, Env) ->
match_headers(vegur_utils:parse_header(<<"connection">>, Req), Req, Env).
match_headers({ok,{ConnectionTokens, Req1}}, _, Env) ->
case lists:member(<<"upgrade">>, ConnectionTokens) of
false ->
{ok, Req1, Env};
true ->
case cowboyku_req:parse_header(<<"upgrade">>, Req1) of
{ok, undefined, Req2} ->
{HttpCode, Req3} = vegur_utils:handle_error(bad_request_header, Req2),
{error, HttpCode, Req3};
{ok, Upgrade, Req2} ->
handle_upgrade(Upgrade, Req2, Env);
{undefined, _, Req2} ->
{HttpCode, Req3} = vegur_utils:handle_error(bad_request_header, Req2),
426 ?
_ ->
{HttpCode, Req2} = vegur_utils:handle_error(bad_request_header, Req1),
{error, HttpCode, Req2}
end
end;
match_headers({error,_}, Req, _Env) ->
{HttpCode, Req1} = vegur_utils:handle_error(bad_request_header, Req),
{error, HttpCode, Req1}.
-spec handle_upgrade(undefined|[binary()] | {error, term()}, Req, Env) ->
{ok, Req, Env} |
{error, ErrorCode, Req} when
Req :: cowboyku_req:req(),
Env :: cowboyku_middleware:env(),
ErrorCode :: 400.
handle_upgrade(undefined, Req, Env) ->
{ok, Req, Env};
handle_upgrade(UpgradeTokens, Req, Env) when is_list(UpgradeTokens) ->
{Type, Req1} = cowboyku_req:meta(request_type, Req, []),
Req2 = cowboyku_req:set_meta(request_type, [upgrade|Type], Req1),
{ok, Req2, Env};
handle_upgrade({error, _}, Req, _Env) ->
Req1 = vegur_utils:set_request_status(error, Req),
@todo add custom errors
{error, 400, Req1};
handle_upgrade(_, Req, _Env) ->
Req1 = vegur_utils:set_request_status(error, Req),
@todo add custom errors
{error, 400, Req1}.
|
8f6cc7a3dff64509c428f439ab6fea031415ef2dbc045c20ed389db42f734289 | rabbitmq/rabbitmq-erlang-client | amqp_rpc_server.erl | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
%%
Copyright ( c ) 2007 - 2020 VMware , Inc. or its affiliates . All rights reserved .
%%
%% @doc This is a utility module that is used to expose an arbitrary function
%% via an asynchronous RPC over AMQP mechanism. It frees the implementor of
a simple function from having to plumb this into . Note that the
%% RPC server does not handle any data encoding, so it is up to the callback
function to marshall and unmarshall message payloads accordingly .
-module(amqp_rpc_server).
-behaviour(gen_server).
-include("amqp_client.hrl").
-export([init/1, terminate/2, code_change/3, handle_call/3,
handle_cast/2, handle_info/2]).
-export([start/3, start_link/3]).
-export([stop/1]).
-record(state, {channel,
handler}).
%%--------------------------------------------------------------------------
%% API
%%--------------------------------------------------------------------------
%% @spec (Connection, Queue, RpcHandler) -> RpcServer
%% where
%% Connection = pid()
%% Queue = binary()
%% RpcHandler = function()
%% RpcServer = pid()
@doc Starts a new RPC server instance that receives requests via a
%% specified queue and dispatches them to a specified handler function. This
function returns the pid of the RPC server that can be used to stop the
%% server.
start(Connection, Queue, Fun) ->
{ok, Pid} = gen_server:start(?MODULE, [Connection, Queue, Fun], []),
Pid.
%% @spec (Connection, Queue, RpcHandler) -> RpcServer
%% where
%% Connection = pid()
%% Queue = binary()
%% RpcHandler = function()
%% RpcServer = pid()
@doc Starts , and links to , a new RPC server instance that receives
%% requests via a specified queue and dispatches them to a specified
handler function . This function returns the pid of the RPC server that
%% can be used to stop the server.
start_link(Connection, Queue, Fun) ->
{ok, Pid} = gen_server:start_link(?MODULE, [Connection, Queue, Fun], []),
Pid.
( RpcServer ) - > ok
%% where
%% RpcServer = pid()
@doc Stops an existing RPC server .
stop(Pid) ->
gen_server:call(Pid, stop, amqp_util:call_timeout()).
%%--------------------------------------------------------------------------
%% gen_server callbacks
%%--------------------------------------------------------------------------
@private
init([Connection, Q, Fun]) ->
{ok, Channel} = amqp_connection:open_channel(
Connection, {amqp_direct_consumer, [self()]}),
amqp_channel:call(Channel, #'queue.declare'{queue = Q}),
amqp_channel:call(Channel, #'basic.consume'{queue = Q}),
{ok, #state{channel = Channel, handler = Fun} }.
@private
handle_info(shutdown, State) ->
{stop, normal, State};
@private
handle_info({#'basic.consume'{}, _}, State) ->
{noreply, State};
@private
handle_info(#'basic.consume_ok'{}, State) ->
{noreply, State};
@private
handle_info(#'basic.cancel'{}, State) ->
{noreply, State};
@private
handle_info(#'basic.cancel_ok'{}, State) ->
{stop, normal, State};
@private
handle_info({#'basic.deliver'{delivery_tag = DeliveryTag},
#amqp_msg{props = Props, payload = Payload}},
State = #state{handler = Fun, channel = Channel}) ->
#'P_basic'{correlation_id = CorrelationId,
reply_to = Q} = Props,
Response = Fun(Payload),
Properties = #'P_basic'{correlation_id = CorrelationId},
Publish = #'basic.publish'{exchange = <<>>,
routing_key = Q,
mandatory = true},
amqp_channel:call(Channel, Publish, #amqp_msg{props = Properties,
payload = Response}),
amqp_channel:call(Channel, #'basic.ack'{delivery_tag = DeliveryTag}),
{noreply, State};
@private
handle_info({'DOWN', _MRef, process, _Pid, _Info}, State) ->
{noreply, State}.
@private
handle_call(stop, _From, State) ->
{stop, normal, ok, State}.
%%--------------------------------------------------------------------------
%% Rest of the gen_server callbacks
%%--------------------------------------------------------------------------
@private
handle_cast(_Message, State) ->
{noreply, State}.
%% Closes the channel this gen_server instance started
@private
terminate(_Reason, #state{channel = Channel}) ->
amqp_channel:close(Channel),
ok.
@private
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
| null | https://raw.githubusercontent.com/rabbitmq/rabbitmq-erlang-client/2022e01c515d93ed1883e9e9e987be2e58fe15c9/src/amqp_rpc_server.erl | erlang |
@doc This is a utility module that is used to expose an arbitrary function
via an asynchronous RPC over AMQP mechanism. It frees the implementor of
RPC server does not handle any data encoding, so it is up to the callback
--------------------------------------------------------------------------
API
--------------------------------------------------------------------------
@spec (Connection, Queue, RpcHandler) -> RpcServer
where
Connection = pid()
Queue = binary()
RpcHandler = function()
RpcServer = pid()
specified queue and dispatches them to a specified handler function. This
server.
@spec (Connection, Queue, RpcHandler) -> RpcServer
where
Connection = pid()
Queue = binary()
RpcHandler = function()
RpcServer = pid()
requests via a specified queue and dispatches them to a specified
can be used to stop the server.
where
RpcServer = pid()
--------------------------------------------------------------------------
gen_server callbacks
--------------------------------------------------------------------------
--------------------------------------------------------------------------
Rest of the gen_server callbacks
--------------------------------------------------------------------------
Closes the channel this gen_server instance started | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
Copyright ( c ) 2007 - 2020 VMware , Inc. or its affiliates . All rights reserved .
a simple function from having to plumb this into . Note that the
function to marshall and unmarshall message payloads accordingly .
-module(amqp_rpc_server).
-behaviour(gen_server).
-include("amqp_client.hrl").
-export([init/1, terminate/2, code_change/3, handle_call/3,
handle_cast/2, handle_info/2]).
-export([start/3, start_link/3]).
-export([stop/1]).
-record(state, {channel,
handler}).
@doc Starts a new RPC server instance that receives requests via a
function returns the pid of the RPC server that can be used to stop the
start(Connection, Queue, Fun) ->
{ok, Pid} = gen_server:start(?MODULE, [Connection, Queue, Fun], []),
Pid.
@doc Starts , and links to , a new RPC server instance that receives
handler function . This function returns the pid of the RPC server that
start_link(Connection, Queue, Fun) ->
{ok, Pid} = gen_server:start_link(?MODULE, [Connection, Queue, Fun], []),
Pid.
( RpcServer ) - > ok
@doc Stops an existing RPC server .
stop(Pid) ->
gen_server:call(Pid, stop, amqp_util:call_timeout()).
@private
init([Connection, Q, Fun]) ->
{ok, Channel} = amqp_connection:open_channel(
Connection, {amqp_direct_consumer, [self()]}),
amqp_channel:call(Channel, #'queue.declare'{queue = Q}),
amqp_channel:call(Channel, #'basic.consume'{queue = Q}),
{ok, #state{channel = Channel, handler = Fun} }.
@private
handle_info(shutdown, State) ->
{stop, normal, State};
@private
handle_info({#'basic.consume'{}, _}, State) ->
{noreply, State};
@private
handle_info(#'basic.consume_ok'{}, State) ->
{noreply, State};
@private
handle_info(#'basic.cancel'{}, State) ->
{noreply, State};
@private
handle_info(#'basic.cancel_ok'{}, State) ->
{stop, normal, State};
@private
handle_info({#'basic.deliver'{delivery_tag = DeliveryTag},
#amqp_msg{props = Props, payload = Payload}},
State = #state{handler = Fun, channel = Channel}) ->
#'P_basic'{correlation_id = CorrelationId,
reply_to = Q} = Props,
Response = Fun(Payload),
Properties = #'P_basic'{correlation_id = CorrelationId},
Publish = #'basic.publish'{exchange = <<>>,
routing_key = Q,
mandatory = true},
amqp_channel:call(Channel, Publish, #amqp_msg{props = Properties,
payload = Response}),
amqp_channel:call(Channel, #'basic.ack'{delivery_tag = DeliveryTag}),
{noreply, State};
@private
handle_info({'DOWN', _MRef, process, _Pid, _Info}, State) ->
{noreply, State}.
@private
handle_call(stop, _From, State) ->
{stop, normal, ok, State}.
@private
handle_cast(_Message, State) ->
{noreply, State}.
@private
terminate(_Reason, #state{channel = Channel}) ->
amqp_channel:close(Channel),
ok.
@private
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.