_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
710e63178bb0b7cd190454896f57cfe2092c51d0ea27219b7af0dbf397775a50 | open-company/open-company-storage | access.clj | (ns oc.storage.api.access
"Access control functions for storage API."
(:require [if-let.core :refer (if-let*)]
[defun.core :refer (defun)]
[taoensso.timbre :as timbre]
[oc.lib.schema :as lib-schema]
[oc.lib.slugify :as slugify]
[oc.storage.resources.org :as org-res]
[oc.storage.resources.board :as board-res]))
;; ----- Validation -----
(defn malformed-user-id?
"Read in the body param from the request and make sure it's a non-blank string
that corresponds to a user-id. Otherwise just indicate it's malformed."
[ctx]
(try
(if-let* [user-id (slurp (get-in ctx [:request :body]))
valid? (lib-schema/unique-id? user-id)]
[false {:data user-id}]
true)
(catch Exception e
(timbre/warn "Request body not processable as a user-id: " e)
true)))
(defn premium-org? [org user]
((set (:premium-teams user)) (:team-id org)))
;; ----- Authorization -----
(defun access-level-for
"
Given an org (or slug) and a user map, return a map describing the authorization level for the user on the org,
or nil if the user has no access.
If a board is specified (slug or map) the map will describe the access level of the user on it.
Ie:
{:access-level :author|:viewer|:public|:does-not-exist
:role :admin|:member|:anonymous
:premium? true|false}
"
;; Access to org
Invalid org slug
([_conn org-slug :guard #(and (string? %) (not (slugify/valid-slug? %))) _user]
;; Will fail existence checks later
{:access-level :does-not-exist})
;; With org slug, check if it exists
([conn :guard lib-schema/conn? org-slug :guard slugify/valid-slug? user :guard #(or (map? %) (nil? %))]
(if-let [org (org-res/get-org conn org-slug)]
(access-level-for conn org user)
;; Will fail existence checks later
{:access-level :does-not-exist}))
;; With org resource, return the access level
([conn :guard lib-schema/conn? org :guard map? user :guard #(or (map? %) (nil? %))]
(let [user-id (:user-id user)
teams (:teams user)
admin (:admin user)
org-uuid (:uuid org)
org-authors (set (:authors org))
premium? (premium-org? org user)
member? ((set teams) (:team-id org))
admin? ((set admin) (:team-id org))
role (cond admin? :admin
member? :member
:else :anonymous)
base-access {:role role
:premium? premium?}]
(cond
;; an admin of this org's team
((set admin) (:team-id org))
(merge base-access {:access-level :author})
;; a named author of this org
(org-authors user-id)
(merge base-access {:access-level :author})
;; a team member of this org
((set teams) (:team-id org))
(merge base-access {:access-level :viewer})
public access to orgs w/ at least 1 public board AND that allow public boards
(and
(seq (board-res/list-boards-by-index conn "org-uuid-access" [[org-uuid "public"]]))
(not (-> org :content-visibility :disallow-public-board)))
(merge base-access {:access-level :public})
;; no access
:else false)))
Access to board
Invalid org slug
([_conn org-slug :guard #(and (string? %) (not (slugify/valid-slug? %))) _board_slug _user]
;; Will fail existence checks later
{:access-level :does-not-exist})
Invalid board slug
([_conn _org-slug _board_slug :guard #(and (string? %) (not (slugify/valid-slug? %))) _user]
;; Will fail existence checks later
{:access-level :does-not-exist})
([conn :guard lib-schema/conn? org-slug :guard slugify/valid-slug? board-slug-or-uuid :guard slugify/valid-slug?
user :guard #(or (map? %) (nil? %))]
(if-let* [org (org-res/get-org conn org-slug)
board (board-res/get-board conn (:uuid org) board-slug-or-uuid)]
(access-level-for org board user)
;; Will fail existence checks later
{:access-level :does-not-exist}))
([org :guard map? board :guard map? user :guard #(or (map? %) (nil? %))]
(let [user-id (:user-id user)
teams (:teams user)
admin (:admin user)
org-authors (set (:authors org))
board-access (keyword (:access board))
board-authors (set (:authors board))
board-viewers (set (:viewers board))
premium? (premium-org? org user)
org-member? ((set teams) (:team-id org))
admin? ((set admin) (:team-id org))
role (cond admin? :admin
org-member? :member
:else :anonymous)
publisher-board-role (if (= (str board-res/publisher-board-slug-prefix user-id) (:slug board))
:author
:viewer)
base-access {:role role
:premium? premium?}]
(cond
;; a named author of this private board
(and (= board-access :private)
(board-authors user-id))
(merge base-access {:access-level :author})
(and (= board-access :team)
(:publisher-board board))
(merge base-access {:access-level publisher-board-role})
;; an admin of this org's team for this non-private board
(and (not= board-access :private) ((set admin) (:team-id org)))
(merge base-access {:access-level :author})
;; an org author of this non-private board
(and (not= board-access :private) (org-authors user-id))
(merge base-access {:access-level :author})
;; a named viewer of this board
(and (= board-access :private) (board-viewers user-id))
(merge base-access {:access-level :viewer})
;; a team member on a non-private board
(and (not= board-access :private) ((set teams) (:team-id org)))
(merge base-access {:access-level :viewer})
;; anyone else on a public board IF the org allows public boards
(and (= board-access :public) (not (-> org :content-visibility :disallow-public-board)))
(merge base-access {:access-level :public})
;; no access
:else false))))
(defn allow-team-admins-or-no-org
""
[_conn _user]
TODO
{:access-level :author})
(defn allow-members
"
Given an org slug and a user map, return an access level of :author or :viewer if the user is a team member
and false otherwise.
"
[conn org-slug user]
(let [access (access-level-for conn org-slug user)]
(if (= (:access-level access) :public)
false
access)))
(defn allow-premium
"
Given an org slug and a user map, return an access level of :author or :viewer if the user is a team member
of a premium org, false otherwise.
"
[conn org-slug user]
(let [access (allow-members conn org-slug user)]
(if (and (:access-level access)
(not= (:access-level access) :public)
(:premium? access))
access
false)))
(defn allow-authors
"
Given an org slug, and user map, return true if the user is an author on the org.
Or, given an org slug, board slug and user map, return true if the user is an author on the board.
"
([conn org-slug user]
(let [access (access-level-for conn org-slug user)
access-level (:access-level access)]
(if (or (= access-level :author)
;; Allow to fail existence check later
(= access-level :does-not-exist))
access
false)))
([conn org-slug board-slug-or-uuid user]
(let [access (access-level-for conn org-slug board-slug-or-uuid user)
access-level (:access-level access)]
(if (or (= access-level :author)
(= access-level :does-not-exist))
access
false))))
(defn allow-admins
"
Given an org slug and a user map, return the access
map if the user is an admin of the given org's team.
"
[conn org-slug user]
(let [access (access-level-for conn org-slug user)]
(if (and (= (:role access) :admin)
(not= (:access-level access) :public))
access
false)))
(defn board-with-access-level
"
Merge in `access` level user is accessing this board with, and if that level is public, remove author and
viewer lists.
"
[org board user]
(let [level (access-level-for org board user)
public? (= :public (:access-level level))]
(as-> board b
(if public?
(dissoc board :authors :viewers)
b)
(if (map? level)
(merge b level)
b)))) | null | https://raw.githubusercontent.com/open-company/open-company-storage/ae4bbe6245f8736f3c1813c3048448035aff5815/src/oc/storage/api/access.clj | clojure | ----- Validation -----
----- Authorization -----
Access to org
Will fail existence checks later
With org slug, check if it exists
Will fail existence checks later
With org resource, return the access level
an admin of this org's team
a named author of this org
a team member of this org
no access
Will fail existence checks later
Will fail existence checks later
Will fail existence checks later
a named author of this private board
an admin of this org's team for this non-private board
an org author of this non-private board
a named viewer of this board
a team member on a non-private board
anyone else on a public board IF the org allows public boards
no access
Allow to fail existence check later | (ns oc.storage.api.access
"Access control functions for storage API."
(:require [if-let.core :refer (if-let*)]
[defun.core :refer (defun)]
[taoensso.timbre :as timbre]
[oc.lib.schema :as lib-schema]
[oc.lib.slugify :as slugify]
[oc.storage.resources.org :as org-res]
[oc.storage.resources.board :as board-res]))
(defn malformed-user-id?
"Read in the body param from the request and make sure it's a non-blank string
that corresponds to a user-id. Otherwise just indicate it's malformed."
[ctx]
(try
(if-let* [user-id (slurp (get-in ctx [:request :body]))
valid? (lib-schema/unique-id? user-id)]
[false {:data user-id}]
true)
(catch Exception e
(timbre/warn "Request body not processable as a user-id: " e)
true)))
(defn premium-org? [org user]
((set (:premium-teams user)) (:team-id org)))
(defun access-level-for
"
Given an org (or slug) and a user map, return a map describing the authorization level for the user on the org,
or nil if the user has no access.
If a board is specified (slug or map) the map will describe the access level of the user on it.
Ie:
{:access-level :author|:viewer|:public|:does-not-exist
:role :admin|:member|:anonymous
:premium? true|false}
"
Invalid org slug
([_conn org-slug :guard #(and (string? %) (not (slugify/valid-slug? %))) _user]
{:access-level :does-not-exist})
([conn :guard lib-schema/conn? org-slug :guard slugify/valid-slug? user :guard #(or (map? %) (nil? %))]
(if-let [org (org-res/get-org conn org-slug)]
(access-level-for conn org user)
{:access-level :does-not-exist}))
([conn :guard lib-schema/conn? org :guard map? user :guard #(or (map? %) (nil? %))]
(let [user-id (:user-id user)
teams (:teams user)
admin (:admin user)
org-uuid (:uuid org)
org-authors (set (:authors org))
premium? (premium-org? org user)
member? ((set teams) (:team-id org))
admin? ((set admin) (:team-id org))
role (cond admin? :admin
member? :member
:else :anonymous)
base-access {:role role
:premium? premium?}]
(cond
((set admin) (:team-id org))
(merge base-access {:access-level :author})
(org-authors user-id)
(merge base-access {:access-level :author})
((set teams) (:team-id org))
(merge base-access {:access-level :viewer})
public access to orgs w/ at least 1 public board AND that allow public boards
(and
(seq (board-res/list-boards-by-index conn "org-uuid-access" [[org-uuid "public"]]))
(not (-> org :content-visibility :disallow-public-board)))
(merge base-access {:access-level :public})
:else false)))
Access to board
Invalid org slug
([_conn org-slug :guard #(and (string? %) (not (slugify/valid-slug? %))) _board_slug _user]
{:access-level :does-not-exist})
Invalid board slug
([_conn _org-slug _board_slug :guard #(and (string? %) (not (slugify/valid-slug? %))) _user]
{:access-level :does-not-exist})
([conn :guard lib-schema/conn? org-slug :guard slugify/valid-slug? board-slug-or-uuid :guard slugify/valid-slug?
user :guard #(or (map? %) (nil? %))]
(if-let* [org (org-res/get-org conn org-slug)
board (board-res/get-board conn (:uuid org) board-slug-or-uuid)]
(access-level-for org board user)
{:access-level :does-not-exist}))
([org :guard map? board :guard map? user :guard #(or (map? %) (nil? %))]
(let [user-id (:user-id user)
teams (:teams user)
admin (:admin user)
org-authors (set (:authors org))
board-access (keyword (:access board))
board-authors (set (:authors board))
board-viewers (set (:viewers board))
premium? (premium-org? org user)
org-member? ((set teams) (:team-id org))
admin? ((set admin) (:team-id org))
role (cond admin? :admin
org-member? :member
:else :anonymous)
publisher-board-role (if (= (str board-res/publisher-board-slug-prefix user-id) (:slug board))
:author
:viewer)
base-access {:role role
:premium? premium?}]
(cond
(and (= board-access :private)
(board-authors user-id))
(merge base-access {:access-level :author})
(and (= board-access :team)
(:publisher-board board))
(merge base-access {:access-level publisher-board-role})
(and (not= board-access :private) ((set admin) (:team-id org)))
(merge base-access {:access-level :author})
(and (not= board-access :private) (org-authors user-id))
(merge base-access {:access-level :author})
(and (= board-access :private) (board-viewers user-id))
(merge base-access {:access-level :viewer})
(and (not= board-access :private) ((set teams) (:team-id org)))
(merge base-access {:access-level :viewer})
(and (= board-access :public) (not (-> org :content-visibility :disallow-public-board)))
(merge base-access {:access-level :public})
:else false))))
(defn allow-team-admins-or-no-org
""
[_conn _user]
TODO
{:access-level :author})
(defn allow-members
"
Given an org slug and a user map, return an access level of :author or :viewer if the user is a team member
and false otherwise.
"
[conn org-slug user]
(let [access (access-level-for conn org-slug user)]
(if (= (:access-level access) :public)
false
access)))
(defn allow-premium
"
Given an org slug and a user map, return an access level of :author or :viewer if the user is a team member
of a premium org, false otherwise.
"
[conn org-slug user]
(let [access (allow-members conn org-slug user)]
(if (and (:access-level access)
(not= (:access-level access) :public)
(:premium? access))
access
false)))
(defn allow-authors
"
Given an org slug, and user map, return true if the user is an author on the org.
Or, given an org slug, board slug and user map, return true if the user is an author on the board.
"
([conn org-slug user]
(let [access (access-level-for conn org-slug user)
access-level (:access-level access)]
(if (or (= access-level :author)
(= access-level :does-not-exist))
access
false)))
([conn org-slug board-slug-or-uuid user]
(let [access (access-level-for conn org-slug board-slug-or-uuid user)
access-level (:access-level access)]
(if (or (= access-level :author)
(= access-level :does-not-exist))
access
false))))
(defn allow-admins
"
Given an org slug and a user map, return the access
map if the user is an admin of the given org's team.
"
[conn org-slug user]
(let [access (access-level-for conn org-slug user)]
(if (and (= (:role access) :admin)
(not= (:access-level access) :public))
access
false)))
(defn board-with-access-level
"
Merge in `access` level user is accessing this board with, and if that level is public, remove author and
viewer lists.
"
[org board user]
(let [level (access-level-for org board user)
public? (= :public (:access-level level))]
(as-> board b
(if public?
(dissoc board :authors :viewers)
b)
(if (map? level)
(merge b level)
b)))) |
990adaee9c94996cfd641681ca11c0976954003df713e3087a916438be223478 | CSCfi/rems | fix_userid.clj |
(ns rems.db.fix-userid
(:require rems.service.dependencies
[rems.db.api-key]
[rems.db.applications]
[rems.db.attachments]
[rems.db.blacklist]
[rems.db.core]
[rems.db.events]
[rems.db.form]
[rems.db.invitation]
[rems.db.licenses]
[rems.db.organizations]
[rems.db.resource]
[rems.db.roles]
[rems.db.user-mappings]
[rems.db.user-secrets]
[rems.db.user-settings]
[rems.db.users]
[rems.db.workflow]))
(defn fix-apikey [old-userid new-userid simulate?]
(doall
(for [api-key (rems.db.api-key/get-api-keys)
:when (contains? (set (:users api-key)) old-userid)
:let [params [(:apikey api-key) {:users (replace {old-userid new-userid} (:users api-key))}]]]
(do
(apply prn #'fix-apikey api-key params)
(when-not simulate?
(apply rems.db.api-key/update-api-key! params))
{:api-key api-key :params params}))))
(comment
(fix-apikey "alice" "charlie" false))
(defn fix-application-event [old-userid new-userid simulate?]
(doall
(for [old-event (rems.db.events/get-all-events-since 0)
:let [new-event (cond-> old-event
(= old-userid (:event/actor old-event))
(assoc :event/actor new-userid)
(contains? (set (:application/reviewers old-event)) old-userid)
(update :application/reviewers #(replace {old-userid new-userid} %))
(contains? (set (:application/deciders old-event)) old-userid)
(update :application/deciders #(replace {old-userid new-userid} %))
(= old-userid (:application/member old-event))
(assoc :application/member new-userid)
(= old-userid (:application/applicant old-event))
(assoc :application/applicant new-userid))]
:when (not= new-event old-event)
:let [params [new-event]]]
(do
(apply prn #'fix-application-event old-event params)
(when-not simulate?
(apply rems.db.events/update-event! params))
{:old-event old-event :params params}))))
(comment
(fix-application-event "alice" "frank" false)
(fix-application-event "carl" "charlie" true))
(defn fix-attachment [old-userid new-userid simulate?]
(doall
(for [attachment (rems.db.attachments/get-attachments)
:when (= old-userid (:attachment/user attachment))
:let [params [(assoc attachment :attachment/user new-userid)]]]
(do
(apply prn #'fix-attachment attachment params)
(when-not simulate?
(apply rems.db.attachments/update-attachment! params))
{:attachment attachment :params params}))))
(comment
(fix-attachment "alice" "frank" false))
(defn fix-audit-log [old-userid new-userid simulate?]
(doall
(for [audit-log (rems.db.core/get-audit-log)
:when (= old-userid (:userid audit-log))
:let [params [(merge audit-log
{:time-new (:time audit-log)
:path-new (:path audit-log)
:method-new (:method audit-log)
:apikey-new (:apikey audit-log)
:userid-new new-userid
:status-new (:status audit-log)})]]]
(do
(apply prn #'fix-audit-log audit-log params)
(when-not simulate?
(let [result (apply rems.db.core/update-audit-log! params)]
(assert (= 1 (first result)) {:audit-log audit-log :params params :result result})))
{:audit-log audit-log :params params}))))
(comment
(fix-audit-log "alice" "frank" false))
(defn fix-blacklist-event [old-userid new-userid simulate?]
(doall
(for [old-event (rems.db.blacklist/get-events nil)
:let [new-event (cond-> old-event
(= old-userid (:event/actor old-event))
(assoc :event/actor new-userid)
(= old-userid (:userid old-event))
(assoc :userid new-userid))]
:when (not= new-event old-event)
:let [params [new-event]]]
(do
(apply prn #'fix-blacklist-event old-event params)
(when-not simulate?
(apply rems.db.blacklist/update-event! params))
{:old-event old-event :params params}))))
(comment
(fix-blacklist-event "alice" "frank" false))
nothing to fix in
nothing to fix in catalogue_item_application
nothing to fix in catalogue_item_localization
(defn fix-entitlement [old-userid new-userid simulate?]
(doall
(for [old (rems.db.core/get-entitlements nil)
:let [new (cond-> old
(= old-userid (:userid old))
(assoc :userid new-userid)
(= old-userid (:approvedby old))
(assoc :approvedby new-userid)
(= old-userid (:revokedby old))
(assoc :revokedby new-userid))]
:when (not= new old)
:let [params [{:user (:userid new)
:resource (:resourceid new)
:application (:catappid new)
:approvedby (:approvedby new)
:revokedby (:revokedby new)
:start (:start new)
:end (:end new)
:id (:entitlementid new)}]]]
(do
(apply prn #'fix-entitlement old params)
(when-not simulate?
(apply rems.db.core/update-entitlement! params))
{:old old :params params}))))
(comment
(fix-entitlement "alice" "frank" false))
;; nothing to fix in external_application_id
;; nothing to fix in form_template
(defn fix-invitation [old-userid new-userid simulate?]
(doall
(for [old (rems.db.invitation/get-invitations nil)
:let [new (cond-> old
(= old-userid (get-in old [:invitation/invited-by :userid]))
(assoc-in [:invitation/invited-by :userid] new-userid)
(= old-userid (get-in old [:invitation/invited-user :userid]))
(assoc-in [:invitation/invited-user :userid] new-userid))]
:when (not= new old)
:let [params [new]]]
(do
(apply prn #'fix-invitation old params)
(when-not simulate?
(apply rems.db.invitation/update-invitation! params))
{:old old :params params}))))
(comment
(fix-invitation "alice" "frank" false))
;; nothing to fix in license
;; nothing to fix in license_attachment
;; nothing to fix in license_localization
(defn fix-organization [old-userid new-userid simulate?]
(doall
(for [old (rems.db.organizations/get-organizations-raw)
:let [new (update old :organization/owners (partial mapv #(if (= old-userid (:userid %))
{:userid new-userid}
%)))]
:when (not= new old)
:let [params [new]]]
(do
(apply prn #'fix-organization old params)
(when-not simulate?
(apply rems.db.organizations/set-organization! params))
{:old old :params params}))))
(comment
(fix-organization "organization-owner2" "frank" false))
;; nothing to fix in outbox
NB : this is a table that should contain rows only momentarily
;; nothing to fix in resource
;; nothing to fix in resource_licenses
(defn fix-roles [old-userid new-userid simulate?]
(doall
(for [old (rems.db.roles/get-all-roles)
:let [new (if (= old-userid (:userid old))
(assoc old :userid new-userid)
old)]
:when (not= new old)
:let [params [new]]]
(do
(apply prn #'fix-roles old params)
(when-not simulate?
(rems.db.roles/remove-roles! old-userid)
(apply rems.db.roles/update-roles! params))
{:old old :params params}))))
(comment
(fix-roles "frank" "owner" false))
NB : referential constraints force use to handle
users , settings and secrets in one go
(defn fix-user [old-userid new-userid simulate?]
(when (rems.db.users/user-exists? old-userid) ; referential constraints will force this to exist at least
(let [old-user (rems.db.users/get-user old-userid)
old-settings (rems.db.user-settings/get-user-settings old-userid)
old-secrets (rems.db.user-secrets/get-user-secrets old-userid)
old-mappings (rems.db.user-mappings/get-user-mappings {:userid old-userid})]
(apply prn #'fix-user old-user old-settings old-secrets old-mappings)
(when-not simulate?
(rems.db.users/add-user! (assoc old-user :userid new-userid))
(rems.db.user-secrets/update-user-secrets! new-userid old-secrets)
(rems.db.user-settings/update-user-settings! new-userid old-settings)
(rems.db.user-mappings/delete-user-mapping! old-userid)
(doseq [old-mapping old-mappings
:when (not= (:ext-id-value old-mapping) new-userid)] ; not saved in login either
(rems.db.user-mappings/create-user-mapping! (assoc old-mapping :userid new-userid))))
{:old {:user old-user :settings old-settings :secrets old-secrets :mappings old-mappings}
:params [new-userid]})))
(defn remove-old-user [old-userid simulate?]
(when-not simulate?
(rems.db.user-secrets/delete-user-secrets! old-userid)
(rems.db.user-settings/delete-user-settings! old-userid)
(rems.db.users/remove-user! old-userid)))
(comment
(rems.db.users/get-user "alice")
(fix-user "alice" "frank" false))
(defn fix-workflow [old-userid new-userid simulate?]
(doall
(for [old (rems.db.workflow/get-workflows nil)
:let [old {:id (:id old)
:organization (:organization old)
:title (:title old)
:handlers (mapv :userid (get-in old [:workflow :handlers]))}
new (update old :handlers (partial mapv #(if (= old-userid %)
NB : format is different
%)))]
:when (not= new old)
:let [params [new]]]
(do
(apply prn #'fix-workflow old params)
(when-not simulate?
(apply rems.db.workflow/edit-workflow! params))
{:old old :params params}))))
(comment
(fix-workflow "bona-fide-bot" "frank" false))
;; nothing to fix in workflow_licenses
(defn fix-all [old-userid new-userid simulate?]
(let [result (doall
(for [f [#'fix-user ; many tables refer to user
#'fix-apikey
#'fix-application-event
#'fix-attachment
#'fix-audit-log
#'fix-blacklist-event
#'fix-entitlement
#'fix-invitation
#'fix-organization
#'fix-roles
#'fix-workflow]]
[(:name (meta f))
(f old-userid new-userid simulate?)]))]
(remove-old-user old-userid simulate?)
( rems.db.applications/reload-cache ! ) ; can be useful if running from REPL
result))
(comment
(fix-all "owner" "elsa" false)
(fix-all "alice" "frank" false)
(fix-all "elixir-alice" "alice" false))
| null | https://raw.githubusercontent.com/CSCfi/rems/490087c4d58339c908da792111029fbaf817a26f/src/clj/rems/db/fix_userid.clj | clojure | nothing to fix in external_application_id
nothing to fix in form_template
nothing to fix in license
nothing to fix in license_attachment
nothing to fix in license_localization
nothing to fix in outbox
nothing to fix in resource
nothing to fix in resource_licenses
referential constraints will force this to exist at least
not saved in login either
nothing to fix in workflow_licenses
many tables refer to user
can be useful if running from REPL |
(ns rems.db.fix-userid
(:require rems.service.dependencies
[rems.db.api-key]
[rems.db.applications]
[rems.db.attachments]
[rems.db.blacklist]
[rems.db.core]
[rems.db.events]
[rems.db.form]
[rems.db.invitation]
[rems.db.licenses]
[rems.db.organizations]
[rems.db.resource]
[rems.db.roles]
[rems.db.user-mappings]
[rems.db.user-secrets]
[rems.db.user-settings]
[rems.db.users]
[rems.db.workflow]))
(defn fix-apikey [old-userid new-userid simulate?]
(doall
(for [api-key (rems.db.api-key/get-api-keys)
:when (contains? (set (:users api-key)) old-userid)
:let [params [(:apikey api-key) {:users (replace {old-userid new-userid} (:users api-key))}]]]
(do
(apply prn #'fix-apikey api-key params)
(when-not simulate?
(apply rems.db.api-key/update-api-key! params))
{:api-key api-key :params params}))))
(comment
(fix-apikey "alice" "charlie" false))
(defn fix-application-event [old-userid new-userid simulate?]
(doall
(for [old-event (rems.db.events/get-all-events-since 0)
:let [new-event (cond-> old-event
(= old-userid (:event/actor old-event))
(assoc :event/actor new-userid)
(contains? (set (:application/reviewers old-event)) old-userid)
(update :application/reviewers #(replace {old-userid new-userid} %))
(contains? (set (:application/deciders old-event)) old-userid)
(update :application/deciders #(replace {old-userid new-userid} %))
(= old-userid (:application/member old-event))
(assoc :application/member new-userid)
(= old-userid (:application/applicant old-event))
(assoc :application/applicant new-userid))]
:when (not= new-event old-event)
:let [params [new-event]]]
(do
(apply prn #'fix-application-event old-event params)
(when-not simulate?
(apply rems.db.events/update-event! params))
{:old-event old-event :params params}))))
(comment
(fix-application-event "alice" "frank" false)
(fix-application-event "carl" "charlie" true))
(defn fix-attachment [old-userid new-userid simulate?]
(doall
(for [attachment (rems.db.attachments/get-attachments)
:when (= old-userid (:attachment/user attachment))
:let [params [(assoc attachment :attachment/user new-userid)]]]
(do
(apply prn #'fix-attachment attachment params)
(when-not simulate?
(apply rems.db.attachments/update-attachment! params))
{:attachment attachment :params params}))))
(comment
(fix-attachment "alice" "frank" false))
(defn fix-audit-log [old-userid new-userid simulate?]
(doall
(for [audit-log (rems.db.core/get-audit-log)
:when (= old-userid (:userid audit-log))
:let [params [(merge audit-log
{:time-new (:time audit-log)
:path-new (:path audit-log)
:method-new (:method audit-log)
:apikey-new (:apikey audit-log)
:userid-new new-userid
:status-new (:status audit-log)})]]]
(do
(apply prn #'fix-audit-log audit-log params)
(when-not simulate?
(let [result (apply rems.db.core/update-audit-log! params)]
(assert (= 1 (first result)) {:audit-log audit-log :params params :result result})))
{:audit-log audit-log :params params}))))
(comment
(fix-audit-log "alice" "frank" false))
(defn fix-blacklist-event [old-userid new-userid simulate?]
(doall
(for [old-event (rems.db.blacklist/get-events nil)
:let [new-event (cond-> old-event
(= old-userid (:event/actor old-event))
(assoc :event/actor new-userid)
(= old-userid (:userid old-event))
(assoc :userid new-userid))]
:when (not= new-event old-event)
:let [params [new-event]]]
(do
(apply prn #'fix-blacklist-event old-event params)
(when-not simulate?
(apply rems.db.blacklist/update-event! params))
{:old-event old-event :params params}))))
(comment
(fix-blacklist-event "alice" "frank" false))
nothing to fix in
nothing to fix in catalogue_item_application
nothing to fix in catalogue_item_localization
(defn fix-entitlement [old-userid new-userid simulate?]
(doall
(for [old (rems.db.core/get-entitlements nil)
:let [new (cond-> old
(= old-userid (:userid old))
(assoc :userid new-userid)
(= old-userid (:approvedby old))
(assoc :approvedby new-userid)
(= old-userid (:revokedby old))
(assoc :revokedby new-userid))]
:when (not= new old)
:let [params [{:user (:userid new)
:resource (:resourceid new)
:application (:catappid new)
:approvedby (:approvedby new)
:revokedby (:revokedby new)
:start (:start new)
:end (:end new)
:id (:entitlementid new)}]]]
(do
(apply prn #'fix-entitlement old params)
(when-not simulate?
(apply rems.db.core/update-entitlement! params))
{:old old :params params}))))
(comment
(fix-entitlement "alice" "frank" false))
(defn fix-invitation [old-userid new-userid simulate?]
(doall
(for [old (rems.db.invitation/get-invitations nil)
:let [new (cond-> old
(= old-userid (get-in old [:invitation/invited-by :userid]))
(assoc-in [:invitation/invited-by :userid] new-userid)
(= old-userid (get-in old [:invitation/invited-user :userid]))
(assoc-in [:invitation/invited-user :userid] new-userid))]
:when (not= new old)
:let [params [new]]]
(do
(apply prn #'fix-invitation old params)
(when-not simulate?
(apply rems.db.invitation/update-invitation! params))
{:old old :params params}))))
(comment
(fix-invitation "alice" "frank" false))
(defn fix-organization [old-userid new-userid simulate?]
(doall
(for [old (rems.db.organizations/get-organizations-raw)
:let [new (update old :organization/owners (partial mapv #(if (= old-userid (:userid %))
{:userid new-userid}
%)))]
:when (not= new old)
:let [params [new]]]
(do
(apply prn #'fix-organization old params)
(when-not simulate?
(apply rems.db.organizations/set-organization! params))
{:old old :params params}))))
(comment
(fix-organization "organization-owner2" "frank" false))
NB : this is a table that should contain rows only momentarily
(defn fix-roles [old-userid new-userid simulate?]
(doall
(for [old (rems.db.roles/get-all-roles)
:let [new (if (= old-userid (:userid old))
(assoc old :userid new-userid)
old)]
:when (not= new old)
:let [params [new]]]
(do
(apply prn #'fix-roles old params)
(when-not simulate?
(rems.db.roles/remove-roles! old-userid)
(apply rems.db.roles/update-roles! params))
{:old old :params params}))))
(comment
(fix-roles "frank" "owner" false))
NB : referential constraints force use to handle
users , settings and secrets in one go
(defn fix-user [old-userid new-userid simulate?]
(let [old-user (rems.db.users/get-user old-userid)
old-settings (rems.db.user-settings/get-user-settings old-userid)
old-secrets (rems.db.user-secrets/get-user-secrets old-userid)
old-mappings (rems.db.user-mappings/get-user-mappings {:userid old-userid})]
(apply prn #'fix-user old-user old-settings old-secrets old-mappings)
(when-not simulate?
(rems.db.users/add-user! (assoc old-user :userid new-userid))
(rems.db.user-secrets/update-user-secrets! new-userid old-secrets)
(rems.db.user-settings/update-user-settings! new-userid old-settings)
(rems.db.user-mappings/delete-user-mapping! old-userid)
(doseq [old-mapping old-mappings
(rems.db.user-mappings/create-user-mapping! (assoc old-mapping :userid new-userid))))
{:old {:user old-user :settings old-settings :secrets old-secrets :mappings old-mappings}
:params [new-userid]})))
(defn remove-old-user [old-userid simulate?]
(when-not simulate?
(rems.db.user-secrets/delete-user-secrets! old-userid)
(rems.db.user-settings/delete-user-settings! old-userid)
(rems.db.users/remove-user! old-userid)))
(comment
(rems.db.users/get-user "alice")
(fix-user "alice" "frank" false))
(defn fix-workflow [old-userid new-userid simulate?]
(doall
(for [old (rems.db.workflow/get-workflows nil)
:let [old {:id (:id old)
:organization (:organization old)
:title (:title old)
:handlers (mapv :userid (get-in old [:workflow :handlers]))}
new (update old :handlers (partial mapv #(if (= old-userid %)
NB : format is different
%)))]
:when (not= new old)
:let [params [new]]]
(do
(apply prn #'fix-workflow old params)
(when-not simulate?
(apply rems.db.workflow/edit-workflow! params))
{:old old :params params}))))
(comment
(fix-workflow "bona-fide-bot" "frank" false))
(defn fix-all [old-userid new-userid simulate?]
(let [result (doall
#'fix-apikey
#'fix-application-event
#'fix-attachment
#'fix-audit-log
#'fix-blacklist-event
#'fix-entitlement
#'fix-invitation
#'fix-organization
#'fix-roles
#'fix-workflow]]
[(:name (meta f))
(f old-userid new-userid simulate?)]))]
(remove-old-user old-userid simulate?)
result))
(comment
(fix-all "owner" "elsa" false)
(fix-all "alice" "frank" false)
(fix-all "elixir-alice" "alice" false))
|
135aadaa35b1980d487bafa5549cc6468ceeba0df2bc1433640d182321704fa9 | haskell/cabal | ParseUtils.hs | # LANGUAGE CPP #
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Deprecated.ParseUtils
Copyright : ( c ) The University of Glasgow 2004
-- License : BSD3
--
-- Maintainer :
-- Portability : portable
--
Utilities for parsing ' PackageDescription ' and ' InstalledPackageInfo ' .
--
-- The @.cabal@ file format is not trivial, especially with the introduction
-- of configurations and the section syntax that goes with that. This module
-- has a bunch of parsing functions that is used by the @.cabal@ parser and a
-- couple others. It has the parsing framework code and also little parsers for
-- many of the formats we get in various @.cabal@ file fields, like module
-- names, comma separated lists etc.
-- This module is meant to be local-only to Distribution...
{-# OPTIONS_HADDOCK hide #-}
{-# LANGUAGE Rank2Types #-}
module Distribution.Deprecated.ParseUtils (
LineNo, PError(..), PWarning(..), locatedErrorMsg, syntaxError, warning,
runP, runE, ParseResult(..), parseFail, showPWarning,
Field(..), lineNo,
FieldDescr(..), readFields,
parseHaskellString, parseTokenQ,
parseOptCommaList,
showFilePath, showToken, showFreeText,
field, simpleField, listField, listFieldWithSep, spaceListField,
newLineListField,
liftField,
readPToMaybe,
fieldParsec, simpleFieldParsec,
listFieldParsec,
commaListFieldParsec,
commaNewLineListFieldParsec,
UnrecFieldParser,
) where
import Distribution.Client.Compat.Prelude hiding (get)
import Prelude ()
import Distribution.Deprecated.ReadP as ReadP hiding (get)
import Distribution.Pretty
import Distribution.ReadE
import Distribution.Utils.Generic
import System.FilePath (normalise)
import Text.PrettyPrint (Doc, punctuate, comma, fsep, sep)
import qualified Text.Read as Read
import qualified Control.Monad.Fail as Fail
import Distribution.Parsec (ParsecParser, parsecLeadingCommaList, parsecLeadingOptCommaList)
import qualified Data.ByteString as BS
import qualified Distribution.Fields as Fields
import qualified Distribution.Fields.Field as Fields
import qualified Distribution.Parsec as Parsec
import qualified Distribution.Fields.LexerMonad as Fields
import qualified Text.Parsec.Error as PE
import qualified Text.Parsec.Pos as PP
-- -----------------------------------------------------------------------------
-- | Line numbers are 1-based.
type LineNo = Int

-- | Fatal errors produced by the legacy @.cabal@ parsing framework.
data PError = AmbiguousParse String LineNo
            | NoParse String LineNo
            | TabsError LineNo
            | FromString String (Maybe LineNo)
        deriving (Eq, Show)

-- | Non-fatal warnings collected while parsing.
data PWarning = PWarning String
              | UTFWarning LineNo String
        deriving (Eq, Show)
-- | Render a warning prefixed with the (normalised) path of the file it
-- was found in.
showPWarning :: FilePath -> PWarning -> String
showPWarning fpath w = case w of
  PWarning msg ->
    normalise fpath ++ ": " ++ msg
  UTFWarning line fname ->
    normalise fpath ++ ":" ++ show line
      ++ ": Invalid UTF-8 text in the '" ++ fname ++ "' field."
-- | The result of running a legacy parser: either a fatal 'PError', or a
-- value together with the warnings accumulated along the way.
data ParseResult a = ParseFailed PError | ParseOk [PWarning] a
        deriving Show

instance Functor ParseResult where
        fmap _ (ParseFailed err) = ParseFailed err
        fmap f (ParseOk ws x) = ParseOk ws $ f x

instance Applicative ParseResult where
        pure = ParseOk []
        (<*>) = ap

instance Monad ParseResult where
        return = pure
        ParseFailed err >>= _ = ParseFailed err
        -- warnings of the continuation are prepended to the earlier ones
        ParseOk ws x >>= f = case f x of
                               ParseFailed err -> ParseFailed err
                               ParseOk ws' x' -> ParseOk (ws'++ws) x'
#if !(MIN_VERSION_base(4,9,0))
        fail = parseResultFail
#elif !(MIN_VERSION_base(4,13,0))
        fail = Fail.fail
#endif

instance Foldable ParseResult where
        foldMap _ (ParseFailed _ ) = mempty
        foldMap f (ParseOk _ x) = f x

instance Traversable ParseResult where
        traverse _ (ParseFailed err) = pure (ParseFailed err)
        traverse f (ParseOk ws x) = ParseOk ws <$> f x

instance Fail.MonadFail ParseResult where
        fail = parseResultFail

-- | Fail with a free-form message and no line number.
parseResultFail :: String -> ParseResult a
parseResultFail s = parseFail (FromString s Nothing)

-- | Fail with the given 'PError'.
parseFail :: PError -> ParseResult a
parseFail = ParseFailed
-- | Run a 'ReadP' parser on a field value, producing UTF-8 warnings for
-- the field and reporting ambiguous or failed parses on the given line.
runP :: LineNo -> String -> ReadP a a -> String -> ParseResult a
runP line fieldname p s =
  case [ x | (x,"") <- results ] of
    [a] -> ParseOk (utf8Warnings line fieldname s) a
    --TODO: what is this double parse thing all about?
    --      Can't we just do the all isSpace test the first time?
    [] -> case [ x | (x,ys) <- results, all isSpace ys ] of
      [a] -> ParseOk (utf8Warnings line fieldname s) a
      [] -> ParseFailed (NoParse fieldname line)
      _ -> ParseFailed (AmbiguousParse fieldname line)
    _ -> ParseFailed (AmbiguousParse fieldname line)
  where results = readP_to_S p s
-- | Run a 'ReadE' parser on a field value; a 'Left' from the parser is
-- turned into a syntax error on the given line.
runE :: LineNo -> String -> ReadE a -> String -> ParseResult a
runE line fieldname p s =
  case runReadE p s of
    Right a -> ParseOk (utf8Warnings line fieldname s) a
    Left e -> syntaxError line $
      "Parse of field '" ++ fieldname ++ "' failed (" ++ e ++ "): " ++ s

-- | Emit at most one warning for a field value containing the Unicode
-- replacement character '\xfffd' (the marker left by lenient UTF-8
-- decoding of invalid input).
utf8Warnings :: LineNo -> String -> String -> [PWarning]
utf8Warnings line fieldname s =
  take 1 [ UTFWarning n fieldname
         | (n,l) <- zip [line..] (lines s)
         , '\xfffd' `elem` l ]
-- | Decompose a 'PError' into an optional line number and a human
-- readable message.
locatedErrorMsg :: PError -> (Maybe LineNo, String)
locatedErrorMsg err = case err of
  AmbiguousParse f n -> (Just n, "Ambiguous parse in field '"++f++"'.")
  NoParse f n        -> (Just n, "Parse of field '"++f++"' failed.")
  TabsError n        -> (Just n, "Tab used as indentation.")
  FromString s n     -> (n, s)
-- | Fail with a free-form message attached to the given line.
syntaxError :: LineNo -> String -> ParseResult a
syntaxError n s = ParseFailed $ FromString s (Just n)

-- | Succeed with @()@ while recording a warning message.
warning :: String -> ParseResult ()
warning s = ParseOk [PWarning s] ()
-- | Field descriptor. The parameter @a@ parameterizes over where the field's
-- value is stored in.
data FieldDescr a
  = FieldDescr
      { fieldName :: String
      , fieldGet :: a -> Doc
      , fieldSet :: LineNo -> String -> a -> ParseResult a
        -- ^ @fieldSet n str x@ Parses the field value from the given input
        -- string @str@ and stores the result in @x@ if the parse was
        -- successful. Otherwise, reports an error on line number @n@.
      }
-- | Build a 'FieldDescr' whose value is parsed with a 'ReadP' parser.
field :: String -> (a -> Doc) -> ReadP a a -> FieldDescr a
field name showF readF =
  FieldDescr name showF (\line val _st -> runP line name readF val)

-- | Build a 'FieldDescr' whose value is parsed with a 'ParsecParser';
-- parse errors are reported on the field's line.
fieldParsec :: String -> (a -> Doc) -> ParsecParser a -> FieldDescr a
fieldParsec name showF readF =
  FieldDescr name showF $ \line val _st -> case explicitEitherParsec readF val of
    Left err -> ParseFailed (FromString err (Just line))
    Right x -> ParseOk [] x
-- Lift a field descriptor storing into an 'a' to a field descriptor storing
-- into a 'b'.
liftField :: (b -> a) -> (a -> b -> b) -> FieldDescr a -> FieldDescr b
liftField get set (FieldDescr name showF parseF)
  = FieldDescr name (showF . get)
      (\line str b -> do
        a <- parseF line str (get b)
        -- write the parsed value back into the larger structure
        return (set a b))
-- Parser combinator for simple fields. Takes a field name, a pretty printer,
-- a parser function, an accessor, and a setter, returns a FieldDescr over the
-- composite structure.
simpleField :: String -> (a -> Doc) -> ReadP a a
            -> (b -> a) -> (a -> b -> b) -> FieldDescr b
simpleField name showF readF get set
  = liftField get set $ field name showF readF

-- | As 'simpleField', but using a 'ParsecParser' for the field value.
simpleFieldParsec :: String -> (a -> Doc) -> ParsecParser a
                  -> (b -> a) -> (a -> b -> b) -> FieldDescr b
simpleFieldParsec name showF readF get set
  = liftField get set $ fieldParsec name showF readF
-- | List field whose values are separated by (optionally leading)
-- commas, pretty printed with the given separator.
commaListFieldWithSepParsec :: Separator -> String -> (a -> Doc) -> ParsecParser a
                            -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b
commaListFieldWithSepParsec separator name showF readF get set =
  liftField get set' $
    fieldParsec name showF' (parsecLeadingCommaList readF)
  where
    -- append the newly parsed values to those already stored
    set' xs b = set (get b ++ xs) b
    showF' = separator . punctuate comma . map showF

-- | Comma separated list field, pretty printed on one (filled) line.
commaListFieldParsec :: String -> (a -> Doc) -> ParsecParser a
                     -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b
commaListFieldParsec = commaListFieldWithSepParsec fsep

-- | Comma separated list field, pretty printed over multiple lines.
commaNewLineListFieldParsec
  :: String -> (a -> Doc) -> ParsecParser a
  -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b
commaNewLineListFieldParsec = commaListFieldWithSepParsec sep
-- | List field whose values are separated by whitespace only.
spaceListField :: String -> (a -> Doc) -> ReadP [a] a
               -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b
spaceListField name showF readF get set =
  liftField get set' $
    field name showF' (parseSpaceList readF)
  where
    -- append the newly parsed values to those already stored
    set' xs b = set (get b ++ xs) b
    showF' = fsep . map showF
-- this is a different definition from listField, like
-- commaNewLineListField it pretty prints on multiple lines
newLineListField :: String -> (a -> Doc) -> ReadP [a] a
                 -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b
newLineListField = listFieldWithSep sep

-- | List field with optional comma separators, pretty printed with the
-- given separator.
listFieldWithSep :: Separator -> String -> (a -> Doc) -> ReadP [a] a
                 -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b
listFieldWithSep separator name showF readF get set =
  liftField get set' $
    field name showF' (parseOptCommaList readF)
  where
    set' xs b = set (get b ++ xs) b
    showF' = separator . map showF

-- | As 'listFieldWithSep', but using a 'ParsecParser' for the elements.
listFieldWithSepParsec :: Separator -> String -> (a -> Doc) -> ParsecParser a
                       -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b
listFieldWithSepParsec separator name showF readF get set =
  liftField get set' $
    fieldParsec name showF' (parsecLeadingOptCommaList readF)
  where
    set' xs b = set (get b ++ xs) b
    showF' = separator . map showF

-- | List field pretty printed on one (filled) line.
listField :: String -> (a -> Doc) -> ReadP [a] a
          -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b
listField = listFieldWithSep fsep

-- | As 'listField', but using a 'ParsecParser' for the elements.
listFieldParsec
  :: String -> (a -> Doc) -> ParsecParser a
  -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b
listFieldParsec = listFieldWithSepParsec fsep
-- | The type of a function which, given a name-value pair of an
-- unrecognized field, and the current structure being built,
-- decides whether to incorporate the unrecognized field
-- (by returning Just x, where x is a possibly modified version
-- of the structure being built), or not (by returning Nothing).
type UnrecFieldParser a = (String,String) -> a -> Maybe a
------------------------------------------------------------------------------
-- The data type for our three syntactic categories
data Field
    = F LineNo String String
      -- ^ A regular @<property>: <value>@ field
    | Section LineNo String String [Field]
      -- ^ A section with a name and possible parameter.  The syntactic
      -- structure is:
      --
      -- @
      --   <sectionname> <arg> {
      --     <field>*
      --   }
      -- @
      deriving (Show
               ,Eq)   -- for testing

-- | The line number a field or section starts on.
lineNo :: Field -> LineNo
lineNo (F n _ _) = n
lineNo (Section n _ _ _) = n
-- | Parse the given bytes with the modern field parser and convert the
-- result to the legacy 'Field' representation, translating warnings and
-- (the position of) a parse error along the way.
readFields :: BS.ByteString -> ParseResult [Field]
readFields input = case Fields.readFields' input of
  Right (fs, ws) -> ParseOk
    [ PWarning msg | Fields.PWarning _ _ msg <- Fields.toPWarnings ws ]
    (legacyFields fs)
  Left perr -> ParseFailed $ NoParse
    (PE.showErrorMessages
      "or" "unknown parse error" "expecting" "unexpected" "end of file"
      (PE.errorMessages perr))
    (PP.sourceLine pos)
    where
      pos = PE.errorPos perr

-- | Convert modern fields to the legacy representation.
legacyFields :: [Fields.Field Parsec.Position] -> [Field]
legacyFields = map legacyField

legacyField :: Fields.Field Parsec.Position -> Field
legacyField (Fields.Field (Fields.Name pos name) fls) =
  F (posToLineNo pos) (fromUTF8BS name) (Fields.fieldLinesToString fls)
legacyField (Fields.Section (Fields.Name pos name) args fs) =
  Section (posToLineNo pos) (fromUTF8BS name) (Fields.sectionArgsToString args) (legacyFields fs)

-- | Project the 1-based row out of a parsec position.
posToLineNo :: Parsec.Position -> LineNo
posToLineNo (Parsec.Position row _) = row
------------------------------------------------------------------------------
-- urgh, we can't define optQuotes :: ReadP r a -> ReadP r a
-- because the "compat" version of ReadP isn't quite powerful enough. In
-- particular, the type of <++ is ReadP r r -> ReadP r a -> ReadP r a
-- Hence the trick above to make 'lic' polymorphic.
-- Different than the naive version. it turns out Read instance for String accepts
-- the ['a', 'b'] syntax, which we do not want. In particular it messes
-- up any token starting with [].
-- | Parse a Haskell string literal (using the 'Read' machinery for
-- lexing so escapes are handled correctly).
parseHaskellString :: ReadP r String
parseHaskellString =
  readS_to_P $
    Read.readPrec_to_S (do Read.String s <- Read.lexP; return s) 0

-- | A token is either a Haskell string literal or a run of characters
-- containing neither whitespace nor commas.
parseTokenQ :: ReadP r String
parseTokenQ = parseHaskellString <++ munch1 (\x -> not (isSpace x) && x /= ',')

parseSpaceList :: ReadP r a -- ^The parser for the stuff between commas
               -> ReadP r [a]
parseSpaceList p = sepBy p skipSpaces

-- This version avoid parse ambiguity for list element parsers
-- that have multiple valid parses of prefixes.
parseOptCommaList :: ReadP r a -> ReadP r [a]
parseOptCommaList p = sepBy p localSep
  where
    -- The separator must not be empty or it introduces ambiguity
    localSep = (skipSpaces >> char ',' >> skipSpaces)
           +++ (satisfy isSpace >> skipSpaces)

-- | Run a 'ReadP' parser, returning the first parse that consumes the
-- whole input (modulo trailing whitespace), if any.
readPToMaybe :: ReadP a a -> String -> Maybe a
readPToMaybe p str = listToMaybe [ r | (r,s) <- readP_to_S p str
                                     , all isSpace s ]
| null | https://raw.githubusercontent.com/haskell/cabal/32259a1518d45723b0bbf989088bee61b07d4bb6/cabal-install/src/Distribution/Deprecated/ParseUtils.hs | haskell | ---------------------------------------------------------------------------
|
Module : Distribution.Deprecated.ParseUtils
License : BSD3
Maintainer :
Portability : portable
The @.cabal@ file format is not trivial, especially with the introduction
of configurations and the section syntax that goes with that. This module
has a bunch of parsing functions that is used by the @.cabal@ parser and a
couple others. It has the parsing framework code and also little parsers for
many of the formats we get in various @.cabal@ file fields, like module
names, comma separated lists etc.
This module is meant to be local-only to Distribution...
# OPTIONS_HADDOCK hide #
# LANGUAGE Rank2Types #
-----------------------------------------------------------------------------
TODO: what is this double parse thing all about?
| Field descriptor. The parameter @a@ parameterizes over where the field's
value is stored in.
string @str@ and stores the result in @x@ if the parse was
successful. Otherwise, reports an error on line number @n@.
Lift a field descriptor storing into an 'a' to a field descriptor storing
into a 'b'.
commaNewLineListField it pretty prints on multiple lines
| The type of a function which, given a name-value pair of an
unrecognized field, and the current structure being built,
decides whether to incorporate the unrecognized field
(by returning Just x, where x is a possibly modified version
of the structure being built), or not (by returning Nothing).
----------------------------------------------------------------------------
^ A regular @<property>: <value>@ field
^ A section with a name and possible parameter. The syntactic
structure is:
@
<sectionname> <arg> {
<field>*
}
@
for testing
----------------------------------------------------------------------------
urgh, we can't define optQuotes :: ReadP r a -> ReadP r a
because the "compat" version of ReadP isn't quite powerful enough. In
particular, the type of <++ is ReadP r r -> ReadP r a -> ReadP r a
Hence the trick above to make 'lic' polymorphic.
the ['a', 'b'] syntax, which we do not want. In particular it messes
up any token starting with [].
^The parser for the stuff between commas
This version avoid parse ambiguity for list element parsers
that have multiple valid parses of prefixes.
The separator must not be empty or it introduces ambiguity | # LANGUAGE CPP #
Copyright : ( c ) The University of Glasgow 2004
Utilities for parsing ' PackageDescription ' and ' InstalledPackageInfo ' .
module Distribution.Deprecated.ParseUtils (
LineNo, PError(..), PWarning(..), locatedErrorMsg, syntaxError, warning,
runP, runE, ParseResult(..), parseFail, showPWarning,
Field(..), lineNo,
FieldDescr(..), readFields,
parseHaskellString, parseTokenQ,
parseOptCommaList,
showFilePath, showToken, showFreeText,
field, simpleField, listField, listFieldWithSep, spaceListField,
newLineListField,
liftField,
readPToMaybe,
fieldParsec, simpleFieldParsec,
listFieldParsec,
commaListFieldParsec,
commaNewLineListFieldParsec,
UnrecFieldParser,
) where
import Distribution.Client.Compat.Prelude hiding (get)
import Prelude ()
import Distribution.Deprecated.ReadP as ReadP hiding (get)
import Distribution.Pretty
import Distribution.ReadE
import Distribution.Utils.Generic
import System.FilePath (normalise)
import Text.PrettyPrint (Doc, punctuate, comma, fsep, sep)
import qualified Text.Read as Read
import qualified Control.Monad.Fail as Fail
import Distribution.Parsec (ParsecParser, parsecLeadingCommaList, parsecLeadingOptCommaList)
import qualified Data.ByteString as BS
import qualified Distribution.Fields as Fields
import qualified Distribution.Fields.Field as Fields
import qualified Distribution.Parsec as Parsec
import qualified Distribution.Fields.LexerMonad as Fields
import qualified Text.Parsec.Error as PE
import qualified Text.Parsec.Pos as PP
type LineNo = Int
data PError = AmbiguousParse String LineNo
| NoParse String LineNo
| TabsError LineNo
| FromString String (Maybe LineNo)
deriving (Eq, Show)
data PWarning = PWarning String
| UTFWarning LineNo String
deriving (Eq, Show)
showPWarning :: FilePath -> PWarning -> String
showPWarning fpath (PWarning msg) =
normalise fpath ++ ": " ++ msg
showPWarning fpath (UTFWarning line fname) =
normalise fpath ++ ":" ++ show line
++ ": Invalid UTF-8 text in the '" ++ fname ++ "' field."
data ParseResult a = ParseFailed PError | ParseOk [PWarning] a
deriving Show
instance Functor ParseResult where
fmap _ (ParseFailed err) = ParseFailed err
fmap f (ParseOk ws x) = ParseOk ws $ f x
instance Applicative ParseResult where
pure = ParseOk []
(<*>) = ap
instance Monad ParseResult where
return = pure
ParseFailed err >>= _ = ParseFailed err
ParseOk ws x >>= f = case f x of
ParseFailed err -> ParseFailed err
ParseOk ws' x' -> ParseOk (ws'++ws) x'
#if !(MIN_VERSION_base(4,9,0))
fail = parseResultFail
#elif !(MIN_VERSION_base(4,13,0))
fail = Fail.fail
#endif
instance Foldable ParseResult where
foldMap _ (ParseFailed _ ) = mempty
foldMap f (ParseOk _ x) = f x
instance Traversable ParseResult where
traverse _ (ParseFailed err) = pure (ParseFailed err)
traverse f (ParseOk ws x) = ParseOk ws <$> f x
instance Fail.MonadFail ParseResult where
fail = parseResultFail
parseResultFail :: String -> ParseResult a
parseResultFail s = parseFail (FromString s Nothing)
parseFail :: PError -> ParseResult a
parseFail = ParseFailed
runP :: LineNo -> String -> ReadP a a -> String -> ParseResult a
runP line fieldname p s =
case [ x | (x,"") <- results ] of
[a] -> ParseOk (utf8Warnings line fieldname s) a
Ca n't we just do the all isSpace test the first time ?
[] -> case [ x | (x,ys) <- results, all isSpace ys ] of
[a] -> ParseOk (utf8Warnings line fieldname s) a
[] -> ParseFailed (NoParse fieldname line)
_ -> ParseFailed (AmbiguousParse fieldname line)
_ -> ParseFailed (AmbiguousParse fieldname line)
where results = readP_to_S p s
runE :: LineNo -> String -> ReadE a -> String -> ParseResult a
runE line fieldname p s =
case runReadE p s of
Right a -> ParseOk (utf8Warnings line fieldname s) a
Left e -> syntaxError line $
"Parse of field '" ++ fieldname ++ "' failed (" ++ e ++ "): " ++ s
utf8Warnings :: LineNo -> String -> String -> [PWarning]
utf8Warnings line fieldname s =
take 1 [ UTFWarning n fieldname
| (n,l) <- zip [line..] (lines s)
, '\xfffd' `elem` l ]
locatedErrorMsg :: PError -> (Maybe LineNo, String)
locatedErrorMsg (AmbiguousParse f n) = (Just n,
"Ambiguous parse in field '"++f++"'.")
locatedErrorMsg (NoParse f n) = (Just n,
"Parse of field '"++f++"' failed.")
locatedErrorMsg (TabsError n) = (Just n, "Tab used as indentation.")
locatedErrorMsg (FromString s n) = (n, s)
syntaxError :: LineNo -> String -> ParseResult a
syntaxError n s = ParseFailed $ FromString s (Just n)
warning :: String -> ParseResult ()
warning s = ParseOk [PWarning s] ()
data FieldDescr a
= FieldDescr
{ fieldName :: String
, fieldGet :: a -> Doc
, fieldSet :: LineNo -> String -> a -> ParseResult a
^ @fieldSet n str x@ Parses the field value from the given input
}
field :: String -> (a -> Doc) -> ReadP a a -> FieldDescr a
field name showF readF =
FieldDescr name showF (\line val _st -> runP line name readF val)
fieldParsec :: String -> (a -> Doc) -> ParsecParser a -> FieldDescr a
fieldParsec name showF readF =
FieldDescr name showF $ \line val _st -> case explicitEitherParsec readF val of
Left err -> ParseFailed (FromString err (Just line))
Right x -> ParseOk [] x
liftField :: (b -> a) -> (a -> b -> b) -> FieldDescr a -> FieldDescr b
liftField get set (FieldDescr name showF parseF)
= FieldDescr name (showF . get)
(\line str b -> do
a <- parseF line str (get b)
return (set a b))
Parser combinator for simple fields . Takes a field name , a pretty printer ,
a parser function , an accessor , and a setter , returns a FieldDescr over the
compoid structure .
simpleField :: String -> (a -> Doc) -> ReadP a a
-> (b -> a) -> (a -> b -> b) -> FieldDescr b
simpleField name showF readF get set
= liftField get set $ field name showF readF
simpleFieldParsec :: String -> (a -> Doc) -> ParsecParser a
-> (b -> a) -> (a -> b -> b) -> FieldDescr b
simpleFieldParsec name showF readF get set
= liftField get set $ fieldParsec name showF readF
commaListFieldWithSepParsec :: Separator -> String -> (a -> Doc) -> ParsecParser a
-> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b
commaListFieldWithSepParsec separator name showF readF get set =
liftField get set' $
fieldParsec name showF' (parsecLeadingCommaList readF)
where
set' xs b = set (get b ++ xs) b
showF' = separator . punctuate comma . map showF
commaListFieldParsec :: String -> (a -> Doc) -> ParsecParser a
-> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b
commaListFieldParsec = commaListFieldWithSepParsec fsep
commaNewLineListFieldParsec
:: String -> (a -> Doc) -> ParsecParser a
-> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b
commaNewLineListFieldParsec = commaListFieldWithSepParsec sep
spaceListField :: String -> (a -> Doc) -> ReadP [a] a
-> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b
spaceListField name showF readF get set =
liftField get set' $
field name showF' (parseSpaceList readF)
where
set' xs b = set (get b ++ xs) b
showF' = fsep . map showF
this is a different definition from listField , like
newLineListField :: String -> (a -> Doc) -> ReadP [a] a
-> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b
newLineListField = listFieldWithSep sep
listFieldWithSep :: Separator -> String -> (a -> Doc) -> ReadP [a] a
-> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b
listFieldWithSep separator name showF readF get set =
liftField get set' $
field name showF' (parseOptCommaList readF)
where
set' xs b = set (get b ++ xs) b
showF' = separator . map showF
listFieldWithSepParsec :: Separator -> String -> (a -> Doc) -> ParsecParser a
-> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b
listFieldWithSepParsec separator name showF readF get set =
liftField get set' $
fieldParsec name showF' (parsecLeadingOptCommaList readF)
where
set' xs b = set (get b ++ xs) b
showF' = separator . map showF
listField :: String -> (a -> Doc) -> ReadP [a] a
-> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b
listField = listFieldWithSep fsep
listFieldParsec
:: String -> (a -> Doc) -> ParsecParser a
-> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b
listFieldParsec = listFieldWithSepParsec fsep
type UnrecFieldParser a = (String,String) -> a -> Maybe a
The data type for our three syntactic categories
data Field
= F LineNo String String
| Section LineNo String String [Field]
deriving (Show
lineNo :: Field -> LineNo
lineNo (F n _ _) = n
lineNo (Section n _ _ _) = n
readFields :: BS.ByteString -> ParseResult [Field]
readFields input = case Fields.readFields' input of
Right (fs, ws) -> ParseOk
[ PWarning msg | Fields.PWarning _ _ msg <- Fields.toPWarnings ws ]
(legacyFields fs)
Left perr -> ParseFailed $ NoParse
(PE.showErrorMessages
"or" "unknown parse error" "expecting" "unexpected" "end of file"
(PE.errorMessages perr))
(PP.sourceLine pos)
where
pos = PE.errorPos perr
legacyFields :: [Fields.Field Parsec.Position] -> [Field]
legacyFields = map legacyField
legacyField :: Fields.Field Parsec.Position -> Field
legacyField (Fields.Field (Fields.Name pos name) fls) =
F (posToLineNo pos) (fromUTF8BS name) (Fields.fieldLinesToString fls)
legacyField (Fields.Section (Fields.Name pos name) args fs) =
Section (posToLineNo pos) (fromUTF8BS name) (Fields.sectionArgsToString args) (legacyFields fs)
posToLineNo :: Parsec.Position -> LineNo
posToLineNo (Parsec.Position row _) = row
Different than the naive version . it turns out Read instance for accepts
parseHaskellString :: ReadP r String
parseHaskellString =
readS_to_P $
Read.readPrec_to_S (do Read.String s <- Read.lexP; return s) 0
parseTokenQ :: ReadP r String
parseTokenQ = parseHaskellString <++ munch1 (\x -> not (isSpace x) && x /= ',')
-> ReadP r [a]
parseSpaceList p = sepBy p skipSpaces
parseOptCommaList :: ReadP r a -> ReadP r [a]
parseOptCommaList p = sepBy p localSep
where
localSep = (skipSpaces >> char ',' >> skipSpaces)
+++ (satisfy isSpace >> skipSpaces)
readPToMaybe :: ReadP a a -> String -> Maybe a
readPToMaybe p str = listToMaybe [ r | (r,s) <- readP_to_S p str
, all isSpace s ]
|
0a13312a826ed4e39e9860f317b201ec08d20a2122737c6b6064bed827678869 | static-analysis-engineering/codehawk | bCHByteUtilities.ml | = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Binary Analyzer
Author : ------------------------------------------------------------------------------
The MIT License ( MIT )
Copyright ( c ) 2005 - 2019 Kestrel Technology LLC
Copyright ( c ) 2020 ( c ) 2021 - 2023 Aarno Labs LLC
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Binary Analyzer
Author: Henny Sipma
------------------------------------------------------------------------------
The MIT License (MIT)
Copyright (c) 2005-2019 Kestrel Technology LLC
Copyright (c) 2020 Henny Sipma
Copyright (c) 2021-2023 Aarno Labs LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================================= *)
chlib
open CHNumerical
open CHPretty
(* chutil *)
open CHXmlDocument
(* bchlib *)
open BCHBasicTypes
open BCHDoubleword
open BCHLibTypes
module TR = CHTraceResult
(* Collections (sets/tables) keyed on doubleword values; used below to
   record which addresses have been marked as written. *)
module DoublewordCollections = CHCollections.Make
  (struct
    type t = doubleword_int
    let compare dw1 dw2 = dw1#compare dw2
    let toPretty dw = STR dw#to_fixed_length_hex_string
  end)
(* [is_printable c] is true when the byte value [c] is a printable ASCII
   code (space through tilde, i.e. 32 <= c < 127). *)
let is_printable c = (c >= 32 && c < 127)

(* Character variant of [is_printable]. *)
let is_printable_char c = is_printable (Char.code c)
(* Render a byte value as two lowercase hex digits, high nibble first. *)
let byte_to_string (b:int):string =
  Printf.sprintf "%x%x" (b lsr 4) (b mod 16)
(* Round address [a] up to the next 16-byte boundary; identity when [a]
   is already aligned. *)
let get_aligned_address (a:doubleword_int) =
  let n16 = mkNumerical 16 in
  let n = a#to_numerical in
  if (n#modulo n16)#equal numerical_zero then
    a
  else
    (* (n / 16) * 16 + 16 : smallest multiple of 16 strictly above n *)
    let n = ((n#div n16)#mult n16)#add n16 in
    TR.tget_ok (numerical_to_doubleword n)

(* Round address [a] up to the next 1 KB boundary; identity when [a] is
   already aligned. *)
let get_1kaligned_address (a:doubleword_int) =
  let n1024 = mkNumerical 1024 in
  let n = a#to_numerical in
  if (n#modulo n1024)#equal numerical_zero then
    a
  else
    let n = ((n#div n1024)#mult n1024)#add n1024 in
    TR.tget_ok (numerical_to_doubleword n)
(* return the byte_string to a string containing 16 bytes in hexadecimal form
   per line *)
(* Render [byte_string] as a classic hex dump starting at [start_address]:
   one line per 16 bytes showing the address, four space-separated groups
   of four hex-encoded bytes, an optional written-marker column, and the
   printable-ASCII view of the bytes (non-printables shown as '.').
   [markwritten], when given, lists addresses whose 4-byte words are
   flagged with "W" instead of "." in the marker column. *)
let rawdata_to_string ?(markwritten:(doubleword_int list option)) (byte_string:string)
    (start_address:doubleword_int):string =
  let ch = IO.input_string byte_string in
  let current_address = ref start_address in
  let len = String.length byte_string in
  let lines = len / 16 in
  let remaining_bytes = len mod 16 in
  let s = ref "" in
  (* staging buffer for the 16 bytes of the current line *)
  let a = Array.make 16 0 in
  (* marker-column printer: one "W "/". " per 4-byte word of the line *)
  let pr_marked:(doubleword_int -> string) =
    match markwritten with
      Some l ->
        let mwset = new DoublewordCollections.set_t in
        let _ = mwset#addList l in
        fun address ->
          let ws = ref "" in
          let aa = ref address in
          begin
            for i = 1 to 4 do
              (if mwset#has !aa then ws := !ws ^ "W " else ws := !ws ^ ". " );
              aa := (!aa)#add_int 4
            done;
            !ws
          end
    | _ -> fun _ -> "" in
  (* flush the 16 bytes buffered in [a] as one dump line *)
  let bytes_to_line_output () =
    begin
      s := !s ^ !current_address#to_fixed_length_hex_string ;
      s := !s ^ " " ;
      for i=0 to 3 do
        for j=0 to 3 do
          s := !s ^ (byte_to_string a.(4*i+j));
        done;
        s := !s ^ " " ;
      done;
      s := !s ^ " " ^ (pr_marked !current_address) ^ " " ;
      s := !s ^ " " ;
      for i=0 to 15 do
        if is_printable a.(i) then
          s := !s ^ (Printf.sprintf "%c" (Char.chr a.(i)))
        else
          s := !s ^ "."
      done;
      s := !s ^ (Printf.sprintf "\n")
    end in
  begin
    (* full 16-byte lines *)
    for i=0 to lines-1 do
      begin
        for j=0 to 15 do a.(j) <- IO.read_byte ch done;
        bytes_to_line_output ();
        current_address := !current_address#add_int 16
      end
    done;
    (* trailing partial line, if any; padding keeps the ASCII column
       aligned with the full lines above *)
    (if remaining_bytes > 0 then
      let k = ref 0 in
      let m = ref 0 in
      for j=0 to remaining_bytes-1 do a.(j) <- IO.read_byte ch done;
      begin
        s := !s ^ !current_address#to_fixed_length_hex_string ;
        s := !s ^ " " ;
        while (4*(!k) + (!m)) < remaining_bytes do
          while (4*(!k) + (!m)) < remaining_bytes && !m < 4 do
            s := !s ^ (byte_to_string a.(4*(!k)+(!m)));
            m := !m + 1 ;
          done ;
          k := !k + 1;
          m := 0 ;
          s := !s ^ " " ;
        done ;
        for i=0 to (16 - remaining_bytes) do s := !s ^ " " done ;
        for i=0 to remaining_bytes-1 do
          if is_printable a.(i) then
            s := !s ^ (Printf.sprintf "%c" (Char.chr a.(i)))
          else
            s := !s ^ "."
        done
      end) ;
    !s
  end
(* Emit [byte_string] into [node] as a sequence of <aline> elements, one
   per 16 bytes, each carrying the address ("va"), the hex-encoded bytes
   ("bytes") and the printable-ASCII rendering ("print"). A trailing
   partial line additionally records its byte count in "length". *)
let write_xml_raw_data_block
    (node:xml_element_int)
    (byte_string:string)
    (start_address:doubleword_int) =
  let append = node#appendChildren in
  let ch = IO.input_string byte_string in
  let currentAddr = ref start_address in
  let len = String.length byte_string in
  let lines = len / 16 in
  let remainingBytes = len mod 16 in
  let nodes = ref [] in
  (* staging buffer for the 16 bytes of the current line *)
  let a = Array.make 16 0 in
  (* printable rendering of byte [i] of the current line *)
  let get_char i =
    if is_printable a.(i) then
      Printf.sprintf "%c" (Char.chr a.(i))
    else
      "." in
  (* fill in "bytes"/"print" for a full 16-byte line: four groups of four
     bytes separated by a single space *)
  let write_xml_line lNode =
    let s = ref "" in
    let p = ref "" in
    begin
      (for i = 0 to 3 do
        for j=0 to 3 do
          s := !s ^ (byte_to_string a.(4*i+j))
        done; s := !s ^ " "
      done) ;
      (for i = 0 to 15 do p := !p ^ (get_char i) done);
      lNode#setAttribute "bytes" !s ;
      lNode#setAttribute "print" !p ;
    end in
  (* fill in "bytes"/"print"/"length" for the trailing partial line; the
     bytes are written without group separators *)
  let write_xml_partial_line lNode =
    let s = ref "" in
    let p = ref "" in
    begin
      (for i = 0 to remainingBytes-1 do
        s := !s ^ (byte_to_string a.(i))
      done) ;
      (for i = 0 to remainingBytes-1 do
        p := !p ^ (get_char i)
      done) ;
      lNode#setAttribute "bytes" !s ;
      lNode#setAttribute "print" !p ;
      lNode#setIntAttribute "length" remainingBytes
    end in
  begin
    for i=0 to lines-1 do
      let lNode = xmlElement "aline" in
      begin
        lNode#setAttribute "va" !currentAddr#to_hex_string ;
        for j=0 to 15 do a.(j) <- IO.read_byte ch done;
        write_xml_line lNode ;
        nodes := lNode :: !nodes ;
        currentAddr := !currentAddr#add_int 16
      end
    done;
    (if remainingBytes > 0 then
      let lNode = xmlElement "aline" in
      begin
        lNode#setAttribute "va" !currentAddr#to_hex_string ;
        for j=0 to remainingBytes-1 do a.(j) <- IO.read_byte ch done;
        write_xml_partial_line lNode ;
        nodes := lNode :: !nodes
      end) ;
    (* nodes were accumulated in reverse order *)
    append (List.rev !nodes)
  end
(* Split [byte_string] into blocks of at most 160000 bytes and write each
   block as an <ablock> child of [node] (see write_xml_raw_data_block),
   recording the block index on each child and the total count in the
   "blocks" attribute. *)
let write_xml_raw_data
    (node:xml_element_int)
    (byte_string:string)
    (start_address:doubleword_int) =
  let blocksize = 160000 in
  (* split [str] into chunks of at most [len] characters; [result] is the
     accumulator, kept in reverse order until the final List.rev *)
  let rec strdivide str len result =
    let strlen = String.length str in
    if strlen <= len then
      List.rev (str::result)
    else
      let strpre = String.sub str 0 len in
      let strsuf = String.sub str len (strlen - len) in
      strdivide strsuf len (strpre :: result) in
  let strblocks = strdivide byte_string blocksize [] in
  begin
    node#appendChildren (List.mapi (fun i bstr ->
      let bNode = xmlElement "ablock" in
      (* each block starts blocksize bytes after the previous one *)
      let addr = start_address#add_int (i * blocksize) in
      begin
        write_xml_raw_data_block bNode bstr addr ;
        bNode#setIntAttribute "block" i ;
        bNode
      end) strblocks) ;
    node#setIntAttribute "blocks" (List.length strblocks)
  end
let read_xml_raw_data_block (node:xml_element_int) =
let bNodes = node#getTaggedChildren "aline" in
let ch = IO.output_string () in
begin
List.iter (fun n ->
let has = n#hasNamedAttribute in
let geti = n#getIntAttribute in
let byteString = n#getAttribute "bytes" in
if has "length" then
for i = 0 to (geti "length") - 1 do
let s = "0x" ^ (String.sub byteString (i*2) 2) in
try
let b = int_of_string s in IO.write_byte ch b
with Failure _ ->
begin
pr_debug [ STR "Failure (length): " ; STR s ; NL ] ;
raise (Failure "int-of-string")
end
done
else
for i = 0 to 3 do
for j = 0 to 3 do
let s = "0x" ^ (String.sub byteString ((i*9) + (j*2)) 2) in
try
let b = int_of_string s in IO.write_byte ch b
with Failure _ ->
begin
pr_debug [ STR "Failure: " ; STR s ; NL ] ;
raise (Failure "int-of-string")
end
done
done) bNodes ;
IO.close_out ch
end
let write_doubleword_to_bytestring (dw:doubleword_int) =
let ch = IO.output_string () in
let hexstring = dw#to_fixed_length_hex_string in
begin
for i = 0 to 3 do
let s = "0x" ^ (String.sub hexstring ((3-i)*2) 2) in
try
let b = int_of_string s in IO.write_byte ch b
with
|Failure m ->
begin
pr_debug [
STR "Failure in bCHByteUtilities:write_doubleword_to_bytestring ";
STR s;
STR ": ";
STR m;
NL];
raise
(Failure
"bCHByteUtilities:write_doubleword_to_byte_string:int-of-string")
end
done ;
IO.close_out ch
end
let write_hex_bytes_to_bytestring (s: string) =
let ch = IO.output_string () in
let _ =
if ((String.length s) mod 2) = 1 then
raise
(Failure "bCHByteUtilities:write_hex_bytes_to_bytestring:odd") in
let len = (String.length s) / 2 in
begin
for i = 0 to (len - 1) do
let bx = "0x" ^ (String.sub s (i * 2) 2) in
try
let b = int_of_string bx in IO.write_byte ch b
with
| Failure m ->
raise
(Failure
"bCHByteUtilities:write_hex_bytes_to_bytestring:int_of_string")
done;
IO.close_out ch
end
let read_xml_raw_data (node:xml_element_int) =
String.concat
"" (List.map read_xml_raw_data_block (node#getTaggedChildren "ablock"))
let byte_string_to_spaced_string (byte_string:string):string =
let ch = IO.input_string byte_string in
let s = ref "" in
let len = String.length byte_string in
begin
for i = 0 to len-1 do s := !s ^ (byte_to_string (IO.read_byte ch)) ^ " " done;
!s
end
let byte_string_to_printed_string (byte_string:string):string =
let ch = IO.input_string byte_string in
let s = ref "" in
let len = String.length byte_string in
begin
for i = 0 to len-1 do s := !s ^ (byte_to_string (IO.read_byte ch)) done;
!s
end
converts a little - endian hex string for a doubleword extracted by pattern
matching to the corresponding doubleword string
matching to the corresponding doubleword string *)
let littleendian_hexstring_todwstring (s:string) =
let b = Bytes.of_string s in
let bnew = Bytes.copy b in
let cp pn po = Bytes.set bnew pn (Bytes.get b po) in
begin
cp 0 6;
cp 1 7;
cp 2 4;
cp 3 5;
cp 4 2;
cp 5 3;
cp 6 0;
cp 7 1;
"0x" ^ Bytes.to_string bnew
end
let littleendian_hexstring_towstring (s:string) =
let b = Bytes.of_string s in
let bnew = Bytes.copy b in
let cp pn po = Bytes.set bnew pn (Bytes.get b po) in
begin
cp 0 2;
cp 1 3;
cp 2 0;
cp 3 1;
"0x" ^ Bytes.to_string bnew
end
let decode_string_aux (s:string) (va:doubleword_int)
(enc:(string * doubleword_int * doubleword_int * doubleword_int * int)) =
let (_, start, size, key, width) = enc in
let offset =
fail_tfold
(trerror_record (STR "decode_string_aux"))
(fun i -> i + width)
(start#subtract_to_int va) in
let prefix = String.sub s 0 offset in
let encstring = String.sub s offset size#to_int in
let suffix =
String.sub
s (offset + size#to_int) ((String.length s) - (offset + size#to_int)) in
try
let ch = IO.input_string encstring in
if width = 4 then
let read_doubleword ch =
let l = IO.read_ui16 ch in
let h = IO.read_ui16 ch in
TR.tget_ok (make_doubleword l h) in
let result = ref prefix in
begin
for i = 0 to ((size#to_int / 4) - 2) do
let dw = read_doubleword ch in
let decoded = dw#xor key in
result := !result ^ (write_doubleword_to_bytestring decoded);
done;
!result ^ suffix
end
else if width = 1 then
let result = ref prefix in
begin
for i = 0 to size#to_int - 1 do
let b = IO.read_byte ch in
let decoded = b lxor key#to_int in
let ch = IO.output_string () in
let _ = IO.write_byte ch decoded in
let bs = IO.close_out ch in
result := !result ^ bs
done;
!result ^ suffix
end
else
s
with
_ ->
let encoding_to_pretty (ty,va,size,key,width) =
LBLOCK [
STR "(";
STR ty;
STR ",";
va#toPretty;
STR ",";
size#toPretty;
STR ",";
key#toPretty;
STR ",";
INT width] in
begin
pr_debug [
STR "Error in decode_string with ";
encoding_to_pretty enc;
NL;
STR " and string length ";
INT (String.length encstring);
STR " and base address ";
va#toPretty;
NL;
STR " and offset ";
INT offset;
NL];
raise (BCH_failure (STR "Error in decoding"))
end
let decode_string (str:string) (va:doubleword_int)
(encodings:
(string * doubleword_int * doubleword_int * doubleword_int * int) list) =
List.fold_left (fun s e -> decode_string_aux s va e) str encodings
let read_hex_stream_file (filename:string) =
let ch = open_in filename in
let outch = IO.output_string () in
let _ = try
while true do
let line = input_line ch in
for i = 0 to 39 do
let s = "0x" ^ (String.sub line (i*2) 2) in
try
let b = int_of_string s in IO.write_byte outch b
with
| Failure _ ->
begin
pr_debug [ STR "Failure in reading stream file: "; STR s; NL];
raise (Failure "read_stream:int_of_string")
end
done
done
with _ -> () in
IO.close_out outch
| null | https://raw.githubusercontent.com/static-analysis-engineering/codehawk/dd2c3b9f84b4b5f3c88898505ee912e1e461e809/CodeHawk/CHB/bchlib/bCHByteUtilities.ml | ocaml | chutil
bchlib | = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Binary Analyzer
Author : ------------------------------------------------------------------------------
The MIT License ( MIT )
Copyright ( c ) 2005 - 2019 Kestrel Technology LLC
Copyright ( c ) 2020 ( c ) 2021 - 2023 Aarno Labs LLC
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Binary Analyzer
Author: Henny Sipma
------------------------------------------------------------------------------
The MIT License (MIT)
Copyright (c) 2005-2019 Kestrel Technology LLC
Copyright (c) 2020 Henny Sipma
Copyright (c) 2021-2023 Aarno Labs LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================================= *)
chlib
open CHNumerical
open CHPretty
open CHXmlDocument
open BCHBasicTypes
open BCHDoubleword
open BCHLibTypes
module TR = CHTraceResult
module DoublewordCollections = CHCollections.Make
(struct
type t = doubleword_int
let compare dw1 dw2 = dw1#compare dw2
let toPretty dw = STR dw#to_fixed_length_hex_string
end)
let is_printable c = (c >= 32 && c < 127)
let is_printable_char c = is_printable (Char.code c)
let byte_to_string (b:int):string =
let l = b mod 16 in
let h = b lsr 4 in
Printf.sprintf "%x%x" h l
let get_aligned_address (a:doubleword_int) =
let n16 = mkNumerical 16 in
let n = a#to_numerical in
if (n#modulo n16)#equal numerical_zero then
a
else
let n = ((n#div n16)#mult n16)#add n16 in
TR.tget_ok (numerical_to_doubleword n)
let get_1kaligned_address (a:doubleword_int) =
let n1024 = mkNumerical 1024 in
let n = a#to_numerical in
if (n#modulo n1024)#equal numerical_zero then
a
else
let n = ((n#div n1024)#mult n1024)#add n1024 in
TR.tget_ok (numerical_to_doubleword n)
return the byte_string to a string containing 16 bytes in hexadecimal form
per line
per line *)
let rawdata_to_string ?(markwritten:(doubleword_int list option)) (byte_string:string)
(start_address:doubleword_int):string =
let ch = IO.input_string byte_string in
let current_address = ref start_address in
let len = String.length byte_string in
let lines = len / 16 in
let remaining_bytes = len mod 16 in
let s = ref "" in
let a = Array.make 16 0 in
let pr_marked:(doubleword_int -> string) =
match markwritten with
Some l ->
let mwset = new DoublewordCollections.set_t in
let _ = mwset#addList l in
fun address ->
let ws = ref "" in
let aa = ref address in
begin
for i = 1 to 4 do
(if mwset#has !aa then ws := !ws ^ "W " else ws := !ws ^ ". " );
aa := (!aa)#add_int 4
done;
!ws
end
| _ -> fun _ -> "" in
let bytes_to_line_output () =
begin
s := !s ^ !current_address#to_fixed_length_hex_string ;
s := !s ^ " " ;
for i=0 to 3 do
for j=0 to 3 do
s := !s ^ (byte_to_string a.(4*i+j));
done;
s := !s ^ " " ;
done;
s := !s ^ " " ^ (pr_marked !current_address) ^ " " ;
s := !s ^ " " ;
for i=0 to 15 do
if is_printable a.(i) then
s := !s ^ (Printf.sprintf "%c" (Char.chr a.(i)))
else
s := !s ^ "."
done;
s := !s ^ (Printf.sprintf "\n")
end in
begin
for i=0 to lines-1 do
begin
for j=0 to 15 do a.(j) <- IO.read_byte ch done;
bytes_to_line_output ();
current_address := !current_address#add_int 16
end
done;
(if remaining_bytes > 0 then
let k = ref 0 in
let m = ref 0 in
for j=0 to remaining_bytes-1 do a.(j) <- IO.read_byte ch done;
begin
s := !s ^ !current_address#to_fixed_length_hex_string ;
s := !s ^ " " ;
while (4*(!k) + (!m)) < remaining_bytes do
while (4*(!k) + (!m)) < remaining_bytes && !m < 4 do
s := !s ^ (byte_to_string a.(4*(!k)+(!m)));
m := !m + 1 ;
done ;
k := !k + 1;
m := 0 ;
s := !s ^ " " ;
done ;
for i=0 to (16 - remaining_bytes) do s := !s ^ " " done ;
for i=0 to remaining_bytes-1 do
if is_printable a.(i) then
s := !s ^ (Printf.sprintf "%c" (Char.chr a.(i)))
else
s := !s ^ "."
done
end) ;
!s
end
let write_xml_raw_data_block
(node:xml_element_int)
(byte_string:string)
(start_address:doubleword_int) =
let append = node#appendChildren in
let ch = IO.input_string byte_string in
let currentAddr = ref start_address in
let len = String.length byte_string in
let lines = len / 16 in
let remainingBytes = len mod 16 in
let nodes = ref [] in
let a = Array.make 16 0 in
let get_char i =
if is_printable a.(i) then
Printf.sprintf "%c" (Char.chr a.(i))
else
"." in
let write_xml_line lNode =
let s = ref "" in
let p = ref "" in
begin
(for i = 0 to 3 do
for j=0 to 3 do
s := !s ^ (byte_to_string a.(4*i+j))
done; s := !s ^ " "
done) ;
(for i = 0 to 15 do p := !p ^ (get_char i) done);
lNode#setAttribute "bytes" !s ;
lNode#setAttribute "print" !p ;
end in
let write_xml_partial_line lNode =
let s = ref "" in
let p = ref "" in
begin
(for i = 0 to remainingBytes-1 do
s := !s ^ (byte_to_string a.(i))
done) ;
(for i = 0 to remainingBytes-1 do
p := !p ^ (get_char i)
done) ;
lNode#setAttribute "bytes" !s ;
lNode#setAttribute "print" !p ;
lNode#setIntAttribute "length" remainingBytes
end in
begin
for i=0 to lines-1 do
let lNode = xmlElement "aline" in
begin
lNode#setAttribute "va" !currentAddr#to_hex_string ;
for j=0 to 15 do a.(j) <- IO.read_byte ch done;
write_xml_line lNode ;
nodes := lNode :: !nodes ;
currentAddr := !currentAddr#add_int 16
end
done;
(if remainingBytes > 0 then
let lNode = xmlElement "aline" in
begin
lNode#setAttribute "va" !currentAddr#to_hex_string ;
for j=0 to remainingBytes-1 do a.(j) <- IO.read_byte ch done;
write_xml_partial_line lNode ;
nodes := lNode :: !nodes
end) ;
append (List.rev !nodes)
end
let write_xml_raw_data
(node:xml_element_int)
(byte_string:string)
(start_address:doubleword_int) =
let blocksize = 160000 in
let rec strdivide str len result =
let strlen = String.length str in
if strlen <= len then
List.rev (str::result)
else
let strpre = String.sub str 0 len in
let strsuf = String.sub str len (strlen - len) in
strdivide strsuf len (strpre :: result) in
let strblocks = strdivide byte_string blocksize [] in
begin
node#appendChildren (List.mapi (fun i bstr ->
let bNode = xmlElement "ablock" in
let addr = start_address#add_int (i * blocksize) in
begin
write_xml_raw_data_block bNode bstr addr ;
bNode#setIntAttribute "block" i ;
bNode
end) strblocks) ;
node#setIntAttribute "blocks" (List.length strblocks)
end
let read_xml_raw_data_block (node:xml_element_int) =
let bNodes = node#getTaggedChildren "aline" in
let ch = IO.output_string () in
begin
List.iter (fun n ->
let has = n#hasNamedAttribute in
let geti = n#getIntAttribute in
let byteString = n#getAttribute "bytes" in
if has "length" then
for i = 0 to (geti "length") - 1 do
let s = "0x" ^ (String.sub byteString (i*2) 2) in
try
let b = int_of_string s in IO.write_byte ch b
with Failure _ ->
begin
pr_debug [ STR "Failure (length): " ; STR s ; NL ] ;
raise (Failure "int-of-string")
end
done
else
for i = 0 to 3 do
for j = 0 to 3 do
let s = "0x" ^ (String.sub byteString ((i*9) + (j*2)) 2) in
try
let b = int_of_string s in IO.write_byte ch b
with Failure _ ->
begin
pr_debug [ STR "Failure: " ; STR s ; NL ] ;
raise (Failure "int-of-string")
end
done
done) bNodes ;
IO.close_out ch
end
let write_doubleword_to_bytestring (dw:doubleword_int) =
let ch = IO.output_string () in
let hexstring = dw#to_fixed_length_hex_string in
begin
for i = 0 to 3 do
let s = "0x" ^ (String.sub hexstring ((3-i)*2) 2) in
try
let b = int_of_string s in IO.write_byte ch b
with
|Failure m ->
begin
pr_debug [
STR "Failure in bCHByteUtilities:write_doubleword_to_bytestring ";
STR s;
STR ": ";
STR m;
NL];
raise
(Failure
"bCHByteUtilities:write_doubleword_to_byte_string:int-of-string")
end
done ;
IO.close_out ch
end
let write_hex_bytes_to_bytestring (s: string) =
let ch = IO.output_string () in
let _ =
if ((String.length s) mod 2) = 1 then
raise
(Failure "bCHByteUtilities:write_hex_bytes_to_bytestring:odd") in
let len = (String.length s) / 2 in
begin
for i = 0 to (len - 1) do
let bx = "0x" ^ (String.sub s (i * 2) 2) in
try
let b = int_of_string bx in IO.write_byte ch b
with
| Failure m ->
raise
(Failure
"bCHByteUtilities:write_hex_bytes_to_bytestring:int_of_string")
done;
IO.close_out ch
end
let read_xml_raw_data (node:xml_element_int) =
String.concat
"" (List.map read_xml_raw_data_block (node#getTaggedChildren "ablock"))
let byte_string_to_spaced_string (byte_string:string):string =
let ch = IO.input_string byte_string in
let s = ref "" in
let len = String.length byte_string in
begin
for i = 0 to len-1 do s := !s ^ (byte_to_string (IO.read_byte ch)) ^ " " done;
!s
end
let byte_string_to_printed_string (byte_string:string):string =
let ch = IO.input_string byte_string in
let s = ref "" in
let len = String.length byte_string in
begin
for i = 0 to len-1 do s := !s ^ (byte_to_string (IO.read_byte ch)) done;
!s
end
converts a little - endian hex string for a doubleword extracted by pattern
matching to the corresponding doubleword string
matching to the corresponding doubleword string *)
let littleendian_hexstring_todwstring (s:string) =
let b = Bytes.of_string s in
let bnew = Bytes.copy b in
let cp pn po = Bytes.set bnew pn (Bytes.get b po) in
begin
cp 0 6;
cp 1 7;
cp 2 4;
cp 3 5;
cp 4 2;
cp 5 3;
cp 6 0;
cp 7 1;
"0x" ^ Bytes.to_string bnew
end
let littleendian_hexstring_towstring (s:string) =
let b = Bytes.of_string s in
let bnew = Bytes.copy b in
let cp pn po = Bytes.set bnew pn (Bytes.get b po) in
begin
cp 0 2;
cp 1 3;
cp 2 0;
cp 3 1;
"0x" ^ Bytes.to_string bnew
end
let decode_string_aux (s:string) (va:doubleword_int)
(enc:(string * doubleword_int * doubleword_int * doubleword_int * int)) =
let (_, start, size, key, width) = enc in
let offset =
fail_tfold
(trerror_record (STR "decode_string_aux"))
(fun i -> i + width)
(start#subtract_to_int va) in
let prefix = String.sub s 0 offset in
let encstring = String.sub s offset size#to_int in
let suffix =
String.sub
s (offset + size#to_int) ((String.length s) - (offset + size#to_int)) in
try
let ch = IO.input_string encstring in
if width = 4 then
let read_doubleword ch =
let l = IO.read_ui16 ch in
let h = IO.read_ui16 ch in
TR.tget_ok (make_doubleword l h) in
let result = ref prefix in
begin
for i = 0 to ((size#to_int / 4) - 2) do
let dw = read_doubleword ch in
let decoded = dw#xor key in
result := !result ^ (write_doubleword_to_bytestring decoded);
done;
!result ^ suffix
end
else if width = 1 then
let result = ref prefix in
begin
for i = 0 to size#to_int - 1 do
let b = IO.read_byte ch in
let decoded = b lxor key#to_int in
let ch = IO.output_string () in
let _ = IO.write_byte ch decoded in
let bs = IO.close_out ch in
result := !result ^ bs
done;
!result ^ suffix
end
else
s
with
_ ->
let encoding_to_pretty (ty,va,size,key,width) =
LBLOCK [
STR "(";
STR ty;
STR ",";
va#toPretty;
STR ",";
size#toPretty;
STR ",";
key#toPretty;
STR ",";
INT width] in
begin
pr_debug [
STR "Error in decode_string with ";
encoding_to_pretty enc;
NL;
STR " and string length ";
INT (String.length encstring);
STR " and base address ";
va#toPretty;
NL;
STR " and offset ";
INT offset;
NL];
raise (BCH_failure (STR "Error in decoding"))
end
let decode_string (str:string) (va:doubleword_int)
(encodings:
(string * doubleword_int * doubleword_int * doubleword_int * int) list) =
List.fold_left (fun s e -> decode_string_aux s va e) str encodings
let read_hex_stream_file (filename:string) =
let ch = open_in filename in
let outch = IO.output_string () in
let _ = try
while true do
let line = input_line ch in
for i = 0 to 39 do
let s = "0x" ^ (String.sub line (i*2) 2) in
try
let b = int_of_string s in IO.write_byte outch b
with
| Failure _ ->
begin
pr_debug [ STR "Failure in reading stream file: "; STR s; NL];
raise (Failure "read_stream:int_of_string")
end
done
done
with _ -> () in
IO.close_out outch
|
ceaebc05451fb5522ad59aac15834a6f4bcba7b098cc3db20133b2f628e9b63f | na4zagin3/satyrographos | command_lint__missing_file.ml | open Core
open Satyrographos_testlib
let satysfi_package_opam =
"satysfi-package.opam", TestLib.opam_file_for_test
~name:"satysfi-package"
~version:"0.1"
()
let satysfi_package_doc_opam =
"satysfi-package-doc.opam", TestLib.opam_file_for_test
~name:"satysfi-package-doc"
~version:"0.1"
~depends:{|
"satysfi" {>= "0.0.5" & < "0.0.6"}
"satyrographos" {>= "0.0.2.6" & < "0.0.3"}
"satysfi-package" {= "0.1"}
|}
()
let satyristes =
"Satyristes", sprintf
{|(version "0.0.2")
(library
(name "package")
(version "0.1")
(sources ((package "test.satyh" "test.satyh")))
(opam "satysfi-package.opam")
(dependencies ()))
|}
let opam_libs = Satyrographos.Library.[
{empty with
name = Some "package";
files = LibraryFiles.of_alist_exn [
"packages/package/test.satyh", `Content ""
]
};
]
let files =
[ satysfi_package_opam;
satysfi_package_doc_opam;
satyristes;
]
let () =
TestCommand.test_lint_command ~opam_libs files
| null | https://raw.githubusercontent.com/na4zagin3/satyrographos/9dbccf05138510c977a67c859bbbb48755470c7f/test/testcases/command_lint__missing_file.ml | ocaml | open Core
open Satyrographos_testlib
let satysfi_package_opam =
"satysfi-package.opam", TestLib.opam_file_for_test
~name:"satysfi-package"
~version:"0.1"
()
let satysfi_package_doc_opam =
"satysfi-package-doc.opam", TestLib.opam_file_for_test
~name:"satysfi-package-doc"
~version:"0.1"
~depends:{|
"satysfi" {>= "0.0.5" & < "0.0.6"}
"satyrographos" {>= "0.0.2.6" & < "0.0.3"}
"satysfi-package" {= "0.1"}
|}
()
let satyristes =
"Satyristes", sprintf
{|(version "0.0.2")
(library
(name "package")
(version "0.1")
(sources ((package "test.satyh" "test.satyh")))
(opam "satysfi-package.opam")
(dependencies ()))
|}
let opam_libs = Satyrographos.Library.[
{empty with
name = Some "package";
files = LibraryFiles.of_alist_exn [
"packages/package/test.satyh", `Content ""
]
};
]
let files =
[ satysfi_package_opam;
satysfi_package_doc_opam;
satyristes;
]
let () =
TestCommand.test_lint_command ~opam_libs files
| |
56b25a3cee337a9a6a0da1663ac21c65a735c30e29bd70b25323bdae71fcc214 | 2600hz/kazoo | kazoo_media_maintenance.erl | %%%-----------------------------------------------------------------------------
( C ) 2010 - 2020 , 2600Hz
%%% @doc
This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
%%%
%%% @end
%%%-----------------------------------------------------------------------------
-module(kazoo_media_maintenance).
-export([remove_empty_media_docs/0
,remove_empty_media_docs/1
,migrate/0, migrate_prompts/0
,import_prompts/1, import_prompts/2
,import_prompt/1, import_prompt/2
,set_account_language/2
,refresh/0
,register_views/0
,fix_media_names/0
]).
-include("kazoo_media.hrl").
-spec migrate() -> 'no_return'.
migrate() ->
io:format("migrating relevant settings from system_config/callflow to system_config/~s~n", [?CONFIG_CAT]),
maybe_migrate_system_config(<<"callflow">>),
io:format("migrating relevant settings from system_config/media_mgr to system_config/~s~n", [?CONFIG_CAT]),
maybe_migrate_system_config(<<"media_mgr">>, 'true'),
'no_return'.
-spec migrate_prompts() -> 'no_return'.
migrate_prompts() ->
io:format("Now updating existing system_media docs to be internationalizable!~n", []),
'no_return'.
-spec set_account_language(kz_term:ne_binary(), kz_term:ne_binary()) -> 'ok'.
set_account_language(Account, Language) ->
AccountId = kzs_util:format_account_id(Account),
OldLang = kz_media_util:prompt_language(AccountId),
try kapps_account_config:set(AccountId
,?CONFIG_CAT
,?PROMPT_LANGUAGE_KEY
,kz_term:to_lower_binary(Language)
)
of
_Config ->
io:format("successfully updated account ~s's language from '~s' to '~s'~n"
,[AccountId, OldLang, Language]
)
catch
_E:_R -> 'ok'
end.
-spec import_prompts(file:filename_all()) -> 'ok'.
import_prompts(DirPath) ->
import_prompts(DirPath, kz_media_util:default_prompt_language()).
-spec import_prompts(file:filename_all(), kz_term:text()) -> 'ok'.
import_prompts(DirPath, Lang) ->
case filelib:is_dir(DirPath) of
'false' ->
io:format("not a directory, or is inaccessible: '~s'\n", [DirPath]);
'true' ->
kz_datamgr:db_create(?KZ_MEDIA_DB),
MediaPath = filename:join([DirPath, "*.{wav,mp3}"]),
case filelib:wildcard(kz_term:to_list(MediaPath)) of
[] -> io:format("failed to find media files in '~s'~n", [DirPath]);
Files -> import_files(DirPath, Lang, Files)
end
end.
-spec import_files(file:filename_all(), kz_term:ne_binary(), [file:filename_all()]) -> 'ok'.
import_files(Path, Lang, Files) ->
io:format("importing prompts from '~s' with language '~s'~n", [Path, Lang]),
case import_prompts_from_files(Files, Lang) of
[] -> io:format("importing went successfully~n");
Errors ->
io:format("errors encountered during import:~n"),
_ = [io:format(" '~s': ~p~n", [F, Err]) || {F, Err} <- Errors],
'ok'
end.
-spec import_prompts_from_files([file:filename_all()], kz_term:ne_binary()) ->
[{file:filename_all(), {'error', _}}].
import_prompts_from_files(Files, Lang) ->
[{File, Err}
|| File <- Files,
(Err = (catch import_prompt(File, Lang))) =/= 'ok'
].
-spec import_prompt(file:filename_all()) -> 'ok' | {'error', any()}.
import_prompt(Path) ->
import_prompt(Path, kz_media_util:default_prompt_language()).
-spec import_prompt(file:filename_all(), kz_term:text()) -> 'ok' | {'error', any()}.
import_prompt(Path, Lang) ->
kz_datamgr:db_create(?KZ_MEDIA_DB),
timer:sleep(250),
case file:read_file(Path) of
{'ok', Contents} ->
io:format("importing prompt '~s' with language '~s'~n", [Path, Lang]),
import_prompt(Path, Lang, Contents);
{'error', E}=Error ->
io:format("failed to open path '~s' for importing: ~p~n", [Path, E]),
Error
end.
-spec import_prompt(file:filename_all(), kz_term:text(), kz_term:ne_binary()) -> 'ok' | {'error', any()}.
import_prompt(Path0, Lang0, Contents) ->
Lang = kz_term:to_binary(Lang0),
Path = kz_term:to_binary(Path0),
ContentLength = byte_size(Contents),
MetaJObj = media_meta_doc(Path, Lang, ContentLength),
case kz_datamgr:ensure_saved(?KZ_MEDIA_DB, MetaJObj) of
{'ok', MetaJObj1} ->
io:format(" saved metadata about '~s'~n", [Path]),
AttachmentName = iolist_to_binary([kzd_media:prompt_id(MetaJObj1)
,kz_term:to_binary(filename:extension(Path))
]),
upload_prompt(kz_doc:id(MetaJObj1)
,AttachmentName
,Contents
,[{'content_type', kz_json:get_string_value(<<"content_type">>, MetaJObj1)}
,{'content_length', ContentLength}
,{'rev', kz_doc:revision(MetaJObj1)}
]
);
{'error', E}=Error ->
io:format(" error saving metadata: ~p~n", [E]),
Error
end.
-spec media_meta_doc(file:filename_all(), kz_term:ne_binary(), pos_integer()) ->
kz_json:object().
media_meta_doc(Path, Lang, ContentLength) ->
MediaDoc = base_media_doc(Path, Lang, ContentLength),
kz_doc:update_pvt_parameters(MediaDoc
,?KZ_MEDIA_DB
,[{'type', kzd_media:type()}
,{'now', kz_time:now_s()}
]
).
-spec base_media_doc(file:filename_all(), kz_term:ne_binary(), pos_integer()) ->
kz_json:object().
base_media_doc(Path, Lang, ContentLength) ->
PromptName = prompt_name_from_path(Path),
ContentType = content_type_from_path(Path),
ID = kz_media_util:prompt_id(PromptName, Lang),
io:format(" importing as '~s'~n", [ID]),
kz_json:from_list(
[{<<"_id">>, ID}
,{<<"name">>, ID}
,{<<"prompt_id">>, PromptName}
,{<<"description">>, media_description(PromptName, Lang)}
,{<<"content_length">>, ContentLength}
,{<<"language">>, kz_term:to_lower_binary(Lang)}
,{<<"content_type">>, ContentType}
,{<<"source_type">>, kz_term:to_binary(?MODULE)}
,{<<"streamable">>, 'true'}
]
).
-spec prompt_name_from_path(file:filename_all()) -> kz_term:ne_binary().
prompt_name_from_path(Path) ->
Extension = filename:extension(Path),
kz_term:to_binary(filename:basename(Path, Extension)).
-spec content_type_from_path(file:filename_all()) -> kz_term:ne_binary().
content_type_from_path(Path) ->
{Category, Type, _} = cow_mimetypes:all(Path),
filename:join([Category, Type]).
-spec media_description(kz_term:ne_binary(), kz_term:ne_binary()) -> kz_term:ne_binary().
media_description(PromptName, Lang) ->
<<"System prompt in ", Lang/binary, " for ", PromptName/binary>>.
-spec upload_prompt(kz_term:ne_binary(), kz_term:ne_binary(), kz_term:ne_binary(), kz_term:proplist()) ->
'ok' |
{'error', any()}.
upload_prompt(ID, AttachmentName, Contents, Options) ->
upload_prompt(ID, AttachmentName, Contents, Options, 3).
-spec upload_prompt(kz_term:ne_binary(), kz_term:ne_binary(), kz_term:ne_binary(), kz_term:proplist(), non_neg_integer()) ->
'ok' |
{'error', any()}.
upload_prompt(_ID, _AttachmentName, _Contents, _Options, 0) ->
io:format(" retries exceeded for uploading ~s to ~s~n", [_AttachmentName, _ID]),
{'error', 'retries_exceeded'};
upload_prompt(ID, AttachmentName, Contents, Options, Retries) ->
case kz_datamgr:put_attachment(?KZ_MEDIA_DB, ID, AttachmentName, Contents, Options) of
{'ok', _MetaJObj} ->
io:format(" uploaded prompt binary to ~s as ~s~n", [ID, AttachmentName]);
{'error', 'conflict'} ->
io:format(" conflict when uploading media binary; checking doc to see if it was actually successful~n"),
maybe_retry_upload(ID, AttachmentName, Contents, Options, Retries);
{'error', E} ->
io:format(" error uploading prompt binary: ~p~n", [E]),
maybe_cleanup_metadoc(ID, E)
end.
-spec maybe_cleanup_metadoc(kz_term:ne_binary(), any()) -> {'error', any()}.
maybe_cleanup_metadoc(ID, E) ->
io:format(" deleting metadata from ~s~n", [?KZ_MEDIA_DB]),
case kz_datamgr:del_doc(?KZ_MEDIA_DB, ID) of
{'ok', _} ->
io:format(" removed metadata for ~s~n", [ID]),
{'error', E};
{'error', E1}=Error ->
io:format(" failed to remove metadata for ~s: ~p~n", [ID, E1]),
Error
end.
-spec maybe_retry_upload(kz_term:ne_binary(), kz_term:ne_binary(), kz_term:ne_binary(), kz_term:proplist(), non_neg_integer()) ->
'ok' |
{'error', any()}.
maybe_retry_upload(ID, AttachmentName, Contents, Options, Retries) ->
case kz_datamgr:open_doc(?KZ_MEDIA_DB, ID) of
{'ok', JObj} ->
case kz_doc:attachment(JObj, AttachmentName) of
'undefined' ->
io:format(" attachment does not appear on the document, retrying after a pause~n"),
timer:sleep(?MILLISECONDS_IN_SECOND),
upload_prompt(ID, AttachmentName, Contents, Options, Retries-1);
_Attachment ->
io:format(" attachment appears to have uploaded successfully!~n")
end;
{'error', E}=Error ->
io:format(" failed to open the media doc again: ~p~n", [E]),
Error
end.
-spec refresh() -> 'ok'.
refresh() ->
case kz_datamgr:db_exists(?KZ_MEDIA_DB) of
'false' ->
Result = kz_datamgr:db_create(?KZ_MEDIA_DB),
lager:debug("~s database is created: ~p", [?KZ_MEDIA_DB, Result]);
'true' -> 'ok'
end,
_ = kapps_maintenance:refresh(?KZ_MEDIA_DB),
'ok'.
-spec register_views() -> 'ok'.
register_views() ->
kz_datamgr:register_views_from_folder('kazoo_media').
-spec maybe_migrate_system_config(kz_term:ne_binary()) -> 'ok'.
maybe_migrate_system_config(ConfigId) ->
maybe_migrate_system_config(ConfigId, 'false').
-spec maybe_migrate_system_config(kz_term:ne_binary(), boolean()) -> 'ok'.
maybe_migrate_system_config(ConfigId, DeleteAfter) ->
case kz_datamgr:open_doc(?KZ_CONFIG_DB, ConfigId) of
{'error', 'not_found'} ->
io:format(" failed to find ~s, not migrating~n", [ConfigId]);
{'ok', JObj} ->
migrate_system_config(kz_doc:public_fields(JObj), DeleteAfter),
maybe_delete_system_config(ConfigId, DeleteAfter)
end.
-spec maybe_delete_system_config(kz_term:ne_binary(), boolean()) -> 'ok'.
maybe_delete_system_config(ConfigId, 'true') ->
{'ok', _} = kz_datamgr:del_doc(?KZ_CONFIG_DB, ConfigId),
io:format("deleted ~s from ~s", [ConfigId, ?KZ_CONFIG_DB]);
maybe_delete_system_config(_ConfigId, 'false') -> 'ok'.
-spec migrate_system_config(kz_json:object(), boolean()) -> 'ok'.
migrate_system_config(ConfigJObj, DeleteAfter) ->
{'ok', MediaJObj} = get_media_config_doc(),
{Updates, _} = kz_json:foldl(fun migrate_system_config_fold/3
,{[], MediaJObj}
,ConfigJObj
),
maybe_update_media_config(Updates, MediaJObj),
delete_original_config(ConfigJObj, Updates, DeleteAfter).
maybe_update_media_config([], _) ->
io:format("no changes for that need saving~n");
maybe_update_media_config(Updates, MediaJObj) ->
io:format("saving updated media config with:~n~p~n", [Updates]),
ensure_save_config_db(Updates, MediaJObj).
%%------------------------------------------------------------------------------
%% @doc Only delete original attributes when the original doc is NOT deleted.
%% @end
%%------------------------------------------------------------------------------
-spec delete_original_config(kz_json:object(), list(), boolean()) -> 'ok'.
delete_original_config(_OriginalJObj, _Updates, 'true') -> 'ok';
delete_original_config(OrigJObj, Updates, 'false') ->
Removed = lists:foldl(fun({X, _V}, L) -> [{X, 'null'} | L] end, [], Updates),
ensure_save_config_db(Removed, OrigJObj).
-spec ensure_save_config_db(kz_json:flat_proplist(), kz_json:object()) -> 'ok'.
ensure_save_config_db(Updates, JObj) ->
Id = kz_doc:id(JObj),
PvtUpdates = [{<<"pvt_modified">>, kz_time:now_s()}],
Update = [{'update', Updates}
,{'extra_update', PvtUpdates}
,{'ensure_saved', 'true'}
],
{'ok', _} = kz_datamgr:update_doc(?KZ_CONFIG_DB, Id, Update),
'ok'.
-spec get_media_config_doc() -> {'ok', kz_json:object()}.
get_media_config_doc() ->
case kz_datamgr:open_doc(?KZ_CONFIG_DB, ?CONFIG_CAT) of
{'ok', _MediaJObj}=OK -> OK;
{'error', 'not_found'} ->
{'ok', kz_json:from_list([{<<"_id">>, ?CONFIG_CAT}])}
end.
-type migrate_fold_acc() :: {kz_term:proplist(), kzd_system_configs:doc()}.
-spec migrate_system_config_fold(kz_term:ne_binary(), kz_json:json_term(), migrate_fold_acc()) ->
migrate_fold_acc().
migrate_system_config_fold(<<"default">> = Node, Settings, Updates) ->
io:format("migrating node '~s' settings~n", [Node]),
migrate_node_config(Node, Settings, Updates, ?CONFIG_KVS);
migrate_system_config_fold(Node, Settings, Updates) ->
case binary:split(Node, <<"@">>) of
[_User, _Domain] ->
io:format("migrating node '~s' settings~n", [Node]),
migrate_node_config(Node, Settings, Updates, ?CONFIG_KVS);
_Split ->
io:format("skipping non-node '~s'~n", [Node]),
Updates
end.
-spec migrate_node_config(kz_term:ne_binary(), kz_json:object(), migrate_fold_acc(), kz_term:proplist()) ->
migrate_fold_acc().
migrate_node_config(_Node, _Settings, Updates, []) -> Updates;
migrate_node_config(Node, Settings, Updates, [{K, V} | KVs]) ->
case kz_json:get_value(K, Settings) of
'undefined' ->
io:format(" maybe setting ~p for node ~p to default '~p'~n", [K, Node, V]),
migrate_node_config(Node, Settings, maybe_update_media_config(Node, K, V, Updates), KVs);
NodeV ->
io:format(" maybe setting ~p for node ~p to '~p'~n", [K, Node, NodeV]),
migrate_node_config(Node, Settings, set_node_value(Node, K, NodeV, Updates), KVs)
end.
-spec set_node_value(kz_term:ne_binary(), kz_json:path(), kz_term:ne_binary(), migrate_fold_acc()) ->
migrate_fold_acc().
set_node_value(Node, <<_/binary>> = K, V, Updates) ->
set_node_value(Node, [K], V, Updates);
set_node_value(Node, K, V, {Updates, MediaJObj}) ->
{[{[Node | K], V} | Updates], MediaJObj}.
-spec maybe_update_media_config(kz_term:ne_binary(), kz_json:path(), kz_term:api_binary(), migrate_fold_acc()) ->
migrate_fold_acc().
maybe_update_media_config(_Node, _K, 'undefined', Updates) ->
io:format(" no value to set for ~p~n", [_K]),
Updates;
maybe_update_media_config(Node, <<_/binary>> = K, V, Updates) ->
maybe_update_media_config(Node, [K], V, Updates);
maybe_update_media_config(Node, K, V, {Updates, MediaJObj}=Acc) ->
Key = [Node | K],
case kz_json:get_value(Key, MediaJObj) of
'undefined' ->
{[{Key, V} | Updates], MediaJObj};
V ->
io:format(" media config has matching value for ~p~n", [Key]),
Acc;
_V ->
io:format(" media config has existing value '~p' for ~p~n", [_V, Key]),
Acc
end.
-spec remove_empty_media_docs() -> 'no_return'.
remove_empty_media_docs() ->
{'ok', JObjs} = kz_datamgr:all_docs(?KZ_MEDIA_DB, ['include_docs']),
remove_empty_system_media(JObjs).
-spec remove_empty_system_media(kz_json:objects()) -> 'no_return'.
remove_empty_system_media([]) -> 'no_return';
remove_empty_system_media([JObj|JObjs]) ->
Doc = kz_json:get_value(<<"doc">>, JObj),
Id = kz_json:get_value(<<"id">>, JObj),
case kz_json:get_ne_value(<<"_attachments">>, Doc) =:= 'undefined'
andalso binary:match(Id, <<"_design">>) =:= 'nomatch'
of
'true' ->
_ = io:format("media document ~s has no attachments, removing~n", [Id]),
_ = kz_datamgr:del_doc(?KZ_MEDIA_DB, Doc),
remove_empty_system_media(JObjs);
'false' -> remove_empty_system_media(JObjs)
end.
-spec remove_empty_media_docs(kz_term:ne_binary()) -> 'ok'.
remove_empty_media_docs(AccountId) ->
AccountDb = kzs_util:format_account_db(AccountId),
remove_empty_media_docs(AccountId, AccountDb).
-spec remove_empty_media_docs(kz_term:ne_binary(), kz_term:ne_binary()) -> 'ok'.
remove_empty_media_docs(AccountId, AccountDb) ->
case kz_datamgr:get_results(AccountDb, <<"media/crossbar_listing">>, ['include_docs']) of
{'ok', []} ->
io:format("no media docs in account ~s~n", [AccountId]);
{'ok', MediaDocs} ->
io:format("found ~b media docs in account ~s~n", [length(MediaDocs), AccountId]),
Filename = media_doc_filename(AccountId, kz_time:now_s()),
io:format("archiving removed media docs to ~s~n", [Filename]),
{'ok', File} = file:open(Filename, ['write', 'binary', 'append']),
catch remove_empty_media_docs(AccountId, AccountDb, File, MediaDocs),
'ok' = file:close(File);
{'error', _E} ->
io:format("error looking up media docs in account ~s: ~p~n", [AccountId, _E])
end.
-spec media_doc_filename(kz_term:ne_binary(), non_neg_integer()) -> file:name().
media_doc_filename(AccountId, Timestamp) ->
Path = ["/tmp/empty_media_", AccountId, "_", kz_term:to_binary(Timestamp), ".json"],
binary_to_list(list_to_binary(Path)).
-spec remove_empty_media_docs(kz_term:ne_binary(), kz_term:ne_binary(), file:io_device(), kz_json:objects()) -> 'ok'.
remove_empty_media_docs(AccountId, _AccountDb, _Filename, []) ->
io:format("finished cleaning up empty media docs for account ~s~n", [AccountId]);
remove_empty_media_docs(AccountId, AccountDb, File, [Media|MediaDocs]) ->
maybe_remove_media_doc(AccountDb, File, kz_json:get_value(<<"doc">>, Media)),
remove_empty_media_docs(AccountId, AccountDb, File, MediaDocs).
-spec maybe_remove_media_doc(kz_term:ne_binary(), file:io_device(), kz_json:object()) -> 'ok'.
maybe_remove_media_doc(AccountDb, File, MediaJObj) ->
DocId = kz_doc:id(MediaJObj),
case kz_doc:attachments(MediaJObj) of
'undefined' ->
io:format("media doc ~s has no attachments, archiving and removing~n", [DocId]),
_R = file:write(File, [kz_json:encode(MediaJObj), $\n]),
io:format("dumping media doc ~s to file : ~p\n", [DocId, _R]),
remove_media_doc(AccountDb, MediaJObj);
_Attachments ->
io:format("media doc ~s has attachments, leaving alone~n", [kz_doc:id(MediaJObj)])
end.
-spec remove_media_doc(kz_term:ne_binary(), kz_json:object()) -> 'ok'.
remove_media_doc(AccountDb, MediaJObj) ->
{'ok', _Doc} = kz_datamgr:del_doc(AccountDb, MediaJObj),
io:format("removed media doc ~s~n", [kz_doc:id(MediaJObj)]).
filter_media_names(JObj) ->
kz_doc:id(JObj) =/= kz_http_util:urldecode(kz_doc:id(JObj)).
-spec fix_media_name(kz_json:object()) -> 'ok'.
fix_media_name(JObj) ->
FromId = kz_doc:id(JObj),
ToId = kz_http_util:urldecode(kz_doc:id(JObj)),
Options = [{'transform', fun(_, B) -> kz_json:set_value(<<"name">>, ToId, B) end}],
case kz_datamgr:move_doc(?KZ_MEDIA_DB, FromId, ?KZ_MEDIA_DB, ToId, Options) of
{'ok', _} -> lager:info("renamed media doc from ~s to ~s", [FromId, ToId]);
{'error', Error} -> lager:info("error renaming media doc from ~s to ~s : ~p", [FromId, ToId, Error])
end.
-spec fix_media_names() -> any().
fix_media_names() ->
case kz_datamgr:all_docs(?KZ_MEDIA_DB) of
{'ok', JObjs} ->
case [ JObj || JObj <- JObjs, filter_media_names(JObj)] of
[] -> kapps_config:set(?CONFIG_CAT, <<"fix_media_names">>, 'false');
List -> lists:foreach(fun fix_media_name/1, List)
end;
{'error', Error} ->
lager:debug("error '~p' getting media names", [Error])
end.
| null | https://raw.githubusercontent.com/2600hz/kazoo/24519b9af9792caa67f7c09bbb9d27e2418f7ad6/core/kazoo_media/src/kazoo_media_maintenance.erl | erlang | -----------------------------------------------------------------------------
@doc
@end
-----------------------------------------------------------------------------
------------------------------------------------------------------------------
@doc Only delete original attributes when the original doc is NOT deleted.
@end
------------------------------------------------------------------------------ | ( C ) 2010 - 2020 , 2600Hz
This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
-module(kazoo_media_maintenance).
-export([remove_empty_media_docs/0
,remove_empty_media_docs/1
,migrate/0, migrate_prompts/0
,import_prompts/1, import_prompts/2
,import_prompt/1, import_prompt/2
,set_account_language/2
,refresh/0
,register_views/0
,fix_media_names/0
]).
-include("kazoo_media.hrl").
-spec migrate() -> 'no_return'.
migrate() ->
io:format("migrating relevant settings from system_config/callflow to system_config/~s~n", [?CONFIG_CAT]),
maybe_migrate_system_config(<<"callflow">>),
io:format("migrating relevant settings from system_config/media_mgr to system_config/~s~n", [?CONFIG_CAT]),
maybe_migrate_system_config(<<"media_mgr">>, 'true'),
'no_return'.
-spec migrate_prompts() -> 'no_return'.
migrate_prompts() ->
io:format("Now updating existing system_media docs to be internationalizable!~n", []),
'no_return'.
-spec set_account_language(kz_term:ne_binary(), kz_term:ne_binary()) -> 'ok'.
set_account_language(Account, Language) ->
AccountId = kzs_util:format_account_id(Account),
OldLang = kz_media_util:prompt_language(AccountId),
try kapps_account_config:set(AccountId
,?CONFIG_CAT
,?PROMPT_LANGUAGE_KEY
,kz_term:to_lower_binary(Language)
)
of
_Config ->
io:format("successfully updated account ~s's language from '~s' to '~s'~n"
,[AccountId, OldLang, Language]
)
catch
_E:_R -> 'ok'
end.
-spec import_prompts(file:filename_all()) -> 'ok'.
import_prompts(DirPath) ->
import_prompts(DirPath, kz_media_util:default_prompt_language()).
-spec import_prompts(file:filename_all(), kz_term:text()) -> 'ok'.
import_prompts(DirPath, Lang) ->
case filelib:is_dir(DirPath) of
'false' ->
io:format("not a directory, or is inaccessible: '~s'\n", [DirPath]);
'true' ->
kz_datamgr:db_create(?KZ_MEDIA_DB),
MediaPath = filename:join([DirPath, "*.{wav,mp3}"]),
case filelib:wildcard(kz_term:to_list(MediaPath)) of
[] -> io:format("failed to find media files in '~s'~n", [DirPath]);
Files -> import_files(DirPath, Lang, Files)
end
end.
-spec import_files(file:filename_all(), kz_term:ne_binary(), [file:filename_all()]) -> 'ok'.
import_files(Path, Lang, Files) ->
io:format("importing prompts from '~s' with language '~s'~n", [Path, Lang]),
case import_prompts_from_files(Files, Lang) of
[] -> io:format("importing went successfully~n");
Errors ->
io:format("errors encountered during import:~n"),
_ = [io:format(" '~s': ~p~n", [F, Err]) || {F, Err} <- Errors],
'ok'
end.
-spec import_prompts_from_files([file:filename_all()], kz_term:ne_binary()) ->
[{file:filename_all(), {'error', _}}].
import_prompts_from_files(Files, Lang) ->
[{File, Err}
|| File <- Files,
(Err = (catch import_prompt(File, Lang))) =/= 'ok'
].
-spec import_prompt(file:filename_all()) -> 'ok' | {'error', any()}.
import_prompt(Path) ->
import_prompt(Path, kz_media_util:default_prompt_language()).
-spec import_prompt(file:filename_all(), kz_term:text()) -> 'ok' | {'error', any()}.
import_prompt(Path, Lang) ->
kz_datamgr:db_create(?KZ_MEDIA_DB),
timer:sleep(250),
case file:read_file(Path) of
{'ok', Contents} ->
io:format("importing prompt '~s' with language '~s'~n", [Path, Lang]),
import_prompt(Path, Lang, Contents);
{'error', E}=Error ->
io:format("failed to open path '~s' for importing: ~p~n", [Path, E]),
Error
end.
-spec import_prompt(file:filename_all(), kz_term:text(), kz_term:ne_binary()) -> 'ok' | {'error', any()}.
import_prompt(Path0, Lang0, Contents) ->
Lang = kz_term:to_binary(Lang0),
Path = kz_term:to_binary(Path0),
ContentLength = byte_size(Contents),
MetaJObj = media_meta_doc(Path, Lang, ContentLength),
case kz_datamgr:ensure_saved(?KZ_MEDIA_DB, MetaJObj) of
{'ok', MetaJObj1} ->
io:format(" saved metadata about '~s'~n", [Path]),
AttachmentName = iolist_to_binary([kzd_media:prompt_id(MetaJObj1)
,kz_term:to_binary(filename:extension(Path))
]),
upload_prompt(kz_doc:id(MetaJObj1)
,AttachmentName
,Contents
,[{'content_type', kz_json:get_string_value(<<"content_type">>, MetaJObj1)}
,{'content_length', ContentLength}
,{'rev', kz_doc:revision(MetaJObj1)}
]
);
{'error', E}=Error ->
io:format(" error saving metadata: ~p~n", [E]),
Error
end.
-spec media_meta_doc(file:filename_all(), kz_term:ne_binary(), pos_integer()) ->
kz_json:object().
media_meta_doc(Path, Lang, ContentLength) ->
MediaDoc = base_media_doc(Path, Lang, ContentLength),
kz_doc:update_pvt_parameters(MediaDoc
,?KZ_MEDIA_DB
,[{'type', kzd_media:type()}
,{'now', kz_time:now_s()}
]
).
-spec base_media_doc(file:filename_all(), kz_term:ne_binary(), pos_integer()) ->
kz_json:object().
base_media_doc(Path, Lang, ContentLength) ->
PromptName = prompt_name_from_path(Path),
ContentType = content_type_from_path(Path),
ID = kz_media_util:prompt_id(PromptName, Lang),
io:format(" importing as '~s'~n", [ID]),
kz_json:from_list(
[{<<"_id">>, ID}
,{<<"name">>, ID}
,{<<"prompt_id">>, PromptName}
,{<<"description">>, media_description(PromptName, Lang)}
,{<<"content_length">>, ContentLength}
,{<<"language">>, kz_term:to_lower_binary(Lang)}
,{<<"content_type">>, ContentType}
,{<<"source_type">>, kz_term:to_binary(?MODULE)}
,{<<"streamable">>, 'true'}
]
).
-spec prompt_name_from_path(file:filename_all()) -> kz_term:ne_binary().
prompt_name_from_path(Path) ->
Extension = filename:extension(Path),
kz_term:to_binary(filename:basename(Path, Extension)).
-spec content_type_from_path(file:filename_all()) -> kz_term:ne_binary().
content_type_from_path(Path) ->
{Category, Type, _} = cow_mimetypes:all(Path),
filename:join([Category, Type]).
-spec media_description(kz_term:ne_binary(), kz_term:ne_binary()) -> kz_term:ne_binary().
media_description(PromptName, Lang) ->
<<"System prompt in ", Lang/binary, " for ", PromptName/binary>>.
-spec upload_prompt(kz_term:ne_binary(), kz_term:ne_binary(), kz_term:ne_binary(), kz_term:proplist()) ->
'ok' |
{'error', any()}.
upload_prompt(ID, AttachmentName, Contents, Options) ->
upload_prompt(ID, AttachmentName, Contents, Options, 3).
-spec upload_prompt(kz_term:ne_binary(), kz_term:ne_binary(), kz_term:ne_binary(), kz_term:proplist(), non_neg_integer()) ->
'ok' |
{'error', any()}.
upload_prompt(_ID, _AttachmentName, _Contents, _Options, 0) ->
io:format(" retries exceeded for uploading ~s to ~s~n", [_AttachmentName, _ID]),
{'error', 'retries_exceeded'};
upload_prompt(ID, AttachmentName, Contents, Options, Retries) ->
case kz_datamgr:put_attachment(?KZ_MEDIA_DB, ID, AttachmentName, Contents, Options) of
{'ok', _MetaJObj} ->
io:format(" uploaded prompt binary to ~s as ~s~n", [ID, AttachmentName]);
{'error', 'conflict'} ->
io:format(" conflict when uploading media binary; checking doc to see if it was actually successful~n"),
maybe_retry_upload(ID, AttachmentName, Contents, Options, Retries);
{'error', E} ->
io:format(" error uploading prompt binary: ~p~n", [E]),
maybe_cleanup_metadoc(ID, E)
end.
-spec maybe_cleanup_metadoc(kz_term:ne_binary(), any()) -> {'error', any()}.
maybe_cleanup_metadoc(ID, E) ->
io:format(" deleting metadata from ~s~n", [?KZ_MEDIA_DB]),
case kz_datamgr:del_doc(?KZ_MEDIA_DB, ID) of
{'ok', _} ->
io:format(" removed metadata for ~s~n", [ID]),
{'error', E};
{'error', E1}=Error ->
io:format(" failed to remove metadata for ~s: ~p~n", [ID, E1]),
Error
end.
-spec maybe_retry_upload(kz_term:ne_binary(), kz_term:ne_binary(), kz_term:ne_binary(), kz_term:proplist(), non_neg_integer()) ->
'ok' |
{'error', any()}.
maybe_retry_upload(ID, AttachmentName, Contents, Options, Retries) ->
case kz_datamgr:open_doc(?KZ_MEDIA_DB, ID) of
{'ok', JObj} ->
case kz_doc:attachment(JObj, AttachmentName) of
'undefined' ->
io:format(" attachment does not appear on the document, retrying after a pause~n"),
timer:sleep(?MILLISECONDS_IN_SECOND),
upload_prompt(ID, AttachmentName, Contents, Options, Retries-1);
_Attachment ->
io:format(" attachment appears to have uploaded successfully!~n")
end;
{'error', E}=Error ->
io:format(" failed to open the media doc again: ~p~n", [E]),
Error
end.
-spec refresh() -> 'ok'.
refresh() ->
case kz_datamgr:db_exists(?KZ_MEDIA_DB) of
'false' ->
Result = kz_datamgr:db_create(?KZ_MEDIA_DB),
lager:debug("~s database is created: ~p", [?KZ_MEDIA_DB, Result]);
'true' -> 'ok'
end,
_ = kapps_maintenance:refresh(?KZ_MEDIA_DB),
'ok'.
-spec register_views() -> 'ok'.
register_views() ->
kz_datamgr:register_views_from_folder('kazoo_media').
-spec maybe_migrate_system_config(kz_term:ne_binary()) -> 'ok'.
maybe_migrate_system_config(ConfigId) ->
maybe_migrate_system_config(ConfigId, 'false').
-spec maybe_migrate_system_config(kz_term:ne_binary(), boolean()) -> 'ok'.
maybe_migrate_system_config(ConfigId, DeleteAfter) ->
case kz_datamgr:open_doc(?KZ_CONFIG_DB, ConfigId) of
{'error', 'not_found'} ->
io:format(" failed to find ~s, not migrating~n", [ConfigId]);
{'ok', JObj} ->
migrate_system_config(kz_doc:public_fields(JObj), DeleteAfter),
maybe_delete_system_config(ConfigId, DeleteAfter)
end.
-spec maybe_delete_system_config(kz_term:ne_binary(), boolean()) -> 'ok'.
maybe_delete_system_config(ConfigId, 'true') ->
{'ok', _} = kz_datamgr:del_doc(?KZ_CONFIG_DB, ConfigId),
io:format("deleted ~s from ~s", [ConfigId, ?KZ_CONFIG_DB]);
maybe_delete_system_config(_ConfigId, 'false') -> 'ok'.
-spec migrate_system_config(kz_json:object(), boolean()) -> 'ok'.
migrate_system_config(ConfigJObj, DeleteAfter) ->
{'ok', MediaJObj} = get_media_config_doc(),
{Updates, _} = kz_json:foldl(fun migrate_system_config_fold/3
,{[], MediaJObj}
,ConfigJObj
),
maybe_update_media_config(Updates, MediaJObj),
delete_original_config(ConfigJObj, Updates, DeleteAfter).
maybe_update_media_config([], _) ->
io:format("no changes for that need saving~n");
maybe_update_media_config(Updates, MediaJObj) ->
io:format("saving updated media config with:~n~p~n", [Updates]),
ensure_save_config_db(Updates, MediaJObj).
-spec delete_original_config(kz_json:object(), list(), boolean()) -> 'ok'.
delete_original_config(_OriginalJObj, _Updates, 'true') -> 'ok';
delete_original_config(OrigJObj, Updates, 'false') ->
Removed = lists:foldl(fun({X, _V}, L) -> [{X, 'null'} | L] end, [], Updates),
ensure_save_config_db(Removed, OrigJObj).
-spec ensure_save_config_db(kz_json:flat_proplist(), kz_json:object()) -> 'ok'.
ensure_save_config_db(Updates, JObj) ->
Id = kz_doc:id(JObj),
PvtUpdates = [{<<"pvt_modified">>, kz_time:now_s()}],
Update = [{'update', Updates}
,{'extra_update', PvtUpdates}
,{'ensure_saved', 'true'}
],
{'ok', _} = kz_datamgr:update_doc(?KZ_CONFIG_DB, Id, Update),
'ok'.
-spec get_media_config_doc() -> {'ok', kz_json:object()}.
get_media_config_doc() ->
case kz_datamgr:open_doc(?KZ_CONFIG_DB, ?CONFIG_CAT) of
{'ok', _MediaJObj}=OK -> OK;
{'error', 'not_found'} ->
{'ok', kz_json:from_list([{<<"_id">>, ?CONFIG_CAT}])}
end.
-type migrate_fold_acc() :: {kz_term:proplist(), kzd_system_configs:doc()}.
-spec migrate_system_config_fold(kz_term:ne_binary(), kz_json:json_term(), migrate_fold_acc()) ->
migrate_fold_acc().
migrate_system_config_fold(<<"default">> = Node, Settings, Updates) ->
io:format("migrating node '~s' settings~n", [Node]),
migrate_node_config(Node, Settings, Updates, ?CONFIG_KVS);
migrate_system_config_fold(Node, Settings, Updates) ->
case binary:split(Node, <<"@">>) of
[_User, _Domain] ->
io:format("migrating node '~s' settings~n", [Node]),
migrate_node_config(Node, Settings, Updates, ?CONFIG_KVS);
_Split ->
io:format("skipping non-node '~s'~n", [Node]),
Updates
end.
-spec migrate_node_config(kz_term:ne_binary(), kz_json:object(), migrate_fold_acc(), kz_term:proplist()) ->
migrate_fold_acc().
migrate_node_config(_Node, _Settings, Updates, []) -> Updates;
migrate_node_config(Node, Settings, Updates, [{K, V} | KVs]) ->
case kz_json:get_value(K, Settings) of
'undefined' ->
io:format(" maybe setting ~p for node ~p to default '~p'~n", [K, Node, V]),
migrate_node_config(Node, Settings, maybe_update_media_config(Node, K, V, Updates), KVs);
NodeV ->
io:format(" maybe setting ~p for node ~p to '~p'~n", [K, Node, NodeV]),
migrate_node_config(Node, Settings, set_node_value(Node, K, NodeV, Updates), KVs)
end.
-spec set_node_value(kz_term:ne_binary(), kz_json:path(), kz_term:ne_binary(), migrate_fold_acc()) ->
migrate_fold_acc().
set_node_value(Node, <<_/binary>> = K, V, Updates) ->
set_node_value(Node, [K], V, Updates);
set_node_value(Node, K, V, {Updates, MediaJObj}) ->
{[{[Node | K], V} | Updates], MediaJObj}.
-spec maybe_update_media_config(kz_term:ne_binary(), kz_json:path(), kz_term:api_binary(), migrate_fold_acc()) ->
migrate_fold_acc().
maybe_update_media_config(_Node, _K, 'undefined', Updates) ->
io:format(" no value to set for ~p~n", [_K]),
Updates;
maybe_update_media_config(Node, <<_/binary>> = K, V, Updates) ->
maybe_update_media_config(Node, [K], V, Updates);
maybe_update_media_config(Node, K, V, {Updates, MediaJObj}=Acc) ->
Key = [Node | K],
case kz_json:get_value(Key, MediaJObj) of
'undefined' ->
{[{Key, V} | Updates], MediaJObj};
V ->
io:format(" media config has matching value for ~p~n", [Key]),
Acc;
_V ->
io:format(" media config has existing value '~p' for ~p~n", [_V, Key]),
Acc
end.
-spec remove_empty_media_docs() -> 'no_return'.
remove_empty_media_docs() ->
{'ok', JObjs} = kz_datamgr:all_docs(?KZ_MEDIA_DB, ['include_docs']),
remove_empty_system_media(JObjs).
-spec remove_empty_system_media(kz_json:objects()) -> 'no_return'.
remove_empty_system_media([]) -> 'no_return';
remove_empty_system_media([JObj|JObjs]) ->
Doc = kz_json:get_value(<<"doc">>, JObj),
Id = kz_json:get_value(<<"id">>, JObj),
case kz_json:get_ne_value(<<"_attachments">>, Doc) =:= 'undefined'
andalso binary:match(Id, <<"_design">>) =:= 'nomatch'
of
'true' ->
_ = io:format("media document ~s has no attachments, removing~n", [Id]),
_ = kz_datamgr:del_doc(?KZ_MEDIA_DB, Doc),
remove_empty_system_media(JObjs);
'false' -> remove_empty_system_media(JObjs)
end.
-spec remove_empty_media_docs(kz_term:ne_binary()) -> 'ok'.
remove_empty_media_docs(AccountId) ->
AccountDb = kzs_util:format_account_db(AccountId),
remove_empty_media_docs(AccountId, AccountDb).
-spec remove_empty_media_docs(kz_term:ne_binary(), kz_term:ne_binary()) -> 'ok'.
remove_empty_media_docs(AccountId, AccountDb) ->
case kz_datamgr:get_results(AccountDb, <<"media/crossbar_listing">>, ['include_docs']) of
{'ok', []} ->
io:format("no media docs in account ~s~n", [AccountId]);
{'ok', MediaDocs} ->
io:format("found ~b media docs in account ~s~n", [length(MediaDocs), AccountId]),
Filename = media_doc_filename(AccountId, kz_time:now_s()),
io:format("archiving removed media docs to ~s~n", [Filename]),
{'ok', File} = file:open(Filename, ['write', 'binary', 'append']),
catch remove_empty_media_docs(AccountId, AccountDb, File, MediaDocs),
'ok' = file:close(File);
{'error', _E} ->
io:format("error looking up media docs in account ~s: ~p~n", [AccountId, _E])
end.
-spec media_doc_filename(kz_term:ne_binary(), non_neg_integer()) -> file:name().
media_doc_filename(AccountId, Timestamp) ->
Path = ["/tmp/empty_media_", AccountId, "_", kz_term:to_binary(Timestamp), ".json"],
binary_to_list(list_to_binary(Path)).
-spec remove_empty_media_docs(kz_term:ne_binary(), kz_term:ne_binary(), file:io_device(), kz_json:objects()) -> 'ok'.
remove_empty_media_docs(AccountId, _AccountDb, _Filename, []) ->
io:format("finished cleaning up empty media docs for account ~s~n", [AccountId]);
remove_empty_media_docs(AccountId, AccountDb, File, [Media|MediaDocs]) ->
maybe_remove_media_doc(AccountDb, File, kz_json:get_value(<<"doc">>, Media)),
remove_empty_media_docs(AccountId, AccountDb, File, MediaDocs).
-spec maybe_remove_media_doc(kz_term:ne_binary(), file:io_device(), kz_json:object()) -> 'ok'.
maybe_remove_media_doc(AccountDb, File, MediaJObj) ->
DocId = kz_doc:id(MediaJObj),
case kz_doc:attachments(MediaJObj) of
'undefined' ->
io:format("media doc ~s has no attachments, archiving and removing~n", [DocId]),
_R = file:write(File, [kz_json:encode(MediaJObj), $\n]),
io:format("dumping media doc ~s to file : ~p\n", [DocId, _R]),
remove_media_doc(AccountDb, MediaJObj);
_Attachments ->
io:format("media doc ~s has attachments, leaving alone~n", [kz_doc:id(MediaJObj)])
end.
-spec remove_media_doc(kz_term:ne_binary(), kz_json:object()) -> 'ok'.
remove_media_doc(AccountDb, MediaJObj) ->
{'ok', _Doc} = kz_datamgr:del_doc(AccountDb, MediaJObj),
io:format("removed media doc ~s~n", [kz_doc:id(MediaJObj)]).
filter_media_names(JObj) ->
kz_doc:id(JObj) =/= kz_http_util:urldecode(kz_doc:id(JObj)).
-spec fix_media_name(kz_json:object()) -> 'ok'.
fix_media_name(JObj) ->
FromId = kz_doc:id(JObj),
ToId = kz_http_util:urldecode(kz_doc:id(JObj)),
Options = [{'transform', fun(_, B) -> kz_json:set_value(<<"name">>, ToId, B) end}],
case kz_datamgr:move_doc(?KZ_MEDIA_DB, FromId, ?KZ_MEDIA_DB, ToId, Options) of
{'ok', _} -> lager:info("renamed media doc from ~s to ~s", [FromId, ToId]);
{'error', Error} -> lager:info("error renaming media doc from ~s to ~s : ~p", [FromId, ToId, Error])
end.
-spec fix_media_names() -> any().
fix_media_names() ->
case kz_datamgr:all_docs(?KZ_MEDIA_DB) of
{'ok', JObjs} ->
case [ JObj || JObj <- JObjs, filter_media_names(JObj)] of
[] -> kapps_config:set(?CONFIG_CAT, <<"fix_media_names">>, 'false');
List -> lists:foreach(fun fix_media_name/1, List)
end;
{'error', Error} ->
lager:debug("error '~p' getting media names", [Error])
end.
|
86ee537ff5e19e2cca9e1e3a4f23949a7797b1ef7a8a837a9201c550619cce62 | greglook/clj-cbor | simple_test.clj | (ns clj-cbor.data.simple-test
(:require
[clj-cbor.data.simple :refer [->Undefined ->SimpleValue]]
[clojure.test :refer [deftest testing is]]))
(deftest undefined-values
(let [undefined (->Undefined nil)]
(testing "representation"
(is (= "undefined" (str undefined))))
(testing "equality"
(is (= undefined undefined))
(is (= undefined (->Undefined nil))
"all undefined values should be equal")
(is (not= undefined :foo)))
(testing "hash code"
(is (integer? (hash undefined)))
(is (= (hash undefined) (hash (->Undefined nil)))
"all undefined values should have the same hash"))
(testing "metadata"
(is (nil? (meta undefined)))
(is (= undefined (vary-meta undefined assoc :x 123))
"metadata does not affect equality")
(is (= {:x 123} (meta (vary-meta undefined assoc :x 123)))
"metadata is preserved"))))
(deftest simple-values
(let [simple24 (->SimpleValue 24 nil)
simple64 (->SimpleValue 64 nil)]
(testing "representation"
(is (= "simple(24)" (str simple24)))
(is (= "simple(64)" (str simple64))))
(testing "equality"
(is (= simple24 simple24)
"should be reflexive")
(is (= simple64 simple64)
"should be reflexive")
(is (= simple24 (->SimpleValue 24 nil))
"different instances of the same value should be equal")
(is (not= simple24 simple64)
"different simple values should not be equal")
(is (not= simple64 :foo)
"different types should not be equal"))
(testing "hash code"
(is (integer? (hash simple24)))
(is (= (hash simple24) (hash simple24))
"should be stable")
(is (= (hash simple24) (hash (->SimpleValue 24 nil)))
"different instances of the same value should have the same hash")
(is (not= (hash simple24) (hash simple64))
"different simple values should have different hashes"))
(testing "comparable"
(is (zero? (compare simple24 simple24))
"identical instances should compare the same")
(is (zero? (compare simple24 (->SimpleValue 24 nil)))
"different instances of the same value should compare the same")
(is (neg? (compare simple24 simple64))
"lower numbered values should sort earlier")
(is (pos? (compare simple64 simple24))
"higher numbered values should sort later"))
(testing "metadata"
(is (nil? (meta simple24)))
(is (= simple24 (vary-meta simple24 assoc :x 123))
"should not affect equality")
(is (= (hash simple24) (hash (vary-meta simple24 assoc :y true)))
"should not affect hash code")
(is (zero? (compare simple24 (vary-meta simple24 assoc :foo :abc)))
"should not affect comparison")
(is (= {:x 123} (meta (vary-meta simple24 assoc :x 123)))
"metadata is preserved"))))
| null | https://raw.githubusercontent.com/greglook/clj-cbor/ff3ec660fe40789e2bf97b87b6a5e9be0361b0b2/test/clj_cbor/data/simple_test.clj | clojure | (ns clj-cbor.data.simple-test
(:require
[clj-cbor.data.simple :refer [->Undefined ->SimpleValue]]
[clojure.test :refer [deftest testing is]]))
(deftest undefined-values
(let [undefined (->Undefined nil)]
(testing "representation"
(is (= "undefined" (str undefined))))
(testing "equality"
(is (= undefined undefined))
(is (= undefined (->Undefined nil))
"all undefined values should be equal")
(is (not= undefined :foo)))
(testing "hash code"
(is (integer? (hash undefined)))
(is (= (hash undefined) (hash (->Undefined nil)))
"all undefined values should have the same hash"))
(testing "metadata"
(is (nil? (meta undefined)))
(is (= undefined (vary-meta undefined assoc :x 123))
"metadata does not affect equality")
(is (= {:x 123} (meta (vary-meta undefined assoc :x 123)))
"metadata is preserved"))))
(deftest simple-values
(let [simple24 (->SimpleValue 24 nil)
simple64 (->SimpleValue 64 nil)]
(testing "representation"
(is (= "simple(24)" (str simple24)))
(is (= "simple(64)" (str simple64))))
(testing "equality"
(is (= simple24 simple24)
"should be reflexive")
(is (= simple64 simple64)
"should be reflexive")
(is (= simple24 (->SimpleValue 24 nil))
"different instances of the same value should be equal")
(is (not= simple24 simple64)
"different simple values should not be equal")
(is (not= simple64 :foo)
"different types should not be equal"))
(testing "hash code"
(is (integer? (hash simple24)))
(is (= (hash simple24) (hash simple24))
"should be stable")
(is (= (hash simple24) (hash (->SimpleValue 24 nil)))
"different instances of the same value should have the same hash")
(is (not= (hash simple24) (hash simple64))
"different simple values should have different hashes"))
(testing "comparable"
(is (zero? (compare simple24 simple24))
"identical instances should compare the same")
(is (zero? (compare simple24 (->SimpleValue 24 nil)))
"different instances of the same value should compare the same")
(is (neg? (compare simple24 simple64))
"lower numbered values should sort earlier")
(is (pos? (compare simple64 simple24))
"higher numbered values should sort later"))
(testing "metadata"
(is (nil? (meta simple24)))
(is (= simple24 (vary-meta simple24 assoc :x 123))
"should not affect equality")
(is (= (hash simple24) (hash (vary-meta simple24 assoc :y true)))
"should not affect hash code")
(is (zero? (compare simple24 (vary-meta simple24 assoc :foo :abc)))
"should not affect comparison")
(is (= {:x 123} (meta (vary-meta simple24 assoc :x 123)))
"metadata is preserved"))))
| |
ced645fa3659dbb10aa450652835a77524f461cf6e58fcf1f451a5d8a0b38c65 | haskell-tools/haskell-tools | RangeToRangeTemplate.hs | # LANGUAGE ScopedTypeVariables #
-- | Transform a syntax tree with ranges to a syntax tree that has range templates. Cuts the ranges of children
-- from the ranges of their parents and replaces it with placeholders.
module Language.Haskell.Tools.PrettyPrint.Prepare.RangeToRangeTemplate (cutUpRanges, fixRanges, BreakUpProblem(..)) where
import Language.Haskell.Tools.AST
import Control.Exception (Exception, throw)
import Control.Monad.State
import Control.Reference ((^.))
import Data.List
import Data.Maybe (Maybe(..), mapMaybe)
import FastString as GHC (unpackFS)
import SrcLoc
import Language.Haskell.Tools.PrettyPrint.Prepare.RangeTemplate
-- | Creates a source template from the ranges and the input file.
-- All source ranges must be good ranges.
cutUpRanges :: forall node dom . SourceInfoTraversal node
=> Ann node dom NormRangeStage
-> Ann node dom RngTemplateStage
cutUpRanges n = evalState (cutUpRanges' n) [[],[]]
where cutUpRanges' :: Ann node dom NormRangeStage -> State [[SrcSpan]] (Ann node dom RngTemplateStage)
cutUpRanges' = sourceInfoTraverseUp (SourceInfoTrf (trf cutOutElemSpan) (trf cutOutElemList) (trf cutOutElemOpt)) desc asc
-- keep the stack to contain the children elements on the place of the parent element
desc = modify ([]:)
asc = modify tail
-- combine the current node with its children, and add it to the list of current nodes
trf :: HasRange (x RngTemplateStage)
=> ([SrcSpan] -> x NormRangeStage -> x RngTemplateStage) -> x NormRangeStage -> State [[SrcSpan]] (x RngTemplateStage)
trf f ni = do stack <- get
case stack of
(below : top : xs) -> do
let res = f below ni
put ([] : (top ++ [ getRange res ]) : xs)
return res
_ -> trfProblem "RangeToRangeTemplate.cutUpRanges.trf: stack is not right"
-- | Cuts out a list of source ranges from a given range
cutOutElemSpan :: [SrcSpan] -> SpanInfo NormRangeStage -> SpanInfo RngTemplateStage
cutOutElemSpan sps (NormNodeInfo (RealSrcSpan sp))
= RangeTemplateNode sp $ foldl breakFirstHit (foldl breakFirstHit [RangeElem sp] loc) span
where (loc,span) = partition (\sp -> srcSpanStart sp == srcSpanEnd sp) sps
breakFirstHit (elem:rest) sp
= case breakUpRangeElem elem sp of
-- only continue if the correct place for the child range is not found
Just pieces -> pieces ++ rest
Nothing -> elem : breakFirstHit rest sp
breakFirstHit [] inner = throw $ BreakUpProblem sp inner sps
cutOutElemSpan _ (NormNodeInfo (UnhelpfulSpan {}))
= trfProblem "cutOutElemSpan: no real span"
data BreakUpProblem = BreakUpProblem { bupOuter :: RealSrcSpan
, bupInner :: SrcSpan
, bupSiblings :: [SrcSpan]
}
instance Show BreakUpProblem where
show (BreakUpProblem _ (RealSrcSpan inner) _)
= unpackFS (srcSpanFile inner) ++ ": didn't find correct place for AST element at " ++ shortShowSpan (RealSrcSpan inner)
show (BreakUpProblem outer _ _)
= unpackFS (srcSpanFile outer) ++ ": didn't find correct place for AST element in " ++ shortShowSpan (RealSrcSpan outer)
instance Exception BreakUpProblem
cutOutElemList :: [SrcSpan] -> ListInfo NormRangeStage -> ListInfo RngTemplateStage
cutOutElemList sps (NormListInfo bef aft sep indented sp)
= let RealSrcSpan wholeRange = foldl1 combineSrcSpans $ sp : sps
in RangeTemplateList wholeRange bef aft sep indented (getSeparators wholeRange sps)
-- | Cuts out all elements from a list, the rest is the list of separators
getSeparators :: RealSrcSpan -> [SrcSpan] -> [RealSrcSpan]
getSeparators sp infos@(_:_:_)
= mapMaybe getRangeElemSpan (cutOutElemSpan infos (NormNodeInfo (RealSrcSpan sp)) ^. rngTemplateNodeElems)
at least two elements needed or there can be no separators
getSeparators _ _ = []
cutOutElemOpt :: [SrcSpan] -> OptionalInfo NormRangeStage -> OptionalInfo RngTemplateStage
cutOutElemOpt sps (NormOptInfo bef aft sp)
= let RealSrcSpan wholeRange = foldl1 combineSrcSpans $ sp : sps
in RangeTemplateOpt wholeRange bef aft
| Breaks the given template element into possibly 2 or 3 parts by cutting out the given part
if it is inside the range of the template element . Returns Nothing if the second argument is not inside .
breakUpRangeElem :: RangeTemplateElem -> SrcSpan -> Maybe [RangeTemplateElem]
breakUpRangeElem (RangeElem outer) (RealSrcSpan inner)
| outer `containsSpan` inner
= Just $ (if (realSrcSpanStart outer) < (realSrcSpanStart inner)
then [ RangeElem (mkRealSrcSpan (realSrcSpanStart outer) (realSrcSpanStart inner)) ]
else []) ++
[ RangeChildElem ] ++
(if (realSrcSpanEnd inner) < (realSrcSpanEnd outer)
then [ RangeElem (mkRealSrcSpan (realSrcSpanEnd inner) (realSrcSpanEnd outer)) ]
else [])
breakUpRangeElem _ _ = Nothing
-- | Modifies ranges to contain their children
fixRanges :: SourceInfoTraversal node
=> Ann node dom RangeStage
-> Ann node dom NormRangeStage
fixRanges node = evalState (sourceInfoTraverseUp (SourceInfoTrf (trf expandToContain) (trf expandListToContain) (trf expandOptToContain)) desc asc node) [[],[]]
where -- keep the stack to contain the children elements on the place of the parent element
desc = modify ([]:)
asc = modify tail
trf :: HasRange (x NormRangeStage)
=> ([SrcSpan] -> x RangeStage -> x NormRangeStage) -> x RangeStage -> State [[SrcSpan]] (x NormRangeStage)
trf f ni = do stack <- get
case stack of
(below : top : xs) -> do
let res = f below ni
resRange = getRange res
endOfSiblings = srcSpanEnd (collectSpanRanges (srcSpanStart resRange) top)
correctedRange = if endOfSiblings > srcSpanStart resRange
then mkSrcSpan endOfSiblings (max endOfSiblings (srcSpanEnd resRange))
else resRange
put ([] : (top ++ [ correctedRange ]) : xs)
return $ setRange correctedRange res
_ -> trfProblem "RangeToRangeTemplate.fixRanges.trf: stack is not right"
-- | Expand a simple node to contain its children
expandToContain :: [SrcSpan] -> SpanInfo RangeStage -> SpanInfo NormRangeStage
expandToContain cont (NodeSpan sp)
= NormNodeInfo (checkSpans cont $ foldl1 combineSrcSpans $ sp : cont)
expandListToContain :: [SrcSpan] -> ListInfo RangeStage -> ListInfo NormRangeStage
expandListToContain cont (ListPos bef aft def ind sp)
= NormListInfo bef aft def ind (checkSpans cont $ collectSpanRanges sp cont)
expandOptToContain :: [SrcSpan] -> OptionalInfo RangeStage -> OptionalInfo NormRangeStage
expandOptToContain cont (OptionalPos bef aft sp)
= NormOptInfo bef aft (checkSpans cont $ collectSpanRanges sp cont)
collectSpanRanges :: SrcLoc -> [SrcSpan] -> SrcSpan
collectSpanRanges loc@(RealSrcLoc _) [] = srcLocSpan loc
collectSpanRanges _ ls = foldl combineSrcSpans noSrcSpan ls
-- | Checks the contained source ranges to detect the convertion problems where we can see their location.
checkSpans :: [SrcSpan] -> SrcSpan -> SrcSpan
checkSpans spans res
= if any (not . isGoodSrcSpan) spans && isGoodSrcSpan res
then trfProblem $ "Wrong src spans in " ++ show res
else res
| null | https://raw.githubusercontent.com/haskell-tools/haskell-tools/b1189ab4f63b29bbf1aa14af4557850064931e32/src/prettyprint/Language/Haskell/Tools/PrettyPrint/Prepare/RangeToRangeTemplate.hs | haskell | | Transform a syntax tree with ranges to a syntax tree that has range templates. Cuts the ranges of children
from the ranges of their parents and replaces it with placeholders.
| Creates a source template from the ranges and the input file.
All source ranges must be good ranges.
keep the stack to contain the children elements on the place of the parent element
combine the current node with its children, and add it to the list of current nodes
| Cuts out a list of source ranges from a given range
only continue if the correct place for the child range is not found
| Cuts out all elements from a list, the rest is the list of separators
| Modifies ranges to contain their children
keep the stack to contain the children elements on the place of the parent element
| Expand a simple node to contain its children
| Checks the contained source ranges to detect the convertion problems where we can see their location. | # LANGUAGE ScopedTypeVariables #
module Language.Haskell.Tools.PrettyPrint.Prepare.RangeToRangeTemplate (cutUpRanges, fixRanges, BreakUpProblem(..)) where
import Language.Haskell.Tools.AST
import Control.Exception (Exception, throw)
import Control.Monad.State
import Control.Reference ((^.))
import Data.List
import Data.Maybe (Maybe(..), mapMaybe)
import FastString as GHC (unpackFS)
import SrcLoc
import Language.Haskell.Tools.PrettyPrint.Prepare.RangeTemplate
cutUpRanges :: forall node dom . SourceInfoTraversal node
=> Ann node dom NormRangeStage
-> Ann node dom RngTemplateStage
cutUpRanges n = evalState (cutUpRanges' n) [[],[]]
where cutUpRanges' :: Ann node dom NormRangeStage -> State [[SrcSpan]] (Ann node dom RngTemplateStage)
cutUpRanges' = sourceInfoTraverseUp (SourceInfoTrf (trf cutOutElemSpan) (trf cutOutElemList) (trf cutOutElemOpt)) desc asc
desc = modify ([]:)
asc = modify tail
trf :: HasRange (x RngTemplateStage)
=> ([SrcSpan] -> x NormRangeStage -> x RngTemplateStage) -> x NormRangeStage -> State [[SrcSpan]] (x RngTemplateStage)
trf f ni = do stack <- get
case stack of
(below : top : xs) -> do
let res = f below ni
put ([] : (top ++ [ getRange res ]) : xs)
return res
_ -> trfProblem "RangeToRangeTemplate.cutUpRanges.trf: stack is not right"
cutOutElemSpan :: [SrcSpan] -> SpanInfo NormRangeStage -> SpanInfo RngTemplateStage
cutOutElemSpan sps (NormNodeInfo (RealSrcSpan sp))
= RangeTemplateNode sp $ foldl breakFirstHit (foldl breakFirstHit [RangeElem sp] loc) span
where (loc,span) = partition (\sp -> srcSpanStart sp == srcSpanEnd sp) sps
breakFirstHit (elem:rest) sp
= case breakUpRangeElem elem sp of
Just pieces -> pieces ++ rest
Nothing -> elem : breakFirstHit rest sp
breakFirstHit [] inner = throw $ BreakUpProblem sp inner sps
cutOutElemSpan _ (NormNodeInfo (UnhelpfulSpan {}))
= trfProblem "cutOutElemSpan: no real span"
data BreakUpProblem = BreakUpProblem { bupOuter :: RealSrcSpan
, bupInner :: SrcSpan
, bupSiblings :: [SrcSpan]
}
instance Show BreakUpProblem where
show (BreakUpProblem _ (RealSrcSpan inner) _)
= unpackFS (srcSpanFile inner) ++ ": didn't find correct place for AST element at " ++ shortShowSpan (RealSrcSpan inner)
show (BreakUpProblem outer _ _)
= unpackFS (srcSpanFile outer) ++ ": didn't find correct place for AST element in " ++ shortShowSpan (RealSrcSpan outer)
instance Exception BreakUpProblem
cutOutElemList :: [SrcSpan] -> ListInfo NormRangeStage -> ListInfo RngTemplateStage
cutOutElemList sps (NormListInfo bef aft sep indented sp)
= let RealSrcSpan wholeRange = foldl1 combineSrcSpans $ sp : sps
in RangeTemplateList wholeRange bef aft sep indented (getSeparators wholeRange sps)
getSeparators :: RealSrcSpan -> [SrcSpan] -> [RealSrcSpan]
getSeparators sp infos@(_:_:_)
= mapMaybe getRangeElemSpan (cutOutElemSpan infos (NormNodeInfo (RealSrcSpan sp)) ^. rngTemplateNodeElems)
at least two elements needed or there can be no separators
getSeparators _ _ = []
cutOutElemOpt :: [SrcSpan] -> OptionalInfo NormRangeStage -> OptionalInfo RngTemplateStage
cutOutElemOpt sps (NormOptInfo bef aft sp)
= let RealSrcSpan wholeRange = foldl1 combineSrcSpans $ sp : sps
in RangeTemplateOpt wholeRange bef aft
| Breaks the given template element into possibly 2 or 3 parts by cutting out the given part
if it is inside the range of the template element . Returns Nothing if the second argument is not inside .
breakUpRangeElem :: RangeTemplateElem -> SrcSpan -> Maybe [RangeTemplateElem]
breakUpRangeElem (RangeElem outer) (RealSrcSpan inner)
| outer `containsSpan` inner
= Just $ (if (realSrcSpanStart outer) < (realSrcSpanStart inner)
then [ RangeElem (mkRealSrcSpan (realSrcSpanStart outer) (realSrcSpanStart inner)) ]
else []) ++
[ RangeChildElem ] ++
(if (realSrcSpanEnd inner) < (realSrcSpanEnd outer)
then [ RangeElem (mkRealSrcSpan (realSrcSpanEnd inner) (realSrcSpanEnd outer)) ]
else [])
breakUpRangeElem _ _ = Nothing
fixRanges :: SourceInfoTraversal node
=> Ann node dom RangeStage
-> Ann node dom NormRangeStage
fixRanges node = evalState (sourceInfoTraverseUp (SourceInfoTrf (trf expandToContain) (trf expandListToContain) (trf expandOptToContain)) desc asc node) [[],[]]
desc = modify ([]:)
asc = modify tail
trf :: HasRange (x NormRangeStage)
=> ([SrcSpan] -> x RangeStage -> x NormRangeStage) -> x RangeStage -> State [[SrcSpan]] (x NormRangeStage)
trf f ni = do stack <- get
case stack of
(below : top : xs) -> do
let res = f below ni
resRange = getRange res
endOfSiblings = srcSpanEnd (collectSpanRanges (srcSpanStart resRange) top)
correctedRange = if endOfSiblings > srcSpanStart resRange
then mkSrcSpan endOfSiblings (max endOfSiblings (srcSpanEnd resRange))
else resRange
put ([] : (top ++ [ correctedRange ]) : xs)
return $ setRange correctedRange res
_ -> trfProblem "RangeToRangeTemplate.fixRanges.trf: stack is not right"
expandToContain :: [SrcSpan] -> SpanInfo RangeStage -> SpanInfo NormRangeStage
expandToContain cont (NodeSpan sp)
= NormNodeInfo (checkSpans cont $ foldl1 combineSrcSpans $ sp : cont)
expandListToContain :: [SrcSpan] -> ListInfo RangeStage -> ListInfo NormRangeStage
expandListToContain cont (ListPos bef aft def ind sp)
= NormListInfo bef aft def ind (checkSpans cont $ collectSpanRanges sp cont)
expandOptToContain :: [SrcSpan] -> OptionalInfo RangeStage -> OptionalInfo NormRangeStage
expandOptToContain cont (OptionalPos bef aft sp)
= NormOptInfo bef aft (checkSpans cont $ collectSpanRanges sp cont)
collectSpanRanges :: SrcLoc -> [SrcSpan] -> SrcSpan
collectSpanRanges loc@(RealSrcLoc _) [] = srcLocSpan loc
collectSpanRanges _ ls = foldl combineSrcSpans noSrcSpan ls
checkSpans :: [SrcSpan] -> SrcSpan -> SrcSpan
checkSpans spans res
= if any (not . isGoodSrcSpan) spans && isGoodSrcSpan res
then trfProblem $ "Wrong src spans in " ++ show res
else res
|
88b8c9880090d60763dfe0f68a23d1acaa938921899d1d44535cf2ebdebe9ec1 | haskell-repa/repa | InstMaybe.hs | # LANGUAGE UndecidableInstances , IncoherentInstances #
# OPTIONS_GHC -fno - warn - orphans #
module Data.Repa.Array.Material.Auto.InstMaybe
( A (..)
, Name (..)
, Array (..))
where
import Data.Repa.Array.Material.Auto.Base as A
import Data.Repa.Array.Material.Boxed as A
import Data.Repa.Array.Meta.Window as A
import Data.Repa.Array.Internals.Bulk as A
import Data.Repa.Array.Internals.Target as A
import Data.Repa.Array.Internals.Layout as A
import Control.Monad
#include "repa-array.h"
instance Bulk A a => Bulk A (Maybe a) where
data Array A (Maybe a)
= AArray_Maybe !(Array B (Maybe a))
layout (AArray_Maybe arr)
= Auto (A.length arr)
# INLINE_ARRAY layout #
index (AArray_Maybe arr) ix
= A.index arr ix
# INLINE_ARRAY index #
deriving instance Show a => Show (Array A (Maybe a))
instance Bulk A a => Windowable A (Maybe a) where
window st len (AArray_Maybe arr)
= AArray_Maybe (window st len arr)
# INLINE_ARRAY window #
instance Target A (Maybe a) where
data Buffer A (Maybe a)
= ABuffer_Maybe !(Buffer B (Maybe a))
unsafeNewBuffer (Auto len)
= liftM ABuffer_Maybe $ unsafeNewBuffer (Boxed len)
{-# INLINE_ARRAY unsafeNewBuffer #-}
unsafeReadBuffer (ABuffer_Maybe arr) ix
= unsafeReadBuffer arr ix
# INLINE_ARRAY unsafeReadBuffer #
unsafeWriteBuffer (ABuffer_Maybe arr) ix !mx
= unsafeWriteBuffer arr ix mx
# INLINE_ARRAY unsafeWriteBuffer #
unsafeGrowBuffer (ABuffer_Maybe arr) bump
= liftM ABuffer_Maybe $ unsafeGrowBuffer arr bump
# INLINE_ARRAY unsafeGrowBuffer #
unsafeFreezeBuffer (ABuffer_Maybe arr)
= liftM AArray_Maybe $ unsafeFreezeBuffer arr
# INLINE_ARRAY unsafeFreezeBuffer #
unsafeThawBuffer (AArray_Maybe arr)
= liftM ABuffer_Maybe $ unsafeThawBuffer arr
{-# INLINE_ARRAY unsafeThawBuffer #-}
unsafeSliceBuffer st len (ABuffer_Maybe buf)
= liftM ABuffer_Maybe $ unsafeSliceBuffer st len buf
{-# INLINE_ARRAY unsafeSliceBuffer #-}
touchBuffer (ABuffer_Maybe buf)
= touchBuffer buf
# INLINE_ARRAY touchBuffer #
bufferLayout (ABuffer_Maybe buf)
= Auto $ A.extent $ bufferLayout buf
{-# INLINE_ARRAY bufferLayout #-}
instance Eq a
=> Eq (Array A (Maybe a)) where
(==) (AArray_Maybe arr1) (AArray_Maybe arr2) = arr1 == arr2
# INLINE (= =) #
| null | https://raw.githubusercontent.com/haskell-repa/repa/c867025e99fd008f094a5b18ce4dabd29bed00ba/repa-array/Data/Repa/Array/Material/Auto/InstMaybe.hs | haskell | # INLINE_ARRAY unsafeNewBuffer #
# INLINE_ARRAY unsafeThawBuffer #
# INLINE_ARRAY unsafeSliceBuffer #
# INLINE_ARRAY bufferLayout # | # LANGUAGE UndecidableInstances , IncoherentInstances #
# OPTIONS_GHC -fno - warn - orphans #
module Data.Repa.Array.Material.Auto.InstMaybe
( A (..)
, Name (..)
, Array (..))
where
import Data.Repa.Array.Material.Auto.Base as A
import Data.Repa.Array.Material.Boxed as A
import Data.Repa.Array.Meta.Window as A
import Data.Repa.Array.Internals.Bulk as A
import Data.Repa.Array.Internals.Target as A
import Data.Repa.Array.Internals.Layout as A
import Control.Monad
#include "repa-array.h"
instance Bulk A a => Bulk A (Maybe a) where
data Array A (Maybe a)
= AArray_Maybe !(Array B (Maybe a))
layout (AArray_Maybe arr)
= Auto (A.length arr)
# INLINE_ARRAY layout #
index (AArray_Maybe arr) ix
= A.index arr ix
# INLINE_ARRAY index #
deriving instance Show a => Show (Array A (Maybe a))
instance Bulk A a => Windowable A (Maybe a) where
window st len (AArray_Maybe arr)
= AArray_Maybe (window st len arr)
# INLINE_ARRAY window #
instance Target A (Maybe a) where
data Buffer A (Maybe a)
= ABuffer_Maybe !(Buffer B (Maybe a))
unsafeNewBuffer (Auto len)
= liftM ABuffer_Maybe $ unsafeNewBuffer (Boxed len)
unsafeReadBuffer (ABuffer_Maybe arr) ix
= unsafeReadBuffer arr ix
# INLINE_ARRAY unsafeReadBuffer #
unsafeWriteBuffer (ABuffer_Maybe arr) ix !mx
= unsafeWriteBuffer arr ix mx
# INLINE_ARRAY unsafeWriteBuffer #
unsafeGrowBuffer (ABuffer_Maybe arr) bump
= liftM ABuffer_Maybe $ unsafeGrowBuffer arr bump
# INLINE_ARRAY unsafeGrowBuffer #
unsafeFreezeBuffer (ABuffer_Maybe arr)
= liftM AArray_Maybe $ unsafeFreezeBuffer arr
# INLINE_ARRAY unsafeFreezeBuffer #
unsafeThawBuffer (AArray_Maybe arr)
= liftM ABuffer_Maybe $ unsafeThawBuffer arr
unsafeSliceBuffer st len (ABuffer_Maybe buf)
= liftM ABuffer_Maybe $ unsafeSliceBuffer st len buf
touchBuffer (ABuffer_Maybe buf)
= touchBuffer buf
# INLINE_ARRAY touchBuffer #
bufferLayout (ABuffer_Maybe buf)
= Auto $ A.extent $ bufferLayout buf
instance Eq a
=> Eq (Array A (Maybe a)) where
(==) (AArray_Maybe arr1) (AArray_Maybe arr2) = arr1 == arr2
# INLINE (= =) #
|
0f30ff800ef06d9c9dbc8772d893bc46dc89feedf7a0846758a69682fbbfbb68 | fission-codes/fission | Token.hs | module Fission.Web.Server.Auth.Token
( get
, handler
, module Fission.Web.Server.Auth.Token.Types
) where
import Network.Wai
import Servant.API
import qualified Web.UCAN.Resolver as UCAN
import Fission.Prelude
import Fission.Authorization.ServerDID.Class
import qualified Fission.Web.Server.Auth.Error as Auth
import qualified Fission.Web.Server.Auth.Token.Basic as Basic
import Fission.Web.Server.Auth.Token.Types
import qualified Fission.Web.Server.Auth.Token.UCAN as UCAN
import Fission.Web.Server.Authorization.Types
import qualified Fission.Web.Server.Error as Web.Error
import Fission.Web.Server.MonadDB
import qualified Fission.Web.Server.User as User
-- | Higher order auth handler
-- Uses basic auth for "Basic" tokens
Uses our custom JWT auth for " Bearer " tokens
handler ::
( UCAN.Resolver m
, ServerDID m
, MonadLogger m
, MonadThrow m
, MonadTime m
, MonadDB t m
, User.Retriever t
)
=> Request
-> m Authorization
handler req =
case get req of
Right (Bearer bearer) -> UCAN.handler bearer
Right (Basic basic') -> Basic.handler basic'
Left err -> Web.Error.throw err
get :: Request -> Either Auth.Error Token
get req =
case lookup "Authorization" headers <|> lookup "authorization" headers of
Nothing ->
Left Auth.NoToken
Just auth ->
case parseHeader auth of
Left errMsg -> Left $ Auth.CannotParse errMsg
Right token -> Right token
where
headers = requestHeaders req
| null | https://raw.githubusercontent.com/fission-codes/fission/ae177407dccc20be67948a901956b99f40d37ac8/fission-web-server/library/Fission/Web/Server/Auth/Token.hs | haskell | | Higher order auth handler
Uses basic auth for "Basic" tokens | module Fission.Web.Server.Auth.Token
( get
, handler
, module Fission.Web.Server.Auth.Token.Types
) where
import Network.Wai
import Servant.API
import qualified Web.UCAN.Resolver as UCAN
import Fission.Prelude
import Fission.Authorization.ServerDID.Class
import qualified Fission.Web.Server.Auth.Error as Auth
import qualified Fission.Web.Server.Auth.Token.Basic as Basic
import Fission.Web.Server.Auth.Token.Types
import qualified Fission.Web.Server.Auth.Token.UCAN as UCAN
import Fission.Web.Server.Authorization.Types
import qualified Fission.Web.Server.Error as Web.Error
import Fission.Web.Server.MonadDB
import qualified Fission.Web.Server.User as User
Uses our custom JWT auth for " Bearer " tokens
handler ::
( UCAN.Resolver m
, ServerDID m
, MonadLogger m
, MonadThrow m
, MonadTime m
, MonadDB t m
, User.Retriever t
)
=> Request
-> m Authorization
handler req =
case get req of
Right (Bearer bearer) -> UCAN.handler bearer
Right (Basic basic') -> Basic.handler basic'
Left err -> Web.Error.throw err
get :: Request -> Either Auth.Error Token
get req =
case lookup "Authorization" headers <|> lookup "authorization" headers of
Nothing ->
Left Auth.NoToken
Just auth ->
case parseHeader auth of
Left errMsg -> Left $ Auth.CannotParse errMsg
Right token -> Right token
where
headers = requestHeaders req
|
6a3cd87e3910ee79096fce5127f8afc76cd93b2fe50720ec58cd70842f74df36 | jgrodziski/set-game | core_test.clj | (ns set-game.core-test
(:require [clojure.test :refer :all]
[set-game.core :refer :all]))
(deftest a-test
(testing "FIXME, I fail."
(is (= 0 1))))
| null | https://raw.githubusercontent.com/jgrodziski/set-game/4f918f53f26fe1b7744773cf1786ccf7272a121d/frontend/test/set_game/core_test.clj | clojure | (ns set-game.core-test
(:require [clojure.test :refer :all]
[set-game.core :refer :all]))
(deftest a-test
(testing "FIXME, I fail."
(is (= 0 1))))
| |
ebcaa1bd28149f3479d3c9df28473ad294e70cd1554ee372cafd52bf2e5fa29d | retrogradeorbit/cloud-fighter | boss.cljs | (ns cloud-fighter.boss
(:require [infinitelives.utils.vec2 :as vec2]
[infinitelives.utils.events :as e]
[infinitelives.utils.boid :as b]
[infinitelives.utils.math :as math]
[infinitelives.utils.console :refer [log]]
[infinitelives.utils.spatial :as spatial]
[infinitelives.utils.sound :as sound]
[infinitelives.pixi.sprite :as s]
[cloud-fighter.state :as state]
[cloud-fighter.explosion :as explosion]
[cloud-fighter.bullet :as bullet]
[cloud-fighter.score :as score]
[cloud-fighter.missile :as missile]
[cljs.core.async :refer [<! timeout]])
(:require-macros [cljs.core.async.macros :refer [go]]
[cloud-fighter.async :refer [go-while]]
[infinitelives.pixi.macros :as m]))
(defonce bosses (atom {}))
(defn add! [bkey boss]
(swap! bosses assoc bkey boss))
(defn remove! [bkey]
(swap! bosses dissoc bkey))
(defn count-bosses []
(count @bosses))
(defn constrain [vec w h]
(let [hw (/ w 2)
hh (/ h 2)
x (vec2/get-x vec)
y (vec2/get-y vec)]
(vec2/vec2
(- (mod (+ hw x) w) hw)
(- (mod (+ hh y) h) hh))))
(defn spawn [canvas]
(go
(let [[sfx gain] (sound/play-sound :boss-loop 0.3 true)
gutter 32
hw (+ gutter (/ (.-innerWidth js/window) 2))
hh (+ gutter (/ (.-innerHeight js/window) 2))
direction (rand-nth [:left :right])
start-pos (case direction
:right
(vec2/vec2 (- hw) (math/rand-between (- hh) hh))
:left
(vec2/vec2 hw (math/rand-between (- hh) hh)))
dx (case direction :left -1 :right 1)
bkey (keyword (gensym))
skey [:boss bkey]
enemy-bullet-scale (:enemy-bullet-scale @state/state)
enemy-bullet-gfx (:enemy-bullet-gfx @state/state)
]
(m/with-sprite canvas :enemy
[boss (s/make-sprite (:boss @state/state) :scale 3)]
(s/set-scale! boss (* -3 dx) 3)
(add! bkey bosses)
(spatial/add-to-spatial! :default skey (vec2/as-vector start-pos))
(loop [boid {:mass 10.0 :pos start-pos
:vel (vec2/vec2 (* (:boss-speed @state/state) dx) 0)
:max-force 2.0 :max-speed (:boss-speed @state/state)}
shot-times 0
]
(s/set-pos! boss (:pos boid))
(<! (e/next-frame))
;; shoot & missile
(let [prob (rand)]
(cond (< prob (:boss-bullet-probability @state/state))
(cloud-fighter.enemy/spawn-bullet!
canvas (:pos boid)
(vec2/scale (:pos boid) -1)
(:boss-bullet-speed @state/state)
(:boss-bullet-life @state/state)
enemy-bullet-scale
enemy-bullet-gfx)
(< (:boss-bullet-probability @state/state) prob (+ (:boss-bullet-probability @state/state) (:boss-missile-probability @state/state)))
(missile/spawn canvas (:pos boid) (:vel boid) (:boss-missile-life @state/state))))
;; check for collision
(let [matched (->>
(spatial/query (:default @spatial/spatial-hashes)
(vec2/as-vector (vec2/sub (:pos boid) (vec2/vec2 32 32)))
(vec2/as-vector (vec2/add (:pos boid) (vec2/vec2 32 32))))
keys
(filter #(= :bullet (first %)))
)]
(cond
(and (pos? (count matched)) (= shot-times (:boss-shots @state/state)))
;; final-shot!
(do
(.stop sfx)
(bullet/remove! (-> matched first second))
(spatial/remove-from-spatial :default skey (vec2/as-vector (:pos boid)))
(remove! bkey)
(state/add-score! (:boss-score @state/state))
(score/popup! canvas (:pos boid) (:boss-score @state/state) 200)
(state/level-up!)
(set! (.-backgroundColor (:renderer canvas)) (:background @state/state))
)
;; key no longer in set
(not (bkey @bosses))
(do
(.stop sfx)
(spatial/remove-from-spatial :default skey (vec2/as-vector (:pos boid)))
(state/add-score! (:boss-score @state/state))
(score/popup! canvas (:pos boid) (:boss-score @state/state) 200))
(not (state/playing?))
(do
(.stop sfx)
(spatial/remove-from-spatial :default skey (vec2/as-vector (:pos boid)))
(remove! bkey))
:default
;; alive!
(let [gutter 128
w (+ gutter (.-innerWidth js/window))
h (+ gutter (.-innerHeight js/window))
hw (/ w 2)
hh (/ h 2)
next-boid (update-in
(b/apply-steering boid (vec2/zero))
[:pos]
#(-> %
(vec2/sub (:vel @state/state))
(constrain w h)
))]
(spatial/move-in-spatial
:default skey
(vec2/as-vector (:pos boid))
(vec2/as-vector (:pos next-boid)))
(recur next-boid
(if (pos? (count matched))
;; shot
(do
(bullet/remove! (-> matched first second))
(inc shot-times))
;; not shot
shot-times))))))))))
| null | https://raw.githubusercontent.com/retrogradeorbit/cloud-fighter/4c4d30fc2d9b14ce4c73f3d252be519daaa09d51/src/cloud_fighter/boss.cljs | clojure | shoot & missile
check for collision
final-shot!
key no longer in set
alive!
shot
not shot | (ns cloud-fighter.boss
(:require [infinitelives.utils.vec2 :as vec2]
[infinitelives.utils.events :as e]
[infinitelives.utils.boid :as b]
[infinitelives.utils.math :as math]
[infinitelives.utils.console :refer [log]]
[infinitelives.utils.spatial :as spatial]
[infinitelives.utils.sound :as sound]
[infinitelives.pixi.sprite :as s]
[cloud-fighter.state :as state]
[cloud-fighter.explosion :as explosion]
[cloud-fighter.bullet :as bullet]
[cloud-fighter.score :as score]
[cloud-fighter.missile :as missile]
[cljs.core.async :refer [<! timeout]])
(:require-macros [cljs.core.async.macros :refer [go]]
[cloud-fighter.async :refer [go-while]]
[infinitelives.pixi.macros :as m]))
(defonce bosses (atom {}))
(defn add! [bkey boss]
(swap! bosses assoc bkey boss))
(defn remove! [bkey]
(swap! bosses dissoc bkey))
(defn count-bosses []
(count @bosses))
(defn constrain [vec w h]
(let [hw (/ w 2)
hh (/ h 2)
x (vec2/get-x vec)
y (vec2/get-y vec)]
(vec2/vec2
(- (mod (+ hw x) w) hw)
(- (mod (+ hh y) h) hh))))
(defn spawn [canvas]
(go
(let [[sfx gain] (sound/play-sound :boss-loop 0.3 true)
gutter 32
hw (+ gutter (/ (.-innerWidth js/window) 2))
hh (+ gutter (/ (.-innerHeight js/window) 2))
direction (rand-nth [:left :right])
start-pos (case direction
:right
(vec2/vec2 (- hw) (math/rand-between (- hh) hh))
:left
(vec2/vec2 hw (math/rand-between (- hh) hh)))
dx (case direction :left -1 :right 1)
bkey (keyword (gensym))
skey [:boss bkey]
enemy-bullet-scale (:enemy-bullet-scale @state/state)
enemy-bullet-gfx (:enemy-bullet-gfx @state/state)
]
(m/with-sprite canvas :enemy
[boss (s/make-sprite (:boss @state/state) :scale 3)]
(s/set-scale! boss (* -3 dx) 3)
(add! bkey bosses)
(spatial/add-to-spatial! :default skey (vec2/as-vector start-pos))
(loop [boid {:mass 10.0 :pos start-pos
:vel (vec2/vec2 (* (:boss-speed @state/state) dx) 0)
:max-force 2.0 :max-speed (:boss-speed @state/state)}
shot-times 0
]
(s/set-pos! boss (:pos boid))
(<! (e/next-frame))
(let [prob (rand)]
(cond (< prob (:boss-bullet-probability @state/state))
(cloud-fighter.enemy/spawn-bullet!
canvas (:pos boid)
(vec2/scale (:pos boid) -1)
(:boss-bullet-speed @state/state)
(:boss-bullet-life @state/state)
enemy-bullet-scale
enemy-bullet-gfx)
(< (:boss-bullet-probability @state/state) prob (+ (:boss-bullet-probability @state/state) (:boss-missile-probability @state/state)))
(missile/spawn canvas (:pos boid) (:vel boid) (:boss-missile-life @state/state))))
(let [matched (->>
(spatial/query (:default @spatial/spatial-hashes)
(vec2/as-vector (vec2/sub (:pos boid) (vec2/vec2 32 32)))
(vec2/as-vector (vec2/add (:pos boid) (vec2/vec2 32 32))))
keys
(filter #(= :bullet (first %)))
)]
(cond
(and (pos? (count matched)) (= shot-times (:boss-shots @state/state)))
(do
(.stop sfx)
(bullet/remove! (-> matched first second))
(spatial/remove-from-spatial :default skey (vec2/as-vector (:pos boid)))
(remove! bkey)
(state/add-score! (:boss-score @state/state))
(score/popup! canvas (:pos boid) (:boss-score @state/state) 200)
(state/level-up!)
(set! (.-backgroundColor (:renderer canvas)) (:background @state/state))
)
(not (bkey @bosses))
(do
(.stop sfx)
(spatial/remove-from-spatial :default skey (vec2/as-vector (:pos boid)))
(state/add-score! (:boss-score @state/state))
(score/popup! canvas (:pos boid) (:boss-score @state/state) 200))
(not (state/playing?))
(do
(.stop sfx)
(spatial/remove-from-spatial :default skey (vec2/as-vector (:pos boid)))
(remove! bkey))
:default
(let [gutter 128
w (+ gutter (.-innerWidth js/window))
h (+ gutter (.-innerHeight js/window))
hw (/ w 2)
hh (/ h 2)
next-boid (update-in
(b/apply-steering boid (vec2/zero))
[:pos]
#(-> %
(vec2/sub (:vel @state/state))
(constrain w h)
))]
(spatial/move-in-spatial
:default skey
(vec2/as-vector (:pos boid))
(vec2/as-vector (:pos next-boid)))
(recur next-boid
(if (pos? (count matched))
(do
(bullet/remove! (-> matched first second))
(inc shot-times))
shot-times))))))))))
|
b6469f3f96cde069d33441f7e7599e5dc681bc35d75e0a7d5694c75ce31c986c | bgusach/exercises-htdp2e | ex-112.rkt | #lang htdp/bsl
(require test-engine/racket-tests)
(check-expect (missile-or-not? #true) #false)
(check-expect (missile-or-not? #false) #true)
(check-expect (missile-or-not? (make-posn 0 0)) #true)
(check-expect (missile-or-not? "lol") #false)
(define (missile-or-not? v)
(or
(false? v)
(posn? v)
))
(test)
| null | https://raw.githubusercontent.com/bgusach/exercises-htdp2e/c4fd33f28fb0427862a2777a1fde8bf6432a7690/ex-112.rkt | racket | #lang htdp/bsl
(require test-engine/racket-tests)
(check-expect (missile-or-not? #true) #false)
(check-expect (missile-or-not? #false) #true)
(check-expect (missile-or-not? (make-posn 0 0)) #true)
(check-expect (missile-or-not? "lol") #false)
(define (missile-or-not? v)
(or
(false? v)
(posn? v)
))
(test)
| |
1e7cd2a2990fe06d4ed82452542d4c1979f8ef5a65b5eef8948fcdb3131212e1 | wilbowma/compiler-theory | serve.rkt | #lang racket/base
(require
web-server/dispatchers/dispatch
web-server/servlet-env)
(serve/servlet (lambda (_) (next-dispatcher))
#:servlet-path "/index.html"
#:extra-files-paths (list "compiler-theory")
#:port 8000
#:listen-ip #f
#:launch-browser? #f)
| null | https://raw.githubusercontent.com/wilbowma/compiler-theory/3facf11302e0f6051b1175e7cf3e3a8bcd30e982/serve.rkt | racket | #lang racket/base
(require
web-server/dispatchers/dispatch
web-server/servlet-env)
(serve/servlet (lambda (_) (next-dispatcher))
#:servlet-path "/index.html"
#:extra-files-paths (list "compiler-theory")
#:port 8000
#:listen-ip #f
#:launch-browser? #f)
| |
da43190052b14e40fe7d81e1347faae4c8aa315f5cdbbe9676fe53801211916b | homegrownlabs/sim-template | model.clj | (ns {{namespace}}.model
"This namespace describes potential agent interactions with a system, as well
as functions for realizing that non-deterministic behavior into a fixed
simulant 'test'."
(:require [simulant.util :refer [tx-ent]]
[datomic.api :as d]
[causatum.event-streams :as es]
[{{namespace}}.util :as util]))
(def agent-behavior
;; from-state to-state weight max-delay (ms)
#{[:start :retrieve 100 100]
[:retrieve :retrieve 33 50]
[:retrieve :inc 33 50]
[:retrieve :dec 33 50]
[:inc :retrieve 20 50]
[:inc :inc 60 50]
[:inc :dec 20 50]
[:dec :retrieve 20 50]
[:dec :dec 60 50]
[:dec :inc 20 50]})
(defn delay->qualified-delay
"Convert an integer max-delay delay into a causatum-qualified :random delay."
[delay]
[:random delay])
(defn edge->node
"Convert an edge-vector into a causatum node."
[edge]
(let [[_ to weight delay] edge]
{to {:weight weight
:delay (delay->qualified-delay delay)}}))
(defn edge-graph->node-graph
"Transform a set of edges into a causatum-compatible node graph."
[edges]
(->> edges
(group-by first)
(map (fn [[from tos]] [from (->> tos
(mapv edge->node)
(into {})
vector)]))
(into {})))
(defn state-transition-model
"Construct a causatum state-transition model from a set of edges."
[edges]
{:graph (edge-graph->node-graph edges)
:delay-ops {:random (fn [rtime n] (rand n))}})
(def initial-state
[{:state :start
:rtime 0}])
(defn state-stream
"Produce a lazy sequence of states for a set of edges describing a state
machine."
[edges]
(es/event-stream (state-transition-model edges)
initial-state))
(defn find-by-name [db name]
(d/entity db [:model/name name]))
(defn create-model!
"Persist a model and its metadata to the database"
[uri name description type]
(let [conn (d/connect uri)
model-id (d/tempid :model)
model (cond-> {:db/id model-id
:model/type type
:model/name name
:source/codebase (util/codebase-ent :model)}
description
(assoc :model/description description))]
(-> @(d/transact conn [model])
(tx-ent model-id))))
(defn list-models
[db]
(->> (d/q '[:find ?m
:where [?m :model/name]]
db)
(map first)
(map (partial d/entity db))))
| null | https://raw.githubusercontent.com/homegrownlabs/sim-template/2ddba7c1a3c2a17aff1e1ed30bada941bcb938a3/src/leiningen/new/sim_test/src/model.clj | clojure | from-state to-state weight max-delay (ms) | (ns {{namespace}}.model
"This namespace describes potential agent interactions with a system, as well
as functions for realizing that non-deterministic behavior into a fixed
simulant 'test'."
(:require [simulant.util :refer [tx-ent]]
[datomic.api :as d]
[causatum.event-streams :as es]
[{{namespace}}.util :as util]))
(def agent-behavior
#{[:start :retrieve 100 100]
[:retrieve :retrieve 33 50]
[:retrieve :inc 33 50]
[:retrieve :dec 33 50]
[:inc :retrieve 20 50]
[:inc :inc 60 50]
[:inc :dec 20 50]
[:dec :retrieve 20 50]
[:dec :dec 60 50]
[:dec :inc 20 50]})
(defn delay->qualified-delay
"Convert an integer max-delay delay into a causatum-qualified :random delay."
[delay]
[:random delay])
(defn edge->node
"Convert an edge-vector into a causatum node."
[edge]
(let [[_ to weight delay] edge]
{to {:weight weight
:delay (delay->qualified-delay delay)}}))
(defn edge-graph->node-graph
"Transform a set of edges into a causatum-compatible node graph."
[edges]
(->> edges
(group-by first)
(map (fn [[from tos]] [from (->> tos
(mapv edge->node)
(into {})
vector)]))
(into {})))
(defn state-transition-model
"Construct a causatum state-transition model from a set of edges."
[edges]
{:graph (edge-graph->node-graph edges)
:delay-ops {:random (fn [rtime n] (rand n))}})
(def initial-state
[{:state :start
:rtime 0}])
(defn state-stream
"Produce a lazy sequence of states for a set of edges describing a state
machine."
[edges]
(es/event-stream (state-transition-model edges)
initial-state))
(defn find-by-name [db name]
(d/entity db [:model/name name]))
(defn create-model!
"Persist a model and its metadata to the database"
[uri name description type]
(let [conn (d/connect uri)
model-id (d/tempid :model)
model (cond-> {:db/id model-id
:model/type type
:model/name name
:source/codebase (util/codebase-ent :model)}
description
(assoc :model/description description))]
(-> @(d/transact conn [model])
(tx-ent model-id))))
(defn list-models
[db]
(->> (d/q '[:find ?m
:where [?m :model/name]]
db)
(map first)
(map (partial d/entity db))))
|
b15c15ad1eaa39565d5c9d14e686996f659f0af9219a4473f28f0ab9d05a310b | altsun/My-Lisps | (SW) Continue Polyline.lsp | (defun C:SW (/ dat c elst wid ename pend pt)
(vl-load-com)
(setvar "cmdecho" 0)
(setq plw (getvar "plinewid"))
(if
(and (setq dat (entsel "\nSelect source polyline: "))
(wcmatch (cdadr (setq elst (entget (setq ename (car dat)))))
"*POLYLINE*"))
(progn
(setq wid (cdr (assoc 40 elst)))
(prompt (strcat "\nWidth is " (rtos wid)))
(setq pend (osnap (cadr dat) "_end"))
(setq pt
(cond
((equal (vlax-curve-getstartpoint ename) pend 0.0001)
(vlax-curve-getstartpoint ename))
((equal (vlax-curve-getendpoint ename) pend 0.0001)
(vlax-curve-getendpoint ename))
(t nil)))
(if pt
(setq p pt)
(setq p (getpoint "\nSpecify start point: ")))
(command "_.pline" p "_w" wid wid)
(while (eq 1 (logand 1 (getvar "cmdactive")))
(command pause))
(if
(and pt (wcmatch (cdadr (entget (entlast))) "*POLYLINE*"))
(command "_.pedit" ename "_j" (entlast) "" "")))
(prompt "\nNot a polyline"))
(if plw
(setvar "plinewid" plw))
(setvar "cmdecho" 1)
(princ))
(princ) | null | https://raw.githubusercontent.com/altsun/My-Lisps/85476bb09b79ef5e966402cc5158978d1cebd7eb/Common/(SW)%20Continue%20Polyline.lsp | lisp | (defun C:SW (/ dat c elst wid ename pend pt)
(vl-load-com)
(setvar "cmdecho" 0)
(setq plw (getvar "plinewid"))
(if
(and (setq dat (entsel "\nSelect source polyline: "))
(wcmatch (cdadr (setq elst (entget (setq ename (car dat)))))
"*POLYLINE*"))
(progn
(setq wid (cdr (assoc 40 elst)))
(prompt (strcat "\nWidth is " (rtos wid)))
(setq pend (osnap (cadr dat) "_end"))
(setq pt
(cond
((equal (vlax-curve-getstartpoint ename) pend 0.0001)
(vlax-curve-getstartpoint ename))
((equal (vlax-curve-getendpoint ename) pend 0.0001)
(vlax-curve-getendpoint ename))
(t nil)))
(if pt
(setq p pt)
(setq p (getpoint "\nSpecify start point: ")))
(command "_.pline" p "_w" wid wid)
(while (eq 1 (logand 1 (getvar "cmdactive")))
(command pause))
(if
(and pt (wcmatch (cdadr (entget (entlast))) "*POLYLINE*"))
(command "_.pedit" ename "_j" (entlast) "" "")))
(prompt "\nNot a polyline"))
(if plw
(setvar "plinewid" plw))
(setvar "cmdecho" 1)
(princ))
(princ) | |
5b4e2715063fe17f5e719c40fbe24bb876991e8e9626fab68e29449d8450febf | ocurrent/ocurrent | current_ssh.ml | open Current.Syntax
module R = Current_cache.Output(Run)
let run ~schedule ~key host args =
Current.component "ssh@,%s" host |>
let> args = args in
R.set ~schedule host key { Run.Value.args }
| null | https://raw.githubusercontent.com/ocurrent/ocurrent/ffc6e710d9d3735e747539701de4a14228a1f9a5/plugins/ssh/current_ssh.ml | ocaml | open Current.Syntax
module R = Current_cache.Output(Run)
let run ~schedule ~key host args =
Current.component "ssh@,%s" host |>
let> args = args in
R.set ~schedule host key { Run.Value.args }
| |
d01376c0a58f07de9b8506b2c27dd5f16a6a77af7da8b89b66c20b2b9ecece4a | cbaggers/cepl | proxy.lisp |
;; rather than interfaces make proxies
;; a proxy is defined as a list of functions the object must adhere to
a proxy is then a struct that has a field for the object and then one
;; for each function
(defgeneric defproxy-impl (proxy-name target-type-name))
(defstruct proxy
(target (error "All proxies must be initialized with a target") :type t))
(defmacro defproxy (name &body func-descriptions)
`(progn
(defstruct (,(symb name 'proxy) (:include proxy) (:conc-name %prxy-))
,@(loop :for fd :in func-descriptions :collect
`(,(first fd) (error ,(format nil "Method ~a for Proxy ~a must be provided"
name (first fd)))
:type function)))
,@(loop :for fd :in func-descriptions :collect
(let ((slot-name (symb '%prxy- (first fd))))
`(defun ,(first fd) (prxy ,@(rest fd))
(let ((target (proxy-target prxy)))
(funcall (,slot-name target) target ,@(rest fd))))))))
;; def-proxy-impl is a macro that lets you define a implementation for a proxy
;; for a given type
;; you can then use (make-*-proxy x) where * is the type (proxy *) which is
;; generic and specialized on the type.
(def-proxy-impl (printable node)
:print #'print-node)
(defmacro def-proxy-impl ((proxy-type target-type) &body funcs)
`(progn
(defun ,(symb 'make- target-type '-proxy) (x)
(,(symb 'make- proxy-type)
,@funcs))
(defmethod proxy ((x ,target-type) (p (eql ',proxy-type)))
(declare (ignore p))
(,(symb 'make- target-type '-proxy) x))))
| null | https://raw.githubusercontent.com/cbaggers/cepl/d1a10b6c8f4cedc07493bf06aef3a56c7b6f8d5b/core/protocode/_/proxy.lisp | lisp | rather than interfaces make proxies
a proxy is defined as a list of functions the object must adhere to
for each function
def-proxy-impl is a macro that lets you define a implementation for a proxy
for a given type
you can then use (make-*-proxy x) where * is the type (proxy *) which is
generic and specialized on the type. |
a proxy is then a struct that has a field for the object and then one
(defgeneric defproxy-impl (proxy-name target-type-name))
(defstruct proxy
(target (error "All proxies must be initialized with a target") :type t))
(defmacro defproxy (name &body func-descriptions)
`(progn
(defstruct (,(symb name 'proxy) (:include proxy) (:conc-name %prxy-))
,@(loop :for fd :in func-descriptions :collect
`(,(first fd) (error ,(format nil "Method ~a for Proxy ~a must be provided"
name (first fd)))
:type function)))
,@(loop :for fd :in func-descriptions :collect
(let ((slot-name (symb '%prxy- (first fd))))
`(defun ,(first fd) (prxy ,@(rest fd))
(let ((target (proxy-target prxy)))
(funcall (,slot-name target) target ,@(rest fd))))))))
(def-proxy-impl (printable node)
:print #'print-node)
(defmacro def-proxy-impl ((proxy-type target-type) &body funcs)
`(progn
(defun ,(symb 'make- target-type '-proxy) (x)
(,(symb 'make- proxy-type)
,@funcs))
(defmethod proxy ((x ,target-type) (p (eql ',proxy-type)))
(declare (ignore p))
(,(symb 'make- target-type '-proxy) x))))
|
bac2a4830587876f5696bbda1907a974b9a396d031e8ea74acde27f41dabb8d3 | deusdat/waller | ragtime.clj | (ns waller.ragtime
"This ns implements the features required by Ragtime."
(:require [ragtime.protocols :as p]
[waller.core :refer [create-context,ensure-track-store!]]
[travesedo.document :as tdoc]
[clojure.edn :as edn]
[clojure.string :as cstr]
[clojure.java.io :as io]
[resauce.core :as resauce]
[waller.files :refer [react]]
[ragtime.core :as rag])
(:import [java.io File]))
(def migration-col "waller")
(defn migration-url [migration-col] (str "/_api/document/" migration-col "/"))
Allows the interaction between Ragtime and previously applied migrations .
(defrecord ArangoDatabase
[conn db]
p/DataStore
(add-migration-id [this id]
(ensure-track-store! this migration-col)
(tdoc/create (merge this {:in-collection migration-col,
:payload {:_key id, :id id}})))
(remove-migration-id [this id]
(ensure-track-store! this migration-col)
(tdoc/delete (assoc this :_id (str migration-col "/" id))))
(applied-migration-ids [this]
(ensure-track-store! this migration-col)
(let [id-ctx (assoc this :in-collection migration-col :type :key),
ids (:documents (tdoc/read-all-docs id-ctx)),
path-sep #"/"
sorted-ids (map #(last (cstr/split % path-sep)) (sort ids))]
sorted-ids)))
(defn arango-connection
"Constructs an instance of the ArangoDatabase by converting the 'url' into
a travesedo context."
[url]
(map->ArangoDatabase (create-context url)))
(defrecord ArangoMigration
[id up down]
p/Migration
(id [_] id)
(run-up! [_ db] (react up db))
(run-down! [_ db] (react down db))
(toString[_]
(println (cstr/join " " [id up down]))))
(defn arango-migration
"Converts untyped map into Ragtime Migration"
[migration-map]
(map->ArangoMigration migration-map))
;; Shameless stealing from Ragtime JDBC to load files.
(let [pattern (re-pattern (str "([^" File/separator "]*)" File/separator "?$"))]
(defn- basename [file]
(second (re-find pattern (str file)))))
(defn- remove-extension [file]
(second (re-matches #"(.*)\.[^.]*" (str file))))
(defn- file-extension [file]
(re-find #"\.[^.]*$" (str file)))
(defmulti load-files
"Given an collection of files with the same extension, return a ordered
collection of migrations. Dispatches on extension (e.g. \".edn\"). Extend
this multimethod to support new formats for specifying SQL migrations."
(fn [files] (file-extension (first files))))
(defmethod load-files ".edn" [files]
(for [file files]
(-> (slurp file)
(edn/read-string)
(update-in [:id] #(or % (-> file basename remove-extension)))
(arango-migration))))
(defn- load-all-files [files]
(->> (sort-by str files)
(group-by file-extension)
(vals)
(mapcat load-files)))
(defn load-directory
"Load a collection of Ragtime migrations from a directory."
[path]
(load-all-files (file-seq (io/file path))))
(defn load-resources
"Load a collection of Ragtime migrations from a classpath prefix."
[path]
(load-all-files (resauce/resource-dir path)))
;; Migration functions
(defn migrate-from-classpath
[{:keys [url index dir], :or {index {}, dir "migrations"}}]
(rag/migrate-all (arango-connection url)
index
(load-resources dir)))
(defn create-config-for-repl
"Helper constructor for the Ragtime configuration. This will return a
map with keys :datastore of type DataStore and :migrations a list of
Migration."
([url]
(create-config-for-repl url "migrations"))
([url classpath-dir]
{:datastore (arango-connection url),
:migrations (load-resources classpath-dir)})) | null | https://raw.githubusercontent.com/deusdat/waller/bf003cf328defe73fc6189c0d7912eea687af6f9/src/waller/ragtime.clj | clojure | Shameless stealing from Ragtime JDBC to load files.
Migration functions | (ns waller.ragtime
"This ns implements the features required by Ragtime."
(:require [ragtime.protocols :as p]
[waller.core :refer [create-context,ensure-track-store!]]
[travesedo.document :as tdoc]
[clojure.edn :as edn]
[clojure.string :as cstr]
[clojure.java.io :as io]
[resauce.core :as resauce]
[waller.files :refer [react]]
[ragtime.core :as rag])
(:import [java.io File]))
(def migration-col "waller")
(defn migration-url [migration-col] (str "/_api/document/" migration-col "/"))
Allows the interaction between Ragtime and previously applied migrations .
(defrecord ArangoDatabase
[conn db]
p/DataStore
(add-migration-id [this id]
(ensure-track-store! this migration-col)
(tdoc/create (merge this {:in-collection migration-col,
:payload {:_key id, :id id}})))
(remove-migration-id [this id]
(ensure-track-store! this migration-col)
(tdoc/delete (assoc this :_id (str migration-col "/" id))))
(applied-migration-ids [this]
(ensure-track-store! this migration-col)
(let [id-ctx (assoc this :in-collection migration-col :type :key),
ids (:documents (tdoc/read-all-docs id-ctx)),
path-sep #"/"
sorted-ids (map #(last (cstr/split % path-sep)) (sort ids))]
sorted-ids)))
(defn arango-connection
"Constructs an instance of the ArangoDatabase by converting the 'url' into
a travesedo context."
[url]
(map->ArangoDatabase (create-context url)))
(defrecord ArangoMigration
[id up down]
p/Migration
(id [_] id)
(run-up! [_ db] (react up db))
(run-down! [_ db] (react down db))
(toString[_]
(println (cstr/join " " [id up down]))))
(defn arango-migration
"Converts untyped map into Ragtime Migration"
[migration-map]
(map->ArangoMigration migration-map))
(let [pattern (re-pattern (str "([^" File/separator "]*)" File/separator "?$"))]
(defn- basename [file]
(second (re-find pattern (str file)))))
(defn- remove-extension [file]
(second (re-matches #"(.*)\.[^.]*" (str file))))
(defn- file-extension [file]
(re-find #"\.[^.]*$" (str file)))
(defmulti load-files
"Given an collection of files with the same extension, return a ordered
collection of migrations. Dispatches on extension (e.g. \".edn\"). Extend
this multimethod to support new formats for specifying SQL migrations."
(fn [files] (file-extension (first files))))
(defmethod load-files ".edn" [files]
(for [file files]
(-> (slurp file)
(edn/read-string)
(update-in [:id] #(or % (-> file basename remove-extension)))
(arango-migration))))
(defn- load-all-files [files]
(->> (sort-by str files)
(group-by file-extension)
(vals)
(mapcat load-files)))
(defn load-directory
"Load a collection of Ragtime migrations from a directory."
[path]
(load-all-files (file-seq (io/file path))))
(defn load-resources
"Load a collection of Ragtime migrations from a classpath prefix."
[path]
(load-all-files (resauce/resource-dir path)))
(defn migrate-from-classpath
[{:keys [url index dir], :or {index {}, dir "migrations"}}]
(rag/migrate-all (arango-connection url)
index
(load-resources dir)))
(defn create-config-for-repl
"Helper constructor for the Ragtime configuration. This will return a
map with keys :datastore of type DataStore and :migrations a list of
Migration."
([url]
(create-config-for-repl url "migrations"))
([url classpath-dir]
{:datastore (arango-connection url),
:migrations (load-resources classpath-dir)})) |
f47e998d547560cd2896289ed54da47e7f14b74e7ede0035262325ef6726685b | damianfral/BrainHask | Parser.hs | module Language.BrainHask.Parser (parseBF) where
import Control.Applicative hiding (optional)
import Language.BrainHask.Types
import Text.Parsec hiding (many)
type Parser = Parsec String ()
toOpParser :: String -> BFOp -> Parser BFOp
toOpParser s p = p <$ string s
programParser :: Parser BFProgram
programParser = many opParser
opParser = choice [rightParser, leftParser, incParser, decParser, writeParser, readParser, loopParser]
rightParser = toOpParser ">" BFMoveRight
leftParser = toOpParser "<" BFMoveLeft
incParser = toOpParser "+" BFIncrease
decParser = toOpParser "-" BFDecrease
writeParser = toOpParser "." BFWrite
readParser = toOpParser "," BFRead
loopParser = between (string "[") (string "]") $ BFLoop <$> programParser
clean :: String -> String
clean = filter (`elem` "+-<>[].,")
parseBF :: String -> Either ParseError BFProgram
parseBF = runParser programParser () "" . clean
| null | https://raw.githubusercontent.com/damianfral/BrainHask/64b4cb0ba2c3ee85b647b578a4165b7e38d55dce/src/Language/BrainHask/Parser.hs | haskell | module Language.BrainHask.Parser (parseBF) where
import Control.Applicative hiding (optional)
import Language.BrainHask.Types
import Text.Parsec hiding (many)
type Parser = Parsec String ()
toOpParser :: String -> BFOp -> Parser BFOp
toOpParser s p = p <$ string s
programParser :: Parser BFProgram
programParser = many opParser
opParser = choice [rightParser, leftParser, incParser, decParser, writeParser, readParser, loopParser]
rightParser = toOpParser ">" BFMoveRight
leftParser = toOpParser "<" BFMoveLeft
incParser = toOpParser "+" BFIncrease
decParser = toOpParser "-" BFDecrease
writeParser = toOpParser "." BFWrite
readParser = toOpParser "," BFRead
loopParser = between (string "[") (string "]") $ BFLoop <$> programParser
clean :: String -> String
clean = filter (`elem` "+-<>[].,")
parseBF :: String -> Either ParseError BFProgram
parseBF = runParser programParser () "" . clean
| |
49cc1f40feade8dae2f5e888e1e66c128535c6ad671dc2adb064b1aa0c6f0825 | basho/machi | machi_lifecycle_mgr_test.erl | %% -------------------------------------------------------------------
%%
Copyright ( c ) 2007 - 2014 Basho Technologies , Inc. All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(machi_lifecycle_mgr_test).
-compile(export_all).
-ifdef(TEST).
-ifndef(PULSE).
-include_lib("eunit/include/eunit.hrl").
-include("machi.hrl").
-include("machi_projection.hrl").
-define(MGR, machi_chain_manager1).
setup() ->
catch application:stop(machi),
{ok, SupPid} = machi_sup:start_link(),
error_logger:tty(false),
Dir = "./" ++ atom_to_list(?MODULE) ++ ".datadir",
machi_flu1_test:clean_up_data_dir(Dir ++ "/*/*"),
machi_flu1_test:clean_up_data_dir(Dir),
Envs = [{flu_data_dir, Dir ++ "/data/flu"},
{flu_config_dir, Dir ++ "/etc/flu-config"},
{chain_config_dir, Dir ++ "/etc/chain-config"},
{platform_data_dir, Dir ++ "/data"},
{platform_etc_dir, Dir ++ "/etc"},
{not_used_pending, Dir ++ "/etc/pending"}
],
EnvKeys = [K || {K,_V} <- Envs],
undefined = application:get_env(machi, yo),
Cleanup = machi_flu1_test:get_env_vars(machi, EnvKeys ++ [yo]),
[begin
filelib:ensure_dir(V ++ "/unused"),
application:set_env(machi, K, V)
end || {K, V} <- Envs],
{SupPid, Dir, Cleanup}.
cleanup({SupPid, Dir, Cleanup}) ->
exit(SupPid, normal),
machi_util:wait_for_death(SupPid, 100),
error_logger:tty(true),
catch application:stop(machi),
machi_flu1_test:clean_up_data_dir(Dir ++ "/*/*"),
machi_flu1_test:clean_up_data_dir(Dir),
machi_flu1_test:clean_up_env_vars(Cleanup),
undefined = application:get_env(machi, yo),
ok.
smoke_test_() ->
{timeout, 60, fun() -> smoke_test2() end}.
smoke_test2() ->
YoCleanup = setup(),
try
Prefix = <<"pre">>,
Chunk1 = <<"yochunk">>,
Host = "localhost",
PortBase = 60120,
Pa = #p_srvr{name=a,address="localhost",port=PortBase+0},
Pb = #p_srvr{name=b,address="localhost",port=PortBase+1},
Pc = #p_srvr{name=c,address="localhost",port=PortBase+2},
Pstore_a = machi_flu1 : ) ,
%% Pstore_b = machi_flu1:make_projection_server_regname(b),
%% Pstore_c = machi_flu1:make_projection_server_regname(c),
Pstores = [Pstore_a, Pstore_b, Pstore_c] =
[machi_flu1:make_projection_server_regname(a),
machi_flu1:make_projection_server_regname(b),
machi_flu1:make_projection_server_regname(c)],
ChMgrs = [ChMgr_a, ChMgr_b, ChMgr_c] =
[machi_chain_manager1:make_chmgr_regname(a),
machi_chain_manager1:make_chmgr_regname(b),
machi_chain_manager1:make_chmgr_regname(c)],
Fits = [Fit_a, Fit_b, Fit_c] =
[machi_flu_psup:make_fitness_regname(a),
machi_flu_psup:make_fitness_regname(b),
machi_flu_psup:make_fitness_regname(c)],
Advance = machi_chain_manager1_test:make_advance_fun(
Fits, [a,b,c], ChMgrs, 3),
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
io:format("\nSTEP: Start 3 FLUs, no chain.\n", []),
[machi_lifecycle_mgr:make_pending_config(P) || P <- [Pa,Pb,Pc] ],
{[_,_,_],[]} = machi_lifecycle_mgr:process_pending(),
[{ok, #projection_v1{epoch_number=0}} =
machi_projection_store:read_latest_projection(PSTORE, private)
|| PSTORE <- Pstores],
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
io:format("\nSTEP: Start chain = [a,b,c]\n", []),
C1 = #chain_def_v1{name=cx, mode=ap_mode, full=[Pa,Pb,Pc],
local_run=[a,b,c]},
machi_lifecycle_mgr:make_pending_config(C1),
{[],[_]} = machi_lifecycle_mgr:process_pending(),
Advance(),
[{ok, #projection_v1{all_members=[a,b,c]}} =
machi_projection_store:read_latest_projection(PSTORE, private)
|| PSTORE <- Pstores],
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
io:format("\nSTEP: Reset chain = [b,c]\n", []),
C2 = #chain_def_v1{name=cx, mode=ap_mode, full=[Pb,Pc],
old_full=[a,b,c], old_witnesses=[],
local_stop=[a], local_run=[b,c]},
machi_lifecycle_mgr:make_pending_config(C2),
{[],[_]} = machi_lifecycle_mgr:process_pending(),
Advance(),
%% a should be down
{'EXIT', _} = (catch machi_projection_store:read_latest_projection(
hd(Pstores), private)),
[{ok, #projection_v1{all_members=[b,c]}} =
machi_projection_store:read_latest_projection(PSTORE, private)
|| PSTORE <- tl(Pstores)],
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
io:format("\nSTEP: Reset chain = []\n", []),
C3 = #chain_def_v1{name=cx, mode=ap_mode, full=[],
old_full=[b,c], old_witnesses=[],
local_stop=[b,c], local_run=[]},
machi_lifecycle_mgr:make_pending_config(C3),
{[],[_]} = machi_lifecycle_mgr:process_pending(),
Advance(),
%% a,b,c should be down
[{'EXIT', _} = (catch machi_projection_store:read_latest_projection(
PSTORE, private))
|| PSTORE <- Pstores],
ok
after
cleanup(YoCleanup)
end.
ast_tuple_syntax_test() ->
T = fun(L) -> machi_lifecycle_mgr:check_ast_tuple_syntax(L) end,
Canon1 = [ {host, "localhost", []},
{host, "localhost", [{client_interface, "1.2.3.4"},
{admin_interface, "5.6.7.8"}]},
{flu, 'fx', "foohost", 4000, []},
switch_old_and_new,
{chain, 'cy', ['fx', 'fy'], [{foo,"yay"},{bar,baz}]} ],
{_Good,[]=_Bad} = T(Canon1),
Canon1_norm = machi_lifecycle_mgr:normalize_ast_tuple_syntax(Canon1),
true = (length(Canon1) == length(Canon1_norm)),
{Canon1_norm_b, []} = T(Canon1_norm),
true = (length(Canon1_norm) == length(Canon1_norm_b)),
{[],[_,_,_,_]} =
T([ {host, 'localhost', []},
{host, 'localhost', yo},
{host, "localhost", [{client_interface, 77.88293829832}]},
{host, "localhost", [{client_interface, "1.2.3.4"},
{bummer, "5.6.7.8"}]} ]),
{[],[_,_,_,_,_,_]} =
T([ {flu, 'fx', 'foohost', 4000, []},
{flu, 'fx', <<"foohost">>, 4000, []},
{flu, 'fx', "foohost", -4000, []},
{flu, 'fx', "foohost", 40009999, []},
{flu, 'fx', "foohost", 4000, gack},
{flu, 'fx', "foohost", 4000, [22]} ]),
{[],[_,_,_]} =
T([ {chain, 'cy', ["fx", "fy"], [foo,{bar,baz}]},
yoloyolo,
{chain, "cy", ["fx", 27], oops,arity,way,way,way,too,big,x}
]).
ast_run_test() ->
PortBase = 20300,
R1 = [
{host, "localhost", "localhost", "localhost", []},
{flu, 'f0', "localhost", PortBase+0, []},
{flu, 'f1', "localhost", PortBase+1, []},
{chain, 'ca', ['f0'], []},
{chain, 'cb', ['f1'], []},
switch_old_and_new,
{flu, 'f2', "localhost", PortBase+2, []},
{flu, 'f3', "localhost", PortBase+3, []},
{flu, 'f4', "localhost", PortBase+4, []},
{chain, 'ca', ['f0', 'f2'], []},
{chain, 'cc', ['f3', 'f4'], []}
],
{ok, Env1} = machi_lifecycle_mgr:run_ast(R1),
%% Uncomment to examine the Env trees.
Y1 = { lists : sort(gb_trees : to_list(element(1 , Env1 ) ) ) ,
lists : sort(gb_trees : to_list(element(2 , Env1 ) ) ) ,
element(3 , Env1 ) } ,
io : , " \nY1 ~p\n " , [ Y1 ] ) ,
Negative_after_R1 =
[
dupe host
{flu, 'f1', "other", PortBase+9999999, []}, % bogus port # (syntax)
{flu, 'f1', "other", PortBase+888, []}, % dupe flu name
dupe host+port
{chain, 'ca', ['f7'], []}, % unknown flu
{chain, 'cc', ['f0'], []}, % flu previously assigned
{chain, 'ca', cp_mode, ['f0', 'f1', 'f2'], [], []} % mode change
],
[begin
io : , " dbg : Neg ~p\n " , [ Neg ] ) ,
{error, _} = machi_lifecycle_mgr:run_ast(R1 ++ [Neg])
end || Neg <- Negative_after_R1],
%% The 'run' phase doesn't blow smoke. What about 'diff'?
{X1a, X1b} = machi_lifecycle_mgr:diff_env(Env1, "localhost"),
There 's only one host , " localhost " , so ' all ' should be exactly equal .
{X1a, X1b} = machi_lifecycle_mgr:diff_env(Env1, all),
io : , " X1b : ~p\n " , [ X1b ] ) ,
Append to the R1 scenario : for chain cc : add f5 , remove f4
Expect : see pattern matching below on X2b .
R2 = (R1 -- [switch_old_and_new]) ++
[switch_old_and_new,
{flu, 'f5', "localhost", PortBase+5, []},
{chain, 'cc', ['f3','f5'], []}],
{ok, Env2} = machi_lifecycle_mgr:run_ast(R2),
{_X2a, X2b} = machi_lifecycle_mgr:diff_env(Env2, "localhost"),
io : , " X2b : ~p\n " , [ X2b ] ) ,
F5_port = PortBase+5,
[#p_srvr{name='f5',address="localhost",port=F5_port},
#chain_def_v1{name='cc',
full=[#p_srvr{name='f3'},#p_srvr{name='f5'}], witnesses=[],
old_full=[f3,f4], old_witnesses=[],
local_run=[f5], local_stop=[f4]}] = X2b,
ok.
ast_then_apply_test_() ->
{timeout, 60, fun() -> ast_then_apply_test2() end}.
ast_then_apply_test2() ->
YoCleanup = setup(),
try
PortBase = 20400,
NumChains = 4,
ChainLen = 3,
FLU_num = NumChains * ChainLen,
FLU_defs = [{flu, list_to_atom("f"++integer_to_list(X)),
"localhost", PortBase+X, []} || X <- lists:seq(1,FLU_num)],
FLU_names = [FLU || {flu,FLU,_,_,_} <- FLU_defs],
Ch_defs = [{chain, list_to_atom("c"++integer_to_list(X)),
lists:sublist(FLU_names, X, 3),
[]} || X <- lists:seq(1, FLU_num, 3)],
R1 = [switch_old_and_new,
{host, "localhost", "localhost", "localhost", []}]
++ FLU_defs ++ Ch_defs,
{ok, Env1} = machi_lifecycle_mgr:run_ast(R1),
{_X1a, X1b} = machi_lifecycle_mgr:diff_env(Env1, "localhost"),
io : , " X1b ~p\n " , [ X1b ] ) ,
[machi_lifecycle_mgr:make_pending_config(X) || X <- X1b],
{PassFLUs, PassChains} = machi_lifecycle_mgr:process_pending(),
true = (length(PassFLUs) == length(FLU_defs)),
true = (length(PassChains) == length(Ch_defs)),
%% Kick the chain managers into doing something useful right now.
Pstores = [list_to_atom(atom_to_list(X) ++ "_pstore") || X <- FLU_names],
Fits = [list_to_atom(atom_to_list(X) ++ "_fitness") || X <- FLU_names],
ChMgrs = [list_to_atom(atom_to_list(X) ++ "_chmgr") || X <- FLU_names],
Advance = machi_chain_manager1_test:make_advance_fun(
Fits, FLU_names, ChMgrs, 3),
Advance(),
%% Sanity check: everyone is configured properly.
[begin
{ok, #projection_v1{epoch_number=Epoch, all_members=All,
chain_name=ChainName, upi=UPI}} =
machi_projection_store:read_latest_projection(PStore, private),
io : , " ~p : epoch ~p all ~p\n " , [ PStore , Epoch , All ] ) ,
true = Epoch > 0,
ChainLen = length(All),
true = (length(UPI) > 0),
{chain, _, Full, []} = lists:keyfind(ChainName, 2, Ch_defs),
true = lists:sort(Full) == lists:sort(All)
end || PStore <- Pstores],
ok
after
cleanup(YoCleanup)
end.
-endif. % !PULSE
-endif. % TEST
| null | https://raw.githubusercontent.com/basho/machi/e87bd59a9777d805b00f9e9981467eb28e28390c/test/machi_lifecycle_mgr_test.erl | erlang | -------------------------------------------------------------------
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-------------------------------------------------------------------
Pstore_b = machi_flu1:make_projection_server_regname(b),
Pstore_c = machi_flu1:make_projection_server_regname(c),
a should be down
a,b,c should be down
Uncomment to examine the Env trees.
bogus port # (syntax)
dupe flu name
unknown flu
flu previously assigned
mode change
The 'run' phase doesn't blow smoke. What about 'diff'?
Kick the chain managers into doing something useful right now.
Sanity check: everyone is configured properly.
!PULSE
TEST | Copyright ( c ) 2007 - 2014 Basho Technologies , Inc. All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
-module(machi_lifecycle_mgr_test).
-compile(export_all).
-ifdef(TEST).
-ifndef(PULSE).
-include_lib("eunit/include/eunit.hrl").
-include("machi.hrl").
-include("machi_projection.hrl").
-define(MGR, machi_chain_manager1).
setup() ->
catch application:stop(machi),
{ok, SupPid} = machi_sup:start_link(),
error_logger:tty(false),
Dir = "./" ++ atom_to_list(?MODULE) ++ ".datadir",
machi_flu1_test:clean_up_data_dir(Dir ++ "/*/*"),
machi_flu1_test:clean_up_data_dir(Dir),
Envs = [{flu_data_dir, Dir ++ "/data/flu"},
{flu_config_dir, Dir ++ "/etc/flu-config"},
{chain_config_dir, Dir ++ "/etc/chain-config"},
{platform_data_dir, Dir ++ "/data"},
{platform_etc_dir, Dir ++ "/etc"},
{not_used_pending, Dir ++ "/etc/pending"}
],
EnvKeys = [K || {K,_V} <- Envs],
undefined = application:get_env(machi, yo),
Cleanup = machi_flu1_test:get_env_vars(machi, EnvKeys ++ [yo]),
[begin
filelib:ensure_dir(V ++ "/unused"),
application:set_env(machi, K, V)
end || {K, V} <- Envs],
{SupPid, Dir, Cleanup}.
cleanup({SupPid, Dir, Cleanup}) ->
exit(SupPid, normal),
machi_util:wait_for_death(SupPid, 100),
error_logger:tty(true),
catch application:stop(machi),
machi_flu1_test:clean_up_data_dir(Dir ++ "/*/*"),
machi_flu1_test:clean_up_data_dir(Dir),
machi_flu1_test:clean_up_env_vars(Cleanup),
undefined = application:get_env(machi, yo),
ok.
smoke_test_() ->
{timeout, 60, fun() -> smoke_test2() end}.
smoke_test2() ->
YoCleanup = setup(),
try
Prefix = <<"pre">>,
Chunk1 = <<"yochunk">>,
Host = "localhost",
PortBase = 60120,
Pa = #p_srvr{name=a,address="localhost",port=PortBase+0},
Pb = #p_srvr{name=b,address="localhost",port=PortBase+1},
Pc = #p_srvr{name=c,address="localhost",port=PortBase+2},
Pstore_a = machi_flu1 : ) ,
Pstores = [Pstore_a, Pstore_b, Pstore_c] =
[machi_flu1:make_projection_server_regname(a),
machi_flu1:make_projection_server_regname(b),
machi_flu1:make_projection_server_regname(c)],
ChMgrs = [ChMgr_a, ChMgr_b, ChMgr_c] =
[machi_chain_manager1:make_chmgr_regname(a),
machi_chain_manager1:make_chmgr_regname(b),
machi_chain_manager1:make_chmgr_regname(c)],
Fits = [Fit_a, Fit_b, Fit_c] =
[machi_flu_psup:make_fitness_regname(a),
machi_flu_psup:make_fitness_regname(b),
machi_flu_psup:make_fitness_regname(c)],
Advance = machi_chain_manager1_test:make_advance_fun(
Fits, [a,b,c], ChMgrs, 3),
io:format("\nSTEP: Start 3 FLUs, no chain.\n", []),
[machi_lifecycle_mgr:make_pending_config(P) || P <- [Pa,Pb,Pc] ],
{[_,_,_],[]} = machi_lifecycle_mgr:process_pending(),
[{ok, #projection_v1{epoch_number=0}} =
machi_projection_store:read_latest_projection(PSTORE, private)
|| PSTORE <- Pstores],
io:format("\nSTEP: Start chain = [a,b,c]\n", []),
C1 = #chain_def_v1{name=cx, mode=ap_mode, full=[Pa,Pb,Pc],
local_run=[a,b,c]},
machi_lifecycle_mgr:make_pending_config(C1),
{[],[_]} = machi_lifecycle_mgr:process_pending(),
Advance(),
[{ok, #projection_v1{all_members=[a,b,c]}} =
machi_projection_store:read_latest_projection(PSTORE, private)
|| PSTORE <- Pstores],
io:format("\nSTEP: Reset chain = [b,c]\n", []),
C2 = #chain_def_v1{name=cx, mode=ap_mode, full=[Pb,Pc],
old_full=[a,b,c], old_witnesses=[],
local_stop=[a], local_run=[b,c]},
machi_lifecycle_mgr:make_pending_config(C2),
{[],[_]} = machi_lifecycle_mgr:process_pending(),
Advance(),
{'EXIT', _} = (catch machi_projection_store:read_latest_projection(
hd(Pstores), private)),
[{ok, #projection_v1{all_members=[b,c]}} =
machi_projection_store:read_latest_projection(PSTORE, private)
|| PSTORE <- tl(Pstores)],
io:format("\nSTEP: Reset chain = []\n", []),
C3 = #chain_def_v1{name=cx, mode=ap_mode, full=[],
old_full=[b,c], old_witnesses=[],
local_stop=[b,c], local_run=[]},
machi_lifecycle_mgr:make_pending_config(C3),
{[],[_]} = machi_lifecycle_mgr:process_pending(),
Advance(),
[{'EXIT', _} = (catch machi_projection_store:read_latest_projection(
PSTORE, private))
|| PSTORE <- Pstores],
ok
after
cleanup(YoCleanup)
end.
ast_tuple_syntax_test() ->
T = fun(L) -> machi_lifecycle_mgr:check_ast_tuple_syntax(L) end,
Canon1 = [ {host, "localhost", []},
{host, "localhost", [{client_interface, "1.2.3.4"},
{admin_interface, "5.6.7.8"}]},
{flu, 'fx', "foohost", 4000, []},
switch_old_and_new,
{chain, 'cy', ['fx', 'fy'], [{foo,"yay"},{bar,baz}]} ],
{_Good,[]=_Bad} = T(Canon1),
Canon1_norm = machi_lifecycle_mgr:normalize_ast_tuple_syntax(Canon1),
true = (length(Canon1) == length(Canon1_norm)),
{Canon1_norm_b, []} = T(Canon1_norm),
true = (length(Canon1_norm) == length(Canon1_norm_b)),
{[],[_,_,_,_]} =
T([ {host, 'localhost', []},
{host, 'localhost', yo},
{host, "localhost", [{client_interface, 77.88293829832}]},
{host, "localhost", [{client_interface, "1.2.3.4"},
{bummer, "5.6.7.8"}]} ]),
{[],[_,_,_,_,_,_]} =
T([ {flu, 'fx', 'foohost', 4000, []},
{flu, 'fx', <<"foohost">>, 4000, []},
{flu, 'fx', "foohost", -4000, []},
{flu, 'fx', "foohost", 40009999, []},
{flu, 'fx', "foohost", 4000, gack},
{flu, 'fx', "foohost", 4000, [22]} ]),
{[],[_,_,_]} =
T([ {chain, 'cy', ["fx", "fy"], [foo,{bar,baz}]},
yoloyolo,
{chain, "cy", ["fx", 27], oops,arity,way,way,way,too,big,x}
]).
%% @doc Exercise machi_lifecycle_mgr:run_ast/1 and diff_env/2 on a
%% two-phase (old/new) configuration: two FLUs and two chains, then —
%% after switch_old_and_new — three more FLUs, an extended chain 'ca',
%% and a new chain 'cc'.
%%
%% NOTE(review): the io:format calls and comment markers below were
%% reconstructed from extraction-garbled text; confirm against upstream
%% machi_lifecycle_mgr_test.erl.
ast_run_test() ->
    PortBase = 20300,
    R1 = [
          {host, "localhost", "localhost", "localhost", []},
          {flu, 'f0', "localhost", PortBase+0, []},
          {flu, 'f1', "localhost", PortBase+1, []},
          {chain, 'ca', ['f0'], []},
          {chain, 'cb', ['f1'], []},
          switch_old_and_new,
          {flu, 'f2', "localhost", PortBase+2, []},
          {flu, 'f3', "localhost", PortBase+3, []},
          {flu, 'f4', "localhost", PortBase+4, []},
          {chain, 'ca', ['f0', 'f2'], []},
          {chain, 'cc', ['f3', 'f4'], []}
         ],
    {ok, Env1} = machi_lifecycle_mgr:run_ast(R1),
    Y1 = {lists:sort(gb_trees:to_list(element(1, Env1))),
          lists:sort(gb_trees:to_list(element(2, Env1))),
          element(3, Env1)},
    io:format(user, "\nY1 ~p\n", [Y1]),
    %% TODO(review): the negative test tuples were lost to extraction
    %% garbling; only their descriptions survive.  Restore the original
    %% entries (e.g. a duplicate host and a duplicate host+port) from
    %% the upstream machi repository.
    Negative_after_R1 =
        [
         %% dupe host
         %% dupe host+port
        ],
    [begin
         io:format(user, "dbg: Neg ~p\n", [Neg]),
         {error, _} = machi_lifecycle_mgr:run_ast(R1 ++ [Neg])
     end || Neg <- Negative_after_R1],
    {X1a, X1b} = machi_lifecycle_mgr:diff_env(Env1, "localhost"),
    %% There's only one host, "localhost", so 'all' should be exactly equal.
    {X1a, X1b} = machi_lifecycle_mgr:diff_env(Env1, all),
    io:format(user, "X1b: ~p\n", [X1b]),
    %% Append to the R1 scenario: for chain cc: add f5, remove f4.
    %% Expect: see pattern matching below on X2b.
    R2 = (R1 -- [switch_old_and_new]) ++
        [switch_old_and_new,
         {flu, 'f5', "localhost", PortBase+5, []},
         {chain, 'cc', ['f3','f5'], []}],
    {ok, Env2} = machi_lifecycle_mgr:run_ast(R2),
    {_X2a, X2b} = machi_lifecycle_mgr:diff_env(Env2, "localhost"),
    io:format(user, "X2b: ~p\n", [X2b]),
    F5_port = PortBase+5,
    [#p_srvr{name='f5',address="localhost",port=F5_port},
     #chain_def_v1{name='cc',
                   full=[#p_srvr{name='f3'},#p_srvr{name='f5'}], witnesses=[],
                   old_full=[f3,f4], old_witnesses=[],
                   local_run=[f5], local_stop=[f4]}] = X2b,
    ok.
%% EUnit test generator: run ast_then_apply_test2/0 under a 60-second
%% timeout (the chain-manager convergence loop can be slow).
ast_then_apply_test_() ->
    {timeout, 60, fun ast_then_apply_test2/0}.
%% @doc End-to-end lifecycle test: build 4 chains of length 3 from 12
%% FLUs via run_ast/1, write the pending configs, process them, let the
%% chain managers converge, then check every private projection store
%% agrees with the chain definitions.
%%
%% NOTE(review): the two io:format calls were reconstructed from
%% extraction-garbled text; confirm against upstream machi.
ast_then_apply_test2() ->
    YoCleanup = setup(),
    try
        PortBase = 20400,
        NumChains = 4,
        ChainLen = 3,
        FLU_num = NumChains * ChainLen,
        %% FLUs f1..f12 on consecutive ports, grouped 3-at-a-time into
        %% chains c1, c4, c7, c10.
        FLU_defs = [{flu, list_to_atom("f"++integer_to_list(X)),
                     "localhost", PortBase+X, []} || X <- lists:seq(1,FLU_num)],
        FLU_names = [FLU || {flu,FLU,_,_,_} <- FLU_defs],
        Ch_defs = [{chain, list_to_atom("c"++integer_to_list(X)),
                    lists:sublist(FLU_names, X, 3),
                    []} || X <- lists:seq(1, FLU_num, 3)],
        R1 = [switch_old_and_new,
              {host, "localhost", "localhost", "localhost", []}]
            ++ FLU_defs ++ Ch_defs,
        {ok, Env1} = machi_lifecycle_mgr:run_ast(R1),
        {_X1a, X1b} = machi_lifecycle_mgr:diff_env(Env1, "localhost"),
        io:format(user, "X1b ~p\n", [X1b]),
        [machi_lifecycle_mgr:make_pending_config(X) || X <- X1b],
        {PassFLUs, PassChains} = machi_lifecycle_mgr:process_pending(),
        true = (length(PassFLUs) == length(FLU_defs)),
        true = (length(PassChains) == length(Ch_defs)),
        %% Per-FLU process names follow the "<flu>_<role>" convention.
        Pstores = [list_to_atom(atom_to_list(X) ++ "_pstore") || X <- FLU_names],
        Fits = [list_to_atom(atom_to_list(X) ++ "_fitness") || X <- FLU_names],
        ChMgrs = [list_to_atom(atom_to_list(X) ++ "_chmgr") || X <- FLU_names],
        Advance = machi_chain_manager1_test:make_advance_fun(
                    Fits, FLU_names, ChMgrs, 3),
        Advance(),
        %% Every private projection must be past epoch 0, contain the
        %% full chain membership, and have a non-empty UPI list.
        [begin
             {ok, #projection_v1{epoch_number=Epoch, all_members=All,
                                 chain_name=ChainName, upi=UPI}} =
                 machi_projection_store:read_latest_projection(PStore, private),
             io:format(user, "~p: epoch ~p all ~p\n", [PStore, Epoch, All]),
             true = Epoch > 0,
             ChainLen = length(All),
             true = (length(UPI) > 0),
             {chain, _, Full, []} = lists:keyfind(ChainName, 2, Ch_defs),
             true = lists:sort(Full) == lists:sort(All)
         end || PStore <- Pstores],
        ok
    after
        cleanup(YoCleanup)
    end.
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE TemplateHaskell #-}
-- | This module re-exports a subset of `Yaya.Fold`, intended for when you want
-- to define recursion scheme instances for your existing recursive types.
--
-- This is /not/ the recommended way to use Yaya, but it solves some real
-- problems:
-- 1. you have existing directly-recursive types and you want to start taking
--    advantage of recursion schemes without having to rewrite your existing
--    code, or
-- 2. a directly-recursive type has been imposed on you by some other library
--    and you want to take advantage of recursion schemes.
--
-- The distinction between these two cases is whether you have control of the
-- @data@ declaration. In the first case, you probably do. In that case, you
-- should only generate the /safe/ instances, and ensure that all the
-- recursive type references are /strict/ (if you want a `Recursive`
-- instance). If you don't have control, then you /may/ need to generate all
-- instances.
--
-- Another difference when you have control is that it means you may migrate
-- away from direct recursion entirely, at which point this import should
-- disappear.
module Yaya.Retrofit
( module Yaya.Fold,
PatternFunctorRules (..),
defaultRules,
extractPatternFunctor,
)
where
import Control.Exception (Exception (..), throw)
import Control.Monad ((<=<))
import Data.Bifunctor (bimap)
import Data.Either.Validation (Validation (..), validationToEither)
import Data.Functor.Identity (Identity (..))
import Data.List.NonEmpty (NonEmpty)
import Language.Haskell.TH as TH
import Language.Haskell.TH.Datatype as TH.Abs
import Language.Haskell.TH.Syntax (mkNameG_tc)
import Text.Read.Lex (isSymbolChar)
import Yaya.Fold
( Corecursive (..),
Projectable (..),
Recursive (..),
Steppable (..),
recursiveEq,
recursiveShowsPrec,
)
#if MIN_VERSION_template_haskell(2, 17, 0)
-- From TH 2.17 on, 'TyVarBndr' carries a flag type parameter; this alias
-- pins the unadorned @()@ variant used throughout this module.
type TyVarBndr' = TyVarBndr ()
#else
type TyVarBndr' = TyVarBndr
#endif
-- 'ConP' gained a list of type applications in TH 2.18; 'conP'' papers over
-- the difference so call sites stay version-independent.
conP' :: Name -> [Pat] -> Pat
#if MIN_VERSION_template_haskell(2, 18, 0)
conP' n = ConP n []
#else
conP' = ConP
#endif
-- | Extract a pattern functor and relevant instances from a simply recursive type.
--
-- /e.g./
--
-- @
-- data Expr a
--   = Lit a
--   | Add (Expr a) (Expr a)
--   | Expr a :* [Expr a]
--   deriving (Show)
--
-- `extractPatternFunctor` `defaultRules` ''Expr
-- @
--
-- will create
--
-- @
-- data ExprF a x
--   = LitF a
--   | AddF x x
--   | x :*$ [x]
--   deriving ('Functor', 'Foldable', 'Traversable')
--
-- instance `Projectable` (->) (Expr a) (ExprF a) where
--   `project` (Lit x)   = LitF x
--   `project` (Add x y) = AddF x y
--   `project` (x :* y)  = x :*$ y
--
-- instance `Steppable` (->) (Expr a) (ExprF a) where
--   `embed` (LitF x)   = Lit x
--   `embed` (AddF x y) = Add x y
--   `embed` (x :*$ y)  = x :* y
--
-- instance `Recursive` (->) (Expr a) (ExprF a) where
--   `cata` φ = φ . `fmap` (`cata` φ) . `project`
--
-- instance `Corecursive` (->) (Expr a) (ExprF a) where
--   `ana` ψ = `embed` . `fmap` (`ana` ψ) . ψ
-- @
--
-- /Notes:/
--
-- - `extractPatternFunctor` works properly only with ADTs.
--   Existentials and GADTs aren't supported,
--   as we don't try to do better than
--   <https://downloads.haskell.org/~ghc/latest/docs/html/users_guide/glasgow_exts.html#deriving-functor-instances GHC's DeriveFunctor>.
-- - we always generate both `Recursive` and `Corecursive` instances, but one of these is always unsafe.
--   In future, we should check the strictness of the recursive parameter and generate only the appropriate one (unless overridden by a rule).
extractPatternFunctor :: PatternFunctorRules -> Name -> Q [Dec]
extractPatternFunctor rules name = do
  -- Reify the datatype, then either splice in the generated declarations
  -- or surface the 'UnsupportedDatatype' as an imprecise exception.
  info <- reifyDatatype name
  either throw id $ makePrimForDI rules info
-- | Rules of renaming data names
--
--   The three renamers are applied independently to the type constructor,
--   the data constructors, and the record fields of the source type to name
--   the generated pattern functor's counterparts.
data PatternFunctorRules = PatternFunctorRules
  { patternType :: Name -> Name,
    patternCon :: Name -> Name,
    patternField :: Name -> Name
  }
-- | Default 'PatternFunctorRules': append @F@ or @$@ to data type, constructors and field names.
-- Every namespace gets the same suffixing scheme, so the single renamer
-- 'toFName' serves for types, constructors, and fields alike.
defaultRules :: PatternFunctorRules
defaultRules = PatternFunctorRules toFName toFName toFName
-- | Renames an identifier for the pattern-functor namespace: names made
--   entirely of symbol characters (operators) get a @$@ suffix, all other
--   names get an @F@ suffix.
toFName :: Name -> Name
toFName name = mkName (suffixed (nameBase name))
  where
    suffixed base
      | all isSymbolChar base = base ++ "$"
      | otherwise = base ++ "F"
-- | The datatype shapes 'extractPatternFunctor' refuses to handle; thrown
--   (via 'throw') when 'makePrimForDI' returns 'Left'.
data UnsupportedDatatype
  = UnsupportedInstTypes (NonEmpty Type)
  | UnsupportedVariant DatatypeVariant
instance Show UnsupportedDatatype where
  show = \case
    UnsupportedInstTypes tys ->
      "extractPatternFunctor: Couldn't process the following types " <> show tys
    UnsupportedVariant _variant ->
      "extractPatternFunctor: Data families are currently not supported."
instance Exception UnsupportedDatatype
-- | Validates a reified datatype and hands it to 'makePrimForDI'':
--   data-family instances are rejected outright, and every instance type
--   must be a (possibly kinded) type variable, with all offending types
--   collected via 'Validation' before reporting.
makePrimForDI ::
  PatternFunctorRules -> DatatypeInfo -> Either UnsupportedDatatype (Q [Dec])
makePrimForDI
  rules
  ( DatatypeInfo
      { datatypeName = tyName,
        datatypeInstTypes = instTys,
        datatypeCons = cons,
        datatypeVariant = variant
      }
    ) =
    if isDataFamInstance
      then Left $ UnsupportedVariant variant
      else
        bimap
          UnsupportedInstTypes
          (flip (makePrimForDI' rules (variant == Newtype) tyName) cons)
          . validationToEither
          $ traverse (\ty -> maybe (Failure $ pure ty) Success $ toTyVarBndr ty) instTys
    where
      -- Only the two data-family variants are unsupported.
      isDataFamInstance = case variant of
        DataInstance -> True
        NewtypeInstance -> True
        Datatype -> False
        Newtype -> False
      -- Instance types must be plain or kind-annotated type variables.
      toTyVarBndr :: Type -> Maybe TyVarBndr'
      toTyVarBndr (VarT n) = pure $ plainTV n
      toTyVarBndr (SigT (VarT n) k) = pure $ kindedTV n k
      toTyVarBndr _ = Nothing
-- The classes derived on the generated pattern functor.
-- TH 2.12.0 means GHC 8.2.1; otherwise, we work back to GHC 8.0.1.
#if MIN_VERSION_template_haskell(2, 12, 0)
deriveds :: [DerivClause]
deriveds =
  pure $
    DerivClause
      Nothing
      [ ConT functorTypeName,
        ConT foldableTypeName,
        ConT traversableTypeName
      ]
#else
deriveds :: [TH.Type]
deriveds =
  [ ConT functorTypeName,
    ConT foldableTypeName,
    ConT traversableTypeName
  ]
#endif
-- | Generates the pattern-functor data declaration plus the 'Projectable',
--   'Steppable', 'Recursive', and 'Corecursive' instances that tie it back
--   to the original type.
makePrimForDI' ::
  PatternFunctorRules -> Bool -> Name -> [TyVarBndr'] -> [ConstructorInfo] -> Q [Dec]
makePrimForDI' rules isNewtype tyName vars cons = do
  -- variable parameters
  let vars' = map VarT (typeVars vars)
  -- Name of base functor
  let tyNameF = patternType rules tyName
  -- Recursive type
  let s = conAppsT tyName vars'
  -- Additional argument
  rName <- newName "r"
  let r = VarT rName
  -- Vars
  let varsF = vars ++ [plainTV rName]
  -- #33
  cons' <- traverse (conTypeTraversal resolveTypeSynonyms) cons
  let consF =
        toCon
          . conNameMap (patternCon rules)
          . conFieldNameMap (patternField rules)
          . conTypeMap (substType s r)
          <$> cons'
  -- Data definition
  let dataDec = case consF of
        [conF]
          | isNewtype -> NewtypeD [] tyNameF varsF Nothing conF deriveds
        _ -> DataD [] tyNameF varsF Nothing consF deriveds
  recursiveDec <-
    [d|
      instance Projectable (->) $(pure s) $(pure $ conAppsT tyNameF vars') where
        project = $(LamCaseE <$> mkMorphism id (patternCon rules) cons')

      instance Steppable (->) $(pure s) $(pure $ conAppsT tyNameF vars') where
        embed = $(LamCaseE <$> mkMorphism (patternCon rules) id cons')

      instance Recursive (->) $(pure s) $(pure $ conAppsT tyNameF vars') where
        cata φ = φ . fmap (cata φ) . project

      instance Corecursive (->) $(pure s) $(pure $ conAppsT tyNameF vars') where
        ana ψ = embed . fmap (ana ψ) . ψ
      |]
  -- Combine
  pure ([dataDec] <> recursiveDec)
-- | makes clauses to rename constructors
--
--   Each constructor yields one @\\case@ alternative: it matches the
--   constructor named by the first renamer and rebuilds it under the
--   second, forwarding the fields unchanged.
mkMorphism ::
  (Name -> Name) ->
  (Name -> Name) ->
  [ConstructorInfo] ->
  Q [Match]
mkMorphism renameFrom renameTo = traverse toMatch
  where
    toMatch info = do
      let conName = constructorName info
      args <- traverse (const $ newName "x") $ constructorFields info
      pure $
        Match
          (conP' (renameFrom conName) (VarP <$> args)) -- pattern
          (NormalB $ foldl AppE (ConE $ renameTo conName) (VarE <$> args)) -- body
          [] -- where dec
-------------------------------------------------------------------------------
-- Traversals
-------------------------------------------------------------------------------
-- | Focuses a constructor's name.
conNameTraversal :: Traversal' ConstructorInfo Name
conNameTraversal = lens constructorName (\s v -> s {constructorName = v})
-- | Focuses each record-field name (no targets for normal/infix constructors).
conFieldNameTraversal :: Traversal' ConstructorInfo Name
conFieldNameTraversal =
  lens constructorVariant (\s v -> s {constructorVariant = v})
    . conVariantTraversal
  where
    conVariantTraversal :: Traversal' ConstructorVariant Name
    conVariantTraversal _ NormalConstructor = pure NormalConstructor
    conVariantTraversal _ InfixConstructor = pure InfixConstructor
    conVariantTraversal f (RecordConstructor fs) = RecordConstructor <$> traverse f fs
-- | Focuses each field type of a constructor.
conTypeTraversal :: Traversal' ConstructorInfo Type
conTypeTraversal =
  lens constructorFields (\s v -> s {constructorFields = v})
    . traverse
-- | Maps over a constructor's name.
conNameMap :: (Name -> Name) -> ConstructorInfo -> ConstructorInfo
conNameMap = over conNameTraversal
-- | Maps over a constructor's record-field names.
conFieldNameMap :: (Name -> Name) -> ConstructorInfo -> ConstructorInfo
conFieldNameMap = over conFieldNameTraversal
-- | Maps over a constructor's field types.
conTypeMap :: (Type -> Type) -> ConstructorInfo -> ConstructorInfo
conTypeMap = over conTypeTraversal
-------------------------------------------------------------------------------
-- Lenses
-------------------------------------------------------------------------------
-- | A minimal van Laarhoven lens, avoiding a dependency on the @lens@ package.
type Lens' s a = forall f. Functor f => (a -> f a) -> s -> f s
-- | A minimal van Laarhoven traversal.
type Traversal' s a = forall f. Applicative f => (a -> f a) -> s -> f s
-- | Builds a lens from a getter and a setter.
lens :: (s -> a) -> (s -> a -> s) -> Lens' s a
lens sa sas afa s = sas s <$> afa (sa s)
{-# INLINE lens #-}
-- | Applies a function to every target of a traversal.
over :: Traversal' s a -> (a -> a) -> s -> s
over l f = runIdentity . l (Identity . f)
{-# INLINE over #-}
-------------------------------------------------------------------------------
-- Type mangling
-------------------------------------------------------------------------------
-- | Extract the names bound by a list of type-variable binders.
typeVars :: [TyVarBndr'] -> [Name]
typeVars = fmap tvName
-- | Apply arguments to a type constructor.
conAppsT :: Name -> [Type] -> Type
conAppsT conName args = foldl AppT (ConT conName) args
-- | Provides substitution for types
--
--   Replaces every syntactic occurrence of the first type with the second,
--   descending through applications, foralls, kind signatures, (unresolved)
--   infix forms, and parenthesized types.
substType ::
  -- | The type to replace.
  Type ->
  -- | The replacement.
  Type ->
  -- | The type to substitute within.
  Type ->
  Type
substType a b = go
  where
    go x | x == a = b
    go (VarT n) = VarT n
    go (AppT l r) = AppT (go l) (go r)
    go (ForallT xs ctx t) = ForallT xs ctx (go t)
    -- This may fail with kind error
    go (SigT t k) = SigT (go t) k
    go (InfixT l n r) = InfixT (go l) n (go r)
    go (UInfixT l n r) = UInfixT (go l) n (go r)
    go (ParensT t) = ParensT (go t)
    -- Rest are unchanged
    go x = x
-- | Converts a th-abstraction 'ConstructorInfo' back into a TH 'Con' for the
--   generated pattern functor.  Constructors with existential variables or a
--   context (GADTs) are rejected with 'error'.
toCon :: ConstructorInfo -> Con
toCon
  ( ConstructorInfo
      { constructorName = name,
        constructorVars = vars,
        constructorContext = ctxt,
        constructorFields = ftys,
        constructorStrictness = fstricts,
        constructorVariant = variant
      }
    )
    | not (null vars && null ctxt) =
      error "makeBaseFunctor: GADTs are not currently supported."
    | otherwise =
      let bangs = map toBang fstricts
       in case variant of
            NormalConstructor -> NormalC name $ zip bangs ftys
            RecordConstructor fnames -> RecC name $ zip3 fnames bangs ftys
            InfixConstructor ->
              -- An infix constructor has exactly two fields by construction.
              let [bang1, bang2] = bangs
                  [fty1, fty2] = ftys
               in InfixC (bang1, fty1) name (bang2, fty2)
    where
      -- Translate th-abstraction's strictness vocabulary into TH source bangs.
      toBang (FieldStrictness upkd strct) =
        Bang
          (toSourceUnpackedness upkd)
          (toSourceStrictness strct)
        where
          toSourceUnpackedness :: Unpackedness -> SourceUnpackedness
          toSourceUnpackedness UnspecifiedUnpackedness = NoSourceUnpackedness
          toSourceUnpackedness NoUnpack = SourceNoUnpack
          toSourceUnpackedness Unpack = SourceUnpack
          toSourceStrictness :: Strictness -> SourceStrictness
          toSourceStrictness UnspecifiedStrictness = NoSourceStrictness
          toSourceStrictness Lazy = SourceLazy
          toSourceStrictness TH.Abs.Strict = SourceStrict
-------------------------------------------------------------------------------
-- Manually quoted names
-------------------------------------------------------------------------------
-- By manually generating these names we avoid needing to use the
-- TemplateHaskell language extension when compiling this library.
-- This allows the library to be used in stage1 cross-compilers.
-- | The 'Functor' class name from @base@, quoted by hand.
functorTypeName :: Name
functorTypeName = mkNameG_tc "base" "GHC.Base" "Functor"
-- | The 'Foldable' class name from @base@, quoted by hand.
foldableTypeName :: Name
foldableTypeName = mkNameG_tc "base" "Data.Foldable" "Foldable"
-- | The 'Traversable' class name from @base@, quoted by hand.
traversableTypeName :: Name
traversableTypeName = mkNameG_tc "base" "Data.Traversable" "Traversable"
| null | https://raw.githubusercontent.com/sellout/yaya/8ec5afb3481b2fc3bcec7b7f8db63f867027e0d2/core/src/Yaya/Retrofit.hs | haskell | to define recursion scheme instances for your existing recursive types.
problems:
advantage of recursion schemes without having to rewrite your existing
code, or
and you want to take advantage of recursion schemes.
instance). If you don't have control, then you /may/ need to generate all
instances.
Another difference when you have control is that it means you may migrate
away from direct recursion entirely, at which point this import should
disappear.
| Extract a pattern functor and relevant instances from a simply recursive type.
/e.g./
@
= Lit a
deriving (Show)
`extractPatternFunctor` `defaultRules` ''Expr
@
will create
@
data ExprF a x
= LitF a
| AddF x x
| x :*$ [x]
`project` (Lit x) = LitF x
`project` (Add x y) = AddF x y
`project` (x :* y) = x :*$ y
`embed` (LitF x) = Lit x
`embed` (AddF x y) = Add x y
`embed` (x :*$ y) = x :* y
`cata` φ = φ . `fmap` (`cata` φ) . `project`
`ana` ψ = `embed` . `fmap` (`ana` ψ) . ψ
@
/Notes:/
as we don't try to do better than
In future, we should check the strictness of the recursive parameter and generate only the appropriate one (unless overridden by a rule).
| Rules of renaming data names
variable parameters
Name of base functor
Recursive type
Additional argument
Data definition
| makes clauses to rename constructors
pattern
body
where dec
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
Lenses
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
Type mangling
-----------------------------------------------------------------------------
| Extract type variables
| Apply arguments to a type constructor.
| Provides substitution for types
This may fail with kind error
Rest are unchanged
-----------------------------------------------------------------------------
Manually quoted names
-----------------------------------------------------------------------------
By manually generating these names we avoid needing to use the | # LANGUAGE CPP #
# LANGUAGE TemplateHaskell #
| This module re - exports a subset of ` . Fold ` , intended for when you want
This is /not/ the recommended way to use , but it solves some real
1 . you have existing directly - recursive types and you want to start taking
2 . a directly - recursive type has been imposed on you by some other library
The distinction between these two cases is whether you have control of the
@data@ declaration . In the first case , you probably do . In that case , you
should only generate the /safe/ instances , and ensure that all the
recursive type references are /strict/ ( if you want a ` Recursive `
module Yaya.Retrofit
( module Yaya.Fold,
PatternFunctorRules (..),
defaultRules,
extractPatternFunctor,
)
where
import Control.Exception (Exception (..), throw)
import Control.Monad ((<=<))
import Data.Bifunctor (bimap)
import Data.Either.Validation (Validation (..), validationToEither)
import Data.Functor.Identity (Identity (..))
import Data.List.NonEmpty (NonEmpty)
import Language.Haskell.TH as TH
import Language.Haskell.TH.Datatype as TH.Abs
import Language.Haskell.TH.Syntax (mkNameG_tc)
import Text.Read.Lex (isSymbolChar)
import Yaya.Fold
( Corecursive (..),
Projectable (..),
Recursive (..),
Steppable (..),
recursiveEq,
recursiveShowsPrec,
)
#if MIN_VERSION_template_haskell(2, 17, 0)
type TyVarBndr' = TyVarBndr ()
#else
type TyVarBndr' = TyVarBndr
#endif
conP' :: Name -> [Pat] -> Pat
#if MIN_VERSION_template_haskell(2, 18, 0)
conP' n = ConP n []
#else
conP' = ConP
#endif
data a
| Add ( Expr a ) ( a )
| Expr a :* [ a ]
deriving ( ' Functor ' , ' Foldable ' , ' ' )
instance ` Projectable ` ( - > ) ( a ) ( ExprF a ) where
instance ` Steppable ` ( - > ) ( a ) ( ExprF a ) where
instance ` Recursive ` ( - > ) ( a ) ( ExprF a ) where
instance ` Corecursive ` ( - > ) ( a ) ( ExprF a ) where
- ` extractPatternFunctor ` works properly only with .
Existentials and GADTs are n't supported ,
< /~ghc/latest/docs/html/users_guide/glasgow_exts.html#deriving-functor-instances GHC 's DeriveFunctor > .
- we always generate both ` Recursive ` and ` Corecursive ` instances , but one of these is always unsafe .
extractPatternFunctor :: PatternFunctorRules -> Name -> Q [Dec]
extractPatternFunctor rules =
either throw id . makePrimForDI rules <=< reifyDatatype
data PatternFunctorRules = PatternFunctorRules
{ patternType :: Name -> Name,
patternCon :: Name -> Name,
patternField :: Name -> Name
}
| Default ' PatternFunctorRules ' : append @F@ or @$@ to data type , constructors and field names .
defaultRules :: PatternFunctorRules
defaultRules =
PatternFunctorRules
{ patternType = toFName,
patternCon = toFName,
patternField = toFName
}
toFName :: Name -> Name
toFName = mkName . f . nameBase
where
f name
| isInfixName name = name ++ "$"
| otherwise = name ++ "F"
isInfixName :: String -> Bool
isInfixName = all isSymbolChar
data UnsupportedDatatype
= UnsupportedInstTypes (NonEmpty Type)
| UnsupportedVariant DatatypeVariant
instance Show UnsupportedDatatype where
show = \case
UnsupportedInstTypes tys ->
"extractPatternFunctor: Couldn't process the following types " <> show tys
UnsupportedVariant _variant ->
"extractPatternFunctor: Data families are currently not supported."
instance Exception UnsupportedDatatype
makePrimForDI ::
PatternFunctorRules -> DatatypeInfo -> Either UnsupportedDatatype (Q [Dec])
makePrimForDI
rules
( DatatypeInfo
{ datatypeName = tyName,
datatypeInstTypes = instTys,
datatypeCons = cons,
datatypeVariant = variant
}
) =
if isDataFamInstance
then Left $ UnsupportedVariant variant
else
bimap
UnsupportedInstTypes
(flip (makePrimForDI' rules (variant == Newtype) tyName) cons)
. validationToEither
$ traverse (\ty -> maybe (Failure $ pure ty) Success $ toTyVarBndr ty) instTys
where
isDataFamInstance = case variant of
DataInstance -> True
NewtypeInstance -> True
Datatype -> False
Newtype -> False
toTyVarBndr :: Type -> Maybe TyVarBndr'
toTyVarBndr (VarT n) = pure $ plainTV n
toTyVarBndr (SigT (VarT n) k) = pure $ kindedTV n k
toTyVarBndr _ = Nothing
TH 2.12.O means GHC 8.2.1 , otherwise , we work back to GHC 8.0.1
#if MIN_VERSION_template_haskell(2, 12, 0)
deriveds :: [DerivClause]
deriveds =
pure $
DerivClause
Nothing
[ ConT functorTypeName,
ConT foldableTypeName,
ConT traversableTypeName
]
#else
deriveds :: [TH.Type]
deriveds =
[ ConT functorTypeName,
ConT foldableTypeName,
ConT traversableTypeName
]
#endif
makePrimForDI' ::
PatternFunctorRules -> Bool -> Name -> [TyVarBndr'] -> [ConstructorInfo] -> Q [Dec]
makePrimForDI' rules isNewtype tyName vars cons = do
let vars' = map VarT (typeVars vars)
let tyNameF = patternType rules tyName
let s = conAppsT tyName vars'
rName <- newName "r"
let r = VarT rName
Vars
let varsF = vars ++ [plainTV rName]
# 33
cons' <- traverse (conTypeTraversal resolveTypeSynonyms) cons
let consF =
toCon
. conNameMap (patternCon rules)
. conFieldNameMap (patternField rules)
. conTypeMap (substType s r)
<$> cons'
let dataDec = case consF of
[conF]
| isNewtype -> NewtypeD [] tyNameF varsF Nothing conF deriveds
_ -> DataD [] tyNameF varsF Nothing consF deriveds
recursiveDec <-
[d|
instance Projectable (->) $(pure s) $(pure $ conAppsT tyNameF vars') where
project = $(LamCaseE <$> mkMorphism id (patternCon rules) cons')
instance Steppable (->) $(pure s) $(pure $ conAppsT tyNameF vars') where
embed = $(LamCaseE <$> mkMorphism (patternCon rules) id cons')
instance Recursive (->) $(pure s) $(pure $ conAppsT tyNameF vars') where
cata φ = φ . fmap (cata φ) . project
instance Corecursive (->) $(pure s) $(pure $ conAppsT tyNameF vars') where
ana ψ = embed . fmap (ana ψ) . ψ
|]
Combine
pure ([dataDec] <> recursiveDec)
mkMorphism ::
(Name -> Name) ->
(Name -> Name) ->
[ConstructorInfo] ->
Q [Match]
mkMorphism nFrom nTo =
traverse
( \ci -> do
let n = constructorName ci
fs <- traverse (const $ newName "x") $ constructorFields ci
pure $
Match
)
conNameTraversal :: Traversal' ConstructorInfo Name
conNameTraversal = lens constructorName (\s v -> s {constructorName = v})
conFieldNameTraversal :: Traversal' ConstructorInfo Name
conFieldNameTraversal =
lens constructorVariant (\s v -> s {constructorVariant = v})
. conVariantTraversal
where
conVariantTraversal :: Traversal' ConstructorVariant Name
conVariantTraversal _ NormalConstructor = pure NormalConstructor
conVariantTraversal _ InfixConstructor = pure InfixConstructor
conVariantTraversal f (RecordConstructor fs) = RecordConstructor <$> traverse f fs
conTypeTraversal :: Traversal' ConstructorInfo Type
conTypeTraversal =
lens constructorFields (\s v -> s {constructorFields = v})
. traverse
conNameMap :: (Name -> Name) -> ConstructorInfo -> ConstructorInfo
conNameMap = over conNameTraversal
conFieldNameMap :: (Name -> Name) -> ConstructorInfo -> ConstructorInfo
conFieldNameMap = over conFieldNameTraversal
conTypeMap :: (Type -> Type) -> ConstructorInfo -> ConstructorInfo
conTypeMap = over conTypeTraversal
type Lens' s a = forall f. Functor f => (a -> f a) -> s -> f s
type Traversal' s a = forall f. Applicative f => (a -> f a) -> s -> f s
lens :: (s -> a) -> (s -> a -> s) -> Lens' s a
lens sa sas afa s = sas s <$> afa (sa s)
# INLINE lens #
over :: Traversal' s a -> (a -> a) -> s -> s
over l f = runIdentity . l (Identity . f)
# INLINE over #
typeVars :: [TyVarBndr'] -> [Name]
typeVars = map tvName
conAppsT :: Name -> [Type] -> Type
conAppsT conName = foldl AppT (ConT conName)
substType ::
Type ->
Type ->
Type ->
Type
substType a b = go
where
go x | x == a = b
go (VarT n) = VarT n
go (AppT l r) = AppT (go l) (go r)
go (ForallT xs ctx t) = ForallT xs ctx (go t)
go (SigT t k) = SigT (go t) k
go (InfixT l n r) = InfixT (go l) n (go r)
go (UInfixT l n r) = UInfixT (go l) n (go r)
go (ParensT t) = ParensT (go t)
go x = x
toCon :: ConstructorInfo -> Con
toCon
( ConstructorInfo
{ constructorName = name,
constructorVars = vars,
constructorContext = ctxt,
constructorFields = ftys,
constructorStrictness = fstricts,
constructorVariant = variant
}
)
| not (null vars && null ctxt) =
error "makeBaseFunctor: GADTs are not currently supported."
| otherwise =
let bangs = map toBang fstricts
in case variant of
NormalConstructor -> NormalC name $ zip bangs ftys
RecordConstructor fnames -> RecC name $ zip3 fnames bangs ftys
InfixConstructor ->
let [bang1, bang2] = bangs
[fty1, fty2] = ftys
in InfixC (bang1, fty1) name (bang2, fty2)
where
toBang (FieldStrictness upkd strct) =
Bang
(toSourceUnpackedness upkd)
(toSourceStrictness strct)
where
toSourceUnpackedness :: Unpackedness -> SourceUnpackedness
toSourceUnpackedness UnspecifiedUnpackedness = NoSourceUnpackedness
toSourceUnpackedness NoUnpack = SourceNoUnpack
toSourceUnpackedness Unpack = SourceUnpack
toSourceStrictness :: Strictness -> SourceStrictness
toSourceStrictness UnspecifiedStrictness = NoSourceStrictness
toSourceStrictness Lazy = SourceLazy
toSourceStrictness TH.Abs.Strict = SourceStrict
TemplateHaskell language extension when compiling this library .
This allows the library to be used in stage1 cross - compilers .
functorTypeName :: Name
functorTypeName = mkNameG_tc "base" "GHC.Base" "Functor"
foldableTypeName :: Name
foldableTypeName = mkNameG_tc "base" "Data.Foldable" "Foldable"
traversableTypeName :: Name
traversableTypeName = mkNameG_tc "base" "Data.Traversable" "Traversable"
|
22d4a8c6cf1eb61cb4ebf8eed78a6f3b17ea45af4134e5767a82f016932ae521 | McCLIM/McCLIM | puzzle.lisp | $ fiHeader : puzzle.lisp , v 1.23 1993/07/27 01:46:05
(in-package #:clim-demo)
"Copyright (c) 1989, 1990, 1991, 1992 Symbolics, Inc. All rights reserved."
;; A 4x4 "fifteen"-style sliding-tile puzzle.  The board is a 4x4 array of
;; integers 0-15, where 0 marks the open (blank) cell.  The single
;; application pane redisplays incrementally via DRAW-PUZZLE.
(define-application-frame puzzle ()
    ((puzzle :initform (make-array '(4 4))
             :accessor puzzle-puzzle))
  (:panes
    (display :application
             :display-function 'draw-puzzle
             :text-style (make-text-style :fix :bold :very-large)
             :incremental-redisplay t
             :text-cursor nil
             :width :compute :height :compute
             :end-of-page-action :allow
             :end-of-line-action :allow))
  (:layouts
    (:default display)))
;; Route both default stream protocols to the single DISPLAY pane.
(defmethod frame-standard-input ((puzzle puzzle))
  (get-frame-pane puzzle 'display))
(defmethod frame-standard-output ((puzzle puzzle))
  (get-frame-pane puzzle 'display))
;; Start every session from the solved configuration.
(defmethod run-frame-top-level :before ((puzzle puzzle) &key)
  (initialize-puzzle puzzle))
(defmethod read-frame-command ((puzzle puzzle) &key (stream *standard-input*))
  ;; On Genera the ABORT and END keys act as "get me out" keystrokes; other
  ;; platforms have none.  READ-COMMAND-USING-KEYSTROKES returns the bare
  ;; character when one of those keys is hit, which we treat as an exit.
  (let ((abort-chars #+genera '(#\Abort #\End)
		     #-genera nil))
    (let ((command (read-command-using-keystrokes
		     (frame-command-table puzzle) abort-chars
		     :stream stream)))
      (if (characterp command)
	  (frame-exit puzzle)
	  command))))
;; A puzzle-cell presentation carries an encoded cell id (0-15); see
;; ENCODE-PUZZLE-CELL.
(define-presentation-type puzzle-cell ()
  :inherit-from '(integer 0 15))
;; Highlight a cell by drawing its bounding rectangle in flipping ink.
(define-presentation-method highlight-presentation ((type puzzle-cell) record stream state)
  (with-bounding-rectangle* (left top right bottom) record
    (draw-rectangle* stream
		     left top right bottom
		     :ink +flipping-ink+)))
(defun encode-puzzle-cell (row column)
  "Pack ROW and COLUMN (each 0-3) into a single cell id in 0-15."
  (+ column (* 4 row)))
(defun decode-puzzle-cell (encoding)
  "Return the row and column (as two values) packed into ENCODING."
  (values (floor encoding 4) (mod encoding 4)))
(defmethod initialize-puzzle ((puzzle puzzle))
  "Reset PUZZLE to the solved position: tiles 1..15 in order, blank last."
  (loop with board = (puzzle-puzzle puzzle)
	for row below 4
	do (loop for column below 4
		 do (setf (aref board row column)
			  (mod (1+ (encode-puzzle-cell row column)) 16)))))
(defmethod draw-puzzle ((puzzle puzzle) stream &key max-width max-height)
  ;; Pane display function: render the board as a 4x4 table.  Each non-blank
  ;; cell becomes a PUZZLE-CELL presentation; UPDATING-OUTPUT keyed on the
  ;; cell's current value keeps incremental redisplay cheap.
  (declare (ignore max-width max-height))
  (let ((puzzle-array (puzzle-puzzle puzzle)))
    (formatting-table (stream)
      (dotimes (row 4)
	(formatting-row (stream)
	  (dotimes (column 4)
	    (let* ((value (aref puzzle-array row column))
		   (cell-id (encode-puzzle-cell row column)))
	      (updating-output (stream :unique-id cell-id
				       :cache-value value)
		(formatting-cell (stream :align-x :right)
		  (unless (zerop value)
		    (with-output-as-presentation
		      (stream cell-id 'puzzle-cell)
		      (format stream "~2D" value))))))))))))
(defun find-open-cell (puzzle)
  "Return the encoded id of PUZZLE's open (zero) cell, or NIL if none."
  ;; RETURN-FROM is required here: a plain RETURN only exits the inner
  ;; DOTIMES's implicit NIL block, so the found id was discarded and the
  ;; function always returned NIL.
  (dotimes (row 4)
    (dotimes (column 4)
      (when (zerop (aref puzzle row column))
	(return-from find-open-cell (encode-puzzle-cell row column))))))
(defun cell-adjacent-to-open-cell (puzzle r c)
  ;; Return the encoded id of the open (zero) cell if it shares a row or a
  ;; column with (R, C), else NIL.  Each RETURN exits only its own DOTIMES,
  ;; whose value then feeds the surrounding OR — so the row scan is tried
  ;; first, then the column scan.
  ;; check row
  (or
   (dotimes (column 4)
     (when (and (/= column c) (zerop (aref puzzle r column)))
       (return (encode-puzzle-cell r column))))
   (dotimes (row 4)
     (when (and (/= row r) (zerop (aref puzzle row c)))
       (return (encode-puzzle-cell row c))))))
(define-puzzle-command com-move-cell
    ((cell 'puzzle-cell))
  ;; Slide the clicked tile — and every tile between it and the open cell —
  ;; one step toward the open cell, then blank the clicked tile's position.
  ;; The translator's tester guarantees the open cell shares CELL's row or
  ;; column, so CELL-ADJACENT-TO-OPEN-CELL below cannot return NIL.
  (with-slots (puzzle) *application-frame*
    (multiple-value-bind (this-row this-column) (decode-puzzle-cell cell)
      (let ((open-cell (cell-adjacent-to-open-cell puzzle this-row this-column)))
	(multiple-value-bind (open-row open-column) (decode-puzzle-cell open-cell)
	  ;; Shift tiles toward the open slot, walking from the open cell
	  ;; back to the clicked cell (direction depends on which side of
	  ;; the clicked cell the open cell lies).
	  (cond ((= open-row this-row)
		 (cond ((> open-column this-column)
			(do ((c open-column (1- c)))
			    ((= c this-column))
			  (setf (aref puzzle this-row c)
				(aref puzzle this-row (1- c)))))
		       (t (do ((c open-column (1+ c)))
			      ((= c this-column))
			    (setf (aref puzzle this-row c)
				  (aref puzzle this-row (1+ c)))))))
		((= open-column this-column)
		 (cond ((> open-row this-row)
			(do ((r open-row (1- r)))
			    ((= r this-row))
			  (setf (aref puzzle r this-column)
				(aref puzzle (1- r) this-column))))
		       (t (do ((r open-row (1+ r)))
			      ((= r this-row))
			    (setf (aref puzzle r this-column)
				  (aref puzzle (1+ r) this-column)))))))))
      ;; The clicked tile's old position becomes the new open cell.
      (setf (aref puzzle this-row this-column) 0))))
;; Clicking a tile invokes COM-MOVE-CELL, but only when the tile can
;; actually slide (the tester rejects tiles not in line with the open cell).
(define-presentation-to-command-translator move-cell
    (puzzle-cell com-move-cell puzzle
     :documentation "Move cell"
     :tester ((object) (cell-moveable-p object)))
    (object)
  (list object))
(defun cell-moveable-p (object)
  ;; A tile is moveable iff the open cell lies in its row or its column.
  (multiple-value-bind (r c)
      (decode-puzzle-cell object)
    (cell-adjacent-to-open-cell (puzzle-puzzle *application-frame*) r c)))
(define-puzzle-command (com-scramble :menu t)
    ()
  ;; Randomly permute tiles 1-15 (the blank stays in the last cell), then
  ;; force the permutation to be even so the resulting position remains
  ;; solvable.
  (let ((ordering (list 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14))
	(puzzle-array (puzzle-puzzle *application-frame*)))
    ;; Shuffle by sorting with a coin-flip predicate.
    (flet ((random-predicate (x y)
	     (declare (ignore x y))
	     (zerop (random 2))))
      (declare (dynamic-extent #'random-predicate))
      (setq ordering (sort ordering #'random-predicate)))
    ;; Compute the permutation's parity by decomposing it into cycles
    ;; (ORDERING2 tracks the not-yet-visited positions); an odd permutation
    ;; is fixed up by swapping the first two elements.
    (flet ((ordering-parity (ordering)
	     (do* ((ordering2 (copy-list ordering))
		   (total-parity t)
		   (start (position-if #'identity ordering2)
			  (position-if #'identity ordering2)))
		  ((null start) total-parity)
	       (let ((cycle-parity (do* ((evenp t (not evenp))
					 (item (nth start ordering) (nth item ordering)))
					((= item start)
					 (setf (nth start ordering2) nil)
					 evenp)
				     (setf (nth item ordering2) nil))))
		 (when (null cycle-parity)
		   (setq total-parity (not total-parity)))))))
      (unless (ordering-parity ordering)
	(rotatef (first ordering) (second ordering))))
    ;; Write the permuted tiles back; cell 15 (the exhausted list) is blank.
    (dotimes (row 4)
      (dotimes (column 4)
	(setf (aref puzzle-array row column) (if ordering (+ 1 (pop ordering)) 0))))))
;; "Exit" menu entry: shut down the application frame.
(define-puzzle-command (com-exit-puzzle :menu "Exit")
    ()
  (frame-exit *application-frame*))
| null | https://raw.githubusercontent.com/McCLIM/McCLIM/282753392aab1d1c25e9f61a8285647387ae1c70/Examples/puzzle.lisp | lisp | check row | $ fiHeader : puzzle.lisp , v 1.23 1993/07/27 01:46:05
(in-package #:clim-demo)
"Copyright (c) 1989, 1990, 1991, 1992 Symbolics, Inc. All rights reserved."
(define-application-frame puzzle ()
((puzzle :initform (make-array '(4 4))
:accessor puzzle-puzzle))
(:panes
(display :application
:display-function 'draw-puzzle
:text-style (make-text-style :fix :bold :very-large)
:incremental-redisplay t
:text-cursor nil
:width :compute :height :compute
:end-of-page-action :allow
:end-of-line-action :allow))
(:layouts
(:default display)))
(defmethod frame-standard-input ((puzzle puzzle))
(get-frame-pane puzzle 'display))
(defmethod frame-standard-output ((puzzle puzzle))
(get-frame-pane puzzle 'display))
(defmethod run-frame-top-level :before ((puzzle puzzle) &key)
(initialize-puzzle puzzle))
(defmethod read-frame-command ((puzzle puzzle) &key (stream *standard-input*))
(let ((abort-chars #+genera '(#\Abort #\End)
#-genera nil))
(let ((command (read-command-using-keystrokes
(frame-command-table puzzle) abort-chars
:stream stream)))
(if (characterp command)
(frame-exit puzzle)
command))))
(define-presentation-type puzzle-cell ()
:inherit-from '(integer 0 15))
(define-presentation-method highlight-presentation ((type puzzle-cell) record stream state)
(with-bounding-rectangle* (left top right bottom) record
(draw-rectangle* stream
left top right bottom
:ink +flipping-ink+)))
(defun encode-puzzle-cell (row column)
(+ (* row 4) column))
(defun decode-puzzle-cell (encoding)
(floor encoding 4))
(defmethod initialize-puzzle ((puzzle puzzle))
(let ((puzzle-array (puzzle-puzzle puzzle)))
(dotimes (row 4)
(dotimes (column 4)
(setf (aref puzzle-array row column) (mod (1+ (encode-puzzle-cell row column)) 16))))))
(defmethod draw-puzzle ((puzzle puzzle) stream &key max-width max-height)
(declare (ignore max-width max-height))
(let ((puzzle-array (puzzle-puzzle puzzle)))
(formatting-table (stream)
(dotimes (row 4)
(formatting-row (stream)
(dotimes (column 4)
(let* ((value (aref puzzle-array row column))
(cell-id (encode-puzzle-cell row column)))
(updating-output (stream :unique-id cell-id
:cache-value value)
(formatting-cell (stream :align-x :right)
(unless (zerop value)
(with-output-as-presentation
(stream cell-id 'puzzle-cell)
(format stream "~2D" value))))))))))))
(defun find-open-cell (puzzle)
(dotimes (row 4)
(dotimes (column 4)
(when (zerop (aref puzzle row column))
(return (encode-puzzle-cell row column))))))
(defun cell-adjacent-to-open-cell (puzzle r c)
(or
(dotimes (column 4)
(when (and (/= column c) (zerop (aref puzzle r column)))
(return (encode-puzzle-cell r column))))
(dotimes (row 4)
(when (and (/= row r) (zerop (aref puzzle row c)))
(return (encode-puzzle-cell row c))))))
(define-puzzle-command com-move-cell
((cell 'puzzle-cell))
(with-slots (puzzle) *application-frame*
(multiple-value-bind (this-row this-column) (decode-puzzle-cell cell)
(let ((open-cell (cell-adjacent-to-open-cell puzzle this-row this-column)))
(multiple-value-bind (open-row open-column) (decode-puzzle-cell open-cell)
(cond ((= open-row this-row)
(cond ((> open-column this-column)
(do ((c open-column (1- c)))
((= c this-column))
(setf (aref puzzle this-row c)
(aref puzzle this-row (1- c)))))
(t (do ((c open-column (1+ c)))
((= c this-column))
(setf (aref puzzle this-row c)
(aref puzzle this-row (1+ c)))))))
((= open-column this-column)
(cond ((> open-row this-row)
(do ((r open-row (1- r)))
((= r this-row))
(setf (aref puzzle r this-column)
(aref puzzle (1- r) this-column))))
(t (do ((r open-row (1+ r)))
((= r this-row))
(setf (aref puzzle r this-column)
(aref puzzle (1+ r) this-column)))))))))
(setf (aref puzzle this-row this-column) 0))))
(define-presentation-to-command-translator move-cell
(puzzle-cell com-move-cell puzzle
:documentation "Move cell"
:tester ((object) (cell-moveable-p object)))
(object)
(list object))
(defun cell-moveable-p (object)
(multiple-value-bind (r c)
(decode-puzzle-cell object)
(cell-adjacent-to-open-cell (puzzle-puzzle *application-frame*) r c)))
(define-puzzle-command (com-scramble :menu t)
()
(let ((ordering (list 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14))
(puzzle-array (puzzle-puzzle *application-frame*)))
(flet ((random-predicate (x y)
(declare (ignore x y))
(zerop (random 2))))
(declare (dynamic-extent #'random-predicate))
(setq ordering (sort ordering #'random-predicate)))
(flet ((ordering-parity (ordering)
(do* ((ordering2 (copy-list ordering))
(total-parity t)
(start (position-if #'identity ordering2)
(position-if #'identity ordering2)))
((null start) total-parity)
(let ((cycle-parity (do* ((evenp t (not evenp))
(item (nth start ordering) (nth item ordering)))
((= item start)
(setf (nth start ordering2) nil)
evenp)
(setf (nth item ordering2) nil))))
(when (null cycle-parity)
(setq total-parity (not total-parity)))))))
(unless (ordering-parity ordering)
(rotatef (first ordering) (second ordering))))
(dotimes (row 4)
(dotimes (column 4)
(setf (aref puzzle-array row column) (if ordering (+ 1 (pop ordering)) 0))))))
(define-puzzle-command (com-exit-puzzle :menu "Exit")
()
(frame-exit *application-frame*))
|
5cd304e9d0f75bb43de152edca26f1e410350ba55671fce2173ce4adc1e84a4b | factisresearch/large-hashable | Intern.hs | -- | Generic, low-level data types for hashing. This is an internal module.
--
-- You should only import this module if you write your own hash algorithm
-- or if you need access to low-level hashing functions when defining
-- instances of 'LargeHash'.
--
-- Regular users should not import this module. Import 'Data.LargeHashable'
-- instead.
{-# LANGUAGE BangPatterns #-}
# LANGUAGE DeriveGeneric #
# LANGUAGE GeneralizedNewtypeDeriving #
module Data.LargeHashable.Intern (
HashUpdates(..), HashAlgorithm(..), LH
, hashUpdates, ioInLH, runLH, updateXorHash
) where
keep imports in alphabetic order ( in Emacs , use " M - x sort - lines " )
import Control.Monad
import Data.Word
import Foreign.Ptr
import System.IO.Unsafe (unsafePerformIO)
-- | Functions for updating an intermediate hash value. The functions live
in the ' IO ' monad because they are typically implemented via FFI .
data HashUpdates
= HashUpdates
{ hu_updatePtr :: {-# NOUNPACK #-} !(Ptr Word8 -> Int -> IO ()) -- ^ adds a byte array to the hash
, hu_updateUChar :: {-# NOUNPACK #-} !(Word8 -> IO ()) -- Word8
, hu_updateUShort :: {-# NOUNPACK #-} !(Word16 -> IO ()) -- Word16
, hu_updateULong :: {-# NOUNPACK #-} !(Word64 -> IO ()) -- Word64
}
-- | The interface for a hashing algorithm. The interface contains a simple run
-- function, which is used to update the hash with all values needed, and the
-- outputs the resulting hash.
data HashAlgorithm h
= HashAlgorithm
{ ha_run :: {-# NOUNPACK #-} !((HashUpdates -> IO ()) -> IO h)
, ha_xor :: {-# NOUNPACK #-} !(h -> h -> h)
, ha_updateHash :: {-# NOUNPACK #-} !(HashUpdates -> h -> IO ())
}
data LHEnv
= LHEnv
{ lh_updates :: {-# NOUNPACK #-} !HashUpdates
, lh_updateXorHash :: {-# NOUNPACK #-} !([LH ()] -> IO ())
}
-- | The 'LH' monad (`LH` stands for "large hash") is used in the definition of
-- hashing functions for arbitrary data types.
newtype LH a = LH (LHEnv -> IO a)
# INLINE lhFmap #
lhFmap :: (a -> b) -> LH a -> LH b
lhFmap f (LH x) =
LH $ \env ->
do y <- x env
return (f y)
# INLINE lhReturn #
lhReturn :: a -> LH a
lhReturn x = LH $ \_env -> return x
# INLINE lhApp #
lhApp :: LH (a -> b) -> LH a -> LH b
lhApp (LH f) (LH x) =
LH $ \env -> f env <*> x env
# INLINE lhBind #
lhBind :: LH a -> (a -> LH b) -> LH b
lhBind (LH x) f =
LH $ \env ->
do y <- x env
let (LH g) = f y
g env
# INLINE lhBind ' #
lhBind' :: LH a -> LH b -> LH b
lhBind' (LH x) (LH y) =
LH $ \env ->
do _ <- x env
y env
instance Functor LH where
fmap = lhFmap
instance Applicative LH where
pure = lhReturn
(<*>) = lhApp
instance Monad LH where
return = lhReturn
(>>=) = lhBind
(>>) = lhBind'
{-# INLINE hashUpdates #-}
hashUpdates :: LH HashUpdates
hashUpdates =
LH $ \env -> return (lh_updates env)
# INLINE getUpdateXorHash #
getUpdateXorHash :: LH ([LH ()] -> IO ())
getUpdateXorHash =
LH $ \env -> return (lh_updateXorHash env)
-- | Perform an 'IO' action in the 'LH' monad. Use with care, do not perform
-- arbitrary 'IO' operation with this function! Only use it for calling
functions of the ' HashUpdates ' datatype .
# INLINE ioInLH #
ioInLH :: IO a -> LH a
ioInLH io =
LH $ \_env -> io
-- | Runs a 'LH' computation and returns the resulting hash.
# NOINLINE runLH #
runLH :: HashAlgorithm h -> LH () -> h
runLH alg lh =
unsafePerformIO (runLH' alg lh)
runLH' :: HashAlgorithm h -> LH () -> IO h
runLH' alg (LH lh) =
ha_run alg fun
where
fun updates =
lh (LHEnv updates (updateXor updates))
updateXor updates actions =
do mh <- foldM foldFun Nothing actions
case mh of
Just h -> ha_updateHash alg updates h
Nothing -> return ()
foldFun mh action =
do h2 <- runLH' alg action
case mh of
Nothing -> return (Just h2)
Just h1 ->
let !h = ha_xor alg h1 h2
in return (Just h)
updateXorHash :: [LH ()] -> LH ()
updateXorHash actions =
do f <- getUpdateXorHash
ioInLH (f actions)
| null | https://raw.githubusercontent.com/factisresearch/large-hashable/7b7c2ed6ac6e096478e8ee00160fa9d220df853a/src/Data/LargeHashable/Intern.hs | haskell | | Generic, low-level data types for hashing. This is an internal module.
You should only import this module if you write your own hash algorithm
or if you need access to low-level hashing functions when defining
instances of 'LargeHash'.
Regular users should not import this module. Import 'Data.LargeHashable'
instead.
# LANGUAGE BangPatterns #
| Functions for updating an intermediate hash value. The functions live
# NOUNPACK #
^ adds a byte array to the hash
# NOUNPACK #
Word8
# NOUNPACK #
Word16
# NOUNPACK #
Word64
| The interface for a hashing algorithm. The interface contains a simple run
function, which is used to update the hash with all values needed, and the
outputs the resulting hash.
# NOUNPACK #
# NOUNPACK #
# NOUNPACK #
# NOUNPACK #
# NOUNPACK #
| The 'LH' monad (`LH` stands for "large hash") is used in the definition of
hashing functions for arbitrary data types.
# INLINE hashUpdates #
| Perform an 'IO' action in the 'LH' monad. Use with care, do not perform
arbitrary 'IO' operation with this function! Only use it for calling
| Runs a 'LH' computation and returns the resulting hash. | # LANGUAGE DeriveGeneric #
# LANGUAGE GeneralizedNewtypeDeriving #
module Data.LargeHashable.Intern (
HashUpdates(..), HashAlgorithm(..), LH
, hashUpdates, ioInLH, runLH, updateXorHash
) where
keep imports in alphabetic order ( in Emacs , use " M - x sort - lines " )
import Control.Monad
import Data.Word
import Foreign.Ptr
import System.IO.Unsafe (unsafePerformIO)
in the ' IO ' monad because they are typically implemented via FFI .
data HashUpdates
= HashUpdates
}
data HashAlgorithm h
= HashAlgorithm
}
data LHEnv
= LHEnv
}
newtype LH a = LH (LHEnv -> IO a)
# INLINE lhFmap #
lhFmap :: (a -> b) -> LH a -> LH b
lhFmap f (LH x) =
LH $ \env ->
do y <- x env
return (f y)
# INLINE lhReturn #
lhReturn :: a -> LH a
lhReturn x = LH $ \_env -> return x
# INLINE lhApp #
lhApp :: LH (a -> b) -> LH a -> LH b
lhApp (LH f) (LH x) =
LH $ \env -> f env <*> x env
# INLINE lhBind #
lhBind :: LH a -> (a -> LH b) -> LH b
lhBind (LH x) f =
LH $ \env ->
do y <- x env
let (LH g) = f y
g env
# INLINE lhBind ' #
lhBind' :: LH a -> LH b -> LH b
lhBind' (LH x) (LH y) =
LH $ \env ->
do _ <- x env
y env
instance Functor LH where
fmap = lhFmap
instance Applicative LH where
pure = lhReturn
(<*>) = lhApp
instance Monad LH where
return = lhReturn
(>>=) = lhBind
(>>) = lhBind'
hashUpdates :: LH HashUpdates
hashUpdates =
LH $ \env -> return (lh_updates env)
# INLINE getUpdateXorHash #
getUpdateXorHash :: LH ([LH ()] -> IO ())
getUpdateXorHash =
LH $ \env -> return (lh_updateXorHash env)
functions of the ' HashUpdates ' datatype .
# INLINE ioInLH #
ioInLH :: IO a -> LH a
ioInLH io =
LH $ \_env -> io
# NOINLINE runLH #
runLH :: HashAlgorithm h -> LH () -> h
runLH alg lh =
unsafePerformIO (runLH' alg lh)
runLH' :: HashAlgorithm h -> LH () -> IO h
runLH' alg (LH lh) =
ha_run alg fun
where
fun updates =
lh (LHEnv updates (updateXor updates))
updateXor updates actions =
do mh <- foldM foldFun Nothing actions
case mh of
Just h -> ha_updateHash alg updates h
Nothing -> return ()
foldFun mh action =
do h2 <- runLH' alg action
case mh of
Nothing -> return (Just h2)
Just h1 ->
let !h = ha_xor alg h1 h2
in return (Just h)
updateXorHash :: [LH ()] -> LH ()
updateXorHash actions =
do f <- getUpdateXorHash
ioInLH (f actions)
|
b060a84bdcf029dfc24bbb8240a5f3a28b1e4cfeddf45d34b8aede1a59792fad | dpiponi/Wattage | ex4.hs | import Prelude hiding (iterate)
import Formal as F
main = do
let x = F.var :: Formal Q
let n = 10
let f = sum [(1 + x)^i | i <- [0 .. n - 1]] - fromIntegral n
-- A054474 Number of walks on square lattice that start and end at
origin after 2n steps , not touching origin at intermediate stages .
--
# of valid terms doubles with each iteration so 10 is
good enough for 1000 terms .
print $ F.truncate 10 $ inverse f
| null | https://raw.githubusercontent.com/dpiponi/Wattage/c4b9c30fa42cf5d134d83b490bed77d0a29c16af/examples/ex4.hs | haskell | A054474 Number of walks on square lattice that start and end at
| import Prelude hiding (iterate)
import Formal as F
main = do
let x = F.var :: Formal Q
let n = 10
let f = sum [(1 + x)^i | i <- [0 .. n - 1]] - fromIntegral n
origin after 2n steps , not touching origin at intermediate stages .
# of valid terms doubles with each iteration so 10 is
good enough for 1000 terms .
print $ F.truncate 10 $ inverse f
|
1923d2115919d400d4e5e6c95a032f97367377c1ba4cd84176bfd54f0d58d818 | fulcrologic/fulcro | user.clj | (ns user
(:require
[clojure.tools.namespace.repl :as tools-ns :refer [set-refresh-dirs]]))
(set-refresh-dirs "src/main" "src/test" "src/dev" "src/todomvc")
| null | https://raw.githubusercontent.com/fulcrologic/fulcro/3d6442da84d240f0dbbe2338aa43349dcaca6f03/src/dev/user.clj | clojure | (ns user
(:require
[clojure.tools.namespace.repl :as tools-ns :refer [set-refresh-dirs]]))
(set-refresh-dirs "src/main" "src/test" "src/dev" "src/todomvc")
| |
e47bf6dcb1b050b798b4037830bcad7cd7c1a9caf55aa2353a1c8d5cb9209992 | crclark/foundationdb-haskell | Directory.hs | -- | The directory layer provides tools for creating a
hierarchy of ' 's , which can be operated on efficiently with a
-- directory-like API. This is one of the official layers supported by all
-- language bindings. See the
-- <-guide.html#directories official FoundationDB documentation>
-- for more information.
--
-- This implementation of the directory layer does not yet support directory
-- partitions. They will be added in the future.
module FoundationDB.Layer.Directory
( DirectoryLayer,
defaultDirLayer,
newDirectoryLayer,
Directory,
Path,
dirSubspace,
dirPath,
dirLayer,
open,
createOrOpen,
move,
remove,
exists,
list,
-- * Advanced usage
open',
createOrOpen',
)
where
import FoundationDB.Layer.Directory.Internal
| null | https://raw.githubusercontent.com/crclark/foundationdb-haskell/c8c9527ffa02dd2d08c15e23b09dd982a0b2a0ec/src/FoundationDB/Layer/Directory.hs | haskell | | The directory layer provides tools for creating a
directory-like API. This is one of the official layers supported by all
language bindings. See the
<-guide.html#directories official FoundationDB documentation>
for more information.
This implementation of the directory layer does not yet support directory
partitions. They will be added in the future.
* Advanced usage | hierarchy of ' 's , which can be operated on efficiently with a
module FoundationDB.Layer.Directory
( DirectoryLayer,
defaultDirLayer,
newDirectoryLayer,
Directory,
Path,
dirSubspace,
dirPath,
dirLayer,
open,
createOrOpen,
move,
remove,
exists,
list,
open',
createOrOpen',
)
where
import FoundationDB.Layer.Directory.Internal
|
a736161cc09b7da131890ca48dfd4b884666cb4d2bdcf051ed03b450c17018b7 | Enecuum/Node | PoASpec.hs | module Enecuum.Tests.Scenarios.PoASpec where
import qualified Data.Map as M
import Data.Typeable
import qualified Enecuum.Domain as D
import qualified Enecuum.Interpreters as I
import qualified Enecuum.Language as L
import Enecuum.Prelude
import qualified Enecuum.Runtime as R
import qualified Enecuum.Samples.Assets.Blockchain.Generation as A
import qualified Enecuum.Samples.Assets.Nodes.Address as A
import qualified Enecuum.Samples.Assets.Nodes.Messages as D
import qualified Enecuum.Samples.Assets.TstScenarios as Tst
import qualified Enecuum.Samples.Blockchain.Domain as D
import qualified Enecuum.Samples.Blockchain.Language as L
import Enecuum.Testing.Integrational
import Enecuum.Tests.Helpers
import Enecuum.Testing.Wrappers
import Test.Hspec
import Test.Hspec.Contrib.HUnit (fromHUnitTest)
import Test.HUnit
spec :: Spec
spec = unstableTest $ slowTest $ describe "PoA" $ fromHUnitTest $ TestList
[TestLabel "Check microblock generation" testPoA]
testPoA :: Test
testPoA = TestCase $ withNodesManager $ \mgr -> do
let transmitterRpcAddress = A.getRpcAddress A.tstGraphNodeTransmitterAddress
void $ startNode Nothing mgr $ Tst.tstGraphNode Tst.tstGraphNodeTransmitterConfig
void $ startNode Nothing mgr Tst.powNode
void $ startNode Nothing mgr $ Tst.poaNode Tst.Good Tst.tstGenPoANodeConfig
-- Generate and send transactions to graph node
transactions <- I.runERandomL $ replicateM A.transactionsInMicroblock $ A.genTransaction A.Generated
_ :: [Either Text D.SuccessMsg] <- forM transactions $ \tx ->
makeIORpcRequest transmitterRpcAddress $ D.CreateTransaction tx
-- Check transaction pending on graph node
txPending :: [D.Transaction] <- makeRpcRequestUntilSuccess transmitterRpcAddress D.GetTransactionPending
(sort txPending) `shouldBe` (sort transactions)
Ask pow node to generate n kblocks
let timeGap = 0
let kblockCount = 1
_ :: Either Text D.SuccessMsg <- makeIORpcRequest (A.getRpcAddress A.tstGenPoWNodeAddress) $ D.NBlockPacketGeneration kblockCount timeGap
-- Get last kblock from graph node
kBlock :: D.KBlock <- makeRpcRequestUntilSuccess transmitterRpcAddress D.GetLastKBlock
let kblockHash = D.toHash kBlock
on graph node received from poa
(D.GetMBlocksForKBlockResponse mblock) <- do
let request = D.GetMBlocksForKBlockRequest kblockHash
let predicate (D.GetMBlocksForKBlockResponse mblock) = length mblock == 1
makeRpcRequestWithPredicate predicate transmitterRpcAddress request
(length mblock) `shouldBe` 1
-- Check transaction pending on graph node, it must to be empty now
void $ do
let predicate :: [D.Transaction] -> Bool
predicate txPending = txPending == []
makeRpcRequestWithPredicate predicate transmitterRpcAddress D.GetTransactionPending
| null | https://raw.githubusercontent.com/Enecuum/Node/3dfbc6a39c84bd45dd5f4b881e067044dde0153a/test/spec/Enecuum/Tests/Scenarios/PoASpec.hs | haskell | Generate and send transactions to graph node
Check transaction pending on graph node
Get last kblock from graph node
Check transaction pending on graph node, it must to be empty now | module Enecuum.Tests.Scenarios.PoASpec where
import qualified Data.Map as M
import Data.Typeable
import qualified Enecuum.Domain as D
import qualified Enecuum.Interpreters as I
import qualified Enecuum.Language as L
import Enecuum.Prelude
import qualified Enecuum.Runtime as R
import qualified Enecuum.Samples.Assets.Blockchain.Generation as A
import qualified Enecuum.Samples.Assets.Nodes.Address as A
import qualified Enecuum.Samples.Assets.Nodes.Messages as D
import qualified Enecuum.Samples.Assets.TstScenarios as Tst
import qualified Enecuum.Samples.Blockchain.Domain as D
import qualified Enecuum.Samples.Blockchain.Language as L
import Enecuum.Testing.Integrational
import Enecuum.Tests.Helpers
import Enecuum.Testing.Wrappers
import Test.Hspec
import Test.Hspec.Contrib.HUnit (fromHUnitTest)
import Test.HUnit
spec :: Spec
spec = unstableTest $ slowTest $ describe "PoA" $ fromHUnitTest $ TestList
[TestLabel "Check microblock generation" testPoA]
testPoA :: Test
testPoA = TestCase $ withNodesManager $ \mgr -> do
let transmitterRpcAddress = A.getRpcAddress A.tstGraphNodeTransmitterAddress
void $ startNode Nothing mgr $ Tst.tstGraphNode Tst.tstGraphNodeTransmitterConfig
void $ startNode Nothing mgr Tst.powNode
void $ startNode Nothing mgr $ Tst.poaNode Tst.Good Tst.tstGenPoANodeConfig
transactions <- I.runERandomL $ replicateM A.transactionsInMicroblock $ A.genTransaction A.Generated
_ :: [Either Text D.SuccessMsg] <- forM transactions $ \tx ->
makeIORpcRequest transmitterRpcAddress $ D.CreateTransaction tx
txPending :: [D.Transaction] <- makeRpcRequestUntilSuccess transmitterRpcAddress D.GetTransactionPending
(sort txPending) `shouldBe` (sort transactions)
Ask pow node to generate n kblocks
let timeGap = 0
let kblockCount = 1
_ :: Either Text D.SuccessMsg <- makeIORpcRequest (A.getRpcAddress A.tstGenPoWNodeAddress) $ D.NBlockPacketGeneration kblockCount timeGap
kBlock :: D.KBlock <- makeRpcRequestUntilSuccess transmitterRpcAddress D.GetLastKBlock
let kblockHash = D.toHash kBlock
on graph node received from poa
(D.GetMBlocksForKBlockResponse mblock) <- do
let request = D.GetMBlocksForKBlockRequest kblockHash
let predicate (D.GetMBlocksForKBlockResponse mblock) = length mblock == 1
makeRpcRequestWithPredicate predicate transmitterRpcAddress request
(length mblock) `shouldBe` 1
void $ do
let predicate :: [D.Transaction] -> Bool
predicate txPending = txPending == []
makeRpcRequestWithPredicate predicate transmitterRpcAddress D.GetTransactionPending
|
de3656bf7d9bfa64dee5b1b182c7e66915911f8fd57dd9297b16062ebc04b1c9 | wgnet/fox | fox_pub_pool.erl | -module(fox_pub_pool).
-behavior(gen_server).
-export([start_link/2, get_channel/1, stop/1]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
-include("otp_types.hrl").
-include("fox.hrl").
-record(state, {
connection :: pid() | undefined,
connection_ref :: reference() | undefined,
connection_params :: #amqp_params_network{},
reconnect_attempt = 0 :: non_neg_integer(),
num_channels :: integer(),
channels :: queue:queue(),
registered_name :: atom()
}).
%%% module API
-spec start_link(atom(), #amqp_params_network{}) -> gs_start_link_reply().
start_link(PoolName, ConnectionParams) ->
RegName = fox_utils:make_reg_name(?MODULE, PoolName),
gen_server:start_link({local, RegName}, ?MODULE, {RegName, ConnectionParams}, []).
-spec get_channel(atom()) -> {ok, pid()} | {error, no_connection}.
get_channel(PoolName) ->
RegName = fox_utils:make_reg_name(?MODULE, PoolName),
gen_server:call(RegName, get_channel).
-spec stop(atom()) -> ok.
stop(PoolName) ->
RegName = fox_utils:make_reg_name(?MODULE, PoolName),
gen_server:call(RegName, stop).
%%% gen_server API
-spec(init(gs_args()) -> gs_init_reply()).
init({RegName, ConnectionParams}) ->
put('$module', ?MODULE),
{ok, NumChannels} = application:get_env(fox, num_publish_channels),
self() ! connect,
{
ok,
#state{
connection_params = ConnectionParams,
num_channels = NumChannels,
registered_name = RegName
}
}.
-spec(handle_call(gs_request(), gs_from(), gs_reply()) -> gs_call_reply()).
handle_call(get_channel, _From, #state{connection = undefined} = State) ->
{reply, {error, no_connection}, State};
handle_call(get_channel, _From, #state{connection = Conn, num_channels = PoolSize, channels = Channels} = State) ->
{Channel, Channels1} = handle_get_channel(Channels, PoolSize, Conn),
{reply, {ok, Channel}, State#state{channels = Channels1}};
handle_call(stop, _From,
#state{
connection = Conn,
connection_ref = Ref,
channels = Channels}
= State) ->
case Conn of
undefined -> do_nothing;
Pid ->
erlang:demonitor(Ref, [flush]),
fox_priv_utils:close_connection(Pid)
end,
lists:foreach(
fun(Channel) ->
fox_priv_utils:close_channel(Channel)
end,
queue:to_list(Channels)
),
{stop, normal, ok, State};
handle_call(Any, _From, State) ->
error_logger:error_msg("unknown call ~p in ~p ~n", [Any, ?MODULE]),
{noreply, State}.
-spec(handle_cast(gs_request(), gs_state()) -> gs_cast_reply()).
handle_cast(Any, State) ->
error_logger:error_msg("unknown cast ~p in ~p ~n", [Any, ?MODULE]),
{noreply, State}.
-spec(handle_info(gs_request(), gs_state()) -> gs_info_reply()).
handle_info(connect,
#state{
connection = undefined,
connection_ref = undefined,
connection_params = Params,
reconnect_attempt = Attempt,
registered_name = RegName
} = State) ->
SParams = fox_utils:params_network_to_str(Params),
case amqp_connection:start(Params) of
{ok, Conn} ->
Ref = erlang:monitor(process, Conn),
error_logger:info_msg("~s connected to ~s", [RegName, SParams]),
{noreply, State#state{
connection = Conn,
connection_ref = Ref,
reconnect_attempt = 0,
channels = queue:new()
}};
{error, Reason} ->
error_logger:error_msg("~s could not connect to ~s ~p", [RegName, SParams, Reason]),
fox_priv_utils:reconnect(Attempt),
{noreply, State#state{
connection = undefined,
connection_ref = undefined,
reconnect_attempt = Attempt + 1}}
end;
handle_info({'DOWN', Ref, process, Conn, Reason},
#state{
connection = Conn,
connection_ref = Ref,
reconnect_attempt = Attempt,
channels = Channels,
registered_name = RegName
} = State) ->
lists:foreach(
fun(Channel) ->
fox_priv_utils:close_channel(Channel)
end,
queue:to_list(Channels)),
fox_priv_utils:error_or_info(Reason, "~s, connection is DOWN: ~p", [RegName, Reason]),
fox_priv_utils:reconnect(Attempt),
{noreply, State#state{connection = undefined, connection_ref = undefined, channels = undefined}};
handle_info(Request, State) ->
error_logger:error_msg("unknown info ~p in ~p ~n", [Request, ?MODULE]),
{noreply, State}.
-spec(terminate(terminate_reason(), gs_state()) -> ok).
terminate(_Reason, _State) ->
ok.
-spec(code_change(term(), term(), term()) -> gs_code_change_reply()).
code_change(_OldVersion, State, _Extra) ->
{ok, State}.
%% internal functions
-spec handle_get_channel(queue:queue(pid()), pos_integer(), pid()) -> {pid(), queue:queue(pid())}.
handle_get_channel(Channels, PoolSize, Connection) ->
NumChannels = queue:len(Channels),
{Channel1, Channels2} = case NumChannels < PoolSize of
true ->
{ok, Channel} = amqp_connection:open_channel(Connection),
{Channel, Channels};
false ->
{{value, Channel}, Channels1} = queue:out(Channels),
{Channel, Channels1}
end,
case is_process_alive(Channel1) of
true ->
{Channel1, queue:in(Channel1, Channels2)};
false ->
error_logger:info_msg(
"Fox channel ~p seems to be dead. Drop it and create new",
[Channel1]
),
handle_get_channel(Channels2, PoolSize, Connection)
end.
| null | https://raw.githubusercontent.com/wgnet/fox/da2b45640b9feb2c3f43a926cec791dfeba38a1e/src/publish/fox_pub_pool.erl | erlang | module API
gen_server API
internal functions | -module(fox_pub_pool).
-behavior(gen_server).
-export([start_link/2, get_channel/1, stop/1]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
-include("otp_types.hrl").
-include("fox.hrl").
-record(state, {
connection :: pid() | undefined,
connection_ref :: reference() | undefined,
connection_params :: #amqp_params_network{},
reconnect_attempt = 0 :: non_neg_integer(),
num_channels :: integer(),
channels :: queue:queue(),
registered_name :: atom()
}).
-spec start_link(atom(), #amqp_params_network{}) -> gs_start_link_reply().
start_link(PoolName, ConnectionParams) ->
RegName = fox_utils:make_reg_name(?MODULE, PoolName),
gen_server:start_link({local, RegName}, ?MODULE, {RegName, ConnectionParams}, []).
-spec get_channel(atom()) -> {ok, pid()} | {error, no_connection}.
get_channel(PoolName) ->
RegName = fox_utils:make_reg_name(?MODULE, PoolName),
gen_server:call(RegName, get_channel).
-spec stop(atom()) -> ok.
stop(PoolName) ->
RegName = fox_utils:make_reg_name(?MODULE, PoolName),
gen_server:call(RegName, stop).
-spec(init(gs_args()) -> gs_init_reply()).
init({RegName, ConnectionParams}) ->
put('$module', ?MODULE),
{ok, NumChannels} = application:get_env(fox, num_publish_channels),
self() ! connect,
{
ok,
#state{
connection_params = ConnectionParams,
num_channels = NumChannels,
registered_name = RegName
}
}.
-spec(handle_call(gs_request(), gs_from(), gs_reply()) -> gs_call_reply()).
handle_call(get_channel, _From, #state{connection = undefined} = State) ->
{reply, {error, no_connection}, State};
handle_call(get_channel, _From, #state{connection = Conn, num_channels = PoolSize, channels = Channels} = State) ->
{Channel, Channels1} = handle_get_channel(Channels, PoolSize, Conn),
{reply, {ok, Channel}, State#state{channels = Channels1}};
handle_call(stop, _From,
#state{
connection = Conn,
connection_ref = Ref,
channels = Channels}
= State) ->
case Conn of
undefined -> do_nothing;
Pid ->
erlang:demonitor(Ref, [flush]),
fox_priv_utils:close_connection(Pid)
end,
lists:foreach(
fun(Channel) ->
fox_priv_utils:close_channel(Channel)
end,
queue:to_list(Channels)
),
{stop, normal, ok, State};
handle_call(Any, _From, State) ->
error_logger:error_msg("unknown call ~p in ~p ~n", [Any, ?MODULE]),
{noreply, State}.
-spec(handle_cast(gs_request(), gs_state()) -> gs_cast_reply()).
handle_cast(Any, State) ->
error_logger:error_msg("unknown cast ~p in ~p ~n", [Any, ?MODULE]),
{noreply, State}.
-spec(handle_info(gs_request(), gs_state()) -> gs_info_reply()).
handle_info(connect,
#state{
connection = undefined,
connection_ref = undefined,
connection_params = Params,
reconnect_attempt = Attempt,
registered_name = RegName
} = State) ->
SParams = fox_utils:params_network_to_str(Params),
case amqp_connection:start(Params) of
{ok, Conn} ->
Ref = erlang:monitor(process, Conn),
error_logger:info_msg("~s connected to ~s", [RegName, SParams]),
{noreply, State#state{
connection = Conn,
connection_ref = Ref,
reconnect_attempt = 0,
channels = queue:new()
}};
{error, Reason} ->
error_logger:error_msg("~s could not connect to ~s ~p", [RegName, SParams, Reason]),
fox_priv_utils:reconnect(Attempt),
{noreply, State#state{
connection = undefined,
connection_ref = undefined,
reconnect_attempt = Attempt + 1}}
end;
handle_info({'DOWN', Ref, process, Conn, Reason},
#state{
connection = Conn,
connection_ref = Ref,
reconnect_attempt = Attempt,
channels = Channels,
registered_name = RegName
} = State) ->
lists:foreach(
fun(Channel) ->
fox_priv_utils:close_channel(Channel)
end,
queue:to_list(Channels)),
fox_priv_utils:error_or_info(Reason, "~s, connection is DOWN: ~p", [RegName, Reason]),
fox_priv_utils:reconnect(Attempt),
{noreply, State#state{connection = undefined, connection_ref = undefined, channels = undefined}};
handle_info(Request, State) ->
error_logger:error_msg("unknown info ~p in ~p ~n", [Request, ?MODULE]),
{noreply, State}.
-spec(terminate(terminate_reason(), gs_state()) -> ok).
terminate(_Reason, _State) ->
ok.
-spec(code_change(term(), term(), term()) -> gs_code_change_reply()).
code_change(_OldVersion, State, _Extra) ->
{ok, State}.
-spec handle_get_channel(queue:queue(pid()), pos_integer(), pid()) -> {pid(), queue:queue(pid())}.
handle_get_channel(Channels, PoolSize, Connection) ->
NumChannels = queue:len(Channels),
{Channel1, Channels2} = case NumChannels < PoolSize of
true ->
{ok, Channel} = amqp_connection:open_channel(Connection),
{Channel, Channels};
false ->
{{value, Channel}, Channels1} = queue:out(Channels),
{Channel, Channels1}
end,
case is_process_alive(Channel1) of
true ->
{Channel1, queue:in(Channel1, Channels2)};
false ->
error_logger:info_msg(
"Fox channel ~p seems to be dead. Drop it and create new",
[Channel1]
),
handle_get_channel(Channels2, PoolSize, Connection)
end.
|
76c4965c26aead348e7be25c2df8efe9c6a5efd9dc34a245db1b0e55f807c2b1 | dextroamphetamine/nuko | Driver.hs | module Nuko.Driver (compile, mainMod) where
import Relude
import Nuko.Names (Attribute (..), Label (Label), ModName,
NameKind (TyName), genIdent, mkModName,
mkName, mkQualifiedWithPos, mkTyName)
import Nuko.Report.Message (Diagnostic)
import Nuko.Resolver (initProgram, resolveProgram)
import Nuko.Resolver.Env (ImportErrorKind (..), MonadResolver,
NameSpace, Query (..), ResolverState (..),
Use (..), emptyState)
import Nuko.Resolver.Occourence (insertOcc)
import Nuko.Syntax.Lexer.Support (runLexer)
import Nuko.Syntax.Parser (parseProgram)
import Nuko.Tree (Program (..), Tc)
import Nuko.Typer.Env (TyInfo (..), TyInfoKind (..), TypeSpace (..),
TypingEnv, emptyTypeSpace, runToIO)
import Nuko.Typer.Infer (inferProgram)
import Nuko.Typer.Infer.Literal (boolTy, intTy, preludeQual, strTy)
import Nuko.Typer.Kinds (TKind (..))
import Control.Monad.Chronicle (MonadChronicle)
import Control.Monad.Query (MonadQuery (query))
import Data.These (These (..))
import GHC.IO (unsafePerformIO)
import Control.Monad.Chronicle qualified as Chronicle
import Control.Monad.Reader qualified as Reader
import Control.Monad.State qualified as State
import Data.HashMap.Strict qualified as HashMap
import Nuko.Resolver.Occourence qualified as Occ
newtype ConstImporter m a = ConstImporter { runImporter :: ReaderT (HashMap ModName NameSpace) m a }
deriving newtype (Functor, Monad, Applicative, MonadState b, MonadChronicle b)
instance Monad m => MonadQuery Query (ConstImporter m) where
query (GetModule modName') = ConstImporter $ do
r <- Reader.asks (HashMap.lookup modName')
case r of
Just res -> pure (Right res)
Nothing -> pure (Left CannotFind)
runImporterTo :: ConstImporter m a -> HashMap ModName NameSpace -> m a
runImporterTo imp = Reader.runReaderT (runImporter imp)
runResolver :: (forall m . MonadResolver m => m a) -> ResolverState -> HashMap ModName NameSpace -> These [Diagnostic] (a, ResolverState)
runResolver action r p = first (`appEndo` []) (Chronicle.runChronicle $ State.runStateT (runImporterTo action p) r)
intType :: Label
intType = Label $ mkTyName (genIdent "Int")
strType :: Label
strType = Label $ mkTyName (genIdent "String")
boolType :: Label
boolType = Label $ mkTyName (genIdent "Bool")
preludeMod :: ModName
preludeMod = mkModName (genIdent "Prelude" :| [])
openedPrelude :: Occ.OccEnv Use
openedPrelude =
insertOcc intType (Single (mkQualifiedWithPos preludeMod intType))
$ insertOcc strType (Single (mkQualifiedWithPos preludeMod strType))
$ insertOcc boolType (Single (mkQualifiedWithPos preludeMod boolType))
Occ.emptyOcc
mainMod :: ModName
mainMod = mkModName (one $ genIdent "Main")
commonState :: Text -> ResolverState
commonState filename = (emptyState mainMod filename) { _openedNames = openedPrelude }
prelude :: TypeSpace
prelude = emptyTypeSpace
{ _tsTypes = HashMap.fromList
[ (preludeQual "Int", (KiStar, TyInfo intTy (mkName TyName (genIdent "Int") Untouched) [] IsOpaque))
, (preludeQual "String", (KiStar, TyInfo strTy (mkName TyName (genIdent "String") Untouched) [] IsOpaque))
, (preludeQual "Bool", (KiStar, TyInfo boolTy (mkName TyName (genIdent "Bool") Untouched) [] IsOpaque))
]
}
compile :: ModName -> Text -> ByteString -> These [Diagnostic] (Program Tc, TypingEnv)
compile modName' filename content =
runLexer parseProgram modName' filename content
>>= \program -> runResolver (initProgram program) (commonState filename) HashMap.empty
>>= \(_, ns) -> runResolver (resolveProgram program) ns (ns._newNamespaces)
>>= \(res,_) -> unsafePerformIO (runToIO prelude modName' filename (inferProgram res))
| null | https://raw.githubusercontent.com/dextroamphetamine/nuko/1b6f960eeeebbfcbfb5700279c39758f82e7df28/src/Nuko/Driver.hs | haskell | module Nuko.Driver (compile, mainMod) where
import Relude
import Nuko.Names (Attribute (..), Label (Label), ModName,
NameKind (TyName), genIdent, mkModName,
mkName, mkQualifiedWithPos, mkTyName)
import Nuko.Report.Message (Diagnostic)
import Nuko.Resolver (initProgram, resolveProgram)
import Nuko.Resolver.Env (ImportErrorKind (..), MonadResolver,
NameSpace, Query (..), ResolverState (..),
Use (..), emptyState)
import Nuko.Resolver.Occourence (insertOcc)
import Nuko.Syntax.Lexer.Support (runLexer)
import Nuko.Syntax.Parser (parseProgram)
import Nuko.Tree (Program (..), Tc)
import Nuko.Typer.Env (TyInfo (..), TyInfoKind (..), TypeSpace (..),
TypingEnv, emptyTypeSpace, runToIO)
import Nuko.Typer.Infer (inferProgram)
import Nuko.Typer.Infer.Literal (boolTy, intTy, preludeQual, strTy)
import Nuko.Typer.Kinds (TKind (..))
import Control.Monad.Chronicle (MonadChronicle)
import Control.Monad.Query (MonadQuery (query))
import Data.These (These (..))
import GHC.IO (unsafePerformIO)
import Control.Monad.Chronicle qualified as Chronicle
import Control.Monad.Reader qualified as Reader
import Control.Monad.State qualified as State
import Data.HashMap.Strict qualified as HashMap
import Nuko.Resolver.Occourence qualified as Occ
newtype ConstImporter m a = ConstImporter { runImporter :: ReaderT (HashMap ModName NameSpace) m a }
deriving newtype (Functor, Monad, Applicative, MonadState b, MonadChronicle b)
instance Monad m => MonadQuery Query (ConstImporter m) where
query (GetModule modName') = ConstImporter $ do
r <- Reader.asks (HashMap.lookup modName')
case r of
Just res -> pure (Right res)
Nothing -> pure (Left CannotFind)
runImporterTo :: ConstImporter m a -> HashMap ModName NameSpace -> m a
runImporterTo imp = Reader.runReaderT (runImporter imp)
runResolver :: (forall m . MonadResolver m => m a) -> ResolverState -> HashMap ModName NameSpace -> These [Diagnostic] (a, ResolverState)
runResolver action r p = first (`appEndo` []) (Chronicle.runChronicle $ State.runStateT (runImporterTo action p) r)
intType :: Label
intType = Label $ mkTyName (genIdent "Int")
strType :: Label
strType = Label $ mkTyName (genIdent "String")
boolType :: Label
boolType = Label $ mkTyName (genIdent "Bool")
preludeMod :: ModName
preludeMod = mkModName (genIdent "Prelude" :| [])
openedPrelude :: Occ.OccEnv Use
openedPrelude =
insertOcc intType (Single (mkQualifiedWithPos preludeMod intType))
$ insertOcc strType (Single (mkQualifiedWithPos preludeMod strType))
$ insertOcc boolType (Single (mkQualifiedWithPos preludeMod boolType))
Occ.emptyOcc
mainMod :: ModName
mainMod = mkModName (one $ genIdent "Main")
commonState :: Text -> ResolverState
commonState filename = (emptyState mainMod filename) { _openedNames = openedPrelude }
prelude :: TypeSpace
prelude = emptyTypeSpace
{ _tsTypes = HashMap.fromList
[ (preludeQual "Int", (KiStar, TyInfo intTy (mkName TyName (genIdent "Int") Untouched) [] IsOpaque))
, (preludeQual "String", (KiStar, TyInfo strTy (mkName TyName (genIdent "String") Untouched) [] IsOpaque))
, (preludeQual "Bool", (KiStar, TyInfo boolTy (mkName TyName (genIdent "Bool") Untouched) [] IsOpaque))
]
}
compile :: ModName -> Text -> ByteString -> These [Diagnostic] (Program Tc, TypingEnv)
compile modName' filename content =
runLexer parseProgram modName' filename content
>>= \program -> runResolver (initProgram program) (commonState filename) HashMap.empty
>>= \(_, ns) -> runResolver (resolveProgram program) ns (ns._newNamespaces)
>>= \(res,_) -> unsafePerformIO (runToIO prelude modName' filename (inferProgram res))
| |
19388daf85ed94d17c8b831bcc7f558d37e5530c56e278055bccff39b2836de8 | wdebeaum/step | clock.lisp | ;;;;
;;;; W::clock
;;;;
(define-words :pos W::n :templ COUNT-PRED-TEMPL
:words (
(W::clock
(SENSES
((meta-data :origin calo-ontology :entry-date 20060123 :change-date nil :wn ("clock%1:06:00") :comment caloy3)
(LF-PARENT ONT::DEVICE)
(example "the clock on the wall")
)
)
)
))
(define-words :pos W::n :templ COUNT-PRED-TEMPL
:words (
((W::CLOCK W::SPEED)
(wordfeats (W::morph (:forms (-S-3P) :plur (W::clock W::speeds))))
(SENSES
((LF-PARENT ONT::frequency-scale)
(TEMPL reln-subcat-of-units-TEMPL)
)
((LF-PARENT ONT::frequency-scale)
(TEMPL other-reln-TEMPL)
)
)
)
))
(define-words :pos W::v
:words (
(W::clock
(SENSES
((LF-PARENT ONT::register)
(TEMPL agent-neutral-xp-templ)
(example "He clocked the runners")
(meta-data :origin trips :entry-date 20090910 :change-date nil :comments nil :vn ("register-54.1"))
)
((LF-PARENT ONT::register)
(TEMPL AGENT-NEUTRAL-FORMAL-2-XP1-3-XP2-TEMPL)
(example "He clocked the runners at different speeds")
(meta-data :origin trips :entry-date 20090910 :change-date nil :comments nil :vn ("register-54.1"))
)
((LF-PARENT ONT::register)
(TEMPL neutral-extent-xp-templ (xp (% W::PP (W::ptype W::at))))
(example "He clocked at 85km/hr")
(meta-data :origin trips :entry-date 20090910 :change-date nil :comments nil :vn ("register-54.1"))
)
)
)
))
| null | https://raw.githubusercontent.com/wdebeaum/step/f38c07d9cd3a58d0e0183159d4445de9a0eafe26/src/LexiconManager/Data/new/clock.lisp | lisp |
W::clock
|
(define-words :pos W::n :templ COUNT-PRED-TEMPL
:words (
(W::clock
(SENSES
((meta-data :origin calo-ontology :entry-date 20060123 :change-date nil :wn ("clock%1:06:00") :comment caloy3)
(LF-PARENT ONT::DEVICE)
(example "the clock on the wall")
)
)
)
))
(define-words :pos W::n :templ COUNT-PRED-TEMPL
:words (
((W::CLOCK W::SPEED)
(wordfeats (W::morph (:forms (-S-3P) :plur (W::clock W::speeds))))
(SENSES
((LF-PARENT ONT::frequency-scale)
(TEMPL reln-subcat-of-units-TEMPL)
)
((LF-PARENT ONT::frequency-scale)
(TEMPL other-reln-TEMPL)
)
)
)
))
(define-words :pos W::v
:words (
(W::clock
(SENSES
((LF-PARENT ONT::register)
(TEMPL agent-neutral-xp-templ)
(example "He clocked the runners")
(meta-data :origin trips :entry-date 20090910 :change-date nil :comments nil :vn ("register-54.1"))
)
((LF-PARENT ONT::register)
(TEMPL AGENT-NEUTRAL-FORMAL-2-XP1-3-XP2-TEMPL)
(example "He clocked the runners at different speeds")
(meta-data :origin trips :entry-date 20090910 :change-date nil :comments nil :vn ("register-54.1"))
)
((LF-PARENT ONT::register)
(TEMPL neutral-extent-xp-templ (xp (% W::PP (W::ptype W::at))))
(example "He clocked at 85km/hr")
(meta-data :origin trips :entry-date 20090910 :change-date nil :comments nil :vn ("register-54.1"))
)
)
)
))
|
c0eaa8cd0dcb64b648cee0fb5a34fcd8020455736913241f97171467e020b944 | tonsky/advent2018 | stub.clj | (ns advent2018.stub
(:require
[clojure.string :as str]
[clojure.set :as set]
[clojure.walk :as walk]
[advent2018.core :refer :all]))
(set! *warn-on-reflection* true)
(def input (slurp "inputs/stub"))
(def lines (str/split input #"\n"))
(defn part1 []
)
#_(time (part1))
(defn part2 []
)
#_(time (part2))
#_(require 'advent2018.stub :reload) | null | https://raw.githubusercontent.com/tonsky/advent2018/6f8d15bf37a150a288e3447df7766c362f7086e9/src/advent2018/stub.clj | clojure | (ns advent2018.stub
(:require
[clojure.string :as str]
[clojure.set :as set]
[clojure.walk :as walk]
[advent2018.core :refer :all]))
(set! *warn-on-reflection* true)
(def input (slurp "inputs/stub"))
(def lines (str/split input #"\n"))
(defn part1 []
)
#_(time (part1))
(defn part2 []
)
#_(time (part2))
#_(require 'advent2018.stub :reload) | |
2876dbb27c379af42ddd24050d7332e4288cf89ab670d2559ad4eaee63950fc1 | embecosm/cgen | cos-pprint.scm | ;;;; cos-pprint.scm --- pretty-print definitions for COS
Copyright ( C ) 2005 , 2009 Red Hat , Inc.
This file is part of CGEN .
;;;; See file COPYING.CGEN for details.
;;; To use this with pprint.scm:
;;;
;;; (load "pprint.scm")
;;; (load "cos-pprint.scm")
;;;
You must load this file second , so it can redefine the ELIDE ? and
;;; ELIDED-NAME hooks.
;;;
;;; See the documentation in pprint.scm for details.
(define (elide? obj)
(or (object? obj) (class? obj)))
(define (elided-name obj)
(cond ((class? obj) `(class ,(class-name obj)))
((object? obj)
`(object ,(class-name (object-class obj))
,@(if (method-present? obj 'get-name)
(list (send obj 'get-name))
'())))
(else (error "unexpected elided object"))))
| null | https://raw.githubusercontent.com/embecosm/cgen/3fa8809c015376cd0e80018a655d372df3678bc6/cgen/cos-pprint.scm | scheme | cos-pprint.scm --- pretty-print definitions for COS
See file COPYING.CGEN for details.
To use this with pprint.scm:
(load "pprint.scm")
(load "cos-pprint.scm")
ELIDED-NAME hooks.
See the documentation in pprint.scm for details. | Copyright ( C ) 2005 , 2009 Red Hat , Inc.
This file is part of CGEN .
You must load this file second , so it can redefine the ELIDE ? and
(define (elide? obj)
(or (object? obj) (class? obj)))
(define (elided-name obj)
(cond ((class? obj) `(class ,(class-name obj)))
((object? obj)
`(object ,(class-name (object-class obj))
,@(if (method-present? obj 'get-name)
(list (send obj 'get-name))
'())))
(else (error "unexpected elided object"))))
|
c0daa75dd9d0b4a440512353860aee9de7c3a51a9fa9dbc6036702914ea1abfe | Haskell-OpenAPI-Code-Generator/Stripe-Haskell-Library | PaymentMethodBoleto.hs | {-# LANGUAGE MultiWayIf #-}
CHANGE WITH CAUTION : This is a generated code file generated by -OpenAPI-Code-Generator/Haskell-OpenAPI-Client-Code-Generator .
{-# LANGUAGE OverloadedStrings #-}
| Contains the types generated from the schema PaymentMethodBoleto
module StripeAPI.Types.PaymentMethodBoleto where
import qualified Control.Monad.Fail
import qualified Data.Aeson
import qualified Data.Aeson as Data.Aeson.Encoding.Internal
import qualified Data.Aeson as Data.Aeson.Types
import qualified Data.Aeson as Data.Aeson.Types.FromJSON
import qualified Data.Aeson as Data.Aeson.Types.Internal
import qualified Data.Aeson as Data.Aeson.Types.ToJSON
import qualified Data.ByteString.Char8
import qualified Data.ByteString.Char8 as Data.ByteString.Internal
import qualified Data.Foldable
import qualified Data.Functor
import qualified Data.Maybe
import qualified Data.Scientific
import qualified Data.Text
import qualified Data.Text.Internal
import qualified Data.Time.Calendar as Data.Time.Calendar.Days
import qualified Data.Time.LocalTime as Data.Time.LocalTime.Internal.ZonedTime
import qualified GHC.Base
import qualified GHC.Classes
import qualified GHC.Int
import qualified GHC.Show
import qualified GHC.Types
import qualified StripeAPI.Common
import StripeAPI.TypeAlias
import qualified Prelude as GHC.Integer.Type
import qualified Prelude as GHC.Maybe
-- | Defines the object schema located at @components.schemas.payment_method_boleto@ in the specification.
data PaymentMethodBoleto = PaymentMethodBoleto
| tax_id : Uniquely identifies the customer tax i d ( CNPJ or CPF )
--
-- Constraints:
--
* Maximum length of 5000
paymentMethodBoletoTaxId :: Data.Text.Internal.Text
}
deriving
( GHC.Show.Show,
GHC.Classes.Eq
)
instance Data.Aeson.Types.ToJSON.ToJSON PaymentMethodBoleto where
toJSON obj = Data.Aeson.Types.Internal.object (Data.Foldable.concat (["tax_id" Data.Aeson.Types.ToJSON..= paymentMethodBoletoTaxId obj] : GHC.Base.mempty))
toEncoding obj = Data.Aeson.Encoding.Internal.pairs (GHC.Base.mconcat (Data.Foldable.concat (["tax_id" Data.Aeson.Types.ToJSON..= paymentMethodBoletoTaxId obj] : GHC.Base.mempty)))
instance Data.Aeson.Types.FromJSON.FromJSON PaymentMethodBoleto where
parseJSON = Data.Aeson.Types.FromJSON.withObject "PaymentMethodBoleto" (\obj -> GHC.Base.pure PaymentMethodBoleto GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..: "tax_id"))
| Create a new ' PaymentMethodBoleto ' with all required fields .
mkPaymentMethodBoleto ::
-- | 'paymentMethodBoletoTaxId'
Data.Text.Internal.Text ->
PaymentMethodBoleto
mkPaymentMethodBoleto paymentMethodBoletoTaxId = PaymentMethodBoleto {paymentMethodBoletoTaxId = paymentMethodBoletoTaxId}
| null | https://raw.githubusercontent.com/Haskell-OpenAPI-Code-Generator/Stripe-Haskell-Library/ba4401f083ff054f8da68c741f762407919de42f/src/StripeAPI/Types/PaymentMethodBoleto.hs | haskell | # LANGUAGE MultiWayIf #
# LANGUAGE OverloadedStrings #
| Defines the object schema located at @components.schemas.payment_method_boleto@ in the specification.
Constraints:
| 'paymentMethodBoletoTaxId' | CHANGE WITH CAUTION : This is a generated code file generated by -OpenAPI-Code-Generator/Haskell-OpenAPI-Client-Code-Generator .
| Contains the types generated from the schema PaymentMethodBoleto
module StripeAPI.Types.PaymentMethodBoleto where
import qualified Control.Monad.Fail
import qualified Data.Aeson
import qualified Data.Aeson as Data.Aeson.Encoding.Internal
import qualified Data.Aeson as Data.Aeson.Types
import qualified Data.Aeson as Data.Aeson.Types.FromJSON
import qualified Data.Aeson as Data.Aeson.Types.Internal
import qualified Data.Aeson as Data.Aeson.Types.ToJSON
import qualified Data.ByteString.Char8
import qualified Data.ByteString.Char8 as Data.ByteString.Internal
import qualified Data.Foldable
import qualified Data.Functor
import qualified Data.Maybe
import qualified Data.Scientific
import qualified Data.Text
import qualified Data.Text.Internal
import qualified Data.Time.Calendar as Data.Time.Calendar.Days
import qualified Data.Time.LocalTime as Data.Time.LocalTime.Internal.ZonedTime
import qualified GHC.Base
import qualified GHC.Classes
import qualified GHC.Int
import qualified GHC.Show
import qualified GHC.Types
import qualified StripeAPI.Common
import StripeAPI.TypeAlias
import qualified Prelude as GHC.Integer.Type
import qualified Prelude as GHC.Maybe
data PaymentMethodBoleto = PaymentMethodBoleto
| tax_id : Uniquely identifies the customer tax i d ( CNPJ or CPF )
* Maximum length of 5000
paymentMethodBoletoTaxId :: Data.Text.Internal.Text
}
deriving
( GHC.Show.Show,
GHC.Classes.Eq
)
instance Data.Aeson.Types.ToJSON.ToJSON PaymentMethodBoleto where
toJSON obj = Data.Aeson.Types.Internal.object (Data.Foldable.concat (["tax_id" Data.Aeson.Types.ToJSON..= paymentMethodBoletoTaxId obj] : GHC.Base.mempty))
toEncoding obj = Data.Aeson.Encoding.Internal.pairs (GHC.Base.mconcat (Data.Foldable.concat (["tax_id" Data.Aeson.Types.ToJSON..= paymentMethodBoletoTaxId obj] : GHC.Base.mempty)))
instance Data.Aeson.Types.FromJSON.FromJSON PaymentMethodBoleto where
parseJSON = Data.Aeson.Types.FromJSON.withObject "PaymentMethodBoleto" (\obj -> GHC.Base.pure PaymentMethodBoleto GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..: "tax_id"))
| Create a new ' PaymentMethodBoleto ' with all required fields .
mkPaymentMethodBoleto ::
Data.Text.Internal.Text ->
PaymentMethodBoleto
mkPaymentMethodBoleto paymentMethodBoletoTaxId = PaymentMethodBoleto {paymentMethodBoletoTaxId = paymentMethodBoletoTaxId}
|
de02c550fba035b0d321256c3b5dc816e3c9fe0098b6622b5302ef4d1271c014 | freuk/obandit | delegate.ml | #!/usr/bin/env ocaml
#use "topfind"
#require "bos.setup"
#require "topkg"
#require "topkg.care"
---------------------------------------------------------------------------
Copyright ( c ) 2016 . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
% % NAME%% % % ---------------------------------------------------------------------------
Copyright (c) 2016 Daniel C. Bünzli. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
%%NAME%% %%VERSION%%
---------------------------------------------------------------------------*)
open Bos_setup
(* Publish documentation *)
let publish_doc_gh_pages owner repo dir name version docdir =
let remote = strf "" owner repo in
let git_for_repo r = Cmd.of_list (Topkg.Cmd.to_list @@ Topkg.Vcs.cmd r) in
let create_empty_gh_pages git =
let msg = "Initial commit by topkg." in
let create () =
OS.Cmd.run Cmd.(v "git" % "init")
>>= fun () -> Topkg.Vcs.get ()
>>= fun repo -> Ok (git_for_repo repo)
>>= fun git -> OS.Cmd.run Cmd.(git % "checkout" % "--orphan" % "gh-pages")
>>= fun () -> OS.File.write (Fpath.v "README") "" (* need some file *)
>>= fun () -> OS.Cmd.run Cmd.(git % "add" % "README")
>>= fun () -> OS.Cmd.run Cmd.(git % "commit" % "README" % "-m" % msg)
in
OS.Dir.with_tmp "gh-pages-%s.tmp" (fun dir () ->
OS.Dir.with_current dir create () |> R.join
>>= fun () -> OS.Cmd.run Cmd.(git % "fetch" % Fpath.to_string dir
% "gh-pages")
) () |> R.join
in
Topkg.Vcs.get ()
>>= fun repo -> Ok (git_for_repo repo)
>>= fun git ->
(match OS.Cmd.run Cmd.(git % "fetch" % remote % "gh-pages") with
| Ok () -> Ok ()
| Error _ -> create_empty_gh_pages git)
>>= fun () -> (OS.Cmd.run_out Cmd.(git % "rev-parse" % "FETCH_HEAD")
|> OS.Cmd.to_string)
>>= fun id -> OS.Cmd.run Cmd.(git % "branch" % "-f" % "gh-pages" % id)
>>= fun () -> Topkg_care.Delegate.publish_in_git_branch ~remote
~branch:"gh-pages" ~name ~version ~docdir ~dir
>>= fun () -> Ok 0
(* Publish releases *)
let repo_and_owner_of_uri uri =
let uri_error uri =
R.msgf "Could not derive owner and repo from opam dev-repo \
field value %a; expected the pattern \
$SCHEME://$HOST/$OWNER/$REPO[.$EXT][/$DIR]" String.dump uri
in
match Topkg_care.Text.split_uri ~rel:true uri with
| None -> Error (uri_error uri)
| Some (_, _, path) ->
if path = "" then Error (uri_error uri) else
match String.cut ~sep:"/" path with
| None -> Error (uri_error uri)
| Some (owner, path) ->
let repo = match String.cut ~sep:"/" path with
| None -> path
| Some (repo, path) -> repo
in
begin
Fpath.of_string repo
>>= fun repo -> Ok (owner, Fpath.(to_string @@ rem_ext repo))
end
|> R.reword_error_msg (fun _ -> uri_error uri)
let steal_opam_publish_github_auth () =
let opam = Cmd.(v "opam") in
let publish = Fpath.v "plugins/opam-publish" in
OS.Cmd.exists opam >>= function
| false -> Ok None
| true ->
OS.Cmd.(run_out Cmd.(opam % "config" % "var" % "root") |> to_string)
>>= fun root -> Fpath.of_string root
>>= fun root -> OS.Path.query Fpath.(root // publish / "$(user).token")
>>= function
| [] -> Ok None
| (file, defs) :: _ ->
OS.File.read file >>= fun token ->
Ok (Some (strf "%s:%s" (String.Map.get "user" defs) token))
let github_auth ~owner =
match
steal_opam_publish_github_auth ()
|> Logs.on_error_msg ~use:(fun _ -> None)
with
| Some auth -> auth
| None -> OS.Env.(value "TOPKG_GITHUB_AUTH" string ~absent:owner)
let create_release_json version msg =
let escape_for_json s =
let len = String.length s in
let max = len - 1 in
let rec escaped_len i l =
if i > max then l else
match String.get s i with
| '\\' | '\"' | '\n' | '\r' | '\t' -> escaped_len (i + 1) (l + 2)
| _ -> escaped_len (i + 1) (l + 1)
in
let escaped_len = escaped_len 0 0 in
if escaped_len = len then s else
let b = Bytes.create escaped_len in
let rec loop i k =
if i > max then Bytes.unsafe_to_string b else
match String.get s i with
| ('\\' | '\"' | '\n' | '\r' | '\t' as c) ->
Bytes.set b k '\\';
let c = match c with
| '\\' -> '\\' | '\"' -> '\"' | '\n' -> 'n' | '\r' -> 'r'
| '\t' -> 't'
| _ -> assert false
in
Bytes.set b (k + 1) c; loop (i + 1) (k + 2)
| c ->
Bytes.set b k c; loop (i + 1) (k + 1)
in
loop 0 0
in
strf "{ \"tag_name\" : \"%s\", \
\"body\" : \"%s\" }" (escape_for_json version) (escape_for_json msg)
let run_with_auth auth curl =
let auth = strf "-u %s" auth in
OS.Cmd.(in_string auth |> run_io curl)
let curl_create_release curl version msg owner repo =
FIXME this is retired .
let headers = String.cuts ~sep:"\r\n" resp in
try
let not_slash c = not (Char.equal '/' c) in
let loc = List.find (String.is_prefix ~affix:"Location:") headers in
let id = String.take ~rev:true ~sat:not_slash loc in
match String.to_int id with
| None -> R.error_msgf "Could not parse id from location header %S" loc
| Some id -> Ok id
with Not_found ->
R.error_msgf "Could not find release id in response:\n%s."
(String.concat ~sep:"\n" headers)
in
let data = create_release_json version msg in
let uri = strf "" owner repo in
let auth = github_auth ~owner in
let cmd = Cmd.(curl % "-D" % "-" % "--data" % data % uri) in
run_with_auth auth cmd |> OS.Cmd.to_string ~trim:false
>>= parse_release_id
let curl_upload_archive curl archive owner repo release_id =
let uri =
FIXME upload URI prefix should be taken from release creation
response
response *)
strf ""
owner repo release_id (Fpath.filename archive)
in
let auth = github_auth ~owner in
let data = Cmd.(v "--data-binary" % strf "@@%s" (Fpath.to_string archive)) in
let ctype = Cmd.(v "-H" % "Content-Type:application/x-tar") in
let cmd = Cmd.(curl %% ctype %% data % uri) in
OS.Cmd.(run_with_auth auth cmd |> to_stdout)
let publish_distrib uri name version msg archive =
let git_for_repo r = Cmd.of_list (Topkg.Cmd.to_list @@ Topkg.Vcs.cmd r) in
Fpath.of_string archive
>>= fun archive -> OS.Cmd.must_exist Cmd.(v "curl" % "-s" % "-S" % "-K" % "-")
>>= fun curl -> Topkg.Vcs.get ()
>>= fun repo -> Ok (git_for_repo repo)
>>= fun git -> OS.Cmd.run Cmd.(git % "push" % "--force" % "--tags")
>>= fun () -> repo_and_owner_of_uri uri
>>= fun (owner, repo) -> curl_create_release curl version msg owner repo
>>= fun id -> curl_upload_archive curl archive owner repo id
>>= fun () -> Ok 0
(* Publish delegations *)
let unsupported = Ok 1
let publish = function
| "distrib" :: uri :: name :: version :: msg :: archive :: _ ->
publish_distrib uri name version msg archive
| "doc" :: uri :: name :: version :: msg :: docdir :: _ -> Ok 0
publish_doc_gh_pages uri name version
| "alt" :: kind :: uri :: name :: version :: msg :: archive :: _ ->
unsupported
| args ->
unsupported
(* Issue delegations *)
let issue = function
| "list" :: uri :: _ -> unsupported
| "show" :: uri :: id :: _ -> unsupported
| "open" :: uri :: title :: descr :: _ -> unsupported
| "close" :: uri :: id :: msg :: _ -> unsupported
| args -> unsupported
(* Delegation requests *)
let request = function
| "publish" :: args -> publish args
| "issue" :: args -> issue args
| args -> unsupported
(* Delegate tool commands *)
let ipc_cmd args =
begin match args with
| verbosity :: req ->
Logs.level_of_string verbosity >>= fun logs_level ->
Topkg.Log.level_of_string verbosity >>= fun topkg_level ->
Topkg.Log.set_level topkg_level;
Logs.set_level logs_level;
request req
| [] ->
R.error_msg "malformed delegate request, verbosity is missing"
end
|> Logs.on_error_msg ~use:(fun () -> 2)
let main_cmd () = `Help (`Pager, None)
(* Cli interface *)
open Cmdliner
let ipc_cmd =
let doc = "Delegate request IPCs" in
let man =
[ `S "DESCRIPTION";
`P "The $(tname) command implements the topkg delegate protocol.
See topkg-delegate(7) and $(mname) $(b,--help) for more
information." ]
in
let args =
let doc = "IPC call arguments" in
Arg.(value (pos_all string [] & info [] ~doc ~docv:"ARG"))
in
let info = Term.info "ipc" ~doc ~man in
let t = Term.(const ipc_cmd $ args) in
(t, info)
let main_cmd =
let doc = "Topkg's toy GitHub delegate" in
let envs =
[ Term.env_info "TOPKG_GITHUB_AUTH" ~doc:"GitHub authentication data, see
the section GITHUB AUTHENTICATION for details." ]
in
let man_xrefs = [ `Tool "topkg" ] in
let man =
[ `S "DESCRIPTION";
`P "$(mname) is a toy topkg delegate for GitHub. It will disappear
once a decent GitHub delegate emerges. For more
information about topkg delegates, see topkg-delegate(7).";
`P "This delegate only supports the following delegations:";
`I ("$(b,topkg publish doc)",
"Commits and pushes the documentation to the gh-pages of the
source repository. The publication directory PATH in the branch is
determined by matching the opam 'doc' field against the
pattern SCHEME.");
`I ("$(b,topkg publish distrib)",
"This requires curl(1). Creates a GitHub release with the
version and publication message given to the delegate and
uploads the distribution archive as a release artefact. This
requires GitHub authentication, see section GITHUB AUTHENTICATION
for details. Also bear in mind that error reporting
(e.g. if the release already exists) is made of raw JSON
responses and thus very user-unfriendly.");
`S "GITHUB AUTHENTICATION";
`P "This being a toy delegate, you get toy authentication. Here
are the steps, in order, that are tried to authenticate you on
GitHub.";
`I ("1. opam-publish token stealing.",
"If you have already used opam-publish, an authorization token
was generated for it that is keept in
\\$(opam config var root)/plugins/opam-publish/\\$(user).token. If
such a file exists, \\$(user) and the corresponding token will
be used for authentication.");
`I ("2. Environment variable.",
"You scan specify the user and the password or token using
the TOPKG_GITHUB_AUTH environment variable with a username:token
value, see $(i,/).");
`I ("3. Cli prompt.",
"As a last resort the username used for authentication is
the name of the GitHub owner of the repo (determined from
the $(i,DISTRIB_URI) URI, itself determined from the 'dev-repo'
field of the opam file, see topkg-delegate(7) and topkg's API
documentation for more details); in this case your GitHub
password will be prompted twice on the command line by curl (ugh).")]
in
let version = "%%VERSION%%" in
Term.(ret (const main_cmd $ const ())),
Term.info "toy-github-topkg-delegate" ~version ~doc ~envs ~man ~man_xrefs
let main () =
Topkg.Private.disable_main ();
match Term.eval_choice main_cmd [ipc_cmd] with
| `Error _ -> exit 3
| `Ok ret -> exit ret
| _ -> exit 0
let () = main ()
| null | https://raw.githubusercontent.com/freuk/obandit/0d8222c9e8dbb4b7f324290121bc45892620c783/delegate.ml | ocaml | Publish documentation
need some file
Publish releases
Publish delegations
Issue delegations
Delegation requests
Delegate tool commands
Cli interface | #!/usr/bin/env ocaml
#use "topfind"
#require "bos.setup"
#require "topkg"
#require "topkg.care"
---------------------------------------------------------------------------
Copyright ( c ) 2016 . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
% % NAME%% % % ---------------------------------------------------------------------------
Copyright (c) 2016 Daniel C. Bünzli. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
%%NAME%% %%VERSION%%
---------------------------------------------------------------------------*)
open Bos_setup
let publish_doc_gh_pages owner repo dir name version docdir =
let remote = strf "" owner repo in
let git_for_repo r = Cmd.of_list (Topkg.Cmd.to_list @@ Topkg.Vcs.cmd r) in
let create_empty_gh_pages git =
let msg = "Initial commit by topkg." in
let create () =
OS.Cmd.run Cmd.(v "git" % "init")
>>= fun () -> Topkg.Vcs.get ()
>>= fun repo -> Ok (git_for_repo repo)
>>= fun git -> OS.Cmd.run Cmd.(git % "checkout" % "--orphan" % "gh-pages")
>>= fun () -> OS.Cmd.run Cmd.(git % "add" % "README")
>>= fun () -> OS.Cmd.run Cmd.(git % "commit" % "README" % "-m" % msg)
in
OS.Dir.with_tmp "gh-pages-%s.tmp" (fun dir () ->
OS.Dir.with_current dir create () |> R.join
>>= fun () -> OS.Cmd.run Cmd.(git % "fetch" % Fpath.to_string dir
% "gh-pages")
) () |> R.join
in
Topkg.Vcs.get ()
>>= fun repo -> Ok (git_for_repo repo)
>>= fun git ->
(match OS.Cmd.run Cmd.(git % "fetch" % remote % "gh-pages") with
| Ok () -> Ok ()
| Error _ -> create_empty_gh_pages git)
>>= fun () -> (OS.Cmd.run_out Cmd.(git % "rev-parse" % "FETCH_HEAD")
|> OS.Cmd.to_string)
>>= fun id -> OS.Cmd.run Cmd.(git % "branch" % "-f" % "gh-pages" % id)
>>= fun () -> Topkg_care.Delegate.publish_in_git_branch ~remote
~branch:"gh-pages" ~name ~version ~docdir ~dir
>>= fun () -> Ok 0
let repo_and_owner_of_uri uri =
let uri_error uri =
R.msgf "Could not derive owner and repo from opam dev-repo \
field value %a; expected the pattern \
$SCHEME://$HOST/$OWNER/$REPO[.$EXT][/$DIR]" String.dump uri
in
match Topkg_care.Text.split_uri ~rel:true uri with
| None -> Error (uri_error uri)
| Some (_, _, path) ->
if path = "" then Error (uri_error uri) else
match String.cut ~sep:"/" path with
| None -> Error (uri_error uri)
| Some (owner, path) ->
let repo = match String.cut ~sep:"/" path with
| None -> path
| Some (repo, path) -> repo
in
begin
Fpath.of_string repo
>>= fun repo -> Ok (owner, Fpath.(to_string @@ rem_ext repo))
end
|> R.reword_error_msg (fun _ -> uri_error uri)
let steal_opam_publish_github_auth () =
let opam = Cmd.(v "opam") in
let publish = Fpath.v "plugins/opam-publish" in
OS.Cmd.exists opam >>= function
| false -> Ok None
| true ->
OS.Cmd.(run_out Cmd.(opam % "config" % "var" % "root") |> to_string)
>>= fun root -> Fpath.of_string root
>>= fun root -> OS.Path.query Fpath.(root // publish / "$(user).token")
>>= function
| [] -> Ok None
| (file, defs) :: _ ->
OS.File.read file >>= fun token ->
Ok (Some (strf "%s:%s" (String.Map.get "user" defs) token))
let github_auth ~owner =
match
steal_opam_publish_github_auth ()
|> Logs.on_error_msg ~use:(fun _ -> None)
with
| Some auth -> auth
| None -> OS.Env.(value "TOPKG_GITHUB_AUTH" string ~absent:owner)
let create_release_json version msg =
let escape_for_json s =
let len = String.length s in
let max = len - 1 in
let rec escaped_len i l =
if i > max then l else
match String.get s i with
| '\\' | '\"' | '\n' | '\r' | '\t' -> escaped_len (i + 1) (l + 2)
| _ -> escaped_len (i + 1) (l + 1)
in
let escaped_len = escaped_len 0 0 in
if escaped_len = len then s else
let b = Bytes.create escaped_len in
let rec loop i k =
if i > max then Bytes.unsafe_to_string b else
match String.get s i with
| ('\\' | '\"' | '\n' | '\r' | '\t' as c) ->
Bytes.set b k '\\';
let c = match c with
| '\\' -> '\\' | '\"' -> '\"' | '\n' -> 'n' | '\r' -> 'r'
| '\t' -> 't'
| _ -> assert false
in
Bytes.set b (k + 1) c; loop (i + 1) (k + 2)
| c ->
Bytes.set b k c; loop (i + 1) (k + 1)
in
loop 0 0
in
strf "{ \"tag_name\" : \"%s\", \
\"body\" : \"%s\" }" (escape_for_json version) (escape_for_json msg)
let run_with_auth auth curl =
let auth = strf "-u %s" auth in
OS.Cmd.(in_string auth |> run_io curl)
let curl_create_release curl version msg owner repo =
FIXME this is retired .
let headers = String.cuts ~sep:"\r\n" resp in
try
let not_slash c = not (Char.equal '/' c) in
let loc = List.find (String.is_prefix ~affix:"Location:") headers in
let id = String.take ~rev:true ~sat:not_slash loc in
match String.to_int id with
| None -> R.error_msgf "Could not parse id from location header %S" loc
| Some id -> Ok id
with Not_found ->
R.error_msgf "Could not find release id in response:\n%s."
(String.concat ~sep:"\n" headers)
in
let data = create_release_json version msg in
let uri = strf "" owner repo in
let auth = github_auth ~owner in
let cmd = Cmd.(curl % "-D" % "-" % "--data" % data % uri) in
run_with_auth auth cmd |> OS.Cmd.to_string ~trim:false
>>= parse_release_id
let curl_upload_archive curl archive owner repo release_id =
let uri =
FIXME upload URI prefix should be taken from release creation
response
response *)
strf ""
owner repo release_id (Fpath.filename archive)
in
let auth = github_auth ~owner in
let data = Cmd.(v "--data-binary" % strf "@@%s" (Fpath.to_string archive)) in
let ctype = Cmd.(v "-H" % "Content-Type:application/x-tar") in
let cmd = Cmd.(curl %% ctype %% data % uri) in
OS.Cmd.(run_with_auth auth cmd |> to_stdout)
let publish_distrib uri name version msg archive =
let git_for_repo r = Cmd.of_list (Topkg.Cmd.to_list @@ Topkg.Vcs.cmd r) in
Fpath.of_string archive
>>= fun archive -> OS.Cmd.must_exist Cmd.(v "curl" % "-s" % "-S" % "-K" % "-")
>>= fun curl -> Topkg.Vcs.get ()
>>= fun repo -> Ok (git_for_repo repo)
>>= fun git -> OS.Cmd.run Cmd.(git % "push" % "--force" % "--tags")
>>= fun () -> repo_and_owner_of_uri uri
>>= fun (owner, repo) -> curl_create_release curl version msg owner repo
>>= fun id -> curl_upload_archive curl archive owner repo id
>>= fun () -> Ok 0
let unsupported = Ok 1
let publish = function
| "distrib" :: uri :: name :: version :: msg :: archive :: _ ->
publish_distrib uri name version msg archive
| "doc" :: uri :: name :: version :: msg :: docdir :: _ -> Ok 0
publish_doc_gh_pages uri name version
| "alt" :: kind :: uri :: name :: version :: msg :: archive :: _ ->
unsupported
| args ->
unsupported
let issue = function
| "list" :: uri :: _ -> unsupported
| "show" :: uri :: id :: _ -> unsupported
| "open" :: uri :: title :: descr :: _ -> unsupported
| "close" :: uri :: id :: msg :: _ -> unsupported
| args -> unsupported
let request = function
| "publish" :: args -> publish args
| "issue" :: args -> issue args
| args -> unsupported
let ipc_cmd args =
begin match args with
| verbosity :: req ->
Logs.level_of_string verbosity >>= fun logs_level ->
Topkg.Log.level_of_string verbosity >>= fun topkg_level ->
Topkg.Log.set_level topkg_level;
Logs.set_level logs_level;
request req
| [] ->
R.error_msg "malformed delegate request, verbosity is missing"
end
|> Logs.on_error_msg ~use:(fun () -> 2)
let main_cmd () = `Help (`Pager, None)
open Cmdliner
let ipc_cmd =
let doc = "Delegate request IPCs" in
let man =
[ `S "DESCRIPTION";
`P "The $(tname) command implements the topkg delegate protocol.
See topkg-delegate(7) and $(mname) $(b,--help) for more
information." ]
in
let args =
let doc = "IPC call arguments" in
Arg.(value (pos_all string [] & info [] ~doc ~docv:"ARG"))
in
let info = Term.info "ipc" ~doc ~man in
let t = Term.(const ipc_cmd $ args) in
(t, info)
let main_cmd =
let doc = "Topkg's toy GitHub delegate" in
let envs =
[ Term.env_info "TOPKG_GITHUB_AUTH" ~doc:"GitHub authentication data, see
the section GITHUB AUTHENTICATION for details." ]
in
let man_xrefs = [ `Tool "topkg" ] in
let man =
[ `S "DESCRIPTION";
`P "$(mname) is a toy topkg delegate for GitHub. It will disappear
once a decent GitHub delegate emerges. For more
information about topkg delegates, see topkg-delegate(7).";
`P "This delegate only supports the following delegations:";
`I ("$(b,topkg publish doc)",
"Commits and pushes the documentation to the gh-pages of the
source repository. The publication directory PATH in the branch is
determined by matching the opam 'doc' field against the
pattern SCHEME.");
`I ("$(b,topkg publish distrib)",
"This requires curl(1). Creates a GitHub release with the
version and publication message given to the delegate and
uploads the distribution archive as a release artefact. This
requires GitHub authentication, see section GITHUB AUTHENTICATION
for details. Also bear in mind that error reporting
(e.g. if the release already exists) is made of raw JSON
responses and thus very user-unfriendly.");
`S "GITHUB AUTHENTICATION";
`P "This being a toy delegate, you get toy authentication. Here
are the steps, in order, that are tried to authenticate you on
GitHub.";
`I ("1. opam-publish token stealing.",
"If you have already used opam-publish, an authorization token
was generated for it that is keept in
\\$(opam config var root)/plugins/opam-publish/\\$(user).token. If
such a file exists, \\$(user) and the corresponding token will
be used for authentication.");
`I ("2. Environment variable.",
"You scan specify the user and the password or token using
the TOPKG_GITHUB_AUTH environment variable with a username:token
value, see $(i,/).");
`I ("3. Cli prompt.",
"As a last resort the username used for authentication is
the name of the GitHub owner of the repo (determined from
the $(i,DISTRIB_URI) URI, itself determined from the 'dev-repo'
field of the opam file, see topkg-delegate(7) and topkg's API
documentation for more details); in this case your GitHub
password will be prompted twice on the command line by curl (ugh).")]
in
let version = "%%VERSION%%" in
Term.(ret (const main_cmd $ const ())),
Term.info "toy-github-topkg-delegate" ~version ~doc ~envs ~man ~man_xrefs
let main () =
Topkg.Private.disable_main ();
match Term.eval_choice main_cmd [ipc_cmd] with
| `Error _ -> exit 3
| `Ok ret -> exit ret
| _ -> exit 0
let () = main ()
|
414b40ee57b68f32a10ef614113af9464557eef13f6145170ca1bf6b2dba7755 | keera-studios/keera-posture | Detector.hs | -- | Determine whether the detector is executing
module Model.ReactiveModel.Detector where
-- Internal imports
import Model.Model
import Model.ReactiveModel.ReactiveModelInternals
-- | Set whether the detector should be running
setDetector :: ReactiveModel -> Bool -> ReactiveModel
setDetector rm n
-- Nothing has changed
| getDetector rm == n = rm
-- Ok
| otherwise = rm `onBasicModel` (\b -> b { detectorRunning = n })
-- | Get whether the detector is running
getDetector :: ReactiveModel -> Bool
getDetector = detectorRunning . basicModel
| null | https://raw.githubusercontent.com/keera-studios/keera-posture/9de5095a4a10c0cb54f6191f1a448e96645193ae/src/Model/ReactiveModel/Detector.hs | haskell | | Determine whether the detector is executing
Internal imports
| Set whether the detector should be running
Nothing has changed
Ok
| Get whether the detector is running | module Model.ReactiveModel.Detector where
import Model.Model
import Model.ReactiveModel.ReactiveModelInternals
setDetector :: ReactiveModel -> Bool -> ReactiveModel
setDetector rm n
| getDetector rm == n = rm
| otherwise = rm `onBasicModel` (\b -> b { detectorRunning = n })
getDetector :: ReactiveModel -> Bool
getDetector = detectorRunning . basicModel
|
1f331d9b320c794af78e3be2d827147ce47b1d19b0a82cab139bc8a28b7beab9 | domainlanguage/time-count | a_representations.clj | (ns time-count.explainer.a-representations
(:require
[time-count.iso8601 :refer [to-iso from-iso t-> t->>]]
[time-count.metajoda :refer [map->MetaJodaTime from-place-values]]
[time-count.core :refer [place-values]]
[midje.sweet :refer :all])
(:import [org.joda.time DateTime LocalDateTime DateTimeZone]))
Each time value in time - count must have at least two representations ,
a string representation , usually from a subset of ISO8601 ,
;; and a representation that implements the basic protocols
;; in particular time-count.core/CountableTime.
;;
;; MetaJodaTime (the provided default implementation) uses
a few types of DateTime from the Joda Time library ,
;; combined with some metadata about nested scales.
;; Some other representation could be used, so long as it could
implement CountableTime and the other interfaces MetaJodaTime does .
(fact "The default representation of time values (for doing computations) is a record containing Joda Time DateTime values with added metadata for nested scales."
(from-iso "2017-04-09")
=> (map->MetaJodaTime
{:dt (LocalDateTime. 2017 4 9 0 0 0 0)
:nesting [:day :month :year]})
(from-iso "2017-12-13T11:17")
=> (map->MetaJodaTime
{:dt (LocalDateTime. 2017 12 13 11 17 0 0)
:nesting [:minute :hour :day :month :year]})
(from-iso "2017-12-13T11:17-07:00")
=> (map->MetaJodaTime
{:dt (DateTime. 2017 12 13 11 17 0 0
(DateTimeZone/forOffsetHours -7))
:nesting [:minute :hour :day :month :year]}))
(fact "All time values in time-count have a canonical string representation, based closely on ISO 8601, fully reversable from the MetaJoda representation."
(from-iso "2017-04-09")
=> (map->MetaJodaTime
{:dt (LocalDateTime. 2017 4 9 0 0 0 0)
:nesting [:day :month :year]})
(to-iso (map->MetaJodaTime
{:dt (LocalDateTime. 2017 4 9 0 0 0 0)
:nesting [:day :month :year]}))
=> "2017-04-09")
(fact "A third representation is the place-values vector."
place - values is currently a bit undeveloped , but it could be completely reversible with the other two , and maybe should be .
(-> "2017-04-09" from-iso place-values)
=> [[:day 9] [:month 4] [:year 2017]]
(-> [[:day 9] [:month 4] [:year 2017]] from-place-values to-iso)
=> "2017-04-09")
(fact "For convenience, two special threading macros allow use of ISO 8601 string representation with other operations."
(t-> "2017-12-13" identity) ; Where, instead of identity, you could have any time operation.
=> (-> "2017-12-13" from-iso identity to-iso)
(t->> "2017-12-13" identity)
=> (->> "2017-12-13" from-iso identity to-iso))
| null | https://raw.githubusercontent.com/domainlanguage/time-count/f2d26a1e5fe1a137f5d1a01be295dde0385cc31e/explainers/time_count/explainer/a_representations.clj | clojure | and a representation that implements the basic protocols
in particular time-count.core/CountableTime.
MetaJodaTime (the provided default implementation) uses
combined with some metadata about nested scales.
Some other representation could be used, so long as it could
Where, instead of identity, you could have any time operation. | (ns time-count.explainer.a-representations
(:require
[time-count.iso8601 :refer [to-iso from-iso t-> t->>]]
[time-count.metajoda :refer [map->MetaJodaTime from-place-values]]
[time-count.core :refer [place-values]]
[midje.sweet :refer :all])
(:import [org.joda.time DateTime LocalDateTime DateTimeZone]))
Each time value in time - count must have at least two representations ,
a string representation , usually from a subset of ISO8601 ,
a few types of DateTime from the Joda Time library ,
implement CountableTime and the other interfaces MetaJodaTime does .
(fact "The default representation of time values (for doing computations) is a record containing Joda Time DateTime values with added metadata for nested scales."
(from-iso "2017-04-09")
=> (map->MetaJodaTime
{:dt (LocalDateTime. 2017 4 9 0 0 0 0)
:nesting [:day :month :year]})
(from-iso "2017-12-13T11:17")
=> (map->MetaJodaTime
{:dt (LocalDateTime. 2017 12 13 11 17 0 0)
:nesting [:minute :hour :day :month :year]})
(from-iso "2017-12-13T11:17-07:00")
=> (map->MetaJodaTime
{:dt (DateTime. 2017 12 13 11 17 0 0
(DateTimeZone/forOffsetHours -7))
:nesting [:minute :hour :day :month :year]}))
(fact "All time values in time-count have a canonical string representation, based closely on ISO 8601, fully reversable from the MetaJoda representation."
(from-iso "2017-04-09")
=> (map->MetaJodaTime
{:dt (LocalDateTime. 2017 4 9 0 0 0 0)
:nesting [:day :month :year]})
(to-iso (map->MetaJodaTime
{:dt (LocalDateTime. 2017 4 9 0 0 0 0)
:nesting [:day :month :year]}))
=> "2017-04-09")
(fact "A third representation is the place-values vector."
place - values is currently a bit undeveloped , but it could be completely reversible with the other two , and maybe should be .
(-> "2017-04-09" from-iso place-values)
=> [[:day 9] [:month 4] [:year 2017]]
(-> [[:day 9] [:month 4] [:year 2017]] from-place-values to-iso)
=> "2017-04-09")
(fact "For convenience, two special threading macros allow use of ISO 8601 string representation with other operations."
=> (-> "2017-12-13" from-iso identity to-iso)
(t->> "2017-12-13" identity)
=> (->> "2017-12-13" from-iso identity to-iso))
|
40bff7ea970caa436451f4a0f6e13ce27ff9409f4b13db796038cc64fce98242 | input-output-hk/plutus | SkewBinarySlab.hs | -- editorconfig-checker-disable-file
{-# LANGUAGE BangPatterns #-}
# LANGUAGE LambdaCase #
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE TypeFamilies #-}
# LANGUAGE UndecidableInstances #
# LANGUAGE ViewPatterns #
module Data.RandomAccessList.SkewBinarySlab
( RAList(Cons,Nil)
, safeIndexZero
, unsafeIndexZero
, Data.RandomAccessList.SkewBinarySlab.null
, uncons
, consSlab
) where
import Data.Bits (unsafeShiftR)
import Data.Vector.NonEmpty qualified as NEV
import Data.Word
import GHC.Exts
import Data.RandomAccessList.Class qualified as RAL
Note [ Skew binary slab lists ]
This module implements a very similar structure to the one in ' SkewBinary ' , but
instead of storing a single value at each node , it instead stores potentially many
values .
The advantage of this is that we can rapidly cons on a collection of values , and that
if we do this regularly then the size of the structure will grow more slowly than
the number of values stored , giving us a discount on our lookup performance ( which
depends on the size of the structure ! ) .
The disadvantages are several :
- It 's more complex .
- We need another intermediary type , which means more indirect lookups .
- We need to store another size in the spine of the list * and * in the tree nodes ,
since a ) the structure size no longer tells us the element count , and b ) as we
traverse a tree it 's no longer true that the size on each side is always half of
the overall size .
Benchmarking suggests that it * is * slightly slower than the normal version
on the non - slab - based workflows , but it 's much faster on the slab - based workflows .
So it 's not an unqualified win , but it may be better in some cases .
This module implements a very similar structure to the one in 'SkewBinary', but
instead of storing a single value at each node, it instead stores potentially many
values.
The advantage of this is that we can rapidly cons on a collection of values, and that
if we do this regularly then the size of the structure will grow more slowly than
the number of values stored, giving us a discount on our lookup performance (which
depends on the size of the structure!).
The disadvantages are several:
- It's more complex.
- We need another intermediary type, which means more indirect lookups.
- We need to store another size in the spine of the list *and* in the tree nodes,
since a) the structure size no longer tells us the element count, and b) as we
traverse a tree it's no longer true that the size on each side is always half of
the overall size.
Benchmarking suggests that it *is* slightly slower than the normal version
on the non-slab-based workflows, but it's much faster on the slab-based workflows.
So it's not an unqualified win, but it may be better in some cases.
-}
Why not just store ` NonEmptyVector`s and add singleton values by making singleton
-- vectors? The answer is that using only vectors makes simple consing significantly
-- slower, and doesn't obviously make the other code paths faster.
-- | The values that can be stored in a node. Either a single value, or a non-empty vector of values.
data Values a = One a | Many {-# UNPACK #-} !(NEV.NonEmptyVector a)
deriving stock (Eq, Show)
valuesCount :: Values a -> Word64
valuesCount (One _) = 1
valuesCount (Many v) = fromIntegral $ NEV.length v
unsafeIndexValues :: Word64 -> Values a -> a
unsafeIndexValues 0 (One a) = a
unsafeIndexValues _ (One _) = error "out of bounds"
unsafeIndexValues i (Many v) = v NEV.! fromIntegral i
safeIndexValues :: Word64 -> Values a -> Maybe a
safeIndexValues 0 (One a) = Just a
safeIndexValues _ (One _) = Nothing
safeIndexValues i (Many v) = v NEV.!? fromIntegral i
O(1 )
unconsValues :: Values a -> RAList a -> (a, RAList a)
unconsValues (One x) l = (x, l)
unconsValues (Many v) l =
-- unconsing vectors is actually O(1), which is important!
let (x, xs) = NEV.uncons v
remaining = case NEV.fromVector xs of
Just v' -> consSlab v' l
Nothing -> l
in (x, remaining)
-- | A complete binary tree.
data Tree a = Leaf !(Values a)
-- Nodes track the number of elements in the tree (including those in the node)
| Node {-# UNPACK #-} !Word64 !(Values a) !(Tree a) !(Tree a)
deriving stock (Eq, Show)
treeCount :: Tree a -> Word64
treeCount (Leaf v) = valuesCount v
treeCount (Node s _ _ _) = s
unsafeIndexTree :: Word64 -> Tree a -> a
unsafeIndexTree offset (Leaf v) = unsafeIndexValues offset v
unsafeIndexTree offset (Node _ v t1 t2) =
let nCount = valuesCount v
in if offset < nCount
then unsafeIndexValues offset v
else
let offset' = offset - nCount
lCount = treeCount t1
in if offset' < lCount
then unsafeIndexTree offset' t1
else unsafeIndexTree (offset' - lCount) t2
safeIndexTree :: Word64 -> Tree a -> Maybe a
safeIndexTree offset (Leaf v) = safeIndexValues offset v
safeIndexTree offset (Node _ v t1 t2) =
let nCount = valuesCount v
in if offset < nCount
then safeIndexValues offset v
else
let offset' = offset - nCount
lCount = treeCount t1
in if offset' < lCount
then safeIndexTree offset' t1
else safeIndexTree (offset' - lCount) t2
-- | A strict list of complete binary trees accompanied by their node size.
-- The trees appear in >=-node size order.
Note : this list is strict in its spine , unlike the Prelude list
data RAList a = BHead
{-# UNPACK #-} !Word64 -- ^ the number of nodes in the head tree
!(Tree a) -- ^ the head tree
!(RAList a) -- ^ the tail trees
| Nil
deriving stock (Show)
deriving (IsList) via RAL.AsRAL (RAList a)
-- Can't use the derived instance because it's no longer the case that lists with
-- the same contents have to have the same structure! Could definitely write a
-- faster implementation if it matters, though.
instance Eq a => Eq (RAList a) where
l == l' = toList l == toList l'
# INLINABLE null #
null :: RAList a -> Bool
null Nil = True
null _ = False
{-# complete Cons, Nil #-}
# complete BHead , Nil #
-- /O(1)/
pattern Cons :: a -> RAList a -> RAList a
pattern Cons x xs <- (uncons -> Just (x, xs)) where
Cons x xs = cons x xs
-- O(1) worst-case
consValues :: Values a -> RAList a -> RAList a
consValues x l = case l of
(BHead w1 t1 (BHead w2 t2 ts')) | w1 == w2 ->
let ts = w1 + w2 + 1
ec = treeCount t1 + treeCount t2 + valuesCount x
in BHead ts (Node ec x t1 t2) ts'
ts -> BHead 1 (Leaf x) ts
-- O(1) worst-case
# INLINE cons #
cons :: a -> RAList a -> RAList a
cons x = consValues (One x)
-- O(1) worst-case
# INLINE consSlab #
consSlab :: NEV.NonEmptyVector a -> RAList a -> RAList a
consSlab x = consValues (Many x)
-- /O(1)/
-- 'uncons' is a bit funny: if we uncons a vector of values
-- initially, we will then uncons the front of *that* and possibly
cons the rest back on ! Fortunately all these operations are O(1 ) ,
-- so it adds up to being okay.
uncons :: RAList a -> Maybe (a, RAList a)
uncons = \case
BHead _ (Leaf v) ts -> Just $ unconsValues v ts
BHead _ (Node treeSize x t1 t2) ts ->
-- probably faster than `div w 2`
let halfSize = unsafeShiftR treeSize 1
split the node in two )
in Just $ unconsValues x (BHead halfSize t1 $ BHead halfSize t2 ts)
Nil -> Nothing
-- 0-based
unsafeIndexZero :: RAList a -> Word64 -> a
unsafeIndexZero Nil _ = error "out of bounds"
unsafeIndexZero (BHead _ t ts) !i =
let tCount = treeCount t
in if i < tCount
then unsafeIndexTree i t
else unsafeIndexZero ts (i - tCount)
-- 0-based
safeIndexZero :: RAList a -> Word64 -> Maybe a
safeIndexZero Nil _ = Nothing
safeIndexZero (BHead _ t ts) !i =
let tCount = treeCount t
in if i < tCount
then safeIndexTree i t
else safeIndexZero ts (i - tCount)
instance RAL.RandomAccessList (RAList a) where
type Element (RAList a) = a
# INLINABLE empty #
empty = Nil
# INLINABLE cons #
cons = Cons
# INLINABLE uncons #
uncons = uncons
# INLINABLE length #
length Nil = 0
length (BHead _ t tl) = treeCount t + RAL.length tl
# INLINABLE consSlab #
consSlab = consSlab
# INLINABLE indexZero #
indexZero l i = safeIndexZero l i
# INLINABLE unsafeIndexZero #
unsafeIndexZero l i = unsafeIndexZero l i
| null | https://raw.githubusercontent.com/input-output-hk/plutus/c8d4364d0e639fef4d5b93f7d6c0912d992b54f9/plutus-core/index-envs/src/Data/RandomAccessList/SkewBinarySlab.hs | haskell | editorconfig-checker-disable-file
# LANGUAGE BangPatterns #
# LANGUAGE PatternSynonyms #
# LANGUAGE TypeFamilies #
vectors? The answer is that using only vectors makes simple consing significantly
slower, and doesn't obviously make the other code paths faster.
| The values that can be stored in a node. Either a single value, or a non-empty vector of values.
# UNPACK #
unconsing vectors is actually O(1), which is important!
| A complete binary tree.
Nodes track the number of elements in the tree (including those in the node)
# UNPACK #
| A strict list of complete binary trees accompanied by their node size.
The trees appear in >=-node size order.
# UNPACK #
^ the number of nodes in the head tree
^ the head tree
^ the tail trees
Can't use the derived instance because it's no longer the case that lists with
the same contents have to have the same structure! Could definitely write a
faster implementation if it matters, though.
# complete Cons, Nil #
/O(1)/
O(1) worst-case
O(1) worst-case
O(1) worst-case
/O(1)/
'uncons' is a bit funny: if we uncons a vector of values
initially, we will then uncons the front of *that* and possibly
so it adds up to being okay.
probably faster than `div w 2`
0-based
0-based | # LANGUAGE LambdaCase #
# LANGUAGE UndecidableInstances #
# LANGUAGE ViewPatterns #
module Data.RandomAccessList.SkewBinarySlab
( RAList(Cons,Nil)
, safeIndexZero
, unsafeIndexZero
, Data.RandomAccessList.SkewBinarySlab.null
, uncons
, consSlab
) where
import Data.Bits (unsafeShiftR)
import Data.Vector.NonEmpty qualified as NEV
import Data.Word
import GHC.Exts
import Data.RandomAccessList.Class qualified as RAL
Note [ Skew binary slab lists ]
This module implements a very similar structure to the one in ' SkewBinary ' , but
instead of storing a single value at each node , it instead stores potentially many
values .
The advantage of this is that we can rapidly cons on a collection of values , and that
if we do this regularly then the size of the structure will grow more slowly than
the number of values stored , giving us a discount on our lookup performance ( which
depends on the size of the structure ! ) .
The disadvantages are several :
- It 's more complex .
- We need another intermediary type , which means more indirect lookups .
- We need to store another size in the spine of the list * and * in the tree nodes ,
since a ) the structure size no longer tells us the element count , and b ) as we
traverse a tree it 's no longer true that the size on each side is always half of
the overall size .
Benchmarking suggests that it * is * slightly slower than the normal version
on the non - slab - based workflows , but it 's much faster on the slab - based workflows .
So it 's not an unqualified win , but it may be better in some cases .
This module implements a very similar structure to the one in 'SkewBinary', but
instead of storing a single value at each node, it instead stores potentially many
values.
The advantage of this is that we can rapidly cons on a collection of values, and that
if we do this regularly then the size of the structure will grow more slowly than
the number of values stored, giving us a discount on our lookup performance (which
depends on the size of the structure!).
The disadvantages are several:
- It's more complex.
- We need another intermediary type, which means more indirect lookups.
- We need to store another size in the spine of the list *and* in the tree nodes,
since a) the structure size no longer tells us the element count, and b) as we
traverse a tree it's no longer true that the size on each side is always half of
the overall size.
Benchmarking suggests that it *is* slightly slower than the normal version
on the non-slab-based workflows, but it's much faster on the slab-based workflows.
So it's not an unqualified win, but it may be better in some cases.
-}
Why not just store ` NonEmptyVector`s and add singleton values by making singleton
deriving stock (Eq, Show)
valuesCount :: Values a -> Word64
valuesCount (One _) = 1
valuesCount (Many v) = fromIntegral $ NEV.length v
unsafeIndexValues :: Word64 -> Values a -> a
unsafeIndexValues 0 (One a) = a
unsafeIndexValues _ (One _) = error "out of bounds"
unsafeIndexValues i (Many v) = v NEV.! fromIntegral i
safeIndexValues :: Word64 -> Values a -> Maybe a
safeIndexValues 0 (One a) = Just a
safeIndexValues _ (One _) = Nothing
safeIndexValues i (Many v) = v NEV.!? fromIntegral i
O(1 )
unconsValues :: Values a -> RAList a -> (a, RAList a)
unconsValues (One x) l = (x, l)
unconsValues (Many v) l =
let (x, xs) = NEV.uncons v
remaining = case NEV.fromVector xs of
Just v' -> consSlab v' l
Nothing -> l
in (x, remaining)
data Tree a = Leaf !(Values a)
deriving stock (Eq, Show)
treeCount :: Tree a -> Word64
treeCount (Leaf v) = valuesCount v
treeCount (Node s _ _ _) = s
unsafeIndexTree :: Word64 -> Tree a -> a
unsafeIndexTree offset (Leaf v) = unsafeIndexValues offset v
unsafeIndexTree offset (Node _ v t1 t2) =
let nCount = valuesCount v
in if offset < nCount
then unsafeIndexValues offset v
else
let offset' = offset - nCount
lCount = treeCount t1
in if offset' < lCount
then unsafeIndexTree offset' t1
else unsafeIndexTree (offset' - lCount) t2
safeIndexTree :: Word64 -> Tree a -> Maybe a
safeIndexTree offset (Leaf v) = safeIndexValues offset v
safeIndexTree offset (Node _ v t1 t2) =
let nCount = valuesCount v
in if offset < nCount
then safeIndexValues offset v
else
let offset' = offset - nCount
lCount = treeCount t1
in if offset' < lCount
then safeIndexTree offset' t1
else safeIndexTree (offset' - lCount) t2
Note : this list is strict in its spine , unlike the Prelude list
data RAList a = BHead
| Nil
deriving stock (Show)
deriving (IsList) via RAL.AsRAL (RAList a)
instance Eq a => Eq (RAList a) where
l == l' = toList l == toList l'
# INLINABLE null #
null :: RAList a -> Bool
null Nil = True
null _ = False
# complete BHead , Nil #
pattern Cons :: a -> RAList a -> RAList a
pattern Cons x xs <- (uncons -> Just (x, xs)) where
Cons x xs = cons x xs
consValues :: Values a -> RAList a -> RAList a
consValues x l = case l of
(BHead w1 t1 (BHead w2 t2 ts')) | w1 == w2 ->
let ts = w1 + w2 + 1
ec = treeCount t1 + treeCount t2 + valuesCount x
in BHead ts (Node ec x t1 t2) ts'
ts -> BHead 1 (Leaf x) ts
# INLINE cons #
cons :: a -> RAList a -> RAList a
cons x = consValues (One x)
# INLINE consSlab #
consSlab :: NEV.NonEmptyVector a -> RAList a -> RAList a
consSlab x = consValues (Many x)
cons the rest back on ! Fortunately all these operations are O(1 ) ,
uncons :: RAList a -> Maybe (a, RAList a)
uncons = \case
BHead _ (Leaf v) ts -> Just $ unconsValues v ts
BHead _ (Node treeSize x t1 t2) ts ->
let halfSize = unsafeShiftR treeSize 1
split the node in two )
in Just $ unconsValues x (BHead halfSize t1 $ BHead halfSize t2 ts)
Nil -> Nothing
unsafeIndexZero :: RAList a -> Word64 -> a
unsafeIndexZero Nil _ = error "out of bounds"
unsafeIndexZero (BHead _ t ts) !i =
let tCount = treeCount t
in if i < tCount
then unsafeIndexTree i t
else unsafeIndexZero ts (i - tCount)
safeIndexZero :: RAList a -> Word64 -> Maybe a
safeIndexZero Nil _ = Nothing
safeIndexZero (BHead _ t ts) !i =
let tCount = treeCount t
in if i < tCount
then safeIndexTree i t
else safeIndexZero ts (i - tCount)
instance RAL.RandomAccessList (RAList a) where
type Element (RAList a) = a
# INLINABLE empty #
empty = Nil
# INLINABLE cons #
cons = Cons
# INLINABLE uncons #
uncons = uncons
# INLINABLE length #
length Nil = 0
length (BHead _ t tl) = treeCount t + RAL.length tl
# INLINABLE consSlab #
consSlab = consSlab
# INLINABLE indexZero #
indexZero l i = safeIndexZero l i
# INLINABLE unsafeIndexZero #
unsafeIndexZero l i = unsafeIndexZero l i
|
9ad4f2ef10fc6a2ac41daabd22137090096354a0ebfbe516522451a7b5deaa12 | shnarazk/mios | Stat.hs | # LANGUAGE
RecordWildCards
, TupleSections
#
RecordWildCards
, TupleSections
#-}
{-# LANGUAGE Safe #-}
module SAT.Mios.Util.Stat
( Dumpable (..)
, MiosDump (..)
, MergedDump (..)
, merge
, parseBy
, fromDump
, fromStat
, fromCSV
, fromMergedCSV
, toMerge
)
where
import Data.Bits
import qualified Data.IORef as IORef
import Data.List
import Numeric (showFFloat)
import Text.ParserCombinators.ReadP
import SAT.Mios.Types ( MiosConfiguration(..)
, CNFDescription(..)
, DumpTag(..)
, MiosStats(..)
, QuadLearntC(..)
, MiosDump(..)
)
logBase2 :: Int -> Int
logBase2 x
| x <= 0 = 0
| otherwise =finiteBitSize x - 1 - countLeadingZeros x
showf :: Double -> String
showf x = showFFloat (Just 2) x ""
(+.) :: Either Double Int -> Either Double Int -> Either Double Int
(Right x) +. (Right y) = Right (x + y)
(Right x) +. (Left y) = Left (fromIntegral x + y)
(Left x) +. (Right y) = Left (x + fromIntegral y)
(Left x) +. (Left y) = Left (x + y)
(/.) :: Either Double Int -> Int -> Either Double Int
_ /. 0 = Left 0
(Right x) /. n = Left $ fromIntegral x / fromIntegral n
(Left x) /. n = Left $ x / fromIntegral n
class Monoid s => Dumpable s where
-- higher level functions over Monoid methods
divideBy :: Int -> s -> s
divideBy _ _ = mempty
average :: [s] -> s
average l = divideBy (length l) (mconcat l)
unaccumulate :: s -> s
unaccumulate l = error "default method of unaccumulate"
--
header :: (String, s)
header = ("undefined", mempty)
toCSV :: s -> String
toCSV _ = "undefined:"
# MINIMAL #
instance Monoid CNFDescription where
mempty = CNFDescription (Just "") 0 0
mappend _ _ = mempty
mconcat _ = mempty
instance Dumpable CNFDescription where
header = ("file, nvar, nclause", mempty)
toCSV (CNFDescription (Just f) v c) = intercalate "," [show f, show v, show c]
toCSV (CNFDescription Nothing v c) = intercalate "," ["", show v, show c]
instance Monoid MiosConfiguration where
mempty = MiosConfiguration 0 0 0 0 0 0
mappend a b
| a == b = a
| otherwise = error "invalid configurations to mappend"
mconcat [] = error "invalid configurations to sconcat"
mconcat (a:l)
| all (a ==) l = a
| otherwise = error "invalid configurations to mconcat"
instance Dumpable MiosConfiguration where
header = (intercalate "," ["vDecay", "propLim", "par1", "par2", "par3", "par4"], mempty)
toCSV MiosConfiguration{..} =
intercalate "," [ show variableDecayRate
, show (logBase2 propagationLimit)
, showf gpParameter1
, showf gpParameter2
, show extraParameter3
, show extraParameter4]
average = mconcat
instance Monoid MiosStats where
mempty = MiosStats $ map (, Right 0) [minBound :: DumpTag .. maxBound]
mappend (MiosStats x) (MiosStats y) = MiosStats $ zipWith add x y
where
add (k1, v1) (k2, v2)
| k1 == k2 = (k1, v1 +. v2)
| otherwise = error "unbalanced stats"
instance Dumpable MiosStats where
header = ( intercalate "," $ map toHeader [minBound :: DumpTag .. maxBound]
, mempty
)
where
toHeader TerminateS = "result"
toHeader BackjumpS = "backjump"
toHeader RestartS = "restart"
toHeader PropagationS = "propagation"
toHeader ConflictS = "conflict"
toHeader LearntS = "learnt"
toHeader LearningRateS = "lRate"
toHeader ExtraS = "extra"
toCSV (MiosStats l) = intercalate "," $ map val [minBound :: DumpTag .. maxBound]
where
val k = case lookup k l of
Just (Right v) -> show v
Just (Left v) -> showf v
Nothing -> "0"
divideBy n (MiosStats x) = MiosStats $ map (\(k,v) -> (k, v /. n)) x
instance Monoid QuadLearntC where
mempty = QuadLearntC (0, 0, 0) (0, 0, 0) (0, 0, 0) (0, 0, 0)
mappend (QuadLearntC a1 b1 c1 d1) (QuadLearntC a2 b2 c2 d2) = QuadLearntC (a1 +.+ a2) (b1 +.+ b2) (c1 +.+ c2) (d1 +.+ d2)
where (a1, a2, a3) +.+ (b1, b2, b3) = (a1 + b1, a2 + b2, a3 + b3)
instance Dumpable QuadLearntC where
header = ("bl0,bl1,bl2,blx,ll0,ll1,ll2,llx,sl0,sl1,sl2,slx", mempty)
toCSV (QuadLearntC (b0, l0, s0) (b1, l1, s1) (b2, l2, s2) (b3, l3, s3)) = intercalate "," $ map showf [b0, b1, b2, b3, l0, l1, l2, l3, s0, s1, s2, s3]
divideBy n (QuadLearntC a b c d) = QuadLearntC (a /./ n') (b /./ n') (c /./ n') (d /./ n')
where
n' = fromIntegral n
(a1, a2, a3) /./ n = (a1 / n, a2 / n, a3 / n)
instance Monoid MiosDump where
mempty = MiosDump ("solver", mempty) mempty mempty mempty
mappend _ _ = error "MiosDump is not allowed to mappend; convert to MergedDump"
instance Dumpable MiosDump where
header = ( intercalate "," [ "solver"
, fst (header :: (String, MiosConfiguration))
, fst (header :: (String, CNFDescription))
, fst (header :: (String, MiosStats))
, fst (header :: (String, QuadLearntC))
]
, mempty
)
toCSV (MiosDump (i, m) d s q) = intercalate "," [show i, toCSV m, toCSV d, toCSV s, toCSV q]
average _ = error "MiosDump is not alloved to average; convert to MergedDump"
divideBy _ _ = error "MiosDump is not alloved to divideBy; convert to MergedDump"
data MergedDump =
MergedDump { _mergedConf :: (String, MiosConfiguration) -- solver configuration = key
, _merged :: Int -- number of problems
, _mergedStat :: MiosStats
, _mergedLCat :: QuadLearntC
}
deriving (Eq, Ord, Read)
instance Monoid MergedDump where
mempty = MergedDump ("", mempty) 0 mempty mempty
mappend (MergedDump m n s l) (MergedDump m' n' s' l')
| m == m' = MergedDump m (n + n') (mappend s s') (mappend l l')
| otherwise = error "invalid mergeddumps to mappend"
mconcat [] = mempty
mconcat (a:l) = foldl mappend a l
instance Dumpable MergedDump where
header = (intercalate "," ["solver"
, fst (header :: (String, MiosConfiguration))
, "problem"
, fst (header :: (String, MiosStats))
, fst (header :: (String, QuadLearntC))
]
, mempty
)
toCSV (MergedDump (i, m) n s q) = intercalate "," [show i, toCSV m, show n, toCSV s, toCSV q]
divideBy n (MergedDump (i, m) k s q) = MergedDump (i, m) k (divideBy n s) (divideBy n q)
unaccumulate d = divideBy (_merged d) d
toMerge :: MiosDump -> MergedDump
toMerge (MiosDump m _ s c) = MergedDump m 1 s c
merge :: [MiosDump] -> IO [MergedDump]
merge l = do
let aborted ::MiosDump -> Bool
aborted (MiosDump _ _ (MiosStats vals) _)
| Just (Right 0) <- lookup TerminateS vals = True
| Nothing <- lookup TerminateS vals = True
| otherwise = False
inject :: [((String, MiosConfiguration), IORef.IORef MergedDump)] -> [MiosDump] -> IO [MergedDump]
inject h [] = map unaccumulate <$> mapM (IORef.readIORef . snd) h
inject h (d:ds)
| aborted d = inject h ds
| Just x <- lookup k h = do
IORef.modifyIORef x (mappend (toMerge d))
inject h ds
| otherwise = do
n <- (k ,) <$> IORef.newIORef (toMerge d)
inject (n : h) ds
where k = _dumpedConf d
inject [] l
-- |
parseBy :: [(String, ReadP s)] -> String -> [s]
parseBy fs l'
| null l = []
| Just (h, f) <- find ((head l ==) . fst) fs = map (fst. head . readP_to_S f) $ filter (h /=) (tail l)
| Just (_, f) <- find (("" ==) . fst) fs = map (fst. head . readP_to_S f) l
| otherwise = error $ "ABORT\nheader: \n" ++ (head l) ++ "\nis not matches to\n" ++ intercalate "\n" (map fst fs)
where
l = filter (('#' /=) . head) $ lines l'
-- | read a token separated by commas
getToken :: String -> ReadP String
getToken s = string s *> skipSpaces *> (munch (',' /=) +++ look)
-- | read a string separated by commas
getQuote :: String -> ReadP String
getQuote s = string s *> skipSpaces *> string "\"" *> munch ('\"' /=) <* string "\""
fromDump :: ReadP MiosDump
fromDump = read <$> munch ('\n' /=)
fromStat :: ReadP MiosDump
fromStat = do
so <- getQuote "Solver:"
va <- getToken ", VarDecayRate:"
pr <- getToken ", PropagationLimit:"
fl <- getQuote ", File:"
nv <- getToken ", NumOfVariables:"
nc <- getToken ", NumOfClauses:"
sa <- getToken ", SatisfiabilityValue:"
np <- getToken ", NumOfPropagations:"
cf <- getToken ", NumOfConflicts:"
nl <- getToken ", NumOfLearnts:"
nb <- getToken ", NumOfBackjumps:"
nr <- getToken ", NumOfRestarts:"
l1 <- getToken ", NumOfBirthLevel1:"
l2 <- getToken ", NumOfBirthLevel2:"
l3 <- getToken ", NumOfBirthLevelX:"
let nl' = read nl
n1' = read l1
n2' = read l2
n3' = read l3
di :: Double -> Double
di n = if nl' == 0 then 0 else n / nl'
return $
MiosDump
(so, MiosConfiguration (read va) (read pr) 0 0 0 0)
(CNFDescription (Just fl) (read nv) (read nc))
(MiosStats (sort
[ (TerminateS, Right (read sa)), (BackjumpS, Right (read nb)), (RestartS, Right (read nr))
, (PropagationS, Right (read np)), (ConflictS, Right (read cf)), (LearntS, Right (read nl))
, (LearningRateS, Left (read nl / read nb)), (ExtraS, Left 0)
]))
(QuadLearntC (di (nl' - n1' - n2' - n3'), 0, 0) (di n1', 0, 0) (di n2', 0, 0) (di n3', 0, 0))
fromCSV :: ReadP MiosDump
fromCSV = do
so <- getQuote "" -- solver
vd <- getToken "," -- vardecayrace
pl <- getToken "," -- propagationlimit
p1 <- getToken "," -- parameter1
p2 <- getToken "," -- parameter2
parameter3
parameter4
fl <- getQuote "," -- file
nv <- getToken "," -- nvar
nclause
sa <- getToken "," -- satisfiability
np <- getToken "," -- npropagation
nconflicts
nl <- getToken "," -- nlearning
nb <- getToken "," -- nbackjump
nr <- getToken "," -- nrestart
la <- getToken "," -- learningrate
ev <- getToken "," -- extravalue
b0 <- getToken "," -- born level 0
born level 1
born level 2
b3 <- getToken "," -- born level +
l0 <- getToken "," -- living level 0
living level 1
living level 2
l3 <- getToken "," -- living level +
s0 <- getToken "," -- suvive level 0
suvive level 1
suvive level 2
s3 <- getToken "," -- suvive level +
return $
MiosDump
(so, MiosConfiguration (read vd) (2 ^ ((read pl) :: Int)) (read p1) (read p2) (read p3) (read p4))
(CNFDescription (Just fl) (read nv) (read nc))
(MiosStats (sort
[ (TerminateS, Right (read sa))
, (PropagationS, Right (read np)), (ConflictS, Right (read cf)), (LearntS, Right (read nl))
, (BackjumpS, Right (read nb)), (RestartS, Right (read nr))
, (LearningRateS, Left (read la)), (ExtraS, Left (read ev))
]))
(QuadLearntC (read b0, read l0, read s0) (read b1, read l1, read s1) (read b2, read l2, read s2) (read b3, read l3, read s3))
fromMergedCSV :: ReadP MergedDump
fromMergedCSV = do
so <- getQuote "" -- solver
vd <- getToken "," -- vardecayrace
pl <- getToken "," -- propagationlimit
p1 <- getToken "," -- parameter1
p2 <- getToken "," -- parameter2
parameter3
parameter4
nn <- getToken "," -- problem
rs <- getToken "," -- result
np <- getToken "," -- propagation
cf <- getToken "," -- conflict
nl <- getToken "," -- learning
nb <- getToken "," -- backjump
nr <- getToken "," -- restart
la <- getToken "," -- learningrate
ev <- getToken "," -- extravalue
b0 <- getToken "," -- birth level 0
birth level 1
birth level 2
b3 <- getToken "," -- birth level +
d0 <- getToken "," -- dead level 0
dead level 1
dead level 2
d3 <- getToken "," -- dead level +
e0 <- getToken "," -- survive level 0
survive level 1
survive level 2
e3 <- getToken "," -- survive level +
return $
MergedDump
(so, MiosConfiguration (read vd) (2 ^ ((read pl) :: Int)) (read p1) (read p2) (read p3) (read p4))
(read nn)
(MiosStats (sort
[ (TerminateS, Right (read rs))
, (PropagationS, Left (read np)), (ConflictS, Left (read cf)), (LearntS, Left (read nl))
, (BackjumpS, Left (read nb)), (RestartS, Left (read nr))
, (LearningRateS, Left (read la)), (ExtraS, Left (read ev))
]))
(QuadLearntC
(read b0, read d0, read e0)
(read b1, read d1, read e1)
(read b2, read d2, read e2)
(read b3, read d3, read e3)
)
| null | https://raw.githubusercontent.com/shnarazk/mios/d032d761d73224f981a07ec2ea90936db6f495e8/MultiConflict/SAT/Mios/Util/Stat.hs | haskell | # LANGUAGE Safe #
higher level functions over Monoid methods
solver configuration = key
number of problems
|
| read a token separated by commas
| read a string separated by commas
solver
vardecayrace
propagationlimit
parameter1
parameter2
file
nvar
satisfiability
npropagation
nlearning
nbackjump
nrestart
learningrate
extravalue
born level 0
born level +
living level 0
living level +
suvive level 0
suvive level +
solver
vardecayrace
propagationlimit
parameter1
parameter2
problem
result
propagation
conflict
learning
backjump
restart
learningrate
extravalue
birth level 0
birth level +
dead level 0
dead level +
survive level 0
survive level + | # LANGUAGE
RecordWildCards
, TupleSections
#
RecordWildCards
, TupleSections
#-}
module SAT.Mios.Util.Stat
( Dumpable (..)
, MiosDump (..)
, MergedDump (..)
, merge
, parseBy
, fromDump
, fromStat
, fromCSV
, fromMergedCSV
, toMerge
)
where
import Data.Bits
import qualified Data.IORef as IORef
import Data.List
import Numeric (showFFloat)
import Text.ParserCombinators.ReadP
import SAT.Mios.Types ( MiosConfiguration(..)
, CNFDescription(..)
, DumpTag(..)
, MiosStats(..)
, QuadLearntC(..)
, MiosDump(..)
)
logBase2 :: Int -> Int
logBase2 x
| x <= 0 = 0
| otherwise =finiteBitSize x - 1 - countLeadingZeros x
showf :: Double -> String
showf x = showFFloat (Just 2) x ""
(+.) :: Either Double Int -> Either Double Int -> Either Double Int
(Right x) +. (Right y) = Right (x + y)
(Right x) +. (Left y) = Left (fromIntegral x + y)
(Left x) +. (Right y) = Left (x + fromIntegral y)
(Left x) +. (Left y) = Left (x + y)
(/.) :: Either Double Int -> Int -> Either Double Int
_ /. 0 = Left 0
(Right x) /. n = Left $ fromIntegral x / fromIntegral n
(Left x) /. n = Left $ x / fromIntegral n
class Monoid s => Dumpable s where
divideBy :: Int -> s -> s
divideBy _ _ = mempty
average :: [s] -> s
average l = divideBy (length l) (mconcat l)
unaccumulate :: s -> s
unaccumulate l = error "default method of unaccumulate"
header :: (String, s)
header = ("undefined", mempty)
toCSV :: s -> String
toCSV _ = "undefined:"
# MINIMAL #
instance Monoid CNFDescription where
mempty = CNFDescription (Just "") 0 0
mappend _ _ = mempty
mconcat _ = mempty
instance Dumpable CNFDescription where
header = ("file, nvar, nclause", mempty)
toCSV (CNFDescription (Just f) v c) = intercalate "," [show f, show v, show c]
toCSV (CNFDescription Nothing v c) = intercalate "," ["", show v, show c]
instance Monoid MiosConfiguration where
mempty = MiosConfiguration 0 0 0 0 0 0
mappend a b
| a == b = a
| otherwise = error "invalid configurations to mappend"
mconcat [] = error "invalid configurations to sconcat"
mconcat (a:l)
| all (a ==) l = a
| otherwise = error "invalid configurations to mconcat"
instance Dumpable MiosConfiguration where
header = (intercalate "," ["vDecay", "propLim", "par1", "par2", "par3", "par4"], mempty)
toCSV MiosConfiguration{..} =
intercalate "," [ show variableDecayRate
, show (logBase2 propagationLimit)
, showf gpParameter1
, showf gpParameter2
, show extraParameter3
, show extraParameter4]
average = mconcat
instance Monoid MiosStats where
mempty = MiosStats $ map (, Right 0) [minBound :: DumpTag .. maxBound]
mappend (MiosStats x) (MiosStats y) = MiosStats $ zipWith add x y
where
add (k1, v1) (k2, v2)
| k1 == k2 = (k1, v1 +. v2)
| otherwise = error "unbalanced stats"
instance Dumpable MiosStats where
header = ( intercalate "," $ map toHeader [minBound :: DumpTag .. maxBound]
, mempty
)
where
toHeader TerminateS = "result"
toHeader BackjumpS = "backjump"
toHeader RestartS = "restart"
toHeader PropagationS = "propagation"
toHeader ConflictS = "conflict"
toHeader LearntS = "learnt"
toHeader LearningRateS = "lRate"
toHeader ExtraS = "extra"
toCSV (MiosStats l) = intercalate "," $ map val [minBound :: DumpTag .. maxBound]
where
val k = case lookup k l of
Just (Right v) -> show v
Just (Left v) -> showf v
Nothing -> "0"
divideBy n (MiosStats x) = MiosStats $ map (\(k,v) -> (k, v /. n)) x
instance Monoid QuadLearntC where
mempty = QuadLearntC (0, 0, 0) (0, 0, 0) (0, 0, 0) (0, 0, 0)
mappend (QuadLearntC a1 b1 c1 d1) (QuadLearntC a2 b2 c2 d2) = QuadLearntC (a1 +.+ a2) (b1 +.+ b2) (c1 +.+ c2) (d1 +.+ d2)
where (a1, a2, a3) +.+ (b1, b2, b3) = (a1 + b1, a2 + b2, a3 + b3)
instance Dumpable QuadLearntC where
header = ("bl0,bl1,bl2,blx,ll0,ll1,ll2,llx,sl0,sl1,sl2,slx", mempty)
toCSV (QuadLearntC (b0, l0, s0) (b1, l1, s1) (b2, l2, s2) (b3, l3, s3)) = intercalate "," $ map showf [b0, b1, b2, b3, l0, l1, l2, l3, s0, s1, s2, s3]
divideBy n (QuadLearntC a b c d) = QuadLearntC (a /./ n') (b /./ n') (c /./ n') (d /./ n')
where
n' = fromIntegral n
(a1, a2, a3) /./ n = (a1 / n, a2 / n, a3 / n)
instance Monoid MiosDump where
mempty = MiosDump ("solver", mempty) mempty mempty mempty
mappend _ _ = error "MiosDump is not allowed to mappend; convert to MergedDump"
instance Dumpable MiosDump where
header = ( intercalate "," [ "solver"
, fst (header :: (String, MiosConfiguration))
, fst (header :: (String, CNFDescription))
, fst (header :: (String, MiosStats))
, fst (header :: (String, QuadLearntC))
]
, mempty
)
toCSV (MiosDump (i, m) d s q) = intercalate "," [show i, toCSV m, toCSV d, toCSV s, toCSV q]
average _ = error "MiosDump is not alloved to average; convert to MergedDump"
divideBy _ _ = error "MiosDump is not alloved to divideBy; convert to MergedDump"
data MergedDump =
, _mergedStat :: MiosStats
, _mergedLCat :: QuadLearntC
}
deriving (Eq, Ord, Read)
instance Monoid MergedDump where
mempty = MergedDump ("", mempty) 0 mempty mempty
mappend (MergedDump m n s l) (MergedDump m' n' s' l')
| m == m' = MergedDump m (n + n') (mappend s s') (mappend l l')
| otherwise = error "invalid mergeddumps to mappend"
mconcat [] = mempty
mconcat (a:l) = foldl mappend a l
instance Dumpable MergedDump where
header = (intercalate "," ["solver"
, fst (header :: (String, MiosConfiguration))
, "problem"
, fst (header :: (String, MiosStats))
, fst (header :: (String, QuadLearntC))
]
, mempty
)
toCSV (MergedDump (i, m) n s q) = intercalate "," [show i, toCSV m, show n, toCSV s, toCSV q]
divideBy n (MergedDump (i, m) k s q) = MergedDump (i, m) k (divideBy n s) (divideBy n q)
unaccumulate d = divideBy (_merged d) d
toMerge :: MiosDump -> MergedDump
toMerge (MiosDump m _ s c) = MergedDump m 1 s c
merge :: [MiosDump] -> IO [MergedDump]
merge l = do
let aborted ::MiosDump -> Bool
aborted (MiosDump _ _ (MiosStats vals) _)
| Just (Right 0) <- lookup TerminateS vals = True
| Nothing <- lookup TerminateS vals = True
| otherwise = False
inject :: [((String, MiosConfiguration), IORef.IORef MergedDump)] -> [MiosDump] -> IO [MergedDump]
inject h [] = map unaccumulate <$> mapM (IORef.readIORef . snd) h
inject h (d:ds)
| aborted d = inject h ds
| Just x <- lookup k h = do
IORef.modifyIORef x (mappend (toMerge d))
inject h ds
| otherwise = do
n <- (k ,) <$> IORef.newIORef (toMerge d)
inject (n : h) ds
where k = _dumpedConf d
inject [] l
parseBy :: [(String, ReadP s)] -> String -> [s]
parseBy fs l'
| null l = []
| Just (h, f) <- find ((head l ==) . fst) fs = map (fst. head . readP_to_S f) $ filter (h /=) (tail l)
| Just (_, f) <- find (("" ==) . fst) fs = map (fst. head . readP_to_S f) l
| otherwise = error $ "ABORT\nheader: \n" ++ (head l) ++ "\nis not matches to\n" ++ intercalate "\n" (map fst fs)
where
l = filter (('#' /=) . head) $ lines l'
getToken :: String -> ReadP String
getToken s = string s *> skipSpaces *> (munch (',' /=) +++ look)
getQuote :: String -> ReadP String
getQuote s = string s *> skipSpaces *> string "\"" *> munch ('\"' /=) <* string "\""
fromDump :: ReadP MiosDump
fromDump = read <$> munch ('\n' /=)
fromStat :: ReadP MiosDump
fromStat = do
so <- getQuote "Solver:"
va <- getToken ", VarDecayRate:"
pr <- getToken ", PropagationLimit:"
fl <- getQuote ", File:"
nv <- getToken ", NumOfVariables:"
nc <- getToken ", NumOfClauses:"
sa <- getToken ", SatisfiabilityValue:"
np <- getToken ", NumOfPropagations:"
cf <- getToken ", NumOfConflicts:"
nl <- getToken ", NumOfLearnts:"
nb <- getToken ", NumOfBackjumps:"
nr <- getToken ", NumOfRestarts:"
l1 <- getToken ", NumOfBirthLevel1:"
l2 <- getToken ", NumOfBirthLevel2:"
l3 <- getToken ", NumOfBirthLevelX:"
let nl' = read nl
n1' = read l1
n2' = read l2
n3' = read l3
di :: Double -> Double
di n = if nl' == 0 then 0 else n / nl'
return $
MiosDump
(so, MiosConfiguration (read va) (read pr) 0 0 0 0)
(CNFDescription (Just fl) (read nv) (read nc))
(MiosStats (sort
[ (TerminateS, Right (read sa)), (BackjumpS, Right (read nb)), (RestartS, Right (read nr))
, (PropagationS, Right (read np)), (ConflictS, Right (read cf)), (LearntS, Right (read nl))
, (LearningRateS, Left (read nl / read nb)), (ExtraS, Left 0)
]))
(QuadLearntC (di (nl' - n1' - n2' - n3'), 0, 0) (di n1', 0, 0) (di n2', 0, 0) (di n3', 0, 0))
fromCSV :: ReadP MiosDump
fromCSV = do
parameter3
parameter4
nclause
nconflicts
born level 1
born level 2
living level 1
living level 2
suvive level 1
suvive level 2
return $
MiosDump
(so, MiosConfiguration (read vd) (2 ^ ((read pl) :: Int)) (read p1) (read p2) (read p3) (read p4))
(CNFDescription (Just fl) (read nv) (read nc))
(MiosStats (sort
[ (TerminateS, Right (read sa))
, (PropagationS, Right (read np)), (ConflictS, Right (read cf)), (LearntS, Right (read nl))
, (BackjumpS, Right (read nb)), (RestartS, Right (read nr))
, (LearningRateS, Left (read la)), (ExtraS, Left (read ev))
]))
(QuadLearntC (read b0, read l0, read s0) (read b1, read l1, read s1) (read b2, read l2, read s2) (read b3, read l3, read s3))
fromMergedCSV :: ReadP MergedDump
fromMergedCSV = do
parameter3
parameter4
birth level 1
birth level 2
dead level 1
dead level 2
survive level 1
survive level 2
return $
MergedDump
(so, MiosConfiguration (read vd) (2 ^ ((read pl) :: Int)) (read p1) (read p2) (read p3) (read p4))
(read nn)
(MiosStats (sort
[ (TerminateS, Right (read rs))
, (PropagationS, Left (read np)), (ConflictS, Left (read cf)), (LearntS, Left (read nl))
, (BackjumpS, Left (read nb)), (RestartS, Left (read nr))
, (LearningRateS, Left (read la)), (ExtraS, Left (read ev))
]))
(QuadLearntC
(read b0, read d0, read e0)
(read b1, read d1, read e1)
(read b2, read d2, read e2)
(read b3, read d3, read e3)
)
|
800fad9d2a90aca8ab9d27d62a22dc6effa84c39fd37c5ec93afc7862a5afa2f | pjones/openid-connect | Main.hs | {-|
Copyright:
This file is part of the package openid-connect. It is subject to
the license terms in the LICENSE file found in the top-level
directory of this distribution and at:
-connect
No part of this package, including this file, may be copied,
modified, propagated, or distributed except according to the terms
contained in the LICENSE file.
License: BSD-2-Clause
-}
module Main (main) where
--------------------------------------------------------------------------------
import Test.Tasty
import qualified Client
import qualified DiscoveryTest
--------------------------------------------------------------------------------
main :: IO ()
main = defaultMain $ testGroup "Tests"
[ Client.test
, DiscoveryTest.test
]
| null | https://raw.githubusercontent.com/pjones/openid-connect/23b03940613a1b0a633747b5fecaf61b3fbd8a09/test/Main.hs | haskell | |
Copyright:
This file is part of the package openid-connect. It is subject to
the license terms in the LICENSE file found in the top-level
directory of this distribution and at:
-connect
No part of this package, including this file, may be copied,
modified, propagated, or distributed except according to the terms
contained in the LICENSE file.
License: BSD-2-Clause
------------------------------------------------------------------------------
------------------------------------------------------------------------------ | module Main (main) where
import Test.Tasty
import qualified Client
import qualified DiscoveryTest
main :: IO ()
main = defaultMain $ testGroup "Tests"
[ Client.test
, DiscoveryTest.test
]
|
2f3ee31d4ad31c954dd1cfce04b7c7449fa6b960cc469a227e9c19de83e48a4e | stevenvar/OMicroB | arduboy.ml | open Avr
let cs = PIN12
let dc = PIN4
let rst = PIN6
let button_left = PINA2
let button_right = PINA1
let button_down = PINA3
let button_up = PINA0
let button_a = PIN7
let button_b = PIN8
let g = PIN9
let r = PIN10
let b = PIN11
let init_rgb r g b =
digital_write r HIGH;
digital_write g HIGH;
digital_write b HIGH
let boot_pins () =
pin_mode button_left INPUT_PULLUP;
pin_mode button_right INPUT_PULLUP;
pin_mode button_down INPUT_PULLUP;
pin_mode button_up INPUT_PULLUP;
pin_mode r OUTPUT;
pin_mode g OUTPUT;
pin_mode b OUTPUT;
pin_mode button_a INPUT_PULLUP;
pin_mode button_b INPUT_PULLUP;
pin_mode cs OUTPUT;
pin_mode dc OUTPUT;
pin_mode rst OUTPUT;
init_rgb r g b
let write_at line column data =
Oled.write_at ~cs ~dc line column data
let init () =
Spi.begin_spi ~ss:SS ~sck:SCK ~mosi:MOSI;
boot_pins ();
Oled.boot ~cs:cs ~dc:dc ~rst:rst
| null | https://raw.githubusercontent.com/stevenvar/OMicroB/e4324d0736ac677b3086741dfdefb0e46775642b/tests/snake-mustard/arduboy.ml | ocaml | open Avr
let cs = PIN12
let dc = PIN4
let rst = PIN6
let button_left = PINA2
let button_right = PINA1
let button_down = PINA3
let button_up = PINA0
let button_a = PIN7
let button_b = PIN8
let g = PIN9
let r = PIN10
let b = PIN11
let init_rgb r g b =
digital_write r HIGH;
digital_write g HIGH;
digital_write b HIGH
let boot_pins () =
pin_mode button_left INPUT_PULLUP;
pin_mode button_right INPUT_PULLUP;
pin_mode button_down INPUT_PULLUP;
pin_mode button_up INPUT_PULLUP;
pin_mode r OUTPUT;
pin_mode g OUTPUT;
pin_mode b OUTPUT;
pin_mode button_a INPUT_PULLUP;
pin_mode button_b INPUT_PULLUP;
pin_mode cs OUTPUT;
pin_mode dc OUTPUT;
pin_mode rst OUTPUT;
init_rgb r g b
let write_at line column data =
Oled.write_at ~cs ~dc line column data
let init () =
Spi.begin_spi ~ss:SS ~sck:SCK ~mosi:MOSI;
boot_pins ();
Oled.boot ~cs:cs ~dc:dc ~rst:rst
| |
69d5770a6dc59554b5734a7ed31ec39040a316554d95008f66dbc81d6cdda70a | mbutterick/pollen | whitespace.rkt | #lang racket/base
(require racket/match)
(provide (all-defined-out))
(define (whitespace-base x #:nbsp-is-white? nbsp-white?)
(define white-pat (pregexp (format "^[\\s~a]+$" (if nbsp-white? #\u00A0 ""))))
(let loop ([x x])
(match x
["" #true] ; empty string is deemed whitespace
[(pregexp white-pat) #true]
[(? symbol?) (loop (symbol->string x))]
[(? pair?) (andmap loop x)]
[(? vector?) (loop (vector->list x))]
[_ #false])))
(define (whitespace? x) (whitespace-base x #:nbsp-is-white? #f))
(define (not-whitespace? x) (not (whitespace? x)))
(define (whitespace/nbsp? x) (whitespace-base x #:nbsp-is-white? #t))
(module+ test
(require rackunit racket/format)
(check-true (whitespace? " "))
(check-false (whitespace? (~a #\u00A0)))
(check-true (whitespace/nbsp? (~a #\u00A0)))
(check-true (whitespace/nbsp? (vector (~a #\u00A0))))
(check-false (whitespace? (format " ~a " #\u00A0)))
(check-true (whitespace/nbsp? (format " ~a " #\u00A0)))) | null | https://raw.githubusercontent.com/mbutterick/pollen/a4910a86dc62d1147f3aad94b56cecd6499d7aa6/pollen/private/whitespace.rkt | racket | empty string is deemed whitespace | #lang racket/base
(require racket/match)
(provide (all-defined-out))
(define (whitespace-base x #:nbsp-is-white? nbsp-white?)
(define white-pat (pregexp (format "^[\\s~a]+$" (if nbsp-white? #\u00A0 ""))))
(let loop ([x x])
(match x
[(pregexp white-pat) #true]
[(? symbol?) (loop (symbol->string x))]
[(? pair?) (andmap loop x)]
[(? vector?) (loop (vector->list x))]
[_ #false])))
(define (whitespace? x) (whitespace-base x #:nbsp-is-white? #f))
(define (not-whitespace? x) (not (whitespace? x)))
(define (whitespace/nbsp? x) (whitespace-base x #:nbsp-is-white? #t))
(module+ test
(require rackunit racket/format)
(check-true (whitespace? " "))
(check-false (whitespace? (~a #\u00A0)))
(check-true (whitespace/nbsp? (~a #\u00A0)))
(check-true (whitespace/nbsp? (vector (~a #\u00A0))))
(check-false (whitespace? (format " ~a " #\u00A0)))
(check-true (whitespace/nbsp? (format " ~a " #\u00A0)))) |
9915284d13fb455c636ad3daf73388d030fa52d00197f717e73f77683cbbfa28 | racketscript/racketscript | simple-provide.rkt | #lang racket
(provide say-hello
(rename-out [say-hola say-ahoy]))
(define (say-hello msg)
(displayln (list "Hello" msg)))
(define (say-hola msg)
(displayln (list "Hola" msg)))
| null | https://raw.githubusercontent.com/racketscript/racketscript/f94006d11338a674ae10f6bd83fc53e6806d07d8/tests/modules/private/simple-provide.rkt | racket | #lang racket
(provide say-hello
(rename-out [say-hola say-ahoy]))
(define (say-hello msg)
(displayln (list "Hello" msg)))
(define (say-hola msg)
(displayln (list "Hola" msg)))
| |
0a7c69ac0ff2a0ebf042bc8452ffffe4805fd06166f68f1c1dc088a9dee9b192 | wellposed/numerical | Range.hs |
{-# LANGUAGE DeriveDataTypeable #-}
# LANGUAGE CPP #
# LANGUAGE FunctionalDependencies #
# LANGUAGE FlexibleInstances #
# LANGUAGE DeriveFunctor , DeriveGeneric , DeriveFunctor #
{-# LANGUAGE DeriveFoldable,DeriveTraversable #-}
module Numerical.Array.Range (
Range(..)
,AffineRange(..)
,HasRange(..)
,affineRangeStride) where
import Data.Data
import GHC.Generics
#if defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ < 709
import Data.Foldable
import Data.Traversable
#endif
{-
not quite the right module for this notion of range, but lets
fix that later
-}
-- | whenever you are tempted to do a (lo,hi) tuple, use this instead
-- This should perhaps be made lazy, but strict for now.
data Range a =Range {_RangeMin :: !a
,_RangeMax :: !a}
deriving (Eq,Show,Data,Typeable,Generic ,Foldable,Traversable,Functor)
class HasRange r a | r -> a where
rangeMin:: Functor f => (a -> f a )-> r -> f r
rangeMax:: Functor f => (a -> f a )-> r -> f r
instance HasRange (Range a) a where
rangeMax = _rangeMax
# INLINE rangeMax #
rangeMin = _rangeMin
# INLINE rangeMin #
instance HasRange (AffineRange a) a where
rangeMin = _affineRangeMin
# INLINE rangeMin #
rangeMax = _affineRangeMax
# INLINE rangeMax #
_rangeMin :: Functor f => (a -> f a)-> Range a -> f (Range a)
_rangeMin = \ fun rec -> fmap (\mup -> rec{_RangeMin= mup}) $ fun (_RangeMin rec )
{-# INLINE _rangeMin#-}
_rangeMax :: Functor f => (a -> f a) -> Range a -> f (Range a)
_rangeMax = \ fun rec -> fmap (\mup -> rec{_RangeMax= mup}) $ fun (_RangeMax rec )
# INLINE _ rangeMax #
-- | this is uniform address interval by any other name
data AffineRange a = AffineRange{_AffineRangeMin :: !a
,_AffineRangeStride :: !Int
,_AffineRangeMax :: !a}
deriving (Eq,Show,Data,Generic,Typeable,Functor,Foldable,Traversable )
_affineRangeMin :: Functor f => (a-> f a) -> AffineRange a -> f (AffineRange a)
_affineRangeMin= \ fun rec -> fmap (\mup -> rec{_AffineRangeMin=mup}) $ fun (_AffineRangeMin rec)
{-# INLINE _affineRangeMin#-}
_affineRangeMax :: Functor f => (a -> f a) -> AffineRange a -> f (AffineRange a)
_affineRangeMax= \ fun rec -> fmap (\mup -> rec{_AffineRangeMax=mup}) $ fun (_AffineRangeMax rec)
{-# INLINE _affineRangeMax #-}
affineRangeStride :: Functor f => (Int -> f Int) -> AffineRange a -> f (AffineRange a)
affineRangeStride = \fun rec -> fmap (\mup -> rec{_AffineRangeStride=mup}) $ fun (_AffineRangeStride rec)
# INLINE affineRangeStride #
| null | https://raw.githubusercontent.com/wellposed/numerical/bf61926a854586b9707b88798887d7a72beeed13/src/Numerical/Array/Range.hs | haskell | # LANGUAGE DeriveDataTypeable #
# LANGUAGE DeriveFoldable,DeriveTraversable #
not quite the right module for this notion of range, but lets
fix that later
| whenever you are tempted to do a (lo,hi) tuple, use this instead
This should perhaps be made lazy, but strict for now.
# INLINE _rangeMin#
| this is uniform address interval by any other name
# INLINE _affineRangeMin#
# INLINE _affineRangeMax # |
# LANGUAGE CPP #
# LANGUAGE FunctionalDependencies #
# LANGUAGE FlexibleInstances #
# LANGUAGE DeriveFunctor , DeriveGeneric , DeriveFunctor #
module Numerical.Array.Range (
Range(..)
,AffineRange(..)
,HasRange(..)
,affineRangeStride) where
import Data.Data
import GHC.Generics
#if defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL__ < 709
import Data.Foldable
import Data.Traversable
#endif
data Range a =Range {_RangeMin :: !a
,_RangeMax :: !a}
deriving (Eq,Show,Data,Typeable,Generic ,Foldable,Traversable,Functor)
class HasRange r a | r -> a where
rangeMin:: Functor f => (a -> f a )-> r -> f r
rangeMax:: Functor f => (a -> f a )-> r -> f r
instance HasRange (Range a) a where
rangeMax = _rangeMax
# INLINE rangeMax #
rangeMin = _rangeMin
# INLINE rangeMin #
instance HasRange (AffineRange a) a where
rangeMin = _affineRangeMin
# INLINE rangeMin #
rangeMax = _affineRangeMax
# INLINE rangeMax #
_rangeMin :: Functor f => (a -> f a)-> Range a -> f (Range a)
_rangeMin = \ fun rec -> fmap (\mup -> rec{_RangeMin= mup}) $ fun (_RangeMin rec )
_rangeMax :: Functor f => (a -> f a) -> Range a -> f (Range a)
_rangeMax = \ fun rec -> fmap (\mup -> rec{_RangeMax= mup}) $ fun (_RangeMax rec )
# INLINE _ rangeMax #
data AffineRange a = AffineRange{_AffineRangeMin :: !a
,_AffineRangeStride :: !Int
,_AffineRangeMax :: !a}
deriving (Eq,Show,Data,Generic,Typeable,Functor,Foldable,Traversable )
_affineRangeMin :: Functor f => (a-> f a) -> AffineRange a -> f (AffineRange a)
_affineRangeMin= \ fun rec -> fmap (\mup -> rec{_AffineRangeMin=mup}) $ fun (_AffineRangeMin rec)
_affineRangeMax :: Functor f => (a -> f a) -> AffineRange a -> f (AffineRange a)
_affineRangeMax= \ fun rec -> fmap (\mup -> rec{_AffineRangeMax=mup}) $ fun (_AffineRangeMax rec)
affineRangeStride :: Functor f => (Int -> f Int) -> AffineRange a -> f (AffineRange a)
affineRangeStride = \fun rec -> fmap (\mup -> rec{_AffineRangeStride=mup}) $ fun (_AffineRangeStride rec)
# INLINE affineRangeStride #
|
683ad7b08451b10df8f684cd794b9866579c734b3fd3eb25c2894d49417e7e10 | softlab-ntua/bencherl | class_WasteTransport.erl | Copyright ( C ) 2012 - 2014 EDF R&D
This file is part of Sim - Diasca .
Sim - Diasca is free software : you can redistribute it and/or modify
% it under the terms of the GNU Lesser General Public License as
published by the Free Software Foundation , either version 3 of
the License , or ( at your option ) any later version .
Sim - Diasca is distributed in the hope that it will be useful ,
% but WITHOUT ANY WARRANTY; without even the implied warranty of
% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details .
You should have received a copy of the GNU Lesser General Public
License along with .
% If not, see </>.
Author : ( )
% Class modelling an abstract waste transport.
%
-module(class_WasteTransport).
% Determines what are the mother classes of this class (if any):
-define( wooper_superclasses, [ class_Actor, class_GeolocalizedElement ] ).
% parameters taken by the constructor ('construct').
-define( wooper_construct_parameters, ActorSettings, TransportName,
InitialLocation, MaxTransportedMass, MaxTransportedVolume,
SupportedWasteStates ).
Declaring all variations of WOOPER - defined standard life - cycle operations :
( template pasted , just two replacements performed to update arities )
-define( wooper_construct_export, new/6, new_link/6,
synchronous_new/6, synchronous_new_link/6,
synchronous_timed_new/6, synchronous_timed_new_link/6,
remote_new/7, remote_new_link/7, remote_synchronous_new/7,
remote_synchronous_new_link/7, remote_synchronisable_new_link/7,
remote_synchronous_timed_new/7, remote_synchronous_timed_new_link/7,
construct/7, destruct/1 ).
% Member method declarations:
-define( wooper_method_export, updateProbe/1, toString/1 ).
% For all shared defines and types:
-include("city_example_types.hrl").
% Design notes:
%
- currently a waste transport only transports one kind of waste ( even if it ,
% when empty, can load various different types of wastes)
% Type section.
% Describes the state (ex: liquid, solid, etc.) of the wastes that can be
% transported.
%
-type supported_waste_state() :: physical_state().
-export_type([ supported_waste_state/0 ]).
Allows to define WOOPER base variables and methods for that class :
-include("wooper.hrl").
% Must be included before class_TraceEmitter header:
-define(TraceEmitterCategorization,"City-example.Waste.Transport").
% Allows to use macros for trace sending:
-include("class_TraceEmitter.hrl").
% Implementation notes:
%
A waste transport embeds exactly one waste tank .
% The class-specific attributes of a waste transport instance are:
%
- tank : : waste_tank ( ): the tank this transport stores waste in
Constructs a new waste transport , from following parameters :
%
% - InitialLocation is the (initial) location of this waste transport (generally
% a point of interest)
%
% - MaxTransportedMass :: unit_utils:tons() is the maximum transported mass
%
% - MaxTransportedVolume :: unit_utils:cubic_meters() is the maximum
% transported volume
%
- SupportedWasteStates : : [ supported_waste_state ( ) ] is the list of the waste
% states this transport can support
%
% A waste transport is created empty.
%
-spec construct( wooper:state(), class_Actor:actor_settings(),
class_Actor:name(), class_GIS:location(),
unit_utils:cubic_meters(), unit_utils:tons(),
[ supported_waste_state() ] ) -> wooper:state().
construct( State, ActorSettings, TransportName, InitialLocation,
MaxTransportedVolume, MaxTransportedMass, SupportedWasteStates ) ->
ActorState = class_Actor:construct( State, ActorSettings, TransportName ),
GeoState = class_GeolocalizedElement:construct( ActorState,
InitialLocation ),
Tank = #waste_tank{
id=1,
% All types allowed here:
allowed_types=SupportedWasteStates,
current_type=none,
current_volume_stored=0.0,
max_volume_stored=MaxTransportedVolume,
current_mass_stored=0.0,
max_mass_stored=MaxTransportedMass,
busy=false
},
For probe labels : duration of one tick ; milliseconds needed :
TickDuration = text_utils:duration_to_string(
1000 * class_Actor:convert_ticks_to_seconds( 1, GeoState ) ),
% Depending on the choice of the result manager, it will be either a PID (if
% the corresponding result is wanted) or a 'non_wanted_probe' atom:
TransportProbePid = class_Actor:declare_probe(
_Name=text_utils:format( "~s Transported Waste Stock Probe",
[ TransportName ] ),
_Curves=[ "Quantity of waste currently stored (in tons)" ],
_Zones=[],
_Title=text_utils:format( "Waste Storage Monitoring"
"for Transport ~s", [ TransportName ] ),
_XLabel=text_utils:format(
"Simulation tick: one tick corresponds to ~s",
[ TickDuration ] ),
_YLabel="Tons of wastes currently transported" ),
setAttributes( GeoState, [
{ tank, Tank },
{ probe_pid, TransportProbePid },
{ trace_categorization,
text_utils:string_to_binary( ?TraceEmitterCategorization ) }
] ).
-spec destruct( wooper:state() ) -> wooper:state().
destruct( State ) ->
State.
% Section for member methods.
% Sends an update to the associated probe.
%
% (const oneway)
%
updateProbe( State ) ->
CurrentTickOffset = ?getAttr(current_tick_offset),
% Manages automatically the fact that the creation of this probe may have
% been rejected by the result manager:
class_Probe:send_data( ?getAttr(probe_pid), CurrentTickOffset,
{ (?getAttr(tank))#waste_tank.current_mass_stored } ),
?wooper_return_state_only( State ).
% Section for static methods.
% Returns a string describing the state of this instance.
%
% (const request)
%
-spec toString( wooper:state() ) -> request_return( string() ).
toString( State ) ->
Tank = ?getAttr(tank),
FinalString = text_utils:format( "Waste transport containing ~s",
[ waste_utils:waste_tank_to_string( Tank ) ] ),
?wooper_return_state_result( State, FinalString ).
| null | https://raw.githubusercontent.com/softlab-ntua/bencherl/317bdbf348def0b2f9ed32cb6621e21083b7e0ca/app/sim-diasca/mock-simulators/city-example/src/class_WasteTransport.erl | erlang | it under the terms of the GNU Lesser General Public License as
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
If not, see </>.
Class modelling an abstract waste transport.
Determines what are the mother classes of this class (if any):
parameters taken by the constructor ('construct').
Member method declarations:
For all shared defines and types:
Design notes:
when empty, can load various different types of wastes)
Type section.
Describes the state (ex: liquid, solid, etc.) of the wastes that can be
transported.
Must be included before class_TraceEmitter header:
Allows to use macros for trace sending:
Implementation notes:
The class-specific attributes of a waste transport instance are:
- InitialLocation is the (initial) location of this waste transport (generally
a point of interest)
- MaxTransportedMass :: unit_utils:tons() is the maximum transported mass
- MaxTransportedVolume :: unit_utils:cubic_meters() is the maximum
transported volume
states this transport can support
A waste transport is created empty.
All types allowed here:
Depending on the choice of the result manager, it will be either a PID (if
the corresponding result is wanted) or a 'non_wanted_probe' atom:
Section for member methods.
Sends an update to the associated probe.
(const oneway)
Manages automatically the fact that the creation of this probe may have
been rejected by the result manager:
Section for static methods.
Returns a string describing the state of this instance.
(const request)
| Copyright ( C ) 2012 - 2014 EDF R&D
This file is part of Sim - Diasca .
Sim - Diasca is free software : you can redistribute it and/or modify
published by the Free Software Foundation , either version 3 of
the License , or ( at your option ) any later version .
Sim - Diasca is distributed in the hope that it will be useful ,
GNU Lesser General Public License for more details .
You should have received a copy of the GNU Lesser General Public
License along with .
Author : ( )
-module(class_WasteTransport).
-define( wooper_superclasses, [ class_Actor, class_GeolocalizedElement ] ).
-define( wooper_construct_parameters, ActorSettings, TransportName,
InitialLocation, MaxTransportedMass, MaxTransportedVolume,
SupportedWasteStates ).
Declaring all variations of WOOPER - defined standard life - cycle operations :
( template pasted , just two replacements performed to update arities )
-define( wooper_construct_export, new/6, new_link/6,
synchronous_new/6, synchronous_new_link/6,
synchronous_timed_new/6, synchronous_timed_new_link/6,
remote_new/7, remote_new_link/7, remote_synchronous_new/7,
remote_synchronous_new_link/7, remote_synchronisable_new_link/7,
remote_synchronous_timed_new/7, remote_synchronous_timed_new_link/7,
construct/7, destruct/1 ).
-define( wooper_method_export, updateProbe/1, toString/1 ).
-include("city_example_types.hrl").
- currently a waste transport only transports one kind of waste ( even if it ,
-type supported_waste_state() :: physical_state().
-export_type([ supported_waste_state/0 ]).
Allows to define WOOPER base variables and methods for that class :
-include("wooper.hrl").
-define(TraceEmitterCategorization,"City-example.Waste.Transport").
-include("class_TraceEmitter.hrl").
A waste transport embeds exactly one waste tank .
- tank : : waste_tank ( ): the tank this transport stores waste in
Constructs a new waste transport , from following parameters :
- SupportedWasteStates : : [ supported_waste_state ( ) ] is the list of the waste
-spec construct( wooper:state(), class_Actor:actor_settings(),
class_Actor:name(), class_GIS:location(),
unit_utils:cubic_meters(), unit_utils:tons(),
[ supported_waste_state() ] ) -> wooper:state().
construct( State, ActorSettings, TransportName, InitialLocation,
MaxTransportedVolume, MaxTransportedMass, SupportedWasteStates ) ->
ActorState = class_Actor:construct( State, ActorSettings, TransportName ),
GeoState = class_GeolocalizedElement:construct( ActorState,
InitialLocation ),
Tank = #waste_tank{
id=1,
allowed_types=SupportedWasteStates,
current_type=none,
current_volume_stored=0.0,
max_volume_stored=MaxTransportedVolume,
current_mass_stored=0.0,
max_mass_stored=MaxTransportedMass,
busy=false
},
For probe labels : duration of one tick ; milliseconds needed :
TickDuration = text_utils:duration_to_string(
1000 * class_Actor:convert_ticks_to_seconds( 1, GeoState ) ),
TransportProbePid = class_Actor:declare_probe(
_Name=text_utils:format( "~s Transported Waste Stock Probe",
[ TransportName ] ),
_Curves=[ "Quantity of waste currently stored (in tons)" ],
_Zones=[],
_Title=text_utils:format( "Waste Storage Monitoring"
"for Transport ~s", [ TransportName ] ),
_XLabel=text_utils:format(
"Simulation tick: one tick corresponds to ~s",
[ TickDuration ] ),
_YLabel="Tons of wastes currently transported" ),
setAttributes( GeoState, [
{ tank, Tank },
{ probe_pid, TransportProbePid },
{ trace_categorization,
text_utils:string_to_binary( ?TraceEmitterCategorization ) }
] ).
-spec destruct( wooper:state() ) -> wooper:state().
destruct( State ) ->
State.
updateProbe( State ) ->
CurrentTickOffset = ?getAttr(current_tick_offset),
class_Probe:send_data( ?getAttr(probe_pid), CurrentTickOffset,
{ (?getAttr(tank))#waste_tank.current_mass_stored } ),
?wooper_return_state_only( State ).
-spec toString( wooper:state() ) -> request_return( string() ).
toString( State ) ->
Tank = ?getAttr(tank),
FinalString = text_utils:format( "Waste transport containing ~s",
[ waste_utils:waste_tank_to_string( Tank ) ] ),
?wooper_return_state_result( State, FinalString ).
|
6b04d836d46fc2c72c3ab057339172dd8a3a3240c4134fbaca0135b11d4fe1f2 | oliyh/pedestal-api | error_handling.clj | (ns pedestal-api.error-handling
(:require [route-swagger.interceptor :as sw.int]
[route-swagger.doc :as sw.doc]
[ring.swagger.middleware :refer [stringify-error]]
[io.pedestal.interceptor.error :as error]
[ring.util.http-status :as status]
[io.pedestal.http.body-params :as body-params]))
(def error-responses
(sw.doc/annotate
{:responses {status/bad-request {}
status/internal-server-error {}}}
(error/error-dispatch [ctx ex]
[{:interceptor ::body-params/body-params}]
(assoc ctx :response {:status status/bad-request :body "Cannot deserialise body" :headers {"Content-Type" "text/plain"}})
[{:interceptor ::sw.int/coerce-request}]
(assoc ctx :response {:status status/bad-request :body (stringify-error (:error (ex-data ex))) :headers {"Content-Type" "text/plain"}})
[{:interceptor ::sw.int/validate-response}]
(assoc ctx :response {:status status/internal-server-error :body (stringify-error (:error (ex-data ex))) :headers {"Content-Type" "text/plain"}}))))
| null | https://raw.githubusercontent.com/oliyh/pedestal-api/1a90c29e97c3cccfe59a1f9d114765c104f70c13/src/pedestal_api/error_handling.clj | clojure | (ns pedestal-api.error-handling
(:require [route-swagger.interceptor :as sw.int]
[route-swagger.doc :as sw.doc]
[ring.swagger.middleware :refer [stringify-error]]
[io.pedestal.interceptor.error :as error]
[ring.util.http-status :as status]
[io.pedestal.http.body-params :as body-params]))
(def error-responses
(sw.doc/annotate
{:responses {status/bad-request {}
status/internal-server-error {}}}
(error/error-dispatch [ctx ex]
[{:interceptor ::body-params/body-params}]
(assoc ctx :response {:status status/bad-request :body "Cannot deserialise body" :headers {"Content-Type" "text/plain"}})
[{:interceptor ::sw.int/coerce-request}]
(assoc ctx :response {:status status/bad-request :body (stringify-error (:error (ex-data ex))) :headers {"Content-Type" "text/plain"}})
[{:interceptor ::sw.int/validate-response}]
(assoc ctx :response {:status status/internal-server-error :body (stringify-error (:error (ex-data ex))) :headers {"Content-Type" "text/plain"}}))))
| |
ca5f543fe55ce4728738d494f9916e622b5856681507afa127f5616a2bb48451 | typelead/etlas | Legacy.hs | # LANGUAGE RecordWildCards , NamedFieldPuns , DeriveGeneric #
-- | Project configuration, implementation in terms of legacy types.
--
module Distribution.Client.ProjectConfig.Legacy (
-- * Project config in terms of legacy types
LegacyProjectConfig,
parseLegacyProjectConfig,
showLegacyProjectConfig,
-- * Conversion to and from legacy config types
commandLineFlagsToProjectConfig,
convertLegacyProjectConfig,
convertLegacyGlobalConfig,
convertToLegacyProjectConfig,
-- * Internals, just for tests
parsePackageLocationTokenQ,
renderPackageLocationToken,
) where
import Prelude ()
import Distribution.Client.Compat.Prelude
import Distribution.Client.ProjectConfig.Types
import Distribution.Client.Types
( RemoteRepo(..), emptyRemoteRepo )
import Distribution.Client.Config
( SavedConfig(..), remoteRepoFields )
import Distribution.Solver.Types.ConstraintSource
import Distribution.Package
import Distribution.Types.Dependency
import Distribution.PackageDescription
( SourceRepo(..), RepoKind(..)
, dispFlagAssignment, parseFlagAssignment )
import Distribution.PackageDescription.Parse
( sourceRepoFieldDescrs )
import Distribution.Simple.Compiler
( OptimisationLevel(..), DebugInfoLevel(..) )
import Distribution.Simple.Setup
( Flag(Flag), toFlag, fromFlagOrDefault
, ConfigFlags(..), configureOptions
, HaddockFlags(..), haddockOptions, defaultHaddockFlags
, programDbPaths', splitArgs, showPackageDb, readPackageDb
, AllowNewer(..), AllowOlder(..), RelaxDeps(..) )
import Distribution.Client.Setup
( GlobalFlags(..), globalCommand
, ConfigExFlags(..), configureExOptions, defaultConfigExFlags
, InstallFlags(..), installOptions, defaultInstallFlags )
import Distribution.Simple.Program
( programName, knownPrograms )
import Distribution.Simple.Program.Db
( ProgramDb, defaultProgramDb )
import Distribution.Simple.Utils
( lowercase )
import Distribution.Utils.NubList
( toNubList, fromNubList, overNubList )
import Distribution.Simple.LocalBuildInfo
( toPathTemplate, fromPathTemplate )
import Distribution.Text
import qualified Distribution.Compat.ReadP as Parse
import Distribution.Compat.ReadP
( ReadP, (+++), (<++) )
import qualified Text.Read as Read
import qualified Text.PrettyPrint as Disp
import Text.PrettyPrint
( Doc, ($+$) )
import qualified Distribution.ParseUtils as ParseUtils (field)
import Distribution.ParseUtils
( ParseResult(..), PError(..), syntaxError, PWarning(..), warning
, simpleField, commaNewLineListField
, showToken )
import Distribution.Client.ParseUtils
import Distribution.Simple.Command
( CommandUI(commandOptions), ShowOrParseArgs(..)
, OptionField, option, reqArg' )
import qualified Data.Map as Map
------------------------------------------------------------------
-- Representing the project config file in terms of legacy types
--
| We already have parsers\/pretty - printers for almost all the fields in the
-- project config file, but they're in terms of the types used for the command
-- line flags for Setup.hs or etlas commands. We don't want to redefine them
-- all, at least not yet so for the moment we use the parsers at the old types
-- and use conversion functions.
--
-- Ultimately if\/when this project-based approach becomes the default then we
-- can redefine the parsers directly for the new types.
--
data LegacyProjectConfig = LegacyProjectConfig {
legacyPackages :: [String],
legacyPackagesOptional :: [String],
legacyPackagesRepo :: [SourceRepo],
legacyPackagesNamed :: [Dependency],
legacySharedConfig :: LegacySharedConfig,
legacyAllConfig :: LegacyPackageConfig,
legacyLocalConfig :: LegacyPackageConfig,
legacySpecificConfig :: MapMappend PackageName LegacyPackageConfig
} deriving Generic
instance Monoid LegacyProjectConfig where
mempty = gmempty
mappend = (<>)
instance Semigroup LegacyProjectConfig where
(<>) = gmappend
data LegacyPackageConfig = LegacyPackageConfig {
legacyConfigureFlags :: ConfigFlags,
legacyInstallPkgFlags :: InstallFlags,
legacyHaddockFlags :: HaddockFlags
} deriving Generic
instance Monoid LegacyPackageConfig where
mempty = gmempty
mappend = (<>)
instance Semigroup LegacyPackageConfig where
(<>) = gmappend
data LegacySharedConfig = LegacySharedConfig {
legacyGlobalFlags :: GlobalFlags,
legacyConfigureShFlags :: ConfigFlags,
legacyConfigureExFlags :: ConfigExFlags,
legacyInstallFlags :: InstallFlags
} deriving Generic
instance Monoid LegacySharedConfig where
mempty = gmempty
mappend = (<>)
instance Semigroup LegacySharedConfig where
(<>) = gmappend
------------------------------------------------------------------
-- Converting from and to the legacy types
--
| Convert configuration from the @etlas configure@ or @etlas build@ command
-- line into a 'ProjectConfig' value that can combined with configuration from
-- other sources.
--
-- At the moment this uses the legacy command line flag types. See
' ' for an explanation .
--
commandLineFlagsToProjectConfig :: GlobalFlags
-> ConfigFlags -> ConfigExFlags
-> InstallFlags -> HaddockFlags
-> ProjectConfig
commandLineFlagsToProjectConfig globalFlags configFlags configExFlags
installFlags haddockFlags =
mempty {
projectConfigBuildOnly = convertLegacyBuildOnlyFlags
globalFlags configFlags
installFlags haddockFlags,
projectConfigShared = convertLegacyAllPackageFlags
globalFlags configFlags
configExFlags installFlags,
projectConfigLocalPackages = convertLegacyPerPackageFlags
configFlags installFlags haddockFlags
}
-- | Convert from the types currently used for the user-wide @~/.etlas/config@
-- file into the 'ProjectConfig' type.
--
-- Only a subset of the 'ProjectConfig' can be represented in the user-wide
-- config. In particular it does not include packages that are in the project,
-- and it also doesn't support package-specific configuration (only
-- configuration that applies to all packages).
--
convertLegacyGlobalConfig :: SavedConfig -> ProjectConfig
convertLegacyGlobalConfig
SavedConfig {
savedGlobalFlags = globalFlags,
savedInstallFlags = installFlags,
savedConfigureFlags = configFlags,
savedConfigureExFlags = configExFlags,
savedUserInstallDirs = _,
savedGlobalInstallDirs = _,
savedUploadFlags = _,
savedReportFlags = _,
savedHaddockFlags = haddockFlags
} =
mempty {
projectConfigShared = configAllPackages,
projectConfigLocalPackages = configLocalPackages,
projectConfigBuildOnly = configBuildOnly
}
where
--TODO: [code cleanup] eliminate use of default*Flags here and specify the
-- defaults in the various resolve functions in terms of the new types.
configExFlags' = defaultConfigExFlags <> configExFlags
installFlags' = defaultInstallFlags <> installFlags
haddockFlags' = defaultHaddockFlags <> haddockFlags
configLocalPackages = convertLegacyPerPackageFlags
configFlags installFlags' haddockFlags'
configAllPackages = convertLegacyAllPackageFlags
globalFlags configFlags
configExFlags' installFlags'
configBuildOnly = convertLegacyBuildOnlyFlags
globalFlags configFlags
installFlags' haddockFlags'
-- | Convert the project config from the legacy types to the 'ProjectConfig'
and associated types . See ' ' for an explanation of the
-- approach.
--
convertLegacyProjectConfig :: LegacyProjectConfig -> ProjectConfig
convertLegacyProjectConfig
LegacyProjectConfig {
legacyPackages,
legacyPackagesOptional,
legacyPackagesRepo,
legacyPackagesNamed,
legacySharedConfig = LegacySharedConfig globalFlags configShFlags
configExFlags installSharedFlags,
legacyAllConfig,
legacyLocalConfig = LegacyPackageConfig configFlags installPerPkgFlags
haddockFlags,
legacySpecificConfig
} =
ProjectConfig {
projectPackages = legacyPackages,
projectPackagesOptional = legacyPackagesOptional,
projectPackagesRepo = legacyPackagesRepo,
projectPackagesNamed = legacyPackagesNamed,
projectConfigBuildOnly = configBuildOnly,
projectConfigShared = configPackagesShared,
projectConfigProvenance = mempty,
projectConfigAllPackages = configAllPackages,
projectConfigLocalPackages = configLocalPackages,
projectConfigSpecificPackage = fmap perPackage legacySpecificConfig
}
where
configAllPackages = convertLegacyPerPackageFlags g i h
where LegacyPackageConfig g i h = legacyAllConfig
configLocalPackages = convertLegacyPerPackageFlags
configFlags installPerPkgFlags haddockFlags
configPackagesShared= convertLegacyAllPackageFlags
globalFlags (configFlags <> configShFlags)
configExFlags installSharedFlags
configBuildOnly = convertLegacyBuildOnlyFlags
globalFlags configShFlags
installSharedFlags haddockFlags
perPackage (LegacyPackageConfig perPkgConfigFlags perPkgInstallFlags
perPkgHaddockFlags) =
convertLegacyPerPackageFlags
perPkgConfigFlags perPkgInstallFlags perPkgHaddockFlags
-- | Helper used by other conversion functions that returns the
-- 'ProjectConfigShared' subset of the 'ProjectConfig'.
--
convertLegacyAllPackageFlags :: GlobalFlags -> ConfigFlags
-> ConfigExFlags -> InstallFlags
-> ProjectConfigShared
convertLegacyAllPackageFlags globalFlags configFlags
configExFlags installFlags =
ProjectConfigShared{..}
where
GlobalFlags {
globalConfigFile = projectConfigConfigFile,
globalSandboxConfigFile = _, -- ??
globalRemoteRepos = projectConfigRemoteRepos,
globalLocalRepos = projectConfigLocalRepos,
globalPatchesDir = projectConfigPatchesDir,
globalEtaVersion = projectConfigEtaVersion
} = globalFlags
ConfigFlags {
configDistPref = projectConfigDistDir,
configHcFlavor = projectConfigHcFlavor,
configHcPath = projectConfigHcPath,
configHcPkg = projectConfigHcPkg,
configInstallDirs = projectConfigInstallDirs ,
--configUserInstall = projectConfigUserInstall,
configPackageDBs = projectConfigPackageDBs,
configAllowOlder = projectConfigAllowOlder,
configAllowNewer = projectConfigAllowNewer
} = configFlags
ConfigExFlags {
configCabalVersion = projectConfigCabalVersion,
configExConstraints = projectConfigConstraints,
configPreferences = projectConfigPreferences,
configSolver = projectConfigSolver
} = configExFlags
InstallFlags {
installProjectFileName = projectConfigProjectFile,
installHaddockIndex = projectConfigHaddockIndex,
--installReinstall = projectConfigReinstall,
--installAvoidReinstalls = projectConfigAvoidReinstalls,
--installOverrideReinstall = projectConfigOverrideReinstall,
installIndexState = projectConfigIndexState,
installMaxBackjumps = projectConfigMaxBackjumps,
--installUpgradeDeps = projectConfigUpgradeDeps,
installReorderGoals = projectConfigReorderGoals,
installCountConflicts = projectConfigCountConflicts,
installPerComponent = projectConfigPerComponent,
installIndependentGoals = projectConfigIndependentGoals,
installShadowPkgs = projectConfigShadowPkgs ,
installStrongFlags = projectConfigStrongFlags,
installAllowBootLibInstalls = projectConfigAllowBootLibInstalls
} = installFlags
-- | Helper used by other conversion functions that returns the
-- 'PackageConfig' subset of the 'ProjectConfig'.
--
convertLegacyPerPackageFlags :: ConfigFlags -> InstallFlags -> HaddockFlags
-> PackageConfig
convertLegacyPerPackageFlags configFlags installFlags haddockFlags =
PackageConfig{..}
where
ConfigFlags {
configProgramPaths,
configProgramArgs,
configProgramPathExtra = packageConfigProgramPathExtra,
configVanillaLib = packageConfigVanillaLib,
configProfLib = packageConfigProfLib,
configSharedLib = packageConfigSharedLib,
configDynExe = packageConfigDynExe,
configProfExe = packageConfigProfExe,
configProf = packageConfigProf,
configProfDetail = packageConfigProfDetail,
configProfLibDetail = packageConfigProfLibDetail,
configConfigureArgs = packageConfigConfigureArgs,
configOptimization = packageConfigOptimization,
configProgPrefix = packageConfigProgPrefix,
configProgSuffix = packageConfigProgSuffix,
configGHCiLib = packageConfigGHCiLib,
configSplitObjs = packageConfigSplitObjs,
configStripExes = packageConfigStripExes,
configStripLibs = packageConfigStripLibs,
configExtraLibDirs = packageConfigExtraLibDirs,
configExtraFrameworkDirs = packageConfigExtraFrameworkDirs,
configExtraIncludeDirs = packageConfigExtraIncludeDirs,
configConfigurationsFlags = packageConfigFlagAssignment,
configTests = packageConfigTests,
configBenchmarks = packageConfigBenchmarks,
configCoverage = coverage,
configLibCoverage = libcoverage, --deprecated
configDebugInfo = packageConfigDebugInfo,
configRelocatable = packageConfigRelocatable,
configVerifyMode = packageConfigVerifyMode
} = configFlags
packageConfigProgramPaths = MapLast (Map.fromList configProgramPaths)
packageConfigProgramArgs = MapMappend (Map.fromList configProgramArgs)
packageConfigCoverage = coverage <> libcoverage
--TODO: defer this merging to the resolve phase
InstallFlags {
installDocumentation = packageConfigDocumentation,
installRunTests = packageConfigRunTests
} = installFlags
HaddockFlags {
haddockHoogle = packageConfigHaddockHoogle,
haddockHtml = packageConfigHaddockHtml,
haddockHtmlLocation = packageConfigHaddockHtmlLocation,
haddockForeignLibs = packageConfigHaddockForeignLibs,
haddockExecutables = packageConfigHaddockExecutables,
haddockTestSuites = packageConfigHaddockTestSuites,
haddockBenchmarks = packageConfigHaddockBenchmarks,
haddockInternal = packageConfigHaddockInternal,
haddockCss = packageConfigHaddockCss,
haddockHscolour = packageConfigHaddockHscolour,
haddockHscolourCss = packageConfigHaddockHscolourCss,
haddockContents = packageConfigHaddockContents
} = haddockFlags
-- | Helper used by other conversion functions that returns the
-- 'ProjectConfigBuildOnly' subset of the 'ProjectConfig'.
--
convertLegacyBuildOnlyFlags :: GlobalFlags -> ConfigFlags
-> InstallFlags -> HaddockFlags
-> ProjectConfigBuildOnly
convertLegacyBuildOnlyFlags globalFlags configFlags
installFlags haddockFlags =
ProjectConfigBuildOnly{..}
where
GlobalFlags {
globalCacheDir = projectConfigCacheDir,
globalLogsDir = projectConfigLogsDir,
globalWorldFile = _,
globalHttpTransport = projectConfigHttpTransport,
globalIgnoreExpiry = projectConfigIgnoreExpiry,
globalStoreDir = projectConfigStoreDir,
globalBinariesDir = projectConfigBinariesDir,
globalAutoUpdate = projectConfigAutoUpdate,
globalSendMetrics = projectConfigSendMetrics
} = globalFlags
ConfigFlags {
configVerbosity = projectConfigVerbosity
} = configFlags
InstallFlags {
installDryRun = projectConfigDryRun,
installOnly = _,
installOnlyDeps = projectConfigOnlyDeps,
installRootCmd = _,
installSummaryFile = projectConfigSummaryFile,
installLogFile = projectConfigLogFile,
installBuildReports = projectConfigBuildReports,
installReportPlanningFailure = projectConfigReportPlanningFailure,
installSymlinkBinDir = projectConfigSymlinkBinDir,
installOneShot = projectConfigOneShot,
installNumJobs = projectConfigNumJobs,
installKeepGoing = projectConfigKeepGoing,
installOfflineMode = projectConfigOfflineMode
} = installFlags
HaddockFlags {
haddockKeepTempFiles = projectConfigKeepTempFiles --TODO: this ought to live elsewhere
} = haddockFlags
convertToLegacyProjectConfig :: ProjectConfig -> LegacyProjectConfig
convertToLegacyProjectConfig
projectConfig@ProjectConfig {
projectPackages,
projectPackagesOptional,
projectPackagesRepo,
projectPackagesNamed,
projectConfigAllPackages,
projectConfigLocalPackages,
projectConfigSpecificPackage
} =
LegacyProjectConfig {
legacyPackages = projectPackages,
legacyPackagesOptional = projectPackagesOptional,
legacyPackagesRepo = projectPackagesRepo,
legacyPackagesNamed = projectPackagesNamed,
legacySharedConfig = convertToLegacySharedConfig projectConfig,
legacyAllConfig = convertToLegacyPerPackageConfig
projectConfigAllPackages,
legacyLocalConfig = convertToLegacyAllPackageConfig projectConfig
<> convertToLegacyPerPackageConfig
projectConfigLocalPackages,
legacySpecificConfig = fmap convertToLegacyPerPackageConfig
projectConfigSpecificPackage
}
-- | Helper used by 'convertToLegacyProjectConfig' that produces the
-- 'LegacySharedConfig' subset: global, configure, configure-ex and
-- install flags shared across the whole project.
--
-- Fields set to 'mempty' with a trailing @--projectConfig...@ comment are
-- project-config options that are deliberately not carried over here.
convertToLegacySharedConfig :: ProjectConfig -> LegacySharedConfig
convertToLegacySharedConfig
    ProjectConfig {
      projectConfigBuildOnly = ProjectConfigBuildOnly {..},
      projectConfigShared    = ProjectConfigShared {..},
      projectConfigAllPackages = PackageConfig {
        packageConfigDocumentation
      }
    } =
    LegacySharedConfig {
      legacyGlobalFlags      = globalFlags,
      legacyConfigureShFlags = configFlags,
      legacyConfigureExFlags = configExFlags,
      legacyInstallFlags     = installFlags
    }
  where
    globalFlags = GlobalFlags {
      globalVersion           = mempty,
      globalNumericVersion    = mempty,
      globalConfigFile        = projectConfigConfigFile,
      globalSandboxConfigFile = mempty,
      globalConstraintsFile   = mempty,
      globalRemoteRepos       = projectConfigRemoteRepos,
      globalCacheDir          = projectConfigCacheDir,
      globalLocalRepos        = projectConfigLocalRepos,
      globalLogsDir           = projectConfigLogsDir,
      globalWorldFile         = mempty,
      globalRequireSandbox    = mempty,
      globalIgnoreSandbox     = mempty,
      globalIgnoreExpiry      = projectConfigIgnoreExpiry,
      globalHttpTransport     = projectConfigHttpTransport,
      globalNix               = mempty,
      globalStoreDir          = projectConfigStoreDir,
      globalPatchesDir        = projectConfigPatchesDir,
      globalBinariesDir       = projectConfigBinariesDir,
      globalAutoUpdate        = projectConfigAutoUpdate,
      globalSendMetrics       = projectConfigSendMetrics,
      globalEtaVersion        = projectConfigEtaVersion
    }

    configFlags = mempty {
      configVerbosity  = projectConfigVerbosity,
      configDistPref   = projectConfigDistDir,
      configAllowOlder = projectConfigAllowOlder,
      configAllowNewer = projectConfigAllowNewer
    }

    configExFlags = ConfigExFlags {
      configCabalVersion  = projectConfigCabalVersion,
      configExConstraints = projectConfigConstraints,
      configPreferences   = projectConfigPreferences,
      configSolver        = projectConfigSolver
    }

    installFlags = InstallFlags {
      installDocumentation         = packageConfigDocumentation,
      installHaddockIndex          = projectConfigHaddockIndex,
      installDryRun                = projectConfigDryRun,
      installReinstall             = mempty, --projectConfigReinstall,
      installAvoidReinstalls       = mempty, --projectConfigAvoidReinstalls,
      installOverrideReinstall     = mempty, --projectConfigOverrideReinstall,
      installMaxBackjumps          = projectConfigMaxBackjumps,
      installUpgradeDeps           = mempty, --projectConfigUpgradeDeps,
      installReorderGoals          = projectConfigReorderGoals,
      installCountConflicts        = projectConfigCountConflicts,
      installIndependentGoals      = projectConfigIndependentGoals,
      -- NOTE(review): this line was garbled in the source
      -- ("projectConfigShadowPkgs ,"); restored to the mempty/commented-out
      -- form used by the neighbouring fields -- confirm against upstream.
      installShadowPkgs            = mempty, --projectConfigShadowPkgs,
      installStrongFlags           = projectConfigStrongFlags,
      installAllowBootLibInstalls  = projectConfigAllowBootLibInstalls,
      installOnly                  = mempty,
      installOnlyDeps              = projectConfigOnlyDeps,
      installIndexState            = projectConfigIndexState,
      installRootCmd               = mempty, --no longer supported
      installSummaryFile           = projectConfigSummaryFile,
      installLogFile               = projectConfigLogFile,
      installBuildReports          = projectConfigBuildReports,
      installReportPlanningFailure = projectConfigReportPlanningFailure,
      installBinariesOutputDir     = mempty,
      installSymlinkBinDir         = projectConfigSymlinkBinDir,
      installPerComponent          = projectConfigPerComponent,
      installOneShot               = projectConfigOneShot,
      installNumJobs               = projectConfigNumJobs,
      installKeepGoing             = projectConfigKeepGoing,
      installRunTests              = mempty,
      installOfflineMode           = projectConfigOfflineMode,
      installProjectFileName       = projectConfigProjectFile
    }
-- | Helper used by 'convertToLegacyProjectConfig' that produces the
-- project-wide 'LegacyPackageConfig'.  Only the compiler selection
-- (flavor\/path\/pkg), the package DB stack and the haddock temp-file
-- flag are carried over; every other configure field is 'mempty'.
convertToLegacyAllPackageConfig :: ProjectConfig -> LegacyPackageConfig
convertToLegacyAllPackageConfig
    ProjectConfig {
      projectConfigBuildOnly = ProjectConfigBuildOnly {..},
      projectConfigShared    = ProjectConfigShared {..}
    } =
    LegacyPackageConfig {
      legacyConfigureFlags  = configFlags,
      legacyInstallPkgFlags = mempty,
      legacyHaddockFlags    = haddockFlags
    }
  where
    configFlags = ConfigFlags {
      configArgs                = mempty,
      configPrograms_           = mempty,
      configProgramPaths        = mempty,
      configProgramArgs         = mempty,
      configProgramPathExtra    = mempty,
      configHcFlavor            = projectConfigHcFlavor,
      configHcPath              = projectConfigHcPath,
      configHcPkg               = projectConfigHcPkg,
      configInstantiateWith     = mempty,
      configVanillaLib          = mempty,
      configProfLib             = mempty,
      configSharedLib           = mempty,
      configDynExe              = mempty,
      configProfExe             = mempty,
      configProf                = mempty,
      configProfDetail          = mempty,
      configProfLibDetail       = mempty,
      configConfigureArgs       = mempty,
      configOptimization        = mempty,
      configProgPrefix          = mempty,
      configProgSuffix          = mempty,
      configInstallDirs         = mempty,
      configScratchDir          = mempty,
      configDistPref            = mempty,
      configCabalFilePath       = mempty,
      configVerbosity           = mempty,
      configUserInstall         = mempty, --projectConfigUserInstall,
      configPackageDBs          = projectConfigPackageDBs,
      configGHCiLib             = mempty,
      configSplitObjs           = mempty,
      configStripExes           = mempty,
      configStripLibs           = mempty,
      configExtraLibDirs        = mempty,
      configExtraFrameworkDirs  = mempty,
      configConstraints         = mempty,
      configDependencies        = mempty,
      configExtraIncludeDirs    = mempty,
      configDeterministic       = mempty,
      configIPID                = mempty,
      configCID                 = mempty,
      configConfigurationsFlags = mempty,
      configTests               = mempty,
      configCoverage            = mempty, --TODO: don't merge
      configLibCoverage         = mempty, --TODO: don't merge
      configExactConfiguration  = mempty,
      configBenchmarks          = mempty,
      configFlagError           = mempty, --TODO: ???
      configRelocatable         = mempty,
      configDebugInfo           = mempty,
      configAllowOlder          = mempty,
      configAllowNewer          = mempty,
      configVerifyMode          = mempty
    }

    haddockFlags = mempty {
      haddockKeepTempFiles = projectConfigKeepTempFiles
    }
-- | Helper used by 'convertToLegacyProjectConfig' that converts the
-- per-package 'PackageConfig' subset into a 'LegacyPackageConfig'
-- (configure + install + haddock flags for a single package).
convertToLegacyPerPackageConfig :: PackageConfig -> LegacyPackageConfig
convertToLegacyPerPackageConfig PackageConfig {..} =
    LegacyPackageConfig {
      legacyConfigureFlags  = configFlags,
      legacyInstallPkgFlags = installFlags,
      legacyHaddockFlags    = haddockFlags
    }
  where
    configFlags = ConfigFlags {
      configArgs                = mempty,
      configPrograms_           = configPrograms_ mempty,
      configProgramPaths        = Map.toList (getMapLast packageConfigProgramPaths),
      configProgramArgs         = Map.toList (getMapMappend packageConfigProgramArgs),
      configProgramPathExtra    = packageConfigProgramPathExtra,
      configHcFlavor            = mempty,
      configHcPath              = mempty,
      configHcPkg               = mempty,
      configInstantiateWith     = mempty,
      configVanillaLib          = packageConfigVanillaLib,
      configProfLib             = packageConfigProfLib,
      configSharedLib           = packageConfigSharedLib,
      configDynExe              = packageConfigDynExe,
      configProfExe             = packageConfigProfExe,
      configProf                = packageConfigProf,
      configProfDetail          = packageConfigProfDetail,
      configProfLibDetail       = packageConfigProfLibDetail,
      configConfigureArgs       = packageConfigConfigureArgs,
      configOptimization        = packageConfigOptimization,
      configProgPrefix          = packageConfigProgPrefix,
      configProgSuffix          = packageConfigProgSuffix,
      configInstallDirs         = mempty,
      configScratchDir          = mempty,
      configDistPref            = mempty,
      configCabalFilePath       = mempty,
      configVerbosity           = mempty,
      configUserInstall         = mempty,
      configPackageDBs          = mempty,
      configGHCiLib             = packageConfigGHCiLib,
      configSplitObjs           = packageConfigSplitObjs,
      configStripExes           = packageConfigStripExes,
      configStripLibs           = packageConfigStripLibs,
      configExtraLibDirs        = packageConfigExtraLibDirs,
      configExtraFrameworkDirs  = packageConfigExtraFrameworkDirs,
      configConstraints         = mempty,
      configDependencies        = mempty,
      configExtraIncludeDirs    = packageConfigExtraIncludeDirs,
      configIPID                = mempty,
      configCID                 = mempty,
      configDeterministic       = mempty,
      configConfigurationsFlags = packageConfigFlagAssignment,
      configTests               = packageConfigTests,
      configCoverage            = packageConfigCoverage, --TODO: don't merge
      configLibCoverage         = packageConfigCoverage, --TODO: don't merge
      configExactConfiguration  = mempty,
      configBenchmarks          = packageConfigBenchmarks,
      configFlagError           = mempty, --TODO: ???
      configRelocatable         = packageConfigRelocatable,
      configDebugInfo           = packageConfigDebugInfo,
      configAllowOlder          = mempty,
      configAllowNewer          = mempty,
      configVerifyMode          = mempty
    }

    installFlags = mempty {
      installDocumentation = packageConfigDocumentation,
      installRunTests      = packageConfigRunTests
    }

    haddockFlags = HaddockFlags {
      haddockProgramPaths = mempty,
      haddockProgramArgs  = mempty,
      haddockHoogle       = packageConfigHaddockHoogle,
      haddockHtml         = packageConfigHaddockHtml,
      haddockHtmlLocation = packageConfigHaddockHtmlLocation,
      haddockForHackage   = mempty, --TODO: added recently
      haddockForeignLibs  = packageConfigHaddockForeignLibs,
      haddockExecutables  = packageConfigHaddockExecutables,
      haddockTestSuites   = packageConfigHaddockTestSuites,
      haddockBenchmarks   = packageConfigHaddockBenchmarks,
      haddockInternal     = packageConfigHaddockInternal,
      haddockCss          = packageConfigHaddockCss,
      haddockHscolour     = packageConfigHaddockHscolour,
      haddockHscolourCss  = packageConfigHaddockHscolourCss,
      haddockContents     = packageConfigHaddockContents,
      haddockDistPref     = mempty,
      haddockKeepTempFiles = mempty,
      haddockVerbosity    = mempty
    }
------------------------------------------------
-- Parsing and showing the project config file
--
-- | Parse the textual contents of a project config file into a
-- 'LegacyProjectConfig', using the field and section descriptors
-- defined in this module and an empty starting value.
parseLegacyProjectConfig :: String -> ParseResult LegacyProjectConfig
parseLegacyProjectConfig contents =
    parseConfig legacyProjectConfigFieldDescrs
                legacyPackageConfigSectionDescrs
                mempty
                contents
-- | Render a 'LegacyProjectConfig' back into project-config-file syntax,
-- with a trailing blank line.
showLegacyProjectConfig :: LegacyProjectConfig -> String
showLegacyProjectConfig config =
    Disp.render (rendered $+$ trailingBlank)
  where
    rendered      = showConfig legacyProjectConfigFieldDescrs
                               legacyPackageConfigSectionDescrs
                               config
    trailingBlank = Disp.text ""
-- | Top-level field descriptors for the project config file: the
-- @packages@, @optional-packages@ and @extra-packages@ lists, plus the
-- shared-config and local-package-config fields lifted into
-- 'LegacyProjectConfig'.
legacyProjectConfigFieldDescrs :: [FieldDescr LegacyProjectConfig]
legacyProjectConfigFieldDescrs =
    [ newLineListField "packages"
        (Disp.text . renderPackageLocationToken) parsePackageLocationTokenQ
        legacyPackages
        (\v flags -> flags { legacyPackages = v })
    , newLineListField "optional-packages"
        (Disp.text . renderPackageLocationToken) parsePackageLocationTokenQ
        legacyPackagesOptional
        (\v flags -> flags { legacyPackagesOptional = v })
    , commaNewLineListField "extra-packages"
        disp parse
        legacyPackagesNamed
        (\v flags -> flags { legacyPackagesNamed = v })
    ]
 ++ map (liftField
           legacySharedConfig
           (\flags conf -> conf { legacySharedConfig = flags }))
        legacySharedConfigFieldDescrs
 ++ map (liftField
           legacyLocalConfig
           (\flags conf -> conf { legacyLocalConfig = flags }))
        legacyPackageConfigFieldDescrs
-- | This is a bit tricky since it has to cover globs which have embedded @,@
-- chars. But we don't just want to parse strictly as a glob since we want to
-- allow http urls which don't parse as globs, and possibly some
-- system-dependent file paths. So we parse fairly liberally as a token, but
-- we allow @,@ inside matched @{}@ braces.
--
-- | Parse one package-location token: either a Haskell-style quoted
-- string, or a liberal unquoted token in which commas are only allowed
-- inside matched @{}@ braces (see the comment above for why).
parsePackageLocationTokenQ :: ReadP r String
parsePackageLocationTokenQ = parseHaskellString
                   Parse.<++ parsePackageLocationToken
  where
    parsePackageLocationToken :: ReadP r String
    parsePackageLocationToken = fmap fst (Parse.gather outerTerm)
      where
        -- outerTerm alternates runs of plain token chars with braced
        -- groups; only innerChar (inside braces) permits ','
        outerTerm   = alternateEither1 outerToken (braces innerTerm)
        innerTerm   = alternateEither  innerToken (braces innerTerm)
        outerToken  = Parse.munch1 outerChar >> return ()
        innerToken  = Parse.munch1 innerChar >> return ()
        outerChar c = not (isSpace c || c == '{' || c == '}' || c == ',')
        innerChar c = not (isSpace c || c == '{' || c == '}')
        braces      = Parse.between (Parse.char '{') (Parse.char '}')

        -- Combinators for alternating runs of two parsers (one-or-more
        -- and zero-or-more variants), used to interleave plain tokens
        -- with braced groups without ambiguity.
        alternateEither, alternateEither1,
          alternatePQs, alternate1PQs, alternateQsP, alternate1QsP
          :: ReadP r () -> ReadP r () -> ReadP r ()

        alternateEither1 p q = alternate1PQs p q +++ alternate1QsP q p
        alternateEither  p q = alternateEither1 p q +++ return ()
        alternate1PQs    p q = p >> alternateQsP q p
        alternatePQs     p q = alternate1PQs p q +++ return ()
        alternate1QsP    q p = Parse.many1 q >> alternatePQs p q
        alternateQsP     q p = alternate1QsP q p +++ return ()
-- | Render a package-location token, quoting it (via 'show') when the raw
-- form would not survive a round trip through 'parsePackageLocationTokenQ':
-- tokens containing whitespace, double quotes, unbalanced braces or
-- top-level commas, and the special forms @.@ and @--...@.
--
-- Fix: the continuation of the TODO comment below had lost its @--@
-- prefix in the source, which made the file fail to compile.
renderPackageLocationToken :: String -> String
renderPackageLocationToken s | needsQuoting = show s
                             | otherwise    = s
  where
    needsQuoting = not (ok 0 s)
                || s == "."     -- . on its own on a line has special meaning
                || take 2 s == "--" -- on its own line is comment syntax
    --TODO: [code cleanup] these "." and "--" escaping issues
    -- ought to be dealt with systematically in ParseUtils.

    -- ok tracks brace nesting depth; the token is acceptable unquoted iff
    -- it ends at depth 0 with no quotes, whitespace or top-level commas
    ok :: Int -> String -> Bool
    ok n []       = n == 0
    ok _ ('"':_)  = False
    ok n ('{':cs) = ok (n+1) cs
    ok n ('}':cs) = ok (n-1) cs
    ok n (',':cs) = (n > 0) && ok n cs
    ok _ (c:_)
      | isSpace c = False
    ok n (_ :cs)  = ok n cs
-- | Field descriptors for the shared (project-wide) legacy config,
-- built by lifting the command-line option definitions of the global,
-- configure, configure-ex and install commands into 'FieldDescr's,
-- filtered to the subset meaningful in a project file, with a few
-- hand-written fields added.
legacySharedConfigFieldDescrs :: [FieldDescr LegacySharedConfig]
legacySharedConfigFieldDescrs =

    ( liftFields
        legacyGlobalFlags
        (\flags conf -> conf { legacyGlobalFlags = flags })
    . addFields
        [ newLineListField "local-repo"
            showTokenQ parseTokenQ
            (fromNubList . globalLocalRepos)
            (\v conf -> conf { globalLocalRepos = toNubList v })
        ]
    . filterFields
        [ "remote-repo-cache"
        , "logs-dir", "store-dir", "ignore-expiry", "http-transport"
        , "patches-dir", "select-eta", "auto-update", "send-metrics"
        , "binaries-dir"
        ]
    . commandOptionsToFields
    ) (commandOptions (globalCommand []) ParseArgs)
 ++
    ( liftFields
        legacyConfigureShFlags
        (\flags conf -> conf { legacyConfigureShFlags = flags })
    . addFields
        [ simpleField "allow-older"
            (maybe mempty dispRelaxDeps) (fmap Just parseRelaxDeps)
            (fmap unAllowOlder . configAllowOlder)
            (\v conf -> conf { configAllowOlder = fmap AllowOlder v })
        ]
    . addFields
        [ simpleField "allow-newer"
            (maybe mempty dispRelaxDeps) (fmap Just parseRelaxDeps)
            (fmap unAllowNewer . configAllowNewer)
            (\v conf -> conf { configAllowNewer = fmap AllowNewer v })
        ]
    . filterFields ["verbose", "builddir" ]
    . commandOptionsToFields
    ) (configureOptions ParseArgs)
 ++
    ( liftFields
        legacyConfigureExFlags
        (\flags conf -> conf { legacyConfigureExFlags = flags })
    . addFields
        [ commaNewLineListField "constraints"
            (disp . fst) (fmap (\constraint -> (constraint, constraintSrc)) parse)
            configExConstraints (\v conf -> conf { configExConstraints = v })

        , commaNewLineListField "preferences"
            disp parse
            configPreferences (\v conf -> conf { configPreferences = v })
        ]
    . filterFields
        [ "cabal-lib-version", "solver"
          -- not "constraint" or "preference", we use our own plural ones above
        ]
    . commandOptionsToFields
    ) (configureExOptions ParseArgs constraintSrc)
 ++
    ( liftFields
        legacyInstallFlags
        (\flags conf -> conf { legacyInstallFlags = flags })
    . addFields
        [ newLineListField "build-summary"
            (showTokenQ . fromPathTemplate) (fmap toPathTemplate parseTokenQ)
            (fromNubList . installSummaryFile)
            (\v conf -> conf { installSummaryFile = toNubList v })
        ]
    . filterFields
        [ "doc-index-file"
        , "root-cmd", "symlink-bindir"
        , "build-log"
        , "remote-build-reporting", "report-planning-failure"
        , "one-shot", "jobs", "keep-going", "offline", "per-component"
          -- solver flags:
        , "max-backjumps", "reorder-goals", "count-conflicts", "independent-goals"
        , "strong-flags", "allow-boot-library-installs", "index-state"
        ]
    . commandOptionsToFields
    ) (installOptions ParseArgs)
  where
    constraintSrc = ConstraintSourceProjectConfig "TODO"

    -- accept "none"/"None", "all"/"All", or a comma list of constraints
    parseRelaxDeps :: ReadP r RelaxDeps
    parseRelaxDeps =
          ((const RelaxDepsNone <$> (Parse.string "none" +++ Parse.string "None"))
      +++ (const RelaxDepsAll  <$> (Parse.string "all"  +++ Parse.string "All")))
      <++ ( RelaxDepsSome <$> parseOptCommaList parse)

    dispRelaxDeps :: RelaxDeps -> Doc
    dispRelaxDeps RelaxDepsNone        = Disp.text "None"
    dispRelaxDeps (RelaxDepsSome pkgs) = Disp.fsep . Disp.punctuate Disp.comma
                                       . map disp $ pkgs
    dispRelaxDeps RelaxDepsAll         = Disp.text "All"
-- | Field descriptors for per-package legacy config: configure flags
-- (with corrected list fields and hand-written overrides for fields
-- that 'viewAsFieldDescr' cannot handle), install flags, and haddock
-- flags renamed under a @docs-@ prefix.
--
-- Fix: the second line of the \"hack\" comment in the @where@ clause had
-- lost its @--@ prefix in the source, which made the file fail to compile.
legacyPackageConfigFieldDescrs :: [FieldDescr LegacyPackageConfig]
legacyPackageConfigFieldDescrs =
    ( liftFields
        legacyConfigureFlags
        (\flags conf -> conf { legacyConfigureFlags = flags })
    . addFields
        [ newLineListField "extra-include-dirs"
            showTokenQ parseTokenQ
            configExtraIncludeDirs
            (\v conf -> conf { configExtraIncludeDirs = v })
        , newLineListField "extra-lib-dirs"
            showTokenQ parseTokenQ
            configExtraLibDirs
            (\v conf -> conf { configExtraLibDirs = v })
        , newLineListField "extra-framework-dirs"
            showTokenQ parseTokenQ
            configExtraFrameworkDirs
            (\v conf -> conf { configExtraFrameworkDirs = v })
        , newLineListField "extra-prog-path"
            showTokenQ parseTokenQ
            (fromNubList . configProgramPathExtra)
            (\v conf -> conf { configProgramPathExtra = toNubList v })
        , newLineListField "configure-options"
            showTokenQ parseTokenQ
            configConfigureArgs
            (\v conf -> conf { configConfigureArgs = v })
        , simpleField "flags"
            dispFlagAssignment parseFlagAssignment
            configConfigurationsFlags
            (\v conf -> conf { configConfigurationsFlags = v })
        , newLineListField "package-dbs"
            (showTokenQ . showPackageDb) (fmap readPackageDb parseTokenQ)
            configPackageDBs
            (\v conf -> conf { configPackageDBs = v })
        ]
    . filterFields
        [ "with-compiler", "with-hc-pkg"
        , "program-prefix", "program-suffix"
        , "library-vanilla", "library-profiling"
        , "shared", "executable-dynamic", "uberjar-mode"
        , "profiling", "executable-profiling"
        , "profiling-detail", "library-profiling-detail"
        , "library-for-ghci", "split-objs"
        , "executable-stripping", "library-stripping"
        , "tests", "benchmarks"
        , "coverage", "library-coverage"
        , "relocatable", "verify"
          -- not "extra-include-dirs", "extra-lib-dirs", "extra-framework-dirs"
          -- or "extra-prog-path". We use corrected ones above that parse
          -- as list fields.
        ]
    . commandOptionsToFields
    ) (configureOptions ParseArgs)
 ++
    liftFields
      legacyConfigureFlags
      (\flags conf -> conf { legacyConfigureFlags = flags })
      [ overrideFieldCompiler
      , overrideFieldOptimization
      , overrideFieldDebugInfo
      ]
 ++
    ( liftFields
        legacyInstallPkgFlags
        (\flags conf -> conf { legacyInstallPkgFlags = flags })
    . filterFields
        [ "documentation", "run-tests"
        ]
    . commandOptionsToFields
    ) (installOptions ParseArgs)
 ++
    ( liftFields
        legacyHaddockFlags
        (\flags conf -> conf { legacyHaddockFlags = flags })
    . mapFieldNames
        ("docs-"++)
    . filterFields
        [ "hoogle", "html", "html-location"
        , "foreign-libraries"
        , "executables", "tests", "benchmarks", "all", "internal", "css"
        , "hyperlink-source", "hscolour-css"
        , "contents-location", "keep-temp-files"
        ]
    . commandOptionsToFields
    ) (haddockOptions ParseArgs)
  where
    overrideFieldCompiler =
      simpleField "compiler"
        (fromFlagOrDefault Disp.empty . fmap disp)
        (Parse.option mempty (fmap toFlag parse))
        configHcFlavor (\v flags -> flags { configHcFlavor = v })

    -- TODO: [code cleanup] The following is a hack. The "optimization" and
    -- "debug-info" fields are OptArg, and viewAsFieldDescr fails on that.
    -- Instead of a hand-written parser and printer, we should handle this case
    -- properly in the library.
    overrideFieldOptimization =
      liftField configOptimization
                (\v flags -> flags { configOptimization = v }) $
      let name = "optimization" in
      FieldDescr name
        (\f -> case f of
                 Flag NoOptimisation      -> Disp.text "False"
                 Flag NormalOptimisation  -> Disp.text "True"
                 Flag MaximumOptimisation -> Disp.text "2"
                 _                        -> Disp.empty)
        (\line str _ -> case () of
         _ |  str == "False" -> ParseOk [] (Flag NoOptimisation)
           |  str == "True"  -> ParseOk [] (Flag NormalOptimisation)
           |  str == "0"     -> ParseOk [] (Flag NoOptimisation)
           |  str == "1"     -> ParseOk [] (Flag NormalOptimisation)
           |  str == "2"     -> ParseOk [] (Flag MaximumOptimisation)
           | lstr == "false" -> ParseOk [caseWarning] (Flag NoOptimisation)
           | lstr == "true"  -> ParseOk [caseWarning] (Flag NormalOptimisation)
           | otherwise       -> ParseFailed (NoParse name line)
           where
             lstr = lowercase str
             caseWarning = PWarning $
               "The '" ++ name ++ "' field is case sensitive, use 'True' or 'False'.")

    overrideFieldDebugInfo =
      liftField configDebugInfo (\v flags -> flags { configDebugInfo = v }) $
      let name = "debug-info" in
      FieldDescr name
        (\f -> case f of
                 Flag NoDebugInfo      -> Disp.text "False"
                 Flag MinimalDebugInfo -> Disp.text "1"
                 Flag NormalDebugInfo  -> Disp.text "True"
                 Flag MaximalDebugInfo -> Disp.text "3"
                 _                     -> Disp.empty)
        (\line str _ -> case () of
         _ |  str == "False" -> ParseOk [] (Flag NoDebugInfo)
           |  str == "True"  -> ParseOk [] (Flag NormalDebugInfo)
           |  str == "0"     -> ParseOk [] (Flag NoDebugInfo)
           |  str == "1"     -> ParseOk [] (Flag MinimalDebugInfo)
           |  str == "2"     -> ParseOk [] (Flag NormalDebugInfo)
           |  str == "3"     -> ParseOk [] (Flag MaximalDebugInfo)
           | lstr == "false" -> ParseOk [caseWarning] (Flag NoDebugInfo)
           | lstr == "true"  -> ParseOk [caseWarning] (Flag NormalDebugInfo)
           | otherwise       -> ParseFailed (NoParse name line)
           where
             lstr = lowercase str
             caseWarning = PWarning $
               "The '" ++ name ++ "' field is case sensitive, use 'True' or 'False'.")
-- | Section descriptors for the project config file:
-- @source-repository-package@, @all-packages@, per-@package@ sections,
-- @program-options@ and @program-locations@ (lifted into the local
-- config), and @repository@ (lifted into the shared global flags).
legacyPackageConfigSectionDescrs :: [SectionDescr LegacyProjectConfig]
legacyPackageConfigSectionDescrs =
    [ packageRepoSectionDescr
    , allPackagesOptionsSectionDescr
    , packageSpecificOptionsSectionDescr
    , liftSection
        legacyLocalConfig
        (\flags conf -> conf { legacyLocalConfig = flags })
        programOptionsSectionDescr
    , liftSection
        legacyLocalConfig
        (\flags conf -> conf { legacyLocalConfig = flags })
        programLocationsSectionDescr
    , liftSection
        legacySharedConfig
        (\flags conf -> conf { legacySharedConfig = flags }) $
      liftSection
        legacyGlobalFlags
        (\flags conf -> conf { legacyGlobalFlags = flags })
        remoteRepoSectionDescr
    ]
-- | The @source-repository-package@ section: takes no section argument
-- and appends each parsed 'SourceRepo' to 'legacyPackagesRepo'.
packageRepoSectionDescr :: SectionDescr LegacyProjectConfig
packageRepoSectionDescr =
    SectionDescr {
      sectionName        = "source-repository-package",
      sectionFields      = sourceRepoFieldDescrs,
      sectionSubsections = [],
      sectionGet         = map (\x->("", x))
                         . legacyPackagesRepo,
      sectionSet         =
        \lineno unused pkgrepo projconf -> do
          -- this section takes no argument after the section name
          unless (null unused) $
            syntaxError lineno "the section 'source-repository-package' takes no arguments"
          return projconf {
            legacyPackagesRepo = legacyPackagesRepo projconf ++ [pkgrepo]
          },
      sectionEmpty       = SourceRepo {
                             repoKind     = RepoThis, -- hopefully unused
                             repoType     = Nothing,
                             repoLocation = Nothing,
                             repoModule   = Nothing,
                             repoBranch   = Nothing,
                             repoTag      = Nothing,
                             repoCommit   = Nothing,
                             repoSubdir   = Nothing
                           }
    }
-- | Fields valid inside @all-packages@ and per-@package@ sections: the
-- regular per-package fields plus inline program options (@PROG-options@)
-- and program locations (@PROG-location@), both stored in the section's
-- 'legacyConfigureFlags'.
packageSpecificOptionsFieldDescrs :: [FieldDescr LegacyPackageConfig]
packageSpecificOptionsFieldDescrs =
    legacyPackageConfigFieldDescrs
 ++ programOptionsFieldDescrs
      (configProgramArgs . legacyConfigureFlags)
      (\args pkgconf -> pkgconf {
          legacyConfigureFlags = (legacyConfigureFlags pkgconf) {
            configProgramArgs = args
          }
        }
      )
 ++ liftFields
      legacyConfigureFlags
      (\flags pkgconf -> pkgconf {
          legacyConfigureFlags = flags
        }
      )
      programLocationsFieldDescrs
-- | The @all-packages@ section: takes no section argument and merges
-- its fields monoidally into 'legacyAllConfig'.
allPackagesOptionsSectionDescr :: SectionDescr LegacyProjectConfig
allPackagesOptionsSectionDescr =
    SectionDescr {
      sectionName        = "all-packages",
      sectionFields      = packageSpecificOptionsFieldDescrs,
      sectionSubsections = [],
      sectionGet         = (\x->[("", x)])
                         . legacyAllConfig,
      sectionSet         =
        \lineno unused pkgsconf projconf -> do
          unless (null unused) $
            syntaxError lineno "the section 'all-packages' takes no arguments"
          return projconf {
            legacyAllConfig = legacyAllConfig projconf <> pkgsconf
          },
      sectionEmpty       = mempty
    }
-- | A per-@package@ section: requires a parsable package name as the
-- section argument and mappends the parsed config into the entry for
-- that package in 'legacySpecificConfig'.
packageSpecificOptionsSectionDescr :: SectionDescr LegacyProjectConfig
packageSpecificOptionsSectionDescr =
    SectionDescr {
      sectionName        = "package",
      sectionFields      = packageSpecificOptionsFieldDescrs,
      sectionSubsections = [],
      sectionGet         = \projconf ->
                             [ (display pkgname, pkgconf)
                             | (pkgname, pkgconf) <-
                                 Map.toList . getMapMappend
                               . legacySpecificConfig $ projconf ],
      sectionSet         =
        \lineno pkgnamestr pkgconf projconf -> do
          -- the section argument must parse as a package name
          pkgname <- case simpleParse pkgnamestr of
            Just pkgname -> return pkgname
            Nothing      -> syntaxError lineno $
                                "a 'package' section requires a package name "
                             ++ "as an argument"
          return projconf {
            legacySpecificConfig =
              MapMappend $
              Map.insertWith mappend pkgname pkgconf
                             (getMapMappend $ legacySpecificConfig projconf)
          },
      sectionEmpty       = mempty
    }
-- | Field descriptors for per-program extra options (@PROG-options@),
-- parameterised over how to get and set the option lists on the target.
programOptionsFieldDescrs :: (a -> [(String, [String])])
                          -> ([(String, [String])] -> a -> a)
                          -> [FieldDescr a]
programOptionsFieldDescrs getter setter =
    commandOptionsToFields
      (programDbOptions defaultProgramDb ParseArgs getter setter)
-- | The @program-options@ section: takes no section argument and merges
-- the parsed configure flags into the section's 'legacyConfigureFlags'.
programOptionsSectionDescr :: SectionDescr LegacyPackageConfig
programOptionsSectionDescr =
    SectionDescr {
      sectionName        = "program-options",
      sectionFields      = programOptionsFieldDescrs
                             configProgramArgs
                             (\args conf -> conf { configProgramArgs = args }),
      sectionSubsections = [],
      sectionGet         = (\x->[("", x)])
                         . legacyConfigureFlags,
      sectionSet         =
        \lineno unused confflags pkgconf -> do
          unless (null unused) $
            syntaxError lineno "the section 'program-options' takes no arguments"
          return pkgconf {
            legacyConfigureFlags = legacyConfigureFlags pkgconf <> confflags
          },
      sectionEmpty       = mempty
    }
-- | Field descriptors for per-program paths (@PROG-location@), stored
-- in 'configProgramPaths'.
programLocationsFieldDescrs :: [FieldDescr ConfigFlags]
programLocationsFieldDescrs =
    commandOptionsToFields
      (programDbPaths'
         (++ "-location")
         defaultProgramDb
         ParseArgs
         configProgramPaths
         (\progPaths flags -> flags { configProgramPaths = progPaths }))
-- | The @program-locations@ section: takes no section argument and merges
-- the parsed configure flags into the section's 'legacyConfigureFlags'.
programLocationsSectionDescr :: SectionDescr LegacyPackageConfig
programLocationsSectionDescr =
    SectionDescr {
      sectionName        = "program-locations",
      sectionFields      = programLocationsFieldDescrs,
      sectionSubsections = [],
      sectionGet         = (\x->[("", x)])
                         . legacyConfigureFlags,
      sectionSet         =
        \lineno unused confflags pkgconf -> do
          unless (null unused) $
            syntaxError lineno "the section 'program-locations' takes no arguments"
          return pkgconf {
            legacyConfigureFlags = legacyConfigureFlags pkgconf <> confflags
          },
      sectionEmpty       = mempty
    }
-- | For each known program @PROG@ in 'progDb', produce a @PROG-options@
-- 'OptionField'.
--
-- Fix: the Haddock comment above had lost its @--@ prefixes in the
-- source, which made the file fail to compile.
programDbOptions
  :: ProgramDb
  -> ShowOrParseArgs
  -> (flags -> [(String, [String])])
  -> ([(String, [String])] -> (flags -> flags))
  -> [OptionField flags]
programDbOptions progDb showOrParseArgs get' set =
  case showOrParseArgs of
    -- we don't want a verbose help text list so we just show a generic one:
    ShowArgs  -> [programOptions "PROG"]
    ParseArgs -> map (programOptions . programName . fst)
                     (knownPrograms progDb)
  where
    programOptions prog =
      option "" [prog ++ "-options"]
        ("give extra options to " ++ prog)
        get' set
        (reqArg' "OPTS" (\args -> [(prog, splitArgs args)])
           (\progArgs -> [ joinsArgs args
                         | (prog', args) <- progArgs, prog==prog' ]))

    -- render the stored args back to a single option string, quoting
    -- any argument that contains whitespace
    joinsArgs = unwords . map escape
    escape arg | any isSpace arg = "\"" ++ arg ++ "\""
               | otherwise       = arg
-- | The @repository@ section: requires a repository name argument,
-- warns about inconsistent security settings ('key-threshold' larger
-- than the key list, or root keys without @secure: True@), and appends
-- the repo to 'globalRemoteRepos'.
remoteRepoSectionDescr :: SectionDescr GlobalFlags
remoteRepoSectionDescr =
    SectionDescr {
      sectionName        = "repository",
      sectionFields      = remoteRepoFields,
      sectionSubsections = [],
      sectionGet         = map (\x->(remoteRepoName x, x)) . fromNubList
                         . globalRemoteRepos,
      sectionSet         =
        \lineno reponame repo0 conf -> do
          when (null reponame) $
            syntaxError lineno $ "a 'repository' section requires the "
                              ++ "repository name as an argument"
          let repo = repo0 { remoteRepoName = reponame }
          when (remoteRepoKeyThreshold repo
                  > length (remoteRepoRootKeys repo)) $
            warning $ "'key-threshold' for repository "
                   ++ show (remoteRepoName repo)
                   ++ " higher than number of keys"
          when (not (null (remoteRepoRootKeys repo))
                && remoteRepoSecure repo /= Just True) $
            warning $ "'root-keys' for repository "
                   ++ show (remoteRepoName repo)
                   ++ " non-empty, but 'secure' not set to True."
          return conf {
            globalRemoteRepos = overNubList (++[repo]) (globalRemoteRepos conf)
          },
      sectionEmpty       = emptyRemoteRepo ""
    }
-------------------------------
-- Local field utils
--
--TODO: [code cleanup] all these utils should move to Distribution.ParseUtils
-- either augmenting or replacing the ones there
--TODO: [code cleanup] this is a different definition from listField, like
-- commaNewLineListField it pretty prints on multiple lines
-- | A list field that pretty-prints each element on its own line,
-- using 'Disp.sep' as the separator.
newLineListField :: String -> (a -> Doc) -> ReadP [a] a
                 -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b
newLineListField = listFieldWithSep Disp.sep
--TODO: [code cleanup] local copy purely so we can use the fixed version
-- of parseOptCommaList below
-- | A list field with a configurable document separator.  Setting
-- appends the newly parsed elements to any elements already present,
-- so repeated occurrences of the field accumulate.
listFieldWithSep :: ([Doc] -> Doc) -> String -> (a -> Doc) -> ReadP [a] a
                 -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b
listFieldWithSep separator name showF readF get' set =
    liftField get' appendingSet $
      ParseUtils.field name render (parseOptCommaList readF)
  where
    -- append parsed elements rather than replacing the existing list
    appendingSet xs b = set (get' b ++ xs) b
    render = separator . map showF
--TODO: [code cleanup] local redefinition that should replace the version in
-- D.ParseUtils. This version avoid parse ambiguity for list element parsers
-- that have multiple valid parses of prefixes.
-- | Parse a list of items separated either by commas (with optional
-- surrounding whitespace) or by plain whitespace.
parseOptCommaList :: ReadP r a -> ReadP r [a]
parseOptCommaList p = Parse.sepBy p sep
  where
    -- The separator must not be empty or it introduces ambiguity
    sep = (Parse.skipSpaces >> Parse.char ',' >> Parse.skipSpaces)
      +++ (Parse.satisfy isSpace >> Parse.skipSpaces)
--TODO: [code cleanup] local redefinition that should replace the version in
-- D.ParseUtils called showFilePath. This version escapes "." and "--" which
-- otherwise are special syntax.
-- | Render a token, additionally quoting (via 'show') the special forms
-- "--" prefixes and a bare ".", which would otherwise be read back as
-- comment syntax or a special path.
showTokenQ :: String -> Doc
showTokenQ str
  | null str            = Disp.empty
  | take 2 str == "--"  = Disp.text (show str)
  | str == "."          = Disp.text (show str)
  | otherwise           = showToken str
-- This is just a copy of parseTokenQ, using the fixed parseHaskellString
-- | Parse a token: a Haskell-style quoted string, or an unquoted run of
-- characters containing neither whitespace nor commas.
parseTokenQ :: ReadP r String
parseTokenQ = parseHaskellString
          <++ Parse.munch1 (\c -> not (isSpace c || c == ','))
--TODO: [code cleanup] use this to replace the parseHaskellString in
-- Distribution.ParseUtils. It turns out the Read instance for String accepts
-- the ['a', 'b'] syntax, which we do not want. In particular it messes
-- up any token starting with [].
-- | Parse a Haskell string literal using the 'Read' lexer, so only the
-- double-quoted form is accepted (not the @['a','b']@ list-of-chars form
-- that plain 'reads' would also allow).
parseHaskellString :: ReadP r String
parseHaskellString =
    Parse.readS_to_P $
      Read.readPrec_to_S (do Read.String s <- Read.lexP; return s) 0
-- Handy util
-- | Prepend extra hand-written field descriptors to a generated list.
addFields :: [FieldDescr a]
          -> ([FieldDescr a] -> [FieldDescr a])
addFields extraFields = (extraFields ++)
| null | https://raw.githubusercontent.com/typelead/etlas/bbd7c558169e1fda086e759e1a6f8c8ca2807583/etlas/Distribution/Client/ProjectConfig/Legacy.hs | haskell | | Project configuration, implementation in terms of legacy types.
* Project config in terms of legacy types
* Conversion to and from legacy config types
* Internals, just for tests
----------------------------------------------------------------
Representing the project config file in terms of legacy types
project config file, but they're in terms of the types used for the command
line flags for Setup.hs or etlas commands. We don't want to redefine them
all, at least not yet so for the moment we use the parsers at the old types
and use conversion functions.
Ultimately if\/when this project-based approach becomes the default then we
can redefine the parsers directly for the new types.
----------------------------------------------------------------
Converting from and to the legacy types
line into a 'ProjectConfig' value that can combined with configuration from
other sources.
At the moment this uses the legacy command line flag types. See
| Convert from the types currently used for the user-wide @~/.etlas/config@
file into the 'ProjectConfig' type.
Only a subset of the 'ProjectConfig' can be represented in the user-wide
config. In particular it does not include packages that are in the project,
and it also doesn't support package-specific configuration (only
configuration that applies to all packages).
TODO: [code cleanup] eliminate use of default*Flags here and specify the
defaults in the various resolve functions in terms of the new types.
| Convert the project config from the legacy types to the 'ProjectConfig'
approach.
| Helper used by other conversion functions that returns the
'ProjectConfigShared' subset of the 'ProjectConfig'.
??
configUserInstall = projectConfigUserInstall,
installReinstall = projectConfigReinstall,
installAvoidReinstalls = projectConfigAvoidReinstalls,
installOverrideReinstall = projectConfigOverrideReinstall,
installUpgradeDeps = projectConfigUpgradeDeps,
| Helper used by other conversion functions that returns the
'PackageConfig' subset of the 'ProjectConfig'.
deprecated
TODO: defer this merging to the resolve phase
| Helper used by other conversion functions that returns the
'ProjectConfigBuildOnly' subset of the 'ProjectConfig'.
TODO: this ought to live elsewhere
projectConfigReinstall,
projectConfigAvoidReinstalls,
projectConfigOverrideReinstall,
projectConfigUpgradeDeps,
no longer supported
projectConfigUserInstall,
TODO: don't merge
TODO: don't merge
TODO: ???
TODO: don't merge
TODO: don't merge
TODO: ???
TODO: added recently
----------------------------------------------
Parsing and showing the project config file
| This is a bit tricky since it has to cover globs which have embedded @,@
chars. But we don't just want to parse strictly as a glob since we want to
allow http urls which don't parse as globs, and possibly some
system-dependent file paths. So we parse fairly liberally as a token, but
we allow @,@ inside matched @{}@ braces.
. on its own on a line has special meaning
on its own line is comment syntax
TODO: [code cleanup] these "." and "--" escaping issues
not "constraint" or "preference", we use our own plural ones above
solver flags:
not "extra-include-dirs", "extra-lib-dirs", "extra-framework-dirs"
or "extra-prog-path". We use corrected ones above that parse
as list fields.
TODO: [code cleanup] The following is a hack. The "optimization" and
Instead of a hand-written parser and printer, we should handle this case
properly in the library.
hopefully unused
'OptionField'.
we don't want a verbose help text list so we just show a generic one:
-----------------------------
Local field utils
TODO: [code cleanup] all these utils should move to Distribution.ParseUtils
either augmenting or replacing the ones there
commaNewLineListField it pretty prints on multiple lines
TODO: [code cleanup] local copy purely so we can use the fixed version
of parseOptCommaList below
TODO: [code cleanup] local redefinition that should replace the version in
D.ParseUtils. This version avoid parse ambiguity for list element parsers
that have multiple valid parses of prefixes.
The separator must not be empty or it introduces ambiguity
TODO: [code cleanup] local redefinition that should replace the version in
otherwise are special syntax.
TODO: [code cleanup] use this to replace the parseHaskellString in
the ['a', 'b'] syntax, which we do not want. In particular it messes
up any token starting with []. | # LANGUAGE RecordWildCards , NamedFieldPuns , DeriveGeneric #
module Distribution.Client.ProjectConfig.Legacy (
LegacyProjectConfig,
parseLegacyProjectConfig,
showLegacyProjectConfig,
commandLineFlagsToProjectConfig,
convertLegacyProjectConfig,
convertLegacyGlobalConfig,
convertToLegacyProjectConfig,
parsePackageLocationTokenQ,
renderPackageLocationToken,
) where
import Prelude ()
import Distribution.Client.Compat.Prelude
import Distribution.Client.ProjectConfig.Types
import Distribution.Client.Types
( RemoteRepo(..), emptyRemoteRepo )
import Distribution.Client.Config
( SavedConfig(..), remoteRepoFields )
import Distribution.Solver.Types.ConstraintSource
import Distribution.Package
import Distribution.Types.Dependency
import Distribution.PackageDescription
( SourceRepo(..), RepoKind(..)
, dispFlagAssignment, parseFlagAssignment )
import Distribution.PackageDescription.Parse
( sourceRepoFieldDescrs )
import Distribution.Simple.Compiler
( OptimisationLevel(..), DebugInfoLevel(..) )
import Distribution.Simple.Setup
( Flag(Flag), toFlag, fromFlagOrDefault
, ConfigFlags(..), configureOptions
, HaddockFlags(..), haddockOptions, defaultHaddockFlags
, programDbPaths', splitArgs, showPackageDb, readPackageDb
, AllowNewer(..), AllowOlder(..), RelaxDeps(..) )
import Distribution.Client.Setup
( GlobalFlags(..), globalCommand
, ConfigExFlags(..), configureExOptions, defaultConfigExFlags
, InstallFlags(..), installOptions, defaultInstallFlags )
import Distribution.Simple.Program
( programName, knownPrograms )
import Distribution.Simple.Program.Db
( ProgramDb, defaultProgramDb )
import Distribution.Simple.Utils
( lowercase )
import Distribution.Utils.NubList
( toNubList, fromNubList, overNubList )
import Distribution.Simple.LocalBuildInfo
( toPathTemplate, fromPathTemplate )
import Distribution.Text
import qualified Distribution.Compat.ReadP as Parse
import Distribution.Compat.ReadP
( ReadP, (+++), (<++) )
import qualified Text.Read as Read
import qualified Text.PrettyPrint as Disp
import Text.PrettyPrint
( Doc, ($+$) )
import qualified Distribution.ParseUtils as ParseUtils (field)
import Distribution.ParseUtils
( ParseResult(..), PError(..), syntaxError, PWarning(..), warning
, simpleField, commaNewLineListField
, showToken )
import Distribution.Client.ParseUtils
import Distribution.Simple.Command
( CommandUI(commandOptions), ShowOrParseArgs(..)
, OptionField, option, reqArg' )
import qualified Data.Map as Map
-- | We already have parsers/pretty-printers for almost all the fields in the
-- project config file; this module reuses them for the legacy representation.
-- | The project configuration in its legacy, flag-based representation:
-- the package location fields plus four groups of flags (shared,
-- all-packages, local-packages and per-package).
data LegacyProjectConfig = LegacyProjectConfig {
       -- package locations are kept as unparsed tokens (globs, paths, urls)
       legacyPackages          :: [String],
       legacyPackagesOptional  :: [String],
       legacyPackagesRepo      :: [SourceRepo],
       legacyPackagesNamed     :: [Dependency],

       legacySharedConfig      :: LegacySharedConfig,
       legacyAllConfig         :: LegacyPackageConfig,
       legacyLocalConfig       :: LegacyPackageConfig,
       -- per-package option sections, keyed by package name
       legacySpecificConfig    :: MapMappend PackageName LegacyPackageConfig
     } deriving Generic

-- Combined generically, field by field.
instance Monoid LegacyProjectConfig where
  mempty  = gmempty
  mappend = (<>)

instance Semigroup LegacyProjectConfig where
  (<>) = gmappend
-- | The per-package flag groups of the legacy representation: configure,
-- install and haddock flags for a single package (or for all packages).
data LegacyPackageConfig = LegacyPackageConfig {
       legacyConfigureFlags    :: ConfigFlags,
       legacyInstallPkgFlags   :: InstallFlags,
       legacyHaddockFlags      :: HaddockFlags
     } deriving Generic

-- Combined generically, field by field.
instance Monoid LegacyPackageConfig where
  mempty  = gmempty
  mappend = (<>)

instance Semigroup LegacyPackageConfig where
  (<>) = gmappend
-- | The flag groups of the legacy representation that are shared across the
-- whole project: global, configure (shared subset), configure-ex and
-- install flags.
data LegacySharedConfig = LegacySharedConfig {
       legacyGlobalFlags       :: GlobalFlags,
       legacyConfigureShFlags  :: ConfigFlags,
       legacyConfigureExFlags  :: ConfigExFlags,
       legacyInstallFlags      :: InstallFlags
     } deriving Generic

-- Combined generically, field by field.
instance Monoid LegacySharedConfig where
  mempty  = gmempty
  mappend = (<>)

instance Semigroup LegacySharedConfig where
  (<>) = gmappend
-- | Convert configuration from the @etlas configure@ or @etlas build@ command
-- line into a 'ProjectConfig' value. See 'ProjectConfig' for an explanation.
-- | Combine the flag groups collected from the command line into a
-- 'ProjectConfig', using the same conversions as for the legacy file
-- representation.
commandLineFlagsToProjectConfig :: GlobalFlags
                                -> ConfigFlags  -> ConfigExFlags
                                -> InstallFlags -> HaddockFlags
                                -> ProjectConfig
commandLineFlagsToProjectConfig globalFlags configFlags configExFlags
                                installFlags haddockFlags =
    mempty {
      projectConfigBuildOnly     = convertLegacyBuildOnlyFlags
                                     globalFlags configFlags
                                     installFlags haddockFlags,
      projectConfigShared        = convertLegacyAllPackageFlags
                                     globalFlags configFlags
                                     configExFlags installFlags,
      -- per-package flags from the command line apply to the local packages
      projectConfigLocalPackages = convertLegacyPerPackageFlags
                                     configFlags installFlags haddockFlags
    }
-- | Convert a user-wide 'SavedConfig' (the global config file) into a
-- 'ProjectConfig', applying the defaults for the configure-ex, install and
-- haddock flag groups before converting.
convertLegacyGlobalConfig :: SavedConfig -> ProjectConfig
convertLegacyGlobalConfig
    SavedConfig {
      savedGlobalFlags       = globalFlags,
      savedInstallFlags      = installFlags,
      savedConfigureFlags    = configFlags,
      savedConfigureExFlags  = configExFlags,
      -- install dirs, upload and report settings are not used here
      savedUserInstallDirs   = _,
      savedGlobalInstallDirs = _,
      savedUploadFlags       = _,
      savedReportFlags       = _,
      savedHaddockFlags      = haddockFlags
    } =
    mempty {
      projectConfigShared        = configAllPackages,
      projectConfigLocalPackages = configLocalPackages,
      projectConfigBuildOnly     = configBuildOnly
    }
  where
    -- defaults go on the left so the saved values win on conflict
    configExFlags' = defaultConfigExFlags <> configExFlags
    installFlags'  = defaultInstallFlags  <> installFlags
    haddockFlags'  = defaultHaddockFlags  <> haddockFlags

    configLocalPackages = convertLegacyPerPackageFlags
                            configFlags installFlags' haddockFlags'
    configAllPackages   = convertLegacyAllPackageFlags
                            globalFlags configFlags
                            configExFlags' installFlags'
    configBuildOnly     = convertLegacyBuildOnlyFlags
                            globalFlags configFlags
                            installFlags' haddockFlags'
-- | Convert the legacy project configuration to the new 'ProjectConfig'
-- and associated types. See 'ProjectConfig' for an explanation of the
-- representation.
-- | Convert from the 'LegacyProjectConfig' representation (as parsed from a
-- project file) to the 'ProjectConfig' representation used internally.
convertLegacyProjectConfig :: LegacyProjectConfig -> ProjectConfig
convertLegacyProjectConfig
  LegacyProjectConfig {
    legacyPackages,
    legacyPackagesOptional,
    legacyPackagesRepo,
    legacyPackagesNamed,
    legacySharedConfig = LegacySharedConfig globalFlags configShFlags
                                            configExFlags installSharedFlags,
    legacyAllConfig,
    legacyLocalConfig = LegacyPackageConfig configFlags installPerPkgFlags
                                            haddockFlags,
    legacySpecificConfig
  } =

    ProjectConfig {
      projectPackages              = legacyPackages,
      projectPackagesOptional      = legacyPackagesOptional,
      projectPackagesRepo          = legacyPackagesRepo,
      projectPackagesNamed         = legacyPackagesNamed,

      projectConfigBuildOnly       = configBuildOnly,
      projectConfigShared          = configPackagesShared,
      -- provenance is filled in by the caller, not by the file parser
      projectConfigProvenance      = mempty,
      projectConfigAllPackages     = configAllPackages,
      projectConfigLocalPackages   = configLocalPackages,
      projectConfigSpecificPackage = fmap perPackage legacySpecificConfig
    }
  where
    configAllPackages   = convertLegacyPerPackageFlags g i h
      where LegacyPackageConfig g i h = legacyAllConfig
    configLocalPackages = convertLegacyPerPackageFlags
                            configFlags installPerPkgFlags haddockFlags
    -- the local configure flags are merged into the shared ones here
    configPackagesShared= convertLegacyAllPackageFlags
                            globalFlags (configFlags <> configShFlags)
                            configExFlags installSharedFlags

    configBuildOnly     = convertLegacyBuildOnlyFlags
                            globalFlags configShFlags
                            installSharedFlags haddockFlags

    perPackage (LegacyPackageConfig perPkgConfigFlags perPkgInstallFlags
                                    perPkgHaddockFlags) =
      convertLegacyPerPackageFlags
        perPkgConfigFlags perPkgInstallFlags perPkgHaddockFlags
-- | Select the subset of the legacy flag groups that applies to the solver
-- and to all packages. Each pattern binding renames a legacy field to its
-- 'ProjectConfigShared' counterpart, which the @RecordWildCards@
-- construction then picks up.
convertLegacyAllPackageFlags :: GlobalFlags -> ConfigFlags
                             -> ConfigExFlags -> InstallFlags
                             -> ProjectConfigShared
convertLegacyAllPackageFlags globalFlags configFlags
                             configExFlags installFlags =
    ProjectConfigShared{..}
  where
    GlobalFlags {
      globalConfigFile        = projectConfigConfigFile,
      globalRemoteRepos       = projectConfigRemoteRepos,
      globalLocalRepos        = projectConfigLocalRepos,
      globalPatchesDir        = projectConfigPatchesDir,
      globalEtaVersion        = projectConfigEtaVersion
    } = globalFlags

    ConfigFlags {
      configDistPref          = projectConfigDistDir,
      configHcFlavor          = projectConfigHcFlavor,
      configHcPath            = projectConfigHcPath,
      configHcPkg             = projectConfigHcPkg,
      configInstallDirs       = projectConfigInstallDirs ,
      configPackageDBs        = projectConfigPackageDBs,
      configAllowOlder        = projectConfigAllowOlder,
      configAllowNewer        = projectConfigAllowNewer
    } = configFlags

    ConfigExFlags {
      configCabalVersion      = projectConfigCabalVersion,
      configExConstraints     = projectConfigConstraints,
      configPreferences       = projectConfigPreferences,
      configSolver            = projectConfigSolver
    } = configExFlags

    InstallFlags {
      installProjectFileName  = projectConfigProjectFile,
      installHaddockIndex     = projectConfigHaddockIndex,
      installIndexState       = projectConfigIndexState,
      installMaxBackjumps     = projectConfigMaxBackjumps,
      installReorderGoals     = projectConfigReorderGoals,
      installCountConflicts   = projectConfigCountConflicts,
      installPerComponent     = projectConfigPerComponent,
      installIndependentGoals = projectConfigIndependentGoals,
      installShadowPkgs       = projectConfigShadowPkgs ,
      installStrongFlags      = projectConfigStrongFlags,
      installAllowBootLibInstalls = projectConfigAllowBootLibInstalls
    } = installFlags
-- | Select the subset of the legacy flag groups that can be set on a
-- per-package basis. Each pattern binding renames a legacy field to its
-- 'PackageConfig' counterpart, which the @RecordWildCards@ construction
-- then picks up.
convertLegacyPerPackageFlags :: ConfigFlags -> InstallFlags -> HaddockFlags
                             -> PackageConfig
convertLegacyPerPackageFlags configFlags installFlags haddockFlags =
    PackageConfig{..}
  where
    ConfigFlags {
      configProgramPaths,
      configProgramArgs,
      configProgramPathExtra    = packageConfigProgramPathExtra,
      configVanillaLib          = packageConfigVanillaLib,
      configProfLib             = packageConfigProfLib,
      configSharedLib           = packageConfigSharedLib,
      configDynExe              = packageConfigDynExe,
      configProfExe             = packageConfigProfExe,
      configProf                = packageConfigProf,
      configProfDetail          = packageConfigProfDetail,
      configProfLibDetail       = packageConfigProfLibDetail,
      configConfigureArgs       = packageConfigConfigureArgs,
      configOptimization        = packageConfigOptimization,
      configProgPrefix          = packageConfigProgPrefix,
      configProgSuffix          = packageConfigProgSuffix,
      configGHCiLib             = packageConfigGHCiLib,
      configSplitObjs           = packageConfigSplitObjs,
      configStripExes           = packageConfigStripExes,
      configStripLibs           = packageConfigStripLibs,
      configExtraLibDirs        = packageConfigExtraLibDirs,
      configExtraFrameworkDirs  = packageConfigExtraFrameworkDirs,
      configExtraIncludeDirs    = packageConfigExtraIncludeDirs,
      configConfigurationsFlags = packageConfigFlagAssignment,
      configTests               = packageConfigTests,
      configBenchmarks          = packageConfigBenchmarks,
      configCoverage            = coverage,
      -- fix: 'libcoverage' is used below but had no binding; restore the
      -- pattern match on the library-coverage field
      configLibCoverage         = libcoverage,
      configDebugInfo           = packageConfigDebugInfo,
      configRelocatable         = packageConfigRelocatable,
      configVerifyMode          = packageConfigVerifyMode
    } = configFlags

    -- program paths keep only the last setting; program args accumulate
    packageConfigProgramPaths = MapLast    (Map.fromList configProgramPaths)
    packageConfigProgramArgs  = MapMappend (Map.fromList configProgramArgs)
    -- either of the two legacy coverage flags enables coverage
    packageConfigCoverage     = coverage <> libcoverage

    InstallFlags {
      installDocumentation      = packageConfigDocumentation,
      installRunTests           = packageConfigRunTests
    } = installFlags

    HaddockFlags {
      haddockHoogle             = packageConfigHaddockHoogle,
      haddockHtml               = packageConfigHaddockHtml,
      haddockHtmlLocation       = packageConfigHaddockHtmlLocation,
      haddockForeignLibs        = packageConfigHaddockForeignLibs,
      haddockExecutables        = packageConfigHaddockExecutables,
      haddockTestSuites         = packageConfigHaddockTestSuites,
      haddockBenchmarks         = packageConfigHaddockBenchmarks,
      haddockInternal           = packageConfigHaddockInternal,
      haddockCss                = packageConfigHaddockCss,
      haddockHscolour           = packageConfigHaddockHscolour,
      haddockHscolourCss        = packageConfigHaddockHscolourCss,
      haddockContents           = packageConfigHaddockContents
    } = haddockFlags
-- | Select the subset of the legacy flag groups that only affects how the
-- build is carried out (caching, logging, jobs, reporting), never its
-- outcome.
convertLegacyBuildOnlyFlags :: GlobalFlags -> ConfigFlags
                            -> InstallFlags -> HaddockFlags
                            -> ProjectConfigBuildOnly
convertLegacyBuildOnlyFlags globalFlags configFlags
                              installFlags haddockFlags =
    ProjectConfigBuildOnly{..}
  where
    GlobalFlags {
      globalCacheDir          = projectConfigCacheDir,
      globalLogsDir           = projectConfigLogsDir,
      globalWorldFile         = _,
      globalHttpTransport     = projectConfigHttpTransport,
      globalIgnoreExpiry      = projectConfigIgnoreExpiry,
      globalStoreDir          = projectConfigStoreDir,
      globalBinariesDir       = projectConfigBinariesDir,
      globalAutoUpdate        = projectConfigAutoUpdate,
      globalSendMetrics       = projectConfigSendMetrics
    } = globalFlags

    ConfigFlags {
      configVerbosity         = projectConfigVerbosity
    } = configFlags

    InstallFlags {
      installDryRun           = projectConfigDryRun,
      installOnly             = _,
      installOnlyDeps         = projectConfigOnlyDeps,
      installRootCmd          = _,
      installSummaryFile      = projectConfigSummaryFile,
      installLogFile          = projectConfigLogFile,
      installBuildReports     = projectConfigBuildReports,
      installReportPlanningFailure = projectConfigReportPlanningFailure,
      installSymlinkBinDir    = projectConfigSymlinkBinDir,
      installOneShot          = projectConfigOneShot,
      installNumJobs          = projectConfigNumJobs,
      installKeepGoing        = projectConfigKeepGoing,
      installOfflineMode      = projectConfigOfflineMode
    } = installFlags

    HaddockFlags {
      -- fix: 'projectConfigKeepTempFiles' is required by the wildcard
      -- construction above but was not bound anywhere
      haddockKeepTempFiles    = projectConfigKeepTempFiles
    } = haddockFlags
-- | Convert from the 'ProjectConfig' representation back to the legacy one
-- that can be rendered in project-file syntax.
convertToLegacyProjectConfig :: ProjectConfig -> LegacyProjectConfig
convertToLegacyProjectConfig
    projectConfig@ProjectConfig {
      projectPackages,
      projectPackagesOptional,
      projectPackagesRepo,
      projectPackagesNamed,
      projectConfigAllPackages,
      projectConfigLocalPackages,
      projectConfigSpecificPackage
    } =
    LegacyProjectConfig {
      legacyPackages         = projectPackages,
      legacyPackagesOptional = projectPackagesOptional,
      legacyPackagesRepo     = projectPackagesRepo,
      legacyPackagesNamed    = projectPackagesNamed,
      legacySharedConfig     = convertToLegacySharedConfig projectConfig,
      legacyAllConfig        = convertToLegacyPerPackageConfig
                                 projectConfigAllPackages,
      -- the local config combines all-package flags with the local
      -- per-package flags
      legacyLocalConfig      = convertToLegacyAllPackageConfig projectConfig
                            <> convertToLegacyPerPackageConfig
                                 projectConfigLocalPackages,
      legacySpecificConfig   = fmap convertToLegacyPerPackageConfig
                                    projectConfigSpecificPackage
    }
-- | Extract the shared (solver and all-package) part of a 'ProjectConfig'
-- into the legacy flag groups.
convertToLegacySharedConfig :: ProjectConfig -> LegacySharedConfig
convertToLegacySharedConfig
    ProjectConfig {
      projectConfigBuildOnly     = ProjectConfigBuildOnly {..},
      projectConfigShared        = ProjectConfigShared {..},
      projectConfigAllPackages   = PackageConfig {
        packageConfigDocumentation
      }
    } =

    LegacySharedConfig {
      legacyGlobalFlags      = globalFlags,
      legacyConfigureShFlags = configFlags,
      legacyConfigureExFlags = configExFlags,
      legacyInstallFlags     = installFlags
    }
  where
    globalFlags = GlobalFlags {
      globalVersion           = mempty,
      globalNumericVersion    = mempty,
      globalConfigFile        = projectConfigConfigFile,
      globalSandboxConfigFile = mempty,
      globalConstraintsFile   = mempty,
      globalRemoteRepos       = projectConfigRemoteRepos,
      globalCacheDir          = projectConfigCacheDir,
      globalLocalRepos        = projectConfigLocalRepos,
      globalLogsDir           = projectConfigLogsDir,
      globalWorldFile         = mempty,
      globalRequireSandbox    = mempty,
      globalIgnoreSandbox     = mempty,
      globalIgnoreExpiry      = projectConfigIgnoreExpiry,
      globalHttpTransport     = projectConfigHttpTransport,
      globalNix               = mempty,
      globalStoreDir          = projectConfigStoreDir,
      globalPatchesDir        = projectConfigPatchesDir,
      globalBinariesDir       = projectConfigBinariesDir,
      globalAutoUpdate        = projectConfigAutoUpdate,
      globalSendMetrics       = projectConfigSendMetrics,
      globalEtaVersion        = projectConfigEtaVersion
    }

    configFlags = mempty {
      configVerbosity         = projectConfigVerbosity,
      configDistPref          = projectConfigDistDir,
      configAllowOlder        = projectConfigAllowOlder,
      configAllowNewer        = projectConfigAllowNewer
    }

    configExFlags = ConfigExFlags {
      configCabalVersion      = projectConfigCabalVersion,
      configExConstraints     = projectConfigConstraints,
      configPreferences       = projectConfigPreferences,
      configSolver            = projectConfigSolver
    }

    installFlags = InstallFlags {
      installDocumentation    = packageConfigDocumentation,
      installHaddockIndex     = projectConfigHaddockIndex,
      installDryRun           = projectConfigDryRun,
      installMaxBackjumps     = projectConfigMaxBackjumps,
      installReorderGoals     = projectConfigReorderGoals,
      installCountConflicts   = projectConfigCountConflicts,
      installIndependentGoals = projectConfigIndependentGoals,
      -- fix: the field name was missing here, leaving a bare pun that does
      -- not exist in 'InstallFlags'
      installShadowPkgs       = projectConfigShadowPkgs,
      installStrongFlags      = projectConfigStrongFlags,
      installAllowBootLibInstalls = projectConfigAllowBootLibInstalls,
      installOnly             = mempty,
      installOnlyDeps         = projectConfigOnlyDeps,
      -- fix: 'installRootCmd' was left uninitialised; it is ignored by the
      -- conversions in this module, so mempty is the right value
      installRootCmd          = mempty,
      installIndexState       = projectConfigIndexState,
      installSummaryFile      = projectConfigSummaryFile,
      installLogFile          = projectConfigLogFile,
      installBuildReports     = projectConfigBuildReports,
      installReportPlanningFailure = projectConfigReportPlanningFailure,
      installBinariesOutputDir = mempty,
      installSymlinkBinDir    = projectConfigSymlinkBinDir,
      installPerComponent     = projectConfigPerComponent,
      installOneShot          = projectConfigOneShot,
      installNumJobs          = projectConfigNumJobs,
      installKeepGoing        = projectConfigKeepGoing,
      installRunTests         = mempty,
      installOfflineMode      = projectConfigOfflineMode,
      installProjectFileName  = projectConfigProjectFile
    }
-- | Extract the parts of a 'ProjectConfig' that map onto the configure and
-- haddock flags shared by all packages, for the legacy representation.
convertToLegacyAllPackageConfig :: ProjectConfig -> LegacyPackageConfig
convertToLegacyAllPackageConfig
    ProjectConfig {
      projectConfigBuildOnly = ProjectConfigBuildOnly {..},
      projectConfigShared    = ProjectConfigShared {..}
    } =

    LegacyPackageConfig {
      legacyConfigureFlags = configFlags,
      legacyInstallPkgFlags= mempty,
      legacyHaddockFlags   = haddockFlags
    }
  where
    configFlags = ConfigFlags {
      configArgs                = mempty,
      configPrograms_           = mempty,
      configProgramPaths        = mempty,
      configProgramArgs         = mempty,
      configProgramPathExtra    = mempty,
      configHcFlavor            = projectConfigHcFlavor,
      configHcPath              = projectConfigHcPath,
      configHcPkg               = projectConfigHcPkg,
      configInstantiateWith     = mempty,
      configVanillaLib          = mempty,
      configProfLib             = mempty,
      configSharedLib           = mempty,
      configDynExe              = mempty,
      configProfExe             = mempty,
      configProf                = mempty,
      configProfDetail          = mempty,
      configProfLibDetail       = mempty,
      configConfigureArgs       = mempty,
      configOptimization        = mempty,
      configProgPrefix          = mempty,
      configProgSuffix          = mempty,
      configInstallDirs         = mempty,
      configScratchDir          = mempty,
      configDistPref            = mempty,
      configCabalFilePath       = mempty,
      configVerbosity           = mempty,
      -- fix: field was left uninitialised (incomplete record construction)
      configUserInstall         = mempty,
      configPackageDBs          = projectConfigPackageDBs,
      configGHCiLib             = mempty,
      configSplitObjs           = mempty,
      configStripExes           = mempty,
      configStripLibs           = mempty,
      configExtraLibDirs        = mempty,
      configExtraFrameworkDirs  = mempty,
      configConstraints         = mempty,
      configDependencies        = mempty,
      configExtraIncludeDirs    = mempty,
      configDeterministic       = mempty,
      configIPID                = mempty,
      configCID                 = mempty,
      configConfigurationsFlags = mempty,
      configTests               = mempty,
      -- fix: coverage fields were left uninitialised
      -- NOTE(review): confirm 'configLibCoverage' is the field name this
      -- ConfigFlags declares for library coverage
      configCoverage            = mempty,
      configLibCoverage         = mempty,
      configExactConfiguration  = mempty,
      configBenchmarks          = mempty,
      configRelocatable         = mempty,
      configDebugInfo           = mempty,
      configAllowOlder          = mempty,
      configAllowNewer          = mempty,
      configVerifyMode          = mempty
    }

    haddockFlags = mempty {
      haddockKeepTempFiles = projectConfigKeepTempFiles
    }
-- | Convert a per-package 'PackageConfig' back to the legacy flag groups.
convertToLegacyPerPackageConfig :: PackageConfig -> LegacyPackageConfig
convertToLegacyPerPackageConfig PackageConfig {..} =
    LegacyPackageConfig {
      legacyConfigureFlags  = configFlags,
      legacyInstallPkgFlags = installFlags,
      legacyHaddockFlags    = haddockFlags
    }
  where
    configFlags = ConfigFlags {
      configArgs                = mempty,
      -- field selector applied to the empty flags: the field type itself
      -- has no Monoid instance
      configPrograms_           = configPrograms_ mempty,
      configProgramPaths        = Map.toList (getMapLast packageConfigProgramPaths),
      configProgramArgs         = Map.toList (getMapMappend packageConfigProgramArgs),
      configProgramPathExtra    = packageConfigProgramPathExtra,
      configHcFlavor            = mempty,
      configHcPath              = mempty,
      configHcPkg               = mempty,
      configInstantiateWith     = mempty,
      configVanillaLib          = packageConfigVanillaLib,
      configProfLib             = packageConfigProfLib,
      configSharedLib           = packageConfigSharedLib,
      configDynExe              = packageConfigDynExe,
      configProfExe             = packageConfigProfExe,
      configProf                = packageConfigProf,
      configProfDetail          = packageConfigProfDetail,
      configProfLibDetail       = packageConfigProfLibDetail,
      configConfigureArgs       = packageConfigConfigureArgs,
      configOptimization        = packageConfigOptimization,
      configProgPrefix          = packageConfigProgPrefix,
      configProgSuffix          = packageConfigProgSuffix,
      configInstallDirs         = mempty,
      configScratchDir          = mempty,
      configDistPref            = mempty,
      configCabalFilePath       = mempty,
      configVerbosity           = mempty,
      configUserInstall         = mempty,
      configPackageDBs          = mempty,
      configGHCiLib             = packageConfigGHCiLib,
      configSplitObjs           = packageConfigSplitObjs,
      configStripExes           = packageConfigStripExes,
      configStripLibs           = packageConfigStripLibs,
      configExtraLibDirs        = packageConfigExtraLibDirs,
      configExtraFrameworkDirs  = packageConfigExtraFrameworkDirs,
      configConstraints         = mempty,
      configDependencies        = mempty,
      configExtraIncludeDirs    = packageConfigExtraIncludeDirs,
      configIPID                = mempty,
      configCID                 = mempty,
      configDeterministic       = mempty,
      configConfigurationsFlags = packageConfigFlagAssignment,
      configTests               = packageConfigTests,
      -- fix: coverage fields were left uninitialised; the package coverage
      -- setting maps onto the library-coverage flag, matching the inverse
      -- conversion in convertLegacyPerPackageFlags
      -- NOTE(review): confirm 'configLibCoverage' is the field name this
      -- ConfigFlags declares for library coverage
      configCoverage            = toFlag False,
      configLibCoverage         = packageConfigCoverage,
      configExactConfiguration  = mempty,
      configBenchmarks          = packageConfigBenchmarks,
      configRelocatable         = packageConfigRelocatable,
      configDebugInfo           = packageConfigDebugInfo,
      configAllowOlder          = mempty,
      configAllowNewer          = mempty,
      configVerifyMode          = mempty
    }

    installFlags = mempty {
      installDocumentation      = packageConfigDocumentation,
      installRunTests           = packageConfigRunTests
    }

    haddockFlags = HaddockFlags {
      haddockProgramPaths  = mempty,
      haddockProgramArgs   = mempty,
      haddockHoogle        = packageConfigHaddockHoogle,
      haddockHtml          = packageConfigHaddockHtml,
      haddockHtmlLocation  = packageConfigHaddockHtmlLocation,
      haddockForeignLibs   = packageConfigHaddockForeignLibs,
      haddockExecutables   = packageConfigHaddockExecutables,
      haddockTestSuites    = packageConfigHaddockTestSuites,
      haddockBenchmarks    = packageConfigHaddockBenchmarks,
      haddockInternal      = packageConfigHaddockInternal,
      haddockCss           = packageConfigHaddockCss,
      haddockHscolour      = packageConfigHaddockHscolour,
      haddockHscolourCss   = packageConfigHaddockHscolourCss,
      haddockContents      = packageConfigHaddockContents,
      haddockDistPref      = mempty,
      haddockKeepTempFiles = mempty,
      haddockVerbosity     = mempty
    }
-- | Parse project-file content into a 'LegacyProjectConfig', starting from
-- the empty config.
parseLegacyProjectConfig :: String -> ParseResult LegacyProjectConfig
parseLegacyProjectConfig =
    parseConfig legacyProjectConfigFieldDescrs
                legacyPackageConfigSectionDescrs
                mempty
-- | Render a 'LegacyProjectConfig' back into project-file syntax, ending
-- with a trailing newline.
showLegacyProjectConfig :: LegacyProjectConfig -> String
showLegacyProjectConfig config =
    Disp.render $
    showConfig  legacyProjectConfigFieldDescrs
                legacyPackageConfigSectionDescrs
                config
  $+$
    Disp.text ""
-- | Top-level field descriptors of the project file: the package location
-- fields, plus the shared and local-package field groups lifted into
-- 'LegacyProjectConfig'.
legacyProjectConfigFieldDescrs :: [FieldDescr LegacyProjectConfig]
legacyProjectConfigFieldDescrs =
    [ newLineListField "packages"
        (Disp.text . renderPackageLocationToken) parsePackageLocationTokenQ
        legacyPackages
        (\v flags -> flags { legacyPackages = v })
    , newLineListField "optional-packages"
        (Disp.text . renderPackageLocationToken) parsePackageLocationTokenQ
        legacyPackagesOptional
        (\v flags -> flags { legacyPackagesOptional = v })
    , commaNewLineListField "extra-packages"
        disp parse
        legacyPackagesNamed
        (\v flags -> flags { legacyPackagesNamed = v })
    ]
 ++ map (liftField
           legacySharedConfig
           (\flags conf -> conf { legacySharedConfig = flags }))
        legacySharedConfigFieldDescrs
 ++ map (liftField
           legacyLocalConfig
           (\flags conf -> conf { legacyLocalConfig = flags }))
        legacyPackageConfigFieldDescrs
-- | Parse a package location token. This is a bit tricky: locations can be
-- globs with embedded ',' characters, but we cannot parse strictly as a
-- glob since http urls (and some system file paths) are not valid globs.
-- So we parse fairly liberally as a token, allowing ',' only inside
-- matched '{' '}' braces. A Haskell-style quoted string is also accepted
-- as an escape hatch.
parsePackageLocationTokenQ :: ReadP r String
parsePackageLocationTokenQ = parseHaskellString
                   Parse.<++ parsePackageLocationToken
  where
    parsePackageLocationToken :: ReadP r String
    parsePackageLocationToken = fmap fst (Parse.gather outerTerm)
      where
        -- outer level: ',' ends the token; inner (braced) level: it doesn't
        outerTerm   = alternateEither1 outerToken (braces innerTerm)
        innerTerm   = alternateEither  innerToken (braces innerTerm)
        outerToken  = Parse.munch1 outerChar >> return ()
        innerToken  = Parse.munch1 innerChar >> return ()
        outerChar c = not (isSpace c || c == '{' || c == '}' || c == ',')
        innerChar c = not (isSpace c || c == '{' || c == '}')
        braces      = Parse.between (Parse.char '{') (Parse.char '}')

    -- Alternations of two parsers: one-or-more and zero-or-more variants,
    -- starting with either parser.
    alternateEither, alternateEither1,
      alternatePQs, alternate1PQs, alternateQsP, alternate1QsP
      :: ReadP r () -> ReadP r () -> ReadP r ()

    alternateEither1 p q = alternate1PQs p q +++ alternate1QsP q p
    alternateEither  p q = alternateEither1 p q +++ return ()

    alternate1PQs p q = p >> alternateQsP q p
    alternatePQs  p q = alternate1PQs p q +++ return ()

    alternate1QsP q p = Parse.many1 q >> alternatePQs p q
    alternateQsP  q p = alternate1QsP q p +++ return ()
-- | Render a package location back out as a token, quoting it (via 'show')
-- whenever the raw form would not survive a round trip through
-- 'parsePackageLocationTokenQ': whitespace or '"' anywhere, unbalanced
-- braces, or a ',' outside of braces.
--
-- TODO: [code cleanup] these escaping issues ought to be dealt with
-- systematically in ParseUtils.
renderPackageLocationToken :: String -> String
renderPackageLocationToken s
  | mustQuote = show s
  | otherwise = s
  where
    mustQuote = not (scan (0 :: Int) s)
    -- Scan the token tracking brace depth; succeed only if we end at
    -- depth zero without hitting a disallowed character.
    scan depth []       = depth == 0
    scan _     ('"':_)  = False
    scan depth ('{':cs) = scan (depth + 1) cs
    scan depth ('}':cs) = scan (depth - 1) cs
    scan depth (',':cs) = depth > 0 && scan depth cs
    scan _     (c:_) | isSpace c = False
    scan depth (_:cs)   = scan depth cs
-- | Field descriptors for 'LegacySharedConfig': the project-file fields
-- that apply to the solver and to all packages. Mostly derived from the
-- command-line option descriptions, filtered down to those that make sense
-- in a config file, plus a few hand-written replacements.
legacySharedConfigFieldDescrs :: [FieldDescr LegacySharedConfig]
legacySharedConfigFieldDescrs =

  ( liftFields
      legacyGlobalFlags
      (\flags conf -> conf { legacyGlobalFlags = flags })
  -- "local-repo" needs a hand-written list-field descriptor
  . addFields
      [ newLineListField "local-repo"
          showTokenQ parseTokenQ
          (fromNubList . globalLocalRepos)
          (\v conf -> conf { globalLocalRepos = toNubList v })
      ]
  . filterFields
      [ "remote-repo-cache"
      , "logs-dir", "store-dir", "ignore-expiry", "http-transport"
      , "patches-dir", "select-eta", "auto-update", "send-metrics"
      , "binaries-dir"
      ]
  . commandOptionsToFields
  ) (commandOptions (globalCommand []) ParseArgs)
 ++
  ( liftFields
      legacyConfigureShFlags
      (\flags conf -> conf { legacyConfigureShFlags = flags })
  -- "allow-older" and "allow-newer" get hand-written descriptors rather
  -- than the derived ones
  . addFields
      [ simpleField "allow-older"
          (maybe mempty dispRelaxDeps) (fmap Just parseRelaxDeps)
          (fmap unAllowOlder . configAllowOlder)
          (\v conf -> conf { configAllowOlder = fmap AllowOlder v })
      ]
  . addFields
      [ simpleField "allow-newer"
          (maybe mempty dispRelaxDeps) (fmap Just parseRelaxDeps)
          (fmap unAllowNewer . configAllowNewer)
          (\v conf -> conf { configAllowNewer = fmap AllowNewer v })
      ]
  . filterFields ["verbose", "builddir" ]
  . commandOptionsToFields
  ) (configureOptions ParseArgs)
 ++
  ( liftFields
      legacyConfigureExFlags
      (\flags conf -> conf { legacyConfigureExFlags = flags })
  -- not "constraint" or "preference": we use our own plural forms that
  -- pretty-print one entry per line
  . addFields
      [ commaNewLineListField "constraints"
          (disp . fst) (fmap (\constraint -> (constraint, constraintSrc)) parse)
          configExConstraints (\v conf -> conf { configExConstraints = v })

      , commaNewLineListField "preferences"
          disp parse
          configPreferences (\v conf -> conf { configPreferences = v })
      ]
  . filterFields
      [ "cabal-lib-version", "solver"
      ]
  . commandOptionsToFields
  ) (configureExOptions ParseArgs constraintSrc)
 ++
  ( liftFields
      legacyInstallFlags
      (\flags conf -> conf { legacyInstallFlags = flags })
  . addFields
      [ newLineListField "build-summary"
          (showTokenQ . fromPathTemplate) (fmap toPathTemplate parseTokenQ)
          (fromNubList . installSummaryFile)
          (\v conf -> conf { installSummaryFile = toNubList v })
      ]
  . filterFields
      [ "doc-index-file"
      , "root-cmd", "symlink-bindir"
      , "build-log"
      , "remote-build-reporting", "report-planning-failure"
      , "one-shot", "jobs", "keep-going", "offline", "per-component"
      -- solver flags:
      , "max-backjumps", "reorder-goals", "count-conflicts", "independent-goals"
      , "strong-flags", "allow-boot-library-installs", "index-state"
      ]
  . commandOptionsToFields
  ) (installOptions ParseArgs)
  where
    -- NOTE(review): placeholder — the real project file path is not
    -- threaded through to the constraint source here yet
    constraintSrc = ConstraintSourceProjectConfig "TODO"
-- | Parse a 'RelaxDeps' value: the keywords @none@\/@None@ or @all@\/@All@,
-- or otherwise a comma-separated list of packages to relax.
parseRelaxDeps :: ReadP r RelaxDeps
parseRelaxDeps =
      ((const RelaxDepsNone <$> (Parse.string "none" +++ Parse.string "None"))
  +++ (const RelaxDepsAll  <$> (Parse.string "all"  +++ Parse.string "All")))
  <++ (      RelaxDepsSome <$> parseOptCommaList parse)
-- | Pretty-print a 'RelaxDeps' value: @None@, @All@, or a comma-separated
-- list of the relaxed packages.
dispRelaxDeps :: RelaxDeps -> Doc
dispRelaxDeps rd = case rd of
    RelaxDepsNone      -> Disp.text "None"
    RelaxDepsAll       -> Disp.text "All"
    RelaxDepsSome pkgs -> Disp.fsep (Disp.punctuate Disp.comma (map disp pkgs))
-- | Field descriptors for 'LegacyPackageConfig': the per-package fields of
-- the project file. Mostly derived from the configure, install and haddock
-- command-line option descriptions, filtered to those that make sense in a
-- config file, plus hand-written replacements for the awkward cases.
legacyPackageConfigFieldDescrs :: [FieldDescr LegacyPackageConfig]
legacyPackageConfigFieldDescrs =
  ( liftFields
      legacyConfigureFlags
      (\flags conf -> conf { legacyConfigureFlags = flags })
  -- not the derived "extra-include-dirs", "extra-lib-dirs",
  -- "extra-framework-dirs" or "extra-prog-path": we use corrected
  -- versions that parse as list fields
  . addFields
      [ newLineListField "extra-include-dirs"
          showTokenQ parseTokenQ
          configExtraIncludeDirs
          (\v conf -> conf { configExtraIncludeDirs = v })
      , newLineListField "extra-lib-dirs"
          showTokenQ parseTokenQ
          configExtraLibDirs
          (\v conf -> conf { configExtraLibDirs = v })
      , newLineListField "extra-framework-dirs"
          showTokenQ parseTokenQ
          configExtraFrameworkDirs
          (\v conf -> conf { configExtraFrameworkDirs = v })
      , newLineListField "extra-prog-path"
          showTokenQ parseTokenQ
          (fromNubList . configProgramPathExtra)
          (\v conf -> conf { configProgramPathExtra = toNubList v })
      , newLineListField "configure-options"
          showTokenQ parseTokenQ
          configConfigureArgs
          (\v conf -> conf { configConfigureArgs = v })

      , simpleField "flags"
          dispFlagAssignment parseFlagAssignment
          configConfigurationsFlags
          (\v conf -> conf { configConfigurationsFlags = v })

      , newLineListField "package-dbs"
          (showTokenQ . showPackageDb) (fmap readPackageDb parseTokenQ)
          configPackageDBs
          (\v conf -> conf { configPackageDBs = v })
      ]
  . filterFields
      [ "with-compiler", "with-hc-pkg"
      , "program-prefix", "program-suffix"
      , "library-vanilla", "library-profiling"
      , "shared", "executable-dynamic", "uberjar-mode"
      , "profiling", "executable-profiling"
      , "profiling-detail", "library-profiling-detail"
      , "library-for-ghci", "split-objs"
      , "executable-stripping", "library-stripping"
      , "tests", "benchmarks"
      , "coverage", "library-coverage"
      , "relocatable", "verify"
      ]
  . commandOptionsToFields
  ) (configureOptions ParseArgs)
 ++
  -- hand-written overrides for fields whose derived descriptors are wrong
  liftFields
    legacyConfigureFlags
    (\flags conf -> conf { legacyConfigureFlags = flags })
    [ overrideFieldCompiler
    , overrideFieldOptimization
    , overrideFieldDebugInfo
    ]
 ++
  ( liftFields
      legacyInstallPkgFlags
      (\flags conf -> conf { legacyInstallPkgFlags = flags })
  . filterFields
      [ "documentation", "run-tests"
      ]
  . commandOptionsToFields
  ) (installOptions ParseArgs)
 ++
  ( liftFields
      legacyHaddockFlags
      (\flags conf -> conf { legacyHaddockFlags = flags })
  -- haddock fields are prefixed with "docs-" to avoid name clashes
  . mapFieldNames
      ("docs-"++)
  . filterFields
      [ "hoogle", "html", "html-location"
      , "foreign-libraries"
      , "executables", "tests", "benchmarks", "all", "internal", "css"
      , "hyperlink-source", "hscolour-css"
      , "contents-location", "keep-temp-files"
      ]
  . commandOptionsToFields
  ) (haddockOptions ParseArgs)
  where
    overrideFieldCompiler =
      simpleField "compiler"
        (fromFlagOrDefault Disp.empty . fmap disp)
        (Parse.option mempty (fmap toFlag parse))
        configHcFlavor (\v flags -> flags { configHcFlavor = v })

    -- TODO: [code cleanup] The following is a hack. The "optimization" and
    -- "debug-info" fields are OptArg, and viewAsFieldDescr fails on that,
    -- so we hand-write their field descriptors here instead.
    overrideFieldOptimization =
      liftField configOptimization
                (\v flags -> flags { configOptimization = v }) $
      let name = "optimization" in
      FieldDescr name
        (\f -> case f of
                 Flag NoOptimisation      -> Disp.text "False"
                 Flag NormalOptimisation  -> Disp.text "True"
                 Flag MaximumOptimisation -> Disp.text "2"
                 _                        -> Disp.empty)
        (\line str _ -> case () of
         _ |  str == "False" -> ParseOk [] (Flag NoOptimisation)
           |  str == "True"  -> ParseOk [] (Flag NormalOptimisation)
           |  str == "0"     -> ParseOk [] (Flag NoOptimisation)
           |  str == "1"     -> ParseOk [] (Flag NormalOptimisation)
           |  str == "2"     -> ParseOk [] (Flag MaximumOptimisation)
           -- lowercase variants parse but emit a warning
           | lstr == "false" -> ParseOk [caseWarning] (Flag NoOptimisation)
           | lstr == "true"  -> ParseOk [caseWarning] (Flag NormalOptimisation)
           | otherwise       -> ParseFailed (NoParse name line)
           where
             lstr = lowercase str
             caseWarning = PWarning $
               "The '" ++ name ++ "' field is case sensitive, use 'True' or 'False'.")

    overrideFieldDebugInfo =
      liftField configDebugInfo (\v flags -> flags { configDebugInfo = v }) $
      let name = "debug-info" in
      FieldDescr name
        (\f -> case f of
                 Flag NoDebugInfo      -> Disp.text "False"
                 Flag MinimalDebugInfo -> Disp.text "1"
                 Flag NormalDebugInfo  -> Disp.text "True"
                 Flag MaximalDebugInfo -> Disp.text "3"
                 _                     -> Disp.empty)
        (\line str _ -> case () of
         _ |  str == "False" -> ParseOk [] (Flag NoDebugInfo)
           |  str == "True"  -> ParseOk [] (Flag NormalDebugInfo)
           |  str == "0"     -> ParseOk [] (Flag NoDebugInfo)
           |  str == "1"     -> ParseOk [] (Flag MinimalDebugInfo)
           |  str == "2"     -> ParseOk [] (Flag NormalDebugInfo)
           |  str == "3"     -> ParseOk [] (Flag MaximalDebugInfo)
           -- lowercase variants parse but emit a warning
           | lstr == "false" -> ParseOk [caseWarning] (Flag NoDebugInfo)
           | lstr == "true"  -> ParseOk [caseWarning] (Flag NormalDebugInfo)
           | otherwise       -> ParseFailed (NoParse name line)
           where
             lstr = lowercase str
             caseWarning = PWarning $
               "The '" ++ name ++ "' field is case sensitive, use 'True' or 'False'.")
-- | All sections supported in a legacy project config file:
-- source package repositories, the all-packages and per-package option
-- sections, and the program\/repository sections lifted into the local and
-- shared parts of the config respectively.
legacyPackageConfigSectionDescrs :: [SectionDescr LegacyProjectConfig]
legacyPackageConfigSectionDescrs =
  [ packageRepoSectionDescr
  , allPackagesOptionsSectionDescr
  , packageSpecificOptionsSectionDescr
  , liftSection
      legacyLocalConfig
      (\flags conf -> conf { legacyLocalConfig = flags })
      programOptionsSectionDescr
  , liftSection
      legacyLocalConfig
      (\flags conf -> conf { legacyLocalConfig = flags })
      programLocationsSectionDescr
  , liftSection
      legacySharedConfig
      (\flags conf -> conf { legacySharedConfig = flags }) $
    liftSection
      legacyGlobalFlags
      (\flags conf -> conf { legacyGlobalFlags = flags })
      remoteRepoSectionDescr
  ]
-- | The @source-repository-package@ section: each occurrence appends one
-- 'SourceRepo' to 'legacyPackagesRepo'.  The section takes no arguments.
packageRepoSectionDescr :: SectionDescr LegacyProjectConfig
packageRepoSectionDescr =
  SectionDescr {
    sectionName = "source-repository-package",
    sectionFields = sourceRepoFieldDescrs,
    sectionSubsections = [],
    sectionGet = map (\x->("", x))
               . legacyPackagesRepo,
    sectionSet =
      \lineno unused pkgrepo projconf -> do
        unless (null unused) $
          syntaxError lineno "the section 'source-repository-package' takes no arguments"
        return projconf {
          legacyPackagesRepo = legacyPackagesRepo projconf ++ [pkgrepo]
        },
    -- all-Nothing repo used as the starting point when parsing a section
    sectionEmpty = SourceRepo {
      repoType = Nothing,
      repoLocation = Nothing,
      repoModule = Nothing,
      repoBranch = Nothing,
      repoTag = Nothing,
      repoCommit = Nothing,
      repoSubdir = Nothing
    }
  }
-- | Fields allowed inside per-package (and all-packages) sections: the plain
-- legacy package-config fields, plus per-program @PROG-options@ fields and
-- program location fields, both routed into the package's
-- 'legacyConfigureFlags'.
packageSpecificOptionsFieldDescrs :: [FieldDescr LegacyPackageConfig]
packageSpecificOptionsFieldDescrs =
    legacyPackageConfigFieldDescrs
 ++ programOptionsFieldDescrs
      (configProgramArgs . legacyConfigureFlags)
      (\args pkgconf -> pkgconf {
          legacyConfigureFlags = (legacyConfigureFlags pkgconf) {
            configProgramArgs = args
          }
        }
      )
 ++ liftFields
      legacyConfigureFlags
      (\flags pkgconf -> pkgconf {
          legacyConfigureFlags = flags
        }
      )
      programLocationsFieldDescrs
-- | The @all-packages@ section: options applied to every package, merged
-- monoidally into 'legacyAllConfig'.  The section takes no arguments.
allPackagesOptionsSectionDescr :: SectionDescr LegacyProjectConfig
allPackagesOptionsSectionDescr =
  SectionDescr {
    sectionName = "all-packages",
    sectionFields = packageSpecificOptionsFieldDescrs,
    sectionSubsections = [],
    sectionGet = (\x->[("", x)])
               . legacyAllConfig,
    sectionSet =
      \lineno unused pkgsconf projconf -> do
        unless (null unused) $
          syntaxError lineno "the section 'all-packages' takes no arguments"
        return projconf {
          legacyAllConfig = legacyAllConfig projconf <> pkgsconf
        },
    sectionEmpty = mempty
  }
-- | The @package NAME@ section: options for one named package, accumulated
-- into the per-package map 'legacySpecificConfig'.  The package-name
-- argument is mandatory and must parse via 'simpleParse'.
packageSpecificOptionsSectionDescr :: SectionDescr LegacyProjectConfig
packageSpecificOptionsSectionDescr =
  SectionDescr {
    sectionName = "package",
    sectionFields = packageSpecificOptionsFieldDescrs,
    sectionSubsections = [],
    sectionGet = \projconf ->
        [ (display pkgname, pkgconf)
        | (pkgname, pkgconf) <-
            Map.toList . getMapMappend
                       . legacySpecificConfig $ projconf ],
    sectionSet =
      \lineno pkgnamestr pkgconf projconf -> do
        pkgname <- case simpleParse pkgnamestr of
          Just pkgname -> return pkgname
          Nothing -> syntaxError lineno $
                         "a 'package' section requires a package name "
                      ++ "as an argument"
        -- multiple sections for the same package are mappend'ed together
        return projconf {
          legacySpecificConfig =
            MapMappend $
            Map.insertWith mappend pkgname pkgconf
                           (getMapMappend $ legacySpecificConfig projconf)
        },
    sectionEmpty = mempty
  }
-- | Turn the per-program @PROG-options@ command options (for every program
-- in 'defaultProgramDb') into config-file fields, using the given
-- getter\/setter for the program-args association list.
programOptionsFieldDescrs :: (a -> [(String, [String])])
                          -> ([(String, [String])] -> a -> a)
                          -> [FieldDescr a]
programOptionsFieldDescrs get' set =
    commandOptionsToFields
  $ programDbOptions
      defaultProgramDb
      ParseArgs get' set
-- | The @program-options@ subsection of a package section: per-program extra
-- arguments, merged into the package's 'configProgramArgs'.
-- The section takes no arguments.
programOptionsSectionDescr :: SectionDescr LegacyPackageConfig
programOptionsSectionDescr =
  SectionDescr {
    sectionName = "program-options",
    sectionFields = programOptionsFieldDescrs
                      configProgramArgs
                      (\args conf -> conf { configProgramArgs = args }),
    sectionSubsections = [],
    sectionGet = (\x->[("", x)])
               . legacyConfigureFlags,
    sectionSet =
      \lineno unused confflags pkgconf -> do
        unless (null unused) $
          syntaxError lineno "the section 'program-options' takes no arguments"
        return pkgconf {
          legacyConfigureFlags = legacyConfigureFlags pkgconf <> confflags
        },
    sectionEmpty = mempty
  }
-- | Config-file fields @PROG-location@ overriding the executable path of
-- each known program, stored in 'configProgramPaths'.
programLocationsFieldDescrs :: [FieldDescr ConfigFlags]
programLocationsFieldDescrs =
    commandOptionsToFields
  $ programDbPaths'
      (++ "-location")
      defaultProgramDb
      ParseArgs
      configProgramPaths
      (\paths conf -> conf { configProgramPaths = paths })
-- | The @program-locations@ subsection of a package section: per-program
-- executable paths, merged into the package's 'legacyConfigureFlags'.
-- The section takes no arguments.
programLocationsSectionDescr :: SectionDescr LegacyPackageConfig
programLocationsSectionDescr =
  SectionDescr {
    sectionName = "program-locations",
    sectionFields = programLocationsFieldDescrs,
    sectionSubsections = [],
    sectionGet = (\x->[("", x)])
               . legacyConfigureFlags,
    sectionSet =
      \lineno unused confflags pkgconf -> do
        unless (null unused) $
          syntaxError lineno "the section 'program-locations' takes no arguments"
        return pkgconf {
          legacyConfigureFlags = legacyConfigureFlags pkgconf <> confflags
        },
    sectionEmpty = mempty
  }
-- | For each known program @PROG@ in 'progDb', produce a @PROG-options@
-- option.
programDbOptions
  :: ProgramDb
  -> ShowOrParseArgs
  -> (flags -> [(String, [String])])
  -> ([(String, [String])] -> (flags -> flags))
  -> [OptionField flags]
programDbOptions progDb showOrParseArgs get' set =
  case showOrParseArgs of
    -- when showing help, collapse the per-program options into one generic entry
    ShowArgs -> [programOptions "PROG"]
    ParseArgs -> map (programOptions . programName . fst)
                     (knownPrograms progDb)
  where
    -- the PROG-options field: parse splits the argument string into words,
    -- show joins the matching program's args back together
    programOptions prog =
      option "" [prog ++ "-options"]
        ("give extra options to " ++ prog)
        get' set
        (reqArg' "OPTS" (\args -> [(prog, splitArgs args)])
           (\progArgs -> [ joinsArgs args
                         | (prog', args) <- progArgs, prog==prog' ]))

    joinsArgs = unwords . map escape
    -- NOTE(review): only whitespace triggers quoting; embedded double quotes
    -- are not escaped -- confirm this round-trips with splitArgs
    escape arg | any isSpace arg = "\"" ++ arg ++ "\""
               | otherwise = arg
-- | The @repository NAME@ section describing a remote package repository.
-- The repository-name argument is mandatory.  Emits warnings (not errors)
-- when the key threshold exceeds the number of root keys, or when root keys
-- are given without @secure: True@.
remoteRepoSectionDescr :: SectionDescr GlobalFlags
remoteRepoSectionDescr =
  SectionDescr {
    sectionName = "repository",
    sectionFields = remoteRepoFields,
    sectionSubsections = [],
    sectionGet = map (\x->(remoteRepoName x, x)) . fromNubList
               . globalRemoteRepos,
    sectionSet =
      \lineno reponame repo0 conf -> do
        when (null reponame) $
          syntaxError lineno $ "a 'repository' section requires the "
                            ++ "repository name as an argument"
        let repo = repo0 { remoteRepoName = reponame }
        when (remoteRepoKeyThreshold repo
                > length (remoteRepoRootKeys repo)) $
          warning $ "'key-threshold' for repository "
                 ++ show (remoteRepoName repo)
                 ++ " higher than number of keys"
        when (not (null (remoteRepoRootKeys repo))
              && remoteRepoSecure repo /= Just True) $
          warning $ "'root-keys' for repository "
                 ++ show (remoteRepoName repo)
                 ++ " non-empty, but 'secure' not set to True."
        return conf {
          globalRemoteRepos = overNubList (++[repo]) (globalRemoteRepos conf)
        },
    sectionEmpty = emptyRemoteRepo ""
  }
-- TODO: [code cleanup] this is a different definition from listField, like
-- the one in Distribution.ParseUtils -- unify them.
-- | A list field whose elements are laid out with 'Disp.sep' (one per
-- line\/space as fits) rather than comma-separated.
newLineListField :: String -> (a -> Doc) -> ReadP [a] a
                 -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b
newLineListField = listFieldWithSep Disp.sep
-- | A list field with a configurable pretty-printing separator.
-- Note that parsing a value /appends/ to the list already in @b@ (see
-- @set'@), so repeated occurrences of the field accumulate.
listFieldWithSep :: ([Doc] -> Doc) -> String -> (a -> Doc) -> ReadP [a] a
                 -> (b -> [a]) -> ([a] -> b -> b) -> FieldDescr b
listFieldWithSep separator name showF readF get' set =
  liftField get' set' $
    ParseUtils.field name showF' (parseOptCommaList readF)
  where
    set' xs b = set (get' b ++ xs) b
    showF' = separator . map showF
-- | Parse a list of items separated either by commas (with optional
-- surrounding spaces) or by whitespace alone.
parseOptCommaList :: ReadP r a -> ReadP r [a]
parseOptCommaList p = Parse.sepBy p sep
  where
    sep = (Parse.skipSpaces >> Parse.char ',' >> Parse.skipSpaces)
      +++ (Parse.satisfy isSpace >> Parse.skipSpaces)
-- D.ParseUtils called this showFilePath.  This version escapes "." and "--"
-- which would otherwise be mis-parsed when read back.
-- Render a token, quoting the forms that would not survive a round trip:
-- the bare "." and anything beginning with "--".
showTokenQ :: String -> Doc
showTokenQ x
  | null x           = Disp.empty
  | take 2 x == "--" = Disp.text (show x)
  | x == "."         = Disp.text (show x)
  | otherwise        = showToken x
-- This is just a copy of parseTokenQ, using the fixed parseHaskellString
-- | Parse a token: either a Haskell-style quoted string literal, or a bare
-- word containing no spaces or commas.
parseTokenQ :: ReadP r String
parseTokenQ = parseHaskellString
          <++ Parse.munch1 (\x -> not (isSpace x) && x /= ',')
-- Fixed version of the one in Distribution.ParseUtils: it turns out the
-- Read instance for String accepts the quoting we need.
-- | Parse a Haskell 'String' literal by reusing the lexer from the Read
-- machinery (converted into a 'ReadP' parser).
parseHaskellString :: ReadP r String
parseHaskellString =
  Parse.readS_to_P $
    Read.readPrec_to_S (do Read.String s <- Read.lexP; return s) 0
-- Handy util
-- Prepend extra field descriptions to an existing list of fields.
addFields :: [FieldDescr a]
          -> ([FieldDescr a] -> [FieldDescr a])
addFields extra = \fields -> extra ++ fields
|
8b126c24e259171075111281ecff48b747b437d082ddb7c0832485a984211c03 | fetburner/Coq2SML | inductiveops.mli | (************************************************************************)
(*  v      *   The Coq Proof Assistant  /  The Coq Development Team     *)
(* <O___,, *   INRIA - CNRS - LIX - LRI - PPS - Copyright 1999-2014     *)
(*   \VV/  **************************************************************)
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
open Names
open Term
open Declarations
open Environ
open Evd
open Sign
(** The following three functions are similar to the ones defined in
    Inductive, but they expect an env *)
val type_of_inductive : env -> inductive -> types
(** Return type as quoted by the user *)
val type_of_constructor : env -> constructor -> types
val type_of_constructors : env -> inductive -> types array
(** Return constructor types in normal form *)
val arities_of_constructors : env -> inductive -> types array
(** An inductive type with its parameters *)
type inductive_family
val make_ind_family : inductive * constr list -> inductive_family
val dest_ind_family : inductive_family -> inductive * constr list
val map_ind_family : (constr -> constr) -> inductive_family -> inductive_family
val liftn_inductive_family : int -> int -> inductive_family -> inductive_family
val lift_inductive_family : int -> inductive_family -> inductive_family
val substnl_ind_family :
constr list -> int -> inductive_family -> inductive_family
(** An inductive type with its parameters and real arguments *)
type inductive_type = IndType of inductive_family * constr list
val make_ind_type : inductive_family * constr list -> inductive_type
val dest_ind_type : inductive_type -> inductive_family * constr list
val map_inductive_type : (constr -> constr) -> inductive_type -> inductive_type
val liftn_inductive_type : int -> int -> inductive_type -> inductive_type
val lift_inductive_type : int -> inductive_type -> inductive_type
val substnl_ind_type : constr list -> int -> inductive_type -> inductive_type
val mkAppliedInd : inductive_type -> constr
val mis_is_recursive_subset : int list -> wf_paths -> bool
val mis_is_recursive :
inductive * mutual_inductive_body * one_inductive_body -> bool
val mis_nf_constructor_type :
inductive * mutual_inductive_body * one_inductive_body -> int -> constr
(** Extract information from an inductive name *)
(** Arity of constructors excluding parameters and local defs *)
val mis_constr_nargs : inductive -> int array
val mis_constr_nargs_env : env -> inductive -> int array
val nconstructors : inductive -> int
(** Return the lengths of parameters signature and real arguments signature *)
val inductive_nargs : env -> inductive -> int * int
val mis_constructor_nargs_env : env -> constructor -> int
val constructor_nrealargs : env -> constructor -> int
val constructor_nrealhyps : env -> constructor -> int
val get_full_arity_sign : env -> inductive -> rel_context
val allowed_sorts : env -> inductive -> sorts_family list
(** Extract information from an inductive family *)
type constructor_summary = {
  cs_cstr : constructor; (* internal name of the constructor *)
  cs_params : constr list; (* parameters of the constructor in current ctx *)
  cs_nargs : int; (* length of arguments signature (letin included) *)
  cs_args : rel_context; (* signature of the arguments (letin included) *)
  cs_concl_realargs : constr array; (* actual realargs in the concl of cstr *)
}
val lift_constructor : int -> constructor_summary -> constructor_summary
val get_constructor :
inductive * mutual_inductive_body * one_inductive_body * constr list ->
int -> constructor_summary
val get_arity : env -> inductive_family -> rel_context * sorts_family
val get_constructors : env -> inductive_family -> constructor_summary array
val build_dependent_constructor : constructor_summary -> constr
val build_dependent_inductive : env -> inductive_family -> constr
val make_arity_signature : env -> bool -> inductive_family -> rel_context
val make_arity : env -> bool -> inductive_family -> sorts -> types
val build_branch_type : env -> bool -> constr -> constructor_summary -> types
(** Raise [Not_found] if not given an valid inductive type *)
val extract_mrectype : constr -> inductive * constr list
val find_mrectype : env -> evar_map -> types -> inductive * constr list
val find_rectype : env -> evar_map -> types -> inductive_type
val find_inductive : env -> evar_map -> types -> inductive * constr list
val find_coinductive : env -> evar_map -> types -> inductive * constr list
(********************)
(** Builds the case predicate arity (dependent or not) *)
val arity_of_case_predicate :
env -> inductive_family -> bool -> sorts -> types
val type_case_branches_with_names :
env -> inductive * constr list -> constr -> constr ->
types array * types
(** Annotation for cases *)
val make_case_info : env -> inductive -> case_style -> case_info
(*i Compatibility
val make_default_case_info : env -> case_style -> inductive -> case_info
i*)
(********************)
val type_of_inductive_knowing_conclusion :
env -> one_inductive_body -> types -> types
(********************)
val control_only_guard : env -> types -> unit
val subst_inductive : Mod_subst.substitution -> inductive -> inductive
| null | https://raw.githubusercontent.com/fetburner/Coq2SML/322d613619edbb62edafa999bff24b1993f37612/coq-8.4pl4/pretyping/inductiveops.mli | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
* Return type as quoted by the user
* Return constructor types in normal form
* An inductive type with its parameters
* An inductive type with its parameters and real arguments
* Extract information from an inductive name
* Return the lengths of parameters signature and real arguments signature
* Extract information from an inductive family
internal name of the constructor
parameters of the constructor in current ctx
length of arguments signature (letin included)
signature of the arguments (letin included)
* Raise [Not_found] if not given an valid inductive type
******************
* Builds the case predicate arity (dependent or not)
* Annotation for cases
******************
****************** | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2014
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
open Names
open Term
open Declarations
open Environ
open Evd
open Sign
* The following three functions are similar to the ones defined in
Inductive , but they expect an env
Inductive, but they expect an env *)
val type_of_inductive : env -> inductive -> types
val type_of_constructor : env -> constructor -> types
val type_of_constructors : env -> inductive -> types array
val arities_of_constructors : env -> inductive -> types array
type inductive_family
val make_ind_family : inductive * constr list -> inductive_family
val dest_ind_family : inductive_family -> inductive * constr list
val map_ind_family : (constr -> constr) -> inductive_family -> inductive_family
val liftn_inductive_family : int -> int -> inductive_family -> inductive_family
val lift_inductive_family : int -> inductive_family -> inductive_family
val substnl_ind_family :
constr list -> int -> inductive_family -> inductive_family
type inductive_type = IndType of inductive_family * constr list
val make_ind_type : inductive_family * constr list -> inductive_type
val dest_ind_type : inductive_type -> inductive_family * constr list
val map_inductive_type : (constr -> constr) -> inductive_type -> inductive_type
val liftn_inductive_type : int -> int -> inductive_type -> inductive_type
val lift_inductive_type : int -> inductive_type -> inductive_type
val substnl_ind_type : constr list -> int -> inductive_type -> inductive_type
val mkAppliedInd : inductive_type -> constr
val mis_is_recursive_subset : int list -> wf_paths -> bool
val mis_is_recursive :
inductive * mutual_inductive_body * one_inductive_body -> bool
val mis_nf_constructor_type :
inductive * mutual_inductive_body * one_inductive_body -> int -> constr
* Arity of constructors excluding parameters and local defs
val mis_constr_nargs : inductive -> int array
val mis_constr_nargs_env : env -> inductive -> int array
val nconstructors : inductive -> int
val inductive_nargs : env -> inductive -> int * int
val mis_constructor_nargs_env : env -> constructor -> int
val constructor_nrealargs : env -> constructor -> int
val constructor_nrealhyps : env -> constructor -> int
val get_full_arity_sign : env -> inductive -> rel_context
val allowed_sorts : env -> inductive -> sorts_family list
type constructor_summary = {
actual realargs in the concl of cstr
}
val lift_constructor : int -> constructor_summary -> constructor_summary
val get_constructor :
inductive * mutual_inductive_body * one_inductive_body * constr list ->
int -> constructor_summary
val get_arity : env -> inductive_family -> rel_context * sorts_family
val get_constructors : env -> inductive_family -> constructor_summary array
val build_dependent_constructor : constructor_summary -> constr
val build_dependent_inductive : env -> inductive_family -> constr
val make_arity_signature : env -> bool -> inductive_family -> rel_context
val make_arity : env -> bool -> inductive_family -> sorts -> types
val build_branch_type : env -> bool -> constr -> constructor_summary -> types
val extract_mrectype : constr -> inductive * constr list
val find_mrectype : env -> evar_map -> types -> inductive * constr list
val find_rectype : env -> evar_map -> types -> inductive_type
val find_inductive : env -> evar_map -> types -> inductive * constr list
val find_coinductive : env -> evar_map -> types -> inductive * constr list
val arity_of_case_predicate :
env -> inductive_family -> bool -> sorts -> types
val type_case_branches_with_names :
env -> inductive * constr list -> constr -> constr ->
types array * types
val make_case_info : env -> inductive -> case_style -> case_info
i Compatibility
val make_default_case_info : env - > case_style - > inductive - > case_info
i
val make_default_case_info : env -> case_style -> inductive -> case_info
i*)
val type_of_inductive_knowing_conclusion :
env -> one_inductive_body -> types -> types
val control_only_guard : env -> types -> unit
val subst_inductive : Mod_subst.substitution -> inductive -> inductive
|
28ed10ff0422e1f4bb2ea60b021b45bfc0e92504ad9600a5fbad66ff3a00bc4c | MaskRay/OJHaskell | RPI.hs | import Control.Monad
import Control.Monad.Instances
-- Computes the RPI (Ratings Percentage Index) for each of n teams:
--     RPI = WP/4 + OWP/2 + OOWP/4
-- The schedule is an n-by-n character matrix where '.' means the game was
-- not played, '1' means the row team won and '0' means it lost.
main = do
  cases <- liftM read getLine
  forM_ [1..cases] $ \cc -> do
    n <- liftM read getLine
    schedule <- forM [1..n] (const getLine)
        -- total: games played by each team; wins: games won
    let total = map (\x -> (fromIntegral $ length $ filter (/='.') $ schedule!!x)) [0..n-1]
        wins = map (\x -> (fromIntegral $ length $ filter (=='1') $ schedule!!x)) [0..n-1]
        -- WP: winning percentage
        wp = zipWith (/) wins total
        -- OWP: average of opponents' winning percentages, where each
        -- opponent's record excludes the games played against team x
        owp = [sum [case schedule!!x!!y of
                      '.' -> 0
                      '1' -> wins!!y / (total!!y-1)
                      '0' -> (wins!!y-1) / (total!!y-1)
                      | y <- [0..n-1]] / total!!x
                | x <- [0..n-1]]
        -- OOWP: average of opponents' OWP values
        oowp = [sum [case schedule!!x!!y of
                       '.' -> 0
                       _ -> owp!!y
                       | y <- [0..n-1]] / total!!x
                  | x <- [0..n-1]]
    putStrLn $ "Case #" ++ show cc ++ ":"
    -- combine the three components per team index via the reader applicative
    forM_ [0..n-1] $ print . liftM3 (((+).).(+)) ((/4).(wp!!)) ((/2).(owp!!)) ((/4).(oowp!!))
import Control.Monad.Instances
main = do
cases <- liftM read getLine
forM_ [1..cases] $ \cc -> do
n <- liftM read getLine
schedule <- forM [1..n] (const getLine)
let total = map (\x -> (fromIntegral $ length $ filter (/='.') $ schedule!!x)) [0..n-1]
wins = map (\x -> (fromIntegral $ length $ filter (=='1') $ schedule!!x)) [0..n-1]
wp = zipWith (/) wins total
owp = [sum [case schedule!!x!!y of
'.' -> 0
'1' -> wins!!y / (total!!y-1)
'0' -> (wins!!y-1) / (total!!y-1)
| y <- [0..n-1]] / total!!x
| x <- [0..n-1]]
oowp = [sum [case schedule!!x!!y of
'.' -> 0
_ -> owp!!y
| y <- [0..n-1]] / total!!x
| x <- [0..n-1]]
putStrLn $ "Case #" ++ show cc ++ ":"
forM_ [0..n-1] $ print . liftM3 (((+).).(+)) ((/4).(wp!!)) ((/2).(owp!!)) ((/4).(oowp!!)) | |
603d6ddb00d63bd779216c8c591f72c8bd362ab82ba06d2a3c16be38f8505078 | codedownio/sandwich | Before.hs | # LANGUAGE DataKinds #
module Before where
import Control.Exception.Safe
import Control.Monad.IO.Class
import Control.Monad.Trans.Writer
import qualified Data.List as L
import GHC.Stack
import Test.Sandwich
import TestUtil
-- | Suite registration: each 'run' executes one scenario and accumulates any
-- failure into the WriterT's @[SomeException]@ (see TestUtil).
tests :: MonadIO m => WriterT [SomeException] m ()
tests = do
  run beforeExceptionSafety
  run beforeExceptionSafetyNested
-- Entry point: run all registered scenarios via TestUtil's runner.
main = mainWith tests
-- * Tests
-- | When a 'before' hook throws, the hook node itself must fail with
-- 'GotException', and each of its two children must fail with a
-- 'GetContextException' wrapping that same exception.
beforeExceptionSafety :: (HasCallStack) => IO ()
beforeExceptionSafety = do
  results <- runAndGetResults $ before "before label" throwSomeUserError $ do
    it "does thing 1" $ return ()
    it "does thing 2" $ return ()
  results `mustBe` (Failure (GotException Nothing (Just "Exception in before 'before label' handler") someUserErrorWrapped)
                    : L.replicate 2 (Failure (GetContextException Nothing (SomeExceptionWithEq (toException $ GotException Nothing (Just "Exception in before 'before label' handler") someUserErrorWrapped)))))
-- | Same as 'beforeExceptionSafety', but with a nested describe: all five
-- descendant nodes (two direct its, the describe node, and its two its) must
-- fail with 'GetContextException' wrapping the hook's exception.
beforeExceptionSafetyNested :: (HasCallStack) => IO ()
beforeExceptionSafetyNested = do
  results <- runAndGetResults $ before "before label" throwSomeUserError $ do
    it "does thing 1" $ return ()
    it "does thing 2" $ return ()
    describe "nested things" $ do
      it "does nested thing 1" $ return ()
      it "does nested thing 2" $ return ()
  results `mustBe` (Failure (GotException Nothing (Just "Exception in before 'before label' handler") someUserErrorWrapped)
                    : L.replicate 5 (Failure (GetContextException Nothing (SomeExceptionWithEq (toException $ GotException Nothing (Just "Exception in before 'before label' handler") someUserErrorWrapped)))))
| null | https://raw.githubusercontent.com/codedownio/sandwich/9c8f56b5aee94ba65c70b3e52bde8959010aecc1/sandwich/test/Before.hs | haskell | * Tests | # LANGUAGE DataKinds #
module Before where
import Control.Exception.Safe
import Control.Monad.IO.Class
import Control.Monad.Trans.Writer
import qualified Data.List as L
import GHC.Stack
import Test.Sandwich
import TestUtil
tests :: MonadIO m => WriterT [SomeException] m ()
tests = do
run beforeExceptionSafety
run beforeExceptionSafetyNested
main = mainWith tests
beforeExceptionSafety :: (HasCallStack) => IO ()
beforeExceptionSafety = do
results <- runAndGetResults $ before "before label" throwSomeUserError $ do
it "does thing 1" $ return ()
it "does thing 2" $ return ()
results `mustBe` (Failure (GotException Nothing (Just "Exception in before 'before label' handler") someUserErrorWrapped)
: L.replicate 2 (Failure (GetContextException Nothing (SomeExceptionWithEq (toException $ GotException Nothing (Just "Exception in before 'before label' handler") someUserErrorWrapped)))))
beforeExceptionSafetyNested :: (HasCallStack) => IO ()
beforeExceptionSafetyNested = do
results <- runAndGetResults $ before "before label" throwSomeUserError $ do
it "does thing 1" $ return ()
it "does thing 2" $ return ()
describe "nested things" $ do
it "does nested thing 1" $ return ()
it "does nested thing 2" $ return ()
results `mustBe` (Failure (GotException Nothing (Just "Exception in before 'before label' handler") someUserErrorWrapped)
: L.replicate 5 (Failure (GetContextException Nothing (SomeExceptionWithEq (toException $ GotException Nothing (Just "Exception in before 'before label' handler") someUserErrorWrapped)))))
|
eed59f55bd4f3176070da7bb39359adbef946e6a9901442282cabff294e5d4b4 | sanette/bogue | b_border.ml | (* draw fancy borders to arbitrary shapes *)
(* f : parameterize the boundary *)
(* h : distance from boundary (< 0 inside) *)
open Tsdl
open B_utils
module Theme = B_theme
module Draw = B_draw
type physicalpoint = Sdl.point
type logicalpoint = { x:float; y:float } (* points or vectors *)
(* for both, origin is top-left *)
type boundary = float -> logicalpoint
type normal = float -> logicalpoint (* normal unit vector *)
type distance = logicalpoint -> float
type shape = {
  boundary : boundary;
  (* parameterizes the boundary from t in [0,1], ideally when t is the
     rescaled arclength coordinate *)
  normal : normal; (* unit normal vector at parameter t *)
  distance : distance;
  (* some function R^2 -> R that defines the shape as the set of points
     where it is negative *)
  size : logicalpoint (* box size *)
}
(* Componentwise sum of two logical points/vectors. *)
let add p1 p2 =
  { x = p1.x +. p2.x; y = p1.y +. p2.y }
(* Scale a logical vector by the scalar [s]. *)
let mult s p =
  { x = p.x *. s; y = p.y *. s }
(* Euclidean dot product. *)
let dot p1 p2 =
  p1.x *. p2.x +. p1.y *. p2.y
(* Squared Euclidean norm. *)
let norm_square p = dot p p
(* Euclidean norm. *)
let norm p = sqrt (dot p p)
type colorfn = float -> float -> Draw.color
(* t -> s -> color at boundary t and distance s along the normal;
   t and s are in [0,1] *)
let getx = Sdl.Point.x
let gety = Sdl.Point.y
(* Convert a logical (unscaled float) point to a physical (pixel) Sdl point,
   applying the Theme scale. *)
let logical_to_physical p =
  Sdl.Point.create
    ~x:(Theme.scale_from_float p.x) ~y:(Theme.scale_from_float p.y)
(* Inverse conversion: a physical pixel point back to logical coordinates. *)
let physical_to_logical p =
  { x = Theme.unscale_to_float (getx p);
    y = Theme.unscale_to_float (gety p)}
(* approximately Gaussian around 0.5: mean of 10 uniform samples in [0,1] *)
(* Draws 10 uniform samples in [0,1] and returns their mean, giving an
   approximately Gaussian value centred on 0.5 (still within [0,1]). *)
let gaussian_float () =
  let n = 10 in
  let total = ref 0. in
  for _ = 1 to n do
    total := !total +. Random.float 1.
  done;
  !total /. float n
(* draw (rectangular) pixels *)
(* Draw one (square) pixel of logical [size] at the physical point [p]. *)
let draw_pixel renderer ?(size=1.0) p =
  let psize = Theme.scale_from_float size in
  (* debug trace -- NOTE(review): remove or guard behind a debug flag *)
  print_endline (Printf.sprintf "PSIZE=%i" psize);
  if psize = 1 (* TODO subpixel rendering *)
  then go (Sdl.render_draw_point renderer (getx p) (gety p))
  else if psize > 1
  then Draw.box renderer (getx p) (gety p) psize psize
(* Stochastic border rendering: scatter [npoints] random pixels around the
   boundary of [shape].  For each sample, [t] picks a boundary position and
   [s] a normal offset in [0,1] biased towards 0; the pixel is placed at
   distance [s *. border_width] along the normal and coloured by
   [colorfn t s]. *)
let draw_random renderer ?(border_width = 10.) shape colorfn npoints =
  for _ = 1 to npoints do
    let t = Random.float 1. in
    let s = 2. *. abs_float (gaussian_float () -. 0.5) in
    let p = shape.boundary t in
    let n = shape.normal t
            |> mult (s *. border_width) |> add p |> logical_to_physical in
    let r,g,b,a = colorfn t s in
    go (Sdl.set_render_draw_color renderer r g b a);
    (* debug trace -- NOTE(review): remove or guard behind a debug flag *)
    print_endline (Printf.sprintf "POINT = %i,%i, COLOR = %i,%i,%i,%i" (getx n) (gety n) r g b a);
    draw_pixel renderer n
  done
(* various shapes *)
(* Axis-aligned rectangle with top-left corner (x0,y0), width [w] and height
   [h].  The boundary is parameterized clockwise from the top-left corner,
   with t in [0,1] being the rescaled arclength coordinate. *)
let rectangle x0 y0 w h =
  (* fractions of the perimeter taken by one horizontal / vertical side *)
  let t1 = w /. (2. *. ( w +. h))
  and t2 = h /. (2. *. ( w +. h)) in
  let boundary t =
    let x, y = if t < t1 then t *. w /. t1, 0.
               else if t < t1 +. t2 then w, (t -. t1) *. h /. t2
               else if t < 2. *. t1 +. t2 then w -. (t -. t1 -. t2) *. w /. t1, h
               else 0., (1. -. t) *. h /. t2 in
    { x = x0 +. x; y = y0 +. y} in
  let normal t =
    let x, y = if t < t1 then (0., -1.)
               else if t < t1 +. t2 then (1., 0.)
               else if t < 2. *. t1 +. t2 then (0., 1.)
               else (-1., 0.) in
    { x; y} in
  (* signed L-inf distance: negative exactly inside the rectangle, which is
     all the [distance] contract requires (TODO: true Euclidean distance if
     callers ever need metric values) *)
  let distance p =
    max (max (x0 -. p.x) (p.x -. x0 -. w))
        (max (y0 -. p.y) (p.y -. y0 -. h)) in
  { boundary; normal; distance; size = { x = w; y = h}}
(* Ellipse centred at [center] with horizontal semi-axis [a] and vertical
   semi-axis [b]. *)
let ellipse center a b =
  let pp = 2. *. pi in
  { boundary = (fun t ->
        let t = pp *. t in
        let x = center.x +. a *. (cos t) in
        let y = center.y +. b *. (sin t) in
        {x; y} (* Warning t is not the arc length coordinate if a <> b *)
      );
    normal = (fun t ->
        let t = pp *. t in
        let x = (cos t) in
        let y = (a /. b) *. (sin t) in
        mult (1. /. (norm {x; y})) {x; y}
      );
    distance = (fun p ->
        (* level function negative inside the ellipse.  Measured relative to
           [center]: the previous version used absolute coordinates, which is
           only correct for an ellipse centred at the origin. *)
        let dx = p.x -. center.x and dy = p.y -. center.y in
        sqrt (dx *. dx /. (a *. a) +. dy *. dy /. (b *. b)) -. 1.);
    size = { x = 2. *. a; y = 2. *. b }
  }
(* about 50ms here *)
(* Demo: draw a fuzzy red border around a test rectangle.  The ellipse and
   render-target variants are kept below as commented-out alternatives. *)
let essai renderer =
  print_endline "ESSAI BORDER";
  let bw = 10. in
  let npoints = 1000 in
  (* let a, b = 50., 20. in
   * let center = { x = a +. bw; y = b +. bw } in
   * let shape = ellipse center a b in *)
  let shape = rectangle bw bw 100. 40. in
  (* both red intensity and opacity fade with the normal distance s *)
  let colorfn _ s = let c = round ((1. -. s) *. 255.) in c,0,0,c in
  (* only needed by the commented-out render-target code below *)
  let _s = logical_to_physical (shape.size) in
  (* let tex = Draw.create_target renderer (getx _s) (gety _s) in
   * let save = Draw.push_target renderer tex in *)
  draw_random renderer ~border_width:bw shape colorfn npoints
  (* Draw.pop_target renderer save;
   * tex *)
f : parameterize the boundary
points or vectors
for both, origin is top-left
normal unit vector
unit normal vector at parameter t
box size
draw (rectangular) pixels
various shapes
Warning t is not the arc length coordinate if a <> b
about 50ms here
let center = { x = a +. bw; y = b +. bw } in
let shape = ellipse center a b in
Draw.pop_target renderer save;
* tex |
h : distance from boundary ( < 0 inside )
open Tsdl
open B_utils
module Theme = B_theme
module Draw = B_draw
type physicalpoint = Sdl.point
type boundary = float -> logicalpoint
type distance = logicalpoint -> float
type shape = {
parameterize the boundary from t in [ 0,1 ] , ideally
when t is the rescaled arclength coordinate .
when t is the rescaled arclength coordinate. *)
some function R² - > R that defines the shape as the
set of points where it is negative
set of points where it is negative *)
}
let add p1 p2 =
{ x = p1.x +. p2.x; y = p1.y +. p2.y }
let mult s p =
{ x = p.x *. s; y = p.y *. s }
let dot p1 p2 =
p1.x *. p2.x +. p1.y *. p2.y
let norm_square p = dot p p
let norm p = sqrt (dot p p)
type colorfn = float -> float -> Draw.color
t - > s - > color at boundary t and distance s along the normal . t and s are
in [ 0,1 ]
in [0,1]*)
let getx = Sdl.Point.x
let gety = Sdl.Point.y
let logical_to_physical p =
Sdl.Point.create
~x:(Theme.scale_from_float p.x) ~y:(Theme.scale_from_float p.y)
let physical_to_logical p =
{ x = Theme.unscale_to_float (getx p);
y = Theme.unscale_to_float (gety p)}
approximates a 0.5+N(0,1 ) law ( random float in [ 0,1 ] )
let gaussian_float () =
let n = 10 in
let rec loop i x =
if i = 0 then x /. (float n)
else loop (i-1) (x +. Random.float 1.)
in
loop n 0.
let draw_pixel renderer ?(size=1.0) p =
let psize = Theme.scale_from_float size in
print_endline (Printf.sprintf "PSIZE=%i" psize);
TODO subpixel rendering
then go (Sdl.render_draw_point renderer (getx p) (gety p))
else if psize > 1
then Draw.box renderer (getx p) (gety p) psize psize
let draw_random renderer ?(border_width = 10.) shape colorfn npoints =
for _ = 1 to npoints do
let t = Random.float 1. in
let s = 2. *. abs_float (gaussian_float () -. 0.5) in
let p = shape.boundary t in
let n = shape.normal t
|> mult (s *. border_width) |> add p |> logical_to_physical in
let r,g,b,a = colorfn t s in
go (Sdl.set_render_draw_color renderer r g b a);
print_endline (Printf.sprintf "POINT = %i,%i, COLOR = %i,%i,%i,%i" (getx n) (gety n) r g b a);
draw_pixel renderer n
done
let rectangle x0 y0 w h =
let t1 = w /. (2. *. ( w +. h))
and t2 = h /. (2. *. ( w +. h)) in
let boundary t =
let x, y = if t < t1 then t *. w /. t1, 0.
else if t < t1 +. t2 then w, (t -. t1) *. h /. t2
else if t < 2. *. t1 +. t2 then w -. (t -. t1 -. t2) *. w /. t1, h
else 0., (1. -. t) *. h /. t2 in
{ x = x0 +. x; y = y0 +. y} in
let normal t =
let x, y = if t < t1 then (0., -1.)
else if t < t1 +. t2 then (1., 0.)
else if t < 2. *. t1 +. t2 then (0., 1.)
else (-1., 0.) in
{ x; y} in
TODO
{ boundary; normal; distance; size = { x = w; y = h}}
let ellipse center a b =
let pp = 2. *. pi in
{ boundary = (fun t ->
let t = pp *. t in
let x = center.x +. a *. (cos t) in
let y = center.y +. b *. (sin t) in
);
normal = (fun t ->
let t = pp *. t in
let x = (cos t) in
let y = (a /. b) *. (sin t) in
mult (1. /. (norm {x; y})) {x; y}
);
distance = (fun p ->
sqrt ( p.x *. p.x /. (a *. a) +. p.y *. p.y /. (b *. b)) -. 1.);
size = { x = 2. *. a; y = 2. *. b }
}
let essai renderer =
print_endline "ESSAI BORDER";
let bw = 10. in
let npoints = 1000 in
let a , b = 50 . , 20 . in
let shape = rectangle bw bw 100. 40. in
let colorfn _ s = let c = round ((1. -. s) *. 255.) in c,0,0,c in
let s = logical_to_physical ( shape.size ) in
* let tex = Draw.create_target renderer ( getx s ) ( gety s ) in
* let save = Draw.push_target renderer tex in
* let tex = Draw.create_target renderer (getx s) (gety s) in
* let save = Draw.push_target renderer tex in *)
draw_random renderer ~border_width:bw shape colorfn npoints
|
f7556b4f62528f767aaa99f4b4cff95923c1627907242364e58950007cdb8e74 | discus-lang/ddc | Label.hs |
module DDC.Data.Label
( Label
, labelOfText
, hashOfLabel, nameOfLabel)
where
import Control.DeepSeq
import Data.Bits
import qualified "cryptohash-sha256" Crypto.Hash.SHA256 as Sha256
import qualified Data.ByteString as BS
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified GHC.Word as W
-- | Label type.
data Label
= Label
{ labelHash :: !W.Word64
, labelName :: !T.Text }
deriving Show
instance NFData Label where
rnf _ = ()
instance Eq Label where
(==) (Label h1 _) (Label h2 _) = h1 == h2
# INLINE (= =) #
(/=) (Label h1 _) (Label h2 _) = h1 /= h2
{-# INLINE (/=) #-}
instance Ord Label where
compare (Label h1 _) (Label h2 _) = compare h1 h2
# INLINE compare #
-- | Construct a label from a text string.
labelOfText :: T.Text -> Label
labelOfText tx
= Label
{ labelHash = shortHashOfText tx
, labelName = tx }
-- | Get the hash code of a label.
hashOfLabel :: Label -> W.Word64
hashOfLabel (Label h _) = h
# INLINE hashOfLabel #
-- | Get the name of a label.
nameOfLabel :: Label -> T.Text
nameOfLabel (Label _ n) = n
# INLINE nameOfLabel #
-- | Get a short hash code for a text string.
-- We use this as a (most likely) unique key for names.
shortHashOfText :: T.Text -> W.Word64
shortHashOfText tx
= let bs = Sha256.hash $ T.encodeUtf8 tx
(w0 : w1 : w2 : w3 : w4 : w5 : w6 : w7 : _)
= BS.unpack bs
in (shift (fromIntegral w0) 56)
.|. (shift (fromIntegral w1) 48)
.|. (shift (fromIntegral w2) 40)
.|. (shift (fromIntegral w3) 32)
.|. (shift (fromIntegral w4) 24)
.|. (shift (fromIntegral w5) 16)
.|. (shift (fromIntegral w6) 8)
.|. (fromIntegral w7)
| null | https://raw.githubusercontent.com/discus-lang/ddc/2baa1b4e2d43b6b02135257677671a83cb7384ac/src/s1/ddc-core/DDC/Data/Label.hs | haskell | | Label type.
# INLINE (/=) #
| Construct a label from a text string.
| Get the hash code of a label.
| Get the name of a label.
| Get a short hash code for a text string.
We use this as a (most likely) unique key for names. |
module DDC.Data.Label
( Label
, labelOfText
, hashOfLabel, nameOfLabel)
where
import Control.DeepSeq
import Data.Bits
import qualified "cryptohash-sha256" Crypto.Hash.SHA256 as Sha256
import qualified Data.ByteString as BS
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified GHC.Word as W
data Label
= Label
{ labelHash :: !W.Word64
, labelName :: !T.Text }
deriving Show
instance NFData Label where
rnf _ = ()
instance Eq Label where
(==) (Label h1 _) (Label h2 _) = h1 == h2
# INLINE (= =) #
(/=) (Label h1 _) (Label h2 _) = h1 /= h2
instance Ord Label where
compare (Label h1 _) (Label h2 _) = compare h1 h2
# INLINE compare #
labelOfText :: T.Text -> Label
labelOfText tx
= Label
{ labelHash = shortHashOfText tx
, labelName = tx }
hashOfLabel :: Label -> W.Word64
hashOfLabel (Label h _) = h
# INLINE hashOfLabel #
nameOfLabel :: Label -> T.Text
nameOfLabel (Label _ n) = n
# INLINE nameOfLabel #
shortHashOfText :: T.Text -> W.Word64
shortHashOfText tx
= let bs = Sha256.hash $ T.encodeUtf8 tx
(w0 : w1 : w2 : w3 : w4 : w5 : w6 : w7 : _)
= BS.unpack bs
in (shift (fromIntegral w0) 56)
.|. (shift (fromIntegral w1) 48)
.|. (shift (fromIntegral w2) 40)
.|. (shift (fromIntegral w3) 32)
.|. (shift (fromIntegral w4) 24)
.|. (shift (fromIntegral w5) 16)
.|. (shift (fromIntegral w6) 8)
.|. (fromIntegral w7)
|
25a0f2fc69161988a9c32dd8028843385c383c495154506f9224c8e8adbf3472 | protojure/protoc-plugin | oneof.clj | (ns protojure.plugin.parse.oneof
(:require [protojure.plugin.util :as util]))
(declare adjust-fields)
;;-------------------------------------------------------------------
;; Move oneof fields to a parent container named with the oneof name
;;-------------------------------------------------------------------
;;
;; Understanding the flow of this parsing code benefits from a
;; familiarity with the protoc CodeGeneratorRequest oneof descriptor
;; format. The only indication that an element in the :field array
;; (here using the flatland key conversion conventions) is a oneof
;; "possible field" is the presence of the key
;; :oneof-index. e.g.
;;
;; ```
;; :proto-file [
{ : name " "
;; ...
;; :message-type [{:name "Bar"
;; :field [{:name "s",
;; :type :type-string,
: oneof - index 0 ,
;; ...}
;; ```
;;
;; Given this structure, the task is to convert multiple declared
;; elements in the :fields array of the CodeGeneratorRequest to a
single ( protobuf ) tag handler capable of deserializing any one
;; of the multiple declared fields with identical :oneof-index values.
;;
;; In order to do this in the .clj bindings, we group fields with
;; the same :oneof-index value into a form similar to our handling
;; of the protobuf message type, with the additional constraint
that only a single type will appear in the single one - of tag .
;;
;; A further example:
;;
One - of fields " s " and " ss " in " " below will be moved into a container
;; field under the "ofields" key. The name of the container field will be
;; the entry of the "oneof-index" in the ":oneofdecl" key (in this example
" TheStrings " ) . The new container will be used as a regular field with tag " TheStrings "
;; and will serdes any of the child fields.
;;
;; Input message:
{ : name " " ,
;; :fields
;; (
;; {:name "not-a-oneof-field",
: number 1 ,
: one - index 1 ,
;; ...}
;; {:name "s",
: number 2 ,
: one - index 1 ,
;; ...}
;; {:name "ss",
: number 3 ,
: one - index 1 ,
;; ...}
;; )
: oneofdecl [ " FirstOneof " , " TheStrings " ] }
;;
;; Output message:
{ : name " " ,
;; :fields
;; (
;; {:name "not-a-oneof-field",
: number 1 ,
: one - index 1 ,
;; ...}
{ : name " TheStrings "
: one - index 1 ,
;; :ofields [{:name "s",
: number 2 ,
: one - index 1 ,
;; ...}
;; {:name "ss",
: number 3 ,
: one - index 1 ,
;; ...}}
;; )
: oneofdecl [ " FirstOneof " , " TheStrings " ] }
;;-------------------------------------------------------------------
(defn adjust-msg [{:keys [name oneofdecl] :as msg}]
(cond-> msg (not (empty? oneofdecl))
(update-in [:fields] (partial adjust-fields name oneofdecl))))
(defn valid? [oneofdecl {:keys [oneof-index] :as field}]
(contains? oneofdecl oneof-index))
(defn get-index [oneofdecl {:keys [oneof-index] :as field}]
(when (valid? oneofdecl field)
oneof-index))
;;-------------------------------------------------------------------
;; Add oneof fields to the appropriate parent field
;;-------------------------------------------------------------------
(defn- adjust-field [parent-name oneofdecl coll f]
(let [oi (get-index oneofdecl f)
newf (first (filter #(when-let [oiother (get-index oneofdecl %)] (when (= oi oiother) %)) coll))
inewf (.indexOf coll newf)]
(cond
;;-- not a oneof field, just add it
(nil? oi) (conj coll f)
;;--parent not created ?
(nil? newf) (let [name (get-in oneofdecl [oi :name])]
(conj coll {:name name
:oparentname parent-name
:fname (util/clojurify-name
(str parent-name "-" name))
:oneof-index oi
:type :type-oneof
:label :label-optional
:ofields [f]}))
;;--update the parent with the passed oneof
:default (update-in coll [inewf :ofields] (fn [of] (conj of f))))))
(defn- adjust-fields [parent-name oneofdecl fields]
(reduce
(fn [coll f]
(adjust-field parent-name oneofdecl coll f))
[] fields))
| null | https://raw.githubusercontent.com/protojure/protoc-plugin/d14311c67cb21656a3ae8b8cbe3d742b8ad7e1af/src/protojure/plugin/parse/oneof.clj | clojure | -------------------------------------------------------------------
Move oneof fields to a parent container named with the oneof name
-------------------------------------------------------------------
Understanding the flow of this parsing code benefits from a
familiarity with the protoc CodeGeneratorRequest oneof descriptor
format. The only indication that an element in the :field array
(here using the flatland key conversion conventions) is a oneof
"possible field" is the presence of the key
:oneof-index. e.g.
```
:proto-file [
...
:message-type [{:name "Bar"
:field [{:name "s",
:type :type-string,
...}
```
Given this structure, the task is to convert multiple declared
elements in the :fields array of the CodeGeneratorRequest to a
of the multiple declared fields with identical :oneof-index values.
In order to do this in the .clj bindings, we group fields with
the same :oneof-index value into a form similar to our handling
of the protobuf message type, with the additional constraint
A further example:
field under the "ofields" key. The name of the container field will be
the entry of the "oneof-index" in the ":oneofdecl" key (in this example
and will serdes any of the child fields.
Input message:
:fields
(
{:name "not-a-oneof-field",
...}
{:name "s",
...}
{:name "ss",
...}
)
Output message:
:fields
(
{:name "not-a-oneof-field",
...}
:ofields [{:name "s",
...}
{:name "ss",
...}}
)
-------------------------------------------------------------------
-------------------------------------------------------------------
Add oneof fields to the appropriate parent field
-------------------------------------------------------------------
-- not a oneof field, just add it
--parent not created ?
--update the parent with the passed oneof | (ns protojure.plugin.parse.oneof
(:require [protojure.plugin.util :as util]))
(declare adjust-fields)
{ : name " "
: oneof - index 0 ,
single ( protobuf ) tag handler capable of deserializing any one
that only a single type will appear in the single one - of tag .
One - of fields " s " and " ss " in " " below will be moved into a container
" TheStrings " ) . The new container will be used as a regular field with tag " TheStrings "
{ : name " " ,
: number 1 ,
: one - index 1 ,
: number 2 ,
: one - index 1 ,
: number 3 ,
: one - index 1 ,
: oneofdecl [ " FirstOneof " , " TheStrings " ] }
{ : name " " ,
: number 1 ,
: one - index 1 ,
{ : name " TheStrings "
: one - index 1 ,
: number 2 ,
: one - index 1 ,
: number 3 ,
: one - index 1 ,
: oneofdecl [ " FirstOneof " , " TheStrings " ] }
(defn adjust-msg [{:keys [name oneofdecl] :as msg}]
(cond-> msg (not (empty? oneofdecl))
(update-in [:fields] (partial adjust-fields name oneofdecl))))
(defn valid? [oneofdecl {:keys [oneof-index] :as field}]
(contains? oneofdecl oneof-index))
(defn get-index [oneofdecl {:keys [oneof-index] :as field}]
(when (valid? oneofdecl field)
oneof-index))
(defn- adjust-field [parent-name oneofdecl coll f]
(let [oi (get-index oneofdecl f)
newf (first (filter #(when-let [oiother (get-index oneofdecl %)] (when (= oi oiother) %)) coll))
inewf (.indexOf coll newf)]
(cond
(nil? oi) (conj coll f)
(nil? newf) (let [name (get-in oneofdecl [oi :name])]
(conj coll {:name name
:oparentname parent-name
:fname (util/clojurify-name
(str parent-name "-" name))
:oneof-index oi
:type :type-oneof
:label :label-optional
:ofields [f]}))
:default (update-in coll [inewf :ofields] (fn [of] (conj of f))))))
(defn- adjust-fields [parent-name oneofdecl fields]
(reduce
(fn [coll f]
(adjust-field parent-name oneofdecl coll f))
[] fields))
|
1255bf779ae8a173f3b4b2ec84265e95c86433bab9d4ea87513d3ddf6d31a1b4 | josefs/Gradualizer | lint_errors.erl | %% This module doesn't even compile. The errors here are caught by erl_lint.
-module(lint_errors).
-export([local_type/0,
local_call/0,
one_more_for_the_record/0,
local_record/1,
record_field/1,
illegal_binary_segment/1,
invalid_record_info/0,
illegal_pattern/1]).
-spec local_type() -> undefined_type().
local_type() -> ok.
-spec local_call() -> ok.
local_call() -> undefined_call().
-record(r, {a :: #s{}}).
%% The number of expected errors are the number of exported functions,
%% so we create a function without errors, to account for the error in
%% the record definition above.
one_more_for_the_record() -> ok.
-spec local_record(#r{}) -> boolean().
local_record(R) -> if
(R#r.a)#s.a == c -> true;
true -> false
end.
-spec record_field(#r{}) -> boolean().
record_field(R) -> if
R#r.b == c -> true;
true -> false
end.
illegal_binary_segment(X) ->
Size not allowed with utf8/16/32
invalid_record_info() -> record_info(foo, bar).
-spec illegal_pattern(gradualizer:top()) -> gradualizer:top().
illegal_pattern(1 + A) -> ok.
| null | https://raw.githubusercontent.com/josefs/Gradualizer/0657d76e7de893a2bd7e49e0fac186b732fea434/test/misc/lint_errors.erl | erlang | This module doesn't even compile. The errors here are caught by erl_lint.
The number of expected errors are the number of exported functions,
so we create a function without errors, to account for the error in
the record definition above. | -module(lint_errors).
-export([local_type/0,
local_call/0,
one_more_for_the_record/0,
local_record/1,
record_field/1,
illegal_binary_segment/1,
invalid_record_info/0,
illegal_pattern/1]).
-spec local_type() -> undefined_type().
local_type() -> ok.
-spec local_call() -> ok.
local_call() -> undefined_call().
-record(r, {a :: #s{}}).
one_more_for_the_record() -> ok.
-spec local_record(#r{}) -> boolean().
local_record(R) -> if
(R#r.a)#s.a == c -> true;
true -> false
end.
-spec record_field(#r{}) -> boolean().
record_field(R) -> if
R#r.b == c -> true;
true -> false
end.
illegal_binary_segment(X) ->
Size not allowed with utf8/16/32
invalid_record_info() -> record_info(foo, bar).
-spec illegal_pattern(gradualizer:top()) -> gradualizer:top().
illegal_pattern(1 + A) -> ok.
|
93bd1880b35ff3db328844ccebbda9e42c7bcf8f8eadce9a3c68b8ff80ce72a0 | ninjudd/cake | servlet.clj | (ns servlet
(:gen-class))
(defn speak [& _]
(println "woof!"))
(speak) | null | https://raw.githubusercontent.com/ninjudd/cake/3a1627120b74e425ab21aa4d1b263be09e945cfd/examples/servlet/src/servlet.clj | clojure | (ns servlet
(:gen-class))
(defn speak [& _]
(println "woof!"))
(speak) | |
ceac84f2f3431e14e1f7b83353b312e777fc34d0167ee1566f8daff92c1fe343 | karlhof26/gimp-scheme | AutoColorize_flavor0_02.scm |
This script was tested with Gimp 2.10.24
;;
;; New versions will be distributed from </> only
;;
;; This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 3 of the License , or
;; (at your option) any later version.
;;
;; This program is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;; GNU General Public License for more details.
;;
You should have received a copy of the GNU General Public License
;; along with this program; if not, see <>.
;;
; ;Define the function
; Auto colorize image into random number of colors of random hues
author :
date : 2015
(define (script-fu-auto-colorize-flavor0 simage slayer
hatches
rgb
flatten
)
(let*
(
(color-map 0)
(colors 0)
(image-width)
(image-height)
( R 0.2126 ) ; constants for calculating luminance
( G 0.7152 )
;(B 0.0722)
;(0.299*R + 0.587*G + 0.114*B)
( R 0.299 )
;(G 0.587)
( B 0.114 )
sqrt ( 0.299*R^2 + 0.587*G^2 + 0.114*B^2 )
;(B (/ 18.0 255))
( R ( / 54.0 255 ) )
( G ( / 182.0 255 ) )
wikipedia
(Bl 0.0722)
(Re 0.2126)
(Gr 0.7152)
;how my camera sees black and white
;(B (/ 147 479))
;(R (/ 138 479))
( G ( / 194 479 ) )
(r 0) ;randomly generated r g b values
(g 0)
(b 0)
(l-original 0) ;luminance original
(l-new 0)
(red 0)
(green 0)
(blue 0)
(y 0)
(hue)
(floating)
(difference)
(image)
(layer)
(try 2)
(tries 256)
(counter 1)
(loopbreak 1)
)
;(gimp-image-undo-disable image); DN = NO UNDO
undo - group in one step
;convert to indexed
(set! image-width (car (gimp-image-width simage)))
(set! image-height (car (gimp-image-height simage)))
creates new image instead of working on existing one
(gimp-selection-all simage)
(gimp-edit-copy-visible simage)
(gimp-edit-copy slayer)
(set! image (car (gimp-edit-paste-as-new-image)))
(set! layer (car (gimp-image-get-active-layer image)))
( gimp - message " ok to here 69 " )
(set! floating (car (gimp-layer-new image image-width image-height
RGBA-IMAGE "Colorize" 100 LAYER-MODE-NORMAL-LEGACY))) ;creates layer
;insert above current layer
;(gimp-image-insert-layer image new-layer 0 (car (gimp-image-get-item-position image layer)))
(gimp-image-insert-layer image floating 0 0) ; was 0 0
(gimp-drawable-edit-fill floating FILL-TRANSPARENT)
(gimp-display-new image)
(gimp-displays-flush)
;(set! image (car (gimp-image-new width height RGB))) ;creates new image
(gimp-image-convert-indexed image CONVERT-DITHER-NONE CONVERT-PALETTE-GENERATE hatches FALSE FALSE "unused palette name")
;grabs color map
(set! colors (vector->list (cadr (gimp-image-get-colormap image))))
(gimp-image-convert-rgb image) ;converts it to rgb before we call hatch loop
(set! y hatches) ;loop hatches number of times
;; (srand (car (gettimeofday)))
(srand (realtime))
;(gimp-message "time ok")
(gimp-context-set-sample-threshold-int 5)
(gimp-context-set-sample-criterion 0)
(while (> y 0)
;do work here
(set! red (car colors))
(set! green (cadr colors))
(set! blue (caddr colors))
;select each color
;(gimp-message (number->string r))
;(gimp-message (number->string red))
;(gimp-message (number->string b))
;(gimp-message (number->string blue))
(gimp-image-set-active-layer image layer)
(gimp-image-select-color image CHANNEL-OP-REPLACE layer (list red green blue)) ; was layer
( set ! hue ( rand 360 ) )
( gimp - colorize layer hue 100 0 )
;(gimp-message "color selected")
;(gimp-edit-copy layer)
;(set! floating (car(gimp-edit-paste layer TRUE)))
;(gimp-floating-sel-to-layer floating)
;(gimp-image-set-active-layer image floating)
;;;(set! floating (car (gimp-layer-new image image-width image-height
RGBA - IMAGE " Colorize " 100 LAYER - MODE - NORMAL - LEGACY ) ) ) ; creates layer
;insert above current layer
;(gimp-image-insert-layer image new-layer 0 (car (gimp-image-get-item-position image layer)))
;;;(gimp-image-insert-layer image floating 0 1) ; was 0 0
;;;(gimp-drawable-edit-fill floating FILL-TRANSPARENT)
;set that layer to be active layer
;;;(gimp-image-set-active-layer image floating)
;;;(gimp-edit-copy layer)
;;;(set! floating (car (gimp-edit-paste layer TRUE)))
;;;(gimp-floating-sel-to-layer floating)
;;;(gimp-image-set-active-layer image floating)
(gimp-image-select-color image CHANNEL-OP-REPLACE layer (list red green blue)) ; was layer
;;;(gimp-displays-flush)
;;;(gimp-message "quit now")
;;;(quit)
( set ! hue ( rand 360 ) )
( gimp - colorize floating hue 100 0 )
sqrt ( 0.299*R^2 + 0.587*G^2 + 0.114*B^2 )
( set ! l - original ( sqrt(+ ( pow ( * red R ) 2 ) ( pow ( * green G ) 2 ) ( pow ( * blue B ) 2 ) ) ) )
(set! l-original (+ (* red Re) (* green Gr) (* blue Bl)))
(set! difference 10)
;just randomly pick a color until we find a color of similar luminance
;absolutely not the ideal way of getting a color
(set! loopbreak 1)
(while (and (> difference 1) (< loopbreak 550))
(if (< l-original 10)
(begin
(set! r (rand 21))
(set! g (rand 21))
(set! b (rand 21))
)
(begin
(if (> l-original 245)
(begin
(set! r (+ (rand 20) 234))
(set! g (+ (rand 20) 234))
(set! b (+ (rand 20) 234))
)
(begin
was 256
was 256
was 256
)
)
)
)
(if (= rgb 1)
(begin
;;(gimp-message "rgb=1")
(if (< l-original 85)
(begin
(while (or (< b r) (< b g))
(if (< l-original 10)
(begin
(set! r (rand 21))
(set! g (rand 21))
(set! b (rand 21))
)
(begin
(if (> l-original 245)
(begin
(set! r (+ (rand 30) 224))
(set! g (+ (rand 30) 224))
was rand 20 and 234
)
(begin
was 256
was 256
was 256
)
)
)
)
)
)
(begin
(if (> l-original (* 85 2))
(begin
(while (or (< g r) (< g b))
(if (< l-original 10)
(begin
(set! r (rand 21))
(set! g (rand 21))
(set! b (rand 21))
)
(begin
(if (> l-original 245)
(begin
(set! r (+ (rand 20) 234))
(set! g (+ (rand 20) 234))
(set! b (+ (rand 20) 234))
)
(begin
was 256
was 256
was 256
)
)
)
)
)
)
(begin
(while(or (< r g) (< r b))
(if (< l-original 10)
(begin
(set! r (rand 21))
(set! g (rand 21))
(set! b (rand 21))
)
(begin
(if (> l-original 245)
(begin
(set! r (+ (rand 20) 234))
(set! g (+ (rand 20) 234))
(set! b (+ (rand 20) 234))
)
(begin
was 256
was 256
was 256
)
)
)
)
)
)
)
)
)
)
(begin
;;(gimp-message "rgb=0")
;;(gimp-message "inside here")
)
)
(set! loopbreak (+ loopbreak 1))
( set ! l - new ( sqrt(+ ( pow ( * r R ) 2 ) ( pow ( * g G ) 2 ) ( pow ( * b B ) 2 ) ) ) )
(set! l-new (+ (* r Re) (* g Gr) (* b Bl)))
(set! difference (abs (- l-new l-original)))
)
;;(gimp-message "ready to color")
;(gimp-message (number->string difference))
;(gimp-message (number->string r))
;(gimp-message (number->string red))
( script - fu - colorize image floating ( list b ) 100 )
;(gimp-message "ready to color2")
(gimp-image-set-active-layer image layer)
(gimp-image-select-color image CHANNEL-OP-REPLACE layer (list red green blue))
(gimp-image-set-active-layer image floating)
( gimp - context - set - foreground ' ( 123 0 240 ) )
(gimp-context-set-foreground (list r g b))
(gimp-drawable-edit-fill floating FILL-FOREGROUND) ;; was image-edit-fill
;;;(gimp-selection-none image)
(if (> y 1) ;if y is still valid we set colors to the next colors
(begin
(set! colors (cdddr colors))
)
)
;loop control
(set! y (- y 1))
;(gimp-message "flushing and while looping")
;(gimp-displays-flush)
);end of while
(gimp-selection-none image)
(if (= flatten TRUE)
(begin
(gimp-image-flatten image)
)
(begin)
)
;(gimp-image-undo-enable image) ;DN = NO UNDO
undo group in one step
(gimp-displays-flush)
(gc)
)
) ;end of define
(script-fu-register
"script-fu-auto-colorize-flavor0" ;function name
"<Image>/Script-Fu2/Create from Image/Auto Colorize flavour0..." ;menu register
"Randomly colorize image with specified number of colors. \nfile:AutoColorize_flavor0_02.scm" ;description
"Tin Tran" ;author name
"copyright info and description" ;copyright info or description
"2015" ;date
"RGB*, GRAY*" ;mode
SF-IMAGE "Image" 0
SF-DRAWABLE "Layer" 0
SF-ADJUSTMENT "Number of colors" '(5 2 255 1 10 0 0)
SF-OPTION "Colorize with" '("Random colors" "RGB variations")
SF-TOGGLE "Flatten newly created image" FALSE
)
;;----------------------------- | null | https://raw.githubusercontent.com/karlhof26/gimp-scheme/58757c144e7a6f8f9797cdfe927f9caffa7c62a7/AutoColorize_flavor0_02.scm | scheme |
New versions will be distributed from </> only
This program is free software; you can redistribute it and/or modify
either version 3 of the License , or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program; if not, see <>.
;Define the function
Auto colorize image into random number of colors of random hues
constants for calculating luminance
(B 0.0722)
(0.299*R + 0.587*G + 0.114*B)
(G 0.587)
(B (/ 18.0 255))
how my camera sees black and white
(B (/ 147 479))
(R (/ 138 479))
randomly generated r g b values
luminance original
(gimp-image-undo-disable image); DN = NO UNDO
convert to indexed
creates layer
insert above current layer
(gimp-image-insert-layer image new-layer 0 (car (gimp-image-get-item-position image layer)))
was 0 0
(set! image (car (gimp-image-new width height RGB))) ;creates new image
grabs color map
converts it to rgb before we call hatch loop
loop hatches number of times
(srand (car (gettimeofday)))
(gimp-message "time ok")
do work here
select each color
(gimp-message (number->string r))
(gimp-message (number->string red))
(gimp-message (number->string b))
(gimp-message (number->string blue))
was layer
(gimp-message "color selected")
(gimp-edit-copy layer)
(set! floating (car(gimp-edit-paste layer TRUE)))
(gimp-floating-sel-to-layer floating)
(gimp-image-set-active-layer image floating)
(set! floating (car (gimp-layer-new image image-width image-height
creates layer
insert above current layer
(gimp-image-insert-layer image new-layer 0 (car (gimp-image-get-item-position image layer)))
(gimp-image-insert-layer image floating 0 1) ; was 0 0
(gimp-drawable-edit-fill floating FILL-TRANSPARENT)
set that layer to be active layer
(gimp-image-set-active-layer image floating)
(gimp-edit-copy layer)
(set! floating (car (gimp-edit-paste layer TRUE)))
(gimp-floating-sel-to-layer floating)
(gimp-image-set-active-layer image floating)
was layer
(gimp-displays-flush)
(gimp-message "quit now")
(quit)
just randomly pick a color until we find a color of similar luminance
absolutely not the ideal way of getting a color
(gimp-message "rgb=1")
(gimp-message "rgb=0")
(gimp-message "inside here")
(gimp-message "ready to color")
(gimp-message (number->string difference))
(gimp-message (number->string r))
(gimp-message (number->string red))
(gimp-message "ready to color2")
was image-edit-fill
(gimp-selection-none image)
if y is still valid we set colors to the next colors
loop control
(gimp-message "flushing and while looping")
(gimp-displays-flush)
end of while
(gimp-image-undo-enable image) ;DN = NO UNDO
end of define
function name
menu register
description
author name
copyright info or description
date
mode
----------------------------- |
This script was tested with Gimp 2.10.24
it under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
author :
date : 2015
(define (script-fu-auto-colorize-flavor0 simage slayer
hatches
rgb
flatten
)
(let*
(
(color-map 0)
(colors 0)
(image-width)
(image-height)
( G 0.7152 )
( R 0.299 )
( B 0.114 )
sqrt ( 0.299*R^2 + 0.587*G^2 + 0.114*B^2 )
( R ( / 54.0 255 ) )
( G ( / 182.0 255 ) )
wikipedia
(Bl 0.0722)
(Re 0.2126)
(Gr 0.7152)
( G ( / 194 479 ) )
(g 0)
(b 0)
(l-new 0)
(red 0)
(green 0)
(blue 0)
(y 0)
(hue)
(floating)
(difference)
(image)
(layer)
(try 2)
(tries 256)
(counter 1)
(loopbreak 1)
)
undo - group in one step
(set! image-width (car (gimp-image-width simage)))
(set! image-height (car (gimp-image-height simage)))
creates new image instead of working on existing one
(gimp-selection-all simage)
(gimp-edit-copy-visible simage)
(gimp-edit-copy slayer)
(set! image (car (gimp-edit-paste-as-new-image)))
(set! layer (car (gimp-image-get-active-layer image)))
( gimp - message " ok to here 69 " )
(set! floating (car (gimp-layer-new image image-width image-height
(gimp-drawable-edit-fill floating FILL-TRANSPARENT)
(gimp-display-new image)
(gimp-displays-flush)
(gimp-image-convert-indexed image CONVERT-DITHER-NONE CONVERT-PALETTE-GENERATE hatches FALSE FALSE "unused palette name")
(set! colors (vector->list (cadr (gimp-image-get-colormap image))))
(srand (realtime))
(gimp-context-set-sample-threshold-int 5)
(gimp-context-set-sample-criterion 0)
(while (> y 0)
(set! red (car colors))
(set! green (cadr colors))
(set! blue (caddr colors))
(gimp-image-set-active-layer image layer)
( set ! hue ( rand 360 ) )
( gimp - colorize layer hue 100 0 )
( set ! hue ( rand 360 ) )
( gimp - colorize floating hue 100 0 )
sqrt ( 0.299*R^2 + 0.587*G^2 + 0.114*B^2 )
( set ! l - original ( sqrt(+ ( pow ( * red R ) 2 ) ( pow ( * green G ) 2 ) ( pow ( * blue B ) 2 ) ) ) )
(set! l-original (+ (* red Re) (* green Gr) (* blue Bl)))
(set! difference 10)
(set! loopbreak 1)
(while (and (> difference 1) (< loopbreak 550))
(if (< l-original 10)
(begin
(set! r (rand 21))
(set! g (rand 21))
(set! b (rand 21))
)
(begin
(if (> l-original 245)
(begin
(set! r (+ (rand 20) 234))
(set! g (+ (rand 20) 234))
(set! b (+ (rand 20) 234))
)
(begin
was 256
was 256
was 256
)
)
)
)
(if (= rgb 1)
(begin
(if (< l-original 85)
(begin
(while (or (< b r) (< b g))
(if (< l-original 10)
(begin
(set! r (rand 21))
(set! g (rand 21))
(set! b (rand 21))
)
(begin
(if (> l-original 245)
(begin
(set! r (+ (rand 30) 224))
(set! g (+ (rand 30) 224))
was rand 20 and 234
)
(begin
was 256
was 256
was 256
)
)
)
)
)
)
(begin
(if (> l-original (* 85 2))
(begin
(while (or (< g r) (< g b))
(if (< l-original 10)
(begin
(set! r (rand 21))
(set! g (rand 21))
(set! b (rand 21))
)
(begin
(if (> l-original 245)
(begin
(set! r (+ (rand 20) 234))
(set! g (+ (rand 20) 234))
(set! b (+ (rand 20) 234))
)
(begin
was 256
was 256
was 256
)
)
)
)
)
)
(begin
(while(or (< r g) (< r b))
(if (< l-original 10)
(begin
(set! r (rand 21))
(set! g (rand 21))
(set! b (rand 21))
)
(begin
(if (> l-original 245)
(begin
(set! r (+ (rand 20) 234))
(set! g (+ (rand 20) 234))
(set! b (+ (rand 20) 234))
)
(begin
was 256
was 256
was 256
)
)
)
)
)
)
)
)
)
)
(begin
)
)
(set! loopbreak (+ loopbreak 1))
( set ! l - new ( sqrt(+ ( pow ( * r R ) 2 ) ( pow ( * g G ) 2 ) ( pow ( * b B ) 2 ) ) ) )
(set! l-new (+ (* r Re) (* g Gr) (* b Bl)))
(set! difference (abs (- l-new l-original)))
)
( script - fu - colorize image floating ( list b ) 100 )
(gimp-image-set-active-layer image layer)
(gimp-image-select-color image CHANNEL-OP-REPLACE layer (list red green blue))
(gimp-image-set-active-layer image floating)
( gimp - context - set - foreground ' ( 123 0 240 ) )
(gimp-context-set-foreground (list r g b))
(begin
(set! colors (cdddr colors))
)
)
(set! y (- y 1))
(gimp-selection-none image)
(if (= flatten TRUE)
(begin
(gimp-image-flatten image)
)
(begin)
)
undo group in one step
(gimp-displays-flush)
(gc)
)
(script-fu-register
SF-IMAGE "Image" 0
SF-DRAWABLE "Layer" 0
SF-ADJUSTMENT "Number of colors" '(5 2 255 1 10 0 0)
SF-OPTION "Colorize with" '("Random colors" "RGB variations")
SF-TOGGLE "Flatten newly created image" FALSE
)
|
79747293278bd9aaaae36dda3b1371ba0fcf2fa73f394c42bc49249c585d0ecf | voxoz/emqttd | emqttd_gen_mod.erl | %%--------------------------------------------------------------------
Copyright ( c ) 2013 - 2017 EMQ Enterprise , Inc. ( )
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
%% @doc emqttd gen_mod behaviour
-module(emqttd_gen_mod).
-author("Feng Lee <>").
-include("emqttd.hrl").
-ifdef(use_specs).
-callback(load(Opts :: any()) -> ok | {error, any()}).
-callback(unload(State :: any()) -> any()).
-else.
-export([behaviour_info/1]).
behaviour_info(callbacks) ->
[{load, 1}, {unload, 1}];
behaviour_info(_Other) ->
undefined.
-endif.
| null | https://raw.githubusercontent.com/voxoz/emqttd/2be612e0e7a00a866cd9af350a030966d73fbc09/src/emqttd_gen_mod.erl | erlang | --------------------------------------------------------------------
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
--------------------------------------------------------------------
@doc emqttd gen_mod behaviour | Copyright ( c ) 2013 - 2017 EMQ Enterprise , Inc. ( )
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(emqttd_gen_mod).
-author("Feng Lee <>").
-include("emqttd.hrl").
-ifdef(use_specs).
-callback(load(Opts :: any()) -> ok | {error, any()}).
-callback(unload(State :: any()) -> any()).
-else.
-export([behaviour_info/1]).
behaviour_info(callbacks) ->
[{load, 1}, {unload, 1}];
behaviour_info(_Other) ->
undefined.
-endif.
|
216064bdafc15e707ee214afeb82757d1b74e82dcfd872b2d0df315386bfb6d5 | postspectacular/devart-codefactory | tooltips.cljs | (ns codefactory.editor.tooltips
(:require-macros
[cljs.core.async.macros :refer [go]]
[thi.ng.macromath.core :as mm])
(:require
[cljs.core.async :refer [<! alts! timeout]]
[codefactory.config :as config]
[thi.ng.cljs.async :as async]
[thi.ng.cljs.log :refer [debug info warn]]
[thi.ng.cljs.route :as route]
[thi.ng.cljs.utils :as utils :refer [->px]]
[thi.ng.cljs.dom :as dom]
[thi.ng.cljs.gestures :as gest]
[thi.ng.geom.core :as g]
[thi.ng.geom.core.vector :as v :refer [vec2]]
[thi.ng.common.math.core :as m]))
(def tooltip-element (memoize #(dom/by-id (str (name %) "-tip"))))
(defn add-tooltip-buttons
[el bus state]
(when (:intro-active? @state)
(let [div (dom/create! "div" el)
skip (dom/create! "input" div {:type "button" :value "skip"})
next (dom/create! "input" div {:type "button" :value "next"})]
(dom/add-listeners
[[skip "click" #(async/publish bus :intro-done nil)]
[next "click" #(async/publish bus :intro-next nil)]]))))
(defn handle-tooltip-display
[bus state]
(let [show (async/subscribe bus :show-tooltip)
hide (async/subscribe bus :hide-tooltip)
tooltips (-> config/app :editor :tooltips)
tip-body (memoize #(dom/query % ".tooltip-content"))]
(go
(loop []
(let [[_ [el id]] (<! show)
tip (tooltip-element id)
{:keys [offset intro-offset content auto?]} (tooltips id)
body (tip-body tip)
intro? (:intro-active? @state)
offset (if intro? (or intro-offset offset) offset)
[x y] (g/+ (vec2 (dom/offset el))
(if (fn? offset) (offset el) offset))]
(dom/set-html! body content)
(add-tooltip-buttons body bus state)
(-> tip
(dom/set-style! {:display "block" :left (->px x) :top (->px y)})
(dom/remove-class! "hidden"))
(when auto?
(js/setTimeout
#(async/publish bus :hide-tooltip id)
(config/timeout :tooltip)))
(recur))))
(go
(loop []
(let [[_ id] (<! hide)]
(dom/set-style! (tooltip-element id) {:display "none"})
(recur))))))
(defn handle-tooltips
[bus state]
(let [tooltips (-> config/app :editor :tooltips)
tips (->> tooltips
(filter (comp :user? val))
keys
(mapv #(-> % name dom/by-id (dom/query "svg"))))
channels (fn [ev] (set (mapv #(first (async/event-channel % ev)) tips)))
on (channels "mouseenter")
off (channels "mouseleave")
touch (channels "touchstart")
all (vec (concat on off touch))]
(go
(loop [tip-state {}]
(let [[e ch] (alts! all)
el (.-target e)
id (-> el dom/parent (dom/get-attribs ["id"]) first)
kid (keyword id)
show? (or (on ch) (and (touch ch) (not (tip-state kid))))]
(when (and id (not (:intro-active? @state)))
(if show?
(async/publish bus :show-tooltip [el kid])
(async/publish bus :hide-tooltip kid)))
(async/publish bus :user-action nil)
(recur (assoc tip-state kid show?)))))))
(defn hide-tooltips
[]
(->> (-> config/app :editor :tooltips)
keys
(map #(-> % tooltip-element (dom/add-class! "hidden")))
(dorun)))
(defn handle-intro
[bus state]
(let [next (async/subscribe bus :intro-next)
done (async/subscribe bus :intro-done)
tips (-> config/app :editor :intro)]
(go
(loop []
(<! next)
(let [id (:intro-id @state)
id' (inc id)]
(when (>= id 0)
(async/publish bus :hide-tooltip (tips id)))
(if (< id' (count tips))
(let [kid (tips id')
el (if-not (= :edit-canvas kid)
(-> kid name dom/by-id (dom/query "svg"))
(-> kid name dom/by-id))]
(swap! state assoc-in [:intro-id] id')
(async/publish bus :show-tooltip [el kid]))
(async/publish bus :intro-done nil)))
(async/publish bus :user-action nil)
(recur)))
(go
(loop []
(<! done)
(let [id (:intro-id @state)]
(when (>= id 0)
(async/publish bus :hide-tooltip (tips id)))
(swap! state assoc :intro-active? false)
(async/publish bus :regenerate-scene nil))
(recur)))))
| null | https://raw.githubusercontent.com/postspectacular/devart-codefactory/9bccdc10e58fa4861a69767e9ae4be0bb8d7f650/src-cljs/codefactory/editor/tooltips.cljs | clojure | (ns codefactory.editor.tooltips
(:require-macros
[cljs.core.async.macros :refer [go]]
[thi.ng.macromath.core :as mm])
(:require
[cljs.core.async :refer [<! alts! timeout]]
[codefactory.config :as config]
[thi.ng.cljs.async :as async]
[thi.ng.cljs.log :refer [debug info warn]]
[thi.ng.cljs.route :as route]
[thi.ng.cljs.utils :as utils :refer [->px]]
[thi.ng.cljs.dom :as dom]
[thi.ng.cljs.gestures :as gest]
[thi.ng.geom.core :as g]
[thi.ng.geom.core.vector :as v :refer [vec2]]
[thi.ng.common.math.core :as m]))
(def tooltip-element (memoize #(dom/by-id (str (name %) "-tip"))))
(defn add-tooltip-buttons
[el bus state]
(when (:intro-active? @state)
(let [div (dom/create! "div" el)
skip (dom/create! "input" div {:type "button" :value "skip"})
next (dom/create! "input" div {:type "button" :value "next"})]
(dom/add-listeners
[[skip "click" #(async/publish bus :intro-done nil)]
[next "click" #(async/publish bus :intro-next nil)]]))))
(defn handle-tooltip-display
[bus state]
(let [show (async/subscribe bus :show-tooltip)
hide (async/subscribe bus :hide-tooltip)
tooltips (-> config/app :editor :tooltips)
tip-body (memoize #(dom/query % ".tooltip-content"))]
(go
(loop []
(let [[_ [el id]] (<! show)
tip (tooltip-element id)
{:keys [offset intro-offset content auto?]} (tooltips id)
body (tip-body tip)
intro? (:intro-active? @state)
offset (if intro? (or intro-offset offset) offset)
[x y] (g/+ (vec2 (dom/offset el))
(if (fn? offset) (offset el) offset))]
(dom/set-html! body content)
(add-tooltip-buttons body bus state)
(-> tip
(dom/set-style! {:display "block" :left (->px x) :top (->px y)})
(dom/remove-class! "hidden"))
(when auto?
(js/setTimeout
#(async/publish bus :hide-tooltip id)
(config/timeout :tooltip)))
(recur))))
(go
(loop []
(let [[_ id] (<! hide)]
(dom/set-style! (tooltip-element id) {:display "none"})
(recur))))))
(defn handle-tooltips
[bus state]
(let [tooltips (-> config/app :editor :tooltips)
tips (->> tooltips
(filter (comp :user? val))
keys
(mapv #(-> % name dom/by-id (dom/query "svg"))))
channels (fn [ev] (set (mapv #(first (async/event-channel % ev)) tips)))
on (channels "mouseenter")
off (channels "mouseleave")
touch (channels "touchstart")
all (vec (concat on off touch))]
(go
(loop [tip-state {}]
(let [[e ch] (alts! all)
el (.-target e)
id (-> el dom/parent (dom/get-attribs ["id"]) first)
kid (keyword id)
show? (or (on ch) (and (touch ch) (not (tip-state kid))))]
(when (and id (not (:intro-active? @state)))
(if show?
(async/publish bus :show-tooltip [el kid])
(async/publish bus :hide-tooltip kid)))
(async/publish bus :user-action nil)
(recur (assoc tip-state kid show?)))))))
(defn hide-tooltips
[]
(->> (-> config/app :editor :tooltips)
keys
(map #(-> % tooltip-element (dom/add-class! "hidden")))
(dorun)))
(defn handle-intro
[bus state]
(let [next (async/subscribe bus :intro-next)
done (async/subscribe bus :intro-done)
tips (-> config/app :editor :intro)]
(go
(loop []
(<! next)
(let [id (:intro-id @state)
id' (inc id)]
(when (>= id 0)
(async/publish bus :hide-tooltip (tips id)))
(if (< id' (count tips))
(let [kid (tips id')
el (if-not (= :edit-canvas kid)
(-> kid name dom/by-id (dom/query "svg"))
(-> kid name dom/by-id))]
(swap! state assoc-in [:intro-id] id')
(async/publish bus :show-tooltip [el kid]))
(async/publish bus :intro-done nil)))
(async/publish bus :user-action nil)
(recur)))
(go
(loop []
(<! done)
(let [id (:intro-id @state)]
(when (>= id 0)
(async/publish bus :hide-tooltip (tips id)))
(swap! state assoc :intro-active? false)
(async/publish bus :regenerate-scene nil))
(recur)))))
| |
acc6c1ecca3e840e4595bd5e2bcfb3104b70da4c0d2d4de0ba315dd0f1d3f5eb | HunterYIboHu/htdp2-solution | ex77-point-in-time.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-beginner-reader.ss" "lang")((modname ex77-point-in-time) (read-case-sensitive #t) (teachpacks ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp"))) (htdp-settings #(#t write repeating-decimal #f #t none #f ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp")) #f)))
Time is ( make - time Number Number Number )
- hour is between 0 and 24 = > [ 0 , 24 ]
- min is between 0 and 60 = > [ 0 , 60 ]
- sec is between 0 and 60 = > [ 0 , 60 ]
interpretation Point - in - time represent a time
since midnight
(define-struct time [hour minute second])
| null | https://raw.githubusercontent.com/HunterYIboHu/htdp2-solution/6182b4c2ef650ac7059f3c143f639d09cd708516/Chapter1/Section5/ex77-point-in-time.rkt | racket | about the language level of this file in a form that our tools can easily process. | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-beginner-reader.ss" "lang")((modname ex77-point-in-time) (read-case-sensitive #t) (teachpacks ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp"))) (htdp-settings #(#t write repeating-decimal #f #t none #f ((lib "image.rkt" "teachpack" "2htdp") (lib "universe.rkt" "teachpack" "2htdp")) #f)))
Time is ( make - time Number Number Number )
- hour is between 0 and 24 = > [ 0 , 24 ]
- min is between 0 and 60 = > [ 0 , 60 ]
- sec is between 0 and 60 = > [ 0 , 60 ]
interpretation Point - in - time represent a time
since midnight
(define-struct time [hour minute second])
|
1c890fe3b50aa360cec6bf2df0727e48fef6701c13303c0bf88c19bb3054de83 | sebashack/servantRestfulAPI | API.hs | {-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleContexts #-}
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE OverloadedStrings #
{-# LANGUAGE ScopedTypeVariables #-}
# LANGUAGE TypeFamilies #
{-# LANGUAGE TypeOperators #-}
# LANGUAGE UndecidableInstances #
# LANGUAGE ScopedTypeVariables #
module Domains.UserDomain.User.API
(
UserAPI
) where
import Domains.UserDomain.User.DataTypes
import Servant.API
import Servant.API.BasicAuth
import Codec.Picture.Types
import Servant.JuicyPixels
import qualified Data.Time as TM
import qualified Data.Text as T
type UserToken = T.Text
type ProfileName = T.Text
type Password = T.Text
type Email = T.Text
type AvatarId = T.Text
type UserAPI =
1 --
"user" :> Header "Authorization" UserToken
:> Get '[JSON] User
2 --
:<|> "user" :> ReqBody '[JSON] CreateUserData
:> PostCreated '[JSON] NewUser
3 --
:<|> "user" :> "basicData"
:> Header "Authorization" UserToken
:> ReqBody '[JSON] BasicUserData
:> Put '[JSON] TokenInfo
4 --
:<|> "user" :> "isAvailable"
:> "profileName"
:> Capture "profileName" ProfileName
:> Get '[JSON] Bool
5 --
:<|> "user" :> "isAvailable"
:> "email"
:> Capture "email" Email
:> Get '[JSON] Bool
6 --
:<|> "user" :> "changePassword"
:> Header "Authorization" UserToken
:> ReqBody '[JSON] ChangePassData
:> Put '[JSON] ()
7 --
:<|> "user" :> "avatar"
:> Header "Authorization" UserToken
:> ReqBody '[JPEG 100] DynamicImage
:> PostCreated '[JSON] AvatarId
8 --
:<|> "user" :> "avatar"
:> Header "Authorization" UserToken
:> Delete '[JSON] ()
9 --
:<|> "user" :> "getAuthToken"
:> BasicAuth "credentials" (UserToken, TM.UTCTime)
:> Put '[JSON] TokenInfo
10 --
:<|> "user" :> "invalidateToken"
:> Header "Authorization" UserToken
:> Put '[JSON] ()
11 --
:<|> "user" :> "refreshToken"
:> Header "Authorization" UserToken
:> Put '[JSON] TokenInfo
| null | https://raw.githubusercontent.com/sebashack/servantRestfulAPI/e625535d196acefaff4f5bf03108816be668fe4d/libs/Domains/UserDomain/User/API.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE DeriveGeneric #
# LANGUAGE FlexibleContexts #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeOperators #
| # LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE OverloadedStrings #
# LANGUAGE TypeFamilies #
# LANGUAGE UndecidableInstances #
# LANGUAGE ScopedTypeVariables #
module Domains.UserDomain.User.API
(
UserAPI
) where
import Domains.UserDomain.User.DataTypes
import Servant.API
import Servant.API.BasicAuth
import Codec.Picture.Types
import Servant.JuicyPixels
import qualified Data.Time as TM
import qualified Data.Text as T
type UserToken = T.Text
type ProfileName = T.Text
type Password = T.Text
type Email = T.Text
type AvatarId = T.Text
type UserAPI =
"user" :> Header "Authorization" UserToken
:> Get '[JSON] User
:<|> "user" :> ReqBody '[JSON] CreateUserData
:> PostCreated '[JSON] NewUser
:<|> "user" :> "basicData"
:> Header "Authorization" UserToken
:> ReqBody '[JSON] BasicUserData
:> Put '[JSON] TokenInfo
:<|> "user" :> "isAvailable"
:> "profileName"
:> Capture "profileName" ProfileName
:> Get '[JSON] Bool
:<|> "user" :> "isAvailable"
:> "email"
:> Capture "email" Email
:> Get '[JSON] Bool
:<|> "user" :> "changePassword"
:> Header "Authorization" UserToken
:> ReqBody '[JSON] ChangePassData
:> Put '[JSON] ()
:<|> "user" :> "avatar"
:> Header "Authorization" UserToken
:> ReqBody '[JPEG 100] DynamicImage
:> PostCreated '[JSON] AvatarId
:<|> "user" :> "avatar"
:> Header "Authorization" UserToken
:> Delete '[JSON] ()
:<|> "user" :> "getAuthToken"
:> BasicAuth "credentials" (UserToken, TM.UTCTime)
:> Put '[JSON] TokenInfo
:<|> "user" :> "invalidateToken"
:> Header "Authorization" UserToken
:> Put '[JSON] ()
:<|> "user" :> "refreshToken"
:> Header "Authorization" UserToken
:> Put '[JSON] TokenInfo
|
20cd1c891ed08ae7f674d673f581cc69be15d9124e11eadbf1d4c28eee3e6c36 | realworldocaml/book | expansion.ml | open! Base
module Abstract = struct
type t [@@deriving_inline sexp]
let _ = fun (_ : t) -> ()
let t_of_sexp =
(let error_source__002_ = "expansion.ml.Abstract.t" in
fun x__003_ -> Sexplib0.Sexp_conv_error.empty_type error_source__002_ x__003_
: Sexplib0.Sexp.t -> t)
;;
let _ = t_of_sexp
let sexp_of_t = (fun _ -> assert false : t -> Sexplib0.Sexp.t)
let _ = sexp_of_t
[@@@end]
end
module Tuple = struct
type t = int * int * int [@@deriving_inline sexp]
let _ = fun (_ : t) -> ()
let t_of_sexp =
(let error_source__012_ = "expansion.ml.Tuple.t" in
function
| Sexplib0.Sexp.List [ arg0__005_; arg1__006_; arg2__007_ ] ->
let res0__008_ = int_of_sexp arg0__005_
and res1__009_ = int_of_sexp arg1__006_
and res2__010_ = int_of_sexp arg2__007_ in
res0__008_, res1__009_, res2__010_
| sexp__011_ ->
Sexplib0.Sexp_conv_error.tuple_of_size_n_expected error_source__012_ 3 sexp__011_
: Sexplib0.Sexp.t -> t)
;;
let _ = t_of_sexp
let sexp_of_t =
(fun (arg0__013_, arg1__014_, arg2__015_) ->
let res0__016_ = sexp_of_int arg0__013_
and res1__017_ = sexp_of_int arg1__014_
and res2__018_ = sexp_of_int arg2__015_ in
Sexplib0.Sexp.List [ res0__016_; res1__017_; res2__018_ ]
: t -> Sexplib0.Sexp.t)
;;
let _ = sexp_of_t
[@@@end]
end
module Record = struct
type t =
{ a : int
; b : int
; c : int
}
[@@deriving_inline sexp]
let _ = fun (_ : t) -> ()
let t_of_sexp =
(let error_source__036_ = "expansion.ml.Record.t" in
function
| Sexplib0.Sexp.List field_sexps__021_ as sexp__020_ ->
let a__022_ = Stdlib.ref Stdlib.Option.None
and b__024_ = Stdlib.ref Stdlib.Option.None
and c__026_ = Stdlib.ref Stdlib.Option.None
and duplicates__028_ = Stdlib.ref []
and extra__029_ = Stdlib.ref [] in
let rec iter__037_ = function
| Sexplib0.Sexp.List
(Sexplib0.Sexp.Atom field_name__030_ :: (([] | [ _ ]) as _field_sexps__032_))
:: tail__038_ ->
let _field_sexp__031_ () =
match _field_sexps__032_ with
| [ x__039_ ] -> x__039_
| [] ->
Sexplib0.Sexp_conv_error.record_only_pairs_expected
error_source__036_
sexp__020_
| _ -> assert false
in
(match field_name__030_ with
| "a" ->
(match Stdlib.( ! ) a__022_ with
| Stdlib.Option.None ->
let _field_sexp__031_ = _field_sexp__031_ () in
let fvalue__035_ = int_of_sexp _field_sexp__031_ in
Stdlib.( := ) a__022_ (Stdlib.Option.Some fvalue__035_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__028_
(field_name__030_ :: Stdlib.( ! ) duplicates__028_))
| "b" ->
(match Stdlib.( ! ) b__024_ with
| Stdlib.Option.None ->
let _field_sexp__031_ = _field_sexp__031_ () in
let fvalue__034_ = int_of_sexp _field_sexp__031_ in
Stdlib.( := ) b__024_ (Stdlib.Option.Some fvalue__034_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__028_
(field_name__030_ :: Stdlib.( ! ) duplicates__028_))
| "c" ->
(match Stdlib.( ! ) c__026_ with
| Stdlib.Option.None ->
let _field_sexp__031_ = _field_sexp__031_ () in
let fvalue__033_ = int_of_sexp _field_sexp__031_ in
Stdlib.( := ) c__026_ (Stdlib.Option.Some fvalue__033_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__028_
(field_name__030_ :: Stdlib.( ! ) duplicates__028_))
| _ ->
if Stdlib.( ! ) Sexplib0.Sexp_conv.record_check_extra_fields
then Stdlib.( := ) extra__029_ (field_name__030_ :: Stdlib.( ! ) extra__029_)
else ());
iter__037_ tail__038_
| ((Sexplib0.Sexp.Atom _ | Sexplib0.Sexp.List _) as sexp__020_) :: _ ->
Sexplib0.Sexp_conv_error.record_only_pairs_expected
error_source__036_
sexp__020_
| [] -> ()
in
iter__037_ field_sexps__021_;
(match Stdlib.( ! ) duplicates__028_ with
| _ :: _ ->
Sexplib0.Sexp_conv_error.record_duplicate_fields
error_source__036_
(Stdlib.( ! ) duplicates__028_)
sexp__020_
| [] ->
(match Stdlib.( ! ) extra__029_ with
| _ :: _ ->
Sexplib0.Sexp_conv_error.record_extra_fields
error_source__036_
(Stdlib.( ! ) extra__029_)
sexp__020_
| [] ->
(match Stdlib.( ! ) a__022_, Stdlib.( ! ) b__024_, Stdlib.( ! ) c__026_ with
| ( Stdlib.Option.Some a__023_
, Stdlib.Option.Some b__025_
, Stdlib.Option.Some c__027_ ) -> { a = a__023_; b = b__025_; c = c__027_ }
| _ ->
Sexplib0.Sexp_conv_error.record_undefined_elements
error_source__036_
sexp__020_
[ Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) a__022_) Stdlib.Option.None, "a"
; Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) b__024_) Stdlib.Option.None, "b"
; Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) c__026_) Stdlib.Option.None, "c"
])))
| Sexplib0.Sexp.Atom _ as sexp__020_ ->
Sexplib0.Sexp_conv_error.record_list_instead_atom error_source__036_ sexp__020_
: Sexplib0.Sexp.t -> t)
;;
let _ = t_of_sexp
let sexp_of_t =
(fun { a = a__041_; b = b__043_; c = c__045_ } ->
let bnds__040_ = [] in
let bnds__040_ =
let arg__046_ = sexp_of_int c__045_ in
Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "c"; arg__046_ ] :: bnds__040_
in
let bnds__040_ =
let arg__044_ = sexp_of_int b__043_ in
Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "b"; arg__044_ ] :: bnds__040_
in
let bnds__040_ =
let arg__042_ = sexp_of_int a__041_ in
Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "a"; arg__042_ ] :: bnds__040_
in
Sexplib0.Sexp.List bnds__040_
: t -> Sexplib0.Sexp.t)
;;
let _ = sexp_of_t
[@@@end]
end
module Mutable_record = struct
type t =
{ mutable a : int
; mutable b : int
; mutable c : int
}
[@@deriving_inline sexp]
let _ = fun (_ : t) -> ()
let t_of_sexp =
(let error_source__064_ = "expansion.ml.Mutable_record.t" in
function
| Sexplib0.Sexp.List field_sexps__049_ as sexp__048_ ->
let a__050_ = Stdlib.ref Stdlib.Option.None
and b__052_ = Stdlib.ref Stdlib.Option.None
and c__054_ = Stdlib.ref Stdlib.Option.None
and duplicates__056_ = Stdlib.ref []
and extra__057_ = Stdlib.ref [] in
let rec iter__065_ = function
| Sexplib0.Sexp.List
(Sexplib0.Sexp.Atom field_name__058_ :: (([] | [ _ ]) as _field_sexps__060_))
:: tail__066_ ->
let _field_sexp__059_ () =
match _field_sexps__060_ with
| [ x__067_ ] -> x__067_
| [] ->
Sexplib0.Sexp_conv_error.record_only_pairs_expected
error_source__064_
sexp__048_
| _ -> assert false
in
(match field_name__058_ with
| "a" ->
(match Stdlib.( ! ) a__050_ with
| Stdlib.Option.None ->
let _field_sexp__059_ = _field_sexp__059_ () in
let fvalue__063_ = int_of_sexp _field_sexp__059_ in
Stdlib.( := ) a__050_ (Stdlib.Option.Some fvalue__063_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__056_
(field_name__058_ :: Stdlib.( ! ) duplicates__056_))
| "b" ->
(match Stdlib.( ! ) b__052_ with
| Stdlib.Option.None ->
let _field_sexp__059_ = _field_sexp__059_ () in
let fvalue__062_ = int_of_sexp _field_sexp__059_ in
Stdlib.( := ) b__052_ (Stdlib.Option.Some fvalue__062_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__056_
(field_name__058_ :: Stdlib.( ! ) duplicates__056_))
| "c" ->
(match Stdlib.( ! ) c__054_ with
| Stdlib.Option.None ->
let _field_sexp__059_ = _field_sexp__059_ () in
let fvalue__061_ = int_of_sexp _field_sexp__059_ in
Stdlib.( := ) c__054_ (Stdlib.Option.Some fvalue__061_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__056_
(field_name__058_ :: Stdlib.( ! ) duplicates__056_))
| _ ->
if Stdlib.( ! ) Sexplib0.Sexp_conv.record_check_extra_fields
then Stdlib.( := ) extra__057_ (field_name__058_ :: Stdlib.( ! ) extra__057_)
else ());
iter__065_ tail__066_
| ((Sexplib0.Sexp.Atom _ | Sexplib0.Sexp.List _) as sexp__048_) :: _ ->
Sexplib0.Sexp_conv_error.record_only_pairs_expected
error_source__064_
sexp__048_
| [] -> ()
in
iter__065_ field_sexps__049_;
(match Stdlib.( ! ) duplicates__056_ with
| _ :: _ ->
Sexplib0.Sexp_conv_error.record_duplicate_fields
error_source__064_
(Stdlib.( ! ) duplicates__056_)
sexp__048_
| [] ->
(match Stdlib.( ! ) extra__057_ with
| _ :: _ ->
Sexplib0.Sexp_conv_error.record_extra_fields
error_source__064_
(Stdlib.( ! ) extra__057_)
sexp__048_
| [] ->
(match Stdlib.( ! ) a__050_, Stdlib.( ! ) b__052_, Stdlib.( ! ) c__054_ with
| ( Stdlib.Option.Some a__051_
, Stdlib.Option.Some b__053_
, Stdlib.Option.Some c__055_ ) -> { a = a__051_; b = b__053_; c = c__055_ }
| _ ->
Sexplib0.Sexp_conv_error.record_undefined_elements
error_source__064_
sexp__048_
[ Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) a__050_) Stdlib.Option.None, "a"
; Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) b__052_) Stdlib.Option.None, "b"
; Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) c__054_) Stdlib.Option.None, "c"
])))
| Sexplib0.Sexp.Atom _ as sexp__048_ ->
Sexplib0.Sexp_conv_error.record_list_instead_atom error_source__064_ sexp__048_
: Sexplib0.Sexp.t -> t)
;;
let _ = t_of_sexp
let sexp_of_t =
(fun { a = a__069_; b = b__071_; c = c__073_ } ->
let bnds__068_ = [] in
let bnds__068_ =
let arg__074_ = sexp_of_int c__073_ in
Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "c"; arg__074_ ] :: bnds__068_
in
let bnds__068_ =
let arg__072_ = sexp_of_int b__071_ in
Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "b"; arg__072_ ] :: bnds__068_
in
let bnds__068_ =
let arg__070_ = sexp_of_int a__069_ in
Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "a"; arg__070_ ] :: bnds__068_
in
Sexplib0.Sexp.List bnds__068_
: t -> Sexplib0.Sexp.t)
;;
let _ = sexp_of_t
[@@@end]
end
module Variant = struct
type t =
| A
| B of int * int
| C of
{ a : int
; b : int
; d : int
}
| D of
{ mutable a : int
; mutable b : int
; mutable t : int
}
[@@deriving_inline sexp]
let _ = fun (_ : t) -> ()
let t_of_sexp =
(let error_source__077_ = "expansion.ml.Variant.t" in
function
| Sexplib0.Sexp.Atom ("a" | "A") -> A
| Sexplib0.Sexp.List
(Sexplib0.Sexp.Atom (("b" | "B") as _tag__080_) :: sexp_args__081_) as
_sexp__079_ ->
(match sexp_args__081_ with
| [ arg0__082_; arg1__083_ ] ->
let res0__084_ = int_of_sexp arg0__082_
and res1__085_ = int_of_sexp arg1__083_ in
B (res0__084_, res1__085_)
| _ ->
Sexplib0.Sexp_conv_error.stag_incorrect_n_args
error_source__077_
_tag__080_
_sexp__079_)
| Sexplib0.Sexp.List
(Sexplib0.Sexp.Atom (("c" | "C") as _tag__088_) :: field_sexps__086_) as
sexp__087_ ->
let a__089_ = Stdlib.ref Stdlib.Option.None
and b__091_ = Stdlib.ref Stdlib.Option.None
and d__093_ = Stdlib.ref Stdlib.Option.None
and duplicates__095_ = Stdlib.ref []
and extra__096_ = Stdlib.ref [] in
let rec iter__103_ = function
| Sexplib0.Sexp.List
(Sexplib0.Sexp.Atom field_name__097_ :: (([] | [ _ ]) as _field_sexps__099_))
:: tail__104_ ->
let _field_sexp__098_ () =
match _field_sexps__099_ with
| [ x__105_ ] -> x__105_
| [] ->
Sexplib0.Sexp_conv_error.record_only_pairs_expected
error_source__077_
sexp__087_
| _ -> assert false
in
(match field_name__097_ with
| "a" ->
(match Stdlib.( ! ) a__089_ with
| Stdlib.Option.None ->
let _field_sexp__098_ = _field_sexp__098_ () in
let fvalue__102_ = int_of_sexp _field_sexp__098_ in
Stdlib.( := ) a__089_ (Stdlib.Option.Some fvalue__102_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__095_
(field_name__097_ :: Stdlib.( ! ) duplicates__095_))
| "b" ->
(match Stdlib.( ! ) b__091_ with
| Stdlib.Option.None ->
let _field_sexp__098_ = _field_sexp__098_ () in
let fvalue__101_ = int_of_sexp _field_sexp__098_ in
Stdlib.( := ) b__091_ (Stdlib.Option.Some fvalue__101_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__095_
(field_name__097_ :: Stdlib.( ! ) duplicates__095_))
| "d" ->
(match Stdlib.( ! ) d__093_ with
| Stdlib.Option.None ->
let _field_sexp__098_ = _field_sexp__098_ () in
let fvalue__100_ = int_of_sexp _field_sexp__098_ in
Stdlib.( := ) d__093_ (Stdlib.Option.Some fvalue__100_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__095_
(field_name__097_ :: Stdlib.( ! ) duplicates__095_))
| _ ->
if Stdlib.( ! ) Sexplib0.Sexp_conv.record_check_extra_fields
then Stdlib.( := ) extra__096_ (field_name__097_ :: Stdlib.( ! ) extra__096_)
else ());
iter__103_ tail__104_
| ((Sexplib0.Sexp.Atom _ | Sexplib0.Sexp.List _) as sexp__087_) :: _ ->
Sexplib0.Sexp_conv_error.record_only_pairs_expected
error_source__077_
sexp__087_
| [] -> ()
in
iter__103_ field_sexps__086_;
(match Stdlib.( ! ) duplicates__095_ with
| _ :: _ ->
Sexplib0.Sexp_conv_error.record_duplicate_fields
error_source__077_
(Stdlib.( ! ) duplicates__095_)
sexp__087_
| [] ->
(match Stdlib.( ! ) extra__096_ with
| _ :: _ ->
Sexplib0.Sexp_conv_error.record_extra_fields
error_source__077_
(Stdlib.( ! ) extra__096_)
sexp__087_
| [] ->
(match Stdlib.( ! ) a__089_, Stdlib.( ! ) b__091_, Stdlib.( ! ) d__093_ with
| ( Stdlib.Option.Some a__090_
, Stdlib.Option.Some b__092_
, Stdlib.Option.Some d__094_ ) -> C { a = a__090_; b = b__092_; d = d__094_ }
| _ ->
Sexplib0.Sexp_conv_error.record_undefined_elements
error_source__077_
sexp__087_
[ Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) a__089_) Stdlib.Option.None, "a"
; Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) b__091_) Stdlib.Option.None, "b"
; Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) d__093_) Stdlib.Option.None, "d"
])))
| Sexplib0.Sexp.List
(Sexplib0.Sexp.Atom (("d" | "D") as _tag__108_) :: field_sexps__106_) as
sexp__107_ ->
let a__109_ = Stdlib.ref Stdlib.Option.None
and b__111_ = Stdlib.ref Stdlib.Option.None
and t__113_ = Stdlib.ref Stdlib.Option.None
and duplicates__115_ = Stdlib.ref []
and extra__116_ = Stdlib.ref [] in
let rec iter__123_ = function
| Sexplib0.Sexp.List
(Sexplib0.Sexp.Atom field_name__117_ :: (([] | [ _ ]) as _field_sexps__119_))
:: tail__124_ ->
let _field_sexp__118_ () =
match _field_sexps__119_ with
| [ x__125_ ] -> x__125_
| [] ->
Sexplib0.Sexp_conv_error.record_only_pairs_expected
error_source__077_
sexp__107_
| _ -> assert false
in
(match field_name__117_ with
| "a" ->
(match Stdlib.( ! ) a__109_ with
| Stdlib.Option.None ->
let _field_sexp__118_ = _field_sexp__118_ () in
let fvalue__122_ = int_of_sexp _field_sexp__118_ in
Stdlib.( := ) a__109_ (Stdlib.Option.Some fvalue__122_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__115_
(field_name__117_ :: Stdlib.( ! ) duplicates__115_))
| "b" ->
(match Stdlib.( ! ) b__111_ with
| Stdlib.Option.None ->
let _field_sexp__118_ = _field_sexp__118_ () in
let fvalue__121_ = int_of_sexp _field_sexp__118_ in
Stdlib.( := ) b__111_ (Stdlib.Option.Some fvalue__121_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__115_
(field_name__117_ :: Stdlib.( ! ) duplicates__115_))
| "t" ->
(match Stdlib.( ! ) t__113_ with
| Stdlib.Option.None ->
let _field_sexp__118_ = _field_sexp__118_ () in
let fvalue__120_ = int_of_sexp _field_sexp__118_ in
Stdlib.( := ) t__113_ (Stdlib.Option.Some fvalue__120_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__115_
(field_name__117_ :: Stdlib.( ! ) duplicates__115_))
| _ ->
if Stdlib.( ! ) Sexplib0.Sexp_conv.record_check_extra_fields
then Stdlib.( := ) extra__116_ (field_name__117_ :: Stdlib.( ! ) extra__116_)
else ());
iter__123_ tail__124_
| ((Sexplib0.Sexp.Atom _ | Sexplib0.Sexp.List _) as sexp__107_) :: _ ->
Sexplib0.Sexp_conv_error.record_only_pairs_expected
error_source__077_
sexp__107_
| [] -> ()
in
iter__123_ field_sexps__106_;
(match Stdlib.( ! ) duplicates__115_ with
| _ :: _ ->
Sexplib0.Sexp_conv_error.record_duplicate_fields
error_source__077_
(Stdlib.( ! ) duplicates__115_)
sexp__107_
| [] ->
(match Stdlib.( ! ) extra__116_ with
| _ :: _ ->
Sexplib0.Sexp_conv_error.record_extra_fields
error_source__077_
(Stdlib.( ! ) extra__116_)
sexp__107_
| [] ->
(match Stdlib.( ! ) a__109_, Stdlib.( ! ) b__111_, Stdlib.( ! ) t__113_ with
| ( Stdlib.Option.Some a__110_
, Stdlib.Option.Some b__112_
, Stdlib.Option.Some t__114_ ) -> D { a = a__110_; b = b__112_; t = t__114_ }
| _ ->
Sexplib0.Sexp_conv_error.record_undefined_elements
error_source__077_
sexp__107_
[ Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) a__109_) Stdlib.Option.None, "a"
; Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) b__111_) Stdlib.Option.None, "b"
; Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) t__113_) Stdlib.Option.None, "t"
])))
| Sexplib0.Sexp.List (Sexplib0.Sexp.Atom ("a" | "A") :: _) as sexp__078_ ->
Sexplib0.Sexp_conv_error.stag_no_args error_source__077_ sexp__078_
| Sexplib0.Sexp.Atom ("b" | "B") as sexp__078_ ->
Sexplib0.Sexp_conv_error.stag_takes_args error_source__077_ sexp__078_
| Sexplib0.Sexp.Atom ("c" | "C") as sexp__078_ ->
Sexplib0.Sexp_conv_error.stag_takes_args error_source__077_ sexp__078_
| Sexplib0.Sexp.Atom ("d" | "D") as sexp__078_ ->
Sexplib0.Sexp_conv_error.stag_takes_args error_source__077_ sexp__078_
| Sexplib0.Sexp.List (Sexplib0.Sexp.List _ :: _) as sexp__076_ ->
Sexplib0.Sexp_conv_error.nested_list_invalid_sum error_source__077_ sexp__076_
| Sexplib0.Sexp.List [] as sexp__076_ ->
Sexplib0.Sexp_conv_error.empty_list_invalid_sum error_source__077_ sexp__076_
| sexp__076_ ->
Sexplib0.Sexp_conv_error.unexpected_stag error_source__077_ sexp__076_
: Sexplib0.Sexp.t -> t)
;;
let _ = t_of_sexp
let sexp_of_t =
(function
| A -> Sexplib0.Sexp.Atom "A"
| B (arg0__126_, arg1__127_) ->
let res0__128_ = sexp_of_int arg0__126_
and res1__129_ = sexp_of_int arg1__127_ in
Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "B"; res0__128_; res1__129_ ]
| C { a = a__131_; b = b__133_; d = d__135_ } ->
let bnds__130_ = [] in
let bnds__130_ =
let arg__136_ = sexp_of_int d__135_ in
Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "d"; arg__136_ ] :: bnds__130_
in
let bnds__130_ =
let arg__134_ = sexp_of_int b__133_ in
Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "b"; arg__134_ ] :: bnds__130_
in
let bnds__130_ =
let arg__132_ = sexp_of_int a__131_ in
Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "a"; arg__132_ ] :: bnds__130_
in
Sexplib0.Sexp.List (Sexplib0.Sexp.Atom "C" :: bnds__130_)
| D { a = a__138_; b = b__140_; t = t__142_ } ->
let bnds__137_ = [] in
let bnds__137_ =
let arg__143_ = sexp_of_int t__142_ in
Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "t"; arg__143_ ] :: bnds__137_
in
let bnds__137_ =
let arg__141_ = sexp_of_int b__140_ in
Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "b"; arg__141_ ] :: bnds__137_
in
let bnds__137_ =
let arg__139_ = sexp_of_int a__138_ in
Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "a"; arg__139_ ] :: bnds__137_
in
Sexplib0.Sexp.List (Sexplib0.Sexp.Atom "D" :: bnds__137_)
: t -> Sexplib0.Sexp.t)
;;
let _ = sexp_of_t
[@@@end]
end
module Poly_variant = struct
  (* Test fixture: polymorphic variant with a nullary tag [`A] and a unary tag
     [`B of int]. The region between [@@deriving_inline sexp] and [@@@end] is
     deriver-generated expansion output (presumably ppx_sexp_conv — verify
     against the build); it must stay byte-identical to what the deriver
     emits, so do not hand-edit it. *)
  type t =
    [ `A
    | `B of int
    ]
  [@@deriving_inline sexp]
  let _ = fun (_ : t) -> ()
  (* Raises [No_variant_match] on unknown tags so inclusion into a larger
     polymorphic variant (see Inline_poly_variant below) can fall through. *)
  let __t_of_sexp__ =
    (let error_source__149_ = "expansion.ml.Poly_variant.t" in
     function
     | Sexplib0.Sexp.Atom atom__145_ as _sexp__147_ ->
       (match atom__145_ with
        | "A" -> `A
        | "B" -> Sexplib0.Sexp_conv_error.ptag_takes_args error_source__149_ _sexp__147_
        | _ -> Sexplib0.Sexp_conv_error.no_variant_match ())
     | Sexplib0.Sexp.List (Sexplib0.Sexp.Atom atom__145_ :: sexp_args__148_) as
       _sexp__147_ ->
       (match atom__145_ with
        | "B" as _tag__150_ ->
          (match sexp_args__148_ with
           | [ arg0__151_ ] ->
             let res0__152_ = int_of_sexp arg0__151_ in
             `B res0__152_
           | _ ->
             Sexplib0.Sexp_conv_error.ptag_incorrect_n_args
               error_source__149_
               _tag__150_
               _sexp__147_)
        | "A" -> Sexplib0.Sexp_conv_error.ptag_no_args error_source__149_ _sexp__147_
        | _ -> Sexplib0.Sexp_conv_error.no_variant_match ())
     | Sexplib0.Sexp.List (Sexplib0.Sexp.List _ :: _) as sexp__146_ ->
       Sexplib0.Sexp_conv_error.nested_list_invalid_poly_var error_source__149_ sexp__146_
     | Sexplib0.Sexp.List [] as sexp__146_ ->
       Sexplib0.Sexp_conv_error.empty_list_invalid_poly_var error_source__149_ sexp__146_
     : Sexplib0.Sexp.t -> t)
  ;;
  let _ = __t_of_sexp__
  (* Public reader: converts [No_variant_match] into a located parse error. *)
  let t_of_sexp =
    (let error_source__154_ = "expansion.ml.Poly_variant.t" in
     fun sexp__153_ ->
       try __t_of_sexp__ sexp__153_ with
       | Sexplib0.Sexp_conv_error.No_variant_match ->
         Sexplib0.Sexp_conv_error.no_matching_variant_found error_source__154_ sexp__153_
     : Sexplib0.Sexp.t -> t)
  ;;
  let _ = t_of_sexp
  let sexp_of_t =
    (function
     | `A -> Sexplib0.Sexp.Atom "A"
     | `B v__155_ -> Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "B"; sexp_of_int v__155_ ]
     : t -> Sexplib0.Sexp.t)
  ;;
  let _ = sexp_of_t
  [@@@end]
end
module Inline_poly_variant = struct
  (* Test fixture: polymorphic variant that inlines [Poly_variant.t] and adds
     a tag carrying a tuple. The deriver first tries the included type's
     [__t_of_sexp__] (coerced with [:>]) and only parses `C on
     [No_variant_match]. Generated region — do not hand-edit. *)
  type t =
    [ Poly_variant.t
    | `C of int * int
    ]
  [@@deriving_inline sexp]
  let _ = fun (_ : t) -> ()
  let __t_of_sexp__ =
    (let error_source__167_ = "expansion.ml.Inline_poly_variant.t" in
     fun sexp__156_ ->
       try (Poly_variant.__t_of_sexp__ sexp__156_ :> t) with
       | Sexplib0.Sexp_conv_error.No_variant_match ->
         (match sexp__156_ with
          | Sexplib0.Sexp.Atom atom__157_ as _sexp__159_ ->
            (match atom__157_ with
             | "C" ->
               Sexplib0.Sexp_conv_error.ptag_takes_args error_source__167_ _sexp__159_
             | _ -> Sexplib0.Sexp_conv_error.no_variant_match ())
          | Sexplib0.Sexp.List (Sexplib0.Sexp.Atom atom__157_ :: sexp_args__160_) as
            _sexp__159_ ->
            (match atom__157_ with
             | "C" as _tag__161_ ->
               (match sexp_args__160_ with
                | [ arg0__168_ ] ->
                  let res0__169_ =
                    match arg0__168_ with
                    | Sexplib0.Sexp.List [ arg0__162_; arg1__163_ ] ->
                      let res0__164_ = int_of_sexp arg0__162_
                      and res1__165_ = int_of_sexp arg1__163_ in
                      res0__164_, res1__165_
                    | sexp__166_ ->
                      Sexplib0.Sexp_conv_error.tuple_of_size_n_expected
                        error_source__167_
                        2
                        sexp__166_
                  in
                  `C res0__169_
                | _ ->
                  Sexplib0.Sexp_conv_error.ptag_incorrect_n_args
                    error_source__167_
                    _tag__161_
                    _sexp__159_)
             | _ -> Sexplib0.Sexp_conv_error.no_variant_match ())
          | Sexplib0.Sexp.List (Sexplib0.Sexp.List _ :: _) as sexp__158_ ->
            Sexplib0.Sexp_conv_error.nested_list_invalid_poly_var
              error_source__167_
              sexp__158_
          | Sexplib0.Sexp.List [] as sexp__158_ ->
            Sexplib0.Sexp_conv_error.empty_list_invalid_poly_var
              error_source__167_
              sexp__158_)
     : Sexplib0.Sexp.t -> t)
  ;;
  let _ = __t_of_sexp__
  let t_of_sexp =
    (let error_source__171_ = "expansion.ml.Inline_poly_variant.t" in
     fun sexp__170_ ->
       try __t_of_sexp__ sexp__170_ with
       | Sexplib0.Sexp_conv_error.No_variant_match ->
         Sexplib0.Sexp_conv_error.no_matching_variant_found error_source__171_ sexp__170_
     : Sexplib0.Sexp.t -> t)
  ;;
  let _ = t_of_sexp
  (* Writer dispatches on the row: inherited tags delegate to
     [Poly_variant.sexp_of_t]; `C serializes its tuple as a nested list. *)
  let sexp_of_t =
    (function
     | #Poly_variant.t as v__172_ -> Poly_variant.sexp_of_t v__172_
     | `C v__173_ ->
       Sexplib0.Sexp.List
         [ Sexplib0.Sexp.Atom "C"
         ; (let arg0__174_, arg1__175_ = v__173_ in
            let res0__176_ = sexp_of_int arg0__174_
            and res1__177_ = sexp_of_int arg1__175_ in
            Sexplib0.Sexp.List [ res0__176_; res1__177_ ])
         ]
     : t -> Sexplib0.Sexp.t)
  ;;
  let _ = sexp_of_t
  [@@@end]
end
module Recursive = struct
  (* Test fixture: recursive ordinary variant. The reader accepts both the
     capitalized and lowercased constructor spellings ("Banana"/"banana");
     the writer always emits the capitalized form. Generated region — do not
     hand-edit. *)
  type t =
    | Banana of t
    | Orange
  [@@deriving_inline sexp]
  let _ = fun (_ : t) -> ()
  let rec t_of_sexp =
    (let error_source__180_ = "expansion.ml.Recursive.t" in
     function
     | Sexplib0.Sexp.List
         (Sexplib0.Sexp.Atom (("banana" | "Banana") as _tag__183_) :: sexp_args__184_) as
       _sexp__182_ ->
       (match sexp_args__184_ with
        | [ arg0__185_ ] ->
          let res0__186_ = t_of_sexp arg0__185_ in
          Banana res0__186_
        | _ ->
          Sexplib0.Sexp_conv_error.stag_incorrect_n_args
            error_source__180_
            _tag__183_
            _sexp__182_)
     | Sexplib0.Sexp.Atom ("orange" | "Orange") -> Orange
     | Sexplib0.Sexp.Atom ("banana" | "Banana") as sexp__181_ ->
       Sexplib0.Sexp_conv_error.stag_takes_args error_source__180_ sexp__181_
     | Sexplib0.Sexp.List (Sexplib0.Sexp.Atom ("orange" | "Orange") :: _) as sexp__181_ ->
       Sexplib0.Sexp_conv_error.stag_no_args error_source__180_ sexp__181_
     | Sexplib0.Sexp.List (Sexplib0.Sexp.List _ :: _) as sexp__179_ ->
       Sexplib0.Sexp_conv_error.nested_list_invalid_sum error_source__180_ sexp__179_
     | Sexplib0.Sexp.List [] as sexp__179_ ->
       Sexplib0.Sexp_conv_error.empty_list_invalid_sum error_source__180_ sexp__179_
     | sexp__179_ ->
       Sexplib0.Sexp_conv_error.unexpected_stag error_source__180_ sexp__179_
     : Sexplib0.Sexp.t -> t)
  ;;
  let _ = t_of_sexp
  let rec sexp_of_t =
    (function
     | Banana arg0__187_ ->
       let res0__188_ = sexp_of_t arg0__187_ in
       Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "Banana"; res0__188_ ]
     | Orange -> Sexplib0.Sexp.Atom "Orange"
     : t -> Sexplib0.Sexp.t)
  ;;
  let _ = sexp_of_t
  [@@@end]
end
module Nonrecursive = struct
  open Recursive
  (* Test fixture: [type nonrec t = t] shadows [Recursive.t]; the derived
     converters are just the opened module's converters re-ascribed at the
     new type. Generated region — do not hand-edit. *)
  type nonrec t = t [@@deriving_inline sexp]
  let _ = fun (_ : t) -> ()
  let t_of_sexp = (t_of_sexp : Sexplib0.Sexp.t -> t)
  let _ = t_of_sexp
  let sexp_of_t = (sexp_of_t : t -> Sexplib0.Sexp.t)
  let _ = sexp_of_t
  [@@@end]
end
module Mutually_recursive = struct
  (* Test fixture: three mutually recursive types — a variant [a] with an
     inline record constructor [C], a record [b], and an alias [c = int].
     The record readers accumulate fields in refs, track duplicates and
     (optionally, per [record_check_extra_fields]) extra fields, and report
     missing fields via [record_undefined_elements]. Generated region — do
     not hand-edit. *)
  type a =
    | A
    | B of b
    | C of
        { a : a
        ; b : b
        ; c : c
        }
  and b =
    { a : a
    ; b : b
    }
  and c = int [@@deriving_inline sexp]
  let _ = fun (_ : a) -> ()
  let _ = fun (_ : b) -> ()
  let _ = fun (_ : c) -> ()
  let rec a_of_sexp =
    (let error_source__192_ = "expansion.ml.Mutually_recursive.a" in
     function
     | Sexplib0.Sexp.Atom ("a" | "A") -> A
     | Sexplib0.Sexp.List
         (Sexplib0.Sexp.Atom (("b" | "B") as _tag__195_) :: sexp_args__196_) as
       _sexp__194_ ->
       (match sexp_args__196_ with
        | [ arg0__197_ ] ->
          let res0__198_ = b_of_sexp arg0__197_ in
          B res0__198_
        | _ ->
          Sexplib0.Sexp_conv_error.stag_incorrect_n_args
            error_source__192_
            _tag__195_
            _sexp__194_)
     | Sexplib0.Sexp.List
         (Sexplib0.Sexp.Atom (("c" | "C") as _tag__201_) :: field_sexps__199_) as
       sexp__200_ ->
       (* Inline-record constructor: parse the remaining sexps as fields. *)
       let a__202_ = Stdlib.ref Stdlib.Option.None
       and b__204_ = Stdlib.ref Stdlib.Option.None
       and c__206_ = Stdlib.ref Stdlib.Option.None
       and duplicates__208_ = Stdlib.ref []
       and extra__209_ = Stdlib.ref [] in
       let rec iter__216_ = function
         | Sexplib0.Sexp.List
             (Sexplib0.Sexp.Atom field_name__210_ :: (([] | [ _ ]) as _field_sexps__212_))
           :: tail__217_ ->
           let _field_sexp__211_ () =
             match _field_sexps__212_ with
             | [ x__218_ ] -> x__218_
             | [] ->
               Sexplib0.Sexp_conv_error.record_only_pairs_expected
                 error_source__192_
                 sexp__200_
             | _ -> assert false
           in
           (match field_name__210_ with
            | "a" ->
              (match Stdlib.( ! ) a__202_ with
               | Stdlib.Option.None ->
                 let _field_sexp__211_ = _field_sexp__211_ () in
                 let fvalue__215_ = a_of_sexp _field_sexp__211_ in
                 Stdlib.( := ) a__202_ (Stdlib.Option.Some fvalue__215_)
               | Stdlib.Option.Some _ ->
                 Stdlib.( := )
                   duplicates__208_
                   (field_name__210_ :: Stdlib.( ! ) duplicates__208_))
            | "b" ->
              (match Stdlib.( ! ) b__204_ with
               | Stdlib.Option.None ->
                 let _field_sexp__211_ = _field_sexp__211_ () in
                 let fvalue__214_ = b_of_sexp _field_sexp__211_ in
                 Stdlib.( := ) b__204_ (Stdlib.Option.Some fvalue__214_)
               | Stdlib.Option.Some _ ->
                 Stdlib.( := )
                   duplicates__208_
                   (field_name__210_ :: Stdlib.( ! ) duplicates__208_))
            | "c" ->
              (match Stdlib.( ! ) c__206_ with
               | Stdlib.Option.None ->
                 let _field_sexp__211_ = _field_sexp__211_ () in
                 let fvalue__213_ = c_of_sexp _field_sexp__211_ in
                 Stdlib.( := ) c__206_ (Stdlib.Option.Some fvalue__213_)
               | Stdlib.Option.Some _ ->
                 Stdlib.( := )
                   duplicates__208_
                   (field_name__210_ :: Stdlib.( ! ) duplicates__208_))
            | _ ->
              if Stdlib.( ! ) Sexplib0.Sexp_conv.record_check_extra_fields
              then Stdlib.( := ) extra__209_ (field_name__210_ :: Stdlib.( ! ) extra__209_)
              else ());
           iter__216_ tail__217_
         | ((Sexplib0.Sexp.Atom _ | Sexplib0.Sexp.List _) as sexp__200_) :: _ ->
           Sexplib0.Sexp_conv_error.record_only_pairs_expected
             error_source__192_
             sexp__200_
         | [] -> ()
       in
       iter__216_ field_sexps__199_;
       (match Stdlib.( ! ) duplicates__208_ with
        | _ :: _ ->
          Sexplib0.Sexp_conv_error.record_duplicate_fields
            error_source__192_
            (Stdlib.( ! ) duplicates__208_)
            sexp__200_
        | [] ->
          (match Stdlib.( ! ) extra__209_ with
           | _ :: _ ->
             Sexplib0.Sexp_conv_error.record_extra_fields
               error_source__192_
               (Stdlib.( ! ) extra__209_)
               sexp__200_
           | [] ->
             (match Stdlib.( ! ) a__202_, Stdlib.( ! ) b__204_, Stdlib.( ! ) c__206_ with
              | ( Stdlib.Option.Some a__203_
                , Stdlib.Option.Some b__205_
                , Stdlib.Option.Some c__207_ ) -> C { a = a__203_; b = b__205_; c = c__207_ }
              | _ ->
                Sexplib0.Sexp_conv_error.record_undefined_elements
                  error_source__192_
                  sexp__200_
                  [ Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) a__202_) Stdlib.Option.None, "a"
                  ; Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) b__204_) Stdlib.Option.None, "b"
                  ; Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) c__206_) Stdlib.Option.None, "c"
                  ])))
     | Sexplib0.Sexp.List (Sexplib0.Sexp.Atom ("a" | "A") :: _) as sexp__193_ ->
       Sexplib0.Sexp_conv_error.stag_no_args error_source__192_ sexp__193_
     | Sexplib0.Sexp.Atom ("b" | "B") as sexp__193_ ->
       Sexplib0.Sexp_conv_error.stag_takes_args error_source__192_ sexp__193_
     | Sexplib0.Sexp.Atom ("c" | "C") as sexp__193_ ->
       Sexplib0.Sexp_conv_error.stag_takes_args error_source__192_ sexp__193_
     | Sexplib0.Sexp.List (Sexplib0.Sexp.List _ :: _) as sexp__191_ ->
       Sexplib0.Sexp_conv_error.nested_list_invalid_sum error_source__192_ sexp__191_
     | Sexplib0.Sexp.List [] as sexp__191_ ->
       Sexplib0.Sexp_conv_error.empty_list_invalid_sum error_source__192_ sexp__191_
     | sexp__191_ ->
       Sexplib0.Sexp_conv_error.unexpected_stag error_source__192_ sexp__191_
     : Sexplib0.Sexp.t -> a)
  and b_of_sexp =
    (let error_source__233_ = "expansion.ml.Mutually_recursive.b" in
     function
     | Sexplib0.Sexp.List field_sexps__221_ as sexp__220_ ->
       let a__222_ = Stdlib.ref Stdlib.Option.None
       and b__224_ = Stdlib.ref Stdlib.Option.None
       and duplicates__226_ = Stdlib.ref []
       and extra__227_ = Stdlib.ref [] in
       let rec iter__234_ = function
         | Sexplib0.Sexp.List
             (Sexplib0.Sexp.Atom field_name__228_ :: (([] | [ _ ]) as _field_sexps__230_))
           :: tail__235_ ->
           let _field_sexp__229_ () =
             match _field_sexps__230_ with
             | [ x__236_ ] -> x__236_
             | [] ->
               Sexplib0.Sexp_conv_error.record_only_pairs_expected
                 error_source__233_
                 sexp__220_
             | _ -> assert false
           in
           (match field_name__228_ with
            | "a" ->
              (match Stdlib.( ! ) a__222_ with
               | Stdlib.Option.None ->
                 let _field_sexp__229_ = _field_sexp__229_ () in
                 let fvalue__232_ = a_of_sexp _field_sexp__229_ in
                 Stdlib.( := ) a__222_ (Stdlib.Option.Some fvalue__232_)
               | Stdlib.Option.Some _ ->
                 Stdlib.( := )
                   duplicates__226_
                   (field_name__228_ :: Stdlib.( ! ) duplicates__226_))
            | "b" ->
              (match Stdlib.( ! ) b__224_ with
               | Stdlib.Option.None ->
                 let _field_sexp__229_ = _field_sexp__229_ () in
                 let fvalue__231_ = b_of_sexp _field_sexp__229_ in
                 Stdlib.( := ) b__224_ (Stdlib.Option.Some fvalue__231_)
               | Stdlib.Option.Some _ ->
                 Stdlib.( := )
                   duplicates__226_
                   (field_name__228_ :: Stdlib.( ! ) duplicates__226_))
            | _ ->
              if Stdlib.( ! ) Sexplib0.Sexp_conv.record_check_extra_fields
              then Stdlib.( := ) extra__227_ (field_name__228_ :: Stdlib.( ! ) extra__227_)
              else ());
           iter__234_ tail__235_
         | ((Sexplib0.Sexp.Atom _ | Sexplib0.Sexp.List _) as sexp__220_) :: _ ->
           Sexplib0.Sexp_conv_error.record_only_pairs_expected
             error_source__233_
             sexp__220_
         | [] -> ()
       in
       iter__234_ field_sexps__221_;
       (match Stdlib.( ! ) duplicates__226_ with
        | _ :: _ ->
          Sexplib0.Sexp_conv_error.record_duplicate_fields
            error_source__233_
            (Stdlib.( ! ) duplicates__226_)
            sexp__220_
        | [] ->
          (match Stdlib.( ! ) extra__227_ with
           | _ :: _ ->
             Sexplib0.Sexp_conv_error.record_extra_fields
               error_source__233_
               (Stdlib.( ! ) extra__227_)
               sexp__220_
           | [] ->
             (match Stdlib.( ! ) a__222_, Stdlib.( ! ) b__224_ with
              | Stdlib.Option.Some a__223_, Stdlib.Option.Some b__225_ ->
                { a = a__223_; b = b__225_ }
              | _ ->
                Sexplib0.Sexp_conv_error.record_undefined_elements
                  error_source__233_
                  sexp__220_
                  [ Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) a__222_) Stdlib.Option.None, "a"
                  ; Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) b__224_) Stdlib.Option.None, "b"
                  ])))
     | Sexplib0.Sexp.Atom _ as sexp__220_ ->
       Sexplib0.Sexp_conv_error.record_list_instead_atom error_source__233_ sexp__220_
     : Sexplib0.Sexp.t -> b)
  and c_of_sexp = (int_of_sexp : Sexplib0.Sexp.t -> c)
  let _ = a_of_sexp
  and _ = b_of_sexp
  and _ = c_of_sexp
  (* Writers build the field list back-to-front so the emitted sexp lists
     fields in declaration order. *)
  let rec sexp_of_a =
    (function
     | A -> Sexplib0.Sexp.Atom "A"
     | B arg0__238_ ->
       let res0__239_ = sexp_of_b arg0__238_ in
       Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "B"; res0__239_ ]
     | C { a = a__241_; b = b__243_; c = c__245_ } ->
       let bnds__240_ = [] in
       let bnds__240_ =
         let arg__246_ = sexp_of_c c__245_ in
         Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "c"; arg__246_ ] :: bnds__240_
       in
       let bnds__240_ =
         let arg__244_ = sexp_of_b b__243_ in
         Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "b"; arg__244_ ] :: bnds__240_
       in
       let bnds__240_ =
         let arg__242_ = sexp_of_a a__241_ in
         Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "a"; arg__242_ ] :: bnds__240_
       in
       Sexplib0.Sexp.List (Sexplib0.Sexp.Atom "C" :: bnds__240_)
     : a -> Sexplib0.Sexp.t)
  and sexp_of_b =
    (fun { a = a__248_; b = b__250_ } ->
       let bnds__247_ = [] in
       let bnds__247_ =
         let arg__251_ = sexp_of_b b__250_ in
         Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "b"; arg__251_ ] :: bnds__247_
       in
       let bnds__247_ =
         let arg__249_ = sexp_of_a a__248_ in
         Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "a"; arg__249_ ] :: bnds__247_
       in
       Sexplib0.Sexp.List bnds__247_
     : b -> Sexplib0.Sexp.t)
  and sexp_of_c = (sexp_of_int : c -> Sexplib0.Sexp.t)
  let _ = sexp_of_a
  and _ = sexp_of_b
  and _ = sexp_of_c
  [@@@end]
end
module Alias = struct
  (* Test fixture: simple type alias — the derived converters are the target
     module's converters re-ascribed. Generated region — do not hand-edit. *)
  type t = Recursive.t [@@deriving_inline sexp]
  let _ = fun (_ : t) -> ()
  let t_of_sexp = (Recursive.t_of_sexp : Sexplib0.Sexp.t -> t)
  let _ = t_of_sexp
  let sexp_of_t = (Recursive.sexp_of_t : t -> Sexplib0.Sexp.t)
  let _ = sexp_of_t
  [@@@end]
end
module Re_export = struct
  (* Test fixture: re-exported variant ([type t = Recursive.t = ...]); with
     the constructors visible, full converters are regenerated rather than
     aliased, using this module's own error-source string. Generated region —
     do not hand-edit. *)
  type t = Recursive.t =
    | Banana of t
    | Orange
  [@@deriving_inline sexp]
  let _ = fun (_ : t) -> ()
  let rec t_of_sexp =
    (let error_source__255_ = "expansion.ml.Re_export.t" in
     function
     | Sexplib0.Sexp.List
         (Sexplib0.Sexp.Atom (("banana" | "Banana") as _tag__258_) :: sexp_args__259_) as
       _sexp__257_ ->
       (match sexp_args__259_ with
        | [ arg0__260_ ] ->
          let res0__261_ = t_of_sexp arg0__260_ in
          Banana res0__261_
        | _ ->
          Sexplib0.Sexp_conv_error.stag_incorrect_n_args
            error_source__255_
            _tag__258_
            _sexp__257_)
     | Sexplib0.Sexp.Atom ("orange" | "Orange") -> Orange
     | Sexplib0.Sexp.Atom ("banana" | "Banana") as sexp__256_ ->
       Sexplib0.Sexp_conv_error.stag_takes_args error_source__255_ sexp__256_
     | Sexplib0.Sexp.List (Sexplib0.Sexp.Atom ("orange" | "Orange") :: _) as sexp__256_ ->
       Sexplib0.Sexp_conv_error.stag_no_args error_source__255_ sexp__256_
     | Sexplib0.Sexp.List (Sexplib0.Sexp.List _ :: _) as sexp__254_ ->
       Sexplib0.Sexp_conv_error.nested_list_invalid_sum error_source__255_ sexp__254_
     | Sexplib0.Sexp.List [] as sexp__254_ ->
       Sexplib0.Sexp_conv_error.empty_list_invalid_sum error_source__255_ sexp__254_
     | sexp__254_ ->
       Sexplib0.Sexp_conv_error.unexpected_stag error_source__255_ sexp__254_
     : Sexplib0.Sexp.t -> t)
  ;;
  let _ = t_of_sexp
  let rec sexp_of_t =
    (function
     | Banana arg0__262_ ->
       let res0__263_ = sexp_of_t arg0__262_ in
       Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "Banana"; res0__263_ ]
     | Orange -> Sexplib0.Sexp.Atom "Orange"
     : t -> Sexplib0.Sexp.t)
  ;;
  let _ = sexp_of_t
  [@@@end]
end
module Unary = struct
  (* Test fixture: unary type constructor — converters take the element
     converter as an extra first argument and compose the stock option/list
     converters. Generated region — do not hand-edit. *)
  type 'a t = 'a list option [@@deriving_inline sexp]
  let _ = fun (_ : 'a t) -> ()
  let t_of_sexp : 'a. (Sexplib0.Sexp.t -> 'a) -> Sexplib0.Sexp.t -> 'a t =
    fun _of_a__264_ x__266_ -> option_of_sexp (list_of_sexp _of_a__264_) x__266_
  ;;
  let _ = t_of_sexp
  let sexp_of_t : 'a. ('a -> Sexplib0.Sexp.t) -> 'a t -> Sexplib0.Sexp.t =
    fun _of_a__267_ x__268_ -> sexp_of_option (sexp_of_list _of_a__267_) x__268_
  ;;
  let _ = sexp_of_t
  [@@@end]
end
module Binary = struct
  (* Test fixture: binary type constructor aliasing [Either.t] — converters
     are the target module's converters, eta-reduced, with explicit
     polymorphic ascriptions. Generated region — do not hand-edit. *)
  type ('a, 'b) t = ('a, 'b) Either.t [@@deriving_inline sexp]
  let _ = fun (_ : ('a, 'b) t) -> ()
  let t_of_sexp :
    'a 'b.
    (Sexplib0.Sexp.t -> 'a)
    -> (Sexplib0.Sexp.t -> 'b)
    -> Sexplib0.Sexp.t
    -> ('a, 'b) t
    =
    Either.t_of_sexp
  ;;
  let _ = t_of_sexp
  let sexp_of_t :
    'a 'b.
    ('a -> Sexplib0.Sexp.t)
    -> ('b -> Sexplib0.Sexp.t)
    -> ('a, 'b) t
    -> Sexplib0.Sexp.t
    =
    Either.sexp_of_t
  ;;
  let _ = sexp_of_t
  [@@@end]
end
module First_order = struct
  (* Test fixture: a function type. Functions are not serializable, so the
     deriver maps reading to [fun_of_sexp] (which errors at runtime) and
     writing to [sexp_of_fun] (an opaque placeholder). Generated region — do
     not hand-edit. *)
  type 'a t = 'a -> 'a [@@deriving_inline sexp]
  let _ = fun (_ : 'a t) -> ()
  let t_of_sexp : 'a. (Sexplib0.Sexp.t -> 'a) -> Sexplib0.Sexp.t -> 'a t =
    fun _of_a__276_ -> Sexplib0.Sexp_conv.fun_of_sexp
  ;;
  let _ = t_of_sexp
  let sexp_of_t : 'a. ('a -> Sexplib0.Sexp.t) -> 'a t -> Sexplib0.Sexp.t =
    fun _of_a__278_ _ -> Sexplib0.Sexp_conv.sexp_of_fun Sexplib0.Sexp_conv.ignore
  ;;
  let _ = sexp_of_t
  [@@@end]
end
module Second_order = struct
  (* Test fixture: higher-order function type; same treatment as First_order
     ([fun_of_sexp] / [sexp_of_fun] placeholders), with two ignored element
     converters. Generated region — do not hand-edit. *)
  type ('a, 'b) t = ('a -> 'a) -> ('a -> 'b) -> ('b -> 'b) -> 'a -> 'b
  [@@deriving_inline sexp]
  let _ = fun (_ : ('a, 'b) t) -> ()
  let t_of_sexp :
    'a 'b.
    (Sexplib0.Sexp.t -> 'a)
    -> (Sexplib0.Sexp.t -> 'b)
    -> Sexplib0.Sexp.t
    -> ('a, 'b) t
    =
    fun _of_a__279_ _of_b__280_ -> Sexplib0.Sexp_conv.fun_of_sexp
  ;;
  let _ = t_of_sexp
  let sexp_of_t :
    'a 'b.
    ('a -> Sexplib0.Sexp.t)
    -> ('b -> Sexplib0.Sexp.t)
    -> ('a, 'b) t
    -> Sexplib0.Sexp.t
    =
    fun _of_a__282_ _of_b__283_ _ ->
    Sexplib0.Sexp_conv.sexp_of_fun Sexplib0.Sexp_conv.ignore
  ;;
  let _ = sexp_of_t
  [@@@end]
end
module Named_arguments = struct
  (* Test fixture: function type with labelled and optional arguments — still
     handled by the non-serializable-function placeholders. Generated
     region — do not hand-edit. *)
  type t = ?a:int -> b:int -> int -> int [@@deriving_inline sexp]
  let _ = fun (_ : t) -> ()
  let t_of_sexp = (Sexplib0.Sexp_conv.fun_of_sexp : Sexplib0.Sexp.t -> t)
  let _ = t_of_sexp
  let sexp_of_t =
    (fun _ -> Sexplib0.Sexp_conv.sexp_of_fun Sexplib0.Sexp_conv.ignore
     : t -> Sexplib0.Sexp.t)
  ;;
  let _ = sexp_of_t
  [@@@end]
end
module Gadt = struct
  (* Test fixture: GADT — only [sexp_of] is derived ([@@deriving_inline
     sexp_of]); the writer uses a locally abstract type for the index, and
     the existential in [C]'s list is serialized as the placeholder atom "_".
     Generated region — do not hand-edit. *)
  type _ t =
    | A : _ option t
    | B : int -> int t
    | C : 'a list -> unit t
  [@@deriving_inline sexp_of]
  let _ = fun (_ : _ t) -> ()
  let sexp_of_t : 'a__285_. ('a__285_ -> Sexplib0.Sexp.t) -> 'a__285_ t -> Sexplib0.Sexp.t
    =
    fun (type a__291_) : ((a__291_ -> Sexplib0.Sexp.t) -> a__291_ t -> Sexplib0.Sexp.t) ->
    fun _of_a__286_ -> function
    | A -> Sexplib0.Sexp.Atom "A"
    | B arg0__287_ ->
      let res0__288_ = sexp_of_int arg0__287_ in
      Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "B"; res0__288_ ]
    | C arg0__289_ ->
      let res0__290_ = sexp_of_list (fun _ -> Sexplib0.Sexp.Atom "_") arg0__289_ in
      Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "C"; res0__290_ ]
  ;;
  let _ = sexp_of_t
  [@@@end]
end
module Recursive_record_containing_variant = struct
  (* Test fixture: recursive record whose fields are polymorphic variants,
     with [@default `B] and [@sexp_drop_default Poly.equal] on field [b]:
     the reader substitutes the default when [b] is absent, and the writer
     omits [b] when [drop_default] says it equals the default. Generated
     region — do not hand-edit. *)
  type t =
    { a : [ `A of t ]
    ; b : [ `B ] [@sexp_drop_default Poly.equal] [@default `B]
    }
  [@@deriving_inline sexp]
  let _ = fun (_ : t) -> ()
  let rec t_of_sexp =
    (let (default__324_ : [ `B ]) = `B in
     let error_source__310_ = "expansion.ml.Recursive_record_containing_variant.t" in
     function
     | Sexplib0.Sexp.List field_sexps__294_ as sexp__293_ ->
       let a__295_ = Stdlib.ref Stdlib.Option.None
       and b__297_ = Stdlib.ref Stdlib.Option.None
       and duplicates__299_ = Stdlib.ref []
       and extra__300_ = Stdlib.ref [] in
       let rec iter__326_ = function
         | Sexplib0.Sexp.List
             (Sexplib0.Sexp.Atom field_name__301_ :: (([] | [ _ ]) as _field_sexps__303_))
           :: tail__327_ ->
           let _field_sexp__302_ () =
             match _field_sexps__303_ with
             | [ x__328_ ] -> x__328_
             | [] ->
               Sexplib0.Sexp_conv_error.record_only_pairs_expected
                 error_source__310_
                 sexp__293_
             | _ -> assert false
           in
           (match field_name__301_ with
            | "a" ->
              (match Stdlib.( ! ) a__295_ with
               | Stdlib.Option.None ->
                 let _field_sexp__302_ = _field_sexp__302_ () in
                 let fvalue__322_ =
                   (* Inlined poly-variant reader for [`A of t]. *)
                   let sexp__321_ = _field_sexp__302_ in
                   try
                     match sexp__321_ with
                     | Sexplib0.Sexp.Atom atom__314_ as _sexp__316_ ->
                       (match atom__314_ with
                        | "A" ->
                          Sexplib0.Sexp_conv_error.ptag_takes_args
                            error_source__310_
                            _sexp__316_
                        | _ -> Sexplib0.Sexp_conv_error.no_variant_match ())
                     | Sexplib0.Sexp.List (Sexplib0.Sexp.Atom atom__314_ :: sexp_args__317_)
                       as _sexp__316_ ->
                       (match atom__314_ with
                        | "A" as _tag__318_ ->
                          (match sexp_args__317_ with
                           | [ arg0__319_ ] ->
                             let res0__320_ = t_of_sexp arg0__319_ in
                             `A res0__320_
                           | _ ->
                             Sexplib0.Sexp_conv_error.ptag_incorrect_n_args
                               error_source__310_
                               _tag__318_
                               _sexp__316_)
                        | _ -> Sexplib0.Sexp_conv_error.no_variant_match ())
                     | Sexplib0.Sexp.List (Sexplib0.Sexp.List _ :: _) as sexp__315_ ->
                       Sexplib0.Sexp_conv_error.nested_list_invalid_poly_var
                         error_source__310_
                         sexp__315_
                     | Sexplib0.Sexp.List [] as sexp__315_ ->
                       Sexplib0.Sexp_conv_error.empty_list_invalid_poly_var
                         error_source__310_
                         sexp__315_
                   with
                   | Sexplib0.Sexp_conv_error.No_variant_match ->
                     Sexplib0.Sexp_conv_error.no_matching_variant_found
                       error_source__310_
                       sexp__321_
                 in
                 Stdlib.( := ) a__295_ (Stdlib.Option.Some fvalue__322_)
               | Stdlib.Option.Some _ ->
                 Stdlib.( := )
                   duplicates__299_
                   (field_name__301_ :: Stdlib.( ! ) duplicates__299_))
            | "b" ->
              (match Stdlib.( ! ) b__297_ with
               | Stdlib.Option.None ->
                 let _field_sexp__302_ = _field_sexp__302_ () in
                 let fvalue__312_ =
                   (* Inlined poly-variant reader for [`B]. *)
                   let sexp__311_ = _field_sexp__302_ in
                   try
                     match sexp__311_ with
                     | Sexplib0.Sexp.Atom atom__306_ as _sexp__308_ ->
                       (match atom__306_ with
                        | "B" -> `B
                        | _ -> Sexplib0.Sexp_conv_error.no_variant_match ())
                     | Sexplib0.Sexp.List (Sexplib0.Sexp.Atom atom__306_ :: _) as
                       _sexp__308_ ->
                       (match atom__306_ with
                        | "B" ->
                          Sexplib0.Sexp_conv_error.ptag_no_args
                            error_source__310_
                            _sexp__308_
                        | _ -> Sexplib0.Sexp_conv_error.no_variant_match ())
                     | Sexplib0.Sexp.List (Sexplib0.Sexp.List _ :: _) as sexp__307_ ->
                       Sexplib0.Sexp_conv_error.nested_list_invalid_poly_var
                         error_source__310_
                         sexp__307_
                     | Sexplib0.Sexp.List [] as sexp__307_ ->
                       Sexplib0.Sexp_conv_error.empty_list_invalid_poly_var
                         error_source__310_
                         sexp__307_
                   with
                   | Sexplib0.Sexp_conv_error.No_variant_match ->
                     Sexplib0.Sexp_conv_error.no_matching_variant_found
                       error_source__310_
                       sexp__311_
                 in
                 Stdlib.( := ) b__297_ (Stdlib.Option.Some fvalue__312_)
               | Stdlib.Option.Some _ ->
                 Stdlib.( := )
                   duplicates__299_
                   (field_name__301_ :: Stdlib.( ! ) duplicates__299_))
            | _ ->
              if Stdlib.( ! ) Sexplib0.Sexp_conv.record_check_extra_fields
              then Stdlib.( := ) extra__300_ (field_name__301_ :: Stdlib.( ! ) extra__300_)
              else ());
           iter__326_ tail__327_
         | ((Sexplib0.Sexp.Atom _ | Sexplib0.Sexp.List _) as sexp__293_) :: _ ->
           Sexplib0.Sexp_conv_error.record_only_pairs_expected
             error_source__310_
             sexp__293_
         | [] -> ()
       in
       iter__326_ field_sexps__294_;
       (match Stdlib.( ! ) duplicates__299_ with
        | _ :: _ ->
          Sexplib0.Sexp_conv_error.record_duplicate_fields
            error_source__310_
            (Stdlib.( ! ) duplicates__299_)
            sexp__293_
        | [] ->
          (match Stdlib.( ! ) extra__300_ with
           | _ :: _ ->
             Sexplib0.Sexp_conv_error.record_extra_fields
               error_source__310_
               (Stdlib.( ! ) extra__300_)
               sexp__293_
           | [] ->
             (match Stdlib.( ! ) a__295_, Stdlib.( ! ) b__297_ with
              | Stdlib.Option.Some a__296_, b__298_ ->
                { a = a__296_
                ; b =
                    (match b__298_ with
                     | Stdlib.Option.None -> default__324_
                     | Stdlib.Option.Some v__325_ -> v__325_)
                }
              | _ ->
                Sexplib0.Sexp_conv_error.record_undefined_elements
                  error_source__310_
                  sexp__293_
                  [ Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) a__295_) Stdlib.Option.None, "a" ])))
     | Sexplib0.Sexp.Atom _ as sexp__293_ ->
       Sexplib0.Sexp_conv_error.record_list_instead_atom error_source__310_ sexp__293_
     : Sexplib0.Sexp.t -> t)
  ;;
  let _ = t_of_sexp
  let rec sexp_of_t =
    (let (default__335_ : [ `B ]) = `B
     and (drop_default__334_ : [ `B ] -> [ `B ] -> Stdlib.Bool.t) = Poly.equal in
     fun { a = a__330_; b = b__336_ } ->
       let bnds__329_ = [] in
       let bnds__329_ =
         (* Field [b] is omitted when equal to the default per
            [@sexp_drop_default Poly.equal]. *)
         if drop_default__334_ default__335_ b__336_
         then bnds__329_
         else (
           let arg__338_ = (fun `B -> Sexplib0.Sexp.Atom "B") b__336_ in
           let bnd__337_ = Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "b"; arg__338_ ] in
           bnd__337_ :: bnds__329_)
       in
       let bnds__329_ =
         let arg__331_ =
           let (`A v__332_) = a__330_ in
           Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "A"; sexp_of_t v__332_ ]
         in
         Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "a"; arg__331_ ] :: bnds__329_
       in
       Sexplib0.Sexp.List bnds__329_
     : t -> Sexplib0.Sexp.t)
  ;;
  let _ = sexp_of_t
  [@@@end]
end
module Poly_record = struct
  (* Test fixture: record with polymorphically-typed fields ('a. 'a list,
     etc.). Reading a value into a universally quantified field is
     impossible, so the reader's per-field converters call
     [record_poly_field_value] (a runtime error); the writer serializes such
     fields with [sexp_of_opaque]. Generated region — do not hand-edit. *)
  type t =
    { a : 'a. 'a list
    ; b : 'b. 'b option
    ; c : 'c. 'c
    }
  [@@deriving_inline sexp]
  let _ = fun (_ : t) -> ()
  let t_of_sexp =
    (let error_source__355_ = "expansion.ml.Poly_record.t" in
     function
     | Sexplib0.Sexp.List field_sexps__341_ as sexp__340_ ->
       (* Fields are computed in an inner scope, then the record is built
          outside it so the polymorphic field types generalize. *)
       let a__366_, b__367_, c__368_ =
         let a__342_ = Stdlib.ref Stdlib.Option.None
         and b__344_ = Stdlib.ref Stdlib.Option.None
         and c__346_ = Stdlib.ref Stdlib.Option.None
         and duplicates__348_ = Stdlib.ref []
         and extra__349_ = Stdlib.ref [] in
         let rec iter__363_ = function
           | Sexplib0.Sexp.List
               (Sexplib0.Sexp.Atom field_name__350_
                :: (([] | [ _ ]) as _field_sexps__352_))
             :: tail__364_ ->
             let _field_sexp__351_ () =
               match _field_sexps__352_ with
               | [ x__365_ ] -> x__365_
               | [] ->
                 Sexplib0.Sexp_conv_error.record_only_pairs_expected
                   error_source__355_
                   sexp__340_
               | _ -> assert false
             in
             (match field_name__350_ with
              | "a" ->
                (match Stdlib.( ! ) a__342_ with
                 | Stdlib.Option.None ->
                   let _field_sexp__351_ = _field_sexp__351_ () in
                   let fvalue__362_ =
                     let _of_a__360_ sexp__361_ =
                       Sexplib0.Sexp_conv_error.record_poly_field_value
                         error_source__355_
                         sexp__361_
                     in
                     list_of_sexp _of_a__360_ _field_sexp__351_
                   in
                   Stdlib.( := ) a__342_ (Stdlib.Option.Some fvalue__362_)
                 | Stdlib.Option.Some _ ->
                   Stdlib.( := )
                     duplicates__348_
                     (field_name__350_ :: Stdlib.( ! ) duplicates__348_))
              | "b" ->
                (match Stdlib.( ! ) b__344_ with
                 | Stdlib.Option.None ->
                   let _field_sexp__351_ = _field_sexp__351_ () in
                   let fvalue__359_ =
                     let _of_b__357_ sexp__358_ =
                       Sexplib0.Sexp_conv_error.record_poly_field_value
                         error_source__355_
                         sexp__358_
                     in
                     option_of_sexp _of_b__357_ _field_sexp__351_
                   in
                   Stdlib.( := ) b__344_ (Stdlib.Option.Some fvalue__359_)
                 | Stdlib.Option.Some _ ->
                   Stdlib.( := )
                     duplicates__348_
                     (field_name__350_ :: Stdlib.( ! ) duplicates__348_))
              | "c" ->
                (match Stdlib.( ! ) c__346_ with
                 | Stdlib.Option.None ->
                   let _field_sexp__351_ = _field_sexp__351_ () in
                   let fvalue__356_ =
                     let _of_c__353_ sexp__354_ =
                       Sexplib0.Sexp_conv_error.record_poly_field_value
                         error_source__355_
                         sexp__354_
                     in
                     _of_c__353_ _field_sexp__351_
                   in
                   Stdlib.( := ) c__346_ (Stdlib.Option.Some fvalue__356_)
                 | Stdlib.Option.Some _ ->
                   Stdlib.( := )
                     duplicates__348_
                     (field_name__350_ :: Stdlib.( ! ) duplicates__348_))
              | _ ->
                if Stdlib.( ! ) Sexplib0.Sexp_conv.record_check_extra_fields
                then
                  Stdlib.( := ) extra__349_ (field_name__350_ :: Stdlib.( ! ) extra__349_)
                else ());
             iter__363_ tail__364_
           | ((Sexplib0.Sexp.Atom _ | Sexplib0.Sexp.List _) as sexp__340_) :: _ ->
             Sexplib0.Sexp_conv_error.record_only_pairs_expected
               error_source__355_
               sexp__340_
           | [] -> ()
         in
         iter__363_ field_sexps__341_;
         match Stdlib.( ! ) duplicates__348_ with
         | _ :: _ ->
           Sexplib0.Sexp_conv_error.record_duplicate_fields
             error_source__355_
             (Stdlib.( ! ) duplicates__348_)
             sexp__340_
         | [] ->
           (match Stdlib.( ! ) extra__349_ with
            | _ :: _ ->
              Sexplib0.Sexp_conv_error.record_extra_fields
                error_source__355_
                (Stdlib.( ! ) extra__349_)
                sexp__340_
            | [] ->
              (match Stdlib.( ! ) a__342_, Stdlib.( ! ) b__344_, Stdlib.( ! ) c__346_ with
               | ( Stdlib.Option.Some a__343_
                 , Stdlib.Option.Some b__345_
                 , Stdlib.Option.Some c__347_ ) -> a__343_, b__345_, c__347_
               | _ ->
                 Sexplib0.Sexp_conv_error.record_undefined_elements
                   error_source__355_
                   sexp__340_
                   [ Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) a__342_) Stdlib.Option.None, "a"
                   ; Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) b__344_) Stdlib.Option.None, "b"
                   ; Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) c__346_) Stdlib.Option.None, "c"
                   ]))
       in
       { a = a__366_; b = b__367_; c = c__368_ }
     | Sexplib0.Sexp.Atom _ as sexp__340_ ->
       Sexplib0.Sexp_conv_error.record_list_instead_atom error_source__355_ sexp__340_
     : Sexplib0.Sexp.t -> t)
  ;;
  let _ = t_of_sexp
  let sexp_of_t =
    (fun { a = a__370_; b = b__373_; c = c__376_ } ->
       let bnds__369_ = [] in
       let bnds__369_ =
         let arg__377_ =
           let _of_c__378_ = Sexplib0.Sexp_conv.sexp_of_opaque in
           _of_c__378_ c__376_
         in
         Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "c"; arg__377_ ] :: bnds__369_
       in
       let bnds__369_ =
         let arg__374_ =
           let _of_b__375_ = Sexplib0.Sexp_conv.sexp_of_opaque in
           sexp_of_option _of_b__375_ b__373_
         in
         Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "b"; arg__374_ ] :: bnds__369_
       in
       let bnds__369_ =
         let arg__371_ =
           let _of_a__372_ = Sexplib0.Sexp_conv.sexp_of_opaque in
           sexp_of_list _of_a__372_ a__370_
         in
         Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "a"; arg__371_ ] :: bnds__369_
       in
       Sexplib0.Sexp.List bnds__369_
     : t -> Sexplib0.Sexp.t)
  ;;
  let _ = sexp_of_t
  [@@@end]
end
module Record_with_defaults = struct
type t =
{ a : int [@default 0]
; b : int [@default 0] [@sexp_drop_default.compare]
; c : int [@default 0] [@sexp_drop_default.equal]
; d : int [@default 0] [@sexp_drop_default.sexp]
; e : int [@default 0] [@sexp_drop_default ( = )]
; f : int [@sexp_drop_if ( = ) 0]
}
[@@deriving_inline sexp]
let _ = fun (_ : t) -> ()
let t_of_sexp =
(let (default__415_ : int) = 0
and (default__417_ : int) = 0
and (default__419_ : int) = 0
and (default__421_ : int) = 0
and (default__423_ : int) = 0 in
let error_source__425_ = "expansion.ml.Record_with_defaults.t" in
function
| Sexplib0.Sexp.List field_sexps__381_ as sexp__380_ ->
let a__382_ = Stdlib.ref Stdlib.Option.None
and b__384_ = Stdlib.ref Stdlib.Option.None
and c__386_ = Stdlib.ref Stdlib.Option.None
and d__388_ = Stdlib.ref Stdlib.Option.None
and e__390_ = Stdlib.ref Stdlib.Option.None
and f__392_ = Stdlib.ref Stdlib.Option.None
and duplicates__394_ = Stdlib.ref []
and extra__395_ = Stdlib.ref [] in
let rec iter__426_ = function
| Sexplib0.Sexp.List
(Sexplib0.Sexp.Atom field_name__396_ :: (([] | [ _ ]) as _field_sexps__398_))
:: tail__427_ ->
let _field_sexp__397_ () =
match _field_sexps__398_ with
| [ x__428_ ] -> x__428_
| [] ->
Sexplib0.Sexp_conv_error.record_only_pairs_expected
error_source__425_
sexp__380_
| _ -> assert false
in
(match field_name__396_ with
| "a" ->
(match Stdlib.( ! ) a__382_ with
| Stdlib.Option.None ->
let _field_sexp__397_ = _field_sexp__397_ () in
let fvalue__409_ = int_of_sexp _field_sexp__397_ in
Stdlib.( := ) a__382_ (Stdlib.Option.Some fvalue__409_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__394_
(field_name__396_ :: Stdlib.( ! ) duplicates__394_))
| "b" ->
(match Stdlib.( ! ) b__384_ with
| Stdlib.Option.None ->
let _field_sexp__397_ = _field_sexp__397_ () in
let fvalue__407_ = int_of_sexp _field_sexp__397_ in
Stdlib.( := ) b__384_ (Stdlib.Option.Some fvalue__407_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__394_
(field_name__396_ :: Stdlib.( ! ) duplicates__394_))
| "c" ->
(match Stdlib.( ! ) c__386_ with
| Stdlib.Option.None ->
let _field_sexp__397_ = _field_sexp__397_ () in
let fvalue__405_ = int_of_sexp _field_sexp__397_ in
Stdlib.( := ) c__386_ (Stdlib.Option.Some fvalue__405_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__394_
(field_name__396_ :: Stdlib.( ! ) duplicates__394_))
| "d" ->
(match Stdlib.( ! ) d__388_ with
| Stdlib.Option.None ->
let _field_sexp__397_ = _field_sexp__397_ () in
let fvalue__403_ = int_of_sexp _field_sexp__397_ in
Stdlib.( := ) d__388_ (Stdlib.Option.Some fvalue__403_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__394_
(field_name__396_ :: Stdlib.( ! ) duplicates__394_))
| "e" ->
(match Stdlib.( ! ) e__390_ with
| Stdlib.Option.None ->
let _field_sexp__397_ = _field_sexp__397_ () in
let fvalue__401_ = int_of_sexp _field_sexp__397_ in
Stdlib.( := ) e__390_ (Stdlib.Option.Some fvalue__401_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__394_
(field_name__396_ :: Stdlib.( ! ) duplicates__394_))
| "f" ->
(match Stdlib.( ! ) f__392_ with
| Stdlib.Option.None ->
let _field_sexp__397_ = _field_sexp__397_ () in
let fvalue__399_ = int_of_sexp _field_sexp__397_ in
Stdlib.( := ) f__392_ (Stdlib.Option.Some fvalue__399_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__394_
(field_name__396_ :: Stdlib.( ! ) duplicates__394_))
| _ ->
if Stdlib.( ! ) Sexplib0.Sexp_conv.record_check_extra_fields
then Stdlib.( := ) extra__395_ (field_name__396_ :: Stdlib.( ! ) extra__395_)
else ());
iter__426_ tail__427_
| ((Sexplib0.Sexp.Atom _ | Sexplib0.Sexp.List _) as sexp__380_) :: _ ->
Sexplib0.Sexp_conv_error.record_only_pairs_expected
error_source__425_
sexp__380_
| [] -> ()
in
iter__426_ field_sexps__381_;
(match Stdlib.( ! ) duplicates__394_ with
| _ :: _ ->
Sexplib0.Sexp_conv_error.record_duplicate_fields
error_source__425_
(Stdlib.( ! ) duplicates__394_)
sexp__380_
| [] ->
(match Stdlib.( ! ) extra__395_ with
| _ :: _ ->
Sexplib0.Sexp_conv_error.record_extra_fields
error_source__425_
(Stdlib.( ! ) extra__395_)
sexp__380_
| [] ->
(match
( Stdlib.( ! ) a__382_
, Stdlib.( ! ) b__384_
, Stdlib.( ! ) c__386_
, Stdlib.( ! ) d__388_
, Stdlib.( ! ) e__390_
, Stdlib.( ! ) f__392_ )
with
| a__383_, b__385_, c__387_, d__389_, e__391_, Stdlib.Option.Some f__393_ ->
{ a =
(match a__383_ with
| Stdlib.Option.None -> default__415_
| Stdlib.Option.Some v__416_ -> v__416_)
; b =
(match b__385_ with
| Stdlib.Option.None -> default__417_
| Stdlib.Option.Some v__418_ -> v__418_)
; c =
(match c__387_ with
| Stdlib.Option.None -> default__419_
| Stdlib.Option.Some v__420_ -> v__420_)
; d =
(match d__389_ with
| Stdlib.Option.None -> default__421_
| Stdlib.Option.Some v__422_ -> v__422_)
; e =
(match e__391_ with
| Stdlib.Option.None -> default__423_
| Stdlib.Option.Some v__424_ -> v__424_)
; f = f__393_
}
| _ ->
Sexplib0.Sexp_conv_error.record_undefined_elements
error_source__425_
sexp__380_
[ Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) f__392_) Stdlib.Option.None, "f" ])))
| Sexplib0.Sexp.Atom _ as sexp__380_ ->
Sexplib0.Sexp_conv_error.record_list_instead_atom error_source__425_ sexp__380_
: Sexplib0.Sexp.t -> t)
;;
let _ = t_of_sexp
let sexp_of_t =
(let (default__433_ : int) = 0
and (default__438_ : int) = 0
and (default__443_ : int) = 0
and (default__449_ : int) = 0
and (drop_default__448_ : int -> int -> Stdlib.Bool.t) = ( = )
and (drop_if__454_ : Stdlib.Unit.t -> int -> Stdlib.Bool.t) = fun () -> ( = ) 0 in
fun { a = a__430_; b = b__434_; c = c__439_; d = d__444_; e = e__450_; f = f__455_ } ->
let bnds__429_ = [] in
let bnds__429_ =
if (drop_if__454_ ()) f__455_
then bnds__429_
else (
let arg__457_ = sexp_of_int f__455_ in
let bnd__456_ = Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "f"; arg__457_ ] in
bnd__456_ :: bnds__429_)
in
let bnds__429_ =
if drop_default__448_ default__449_ e__450_
then bnds__429_
else (
let arg__452_ = sexp_of_int e__450_ in
let bnd__451_ = Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "e"; arg__452_ ] in
bnd__451_ :: bnds__429_)
in
let bnds__429_ =
let arg__446_ = sexp_of_int d__444_ in
if Sexplib0.Sexp_conv.( = ) (sexp_of_int default__443_) arg__446_
then bnds__429_
else (
let bnd__445_ = Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "d"; arg__446_ ] in
bnd__445_ :: bnds__429_)
in
let bnds__429_ =
if [%equal: int] default__438_ c__439_
then bnds__429_
else (
let arg__441_ = sexp_of_int c__439_ in
let bnd__440_ = Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "c"; arg__441_ ] in
bnd__440_ :: bnds__429_)
in
let bnds__429_ =
if [%compare.equal: int] default__433_ b__434_
then bnds__429_
else (
let arg__436_ = sexp_of_int b__434_ in
let bnd__435_ = Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "b"; arg__436_ ] in
bnd__435_ :: bnds__429_)
in
let bnds__429_ =
let arg__431_ = sexp_of_int a__430_ in
Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "a"; arg__431_ ] :: bnds__429_
in
Sexplib0.Sexp.List bnds__429_
: t -> Sexplib0.Sexp.t)
;;
let _ = sexp_of_t
[@@@end]
end
(* Exercises ppx_sexp_conv's special record-field attributes:
   - [@sexp.option]: the field is omitted from the sexp when [None] and written
     as [(a <v>)] (no [Some] wrapper) otherwise;
   - [@sexp.list] / [@sexp.array]: the field is omitted when empty on output and
     defaults to [[]] / [[||]] when absent on input;
   - [@sexp.bool]: the field is written as a bare [(d)] entry when [true], and a
     payload on input is rejected via [record_sexp_bool_with_payload].
   NOTE: everything between [@@deriving_inline sexp] and [@@@end] is generated
   by the ppx and must stay token-identical to the expander's output — do not
   edit it by hand. *)
module Record_with_special_types = struct
  type t =
    { a : int option [@sexp.option]
    ; b : int list [@sexp.list]
    ; c : int array [@sexp.array]
    ; d : bool [@sexp.bool]
    }
  [@@deriving_inline sexp]
  let _ = fun (_ : t) -> ()
  let t_of_sexp =
    (let error_source__480_ = "expansion.ml.Record_with_special_types.t" in
     function
     | Sexplib0.Sexp.List field_sexps__466_ as sexp__465_ ->
       let a__467_ = Stdlib.ref Stdlib.Option.None
       and b__469_ = Stdlib.ref Stdlib.Option.None
       and c__471_ = Stdlib.ref Stdlib.Option.None
       and d__473_ = Stdlib.ref false
       and duplicates__475_ = Stdlib.ref []
       and extra__476_ = Stdlib.ref [] in
       let rec iter__486_ = function
         | Sexplib0.Sexp.List
             (Sexplib0.Sexp.Atom field_name__477_ :: (([] | [ _ ]) as _field_sexps__479_))
           :: tail__487_ ->
           let _field_sexp__478_ () =
             match _field_sexps__479_ with
             | [ x__488_ ] -> x__488_
             | [] ->
               Sexplib0.Sexp_conv_error.record_only_pairs_expected
                 error_source__480_
                 sexp__465_
             | _ -> assert false
           in
           (match field_name__477_ with
            | "a" ->
              (match Stdlib.( ! ) a__467_ with
               | Stdlib.Option.None ->
                 let _field_sexp__478_ = _field_sexp__478_ () in
                 let fvalue__483_ = int_of_sexp _field_sexp__478_ in
                 Stdlib.( := ) a__467_ (Stdlib.Option.Some fvalue__483_)
               | Stdlib.Option.Some _ ->
                 Stdlib.( := )
                   duplicates__475_
                   (field_name__477_ :: Stdlib.( ! ) duplicates__475_))
            | "b" ->
              (match Stdlib.( ! ) b__469_ with
               | Stdlib.Option.None ->
                 let _field_sexp__478_ = _field_sexp__478_ () in
                 let fvalue__482_ = list_of_sexp int_of_sexp _field_sexp__478_ in
                 Stdlib.( := ) b__469_ (Stdlib.Option.Some fvalue__482_)
               | Stdlib.Option.Some _ ->
                 Stdlib.( := )
                   duplicates__475_
                   (field_name__477_ :: Stdlib.( ! ) duplicates__475_))
            | "c" ->
              (match Stdlib.( ! ) c__471_ with
               | Stdlib.Option.None ->
                 let _field_sexp__478_ = _field_sexp__478_ () in
                 let fvalue__481_ = array_of_sexp int_of_sexp _field_sexp__478_ in
                 Stdlib.( := ) c__471_ (Stdlib.Option.Some fvalue__481_)
               | Stdlib.Option.Some _ ->
                 Stdlib.( := )
                   duplicates__475_
                   (field_name__477_ :: Stdlib.( ! ) duplicates__475_))
            | "d" ->
              if Stdlib.( ! ) d__473_
              then
                Stdlib.( := )
                  duplicates__475_
                  (field_name__477_ :: Stdlib.( ! ) duplicates__475_)
              else (
                match _field_sexps__479_ with
                | [] -> Stdlib.( := ) d__473_ true
                | _ :: _ ->
                  Sexplib0.Sexp_conv_error.record_sexp_bool_with_payload
                    error_source__480_
                    sexp__465_)
            | _ ->
              if Stdlib.( ! ) Sexplib0.Sexp_conv.record_check_extra_fields
              then Stdlib.( := ) extra__476_ (field_name__477_ :: Stdlib.( ! ) extra__476_)
              else ());
           iter__486_ tail__487_
         | ((Sexplib0.Sexp.Atom _ | Sexplib0.Sexp.List _) as sexp__465_) :: _ ->
           Sexplib0.Sexp_conv_error.record_only_pairs_expected
             error_source__480_
             sexp__465_
         | [] -> ()
       in
       iter__486_ field_sexps__466_;
       (match Stdlib.( ! ) duplicates__475_ with
        | _ :: _ ->
          Sexplib0.Sexp_conv_error.record_duplicate_fields
            error_source__480_
            (Stdlib.( ! ) duplicates__475_)
            sexp__465_
        | [] ->
          (match Stdlib.( ! ) extra__476_ with
           | _ :: _ ->
             Sexplib0.Sexp_conv_error.record_extra_fields
               error_source__480_
               (Stdlib.( ! ) extra__476_)
               sexp__465_
           | [] ->
             (match
                ( Stdlib.( ! ) a__467_
                , Stdlib.( ! ) b__469_
                , Stdlib.( ! ) c__471_
                , Stdlib.( ! ) d__473_ )
              with
              | a__468_, b__470_, c__472_, d__474_ ->
                { a = a__468_
                ; b =
                    (match b__470_ with
                     | Stdlib.Option.None -> []
                     | Stdlib.Option.Some v__484_ -> v__484_)
                ; c =
                    (match c__472_ with
                     | Stdlib.Option.None -> [||]
                     | Stdlib.Option.Some v__485_ -> v__485_)
                ; d = d__474_
                })))
     | Sexplib0.Sexp.Atom _ as sexp__465_ ->
       Sexplib0.Sexp_conv_error.record_list_instead_atom error_source__480_ sexp__465_
     : Sexplib0.Sexp.t -> t)
  ;;
  let _ = t_of_sexp
  let sexp_of_t =
    (fun { a = a__490_; b = b__495_; c = c__499_; d = d__502_ } ->
      let bnds__489_ = [] in
      let bnds__489_ =
        if d__502_
        then (
          let bnd__503_ = Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "d" ] in
          bnd__503_ :: bnds__489_)
        else bnds__489_
      in
      let bnds__489_ =
        if match c__499_ with
           | [||] -> true
           | _ -> false
        then bnds__489_
        else (
          let arg__501_ = (sexp_of_array sexp_of_int) c__499_ in
          let bnd__500_ = Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "c"; arg__501_ ] in
          bnd__500_ :: bnds__489_)
      in
      let bnds__489_ =
        if match b__495_ with
           | [] -> true
           | _ -> false
        then bnds__489_
        else (
          let arg__497_ = (sexp_of_list sexp_of_int) b__495_ in
          let bnd__496_ = Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "b"; arg__497_ ] in
          bnd__496_ :: bnds__489_)
      in
      let bnds__489_ =
        match a__490_ with
        | Stdlib.Option.None -> bnds__489_
        | Stdlib.Option.Some v__491_ ->
          let arg__493_ = sexp_of_int v__491_ in
          let bnd__492_ = Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "a"; arg__493_ ] in
          bnd__492_ :: bnds__489_
      in
      Sexplib0.Sexp.List bnds__489_
     : t -> Sexplib0.Sexp.t)
  ;;
  let _ = sexp_of_t
  [@@@end]
end
(* Exercises [@sexp.omit_nil] on every field: on output, a field whose
   serialized form is [Sexplib0.Sexp.List []] is dropped from the record sexp;
   on input, a missing field is reconstructed by converting [List []], and any
   [Of_sexp_error] raised while doing so is re-raised against the whole record
   sexp so the error points at the enclosing form.
   NOTE: everything between [@@deriving_inline sexp] and [@@@end] is generated
   by the ppx and must stay token-identical to the expander's output — do not
   edit it by hand. *)
module Record_with_omit_nil = struct
  type t =
    { a : int option [@sexp.omit_nil]
    ; b : int list [@sexp.omit_nil]
    ; c : unit [@sexp.omit_nil]
    ; d : int [@sexp.omit_nil]
    }
  [@@deriving_inline sexp]
  let _ = fun (_ : t) -> ()
  let t_of_sexp =
    (let error_source__535_ = "expansion.ml.Record_with_omit_nil.t" in
     function
     | Sexplib0.Sexp.List field_sexps__506_ as sexp__505_ ->
       let a__507_ = Stdlib.ref Stdlib.Option.None
       and b__509_ = Stdlib.ref Stdlib.Option.None
       and c__511_ = Stdlib.ref Stdlib.Option.None
       and d__513_ = Stdlib.ref Stdlib.Option.None
       and duplicates__515_ = Stdlib.ref []
       and extra__516_ = Stdlib.ref [] in
       let rec iter__532_ = function
         | Sexplib0.Sexp.List
             (Sexplib0.Sexp.Atom field_name__517_ :: (([] | [ _ ]) as _field_sexps__519_))
           :: tail__533_ ->
           let _field_sexp__518_ () =
             match _field_sexps__519_ with
             | [ x__534_ ] -> x__534_
             | [] ->
               Sexplib0.Sexp_conv_error.record_only_pairs_expected
                 error_source__535_
                 sexp__505_
             | _ -> assert false
           in
           (match field_name__517_ with
            | "a" ->
              (match Stdlib.( ! ) a__507_ with
               | Stdlib.Option.None ->
                 let _field_sexp__518_ = _field_sexp__518_ () in
                 let fvalue__523_ = option_of_sexp int_of_sexp _field_sexp__518_ in
                 Stdlib.( := ) a__507_ (Stdlib.Option.Some fvalue__523_)
               | Stdlib.Option.Some _ ->
                 Stdlib.( := )
                   duplicates__515_
                   (field_name__517_ :: Stdlib.( ! ) duplicates__515_))
            | "b" ->
              (match Stdlib.( ! ) b__509_ with
               | Stdlib.Option.None ->
                 let _field_sexp__518_ = _field_sexp__518_ () in
                 let fvalue__522_ = list_of_sexp int_of_sexp _field_sexp__518_ in
                 Stdlib.( := ) b__509_ (Stdlib.Option.Some fvalue__522_)
               | Stdlib.Option.Some _ ->
                 Stdlib.( := )
                   duplicates__515_
                   (field_name__517_ :: Stdlib.( ! ) duplicates__515_))
            | "c" ->
              (match Stdlib.( ! ) c__511_ with
               | Stdlib.Option.None ->
                 let _field_sexp__518_ = _field_sexp__518_ () in
                 let fvalue__521_ = unit_of_sexp _field_sexp__518_ in
                 Stdlib.( := ) c__511_ (Stdlib.Option.Some fvalue__521_)
               | Stdlib.Option.Some _ ->
                 Stdlib.( := )
                   duplicates__515_
                   (field_name__517_ :: Stdlib.( ! ) duplicates__515_))
            | "d" ->
              (match Stdlib.( ! ) d__513_ with
               | Stdlib.Option.None ->
                 let _field_sexp__518_ = _field_sexp__518_ () in
                 let fvalue__520_ = int_of_sexp _field_sexp__518_ in
                 Stdlib.( := ) d__513_ (Stdlib.Option.Some fvalue__520_)
               | Stdlib.Option.Some _ ->
                 Stdlib.( := )
                   duplicates__515_
                   (field_name__517_ :: Stdlib.( ! ) duplicates__515_))
            | _ ->
              if Stdlib.( ! ) Sexplib0.Sexp_conv.record_check_extra_fields
              then Stdlib.( := ) extra__516_ (field_name__517_ :: Stdlib.( ! ) extra__516_)
              else ());
           iter__532_ tail__533_
         | ((Sexplib0.Sexp.Atom _ | Sexplib0.Sexp.List _) as sexp__505_) :: _ ->
           Sexplib0.Sexp_conv_error.record_only_pairs_expected
             error_source__535_
             sexp__505_
         | [] -> ()
       in
       iter__532_ field_sexps__506_;
       (match Stdlib.( ! ) duplicates__515_ with
        | _ :: _ ->
          Sexplib0.Sexp_conv_error.record_duplicate_fields
            error_source__535_
            (Stdlib.( ! ) duplicates__515_)
            sexp__505_
        | [] ->
          (match Stdlib.( ! ) extra__516_ with
           | _ :: _ ->
             Sexplib0.Sexp_conv_error.record_extra_fields
               error_source__535_
               (Stdlib.( ! ) extra__516_)
               sexp__505_
           | [] ->
             (match
                ( Stdlib.( ! ) a__507_
                , Stdlib.( ! ) b__509_
                , Stdlib.( ! ) c__511_
                , Stdlib.( ! ) d__513_ )
              with
              | a__508_, b__510_, c__512_, d__514_ ->
                { a =
                    (match a__508_ with
                     | Stdlib.Option.Some v__525_ -> v__525_
                     | Stdlib.Option.None ->
                       (try option_of_sexp int_of_sexp (Sexplib0.Sexp.List []) with
                        | Sexplib0.Sexp_conv_error.Of_sexp_error (e__524_, _) ->
                          Stdlib.raise
                            (Sexplib0.Sexp_conv_error.Of_sexp_error (e__524_, sexp__505_))))
                ; b =
                    (match b__510_ with
                     | Stdlib.Option.Some v__527_ -> v__527_
                     | Stdlib.Option.None ->
                       (try list_of_sexp int_of_sexp (Sexplib0.Sexp.List []) with
                        | Sexplib0.Sexp_conv_error.Of_sexp_error (e__526_, _) ->
                          Stdlib.raise
                            (Sexplib0.Sexp_conv_error.Of_sexp_error (e__526_, sexp__505_))))
                ; c =
                    (match c__512_ with
                     | Stdlib.Option.Some v__529_ -> v__529_
                     | Stdlib.Option.None ->
                       (try unit_of_sexp (Sexplib0.Sexp.List []) with
                        | Sexplib0.Sexp_conv_error.Of_sexp_error (e__528_, _) ->
                          Stdlib.raise
                            (Sexplib0.Sexp_conv_error.Of_sexp_error (e__528_, sexp__505_))))
                ; d =
                    (match d__514_ with
                     | Stdlib.Option.Some v__531_ -> v__531_
                     | Stdlib.Option.None ->
                       (try int_of_sexp (Sexplib0.Sexp.List []) with
                        | Sexplib0.Sexp_conv_error.Of_sexp_error (e__530_, _) ->
                          Stdlib.raise
                            (Sexplib0.Sexp_conv_error.Of_sexp_error (e__530_, sexp__505_))))
                })))
     | Sexplib0.Sexp.Atom _ as sexp__505_ ->
       Sexplib0.Sexp_conv_error.record_list_instead_atom error_source__535_ sexp__505_
     : Sexplib0.Sexp.t -> t)
  ;;
  let _ = t_of_sexp
  let sexp_of_t =
    (fun { a = a__537_; b = b__539_; c = c__541_; d = d__543_ } ->
      let bnds__536_ = [] in
      let bnds__536_ =
        match sexp_of_int d__543_ with
        | Sexplib0.Sexp.List [] -> bnds__536_
        | arg__544_ ->
          Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "d"; arg__544_ ] :: bnds__536_
      in
      let bnds__536_ =
        match sexp_of_unit c__541_ with
        | Sexplib0.Sexp.List [] -> bnds__536_
        | arg__542_ ->
          Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "c"; arg__542_ ] :: bnds__536_
      in
      let bnds__536_ =
        match sexp_of_list sexp_of_int b__539_ with
        | Sexplib0.Sexp.List [] -> bnds__536_
        | arg__540_ ->
          Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "b"; arg__540_ ] :: bnds__536_
      in
      let bnds__536_ =
        match sexp_of_option sexp_of_int a__537_ with
        | Sexplib0.Sexp.List [] -> bnds__536_
        | arg__538_ ->
          Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "a"; arg__538_ ] :: bnds__536_
      in
      Sexplib0.Sexp.List bnds__536_
     : t -> Sexplib0.Sexp.t)
  ;;
  let _ = sexp_of_t
  [@@@end]
end
(* Exercises [@sexp.list] on an ordinary variant constructor argument: the
   [int list] payload of [A] is spliced into the constructor sexp as the
   remaining arguments ([A 1 2 3]) rather than as a single nested list
   ([A (1 2 3)]); both [t_of_sexp] and [sexp_of_t] use
   [Sexplib0.Sexp_conv.list_map] over the argument sexps accordingly.
   NOTE: everything between [@@deriving_inline sexp] and [@@@end] is generated
   by the ppx and must stay token-identical to the expander's output — do not
   edit it by hand. *)
module Variant_with_sexp_list = struct
  type t = A of int list [@sexp.list] [@@deriving_inline sexp]
  let _ = fun (_ : t) -> ()
  let t_of_sexp =
    (let error_source__547_ = "expansion.ml.Variant_with_sexp_list.t" in
     function
     | Sexplib0.Sexp.List
         (Sexplib0.Sexp.Atom (("a" | "A") as _tag__550_) :: sexp_args__551_) as
       _sexp__549_ -> A (Sexplib0.Sexp_conv.list_map int_of_sexp sexp_args__551_)
     | Sexplib0.Sexp.Atom ("a" | "A") as sexp__548_ ->
       Sexplib0.Sexp_conv_error.stag_takes_args error_source__547_ sexp__548_
     | Sexplib0.Sexp.List (Sexplib0.Sexp.List _ :: _) as sexp__546_ ->
       Sexplib0.Sexp_conv_error.nested_list_invalid_sum error_source__547_ sexp__546_
     | Sexplib0.Sexp.List [] as sexp__546_ ->
       Sexplib0.Sexp_conv_error.empty_list_invalid_sum error_source__547_ sexp__546_
     | sexp__546_ ->
       Sexplib0.Sexp_conv_error.unexpected_stag error_source__547_ sexp__546_
     : Sexplib0.Sexp.t -> t)
  ;;
  let _ = t_of_sexp
  let sexp_of_t =
    (fun (A l__552_) ->
      Sexplib0.Sexp.List
        (Sexplib0.Sexp.Atom "A" :: Sexplib0.Sexp_conv.list_map sexp_of_int l__552_)
     : t -> Sexplib0.Sexp.t)
  ;;
  let _ = sexp_of_t
  [@@@end]
end
(* Same [@sexp.list] splicing as [Variant_with_sexp_list], but for a
   polymorphic variant.  The expander emits two readers: [__t_of_sexp__]
   signals unrecognized tags via [no_variant_match] (so this type can be
   included in a larger polymorphic variant and allow backtracking), while
   [t_of_sexp] wraps it, turning [No_variant_match] into a proper
   [no_matching_variant_found] error.
   NOTE: everything between [@@deriving_inline sexp] and [@@@end] is generated
   by the ppx and must stay token-identical to the expander's output — do not
   edit it by hand. *)
module Poly_variant_with_sexp_list = struct
  type t = [ `A of int list [@sexp.list] ] [@@deriving_inline sexp]
  let _ = fun (_ : t) -> ()
  let __t_of_sexp__ =
    (let error_source__559_ = "expansion.ml.Poly_variant_with_sexp_list.t" in
     function
     | Sexplib0.Sexp.Atom atom__554_ as _sexp__556_ ->
       (match atom__554_ with
        | "A" -> Sexplib0.Sexp_conv_error.ptag_takes_args error_source__559_ _sexp__556_
        | _ -> Sexplib0.Sexp_conv_error.no_variant_match ())
     | Sexplib0.Sexp.List (Sexplib0.Sexp.Atom atom__554_ :: sexp_args__557_) as
       _sexp__556_ ->
       (match atom__554_ with
        | "A" as _tag__558_ -> `A (Sexplib0.Sexp_conv.list_map int_of_sexp sexp_args__557_)
        | _ -> Sexplib0.Sexp_conv_error.no_variant_match ())
     | Sexplib0.Sexp.List (Sexplib0.Sexp.List _ :: _) as sexp__555_ ->
       Sexplib0.Sexp_conv_error.nested_list_invalid_poly_var error_source__559_ sexp__555_
     | Sexplib0.Sexp.List [] as sexp__555_ ->
       Sexplib0.Sexp_conv_error.empty_list_invalid_poly_var error_source__559_ sexp__555_
     : Sexplib0.Sexp.t -> t)
  ;;
  let _ = __t_of_sexp__
  let t_of_sexp =
    (let error_source__561_ = "expansion.ml.Poly_variant_with_sexp_list.t" in
     fun sexp__560_ ->
       try __t_of_sexp__ sexp__560_ with
       | Sexplib0.Sexp_conv_error.No_variant_match ->
         Sexplib0.Sexp_conv_error.no_matching_variant_found error_source__561_ sexp__560_
     : Sexplib0.Sexp.t -> t)
  ;;
  let _ = t_of_sexp
  let sexp_of_t =
    (fun (`A l__562_) ->
      Sexplib0.Sexp.List
        (Sexplib0.Sexp.Atom "A" :: Sexplib0.Sexp_conv.list_map sexp_of_int l__562_)
     : t -> Sexplib0.Sexp.t)
  ;;
  let _ = sexp_of_t
  [@@@end]
end
(* Exercises [@@allow_extra_fields]: unknown field names are silently ignored
   ([| _ -> ()]) instead of being accumulated.  The generated reader still
   declares [extra__569_] and checks it, but nothing ever writes to it, so the
   [record_extra_fields] branch is unreachable here — duplicate and missing
   "a" fields are still reported as usual.
   NOTE: everything between [@@deriving_inline sexp] and [@@@end] is generated
   by the ppx and must stay token-identical to the expander's output — do not
   edit it by hand. *)
module Record_allowing_extra_fields = struct
  type t = { a : int } [@@allow_extra_fields] [@@deriving_inline sexp]
  let _ = fun (_ : t) -> ()
  let t_of_sexp =
    (let error_source__574_ = "expansion.ml.Record_allowing_extra_fields.t" in
     function
     | Sexplib0.Sexp.List field_sexps__565_ as sexp__564_ ->
       let a__566_ = Stdlib.ref Stdlib.Option.None
       and duplicates__568_ = Stdlib.ref []
       and extra__569_ = Stdlib.ref [] in
       let rec iter__575_ = function
         | Sexplib0.Sexp.List
             (Sexplib0.Sexp.Atom field_name__570_ :: (([] | [ _ ]) as _field_sexps__572_))
           :: tail__576_ ->
           let _field_sexp__571_ () =
             match _field_sexps__572_ with
             | [ x__577_ ] -> x__577_
             | [] ->
               Sexplib0.Sexp_conv_error.record_only_pairs_expected
                 error_source__574_
                 sexp__564_
             | _ -> assert false
           in
           (match field_name__570_ with
            | "a" ->
              (match Stdlib.( ! ) a__566_ with
               | Stdlib.Option.None ->
                 let _field_sexp__571_ = _field_sexp__571_ () in
                 let fvalue__573_ = int_of_sexp _field_sexp__571_ in
                 Stdlib.( := ) a__566_ (Stdlib.Option.Some fvalue__573_)
               | Stdlib.Option.Some _ ->
                 Stdlib.( := )
                   duplicates__568_
                   (field_name__570_ :: Stdlib.( ! ) duplicates__568_))
            | _ -> ());
           iter__575_ tail__576_
         | ((Sexplib0.Sexp.Atom _ | Sexplib0.Sexp.List _) as sexp__564_) :: _ ->
           Sexplib0.Sexp_conv_error.record_only_pairs_expected
             error_source__574_
             sexp__564_
         | [] -> ()
       in
       iter__575_ field_sexps__565_;
       (match Stdlib.( ! ) duplicates__568_ with
        | _ :: _ ->
          Sexplib0.Sexp_conv_error.record_duplicate_fields
            error_source__574_
            (Stdlib.( ! ) duplicates__568_)
            sexp__564_
        | [] ->
          (match Stdlib.( ! ) extra__569_ with
           | _ :: _ ->
             Sexplib0.Sexp_conv_error.record_extra_fields
               error_source__574_
               (Stdlib.( ! ) extra__569_)
               sexp__564_
           | [] ->
             (match Stdlib.( ! ) a__566_ with
              | Stdlib.Option.Some a__567_ -> { a = a__567_ }
              | _ ->
                Sexplib0.Sexp_conv_error.record_undefined_elements
                  error_source__574_
                  sexp__564_
                  [ Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) a__566_) Stdlib.Option.None, "a" ])))
     | Sexplib0.Sexp.Atom _ as sexp__564_ ->
       Sexplib0.Sexp_conv_error.record_list_instead_atom error_source__574_ sexp__564_
     : Sexplib0.Sexp.t -> t)
  ;;
  let _ = t_of_sexp
  let sexp_of_t =
    (fun { a = a__579_ } ->
      let bnds__578_ = [] in
      let bnds__578_ =
        let arg__580_ = sexp_of_int a__579_ in
        Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "a"; arg__580_ ] :: bnds__578_
      in
      Sexplib0.Sexp.List bnds__578_
     : t -> Sexplib0.Sexp.t)
  ;;
  let _ = sexp_of_t
  [@@@end]
end
(* Exercises [@sexp.opaque] on a type parameter: element conversion is
   delegated to [Sexplib0.Sexp_conv.sexp_of_opaque] / [opaque_of_sexp] (per
   Sexplib0, the former renders a placeholder atom and the latter cannot
   reconstruct a value — presumably it raises; confirm against Sexplib0 docs).
   NOTE: everything between [@@deriving_inline sexp] and [@@@end] is generated
   by the ppx and must stay token-identical to the expander's output — do not
   edit it by hand. *)
module Opaque = struct
  type t = (int[@sexp.opaque]) list [@@deriving_inline sexp]
  let _ = fun (_ : t) -> ()
  let t_of_sexp =
    (fun x__582_ -> list_of_sexp Sexplib0.Sexp_conv.opaque_of_sexp x__582_
     : Sexplib0.Sexp.t -> t)
  ;;
  let _ = t_of_sexp
  let sexp_of_t =
    (fun x__583_ -> sexp_of_list Sexplib0.Sexp_conv.sexp_of_opaque x__583_
     : t -> Sexplib0.Sexp.t)
  ;;
  let _ = sexp_of_t
  [@@@end]
end
| null | https://raw.githubusercontent.com/realworldocaml/book/d822fd065f19dbb6324bf83e0143bc73fd77dbf9/duniverse/ppx_sexp_conv/test/expansion.ml | ocaml | open! Base
module Abstract = struct
type t [@@deriving_inline sexp]
let _ = fun (_ : t) -> ()
let t_of_sexp =
(let error_source__002_ = "expansion.ml.Abstract.t" in
fun x__003_ -> Sexplib0.Sexp_conv_error.empty_type error_source__002_ x__003_
: Sexplib0.Sexp.t -> t)
;;
let _ = t_of_sexp
let sexp_of_t = (fun _ -> assert false : t -> Sexplib0.Sexp.t)
let _ = sexp_of_t
[@@@end]
end
module Tuple = struct
type t = int * int * int [@@deriving_inline sexp]
let _ = fun (_ : t) -> ()
let t_of_sexp =
(let error_source__012_ = "expansion.ml.Tuple.t" in
function
| Sexplib0.Sexp.List [ arg0__005_; arg1__006_; arg2__007_ ] ->
let res0__008_ = int_of_sexp arg0__005_
and res1__009_ = int_of_sexp arg1__006_
and res2__010_ = int_of_sexp arg2__007_ in
res0__008_, res1__009_, res2__010_
| sexp__011_ ->
Sexplib0.Sexp_conv_error.tuple_of_size_n_expected error_source__012_ 3 sexp__011_
: Sexplib0.Sexp.t -> t)
;;
let _ = t_of_sexp
let sexp_of_t =
(fun (arg0__013_, arg1__014_, arg2__015_) ->
let res0__016_ = sexp_of_int arg0__013_
and res1__017_ = sexp_of_int arg1__014_
and res2__018_ = sexp_of_int arg2__015_ in
Sexplib0.Sexp.List [ res0__016_; res1__017_; res2__018_ ]
: t -> Sexplib0.Sexp.t)
;;
let _ = sexp_of_t
[@@@end]
end
module Record = struct
type t =
{ a : int
; b : int
; c : int
}
[@@deriving_inline sexp]
let _ = fun (_ : t) -> ()
let t_of_sexp =
(let error_source__036_ = "expansion.ml.Record.t" in
function
| Sexplib0.Sexp.List field_sexps__021_ as sexp__020_ ->
let a__022_ = Stdlib.ref Stdlib.Option.None
and b__024_ = Stdlib.ref Stdlib.Option.None
and c__026_ = Stdlib.ref Stdlib.Option.None
and duplicates__028_ = Stdlib.ref []
and extra__029_ = Stdlib.ref [] in
let rec iter__037_ = function
| Sexplib0.Sexp.List
(Sexplib0.Sexp.Atom field_name__030_ :: (([] | [ _ ]) as _field_sexps__032_))
:: tail__038_ ->
let _field_sexp__031_ () =
match _field_sexps__032_ with
| [ x__039_ ] -> x__039_
| [] ->
Sexplib0.Sexp_conv_error.record_only_pairs_expected
error_source__036_
sexp__020_
| _ -> assert false
in
(match field_name__030_ with
| "a" ->
(match Stdlib.( ! ) a__022_ with
| Stdlib.Option.None ->
let _field_sexp__031_ = _field_sexp__031_ () in
let fvalue__035_ = int_of_sexp _field_sexp__031_ in
Stdlib.( := ) a__022_ (Stdlib.Option.Some fvalue__035_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__028_
(field_name__030_ :: Stdlib.( ! ) duplicates__028_))
| "b" ->
(match Stdlib.( ! ) b__024_ with
| Stdlib.Option.None ->
let _field_sexp__031_ = _field_sexp__031_ () in
let fvalue__034_ = int_of_sexp _field_sexp__031_ in
Stdlib.( := ) b__024_ (Stdlib.Option.Some fvalue__034_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__028_
(field_name__030_ :: Stdlib.( ! ) duplicates__028_))
| "c" ->
(match Stdlib.( ! ) c__026_ with
| Stdlib.Option.None ->
let _field_sexp__031_ = _field_sexp__031_ () in
let fvalue__033_ = int_of_sexp _field_sexp__031_ in
Stdlib.( := ) c__026_ (Stdlib.Option.Some fvalue__033_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__028_
(field_name__030_ :: Stdlib.( ! ) duplicates__028_))
| _ ->
if Stdlib.( ! ) Sexplib0.Sexp_conv.record_check_extra_fields
then Stdlib.( := ) extra__029_ (field_name__030_ :: Stdlib.( ! ) extra__029_)
else ());
iter__037_ tail__038_
| ((Sexplib0.Sexp.Atom _ | Sexplib0.Sexp.List _) as sexp__020_) :: _ ->
Sexplib0.Sexp_conv_error.record_only_pairs_expected
error_source__036_
sexp__020_
| [] -> ()
in
iter__037_ field_sexps__021_;
(match Stdlib.( ! ) duplicates__028_ with
| _ :: _ ->
Sexplib0.Sexp_conv_error.record_duplicate_fields
error_source__036_
(Stdlib.( ! ) duplicates__028_)
sexp__020_
| [] ->
(match Stdlib.( ! ) extra__029_ with
| _ :: _ ->
Sexplib0.Sexp_conv_error.record_extra_fields
error_source__036_
(Stdlib.( ! ) extra__029_)
sexp__020_
| [] ->
(match Stdlib.( ! ) a__022_, Stdlib.( ! ) b__024_, Stdlib.( ! ) c__026_ with
| ( Stdlib.Option.Some a__023_
, Stdlib.Option.Some b__025_
, Stdlib.Option.Some c__027_ ) -> { a = a__023_; b = b__025_; c = c__027_ }
| _ ->
Sexplib0.Sexp_conv_error.record_undefined_elements
error_source__036_
sexp__020_
[ Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) a__022_) Stdlib.Option.None, "a"
; Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) b__024_) Stdlib.Option.None, "b"
; Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) c__026_) Stdlib.Option.None, "c"
])))
| Sexplib0.Sexp.Atom _ as sexp__020_ ->
Sexplib0.Sexp_conv_error.record_list_instead_atom error_source__036_ sexp__020_
: Sexplib0.Sexp.t -> t)
;;
let _ = t_of_sexp
let sexp_of_t =
(fun { a = a__041_; b = b__043_; c = c__045_ } ->
let bnds__040_ = [] in
let bnds__040_ =
let arg__046_ = sexp_of_int c__045_ in
Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "c"; arg__046_ ] :: bnds__040_
in
let bnds__040_ =
let arg__044_ = sexp_of_int b__043_ in
Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "b"; arg__044_ ] :: bnds__040_
in
let bnds__040_ =
let arg__042_ = sexp_of_int a__041_ in
Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "a"; arg__042_ ] :: bnds__040_
in
Sexplib0.Sexp.List bnds__040_
: t -> Sexplib0.Sexp.t)
;;
let _ = sexp_of_t
[@@@end]
end
module Mutable_record = struct
type t =
{ mutable a : int
; mutable b : int
; mutable c : int
}
[@@deriving_inline sexp]
let _ = fun (_ : t) -> ()
let t_of_sexp =
(let error_source__064_ = "expansion.ml.Mutable_record.t" in
function
| Sexplib0.Sexp.List field_sexps__049_ as sexp__048_ ->
let a__050_ = Stdlib.ref Stdlib.Option.None
and b__052_ = Stdlib.ref Stdlib.Option.None
and c__054_ = Stdlib.ref Stdlib.Option.None
and duplicates__056_ = Stdlib.ref []
and extra__057_ = Stdlib.ref [] in
let rec iter__065_ = function
| Sexplib0.Sexp.List
(Sexplib0.Sexp.Atom field_name__058_ :: (([] | [ _ ]) as _field_sexps__060_))
:: tail__066_ ->
let _field_sexp__059_ () =
match _field_sexps__060_ with
| [ x__067_ ] -> x__067_
| [] ->
Sexplib0.Sexp_conv_error.record_only_pairs_expected
error_source__064_
sexp__048_
| _ -> assert false
in
(match field_name__058_ with
| "a" ->
(match Stdlib.( ! ) a__050_ with
| Stdlib.Option.None ->
let _field_sexp__059_ = _field_sexp__059_ () in
let fvalue__063_ = int_of_sexp _field_sexp__059_ in
Stdlib.( := ) a__050_ (Stdlib.Option.Some fvalue__063_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__056_
(field_name__058_ :: Stdlib.( ! ) duplicates__056_))
| "b" ->
(match Stdlib.( ! ) b__052_ with
| Stdlib.Option.None ->
let _field_sexp__059_ = _field_sexp__059_ () in
let fvalue__062_ = int_of_sexp _field_sexp__059_ in
Stdlib.( := ) b__052_ (Stdlib.Option.Some fvalue__062_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__056_
(field_name__058_ :: Stdlib.( ! ) duplicates__056_))
| "c" ->
(match Stdlib.( ! ) c__054_ with
| Stdlib.Option.None ->
let _field_sexp__059_ = _field_sexp__059_ () in
let fvalue__061_ = int_of_sexp _field_sexp__059_ in
Stdlib.( := ) c__054_ (Stdlib.Option.Some fvalue__061_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__056_
(field_name__058_ :: Stdlib.( ! ) duplicates__056_))
| _ ->
if Stdlib.( ! ) Sexplib0.Sexp_conv.record_check_extra_fields
then Stdlib.( := ) extra__057_ (field_name__058_ :: Stdlib.( ! ) extra__057_)
else ());
iter__065_ tail__066_
| ((Sexplib0.Sexp.Atom _ | Sexplib0.Sexp.List _) as sexp__048_) :: _ ->
Sexplib0.Sexp_conv_error.record_only_pairs_expected
error_source__064_
sexp__048_
| [] -> ()
in
iter__065_ field_sexps__049_;
(match Stdlib.( ! ) duplicates__056_ with
| _ :: _ ->
Sexplib0.Sexp_conv_error.record_duplicate_fields
error_source__064_
(Stdlib.( ! ) duplicates__056_)
sexp__048_
| [] ->
(match Stdlib.( ! ) extra__057_ with
| _ :: _ ->
Sexplib0.Sexp_conv_error.record_extra_fields
error_source__064_
(Stdlib.( ! ) extra__057_)
sexp__048_
| [] ->
(match Stdlib.( ! ) a__050_, Stdlib.( ! ) b__052_, Stdlib.( ! ) c__054_ with
| ( Stdlib.Option.Some a__051_
, Stdlib.Option.Some b__053_
, Stdlib.Option.Some c__055_ ) -> { a = a__051_; b = b__053_; c = c__055_ }
| _ ->
Sexplib0.Sexp_conv_error.record_undefined_elements
error_source__064_
sexp__048_
[ Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) a__050_) Stdlib.Option.None, "a"
; Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) b__052_) Stdlib.Option.None, "b"
; Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) c__054_) Stdlib.Option.None, "c"
])))
| Sexplib0.Sexp.Atom _ as sexp__048_ ->
Sexplib0.Sexp_conv_error.record_list_instead_atom error_source__064_ sexp__048_
: Sexplib0.Sexp.t -> t)
;;
let _ = t_of_sexp
let sexp_of_t =
(fun { a = a__069_; b = b__071_; c = c__073_ } ->
let bnds__068_ = [] in
let bnds__068_ =
let arg__074_ = sexp_of_int c__073_ in
Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "c"; arg__074_ ] :: bnds__068_
in
let bnds__068_ =
let arg__072_ = sexp_of_int b__071_ in
Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "b"; arg__072_ ] :: bnds__068_
in
let bnds__068_ =
let arg__070_ = sexp_of_int a__069_ in
Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "a"; arg__070_ ] :: bnds__068_
in
Sexplib0.Sexp.List bnds__068_
: t -> Sexplib0.Sexp.t)
;;
let _ = sexp_of_t
[@@@end]
end
module Variant = struct
type t =
| A
| B of int * int
| C of
{ a : int
; b : int
; d : int
}
| D of
{ mutable a : int
; mutable b : int
; mutable t : int
}
[@@deriving_inline sexp]
let _ = fun (_ : t) -> ()
let t_of_sexp =
(let error_source__077_ = "expansion.ml.Variant.t" in
function
| Sexplib0.Sexp.Atom ("a" | "A") -> A
| Sexplib0.Sexp.List
(Sexplib0.Sexp.Atom (("b" | "B") as _tag__080_) :: sexp_args__081_) as
_sexp__079_ ->
(match sexp_args__081_ with
| [ arg0__082_; arg1__083_ ] ->
let res0__084_ = int_of_sexp arg0__082_
and res1__085_ = int_of_sexp arg1__083_ in
B (res0__084_, res1__085_)
| _ ->
Sexplib0.Sexp_conv_error.stag_incorrect_n_args
error_source__077_
_tag__080_
_sexp__079_)
| Sexplib0.Sexp.List
(Sexplib0.Sexp.Atom (("c" | "C") as _tag__088_) :: field_sexps__086_) as
sexp__087_ ->
let a__089_ = Stdlib.ref Stdlib.Option.None
and b__091_ = Stdlib.ref Stdlib.Option.None
and d__093_ = Stdlib.ref Stdlib.Option.None
and duplicates__095_ = Stdlib.ref []
and extra__096_ = Stdlib.ref [] in
let rec iter__103_ = function
| Sexplib0.Sexp.List
(Sexplib0.Sexp.Atom field_name__097_ :: (([] | [ _ ]) as _field_sexps__099_))
:: tail__104_ ->
let _field_sexp__098_ () =
match _field_sexps__099_ with
| [ x__105_ ] -> x__105_
| [] ->
Sexplib0.Sexp_conv_error.record_only_pairs_expected
error_source__077_
sexp__087_
| _ -> assert false
in
(match field_name__097_ with
| "a" ->
(match Stdlib.( ! ) a__089_ with
| Stdlib.Option.None ->
let _field_sexp__098_ = _field_sexp__098_ () in
let fvalue__102_ = int_of_sexp _field_sexp__098_ in
Stdlib.( := ) a__089_ (Stdlib.Option.Some fvalue__102_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__095_
(field_name__097_ :: Stdlib.( ! ) duplicates__095_))
| "b" ->
(match Stdlib.( ! ) b__091_ with
| Stdlib.Option.None ->
let _field_sexp__098_ = _field_sexp__098_ () in
let fvalue__101_ = int_of_sexp _field_sexp__098_ in
Stdlib.( := ) b__091_ (Stdlib.Option.Some fvalue__101_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__095_
(field_name__097_ :: Stdlib.( ! ) duplicates__095_))
| "d" ->
(match Stdlib.( ! ) d__093_ with
| Stdlib.Option.None ->
let _field_sexp__098_ = _field_sexp__098_ () in
let fvalue__100_ = int_of_sexp _field_sexp__098_ in
Stdlib.( := ) d__093_ (Stdlib.Option.Some fvalue__100_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__095_
(field_name__097_ :: Stdlib.( ! ) duplicates__095_))
| _ ->
if Stdlib.( ! ) Sexplib0.Sexp_conv.record_check_extra_fields
then Stdlib.( := ) extra__096_ (field_name__097_ :: Stdlib.( ! ) extra__096_)
else ());
iter__103_ tail__104_
| ((Sexplib0.Sexp.Atom _ | Sexplib0.Sexp.List _) as sexp__087_) :: _ ->
Sexplib0.Sexp_conv_error.record_only_pairs_expected
error_source__077_
sexp__087_
| [] -> ()
in
iter__103_ field_sexps__086_;
(match Stdlib.( ! ) duplicates__095_ with
| _ :: _ ->
Sexplib0.Sexp_conv_error.record_duplicate_fields
error_source__077_
(Stdlib.( ! ) duplicates__095_)
sexp__087_
| [] ->
(match Stdlib.( ! ) extra__096_ with
| _ :: _ ->
Sexplib0.Sexp_conv_error.record_extra_fields
error_source__077_
(Stdlib.( ! ) extra__096_)
sexp__087_
| [] ->
(match Stdlib.( ! ) a__089_, Stdlib.( ! ) b__091_, Stdlib.( ! ) d__093_ with
| ( Stdlib.Option.Some a__090_
, Stdlib.Option.Some b__092_
, Stdlib.Option.Some d__094_ ) -> C { a = a__090_; b = b__092_; d = d__094_ }
| _ ->
Sexplib0.Sexp_conv_error.record_undefined_elements
error_source__077_
sexp__087_
[ Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) a__089_) Stdlib.Option.None, "a"
; Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) b__091_) Stdlib.Option.None, "b"
; Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) d__093_) Stdlib.Option.None, "d"
])))
| Sexplib0.Sexp.List
(Sexplib0.Sexp.Atom (("d" | "D") as _tag__108_) :: field_sexps__106_) as
sexp__107_ ->
let a__109_ = Stdlib.ref Stdlib.Option.None
and b__111_ = Stdlib.ref Stdlib.Option.None
and t__113_ = Stdlib.ref Stdlib.Option.None
and duplicates__115_ = Stdlib.ref []
and extra__116_ = Stdlib.ref [] in
let rec iter__123_ = function
| Sexplib0.Sexp.List
(Sexplib0.Sexp.Atom field_name__117_ :: (([] | [ _ ]) as _field_sexps__119_))
:: tail__124_ ->
let _field_sexp__118_ () =
match _field_sexps__119_ with
| [ x__125_ ] -> x__125_
| [] ->
Sexplib0.Sexp_conv_error.record_only_pairs_expected
error_source__077_
sexp__107_
| _ -> assert false
in
(match field_name__117_ with
| "a" ->
(match Stdlib.( ! ) a__109_ with
| Stdlib.Option.None ->
let _field_sexp__118_ = _field_sexp__118_ () in
let fvalue__122_ = int_of_sexp _field_sexp__118_ in
Stdlib.( := ) a__109_ (Stdlib.Option.Some fvalue__122_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__115_
(field_name__117_ :: Stdlib.( ! ) duplicates__115_))
| "b" ->
(match Stdlib.( ! ) b__111_ with
| Stdlib.Option.None ->
let _field_sexp__118_ = _field_sexp__118_ () in
let fvalue__121_ = int_of_sexp _field_sexp__118_ in
Stdlib.( := ) b__111_ (Stdlib.Option.Some fvalue__121_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__115_
(field_name__117_ :: Stdlib.( ! ) duplicates__115_))
| "t" ->
(match Stdlib.( ! ) t__113_ with
| Stdlib.Option.None ->
let _field_sexp__118_ = _field_sexp__118_ () in
let fvalue__120_ = int_of_sexp _field_sexp__118_ in
Stdlib.( := ) t__113_ (Stdlib.Option.Some fvalue__120_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__115_
(field_name__117_ :: Stdlib.( ! ) duplicates__115_))
| _ ->
if Stdlib.( ! ) Sexplib0.Sexp_conv.record_check_extra_fields
then Stdlib.( := ) extra__116_ (field_name__117_ :: Stdlib.( ! ) extra__116_)
else ());
iter__123_ tail__124_
| ((Sexplib0.Sexp.Atom _ | Sexplib0.Sexp.List _) as sexp__107_) :: _ ->
Sexplib0.Sexp_conv_error.record_only_pairs_expected
error_source__077_
sexp__107_
| [] -> ()
in
iter__123_ field_sexps__106_;
(match Stdlib.( ! ) duplicates__115_ with
| _ :: _ ->
Sexplib0.Sexp_conv_error.record_duplicate_fields
error_source__077_
(Stdlib.( ! ) duplicates__115_)
sexp__107_
| [] ->
(match Stdlib.( ! ) extra__116_ with
| _ :: _ ->
Sexplib0.Sexp_conv_error.record_extra_fields
error_source__077_
(Stdlib.( ! ) extra__116_)
sexp__107_
| [] ->
(match Stdlib.( ! ) a__109_, Stdlib.( ! ) b__111_, Stdlib.( ! ) t__113_ with
| ( Stdlib.Option.Some a__110_
, Stdlib.Option.Some b__112_
, Stdlib.Option.Some t__114_ ) -> D { a = a__110_; b = b__112_; t = t__114_ }
| _ ->
Sexplib0.Sexp_conv_error.record_undefined_elements
error_source__077_
sexp__107_
[ Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) a__109_) Stdlib.Option.None, "a"
; Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) b__111_) Stdlib.Option.None, "b"
; Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) t__113_) Stdlib.Option.None, "t"
])))
| Sexplib0.Sexp.List (Sexplib0.Sexp.Atom ("a" | "A") :: _) as sexp__078_ ->
Sexplib0.Sexp_conv_error.stag_no_args error_source__077_ sexp__078_
| Sexplib0.Sexp.Atom ("b" | "B") as sexp__078_ ->
Sexplib0.Sexp_conv_error.stag_takes_args error_source__077_ sexp__078_
| Sexplib0.Sexp.Atom ("c" | "C") as sexp__078_ ->
Sexplib0.Sexp_conv_error.stag_takes_args error_source__077_ sexp__078_
| Sexplib0.Sexp.Atom ("d" | "D") as sexp__078_ ->
Sexplib0.Sexp_conv_error.stag_takes_args error_source__077_ sexp__078_
| Sexplib0.Sexp.List (Sexplib0.Sexp.List _ :: _) as sexp__076_ ->
Sexplib0.Sexp_conv_error.nested_list_invalid_sum error_source__077_ sexp__076_
| Sexplib0.Sexp.List [] as sexp__076_ ->
Sexplib0.Sexp_conv_error.empty_list_invalid_sum error_source__077_ sexp__076_
| sexp__076_ ->
Sexplib0.Sexp_conv_error.unexpected_stag error_source__077_ sexp__076_
: Sexplib0.Sexp.t -> t)
;;
let _ = t_of_sexp
  (* NOTE(review): generator output inside the [@@deriving_inline sexp] region —
     presumably compared token-for-token against the ppx expansion; regenerate
     with ppx_sexp_conv rather than hand-editing.  Serializer for the enclosing
     four-constructor sum: nullary [A] becomes a bare atom; [B]'s two payloads
     are positional; inline records [C]/[D] are emitted as
     [(Tag (field value) ...)], with the field list built back-to-front. *)
  let sexp_of_t =
    (function
     | A -> Sexplib0.Sexp.Atom "A"
     | B (arg0__126_, arg1__127_) ->
       let res0__128_ = sexp_of_int arg0__126_
       and res1__129_ = sexp_of_int arg1__127_ in
       Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "B"; res0__128_; res1__129_ ]
     | C { a = a__131_; b = b__133_; d = d__135_ } ->
       let bnds__130_ = [] in
       (* fields are consed in reverse declaration order so the final list
          reads a, b, d *)
       let bnds__130_ =
         let arg__136_ = sexp_of_int d__135_ in
         Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "d"; arg__136_ ] :: bnds__130_
       in
       let bnds__130_ =
         let arg__134_ = sexp_of_int b__133_ in
         Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "b"; arg__134_ ] :: bnds__130_
       in
       let bnds__130_ =
         let arg__132_ = sexp_of_int a__131_ in
         Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "a"; arg__132_ ] :: bnds__130_
       in
       Sexplib0.Sexp.List (Sexplib0.Sexp.Atom "C" :: bnds__130_)
     | D { a = a__138_; b = b__140_; t = t__142_ } ->
       let bnds__137_ = [] in
       let bnds__137_ =
         let arg__143_ = sexp_of_int t__142_ in
         Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "t"; arg__143_ ] :: bnds__137_
       in
       let bnds__137_ =
         let arg__141_ = sexp_of_int b__140_ in
         Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "b"; arg__141_ ] :: bnds__137_
       in
       let bnds__137_ =
         let arg__139_ = sexp_of_int a__138_ in
         Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "a"; arg__139_ ] :: bnds__137_
       in
       Sexplib0.Sexp.List (Sexplib0.Sexp.Atom "D" :: bnds__137_)
     : t -> Sexplib0.Sexp.t)
  ;;
  let _ = sexp_of_t
[@@@end]
end
(* [Poly_variant]: expected expansion of [@@deriving sexp] for a simple
   polymorphic variant.  Everything between [@@deriving_inline sexp] and
   [@@@end] is ppx_sexp_conv generator output (note the mangled [__NNN_]
   names) — regenerate with the ppx rather than editing by hand.
   [__t_of_sexp__] raises the internal [No_variant_match] so that inheriting
   types can fall through; the public [t_of_sexp] converts that exception
   into a user-facing error. *)
module Poly_variant = struct
  type t =
    [ `A
    | `B of int
    ]
  [@@deriving_inline sexp]
  let _ = fun (_ : t) -> ()
  let __t_of_sexp__ =
    (let error_source__149_ = "expansion.ml.Poly_variant.t" in
     function
     | Sexplib0.Sexp.Atom atom__145_ as _sexp__147_ ->
       (match atom__145_ with
        | "A" -> `A
        | "B" -> Sexplib0.Sexp_conv_error.ptag_takes_args error_source__149_ _sexp__147_
        | _ -> Sexplib0.Sexp_conv_error.no_variant_match ())
     | Sexplib0.Sexp.List (Sexplib0.Sexp.Atom atom__145_ :: sexp_args__148_) as
       _sexp__147_ ->
       (match atom__145_ with
        | "B" as _tag__150_ ->
          (match sexp_args__148_ with
           | [ arg0__151_ ] ->
             let res0__152_ = int_of_sexp arg0__151_ in
             `B res0__152_
           | _ ->
             Sexplib0.Sexp_conv_error.ptag_incorrect_n_args
               error_source__149_
               _tag__150_
               _sexp__147_)
        | "A" -> Sexplib0.Sexp_conv_error.ptag_no_args error_source__149_ _sexp__147_
        | _ -> Sexplib0.Sexp_conv_error.no_variant_match ())
     | Sexplib0.Sexp.List (Sexplib0.Sexp.List _ :: _) as sexp__146_ ->
       Sexplib0.Sexp_conv_error.nested_list_invalid_poly_var error_source__149_ sexp__146_
     | Sexplib0.Sexp.List [] as sexp__146_ ->
       Sexplib0.Sexp_conv_error.empty_list_invalid_poly_var error_source__149_ sexp__146_
     : Sexplib0.Sexp.t -> t)
  ;;
  let _ = __t_of_sexp__
  let t_of_sexp =
    (let error_source__154_ = "expansion.ml.Poly_variant.t" in
     fun sexp__153_ ->
       try __t_of_sexp__ sexp__153_ with
       | Sexplib0.Sexp_conv_error.No_variant_match ->
         Sexplib0.Sexp_conv_error.no_matching_variant_found error_source__154_ sexp__153_
     : Sexplib0.Sexp.t -> t)
  ;;
  let _ = t_of_sexp
  let sexp_of_t =
    (function
     | `A -> Sexplib0.Sexp.Atom "A"
     | `B v__155_ -> Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "B"; sexp_of_int v__155_ ]
     : t -> Sexplib0.Sexp.t)
  ;;
  let _ = sexp_of_t
  [@@@end]
end
(* [Inline_poly_variant]: expansion for a polymorphic variant that inherits
   [Poly_variant.t] and adds [`C of int * int].  Generated code between
   [@@deriving_inline sexp] and [@@@end]; do not hand-edit.
   [__t_of_sexp__] first tries the inherited parser (coerced with [:>]) and
   only parses the local [`C] case when that raises [No_variant_match]. *)
module Inline_poly_variant = struct
  type t =
    [ Poly_variant.t
    | `C of int * int
    ]
  [@@deriving_inline sexp]
  let _ = fun (_ : t) -> ()
  let __t_of_sexp__ =
    (let error_source__167_ = "expansion.ml.Inline_poly_variant.t" in
     fun sexp__156_ ->
       try (Poly_variant.__t_of_sexp__ sexp__156_ :> t) with
       | Sexplib0.Sexp_conv_error.No_variant_match ->
         (match sexp__156_ with
          | Sexplib0.Sexp.Atom atom__157_ as _sexp__159_ ->
            (match atom__157_ with
             | "C" ->
               Sexplib0.Sexp_conv_error.ptag_takes_args error_source__167_ _sexp__159_
             | _ -> Sexplib0.Sexp_conv_error.no_variant_match ())
          | Sexplib0.Sexp.List (Sexplib0.Sexp.Atom atom__157_ :: sexp_args__160_) as
            _sexp__159_ ->
            (match atom__157_ with
             | "C" as _tag__161_ ->
               (match sexp_args__160_ with
                | [ arg0__168_ ] ->
                  (* the single payload is itself a two-element list (a tuple) *)
                  let res0__169_ =
                    match arg0__168_ with
                    | Sexplib0.Sexp.List [ arg0__162_; arg1__163_ ] ->
                      let res0__164_ = int_of_sexp arg0__162_
                      and res1__165_ = int_of_sexp arg1__163_ in
                      res0__164_, res1__165_
                    | sexp__166_ ->
                      Sexplib0.Sexp_conv_error.tuple_of_size_n_expected
                        error_source__167_
                        2
                        sexp__166_
                  in
                  `C res0__169_
                | _ ->
                  Sexplib0.Sexp_conv_error.ptag_incorrect_n_args
                    error_source__167_
                    _tag__161_
                    _sexp__159_)
             | _ -> Sexplib0.Sexp_conv_error.no_variant_match ())
          | Sexplib0.Sexp.List (Sexplib0.Sexp.List _ :: _) as sexp__158_ ->
            Sexplib0.Sexp_conv_error.nested_list_invalid_poly_var
              error_source__167_
              sexp__158_
          | Sexplib0.Sexp.List [] as sexp__158_ ->
            Sexplib0.Sexp_conv_error.empty_list_invalid_poly_var
              error_source__167_
              sexp__158_)
     : Sexplib0.Sexp.t -> t)
  ;;
  let _ = __t_of_sexp__
  let t_of_sexp =
    (let error_source__171_ = "expansion.ml.Inline_poly_variant.t" in
     fun sexp__170_ ->
       try __t_of_sexp__ sexp__170_ with
       | Sexplib0.Sexp_conv_error.No_variant_match ->
         Sexplib0.Sexp_conv_error.no_matching_variant_found error_source__171_ sexp__170_
     : Sexplib0.Sexp.t -> t)
  ;;
  let _ = t_of_sexp
  let sexp_of_t =
    (function
     | #Poly_variant.t as v__172_ -> Poly_variant.sexp_of_t v__172_
     | `C v__173_ ->
       Sexplib0.Sexp.List
         [ Sexplib0.Sexp.Atom "C"
         ; (let arg0__174_, arg1__175_ = v__173_ in
            let res0__176_ = sexp_of_int arg0__174_
            and res1__177_ = sexp_of_int arg1__175_ in
            Sexplib0.Sexp.List [ res0__176_; res1__177_ ])
         ]
     : t -> Sexplib0.Sexp.t)
  ;;
  let _ = sexp_of_t
  [@@@end]
end
(* [Recursive]: expansion for a self-recursive ordinary variant; both
   converters are [let rec] so [Banana]'s payload round-trips through the
   converter itself.  Parsing accepts both capitalizations of each tag
   ("banana" | "Banana"); printing always emits the capitalized form.
   Generated region between [@@deriving_inline sexp] and [@@@end]. *)
module Recursive = struct
  type t =
    | Banana of t
    | Orange
  [@@deriving_inline sexp]
  let _ = fun (_ : t) -> ()
  let rec t_of_sexp =
    (let error_source__180_ = "expansion.ml.Recursive.t" in
     function
     | Sexplib0.Sexp.List
         (Sexplib0.Sexp.Atom (("banana" | "Banana") as _tag__183_) :: sexp_args__184_) as
       _sexp__182_ ->
       (match sexp_args__184_ with
        | [ arg0__185_ ] ->
          let res0__186_ = t_of_sexp arg0__185_ in
          Banana res0__186_
        | _ ->
          Sexplib0.Sexp_conv_error.stag_incorrect_n_args
            error_source__180_
            _tag__183_
            _sexp__182_)
     | Sexplib0.Sexp.Atom ("orange" | "Orange") -> Orange
     | Sexplib0.Sexp.Atom ("banana" | "Banana") as sexp__181_ ->
       Sexplib0.Sexp_conv_error.stag_takes_args error_source__180_ sexp__181_
     | Sexplib0.Sexp.List (Sexplib0.Sexp.Atom ("orange" | "Orange") :: _) as sexp__181_ ->
       Sexplib0.Sexp_conv_error.stag_no_args error_source__180_ sexp__181_
     | Sexplib0.Sexp.List (Sexplib0.Sexp.List _ :: _) as sexp__179_ ->
       Sexplib0.Sexp_conv_error.nested_list_invalid_sum error_source__180_ sexp__179_
     | Sexplib0.Sexp.List [] as sexp__179_ ->
       Sexplib0.Sexp_conv_error.empty_list_invalid_sum error_source__180_ sexp__179_
     | sexp__179_ ->
       Sexplib0.Sexp_conv_error.unexpected_stag error_source__180_ sexp__179_
     : Sexplib0.Sexp.t -> t)
  ;;
  let _ = t_of_sexp
  let rec sexp_of_t =
    (function
     | Banana arg0__187_ ->
       let res0__188_ = sexp_of_t arg0__187_ in
       Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "Banana"; res0__188_ ]
     | Orange -> Sexplib0.Sexp.Atom "Orange"
     : t -> Sexplib0.Sexp.t)
  ;;
  let _ = sexp_of_t
  [@@@end]
end
(* [Nonrecursive]: [type nonrec t = t] aliasing [Recursive.t].  The generated
   converters are just the converters already in scope from [open Recursive],
   re-ascribed at the alias type — no new code is produced. *)
module Nonrecursive = struct
  open Recursive
  type nonrec t = t [@@deriving_inline sexp]
  let _ = fun (_ : t) -> ()
  let t_of_sexp = (t_of_sexp : Sexplib0.Sexp.t -> t)
  let _ = t_of_sexp
  let sexp_of_t = (sexp_of_t : t -> Sexplib0.Sexp.t)
  let _ = sexp_of_t
  [@@@end]
end
(* [Mutually_recursive]: three mutually recursive types — a variant [a] (with
   an inline-record constructor [C]), a record [b], and an alias [c = int].
   The converters are generated as single [let rec ... and ...] groups so they
   can call each other.  Record parsing uses per-field option refs plus
   duplicate/extra accumulators, then validates after the field loop.
   Generated region between [@@deriving_inline sexp] and [@@@end]; regenerate
   rather than hand-edit. *)
module Mutually_recursive = struct
  type a =
    | A
    | B of b
    | C of
        { a : a
        ; b : b
        ; c : c
        }
  and b =
    { a : a
    ; b : b
    }
  and c = int [@@deriving_inline sexp]
  let _ = fun (_ : a) -> ()
  let _ = fun (_ : b) -> ()
  let _ = fun (_ : c) -> ()
  let rec a_of_sexp =
    (let error_source__192_ = "expansion.ml.Mutually_recursive.a" in
     function
     | Sexplib0.Sexp.Atom ("a" | "A") -> A
     | Sexplib0.Sexp.List
         (Sexplib0.Sexp.Atom (("b" | "B") as _tag__195_) :: sexp_args__196_) as
       _sexp__194_ ->
       (match sexp_args__196_ with
        | [ arg0__197_ ] ->
          let res0__198_ = b_of_sexp arg0__197_ in
          B res0__198_
        | _ ->
          Sexplib0.Sexp_conv_error.stag_incorrect_n_args
            error_source__192_
            _tag__195_
            _sexp__194_)
     | Sexplib0.Sexp.List
         (Sexplib0.Sexp.Atom (("c" | "C") as _tag__201_) :: field_sexps__199_) as
       sexp__200_ ->
       (* inline-record constructor: parse the tail as (field value) pairs *)
       let a__202_ = Stdlib.ref Stdlib.Option.None
       and b__204_ = Stdlib.ref Stdlib.Option.None
       and c__206_ = Stdlib.ref Stdlib.Option.None
       and duplicates__208_ = Stdlib.ref []
       and extra__209_ = Stdlib.ref [] in
       let rec iter__216_ = function
         | Sexplib0.Sexp.List
             (Sexplib0.Sexp.Atom field_name__210_ :: (([] | [ _ ]) as _field_sexps__212_))
           :: tail__217_ ->
           let _field_sexp__211_ () =
             match _field_sexps__212_ with
             | [ x__218_ ] -> x__218_
             | [] ->
               Sexplib0.Sexp_conv_error.record_only_pairs_expected
                 error_source__192_
                 sexp__200_
             | _ -> assert false
           in
           (match field_name__210_ with
            | "a" ->
              (match Stdlib.( ! ) a__202_ with
               | Stdlib.Option.None ->
                 let _field_sexp__211_ = _field_sexp__211_ () in
                 let fvalue__215_ = a_of_sexp _field_sexp__211_ in
                 Stdlib.( := ) a__202_ (Stdlib.Option.Some fvalue__215_)
               | Stdlib.Option.Some _ ->
                 Stdlib.( := )
                   duplicates__208_
                   (field_name__210_ :: Stdlib.( ! ) duplicates__208_))
            | "b" ->
              (match Stdlib.( ! ) b__204_ with
               | Stdlib.Option.None ->
                 let _field_sexp__211_ = _field_sexp__211_ () in
                 let fvalue__214_ = b_of_sexp _field_sexp__211_ in
                 Stdlib.( := ) b__204_ (Stdlib.Option.Some fvalue__214_)
               | Stdlib.Option.Some _ ->
                 Stdlib.( := )
                   duplicates__208_
                   (field_name__210_ :: Stdlib.( ! ) duplicates__208_))
            | "c" ->
              (match Stdlib.( ! ) c__206_ with
               | Stdlib.Option.None ->
                 let _field_sexp__211_ = _field_sexp__211_ () in
                 let fvalue__213_ = c_of_sexp _field_sexp__211_ in
                 Stdlib.( := ) c__206_ (Stdlib.Option.Some fvalue__213_)
               | Stdlib.Option.Some _ ->
                 Stdlib.( := )
                   duplicates__208_
                   (field_name__210_ :: Stdlib.( ! ) duplicates__208_))
            | _ ->
              (* unknown field: recorded only when extra-field checking is on *)
              if Stdlib.( ! ) Sexplib0.Sexp_conv.record_check_extra_fields
              then Stdlib.( := ) extra__209_ (field_name__210_ :: Stdlib.( ! ) extra__209_)
              else ());
           iter__216_ tail__217_
         | ((Sexplib0.Sexp.Atom _ | Sexplib0.Sexp.List _) as sexp__200_) :: _ ->
           Sexplib0.Sexp_conv_error.record_only_pairs_expected
             error_source__192_
             sexp__200_
         | [] -> ()
       in
       iter__216_ field_sexps__199_;
       (match Stdlib.( ! ) duplicates__208_ with
        | _ :: _ ->
          Sexplib0.Sexp_conv_error.record_duplicate_fields
            error_source__192_
            (Stdlib.( ! ) duplicates__208_)
            sexp__200_
        | [] ->
          (match Stdlib.( ! ) extra__209_ with
           | _ :: _ ->
             Sexplib0.Sexp_conv_error.record_extra_fields
               error_source__192_
               (Stdlib.( ! ) extra__209_)
               sexp__200_
           | [] ->
             (match Stdlib.( ! ) a__202_, Stdlib.( ! ) b__204_, Stdlib.( ! ) c__206_ with
              | ( Stdlib.Option.Some a__203_
                , Stdlib.Option.Some b__205_
                , Stdlib.Option.Some c__207_ ) -> C { a = a__203_; b = b__205_; c = c__207_ }
              | _ ->
                Sexplib0.Sexp_conv_error.record_undefined_elements
                  error_source__192_
                  sexp__200_
                  [ Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) a__202_) Stdlib.Option.None, "a"
                  ; Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) b__204_) Stdlib.Option.None, "b"
                  ; Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) c__206_) Stdlib.Option.None, "c"
                  ])))
     | Sexplib0.Sexp.List (Sexplib0.Sexp.Atom ("a" | "A") :: _) as sexp__193_ ->
       Sexplib0.Sexp_conv_error.stag_no_args error_source__192_ sexp__193_
     | Sexplib0.Sexp.Atom ("b" | "B") as sexp__193_ ->
       Sexplib0.Sexp_conv_error.stag_takes_args error_source__192_ sexp__193_
     | Sexplib0.Sexp.Atom ("c" | "C") as sexp__193_ ->
       Sexplib0.Sexp_conv_error.stag_takes_args error_source__192_ sexp__193_
     | Sexplib0.Sexp.List (Sexplib0.Sexp.List _ :: _) as sexp__191_ ->
       Sexplib0.Sexp_conv_error.nested_list_invalid_sum error_source__192_ sexp__191_
     | Sexplib0.Sexp.List [] as sexp__191_ ->
       Sexplib0.Sexp_conv_error.empty_list_invalid_sum error_source__192_ sexp__191_
     | sexp__191_ ->
       Sexplib0.Sexp_conv_error.unexpected_stag error_source__192_ sexp__191_
     : Sexplib0.Sexp.t -> a)
  and b_of_sexp =
    (let error_source__233_ = "expansion.ml.Mutually_recursive.b" in
     function
     | Sexplib0.Sexp.List field_sexps__221_ as sexp__220_ ->
       let a__222_ = Stdlib.ref Stdlib.Option.None
       and b__224_ = Stdlib.ref Stdlib.Option.None
       and duplicates__226_ = Stdlib.ref []
       and extra__227_ = Stdlib.ref [] in
       let rec iter__234_ = function
         | Sexplib0.Sexp.List
             (Sexplib0.Sexp.Atom field_name__228_ :: (([] | [ _ ]) as _field_sexps__230_))
           :: tail__235_ ->
           let _field_sexp__229_ () =
             match _field_sexps__230_ with
             | [ x__236_ ] -> x__236_
             | [] ->
               Sexplib0.Sexp_conv_error.record_only_pairs_expected
                 error_source__233_
                 sexp__220_
             | _ -> assert false
           in
           (match field_name__228_ with
            | "a" ->
              (match Stdlib.( ! ) a__222_ with
               | Stdlib.Option.None ->
                 let _field_sexp__229_ = _field_sexp__229_ () in
                 let fvalue__232_ = a_of_sexp _field_sexp__229_ in
                 Stdlib.( := ) a__222_ (Stdlib.Option.Some fvalue__232_)
               | Stdlib.Option.Some _ ->
                 Stdlib.( := )
                   duplicates__226_
                   (field_name__228_ :: Stdlib.( ! ) duplicates__226_))
            | "b" ->
              (match Stdlib.( ! ) b__224_ with
               | Stdlib.Option.None ->
                 let _field_sexp__229_ = _field_sexp__229_ () in
                 let fvalue__231_ = b_of_sexp _field_sexp__229_ in
                 Stdlib.( := ) b__224_ (Stdlib.Option.Some fvalue__231_)
               | Stdlib.Option.Some _ ->
                 Stdlib.( := )
                   duplicates__226_
                   (field_name__228_ :: Stdlib.( ! ) duplicates__226_))
            | _ ->
              if Stdlib.( ! ) Sexplib0.Sexp_conv.record_check_extra_fields
              then Stdlib.( := ) extra__227_ (field_name__228_ :: Stdlib.( ! ) extra__227_)
              else ());
           iter__234_ tail__235_
         | ((Sexplib0.Sexp.Atom _ | Sexplib0.Sexp.List _) as sexp__220_) :: _ ->
           Sexplib0.Sexp_conv_error.record_only_pairs_expected
             error_source__233_
             sexp__220_
         | [] -> ()
       in
       iter__234_ field_sexps__221_;
       (match Stdlib.( ! ) duplicates__226_ with
        | _ :: _ ->
          Sexplib0.Sexp_conv_error.record_duplicate_fields
            error_source__233_
            (Stdlib.( ! ) duplicates__226_)
            sexp__220_
        | [] ->
          (match Stdlib.( ! ) extra__227_ with
           | _ :: _ ->
             Sexplib0.Sexp_conv_error.record_extra_fields
               error_source__233_
               (Stdlib.( ! ) extra__227_)
               sexp__220_
           | [] ->
             (match Stdlib.( ! ) a__222_, Stdlib.( ! ) b__224_ with
              | Stdlib.Option.Some a__223_, Stdlib.Option.Some b__225_ ->
                { a = a__223_; b = b__225_ }
              | _ ->
                Sexplib0.Sexp_conv_error.record_undefined_elements
                  error_source__233_
                  sexp__220_
                  [ Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) a__222_) Stdlib.Option.None, "a"
                  ; Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) b__224_) Stdlib.Option.None, "b"
                  ])))
     | Sexplib0.Sexp.Atom _ as sexp__220_ ->
       Sexplib0.Sexp_conv_error.record_list_instead_atom error_source__233_ sexp__220_
     : Sexplib0.Sexp.t -> b)
  and c_of_sexp = (int_of_sexp : Sexplib0.Sexp.t -> c)
  let _ = a_of_sexp
  and _ = b_of_sexp
  and _ = c_of_sexp
  let rec sexp_of_a =
    (function
     | A -> Sexplib0.Sexp.Atom "A"
     | B arg0__238_ ->
       let res0__239_ = sexp_of_b arg0__238_ in
       Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "B"; res0__239_ ]
     | C { a = a__241_; b = b__243_; c = c__245_ } ->
       let bnds__240_ = [] in
       let bnds__240_ =
         let arg__246_ = sexp_of_c c__245_ in
         Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "c"; arg__246_ ] :: bnds__240_
       in
       let bnds__240_ =
         let arg__244_ = sexp_of_b b__243_ in
         Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "b"; arg__244_ ] :: bnds__240_
       in
       let bnds__240_ =
         let arg__242_ = sexp_of_a a__241_ in
         Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "a"; arg__242_ ] :: bnds__240_
       in
       Sexplib0.Sexp.List (Sexplib0.Sexp.Atom "C" :: bnds__240_)
     : a -> Sexplib0.Sexp.t)
  and sexp_of_b =
    (fun { a = a__248_; b = b__250_ } ->
       let bnds__247_ = [] in
       let bnds__247_ =
         let arg__251_ = sexp_of_b b__250_ in
         Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "b"; arg__251_ ] :: bnds__247_
       in
       let bnds__247_ =
         let arg__249_ = sexp_of_a a__248_ in
         Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "a"; arg__249_ ] :: bnds__247_
       in
       Sexplib0.Sexp.List bnds__247_
     : b -> Sexplib0.Sexp.t)
  and sexp_of_c = (sexp_of_int : c -> Sexplib0.Sexp.t)
  let _ = sexp_of_a
  and _ = sexp_of_b
  and _ = sexp_of_c
  [@@@end]
end
(* [Alias]: a plain type alias of [Recursive.t].  The generated converters
   simply delegate to the aliased module's converters, re-ascribed at [t]. *)
module Alias = struct
  type t = Recursive.t [@@deriving_inline sexp]
  let _ = fun (_ : t) -> ()
  let t_of_sexp = (Recursive.t_of_sexp : Sexplib0.Sexp.t -> t)
  let _ = t_of_sexp
  let sexp_of_t = (Recursive.sexp_of_t : t -> Sexplib0.Sexp.t)
  let _ = sexp_of_t
  [@@@end]
end
(* [Re_export]: re-exported definition ([type t = Recursive.t = ...]) — unlike
   [Alias] above, the constructors are visible here, so the ppx regenerates
   full converters instead of delegating to [Recursive]'s.  Generated region
   between [@@deriving_inline sexp] and [@@@end]. *)
module Re_export = struct
  type t = Recursive.t =
    | Banana of t
    | Orange
  [@@deriving_inline sexp]
  let _ = fun (_ : t) -> ()
  let rec t_of_sexp =
    (let error_source__255_ = "expansion.ml.Re_export.t" in
     function
     | Sexplib0.Sexp.List
         (Sexplib0.Sexp.Atom (("banana" | "Banana") as _tag__258_) :: sexp_args__259_) as
       _sexp__257_ ->
       (match sexp_args__259_ with
        | [ arg0__260_ ] ->
          let res0__261_ = t_of_sexp arg0__260_ in
          Banana res0__261_
        | _ ->
          Sexplib0.Sexp_conv_error.stag_incorrect_n_args
            error_source__255_
            _tag__258_
            _sexp__257_)
     | Sexplib0.Sexp.Atom ("orange" | "Orange") -> Orange
     | Sexplib0.Sexp.Atom ("banana" | "Banana") as sexp__256_ ->
       Sexplib0.Sexp_conv_error.stag_takes_args error_source__255_ sexp__256_
     | Sexplib0.Sexp.List (Sexplib0.Sexp.Atom ("orange" | "Orange") :: _) as sexp__256_ ->
       Sexplib0.Sexp_conv_error.stag_no_args error_source__255_ sexp__256_
     | Sexplib0.Sexp.List (Sexplib0.Sexp.List _ :: _) as sexp__254_ ->
       Sexplib0.Sexp_conv_error.nested_list_invalid_sum error_source__255_ sexp__254_
     | Sexplib0.Sexp.List [] as sexp__254_ ->
       Sexplib0.Sexp_conv_error.empty_list_invalid_sum error_source__255_ sexp__254_
     | sexp__254_ ->
       Sexplib0.Sexp_conv_error.unexpected_stag error_source__255_ sexp__254_
     : Sexplib0.Sexp.t -> t)
  ;;
  let _ = t_of_sexp
  let rec sexp_of_t =
    (function
     | Banana arg0__262_ ->
       let res0__263_ = sexp_of_t arg0__262_ in
       Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "Banana"; res0__263_ ]
     | Orange -> Sexplib0.Sexp.Atom "Orange"
     : t -> Sexplib0.Sexp.t)
  ;;
  let _ = sexp_of_t
  [@@@end]
end
(* [Unary]: one type parameter.  Each parameter becomes an explicit converter
   argument ([_of_a__NNN_]), threaded into the converters for the underlying
   ['a list option] structure. *)
module Unary = struct
  type 'a t = 'a list option [@@deriving_inline sexp]
  let _ = fun (_ : 'a t) -> ()
  let t_of_sexp : 'a. (Sexplib0.Sexp.t -> 'a) -> Sexplib0.Sexp.t -> 'a t =
    fun _of_a__264_ x__266_ -> option_of_sexp (list_of_sexp _of_a__264_) x__266_
  ;;
  let _ = t_of_sexp
  let sexp_of_t : 'a. ('a -> Sexplib0.Sexp.t) -> 'a t -> Sexplib0.Sexp.t =
    fun _of_a__267_ x__268_ -> sexp_of_option (sexp_of_list _of_a__267_) x__268_
  ;;
  let _ = sexp_of_t
  [@@@end]
end
(* [Binary]: two type parameters, aliasing [Either.t].  The converters are
   eta-reduced delegations to [Either]'s converters, with explicit
   universally-quantified ('a 'b.) signatures. *)
module Binary = struct
  type ('a, 'b) t = ('a, 'b) Either.t [@@deriving_inline sexp]
  let _ = fun (_ : ('a, 'b) t) -> ()
  let t_of_sexp :
    'a 'b.
    (Sexplib0.Sexp.t -> 'a)
    -> (Sexplib0.Sexp.t -> 'b)
    -> Sexplib0.Sexp.t
    -> ('a, 'b) t
    =
    Either.t_of_sexp
  ;;
  let _ = t_of_sexp
  let sexp_of_t :
    'a 'b.
    ('a -> Sexplib0.Sexp.t)
    -> ('b -> Sexplib0.Sexp.t)
    -> ('a, 'b) t
    -> Sexplib0.Sexp.t
    =
    Either.sexp_of_t
  ;;
  let _ = sexp_of_t
  [@@@end]
end
(* [First_order]: an arrow type ['a -> 'a].  Functions are not serializable:
   of_sexp delegates to [Sexplib0.Sexp_conv.fun_of_sexp] (which presumably
   raises when used — confirm against the sexplib0 docs), and sexp_of
   discards the value via [sexp_of_fun Sexplib0.Sexp_conv.ignore]. *)
module First_order = struct
  type 'a t = 'a -> 'a [@@deriving_inline sexp]
  let _ = fun (_ : 'a t) -> ()
  let t_of_sexp : 'a. (Sexplib0.Sexp.t -> 'a) -> Sexplib0.Sexp.t -> 'a t =
    fun _of_a__276_ -> Sexplib0.Sexp_conv.fun_of_sexp
  ;;
  let _ = t_of_sexp
  let sexp_of_t : 'a. ('a -> Sexplib0.Sexp.t) -> 'a t -> Sexplib0.Sexp.t =
    fun _of_a__278_ _ -> Sexplib0.Sexp_conv.sexp_of_fun Sexplib0.Sexp_conv.ignore
  ;;
  let _ = sexp_of_t
  [@@@end]
end
(* [Second_order]: a higher-order arrow type.  Same treatment as [First_order]:
   the converter parameters are accepted but unused, and the function value
   itself is handled by [fun_of_sexp] / [sexp_of_fun ignore]. *)
module Second_order = struct
  type ('a, 'b) t = ('a -> 'a) -> ('a -> 'b) -> ('b -> 'b) -> 'a -> 'b
  [@@deriving_inline sexp]
  let _ = fun (_ : ('a, 'b) t) -> ()
  let t_of_sexp :
    'a 'b.
    (Sexplib0.Sexp.t -> 'a)
    -> (Sexplib0.Sexp.t -> 'b)
    -> Sexplib0.Sexp.t
    -> ('a, 'b) t
    =
    fun _of_a__279_ _of_b__280_ -> Sexplib0.Sexp_conv.fun_of_sexp
  ;;
  let _ = t_of_sexp
  let sexp_of_t :
    'a 'b.
    ('a -> Sexplib0.Sexp.t)
    -> ('b -> Sexplib0.Sexp.t)
    -> ('a, 'b) t
    -> Sexplib0.Sexp.t
    =
    fun _of_a__282_ _of_b__283_ _ ->
      Sexplib0.Sexp_conv.sexp_of_fun Sexplib0.Sexp_conv.ignore
  ;;
  let _ = sexp_of_t
  [@@@end]
end
(* [Named_arguments]: an arrow type with optional/labelled arguments; labels
   change nothing — it is still a function type, so the same
   [fun_of_sexp] / [sexp_of_fun ignore] treatment applies. *)
module Named_arguments = struct
  type t = ?a:int -> b:int -> int -> int [@@deriving_inline sexp]
  let _ = fun (_ : t) -> ()
  let t_of_sexp = (Sexplib0.Sexp_conv.fun_of_sexp : Sexplib0.Sexp.t -> t)
  let _ = t_of_sexp
  let sexp_of_t =
    (fun _ -> Sexplib0.Sexp_conv.sexp_of_fun Sexplib0.Sexp_conv.ignore
     : t -> Sexplib0.Sexp.t)
  ;;
  let _ = sexp_of_t
  [@@@end]
end
(* [Gadt]: only [sexp_of] is derived (of_sexp for a GADT is not generated).
   The printer introduces a locally abstract type [a__291_] to typecheck the
   GADT match; [C]'s existentially-typed list payload has no converter, so
   each element is rendered as the placeholder atom "_". *)
module Gadt = struct
  type _ t =
    | A : _ option t
    | B : int -> int t
    | C : 'a list -> unit t
  [@@deriving_inline sexp_of]
  let _ = fun (_ : _ t) -> ()
  let sexp_of_t : 'a__285_. ('a__285_ -> Sexplib0.Sexp.t) -> 'a__285_ t -> Sexplib0.Sexp.t
    =
    fun (type a__291_) : ((a__291_ -> Sexplib0.Sexp.t) -> a__291_ t -> Sexplib0.Sexp.t) ->
    fun _of_a__286_ -> function
      | A -> Sexplib0.Sexp.Atom "A"
      | B arg0__287_ ->
        let res0__288_ = sexp_of_int arg0__287_ in
        Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "B"; res0__288_ ]
      | C arg0__289_ ->
        let res0__290_ = sexp_of_list (fun _ -> Sexplib0.Sexp.Atom "_") arg0__289_ in
        Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "C"; res0__290_ ]
  ;;
  let _ = sexp_of_t
  [@@@end]
end
(* [Recursive_record_containing_variant]: a recursive record whose fields are
   polymorphic variants; field [b] carries [@default `B] and
   [@sexp_drop_default Poly.equal].  On parse, a missing [b] falls back to the
   default instead of erroring (only [a] appears in the undefined-elements
   check).  On print, [b] is omitted whenever [Poly.equal] says it equals the
   default.  Generated region between [@@deriving_inline sexp] and [@@@end];
   regenerate rather than hand-edit. *)
module Recursive_record_containing_variant = struct
  type t =
    { a : [ `A of t ]
    ; b : [ `B ] [@sexp_drop_default Poly.equal] [@default `B]
    }
  [@@deriving_inline sexp]
  let _ = fun (_ : t) -> ()
  let rec t_of_sexp =
    (let (default__324_ : [ `B ]) = `B in
     let error_source__310_ = "expansion.ml.Recursive_record_containing_variant.t" in
     function
     | Sexplib0.Sexp.List field_sexps__294_ as sexp__293_ ->
       let a__295_ = Stdlib.ref Stdlib.Option.None
       and b__297_ = Stdlib.ref Stdlib.Option.None
       and duplicates__299_ = Stdlib.ref []
       and extra__300_ = Stdlib.ref [] in
       let rec iter__326_ = function
         | Sexplib0.Sexp.List
             (Sexplib0.Sexp.Atom field_name__301_ :: (([] | [ _ ]) as _field_sexps__303_))
           :: tail__327_ ->
           let _field_sexp__302_ () =
             match _field_sexps__303_ with
             | [ x__328_ ] -> x__328_
             | [] ->
               Sexplib0.Sexp_conv_error.record_only_pairs_expected
                 error_source__310_
                 sexp__293_
             | _ -> assert false
           in
           (match field_name__301_ with
            | "a" ->
              (match Stdlib.( ! ) a__295_ with
               | Stdlib.Option.None ->
                 let _field_sexp__302_ = _field_sexp__302_ () in
                 (* inlined poly-variant parser for [`A of t]; recursion point *)
                 let fvalue__322_ =
                   let sexp__321_ = _field_sexp__302_ in
                   try
                     match sexp__321_ with
                     | Sexplib0.Sexp.Atom atom__314_ as _sexp__316_ ->
                       (match atom__314_ with
                        | "A" ->
                          Sexplib0.Sexp_conv_error.ptag_takes_args
                            error_source__310_
                            _sexp__316_
                        | _ -> Sexplib0.Sexp_conv_error.no_variant_match ())
                     | Sexplib0.Sexp.List (Sexplib0.Sexp.Atom atom__314_ :: sexp_args__317_)
                       as _sexp__316_ ->
                       (match atom__314_ with
                        | "A" as _tag__318_ ->
                          (match sexp_args__317_ with
                           | [ arg0__319_ ] ->
                             let res0__320_ = t_of_sexp arg0__319_ in
                             `A res0__320_
                           | _ ->
                             Sexplib0.Sexp_conv_error.ptag_incorrect_n_args
                               error_source__310_
                               _tag__318_
                               _sexp__316_)
                        | _ -> Sexplib0.Sexp_conv_error.no_variant_match ())
                     | Sexplib0.Sexp.List (Sexplib0.Sexp.List _ :: _) as sexp__315_ ->
                       Sexplib0.Sexp_conv_error.nested_list_invalid_poly_var
                         error_source__310_
                         sexp__315_
                     | Sexplib0.Sexp.List [] as sexp__315_ ->
                       Sexplib0.Sexp_conv_error.empty_list_invalid_poly_var
                         error_source__310_
                         sexp__315_
                   with
                   | Sexplib0.Sexp_conv_error.No_variant_match ->
                     Sexplib0.Sexp_conv_error.no_matching_variant_found
                       error_source__310_
                       sexp__321_
                 in
                 Stdlib.( := ) a__295_ (Stdlib.Option.Some fvalue__322_)
               | Stdlib.Option.Some _ ->
                 Stdlib.( := )
                   duplicates__299_
                   (field_name__301_ :: Stdlib.( ! ) duplicates__299_))
            | "b" ->
              (match Stdlib.( ! ) b__297_ with
               | Stdlib.Option.None ->
                 let _field_sexp__302_ = _field_sexp__302_ () in
                 let fvalue__312_ =
                   let sexp__311_ = _field_sexp__302_ in
                   try
                     match sexp__311_ with
                     | Sexplib0.Sexp.Atom atom__306_ as _sexp__308_ ->
                       (match atom__306_ with
                        | "B" -> `B
                        | _ -> Sexplib0.Sexp_conv_error.no_variant_match ())
                     | Sexplib0.Sexp.List (Sexplib0.Sexp.Atom atom__306_ :: _) as
                       _sexp__308_ ->
                       (match atom__306_ with
                        | "B" ->
                          Sexplib0.Sexp_conv_error.ptag_no_args
                            error_source__310_
                            _sexp__308_
                        | _ -> Sexplib0.Sexp_conv_error.no_variant_match ())
                     | Sexplib0.Sexp.List (Sexplib0.Sexp.List _ :: _) as sexp__307_ ->
                       Sexplib0.Sexp_conv_error.nested_list_invalid_poly_var
                         error_source__310_
                         sexp__307_
                     | Sexplib0.Sexp.List [] as sexp__307_ ->
                       Sexplib0.Sexp_conv_error.empty_list_invalid_poly_var
                         error_source__310_
                         sexp__307_
                   with
                   | Sexplib0.Sexp_conv_error.No_variant_match ->
                     Sexplib0.Sexp_conv_error.no_matching_variant_found
                       error_source__310_
                       sexp__311_
                 in
                 Stdlib.( := ) b__297_ (Stdlib.Option.Some fvalue__312_)
               | Stdlib.Option.Some _ ->
                 Stdlib.( := )
                   duplicates__299_
                   (field_name__301_ :: Stdlib.( ! ) duplicates__299_))
            | _ ->
              if Stdlib.( ! ) Sexplib0.Sexp_conv.record_check_extra_fields
              then Stdlib.( := ) extra__300_ (field_name__301_ :: Stdlib.( ! ) extra__300_)
              else ());
           iter__326_ tail__327_
         | ((Sexplib0.Sexp.Atom _ | Sexplib0.Sexp.List _) as sexp__293_) :: _ ->
           Sexplib0.Sexp_conv_error.record_only_pairs_expected
             error_source__310_
             sexp__293_
         | [] -> ()
       in
       iter__326_ field_sexps__294_;
       (match Stdlib.( ! ) duplicates__299_ with
        | _ :: _ ->
          Sexplib0.Sexp_conv_error.record_duplicate_fields
            error_source__310_
            (Stdlib.( ! ) duplicates__299_)
            sexp__293_
        | [] ->
          (match Stdlib.( ! ) extra__300_ with
           | _ :: _ ->
             Sexplib0.Sexp_conv_error.record_extra_fields
               error_source__310_
               (Stdlib.( ! ) extra__300_)
               sexp__293_
           | [] ->
             (match Stdlib.( ! ) a__295_, Stdlib.( ! ) b__297_ with
              | Stdlib.Option.Some a__296_, b__298_ ->
                { a = a__296_
                ; b =
                    (match b__298_ with
                     | Stdlib.Option.None -> default__324_
                     | Stdlib.Option.Some v__325_ -> v__325_)
                }
              | _ ->
                Sexplib0.Sexp_conv_error.record_undefined_elements
                  error_source__310_
                  sexp__293_
                  [ Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) a__295_) Stdlib.Option.None, "a" ])))
     | Sexplib0.Sexp.Atom _ as sexp__293_ ->
       Sexplib0.Sexp_conv_error.record_list_instead_atom error_source__310_ sexp__293_
     : Sexplib0.Sexp.t -> t)
  ;;
  let _ = t_of_sexp
  let rec sexp_of_t =
    (let (default__335_ : [ `B ]) = `B
     and (drop_default__334_ : [ `B ] -> [ `B ] -> Stdlib.Bool.t) = Poly.equal in
     fun { a = a__330_; b = b__336_ } ->
       let bnds__329_ = [] in
       let bnds__329_ =
         (* [b] is emitted only when it differs from the default per Poly.equal *)
         if drop_default__334_ default__335_ b__336_
         then bnds__329_
         else (
           let arg__338_ = (fun `B -> Sexplib0.Sexp.Atom "B") b__336_ in
           let bnd__337_ = Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "b"; arg__338_ ] in
           bnd__337_ :: bnds__329_)
       in
       let bnds__329_ =
         let arg__331_ =
           let (`A v__332_) = a__330_ in
           Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "A"; sexp_of_t v__332_ ]
         in
         Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "a"; arg__331_ ] :: bnds__329_
       in
       Sexplib0.Sexp.List bnds__329_
     : t -> Sexplib0.Sexp.t)
  ;;
  let _ = sexp_of_t
  [@@@end]
end
(* [Poly_record]: record with universally quantified fields ('a. 'a list etc.).
   Such field values cannot actually be parsed — the generated element
   converters call [Sexp_conv_error.record_poly_field_value], so a successful
   parse is only possible where the converter is never invoked on a concrete
   element.  Printing uses [sexp_of_opaque] for the quantified parts.  The
   parsed fields are first collected into a tuple and only then packed into
   the record (the quantified fields make direct ref-based construction
   ill-typed).  Generated region between [@@deriving_inline sexp] and
   [@@@end]; regenerate rather than hand-edit. *)
module Poly_record = struct
  type t =
    { a : 'a. 'a list
    ; b : 'b. 'b option
    ; c : 'c. 'c
    }
  [@@deriving_inline sexp]
  let _ = fun (_ : t) -> ()
  let t_of_sexp =
    (let error_source__355_ = "expansion.ml.Poly_record.t" in
     function
     | Sexplib0.Sexp.List field_sexps__341_ as sexp__340_ ->
       let a__366_, b__367_, c__368_ =
         let a__342_ = Stdlib.ref Stdlib.Option.None
         and b__344_ = Stdlib.ref Stdlib.Option.None
         and c__346_ = Stdlib.ref Stdlib.Option.None
         and duplicates__348_ = Stdlib.ref []
         and extra__349_ = Stdlib.ref [] in
         let rec iter__363_ = function
           | Sexplib0.Sexp.List
               (Sexplib0.Sexp.Atom field_name__350_
                :: (([] | [ _ ]) as _field_sexps__352_))
             :: tail__364_ ->
             let _field_sexp__351_ () =
               match _field_sexps__352_ with
               | [ x__365_ ] -> x__365_
               | [] ->
                 Sexplib0.Sexp_conv_error.record_only_pairs_expected
                   error_source__355_
                   sexp__340_
               | _ -> assert false
             in
             (match field_name__350_ with
              | "a" ->
                (match Stdlib.( ! ) a__342_ with
                 | Stdlib.Option.None ->
                   let _field_sexp__351_ = _field_sexp__351_ () in
                   let fvalue__362_ =
                     let _of_a__360_ sexp__361_ =
                       Sexplib0.Sexp_conv_error.record_poly_field_value
                         error_source__355_
                         sexp__361_
                     in
                     list_of_sexp _of_a__360_ _field_sexp__351_
                   in
                   Stdlib.( := ) a__342_ (Stdlib.Option.Some fvalue__362_)
                 | Stdlib.Option.Some _ ->
                   Stdlib.( := )
                     duplicates__348_
                     (field_name__350_ :: Stdlib.( ! ) duplicates__348_))
              | "b" ->
                (match Stdlib.( ! ) b__344_ with
                 | Stdlib.Option.None ->
                   let _field_sexp__351_ = _field_sexp__351_ () in
                   let fvalue__359_ =
                     let _of_b__357_ sexp__358_ =
                       Sexplib0.Sexp_conv_error.record_poly_field_value
                         error_source__355_
                         sexp__358_
                     in
                     option_of_sexp _of_b__357_ _field_sexp__351_
                   in
                   Stdlib.( := ) b__344_ (Stdlib.Option.Some fvalue__359_)
                 | Stdlib.Option.Some _ ->
                   Stdlib.( := )
                     duplicates__348_
                     (field_name__350_ :: Stdlib.( ! ) duplicates__348_))
              | "c" ->
                (match Stdlib.( ! ) c__346_ with
                 | Stdlib.Option.None ->
                   let _field_sexp__351_ = _field_sexp__351_ () in
                   let fvalue__356_ =
                     let _of_c__353_ sexp__354_ =
                       Sexplib0.Sexp_conv_error.record_poly_field_value
                         error_source__355_
                         sexp__354_
                     in
                     _of_c__353_ _field_sexp__351_
                   in
                   Stdlib.( := ) c__346_ (Stdlib.Option.Some fvalue__356_)
                 | Stdlib.Option.Some _ ->
                   Stdlib.( := )
                     duplicates__348_
                     (field_name__350_ :: Stdlib.( ! ) duplicates__348_))
              | _ ->
                if Stdlib.( ! ) Sexplib0.Sexp_conv.record_check_extra_fields
                then
                  Stdlib.( := ) extra__349_ (field_name__350_ :: Stdlib.( ! ) extra__349_)
                else ());
             iter__363_ tail__364_
           | ((Sexplib0.Sexp.Atom _ | Sexplib0.Sexp.List _) as sexp__340_) :: _ ->
             Sexplib0.Sexp_conv_error.record_only_pairs_expected
               error_source__355_
               sexp__340_
           | [] -> ()
         in
         iter__363_ field_sexps__341_;
         match Stdlib.( ! ) duplicates__348_ with
         | _ :: _ ->
           Sexplib0.Sexp_conv_error.record_duplicate_fields
             error_source__355_
             (Stdlib.( ! ) duplicates__348_)
             sexp__340_
         | [] ->
           (match Stdlib.( ! ) extra__349_ with
            | _ :: _ ->
              Sexplib0.Sexp_conv_error.record_extra_fields
                error_source__355_
                (Stdlib.( ! ) extra__349_)
                sexp__340_
            | [] ->
              (match Stdlib.( ! ) a__342_, Stdlib.( ! ) b__344_, Stdlib.( ! ) c__346_ with
               | ( Stdlib.Option.Some a__343_
                 , Stdlib.Option.Some b__345_
                 , Stdlib.Option.Some c__347_ ) -> a__343_, b__345_, c__347_
               | _ ->
                 Sexplib0.Sexp_conv_error.record_undefined_elements
                   error_source__355_
                   sexp__340_
                   [ Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) a__342_) Stdlib.Option.None, "a"
                   ; Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) b__344_) Stdlib.Option.None, "b"
                   ; Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) c__346_) Stdlib.Option.None, "c"
                   ]))
       in
       { a = a__366_; b = b__367_; c = c__368_ }
     | Sexplib0.Sexp.Atom _ as sexp__340_ ->
       Sexplib0.Sexp_conv_error.record_list_instead_atom error_source__355_ sexp__340_
     : Sexplib0.Sexp.t -> t)
  ;;
  let _ = t_of_sexp
  let sexp_of_t =
    (fun { a = a__370_; b = b__373_; c = c__376_ } ->
       let bnds__369_ = [] in
       let bnds__369_ =
         let arg__377_ =
           let _of_c__378_ = Sexplib0.Sexp_conv.sexp_of_opaque in
           _of_c__378_ c__376_
         in
         Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "c"; arg__377_ ] :: bnds__369_
       in
       let bnds__369_ =
         let arg__374_ =
           let _of_b__375_ = Sexplib0.Sexp_conv.sexp_of_opaque in
           sexp_of_option _of_b__375_ b__373_
         in
         Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "b"; arg__374_ ] :: bnds__369_
       in
       let bnds__369_ =
         let arg__371_ =
           let _of_a__372_ = Sexplib0.Sexp_conv.sexp_of_opaque in
           sexp_of_list _of_a__372_ a__370_
         in
         Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "a"; arg__371_ ] :: bnds__369_
       in
       Sexplib0.Sexp.List bnds__369_
     : t -> Sexplib0.Sexp.t)
  ;;
  let _ = sexp_of_t
  [@@@end]
end
module Record_with_defaults = struct
type t =
{ a : int [@default 0]
; b : int [@default 0] [@sexp_drop_default.compare]
; c : int [@default 0] [@sexp_drop_default.equal]
; d : int [@default 0] [@sexp_drop_default.sexp]
; e : int [@default 0] [@sexp_drop_default ( = )]
; f : int [@sexp_drop_if ( = ) 0]
}
[@@deriving_inline sexp]
let _ = fun (_ : t) -> ()
let t_of_sexp =
(let (default__415_ : int) = 0
and (default__417_ : int) = 0
and (default__419_ : int) = 0
and (default__421_ : int) = 0
and (default__423_ : int) = 0 in
let error_source__425_ = "expansion.ml.Record_with_defaults.t" in
function
| Sexplib0.Sexp.List field_sexps__381_ as sexp__380_ ->
let a__382_ = Stdlib.ref Stdlib.Option.None
and b__384_ = Stdlib.ref Stdlib.Option.None
and c__386_ = Stdlib.ref Stdlib.Option.None
and d__388_ = Stdlib.ref Stdlib.Option.None
and e__390_ = Stdlib.ref Stdlib.Option.None
and f__392_ = Stdlib.ref Stdlib.Option.None
and duplicates__394_ = Stdlib.ref []
and extra__395_ = Stdlib.ref [] in
let rec iter__426_ = function
| Sexplib0.Sexp.List
(Sexplib0.Sexp.Atom field_name__396_ :: (([] | [ _ ]) as _field_sexps__398_))
:: tail__427_ ->
let _field_sexp__397_ () =
match _field_sexps__398_ with
| [ x__428_ ] -> x__428_
| [] ->
Sexplib0.Sexp_conv_error.record_only_pairs_expected
error_source__425_
sexp__380_
| _ -> assert false
in
(match field_name__396_ with
| "a" ->
(match Stdlib.( ! ) a__382_ with
| Stdlib.Option.None ->
let _field_sexp__397_ = _field_sexp__397_ () in
let fvalue__409_ = int_of_sexp _field_sexp__397_ in
Stdlib.( := ) a__382_ (Stdlib.Option.Some fvalue__409_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__394_
(field_name__396_ :: Stdlib.( ! ) duplicates__394_))
| "b" ->
(match Stdlib.( ! ) b__384_ with
| Stdlib.Option.None ->
let _field_sexp__397_ = _field_sexp__397_ () in
let fvalue__407_ = int_of_sexp _field_sexp__397_ in
Stdlib.( := ) b__384_ (Stdlib.Option.Some fvalue__407_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__394_
(field_name__396_ :: Stdlib.( ! ) duplicates__394_))
| "c" ->
(match Stdlib.( ! ) c__386_ with
| Stdlib.Option.None ->
let _field_sexp__397_ = _field_sexp__397_ () in
let fvalue__405_ = int_of_sexp _field_sexp__397_ in
Stdlib.( := ) c__386_ (Stdlib.Option.Some fvalue__405_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__394_
(field_name__396_ :: Stdlib.( ! ) duplicates__394_))
| "d" ->
(match Stdlib.( ! ) d__388_ with
| Stdlib.Option.None ->
let _field_sexp__397_ = _field_sexp__397_ () in
let fvalue__403_ = int_of_sexp _field_sexp__397_ in
Stdlib.( := ) d__388_ (Stdlib.Option.Some fvalue__403_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__394_
(field_name__396_ :: Stdlib.( ! ) duplicates__394_))
| "e" ->
(match Stdlib.( ! ) e__390_ with
| Stdlib.Option.None ->
let _field_sexp__397_ = _field_sexp__397_ () in
let fvalue__401_ = int_of_sexp _field_sexp__397_ in
Stdlib.( := ) e__390_ (Stdlib.Option.Some fvalue__401_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__394_
(field_name__396_ :: Stdlib.( ! ) duplicates__394_))
| "f" ->
(match Stdlib.( ! ) f__392_ with
| Stdlib.Option.None ->
let _field_sexp__397_ = _field_sexp__397_ () in
let fvalue__399_ = int_of_sexp _field_sexp__397_ in
Stdlib.( := ) f__392_ (Stdlib.Option.Some fvalue__399_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__394_
(field_name__396_ :: Stdlib.( ! ) duplicates__394_))
| _ ->
if Stdlib.( ! ) Sexplib0.Sexp_conv.record_check_extra_fields
then Stdlib.( := ) extra__395_ (field_name__396_ :: Stdlib.( ! ) extra__395_)
else ());
iter__426_ tail__427_
| ((Sexplib0.Sexp.Atom _ | Sexplib0.Sexp.List _) as sexp__380_) :: _ ->
Sexplib0.Sexp_conv_error.record_only_pairs_expected
error_source__425_
sexp__380_
| [] -> ()
in
iter__426_ field_sexps__381_;
(match Stdlib.( ! ) duplicates__394_ with
| _ :: _ ->
Sexplib0.Sexp_conv_error.record_duplicate_fields
error_source__425_
(Stdlib.( ! ) duplicates__394_)
sexp__380_
| [] ->
(match Stdlib.( ! ) extra__395_ with
| _ :: _ ->
Sexplib0.Sexp_conv_error.record_extra_fields
error_source__425_
(Stdlib.( ! ) extra__395_)
sexp__380_
| [] ->
(match
( Stdlib.( ! ) a__382_
, Stdlib.( ! ) b__384_
, Stdlib.( ! ) c__386_
, Stdlib.( ! ) d__388_
, Stdlib.( ! ) e__390_
, Stdlib.( ! ) f__392_ )
with
| a__383_, b__385_, c__387_, d__389_, e__391_, Stdlib.Option.Some f__393_ ->
{ a =
(match a__383_ with
| Stdlib.Option.None -> default__415_
| Stdlib.Option.Some v__416_ -> v__416_)
; b =
(match b__385_ with
| Stdlib.Option.None -> default__417_
| Stdlib.Option.Some v__418_ -> v__418_)
; c =
(match c__387_ with
| Stdlib.Option.None -> default__419_
| Stdlib.Option.Some v__420_ -> v__420_)
; d =
(match d__389_ with
| Stdlib.Option.None -> default__421_
| Stdlib.Option.Some v__422_ -> v__422_)
; e =
(match e__391_ with
| Stdlib.Option.None -> default__423_
| Stdlib.Option.Some v__424_ -> v__424_)
; f = f__393_
}
| _ ->
Sexplib0.Sexp_conv_error.record_undefined_elements
error_source__425_
sexp__380_
[ Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) f__392_) Stdlib.Option.None, "f" ])))
| Sexplib0.Sexp.Atom _ as sexp__380_ ->
Sexplib0.Sexp_conv_error.record_list_instead_atom error_source__425_ sexp__380_
: Sexplib0.Sexp.t -> t)
;;
let _ = t_of_sexp
let sexp_of_t =
(let (default__433_ : int) = 0
and (default__438_ : int) = 0
and (default__443_ : int) = 0
and (default__449_ : int) = 0
and (drop_default__448_ : int -> int -> Stdlib.Bool.t) = ( = )
and (drop_if__454_ : Stdlib.Unit.t -> int -> Stdlib.Bool.t) = fun () -> ( = ) 0 in
fun { a = a__430_; b = b__434_; c = c__439_; d = d__444_; e = e__450_; f = f__455_ } ->
let bnds__429_ = [] in
let bnds__429_ =
if (drop_if__454_ ()) f__455_
then bnds__429_
else (
let arg__457_ = sexp_of_int f__455_ in
let bnd__456_ = Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "f"; arg__457_ ] in
bnd__456_ :: bnds__429_)
in
let bnds__429_ =
if drop_default__448_ default__449_ e__450_
then bnds__429_
else (
let arg__452_ = sexp_of_int e__450_ in
let bnd__451_ = Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "e"; arg__452_ ] in
bnd__451_ :: bnds__429_)
in
let bnds__429_ =
let arg__446_ = sexp_of_int d__444_ in
if Sexplib0.Sexp_conv.( = ) (sexp_of_int default__443_) arg__446_
then bnds__429_
else (
let bnd__445_ = Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "d"; arg__446_ ] in
bnd__445_ :: bnds__429_)
in
let bnds__429_ =
if [%equal: int] default__438_ c__439_
then bnds__429_
else (
let arg__441_ = sexp_of_int c__439_ in
let bnd__440_ = Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "c"; arg__441_ ] in
bnd__440_ :: bnds__429_)
in
let bnds__429_ =
if [%compare.equal: int] default__433_ b__434_
then bnds__429_
else (
let arg__436_ = sexp_of_int b__434_ in
let bnd__435_ = Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "b"; arg__436_ ] in
bnd__435_ :: bnds__429_)
in
let bnds__429_ =
let arg__431_ = sexp_of_int a__430_ in
Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "a"; arg__431_ ] :: bnds__429_
in
Sexplib0.Sexp.List bnds__429_
: t -> Sexplib0.Sexp.t)
;;
let _ = sexp_of_t
[@@@end]
end
module Record_with_special_types = struct
type t =
{ a : int option [@sexp.option]
; b : int list [@sexp.list]
; c : int array [@sexp.array]
; d : bool [@sexp.bool]
}
[@@deriving_inline sexp]
let _ = fun (_ : t) -> ()
let t_of_sexp =
(let error_source__480_ = "expansion.ml.Record_with_special_types.t" in
function
| Sexplib0.Sexp.List field_sexps__466_ as sexp__465_ ->
let a__467_ = Stdlib.ref Stdlib.Option.None
and b__469_ = Stdlib.ref Stdlib.Option.None
and c__471_ = Stdlib.ref Stdlib.Option.None
and d__473_ = Stdlib.ref false
and duplicates__475_ = Stdlib.ref []
and extra__476_ = Stdlib.ref [] in
let rec iter__486_ = function
| Sexplib0.Sexp.List
(Sexplib0.Sexp.Atom field_name__477_ :: (([] | [ _ ]) as _field_sexps__479_))
:: tail__487_ ->
let _field_sexp__478_ () =
match _field_sexps__479_ with
| [ x__488_ ] -> x__488_
| [] ->
Sexplib0.Sexp_conv_error.record_only_pairs_expected
error_source__480_
sexp__465_
| _ -> assert false
in
(match field_name__477_ with
| "a" ->
(match Stdlib.( ! ) a__467_ with
| Stdlib.Option.None ->
let _field_sexp__478_ = _field_sexp__478_ () in
let fvalue__483_ = int_of_sexp _field_sexp__478_ in
Stdlib.( := ) a__467_ (Stdlib.Option.Some fvalue__483_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__475_
(field_name__477_ :: Stdlib.( ! ) duplicates__475_))
| "b" ->
(match Stdlib.( ! ) b__469_ with
| Stdlib.Option.None ->
let _field_sexp__478_ = _field_sexp__478_ () in
let fvalue__482_ = list_of_sexp int_of_sexp _field_sexp__478_ in
Stdlib.( := ) b__469_ (Stdlib.Option.Some fvalue__482_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__475_
(field_name__477_ :: Stdlib.( ! ) duplicates__475_))
| "c" ->
(match Stdlib.( ! ) c__471_ with
| Stdlib.Option.None ->
let _field_sexp__478_ = _field_sexp__478_ () in
let fvalue__481_ = array_of_sexp int_of_sexp _field_sexp__478_ in
Stdlib.( := ) c__471_ (Stdlib.Option.Some fvalue__481_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__475_
(field_name__477_ :: Stdlib.( ! ) duplicates__475_))
| "d" ->
if Stdlib.( ! ) d__473_
then
Stdlib.( := )
duplicates__475_
(field_name__477_ :: Stdlib.( ! ) duplicates__475_)
else (
match _field_sexps__479_ with
| [] -> Stdlib.( := ) d__473_ true
| _ :: _ ->
Sexplib0.Sexp_conv_error.record_sexp_bool_with_payload
error_source__480_
sexp__465_)
| _ ->
if Stdlib.( ! ) Sexplib0.Sexp_conv.record_check_extra_fields
then Stdlib.( := ) extra__476_ (field_name__477_ :: Stdlib.( ! ) extra__476_)
else ());
iter__486_ tail__487_
| ((Sexplib0.Sexp.Atom _ | Sexplib0.Sexp.List _) as sexp__465_) :: _ ->
Sexplib0.Sexp_conv_error.record_only_pairs_expected
error_source__480_
sexp__465_
| [] -> ()
in
iter__486_ field_sexps__466_;
(match Stdlib.( ! ) duplicates__475_ with
| _ :: _ ->
Sexplib0.Sexp_conv_error.record_duplicate_fields
error_source__480_
(Stdlib.( ! ) duplicates__475_)
sexp__465_
| [] ->
(match Stdlib.( ! ) extra__476_ with
| _ :: _ ->
Sexplib0.Sexp_conv_error.record_extra_fields
error_source__480_
(Stdlib.( ! ) extra__476_)
sexp__465_
| [] ->
(match
( Stdlib.( ! ) a__467_
, Stdlib.( ! ) b__469_
, Stdlib.( ! ) c__471_
, Stdlib.( ! ) d__473_ )
with
| a__468_, b__470_, c__472_, d__474_ ->
{ a = a__468_
; b =
(match b__470_ with
| Stdlib.Option.None -> []
| Stdlib.Option.Some v__484_ -> v__484_)
; c =
(match c__472_ with
| Stdlib.Option.None -> [||]
| Stdlib.Option.Some v__485_ -> v__485_)
; d = d__474_
})))
| Sexplib0.Sexp.Atom _ as sexp__465_ ->
Sexplib0.Sexp_conv_error.record_list_instead_atom error_source__480_ sexp__465_
: Sexplib0.Sexp.t -> t)
;;
let _ = t_of_sexp
let sexp_of_t =
(fun { a = a__490_; b = b__495_; c = c__499_; d = d__502_ } ->
let bnds__489_ = [] in
let bnds__489_ =
if d__502_
then (
let bnd__503_ = Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "d" ] in
bnd__503_ :: bnds__489_)
else bnds__489_
in
let bnds__489_ =
if match c__499_ with
| [||] -> true
| _ -> false
then bnds__489_
else (
let arg__501_ = (sexp_of_array sexp_of_int) c__499_ in
let bnd__500_ = Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "c"; arg__501_ ] in
bnd__500_ :: bnds__489_)
in
let bnds__489_ =
if match b__495_ with
| [] -> true
| _ -> false
then bnds__489_
else (
let arg__497_ = (sexp_of_list sexp_of_int) b__495_ in
let bnd__496_ = Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "b"; arg__497_ ] in
bnd__496_ :: bnds__489_)
in
let bnds__489_ =
match a__490_ with
| Stdlib.Option.None -> bnds__489_
| Stdlib.Option.Some v__491_ ->
let arg__493_ = sexp_of_int v__491_ in
let bnd__492_ = Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "a"; arg__493_ ] in
bnd__492_ :: bnds__489_
in
Sexplib0.Sexp.List bnds__489_
: t -> Sexplib0.Sexp.t)
;;
let _ = sexp_of_t
[@@@end]
end
module Record_with_omit_nil = struct
type t =
{ a : int option [@sexp.omit_nil]
; b : int list [@sexp.omit_nil]
; c : unit [@sexp.omit_nil]
; d : int [@sexp.omit_nil]
}
[@@deriving_inline sexp]
let _ = fun (_ : t) -> ()
let t_of_sexp =
(let error_source__535_ = "expansion.ml.Record_with_omit_nil.t" in
function
| Sexplib0.Sexp.List field_sexps__506_ as sexp__505_ ->
let a__507_ = Stdlib.ref Stdlib.Option.None
and b__509_ = Stdlib.ref Stdlib.Option.None
and c__511_ = Stdlib.ref Stdlib.Option.None
and d__513_ = Stdlib.ref Stdlib.Option.None
and duplicates__515_ = Stdlib.ref []
and extra__516_ = Stdlib.ref [] in
let rec iter__532_ = function
| Sexplib0.Sexp.List
(Sexplib0.Sexp.Atom field_name__517_ :: (([] | [ _ ]) as _field_sexps__519_))
:: tail__533_ ->
let _field_sexp__518_ () =
match _field_sexps__519_ with
| [ x__534_ ] -> x__534_
| [] ->
Sexplib0.Sexp_conv_error.record_only_pairs_expected
error_source__535_
sexp__505_
| _ -> assert false
in
(match field_name__517_ with
| "a" ->
(match Stdlib.( ! ) a__507_ with
| Stdlib.Option.None ->
let _field_sexp__518_ = _field_sexp__518_ () in
let fvalue__523_ = option_of_sexp int_of_sexp _field_sexp__518_ in
Stdlib.( := ) a__507_ (Stdlib.Option.Some fvalue__523_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__515_
(field_name__517_ :: Stdlib.( ! ) duplicates__515_))
| "b" ->
(match Stdlib.( ! ) b__509_ with
| Stdlib.Option.None ->
let _field_sexp__518_ = _field_sexp__518_ () in
let fvalue__522_ = list_of_sexp int_of_sexp _field_sexp__518_ in
Stdlib.( := ) b__509_ (Stdlib.Option.Some fvalue__522_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__515_
(field_name__517_ :: Stdlib.( ! ) duplicates__515_))
| "c" ->
(match Stdlib.( ! ) c__511_ with
| Stdlib.Option.None ->
let _field_sexp__518_ = _field_sexp__518_ () in
let fvalue__521_ = unit_of_sexp _field_sexp__518_ in
Stdlib.( := ) c__511_ (Stdlib.Option.Some fvalue__521_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__515_
(field_name__517_ :: Stdlib.( ! ) duplicates__515_))
| "d" ->
(match Stdlib.( ! ) d__513_ with
| Stdlib.Option.None ->
let _field_sexp__518_ = _field_sexp__518_ () in
let fvalue__520_ = int_of_sexp _field_sexp__518_ in
Stdlib.( := ) d__513_ (Stdlib.Option.Some fvalue__520_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__515_
(field_name__517_ :: Stdlib.( ! ) duplicates__515_))
| _ ->
if Stdlib.( ! ) Sexplib0.Sexp_conv.record_check_extra_fields
then Stdlib.( := ) extra__516_ (field_name__517_ :: Stdlib.( ! ) extra__516_)
else ());
iter__532_ tail__533_
| ((Sexplib0.Sexp.Atom _ | Sexplib0.Sexp.List _) as sexp__505_) :: _ ->
Sexplib0.Sexp_conv_error.record_only_pairs_expected
error_source__535_
sexp__505_
| [] -> ()
in
iter__532_ field_sexps__506_;
(match Stdlib.( ! ) duplicates__515_ with
| _ :: _ ->
Sexplib0.Sexp_conv_error.record_duplicate_fields
error_source__535_
(Stdlib.( ! ) duplicates__515_)
sexp__505_
| [] ->
(match Stdlib.( ! ) extra__516_ with
| _ :: _ ->
Sexplib0.Sexp_conv_error.record_extra_fields
error_source__535_
(Stdlib.( ! ) extra__516_)
sexp__505_
| [] ->
(match
( Stdlib.( ! ) a__507_
, Stdlib.( ! ) b__509_
, Stdlib.( ! ) c__511_
, Stdlib.( ! ) d__513_ )
with
| a__508_, b__510_, c__512_, d__514_ ->
{ a =
(match a__508_ with
| Stdlib.Option.Some v__525_ -> v__525_
| Stdlib.Option.None ->
(try option_of_sexp int_of_sexp (Sexplib0.Sexp.List []) with
| Sexplib0.Sexp_conv_error.Of_sexp_error (e__524_, _) ->
Stdlib.raise
(Sexplib0.Sexp_conv_error.Of_sexp_error (e__524_, sexp__505_))))
; b =
(match b__510_ with
| Stdlib.Option.Some v__527_ -> v__527_
| Stdlib.Option.None ->
(try list_of_sexp int_of_sexp (Sexplib0.Sexp.List []) with
| Sexplib0.Sexp_conv_error.Of_sexp_error (e__526_, _) ->
Stdlib.raise
(Sexplib0.Sexp_conv_error.Of_sexp_error (e__526_, sexp__505_))))
; c =
(match c__512_ with
| Stdlib.Option.Some v__529_ -> v__529_
| Stdlib.Option.None ->
(try unit_of_sexp (Sexplib0.Sexp.List []) with
| Sexplib0.Sexp_conv_error.Of_sexp_error (e__528_, _) ->
Stdlib.raise
(Sexplib0.Sexp_conv_error.Of_sexp_error (e__528_, sexp__505_))))
; d =
(match d__514_ with
| Stdlib.Option.Some v__531_ -> v__531_
| Stdlib.Option.None ->
(try int_of_sexp (Sexplib0.Sexp.List []) with
| Sexplib0.Sexp_conv_error.Of_sexp_error (e__530_, _) ->
Stdlib.raise
(Sexplib0.Sexp_conv_error.Of_sexp_error (e__530_, sexp__505_))))
})))
| Sexplib0.Sexp.Atom _ as sexp__505_ ->
Sexplib0.Sexp_conv_error.record_list_instead_atom error_source__535_ sexp__505_
: Sexplib0.Sexp.t -> t)
;;
let _ = t_of_sexp
let sexp_of_t =
(fun { a = a__537_; b = b__539_; c = c__541_; d = d__543_ } ->
let bnds__536_ = [] in
let bnds__536_ =
match sexp_of_int d__543_ with
| Sexplib0.Sexp.List [] -> bnds__536_
| arg__544_ ->
Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "d"; arg__544_ ] :: bnds__536_
in
let bnds__536_ =
match sexp_of_unit c__541_ with
| Sexplib0.Sexp.List [] -> bnds__536_
| arg__542_ ->
Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "c"; arg__542_ ] :: bnds__536_
in
let bnds__536_ =
match sexp_of_list sexp_of_int b__539_ with
| Sexplib0.Sexp.List [] -> bnds__536_
| arg__540_ ->
Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "b"; arg__540_ ] :: bnds__536_
in
let bnds__536_ =
match sexp_of_option sexp_of_int a__537_ with
| Sexplib0.Sexp.List [] -> bnds__536_
| arg__538_ ->
Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "a"; arg__538_ ] :: bnds__536_
in
Sexplib0.Sexp.List bnds__536_
: t -> Sexplib0.Sexp.t)
;;
let _ = sexp_of_t
[@@@end]
end
module Variant_with_sexp_list = struct
type t = A of int list [@sexp.list] [@@deriving_inline sexp]
let _ = fun (_ : t) -> ()
let t_of_sexp =
(let error_source__547_ = "expansion.ml.Variant_with_sexp_list.t" in
function
| Sexplib0.Sexp.List
(Sexplib0.Sexp.Atom (("a" | "A") as _tag__550_) :: sexp_args__551_) as
_sexp__549_ -> A (Sexplib0.Sexp_conv.list_map int_of_sexp sexp_args__551_)
| Sexplib0.Sexp.Atom ("a" | "A") as sexp__548_ ->
Sexplib0.Sexp_conv_error.stag_takes_args error_source__547_ sexp__548_
| Sexplib0.Sexp.List (Sexplib0.Sexp.List _ :: _) as sexp__546_ ->
Sexplib0.Sexp_conv_error.nested_list_invalid_sum error_source__547_ sexp__546_
| Sexplib0.Sexp.List [] as sexp__546_ ->
Sexplib0.Sexp_conv_error.empty_list_invalid_sum error_source__547_ sexp__546_
| sexp__546_ ->
Sexplib0.Sexp_conv_error.unexpected_stag error_source__547_ sexp__546_
: Sexplib0.Sexp.t -> t)
;;
let _ = t_of_sexp
let sexp_of_t =
(fun (A l__552_) ->
Sexplib0.Sexp.List
(Sexplib0.Sexp.Atom "A" :: Sexplib0.Sexp_conv.list_map sexp_of_int l__552_)
: t -> Sexplib0.Sexp.t)
;;
let _ = sexp_of_t
[@@@end]
end
module Poly_variant_with_sexp_list = struct
type t = [ `A of int list [@sexp.list] ] [@@deriving_inline sexp]
let _ = fun (_ : t) -> ()
let __t_of_sexp__ =
(let error_source__559_ = "expansion.ml.Poly_variant_with_sexp_list.t" in
function
| Sexplib0.Sexp.Atom atom__554_ as _sexp__556_ ->
(match atom__554_ with
| "A" -> Sexplib0.Sexp_conv_error.ptag_takes_args error_source__559_ _sexp__556_
| _ -> Sexplib0.Sexp_conv_error.no_variant_match ())
| Sexplib0.Sexp.List (Sexplib0.Sexp.Atom atom__554_ :: sexp_args__557_) as
_sexp__556_ ->
(match atom__554_ with
| "A" as _tag__558_ -> `A (Sexplib0.Sexp_conv.list_map int_of_sexp sexp_args__557_)
| _ -> Sexplib0.Sexp_conv_error.no_variant_match ())
| Sexplib0.Sexp.List (Sexplib0.Sexp.List _ :: _) as sexp__555_ ->
Sexplib0.Sexp_conv_error.nested_list_invalid_poly_var error_source__559_ sexp__555_
| Sexplib0.Sexp.List [] as sexp__555_ ->
Sexplib0.Sexp_conv_error.empty_list_invalid_poly_var error_source__559_ sexp__555_
: Sexplib0.Sexp.t -> t)
;;
let _ = __t_of_sexp__
let t_of_sexp =
(let error_source__561_ = "expansion.ml.Poly_variant_with_sexp_list.t" in
fun sexp__560_ ->
try __t_of_sexp__ sexp__560_ with
| Sexplib0.Sexp_conv_error.No_variant_match ->
Sexplib0.Sexp_conv_error.no_matching_variant_found error_source__561_ sexp__560_
: Sexplib0.Sexp.t -> t)
;;
let _ = t_of_sexp
let sexp_of_t =
(fun (`A l__562_) ->
Sexplib0.Sexp.List
(Sexplib0.Sexp.Atom "A" :: Sexplib0.Sexp_conv.list_map sexp_of_int l__562_)
: t -> Sexplib0.Sexp.t)
;;
let _ = sexp_of_t
[@@@end]
end
module Record_allowing_extra_fields = struct
type t = { a : int } [@@allow_extra_fields] [@@deriving_inline sexp]
let _ = fun (_ : t) -> ()
let t_of_sexp =
(let error_source__574_ = "expansion.ml.Record_allowing_extra_fields.t" in
function
| Sexplib0.Sexp.List field_sexps__565_ as sexp__564_ ->
let a__566_ = Stdlib.ref Stdlib.Option.None
and duplicates__568_ = Stdlib.ref []
and extra__569_ = Stdlib.ref [] in
let rec iter__575_ = function
| Sexplib0.Sexp.List
(Sexplib0.Sexp.Atom field_name__570_ :: (([] | [ _ ]) as _field_sexps__572_))
:: tail__576_ ->
let _field_sexp__571_ () =
match _field_sexps__572_ with
| [ x__577_ ] -> x__577_
| [] ->
Sexplib0.Sexp_conv_error.record_only_pairs_expected
error_source__574_
sexp__564_
| _ -> assert false
in
(match field_name__570_ with
| "a" ->
(match Stdlib.( ! ) a__566_ with
| Stdlib.Option.None ->
let _field_sexp__571_ = _field_sexp__571_ () in
let fvalue__573_ = int_of_sexp _field_sexp__571_ in
Stdlib.( := ) a__566_ (Stdlib.Option.Some fvalue__573_)
| Stdlib.Option.Some _ ->
Stdlib.( := )
duplicates__568_
(field_name__570_ :: Stdlib.( ! ) duplicates__568_))
| _ -> ());
iter__575_ tail__576_
| ((Sexplib0.Sexp.Atom _ | Sexplib0.Sexp.List _) as sexp__564_) :: _ ->
Sexplib0.Sexp_conv_error.record_only_pairs_expected
error_source__574_
sexp__564_
| [] -> ()
in
iter__575_ field_sexps__565_;
(match Stdlib.( ! ) duplicates__568_ with
| _ :: _ ->
Sexplib0.Sexp_conv_error.record_duplicate_fields
error_source__574_
(Stdlib.( ! ) duplicates__568_)
sexp__564_
| [] ->
(match Stdlib.( ! ) extra__569_ with
| _ :: _ ->
Sexplib0.Sexp_conv_error.record_extra_fields
error_source__574_
(Stdlib.( ! ) extra__569_)
sexp__564_
| [] ->
(match Stdlib.( ! ) a__566_ with
| Stdlib.Option.Some a__567_ -> { a = a__567_ }
| _ ->
Sexplib0.Sexp_conv_error.record_undefined_elements
error_source__574_
sexp__564_
[ Sexplib0.Sexp_conv.( = ) (Stdlib.( ! ) a__566_) Stdlib.Option.None, "a" ])))
| Sexplib0.Sexp.Atom _ as sexp__564_ ->
Sexplib0.Sexp_conv_error.record_list_instead_atom error_source__574_ sexp__564_
: Sexplib0.Sexp.t -> t)
;;
let _ = t_of_sexp
let sexp_of_t =
(fun { a = a__579_ } ->
let bnds__578_ = [] in
let bnds__578_ =
let arg__580_ = sexp_of_int a__579_ in
Sexplib0.Sexp.List [ Sexplib0.Sexp.Atom "a"; arg__580_ ] :: bnds__578_
in
Sexplib0.Sexp.List bnds__578_
: t -> Sexplib0.Sexp.t)
;;
let _ = sexp_of_t
[@@@end]
end
module Opaque = struct
type t = (int[@sexp.opaque]) list [@@deriving_inline sexp]
let _ = fun (_ : t) -> ()
let t_of_sexp =
(fun x__582_ -> list_of_sexp Sexplib0.Sexp_conv.opaque_of_sexp x__582_
: Sexplib0.Sexp.t -> t)
;;
let _ = t_of_sexp
let sexp_of_t =
(fun x__583_ -> sexp_of_list Sexplib0.Sexp_conv.sexp_of_opaque x__583_
: t -> Sexplib0.Sexp.t)
;;
let _ = sexp_of_t
[@@@end]
end
| |
fc91ad16c8e07cf93fa74567e126b2fe6eac8a4d857decbc9aea479ebc8be6ed | rd--/hsc3 | lores.help.hs | -- lores
X.lores (whiteNoiseId 'α' ar * 0.5) 880 0.5
-- lores ; modulate param
let src = whiteNoiseId 'α' ar * 0.3
freq = lfNoise0Id 'β' ar 4 * 500 + 600
res = 0.9
in X.lores src freq res
| null | https://raw.githubusercontent.com/rd--/hsc3/60cb422f0e2049f00b7e15076b2667b85ad8f638/Help/Ugen/lores.help.hs | haskell | lores
lores ; modulate param | X.lores (whiteNoiseId 'α' ar * 0.5) 880 0.5
let src = whiteNoiseId 'α' ar * 0.3
freq = lfNoise0Id 'β' ar 4 * 500 + 600
res = 0.9
in X.lores src freq res
|
aeb6165caa777caa4a6edc077a304f05cdefbaca23c7cca33821829b65a3e898 | aristidb/aws | RemoveUserFromGroup.hs | # LANGUAGE MultiParamTypeClasses #
# LANGUAGE RecordWildCards #
# LANGUAGE TypeFamilies #
module Aws.Iam.Commands.RemoveUserFromGroup
( RemoveUserFromGroup(..)
, RemoveUserFromGroupResponse(..)
) where
import Aws.Core
import Aws.Iam.Core
import Aws.Iam.Internal
import Data.Text (Text)
import Data.Typeable
-- | Removes the specified user from the specified group.
--
-- <>
data RemoveUserFromGroup
= RemoveUserFromGroup {
rufgGroupName :: Text
-- ^ Name of the group to update.
, rufgUserName :: Text
-- ^ The of the user to add.
}
deriving (Eq, Ord, Show, Typeable)
instance SignQuery RemoveUserFromGroup where
type ServiceConfiguration RemoveUserFromGroup = IamConfiguration
signQuery RemoveUserFromGroup{..}
= iamAction "RemoveUserFromGroup" [
("GroupName" , rufgGroupName)
, ("UserName" , rufgUserName)
]
data RemoveUserFromGroupResponse = RemoveUserFromGroupResponse
deriving (Eq, Ord, Show, Typeable)
instance ResponseConsumer RemoveUserFromGroup RemoveUserFromGroupResponse where
type ResponseMetadata RemoveUserFromGroupResponse = IamMetadata
responseConsumer _ _
= iamResponseConsumer (const $ return RemoveUserFromGroupResponse)
instance Transaction RemoveUserFromGroup RemoveUserFromGroupResponse
instance AsMemoryResponse RemoveUserFromGroupResponse where
type MemoryResponse RemoveUserFromGroupResponse = RemoveUserFromGroupResponse
loadToMemory = return
| null | https://raw.githubusercontent.com/aristidb/aws/a99113ed7768f9758346052c0d8939b66c6efa87/Aws/Iam/Commands/RemoveUserFromGroup.hs | haskell | | Removes the specified user from the specified group.
<>
^ Name of the group to update.
^ The of the user to add. | # LANGUAGE MultiParamTypeClasses #
# LANGUAGE RecordWildCards #
# LANGUAGE TypeFamilies #
module Aws.Iam.Commands.RemoveUserFromGroup
( RemoveUserFromGroup(..)
, RemoveUserFromGroupResponse(..)
) where
import Aws.Core
import Aws.Iam.Core
import Aws.Iam.Internal
import Data.Text (Text)
import Data.Typeable
data RemoveUserFromGroup
= RemoveUserFromGroup {
rufgGroupName :: Text
, rufgUserName :: Text
}
deriving (Eq, Ord, Show, Typeable)
instance SignQuery RemoveUserFromGroup where
type ServiceConfiguration RemoveUserFromGroup = IamConfiguration
signQuery RemoveUserFromGroup{..}
= iamAction "RemoveUserFromGroup" [
("GroupName" , rufgGroupName)
, ("UserName" , rufgUserName)
]
data RemoveUserFromGroupResponse = RemoveUserFromGroupResponse
deriving (Eq, Ord, Show, Typeable)
instance ResponseConsumer RemoveUserFromGroup RemoveUserFromGroupResponse where
type ResponseMetadata RemoveUserFromGroupResponse = IamMetadata
responseConsumer _ _
= iamResponseConsumer (const $ return RemoveUserFromGroupResponse)
instance Transaction RemoveUserFromGroup RemoveUserFromGroupResponse
instance AsMemoryResponse RemoveUserFromGroupResponse where
type MemoryResponse RemoveUserFromGroupResponse = RemoveUserFromGroupResponse
loadToMemory = return
|
bd2378e18eacf251003225a7f5f6b7bc32fe6e4db113f26b624c7140c1e10e8f | tweag/webauthn | Identifier.hs | # LANGUAGE DataKinds #
{-# LANGUAGE GADTs #-}
# LANGUAGE KindSignatures #
# LANGUAGE StandaloneDeriving #
-- | Stability: experimental
-- This module concerns identification of authenticatiors, notably 'AAGUID',
-- 'SubjectKeyIdentifier' and a generic identifier type 'AuthenticatorIdentifier'
module Crypto.WebAuthn.Model.Identifier
( AuthenticatorIdentifier (..),
AAGUID (..),
SubjectKeyIdentifier (..),
)
where
import Crypto.Hash (Digest, SHA1)
import Crypto.WebAuthn.Internal.ToJSONOrphans ()
import qualified Crypto.WebAuthn.Model.Kinds as M
import Data.Aeson (KeyValue ((.=)), ToJSON (toJSON), Value (String), object)
import Data.ByteArray (convert)
import qualified Data.ByteString as BS
import Data.Hashable (Hashable (hashWithSalt), hashUsing)
import Data.UUID (UUID)
| [ ( spec)]( / TR / webauthn-2/#aaguid )
newtype AAGUID = AAGUID {unAAGUID :: UUID}
deriving (Eq, Show)
deriving newtype (Hashable, ToJSON)
-- | A way to identify an authenticator
data AuthenticatorIdentifier (p :: M.ProtocolKind) where
-- | [(spec)](-metadata-statement-v3.0-ps-20210518.html#dom-metadatastatement-aaguid)
-- A known FIDO2 [authenticator](-2/#authenticator),
identified by a ' AAGUID ' . Note that the ' AAGUID ' may be zero , meaning that
-- we were able to verify that the [public key credential](-2/#public-key-credential).
-- was generated by a trusted [authenticator](-2/#authenticator),
-- but we don't know which model it is.
AuthenticatorIdentifierFido2 ::
{idAaguid :: AAGUID} ->
AuthenticatorIdentifier 'M.Fido2
-- | [(spec)](-metadata-statement-v3.0-ps-20210518.html#dom-metadatastatement-attestationcertificatekeyidentifiers)
-- A known FIDO U2F [authenticator](-2/#authenticator),
identified by a ' SubjectKeyIdentifier ' . Clients that do n't implement CTAP2
( which is used to communicate with FIDO2 authenticators ) will use U2F to
-- communicate with the authenticator instead, which doesn't have support for 'AAGUID's.
AuthenticatorIdentifierFidoU2F ::
{idSubjectKeyIdentifier :: SubjectKeyIdentifier} ->
AuthenticatorIdentifier 'M.FidoU2F
deriving instance Show (AuthenticatorIdentifier p)
deriving instance Eq (AuthenticatorIdentifier p)
instance ToJSON (AuthenticatorIdentifier p) where
toJSON (AuthenticatorIdentifierFido2 aaguid) =
object
[ "tag" .= String "AuthenticatorIdentifierFido2",
"idAaguid" .= aaguid
]
toJSON (AuthenticatorIdentifierFidoU2F subjectKeyIdentifier) =
object
[ "tag" .= String "AuthenticatorIdentifierFidoU2F",
"idSubjectKeyIdentifier" .= subjectKeyIdentifier
]
| [ ( spec)]( / doc / html / )
This type represents method 1 of computing the identifier , as used in the
-- [attestationCertificateKeyIdentifiers](-metadata-service-v3.0-ps-20210518.html#dom-metadatablobpayloadentry-attestationcertificatekeyidentifiers)
-- field of the [Metadata Service](/)
newtype SubjectKeyIdentifier = SubjectKeyIdentifier {unSubjectKeyIdentifier :: Digest SHA1}
deriving (Eq, Show)
instance ToJSON SubjectKeyIdentifier where
toJSON = toJSON @BS.ByteString . convert . unSubjectKeyIdentifier
instance Hashable SubjectKeyIdentifier where
hashWithSalt = hashUsing @BS.ByteString (convert . unSubjectKeyIdentifier)
| null | https://raw.githubusercontent.com/tweag/webauthn/65ac63aa000e565d1e5de334b39597a61dcae75f/src/Crypto/WebAuthn/Model/Identifier.hs | haskell | # LANGUAGE GADTs #
| Stability: experimental
This module concerns identification of authenticatiors, notably 'AAGUID',
'SubjectKeyIdentifier' and a generic identifier type 'AuthenticatorIdentifier'
| A way to identify an authenticator
| [(spec)](-metadata-statement-v3.0-ps-20210518.html#dom-metadatastatement-aaguid)
A known FIDO2 [authenticator](-2/#authenticator),
we were able to verify that the [public key credential](-2/#public-key-credential).
was generated by a trusted [authenticator](-2/#authenticator),
but we don't know which model it is.
| [(spec)](-metadata-statement-v3.0-ps-20210518.html#dom-metadatastatement-attestationcertificatekeyidentifiers)
A known FIDO U2F [authenticator](-2/#authenticator),
communicate with the authenticator instead, which doesn't have support for 'AAGUID's.
[attestationCertificateKeyIdentifiers](-metadata-service-v3.0-ps-20210518.html#dom-metadatablobpayloadentry-attestationcertificatekeyidentifiers)
field of the [Metadata Service](/) | # LANGUAGE DataKinds #
# LANGUAGE KindSignatures #
# LANGUAGE StandaloneDeriving #
module Crypto.WebAuthn.Model.Identifier
( AuthenticatorIdentifier (..),
AAGUID (..),
SubjectKeyIdentifier (..),
)
where
import Crypto.Hash (Digest, SHA1)
import Crypto.WebAuthn.Internal.ToJSONOrphans ()
import qualified Crypto.WebAuthn.Model.Kinds as M
import Data.Aeson (KeyValue ((.=)), ToJSON (toJSON), Value (String), object)
import Data.ByteArray (convert)
import qualified Data.ByteString as BS
import Data.Hashable (Hashable (hashWithSalt), hashUsing)
import Data.UUID (UUID)
| [ ( spec)]( / TR / webauthn-2/#aaguid )
newtype AAGUID = AAGUID {unAAGUID :: UUID}
deriving (Eq, Show)
deriving newtype (Hashable, ToJSON)
data AuthenticatorIdentifier (p :: M.ProtocolKind) where
identified by a ' AAGUID ' . Note that the ' AAGUID ' may be zero , meaning that
AuthenticatorIdentifierFido2 ::
{idAaguid :: AAGUID} ->
AuthenticatorIdentifier 'M.Fido2
identified by a ' SubjectKeyIdentifier ' . Clients that do n't implement CTAP2
( which is used to communicate with FIDO2 authenticators ) will use U2F to
AuthenticatorIdentifierFidoU2F ::
{idSubjectKeyIdentifier :: SubjectKeyIdentifier} ->
AuthenticatorIdentifier 'M.FidoU2F
deriving instance Show (AuthenticatorIdentifier p)
deriving instance Eq (AuthenticatorIdentifier p)
instance ToJSON (AuthenticatorIdentifier p) where
toJSON (AuthenticatorIdentifierFido2 aaguid) =
object
[ "tag" .= String "AuthenticatorIdentifierFido2",
"idAaguid" .= aaguid
]
toJSON (AuthenticatorIdentifierFidoU2F subjectKeyIdentifier) =
object
[ "tag" .= String "AuthenticatorIdentifierFidoU2F",
"idSubjectKeyIdentifier" .= subjectKeyIdentifier
]
| [ ( spec)]( / doc / html / )
This type represents method 1 of computing the identifier , as used in the
newtype SubjectKeyIdentifier = SubjectKeyIdentifier {unSubjectKeyIdentifier :: Digest SHA1}
deriving (Eq, Show)
instance ToJSON SubjectKeyIdentifier where
toJSON = toJSON @BS.ByteString . convert . unSubjectKeyIdentifier
instance Hashable SubjectKeyIdentifier where
hashWithSalt = hashUsing @BS.ByteString (convert . unSubjectKeyIdentifier)
|
85190583077c46e812a25a93875119482c038529d549bff177d6ce6b469eec7a | ghcjs/ghcjs | conc006.hs | module Main where
import Control.Concurrent
-- This test hopefully exercises the black hole code. The main thread
-- forks off another thread and starts on a large computation.
-- The child thread attempts to get the result of the same large
-- computation (and should get blocked doing so, because the parent
-- won't have evaluated it yet). When the result is available, the
-- child passes it back to the parent who prints it out.
test = sum [1..10000]
main = do
x <- newEmptyMVar
forkIO (if test > 0
then putMVar x test
else error "proc"
)
if test > 0 -- evaluate test
then do result <- takeMVar x
print result
else error "main"
| null | https://raw.githubusercontent.com/ghcjs/ghcjs/e4cd4232a31f6371c761acd93853702f4c7ca74c/test/ghc/concurrent/conc006.hs | haskell | This test hopefully exercises the black hole code. The main thread
forks off another thread and starts on a large computation.
The child thread attempts to get the result of the same large
computation (and should get blocked doing so, because the parent
won't have evaluated it yet). When the result is available, the
child passes it back to the parent who prints it out.
evaluate test | module Main where
import Control.Concurrent
test = sum [1..10000]
main = do
x <- newEmptyMVar
forkIO (if test > 0
then putMVar x test
else error "proc"
)
then do result <- takeMVar x
print result
else error "main"
|
f7ec0ab69fa544c2da715ec011eca82a0b1ca2150a0d8aa7d3caae44f2f97963 | BinRoot/Haskell-Data-Analysis-Cookbook | Main.hs | import Data.Geohash
-- Hashing latitude/longitude pairs
main = do
let geohash1 = encode 10 (37.775, -122.419)
putStrLn $ "geohash1 is " ++ (show geohash1)
case geohash1 of
Just g -> putStrLn $ "decoding geohash1: " ++ (show.decode) g
Nothing -> putStrLn "error encoding"
let geohash2 = encode 10 (37.175, -125.419)
putStrLn $ "geohash2 is " ++ (show geohash2)
case geohash2 of
Just g -> putStrLn $ "decoding geohash2: " ++ (show.decode) g
Nothing -> putStrLn "error encoding"
| null | https://raw.githubusercontent.com/BinRoot/Haskell-Data-Analysis-Cookbook/f8c46987d78f4a6c1828b353c5f906b9314c2ef9/Ch04/Code08_geo/Main.hs | haskell | Hashing latitude/longitude pairs | import Data.Geohash
main = do
let geohash1 = encode 10 (37.775, -122.419)
putStrLn $ "geohash1 is " ++ (show geohash1)
case geohash1 of
Just g -> putStrLn $ "decoding geohash1: " ++ (show.decode) g
Nothing -> putStrLn "error encoding"
let geohash2 = encode 10 (37.175, -125.419)
putStrLn $ "geohash2 is " ++ (show geohash2)
case geohash2 of
Just g -> putStrLn $ "decoding geohash2: " ++ (show.decode) g
Nothing -> putStrLn "error encoding"
|
5e69ed323f74bf5f631d0b74dcbd809907e89b718b5a66f81cbae67ea50050ed | shiguredo/eryngii | formatter.ml | open Core.Std
open Located
module Op = struct
type desc =
| Nop
| Text of string
| Comment of string
| Space of int
| Newline of int
| Lparen
| Rparen
| Lbrack
| Rbrack
| Lbrace
| Rbrace
| Lbin (* << *)
| Rbin (* >> *)
| Leveled_indent
| Aligned_indent
| Label of [`Fun] * int
| Labeled_indent of [`Fun] * int
| Dedent
| Semi
| Comma
| Dot
| Larrow
| Larrow2
| Rarrow
type t = {
pos : int;
desc : desc;
}
let create pos desc =
{ pos; desc }
let of_loc loc desc =
{ pos = Location.offset loc; desc }
let space pos len =
create pos (Space len)
let length op =
match op.desc with
| Nop
| Leveled_indent
| Aligned_indent
| Label _
| Labeled_indent _
| Dedent -> None
| Text s
| Comment s -> Some (String.length s)
| Space n
| Newline n -> Some n
| Lparen
| Rparen
| Lbrack
| Rbrack
| Lbrace
| Rbrace
| Semi
| Comma
| Dot -> Some 1
| Lbin
| Rbin
| Larrow
| Larrow2
| Rarrow -> Some 2
let length_exn op =
Option.value_exn (length op)
let length_zero op =
Option.value (length op) ~default:0
let add_pos op len =
{ op with pos = op.pos + len }
let add_pos_of op other =
add_pos op @@ length_exn other
let to_string op =
let open Printf in
match op.desc with
| Nop -> "nop"
| Text s -> sprintf "text(\"%s\")" s
| Comment s -> sprintf "comment(\"%s\")" s
| Space n -> sprintf "space(%d)" n
| Newline n -> sprintf "nl(%d)" n
| Lparen -> "'('"
| Rparen -> "')'"
| Lbrack -> "'['"
| Rbrack -> "']'"
| Lbrace -> "'{'"
| Rbrace -> "'}'"
| Lbin -> "'<<'"
| Rbin -> "'>>'"
| Semi -> "';'"
| Comma -> "','"
| Dot -> "'.'"
| Larrow -> "'<-'"
| Larrow2 -> "'<='"
| Rarrow -> "'->'"
| Leveled_indent -> "l_indent"
| Aligned_indent -> "a_indent"
| Label _ -> "label"
| Labeled_indent _ -> "b_indent"
| Dedent -> "dedent"
end
module Context = struct
type t = {
file : File.t;
mutable ops : Op.t list;
mutable indent : int list;
mutable count : int option;
}
let create file =
{ file;
ops = [];
indent = [0];
count = None;
}
let contents ctx =
List.rev ctx.ops
let clear ctx =
ctx.ops <- []
let start_count ctx =
match ctx.count with
| Some _ -> failwith "already start count"
| None -> ctx.count <- Some 0
let end_count ctx =
match ctx.count with
| None -> failwith "not start count"
| Some count ->
ctx.count <- None;
count
let count ctx =
Option.value_exn ctx.count
let last_pos ctx =
match List.hd ctx.ops with
| None -> None
| Some op -> Some op.pos
let last_pos_exn ctx =
Option.value_exn (last_pos ctx)
let last_loc_exn ctx =
let pos = last_pos_exn ctx in
let pos = { Position.line = 0; col = 0; offset = pos } in
Location.create pos pos
let add ctx op =
ctx.ops <- op :: ctx.ops
let add_loc ctx loc desc =
add ctx @@ Op.of_loc loc desc
let string ctx loc text =
add_loc ctx loc (Op.Text text)
let text ctx text =
add_loc ctx text.loc (Op.Text text.desc)
let atom ctx atom =
match atom with
| `Unenclosed name ->
text ctx name
| `Enclosed name ->
string ctx name.loc "'";
text ctx name;
string ctx name.loc "'"
let erl_string ctx s =
string ctx s.loc "\"";
text ctx s;
string ctx s.loc "\""
let comment ctx text =
let len = String.length text.desc in
let buf = Buffer.create (len+1) in
let body = String.lstrip text.desc ~drop:(fun c -> c = '%') in
let sign = String.make (len - String.length body) '%' in
let body = String.strip body in
Buffer.add_string buf sign;
Buffer.add_string buf " ";
Buffer.add_string buf body;
add_loc ctx text.loc (Op.Comment (Buffer.contents buf))
let space ctx loc n =
add_loc ctx loc (Space n)
let newline ctx loc n =
add_loc ctx loc (Newline n)
let lparen ctx loc =
add_loc ctx loc Lparen
let rparen ctx loc =
add_loc ctx loc Rparen
let lbrack ctx loc =
add_loc ctx loc Lbrack
let rbrack ctx loc =
add_loc ctx loc Rbrack
let lbrace ctx loc =
add_loc ctx loc Lbrace
let rbrace ctx loc =
add_loc ctx loc Rbrace
let lbin ctx loc =
add_loc ctx loc Lbin
let rbin ctx loc =
add_loc ctx loc Rbin
let semi ctx loc =
add_loc ctx loc Semi
let comma ctx loc =
add_loc ctx loc Comma
let dot ctx loc =
add_loc ctx loc Dot
let larrow ctx loc =
add_loc ctx loc Larrow
let larrow2 ctx loc =
add_loc ctx loc Larrow2
let rarrow ctx loc =
add_loc ctx loc Rarrow
let indent ctx loc =
add_loc ctx loc Leveled_indent
let a_indent ctx loc =
add_loc ctx loc Aligned_indent
let label ctx loc name =
add_loc ctx loc (Label (name, 0))
let b_indent ctx loc name extra =
add_loc ctx loc (Labeled_indent (name, extra))
let dedent ctx loc =
add_loc ctx loc Dedent
let dedent_last ctx =
dedent ctx @@ last_loc_exn ctx
end
let sort ops =
List.sort ops ~cmp:Op.(fun a b -> Int.compare a.pos b.pos)
let compact_newlines (ops:Op.t list) =
Conf.debug "compact_newlines: [%s]"
(String.concat (List.map ops ~f:Op.to_string) ~sep:", ");
List.fold_left ops
~init:(None, [])
~f:(fun (count, accu) op ->
match (count, op.desc) with
| None, Newline _ -> (Some 1, accu)
| None, _ -> (None, op :: accu)
| Some n, Newline nl -> (Some (n + nl), accu)
| Some n, _ ->
let n = min n 3 in
let nl = Op.create op.pos (Newline n) in
(None, op :: nl :: accu))
|> snd
|> List.rev
let compact_pos (ops:Op.t list) =
let pos, ops = List.fold_left ops ~init:(0, [])
~f:(fun (pos, accu) op ->
let op = { op with pos = pos } in
let pos, op = match Op.length op with
| None -> (pos, op)
| Some len -> (pos + len, op)
in
(pos, op :: accu))
in
(pos, List.rev ops)
let count_indent (ops:Op.t list) =
let open Op in
let _, _, rev_ops = List.fold_left ops ~init:(0, [0], [])
~f:(fun (col, depth, accu) op ->
let col, depth, accu = match op.desc with
| Lparen | Lbrack | Lbrace | Lbin ->
(col+1, col+1 :: depth, op :: accu)
| Rparen | Rbrack | Rbrace | Rbin | Semi ->
(col+1, List.tl_exn depth, op :: accu)
| Larrow | Larrow2 | Rarrow ->
let size = List.hd_exn depth + 4 in
(col+2, size :: depth, op :: accu)
| Newline _ ->
let size = List.hd_exn depth in
let indent = Op.create op.pos (Space size) in
(size, depth, indent :: op :: accu)
| Leveled_indent ->
let size = List.length depth * 4 in
(col, size :: depth, accu)
| Aligned_indent ->
(col, col :: depth, accu)
| Labeled_indent (name, extra) ->
let found = List.find_map accu ~f:(fun op ->
match op.desc with
| Label (name2, base) when name = name2 ->
let size = base + extra in
let indent = Op.create op.pos (Space size) in
Some (col, size :: depth, indent :: accu)
| _ -> None)
in
begin match found with
| None -> failwith "labeled indent not found"
| Some accu -> accu
end
| Dedent ->
(col, List.tl_exn depth, accu)
| Label (name, _) ->
let op = Op.create op.pos (Label (name, col)) in
(col, depth, op :: accu)
| Comment _ ->
(col, depth, op :: accu)
| _ ->
let col = col + Option.value_exn (Op.length op) in
(col, depth, op :: accu)
in
Conf.debug "count_indent: col %d: depth %d: %s"
col ((List.length depth) - 1) (Op.to_string op);
(col, depth, accu))
in
List.rev rev_ops
let write len (ops:Op.t list) =
let buf = String.make (len*2) ' ' in
let replace pos s =
ignore @@ List.fold_left (String.to_list s)
~init:pos
~f:(fun pos c ->
String.set buf pos c;
pos + 1)
in
let replace_spaces pos len =
replace pos (String.make len ' ')
in
List.iter ops
~f:(fun op ->
match op.desc with
| Text s
| Comment s -> replace op.pos s
| Newline n -> replace op.pos (String.make n '\n')
| Space n -> replace_spaces op.pos n
| Lparen -> replace op.pos "("
| Rparen -> replace op.pos ")"
| Lbrack -> replace op.pos "["
| Rbrack -> replace op.pos "]"
| Lbrace -> replace op.pos "{"
| Rbrace -> replace op.pos "}"
| Lbin -> replace op.pos "<<"
| Rbin -> replace op.pos ">>"
| Semi -> replace op.pos ";"
| Comma -> replace op.pos ","
| Dot -> replace op.pos "."
| Larrow -> replace op.pos "<-"
| Larrow2 -> replace op.pos "<="
| Rarrow -> replace op.pos "->"
| Nop
| Leveled_indent
| Aligned_indent
| Label _
| Labeled_indent _
| Dedent -> ()
);
String.strip buf ^ "\n"
let parse_annots ctx =
let open Context in
List.iter (Annot.all ())
~f:(fun annot ->
match annot with
| Comment text -> comment ctx text
TODO : count \r\n , \r ,
| Newline text -> newline ctx text.loc (String.length text.desc))
let rec parse_node ctx node =
let open Ast_intf in
let open Context in
let open Located in
let open Location in
match node with
| Module m ->
List.iter m.module_decls ~f:(parse_node ctx)
| Modname_attr attr ->
text ctx attr.modname_attr_tag; (* -module *)
indent ctx attr.modname_attr_tag.loc;
lparen ctx attr.modname_attr_open;
text ctx attr.modname_attr_name;
rparen ctx attr.modname_attr_close;
dot ctx attr.modname_attr_dot;
dedent_last ctx
| Export_attr attr ->
text ctx attr.export_attr_tag; (* -export *)
indent ctx attr.export_attr_tag.loc;
lparen ctx attr.export_attr_open;
lbrack ctx attr.export_attr_fun_open;
parse_fun_sigs ctx attr.export_attr_funs;
rbrack ctx attr.export_attr_fun_close;
rparen ctx attr.export_attr_close;
dot ctx attr.export_attr_dot;
dedent_last ctx
| Import_attr attr ->
text ctx attr.import_attr_tag; (* -import *)
indent ctx attr.import_attr_tag.loc;
lparen ctx attr.import_attr_open;
text ctx attr.import_attr_module;
comma ctx attr.import_attr_comma;
space ctx attr.import_attr_comma 1;
lbrack ctx attr.import_attr_fun_open;
parse_fun_sigs ctx attr.import_attr_funs;
rbrack ctx attr.import_attr_fun_close;
rparen ctx attr.import_attr_close;
dot ctx attr.import_attr_dot;
dedent_last ctx
| Include_attr attr ->
text ctx attr.include_attr_tag; (* -include *)
indent ctx attr.include_attr_tag.loc;
lparen ctx attr.include_attr_open;
erl_string ctx attr.include_attr_file;
rparen ctx attr.include_attr_close;
dot ctx attr.include_attr_dot;
dedent_last ctx
| Inclib_attr attr ->
text ctx attr.inclib_attr_tag; (* -include_lib *)
indent ctx attr.inclib_attr_tag.loc;
lparen ctx attr.inclib_attr_open;
erl_string ctx attr.inclib_attr_file;
rparen ctx attr.inclib_attr_close;
dot ctx attr.inclib_attr_dot;
dedent_last ctx
| Define_attr attr ->
text ctx attr.def_attr_tag; (* -define *)
indent ctx attr.def_attr_tag.loc;
lparen ctx attr.def_attr_open;
let def_name = attr.def_attr_name in
text ctx def_name.def_name;
Option.iter def_name.def_args ~f:(fun args ->
lparen ctx args.enc_open;
Seplist.iter args.enc_desc ~f:(fun sep arg ->
text ctx arg;
Option.iter sep ~f:(fun sep -> comma ctx sep));
rparen ctx args.enc_close);
comma ctx attr.def_attr_comma;
space ctx attr.def_attr_comma 1;
parse_node ctx attr.def_attr_value;
rparen ctx attr.def_attr_close;
dot ctx attr.def_attr_dot;
dedent_last ctx
| Spec_attr attr ->
text ctx attr.spec_attr_tag; (* -spec *)
indent ctx attr.spec_attr_tag.loc; (* tag *)
space ctx attr.spec_attr_tag.loc 1;
begin match attr.spec_attr_mname with
| None -> ()
| Some (mname, colon) ->
text ctx mname;
string ctx colon ":"
end;
text ctx attr.spec_attr_fname;
(* spec_clauses *)
a_indent ctx attr.spec_attr_fname.loc;
Seplist.iter attr.spec_attr_clauses
~f:(fun sep clause ->
(* TODO: guard *)
lparen ctx clause.spec_clause_open;
Option.iter clause.spec_clause_args ~f:(fun args ->
Seplist.iter args ~f:(fun sep arg->
parse_spec_type ctx arg;
Option.iter sep ~f:(fun sep ->
string ctx sep ", ")
));
rparen ctx clause.spec_clause_close;
space ctx clause.spec_clause_close 1;
rarrow ctx clause.spec_clause_arrow;
space ctx clause.spec_clause_arrow 1;
parse_spec_type ctx clause.spec_clause_return;
match sep with
| Some sep -> semi ctx sep
| None -> dedent_last ctx);
dedent_last ctx;
dot ctx attr.spec_attr_dot;
dedent_last ctx (* tag *)
| Compile_attr attr ->
text ctx attr.compile_attr_tag; (* -compile *)
indent ctx attr.compile_attr_tag.loc;
lparen ctx attr.compile_attr_open;
lbrack ctx attr.compile_attr_name_open;
Seplist.iter attr.compile_attr_names
~f:(fun sep name ->
atom ctx name;
match sep with
| Some sep ->
comma ctx sep;
space ctx sep 1
| None -> ());
rbrack ctx attr.compile_attr_name_close;
rparen ctx attr.compile_attr_close;
dot ctx attr.compile_attr_dot;
dedent_last ctx
| Fun_decl decl ->
parse_fun_body ctx decl.fun_decl_body;
dot ctx decl.fun_decl_dot
| Call call ->
lparen ctx call.call_open;
begin match call.call_fname.fun_name_mname,
call.call_fname.fun_name_colon with
| Some name, Some colon ->
parse_node ctx name;
string ctx colon ":"
| _ -> ()
end;
parse_node ctx call.call_fname.fun_name_fname;
parse_node_list ctx call.call_args;
rparen ctx call.call_close
| Case case ->
string ctx case.case_begin "case";
space ctx case.case_begin 1;
parse_node ctx case.case_exp;
space ctx case.case_of 1;
string ctx case.case_of "of";
indent ctx case.case_of;
space ctx case.case_of 1;
parse_cr_clauses ctx case.case_clauses;
string ctx case.case_end "end"
| If if_ ->
string ctx if_.if_begin "if ";
indent ctx if_.if_begin;
Seplist.iter if_.if_clauses ~f:(fun sep clause ->
parse_guard ctx clause.if_clause_guard;
space ctx clause.if_clause_arrow 1;
rarrow ctx clause.if_clause_arrow;
space ctx clause.if_clause_arrow 1;
parse_node_list ctx clause.if_clause_body;
Option.iter sep ~f:(fun sep ->
semi ctx sep;
space ctx sep 1));
dedent_last ctx;
dedent_last ctx;
string ctx if_.if_end "end"
| Anon_fun fun_ ->
string ctx fun_.anon_fun_begin "fun";
label ctx fun_.anon_fun_begin `Fun;
parse_fun_body ctx fun_.anon_fun_body;
dedent_last ctx;
dedent_last ctx;
string ctx fun_.anon_fun_end "end"
| Binexp e ->
parse_node ctx e.binexp_left;
space ctx e.binexp_op.loc 1;
parse_op ctx e.binexp_op;
space ctx e.binexp_op.loc 1;
parse_node ctx e.binexp_right
| Paren paren ->
lparen ctx paren.enc_open;
parse_node ctx paren.enc_desc;
rparen ctx paren.enc_close
| Var name ->
text ctx name
| Uscore name ->
text ctx name
| Atom name ->
atom ctx name
| Char name ->
text ctx name
| Int value ->
text ctx value
| Float value ->
text ctx value
| String values ->
let len = List.length values in
List.iteri values ~f:(fun i value ->
erl_string ctx value;
if i+1 < len then
space ctx value.loc 1)
| List list ->
lbrack ctx list.list_open;
parse_node_list ctx list.list_head;
begin match list.list_bar, list.list_tail with
| Some bar, Some tail ->
string ctx bar " | ";
parse_node ctx tail
| _ -> ()
end;
rbrack ctx list.list_close
| Tuple tuple ->
lbrace ctx tuple.enc_open;
parse_node_list ctx tuple.enc_desc;
rbrace ctx tuple.enc_close
| Binary bin ->
lbin ctx bin.enc_open;
parse_node_list ctx bin.enc_desc;
rbin ctx bin.enc_close
| Binary_elt elt ->
parse_node ctx elt.bin_elt_val;
begin match elt.bin_elt_colon, elt.bin_elt_size with
| Some colon, Some size ->
string ctx colon ":";
text ctx size
| _ -> ()
end;
begin match elt.bin_elt_slash, elt.bin_elt_type with
| Some slash, Some ty ->
string ctx slash "/";
text ctx ty
| _ -> ()
end
| List_compr compr ->
parse_compr ctx compr
| List_compr_gen gen ->
parse_node ctx gen.gen_ptn;
space ctx gen.gen_arrow 1;
larrow ctx gen.gen_arrow;
space ctx gen.gen_arrow 1;
parse_node ctx gen.gen_exp;
dedent_last ctx
| Binary_compr compr ->
parse_compr ctx compr
| Binary_compr_gen gen ->
parse_node ctx gen.bin_gen_ptn;
space ctx gen.bin_gen_arrow 1;
larrow2 ctx gen.bin_gen_arrow;
space ctx gen.bin_gen_arrow 1;
parse_node ctx gen.bin_gen_exp;
dedent_last ctx
| Macro macro ->
string ctx macro.macro_q "?";
text ctx macro.macro_name
| Nop -> ()
| _ -> ()
and parse_fun_sigs ctx fsigs =
let open Context in
Seplist.iter fsigs
~f:(fun sep fsig ->
parse_fun_sig ctx fsig;
Option.iter sep ~f:(fun sep ->
string ctx sep ", "))
and parse_fun_sig ctx fsig =
let open Ast in
let open Context in
text ctx fsig.fun_sig_name;
string ctx fsig.fun_sig_sep "/";
text ctx fsig.fun_sig_arity
and parse_spec_type ctx spec =
let open Ast in
let open Context in
match spec with
| Spec_type.Paren paren ->
lparen ctx paren.enc_open;
parse_spec_type ctx paren.enc_desc;
rparen ctx paren.enc_close
| Named named ->
begin match (named.named_module, named.named_colon) with
| Some mname, Some colon ->
text ctx mname;
string ctx colon ":"
| _ -> ()
end;
text ctx named.named_name;
lparen ctx named.named_open;
Option.iter named.named_args ~f:(fun args ->
Seplist.iter args
~f:(fun sep arg ->
parse_spec_type ctx arg;
Option.iter sep ~f:(fun sep -> string ctx sep ", ")));
rparen ctx named.named_close
| Atom name ->
atom ctx name
| List spec ->
lbrack ctx spec.enc_open;
parse_spec_type ctx spec.enc_desc;
rbrack ctx spec.enc_close
| Union spec ->
parse_spec_type ctx spec.union_left;
string ctx spec.union_op " | ";
parse_spec_type ctx spec.union_right
| _ -> ()
and parse_fun_body ctx body =
let open Context in
Seplist.iter body
~f:(fun sep clause ->
parse_fun_clause ctx clause;
Option.iter sep ~f:(fun sep ->
semi ctx sep));
dedent_last ctx
and parse_fun_clause ctx clause =
let open Context in
let is_anon = Option.is_none clause.fun_clause_name in
Option.iter clause.fun_clause_name ~f:(text ctx);
lparen ctx clause.fun_clause_open;
parse_node_list ctx clause.fun_clause_ptns;
rparen ctx clause.fun_clause_close;
space ctx clause.fun_clause_close 1;
begin match clause.fun_clause_when, clause.fun_clause_guard with
| Some when_, Some guard ->
string ctx when_ "when";
space ctx when_ 1;
parse_guard ctx guard;
space ctx clause.fun_clause_arrow 1;
| _ -> ()
end;
rarrow ctx clause.fun_clause_arrow;
space ctx clause.fun_clause_arrow 1;
if is_anon then
b_indent ctx clause.fun_clause_arrow `Fun 4;
parse_node_list ctx clause.fun_clause_body
and parse_guard ctx guard =
let open Context in
Seplist.iter guard
~f:(fun sep es ->
parse_node_list ctx es;
Option.iter sep ~f:(fun sep ->
semi ctx sep));
and parse_cr_clauses ctx clauses =
let open Context in
Seplist.iter clauses
~f:(fun sep clause ->
parse_cr_clause ctx clause;
match sep with
| Some sep -> semi ctx sep
| None -> dedent_last ctx);
dedent_last ctx
and parse_cr_clause ctx clause =
let open Context in
parse_node ctx clause.cr_clause_ptn;
begin match clause.cr_clause_when, clause.cr_clause_guard with
| Some when_, Some guard ->
space ctx when_ 1;
string ctx when_ "when";
space ctx when_ 1;
parse_guard ctx guard
| _ -> ()
end;
space ctx clause.cr_clause_arrow 1;
rarrow ctx clause.cr_clause_arrow;
parse_node_list ctx clause.cr_clause_body
and parse_compr ctx compr =
let open Context in
lbin ctx compr.compr_open;
space ctx compr.compr_open 1;
parse_node ctx compr.compr_exp;
string ctx compr.compr_sep " || ";
parse_node_list ctx compr.compr_quals;
space ctx compr.compr_close 1;
rbin ctx compr.compr_close
and parse_node_list ctx es =
let open Context in
Seplist.iter es
~f:(fun sep e ->
parse_node ctx e;
Option.iter sep ~f:(fun sep ->
comma ctx sep;
space ctx sep 1))
and parse_op ctx op =
let open Context in
let s = match op.desc with
| Op_pos -> "+"
| Op_neg -> "-"
| Op_not -> "not"
| Op_lnot -> "bnot"
| Op_eq -> "="
| Op_ep -> "!"
| Op_eqq -> "=="
| Op_ne -> "/="
| Op_le -> "=<"
| Op_lt -> "<"
| Op_ge -> ">="
| Op_gt -> ">"
| Op_xeq -> "=:="
| Op_xne -> "=/="
| Op_list_add -> "++"
| Op_list_diff -> "--"
| Op_add -> "+"
| Op_sub -> "-"
| Op_mul -> "*"
| Op_div -> "/"
| Op_quo -> "div"
| Op_rem -> "rem"
| Op_and -> "and"
| Op_andalso -> "andalso"
| Op_or -> "or"
| Op_orelse -> "orelse"
| Op_xor -> "xor"
| Op_sand -> "andalso"
| Op_sor -> "orelse"
| Op_land -> "band"
| Op_lor -> "bor"
| Op_lxor -> "bxor"
| Op_lshift -> "bsl"
| Op_rshift -> "bsr"
in
string ctx op.loc s
let format file node =
let ctx = Context.create file in
parse_annots ctx;
parse_node ctx node;
let len, ops =
List.rev ctx.ops
|> sort
|> compact_newlines
|> count_indent
|> compact_pos
in
Conf.debug "[%s]" (String.concat (List.map ops ~f:Op.to_string) ~sep:", ");
(ctx.file.contents, write len ops)
| null | https://raw.githubusercontent.com/shiguredo/eryngii/6c70d9b28a45ed786c4847ee51bb03bfef35ac8d/formatter.ml | ocaml | <<
>>
-module
-export
-import
-include
-include_lib
-define
-spec
tag
spec_clauses
TODO: guard
tag
-compile | open Core.Std
open Located
module Op = struct
type desc =
| Nop
| Text of string
| Comment of string
| Space of int
| Newline of int
| Lparen
| Rparen
| Lbrack
| Rbrack
| Lbrace
| Rbrace
| Leveled_indent
| Aligned_indent
| Label of [`Fun] * int
| Labeled_indent of [`Fun] * int
| Dedent
| Semi
| Comma
| Dot
| Larrow
| Larrow2
| Rarrow
type t = {
pos : int;
desc : desc;
}
let create pos desc =
{ pos; desc }
let of_loc loc desc =
{ pos = Location.offset loc; desc }
let space pos len =
create pos (Space len)
let length op =
match op.desc with
| Nop
| Leveled_indent
| Aligned_indent
| Label _
| Labeled_indent _
| Dedent -> None
| Text s
| Comment s -> Some (String.length s)
| Space n
| Newline n -> Some n
| Lparen
| Rparen
| Lbrack
| Rbrack
| Lbrace
| Rbrace
| Semi
| Comma
| Dot -> Some 1
| Lbin
| Rbin
| Larrow
| Larrow2
| Rarrow -> Some 2
let length_exn op =
Option.value_exn (length op)
let length_zero op =
Option.value (length op) ~default:0
let add_pos op len =
{ op with pos = op.pos + len }
let add_pos_of op other =
add_pos op @@ length_exn other
let to_string op =
let open Printf in
match op.desc with
| Nop -> "nop"
| Text s -> sprintf "text(\"%s\")" s
| Comment s -> sprintf "comment(\"%s\")" s
| Space n -> sprintf "space(%d)" n
| Newline n -> sprintf "nl(%d)" n
| Lparen -> "'('"
| Rparen -> "')'"
| Lbrack -> "'['"
| Rbrack -> "']'"
| Lbrace -> "'{'"
| Rbrace -> "'}'"
| Lbin -> "'<<'"
| Rbin -> "'>>'"
| Semi -> "';'"
| Comma -> "','"
| Dot -> "'.'"
| Larrow -> "'<-'"
| Larrow2 -> "'<='"
| Rarrow -> "'->'"
| Leveled_indent -> "l_indent"
| Aligned_indent -> "a_indent"
| Label _ -> "label"
| Labeled_indent _ -> "b_indent"
| Dedent -> "dedent"
end
module Context = struct
type t = {
file : File.t;
mutable ops : Op.t list;
mutable indent : int list;
mutable count : int option;
}
let create file =
{ file;
ops = [];
indent = [0];
count = None;
}
let contents ctx =
List.rev ctx.ops
let clear ctx =
ctx.ops <- []
let start_count ctx =
match ctx.count with
| Some _ -> failwith "already start count"
| None -> ctx.count <- Some 0
let end_count ctx =
match ctx.count with
| None -> failwith "not start count"
| Some count ->
ctx.count <- None;
count
let count ctx =
Option.value_exn ctx.count
let last_pos ctx =
match List.hd ctx.ops with
| None -> None
| Some op -> Some op.pos
let last_pos_exn ctx =
Option.value_exn (last_pos ctx)
let last_loc_exn ctx =
let pos = last_pos_exn ctx in
let pos = { Position.line = 0; col = 0; offset = pos } in
Location.create pos pos
let add ctx op =
ctx.ops <- op :: ctx.ops
let add_loc ctx loc desc =
add ctx @@ Op.of_loc loc desc
let string ctx loc text =
add_loc ctx loc (Op.Text text)
let text ctx text =
add_loc ctx text.loc (Op.Text text.desc)
let atom ctx atom =
match atom with
| `Unenclosed name ->
text ctx name
| `Enclosed name ->
string ctx name.loc "'";
text ctx name;
string ctx name.loc "'"
let erl_string ctx s =
string ctx s.loc "\"";
text ctx s;
string ctx s.loc "\""
let comment ctx text =
let len = String.length text.desc in
let buf = Buffer.create (len+1) in
let body = String.lstrip text.desc ~drop:(fun c -> c = '%') in
let sign = String.make (len - String.length body) '%' in
let body = String.strip body in
Buffer.add_string buf sign;
Buffer.add_string buf " ";
Buffer.add_string buf body;
add_loc ctx text.loc (Op.Comment (Buffer.contents buf))
let space ctx loc n =
add_loc ctx loc (Space n)
let newline ctx loc n =
add_loc ctx loc (Newline n)
let lparen ctx loc =
add_loc ctx loc Lparen
let rparen ctx loc =
add_loc ctx loc Rparen
let lbrack ctx loc =
add_loc ctx loc Lbrack
let rbrack ctx loc =
add_loc ctx loc Rbrack
let lbrace ctx loc =
add_loc ctx loc Lbrace
let rbrace ctx loc =
add_loc ctx loc Rbrace
let lbin ctx loc =
add_loc ctx loc Lbin
let rbin ctx loc =
add_loc ctx loc Rbin
let semi ctx loc =
add_loc ctx loc Semi
let comma ctx loc =
add_loc ctx loc Comma
let dot ctx loc =
add_loc ctx loc Dot
let larrow ctx loc =
add_loc ctx loc Larrow
let larrow2 ctx loc =
add_loc ctx loc Larrow2
let rarrow ctx loc =
add_loc ctx loc Rarrow
let indent ctx loc =
add_loc ctx loc Leveled_indent
let a_indent ctx loc =
add_loc ctx loc Aligned_indent
let label ctx loc name =
add_loc ctx loc (Label (name, 0))
let b_indent ctx loc name extra =
add_loc ctx loc (Labeled_indent (name, extra))
let dedent ctx loc =
add_loc ctx loc Dedent
let dedent_last ctx =
dedent ctx @@ last_loc_exn ctx
end
let sort ops =
List.sort ops ~cmp:Op.(fun a b -> Int.compare a.pos b.pos)
let compact_newlines (ops:Op.t list) =
Conf.debug "compact_newlines: [%s]"
(String.concat (List.map ops ~f:Op.to_string) ~sep:", ");
List.fold_left ops
~init:(None, [])
~f:(fun (count, accu) op ->
match (count, op.desc) with
| None, Newline _ -> (Some 1, accu)
| None, _ -> (None, op :: accu)
| Some n, Newline nl -> (Some (n + nl), accu)
| Some n, _ ->
let n = min n 3 in
let nl = Op.create op.pos (Newline n) in
(None, op :: nl :: accu))
|> snd
|> List.rev
let compact_pos (ops:Op.t list) =
let pos, ops = List.fold_left ops ~init:(0, [])
~f:(fun (pos, accu) op ->
let op = { op with pos = pos } in
let pos, op = match Op.length op with
| None -> (pos, op)
| Some len -> (pos + len, op)
in
(pos, op :: accu))
in
(pos, List.rev ops)
let count_indent (ops:Op.t list) =
let open Op in
let _, _, rev_ops = List.fold_left ops ~init:(0, [0], [])
~f:(fun (col, depth, accu) op ->
let col, depth, accu = match op.desc with
| Lparen | Lbrack | Lbrace | Lbin ->
(col+1, col+1 :: depth, op :: accu)
| Rparen | Rbrack | Rbrace | Rbin | Semi ->
(col+1, List.tl_exn depth, op :: accu)
| Larrow | Larrow2 | Rarrow ->
let size = List.hd_exn depth + 4 in
(col+2, size :: depth, op :: accu)
| Newline _ ->
let size = List.hd_exn depth in
let indent = Op.create op.pos (Space size) in
(size, depth, indent :: op :: accu)
| Leveled_indent ->
let size = List.length depth * 4 in
(col, size :: depth, accu)
| Aligned_indent ->
(col, col :: depth, accu)
| Labeled_indent (name, extra) ->
let found = List.find_map accu ~f:(fun op ->
match op.desc with
| Label (name2, base) when name = name2 ->
let size = base + extra in
let indent = Op.create op.pos (Space size) in
Some (col, size :: depth, indent :: accu)
| _ -> None)
in
begin match found with
| None -> failwith "labeled indent not found"
| Some accu -> accu
end
| Dedent ->
(col, List.tl_exn depth, accu)
| Label (name, _) ->
let op = Op.create op.pos (Label (name, col)) in
(col, depth, op :: accu)
| Comment _ ->
(col, depth, op :: accu)
| _ ->
let col = col + Option.value_exn (Op.length op) in
(col, depth, op :: accu)
in
Conf.debug "count_indent: col %d: depth %d: %s"
col ((List.length depth) - 1) (Op.to_string op);
(col, depth, accu))
in
List.rev rev_ops
let write len (ops:Op.t list) =
let buf = String.make (len*2) ' ' in
let replace pos s =
ignore @@ List.fold_left (String.to_list s)
~init:pos
~f:(fun pos c ->
String.set buf pos c;
pos + 1)
in
let replace_spaces pos len =
replace pos (String.make len ' ')
in
List.iter ops
~f:(fun op ->
match op.desc with
| Text s
| Comment s -> replace op.pos s
| Newline n -> replace op.pos (String.make n '\n')
| Space n -> replace_spaces op.pos n
| Lparen -> replace op.pos "("
| Rparen -> replace op.pos ")"
| Lbrack -> replace op.pos "["
| Rbrack -> replace op.pos "]"
| Lbrace -> replace op.pos "{"
| Rbrace -> replace op.pos "}"
| Lbin -> replace op.pos "<<"
| Rbin -> replace op.pos ">>"
| Semi -> replace op.pos ";"
| Comma -> replace op.pos ","
| Dot -> replace op.pos "."
| Larrow -> replace op.pos "<-"
| Larrow2 -> replace op.pos "<="
| Rarrow -> replace op.pos "->"
| Nop
| Leveled_indent
| Aligned_indent
| Label _
| Labeled_indent _
| Dedent -> ()
);
String.strip buf ^ "\n"
let parse_annots ctx =
let open Context in
List.iter (Annot.all ())
~f:(fun annot ->
match annot with
| Comment text -> comment ctx text
TODO : count \r\n , \r ,
| Newline text -> newline ctx text.loc (String.length text.desc))
let rec parse_node ctx node =
let open Ast_intf in
let open Context in
let open Located in
let open Location in
match node with
| Module m ->
List.iter m.module_decls ~f:(parse_node ctx)
| Modname_attr attr ->
indent ctx attr.modname_attr_tag.loc;
lparen ctx attr.modname_attr_open;
text ctx attr.modname_attr_name;
rparen ctx attr.modname_attr_close;
dot ctx attr.modname_attr_dot;
dedent_last ctx
| Export_attr attr ->
indent ctx attr.export_attr_tag.loc;
lparen ctx attr.export_attr_open;
lbrack ctx attr.export_attr_fun_open;
parse_fun_sigs ctx attr.export_attr_funs;
rbrack ctx attr.export_attr_fun_close;
rparen ctx attr.export_attr_close;
dot ctx attr.export_attr_dot;
dedent_last ctx
| Import_attr attr ->
indent ctx attr.import_attr_tag.loc;
lparen ctx attr.import_attr_open;
text ctx attr.import_attr_module;
comma ctx attr.import_attr_comma;
space ctx attr.import_attr_comma 1;
lbrack ctx attr.import_attr_fun_open;
parse_fun_sigs ctx attr.import_attr_funs;
rbrack ctx attr.import_attr_fun_close;
rparen ctx attr.import_attr_close;
dot ctx attr.import_attr_dot;
dedent_last ctx
| Include_attr attr ->
indent ctx attr.include_attr_tag.loc;
lparen ctx attr.include_attr_open;
erl_string ctx attr.include_attr_file;
rparen ctx attr.include_attr_close;
dot ctx attr.include_attr_dot;
dedent_last ctx
| Inclib_attr attr ->
indent ctx attr.inclib_attr_tag.loc;
lparen ctx attr.inclib_attr_open;
erl_string ctx attr.inclib_attr_file;
rparen ctx attr.inclib_attr_close;
dot ctx attr.inclib_attr_dot;
dedent_last ctx
| Define_attr attr ->
indent ctx attr.def_attr_tag.loc;
lparen ctx attr.def_attr_open;
let def_name = attr.def_attr_name in
text ctx def_name.def_name;
Option.iter def_name.def_args ~f:(fun args ->
lparen ctx args.enc_open;
Seplist.iter args.enc_desc ~f:(fun sep arg ->
text ctx arg;
Option.iter sep ~f:(fun sep -> comma ctx sep));
rparen ctx args.enc_close);
comma ctx attr.def_attr_comma;
space ctx attr.def_attr_comma 1;
parse_node ctx attr.def_attr_value;
rparen ctx attr.def_attr_close;
dot ctx attr.def_attr_dot;
dedent_last ctx
| Spec_attr attr ->
space ctx attr.spec_attr_tag.loc 1;
begin match attr.spec_attr_mname with
| None -> ()
| Some (mname, colon) ->
text ctx mname;
string ctx colon ":"
end;
text ctx attr.spec_attr_fname;
a_indent ctx attr.spec_attr_fname.loc;
Seplist.iter attr.spec_attr_clauses
~f:(fun sep clause ->
lparen ctx clause.spec_clause_open;
Option.iter clause.spec_clause_args ~f:(fun args ->
Seplist.iter args ~f:(fun sep arg->
parse_spec_type ctx arg;
Option.iter sep ~f:(fun sep ->
string ctx sep ", ")
));
rparen ctx clause.spec_clause_close;
space ctx clause.spec_clause_close 1;
rarrow ctx clause.spec_clause_arrow;
space ctx clause.spec_clause_arrow 1;
parse_spec_type ctx clause.spec_clause_return;
match sep with
| Some sep -> semi ctx sep
| None -> dedent_last ctx);
dedent_last ctx;
dot ctx attr.spec_attr_dot;
| Compile_attr attr ->
indent ctx attr.compile_attr_tag.loc;
lparen ctx attr.compile_attr_open;
lbrack ctx attr.compile_attr_name_open;
Seplist.iter attr.compile_attr_names
~f:(fun sep name ->
atom ctx name;
match sep with
| Some sep ->
comma ctx sep;
space ctx sep 1
| None -> ());
rbrack ctx attr.compile_attr_name_close;
rparen ctx attr.compile_attr_close;
dot ctx attr.compile_attr_dot;
dedent_last ctx
| Fun_decl decl ->
parse_fun_body ctx decl.fun_decl_body;
dot ctx decl.fun_decl_dot
| Call call ->
lparen ctx call.call_open;
begin match call.call_fname.fun_name_mname,
call.call_fname.fun_name_colon with
| Some name, Some colon ->
parse_node ctx name;
string ctx colon ":"
| _ -> ()
end;
parse_node ctx call.call_fname.fun_name_fname;
parse_node_list ctx call.call_args;
rparen ctx call.call_close
| Case case ->
string ctx case.case_begin "case";
space ctx case.case_begin 1;
parse_node ctx case.case_exp;
space ctx case.case_of 1;
string ctx case.case_of "of";
indent ctx case.case_of;
space ctx case.case_of 1;
parse_cr_clauses ctx case.case_clauses;
string ctx case.case_end "end"
| If if_ ->
string ctx if_.if_begin "if ";
indent ctx if_.if_begin;
Seplist.iter if_.if_clauses ~f:(fun sep clause ->
parse_guard ctx clause.if_clause_guard;
space ctx clause.if_clause_arrow 1;
rarrow ctx clause.if_clause_arrow;
space ctx clause.if_clause_arrow 1;
parse_node_list ctx clause.if_clause_body;
Option.iter sep ~f:(fun sep ->
semi ctx sep;
space ctx sep 1));
dedent_last ctx;
dedent_last ctx;
string ctx if_.if_end "end"
| Anon_fun fun_ ->
string ctx fun_.anon_fun_begin "fun";
label ctx fun_.anon_fun_begin `Fun;
parse_fun_body ctx fun_.anon_fun_body;
dedent_last ctx;
dedent_last ctx;
string ctx fun_.anon_fun_end "end"
| Binexp e ->
parse_node ctx e.binexp_left;
space ctx e.binexp_op.loc 1;
parse_op ctx e.binexp_op;
space ctx e.binexp_op.loc 1;
parse_node ctx e.binexp_right
| Paren paren ->
lparen ctx paren.enc_open;
parse_node ctx paren.enc_desc;
rparen ctx paren.enc_close
| Var name ->
text ctx name
| Uscore name ->
text ctx name
| Atom name ->
atom ctx name
| Char name ->
text ctx name
| Int value ->
text ctx value
| Float value ->
text ctx value
| String values ->
let len = List.length values in
List.iteri values ~f:(fun i value ->
erl_string ctx value;
if i+1 < len then
space ctx value.loc 1)
| List list ->
lbrack ctx list.list_open;
parse_node_list ctx list.list_head;
begin match list.list_bar, list.list_tail with
| Some bar, Some tail ->
string ctx bar " | ";
parse_node ctx tail
| _ -> ()
end;
rbrack ctx list.list_close
| Tuple tuple ->
lbrace ctx tuple.enc_open;
parse_node_list ctx tuple.enc_desc;
rbrace ctx tuple.enc_close
| Binary bin ->
lbin ctx bin.enc_open;
parse_node_list ctx bin.enc_desc;
rbin ctx bin.enc_close
| Binary_elt elt ->
parse_node ctx elt.bin_elt_val;
begin match elt.bin_elt_colon, elt.bin_elt_size with
| Some colon, Some size ->
string ctx colon ":";
text ctx size
| _ -> ()
end;
begin match elt.bin_elt_slash, elt.bin_elt_type with
| Some slash, Some ty ->
string ctx slash "/";
text ctx ty
| _ -> ()
end
| List_compr compr ->
parse_compr ctx compr
| List_compr_gen gen ->
parse_node ctx gen.gen_ptn;
space ctx gen.gen_arrow 1;
larrow ctx gen.gen_arrow;
space ctx gen.gen_arrow 1;
parse_node ctx gen.gen_exp;
dedent_last ctx
| Binary_compr compr ->
parse_compr ctx compr
| Binary_compr_gen gen ->
parse_node ctx gen.bin_gen_ptn;
space ctx gen.bin_gen_arrow 1;
larrow2 ctx gen.bin_gen_arrow;
space ctx gen.bin_gen_arrow 1;
parse_node ctx gen.bin_gen_exp;
dedent_last ctx
| Macro macro ->
string ctx macro.macro_q "?";
text ctx macro.macro_name
| Nop -> ()
| _ -> ()
and parse_fun_sigs ctx fsigs =
let open Context in
Seplist.iter fsigs
~f:(fun sep fsig ->
parse_fun_sig ctx fsig;
Option.iter sep ~f:(fun sep ->
string ctx sep ", "))
and parse_fun_sig ctx fsig =
let open Ast in
let open Context in
text ctx fsig.fun_sig_name;
string ctx fsig.fun_sig_sep "/";
text ctx fsig.fun_sig_arity
and parse_spec_type ctx spec =
let open Ast in
let open Context in
match spec with
| Spec_type.Paren paren ->
lparen ctx paren.enc_open;
parse_spec_type ctx paren.enc_desc;
rparen ctx paren.enc_close
| Named named ->
begin match (named.named_module, named.named_colon) with
| Some mname, Some colon ->
text ctx mname;
string ctx colon ":"
| _ -> ()
end;
text ctx named.named_name;
lparen ctx named.named_open;
Option.iter named.named_args ~f:(fun args ->
Seplist.iter args
~f:(fun sep arg ->
parse_spec_type ctx arg;
Option.iter sep ~f:(fun sep -> string ctx sep ", ")));
rparen ctx named.named_close
| Atom name ->
atom ctx name
| List spec ->
lbrack ctx spec.enc_open;
parse_spec_type ctx spec.enc_desc;
rbrack ctx spec.enc_close
| Union spec ->
parse_spec_type ctx spec.union_left;
string ctx spec.union_op " | ";
parse_spec_type ctx spec.union_right
| _ -> ()
and parse_fun_body ctx body =
let open Context in
Seplist.iter body
~f:(fun sep clause ->
parse_fun_clause ctx clause;
Option.iter sep ~f:(fun sep ->
semi ctx sep));
dedent_last ctx
and parse_fun_clause ctx clause =
let open Context in
let is_anon = Option.is_none clause.fun_clause_name in
Option.iter clause.fun_clause_name ~f:(text ctx);
lparen ctx clause.fun_clause_open;
parse_node_list ctx clause.fun_clause_ptns;
rparen ctx clause.fun_clause_close;
space ctx clause.fun_clause_close 1;
begin match clause.fun_clause_when, clause.fun_clause_guard with
| Some when_, Some guard ->
string ctx when_ "when";
space ctx when_ 1;
parse_guard ctx guard;
space ctx clause.fun_clause_arrow 1;
| _ -> ()
end;
rarrow ctx clause.fun_clause_arrow;
space ctx clause.fun_clause_arrow 1;
if is_anon then
b_indent ctx clause.fun_clause_arrow `Fun 4;
parse_node_list ctx clause.fun_clause_body
and parse_guard ctx guard =
let open Context in
Seplist.iter guard
~f:(fun sep es ->
parse_node_list ctx es;
Option.iter sep ~f:(fun sep ->
semi ctx sep));
and parse_cr_clauses ctx clauses =
let open Context in
Seplist.iter clauses
~f:(fun sep clause ->
parse_cr_clause ctx clause;
match sep with
| Some sep -> semi ctx sep
| None -> dedent_last ctx);
dedent_last ctx
and parse_cr_clause ctx clause =
let open Context in
parse_node ctx clause.cr_clause_ptn;
begin match clause.cr_clause_when, clause.cr_clause_guard with
| Some when_, Some guard ->
space ctx when_ 1;
string ctx when_ "when";
space ctx when_ 1;
parse_guard ctx guard
| _ -> ()
end;
space ctx clause.cr_clause_arrow 1;
rarrow ctx clause.cr_clause_arrow;
parse_node_list ctx clause.cr_clause_body
and parse_compr ctx compr =
let open Context in
lbin ctx compr.compr_open;
space ctx compr.compr_open 1;
parse_node ctx compr.compr_exp;
string ctx compr.compr_sep " || ";
parse_node_list ctx compr.compr_quals;
space ctx compr.compr_close 1;
rbin ctx compr.compr_close
and parse_node_list ctx es =
let open Context in
Seplist.iter es
~f:(fun sep e ->
parse_node ctx e;
Option.iter sep ~f:(fun sep ->
comma ctx sep;
space ctx sep 1))
and parse_op ctx op =
let open Context in
let s = match op.desc with
| Op_pos -> "+"
| Op_neg -> "-"
| Op_not -> "not"
| Op_lnot -> "bnot"
| Op_eq -> "="
| Op_ep -> "!"
| Op_eqq -> "=="
| Op_ne -> "/="
| Op_le -> "=<"
| Op_lt -> "<"
| Op_ge -> ">="
| Op_gt -> ">"
| Op_xeq -> "=:="
| Op_xne -> "=/="
| Op_list_add -> "++"
| Op_list_diff -> "--"
| Op_add -> "+"
| Op_sub -> "-"
| Op_mul -> "*"
| Op_div -> "/"
| Op_quo -> "div"
| Op_rem -> "rem"
| Op_and -> "and"
| Op_andalso -> "andalso"
| Op_or -> "or"
| Op_orelse -> "orelse"
| Op_xor -> "xor"
| Op_sand -> "andalso"
| Op_sor -> "orelse"
| Op_land -> "band"
| Op_lor -> "bor"
| Op_lxor -> "bxor"
| Op_lshift -> "bsl"
| Op_rshift -> "bsr"
in
string ctx op.loc s
let format file node =
let ctx = Context.create file in
parse_annots ctx;
parse_node ctx node;
let len, ops =
List.rev ctx.ops
|> sort
|> compact_newlines
|> count_indent
|> compact_pos
in
Conf.debug "[%s]" (String.concat (List.map ops ~f:Op.to_string) ~sep:", ");
(ctx.file.contents, write len ops)
|
99fc26c0f2de8e83b6d5518aac6ae1113b2e51c55be5fb5a5933c08da8ccb119 | JeffreyBenjaminBrown/digraphs-with-text | relevant-types.hs | dwtDfs_unlim :: RSLT -> (SearchVar,QRelspec) -> [Node] -> Either String [Node]
insRelspec :: QRelspec -> RSLT -> Either DwtErr RSLT
type QRelspec = Map.Map RelRole QNodeOrVar
the others are flexible , but the TpltRole must map to a QNodeSpec
data QNodeOrVar = QVarSpec SearchVar | QNodeSpec Node deriving(Show,Read,Eq,Ord)
data RelRole = TpltRole | Mbr MbrPos deriving(Show,Read,Eq,Ord)
data SearchVar = It | Any | Up | Down
| null | https://raw.githubusercontent.com/JeffreyBenjaminBrown/digraphs-with-text/34e47a52aa9abb6fd42028deba1623a92e278aae/howto/search%2Crecursive/relevant-types.hs | haskell | dwtDfs_unlim :: RSLT -> (SearchVar,QRelspec) -> [Node] -> Either String [Node]
insRelspec :: QRelspec -> RSLT -> Either DwtErr RSLT
type QRelspec = Map.Map RelRole QNodeOrVar
the others are flexible , but the TpltRole must map to a QNodeSpec
data QNodeOrVar = QVarSpec SearchVar | QNodeSpec Node deriving(Show,Read,Eq,Ord)
data RelRole = TpltRole | Mbr MbrPos deriving(Show,Read,Eq,Ord)
data SearchVar = It | Any | Up | Down
| |
ecaaa857ec23de69c98271b0f27a490a2bd0bf863a7713c080e4179236441072 | andrenth/routemachine | rtm_sup.erl | -module(rtm_sup).
-include_lib("routemachine.hrl").
-include_lib("session.hrl").
-export([start_link/2]).
-export([init/1]).
start_link(Config, Sessions) ->
supervisor:start_link({local, ?MODULE}, ?MODULE, {Config, Sessions}).
init({Config, Sessions}) ->
ListenPort = rtm_config:get(listen_port, Config, ?DEFAULT_PORT),
Networks = rtm_config:networks(Config),
SockOpts = [binary, {reuseaddr, true}, {packet, raw}, {active, false}],
{ok, ListenSocket} = gen_tcp:listen(ListenPort, SockOpts),
ActiveSessions = dict:filter(fun is_active/2, Sessions),
pg2:create(updaters),
ChildSpecs = [
{rtm_rib,
{rtm_rib, start_link, []},
permanent,
2000,
worker,
[rtm_rib]},
{rtm_watcher,
{rtm_watcher, start_link, [Networks]},
permanent,
2000,
worker,
[rtm_watcher]},
{rtm_server_sup,
{rtm_server_sup, start_link, []},
permanent,
infinity,
supervisor,
[rtm_server_sup]},
{rtm_fsm_sup,
{rtm_fsm_sup, start_link, [ActiveSessions]},
permanent,
infinity,
supervisor,
[rtm_fsm_sup]},
{rtm_updater_sup,
{rtm_updater_sup, start_link, []},
permanent,
infinity,
supervisor,
[rtm_updater_sup]},
{rtm_acceptor,
{rtm_acceptor, start_link, [ListenSocket, Sessions]},
permanent,
brutal_kill,
worker,
[rtm_acceptor]}
],
{ok, {{one_for_one, 1, 1}, ChildSpecs}}.
is_active(_IP, #session{establishment = active}) -> true;
is_active(_IP, #session{establishment = {passive, _Socket}}) -> false.
| null | https://raw.githubusercontent.com/andrenth/routemachine/97fbc4997ac9bbe7d14c2b174aa84bc4a2fd5d20/src/rtm_sup.erl | erlang | -module(rtm_sup).
-include_lib("routemachine.hrl").
-include_lib("session.hrl").
-export([start_link/2]).
-export([init/1]).
start_link(Config, Sessions) ->
supervisor:start_link({local, ?MODULE}, ?MODULE, {Config, Sessions}).
init({Config, Sessions}) ->
ListenPort = rtm_config:get(listen_port, Config, ?DEFAULT_PORT),
Networks = rtm_config:networks(Config),
SockOpts = [binary, {reuseaddr, true}, {packet, raw}, {active, false}],
{ok, ListenSocket} = gen_tcp:listen(ListenPort, SockOpts),
ActiveSessions = dict:filter(fun is_active/2, Sessions),
pg2:create(updaters),
ChildSpecs = [
{rtm_rib,
{rtm_rib, start_link, []},
permanent,
2000,
worker,
[rtm_rib]},
{rtm_watcher,
{rtm_watcher, start_link, [Networks]},
permanent,
2000,
worker,
[rtm_watcher]},
{rtm_server_sup,
{rtm_server_sup, start_link, []},
permanent,
infinity,
supervisor,
[rtm_server_sup]},
{rtm_fsm_sup,
{rtm_fsm_sup, start_link, [ActiveSessions]},
permanent,
infinity,
supervisor,
[rtm_fsm_sup]},
{rtm_updater_sup,
{rtm_updater_sup, start_link, []},
permanent,
infinity,
supervisor,
[rtm_updater_sup]},
{rtm_acceptor,
{rtm_acceptor, start_link, [ListenSocket, Sessions]},
permanent,
brutal_kill,
worker,
[rtm_acceptor]}
],
{ok, {{one_for_one, 1, 1}, ChildSpecs}}.
is_active(_IP, #session{establishment = active}) -> true;
is_active(_IP, #session{establishment = {passive, _Socket}}) -> false.
| |
cfc4982ca1a1be11408ff2d5706efb8ade128d4ed04e75f6d7691513ac2fc39d | sosy-lab/tbf | llvmgen.inferred.mli | module H = Hashtbl
module S = String
exception NotConstant
type llvmBlock = {
lblabel : string;
mutable lbbody : llvmInstruction list;
mutable lbterminator : llvmTerminator;
mutable lbpreds : llvmBlock list;
}
and llvmInstruction = {
mutable liresult : llvmLocal option;
liop : llvmOp;
mutable liargs : llvmValue list;
}
and llvmTerminator =
TUnreachable
| TDead
| TRet of llvmValue list
| TBranch of llvmBlock
| TCond of llvmValue * llvmBlock * llvmBlock
| TSwitch of llvmValue * llvmBlock * (int64 * llvmBlock) list
and llvmValue =
LGlobal of llvmGlobal
| LLocal of llvmLocal
| LBool of bool
| LInt of int64 * Cil.ikind
| LFloat of float * Cil.fkind
| LUndef
| LZero
| LNull of llvmType
| LPhi of llvmValue * llvmBlock
| LType of llvmType
| LGetelementptr of llvmValue list
| LCast of llvmCast * llvmValue * llvmType
| LBinary of llvmBinop * llvmValue * llvmValue * llvmType
| LCmp of llvmCmp * llvmValue * llvmValue
| LFcmp of llvmFCmp * llvmValue * llvmValue
| LSelect of llvmValue * llvmValue * llvmValue
and llvmLocal = string * llvmType
and llvmGlobal = string * llvmType
and llvmType = Cil.typ
and llvmOp =
LIassign
| LIphi
| LIgetelementptr
| LIload
| LIstore
| LIcall
| LIalloca
| LIbinary of llvmBinop
| LIcmp of llvmCmp
| LIfcmp of llvmFCmp
| LIselect
| LIcast of llvmCast
| LIva_arg
and llvmBinop =
LBadd
| LBsub
| LBmul
| LBudiv
| LBsdiv
| LBfdiv
| LBurem
| LBsrem
| LBfrem
| LBshl
| LBlshr
| LBashr
| LBand
| LBor
| LBxor
and llvmCmp =
LCeq
| LCne
| LCslt
| LCult
| LCsle
| LCule
| LCsgt
| LCugt
| LCsge
| LCuge
and llvmFCmp =
LCFoeq
| LCFone
| LCFolt
| LCFole
| LCFogt
| LCFoge
| LCFord
| LCFueq
| LCFune
| LCFult
| LCFule
| LCFugt
| LCFuge
and llvmCast =
LAtrunc
| LAzext
| LAsext
| LAuitofp
| LAsitofp
| LAfptoui
| LAfptosi
| LAfptrunc
| LAfpext
| LAinttoptr
| LAptrtoint
| LAbitcast
val binopName : llvmBinop -> string
val cmpName : llvmCmp -> string
val fcmpName : llvmFCmp -> string
val castName : llvmCast -> string
val i1Type : Cil.typ
val i32Type : Cil.typ
val i8starType : Cil.typ
val llvmTypeOf : llvmValue -> llvmType
val llvmLocalType : Cil.typ -> bool
val llvmUseLocal : Cil.varinfo -> bool
val llvmDoNotUseLocal : Cil.varinfo -> bool
val llvmDestinations : llvmTerminator -> llvmBlock list
val llvmValueEqual : llvmValue -> llvmValue -> bool
val llocal : Cil.varinfo -> llvmLocal
val lglobal : Cil.varinfo -> llvmGlobal
val lvar : Cil.varinfo -> llvmValue
val lint : int -> Cil.typ -> llvmValue
val lzero : Cil.typ -> llvmValue
val mkIns : llvmOp -> llvmLocal -> llvmValue list -> llvmInstruction
val mkVoidIns : llvmOp -> llvmValue list -> llvmInstruction
val mkTrueIns : llvmLocal -> llvmValue -> llvmInstruction
val llvmEscape : string -> string
val llvmValueNegate : llvmValue -> llvmValue
val llvmCastOp : Cil.typ -> Cil.typ -> llvmCast
class type llvmGenerator =
object
method addString : string -> llvmGlobal
method addWString : int64 list -> llvmGlobal
method mkConstant : Cil.constant -> llvmValue
method mkConstantExp : Cil.exp -> llvmValue
method mkFunction : Cil.fundec -> llvmBlock list
method printBlocks : unit -> llvmBlock list -> Pretty.doc
method printGlobals : unit -> Pretty.doc
method printValue : unit -> llvmValue -> Pretty.doc
method printValueNoType : unit -> llvmValue -> Pretty.doc
end
class llvmGeneratorClass : llvmGenerator
| null | https://raw.githubusercontent.com/sosy-lab/tbf/18d08d6cee6fbfbad4d4ebfa7aed235521edec2b/tbf/tools/crest/cil/_build/src/ext/llvmgen.inferred.mli | ocaml | module H = Hashtbl
module S = String
exception NotConstant
type llvmBlock = {
lblabel : string;
mutable lbbody : llvmInstruction list;
mutable lbterminator : llvmTerminator;
mutable lbpreds : llvmBlock list;
}
and llvmInstruction = {
mutable liresult : llvmLocal option;
liop : llvmOp;
mutable liargs : llvmValue list;
}
and llvmTerminator =
TUnreachable
| TDead
| TRet of llvmValue list
| TBranch of llvmBlock
| TCond of llvmValue * llvmBlock * llvmBlock
| TSwitch of llvmValue * llvmBlock * (int64 * llvmBlock) list
and llvmValue =
LGlobal of llvmGlobal
| LLocal of llvmLocal
| LBool of bool
| LInt of int64 * Cil.ikind
| LFloat of float * Cil.fkind
| LUndef
| LZero
| LNull of llvmType
| LPhi of llvmValue * llvmBlock
| LType of llvmType
| LGetelementptr of llvmValue list
| LCast of llvmCast * llvmValue * llvmType
| LBinary of llvmBinop * llvmValue * llvmValue * llvmType
| LCmp of llvmCmp * llvmValue * llvmValue
| LFcmp of llvmFCmp * llvmValue * llvmValue
| LSelect of llvmValue * llvmValue * llvmValue
and llvmLocal = string * llvmType
and llvmGlobal = string * llvmType
and llvmType = Cil.typ
and llvmOp =
LIassign
| LIphi
| LIgetelementptr
| LIload
| LIstore
| LIcall
| LIalloca
| LIbinary of llvmBinop
| LIcmp of llvmCmp
| LIfcmp of llvmFCmp
| LIselect
| LIcast of llvmCast
| LIva_arg
and llvmBinop =
LBadd
| LBsub
| LBmul
| LBudiv
| LBsdiv
| LBfdiv
| LBurem
| LBsrem
| LBfrem
| LBshl
| LBlshr
| LBashr
| LBand
| LBor
| LBxor
and llvmCmp =
LCeq
| LCne
| LCslt
| LCult
| LCsle
| LCule
| LCsgt
| LCugt
| LCsge
| LCuge
and llvmFCmp =
LCFoeq
| LCFone
| LCFolt
| LCFole
| LCFogt
| LCFoge
| LCFord
| LCFueq
| LCFune
| LCFult
| LCFule
| LCFugt
| LCFuge
and llvmCast =
LAtrunc
| LAzext
| LAsext
| LAuitofp
| LAsitofp
| LAfptoui
| LAfptosi
| LAfptrunc
| LAfpext
| LAinttoptr
| LAptrtoint
| LAbitcast
val binopName : llvmBinop -> string
val cmpName : llvmCmp -> string
val fcmpName : llvmFCmp -> string
val castName : llvmCast -> string
val i1Type : Cil.typ
val i32Type : Cil.typ
val i8starType : Cil.typ
val llvmTypeOf : llvmValue -> llvmType
val llvmLocalType : Cil.typ -> bool
val llvmUseLocal : Cil.varinfo -> bool
val llvmDoNotUseLocal : Cil.varinfo -> bool
val llvmDestinations : llvmTerminator -> llvmBlock list
val llvmValueEqual : llvmValue -> llvmValue -> bool
val llocal : Cil.varinfo -> llvmLocal
val lglobal : Cil.varinfo -> llvmGlobal
val lvar : Cil.varinfo -> llvmValue
val lint : int -> Cil.typ -> llvmValue
val lzero : Cil.typ -> llvmValue
val mkIns : llvmOp -> llvmLocal -> llvmValue list -> llvmInstruction
val mkVoidIns : llvmOp -> llvmValue list -> llvmInstruction
val mkTrueIns : llvmLocal -> llvmValue -> llvmInstruction
val llvmEscape : string -> string
val llvmValueNegate : llvmValue -> llvmValue
val llvmCastOp : Cil.typ -> Cil.typ -> llvmCast
class type llvmGenerator =
object
method addString : string -> llvmGlobal
method addWString : int64 list -> llvmGlobal
method mkConstant : Cil.constant -> llvmValue
method mkConstantExp : Cil.exp -> llvmValue
method mkFunction : Cil.fundec -> llvmBlock list
method printBlocks : unit -> llvmBlock list -> Pretty.doc
method printGlobals : unit -> Pretty.doc
method printValue : unit -> llvmValue -> Pretty.doc
method printValueNoType : unit -> llvmValue -> Pretty.doc
end
class llvmGeneratorClass : llvmGenerator
| |
66452e7a5b204340b2b222c108275397b833ad2b56af1883b7cc5c48f5085bc5 | ucsd-progsys/liquidhaskell | ListAnd.hs | module ListAnd where
{-@ LIQUID "--reflection" @-}
{-@ LIQUID "--ple" @-}
import Prelude hiding (and, all)
import Language.Haskell.Liquid.ProofCombinators
{-@ infix : @-}
type Elm = Int
{-@ reflect and @-}
@ and : : x : Bool - > y : Bool - > { z : | z < = > x & & y } @
and :: Bool -> Bool -> Bool
and True y = y
and False _ = False
@ reflect @
@ gte : : x : Elm - > y : Elm - > { z : | z < = > x > = y } @
gte :: Elm -> Elm -> Bool
gte = (>=)
{-@ reflect lte @-}
@ lte : : x : Elm - > y : Elm - > { z : | z < = > x < = y } @
lte :: Elm -> Elm -> Bool
lte = (<=)
{-@ reflect all @-}
all :: (Elm -> Bool) -> [Elm] -> Bool
all _ [] = True
all f (x : xs) = f x && all f xs
{-@ simple :: x : Elm -> y : Elm -> ys : [Elm] -> {v : () | and (lte y x) (all (gte x) ys) = all (gte x) (y:ys) } @-}
simple :: Elm -> Elm -> [Elm] -> Proof
simple x y _ = if lte y x then () else ()
all ( gte x ) ( y : ys )
= = = ( lte y x ) ` and ` all ( gte x ) ys
-- = = = ( gte x y ) ` and ` all ( gte x ) ys
* * * QED
all (gte x) (y : ys)
=== (lte y x) `and` all (gte x ) ys
-- === (gte x y) `and` all (gte x ) ys
*** QED -}
| null | https://raw.githubusercontent.com/ucsd-progsys/liquidhaskell/56f72d6d8d8106fda4e35645e5f0bcab7bc10eee/tests/ple/pos/ListAnd.hs | haskell | @ LIQUID "--reflection" @
@ LIQUID "--ple" @
@ infix : @
@ reflect and @
@ reflect lte @
@ reflect all @
@ simple :: x : Elm -> y : Elm -> ys : [Elm] -> {v : () | and (lte y x) (all (gte x) ys) = all (gte x) (y:ys) } @
= = = ( gte x y ) ` and ` all ( gte x ) ys
=== (gte x y) `and` all (gte x ) ys | module ListAnd where
import Prelude hiding (and, all)
import Language.Haskell.Liquid.ProofCombinators
type Elm = Int
@ and : : x : Bool - > y : Bool - > { z : | z < = > x & & y } @
and :: Bool -> Bool -> Bool
and True y = y
and False _ = False
@ reflect @
@ gte : : x : Elm - > y : Elm - > { z : | z < = > x > = y } @
gte :: Elm -> Elm -> Bool
gte = (>=)
@ lte : : x : Elm - > y : Elm - > { z : | z < = > x < = y } @
lte :: Elm -> Elm -> Bool
lte = (<=)
all :: (Elm -> Bool) -> [Elm] -> Bool
all _ [] = True
all f (x : xs) = f x && all f xs
simple :: Elm -> Elm -> [Elm] -> Proof
simple x y _ = if lte y x then () else ()
all ( gte x ) ( y : ys )
= = = ( lte y x ) ` and ` all ( gte x ) ys
* * * QED
all (gte x) (y : ys)
=== (lte y x) `and` all (gte x ) ys
*** QED -}
|
f714b48fdaa628465ead64684ee0011b6c426ad86a03befca780b093dd94dc17 | metosin/reagent-dev-tools | core.cljs | (ns reagent-dev-tools.core
(:require [reagent.core :as r]
[reagent.dom :as rdom]
[reagent-dev-tools.styles :as s]
[reagent-dev-tools.state-tree :as state-tree]
[reagent-dev-tools.state :as state]
[reagent-dev-tools.utils :refer [window-event-listener]]
[reagent-dev-tools.context :as ctx]))
(def element-id (str ::dev-panel))
(def state-tree state-tree/state-tree-panel)
(def collection-info-handler state-tree/collection-info-handler)
(def register-collection-info-handler! state-tree/register-collection-info-handler)
(defn create-default-panels [options]
(if (:state-atom options)
[{:key ::default
:label (:state-atom-name options "State")
:view [state-tree
{:k :state-atom
:ratom (:state-atom options)}]}]
(if (nil? (:panels options))
[{:key ::default
:label (:state-atom-name options "State")
:view [:div [:p "Configure either `:state-atom` or `:panels`."]]}]
[])))
(defn dev-tool
#_:clj-kondo/ignore
[{:keys [panels]
:as options}]
(let [mouse-state (r/atom nil)]
(fn [{:keys [panels margin-element]}]
(let [{:keys [open? place width height]} @state/dev-state
panels (keep identity panels)
id->panel (into {} (map (juxt :key identity) panels))]
(when margin-element
(set! (.. margin-element -style -marginRight) (when (and open? (= :right place))
(str width "px")))
(set! (.. margin-element -style -marginBottom) (when (and open? (= :bottom place))
(str height "px"))))
[:<>
[:style (s/main-css)]
(if open?
(let [current-k (:current @state/dev-state ::default)
current-panel (or (get id->panel current-k)
(::default id->panel))]
[window-event-listener
{:on-mouse-move (when @mouse-state
(fn [e]
(.preventDefault e)
(swap! state/dev-state
(fn [v]
(case place
:right (assoc v :width (-> (- (.-innerWidth js/window) (.-clientX e))
(max 250)
(min 1000)))
;; Bottom
(assoc v :height (-> (- (.-innerHeight js/window) (.-clientY e))
(max 50)
(min 1000))))))))
:on-mouse-up (when @mouse-state
(fn [_e]
(reset! mouse-state nil)))}
[:div.reagent-dev-tools__panel
{:style (case place
:right {:width (str width "px")
:top 0
:right 0
:height "100%"
:flex-direction "row"}
;; bottom
{:height (str height "px")
:width "100%"
:bottom 0
:left 0
:flex-direction "column"})}
[:div.reagent-dev-tools__sizer
{:style (case place
:right {:width "5px"
:cursor "ew-resize"}
;; bottom
{:height "5px"
:cursor "ns-resize"})
:on-mouse-down (fn [e]
(reset! mouse-state true)
(.preventDefault e))}]
[:div
{:style {:display "flex"
:flex-direction "column"
:flex "1 0 auto"
:width "100%"
:height "100%"}}
[:div.reagent-dev-tools__nav
[:div.reagent-dev-tools__nav-panels
(for [panel panels]
[:div.reagent-dev-tools__nav-li
{:key (name (:key panel))}
[:a.reagent-dev-tools__nav-li-a
{:class (when (keyword-identical? current-k (:key panel)) "reagent-dev-tools__nav-li-a--active")
:on-click #(swap! state/dev-state assoc :current (:key panel))}
(:label panel)]])]
;; Just diplay the button to toggle to the other state.
(if (= :right place)
[:button.reagent-dev-tools__nav-li-a.reagent-dev-tools__nav-li-a--option-button
{:on-click #(swap! state/dev-state assoc :place :bottom)}
[:div.reagent-dev-tools__bottom-icon]]
[:button.reagent-dev-tools__nav-li-a.reagent-dev-tools__nav-li-a--option-button
{:on-click #(swap! state/dev-state assoc :place :right)}
[:div.reagent-dev-tools__right-icon]])
[:button.reagent-dev-tools__nav-li-a.reagent-dev-tools__nav-li-a--close-button
{:on-click #(swap! state/dev-state assoc :open? false)}
[:div.reagent-dev-tools__close-icon]]]
;; Allow the panel component to access panel-options through React context
;; E.g. to access the panel :key or :label
[:div.reagent-dev-tools__panel-content
[:r> ctx/panel-context-provider
#js {:value current-panel}
(:view current-panel)]]]]])
[:button.reagent-dev-tools__nav-li-a.reagent-dev-tools__toggle-btn
{:on-click (fn [_]
(swap! state/dev-state assoc :open? true)
nil)}
"dev"])]))))
(def ^:private panels-fn-warning
(delay (js/console.warn "Reagent dev tools option `:panels-fn` is deprecated. Use `:panels` instead.")))
;; NOTE: sync the option changes to README.
(defn start!
"Start Reagent dev tool.
Options:
- `:el` (optional) The element to render the dev-tool into. If the property is given,
but is nil, dev tool is not enabled. If not given, new div is created and used.
- `:margin-element` (optional) Element where to set margin-bottom/right if the panel is open.
This is helpful so that the dev tool isn't displayed over the application content.
- `:state-atom` This options adds default `state-tree` panel displaying tree for the given RAtom.
- `:state-atom-name` (optional) Overrides the name for default `state-tree` panel.
- `:panels` List of panel maps to display. This is appended to the default panels, if you
don't want to include default panels, leave out :state-atom option and define all panels here.
Panel options:
- `:key` (Required) React key
- `:label` (Required) Label for tab bar
- `:view` (Required) Reagent Hiccup form to display the panel content
Built-in panel component options:
- `reagent-dev-tools.core/state-tree`
- `:ratom` (Required) The RAtom to display
- `:label` (Optional) Label to display for atom root node, will default to panel :label."
[opts]
(when (:panels-fn opts)
@panels-fn-warning)
(doseq [panel (:panels opts)
:when (some? panel)]
(assert (:key panel) "Panel :key is required")
(assert (vector? (:view panel)) "Panel :view is required and must an vector"))
(when-let [el (if (contains? opts :el)
(:el opts)
(or (.getElementById js/document element-id)
(let [el (.createElement js/document "div")]
(set! (.-id el) element-id)
(.appendChild (.-body js/document) el)
el)))]
(rdom/render
[dev-tool {:margin-element (:margin-element opts)
:panels (into (create-default-panels opts)
(:panels opts))}]
el)))
| null | https://raw.githubusercontent.com/metosin/reagent-dev-tools/9fd5b7d0788c0df6def883bbc8bb9eb578119701/src/reagent_dev_tools/core.cljs | clojure | Bottom
bottom
bottom
Just diplay the button to toggle to the other state.
Allow the panel component to access panel-options through React context
E.g. to access the panel :key or :label
NOTE: sync the option changes to README. | (ns reagent-dev-tools.core
(:require [reagent.core :as r]
[reagent.dom :as rdom]
[reagent-dev-tools.styles :as s]
[reagent-dev-tools.state-tree :as state-tree]
[reagent-dev-tools.state :as state]
[reagent-dev-tools.utils :refer [window-event-listener]]
[reagent-dev-tools.context :as ctx]))
(def element-id (str ::dev-panel))
(def state-tree state-tree/state-tree-panel)
(def collection-info-handler state-tree/collection-info-handler)
(def register-collection-info-handler! state-tree/register-collection-info-handler)
(defn create-default-panels [options]
(if (:state-atom options)
[{:key ::default
:label (:state-atom-name options "State")
:view [state-tree
{:k :state-atom
:ratom (:state-atom options)}]}]
(if (nil? (:panels options))
[{:key ::default
:label (:state-atom-name options "State")
:view [:div [:p "Configure either `:state-atom` or `:panels`."]]}]
[])))
(defn dev-tool
#_:clj-kondo/ignore
[{:keys [panels]
:as options}]
(let [mouse-state (r/atom nil)]
(fn [{:keys [panels margin-element]}]
(let [{:keys [open? place width height]} @state/dev-state
panels (keep identity panels)
id->panel (into {} (map (juxt :key identity) panels))]
(when margin-element
(set! (.. margin-element -style -marginRight) (when (and open? (= :right place))
(str width "px")))
(set! (.. margin-element -style -marginBottom) (when (and open? (= :bottom place))
(str height "px"))))
[:<>
[:style (s/main-css)]
(if open?
(let [current-k (:current @state/dev-state ::default)
current-panel (or (get id->panel current-k)
(::default id->panel))]
[window-event-listener
{:on-mouse-move (when @mouse-state
(fn [e]
(.preventDefault e)
(swap! state/dev-state
(fn [v]
(case place
:right (assoc v :width (-> (- (.-innerWidth js/window) (.-clientX e))
(max 250)
(min 1000)))
(assoc v :height (-> (- (.-innerHeight js/window) (.-clientY e))
(max 50)
(min 1000))))))))
:on-mouse-up (when @mouse-state
(fn [_e]
(reset! mouse-state nil)))}
[:div.reagent-dev-tools__panel
{:style (case place
:right {:width (str width "px")
:top 0
:right 0
:height "100%"
:flex-direction "row"}
{:height (str height "px")
:width "100%"
:bottom 0
:left 0
:flex-direction "column"})}
[:div.reagent-dev-tools__sizer
{:style (case place
:right {:width "5px"
:cursor "ew-resize"}
{:height "5px"
:cursor "ns-resize"})
:on-mouse-down (fn [e]
(reset! mouse-state true)
(.preventDefault e))}]
[:div
{:style {:display "flex"
:flex-direction "column"
:flex "1 0 auto"
:width "100%"
:height "100%"}}
[:div.reagent-dev-tools__nav
[:div.reagent-dev-tools__nav-panels
(for [panel panels]
[:div.reagent-dev-tools__nav-li
{:key (name (:key panel))}
[:a.reagent-dev-tools__nav-li-a
{:class (when (keyword-identical? current-k (:key panel)) "reagent-dev-tools__nav-li-a--active")
:on-click #(swap! state/dev-state assoc :current (:key panel))}
(:label panel)]])]
(if (= :right place)
[:button.reagent-dev-tools__nav-li-a.reagent-dev-tools__nav-li-a--option-button
{:on-click #(swap! state/dev-state assoc :place :bottom)}
[:div.reagent-dev-tools__bottom-icon]]
[:button.reagent-dev-tools__nav-li-a.reagent-dev-tools__nav-li-a--option-button
{:on-click #(swap! state/dev-state assoc :place :right)}
[:div.reagent-dev-tools__right-icon]])
[:button.reagent-dev-tools__nav-li-a.reagent-dev-tools__nav-li-a--close-button
{:on-click #(swap! state/dev-state assoc :open? false)}
[:div.reagent-dev-tools__close-icon]]]
[:div.reagent-dev-tools__panel-content
[:r> ctx/panel-context-provider
#js {:value current-panel}
(:view current-panel)]]]]])
[:button.reagent-dev-tools__nav-li-a.reagent-dev-tools__toggle-btn
{:on-click (fn [_]
(swap! state/dev-state assoc :open? true)
nil)}
"dev"])]))))
(def ^:private panels-fn-warning
(delay (js/console.warn "Reagent dev tools option `:panels-fn` is deprecated. Use `:panels` instead.")))
(defn start!
"Start Reagent dev tool.
Options:
- `:el` (optional) The element to render the dev-tool into. If the property is given,
but is nil, dev tool is not enabled. If not given, new div is created and used.
- `:margin-element` (optional) Element where to set margin-bottom/right if the panel is open.
This is helpful so that the dev tool isn't displayed over the application content.
- `:state-atom` This options adds default `state-tree` panel displaying tree for the given RAtom.
- `:state-atom-name` (optional) Overrides the name for default `state-tree` panel.
- `:panels` List of panel maps to display. This is appended to the default panels, if you
don't want to include default panels, leave out :state-atom option and define all panels here.
Panel options:
- `:key` (Required) React key
- `:label` (Required) Label for tab bar
- `:view` (Required) Reagent Hiccup form to display the panel content
Built-in panel component options:
- `reagent-dev-tools.core/state-tree`
- `:ratom` (Required) The RAtom to display
- `:label` (Optional) Label to display for atom root node, will default to panel :label."
[opts]
(when (:panels-fn opts)
@panels-fn-warning)
(doseq [panel (:panels opts)
:when (some? panel)]
(assert (:key panel) "Panel :key is required")
(assert (vector? (:view panel)) "Panel :view is required and must an vector"))
(when-let [el (if (contains? opts :el)
(:el opts)
(or (.getElementById js/document element-id)
(let [el (.createElement js/document "div")]
(set! (.-id el) element-id)
(.appendChild (.-body js/document) el)
el)))]
(rdom/render
[dev-tool {:margin-element (:margin-element opts)
:panels (into (create-default-panels opts)
(:panels opts))}]
el)))
|
6398791172eb099d3751d662e0c3fd9d86d3e26c2906eb4544bbb31840cd9e19 | camfort/camfort | InferenceBackendSpec.hs | module Camfort.Specification.Stencils.InferenceBackendSpec (spec) where
import Camfort.Specification.Stencils.InferenceBackend
import Camfort.Specification.Stencils.Syntax
import Camfort.Specification.Stencils.Model
import qualified Camfort.Helpers.Vec as V
import Test.Hspec
spec :: Spec
spec =
describe "Inference backend" $ do
describe "spans to approximate regions" $ do
it "handles spans of a(i-2) + a(i) + a(i+2)" $ do
let spans = [ (V.Cons (-2) V.Nil, V.Cons (-2) V.Nil)
, (V.Cons 0 V.Nil, V.Cons 0 V.Nil)
, (V.Cons 2 V.Nil, V.Cons 2 V.Nil) ]
let region = Right $ Bound
(Just . Spatial $ Sum [ Product [ Centered 0 1 True ]])
(Just . Spatial $ Sum [ Product [ Centered 2 1 True ]])
spansToApproxSpatial spans `shouldBe` region
it "handles spans of a(i,0) + a(0,j)" $ do
let spans = [ ( V.Cons 0 (V.Cons absoluteRep V.Nil)
, V.Cons 0 (V.Cons absoluteRep V.Nil) )
, ( V.Cons absoluteRep (V.Cons 0 V.Nil)
, V.Cons absoluteRep (V.Cons 0 V.Nil) ) ]
let region = Right . Exact . Spatial $
Sum [ Product [ Centered 0 1 True ]
, Product [ Centered 0 2 True ] ]
spansToApproxSpatial spans `shouldBe` region
| null | https://raw.githubusercontent.com/camfort/camfort/3421e85f6fbbcaa6503a266b3fae029a09d2ff24/tests/Camfort/Specification/Stencils/InferenceBackendSpec.hs | haskell | module Camfort.Specification.Stencils.InferenceBackendSpec (spec) where
import Camfort.Specification.Stencils.InferenceBackend
import Camfort.Specification.Stencils.Syntax
import Camfort.Specification.Stencils.Model
import qualified Camfort.Helpers.Vec as V
import Test.Hspec
spec :: Spec
spec =
describe "Inference backend" $ do
describe "spans to approximate regions" $ do
it "handles spans of a(i-2) + a(i) + a(i+2)" $ do
let spans = [ (V.Cons (-2) V.Nil, V.Cons (-2) V.Nil)
, (V.Cons 0 V.Nil, V.Cons 0 V.Nil)
, (V.Cons 2 V.Nil, V.Cons 2 V.Nil) ]
let region = Right $ Bound
(Just . Spatial $ Sum [ Product [ Centered 0 1 True ]])
(Just . Spatial $ Sum [ Product [ Centered 2 1 True ]])
spansToApproxSpatial spans `shouldBe` region
it "handles spans of a(i,0) + a(0,j)" $ do
let spans = [ ( V.Cons 0 (V.Cons absoluteRep V.Nil)
, V.Cons 0 (V.Cons absoluteRep V.Nil) )
, ( V.Cons absoluteRep (V.Cons 0 V.Nil)
, V.Cons absoluteRep (V.Cons 0 V.Nil) ) ]
let region = Right . Exact . Spatial $
Sum [ Product [ Centered 0 1 True ]
, Product [ Centered 0 2 True ] ]
spansToApproxSpatial spans `shouldBe` region
| |
190cc1dc943b38deaa3a53a7d65d1510a825d28d054c65763db35e87e5a5369d | MLstate/opalang | surfaceAstPasses.mli |
Copyright © 2011 MLstate
This file is part of .
is free software : you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License , version 3 , as published by
the Free Software Foundation .
is distributed in the hope that it will be useful , but WITHOUT ANY
WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE . See the GNU Affero General Public License for
more details .
You should have received a copy of the GNU Affero General Public License
along with . If not , see < / > .
Copyright © 2011 MLstate
This file is part of Opa.
Opa is free software: you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License, version 3, as published by
the Free Software Foundation.
Opa is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
more details.
You should have received a copy of the GNU Affero General Public License
along with Opa. If not, see </>.
*)
*
This module contains the compilation passes that are executed
in SurfaceAst ( the very front end of the compiler )
except the parsing pass ( that is available in OpaParser )
This module contains the compilation passes that are executed
in SurfaceAst (the very front end of the compiler)
except the parsing pass (that is available in OpaParser)
*)
open SurfaceAstPassesTypes
open SurfaceAst
(**
This pass looks for Package declaration of each file, and loads these
dependencies
The node Package is removed from the code and never appears afterwards
*)
val pass_load_objects:
options:options ->
(([< SurfaceAst.all_directives > `static_content `static_content_directory `static_resource `static_resource_directory ] as 'b) parsed_file list * 'b parsed_file list) ->
((string, 'b) SurfaceAst.code_elt ObjectFiles.parsed_code -> unit) ->
unit
*
This pass goes through the whole to transform the syntactic constructions
parsers and xml_parser into real opa code
Parser generation happens in trx_convert.ml
Xml_parser generation happens in
Assumptions : no alpha renaming yet
no directives other than ` xml_parser contains an expression in its variant
Directives removed : [ \ [ ` xml_parser _ \ ] ]
Directives added : [ \[\ ] ]
This pass goes through the whole to transform the syntactic constructions
parsers and xml_parser into real opa code
Parser generation happens in trx_convert.ml
Xml_parser generation happens in xml_pattern.ml
Assumptions: no alpha renaming yet
no directives other than `xml_parser contains an expression in its variant
Directives removed : [\[ `xml_parser _ \]]
Directives added : [\[\]]
*)
val pass_parser_generation :
options:options ->
(string,parsing_directive) env_both_lcodes ->
(string, renaming_directive) env_both_lcodes
*
This pass alpha renames the whole code
It takes care of renaming types , type variables , and identifiers
Assumptions :
- every identifier that is allowed to be unbound should be given ( first argument )
for example , git_version , or release are defined afterwards by the compiler and must be given
in the list
- every type identifier that is allowed to be unbound should given as
a second argument
tuples are a special cases : , where d > = 0 is automatically defined once it is used
afterwards , a mapping from integers to the corresponding tuple identifier can be retrieved
- no directive bind variables in their subexpression , or somehow change the scope
there is of course a few exceptions : - no variant of a directive contains expressions , patterns , types or anything that should be renamed
Directives removed : [ SurfaceAst.alpha_renaming_directive ]
Directives added : [ \ [ ` local \ ] ]
This pass alpha renames the whole code
It takes care of renaming types, type variables, and identifiers
Assumptions:
- every identifier that is allowed to be unbound should be given (first argument)
for example, git_version, or release are defined afterwards by the compiler and must be given
in the list
- every type identifier that is allowed to be unbound should given as
a second argument
tuples are a special cases: tuple_%d, where d >= 0 is automatically defined once it is used
afterwards, a mapping from integers to the corresponding tuple identifier can be retrieved
- no directive bind variables in their subexpression, or somehow change the scope
there is of course a few exceptions: SurfaceAst.alpha_renaming_directive
- no variant of a directive contains expressions, patterns, types or anything that should be renamed
Directives removed : [SurfaceAst.alpha_renaming_directive]
Directives added : [\[ `local \]]
*)
val pass_check_duplication :
string list ->
string list ->
options:options ->
(string, renaming_directive) env_both_lcodes ->
(Ident.t, dependency_directive) env_both_lcodes
*
This pass insert the definitions of for every use that was recorded
by the alpha renaming pass
Assumption : alpha renaming has been performed already
Directives removed : [ \[\ ] ]
Directives added : [ \[\ ] ]
This pass insert the definitions of tuple_%d for every use that was recorded
by the alpha renaming pass
Assumption: alpha renaming has been performed already
Directives removed : [\[\]]
Directives added : [\[\]]
*)
val pass_tuple_types :
options:options ->
(Ident.t, 'b) code ->
(Ident.t, 'b) code
| null | https://raw.githubusercontent.com/MLstate/opalang/424b369160ce693406cece6ac033d75d85f5df4f/compiler/passes/surfaceAstPasses.mli | ocaml | *
This pass looks for Package declaration of each file, and loads these
dependencies
The node Package is removed from the code and never appears afterwards
|
Copyright © 2011 MLstate
This file is part of .
is free software : you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License , version 3 , as published by
the Free Software Foundation .
is distributed in the hope that it will be useful , but WITHOUT ANY
WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE . See the GNU Affero General Public License for
more details .
You should have received a copy of the GNU Affero General Public License
along with . If not , see < / > .
Copyright © 2011 MLstate
This file is part of Opa.
Opa is free software: you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License, version 3, as published by
the Free Software Foundation.
Opa is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
more details.
You should have received a copy of the GNU Affero General Public License
along with Opa. If not, see </>.
*)
*
This module contains the compilation passes that are executed
in SurfaceAst ( the very front end of the compiler )
except the parsing pass ( that is available in OpaParser )
This module contains the compilation passes that are executed
in SurfaceAst (the very front end of the compiler)
except the parsing pass (that is available in OpaParser)
*)
open SurfaceAstPassesTypes
open SurfaceAst
val pass_load_objects:
options:options ->
(([< SurfaceAst.all_directives > `static_content `static_content_directory `static_resource `static_resource_directory ] as 'b) parsed_file list * 'b parsed_file list) ->
((string, 'b) SurfaceAst.code_elt ObjectFiles.parsed_code -> unit) ->
unit
*
This pass goes through the whole to transform the syntactic constructions
parsers and xml_parser into real opa code
Parser generation happens in trx_convert.ml
Xml_parser generation happens in
Assumptions : no alpha renaming yet
no directives other than ` xml_parser contains an expression in its variant
Directives removed : [ \ [ ` xml_parser _ \ ] ]
Directives added : [ \[\ ] ]
This pass goes through the whole to transform the syntactic constructions
parsers and xml_parser into real opa code
Parser generation happens in trx_convert.ml
Xml_parser generation happens in xml_pattern.ml
Assumptions: no alpha renaming yet
no directives other than `xml_parser contains an expression in its variant
Directives removed : [\[ `xml_parser _ \]]
Directives added : [\[\]]
*)
val pass_parser_generation :
options:options ->
(string,parsing_directive) env_both_lcodes ->
(string, renaming_directive) env_both_lcodes
*
This pass alpha renames the whole code
It takes care of renaming types , type variables , and identifiers
Assumptions :
- every identifier that is allowed to be unbound should be given ( first argument )
for example , git_version , or release are defined afterwards by the compiler and must be given
in the list
- every type identifier that is allowed to be unbound should given as
a second argument
tuples are a special cases : , where d > = 0 is automatically defined once it is used
afterwards , a mapping from integers to the corresponding tuple identifier can be retrieved
- no directive bind variables in their subexpression , or somehow change the scope
there is of course a few exceptions : - no variant of a directive contains expressions , patterns , types or anything that should be renamed
Directives removed : [ SurfaceAst.alpha_renaming_directive ]
Directives added : [ \ [ ` local \ ] ]
This pass alpha renames the whole code
It takes care of renaming types, type variables, and identifiers
Assumptions:
- every identifier that is allowed to be unbound should be given (first argument)
for example, git_version, or release are defined afterwards by the compiler and must be given
in the list
- every type identifier that is allowed to be unbound should given as
a second argument
tuples are a special cases: tuple_%d, where d >= 0 is automatically defined once it is used
afterwards, a mapping from integers to the corresponding tuple identifier can be retrieved
- no directive bind variables in their subexpression, or somehow change the scope
there is of course a few exceptions: SurfaceAst.alpha_renaming_directive
- no variant of a directive contains expressions, patterns, types or anything that should be renamed
Directives removed : [SurfaceAst.alpha_renaming_directive]
Directives added : [\[ `local \]]
*)
val pass_check_duplication :
string list ->
string list ->
options:options ->
(string, renaming_directive) env_both_lcodes ->
(Ident.t, dependency_directive) env_both_lcodes
*
This pass insert the definitions of for every use that was recorded
by the alpha renaming pass
Assumption : alpha renaming has been performed already
Directives removed : [ \[\ ] ]
Directives added : [ \[\ ] ]
This pass insert the definitions of tuple_%d for every use that was recorded
by the alpha renaming pass
Assumption: alpha renaming has been performed already
Directives removed : [\[\]]
Directives added : [\[\]]
*)
val pass_tuple_types :
options:options ->
(Ident.t, 'b) code ->
(Ident.t, 'b) code
|
ba33e2bf12cb383f44df3bc8cb434cb8bb823ef08b2f85c35372cc011d7ac7e1 | diagrams/SVGFonts | WriteFont.hs | module Graphics.SVGFonts.WriteFont where
import Numeric ( showHex )
import Data.String ( fromString )
import Data.Char ( ord )
import Data.List ( intercalate )
import qualified Data.Set as Set
import qualified Data.Map as M
import Control.Monad ( forM_ )
import Text.Blaze.Svg11 ((!), toValue)
import qualified Text.Blaze.Internal as B
import qualified Text.Blaze.Svg11 as S
import qualified Text.Blaze.Svg11.Attributes as A
import Graphics.SVGFonts.ReadFont
makeSvgFont :: (Show n, S.ToValue n) => PreparedFont n -> Set.Set String -> S.Svg
makeSvgFont (fd, _) gs =
font ! A.horizAdvX horizAdvX $ do
-- Font meta information
S.fontFace ! A.fontFamily fontFamily
! A.fontStyle fontStyle
! A.fontWeight fontWeight
! A.fontStretch fontStretch
! A.fontVariant fontVariant
# maybeMaybe A.fontSize fontDataSize
! A.unitsPerEm unitsPerEm
# maybeString A.panose1 fontDataPanose
# maybeMaybe A.slope fontDataSlope
! A.ascent ascent
! A.descent descent
! A.xHeight xHeight
! A.capHeight capHeight
# maybeMaybe A.accentHeight fontDataAccentHeight
! A.bbox bbox
! A.underlineThickness underlineT
! A.underlinePosition underlineP
! A.unicodeRange unicodeRange
# maybeMaybe A.widths fontDataWidths
# maybeMaybe A.stemv fontDataHorizontalStem
# maybeMaybe A.stemh fontDataVerticalStem
# maybeMaybe A.ideographic fontDataIdeographicBaseline
# maybeMaybe A.alphabetic fontDataAlphabeticBaseline
# maybeMaybe A.mathematical fontDataMathematicalBaseline
# maybeMaybe A.hanging fontDataHangingBaseline
# maybeMaybe A.vIdeographic fontDataVIdeographicBaseline
# maybeMaybe A.vAlphabetic fontDataVAlphabeticBaseline
# maybeMaybe A.vMathematical fontDataVMathematicalBaseline
# maybeMaybe A.vHanging fontDataVHangingBaseline
# maybeMaybe A.overlinePosition fontDataOverlinePos
# maybeMaybe A.overlineThickness fontDataOverlineThickness
# maybeMaybe A.strikethroughPosition fontDataStrikethroughPos
# maybeMaybe A.strikethroughThickness fontDataStrikethroughThickness
-- Insert the 'missing-glyph'
case M.lookup ".notdef" (fontDataGlyphs fd) of
Nothing -> return ()
Just (_, _, gPath) -> S.missingGlyph ! A.d (toValue gPath)
$ return ()
-- Insert all other glyphs
forM_ (Set.toList gs') $ \g -> case M.lookup g (fontDataGlyphs fd) of
Nothing -> return ()
Just (gName, gHAdv, gPath) ->
S.glyph ! A.glyphName (toValue gName)
! A.horizAdvX (toValue gHAdv)
! A.d (toValue gPath)
# maybeUnicode g
$ return ()
forM_ (fontDataRawKernings fd) $ \(k, g1, g2, u1, u2) -> do
let g1' = filter isGlyph g1
g2' = filter isGlyph g2
u1' = filter isGlyph u1
u2' = filter isGlyph u2
case (not (null g1') && not (null g2')) || (not (null u1') && not (null u2')) of
True ->
S.hkern ! A.k (toValue k)
# maybeString A.g1 (const $ intercalate "," g1')
# maybeString A.g2 (const $ intercalate "," g2')
# maybeString A.u1 (const $ intercalate "," u1')
# maybeString A.u2 (const $ intercalate "," u2')
False -> return ()
where
(#) :: (B.Attributable h) => h -> Maybe S.Attribute -> h
(#) x Nothing = x
(#) x (Just a) = x ! a
unicodeBlacklist :: Set.Set String
unicodeBlacklist = Set.fromList
[ ".notdef"
, ".null"
]
maybeUnicode :: String -> Maybe S.Attribute
maybeUnicode [] = Nothing
maybeUnicode s | s `Set.member` unicodeBlacklist || length s >= 10 = Nothing
maybeUnicode s = Just $ A.unicode $ toValue $ concatMap encodeUnicode s
encodeUnicode :: Char -> String
encodeUnicode c =
let cOrd = ord c
in if cOrd >= 32 && cOrd <= 126
then [c]
else "&#x" ++ showHex cOrd ""
: : ( a )
-- => (S.AttributeValue -> S.Attribute) -> (FontData n -> Maybe a)
- > Maybe S.Attribute
maybeMaybe toF fromF = (toF . toValue) `fmap` fromF fd
-- maybeString :: (S.AttributeValue -> S.Attribute) -> (FontData n -> String)
- > Maybe S.Attribute
maybeString toF fromF = case fromF fd of
"" -> Nothing
s -> Just $ toF $ toValue $ s
font :: S.Svg -> S.Svg
font m = B.Parent (fromString "font") (fromString "<font") (fromString "</font>") m
isGlyph :: String -> Bool
isGlyph g = g `Set.member` gs'
gs' = Set.insert ".notdef" gs
horizAdvX = toValue $ fontDataHorizontalAdvance fd
fontFamily = toValue $ fontDataFamily fd
fontStyle = toValue $ fontDataStyle fd
fontWeight = toValue $ fontDataWeight fd
fontStretch = toValue $ fontDataStretch fd
fontVariant = toValue $ fontDataVariant fd
unitsPerEm = toValue $ fontDataUnitsPerEm fd
ascent = toValue $ fontDataAscent fd
descent = toValue $ fontDataDescent fd
xHeight = toValue $ fontDataXHeight fd
capHeight = toValue $ fontDataCapHeight fd
bbox = toValue $ intercalate " " $ fmap show $ fontDataBoundingBox fd
underlineT = toValue $ fontDataUnderlineThickness fd
underlineP = toValue $ fontDataUnderlinePos fd
unicodeRange = toValue $ fontDataUnicodeRange fd
| null | https://raw.githubusercontent.com/diagrams/SVGFonts/66fa56c5a80f1aaaddf5c3e938ee844f90b60b9d/src/Graphics/SVGFonts/WriteFont.hs | haskell | Font meta information
Insert the 'missing-glyph'
Insert all other glyphs
=> (S.AttributeValue -> S.Attribute) -> (FontData n -> Maybe a)
maybeString :: (S.AttributeValue -> S.Attribute) -> (FontData n -> String) | module Graphics.SVGFonts.WriteFont where
import Numeric ( showHex )
import Data.String ( fromString )
import Data.Char ( ord )
import Data.List ( intercalate )
import qualified Data.Set as Set
import qualified Data.Map as M
import Control.Monad ( forM_ )
import Text.Blaze.Svg11 ((!), toValue)
import qualified Text.Blaze.Internal as B
import qualified Text.Blaze.Svg11 as S
import qualified Text.Blaze.Svg11.Attributes as A
import Graphics.SVGFonts.ReadFont
makeSvgFont :: (Show n, S.ToValue n) => PreparedFont n -> Set.Set String -> S.Svg
makeSvgFont (fd, _) gs =
font ! A.horizAdvX horizAdvX $ do
S.fontFace ! A.fontFamily fontFamily
! A.fontStyle fontStyle
! A.fontWeight fontWeight
! A.fontStretch fontStretch
! A.fontVariant fontVariant
# maybeMaybe A.fontSize fontDataSize
! A.unitsPerEm unitsPerEm
# maybeString A.panose1 fontDataPanose
# maybeMaybe A.slope fontDataSlope
! A.ascent ascent
! A.descent descent
! A.xHeight xHeight
! A.capHeight capHeight
# maybeMaybe A.accentHeight fontDataAccentHeight
! A.bbox bbox
! A.underlineThickness underlineT
! A.underlinePosition underlineP
! A.unicodeRange unicodeRange
# maybeMaybe A.widths fontDataWidths
# maybeMaybe A.stemv fontDataHorizontalStem
# maybeMaybe A.stemh fontDataVerticalStem
# maybeMaybe A.ideographic fontDataIdeographicBaseline
# maybeMaybe A.alphabetic fontDataAlphabeticBaseline
# maybeMaybe A.mathematical fontDataMathematicalBaseline
# maybeMaybe A.hanging fontDataHangingBaseline
# maybeMaybe A.vIdeographic fontDataVIdeographicBaseline
# maybeMaybe A.vAlphabetic fontDataVAlphabeticBaseline
# maybeMaybe A.vMathematical fontDataVMathematicalBaseline
# maybeMaybe A.vHanging fontDataVHangingBaseline
# maybeMaybe A.overlinePosition fontDataOverlinePos
# maybeMaybe A.overlineThickness fontDataOverlineThickness
# maybeMaybe A.strikethroughPosition fontDataStrikethroughPos
# maybeMaybe A.strikethroughThickness fontDataStrikethroughThickness
case M.lookup ".notdef" (fontDataGlyphs fd) of
Nothing -> return ()
Just (_, _, gPath) -> S.missingGlyph ! A.d (toValue gPath)
$ return ()
forM_ (Set.toList gs') $ \g -> case M.lookup g (fontDataGlyphs fd) of
Nothing -> return ()
Just (gName, gHAdv, gPath) ->
S.glyph ! A.glyphName (toValue gName)
! A.horizAdvX (toValue gHAdv)
! A.d (toValue gPath)
# maybeUnicode g
$ return ()
forM_ (fontDataRawKernings fd) $ \(k, g1, g2, u1, u2) -> do
let g1' = filter isGlyph g1
g2' = filter isGlyph g2
u1' = filter isGlyph u1
u2' = filter isGlyph u2
case (not (null g1') && not (null g2')) || (not (null u1') && not (null u2')) of
True ->
S.hkern ! A.k (toValue k)
# maybeString A.g1 (const $ intercalate "," g1')
# maybeString A.g2 (const $ intercalate "," g2')
# maybeString A.u1 (const $ intercalate "," u1')
# maybeString A.u2 (const $ intercalate "," u2')
False -> return ()
where
(#) :: (B.Attributable h) => h -> Maybe S.Attribute -> h
(#) x Nothing = x
(#) x (Just a) = x ! a
unicodeBlacklist :: Set.Set String
unicodeBlacklist = Set.fromList
[ ".notdef"
, ".null"
]
maybeUnicode :: String -> Maybe S.Attribute
maybeUnicode [] = Nothing
maybeUnicode s | s `Set.member` unicodeBlacklist || length s >= 10 = Nothing
maybeUnicode s = Just $ A.unicode $ toValue $ concatMap encodeUnicode s
encodeUnicode :: Char -> String
encodeUnicode c =
let cOrd = ord c
in if cOrd >= 32 && cOrd <= 126
then [c]
else "&#x" ++ showHex cOrd ""
: : ( a )
- > Maybe S.Attribute
maybeMaybe toF fromF = (toF . toValue) `fmap` fromF fd
- > Maybe S.Attribute
maybeString toF fromF = case fromF fd of
"" -> Nothing
s -> Just $ toF $ toValue $ s
font :: S.Svg -> S.Svg
font m = B.Parent (fromString "font") (fromString "<font") (fromString "</font>") m
isGlyph :: String -> Bool
isGlyph g = g `Set.member` gs'
gs' = Set.insert ".notdef" gs
horizAdvX = toValue $ fontDataHorizontalAdvance fd
fontFamily = toValue $ fontDataFamily fd
fontStyle = toValue $ fontDataStyle fd
fontWeight = toValue $ fontDataWeight fd
fontStretch = toValue $ fontDataStretch fd
fontVariant = toValue $ fontDataVariant fd
unitsPerEm = toValue $ fontDataUnitsPerEm fd
ascent = toValue $ fontDataAscent fd
descent = toValue $ fontDataDescent fd
xHeight = toValue $ fontDataXHeight fd
capHeight = toValue $ fontDataCapHeight fd
bbox = toValue $ intercalate " " $ fmap show $ fontDataBoundingBox fd
underlineT = toValue $ fontDataUnderlineThickness fd
underlineP = toValue $ fontDataUnderlinePos fd
unicodeRange = toValue $ fontDataUnicodeRange fd
|
110b038c8247a678ecfebfbd828d68a591d72ece6fb4311c8c0dceec82985fbc | pallix/tikkba | xml.clj | Copyright © 2010 Fraunhofer Gesellschaft
Licensed under the EPL V.1.0
(ns ^{:doc "Utilities function to manipulate the XML vector representation"}
tikkba.utils.xml)
(defn style-str
"Returns a string representing the properties
as a SVG style"
[& props]
(reduce (fn [s [k v]]
(str s " " (name k) ": "
(if (keyword? v)
(name v)
v)
"; "))
"" (apply hash-map props))) | null | https://raw.githubusercontent.com/pallix/tikkba/86fda7f97c3b1ff835f02c2b1c0337f3e134fd2c/src/tikkba/utils/xml.clj | clojure | Copyright © 2010 Fraunhofer Gesellschaft
Licensed under the EPL V.1.0
(ns ^{:doc "Utilities function to manipulate the XML vector representation"}
tikkba.utils.xml)
(defn style-str
"Returns a string representing the properties
as a SVG style"
[& props]
(reduce (fn [s [k v]]
(str s " " (name k) ": "
(if (keyword? v)
(name v)
v)
"; "))
"" (apply hash-map props))) | |
12357c6ce46fd88e26d045f5c004d448c4ec03bc9b02f6cd8dfbf96aae03b91b | Opetushallitus/ataru | accessibility_util.cljs | (ns ataru.application-common.accessibility-util)
(defn is-enter-or-space?
[event]
(or (= 13 (.-keyCode event)) (= 32 (.-keyCode event)))) | null | https://raw.githubusercontent.com/Opetushallitus/ataru/acc274bfdb583877a2dd2ea56ff4cea0649f96ca/src/cljs/ataru/application_common/accessibility_util.cljs | clojure | (ns ataru.application-common.accessibility-util)
(defn is-enter-or-space?
[event]
(or (= 13 (.-keyCode event)) (= 32 (.-keyCode event)))) | |
05b1d08ee00f3945f14626743d0a1e8b7710d8ffde7d02c59b8660d537d83614 | HealthSamurai/re-form | repl.clj | (ns re-form.repl
(:require [figwheel-sidecar.repl-api :as repl]))
(defn start-fw []
(repl/start-figwheel!)
(repl/cljs-repl "app"))
| null | https://raw.githubusercontent.com/HealthSamurai/re-form/810321d67e3946876c834998e3472d0693846953/env/dev/clj/re_form/repl.clj | clojure | (ns re-form.repl
(:require [figwheel-sidecar.repl-api :as repl]))
(defn start-fw []
(repl/start-figwheel!)
(repl/cljs-repl "app"))
| |
c6efc0b2684ce65bd2536ed7056292e5f742f82d813299a018d8f96ec0d4b9db | paddymul/css-lite | paren-css-lite.lisp |
#+parenscript (ps:defpsmacro px (val)
`(ps:+ ,val "px"))
#+parenscript (ps:defpsmacro % (val)
`(ps:+ ,val "%"))
#+parenscript (ps:defpsmacro pt (val)
`(ps:+ ,val "pt"))
#+parenscript (ps:defpsmacro css (&body rules)
(cons 'ps:+ (ps::concat-constant-strings (mapcan #'css-lite::process-css-rule rules))))
#+parenscript (ps:defpsmacro inline-css (&rest properties)
(cons 'ps:+ (ps::concat-constant-strings (css-lite::process-css-properties properties nil :newlines nil))))
#+parenscript (ps:defpsmacro to-string (x)
(if (and (listp x) (eql 'quote (car x)))
(ps:symbol-to-js-string (second x))
x))
#+parenscript (defun css-id-name (symbol)
(format nil "#~a" (ps:symbol-to-js-string symbol)))
| null | https://raw.githubusercontent.com/paddymul/css-lite/3a27bdb62ed2ba5861e4dbe503f8b061954f6753/paren-css-lite.lisp | lisp |
#+parenscript (ps:defpsmacro px (val)
`(ps:+ ,val "px"))
#+parenscript (ps:defpsmacro % (val)
`(ps:+ ,val "%"))
#+parenscript (ps:defpsmacro pt (val)
`(ps:+ ,val "pt"))
#+parenscript (ps:defpsmacro css (&body rules)
(cons 'ps:+ (ps::concat-constant-strings (mapcan #'css-lite::process-css-rule rules))))
#+parenscript (ps:defpsmacro inline-css (&rest properties)
(cons 'ps:+ (ps::concat-constant-strings (css-lite::process-css-properties properties nil :newlines nil))))
#+parenscript (ps:defpsmacro to-string (x)
(if (and (listp x) (eql 'quote (car x)))
(ps:symbol-to-js-string (second x))
x))
#+parenscript (defun css-id-name (symbol)
(format nil "#~a" (ps:symbol-to-js-string symbol)))
| |
e693a1b9c0b9efee2c1c79590ecf8fe264ae36b0e30de5746365e8f6ed2b4754 | okeuday/erlbench | shuffle.erl | -*-Mode : erlang;coding : utf-8;tab - width:4;c - basic - offset:4;indent - tabs - mode:()-*-
ex : set utf-8 sts=4 ts=4 sw=4 et :
-module(shuffle).
-export([shuffle/1]).
Fisher - Yates shuffle
shuffle(Array) ->
shuffle(array:size(Array) - 1, Array).
shuffle(0, Array) ->
Array;
shuffle(I, Array) ->
J = random:uniform(I + 1) - 1,
Temp = array:get(I, Array),
NewArray = array:set(J, Temp, array:set(I, array:get(J, Array), Array)),
shuffle(I - 1, NewArray).
| null | https://raw.githubusercontent.com/okeuday/erlbench/9fc02a2e748b287b85f6e9641db6b2ca68791fa4/src/shuffle.erl | erlang | -*-Mode : erlang;coding : utf-8;tab - width:4;c - basic - offset:4;indent - tabs - mode:()-*-
ex : set utf-8 sts=4 ts=4 sw=4 et :
-module(shuffle).
-export([shuffle/1]).
Fisher - Yates shuffle
shuffle(Array) ->
shuffle(array:size(Array) - 1, Array).
shuffle(0, Array) ->
Array;
shuffle(I, Array) ->
J = random:uniform(I + 1) - 1,
Temp = array:get(I, Array),
NewArray = array:set(J, Temp, array:set(I, array:get(J, Array), Array)),
shuffle(I - 1, NewArray).
| |
9de21d77f20fd6b6e4ef620a90c567baedb1eb0c740b96b00286a25037139c9c | tonyg/racket-abnf | postal-address.rkt | #lang racket/base
(require abnf)
(require abnf/rfc5234/core)
(require racket/match)
(define-abnf-parser parse-postal-address/cst "postal-address-rules.rkt" postal-address values)
(struct postal-address (name street area) #:prefab)
(struct name (personal-parts last-name suffix) #:prefab)
(struct initial (str) #:prefab)
(struct street (apartment house-number name) #:prefab)
(struct area (town state zip) #:prefab)
(define (cst->ast cst)
(traverse (lambda (walk cst)
(match cst
[`(postal-address (: ,n ,s ,z)) (postal-address (walk n) (walk s) (walk z))]
[`(name-part (/ 0 (: (* ((: ,ps ,_) ...)) ,l ,s ,_)))
(name (map walk ps)
(text l)
(match s
[`(* ()) #f]
[`(* ((: ,_ ,s))) (text s)]))]
[`(name-part (/ 1 (: ,p ,_)))
(name (walk p) #f #f)]
[`(personal-part (/ 0 ,n)) (text n)]
[`(personal-part (/ 1 (: (initial ,i) "."))) (initial (text i))]
[`(street (: ,a ,h ,_ ,n ,_))
(street (match a
[`(* ()) #f]
[`(* ((: ,a ,_))) (text a)])
(text h)
(text n))]
[`(zip-part (: ,t "," ,_ ,s ,_ ,z ,_))
(area (text t) (text s) (text z))]))
cst))
(define-abnf-parser parse-postal-address "postal-address-rules.rkt" postal-address cst->ast)
(module+ test
(require rackunit)
(check-equal?
(parse-postal-address/cst "A B. C\r\n1 D\r\nE, FF 99999\r\n")
'(postal-address
(: (name-part
(/ 0 (: (* ((: (personal-part (/ 0 (first-name (* ((ALPHA (/ 0 65))))))) (SP 32))
(: (personal-part (/ 1 (: (initial (ALPHA (/ 0 66))) "."))) (SP 32))))
(last-name (* ((ALPHA (/ 0 67)))))
(* ())
(CRLF (: (CR 13) (LF 10))))))
(street
(: (* ())
(house-num (* ((/ 0 (DIGIT 49)))))
(SP 32)
(street-name (* ((VCHAR 68))))
(CRLF (: (CR 13) (LF 10)))))
(zip-part
(: (town-name (* ((/ 0 (ALPHA (/ 0 69))))))
","
(SP 32)
(state (* ((ALPHA (/ 0 70)) (ALPHA (/ 0 70)))))
(* ((SP 32)))
(zip-code
(: (* ((DIGIT 57) (DIGIT 57) (DIGIT 57) (DIGIT 57) (DIGIT 57))) (* ())))
(CRLF (: (CR 13) (LF 10))))))))
(check-equal?
(parse-postal-address "A B. C\r\n1 D\r\nE, FF 99999\r\n")
(postal-address (name (list "A" (initial "B"))
"C"
#f)
(street #f "1" "D")
(area "E" "FF" "99999")))
(check-equal?
(parse-postal-address "Charles M. Burns\r\n1000 Mammon\r\nSpringfield, XX 99999\r\n")
(postal-address (name (list "Charles" (initial "M"))
"Burns"
#f)
(street #f "1000" "Mammon")
(area "Springfield" "XX" "99999")))
(check-equal?
(parse-postal-address "Barack H. Obama II\r\n1600 Pennsylvania\r\nWashington, DC 20500\r\n"
#:on-ambiguity (convert-all-results cst->ast))
(list '#s(parse-error #s(range 48 57) 59)
(postal-address (name (list "Barack" (initial "H") "Obama")
"II"
#f)
(street #f "1600" "Pennsylvania")
(area "Washington" "DC" "20500"))
(postal-address (name (list "Barack" (initial "H"))
"Obama"
"II")
(street #f "1600" "Pennsylvania")
(area "Washington" "DC" "20500"))))
(check-equal?
(parse-postal-address "John Doe\r\n3 12 Prospect\r\nSpringfield, XX 99999-4444\r\n"
#:on-ambiguity (convert-all-results cst->ast))
(postal-address (name (list "John")
"Doe"
#f)
(street "3" "12" "Prospect")
(area "Springfield" "XX" "99999-4444"))))
| null | https://raw.githubusercontent.com/tonyg/racket-abnf/1079bc5b30a227f52ac00a84dc3fcd539da5f8db/abnf/scribblings/postal-address.rkt | racket | #lang racket/base
(require abnf)
(require abnf/rfc5234/core)
(require racket/match)
(define-abnf-parser parse-postal-address/cst "postal-address-rules.rkt" postal-address values)
(struct postal-address (name street area) #:prefab)
(struct name (personal-parts last-name suffix) #:prefab)
(struct initial (str) #:prefab)
(struct street (apartment house-number name) #:prefab)
(struct area (town state zip) #:prefab)
(define (cst->ast cst)
(traverse (lambda (walk cst)
(match cst
[`(postal-address (: ,n ,s ,z)) (postal-address (walk n) (walk s) (walk z))]
[`(name-part (/ 0 (: (* ((: ,ps ,_) ...)) ,l ,s ,_)))
(name (map walk ps)
(text l)
(match s
[`(* ()) #f]
[`(* ((: ,_ ,s))) (text s)]))]
[`(name-part (/ 1 (: ,p ,_)))
(name (walk p) #f #f)]
[`(personal-part (/ 0 ,n)) (text n)]
[`(personal-part (/ 1 (: (initial ,i) "."))) (initial (text i))]
[`(street (: ,a ,h ,_ ,n ,_))
(street (match a
[`(* ()) #f]
[`(* ((: ,a ,_))) (text a)])
(text h)
(text n))]
[`(zip-part (: ,t "," ,_ ,s ,_ ,z ,_))
(area (text t) (text s) (text z))]))
cst))
(define-abnf-parser parse-postal-address "postal-address-rules.rkt" postal-address cst->ast)
(module+ test
(require rackunit)
(check-equal?
(parse-postal-address/cst "A B. C\r\n1 D\r\nE, FF 99999\r\n")
'(postal-address
(: (name-part
(/ 0 (: (* ((: (personal-part (/ 0 (first-name (* ((ALPHA (/ 0 65))))))) (SP 32))
(: (personal-part (/ 1 (: (initial (ALPHA (/ 0 66))) "."))) (SP 32))))
(last-name (* ((ALPHA (/ 0 67)))))
(* ())
(CRLF (: (CR 13) (LF 10))))))
(street
(: (* ())
(house-num (* ((/ 0 (DIGIT 49)))))
(SP 32)
(street-name (* ((VCHAR 68))))
(CRLF (: (CR 13) (LF 10)))))
(zip-part
(: (town-name (* ((/ 0 (ALPHA (/ 0 69))))))
","
(SP 32)
(state (* ((ALPHA (/ 0 70)) (ALPHA (/ 0 70)))))
(* ((SP 32)))
(zip-code
(: (* ((DIGIT 57) (DIGIT 57) (DIGIT 57) (DIGIT 57) (DIGIT 57))) (* ())))
(CRLF (: (CR 13) (LF 10))))))))
(check-equal?
(parse-postal-address "A B. C\r\n1 D\r\nE, FF 99999\r\n")
(postal-address (name (list "A" (initial "B"))
"C"
#f)
(street #f "1" "D")
(area "E" "FF" "99999")))
(check-equal?
(parse-postal-address "Charles M. Burns\r\n1000 Mammon\r\nSpringfield, XX 99999\r\n")
(postal-address (name (list "Charles" (initial "M"))
"Burns"
#f)
(street #f "1000" "Mammon")
(area "Springfield" "XX" "99999")))
(check-equal?
(parse-postal-address "Barack H. Obama II\r\n1600 Pennsylvania\r\nWashington, DC 20500\r\n"
#:on-ambiguity (convert-all-results cst->ast))
(list '#s(parse-error #s(range 48 57) 59)
(postal-address (name (list "Barack" (initial "H") "Obama")
"II"
#f)
(street #f "1600" "Pennsylvania")
(area "Washington" "DC" "20500"))
(postal-address (name (list "Barack" (initial "H"))
"Obama"
"II")
(street #f "1600" "Pennsylvania")
(area "Washington" "DC" "20500"))))
(check-equal?
(parse-postal-address "John Doe\r\n3 12 Prospect\r\nSpringfield, XX 99999-4444\r\n"
#:on-ambiguity (convert-all-results cst->ast))
(postal-address (name (list "John")
"Doe"
#f)
(street "3" "12" "Prospect")
(area "Springfield" "XX" "99999-4444"))))
| |
1ce9d95b3d5f7f6921eec110bc980a8242bd1694397ee877bff113886ceaf071 | vernemq/vernemq | vmq_tracer.erl | Copyright 2018 Erlio GmbH Basel Switzerland ( )
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%% @doc This module provides a simple tracing facility for VerneMQ
%% MQTT sessions. The original inspiration for a session tracer came
from fantastic ` recon ' tool and we gratefully
%% borrowed some small bits and pieces from there.
%% @end
-module(vmq_tracer).
-include_lib("stdlib/include/ms_transform.hrl").
-include_lib("vmq_commons/include/vmq_types.hrl").
-behaviour(gen_server).
%% API
-export([
start_link/1,
stop_tracing/0,
start_session_trace/2,
trace_existing_session/0,
rate_tracer/4,
terminate_tracer/0
]).
%% gen_server callbacks
-export([
init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3
]).
%% for adhoc-testing
-export([sim_client/0]).
-define(SERVER, ?MODULE).
-record(state, {
io_server :: pid(),
max_rate :: {non_neg_integer(), non_neg_integer()},
client_id :: client_id(),
mountpoint :: mountpoint(),
payload_limit :: non_neg_integer(),
tracer :: pid(),
%% A map of all the sessions we are currently tracing. The
%% key is the pid of the session
sessions :: list({pid(), reference()}),
%% The pid of the queue corresponding to the client-id and
%% mountpoint we're tracing.
queue :: undefined | pid()
}).
-type state() :: #state{}.
%% Internal type definitions
-type ftuple() :: {io:format(), [term()]}.
-type unprepf() :: [ftuple() | unprepf()].
%%%===================================================================
%%% API
%%%===================================================================
-spec start_link(map()) -> {ok, Pid :: pid()} | ignore | {error, Error :: term()}.
start_link(Opts) ->
gen_server:start_link({local, ?SERVER}, ?MODULE, Opts, []).
trace_existing_session() ->
gen_server:call(?SERVER, trace_existing_session).
start_session_trace(SessionPid, ConnFrame) ->
gen_server:call(?SERVER, {start_session_trace, SessionPid, ConnFrame}).
stop_tracing() ->
gen_server:call(?SERVER, stop_tracing).
terminate_tracer() ->
gen_server:cast(?SERVER, terminate_tracer).
%%%===================================================================
%%% gen_server callbacks
%%%===================================================================
%%--------------------------------------------------------------------
@private
%% @doc
%% Initializes the server
%%
) - > { ok , State } |
{ ok , State , Timeout } |
%% ignore |
%% {stop, Reason}
%% @end
%%--------------------------------------------------------------------
init(#{
io_server := IoServer,
max_rate := {Max, Time} = MaxRate,
mountpoint := Mountpoint,
client_id := ClientId,
payload_limit := PayloadLimit
}) ->
process_flag(trap_exit, true),
TraceFun =
fun(SessionPid, Frame) ->
maybe_init_session_trace(SessionPid, Frame, ClientId)
end,
Tracer = spawn_link(?SERVER, rate_tracer, [Max, Time, 0, os:timestamp()]),
vmq_config:set_env(trace_fun, TraceFun, false),
{ok, #state{
io_server = IoServer,
max_rate = MaxRate,
client_id = ClientId,
mountpoint = Mountpoint,
payload_limit = PayloadLimit,
tracer = Tracer,
sessions = [],
queue = undefined
}}.
%%--------------------------------------------------------------------
@private
%% @doc
%% Handling call messages
%%
, From , State ) - >
%% {reply, Reply, State} |
{ reply , Reply , State , Timeout } |
{ noreply , State } |
{ noreply , State , Timeout } |
%% {stop, Reason, Reply, State} |
%% {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
handle_call(
trace_existing_session,
_From,
#state{
client_id = ClientId,
io_server = IoServer,
mountpoint = MP
} = State
) ->
SId = {MP, ClientId},
NewState =
case vmq_queue_sup_sup:get_queue_pid(SId) of
not_found ->
io:format(IoServer, "~s No sessions found for client \"~s\"~n", [
iso8601(), ClientId
]),
State;
QPid when is_pid(QPid) ->
case vmq_queue:get_sessions(QPid) of
[] ->
io:format(IoServer, "~s No sessions found for client \"~s\"~n", [
iso8601(), ClientId
]),
State;
SPids ->
io:format(
IoServer,
"~s Starting trace for ~p existing sessions for client \"~s\" with PIDs~n"
" ~p~n",
[iso8601(), length(SPids), ClientId, SPids]
),
begin_session_trace(SPids, State)
end
end,
{reply, ok, NewState};
handle_call(
{start_session_trace, SessionPid, ConnFrame},
_From,
#state{
client_id = ClientId,
payload_limit = PayloadLimit,
io_server = IoServer
} = State
) ->
Opts = #{payload_limit => PayloadLimit},
SId = {"", ClientId},
io:format(IoServer, "~s New session with PID ~p found for client \"~s\"~n", [
iso8601(), SessionPid, ClientId
]),
{F, D} = prepf(format_frame(to, SessionPid, os:timestamp(), SId, ConnFrame, Opts)),
io:format(IoServer, F, D),
NewState = begin_session_trace([SessionPid], State),
{reply, ok, NewState};
handle_call(stop_tracing, _From, State) ->
How to disable tracing completely has been borrowed from
%% the `recon` trace tool.
erlang:trace(all, false, [all]),
erlang:trace_pattern({'_', '_', '_'}, false, [local, meta, call_count, call_time]),
erlang:trace_pattern({'_', '_', '_'}, false, []),
{reply, ok, State}.
%%--------------------------------------------------------------------
@private
%% @doc
%% Handling cast messages
%%
@spec handle_cast(Msg , State ) - > { noreply , State } |
{ noreply , State , Timeout } |
%% {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
%%
handle_cast(terminate_tracer, #state{io_server = IoServer} = State) ->
io:format(IoServer, "~s Trace terminated by external action.~n", [iso8601()]),
{stop, normal, State};
handle_cast(_Msg, State) ->
{noreply, State}.
%%--------------------------------------------------------------------
@private
%% @doc
%% Handling all non call/cast messages
%%
, State ) - > { noreply , State } |
{ noreply , State , Timeout } |
%% {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
handle_info(
{'DOWN', _MRef, process, Pid, _},
#state{
io_server = IoServer,
client_id = ClientId
} = State
) ->
io:format(IoServer, "~s ~p Trace session for ~s stopped~n", [iso8601(), Pid, ClientId]),
State1 = remove_session_pid(Pid, State),
{noreply, State1};
handle_info(
Trace,
#state{
io_server = IoServer,
sessions = Sessions
} = State
) when
is_tuple(Trace),
element(1, Trace) =:= trace_ts
->
TracePid = get_pid_from_trace(extract_info(Trace)),
case is_trace_active(TracePid, Sessions) of
true ->
{Format, Data} = format_trace(Trace, State),
io:format(IoServer, Format, Data),
State1 = handle_trace(Trace, State),
{noreply, State1};
false ->
{noreply, State}
end;
handle_info(
{'EXIT', Tracer, normal},
#state{
tracer = Tracer,
io_server = IoServer
} = State
) ->
io:format(IoServer, "~s Trace rate limit triggered.~n", [iso8601()]),
{stop, normal, State}.
%%--------------------------------------------------------------------
@private
%% @doc
%% This function is called by a gen_server when it is about to
%% terminate. It should be the opposite of Module:init/1 and do any
%% necessary cleaning up. When it returns, the gen_server terminates
with . The return value is ignored .
%%
, State ) - > void ( )
%% @end
%%--------------------------------------------------------------------
terminate(_Reason, _State) ->
vmq_config:set_env(trace_fun, undefined, false),
recon_trace:clear(),
ok.
%%--------------------------------------------------------------------
@private
%% @doc
%% Convert process state when code is changed
%%
, State , Extra ) - > { ok , NewState }
%% @end
%%--------------------------------------------------------------------
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%%===================================================================
Internal functions
%%%===================================================================
-spec handle_trace(term(), state()) -> state().
handle_trace(
{trace, Pid, return_from, {vmq_plugin, all_till_ok, 2}, Ret},
#state{
client_id = ClientId,
mountpoint = Mountpoint,
io_server = IoServer,
tracer = Tracer
} = State
) ->
case Ret of
ok ->
State;
{ok, Payload} when is_binary(Payload) -> State;
{ok, Modifiers} ->
%% The only hook returning a subscriber_id as a modifier
%% is the `auth_on_register` hook, so it should be fine to
%% 'react' to it here even though this code is common to
%% all return values from hooks.
case proplists:get_value(subscriber_id, Modifiers, undefined) of
undefined ->
State;
{Mountpoint, ClientId} ->
%% Modified but still the same, keep tracing!
State;
{NewMountpoint, NewClientId} ->
{F, D} =
case {NewMountpoint, NewClientId} of
{Mountpoint, _} ->
{"~p client id for ~s modified to ~p, stopping trace~n", [
Pid, ClientId, NewMountpoint
]};
{_, ClientId} ->
{"~p mountpoint for ~s modified to ~p, stopping trace~n", [
Pid, ClientId, NewMountpoint
]};
{_, _} ->
{
"~p mountpoint and client id for ~s modified to ~p and ~p,"
" stopping trace~n",
[Pid, ClientId, NewMountpoint, NewClientId]
}
end,
io:format(IoServer, F, D),
State1 = remove_session_pid(Pid, State),
setup_trace(get_trace_pids(State1), Tracer),
State1
end;
_ ->
State
end;
handle_trace(_, State) ->
State.
extract_info(TraceMsg) ->
case tuple_to_list(TraceMsg) of
[trace_ts, Pid, Type | Info] ->
{TraceInfo, [Timestamp]} = lists:split(length(Info) - 1, Info),
{Type, Pid, Timestamp, TraceInfo};
[trace, Pid, Type | TraceInfo] ->
{Type, Pid, os:timestamp(), TraceInfo}
end.
get_pid_from_trace({_, Pid, _, _}) ->
Pid.
is_trace_active(Pid, Sessions) ->
lists:keymember(Pid, 1, Sessions).
maybe_init_session_trace(SessionPid, #mqtt5_connect{client_id = ClientId} = ConnFrame, ClientId) ->
start_session_trace(SessionPid, ConnFrame);
maybe_init_session_trace(SessionPid, #mqtt_connect{client_id = ClientId} = ConnFrame, ClientId) ->
start_session_trace(SessionPid, ConnFrame);
maybe_init_session_trace(_, _, _) ->
ignored.
-spec begin_session_trace(list(pid()), state()) -> state().
begin_session_trace(
SessionPids,
#state{tracer = Tracer} = State
) ->
case add_session_pids(SessionPids, State) of
State ->
%% Dirty way to detect there's nothing new to trace
%% (sessions are sorted).
State;
State1 ->
setup_trace(get_trace_pids(State1), Tracer),
State1
end.
setup_trace(TracePids, Tracer) ->
TSpecs =
[
{vmq_parser, serialise, vmq_m4_parser_serialize_ms()},
{vmq_parser_mqtt5, serialise, vmq_m5_parser_serialize_ms()},
{vmq_mqtt_fsm, connected, vmq_mqtt_fsm_connected_ms()},
{vmq_mqtt5_fsm, connected, vmq_mqtt5_fsm_connected_ms()},
{vmq_plugin, all_till_ok, vmq_plugin_hooks_ms()}
],
MatchOpts = [local],
_Matches =
[
begin
Arity = '_',
Spec = Args,
erlang:trace_pattern({Mod, Fun, Arity}, Spec, MatchOpts)
end
|| {Mod, Fun, Args} <- TSpecs
],
[
begin
ignore if the process has died and : trace
%% throws a badarg.
try
erlang:trace(PSpec, true, [call, timestamp, {tracer, Tracer}])
catch
error:badarg -> ok
end
end
|| PSpec <- TracePids
].
-spec add_session_pids(list(pid()), state()) -> state().
add_session_pids(SessionPids, #state{sessions = Sessions} = State) ->
NewSessions = [{Pid, monitor(process, Pid)} || Pid <- SessionPids],
State#state{sessions = lists:ukeysort(1, NewSessions ++ Sessions)}.
remove_session_pid(Pid, #state{sessions = Sessions} = State) ->
{_, MRef} = lists:keyfind(Pid, 1, Sessions),
demonitor(MRef, [flush]),
State#state{sessions = lists:keydelete(Pid, 1, Sessions)}.
-spec get_trace_pids(state()) -> list(pid()).
get_trace_pids(#state{sessions = Sessions, queue = QueuePid}) ->
case QueuePid of
undefined ->
[P || {P, _} <- Sessions];
_ ->
SessionPids = [P || {P, _} <- Sessions],
[QueuePid | SessionPids]
end.
%% This rate limit function was borrowed almost as is from the recon
trace tool developed by .
rate_tracer(Max, Time, Count, Start) ->
receive
Msg ->
vmq_tracer ! Msg,
Now = os:timestamp(),
Delay = timer:now_diff(Now, Start) div 1000,
if
Delay > Time -> rate_tracer(Max, Time, 0, Now);
Max > Count -> rate_tracer(Max, Time, Count + 1, Start);
Max =:= Count -> exit(normal)
end
end.
vmq_m4_parser_serialize_ms() ->
dbg:fun2ms(
fun
([#mqtt_connect{}]) -> ok;
([#mqtt_connack{}]) -> ok;
([#mqtt_publish{}]) -> ok;
([#mqtt_puback{}]) -> ok;
([#mqtt_pubrec{}]) -> ok;
([#mqtt_pubrel{}]) -> ok;
([#mqtt_pubcomp{}]) -> ok;
([#mqtt_subscribe{}]) -> ok;
([#mqtt_unsubscribe{}]) -> ok;
([#mqtt_suback{}]) -> ok;
([#mqtt_unsuback{}]) -> ok;
([#mqtt_pingreq{}]) -> ok;
([#mqtt_pingresp{}]) -> ok;
([#mqtt_disconnect{}]) -> ok
end
).
vmq_m5_parser_serialize_ms() ->
dbg:fun2ms(
fun
([#mqtt5_connect{}]) -> ok;
([#mqtt5_connack{}]) -> ok;
([#mqtt5_publish{}]) -> ok;
([#mqtt5_puback{}]) -> ok;
([#mqtt5_pubrec{}]) -> ok;
([#mqtt5_pubrel{}]) -> ok;
([#mqtt5_pubcomp{}]) -> ok;
([#mqtt5_subscribe{}]) -> ok;
([#mqtt5_unsubscribe{}]) -> ok;
([#mqtt5_suback{}]) -> ok;
([#mqtt5_unsuback{}]) -> ok;
([#mqtt5_pingreq{}]) -> ok;
([#mqtt5_pingresp{}]) -> ok;
([#mqtt5_disconnect{}]) -> ok;
([#mqtt5_auth{}]) -> ok
end
).
vmq_mqtt_fsm_connected_ms() ->
dbg:fun2ms(
fun
([#mqtt_connect{}, _]) -> ok;
([#mqtt_connack{}, _]) -> ok;
([#mqtt_publish{}, _]) -> ok;
([#mqtt_puback{}, _]) -> ok;
([#mqtt_pubrec{}, _]) -> ok;
([#mqtt_pubrel{}, _]) -> ok;
([#mqtt_pubcomp{}, _]) -> ok;
([#mqtt_subscribe{}, _]) -> ok;
([#mqtt_unsubscribe{}, _]) -> ok;
([#mqtt_suback{}, _]) -> ok;
([#mqtt_unsuback{}, _]) -> ok;
([#mqtt_pingreq{}, _]) -> ok;
([#mqtt_pingresp{}, _]) -> ok;
([#mqtt_disconnect{}, _]) -> ok
end
).
vmq_mqtt5_fsm_connected_ms() ->
dbg:fun2ms(
fun
([#mqtt5_connect{}, _]) -> ok;
([#mqtt5_connack{}, _]) -> ok;
([#mqtt5_publish{}, _]) -> ok;
([#mqtt5_puback{}, _]) -> ok;
([#mqtt5_pubrec{}, _]) -> ok;
([#mqtt5_pubrel{}, _]) -> ok;
([#mqtt5_pubcomp{}, _]) -> ok;
([#mqtt5_subscribe{}, _]) -> ok;
([#mqtt5_unsubscribe{}, _]) -> ok;
([#mqtt5_suback{}, _]) -> ok;
([#mqtt5_unsuback{}, _]) -> ok;
([#mqtt5_pingreq{}, _]) -> ok;
([#mqtt5_pingresp{}, _]) -> ok;
([#mqtt5_disconnect{}, _]) -> ok;
([#mqtt5_auth{}, _]) -> ok
end
).
vmq_plugin_hooks_ms() ->
[
{[auth_on_register, '_'], [], [{return_trace}]},
{[auth_on_publish, '_'], [], [{return_trace}]},
{[auth_on_subscribe, '_'], [], [{return_trace}]},
{[auth_on_register_m5, '_'], [], [{return_trace}]},
{[auth_on_publish_m5, '_'], [], [{return_trace}]},
{[auth_on_subscribe_m5, '_'], [], [{return_trace}]},
{[on_auth_m5, '_'], [], [{return_trace}]}
%%{[on_register,'_'],[],[{return_trace}]},
{ [ on_publish,'_'],[],[{return_trace } ] } ,
%%{[on_deliver,'_'],[],[{return_trace}]},
].
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
Trace formatting
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-spec format_trace(term(), state()) -> ftuple().
format_trace(Trace, #state{
client_id = ClientId,
mountpoint = Mountpoint,
payload_limit = PayloadLimit
}) ->
SId = {Mountpoint, ClientId},
Opts = #{
payload_limit => PayloadLimit,
sid => SId
},
Unprepared =
case extract_info(Trace) of
{call, Pid, Timestamp, [{vmq_parser, serialise, [Msg]}]} ->
format_frame(from, Pid, Timestamp, SId, Msg, Opts);
{call, Pid, Timestamp, [{vmq_mqtt_fsm, connected, [Msg, _]}]} ->
format_frame(to, Pid, Timestamp, SId, Msg, Opts);
{call, Pid, Timestamp, [{vmq_parser_mqtt5, serialise, [Msg]}]} ->
format_frame(from, Pid, Timestamp, SId, Msg, Opts);
{call, Pid, Timestamp, [{vmq_mqtt5_fsm, connected, [Msg, _]}]} ->
format_frame(to, Pid, Timestamp, SId, Msg, Opts);
{call, Pid, Timestamp, [{vmq_plugin, all_till_ok, [Hook, Args]}]} ->
format_all_till_ok(Hook, Pid, Timestamp, Args, Opts);
{return_from, Pid, Timestamp, [{vmq_plugin, all_till_ok, 2}, Ret]} ->
format_all_till_ok_ret(Ret, Pid, Timestamp, Opts);
_ ->
format_unknown_trace(Trace)
end,
prepf(lists:flatten(Unprepared)).
format_all_till_ok(Hook, Pid, Timestamp, Args, Opts) ->
[ftimestamp(Timestamp), r(" "), fpid(Pid), r(" "), format_all_till_ok_(Hook, Args, Opts)].
format_all_till_ok_(
auth_on_register = Hook,
[Peer, SubscriberId, User, Password, CleanSession],
_Opts
) ->
{"Calling ~p(~p,~p,~s,~s,~p) ~n", [Hook, Peer, SubscriberId, User, Password, CleanSession]};
format_all_till_ok_(
auth_on_publish = Hook,
[User, SubscriberId, QoS, Topic, Payload, IsRetain],
_Opts
) ->
{
"Calling ~p(~s,~p,~p,~s,~p) with payload:~n"
" ~s~n",
[Hook, User, SubscriberId, QoS, jtopic(Topic), IsRetain, Payload]
};
format_all_till_ok_(auth_on_subscribe = Hook, [User, SubscriberId, Topics], _Opts) ->
[{"Calling ~p(~s,~p) with topics:~n", [Hook, User, SubscriberId]}, ftopics(Topics)];
format_all_till_ok_(
auth_on_register_m5 = Hook,
[Peer, SubscriberId, User, Password, CleanStart, Props],
Opts
) ->
[
{"Calling ~p(~p,~p,~s,~s,~p) ~n", [Hook, Peer, SubscriberId, User, Password, CleanStart]},
format_props(Props, Opts)
];
format_all_till_ok_(
auth_on_publish_m5 = Hook,
[User, SubscriberId, QoS, Topic, Payload, IsRetain, Props],
Opts
) ->
[
{
"Calling ~p(~s,~p,~p,~s,~p) with payload:~n"
" ~s~n",
[Hook, User, SubscriberId, QoS, jtopic(Topic), IsRetain, Payload]
},
format_props(Props, Opts)
];
format_all_till_ok_(auth_on_subscribe_m5 = Hook, [User, SubscriberId, Topics, Props], Opts) ->
[
{"Calling ~p(~s,~p) with topics:~n", [Hook, User, SubscriberId]},
ftopics(Topics),
format_props(Props, Opts)
].
format_all_till_ok_ret(Ret, Pid, Timestamp, Opts) ->
[ftimestamp(Timestamp), r(" "), fpid(Pid), r(" "), format_all_till_ok_ret_(Ret, Opts)].
format_all_till_ok_ret_(ok, _Opts) ->
{"Hook returned \"ok\"~n", []};
format_all_till_ok_ret_({ok, []}, _Opts) ->
{"Hook returned \"ok\"~n", []};
format_all_till_ok_ret_({ok, Payload}, #{payload_limit := Limit}) when is_binary(Payload) ->
{
"Hook returned \"ok\" with modified payload:~n"
" ~s~n",
[trunc_payload(Payload, Limit)]
};
format_all_till_ok_ret_({ok, Modifiers}, _Opts) ->
[
{"Hook returned \"ok\" with modifiers:~n", []},
fmodifiers(Modifiers)
];
format_all_till_ok_ret_(Other, _Opts) ->
{"Hook returned ~p~n", [Other]}.
format_frame(Direction, Pid, Timestamp, SId, M, Opts) ->
[
ftimestamp(Timestamp),
r(" "),
fpid(Pid),
r(" "),
dir(Direction),
r(" "),
sid(SId),
r(" "),
format_frame_(M, Opts)
].
format_props(#{} = M, _Opts) when map_size(M) =:= 0 ->
[];
format_props(undefined, _Opts) ->
[];
format_props(Props, _Opts) ->
{" with properties: ~p~n", [Props]}.
format_lwt(undefined, _Opts) ->
[];
format_lwt(
#mqtt5_lwt{
will_properties = Props,
will_retain = Retain,
will_qos = QoS,
will_topic = Topic,
will_msg = Msg
},
Opts
) ->
format_lwt(Retain, QoS, Topic, Msg, Props, Opts).
format_lwt(Retain, _QoS, _Topic, _Msg, _Props, _Opts) when
Retain =:= undefined
->
[];
format_lwt(Retain, QoS, Topic, Msg, Props, #{payload_limit := Limit} = Opts) ->
[
{
" with LWT(wr: ~p, wq: ~p, wt: ~s) with payload:~n"
" ~s~n",
[Retain, QoS, jtopic(Topic), trunc_payload(Msg, Limit)]
},
format_props(Props, Opts)
].
format_frame_(#mqtt_pingreq{}, _) ->
{"PINGREQ()~n", []};
format_frame_(#mqtt_pingresp{}, _) ->
{"PINGRESP()~n", []};
format_frame_(
#mqtt_connect{
proto_ver = Ver,
username = Username,
password = Password,
clean_session = CleanSession,
keep_alive = KeepAlive,
client_id = ClientId,
will_retain = WillRetain,
will_qos = WillQoS,
will_topic = WillTopic,
will_msg = WillMsg
},
Opts
) ->
[
{"CONNECT(c: ~s, v: ~p, u: ~s, p: ~s, cs: ~p, ka: ~p)~n", [
ClientId, Ver, Username, Password, CleanSession, KeepAlive
]},
format_lwt(WillRetain, WillQoS, WillTopic, WillMsg, undefined, Opts)
];
format_frame_(#mqtt_connack{session_present = SP, return_code = RC}, _) ->
{"CONNACK(sp: ~p, rc: ~p)~n", [fflag(SP), RC]};
format_frame_(
#mqtt_publish{
message_id = MId,
topic = Topic,
qos = QoS,
retain = Retain,
dup = Dup,
payload = Payload
},
#{payload_limit := Limit}
) ->
{
"PUBLISH(d~p, q~p, r~p, m~p, \"~s\") with payload:~n"
" ~s~n",
[fflag(Dup), QoS, fflag(Retain), fmid(MId), jtopic(Topic), trunc_payload(Payload, Limit)]
};
format_frame_(#mqtt_puback{message_id = MId}, _) ->
{"PUBACK(m~p)~n", [fmid(MId)]};
format_frame_(#mqtt_pubrec{message_id = MId}, _) ->
{"PUBREC(m~p)~n", [fmid(MId)]};
format_frame_(#mqtt_pubrel{message_id = MId}, _) ->
{"PUBREL(m~p)~n", [fmid(MId)]};
format_frame_(#mqtt_pubcomp{message_id = MId}, _) ->
{"PUBCOMP(m~p)~n", [fmid(MId)]};
format_frame_(#mqtt_subscribe{message_id = MId, topics = Topics}, _) ->
[{"SUBSCRIBE(m~p) with topics:~n", [fmid(MId)]}, ftopics(Topics)];
format_frame_(#mqtt_suback{message_id = MId, qos_table = QoSTable}, _) ->
{"SUBACK(m~p, qt~p)~n", [fmid(MId), QoSTable]};
format_frame_(#mqtt_unsubscribe{message_id = MId}, _) ->
{"UNSUBSCRIBE(m~p)~n", [fmid(MId)]};
format_frame_(#mqtt_unsuback{message_id = MId}, _) ->
{"UNSUBACK(m~p)~n", [fmid(MId)]};
format_frame_(#mqtt_disconnect{}, _) ->
{"DISCONNECT()~n", []};
format_frame_(#mqtt5_pingreq{}, _) ->
{"PINGREQ()~n", []};
format_frame_(#mqtt5_pingresp{}, _) ->
{"PINGRESP()~n", []};
format_frame_(
#mqtt5_connect{
proto_ver = Ver,
username = Username,
password = Password,
clean_start = CleanStart,
keep_alive = KeepAlive,
client_id = ClientId,
lwt = LWT,
properties = Props
},
Opts
) ->
[
{"CONNECT(c: ~s, v: ~p, u: ~s, p: ~s, cs: ~p, ka: ~p)~n", [
ClientId, Ver, Username, Password, CleanStart, KeepAlive
]},
format_props(Props, Opts),
format_lwt(LWT, Opts)
];
format_frame_(
#mqtt5_connack{
session_present = SP,
reason_code = RC,
properties = Props
},
Opts
) ->
[{"CONNACK(sp: ~p, rc: ~p(~p))~n", [fflag(SP), rc2rcn(RC), RC]}, format_props(Props, Opts)];
format_frame_(
#mqtt5_publish{
message_id = MId,
topic = Topic,
qos = QoS,
retain = Retain,
dup = Dup,
payload = Payload,
properties = Props
},
#{payload_limit := Limit} = Opts
) ->
[
{
"PUBLISH(d~p, q~p, r~p, m~p, \"~s\") with payload:~n"
" ~s~n",
[
fflag(Dup),
QoS,
fflag(Retain),
fmid(MId),
jtopic(Topic),
trunc_payload(Payload, Limit)
]
},
format_props(Props, Opts)
];
format_frame_(#mqtt5_puback{message_id = MId, reason_code = RC, properties = Props}, Opts) ->
[
{"PUBACK(m~p, rc: ~p(~p))~n", [fmid(MId), rc2rcn(RC), RC]},
format_props(Props, Opts)
];
format_frame_(#mqtt5_pubrec{message_id = MId, reason_code = RC, properties = Props}, Opts) ->
[
{"PUBREC(m~p), rc: ~p(~p))~n", [fmid(MId), rc2rcn(RC), RC]},
format_props(Props, Opts)
];
format_frame_(#mqtt5_pubrel{message_id = MId, reason_code = RC, properties = Props}, Opts) ->
[
{"PUBREL(m~p), rc: ~p(~p))~n", [fmid(MId), rc2rcn(RC), RC]},
format_props(Props, Opts)
];
format_frame_(#mqtt5_pubcomp{message_id = MId, reason_code = RC, properties = Props}, Opts) ->
[
{"PUBCOMP(m~p), rc: ~p(~p))~n", [fmid(MId), rc2rcn(RC), RC]},
format_props(Props, Opts)
];
format_frame_(#mqtt5_subscribe{message_id = MId, topics = Topics, properties = Props}, Opts) ->
[
{"SUBSCRIBE(m~p) with topics:~n", [fmid(MId)]},
ftopics(Topics),
format_props(Props, Opts)
];
format_frame_(#mqtt5_suback{message_id = MId, reason_codes = RCs, properties = Props}, Opts) ->
[
{"SUBACK(m~p, reason_codes:~p)~n", [fmid(MId), RCs]},
format_props(Props, Opts)
];
format_frame_(#mqtt5_unsubscribe{message_id = MId, topics = Topics, properties = Props}, Opts) ->
[
{"UNSUBSCRIBE(m~p) with topics:~n", [fmid(MId)]},
ftopics(Topics),
format_props(Props, Opts)
];
format_frame_(#mqtt5_unsuback{message_id = MId, reason_codes = RCs, properties = Props}, Opts) ->
[
{"UNSUBACK(m~p, reason_codes:~p)~n", [fmid(MId), RCs]},
format_props(Props, Opts)
];
format_frame_(#mqtt5_disconnect{reason_code = RC, properties = Props}, Opts) ->
[
{"DISCONNECT(rc:~p(~p))~n", [disconnectrc2rcn(RC), RC]},
format_props(Props, Opts)
];
format_frame_(#mqtt5_auth{reason_code = RC, properties = Props}, Opts) ->
[
{"AUTH(rc:~p(~p))~n", [rc2rcn(RC), RC]},
format_props(Props, Opts)
];
format_frame_(Unknown, _) ->
{io_lib:format("UNKNOWN: ~p~n", [Unknown]), []}.
trunc_payload(Payload, Limit) when byte_size(Payload) =< Limit ->
Payload;
trunc_payload(Payload, Limit) ->
<<Truncated:Limit/binary, _/binary>> = Payload,
<<Truncated/binary, " (truncated)">>.
takes a list of possibly nested { FormatString , Data } { tuples and
%% normalizes it into a single {F,D} tuple.
-spec prepf(unprepf()) -> ftuple().
prepf(L) ->
{F, D} =
lists:foldl(
fun
({F, D}, {FAcc, DAcc}) ->
{[F | FAcc], [D | DAcc]};
(S, {FAcc, DAcc}) when is_list(S) ->
{[S | FAcc], DAcc}
end,
{[], []},
lists:flatten(L)
),
{lists:concat(lists:reverse(F)), lists:concat(lists:reverse(D))}.
r(S) ->
{S, []}.
fmid(undefined) -> 0;
fmid(Mid) -> Mid.
fflag(1) -> 1;
fflag(0) -> 0;
fflag(true) -> 1;
fflag(false) -> 0.
jtopic(T) when is_list(T) ->
erlang:iolist_to_binary(vmq_topic:unword(T)).
ftopics(Topics) ->
lists:foldl(
fun
({Topic, QoS}, Acc) when is_integer(QoS), is_list(Topic) ->
[{" q:~p, t: \"~s\"~n", [QoS, jtopic(Topic)]} | Acc];
({Topic, {QoS, SubOpts}}, Acc) ->
NL = maps:get(no_local, SubOpts, undefined),
RAP = maps:get(rap, SubOpts, undefined),
RH = maps:get(retain_handling, SubOpts, undefined),
[
{
" q:~p, no_local:~p, rap:~p, rh:~p~n"
" t: \"~s\"~n",
[QoS, NL, RAP, RH, jtopic(Topic)]
}
| Acc
];
(
#mqtt5_subscribe_topic{
topic = Topic,
qos = QoS,
no_local = NL,
rap = RAP,
retain_handling = RH
},
Acc
) ->
[
{
" q:~p, no_local:~p, rap:~p, rh:~p~n"
" t: \"~s\"~n",
[QoS, NL, RAP, RH, jtopic(Topic)]
}
| Acc
]
end,
[],
Topics
).
fmodifiers(Modifiers) ->
lists:foldl(
fun
({retry_interval, Val}, Acc) ->
[{" retry_interval: ~pms~n", [Val]} | Acc];
({upgrade_qos, Val}, Acc) ->
[{" upgrade_qos: ~p~n", [Val]} | Acc];
({max_message_rate, Val}, Acc) ->
[{" max_message_rate: ~p msgs/s~n", [Val]} | Acc];
({max_message_size, Val}, Acc) ->
[{" max_message_size: ~p bytes~n", [Val]} | Acc];
({max_inflight_messages, Val}, Acc) ->
[{" max_inflight_messages: ~p msgs~n", [Val]} | Acc];
({clean_session, Val}, Acc) ->
[{" clean_session: ~p~n", [Val]} | Acc];
(V, Acc) ->
[{" ~p~n", [V]} | Acc]
end,
[],
Modifiers
).
fpid(Pid) ->
{"~p", [Pid]}.
ftimestamp(Timestamp) ->
Iso8601Formatted = iso8601(Timestamp),
{"~s", [Iso8601Formatted]}.
%% @doc Convert a `os:timestamp()' or a calendar-style `{date(), time()}'
tuple to an ISO 8601 formatted binary . Note that this function always
%% returns a binary with no offset (i.e., ending in "Z").
iso8601() ->
Timestamp = os:timestamp(),
iso8601(Timestamp).
iso8601({_, _, _} = Timestamp) ->
iso8601(calendar:now_to_datetime(Timestamp));
iso8601({{Y, Mo, D}, {H, Mn, S}}) when is_float(S) ->
FmtStr = "~4.10.0B-~2.10.0B-~2.10.0BT~2.10.0B:~2.10.0B:~9.6.0fZ",
IsoStr = io_lib:format(FmtStr, [Y, Mo, D, H, Mn, S]),
list_to_binary(IsoStr);
iso8601({{Y, Mo, D}, {H, Mn, S}}) ->
FmtStr = "~4.10.0B-~2.10.0B-~2.10.0BT~2.10.0B:~2.10.0B:~2.10.0BZ",
IsoStr = io_lib:format(FmtStr, [Y, Mo, D, H, Mn, S]),
list_to_binary(IsoStr).
sid({"", CId}) ->
{"CID: \"~s\"", [CId]};
sid({MP, CId}) ->
{"MP: \"~s\" CID: \"~s\"", [MP, CId]}.
dir(from) -> {"MQTT SEND:", []};
dir(to) -> {"MQTT RECV:", []}.
format_unknown_trace(V) ->
[{"~s Unknown trace! ~p~n", [iso8601(), V]}].
sim_client() ->
Connect = packetv5:gen_connect("simclient", [{keepalive, 60}]),
Connack = packetv5:gen_connack(0, 0, #{}),
{ok, S} = packetv5:do_client_connect(Connect, Connack, [{port, 1883}]),
Topic = <<"sim/topic">>,
Subscribe = packetv5:gen_subscribe(
77,
[packetv5:gen_subtopic(Topic, 0)],
#{
p_user_property => [
{<<"key1">>, <<"val1">>},
{<<"key2">>, <<"val2">>}
]
}
),
ok = gen_tcp:send(S, Subscribe),
SubAck = packetv5:gen_suback(77, [0], #{}),
ok = packetv5:expect_frame(S, SubAck),
Pub = packetv5:gen_publish(
Topic,
0,
<<"simmsg">>,
[
{properties, #{
p_user_property =>
[
{<<"key1">>, <<"val1">>},
{<<"key1">>, <<"val2">>},
{<<"key2">>, <<"val2">>}
]
}}
]
),
ok = gen_tcp:send(S, Pub).
rc2rcn(RC) ->
vmq_parser_mqtt5:rc2rcn(RC).
disconnectrc2rcn(?M5_NORMAL_DISCONNECT) ->
?NORMAL_DISCONNECT;
disconnectrc2rcn(RC) ->
rc2rcn(RC).
| null | https://raw.githubusercontent.com/vernemq/vernemq/234d253250cb5371b97ebb588622076fdabc6a5f/apps/vmq_server/src/vmq_tracer.erl | erlang |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@doc This module provides a simple tracing facility for VerneMQ
MQTT sessions. The original inspiration for a session tracer came
borrowed some small bits and pieces from there.
@end
API
gen_server callbacks
for adhoc-testing
A map of all the sessions we are currently tracing. The
key is the pid of the session
The pid of the queue corresponding to the client-id and
mountpoint we're tracing.
Internal type definitions
===================================================================
API
===================================================================
===================================================================
gen_server callbacks
===================================================================
--------------------------------------------------------------------
@doc
Initializes the server
ignore |
{stop, Reason}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Handling call messages
{reply, Reply, State} |
{stop, Reason, Reply, State} |
{stop, Reason, State}
@end
--------------------------------------------------------------------
the `recon` trace tool.
--------------------------------------------------------------------
@doc
Handling cast messages
{stop, Reason, State}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Handling all non call/cast messages
{stop, Reason, State}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
This function is called by a gen_server when it is about to
terminate. It should be the opposite of Module:init/1 and do any
necessary cleaning up. When it returns, the gen_server terminates
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Convert process state when code is changed
@end
--------------------------------------------------------------------
===================================================================
===================================================================
The only hook returning a subscriber_id as a modifier
is the `auth_on_register` hook, so it should be fine to
'react' to it here even though this code is common to
all return values from hooks.
Modified but still the same, keep tracing!
Dirty way to detect there's nothing new to trace
(sessions are sorted).
throws a badarg.
This rate limit function was borrowed almost as is from the recon
{[on_register,'_'],[],[{return_trace}]},
{[on_deliver,'_'],[],[{return_trace}]},
normalizes it into a single {F,D} tuple.
@doc Convert a `os:timestamp()' or a calendar-style `{date(), time()}'
returns a binary with no offset (i.e., ending in "Z"). | Copyright 2018 Erlio GmbH Basel Switzerland ( )
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
from fantastic ` recon ' tool and we gratefully
-module(vmq_tracer).
-include_lib("stdlib/include/ms_transform.hrl").
-include_lib("vmq_commons/include/vmq_types.hrl").
-behaviour(gen_server).
-export([
start_link/1,
stop_tracing/0,
start_session_trace/2,
trace_existing_session/0,
rate_tracer/4,
terminate_tracer/0
]).
-export([
init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3
]).
-export([sim_client/0]).
-define(SERVER, ?MODULE).
%% Internal gen_server state.
-record(state, {
    %% device all trace output is written to
    io_server :: pid(),
    %% {MaxMessages, TimeWindowMs} rate limit handed to the rate tracer
    max_rate :: {non_neg_integer(), non_neg_integer()},
    %% client id being traced
    client_id :: client_id(),
    %% mountpoint being traced
    mountpoint :: mountpoint(),
    %% max number of payload bytes printed before truncation
    payload_limit :: non_neg_integer(),
    %% pid of the rate-limiting tracer process (spawn_link'ed in init/1)
    tracer :: pid(),
    %% currently traced session pids with their monitor refs
    sessions :: list({pid(), reference()}),
    %% pid of the traced client's queue process, when known
    queue :: undefined | pid()
}).
-type state() :: #state{}.
%% A format tuple: {FormatString, Args} suitable for io:format/3.
-type ftuple() :: {io:format(), [term()]}.
%% Possibly nested list of format tuples, flattened by prepf/1.
-type unprepf() :: [ftuple() | unprepf()].
%% @doc Start the tracer server registered locally as ?SERVER. Opts must
%% contain io_server, max_rate, mountpoint, client_id and payload_limit.
-spec start_link(map()) -> {ok, Pid :: pid()} | ignore | {error, Error :: term()}.
start_link(Opts) ->
    gen_server:start_link({local, ?SERVER}, ?MODULE, Opts, []).
%% @doc Attach the tracer to any already running sessions of the client.
trace_existing_session() ->
    gen_server:call(?SERVER, trace_existing_session).
%% @doc Attach the tracer to a freshly connected session (invoked via the
%% trace_fun installed in init/1).
start_session_trace(SessionPid, ConnFrame) ->
    gen_server:call(?SERVER, {start_session_trace, SessionPid, ConnFrame}).
%% @doc Globally disable all tracing set up by this module.
stop_tracing() ->
    gen_server:call(?SERVER, stop_tracing).
%% @doc Ask the tracer to stop (used by external supervising code).
terminate_tracer() ->
    gen_server:cast(?SERVER, terminate_tracer).
%% @private
%% init(Args) -> {ok, State} |
%%               {ok, State, Timeout} |
%% Initialise the tracer: install a trace_fun in vmq_config so newly
%% connecting sessions for the target client id are picked up, and spawn
%% the linked rate-limiting tracer process that forwards trace messages
%% to this server.
init(#{
    io_server := IoServer,
    max_rate := {Max, Time} = MaxRate,
    mountpoint := Mountpoint,
    client_id := ClientId,
    payload_limit := PayloadLimit
}) ->
    %% Trap exits so the rate tracer terminating (rate limit hit) arrives
    %% as an 'EXIT' message handled in handle_info/2.
    process_flag(trap_exit, true),
    TraceFun =
        fun(SessionPid, Frame) ->
            maybe_init_session_trace(SessionPid, Frame, ClientId)
        end,
    Tracer = spawn_link(?SERVER, rate_tracer, [Max, Time, 0, os:timestamp()]),
    vmq_config:set_env(trace_fun, TraceFun, false),
    {ok, #state{
        io_server = IoServer,
        max_rate = MaxRate,
        client_id = ClientId,
        mountpoint = Mountpoint,
        payload_limit = PayloadLimit,
        tracer = Tracer,
        sessions = [],
        queue = undefined
    }}.
%% @private
%% handle_call(Request, From, State) ->
%%     {reply, Reply, State, Timeout} |
%%     {noreply, State} |
%%     {noreply, State, Timeout} |
%% Handles the synchronous API:
%%   trace_existing_session       - attach to already running sessions;
%%   {start_session_trace, P, F}  - attach to a freshly connected session
%%                                  (F is the CONNECT frame, logged first);
%%   stop_tracing                 - globally disable all tracing.
handle_call(
    trace_existing_session,
    _From,
    #state{
        client_id = ClientId,
        io_server = IoServer,
        mountpoint = MP
    } = State
) ->
    SId = {MP, ClientId},
    NewState =
        case vmq_queue_sup_sup:get_queue_pid(SId) of
            not_found ->
                io:format(IoServer, "~s No sessions found for client \"~s\"~n", [
                    iso8601(), ClientId
                ]),
                State;
            QPid when is_pid(QPid) ->
                case vmq_queue:get_sessions(QPid) of
                    [] ->
                        io:format(IoServer, "~s No sessions found for client \"~s\"~n", [
                            iso8601(), ClientId
                        ]),
                        State;
                    SPids ->
                        io:format(
                            IoServer,
                            "~s Starting trace for ~p existing sessions for client \"~s\" with PIDs~n"
                            "    ~p~n",
                            [iso8601(), length(SPids), ClientId, SPids]
                        ),
                        begin_session_trace(SPids, State)
                end
        end,
    {reply, ok, NewState};
handle_call(
    {start_session_trace, SessionPid, ConnFrame},
    _From,
    #state{
        client_id = ClientId,
        payload_limit = PayloadLimit,
        io_server = IoServer
    } = State
) ->
    Opts = #{payload_limit => PayloadLimit},
    SId = {"", ClientId},
    io:format(IoServer, "~s New session with PID ~p found for client \"~s\"~n", [
        iso8601(), SessionPid, ClientId
    ]),
    %% Log the CONNECT frame that triggered the trace before attaching.
    {F, D} = prepf(format_frame(to, SessionPid, os:timestamp(), SId, ConnFrame, Opts)),
    io:format(IoServer, F, D),
    NewState = begin_session_trace([SessionPid], State),
    {reply, ok, NewState};
handle_call(stop_tracing, _From, State) ->
    %% How to disable tracing completely has been borrowed from
    %% the recon trace tool. (This line had lost its comment prefix,
    %% which broke compilation.)
    erlang:trace(all, false, [all]),
    erlang:trace_pattern({'_', '_', '_'}, false, [local, meta, call_count, call_time]),
    erlang:trace_pattern({'_', '_', '_'}, false, []),
    {reply, ok, State}.
%% @private
%% @spec handle_cast(Msg, State) -> {noreply, State} |
%%                                  {noreply, State, Timeout} |
%% Stop the server when an external action terminates the trace; all
%% other casts are ignored.
handle_cast(terminate_tracer, #state{io_server = IoServer} = State) ->
    io:format(IoServer, "~s Trace terminated by external action.~n", [iso8601()]),
    {stop, normal, State};
handle_cast(_Msg, State) ->
    {noreply, State}.
%% @private
%% handle_info(Info, State) -> {noreply, State} |
%%                             {noreply, State, Timeout} |
%% Handles:
%%   'DOWN'    - a traced session died; drop it from the session map;
%%   trace_ts  - a trace message forwarded by the rate tracer; format and
%%               print it, then inspect it for subscriber-id changes;
%%   'EXIT'    - the linked rate tracer exited normally, i.e. the rate
%%               limit was hit; stop the server.
handle_info(
    {'DOWN', _MRef, process, Pid, _},
    #state{
        io_server = IoServer,
        client_id = ClientId
    } = State
) ->
    io:format(IoServer, "~s ~p Trace session for ~s stopped~n", [iso8601(), Pid, ClientId]),
    State1 = remove_session_pid(Pid, State),
    {noreply, State1};
handle_info(
    Trace,
    #state{
        io_server = IoServer,
        sessions = Sessions
    } = State
) when
    is_tuple(Trace),
    element(1, Trace) =:= trace_ts
->
    TracePid = get_pid_from_trace(extract_info(Trace)),
    %% Only print traces belonging to sessions we are still tracking.
    case is_trace_active(TracePid, Sessions) of
        true ->
            {Format, Data} = format_trace(Trace, State),
            io:format(IoServer, Format, Data),
            State1 = handle_trace(Trace, State),
            {noreply, State1};
        false ->
            {noreply, State}
    end;
handle_info(
    {'EXIT', Tracer, normal},
    #state{
        tracer = Tracer,
        io_server = IoServer
    } = State
) ->
    io:format(IoServer, "~s Trace rate limit triggered.~n", [iso8601()]),
    {stop, normal, State}.
%% @private
%% Called when the gen_server is about to terminate with Reason.
%% The return value is ignored. terminate(Reason, State) -> void()
%% Uninstall the trace_fun and clear any outstanding recon traces so no
%% tracing survives this server.
terminate(_Reason, _State) ->
    vmq_config:set_env(trace_fun, undefined, false),
    recon_trace:clear(),
    ok.
%% @private
%% code_change(OldVsn, State, Extra) -> {ok, NewState}
%% Hot code upgrade: state layout unchanged.
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
%% Internal functions
%% React to the return value of a `vmq_plugin:all_till_ok/2' hook call
%% observed in the trace stream. The only hook returning a subscriber_id
%% as a modifier is `auth_on_register'; when a plugin rewrites the
%% mountpoint and/or client id we stop tracing that session, since it no
%% longer matches the traced subscriber id.
%%
%% NOTE: trace messages are produced with the `timestamp' flag and hence
%% arrive as `trace_ts' tuples; the previous code only matched the plain
%% `trace' form and therefore never fired. Both forms are accepted now.
-spec handle_trace(term(), state()) -> state().
handle_trace({trace, Pid, return_from, {vmq_plugin, all_till_ok, 2}, Ret}, State) ->
    handle_all_till_ok_ret(Pid, Ret, State);
handle_trace({trace_ts, Pid, return_from, {vmq_plugin, all_till_ok, 2}, Ret, _Ts}, State) ->
    handle_all_till_ok_ret(Pid, Ret, State);
handle_trace(_, State) ->
    State.

%% Inspect a hook return value; detach the session when its subscriber id
%% was modified by a plugin.
handle_all_till_ok_ret(
    Pid,
    Ret,
    #state{
        client_id = ClientId,
        mountpoint = Mountpoint,
        io_server = IoServer,
        tracer = Tracer
    } = State
) ->
    case Ret of
        ok ->
            State;
        {ok, Payload} when is_binary(Payload) ->
            %% Payload-only modifier; subscriber id unchanged.
            State;
        {ok, Modifiers} ->
            case proplists:get_value(subscriber_id, Modifiers, undefined) of
                undefined ->
                    State;
                {Mountpoint, ClientId} ->
                    %% Modified but still the same, keep tracing!
                    State;
                {NewMountpoint, NewClientId} ->
                    {F, D} =
                        case {NewMountpoint, NewClientId} of
                            {Mountpoint, _} ->
                                %% BUGFIX: report the new client id here;
                                %% previously NewMountpoint was printed.
                                {"~p client id for ~s modified to ~p, stopping trace~n", [
                                    Pid, ClientId, NewClientId
                                ]};
                            {_, ClientId} ->
                                {"~p mountpoint for ~s modified to ~p, stopping trace~n", [
                                    Pid, ClientId, NewMountpoint
                                ]};
                            {_, _} ->
                                {
                                    "~p mountpoint and client id for ~s modified to ~p and ~p,"
                                    " stopping trace~n",
                                    [Pid, ClientId, NewMountpoint, NewClientId]
                                }
                        end,
                    io:format(IoServer, F, D),
                    State1 = remove_session_pid(Pid, State),
                    setup_trace(get_trace_pids(State1), Tracer),
                    State1
            end;
        _ ->
            State
    end.
%% Normalise a trace message into {Type, Pid, Timestamp, TraceInfo}.
%% `trace_ts' tuples carry their own timestamp as the last element; plain
%% `trace' tuples are stamped with the current time instead.
extract_info(TraceMsg) ->
    case tuple_to_list(TraceMsg) of
        [trace_ts, Pid, Type | Rest] ->
            {Type, Pid, lists:last(Rest), lists:droplast(Rest)};
        [trace, Pid, Type | TraceInfo] ->
            {Type, Pid, os:timestamp(), TraceInfo}
    end.
%% Extract the session pid from a normalised trace tuple (see extract_info/1).
get_pid_from_trace({_Type, Pid, _Timestamp, _TraceInfo}) ->
    Pid.
%% True if SessionPid belongs to one of the currently traced sessions
%% (a list of {Pid, MonitorRef} pairs).
is_trace_active(SessionPid, TracedSessions) ->
    lists:keymember(SessionPid, 1, TracedSessions).
%% trace_fun callback installed in init/1: start tracing a new session
%% only when the CONNECT frame's client id matches the traced client id
%% (MQTT v3/v4 and v5 frames supported); everything else is ignored.
maybe_init_session_trace(SessionPid, #mqtt5_connect{client_id = ClientId} = ConnFrame, ClientId) ->
    start_session_trace(SessionPid, ConnFrame);
maybe_init_session_trace(SessionPid, #mqtt_connect{client_id = ClientId} = ConnFrame, ClientId) ->
    start_session_trace(SessionPid, ConnFrame);
maybe_init_session_trace(_, _, _) ->
    ignored.
%% Monitor and start tracing the given session pids. Note that the first
%% case clause matches against the already-bound `State': when
%% add_session_pids/2 returns an unchanged state there is nothing new to
%% trace and setup_trace/2 is skipped.
-spec begin_session_trace(list(pid()), state()) -> state().
begin_session_trace(
    SessionPids,
    #state{tracer = Tracer} = State
) ->
    case add_session_pids(SessionPids, State) of
        State ->
            State;
        State1 ->
            setup_trace(get_trace_pids(State1), Tracer),
            State1
    end.
%% Install trace patterns for the MQTT frame (de)serialisation and fsm
%% entry points plus the plugin hook dispatcher, then enable call tracing
%% (with timestamps) for each given pid, forwarding trace messages to the
%% rate-limiting tracer process.
setup_trace(TracePids, Tracer) ->
    TSpecs =
        [
            {vmq_parser, serialise, vmq_m4_parser_serialize_ms()},
            {vmq_parser_mqtt5, serialise, vmq_m5_parser_serialize_ms()},
            {vmq_mqtt_fsm, connected, vmq_mqtt_fsm_connected_ms()},
            {vmq_mqtt5_fsm, connected, vmq_mqtt5_fsm_connected_ms()},
            {vmq_plugin, all_till_ok, vmq_plugin_hooks_ms()}
        ],
    MatchOpts = [local],
    _Matches =
        [
            begin
                Arity = '_',
                Spec = Args,
                erlang:trace_pattern({Mod, Fun, Arity}, Spec, MatchOpts)
            end
         || {Mod, Fun, Args} <- TSpecs
        ],
    [
        begin
            %% ignore if the process has died and erlang:trace throws a
            %% badarg. (This line had lost its comment prefix, which
            %% broke compilation.)
            try
                erlang:trace(PSpec, true, [call, timestamp, {tracer, Tracer}])
            catch
                error:badarg -> ok
            end
        end
     || PSpec <- TracePids
    ].
%% Add session pids to the traced set, monitoring each new one.
%% BUGFIX: only monitor pids that are not already tracked. Previously a
%% re-added pid was monitored a second time and ukeysort dropped the old
%% {Pid, MRef} entry without demonitoring it, leaking a monitor and
%% causing a duplicate 'DOWN' for the same pid later on.
-spec add_session_pids(list(pid()), state()) -> state().
add_session_pids(SessionPids, #state{sessions = Sessions} = State) ->
    NewSessions =
        [
            {Pid, monitor(process, Pid)}
         || Pid <- SessionPids, not lists:keymember(Pid, 1, Sessions)
        ],
    State#state{sessions = lists:ukeysort(1, NewSessions ++ Sessions)}.
%% Drop a session pid from the traced set and release its monitor.
%% Robustness fix: the previous version crashed with a badmatch when the
%% pid was not (or no longer) tracked; treat that as a no-op instead so a
%% stray 'DOWN' cannot take the tracer down.
remove_session_pid(Pid, #state{sessions = Sessions} = State) ->
    case lists:keyfind(Pid, 1, Sessions) of
        {_, MRef} ->
            demonitor(MRef, [flush]),
            State#state{sessions = lists:keydelete(Pid, 1, Sessions)};
        false ->
            State
    end.
%% All pids that should be traced: every attached session plus, when
%% known, the queue process of the traced client.
-spec get_trace_pids(state()) -> list(pid()).
get_trace_pids(#state{sessions = Sessions, queue = undefined}) ->
    [SessionPid || {SessionPid, _MRef} <- Sessions];
get_trace_pids(#state{sessions = Sessions, queue = QueuePid}) ->
    [QueuePid | [SessionPid || {SessionPid, _MRef} <- Sessions]].
%% trace tool developed by Fred Hebert.
%% Rate-limiting relay: forwards every received trace message to the
%% registered vmq_tracer server, but exits normally once more than Max
%% messages arrive within a Time-millisecond window. The 'EXIT' of this
%% linked process is how the server learns the rate limit was hit.
rate_tracer(Max, Time, Count, Start) ->
    receive
        Msg ->
            vmq_tracer ! Msg,
            Now = os:timestamp(),
            %% elapsed window time in milliseconds
            Delay = timer:now_diff(Now, Start) div 1000,
            if
                Delay > Time -> rate_tracer(Max, Time, 0, Now);
                Max > Count -> rate_tracer(Max, Time, Count + 1, Start);
                Max =:= Count -> exit(normal)
            end
    end.
%% Match spec restricting vmq_parser:serialise/1 tracing to MQTT v3/v4
%% frame records (one clause per frame type).
vmq_m4_parser_serialize_ms() ->
    dbg:fun2ms(
        fun
            ([#mqtt_connect{}]) -> ok;
            ([#mqtt_connack{}]) -> ok;
            ([#mqtt_publish{}]) -> ok;
            ([#mqtt_puback{}]) -> ok;
            ([#mqtt_pubrec{}]) -> ok;
            ([#mqtt_pubrel{}]) -> ok;
            ([#mqtt_pubcomp{}]) -> ok;
            ([#mqtt_subscribe{}]) -> ok;
            ([#mqtt_unsubscribe{}]) -> ok;
            ([#mqtt_suback{}]) -> ok;
            ([#mqtt_unsuback{}]) -> ok;
            ([#mqtt_pingreq{}]) -> ok;
            ([#mqtt_pingresp{}]) -> ok;
            ([#mqtt_disconnect{}]) -> ok
        end
    ).
%% Match spec restricting vmq_parser_mqtt5:serialise/1 tracing to MQTT 5
%% frame records.
vmq_m5_parser_serialize_ms() ->
    dbg:fun2ms(
        fun
            ([#mqtt5_connect{}]) -> ok;
            ([#mqtt5_connack{}]) -> ok;
            ([#mqtt5_publish{}]) -> ok;
            ([#mqtt5_puback{}]) -> ok;
            ([#mqtt5_pubrec{}]) -> ok;
            ([#mqtt5_pubrel{}]) -> ok;
            ([#mqtt5_pubcomp{}]) -> ok;
            ([#mqtt5_subscribe{}]) -> ok;
            ([#mqtt5_unsubscribe{}]) -> ok;
            ([#mqtt5_suback{}]) -> ok;
            ([#mqtt5_unsuback{}]) -> ok;
            ([#mqtt5_pingreq{}]) -> ok;
            ([#mqtt5_pingresp{}]) -> ok;
            ([#mqtt5_disconnect{}]) -> ok;
            ([#mqtt5_auth{}]) -> ok
        end
    ).
%% Match spec restricting vmq_mqtt_fsm:connected/2 tracing to MQTT v3/v4
%% frame records (second argument, the fsm state, is ignored).
vmq_mqtt_fsm_connected_ms() ->
    dbg:fun2ms(
        fun
            ([#mqtt_connect{}, _]) -> ok;
            ([#mqtt_connack{}, _]) -> ok;
            ([#mqtt_publish{}, _]) -> ok;
            ([#mqtt_puback{}, _]) -> ok;
            ([#mqtt_pubrec{}, _]) -> ok;
            ([#mqtt_pubrel{}, _]) -> ok;
            ([#mqtt_pubcomp{}, _]) -> ok;
            ([#mqtt_subscribe{}, _]) -> ok;
            ([#mqtt_unsubscribe{}, _]) -> ok;
            ([#mqtt_suback{}, _]) -> ok;
            ([#mqtt_unsuback{}, _]) -> ok;
            ([#mqtt_pingreq{}, _]) -> ok;
            ([#mqtt_pingresp{}, _]) -> ok;
            ([#mqtt_disconnect{}, _]) -> ok
        end
    ).
%% Match spec restricting vmq_mqtt5_fsm:connected/2 tracing to MQTT 5
%% frame records (second argument, the fsm state, is ignored).
vmq_mqtt5_fsm_connected_ms() ->
    dbg:fun2ms(
        fun
            ([#mqtt5_connect{}, _]) -> ok;
            ([#mqtt5_connack{}, _]) -> ok;
            ([#mqtt5_publish{}, _]) -> ok;
            ([#mqtt5_puback{}, _]) -> ok;
            ([#mqtt5_pubrec{}, _]) -> ok;
            ([#mqtt5_pubrel{}, _]) -> ok;
            ([#mqtt5_pubcomp{}, _]) -> ok;
            ([#mqtt5_subscribe{}, _]) -> ok;
            ([#mqtt5_unsubscribe{}, _]) -> ok;
            ([#mqtt5_suback{}, _]) -> ok;
            ([#mqtt5_unsuback{}, _]) -> ok;
            ([#mqtt5_pingreq{}, _]) -> ok;
            ([#mqtt5_pingresp{}, _]) -> ok;
            ([#mqtt5_disconnect{}, _]) -> ok;
            ([#mqtt5_auth{}, _]) -> ok
        end
    ).
%% Match spec for vmq_plugin:all_till_ok/2 restricting tracing to the
%% authentication hooks; {return_trace} makes each hook's return value
%% appear in the trace stream as well.
vmq_plugin_hooks_ms() ->
    [
        {[auth_on_register, '_'], [], [{return_trace}]},
        {[auth_on_publish, '_'], [], [{return_trace}]},
        {[auth_on_subscribe, '_'], [], [{return_trace}]},
        {[auth_on_register_m5, '_'], [], [{return_trace}]},
        {[auth_on_publish_m5, '_'], [], [{return_trace}]},
        {[auth_on_subscribe_m5, '_'], [], [{return_trace}]},
        {[on_auth_m5, '_'], [], [{return_trace}]}
        %% The following hook entries are deliberately disabled; the line
        %% below had lost its comment prefix, which broke compilation:
        %% {[on_register,'_'],[],[{return_trace}]},
        %% {[on_publish,'_'],[],[{return_trace}]},
        %% {[on_deliver,'_'],[],[{return_trace}]}
    ].
%% Trace formatting
%% Turn a raw trace message into a single printable {Fmt, Args} tuple.
%% Dispatches on the traced call site: frames being serialised are SENDs,
%% frames entering the fsm are RECVs, and plugin hook calls/returns are
%% rendered separately.
-spec format_trace(term(), state()) -> ftuple().
format_trace(Trace, #state{
    client_id = ClientId,
    mountpoint = Mountpoint,
    payload_limit = PayloadLimit
}) ->
    SId = {Mountpoint, ClientId},
    Opts = #{
        payload_limit => PayloadLimit,
        sid => SId
    },
    Unprepared =
        case extract_info(Trace) of
            {call, Pid, Timestamp, [{vmq_parser, serialise, [Msg]}]} ->
                format_frame(from, Pid, Timestamp, SId, Msg, Opts);
            {call, Pid, Timestamp, [{vmq_mqtt_fsm, connected, [Msg, _]}]} ->
                format_frame(to, Pid, Timestamp, SId, Msg, Opts);
            {call, Pid, Timestamp, [{vmq_parser_mqtt5, serialise, [Msg]}]} ->
                format_frame(from, Pid, Timestamp, SId, Msg, Opts);
            {call, Pid, Timestamp, [{vmq_mqtt5_fsm, connected, [Msg, _]}]} ->
                format_frame(to, Pid, Timestamp, SId, Msg, Opts);
            {call, Pid, Timestamp, [{vmq_plugin, all_till_ok, [Hook, Args]}]} ->
                format_all_till_ok(Hook, Pid, Timestamp, Args, Opts);
            {return_from, Pid, Timestamp, [{vmq_plugin, all_till_ok, 2}, Ret]} ->
                format_all_till_ok_ret(Ret, Pid, Timestamp, Opts);
            _ ->
                format_unknown_trace(Trace)
        end,
    prepf(lists:flatten(Unprepared)).
%% Render a plugin hook invocation, prefixed with timestamp and pid.
format_all_till_ok(Hook, Pid, Timestamp, Args, Opts) ->
    [ftimestamp(Timestamp), r(" "), fpid(Pid), r(" "), format_all_till_ok_(Hook, Args, Opts)].
%% Per-hook rendering of the hook name and its arguments. MQTT 5 hooks
%% additionally render their properties.
format_all_till_ok_(
    auth_on_register = Hook,
    [Peer, SubscriberId, User, Password, CleanSession],
    _Opts
) ->
    {"Calling ~p(~p,~p,~s,~s,~p) ~n", [Hook, Peer, SubscriberId, User, Password, CleanSession]};
format_all_till_ok_(
    auth_on_publish = Hook,
    [User, SubscriberId, QoS, Topic, Payload, IsRetain],
    _Opts
) ->
    {
        "Calling ~p(~s,~p,~p,~s,~p) with payload:~n"
        "    ~s~n",
        [Hook, User, SubscriberId, QoS, jtopic(Topic), IsRetain, Payload]
    };
format_all_till_ok_(auth_on_subscribe = Hook, [User, SubscriberId, Topics], _Opts) ->
    [{"Calling ~p(~s,~p) with topics:~n", [Hook, User, SubscriberId]}, ftopics(Topics)];
format_all_till_ok_(
    auth_on_register_m5 = Hook,
    [Peer, SubscriberId, User, Password, CleanStart, Props],
    Opts
) ->
    [
        {"Calling ~p(~p,~p,~s,~s,~p) ~n", [Hook, Peer, SubscriberId, User, Password, CleanStart]},
        format_props(Props, Opts)
    ];
format_all_till_ok_(
    auth_on_publish_m5 = Hook,
    [User, SubscriberId, QoS, Topic, Payload, IsRetain, Props],
    Opts
) ->
    [
        {
            "Calling ~p(~s,~p,~p,~s,~p) with payload:~n"
            "    ~s~n",
            [Hook, User, SubscriberId, QoS, jtopic(Topic), IsRetain, Payload]
        },
        format_props(Props, Opts)
    ];
format_all_till_ok_(auth_on_subscribe_m5 = Hook, [User, SubscriberId, Topics, Props], Opts) ->
    [
        {"Calling ~p(~s,~p) with topics:~n", [Hook, User, SubscriberId]},
        ftopics(Topics),
        format_props(Props, Opts)
    ];
format_all_till_ok_(Hook, Args, _Opts) ->
    %% ROBUSTNESS FIX: vmq_plugin_hooks_ms/0 also traces on_auth_m5, for
    %% which no dedicated clause exists; previously such a call crashed
    %% the tracer with a function_clause. Fall back to a generic render.
    {"Calling ~p with args ~p~n", [Hook, Args]}.
%% Render a plugin hook return value, prefixed with timestamp and pid.
format_all_till_ok_ret(Ret, Pid, Timestamp, Opts) ->
    [ftimestamp(Timestamp), r(" "), fpid(Pid), r(" "), format_all_till_ok_ret_(Ret, Opts)].
%% Render the body of a hook return value: plain ok, a modified (and
%% possibly truncated) payload, a list of modifiers, or anything else.
format_all_till_ok_ret_(ok, _Opts) ->
    {"Hook returned \"ok\"~n", []};
format_all_till_ok_ret_({ok, []}, _Opts) ->
    {"Hook returned \"ok\"~n", []};
format_all_till_ok_ret_({ok, Payload}, #{payload_limit := Limit}) when is_binary(Payload) ->
    {
        "Hook returned \"ok\" with modified payload:~n"
        "    ~s~n",
        [trunc_payload(Payload, Limit)]
    };
format_all_till_ok_ret_({ok, Modifiers}, _Opts) ->
    [
        {"Hook returned \"ok\" with modifiers:~n", []},
        fmodifiers(Modifiers)
    ];
format_all_till_ok_ret_(Other, _Opts) ->
    {"Hook returned ~p~n", [Other]}.
%% Render a frame header line: timestamp, pid, direction tag, subscriber
%% id, followed by the frame-specific rendering.
format_frame(Direction, Pid, Timestamp, SId, M, Opts) ->
    [
        ftimestamp(Timestamp),
        r(" "),
        fpid(Pid),
        r(" "),
        dir(Direction),
        r(" "),
        sid(SId),
        r(" "),
        format_frame_(M, Opts)
    ].
%% Render MQTT 5 properties as an {Fmt, Args} tuple; absent or empty
%% properties render to nothing.
format_props(undefined, _Opts) ->
    [];
format_props(Props, _Opts) when is_map(Props), map_size(Props) =:= 0 ->
    [];
format_props(Props, _Opts) ->
    {" with properties: ~p~n", [Props]}.
%% Render a last-will testament. The arity-2 form unpacks an MQTT 5 lwt
%% record; the arity-6 form does the actual rendering and is shared with
%% the MQTT v3/v4 CONNECT clause (which passes `undefined' properties).
format_lwt(undefined, _Opts) ->
    [];
format_lwt(
    #mqtt5_lwt{
        will_properties = Props,
        will_retain = Retain,
        will_qos = QoS,
        will_topic = Topic,
        will_msg = Msg
    },
    Opts
) ->
    format_lwt(Retain, QoS, Topic, Msg, Props, Opts).
%% No LWT present when the retain flag is undefined.
format_lwt(Retain, _QoS, _Topic, _Msg, _Props, _Opts) when
    Retain =:= undefined
->
    [];
format_lwt(Retain, QoS, Topic, Msg, Props, #{payload_limit := Limit} = Opts) ->
    [
        {
            " with LWT(wr: ~p, wq: ~p, wt: ~s) with payload:~n"
            "    ~s~n",
            [Retain, QoS, jtopic(Topic), trunc_payload(Msg, Limit)]
        },
        format_props(Props, Opts)
    ].
%% Render a single MQTT frame (v3/v4 first, then v5) as possibly nested
%% {Fmt, Args} tuples. Payloads are truncated to the configured
%% payload_limit; MQTT 5 frames additionally render their properties.
%% FIX: the PUBREC/PUBREL/PUBCOMP v5 format strings had unbalanced
%% parentheses ("PUBREC(m~p), rc: ..." instead of "PUBREC(m~p, rc: ...");
%% they now match the PUBACK clause.
format_frame_(#mqtt_pingreq{}, _) ->
    {"PINGREQ()~n", []};
format_frame_(#mqtt_pingresp{}, _) ->
    {"PINGRESP()~n", []};
format_frame_(
    #mqtt_connect{
        proto_ver = Ver,
        username = Username,
        password = Password,
        clean_session = CleanSession,
        keep_alive = KeepAlive,
        client_id = ClientId,
        will_retain = WillRetain,
        will_qos = WillQoS,
        will_topic = WillTopic,
        will_msg = WillMsg
    },
    Opts
) ->
    [
        {"CONNECT(c: ~s, v: ~p, u: ~s, p: ~s, cs: ~p, ka: ~p)~n", [
            ClientId, Ver, Username, Password, CleanSession, KeepAlive
        ]},
        format_lwt(WillRetain, WillQoS, WillTopic, WillMsg, undefined, Opts)
    ];
format_frame_(#mqtt_connack{session_present = SP, return_code = RC}, _) ->
    {"CONNACK(sp: ~p, rc: ~p)~n", [fflag(SP), RC]};
format_frame_(
    #mqtt_publish{
        message_id = MId,
        topic = Topic,
        qos = QoS,
        retain = Retain,
        dup = Dup,
        payload = Payload
    },
    #{payload_limit := Limit}
) ->
    {
        "PUBLISH(d~p, q~p, r~p, m~p, \"~s\") with payload:~n"
        "    ~s~n",
        [fflag(Dup), QoS, fflag(Retain), fmid(MId), jtopic(Topic), trunc_payload(Payload, Limit)]
    };
format_frame_(#mqtt_puback{message_id = MId}, _) ->
    {"PUBACK(m~p)~n", [fmid(MId)]};
format_frame_(#mqtt_pubrec{message_id = MId}, _) ->
    {"PUBREC(m~p)~n", [fmid(MId)]};
format_frame_(#mqtt_pubrel{message_id = MId}, _) ->
    {"PUBREL(m~p)~n", [fmid(MId)]};
format_frame_(#mqtt_pubcomp{message_id = MId}, _) ->
    {"PUBCOMP(m~p)~n", [fmid(MId)]};
format_frame_(#mqtt_subscribe{message_id = MId, topics = Topics}, _) ->
    [{"SUBSCRIBE(m~p) with topics:~n", [fmid(MId)]}, ftopics(Topics)];
format_frame_(#mqtt_suback{message_id = MId, qos_table = QoSTable}, _) ->
    {"SUBACK(m~p, qt~p)~n", [fmid(MId), QoSTable]};
format_frame_(#mqtt_unsubscribe{message_id = MId}, _) ->
    {"UNSUBSCRIBE(m~p)~n", [fmid(MId)]};
format_frame_(#mqtt_unsuback{message_id = MId}, _) ->
    {"UNSUBACK(m~p)~n", [fmid(MId)]};
format_frame_(#mqtt_disconnect{}, _) ->
    {"DISCONNECT()~n", []};
%% --- MQTT 5 frames ---
format_frame_(#mqtt5_pingreq{}, _) ->
    {"PINGREQ()~n", []};
format_frame_(#mqtt5_pingresp{}, _) ->
    {"PINGRESP()~n", []};
format_frame_(
    #mqtt5_connect{
        proto_ver = Ver,
        username = Username,
        password = Password,
        clean_start = CleanStart,
        keep_alive = KeepAlive,
        client_id = ClientId,
        lwt = LWT,
        properties = Props
    },
    Opts
) ->
    [
        {"CONNECT(c: ~s, v: ~p, u: ~s, p: ~s, cs: ~p, ka: ~p)~n", [
            ClientId, Ver, Username, Password, CleanStart, KeepAlive
        ]},
        format_props(Props, Opts),
        format_lwt(LWT, Opts)
    ];
format_frame_(
    #mqtt5_connack{
        session_present = SP,
        reason_code = RC,
        properties = Props
    },
    Opts
) ->
    [{"CONNACK(sp: ~p, rc: ~p(~p))~n", [fflag(SP), rc2rcn(RC), RC]}, format_props(Props, Opts)];
format_frame_(
    #mqtt5_publish{
        message_id = MId,
        topic = Topic,
        qos = QoS,
        retain = Retain,
        dup = Dup,
        payload = Payload,
        properties = Props
    },
    #{payload_limit := Limit} = Opts
) ->
    [
        {
            "PUBLISH(d~p, q~p, r~p, m~p, \"~s\") with payload:~n"
            "    ~s~n",
            [
                fflag(Dup),
                QoS,
                fflag(Retain),
                fmid(MId),
                jtopic(Topic),
                trunc_payload(Payload, Limit)
            ]
        },
        format_props(Props, Opts)
    ];
format_frame_(#mqtt5_puback{message_id = MId, reason_code = RC, properties = Props}, Opts) ->
    [
        {"PUBACK(m~p, rc: ~p(~p))~n", [fmid(MId), rc2rcn(RC), RC]},
        format_props(Props, Opts)
    ];
format_frame_(#mqtt5_pubrec{message_id = MId, reason_code = RC, properties = Props}, Opts) ->
    [
        {"PUBREC(m~p, rc: ~p(~p))~n", [fmid(MId), rc2rcn(RC), RC]},
        format_props(Props, Opts)
    ];
format_frame_(#mqtt5_pubrel{message_id = MId, reason_code = RC, properties = Props}, Opts) ->
    [
        {"PUBREL(m~p, rc: ~p(~p))~n", [fmid(MId), rc2rcn(RC), RC]},
        format_props(Props, Opts)
    ];
format_frame_(#mqtt5_pubcomp{message_id = MId, reason_code = RC, properties = Props}, Opts) ->
    [
        {"PUBCOMP(m~p, rc: ~p(~p))~n", [fmid(MId), rc2rcn(RC), RC]},
        format_props(Props, Opts)
    ];
format_frame_(#mqtt5_subscribe{message_id = MId, topics = Topics, properties = Props}, Opts) ->
    [
        {"SUBSCRIBE(m~p) with topics:~n", [fmid(MId)]},
        ftopics(Topics),
        format_props(Props, Opts)
    ];
format_frame_(#mqtt5_suback{message_id = MId, reason_codes = RCs, properties = Props}, Opts) ->
    [
        {"SUBACK(m~p, reason_codes:~p)~n", [fmid(MId), RCs]},
        format_props(Props, Opts)
    ];
format_frame_(#mqtt5_unsubscribe{message_id = MId, topics = Topics, properties = Props}, Opts) ->
    [
        {"UNSUBSCRIBE(m~p) with topics:~n", [fmid(MId)]},
        ftopics(Topics),
        format_props(Props, Opts)
    ];
format_frame_(#mqtt5_unsuback{message_id = MId, reason_codes = RCs, properties = Props}, Opts) ->
    [
        {"UNSUBACK(m~p, reason_codes:~p)~n", [fmid(MId), RCs]},
        format_props(Props, Opts)
    ];
format_frame_(#mqtt5_disconnect{reason_code = RC, properties = Props}, Opts) ->
    [
        {"DISCONNECT(rc:~p(~p))~n", [disconnectrc2rcn(RC), RC]},
        format_props(Props, Opts)
    ];
format_frame_(#mqtt5_auth{reason_code = RC, properties = Props}, Opts) ->
    [
        {"AUTH(rc:~p(~p))~n", [rc2rcn(RC), RC]},
        format_props(Props, Opts)
    ];
format_frame_(Unknown, _) ->
    {io_lib:format("UNKNOWN: ~p~n", [Unknown]), []}.
%% Truncate a binary payload to at most Limit bytes for display, marking
%% truncated payloads with a " (truncated)" suffix.
trunc_payload(Payload, Limit) when byte_size(Payload) =< Limit ->
    Payload;
trunc_payload(Payload, Limit) ->
    Head = binary:part(Payload, 0, Limit),
    <<Head/binary, " (truncated)">>.
%% Takes a list of possibly nested {FormatString, Data} tuples and
-spec prepf(unprepf()) -> ftuple().
%% Flatten nested {Fmt, Args} tuples (and bare format strings) into one
%% {Fmt, Args} tuple ready for io:format/3, preserving order.
prepf(Unprepared) ->
    {Fmts, Datas} =
        lists:foldl(
            fun
                ({Fmt, Data}, {FmtAcc, DataAcc}) ->
                    {[Fmt | FmtAcc], [Data | DataAcc]};
                (Str, {FmtAcc, DataAcc}) when is_list(Str) ->
                    {[Str | FmtAcc], DataAcc}
            end,
            {[], []},
            lists:flatten(Unprepared)
        ),
    {lists:concat(lists:reverse(Fmts)), lists:concat(lists:reverse(Datas))}.
%% Wrap a raw format string into an {Fmt, Args} tuple with no arguments.
r(FmtStr) ->
    {FmtStr, []}.
%% Normalise an optional message id for display; `undefined' prints as 0.
fmid(undefined) -> 0;
fmid(MessageId) -> MessageId.
%% Render a frame flag (boolean or already-numeric) as 0/1 for display.
fflag(true) -> 1;
fflag(false) -> 0;
fflag(1) -> 1;
fflag(0) -> 0.
%% Join a topic word list into a single binary topic string.
jtopic(T) when is_list(T) ->
    erlang:iolist_to_binary(vmq_topic:unword(T)).
%% Render a list of subscription topics. Handles the three shapes seen in
%% hooks and frames: {Topic, QoS} (MQTT v3/v4), {Topic, {QoS, SubOpts}}
%% (MQTT 5 hook form), and #mqtt5_subscribe_topic{} records. Note the
%% foldl prepends, so the rendered entries appear in reverse input order.
ftopics(Topics) ->
    lists:foldl(
        fun
            ({Topic, QoS}, Acc) when is_integer(QoS), is_list(Topic) ->
                [{"    q:~p, t: \"~s\"~n", [QoS, jtopic(Topic)]} | Acc];
            ({Topic, {QoS, SubOpts}}, Acc) ->
                NL = maps:get(no_local, SubOpts, undefined),
                RAP = maps:get(rap, SubOpts, undefined),
                RH = maps:get(retain_handling, SubOpts, undefined),
                [
                    {
                        "    q:~p, no_local:~p, rap:~p, rh:~p~n"
                        "    t: \"~s\"~n",
                        [QoS, NL, RAP, RH, jtopic(Topic)]
                    }
                    | Acc
                ];
            (
                #mqtt5_subscribe_topic{
                    topic = Topic,
                    qos = QoS,
                    no_local = NL,
                    rap = RAP,
                    retain_handling = RH
                },
                Acc
            ) ->
                [
                    {
                        "    q:~p, no_local:~p, rap:~p, rh:~p~n"
                        "    t: \"~s\"~n",
                        [QoS, NL, RAP, RH, jtopic(Topic)]
                    }
                    | Acc
                ]
        end,
        [],
        Topics
    ).
%% Render hook modifiers, with dedicated units/labels for the known ones
%% and a generic ~p fallback for the rest. Note the foldl prepends, so
%% entries appear in reverse input order.
fmodifiers(Modifiers) ->
    lists:foldl(
        fun
            ({retry_interval, Val}, Acc) ->
                [{"    retry_interval: ~pms~n", [Val]} | Acc];
            ({upgrade_qos, Val}, Acc) ->
                [{"    upgrade_qos: ~p~n", [Val]} | Acc];
            ({max_message_rate, Val}, Acc) ->
                [{"    max_message_rate: ~p msgs/s~n", [Val]} | Acc];
            ({max_message_size, Val}, Acc) ->
                [{"    max_message_size: ~p bytes~n", [Val]} | Acc];
            ({max_inflight_messages, Val}, Acc) ->
                [{"    max_inflight_messages: ~p msgs~n", [Val]} | Acc];
            ({clean_session, Val}, Acc) ->
                [{"    clean_session: ~p~n", [Val]} | Acc];
            (V, Acc) ->
                [{"    ~p~n", [V]} | Acc]
        end,
        [],
        Modifiers
    ).
%% Render a pid as an {Fmt, Args} tuple.
fpid(SessionPid) ->
    {"~p", [SessionPid]}.
%% Render a timestamp as an {Fmt, Args} tuple using ISO 8601 formatting.
ftimestamp(Timestamp) ->
    Iso8601Formatted = iso8601(Timestamp),
    {"~s", [Iso8601Formatted]}.
%% tuple to an ISO 8601 formatted binary. Note that this function always
%% @doc Current UTC time as an ISO 8601 binary.
iso8601() ->
    iso8601(os:timestamp()).

%% @doc Convert an `os:timestamp()' triple or a calendar-style
%% {date(), time()} tuple to an ISO 8601 binary. The result is always
%% rendered as UTC with a trailing "Z"; float seconds keep microsecond
%% precision.
iso8601({MegaSecs, Secs, MicroSecs}) ->
    iso8601(calendar:now_to_datetime({MegaSecs, Secs, MicroSecs}));
iso8601({{Year, Month, Day}, {Hour, Minute, Second}}) when is_float(Second) ->
    iolist_to_binary(
        io_lib:format(
            "~4.10.0B-~2.10.0B-~2.10.0BT~2.10.0B:~2.10.0B:~9.6.0fZ",
            [Year, Month, Day, Hour, Minute, Second]
        )
    );
iso8601({{Year, Month, Day}, {Hour, Minute, Second}}) ->
    iolist_to_binary(
        io_lib:format(
            "~4.10.0B-~2.10.0B-~2.10.0BT~2.10.0B:~2.10.0B:~2.10.0BZ",
            [Year, Month, Day, Hour, Minute, Second]
        )
    ).
%% Render a subscriber id as an {Fmt, Args} tuple; the mountpoint part is
%% omitted when it is the default empty string.
sid({"", ClientId}) ->
    {"CID: \"~s\"", [ClientId]};
sid({Mountpoint, ClientId}) ->
    {"MP: \"~s\" CID: \"~s\"", [Mountpoint, ClientId]}.
%% Tag the direction of a traced frame relative to the broker.
dir(to) -> {"MQTT RECV:", []};
dir(from) -> {"MQTT SEND:", []}.
%% Fallback formatter for trace messages no other clause understands.
format_unknown_trace(Trace) ->
    [{"~s Unknown trace! ~p~n", [iso8601(), Trace]}].
%% Ad-hoc manual test helper: connects an MQTT 5 client to localhost:1883,
%% subscribes to "sim/topic" (with user properties), and publishes a small
%% message to the same topic. Intended to generate traceable traffic
%% against a locally running broker; not used by production code.
sim_client() ->
    Connect = packetv5:gen_connect("simclient", [{keepalive, 60}]),
    Connack = packetv5:gen_connack(0, 0, #{}),
    {ok, S} = packetv5:do_client_connect(Connect, Connack, [{port, 1883}]),
    Topic = <<"sim/topic">>,
    %% SUBSCRIBE with message id 77, QoS 0, and two user properties.
    Subscribe = packetv5:gen_subscribe(
        77,
        [packetv5:gen_subtopic(Topic, 0)],
        #{
            p_user_property => [
                {<<"key1">>, <<"val1">>},
                {<<"key2">>, <<"val2">>}
            ]
        }
    ),
    ok = gen_tcp:send(S, Subscribe),
    SubAck = packetv5:gen_suback(77, [0], #{}),
    ok = packetv5:expect_frame(S, SubAck),
    %% PUBLISH a QoS 0 message carrying a duplicated user-property key.
    Pub = packetv5:gen_publish(
        Topic,
        0,
        <<"simmsg">>,
        [
            {properties, #{
                p_user_property =>
                    [
                        {<<"key1">>, <<"val1">>},
                        {<<"key1">>, <<"val2">>},
                        {<<"key2">>, <<"val2">>}
                    ]
            }}
        ]
    ),
    ok = gen_tcp:send(S, Pub).
%% Map an MQTT 5 numeric reason code to its symbolic reason-code name
%% (delegates to the parser's lookup table).
rc2rcn(RC) ->
    vmq_parser_mqtt5:rc2rcn(RC).
%% Like rc2rcn/1, but for DISCONNECT frames, where the numeric value of
%% ?M5_NORMAL_DISCONNECT has a DISCONNECT-specific name.
disconnectrc2rcn(?M5_NORMAL_DISCONNECT) ->
    ?NORMAL_DISCONNECT;
disconnectrc2rcn(RC) ->
    rc2rcn(RC).
|
7e35448a7c03fff1f382b886293fb7af83e81c657be161ab5ccb1551b27bdaf1 | craigfe/oskel | main.ml | let main () = Binary.main () |> print_endline
open Cmdliner
(* Cmdliner term that initialises logging as a side effect: sets up the
   Fmt TTY style renderer, the Logs level, and the default reporter.
   Evaluates to [()]. *)
let setup_log =
  let init style_renderer level =
    Fmt_tty.setup_std_outputs ?style_renderer ();
    Logs.set_level level;
    Logs.set_reporter (Logs_fmt.reporter ())
  in
  Term.(const init $ Fmt_cli.style_renderer () $ Logs_cli.level ())
(* The command-line term paired with its man-page info: evaluating the
   term first runs [setup_log] (initialising logging), then [main]. *)
let term =
  let doc = "Binary that depends on a tested library" in
  let exits = Term.default_exits in
  let man = [] in
  Term.(const main $ setup_log, info "binary" ~doc ~exits ~man)
let () = Term.exit (Term.eval term)
| null | https://raw.githubusercontent.com/craigfe/oskel/057129faaf8171ff4ed49022346e9179b04735b2/examples/binary/bin/main.ml | ocaml | let main () = Binary.main () |> print_endline
open Cmdliner
let setup_log =
let init style_renderer level =
Fmt_tty.setup_std_outputs ?style_renderer ();
Logs.set_level level;
Logs.set_reporter (Logs_fmt.reporter ())
in
Term.(const init $ Fmt_cli.style_renderer () $ Logs_cli.level ())
(* The command-line term paired with its man-page info: evaluating the
   term first runs [setup_log] (initialising logging), then [main]. *)
let term =
  let doc = "Binary that depends on a tested library" in
  let exits = Term.default_exits in
  let man = [] in
  Term.(const main $ setup_log, info "binary" ~doc ~exits ~man)
let () = Term.exit (Term.eval term)
| |
9738f0fb0f0da6a1a96a16b681687c3e948bc554a479bb2ca82dbd14158b9b5e | haskell/haskell-language-server | Cabal.hs | {-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
# LANGUAGE DuplicateRecordFields #
{-# LANGUAGE FlexibleContexts #-}
# LANGUAGE FlexibleInstances #
{-# LANGUAGE LambdaCase #-}
# LANGUAGE NamedFieldPuns #
# LANGUAGE OverloadedStrings #
# LANGUAGE TupleSections #
# LANGUAGE TypeFamilies #
module Ide.Plugin.Cabal (descriptor, Log(..)) where
import Control.Concurrent.STM
import Control.Concurrent.Strict
import Control.DeepSeq
import Control.Monad.Extra
import Control.Monad.IO.Class
import qualified Data.ByteString as BS
import Data.Hashable
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as HashMap
import qualified Data.List.NonEmpty as NE
import qualified Data.Text.Encoding as Encoding
import Data.Typeable
import Development.IDE as D
import Development.IDE.Core.Shake (restartShakeSession)
import qualified Development.IDE.Core.Shake as Shake
import Development.IDE.Graph (alwaysRerun)
import GHC.Generics
import qualified Ide.Plugin.Cabal.Diagnostics as Diagnostics
import qualified Ide.Plugin.Cabal.LicenseSuggest as LicenseSuggest
import qualified Ide.Plugin.Cabal.Parse as Parse
import Ide.Plugin.Config (Config)
import Ide.Types
import Language.LSP.Server (LspM)
import Language.LSP.Types
import qualified Language.LSP.Types as LSP
import qualified Language.LSP.VFS as VFS
-- | Log messages emitted by the cabal plugin, rendered by the 'Pretty'
-- instance below.
data Log
  = LogModificationTime NormalizedFilePath FileVersion -- ^ a traced cabal file's contents were (re)read at this version
  | LogShake Shake.Log -- ^ messages forwarded from the Shake rule machinery
  | LogDocOpened Uri -- ^ a text document was opened in the editor
  | LogDocModified Uri -- ^ a text document was modified in the editor
  | LogDocSaved Uri -- ^ a text document was saved
  | LogDocClosed Uri -- ^ a text document was closed
  | LogFOI (HashMap NormalizedFilePath FileOfInterestStatus) -- ^ the new set of cabal files of interest
  deriving Show
-- | Human-readable rendering of 'Log' messages.
instance Pretty Log where
  pretty = \case
    LogShake log' -> pretty log'
    LogModificationTime nfp modTime ->
      "Modified:" <+> pretty (fromNormalizedFilePath nfp) <+> pretty (show modTime)
    LogDocOpened uri ->
      "Opened text document:" <+> pretty (getUri uri)
    LogDocModified uri ->
      "Modified text document:" <+> pretty (getUri uri)
    LogDocSaved uri ->
      "Saved text document:" <+> pretty (getUri uri)
    LogDocClosed uri ->
      "Closed text document:" <+> pretty (getUri uri)
    LogFOI files ->
      "Set files of interest to:" <+> viaShow files
-- | Plugin descriptor for the cabal plugin: wires up the parsing rules,
-- the license-suggest code action, and notification handlers that keep
-- the set of cabal files of interest in sync with editor lifecycle
-- events (open/change/save/close). Each lifecycle event also restarts
-- the shake session so diagnostics are refreshed.
descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState
descriptor recorder plId = (defaultCabalPluginDescriptor plId)
  { pluginRules = cabalRules recorder
  , pluginHandlers = mkPluginHandler STextDocumentCodeAction licenseSuggestCodeAction
  , pluginNotificationHandlers = mconcat
  [ mkPluginNotificationHandler LSP.STextDocumentDidOpen $
      \ide vfs _ (DidOpenTextDocumentParams TextDocumentItem{_uri,_version}) -> liftIO $ do
      whenUriFile _uri $ \file -> do
        log' Debug $ LogDocOpened _uri
        addFileOfInterest recorder ide file Modified{firstOpen=True}
        restartCabalShakeSession (shakeExtras ide) vfs file "(opened)"
  , mkPluginNotificationHandler LSP.STextDocumentDidChange $
      \ide vfs _ (DidChangeTextDocumentParams VersionedTextDocumentIdentifier{_uri} _) -> liftIO $ do
      whenUriFile _uri $ \file -> do
        log' Debug $ LogDocModified _uri
        addFileOfInterest recorder ide file Modified{firstOpen=False}
        restartCabalShakeSession (shakeExtras ide) vfs file "(changed)"
  , mkPluginNotificationHandler LSP.STextDocumentDidSave $
      \ide vfs _ (DidSaveTextDocumentParams TextDocumentIdentifier{_uri} _) -> liftIO $ do
      whenUriFile _uri $ \file -> do
        log' Debug $ LogDocSaved _uri
        addFileOfInterest recorder ide file OnDisk
        restartCabalShakeSession (shakeExtras ide) vfs file "(saved)"
  , mkPluginNotificationHandler LSP.STextDocumentDidClose $
      \ide vfs _ (DidCloseTextDocumentParams TextDocumentIdentifier{_uri}) -> liftIO $ do
      whenUriFile _uri $ \file -> do
        log' Debug $ LogDocClosed _uri
        deleteFileOfInterest recorder ide file
        restartCabalShakeSession (shakeExtras ide) vfs file "(closed)"
  ]
  }
  where
    log' = logWith recorder
    -- Run the action only when the uri points at an actual file.
    whenUriFile :: Uri -> (NormalizedFilePath -> IO ()) -> IO ()
    whenUriFile uri act = whenJust (LSP.uriToFilePath uri) $ act . toNormalizedFilePath'
-- | Helper function to restart the shake session, specifically for modifying .cabal files.
-- No special logic, just group up a bunch of functions you need for the base
-- Notification Handlers.
--
-- To make sure diagnostics are up to date, we need to tell shake that the file was touched and
-- needs to be re-parsed. That's what we do when we record the dirty key that our parsing
-- rule depends on.
-- Then we restart the shake session, so that changes to our virtual files are actually picked up.
restartCabalShakeSession :: ShakeExtras -> VFS.VFS -> NormalizedFilePath -> String -> IO ()
restartCabalShakeSession shakeExtras vfs file actionMsg = do
  join $ atomically $ Shake.recordDirtyKeys shakeExtras GetModificationTime [file]
  restartShakeSession shakeExtras (VFSModified vfs) (fromNormalizedFilePath file ++ " " ++ actionMsg) []
-- ----------------------------------------------------------------
-- Plugin Rules
-- ----------------------------------------------------------------
-- | Shake rule key for parsing a cabal file and producing diagnostics.
-- The rule result is unit: the rule's value is its diagnostics.
data ParseCabal = ParseCabal
  deriving (Eq, Show, Typeable, Generic)

instance Hashable ParseCabal
instance NFData ParseCabal

type instance RuleResult ParseCabal = ()
cabalRules :: Recorder (WithPriority Log) -> Rules ()
cabalRules recorder = do
-- Make sure we initialise the cabal files-of-interest.
ofInterestRules recorder
-- Rule to produce diagnostics for cabal files.
define (cmapWithPrio LogShake recorder) $ \ParseCabal file -> do
-- whenever this key is marked as dirty (e.g., when a user writes stuff to it),
-- we rerun this rule because this rule *depends* on GetModificationTime.
(t, mCabalSource) <- use_ GetFileContents file
log' Debug $ LogModificationTime file t
contents <- case mCabalSource of
Just sources -> pure $ Encoding.encodeUtf8 sources
Nothing -> do
liftIO $ BS.readFile $ fromNormalizedFilePath file
(pWarnings, pm) <- liftIO $ Parse.parseCabalFileContents contents
let warningDiags = fmap (Diagnostics.warningDiagnostic file) pWarnings
case pm of
Left (_cabalVersion, pErrorNE) -> do
let errorDiags = NE.toList $ NE.map (Diagnostics.errorDiagnostic file) pErrorNE
allDiags = errorDiags <> warningDiags
pure (allDiags, Nothing)
Right _ -> do
pure (warningDiags, Just ())
action $ do
-- Run the cabal kick. This code always runs when 'shakeRestart' is run.
-- Must be careful to not impede the performance too much. Crucial to
-- a snappy IDE experience.
kick
where
log' = logWith recorder
-- | This is the kick function for the cabal plugin.
-- We run this action, whenever we shake session us run/restarted, which triggers
-- actions to produce diagnostics for cabal files.
--
-- It is paramount that this kick-function can be run quickly, since it is a blocking
-- function invocation.
kick :: Action ()
kick = do
files <- HashMap.keys <$> getCabalFilesOfInterestUntracked
void $ uses ParseCabal files
-- ----------------------------------------------------------------
-- Code Actions
-- ----------------------------------------------------------------
licenseSuggestCodeAction
:: IdeState
-> PluginId
-> CodeActionParams
-> LspM Config (Either ResponseError (ResponseResult 'TextDocumentCodeAction))
licenseSuggestCodeAction _ _ (CodeActionParams _ _ (TextDocumentIdentifier uri) _range CodeActionContext{_diagnostics=List diags}) =
pure $ Right $ List $ diags >>= (fmap InR . (LicenseSuggest.licenseErrorAction uri))
-- ----------------------------------------------------------------
Cabal file of Interest rules and global variable
-- ----------------------------------------------------------------
| Cabal files that are currently open in the lsp - client .
-- Specific actions happen when these files are saved, closed or modified,
-- such as generating diagnostics, re-parsing, etc...
--
-- We need to store the open files to parse them again if we restart the shake session.
-- Restarting of the shake session happens whenever these files are modified.
newtype OfInterestCabalVar = OfInterestCabalVar (Var (HashMap NormalizedFilePath FileOfInterestStatus))
instance Shake.IsIdeGlobal OfInterestCabalVar
data IsCabalFileOfInterest = IsCabalFileOfInterest
deriving (Eq, Show, Typeable, Generic)
instance Hashable IsCabalFileOfInterest
instance NFData IsCabalFileOfInterest
type instance RuleResult IsCabalFileOfInterest = CabalFileOfInterestResult
data CabalFileOfInterestResult = NotCabalFOI | IsCabalFOI FileOfInterestStatus
deriving (Eq, Show, Typeable, Generic)
instance Hashable CabalFileOfInterestResult
instance NFData CabalFileOfInterestResult
-- | The rule that initialises the files of interest state.
--
-- Needs to be run on start-up.
ofInterestRules :: Recorder (WithPriority Log) -> Rules ()
ofInterestRules recorder = do
Shake.addIdeGlobal . OfInterestCabalVar =<< liftIO (newVar HashMap.empty)
Shake.defineEarlyCutoff (cmapWithPrio LogShake recorder) $ RuleNoDiagnostics $ \IsCabalFileOfInterest f -> do
alwaysRerun
filesOfInterest <- getCabalFilesOfInterestUntracked
let foi = maybe NotCabalFOI IsCabalFOI $ f `HashMap.lookup` filesOfInterest
fp = summarize foi
res = (Just fp, Just foi)
return res
where
summarize NotCabalFOI = BS.singleton 0
summarize (IsCabalFOI OnDisk) = BS.singleton 1
summarize (IsCabalFOI (Modified False)) = BS.singleton 2
summarize (IsCabalFOI (Modified True)) = BS.singleton 3
getCabalFilesOfInterestUntracked :: Action (HashMap NormalizedFilePath FileOfInterestStatus)
getCabalFilesOfInterestUntracked = do
OfInterestCabalVar var <- Shake.getIdeGlobalAction
liftIO $ readVar var
addFileOfInterest :: Recorder (WithPriority Log) -> IdeState -> NormalizedFilePath -> FileOfInterestStatus -> IO ()
addFileOfInterest recorder state f v = do
OfInterestCabalVar var <- Shake.getIdeGlobalState state
(prev, files) <- modifyVar var $ \dict -> do
let (prev, new) = HashMap.alterF (, Just v) f dict
pure (new, (prev, new))
when (prev /= Just v) $ do
join $ atomically $ Shake.recordDirtyKeys (shakeExtras state) IsFileOfInterest [f]
log' Debug $ LogFOI files
where
log' = logWith recorder
deleteFileOfInterest :: Recorder (WithPriority Log) -> IdeState -> NormalizedFilePath -> IO ()
deleteFileOfInterest recorder state f = do
OfInterestCabalVar var <- Shake.getIdeGlobalState state
files <- modifyVar' var $ HashMap.delete f
join $ atomically $ Shake.recordDirtyKeys (shakeExtras state) IsFileOfInterest [f]
log' Debug $ LogFOI files
where
log' = logWith recorder
| null | https://raw.githubusercontent.com/haskell/haskell-language-server/d7690c500f204ff3804b1ec7af70a6194c4a9908/plugins/hls-cabal-plugin/src/Ide/Plugin/Cabal.hs | haskell | # LANGUAGE DataKinds #
# LANGUAGE DeriveGeneric #
# LANGUAGE FlexibleContexts #
# LANGUAGE LambdaCase #
| Helper function to restart the shake session, specifically for modifying .cabal files.
No special logic, just group up a bunch of functions you need for the base
Notification Handlers.
To make sure diagnostics are up to date, we need to tell shake that the file was touched and
needs to be re-parsed. That's what we do when we record the dirty key that our parsing
rule depends on.
Then we restart the shake session, so that changes to our virtual files are actually picked up.
----------------------------------------------------------------
Plugin Rules
----------------------------------------------------------------
Make sure we initialise the cabal files-of-interest.
Rule to produce diagnostics for cabal files.
whenever this key is marked as dirty (e.g., when a user writes stuff to it),
we rerun this rule because this rule *depends* on GetModificationTime.
Run the cabal kick. This code always runs when 'shakeRestart' is run.
Must be careful to not impede the performance too much. Crucial to
a snappy IDE experience.
| This is the kick function for the cabal plugin.
We run this action, whenever we shake session us run/restarted, which triggers
actions to produce diagnostics for cabal files.
It is paramount that this kick-function can be run quickly, since it is a blocking
function invocation.
----------------------------------------------------------------
Code Actions
----------------------------------------------------------------
----------------------------------------------------------------
----------------------------------------------------------------
Specific actions happen when these files are saved, closed or modified,
such as generating diagnostics, re-parsing, etc...
We need to store the open files to parse them again if we restart the shake session.
Restarting of the shake session happens whenever these files are modified.
| The rule that initialises the files of interest state.
Needs to be run on start-up. | # LANGUAGE DuplicateRecordFields #
# LANGUAGE FlexibleInstances #
# LANGUAGE NamedFieldPuns #
# LANGUAGE OverloadedStrings #
# LANGUAGE TupleSections #
# LANGUAGE TypeFamilies #
module Ide.Plugin.Cabal (descriptor, Log(..)) where
import Control.Concurrent.STM
import Control.Concurrent.Strict
import Control.DeepSeq
import Control.Monad.Extra
import Control.Monad.IO.Class
import qualified Data.ByteString as BS
import Data.Hashable
import Data.HashMap.Strict (HashMap)
import qualified Data.HashMap.Strict as HashMap
import qualified Data.List.NonEmpty as NE
import qualified Data.Text.Encoding as Encoding
import Data.Typeable
import Development.IDE as D
import Development.IDE.Core.Shake (restartShakeSession)
import qualified Development.IDE.Core.Shake as Shake
import Development.IDE.Graph (alwaysRerun)
import GHC.Generics
import qualified Ide.Plugin.Cabal.Diagnostics as Diagnostics
import qualified Ide.Plugin.Cabal.LicenseSuggest as LicenseSuggest
import qualified Ide.Plugin.Cabal.Parse as Parse
import Ide.Plugin.Config (Config)
import Ide.Types
import Language.LSP.Server (LspM)
import Language.LSP.Types
import qualified Language.LSP.Types as LSP
import qualified Language.LSP.VFS as VFS
data Log
= LogModificationTime NormalizedFilePath FileVersion
| LogShake Shake.Log
| LogDocOpened Uri
| LogDocModified Uri
| LogDocSaved Uri
| LogDocClosed Uri
| LogFOI (HashMap NormalizedFilePath FileOfInterestStatus)
deriving Show
instance Pretty Log where
pretty = \case
LogShake log' -> pretty log'
LogModificationTime nfp modTime ->
"Modified:" <+> pretty (fromNormalizedFilePath nfp) <+> pretty (show modTime)
LogDocOpened uri ->
"Opened text document:" <+> pretty (getUri uri)
LogDocModified uri ->
"Modified text document:" <+> pretty (getUri uri)
LogDocSaved uri ->
"Saved text document:" <+> pretty (getUri uri)
LogDocClosed uri ->
"Closed text document:" <+> pretty (getUri uri)
LogFOI files ->
"Set files of interest to:" <+> viaShow files
descriptor :: Recorder (WithPriority Log) -> PluginId -> PluginDescriptor IdeState
descriptor recorder plId = (defaultCabalPluginDescriptor plId)
{ pluginRules = cabalRules recorder
, pluginHandlers = mkPluginHandler STextDocumentCodeAction licenseSuggestCodeAction
, pluginNotificationHandlers = mconcat
[ mkPluginNotificationHandler LSP.STextDocumentDidOpen $
\ide vfs _ (DidOpenTextDocumentParams TextDocumentItem{_uri,_version}) -> liftIO $ do
whenUriFile _uri $ \file -> do
log' Debug $ LogDocOpened _uri
addFileOfInterest recorder ide file Modified{firstOpen=True}
restartCabalShakeSession (shakeExtras ide) vfs file "(opened)"
, mkPluginNotificationHandler LSP.STextDocumentDidChange $
\ide vfs _ (DidChangeTextDocumentParams VersionedTextDocumentIdentifier{_uri} _) -> liftIO $ do
whenUriFile _uri $ \file -> do
log' Debug $ LogDocModified _uri
addFileOfInterest recorder ide file Modified{firstOpen=False}
restartCabalShakeSession (shakeExtras ide) vfs file "(changed)"
, mkPluginNotificationHandler LSP.STextDocumentDidSave $
\ide vfs _ (DidSaveTextDocumentParams TextDocumentIdentifier{_uri} _) -> liftIO $ do
whenUriFile _uri $ \file -> do
log' Debug $ LogDocSaved _uri
addFileOfInterest recorder ide file OnDisk
restartCabalShakeSession (shakeExtras ide) vfs file "(saved)"
, mkPluginNotificationHandler LSP.STextDocumentDidClose $
\ide vfs _ (DidCloseTextDocumentParams TextDocumentIdentifier{_uri}) -> liftIO $ do
whenUriFile _uri $ \file -> do
log' Debug $ LogDocClosed _uri
deleteFileOfInterest recorder ide file
restartCabalShakeSession (shakeExtras ide) vfs file "(closed)"
]
}
where
log' = logWith recorder
whenUriFile :: Uri -> (NormalizedFilePath -> IO ()) -> IO ()
whenUriFile uri act = whenJust (LSP.uriToFilePath uri) $ act . toNormalizedFilePath'
restartCabalShakeSession :: ShakeExtras -> VFS.VFS -> NormalizedFilePath -> String -> IO ()
restartCabalShakeSession shakeExtras vfs file actionMsg = do
join $ atomically $ Shake.recordDirtyKeys shakeExtras GetModificationTime [file]
restartShakeSession shakeExtras (VFSModified vfs) (fromNormalizedFilePath file ++ " " ++ actionMsg) []
data ParseCabal = ParseCabal
deriving (Eq, Show, Typeable, Generic)
instance Hashable ParseCabal
instance NFData ParseCabal
type instance RuleResult ParseCabal = ()
cabalRules :: Recorder (WithPriority Log) -> Rules ()
cabalRules recorder = do
ofInterestRules recorder
define (cmapWithPrio LogShake recorder) $ \ParseCabal file -> do
(t, mCabalSource) <- use_ GetFileContents file
log' Debug $ LogModificationTime file t
contents <- case mCabalSource of
Just sources -> pure $ Encoding.encodeUtf8 sources
Nothing -> do
liftIO $ BS.readFile $ fromNormalizedFilePath file
(pWarnings, pm) <- liftIO $ Parse.parseCabalFileContents contents
let warningDiags = fmap (Diagnostics.warningDiagnostic file) pWarnings
case pm of
Left (_cabalVersion, pErrorNE) -> do
let errorDiags = NE.toList $ NE.map (Diagnostics.errorDiagnostic file) pErrorNE
allDiags = errorDiags <> warningDiags
pure (allDiags, Nothing)
Right _ -> do
pure (warningDiags, Just ())
action $ do
kick
where
log' = logWith recorder
kick :: Action ()
kick = do
files <- HashMap.keys <$> getCabalFilesOfInterestUntracked
void $ uses ParseCabal files
licenseSuggestCodeAction
:: IdeState
-> PluginId
-> CodeActionParams
-> LspM Config (Either ResponseError (ResponseResult 'TextDocumentCodeAction))
licenseSuggestCodeAction _ _ (CodeActionParams _ _ (TextDocumentIdentifier uri) _range CodeActionContext{_diagnostics=List diags}) =
pure $ Right $ List $ diags >>= (fmap InR . (LicenseSuggest.licenseErrorAction uri))
Cabal file of Interest rules and global variable
| Cabal files that are currently open in the lsp - client .
newtype OfInterestCabalVar = OfInterestCabalVar (Var (HashMap NormalizedFilePath FileOfInterestStatus))
instance Shake.IsIdeGlobal OfInterestCabalVar
data IsCabalFileOfInterest = IsCabalFileOfInterest
deriving (Eq, Show, Typeable, Generic)
instance Hashable IsCabalFileOfInterest
instance NFData IsCabalFileOfInterest
type instance RuleResult IsCabalFileOfInterest = CabalFileOfInterestResult
data CabalFileOfInterestResult = NotCabalFOI | IsCabalFOI FileOfInterestStatus
deriving (Eq, Show, Typeable, Generic)
instance Hashable CabalFileOfInterestResult
instance NFData CabalFileOfInterestResult
ofInterestRules :: Recorder (WithPriority Log) -> Rules ()
ofInterestRules recorder = do
Shake.addIdeGlobal . OfInterestCabalVar =<< liftIO (newVar HashMap.empty)
Shake.defineEarlyCutoff (cmapWithPrio LogShake recorder) $ RuleNoDiagnostics $ \IsCabalFileOfInterest f -> do
alwaysRerun
filesOfInterest <- getCabalFilesOfInterestUntracked
let foi = maybe NotCabalFOI IsCabalFOI $ f `HashMap.lookup` filesOfInterest
fp = summarize foi
res = (Just fp, Just foi)
return res
where
summarize NotCabalFOI = BS.singleton 0
summarize (IsCabalFOI OnDisk) = BS.singleton 1
summarize (IsCabalFOI (Modified False)) = BS.singleton 2
summarize (IsCabalFOI (Modified True)) = BS.singleton 3
getCabalFilesOfInterestUntracked :: Action (HashMap NormalizedFilePath FileOfInterestStatus)
getCabalFilesOfInterestUntracked = do
OfInterestCabalVar var <- Shake.getIdeGlobalAction
liftIO $ readVar var
addFileOfInterest :: Recorder (WithPriority Log) -> IdeState -> NormalizedFilePath -> FileOfInterestStatus -> IO ()
addFileOfInterest recorder state f v = do
OfInterestCabalVar var <- Shake.getIdeGlobalState state
(prev, files) <- modifyVar var $ \dict -> do
let (prev, new) = HashMap.alterF (, Just v) f dict
pure (new, (prev, new))
when (prev /= Just v) $ do
join $ atomically $ Shake.recordDirtyKeys (shakeExtras state) IsFileOfInterest [f]
log' Debug $ LogFOI files
where
log' = logWith recorder
deleteFileOfInterest :: Recorder (WithPriority Log) -> IdeState -> NormalizedFilePath -> IO ()
deleteFileOfInterest recorder state f = do
OfInterestCabalVar var <- Shake.getIdeGlobalState state
files <- modifyVar' var $ HashMap.delete f
join $ atomically $ Shake.recordDirtyKeys (shakeExtras state) IsFileOfInterest [f]
log' Debug $ LogFOI files
where
log' = logWith recorder
|
c8df57b92849174bec30ac6200e1049221e1ea33f2baa40d652dd1530ee3a19b | rtrusso/scp | read11.scm | (define s (call-with-input-file "tests/read-namedchar.scm"
(lambda (x) (read x))))
(display "read: ")
(newline)
(write s)
(newline)
(for-each (lambda (x)
(display " ")
(write x)
(display " -> #x")
(display (number->string (char->integer x) 16))
(newline))
s)
| null | https://raw.githubusercontent.com/rtrusso/scp/2051e76df14bd36aef81aba519ffafa62b260f5c/src/tests/read11.scm | scheme | (define s (call-with-input-file "tests/read-namedchar.scm"
(lambda (x) (read x))))
(display "read: ")
(newline)
(write s)
(newline)
(for-each (lambda (x)
(display " ")
(write x)
(display " -> #x")
(display (number->string (char->integer x) 16))
(newline))
s)
| |
c56ddb8e6c98bdbfaa41d2104455638f0b47c3c61c6297683d131ce09bcdccd2 | SparkFund/sails-forth | datomic.clj | (ns sails-forth.datomic
"Provides fns to assert the results of salesforce queries as datoms."
(:require [clj-time.coerce :as tc]
[clojure.set :as set]
[clojure.string :as s]
[sails-forth.client :as c]
[sails-forth.clojurify :as clj]
[sails-forth.query :as q]))
(def datomic-types
{"datetime" :db.type/instant
"date" :db.type/instant
"int" :db.type/long
"percent" :db.type/bigdec
"currency" :db.type/bigdec
"id" :db.type/string
"string" :db.type/string
"reference" :db.type/ref
"boolean" :db.type/boolean
"textarea" :db.type/string
"picklist" :db.type/string
"url" :db.type/uri
"multipicklist" :db.type/string
"phone" :db.type/string
"address" :db.type/ref
"email" :db.type/string
"encryptedstring" :db.type/string})
(defn field-ident
[ns-prefix field-name]
(keyword (str (name ns-prefix) ".field") (name field-name)))
(defn field-attr
[ns-prefix object-name field-name]
(keyword (str (name ns-prefix) ".object." (name object-name)) (name field-name)))
(defn compound-ident
[ns-prefix compound-name field-name]
(keyword (str (name ns-prefix) ".compoound." (name compound-name)) (name field-name)))
(defn metadata-schema
[ns-prefix]
;; More field metadata could come along for the ride
[{:db/ident (field-ident ns-prefix "name")
:db/doc "Salesforce field name"
:db/valueType :db.type/string
:db/cardinality :db.cardinality/one}
{:db/ident (field-ident ns-prefix "type")
:db/doc "Salesforce field type"
:db/valueType :db.type/string
:db/cardinality :db.cardinality/one}
{:db/ident (field-ident ns-prefix "formula")
:db/doc "Salesforce field formula"
:db/valueType :db.type/string
:db/cardinality :db.cardinality/one}
{:db/ident (field-ident ns-prefix "helptext")
:db/doc "Salesforce field help text"
:db/valueType :db.type/string
:db/cardinality :db.cardinality/one}
{:db/ident (compound-ident ns-prefix "address" "street")
:db/doc "Salesforce address street"
:db/valueType :db.type/string
:db/cardinality :db.cardinality/one}
{:db/ident (compound-ident ns-prefix "address" "city")
:db/doc "Salesforce address city"
:db/valueType :db.type/string
:db/cardinality :db.cardinality/one}
{:db/ident (compound-ident ns-prefix "address" "state-code")
:db/doc "Salesforce address state"
:db/valueType :db.type/string
:db/cardinality :db.cardinality/one}
{:db/ident (compound-ident ns-prefix "address" "postal-code")
:db/doc "Salesforce address postal code"
:db/valueType :db.type/string
:db/cardinality :db.cardinality/one}
{:db/ident (compound-ident ns-prefix "address" "country-code")
:db/doc "Salesforce address country"
:db/valueType :db.type/string
:db/cardinality :db.cardinality/one}])
(defn object-schema
[ns-prefix object-key fields]
(letfn [(field-datoms [[key field]]
(let [{:keys [name
label
type
calculatedFormula
inlineHelpText
unique]}
field
cardinality (if (= "multipicklist" type)
:db.cardinality/many
:db.cardinality/one)
recordtype? (= key :recordtype)
valuetype (cond
recordtype? :db.type/string
(= "double" type)
(case (clj/double-type field)
:long :db.type/long
:bigint :db.type/bigint
:bigdec :db.type/bigdec)
:else
(get datomic-types type))]
(cond-> {:db/ident (field-attr ns-prefix object-key key)
:db/doc label
:db/valueType valuetype
:db/cardinality cardinality
(field-ident ns-prefix "name") name
(field-ident ns-prefix "type") type}
funny that i d types do n't have : unique true
(or (= "id" type) unique)
(assoc :db/unique :db.unique/identity)
(= "address" type)
(assoc :db/isComponent true)
calculatedFormula
(assoc (field-ident ns-prefix "formula") calculatedFormula)
inlineHelpText
(assoc (field-ident ns-prefix "helptext") inlineHelpText))))]
(into [] (map field-datoms) fields)))
(defn build-schema
[client ns-prefix object-keys]
[(metadata-schema ns-prefix)
(into []
(mapcat (fn [object-key]
(object-schema ns-prefix object-key (c/get-fields client object-key))))
object-keys)])
(defn assert-object
[client ns-prefix object-key m]
(let [fields (c/get-fields client object-key)]
(reduce-kv (fn [txn field-key value]
(let [attr (field-attr ns-prefix object-key field-key)
field (get fields field-key)
{:keys [type referenceTo]} field
recordtype? (= field-key :recordtype)
[value ref-types]
(case type
"multipicklist"
[(s/split value #";")]
"date"
[(tc/to-date value)]
"reference"
(if-not recordtype?
(let [ref-key (clj/field->refers-attr field)
ref-object (assert-object client ns-prefix ref-key value)]
[(dissoc ref-object ::types)
(get ref-object ::types)])
[(get value :name)])
"address"
(let [{:keys [street city stateCode postalCode countryCode]} value
attr (partial compound-ident ns-prefix "address")]
[(cond-> {}
street (assoc (attr "street") street)
city (assoc (attr "city") city)
stateCode (assoc (attr "state-code") stateCode)
postalCode (assoc (attr "postal-code") postalCode)
countryCode (assoc (attr "country-code") countryCode))])
[value])]
(-> txn
(assoc attr value)
(cond-> (seq ref-types)
(update ::types into ref-types)))))
{::types #{object-key}}
m)))
(defn assert-query
"Returns a seq of transaction seqs that if transacted in order will assert
the results of the given query in a datomic database.
Given an ns-prefix of `\"sf\"` and a query of
`{:find [:customer :id :sectors [:contact :id :phone]]}`:
The first transaction asserts a set of attributes that will be defined on the
attributes that will model the salesforce fields where there is no direct
datomic analog, e.g. `sf.field/helptext`.
The second transaction asserts a set of attributes that model the
fields of objects used in the query,
e.g. `sf.object.customer/id`. Note this is a complete set of
attributes, not limited simply to those used in the query.
The last transaction asserts the entities returned by the query.
Most field values have natural datomic types. Notable exceptions include:
* picklist, multipicklist: stored as strings. The api does not provide any
access to inactive picklist items, which makes asserting e.g. enum values
problematic
* recordtype references: stored as strings for similar reasons
* address: stored as component references
There are some modest restrictions on the queries that can be asserted.
All join references must include an identity field, except for recordtype
joins which must only include the `:name` field."
[client ns-prefix query]
(let [objects (into []
(comp (map (comp first seq))
(map (partial apply assert-object client ns-prefix)))
(q/query client query))
object-keys (reduce set/union #{} (map ::types objects))]
(conj (build-schema client ns-prefix object-keys)
(into [] (map (fn [m] (dissoc m ::types)) objects)))))
| null | https://raw.githubusercontent.com/SparkFund/sails-forth/acc97f01d5d1bf21ee7139eee9360d363515d737/src/sails_forth/datomic.clj | clojure | More field metadata could come along for the ride | (ns sails-forth.datomic
"Provides fns to assert the results of salesforce queries as datoms."
(:require [clj-time.coerce :as tc]
[clojure.set :as set]
[clojure.string :as s]
[sails-forth.client :as c]
[sails-forth.clojurify :as clj]
[sails-forth.query :as q]))
(def datomic-types
{"datetime" :db.type/instant
"date" :db.type/instant
"int" :db.type/long
"percent" :db.type/bigdec
"currency" :db.type/bigdec
"id" :db.type/string
"string" :db.type/string
"reference" :db.type/ref
"boolean" :db.type/boolean
"textarea" :db.type/string
"picklist" :db.type/string
"url" :db.type/uri
"multipicklist" :db.type/string
"phone" :db.type/string
"address" :db.type/ref
"email" :db.type/string
"encryptedstring" :db.type/string})
(defn field-ident
[ns-prefix field-name]
(keyword (str (name ns-prefix) ".field") (name field-name)))
(defn field-attr
[ns-prefix object-name field-name]
(keyword (str (name ns-prefix) ".object." (name object-name)) (name field-name)))
(defn compound-ident
[ns-prefix compound-name field-name]
(keyword (str (name ns-prefix) ".compoound." (name compound-name)) (name field-name)))
(defn metadata-schema
[ns-prefix]
[{:db/ident (field-ident ns-prefix "name")
:db/doc "Salesforce field name"
:db/valueType :db.type/string
:db/cardinality :db.cardinality/one}
{:db/ident (field-ident ns-prefix "type")
:db/doc "Salesforce field type"
:db/valueType :db.type/string
:db/cardinality :db.cardinality/one}
{:db/ident (field-ident ns-prefix "formula")
:db/doc "Salesforce field formula"
:db/valueType :db.type/string
:db/cardinality :db.cardinality/one}
{:db/ident (field-ident ns-prefix "helptext")
:db/doc "Salesforce field help text"
:db/valueType :db.type/string
:db/cardinality :db.cardinality/one}
{:db/ident (compound-ident ns-prefix "address" "street")
:db/doc "Salesforce address street"
:db/valueType :db.type/string
:db/cardinality :db.cardinality/one}
{:db/ident (compound-ident ns-prefix "address" "city")
:db/doc "Salesforce address city"
:db/valueType :db.type/string
:db/cardinality :db.cardinality/one}
{:db/ident (compound-ident ns-prefix "address" "state-code")
:db/doc "Salesforce address state"
:db/valueType :db.type/string
:db/cardinality :db.cardinality/one}
{:db/ident (compound-ident ns-prefix "address" "postal-code")
:db/doc "Salesforce address postal code"
:db/valueType :db.type/string
:db/cardinality :db.cardinality/one}
{:db/ident (compound-ident ns-prefix "address" "country-code")
:db/doc "Salesforce address country"
:db/valueType :db.type/string
:db/cardinality :db.cardinality/one}])
(defn object-schema
[ns-prefix object-key fields]
(letfn [(field-datoms [[key field]]
(let [{:keys [name
label
type
calculatedFormula
inlineHelpText
unique]}
field
cardinality (if (= "multipicklist" type)
:db.cardinality/many
:db.cardinality/one)
recordtype? (= key :recordtype)
valuetype (cond
recordtype? :db.type/string
(= "double" type)
(case (clj/double-type field)
:long :db.type/long
:bigint :db.type/bigint
:bigdec :db.type/bigdec)
:else
(get datomic-types type))]
(cond-> {:db/ident (field-attr ns-prefix object-key key)
:db/doc label
:db/valueType valuetype
:db/cardinality cardinality
(field-ident ns-prefix "name") name
(field-ident ns-prefix "type") type}
funny that i d types do n't have : unique true
(or (= "id" type) unique)
(assoc :db/unique :db.unique/identity)
(= "address" type)
(assoc :db/isComponent true)
calculatedFormula
(assoc (field-ident ns-prefix "formula") calculatedFormula)
inlineHelpText
(assoc (field-ident ns-prefix "helptext") inlineHelpText))))]
(into [] (map field-datoms) fields)))
(defn build-schema
[client ns-prefix object-keys]
[(metadata-schema ns-prefix)
(into []
(mapcat (fn [object-key]
(object-schema ns-prefix object-key (c/get-fields client object-key))))
object-keys)])
(defn assert-object
[client ns-prefix object-key m]
(let [fields (c/get-fields client object-key)]
(reduce-kv (fn [txn field-key value]
(let [attr (field-attr ns-prefix object-key field-key)
field (get fields field-key)
{:keys [type referenceTo]} field
recordtype? (= field-key :recordtype)
[value ref-types]
(case type
"multipicklist"
[(s/split value #";")]
"date"
[(tc/to-date value)]
"reference"
(if-not recordtype?
(let [ref-key (clj/field->refers-attr field)
ref-object (assert-object client ns-prefix ref-key value)]
[(dissoc ref-object ::types)
(get ref-object ::types)])
[(get value :name)])
"address"
(let [{:keys [street city stateCode postalCode countryCode]} value
attr (partial compound-ident ns-prefix "address")]
[(cond-> {}
street (assoc (attr "street") street)
city (assoc (attr "city") city)
stateCode (assoc (attr "state-code") stateCode)
postalCode (assoc (attr "postal-code") postalCode)
countryCode (assoc (attr "country-code") countryCode))])
[value])]
(-> txn
(assoc attr value)
(cond-> (seq ref-types)
(update ::types into ref-types)))))
{::types #{object-key}}
m)))
(defn assert-query
"Returns a seq of transaction seqs that if transacted in order will assert
the results of the given query in a datomic database.
Given an ns-prefix of `\"sf\"` and a query of
`{:find [:customer :id :sectors [:contact :id :phone]]}`:
The first transaction asserts a set of attributes that will be defined on the
attributes that will model the salesforce fields where there is no direct
datomic analog, e.g. `sf.field/helptext`.
The second transaction asserts a set of attributes that model the
fields of objects used in the query,
e.g. `sf.object.customer/id`. Note this is a complete set of
attributes, not limited simply to those used in the query.
The last transaction asserts the entities returned by the query.
Most field values have natural datomic types. Notable exceptions include:
* picklist, multipicklist: stored as strings. The api does not provide any
access to inactive picklist items, which makes asserting e.g. enum values
problematic
* recordtype references: stored as strings for similar reasons
* address: stored as component references
There are some modest restrictions on the queries that can be asserted.
All join references must include an identity field, except for recordtype
joins which must only include the `:name` field."
[client ns-prefix query]
(let [objects (into []
(comp (map (comp first seq))
(map (partial apply assert-object client ns-prefix)))
(q/query client query))
object-keys (reduce set/union #{} (map ::types objects))]
(conj (build-schema client ns-prefix object-keys)
(into [] (map (fn [m] (dissoc m ::types)) objects)))))
|
e15650e96317279f387c5b59c809f02b1861d2af035522db59118f8f7108f0f3 | amosr/folderol | Zip.hs | # LANGUAGE NoImplicitPrelude #
# LANGUAGE TemplateHaskell #
module Folderol.Process.Zip where
import Folderol.Process.Map
import Folderol.Typed
import qualified Folderol.Typed.Process as Proc
import P
import qualified Folderol.Internal.Haskell as Haskell
zip :: Monad m => Channel a -> Channel b -> Network m (Channel (a, b))
zip as bs = Proc.proc "zip" $ do
i0 <- Proc.input as
i1 <- Proc.input bs
o0 <- Proc.output
l0 <- Proc.label0
l1 <- Proc.label1
l2 <- Proc.label2
l3 <- Proc.label0
l4 <- Proc.label0
l5 <- Proc.label0
Proc.instr0 l0 $
Proc.pull i0 l1 l5
Proc.instr1 l1 $ \x ->
Proc.pull i1 (l2 x) l5
Proc.instr2 l2 $ \x y ->
Proc.push o0 [||($$x,$$y)||] l3
Proc.instr0 l3 $
Proc.drop i0 l4
Proc.instr0 l4 $
Proc.drop i1 l0
Proc.instr0 l5 $
Proc.done
return (l0, o0)
zipWith :: Monad m => Haskell.TExpQ (a -> b -> c) -> Channel a -> Channel b -> Network m (Channel c)
zipWith f as bs =
zip as bs >>= map [|| \(a,b) -> $$f a b ||]
unzip :: Monad m => Channel (a,b) -> Network m (Channel a, Channel b)
unzip as = Proc.proc "unzip" $ do
i0 <- Proc.input as
o0 <- Proc.output
o1 <- Proc.output
l0 <- Proc.label0
l1 <- Proc.label1
l2 <- Proc.label1
l3 <- Proc.label0
l4 <- Proc.label0
Proc.instr0 l0 $
Proc.pull i0 l1 l4
Proc.instr1 l1 $ \x ->
Proc.push o0 [||fst $$x||] (l2 x)
Proc.instr1 l2 $ \x ->
Proc.push o1 [||snd $$x||] l3
Proc.instr0 l3 $
Proc.drop i0 l0
Proc.instr0 l4 $
Proc.done
return (l0, (o0, o1))
| null | https://raw.githubusercontent.com/amosr/folderol/9b8c0cd30cfb798dadaa404cc66404765b1fc4fe/src/Folderol/Process/Zip.hs | haskell | # LANGUAGE NoImplicitPrelude #
# LANGUAGE TemplateHaskell #
module Folderol.Process.Zip where
import Folderol.Process.Map
import Folderol.Typed
import qualified Folderol.Typed.Process as Proc
import P
import qualified Folderol.Internal.Haskell as Haskell
zip :: Monad m => Channel a -> Channel b -> Network m (Channel (a, b))
zip as bs = Proc.proc "zip" $ do
i0 <- Proc.input as
i1 <- Proc.input bs
o0 <- Proc.output
l0 <- Proc.label0
l1 <- Proc.label1
l2 <- Proc.label2
l3 <- Proc.label0
l4 <- Proc.label0
l5 <- Proc.label0
Proc.instr0 l0 $
Proc.pull i0 l1 l5
Proc.instr1 l1 $ \x ->
Proc.pull i1 (l2 x) l5
Proc.instr2 l2 $ \x y ->
Proc.push o0 [||($$x,$$y)||] l3
Proc.instr0 l3 $
Proc.drop i0 l4
Proc.instr0 l4 $
Proc.drop i1 l0
Proc.instr0 l5 $
Proc.done
return (l0, o0)
zipWith :: Monad m => Haskell.TExpQ (a -> b -> c) -> Channel a -> Channel b -> Network m (Channel c)
zipWith f as bs =
zip as bs >>= map [|| \(a,b) -> $$f a b ||]
unzip :: Monad m => Channel (a,b) -> Network m (Channel a, Channel b)
unzip as = Proc.proc "unzip" $ do
i0 <- Proc.input as
o0 <- Proc.output
o1 <- Proc.output
l0 <- Proc.label0
l1 <- Proc.label1
l2 <- Proc.label1
l3 <- Proc.label0
l4 <- Proc.label0
Proc.instr0 l0 $
Proc.pull i0 l1 l4
Proc.instr1 l1 $ \x ->
Proc.push o0 [||fst $$x||] (l2 x)
Proc.instr1 l2 $ \x ->
Proc.push o1 [||snd $$x||] l3
Proc.instr0 l3 $
Proc.drop i0 l0
Proc.instr0 l4 $
Proc.done
return (l0, (o0, o1))
| |
2bbabad8033110da5d39a28e7c732ca36e9e61fc51396ea9e0fe6164ba5a0e9b | plkrueger/CocoaInterface | ccl-additions-for-cocoa-tools.lisp | ;; ccl-additions-for-cocoa-tools
;; This file includes additional functions and methods that arguably should be located
;; elsewhere. Given my reluctance to mess with the standard files they are here instead.
should be in cocoa-editor.lisp
;; This is needed because the IDE search function tries to discover what the default
;; directory should be by looking at where open hemlock windows come from. Unfortunately
;; it tries to call this on all open windows with documents, not just hemlock windows,
;; so we need this hack to avoid a runtime error from in inapplicable method.
This should be defined in cocoa-editor.lisp with other versions of this method .
(defmethod gui::document-pathname ((doc ns:ns-document))
nil)
should be in l1-clos-boot.lisp or maybe in objc-clos.lisp
;; This is just an extension of the methods that already exist to objc-class-object
(defmethod ccl::class-prototype ((class objc::objc-class-object))
(or (ccl::%class.prototype class)
(setf (ccl::%class.prototype class) (allocate-instance class)))) | null | https://raw.githubusercontent.com/plkrueger/CocoaInterface/169490ba6649b5a6e8973bfa3d3ee99c824ddcbd/Cocoa%20Dev/ccl-additions-for-cocoa-tools.lisp | lisp | ccl-additions-for-cocoa-tools
This file includes additional functions and methods that arguably should be located
elsewhere. Given my reluctance to mess with the standard files they are here instead.
This is needed because the IDE search function tries to discover what the default
directory should be by looking at where open hemlock windows come from. Unfortunately
it tries to call this on all open windows with documents, not just hemlock windows,
so we need this hack to avoid a runtime error from in inapplicable method.
This is just an extension of the methods that already exist to objc-class-object |
should be in cocoa-editor.lisp
This should be defined in cocoa-editor.lisp with other versions of this method .
(defmethod gui::document-pathname ((doc ns:ns-document))
nil)
should be in l1-clos-boot.lisp or maybe in objc-clos.lisp
(defmethod ccl::class-prototype ((class objc::objc-class-object))
(or (ccl::%class.prototype class)
(setf (ccl::%class.prototype class) (allocate-instance class)))) |
133e7a17268d82f2cfe7b7064b1bee26d15da2237711d600f369d76fd05db24b | korya/efuns | wX_filesel.mli | (***********************************************************************)
(* *)
(* ____ *)
(* *)
Fabrice Le Fessant , projet Para / SOR , INRIA Rocquencourt
(* *)
Copyright 1999 Institut National de Recherche en Informatique et
Automatique . Distributed only by permission .
(* *)
(***********************************************************************)
type info =
{ filter: string;
current_selection: string;
predicat: info -> bool;
mutable action: string -> unit;
mutable cancel: unit -> unit }
class t :
WX_root.t ->
info ->
WX_types.base_attributes list ->
object
inherit WX_deleg.wx_object
inherit WX_deleg.wmtop
end
| null | https://raw.githubusercontent.com/korya/efuns/78b21d9dff45b7eec764c63132c7a564f5367c30/toolkit/wX_filesel.mli | ocaml | *********************************************************************
____
********************************************************************* | Fabrice Le Fessant , projet Para / SOR , INRIA Rocquencourt
Copyright 1999 Institut National de Recherche en Informatique et
Automatique . Distributed only by permission .
type info =
{ filter: string;
current_selection: string;
predicat: info -> bool;
mutable action: string -> unit;
mutable cancel: unit -> unit }
class t :
WX_root.t ->
info ->
WX_types.base_attributes list ->
object
inherit WX_deleg.wx_object
inherit WX_deleg.wmtop
end
|
6eb3576a93e5f0cf603e0bf9aabcdb34facd3016a40c85bc07d2dfa1bd4075fe | Mercerenies/eulers-melting-pot | bwgen.lisp |
;;; Beeswax code generator
(defgeneric to-code (code))
beeswax - code is a wrapper around a 2 - dimensional array that
;; provides automatic adjustment to the size when needed.
(defclass beeswax-code ()
((raw-code :accessor raw-code
:initform (make-array '(0 0)
:element-type 'standard-char
:initial-element #\SPACE
:adjustable t))))
Note : If we use : element - type ' standard - char here , says
something silly like " do n't allocate 2 GB arrays " and then crashes .
;; So we use boxed arrays to avoid those nonsense warnings :P
(defun make-code ()
(make-instance 'beeswax-code))
(defun get-char (code y x)
(with-accessors ((raw-code raw-code)) code
(if (array-in-bounds-p raw-code y x)
(aref raw-code y x)
#\SPACE)))
(defun (setf get-char) (new-value code y x)
(with-accessors ((raw-code raw-code)) code
(unless (array-in-bounds-p raw-code y x)
(destructuring-bind (h w) (array-dimensions raw-code)
(let ((h (max h (1+ y)))
(w (max w (1+ x))))
(adjust-array raw-code (list h w) :initial-element #\space))))
(setf (aref raw-code y x) new-value)))
(defun dims (code)
(array-dimensions (raw-code code)))
(defmethod to-code ((code beeswax-code))
(destructuring-bind (h w) (dims code)
(format t "~{~{~A~}~%~}"
(loop for y from 0 below h
collect (loop for x from 0 below w
collect (get-char code y x))))))
;; beeswax-program is a higher-level abstraction that's used to
;; generate the actual code.
(defconstant +local-var-row0+ 0)
(defconstant +local-var-row1+ 1)
(defconstant +gray-row0+ 2)
(defconstant +gray-row1+ 3)
(defconstant +red-row0+ 4)
(defconstant +red-row1+ 5)
(defconstant +code-row+ 6)
(defclass beeswax-program ()
((code :accessor code
:initform (make-code))
(pointer :accessor pointer
:initform 0)))
(defun make-program ()
(make-instance 'beeswax-program))
(defparameter *program* (make-program))
Returns the new program pointer , which points to ( 0 - indexed ) one
after the end or ( 1 - indexed ) the end of the string just added .
(defun write-code (string &key (program *program*))
(with-accessors ((code code) (pointer pointer)) program
(loop for ch across string
do (progn (setf (get-char code +code-row+ pointer) ch)
(incf pointer)))
pointer))
Meant to be used for low numbers ( Maybe up to 15 or so )
(defun produce-number (n)
(etypecase n
(symbol (produce-number (ecase n
(local0 (1+ +local-var-row0+))
(local1 (1+ +local-var-row1+))
(gray0 (1+ +gray-row0+))
(gray1 (1+ +gray-row1+))
(red0 (1+ +red-row0+))
(red1 (1+ +red-row1+))
(code (1+ +code-row+)))))
(number (if (< n 10)
(format nil "~A" n)
(format nil "~AP" (produce-number (1- n)))))))
(defmethod to-code ((program beeswax-program))
(to-code (code program)))
;; Actual commands
(defun raw (s)
(write-code s))
(defun 2^31 ()
(raw (format nil "~A~~~AB" (produce-number 31) (produce-number 2))))
Unpacks lstack[1 ] into lstack[1,2 ] ( mod in 2 , div in 1 )
(raw "f@")
(2^31)
(raw "~@%")
(set-value 'local0 4)
(raw "g?@")
(2^31)
(raw "~@:f")
(get-value 'local0 4)
(raw "~g?"))
(defun pack () ; Packs lstack[1,2] into lstack[1]
(raw "~f~f")
(2^31)
(raw "~g?.~g?+"))
(defun get-value (y x)
(let ((y (produce-number y))
(x (produce-number x)))
(write-code (format nil "~A~~~A@G" y x))))
Sets to top of
(let ((y (produce-number y))
(x (produce-number x)))
(write-code (format nil "~~~A~~@~A@D" y x))))
Assumes x is in top of
(let ((y (produce-number y)))
(write-code (format nil "@~A~~G" y))))
Assumes value is in top of , x is third on
(let ((y (produce-number y)))
(write-code (format nil "~~~A~~D" y))))
(defun get-at-index-1 (y)
(let ((y0 (ecase y (gray 'gray0) (red 'red0)))
(y1 (ecase y (gray 'gray1) (red 'red1))))
(raw "f")
(get-at-index y0) ; Modulo value
(set-value 'local0 4)
(raw "g?")
Div value
(raw "f")
(get-value 'local0 4)
(raw "~g?")
(pack)))
(defun set-at-index-1 (y)
(let ((y0 (ecase y (gray 'gray0) (red 'red0)))
(y1 (ecase y (gray 'gray1) (red 'red1))))
(raw "@f@")
(unpack)
(raw "f~")
(set-value 'local0 5) ; Modulo value
(raw "g?")
(raw "@g@")
(set-at-index y1)
(get-value 'local0 5)
(raw "@g@")
(set-at-index y0)
(raw "?")))
(defun get-local-packed (x)
(get-value 'local0 x) ; Modulo value
(raw "f")
Div value
(raw "~g?~")
(pack))
(defun set-local-packed (x)
(unpack)
(raw "f~")
(set-value 'local0 5) ; Modulo value
(raw "g?")
(set-value 'local1 x)
(get-value 'local0 5)
(set-value 'local0 x))
(defun nop () ; Need these wherever we're going to land from a goto.
(write-code ">"))
(defun ll ()
(write-code "p"))
(defun lr ()
(write-code "q"))
Specify two points and we 'll create a reflect
(if (< b a) ; position between them.
(reflect b a)
1 - indexed argument to 0 - indexed array
(y (+ +code-row+ (- b a))))
(setf (get-char (code *program*) y x) #\u))))
(defun condition-form (form)
(ecase form
(a>0 (raw "\""))
(a<=0 (raw "\"Q"))
(a=0 (raw "'"))
(a!=0 (raw "'Q"))
(a=b (raw "K"))
(a!=b (raw "KQ"))
(a>b (raw "L"))
(a<=b (raw "LQ"))))
(defmacro if-stmt (form &body body)
(let ((jump-point-a (gensym))
(jump-point-b (gensym)))
`(progn (condition-form ',form)
(let ((,jump-point-a (lr)))
,@body
(let ((,jump-point-b (nop)))
(reflect ,jump-point-a ,jump-point-b))))))
(defmacro do-while-stmt (form &body body)
(let ((jump-point-a (gensym))
(jump-point-b (gensym)))
`(let ((,jump-point-a (nop)))
,@body
(condition-form ',form)
(raw "Q")
(let ((,jump-point-b (ll)))
(reflect ,jump-point-a ,jump-point-b)))))
(defmacro while-stmt (form &body body)
`(if-stmt ,form
(do-while-stmt ,form
,@body)))
Locals : i , j , / r , packing1 ,
Program start
(raw "1") (set-value 'gray0 1)
(raw "0") (set-value 'gray1 1)
(raw "1") (set-value 'red0 1)
(raw "0") (set-value 'red1 1)
(raw "2") (set-value 'local0 1)
(raw "2") (set-value 'local0 1)
(do-while-stmt a<=b
;; g calculation
(raw "1") (set-value 'local0 2)
(raw "1") (set-local-packed 3)
(get-value 'local0 2) (raw "PPf") (get-value 'local0 1) (raw "~g?~") ; j + 2 < i
(while-stmt a>b
(get-local-packed 3)
(raw "f")
(get-value 'local0 2)
(get-at-index-1 'red)
(raw "~g?+")
(set-local-packed 3)
j + = 1
(get-value 'local0 2) (raw "PPf") (get-value 'local0 1) (raw "~g?~")) ; j + 2 < i
(get-local-packed 3)
(raw "f")
(get-value 'local0 1)
(raw "@g?")
(set-at-index-1 'gray)
;; r calculation
(raw "1") (set-value 'local0 2)
(raw "1") (set-local-packed 3)
(get-value 'local0 2) (raw "f") (get-value 'local0 1) (raw "~g?~") ; j < i
(while-stmt a>b
(get-local-packed 3)
(raw "f")
(get-value 'local0 2)
(get-at-index-1 'gray)
(raw "~g?+")
(set-local-packed 3)
j + = 1
(get-value 'local0 2) (raw "f") (get-value 'local0 1) (raw "~g?~")) ; j < i
(get-local-packed 3)
(raw "f")
(get-value 'local0 1)
(raw "@g?")
(set-at-index-1 'red)
i + = 1
i < = 51
51
(get-at-index-1 'gray)
(raw "{")
(to-code *program*)
| null | https://raw.githubusercontent.com/Mercerenies/eulers-melting-pot/68bcb9b1d46278c0bab4ab6200bcba09be8e4930/etc/bwgen.lisp | lisp | Beeswax code generator
provides automatic adjustment to the size when needed.
So we use boxed arrays to avoid those nonsense warnings :P
beeswax-program is a higher-level abstraction that's used to
generate the actual code.
Actual commands
Packs lstack[1,2] into lstack[1]
Modulo value
Modulo value
Modulo value
Modulo value
Need these wherever we're going to land from a goto.
position between them.
g calculation
j + 2 < i
j + 2 < i
r calculation
j < i
j < i |
(defgeneric to-code (code))
beeswax - code is a wrapper around a 2 - dimensional array that
(defclass beeswax-code ()
((raw-code :accessor raw-code
:initform (make-array '(0 0)
:element-type 'standard-char
:initial-element #\SPACE
:adjustable t))))
Note : If we use : element - type ' standard - char here , says
something silly like " do n't allocate 2 GB arrays " and then crashes .
(defun make-code ()
(make-instance 'beeswax-code))
(defun get-char (code y x)
(with-accessors ((raw-code raw-code)) code
(if (array-in-bounds-p raw-code y x)
(aref raw-code y x)
#\SPACE)))
(defun (setf get-char) (new-value code y x)
(with-accessors ((raw-code raw-code)) code
(unless (array-in-bounds-p raw-code y x)
(destructuring-bind (h w) (array-dimensions raw-code)
(let ((h (max h (1+ y)))
(w (max w (1+ x))))
(adjust-array raw-code (list h w) :initial-element #\space))))
(setf (aref raw-code y x) new-value)))
(defun dims (code)
(array-dimensions (raw-code code)))
(defmethod to-code ((code beeswax-code))
(destructuring-bind (h w) (dims code)
(format t "~{~{~A~}~%~}"
(loop for y from 0 below h
collect (loop for x from 0 below w
collect (get-char code y x))))))
(defconstant +local-var-row0+ 0)
(defconstant +local-var-row1+ 1)
(defconstant +gray-row0+ 2)
(defconstant +gray-row1+ 3)
(defconstant +red-row0+ 4)
(defconstant +red-row1+ 5)
(defconstant +code-row+ 6)
(defclass beeswax-program ()
((code :accessor code
:initform (make-code))
(pointer :accessor pointer
:initform 0)))
(defun make-program ()
(make-instance 'beeswax-program))
(defparameter *program* (make-program))
Returns the new program pointer , which points to ( 0 - indexed ) one
after the end or ( 1 - indexed ) the end of the string just added .
(defun write-code (string &key (program *program*))
(with-accessors ((code code) (pointer pointer)) program
(loop for ch across string
do (progn (setf (get-char code +code-row+ pointer) ch)
(incf pointer)))
pointer))
Meant to be used for low numbers ( Maybe up to 15 or so )
(defun produce-number (n)
(etypecase n
(symbol (produce-number (ecase n
(local0 (1+ +local-var-row0+))
(local1 (1+ +local-var-row1+))
(gray0 (1+ +gray-row0+))
(gray1 (1+ +gray-row1+))
(red0 (1+ +red-row0+))
(red1 (1+ +red-row1+))
(code (1+ +code-row+)))))
(number (if (< n 10)
(format nil "~A" n)
(format nil "~AP" (produce-number (1- n)))))))
(defmethod to-code ((program beeswax-program))
(to-code (code program)))
(defun raw (s)
(write-code s))
(defun 2^31 ()
(raw (format nil "~A~~~AB" (produce-number 31) (produce-number 2))))
Unpacks lstack[1 ] into lstack[1,2 ] ( mod in 2 , div in 1 )
(raw "f@")
(2^31)
(raw "~@%")
(set-value 'local0 4)
(raw "g?@")
(2^31)
(raw "~@:f")
(get-value 'local0 4)
(raw "~g?"))
(raw "~f~f")
(2^31)
(raw "~g?.~g?+"))
(defun get-value (y x)
(let ((y (produce-number y))
(x (produce-number x)))
(write-code (format nil "~A~~~A@G" y x))))
Sets to top of
(let ((y (produce-number y))
(x (produce-number x)))
(write-code (format nil "~~~A~~@~A@D" y x))))
Assumes x is in top of
(let ((y (produce-number y)))
(write-code (format nil "@~A~~G" y))))
Assumes value is in top of , x is third on
(let ((y (produce-number y)))
(write-code (format nil "~~~A~~D" y))))
(defun get-at-index-1 (y)
(let ((y0 (ecase y (gray 'gray0) (red 'red0)))
(y1 (ecase y (gray 'gray1) (red 'red1))))
(raw "f")
(set-value 'local0 4)
(raw "g?")
Div value
(raw "f")
(get-value 'local0 4)
(raw "~g?")
(pack)))
(defun set-at-index-1 (y)
(let ((y0 (ecase y (gray 'gray0) (red 'red0)))
(y1 (ecase y (gray 'gray1) (red 'red1))))
(raw "@f@")
(unpack)
(raw "f~")
(raw "g?")
(raw "@g@")
(set-at-index y1)
(get-value 'local0 5)
(raw "@g@")
(set-at-index y0)
(raw "?")))
(defun get-local-packed (x)
(raw "f")
Div value
(raw "~g?~")
(pack))
(defun set-local-packed (x)
(unpack)
(raw "f~")
(raw "g?")
(set-value 'local1 x)
(get-value 'local0 5)
(set-value 'local0 x))
(write-code ">"))
(defun ll ()
(write-code "p"))
(defun lr ()
(write-code "q"))
Specify two points and we 'll create a reflect
(reflect b a)
1 - indexed argument to 0 - indexed array
(y (+ +code-row+ (- b a))))
(setf (get-char (code *program*) y x) #\u))))
(defun condition-form (form)
(ecase form
(a>0 (raw "\""))
(a<=0 (raw "\"Q"))
(a=0 (raw "'"))
(a!=0 (raw "'Q"))
(a=b (raw "K"))
(a!=b (raw "KQ"))
(a>b (raw "L"))
(a<=b (raw "LQ"))))
(defmacro if-stmt (form &body body)
(let ((jump-point-a (gensym))
(jump-point-b (gensym)))
`(progn (condition-form ',form)
(let ((,jump-point-a (lr)))
,@body
(let ((,jump-point-b (nop)))
(reflect ,jump-point-a ,jump-point-b))))))
(defmacro do-while-stmt (form &body body)
(let ((jump-point-a (gensym))
(jump-point-b (gensym)))
`(let ((,jump-point-a (nop)))
,@body
(condition-form ',form)
(raw "Q")
(let ((,jump-point-b (ll)))
(reflect ,jump-point-a ,jump-point-b)))))
(defmacro while-stmt (form &body body)
`(if-stmt ,form
(do-while-stmt ,form
,@body)))
Locals : i , j , / r , packing1 ,
Program start
(raw "1") (set-value 'gray0 1)
(raw "0") (set-value 'gray1 1)
(raw "1") (set-value 'red0 1)
(raw "0") (set-value 'red1 1)
(raw "2") (set-value 'local0 1)
(raw "2") (set-value 'local0 1)
(do-while-stmt a<=b
(raw "1") (set-value 'local0 2)
(raw "1") (set-local-packed 3)
(while-stmt a>b
(get-local-packed 3)
(raw "f")
(get-value 'local0 2)
(get-at-index-1 'red)
(raw "~g?+")
(set-local-packed 3)
j + = 1
(get-local-packed 3)
(raw "f")
(get-value 'local0 1)
(raw "@g?")
(set-at-index-1 'gray)
(raw "1") (set-value 'local0 2)
(raw "1") (set-local-packed 3)
(while-stmt a>b
(get-local-packed 3)
(raw "f")
(get-value 'local0 2)
(get-at-index-1 'gray)
(raw "~g?+")
(set-local-packed 3)
j + = 1
(get-local-packed 3)
(raw "f")
(get-value 'local0 1)
(raw "@g?")
(set-at-index-1 'red)
i + = 1
i < = 51
51
(get-at-index-1 'gray)
(raw "{")
(to-code *program*)
|
5bacf8d210badde01c0419f0b6e4cd4f091c83fcce55c1372841307f44cbad3c | hakaru-dev/hakaru | Types.hs | # LANGUAGE CPP
, GADTs
, KindSignatures
, , PolyKinds
, TypeOperators
, Rank2Types
, BangPatterns
, FlexibleContexts
, MultiParamTypeClasses
, FunctionalDependencies
, FlexibleInstances
, UndecidableInstances
, EmptyCase
, ScopedTypeVariables
#
, GADTs
, KindSignatures
, DataKinds
, PolyKinds
, TypeOperators
, Rank2Types
, BangPatterns
, FlexibleContexts
, MultiParamTypeClasses
, FunctionalDependencies
, FlexibleInstances
, UndecidableInstances
, EmptyCase
, ScopedTypeVariables
#-}
{-# OPTIONS_GHC -Wall -fwarn-tabs #-}
----------------------------------------------------------------
-- 2016.04.28
-- |
-- Module : Language.Hakaru.Evaluation.Types
Copyright : Copyright ( c ) 2016 the Hakaru team
-- License : BSD3
-- Maintainer :
-- Stability : experimental
Portability : GHC - only
--
The data types for " Language . Hakaru . Evaluation . Lazy "
--
-- BUG: completely gave up on structure sharing. Need to add that back in.
--
TODO : once we figure out the exact API\/type of ' evaluate ' and
-- can separate it from Disintegrate.hs vs its other clients (i.e.,
-- Sample.hs and Expect.hs), this file will prolly be broken up
-- into Lazy.hs itself vs Disintegrate.hs
----------------------------------------------------------------
module Language.Hakaru.Evaluation.Types
(
-- * Terms in particular known forms\/formats
Head(..), fromHead, toHead, viewHeadDatum
, Whnf(..), fromWhnf, toWhnf, caseWhnf, viewWhnfDatum
, Lazy(..), fromLazy, caseLazy
, getLazyVariable, isLazyVariable
, getLazyLiteral, isLazyLiteral
-- * Lazy partial evaluation
, TermEvaluator
, MeasureEvaluator
, CaseEvaluator
, VariableEvaluator
-- * The monad for partial evaluation
, Purity(..), Statement(..), statementVars, isBoundBy
, Index, indVar, indSize, fromIndex
, Location(..), locEq, locHint, locType, locations1
, fromLocation, fromLocations1, freshenLoc, freshenLocs
, LAssoc, LAssocs , emptyLAssocs, singletonLAssocs
, toLAssocs1, insertLAssocs, lookupLAssoc
#ifdef __TRACE_DISINTEGRATE__
, ppList
, ppInds
, ppStatement
, pretty_Statements
, pretty_Statements_withTerm
, prettyAssocs
#endif
, EvaluationMonad(..)
, defaultCaseEvaluator
, toVarStatements
, extSubst
, extSubsts
, freshVar
, freshenVar
, Hint(..), freshVars
, freshenVars
, freshInd
TODO : should we expose these ?
, freshLocStatement
, push _
, freshLocStatement
, push_
-}
, push
, pushes
) where
import Prelude hiding (id, (.))
import Control.Category (Category(..))
#if __GLASGOW_HASKELL__ < 710
import Data.Monoid (Monoid(..))
import Data.Functor ((<$>))
import Control.Applicative (Applicative(..))
import Data.Traversable
#endif
import Control.Arrow ((***))
import qualified Data.Foldable as F
import Data.List.NonEmpty (NonEmpty(..))
import qualified Data.Text as T
import Data.Text (Text)
import Data.Proxy (KProxy(..))
import Language.Hakaru.Syntax.IClasses
import Data.Number.Nat
import Language.Hakaru.Types.DataKind
import Language.Hakaru.Types.Sing (Sing(..))
import Language.Hakaru.Types.Coercion
import Language.Hakaru.Syntax.AST
import Language.Hakaru.Syntax.Datum
import Language.Hakaru.Syntax.DatumCase (DatumEvaluator,
MatchResult(..),
matchBranches)
import Language.Hakaru.Syntax.AST.Eq (alphaEq)
import Language . Hakaru . Syntax .
import Language.Hakaru.Syntax.ABT
import qualified Language.Hakaru.Syntax.Prelude as P
#ifdef __TRACE_DISINTEGRATE__
import qualified Text.PrettyPrint as PP
import Language.Hakaru.Pretty.Haskell
import Debug.Trace (trace)
#endif
----------------------------------------------------------------
----------------------------------------------------------------
-- N.B., when putting things into the context, be sure to freshen
-- the variables as if we were allocating a new location on the
-- heap.
--
-- For simplicity we don't actually distinguish between "variables"
-- and "locations". In the old finally-tagless code we had an @s@
-- parameter like the 'ST' monad does in order to keep track of
-- which heap things belong to. But since we might have nested
-- disintegration, and thus nested heaps, doing that means we'd
have to do some sort of numbering in the @s@ parameter
-- in order to keep track of the nested regions; and that's just
-- too much work to bother with.
TODO : for forward disintegration ( which is not just partial evaluation ) we really do mean proper HNFs not just WHNFs . This falls out from our needing to guarantee that heap - bound variables ca n't possibly escape ; whence the assumption that the result of forward disintegration contains no heap - bound variables .
--
-- TODO: is there a way to integrate this into the actual 'Term'
-- definition in order to reduce repetition?
--
HACK : ca n't use \"H\ " as the prefix because that clashes with
the Hakaru datakind
--
| A \"weak - head\ " for the sake of ' ' . , this does n't
-- exactly correlate with the usual notion of \"weak-head\"; in
-- particular we keep track of type annotations and coercions, and
-- don't reduce integration\/summation. So really we should use
some other name for ' ' ...
data Head :: ([Hakaru] -> Hakaru -> *) -> Hakaru -> * where
-- Simple heads (aka, the usual stuff)
WLiteral :: !(Literal a) -> Head abt a
BUG : even though the ' Datum ' type has a single constructor , we get a warning about not being able to UNPACK it in ' WDatum ' ... wtf ?
WDatum :: !(Datum (abt '[]) (HData' t)) -> Head abt (HData' t)
WEmpty :: !(Sing ('HArray a)) -> Head abt ('HArray a)
WArray :: !(abt '[] 'HNat) -> !(abt '[ 'HNat] a) -> Head abt ('HArray a)
WArrayLiteral
:: [abt '[] a] -> Head abt ('HArray a)
WLam :: !(abt '[ a ] b) -> Head abt (a ':-> b)
Measure heads ( , not simply @abt ' [ ] ( ' HMeasure _ ) @ )
WMeasureOp
:: (typs ~ UnLCs args, args ~ LCs typs)
=> !(MeasureOp typs a)
-> !(SArgs abt args)
-> Head abt ('HMeasure a)
WDirac :: !(abt '[] a) -> Head abt ('HMeasure a)
WMBind
:: !(abt '[] ('HMeasure a))
-> !(abt '[ a ] ('HMeasure b))
-> Head abt ('HMeasure b)
WPlate
:: !(abt '[] 'HNat)
-> !(abt '[ 'HNat ] ('HMeasure a))
-> Head abt ('HMeasure ('HArray a))
WChain
:: !(abt '[] 'HNat)
-> !(abt '[] s)
-> !(abt '[ s ] ('HMeasure (HPair a s)))
-> Head abt ('HMeasure (HPair ('HArray a) s))
WSuperpose
:: !(NonEmpty (abt '[] 'HProb, abt '[] ('HMeasure a)))
-> Head abt ('HMeasure a)
WReject
:: !(Sing ('HMeasure a)) -> Head abt ('HMeasure a)
Type coercion stuff . These are transparent re head - ness ; that is , they behave more like HNF than WHNF .
TODO : we prolly do n't actually want\/need the coercion variants ... we 'd lose some proven - guarantees about cancellation , but everything should work just fine . The one issue that remains is if we have coercion of ' WIntegrate ' or ' WSummate ' , since without the ' WCoerceTo'\/'WUnsafeFrom ' constructors we 'd be forced to call the coercion of an integration \"neutral\"--- even though it 's not actually a neutral term !
WCoerceTo :: !(Coercion a b) -> !(Head abt a) -> Head abt b
WUnsafeFrom :: !(Coercion a b) -> !(Head abt b) -> Head abt a
-- Other funky stuff
WIntegrate
:: !(abt '[] 'HReal)
-> !(abt '[] 'HReal)
-> !(abt '[ 'HReal ] 'HProb)
-> Head abt 'HProb
WSummate
-- :: !(abt '[] 'HReal)
-- -> !(abt '[] 'HReal)
- > ! ( abt ' [ ' HInt ] ' HProb )
- > Head abt ' HProb
-- Quasi-/semi-/demi-/pseudo- normal form stuff
NaryOp _ : : ! ( NaryOp a ) - > ! ( Seq ( abt ' [ ] a ) ) - > Term abt a
PrimOp _
: : ( typs ~ UnLCs args , args ~ )
= > ! ( PrimOp typs a ) - > SCon args a
-- N.B. , not ' ArrayOp _ '
NaryOp_ :: !(NaryOp a) -> !(Seq (abt '[] a)) -> Term abt a
PrimOp_
:: (typs ~ UnLCs args, args ~ LCs typs)
=> !(PrimOp typs a) -> SCon args a
-- N.B., not 'ArrayOp_'
-}
-- | Forget that something is a head.
fromHead :: (ABT Term abt) => Head abt a -> abt '[] a
fromHead (WLiteral v) = syn (Literal_ v)
fromHead (WDatum d) = syn (Datum_ d)
fromHead (WEmpty typ) = syn (Empty_ typ)
fromHead (WArray e1 e2) = syn (Array_ e1 e2)
fromHead (WArrayLiteral es) = syn (ArrayLiteral_ es)
fromHead (WLam e1) = syn (Lam_ :$ e1 :* End)
fromHead (WMeasureOp o es) = syn (MeasureOp_ o :$ es)
fromHead (WDirac e1) = syn (Dirac :$ e1 :* End)
fromHead (WMBind e1 e2) = syn (MBind :$ e1 :* e2 :* End)
fromHead (WPlate e1 e2) = syn (Plate :$ e1 :* e2 :* End)
fromHead (WChain e1 e2 e3) = syn (Chain :$ e1 :* e2 :* e3 :* End)
fromHead (WSuperpose pes) = syn (Superpose_ pes)
fromHead (WReject typ) = syn (Reject_ typ)
fromHead (WCoerceTo c e1) = syn (CoerceTo_ c :$ fromHead e1 :* End)
fromHead (WUnsafeFrom c e1) = syn (UnsafeFrom_ c :$ fromHead e1 :* End)
fromHead (WIntegrate e1 e2 e3) = syn (Integrate :$ e1 :* e2 :* e3 :* End)
fromHead ( WSummate e1 e2 e3 ) = syn ( Summate : $ e1 :* e2 :* e3 :* End )
-- | Identify terms which are already heads.
toHead :: (ABT Term abt) => abt '[] a -> Maybe (Head abt a)
toHead e =
caseVarSyn e (const Nothing) $ \t ->
case t of
Literal_ v -> Just $ WLiteral v
Datum_ d -> Just $ WDatum d
Empty_ typ -> Just $ WEmpty typ
Array_ e1 e2 -> Just $ WArray e1 e2
ArrayLiteral_ es -> Just $ WArrayLiteral es
Lam_ :$ e1 :* End -> Just $ WLam e1
MeasureOp_ o :$ es -> Just $ WMeasureOp o es
Dirac :$ e1 :* End -> Just $ WDirac e1
MBind :$ e1 :* e2 :* End -> Just $ WMBind e1 e2
Plate :$ e1 :* e2 :* End -> Just $ WPlate e1 e2
Chain :$ e1 :* e2 :* e3 :* End -> Just $ WChain e1 e2 e3
Superpose_ pes -> Just $ WSuperpose pes
CoerceTo_ c :$ e1 :* End -> WCoerceTo c <$> toHead e1
UnsafeFrom_ c :$ e1 :* End -> WUnsafeFrom c <$> toHead e1
Integrate :$ e1 :* e2 :* e3 :* End -> Just $ WIntegrate e1 e2 e3
Summate : $ e1 :* e2 :* e3 :* End - > Just $ WSummate e1 e2 e3
_ -> Nothing
instance Functor21 Head where
fmap21 _ (WLiteral v) = WLiteral v
fmap21 f (WDatum d) = WDatum (fmap11 f d)
fmap21 _ (WEmpty typ) = WEmpty typ
fmap21 f (WArray e1 e2) = WArray (f e1) (f e2)
fmap21 f (WArrayLiteral es) = WArrayLiteral (fmap f es)
fmap21 f (WLam e1) = WLam (f e1)
fmap21 f (WMeasureOp o es) = WMeasureOp o (fmap21 f es)
fmap21 f (WDirac e1) = WDirac (f e1)
fmap21 f (WMBind e1 e2) = WMBind (f e1) (f e2)
fmap21 f (WPlate e1 e2) = WPlate (f e1) (f e2)
fmap21 f (WChain e1 e2 e3) = WChain (f e1) (f e2) (f e3)
fmap21 f (WSuperpose pes) = WSuperpose (fmap (f *** f) pes)
fmap21 _ (WReject typ) = WReject typ
fmap21 f (WCoerceTo c e1) = WCoerceTo c (fmap21 f e1)
fmap21 f (WUnsafeFrom c e1) = WUnsafeFrom c (fmap21 f e1)
fmap21 f (WIntegrate e1 e2 e3) = WIntegrate (f e1) (f e2) (f e3)
f ( WSummate e1 e2 e3 ) = WSummate ( f e1 ) ( f e2 ) ( f e3 )
instance Foldable21 Head where
foldMap21 _ (WLiteral _) = mempty
foldMap21 f (WDatum d) = foldMap11 f d
foldMap21 _ (WEmpty _) = mempty
foldMap21 f (WArray e1 e2) = f e1 `mappend` f e2
foldMap21 f (WArrayLiteral es) = F.foldMap f es
foldMap21 f (WLam e1) = f e1
foldMap21 f (WMeasureOp _ es) = foldMap21 f es
foldMap21 f (WDirac e1) = f e1
foldMap21 f (WMBind e1 e2) = f e1 `mappend` f e2
foldMap21 f (WPlate e1 e2) = f e1 `mappend` f e2
foldMap21 f (WChain e1 e2 e3) = f e1 `mappend` f e2 `mappend` f e3
foldMap21 f (WSuperpose pes) = foldMapPairs f pes
foldMap21 _ (WReject _) = mempty
foldMap21 f (WCoerceTo _ e1) = foldMap21 f e1
foldMap21 f (WUnsafeFrom _ e1) = foldMap21 f e1
foldMap21 f (WIntegrate e1 e2 e3) = f e1 `mappend` f e2 `mappend` f e3
f ( WSummate e1 e2 e3 ) = f e1 ` mappend ` f e2 ` mappend ` f e3
instance Traversable21 Head where
traverse21 _ (WLiteral v) = pure $ WLiteral v
traverse21 f (WDatum d) = WDatum <$> traverse11 f d
traverse21 _ (WEmpty typ) = pure $ WEmpty typ
traverse21 f (WArray e1 e2) = WArray <$> f e1 <*> f e2
traverse21 f (WArrayLiteral es) = WArrayLiteral <$> traverse f es
traverse21 f (WLam e1) = WLam <$> f e1
traverse21 f (WMeasureOp o es) = WMeasureOp o <$> traverse21 f es
traverse21 f (WDirac e1) = WDirac <$> f e1
traverse21 f (WMBind e1 e2) = WMBind <$> f e1 <*> f e2
traverse21 f (WPlate e1 e2) = WPlate <$> f e1 <*> f e2
traverse21 f (WChain e1 e2 e3) = WChain <$> f e1 <*> f e2 <*> f e3
traverse21 f (WSuperpose pes) = WSuperpose <$> traversePairs f pes
traverse21 _ (WReject typ) = pure $ WReject typ
traverse21 f (WCoerceTo c e1) = WCoerceTo c <$> traverse21 f e1
traverse21 f (WUnsafeFrom c e1) = WUnsafeFrom c <$> traverse21 f e1
traverse21 f (WIntegrate e1 e2 e3) = WIntegrate <$> f e1 <*> f e2 <*> f e3
traverse21 f ( WSummate e1 e2 e3 ) = WSummate < $ > f e1 < * > f e2 < * > f e3
----------------------------------------------------------------
BUG : haddock does n't like annotations on GADT constructors . So
here we 'll avoid using the GADT syntax , even though it 'd make
-- the data type declaration prettier\/cleaner.
-- <-dev/hakaru/issues/6>
-- | Weak head-normal forms are either heads or neutral terms (i.e.,
-- a term whose reduction is blocked on some free variable).
data Whnf (abt :: [Hakaru] -> Hakaru -> *) (a :: Hakaru)
= Head_ !(Head abt a)
| Neutral !(abt '[] a)
-- TODO: would it be helpful to track which variable it's blocked
-- on? To do so we'd need 'GotStuck' to return that info...
--
-- TODO: is there some /clean/ way to ensure that the neutral term
-- is exactly a chain of blocked redexes? That is, we want to be
-- able to pull out neutral 'Case_' terms; so we want to make sure
-- they're not wrapped in let-bindings, coercions, etc.
| Forget that something is a WHNF .
fromWhnf :: (ABT Term abt) => Whnf abt a -> abt '[] a
fromWhnf (Head_ e) = fromHead e
fromWhnf (Neutral e) = e
| Identify terms which are already heads . , we make no attempt
-- to identify neutral terms, we just massage the type of 'toHead'.
toWhnf :: (ABT Term abt) => abt '[] a -> Maybe (Whnf abt a)
toWhnf e = Head_ <$> toHead e
| Case analysis on ' ' as a combinator .
caseWhnf :: Whnf abt a -> (Head abt a -> r) -> (abt '[] a -> r) -> r
caseWhnf (Head_ e) k _ = k e
caseWhnf (Neutral e) _ k = k e
-- | Given some WHNF, try to extract a 'Datum' from it.
viewWhnfDatum
    :: (ABT Term abt)
    => Whnf abt (HData' t)
    -> Maybe (Datum (abt '[]) (HData' t))
viewWhnfDatum (Head_   v) = Just $ viewHeadDatum v
viewWhnfDatum (Neutral _) = Nothing
    -- N.B., we always return Nothing for 'Neutral' terms because of
    -- what 'Neutral' is supposed to mean. If we wanted to be paranoid
    -- then we could use the following code to throw an error if
    -- we're given a \"neutral\" term which is in fact a head
    -- (because that indicates an error in our logic of constructing
    -- 'Neutral' values):
    {-
    caseVarSyn e (const Nothing) $ \t ->
        case t of
        Datum_ d -> error "bad \"neutral\" value!"
        _        -> Nothing
    -}
-- | Extract the 'Datum' from a head of data type. Calling this on
-- any other head is a bug (the type index should rule them out in
-- practice, hence the \"impossible\" error).
viewHeadDatum
    :: (ABT Term abt)
    => Head abt (HData' t)
    -> Datum (abt '[]) (HData' t)
viewHeadDatum (WDatum d) = d
viewHeadDatum _          = error "viewHeadDatum: the impossible happened"
-- Alas, to avoid the orphanage, this instance must live here rather than in Lazy.hs where it more conceptually belongs.
-- TODO: better unify the two cases of Whnf
-- HACK: this instance requires -XUndecidableInstances
instance (ABT Term abt) => Coerce (Whnf abt) where
    coerceTo c w =
        case w of
        Neutral e ->
            Neutral . maybe (P.coerceTo_ c e) id
                $ caseVarSyn e (const Nothing) $ \t ->
                    case t of
                    -- BUG: literals should never be neutral in the first place; but even if we got one, we shouldn't call it neutral after coercing it.
                    Literal_ x -> Just $ P.literal_ (coerceTo c x)
                    -- UnsafeFrom_ c' :$ es' -> TODO: cancellation
                    CoerceTo_ c' :$ es' ->
                        case es' of
                        e' :* End -> Just $ P.coerceTo_ (c . c') e'
                        _         -> Nothing
                    _ -> Nothing
        Head_ v ->
            case v of
            WLiteral x      -> Head_ $ WLiteral (coerceTo c x)
            -- WUnsafeFrom c' v' -> TODO: cancellation
            WCoerceTo c' v' -> Head_ $ WCoerceTo (c . c') v'
            _               -> Head_ $ WCoerceTo c v

    coerceFrom c w =
        case w of
        Neutral e ->
            Neutral . maybe (P.unsafeFrom_ c e) id
                $ caseVarSyn e (const Nothing) $ \t ->
                    case t of
                    -- BUG: literals should never be neutral in the first place; but even if we got one, we shouldn't call it neutral after coercing it.
                    Literal_ x -> Just $ P.literal_ (coerceFrom c x)
                    -- CoerceTo_ c' :$ es' -> TODO: cancellation
                    UnsafeFrom_ c' :$ es' ->
                        case es' of
                        e' :* End -> Just $ P.unsafeFrom_ (c' . c) e'
                        _         -> Nothing
                    _ -> Nothing
        Head_ v ->
            case v of
            WLiteral x        -> Head_ $ WLiteral (coerceFrom c x)
            -- WCoerceTo c' v' -> TODO: cancellation
            WUnsafeFrom c' v' -> Head_ $ WUnsafeFrom (c' . c) v'
            _                 -> Head_ $ WUnsafeFrom c v
----------------------------------------------------------------
-- BUG: haddock doesn't like annotations on GADT constructors. So
-- here we'll avoid using the GADT syntax, even though it'd make
-- the data type declaration prettier\/cleaner.
-- <https://github.com/hakaru-dev/hakaru/issues/6>

-- | Lazy terms are either thunks (i.e., any term, which we may
-- decide to evaluate later) or are already evaluated to WHNF.
data Lazy (abt :: [Hakaru] -> Hakaru -> *) (a :: Hakaru)
    = Whnf_ !(Whnf abt a)
    | Thunk !(abt '[] a)

-- | Forget whether a term has been evaluated to WHNF or not.
fromLazy :: (ABT Term abt) => Lazy abt a -> abt '[] a
fromLazy (Whnf_ e) = fromWhnf e
fromLazy (Thunk e) = e
-- | Case analysis on 'Lazy' as a combinator.
--
-- NOTE(review): mirrors 'caseWhnf' — the first continuation handles
-- already-evaluated values, the second handles unevaluated thunks.
caseLazy :: Lazy abt a -> (Whnf abt a -> r) -> (abt '[] a -> r) -> r
caseLazy (Whnf_ e) k _ = k e
caseLazy (Thunk e) _ k = k e
-- | Is the lazy value a variable?
getLazyVariable :: (ABT Term abt) => Lazy abt a -> Maybe (Variable a)
getLazyVariable e =
    case e of
    Whnf_ (Head_   _)  -> Nothing
    Whnf_ (Neutral e') -> caseVarSyn e' Just (const Nothing)
    Thunk e'           -> caseVarSyn e' Just (const Nothing)
-- | Boolean-blind variant of 'getLazyVariable': @True@ iff the lazy
-- value is a variable.
isLazyVariable :: (ABT Term abt) => Lazy abt a -> Bool
isLazyVariable e =
    case getLazyVariable e of
    Just _  -> True
    Nothing -> False
-- | Is the lazy value a literal?
getLazyLiteral :: (ABT Term abt) => Lazy abt a -> Maybe (Literal a)
getLazyLiteral e =
    case e of
    Whnf_ (Head_ (WLiteral v)) -> Just v
    Whnf_ _                    -> Nothing -- by construction
    Thunk e' ->
        caseVarSyn e' (const Nothing) $ \t ->
            case t of
            Literal_ v -> Just v
            _          -> Nothing
-- | Boolean-blind variant of 'getLazyLiteral': @True@ iff the lazy
-- value is a literal.
isLazyLiteral :: (ABT Term abt) => Lazy abt a -> Bool
isLazyLiteral e =
    case getLazyLiteral e of
    Just _  -> True
    Nothing -> False
----------------------------------------------------------------
-- | A kind for indexing 'Statement' to know whether the statement
-- is pure (and thus can be evaluated in any ambient monad) vs
-- impure (i.e., must be evaluated in the 'HMeasure' monad).
--
-- TODO: better names!
data Purity = Pure | Impure | ExpectP
    deriving (Eq, Read, Show)
-- | A type for tracking the arrays under which the term resides.
-- This is used as a binding form when we "lift" transformations
-- (currently only Disintegrate) to work on arrays.
data Index ast = Ind (Variable 'HNat) (ast 'HNat)

-- Equality is up to alpha-equivalence of the size expressions.
instance (ABT Term abt) => Eq (Index (abt '[])) where
    Ind i1 s1 == Ind i2 s2 = i1 == i2 && (alphaEq s1 s2)
-- Ordering compares only the bound variables, ignoring the size
-- expressions. NOTE(review): the method body was lost in transit and
-- has been reconstructed from the orphaned \"TODO check this\"
-- comment; confirm against upstream.
instance (ABT Term abt) => Ord (Index (abt '[])) where
    compare (Ind i1 _) (Ind i2 _) = compare i1 i2 -- TODO check this
-- | The variable bound by an 'Index'.
indVar :: Index ast -> Variable 'HNat
indVar (Ind v _ ) = v
-- | The size expression carried by an 'Index'.
indSize :: Index ast -> ast 'HNat
indSize (Ind _ a) = a
-- | View an 'Index' as the term for its bound variable.
fromIndex :: (ABT Term abt) => Index (abt '[]) -> abt '[] 'HNat
fromIndex (Ind v _) = var v
-- | Distinguish between variables and heap locations.
newtype Location (a :: k) = Location (Variable a)

instance Show (Sing a) => Show (Location a) where
    show (Location v) = show v

-- | The hint of the underlying variable.
locHint :: Location a -> Text
locHint (Location x) = varHint x

-- | The type of the underlying variable.
locType :: Location a -> Sing a
locType (Location x) = varType x

-- | Compare two locations for equality, producing a type-equality
-- proof on success (delegates to 'varEq').
locEq :: (Show1 (Sing :: k -> *), JmEq1 (Sing :: k -> *))
      => Location (a :: k)
      -> Location (b :: k)
      -> Maybe (TypeEq a b)
locEq (Location a) (Location b) = varEq a b

-- | Unwrap a 'Location' back to its variable.
fromLocation :: Location a -> Variable a
fromLocation (Location v) = v

fromLocations1 :: List1 Location a -> List1 Variable a
fromLocations1 = fmap11 fromLocation

locations1 :: List1 Variable a -> List1 Location a
locations1 = fmap11 Location
-- Location-keyed variants of 'Assoc'\/'Assocs'; thin wrappers that
-- delegate to the underlying variable-keyed operations.
newtype LAssoc  ast = LAssoc  (Assoc ast)
newtype LAssocs ast = LAssocs (Assocs ast)

emptyLAssocs :: LAssocs abt
emptyLAssocs = LAssocs (emptyAssocs)

singletonLAssocs :: Location a -> f a -> LAssocs f
singletonLAssocs (Location v) e = LAssocs (singletonAssocs v e)

toLAssocs1 :: List1 Location xs -> List1 ast xs -> LAssocs ast
toLAssocs1 ls es = LAssocs (toAssocs1 (fromLocations1 ls) es)

insertLAssocs :: LAssocs ast -> LAssocs ast -> LAssocs ast
insertLAssocs (LAssocs a) (LAssocs b) = LAssocs (insertAssocs a b)

lookupLAssoc :: (Show1 (Sing :: k -> *), JmEq1 (Sing :: k -> *))
             => Location (a :: k)
             -> LAssocs ast
             -> Maybe (ast a)
lookupLAssoc (Location v) (LAssocs a) = lookupAssoc v a
-- | A single statement in some ambient monad (specified by the @p@
-- type index). In particular, note that the first argument to
-- 'MBind' (or 'Let_') together with the variable bound in the
-- second argument forms the \"statement\" (leaving out the body
-- of the second argument, which may be part of a following statement).
-- In addition to these binding constructs, we also include a few
-- non-binding statements like 'SWeight'.
--
-- Statements are parameterized by the type of the bound element,
-- which (if present) is either a Variable or a Location.
--
-- The semantics of this type are as follows. Let @ss :: [Statement
-- abt v p]@ be a sequence of statements. We have @Γ@: the collection
-- of all free variables that occur in the term expressions in @ss@,
-- viewed as a measureable space (namely the product of the measureable
-- spaces for each variable). And we have @Δ@: the collection of
-- all variables bound by the statements in @ss@, also viewed as a
-- measurable space. The semantic interpretation of @ss@ is a
-- measurable function of type @Γ ':-> M Δ@ where @M@ is either
-- @HMeasure@ (if @p ~ 'Impure@) or @Identity@ (if @p ~ 'Pure@).
data Statement :: ([Hakaru] -> Hakaru -> *) -> (Hakaru -> *) -> Purity -> * where
    -- BUG: haddock doesn't like annotations on GADT constructors. So we can't make the constructor descriptions below available to haddock.
    -- <https://github.com/hakaru-dev/hakaru/issues/6>

    -- A variable bound by 'MBind' to a measure expression.
    SBind
        :: forall abt (v :: Hakaru -> *) (a :: Hakaru)
        .  {-# UNPACK #-} !(v a)
        -> !(Lazy abt ('HMeasure a))
        -> [Index (abt '[])]
        -> Statement abt v 'Impure

    -- A variable bound by 'Let_' to an expression.
    SLet
        :: forall abt p (v :: Hakaru -> *) (a :: Hakaru)
        .  {-# UNPACK #-} !(v a)
        -> !(Lazy abt a)
        -> [Index (abt '[])]
        -> Statement abt v p

    -- A weight; i.e., the first component of each argument to
    -- 'superpose'. This is a statement just so that we can avoid
    -- needing to atomize the weight itself.
    SWeight
        :: forall abt (v :: Hakaru -> *)
        .  !(Lazy abt 'HProb)
        -> [Index (abt '[])]
        -> Statement abt v 'Impure

    -- A monadic guard statement. If the scrutinee matches the
    -- pattern, then we bind the variables as usual; otherwise, we
    -- return the empty measure. N.B., this statement type is only
    -- for capturing constraints that some pattern matches /in a/
    -- /monadic context/. In pure contexts we should be able to
    -- handle case analysis without putting anything onto the heap.
    SGuard
        :: forall abt (v :: Hakaru -> *) (xs :: [Hakaru]) (a :: Hakaru)
        .  !(List1 v xs)
        -> !(Pattern xs a)
        -> !(Lazy abt a)
        -> [Index (abt '[])]
        -> Statement abt v 'Impure

    -- Some arbitrary pure code. This is a statement just so that we can avoid needing to atomize the stuff in the pure code.
    --
    -- TODO: real names for these.
    -- TODO: generalize to use a 'VarSet' so we can collapse these
    -- TODO: defunctionalize? These break pretty printing...
    SStuff0
        :: forall abt (v :: Hakaru -> *)
        .  (abt '[] 'HProb -> abt '[] 'HProb)
        -> [Index (abt '[])]
        -> Statement abt v 'ExpectP
    SStuff1
        :: forall abt (v :: Hakaru -> *) (a :: Hakaru)
        .  {-# UNPACK #-} !(v a)
        -> (abt '[] 'HProb -> abt '[] 'HProb)
        -> [Index (abt '[])]
        -> Statement abt v 'ExpectP
-- | The set of locations (as variables) bound by a single statement.
-- 'SWeight' and 'SStuff0' bind nothing; 'SGuard' may bind several.
statementVars :: Statement abt Location p -> VarSet ('KProxy :: KProxy Hakaru)
statementVars (SBind x _ _) = singletonVarSet (fromLocation x)
statementVars (SLet x _ _) = singletonVarSet (fromLocation x)
statementVars (SWeight _ _) = emptyVarSet
statementVars (SGuard xs _ _ _) = toVarSet1 (fromLocations1 xs)
statementVars (SStuff0 _ _) = emptyVarSet
statementVars (SStuff1 x _ _) = singletonVarSet (fromLocation x)
-- | Is the Location bound by the statement?
--
-- We return @Maybe ()@ rather than @Bool@ because in our primary
-- use case we're already in the @Maybe@ monad and so it's easier
-- to just stick with that. If we find other situations where we'd
-- really rather have the @Bool@, then we can easily change things
-- and use some @boolToMaybe@ function to do the coercion wherever
-- needed.
isBoundBy :: Location (a :: Hakaru) -> Statement abt Location p -> Maybe ()
x `isBoundBy` SBind  y  _ _   = const () <$> locEq x y
x `isBoundBy` SLet   y  _ _   = const () <$> locEq x y
_ `isBoundBy` SWeight   _ _   = Nothing
x `isBoundBy` SGuard ys _ _ _ =
    -- TODO: just check membership directly, rather than going through VarSet
    if memberVarSet (fromLocation x) (toVarSet1 (fmap11 fromLocation ys))
    then Just ()
    else Nothing
_ `isBoundBy` SStuff0   _ _   = Nothing
x `isBoundBy` SStuff1 y _ _   = const () <$> locEq x y
-- TODO: remove this CPP guard, provided we don't end up with a cyclic dependency...
#ifdef __TRACE_DISINTEGRATE__
instance (ABT Term abt) => Pretty (Whnf abt) where
    prettyPrec_ p (Head_   w) = ppApply1 p "Head_" (fromHead w) -- HACK
    prettyPrec_ p (Neutral e) = ppApply1 p "Neutral" e

instance (ABT Term abt) => Pretty (Lazy abt) where
    prettyPrec_ p (Whnf_ w) = ppFun p "Whnf_" [PP.sep (prettyPrec_ 11 w)]
    prettyPrec_ p (Thunk e) = ppApply1 p "Thunk" e
-- Render a one-argument application, parenthesizing at precedence > 9.
ppApply1 :: (ABT Term abt) => Int -> String -> abt '[] a -> [PP.Doc]
ppApply1 p f e1 =
    let d = PP.text f PP.<+> PP.nest (1 + length f) (prettyPrec 11 e1)
    in [if p > 9 then PP.parens (PP.nest 1 d) else d]

-- Render an n-argument "function call" of pre-rendered documents.
ppFun :: Int -> String -> [PP.Doc] -> [PP.Doc]
ppFun _ f [] = [PP.text f]
ppFun p f ds =
    parens (p > 9) [PP.text f PP.<+> PP.nest (1 + length f) (PP.sep ds)]

-- Conditionally wrap a group of documents in parentheses.
parens :: Bool -> [PP.Doc] -> [PP.Doc]
parens True  ds = [PP.parens (PP.nest 1 (PP.sep ds))]
parens False ds = ds

-- Render documents as a comma-separated, bracketed list.
ppList :: [PP.Doc] -> PP.Doc
ppList = PP.sep . (:[]) . PP.brackets . PP.nest 1 . PP.fsep . PP.punctuate PP.comma

-- Render a list of indices by their bound variables.
ppInds :: (ABT Term abt) => [Index (abt '[])] -> PP.Doc
ppInds = ppList . map (ppVariable . indVar)
-- Pretty-print a single heap statement (debugging aid).
ppStatement :: (ABT Term abt) => Int -> Statement abt Location p -> PP.Doc
ppStatement p s =
    case s of
    SBind (Location x) e inds ->
        PP.sep $ ppFun p "SBind"
            [ ppVariable x
            , PP.sep $ prettyPrec_ 11 e
            , ppInds inds
            ]
    SLet (Location x) e inds ->
        PP.sep $ ppFun p "SLet"
            [ ppVariable x
            , PP.sep $ prettyPrec_ 11 e
            , ppInds inds
            ]
    SWeight e inds ->
        PP.sep $ ppFun p "SWeight"
            [ PP.sep $ prettyPrec_ 11 e
            , ppInds inds
            ]
    SGuard xs pat e inds ->
        PP.sep $ ppFun p "SGuard"
            [ PP.sep $ ppVariables (fromLocations1 xs)
            , PP.sep $ prettyPrec_ 11 pat
            , PP.sep $ prettyPrec_ 11 e
            , ppInds inds
            ]
    SStuff0 _ _ ->
        PP.sep $ ppFun p "SStuff0"
            [ PP.text "TODO: ppStatement{SStuff0}"
            ]
    SStuff1 _ _ _ ->
        PP.sep $ ppFun p "SStuff1"
            [ PP.text "TODO: ppStatement{SStuff1}"
            ]
-- Pretty-print a whole heap, one statement per line (debugging aid).
pretty_Statements :: (ABT Term abt) => [Statement abt Location p] -> PP.Doc
pretty_Statements []     = PP.text "[]"
pretty_Statements (s:ss) =
    foldl
        (\d s' -> d PP.$+$ PP.comma PP.<+> ppStatement 0 s')
        (PP.text "[" PP.<+> ppStatement 0 s)
        ss
    PP.$+$ PP.text "]"

-- Pretty-print a heap followed by the term under evaluation.
pretty_Statements_withTerm
    :: (ABT Term abt) => [Statement abt Location p] -> abt '[] a -> PP.Doc
pretty_Statements_withTerm ss e =
    pretty_Statements ss PP.$+$ pretty e
-- Pretty-print a substitution, one @var -> term@ binding per line.
prettyAssocs
    :: (ABT Term abt)
    => Assocs (abt '[])
    -> PP.Doc
prettyAssocs a = PP.vcat $ map go (fromAssocs a)
    where
    go (Assoc x e) = ppVariable x PP.<+>
                     PP.text "->" PP.<+>
                     pretty e
#endif
-----------------------------------------------------------------
-- | A function for evaluating any term to weak-head normal form.
type TermEvaluator abt m =
    forall a. abt '[] a -> m (Whnf abt a)

-- | A function for \"performing\" an 'HMeasure' monadic action.
-- This could mean actual random sampling, or simulated sampling
-- by generating a new term and returning the newly bound variable,
-- or anything else.
type MeasureEvaluator abt m =
    forall a. abt '[] ('HMeasure a) -> m (Whnf abt a)

-- | A function for evaluating any case-expression to weak-head
-- normal form.
type CaseEvaluator abt m =
    forall a b. abt '[] a -> [Branch a abt b] -> m (Whnf abt b)

-- | A function for evaluating any variable to weak-head normal form.
type VariableEvaluator abt m =
    forall a. Variable a -> m (Whnf abt a)
----------------------------------------------------------------
-- | This class captures the monadic operations needed by the
-- 'evaluate' function in "Language.Hakaru.Lazy".
class (Functor m, Applicative m, Monad m, ABT Term abt)
    => EvaluationMonad abt m p | m -> abt p
    where
    -- TODO: should we have a *method* for arbitrarily incrementing the stored 'nextFreshNat'; or should we only rely on it being initialized correctly? Beware correctness issues about updating the lower bound after having called 'freshNat'...

    -- | Return a fresh natural number. That is, a number which is
    -- not the 'varID' of any free variable in the expressions of
    -- interest, and isn't a number we've returned previously.
    freshNat :: m Nat

    -- | Internal function for renaming the variables bound by a
    -- statement. We return the renamed statement along with a substitution
    -- for mapping the old variable names to their new variable names.
    freshLocStatement
        :: Statement abt Variable p
        -> m (Statement abt Location p, Assocs (Variable :: Hakaru -> *))
    freshLocStatement s =
        case s of
        SWeight w e -> return (SWeight w e, mempty)
        SBind x body i -> do
            x' <- freshenVar x
            return (SBind (Location x') body i, singletonAssocs x x')
        SLet x body i -> do
            x' <- freshenVar x
            return (SLet (Location x') body i, singletonAssocs x x')
        SGuard xs pat scrutinee i -> do
            xs' <- freshenVars xs
            return (SGuard (locations1 xs') pat scrutinee i,
                    toAssocs1 xs xs')
        SStuff0 e e' -> return (SStuff0 e e', mempty)
        SStuff1 x f i -> do
            x' <- freshenVar x
            return (SStuff1 (Location x') f i, singletonAssocs x x')

    -- | Returns the current Indices. Currently, this is only
    -- applicable to the Disintegration Monad, but could be
    -- relevant as other partial evaluators begin to handle
    -- Plate and Array
    getIndices :: m [Index (abt '[])]
    getIndices = return []

    -- | Add a statement to the top of the context. This is unsafe
    -- because it may allow confusion between variables with the
    -- same name but different scopes (thus, may allow variable
    -- capture). Prefer using 'push_', 'push', or 'pushes'.
    unsafePush :: Statement abt Location p -> m ()

    -- | Call 'unsafePush' repeatedly. Is part of the class since
    -- we may be able to do this more efficiently than actually
    -- calling 'unsafePush' repeatedly.
    --
    -- N.B., this should push things in the same order as 'pushes'
    -- does.
    unsafePushes :: [Statement abt Location p] -> m ()
    unsafePushes = mapM_ unsafePush

    -- | Look for the statement @s@ binding the variable. If found,
    -- then call the continuation with @s@ in the context where @s@
    -- itself and everything @s@ (transitively)depends on is included
    -- but everything that (transitively)depends on @s@ is excluded;
    -- thus, the continuation may only alter the dependencies of
    -- @s@. After the continuation returns, restore all the bindings
    -- that were removed before calling the continuation. If no
    -- such @s@ can be found, then return 'Nothing' without altering
    -- the context at all.
    --
    -- N.B., the statement @s@ itself is popped! Thus, it is up to
    -- the continuation to make sure to push new statements that
    -- bind the variables bound by @s@!
    --
    -- TODO: pass the continuation more detail, so it can avoid
    -- needing to be in the 'Maybe' monad due to the redundant call
    -- to 'varEq' in the continuation. In particular, we want to
    -- do this so that we can avoid the return type @m (Maybe (Maybe r))@
    -- while still correctly handling statements like 'SStuff1'
    -- which (a) do bind variables and thus should shadow bindings
    -- further up the 'ListContext', but which (b) offer up no
    -- expression the variable is bound to, and thus cannot be
    -- altered by forcing etc. To do all this, we need to pass the
    -- 'TypeEq' proof from (the 'varEq' call in) the 'isBoundBy'
    -- call in the instance; but that means we also need some way
    -- of tying it together with the existential variable in the
    -- 'Statement'. Perhaps we should have an alternative statement
    -- type which exposes the existential?
    select
        :: Location (a :: Hakaru)
        -> (Statement abt Location p -> Maybe (m r))
        -> m (Maybe r)

    -- | How to substitute for a variable; defaults to plain 'var'.
    substVar :: Variable a -> abt '[] a
        -> (forall b'. Variable b' -> m (abt '[] b'))
    substVar _ _ = return . var

    -- | The free variables of a term; defaults to 'freeVars'.
    extFreeVars :: abt xs a -> m (VarSet (KindOf a))
    extFreeVars e = return (freeVars e)

    -- The first argument to @evaluateCase@ will be the
    -- 'TermEvaluator' we're constructing (thus tying the knot).
    evaluateCase :: TermEvaluator abt m -> CaseEvaluator abt m
    {-# INLINE evaluateCase #-}
    evaluateCase = defaultCaseEvaluator

    -- TODO: figure out how to abstract this so it can be reused by
    -- 'constrainValue'. Especially the 'SBranch' case of 'step'.
    --
    -- TODO: we could speed up the case for free variables by having
    -- the 'Context' also keep track of the largest free var. That way,
    -- we can just check up front whether @varID x < nextFreeVarID@.
    -- Of course, we'd have to make sure we've sufficiently renamed all
    -- bound variables to be above @nextFreeVarID@; but then we have to
    -- do that anyways.
    evaluateVar :: MeasureEvaluator abt m
                -> TermEvaluator abt m
                -> VariableEvaluator abt m
    evaluateVar perform evaluate_ = \x ->
        -- If we get 'Nothing', then it turns out @x@ is a free variable
        fmap (maybe (Neutral $ var x) id) . select (Location x) $ \s ->
            case s of
            SBind y e i -> do
                Refl <- locEq (Location x) y
                Just $ do
                    w <- perform $ caseLazy e fromWhnf id
                    unsafePush (SLet (Location x) (Whnf_ w) i)
#ifdef __TRACE_DISINTEGRATE__
                    trace ("-- updated "
                        ++ show (ppStatement 11 s)
                        ++ " to "
                        ++ show (ppStatement 11 (SLet (Location x) (Whnf_ w) i))
                        ) $ return ()
#endif
                    return w
            SLet y e i -> do
                Refl <- locEq (Location x) y
                Just $ do
                    w <- caseLazy e return evaluate_
                    unsafePush (SLet (Location x) (Whnf_ w) i)
                    return w
            -- These two don't bind any variables, so they definitely
            -- can't match.
            SWeight _ _ -> Nothing
            SStuff0 _ _ -> Nothing
            -- These two do bind variables, but there's no expression we
            -- can return for them because the variables are
            -- untouchable\/abstract.
            SStuff1 _ _ _  -> Just . return . Neutral $ var x
            SGuard _ _ _ _ -> Just . return . Neutral $ var x
-- | A simple 'CaseEvaluator' which uses the 'DatumEvaluator' to
-- force the scrutinee, and if 'matchBranches' succeeds then we
-- call the 'TermEvaluator' to continue evaluating the body of the
-- matched branch. If we 'GotStuck' then we return a 'Neutral' term
-- of the case expression itself (n.b, any side effects from having
-- called the 'DatumEvaluator' will still persist when returning
-- this neutral term). If we didn't get stuck and yet none of the
-- branches matches, then we throw an exception.
defaultCaseEvaluator
    :: forall abt m p
    .  (ABT Term abt, EvaluationMonad abt m p)
    => TermEvaluator abt m
    -> CaseEvaluator abt m
{-# INLINE defaultCaseEvaluator #-}
defaultCaseEvaluator evaluate_ = evaluateCase_
    where
    -- TODO: At present, whenever we residualize a case expression we'll
    -- generate a 'Neutral' term which will, when run, repeat the work
    -- we're doing in the evaluation here. We could eliminate this
    -- redundancy by introducing a new variable for each time this
    -- function is called--- if only we had some way of getting those
    -- variables put into the right place for when we residualize the
    -- original scrutinee...
    --
    -- N.B., 'DatumEvaluator' is a rank-2 type so it requires a signature
    evaluateDatum :: DatumEvaluator (abt '[]) m
    evaluateDatum e = viewWhnfDatum <$> evaluate_ e

    evaluateCase_ :: CaseEvaluator abt m
    evaluateCase_ e bs = do
        match <- matchBranches evaluateDatum e bs
        case match of
            Nothing ->
                -- TODO: print more info about where this error
                -- happened
                --
                -- TODO: rather than throwing a Haskell error,
                -- instead capture the possibility of failure in
                -- the 'EvaluationMonad' monad.
                error "defaultCaseEvaluator: non-exhaustive patterns in case!"
            Just GotStuck ->
                return . Neutral . syn $ Case_ e bs
            Just (Matched ss body) ->
                pushes (toVarStatements ss) body >>= evaluate_
-- | Turn a substitution into a sequence of (unevaluated) let-bindings.
toVarStatements :: Assocs (abt '[]) -> [Statement abt Variable p]
toVarStatements =
    map (\(Assoc x e) -> SLet x (Thunk e) []) . fromAssocs
-- | Monadic substitution of a single variable, using the monad's
-- 'substVar' to decide how remaining variables are rewritten.
extSubst
    :: forall abt a xs b m p. (EvaluationMonad abt m p)
    => Variable a
    -> abt '[] a
    -> abt xs b
    -> m (abt xs b)
extSubst x e = substM x e (substVar x e)

-- | Fold 'extSubst' over an entire substitution.
extSubsts
    :: forall abt a xs m p. (EvaluationMonad abt m p)
    => Assocs (abt '[])
    -> abt xs a
    -> m (abt xs a)
extSubsts rho0 e0 =
    F.foldlM (\e (Assoc x v) -> extSubst x v e) e0 (unAssocs rho0)
-- TODO: define a new NameSupply monad in "Language.Hakaru.Syntax.Variable" for encapsulating these four fresh(en) functions?

-- | Given some hint and type, generate a variable with a fresh
-- 'varID'.
freshVar
    :: (EvaluationMonad abt m p)
    => Text
    -> Sing (a :: Hakaru)
    -> m (Variable a)
freshVar hint typ = (\i -> Variable hint i typ) <$> freshNat

-- TODO: move to "Language.Hakaru.Syntax.Variable" in case anyone else wants it too.
-- | A variable-to-be: a hint paired with a type, awaiting a fresh ID.
data Hint (a :: Hakaru) = Hint {-# UNPACK #-} !Text !(Sing a)
-- | Call 'freshVar' repeatedly.
-- TODO: make this more efficient than actually calling 'freshVar'
-- repeatedly.
freshVars
    :: (EvaluationMonad abt m p)
    => List1 Hint xs
    -> m (List1 Variable xs)
freshVars Nil1         = return Nil1
freshVars (Cons1 x xs) = Cons1 <$> freshVar' x <*> freshVars xs
    where
    freshVar' (Hint hint typ) = freshVar hint typ
-- | Given a variable, return a new variable with the same hint and
-- type but with a fresh 'varID'.
freshenVar
    :: (EvaluationMonad abt m p)
    => Variable (a :: Hakaru)
    -> m (Variable a)
freshenVar x = (\i -> x{varID=i}) <$> freshNat
-- | Call 'freshenVar' repeatedly.
-- TODO: make this more efficient than actually calling 'freshenVar'
-- repeatedly.
freshenVars
    :: (EvaluationMonad abt m p)
    => List1 Variable (xs :: [Hakaru])
    -> m (List1 Variable xs)
freshenVars Nil1         = return Nil1
freshenVars (Cons1 x xs) = Cons1 <$> freshenVar x <*> freshenVars xs
{-
-- TODO: get this faster version to typecheck! And once we do, move it to IClasses.hs or wherever 'List1'\/'DList1' end up
freshenVars = go dnil1
    where
    go  :: (EvaluationMonad abt m p)
        => DList1 Variable (ys :: [Hakaru])
        -> List1 Variable (zs :: [Hakaru])
        -> m (List1 Variable (ys ++ zs))
    go k Nil1         = return (unDList1 k Nil1) -- for typechecking, don't use 'toList1' here.
    go k (Cons1 x xs) = do
        x' <- freshenVar x
        go (k `dsnoc1` x') xs -- BUG: type error....
-}
-- | Given a size, generate a fresh 'Index' bound to a fresh
-- (hint-less) natural-number variable.
freshInd :: (EvaluationMonad abt m p)
         => abt '[] 'HNat
         -> m (Index (abt '[]))
freshInd s = do
    x <- freshVar T.empty SNat
    return $ Ind x s
-- | Given a location, return a new Location with the same hint
-- and type but with a fresh ID
freshenLoc :: (EvaluationMonad abt m p)
           => Location (a :: Hakaru) -> m (Location a)
freshenLoc (Location x) = Location <$> freshenVar x

-- | Call `freshenLoc` repeatedly
freshenLocs :: (EvaluationMonad abt m p)
            => List1 Location (ls :: [Hakaru])
            -> m (List1 Location ls)
freshenLocs Nil1         = return Nil1
freshenLocs (Cons1 l ls) = Cons1 <$> freshenLoc l <*> freshenLocs ls
-- | Add a statement to the top of the context, renaming any variables
-- the statement binds and returning the substitution mapping the
-- old variables to the new ones. This is safer than 'unsafePush'
-- because it avoids variable confusion; but it is still somewhat
-- unsafe since you may forget to apply the substitution to \"the
-- rest of the term\". You almost certainly should use 'push' or
-- 'pushes' instead.
push_
    :: (ABT Term abt, EvaluationMonad abt m p)
    => Statement abt Variable p
    -> m (Assocs (Variable :: Hakaru -> *))
push_ s = do
    (s',rho) <- freshLocStatement s
    unsafePush s'
    return rho
-- | Push a statement onto the context, renaming variables along
-- the way. The second argument represents \"the rest of the term\"
-- after we've peeled the statement off; it's passed so that we can
-- update the variable names there so that they match with the
-- (renamed) binding statement. The third argument is the continuation
-- for what to do with the renamed term. Rather than taking the
-- second and third arguments we could return an 'Assocs' giving
-- the renaming of variables; however, doing that would make it too
-- easy to accidentally drop the substitution on the floor rather
-- than applying it to the term before calling the continuation.
push
    :: (ABT Term abt, EvaluationMonad abt m p)
    => Statement abt Variable p -- ^ the statement to push
    -> abt xs a                 -- ^ the \"rest\" of the term
    -- -> (abt xs a -> m r)     -- ^ what to do with the renamed \"rest\"
    -> m (abt xs a)             -- ^ the final result
push s e = do
    rho <- push_ s
    return (renames rho e)
-- | Call 'push' repeatedly. (N.B., is more efficient than actually
-- calling 'push' repeatedly.) The head is pushed first and thus
-- is the furthest away in the final context, whereas the tail is
-- pushed last and is the closest in the final context.
pushes
    :: (ABT Term abt, EvaluationMonad abt m p)
    => [Statement abt Variable p] -- ^ the statements to push
    -> abt xs a                   -- ^ the \"rest\" of the term
    -- -> (abt xs a -> m r)       -- ^ what to do with the renamed \"rest\"
    -> m (abt xs a)               -- ^ the final result
pushes ss e = do
    -- TODO: is 'foldlM' the right one? or do we want 'foldrM'?
    rho <- F.foldlM (\rho s -> mappend rho <$> push_ s) mempty ss
    return (renames rho e)
----------------------------------------------------------------
----------------------------------------------------------- fin.
| null | https://raw.githubusercontent.com/hakaru-dev/hakaru/94157c89ea136c3b654a85cce51f19351245a490/haskell/Language/Hakaru/Evaluation/Types.hs | haskell | # OPTIONS_GHC -Wall -fwarn-tabs #
--------------------------------------------------------------
2016.04.28
|
Module : Language.Hakaru.Evaluation.Types
License : BSD3
Maintainer :
Stability : experimental
BUG: completely gave up on structure sharing. Need to add that back in.
can separate it from Disintegrate.hs vs its other clients (i.e.,
Sample.hs and Expect.hs), this file will prolly be broken up
into Lazy.hs itself vs Disintegrate.hs
--------------------------------------------------------------
* Terms in particular known forms\/formats
* Lazy partial evaluation
* The monad for partial evaluation
--------------------------------------------------------------
--------------------------------------------------------------
N.B., when putting things into the context, be sure to freshen
the variables as if we were allocating a new location on the
heap.
For simplicity we don't actually distinguish between "variables"
and "locations". In the old finally-tagless code we had an @s@
parameter like the 'ST' monad does in order to keep track of
which heap things belong to. But since we might have nested
disintegration, and thus nested heaps, doing that means we'd
in order to keep track of the nested regions; and that's just
too much work to bother with.
TODO: is there a way to integrate this into the actual 'Term'
definition in order to reduce repetition?
exactly correlate with the usual notion of \"weak-head\"; in
particular we keep track of type annotations and coercions, and
don't reduce integration\/summation. So really we should use
Simple heads (aka, the usual stuff)
- even though it 's not actually a neutral term !
Other funky stuff
:: !(abt '[] 'HReal)
-> !(abt '[] 'HReal)
Quasi-/semi-/demi-/pseudo- normal form stuff
N.B. , not ' ArrayOp _ '
N.B., not 'ArrayOp_'
| Forget that something is a head.
| Identify terms which are already heads.
--------------------------------------------------------------
the data type declaration prettier\/cleaner.
<-dev/hakaru/issues/6>
| Weak head-normal forms are either heads or neutral terms (i.e.,
a term whose reduction is blocked on some free variable).
TODO: would it be helpful to track which variable it's blocked
on? To do so we'd need 'GotStuck' to return that info...
TODO: is there some /clean/ way to ensure that the neutral term
is exactly a chain of blocked redexes? That is, we want to be
able to pull out neutral 'Case_' terms; so we want to make sure
they're not wrapped in let-bindings, coercions, etc.
to identify neutral terms, we just massage the type of 'toHead'.
N.B., we always return Nothing for 'Neutral' terms because of
what 'Neutral' is supposed to mean. If we wanted to be paranoid
then we could use the following code to throw an error if
(because that indicates an error in our logic of constructing
'Neutral' values):
Alas, to avoid the orphanage, this instance must live here rather than in Lazy.hs where it more conceptually belongs.
WCoerceTo c' v' -> TODO: cancellation
--------------------------------------------------------------
the data type declaration prettier\/cleaner.
<-dev/hakaru/issues/6>
| Lazy terms are either thunks (i.e., any term, which we may
| Case analysis on 'Lazy' as a combinator.
| Is the lazy value a variable?
| Boolean-blind variant of 'getLazyVariable'
| Is the lazy value a literal?
by construction
| Boolean-blind variant of 'getLazyLiteral'
--------------------------------------------------------------
| A kind for indexing 'Statement' to know whether the statement
is pure (and thus can be evaluated in any ambient monad) vs
TODO: better names!
| A type for tracking the arrays under which the term resides
This is used as a binding form when we "lift" transformations
| Distinguish between variables and heap locations
| A single statement in some ambient monad (specified by the @p@
'MBind' (or 'Let_') together with the variable bound in the
In addition to these binding constructs, we also include a few
non-binding statements like 'SWeight'.
Statements are parameterized by the type of the bound element,
which (if present) is either a Variable or a Location.
The semantics of this type are as follows. Let @ss :: [Statement
abt v p]@ be a sequence of statements. We have @Γ@: the collection
viewed as a measureable space (namely the product of the measureable
<-dev/hakaru/issues/6>
# UNPACK #
A variable bound by 'Let_' to an expression.
# UNPACK #
needing to atomize the weight itself.
A monadic guard statement. If the scrutinee matches the
pattern, then we bind the variables as usual; otherwise, we
for capturing constraints that some pattern matches /in a/
/monadic context/. In pure contexts we should be able to
handle case analysis without putting anything onto the heap.
Some arbitrary pure code. This is a statement just so that we can avoid needing to atomize the stuff in the pure code.
TODO: real names for these.
TODO: defunctionalize? These break pretty printing...
# UNPACK #
| Is the Location bound by the statement?
to just stick with that. If we find other situations where we'd
and use some @boolToMaybe@ function to do the coercion wherever
needed.
HACK
---------------------------------------------------------------
| A function for evaluating any term to weak-head normal form.
This could mean actual random sampling, or simulated sampling
by generating a new term and returning the newly bound variable,
or anything else.
| A function for evaluating any case-expression to weak-head
normal form.
| A function for evaluating any variable to weak-head normal form.
--------------------------------------------------------------
| This class captures the monadic operations needed by the
TODO: should we have a *method* for arbitrarily incrementing the stored 'nextFreshNat'; or should we only rely on it being initialized correctly? Beware correctness issues about updating the lower bound after having called 'freshNat'...
| Return a fresh natural number. That is, a number which is
not the 'varID' of any free variable in the expressions of
interest, and isn't a number we've returned previously.
| Internal function for renaming the variables bound by a
statement. We return the renamed statement along with a substitution
for mapping the old variable names to their new variable names.
| Returns the current Indices. Currently, this is only
applicable to the Disintegration Monad, but could be
relevant as other partial evaluators begin to handle
Plate and Array
| Add a statement to the top of the context. This is unsafe
because it may allow confusion between variables with the
same name but different scopes (thus, may allow variable
capture). Prefer using 'push_', 'push', or 'pushes'.
| Call 'unsafePush' repeatedly. Is part of the class since
we may be able to do this more efficiently than actually
calling 'unsafePush' repeatedly.
N.B., this should push things in the same order as 'pushes'
does.
| Look for the statement @s@ binding the variable. If found,
then call the continuation with @s@ in the context where @s@
itself and everything @s@ (transitively)depends on is included
but everything that (transitively)depends on @s@ is excluded;
thus, the continuation may only alter the dependencies of
@s@. After the continuation returns, restore all the bindings
that were removed before calling the continuation. If no
such @s@ can be found, then return 'Nothing' without altering
the context at all.
N.B., the statement @s@ itself is popped! Thus, it is up to
the continuation to make sure to push new statements that
TODO: pass the continuation more detail, so it can avoid
needing to be in the 'Maybe' monad due to the redundant call
do this so that we can avoid the return type @m (Maybe (Maybe r))@
while still correctly handling statements like 'SStuff1'
which (a) do bind variables and thus should shadow bindings
expression the variable is bound to, and thus cannot be
altered by forcing etc. To do all this, we need to pass the
'TypeEq' proof from (the 'varEq' call in) the 'isBoundBy'
call in the instance; but that means we also need some way
of tying it together with the existential variable in the
'Statement'. Perhaps we should have an alternative statement
type which exposes the existential?
TODO: figure out how to abstract this so it can be reused by
TODO: we could speed up the case for free variables by having
the 'Context' also keep track of the largest free var. That way,
we can just check up front whether @varID x < nextFreeVarID@.
Of course, we'd have to make sure we've sufficiently renamed all
do that anyways.
If we get 'Nothing', then it turns out @x@ is a free variable
can't match.
can return for them because the variables are
untouchable\/abstract.
force the scrutinee, and if 'matchBranches' succeeds then we
matched branch. If we 'GotStuck' then we return a 'Neutral' term
of the case expression itself (n.b, any side effects from having
called the 'DatumEvaluator' will still persist when returning
this neutral term). If we didn't get stuck and yet none of the
branches matches, then we throw an exception.
TODO: At present, whenever we residualize a case expression we'll
generate a 'Neutral' term which will, when run, repeat the work
we're doing in the evaluation here. We could eliminate this
function is called--- if only we had some way of getting those
variables put into the right place for when we residualize the
original scrutinee...
N.B., 'DatumEvaluator' is a rank-2 type so it requires a signature
TODO: print more info about where this error
happened
instead capture the possibility of failure in
the 'EvaluationMonad' monad.
| Given some hint and type, generate a variable with a fresh
'varID'.
# UNPACK #
| Call 'freshVar' repeatedly.
TODO: make this more efficient than actually calling 'freshVar'
repeatedly.
| Given a variable, return a new variable with the same hint and
type but with a fresh 'varID'.
| Call 'freshenVar' repeatedly.
repeatedly.
TODO : get this faster version to ! And once we do , move it to IClasses.hs or wherever ' List1'\/'DList1 ' end up
for typechecking , do n't use ' toList1 ' here .
BUG : type error ....
TODO: get this faster version to typecheck! And once we do, move it to IClasses.hs or wherever 'List1'\/'DList1' end up
for typechecking, don't use 'toList1' here.
BUG: type error....
| Given a size, generate a fresh Index
| Given a location, return a new Location with the same hint
and type but with a fresh ID
| Call `freshenLoc` repeatedly
| Add a statement to the top of the context, renaming any variables
the statement binds and returning the substitution mapping the
old variables to the new ones. This is safer than 'unsafePush'
because it avoids variable confusion; but it is still somewhat
unsafe since you may forget to apply the substitution to \"the
rest of the term\". You almost certainly should use 'push' or
'pushes' instead.
| Push a statement onto the context, renaming variables along
after we've peeled the statement off; it's passed so that we can
update the variable names there so that they match with the
for what to do with the renamed term. Rather than taking the
the renaming of variables; however, doing that would make it too
easy to accidentally drop the substitution on the floor rather
than applying it to the term before calling the continuation.
^ the statement to push
-> (abt xs a -> m r) -- ^ what to do with the renamed \"rest\"
^ the final result
is the furthest away in the final context, whereas the tail is
pushed last and is the closest in the final context.
^ the statements to push
-> (abt xs a -> m r) -- ^ what to do with the renamed \"rest\"
^ the final result
TODO: is 'foldlM' the right one? or do we want 'foldrM'?
--------------------------------------------------------------
----------------------------------------------------------------- fin.
{-# LANGUAGE CPP
           , GADTs
           , KindSignatures
           , DataKinds
           , PolyKinds
           , TypeOperators
           , Rank2Types
           , BangPatterns
           , FlexibleContexts
           , MultiParamTypeClasses
           , FunctionalDependencies
           , FlexibleInstances
           , UndecidableInstances
           , EmptyCase
           , ScopedTypeVariables
           #-}
{-|
Copyright   : Copyright (c) 2016 the Hakaru team
Portability : GHC-only

The data types for "Language.Hakaru.Evaluation.Lazy"

TODO: once we figure out the exact API\/type of 'evaluate' and
related functions... (rest of this note was lost in extraction)
-}
module Language.Hakaru.Evaluation.Types
(
Head(..), fromHead, toHead, viewHeadDatum
, Whnf(..), fromWhnf, toWhnf, caseWhnf, viewWhnfDatum
, Lazy(..), fromLazy, caseLazy
, getLazyVariable, isLazyVariable
, getLazyLiteral, isLazyLiteral
, TermEvaluator
, MeasureEvaluator
, CaseEvaluator
, VariableEvaluator
, Purity(..), Statement(..), statementVars, isBoundBy
, Index, indVar, indSize, fromIndex
, Location(..), locEq, locHint, locType, locations1
, fromLocation, fromLocations1, freshenLoc, freshenLocs
, LAssoc, LAssocs , emptyLAssocs, singletonLAssocs
, toLAssocs1, insertLAssocs, lookupLAssoc
#ifdef __TRACE_DISINTEGRATE__
, ppList
, ppInds
, ppStatement
, pretty_Statements
, pretty_Statements_withTerm
, prettyAssocs
#endif
, EvaluationMonad(..)
, defaultCaseEvaluator
, toVarStatements
, extSubst
, extSubsts
, freshVar
, freshenVar
, Hint(..), freshVars
, freshenVars
, freshInd
    {- TODO: should we expose these?
    , freshLocStatement
    , push_
    -}
, push
, pushes
) where
import Prelude hiding (id, (.))
import Control.Category (Category(..))
#if __GLASGOW_HASKELL__ < 710
import Data.Monoid (Monoid(..))
import Data.Functor ((<$>))
import Control.Applicative (Applicative(..))
import Data.Traversable
#endif
import Control.Arrow ((***))
import qualified Data.Foldable as F
import Data.List.NonEmpty (NonEmpty(..))
import qualified Data.Text as T
import Data.Text (Text)
import Data.Proxy (KProxy(..))
import Language.Hakaru.Syntax.IClasses
import Data.Number.Nat
import Language.Hakaru.Types.DataKind
import Language.Hakaru.Types.Sing (Sing(..))
import Language.Hakaru.Types.Coercion
import Language.Hakaru.Syntax.AST
import Language.Hakaru.Syntax.Datum
import Language.Hakaru.Syntax.DatumCase (DatumEvaluator,
MatchResult(..),
matchBranches)
import Language.Hakaru.Syntax.AST.Eq (alphaEq)
-- import Language.Hakaru.Syntax. -- NOTE(review): this import line was garbled in extraction and its module name lost; confirm against upstream
import Language.Hakaru.Syntax.ABT
import qualified Language.Hakaru.Syntax.Prelude as P
#ifdef __TRACE_DISINTEGRATE__
import qualified Text.PrettyPrint as PP
import Language.Hakaru.Pretty.Haskell
import Debug.Trace (trace)
#endif
-- N.B., when putting this on the heap we'll have to do some sort of
-- numbering in the @s@ parameter.
--
-- TODO: for forward disintegration (which is not just partial
-- evaluation) we really do mean proper HNFs not just WHNFs. This falls
-- out from our needing to guarantee that heap-bound variables can't
-- possibly escape; whence the assumption that the result of forward
-- disintegration contains no heap-bound variables.
--
-- HACK: can't use \"H\" as the prefix because that clashes with the
-- Hakaru datakind.
--
-- | A \"weak-head\" for the sake of 'Whnf'. N.B., this doesn't follow
-- the usual notion of weak-head normal form exactly, so we may want
-- some other name for 'Whnf'...
data Head :: ([Hakaru] -> Hakaru -> *) -> Hakaru -> * where
    WLiteral :: !(Literal a) -> Head abt a
    -- BUG: even though the 'Datum' type has a single constructor, we
    -- get a warning about not being able to UNPACK it in 'WDatum'... wtf?
    WDatum :: !(Datum (abt '[]) (HData' t)) -> Head abt (HData' t)
    WEmpty :: !(Sing ('HArray a)) -> Head abt ('HArray a)
    WArray :: !(abt '[] 'HNat) -> !(abt '[ 'HNat] a) -> Head abt ('HArray a)
    WArrayLiteral
        :: [abt '[] a] -> Head abt ('HArray a)
    WLam :: !(abt '[ a ] b) -> Head abt (a ':-> b)

    -- Measure heads (N.B., not simply @abt '[] ('HMeasure _)@)
    WMeasureOp
        :: (typs ~ UnLCs args, args ~ LCs typs)
        => !(MeasureOp typs a)
        -> !(SArgs abt args)
        -> Head abt ('HMeasure a)
    WDirac :: !(abt '[] a) -> Head abt ('HMeasure a)
    WMBind
        :: !(abt '[] ('HMeasure a))
        -> !(abt '[ a ] ('HMeasure b))
        -> Head abt ('HMeasure b)
    WPlate
        :: !(abt '[] 'HNat)
        -> !(abt '[ 'HNat ] ('HMeasure a))
        -> Head abt ('HMeasure ('HArray a))
    WChain
        :: !(abt '[] 'HNat)
        -> !(abt '[] s)
        -> !(abt '[ s ] ('HMeasure (HPair a s)))
        -> Head abt ('HMeasure (HPair ('HArray a) s))
    WSuperpose
        :: !(NonEmpty (abt '[] 'HProb, abt '[] ('HMeasure a)))
        -> Head abt ('HMeasure a)
    WReject
        :: !(Sing ('HMeasure a)) -> Head abt ('HMeasure a)

    -- Type coercion stuff. These are transparent re head-ness; that
    -- is, they behave more like HNF than WHNF.
    WCoerceTo   :: !(Coercion a b) -> !(Head abt a) -> Head abt b
    WUnsafeFrom :: !(Coercion a b) -> !(Head abt b) -> Head abt a

    WIntegrate
        :: !(abt '[] 'HReal)
        -> !(abt '[] 'HReal)
        -> !(abt '[ 'HReal ] 'HProb)
        -> Head abt 'HProb
    -- NOTE(review): the first two fields of 'WSummate' were lost in
    -- extraction; 'HReal bounds reconstructed to mirror 'WIntegrate'
    -- and to match the three-argument uses in 'fromHead'\/'fmap21' —
    -- confirm against upstream.
    WSummate
        :: !(abt '[] 'HReal)
        -> !(abt '[] 'HReal)
        -> !(abt '[ 'HInt ] 'HProb)
        -> Head abt 'HProb

    {- -- Kept commented out upstream:
    NaryOp_ :: !(NaryOp a) -> !(Seq (abt '[] a)) -> Term abt a
    PrimOp_
        :: (typs ~ UnLCs args, args ~ LCs typs)
        => !(PrimOp typs a) -> SCon args a
    -}
-- | Forget that a term is a head; i.e., convert it back into plain
-- abstract syntax. (The 'WSummate' equation below was garbled by
-- extraction and has been restored to the standard @syn@ form.)
fromHead :: (ABT Term abt) => Head abt a -> abt '[] a
fromHead (WLiteral    v)        = syn (Literal_ v)
fromHead (WDatum      d)        = syn (Datum_ d)
fromHead (WEmpty      typ)      = syn (Empty_ typ)
fromHead (WArray      e1 e2)    = syn (Array_ e1 e2)
fromHead (WArrayLiteral es)     = syn (ArrayLiteral_ es)
fromHead (WLam        e1)       = syn (Lam_ :$ e1 :* End)
fromHead (WMeasureOp  o  es)    = syn (MeasureOp_ o :$ es)
fromHead (WDirac      e1)       = syn (Dirac :$ e1 :* End)
fromHead (WMBind      e1 e2)    = syn (MBind :$ e1 :* e2 :* End)
fromHead (WPlate      e1 e2)    = syn (Plate :$ e1 :* e2 :* End)
fromHead (WChain      e1 e2 e3) = syn (Chain :$ e1 :* e2 :* e3 :* End)
fromHead (WSuperpose  pes)      = syn (Superpose_ pes)
fromHead (WReject     typ)      = syn (Reject_ typ)
fromHead (WCoerceTo   c e1)     = syn (CoerceTo_   c :$ fromHead e1 :* End)
fromHead (WUnsafeFrom c e1)     = syn (UnsafeFrom_ c :$ fromHead e1 :* End)
fromHead (WIntegrate  e1 e2 e3) = syn (Integrate :$ e1 :* e2 :* e3 :* End)
fromHead (WSummate    e1 e2 e3) = syn (Summate   :$ e1 :* e2 :* e3 :* End)
-- | Identify terms which are already heads (the partial inverse of
-- 'fromHead'); returns 'Nothing' for anything that is not manifestly
-- a head.
toHead :: (ABT Term abt) => abt '[] a -> Maybe (Head abt a)
toHead e =
    caseVarSyn e (const Nothing) $ \t ->
        case t of
        Literal_     v                      -> Just $ WLiteral v
        Datum_       d                      -> Just $ WDatum d
        Empty_       typ                    -> Just $ WEmpty typ
        Array_       e1 e2                  -> Just $ WArray e1 e2
        ArrayLiteral_ es                    -> Just $ WArrayLiteral es
        Lam_       :$ e1 :* End             -> Just $ WLam e1
        MeasureOp_ o :$ es                  -> Just $ WMeasureOp o es
        Dirac      :$ e1 :* End             -> Just $ WDirac e1
        MBind      :$ e1 :* e2 :* End       -> Just $ WMBind e1 e2
        Plate      :$ e1 :* e2 :* End       -> Just $ WPlate e1 e2
        Chain      :$ e1 :* e2 :* e3 :* End -> Just $ WChain e1 e2 e3
        Superpose_ pes                      -> Just $ WSuperpose pes
        -- NOTE(review): 'Reject_' case added so that 'toHead' covers
        -- everything 'fromHead' produces; previously such terms fell
        -- through to 'Nothing'. Confirm against upstream.
        Reject_    typ                      -> Just $ WReject typ
        CoerceTo_   c :$ e1 :* End          -> WCoerceTo   c <$> toHead e1
        UnsafeFrom_ c :$ e1 :* End          -> WUnsafeFrom c <$> toHead e1
        Integrate  :$ e1 :* e2 :* e3 :* End -> Just $ WIntegrate e1 e2 e3
        Summate    :$ e1 :* e2 :* e3 :* End -> Just $ WSummate e1 e2 e3
        _ -> Nothing
-- | Map a natural transformation over every subterm of a 'Head'.
-- (The 'WSummate' equation was garbled by extraction: its @fmap21@
-- method name had been dropped; restored here.)
instance Functor21 Head where
    fmap21 _ (WLiteral    v)        = WLiteral v
    fmap21 f (WDatum      d)        = WDatum (fmap11 f d)
    fmap21 _ (WEmpty      typ)      = WEmpty typ
    fmap21 f (WArray      e1 e2)    = WArray (f e1) (f e2)
    fmap21 f (WArrayLiteral es)     = WArrayLiteral (fmap f es)
    fmap21 f (WLam        e1)       = WLam (f e1)
    fmap21 f (WMeasureOp  o  es)    = WMeasureOp o (fmap21 f es)
    fmap21 f (WDirac      e1)       = WDirac (f e1)
    fmap21 f (WMBind      e1 e2)    = WMBind (f e1) (f e2)
    fmap21 f (WPlate      e1 e2)    = WPlate (f e1) (f e2)
    fmap21 f (WChain      e1 e2 e3) = WChain (f e1) (f e2) (f e3)
    fmap21 f (WSuperpose  pes)      = WSuperpose (fmap (f *** f) pes)
    fmap21 _ (WReject     typ)      = WReject typ
    fmap21 f (WCoerceTo   c e1)     = WCoerceTo   c (fmap21 f e1)
    fmap21 f (WUnsafeFrom c e1)     = WUnsafeFrom c (fmap21 f e1)
    fmap21 f (WIntegrate  e1 e2 e3) = WIntegrate (f e1) (f e2) (f e3)
    fmap21 f (WSummate    e1 e2 e3) = WSummate   (f e1) (f e2) (f e3)
-- | Fold over every subterm of a 'Head'. (The 'WSummate' equation was
-- garbled by extraction: its @foldMap21@ method name had been dropped;
-- restored here.)
instance Foldable21 Head where
    foldMap21 _ (WLiteral    _)        = mempty
    foldMap21 f (WDatum      d)        = foldMap11 f d
    foldMap21 _ (WEmpty      _)        = mempty
    foldMap21 f (WArray      e1 e2)    = f e1 `mappend` f e2
    foldMap21 f (WArrayLiteral es)     = F.foldMap f es
    foldMap21 f (WLam        e1)       = f e1
    foldMap21 f (WMeasureOp  _  es)    = foldMap21 f es
    foldMap21 f (WDirac      e1)       = f e1
    foldMap21 f (WMBind      e1 e2)    = f e1 `mappend` f e2
    foldMap21 f (WPlate      e1 e2)    = f e1 `mappend` f e2
    foldMap21 f (WChain      e1 e2 e3) = f e1 `mappend` f e2 `mappend` f e3
    foldMap21 f (WSuperpose  pes)      = foldMapPairs f pes
    foldMap21 _ (WReject     _)        = mempty
    foldMap21 f (WCoerceTo   _ e1)     = foldMap21 f e1
    foldMap21 f (WUnsafeFrom _ e1)     = foldMap21 f e1
    foldMap21 f (WIntegrate  e1 e2 e3) = f e1 `mappend` f e2 `mappend` f e3
    foldMap21 f (WSummate    e1 e2 e3) = f e1 `mappend` f e2 `mappend` f e3
-- | Traverse every subterm of a 'Head' in an 'Applicative'. (The
-- 'WSummate' equation was garbled by extraction: @< $ >@\/@< * >@
-- restored to @<$>@\/@<*>@.)
instance Traversable21 Head where
    traverse21 _ (WLiteral    v)        = pure $ WLiteral v
    traverse21 f (WDatum      d)        = WDatum <$> traverse11 f d
    traverse21 _ (WEmpty      typ)      = pure $ WEmpty typ
    traverse21 f (WArray      e1 e2)    = WArray <$> f e1 <*> f e2
    traverse21 f (WArrayLiteral es)     = WArrayLiteral <$> traverse f es
    traverse21 f (WLam        e1)       = WLam <$> f e1
    traverse21 f (WMeasureOp  o  es)    = WMeasureOp o <$> traverse21 f es
    traverse21 f (WDirac      e1)       = WDirac <$> f e1
    traverse21 f (WMBind      e1 e2)    = WMBind <$> f e1 <*> f e2
    traverse21 f (WPlate      e1 e2)    = WPlate <$> f e1 <*> f e2
    traverse21 f (WChain      e1 e2 e3) = WChain <$> f e1 <*> f e2 <*> f e3
    traverse21 f (WSuperpose  pes)      = WSuperpose <$> traversePairs f pes
    traverse21 _ (WReject     typ)      = pure $ WReject typ
    traverse21 f (WCoerceTo   c e1)     = WCoerceTo   c <$> traverse21 f e1
    traverse21 f (WUnsafeFrom c e1)     = WUnsafeFrom c <$> traverse21 f e1
    traverse21 f (WIntegrate  e1 e2 e3) = WIntegrate <$> f e1 <*> f e2 <*> f e3
    traverse21 f (WSummate    e1 e2 e3) = WSummate   <$> f e1 <*> f e2 <*> f e3
-- BUG: haddock doesn't like annotations on GADT constructors. So here
-- we'll avoid using the GADT syntax, even though it'd make the code
-- nicer to read.
--
-- | Weak-head normal forms: either a 'Head' or a neutral term (a term
-- whose evaluation is blocked, e.g. on a free variable).
data Whnf (abt :: [Hakaru] -> Hakaru -> *) (a :: Hakaru)
    = Head_   !(Head abt a)
    | Neutral !(abt '[] a)
-- | Forget that something is a WHNF.
fromWhnf :: (ABT Term abt) => Whnf abt a -> abt '[] a
fromWhnf (Head_   e) = fromHead e
fromWhnf (Neutral e) = e
-- | Identify terms which are already heads. N.B., we make no attempt
-- to identify neutral terms — anything that is not a head yields
-- 'Nothing'.
toWhnf :: (ABT Term abt) => abt '[] a -> Maybe (Whnf abt a)
toWhnf e = Head_ <$> toHead e
-- | Case analysis on 'Whnf' as a combinator.
caseWhnf :: Whnf abt a -> (Head abt a -> r) -> (abt '[] a -> r) -> r
caseWhnf (Head_   e) k _ = k e
caseWhnf (Neutral e) _ k = k e
-- | Given some WHNF, try to extract a 'Datum' from it. Neutral terms
-- never yield a datum.
viewWhnfDatum
    :: (ABT Term abt)
    => Whnf abt (HData' t)
    -> Maybe (Datum (abt '[]) (HData' t))
viewWhnfDatum (Head_   v) = Just $ viewHeadDatum v
viewWhnfDatum (Neutral _) = Nothing
-- N.B., we can't just check the syntax directly, in case we're given a
-- \"neutral\" term which is in fact a head:
{-
viewWhnfDatum (Neutral e) =
    caseVarSyn e (const Nothing) $ \t ->
        case t of
        Datum_ d -> error "bad \"neutral\" value!"
        _        -> Nothing
-}
-- | Extract the 'Datum' stored in a head of datum type. Heads of
-- datum type are always built with 'WDatum'; any other constructor
-- reaching here indicates a bug, hence the error.
viewHeadDatum
    :: (ABT Term abt)
    => Head abt (HData' t)
    -> Datum (abt '[]) (HData' t)
viewHeadDatum w =
    case w of
    WDatum d -> d
    _        -> error "viewHeadDatum: the impossible happened"
-- TODO: better unify the two cases of Whnf
-- HACK: this instance requires -XUndecidableInstances
instance (ABT Term abt) => Coerce (Whnf abt) where
    coerceTo c w =
        case w of
        Neutral e ->
            Neutral . maybe (P.coerceTo_ c e) id
                $ caseVarSyn e (const Nothing) $ \t ->
                    case t of
                    -- BUG: literals should never be neutral in the
                    -- first place; but even if we got one, we
                    -- shouldn't call it neutral after coercing it.
                    Literal_ x -> Just $ P.literal_ (coerceTo c x)
                    -- UnsafeFrom_ c' :$ es' -> TODO: cancellation
                    CoerceTo_ c' :$ es' ->
                        case es' of
                        e' :* End -> Just $ P.coerceTo_ (c . c') e'
                        -- NOTE(review): the original's layout was lost
                        -- in extraction, so it's ambiguous whether its
                        -- single @_ -> Nothing@ belonged to this inner
                        -- case or the outer one; both catch-alls are
                        -- given here so the match is total either way.
                        _ -> Nothing
                    _ -> Nothing
        Head_ v ->
            case v of
            WLiteral x -> Head_ $ WLiteral (coerceTo c x)
            -- WUnsafeFrom c' v' -> TODO: cancellation
            WCoerceTo c' v' -> Head_ $ WCoerceTo (c . c') v'
            _ -> Head_ $ WCoerceTo c v

    coerceFrom c w =
        case w of
        Neutral e ->
            Neutral . maybe (P.unsafeFrom_ c e) id
                $ caseVarSyn e (const Nothing) $ \t ->
                    case t of
                    -- BUG: (as above) literals should never be neutral.
                    Literal_ x -> Just $ P.literal_ (coerceFrom c x)
                    -- CoerceTo_ c' :$ es' -> TODO: cancellation
                    UnsafeFrom_ c' :$ es' ->
                        case es' of
                        e' :* End -> Just $ P.unsafeFrom_ (c' . c) e'
                        _ -> Nothing
                    _ -> Nothing
        Head_ v ->
            case v of
            WLiteral x -> Head_ $ WLiteral (coerceFrom c x)
            -- WCoerceTo c' v' -> TODO: cancellation
            WUnsafeFrom c' v' -> Head_ $ WUnsafeFrom (c' . c) v'
            _ -> Head_ $ WUnsafeFrom c v
-- BUG: haddock doesn't like annotations on GADT constructors. So here
-- we'll avoid using the GADT syntax, even though it'd make the code
-- nicer to read.
--
-- | Lazy terms are either thunks (which we may decide to evaluate
-- later) or are already evaluated to WHNF.
data Lazy (abt :: [Hakaru] -> Hakaru -> *) (a :: Hakaru)
    = Whnf_ !(Whnf abt a)
    | Thunk !(abt '[] a)
-- | Forget whether a term has been evaluated to WHNF or not.
fromLazy :: (ABT Term abt) => Lazy abt a -> abt '[] a
fromLazy (Whnf_ e) = fromWhnf e
fromLazy (Thunk e) = e
-- | Case analysis on 'Lazy' as a combinator.
caseLazy :: Lazy abt a -> (Whnf abt a -> r) -> (abt '[] a -> r) -> r
caseLazy (Whnf_ e) k _ = k e
caseLazy (Thunk e) _ k = k e
-- | Is the lazy value a variable? If so, return it. Both the neutral
-- and thunk cases are inspected via 'caseVarSyn'; heads are never
-- bare variables.
getLazyVariable :: (ABT Term abt) => Lazy abt a -> Maybe (Variable a)
getLazyVariable e =
    case e of
    Whnf_ (Head_   _)  -> Nothing
    Whnf_ (Neutral e') -> caseVarSyn e' Just (const Nothing)
    Thunk e'           -> caseVarSyn e' Just (const Nothing)
-- | Boolean version of 'getLazyVariable'.
isLazyVariable :: (ABT Term abt) => Lazy abt a -> Bool
isLazyVariable e =
    case getLazyVariable e of
    Just _  -> True
    Nothing -> False
-- | Is the lazy value a literal? If so, return it.
getLazyLiteral :: (ABT Term abt) => Lazy abt a -> Maybe (Literal a)
getLazyLiteral e =
    case e of
    Whnf_ (Head_ (WLiteral v)) -> Just v
    Thunk e' ->
        caseVarSyn e' (const Nothing) $ \t ->
            case t of
            Literal_ v -> Just v
            _          -> Nothing
    -- Catch-all restored: without it the match is non-exhaustive for
    -- 'Neutral' terms and non-literal heads (the line appears to have
    -- been dropped by extraction).
    _ -> Nothing
-- | Boolean version of 'getLazyLiteral'.
isLazyLiteral :: (ABT Term abt) => Lazy abt a -> Bool
isLazyLiteral e =
    case getLazyLiteral e of
    Just _  -> True
    Nothing -> False
-- | Whether a statement is pure, impure (i.e., must be evaluated in
-- the 'HMeasure' monad), or expectation-flavored ('ExpectP').
data Purity = Pure | Impure | ExpectP
    deriving (Eq, Read, Show)
-- | An index: a variable of type 'HNat paired with the size of the
-- range it indexes over. Used by partial evaluators (currently only
-- Disintegrate) to work on arrays.
data Index ast = Ind (Variable 'HNat) (ast 'HNat)

instance (ABT Term abt) => Eq (Index (abt '[])) where
    Ind i1 s1 == Ind i2 s2 = i1 == i2 && (alphaEq s1 s2)

-- TODO: check this
-- NOTE(review): the method body of this instance was lost in
-- extraction; comparing by the index variable alone matches the
-- primary key of the 'Eq' instance — confirm against upstream.
instance (ABT Term abt) => Ord (Index (abt '[])) where
    compare (Ind i1 _) (Ind i2 _) = compare i1 i2

-- | The variable an 'Index' binds.
indVar :: Index ast -> Variable 'HNat
indVar (Ind v _ ) = v

-- | The size of the range an 'Index' ranges over.
indSize :: Index ast -> ast 'HNat
indSize (Ind _ a) = a

-- | View an 'Index' as the term for its bound variable.
fromIndex :: (ABT Term abt) => Index (abt '[]) -> abt '[] 'HNat
fromIndex (Ind v _) = var v
-- | A 'Location' is a 'Variable' wrapped in a newtype, used for
-- heap\/context-bound names so they cannot be confused with variables
-- occurring in terms.
newtype Location (a :: k) = Location (Variable a)

instance Show (Sing a) => Show (Location a) where
    show (Location v) = show v

-- | The hint (human-readable name) of the underlying variable.
locHint :: Location a -> Text
locHint (Location x) = varHint x

-- | The type of the underlying variable.
locType :: Location a -> Sing a
locType (Location x) = varType x

-- | Type-safe equality on 'Location's: when the underlying variables
-- are equal we also get a proof that their types match (cf. 'varEq').
locEq :: (Show1 (Sing :: k -> *), JmEq1 (Sing :: k -> *))
      => Location (a :: k)
      -> Location (b :: k)
      -> Maybe (TypeEq a b)
locEq (Location a) (Location b) = varEq a b

-- | Unwrap a 'Location'.
fromLocation :: Location a -> Variable a
fromLocation (Location v) = v

-- | Unwrap a list of 'Location's.
fromLocations1 :: List1 Location a -> List1 Variable a
fromLocations1 = fmap11 fromLocation

-- | Wrap a list of 'Variable's as 'Location's.
locations1 :: List1 Variable a -> List1 Location a
locations1 = fmap11 Location
-- | Association lists keyed by 'Location' rather than 'Variable':
-- thin wrappers around 'Assoc'\/'Assocs'.
newtype LAssoc  ast = LAssoc  (Assoc  ast)
newtype LAssocs ast = LAssocs (Assocs ast)

-- | An empty collection of associations.
emptyLAssocs :: LAssocs abt
emptyLAssocs = LAssocs (emptyAssocs)

-- | A single association from a 'Location' to a value.
singletonLAssocs :: Location a -> f a -> LAssocs f
singletonLAssocs (Location v) e = LAssocs (singletonAssocs v e)

-- | Build associations from parallel lists of locations and values.
toLAssocs1 :: List1 Location xs -> List1 ast xs -> LAssocs ast
toLAssocs1 ls es = LAssocs (toAssocs1 (fromLocations1 ls) es)

-- | Combine two collections of associations; precedence semantics are
-- inherited from 'insertAssocs'.
insertLAssocs :: LAssocs ast -> LAssocs ast -> LAssocs ast
insertLAssocs (LAssocs a) (LAssocs b) = LAssocs (insertAssocs a b)

-- | Look up the value associated with a 'Location', if any.
lookupLAssoc :: (Show1 (Sing :: k -> *), JmEq1 (Sing :: k -> *))
             => Location (a :: k)
             -> LAssocs ast
             -> Maybe (ast a)
lookupLAssoc (Location v) (LAssocs a) = lookupAssoc v a
-- | A single statement in some ambient monad (specified by the @p@
-- type index). In particular, note that the first argument to 'MBind'
-- (or 'Let_') together with the variable bound in the second argument
-- forms the \"statement\" (leaving out the body of the second
-- argument, which may be part of a following statement).
--
-- Semantically: let @Γ@ be the collection of all free variables that
-- occur in the term expressions in @ss@, viewed as a measurable space
-- (a product of the measurable spaces for each variable). And let @Δ@
-- be the collection of all variables bound by the statements in @ss@,
-- also viewed as a measurable space. The semantic interpretation of
-- @ss@ is a measurable function of type @Γ :-> M Δ@ where @M@ is
-- either @HMeasure@ (if @p ~ 'Impure@) or @Identity@ (if @p ~ 'Pure@).
--
-- NOTE(review): the @!(v a)@ binder fields of 'SBind', 'SLet', and
-- 'SStuff1' below were dropped by extraction and have been
-- reconstructed; their presence and position are confirmed by the
-- three-argument patterns in 'statementVars' and 'isBoundBy'.
data Statement :: ([Hakaru] -> Hakaru -> *) -> (Hakaru -> *) -> Purity -> * where
    -- BUG: haddock doesn't like annotations on GADT constructors.
    -- A variable bound by 'MBind' to a measure expression.
    SBind
        :: forall abt (v :: Hakaru -> *) (a :: Hakaru)
        .  !(v a)
        -> !(Lazy abt ('HMeasure a))
        -> [Index (abt '[])]
        -> Statement abt v 'Impure

    -- A variable bound by 'Let_' to an expression.
    SLet
        :: forall abt p (v :: Hakaru -> *) (a :: Hakaru)
        .  !(v a)
        -> !(Lazy abt a)
        -> [Index (abt '[])]
        -> Statement abt v p

    -- A weight; i.e., the first component of each argument to
    -- 'superpose'. This is a statement just so that we can avoid
    -- duplicating work.
    SWeight
        :: forall abt (v :: Hakaru -> *)
        .  !(Lazy abt 'HProb)
        -> [Index (abt '[])]
        -> Statement abt v 'Impure

    -- An assertion that a pattern matches; on failure we return the
    -- empty measure. N.B., this statement type is only impure.
    SGuard
        :: forall abt (v :: Hakaru -> *) (xs :: [Hakaru]) (a :: Hakaru)
        .  !(List1 v xs)
        -> !(Pattern xs a)
        -> !(Lazy abt a)
        -> [Index (abt '[])]
        -> Statement abt v 'Impure

    -- TODO: generalize to use a 'VarSet' so we can collapse these
    SStuff0
        :: forall abt (v :: Hakaru -> *)
        .  (abt '[] 'HProb -> abt '[] 'HProb)
        -> [Index (abt '[])]
        -> Statement abt v 'ExpectP
    SStuff1
        :: forall abt (v :: Hakaru -> *) (a :: Hakaru)
        .  !(v a)
        -> (abt '[] 'HProb -> abt '[] 'HProb)
        -> [Index (abt '[])]
        -> Statement abt v 'ExpectP
-- | The set of variables bound by a single statement.
statementVars :: Statement abt Location p -> VarSet ('KProxy :: KProxy Hakaru)
statementVars s =
    case s of
    SBind   x  _ _   -> singletonVarSet (fromLocation x)
    SLet    x  _ _   -> singletonVarSet (fromLocation x)
    SWeight _  _     -> emptyVarSet
    SGuard  xs _ _ _ -> toVarSet1 (fromLocations1 xs)
    SStuff0 _  _     -> emptyVarSet
    SStuff1 x  _ _   -> singletonVarSet (fromLocation x)
-- | Is the location bound by the statement?
--
-- We return @Maybe ()@ rather than @Bool@ because in our primary use
-- case we're already in the @Maybe@ monad and so it's easier to stick
-- with that. If we find we'd really rather have the @Bool@, then we
-- can easily change things.
isBoundBy :: Location (a :: Hakaru) -> Statement abt Location p -> Maybe ()
x `isBoundBy` SBind  y  _ _   = const () <$> locEq x y
x `isBoundBy` SLet   y  _ _   = const () <$> locEq x y
_ `isBoundBy` SWeight   _ _   = Nothing
x `isBoundBy` SGuard ys _ _ _ =
    -- TODO: just check membership directly, rather than going through VarSet
    if memberVarSet (fromLocation x) (toVarSet1 (fmap11 fromLocation ys))
    then Just ()
    else Nothing
_ `isBoundBy` SStuff0   _ _   = Nothing
x `isBoundBy` SStuff1 y _ _   = const () <$> locEq x y
-- TODO: remove this CPP guard, provided we don't end up with a cyclic dependency...
#ifdef __TRACE_DISINTEGRATE__
instance (ABT Term abt) => Pretty (Whnf abt) where
    -- NOTE(review): the 'Head_' equation was lost in extraction;
    -- rendering the head via 'fromHead' is a best-effort
    -- reconstruction — confirm against upstream.
    prettyPrec_ p (Head_   w) = ppApply1 p "Head_" (fromHead w)
    prettyPrec_ p (Neutral e) = ppApply1 p "Neutral" e

instance (ABT Term abt) => Pretty (Lazy abt) where
    prettyPrec_ p (Whnf_ w) = ppFun p "Whnf_" [PP.sep (prettyPrec_ 11 w)]
    prettyPrec_ p (Thunk e) = ppApply1 p "Thunk" e
-- | Render a one-argument constructor application, parenthesizing
-- when the ambient precedence @p@ exceeds application precedence (9).
ppApply1 :: (ABT Term abt) => Int -> String -> abt '[] a -> [PP.Doc]
ppApply1 p f e1 =
    let body = PP.text f PP.<+> PP.nest (1 + length f) (prettyPrec 11 e1)
    in if p > 9 then [PP.parens (PP.nest 1 body)] else [body]
-- | Render a constructor application over pre-rendered argument
-- documents, parenthesizing via 'parens' when the ambient precedence
-- @p@ exceeds application precedence (9).
ppFun :: Int -> String -> [PP.Doc] -> [PP.Doc]
ppFun _ f [] = [PP.text f]
ppFun p f ds =
    parens (p > 9) [PP.text f PP.<+> PP.nest (1 + length f) (PP.sep ds)]
-- | Conditionally wrap a group of documents in parentheses.
parens :: Bool -> [PP.Doc] -> [PP.Doc]
parens True  ds = [PP.parens (PP.nest 1 (PP.sep ds))]
parens False ds = ds
-- | Render documents as a comma-separated, bracketed list.
ppList :: [PP.Doc] -> PP.Doc
ppList = PP.sep . (:[]) . PP.brackets . PP.nest 1 . PP.fsep . PP.punctuate PP.comma
-- | Render a list of 'Index'es by their bound variables.
ppInds :: (ABT Term abt) => [Index (abt '[])] -> PP.Doc
ppInds = ppList . map (ppVariable . indVar)
-- | Render a single 'Statement' at ambient precedence @p@, one
-- constructor-shaped document per statement form.
ppStatement :: (ABT Term abt) => Int -> Statement abt Location p -> PP.Doc
ppStatement p s =
    case s of
    SBind (Location x) e inds ->
        PP.sep $ ppFun p "SBind"
            [ ppVariable x
            , PP.sep $ prettyPrec_ 11 e
            , ppInds inds
            ]
    SLet (Location x) e inds ->
        PP.sep $ ppFun p "SLet"
            [ ppVariable x
            , PP.sep $ prettyPrec_ 11 e
            , ppInds inds
            ]
    SWeight e inds ->
        PP.sep $ ppFun p "SWeight"
            [ PP.sep $ prettyPrec_ 11 e
            , ppInds inds
            ]
    SGuard xs pat e inds ->
        PP.sep $ ppFun p "SGuard"
            [ PP.sep $ ppVariables (fromLocations1 xs)
            , PP.sep $ prettyPrec_ 11 pat
            , PP.sep $ prettyPrec_ 11 e
            , ppInds inds
            ]
    -- The two 'ExpectP' forms carry functions, which we cannot render.
    SStuff0 _ _ ->
        PP.sep $ ppFun p "SStuff0"
            [ PP.text "TODO: ppStatement{SStuff0}"
            ]
    SStuff1 _ _ _ ->
        PP.sep $ ppFun p "SStuff1"
            [ PP.text "TODO: ppStatement{SStuff1}"
            ]
-- | Render a list of statements as a vertical, comma-prefixed,
-- bracketed block (one statement per line).
pretty_Statements :: (ABT Term abt) => [Statement abt Location p] -> PP.Doc
pretty_Statements []     = PP.text "[]"
pretty_Statements (s:ss) =
    foldl
        (\d s' -> d PP.$+$ PP.comma PP.<+> ppStatement 0 s')
        (PP.text "[" PP.<+> ppStatement 0 s)
        ss
    PP.$+$ PP.text "]"
-- | Render a context (list of statements) followed by the term under
-- consideration.
pretty_Statements_withTerm
    :: (ABT Term abt) => [Statement abt Location p] -> abt '[] a -> PP.Doc
pretty_Statements_withTerm ss e =
    pretty_Statements ss PP.$+$ pretty e
-- | Render an association list as @x -> e@ bindings, one per line.
prettyAssocs
    :: (ABT Term abt)
    => Assocs (abt '[])
    -> PP.Doc
prettyAssocs rho =
    PP.vcat
        [ ppVariable x PP.<+> PP.text "->" PP.<+> pretty e
        | Assoc x e <- fromAssocs rho
        ]
#endif
-- | A function for evaluating any term to weak-head normal form.
type TermEvaluator abt m =
    forall a. abt '[] a -> m (Whnf abt a)

-- | A function for \"performing\" an 'HMeasure' monadic action. This
-- could mean actual random sampling, or simulated sampling by
-- generating a new term and returning the newly bound variable, or
-- anything else.
type MeasureEvaluator abt m =
    forall a. abt '[] ('HMeasure a) -> m (Whnf abt a)

-- | A function for evaluating any case-expression to weak-head
-- normal form.
type CaseEvaluator abt m =
    forall a b. abt '[] a -> [Branch a abt b] -> m (Whnf abt b)

-- | A function for evaluating any variable to weak-head normal form.
type VariableEvaluator abt m =
    forall a. Variable a -> m (Whnf abt a)
-- | This class captures the monadic operations needed by the
-- 'evaluate' function in "Language.Hakaru.Evaluation.Lazy".
class (Functor m, Applicative m, Monad m, ABT Term abt)
    => EvaluationMonad abt m p | m -> abt p
    where
    -- TODO: should we have a *method* for arbitrarily incrementing the
    -- stored 'nextFreshNat'; or should we only rely on it being
    -- initialized correctly? Beware correctness issues about updating
    -- the lower bound after having called 'freshNat'...
    --
    -- | Return a fresh natural number. That is, a number which is not
    -- the 'varID' of any free variable in the expressions of interest,
    -- and isn't a number we've returned previously.
    freshNat :: m Nat

    -- | Internal function for renaming the variables bound by a
    -- statement. We return the renamed statement along with a
    -- substitution for mapping the old variable names to their new
    -- variable names.
    freshLocStatement
        :: Statement abt Variable p
        -> m (Statement abt Location p, Assocs (Variable :: Hakaru -> *))
    freshLocStatement s =
        case s of
        SWeight w e    -> return (SWeight w e, mempty)
        SBind x body i -> do
            x' <- freshenVar x
            return (SBind (Location x') body i, singletonAssocs x x')
        SLet x body i -> do
            x' <- freshenVar x
            return (SLet (Location x') body i, singletonAssocs x x')
        SGuard xs pat scrutinee i -> do
            xs' <- freshenVars xs
            return (SGuard (locations1 xs') pat scrutinee i, toAssocs1 xs xs')
        SStuff0 e e' -> return (SStuff0 e e', mempty)
        SStuff1 x f i -> do
            x' <- freshenVar x
            return (SStuff1 (Location x') f i, singletonAssocs x x')

    -- | Returns the current 'Index'es. Currently, this is only
    -- applicable to the Disintegration monad, but could be relevant as
    -- other partial evaluators begin to handle Plate and Array.
    getIndices :: m [Index (abt '[])]
    getIndices = return []

    -- | Add a statement to the top of the context. This is unsafe
    -- because it may allow confusion between variables with the same
    -- name but different scopes (thus, may allow variable capture).
    -- Prefer using 'push_', 'push', or 'pushes'.
    unsafePush :: Statement abt Location p -> m ()

    -- | Call 'unsafePush' repeatedly. Part of the class since we may
    -- be able to do this more efficiently than actually calling
    -- 'unsafePush' repeatedly. N.B., this should push things in the
    -- same order as 'pushes' does.
    unsafePushes :: [Statement abt Location p] -> m ()
    unsafePushes = mapM_ unsafePush

    -- | Look for the statement @s@ binding the location. If found,
    -- call the continuation with @s@ in the context where everything
    -- @s@ (transitively) depends on is included but everything that
    -- (transitively) depends on @s@ is excluded. After the
    -- continuation returns, restore the removed bindings. If no such
    -- @s@ can be found, return 'Nothing' without altering the context.
    -- N.B., the statement @s@ itself is popped! Thus, it is up to the
    -- continuation to push whatever replaces it.
    select
        :: Location (a :: Hakaru)
        -> (Statement abt Location p -> Maybe (m r))
        -> m (Maybe r)

    -- | How to substitute for a variable; defaults to plain 'var'.
    substVar :: Variable a -> abt '[] a
        -> (forall b'. Variable b' -> m (abt '[] b'))
    substVar _ _ = return . var

    -- | The free variables of a term, possibly extended by the monad.
    extFreeVars :: abt xs a -> m (VarSet (KindOf a))
    extFreeVars e = return (freeVars e)

    -- | The first argument to @evaluateCase@ will be the
    -- 'TermEvaluator' we're constructing (thus tying the knot).
    evaluateCase :: TermEvaluator abt m -> CaseEvaluator abt m
    {-# INLINE evaluateCase #-}
    evaluateCase = defaultCaseEvaluator

    -- | Evaluate a variable by looking it up in the context: an
    -- 'SBind' is performed and downgraded to an 'SLet'; an 'SLet' is
    -- forced and memoized; other statements yield a neutral term.
    evaluateVar
        :: MeasureEvaluator abt m
        -> TermEvaluator abt m
        -> VariableEvaluator abt m
    evaluateVar perform evaluate_ = \x ->
        fmap (maybe (Neutral $ var x) id) . select (Location x) $ \s ->
            case s of
            SBind y e i -> do
                Refl <- locEq (Location x) y
                Just $ do
                    w <- perform $ caseLazy e fromWhnf id
                    unsafePush (SLet (Location x) (Whnf_ w) i)
#ifdef __TRACE_DISINTEGRATE__
                    trace ("-- updated "
                        ++ show (ppStatement 11 s)
                        ++ " to "
                        ++ show (ppStatement 11 (SLet (Location x) (Whnf_ w) i))
                        ) $ return ()
#endif
                    return w
            SLet y e i -> do
                Refl <- locEq (Location x) y
                Just $ do
                    w <- caseLazy e return evaluate_
                    unsafePush (SLet (Location x) (Whnf_ w) i)
                    return w
            -- These two don't bind any variables, so they definitely
            -- can't match.
            SWeight _ _ -> Nothing
            SStuff0 _ _ -> Nothing
            -- These two do bind variables, but there's no expression
            -- we can return for them because the variables are
            -- untouchable\/abstract.
            SStuff1 _ _ _  -> Just . return . Neutral $ var x
            SGuard _ _ _ _ -> Just . return . Neutral $ var x
| A simple ' CaseEvaluator ' which uses the ' DatumEvaluator ' to
call the ' TermEvaluator ' to continue evaluating the body of the
defaultCaseEvaluator
:: forall abt m p
. (ABT Term abt, EvaluationMonad abt m p)
=> TermEvaluator abt m
-> CaseEvaluator abt m
# INLINE defaultCaseEvaluator #
defaultCaseEvaluator evaluate_ = evaluateCase_
where
redundancy by introducing a new variable for each time this
evaluateDatum :: DatumEvaluator (abt '[]) m
evaluateDatum e = viewWhnfDatum <$> evaluate_ e
evaluateCase_ :: CaseEvaluator abt m
evaluateCase_ e bs = do
match <- matchBranches evaluateDatum e bs
case match of
Nothing ->
TODO : rather than throwing a error ,
error "defaultCaseEvaluator: non-exhaustive patterns in case!"
Just GotStuck ->
return . Neutral . syn $ Case_ e bs
Just (Matched ss body) ->
pushes (toVarStatements ss) body >>= evaluate_
toVarStatements :: Assocs (abt '[]) -> [Statement abt Variable p]
toVarStatements = map (\(Assoc x e) -> SLet x (Thunk e) []) .
fromAssocs
extSubst
:: forall abt a xs b m p. (EvaluationMonad abt m p)
=> Variable a
-> abt '[] a
-> abt xs b
-> m (abt xs b)
extSubst x e = substM x e (substVar x e)
extSubsts
:: forall abt a xs m p. (EvaluationMonad abt m p)
=> Assocs (abt '[])
-> abt xs a
-> m (abt xs a)
extSubsts rho0 e0 =
F.foldlM (\e (Assoc x v) -> extSubst x v e) e0 (unAssocs rho0)
TODO : define a new NameSupply monad in " Language . Hakaru . Syntax . Variable " for encapsulating these four fresh(en ) functions ?
freshVar
:: (EvaluationMonad abt m p)
=> Text
-> Sing (a :: Hakaru)
-> m (Variable a)
freshVar hint typ = (\i -> Variable hint i typ) <$> freshNat
TODO : move to " Language . Hakaru . Syntax . Variable " in case anyone else wants it too .
freshVars
:: (EvaluationMonad abt m p)
=> List1 Hint xs
-> m (List1 Variable xs)
freshVars Nil1 = return Nil1
freshVars (Cons1 x xs) = Cons1 <$> freshVar' x <*> freshVars xs
where
freshVar' (Hint hint typ) = freshVar hint typ
freshenVar
:: (EvaluationMonad abt m p)
=> Variable (a :: Hakaru)
-> m (Variable a)
freshenVar x = (\i -> x{varID=i}) <$> freshNat
TODO : make this more efficient than actually calling ' freshenVar '
freshenVars
:: (EvaluationMonad abt m p)
=> List1 Variable (xs :: [Hakaru])
-> m (List1 Variable xs)
freshenVars Nil1 = return Nil1
freshenVars (Cons1 x xs) = Cons1 <$> freshenVar x <*> freshenVars xs
freshenVars = go dnil1
where
go : : ( EvaluationMonad abt m p )
= > DList1 Variable ( ys : : [ Hakaru ] )
- > List1 Variable ( zs : : [ Hakaru ] )
- > m ( List1 Variable ( ys + + zs ) )
go k ( Cons1 x xs ) = do
x ' < - freshenVar x
freshenVars = go dnil1
where
go :: (EvaluationMonad abt m p)
=> DList1 Variable (ys :: [Hakaru])
-> List1 Variable (zs :: [Hakaru])
-> m (List1 Variable (ys ++ zs))
go k (Cons1 x xs) = do
x' <- freshenVar x
-}
freshInd :: (EvaluationMonad abt m p)
=> abt '[] 'HNat
-> m (Index (abt '[]))
freshInd s = do
x <- freshVar T.empty SNat
return $ Ind x s
freshenLoc :: (EvaluationMonad abt m p)
=> Location (a :: Hakaru) -> m (Location a)
freshenLoc (Location x) = Location <$> freshenVar x
freshenLocs :: (EvaluationMonad abt m p)
=> List1 Location (ls :: [Hakaru])
-> m (List1 Location ls)
freshenLocs Nil1 = return Nil1
freshenLocs (Cons1 l ls) = Cons1 <$> freshenLoc l <*> freshenLocs ls
push_
:: (ABT Term abt, EvaluationMonad abt m p)
=> Statement abt Variable p
-> m (Assocs (Variable :: Hakaru -> *))
push_ s = do
(s',rho) <- freshLocStatement s
unsafePush s'
return rho
the way . The second argument represents \"the rest of the term\ "
( renamed)binding statement . The third argument is the continuation
second and third arguments we could return an ' Assocs ' giving
push
:: (ABT Term abt, EvaluationMonad abt m p)
^ the \"rest\ " of the term
push s e = do
rho <- push_ s
return (renames rho e)
| Call ' push ' repeatedly . ( , is more efficient than actually
calling ' push ' repeatedly . ) The head is pushed first and thus
pushes
:: (ABT Term abt, EvaluationMonad abt m p)
^ the \"rest\ " of the term
pushes ss e = do
rho <- F.foldlM (\rho s -> mappend rho <$> push_ s) mempty ss
return (renames rho e)
|
3e861699289e8a64b25381de8b8df49b87383e0f132a84435c25817eef5e8199 | DavidAlphaFox/RabbitMQ | rabbit_web_dispatch.erl | The contents of this file are subject to the Mozilla Public License
%% Version 1.1 (the "License"); you may not use this file except in
%% compliance with the License. You may obtain a copy of the License
%% at /
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and
%% limitations under the License.
%%
The Original Code is RabbitMQ .
%%
The Initial Developer of the Original Code is GoPivotal , Inc.
Copyright ( c ) 2010 - 2014 GoPivotal , Inc. All rights reserved .
%%
-module(rabbit_web_dispatch).
-export([register_context_handler/5, register_static_context/6]).
-export([register_port_redirect/4]).
-export([unregister_context/1]).
%% Handler Registration
%% Registers a dynamic selector and handler combination, with a link
%% to display in lists.
register_handler(Name, Listener, Selector, Handler, Link) ->
rabbit_web_dispatch_registry:add(Name, Listener, Selector, Handler, Link).
%% Methods for standard use cases
%% Registers a dynamic handler under a fixed context path, with link
%% to display in the global context.
register_context_handler(Name, Listener, Prefix, Handler, LinkText) ->
register_handler(
Name, Listener, context_selector(Prefix), Handler, {Prefix, LinkText}),
{ok, Prefix}.
%% Convenience function registering a fully static context to serve
%% content from a module-relative directory, with link to display in
%% the global context.
register_static_context(Name, Listener, Prefix, Module, FSPath, LinkText) ->
register_handler(Name, Listener,
context_selector(Prefix),
static_context_handler(Prefix, Module, FSPath),
{Prefix, LinkText}),
{ok, Prefix}.
%% A context which just redirects the request to a different port.
register_port_redirect(Name, Listener, Prefix, RedirectPort) ->
register_context_handler(
Name, Listener, Prefix,
fun (Req) ->
Host = case Req:get_header_value("host") of
undefined -> {ok, {IP, _Port}} = rabbit_net:sockname(
Req:get(socket)),
rabbit_misc:ntoa(IP);
Header -> hd(string:tokens(Header, ":"))
end,
URL = rabbit_misc:format(
"~s://~s:~B~s",
[Req:get(scheme), Host, RedirectPort, Req:get(raw_path)]),
Req:respond({301, [{"Location", URL}], ""})
end,
rabbit_misc:format("Redirect to port ~B", [RedirectPort])).
context_selector("") ->
fun(_Req) -> true end;
context_selector(Prefix) ->
Prefix1 = "/" ++ Prefix,
fun(Req) ->
Path = Req:get(raw_path),
(Path == Prefix1) orelse (string:str(Path, Prefix1 ++ "/") == 1)
end.
%% Produces a handler for use with register_handler that serves up
%% static content from a directory specified relative to the directory
%% containing the ebin directory containing the named module's beam
%% file.
static_context_handler(Prefix, Module, FSPath) ->
{file, Here} = code:is_loaded(Module),
ModuleRoot = filename:dirname(filename:dirname(Here)),
LocalPath = filename:join(ModuleRoot, FSPath),
static_context_handler(Prefix, LocalPath).
%% Produces a handler for use with register_handler that serves up
%% static content from a specified directory.
static_context_handler("", LocalPath) ->
fun(Req) ->
"/" ++ Path = Req:get(raw_path),
serve_file(Req, Path, LocalPath)
end;
static_context_handler(Prefix, LocalPath) ->
fun(Req) ->
"/" ++ Path = Req:get(raw_path),
case string:substr(Path, length(Prefix) + 1) of
"" -> Req:respond({301, [{"Location", "/" ++ Prefix ++ "/"}], ""});
"/" ++ P -> serve_file(Req, P, LocalPath)
end
end.
serve_file(Req, Path, LocalPath) ->
case Req:get(method) of
Method when Method =:= 'GET'; Method =:= 'HEAD' ->
Req:serve_file(Path, LocalPath);
_ ->
Req:respond({405, [{"Allow", "GET, HEAD"}],
"Only GET or HEAD supported for static content"})
end.
%% The opposite of all those register_* functions.
unregister_context(Name) ->
rabbit_web_dispatch_registry:remove(Name).
| null | https://raw.githubusercontent.com/DavidAlphaFox/RabbitMQ/0a64e6f0464a9a4ce85c6baa52fb1c584689f49a/plugins-src/rabbitmq-web-dispatch/src/rabbit_web_dispatch.erl | erlang | Version 1.1 (the "License"); you may not use this file except in
compliance with the License. You may obtain a copy of the License
at /
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and
limitations under the License.
Handler Registration
Registers a dynamic selector and handler combination, with a link
to display in lists.
Methods for standard use cases
Registers a dynamic handler under a fixed context path, with link
to display in the global context.
Convenience function registering a fully static context to serve
content from a module-relative directory, with link to display in
the global context.
A context which just redirects the request to a different port.
Produces a handler for use with register_handler that serves up
static content from a directory specified relative to the directory
containing the ebin directory containing the named module's beam
file.
Produces a handler for use with register_handler that serves up
static content from a specified directory.
The opposite of all those register_* functions. | The contents of this file are subject to the Mozilla Public License
Software distributed under the License is distributed on an " AS IS "
The Original Code is RabbitMQ .
The Initial Developer of the Original Code is GoPivotal , Inc.
Copyright ( c ) 2010 - 2014 GoPivotal , Inc. All rights reserved .
-module(rabbit_web_dispatch).
-export([register_context_handler/5, register_static_context/6]).
-export([register_port_redirect/4]).
-export([unregister_context/1]).
register_handler(Name, Listener, Selector, Handler, Link) ->
rabbit_web_dispatch_registry:add(Name, Listener, Selector, Handler, Link).
register_context_handler(Name, Listener, Prefix, Handler, LinkText) ->
register_handler(
Name, Listener, context_selector(Prefix), Handler, {Prefix, LinkText}),
{ok, Prefix}.
register_static_context(Name, Listener, Prefix, Module, FSPath, LinkText) ->
register_handler(Name, Listener,
context_selector(Prefix),
static_context_handler(Prefix, Module, FSPath),
{Prefix, LinkText}),
{ok, Prefix}.
register_port_redirect(Name, Listener, Prefix, RedirectPort) ->
register_context_handler(
Name, Listener, Prefix,
fun (Req) ->
Host = case Req:get_header_value("host") of
undefined -> {ok, {IP, _Port}} = rabbit_net:sockname(
Req:get(socket)),
rabbit_misc:ntoa(IP);
Header -> hd(string:tokens(Header, ":"))
end,
URL = rabbit_misc:format(
"~s://~s:~B~s",
[Req:get(scheme), Host, RedirectPort, Req:get(raw_path)]),
Req:respond({301, [{"Location", URL}], ""})
end,
rabbit_misc:format("Redirect to port ~B", [RedirectPort])).
context_selector("") ->
fun(_Req) -> true end;
context_selector(Prefix) ->
Prefix1 = "/" ++ Prefix,
fun(Req) ->
Path = Req:get(raw_path),
(Path == Prefix1) orelse (string:str(Path, Prefix1 ++ "/") == 1)
end.
static_context_handler(Prefix, Module, FSPath) ->
{file, Here} = code:is_loaded(Module),
ModuleRoot = filename:dirname(filename:dirname(Here)),
LocalPath = filename:join(ModuleRoot, FSPath),
static_context_handler(Prefix, LocalPath).
static_context_handler("", LocalPath) ->
fun(Req) ->
"/" ++ Path = Req:get(raw_path),
serve_file(Req, Path, LocalPath)
end;
static_context_handler(Prefix, LocalPath) ->
fun(Req) ->
"/" ++ Path = Req:get(raw_path),
case string:substr(Path, length(Prefix) + 1) of
"" -> Req:respond({301, [{"Location", "/" ++ Prefix ++ "/"}], ""});
"/" ++ P -> serve_file(Req, P, LocalPath)
end
end.
serve_file(Req, Path, LocalPath) ->
case Req:get(method) of
Method when Method =:= 'GET'; Method =:= 'HEAD' ->
Req:serve_file(Path, LocalPath);
_ ->
Req:respond({405, [{"Allow", "GET, HEAD"}],
"Only GET or HEAD supported for static content"})
end.
unregister_context(Name) ->
rabbit_web_dispatch_registry:remove(Name).
|
5fc84161c170c0e0377878f49fa78904b9c96daeb5be514fba740f355b57a1fe | rm-hull/big-bang | config.cljs | (ns big-bang.examples.pacman.config
(:require [cljs.core.async :refer [chan <!] :as async]
[big-bang.examples.pacman.util :refer [into-channel proxy-request]]
[dataview.loader :refer [fetch-image fetch-text]])
(:require-macros [cljs.core.async.macros :refer [go]]))
(def canvas (.getElementById js/document "pacman-canvas"))
(def ctx (when canvas (.getContext canvas "2d")))
(def cell-size 12)
28 + newline
(def height 31)
(def background-size
(mapv (partial * cell-size) [width height]))
(def start-position
"Pacman's starting position"
(mapv (partial * cell-size) [13.5 23]))
TODO arbitrary number alert
(def url {
:sprite-map "-hull/big-bang/master/examples/pacman/data/spritemap-192.png"
:levels ["-hull/big-bang/master/examples/pacman/data/" ".txt"]})
(def sprites (let [c (chan 1)]
(go (into-channel c (repeat (<! (fetch-image (proxy-request (:sprite-map url)))))))
c))
(def level
(memoize
(fn [n]
(let [[prefix suffix] (:levels url)
url (str prefix n suffix)
c (chan 1)]
(go (into-channel c (repeat (<! (fetch-text (proxy-request url))))))
c)))) | null | https://raw.githubusercontent.com/rm-hull/big-bang/2825e7f0bb7615e1158a72d58f426bc1e33bd9ef/examples/pacman/src/config.cljs | clojure | (ns big-bang.examples.pacman.config
(:require [cljs.core.async :refer [chan <!] :as async]
[big-bang.examples.pacman.util :refer [into-channel proxy-request]]
[dataview.loader :refer [fetch-image fetch-text]])
(:require-macros [cljs.core.async.macros :refer [go]]))
(def canvas (.getElementById js/document "pacman-canvas"))
(def ctx (when canvas (.getContext canvas "2d")))
(def cell-size 12)
28 + newline
(def height 31)
(def background-size
(mapv (partial * cell-size) [width height]))
(def start-position
"Pacman's starting position"
(mapv (partial * cell-size) [13.5 23]))
TODO arbitrary number alert
(def url {
:sprite-map "-hull/big-bang/master/examples/pacman/data/spritemap-192.png"
:levels ["-hull/big-bang/master/examples/pacman/data/" ".txt"]})
(def sprites (let [c (chan 1)]
(go (into-channel c (repeat (<! (fetch-image (proxy-request (:sprite-map url)))))))
c))
(def level
(memoize
(fn [n]
(let [[prefix suffix] (:levels url)
url (str prefix n suffix)
c (chan 1)]
(go (into-channel c (repeat (<! (fetch-text (proxy-request url))))))
c)))) | |
f861df65420ae4f4454a2bf9a5c97a0c6ca5524355bdadab949ddd476d9e88cd | haskell-tools/haskell-tools | Removed.hs | module Refactor.OrganizeImports.Removed where
import Control.Monad () | null | https://raw.githubusercontent.com/haskell-tools/haskell-tools/b1189ab4f63b29bbf1aa14af4557850064931e32/src/builtin-refactorings/examples/Refactor/OrganizeImports/Removed.hs | haskell | module Refactor.OrganizeImports.Removed where
import Control.Monad () | |
b9598a8adf5d6230b33e5d90550e0e98d3c1ab719ab07066a5441870b60d3d9f | igorhvr/bedlam | template-util.scm | (load "util.scm")
(import string-io)
(define-java-classes
<java.util.hashtable>
<java.util.vector>)
(define-generic-java-methods
put
add)
;;convert scheme data structure into template data structure
(define (->template data)
(cond
[(java-object? data) ;pass through
data]
[(list? data) ;convert to hashtable
(let ([ht (java-new <java.util.hashtable>)])
(for-each (lambda (e)
(put ht (->template (car e)) (->template (cdr e))))
data)
ht)]
[(vector? data) ;convert to vector
(let ([v (java-new <java.util.vector>)])
(for-each (lambda (e) (add v (->template e))) (vector->list data))
v)]
[else ;convert to string
(->jstring (call-with-output-string (lambda (p)
(display data p))))]))
;;
(define-generic-java-methods
get-request-dispatcher
forward)
(define (fill-template template data request response)
(set-attribute request (->jstring "templateData") (->template data))
(forward (get-request-dispatcher
request
(->jstring (string-append "/templates/" template ".jsp")))
request
response))
(define (display-form form data)
(call/cc (lambda (k) (display-page form
(cons `(cont . ,(store-k k))
data)))))
(define (display-page page data)
(fill-template page data (current-request) (current-response))
((current-return) #f))
| null | https://raw.githubusercontent.com/igorhvr/bedlam/b62e0d047105bb0473bdb47c58b23f6ca0f79a4e/sisc/sisc-contrib/servlets/examples/web/scheme/template-util.scm | scheme | convert scheme data structure into template data structure
pass through
convert to hashtable
convert to vector
convert to string
| (load "util.scm")
(import string-io)
(define-java-classes
<java.util.hashtable>
<java.util.vector>)
(define-generic-java-methods
put
add)
(define (->template data)
(cond
data]
(let ([ht (java-new <java.util.hashtable>)])
(for-each (lambda (e)
(put ht (->template (car e)) (->template (cdr e))))
data)
ht)]
(let ([v (java-new <java.util.vector>)])
(for-each (lambda (e) (add v (->template e))) (vector->list data))
v)]
(->jstring (call-with-output-string (lambda (p)
(display data p))))]))
(define-generic-java-methods
get-request-dispatcher
forward)
(define (fill-template template data request response)
(set-attribute request (->jstring "templateData") (->template data))
(forward (get-request-dispatcher
request
(->jstring (string-append "/templates/" template ".jsp")))
request
response))
(define (display-form form data)
(call/cc (lambda (k) (display-page form
(cons `(cont . ,(store-k k))
data)))))
(define (display-page page data)
(fill-template page data (current-request) (current-response))
((current-return) #f))
|
7eb548d130e94cec179f8a5d680fc2b1fa87b73a9b6ac4209d1fb51b8b481232 | kupl/FixML | sub24.ml | type metro = STATION of name
| AREA of name * metro
| CONNECT of metro * metro
and name = string
let rec checkArea n e =
match e with
AREA(a, b) -> (checkArea (n @ [a]) b)
| CONNECT(a, b) -> ((checkArea n a) && (checkArea n b))
| STATION a -> List.exists (fun x -> x = a) n
let rec checkMetro e =
match e with
AREA(a, b) -> checkArea [a] b
| CONNECT(a,b) -> (checkMetro a) && (checkMetro b)
| STATION n -> true
| null | https://raw.githubusercontent.com/kupl/FixML/0a032a733d68cd8ccc8b1034d2908cd43b241fce/benchmarks/wellformedness/wellformedness1/submissions/sub24.ml | ocaml | type metro = STATION of name
| AREA of name * metro
| CONNECT of metro * metro
and name = string
let rec checkArea n e =
match e with
AREA(a, b) -> (checkArea (n @ [a]) b)
| CONNECT(a, b) -> ((checkArea n a) && (checkArea n b))
| STATION a -> List.exists (fun x -> x = a) n
let rec checkMetro e =
match e with
AREA(a, b) -> checkArea [a] b
| CONNECT(a,b) -> (checkMetro a) && (checkMetro b)
| STATION n -> true
| |
884bd4af25af76343b1e920abbcbee6da4af6ce96602e9aba7760a1b3596eab7 | kappelmann/engaging-large-scale-functional-programming | Exercise05.hs | # LANGUAGE TupleSections #
module Exercise05 where
import Data.Array (Array, array, bounds, (!), (//))
type Pos = (Int, Int)
data E = Blank | Rock | Me | Flag | Old | Pass
deriving (Eq)
showMaze :: Int -> Int -> [Pos] -> Pos -> Pos -> String
showMaze h w rocks start flag = unlines [[showPos i j | j <- [0 .. w - 1]] | i <- [0 .. h - 1]]
where
showPos i j
| (i, j) `elem` rocks = 'X'
| (i, j) == start = 'S'
| (i, j) == flag = 'F'
| otherwise = '.'
isValid :: Array (Int, Int) E -> (Int, Int) -> Bool
isValid game (x, y) = here /= Rock
where
here = game ! (x, y)
isGood :: Array (Int, Int) E -> (Int, Int) -> Bool
isGood game (x, y) = here /= Old && here /= Pass
where
here = game ! (x, y)
isNice :: Array (Int, Int) E -> (Int, Int) -> Bool
isNice game (x, y) = here /= Old && here /= Me
where
here = game ! (x, y)
fill :: Array (Int, Int) E -> [(Int, Int)] -> Array (Int, Int) E
fill game fs = game // map (,Pass) (filter (isGood game) fs)
propG :: (Int, Int) -> Array (Int, Int) E -> ([(Int, Int)], Array (Int, Int) E)
propG (x, y) game = (goodNewHeads, filledMapAgainA)
where
(_, (maxX, maxY)) = bounds game
up = reverse (takeWhile (isValid game) (map (,y) [x .. 0]))
(upFirst : ups) = up
down = reverse (takeWhile (isValid game) (map (,y) [x .. maxX]))
(downFirst : downs) = down
left = reverse (takeWhile (isValid game) (map (x,) [y .. 0]))
(leftFirst : lefts) = left
right = reverse (takeWhile (isValid game) (map (x,) [y .. maxY]))
(rightFirst : rights) = right
toFills = (if (not . null) up then ups else []) ++ (if (not . null) down then downs else []) ++ (if (not . null) left then lefts else []) ++ (if (not . null) right then rights else [])
filledMap = fill game toFills
newHeads = [upFirst | (not . null) up] ++ [downFirst | (not . null) down] ++ [leftFirst | (not . null) left] ++ [rightFirst | (not . null) right]
goodNewHeads = filter (isNice filledMap) newHeads
filledMapAgain = filledMap // map (,Me) goodNewHeads
filledMapAgainA = filledMapAgain // [((x, y), Old)]
step :: [(Int, Int)] -> Array (Int, Int) E -> ([(Int, Int)], Array (Int, Int) E)
step [] m = ([], m)
step (head : hs) m = (restH ++ now, nowMap)
where
(restH, afterMap) = step hs m
(now, nowMap) = propG head afterMap
walk :: [(Int, Int)] -> (Int, Int) -> Array (Int, Int) E -> Int
walk [] flag map = -1
walk hs flag map = if afterMap ! flag == Flag then 1 + walk hs flag afterMap else 1
where
(hs, afterMap) = step hs map
incredibleGame :: Int -> Int -> [Pos] -> Pos -> Pos -> Int
incredibleGame h w rocks start flag
| start == flag = 0
| otherwise = walk [start] flag fullMap
where
blankMap = array ((0, 0), (h, w)) [((x, y), Blank) | x <- [0 .. h -1], y <- [0 .. w -1]]
fullMap = blankMap // ((start, Me) : (flag, Flag) : map (,Rock) rocks)
| null | https://raw.githubusercontent.com/kappelmann/engaging-large-scale-functional-programming/8ed2c056fbd611f1531230648497cb5436d489e4/resources/contest/example_data/05/uploads/artemishatesleadingdigits/Exercise05.hs | haskell | # LANGUAGE TupleSections #
module Exercise05 where
import Data.Array (Array, array, bounds, (!), (//))
type Pos = (Int, Int)
data E = Blank | Rock | Me | Flag | Old | Pass
deriving (Eq)
showMaze :: Int -> Int -> [Pos] -> Pos -> Pos -> String
showMaze h w rocks start flag = unlines [[showPos i j | j <- [0 .. w - 1]] | i <- [0 .. h - 1]]
where
showPos i j
| (i, j) `elem` rocks = 'X'
| (i, j) == start = 'S'
| (i, j) == flag = 'F'
| otherwise = '.'
isValid :: Array (Int, Int) E -> (Int, Int) -> Bool
isValid game (x, y) = here /= Rock
where
here = game ! (x, y)
isGood :: Array (Int, Int) E -> (Int, Int) -> Bool
isGood game (x, y) = here /= Old && here /= Pass
where
here = game ! (x, y)
isNice :: Array (Int, Int) E -> (Int, Int) -> Bool
isNice game (x, y) = here /= Old && here /= Me
where
here = game ! (x, y)
fill :: Array (Int, Int) E -> [(Int, Int)] -> Array (Int, Int) E
fill game fs = game // map (,Pass) (filter (isGood game) fs)
propG :: (Int, Int) -> Array (Int, Int) E -> ([(Int, Int)], Array (Int, Int) E)
propG (x, y) game = (goodNewHeads, filledMapAgainA)
where
(_, (maxX, maxY)) = bounds game
up = reverse (takeWhile (isValid game) (map (,y) [x .. 0]))
(upFirst : ups) = up
down = reverse (takeWhile (isValid game) (map (,y) [x .. maxX]))
(downFirst : downs) = down
left = reverse (takeWhile (isValid game) (map (x,) [y .. 0]))
(leftFirst : lefts) = left
right = reverse (takeWhile (isValid game) (map (x,) [y .. maxY]))
(rightFirst : rights) = right
toFills = (if (not . null) up then ups else []) ++ (if (not . null) down then downs else []) ++ (if (not . null) left then lefts else []) ++ (if (not . null) right then rights else [])
filledMap = fill game toFills
newHeads = [upFirst | (not . null) up] ++ [downFirst | (not . null) down] ++ [leftFirst | (not . null) left] ++ [rightFirst | (not . null) right]
goodNewHeads = filter (isNice filledMap) newHeads
filledMapAgain = filledMap // map (,Me) goodNewHeads
filledMapAgainA = filledMapAgain // [((x, y), Old)]
step :: [(Int, Int)] -> Array (Int, Int) E -> ([(Int, Int)], Array (Int, Int) E)
step [] m = ([], m)
step (head : hs) m = (restH ++ now, nowMap)
where
(restH, afterMap) = step hs m
(now, nowMap) = propG head afterMap
walk :: [(Int, Int)] -> (Int, Int) -> Array (Int, Int) E -> Int
walk [] flag map = -1
walk hs flag map = if afterMap ! flag == Flag then 1 + walk hs flag afterMap else 1
where
(hs, afterMap) = step hs map
incredibleGame :: Int -> Int -> [Pos] -> Pos -> Pos -> Int
incredibleGame h w rocks start flag
| start == flag = 0
| otherwise = walk [start] flag fullMap
where
blankMap = array ((0, 0), (h, w)) [((x, y), Blank) | x <- [0 .. h -1], y <- [0 .. w -1]]
fullMap = blankMap // ((start, Me) : (flag, Flag) : map (,Rock) rocks)
| |
64b96b221226dd0a0d638329de1258c9d61165caf0e4177778477fdfd23a4c33 | bennn/dissertation | 2019-05-17.rkt | #lang racket
(require plot)
added by
(require racket/gui)
(define f (new frame% [parent #f] [label "3D Graph"] [min-width 400] [min-height 400]))
(define ec (new editor-canvas% [parent f]))
(define t (new text%))
(send ec set-editor t)
(define ((make-current-value-renderer fn) snip event x y)
(define overlays
(and x y (eq? (send event get-event-type) 'motion)
(list (vrule x #:style 'long-dash)
(point-label (vector x (fn x)) #:anchor 'auto))))
(send snip set-overlay-renderers overlays))
(define snip (plot-snip (function sin) #:x-min 0 #:x-max (* 2 pi) #:y-min -1.5 #:y-max 1.5))
(send snip set-mouse-event-callback (make-current-value-renderer sin))
(send t insert snip)
(send f show #t)
| null | https://raw.githubusercontent.com/bennn/dissertation/779bfe6f8fee19092849b7e2cfc476df33e9357b/dissertation/QA/transient-expressive/plot/2019-05-17.rkt | racket | #lang racket
(require plot)
added by
(require racket/gui)
(define f (new frame% [parent #f] [label "3D Graph"] [min-width 400] [min-height 400]))
(define ec (new editor-canvas% [parent f]))
(define t (new text%))
(send ec set-editor t)
(define ((make-current-value-renderer fn) snip event x y)
(define overlays
(and x y (eq? (send event get-event-type) 'motion)
(list (vrule x #:style 'long-dash)
(point-label (vector x (fn x)) #:anchor 'auto))))
(send snip set-overlay-renderers overlays))
(define snip (plot-snip (function sin) #:x-min 0 #:x-max (* 2 pi) #:y-min -1.5 #:y-max 1.5))
(send snip set-mouse-event-callback (make-current-value-renderer sin))
(send t insert snip)
(send f show #t)
| |
227b9c5523dbb141b3d79ed642ef293a97c737f0b8124a73544f16a7d7ff23a2 | upgradingdave/cljs | lock.cljs | (ns up.auth0.lock
(:require [cljsjs.auth0-lock]
[clojure.walk :as w]
[up.cookies.core :as c]))
(def default-domain "upgradingdave.auth0.com")
(def default-client-id "zxaleUBMxSvDbCCXPjidSCWnljd9ulmF")
(def lock-opts
{:languageDictionary {:title "Clojure Bingo!"}
:theme {:logo ""
:primaryColor "#0f2242"}})
(defn create-client
[& [{:keys [domain client-id]
:or {domain default-domain
client-id default-client-id}
:as opts}]]
(js/Auth0Lock. client-id domain (clj->js lock-opts)))
(defn show [lock]
(.show lock))
;; Example of profile response
{ : name " " , : picture
;; "-7266-pp-madmen_fullbody_normal.jpg",
: description " Follow Your Bliss . " , : lang " en " , : location
" Fredericksburg , VA " , : screen_name " upgradingdave " , : time_zone
" Central Time ( US & Canada ) " , : url " " ,
: utc_offset -21600 , : " zxaleUBMxSvDbCCXPjidSCWnljd9ulmF " ,
: updated_at " 2016 - 11 - 29T21:59:56.918Z " , : user_id
" twitter|185300550 " , : nickname " " , : identities # js
;; [#js {:provider "twitter", :user_id "185300550", :connection
" twitter " , : isSocial true } ] , : created_at
" 2016 - 11 - 29T21:10:47.126Z " , : global_client_id
;; "Sr7Qhh1h7VLJYdAOPBMKRtcfHboLomNL"}
(defn get-profile!
"Call auth0 to try and get profile information"
[lock id-token & [cb]]
(.getProfile
lock id-token
(fn [error profile]
TODO handle errors
(when (not error)
(let [profile (w/keywordize-keys (js->clj profile))
session {:profile profile
:id-token id-token}]
(c/set-cookie! "auth0" session)
(cb session))))))
(defn handle-authentication!
"Setup listener for authentication. Optional callback will be passed
single argument (a map of id-token and profile)"
[lock & [cb]]
(.on lock "authenticated"
(fn [auth-result]
(get-profile! lock (.-idToken auth-result) cb))))
(defn get-auth0-session
"Attempts to find profile and id-token inside cookie."
[]
(c/get-cookie "auth0"))
(defn logout []
(c/remove-cookie! "auth0"))
| null | https://raw.githubusercontent.com/upgradingdave/cljs/1026b6db905214586fb7e04800df078da19b37cc/src/cljs/up/auth0/lock.cljs | clojure | Example of profile response
"-7266-pp-madmen_fullbody_normal.jpg",
[#js {:provider "twitter", :user_id "185300550", :connection
"Sr7Qhh1h7VLJYdAOPBMKRtcfHboLomNL"} | (ns up.auth0.lock
(:require [cljsjs.auth0-lock]
[clojure.walk :as w]
[up.cookies.core :as c]))
(def default-domain "upgradingdave.auth0.com")
(def default-client-id "zxaleUBMxSvDbCCXPjidSCWnljd9ulmF")
(def lock-opts
{:languageDictionary {:title "Clojure Bingo!"}
:theme {:logo ""
:primaryColor "#0f2242"}})
(defn create-client
[& [{:keys [domain client-id]
:or {domain default-domain
client-id default-client-id}
:as opts}]]
(js/Auth0Lock. client-id domain (clj->js lock-opts)))
(defn show [lock]
(.show lock))
{ : name " " , : picture
: description " Follow Your Bliss . " , : lang " en " , : location
" Fredericksburg , VA " , : screen_name " upgradingdave " , : time_zone
" Central Time ( US & Canada ) " , : url " " ,
: utc_offset -21600 , : " zxaleUBMxSvDbCCXPjidSCWnljd9ulmF " ,
: updated_at " 2016 - 11 - 29T21:59:56.918Z " , : user_id
" twitter|185300550 " , : nickname " " , : identities # js
" twitter " , : isSocial true } ] , : created_at
" 2016 - 11 - 29T21:10:47.126Z " , : global_client_id
(defn get-profile!
"Call auth0 to try and get profile information"
[lock id-token & [cb]]
(.getProfile
lock id-token
(fn [error profile]
TODO handle errors
(when (not error)
(let [profile (w/keywordize-keys (js->clj profile))
session {:profile profile
:id-token id-token}]
(c/set-cookie! "auth0" session)
(cb session))))))
(defn handle-authentication!
"Setup listener for authentication. Optional callback will be passed
single argument (a map of id-token and profile)"
[lock & [cb]]
(.on lock "authenticated"
(fn [auth-result]
(get-profile! lock (.-idToken auth-result) cb))))
(defn get-auth0-session
"Attempts to find profile and id-token inside cookie."
[]
(c/get-cookie "auth0"))
(defn logout []
(c/remove-cookie! "auth0"))
|
ee424b30556de40c296d1800789046f1f2467d47eb6039affbe252579625c959 | dbuenzli/vg | rhtmlc.ml | ---------------------------------------------------------------------------
Copyright ( c ) 2013 The vg programmers . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
---------------------------------------------------------------------------
Copyright (c) 2013 The vg programmers. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
---------------------------------------------------------------------------*)
Renders the Vg image database to the HTML canvas element
open Gg
open Vg
open Mui
include Db_contents
let app_name = "rhtmlc"
let str = Format.sprintf
let pp = Format.fprintf
let pp_str = Format.pp_print_string
let pp_renderer ppf = function
| `CNV -> pp ppf "CNV"
| `SVG -> pp ppf "SVG"
| `PDF -> pp ppf "PDF"
| `TXT -> pp ppf "TXT"
let to_str_of_pp pp v =
Format.fprintf Format.str_formatter "%a" pp v;
Format.flush_str_formatter ()
let src_link =
format_of_string "#L%d"
let open_sans_xbold = match Vgr_pdf.otf_font Open_sans.extra_bold with
| Error e -> Log.msg "%a" Otfm.pp_error e; `Sans
| Ok otf -> otf
let font f = match f.Font.name, f.Font.weight with (* PDF font resolver. *)
| "Open Sans", `W800 -> open_sans_xbold
| _ -> Vgr_pdf.font f
(* Resolution *)
(* Despite the [_ppcm] name, values are samples per meter:
   11811 /m *. 2.54 /. 100. = 300 ppi (see [pp_res]). *)
let ppi_300 = 11811.
let res_ppcm = [ 2834.; 3779.; 5905.; 11811.; 23622. ]
(* Display a per-meter resolution [d] rounded to pixels per inch. *)
let pp_res ppf d = pp ppf "%3d ppi" (Float.int_of_round ((d *. 2.54) /. 100.))
(* Persistent ui state. *)
(* Persistent ui state, kept in the browser via [Store]; the selected
   image id is also mirrored in the URL hash. *)
module S = struct
let store_version = "%%VERSION%%-005"
(* presumably discards state stored under another version — confirm in Mui. *)
let () = Store.force_version store_version
type t =
{ id : string; (* selected image id. *)
tags : string list; (* selected tags. *)
renderer : [ `CNV | `SVG | `PDF | `TXT ]; (* selected renderer. *)
white_bg : bool; (* white background. *)
resolution : float; } (* render resolution. *)
let state : t Store.key = Store.key ()
let default =
{ id = "rmark-ticks";
tags = [];
renderer = `CNV;
white_bg = true;
resolution = ppi_300; }
(* [set s] persists [s] and returns it. *)
let set s = Store.add state s; s
(* [get ()] is the stored state; falls back to [default] when absent and
   repairs the image id when it no longer exists in the database. *)
let get () = match Store.find state with
| None -> set default
| Some s ->
match Db.find s.id with (* check id still exists. *)
| Some i -> s
| None -> set { s with id = (List.hd (Db.all ())).Db.id }
(* The URL hash is just the selected image id. *)
let to_hash s = s.id
(* [set_hash s hash] selects and persists image [hash] if it exists. *)
let set_hash s hash =
if hash = "" then `Fail else
if Db.mem hash then `Ok (set { s with id = hash }) else
`Fail
(* Image ids matching the current tag filter (all ids when no tags). *)
let ids s =
let imgs = match s.tags with [] -> Db.all () | tags -> Db.search ~tags () in
List.map (fun i -> i.Db.id) imgs
(* The selected image; [get] guarantees the id exists. *)
let image s = match Db.find s.id with
| Some i -> i | None -> assert false
end
(* Render *)
(* Renderer tags in ui order. *)
let renderers = [ `CNV; `SVG; `PDF; `TXT ]

(* [render ?limit ?warn target dst i finish] renders database image [i]
   to destination [dst] with renderer [target]. Rendering is stepped:
   on [`Partial] the next step is scheduled with a 0-delay so the
   browser ui stays responsive. [finish ~exn dur steps] gets the elapsed
   time (s) and step count; [~exn:true] when an exception interrupted
   rendering. *)
let render ?limit ?warn target dst i finish =
  Log.msg "Render: %s" i.Db.id;
  let r = Vgr.create ?limit ?warn target dst in
  let warn w = match warn with None -> () | Some warn -> warn w in
  let start = Time.now () in
  let rec loop steps v = match Vgr.render r v with
  | `Ok ->
      let rec flush steps v = match Vgr.render r v with
      | `Partial -> flush (steps + 1) v
      | `Ok -> finish ~exn:false (Time.now () -. start) steps
      in
      flush steps `End
  | `Partial ->
      Time.delay 0. begin fun () ->
        try (loop (steps + 1) `Await) with
        | _e ->
            (* NOTE: any exception is reported as a stack overflow; deep
               recursion is the expected failure mode here. *)
            warn (`Other "Sorry, a stack overflow occurred.");
            finish ~exn:true (Time.now () -. start) steps
      end
  in
  try loop 1 (`Image (Db.renderable i)) with
  | _e ->
      warn (`Other "Sorry, a stack overflow occurred.");
      finish ~exn:true (Time.now () -. start) 0
(* User interface *)
(* Padding added around PDF renders (same units as [Db] image sizes). *)
let pdf_pad = 2.

(* Blinking render-activity indicator. Returns the ui element and a
   function to start ([true]) or stop ([false]) the blink animation. *)
let ui_activity () : 'a Ui.t * (bool -> unit) =
  let active, _ = Ui.text ~id:"r-rendering" "" in (* setter unused. *)
  let started = ref false in
  let on = ref false in (* current blink phase. *)
  let rec animate () =
    let toggle () = Ui.classify active "r-activity" !on in
    if not !started
    then (on := false; toggle ())
    else (on := not !on; toggle (); Time.delay 0.4 animate)
  in
  let activate start =
    (* structural [=]; the original's physical [==] on bools was accidental. *)
    if !started = start then () else
    (started := start; if !started then animate ())
  in
  active, activate
(* Render statistics line: "Rendered in Nms[ and N steps].". Returns the
   ui and a setter taking the duration (s) and the step count. *)
let ui_render_stats () : 'a Ui.t * (float -> int -> unit) =
let g = Ui.group ~id:"r-stats" () in
let time, set_time = Ui.text ~id:"r-time" "" in
let steps, set_steps = Ui.text ~id:"r-steps" "" in
let set_stats dur steps =
(* [dur] is in seconds; shown rounded to milliseconds. *)
let dur = str "%dms" (Float.int_of_round (dur *. 1000.)) in
let steps = if steps = 1 then "" else str " and %d steps" steps in
set_time dur; set_steps steps
in
g *> (fst (Ui.text "Rendered in ")) *> time *> steps *> (fst (Ui.text ".")),
set_stats
(* Image information panel: title (links to the source line), author
   (links to their website) and an optional note. Returns the ui and a
   setter updating it for a state's selected image. *)
let ui_image_info () : 'a Ui.t * (S.t -> unit) =
let g = Ui.group ~id:"r-image-info" () in
let title, title_conf =
Ui.link ~id:"r-title" ~title:"See the image's source code" ~href:"#" ""
in
let author, author_conf =
Ui.link ~id:"r-author" ~title:"See the author's website" ~href:"#" ""
in
let note, set_note = Ui.text ~id:"r-note" "" in
let set_image_info s =
let i = S.image s in
let src_url = str src_link (fst i.Db.loc) (snd i.Db.loc) in
title_conf (`Text i.Db.title);
title_conf (`Href src_url);
author_conf (`Text (fst i.Db.author));
author_conf (`Href (snd i.Db.author));
(* The note line is hidden (with relayout) when the image has none. *)
begin match i.Db.note with
| None -> Ui.visible ~relayout:true note false
| Some n -> set_note n; Ui.visible note true
end;
in
g *> (Ui.group () *> title *> author) *> note,
set_image_info
(* Warning log ui: a select element listing accumulated render warnings
   (printed with [ppf]). Returns the ui, a function recording a warning,
   one clearing the log, and one refreshing the displayed list. *)
let ui_log ppf : 'a Ui.t * ('b -> unit) * (unit -> unit) * (unit -> unit) =
  let log, conf_log = Ui.select ppf None ~id:"r-log" [] in
  let entries = ref [] in
  let update_log () = conf_log (`List !entries) in
  let add_log w = entries := w :: !entries in
  let clear_log () = entries := []; update_log () in
  log, add_log, clear_log, update_log
(* Render target area: one ui per renderer (canvas + PNG download link,
   SVG download link, embedded PDF object, text dump of the image
   value); only the ui of the active renderer is shown. Returns the
   targets ui, a [render] function and the activity indicator ui. *)
let ui_render_targets () =
let targets = Ui.group ~id:"r-targets" () in
let activity, activate = ui_activity () in
let txt = Ui.group ~id:"r-txt" () in
let cnv, canvas = Ui.canvas ~id:"r-canvas" () in
let cnv_link, conf_cnv_link =
Ui.link ~id:"cnv-link" ~title:"Download PNG file" ~href:"#" ""
in
let svg_link, conf_svg_link =
Ui.link ~id:"svg-link" ~title:"Download SVG file" ~href:"#" ""
in
let pdf, conf_pdf = Ui.object_ ~id:"r-pdf" () in
let pdf_link, conf_pdf_link =
Ui.link ~id:"pdf-link" ~title:"Download PDF file" ~href:"#"
"No viewer: Download PDF"
in
let uis = [`CNV, cnv_link; `SVG, svg_link; `PDF, pdf; `TXT, txt ] in
(* Show only the ui for target [t], sizing the area for image [i].
   A tag not in [uis] (e.g. [`N] on error) hides every target. *)
let show_target i t =
let set (t', ui) = Ui.visible ~relayout:true ui (t = t') in
List.iter set uis;
let height = match t with (* adjust height to baseline *)
| `CNV | `SVG | `PDF ->
let pad = if t = `PDF then pdf_pad else 0. in
let baseline = 18 (* from the style sheet..., getting dynamically
problem at init time. If you delay, flickers. *)
in
(* mm to css px (25.4 / 96 = 0.2646), rounded to a whole number of
   baselines. *)
let height = (pad +. Size2.h i.Db.size) /. 0.2646 in
let adjust = Float.int_of_round (height /. (float baseline)) in
str "%dpx" (adjust * baseline)
| _ -> "auto"
in
Ui.set_height targets height;
in
(* Render the image selected by state [s]. The inner [finish] closures
   shadow the caller's [finish]; each first checks [valid] so stale
   results are dropped when the user changed image or renderer. *)
let render s ~warn ~start ~finish =
let valid s =
let current = S.get () in
S.image s == S.image current && s.S.renderer = current.S.renderer
in
let i = S.image s in
start ();
activate true;
match s.S.renderer with
| `CNV ->
let finish ~exn dur steps =
if not (valid s) then () (* user moved on *) else
if exn then (activate false; finish dur steps; show_target i `N) else
let url = Ui.canvas_data canvas in
conf_cnv_link (`Href url);
conf_cnv_link (`Download (str "%s.png" i.Db.id));
activate false;
finish dur steps;
show_target i `CNV;
in
let resolution = (V2.v s.S.resolution s.S.resolution) in
render ~warn (Vgr_htmlc.target ~resolution canvas) `Other i finish
| `SVG ->
let b = Buffer.create 2048 in
let finish ~exn dur steps =
if not (valid s) then () (* user moved on *) else
if exn then (activate false; finish dur steps; show_target i `N) else
let svg = Buffer.contents b in
let u = "data:image/svg+xml;base64," ^
(Ui.escape_binary (Buffer.contents b))
in
conf_svg_link (`Href u);
conf_svg_link (`Download (str "%s.svg" i.Db.id));
Ui.set_svg_child svg_link svg;
activate false;
finish dur steps;
show_target i `SVG
in
let create_date, creator_tool = Time.now (), app_name in
let xmp = Db.xmp ~create_date ~creator_tool i in
let t = Vgr_svg.target ~xml_decl:true ~xmp () in
render ~limit:20 ~warn t (`Buffer b) i finish;
| `TXT ->
(* Pretty-print the image value itself; done in one delayed chunk. *)
let b = Buffer.create 2048 in
let ppf = Format.formatter_of_buffer b in
let _, _, img = Db.renderable i in
let start = Time.now () in
activate true;
Time.delay 0. begin fun () ->
if not (valid s) then () else
begin
pp ppf "%a@?" I.pp img;
let dur = Time.now () -. start in
Ui.set_txt_child txt (Buffer.contents b);
activate false;
finish dur 0;
show_target i `TXT
end
end
| `PDF ->
let b = Buffer.create 2048 in
let finish ~exn dur steps =
if not (valid s) then () (* user moved on *) else
if exn then (activate false; finish dur steps; show_target i `N) else
let u = "data:application/pdf;base64," ^
(Ui.escape_binary (Buffer.contents b))
in
let size = V2.(i.Db.size + (v pdf_pad pdf_pad)) in
let file = str "%s.pdf" i.Db.id in
conf_pdf_link (`Href u);
conf_pdf_link (`Download file);
conf_pdf (`Name file);
conf_pdf (`Data u);
conf_pdf (`Size (V2.to_tuple size));
activate false;
finish dur steps;
show_target i `PDF
in
let create_date, creator_tool = Time.now (), app_name in
let xmp = Db.xmp ~create_date ~creator_tool i in
let t = Vgr_pdf.target ~font ~xmp () in
render ~limit:20 ~warn t (`Buffer b) i finish;
in
(* Start with every target hidden; [show_target] reveals the right one. *)
List.iter (fun (_, ui) -> Ui.visible ~relayout:true ui false) uis;
(targets *> (cnv_link *> cnv) *> (pdf *> pdf_link) *> svg_link *> txt),
render, activity
(* Image id list and tag filter uis. Returns the id select, its group,
   the tag multi-select, its group, and a setter refreshing the id list
   and counts for a given state. *)
let ui_ids s =
let db_ids, db_tags = Db.indexes () in
let ids, conf_ids =
Ui.select ~title:"Select an image to render" pp_str (Some s.S.id) db_ids
in
let tags, set_tags =
Ui.mselect ~title:"Filter images matching selected tags"
pp_str s.S.tags db_tags
in
let id_count, set_id_count = Ui.text ~id:"id-count" "" in
let tag_count, set_tag_count = Ui.text ~id:"tag-count" "" in
(* Shadows the [Ui.mselect] setter above (which goes unused): refresh
   the filtered id list, counts and current selection. *)
let set_tags s =
let ids = S.ids s in
let ts = s.S.tags in
let sel = if List.mem s.S.id ids then Some s.S.id else None in
let tag_count = (* if ts = [] then "" else *) str "(%d)" (List.length ts) in
let id_count = str "(%d)" (List.length ids) in
set_tag_count tag_count;
set_id_count id_count;
conf_ids (`List ids); conf_ids (`Select sel)
in
let ids_group =
Ui.group ~id:"r-ids" () *>
(Ui.group () *> Ui.label "Images" *> id_count) *> ids
in
let tags_group =
Ui.group ~id:"r-tags" () *>
(Ui.group () *> Ui.label "Tag filter" *> tag_count) *> tags
in
ids, ids_group, tags, tags_group, set_tags
(* Build the whole application ui: wires controls to state updates
   ([link]), installs URL-hash handling ([init]) and assembles the
   layout ([layout]). Returns the root ui. *)
let ui () =
let s = S.get () in
let ids, ids_group, tags, tags_group, set_tags = ui_ids s in
let rends, _ =
Ui.select ~id:"r-rends" ~title:"Select the image renderer"
pp_renderer (Some s.S.renderer) renderers
in
let white, _ = Ui.bool s.S.white_bg in
let res, _ = Ui.menu ~id:"r-res" pp_res s.S.resolution res_ppcm in
let targets, render, activity = ui_render_targets () in
let image_info, set_image_info = ui_image_info () in
let stats, set_stats = ui_render_stats () in
let log, add_log, clear_log, update_log = ui_log Vgr.pp_warning in
let set_white_bg s = Ui.classify targets "white" s.S.white_bg in
(* Propagate a state change from [o] to [n]; re-renders only when a
   field affecting rendering changed (or [force]). *)
let update ~force o n =
let f = force in
let redraw = ref false in
let finish dur steps = set_stats dur steps; update_log () in
if f || o.S.id <> n.S.id then (set_image_info n; redraw := true);
if f || o.S.tags <> n.S.tags then set_tags n;
if f || o.S.renderer <> n.S.renderer then redraw := true;
if f || o.S.white_bg <> n.S.white_bg then set_white_bg n;
if f || o.S.resolution <> n.S.resolution then redraw := true;
if !redraw then render ~warn:add_log ~start:clear_log ~finish n;
Ui.set_hash (S.to_hash n);
in
(* On a control change, compute and persist the new state with [f]. *)
let on_change ui f =
let on_ev v =
let old_s = S.get () in
let new_s = S.set (f old_s v) in
update ~force:false old_s new_s;
in
Ui.on_change ui on_ev
in
let link () =
on_change white (fun s b -> { s with S.white_bg = b });
on_change res (fun s r ->
(* NOTE(review): leftover debug print. *)
Format.printf "RES: %g@." r;
{ s with S.resolution = r });
on_change ids begin fun s id -> match id with
| Some id -> { s with S.id = id }
| None -> s
end;
on_change tags (fun s ts -> { s with S.tags = ts });
on_change rends begin fun s r -> match r with
| Some r -> { s with S.renderer = r}
| None -> s
end;
in
(* URL hash handling; a bad hash is reverted to the current state's. *)
let init () =
let hash_change ~force hash =
let old_s = S.get () in
let new_s = match S.set_hash old_s hash with
| `Ok new_s -> new_s
| `Fail -> Ui.set_hash (S.to_hash old_s); old_s
in
update ~force old_s new_s
in
Ui.on_hash_change (hash_change ~force:false) ;
hash_change ~force:true (Ui.hash ())
in
let layout () =
let header =
Ui.group () ~id:"r-header" *>
Ui.label "Vg Image database" *>
(fst (Ui.text ~id:"r-version" "%%VERSION%%"))
in
let ui =
Ui.group ~id:"r-ui" () *>
ids_group *>
tags_group *>
(Ui.group ~id:"r-rs" () *>
(Ui.group () *> Ui.label "Renderer" *> activity) *> rends) *>
(Ui.group ~id:"r-set" () *>
Ui.label "Settings" *>
(Ui.label
~title:"Render image against a white background"
~ctrl:true "White background" *> white) *>
(Ui.label
~title:"Canvas resolution in pixel per inches"
~ctrl:true "Resolution" *> res))
in
let image =
Ui.group ~id:"r-image" () *> targets *>
(Ui.group ~id:"r-info" () *> image_info *> stats *> log)
in
Ui.group ~id:"r-app" () *> header *> ui *> image
in
link (); init (); layout ()
(* Entry point: log startup and install the application ui. *)
let main () = Log.msg "%s loaded" app_name; Ui.show (ui ())
let () = Ui.main main
---------------------------------------------------------------------------
Copyright ( c ) 2013 The vg programmers
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
---------------------------------------------------------------------------
Copyright (c) 2013 The vg programmers
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
| null | https://raw.githubusercontent.com/dbuenzli/vg/0d84bca1c9dc689f64f923dcd20a7dda7c552011/test/rhtmlc.ml | ocaml | PDF font resolver.
Resolution
Persistent ui state.
selected image id.
selected tags.
selected renderer.
white background.
render resolution.
check id still exists.
Render
User interface
Does this have to be so ugly ?
adjust height to baseline
from the style sheet..., getting dynamically
problem at init time. If you delay, flickers.
user moved on
user moved on
user moved on
if ts = [] then "" else | ---------------------------------------------------------------------------
Copyright ( c ) 2013 The vg programmers . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
---------------------------------------------------------------------------
Copyright (c) 2013 The vg programmers. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
---------------------------------------------------------------------------*)
Renders the Vg image database to the HTML canvas element
open Gg
open Vg
open Mui
include Db_contents
let app_name = "rhtmlc"
let str = Format.sprintf
let pp = Format.fprintf
let pp_str = Format.pp_print_string
let pp_renderer ppf = function
| `CNV -> pp ppf "CNV"
| `SVG -> pp ppf "SVG"
| `PDF -> pp ppf "PDF"
| `TXT -> pp ppf "TXT"
let to_str_of_pp pp v =
Format.fprintf Format.str_formatter "%a" pp v;
Format.flush_str_formatter ()
let src_link =
format_of_string "#L%d"
let open_sans_xbold = match Vgr_pdf.otf_font Open_sans.extra_bold with
| Error e -> Log.msg "%a" Otfm.pp_error e; `Sans
| Ok otf -> otf
| "Open Sans", `W800 -> open_sans_xbold
| _ -> Vgr_pdf.font f
let ppi_300 = 11811.
let res_ppcm = [ 2834.; 3779.; 5905.; 11811.; 23622. ]
let pp_res ppf d = pp ppf "%3d ppi" (Float.int_of_round ((d *. 2.54) /. 100.))
module S = struct
let store_version = "%%VERSION%%-005"
let () = Store.force_version store_version
type t =
let state : t Store.key = Store.key ()
let default =
{ id = "rmark-ticks";
tags = [];
renderer = `CNV;
white_bg = true;
resolution = ppi_300; }
let set s = Store.add state s; s
let get () = match Store.find state with
| None -> set default
| Some s ->
| Some i -> s
| None -> set { s with id = (List.hd (Db.all ())).Db.id }
let to_hash s = s.id
let set_hash s hash =
if hash = "" then `Fail else
if Db.mem hash then `Ok (set { s with id = hash }) else
`Fail
let ids s =
let imgs = match s.tags with [] -> Db.all () | tags -> Db.search ~tags () in
List.map (fun i -> i.Db.id) imgs
let image s = match Db.find s.id with
| Some i -> i | None -> assert false
end
let renderers = [ `CNV; `SVG; `PDF; `TXT ]
let render ?limit ?warn target dst i finish =
Log.msg "Render: %s" i.Db.id;
let r = Vgr.create ?limit ?warn target dst in
let warn w = match warn with None -> () | Some warn -> warn w in
let start = Time.now () in
let rec loop steps v = match Vgr.render r v with
| `Ok ->
let rec flush steps v = match Vgr.render r v with
| `Partial -> flush (steps + 1) v
| `Ok -> finish ~exn:false (Time.now () -. start) steps
in
flush steps `End
| `Partial ->
Time.delay 0. begin fun () ->
try (loop (steps + 1) `Await) with
| e ->
warn (`Other "Sorry, a stack overflow occured.");
finish ~exn:true (Time.now () -. start) steps
end
in
try loop 1 (`Image (Db.renderable i)) with
| e ->
warn (`Other "Sorry, a stack overflow occured.");
finish ~exn:true (Time.now () -. start) 0
let pdf_pad = 2.
let ui_activity () : 'a Ui.t * (bool -> unit) =
let active, set_active = Ui.text ~id:"r-rendering" "" in
let started = ref false in
let state = ref false in
let rec animate () =
let toggle () = Ui.classify active "r-activity" !state; in
if not !started
then (state := false; toggle ())
else (state := not !state; toggle (); Time.delay 0.4 animate)
in
let activate start =
if (!started == start) then () else
(started := start; if (!started) then animate ())
in
active, activate
let ui_render_stats () : 'a Ui.t * (float -> int -> unit) =
let g = Ui.group ~id:"r-stats" () in
let time, set_time = Ui.text ~id:"r-time" "" in
let steps, set_steps = Ui.text ~id:"r-steps" "" in
let set_stats dur steps =
let dur = str "%dms" (Float.int_of_round (dur *. 1000.)) in
let steps = if steps = 1 then "" else str " and %d steps" steps in
set_time dur; set_steps steps
in
g *> (fst (Ui.text "Rendered in ")) *> time *> steps *> (fst (Ui.text ".")),
set_stats
let ui_image_info () : 'a Ui.t * (S.t -> unit) =
let g = Ui.group ~id:"r-image-info" () in
let title, title_conf =
Ui.link ~id:"r-title" ~title:"See the image's source code" ~href:"#" ""
in
let author, author_conf =
Ui.link ~id:"r-author" ~title:"See the author's website" ~href:"#" ""
in
let note, set_note = Ui.text ~id:"r-note" "" in
let set_image_info s =
let i = S.image s in
let src_url = str src_link (fst i.Db.loc) (snd i.Db.loc) in
title_conf (`Text i.Db.title);
title_conf (`Href src_url);
author_conf (`Text (fst i.Db.author));
author_conf (`Href (snd i.Db.author));
begin match i.Db.note with
| None -> Ui.visible ~relayout:true note false
| Some n -> set_note n; Ui.visible note true
end;
in
g *> (Ui.group () *> title *> author) *> note,
set_image_info
let ui_log ppf : 'a Ui.t * ('b -> unit) * (unit -> unit) * (unit -> unit) =
let log, conf_log = Ui.select ppf None ~id:"r-log" [] in
let warns = ref [] in
let add_log w = warns := w :: !warns in
let update_log () = conf_log (`List !warns) in
let clear_log () = warns := []; conf_log (`List []) in
log, add_log, clear_log, update_log
let ui_render_targets () =
let targets = Ui.group ~id:"r-targets" () in
let activity, activate = ui_activity () in
let txt = Ui.group ~id:"r-txt" () in
let cnv, canvas = Ui.canvas ~id:"r-canvas" () in
let cnv_link, conf_cnv_link =
Ui.link ~id:"cnv-link" ~title:"Download PNG file" ~href:"#" ""
in
let svg_link, conf_svg_link =
Ui.link ~id:"svg-link" ~title:"Download SVG file" ~href:"#" ""
in
let pdf, conf_pdf = Ui.object_ ~id:"r-pdf" () in
let pdf_link, conf_pdf_link =
Ui.link ~id:"pdf-link" ~title:"Download PDF file" ~href:"#"
"No viewer: Download PDF"
in
let uis = [`CNV, cnv_link; `SVG, svg_link; `PDF, pdf; `TXT, txt ] in
let show_target i t =
let set (t', ui) = Ui.visible ~relayout:true ui (t = t') in
List.iter set uis;
| `CNV | `SVG | `PDF ->
let pad = if t = `PDF then pdf_pad else 0. in
in
let height = (pad +. Size2.h i.Db.size) /. 0.2646 in
let adjust = Float.int_of_round (height /. (float baseline)) in
str "%dpx" (adjust * baseline)
| _ -> "auto"
in
Ui.set_height targets height;
in
let render s ~warn ~start ~finish =
let valid s =
let current = S.get () in
S.image s == S.image current && s.S.renderer = current.S.renderer
in
let i = S.image s in
start ();
activate true;
match s.S.renderer with
| `CNV ->
let finish ~exn dur steps =
if exn then (activate false; finish dur steps; show_target i `N) else
let url = Ui.canvas_data canvas in
conf_cnv_link (`Href url);
conf_cnv_link (`Download (str "%s.png" i.Db.id));
activate false;
finish dur steps;
show_target i `CNV;
in
let resolution = (V2.v s.S.resolution s.S.resolution) in
render ~warn (Vgr_htmlc.target ~resolution canvas) `Other i finish
| `SVG ->
let b = Buffer.create 2048 in
let finish ~exn dur steps =
if exn then (activate false; finish dur steps; show_target i `N) else
let svg = Buffer.contents b in
let u = "data:image/svg+xml;base64," ^
(Ui.escape_binary (Buffer.contents b))
in
conf_svg_link (`Href u);
conf_svg_link (`Download (str "%s.svg" i.Db.id));
Ui.set_svg_child svg_link svg;
activate false;
finish dur steps;
show_target i `SVG
in
let create_date, creator_tool = Time.now (), app_name in
let xmp = Db.xmp ~create_date ~creator_tool i in
let t = Vgr_svg.target ~xml_decl:true ~xmp () in
render ~limit:20 ~warn t (`Buffer b) i finish;
| `TXT ->
let b = Buffer.create 2048 in
let ppf = Format.formatter_of_buffer b in
let _, _, img = Db.renderable i in
let start = Time.now () in
activate true;
Time.delay 0. begin fun () ->
if not (valid s) then () else
begin
pp ppf "%a@?" I.pp img;
let dur = Time.now () -. start in
Ui.set_txt_child txt (Buffer.contents b);
activate false;
finish dur 0;
show_target i `TXT
end
end
| `PDF ->
let b = Buffer.create 2048 in
let finish ~exn dur steps =
if exn then (activate false; finish dur steps; show_target i `N) else
let u = "data:application/pdf;base64," ^
(Ui.escape_binary (Buffer.contents b))
in
let size = V2.(i.Db.size + (v pdf_pad pdf_pad)) in
let file = str "%s.pdf" i.Db.id in
conf_pdf_link (`Href u);
conf_pdf_link (`Download file);
conf_pdf (`Name file);
conf_pdf (`Data u);
conf_pdf (`Size (V2.to_tuple size));
activate false;
finish dur steps;
show_target i `PDF
in
let create_date, creator_tool = Time.now (), app_name in
let xmp = Db.xmp ~create_date ~creator_tool i in
let t = Vgr_pdf.target ~font ~xmp () in
render ~limit:20 ~warn t (`Buffer b) i finish;
in
List.iter (fun (_, ui) -> Ui.visible ~relayout:true ui false) uis;
(targets *> (cnv_link *> cnv) *> (pdf *> pdf_link) *> svg_link *> txt),
render, activity
let ui_ids s =
let db_ids, db_tags = Db.indexes () in
let ids, conf_ids =
Ui.select ~title:"Select an image to render" pp_str (Some s.S.id) db_ids
in
let tags, set_tags =
Ui.mselect ~title:"Filter images matching selected tags"
pp_str s.S.tags db_tags
in
let id_count, set_id_count = Ui.text ~id:"id-count" "" in
let tag_count, set_tag_count = Ui.text ~id:"tag-count" "" in
let set_tags s =
let ids = S.ids s in
let ts = s.S.tags in
let sel = if List.mem s.S.id ids then Some s.S.id else None in
let id_count = str "(%d)" (List.length ids) in
set_tag_count tag_count;
set_id_count id_count;
conf_ids (`List ids); conf_ids (`Select sel)
in
let ids_group =
Ui.group ~id:"r-ids" () *>
(Ui.group () *> Ui.label "Images" *> id_count) *> ids
in
let tags_group =
Ui.group ~id:"r-tags" () *>
(Ui.group () *> Ui.label "Tag filter" *> tag_count) *> tags
in
ids, ids_group, tags, tags_group, set_tags
let ui () =
let s = S.get () in
let ids, ids_group, tags, tags_group, set_tags = ui_ids s in
let rends, _ =
Ui.select ~id:"r-rends" ~title:"Select the image renderer"
pp_renderer (Some s.S.renderer) renderers
in
let white, _ = Ui.bool s.S.white_bg in
let res, _ = Ui.menu ~id:"r-res" pp_res s.S.resolution res_ppcm in
let targets, render, activity = ui_render_targets () in
let image_info, set_image_info = ui_image_info () in
let stats, set_stats = ui_render_stats () in
let log, add_log, clear_log, update_log = ui_log Vgr.pp_warning in
let set_white_bg s = Ui.classify targets "white" s.S.white_bg in
let update ~force o n =
let f = force in
let redraw = ref false in
let finish dur steps = set_stats dur steps; update_log () in
if f || o.S.id <> n.S.id then (set_image_info n; redraw := true);
if f || o.S.tags <> n.S.tags then set_tags n;
if f || o.S.renderer <> n.S.renderer then redraw := true;
if f || o.S.white_bg <> n.S.white_bg then set_white_bg n;
if f || o.S.resolution <> n.S.resolution then redraw := true;
if !redraw then render ~warn:add_log ~start:clear_log ~finish n;
Ui.set_hash (S.to_hash n);
in
let on_change ui f =
let on_ev v =
let old_s = S.get () in
let new_s = S.set (f old_s v) in
update ~force:false old_s new_s;
in
Ui.on_change ui on_ev
in
let link () =
on_change white (fun s b -> { s with S.white_bg = b });
on_change res (fun s r ->
Format.printf "RES: %g@." r;
{ s with S.resolution = r });
on_change ids begin fun s id -> match id with
| Some id -> { s with S.id = id }
| None -> s
end;
on_change tags (fun s ts -> { s with S.tags = ts });
on_change rends begin fun s r -> match r with
| Some r -> { s with S.renderer = r}
| None -> s
end;
in
let init () =
let hash_change ~force hash =
let old_s = S.get () in
let new_s = match S.set_hash old_s hash with
| `Ok new_s -> new_s
| `Fail -> Ui.set_hash (S.to_hash old_s); old_s
in
update ~force old_s new_s
in
Ui.on_hash_change (hash_change ~force:false) ;
hash_change ~force:true (Ui.hash ())
in
let layout () =
let header =
Ui.group () ~id:"r-header" *>
Ui.label "Vg Image database" *>
(fst (Ui.text ~id:"r-version" "%%VERSION%%"))
in
let ui =
Ui.group ~id:"r-ui" () *>
ids_group *>
tags_group *>
(Ui.group ~id:"r-rs" () *>
(Ui.group () *> Ui.label "Renderer" *> activity) *> rends) *>
(Ui.group ~id:"r-set" () *>
Ui.label "Settings" *>
(Ui.label
~title:"Render image against a white background"
~ctrl:true "White background" *> white) *>
(Ui.label
~title:"Canvas resolution in pixel per inches"
~ctrl:true "Resolution" *> res))
in
let image =
Ui.group ~id:"r-image" () *> targets *>
(Ui.group ~id:"r-info" () *> image_info *> stats *> log)
in
Ui.group ~id:"r-app" () *> header *> ui *> image
in
link (); init (); layout ()
let main () = Log.msg "%s loaded" app_name; Ui.show (ui ())
let () = Ui.main main
---------------------------------------------------------------------------
Copyright ( c ) 2013 The vg programmers
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
---------------------------------------------------------------------------
Copyright (c) 2013 The vg programmers
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.