_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
efa5c3f8c8da8e61d6d0c0aa71ea7a21593cb110b513d18343780f6d4618400f | cedlemo/OCaml-GI-ctypes-bindings-generator | Font_chooser_widget_private.ml | open Ctypes
open Foreign
type t
let t_typ : t structure typ = structure "Font_chooser_widget_private"
| null | https://raw.githubusercontent.com/cedlemo/OCaml-GI-ctypes-bindings-generator/21a4d449f9dbd6785131979b91aa76877bad2615/tools/Gtk3/Font_chooser_widget_private.ml | ocaml | open Ctypes
open Foreign
type t
let t_typ : t structure typ = structure "Font_chooser_widget_private"
| |
dc91507a98f335d3f66d7cd861d2a095af8ea8ef316ddb1c9f220b4f1934ba55 | dparis/gen-phzr | animation_manager.cljs | (ns phzr.animation-manager
(:require [phzr.impl.utils.core :refer [clj->phaser phaser->clj]]
[phzr.impl.extend :as ex]
[cljsjs.phaser])
(:refer-clojure :exclude [next]))
(defn ->AnimationManager
"The Animation Manager is used to add, play and update Phaser Animations.
Any Game Object such as Phaser.Sprite that supports animation contains a single AnimationManager instance.
Parameters:
* sprite (Phaser.Sprite) - A reference to the Game Object that owns this AnimationManager."
([sprite]
(js/Phaser.AnimationManager. (clj->phaser sprite))))
(defn add
"Adds a new animation under the given key. Optionally set the frames, frame rate and loop.
Animations added in this way are played back with the play function.
Parameters:
* animation-manager (Phaser.AnimationManager) - Targeted instance for method
* name (string) - The unique (within this Sprite) name for the animation, i.e. 'run', 'fire', 'walk'.
* frames (Array) {optional} - An array of numbers/strings that correspond to the frames to add to this animation and in which order. e.g. [1, 2, 3] or ['run0', 'run1', run2]). If null then all frames will be used.
* frame-rate (number) {optional} - The speed at which the animation should play. The speed is given in frames per second.
* loop (boolean) {optional} - Whether or not the animation is looped or just plays once.
* use-numeric-index (boolean) {optional} - Are the given frames using numeric indexes (default) or strings?
Returns: Phaser.Animation - The Animation object that was created."
([animation-manager name]
(phaser->clj
(.add animation-manager
(clj->phaser name))))
([animation-manager name frames]
(phaser->clj
(.add animation-manager
(clj->phaser name)
(clj->phaser frames))))
([animation-manager name frames frame-rate]
(phaser->clj
(.add animation-manager
(clj->phaser name)
(clj->phaser frames)
(clj->phaser frame-rate))))
([animation-manager name frames frame-rate loop]
(phaser->clj
(.add animation-manager
(clj->phaser name)
(clj->phaser frames)
(clj->phaser frame-rate)
(clj->phaser loop))))
([animation-manager name frames frame-rate loop use-numeric-index]
(phaser->clj
(.add animation-manager
(clj->phaser name)
(clj->phaser frames)
(clj->phaser frame-rate)
(clj->phaser loop)
(clj->phaser use-numeric-index)))))
(defn destroy
"Destroys all references this AnimationManager contains.
Iterates through the list of animations stored in this manager and calls destroy on each of them."
([animation-manager]
(phaser->clj
(.destroy animation-manager))))
(defn get-animation
"Returns an animation that was previously added by name.
Parameters:
* animation-manager (Phaser.AnimationManager) - Targeted instance for method
* name (string) - The name of the animation to be returned, e.g. 'fire'.
Returns: Phaser.Animation - The Animation instance, if found, otherwise null."
([animation-manager name]
(phaser->clj
(.getAnimation animation-manager
(clj->phaser name)))))
(defn next
"Advances by the given number of frames in the current animation, taking the loop value into consideration.
Parameters:
* animation-manager (Phaser.AnimationManager) - Targeted instance for method
* quantity (number) {optional} - The number of frames to advance."
([animation-manager]
(phaser->clj
(.next animation-manager)))
([animation-manager quantity]
(phaser->clj
(.next animation-manager
(clj->phaser quantity)))))
(defn play
"Play an animation based on the given key. The animation should previously have been added via `animations.add`
If the requested animation is already playing this request will be ignored.
If you need to reset an already running animation do so directly on the Animation object itself.
Parameters:
* animation-manager (Phaser.AnimationManager) - Targeted instance for method
* name (string) - The name of the animation to be played, e.g. 'fire', 'walk', 'jump'.
* frame-rate (number) {optional} - The framerate to play the animation at. The speed is given in frames per second. If not provided the previously set frameRate of the Animation is used.
* loop (boolean) {optional} - Should the animation be looped after playback. If not provided the previously set loop value of the Animation is used.
* kill-on-complete (boolean) {optional} - If set to true when the animation completes (only happens if loop=false) the parent Sprite will be killed.
Returns: Phaser.Animation - A reference to playing Animation instance."
([animation-manager name]
(phaser->clj
(.play animation-manager
(clj->phaser name))))
([animation-manager name frame-rate]
(phaser->clj
(.play animation-manager
(clj->phaser name)
(clj->phaser frame-rate))))
([animation-manager name frame-rate loop]
(phaser->clj
(.play animation-manager
(clj->phaser name)
(clj->phaser frame-rate)
(clj->phaser loop))))
([animation-manager name frame-rate loop kill-on-complete]
(phaser->clj
(.play animation-manager
(clj->phaser name)
(clj->phaser frame-rate)
(clj->phaser loop)
(clj->phaser kill-on-complete)))))
(defn previous
"Moves backwards the given number of frames in the current animation, taking the loop value into consideration.
Parameters:
* animation-manager (Phaser.AnimationManager) - Targeted instance for method
* quantity (number) {optional} - The number of frames to move back."
([animation-manager]
(phaser->clj
(.previous animation-manager)))
([animation-manager quantity]
(phaser->clj
(.previous animation-manager
(clj->phaser quantity)))))
(defn refresh-frame
"Refreshes the current frame data back to the parent Sprite and also resets the texture data."
([animation-manager]
(phaser->clj
(.refreshFrame animation-manager))))
(defn stop
"Stop playback of an animation. If a name is given that specific animation is stopped, otherwise the current animation is stopped.
The currentAnim property of the AnimationManager is automatically set to the animation given.
Parameters:
* animation-manager (Phaser.AnimationManager) - Targeted instance for method
* name (string) {optional} - The name of the animation to be stopped, e.g. 'fire'. If none is given the currently running animation is stopped.
* reset-frame (boolean) {optional} - When the animation is stopped should the currentFrame be set to the first frame of the animation (true) or paused on the last frame displayed (false)"
([animation-manager]
(phaser->clj
(.stop animation-manager)))
([animation-manager name]
(phaser->clj
(.stop animation-manager
(clj->phaser name))))
([animation-manager name reset-frame]
(phaser->clj
(.stop animation-manager
(clj->phaser name)
(clj->phaser reset-frame)))))
(defn validate-frames
"Check whether the frames in the given array are valid and exist.
Parameters:
* animation-manager (Phaser.AnimationManager) - Targeted instance for method
* frames (Array) - An array of frames to be validated.
* use-numeric-index (boolean) {optional} - Validate the frames based on their numeric index (true) or string index (false)
Returns: boolean - True if all given Frames are valid, otherwise false."
([animation-manager frames]
(phaser->clj
(.validateFrames animation-manager
(clj->phaser frames))))
([animation-manager frames use-numeric-index]
(phaser->clj
(.validateFrames animation-manager
(clj->phaser frames)
(clj->phaser use-numeric-index))))) | null | https://raw.githubusercontent.com/dparis/gen-phzr/e4c7b272e225ac343718dc15fc84f5f0dce68023/out/animation_manager.cljs | clojure | (ns phzr.animation-manager
(:require [phzr.impl.utils.core :refer [clj->phaser phaser->clj]]
[phzr.impl.extend :as ex]
[cljsjs.phaser])
(:refer-clojure :exclude [next]))
(defn ->AnimationManager
"The Animation Manager is used to add, play and update Phaser Animations.
Any Game Object such as Phaser.Sprite that supports animation contains a single AnimationManager instance.
Parameters:
* sprite (Phaser.Sprite) - A reference to the Game Object that owns this AnimationManager."
([sprite]
(js/Phaser.AnimationManager. (clj->phaser sprite))))
(defn add
"Adds a new animation under the given key. Optionally set the frames, frame rate and loop.
Animations added in this way are played back with the play function.
Parameters:
* animation-manager (Phaser.AnimationManager) - Targeted instance for method
* name (string) - The unique (within this Sprite) name for the animation, i.e. 'run', 'fire', 'walk'.
* frames (Array) {optional} - An array of numbers/strings that correspond to the frames to add to this animation and in which order. e.g. [1, 2, 3] or ['run0', 'run1', run2]). If null then all frames will be used.
* frame-rate (number) {optional} - The speed at which the animation should play. The speed is given in frames per second.
* loop (boolean) {optional} - Whether or not the animation is looped or just plays once.
* use-numeric-index (boolean) {optional} - Are the given frames using numeric indexes (default) or strings?
Returns: Phaser.Animation - The Animation object that was created."
([animation-manager name]
(phaser->clj
(.add animation-manager
(clj->phaser name))))
([animation-manager name frames]
(phaser->clj
(.add animation-manager
(clj->phaser name)
(clj->phaser frames))))
([animation-manager name frames frame-rate]
(phaser->clj
(.add animation-manager
(clj->phaser name)
(clj->phaser frames)
(clj->phaser frame-rate))))
([animation-manager name frames frame-rate loop]
(phaser->clj
(.add animation-manager
(clj->phaser name)
(clj->phaser frames)
(clj->phaser frame-rate)
(clj->phaser loop))))
([animation-manager name frames frame-rate loop use-numeric-index]
(phaser->clj
(.add animation-manager
(clj->phaser name)
(clj->phaser frames)
(clj->phaser frame-rate)
(clj->phaser loop)
(clj->phaser use-numeric-index)))))
(defn destroy
"Destroys all references this AnimationManager contains.
Iterates through the list of animations stored in this manager and calls destroy on each of them."
([animation-manager]
(phaser->clj
(.destroy animation-manager))))
(defn get-animation
"Returns an animation that was previously added by name.
Parameters:
* animation-manager (Phaser.AnimationManager) - Targeted instance for method
* name (string) - The name of the animation to be returned, e.g. 'fire'.
Returns: Phaser.Animation - The Animation instance, if found, otherwise null."
([animation-manager name]
(phaser->clj
(.getAnimation animation-manager
(clj->phaser name)))))
(defn next
"Advances by the given number of frames in the current animation, taking the loop value into consideration.
Parameters:
* animation-manager (Phaser.AnimationManager) - Targeted instance for method
* quantity (number) {optional} - The number of frames to advance."
([animation-manager]
(phaser->clj
(.next animation-manager)))
([animation-manager quantity]
(phaser->clj
(.next animation-manager
(clj->phaser quantity)))))
(defn play
"Play an animation based on the given key. The animation should previously have been added via `animations.add`
If the requested animation is already playing this request will be ignored.
If you need to reset an already running animation do so directly on the Animation object itself.
Parameters:
* animation-manager (Phaser.AnimationManager) - Targeted instance for method
* name (string) - The name of the animation to be played, e.g. 'fire', 'walk', 'jump'.
* frame-rate (number) {optional} - The framerate to play the animation at. The speed is given in frames per second. If not provided the previously set frameRate of the Animation is used.
* loop (boolean) {optional} - Should the animation be looped after playback. If not provided the previously set loop value of the Animation is used.
* kill-on-complete (boolean) {optional} - If set to true when the animation completes (only happens if loop=false) the parent Sprite will be killed.
Returns: Phaser.Animation - A reference to playing Animation instance."
([animation-manager name]
(phaser->clj
(.play animation-manager
(clj->phaser name))))
([animation-manager name frame-rate]
(phaser->clj
(.play animation-manager
(clj->phaser name)
(clj->phaser frame-rate))))
([animation-manager name frame-rate loop]
(phaser->clj
(.play animation-manager
(clj->phaser name)
(clj->phaser frame-rate)
(clj->phaser loop))))
([animation-manager name frame-rate loop kill-on-complete]
(phaser->clj
(.play animation-manager
(clj->phaser name)
(clj->phaser frame-rate)
(clj->phaser loop)
(clj->phaser kill-on-complete)))))
(defn previous
"Moves backwards the given number of frames in the current animation, taking the loop value into consideration.
Parameters:
* animation-manager (Phaser.AnimationManager) - Targeted instance for method
* quantity (number) {optional} - The number of frames to move back."
([animation-manager]
(phaser->clj
(.previous animation-manager)))
([animation-manager quantity]
(phaser->clj
(.previous animation-manager
(clj->phaser quantity)))))
(defn refresh-frame
"Refreshes the current frame data back to the parent Sprite and also resets the texture data."
([animation-manager]
(phaser->clj
(.refreshFrame animation-manager))))
(defn stop
"Stop playback of an animation. If a name is given that specific animation is stopped, otherwise the current animation is stopped.
The currentAnim property of the AnimationManager is automatically set to the animation given.
Parameters:
* animation-manager (Phaser.AnimationManager) - Targeted instance for method
* name (string) {optional} - The name of the animation to be stopped, e.g. 'fire'. If none is given the currently running animation is stopped.
* reset-frame (boolean) {optional} - When the animation is stopped should the currentFrame be set to the first frame of the animation (true) or paused on the last frame displayed (false)"
([animation-manager]
(phaser->clj
(.stop animation-manager)))
([animation-manager name]
(phaser->clj
(.stop animation-manager
(clj->phaser name))))
([animation-manager name reset-frame]
(phaser->clj
(.stop animation-manager
(clj->phaser name)
(clj->phaser reset-frame)))))
(defn validate-frames
"Check whether the frames in the given array are valid and exist.
Parameters:
* animation-manager (Phaser.AnimationManager) - Targeted instance for method
* frames (Array) - An array of frames to be validated.
* use-numeric-index (boolean) {optional} - Validate the frames based on their numeric index (true) or string index (false)
Returns: boolean - True if all given Frames are valid, otherwise false."
([animation-manager frames]
(phaser->clj
(.validateFrames animation-manager
(clj->phaser frames))))
([animation-manager frames use-numeric-index]
(phaser->clj
(.validateFrames animation-manager
(clj->phaser frames)
(clj->phaser use-numeric-index))))) | |
965888b4063e43803673307a1436ce579898d06c08216308383e1f860ccc177a | samrushing/irken-compiler | t_strlen.scm | ;; -*- Mode: Irken -*-
(include "lib/basis.scm")
(include "lib/map.scm")
(libc/strlen (cstring "howdythere\x00"))
| null | https://raw.githubusercontent.com/samrushing/irken-compiler/690da48852d55497f873738df54f14e8e135d006/tests/t_strlen.scm | scheme | -*- Mode: Irken -*- |
(include "lib/basis.scm")
(include "lib/map.scm")
(libc/strlen (cstring "howdythere\x00"))
|
50da8285986693db82074ca99c8ca9f394dc93edccaea70a0bcb1bb516098ace | WFP-VAM/RAMResourcesScripts | CARI-indicator-calculation.sps | Methods to calculating the Consolidated Approach for Reporting Indicators of Food Security
********CALCULATE CARI using FCS, rCSI, LCS and FES ********
***Food Consumption Score***
***define labels
Variable labels
FCSStap ‘How many days over the last 7 days, did members of your household eat cereals, rains, roots and tubers?’
FCSPulse ‘How many days over the last 7 days, did members of your household eat legumes/nuts?’
FCSDairy ‘How many days over the last 7 days, did members of your household drink/eat milk and other dairy products?’
FCSPr ‘How many days over the last 7 days, did members of your household eat meat, fish and eggs?’
FCSVeg ‘How many days over the last 7 days, did members of your household eat vegetables and leaves?’
FCSFruit ‘How many days over the last 7 days, did members of your household eat fruits?’
FCSFat ‘How many days over the last 7 days, did members of your household consume oil?’
FCSSugar ‘How many days over the last 7 days, did members of your household eat sugar, or sweets?’
FCSCond ‘How many days over the last 7 days, did members of your household eat condiments / spices?’.
Compute FCS = sum(FCSStap*2, FCSPulse*3, FCSDairy*4, FCSPr*4, FCSVeg*1, FCSFruit*1, FCSFat*0.5, FCSSugar*0.5).
Variable labels FCS "Food Consumption Score".
EXECUTE.
***Use this when analyzing a country with low consumption of sugar and oil - thresholds 21-35
Recode FCS (lowest thru 21 =1) (21.5 thru 35 =2) (35.5 thru highest =3) into FCSCat21.¬
Variable labels FCSCat21 ‘FCS Categories’.
EXECUTE.
*** define value labels and properties for "FCS Categories".
Value labels FCSCat21 1.00 'Poor' 2.00 'Borderline' 3.00 'Acceptable '.
EXECUTE.
*** Important note: pay attention to the threshold used by your CO when selecting the syntax (21 cat. vs 28 cat.)
*** Use this when analyzing a country with high consumption of sugar and oil – thresholds 28-42
***Recode FCS (lowest thru 28 =1) (28.5 thru 42 =2) (42.5 thru highest =3) into FCSCat28.
Variable labels FCSCat28 ‘FCS Categories’.
EXECUTE.
Value labels FCSCat28 1.00 'Poor' 2.00 'Borderline' 3.00 'Acceptable '.
EXECUTE.
Recode FCSCat21 (1=4) (2=3) (3=1) INTO FCS_4pt.
Variable labels FCS_4pt '4pt FCG'.
EXECUTE.
Frequencies VARIABLES=FCS_4pt /ORDER=ANALYSIS.
Value labels FCS_4pt 1.00 'Acceptable' 3.00 'Borderline' 4.00 'Poor'.
EXECUTE.
***Reduced Coping Strategy Index***
***define labels
Variable labels
rCSILessQlty ‘Rely on less preferred and less expensive food in the past 7 days’
rCSIBorrow ‘Borrow food or rely on help from a relative or friend in the past 7 days’
rCSIMealNb ‘Reduce number of meals eaten in a day in the past 7 days’
rCSIMealSize ‘Limit portion size of meals at meal times in the past 7 days’
rCSIMealAdult ‘Restrict consumption by adults in order for small children to eat in the past 7 days’.
Compute rCSI = sum(rCSILessQlty*1,rCSIBorrow*2,rCSIMealNb*1,rCSIMealSize*1,rCSIMealAdult*3).
variable labels rCSI 'Reduced coping strategies index (rCSI)'.
EXECUTE.
FREQUENCIES VARIABLES=rCSI
/FORMAT=NOTABLE
/STATISTICS=MEAN
/ORDER=ANALYSIS.
***Combining rCSI with FCS_4pt for CARI calculation (current consumption)
Do if (rCSI >= 4).
Recode FCS_4pt (1=2).
End if.
EXECUTE.
Value labels FCS_4pt 1.00 'Acceptable' 2.00 ' Acceptable and rCSI>4' 3.00 'Borderline' 4.00 'Poor'.
EXECUTE.
Frequencies FCS_4pt.
***Livelihood Coping ***
***define labels
Value labels
Lcs_stress_DomAsset
Lcs_stress_CrdtFood
Lcs_stress_saving
Lcs_stress_BorrowCash
Lcs_crisis_ProdAsset
Lcs_crisis_HealthEdu
Lcs_crisis_OutSchool
Lcs_em_ResAsset
Lcs_em_Begged
Lcs_em_IllegalAct
10 ‘No, because I did not need to’
20 ‘No because I already sold those assets or have engaged in this activity within the last 12 months and cannot continue to do it’
30 ‘Yes’
9999 ‘Not applicable (don’t have children/ these assets)’.
***stress strategies*** (must have 4 stress strategies to calculate LCS, if you have more then use the most frequently applied strategies)
***define variables labels
Variable labels
Lcs_stress_DomAsset ‘Sold household assets/goods (radio, furniture, refrigerator, television, jewellery, etc.) due to lack of food’
Lcs_stress_CrdtFood ‘Purchased food/non-food on credit (incur debts) due to lack of food’
Lcs_stress_saving ‘Spent savings due to lack of food’
Lcs_stress_BorrowCash ‘Borrow money due to lack of food’.
do if (Lcs_stress_DomAsset = 20) | (Lcs_stress_DomAsset = 30) | (Lcs_stress_CrdtFood = 20) | (Lcs_stress_CrdtFood = 30) | (Lcs_stress_saving =20) | (Lcs_stress_saving =30) | (Lcs_stress_BorrowCash =20) | (Lcs_stress_BorrowCash =30).
compute stress_coping =1.
else.
compute stress_coping =0.
end if.
EXECUTE.
***crisis strategies***(must have 3 crisis strategies to calculate LCS, if you have more then use the most frequently applied strategies)
***define variables
Variable labels
Lcs_crisis_ProdAsset ‘Sold productive assets or means of transport (sewing machine, wheelbarrow, bicycle, car, etc.) due to lack of food’
Lcs_crisis_HealthEdu ‘Reduced expenses on health (including drugs) or education due to lack of food’
Lcs_crisis_OutSchool ‘Withdrew children from school due to lack of food’.
Do if (Lcs_crisis_ProdAsset = 20) | (Lcs_crisis_ProdAsset =30) | (Lcs_crisis_HealthEdu =20) | (Lcs_crisis_HealthEdu=30) | (Lcs_crisis_OutSchool =20) | (Lcs_crisis_OutSchool =30).
Compute crisis_coping =1.
Else.
Compute crisis_coping =0.
End if.
EXECUTE.
***emergency strategies ***(must have 3 emergency strategies to calculate LCS, if you have more then use the most frequently applied strategies)
***define variables
Variable labels
Lcs_em_ResAsset ‘Mortgaged/Sold house or land due to lack of food’
Lcs_em_Begged ‘Begged and/or scavenged (asked strangers for money/food) due to lack of food’
Lcs_em_IllegalAct ‘Engaged in illegal income activities (theft, prostitution) due to lack of food’.
do if (Lcs_em_ResAsset = 20) | (Lcs_em_ResAsset = 30) | (Lcs_em_Begged = 20) | (Lcs_em_Begged =30) | (Lcs_em_IllegalAct = 20) | (Lcs_em_IllegalAct = 30).
Compute emergency_coping =1.
Else.
Compute emergency_coping = 0.
End if.
EXECUTE.
*** label new variable
Variable labels stress_coping 'Did the HH engage in stress coping strategies?'.
Variable labels crisis_coping 'Did the HH engage in crisis coping strategies?'.
Variable labels emergency_coping 'Did the HH engage in emergency coping strategies?'.
*** recode variables to compute one variable with coping behavior
Recode stress_coping (0=0) (1=2).
Recode crisis_coping (0=0) (1=3).
Recode emergency_coping (0=0) (1=4).
Compute Max_coping_behaviour=MAX(stress_coping, crisis_coping, emergency_coping).
Recode Max_coping_behaviour (0=1).
Value labels Max_coping_behaviour 1 'HH not adopting coping strategies' 2 'Stress coping strategies ' 3 'Crisis coping strategies ' 4 'Emergencies coping strategies'.
Variable Labels Max_coping_behaviour 'Summary of asset depletion'.
EXECUTE.
***Food Expenditure Share ***
**Important note: assistance is considered in both assessments as well as monitoring
***food expenditure***
***define labels
*Important note: add recall periods of _1M or _7D to the variable names below depending on what has been selected for your CO. It is recommended to follow standard recall periods as in the module.
/*Expenditure on cereals (maize, rice, sorghum, wheat, flour of cereals, bread, pasta...)
/*Expenditure on tubers (potatoes, sweet potatoes, cassava, plantains, yams)
/*Expenditure on fruit (fresh and frozen fruit)
/*Expenditure on vegetables (dark green leafy vegetables, orange vegetable, other vegetable)
/*Expenditure on meat (fresh, chilled, frozen meat and poultry, dry and slated meat)
/*Expenditure on eggs
/*Expenditure on fish (fresh and frozen fish and other seafood)
/*Expenditure on oil, fat, and butter
/*Expenditure on milk, cheese, and yogurt
/*Expenditure on sugar, confectionery, and desserts
/*Expenditure on condiments (salt, spices, cubes, fish powder)
/*Expenditure on non-alcoholic beverages (coffee, tea, herbal infusion; bottled water; soft drinks; juice)
/*Expenditure on other meals/snacks consumed outside the home
/*Expenditure on pulses (beans, peas, lentils, nuts in shell or shelled)
Variable labels
HHExpFCer_MN_1M ‘Cash expenditure value on cereals’
HHExpFCer_CRD_1M ‘Credit expenditure value on cereals’
HHExpFCer_GiftAid_1M ‘Assistance expenditure value on cereals’
HHExpFCer_Own_1M ‘Own production expenditure value on cereals’
HHExpFTub_MN_1M ‘Cash expenditure value on tubers’
HHExpFTub_CRD_1M ‘Credit expenditure value on tubers’
HHExpFTub_GiftAid_1M ‘Assistance expenditure value on tubers’
HHExpFTub_Own_1M ‘Own production expenditure value on tubers’
HHExpFPuls_MN_1M ‘Cash expenditure on pulses & nuts’
HHExpFPuls_CRD_1M ‘Credit expenditure on pulses & nuts’
HHExpFPuls_GiftAid_1M ‘Assistance expenditure value on pulses & nuts’
HHExpFPuls_Own_1M ‘Own production expenditure value on pulses & nuts’
HHExpFVeg_MN_1M ‘Cash expenditure on vegetables’
HHExpFVeg_CRD_1M ‘Credit expenditure on vegetables’
HHExpFVeg_GiftAid_1M ‘Assistance expenditure value on vegetables’
HHExpFVeg_Own_1M ‘Own production expenditure value on vegetables’
HHExpFFrt_MN_1M ‘Cash expenditure on fruits’
HHExpFFrt_CRD_1M ‘Credit expenditure on fruits’
HHExpFFrt_GiftAid_1M ‘Assistance expenditure value on fruits’
HHExpFFrt_Own_1M ‘Own production expenditure value on fruits’
HHExpFAnimMeat_MN_1M ‘Cash expenditure on meat’
HHExpFAnimMeat_CRD_1M ‘Credit expenditure on meat’
HHExpFAnimMeat_GiftAid_1M ‘Assistance expenditure value on meat’
HHExpFAnimMeat_Own_1M ‘Own production expenditure value on meat’
HHExpFAnimFish_MN_1M ‘Cash expenditure on fish’
HHExpFAnimFish_CRD_1M ‘Credit expenditure on fish’
HHExpFAnimFish_GiftAid_1M ‘Assistance expenditure value on fish’
HHExpFAnimFish_Own_1M ‘Own production expenditure value on fish’
HHExpFFats_MN_1M ‘Cash expenditure on oil/fat/butter’
HHExpFFats_CRD_1M ‘Credit expenditure on oil/fat/butter’
HHExpFFats_GiftAid_1M ‘Assistance expenditure value on oil/fat/butter’
HHExpFFats_Own_1M ‘Own production expenditure value on oil/fat/butter’
HHExpFDairy_MN_1M ‘Cash expenditure on milk/dairy products’
HHExpFDairy_CRD_1M ‘Credit expenditure on milk/dairy products’
HHExpFDairy_GiftAid_1M ‘Assistance expenditure value on milk/dairy products’
HHExpFDairy_Own_1M ‘Own production expenditure value on milk/dairy products’
HHExpFAnimEgg_MN_1M ‘Cash expenditure on eggs’
HHExpFAnimEgg_CRD_1M ‘Credit expenditure on eggs’
HHExpFAnimEgg_GiftAid_1M ‘Assistance expenditure value on eggs’
HHExpFAnimEgg_Own_1M ‘Own production expenditure value on eggs’
HHExpFSgr_MN_1M ‘Cash expenditure on sugar’
HHExpFSgr_CRD_1M ‘Credit expenditure on sugar’
HHExpFSgr_GiftAid_1M ‘Assistance expenditure value on sugar’
HHExpFSgr_Own_1M ‘Own production expenditure value on sugar’
HHExpFCond_MN_1M ‘Cash expenditure on condiments’
HHExpFCond_CRD_1M ‘Credit expenditure on condiments’
HHExpFCond_GiftAid_1M ‘Assistance expenditure value on condiments’
HHExpFCond_Own_1M ‘Own production expenditure value on condiments’
HHExpFBeverage_MN_1M ‘Cash expenditure on beverages’
HHExpFBeverage_CRD_1M ‘Credit expenditure on beverages’
HHExpFBeverage_GiftAid_1M ‘Assistance expenditure value on beverages’
HHExpFBeverage_Own_1M ‘Own production expenditure value on beverages’
HHExpFOut_MN_1M ‘Cash expenditure on snacks consumed outside the home’
HHExpFOut_CRD_1M ‘Credit expenditure on snacks consumed outside the home’
HHExpFOut_GiftAid_1M ‘Assistance expenditure value on snacks consumed outside the home’
HHExpFOut_Own_1M ‘Own production expenditure value on snacks consumed outside the home’.
Execute.
***Calculate the overall monthly food expenditure
/*If the expenditure was calculated separately for cash, credit, aid/gift and own production, calculate the overall total by summing them up
make sure to transform it to 30 days
Compute HHExpFood_MN_1M =sum(HHExpFCer_MN_1M, HHExpFTub_MN_1M,
HHExpFPuls_MN_1M, HHExpFVeg_MN_1M, HHExpFFrt_MN_1M, HHExpFAnimMeat_MN_1M,
HHExpFAnimFish_MN_1M, HHExpFFats_MN_1M, HHExpFDairy_MN_1M,
HHExpFAnimEgg_MN_1M, HHExpFSgr_MN_1M, HHExpFCond_MN_1M, HHExpFBeverage_MN_1M,
HHExpFOut_MN_1M).
Compute HHExp_Food_CRD_1M =sum(HHExpFCer_CRD_1M, HHExpFTub_CRD_1M,
HHExpFPuls_CRD_1M, HHExpFVeg_CRD_1M, HHExpFFrt_CRD_1M, HHExpFAnimMeat_CRD_1M,
HHExpFAnimFish_CRD_1M, HHExpFFats_CRD_1M, HHExpFDairy_CRD_1M,
HHExpFAnimEgg_CRD_1M, HHExpFSgr_CRD_1M, HHExpFCond_CRD_1M, HHExpFBeverage_CRD_1M,
HHExpFOut_CRD_1M).
Compute HHExp_Food_GiftAid_1M =sum(HHExpFCer_GiftAid_1M, HHExpFTub_GiftAid_1M,
HHExpFPuls_GiftAid_1M, HHExpFVeg_GiftAid_1M, HHExpFFrt_GiftAid_1M, HHExpFAnimMeat_GiftAid_1M,
HHExpFAnimFish_GiftAid_1M, HHExpFFats_GiftAid_1M, HHExpFDairy_GiftAid_1M,
HHExpFAnimEgg_GiftAid_1M, HHExpFSgr_GiftAid_1M, HHExpFCond_GiftAid_1M, HHExpFBeverage_GiftAid_1M,
HHExpFOut_GiftAid_1M).
Compute HHExp_Food_Own_1M =sum(HHExpFCer_Own_1M, HHExpFTub_Own_1M,
HHExpFPuls_Own_1M, HHExpFVeg_Own_1M, HHExpFFrt_Own_1M, HHExpFAnimMeat_Own_1M,
HHExpFAnimFish_Own_1M, HHExpFFats_Own_1M, HHExpFDairy_Own_1M,
HHExpFAnimEgg_Own_1M, HHExpFSgr_Own_1M, HHExpFCond_Own_1M, HHExpFBeverage_Own_1M,
HHExpFOut_Own_1M).
Variable labels
HHExp_Food_MN_1M 'Total food expenditure on cash'
HHExp_Food_CRD_1M 'Total food expenditure on credit'
HHExp_Food_GiftAid_1M 'Total food expenditure value from assistance'
HHExp_Food_Own_1M 'Total food expenditure value from own production'.
Execute.
***Non-food expenditure (30 days)***
***define labels
Variable labels
HHExpNFHyg_MN_1M 'Cash expenditure on soap, hygiene & personal care items'
HHExpNFHyg_CRD_1M 'Credit expenditure on soap, hygiene & personal care items'
HHExpNFHyg_GiftAid_1M 'Assistance expenditure value on soap, hygiene & personal care items'
HHExpNFTransp_MN_1M 'Cash expenditure on transport'
HHExpNFTransp_CRD_1M 'Credit expenditure on transport'
HHExpNFTransp_GiftAid_1M 'Assistance expenditure value on transport'
HHExpNFWat_MN_1M 'Cash expenditure on water supply for domestic consumption'
HHExpNFWat_CRD_1M 'Credit expenditure on water supply for domestic consumption'
HHExpNFWat_GiftAid_1M 'Assistance expenditure value on water supply for domestic consumption'
HHExpNFElec_MN_1M 'Cash expenditure on electricity'
HHExpNFElec_CRD_1M 'Credit expenditure on electricity'
HHExpNFElec_GiftAid_1M 'Assistance expenditure value on electricity'
HHExpNFEnerg_MN_1M 'Cash expenditure on energy (cooking, heating, lighting) from other sources (not electricity)'
HHExpNFEnerg_CRD_1M 'Credit expenditure on energy (cooking, heating, lighting) from other sources (not electricity)'
HHExpNFEnerg_GiftAid_1M ‘Assistance expenditure value on energy (cooking, heating, lighting) from other sources (not electricity)’
HHExpNFDwelServ_MN_1M ‘Cash expenditure on miscellaneous services relating to the dwelling’
HHExpNFDwelServ_CRD_1M ‘Credit expenditure on miscellaneous services relating to the dwelling’
HHExpNFDwelServ_GiftAid_1M ‘Assistance expenditure value on miscellaneous services relating to the dwelling’
HHExpNFPhone_MN_1M ‘Cash expenditure on information and communication’
HHExpNFPhone_CRD_1M ‘Credit expenditure on information and communication’
HHExpNFPhone_GiftAid_1M ‘Assistance expenditure value on information and communication’
HHExpNFAlcTobac_MN_1M ‘Cash expenditure on alcoholic beverages and tobacco’
HHExpNFAlcTobac_CRD_1M ‘Credit expenditure on alcoholic beverages and tobacco’
HHExpNFAlcTobac_GiftAid_1M ‘Assistance expenditure value on alcoholic beverages and tobacco’
HHExpNFSpec1_MN_1M ‘Cash expenditure on [specific to country]’
HHExpNFSpec1_CRD_1M ‘Credit expenditure on [Specific to country]’
HHExpNFSpec1_GiftAid_1M ‘Assistance expenditure value on [Specific to country]’.
*** Non-food expenditure (6 months recall) ***.
* Label the 6-month recall non-food expenditure variables for each source
* (cash, credit, assistance). Straight quotes are required by SPSS; the
* original typographic quotes would not parse.
Variable labels
HHExpNFMedServ_MN_6M 'Cash expenditure on health services'
HHExpNFMedServ_CRD_6M 'Credit expenditure on health services'
HHExpNFMedServ_GiftAid_6M 'Assistance expenditure value on health services'
HHExpNFMedGood_MN_6M 'Cash expenditure on medicines & health products'
HHExpNFMedGood_CRD_6M 'Credit expenditure on medicines & health products'
HHExpNFMedGood_GiftAid_6M 'Assistance expenditure value on medicines & health products'
HHExpNFCloth_MN_6M 'Cash expenditure on clothing and footwear'
HHExpNFCloth_CRD_6M 'Credit expenditure on clothing and footwear'
HHExpNFCloth_GiftAid_6M 'Assistance expenditure value on clothing and footwear'
HHExpNFEduFee_MN_6M 'Cash expenditure on education services'
HHExpNFEduFee_CRD_6M 'Credit expenditure on education services'
HHExpNFEduFee_GiftAid_6M 'Assistance expenditure value on education services'
HHExpNFEduGood_MN_6M 'Cash expenditure on education goods'
HHExpNFEduGood_CRD_6M 'Credit expenditure on education goods'
HHExpNFEduGood_GiftAid_6M 'Assistance expenditure value on education goods'
HHExpNFRent_MN_6M 'Cash expenditure on rent'
HHExpNFRent_CRD_6M 'Credit expenditure on rent'
HHExpNFRent_GiftAid_6M 'Assistance expenditure value on rent'
HHExpNFHHSoft_MN_6M 'Cash expenditure on household non-durable furniture and routine maintenance'
HHExpNFHHSoft_CRD_6M 'Credit expenditure on household non-durable furniture and routine maintenance'
HHExpNFHHSoft_GiftAid_6M 'Assistance expenditure value on household non-durable furniture and routine maintenance'
HHExpNFSav_MN_6M 'Cash expenditure on savings'
HHExpNFSav_CRD_6M 'Credit expenditure on savings'
HHExpNFSav_GiftAid_6M 'Assistance expenditure value on savings'
HHExpNFDebt_MN_6M 'Cash expenditure on debt repayment'
HHExpNFDebt_CRD_6M 'Credit expenditure on debt repayment'
HHExpNFDebt_GiftAid_6M 'Assistance expenditure value on debt repayment'
HHExpNFInsurance_MN_6M 'Cash expenditure on insurance'
HHExpNFInsurance_CRD_6M 'Credit expenditure on insurance'
HHExpNFInsurance_GiftAid_6M 'Assistance expenditure value on insurance'.
*** Calculate the overall monthly non-food expenditure ***.
* Totals are calculated separately for cash, credit and aid/gift, then the
* 6-month recall totals are converted to a monthly equivalent (divide by 6)
* and added to the 30-day recall totals.
* Fix: the sums referenced HHExpNFSoft_*_6M, but the labelled variables are
* HHExpNFHHSoft_*_6M (see the Variable labels block above).
Compute HHExpNFTotal_MN_6M=sum(HHExpNFRent_MN_6M, HHExpNFMedServ_MN_6M,
HHExpNFMedGood_MN_6M, HHExpNFCloth_MN_6M, HHExpNFEduFee_MN_6M, HHExpNFEduGood_MN_6M,
HHExpNFHHSoft_MN_6M, HHExpNFSav_MN_6M, HHExpNFInsurance_MN_6M, HHExpNFDebt_MN_6M).
Compute HHExpNFTotal_MN_30D=sum(HHExpNFAlcTobac_MN_1M, HHExpNFHyg_MN_1M,
HHExpNFTransp_MN_1M, HHExpNFWat_MN_1M, HHExpNFDwelServ_MN_1M, HHExpNFElec_MN_1M, HHExpNFEnerg_MN_1M, HHExpNFPhone_MN_1M, HHExpNFSpec1_MN_1M).
* Sum the non-food 1-month and (monthlyised) 6-month expenditures.
Compute HHExpNFTotal_MN_1M=(HHExpNFTotal_MN_30D+HHExpNFTotal_MN_6M/6).
* Note: to reduce the number of variables in your dataset, you could run the delete function below.
delete variables HHExpNFTotal_MN_6M HHExpNFTotal_MN_30D.
Compute HHExpNFTotal_CRD_6M=sum(HHExpNFRent_CRD_6M, HHExpNFMedServ_CRD_6M,
HHExpNFMedGood_CRD_6M, HHExpNFCloth_CRD_6M, HHExpNFEduFee_CRD_6M, HHExpNFEduGood_CRD_6M,
HHExpNFHHSoft_CRD_6M, HHExpNFInsurance_CRD_6M, HHExpNFDebt_CRD_6M).
Compute HHExpNFTotal_CRD_30D = sum(HHExpNFAlcTobac_CRD_1M, HHExpNFHyg_CRD_1M,
HHExpNFTransp_CRD_1M, HHExpNFWat_CRD_1M, HHExpNFDwelServ_CRD_1M, HHExpNFElec_CRD_1M, HHExpNFEnerg_CRD_1M, HHExpNFPhone_CRD_1M, HHExpNFSpec1_CRD_1M).
* Sum the non-food 1-month and (monthlyised) 6-month expenditures.
Compute HHExpNFTotal_CRD_1M=(HHExpNFTotal_CRD_30D+HHExpNFTotal_CRD_6M/6).
* Note: to reduce the number of variables in your dataset, you could run the delete function below.
delete variables HHExpNFTotal_CRD_6M HHExpNFTotal_CRD_30D.
Compute HHExpNFTotal_GiftAid_6M=sum(HHExpNFRent_GiftAid_6M, HHExpNFMedServ_GiftAid_6M,
HHExpNFMedGood_GiftAid_6M, HHExpNFCloth_GiftAid_6M, HHExpNFEduFee_GiftAid_6M, HHExpNFEduGood_GiftAid_6M,
HHExpNFHHSoft_GiftAid_6M, HHExpNFSav_GiftAid_6M, HHExpNFInsurance_GiftAid_6M, HHExpNFDebt_GiftAid_6M).
Compute HHExpNFTotal_GiftAid_30D = sum(HHExpNFAlcTobac_GiftAid_1M, HHExpNFHyg_GiftAid_1M,
HHExpNFTransp_GiftAid_1M, HHExpNFWat_GiftAid_1M, HHExpNFDwelServ_GiftAid_1M, HHExpNFElec_GiftAid_1M, HHExpNFEnerg_GiftAid_1M, HHExpNFPhone_GiftAid_1M, HHExpNFSpec1_GiftAid_1M).
* Sum the non-food 1-month and (monthlyised) 6-month expenditures.
Compute HHExpNFTotal_GiftAid_1M=(HHExpNFTotal_GiftAid_30D+HHExpNFTotal_GiftAid_6M/6).
* Note: to reduce the number of variables in your dataset, you could run the delete function below.
delete variables HHExpNFTotal_GiftAid_6M HHExpNFTotal_GiftAid_30D.
Variable labels HHExpNFTotal_MN_1M 'Total non-food exp on cash'.
Variable labels HHExpNFTotal_CRD_1M 'Total non-food exp on credit'.
Variable labels HHExpNFTotal_GiftAid_1M 'Total non-food exp from gift aid'.
Execute.
*** Calculate totals for food and non-food expenditure ***.
Compute HHExpNFTotal_1M=sum(HHExpNFTotal_MN_1M, HHExpNFTotal_CRD_1M, HHExpNFTotal_GiftAid_1M).
Compute HHExpFood_1M=sum(HHExpFood_MN_1M, HHExp_Food_CRD_1M, HHExp_Food_Own_1M, HHExp_Food_GiftAid_1M).
EXECUTE.
** Food Expenditure Share **.
* FES = food expenditure as a share of total (food + non-food) expenditure.
Compute FES= HHExpFood_1M /SUM(HHExpFood_1M , HHExpNFTotal_1M).
Variable labels FES 'Household food expenditure share'.
EXECUTE.
* Classify FES onto the 4-point CARI scale: <50%=1, 50-65%=2, 65-75%=3, >=75%=4.
Recode FES (Lowest thru .4999999=1) (.50 thru .64999999=2) (.65 thru .74999999=3) (.75 thru Highest=4)
into Foodexp_4pt.
Variable labels Foodexp_4pt 'Food expenditure share categories'.
EXECUTE.
*** CARI (WITH FOOD EXPENDITURE) ***.
* Coping capacity = mean of the livelihood coping and food expenditure share domains.
Compute Mean_coping_capacity_FES = MEAN (Max_coping_behaviour, Foodexp_4pt).
* CARI = mean of current consumption (FCS_4pt) and coping capacity, rounded to the nearest category.
Compute CARI_unrounded_FES = MEAN (FCS_4pt, Mean_coping_capacity_FES).
Compute CARI_FES = RND (CARI_unrounded_FES).
Execute.
Value labels CARI_FES 1 'Food secure' 2 'Marginally food secure' 3 'Moderately food insecure' 4 'Severely food insecure'.
EXECUTE.
Frequencies CARI_FES.
* Create a population distribution table to explore how the domains interact within the different food security categories.
CTABLES
/VLABELS VARIABLES=Foodexp_4pt FCS_4pt Max_coping_behaviour DISPLAY=LABEL
/TABLE Foodexp_4pt [C] BY FCS_4pt [C] > Max_coping_behaviour [C][ROWPCT.COUNT PCT40.1]
/CATEGORIES VARIABLES=Foodexp_4pt ORDER=A KEY=VALUE EMPTY=EXCLUDE
/CATEGORIES VARIABLES=FCS_4pt Max_coping_behaviour ORDER=A KEY=VALUE EMPTY=INCLUDE.
*** CALCULATE CARI using FCS, rCSI, LCS and ECMEN ***.
*** Food Consumption Score ***.
* Define labels.
Variable labels
FCSStap 'How many days over the last 7 days, did members of your household eat cereals, grains, roots and tubers?'
FCSPulse 'How many days over the last 7 days, did members of your household eat legumes/nuts?'
FCSDairy 'How many days over the last 7 days, did members of your household drink/eat milk and other dairy products?'
FCSPr 'How many days over the last 7 days, did members of your household eat meat, fish and eggs?'
FCSVeg 'How many days over the last 7 days, did members of your household eat vegetables and leaves?'
FCSFruit 'How many days over the last 7 days, did members of your household eat fruits?'
FCSFat 'How many days over the last 7 days, did members of your household consume oil?'
FCSSugar 'How many days over the last 7 days, did members of your household eat sugar, or sweets?'
FCSCond 'How many days over the last 7 days, did members of your household eat condiments / spices?'.
* FCS = weighted sum of the food-group frequencies (condiments carry weight 0 and are excluded).
Compute FCS = sum(FCSStap*2, FCSPulse*3, FCSDairy*4, FCSPr*4, FCSVeg*1, FCSFruit*1, FCSFat*0.5, FCSSugar*0.5).
Variable labels FCS "Food Consumption Score".
EXECUTE.
* Use this when analyzing a country with low consumption of sugar and oil - thresholds 21-35.
Recode FCS (lowest thru 21 =1) (21.5 thru 35 =2) (35.5 thru highest =3) into FCSCat21.
Variable labels FCSCat21 'FCS Categories'.
EXECUTE.
* Define value labels and properties for "FCS Categories".
Value labels FCSCat21 1.00 'Poor' 2.00 'Borderline' 3.00 'Acceptable '.
EXECUTE.
* Important note: pay attention to the threshold used by your CO when selecting the syntax (21 cat. vs 28 cat.).
* Use this when analyzing a country with high consumption of sugar and oil - thresholds 28-42.
Recode FCS (lowest thru 28 =1) (28.5 thru 42 =2) (42.5 thru highest =3) into FCSCat28.
Variable labels FCSCat28 'FCS Categories'.
EXECUTE.
* Define value labels and properties for "FCS Categories".
Value labels FCSCat28 1.00 'Poor' 2.00 'Borderline' 3.00 'Acceptable '.
EXECUTE.
* Recode the FCS categories onto the 4-point CARI scale (Acceptable=1, Borderline=3, Poor=4).
Recode FCSCat21 (1=4) (2=3) (3=1) INTO FCS_4pt.
Variable labels FCS_4pt '4pt FCG'.
EXECUTE.
Frequencies VARIABLES=FCS_4pt /ORDER=ANALYSIS.
Value labels FCS_4pt 1.00 'Acceptable' 3.00 'Borderline' 4.00 'Poor'.
EXECUTE.
*** Reduced Coping Strategy Index ***.
* Define labels.
Variable labels
rCSILessQlty 'Rely on less preferred and less expensive food in the past 7 days'
rCSIBorrow 'Borrow food or rely on help from a relative or friend in the past 7 days'
rCSIMealNb 'Reduce number of meals eaten in a day in the past 7 days'
rCSIMealSize 'Limit portion size of meals at meal times in the past 7 days'
rCSIMealAdult 'Restrict consumption by adults in order for small children to eat in the past 7 days'.
* rCSI = weighted sum of the five coping behaviours (standard severity weights 1,2,1,1,3).
Compute rCSI = sum(rCSILessQlty*1,rCSIBorrow*2,rCSIMealNb*1,rCSIMealSize*1,rCSIMealAdult*3).
Variable labels rCSI 'Reduced coping strategies index (rCSI)'.
EXECUTE.
FREQUENCIES VARIABLES=rCSI
/FORMAT=NOTABLE
/STATISTICS=MEAN
/ORDER=ANALYSIS.
*** Combining rCSI with FCS_4pt for CARI calculation (current consumption) ***.
* Households with acceptable consumption but rCSI >= 4 are downgraded to category 2.
Do if (rCSI >= 4).
Recode FCS_4pt (1=2).
End if.
EXECUTE.
Value labels FCS_4pt 1.00 'Acceptable' 2.00 ' Acceptable and rCSI>4' 3.00 'Borderline' 4.00 'Poor'.
EXECUTE.
Frequencies FCS_4pt.
*** Livelihood Coping ***.
* Define the shared response codes for all ten livelihood coping strategy items.
* Fix: the "Not applicable" label contains an apostrophe, so it must be
* double-quoted; the original typographic quotes would not parse.
Value labels
Lcs_stress_DomAsset
Lcs_stress_CrdtFood
Lcs_stress_saving
Lcs_stress_BorrowCash
Lcs_crisis_ProdAsset
Lcs_crisis_HealthEdu
Lcs_crisis_OutSchool
Lcs_em_ResAsset
Lcs_em_Begged
Lcs_em_IllegalAct
10 'No, because I did not need to'
20 'No because I already sold those assets or have engaged in this activity within the last 12 months and cannot continue to do it'
30 'Yes'
9999 "Not applicable (don't have children/ these assets)".
* Stress strategies: must have 4 stress strategies to calculate LCS; if you have more, use the most frequently applied strategies.
* Define variable labels.
Variable labels
Lcs_stress_DomAsset 'Sold household assets/goods (radio, furniture, refrigerator, television, jewellery, etc.) due to lack of food'
Lcs_stress_CrdtFood 'Purchased food/non-food on credit (incur debts) due to lack of food'
Lcs_stress_saving 'Spent savings due to lack of food'
Lcs_stress_BorrowCash 'Borrow money due to lack of food'.
* A household used a stress strategy if any item is 20 (already exhausted) or 30 (yes).
Do if (Lcs_stress_DomAsset = 20) | (Lcs_stress_DomAsset = 30) | (Lcs_stress_CrdtFood = 20) | (Lcs_stress_CrdtFood = 30) | (Lcs_stress_saving =20) | (Lcs_stress_saving =30) | (Lcs_stress_BorrowCash =20) | (Lcs_stress_BorrowCash =30).
Compute stress_coping =1.
Else.
Compute stress_coping =0.
End if.
EXECUTE.
* Crisis strategies: must have 3 crisis strategies to calculate LCS; if you have more, use the most frequently applied strategies.
* Define variable labels.
Variable labels
Lcs_crisis_ProdAsset 'Sold productive assets or means of transport (sewing machine, wheelbarrow, bicycle, car, etc.) due to lack of food'
Lcs_crisis_HealthEdu 'Reduced expenses on health (including drugs) or education due to lack of food'
Lcs_crisis_OutSchool 'Withdrew children from school due to lack of food'.
Do if (Lcs_crisis_ProdAsset = 20) | (Lcs_crisis_ProdAsset =30) | (Lcs_crisis_HealthEdu =20) | (Lcs_crisis_HealthEdu=30) | (Lcs_crisis_OutSchool =20) | (Lcs_crisis_OutSchool =30).
Compute crisis_coping =1.
Else.
Compute crisis_coping =0.
End if.
EXECUTE.
* Emergency strategies: must have 3 emergency strategies to calculate LCS; if you have more, use the most frequently applied strategies.
* Define variable labels.
Variable labels
Lcs_em_ResAsset 'Mortgaged/Sold house or land due to lack of food'
Lcs_em_Begged 'Begged and/or scavenged (asked strangers for money/food) due to lack of food'
Lcs_em_IllegalAct 'Engaged in illegal income activities (theft, prostitution) due to lack of food'.
Do if (Lcs_em_ResAsset = 20) | (Lcs_em_ResAsset = 30) | (Lcs_em_Begged = 20) | (Lcs_em_Begged =30) | (Lcs_em_IllegalAct = 20) | (Lcs_em_IllegalAct = 30).
Compute emergency_coping =1.
Else.
Compute emergency_coping = 0.
End if.
EXECUTE.
* Label the new variables.
Variable labels stress_coping 'Did the HH engage in stress coping strategies?'.
Variable labels crisis_coping 'Did the HH engage in crisis coping strategies?'.
Variable labels emergency_coping 'Did the HH engage in emergency coping strategies?'.
* Recode to severity scores (stress=2, crisis=3, emergency=4) and keep the most severe behaviour per household.
Recode stress_coping (0=0) (1=2).
Recode crisis_coping (0=0) (1=3).
Recode emergency_coping (0=0) (1=4).
Compute Max_coping_behaviour=MAX(stress_coping, crisis_coping, emergency_coping).
Recode Max_coping_behaviour (0=1).
Value labels Max_coping_behaviour 1 'HH not adopting coping strategies' 2 'Stress coping strategies ' 3 'Crisis coping strategies ' 4 'Emergencies coping strategies'.
Variable Labels Max_coping_behaviour 'Summary of asset depletion'.
EXECUTE.
***Economic Capacity to Meet Essential Needs: HHs under the MEB/Poverty line***.
/****ECMEN calculation is based on the standard module here:
/*-0000115416/download/
**Important note: Original ECMEN calculation only includes expenditure on cash and own production - credit and assistance (gift/aid) should not be added
*** Food expenditure ***.
* Define labels.
* Important note: add recall periods of _1M or _7D to the variable names below depending on what has been selected for your CO. It is recommended to follow standard recall periods as in the module.
* Expenditure on cereals (maize, rice, sorghum, wheat, flour of cereals, bread, pasta).
* Expenditure on tubers (potatoes, sweet potatoes, cassava, plantains, yams).
* Expenditure on fruit (fresh and frozen fruit).
* Expenditure on vegetables (dark green leafy vegetables, orange vegetable, other vegetable).
* Expenditure on meat (fresh, chilled, frozen meat and poultry, dry and salted meat).
* Expenditure on eggs.
* Expenditure on fish (fresh and frozen fish and other seafood).
* Expenditure on oil, fat, and butter.
* Expenditure on milk, cheese, and yogurt.
* Expenditure on sugar, confectionery, and desserts.
* Expenditure on condiments (salt, spices, cubes, fish powder).
* Expenditure on non-alcoholic beverages (coffee, tea, herbal infusion; bottled water; soft drinks; juice).
* Expenditure on other meals/snacks consumed outside the home.
* Expenditure on pulses (beans, peas, lentils, nuts in shell or shelled).
variable labels
HHExpFCer_MN_1M 'Cash expenditure value on cereals'
HHExpFCer_CRD_1M 'Credit expenditure value on cereals'
HHExpFCer_GiftAid_1M 'Assistance expenditure value on cereals'
HHExpFCer_Own_1M 'Own production expenditure value on cereals'
HHExpFTub_MN_1M 'Cash expenditure value on tubers'
HHExpFTub_CRD_1M 'Credit expenditure value on tubers'
HHExpFTub_GiftAid_1M 'Assistance expenditure value on tubers'
HHExpFTub_Own_1M 'Own production expenditure value on tubers'
HHExpFPuls_MN_1M 'Cash expenditure on pulses & nuts'
HHExpFPuls_CRD_1M 'Credit expenditure on pulses & nuts'
HHExpFPuls_GiftAid_1M 'Assistance expenditure value on pulses & nuts'
HHExpFPuls_Own_1M 'Own production expenditure value on pulses & nuts'
HHExpFVeg_MN_1M 'Cash expenditure on vegetables'
HHExpFVeg_CRD_1M 'Credit expenditure on vegetables'
HHExpFVeg_GiftAid_1M 'Assistance expenditure value on vegetables'
HHExpFVeg_Own_1M 'Own production expenditure value on vegetables'
HHExpFFrt_MN_1M 'Cash expenditure on fruits'
HHExpFFrt_CRD_1M 'Credit expenditure on fruits'
HHExpFFrt_GiftAid_1M 'Assistance expenditure value on fruits'
HHExpFFrt_Own_1M 'Own production expenditure value on fruits'
HHExpFAnimMeat_MN_1M 'Cash expenditure on meat'
HHExpFAnimMeat_CRD_1M 'Credit expenditure on meat'
HHExpFAnimMeat_GiftAid_1M 'Assistance expenditure value on meat'
HHExpFAnimMeat_Own_1M 'Own production expenditure value on meat'
HHExpFAnimFish_MN_1M 'Cash expenditure on fish'
HHExpFAnimFish_CRD_1M 'Credit expenditure on fish'
HHExpFAnimFish_GiftAid_1M 'Assistance expenditure value on fish'
HHExpFAnimFish_Own_1M 'Own production expenditure value on fish'
HHExpFFats_MN_1M 'Cash expenditure on oil/fat/butter'
HHExpFFats_CRD_1M 'Credit expenditure on oil/fat/butter'
HHExpFFats_GiftAid_1M 'Assistance expenditure value on oil/fat/butter'
HHExpFFats_Own_1M 'Own production expenditure value on oil/fat/butter'
HHExpFDairy_MN_1M 'Cash expenditure on milk/dairy products'
HHExpFDairy_CRD_1M 'Credit expenditure on milk/dairy products'
HHExpFDairy_GiftAid_1M 'Assistance expenditure value on milk/dairy products'
HHExpFDairy_Own_1M 'Own production expenditure value on milk/dairy products'
HHExpFAnimEgg_MN_1M 'Cash expenditure on eggs'
HHExpFAnimEgg_CRD_1M 'Credit expenditure on eggs'
HHExpFAnimEgg_GiftAid_1M 'Assistance expenditure value on eggs'
HHExpFAnimEgg_Own_1M 'Own production expenditure value on eggs'
HHExpFSgr_MN_1M 'Cash expenditure on sugar'
HHExpFSgr_CRD_1M 'Credit expenditure on sugar'
HHExpFSgr_GiftAid_1M 'Assistance expenditure value on sugar'
HHExpFSgr_Own_1M 'Own production expenditure value on sugar'
HHExpFCond_MN_1M 'Cash expenditure on condiments'
HHExpFCond_CRD_1M 'Credit expenditure on condiments'
HHExpFCond_GiftAid_1M 'Assistance expenditure value on condiments'
HHExpFCond_Own_1M 'Own production expenditure value on condiments'
HHExpFBeverage_MN_1M 'Cash expenditure on beverages'
HHExpFBeverage_CRD_1M 'Credit expenditure on beverages'
HHExpFBeverage_GiftAid_1M 'Assistance expenditure value on beverages'
HHExpFBeverage_Own_1M 'Own production expenditure value on beverages'
HHExpFOut_MN_1M 'Cash expenditure on snacks consumed outside the home'
HHExpFOut_CRD_1M 'Credit expenditure on snacks consumed outside the home'
HHExpFOut_GiftAid_1M 'Assistance expenditure value on snacks consumed outside the home'
HHExpFOut_Own_1M 'Own production expenditure value on snacks consumed outside the home'.
Execute.
** Calculate the overall monthly food expenditure per household for each source **.
* If the expenditure was calculated separately for cash, credit, aid/gift and own production, calculate the overall total by summing them up.
* For ECMEN, only cash and own production are included; make sure values are transformed to 30 days.
* Fix: the bare instruction line "make sure to transform it to 30 days" was not
* commented and would be a syntax error; the Variable labels command below
* labelled HHExp_Food_MN_1M although the computed variable (used later at the
* ECMEN step) is HHExpFood_MN_1M.
Compute HHExpFood_MN_1M =sum(HHExpFCer_MN_1M, HHExpFTub_MN_1M,
HHExpFPuls_MN_1M, HHExpFVeg_MN_1M, HHExpFFrt_MN_1M, HHExpFAnimMeat_MN_1M,
HHExpFAnimFish_MN_1M, HHExpFFats_MN_1M, HHExpFDairy_MN_1M,
HHExpFAnimEgg_MN_1M, HHExpFSgr_MN_1M, HHExpFCond_MN_1M, HHExpFBeverage_MN_1M,
HHExpFOut_MN_1M).
Compute HHExp_Food_CRD_1M =sum(HHExpFCer_CRD_1M, HHExpFTub_CRD_1M,
HHExpFPuls_CRD_1M, HHExpFVeg_CRD_1M, HHExpFFrt_CRD_1M, HHExpFAnimMeat_CRD_1M,
HHExpFAnimFish_CRD_1M, HHExpFFats_CRD_1M, HHExpFDairy_CRD_1M,
HHExpFAnimEgg_CRD_1M, HHExpFSgr_CRD_1M, HHExpFCond_CRD_1M, HHExpFBeverage_CRD_1M,
HHExpFOut_CRD_1M).
Compute HHExp_Food_GiftAid_1M =sum(HHExpFCer_GiftAid_1M, HHExpFTub_GiftAid_1M,
HHExpFPuls_GiftAid_1M, HHExpFVeg_GiftAid_1M, HHExpFFrt_GiftAid_1M, HHExpFAnimMeat_GiftAid_1M,
HHExpFAnimFish_GiftAid_1M, HHExpFFats_GiftAid_1M, HHExpFDairy_GiftAid_1M,
HHExpFAnimEgg_GiftAid_1M, HHExpFSgr_GiftAid_1M, HHExpFCond_GiftAid_1M, HHExpFBeverage_GiftAid_1M,
HHExpFOut_GiftAid_1M).
Compute HHExp_Food_Own_1M =sum(HHExpFCer_Own_1M, HHExpFTub_Own_1M,
HHExpFPuls_Own_1M, HHExpFVeg_Own_1M, HHExpFFrt_Own_1M, HHExpFAnimMeat_Own_1M,
HHExpFAnimFish_Own_1M, HHExpFFats_Own_1M, HHExpFDairy_Own_1M,
HHExpFAnimEgg_Own_1M, HHExpFSgr_Own_1M, HHExpFCond_Own_1M, HHExpFBeverage_Own_1M,
HHExpFOut_Own_1M).
Variable labels
HHExpFood_MN_1M 'Total food expenditure on cash'
HHExp_Food_CRD_1M 'Total food expenditure on credit'
HHExp_Food_GiftAid_1M 'Total food expenditure value from assistance'
HHExp_Food_Own_1M 'Total food expenditure value from own production'.
Execute.
*** Non-food expenditure (30 days recall) ***.
Variable labels
HHExpNFHyg_MN_1M 'Cash expenditure on soap, hygiene & personal care items'
HHExpNFHyg_CRD_1M 'Credit expenditure on soap, hygiene & personal care items'
HHExpNFHyg_GiftAid_1M 'Assistance expenditure value on soap, hygiene & personal care items'
HHExpNFTransp_MN_1M 'Cash expenditure on transport'
HHExpNFTransp_CRD_1M 'Credit expenditure on transport'
HHExpNFTransp_GiftAid_1M 'Assistance expenditure value on transport'
HHExpNFWat_MN_1M 'Cash expenditure on water supply for domestic consumption'
HHExpNFWat_CRD_1M 'Credit expenditure on water supply for domestic consumption'
HHExpNFWat_GiftAid_1M 'Assistance expenditure value on water supply for domestic consumption'
HHExpNFElec_MN_1M 'Cash expenditure on electricity'
HHExpNFElec_CRD_1M 'Credit expenditure on electricity'
HHExpNFElec_GiftAid_1M 'Assistance expenditure value on electricity'
HHExpNFEnerg_MN_1M 'Cash expenditure on energy (cooking, heating, lighting) from other sources (not electricity)'
HHExpNFEnerg_CRD_1M 'Credit expenditure on energy (cooking, heating, lighting) from other sources (not electricity)'
HHExpNFEnerg_GiftAid_1M 'Assistance expenditure value on energy (cooking, heating, lighting) from other sources (not electricity)'
HHExpNFDwelServ_MN_1M 'Cash expenditure on miscellaneous services relating to the dwelling'
HHExpNFDwelServ_CRD_1M 'Credit expenditure on miscellaneous services relating to the dwelling'
HHExpNFDwelServ_GiftAid_1M 'Assistance expenditure value on miscellaneous services relating to the dwelling'
HHExpNFPhone_MN_1M 'Cash expenditure on information and communication'
HHExpNFPhone_CRD_1M 'Credit expenditure on information and communication'
HHExpNFPhone_GiftAid_1M 'Assistance expenditure value on information and communication'
HHExpNFAlcTobac_MN_1M 'Cash expenditure on alcoholic beverages and tobacco'
HHExpNFAlcTobac_CRD_1M 'Credit expenditure on alcoholic beverages and tobacco'
HHExpNFAlcTobac_GiftAid_1M 'Assistance expenditure value on alcoholic beverages and tobacco'
HHExpNFSpec1_MN_1M 'Cash expenditure on [specific to country]'
HHExpNFSpec1_CRD_1M 'Credit expenditure on [Specific to country]'
HHExpNFSpec1_GiftAid_1M 'Assistance expenditure value on [Specific to country]'.
*** Non-food expenditure (6 months recall) ***.
Variable labels
HHExpNFMedServ_MN_6M 'Cash expenditure on health services'
HHExpNFMedServ_CRD_6M 'Credit expenditure on health services'
HHExpNFMedServ_GiftAid_6M 'Assistance expenditure value on health services'
HHExpNFMedGood_MN_6M 'Cash expenditure on medicines & health products'
HHExpNFMedGood_CRD_6M 'Credit expenditure on medicines & health products'
HHExpNFMedGood_GiftAid_6M 'Assistance expenditure value on medicines & health products'
HHExpNFCloth_MN_6M 'Cash expenditure on clothing and footwear'
HHExpNFCloth_CRD_6M 'Credit expenditure on clothing and footwear'
HHExpNFCloth_GiftAid_6M 'Assistance expenditure value on clothing and footwear'
HHExpNFEduFee_MN_6M 'Cash expenditure on education services'
HHExpNFEduFee_CRD_6M 'Credit expenditure on education services'
HHExpNFEduFee_GiftAid_6M 'Assistance expenditure value on education services'
HHExpNFEduGood_MN_6M 'Cash expenditure on education goods'
HHExpNFEduGood_CRD_6M 'Credit expenditure on education goods'
HHExpNFEduGood_GiftAid_6M 'Assistance expenditure value on education goods'
HHExpNFRent_MN_6M 'Cash expenditure on rent'
HHExpNFRent_CRD_6M 'Credit expenditure on rent'
HHExpNFRent_GiftAid_6M 'Assistance expenditure value on rent'
HHExpNFHHSoft_MN_6M 'Cash expenditure on household non-durable furniture and routine maintenance'
HHExpNFHHSoft_CRD_6M 'Credit expenditure on household non-durable furniture and routine maintenance'
HHExpNFHHSoft_GiftAid_6M 'Assistance expenditure value on household non-durable furniture and routine maintenance'
HHExpNFSav_MN_6M 'Cash expenditure on savings'
HHExpNFSav_CRD_6M 'Credit expenditure on savings'
HHExpNFSav_GiftAid_6M 'Assistance expenditure value on savings'
HHExpNFDebt_MN_6M 'Cash expenditure on debt repayment'
HHExpNFDebt_CRD_6M 'Credit expenditure on debt repayment'
HHExpNFDebt_GiftAid_6M 'Assistance expenditure value on debt repayment'
HHExpNFInsurance_MN_6M 'Cash expenditure on insurance'
HHExpNFInsurance_CRD_6M 'Credit expenditure on insurance'
HHExpNFInsurance_GiftAid_6M 'Assistance expenditure value on insurance'.
*** Calculate the overall monthly non-food expenditure per household ***.
* Totals are calculated separately for cash, credit and aid/gift, then the
* 6-month recall totals are converted to a monthly equivalent (divide by 6)
* and added to the 30-day recall totals.
* Fix: the bare instruction line "make sure to transform it to 30 days" was not
* commented and would be a syntax error; the sums referenced HHExpNFSoft_*_6M,
* but the labelled variables are HHExpNFHHSoft_*_6M.
Compute HHExpNFTotal_MN_6M=sum(HHExpNFRent_MN_6M,HHExpNFMedServ_MN_6M,
HHExpNFMedGood_MN_6M, HHExpNFCloth_MN_6M, HHExpNFEduFee_MN_6M, HHExpNFEduGood_MN_6M,
HHExpNFHHSoft_MN_6M, HHExpNFSav_MN_6M, HHExpNFInsurance_MN_6M, HHExpNFDebt_MN_6M).
Compute HHExpNFTotal_MN_30D=sum(HHExpNFAlcTobac_MN_1M,HHExpNFHyg_MN_1M,
HHExpNFTransp_MN_1M, HHExpNFWat_MN_1M, HHExpNFDwelServ_MN_1M, HHExpNFElec_MN_1M, HHExpNFEnerg_MN_1M, HHExpNFPhone_MN_1M, HHExpNFSpec1_MN_1M).
* Sum the non-food 1-month and (monthlyised) 6-month expenditures.
Compute HHExpNFTotal_MN_1M=(HHExpNFTotal_MN_30D+HHExpNFTotal_MN_6M/6).
* Note: to reduce the number of variables in your dataset, you could run the delete function below.
delete variables HHExpNFTotal_MN_6M HHExpNFTotal_MN_30D.
Compute HHExpNFTotal_CRD_6M=sum(HHExpNFRent_CRD_6M, HHExpNFMedServ_CRD_6M,
HHExpNFMedGood_CRD_6M, HHExpNFCloth_CRD_6M, HHExpNFEduFee_CRD_6M, HHExpNFEduGood_CRD_6M,
HHExpNFHHSoft_CRD_6M, HHExpNFInsurance_CRD_6M, HHExpNFDebt_CRD_6M).
Compute HHExpNFTotal_CRD_30D = sum(HHExpNFAlcTobac_CRD_1M,HHExpNFHyg_CRD_1M,
HHExpNFTransp_CRD_1M, HHExpNFWat_CRD_1M, HHExpNFDwelServ_CRD_1M, HHExpNFElec_CRD_1M, HHExpNFEnerg_CRD_1M,
HHExpNFPhone_CRD_1M, HHExpNFSpec1_CRD_1M).
* Sum the non-food 1-month and (monthlyised) 6-month expenditures.
compute HHExpNFTotal_CRD_1M=(HHExpNFTotal_CRD_30D+HHExpNFTotal_CRD_6M/6).
* Note: to reduce the number of variables in your dataset, you could run the delete function below.
delete variables HHExpNFTotal_CRD_6M HHExpNFTotal_CRD_30D.
Compute HHExpNFTotal_GiftAid_6M=sum(HHExpNFRent_GiftAid_6M,HHExpNFMedServ_GiftAid_6M,
HHExpNFMedGood_GiftAid_6M, HHExpNFCloth_GiftAid_6M, HHExpNFEduFee_GiftAid_6M, HHExpNFEduGood_GiftAid_6M,
HHExpNFHHSoft_GiftAid_6M, HHExpNFSav_GiftAid_6M, HHExpNFInsurance_GiftAid_6M, HHExpNFDebt_GiftAid_6M).
Compute HHExpNFTotal_GiftAid_30D = sum(HHExpNFAlcTobac_GiftAid_1M, HHExpNFHyg_GiftAid_1M,
HHExpNFTransp_GiftAid_1M, HHExpNFWat_GiftAid_1M, HHExpNFDwelServ_GiftAid_1M, HHExpNFElec_GiftAid_1M, HHExpNFEnerg_GiftAid_1M, HHExpNFPhone_GiftAid_1M, HHExpNFSpec1_GiftAid_1M).
* Sum the non-food 1-month and (monthlyised) 6-month expenditures.
Compute HHExpNFTotal_GiftAid_1M=(HHExpNFTotal_GiftAid_30D+HHExpNFTotal_GiftAid_6M/6).
* Note: to reduce the number of variables in your dataset, you could run the delete function below.
delete variables HHExpNFTotal_GiftAid_6M HHExpNFTotal_GiftAid_30D.
Variable labels HHExpNFTotal_MN_1M 'Total non-food exp on cash'.
Variable labels HHExpNFTotal_CRD_1M 'Total non-food exp on credit'.
Variable labels HHExpNFTotal_GiftAid_1M 'Total non-food exp from gift aid'.
Execute.
***Calculate totals for food and non-food expenditure***.
Compute HHExpNFTotal_1M=sum(HHExpNFTotal_MN_1M, HHExpNFTotal_CRD_1M, HHExpNFTotal_GiftAid_1M).
Compute HHExpFood_1M=sum( HHExpFood_MN_1M, HHExp_Food_CRD_1M, HHExp_Food_Own_1M, HHExp_Food_GiftAid_1M).
EXECUTE.
***Note: For ECMEN analysis, do not include assistance and credit expenditure. Credit expenditures do not refer to credits repaid in the same month***.
* Only include cash and own production in the ECMEN aggregate.
* Please feel free to create variations according to the context.
* HHSize variable refers to total household size.
***Calculate total household and per capita expenditure***.
Compute HHExpTotal=HHExpFood_1M + HHExpNFTotal_1M.
Compute PCExpTotal=HHExpTotal/HHSize.
Variable labels PCExpTotal 'Monthly total per capita exp incl all food and non-food exp in cash, credit, assistance'.
Variable labels HHExpTotal 'Monthly total HH exp incl all food and non-food exp in cash, credit, assistance'.
Execute.
Frequencies HHExpTotal /statistics /histogram.
***Calculate total expenditure excluding assistance and credit for ECMEN analysis***.
Compute HHExp_ECMEN= HHExpFood_MN_1M+ HHExp_Food_Own_1M+ HHExpNFTotal_MN_1M.
Compute PCExp_ECMEN=HHExp_ECMEN/HHSize.
Variable labels PCExp_ECMEN 'Monthly total per capita exp for ECMEN exc assistance and credit'.
Variable labels HHExp_ECMEN 'Monthly total HH exp for ECMEN exc assistance and credit'.
Execute.
Frequencies HHExp_ECMEN /statistics /histogram.
* In order to calculate ECMEN, please enter the MEB manually as below.
* MEB_PC: Minimum expenditure basket per capita.
* MEB_HH: Minimum expenditure basket per household.
***Calculate ECMEN: Economic Capacity to Meet Essential Needs***.
* ECMEN = 1 when per-capita expenditure (excl. assistance and credit) exceeds the MEB.
If (PCExp_ECMEN <= MEB_PC) ECMEN=0.
If (PCExp_ECMEN > MEB_PC) ECMEN=1.
Variable labels ECMEN 'Percentage of HH with exp above MEB, excl. assistance, credit'.
Value labels ECMEN
0 'HH with no capacity'
1 'HH with capacity'.
Execute.
Frequencies ECMEN /statistics.
* Repeat the classification against the survival MEB (SMEB).
If (PCExp_ECMEN <= SMEB_PC) ECMEN_SMEB=0.
If (PCExp_ECMEN > SMEB_PC) ECMEN_SMEB=1.
Variable labels ECMEN_SMEB 'Percentage of HH with exp above SMEB, excl. assistance, credit'.
Value labels ECMEN_SMEB
0 'HH with no capacity'
1 'HH with capacity'.
Execute.
Frequencies ECMEN_SMEB /statistics.
***Recode ECMEN based on the MEB and SMEB cut-off points in the area/country for CARI calculation***.
IF (ECMEN=1) ECMEN_MEB=1.
IF (ECMEN=0 & ECMEN_SMEB=1) ECMEN_MEB=2.
IF (ECMEN=0 & ECMEN_SMEB=0) ECMEN_MEB=3.
***Recode the ECMEN_MEB variable into a 4pt scale for the CARI console***.
Recode ECMEN_MEB (1=1) (2=3) (3=4) INTO ECMEN_class_4pt.
Variable labels ECMEN_class_4pt 'ECMEN 4pt'.
EXECUTE.
Frequencies variables=ECMEN_class_4pt /ORDER=ANALYSIS.
Value labels ECMEN_class_4pt 1.00 'Least vulnerable' 3.00 'Vulnerable' 4.00 'Highly vulnerable'.
EXECUTE.
*** CARI (WITH ECMEN) ***.
* Coping capacity = mean of the livelihood coping and ECMEN domains.
* Fix: variable name was written with an embedded space (ECMEN _class_4pt).
Compute Mean_coping_capacity_ECMEN = MEAN (Max_coping_behaviour, ECMEN_class_4pt).
* CARI = mean of current consumption (FCS_4pt) and coping capacity, rounded to the nearest category.
Compute CARI_unrounded_ECMEN = MEAN (FCS_4pt, Mean_coping_capacity_ECMEN).
Compute CARI_ECMEN = RND (CARI_unrounded_ECMEN).
EXECUTE.
Value labels CARI_ECMEN 1 'Food secure' 2 'Marginally food secure' 3 'Moderately food insecure' 4 'Severely food insecure'.
EXECUTE.
Frequencies CARI_ECMEN.
* Create a population distribution table to explore how the domains interact within the different food security categories.
CTABLES
/VLABELS VARIABLES=ECMEN_class_4pt FCS_4pt Max_coping_behaviour DISPLAY=LABEL
/TABLE ECMEN_class_4pt [C] BY FCS_4pt [C] > Max_coping_behaviour [C][ROWPCT.COUNT PCT40.1]
/CATEGORIES VARIABLES=ECMEN_class_4pt ORDER=A KEY=VALUE EMPTY=EXCLUDE
/CATEGORIES VARIABLES=FCS_4pt Max_coping_behaviour ORDER=A KEY=VALUE EMPTY=INCLUDE.
| null | https://raw.githubusercontent.com/WFP-VAM/RAMResourcesScripts/236e1a630f053d27161c73dfe01a9ef04b7ebdcc/Indicators/Consolidated-Approach-to-Reporting-Indicators-of-Food-Security/CARI-indicator-calculation.sps | scheme | bottled water; soft drinks; juice)
bottled water; soft drinks; juice) | Methods to calculating the Consolidated Approach for Reporting Indicators of Food Security
******** CALCULATE CARI using FCS, rCSI, LCS and FES ********.
*** Food Consumption Score ***.
* Define labels.
Variable labels
FCSStap 'How many days over the last 7 days, did members of your household eat cereals, grains, roots and tubers?'
FCSPulse 'How many days over the last 7 days, did members of your household eat legumes/nuts?'
FCSDairy 'How many days over the last 7 days, did members of your household drink/eat milk and other dairy products?'
FCSPr 'How many days over the last 7 days, did members of your household eat meat, fish and eggs?'
FCSVeg 'How many days over the last 7 days, did members of your household eat vegetables and leaves?'
FCSFruit 'How many days over the last 7 days, did members of your household eat fruits?'
FCSFat 'How many days over the last 7 days, did members of your household consume oil?'
FCSSugar 'How many days over the last 7 days, did members of your household eat sugar, or sweets?'
FCSCond 'How many days over the last 7 days, did members of your household eat condiments / spices?'.
* FCS = weighted sum of the food-group frequencies (condiments carry weight 0 and are excluded).
Compute FCS = sum(FCSStap*2, FCSPulse*3, FCSDairy*4, FCSPr*4, FCSVeg*1, FCSFruit*1, FCSFat*0.5, FCSSugar*0.5).
Variable labels FCS "Food Consumption Score".
EXECUTE.
* Use this when analyzing a country with low consumption of sugar and oil - thresholds 21-35.
Recode FCS (lowest thru 21 =1) (21.5 thru 35 =2) (35.5 thru highest =3) into FCSCat21.
Variable labels FCSCat21 'FCS Categories'.
EXECUTE.
* Define value labels and properties for "FCS Categories".
Value labels FCSCat21 1.00 'Poor' 2.00 'Borderline' 3.00 'Acceptable '.
EXECUTE.
* Important note: pay attention to the threshold used by your CO when selecting the syntax (21 cat. vs 28 cat.).
* Use this when analyzing a country with high consumption of sugar and oil - thresholds 28-42.
* NOTE(review): the recode below was commented out in the original while its
* FCSCat28 labels remained active (which would error); it is made active here
* to match the identical ECMEN section of this file - confirm with the CO
* which threshold set applies.
Recode FCS (lowest thru 28 =1) (28.5 thru 42 =2) (42.5 thru highest =3) into FCSCat28.
Variable labels FCSCat28 'FCS Categories'.
EXECUTE.
Value labels FCSCat28 1.00 'Poor' 2.00 'Borderline' 3.00 'Acceptable '.
EXECUTE.
* Recode the FCS categories onto the 4-point CARI scale (Acceptable=1, Borderline=3, Poor=4).
Recode FCSCat21 (1=4) (2=3) (3=1) INTO FCS_4pt.
Variable labels FCS_4pt '4pt FCG'.
EXECUTE.
Frequencies VARIABLES=FCS_4pt /ORDER=ANALYSIS.
Value labels FCS_4pt 1.00 'Acceptable' 3.00 'Borderline' 4.00 'Poor'.
EXECUTE.
***Reduced Coping Strategy Index***.
***Define variable labels (straight quotes; typographic quotes are invalid SPSS syntax).
Variable labels
rCSILessQlty 'Rely on less preferred and less expensive food in the past 7 days'
rCSIBorrow 'Borrow food or rely on help from a relative or friend in the past 7 days'
rCSIMealNb 'Reduce number of meals eaten in a day in the past 7 days'
rCSIMealSize 'Limit portion size of meals at meal times in the past 7 days'
rCSIMealAdult 'Restrict consumption by adults in order for small children to eat in the past 7 days'.
***rCSI = weighted sum of the five standard coping strategies (severity weights 1, 2, 1, 1, 3).
Compute rCSI = sum(rCSILessQlty*1,rCSIBorrow*2,rCSIMealNb*1,rCSIMealSize*1,rCSIMealAdult*3).
Variable labels rCSI 'Reduced coping strategies index (rCSI)'.
EXECUTE.
FREQUENCIES VARIABLES=rCSI
/FORMAT=NOTABLE
/STATISTICS=MEAN
/ORDER=ANALYSIS.
***Combining rCSI with FCS_4pt for CARI (current consumption): acceptable consumption with rCSI >= 4 is downgraded to category 2.
Do if (rCSI >= 4).
Recode FCS_4pt (1=2).
End if.
EXECUTE.
Value labels FCS_4pt 1.00 'Acceptable' 2.00 ' Acceptable and rCSI>4' 3.00 'Borderline' 4.00 'Poor'.
EXECUTE.
Frequencies FCS_4pt.
***Livelihood Coping ***.
***Define shared response codes for all ten LCS questions (straight quotes; the apostrophe string uses double-quote delimiters).
Value labels
Lcs_stress_DomAsset
Lcs_stress_CrdtFood
Lcs_stress_saving
Lcs_stress_BorrowCash
Lcs_crisis_ProdAsset
Lcs_crisis_HealthEdu
Lcs_crisis_OutSchool
Lcs_em_ResAsset
Lcs_em_Begged
Lcs_em_IllegalAct
10 'No, because I did not need to'
20 'No because I already sold those assets or have engaged in this activity within the last 12 months and cannot continue to do it'
30 'Yes'
9999 "Not applicable (don't have children/ these assets)".
***Stress strategies*** (must have 4 stress strategies to calculate LCS; if you have more, use the most frequently applied strategies).
***Define variable labels.
Variable labels
Lcs_stress_DomAsset 'Sold household assets/goods (radio, furniture, refrigerator, television, jewellery, etc.) due to lack of food'
Lcs_stress_CrdtFood 'Purchased food/non-food on credit (incur debts) due to lack of food'
Lcs_stress_saving 'Spent savings due to lack of food'
Lcs_stress_BorrowCash 'Borrow money due to lack of food'.
***A household engaged in a strategy if it applied it (30) or has already exhausted it (20).
Do if (Lcs_stress_DomAsset = 20) | (Lcs_stress_DomAsset = 30) | (Lcs_stress_CrdtFood = 20) | (Lcs_stress_CrdtFood = 30) | (Lcs_stress_saving =20) | (Lcs_stress_saving =30) | (Lcs_stress_BorrowCash =20) | (Lcs_stress_BorrowCash =30).
Compute stress_coping =1.
Else.
Compute stress_coping =0.
End if.
EXECUTE.
***Crisis strategies*** (must have 3 crisis strategies to calculate LCS; if you have more, use the most frequently applied strategies).
***Define variable labels.
Variable labels
Lcs_crisis_ProdAsset 'Sold productive assets or means of transport (sewing machine, wheelbarrow, bicycle, car, etc.) due to lack of food'
Lcs_crisis_HealthEdu 'Reduced expenses on health (including drugs) or education due to lack of food'
Lcs_crisis_OutSchool 'Withdrew children from school due to lack of food'.
Do if (Lcs_crisis_ProdAsset = 20) | (Lcs_crisis_ProdAsset =30) | (Lcs_crisis_HealthEdu =20) | (Lcs_crisis_HealthEdu=30) | (Lcs_crisis_OutSchool =20) | (Lcs_crisis_OutSchool =30).
Compute crisis_coping =1.
Else.
Compute crisis_coping =0.
End if.
EXECUTE.
***Emergency strategies*** (must have 3 emergency strategies to calculate LCS; if you have more, use the most frequently applied strategies).
***Define variable labels.
Variable labels
Lcs_em_ResAsset 'Mortgaged/Sold house or land due to lack of food'
Lcs_em_Begged 'Begged and/or scavenged (asked strangers for money/food) due to lack of food'
Lcs_em_IllegalAct 'Engaged in illegal income activities (theft, prostitution) due to lack of food'.
Do if (Lcs_em_ResAsset = 20) | (Lcs_em_ResAsset = 30) | (Lcs_em_Begged = 20) | (Lcs_em_Begged =30) | (Lcs_em_IllegalAct = 20) | (Lcs_em_IllegalAct = 30).
Compute emergency_coping =1.
Else.
Compute emergency_coping = 0.
End if.
EXECUTE.
*** Label the new indicator variables.
Variable labels stress_coping 'Did the HH engage in stress coping strategies?'.
Variable labels crisis_coping 'Did the HH engage in crisis coping strategies?'.
Variable labels emergency_coping 'Did the HH engage in emergency coping strategies?'.
*** Recode indicators to severity scores (stress=2, crisis=3, emergency=4) and keep the most severe per household.
Recode stress_coping (0=0) (1=2).
Recode crisis_coping (0=0) (1=3).
Recode emergency_coping (0=0) (1=4).
Compute Max_coping_behaviour=MAX(stress_coping, crisis_coping, emergency_coping).
Recode Max_coping_behaviour (0=1).
Value labels Max_coping_behaviour 1 'HH not adopting coping strategies' 2 'Stress coping strategies ' 3 'Crisis coping strategies ' 4 'Emergencies coping strategies'.
Variable Labels Max_coping_behaviour 'Summary of asset depletion'.
EXECUTE.
***Food Expenditure Share ***.
**Important note: assistance is considered in both assessments as well as monitoring.
***food expenditure***.
***Define variable labels (straight quotes; typographic quotes are invalid SPSS syntax).
*Important note: add recall periods of _1M or _7D to the variable names below depending on what has been selected for your CO. It is recommended to follow standard recall periods as in the module.
/*Expenditure on cereals (maize, rice, sorghum, wheat, flour of cereals, bread, pasta...)
/*Expenditure on tubers (potatoes, sweet potatoes, cassava, plantains, yams)
/*Expenditure on fruit (fresh and frozen fruit)
/*Expenditure on vegetables (dark green leafy vegetables, orange vegetable, other vegetable)
/*Expenditure on meat (fresh, chilled, frozen meat and poultry, dry and slated meat)
/*Expenditure on eggs
/*Expenditure on fish (fresh and frozen fish and other seafood)
/*Expenditure on oil, fat, and butter
/*Expenditure on milk, cheese, and yogurt
/*Expenditure on sugar, confectionery, and desserts
/*Expenditure on condiments (salt, spices, cubes, fish powder)
/*Expenditure on other meals/snacks consumed outside the home
/*Expenditure on pulses (beans, peas, lentils, nuts in shell or shelled)
Variable labels
HHExpFCer_MN_1M 'Cash expenditure value on cereals'
HHExpFCer_CRD_1M 'Credit expenditure value on cereals'
HHExpFCer_GiftAid_1M 'Assistance expenditure value on cereals'
HHExpFCer_Own_1M 'Own production expenditure value on cereals'
HHExpFTub_MN_1M 'Cash expenditure value on tubers'
HHExpFTub_CRD_1M 'Credit expenditure value on tubers'
HHExpFTub_GiftAid_1M 'Assistance expenditure value on tubers'
HHExpFTub_Own_1M 'Own production expenditure value on tubers'
HHExpFPuls_MN_1M 'Cash expenditure on pulses & nuts'
HHExpFPuls_CRD_1M 'Credit expenditure on pulses & nuts'
HHExpFPuls_GiftAid_1M 'Assistance expenditure value on pulses & nuts'
HHExpFPuls_Own_1M 'Own production expenditure value on pulses & nuts'
HHExpFVeg_MN_1M 'Cash expenditure on vegetables'
HHExpFVeg_CRD_1M 'Credit expenditure on vegetables'
HHExpFVeg_GiftAid_1M 'Assistance expenditure value on vegetables'
HHExpFVeg_Own_1M 'Own production expenditure value on vegetables'
HHExpFFrt_MN_1M 'Cash expenditure on fruits'
HHExpFFrt_CRD_1M 'Credit expenditure on fruits'
HHExpFFrt_GiftAid_1M 'Assistance expenditure value on fruits'
HHExpFFrt_Own_1M 'Own production expenditure value on fruits'
HHExpFAnimMeat_MN_1M 'Cash expenditure on meat'
HHExpFAnimMeat_CRD_1M 'Credit expenditure on meat'
HHExpFAnimMeat_GiftAid_1M 'Assistance expenditure value on meat'
HHExpFAnimMeat_Own_1M 'Own production expenditure value on meat'
HHExpFAnimFish_MN_1M 'Cash expenditure on fish'
HHExpFAnimFish_CRD_1M 'Credit expenditure on fish'
HHExpFAnimFish_GiftAid_1M 'Assistance expenditure value on fish'
HHExpFAnimFish_Own_1M 'Own production expenditure value on fish'
HHExpFFats_MN_1M 'Cash expenditure on oil/fat/butter'
HHExpFFats_CRD_1M 'Credit expenditure on oil/fat/butter'
HHExpFFats_GiftAid_1M 'Assistance expenditure value on oil/fat/butter'
HHExpFFats_Own_1M 'Own production expenditure value on oil/fat/butter'
HHExpFDairy_MN_1M 'Cash expenditure on milk/dairy products'
HHExpFDairy_CRD_1M 'Credit expenditure on milk/dairy products'
HHExpFDairy_GiftAid_1M 'Assistance expenditure value on milk/dairy products'
HHExpFDairy_Own_1M 'Own production expenditure value on milk/dairy products'
HHExpFAnimEgg_MN_1M 'Cash expenditure on eggs'
HHExpFAnimEgg_CRD_1M 'Credit expenditure on eggs'
HHExpFAnimEgg_GiftAid_1M 'Assistance expenditure value on eggs'
HHExpFAnimEgg_Own_1M 'Own production expenditure value on eggs'
HHExpFSgr_MN_1M 'Cash expenditure on sugar'
HHExpFSgr_CRD_1M 'Credit expenditure on sugar'
HHExpFSgr_GiftAid_1M 'Assistance expenditure value on sugar'
HHExpFSgr_Own_1M 'Own production expenditure value on sugar'
HHExpFCond_MN_1M 'Cash expenditure on condiments'
HHExpFCond_CRD_1M 'Credit expenditure on condiments'
HHExpFCond_GiftAid_1M 'Assistance expenditure value on condiments'
HHExpFCond_Own_1M 'Own production expenditure value on condiments'
HHExpFBeverage_MN_1M 'Cash expenditure on beverages'
HHExpFBeverage_CRD_1M 'Credit expenditure on beverages'
HHExpFBeverage_GiftAid_1M 'Assistance expenditure value on beverages'
HHExpFBeverage_Own_1M 'Own production expenditure value on beverages'
HHExpFOut_MN_1M 'Cash expenditure on snacks consumed outside the home'
HHExpFOut_CRD_1M 'Credit expenditure on snacks consumed outside the home'
HHExpFOut_GiftAid_1M 'Assistance expenditure value on snacks consumed outside the home'
HHExpFOut_Own_1M 'Own production expenditure value on snacks consumed outside the home'.
Execute.
***Calculate the overall monthly food expenditure.
/*If the expenditure was calculated separately for cash, credit, aid/gift and own production, calculate the overall total by summing them up.
/*Make sure all recall periods are transformed to 30 days before summing.
Compute HHExpFood_MN_1M =sum(HHExpFCer_MN_1M, HHExpFTub_MN_1M,
HHExpFPuls_MN_1M, HHExpFVeg_MN_1M, HHExpFFrt_MN_1M, HHExpFAnimMeat_MN_1M,
HHExpFAnimFish_MN_1M, HHExpFFats_MN_1M, HHExpFDairy_MN_1M,
HHExpFAnimEgg_MN_1M, HHExpFSgr_MN_1M, HHExpFCond_MN_1M, HHExpFBeverage_MN_1M,
HHExpFOut_MN_1M).
Compute HHExp_Food_CRD_1M =sum(HHExpFCer_CRD_1M, HHExpFTub_CRD_1M,
HHExpFPuls_CRD_1M, HHExpFVeg_CRD_1M, HHExpFFrt_CRD_1M, HHExpFAnimMeat_CRD_1M,
HHExpFAnimFish_CRD_1M, HHExpFFats_CRD_1M, HHExpFDairy_CRD_1M,
HHExpFAnimEgg_CRD_1M, HHExpFSgr_CRD_1M, HHExpFCond_CRD_1M, HHExpFBeverage_CRD_1M,
HHExpFOut_CRD_1M).
Compute HHExp_Food_GiftAid_1M =sum(HHExpFCer_GiftAid_1M, HHExpFTub_GiftAid_1M,
HHExpFPuls_GiftAid_1M, HHExpFVeg_GiftAid_1M, HHExpFFrt_GiftAid_1M, HHExpFAnimMeat_GiftAid_1M,
HHExpFAnimFish_GiftAid_1M, HHExpFFats_GiftAid_1M, HHExpFDairy_GiftAid_1M,
HHExpFAnimEgg_GiftAid_1M, HHExpFSgr_GiftAid_1M, HHExpFCond_GiftAid_1M, HHExpFBeverage_GiftAid_1M,
HHExpFOut_GiftAid_1M).
Compute HHExp_Food_Own_1M =sum(HHExpFCer_Own_1M, HHExpFTub_Own_1M,
HHExpFPuls_Own_1M, HHExpFVeg_Own_1M, HHExpFFrt_Own_1M, HHExpFAnimMeat_Own_1M,
HHExpFAnimFish_Own_1M, HHExpFFats_Own_1M, HHExpFDairy_Own_1M,
HHExpFAnimEgg_Own_1M, HHExpFSgr_Own_1M, HHExpFCond_Own_1M, HHExpFBeverage_Own_1M,
HHExpFOut_Own_1M).
/*Fixed: label target renamed HHExp_Food_MN_1M -> HHExpFood_MN_1M to match the COMPUTE above and its use further below.
Variable labels
HHExpFood_MN_1M 'Total food expenditure on cash'
HHExp_Food_CRD_1M 'Total food expenditure on credit'
HHExp_Food_GiftAid_1M 'Total food expenditure value from assistance'
HHExp_Food_Own_1M 'Total food expenditure value from own production'.
Execute.
***Non-food expenditure (30 days)***.
***Define variable labels (straight single quotes throughout; typographic quotes are invalid SPSS syntax).
Variable labels
HHExpNFHyg_MN_1M 'Cash expenditure on soap, hygiene & personal care items'
HHExpNFHyg_CRD_1M 'Credit expenditure on soap, hygiene & personal care items'
HHExpNFHyg_GiftAid_1M 'Assistance expenditure value on soap, hygiene & personal care items'
HHExpNFTransp_MN_1M 'Cash expenditure on transport'
HHExpNFTransp_CRD_1M 'Credit expenditure on transport'
HHExpNFTransp_GiftAid_1M 'Assistance expenditure value on transport'
HHExpNFWat_MN_1M 'Cash expenditure on water supply for domestic consumption'
HHExpNFWat_CRD_1M 'Credit expenditure on water supply for domestic consumption'
HHExpNFWat_GiftAid_1M 'Assistance expenditure value on water supply for domestic consumption'
HHExpNFElec_MN_1M 'Cash expenditure on electricity'
HHExpNFElec_CRD_1M 'Credit expenditure on electricity'
HHExpNFElec_GiftAid_1M 'Assistance expenditure value on electricity'
HHExpNFEnerg_MN_1M 'Cash expenditure on energy (cooking, heating, lighting) from other sources (not electricity)'
HHExpNFEnerg_CRD_1M 'Credit expenditure on energy (cooking, heating, lighting) from other sources (not electricity)'
HHExpNFEnerg_GiftAid_1M 'Assistance expenditure value on energy (cooking, heating, lighting) from other sources (not electricity)'
HHExpNFDwelServ_MN_1M 'Cash expenditure on miscellaneous services relating to the dwelling'
HHExpNFDwelServ_CRD_1M 'Credit expenditure on miscellaneous services relating to the dwelling'
HHExpNFDwelServ_GiftAid_1M 'Assistance expenditure value on miscellaneous services relating to the dwelling'
HHExpNFPhone_MN_1M 'Cash expenditure on information and communication'
HHExpNFPhone_CRD_1M 'Credit expenditure on information and communication'
HHExpNFPhone_GiftAid_1M 'Assistance expenditure value on information and communication'
HHExpNFAlcTobac_MN_1M 'Cash expenditure on alcoholic beverages and tobacco'
HHExpNFAlcTobac_CRD_1M 'Credit expenditure on alcoholic beverages and tobacco'
HHExpNFAlcTobac_GiftAid_1M 'Assistance expenditure value on alcoholic beverages and tobacco'
HHExpNFSpec1_MN_1M 'Cash expenditure on [specific to country]'
HHExpNFSpec1_CRD_1M 'Credit expenditure on [Specific to country]'
HHExpNFSpec1_GiftAid_1M 'Assistance expenditure value on [Specific to country]'.
***Non-food expenditure (6 months) ***.
/*Fixed: removed the stray space in the variable name HHExpNFRent_GiftAid_6M below.
Variable labels
HHExpNFMedServ_MN_6M 'Cash expenditure on health services'
HHExpNFMedServ_CRD_6M 'Credit expenditure on health services'
HHExpNFMedServ_GiftAid_6M 'Assistance expenditure value on health services'
HHExpNFMedGood_MN_6M 'Cash expenditure on medicines & health products'
HHExpNFMedGood_CRD_6M 'Credit expenditure on medicines & health products'
HHExpNFMedGood_GiftAid_6M 'Assistance expenditure value on medicines & health products'
HHExpNFCloth_MN_6M 'Cash expenditure on clothing and footwear'
HHExpNFCloth_CRD_6M 'Credit expenditure on clothing and footwear'
HHExpNFCloth_GiftAid_6M 'Assistance expenditure value on clothing and footwear'
HHExpNFEduFee_MN_6M 'Cash expenditure on education services'
HHExpNFEduFee_CRD_6M 'Credit expenditure on education services'
HHExpNFEduFee_GiftAid_6M 'Assistance expenditure value on education services'
HHExpNFEduGood_MN_6M 'Cash expenditure on education goods'
HHExpNFEduGood_CRD_6M 'Credit expenditure on education goods'
HHExpNFEduGood_GiftAid_6M 'Assistance expenditure value on education goods'
HHExpNFRent_MN_6M 'Cash expenditure on rent'
HHExpNFRent_CRD_6M 'Credit expenditure on rent'
HHExpNFRent_GiftAid_6M 'Assistance expenditure value on rent'
HHExpNFHHSoft_MN_6M 'Cash expenditure on household non-durable furniture and routine maintenance'
HHExpNFHHSoft_CRD_6M 'Credit expenditure on household non-durable furniture and routine maintenance'
HHExpNFHHSoft_GiftAid_6M 'Assistance expenditure value on household non-durable furniture and routine maintenance'
HHExpNFSav_MN_6M 'Cash expenditure on savings'
HHExpNFSav_CRD_6M 'Credit expenditure on savings'
HHExpNFSav_GiftAid_6M 'Assistance expenditure value on savings'
HHExpNFDebt_MN_6M 'Cash expenditure on debt repayment'
HHExpNFDebt_CRD_6M 'Credit expenditure on debt repayment'
HHExpNFDebt_GiftAid_6M 'Assistance expenditure value on debt repayment'
HHExpNFInsurance_MN_6M 'Cash expenditure on insurance'
HHExpNFInsurance_CRD_6M 'Credit expenditure on insurance'
HHExpNFInsurance_GiftAid_6M 'Assistance expenditure value on insurance'.
***Calculate the overall monthly non-food expenditure.
/*Calculate separately for cash, credit and aid/gift, then derive the monthly total.
/*6-month recall items are divided by 6 to convert them to a 30-day equivalent.
/*Fixed: HHExpNFSoft_*_6M -> HHExpNFHHSoft_*_6M in the three sums below, matching the variables defined above.
Compute HHExpNFTotal_MN_6M=sum (HHExpNFRent_MN_6M, HHExpNFMedServ_MN_6M,
HHExpNFMedGood_MN_6M, HHExpNFCloth_MN_6M, HHExpNFEduFee_MN_6M, HHExpNFEduGood_MN_6M,
HHExpNFHHSoft_MN_6M, HHExpNFSav_MN_6M, HHExpNFInsurance_MN_6M, HHExpNFDebt_MN_6M).
Compute HHExpNFTotal_MN_30D=sum (HHExpNFAlcTobac_MN_1M, HHExpNFHyg_MN_1M,
HHExpNFTransp_MN_1M, HHExpNFWat_MN_1M, HHExpNFDwelServ_MN_1M, HHExpNFElec_MN_1M, HHExpNFEnerg_MN_1M, HHExpNFPhone_MN_1M, HHExpNFSpec1_MN_1M).
/*Sum the non-food 1 month and 6-month expenditures (6-month total converted to monthly).
Compute HHExpNFTotal_MN_1M=(HHExpNFTotal_MN_30D + (HHExpNFTotal_MN_6M/6)).
/*note: to reduce the number of variables in your dataset, you could run the delete function below.
delete variables HHExpNFTotal_MN_6M HHExpNFTotal_MN_30D.
/*Fixed: added HHExpNFSav_CRD_6M below, which was missing although it is included in the cash
  and assistance totals - NOTE(review): confirm this inclusion with your CO.
Compute HHExpNFTotal_CRD_6M=sum(HHExpNFRent_CRD_6M, HHExpNFMedServ_CRD_6M,
HHExpNFMedGood_CRD_6M, HHExpNFCloth_CRD_6M, HHExpNFEduFee_CRD_6M, HHExpNFEduGood_CRD_6M,
HHExpNFHHSoft_CRD_6M, HHExpNFSav_CRD_6M, HHExpNFInsurance_CRD_6M, HHExpNFDebt_CRD_6M).
Compute HHExpNFTotal_CRD_30D = sum(HHExpNFAlcTobac_CRD_1M,HHExpNFHyg_CRD_1M,
HHExpNFTransp_CRD_1M, HHExpNFWat_CRD_1M, HHExpNFDwelServ_CRD_1M, HHExpNFElec_CRD_1M, HHExpNFEnerg_CRD_1M, HHExpNFPhone_CRD_1M, HHExpNFSpec1_CRD_1M).
/*Sum the non-food 1 month and 6-month expenditures (6-month total converted to monthly).
Compute HHExpNFTotal_CRD_1M=(HHExpNFTotal_CRD_30D + (HHExpNFTotal_CRD_6M/6)).
/*note: to reduce the number of variables in your dataset, you could run the delete function below.
delete variables HHExpNFTotal_CRD_6M HHExpNFTotal_CRD_30D.
Compute HHExpNFTotal_GiftAid_6M=sum(HHExpNFRent_GiftAid_6M,HHExpNFMedServ_GiftAid_6M,
HHExpNFMedGood_GiftAid_6M, HHExpNFCloth_GiftAid_6M, HHExpNFEduFee_GiftAid_6M, HHExpNFEduGood_GiftAid_6M,
HHExpNFHHSoft_GiftAid_6M, HHExpNFSav_GiftAid_6M, HHExpNFInsurance_GiftAid_6M, HHExpNFDebt_GiftAid_6M).
Compute HHExpNFTotal_GiftAid_30D = sum(HHExpNFAlcTobac_GiftAid_1M, HHExpNFHyg_GiftAid_1M,
HHExpNFTransp_GiftAid_1M, HHExpNFWat_GiftAid_1M, HHExpNFDwelServ_GiftAid_1M, HHExpNFElec_GiftAid_1M, HHExpNFEnerg_GiftAid_1M, HHExpNFPhone_GiftAid_1M, HHExpNFSpec1_GiftAid_1M).
/*Sum the non-food 1 month and 6-month expenditures (6-month total converted to monthly).
Compute HHExpNFTotal_GiftAid_1M=(HHExpNFTotal_GiftAid_30D + (HHExpNFTotal_GiftAid_6M/6)).
/*note: to reduce the number of variables in your dataset, you could run the delete function below.
delete variables HHExpNFTotal_GiftAid_6M HHExpNFTotal_GiftAid_30D.
Variable labels HHExpNFTotal_MN_1M 'Total non-food exp on cash'.
Variable labels HHExpNFTotal_CRD_1M 'Total non-food exp on credit'.
Variable labels HHExpNFTotal_GiftAid_1M 'Total non-food exp from gift aid'.
Execute.
***Calculate totals for food and non-food expenditure (all modalities combined).
Compute HHExpNFTotal_1M=sum(HHExpNFTotal_MN_1M, HHExpNFTotal_CRD_1M, HHExpNFTotal_GiftAid_1M).
Compute HHExpFood_1M=sum(HHExpFood_MN_1M, HHExp_Food_CRD_1M, HHExp_Food_Own_1M, HHExp_Food_GiftAid_1M).
EXECUTE.
**Food Expenditure Share = food / (food + non-food).
Compute FES= HHExpFood_1M /SUM(HHExpFood_1M , HHExpNFTotal_1M).
/*Fixed: added the missing command terminator (period) on the label command below.
Variable labels FES 'Household food expenditure share'.
EXECUTE.
/*FES categories: <50% = 1, 50-<65% = 2, 65-<75% = 3, >=75% = 4.
Recode FES (Lowest thru .4999999=1) (.50 thru .64999999=2) (.65 thru .74999999=3) (.75 thru Highest=4)
into Foodexp_4pt.
Variable labels Foodexp_4pt 'Food expenditure share categories'.
EXECUTE.
***CARI (WITH FOOD EXPENDITURE) ***
***CARI console: coping capacity = mean of asset depletion (Max_coping_behaviour) and food
    expenditure share (Foodexp_4pt); CARI = mean of current status (FCS_4pt) and coping
    capacity, rounded to the nearest of the four classes.
Compute Mean_coping_capacity_FES = MEAN (Max_coping_behaviour, Foodexp_4pt).
Compute CARI_unrounded_FES = MEAN (FCS_4pt, Mean_coping_capacity_FES).
Compute CARI_FES = RND (CARI_unrounded_FES).
Execute.
Value labels CARI_FES 1 'Food secure' 2 'Marginally food secure' 3 'Moderately food insecure' 4 'Severely food insecure'.
EXECUTE.
Frequencies CARI_FES.
***create population distribution table, to explore how the domains interact within the different food security categories.
CTABLES
/VLABELS VARIABLES=Foodexp_4pt FCS_4pt Max_coping_behaviour DISPLAY=LABEL
/TABLE Foodexp_4pt [C] BY FCS_4pt [C] > Max_coping_behaviour [C][ROWPCT.COUNT PCT40.1]
/CATEGORIES VARIABLES=Foodexp_4pt ORDER=A KEY=VALUE EMPTY=EXCLUDE
/CATEGORIES VARIABLES=FCS_4pt Max_coping_behaviour ORDER=A KEY=VALUE EMPTY=INCLUDE.
***CALCULATE CARI using FCS, rCSI, LCS and ECMEN.
***Food Consumption Score***.
***Define variable labels. Note: string delimiters must be straight quotes; typographic quotes are invalid SPSS syntax.
Variable labels
FCSStap 'How many days over the last 7 days, did members of your household eat cereals, grains, roots and tubers?'
FCSPulse 'How many days over the last 7 days, did members of your household eat legumes/nuts?'
FCSDairy 'How many days over the last 7 days, did members of your household drink/eat milk and other dairy products?'
FCSPr 'How many days over the last 7 days, did members of your household eat meat, fish and eggs?'
FCSVeg 'How many days over the last 7 days, did members of your household eat vegetables and leaves?'
FCSFruit 'How many days over the last 7 days, did members of your household eat fruits?'
FCSFat 'How many days over the last 7 days, did members of your household consume oil?'
FCSSugar 'How many days over the last 7 days, did members of your household eat sugar, or sweets?'
FCSCond 'How many days over the last 7 days, did members of your household eat condiments / spices?'.
***FCS = weighted sum of 7-day food-group frequencies; condiments (FCSCond) carry weight 0 and are excluded.
Compute FCS = sum(FCSStap*2, FCSPulse*3, FCSDairy*4, FCSPr*4, FCSVeg*1, FCSFruit*1, FCSFat*0.5, FCSSugar*0.5).
Variable labels FCS "Food Consumption Score".
EXECUTE.
***Use this when analyzing a country with low consumption of sugar and oil - thresholds 21-35.
Recode FCS (lowest thru 21 =1) (21.5 thru 35 =2) (35.5 thru highest =3) into FCSCat21.
Variable labels FCSCat21 'FCS Categories'.
EXECUTE.
*** define value labels and properties for "FCS Categories".
Value labels FCSCat21 1.00 'Poor' 2.00 'Borderline' 3.00 'Acceptable '.
EXECUTE.
*** Important note: pay attention to the threshold used by your CO when selecting the syntax (21 cat. vs 28 cat.).
*** Use this when analyzing a country with high consumption of sugar and oil - thresholds 28-42.
Recode FCS (lowest thru 28 =1) (28.5 thru 42 =2) (42.5 thru highest =3) into FCSCat28.
Variable labels FCSCat28 'FCS Categories'.
EXECUTE.
*** define value labels and properties for "FCS Categories".
Value labels FCSCat28 1.00 'Poor' 2.00 'Borderline' 3.00 'Acceptable '.
EXECUTE.
***FCS_4pt is derived from the 21/35 categories; switch FCSCat21 to FCSCat28 below if your CO uses the 28/42 thresholds.
Recode FCSCat21 (1=4) (2=3) (3=1) INTO FCS_4pt.
Variable labels FCS_4pt '4pt FCG'.
EXECUTE.
Frequencies VARIABLES=FCS_4pt /ORDER=ANALYSIS.
Value labels FCS_4pt 1.00 'Acceptable' 3.00 'Borderline' 4.00 'Poor'.
EXECUTE.
***Reduced Coping Strategy Index***.
***Define variable labels (straight quotes; typographic quotes are invalid SPSS syntax).
Variable labels
rCSILessQlty 'Rely on less preferred and less expensive food in the past 7 days'
rCSIBorrow 'Borrow food or rely on help from a relative or friend in the past 7 days'
rCSIMealNb 'Reduce number of meals eaten in a day in the past 7 days'
rCSIMealSize 'Limit portion size of meals at meal times in the past 7 days'
rCSIMealAdult 'Restrict consumption by adults in order for small children to eat in the past 7 days'.
***rCSI = weighted sum of the five standard coping strategies (severity weights 1, 2, 1, 1, 3).
Compute rCSI = sum(rCSILessQlty*1,rCSIBorrow*2,rCSIMealNb*1,rCSIMealSize*1,rCSIMealAdult*3).
Variable labels rCSI 'Reduced coping strategies index (rCSI)'.
EXECUTE.
FREQUENCIES VARIABLES=rCSI
/FORMAT=NOTABLE
/STATISTICS=MEAN
/ORDER=ANALYSIS.
***Combining rCSI with FCS_4pt for CARI (current consumption): acceptable consumption with rCSI >= 4 is downgraded to category 2.
Do if (rCSI >= 4).
Recode FCS_4pt (1=2).
End if.
EXECUTE.
Value labels FCS_4pt 1.00 'Acceptable' 2.00 ' Acceptable and rCSI>4' 3.00 'Borderline' 4.00 'Poor'.
EXECUTE.
Frequencies FCS_4pt.
***Livelihood Coping ***.
***Define shared response codes for all ten LCS questions (straight quotes; the apostrophe string uses double-quote delimiters).
Value labels
Lcs_stress_DomAsset
Lcs_stress_CrdtFood
Lcs_stress_saving
Lcs_stress_BorrowCash
Lcs_crisis_ProdAsset
Lcs_crisis_HealthEdu
Lcs_crisis_OutSchool
Lcs_em_ResAsset
Lcs_em_Begged
Lcs_em_IllegalAct
10 'No, because I did not need to'
20 'No because I already sold those assets or have engaged in this activity within the last 12 months and cannot continue to do it'
30 'Yes'
9999 "Not applicable (don't have children/ these assets)".
***Stress strategies*** (must have 4 stress strategies to calculate LCS; if you have more, use the most frequently applied strategies).
***Define variable labels.
Variable labels
Lcs_stress_DomAsset 'Sold household assets/goods (radio, furniture, refrigerator, television, jewellery, etc.) due to lack of food'
Lcs_stress_CrdtFood 'Purchased food/non-food on credit (incur debts) due to lack of food'
Lcs_stress_saving 'Spent savings due to lack of food'
Lcs_stress_BorrowCash 'Borrow money due to lack of food'.
***A household engaged in a strategy if it applied it (30) or has already exhausted it (20).
Do if (Lcs_stress_DomAsset = 20) | (Lcs_stress_DomAsset = 30) | (Lcs_stress_CrdtFood = 20) | (Lcs_stress_CrdtFood = 30) | (Lcs_stress_saving =20) | (Lcs_stress_saving =30) | (Lcs_stress_BorrowCash =20) | (Lcs_stress_BorrowCash =30).
Compute stress_coping =1.
Else.
Compute stress_coping =0.
End if.
EXECUTE.
***Crisis strategies*** (must have 3 crisis strategies to calculate LCS; if you have more, use the most frequently applied strategies).
***Define variable labels.
Variable labels
Lcs_crisis_ProdAsset 'Sold productive assets or means of transport (sewing machine, wheelbarrow, bicycle, car, etc.) due to lack of food'
Lcs_crisis_HealthEdu 'Reduced expenses on health (including drugs) or education due to lack of food'
Lcs_crisis_OutSchool 'Withdrew children from school due to lack of food'.
Do if (Lcs_crisis_ProdAsset = 20) | (Lcs_crisis_ProdAsset =30) | (Lcs_crisis_HealthEdu =20) | (Lcs_crisis_HealthEdu=30) | (Lcs_crisis_OutSchool =20) | (Lcs_crisis_OutSchool =30).
Compute crisis_coping =1.
Else.
Compute crisis_coping =0.
End if.
EXECUTE.
***Emergency strategies*** (must have 3 emergency strategies to calculate LCS; if you have more, use the most frequently applied strategies).
***Define variable labels.
Variable labels
Lcs_em_ResAsset 'Mortgaged/Sold house or land due to lack of food'
Lcs_em_Begged 'Begged and/or scavenged (asked strangers for money/food) due to lack of food'
Lcs_em_IllegalAct 'Engaged in illegal income activities (theft, prostitution) due to lack of food'.
Do if (Lcs_em_ResAsset = 20) | (Lcs_em_ResAsset = 30) | (Lcs_em_Begged = 20) | (Lcs_em_Begged =30) | (Lcs_em_IllegalAct = 20) | (Lcs_em_IllegalAct = 30).
Compute emergency_coping =1.
Else.
Compute emergency_coping = 0.
End if.
EXECUTE.
*** Label the new indicator variables.
Variable labels stress_coping 'Did the HH engage in stress coping strategies?'.
Variable labels crisis_coping 'Did the HH engage in crisis coping strategies?'.
Variable labels emergency_coping 'Did the HH engage in emergency coping strategies?'.
*** Recode indicators to severity scores (stress=2, crisis=3, emergency=4) and keep the most severe per household.
Recode stress_coping (0=0) (1=2).
Recode crisis_coping (0=0) (1=3).
Recode emergency_coping (0=0) (1=4).
Compute Max_coping_behaviour=MAX(stress_coping, crisis_coping, emergency_coping).
Recode Max_coping_behaviour (0=1).
Value labels Max_coping_behaviour 1 'HH not adopting coping strategies' 2 'Stress coping strategies ' 3 'Crisis coping strategies ' 4 'Emergencies coping strategies'.
Variable Labels Max_coping_behaviour 'Summary of asset depletion'.
EXECUTE.
***Economic Capacity to Meet Essential Needs: HHs under the MEB/Poverty line***.
/****ECMEN calculation is based on the standard module here:
/*https://docs.wfp.org/api/documents/WFP-0000115416/download/ (link was truncated in source - TODO verify).
**Important note: Original ECMEN calculation only includes expenditure on cash and own production - credit and assistance (gift/aid) should not be added.
***food expenditure***.
***Define variable labels (straight quotes; typographic quotes are invalid SPSS syntax).
*Important note: add recall periods of _1M or _7D to the variable names below depending on what has been selected for your CO. It is recommended to follow standard recall periods as in the module.
/*Expenditure on cereals (maize, rice, sorghum, wheat, flour of cereals, bread, pasta...)
/*Expenditure on tubers (potatoes, sweet potatoes, cassava, plantains, yams)
/*Expenditure on fruit (fresh and frozen fruit)
/*Expenditure on vegetables (dark green leafy vegetables, orange vegetable, other vegetable)
/*Expenditure on meat (fresh, chilled, frozen meat and poultry, dry and slated meat)
/*Expenditure on eggs
/*Expenditure on fish (fresh and frozen fish and other seafood)
/*Expenditure on oil, fat, and butter
/*Expenditure on milk, cheese, and yogurt
/*Expenditure on sugar, confectionery, and desserts
/*Expenditure on condiments (salt, spices, cubes, fish powder)
/*Expenditure on other meals/snacks consumed outside the home
/*Expenditure on pulses (beans, peas, lentils, nuts in shell or shelled)
Variable labels
HHExpFCer_MN_1M 'Cash expenditure value on cereals'
HHExpFCer_CRD_1M 'Credit expenditure value on cereals'
HHExpFCer_GiftAid_1M 'Assistance expenditure value on cereals'
HHExpFCer_Own_1M 'Own production expenditure value on cereals'
HHExpFTub_MN_1M 'Cash expenditure value on tubers'
HHExpFTub_CRD_1M 'Credit expenditure value on tubers'
HHExpFTub_GiftAid_1M 'Assistance expenditure value on tubers'
HHExpFTub_Own_1M 'Own production expenditure value on tubers'
HHExpFPuls_MN_1M 'Cash expenditure on pulses & nuts'
HHExpFPuls_CRD_1M 'Credit expenditure on pulses & nuts'
HHExpFPuls_GiftAid_1M 'Assistance expenditure value on pulses & nuts'
HHExpFPuls_Own_1M 'Own production expenditure value on pulses & nuts'
HHExpFVeg_MN_1M 'Cash expenditure on vegetables'
HHExpFVeg_CRD_1M 'Credit expenditure on vegetables'
HHExpFVeg_GiftAid_1M 'Assistance expenditure value on vegetables'
HHExpFVeg_Own_1M 'Own production expenditure value on vegetables'
HHExpFFrt_MN_1M 'Cash expenditure on fruits'
HHExpFFrt_CRD_1M 'Credit expenditure on fruits'
HHExpFFrt_GiftAid_1M 'Assistance expenditure value on fruits'
HHExpFFrt_Own_1M 'Own production expenditure value on fruits'
HHExpFAnimMeat_MN_1M 'Cash expenditure on meat'
HHExpFAnimMeat_CRD_1M 'Credit expenditure on meat'
HHExpFAnimMeat_GiftAid_1M 'Assistance expenditure value on meat'
HHExpFAnimMeat_Own_1M 'Own production expenditure value on meat'
HHExpFAnimFish_MN_1M 'Cash expenditure on fish'
HHExpFAnimFish_CRD_1M 'Credit expenditure on fish'
HHExpFAnimFish_GiftAid_1M 'Assistance expenditure value on fish'
HHExpFAnimFish_Own_1M 'Own production expenditure value on fish'
HHExpFFats_MN_1M 'Cash expenditure on oil/fat/butter'
HHExpFFats_CRD_1M 'Credit expenditure on oil/fat/butter'
HHExpFFats_GiftAid_1M 'Assistance expenditure value on oil/fat/butter'
HHExpFFats_Own_1M 'Own production expenditure value on oil/fat/butter'
HHExpFDairy_MN_1M 'Cash expenditure on milk/dairy products'
HHExpFDairy_CRD_1M 'Credit expenditure on milk/dairy products'
HHExpFDairy_GiftAid_1M 'Assistance expenditure value on milk/dairy products'
HHExpFDairy_Own_1M 'Own production expenditure value on milk/dairy products'
HHExpFAnimEgg_MN_1M 'Cash expenditure on eggs'
HHExpFAnimEgg_CRD_1M 'Credit expenditure on eggs'
HHExpFAnimEgg_GiftAid_1M 'Assistance expenditure value on eggs'
HHExpFAnimEgg_Own_1M 'Own production expenditure value on eggs'
HHExpFSgr_MN_1M 'Cash expenditure on sugar'
HHExpFSgr_CRD_1M 'Credit expenditure on sugar'
HHExpFSgr_GiftAid_1M 'Assistance expenditure value on sugar'
HHExpFSgr_Own_1M 'Own production expenditure value on sugar'
HHExpFCond_MN_1M 'Cash expenditure on condiments'
HHExpFCond_CRD_1M 'Credit expenditure on condiments'
HHExpFCond_GiftAid_1M 'Assistance expenditure value on condiments'
HHExpFCond_Own_1M 'Own production expenditure value on condiments'
HHExpFBeverage_MN_1M 'Cash expenditure on beverages'
HHExpFBeverage_CRD_1M 'Credit expenditure on beverages'
HHExpFBeverage_GiftAid_1M 'Assistance expenditure value on beverages'
HHExpFBeverage_Own_1M 'Own production expenditure value on beverages'
HHExpFOut_MN_1M 'Cash expenditure on snacks consumed outside the home'
HHExpFOut_CRD_1M 'Credit expenditure on snacks consumed outside the home'
HHExpFOut_GiftAid_1M 'Assistance expenditure value on snacks consumed outside the home'
HHExpFOut_Own_1M 'Own production expenditure value on snacks consumed outside the home'.
Execute.
**Calculate the overall monthly food expenditure per household for each category.
/*If the expenditure was calculated separately for cash, credit, aid/gift and own production, calculate the overall total by summing them up.
/*For ECMEN, only cash and own production to be included.
/*Make sure all recall periods are transformed to 30 days before summing.
Compute HHExpFood_MN_1M =sum(HHExpFCer_MN_1M, HHExpFTub_MN_1M,
HHExpFPuls_MN_1M, HHExpFVeg_MN_1M, HHExpFFrt_MN_1M, HHExpFAnimMeat_MN_1M,
HHExpFAnimFish_MN_1M, HHExpFFats_MN_1M, HHExpFDairy_MN_1M,
HHExpFAnimEgg_MN_1M, HHExpFSgr_MN_1M, HHExpFCond_MN_1M, HHExpFBeverage_MN_1M,
HHExpFOut_MN_1M).
Compute HHExp_Food_CRD_1M =sum(HHExpFCer_CRD_1M, HHExpFTub_CRD_1M,
HHExpFPuls_CRD_1M, HHExpFVeg_CRD_1M, HHExpFFrt_CRD_1M, HHExpFAnimMeat_CRD_1M,
HHExpFAnimFish_CRD_1M, HHExpFFats_CRD_1M, HHExpFDairy_CRD_1M,
HHExpFAnimEgg_CRD_1M, HHExpFSgr_CRD_1M, HHExpFCond_CRD_1M, HHExpFBeverage_CRD_1M,
HHExpFOut_CRD_1M).
Compute HHExp_Food_GiftAid_1M =sum(HHExpFCer_GiftAid_1M, HHExpFTub_GiftAid_1M,
HHExpFPuls_GiftAid_1M, HHExpFVeg_GiftAid_1M, HHExpFFrt_GiftAid_1M, HHExpFAnimMeat_GiftAid_1M,
HHExpFAnimFish_GiftAid_1M, HHExpFFats_GiftAid_1M, HHExpFDairy_GiftAid_1M,
HHExpFAnimEgg_GiftAid_1M, HHExpFSgr_GiftAid_1M, HHExpFCond_GiftAid_1M, HHExpFBeverage_GiftAid_1M,
HHExpFOut_GiftAid_1M).
Compute HHExp_Food_Own_1M =sum(HHExpFCer_Own_1M, HHExpFTub_Own_1M,
HHExpFPuls_Own_1M, HHExpFVeg_Own_1M, HHExpFFrt_Own_1M, HHExpFAnimMeat_Own_1M,
HHExpFAnimFish_Own_1M, HHExpFFats_Own_1M, HHExpFDairy_Own_1M,
HHExpFAnimEgg_Own_1M, HHExpFSgr_Own_1M, HHExpFCond_Own_1M, HHExpFBeverage_Own_1M,
HHExpFOut_Own_1M).
/*Fixed: label target renamed HHExp_Food_MN_1M -> HHExpFood_MN_1M to match the COMPUTE above.
Variable labels
HHExpFood_MN_1M 'Total food expenditure on cash'
HHExp_Food_CRD_1M 'Total food expenditure on credit'
HHExp_Food_GiftAid_1M 'Total food expenditure value from assistance'
HHExp_Food_Own_1M 'Total food expenditure value from own production'.
Execute.
***Non-food expenditure (30 days)***.
* Fixed: typographic quotes (U+2018/U+2019) replaced with straight apostrophes;
* SPSS only accepts ' or " as string delimiters, so the curly quotes below
* (copied from a formatted document) would raise syntax errors.
Variable labels
HHExpNFHyg_MN_1M 'Cash expenditure on soap, hygiene & personal care items'
HHExpNFHyg_CRD_1M 'Credit expenditure on soap, hygiene & personal care items'
HHExpNFHyg_GiftAid_1M 'Assistance expenditure value on soap, hygiene & personal care items'
HHExpNFTransp_MN_1M 'Cash expenditure on transport'
HHExpNFTransp_CRD_1M 'Credit expenditure on transport'
HHExpNFTransp_GiftAid_1M 'Assistance expenditure value on transport'
HHExpNFWat_MN_1M 'Cash expenditure on water supply for domestic consumption'
HHExpNFWat_CRD_1M 'Credit expenditure on water supply for domestic consumption'
HHExpNFWat_GiftAid_1M 'Assistance expenditure value on water supply for domestic consumption'
HHExpNFElec_MN_1M 'Cash expenditure on electricity'
HHExpNFElec_CRD_1M 'Credit expenditure on electricity'
HHExpNFElec_GiftAid_1M 'Assistance expenditure value on electricity'
HHExpNFEnerg_MN_1M 'Cash expenditure on energy (cooking, heating, lighting) from other sources (not electricity)'
HHExpNFEnerg_CRD_1M 'Credit expenditure on energy (cooking, heating, lighting) from other sources (not electricity)'
HHExpNFEnerg_GiftAid_1M 'Assistance expenditure value on energy (cooking, heating, lighting) from other sources (not electricity)'
HHExpNFDwelServ_MN_1M 'Cash expenditure on miscellaneous services relating to the dwelling'
HHExpNFDwelServ_CRD_1M 'Credit expenditure on miscellaneous services relating to the dwelling'
HHExpNFDwelServ_GiftAid_1M 'Assistance expenditure value on miscellaneous services relating to the dwelling'
HHExpNFPhone_MN_1M 'Cash expenditure on information and communication'
HHExpNFPhone_CRD_1M 'Credit expenditure on information and communication'
HHExpNFPhone_GiftAid_1M 'Assistance expenditure value on information and communication'
HHExpNFAlcTobac_MN_1M 'Cash expenditure on alcoholic beverages and tobacco'
HHExpNFAlcTobac_CRD_1M 'Credit expenditure on alcoholic beverages and tobacco'
HHExpNFAlcTobac_GiftAid_1M 'Assistance expenditure value on alcoholic beverages and tobacco'
HHExpNFSpec1_MN_1M 'Cash expenditure on [specific to country]'
HHExpNFSpec1_CRD_1M 'Credit expenditure on [Specific to country]'
HHExpNFSpec1_GiftAid_1M 'Assistance expenditure value on [Specific to country]'.
***Non-food expenditure (6 months)***.
* Fixed: (1) HHExpNFRent_GiftAid _6M had a stray space inside the variable
* name; (2) typographic quotes replaced with straight apostrophes (SPSS only
* accepts ' or " as string delimiters).
Variable labels
HHExpNFMedServ_MN_6M 'Cash expenditure on health services'
HHExpNFMedServ_CRD_6M "Credit expenditure on health services"
HHExpNFMedServ_GiftAid_6M 'Assistance expenditure value on health services'
HHExpNFMedGood_MN_6M 'Cash expenditure on medicines & health products'
HHExpNFMedGood_CRD_6M 'Credit expenditure on medicines & health products'
HHExpNFMedGood_GiftAid_6M 'Assistance expenditure value on medicines & health products'
HHExpNFCloth_MN_6M 'Cash expenditure on clothing and footwear'
HHExpNFCloth_CRD_6M 'Credit expenditure on clothing and footwear'
HHExpNFCloth_GiftAid_6M 'Assistance expenditure value on clothing and footwear'
HHExpNFEduFee_MN_6M 'Cash expenditure on education services'
HHExpNFEduFee_CRD_6M 'Credit expenditure on education services'
HHExpNFEduFee_GiftAid_6M 'Assistance expenditure value on education services'
HHExpNFEduGood_MN_6M 'Cash expenditure on education goods'
HHExpNFEduGood_CRD_6M 'Credit expenditure on education goods'
HHExpNFEduGood_GiftAid_6M 'Assistance expenditure value on education goods'
HHExpNFRent_MN_6M 'Cash expenditure on rent'
HHExpNFRent_CRD_6M 'Credit expenditure on rent'
HHExpNFRent_GiftAid_6M "Assistance expenditure value on rent"
HHExpNFHHSoft_MN_6M 'Cash expenditure on household non-durable furniture and routine maintenance'
HHExpNFHHSoft_CRD_6M 'Credit expenditure on household non-durable furniture and routine maintenance'
HHExpNFHHSoft_GiftAid_6M 'Assistance expenditure value on household non-durable furniture and routine maintenance'
HHExpNFSav_MN_6M 'Cash expenditure on savings'
HHExpNFSav_CRD_6M 'Credit expenditure on savings'
HHExpNFSav_GiftAid_6M 'Assistance expenditure value on savings'
HHExpNFDebt_MN_6M "Cash expenditure on debt repayment"
HHExpNFDebt_CRD_6M 'Credit expenditure on debt repayment'
HHExpNFDebt_GiftAid_6M 'Assistance expenditure value on debt repayment'
HHExpNFInsurance_MN_6M 'Cash expenditure on insurance'
HHExpNFInsurance_CRD_6M 'Credit expenditure on insurance'
HHExpNFInsurance_GiftAid_6M 'Assistance expenditure value on insurance'.
***Calculate the overall monthly non-food expenditure per household.
/*Make sure to calculate separately for cash, credit, aid/gift and own production, calculate the overall by summing them up
/*make sure to transform it to 30 days
* Fixed: the 6-month sums referenced HHExpNFSoft_*_6M, but the variables
* declared and labelled above are HHExpNFHHSoft_*_6M; also added
* HHExpNFSav_CRD_6M to the credit sum for consistency with the cash and
* assistance sums (TODO confirm against the country module).
Compute HHExpNFTotal_MN_6M=sum(HHExpNFRent_MN_6M,HHExpNFMedServ_MN_6M,
HHExpNFMedGood_MN_6M, HHExpNFCloth_MN_6M, HHExpNFEduFee_MN_6M, HHExpNFEduGood_MN_6M,
HHExpNFHHSoft_MN_6M, HHExpNFSav_MN_6M, HHExpNFInsurance_MN_6M, HHExpNFDebt_MN_6M).
Compute HHExpNFTotal_MN_30D=sum(HHExpNFAlcTobac_MN_1M,HHExpNFHyg_MN_1M,
HHExpNFTransp_MN_1M, HHExpNFWat_MN_1M, HHExpNFDwelServ_MN_1M, HHExpNFElec_MN_1M, HHExpNFEnerg_MN_1M, HHExpNFPhone_MN_1M, HHExpNFSpec1_MN_1M).
*sum the non-food 1 month and 6 month expenditures.
* Note: the /6 converts the 6-month recall total to a monthly figure
* (division binds tighter than +, so only the 6M term is divided).
Compute HHExpNFTotal_MN_1M=(HHExpNFTotal_MN_30D+HHExpNFTotal_MN_6M/6).
*note: to reduce the number of variables in your dataset, you could run the delete function below.
delete variables HHExpNFTotal_MN_6M HHExpNFTotal_MN_30D.
Compute HHExpNFTotal_CRD_6M=sum(HHExpNFRent_CRD_6M, HHExpNFMedServ_CRD_6M,
HHExpNFMedGood_CRD_6M, HHExpNFCloth_CRD_6M, HHExpNFEduFee_CRD_6M, HHExpNFEduGood_CRD_6M,
HHExpNFHHSoft_CRD_6M, HHExpNFSav_CRD_6M, HHExpNFInsurance_CRD_6M, HHExpNFDebt_CRD_6M).
Compute HHExpNFTotal_CRD_30D = sum(HHExpNFAlcTobac_CRD_1M,HHExpNFHyg_CRD_1M,
HHExpNFTransp_CRD_1M, HHExpNFWat_CRD_1M, HHExpNFDwelServ_CRD_1M, HHExpNFElec_CRD_1M, HHExpNFEnerg_CRD_1M,
HHExpNFPhone_CRD_1M, HHExpNFSpec1_CRD_1M).
Compute HHExpNFTotal_CRD_1M=(HHExpNFTotal_CRD_30D+HHExpNFTotal_CRD_6M/6).
*note: to reduce the number of variables in your dataset, you could run the delete function below.
delete variables HHExpNFTotal_CRD_6M HHExpNFTotal_CRD_30D.
Compute HHExpNFTotal_GiftAid_6M=sum(HHExpNFRent_GiftAid_6M,HHExpNFMedServ_GiftAid_6M,
HHExpNFMedGood_GiftAid_6M, HHExpNFCloth_GiftAid_6M, HHExpNFEduFee_GiftAid_6M, HHExpNFEduGood_GiftAid_6M,
HHExpNFHHSoft_GiftAid_6M, HHExpNFSav_GiftAid_6M, HHExpNFInsurance_GiftAid_6M, HHExpNFDebt_GiftAid_6M).
Compute HHExpNFTotal_GiftAid_30D = sum(HHExpNFAlcTobac_GiftAid_1M, HHExpNFHyg_GiftAid_1M,
HHExpNFTransp_GiftAid_1M, HHExpNFWat_GiftAid_1M, HHExpNFDwelServ_GiftAid_1M, HHExpNFElec_GiftAid_1M, HHExpNFEnerg_GiftAid_1M, HHExpNFPhone_GiftAid_1M, HHExpNFSpec1_GiftAid_1M).
*sum the non-food 1 month and 6-month expenditures.
Compute HHExpNFTotal_GiftAid_1M=(HHExpNFTotal_GiftAid_30D+HHExpNFTotal_GiftAid_6M/6).
*note: to reduce the number of variables in your dataset, you could run the delete function below.
delete variables HHExpNFTotal_GiftAid_6M HHExpNFTotal_GiftAid_30D.
Variable labels HHExpNFTotal_MN_1M 'Total non-food exp on cash'.
Variable labels HHExpNFTotal_CRD_1M 'Total non-food exp on credit'.
Variable labels HHExpNFTotal_GiftAid_1M 'Total non-food exp from gift aid'.
Execute.
***Calculate totals for food and non-food expenditure.
* All-sources totals (cash + credit + assistance, plus own production on the
* food side); used for HHExpTotal below - NOT for the ECMEN aggregate.
Compute HHExpNFTotal_1M=sum(HHExpNFTotal_MN_1M, HHExpNFTotal_CRD_1M, HHExpNFTotal_GiftAid_1M).
Compute HHExpFood_1M=sum( HHExpFood_MN_1M, HHExp_Food_CRD_1M, HHExp_Food_Own_1M, HHExp_Food_GiftAid_1M).
EXECUTE.
***Note: For ECMEN analysis, do not include assistance and credit expenditure. Credit expenditures do not refer to credits repaid in the same month***.
/*Only include cash and own production analysis
/*Please feel free to create variations according to the context
/*HHSize variable refers to total household size
***Calculate total household and per capita expenditure***.
Compute HHExpTotal=HHExpFood_1M + HHExpNFTotal_1M.
Compute PCExpTotal=HHExpTotal/HHSize.
Variable labels PCExpTotal 'Monthly total per capita exp incl all food and non-food exp in cash, credit, assistance'.
Variable labels HHExpTotal 'Monthly total HH exp incl all food and non-food exp in cash, credit, assistance'.
Execute.
Frequencies HHExpTotal /statistics /histogram.
***Calculate total expenditure excluding assistance and credit for ECMEN analysis.
* ECMEN aggregate = food cash + food own-production + non-food cash only;
* per-capita version divides by household size (HHSize).
Compute HHExp_ECMEN= HHExpFood_MN_1M+ HHExp_Food_Own_1M+ HHExpNFTotal_MN_1M.
Compute PCExp_ECMEN=HHExp_ECMEN/HHSize.
Variable labels PCExp_ECMEN 'Monthly total per capita exp for ECMEN exc assistance and credit'.
Variable labels HHExp_ECMEN 'Monthly total HH exp for ECMEN exc assistance and credit'.
Execute.
Frequencies HHExp_ECMEN /statistics /histogram.
/*In order to calculate ECMEN, please enter MEB manually as below
/*MEB_PC: Minimum expenditure basket per capita
/*MEB_HH: Minimum expenditure basket per household
/***Calculate ECMEN: Economic Capacity to Meet Essential Needs
* MEB_PC must exist in the dataset (entered manually or merged in) before
* this section runs.  Households exactly at the MEB are classed as 0
* (no capacity); cases where either operand is missing stay system-missing.
If (PCExp_ECMEN <= MEB_PC) ECMEN=0.
If (PCExp_ECMEN > MEB_PC) ECMEN=1.
Variable labels ECMEN 'Percentage of HH with exp above MEB, excl. assistance, credit'.
Value labels ECMEN
0 'HH with no capacity'
1 'HH with capacity'.
Execute.
Frequencies ECMEN /statistics.
* Same classification against the survival MEB; SMEB_PC must also be
* entered manually before this section runs.
If (PCExp_ECMEN <= SMEB_PC) ECMEN_SMEB=0.
If (PCExp_ECMEN > SMEB_PC) ECMEN_SMEB=1.
Variable labels ECMEN_SMEB 'Percentage of HH with exp above SMEB, excl. assistance, credit'.
Value labels ECMEN_SMEB
0 'HH with no capacity'
1 'HH with capacity'.
Execute.
Frequencies ECMEN_SMEB /statistics.
***recode ECMEN based on the MEB and SMEB cut-off points in the area/country for CARI calculation.
* 1 = above MEB, 2 = between SMEB and MEB, 3 = below SMEB.
IF (ECMEN=1) ECMEN_MEB=1.
IF (ECMEN=0 & ECMEN_SMEB=1) ECMEN_MEB=2.
IF (ECMEN=0 & ECMEN_SMEB=0) ECMEN_MEB=3.
***recode the 'ECMEN_MEB' variable into a 4pt scale for CARI console.
Recode ECMEN_MEB (1=1) (2=3) (3=4) INTO ECMEN_class_4pt.
Variable labels ECMEN_class_4pt 'ECMEN 4pt'.
EXECUTE.
* Fixed: ECMEN _class_4pt (stray space) would not resolve to the variable
* created by the RECODE above.
Frequencies variables=ECMEN_class_4pt /ORDER=ANALYSIS.
Value labels ECMEN_class_4pt 1.00 'Least vulnerable' 3.00 'Vulnerable' 4.00 'Highly vulnerable'.
EXECUTE.
***CARI (WITH ECMEN) ***.
* CARI console: mean of current status (FCS_4pt) and coping capacity
* (mean of Max_coping_behaviour and the ECMEN 4-pt class), then rounded.
* Fixed: ECMEN _class_4pt (stray space) replaced with ECMEN_class_4pt.
Compute Mean_coping_capacity_ECMEN = MEAN (Max_coping_behaviour, ECMEN_class_4pt).
Compute CARI_unrounded_ECMEN = MEAN (FCS_4pt, Mean_coping_capacity_ECMEN).
Compute CARI_ECMEN = RND (CARI_unrounded_ECMEN).
EXECUTE.
Value labels CARI_ECMEN 1 'Food secure' 2 'Marginally food secure' 3 'Moderately food insecure' 4 'Severely food insecure'.
EXECUTE.
Frequencies CARI_ECMEN.
***create population distribution table, to explore how the domains interact within the different food security categories.
* Fixed: three occurrences of ECMEN _class_4pt (stray space) and the
* duplicated word in the comment above.
CTABLES
/VLABELS VARIABLES=ECMEN_class_4pt FCS_4pt Max_coping_behaviour DISPLAY=LABEL
/TABLE ECMEN_class_4pt [C] BY FCS_4pt [C] > Max_coping_behaviour [C][ROWPCT.COUNT PCT40.1]
/CATEGORIES VARIABLES=ECMEN_class_4pt ORDER=A KEY=VALUE EMPTY=EXCLUDE
/CATEGORIES VARIABLES=FCS_4pt Max_coping_behaviour ORDER=A KEY=VALUE EMPTY=INCLUDE.
|
ce362c71ac7c9ee9d344bbc862c532589dcddb540fec28949191fbcf8f1b08be | markbastian/replion | spiderman_lambdas.clj | (ns replion.spiderman-lambdas
(:require [datomic.client.api :as d]
[replion.spiderman-db :as spiderman]
[replion.core :as core]
[clojure.pprint :as pp]
[clojure.string :as cs]
[cheshire.core :as ch])
(:import (java.text SimpleDateFormat)))
(defn parker-status-orig
  "First iteration of the demo Lambda handler: ignores the supplied `date`
   and returns whatever `spiderman/parker-status-query` yields for the
   current db value, unserialized.  Kept to document the progression; use
   `parker-status` instead."
  [{:keys [date]}]
  (let [db (d/db (core/connection))]
    (spiderman/parker-status-query db)))
(defn parker-status-fixed-output
  "Second iteration: still ignores `date`, but unpacks the first
   [status as-of-date] result and hand-formats it as a JSON object string."
  [{:keys [date]}]
  (let [db (d/db (core/connection))
        [status as-of-date] (first (spiderman/parker-status-query db))]
    (format "{\"%s\": \"%s\"}" (name status) as-of-date)))
(defn parker-status-broken
  "Third iteration: adds `d/as-of` so the query runs against a historical
   db.  As the name records, this version does not work as a Lambda -
   presumably because the raw payload is not yet parsed into a Date that
   `d/as-of` accepts (the later `-pre-cheshire` variant adds that parsing;
   confirm against the walkthrough).  Kept for documentation."
  [{:keys [date]}]
  (let [db (d/db (core/connection))
        as-of-db (d/as-of db date)
        [status as-of-date] (first (spiderman/parker-status-query as-of-db))]
    (format
      "{\"%s\": \"%s\"}"
      (name status)
      as-of-date)))
(defn parker-status-input-dump
  "Debugging iteration: pretty-prints the entire incoming `args` into the
   JSON response (alongside the current status) so the raw Lambda payload
   can be inspected from the response body."
  [args]
  (let [db (d/db (core/connection))
        ;as-of-db (d/as-of db date)
        [status as-of-date] (first (spiderman/parker-status-query db))]
    (format
      "{\"args\":\"%s\",\n\"%s\": \"%s\"}"
      (with-out-str (pp/pprint args))
      (name status)
      as-of-date)))
(defn parker-status-pre-cheshire
  "Working iteration before cheshire was adopted: decodes the JSON string
   `input` by stripping double quotes manually, parses it as yyyy-MM-dd,
   queries as of that date, and hand-formats the JSON response.  Superseded
   by `parker-status`, which does the de/encoding with cheshire."
  [{:keys [input]}]
  (let [date (.parse (SimpleDateFormat. "yyyy-MM-dd") (cs/replace input #"\"" ""))
        db (d/db (core/connection))
        as-of-db (d/as-of db date)
        [status as-of-date] (first (spiderman/parker-status-query as-of-db))]
    (format
      "{\"%s\": \"%s\"}"
      (name status)
      as-of-date)))
(defn parker-status
  "Final Lambda handler.  `input` is a JSON-encoded \"yyyy-MM-dd\" string;
   it is decoded with cheshire, the status query is run against the db as
   of that date, and the single [status date] result is returned
   JSON-encoded as a one-entry map."
  [{:keys [input]}]
  (let [as-of (.parse (SimpleDateFormat. "yyyy-MM-dd") (ch/parse-string input))
        snapshot (-> (core/connection) d/db (d/as-of as-of))
        [status as-of-date] (first (spiderman/parker-status-query snapshot))]
    (ch/encode {status as-of-date})))
(:require [datomic.client.api :as d]
[replion.spiderman-db :as spiderman]
[replion.core :as core]
[clojure.pprint :as pp]
[clojure.string :as cs]
[cheshire.core :as ch])
(:import (java.text SimpleDateFormat)))
(defn parker-status-orig
[{:keys [date]}]
(let [db (d/db (core/connection))]
(spiderman/parker-status-query db)))
(defn parker-status-fixed-output
[{:keys [date]}]
(let [db (d/db (core/connection))
[status as-of-date] (first (spiderman/parker-status-query db))]
(format "{\"%s\": \"%s\"}" (name status) as-of-date)))
(defn parker-status-broken
[{:keys [date]}]
(let [db (d/db (core/connection))
as-of-db (d/as-of db date)
[status as-of-date] (first (spiderman/parker-status-query as-of-db))]
(format
"{\"%s\": \"%s\"}"
(name status)
as-of-date)))
(defn parker-status-input-dump
[args]
(let [db (d/db (core/connection))
[status as-of-date] (first (spiderman/parker-status-query db))]
(format
"{\"args\":\"%s\",\n\"%s\": \"%s\"}"
(with-out-str (pp/pprint args))
(name status)
as-of-date)))
(defn parker-status-pre-cheshire
[{:keys [input]}]
(let [date (.parse (SimpleDateFormat. "yyyy-MM-dd") (cs/replace input #"\"" ""))
db (d/db (core/connection))
as-of-db (d/as-of db date)
[status as-of-date] (first (spiderman/parker-status-query as-of-db))]
(format
"{\"%s\": \"%s\"}"
(name status)
as-of-date)))
(defn parker-status
[{:keys [input]}]
(let [date (.parse (SimpleDateFormat. "yyyy-MM-dd") (ch/parse-string input))
db (d/db (core/connection))
as-of-db (d/as-of db date)
[status as-of-date] (first (spiderman/parker-status-query as-of-db))]
(ch/encode
{status as-of-date}))) |
08b2e85f0a293f3e9dbb3a04bb6fc1c993a148c09de538a24984443983ee0b02 | mu-chaco/ReWire | Pretty.hs | # LANGUAGE Trustworthy #
# OPTIONS_GHC -fno - warn - orphans #
module ReWire.Pretty (($+$), ($$), prettyPrint, text, int, empty, hang, P.Pretty) where
import qualified Prettyprinter as P
import qualified Prettyprinter.Render.Text as P
import TextShow
import Data.Text (Text)
($$) :: P.Doc ann -> P.Doc ann -> P.Doc ann
a $$ b = P.vsep [a, b]
-- TODO(chathhorn): nesting
($+$) :: P.Doc ann -> P.Doc ann -> P.Doc ann
a $+$ b = P.vsep [a, P.nest 2 b]
infixl 5 $$, $+$
text :: Text -> P.Doc ann
text = P.pretty
int :: Int -> P.Doc ann
int = P.pretty
empty :: P.Doc ann
empty = P.emptyDoc
hang :: P.Doc ann -> Int -> P.Doc ann -> P.Doc ann
hang a n b = P.sep [a, P.nest n b]
prettyPrint :: P.Pretty a => a -> Text
prettyPrint = P.renderStrict . P.layoutPretty P.defaultLayoutOptions . P.pretty
-- TODO(chathhorn): orphan
instance TextShow (P.Doc ann) where
showb = showb . show
| null | https://raw.githubusercontent.com/mu-chaco/ReWire/b04686a4cd6cb36ca9976a4b6c42bc195ce69462/src/ReWire/Pretty.hs | haskell | TODO(chathhorn): nesting
TODO(chathhorn): orphan | # LANGUAGE Trustworthy #
# OPTIONS_GHC -fno - warn - orphans #
module ReWire.Pretty (($+$), ($$), prettyPrint, text, int, empty, hang, P.Pretty) where
import qualified Prettyprinter as P
import qualified Prettyprinter.Render.Text as P
import TextShow
import Data.Text (Text)
($$) :: P.Doc ann -> P.Doc ann -> P.Doc ann
a $$ b = P.vsep [a, b]
($+$) :: P.Doc ann -> P.Doc ann -> P.Doc ann
a $+$ b = P.vsep [a, P.nest 2 b]
infixl 5 $$, $+$
text :: Text -> P.Doc ann
text = P.pretty
int :: Int -> P.Doc ann
int = P.pretty
empty :: P.Doc ann
empty = P.emptyDoc
hang :: P.Doc ann -> Int -> P.Doc ann -> P.Doc ann
hang a n b = P.sep [a, P.nest n b]
prettyPrint :: P.Pretty a => a -> Text
prettyPrint = P.renderStrict . P.layoutPretty P.defaultLayoutOptions . P.pretty
instance TextShow (P.Doc ann) where
showb = showb . show
|
ac937eaf06224d510d57c00cb608d68c94a3d187f3cfb92940af8ef0f1ed96dc | ultralisp/ultralisp | source.lisp | (defpackage #:ultralisp/utils/source
(:use #:cl)
(:import-from #:ultralisp/utils/text
#:multi-split)
(:import-from #:str
#:starts-with-p
#:ends-with-p)
(:export
#:make-file-ignorer
#:format-ignore-list
#:parse-ignore-list))
(in-package #:ultralisp/utils/source)
(defun parse-ignore-list (input)
  "Returns a sorted list of directories and files.

   Each entry will be relative: the leading slash is stripped and a
   trailing slash is appended if it is missing.  But if an entry ends
   with the .asd extension, no trailing slash is added; this way it is
   possible to ignore separate asd files.

   Input should be a text which can contain entries either comma or
   newline separated.  Empty items will be omitted."
  (check-type input string)
  ;; multi-split drops empty pieces, so blank lines / doubled commas vanish.
  (loop for path in (multi-split '(#\Newline #\,) input)
        for path1 = (string-left-trim '(#\/) path)
        for path2 = (cond
                      ((ends-with-p "/" path1)
                       path1)
                      ((ends-with-p ".asd" path1)
                       path1)
                      (t
                       (concatenate 'string path1 "/")))
        collect path2 into paths
        finally (return (sort paths
                              #'string<))))
(defun format-ignore-list (paths)
  "Renders PATHS as a single comma-separated string (the textual form
   accepted back by PARSE-IGNORE-LIST)."
  (format nil "~{~A~^, ~}"
          paths))
(defun make-file-ignorer (input)
  "Returns a function which accepts a single string filename, relative
   to the data source dir, and returns T if this file should be ignored.

   INPUT may be a string (split on commas/newlines) or an already-parsed
   list.  If there are no directories in the input, the returned function
   always returns NIL, i.e. nothing is ignored."
  (let ((dirs (etypecase input
                (string (multi-split '(#\Newline #\,) input))
                (list input))))
    (if dirs
        (lambda (filename)
          (loop for dir in dirs
                thereis (starts-with-p dir filename)))
        ;; This way we'll not ignore nested asd files by default.
        ;; We need this because of issue:
        ;; 
        (constantly nil))))
| null | https://raw.githubusercontent.com/ultralisp/ultralisp/37bd5d92b2cf751cd03ced69bac785bf4bcb6c15/src/utils/source.lisp | lisp | This way we'll not ignore nested asd files by default.
We need this because of issue:
| (defpackage #:ultralisp/utils/source
(:use #:cl)
(:import-from #:ultralisp/utils/text
#:multi-split)
(:import-from #:str
#:starts-with-p
#:ends-with-p)
(:export
#:make-file-ignorer
#:format-ignore-list
#:parse-ignore-list))
(in-package #:ultralisp/utils/source)
(defun parse-ignore-list (input)
"Returns a sorted list of directories and files.
Each directory will be relative, leading
backslash will be stripped and a new backslash
will be appended to the end, it it is missing.
But if entry ends with .asd extension, then
backslash will not be added. This way it is
possible to ignore separate asd files.
Input should be a text which can contain directories
either comma or newline separated.
Empty items will be omitted."
(check-type input string)
(loop for path in (multi-split '(#\Newline #\,) input)
for path1 = (string-left-trim '(#\/) path)
for path2 = (cond
((ends-with-p "/" path1)
path1)
((ends-with-p ".asd" path1)
path1)
(t
(concatenate 'string path1 "/")))
collect path2 into paths
finally (return (sort paths
#'string<))))
(defun format-ignore-list (paths)
(format nil "~{~A~^, ~}"
paths))
(defun make-file-ignorer (input)
"Returns a function which accepts a single string filename, relative
to the data source dir, and returns T if this file should be ignored.
If there is no directories in the input, then "
(let ((dirs (etypecase input
(string (multi-split '(#\Newline #\,) input))
(list input))))
(if dirs
(lambda (filename)
(loop for dir in dirs
thereis (starts-with-p dir filename)))
(constantly nil))))
|
e4b42f775e9da2fe3135188f59d8f4b20efbf7d3e4933bdcc3de36b340b458fb | aliaksandr-s/prototyping-with-clojure | transform-data.clj | (defn get-raw-data [] (->
(slurp "./resources/raw/data.edn")
(read-string)
(eval)))
;; Fields kept from each raw country entry.
;; NOTE(review): `keys` shadows clojure.core/keys in this namespace -
;; consider renaming (e.g. `country-keys`) if this script grows.
(def keys [:name :country-code :alpha-2 :alpha-3])

;; Renames raw field names to the namespaced :country/* keys used
;; by the seed data.
(def new-keys {:name :country/name
               :country-code :country/code
               :alpha-2 :country/alpha-2
               :alpha-3 :country/alpha-3})
(defn transform
  "Normalize one raw country map: keep only the fields in `keys`, then
   rename them to the namespaced :country/* keys from `new-keys`."
  [country]
  (clojure.set/rename-keys (select-keys country keys) new-keys))
(defn wrap-with-template [data]
  ;; Wrap the transformed entries in the seed-data shape the app expects:
  ;; {:visitera/data1 {:txes [[...]]}} - a single transaction batch,
  ;; stringified for writing to an .edn file.
  (str {:visitera/data1 {:txes [(vec data)]}}))
(defn save-parsed []
  ;; Read the raw country EDN, normalize every entry, wrap it in the seed
  ;; template, and write the result out.  *print-namespace-maps* is bound
  ;; to false so keys print fully qualified (:country/name) instead of the
  ;; #:country{...} reader shorthand.
  (spit "./resources/raw/parsed-data.edn"
        (binding [*print-namespace-maps* false]
          (->>
            (get-raw-data)
            (map transform)
            (wrap-with-template)))))

;; Run the conversion when this script is loaded.
(save-parsed)
(slurp "./resources/raw/data.edn")
(read-string)
(eval)))
(def keys [:name :country-code :alpha-2 :alpha-3])
(def new-keys {:name :country/name
:country-code :country/code
:alpha-2 :country/alpha-2
:alpha-3 :country/alpha-3})
(defn transform [country]
(-> country
(select-keys keys)
(clojure.set/rename-keys new-keys)))
(defn wrap-with-template [data]
(str {:visitera/data1 {:txes [(vec data)]}}))
(defn save-parsed []
(spit "./resources/raw/parsed-data.edn"
(binding [*print-namespace-maps* false]
(->>
(get-raw-data)
(map transform)
(wrap-with-template)))))
(save-parsed) | |
c1a5eff8f19c2e05ac5e418e6570bcb4de617a9357011d4787e1b8a9e04379c8 | ThoughtWorksInc/stonecutter | register_form.cljs | (ns stonecutter.js.dom.register-form
(:require [stonecutter.js.dom.common :as dom]))
;; Selector for the registration <form> element.
(def register-form-element-selector :.clj--register__form)

;; Classes toggled on a form row to reflect its validation state.
(def field-invalid-class :form-row--invalid)
(def field-valid-class :form-row--valid)
;; Maps each logical registration field to the DOM selectors for its
;; <input>, its enclosing form row, and its validation-message element.
(def selectors
  {:registration-first-name {:input :.clj--registration-first-name__input
                             :form-row :.clj--registration-first-name
                             :validation :.clj--registration-first-name__validation}
   :registration-last-name {:input :.clj--registration-last-name__input
                            :form-row :.clj--registration-last-name
                            :validation :.clj--registration-last-name__validation}
   :registration-email {:input :.clj--registration-email__input
                        :form-row :.clj--registration-email
                        :validation :.clj--registration-email__validation}
   :registration-password {:input :.clj--registration-password__input
                           :form-row :.clj--registration-password
                           :validation :.clj--registration-password__validation}})
(defn form-row-selector
  "Selector for the form row wrapping field-key (e.g. :registration-email)."
  [field-key]
  (get-in selectors [field-key :form-row]))

(defn input-selector
  "Selector for field-key's <input> element."
  [field-key]
  (get-in selectors [field-key :input]))

(defn validation-selector
  "Selector for field-key's validation-message element."
  [field-key]
  (get-in selectors [field-key :validation]))

(defn get-value
  "Current value of field-key's input, read from the DOM."
  [field-key]
  (dom/get-value (input-selector field-key)))
| null | https://raw.githubusercontent.com/ThoughtWorksInc/stonecutter/37ed22dd276ac652176c4d880e0f1b0c1e27abfe/src-cljs/stonecutter/js/dom/register_form.cljs | clojure | (ns stonecutter.js.dom.register-form
(:require [stonecutter.js.dom.common :as dom]))
(def register-form-element-selector :.clj--register__form)
(def field-invalid-class :form-row--invalid)
(def field-valid-class :form-row--valid)
(def selectors
{:registration-first-name {:input :.clj--registration-first-name__input
:form-row :.clj--registration-first-name
:validation :.clj--registration-first-name__validation}
:registration-last-name {:input :.clj--registration-last-name__input
:form-row :.clj--registration-last-name
:validation :.clj--registration-last-name__validation}
:registration-email {:input :.clj--registration-email__input
:form-row :.clj--registration-email
:validation :.clj--registration-email__validation}
:registration-password {:input :.clj--registration-password__input
:form-row :.clj--registration-password
:validation :.clj--registration-password__validation}})
(defn form-row-selector [field-key]
(get-in selectors [field-key :form-row]))
(defn input-selector [field-key]
(get-in selectors [field-key :input]))
(defn validation-selector [field-key]
(get-in selectors [field-key :validation]))
(defn get-value [field-key]
(dom/get-value (input-selector field-key)))
| |
f1561e3a92e7a9c1afcd68ad3dd8c963b3acd3235817c32761e09098618d6881 | drlivingston/kr | writer_kb.clj | (ns edu.ucdenver.ccp.kr.sesame.writer-kb
(use edu.ucdenver.ccp.kr.kb
[edu.ucdenver.ccp.kr.rdf :exclude (resource)]
edu.ucdenver.ccp.kr.sesame.kb
[edu.ucdenver.ccp.kr.sesame.rdf :exclude (resource)]
[clojure.java.io :exclude (resource)])
org.openrdf.model.impl . ValueFactoryBase
org.openrdf.model.impl.ValueFactoryImpl
java.nio.charset.Charset
(org.openrdf.rio Rio
RDFFormat
RDFWriter
RDFWriterFactory)
org.openrdf.rio.ntriples.NTriplesWriterFactory))
;;; --------------------------------------------------------
;;; connections
;;; --------------------------------------------------------
;; this is nonsense because of the circular definitions
;; and what can and cannot be forward declared
(declare initialize-sesame-writer
         open-sesame-writer
         close-sesame-writer
         sesame-write-statement
         sesame-write-statements)
;;; --------------------------------------------------------
;;; protocol implementation
;;; --------------------------------------------------------
;; A write-only KB backed by a Sesame/Rio RDF writer: `target` is where the
;; serialization goes, `connection` holds the active RDFWriter (nil when
;; closed).  Fixed: several commented-out protocol methods had lost their
;; `;;` markers (e.g. "no - op", the TODO line, ask/query-statement),
;; leaving bare symbols inside the defrecord body; they are restored as
;; comments below.
(defrecord SesameWriterKB [target connection]
  KB
  (native [kb] target)
  (initialize [kb] kb) ;(initialize-sesame-writer kb))
  (open [kb] (open-sesame-writer kb))
  (close [kb] (close-sesame-writer kb))

  rdfKB
  ;; (ns-maps [kb] ns-maps-var)
  ;; (ns-map-to-short [kb] (:ns-map-to-short (deref ns-maps-var)))
  ;; (ns-map-to-long [kb] (:ns-map-to-long (deref ns-maps-var)))
  (root-ns-map [kb] (ns-map-to-long kb))
  ;; no-op
  (create-resource [kb name] (sesame-create-resource kb name))
  (create-property [kb name] (sesame-create-property kb name))
  (create-literal [kb val] (sesame-create-literal kb val))
  (create-literal [kb val type] (sesame-create-literal kb val type))
  ;; TODO convert to creating proper string literals
  ;; (create-string-literal [kb str] (sesame-create-string-literal kb str))
  ;; (create-string-literal [kb str lang]
  ;;   (sesame-create-string-literal kb val type))
  (create-string-literal [kb str] (sesame-create-literal kb str))
  (create-string-literal [kb str lang]
    (sesame-create-literal kb str lang))
  (create-blank-node [kb name] (sesame-create-blank-node kb name))
  (create-statement [kb s p o] (sesame-create-statement kb s p o))
  (add-statement [kb stmt] (sesame-write-statement kb stmt))
  (add-statement [kb stmt context] (sesame-write-statement kb stmt context))
  (add-statement [kb s p o] (sesame-write-statement kb s p o))
  (add-statement [kb s p o context] (sesame-write-statement kb s p o context))
  (add-statements [kb stmts] (sesame-write-statements kb stmts))
  (add-statements [kb stmts context] (sesame-write-statements kb stmts context))
  ;; (ask-statement [kb s p o context] (sesame-ask-statement s p o context))
  ;; (query-statement [kb s p o context]
  ;;   (sesame-query-statement s p o context))
  ;; (load-rdf-file [kb file] (sesame-load-rdf-file kb file))
  ;; (load-rdf-file [kb file type] (sesame-load-rdf-file kb file type))
  ;;the following will throw exception for unknown rdf format
  ;;(load-rdf-stream [kb stream] (sesame-load-rdf-stream kb stream))
  ;;(load-rdf-stream [kb stream type] (sesame-load-rdf-stream kb stream type))
  )
;;; "constructors"
;;; --------------------------------------------------------
(defn new-writer [out-stream]
  ;; Build an N-Triples Rio writer over out-stream and start the RDF
  ;; document.  .startRDF is called purely for its side effect, so the
  ;; writer is returned explicitly.
  (let [writer (Rio/createWriter RDFFormat/NTRIPLES out-stream)]
    ;(output-stream target))]
    (.startRDF writer) ;side effect function doesn't return itself
    writer))
(defn open-sesame-writer [kb]
  ;; Open the kb's :target for writing and attach a fresh, started writer.
  ;; The output stream and value factory are carried on the new record so
  ;; close-sesame-writer can end the document and close the stream later.
  (let [out (output-stream (:target kb))
        writer (new-writer out)]
    (copy-sesame-slots (assoc (SesameWriterKB. (:target kb) writer)
                         ;(new-writer (:target kb)))
                         :output-stream out
                         :value-factory (:value-factory kb))
                       kb)))
(defn close-sesame-writer [kb]
  ;; End the RDF document and close the underlying stream (only if a
  ;; writer is actually attached), then return a closed copy of the kb
  ;; with no connection.
  (when (:connection kb)
    (.endRDF (:connection kb))
    (.close (:output-stream kb)))
  (copy-sesame-slots (assoc (SesameWriterKB. (:target kb)
                                             nil)
                       :value-factory (:value-factory kb))
                     kb))
;;if the target is a zipped output stream it will happily write there
;; e.g. pass in (GZIPOutputStream. (output-stream ...))
(defn new-sesame-writer-kb [target]
  ;; Construct a closed writer-kb over `target` (opened later via `open`),
  ;; with a fresh Sesame value factory and initialized ns mappings.
  (initialize-ns-mappings
    (assoc (SesameWriterKB. target nil) ;(initial-ns-mappings) nil)
      :value-factory (org.openrdf.model.impl.ValueFactoryImpl.))))
;; (.getValueFactory repository)))
;; these can't handle graphs... TODO change to NQUAD writer??
(defn sesame-write-statement
  "Writes a single statement to the kb's open RDF writer.  Accepts either
  a prebuilt Statement or an s/p/o triple (built via `statement`).  The
  `context` arities accept but ignore the context - see the TODO above
  about N-Triples not handling graphs.  Fixed: the first arity's type
  hint was misspelled `^Statment`."
  ([kb stmt] (.handleStatement (connection! kb)
                               ^Statement stmt))
  ([kb stmt context] (.handleStatement (connection! kb)
                                       ^Statement stmt))
  ([kb s p o] (.handleStatement (connection! kb)
                                ^Statement (statement kb s p o)))
  ([kb s p o context] (.handleStatement (connection! kb)
                                        ^Statement (statement kb s p o))))
(defn sesame-write-statements
  "Writes each statement in `stmts` via sesame-write-statement; `dorun`
  forces the lazy seq for side effects.  The `context` arity ignores the
  context, matching the single-statement writer."
  ([kb stmts] (dorun (map (partial sesame-write-statement kb) stmts)))
  ([kb stmts context] (dorun (map (partial sesame-write-statement kb) stmts))))
;;; --------------------------------------------------------
;;; END
;;; --------------------------------------------------------
| null | https://raw.githubusercontent.com/drlivingston/kr/2872ed6f6b1400841091a215e5495a579d153e1c/kr-sesame/kr-sesame-core/src/main/clojure/edu/ucdenver/ccp/kr/sesame/writer_kb.clj | clojure | --------------------------------------------------------
connections
--------------------------------------------------------
this is nonsese becasue to the circular defintions
and what can and cannot be forward delcared
--------------------------------------------------------
protocol implementation
--------------------------------------------------------
(initialize-sesame-writer kb))
(ns-maps [kb] ns-maps-var)
(ns-map-to-short [kb] (:ns-map-to-short (deref ns-maps-var)))
(ns-map-to-long [kb] (:ns-map-to-long (deref ns-maps-var)))
(create-string-literal [kb str lang]
(sesame-create-string literal kb val type))
(load-rdf-file [kb file] (sesame-load-rdf-file kb file))
(load-rdf-file [kb file type] (sesame-load-rdf-file kb file type))
the following will throw exception for unknown rdf format
(load-rdf-stream [kb stream] (sesame-load-rdf-stream kb stream))
(load-rdf-stream [kb stream type] (sesame-load-rdf-stream kb stream type))
"constructors"
--------------------------------------------------------
(output-stream target))]
side effect function doesn't return itself
(new-writer (:target kb)))
if the target is a zipped output stream it will happily write there
e.g. pass in (GZIPOutputStream. (output-stream ...))
(initial-ns-mappings) nil)
--------------------------------------------------------
END
-------------------------------------------------------- | (ns edu.ucdenver.ccp.kr.sesame.writer-kb
(use edu.ucdenver.ccp.kr.kb
[edu.ucdenver.ccp.kr.rdf :exclude (resource)]
edu.ucdenver.ccp.kr.sesame.kb
[edu.ucdenver.ccp.kr.sesame.rdf :exclude (resource)]
[clojure.java.io :exclude (resource)])
org.openrdf.model.impl . ValueFactoryBase
org.openrdf.model.impl.ValueFactoryImpl
java.nio.charset.Charset
(org.openrdf.rio Rio
RDFFormat
RDFWriter
RDFWriterFactory)
org.openrdf.rio.ntriples.NTriplesWriterFactory))
(declare initialize-sesame-writer
open-sesame-writer
close-sesame-writer
sesame-write-statement
sesame-write-statements)
(defrecord SesameWriterKB [target connection]
KB
(native [kb] target)
(open [kb] (open-sesame-writer kb))
(close [kb] (close-sesame-writer kb))
rdfKB
(root-ns-map [kb] (ns-map-to-long kb))
no - op
(create-resource [kb name] (sesame-create-resource kb name))
(create-property [kb name] (sesame-create-property kb name))
(create-literal [kb val] (sesame-create-literal kb val))
(create-literal [kb val type] (sesame-create-literal kb val type))
TODO convert to creating proper string literals
( create - string - literal [ kb str ] ( sesame - create - string - iteral kb ) )
(create-string-literal [kb str] (sesame-create-literal kb str))
(create-string-literal [kb str lang]
(sesame-create-literal kb str lang))
(create-blank-node [kb name] (sesame-create-blank-node kb name))
(create-statement [kb s p o] (sesame-create-statement kb s p o))
(add-statement [kb stmt] (sesame-write-statement kb stmt))
(add-statement [kb stmt context] (sesame-write-statement kb stmt context))
(add-statement [kb s p o] (sesame-write-statement kb s p o))
(add-statement [kb s p o context] (sesame-write-statement kb s p o context))
(add-statements [kb stmts] (sesame-write-statements kb stmts))
(add-statements [kb stmts context] (sesame-write-statements kb stmts context))
( ask - statement [ kb s p o context ] ( sesame - ask - statement s p o context ) )
( query - statement [ kb s p o context ]
( sesame - query - statement s p o context ) )
)
(defn new-writer [out-stream]
(let [writer (Rio/createWriter RDFFormat/NTRIPLES out-stream)]
writer))
(defn open-sesame-writer [kb]
(let [out (output-stream (:target kb))
writer (new-writer out)]
(copy-sesame-slots (assoc (SesameWriterKB. (:target kb) writer)
:output-stream out
:value-factory (:value-factory kb))
kb)))
(defn close-sesame-writer [kb]
(when (:connection kb)
(.endRDF (:connection kb))
(.close (:output-stream kb)))
(copy-sesame-slots (assoc (SesameWriterKB. (:target kb)
nil)
:value-factory (:value-factory kb))
kb))
(defn new-sesame-writer-kb [target]
(initialize-ns-mappings
:value-factory (org.openrdf.model.impl.ValueFactoryImpl.))))
( .getValueFactory repository ) ) )
these ca n't handle graphs ... TODO change to NQUAD writer ? ?
(defn sesame-write-statement
([kb stmt] (.handleStatement (connection! kb)
^Statment stmt))
([kb stmt context] (.handleStatement (connection! kb)
^Statement stmt))
([kb s p o] (.handleStatement (connection! kb)
^Statement (statement kb s p o)))
([kb s p o context] (.handleStatement (connection! kb)
^Statement (statement kb s p o))))
(defn sesame-write-statements
([kb stmts] (dorun (map (partial sesame-write-statement kb) stmts)))
([kb stmts context] (dorun (map (partial sesame-write-statement kb) stmts))))
|
ebfa2d0a321ba7d6193c8b2513b20950ca12dd368b1af0eda936819c3c0f7b35 | exoscale/clojure-kubernetes-client | extensions_v1beta1_ingress.clj | (ns clojure-kubernetes-client.specs.extensions-v1beta1-ingress
(:require [clojure.spec.alpha :as s]
[spec-tools.data-spec :as ds]
[clojure-kubernetes-client.specs.v1-object-meta :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-ingress-spec :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-ingress-status :refer :all]
)
(:import (java.io File)))
(declare extensions-v1beta1-ingress-data extensions-v1beta1-ingress)
(def extensions-v1beta1-ingress-data
{
(ds/opt :apiVersion) string?
(ds/opt :kind) string?
(ds/opt :metadata) v1-object-meta
(ds/opt :spec) extensions-v1beta1-ingress-spec
(ds/opt :status) extensions-v1beta1-ingress-status
})
(def extensions-v1beta1-ingress
(ds/spec
{:name ::extensions-v1beta1-ingress
:spec extensions-v1beta1-ingress-data}))
| null | https://raw.githubusercontent.com/exoscale/clojure-kubernetes-client/79d84417f28d048c5ac015c17e3926c73e6ac668/src/clojure_kubernetes_client/specs/extensions_v1beta1_ingress.clj | clojure | (ns clojure-kubernetes-client.specs.extensions-v1beta1-ingress
(:require [clojure.spec.alpha :as s]
[spec-tools.data-spec :as ds]
[clojure-kubernetes-client.specs.v1-object-meta :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-ingress-spec :refer :all]
[clojure-kubernetes-client.specs.extensions-v1beta1-ingress-status :refer :all]
)
(:import (java.io File)))
(declare extensions-v1beta1-ingress-data extensions-v1beta1-ingress)
(def extensions-v1beta1-ingress-data
{
(ds/opt :apiVersion) string?
(ds/opt :kind) string?
(ds/opt :metadata) v1-object-meta
(ds/opt :spec) extensions-v1beta1-ingress-spec
(ds/opt :status) extensions-v1beta1-ingress-status
})
(def extensions-v1beta1-ingress
(ds/spec
{:name ::extensions-v1beta1-ingress
:spec extensions-v1beta1-ingress-data}))
| |
a2b824c19a52581cf4d38f5bf342e7a54883738535c5cd7e35a1ac48e88fef82 | dylex/haskell-nfs | GET.hs | {-# LANGUAGE OverloadedStrings #-}
# LANGUAGE RecordWildCards #
module Network.WebDAV.NFS.GET
( httpGET
) where
import Control.Applicative ((<|>))
import Control.Monad (when, guard)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Builder as BSB
import Data.Maybe (mapMaybe)
import Data.Monoid ((<>))
import Data.Word (Word64)
import qualified Network.HTTP.Types as HTTP
import qualified Network.HTTP.Types.Header as HTTP
import qualified Network.NFS.V4 as NFS
import qualified Network.Wai as Wai
import Waimwork.HTTP (parseHTTPDate, formatHTTPDate, parseETag, renderETag)
import Network.WebDAV.NFS.Types
import Network.WebDAV.NFS.Request
import Network.WebDAV.NFS.Response
import Network.WebDAV.NFS.File
import Network.WebDAV.NFS.If
streamFile :: Context -> NFS.FileHandle -> Word64 -> Word64 -> Wai.StreamingBody
streamFile ctx fh start end send done = do
NFS.READ4res'NFS4_OK (NFS.READ4resok eof lbuf) <- NFS.nfsCall (nfsClient $ context ctx)
$ NFS.op (NFS.PUTFH4args fh) *> NFS.op (NFS.READ4args NFS.anonymousStateid start $ fromIntegral l)
let buf = NFS.unOpaqueString $ NFS.unLengthArray lbuf
send $ BSB.byteString buf
let next = start + fromIntegral (BS.length buf)
if next >= end || eof
then done
else streamFile ctx fh next end send done
where
r = end - start
l = r `min` fromIntegral (nfsBlockSize $ context ctx)
httpGET :: Context -> IO Wai.Response
httpGET ctx@Context{ contextFile = FileInfo{..} } = do
checkFileInfo NFS.aCCESS4_READ $ contextFile ctx
when (fileType /= Just NFS.NF4REG) $
throwMethodNotAllowed ctx
let headers =
[ (HTTP.hETag, renderETag fileETag)
, (HTTP.hLastModified, formatHTTPDate fileMTime)
, (HTTP.hAcceptRanges, "bytes")
]
isrange = all (either (fileETag ==) (fileMTime <=)) ifrange
ranges' = guard isrange >> mapMaybe (checkr . clampr (toInteger fileSize)) <$> ranges
sizeb = BSB.word64Dec fileSize
mapM_ (\s -> throwDAV $ HTTPError s headers) $ checkIfHeaders ctx
return $ case ranges' of
Nothing -> Wai.responseStream HTTP.ok200
((HTTP.hContentLength, buildBS sizeb) : headers)
(streamFile ctx fileHandle 0 fileSize)
Just [] -> emptyResponse HTTP.requestedRangeNotSatisfiable416
$ (HTTP.hContentRange, buildBS $ "bytes */" <> sizeb) : headers
Just [(a,b)] -> Wai.responseStream HTTP.partialContent206
( (HTTP.hContentLength, buildBS $ BSB.word64Dec (succ b - a))
: (HTTP.hContentRange, buildBS $ "bytes " <> BSB.word64Dec a <> BSB.char8 '-' <> BSB.word64Dec b <> BSB.char8 '/' <> sizeb)
: headers)
(streamFile ctx fileHandle a $ succ b)
Just _ -> emptyResponse HTTP.notImplemented501 [] -- "multipart/byteranges"
where
ifrange = (\s -> Right <$> parseHTTPDate s <|> Left <$> either (const Nothing) Just (parseETag s)) =<< header HTTP.hIfRange
ranges = HTTP.parseByteRanges =<< Wai.requestHeaderRange (contextRequest ctx)
header = requestHeader ctx
clampr z (HTTP.ByteRangeFrom a) = (a `max` 0, pred z)
clampr z (HTTP.ByteRangeFromTo a b) = (a `max` 0, b `min` pred z)
clampr z (HTTP.ByteRangeSuffix e) = (z - e `max` 0, pred z)
checkr (a, b)
| a <= b = Just (fromInteger a, fromInteger b)
| otherwise = Nothing
| null | https://raw.githubusercontent.com/dylex/haskell-nfs/07d213e09f7bf9e6fe3200aca3de494c3dcd54f7/webdav/Network/WebDAV/NFS/GET.hs | haskell | # LANGUAGE OverloadedStrings #
"multipart/byteranges" | # LANGUAGE RecordWildCards #
module Network.WebDAV.NFS.GET
( httpGET
) where
import Control.Applicative ((<|>))
import Control.Monad (when, guard)
import qualified Data.ByteString as BS
import qualified Data.ByteString.Builder as BSB
import Data.Maybe (mapMaybe)
import Data.Monoid ((<>))
import Data.Word (Word64)
import qualified Network.HTTP.Types as HTTP
import qualified Network.HTTP.Types.Header as HTTP
import qualified Network.NFS.V4 as NFS
import qualified Network.Wai as Wai
import Waimwork.HTTP (parseHTTPDate, formatHTTPDate, parseETag, renderETag)
import Network.WebDAV.NFS.Types
import Network.WebDAV.NFS.Request
import Network.WebDAV.NFS.Response
import Network.WebDAV.NFS.File
import Network.WebDAV.NFS.If
streamFile :: Context -> NFS.FileHandle -> Word64 -> Word64 -> Wai.StreamingBody
streamFile ctx fh start end send done = do
NFS.READ4res'NFS4_OK (NFS.READ4resok eof lbuf) <- NFS.nfsCall (nfsClient $ context ctx)
$ NFS.op (NFS.PUTFH4args fh) *> NFS.op (NFS.READ4args NFS.anonymousStateid start $ fromIntegral l)
let buf = NFS.unOpaqueString $ NFS.unLengthArray lbuf
send $ BSB.byteString buf
let next = start + fromIntegral (BS.length buf)
if next >= end || eof
then done
else streamFile ctx fh next end send done
where
r = end - start
l = r `min` fromIntegral (nfsBlockSize $ context ctx)
httpGET :: Context -> IO Wai.Response
httpGET ctx@Context{ contextFile = FileInfo{..} } = do
checkFileInfo NFS.aCCESS4_READ $ contextFile ctx
when (fileType /= Just NFS.NF4REG) $
throwMethodNotAllowed ctx
let headers =
[ (HTTP.hETag, renderETag fileETag)
, (HTTP.hLastModified, formatHTTPDate fileMTime)
, (HTTP.hAcceptRanges, "bytes")
]
isrange = all (either (fileETag ==) (fileMTime <=)) ifrange
ranges' = guard isrange >> mapMaybe (checkr . clampr (toInteger fileSize)) <$> ranges
sizeb = BSB.word64Dec fileSize
mapM_ (\s -> throwDAV $ HTTPError s headers) $ checkIfHeaders ctx
return $ case ranges' of
Nothing -> Wai.responseStream HTTP.ok200
((HTTP.hContentLength, buildBS sizeb) : headers)
(streamFile ctx fileHandle 0 fileSize)
Just [] -> emptyResponse HTTP.requestedRangeNotSatisfiable416
$ (HTTP.hContentRange, buildBS $ "bytes */" <> sizeb) : headers
Just [(a,b)] -> Wai.responseStream HTTP.partialContent206
( (HTTP.hContentLength, buildBS $ BSB.word64Dec (succ b - a))
: (HTTP.hContentRange, buildBS $ "bytes " <> BSB.word64Dec a <> BSB.char8 '-' <> BSB.word64Dec b <> BSB.char8 '/' <> sizeb)
: headers)
(streamFile ctx fileHandle a $ succ b)
where
ifrange = (\s -> Right <$> parseHTTPDate s <|> Left <$> either (const Nothing) Just (parseETag s)) =<< header HTTP.hIfRange
ranges = HTTP.parseByteRanges =<< Wai.requestHeaderRange (contextRequest ctx)
header = requestHeader ctx
clampr z (HTTP.ByteRangeFrom a) = (a `max` 0, pred z)
clampr z (HTTP.ByteRangeFromTo a b) = (a `max` 0, b `min` pred z)
clampr z (HTTP.ByteRangeSuffix e) = (z - e `max` 0, pred z)
checkr (a, b)
| a <= b = Just (fromInteger a, fromInteger b)
| otherwise = Nothing
|
14fd495de9eba5e449651dcd655252de0664fba4aaf5e4953b6f0f8fe99f4b06 | lispci/fiveam | run.lisp | -*- Mode : LISP ; Syntax : Ansi - Common - Lisp ; Package : FIVEAM ; Base : 10 ; -*-
(in-package :it.bese.fiveam)
;;;; * Running Tests
;;;; Once the programmer has defined what the tests are these need to
;;;; be run and the expected effects should be compared with the
;;;; actual effects. FiveAM provides the function RUN for this
;;;; purpose, RUN executes a number of tests and collects the results
;;;; of each individual check into a list which is then
returned . There are three types of test results : passed , failed
;;;; and skipped, these are represented by TEST-RESULT objects.
Generally running a test will return normally , but there are two
;;;; exceptional situations which can occur:
;;;; - An exception is signaled while running the test. If the
;;;; variable *on-error* is :DEBUG than FiveAM will enter the
;;;; debugger, otherwise a test failure (of type
;;;; unexpected-test-failure) is returned. When entering the
debugger two restarts are made available , one simply reruns the
;;;; current test and another signals a test-failure and continues
;;;; with the remaining tests.
;;;; - A circular dependency is detected. An error is signaled and a
;;;; restart is made available which signals a test-skipped and
;;;; continues with the remaining tests. This restart also sets the
;;;; dependency status of the test to nil, so any tests which depend
;;;; on this one (even if the dependency is not circular) will be
;;;; skipped.
;;;; The functions RUN!, !, !! and !!! are convenient wrappers around
;;;; RUN and EXPLAIN.
(deftype on-problem-action ()
'(member :debug :backtrace nil))
(declaim (type on-problem-action *on-error* *on-failure*))
(defvar *on-error* nil
"The action to perform on error:
- :DEBUG if we should drop into the debugger
- :BACKTRACE to print a backtrace
- NIL to simply continue")
(defvar *on-failure* nil
"The action to perform on check failure:
- :DEBUG if we should drop into the debugger
- :BACKTRACE to print a backtrace
- NIL to simply continue")
(defvar *debug-on-error* nil
"T if we should drop into the debugger on error, NIL otherwise.
OBSOLETE: superseded by *ON-ERROR*")
(defvar *debug-on-failure* nil
"T if we should drop into the debugger on a failing check, NIL otherwise.
OBSOLETE: superseded by *ON-FAILURE*")
(defparameter *print-names* t
"T if we should print test running progress, NIL otherwise.")
(defparameter *test-dribble-indent* (make-array 0
:element-type 'character
:fill-pointer 0
:adjustable t)
"Used to indent tests and test suites in their parent suite")
(defun import-testing-symbols (package-designator)
(import '(5am::is 5am::is-true 5am::is-false 5am::signals 5am::finishes)
package-designator))
(defparameter *run-queue* '()
"List of test waiting to be run.")
(define-condition circular-dependency (error)
((test-case :initarg :test-case))
(:report (lambda (cd stream)
(format stream "A circular dependency wes detected in ~S." (slot-value cd 'test-case))))
(:documentation "Condition signaled when a circular dependency
between test-cases has been detected."))
(defgeneric run-resolving-dependencies (test)
(:documentation "Given a dependency spec determine if the spec
is satisfied or not, this will generally involve running other
tests. If the dependency spec can be satisfied the test is also
run."))
(defmethod run-resolving-dependencies ((test test-case))
"Return true if this test, and its dependencies, are satisfied,
NIL otherwise."
(case (status test)
(:unknown
(setf (status test) :resolving)
(if (or (not (depends-on test))
(eql t (resolve-dependencies (depends-on test))))
(progn
(run-test-lambda test)
(status test))
(with-run-state (result-list)
(unless (eql :circular (status test))
(push (make-instance 'test-skipped
:test-case test
:reason "Dependencies not satisfied")
result-list)
(setf (status test) :depends-not-satisfied)))))
(:resolving
(restart-case
(error 'circular-dependency :test-case test)
(skip ()
:report (lambda (s)
(format s "Skip the test ~S and all its dependencies." (name test)))
(with-run-state (result-list)
(push (make-instance 'test-skipped :reason "Circular dependencies" :test-case test)
result-list))
(setf (status test) :circular))))
(t (status test))))
(defgeneric resolve-dependencies (depends-on))
(defmethod resolve-dependencies ((depends-on symbol))
"A test which depends on a symbol is interpreted as `(AND
,DEPENDS-ON)."
(run-resolving-dependencies (get-test depends-on)))
(defmethod resolve-dependencies ((depends-on list))
"Return true if the dependency spec DEPENDS-ON is satisfied,
nil otherwise."
(if (null depends-on)
t
(flet ((satisfies-depends-p (test)
(funcall test (lambda (dep)
(eql t (resolve-dependencies dep)))
(cdr depends-on))))
(ecase (car depends-on)
(and (satisfies-depends-p #'every))
(or (satisfies-depends-p #'some))
(not (satisfies-depends-p #'notany))
(:before (every #'(lambda (dep)
(let ((status (status (get-test dep))))
(if (eql :unknown status)
(run-resolving-dependencies (get-test dep))
status)))
(cdr depends-on)))))))
(defun results-status (result-list)
"Given a list of test results (generated while running a test)
return true if no results are of type TEST-FAILURE. Returns second
and third values, which are the set of failed tests and skipped
tests respectively."
(let ((failed-tests
(remove-if-not #'test-failure-p result-list))
(skipped-tests
(remove-if-not #'test-skipped-p result-list)))
(values (null failed-tests)
failed-tests
skipped-tests)))
(defun return-result-list (test-lambda)
"Run the test function TEST-LAMBDA and return a list of all
test results generated, does not modify the special environment
variable RESULT-LIST."
(bind-run-state ((result-list '()))
(funcall test-lambda)
result-list))
(defgeneric run-test-lambda (test))
(defmethod run-test-lambda ((test test-case))
(with-run-state (result-list)
(bind-run-state ((current-test test))
(labels ((abort-test (e &optional (reason (format nil "Unexpected Error: ~S~%~A." e e)))
(add-result 'unexpected-test-failure
:test-expr nil
:test-case test
:reason reason
:condition e))
(run-it ()
(let ((result-list '()))
(declare (special result-list))
(handler-bind ((check-failure (lambda (e)
(declare (ignore e))
(cond
((eql *on-failure* :debug)
nil)
(t
(when (eql *on-failure* :backtrace)
(trivial-backtrace:print-backtrace-to-stream
*test-dribble*))
(invoke-restart
(find-restart 'ignore-failure))))))
(error (lambda (e)
(unless (or (eql *on-error* :debug)
(typep e 'check-failure))
(when (eql *on-error* :backtrace)
(trivial-backtrace:print-backtrace-to-stream
*test-dribble*))
(abort-test e)
(return-from run-it result-list)))))
(restart-case
(handler-case
(let ((*readtable* (copy-readtable))
(*package* (runtime-package test)))
(when *print-names*
(format *test-dribble* "~%~ARunning test ~A " *test-dribble-indent* (name test))
(force-output *test-dribble*))
(if (collect-profiling-info test)
Timing info does n't get collected ATM , we need a portable library
( setf ( profiling - info test ) ( collect - timing ( test - lambda test ) ) )
(funcall (test-lambda test))
(funcall (test-lambda test))))
(storage-condition (e)
;; heap-exhausted/constrol-stack-exhausted
;; handler-case unwinds the stack (unlike handler-bind)
(abort-test e (format nil "STORAGE-CONDITION: aborted for safety. ~S~%~A." e e))
(return-from run-it result-list)))
(retest ()
:report (lambda (stream)
#-genera (format stream "~@<Rerun the test ~S~@:>" test)
#+genera (format stream "Rerun the test ~S" test))
(return-from run-it (run-it)))
(ignore ()
:report (lambda (stream)
#-genera (format stream "~@<Signal an exceptional test failure and abort the test ~S.~@:>" test)
#+genera (format stream "Signal an exceptional test failure and abort the test ~S." test))
(abort-test (make-instance 'test-failure :test-case test
:reason "Failure restart."))))
result-list))))
(let ((results (run-it)))
(setf (status test) (results-status results)
result-list (nconc result-list results)))))))
(defgeneric %run (test-spec)
(:documentation "Internal method for running a test. Does not
update the status of the tests nor the special variables !,
!!, !!!"))
(defmethod %run ((test test-case))
(run-resolving-dependencies test))
(defmethod %run ((tests list))
(mapc #'%run tests))
(defmethod %run ((suite test-suite))
(when *print-names*
(format *test-dribble* "~%~ARunning test suite ~A" *test-dribble-indent* (name suite))
(force-output *test-dribble*))
(let ((suite-results '()))
(flet ((run-tests ()
(loop
:for test :in (reverse (%test-names (tests suite)))
:do (%run test))))
(vector-push-extend #\space *test-dribble-indent*)
(unwind-protect
(bind-run-state ((result-list '()))
(unwind-protect
(if (collect-profiling-info suite)
Timing info does n't get collected ATM , we need a portable library
( setf ( profiling - info suite ) ( collect - timing # ' run - tests ) )
(run-tests)
(run-tests)))
(setf suite-results result-list
(status suite) (every #'test-passed-p suite-results)))
(vector-pop *test-dribble-indent*)
(with-run-state (result-list)
(setf result-list (nconc result-list suite-results)))))))
(defmethod %run ((test-name symbol))
(when-let (test (get-test test-name))
(%run test)))
(defvar *initial-!* (lambda () (format t "Haven't run that many tests yet.~%")))
(defvar *!* *initial-!*)
(defvar *!!* *initial-!*)
(defvar *!!!* *initial-!*)
;;;; ** Public entry points
#+#.(cl:if (cl:ignore-errors
(cl:find-symbol "&OPTIONAL-AND-&KEY-IN-LAMBDA-LIST" "SB-KERNEL"))
'(and) '(or))
(declaim (sb-ext:muffle-conditions sb-kernel:&optional-and-&key-in-lambda-list))
(defun run! (&optional (test-spec *suite*)
&key ((:print-names *print-names*) *print-names*))
"Equivalent to (explain! (run TEST-SPEC))."
(explain! (run test-spec)))
(defun explain! (result-list)
"Explain the results of RESULT-LIST using a
detailed-text-explainer with output going to *test-dribble*.
Return a boolean indicating whether no tests failed."
(explain (make-instance 'detailed-text-explainer) result-list *test-dribble*)
(results-status result-list))
(defun debug! (&optional (test-spec *suite*))
"Calls (run! test-spec) but enters the debugger if any kind of error happens."
(let ((*on-error* :debug)
(*on-failure* :debug))
(run! test-spec)))
(defun run (test-spec &key ((:print-names *print-names*) *print-names*))
"Run the test specified by TEST-SPEC.
TEST-SPEC can be either a symbol naming a test or test suite, or
a testable-object object. This function changes the operations
performed by the !, !! and !!! functions."
(psetf *!* (lambda ()
(loop :for test :in (test-names)
:do (setf (status (get-test test)) :unknown))
(bind-run-state ((result-list '()))
(with-simple-restart (explain "Ignore the rest of the tests and explain current results")
(%run test-spec))
result-list))
*!!* *!*
*!!!* *!!*)
(let ((*on-error*
(or *on-error* (cond
(*debug-on-error*
(format *test-dribble* "*DEBUG-ON-ERROR* is obsolete. Use *ON-ERROR*.")
:debug)
(t nil))))
(*on-failure*
(or *on-failure* (cond
(*debug-on-failure*
(format *test-dribble* "*DEBUG-ON-FAILURE* is obsolete. Use *ON-FAILURE*.")
:debug)
(t nil)))))
(funcall *!*)))
(defun ! ()
"Rerun the most recently run test and explain the results."
(explain! (funcall *!*)))
(defun !! ()
"Rerun the second most recently run test and explain the results."
(explain! (funcall *!!*)))
(defun !!! ()
"Rerun the third most recently run test and explain the results."
(explain! (funcall *!!!*)))
(defun run-all-tests (&key (summary :end))
"Runs all defined test suites, T if all tests passed and NIL otherwise.
SUMMARY can be :END to print a summary at the end, :SUITE to print it
after each suite or NIL to skip explanations."
(check-type summary (member nil :suite :end))
(loop :for suite :in (cons 'nil (sort (copy-list *toplevel-suites*) #'string<=))
:for results := (if (suite-emptyp suite) nil (run suite))
:when (consp results)
:collect results :into all-results
:do (cond
((not (eql summary :suite))
nil)
(results
(explain! results))
(suite
(format *test-dribble* "Suite ~A is empty~%" suite)))
:finally (progn
(when (eql summary :end)
(explain! (alexandria:flatten all-results)))
(return (every #'results-status all-results)))))
Copyright ( c ) 2002 - 2003 ,
;; All rights reserved.
;;
;; Redistribution and use in source and binary forms, with or without
;; modification, are permitted provided that the following conditions are
;; met:
;;
;; - Redistributions of source code must retain the above copyright
;; notice, this list of conditions and the following disclaimer.
;;
;; - Redistributions in binary form must reproduce the above copyright
;; notice, this list of conditions and the following disclaimer in the
;; documentation and/or other materials provided with the distribution.
;;
- Neither the name of , nor , nor the names
;; of its contributors may be used to endorse or promote products
;; derived from this software without specific prior written permission.
;;
;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
;; LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
;; A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR ANY DIRECT , INDIRECT , INCIDENTAL ,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT
LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE ,
;; DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
;; (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
| null | https://raw.githubusercontent.com/lispci/fiveam/e11dee752a8f59065033ef9d60641d4a2f1e8379/src/run.lisp | lisp | Syntax : Ansi - Common - Lisp ; Package : FIVEAM ; Base : 10 ; -*-
* Running Tests
Once the programmer has defined what the tests are these need to
be run and the expected effects should be compared with the
actual effects. FiveAM provides the function RUN for this
purpose, RUN executes a number of tests and collects the results
of each individual check into a list which is then
and skipped, these are represented by TEST-RESULT objects.
exceptional situations which can occur:
- An exception is signaled while running the test. If the
variable *on-error* is :DEBUG than FiveAM will enter the
debugger, otherwise a test failure (of type
unexpected-test-failure) is returned. When entering the
current test and another signals a test-failure and continues
with the remaining tests.
- A circular dependency is detected. An error is signaled and a
restart is made available which signals a test-skipped and
continues with the remaining tests. This restart also sets the
dependency status of the test to nil, so any tests which depend
on this one (even if the dependency is not circular) will be
skipped.
The functions RUN!, !, !! and !!! are convenient wrappers around
RUN and EXPLAIN.
heap-exhausted/constrol-stack-exhausted
handler-case unwinds the stack (unlike handler-bind)
** Public entry points
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
LOSS OF USE ,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
(in-package :it.bese.fiveam)
returned . There are three types of test results : passed , failed
Generally running a test will return normally , but there are two
debugger two restarts are made available , one simply reruns the
(deftype on-problem-action ()
'(member :debug :backtrace nil))
(declaim (type on-problem-action *on-error* *on-failure*))
(defvar *on-error* nil
"The action to perform on error:
- :DEBUG if we should drop into the debugger
- :BACKTRACE to print a backtrace
- NIL to simply continue")
(defvar *on-failure* nil
"The action to perform on check failure:
- :DEBUG if we should drop into the debugger
- :BACKTRACE to print a backtrace
- NIL to simply continue")
(defvar *debug-on-error* nil
"T if we should drop into the debugger on error, NIL otherwise.
OBSOLETE: superseded by *ON-ERROR*")
(defvar *debug-on-failure* nil
"T if we should drop into the debugger on a failing check, NIL otherwise.
OBSOLETE: superseded by *ON-FAILURE*")
(defparameter *print-names* t
"T if we should print test running progress, NIL otherwise.")
(defparameter *test-dribble-indent* (make-array 0
:element-type 'character
:fill-pointer 0
:adjustable t)
"Used to indent tests and test suites in their parent suite")
(defun import-testing-symbols (package-designator)
(import '(5am::is 5am::is-true 5am::is-false 5am::signals 5am::finishes)
package-designator))
(defparameter *run-queue* '()
"List of test waiting to be run.")
(define-condition circular-dependency (error)
((test-case :initarg :test-case))
(:report (lambda (cd stream)
(format stream "A circular dependency wes detected in ~S." (slot-value cd 'test-case))))
(:documentation "Condition signaled when a circular dependency
between test-cases has been detected."))
(defgeneric run-resolving-dependencies (test)
(:documentation "Given a dependency spec determine if the spec
is satisfied or not, this will generally involve running other
tests. If the dependency spec can be satisfied the test is also
run."))
(defmethod run-resolving-dependencies ((test test-case))
"Return true if this test, and its dependencies, are satisfied,
NIL otherwise."
(case (status test)
(:unknown
(setf (status test) :resolving)
(if (or (not (depends-on test))
(eql t (resolve-dependencies (depends-on test))))
(progn
(run-test-lambda test)
(status test))
(with-run-state (result-list)
(unless (eql :circular (status test))
(push (make-instance 'test-skipped
:test-case test
:reason "Dependencies not satisfied")
result-list)
(setf (status test) :depends-not-satisfied)))))
(:resolving
(restart-case
(error 'circular-dependency :test-case test)
(skip ()
:report (lambda (s)
(format s "Skip the test ~S and all its dependencies." (name test)))
(with-run-state (result-list)
(push (make-instance 'test-skipped :reason "Circular dependencies" :test-case test)
result-list))
(setf (status test) :circular))))
(t (status test))))
(defgeneric resolve-dependencies (depends-on))
(defmethod resolve-dependencies ((depends-on symbol))
"A test which depends on a symbol is interpreted as `(AND
,DEPENDS-ON)."
(run-resolving-dependencies (get-test depends-on)))
(defmethod resolve-dependencies ((depends-on list))
"Return true if the dependency spec DEPENDS-ON is satisfied,
nil otherwise."
(if (null depends-on)
t
(flet ((satisfies-depends-p (test)
(funcall test (lambda (dep)
(eql t (resolve-dependencies dep)))
(cdr depends-on))))
(ecase (car depends-on)
(and (satisfies-depends-p #'every))
(or (satisfies-depends-p #'some))
(not (satisfies-depends-p #'notany))
(:before (every #'(lambda (dep)
(let ((status (status (get-test dep))))
(if (eql :unknown status)
(run-resolving-dependencies (get-test dep))
status)))
(cdr depends-on)))))))
(defun results-status (result-list)
"Given a list of test results (generated while running a test)
return true if no results are of type TEST-FAILURE. Returns second
and third values, which are the set of failed tests and skipped
tests respectively."
(let ((failed-tests
(remove-if-not #'test-failure-p result-list))
(skipped-tests
(remove-if-not #'test-skipped-p result-list)))
(values (null failed-tests)
failed-tests
skipped-tests)))
(defun return-result-list (test-lambda)
"Run the test function TEST-LAMBDA and return a list of all
test results generated, does not modify the special environment
variable RESULT-LIST."
(bind-run-state ((result-list '()))
(funcall test-lambda)
result-list))
(defgeneric run-test-lambda (test))
(defmethod run-test-lambda ((test test-case))
(with-run-state (result-list)
(bind-run-state ((current-test test))
(labels ((abort-test (e &optional (reason (format nil "Unexpected Error: ~S~%~A." e e)))
(add-result 'unexpected-test-failure
:test-expr nil
:test-case test
:reason reason
:condition e))
(run-it ()
(let ((result-list '()))
(declare (special result-list))
(handler-bind ((check-failure (lambda (e)
(declare (ignore e))
(cond
((eql *on-failure* :debug)
nil)
(t
(when (eql *on-failure* :backtrace)
(trivial-backtrace:print-backtrace-to-stream
*test-dribble*))
(invoke-restart
(find-restart 'ignore-failure))))))
(error (lambda (e)
(unless (or (eql *on-error* :debug)
(typep e 'check-failure))
(when (eql *on-error* :backtrace)
(trivial-backtrace:print-backtrace-to-stream
*test-dribble*))
(abort-test e)
(return-from run-it result-list)))))
(restart-case
(handler-case
(let ((*readtable* (copy-readtable))
(*package* (runtime-package test)))
(when *print-names*
(format *test-dribble* "~%~ARunning test ~A " *test-dribble-indent* (name test))
(force-output *test-dribble*))
(if (collect-profiling-info test)
Timing info does n't get collected ATM , we need a portable library
( setf ( profiling - info test ) ( collect - timing ( test - lambda test ) ) )
(funcall (test-lambda test))
(funcall (test-lambda test))))
(storage-condition (e)
(abort-test e (format nil "STORAGE-CONDITION: aborted for safety. ~S~%~A." e e))
(return-from run-it result-list)))
(retest ()
:report (lambda (stream)
#-genera (format stream "~@<Rerun the test ~S~@:>" test)
#+genera (format stream "Rerun the test ~S" test))
(return-from run-it (run-it)))
(ignore ()
:report (lambda (stream)
#-genera (format stream "~@<Signal an exceptional test failure and abort the test ~S.~@:>" test)
#+genera (format stream "Signal an exceptional test failure and abort the test ~S." test))
(abort-test (make-instance 'test-failure :test-case test
:reason "Failure restart."))))
result-list))))
(let ((results (run-it)))
(setf (status test) (results-status results)
result-list (nconc result-list results)))))))
(defgeneric %run (test-spec)
(:documentation "Internal method for running a test. Does not
update the status of the tests nor the special variables !,
!!, !!!"))
(defmethod %run ((test test-case))
(run-resolving-dependencies test))
(defmethod %run ((tests list))
(mapc #'%run tests))
(defmethod %run ((suite test-suite))
(when *print-names*
(format *test-dribble* "~%~ARunning test suite ~A" *test-dribble-indent* (name suite))
(force-output *test-dribble*))
(let ((suite-results '()))
(flet ((run-tests ()
(loop
:for test :in (reverse (%test-names (tests suite)))
:do (%run test))))
(vector-push-extend #\space *test-dribble-indent*)
(unwind-protect
(bind-run-state ((result-list '()))
(unwind-protect
(if (collect-profiling-info suite)
Timing info does n't get collected ATM , we need a portable library
( setf ( profiling - info suite ) ( collect - timing # ' run - tests ) )
(run-tests)
(run-tests)))
(setf suite-results result-list
(status suite) (every #'test-passed-p suite-results)))
(vector-pop *test-dribble-indent*)
(with-run-state (result-list)
(setf result-list (nconc result-list suite-results)))))))
(defmethod %run ((test-name symbol))
(when-let (test (get-test test-name))
(%run test)))
(defvar *initial-!* (lambda () (format t "Haven't run that many tests yet.~%")))
(defvar *!* *initial-!*)
(defvar *!!* *initial-!*)
(defvar *!!!* *initial-!*)
#+#.(cl:if (cl:ignore-errors
(cl:find-symbol "&OPTIONAL-AND-&KEY-IN-LAMBDA-LIST" "SB-KERNEL"))
'(and) '(or))
(declaim (sb-ext:muffle-conditions sb-kernel:&optional-and-&key-in-lambda-list))
(defun run! (&optional (test-spec *suite*)
&key ((:print-names *print-names*) *print-names*))
"Equivalent to (explain! (run TEST-SPEC))."
(explain! (run test-spec)))
(defun explain! (result-list)
"Explain the results of RESULT-LIST using a
detailed-text-explainer with output going to *test-dribble*.
Return a boolean indicating whether no tests failed."
(explain (make-instance 'detailed-text-explainer) result-list *test-dribble*)
(results-status result-list))
(defun debug! (&optional (test-spec *suite*))
"Calls (run! test-spec) but enters the debugger if any kind of error happens."
(let ((*on-error* :debug)
(*on-failure* :debug))
(run! test-spec)))
(defun run (test-spec &key ((:print-names *print-names*) *print-names*))
"Run the test specified by TEST-SPEC.
TEST-SPEC can be either a symbol naming a test or test suite, or
a testable-object object. This function changes the operations
performed by the !, !! and !!! functions."
(psetf *!* (lambda ()
(loop :for test :in (test-names)
:do (setf (status (get-test test)) :unknown))
(bind-run-state ((result-list '()))
(with-simple-restart (explain "Ignore the rest of the tests and explain current results")
(%run test-spec))
result-list))
*!!* *!*
*!!!* *!!*)
(let ((*on-error*
(or *on-error* (cond
(*debug-on-error*
(format *test-dribble* "*DEBUG-ON-ERROR* is obsolete. Use *ON-ERROR*.")
:debug)
(t nil))))
(*on-failure*
(or *on-failure* (cond
(*debug-on-failure*
(format *test-dribble* "*DEBUG-ON-FAILURE* is obsolete. Use *ON-FAILURE*.")
:debug)
(t nil)))))
(funcall *!*)))
(defun ! ()
"Rerun the most recently run test and explain the results."
(explain! (funcall *!*)))
(defun !! ()
"Rerun the second most recently run test and explain the results."
(explain! (funcall *!!*)))
(defun !!! ()
"Rerun the third most recently run test and explain the results."
(explain! (funcall *!!!*)))
(defun run-all-tests (&key (summary :end))
"Runs all defined test suites, T if all tests passed and NIL otherwise.
SUMMARY can be :END to print a summary at the end, :SUITE to print it
after each suite or NIL to skip explanations."
(check-type summary (member nil :suite :end))
(loop :for suite :in (cons 'nil (sort (copy-list *toplevel-suites*) #'string<=))
:for results := (if (suite-emptyp suite) nil (run suite))
:when (consp results)
:collect results :into all-results
:do (cond
((not (eql summary :suite))
nil)
(results
(explain! results))
(suite
(format *test-dribble* "Suite ~A is empty~%" suite)))
:finally (progn
(when (eql summary :end)
(explain! (alexandria:flatten all-results)))
(return (every #'results-status all-results)))))
Copyright ( c ) 2002 - 2003 ,
- Neither the name of , nor , nor the names
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
OWNER OR ANY DIRECT , INDIRECT , INCIDENTAL ,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT
THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
|
f857c4c5d9066d24fbc72d27921ea5367bcea3c510c75285911c2c6192d5bcd0 | hidaris/thinking-dumps | 12_delay_and_force.rkt | #lang racket
(provide (all-defined-out))
;; this is a silly addition function that purposely runs slows for
;; demonstration purposes
(define (slow-add x y)
(letrec ([slow-id (lambda (y z)
(if (= 0 z)
y
(slow-id y (- z 1))))])
(+ (slow-id x 50000000) y)))
;; multiplies x and result of y-thunk, calling y-thunk x times
assumes x is > = 0
(cond [(= x 0) 0]
[(= x 1) (y-thunk)]
[#t (+ (y-thunk) (my-mult (- x 1) y-thunk))]))
(define (my-delay th)
a one - of " type " we will update /in place/
(define (my-force p)
(if (mcar p)
(mcdr p)
(begin (set-mcar! p #t)
(set-mcdr! p ((mcdr p)))
(mcdr p)))) | null | https://raw.githubusercontent.com/hidaris/thinking-dumps/3fceaf9e6195ab99c8315749814a7377ef8baf86/cse341/racket/12_delay_and_force.rkt | racket | this is a silly addition function that purposely runs slows for
demonstration purposes
multiplies x and result of y-thunk, calling y-thunk x times | #lang racket
(provide (all-defined-out))
(define (slow-add x y)
(letrec ([slow-id (lambda (y z)
(if (= 0 z)
y
(slow-id y (- z 1))))])
(+ (slow-id x 50000000) y)))
assumes x is > = 0
(cond [(= x 0) 0]
[(= x 1) (y-thunk)]
[#t (+ (y-thunk) (my-mult (- x 1) y-thunk))]))
(define (my-delay th)
a one - of " type " we will update /in place/
(define (my-force p)
(if (mcar p)
(mcdr p)
(begin (set-mcar! p #t)
(set-mcdr! p ((mcdr p)))
(mcdr p)))) |
2e478dd11f22c4dfd2832adc1398c4d4317d35737664634dd3be5e88a555b680 | takikawa/racket-ppa | info.rkt | (module info setup/infotab (#%module-begin (define collection (quote multi)) (define deps (quote ("base" "draw-lib" "gui-lib" "string-constants-lib"))) (define pkg-desc "Functions for constructing icons and logos") (define pkg-authors (quote (ntoronto))) (define license (quote (Apache-2.0 OR MIT)))))
| null | https://raw.githubusercontent.com/takikawa/racket-ppa/26d6ae74a1b19258c9789b7c14c074d867a4b56b/share/pkgs/images-gui-lib/info.rkt | racket | (module info setup/infotab (#%module-begin (define collection (quote multi)) (define deps (quote ("base" "draw-lib" "gui-lib" "string-constants-lib"))) (define pkg-desc "Functions for constructing icons and logos") (define pkg-authors (quote (ntoronto))) (define license (quote (Apache-2.0 OR MIT)))))
| |
30ae324479e5c2ea13f5074545f2a8fa18a67e9ccb320481ae88e1a620376754 | choener/ADPfusion | Array.hs |
{-# Language MagicHash #-}
module ADPfusion.Core.SynVar.Array
( module ADPfusion.Core.SynVar.Array.Type
, module ADPfusion.Core.SynVar.Array
) where
import Data.Proxy
import Data.Strict.Tuple hiding (snd)
import Data.Vector.Fusion.Stream.Monadic
import GHC.Exts
import Prelude hiding (map,mapM)
import Data.PrimitiveArray hiding (map)
import ADPfusion.Core.Classes
import ADPfusion.Core.Multi
import ADPfusion.Core.SynVar.Array.Type
import ADPfusion.Core.SynVar.Backtrack
import ADPfusion.Core.SynVar.Indices
import ADPfusion.Core.SynVar.TableWrap
-- | Constraints needed to use @iTblStream@.
type ITblCx m pos ls arr x u c i =
( TableStaticVar pos c u i
, Element ls i
, AddIndexDense (Z:.pos) (Elm (SynVar1 (Elm ls i)) (Z:.i)) (Z:.c) (Z:.u) (Z:.i)
, PrimArrayOps arr u x
)
| General function for @ITbl@s with skalar indices .
iTblStream
∷ forall b s m pos posLeft ls arr x u c i
. ( ITblCx m pos ls arr x u c i
, posLeft ~ LeftPosTy pos (TwITbl b s m arr c u x) i
, MkStream m posLeft ls i
)
⇒ Proxy pos
→ Pair ls (TwITbl b s m arr c u x)
→ Int#
→ LimitType i
→ i
→ Stream m (Elm (ls :!: TwITbl b s m arr c u x) i)
iTblStream pos (ls :!: TW (ITbl c t) _) grd us is
= map (\(s,tt,ii') -> ElmITbl (t!tt) ii' s)
. addIndexDense1 pos c ub us is
$ mkStream (Proxy ∷ Proxy posLeft) ls grd us (tableStreamIndex (Proxy :: Proxy pos) c ub is)
where ub = upperBound t
# Inline iTblStream #
| General function for @Backtrack ITbl@s with skalar indices .
btITblStream
∷ forall b s mB mF pos posLeft ls arr x r u c i
. ( ITblCx mB pos ls arr x u c i
, posLeft ~ LeftPosTy pos (TwITblBt b s arr c u x mF mB r) i
, MkStream mB posLeft ls i
)
⇒ Proxy pos
→ Pair ls (TwITblBt b s arr c u x mF mB r)
→ Int#
→ LimitType i
→ i
→ Stream mB (Elm (ls :!: TwITblBt b s arr c u x mF mB r) i)
btITblStream pos (ls :!: TW (BtITbl c t) bt) grd us is
= mapM (\(s,tt,ii') -> bt ub tt >>= \ ~bb -> return $ ElmBtITbl (t!tt) bb ii' s)
. addIndexDense1 pos c ub us is
$ mkStream (Proxy ∷ Proxy posLeft) ls grd us (tableStreamIndex (Proxy :: Proxy pos) c ub is)
where ub = upperBound t
# Inline btITblStream #
-- ** Instances
instance
( Monad m
, ITblCx m pos ls arr x u c (i I)
, MkStream m (LeftPosTy pos (TwITbl b s m arr c u x) (i I)) ls (i I)
) => MkStream m pos (ls :!: TwITbl b s m arr c u x) (i I) where
mkStream = iTblStream
{-# Inline mkStream #-}
instance
( Monad mB
, ITblCx mB pos ls arr x u c (i I)
, MkStream mB (LeftPosTy pos (TwITblBt b s arr c u x mF mB r) (i I)) ls (i I)
)
⇒ MkStream mB pos (ls :!: TwITblBt b s arr c u x mF mB r) (i I) where
mkStream = btITblStream
{-# Inline mkStream #-}
-- |
--
TODO Unify the IOC streams , since they all have the same implementation
instance
( Monad m
, ITblCx m pos ls arr x u c (i O)
, MkStream m (LeftPosTy pos (TwITbl b s m arr c u x) (i O)) ls (i O)
) => MkStream m pos (ls :!: TwITbl b s m arr c u x) (i O) where
mkStream = iTblStream
{-# Inline mkStream #-}
instance
( Monad mB
, ITblCx mB pos ls arr x u c (i O)
, MkStream mB (LeftPosTy pos (TwITblBt b s arr c u x mF mB r) (i O)) ls (i O)
)
⇒ MkStream mB pos (ls :!: TwITblBt b s arr c u x mF mB r) (i O) where
mkStream = btITblStream
{-# Inline mkStream #-}
instance
( Monad m
, ITblCx m ls arr x u c ( i C )
) = > MkStream m ( ls : ! : TwITbl m c u x ) ( i C ) where
mkStream = iTblStream
{ - # Inline mkStream #
instance
( Monad m
, ITblCx m ls arr x u c (i C)
) => MkStream m (ls :!: TwITbl m arr c u x) (i C) where
mkStream = iTblStream
{-# Inline mkStream #-}
instance
( Monad mB
, ITblCx mB ls arr x u c (i O)
) => MkStream mB (ls :!: TwITblBt arr c u x mF mB r) (i O) where
mkStream = btITblStream
{-# Inline mkStream #-}
instance
( Monad mB
, ITblCx mB ls arr x u c (i C)
) => MkStream mB (ls :!: TwITblBt arr c u x mF mB r) (i C) where
mkStream = btITblStream
{-# Inline mkStream #-}
instance ModifyConstraint (TwITbl m arr EmptyOk i x) where
type TNE (TwITbl m arr EmptyOk i x) = TwITbl m arr NonEmpty i x
type TE (TwITbl m arr EmptyOk i x) = TwITbl m arr EmptyOk i x
toNonEmpty (TW (ITbl b l _ arr) f) = TW (ITbl b l NonEmpty arr) f
# Inline toNonEmpty #
instance ModifyConstraint (TwITblBt arr EmptyOk i x mF mB r) where
type TNE (TwITblBt arr EmptyOk i x mF mB r) = TwITblBt arr NonEmpty i x mF mB r
type TE (TwITblBt arr EmptyOk i x mF mB r) = TwITblBt arr EmptyOk i x mF mB r
toNonEmpty (TW (BtITbl _ arr) bt) = TW (BtITbl NonEmpty arr) bt
# Inline toNonEmpty #
-}
| null | https://raw.githubusercontent.com/choener/ADPfusion/16ae59acddd4518e6f177ce1860f1705ac421b8f/ADPfusion/Core/SynVar/Array.hs | haskell | # Language MagicHash #
| Constraints needed to use @iTblStream@.
** Instances
# Inline mkStream #
# Inline mkStream #
|
# Inline mkStream #
# Inline mkStream #
# Inline mkStream #
# Inline mkStream #
# Inline mkStream # |
module ADPfusion.Core.SynVar.Array
( module ADPfusion.Core.SynVar.Array.Type
, module ADPfusion.Core.SynVar.Array
) where
import Data.Proxy
import Data.Strict.Tuple hiding (snd)
import Data.Vector.Fusion.Stream.Monadic
import GHC.Exts
import Prelude hiding (map,mapM)
import Data.PrimitiveArray hiding (map)
import ADPfusion.Core.Classes
import ADPfusion.Core.Multi
import ADPfusion.Core.SynVar.Array.Type
import ADPfusion.Core.SynVar.Backtrack
import ADPfusion.Core.SynVar.Indices
import ADPfusion.Core.SynVar.TableWrap
type ITblCx m pos ls arr x u c i =
( TableStaticVar pos c u i
, Element ls i
, AddIndexDense (Z:.pos) (Elm (SynVar1 (Elm ls i)) (Z:.i)) (Z:.c) (Z:.u) (Z:.i)
, PrimArrayOps arr u x
)
| General function for @ITbl@s with skalar indices .
iTblStream
∷ forall b s m pos posLeft ls arr x u c i
. ( ITblCx m pos ls arr x u c i
, posLeft ~ LeftPosTy pos (TwITbl b s m arr c u x) i
, MkStream m posLeft ls i
)
⇒ Proxy pos
→ Pair ls (TwITbl b s m arr c u x)
→ Int#
→ LimitType i
→ i
→ Stream m (Elm (ls :!: TwITbl b s m arr c u x) i)
iTblStream pos (ls :!: TW (ITbl c t) _) grd us is
= map (\(s,tt,ii') -> ElmITbl (t!tt) ii' s)
. addIndexDense1 pos c ub us is
$ mkStream (Proxy ∷ Proxy posLeft) ls grd us (tableStreamIndex (Proxy :: Proxy pos) c ub is)
where ub = upperBound t
# Inline iTblStream #
| General function for @Backtrack ITbl@s with skalar indices .
btITblStream
∷ forall b s mB mF pos posLeft ls arr x r u c i
. ( ITblCx mB pos ls arr x u c i
, posLeft ~ LeftPosTy pos (TwITblBt b s arr c u x mF mB r) i
, MkStream mB posLeft ls i
)
⇒ Proxy pos
→ Pair ls (TwITblBt b s arr c u x mF mB r)
→ Int#
→ LimitType i
→ i
→ Stream mB (Elm (ls :!: TwITblBt b s arr c u x mF mB r) i)
btITblStream pos (ls :!: TW (BtITbl c t) bt) grd us is
= mapM (\(s,tt,ii') -> bt ub tt >>= \ ~bb -> return $ ElmBtITbl (t!tt) bb ii' s)
. addIndexDense1 pos c ub us is
$ mkStream (Proxy ∷ Proxy posLeft) ls grd us (tableStreamIndex (Proxy :: Proxy pos) c ub is)
where ub = upperBound t
# Inline btITblStream #
instance
( Monad m
, ITblCx m pos ls arr x u c (i I)
, MkStream m (LeftPosTy pos (TwITbl b s m arr c u x) (i I)) ls (i I)
) => MkStream m pos (ls :!: TwITbl b s m arr c u x) (i I) where
mkStream = iTblStream
instance
( Monad mB
, ITblCx mB pos ls arr x u c (i I)
, MkStream mB (LeftPosTy pos (TwITblBt b s arr c u x mF mB r) (i I)) ls (i I)
)
⇒ MkStream mB pos (ls :!: TwITblBt b s arr c u x mF mB r) (i I) where
mkStream = btITblStream
TODO Unify the IOC streams , since they all have the same implementation
instance
( Monad m
, ITblCx m pos ls arr x u c (i O)
, MkStream m (LeftPosTy pos (TwITbl b s m arr c u x) (i O)) ls (i O)
) => MkStream m pos (ls :!: TwITbl b s m arr c u x) (i O) where
mkStream = iTblStream
instance
( Monad mB
, ITblCx mB pos ls arr x u c (i O)
, MkStream mB (LeftPosTy pos (TwITblBt b s arr c u x mF mB r) (i O)) ls (i O)
)
⇒ MkStream mB pos (ls :!: TwITblBt b s arr c u x mF mB r) (i O) where
mkStream = btITblStream
instance
( Monad m
, ITblCx m ls arr x u c ( i C )
) = > MkStream m ( ls : ! : TwITbl m c u x ) ( i C ) where
mkStream = iTblStream
{ - # Inline mkStream #
instance
( Monad m
, ITblCx m ls arr x u c (i C)
) => MkStream m (ls :!: TwITbl m arr c u x) (i C) where
mkStream = iTblStream
instance
( Monad mB
, ITblCx mB ls arr x u c (i O)
) => MkStream mB (ls :!: TwITblBt arr c u x mF mB r) (i O) where
mkStream = btITblStream
instance
( Monad mB
, ITblCx mB ls arr x u c (i C)
) => MkStream mB (ls :!: TwITblBt arr c u x mF mB r) (i C) where
mkStream = btITblStream
instance ModifyConstraint (TwITbl m arr EmptyOk i x) where
type TNE (TwITbl m arr EmptyOk i x) = TwITbl m arr NonEmpty i x
type TE (TwITbl m arr EmptyOk i x) = TwITbl m arr EmptyOk i x
toNonEmpty (TW (ITbl b l _ arr) f) = TW (ITbl b l NonEmpty arr) f
# Inline toNonEmpty #
instance ModifyConstraint (TwITblBt arr EmptyOk i x mF mB r) where
type TNE (TwITblBt arr EmptyOk i x mF mB r) = TwITblBt arr NonEmpty i x mF mB r
type TE (TwITblBt arr EmptyOk i x mF mB r) = TwITblBt arr EmptyOk i x mF mB r
toNonEmpty (TW (BtITbl _ arr) bt) = TW (BtITbl NonEmpty arr) bt
# Inline toNonEmpty #
-}
|
da406ea4dea9551be577c7992f51805048a1cb114a8d375cdf5004359d6704ee | simmone/racket-simple-qr | timing-pattern.rkt | #lang racket
(require "../../../../share/draw/draw.rkt")
(require "../../../../share/func.rkt")
(provide (contract-out
[write-report-timing-pattern (-> hash? natural? path-string? void?)]
))
(define (write-report-timing-pattern points_map modules express_path)
(let* ([scrbl_dir (build-path express_path "timing-pattern")]
[scrbl_file (build-path scrbl_dir "timing-pattern.scrbl")]
[img_file (build-path scrbl_dir "timing-pattern.img")])
(with-output-to-file (build-path express_path "report.scrbl") #:exists 'append
(lambda ()
(printf "@include-section[\"timing-pattern/timing-pattern.scrbl\"]\n\n")))
(make-directory* scrbl_dir)
(with-output-to-file
scrbl_file
(lambda ()
(printf "#lang scribble/base\n\n")
(printf "@title{Timing Pattern}\n\n")
(printf "draw timing pattern.\n")
(printf "@section{Timing Pattern Bits}\n")
(printf (display-qr-bits modules points_map))
(printf "@section{Timing Pattern Image}\n")
(draw modules 5 points_map (make-hash) '("black" . "white") img_file)
(printf "@image{timing-pattern/timing-pattern.img}")
))))
| null | https://raw.githubusercontent.com/simmone/racket-simple-qr/904f1491bc521badeafeabd0d7d7e97e3d0ee958/simple-qr/write/lib/express/timing-pattern/timing-pattern.rkt | racket | #lang racket
(require "../../../../share/draw/draw.rkt")
(require "../../../../share/func.rkt")
(provide (contract-out
[write-report-timing-pattern (-> hash? natural? path-string? void?)]
))
(define (write-report-timing-pattern points_map modules express_path)
(let* ([scrbl_dir (build-path express_path "timing-pattern")]
[scrbl_file (build-path scrbl_dir "timing-pattern.scrbl")]
[img_file (build-path scrbl_dir "timing-pattern.img")])
(with-output-to-file (build-path express_path "report.scrbl") #:exists 'append
(lambda ()
(printf "@include-section[\"timing-pattern/timing-pattern.scrbl\"]\n\n")))
(make-directory* scrbl_dir)
(with-output-to-file
scrbl_file
(lambda ()
(printf "#lang scribble/base\n\n")
(printf "@title{Timing Pattern}\n\n")
(printf "draw timing pattern.\n")
(printf "@section{Timing Pattern Bits}\n")
(printf (display-qr-bits modules points_map))
(printf "@section{Timing Pattern Image}\n")
(draw modules 5 points_map (make-hash) '("black" . "white") img_file)
(printf "@image{timing-pattern/timing-pattern.img}")
))))
| |
f0ed8f61699bff2f1a543ea0f7e6b2cc5b807dfd1f19848592f596a708817b1b | facebookarchive/pfff | highlight_lisp.mli |
val visit_toplevel :
tag_hook:
(Ast_lisp.info -> Highlight_code.category -> unit) ->
Highlight_code.highlighter_preferences ->
Ast_lisp.program * Parser_lisp.token list ->
unit
| null | https://raw.githubusercontent.com/facebookarchive/pfff/ec21095ab7d445559576513a63314e794378c367/lang_lisp/analyze/highlight_lisp.mli | ocaml |
val visit_toplevel :
tag_hook:
(Ast_lisp.info -> Highlight_code.category -> unit) ->
Highlight_code.highlighter_preferences ->
Ast_lisp.program * Parser_lisp.token list ->
unit
| |
cf911f6df4234f11cbb547ec9391fa16fa663ee6f46f3ae5b87a7d61c9b01546 | Enecuum/Node | Scenario1.hs | # LANGUAGE DuplicateRecordFields #
module Enecuum.TestData.Nodes.Scenario1 where
import Enecuum.Prelude
import qualified Enecuum.Domain as D
import qualified Enecuum.Language as L
import Enecuum.TestData.RPC
import Enecuum.TestData.Nodes.Address
Scenario 1 : master node can interact with boot node .
bootNode :: L.NodeDefinitionL ()
bootNode = do
L.setNodeTag bootNodeTag
void $ L.serving D.Rpc 2000 $ do
L.method acceptHello1
L.method acceptGetHashId
simpleBootNodeDiscovery :: L.NodeL D.Address
simpleBootNodeDiscovery = pure bootNodeAddr
masterNodeInitialization :: L.NodeL (Either Text D.NodeID)
masterNodeInitialization = do
addr <- simpleBootNodeDiscovery
GetHashIDResponse eHashID <- L.makeRpcRequestUnsafe addr GetHashIDRequest
pure $ Right (D.NodeID eHashID)
masterNode :: L.NodeDefinitionL ()
masterNode = do
L.setNodeTag masterNodeTag
nodeId <- D.withSuccess $ L.initialization masterNodeInitialization
L.logInfo $ "Master node got id: " +|| nodeId ||+ "."
void $ L.serving D.Rpc 2000 $ do
L.method acceptHello1
L.method acceptHello2
| null | https://raw.githubusercontent.com/Enecuum/Node/3dfbc6a39c84bd45dd5f4b881e067044dde0153a/test/test-framework/Enecuum/TestData/Nodes/Scenario1.hs | haskell | # LANGUAGE DuplicateRecordFields #
module Enecuum.TestData.Nodes.Scenario1 where
import Enecuum.Prelude
import qualified Enecuum.Domain as D
import qualified Enecuum.Language as L
import Enecuum.TestData.RPC
import Enecuum.TestData.Nodes.Address
Scenario 1 : master node can interact with boot node .
bootNode :: L.NodeDefinitionL ()
bootNode = do
L.setNodeTag bootNodeTag
void $ L.serving D.Rpc 2000 $ do
L.method acceptHello1
L.method acceptGetHashId
simpleBootNodeDiscovery :: L.NodeL D.Address
simpleBootNodeDiscovery = pure bootNodeAddr
masterNodeInitialization :: L.NodeL (Either Text D.NodeID)
masterNodeInitialization = do
addr <- simpleBootNodeDiscovery
GetHashIDResponse eHashID <- L.makeRpcRequestUnsafe addr GetHashIDRequest
pure $ Right (D.NodeID eHashID)
masterNode :: L.NodeDefinitionL ()
masterNode = do
L.setNodeTag masterNodeTag
nodeId <- D.withSuccess $ L.initialization masterNodeInitialization
L.logInfo $ "Master node got id: " +|| nodeId ||+ "."
void $ L.serving D.Rpc 2000 $ do
L.method acceptHello1
L.method acceptHello2
| |
515c15de8654622e0a6dc975096be7695bc4f50a9cf741954475ead1bce7e1a2 | smart-chain-fr/tokenomia | Run.hs | # LANGUAGE FlexibleContexts #
# LANGUAGE LambdaCase #
# LANGUAGE RecordWildCards #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE TupleSections #
# LANGUAGE NumericUnderscores #
# LANGUAGE NamedFieldPuns #
module Tokenomia.ICO.Funds.Validation.Run
(dryRun
,run) where
import Prelude hiding (round,print)
import Control.Monad.Reader
import Control.Monad.Except
import Tokenomia.Common.Shell.Console (printLn)
import Tokenomia.Common.Environment
import Tokenomia.ICO.Funds.Validation.ChildAddress.State
import Tokenomia.ICO.Funds.Validation.Investor.Plan as Plan
import Tokenomia.Wallet.Type
import Tokenomia.Common.Error
import Tokenomia.ICO.Round.Settings
import Tokenomia.Wallet.ChildAddress.LocalRepository as ChildAddress
import Tokenomia.ICO.Funds.Validation.CardanoCLI.Convert as CardanoCLICommand
import qualified Tokenomia.ICO.Funds.Validation.CardanoCLI.Command as CardanoCLI
import qualified Tokenomia.ICO.Funds.Validation.CardanoCLI.Plan as CardanoCLI
import Tokenomia.ICO.Funds.Validation.CardanoCLI.Transact
import qualified Data.List.NonEmpty as NEL
import qualified Streamly.Prelude as S
import qualified Streamly.Internal.Data.Fold as SF
import Tokenomia.Common.PageNumber
import Data.Function ( (&) )
import Data.Maybe
import qualified Data.Set.NonEmpty as NES
import Tokenomia.ICO.Funds.WhiteListing.Repository
import Tokenomia.ICO.Funds.Validation.Status
import Tokenomia.Common.Transacting
import Tokenomia.ICO.Funds.Validation.Investor.Plan.Settings
dryRun
:: ( MonadIO m
, S.MonadAsync m
, MonadReader Environment m
, MonadError TokenomiaError m)
=> RoundSettings
-> m ()
dryRun round@RoundSettings {addresses = roundAddresses} = do
printLn $ show round
S.drain
$ streamCommandsToTransact round
& S.take 1 -- TODO : to be removed
& S.mapM (buildTx roundAddresses)
& S.mapM (\BuiltTx{estimatedFees} -> do
printLn $ "Tx Fees : " <> show estimatedFees
printLn "--------------------------------------")
printLn "------------------------------------------------"
printLn "- Investor's Funds Validation Ended (dry run) "
printLn "------------------------------------------------"
run
:: ( MonadIO m
, S.MonadAsync m
, MonadReader Environment m
, MonadError TokenomiaError m)
=> RoundSettings
-> m ()
run round@RoundSettings {addresses = roundAddresses} = do
let nbTxSentInParrallel = 500
printLn $ show round
S.drain
$ streamCommandsToTransact round
& S.take 1 -- TODO : to be removed
& S.mapM (transactWithoutConfirmation roundAddresses)
& S.chunksOf nbTxSentInParrallel SF.toList
& S.mapM (return . NEL.fromList)
& S.mapM (waitConfirmation . NEL.last )
printLn "--------------------------------------"
printLn "- Investor's Funds Validation Ended "
printLn "--------------------------------------"
streamCommandsToTransact
:: ( MonadIO m
, S.MonadAsync m
, MonadReader Environment m
, MonadError TokenomiaError m)
=> RoundSettings
-> S.SerialT m (CardanoCLI.Plan CardanoCLI.Command)
streamCommandsToTransact round@RoundSettings {addresses = roundAddresses,investorsWallet = wallet@Wallet{name = investorsWallet}} = do
let nbFundsPerTx = 7
S.fromList (PageNumber <$> [1..])
& S.mapM (\pageNumber -> fetchActiveAddresses roundAddresses pageNumber wallet)
& S.takeWhile isJust & S.map fromJust
& S.mapM (fetchByAddresses investorsWallet)
& S.mapM (fetchAllWhiteListedInvestorRef round)
& S.mapM (fetchAllWhiteListedFunds round)
& S.map (fmap (mkPlan $ mkPlanSettings round))
& S.mapM (displayInvestorPlans round)
& S.mapM (CardanoCLICommand.convertInvestorPlans round)
& S.concatMap S.fromList
& S.chunksOf nbFundsPerTx SF.toList
& S.mapM (return . NES.fromList . NEL.fromList) -- TODO : could break ?...
& S.map (CardanoCLI.mkPlan Nothing)
& S.mapM (\{commands} -> do
fees <- estimatedFees <$> buildTx roundAddresses planWithoutFees
printLn $ "Tx Fees : " <> show fees
printLn $ "> " <> (show . length) commands <> " commands will be sent : \n"
let planWithFees @CardanoCLI.Plan {commands = commandsWithDeductedFees} = CardanoCLI.mkPlan (Just fees) commands
mapM_ (printLn .show) commandsWithDeductedFees
return planWithFees )
| null | https://raw.githubusercontent.com/smart-chain-fr/tokenomia/dfb46829f0a88c559eddb3181e5320ed1a33601e/src/Tokenomia/ICO/Funds/Validation/Run.hs | haskell | # LANGUAGE OverloadedStrings #
TODO : to be removed
TODO : to be removed
TODO : could break ?... | # LANGUAGE FlexibleContexts #
# LANGUAGE LambdaCase #
# LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE TupleSections #
# LANGUAGE NumericUnderscores #
# LANGUAGE NamedFieldPuns #
module Tokenomia.ICO.Funds.Validation.Run
(dryRun
,run) where
import Prelude hiding (round,print)
import Control.Monad.Reader
import Control.Monad.Except
import Tokenomia.Common.Shell.Console (printLn)
import Tokenomia.Common.Environment
import Tokenomia.ICO.Funds.Validation.ChildAddress.State
import Tokenomia.ICO.Funds.Validation.Investor.Plan as Plan
import Tokenomia.Wallet.Type
import Tokenomia.Common.Error
import Tokenomia.ICO.Round.Settings
import Tokenomia.Wallet.ChildAddress.LocalRepository as ChildAddress
import Tokenomia.ICO.Funds.Validation.CardanoCLI.Convert as CardanoCLICommand
import qualified Tokenomia.ICO.Funds.Validation.CardanoCLI.Command as CardanoCLI
import qualified Tokenomia.ICO.Funds.Validation.CardanoCLI.Plan as CardanoCLI
import Tokenomia.ICO.Funds.Validation.CardanoCLI.Transact
import qualified Data.List.NonEmpty as NEL
import qualified Streamly.Prelude as S
import qualified Streamly.Internal.Data.Fold as SF
import Tokenomia.Common.PageNumber
import Data.Function ( (&) )
import Data.Maybe
import qualified Data.Set.NonEmpty as NES
import Tokenomia.ICO.Funds.WhiteListing.Repository
import Tokenomia.ICO.Funds.Validation.Status
import Tokenomia.Common.Transacting
import Tokenomia.ICO.Funds.Validation.Investor.Plan.Settings
dryRun
:: ( MonadIO m
, S.MonadAsync m
, MonadReader Environment m
, MonadError TokenomiaError m)
=> RoundSettings
-> m ()
dryRun round@RoundSettings {addresses = roundAddresses} = do
printLn $ show round
S.drain
$ streamCommandsToTransact round
& S.mapM (buildTx roundAddresses)
& S.mapM (\BuiltTx{estimatedFees} -> do
printLn $ "Tx Fees : " <> show estimatedFees
printLn "--------------------------------------")
printLn "------------------------------------------------"
printLn "- Investor's Funds Validation Ended (dry run) "
printLn "------------------------------------------------"
run
:: ( MonadIO m
, S.MonadAsync m
, MonadReader Environment m
, MonadError TokenomiaError m)
=> RoundSettings
-> m ()
run round@RoundSettings {addresses = roundAddresses} = do
let nbTxSentInParrallel = 500
printLn $ show round
S.drain
$ streamCommandsToTransact round
& S.mapM (transactWithoutConfirmation roundAddresses)
& S.chunksOf nbTxSentInParrallel SF.toList
& S.mapM (return . NEL.fromList)
& S.mapM (waitConfirmation . NEL.last )
printLn "--------------------------------------"
printLn "- Investor's Funds Validation Ended "
printLn "--------------------------------------"
streamCommandsToTransact
:: ( MonadIO m
, S.MonadAsync m
, MonadReader Environment m
, MonadError TokenomiaError m)
=> RoundSettings
-> S.SerialT m (CardanoCLI.Plan CardanoCLI.Command)
streamCommandsToTransact round@RoundSettings {addresses = roundAddresses,investorsWallet = wallet@Wallet{name = investorsWallet}} = do
let nbFundsPerTx = 7
S.fromList (PageNumber <$> [1..])
& S.mapM (\pageNumber -> fetchActiveAddresses roundAddresses pageNumber wallet)
& S.takeWhile isJust & S.map fromJust
& S.mapM (fetchByAddresses investorsWallet)
& S.mapM (fetchAllWhiteListedInvestorRef round)
& S.mapM (fetchAllWhiteListedFunds round)
& S.map (fmap (mkPlan $ mkPlanSettings round))
& S.mapM (displayInvestorPlans round)
& S.mapM (CardanoCLICommand.convertInvestorPlans round)
& S.concatMap S.fromList
& S.chunksOf nbFundsPerTx SF.toList
& S.map (CardanoCLI.mkPlan Nothing)
& S.mapM (\{commands} -> do
fees <- estimatedFees <$> buildTx roundAddresses planWithoutFees
printLn $ "Tx Fees : " <> show fees
printLn $ "> " <> (show . length) commands <> " commands will be sent : \n"
let planWithFees @CardanoCLI.Plan {commands = commandsWithDeductedFees} = CardanoCLI.mkPlan (Just fees) commands
mapM_ (printLn .show) commandsWithDeductedFees
return planWithFees )
|
b8e7e778c752059b33ebaad9ad3c18aa082380e191269ebc32fce6c14a766460 | ucsd-progsys/nate | frx_color.ml | (***********************************************************************)
(* *)
MLTk , Tcl / Tk interface of Objective Caml
(* *)
, , and
projet Cristal , INRIA Rocquencourt
, Kyoto University RIMS
(* *)
Copyright 2002 Institut National de Recherche en Informatique et
en Automatique and Kyoto University . All rights reserved .
This file is distributed under the terms of the GNU Library
General Public License , with the special exception on linking
(* described in file LICENSE found in the Objective Caml source tree. *)
(* *)
(***********************************************************************)
open Camltk
open Protocol
module StringSet = Set.Make(struct type t = string let compare = compare end)
(* should we keep a negative cache ? *)
(* Positive cache: colour names Tk has already accepted.  Failed lookups
   are re-probed on every call (no negative cache, per the question above). *)
let available_colors = ref (StringSet.empty)
(* [check s] reports whether colour name [s] is displayable by Tk.
   Cached names are answered immediately; otherwise Tk is probed by
   creating a throwaway frame with [s] as its background colour.  On
   success the name is memoised and the frame destroyed; a [TkError]
   from Tk means the colour is unknown. *)
let check s =
  StringSet.mem s !available_colors
  || (try
        let probe =
          Frame.create_named Widget.default_toplevel "frxcolorcheck"
            [Background (NamedColor s)]
        in
        available_colors := StringSet.add s !available_colors;
        destroy probe;
        true
      with TkError _ -> false)
| null | https://raw.githubusercontent.com/ucsd-progsys/nate/8b1267cd8b10283d8bc239d16a28c654a4cb8942/eval/sherrloc/easyocaml%2B%2B/otherlibs/labltk/frx/frx_color.ml | ocaml | *********************************************************************
described in file LICENSE found in the Objective Caml source tree.
*********************************************************************
should we keep a negative cache ? | MLTk , Tcl / Tk interface of Objective Caml
, , and
projet Cristal , INRIA Rocquencourt
, Kyoto University RIMS
Copyright 2002 Institut National de Recherche en Informatique et
en Automatique and Kyoto University . All rights reserved .
This file is distributed under the terms of the GNU Library
General Public License , with the special exception on linking
open Camltk
open Protocol
module StringSet = Set.Make(struct type t = string let compare = compare end)
let available_colors = ref (StringSet.empty)
let check s =
if StringSet.mem s !available_colors then true
else begin
try
let f = Frame.create_named Widget.default_toplevel "frxcolorcheck"
[Background (NamedColor s)] in
available_colors := StringSet.add s !available_colors;
destroy f;
true
with
TkError _ -> false
end
|
79c0b5b0043cf9b2cfd38968bb187910be9b8c99643444fa180c559adbe70e5e | yuriy-chumak/ol | case-apply.scm | (define-library (otus case-apply)
(export
arity
case-apply)
(import
(src vm)
(scheme core))
(begin
(define (get-arity func)
(case (type func)
(type-bytecode
(case (ref func 0)
JAF
(-- (ref func 1)))
(else
#false)))
(type-procedure
(get-arity (ref func 1)))
(type-closure
(get-arity (ref func 1)))
(else
#false)))
(define arity get-arity)
(define (case-apply f . args)
(define arity (get-arity f))
(if arity
(let loop ((args args))
(unless (null? args)
(define arg (car args))
(if (and (pair? arg) (eq? arity (car arg)))
(apply f (cdr arg))
(loop (cdr args)))))))
))
| null | https://raw.githubusercontent.com/yuriy-chumak/ol/77b7537e4539489e27a5c317b3128bb7a4c62e2b/libraries/otus/case-apply.scm | scheme | (define-library (otus case-apply)
(export
arity
case-apply)
(import
(src vm)
(scheme core))
(begin
(define (get-arity func)
(case (type func)
(type-bytecode
(case (ref func 0)
JAF
(-- (ref func 1)))
(else
#false)))
(type-procedure
(get-arity (ref func 1)))
(type-closure
(get-arity (ref func 1)))
(else
#false)))
(define arity get-arity)
(define (case-apply f . args)
(define arity (get-arity f))
(if arity
(let loop ((args args))
(unless (null? args)
(define arg (car args))
(if (and (pair? arg) (eq? arity (car arg)))
(apply f (cdr arg))
(loop (cdr args)))))))
))
| |
c1d9beaf0046f6e455d15780264d9c911676d1a5fdee0e1b47d5c3f5b0df0d13 | patricoferris/sesame | git.mli | module type With_fs = Sesame.Types.S with type Input.t = Fpath.t
module Make (FS : With_fs) : sig
val get :
?schedule:Current_cache.Schedule.t ->
?label:string ->
Fpath.t option ->
Current_git.Commit.t Current.t ->
FS.t Current.t
* [ get f repo ] will attempt to build from the locally cloned repository
having first applied [ f ] to the returned checkout path
having first applied [f] to the returned checkout path *)
end
| null | https://raw.githubusercontent.com/patricoferris/sesame/8521e2a086b49d0bc20f0fca705f07675c52e1ae/src/current_sesame/git/git.mli | ocaml | module type With_fs = Sesame.Types.S with type Input.t = Fpath.t
module Make (FS : With_fs) : sig
val get :
?schedule:Current_cache.Schedule.t ->
?label:string ->
Fpath.t option ->
Current_git.Commit.t Current.t ->
FS.t Current.t
* [ get f repo ] will attempt to build from the locally cloned repository
having first applied [ f ] to the returned checkout path
having first applied [f] to the returned checkout path *)
end
| |
b27fb2a44525b5c14404e5e86794e0ad8edc09a564f1f2f6b53fce1991e292a1 | edbutler/nonograms-rule-synthesis | core.rkt | #lang racket
(provide
(all-from-out
"collection.rkt"
"lift.rkt"
"log.rkt"
"math.rkt"
"serialize.rkt"
"timeout.rkt"
"parallel.rkt"
"worker.rkt"
"synth.rkt"
"xml.rkt"
"util.rkt"))
(require
"collection.rkt"
"lift.rkt"
"log.rkt"
"math.rkt"
"serialize.rkt"
"timeout.rkt"
"parallel.rkt"
"worker.rkt"
"synth.rkt"
"xml.rkt"
"util.rkt")
| null | https://raw.githubusercontent.com/edbutler/nonograms-rule-synthesis/16f8dacb17bd77c9d927ab9fa0b8c1678dc68088/src/core/core.rkt | racket | #lang racket
(provide
(all-from-out
"collection.rkt"
"lift.rkt"
"log.rkt"
"math.rkt"
"serialize.rkt"
"timeout.rkt"
"parallel.rkt"
"worker.rkt"
"synth.rkt"
"xml.rkt"
"util.rkt"))
(require
"collection.rkt"
"lift.rkt"
"log.rkt"
"math.rkt"
"serialize.rkt"
"timeout.rkt"
"parallel.rkt"
"worker.rkt"
"synth.rkt"
"xml.rkt"
"util.rkt")
| |
fe214f3f25f297147ec4ff65be772d721ecad1915f6e0f0c217bb0456434d283 | TerrorJack/ghc-alter | Arr.hs | {-# LANGUAGE MagicHash, NoImplicitPrelude, UnboxedTuples #-}
module GHC.Event.Arr
(
Arr(..)
, new
, size
, read
, write
) where
import GHC.Base (($))
import GHC.Prim (MutableArray#, RealWorld, newArray#, readArray#,
sizeofMutableArray#, writeArray#)
import GHC.Types (IO(..), Int(..))
data Arr a = Arr (MutableArray# RealWorld a)
new :: a -> Int -> IO (Arr a)
new defval (I# n#) = IO $ \s0# ->
case newArray# n# defval s0# of (# s1#, marr# #) -> (# s1#, Arr marr# #)
size :: Arr a -> Int
size (Arr a) = I# (sizeofMutableArray# a)
read :: Arr a -> Int -> IO a
read (Arr a) (I# n#) = IO $ \s0# ->
case readArray# a n# s0# of (# s1#, val #) -> (# s1#, val #)
write :: Arr a -> Int -> a -> IO ()
write (Arr a) (I# n#) val = IO $ \s0# ->
case writeArray# a n# val s0# of s1# -> (# s1#, () #)
| null | https://raw.githubusercontent.com/TerrorJack/ghc-alter/db736f34095eef416b7e077f9b26fc03aa78c311/ghc-alter/boot-lib/base/GHC/Event/Arr.hs | haskell | # LANGUAGE MagicHash, NoImplicitPrelude, UnboxedTuples # |
module GHC.Event.Arr
(
Arr(..)
, new
, size
, read
, write
) where
import GHC.Base (($))
import GHC.Prim (MutableArray#, RealWorld, newArray#, readArray#,
sizeofMutableArray#, writeArray#)
import GHC.Types (IO(..), Int(..))
data Arr a = Arr (MutableArray# RealWorld a)
new :: a -> Int -> IO (Arr a)
new defval (I# n#) = IO $ \s0# ->
case newArray# n# defval s0# of (# s1#, marr# #) -> (# s1#, Arr marr# #)
size :: Arr a -> Int
size (Arr a) = I# (sizeofMutableArray# a)
read :: Arr a -> Int -> IO a
read (Arr a) (I# n#) = IO $ \s0# ->
case readArray# a n# s0# of (# s1#, val #) -> (# s1#, val #)
write :: Arr a -> Int -> a -> IO ()
write (Arr a) (I# n#) val = IO $ \s0# ->
case writeArray# a n# val s0# of s1# -> (# s1#, () #)
|
1f2088b531b4a538a81f75ecd69888f607920dcbbbb2ad3a9c3920da7a01ba32 | expipiplus1/spir-v | Parse.hs | {-# LANGUAGE Arrows #-}
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE TupleSections #
module Parse
( parseSpec
) where
import Control.Applicative ((<|>))
import Control.Arrow.ArrowList.Extra (arrF)
import Control.Monad (guard, (<=<))
import Data.Attoparsec.Text (Parser, parseOnly, hexadecimal, skipSpace, takeTill, anyChar, takeText, decimal, isEndOfLine, many1, sepBy1, string, satisfy)
import Data.Char (isSpace, isDigit)
import Data.Foldable (minimumBy, asum)
import Data.Function (on)
import Data.List (isInfixOf)
import Data.Maybe.Extra (rightToMaybe, fromMaybe, catMaybes, listToMaybe)
import Data.Text (pack, unpack)
import Data.Version (Version, parseVersion)
import Data.Word (Word32, Word16)
import Safe (headMay, atMay, tailMay, initMay, lastMay)
import Text.Read (readMaybe)
import Text.XML.HXT.Core hiding (xshow, trace)
import Text.ParserCombinators.ReadP (ReadP, readP_to_S)
import ConvertHTML
import Spec
import Table
-- | Parse the SPIR-V HTML specification document into a 'Spec'.
-- Returns 'Nothing' when the arrow pipeline yields no result.
parseSpec :: String -> IO (Maybe Spec)
parseSpec t =
  let doc = readString [withParseHTML yes, withWarnings yes] t
  in headMay <$> runX (doc >>> parseSpecHTML)
-- | Arrow extracting the whole 'Spec' from the parsed document: the
-- version comes from the page header, everything else from the
-- @content@ div.
parseSpecHTML :: IOSLA (XIOState ()) XmlTree Spec
parseSpecHTML =
  proc doc ->
    do content <- deep (hasAttrValue "id" (=="content")) -< doc
       version <- getVersion -< doc
       limits <- deep getLimits -< content
       magic <- deep getMagic -< content
       standardSubSections <- listA (deep getStandardSubsection) -< content
       instructionGroups <- listA (deep getInstructionGroup <<< deep isInstructionSubsection) -< content
       returnA -< Spec { specVersion = version
                       , specLimits = limits
                       , specMagic = magic
                       , specStandardSubsections = standardSubSections
                       , specInstructionGroups = instructionGroups
                       }
--------------------------------------------------------------------------------
-- Parsing the header
--------------------------------------------------------------------------------
getVersion :: ArrowXml a => a XmlTree Version
getVersion = parseHeader

-- | Find the @revnumber@ element and parse the version out of its text.
parseHeader :: ArrowXml a => a XmlTree Version
parseHeader =
  deep (hasAttrValue "id" (== "revnumber") //> getText >>^ parseVersionText)

-- | parseVersionText takes a string like "words words 1.2.3 words" and
-- extracts the version
parseVersionText :: String -> Version
parseVersionText = fromMaybe (error "Unable to parse version") .
                   parseInWords parseVersion
--------------------------------------------------------------------------------
-- Limits
--------------------------------------------------------------------------------
-- | Parse the "Universal Limits" subsection's headless table into the
-- list of 'Limit's.
getLimits :: ArrowXml a => a XmlTree [Limit]
getLimits =
  hasSubsectionId "_a_id_limits_a_universal_limits"
  >>> single (deep getHeadlessTable)
  >>> arrF (traverse htmlToPlain)
  >>^ limitsFromTable
  where limitsFromTable (Table _ body) =
          -- use catMaybes because this table is oddly formed, and some rows
          -- should be ignored
          catMaybes . fmap limitFromRow $ body
        limitFromRow [entity, dec, _] = Limit entity <$> parseMarkedDecimal dec
        limitFromRow _ = Nothing
--------------------------------------------------------------------------------
-- Magic
--------------------------------------------------------------------------------
-- | Read the 32-bit magic number from the single-cell "Magic Number"
-- table.
getMagic :: ArrowXml a => a XmlTree Word32
getMagic = getTable >>> arrF (traverse htmlToPlain) >>> arrF magicFromTable
  where -- magicFromTable :: Table -> Either String Word32
        magicFromTable (Table ["Magic Number"] [[n]]) =
          parseOnly ("0x" *> hexadecimal) (pack n)
        magicFromTable _ = Left ""
--------------------------------------------------------------------------------
-- Standard Subsections
--------------------------------------------------------------------------------
-- | Parse one enumerant subsection: its title, heading id, prose
-- description and enum table.
getStandardSubsection :: ArrowXml a => a XmlTree StandardSubsection
getStandardSubsection =
  proc tree -> do
    subsection <- isSubsection -< tree
    title <- single (hasName "h3" <<< getChildren) -< subsection
    titleText <- arrF htmlToPlain -< title
    titleId <- getAttrValue "id" -< title
    -- TODO: html descriptions
    description <- concat ^<< listA (arrF htmlToHaddock <<< neg isTable <<< neg (hasName "h3") <<< getChildren) -< subsection
    table <- getTable <<< getChildren -< subsection
    arrF standardSubsectionFromTable -<
      (titleText, titleId, description, table)
-- | Assemble a 'StandardSubsection' from its title, heading id, prose
-- description and enum table; fails when the title does not parse or
-- the table's first column name disagrees with the section name.
standardSubsectionFromTable :: (String, String, String, Table XmlTree)
                            -> Maybe StandardSubsection
standardSubsectionFromTable
  (subsectionTitle, ident, description, table@(Table header _)) =
  do (subsectionNumber, name) <- parseSubsectionTitle subsectionTitle
     firstColName <- htmlToPlain =<< headMay header
     -- TODO: This should probably print a warning
     -- This fails on 3.25
     guard (name == firstColName)
     enumElements <- tableElements table
     guard (not . null $ enumElements)
     pure $ StandardSubsection subsectionNumber
                               (strip name)
                               ident
                               (strip description)
                               enumElements
-- | Extract the EnumElements from a table
tableElements :: Table XmlTree -> Maybe [EnumElement]
tableElements (Table header body) =
  do plainHeader <- traverse htmlToPlain header
     let hasRequiredCapability = "Required Capability" `elem` plainHeader ||
                                 "Depends On" `elem` plainHeader
         hasExtraOperands = "Extra Operands" `elem` plainHeader
     -- Get the EnumElement for this row
     let rowElement row = do
           enum <- readMaybe =<< htmlToPlain =<< row `atMay` 0
           nameAndDescription <- row `atMay` 1
           name <- headLine =<< htmlToPlain nameAndDescription
           guard (not . null $ name)
           description <- tailLines <$> htmlToHaddock nameAndDescription
           let rcColumn = 2
               -- TODO: There's some nasty mixing of Maybe meaning error and
               -- Maybe meaning optional parameter here
               requiredCapability = do guard hasRequiredCapability
                                       rc <- htmlToPlain =<< (row `atMay` rcColumn)
                                       guard (not . null $ rc)
                                       pure rc
               eoColumnStart = if hasRequiredCapability then 3 else 2
               extraOperands = if hasExtraOperands
                 then catMaybes . fmap parseOperand . drop eoColumnStart $ row
                 else []
           pure $ EnumElement enum name description requiredCapability extraOperands
     traverse rowElement body
-- | Read one operand cell: the first line names the operand's type, the
-- remaining lines describe it.
parseOperand :: XmlTree -> Maybe Operand
parseOperand t = do p <- htmlToPlain t
                    ty <- headLine p
                    let desc = tailLines p
                    pure $ Operand ty desc
--------------------------------------------------------------------------------
-- Instructions
--------------------------------------------------------------------------------
-- | Return just the subsection containing the instructions
isInstructionSubsection :: ArrowXml a => a XmlTree XmlTree
isInstructionSubsection = hasSubsectionId "_a_id_instructions_a_instructions"

-- | Get the instructions for a single instruction group subsubsection
getInstructionGroup :: ArrowXml a => a XmlTree InstructionGroup
getInstructionGroup =
  proc tree ->
    do subsubsection <- isSubsubsection -< tree
       title <- single (hasName "h4" <<< getChildren) -< subsubsection
       (number, name) <- arrF parseSubsubsectionTitle <<< arrF htmlToPlain -< title
       -- Every table in this subsubsection should describe a new instruction
       tables <- listA (getHeadlessTable <<< getChildren) -< subsubsection
       -- We probably want to warn when a table couldn't be parsed
       instructions <- arr (catMaybes . fmap tableInstruction) -< tables
       arr3 InstructionGroup -< (number, (name, instructions))
-- | Parse a table and extract the instruction.
-- NOTE(review): the surviving text had lost the leading @do@ and the
-- @let@ before @requiredCapabilities@ (comment markers were also
-- stripped); both are restored here.
tableInstruction :: Table XmlTree -> Maybe Instruction
tableInstruction table =
  do -- The first box contains the name and description
     nameAndDescription <- table `atCell` (0, 0)
     -- The first line is the name
     name <- headLine =<< htmlToPlain nameAndDescription
     -- And the remainder is a description of the function
     description <- tailLines <$> htmlToHaddock nameAndDescription
     -- The next cell in the first row is optional and contains the
     -- required capabilities for this instruction
     let requiredCapabilities = asum $
           do rcText <- htmlToPlain =<< (table `atCell` (0, 1))
              let rcLines = lines rcText
              -- Check that this is actually a capabilities list
              guard . (=="Capability:") =<< headMay rcLines
              -- The rest of the lines are the capabilities
              tailMay rcLines
     -- The second (and last) row contains the info on the instruction
     infoRow <- table `atRow` 1
     plainInfoRow <- traverse htmlToPlain infoRow
     -- The first cell is the word count of the instruction
     (wordCount, isVariadic) <- parseWordCount =<< plainInfoRow `atMay` 0
     -- The second cell is the opcode
     opCode <- readMaybe =<< plainInfoRow `atMay` 1
     -- The operands are the remainder
     let operands = drop 2 infoRow
     -- If this instruction is variadic then parse the (more complicated)
     -- last cell separately
     variadicOperand <-
       if isVariadic
         then pure . parseVariadicOperand =<< lastMay operands
         else pure Nothing
     -- grab all the fixed operands
     fixedOperands <- sequence . fmap parseOperand
                  =<< if isVariadic
                        then initMay operands
                        else pure operands
     pure Instruction { instructionName = name
                      , instructionDescription = description
                      , instructionRequiredCapabilities = requiredCapabilities
                      , instructionWordCount = wordCount
                      , instructionOpCode = opCode
                      , instructionFixedOperands = fixedOperands
                      , instructionVariadicOperand = variadicOperand
                      }
-- | The variadic operands come in the following flavors:
-- "Literal String
-- name"
-- "type, type, …
-- name, name"
-- "Optional type
-- name"
-- TODO: There are more kinds!
parseVariadicOperand :: XmlTree -> Maybe VariadicOperand
parseVariadicOperand = parseMaybe vo <=< htmlToPlain
  where vo = do opType <- string "Literal String"
                skipSpace
                name <- takeText
                pure . VariadicOperandString $ Operand (unpack opType) (unpack name)
         <|>
           do opType <- takeTill (==',')
              anyChar >> skipSpace
              _ <- string opType
              anyChar >> skipSpace
              _ <- string "…"
              skipSpace
              name <- takeText
              pure . VariadicOperandRepeated $ Operand (unpack opType) (unpack name)
         <|>
           do _ <- string "Optional"
              skipSpace
              opType <- takeTill isEndOfLine
              skipSpace
              name <- takeText
              pure . VariadicOperandOptional $ Operand (unpack opType) (unpack name)
-- | parse the first cell of the info row for an instruction returning the word
-- count and whether or not the instruction is variable
parseWordCount :: String -> Maybe (Word16, Bool)
parseWordCount = parseMaybe $
  do wordCount <- decimal
     isVariadic <- isInfixOf "variable" . unpack <$> takeText
     pure (wordCount, isVariadic)
--------------------------------------------------------------------------------
-- Some extra stuff
-- TODO: Move this elsewhere
--------------------------------------------------------------------------------
-- Reading html constructs
--
-- Subsection
--
-- | Matches a @sect2@ div (a level-2 subsection).
isSubsection :: ArrowXml a => a XmlTree XmlTree
isSubsection = hasAttrValue "class" (== "sect2")

-- | The @id@ attribute of a subsection's @h3@ heading.
getSubsectionId :: ArrowXml a => a XmlTree String
getSubsectionId = isSubsection /> single (hasName "h3") >>> getAttrValue "id"

-- | Keep only the subsection whose heading id is exactly @n@.
hasSubsectionId :: ArrowXml a => String -> a XmlTree XmlTree
hasSubsectionId n = (getSubsectionId >>> isA (== n)) `guards` this

-- | Split a title such as @"3.4 Name"@ into its two-part section number
-- and the remaining name.
parseSubsectionTitle :: String -> Maybe ((Int, Int), String)
parseSubsectionTitle = parseMaybe $
  do n <- (,) <$> decimal <* "." <*> decimal <* "."
     skipSpace
     (n, ) . unpack <$> takeText
--
-- Subsubsection
--
-- | Matches a @sect3@ div (a level-3 subsubsection).
isSubsubsection :: ArrowXml a => a XmlTree XmlTree
isSubsubsection = hasAttrValue "class" (== "sect3")

-- | Split a title such as @"3.32.1 Name"@ into its three-part section
-- number and the remaining name.
parseSubsubsectionTitle :: String -> Maybe ((Int, Int, Int), String)
parseSubsubsectionTitle = parseMaybe $
  do n <- (,,) <$> decimal <* "." <*> decimal <* "." <*> decimal <* "."
     skipSpace
     (n, ) . unpack <$> takeText
-- Parsing
-- | parseMarkedDecimal parses numbers in base 10 whose digits may have commas
-- between them
-- NOTE(review): trailing non-numeric text after the number is tolerated
-- ('parseOnly' does not demand end of input) — confirm this is intended
-- for the limit cells this is used on.
parseMarkedDecimal :: Integral a => String -> Maybe a
parseMarkedDecimal t =
  (fmap fromIntegral . (readMaybe :: String -> Maybe Integer) . concat) =<<
  parseMaybe (many1 digit `sepBy1` string ",") t
  where digit = satisfy isDigit
-- | parseInWords returns the first valid result after trying the parser on the
-- words in the string
parseInWords :: ReadP a -> String -> Maybe a
parseInWords p s = headMay $
  do word <- words s
     case longestParse p word of
       Nothing -> []
       Just (v, _) -> pure v
-- | Run an attoparsec parser over a 'String', discarding any error.
parseMaybe :: Parser a -> String -> Maybe a
parseMaybe p = rightToMaybe . parseOnly p . pack
-- | Run a 'ReadP' parser and keep the parse that consumed the most
-- input (shortest leftover), returned together with that leftover;
-- 'Nothing' when the parser admits no parse at all.
longestParse :: ReadP a -> String -> Maybe (a, String)
longestParse p input
  | null results = Nothing
  | otherwise    = Just (minimumBy (compare `on` (length . snd)) results)
  where results = readP_to_S p input
-- | First line of the text, whitespace-trimmed; 'Nothing' for "".
headLine :: String -> Maybe String
headLine s = case lines s of
               []      -> Nothing
               (l : _) -> Just (strip l)

-- | Everything after the first newline, whitespace-trimmed ("" when
-- there is no newline).
tailLines :: String -> String
tailLines = strip . dropWhile ('\n' /=)

-- | Trim whitespace from both ends.
strip :: String -> String
strip = stripR . stripL

-- | Trim leading whitespace.
stripL :: String -> String
stripL = dropWhile isSpace

-- | Trim trailing whitespace (single right-to-left pass, no reversing).
stripR :: String -> String
stripR = foldr keep []
  where keep c done
          | isSpace c && null done = []
          | otherwise              = c : done
| null | https://raw.githubusercontent.com/expipiplus1/spir-v/5692404f43a63fb8feb0cfaff3bfb3eedada6890/generate/src/Parse.hs | haskell | # LANGUAGE Arrows #
# LANGUAGE OverloadedStrings #
------------------------------------------------------------------------------
Parsing the header
------------------------------------------------------------------------------
extracts the version
------------------------------------------------------------------------------
Limits
------------------------------------------------------------------------------
should be ignored
------------------------------------------------------------------------------
------------------------------------------------------------------------------
magicFromTable :: Table -> Either String Word32
------------------------------------------------------------------------------
Standard Subsections
------------------------------------------------------------------------------
TODO: This should probably print a warning
TODO: There's some nasty mixing of Maybe meaning error and
Maybe meaning optional parameter here
------------------------------------------------------------------------------
Instructions
------------------------------------------------------------------------------
| Return just the subsection containing the instructions
| Get the instructions for a single instruction group subsubsection
Every table in this subsubsection should describe a new instruction
We probably want to warn when a table couldn't be parsed
| Parse a table and extract the instruction
And the remainder is a description of the function
required capabilities for this instruction
Check that this is actually a capabilities list
The rest of the lines are the capabilities
The operands are the remainder
If this instruction is variadic then parse the (more complicated)
last cell separately
grab all the fixed operands
| The variadic operands come in the following flavors:
name"
"type, type, …
name, name"
"Optional type
name"
TODO: There are more kinds!
count and whether or not the instruction is variable
------------------------------------------------------------------------------
Some extra stuff
TODO: Move this elsewhere
------------------------------------------------------------------------------
Reading html constructs
Subsection
Subsubsection
Parsing
between them
words in the string | # LANGUAGE TupleSections #
module Parse
( parseSpec
) where
import Control.Applicative ((<|>))
import Control.Arrow.ArrowList.Extra (arrF)
import Control.Monad (guard, (<=<))
import Data.Attoparsec.Text (Parser, parseOnly, hexadecimal, skipSpace, takeTill, anyChar, takeText, decimal, isEndOfLine, many1, sepBy1, string, satisfy)
import Data.Char (isSpace, isDigit)
import Data.Foldable (minimumBy, asum)
import Data.Function (on)
import Data.List (isInfixOf)
import Data.Maybe.Extra (rightToMaybe, fromMaybe, catMaybes, listToMaybe)
import Data.Text (pack, unpack)
import Data.Version (Version, parseVersion)
import Data.Word (Word32, Word16)
import Safe (headMay, atMay, tailMay, initMay, lastMay)
import Text.Read (readMaybe)
import Text.XML.HXT.Core hiding (xshow, trace)
import Text.ParserCombinators.ReadP (ReadP, readP_to_S)
import ConvertHTML
import Spec
import Table
parseSpec :: String -> IO (Maybe Spec)
parseSpec t =
let doc = readString [withParseHTML yes, withWarnings yes] t
in headMay <$> runX (doc >>> parseSpecHTML)
parseSpecHTML :: IOSLA (XIOState ()) XmlTree Spec
parseSpecHTML =
proc doc ->
do content <- deep (hasAttrValue "id" (=="content")) -< doc
version <- getVersion -< doc
limits <- deep getLimits -< content
magic <- deep getMagic -< content
standardSubSections <- listA (deep getStandardSubsection) -< content
instructionGroups <- listA (deep getInstructionGroup <<< deep isInstructionSubsection) -< content
returnA -< Spec { specVersion = version
, specLimits = limits
, specMagic = magic
, specStandardSubsections = standardSubSections
, specInstructionGroups = instructionGroups
}
getVersion :: ArrowXml a => a XmlTree Version
getVersion = parseHeader
parseHeader :: ArrowXml a => a XmlTree Version
parseHeader =
deep (hasAttrValue "id" (== "revnumber") //> getText >>^ parseVersionText)
| parseVersionText takes a string like " words words 1.2.3 words " and
parseVersionText :: String -> Version
parseVersionText = fromMaybe (error "Unable to parse version") .
parseInWords parseVersion
getLimits :: ArrowXml a => a XmlTree [Limit]
getLimits =
hasSubsectionId "_a_id_limits_a_universal_limits"
>>> single (deep getHeadlessTable)
>>> arrF (traverse htmlToPlain)
>>^ limitsFromTable
where limitsFromTable (Table _ body) =
use because this table is oddly formed , and some rows
catMaybes . fmap limitFromRow $ body
limitFromRow [entity, dec, _] = Limit entity <$> parseMarkedDecimal dec
limitFromRow _ = Nothing
Magic
getMagic :: ArrowXml a => a XmlTree Word32
getMagic = getTable >>> arrF (traverse htmlToPlain) >>> arrF magicFromTable
magicFromTable (Table ["Magic Number"] [[n]]) =
parseOnly ("0x" *> hexadecimal) (pack n)
magicFromTable _ = Left ""
getStandardSubsection :: ArrowXml a => a XmlTree StandardSubsection
getStandardSubsection =
proc tree -> do
subsection <- isSubsection -< tree
title <- single (hasName "h3" <<< getChildren) -< subsection
titleText <- arrF htmlToPlain -< title
titleId <- getAttrValue "id" -< title
TODO : html descriptions
description <- concat ^<< listA (arrF htmlToHaddock <<< neg isTable <<< neg (hasName "h3") <<< getChildren) -< subsection
table <- getTable <<< getChildren -< subsection
arrF standardSubsectionFromTable -<
(titleText, titleId, description, table)
standardSubsectionFromTable :: (String, String, String, Table XmlTree)
-> Maybe StandardSubsection
standardSubsectionFromTable
(subsectionTitle, ident, description, table@(Table header _)) =
do (subsectionNumber, name) <- parseSubsectionTitle subsectionTitle
firstColName <- htmlToPlain =<< headMay header
This fails on 3.25
guard (name == firstColName)
enumElements <- tableElements table
guard (not . null $ enumElements)
pure $ StandardSubsection subsectionNumber
(strip name)
ident
(strip description)
enumElements
| Extract the EnumElements from a table
tableElements :: Table XmlTree -> Maybe [EnumElement]
tableElements (Table header body) =
do plainHeader <- traverse htmlToPlain header
let hasRequiredCapability = "Required Capability" `elem` plainHeader ||
"Depends On" `elem` plainHeader
hasExtraOperands = "Extra Operands" `elem` plainHeader
Get the EnumElement for this row
let rowElement row = do
enum <- readMaybe =<< htmlToPlain =<< row `atMay` 0
nameAndDescription <- row `atMay` 1
name <- headLine =<< htmlToPlain nameAndDescription
guard (not . null $ name)
description <- tailLines <$> htmlToHaddock nameAndDescription
let rcColumn = 2
requiredCapability = do guard hasRequiredCapability
rc <- htmlToPlain =<< (row `atMay` rcColumn)
guard (not . null $ rc)
pure rc
eoColumnStart = if hasRequiredCapability then 3 else 2
extraOperands = if hasExtraOperands
then catMaybes . fmap parseOperand . drop eoColumnStart $ row
else []
pure $ EnumElement enum name description requiredCapability extraOperands
traverse rowElement body
parseOperand :: XmlTree -> Maybe Operand
parseOperand t = do p <- htmlToPlain t
ty <- headLine p
let desc = tailLines p
pure $ Operand ty desc
isInstructionSubsection :: ArrowXml a => a XmlTree XmlTree
isInstructionSubsection = hasSubsectionId "_a_id_instructions_a_instructions"
getInstructionGroup :: ArrowXml a => a XmlTree InstructionGroup
getInstructionGroup =
proc tree ->
do subsubsection <- isSubsubsection -< tree
title <- single (hasName "h4" <<< getChildren) -< subsubsection
(number, name) <- arrF parseSubsubsectionTitle <<< arrF htmlToPlain -< title
tables <- listA (getHeadlessTable <<< getChildren) -< subsubsection
instructions <- arr (catMaybes . fmap tableInstruction) -< tables
arr3 InstructionGroup -< (number, (name, instructions))
tableInstruction :: Table XmlTree -> Maybe Instruction
tableInstruction table =
The first box contains the name and description
nameAndDescription <- table `atCell` (0, 0)
The first line is the name
name <- headLine =<< htmlToPlain nameAndDescription
description <- tailLines <$> htmlToHaddock nameAndDescription
The next cell in the first row is optional and contains the
requiredCapabilities = asum $
do rcText <- htmlToPlain =<< (table `atCell` (0, 1))
let rcLines = lines rcText
guard . (=="Capability:") =<< headMay rcLines
tailMay rcLines
The second ( and last ) row contains the info on the instruction
infoRow <- table `atRow` 1
plainInfoRow <- traverse htmlToPlain infoRow
The first cell is the word count of the instruction
(wordCount, isVariadic) <- parseWordCount =<< plainInfoRow `atMay` 0
The second cell is the opcode
opCode <- readMaybe =<< plainInfoRow `atMay` 1
let operands = drop 2 infoRow
variadicOperand <-
if isVariadic
then pure . parseVariadicOperand =<< lastMay operands
else pure Nothing
fixedOperands <- sequence . fmap parseOperand
=<< if isVariadic
then initMay operands
else pure operands
pure Instruction { instructionName = name
, instructionDescription = description
, instructionRequiredCapabilities = requiredCapabilities
, instructionWordCount = wordCount
, instructionOpCode = opCode
, instructionFixedOperands = fixedOperands
, instructionVariadicOperand = variadicOperand
}
" Literal String
parseVariadicOperand :: XmlTree -> Maybe VariadicOperand
parseVariadicOperand = parseMaybe vo <=< htmlToPlain
where vo = do opType <- string "Literal String"
skipSpace
name <- takeText
pure . VariadicOperandString $ Operand (unpack opType) (unpack name)
<|>
do opType <- takeTill (==',')
anyChar >> skipSpace
_ <- string opType
anyChar >> skipSpace
_ <- string "…"
skipSpace
name <- takeText
pure . VariadicOperandRepeated $ Operand (unpack opType) (unpack name)
<|>
do _ <- string "Optional"
skipSpace
opType <- takeTill isEndOfLine
skipSpace
name <- takeText
pure . VariadicOperandOptional $ Operand (unpack opType) (unpack name)
| parse the first cell of the info row for an instruction returning the word
parseWordCount :: String -> Maybe (Word16, Bool)
parseWordCount = parseMaybe $
do wordCount <- decimal
isVariadic <- isInfixOf "variable" . unpack <$> takeText
pure (wordCount, isVariadic)
isSubsection :: ArrowXml a => a XmlTree XmlTree
isSubsection = hasAttrValue "class" (== "sect2")
getSubsectionId :: ArrowXml a => a XmlTree String
getSubsectionId = isSubsection /> single (hasName "h3") >>> getAttrValue "id"
hasSubsectionId :: ArrowXml a => String -> a XmlTree XmlTree
hasSubsectionId n = (getSubsectionId >>> isA (== n)) `guards` this
parseSubsectionTitle :: String -> Maybe ((Int, Int), String)
parseSubsectionTitle = parseMaybe $
do n <- (,) <$> decimal <* "." <*> decimal <* "."
skipSpace
(n, ) . unpack <$> takeText
isSubsubsection :: ArrowXml a => a XmlTree XmlTree
isSubsubsection = hasAttrValue "class" (== "sect3")
parseSubsubsectionTitle :: String -> Maybe ((Int, Int, Int), String)
parseSubsubsectionTitle = parseMaybe $
do n <- (,,) <$> decimal <* "." <*> decimal <* "." <*> decimal <* "."
skipSpace
(n, ) . unpack <$> takeText
| parseMarkedDecimal parses numbers in base 10 whose digits may have commas
parseMarkedDecimal :: Integral a => String -> Maybe a
parseMarkedDecimal t =
(fmap fromIntegral . (readMaybe :: String -> Maybe Integer) . concat) =<<
parseMaybe (many1 digit `sepBy1` string ",") t
where digit = satisfy isDigit
| parseInWords returns the first valid result after trying the parser on the
parseInWords :: ReadP a -> String -> Maybe a
parseInWords p s = headMay $
do word <- words s
case longestParse p word of
Nothing -> []
Just (v, _) -> pure v
parseMaybe :: Parser a -> String -> Maybe a
parseMaybe p = rightToMaybe . parseOnly p . pack
longestParse :: ReadP a -> String -> Maybe (a, String)
longestParse p s =
case readP_to_S p s of
[] -> Nothing
parses -> Just (minimumBy (compare `on` length . snd) parses)
headLine :: String -> Maybe String
headLine = fmap strip . listToMaybe . lines
tailLines :: String -> String
tailLines = strip . dropWhile (/= '\n')
strip :: String -> String
strip = stripL . stripR
stripL :: String -> String
stripL = dropWhile isSpace
stripR :: String -> String
stripR = reverse . stripL . reverse
|
16d500d6cab1dafe3471ac78712afb1135bd1ec23f0a92300186f24443b04a32 | cerner/clara-rules | test_fressian.clj | (ns clara.test-fressian
(:require [clara.rules.durability :as d]
[clara.rules.durability.fressian :as df]
[clojure.data.fressian :as fres]
[clara.rules.platform :as pform]
[clojure.test :refer :all])
(:import [org.fressian
FressianWriter
FressianReader]))
(defn custom-comparator [x y]
(> y x))
(defrecord Tester [x])
(defn serde1 [x]
(with-open [os (java.io.ByteArrayOutputStream.)
^FressianWriter wtr (fres/create-writer os :handlers df/write-handler-lookup)]
;; Write
(pform/thread-local-binding [d/node-id->node-cache (volatile! {})
d/clj-struct-holder (java.util.IdentityHashMap.)]
(fres/write-object wtr x))
;; Read
(let [data (.toByteArray os)]
(pform/thread-local-binding [d/clj-struct-holder (java.util.ArrayList.)]
(with-open [is (java.io.ByteArrayInputStream. data)
^FressianReader rdr (fres/create-reader is :handlers df/read-handler-lookup)]
(fres/read-object rdr))))))
(defn serde [x]
Tests all serialization cases in a way that SerDe 's 2 times to show that the serialization to
;; deserialization process does not lose important details for the next time serializing it.
(-> x serde1 serde1))
(defn test-serde [expected x]
(is (= expected (serde x))))
(defn test-serde-with-meta [expected x]
(let [no-meta (serde x)
test-meta {:test :meta}
x-with-meta (vary-meta x merge test-meta)
;; In case x already has metadata it needs to be added to the expectation
;; along with the test metadata added in case it has none to test already.
expected-meta (meta x-with-meta)
has-meta (serde x-with-meta)]
(is (= expected
no-meta
has-meta))
(is (= expected-meta
(meta has-meta)))))
(deftest test-handlers
(testing "class"
(test-serde String String))
(testing "set"
(test-serde-with-meta #{:x :y} #{:x :y}))
(testing "vec"
(test-serde-with-meta [1 2 3] [1 2 3]))
(testing "list"
(test-serde-with-meta (list "a" "b") (list "a" "b")))
(testing "aseq"
(test-serde-with-meta ['a 'b] (seq ['a 'b])))
(testing "lazy seq"
(test-serde-with-meta [2 3 4] (map inc [1 2 3])))
(testing "map"
(test-serde-with-meta {:x 1 :y 2} {:x 1 :y 2}))
(testing "map entry"
(let [e (first {:x 1})]
(test-serde [:x 1] e)
(is (instance? clojure.lang.MapEntry (serde e))
"preserves map entry type to be sure to still work with `key` and `val`")))
(testing "sym"
(test-serde-with-meta 't 't))
(testing "record"
(test-serde-with-meta (->Tester 10) (->Tester 10)))
(testing "sorted collections"
(let [ss (sorted-set 1 10)
ss-custom (with-meta (sorted-set-by custom-comparator 1 10)
{:clara.rules.durability/comparator-name `custom-comparator})
sm (sorted-map 1 :x 10 :y)
sm-custom (with-meta (sorted-map-by custom-comparator 1 :x 10 :y)
{:clara.rules.durability/comparator-name `custom-comparator})]
(testing "set"
(test-serde-with-meta ss ss)
(test-serde-with-meta ss-custom ss-custom)
(is (thrown? Exception
(serde (with-meta ss-custom {})))
"cannot serialized custom sort comparators without name given in metadata"))
(testing "map"
(test-serde-with-meta sm sm)
(test-serde-with-meta sm-custom sm-custom)
(is (thrown? Exception
(serde (with-meta sm-custom {})))
"cannot serialized custom sort comparators without name given in metadata")))))
(deftest test-handler-identity
(let [v [1 2 3]
l (list 4 5 6)
ls (map inc [1 2 3])
m {:a 1 :b 2}
s #{:a :b :c}
sym 'a
os (sorted-set "a" "c" "b")
om (sorted-map "a" 1 "c" 3 "b" 2)
r (serde (->Tester [v v l l ls ls m m s s sym sym os os om om]))]
(doseq [[x y] (partition 2 (:x r))]
(testing (str "Serde preserves identity for " (type x))
(is (identical? x y)
"preserving object references")))))
| null | https://raw.githubusercontent.com/cerner/clara-rules/8107a5ab7fdb475e323c0bcb39084a83454deb1c/src/test/clojure/clara/test_fressian.clj | clojure | Write
Read
deserialization process does not lose important details for the next time serializing it.
In case x already has metadata it needs to be added to the expectation
along with the test metadata added in case it has none to test already. | (ns clara.test-fressian
(:require [clara.rules.durability :as d]
[clara.rules.durability.fressian :as df]
[clojure.data.fressian :as fres]
[clara.rules.platform :as pform]
[clojure.test :refer :all])
(:import [org.fressian
FressianWriter
FressianReader]))
(defn custom-comparator [x y]
(> y x))
(defrecord Tester [x])
(defn serde1 [x]
(with-open [os (java.io.ByteArrayOutputStream.)
^FressianWriter wtr (fres/create-writer os :handlers df/write-handler-lookup)]
(pform/thread-local-binding [d/node-id->node-cache (volatile! {})
d/clj-struct-holder (java.util.IdentityHashMap.)]
(fres/write-object wtr x))
(let [data (.toByteArray os)]
(pform/thread-local-binding [d/clj-struct-holder (java.util.ArrayList.)]
(with-open [is (java.io.ByteArrayInputStream. data)
^FressianReader rdr (fres/create-reader is :handlers df/read-handler-lookup)]
(fres/read-object rdr))))))
(defn serde [x]
Tests all serialization cases in a way that SerDe 's 2 times to show that the serialization to
(-> x serde1 serde1))
(defn test-serde [expected x]
(is (= expected (serde x))))
(defn test-serde-with-meta [expected x]
(let [no-meta (serde x)
test-meta {:test :meta}
x-with-meta (vary-meta x merge test-meta)
expected-meta (meta x-with-meta)
has-meta (serde x-with-meta)]
(is (= expected
no-meta
has-meta))
(is (= expected-meta
(meta has-meta)))))
(deftest test-handlers
(testing "class"
(test-serde String String))
(testing "set"
(test-serde-with-meta #{:x :y} #{:x :y}))
(testing "vec"
(test-serde-with-meta [1 2 3] [1 2 3]))
(testing "list"
(test-serde-with-meta (list "a" "b") (list "a" "b")))
(testing "aseq"
(test-serde-with-meta ['a 'b] (seq ['a 'b])))
(testing "lazy seq"
(test-serde-with-meta [2 3 4] (map inc [1 2 3])))
(testing "map"
(test-serde-with-meta {:x 1 :y 2} {:x 1 :y 2}))
(testing "map entry"
(let [e (first {:x 1})]
(test-serde [:x 1] e)
(is (instance? clojure.lang.MapEntry (serde e))
"preserves map entry type to be sure to still work with `key` and `val`")))
(testing "sym"
(test-serde-with-meta 't 't))
(testing "record"
(test-serde-with-meta (->Tester 10) (->Tester 10)))
(testing "sorted collections"
(let [ss (sorted-set 1 10)
ss-custom (with-meta (sorted-set-by custom-comparator 1 10)
{:clara.rules.durability/comparator-name `custom-comparator})
sm (sorted-map 1 :x 10 :y)
sm-custom (with-meta (sorted-map-by custom-comparator 1 :x 10 :y)
{:clara.rules.durability/comparator-name `custom-comparator})]
(testing "set"
(test-serde-with-meta ss ss)
(test-serde-with-meta ss-custom ss-custom)
(is (thrown? Exception
(serde (with-meta ss-custom {})))
"cannot serialized custom sort comparators without name given in metadata"))
(testing "map"
(test-serde-with-meta sm sm)
(test-serde-with-meta sm-custom sm-custom)
(is (thrown? Exception
(serde (with-meta sm-custom {})))
"cannot serialized custom sort comparators without name given in metadata")))))
(deftest test-handler-identity
(let [v [1 2 3]
l (list 4 5 6)
ls (map inc [1 2 3])
m {:a 1 :b 2}
s #{:a :b :c}
sym 'a
os (sorted-set "a" "c" "b")
om (sorted-map "a" 1 "c" 3 "b" 2)
r (serde (->Tester [v v l l ls ls m m s s sym sym os os om om]))]
(doseq [[x y] (partition 2 (:x r))]
(testing (str "Serde preserves identity for " (type x))
(is (identical? x y)
"preserving object references")))))
|
3622433c6563f492acbb945f7067d034b2a3d99aebb6ab62be9b205d155cad99 | lem-project/lem | window-panel.lisp | (in-package :lem-capi)
(defclass window-panel (capi:column-layout)
((initialized
:initform nil
:accessor window-panel-initialized)
(minibuffer
:accessor window-panel-minibuffer)
(modified-p
:initform nil
:accessor window-panel-modified-p)
(resizing
:initform nil
:accessor window-panel-resizing)))
(defmethod initialize-instance :after ((window-panel window-panel) &rest initargs)
(declare (ignore initargs))
(let ((minibuffer-pane (make-instance 'window-pane
:visible-max-height '(:character 1)
:window-panel window-panel)))
(setf (window-panel-minibuffer window-panel) minibuffer-pane)
(setf (capi:layout-description window-panel)
(list (make-instance 'capi:column-layout
:description
(list nil
minibuffer-pane))))))
(defun set-first-window (window-panel window-pane)
(with-apply-in-pane-process-wait-single (window-panel)
(setf (window-panel-initialized window-panel) t)
(setf (capi:layout-description (first (capi:layout-description window-panel)))
(list window-pane (window-panel-minibuffer window-panel)))))
(defun map-window-panes (window-panel function)
(with-apply-in-pane-process-wait-single (window-panel)
(capi:map-pane-descendant-children
window-panel
(lambda (pane)
(when (typep pane 'window-pane)
(funcall function pane))))))
(defun all-window-panes (window-panel)
(let ((window-panes '()))
(map-window-panes window-panel
(lambda (window-pane)
(push window-pane window-panes)))
window-panes))
(defun window-panel-width (window-panel)
(with-apply-in-pane-process-wait-single (window-panel)
(let ((window-pane (window-panel-minibuffer window-panel)))
(round (capi:simple-pane-visible-width window-panel)
(window-pane-char-width window-pane)))))
(defun window-panel-height (window-panel)
(with-apply-in-pane-process-wait-single (window-panel)
(let ((window-pane (window-panel-minibuffer window-panel)))
(floor (capi:simple-pane-visible-height window-panel)
(window-pane-char-height window-pane)))))
(defun split-window (window-panel current-window-pane new-window-pane layout-class-name)
(with-apply-in-pane-process-wait-single (window-panel)
(block outer
(let ((*window-is-modifying-p* t))
(capi:map-pane-descendant-children
window-panel
(lambda (pane)
(when (or (typep pane 'capi:column-layout)
(typep pane 'capi:row-layout))
(when-let (pos (position current-window-pane (capi:layout-description pane)))
(setf (capi:layout-description pane)
(nconc (subseq (capi:layout-description pane) 0 pos)
(list (make-instance layout-class-name
:description (list current-window-pane
:divider
new-window-pane)))
(subseq (capi:layout-description pane) (1+ pos))))
(return-from outer)))))))))
(defun split-horizontally (window-panel current-window-pane new-window-pane)
(split-window window-panel current-window-pane new-window-pane 'capi:row-layout))
(defun split-vertically (window-panel current-window-pane new-window-pane)
(split-window window-panel current-window-pane new-window-pane 'capi:column-layout))
(defun window-panel-delete-window (window-panel window-pane)
(labels ((f (pane)
(cond ((typep pane 'capi:layout)
(let ((pos (position window-pane (capi:layout-description pane))))
(setf (capi:layout-ratios pane) nil)
(setf (capi:layout-description pane)
(if pos
(nconc (delete :divider (subseq (capi:layout-description pane) 0 pos)
:count 1 :from-end t)
(delete :divider (subseq (capi:layout-description pane) (1+ pos))
:count 1))
(delete nil
(map 'list #'f (capi:layout-description pane)))))
(cond ((null (capi:layout-description pane))
nil)
((null (rest (capi:layout-description pane)))
(first (capi:layout-description pane)))
(t
pane))))
(t
pane))))
(with-apply-in-pane-process-wait-single (window-panel)
(let ((*window-is-modifying-p* t))
(f window-panel)))))
(defun update-window-ratios (window-panel)
(labels ((sum (list)
(loop :for n :in list
:sum (or n 0) :into sum
:finally (return (if (zerop sum) nil sum))))
(f (pane)
(cond ((typep pane 'capi:row-layout)
(let* ((width-height-list (mapcar #'f (capi:layout-description pane)))
(ratios (mapcar #'first width-height-list)))
(setf (capi:layout-ratios pane) ratios)
(list (sum ratios)
(second (first width-height-list)))))
((typep pane 'capi:column-layout)
(let* ((width-height-list (mapcar #'f (capi:layout-description pane)))
(ratios (mapcar #'second width-height-list)))
(setf (capi:layout-ratios pane) ratios)
(list (first (first width-height-list))
(sum ratios))))
((typep pane 'window-pane)
(let ((window (if (eq pane (window-panel-minibuffer window-panel))
(lem::minibuffer-window)
(window-pane-window pane))))
(list (lem:window-width window)
(lem:window-height window))))
(t
(list nil nil)))))
(with-apply-in-pane-process-wait-single (window-panel)
(let ((*window-is-modifying-p* t))
(f window-panel)))))
(defun update-window-size (window-panel)
(labels ((f (pane x y)
(cond ((typep pane 'capi:row-layout)
(let ((w 0)
(h nil))
(dolist (child (capi:layout-description pane))
(unless (eq child :divider)
(multiple-value-bind (child-x child-y child-w child-h) (f child x y)
(declare (ignore child-x child-y))
(incf x child-w)
(incf w child-w)
(setf h child-h))))
(values x y w h)))
((typep pane 'capi:column-layout)
(let ((w nil)
(h 0))
(dolist (child (capi:layout-description pane))
(unless (eq child :divider)
(multiple-value-bind (child-x child-y child-w child-h) (f child x y)
(declare (ignore child-x child-y))
(incf y child-h)
(incf h child-h)
(setf w child-w))))
(values x y w h)))
((typep pane 'window-pane)
(if (eq pane (window-panel-minibuffer window-panel))
(multiple-value-bind (w h)
(window-pane-size pane)
(values x y w h))
(let ((window (window-pane-window pane)))
(multiple-value-bind (w h)
(window-pane-size pane)
(lem::window-set-pos window x y)
(lem::window-set-size window w h)
(values x y w h))))))))
(with-apply-in-pane-process-wait-single (window-panel)
(f window-panel 0 0)
(lem:send-event :resize)
(setf (window-panel-resizing window-panel) nil))))
(defun window-panel-resize-callback (window-panel)
(cond (*window-is-modifying-p*)
(t
(unless (window-panel-resizing window-panel)
(setf (window-panel-resizing window-panel) t)
(mp:schedule-timer-relative (mp:make-timer #'update-window-size window-panel) 0.1)))))
| null | https://raw.githubusercontent.com/lem-project/lem/4f620f94a1fd3bdfb8b2364185e7db16efab57a1/frontends/capi/window-panel.lisp | lisp | (in-package :lem-capi)
(defclass window-panel (capi:column-layout)
((initialized
:initform nil
:accessor window-panel-initialized)
(minibuffer
:accessor window-panel-minibuffer)
(modified-p
:initform nil
:accessor window-panel-modified-p)
(resizing
:initform nil
:accessor window-panel-resizing)))
(defmethod initialize-instance :after ((window-panel window-panel) &rest initargs)
(declare (ignore initargs))
(let ((minibuffer-pane (make-instance 'window-pane
:visible-max-height '(:character 1)
:window-panel window-panel)))
(setf (window-panel-minibuffer window-panel) minibuffer-pane)
(setf (capi:layout-description window-panel)
(list (make-instance 'capi:column-layout
:description
(list nil
minibuffer-pane))))))
(defun set-first-window (window-panel window-pane)
(with-apply-in-pane-process-wait-single (window-panel)
(setf (window-panel-initialized window-panel) t)
(setf (capi:layout-description (first (capi:layout-description window-panel)))
(list window-pane (window-panel-minibuffer window-panel)))))
(defun map-window-panes (window-panel function)
(with-apply-in-pane-process-wait-single (window-panel)
(capi:map-pane-descendant-children
window-panel
(lambda (pane)
(when (typep pane 'window-pane)
(funcall function pane))))))
(defun all-window-panes (window-panel)
(let ((window-panes '()))
(map-window-panes window-panel
(lambda (window-pane)
(push window-pane window-panes)))
window-panes))
(defun window-panel-width (window-panel)
(with-apply-in-pane-process-wait-single (window-panel)
(let ((window-pane (window-panel-minibuffer window-panel)))
(round (capi:simple-pane-visible-width window-panel)
(window-pane-char-width window-pane)))))
(defun window-panel-height (window-panel)
(with-apply-in-pane-process-wait-single (window-panel)
(let ((window-pane (window-panel-minibuffer window-panel)))
(floor (capi:simple-pane-visible-height window-panel)
(window-pane-char-height window-pane)))))
(defun split-window (window-panel current-window-pane new-window-pane layout-class-name)
(with-apply-in-pane-process-wait-single (window-panel)
(block outer
(let ((*window-is-modifying-p* t))
(capi:map-pane-descendant-children
window-panel
(lambda (pane)
(when (or (typep pane 'capi:column-layout)
(typep pane 'capi:row-layout))
(when-let (pos (position current-window-pane (capi:layout-description pane)))
(setf (capi:layout-description pane)
(nconc (subseq (capi:layout-description pane) 0 pos)
(list (make-instance layout-class-name
:description (list current-window-pane
:divider
new-window-pane)))
(subseq (capi:layout-description pane) (1+ pos))))
(return-from outer)))))))))
(defun split-horizontally (window-panel current-window-pane new-window-pane)
(split-window window-panel current-window-pane new-window-pane 'capi:row-layout))
(defun split-vertically (window-panel current-window-pane new-window-pane)
(split-window window-panel current-window-pane new-window-pane 'capi:column-layout))
(defun window-panel-delete-window (window-panel window-pane)
(labels ((f (pane)
(cond ((typep pane 'capi:layout)
(let ((pos (position window-pane (capi:layout-description pane))))
(setf (capi:layout-ratios pane) nil)
(setf (capi:layout-description pane)
(if pos
(nconc (delete :divider (subseq (capi:layout-description pane) 0 pos)
:count 1 :from-end t)
(delete :divider (subseq (capi:layout-description pane) (1+ pos))
:count 1))
(delete nil
(map 'list #'f (capi:layout-description pane)))))
(cond ((null (capi:layout-description pane))
nil)
((null (rest (capi:layout-description pane)))
(first (capi:layout-description pane)))
(t
pane))))
(t
pane))))
(with-apply-in-pane-process-wait-single (window-panel)
(let ((*window-is-modifying-p* t))
(f window-panel)))))
(defun update-window-ratios (window-panel)
(labels ((sum (list)
(loop :for n :in list
:sum (or n 0) :into sum
:finally (return (if (zerop sum) nil sum))))
(f (pane)
(cond ((typep pane 'capi:row-layout)
(let* ((width-height-list (mapcar #'f (capi:layout-description pane)))
(ratios (mapcar #'first width-height-list)))
(setf (capi:layout-ratios pane) ratios)
(list (sum ratios)
(second (first width-height-list)))))
((typep pane 'capi:column-layout)
(let* ((width-height-list (mapcar #'f (capi:layout-description pane)))
(ratios (mapcar #'second width-height-list)))
(setf (capi:layout-ratios pane) ratios)
(list (first (first width-height-list))
(sum ratios))))
((typep pane 'window-pane)
(let ((window (if (eq pane (window-panel-minibuffer window-panel))
(lem::minibuffer-window)
(window-pane-window pane))))
(list (lem:window-width window)
(lem:window-height window))))
(t
(list nil nil)))))
(with-apply-in-pane-process-wait-single (window-panel)
(let ((*window-is-modifying-p* t))
(f window-panel)))))
(defun update-window-size (window-panel)
(labels ((f (pane x y)
(cond ((typep pane 'capi:row-layout)
(let ((w 0)
(h nil))
(dolist (child (capi:layout-description pane))
(unless (eq child :divider)
(multiple-value-bind (child-x child-y child-w child-h) (f child x y)
(declare (ignore child-x child-y))
(incf x child-w)
(incf w child-w)
(setf h child-h))))
(values x y w h)))
((typep pane 'capi:column-layout)
(let ((w nil)
(h 0))
(dolist (child (capi:layout-description pane))
(unless (eq child :divider)
(multiple-value-bind (child-x child-y child-w child-h) (f child x y)
(declare (ignore child-x child-y))
(incf y child-h)
(incf h child-h)
(setf w child-w))))
(values x y w h)))
((typep pane 'window-pane)
(if (eq pane (window-panel-minibuffer window-panel))
(multiple-value-bind (w h)
(window-pane-size pane)
(values x y w h))
(let ((window (window-pane-window pane)))
(multiple-value-bind (w h)
(window-pane-size pane)
(lem::window-set-pos window x y)
(lem::window-set-size window w h)
(values x y w h))))))))
(with-apply-in-pane-process-wait-single (window-panel)
(f window-panel 0 0)
(lem:send-event :resize)
(setf (window-panel-resizing window-panel) nil))))
(defun window-panel-resize-callback (window-panel)
(cond (*window-is-modifying-p*)
(t
(unless (window-panel-resizing window-panel)
(setf (window-panel-resizing window-panel) t)
(mp:schedule-timer-relative (mp:make-timer #'update-window-size window-panel) 0.1)))))
| |
b11a148d2c12843d1d1ee18dd81a30f6f453e0f8675bdc5a6bef24183451bd11 | monadbobo/ocaml-core | async_print.mli | (** Non-blocking, Async-friendly print functions *)
val print_char : char -> unit
val prerr_char : char -> unit
val print_string : string -> unit
val prerr_string : string -> unit
val print_int : int -> unit
val prerr_int : int -> unit
val print_float : float -> unit
val prerr_float : float -> unit
val print_endline : string -> unit
val prerr_endline : string -> unit
val print_newline : unit -> unit
val prerr_newline : unit -> unit
val printf : ('a, unit, string, unit) format4 -> 'a
val fprintf : Writer.t -> ('a, unit, string, unit) format4 -> 'a
val eprintf : ('a, unit, string, unit) format4 -> 'a
| null | https://raw.githubusercontent.com/monadbobo/ocaml-core/9c1c06e7a1af7e15b6019a325d7dbdbd4cdb4020/base/async/unix/lib/async_print.mli | ocaml | * Non-blocking, Async-friendly print functions |
val print_char : char -> unit
val prerr_char : char -> unit
val print_string : string -> unit
val prerr_string : string -> unit
val print_int : int -> unit
val prerr_int : int -> unit
val print_float : float -> unit
val prerr_float : float -> unit
val print_endline : string -> unit
val prerr_endline : string -> unit
val print_newline : unit -> unit
val prerr_newline : unit -> unit
val printf : ('a, unit, string, unit) format4 -> 'a
val fprintf : Writer.t -> ('a, unit, string, unit) format4 -> 'a
val eprintf : ('a, unit, string, unit) format4 -> 'a
|
6d9a7ff1fc621ef8ea31e114054a05c5236790305e9c6549c04305efea14add8 | gator1/jepsen | perf_test.clj | (ns block.perf-test
(:require [clojure.test :refer :all]
[block.core :refer :all]
[jepsen.core :as jepsen]
[jepsen.generator :as gen]
[jepsen.tests :as tests])
(:use clojure.tools.logging))
(defn fsperf-map
[t1 t2]
;tests/noop-test
(assoc tests/noop-test
:name "fsperf-test"
:nodes [:osd0 :osd1 :osd2]
;:nodes [:n1 :n2 :n3]
:concurrency 1
:client (client)
:nemesis (partition-node :osd1)
;:nemesis (partition-node :n2)
:generator (->> w
(gen/stagger 1)
(gen/nemesis
(gen/seq (cycle [(gen/sleep t1)
{:type :info, :f :start}
(gen/sleep t2)
{:type :info, :f :stop}])))
(op-limit 200))
:checker perf-checker)
)
; block performance testing
; testcase 0: no network partition
(deftest fsperf-test-0
(info "performance test #0\n")
(let [test (assoc (fsperf-map 0 0)
:generator (->> w
(gen/stagger 1)
(gen/clients)
(op-limit 200)))]
(is (:valid? (:results (jepsen/run! test))))))
testcase 1 : n2 out for 2s
(deftest fsperf-test-1
(info "performance test #1\n")
(is (:valid? (:results (jepsen/run! (fsperf-map 2 2))))))
testcase 2 : n2 out for 100s , which causes no response
(deftest fsperf-test-2
(info "performance test #2\n")
(is (:valid? (:results (jepsen/run! (fsperf-map 0 100))))))
| null | https://raw.githubusercontent.com/gator1/jepsen/1932cbd72cbc1f6c2a27abe0fe347ea989f0cfbb/ceph-block/test/block/perf_test.clj | clojure | tests/noop-test
:nodes [:n1 :n2 :n3]
:nemesis (partition-node :n2)
block performance testing
testcase 0: no network partition | (ns block.perf-test
(:require [clojure.test :refer :all]
[block.core :refer :all]
[jepsen.core :as jepsen]
[jepsen.generator :as gen]
[jepsen.tests :as tests])
(:use clojure.tools.logging))
(defn fsperf-map
[t1 t2]
(assoc tests/noop-test
:name "fsperf-test"
:nodes [:osd0 :osd1 :osd2]
:concurrency 1
:client (client)
:nemesis (partition-node :osd1)
:generator (->> w
(gen/stagger 1)
(gen/nemesis
(gen/seq (cycle [(gen/sleep t1)
{:type :info, :f :start}
(gen/sleep t2)
{:type :info, :f :stop}])))
(op-limit 200))
:checker perf-checker)
)
(deftest fsperf-test-0
(info "performance test #0\n")
(let [test (assoc (fsperf-map 0 0)
:generator (->> w
(gen/stagger 1)
(gen/clients)
(op-limit 200)))]
(is (:valid? (:results (jepsen/run! test))))))
testcase 1 : n2 out for 2s
(deftest fsperf-test-1
(info "performance test #1\n")
(is (:valid? (:results (jepsen/run! (fsperf-map 2 2))))))
testcase 2 : n2 out for 100s , which causes no response
(deftest fsperf-test-2
(info "performance test #2\n")
(is (:valid? (:results (jepsen/run! (fsperf-map 0 100))))))
|
b1f1639c46626fa633e1f178e176a0b26bedef12f9608a0b7d0e279defac1ec1 | ekmett/guanxi | queens.hs | # language ViewPatterns #
{-# language TupleSections #-}
module Main where
import Control.Monad (unless)
import Control.Monad.ST
import Cover.DXZ
import Data.Foldable
import System.Exit
queens :: Int -> Int
queens n = runST $ do
x <- newCover_
rows <- addItems x n
cols <- addItems x n
diag1 <- addOptionalItems x (n+n)
diag2 <- addOptionalItems x (n+n)
let organ i = fromIntegral $ div (if odd i then n-1-i else n+i) 2
for_ [0..n-1] $ \(organ -> r) ->
for_ [0..n-1] $ \(organ -> c) ->
addOption x [rows+r,cols+c,diag1+r+c,diag2+n-1-r+c]
count x
main :: IO ()
main = do
let n = queens 12
print n
unless (n == 14200) exitFailure
| null | https://raw.githubusercontent.com/ekmett/guanxi/e267f4210a9c10d0091371ea9b028b7d6fa8b9f3/test/queens.hs | haskell | # language TupleSections # | # language ViewPatterns #
module Main where
import Control.Monad (unless)
import Control.Monad.ST
import Cover.DXZ
import Data.Foldable
import System.Exit
queens :: Int -> Int
queens n = runST $ do
x <- newCover_
rows <- addItems x n
cols <- addItems x n
diag1 <- addOptionalItems x (n+n)
diag2 <- addOptionalItems x (n+n)
let organ i = fromIntegral $ div (if odd i then n-1-i else n+i) 2
for_ [0..n-1] $ \(organ -> r) ->
for_ [0..n-1] $ \(organ -> c) ->
addOption x [rows+r,cols+c,diag1+r+c,diag2+n-1-r+c]
count x
main :: IO ()
main = do
let n = queens 12
print n
unless (n == 14200) exitFailure
|
80c92ed989a9d9b86583c6e294adace9db910a9d68908cad6e785223318b3940 | kahua/Kahua | xml.scm | -*- coding : utf-8 ; mode : scheme -*-
generic framework to test XML generation code
;;
Copyright ( c ) 2003 - 2007 Scheme Arts , L.L.C. , All rights reserved .
Copyright ( c ) 2003 - 2007 Time Intermedia Corporation , All rights reserved .
;; See COPYING for terms and conditions of using this software
;;
;; This module provides the means of test the result of HTML
;; generating code, such as CGI programs. The output of
;; these code sometimes includes a information which may not be
;; able to predict at the time the test is written; an example
;; of such information is the timestamp and the session id.
;;
;; The test-xml-match? procedure uses a pattern to match the
;; output of the tested code, instead of doing literal match.
;; The pattern may include "don't care" node, and a pattern
;; variable that can be used to check certain constraints.
;;
;; test-xml-match? pattern input &optional extra-check
;;
;; Input may be a string or a list. If it is a list,
;; first it is converted to a string by calling tree->string
;; of text.tree module.
;;
Then , the input string is parsed by ssax XML parser ,
to produce a SXML structure , which is matched to pattern .
;;
Pattern is an S - expression that resembles to SXML , but
;; can contain a pattern variable. The formal specification
;; of pattern is as follows:
;;
;; <pattern> : <node>
;; <node> : <string> | <pattern-variable>
;; | (<key> <attr-node>? <content> ...)
;; <key> : <literal-symbol>
;;
;; <attr-node> : (@ <content> ...)
;; | ?@
;;
;; <content> : <node>
;; | (!seq <pattern> ...)
;; | (!permute <pattern> ...)
;; | (!or <pattern> ...)
;; | (!repeat <pattern> ...)
;;
;; <literal-symbol> : any symbol except that begins with '?' or '!'
;; <pattern-variable> : a symbol that begins with '?'
;;
;; <string> and <literal-symbol> matches to the input as is.
;;
;; <pattern-variable> matches any object in the input, in that place.
;; The matcher records the pattern variable and matched object,
;; which will be used for extra check performed by extra-check
;; procedure described below.
;;
;; (Current version doesn't care about the name of pattern variable,
;; but in future we may add a constraint that the same pattern variable
;; should refer to the isomorphic stucture. To represent a "don't care"
;; part, use a pattern variable ?_, which will be reserved for such
;; a purpose.)
;;
;; A special pattern variable ?@ matches an attr-node, if it is present.
;; If there's no attr-node, ?@ is ignored. It's convenient to silently
;; ignore attributes.
;;
;; A special pattern variable ?* matches as if (!repeat ?_), that is,
;; matches everything after.
;;
;; Attr node is treated specially. Its contents matches arbitrary
;; permutation of the pattern.
;;
;; (!seq <pattern> ...)
;; Matches the sequcne of <pattern> .... When it appears as
;; a <content>, <pattern> ... is taken as if it is spliced
;; into the sequence of <content>; that is, the following pattern:
;;
;; (ul (li "foo") (!seq (li "bar") (li "baz")) (li "oof"))
;;
;; matches the input:
;;
;; (ul (li "foo") (li "bar") (li "baz") (li "oof"))
;;
;; (!permute <pattern> ...)
;; Matches a sequence of any permutation of <pattern>s.
;; The permuted pattern is spliced to the containing
;; sequece of <content>; that is, the following pattern:
;;
;; (ul (li "foo") (!permute (li "bar") (li "baz")) (li "oof"))
;;
;; matches the input:
;;
;; (ul (li "foo") (li "baz") (li "bar") (li "oof"))
;;
;; (!or <pattern> ...)
;;
;; Matches any one of <pattern>s. The splicing rule is applied
;; recursively; the following pattern:
;;
;; (ul (li "foo") (!or (!seq (li "bar") (li "baz")) (li "ZZ")))
;;
;; matches both of the following input:
;;
;; (ul (li "foo") (li "bar") (li "baz"))
;; (ul (li "foo") (li "ZZ"))
;;
;; (!repeat <pattern> ...)
;;
Matches zero or more occurence of input that matches < pattern > ...
;; The matched pattern variables are forgotten in every iteration
;; except the last one. A pattern:
;;
;; (dl (!repeat (dt ?_) (dd ?_)))
;;
;; matches the input:
;;
( dl ( dt " foo " ) ( dd " bar " ) ( dt " foo2 " ) ( dd " bar2 " ) )
;;
;; (!contain <pattern> ...)
;;
;; Matches any sequence that includes all of <pattern>s, in any
;; order. The input pattern may contain items that doesn't
;; match any of <pattern>s. It can be achieved by
;; (!permute ?* <pattern> ?* <pattern> ... <pattern> ?*),
;; but !contain is much more efficient.
;;
;; When an optional argument extra-check is given, it is
called with one argument , an assoc list of pattern variable
;; and the matched value. It can perform extra check, and returns
;; #f if the check fails, or #t if succeeds.
;; -------------------------------------------------------------------------
;;
;; このモジュールは HTML を生成するコード、たとえば、CGIプログラムの
;; 結果をテストする手段を提供するものです。こうしたコードの出力は、
;; テストを書いたときには、分らない情報が含まれることがあります。
;; このような情報の例としては、タイムスタンプやセッションIDがあります。
;;
;; test-xml-match? 手続きはテストされるコードの出力にマッチする
;; パターンを使います。このパターンには、「鎌○ぬ」節を含めることが
できます。また、いくつかの制約をチェックするのに使えるパターン
変数も含められます 。
;;
;; test-xml-match? pattern input &optional extra-check
;;
;; input は文字列またはリスト。リストなら、先ず text.tree モジュールの
;; tree->string 手続きでリストから文字列に変換します。
;;
;; その後、入力された文字列は、ssax XML パーザで構文解析されて、
SXML 構文が生成され、これが、パターンと照合されます 。
;;
パターンは SXML に類似したS式ですが、パターン変数を含められます 。
形式的使用は以下のようになっています 。
;;
;; <pattern> : <node>
;; <node> : <string> | <pattern-variable>
;; | (<key> <attr-node>? <content> ...)
;; <key> : <literal-symbol>
;;
;; <attr-node> : (@ <content> ...)
;; | ?@
;;
;; <content> : <node>
;; | (!seq <pattern> ...)
;; | (!permute <pattern> ...)
;; | (!or <pattern> ...)
;; | (!repeat <pattern> ...)
;;
;; <literal-symbol> : 先頭が'?'あるいは'!'ではないすべてのシンボル
;;
;; <pattern-variable> : 先頭が'?'であるシンボル
;;
< string > と < literal - symbol >
;;
;; <pattern-variable> は入力中のすべてのオブジェクトとマッチします。
;; 照合器はパターン変数とマッチしたオブジェクトとを記録します。
;;
;; (現バージョンでは、パターン変数名については、何を使ってもかまいません
;; しかし、将来、同じ変数名なら同型の構造を参照するという制約を加える
;; 可能性があります。「構わない」部分を表現するのに、パターン変数 ?_ を
;; 使います。これは、その用途のために予約します。
;;
;; パターン変数?@は、attr-nodeがもしあればそれにマッチします。
;; アトリビュートを無視したい場合に便利です。
;;
パターン変数?*は、(!repeat ? _ )
;;
;; (!seq <pattern> ...)
;;
;; <pattern> ... の並びにマッチします。<content> の外観であれば
;; <pattern> ... は繋ぎあわされて <content> の並びとなります。
すなわち、以下のパターン
;;
;; (ul (li "foo") (!seq (li "bar") (li "baz")) (li "oof"))
;;
;; は、以下の入力とマッチします。
;;
;; (ul (li "foo") (li "bar") (li "baz") (li "oof"))
;;
;; (!permute <pattern> ...)
;;
;; <pattern> ... のすべての順列のどれかの並びにマッチします。
;; 並びは、繋ぎあわされ <content> の並びを含むものになります。
すなわち、以下のパターン
;;
;; (ul (li "foo") (!permute (li "bar") (li "baz")) (li "oof"))
;;
;; は以下の入力にマッチします。
;;
;; (ul (li "foo") (li "baz") (li "bar") (li "oof"))
;;
;; (!or <pattern> ...)
;;
;; <pattern> ... のどれにもマッチします。繋ぎあわせのルールは
;; 再帰的に適用されます。以下のパターン
;;
;; (ul (li "foo") (!or (!seq (li "bar") (li "baz")) (li "ZZ")))
;;
;; は、以下の入力の両方にマッチします。
;;
;; (ul (li "foo") (li "bar") (li "baz"))
;; (ul (li "foo") (li "ZZ"))
;;
;; (!repeat <pattern> ...)
;;
;; 入力のなかの <pattern> ... にマッチするもののゼロ個以上の出現に
;; マッチします。各反復中で、マッチしたパターン変数は最後ののぞき
;; 覚えられていません。パターン
;;
;; (dl (!repeat (dt ?_) (dd ?_)))
;;
;; は以下の入力にマッチします。
;;
( dl ( dt " foo " ) ( dd " bar " ) ( dt " foo2 " ) ( dd " bar2 " ) )
;;
オプション引数 extra - check を与えると、これは、パターン変数と
マッチした値との連想リストを引数として呼び出されます。これにより
追加のチェックが行なわれ、失敗すると # f 成功すると # t が返ります 。
;; -------------------------------------------------------------------------
NB : the module name is provisional . Will be either a part of
, or a part of Gauche .
注意 :
;; Gauche の一部とするかのどちらかになります。
(define-module kahua.test.xml
(use srfi-1)
(use srfi-13)
(use gauche.test)
(use util.combinations)
(use text.tree)
(use sxml.ssax)
(use sxml.sxpath)
(export test-xml-match? test-sxml-match?
test-xml-select-matcher test-sxml-select-matcher))
(select-module kahua.test.xml)
;; True iff OBJ is a pattern variable: a symbol whose printed name
;; begins with `?'.
(define (pattern-var? obj)
  (if (symbol? obj)
      (string-prefix? "?" (symbol->string obj))
      #f))
;; True iff OBJ is a pattern directive key: a symbol whose printed name
;; begins with `!' (e.g. !seq, !permute, !repeat).
(define (pattern-key? obj)
  (if (symbol? obj)
      (string-prefix? "!" (symbol->string obj))
      #f))
;; True iff NODE is an SXML attribute node, i.e. a pair headed by `@'.
(define (attr-node? node)
  (if (pair? node)
      (eq? '@ (car node))
      #f))
;; Put NODES into a canonical order for permutation matching: pairs
;; (elements/attributes) come first, ordered by the string form of their
;; head symbol; atoms follow.
(define (sort-nodes nodes)
  (define (node<? a b)
    (cond ((not (pair? a)) #f)
          ((not (pair? b)) #t)
          (else (string<? (x->string (car a)) (x->string (car b))))))
  (sort nodes node<?))
;; Apply PRED to each distinct permutation of SEQ and escape with the
;; first truthy result; return #f when no permutation satisfies PRED.
;; permutations*-for-each (util.combinations) skips duplicate
;; permutations using equal?, and call/cc provides the early exit.
(define (any-permutation pred seq)
  (call/cc
   (lambda (break)
     (permutations*-for-each (lambda (seq) (cond ((pred seq) => break)))
                             seq equal?)
     #f)))
Match one pattern item .
;; Because of "splicing" nature of the pattern, it takes a list of inputs.
;; When matched, the continuation procedure is called with the rest of
;; inputs and the pattern binding alist.
ひとつのパターン項目にマッチする。パターンを"繋ぐ"性質により入力のリスト
;; 束縛 A-リストを引数として呼びだされます。
;; Match a single pattern item PAT against the head of the input node
;; list LS.  On success calls the continuation CONT with the unconsumed
;; inputs and the binding alist R (extended for pattern variables); a
;; #f return anywhere propagates back as a match failure, which drives
;; backtracking in the enclosing combinators.
(define (match-pattern pat ls cont r)
  (cond
   ((eq? pat '?@) ;; specially treats attr-node match
    ;; Optional attribute node: consume one if present, else match nothing.
    (cond ((null? ls) (cont ls r))
          ((attr-node? (car ls)) (cont (cdr ls) (acons pat (car ls) r)))
          (else (cont ls r))))
   ((eq? pat '?*) ;; matches the rest of the pattern. note for backtrack.
    (match-pattern '(!repeat ?_) ls cont r))
   ((pattern-var? pat)
    ;; A ?var consumes exactly one node and records the binding.
    (and (not (null? ls))
         (cont (cdr ls) (acons pat (car ls) r))))
   ((not (pair? pat))
    ;; Literal atom: must be equal? to the next input node.
    (and (not (null? ls))
         (equal? pat (car ls))
         (cont (cdr ls) r)))
   ((attr-node? pat)
    ;; (@ ...) matches an attribute node with attributes in any order;
    ;; both sides are canonically sorted first to prune permutations.
    (and (not (null? ls))
         (attr-node? (car ls))
         (any-permutation (cute match-contents (sort-nodes (cdr pat)) <>
                                (lambda (more r)
                                  (and (null? more) (cont (cdr ls) r)))
                                r)
                          (sort-nodes (cdar ls)))))
   ((not (pattern-key? (car pat)))
    ;; Ordinary element pattern (tag content ...): tags must be eq? and
    ;; the contents must be matched exhaustively.
    (and (pair? ls)
         (pair? (car ls))
         (eq? (car pat) (caar ls))
         (match-contents (cdr pat) (cdar ls)
                         (lambda (more r)
                           (and (null? more) (cont (cdr ls) r)))
                         r)))
   (else
    ;; Pattern directives (symbols beginning with `!').
    (case (car pat)
      ((!seq)      ;; splice sub-patterns, matched in order
       (match-contents (cdr pat) ls cont r))
      ((!permute)  ;; splice sub-patterns, matched in any order
       (any-permutation (cut match-contents <> ls cont r) (cdr pat)))
      ((!contain)  ;; sub-patterns occur somewhere among the inputs
       (any-permutation (cut match-contain <> ls cont r) (cdr pat)))
      ((!exclude)  ;; succeeds iff none of the sub-patterns occur.
       ;; NOTE(review): this returns a bare boolean instead of calling
       ;; CONT, so no bindings survive -- verify callers expect this.
       (not (match-pattern `(!contain (!or ,@(cdr pat))) ls cont r)))
      ((!or)       ;; first alternative that matches wins
       (any (cut match-pattern <> ls cont r)
            (cdr pat)))
      ((!repeat)   ;; zero or more repetitions; greedy: the `or' tries
                   ;; one more repetition before yielding to CONT.
       (let loop ((ls ls) (r r))
         (or (match-contents (cdr pat) ls loop r)
             (cont ls r))))
      (else (error "unknown pattern directive:" (car pat)))))
   ))
;; Match the pattern list PATS against LS from left to right.  Each
;; pattern consumes a prefix of LS; CONT finally receives whatever
;; input remains, together with the accumulated bindings.
(define (match-contents pats ls cont r)
  (if (null? pats)
      (cont ls r)
      (match-pattern (car pats) ls
                     (cute match-contents (cdr pats) <> cont <>)
                     r)))
;; Match every pattern in PATS somewhere within LS: patterns are tried
;; in order, but arbitrary input nodes may be skipped between (and
;; after) matches.  Used by the !contain directive.
(define (match-contain pats ls cont r)
  (cond
   ((null? pats) (cont '() r)) ;; discards remaining inputs
   ((null? ls) #f) ;; ran out inputs
   (else
    ;; Either the first pattern matches starting here, or skip one
    ;; input node and try again.
    (or (match-pattern (car pats) ls
                       (cute match-contain (cdr pats) <> cont <>)
                       r)
        (match-contain pats (cdr ls) cont r)))))
;; Match PATTERN against the node list INPUT, requiring the whole input
;; to be consumed.  The optional argument is an extra-check procedure
;; applied to the final binding alist; it defaults to always-true.
(define (match-input pattern input . opts)
  (define extra-check (get-optional opts (lambda (_) #t)))
  (match-pattern pattern input
                 (lambda (rest bindings)
                   (if (null? rest) (extra-check bindings) #f))
                 '()))
;; Entry
;; Match PATTERN against an SXML node INPUT; always fails on the
;; gauche.test error sentinel.
(define (test-sxml-match? pattern input . opts)
  (if (equal? input *test-error*)
      #f
      (apply match-input pattern (list input) opts)))
;; Flatten INPUT (a string or text tree) to a string, parse it as XML,
;; and match PATTERN against the resulting SXML content (sans *TOP*).
(define (test-xml-match? pattern input . opts)
  (if (equal? input *test-error*)
      #f
      (let* ((xml-string (tree->string input))
             (sxml (call-with-input-string xml-string
                     (cut ssax:xml->sxml <> '()))))
        (apply match-input pattern (cdr sxml) opts))))
;; Build a matcher that first narrows the SXML input with the sxpath
;; PATH, then matches the selection against a pattern.  The input is
;; wrapped in *TOP* when it lacks one, since sxpath expects a rooted tree.
(define (test-sxml-select-matcher path . maybe-extra-check)
  (let ((select (sxpath path)))
    (lambda (pattern input)
      (cond ((equal? input *test-error*) #f)
            (else
             (let ((tree (if (and (pair? input) (eq? '*TOP* (car input)))
                             input
                             (list '*TOP* input))))
               (apply match-input pattern (select tree)
                      maybe-extra-check)))))))
;; Like test-sxml-select-matcher, but the input is raw XML text (string
;; or text tree) which is parsed with SSAX before selection.
(define (test-xml-select-matcher path . maybe-extra-check)
  (let ((select (sxpath path)))
    (lambda (pattern input)
      (if (equal? input *test-error*)
          #f
          (let ((parsed (call-with-input-string (tree->string input)
                          (cut ssax:xml->sxml <> '()))))
            (apply match-input pattern (select parsed)
                   maybe-extra-check))))))
(provide "kahua/test/xml")
| null | https://raw.githubusercontent.com/kahua/Kahua/c90fe590233e4540923e4e5cc9f61da32873692c/src/kahua/test/xml.scm | scheme | mode : scheme -*-
See COPYING for terms and conditions of using this software
This module provides the means of test the result of HTML
generating code, such as CGI programs. The output of
these code sometimes includes a information which may not be
able to predict at the time the test is written; an example
of such information is the timestamp and the session id.
The test-xml-match? procedure uses a pattern to match the
output of the tested code, instead of doing literal match.
The pattern may include "don't care" node, and a pattern
variable that can be used to check certain constraints.
test-xml-match? pattern input &optional extra-check
Input may be a string or a list. If it is a list,
first it is converted to a string by calling tree->string
of text.tree module.
can contain a pattern variable. The formal specification
of pattern is as follows:
<pattern> : <node>
<node> : <string> | <pattern-variable>
| (<key> <attr-node>? <content> ...)
<key> : <literal-symbol>
<attr-node> : (@ <content> ...)
| ?@
<content> : <node>
| (!seq <pattern> ...)
| (!permute <pattern> ...)
| (!or <pattern> ...)
| (!repeat <pattern> ...)
<literal-symbol> : any symbol except that begins with '?' or '!'
<pattern-variable> : a symbol that begins with '?'
<string> and <literal-symbol> matches to the input as is.
<pattern-variable> matches any object in the input, in that place.
The matcher records the pattern variable and matched object,
which will be used for extra check performed by extra-check
procedure described below.
(Current version doesn't care about the name of pattern variable,
but in future we may add a constraint that the same pattern variable
should refer to the isomorphic stucture. To represent a "don't care"
part, use a pattern variable ?_, which will be reserved for such
a purpose.)
A special pattern variable ?@ matches an attr-node, if it is present.
If there's no attr-node, ?@ is ignored. It's convenient to silently
ignore attributes.
A special pattern variable ?* matches as if (!repeat ?_), that is,
matches everything after.
Attr node is treated specially. Its contents matches arbitrary
permutation of the pattern.
(!seq <pattern> ...)
Matches the sequcne of <pattern> .... When it appears as
a <content>, <pattern> ... is taken as if it is spliced
into the sequence of <content>; that is, the following pattern:
(ul (li "foo") (!seq (li "bar") (li "baz")) (li "oof"))
matches the input:
(ul (li "foo") (li "bar") (li "baz") (li "oof"))
(!permute <pattern> ...)
Matches a sequence of any permutation of <pattern>s.
The permuted pattern is spliced to the containing
sequece of <content>; that is, the following pattern:
(ul (li "foo") (!permute (li "bar") (li "baz")) (li "oof"))
matches the input:
(ul (li "foo") (li "baz") (li "bar") (li "oof"))
(!or <pattern> ...)
Matches any one of <pattern>s. The splicing rule is applied
recursively; the following pattern:
(ul (li "foo") (!or (!seq (li "bar") (li "baz")) (li "ZZ")))
matches both of the following input:
(ul (li "foo") (li "bar") (li "baz"))
(ul (li "foo") (li "ZZ"))
(!repeat <pattern> ...)
The matched pattern variables are forgotten in every iteration
except the last one. A pattern:
(dl (!repeat (dt ?_) (dd ?_)))
matches the input:
(!contain <pattern> ...)
Matches any sequence that includes all of <pattern>s, in any
order. The input pattern may contain items that doesn't
match any of <pattern>s. It can be achieved by
(!permute ?* <pattern> ?* <pattern> ... <pattern> ?*),
but !contain is much more efficient.
When an optional argument extra-check is given, it is
and the matched value. It can perform extra check, and returns
#f if the check fails, or #t if succeeds.
-------------------------------------------------------------------------
このモジュールは HTML を生成するコード、たとえば、CGIプログラムの
結果をテストする手段を提供するものです。こうしたコードの出力は、
テストを書いたときには、分らない情報が含まれることがあります。
このような情報の例としては、タイムスタンプやセッションIDがあります。
test-xml-match? 手続きはテストされるコードの出力にマッチする
パターンを使います。このパターンには、「鎌○ぬ」節を含めることが
test-xml-match? pattern input &optional extra-check
input は文字列またはリスト。リストなら、先ず text.tree モジュールの
tree->string 手続きでリストから文字列に変換します。
その後、入力された文字列は、ssax XML パーザで構文解析されて、
<pattern> : <node>
<node> : <string> | <pattern-variable>
| (<key> <attr-node>? <content> ...)
<key> : <literal-symbol>
<attr-node> : (@ <content> ...)
| ?@
<content> : <node>
| (!seq <pattern> ...)
| (!permute <pattern> ...)
| (!or <pattern> ...)
| (!repeat <pattern> ...)
<literal-symbol> : 先頭が'?'あるいは'!'ではないすべてのシンボル
<pattern-variable> : 先頭が'?'であるシンボル
<pattern-variable> は入力中のすべてのオブジェクトとマッチします。
照合器はパターン変数とマッチしたオブジェクトとを記録します。
(現バージョンでは、パターン変数名については、何を使ってもかまいません
しかし、将来、同じ変数名なら同型の構造を参照するという制約を加える
可能性があります。「構わない」部分を表現するのに、パターン変数 ?_ を
使います。これは、その用途のために予約します。
パターン変数?@は、attr-nodeがもしあればそれにマッチします。
アトリビュートを無視したい場合に便利です。
(!seq <pattern> ...)
<pattern> ... の並びにマッチします。<content> の外観であれば
<pattern> ... は繋ぎあわされて <content> の並びとなります。
(ul (li "foo") (!seq (li "bar") (li "baz")) (li "oof"))
は、以下の入力とマッチします。
(ul (li "foo") (li "bar") (li "baz") (li "oof"))
(!permute <pattern> ...)
<pattern> ... のすべての順列のどれかの並びにマッチします。
並びは、繋ぎあわされ <content> の並びを含むものになります。
(ul (li "foo") (!permute (li "bar") (li "baz")) (li "oof"))
は以下の入力にマッチします。
(ul (li "foo") (li "baz") (li "bar") (li "oof"))
(!or <pattern> ...)
<pattern> ... のどれにもマッチします。繋ぎあわせのルールは
再帰的に適用されます。以下のパターン
(ul (li "foo") (!or (!seq (li "bar") (li "baz")) (li "ZZ")))
は、以下の入力の両方にマッチします。
(ul (li "foo") (li "bar") (li "baz"))
(ul (li "foo") (li "ZZ"))
(!repeat <pattern> ...)
入力のなかの <pattern> ... にマッチするもののゼロ個以上の出現に
マッチします。各反復中で、マッチしたパターン変数は最後ののぞき
覚えられていません。パターン
(dl (!repeat (dt ?_) (dd ?_)))
は以下の入力にマッチします。
-------------------------------------------------------------------------
Gauche の一部とするかのどちらかになります。
Because of "splicing" nature of the pattern, it takes a list of inputs.
When matched, the continuation procedure is called with the rest of
inputs and the pattern binding alist.
束縛 A-リストを引数として呼びだされます。
specially treats attr-node match
matches the rest of the pattern. note for backtrack.
discards remaining inputs
ran out inputs
Entry
kludge to deal with *TOP* | generic framework to test XML generation code
Copyright ( c ) 2003 - 2007 Scheme Arts , L.L.C. , All rights reserved .
Copyright ( c ) 2003 - 2007 Time Intermedia Corporation , All rights reserved .
Then , the input string is parsed by ssax XML parser ,
to produce a SXML structure , which is matched to pattern .
Pattern is an S - expression that resembles to SXML , but
Matches zero or more occurence of input that matches < pattern > ...
( dl ( dt " foo " ) ( dd " bar " ) ( dt " foo2 " ) ( dd " bar2 " ) )
called with one argument , an assoc list of pattern variable
できます。また、いくつかの制約をチェックするのに使えるパターン
変数も含められます 。
SXML 構文が生成され、これが、パターンと照合されます 。
パターンは SXML に類似したS式ですが、パターン変数を含められます 。
形式的使用は以下のようになっています 。
< string > と < literal - symbol >
パターン変数?*は、(!repeat ? _ )
すなわち、以下のパターン
すなわち、以下のパターン
( dl ( dt " foo " ) ( dd " bar " ) ( dt " foo2 " ) ( dd " bar2 " ) )
オプション引数 extra - check を与えると、これは、パターン変数と
マッチした値との連想リストを引数として呼び出されます。これにより
追加のチェックが行なわれ、失敗すると # f 成功すると # t が返ります 。
NB : the module name is provisional . Will be either a part of
, or a part of Gauche .
注意 :
(define-module kahua.test.xml
(use srfi-1)
(use srfi-13)
(use gauche.test)
(use util.combinations)
(use text.tree)
(use sxml.ssax)
(use sxml.sxpath)
(export test-xml-match? test-sxml-match?
test-xml-select-matcher test-sxml-select-matcher))
(select-module kahua.test.xml)
(define (pattern-var? obj)
(and (symbol? obj)
(string-prefix? "?" (symbol->string obj))))
(define (pattern-key? obj)
(and (symbol? obj)
(string-prefix? "!" (symbol->string obj))))
(define (attr-node? node)
(and (pair? node) (eq? (car node) '@)))
(define (sort-nodes nodes)
(sort nodes
(lambda (a b)
(if (pair? a)
(if (pair? b)
(string<? (x->string (car a)) (x->string (car b)))
#t)
#f))))
(define (any-permutation pred seq)
(call/cc
(lambda (break)
(permutations*-for-each (lambda (seq) (cond ((pred seq) => break)))
seq equal?)
#f)))
Match one pattern item .
ひとつのパターン項目にマッチする。パターンを"繋ぐ"性質により入力のリスト
(define (match-pattern pat ls cont r)
(cond
(cond ((null? ls) (cont ls r))
((attr-node? (car ls)) (cont (cdr ls) (acons pat (car ls) r)))
(else (cont ls r))))
(match-pattern '(!repeat ?_) ls cont r))
((pattern-var? pat)
(and (not (null? ls))
(cont (cdr ls) (acons pat (car ls) r))))
((not (pair? pat))
(and (not (null? ls))
(equal? pat (car ls))
(cont (cdr ls) r)))
((attr-node? pat)
(and (not (null? ls))
(attr-node? (car ls))
(any-permutation (cute match-contents (sort-nodes (cdr pat)) <>
(lambda (more r)
(and (null? more) (cont (cdr ls) r)))
r)
(sort-nodes (cdar ls)))))
((not (pattern-key? (car pat)))
(and (pair? ls)
(pair? (car ls))
(eq? (car pat) (caar ls))
(match-contents (cdr pat) (cdar ls)
(lambda (more r)
(and (null? more) (cont (cdr ls) r)))
r)))
(else
(case (car pat)
((!seq)
(match-contents (cdr pat) ls cont r))
((!permute)
(any-permutation (cut match-contents <> ls cont r) (cdr pat)))
((!contain)
(any-permutation (cut match-contain <> ls cont r) (cdr pat)))
((!exclude)
(not (match-pattern `(!contain (!or ,@(cdr pat))) ls cont r)))
((!or)
(any (cut match-pattern <> ls cont r)
(cdr pat)))
((!repeat)
(let loop ((ls ls) (r r))
(or (match-contents (cdr pat) ls loop r)
(cont ls r))))
(else (error "unknown pattern directive:" (car pat)))))
))
(define (match-contents pats ls cont r)
(if (null? pats)
(cont ls r)
(match-pattern (car pats) ls
(cute match-contents (cdr pats) <> cont <>)
r)))
(define (match-contain pats ls cont r)
(cond
(else
(or (match-pattern (car pats) ls
(cute match-contain (cdr pats) <> cont <>)
r)
(match-contain pats (cdr ls) cont r)))))
(define (match-input pattern input . opts)
(let ((extra-check (get-optional opts (lambda (r) #t))))
(match-pattern pattern input
(lambda (more r) (and (null? more) (extra-check r)))
'())))
(define (test-sxml-match? pattern input . opts)
(and (not (equal? input *test-error*))
(apply match-input pattern (list input) opts)))
(define (test-xml-match? pattern input . opts)
(and (not (equal? input *test-error*))
(apply match-input pattern
(cdr (call-with-input-string (tree->string input)
(cut ssax:xml->sxml <> '())))
opts)))
(define (test-sxml-select-matcher path . maybe-extra-check)
(let ((selector (sxpath path)))
(lambda (pattern input)
(and (not (equal? input *test-error*))
(apply match-input pattern
(selector (if (and (pair? input) (eq? (car input) '*TOP*))
input
`(*TOP* ,input)))
maybe-extra-check)))))
(define (test-xml-select-matcher path . maybe-extra-check)
(let ((selector (sxpath path)))
(lambda (pattern input)
(and (not (equal? input *test-error*))
(let ((parsed (call-with-input-string (tree->string input)
(cut ssax:xml->sxml <> '()))))
(apply match-input pattern (selector parsed)
maybe-extra-check))))))
(provide "kahua/test/xml")
|
cb54b324eca3a2db46c9bef888aa05f04bc6cb3acda37aa79469fc1576ca6f11 | unbounce/encors | core_test.clj | (ns com.unbounce.encors.core-test
(:require [clojure.test :refer :all]
[clojure.set :as set]
[clojure.string :as str]
[com.unbounce.encors.types :as types]
[com.unbounce.encors.core :refer :all]))
(def default-cors-options
  "Baseline CORS policy map shared by the tests below; each test
  overrides only the keys it exercises."
  {:allowed-origins nil
   :allowed-methods #{}
   :exposed-headers nil
   :request-headers #{}
   :max-age nil
   :allow-credentials? false
   :origin-varies? true
   :require-origin? true
   :ignore-failures? false})
(deftest cors-common-headers-test
  ;; Common-header generation across origin presence, origin-varies?
  ;; and allow-credentials? combinations.
  (testing "when origin is not present when origin-varies? is true"
    (is (= (cors-common-headers nil default-cors-options)
           {"Access-Control-Allow-Origin" "*"
            "Vary" "Origin"})))
  (testing "when origin is not present and origin-varies? is false"
    (is (= (cors-common-headers nil (assoc default-cors-options
                                           :origin-varies? false))
           {"Access-Control-Allow-Origin" "*"})))
  (testing "when origin is present and allow credentials is true"
    (let [origin "foobar.com"]
      (is (= (cors-common-headers origin (assoc default-cors-options
                                                :allowed-origins #{origin}
                                                :allow-credentials? true))
             {"Access-Control-Allow-Origin" origin
              "Access-Control-Allow-Credentials" "true"}))))
  (testing "when origin is present and allow credentials is false"
    (let [origin "foobar.com"]
      (is (= (cors-common-headers origin (assoc default-cors-options
                                                :allowed-origins #{origin}))
             {"Access-Control-Allow-Origin" origin})))))
(deftest cors-preflight-check-max-age-test
  ;; The Access-Control-Max-Age header is emitted only when the policy
  ;; sets a :max-age.
  (testing "when max-age is present"
    (is (= (cors-preflight-check-max-age {:headers {}}
                                         (assoc default-cors-options
                                                :max-age 365))
           [:right {"Access-Control-Max-Age" "365"}])))
  (testing "when max-age is not present"
    (is (= (cors-preflight-check-max-age {:headers {}} default-cors-options)
           [:right {}]))))
(deftest cors-preflight-check-method-test
  ;; Validates handling of the Access-Control-Request-Method preflight
  ;; header: allowed method, disallowed method, and missing header.
  ;; NOTE: each branch previously bound an unused local `method`; the
  ;; bindings carried no behavior and have been removed.
  (testing "Access-Control-Request-Method header has a method allowed by the policy"
    (is (= (cors-preflight-check-method
            {:headers {"access-control-request-method" "GET"}}
            default-cors-options)
           [:right {"Access-Control-Allow-Methods" "GET, HEAD, POST"}])))
  (testing "Access-Control-Request-Method header has a method not allowed by the policy"
    (is (= (cors-preflight-check-method
            {:headers {"access-control-request-method" "DELETE"}}
            default-cors-options)
           [:left [(str "Method requested in Access-Control-Request-Method of "
                        "CORS request is not supported; requested: 'DELETE'; "
                        "supported are GET, HEAD, POST.")]])))
  (testing "Access-Control-Request-Method header is missing"
    (is (= (cors-preflight-check-method {:headers {}} default-cors-options)
           [:left [(str "Access-Control-Request-Method header is missing in CORS "
                        "preflight request.")]]))))
(deftest cors-preflight-check-request-headers-test
  ;; Policy whitelists one custom header; the simple headers (Origin,
  ;; Accept-Language, Content-Language, Accept) are implicitly allowed.
  (let [policy (merge default-cors-options
                      {:request-headers #{"X-Safe-To-Expose"}})]
    (testing "Access-Control-Request-Headers doesn't match policy request headers"
      ;; Non-whitelisted headers produce a :left with a diagnostic message.
      (is (= (cors-preflight-check-request-headers
              {:headers {"access-control-request-headers"
                         "X-Not-Safe-To-Expose, X-Blah-Bleh"}}
              policy)
             [:left [(str "HTTP headers requested in Access-Control-Request-Headers of "
                          "CORS request is not supported; requested: "
                          "'X-Not-Safe-To-Expose, X-Blah-Bleh'; "
                          "supported are 'X-Safe-To-Expose, Origin, Accept-Language, "
                          "Content-Language, Accept'.")]])))
    (testing "Access-Control-Request-Headers match policy request headers"
      ;; Whitelisted headers are echoed back joined with the simple headers.
      (is (= (cors-preflight-check-request-headers
              {:headers {"access-control-request-headers"
                         "X-Safe-To-Expose"}}
              policy)
             [:right {"Access-Control-Allow-Headers"
                      (str/join ", "
                                (set/union #{"X-Safe-To-Expose"}
                                           types/simple-headers-wo-content-type))}])))))
(deftest cors-preflight-headers-test
  ;; Happy path: a preflight whose method and headers are both allowed
  ;; yields the combined Allow-Headers / Allow-Methods / Max-Age map.
  (testing "With a request that complies with policy"
    (let [policy (merge default-cors-options
                        {:max-age 365
                         :allow-credentials? true
                         :request-headers #{"X-Cool-Header"}
                         :allowed-methods #{:get}})]
      (is (= (cors-preflight-headers {:headers {"access-control-request-headers"
                                                "X-Cool-Header"
                                                "access-control-request-method"
                                                "GET"}}
                                     policy)
             [:right {"Access-Control-Allow-Headers"
                      (str/join ", "
                                (set/union #{"X-Cool-Header"}
                                           types/simple-headers-wo-content-type))
                      "Access-Control-Allow-Methods" "GET, HEAD, POST"
                      "Access-Control-Max-Age" "365"}])))))
(deftest apply-cors-policy-test
  ;; End-to-end checks of apply-cors-policy over ring-style requests.
  ;; NOTE: the second test's description used to duplicate the first
  ;; one's, although it exercises an empty-string Origin header; the
  ;; descriptions are now disambiguated.
  (testing ":allowed-origins has a :star-origin value and no origin"
    (let [policy (merge default-cors-options
                        {:allowed-origins types/star-origin
                         :origin-varies? false})
          response (apply-cors-policy {:req {}
                                       :app (constantly {:status 200 :headers {} :body "test is alright"})
                                       :apply-headers apply-ring-headers
                                       :origin nil
                                       :cors-policy policy})]
      (is (= (:status response) 200))
      (is (= (:headers response) {"Access-Control-Allow-Origin" "*"}))
      (is (= (:body response) "test is alright"))))
  (testing ":allowed-origins has a :star-origin value and an empty origin"
    (let [policy (merge default-cors-options
                        {:allowed-origins types/star-origin
                         :origin-varies? false})
          response (apply-cors-policy {:req {}
                                       :app (constantly {:status 200 :headers {} :body "test is alright"})
                                       :apply-headers apply-ring-headers
                                       :origin ""
                                       :cors-policy policy})]
      (is (= (:status response) 200))
      (is (= (:headers response) {"Access-Control-Allow-Origin" "*"}))
      (is (= (:body response) "test is alright"))))
  (testing ":allowed-origins has a :match-origin value"
    ;; With :match-origin the supplied origin (even "") is reflected back.
    (let [policy (merge default-cors-options
                        {:allowed-origins types/match-origin
                         :origin-varies? false})
          response (apply-cors-policy {:req {}
                                       :app (constantly {:status 200 :headers {} :body "test is alright"})
                                       :apply-headers apply-ring-headers
                                       :origin ""
                                       :cors-policy policy})]
      (is (= (:status response) 200))
      (is (= (:headers response) {"Access-Control-Allow-Origin" ""}))
      (is (= (:body response) "test is alright")))))
| null | https://raw.githubusercontent.com/unbounce/encors/323d7e06816d0767197727ddb4720beeaca5822e/test/com/unbounce/encors/core_test.clj | clojure | (ns com.unbounce.encors.core-test
(:require [clojure.test :refer :all]
[clojure.set :as set]
[clojure.string :as str]
[com.unbounce.encors.types :as types]
[com.unbounce.encors.core :refer :all]))
(def default-cors-options
{:allowed-origins nil
:allowed-methods #{}
:exposed-headers nil
:request-headers #{}
:max-age nil
:allow-credentials? false
:origin-varies? true
:require-origin? true
:ignore-failures? false})
(deftest cors-common-headers-test
(testing "when origin is not present when origin-varies? is true"
(let [policy default-cors-options]
(is (= (cors-common-headers nil policy)
{"Access-Control-Allow-Origin" "*"
"Vary" "Origin"}))))
(testing "when origin is not present and origin-varies? is false"
(let [policy (merge default-cors-options
{:origin-varies? false})]
(is (= (cors-common-headers nil policy)
{"Access-Control-Allow-Origin" "*"}))))
(testing "when origin is present and allow credentials is true"
(let [origin "foobar.com"
policy (merge default-cors-options {:allowed-origins #{origin}
:allow-credentials? true})]
(is (= (cors-common-headers origin policy)
{"Access-Control-Allow-Origin" origin
"Access-Control-Allow-Credentials" "true"}))))
(testing "when origin is present and allow credentials is false"
(let [origin "foobar.com"
policy (merge default-cors-options
{:allowed-origins #{origin}})]
(is (= (cors-common-headers origin policy)
{"Access-Control-Allow-Origin" origin})))))
(deftest cors-preflight-check-max-age-test
(testing "when max-age is present"
(let [max-age 365
policy (merge default-cors-options
{:max-age 365})]
(is (= (cors-preflight-check-max-age {:headers {}} policy)
[:right {"Access-Control-Max-Age" (str max-age)}]))))
(testing "when max-age is not present"
(let [policy default-cors-options]
(is (= (cors-preflight-check-max-age {:headers {}} policy)
[:right {}])))))
(deftest cors-preflight-check-method-test
(testing "Access-Control-Request-Method header has a method allowed by the policy"
(let [method :get
policy default-cors-options]
(is (= (cors-preflight-check-method
{:headers {"access-control-request-method" "GET"}}
policy)
[:right {"Access-Control-Allow-Methods" "GET, HEAD, POST"}]))))
(testing "Access-Control-Request-Method header has a method not allowed by the policy"
(let [method :delete
policy default-cors-options]
(is (= (cors-preflight-check-method
{:headers {"access-control-request-method" "DELETE"}}
policy)
[:left [(str "Method requested in Access-Control-Request-Method of "
"CORS request is not supported; requested: 'DELETE'; "
"supported are GET, HEAD, POST.")]]))))
(testing "Access-Control-Request-Method header is missing"
(let [method :get
policy default-cors-options]
(is (= (cors-preflight-check-method {:headers {}} policy)
[:left [(str "Access-Control-Request-Method header is missing in CORS "
"preflight request.")]])))))
(deftest cors-preflight-check-request-headers-test
(let [policy (merge default-cors-options
{:request-headers #{"X-Safe-To-Expose"}})]
(testing "Access-Control-Request-Headers doesn't match policy request headers"
(is (= (cors-preflight-check-request-headers
{:headers {"access-control-request-headers"
"X-Not-Safe-To-Expose, X-Blah-Bleh"}}
policy)
[:left [(str "HTTP headers requested in Access-Control-Request-Headers of "
"CORS request is not supported; requested: "
"'X-Not-Safe-To-Expose, X-Blah-Bleh'; "
"supported are 'X-Safe-To-Expose, Origin, Accept-Language, "
"Content-Language, Accept'.")]])))
(testing "Access-Control-Request-Headers match policy request headers"
(is (= (cors-preflight-check-request-headers
{:headers {"access-control-request-headers"
"X-Safe-To-Expose"}}
policy)
[:right {"Access-Control-Allow-Headers"
(str/join ", "
(set/union #{"X-Safe-To-Expose"}
types/simple-headers-wo-content-type))}])))))
(deftest cors-preflight-headers-test
(testing "With a request that complies with policy"
(let [policy (merge default-cors-options
{:max-age 365
:allow-credentials? true
:request-headers #{"X-Cool-Header"}
:allowed-methods #{:get}})]
(is (= (cors-preflight-headers {:headers {"access-control-request-headers"
"X-Cool-Header"
"access-control-request-method"
"GET"}}
policy)
[:right {"Access-Control-Allow-Headers"
(str/join ", "
(set/union #{"X-Cool-Header"}
types/simple-headers-wo-content-type))
"Access-Control-Allow-Methods" "GET, HEAD, POST"
"Access-Control-Max-Age" "365"}])))))
(deftest apply-cors-policy-test
(testing ":allowed-origins has a :star-origin value"
(let [policy (merge default-cors-options
{:allowed-origins types/star-origin
:origin-varies? false})
response (apply-cors-policy {:req {}
:app (constantly {:status 200 :headers {} :body "test is alright"})
:apply-headers apply-ring-headers
:origin nil
:cors-policy policy})]
(is (= (:status response) 200))
(is (= (:headers response) {"Access-Control-Allow-Origin" "*"}))
(is (= (:body response) "test is alright"))))
(testing ":allowed-origins has a :star-origin value"
(let [policy (merge default-cors-options
{:allowed-origins types/star-origin
:origin-varies? false})
response (apply-cors-policy {:req {}
:app (constantly {:status 200 :headers {} :body "test is alright"})
:apply-headers apply-ring-headers
:origin ""
:cors-policy policy})]
(is (= (:status response) 200))
(is (= (:headers response) {"Access-Control-Allow-Origin" "*"}))
(is (= (:body response) "test is alright"))))
(testing ":allowed-origins has a :match-origin value"
(let [policy (merge default-cors-options
{:allowed-origins types/match-origin
:origin-varies? false})
response (apply-cors-policy {:req {}
:app (constantly {:status 200 :headers {} :body "test is alright"})
:apply-headers apply-ring-headers
:origin ""
:cors-policy policy})]
(is (= (:status response) 200))
(is (= (:headers response) {"Access-Control-Allow-Origin" ""}))
(is (= (:body response) "test is alright")))))
| |
fcaf3e2842814db7dff2961d901ac7243db00be835a738ac4d55a0d11544f9c2 | glguy/advent2021 | Advent.hs | # Language ImportQualifiedPost , OverloadedStrings #
|
Module : Advent
Description : Solution helper library
Copyright : ( c ) , 2018 - 2021
License : ISC
Maintainer :
This module re - exports the most commonly used modules .
* " Advent . Prelude " is full of useful helper functions
* " Advent . Input " provides quick access to inputs in a few formats
* " Advent . Format " provides a quasi - quoter for making input parsers
Module : Advent
Description : Solution helper library
Copyright : (c) Eric Mertens, 2018-2021
License : ISC
Maintainer :
This module re-exports the most commonly used modules.
* "Advent.Prelude" is full of useful helper functions
* "Advent.Input" provides quick access to inputs in a few formats
* "Advent.Format" provides a quasi-quoter for making input parsers
-}
module Advent (
module Advent.Prelude,
module Advent.Input,
module Advent.Format,
) where
import Advent.Prelude
import Advent.Input
import Advent.Format
| null | https://raw.githubusercontent.com/glguy/advent2021/21c6f1e315fd0351eacc59e295207913973f2423/common/Advent.hs | haskell | # Language ImportQualifiedPost , OverloadedStrings #
|
Module : Advent
Description : Solution helper library
Copyright : ( c ) , 2018 - 2021
License : ISC
Maintainer :
This module re - exports the most commonly used modules .
* " Advent . Prelude " is full of useful helper functions
* " Advent . Input " provides quick access to inputs in a few formats
* " Advent . Format " provides a quasi - quoter for making input parsers
Module : Advent
Description : Solution helper library
Copyright : (c) Eric Mertens, 2018-2021
License : ISC
Maintainer :
This module re-exports the most commonly used modules.
* "Advent.Prelude" is full of useful helper functions
* "Advent.Input" provides quick access to inputs in a few formats
* "Advent.Format" provides a quasi-quoter for making input parsers
-}
module Advent (
module Advent.Prelude,
module Advent.Input,
module Advent.Format,
) where
import Advent.Prelude
import Advent.Input
import Advent.Format
| |
9ebb2ab2d40c12cf58099f15f091f618b364759d082b8a76e66300e58ec78ef5 | philzook58/fib-anyon | Cat.hs | # LANGUAGE GADTs , StandaloneDeriving , NoImplicitPrelude , FlexibleInstances , RankNTypes ,
TypeApplications , ScopedTypeVariables , MultiParamTypeClasses , FunctionalDependencies , FlexibleContexts ,
UndecidableInstances , AllowAmbiguousTypes , TypeFamilies ,
ConstraintKinds , TypeOperators , DataKinds , PolyKinds , InstanceSigs , NoMonomorphismRestriction
#
TypeApplications, ScopedTypeVariables, MultiParamTypeClasses, FunctionalDependencies, FlexibleContexts,
UndecidableInstances, AllowAmbiguousTypes, TypeFamilies ,
ConstraintKinds, TypeOperators, DataKinds, PolyKinds, InstanceSigs, NoMonomorphismRestriction
#-}
module Cat where
import Control.Category
import Prelude hiding ((.), id)
import Fib
import Vec
import Control.Monad ((<=<))
import GHC.TypeNats
import Control.Arrow ((***))
import Data.Tuple (swap)
-- make the leftmost porton of a match b , turning it into c
class LeftMatch pat a c | pat a - > c where
leftmatch : : FibTree e a - > Q ( FibTree e c )
class RightMatch pat a c | pat a - > c where
rightmatch : : FibTree e a - > Q ( FibTree e c )
instance ( LeftMatch l a ( l',r ' ) ,
RightMatch l l ' l ,
LeftMatch r r ' r '' ) = > LeftMatch ( l , r ) a ( l , r '' ) where -- is this necessary ? A larger pattern should be a type error
leftmatch x = do
x ' < - leftmatch @l x
x '' < - lmap ( rightmatch @l ) x '
rmap ( leftmatch @r ) x ''
-- instance LeftMatch ( ( a , b),c )
-- pullLeftLeaf only does fmove ' so this ca n't be enough
-- leftmatch l a , rightmatch r a ,
-- FullMatch l a
instance ( PullLeftLeaf a c ) = > where -- c ~ ( Tau , c ' ) I should have a check like this . Sometimes a = Tau
leftmatch x = pullLeftLeaf x
instance ( PullLeftLeaf a c ) = > LeftMatch I d a c where
leftmatch x = pullLeftLeaf x
instance ( RightMatch r a ( l',r ' ) ,
LeftMatch r r ' r ,
RightMatch l l ' l '' ) = > RightMatch ( l , r ) a ( l'',r ) where -- is this necessary ? A larger pattern should be a type error
rightmatch x = do
x ' < - rightmatch @l x
x '' < - rmap ( leftmatch @l ) x '
lmap ( rightmatch @r ) x ''
instance ( PullRightLeaf a c ) = > RightMatch Tau a c where -- c ~ ( Tau , c ' ) I should have a check like this . Sometimes a = Tau
rightmatch x = pullRightLeaf x
instance ( PullRightLeaf a c ) = > RightMatch I d a c where
rightmatch x = pullRightLeaf x
b a b = > a b where
-- instance LeftMatch b a b = > a b where
-- instead use constraint kinds
type a b = LeftMatch b a b
t1 = , Tau ) ) ( TTT TLeaf ( TTT TLeaf TLeaf ) )
--t2 , Tau),Tau ) ( TTT TLeaf ( TTT TLeaf TLeaf ) )
reassoc : : forall b a b = > FibTree e a - > Q ( FibTree e b )
reassoc x = leftmatch @b x
-- make the leftmost porton of a match b, turning it into c
class LeftMatch pat a c | pat a -> c where
leftmatch :: FibTree e a -> Q (FibTree e c)
class RightMatch pat a c | pat a -> c where
rightmatch :: FibTree e a -> Q (FibTree e c)
instance (LeftMatch l a (l',r'),
RightMatch l l' l,
LeftMatch r r' r'') => LeftMatch (l,r) a (l,r'') where -- is this necessary? A larger pattern should be a type error
leftmatch x = do
x' <- leftmatch @l x
x'' <- lmap (rightmatch @l) x'
rmap (leftmatch @r) x''
-- instance LeftMatch ((a,b),c)
-- pullLeftLeaf only does fmove' so this can't be enough
-- leftmatch l a, rightmatch r a,
-- FullMatch l a
instance (PullLeftLeaf a c) => LeftMatch Tau a c where -- c ~ (Tau,c') I should have a check like this. Sometimes a = Tau
leftmatch x = pullLeftLeaf x
instance (PullLeftLeaf a c) => LeftMatch Id a c where
leftmatch x = pullLeftLeaf x
instance (RightMatch r a (l',r'),
LeftMatch r r' r ,
RightMatch l l' l'') => RightMatch (l,r) a (l'',r) where -- is this necessary? A larger pattern should be a type error
rightmatch x = do
x' <- rightmatch @l x
x'' <- rmap (leftmatch @l) x'
lmap (rightmatch @r) x''
instance (PullRightLeaf a c) => RightMatch Tau a c where -- c ~ (Tau,c') I should have a check like this. Sometimes a = Tau
rightmatch x = pullRightLeaf x
instance (PullRightLeaf a c) => RightMatch Id a c where
rightmatch x = pullRightLeaf x
--class LeftMatch b a b => ReAssoc a b where
-- instance LeftMatch b a b => ReAssoc a b where
-- instead use constraint kinds
type ReAssoc a b = LeftMatch b a b
t1 = leftmatch @(Tau,(Tau,Tau)) (TTT TLeaf (TTT TLeaf TLeaf))
--t2 = leftmatch @((Tau,Tau),Tau) (TTT TLeaf (TTT TLeaf TLeaf))
reassoc :: forall b a e. ReAssoc a b => FibTree e a -> Q (FibTree e b)
reassoc x = leftmatch @b x
-}
-- | Re-bracket a fusion tree of shape @a@ into shape @b@.
-- Presumably @a@ and @b@ must carry the same leaves in the same order,
-- differing only in association -- TODO confirm (the instances below
-- only ever move brackets, never leaves).
class ReAssoc a b where
  reassoc :: FibTree e a -> Q (FibTree e b)
-- Recursive case: first gather exactly @Count l'@ leaves into the left
-- branch, then re-bracket each side recursively.
instance (n ~ Count l',
          gte ~ CmpNat n (Count l),
          LeftCollect n gte (l,r) (l'',r''),
          ReAssoc l'' l',
          ReAssoc r'' r') => ReAssoc (l,r) (l',r') where
  reassoc t = leftcollect @n t >>= rmap reassoc >>= lmap reassoc
-- instance {-# OVERLAPS #-} ReAssoc a a where
--   reassoc = pure
-- Base cases: a lone leaf is already in the requested shape.
instance ReAssoc Tau Tau where
  reassoc = pure
instance ReAssoc Id Id where
  reassoc = pure
\((x , y),z ) - > ( x , ( y , z ) )
-- Compiling to monoidal categories .
-- Could also find ways to pad in or out any ( ) units automatically .
-- Can I compile a linear function to monoidal categories using the associator ?
-- Then I could compile quantum functions . Neat .
-- finding good swaps is tough though .
linear typed functions are symmettric monoidal categories , pretty sure . Not cartesian .
-- The automatic associator
This right here has a lot of what we need to do compiling to categories of just monoidal categries .
\((x,y),z) -> (x, (y,z))
-- Compiling to monoidal categories.
-- Could also find ways to pad in or out any () units automatically.
-- Can I compile a linear function to monoidal categories using the associator?
-- Then I could compile quantum functions. Neat.
-- finding good swaps is tough though.
linear typed functions are symmettric monoidal categories, pretty sure. Not cartesian.
-- The automatic associator
This right here has a lot of what we need to do compiling to categories of just monoidal categries.
-}
-- Smoke tests: re-bracket a right-leaning three-leaf tree into each of
-- the two possible associations (t4 is the identity-shaped case).
t4 :: Q (FibTree Tau (Tau,(Tau,Tau)))
t4 = reassoc (TTT TLeaf (TTT TLeaf TLeaf))
t5 :: Q (FibTree Tau ((Tau,Tau),Tau))
t5 = reassoc (TTT TLeaf (TTT TLeaf TLeaf))
instance n ~ Count l ,
n ' ~ Count ' l ,
gte ~ CmpNat l l '
' gte = > ' ( ( a , b),r ) ( l',r ' ) ' GT
reassoc ' x = do
-- Doing it this way is not efficient .
-- We do n't need to full right associate to get it over there .
-- lcamap of n and n+1
instance ( ( l , a),b ) ( l',r ' ) = > ' ( l,(a , b ) ) ( l',r ' ) ' LT
reassoc ' x = do
x ' < - rmap pullLeftLeaf x
x '' < - fmove x ' -- now we add 1 to the smaller left side
reassoc ' @gte x '' -- and try again
instance l l ' , r r ' = > ' ( l , r ) ( l',r ' ) ' EQ where
reassoc ' x = do
x ' < - lmap reassoc x
rmap reassoc x '
instance ' Tau Tau ' EQ where
ressoc ' = pure
instance ' I d I d ' EQ where
ressoc ' = pure
instance n ~ Count l,
n' ~ Count 'l,
gte ~ CmpNat l l'
ReAssoc' gte => ReAssoc' ((a,b),r) (l',r') 'GT
reassoc' x = do
-- Doing it this way is not efficient.
-- We don't need to full right associate to get it over there.
-- lcamap of n and n+1
instance ReAssoc ((l,a),b) (l',r') gte => ReAssoc' (l,(a,b)) (l',r') 'LT
reassoc' x = do
x' <- rmap pullLeftLeaf x
x'' <- fmove x' -- now we add 1 to the smaller left side
reassoc' @gte x'' -- and try again
instance ReAssoc l l', ReAssoc r r' => ReAssoc' (l,r) (l',r') 'EQ where
reassoc' x = do
x' <- lmap reassoc x
rmap reassoc x'
instance ReAssoc' Tau Tau 'EQ where
ressoc' = pure
instance ReAssoc' Id Id 'EQ where
ressoc' = pure
-}
-- Collect the first @n@ leaves of a tree into its left subtree.
-- The output shape @o@ is determined by @n@ and the input shape through
-- the LeftCollect functional dependency (Collect n a b | n a -> b).
leftcollect :: forall n gte l r o e. (gte ~ CmpNat n (Count l), LeftCollect n gte (l,r) o) => FibTree e (l,r) -> Q (FibTree e o)
leftcollect x = leftcollect' @n @gte x
-- The process is like a binary search: LeftCollect pulls n leaves into
-- the left branch of the pair.  The gte index records how n compares to
-- the size of the current left subtree.  If n is greater than the size
-- of l, we recurse into the right branch with a reduced leaf count and
-- then reshuffle those leaves into the left tree.
class LeftCollect n gte a b | n gte a -> b where
  leftcollect' :: FibTree e a -> Q (FibTree e b)
-- n exceeds the size k of the left subtree: recursively collect the
-- remaining n-k leaves inside the right subtree, giving (l,(l'',r''))
-- with l'' of size n-k, then one associativity move packs l and l''
-- together on the left (size k + (n-k) = n).
instance (k ~ Count l,
          r ~ (l',r'),
          n' ~ (n - k),
          gte ~ CmpNat n' (Count l'),
          LeftCollect n' gte r (l'',r'')) => LeftCollect n 'GT (l,r) ((l,l''),r'') where
  leftcollect' t = rmap (leftcollect @n') t >>= fmove
-- n is smaller than the left subtree: collect within it, giving
-- ((l'',r''),r) with l'' of size n, then rotate so l'' alone sits on
-- the left.
instance (l ~ (l',r'),
          gte ~ CmpNat n (Count l'),
          LeftCollect n gte l (l'',r'')) => LeftCollect n 'LT (l,r) (l'',(r'',r)) where
  leftcollect' t = lmap (leftcollect @n) t >>= fmove'
-- n equals the left subtree's size exactly: already collected.
instance LeftCollect n 'EQ (l,r) (l,r) where
  leftcollect' = pure
-- We could define these functions for arbitrary monoidal categories with
-- reassociators (which is all of them).  The Count method requires things
-- to be concrete though; it's gonna be ugly -- would need more of the
-- incoherent trick: Count (a,b), or Count a = 1 as the default case.
-- Examples: collect two leaves / one leaf of a left-leaning tree.
t1 = leftcollect @2 (TTT (TTT TLeaf TLeaf) TLeaf)
t2 = leftcollect @1 (TTT (TTT TLeaf TLeaf) TLeaf)
newtype FibOp ' a b = FibOp ' { runFibOp ' : : forall e a ' b ' . ( a a ' , b ' b ) = > FibTree e a ' - > Q ( FibTree e b ' ) }
--newtype FibOp ' a b = FibOp ' { runFibOp ' : : forall e a ' . ( a a ' , a ' a ) = > FibTree e a ' - > Q ( FibTree e b ) }
-- newtype FibOp ' a b = FibOp ' { runFibOp ' : : forall e a ' b ' . ( a a ' , a ' a , b b ' , b ' b ) = > FibTree e a ' - > Q ( FibTree e b ' ) }
instance Category FibOp ' where
i d : : forall a. FibOp ' a a
i d = FibOp ' pure
( . ) : : forall a b c. FibOp ' b c - > FibOp ' a b - > FibOp ' a c
( FibOp ' f ) . ( FibOp ' g ) = undefined -- FibOp ' $ reassoc < = < f < = < reassoc < = < reassoc < = < g < = < reassoc
{ -$ \x - > do
x ' x
x '' < - reassoc x '
f x ''
newtype FibOp' a b = FibOp' {runFibOp' :: forall e a' b'. (ReAssoc a a', ReAssoc b' b) => FibTree e a' -> Q (FibTree e b')}
--newtype FibOp' a b = FibOp' {runFibOp' :: forall e a'. (ReAssoc a a', ReAssoc a' a) => FibTree e a' -> Q (FibTree e b)}
-- newtype FibOp' a b = FibOp' {runFibOp' :: forall e a' b'. (ReAssoc a a', ReAssoc a' a, ReAssoc b b', ReAssoc b' b) => FibTree e a' -> Q (FibTree e b')}
instance Category FibOp' where
id :: forall a. FibOp' a a
id = FibOp' pure
(.) :: forall a b c. FibOp' b c -> FibOp' a b -> FibOp' a c
(FibOp' f) . (FibOp' g) = undefined -- FibOp' $ reassoc <=< f <=< reassoc <=< reassoc <=< g <=< reassoc
{-$ \x -> do
x' <- g x
x'' <- reassoc x'
f x'' -}
-}
( f < = < reassoc < = )
t3 = leftcollect @3 ( TTT ( TTT TLeaf TLeaf ) TLeaf ) -- error
--
is just a recursive Collect n. Like how RightCaonical is recursive pullLeft
-- Composition with an automatic re-bracketing inserted between the two
-- maps, so stages producing and consuming differently-associated (but
-- ReAssoc-compatible) shapes can be chained.
(...) :: ReAssoc b b' => FibOp b' c -> FibOp a b -> FibOp a c
(FibOp f) ... (FibOp g) = FibOp (\t -> g t >>= reassoc >>= f)
newtype FibOp c a b = FibOp { runFib : : ( FibTree c a - > Q ( FibTree c b ) ) }
newtype FibOp a b = FibOp {runFib :: (forall c. FibTree c a -> Q (FibTree c b))}
type FibOp ' c a b = FibTree c a - > Q ( FibTree c b )
newtype LinOp a b = LinOp {runLin :: a -> Q b}
newtype LinOp a b = LinOp { runLin : : ( Eq a , Eq b ) = > Q ( a , b ) } -- More matrix like form
-- This is a tree data type: basically an alternative form of the typed
-- vector, and an alternative to raw tuples (a,a).  It enforces that all
-- the leaves share the same index type i.
data TreeVec i a where
  Leaf :: i -> TreeVec i i
  Node :: TreeVec i a -> TreeVec i b -> TreeVec i (a,b)
-- parity sort?
-- liftList :: ([a] -> [a]) -> TreeVec i a -> TreeVec i a
--
what are the invariants here ? Runs on already antisymmettric vectors ? Or runs on possibly not symmettrized vectors ?
-- should there be a tree structure?
-- Operators on (anti)symmetrized tree vectors.  NOTE(review): invariants
-- unclear -- do these run on already-antisymmetrized vectors, or on
-- possibly unsymmetrized ones?  Confirm before relying on either.
newtype AntiOp i a b = AntiOp {runAnti :: TreeVec i a -> Q (TreeVec i b)}
newtype SymOp i a b = SymOp {runSym :: TreeVec i a -> Q (TreeVec i b)}
-- type Fock a = Q [a]
data a b where
: : FibTree e a - > FibTree e b - class FibIdent a where
expansion : : [ ( a a ) ]
instance where
expansion = pure ( TLeaf )
instance FibExpansion I d where
expansion = pure ( ILeaf )
instance ( FibExpansion a , FibExpansion b ) = > FibExpansion ( a , b ) where
expansion = pure ( ILeaf )
e1 = expansion @a
e2 = expansion @b
FibTree e a - > FibTree f b - > FibTree
FibOuter a a - > FibOuter b b - > [ ( a , b ) ( a , b ) ]
( TLeaf ) ( TLeaf ) = [ ( TTT TLeaf TLeaf ) ( TTT TLeaf ) , ( ITT TLeaf TLeaf ) ( ITT TLeaf TLeaf ) ]
data FibOuter a b where
FibOuter :: FibTree e a -> FibTree e b -> FibOuter a b
class FibIdent a where
expansion :: [(FibOuter a a)]
instance FibExpansion Tau where
expansion = pure (FibOuter TLeaf TLeaf)
instance FibExpansion Id where
expansion = pure (FibOuter ILeaf ILeaf)
instance (FibExpansion a , FibExpansion b) => FibExpansion (a,b) where
expansion = pure (FibOuter ILeaf ILeaf)
e1 = expansion @a
e2 = expansion @b
FibTree e a -> FibTree f b -> FibTree
FibOuter a a -> FibOuter b b -> [FibOuter (a,b) (a,b)]
(FibOuter TLeaf TLeaf) (FibOuter TLeaf TLeaf) = [FibOuter (TTT TLeaf TLeaf) (TTT TLeaf TLeaf), (ITT TLeaf TLeaf) (ITT TLeaf TLeaf)]
-}
instance Category LinOp where
  id = LinOp pure
  -- Kleisli composition of the underlying maps.
  (LinOp f) . (LinOp g) = LinOp (\v -> g v >>= f)
instance Category (AntiOp i) where
  id = AntiOp pure
  -- TODO(original note): as part of composition, we may want a re-sort
  -- with a parity sign; plain Kleisli composition ignores that.
  (AntiOp f) . (AntiOp g) = AntiOp (f <=< g)
-- Fusion-tree maps compose as Kleisli arrows of Q.
instance Category (FibOp) where
  id = FibOp pure
  (FibOp f) . (FibOp g) = FibOp (f <=< g)
-- | A monoidal category: a tensor product on morphisms plus the
-- structural isomorphisms for re-bracketing and for introducing or
-- eliminating the unit object (here played by ()).
class Category k => Monoidal k where
  parC :: k a c -> k b d -> k (a,b) (c,d)  -- tensor two morphisms
  assoc :: k ((a,b),c) (a,(b,c))           -- associator
  unassoc :: k (a,(b,c)) ((a,b),c)         -- inverse associator
  leftUnitor :: k ((),a) a
  leftUnitor' :: k a ((),a)
  rightUnitor :: k (a,()) a
  rightUnitor' :: k a (a,())
-- maybe we just ignore these. They are annoying.
{-
type I :: *
idl :: k (a, I) a
idl' :: k a (a, I)
idr :: k (I,a) a
idr' :: k a (I,a) -}
instance Num (LinOp a a) where
  -- Pointwise sum of the two image vectors.
  (LinOp f) + (LinOp g) = LinOp (\v -> f v <> g v)
  -- Multiplication is operator composition.
  (*) = (.)
  -- Scale every image vector by -1.
  negate (LinOp f) = LinOp (((-1) .*) . f)
  -- fromInteger n behaves as n times the identity operator.
  fromInteger n = LinOp (\v -> W [(v, fromInteger n)])
  abs _ = error "Abs not obvious for LinOp"
  signum _ = error "Signum not obvious for LinOp"
instance Monoidal (FibOp) where
  -- Tensor: run g on the right subtree, then f on the left; the same as
  -- composing the two lifted maps.
  parC (FibOp f) (FibOp g) = FibOp (lmap f <=< rmap g)
  -- The associators are the fmove operations on fusion trees.
  assoc = FibOp fmove'
  unassoc = FibOp fmove
  leftUnitor = FibOp leftUnit
  leftUnitor' = FibOp leftUnit'
  rightUnitor = FibOp rightUnit
  rightUnitor' = FibOp rightUnit'
instance Monoidal LinOp where
  -- Tensor of operators acts factor-wise via the Kronecker product.
  parC (LinOp f) (LinOp g) = LinOp $ \(a,b) -> kron (f a) (g b)
  -- The structural maps lift the corresponding pure-function versions:
  -- the bare assoc/unassoc/... on the right resolve to the
  -- Monoidal (->) instance below.
  assoc = LinOp (pure . assoc)
  unassoc = LinOp (pure . unassoc)
  leftUnitor = LinOp (pure . leftUnitor)
  leftUnitor' = LinOp (pure .leftUnitor')
  rightUnitor = LinOp (pure . rightUnitor)
  rightUnitor' = LinOp (pure . rightUnitor')
-- not clear how this should work .
instance Monoidal AntiOp where
parC ( AntiOp f ) ( AntiOp g ) = AntiOp $ \(a , b ) - > kron ( f a ) ( g b ) -- This is where we need c to be forall . We want to be able to par ... There is n't a unique way to do Tau ?
assoc = AntiOp ( pure . assoc )
unassoc = AntiOp ( pure . unassoc )
leftUnitor = AntiOp ( pure . leftUnitor )
leftUnitor ' = AntiOp ( pure .leftUnitor ' )
rightUnitor = AntiOp ( pure . rightUnitor )
rightUnitor ' = AntiOp ( pure . rightUnitor ' )
-- not clear how this should work.
instance Monoidal AntiOp where
parC (AntiOp f) (AntiOp g) = AntiOp $ \(a,b) -> kron (f a) (g b) -- This is where we need c to be forall. We want to be able to par... There isn't a unique way to do Tau?
assoc = AntiOp (pure . assoc)
unassoc = AntiOp (pure . unassoc)
leftUnitor = AntiOp (pure . leftUnitor)
leftUnitor' = AntiOp (pure .leftUnitor')
rightUnitor = AntiOp (pure . rightUnitor)
rightUnitor' = AntiOp (pure . rightUnitor')
-}
-- Ordinary functions on pairs form a monoidal category.
instance Monoidal (->) where
  parC f g (a,b) = (f a, g b)
  assoc ((a,b),c) = (a,(b,c))
  unassoc (a,(b,c)) = ((a,b),c)
  leftUnitor (_, v) = v
  leftUnitor' v = ((), v)
  rightUnitor (v, _) = v
  rightUnitor' v = (v, ())
type I = I d
idl = FibOp case t of
TTI
idl ' =
type I = Id
idl = FibOp $ \t -> case t of
TTI
idl' = -}
Dagger ? Dual ? Rigid ? , Compact closed
cap : : k ( a , a ) I
cup : : k I ( a , a )
-- daulities . CPS , Either < - > TUple , Adjunctions
type Dual v = Q v - > Double
type Dual a = FibOp a I
b a = DualFibOp Dual a - > Dual b
dag : : FibOp a b - > DualFibOp b a
dag ' : : uses dot ?
-- Dual r a = Dual ( a - > r )
-- a - > b ~ CoYoneda ( a - > r , r - > b )
-- ( a - > ( ) , a ) - > ( )
-- LinOp a ( LinOp b c ) = a - > Q ( b - > Q c ) ~ ( a , b ) - > Q c
-- yeah I think we can swing that
-- Dual a = LinOp a ( )
-- apply : : LinOp ( Dual a , a ) ( ) -- the dual is n't really on the index though ?
-- Bounded Enum a = > LinOp ( ) ( Dual a , a )
--
-- a - > ( , ( ) )
-- in data - category he uses FibOp a a as a stand in for object a
-- in this case FibOp I d I d = ILeaf , FibOp = TLeaf . They are indeed good stand ins
-- what am I doing . Duality is a bog
-- newtype Dagger a b = Dagger { primal : : LinOp a b , dual : : LinOp b a }
-- dag ( Dagger f g ) = Dagger g f
-- dag = FibOp . dot
--
-- We need to also make a category that draws diagrams
-talk.net/2011/09/the-braids-package/
( lmap ( ) ) ( rmap ( ttt ) ) ttt
newtype CFibOp a b = CFibOp { runCFib : : ( forall c. FibTree c ( RightAssoc a ) - > Q ( FibTree c ( RightAssoc b ) ) ) }
assoc = i d
unassoc = i d
par = ?
class Monoidal CFibOp where
type Prod : : * - > * - > *
Prod = Append
par f g = rightAssoc > = > nmap @n ( ( lmap f ) > = > ( rmap g ) )
Dagger? Dual? Rigid?, Compact closed
cap :: k (a,a) I
cup :: k I (a,a)
-- daulities. CPS, Either <-> TUple, Adjunctions
type Dual v = Q v -> Double
type Dual a = FibOp a I
DualFibOp b a = DualFibOp Dual a -> Dual b
dag :: FibOp a b -> DualFibOp b a
dag' :: uses dot?
-- Dual r a = Dual (a -> r)
-- a -> b ~ CoYoneda (a -> r, r -> b)
-- (a -> (), a) -> ()
-- LinOp a (LinOp b c) = a -> Q (b -> Q c) ~ (a,b) -> Q c
-- yeah I think we can swing that
-- Dual a = LinOp a ()
-- apply :: LinOp (Dual a, a) () -- the dual isn't really on the index though?
-- Bounded Enum a => LinOp () (Dual a, a)
--
-- a -> ( , () )
-- in data-category he uses FibOp a a as a stand in for object a
-- in this case FibOp Id Id = ILeaf, FibOp Tau Tau = TLeaf. They are indeed good stand ins
-- what am I doing. Duality is a bog
-- newtype Dagger a b = Dagger {primal :: LinOp a b, dual :: LinOp b a}
-- dag (Dagger f g) = Dagger g f
-- dag = FibOp . dot
--
-- We need to also make a category that draws diagrams
-talk.net/2011/09/the-braids-package/
(lmap () ) (rmap (ttt) ) ttt
newtype CFibOp a b = CFibOp {runCFib :: (forall c. FibTree c (RightAssoc a) -> Q (FibTree c (RightAssoc b)))}
assoc = id
unassoc = id
par = ?
class Monoidal CFibOp where
type Prod :: * -> * -> *
Prod = Append
par f g = rightAssoc >=> nmap @n ((lmap f) >=> (rmap g))
-}
-- | A braided monoidal category: a crossing of the two tensor factors,
-- with the two crossing directions (over / under) inverse to each other.
class Monoidal k => Braided k where
  over :: k (a,b) (b,a)
  under :: k (a,b) (b,a)
  {- law: over . under = id -}
-- Fusion trees braid via braid / braid' (the two crossing directions).
instance Braided FibOp where
  over = FibOp braid
  under = FibOp braid'
-- Ordinary functions and LinOp are symmetric: both crossings are the
-- plain pair swap.
instance Braided (->) where
  over = swap
  under = swap
instance Braided (LinOp) where
  over = LinOp (pure . swap)
  under = LinOp (pure . swap)
{-
instance Braided (AntiOp i) where
over | = AntiOp (pure . swap) -1 .*
under = over -- AntiOp (pure . swap)
-}
( Eq a ) = > Q ( a , b ) - > LinOp a b
-- LinOp (LinOp a b) () ->
-- () -> (a,a)
-- (a,a) -> ()
( Bounded a , a ) = > LinOp ( ) ( a , a )
-- Eq a => LinOp (a,a) () ~ curry dot
-- curry :: k a (k b c) -> k (a,b) c
-- uncurry :: k (a,b) c)-> k a (k b c)
Dual k a b = forall ( k b r ) ( k a r )
-- type Dual a = LinOp a ()
-- newtype Dagger a = Dagger a
-- class Dagger k where
-- type Dual :: * -> *
dag : : k a b - > k ( Dual b ) ( Dual a )
-- dag' :: k (Dual a) (Dual b) -> k' b a
-- Dual k
class Monoidal k = > Cartesian k where
fstC : : k ( a , b ) a
sndC : : k ( a , b ) b
dupC : : k a ( a , a )
class Cartesian k = > Closed k where
applyC : : k ( k a b , a ) b
curryC : : k ( a , b ) c - > k a ( k b c )
uncurryC : : k a ( k b c ) - > k ( a , b ) c
fanC f g = ( parC f g ) . dupC
idC : : Category k = > k a a
idC = i d
data FreeCat a b where
Comp : : FreeCat b c - > FreeCat a b - > FreeCat a c
I d : : FreeCat a a
Fst : : FreeCat ( a , b ) a
Snd : : FreeCat ( a , b ) b
Dup : : FreeCat a ( a , a )
Par : : FreeCat a b - > FreeCat c d - > FreeCat ( a , c ) ( b , d )
Add : : FreeCat ( a , a ) a
Mul : : FreeCat ( a , a ) a
Apply : : FreeCat ( FreeCat a b , a ) b
Curry : : FreeCat ( a , b ) c - > FreeCat a ( FreeCat b c )
Uncurry : : FreeCat a ( FreeCat b c ) - > FreeCat ( a , b ) c
instance Closed FreeCat where
applyC = Apply
curryC = Curry
uncurryC = Uncurry
deriving instance Show ( FreeCat a b )
instance Category FreeCat where
( . ) = Comp
i d = I d
instance where
parC = Par
instance Cartesian FreeCat where
fstC = Fst
sndC = Snd
dupC
class Monoidal k => Cartesian k where
fstC :: k (a,b) a
sndC :: k (a,b) b
dupC :: k a (a,a)
class Cartesian k => Closed k where
applyC :: k (k a b,a) b
curryC :: k (a,b) c -> k a (k b c)
uncurryC :: k a (k b c) -> k (a,b) c
fanC f g = (parC f g) . dupC
idC :: Category k => k a a
idC = id
data FreeCat a b where
Comp :: FreeCat b c -> FreeCat a b -> FreeCat a c
Id :: FreeCat a a
Fst :: FreeCat (a,b) a
Snd :: FreeCat (a,b) b
Dup :: FreeCat a (a,a)
Par :: FreeCat a b -> FreeCat c d -> FreeCat (a,c) (b,d)
Add :: FreeCat (a,a) a
Mul :: FreeCat (a,a) a
Apply :: FreeCat (FreeCat a b, a) b
Curry :: FreeCat (a,b) c -> FreeCat a (FreeCat b c)
Uncurry :: FreeCat a (FreeCat b c) -> FreeCat (a,b) c
instance Closed FreeCat where
applyC = Apply
curryC = Curry
uncurryC = Uncurry
deriving instance Show (FreeCat a b)
instance Category FreeCat where
(.) = Comp
id = Id
instance Monoidal FreeCat where
parC = Par
instance Cartesian FreeCat where
fstC = Fst
sndC = Snd
dupC = Dup
-} | null | https://raw.githubusercontent.com/philzook58/fib-anyon/5c81535201ffdd5a40db18510ce894be9ccccbd7/src/Cat.hs | haskell | make the leftmost porton of a match b , turning it into c
is this necessary ? A larger pattern should be a type error
instance LeftMatch ( ( a , b),c )
pullLeftLeaf only does fmove ' so this ca n't be enough
leftmatch l a , rightmatch r a ,
FullMatch l a
c ~ ( Tau , c ' ) I should have a check like this . Sometimes a = Tau
is this necessary ? A larger pattern should be a type error
c ~ ( Tau , c ' ) I should have a check like this . Sometimes a = Tau
instance LeftMatch b a b = > a b where
instead use constraint kinds
t2 , Tau),Tau ) ( TTT TLeaf ( TTT TLeaf TLeaf ) )
make the leftmost porton of a match b, turning it into c
is this necessary? A larger pattern should be a type error
instance LeftMatch ((a,b),c)
pullLeftLeaf only does fmove' so this can't be enough
leftmatch l a, rightmatch r a,
FullMatch l a
c ~ (Tau,c') I should have a check like this. Sometimes a = Tau
is this necessary? A larger pattern should be a type error
c ~ (Tau,c') I should have a check like this. Sometimes a = Tau
class LeftMatch b a b => ReAssoc a b where
instance LeftMatch b a b => ReAssoc a b where
instead use constraint kinds
t2 = leftmatch @((Tau,Tau),Tau) (TTT TLeaf (TTT TLeaf TLeaf))
reassoc = pure
Compiling to monoidal categories .
Could also find ways to pad in or out any ( ) units automatically .
Can I compile a linear function to monoidal categories using the associator ?
Then I could compile quantum functions . Neat .
finding good swaps is tough though .
The automatic associator
Compiling to monoidal categories.
Could also find ways to pad in or out any () units automatically.
Can I compile a linear function to monoidal categories using the associator?
Then I could compile quantum functions. Neat.
finding good swaps is tough though.
The automatic associator
Doing it this way is not efficient .
We do n't need to full right associate to get it over there .
lcamap of n and n+1
now we add 1 to the smaller left side
and try again
Doing it this way is not efficient.
We don't need to full right associate to get it over there.
lcamap of n and n+1
now we add 1 to the smaller left side
and try again
subtask
Collect n a b | n a -> b
collect n elements of a into the left subtree.
The process is like a binary search.
If n is greater than the size of l, we recurse into the right branch with a new number of leaves to collect
then we do a final reshuffle to put those all into the left tree.
(l,(l'',r'')) -- l'' is size n - k
((l,l''),r'') -- size of (l,l'') = k + (n-k) = n
((l'',r''),r) -- l'' is of size n
(l'',(r'',r)
We could define these functions for arbitrary monoidal categorues with reassociators (which is all of them)
The Count method requires things to be
It's gonna be ugly. Would need more of the incoherent trick
newtype FibOp ' a b = FibOp ' { runFibOp ' : : forall e a ' . ( a a ' , a ' a ) = > FibTree e a ' - > Q ( FibTree e b ) }
newtype FibOp ' a b = FibOp ' { runFibOp ' : : forall e a ' b ' . ( a a ' , a ' a , b b ' , b ' b ) = > FibTree e a ' - > Q ( FibTree e b ' ) }
FibOp ' $ reassoc < = < f < = < reassoc < = < reassoc < = < g < = < reassoc
newtype FibOp' a b = FibOp' {runFibOp' :: forall e a'. (ReAssoc a a', ReAssoc a' a) => FibTree e a' -> Q (FibTree e b)}
newtype FibOp' a b = FibOp' {runFibOp' :: forall e a' b'. (ReAssoc a a', ReAssoc a' a, ReAssoc b b', ReAssoc b' b) => FibTree e a' -> Q (FibTree e b')}
FibOp' $ reassoc <=< f <=< reassoc <=< reassoc <=< g <=< reassoc
$ \x -> do
x' <- g x
x'' <- reassoc x'
f x''
error
More matrix like form
parity sort?
liftList :: ([a] -> [a]) -> TreeVec i a -> TreeVec i a
should there be a tree structure?
type Fock a = Q [a]
as part of composition, we may want a re-sort with parity sign.
type I :: *
idl :: k (a, I) a
idl' :: k a (a, I)
idr :: k (I,a) a
idr' :: k a (I,a)
This is where we need c to be forall. We want to be able to par... There isn't a unique way to do Tau?
This is where we need c to be forall. We want to be able to par... There isn't a unique way to do Tau?
not clear how this should work .
This is where we need c to be forall . We want to be able to par ... There is n't a unique way to do Tau ?
not clear how this should work.
This is where we need c to be forall. We want to be able to par... There isn't a unique way to do Tau?
daulities . CPS , Either < - > TUple , Adjunctions
Dual r a = Dual ( a - > r )
a - > b ~ CoYoneda ( a - > r , r - > b )
( a - > ( ) , a ) - > ( )
LinOp a ( LinOp b c ) = a - > Q ( b - > Q c ) ~ ( a , b ) - > Q c
yeah I think we can swing that
Dual a = LinOp a ( )
apply : : LinOp ( Dual a , a ) ( ) -- the dual is n't really on the index though ?
Bounded Enum a = > LinOp ( ) ( Dual a , a )
a - > ( , ( ) )
in data - category he uses FibOp a a as a stand in for object a
in this case FibOp I d I d = ILeaf , FibOp = TLeaf . They are indeed good stand ins
what am I doing . Duality is a bog
newtype Dagger a b = Dagger { primal : : LinOp a b , dual : : LinOp b a }
dag ( Dagger f g ) = Dagger g f
dag = FibOp . dot
We need to also make a category that draws diagrams
daulities. CPS, Either <-> TUple, Adjunctions
Dual r a = Dual (a -> r)
a -> b ~ CoYoneda (a -> r, r -> b)
(a -> (), a) -> ()
LinOp a (LinOp b c) = a -> Q (b -> Q c) ~ (a,b) -> Q c
yeah I think we can swing that
Dual a = LinOp a ()
apply :: LinOp (Dual a, a) () -- the dual isn't really on the index though?
Bounded Enum a => LinOp () (Dual a, a)
a -> ( , () )
in data-category he uses FibOp a a as a stand in for object a
in this case FibOp Id Id = ILeaf, FibOp Tau Tau = TLeaf. They are indeed good stand ins
what am I doing. Duality is a bog
newtype Dagger a b = Dagger {primal :: LinOp a b, dual :: LinOp b a}
dag (Dagger f g) = Dagger g f
dag = FibOp . dot
We need to also make a category that draws diagrams
over . under = id
instance Braided (AntiOp i) where
over | = AntiOp (pure . swap) -1 .*
under = over -- AntiOp (pure . swap)
LinOp (LinOp a b) () ->
() -> (a,a)
(a,a) -> ()
Eq a => LinOp (a,a) () ~ curry dot
curry :: k a (k b c) -> k (a,b) c
uncurry :: k (a,b) c)-> k a (k b c)
type Dual a = LinOp a ()
newtype Dagger a = Dagger a
class Dagger k where
type Dual :: * -> *
dag' :: k (Dual a) (Dual b) -> k' b a
Dual k | # LANGUAGE GADTs , StandaloneDeriving , NoImplicitPrelude , FlexibleInstances , RankNTypes ,
TypeApplications , ScopedTypeVariables , MultiParamTypeClasses , FunctionalDependencies , FlexibleContexts ,
UndecidableInstances , AllowAmbiguousTypes , TypeFamilies ,
ConstraintKinds , TypeOperators , DataKinds , PolyKinds , InstanceSigs , NoMonomorphismRestriction
#
TypeApplications, ScopedTypeVariables, MultiParamTypeClasses, FunctionalDependencies, FlexibleContexts,
UndecidableInstances, AllowAmbiguousTypes, TypeFamilies ,
ConstraintKinds, TypeOperators, DataKinds, PolyKinds, InstanceSigs, NoMonomorphismRestriction
#-}
module Cat where
import Control.Category
import Prelude hiding ((.), id)
import Fib
import Vec
import Control.Monad ((<=<))
import GHC.TypeNats
import Control.Arrow ((***))
import Data.Tuple (swap)
class LeftMatch pat a c | pat a - > c where
leftmatch : : FibTree e a - > Q ( FibTree e c )
class RightMatch pat a c | pat a - > c where
rightmatch : : FibTree e a - > Q ( FibTree e c )
instance ( LeftMatch l a ( l',r ' ) ,
RightMatch l l ' l ,
leftmatch x = do
x ' < - leftmatch @l x
x '' < - lmap ( rightmatch @l ) x '
rmap ( leftmatch @r ) x ''
leftmatch x = pullLeftLeaf x
instance ( PullLeftLeaf a c ) = > LeftMatch I d a c where
leftmatch x = pullLeftLeaf x
instance ( RightMatch r a ( l',r ' ) ,
LeftMatch r r ' r ,
rightmatch x = do
x ' < - rightmatch @l x
x '' < - rmap ( leftmatch @l ) x '
lmap ( rightmatch @r ) x ''
rightmatch x = pullRightLeaf x
instance ( PullRightLeaf a c ) = > RightMatch I d a c where
rightmatch x = pullRightLeaf x
b a b = > a b where
type a b = LeftMatch b a b
t1 = , Tau ) ) ( TTT TLeaf ( TTT TLeaf TLeaf ) )
reassoc : : forall b a b = > FibTree e a - > Q ( FibTree e b )
reassoc x = leftmatch @b x
class LeftMatch pat a c | pat a -> c where
leftmatch :: FibTree e a -> Q (FibTree e c)
class RightMatch pat a c | pat a -> c where
rightmatch :: FibTree e a -> Q (FibTree e c)
instance (LeftMatch l a (l',r'),
RightMatch l l' l,
leftmatch x = do
x' <- leftmatch @l x
x'' <- lmap (rightmatch @l) x'
rmap (leftmatch @r) x''
leftmatch x = pullLeftLeaf x
instance (PullLeftLeaf a c) => LeftMatch Id a c where
leftmatch x = pullLeftLeaf x
instance (RightMatch r a (l',r'),
LeftMatch r r' r ,
rightmatch x = do
x' <- rightmatch @l x
x'' <- rmap (leftmatch @l) x'
lmap (rightmatch @r) x''
rightmatch x = pullRightLeaf x
instance (PullRightLeaf a c) => RightMatch Id a c where
rightmatch x = pullRightLeaf x
type ReAssoc a b = LeftMatch b a b
t1 = leftmatch @(Tau,(Tau,Tau)) (TTT TLeaf (TTT TLeaf TLeaf))
reassoc :: forall b a e. ReAssoc a b => FibTree e a -> Q (FibTree e b)
reassoc x = leftmatch @b x
-}
class ReAssoc a b where
reassoc :: FibTree e a -> Q (FibTree e b)
instance (n ~ Count l',
gte ~ CmpNat n (Count l),
LeftCollect n gte (l,r) (l'',r''),
ReAssoc l'' l',
ReAssoc r'' r') => ReAssoc (l,r) (l',r') where
reassoc x = do
x' <- leftcollect @n x
x'' <- rmap reassoc x'
lmap reassoc x''
instance { - # OVERLAPS # - } a a where
instance ReAssoc Tau Tau where
reassoc = pure
instance ReAssoc Id Id where
reassoc = pure
\((x , y),z ) - > ( x , ( y , z ) )
linear typed functions are symmettric monoidal categories , pretty sure . Not cartesian .
This right here has a lot of what we need to do compiling to categories of just monoidal categries .
\((x,y),z) -> (x, (y,z))
linear typed functions are symmettric monoidal categories, pretty sure. Not cartesian.
This right here has a lot of what we need to do compiling to categories of just monoidal categries.
-}
t4 :: Q (FibTree Tau (Tau,(Tau,Tau)))
t4 = reassoc (TTT TLeaf (TTT TLeaf TLeaf))
t5 :: Q (FibTree Tau ((Tau,Tau),Tau))
t5 = reassoc (TTT TLeaf (TTT TLeaf TLeaf))
instance n ~ Count l ,
n ' ~ Count ' l ,
gte ~ CmpNat l l '
' gte = > ' ( ( a , b),r ) ( l',r ' ) ' GT
reassoc ' x = do
instance ( ( l , a),b ) ( l',r ' ) = > ' ( l,(a , b ) ) ( l',r ' ) ' LT
reassoc ' x = do
x ' < - rmap pullLeftLeaf x
instance l l ' , r r ' = > ' ( l , r ) ( l',r ' ) ' EQ where
reassoc ' x = do
x ' < - lmap reassoc x
rmap reassoc x '
instance ' Tau Tau ' EQ where
ressoc ' = pure
instance ' I d I d ' EQ where
ressoc ' = pure
instance n ~ Count l,
n' ~ Count 'l,
gte ~ CmpNat l l'
ReAssoc' gte => ReAssoc' ((a,b),r) (l',r') 'GT
reassoc' x = do
instance ReAssoc ((l,a),b) (l',r') gte => ReAssoc' (l,(a,b)) (l',r') 'LT
reassoc' x = do
x' <- rmap pullLeftLeaf x
instance ReAssoc l l', ReAssoc r r' => ReAssoc' (l,r) (l',r') 'EQ where
reassoc' x = do
x' <- lmap reassoc x
rmap reassoc x'
instance ReAssoc' Tau Tau 'EQ where
ressoc' = pure
instance ReAssoc' Id Id 'EQ where
ressoc' = pure
-}
leftcollect :: forall n gte l r o e. (gte ~ CmpNat n (Count l), LeftCollect n gte (l,r) o) => FibTree e (l,r) -> Q (FibTree e o)
leftcollect x = leftcollect' @n @gte x
class LeftCollect n gte a b | n gte a -> b where
leftcollect' :: FibTree e a -> Q (FibTree e b)
LeftCollect pulls n leaves into the left branch of the tuple
instance (
k ~ Count l,
r ~ (l',r'),
n' ~ (n - k),
gte ~ CmpNat n' (Count l'),
LeftCollect n' gte r (l'',r'')) => LeftCollect n 'GT (l,r) ((l,l''),r'') where
leftcollect' x = do
instance (
l ~ (l',r'),
gte ~ CmpNat n (Count l'),
LeftCollect n gte l (l'',r'')) => LeftCollect n 'LT (l,r) (l'',(r'',r)) where
leftcollect' x = do
instance LeftCollect n 'EQ (l,r) (l,r) where
leftcollect' = pure
Count ( a , b ) or Count a = 1 as default case .
t1 = leftcollect @2 (TTT (TTT TLeaf TLeaf) TLeaf)
t2 = leftcollect @1 (TTT (TTT TLeaf TLeaf) TLeaf)
newtype FibOp ' a b = FibOp ' { runFibOp ' : : forall e a ' b ' . ( a a ' , b ' b ) = > FibTree e a ' - > Q ( FibTree e b ' ) }
instance Category FibOp ' where
i d : : forall a. FibOp ' a a
i d = FibOp ' pure
( . ) : : forall a b c. FibOp ' b c - > FibOp ' a b - > FibOp ' a c
{ -$ \x - > do
x ' x
x '' < - reassoc x '
f x ''
newtype FibOp' a b = FibOp' {runFibOp' :: forall e a' b'. (ReAssoc a a', ReAssoc b' b) => FibTree e a' -> Q (FibTree e b')}
instance Category FibOp' where
id :: forall a. FibOp' a a
id = FibOp' pure
(.) :: forall a b c. FibOp' b c -> FibOp' a b -> FibOp' a c
-}
( f < = < reassoc < = )
is just a recursive Collect n. Like how RightCaonical is recursive pullLeft
(...) :: ReAssoc b b' => FibOp b' c -> FibOp a b -> FibOp a c
(FibOp f) ... (FibOp g) = FibOp $ f <=< reassoc <=< g
newtype FibOp c a b = FibOp { runFib : : ( FibTree c a - > Q ( FibTree c b ) ) }
newtype FibOp a b = FibOp {runFib :: (forall c. FibTree c a -> Q (FibTree c b))}
type FibOp ' c a b = FibTree c a - > Q ( FibTree c b )
newtype LinOp a b = LinOp {runLin :: a -> Q b}
this is a tree data type . Basically an alternative form of the typed vector a i
also basically an alternativew to raw tuples ( a , a ) . It enforces that all the leaves have to match .
data TreeVec i a where
Leaf :: i -> TreeVec i i
Node :: TreeVec i a -> TreeVec i b -> TreeVec i (a,b)
what are the invariants here ? Runs on already antisymmettric vectors ? Or runs on possibly not symmettrized vectors ?
newtype AntiOp i a b = AntiOp {runAnti :: TreeVec i a -> Q (TreeVec i b)}
newtype SymOp i a b = SymOp {runSym :: TreeVec i a -> Q (TreeVec i b)}
data a b where
: : FibTree e a - > FibTree e b - class FibIdent a where
expansion : : [ ( a a ) ]
instance where
expansion = pure ( TLeaf )
instance FibExpansion I d where
expansion = pure ( ILeaf )
instance ( FibExpansion a , FibExpansion b ) = > FibExpansion ( a , b ) where
expansion = pure ( ILeaf )
e1 = expansion @a
e2 = expansion @b
FibTree e a - > FibTree f b - > FibTree
FibOuter a a - > FibOuter b b - > [ ( a , b ) ( a , b ) ]
( TLeaf ) ( TLeaf ) = [ ( TTT TLeaf TLeaf ) ( TTT TLeaf ) , ( ITT TLeaf TLeaf ) ( ITT TLeaf TLeaf ) ]
data FibOuter a b where
FibOuter :: FibTree e a -> FibTree e b -> FibOuter a b
class FibIdent a where
expansion :: [(FibOuter a a)]
instance FibExpansion Tau where
expansion = pure (FibOuter TLeaf TLeaf)
instance FibExpansion Id where
expansion = pure (FibOuter ILeaf ILeaf)
instance (FibExpansion a , FibExpansion b) => FibExpansion (a,b) where
expansion = pure (FibOuter ILeaf ILeaf)
e1 = expansion @a
e2 = expansion @b
FibTree e a -> FibTree f b -> FibTree
FibOuter a a -> FibOuter b b -> [FibOuter (a,b) (a,b)]
(FibOuter TLeaf TLeaf) (FibOuter TLeaf TLeaf) = [FibOuter (TTT TLeaf TLeaf) (TTT TLeaf TLeaf), (ITT TLeaf TLeaf) (ITT TLeaf TLeaf)]
-}
instance Category LinOp where
id = LinOp pure
(LinOp f) . (LinOp g) = LinOp (f <=< g)
instance Category (AntiOp i) where
id = AntiOp pure
instance Category (FibOp) where
id = FibOp pure
(FibOp f) . (FibOp g) = FibOp (f <=< g)
class Category k => Monoidal k where
parC :: k a c -> k b d -> k (a,b) (c,d)
assoc :: k ((a,b),c) (a,(b,c))
unassoc :: k (a,(b,c)) ((a,b),c)
leftUnitor :: k ((),a) a
leftUnitor' :: k a ((),a)
rightUnitor :: k (a,()) a
rightUnitor' :: k a (a,())
maybe we just ignore these . They are annoying .
instance Num (LinOp a a) where
(LinOp a) + (LinOp b) = LinOp $ \x -> (a x) <> (b x)
x * y = x . y
negate (LinOp f) = LinOp $ \x -> (-1) .* (f x)
fromInteger n = LinOp $ \x -> W [(x, fromInteger n)]
abs f = error "Abs not obvious for LinOp"
signum f = error "Signum not obvious for LinOp"
instance Monoidal (FibOp) where
assoc = FibOp fmove'
unassoc = FibOp fmove
leftUnitor = FibOp leftUnit
leftUnitor' = FibOp leftUnit'
rightUnitor = FibOp rightUnit
rightUnitor' = FibOp rightUnit'
instance Monoidal LinOp where
assoc = LinOp (pure . assoc)
unassoc = LinOp (pure . unassoc)
leftUnitor = LinOp (pure . leftUnitor)
leftUnitor' = LinOp (pure .leftUnitor')
rightUnitor = LinOp (pure . rightUnitor)
rightUnitor' = LinOp (pure . rightUnitor')
instance Monoidal AntiOp where
assoc = AntiOp ( pure . assoc )
unassoc = AntiOp ( pure . unassoc )
leftUnitor = AntiOp ( pure . leftUnitor )
leftUnitor ' = AntiOp ( pure .leftUnitor ' )
rightUnitor = AntiOp ( pure . rightUnitor )
rightUnitor ' = AntiOp ( pure . rightUnitor ' )
instance Monoidal AntiOp where
assoc = AntiOp (pure . assoc)
unassoc = AntiOp (pure . unassoc)
leftUnitor = AntiOp (pure . leftUnitor)
leftUnitor' = AntiOp (pure .leftUnitor')
rightUnitor = AntiOp (pure . rightUnitor)
rightUnitor' = AntiOp (pure . rightUnitor')
-}
instance Monoidal (->) where
parC f g = f *** g
assoc ((x,y),z) = (x,(y,z))
unassoc (x,(y,z)) = ((x,y),z)
leftUnitor (_, x) = x
leftUnitor' x = ((),x)
rightUnitor (x, _) = x
rightUnitor' x = (x,())
type I = I d
idl = FibOp case t of
TTI
idl ' =
type I = Id
idl = FibOp $ \t -> case t of
TTI
idl' = -}
Dagger ? Dual ? Rigid ? , Compact closed
cap : : k ( a , a ) I
cup : : k I ( a , a )
type Dual v = Q v - > Double
type Dual a = FibOp a I
b a = DualFibOp Dual a - > Dual b
dag : : FibOp a b - > DualFibOp b a
dag ' : : uses dot ?
-talk.net/2011/09/the-braids-package/
( lmap ( ) ) ( rmap ( ttt ) ) ttt
newtype CFibOp a b = CFibOp { runCFib : : ( forall c. FibTree c ( RightAssoc a ) - > Q ( FibTree c ( RightAssoc b ) ) ) }
assoc = i d
unassoc = i d
par = ?
class Monoidal CFibOp where
type Prod : : * - > * - > *
Prod = Append
par f g = rightAssoc > = > nmap @n ( ( lmap f ) > = > ( rmap g ) )
Dagger? Dual? Rigid?, Compact closed
cap :: k (a,a) I
cup :: k I (a,a)
type Dual v = Q v -> Double
type Dual a = FibOp a I
DualFibOp b a = DualFibOp Dual a -> Dual b
dag :: FibOp a b -> DualFibOp b a
dag' :: uses dot?
-talk.net/2011/09/the-braids-package/
(lmap () ) (rmap (ttt) ) ttt
newtype CFibOp a b = CFibOp {runCFib :: (forall c. FibTree c (RightAssoc a) -> Q (FibTree c (RightAssoc b)))}
assoc = id
unassoc = id
par = ?
class Monoidal CFibOp where
type Prod :: * -> * -> *
Prod = Append
par f g = rightAssoc >=> nmap @n ((lmap f) >=> (rmap g))
-}
class Monoidal k => Braided k where
over :: k (a,b) (b,a)
under :: k (a,b) (b,a)
instance Braided FibOp where
over = FibOp braid
under = FibOp braid'
instance Braided (->) where
over = swap
under = swap
instance Braided (LinOp) where
over = LinOp (pure . swap)
under = LinOp (pure . swap)
( Eq a ) = > Q ( a , b ) - > LinOp a b
( Bounded a , a ) = > LinOp ( ) ( a , a )
Dual k a b = forall ( k b r ) ( k a r )
dag : : k a b - > k ( Dual b ) ( Dual a )
class Monoidal k = > Cartesian k where
fstC : : k ( a , b ) a
sndC : : k ( a , b ) b
dupC : : k a ( a , a )
class Cartesian k = > Closed k where
applyC : : k ( k a b , a ) b
curryC : : k ( a , b ) c - > k a ( k b c )
uncurryC : : k a ( k b c ) - > k ( a , b ) c
fanC f g = ( parC f g ) . dupC
idC : : Category k = > k a a
idC = i d
data FreeCat a b where
Comp : : FreeCat b c - > FreeCat a b - > FreeCat a c
I d : : FreeCat a a
Fst : : FreeCat ( a , b ) a
Snd : : FreeCat ( a , b ) b
Dup : : FreeCat a ( a , a )
Par : : FreeCat a b - > FreeCat c d - > FreeCat ( a , c ) ( b , d )
Add : : FreeCat ( a , a ) a
Mul : : FreeCat ( a , a ) a
Apply : : FreeCat ( FreeCat a b , a ) b
Curry : : FreeCat ( a , b ) c - > FreeCat a ( FreeCat b c )
Uncurry : : FreeCat a ( FreeCat b c ) - > FreeCat ( a , b ) c
instance Closed FreeCat where
applyC = Apply
curryC = Curry
uncurryC = Uncurry
deriving instance Show ( FreeCat a b )
instance Category FreeCat where
( . ) = Comp
i d = I d
instance where
parC = Par
instance Cartesian FreeCat where
fstC = Fst
sndC = Snd
dupC
class Monoidal k => Cartesian k where
fstC :: k (a,b) a
sndC :: k (a,b) b
dupC :: k a (a,a)
class Cartesian k => Closed k where
applyC :: k (k a b,a) b
curryC :: k (a,b) c -> k a (k b c)
uncurryC :: k a (k b c) -> k (a,b) c
fanC f g = (parC f g) . dupC
idC :: Category k => k a a
idC = id
data FreeCat a b where
Comp :: FreeCat b c -> FreeCat a b -> FreeCat a c
Id :: FreeCat a a
Fst :: FreeCat (a,b) a
Snd :: FreeCat (a,b) b
Dup :: FreeCat a (a,a)
Par :: FreeCat a b -> FreeCat c d -> FreeCat (a,c) (b,d)
Add :: FreeCat (a,a) a
Mul :: FreeCat (a,a) a
Apply :: FreeCat (FreeCat a b, a) b
Curry :: FreeCat (a,b) c -> FreeCat a (FreeCat b c)
Uncurry :: FreeCat a (FreeCat b c) -> FreeCat (a,b) c
instance Closed FreeCat where
applyC = Apply
curryC = Curry
uncurryC = Uncurry
deriving instance Show (FreeCat a b)
instance Category FreeCat where
(.) = Comp
id = Id
instance Monoidal FreeCat where
parC = Par
instance Cartesian FreeCat where
fstC = Fst
sndC = Snd
dupC = Dup
-} |
eeacb9047a6d0cdd30028aa0fd3157976f200a11bcb3fda1e8558c2f7543e893 | cdornan/keystore | CPRNG.hs | # LANGUAGE GeneralizedNewtypeDeriving #
{-# LANGUAGE OverloadedStrings #-}
module Data.KeyStore.KS.CPRNG
( CPRNG
, newCPRNG
, testCPRNG
, generateCPRNG
) where
import Crypto.Random
import qualified Data.ByteArray as BA
import System.IO.Unsafe
newtype CPRNG
= CPRNG { _CPRNG :: SystemDRG }
deriving (DRG)
newCPRNG :: IO CPRNG
newCPRNG = CPRNG <$> getSystemDRG
testCPRNG :: CPRNG
testCPRNG = unsafePerformIO newCPRNG
generateCPRNG :: BA.ByteArray ba => Int -> CPRNG -> (ba,CPRNG)
generateCPRNG = randomBytesGenerate
| null | https://raw.githubusercontent.com/cdornan/keystore/5c8a9272537bc880c055c0a6b44327386a16b52f/src/Data/KeyStore/KS/CPRNG.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE GeneralizedNewtypeDeriving #
module Data.KeyStore.KS.CPRNG
( CPRNG
, newCPRNG
, testCPRNG
, generateCPRNG
) where
import Crypto.Random
import qualified Data.ByteArray as BA
import System.IO.Unsafe
newtype CPRNG
= CPRNG { _CPRNG :: SystemDRG }
deriving (DRG)
newCPRNG :: IO CPRNG
newCPRNG = CPRNG <$> getSystemDRG
testCPRNG :: CPRNG
testCPRNG = unsafePerformIO newCPRNG
generateCPRNG :: BA.ByteArray ba => Int -> CPRNG -> (ba,CPRNG)
generateCPRNG = randomBytesGenerate
|
67cd556d53e74cba195f20cc948520ff86cd3cadf93fcd56d2697a99769a5f58 | untangled-web/untangled-ui | user.cljs | (ns cljs.user
(:require
[untangled.client.core :as core]
[cljs.pprint :refer [pprint]]
[guideui.main :as main]
[guideui.ui :as ui]))
(enable-console-print!)
; use this from REPL to view bits of the application db
(defn log-app-state
"Helper for logging the app-state, pass in top-level keywords from the app-state and it will print only those
keys and their values."
[& keywords]
(pprint (let [app-state @(:reconciler @main/ui-client)]
(if (= 0 (count keywords))
app-state
(select-keys app-state keywords)))))
| null | https://raw.githubusercontent.com/untangled-web/untangled-ui/ae101f90cd9b7bf5d0c80e9453595fdfe784923c/dev/cljs/user.cljs | clojure | use this from REPL to view bits of the application db | (ns cljs.user
(:require
[untangled.client.core :as core]
[cljs.pprint :refer [pprint]]
[guideui.main :as main]
[guideui.ui :as ui]))
(enable-console-print!)
(defn log-app-state
"Helper for logging the app-state, pass in top-level keywords from the app-state and it will print only those
keys and their values."
[& keywords]
(pprint (let [app-state @(:reconciler @main/ui-client)]
(if (= 0 (count keywords))
app-state
(select-keys app-state keywords)))))
|
bf1afb2272e89b969f5c438b6a9257d4319b9c916ac37427dd8fe6f002527faf | exercism/common-lisp | largest-series-product-test.lisp | Ensures that largest-series-product.lisp and the testing library are always loaded
(eval-when (:compile-toplevel :load-toplevel :execute)
(load "largest-series-product")
(quicklisp-client:quickload :fiveam))
;; Defines the testing package with symbols from largest-series-product and FiveAM in scope
;; The `run-tests` function is exported for use by both the user and test-runner
(defpackage :largest-series-product-test
(:use :cl :fiveam)
(:export :run-tests))
;; Enter the testing package
(in-package :largest-series-product-test)
;; Define and enter a new FiveAM test-suite
(def-suite* largest-series-product-suite)
(test finds-the-largest-product-if-span-equals-length
(let ((digits "29")
(span 2))
(is (eql 18 (largest-series-product:largest-product digits span)))))
(test can-find-the-largest-product-of-2-with-numbers-in-order
(let ((digits "0123456789")
(span 2))
(is (eql 72 (largest-series-product:largest-product digits span)))))
(test can-find-the-largest-product-of-2
(let ((digits "576802143")
(span 2))
(is (eql 48 (largest-series-product:largest-product digits span)))))
(test can-find-the-largest-product-of-3-with-numbers-in-order
(let ((digits "0123456789")
(span 3))
(is (eql 504 (largest-series-product:largest-product digits span)))))
(test can-find-the-largest-product-of-3
(let ((digits "1027839564")
(span 3))
(is (eql 270 (largest-series-product:largest-product digits span)))))
(test can-find-the-largest-product-of-5-with-numbers-in-order
(let ((digits "0123456789")
(span 5))
(is (eql 15120 (largest-series-product:largest-product digits span)))))
(test can-get-the-largest-product-of-a-big-number
(let ((digits "73167176531330624919225119674426574742355349194934")
(span 6))
(is (eql 23520 (largest-series-product:largest-product digits span)))))
(test reports-zero-if-the-only-digits-are-zero
(let ((digits "0000")
(span 2))
(is (eql 0 (largest-series-product:largest-product digits span)))))
(test reports-zero-if-all-spans-include-zero
(let ((digits "99099")
(span 3))
(is (eql 0 (largest-series-product:largest-product digits span)))))
(test rejects-span-longer-than-string-length
(let ((digits "123")
(span 4))
(is (eql NIL (largest-series-product:largest-product digits span)))))
(test reports-1-for-empty-string-and-empty-product
(let ((digits "")
(span 0))
(is (eql 1 (largest-series-product:largest-product digits span)))))
(test reports-1-for-nonempty-string-and-empty-product
(let ((digits "123")
(span 0))
(is (eql 1 (largest-series-product:largest-product digits span)))))
(test rejects-empty-string-and-nonzero-span
(let ((digits "")
(span 1))
(is (eql NIL (largest-series-product:largest-product digits span)))))
(test rejects-invalid-character-in-digits
(let ((digits "1234a5")
(span 2))
(is (eql NIL (largest-series-product:largest-product digits span)))))
(test rejects-negative-span
(let ((digits "12345")
(span -1))
(is (eql NIL (largest-series-product:largest-product digits span)))))
(test rejects-negative-span
(let ((digits "12345")
(span -1))
(is (eql NIL (largest-series-product:largest-product digits span)))))
(defun run-tests (&optional (test-or-suite 'largest-series-product-suite))
"Provides human readable results of test run. Default to entire suite."
(run! test-or-suite))
| null | https://raw.githubusercontent.com/exercism/common-lisp/82dc71e313be217703e9ea2d53c2c6db92bfce8e/exercises/practice/largest-series-product/largest-series-product-test.lisp | lisp | Defines the testing package with symbols from largest-series-product and FiveAM in scope
The `run-tests` function is exported for use by both the user and test-runner
Enter the testing package
Define and enter a new FiveAM test-suite | Ensures that largest-series-product.lisp and the testing library are always loaded
(eval-when (:compile-toplevel :load-toplevel :execute)
(load "largest-series-product")
(quicklisp-client:quickload :fiveam))
(defpackage :largest-series-product-test
(:use :cl :fiveam)
(:export :run-tests))
(in-package :largest-series-product-test)
(def-suite* largest-series-product-suite)
(test finds-the-largest-product-if-span-equals-length
(let ((digits "29")
(span 2))
(is (eql 18 (largest-series-product:largest-product digits span)))))
(test can-find-the-largest-product-of-2-with-numbers-in-order
(let ((digits "0123456789")
(span 2))
(is (eql 72 (largest-series-product:largest-product digits span)))))
(test can-find-the-largest-product-of-2
(let ((digits "576802143")
(span 2))
(is (eql 48 (largest-series-product:largest-product digits span)))))
(test can-find-the-largest-product-of-3-with-numbers-in-order
(let ((digits "0123456789")
(span 3))
(is (eql 504 (largest-series-product:largest-product digits span)))))
(test can-find-the-largest-product-of-3
(let ((digits "1027839564")
(span 3))
(is (eql 270 (largest-series-product:largest-product digits span)))))
(test can-find-the-largest-product-of-5-with-numbers-in-order
(let ((digits "0123456789")
(span 5))
(is (eql 15120 (largest-series-product:largest-product digits span)))))
(test can-get-the-largest-product-of-a-big-number
(let ((digits "73167176531330624919225119674426574742355349194934")
(span 6))
(is (eql 23520 (largest-series-product:largest-product digits span)))))
(test reports-zero-if-the-only-digits-are-zero
(let ((digits "0000")
(span 2))
(is (eql 0 (largest-series-product:largest-product digits span)))))
(test reports-zero-if-all-spans-include-zero
(let ((digits "99099")
(span 3))
(is (eql 0 (largest-series-product:largest-product digits span)))))
(test rejects-span-longer-than-string-length
(let ((digits "123")
(span 4))
(is (eql NIL (largest-series-product:largest-product digits span)))))
(test reports-1-for-empty-string-and-empty-product
(let ((digits "")
(span 0))
(is (eql 1 (largest-series-product:largest-product digits span)))))
(test reports-1-for-nonempty-string-and-empty-product
(let ((digits "123")
(span 0))
(is (eql 1 (largest-series-product:largest-product digits span)))))
(test rejects-empty-string-and-nonzero-span
(let ((digits "")
(span 1))
(is (eql NIL (largest-series-product:largest-product digits span)))))
(test rejects-invalid-character-in-digits
(let ((digits "1234a5")
(span 2))
(is (eql NIL (largest-series-product:largest-product digits span)))))
(test rejects-negative-span
(let ((digits "12345")
(span -1))
(is (eql NIL (largest-series-product:largest-product digits span)))))
(test rejects-negative-span
(let ((digits "12345")
(span -1))
(is (eql NIL (largest-series-product:largest-product digits span)))))
(defun run-tests (&optional (test-or-suite 'largest-series-product-suite))
"Provides human readable results of test run. Default to entire suite."
(run! test-or-suite))
|
8ac6d7fb6400670f07244a9c81d4e6176dee826afbc6e52bfb90bfd77a4fba1c | rowangithub/DOrder | svd.ml | let rec loopa i j l m n =
if j <= n then
(assert(1<=j);assert(j<=n);
assert(1<=i);assert(i<=n);
(*//assert(1<=i);assert(i<=m); // TODO feasible counterexample found*)
assert(1<=l);assert(l<=n);
loopa i (j+1) l m n)
else ()
let rec loopc i j k l m n =
if k <= n then
((*//assert(1<=i);assert(i<=m); // TODO feasible counterexample found*)
assert(1<=k);assert(k<=n);
assert(1<=j);assert(j<=n);
loopc i j (k+1) l m n)
else ()
let rec loopd i j k l m n =
if k <= n then
(assert(1<=k);assert(k<=n);
assert(1<=j);assert(j<=n);
assert(1<=i);assert(i<=n);
loopd i j (k+1) l m n)
else ()
let rec loopb i j l m n =
if j <= n then
(loopc i j l l m n;
loopd i j l l m n;
loopb i (j+1) l m n)
else ()
let rec loope i j l m n =
if j <= n then
(assert(1<=j);assert(j<=n);
assert(1<=i);assert(i<=n);
loope i (j+1) l m n)
else ()
let rec loop i l m n = (* Accumulation of right-hand transformations. *)
if (i >= 1) then
((if (i < n) then (
(if (Random.bool ()) then (
loopa i l l m n; (* Double division to avoid possible underflow. *)
loopb i l l m n
)
else ());
loope i l l m n
) else ());
assert(1<=i);
assert(i<=n);
assert(1<=i);assert(i<=n);
loop (i-1) i m n)
else ()
let main l m n =
if (l>0) then
loop n l m n
else ()
let _ = main 4 0 2
let _ = main 2 0 4
let _ = main 1 0 0
let _ = main (-1) 0 0 | null | https://raw.githubusercontent.com/rowangithub/DOrder/e0d5efeb8853d2a51cc4796d7db0f8be3185d7df/tests/folprograms/sc/svd.ml | ocaml | //assert(1<=i);assert(i<=m); // TODO feasible counterexample found
//assert(1<=i);assert(i<=m); // TODO feasible counterexample found
Accumulation of right-hand transformations.
Double division to avoid possible underflow. | let rec loopa i j l m n =
if j <= n then
(assert(1<=j);assert(j<=n);
assert(1<=i);assert(i<=n);
assert(1<=l);assert(l<=n);
loopa i (j+1) l m n)
else ()
let rec loopc i j k l m n =
if k <= n then
assert(1<=k);assert(k<=n);
assert(1<=j);assert(j<=n);
loopc i j (k+1) l m n)
else ()
let rec loopd i j k l m n =
if k <= n then
(assert(1<=k);assert(k<=n);
assert(1<=j);assert(j<=n);
assert(1<=i);assert(i<=n);
loopd i j (k+1) l m n)
else ()
let rec loopb i j l m n =
if j <= n then
(loopc i j l l m n;
loopd i j l l m n;
loopb i (j+1) l m n)
else ()
let rec loope i j l m n =
if j <= n then
(assert(1<=j);assert(j<=n);
assert(1<=i);assert(i<=n);
loope i (j+1) l m n)
else ()
if (i >= 1) then
((if (i < n) then (
(if (Random.bool ()) then (
loopb i l l m n
)
else ());
loope i l l m n
) else ());
assert(1<=i);
assert(i<=n);
assert(1<=i);assert(i<=n);
loop (i-1) i m n)
else ()
let main l m n =
if (l>0) then
loop n l m n
else ()
let _ = main 4 0 2
let _ = main 2 0 4
let _ = main 1 0 0
let _ = main (-1) 0 0 |
8d133e5be2495b74f1e5980d62f625fd367d9d25c4a6d46653d89e39f1bbc72b | mirage/ocaml-git | unix_backend.mli | open Sigs
module Scheduler : SCHED with type +'a s = 'a
val unix : Scheduler.t scheduler
| null | https://raw.githubusercontent.com/mirage/ocaml-git/37c9ef41944b5b19117c34eee83ca672bb63f482/test/smart/unix_backend.mli | ocaml | open Sigs
module Scheduler : SCHED with type +'a s = 'a
val unix : Scheduler.t scheduler
| |
74e9097d4668a8b1e708007e3f7b9074fa2e3a594f25611f5230fab85e401a2c | earl-ducaine/cl-garnet | garnet-loader.lisp | ;;; -*- Mode: LISP; Syntax: Common-Lisp; Package: COMMON-LISP-USER -*-
(in-package :common-lisp-user)
;; Not likely to be anywhere in the world where this would be useful.
(defparameter garnet-version-number "3.3.post")
(pushnew :garnet *features*)
(pushnew :garnet-v3 *features*)
(pushnew :garnet-v3.3.post *features*)
(pushnew :garnet-test *features*)
;; The :garnet-debug feature allows many different kinds of run-time
;; checking, and also loads some extra test code. After you have
;; debugged your code and want it to run faster, remove :GARNET-DEBUG
from the * features * list and RECOMPILE all of Garnet and your code .
;; The result will be smaller and somewhat faster.
;;
;; To remove :garnet-debug from the *features* list, either defvar
;; Garnet-Garnet-Debug to NIL before you load the garnet-loader, or
simply edit the following defvar to set to nil .
;;
TODO ( ed ): I have a pathological hatred of using * features * . I find it makes
;; for hideously ugly code. So, at some point this should be changed
;; to a runtime special variable that dynamically controls this. That
;; will forfit code size, but will still allow for optimizing
;; production code.
(defvar garnet-garnet-debug t)
(if garnet-garnet-debug
(pushnew :garnet-debug *features*)
(setf *features* (delete :garnet-debug *features*)))
;; The following variable affects compiler policy. Setting it to T
;; uses the settings in *garnet-compile-debug-settings*. Setting it to
NIL uses the ones in * garnet - compile - production - settings * . By
default we simply mirror Garnet - Garnet - Debug .
;; (defvar garnet-compile-debug-mode garnet-garnet-debug
;; "Setting this variable to T sets the policy for the entire system
;; to make it more debuggable.")
;; (defvar garnet-compile-debug-settings
' ( optimize ( speed 2 )
( safety 3 )
( debug 3 )
( space 2 ) )
;; "Use these settings for globally debugging the system or for debugging
;; a specific module. They emphasize debuggability at the cost of some speed.
With :
;; - These settings are type-safe.
;; - They prevent functions declared inline from being expanded inline.
;; Note that as part of this version I have tried to make most
;; non-syntactic macros into inline functions.
;; - They allow all possible debugging features.")
;; (defvar garnet-compile-production-settings
' ( optimize ( speed 3 )
;; (safety 0)
( space 1 )
( debug 1 )
;; (compilation-speed 0))
;; "production compiler policy settings. emphasize speed, de-emphasize debugging.")
;; (defvar default-garnet-proclaim
;; (if garnet-compile-debug-mode
;; garnet-compile-debug-settings
;; garnet-compile-production-settings)
;; "Set compiler optimization settings.
1 . If you want everything debugged , set Garnet - Compile - Debug - Mode to t.
2 . If you want to debug specific modules , set Garnet - Compile - Debug - Mode
;; to nil. Then set the variable in the modules you want debugged to enable
;; debugging that module.
3 . Otherwise ( for ' production ' builds ) just set Garnet - Compile - Debug - Mode
;; to nil and leave everything else alone.")
(defun append-directory (directory sub-directory)
"This is a little utility for accessing the subdirectory of a
directory. It assumes that 'sub-directory' is directly under
'directory'."
(let ((dir (pathname-directory directory))
(subdir (if (listp sub-directory)
sub-directory
(list sub-directory))))
(make-pathname :directory (append dir subdir))))
(defun get-garnet-binary-pathname ()
(let ((directory-name "src"))
(append-directory org.xoanonos.asdf-app-config:*base-directory* directory-name)))
(defvar garnet-src-pathname (append-directory org.xoanonos.asdf-app-config:*base-directory* "src"))
(defvar garnet-lib-pathname (append-directory org.xoanonos.asdf-app-config:*base-directory* "lib"))
(defvar garnet-binary-pathname (get-garnet-binary-pathname))
(
( append - directory " truetype " ) )
(defvar Garnet-Truetype-Pathname
(append-directory Garnet-Binary-Pathname "truetype"))
(defvar Garnet-Inter-Src
(append-directory Garnet-Src-Pathname "inter"))
(defvar Garnet-Inter-Pathname
(append-directory Garnet-Binary-Pathname "inter"))
(defvar Garnet-Gesture-Src
(append-directory Garnet-Src-Pathname "gesture"))
(defvar Garnet-Gesture-Pathname
(append-directory Garnet-Binary-Pathname "gesture"))
(defvar Garnet-Aggregadgets-Src
(append-directory Garnet-Src-Pathname "aggregadgets"))
(defvar Garnet-Aggregadgets-Pathname
(append-directory Garnet-Binary-Pathname "aggregadgets"))
(defvar Garnet-PS-Src
(append-directory Garnet-Src-Pathname "ps"))
(defvar Garnet-Gadgets-Src
(append-directory Garnet-Src-Pathname "gadgets"))
(defvar Garnet-Gadgets-Pathname
(append-directory Garnet-Binary-Pathname "gadgets"))
(defvar Garnet-Debug-Src
(append-directory Garnet-Src-Pathname "debug"))
(defvar Garnet-Debug-Pathname
(append-directory Garnet-Binary-Pathname "debug"))
(defvar Garnet-Demos-Src
(append-directory Garnet-Src-Pathname "demos"))
(defvar Garnet-Demos-Pathname
(append-directory Garnet-Binary-Pathname "demos"))
(defvar Garnet-Gilt-Src
(append-directory Garnet-Src-Pathname "gilt"))
(defvar Garnet-Gilt-Pathname
(append-directory Garnet-Binary-Pathname "gilt"))
(defvar Garnet-C32-Src
(append-directory Garnet-Src-Pathname "c32"))
(defvar Garnet-C32-Pathname
(append-directory Garnet-Binary-Pathname "c32"))
(defvar Garnet-Lapidary-Src
(append-directory Garnet-Src-Pathname "lapidary"))
(defvar Garnet-Lapidary-Pathname
(append-directory Garnet-Binary-Pathname "lapidary"))
(defvar Garnet-Contrib-Src
(append-directory Garnet-Src-Pathname "contrib"))
(defvar Garnet-Contrib-Pathname
(append-directory Garnet-Binary-Pathname "contrib"))
(defvar Garnet-Protected-Eval-Src
(append-directory Garnet-Src-Pathname "protected-eval"))
(defvar Garnet-Protected-Eval-Pathname
(append-directory Garnet-Binary-Pathname "protected-eval"))
(defvar Garnet-Bitmap-Pathname
(append-directory Garnet-Lib-Pathname "bitmaps"))
(defvar Garnet-Pixmap-Pathname
(append-directory Garnet-Lib-Pathname "pixmaps"))
(defvar Garnet-Gilt-Bitmap-Pathname
(append-directory Garnet-Lib-Pathname "gilt"))
(defvar Garnet-C32-Bitmap-Pathname
(append-directory Garnet-Lib-Pathname "c32"))
(defvar Garnet-DataFile-Pathname
(append-directory Garnet-Lib-Pathname "data"))
(defvar Garnet-Gesture-Data-Pathname
(append-directory Garnet-Lib-Pathname "gesture"))
;;; Names of loader files.
(defparameter Garnet-Inter-Loader (merge-pathnames "inter-loader" Garnet-Inter-PathName))
(defparameter Garnet-Gesture-Loader (merge-pathnames "gesture-loader" Garnet-Gesture-PathName))
(defparameter Garnet-Aggregadgets-Loader (merge-pathnames "aggregadgets-loader" Garnet-Aggregadgets-PathName))
(defparameter Garnet-Aggregraphs-Loader (merge-pathnames "aggregraphs-loader" Garnet-Aggregadgets-PathName))
(defparameter Garnet-Gadgets-Loader (merge-pathnames "gadgets-loader" Garnet-Gadgets-PathName))
(defparameter Garnet-Debug-Loader (merge-pathnames "debug-loader" Garnet-Debug-PathName))
(defparameter Garnet-Demos-Loader (merge-pathnames "demos-loader" Garnet-Demos-PathName))
(defparameter Garnet-Gilt-Loader (merge-pathnames "gilt-loader" Garnet-Gilt-PathName))
(defparameter Garnet-C32-Loader (merge-pathnames "c32-loader" Garnet-C32-PathName))
(defparameter Garnet-Lapidary-Loader (merge-pathnames "lapidary-loader" Garnet-Lapidary-PathName))
(defparameter garnet-protected-eval-Loader (merge-pathnames "protected-eval-loader" Garnet-Protected-Eval-PathName))
;; Packages to load and the locations of those packages.
(defparameter garnet-load-alist
  ;; Target directories (binaries).
  `(("gg" . ,Garnet-Gadgets-PathName)
    ("gadgets" . ,Garnet-Gadgets-PathName)
    ("truetype" . ,Garnet-Truetype-PathName)
    ("inter" . ,Garnet-Inter-PathName)
    ("gesture" . ,Garnet-Gesture-PathName)
    ("gestures" . ,Garnet-Gesture-PathName)
    ("aggregadgets" . ,Garnet-Aggregadgets-PathName)
    ("debug" . ,Garnet-Debug-PathName)
    ("demos" . ,Garnet-Demos-PathName)
    ("demo" . ,Garnet-Demos-PathName)
    ("gilt" . ,Garnet-Gilt-PathName)
    ("c32" . ,Garnet-C32-PathName)
    ("lapidary" . ,Garnet-Lapidary-PathName)
    ("contrib" . ,Garnet-Contrib-PathName)
    ("protected-eval" . ,Garnet-Protected-Eval-PathName)
    ;; Source directories.
    ;; ("opal-src" . ,Garnet-Opal-Src)
    ("inter-src" . ,Garnet-Inter-Src)
    ("gesture-src" . ,Garnet-Gesture-Src)
    ("gestures-src" . ,Garnet-Gesture-Src)
    ("ps-src" . ,Garnet-PS-Src)
    ("aggregadgets-src" . ,Garnet-Aggregadgets-Src)
    ("gadgets-src" . ,Garnet-Gadgets-Src)
    ("gg-src" . ,Garnet-Gadgets-Src)
    ("debug-src" . ,Garnet-Debug-Src)
    ("demos-src" . ,Garnet-Demos-Src)
    ("demo-src" . ,Garnet-Demos-Src)
    ("gilt-src" . ,Garnet-Gilt-Src)
    ("c32-src" . ,Garnet-C32-Src)
    ("lapidary-src" . ,Garnet-Lapidary-Src)
    ("contrib-src" . ,Garnet-Contrib-Src)
    ("protected-eval-src" . ,Garnet-Protected-eval-Src))
  "Maps a module-name prefix (as used by GARNET-LOAD and GARNET-COMPILE)
to the directory holding that module's files.")
;;; The actual loader code.
(defun Add-Garnet-Load-Prefix (prefix pathname)
  "Register PREFIX so GARNET-LOAD and GARNET-COMPILE resolve it to PATHNAME.
A new entry is pushed onto the front of the alist, so it shadows any
earlier entry with the same prefix."
  (push (cons prefix pathname) Garnet-Load-Alist))
(defun Garnet-Load (filename)
  "Load a file. If the file is prefixed with a Garnet module name, get
  the file from the proper directory in the Garnet source tree.
  Otherwise just load the filename as given."
  (let ((pos (position #\: filename)))
    (if pos
        (let* ((module (subseq filename 0 pos))
               (name (subseq filename (1+ pos)))
               (module-src-directory
                ;; GARNET-LOAD-ALIST maps a module prefix to its directory.
                (or (cdr (assoc module Garnet-Load-Alist :test #'string=))
                    (error "Module ~S is not a Garnet module" module)))
               (src-pathname (make-pathname :name name
                                            ;; For Windows.
                                            :device (pathname-device module-src-directory)
                                            :directory (pathname-directory
                                                        module-src-directory))))
          (force-output *error-output*)
          (format T "~&Loading ~s~%" src-pathname)
          (force-output)
          (load src-pathname))
        ;; else no module name found; load regular.
        (progn
          (format T "No module name given: Loading ~s~%" filename)
          (load filename)))))
;;; Garnet-Compile.
;; This function will compile your garnet files while keeping the
;; sources and binaries separated. If you want to just compile one
;; file from Garnet, like the gadget file gauge.lisp, then you could
;; use this function to compile the source file and automatically
;; save the binary file in the proper directory in the binary tree.
;;
;; Example:
;; (garnet-compile "gadgets:gauge")
;; Takes the source file from Garnet-Gadgets-Src, compiles it, and
;; saves the binary file in Garnet-Gadgets-Pathname (the binary
;; gadgets directory).
;;
;; File type (e.g. "fasl") that COMPILE-FILE produces on this
;; implementation, probed once with a dummy pathname.
(defvar *compiler-extension*
  (pathname-type (compile-file-pathname "foo.lisp")))
;; RGA This will lose on Windows XXX
(defun garnet-mkdir-if-needed (pathname)
  "Creates the directory if it does not exist."
  ;; ENSURE-DIRECTORIES-EXIST makes every missing parent directory of
  ;; PATHNAME and is a no-op when they already exist.
  (ensure-directories-exist pathname :verbose t))
(defun garnet-compile (filename)
  "Compile a single Garnet file, finding the source in the Garnet
  source tree and installing the result in the corresponding
  directory in the binary tree.

  Example:
    (garnet-compile \"gadgets:gauge\") takes the source file from
  Garnet-Gadgets-Src, compiles it, and saves the binary file in
  Garnet-Gadgets-Pathname (the binary gadgets directory)."
  (let* ((pos (position #\: filename))
         (module (if pos
                     (subseq filename 0 pos)
                     ;; Without a module prefix we cannot locate the
                     ;; source and binary directories; refuse to guess.
                     (error
                      "The filename ~A is not prefixed by a garnet module name. Aborting compile"
                      filename)))
         ;; Extract just the name part, without the type if present.
         (filepath (subseq filename (1+ pos)))
         (name (pathname-name filepath))
         ;; BUG FIX: the type must be taken from the full file path;
         ;; NAME has already been stripped of its type component, so
         ;; (pathname-type name) was always NIL.
         (type (pathname-type filepath))
         (module-src (concatenate 'string module "-src"))
         (module-src-directory
          ;; Prefer an explicit "-src" entry; fall back to the module's
          ;; plain entry for modules whose sources live with the binaries.
          (or (cdr (assoc module-src Garnet-Load-Alist
                          :test #'string=))
              (cdr (assoc module Garnet-Load-Alist
                          :test #'string=))
              (error "Module named ~S not found in Garnet-Load-Alist"
                     module)))
         (module-bin-directory
          (or (cdr (assoc module Garnet-Load-Alist
                          :test #'string=))
              (error "Module named ~S not found in Garnet-Load-Alist"
                     module)))
         (src-pathname (make-pathname :name name
                                      ;; If no user supplied type, add default.
                                      :type (or type "lisp")
                                      :device (pathname-device module-src-directory)
                                      :directory (pathname-directory module-src-directory)))
         (bin-pathname (make-pathname :name name
                                      :type *compiler-extension*
                                      :device (pathname-device module-bin-directory)
                                      :directory (pathname-directory module-bin-directory))))
    (force-output *error-output*)
    (format T "~&Compiling ~s~%" src-pathname)
    (format T "for output to ~s~%" bin-pathname)
    (force-output)
    ;; Make sure the target directory exists before compiling into it.
    (garnet-mkdir-if-needed bin-pathname)
    (let ((*compile-verbose* Garnet-Garnet-Debug)
          (*compile-print* Garnet-Garnet-Debug))
      ;; BUG FIX: pass :output-file so the fasl actually lands in the
      ;; binary tree; previously BIN-PATHNAME was computed and printed
      ;; but never used, defeating the source/binary separation.
      (compile-file src-pathname :output-file bin-pathname))))
| null | https://raw.githubusercontent.com/earl-ducaine/cl-garnet/f0095848513ba69c370ed1dc51ee01f0bb4dd108/garnet-loader.lisp | lisp | -*- Mode: LISP; Syntax: Common-Lisp; Package: COMMON-LISP-USER -*-
;; Not likely to be anywhere in the world where this would be useful.
;; The :garnet-debug feature allows many different kinds of run-time
;; checking, and also loads some extra test code. After you have
;; debugged your code and want it to run faster, remove :GARNET-DEBUG
;; The result will be smaller and somewhat faster.
;; To remove :garnet-debug from the *features* list, either defvar
;; Garnet-Garnet-Debug to NIL before you load the garnet-loader, or
;; for hideously ugly code. So, at some point this should be changed
;; to a runtime special variable that dynamically controls this. That
;; will forfeit code size, but will still allow for optimizing
;; production code.
;; The following variable affects compiler policy. Setting it to T
;; uses the settings in *garnet-compile-debug-settings*. Setting it to
(defvar garnet-compile-debug-mode garnet-garnet-debug
"Setting this variable to T sets the policy for the entire system
to make it more debuggable.")
;; NOTE(review): the optimize form below was separated from this defvar
;; by a text-extraction error (the docstring had become the variable's
;; value); restored from the stray "'(optimize (speed 2) ...)" residue.
(defvar garnet-compile-debug-settings
  '(optimize (speed 2) (safety 3) (debug 3) (space 2))
  "Use these settings for globally debugging the system or for debugging
a specific module. They emphasize debuggability at the cost of some speed.

- These settings are type-safe.
- They prevent functions declared inline from being expanded inline.
  Note that as part of this version I have tried to make most
  non-syntactic macros into inline functions.
- They allow all possible debugging features.")
;; NOTE(review): the quoted optimize form was mangled by extraction
;; (bare "(safety 0)" would have been evaluated as a call); restored
;; from the stray "'(optimize (speed 3) ...)" residue above.
(defvar garnet-compile-production-settings
  '(optimize (speed 3) (safety 0) (space 1) (debug 1) (compilation-speed 0))
  "production compiler policy settings. emphasize speed, de-emphasize debugging.")
(defvar default-garnet-proclaim
  (if garnet-compile-debug-mode
      garnet-compile-debug-settings
      garnet-compile-production-settings)
  "Compiler optimization settings used when compiling Garnet: the debug
settings when GARNET-COMPILE-DEBUG-MODE is true at load time, otherwise
the production settings.")
Names of loader files.
Packages to load and the locations of those packages.
Source directories.
The actual loader code.
else no module name found; load regular.
Garnet-Compile.
This function will compile your garnet files while keeping the
use this function to compile the source file and automatically
save the binary file in the proper directory in the binary tree.
Example:
(garnet-compile "gadgets:gauge")
gadgets directory).
else no colon, abort
If no user supplied type, add default. |
(in-package :common-lisp-user)
(defparameter garnet-version-number "3.3.post")
(pushnew :garnet *features*)
(pushnew :garnet-v3 *features*)
(pushnew :garnet-v3.3.post *features*)
(pushnew :garnet-test *features*)
;; from the *features* list and RECOMPILE all of Garnet and your code.
;; simply edit the following defvar to set to nil.
;; TODO(ed): I have a pathological hatred of using *features*. I find it makes
(defvar garnet-garnet-debug t)
(if garnet-garnet-debug
(pushnew :garnet-debug *features*)
(setf *features* (delete :garnet-debug *features*)))
;; NIL uses the ones in *garnet-compile-production-settings*. By
;; default we simply mirror Garnet-Garnet-Debug.
;;   '(optimize (speed 2)
;;              (safety 3)
;;              (debug 3)
;;              (space 2))
;; With:
;;   '(optimize (speed 3)
;;              (space 1)
;;              (debug 1)
;; 1. If you want everything debugged, set Garnet-Compile-Debug-Mode to t.
;; 2. If you want to debug specific modules, set Garnet-Compile-Debug-Mode
;; 3. Otherwise (for 'production' builds) just set Garnet-Compile-Debug-Mode
(defun append-directory (directory sub-directory)
  "This is a little utility for accessing the subdirectory of a
directory. It assumes that 'sub-directory' is directly under
'directory'. SUB-DIRECTORY may be a single component or a list of
components."
  ;; NOTE(review): only the :directory component of DIRECTORY survives;
  ;; host/device/name parts are dropped by the MAKE-PATHNAME call below.
  ;; Confirm this is acceptable on Windows, where :device matters.
  (let ((dir (pathname-directory directory))
	(subdir (if (listp sub-directory)
		    sub-directory
		    (list sub-directory))))
    (make-pathname :directory (append dir subdir))))
(defun get-garnet-binary-pathname ()
  "Return the directory holding compiled Garnet files (the \"src\" tree)."
  (append-directory org.xoanonos.asdf-app-config:*base-directory* "src"))
(defvar garnet-src-pathname (append-directory org.xoanonos.asdf-app-config:*base-directory* "src"))
(defvar garnet-lib-pathname (append-directory org.xoanonos.asdf-app-config:*base-directory* "lib"))
(defvar garnet-binary-pathname (get-garnet-binary-pathname))
;; NOTE(review): reconstructed from extraction-garbled residue
;; "( append - directory \" truetype \" ) )"; follows the X-Src pattern
;; used by every other module in this file.
(defvar Garnet-Truetype-Src
  (append-directory Garnet-Src-Pathname "truetype"))
(defvar Garnet-Truetype-Pathname
(append-directory Garnet-Binary-Pathname "truetype"))
(defvar Garnet-Inter-Src
(append-directory Garnet-Src-Pathname "inter"))
(defvar Garnet-Inter-Pathname
(append-directory Garnet-Binary-Pathname "inter"))
(defvar Garnet-Gesture-Src
(append-directory Garnet-Src-Pathname "gesture"))
(defvar Garnet-Gesture-Pathname
(append-directory Garnet-Binary-Pathname "gesture"))
(defvar Garnet-Aggregadgets-Src
(append-directory Garnet-Src-Pathname "aggregadgets"))
(defvar Garnet-Aggregadgets-Pathname
(append-directory Garnet-Binary-Pathname "aggregadgets"))
(defvar Garnet-PS-Src
(append-directory Garnet-Src-Pathname "ps"))
(defvar Garnet-Gadgets-Src
(append-directory Garnet-Src-Pathname "gadgets"))
(defvar Garnet-Gadgets-Pathname
(append-directory Garnet-Binary-Pathname "gadgets"))
(defvar Garnet-Debug-Src
(append-directory Garnet-Src-Pathname "debug"))
(defvar Garnet-Debug-Pathname
(append-directory Garnet-Binary-Pathname "debug"))
(defvar Garnet-Demos-Src
(append-directory Garnet-Src-Pathname "demos"))
(defvar Garnet-Demos-Pathname
(append-directory Garnet-Binary-Pathname "demos"))
(defvar Garnet-Gilt-Src
(append-directory Garnet-Src-Pathname "gilt"))
(defvar Garnet-Gilt-Pathname
(append-directory Garnet-Binary-Pathname "gilt"))
(defvar Garnet-C32-Src
(append-directory Garnet-Src-Pathname "c32"))
(defvar Garnet-C32-Pathname
(append-directory Garnet-Binary-Pathname "c32"))
(defvar Garnet-Lapidary-Src
(append-directory Garnet-Src-Pathname "lapidary"))
(defvar Garnet-Lapidary-Pathname
(append-directory Garnet-Binary-Pathname "lapidary"))
(defvar Garnet-Contrib-Src
(append-directory Garnet-Src-Pathname "contrib"))
(defvar Garnet-Contrib-Pathname
(append-directory Garnet-Binary-Pathname "contrib"))
(defvar Garnet-Protected-Eval-Src
(append-directory Garnet-Src-Pathname "protected-eval"))
(defvar Garnet-Protected-Eval-Pathname
(append-directory Garnet-Binary-Pathname "protected-eval"))
(defvar Garnet-Bitmap-Pathname
(append-directory Garnet-Lib-Pathname "bitmaps"))
(defvar Garnet-Pixmap-Pathname
(append-directory Garnet-Lib-Pathname "pixmaps"))
(defvar Garnet-Gilt-Bitmap-Pathname
(append-directory Garnet-Lib-Pathname "gilt"))
(defvar Garnet-C32-Bitmap-Pathname
(append-directory Garnet-Lib-Pathname "c32"))
(defvar Garnet-DataFile-Pathname
(append-directory Garnet-Lib-Pathname "data"))
(defvar Garnet-Gesture-Data-Pathname
(append-directory Garnet-Lib-Pathname "gesture"))
(defparameter Garnet-Inter-Loader (merge-pathnames "inter-loader" Garnet-Inter-PathName))
(defparameter Garnet-Gesture-Loader (merge-pathnames "gesture-loader" Garnet-Gesture-PathName))
(defparameter Garnet-Aggregadgets-Loader (merge-pathnames "aggregadgets-loader" Garnet-Aggregadgets-PathName))
(defparameter Garnet-Aggregraphs-Loader (merge-pathnames "aggregraphs-loader" Garnet-Aggregadgets-PathName))
(defparameter Garnet-Gadgets-Loader (merge-pathnames "gadgets-loader" Garnet-Gadgets-PathName))
(defparameter Garnet-Debug-Loader (merge-pathnames "debug-loader" Garnet-Debug-PathName))
(defparameter Garnet-Demos-Loader (merge-pathnames "demos-loader" Garnet-Demos-PathName))
(defparameter Garnet-Gilt-Loader (merge-pathnames "gilt-loader" Garnet-Gilt-PathName))
(defparameter Garnet-C32-Loader (merge-pathnames "c32-loader" Garnet-C32-PathName))
(defparameter Garnet-Lapidary-Loader (merge-pathnames "lapidary-loader" Garnet-Lapidary-PathName))
(defparameter garnet-protected-eval-Loader (merge-pathnames "protected-eval-loader" Garnet-Protected-Eval-PathName))
(defparameter garnet-load-alist
  ;; Target directories (binaries).
  `(("gg" . ,Garnet-Gadgets-PathName)
    ("gadgets" . ,Garnet-Gadgets-PathName)
    ("truetype" . ,Garnet-Truetype-PathName)
    ("inter" . ,Garnet-Inter-PathName)
    ("gesture" . ,Garnet-Gesture-PathName)
    ("gestures" . ,Garnet-Gesture-PathName)
    ("aggregadgets" . ,Garnet-Aggregadgets-PathName)
    ("debug" . ,Garnet-Debug-PathName)
    ("demos" . ,Garnet-Demos-PathName)
    ("demo" . ,Garnet-Demos-PathName)
    ("gilt" . ,Garnet-Gilt-PathName)
    ("c32" . ,Garnet-C32-PathName)
    ("lapidary" . ,Garnet-Lapidary-PathName)
    ("contrib" . ,Garnet-Contrib-PathName)
    ("protected-eval" . ,Garnet-Protected-Eval-PathName)
    ;; Source directories.
    ;; ("opal-src" . ,Garnet-Opal-Src)
    ("inter-src" . ,Garnet-Inter-Src)
    ("gesture-src" . ,Garnet-Gesture-Src)
    ("gestures-src" . ,Garnet-Gesture-Src)
    ("ps-src" . ,Garnet-PS-Src)
    ("aggregadgets-src" . ,Garnet-Aggregadgets-Src)
    ("gadgets-src" . ,Garnet-Gadgets-Src)
    ("gg-src" . ,Garnet-Gadgets-Src)
    ("debug-src" . ,Garnet-Debug-Src)
    ("demos-src" . ,Garnet-Demos-Src)
    ("demo-src" . ,Garnet-Demos-Src)
    ("gilt-src" . ,Garnet-Gilt-Src)
    ("c32-src" . ,Garnet-C32-Src)
    ("lapidary-src" . ,Garnet-Lapidary-Src)
    ("contrib-src" . ,Garnet-Contrib-Src)
    ("protected-eval-src" . ,Garnet-Protected-eval-Src))
  "Maps a module-name prefix (as used by GARNET-LOAD and GARNET-COMPILE)
to the directory holding that module's files.")
(defun Add-Garnet-Load-Prefix (prefix pathname)
  "Register PREFIX so GARNET-LOAD and GARNET-COMPILE resolve it to PATHNAME.
A new entry is pushed onto the front of the alist, so it shadows any
earlier entry with the same prefix."
  (push (cons prefix pathname) Garnet-Load-Alist))
(defun Garnet-Load (filename)
  "Load a file. If the file is prefixed with a Garnet module name, get
the file from the proper directory in the Garnet source tree.
Otherwise just load the filename as given."
  (let ((pos (position #\: filename)))
    (if pos
        (let* ((module (subseq filename 0 pos))
               (name (subseq filename (1+ pos)))
               (module-src-directory
                ;; GARNET-LOAD-ALIST maps a module prefix to its directory.
                (or (cdr (assoc module Garnet-Load-Alist :test #'string=))
                    (error "Module ~S is not a Garnet module" module)))
               (src-pathname (make-pathname :name name
                                            ;; For Windows.
                                            :device (pathname-device module-src-directory)
                                            :directory (pathname-directory
                                                        module-src-directory))))
          (force-output *error-output*)
          (format T "~&Loading ~s~%" src-pathname)
          (force-output)
          (load src-pathname))
        ;; else no module name found; load regular.
        (progn
          (format T "No module name given: Loading ~s~%" filename)
          (load filename)))))
;; sources and binaries separated. If you want to just compile one
;; file from Garnet, like the gadget file gauge.lisp, then you could
;; Takes the source file from Garnet-Gadgets-Src, compiles it, and
;; saves the binary file in Garnet-Gadgets-Pathname (the binary
(defvar *compiler-extension*
(pathname-type (compile-file-pathname "foo.lisp")))
;; RGA This will lose on Windows XXX
(defun garnet-mkdir-if-needed (dirname)
"Creates the directory if it does not exist."
(ensure-directories-exist dirname :verbose t))
(defun garnet-compile (filename)
  "Compile a single Garnet file, finding the source in the Garnet
source tree and installing the result in the corresponding
directory in the binary tree.

Example:
  (garnet-compile \"gadgets:gauge\") takes the source file from
Garnet-Gadgets-Src, compiles it, and saves the binary file in
Garnet-Gadgets-Pathname (the binary gadgets directory)."
  (let* ((pos (position #\: filename))
         (module (if pos
                     (subseq filename 0 pos)
                     ;; Without a module prefix we cannot locate the
                     ;; source and binary directories; refuse to guess.
                     (error
                      "The filename ~A is not prefixed by a garnet module name. Aborting compile"
                      filename)))
         ;; Extract just the name part, without the type if present.
         (filepath (subseq filename (1+ pos)))
         (name (pathname-name filepath))
         ;; BUG FIX: the type must be taken from the full file path;
         ;; NAME has already been stripped of its type component, so
         ;; (pathname-type name) was always NIL.
         (type (pathname-type filepath))
         (module-src (concatenate 'string module "-src"))
         (module-src-directory
          ;; Prefer an explicit "-src" entry; fall back to the module's
          ;; plain entry for modules whose sources live with the binaries.
          (or (cdr (assoc module-src Garnet-Load-Alist
                          :test #'string=))
              (cdr (assoc module Garnet-Load-Alist
                          :test #'string=))
              (error "Module named ~S not found in Garnet-Load-Alist"
                     module)))
         (module-bin-directory
          (or (cdr (assoc module Garnet-Load-Alist
                          :test #'string=))
              (error "Module named ~S not found in Garnet-Load-Alist"
                     module)))
         (src-pathname (make-pathname :name name
                                      ;; If no user supplied type, add default.
                                      :type (or type "lisp")
                                      :device (pathname-device module-src-directory)
                                      :directory (pathname-directory module-src-directory)))
         (bin-pathname (make-pathname :name name
                                      :type *compiler-extension*
                                      :device (pathname-device module-bin-directory)
                                      :directory (pathname-directory module-bin-directory))))
    (force-output *error-output*)
    (format T "~&Compiling ~s~%" src-pathname)
    (format T "for output to ~s~%" bin-pathname)
    (force-output)
    ;; Make sure the target directory exists before compiling into it.
    (garnet-mkdir-if-needed bin-pathname)
    (let ((*compile-verbose* Garnet-Garnet-Debug)
          (*compile-print* Garnet-Garnet-Debug))
      ;; BUG FIX: pass :output-file so the fasl actually lands in the
      ;; binary tree; previously BIN-PATHNAME was computed and printed
      ;; but never used, defeating the source/binary separation.
      (compile-file src-pathname :output-file bin-pathname))))
|
29f385679918ccd8ef0a4120939fee366ce7c3f33b691255347997d7013ccf32 | kupl/FixML | sub20.ml |
(* Expression AST for the well-formedness benchmark:
   V v      -- a variable occurrence
   P (v, e) -- a binder introducing v over body e
   C (l, r) -- a compound of two sub-expressions
   NOTE(review): constructor semantics inferred from usage in CHECK
   below -- confirm against the benchmark's specification. *)
type exp =
	| V of var
	| P of var * exp
	| C of exp * exp
and var = string
(* Decide whether an expression is well formed: every variable
   occurrence must be matched by an enclosing binder, as tracked through
   the specific P/C shapes below. Behavior is identical to the original
   submission, including its fall-through-to-false final branch. *)
let rec check : exp -> bool
= fun expr ->
  match expr with
  | P (x, V y) -> x = y
  | P (x, P (inner, body)) ->
    (match body with
     | C (l, r) ->
       (check (P (x, l)) || check (P (inner, l)))
       && (check (P (x, r)) || check (P (inner, r)))
     | _ -> check (P (inner, body)) || check (P (x, body)))
  | P (x, C (l, r)) ->
    (match (l, r) with
     | (P (inner, body), rest) ->
       check (P (x, P (inner, body))) && check (P (x, P (inner, rest)))
     | (rest, P (inner, body)) ->
       check (P (x, P (inner, body))) && check (P (x, P (inner, rest)))
     | _ -> check (P (x, l)) && check (P (x, r)))
  | _ -> false
type exp =
| V of var
| P of var * exp
| C of exp * exp
and var = string
(* Well-formedness check: true when every variable occurrence is bound
   by an enclosing P binder. NOTE(review): only the P/C shapes matched
   below are handled; anything else (e.g. a bare V or C at top level)
   falls through to false -- confirm against the intended spec. *)
let rec check : exp -> bool
= fun exp ->
	match exp with
	|P(s, V a) -> if (s= a) then true else false (* a variable must name its binder *)
	|P(s, P(s1, e1)) ->
		begin match e1 with
		|C(e2, e3)-> ( ( check(P(s, e2)) ||check (P(s1, e2)) )&&( check (P(s, e3))||check (P(s1, e3)) ) ) (* each side may use either binder *)
		|_ -> if ((check (P(s1,e1)) =true)||(check (P(s,e1)))) then true else false
		end
	|P(s, C(e1, e2)) ->
		begin match (e1, e2) with
		|(P(s1, e3), e) -> if (check (P(s, P(s1,e3))) &&check(P(s, P(s1,e)))) then true else false
		|(e, P(s1, e3)) -> if (check (P(s, P(s1,e3))) &&check(P(s, P(s1,e)))) then true else false
		|_ -> if (check (P(s, e1))&& check(P(s,e2))) then true else false (* both halves must be closed under s *)
		end
	|_ -> false
| |
6f0a904f2a47a4e8ccdb601c3b1aaedd5fb37106c16f55833420259f68a3a1ba | fjvallarino/monomer | BaseTheme.hs | |
Module : . Core . Themes . BaseTheme
Copyright : ( c ) 2018
License : BSD-3 - Clause ( see the LICENSE file )
Maintainer :
Stability : experimental
Portability : non - portable
Provides a base theme , with fixed sizes and padding but configurable colors .
Module : Monomer.Core.Themes.BaseTheme
Copyright : (c) 2018 Francisco Vallarino
License : BSD-3-Clause (see the LICENSE file)
Maintainer :
Stability : experimental
Portability : non-portable
Provides a base theme, with fixed sizes and padding but configurable colors.
-}
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
module Monomer.Core.Themes.BaseTheme (
BaseThemeColors(..),
baseTheme
) where
import Control.Lens ((&), (^.), (.~), (?~), non)
import Data.Default
import Monomer.Core.Combinators
import Monomer.Core.Style
import Monomer.Graphics.Types
import qualified Monomer.Core.Lens as L
import qualified Monomer.Graphics.Lens as L
-- | Builds a complete 'Theme' from the given palette of colors; each
-- widget state is derived by the corresponding base* helper.
baseTheme :: BaseThemeColors -> Theme
baseTheme colors = Theme {
  _themeClearColor = clearColor colors,
  _themeSectionColor = sectionColor colors,
  _themeUserColorMap = def,
  _themeBasic = baseBasic colors,
  _themeHover = baseHover colors,
  _themeFocus = baseFocus colors,
  _themeFocusHover = baseFocusHover colors,
  _themeActive = baseActive colors,
  _themeDisabled = baseDisabled colors
}
-- | Customizable colors for the theme, one field per widget/state pair.
data BaseThemeColors = BaseThemeColors {
  -- Window background and section background.
  clearColor :: Color,
  sectionColor :: Color,
  -- Regular buttons.
  btnFocusBorder :: Color,
  btnBgBasic :: Color,
  btnBgHover :: Color,
  btnBgFocus :: Color,
  btnBgActive :: Color,
  btnBgDisabled :: Color,
  btnText :: Color,
  btnTextDisabled :: Color,
  -- Main (primary) buttons.
  btnMainFocusBorder :: Color,
  btnMainBgBasic :: Color,
  btnMainBgHover :: Color,
  btnMainBgFocus :: Color,
  btnMainBgActive :: Color,
  btnMainBgDisabled :: Color,
  btnMainText :: Color,
  btnMainTextDisabled :: Color,
  -- Dialogs.
  dialogBg :: Color,
  dialogBorder :: Color,
  dialogText :: Color,
  dialogTitleText :: Color,
  emptyOverlay :: Color,
  shadow :: Color,
  -- External links, per state.
  externalLinkBasic :: Color,
  externalLinkHover :: Color,
  externalLinkFocus :: Color,
  externalLinkActive :: Color,
  externalLinkDisabled :: Color,
  -- Icons.
  iconFg :: Color,
  iconBg :: Color,
  inputIconFg :: Color,
  -- Input widgets (text fields, checkbox, radio, slider, dial):
  -- bg/fg/snd/hl per state, plus selection and text colors.
  inputBorder :: Color,
  inputFocusBorder :: Color,
  inputBgBasic :: Color,
  inputBgHover :: Color,
  inputBgFocus :: Color,
  inputBgActive :: Color,
  inputBgDisabled :: Color,
  inputFgBasic :: Color,
  inputFgHover :: Color,
  inputFgFocus :: Color,
  inputFgActive :: Color,
  inputFgDisabled :: Color,
  inputSndBasic :: Color,
  inputSndHover :: Color,
  inputSndFocus :: Color,
  inputSndActive :: Color,
  inputSndDisabled :: Color,
  inputHlBasic :: Color,
  inputHlHover :: Color,
  inputHlFocus :: Color,
  inputHlActive :: Color,
  inputHlDisabled :: Color,
  inputSelBasic :: Color,
  inputSelFocus :: Color,
  inputText :: Color,
  inputTextDisabled :: Color,
  -- Labels.
  labelText :: Color,
  -- Scroll bars.
  scrollBarBasic :: Color,
  scrollThumbBasic :: Color,
  scrollBarHover :: Color,
  scrollThumbHover :: Color,
  -- Select list / dropdown items.
  slMainBg :: Color,
  slNormalBgBasic :: Color,
  slNormalBgHover :: Color,
  slNormalText :: Color,
  slNormalFocusBorder :: Color,
  slSelectedBgBasic :: Color,
  slSelectedBgHover :: Color,
  slSelectedText :: Color,
  slSelectedFocusBorder :: Color,
  -- Tooltips.
  tooltipBorder :: Color,
  tooltipBg :: Color,
  tooltipText :: Color
} deriving (Eq, Show)
-- | 1px border in the regular button's focus color.
btnBorderFocus :: BaseThemeColors -> Border
btnBorderFocus themeMod = border 1 (btnFocusBorder themeMod)
-- | 1px border in the main button's focus color.
btnMainBorderFocus :: BaseThemeColors -> Border
btnMainBorderFocus themeMod = border 1 (btnMainFocusBorder themeMod)
-- | 1px border in the input widgets' focus color.
inputBorderFocus :: BaseThemeColors -> Border
inputBorderFocus themeMod = border 1 (inputFocusBorder themeMod)
-- | Default body text: "Regular" face at size 16 (no color; callers set it).
normalFont :: TextStyle
normalFont = def
  & L.font ?~ Font "Regular"
  & L.fontSize ?~ FontSize 16
  & L.fontSpaceV ?~ FontSpace 2
-- | Title text: "Bold" face at size 20 (no color; callers set it).
titleFont :: TextStyle
titleFont = def
  & L.font ?~ Font "Bold"
  & L.fontSize ?~ FontSize 20
  & L.fontSpaceV ?~ FontSpace 2
-- | Style for the color popup: fixed 400px width, padded, drawn over
-- the theme's section background with rounded corners.
colorPopupStyle :: BaseThemeColors -> StyleState
colorPopupStyle colors =
  width 400 <> padding 10 <> bgColor (sectionColor colors) <> radius 4
-- | Body text style for dialog messages: normal font in the dialog text color.
dialogMsgBodyFont :: BaseThemeColors -> TextStyle
dialogMsgBodyFont themeMod = fontStyle where
  fontStyle = normalFont
    & L.fontColor ?~ dialogText themeMod
-- | Text style for external links in their resting state.
externalLinkFont :: BaseThemeColors -> TextStyle
externalLinkFont themeMod = fontStyle where
  fontStyle = normalFont
    & L.fontColor ?~ externalLinkBasic themeMod
-- | Text style for plain labels: the theme's label color, left aligned.
labelFont :: BaseThemeColors -> TextStyle
labelFont colors =
  (normalFont & L.fontColor ?~ labelText colors) <> textLeft
-- | Resting style for regular buttons: centered text over the basic
-- button background, with a border matching the background.
btnStyle :: BaseThemeColors -> StyleState
btnStyle themeMod = def
  & L.text ?~ (normalFont & L.fontColor ?~ btnText themeMod) <> textCenter
  & L.bgColor ?~ btnBgBasic themeMod
  & L.border ?~ border 1 (btnBgBasic themeMod)
  & L.padding ?~ padding 8
  & L.radius ?~ radius 4
-- | Resting style for main (primary) buttons: the regular button style
-- with the main text/background colors overridden.
btnMainStyle :: BaseThemeColors -> StyleState
btnMainStyle themeMod = btnStyle themeMod
  & L.text . non def . L.fontColor ?~ btnMainText themeMod
  & L.bgColor ?~ btnMainBgBasic themeMod
  & L.border ?~ border 1 (btnMainBgBasic themeMod)
-- | Shared resting style for text-like inputs (text field, dropdown,
-- text area): input colors, selection highlight, 1px border.
textInputStyle :: BaseThemeColors -> StyleState
textInputStyle themeMod = style where
  textStyle = normalFont
    & L.fontColor ?~ inputText themeMod
  style = def
    & L.text ?~ textStyle
    & L.bgColor ?~ inputBgBasic themeMod
    & L.fgColor ?~ inputFgBasic themeMod
    -- secondary color is rendered semi-transparent (alpha 0.6)
    & L.sndColor ?~ (inputSndBasic themeMod & L.a .~ 0.6)
    & L.hlColor ?~ inputSelBasic themeMod
    & L.border ?~ border 1 (inputBorder themeMod)
    & L.radius ?~ radius 4
    & L.padding ?~ padding 8
-- | Numeric fields use the text input style with right-aligned text.
numericInputStyle :: BaseThemeColors -> StyleState
numericInputStyle themeMod = textInputStyle themeMod
  & L.text . non def . L.alignH ?~ ATRight
-- | Date fields use the text input style with right-aligned text.
dateInputStyle :: BaseThemeColors -> StyleState
dateInputStyle themeMod = textInputStyle themeMod
  & L.text . non def . L.alignH ?~ ATRight
-- | Time fields use the text input style with right-aligned text.
timeInputStyle :: BaseThemeColors -> StyleState
timeInputStyle themeMod = textInputStyle themeMod
  & L.text . non def . L.alignH ?~ ATRight
-- | Resting style for an unselected select-list/dropdown row:
-- left-aligned text over the normal row background.
selectListItemStyle :: BaseThemeColors -> StyleState
selectListItemStyle themeMod = def
  & L.text ?~ (normalFont & L.fontColor ?~ slNormalText themeMod)
  & L.text . non def . L.alignH ?~ ATLeft
  & L.bgColor ?~ slNormalBgBasic themeMod
  & L.border ?~ border 1 (slNormalBgBasic themeMod)
  & L.padding ?~ padding 8
-- | Style for the currently selected row: the normal row style with
-- the selected text/background colors overridden.
selectListItemSelectedStyle :: BaseThemeColors -> StyleState
selectListItemSelectedStyle themeMod = selectListItemStyle themeMod
  & L.text . non def . L.fontColor ?~ slSelectedText themeMod
  & L.bgColor ?~ slSelectedBgBasic themeMod
  & L.border ?~ border 1 (slSelectedBgBasic themeMod)
-- | Tooltip style: slightly smaller regular text over the tooltip
-- background, bordered and rounded.
tooltipStyle :: BaseThemeColors -> StyleState
tooltipStyle themeMod = def
  & L.text . non def . L.font ?~ Font "Regular"
  & L.text . non def . L.fontSize ?~ FontSize 14
  & L.text . non def . L.fontColor ?~ tooltipText themeMod
  & L.bgColor ?~ tooltipBg themeMod
  & L.border ?~ border 1 (tooltipBorder themeMod)
  & L.padding ?~ padding 6
  & L.radius ?~ radius 4
-- | Widget styles for the resting (basic) state. The hover/focus/active
-- states below are derived by overriding individual attributes of this
-- record, so this is the single source of default sizes and colors.
baseBasic :: BaseThemeColors -> ThemeState
baseBasic themeMod = def
  & L.emptyOverlayStyle .~ bgColor (emptyOverlay themeMod)
  & L.emptyOverlayStyle . L.padding ?~ padding 8
  & L.shadowColor .~ shadow themeMod
  -- buttons
  & L.btnStyle .~ btnStyle themeMod
  & L.btnMainStyle .~ btnMainStyle themeMod
  & L.colorPopupStyle .~ colorPopupStyle themeMod
  -- checkbox / dial
  & L.checkboxWidth .~ 20
  & L.checkboxStyle . L.fgColor ?~ inputFgBasic themeMod
  & L.checkboxStyle . L.hlColor ?~ inputHlBasic themeMod
  & L.checkboxStyle . L.radius ?~ radius 4
  & L.dateFieldStyle .~ dateInputStyle themeMod
  & L.dialWidth .~ 50
  & L.dialStyle . L.fgColor ?~ inputFgBasic themeMod
  & L.dialStyle . L.sndColor ?~ inputSndBasic themeMod
  -- dialogs
  & L.dialogTitleStyle . L.text ?~ (titleFont & L.fontColor ?~ dialogTitleText themeMod)
  & L.dialogTitleStyle . L.padding ?~ padding 10
  & L.dialogFrameStyle . L.padding ?~ padding 5
  & L.dialogFrameStyle . L.radius ?~ radius 10
  & L.dialogFrameStyle . L.bgColor ?~ dialogBg themeMod
  & L.dialogFrameStyle . L.border ?~ border 1 (dialogBorder themeMod)
  & L.dialogCloseIconStyle . L.bgColor ?~ iconBg themeMod
  & L.dialogCloseIconStyle . L.fgColor ?~ iconFg themeMod
  & L.dialogCloseIconStyle . L.padding ?~ padding 4
  & L.dialogCloseIconStyle . L.radius ?~ radius 8
  & L.dialogCloseIconStyle . L.sizeReqW ?~ width 16
  & L.dialogCloseIconStyle . L.sizeReqH ?~ width 16
  & L.dialogButtonsStyle . L.padding ?~ padding 20 <> paddingT 10
  & L.dialogMsgBodyStyle . L.padding ?~ padding 20
  & L.dialogMsgBodyStyle . L.text ?~ dialogMsgBodyFont themeMod
  & L.dialogMsgBodyStyle . L.sizeReqW ?~ maxWidth 600
  -- dropdown
  & L.dropdownStyle .~ textInputStyle themeMod
  & L.dropdownStyle . L.fgColor ?~ inputIconFg themeMod
  & L.dropdownStyle . L.text . non def . L.alignH ?~ ATLeft
  & L.dropdownMaxHeight .~ 200
  & L.dropdownListStyle . L.bgColor ?~ slMainBg themeMod
  & L.dropdownItemStyle .~ selectListItemStyle themeMod
  & L.dropdownItemSelectedStyle .~ selectListItemSelectedStyle themeMod
  & L.externalLinkStyle . L.text ?~ externalLinkFont themeMod
  & L.labelStyle . L.text ?~ labelFont themeMod
  & L.numericFieldStyle .~ numericInputStyle themeMod
  & L.optionBtnOnStyle .~ btnMainStyle themeMod
  & L.optionBtnOffStyle .~ btnStyle themeMod
  -- select list
  & L.selectListStyle . L.bgColor ?~ slMainBg themeMod
  & L.selectListStyle . L.border ?~ border 1 (slMainBg themeMod)
  & L.selectListItemStyle .~ selectListItemStyle themeMod
  & L.selectListItemSelectedStyle .~ selectListItemSelectedStyle themeMod
  & L.radioWidth .~ 20
  & L.radioStyle . L.fgColor ?~ inputFgBasic themeMod
  & L.radioStyle . L.hlColor ?~ inputHlBasic themeMod
  -- scrolling defaults (sizes in pixels)
  & L.scrollOverlay .~ False
  & L.scrollFollowFocus .~ True
  & L.scrollBarColor .~ scrollBarBasic themeMod
  & L.scrollThumbColor .~ scrollThumbBasic themeMod
  & L.scrollBarWidth .~ 8
  & L.scrollThumbWidth .~ 8
  & L.scrollThumbMinSize .~ 25
  & L.scrollThumbRadius .~ 4
  & L.scrollWheelRate .~ 10
  & L.separatorLineWidth .~ 1
  & L.separatorLineStyle . L.fgColor ?~ inputSndBasic themeMod
  -- slider
  & L.sliderRadius ?~ 2
  & L.sliderThumbFactor .~ 1.25
  & L.sliderWidth .~ 10
  & L.sliderStyle . L.fgColor ?~ inputFgBasic themeMod
  & L.sliderStyle . L.hlColor ?~ inputHlBasic themeMod
  & L.sliderStyle . L.sndColor ?~ inputSndBasic themeMod
  -- text inputs and toggles
  & L.textAreaStyle .~ textInputStyle themeMod
  & L.textFieldStyle .~ textInputStyle themeMod
  & L.timeFieldStyle .~ timeInputStyle themeMod
  & L.toggleBtnOnStyle .~ btnMainStyle themeMod
  & L.toggleBtnOffStyle .~ btnStyle themeMod
  & L.tooltipStyle .~ tooltipStyle themeMod
-- | Widget styles for the hover state: starts from 'baseBasic' and
-- overrides hover backgrounds/borders and sets the mouse cursor per
-- widget (hand for clickables, I-beam for text inputs).
baseHover :: BaseThemeColors -> ThemeState
baseHover themeMod = baseBasic themeMod
  & L.btnStyle . L.bgColor ?~ btnBgHover themeMod
  & L.btnStyle . L.border ?~ border 1 (btnBgHover themeMod)
  & L.btnStyle . L.cursorIcon ?~ CursorHand
  & L.btnMainStyle . L.bgColor ?~ btnMainBgHover themeMod
  & L.btnMainStyle . L.border ?~ border 1 (btnMainBgHover themeMod)
  & L.btnMainStyle . L.cursorIcon ?~ CursorHand
  & L.checkboxStyle . L.fgColor ?~ inputFgHover themeMod
  & L.checkboxStyle . L.hlColor ?~ inputHlHover themeMod
  & L.checkboxStyle . L.cursorIcon ?~ CursorHand
  & L.dateFieldStyle . L.cursorIcon ?~ CursorIBeam
  -- dial drags vertically, hence the vertical-resize cursor
  & L.dialStyle . L.fgColor ?~ inputFgHover themeMod
  & L.dialStyle . L.sndColor ?~ inputSndHover themeMod
  & L.dialStyle . L.cursorIcon ?~ CursorSizeV
  & L.dialogCloseIconStyle . L.cursorIcon ?~ CursorHand
  & L.dropdownStyle . L.bgColor ?~ inputBgHover themeMod
  & L.dropdownStyle . L.cursorIcon ?~ CursorHand
  & L.dropdownListStyle . L.border ?~ border 1 (slMainBg themeMod)
  & L.dropdownItemStyle . L.bgColor ?~ slNormalBgHover themeMod
  & L.dropdownItemStyle . L.border ?~ border 1 (slNormalBgHover themeMod)
  & L.dropdownItemStyle . L.cursorIcon ?~ CursorHand
  & L.dropdownItemSelectedStyle . L.bgColor ?~ slSelectedBgHover themeMod
  & L.dropdownItemSelectedStyle . L.border ?~ border 1 (slSelectedBgHover themeMod)
  & L.dropdownItemSelectedStyle . L.cursorIcon ?~ CursorHand
  -- links underline on hover
  & L.externalLinkStyle . L.text . non def . L.fontColor ?~ externalLinkHover themeMod
  & L.externalLinkStyle . L.text . non def . L.underline ?~ True
  & L.externalLinkStyle . L.cursorIcon ?~ CursorHand
  & L.numericFieldStyle . L.cursorIcon ?~ CursorIBeam
  & L.optionBtnOnStyle . L.bgColor ?~ btnMainBgHover themeMod
  & L.optionBtnOnStyle . L.border ?~ border 1 (btnMainBgHover themeMod)
  & L.optionBtnOnStyle . L.cursorIcon ?~ CursorHand
  & L.optionBtnOffStyle . L.bgColor ?~ btnBgHover themeMod
  & L.optionBtnOffStyle . L.border ?~ border 1 (btnBgHover themeMod)
  & L.optionBtnOffStyle . L.cursorIcon ?~ CursorHand
  & L.selectListItemStyle . L.bgColor ?~ slNormalBgHover themeMod
  & L.selectListItemStyle . L.border ?~ border 1 (slNormalBgHover themeMod)
  & L.selectListItemStyle . L.cursorIcon ?~ CursorHand
  & L.selectListItemSelectedStyle . L.bgColor ?~ slSelectedBgHover themeMod
  & L.selectListItemSelectedStyle . L.border ?~ border 1 (slSelectedBgHover themeMod)
  & L.selectListItemSelectedStyle . L.cursorIcon ?~ CursorHand
  & L.radioStyle . L.fgColor ?~ inputFgHover themeMod
  & L.radioStyle . L.hlColor ?~ inputHlHover themeMod
  & L.radioStyle . L.cursorIcon ?~ CursorHand
  & L.scrollBarColor .~ scrollBarHover themeMod
  & L.scrollThumbColor .~ scrollThumbHover themeMod
  & L.sliderStyle . L.fgColor ?~ inputFgHover themeMod
  & L.sliderStyle . L.hlColor ?~ inputHlHover themeMod
  & L.sliderStyle . L.sndColor ?~ inputSndHover themeMod
  & L.sliderStyle . L.cursorIcon ?~ CursorHand
  & L.textAreaStyle . L.cursorIcon ?~ CursorIBeam
  & L.textFieldStyle . L.cursorIcon ?~ CursorIBeam
  & L.timeFieldStyle . L.cursorIcon ?~ CursorIBeam
  & L.toggleBtnOnStyle . L.bgColor ?~ btnMainBgHover themeMod
  & L.toggleBtnOnStyle . L.border ?~ border 1 (btnMainBgHover themeMod)
  & L.toggleBtnOnStyle . L.cursorIcon ?~ CursorHand
  & L.toggleBtnOffStyle . L.bgColor ?~ btnBgHover themeMod
  & L.toggleBtnOffStyle . L.border ?~ border 1 (btnBgHover themeMod)
  & L.toggleBtnOffStyle . L.cursorIcon ?~ CursorHand
-- | Theme overrides applied when a widget has keyboard focus: focus borders
-- on buttons and inputs, plus the Focus variants of the input colors.
baseFocus :: BaseThemeColors -> ThemeState
baseFocus colors = baseBasic colors
  & L.btnStyle . L.bgColor ?~ btnBgFocus colors
  & L.btnStyle . L.border ?~ btnBorderFocus colors
  & L.btnMainStyle . L.bgColor ?~ btnMainBgFocus colors
  & L.btnMainStyle . L.border ?~ btnMainBorderFocus colors
  & L.checkboxStyle . L.fgColor ?~ inputFgFocus colors
  & L.checkboxStyle . L.hlColor ?~ inputHlFocus colors
  & L.dateFieldStyle . L.border ?~ inputBorderFocus colors
  & L.dateFieldStyle . L.hlColor ?~ inputSelFocus colors
  & L.dialStyle . L.fgColor ?~ inputFgFocus colors
  & L.dialStyle . L.sndColor ?~ inputSndFocus colors
  & L.dropdownStyle . L.border ?~ inputBorderFocus colors
  & L.dropdownListStyle . L.border ?~ border 1 (slMainBg colors)
  & L.dropdownItemStyle . L.border ?~ border 1 (slNormalFocusBorder colors)
  & L.dropdownItemSelectedStyle . L.border ?~ border 1 (slSelectedFocusBorder colors)
  & L.externalLinkStyle . L.text . non def . L.fontColor ?~ externalLinkFocus colors
  & L.numericFieldStyle . L.border ?~ inputBorderFocus colors
  & L.numericFieldStyle . L.hlColor ?~ inputSelFocus colors
  & L.optionBtnOnStyle . L.bgColor ?~ btnMainBgFocus colors
  & L.optionBtnOnStyle . L.border ?~ btnMainBorderFocus colors
  & L.optionBtnOffStyle . L.bgColor ?~ btnBgFocus colors
  & L.optionBtnOffStyle . L.border ?~ btnBorderFocus colors
  & L.selectListStyle . L.border ?~ inputBorderFocus colors
  & L.selectListItemStyle . L.border ?~ border 1 (slNormalFocusBorder colors)
  & L.selectListItemSelectedStyle . L.border ?~ border 1 (slSelectedFocusBorder colors)
  & L.radioStyle . L.fgColor ?~ inputFgFocus colors
  & L.radioStyle . L.hlColor ?~ inputHlFocus colors
  & L.sliderStyle . L.fgColor ?~ inputFgFocus colors
  & L.sliderStyle . L.hlColor ?~ inputHlFocus colors
  & L.sliderStyle . L.sndColor ?~ inputSndFocus colors
  & L.textAreaStyle . L.border ?~ inputBorderFocus colors
  & L.textAreaStyle . L.hlColor ?~ inputSelFocus colors
  & L.textFieldStyle . L.border ?~ inputBorderFocus colors
  & L.textFieldStyle . L.hlColor ?~ inputSelFocus colors
  & L.timeFieldStyle . L.border ?~ inputBorderFocus colors
  & L.timeFieldStyle . L.hlColor ?~ inputSelFocus colors
  & L.toggleBtnOnStyle . L.bgColor ?~ btnMainBgFocus colors
  & L.toggleBtnOnStyle . L.border ?~ btnMainBorderFocus colors
  & L.toggleBtnOffStyle . L.bgColor ?~ btnBgFocus colors
  & L.toggleBtnOffStyle . L.border ?~ btnBorderFocus colors
-- | Theme overrides for widgets that are focused and hovered at once: merge
-- the hover and focus states, then let the hover backgrounds win for the
-- fields both states set.
baseFocusHover :: BaseThemeColors -> ThemeState
baseFocusHover colors = merged
  & L.btnStyle . L.bgColor ?~ btnBgHover colors
  & L.btnMainStyle . L.bgColor ?~ btnMainBgHover colors
  & L.dropdownItemStyle . L.bgColor ?~ slNormalBgHover colors
  & L.dropdownItemSelectedStyle . L.bgColor ?~ slSelectedBgHover colors
  & L.externalLinkStyle . L.text . non def . L.fontColor ?~ externalLinkHover colors
  & L.optionBtnOnStyle . L.bgColor ?~ btnMainBgHover colors
  & L.optionBtnOffStyle . L.bgColor ?~ btnBgHover colors
  & L.toggleBtnOnStyle . L.bgColor ?~ btnMainBgHover colors
  & L.toggleBtnOffStyle . L.bgColor ?~ btnBgHover colors
  where
    merged = baseHover colors <> baseFocus colors
-- | Theme overrides for the active state (e.g. a pressed button): starts from
-- the focus+hover merge, applies the Active color set, and keeps the focus
-- borders.
baseActive :: BaseThemeColors -> ThemeState
baseActive colors = baseFocusHover colors
  & L.btnStyle . L.bgColor ?~ btnBgActive colors
  & L.btnStyle . L.border ?~ btnBorderFocus colors
  & L.btnMainStyle . L.bgColor ?~ btnMainBgActive colors
  & L.btnMainStyle . L.border ?~ btnMainBorderFocus colors
  & L.checkboxStyle . L.fgColor ?~ inputFgActive colors
  & L.checkboxStyle . L.hlColor ?~ inputHlActive colors
  & L.dateFieldStyle . L.border ?~ inputBorderFocus colors
  & L.dateFieldStyle . L.hlColor ?~ inputSelFocus colors
  & L.dialStyle . L.fgColor ?~ inputFgActive colors
  & L.dialStyle . L.sndColor ?~ inputSndActive colors
  & L.dropdownStyle . L.bgColor ?~ inputBgActive colors
  & L.dropdownStyle . L.border ?~ inputBorderFocus colors
  & L.externalLinkStyle . L.text . non def . L.fontColor ?~ externalLinkActive colors
  & L.numericFieldStyle . L.border ?~ inputBorderFocus colors
  & L.numericFieldStyle . L.hlColor ?~ inputSelFocus colors
  & L.optionBtnOnStyle . L.bgColor ?~ btnMainBgActive colors
  & L.optionBtnOnStyle . L.border ?~ btnMainBorderFocus colors
  & L.optionBtnOffStyle . L.bgColor ?~ btnBgActive colors
  & L.optionBtnOffStyle . L.border ?~ btnBorderFocus colors
  & L.radioStyle . L.fgColor ?~ inputFgActive colors
  & L.radioStyle . L.hlColor ?~ inputHlActive colors
  & L.sliderStyle . L.fgColor ?~ inputFgActive colors
  & L.sliderStyle . L.hlColor ?~ inputHlActive colors
  & L.sliderStyle . L.sndColor ?~ inputSndActive colors
  & L.textAreaStyle . L.border ?~ inputBorderFocus colors
  & L.textAreaStyle . L.hlColor ?~ inputSelFocus colors
  & L.textFieldStyle . L.border ?~ inputBorderFocus colors
  & L.textFieldStyle . L.hlColor ?~ inputSelFocus colors
  & L.timeFieldStyle . L.border ?~ inputBorderFocus colors
  & L.timeFieldStyle . L.hlColor ?~ inputSelFocus colors
  & L.toggleBtnOnStyle . L.bgColor ?~ btnMainBgActive colors
  & L.toggleBtnOnStyle . L.border ?~ btnMainBorderFocus colors
  & L.toggleBtnOffStyle . L.bgColor ?~ btnBgActive colors
  & L.toggleBtnOffStyle . L.border ?~ btnBorderFocus colors
-- | Theme overrides for disabled widgets: the Disabled text, background,
-- fg/hl and border colors across every button and input variant.
baseDisabled :: BaseThemeColors -> ThemeState
baseDisabled colors = baseBasic colors
  & L.btnStyle . L.text . non def . L.fontColor ?~ btnTextDisabled colors
  & L.btnStyle . L.bgColor ?~ btnBgDisabled colors
  & L.btnStyle . L.border ?~ border 1 (btnBgDisabled colors)
  & L.btnMainStyle . L.text . non def . L.fontColor ?~ btnMainTextDisabled colors
  & L.btnMainStyle . L.bgColor ?~ btnMainBgDisabled colors
  & L.btnMainStyle . L.border ?~ border 1 (btnMainBgDisabled colors)
  & L.checkboxStyle . L.fgColor ?~ inputFgDisabled colors
  & L.checkboxStyle . L.hlColor ?~ inputHlDisabled colors
  & L.dateFieldStyle . L.bgColor ?~ inputBgDisabled colors
  & L.dateFieldStyle . L.text . non def . L.fontColor ?~ inputTextDisabled colors
  & L.dialStyle . L.fgColor ?~ inputFgDisabled colors
  & L.dialStyle . L.sndColor ?~ inputSndDisabled colors
  & L.dropdownStyle . L.text . non def . L.fontColor ?~ inputTextDisabled colors
  & L.dropdownStyle . L.bgColor ?~ inputBgDisabled colors
  & L.dropdownStyle . L.border ?~ border 1 (inputBgDisabled colors)
  & L.externalLinkStyle . L.text . non def . L.fontColor ?~ externalLinkDisabled colors
  & L.numericFieldStyle . L.bgColor ?~ inputBgDisabled colors
  & L.numericFieldStyle . L.text . non def . L.fontColor ?~ inputTextDisabled colors
  & L.optionBtnOnStyle . L.text . non def . L.fontColor ?~ btnMainTextDisabled colors
  & L.optionBtnOnStyle . L.bgColor ?~ btnMainBgDisabled colors
  & L.optionBtnOnStyle . L.border ?~ border 1 (btnMainBgDisabled colors)
  & L.optionBtnOffStyle . L.text . non def . L.fontColor ?~ btnTextDisabled colors
  & L.optionBtnOffStyle . L.bgColor ?~ btnBgDisabled colors
  & L.optionBtnOffStyle . L.border ?~ border 1 (btnBgDisabled colors)
  & L.radioStyle . L.fgColor ?~ inputFgDisabled colors
  & L.radioStyle . L.hlColor ?~ inputHlDisabled colors
  & L.sliderStyle . L.fgColor ?~ inputFgDisabled colors
  & L.sliderStyle . L.hlColor ?~ inputHlDisabled colors
  & L.sliderStyle . L.sndColor ?~ inputSndDisabled colors
  & L.textAreaStyle . L.bgColor ?~ inputBgDisabled colors
  & L.textAreaStyle . L.text . non def . L.fontColor ?~ inputTextDisabled colors
  & L.textFieldStyle . L.bgColor ?~ inputBgDisabled colors
  & L.textFieldStyle . L.text . non def . L.fontColor ?~ inputTextDisabled colors
  & L.timeFieldStyle . L.bgColor ?~ inputBgDisabled colors
  & L.timeFieldStyle . L.text . non def . L.fontColor ?~ inputTextDisabled colors
  & L.toggleBtnOnStyle . L.text . non def . L.fontColor ?~ btnMainTextDisabled colors
  & L.toggleBtnOnStyle . L.bgColor ?~ btnMainBgDisabled colors
  & L.toggleBtnOnStyle . L.border ?~ border 1 (btnMainBgDisabled colors)
  & L.toggleBtnOffStyle . L.text . non def . L.fontColor ?~ btnTextDisabled colors
  & L.toggleBtnOffStyle . L.bgColor ?~ btnBgDisabled colors
  & L.toggleBtnOffStyle . L.border ?~ border 1 (btnBgDisabled colors)
| null | https://raw.githubusercontent.com/fjvallarino/monomer/b8d3b82b50719204385179614f06bae283f90303/src/Monomer/Core/Themes/BaseTheme.hs | haskell | | Creates a theme using the provided colors.
| Customizable colors for the theme. | |
Module : . Core . Themes . BaseTheme
Copyright : ( c ) 2018
License : BSD-3 - Clause ( see the LICENSE file )
Maintainer :
Stability : experimental
Portability : non - portable
Provides a base theme , with fixed sizes and padding but configurable colors .
Module : Monomer.Core.Themes.BaseTheme
Copyright : (c) 2018 Francisco Vallarino
License : BSD-3-Clause (see the LICENSE file)
Maintainer :
Stability : experimental
Portability : non-portable
Provides a base theme, with fixed sizes and padding but configurable colors.
-}
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
module Monomer.Core.Themes.BaseTheme (
BaseThemeColors(..),
baseTheme
) where
import Control.Lens ((&), (^.), (.~), (?~), non)
import Data.Default
import Monomer.Core.Combinators
import Monomer.Core.Style
import Monomer.Graphics.Types
import qualified Monomer.Core.Lens as L
import qualified Monomer.Graphics.Lens as L
baseTheme :: BaseThemeColors -> Theme
baseTheme themeMod = Theme {
_themeClearColor = clearColor themeMod,
_themeSectionColor = sectionColor themeMod,
_themeUserColorMap = def,
_themeBasic = baseBasic themeMod,
_themeHover = baseHover themeMod,
_themeFocus = baseFocus themeMod,
_themeFocusHover = baseFocusHover themeMod,
_themeActive = baseActive themeMod,
_themeDisabled = baseDisabled themeMod
}
data BaseThemeColors = BaseThemeColors {
clearColor :: Color,
sectionColor :: Color,
btnFocusBorder :: Color,
btnBgBasic :: Color,
btnBgHover :: Color,
btnBgFocus :: Color,
btnBgActive :: Color,
btnBgDisabled :: Color,
btnText :: Color,
btnTextDisabled :: Color,
btnMainFocusBorder :: Color,
btnMainBgBasic :: Color,
btnMainBgHover :: Color,
btnMainBgFocus :: Color,
btnMainBgActive :: Color,
btnMainBgDisabled :: Color,
btnMainText :: Color,
btnMainTextDisabled :: Color,
dialogBg :: Color,
dialogBorder :: Color,
dialogText :: Color,
dialogTitleText :: Color,
emptyOverlay :: Color,
shadow :: Color,
externalLinkBasic :: Color,
externalLinkHover :: Color,
externalLinkFocus :: Color,
externalLinkActive :: Color,
externalLinkDisabled :: Color,
iconFg :: Color,
iconBg :: Color,
inputIconFg :: Color,
inputBorder :: Color,
inputFocusBorder :: Color,
inputBgBasic :: Color,
inputBgHover :: Color,
inputBgFocus :: Color,
inputBgActive :: Color,
inputBgDisabled :: Color,
inputFgBasic :: Color,
inputFgHover :: Color,
inputFgFocus :: Color,
inputFgActive :: Color,
inputFgDisabled :: Color,
inputSndBasic :: Color,
inputSndHover :: Color,
inputSndFocus :: Color,
inputSndActive :: Color,
inputSndDisabled :: Color,
inputHlBasic :: Color,
inputHlHover :: Color,
inputHlFocus :: Color,
inputHlActive :: Color,
inputHlDisabled :: Color,
inputSelBasic :: Color,
inputSelFocus :: Color,
inputText :: Color,
inputTextDisabled :: Color,
labelText :: Color,
scrollBarBasic :: Color,
scrollThumbBasic :: Color,
scrollBarHover :: Color,
scrollThumbHover :: Color,
slMainBg :: Color,
slNormalBgBasic :: Color,
slNormalBgHover :: Color,
slNormalText :: Color,
slNormalFocusBorder :: Color,
slSelectedBgBasic :: Color,
slSelectedBgHover :: Color,
slSelectedText :: Color,
slSelectedFocusBorder :: Color,
tooltipBorder :: Color,
tooltipBg :: Color,
tooltipText :: Color
} deriving (Eq, Show)
btnBorderFocus :: BaseThemeColors -> Border
btnBorderFocus themeMod = border 1 (btnFocusBorder themeMod)
btnMainBorderFocus :: BaseThemeColors -> Border
btnMainBorderFocus themeMod = border 1 (btnMainFocusBorder themeMod)
inputBorderFocus :: BaseThemeColors -> Border
inputBorderFocus themeMod = border 1 (inputFocusBorder themeMod)
normalFont :: TextStyle
normalFont = def
& L.font ?~ Font "Regular"
& L.fontSize ?~ FontSize 16
& L.fontSpaceV ?~ FontSpace 2
titleFont :: TextStyle
titleFont = def
& L.font ?~ Font "Bold"
& L.fontSize ?~ FontSize 20
& L.fontSpaceV ?~ FontSpace 2
colorPopupStyle :: BaseThemeColors -> StyleState
colorPopupStyle themeMod = popupStyle where
sectionBg = sectionColor themeMod
popupStyle = mconcat [width 400, padding 10, bgColor sectionBg, radius 4]
dialogMsgBodyFont :: BaseThemeColors -> TextStyle
dialogMsgBodyFont themeMod = fontStyle where
fontStyle = normalFont
& L.fontColor ?~ dialogText themeMod
externalLinkFont :: BaseThemeColors -> TextStyle
externalLinkFont themeMod = fontStyle where
fontStyle = normalFont
& L.fontColor ?~ externalLinkBasic themeMod
labelFont :: BaseThemeColors -> TextStyle
labelFont themeMod = fontStyle <> textLeft where
fontStyle = normalFont
& L.fontColor ?~ labelText themeMod
btnStyle :: BaseThemeColors -> StyleState
btnStyle themeMod = def
& L.text ?~ (normalFont & L.fontColor ?~ btnText themeMod) <> textCenter
& L.bgColor ?~ btnBgBasic themeMod
& L.border ?~ border 1 (btnBgBasic themeMod)
& L.padding ?~ padding 8
& L.radius ?~ radius 4
btnMainStyle :: BaseThemeColors -> StyleState
btnMainStyle themeMod = btnStyle themeMod
& L.text . non def . L.fontColor ?~ btnMainText themeMod
& L.bgColor ?~ btnMainBgBasic themeMod
& L.border ?~ border 1 (btnMainBgBasic themeMod)
textInputStyle :: BaseThemeColors -> StyleState
textInputStyle themeMod = style where
textStyle = normalFont
& L.fontColor ?~ inputText themeMod
style = def
& L.text ?~ textStyle
& L.bgColor ?~ inputBgBasic themeMod
& L.fgColor ?~ inputFgBasic themeMod
& L.sndColor ?~ (inputSndBasic themeMod & L.a .~ 0.6)
& L.hlColor ?~ inputSelBasic themeMod
& L.border ?~ border 1 (inputBorder themeMod)
& L.radius ?~ radius 4
& L.padding ?~ padding 8
numericInputStyle :: BaseThemeColors -> StyleState
numericInputStyle themeMod = textInputStyle themeMod
& L.text . non def . L.alignH ?~ ATRight
dateInputStyle :: BaseThemeColors -> StyleState
dateInputStyle themeMod = textInputStyle themeMod
& L.text . non def . L.alignH ?~ ATRight
timeInputStyle :: BaseThemeColors -> StyleState
timeInputStyle themeMod = textInputStyle themeMod
& L.text . non def . L.alignH ?~ ATRight
selectListItemStyle :: BaseThemeColors -> StyleState
selectListItemStyle themeMod = def
& L.text ?~ (normalFont & L.fontColor ?~ slNormalText themeMod)
& L.text . non def . L.alignH ?~ ATLeft
& L.bgColor ?~ slNormalBgBasic themeMod
& L.border ?~ border 1 (slNormalBgBasic themeMod)
& L.padding ?~ padding 8
selectListItemSelectedStyle :: BaseThemeColors -> StyleState
selectListItemSelectedStyle themeMod = selectListItemStyle themeMod
& L.text . non def . L.fontColor ?~ slSelectedText themeMod
& L.bgColor ?~ slSelectedBgBasic themeMod
& L.border ?~ border 1 (slSelectedBgBasic themeMod)
tooltipStyle :: BaseThemeColors -> StyleState
tooltipStyle themeMod = def
& L.text . non def . L.font ?~ Font "Regular"
& L.text . non def . L.fontSize ?~ FontSize 14
& L.text . non def . L.fontColor ?~ tooltipText themeMod
& L.bgColor ?~ tooltipBg themeMod
& L.border ?~ border 1 (tooltipBorder themeMod)
& L.padding ?~ padding 6
& L.radius ?~ radius 4
baseBasic :: BaseThemeColors -> ThemeState
baseBasic themeMod = def
& L.emptyOverlayStyle .~ bgColor (emptyOverlay themeMod)
& L.emptyOverlayStyle . L.padding ?~ padding 8
& L.shadowColor .~ shadow themeMod
& L.btnStyle .~ btnStyle themeMod
& L.btnMainStyle .~ btnMainStyle themeMod
& L.colorPopupStyle .~ colorPopupStyle themeMod
& L.checkboxWidth .~ 20
& L.checkboxStyle . L.fgColor ?~ inputFgBasic themeMod
& L.checkboxStyle . L.hlColor ?~ inputHlBasic themeMod
& L.checkboxStyle . L.radius ?~ radius 4
& L.dateFieldStyle .~ dateInputStyle themeMod
& L.dialWidth .~ 50
& L.dialStyle . L.fgColor ?~ inputFgBasic themeMod
& L.dialStyle . L.sndColor ?~ inputSndBasic themeMod
& L.dialogTitleStyle . L.text ?~ (titleFont & L.fontColor ?~ dialogTitleText themeMod)
& L.dialogTitleStyle . L.padding ?~ padding 10
& L.dialogFrameStyle . L.padding ?~ padding 5
& L.dialogFrameStyle . L.radius ?~ radius 10
& L.dialogFrameStyle . L.bgColor ?~ dialogBg themeMod
& L.dialogFrameStyle . L.border ?~ border 1 (dialogBorder themeMod)
& L.dialogCloseIconStyle . L.bgColor ?~ iconBg themeMod
& L.dialogCloseIconStyle . L.fgColor ?~ iconFg themeMod
& L.dialogCloseIconStyle . L.padding ?~ padding 4
& L.dialogCloseIconStyle . L.radius ?~ radius 8
& L.dialogCloseIconStyle . L.sizeReqW ?~ width 16
& L.dialogCloseIconStyle . L.sizeReqH ?~ width 16
& L.dialogButtonsStyle . L.padding ?~ padding 20 <> paddingT 10
& L.dialogMsgBodyStyle . L.padding ?~ padding 20
& L.dialogMsgBodyStyle . L.text ?~ dialogMsgBodyFont themeMod
& L.dialogMsgBodyStyle . L.sizeReqW ?~ maxWidth 600
& L.dropdownStyle .~ textInputStyle themeMod
& L.dropdownStyle . L.fgColor ?~ inputIconFg themeMod
& L.dropdownStyle . L.text . non def . L.alignH ?~ ATLeft
& L.dropdownMaxHeight .~ 200
& L.dropdownListStyle . L.bgColor ?~ slMainBg themeMod
& L.dropdownItemStyle .~ selectListItemStyle themeMod
& L.dropdownItemSelectedStyle .~ selectListItemSelectedStyle themeMod
& L.externalLinkStyle . L.text ?~ externalLinkFont themeMod
& L.labelStyle . L.text ?~ labelFont themeMod
& L.numericFieldStyle .~ numericInputStyle themeMod
& L.optionBtnOnStyle .~ btnMainStyle themeMod
& L.optionBtnOffStyle .~ btnStyle themeMod
& L.selectListStyle . L.bgColor ?~ slMainBg themeMod
& L.selectListStyle . L.border ?~ border 1 (slMainBg themeMod)
& L.selectListItemStyle .~ selectListItemStyle themeMod
& L.selectListItemSelectedStyle .~ selectListItemSelectedStyle themeMod
& L.radioWidth .~ 20
& L.radioStyle . L.fgColor ?~ inputFgBasic themeMod
& L.radioStyle . L.hlColor ?~ inputHlBasic themeMod
& L.scrollOverlay .~ False
& L.scrollFollowFocus .~ True
& L.scrollBarColor .~ scrollBarBasic themeMod
& L.scrollThumbColor .~ scrollThumbBasic themeMod
& L.scrollBarWidth .~ 8
& L.scrollThumbWidth .~ 8
& L.scrollThumbMinSize .~ 25
& L.scrollThumbRadius .~ 4
& L.scrollWheelRate .~ 10
& L.separatorLineWidth .~ 1
& L.separatorLineStyle . L.fgColor ?~ inputSndBasic themeMod
& L.sliderRadius ?~ 2
& L.sliderThumbFactor .~ 1.25
& L.sliderWidth .~ 10
& L.sliderStyle . L.fgColor ?~ inputFgBasic themeMod
& L.sliderStyle . L.hlColor ?~ inputHlBasic themeMod
& L.sliderStyle . L.sndColor ?~ inputSndBasic themeMod
& L.textAreaStyle .~ textInputStyle themeMod
& L.textFieldStyle .~ textInputStyle themeMod
& L.timeFieldStyle .~ timeInputStyle themeMod
& L.toggleBtnOnStyle .~ btnMainStyle themeMod
& L.toggleBtnOffStyle .~ btnStyle themeMod
& L.tooltipStyle .~ tooltipStyle themeMod
baseHover :: BaseThemeColors -> ThemeState
baseHover themeMod = baseBasic themeMod
& L.btnStyle . L.bgColor ?~ btnBgHover themeMod
& L.btnStyle . L.border ?~ border 1 (btnBgHover themeMod)
& L.btnStyle . L.cursorIcon ?~ CursorHand
& L.btnMainStyle . L.bgColor ?~ btnMainBgHover themeMod
& L.btnMainStyle . L.border ?~ border 1 (btnMainBgHover themeMod)
& L.btnMainStyle . L.cursorIcon ?~ CursorHand
& L.checkboxStyle . L.fgColor ?~ inputFgHover themeMod
& L.checkboxStyle . L.hlColor ?~ inputHlHover themeMod
& L.checkboxStyle . L.cursorIcon ?~ CursorHand
& L.dateFieldStyle . L.cursorIcon ?~ CursorIBeam
& L.dialStyle . L.fgColor ?~ inputFgHover themeMod
& L.dialStyle . L.sndColor ?~ inputSndHover themeMod
& L.dialStyle . L.cursorIcon ?~ CursorSizeV
& L.dialogCloseIconStyle . L.cursorIcon ?~ CursorHand
& L.dropdownStyle . L.bgColor ?~ inputBgHover themeMod
& L.dropdownStyle . L.cursorIcon ?~ CursorHand
& L.dropdownListStyle . L.border ?~ border 1 (slMainBg themeMod)
& L.dropdownItemStyle . L.bgColor ?~ slNormalBgHover themeMod
& L.dropdownItemStyle . L.border ?~ border 1 (slNormalBgHover themeMod)
& L.dropdownItemStyle . L.cursorIcon ?~ CursorHand
& L.dropdownItemSelectedStyle . L.bgColor ?~ slSelectedBgHover themeMod
& L.dropdownItemSelectedStyle . L.border ?~ border 1 (slSelectedBgHover themeMod)
& L.dropdownItemSelectedStyle . L.cursorIcon ?~ CursorHand
& L.externalLinkStyle . L.text . non def . L.fontColor ?~ externalLinkHover themeMod
& L.externalLinkStyle . L.text . non def . L.underline ?~ True
& L.externalLinkStyle . L.cursorIcon ?~ CursorHand
& L.numericFieldStyle . L.cursorIcon ?~ CursorIBeam
& L.optionBtnOnStyle . L.bgColor ?~ btnMainBgHover themeMod
& L.optionBtnOnStyle . L.border ?~ border 1 (btnMainBgHover themeMod)
& L.optionBtnOnStyle . L.cursorIcon ?~ CursorHand
& L.optionBtnOffStyle . L.bgColor ?~ btnBgHover themeMod
& L.optionBtnOffStyle . L.border ?~ border 1 (btnBgHover themeMod)
& L.optionBtnOffStyle . L.cursorIcon ?~ CursorHand
& L.selectListItemStyle . L.bgColor ?~ slNormalBgHover themeMod
& L.selectListItemStyle . L.border ?~ border 1 (slNormalBgHover themeMod)
& L.selectListItemStyle . L.cursorIcon ?~ CursorHand
& L.selectListItemSelectedStyle . L.bgColor ?~ slSelectedBgHover themeMod
& L.selectListItemSelectedStyle . L.border ?~ border 1 (slSelectedBgHover themeMod)
& L.selectListItemSelectedStyle . L.cursorIcon ?~ CursorHand
& L.radioStyle . L.fgColor ?~ inputFgHover themeMod
& L.radioStyle . L.hlColor ?~ inputHlHover themeMod
& L.radioStyle . L.cursorIcon ?~ CursorHand
& L.scrollBarColor .~ scrollBarHover themeMod
& L.scrollThumbColor .~ scrollThumbHover themeMod
& L.sliderStyle . L.fgColor ?~ inputFgHover themeMod
& L.sliderStyle . L.hlColor ?~ inputHlHover themeMod
& L.sliderStyle . L.sndColor ?~ inputSndHover themeMod
& L.sliderStyle . L.cursorIcon ?~ CursorHand
& L.textAreaStyle . L.cursorIcon ?~ CursorIBeam
& L.textFieldStyle . L.cursorIcon ?~ CursorIBeam
& L.timeFieldStyle . L.cursorIcon ?~ CursorIBeam
& L.toggleBtnOnStyle . L.bgColor ?~ btnMainBgHover themeMod
& L.toggleBtnOnStyle . L.border ?~ border 1 (btnMainBgHover themeMod)
& L.toggleBtnOnStyle . L.cursorIcon ?~ CursorHand
& L.toggleBtnOffStyle . L.bgColor ?~ btnBgHover themeMod
& L.toggleBtnOffStyle . L.border ?~ border 1 (btnBgHover themeMod)
& L.toggleBtnOffStyle . L.cursorIcon ?~ CursorHand
baseFocus :: BaseThemeColors -> ThemeState
baseFocus themeMod = baseBasic themeMod
& L.btnStyle . L.bgColor ?~ btnBgFocus themeMod
& L.btnStyle . L.border ?~ btnBorderFocus themeMod
& L.btnMainStyle . L.bgColor ?~ btnMainBgFocus themeMod
& L.btnMainStyle . L.border ?~ btnMainBorderFocus themeMod
& L.checkboxStyle . L.fgColor ?~ inputFgFocus themeMod
& L.checkboxStyle . L.hlColor ?~ inputHlFocus themeMod
& L.dateFieldStyle . L.border ?~ inputBorderFocus themeMod
& L.dateFieldStyle . L.hlColor ?~ inputSelFocus themeMod
& L.dialStyle . L.fgColor ?~ inputFgFocus themeMod
& L.dialStyle . L.sndColor ?~ inputSndFocus themeMod
& L.dropdownStyle . L.border ?~ inputBorderFocus themeMod
& L.dropdownListStyle . L.border ?~ border 1 (slMainBg themeMod)
& L.dropdownItemStyle . L.border ?~ border 1 (slNormalFocusBorder themeMod)
& L.dropdownItemSelectedStyle . L.border ?~ border 1 (slSelectedFocusBorder themeMod)
& L.externalLinkStyle . L.text . non def . L.fontColor ?~ externalLinkFocus themeMod
& L.numericFieldStyle . L.border ?~ inputBorderFocus themeMod
& L.numericFieldStyle . L.hlColor ?~ inputSelFocus themeMod
& L.optionBtnOnStyle . L.bgColor ?~ btnMainBgFocus themeMod
& L.optionBtnOnStyle . L.border ?~ btnMainBorderFocus themeMod
& L.optionBtnOffStyle . L.bgColor ?~ btnBgFocus themeMod
& L.optionBtnOffStyle . L.border ?~ btnBorderFocus themeMod
& L.selectListStyle . L.border ?~ inputBorderFocus themeMod
& L.selectListItemStyle . L.border ?~ border 1 (slNormalFocusBorder themeMod)
& L.selectListItemSelectedStyle . L.border ?~ border 1 (slSelectedFocusBorder themeMod)
& L.radioStyle . L.fgColor ?~ inputFgFocus themeMod
& L.radioStyle . L.hlColor ?~ inputHlFocus themeMod
& L.sliderStyle . L.fgColor ?~ inputFgFocus themeMod
& L.sliderStyle . L.hlColor ?~ inputHlFocus themeMod
& L.sliderStyle . L.sndColor ?~ inputSndFocus themeMod
& L.textAreaStyle . L.border ?~ inputBorderFocus themeMod
& L.textAreaStyle . L.hlColor ?~ inputSelFocus themeMod
& L.textFieldStyle . L.border ?~ inputBorderFocus themeMod
& L.textFieldStyle . L.hlColor ?~ inputSelFocus themeMod
& L.timeFieldStyle . L.border ?~ inputBorderFocus themeMod
& L.timeFieldStyle . L.hlColor ?~ inputSelFocus themeMod
& L.toggleBtnOnStyle . L.bgColor ?~ btnMainBgFocus themeMod
& L.toggleBtnOnStyle . L.border ?~ btnMainBorderFocus themeMod
& L.toggleBtnOffStyle . L.bgColor ?~ btnBgFocus themeMod
& L.toggleBtnOffStyle . L.border ?~ btnBorderFocus themeMod
baseFocusHover :: BaseThemeColors -> ThemeState
baseFocusHover themeMod = (baseHover themeMod <> baseFocus themeMod)
& L.btnStyle . L.bgColor ?~ btnBgHover themeMod
& L.btnMainStyle . L.bgColor ?~ btnMainBgHover themeMod
& L.dropdownItemStyle . L.bgColor ?~ slNormalBgHover themeMod
& L.dropdownItemSelectedStyle . L.bgColor ?~ slSelectedBgHover themeMod
& L.externalLinkStyle . L.text . non def . L.fontColor ?~ externalLinkHover themeMod
& L.optionBtnOnStyle . L.bgColor ?~ btnMainBgHover themeMod
& L.optionBtnOffStyle . L.bgColor ?~ btnBgHover themeMod
& L.toggleBtnOnStyle . L.bgColor ?~ btnMainBgHover themeMod
& L.toggleBtnOffStyle . L.bgColor ?~ btnBgHover themeMod
baseActive :: BaseThemeColors -> ThemeState
baseActive themeMod = baseFocusHover themeMod
& L.btnStyle . L.bgColor ?~ btnBgActive themeMod
& L.btnStyle . L.border ?~ btnBorderFocus themeMod
& L.btnMainStyle . L.bgColor ?~ btnMainBgActive themeMod
& L.btnMainStyle . L.border ?~ btnMainBorderFocus themeMod
& L.checkboxStyle . L.fgColor ?~ inputFgActive themeMod
& L.checkboxStyle . L.hlColor ?~ inputHlActive themeMod
& L.dateFieldStyle . L.border ?~ inputBorderFocus themeMod
& L.dateFieldStyle . L.hlColor ?~ inputSelFocus themeMod
& L.dialStyle . L.fgColor ?~ inputFgActive themeMod
& L.dialStyle . L.sndColor ?~ inputSndActive themeMod
& L.dropdownStyle . L.bgColor ?~ inputBgActive themeMod
& L.dropdownStyle . L.border ?~ inputBorderFocus themeMod
& L.externalLinkStyle . L.text . non def . L.fontColor ?~ externalLinkActive themeMod
& L.numericFieldStyle . L.border ?~ inputBorderFocus themeMod
& L.numericFieldStyle . L.hlColor ?~ inputSelFocus themeMod
& L.optionBtnOnStyle . L.bgColor ?~ btnMainBgActive themeMod
& L.optionBtnOnStyle . L.border ?~ btnMainBorderFocus themeMod
& L.optionBtnOffStyle . L.bgColor ?~ btnBgActive themeMod
& L.optionBtnOffStyle . L.border ?~ btnBorderFocus themeMod
& L.radioStyle . L.fgColor ?~ inputFgActive themeMod
& L.radioStyle . L.hlColor ?~ inputHlActive themeMod
& L.sliderStyle . L.fgColor ?~ inputFgActive themeMod
& L.sliderStyle . L.hlColor ?~ inputHlActive themeMod
& L.sliderStyle . L.sndColor ?~ inputSndActive themeMod
& L.textAreaStyle . L.border ?~ inputBorderFocus themeMod
& L.textAreaStyle . L.hlColor ?~ inputSelFocus themeMod
& L.textFieldStyle . L.border ?~ inputBorderFocus themeMod
& L.textFieldStyle . L.hlColor ?~ inputSelFocus themeMod
& L.timeFieldStyle . L.border ?~ inputBorderFocus themeMod
& L.timeFieldStyle . L.hlColor ?~ inputSelFocus themeMod
& L.toggleBtnOnStyle . L.bgColor ?~ btnMainBgActive themeMod
& L.toggleBtnOnStyle . L.border ?~ btnMainBorderFocus themeMod
& L.toggleBtnOffStyle . L.bgColor ?~ btnBgActive themeMod
& L.toggleBtnOffStyle . L.border ?~ btnBorderFocus themeMod
baseDisabled :: BaseThemeColors -> ThemeState
baseDisabled themeMod = baseBasic themeMod
& L.btnStyle . L.text . non def . L.fontColor ?~ btnTextDisabled themeMod
& L.btnStyle . L.bgColor ?~ btnBgDisabled themeMod
& L.btnStyle . L.border ?~ border 1 (btnBgDisabled themeMod)
& L.btnMainStyle . L.text . non def . L.fontColor ?~ btnMainTextDisabled themeMod
& L.btnMainStyle . L.bgColor ?~ btnMainBgDisabled themeMod
& L.btnMainStyle . L.border ?~ border 1 (btnMainBgDisabled themeMod)
& L.checkboxStyle . L.fgColor ?~ inputFgDisabled themeMod
& L.checkboxStyle . L.hlColor ?~ inputHlDisabled themeMod
& L.dateFieldStyle . L.bgColor ?~ inputBgDisabled themeMod
& L.dateFieldStyle . L.text . non def . L.fontColor ?~ inputTextDisabled themeMod
& L.dialStyle . L.fgColor ?~ inputFgDisabled themeMod
& L.dialStyle . L.sndColor ?~ inputSndDisabled themeMod
& L.dropdownStyle . L.text . non def . L.fontColor ?~ inputTextDisabled themeMod
& L.dropdownStyle . L.bgColor ?~ inputBgDisabled themeMod
& L.dropdownStyle . L.border ?~ border 1 (inputBgDisabled themeMod)
& L.externalLinkStyle . L.text . non def . L.fontColor ?~ externalLinkDisabled themeMod
& L.numericFieldStyle . L.bgColor ?~ inputBgDisabled themeMod
& L.numericFieldStyle . L.text . non def . L.fontColor ?~ inputTextDisabled themeMod
& L.optionBtnOnStyle . L.text . non def . L.fontColor ?~ btnMainTextDisabled themeMod
& L.optionBtnOnStyle . L.bgColor ?~ btnMainBgDisabled themeMod
& L.optionBtnOnStyle . L.border ?~ border 1 (btnMainBgDisabled themeMod)
& L.optionBtnOffStyle . L.text . non def . L.fontColor ?~ btnTextDisabled themeMod
& L.optionBtnOffStyle . L.bgColor ?~ btnBgDisabled themeMod
& L.optionBtnOffStyle . L.border ?~ border 1 (btnBgDisabled themeMod)
& L.radioStyle . L.fgColor ?~ inputFgDisabled themeMod
& L.radioStyle . L.hlColor ?~ inputHlDisabled themeMod
& L.sliderStyle . L.fgColor ?~ inputFgDisabled themeMod
& L.sliderStyle . L.hlColor ?~ inputHlDisabled themeMod
& L.sliderStyle . L.sndColor ?~ inputSndDisabled themeMod
& L.textAreaStyle . L.bgColor ?~ inputBgDisabled themeMod
& L.textAreaStyle . L.text . non def . L.fontColor ?~ inputTextDisabled themeMod
& L.textFieldStyle . L.bgColor ?~ inputBgDisabled themeMod
& L.textFieldStyle . L.text . non def . L.fontColor ?~ inputTextDisabled themeMod
& L.timeFieldStyle . L.bgColor ?~ inputBgDisabled themeMod
& L.timeFieldStyle . L.text . non def . L.fontColor ?~ inputTextDisabled themeMod
& L.toggleBtnOnStyle . L.text . non def . L.fontColor ?~ btnMainTextDisabled themeMod
& L.toggleBtnOnStyle . L.bgColor ?~ btnMainBgDisabled themeMod
& L.toggleBtnOnStyle . L.border ?~ border 1 (btnMainBgDisabled themeMod)
& L.toggleBtnOffStyle . L.text . non def . L.fontColor ?~ btnTextDisabled themeMod
& L.toggleBtnOffStyle . L.bgColor ?~ btnBgDisabled themeMod
& L.toggleBtnOffStyle . L.border ?~ border 1 (btnBgDisabled themeMod)
|
9af4538c12809b15b317a0942b56fd3a8acf5f7cfda8fc76e18d4a11862c06e6 | sorenmacbeth/flambo | project.clj | (defproject yieldbot/flambo "0.8.3-SNAPSHOT"
:description "A Clojure DSL for Apache Spark"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:mailing-list {:name "flambo user mailing list"
:archive "-user"
:post ""}
:dependencies [[org.clojure/clojure "1.8.0"]
[org.clojure/tools.logging "0.3.1"]
[com.google.guava/guava "18.0"]
[yieldbot/serializable-fn "0.1.2"
:exclusions [com.twitter/chill-java]]
[com.twitter/carbonite "1.5.0"
:exclusions [com.twitter/chill-java]]
[com.twitter/chill_2.11 "0.8.0"
:exclusions [org.scala-lang/scala-library]]]
:profiles {:dev
{:dependencies [[midje "1.6.3"]
[criterium "0.4.3"]]
:plugins [[lein-midje "3.1.3"]
[michaelblume/lein-marginalia "0.9.0"]
;; [codox "0.8.9"]
[funcool/codeina "0.3.0"
:exclusions [org.clojure/clojure]]]
;; so gen-class stuff works in the repl
:aot [flambo.function
flambo.example.tfidf]}
:provided
{:dependencies
[[org.apache.spark/spark-core_2.11 "2.3.1"]
[org.apache.spark/spark-streaming_2.11 "2.3.1"]
[org.apache.spark/spark-streaming-kafka-0-10_2.11 "2.3.1"]
[org.apache.spark/spark-sql_2.11 "2.3.1"]
[org.apache.spark/spark-hive_2.11 "2.3.1"]]}
:clojure-1.6
{:dependencies [[org.clojure/clojure "1.6.0"]]}
:clojure-1.7
{:dependencies [[org.clojure/clojure "1.7.0"]]}
:uberjar
{:aot :all}
:example
{:main flambo.example.tfidf
:source-paths ["test/flambo/example"]
:aot [flambo.example.tfidf]}}
:checksum :warn ;; -5308
:source-paths ["src/clojure"]
:java-source-paths ["src/java"]
:codeina {:reader :clojure
:src ["src/clj"]
:target "doc/codeina"
:src-uri "/"
:src-uri-prefix "#L"
}
:codox {:defaults {:doc/format :markdown}
:include [flambo.api flambo.conf flambo.kryo flambo.sql]
:output-dir "doc/codox"
:src-dir-uri "/"
:src-linenum-anchor-prefix "L"}
:javac-options ["-source" "1.8" "-target" "1.8"]
:jvm-opts ^:replace ["-server" "-Xmx2g"]
:global-vars {*warn-on-reflection* false}
:min-lein-version "2.5.0")
| null | https://raw.githubusercontent.com/sorenmacbeth/flambo/9c61467697547c5fe3d05070be84cd2320cb53ce/project.clj | clojure | [codox "0.8.9"]
so gen-class stuff works in the repl
-5308 | (defproject yieldbot/flambo "0.8.3-SNAPSHOT"
:description "A Clojure DSL for Apache Spark"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:mailing-list {:name "flambo user mailing list"
:archive "-user"
:post ""}
:dependencies [[org.clojure/clojure "1.8.0"]
[org.clojure/tools.logging "0.3.1"]
[com.google.guava/guava "18.0"]
[yieldbot/serializable-fn "0.1.2"
:exclusions [com.twitter/chill-java]]
[com.twitter/carbonite "1.5.0"
:exclusions [com.twitter/chill-java]]
[com.twitter/chill_2.11 "0.8.0"
:exclusions [org.scala-lang/scala-library]]]
:profiles {:dev
{:dependencies [[midje "1.6.3"]
[criterium "0.4.3"]]
:plugins [[lein-midje "3.1.3"]
[michaelblume/lein-marginalia "0.9.0"]
[funcool/codeina "0.3.0"
:exclusions [org.clojure/clojure]]]
:aot [flambo.function
flambo.example.tfidf]}
:provided
{:dependencies
[[org.apache.spark/spark-core_2.11 "2.3.1"]
[org.apache.spark/spark-streaming_2.11 "2.3.1"]
[org.apache.spark/spark-streaming-kafka-0-10_2.11 "2.3.1"]
[org.apache.spark/spark-sql_2.11 "2.3.1"]
[org.apache.spark/spark-hive_2.11 "2.3.1"]]}
:clojure-1.6
{:dependencies [[org.clojure/clojure "1.6.0"]]}
:clojure-1.7
{:dependencies [[org.clojure/clojure "1.7.0"]]}
:uberjar
{:aot :all}
:example
{:main flambo.example.tfidf
:source-paths ["test/flambo/example"]
:aot [flambo.example.tfidf]}}
:source-paths ["src/clojure"]
:java-source-paths ["src/java"]
:codeina {:reader :clojure
:src ["src/clj"]
:target "doc/codeina"
:src-uri "/"
:src-uri-prefix "#L"
}
:codox {:defaults {:doc/format :markdown}
:include [flambo.api flambo.conf flambo.kryo flambo.sql]
:output-dir "doc/codox"
:src-dir-uri "/"
:src-linenum-anchor-prefix "L"}
:javac-options ["-source" "1.8" "-target" "1.8"]
:jvm-opts ^:replace ["-server" "-Xmx2g"]
:global-vars {*warn-on-reflection* false}
:min-lein-version "2.5.0")
|
bc0913368d2fd1ad66b584176f4774fec3cfad4d71eeb90aca546d9e4dd1ecd8 | tweag/ormolu | shebang-with-pragmas-out.hs | #!/usr/bin/env stack
{-# LANGUAGE OverloadedStrings #-}
main = pure ()
| null | https://raw.githubusercontent.com/tweag/ormolu/34bdf62429768f24b70d0f8ba7730fc4d8ae73ba/data/examples/module-header/shebang-with-pragmas-out.hs | haskell | # LANGUAGE OverloadedStrings # | #!/usr/bin/env stack
main = pure ()
|
8722d27848adb11171c9d7273a32f1468812000d99b0638f436707474d045855 | elaforge/karya | SubT.hs | Copyright 2019
-- This program is distributed under the terms of the GNU General Public
-- License 3.0, see COPYING or -3.0.txt
# LANGUAGE DeriveFunctor #
module Derive.Call.SubT where
import qualified Util.Pretty as Pretty
import qualified Derive.Deriver.Monad as Derive
import qualified Ui.Id as Id
import Global
import Types
-- | A sliced sub-track: where its events came from, plus the events
-- themselves.
data Track = Track {
    -- | Usually this comes from a sliced track with a TrackId, but sometimes
    -- from 'Derive.ctx_sub_events', or an unnamed track.
    _source :: !(Either Text TrackId)
    -- | The events on this track.
    , _events :: ![Event]
    }
instance Pretty Track where
    -- Events are collapsed to () before formatting, since the payload
    -- (a deriver) has no useful rendering.
    format (Track src evts) = Pretty.record "Track" fields
        where
        fields =
            [ ("source", Pretty.format src)
            , ("events", Pretty.format (map (const ()) evts))
            ]
-- | A short name identifying where this track came from.
show_track :: Track -> Text
show_track track = "subtrack:" <> case _source track of
    Left name -> name
    Right track_id -> Id.ident_text track_id
-- | Sliced sub-events are represented as a start, duration, and opaque
-- deriver. This is a compromise between a plain NoteDeriver, which is fully
-- abstract but also fully opaque, and some kind of note data structure, which
-- is fully concrete (and thus inflexible), but also transparent to
-- modification.
-- | An event whose payload is the deriver that will produce its notes.
type Event = EventT Derive.NoteDeriver
-- | A sub-event: start time, duration, and an arbitrary payload.
data EventT a = EventT {
    _start :: !ScoreTime
    , _duration :: !ScoreTime
    , _note :: !a
    } deriving (Show, Functor)
instance Pretty a => Pretty (EventT a) where
    pretty (EventT start_ dur_ note_) = mconcat
        [ "Event ", showt start_, " ", showt dur_
        , " (", pretty note_, ")"
        ]
-- | The time at which the event ends.
end :: EventT a -> ScoreTime
end (EventT start dur _) = start + dur
-- | Does the given time fall within the event?  Zero-duration events
-- overlap only their exact start point; otherwise the interval is
-- half-open: start <= pos < start + duration.
overlaps :: ScoreTime -> EventT a -> Bool
overlaps pos event
    | _duration event == 0 = pos == _start event
    | otherwise = _start event <= pos && pos < _start event + _duration event
-- | Stretch the event's start and duration by a factor, around the given
-- anchor point.
place :: ScoreTime -> ScoreTime -> EventT a -> EventT a
place shift factor event = event
    { _start = (_start event - shift) * factor + shift
    , _duration = _duration event * factor
    }
-- | Scale an event's times by a factor, anchored at time 0.
stretch :: ScoreTime -> EventT a -> EventT a
stretch = place 0
-- | Shift an event's start time; the duration is unchanged.
at :: ScoreTime -> EventT a -> EventT a
at shift event = event { _start = _start event + shift }
| null | https://raw.githubusercontent.com/elaforge/karya/471a2131f5a68b3b10b1a138e6f9ed1282980a18/Derive/Call/SubT.hs | haskell | This program is distributed under the terms of the GNU General Public
License 3.0, see COPYING or -3.0.txt
| Usually this comes from a sliced track with a TrackId, but sometimes
from 'Derive.ctx_sub_events', or an unnamed track.
| Sliced sub-events are represented as a start, duration, and opaque
abstract but also fully opaque, and some kind of note data structure, which
is fully concrete (and thus inflexible), but also transparent to
modification. | Copyright 2019
# LANGUAGE DeriveFunctor #
module Derive.Call.SubT where
import qualified Util.Pretty as Pretty
import qualified Derive.Deriver.Monad as Derive
import qualified Ui.Id as Id
import Global
import Types
data Track = Track {
_source :: !(Either Text TrackId)
, _events :: ![Event]
}
instance Pretty Track where
format (Track source events) = Pretty.record "Track"
[ ("source", Pretty.format source)
, ("events", Pretty.format (const () <$> events))
]
show_track :: Track -> Text
show_track (Track source _) = "subtrack:" <> either id Id.ident_text source
deriver . This is a compromise between a plain NoteDeriver , which is fully
type Event = EventT Derive.NoteDeriver
data EventT a = EventT {
_start :: !ScoreTime
, _duration :: !ScoreTime
, _note :: !a
} deriving (Show, Functor)
instance Pretty a => Pretty (EventT a) where
pretty (EventT start dur note) =
"Event " <> showt start <> " " <> showt dur
<> " (" <> pretty note <> ")"
end :: EventT a -> ScoreTime
end event = _start event + _duration event
overlaps :: ScoreTime -> EventT a -> Bool
overlaps pos (EventT start dur _)
| dur == 0 = pos == start
| otherwise = start <= pos && pos < start + dur
place :: ScoreTime -> ScoreTime -> EventT a -> EventT a
place shift factor (EventT start dur note) =
EventT ((start - shift) * factor + shift) (dur * factor) note
stretch :: ScoreTime -> EventT a -> EventT a
stretch factor = place 0 factor
at :: ScoreTime -> EventT a -> EventT a
at shift (EventT start dur note) = EventT (start + shift) dur note
|
2ef89c9177b6aa2766e876f2b0dc59f6c8f95dfccd9c2c09fb23c7780ccbe623 | obsidiansystems/obelisk | Thunk.hs | # LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
# LANGUAGE LambdaCase #
# LANGUAGE MultiParamTypeClasses #
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE PatternGuards #
# LANGUAGE QuasiQuotes #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeApplications #
module Obelisk.Command.Thunk where
import Control.Applicative
import Control.Exception (displayException, try)
import Control.Lens (ifor, ifor_, (.~))
import Control.Monad
import Control.Monad.Extra (findM)
import Control.Monad.Catch (MonadCatch, handle)
import Control.Monad.Except
import Data.Aeson ((.=))
import qualified Data.Aeson as Aeson
import Data.Aeson.Encode.Pretty
import qualified Data.Aeson.Types as Aeson
import Data.Bifunctor (first)
import qualified Data.ByteString.Lazy as LBS
import Data.Containers.ListUtils (nubOrd)
import Data.Default
import Data.Either.Combinators (fromRight', rightToMaybe)
import Data.Foldable (toList)
import Data.Function ((&))
import Data.Functor ((<&>))
import Data.Git.Ref (Ref)
import qualified Data.Git.Ref as Ref
import qualified Data.List as L
import Data.List.NonEmpty (NonEmpty (..), nonEmpty)
import qualified Data.List.NonEmpty as NonEmpty
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe
import Data.Set (Set)
import qualified Data.Set as Set
import Data.String.Here.Interpolated (i)
import Data.String.Here.Uninterpolated (here)
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding
import qualified Data.Text.IO as T
import Data.Time.Clock.POSIX (posixSecondsToUTCTime)
import Data.Traversable (for)
import Data.Yaml (parseMaybe)
import GitHub
import GitHub.Data.Name
import Obelisk.Command.Nix
import System.Directory
import System.Exit
import System.FilePath
import System.IO.Error
import System.IO.Temp
import System.PosixCompat.Files (getSymbolicLinkStatus, modificationTime)
import qualified Text.URI as URI
import Obelisk.App (MonadObelisk)
import Obelisk.CliApp
import Obelisk.Command.Utils
--TODO: Support symlinked thunk data
-- | The observed on-disk state of a thunk: either just the pointer files
-- ("packed") or a full working copy ("checked out").
data ThunkData
  = ThunkData_Packed ThunkSpec ThunkPtr
    -- ^ Packed thunk
  | ThunkData_Checkout
    -- ^ Checked out thunk that was unpacked from this pointer
-- | A reference to the exact data that a thunk should translate into
data ThunkPtr = ThunkPtr
  { _thunkPtr_rev :: ThunkRev -- ^ The exact revision (commit hash + nix hash).
  , _thunkPtr_source :: ThunkSource -- ^ Where that revision can be fetched from.
  }
  deriving (Show, Eq, Ord)
type NixSha256 = Text --TODO: Use a smart constructor and make this actually verify itself
-- | A specific revision of data; it may be available from multiple sources
data ThunkRev = ThunkRev
  { _thunkRev_commit :: Ref Ref.SHA1 -- ^ The git commit.
  , _thunkRev_nixSha256 :: NixSha256 -- ^ Nix sha256 of the fetched contents.
  }
  deriving (Show, Eq, Ord)
-- | A location from which a thunk's data can be retrieved
data ThunkSource
   -- | A source specialized for GitHub
   = ThunkSource_GitHub GitHubSource
   -- | A plain repo source
   | ThunkSource_Git GitSource
   deriving (Show, Eq, Ord)
-- | View any 'ThunkSource' as a plain git source, converting GitHub sources
-- via 'forgetGithub' (HTTPS, not SSH).
thunkSourceToGitSource :: ThunkSource -> GitSource
thunkSourceToGitSource src = case src of
  ThunkSource_GitHub gh -> forgetGithub False gh
  ThunkSource_Git git -> git
-- | Coordinates of a thunk hosted on GitHub.
data GitHubSource = GitHubSource
  { _gitHubSource_owner :: Name Owner -- ^ Repository owner (user or org).
  , _gitHubSource_repo :: Name Repo -- ^ Repository name.
  , _gitHubSource_branch :: Maybe (Name Branch) -- ^ Tracked branch, if recorded.
  , _gitHubSource_private :: Bool -- ^ Whether the repo is private.
  }
  deriving (Show, Eq, Ord)
-- | A git remote URI.
newtype GitUri = GitUri { unGitUri :: URI.URI } deriving (Eq, Ord, Show)
-- | Render a 'GitUri' to text.  @file@ URIs are rendered as plain absolute
-- paths; everything else uses the standard URI rendering.
gitUriToText :: GitUri -> Text
gitUriToText (GitUri uri)
  | (T.toLower . URI.unRText <$> URI.uriScheme uri) == Just "file"
  , Just (_, path) <- URI.uriPath uri
  = "/" <> T.intercalate "/" (map URI.unRText $ NonEmpty.toList path)
  | otherwise = URI.render uri
-- | Coordinates of a thunk hosted in a plain git repository.
data GitSource = GitSource
  { _gitSource_url :: GitUri -- ^ Remote URL.
  , _gitSource_branch :: Maybe (Name Branch) -- ^ Tracked branch, if recorded.
  , _gitSource_fetchSubmodules :: Bool -- ^ Whether submodules are fetched.
  , _gitSource_private :: Bool -- ^ Whether the repo is private.
  }
  deriving (Show, Eq, Ord)
-- | Common thunk options shared by the update/pack commands.
newtype ThunkConfig = ThunkConfig
  { _thunkConfig_private :: Maybe Bool -- ^ Requested privacy setting, if any.
  } deriving Show
-- | Options for the thunk update command.
data ThunkUpdateConfig = ThunkUpdateConfig
  { _thunkUpdateConfig_branch :: Maybe String -- ^ Branch to switch the thunk to, if any.
  , _thunkUpdateConfig_config :: ThunkConfig
  } deriving Show
-- | Options for the thunk pack command.
data ThunkPackConfig = ThunkPackConfig
  { _thunkPackConfig_force :: Bool -- ^ Force flag, passed through to the pack command.
  , _thunkPackConfig_config :: ThunkConfig
  } deriving Show
-- | Convert a GitHub source to a regular Git source. Assumes no submodules.
-- The first argument selects SSH (git\@github.com) vs HTTPS transport.
forgetGithub :: Bool -> GitHubSource -> GitSource
forgetGithub useSsh s = GitSource
  { _gitSource_url = GitUri $ URI.URI
    { URI.uriScheme = Just $ fromRight' $ URI.mkScheme $ if useSsh then "ssh" else "https"
    , URI.uriAuthority = Right $ URI.Authority
      -- The "git" user is only present for SSH URLs.
      { URI.authUserInfo = URI.UserInfo (fromRight' $ URI.mkUsername "git") Nothing
          <$ guard useSsh
      , URI.authHost = fromRight' $ URI.mkHost "github.com"
      , URI.authPort = Nothing
      }
    , URI.uriPath = Just ( False
                         , fromRight' . URI.mkPathPiece <$>
                             untagName (_gitHubSource_owner s)
                             :| [ untagName (_gitHubSource_repo s) <> ".git" ]
                         )
    , URI.uriQuery = []
    , URI.uriFragment = Nothing
    }
  , _gitSource_branch = _gitHubSource_branch s
  , _gitSource_fetchSubmodules = False
  , _gitSource_private = _gitHubSource_private s
  }
-- | The branch recorded in a thunk pointer's source, if any.
getThunkGitBranch :: ThunkPtr -> Maybe Text
getThunkGitBranch ptr = untagName <$> branch
  where
    branch = case _thunkPtr_source ptr of
      ThunkSource_GitHub s -> _gitHubSource_branch s
      ThunkSource_Git s -> _gitSource_branch s
-- | Interpret a GitHub commit name (a hex string) as a git SHA1 ref.
commitNameToRef :: Name Commit -> Ref Ref.SHA1
commitNameToRef = Ref.fromHex . encodeUtf8 . untagName
-- TODO: Use spinner here.
-- | Ask @nix-prefetch-url --unpack@ for the sha256 of the unpacked contents
-- at the given URI.
getNixSha256ForUriUnpacked
  :: MonadObelisk m
  => GitUri
  -> m NixSha256
getNixSha256ForUriUnpacked uri =
  withExitFailMessage ("nix-prefetch-url: Failed to determine sha256 hash of URL " <> gitUriToText uri) $ do
    output <- fmap T.lines $ readProcessAndLogOutput (Debug, Debug) $
      proc nixPrefetchUrlPath ["--unpack", "--type", "sha256", T.unpack $ gitUriToText uri]
    case output of
      [hash] -> pure hash
      -- Previously a partial pattern bind; fail with a descriptive message
      -- instead of a pattern-match error if the tool's output changes shape.
      _ -> failWith $ "nix-prefetch-url: unexpected output " <> T.unwords output
-- | Ask @nix-prefetch-git@ for the sha256 of the given revision of a git
-- repo, optionally fetching submodules.  The hash is parsed out of the
-- tool's JSON output.
nixPrefetchGit :: MonadObelisk m => GitUri -> Text -> Bool -> m NixSha256
nixPrefetchGit uri rev fetchSubmodules =
  withExitFailMessage ("nix-prefetch-git: Failed to determine sha256 hash of Git repo " <> gitUriToText uri <> " at " <> rev) $ do
    out <- readProcessAndLogStderr Debug $
      proc nixPrefetchGitPath $ filter (/="")
        [ "--url", T.unpack $ gitUriToText uri
        , "--rev", T.unpack rev
        , if fetchSubmodules then "--fetch-submodules" else ""
        , "--quiet"
        ]
    -- Pull the "sha256" field out of the JSON the tool prints on stdout.
    case parseMaybe (Aeson..: "sha256") =<< Aeson.decodeStrict (encodeUtf8 out) of
      Nothing -> failWith $ "nix-prefetch-git: unrecognized output " <> out
      Just x -> pure x
--TODO: Pretty print these
-- | Ways reading a thunk directory can fail.
data ReadThunkError
  = ReadThunkError_UnrecognizedThunk
    -- ^ No known 'ThunkSpec' matched the directory.
  | ReadThunkError_UnrecognizedPaths (NonEmpty FilePath)
    -- ^ Paths present on disk that the spec does not mention.
  | ReadThunkError_MissingPaths (NonEmpty FilePath)
    -- ^ Paths the spec requires but which are absent.
  | ReadThunkError_UnparseablePtr FilePath String
    -- ^ A pointer file exists but failed to parse (path, parser error).
  | ReadThunkError_FileError IOError
    -- ^ An IO error occurred while reading a file.
  | ReadThunkError_FileDoesNotMatch FilePath Text
    -- ^ A fixed-content file does not match its expected contents.
  | ReadThunkError_UnrecognizedState String
    -- ^ Inconsistent on-disk state, e.g. both packed data and a checkout.
  | ReadThunkError_AmbiguousPackedState ThunkPtr ThunkPtr
    -- ^ Two pointer files disagree about the thunk's contents.
  deriving (Show)
-- | Directory (relative to the thunk) that holds the unpacked checkout.
unpackedDirName :: FilePath
unpackedDirName = "."
-- | Directory (relative to the thunk) used to cache built attributes.
attrCacheFileName :: FilePath
attrCacheFileName = ".attr-cache"
-- | Specification for how a file in a thunk version works.
data ThunkFileSpec
  = ThunkFileSpec_Ptr (LBS.ByteString -> Either String ThunkPtr) -- ^ This file specifies 'ThunkPtr' data
  | ThunkFileSpec_FileMatches Text -- ^ This file must match the given content exactly
  | ThunkFileSpec_CheckoutIndicator -- ^ Existence of this directory indicates that the thunk is unpacked
  | ThunkFileSpec_AttrCache -- ^ This directory is an attribute cache
-- | Specification for how a set of files in a thunk version work.
data ThunkSpec = ThunkSpec
  { _thunkSpec_name :: !Text -- ^ Name of this spec version, e.g. @github-v5@.
  , _thunkSpec_files :: !(Map FilePath ThunkFileSpec) -- ^ Per-file spec, keyed by path relative to the thunk.
  }
-- | All known spec families (plain git and github), each ordered newest
-- version first.
thunkSpecTypes :: NonEmpty (NonEmpty ThunkSpec)
thunkSpecTypes = gitThunkSpecs :| [gitHubThunkSpecs]
-- | Attempts to match a 'ThunkSpec' to a given directory.
matchThunkSpecToDir
  :: (MonadError ReadThunkError m, MonadIO m, MonadCatch m)
  => ThunkSpec -- ^ 'ThunkSpec' to match against the given files/directory
  -> FilePath -- ^ Path to directory
  -> Set FilePath -- ^ Set of file paths relative to the given directory
  -> m ThunkData
matchThunkSpecToDir thunkSpec dir dirFiles = do
  -- Reject any paths on disk that the spec does not account for.
  case nonEmpty (toList $ dirFiles `Set.difference` expectedPaths) of
    Just fs -> throwError $ ReadThunkError_UnrecognizedPaths $ (dir </>) <$> fs
    Nothing -> pure ()
  -- Require every path the spec marks as mandatory.
  case nonEmpty (toList $ requiredPaths `Set.difference` dirFiles) of
    Just fs -> throwError $ ReadThunkError_MissingPaths $ (dir </>) <$> fs
    Nothing -> pure ()
  datas <- fmap toList $ flip Map.traverseMaybeWithKey (_thunkSpec_files thunkSpec) $ \expectedPath -> \case
    ThunkFileSpec_AttrCache -> Nothing <$ dirMayExist expectedPath
    ThunkFileSpec_CheckoutIndicator -> liftIO (doesDirectoryExist (dir </> expectedPath)) <&> \case
      False -> Nothing
      True -> Just ThunkData_Checkout
    ThunkFileSpec_FileMatches expectedContents -> handle (\(e :: IOError) -> throwError $ ReadThunkError_FileError e) $ do
      actualContents <- liftIO (T.readFile $ dir </> expectedPath)
      case T.strip expectedContents == T.strip actualContents of
        True -> pure Nothing
        False -> throwError $ ReadThunkError_FileDoesNotMatch (dir </> expectedPath) expectedContents
    ThunkFileSpec_Ptr parser -> handle (\(e :: IOError) -> throwError $ ReadThunkError_FileError e) $ do
      let path = dir </> expectedPath
      liftIO (doesFileExist path) >>= \case
        False -> pure Nothing
        True -> do
          actualContents <- liftIO $ LBS.readFile path
          case parser actualContents of
            Right v -> pure $ Just (ThunkData_Packed thunkSpec v)
            Left e -> throwError $ ReadThunkError_UnparseablePtr (dir </> expectedPath) e
  case nonEmpty datas of
    Nothing -> throwError ReadThunkError_UnrecognizedThunk
    Just xs -> fold1WithM xs $ \a b -> either throwError pure (mergeThunkData a b)
  where
    rootPathsOnly = Set.fromList . mapMaybe takeRootDir . Map.keys
    takeRootDir = fmap NonEmpty.head . nonEmpty . splitPath
    expectedPaths = rootPathsOnly $ _thunkSpec_files thunkSpec
    requiredPaths = rootPathsOnly $ Map.filter isRequiredFileSpec $ _thunkSpec_files thunkSpec
    isRequiredFileSpec = \case
      ThunkFileSpec_FileMatches _ -> True
      _ -> False
    dirMayExist expectedPath = liftIO (doesFileExist (dir </> expectedPath)) >>= \case
      True -> throwError $ ReadThunkError_UnrecognizedPaths $ expectedPath :| []
      False -> pure ()
    -- Combine 'ThunkData' from different files: packed pointers must agree,
    -- and mixing a checkout with packed data is an error.
    mergeThunkData ThunkData_Checkout ThunkData_Checkout = Right ThunkData_Checkout
    mergeThunkData ThunkData_Checkout ThunkData_Packed{} = Left bothPackedAndUnpacked
    mergeThunkData ThunkData_Packed{} ThunkData_Checkout = Left bothPackedAndUnpacked
    mergeThunkData a@(ThunkData_Packed _ ptrA) (ThunkData_Packed _ ptrB) =
      if ptrA == ptrB then Right a else Left $ ReadThunkError_AmbiguousPackedState ptrA ptrB
    bothPackedAndUnpacked = ReadThunkError_UnrecognizedState "Both packed data and checkout present"
    fold1WithM (x :| xs) f = foldM f x xs
-- | Try to read a thunk directory against each of the given spec families,
-- returning the first spec that matches.
readThunkWith
  :: (MonadObelisk m)
  => NonEmpty (NonEmpty ThunkSpec) -> FilePath -> m (Either ReadThunkError ThunkData)
readThunkWith specTypes dir = do
  dirFiles <- Set.fromList <$> liftIO (listDirectory dir)
  let specs = concatMap toList $ toList $ NonEmpty.transpose specTypes -- Interleave spec types so we try each one in a "fair" ordering
  flip fix specs $ \loop -> \case
    [] -> pure $ Left ReadThunkError_UnrecognizedThunk
    spec:rest -> runExceptT (matchThunkSpecToDir spec dir dirFiles) >>= \case
      Left e -> putLog Debug [i|Thunk specification ${_thunkSpec_name spec} did not match ${dir}: ${e}|] *> loop rest
      x@(Right _) -> x <$ putLog Debug [i|Thunk specification ${_thunkSpec_name spec} matched ${dir}|]
-- | Read a thunk and validate that it is exactly a packed thunk.
-- If additional data is present, fail.
readThunk :: (MonadObelisk m) => FilePath -> m (Either ReadThunkError ThunkData)
readThunk = readThunkWith thunkSpecTypes
-- | Parse the common pointer fields (@rev@, @sha256@) from a JSON object,
-- using the supplied parser for the source-specific fields.
parseThunkPtr :: (Aeson.Object -> Aeson.Parser ThunkSource) -> Aeson.Object -> Aeson.Parser ThunkPtr
parseThunkPtr parseSrc v = do
  revHex <- v Aeson..: "rev"
  hash <- v Aeson..: "sha256"
  source <- parseSrc v
  let thunkRev = ThunkRev
        { _thunkRev_commit = Ref.fromHexString revHex
        , _thunkRev_nixSha256 = hash
        }
  pure $ ThunkPtr
    { _thunkPtr_rev = thunkRev
    , _thunkPtr_source = source
    }
-- | Parse the GitHub-specific source fields from a thunk's JSON object.
-- A missing @private@ field defaults to False.
parseGitHubSource :: Aeson.Object -> Aeson.Parser GitHubSource
parseGitHubSource v = GitHubSource
  <$> v Aeson..: "owner"
  <*> v Aeson..: "repo"
  <*> v Aeson..:! "branch"
  <*> (fromMaybe False <$> v Aeson..:? "private")
-- | Parse the plain-git source fields from a thunk's JSON object.
-- Missing @fetchSubmodules@/@private@ fields default to False.
parseGitSource :: Aeson.Object -> Aeson.Parser GitSource
parseGitSource v = do
  rawUrl <- v Aeson..: "url"
  -- Fail with a descriptive message instead of the opaque pattern-match
  -- failure the previous 'Just url <- ...' binding produced.
  url <- maybe (fail $ "parseGitSource: invalid git URL: " <> T.unpack rawUrl) pure $ parseGitUri rawUrl
  branch <- v Aeson..:! "branch"
  fetchSubmodules <- v Aeson..:! "fetchSubmodules"
  private <- v Aeson..:? "private"
  pure $ GitSource
    { _gitSource_url = url
    , _gitSource_branch = branch
    , _gitSource_fetchSubmodules = fromMaybe False fetchSubmodules
    , _gitSource_private = fromMaybe False private
    }
-- | Replace the thunk at the given path with the given pointer, after
-- verifying that the existing contents are a valid thunk.
overwriteThunk :: MonadObelisk m => FilePath -> ThunkPtr -> m ()
overwriteThunk target thunk = do
  -- Ensure that this directory is a valid thunk (i.e. so we aren't losing any data)
  readThunk target >>= \case
    Left e -> failWith [i|Invalid thunk at ${target}: ${e}|]
    Right _ -> pure ()
  --TODO: Is there a safer way to do this overwriting?
  liftIO $ removePathForcibly target
  createThunk target $ Right thunk
-- | The newest spec for the pointer's source family (github vs plain git).
thunkPtrToSpec :: ThunkPtr -> ThunkSpec
thunkPtrToSpec thunk = NonEmpty.head $ case _thunkPtr_source thunk of
  ThunkSource_GitHub _ -> gitHubThunkSpecs
  ThunkSource_Git _ -> gitThunkSpecs
-- It's important that formatting be very consistent here, because
-- otherwise when people update thunks, their patches will be messy
-- | Encode a 'ThunkPtr' as the canonical JSON bytes written to
-- @github.json@ / @git.json@.  Key order and indentation are pinned so that
-- regenerated files produce minimal diffs.
encodeThunkPtrData :: ThunkPtr -> LBS.ByteString
encodeThunkPtrData (ThunkPtr rev src) = case src of
  ThunkSource_GitHub s -> encodePretty' githubCfg $ Aeson.object $ catMaybes
    [ Just $ "owner" .= _gitHubSource_owner s
    , Just $ "repo" .= _gitHubSource_repo s
    , ("branch" .=) <$> _gitHubSource_branch s
    , Just $ "rev" .= Ref.toHexString (_thunkRev_commit rev)
    , Just $ "sha256" .= _thunkRev_nixSha256 rev
    , Just $ "private" .= _gitHubSource_private s
    ]
  ThunkSource_Git s -> encodePretty' plainGitCfg $ Aeson.object $ catMaybes
    [ Just $ "url" .= gitUriToText (_gitSource_url s)
    , Just $ "rev" .= Ref.toHexString (_thunkRev_commit rev)
    , ("branch" .=) <$> _gitSource_branch s
    , Just $ "sha256" .= _thunkRev_nixSha256 rev
    , Just $ "fetchSubmodules" .= _gitSource_fetchSubmodules s
    , Just $ "private" .= _gitSource_private s
    ]
  where
    githubCfg = defConfig
      { confIndent = Spaces 2
      , confCompare = keyOrder
        [ "owner"
        , "repo"
        , "branch"
        , "private"
        , "rev"
        , "sha256"
        ] <> compare
      , confTrailingNewline = True
      }
    plainGitCfg = defConfig
      { confIndent = Spaces 2
      , confCompare = keyOrder
        [ "url"
        , "rev"
        , "sha256"
        , "private"
        , "fetchSubmodules"
        ] <> compare
      , confTrailingNewline = True
      }
-- | Write a thunk's files to the target directory.  When given only a
-- 'ThunkSpec' (no pointer), the pointer file is skipped.
createThunk :: MonadObelisk m => FilePath -> Either ThunkSpec ThunkPtr -> m ()
createThunk target ptrInfo =
  ifor_ (_thunkSpec_files spec) $ \path -> \case
    ThunkFileSpec_FileMatches content -> withReadyPath path $ \p -> liftIO $ T.writeFile p content
    ThunkFileSpec_Ptr _ -> case ptrInfo of
      Left _ -> pure () -- We can't write the ptr without it
      Right ptr -> withReadyPath path $ \p -> liftIO $ LBS.writeFile p (encodeThunkPtrData ptr)
    _ -> pure ()
  where
    spec = either id thunkPtrToSpec ptrInfo
    withReadyPath path f = do
      let fullPath = target </> path
      putLog Debug $ "Writing thunk file " <> T.pack fullPath
      liftIO $ createDirectoryIfMissing True $ takeDirectory fullPath
      f fullPath
-- | Create a thunk at the target pointing at the latest revision of the
-- given source.
createThunkWithLatest :: MonadObelisk m => FilePath -> ThunkSource -> m ()
createThunkWithLatest target source =
  getLatestRev source >>= \rev ->
    createThunk target $ Right ThunkPtr
      { _thunkPtr_source = source
      , _thunkPtr_rev = rev
      }
-- | Point the thunk at the latest revision of its source, optionally
-- switching it to a different branch first.  Refuses to operate on an
-- unpacked thunk.
updateThunkToLatest :: MonadObelisk m => ThunkUpdateConfig -> FilePath -> m ()
updateThunkToLatest (ThunkUpdateConfig mBranch thunkConfig) target = spinner $ do
  checkThunkDirectory target
  -- check to see if thunk should be updated to a specific branch or just update its current branch
  case mBranch of
    Nothing -> do
      (overwrite, ptr) <- readThunk target >>= \case
        Left err -> failWith [i|Thunk update: ${err}|]
        Right c -> case c of
          ThunkData_Packed _ t -> return (target, t)
          ThunkData_Checkout -> failWith "cannot update an unpacked thunk"
      let src = _thunkPtr_source ptr
      rev <- getLatestRev src
      overwriteThunk overwrite $ modifyThunkPtrByConfig thunkConfig $ ThunkPtr
        { _thunkPtr_source = src
        , _thunkPtr_rev = rev
        }
    Just branch -> readThunk target >>= \case
      Left err -> failWith [i|Thunk update: ${err}|]
      Right c -> case c of
        ThunkData_Packed _ t -> setThunk thunkConfig target (thunkSourceToGitSource $ _thunkPtr_source t) branch
        ThunkData_Checkout -> failWith [i|Thunk located at ${target} is unpacked. Use 'ob thunk pack' on the desired directory and then try 'ob thunk update' again.|]
  where
    spinner = withSpinner' ("Updating thunk " <> T.pack target <> " to latest") (pure $ const $ "Thunk " <> T.pack target <> " updated to latest")
-- | Re-point a thunk at the given branch of the given git source, then
-- update it to that branch's latest revision.
setThunk :: MonadObelisk m => ThunkConfig -> FilePath -> GitSource -> String -> m ()
setThunk thunkConfig target gs branch = do
  newThunkPtr <- uriThunkPtr (_gitSource_url gs) (_thunkConfig_private thunkConfig) (Just $ T.pack branch) Nothing
  overwriteThunk target newThunkPtr
  updateThunkToLatest (ThunkUpdateConfig Nothing thunkConfig) target
-- | All recognized github standalone loaders, ordered from newest to oldest.
-- This tool will only ever produce the newest one when it writes a thunk.
gitHubThunkSpecs :: NonEmpty ThunkSpec
gitHubThunkSpecs =
  gitHubThunkSpecV5 :|
  [ gitHubThunkSpecV4
  , gitHubThunkSpecV3
  , gitHubThunkSpecV2
  , gitHubThunkSpecV1
  ]
-- | The oldest github loader: a one-line @default.nix@ wrapping
-- @fetchFromGitHub@.  Kept so existing thunks are still recognized.
gitHubThunkSpecV1 :: ThunkSpec
gitHubThunkSpecV1 = legacyGitHubThunkSpec "github-v1"
  "import ((import <nixpkgs> {}).fetchFromGitHub (builtins.fromJSON (builtins.readFile ./github.json)))"
-- | Legacy github loader v2; adds the @private@ flag.  The loader text is
-- frozen: it must match existing thunks byte-for-byte.
gitHubThunkSpecV2 :: ThunkSpec
gitHubThunkSpecV2 = legacyGitHubThunkSpec "github-v2" $ T.unlines --TODO: Add something about how to get more info on Obelisk, etc.
  [ "# DO NOT HAND-EDIT THIS FILE"
  , "import ((import <nixpkgs> {}).fetchFromGitHub ("
  , "  let json = builtins.fromJSON (builtins.readFile ./github.json);"
  , "  in { inherit (json) owner repo rev sha256;"
  , "       private = json.private or false;"
  , "     }"
  , "))"
  ]
-- | Legacy github loader v3; uses builtins.fetchGit over SSH for private
-- repos.  The SSH URL in this literal had been corrupted (the
-- @ssh://git\@@ prefix was stripped); restored here.
gitHubThunkSpecV3 :: ThunkSpec
gitHubThunkSpecV3 = legacyGitHubThunkSpec "github-v3" $ T.unlines
  [ "# DO NOT HAND-EDIT THIS FILE"
  , "let"
  , "  fetch = { private ? false, ... }@args: if private && builtins.hasAttr \"fetchGit\" builtins"
  , "    then fetchFromGitHubPrivate args"
  , "    else (import <nixpkgs> {}).fetchFromGitHub (builtins.removeAttrs args [\"branch\"]);"
  , "  fetchFromGitHubPrivate ="
  , "    { owner, repo, rev, branch ? null, name ? null, sha256 ? null, private ? false"
  , "    , fetchSubmodules ? false, githubBase ? \"github.com\", ..."
  , "    }: assert !fetchSubmodules;"
  , "      builtins.fetchGit ({"
  , "        url = \"ssh://git@${githubBase}/${owner}/${repo}.git\";"
  , "        inherit rev;"
  , "      }"
  , "      // (if branch == null then {} else { ref = branch; })"
  , "      // (if name == null then {} else { inherit name; }));"
  , "in import (fetch (builtins.fromJSON (builtins.readFile ./github.json)))"
  ]
-- | Legacy github loader v4; fetches a tarball for public, submodule-free
-- repos.  The tarball URL in this literal had been corrupted (the
-- @https://github.com@ prefix was stripped); restored here.
gitHubThunkSpecV4 :: ThunkSpec
gitHubThunkSpecV4 = legacyGitHubThunkSpec "github-v4" $ T.unlines
  [ "# DO NOT HAND-EDIT THIS FILE"
  , "let fetch = { private ? false, fetchSubmodules ? false, owner, repo, rev, sha256, ... }:"
  , "  if !fetchSubmodules && !private then builtins.fetchTarball {"
  , "    url = \"https://github.com/${owner}/${repo}/archive/${rev}.tar.gz\"; inherit sha256;"
  , "  } else (import <nixpkgs> {}).fetchFromGitHub {"
  , "    inherit owner repo rev sha256 fetchSubmodules private;"
  , "  };"
  , "in import (fetch (builtins.fromJSON (builtins.readFile ./github.json)))"
  ]
-- | Build a pre-v5 github 'ThunkSpec': a @default.nix@ that must match the
-- given loader text, plus the pointer file, attr cache, and checkout marker.
legacyGitHubThunkSpec :: Text -> Text -> ThunkSpec
legacyGitHubThunkSpec name loader = ThunkSpec name $ Map.fromList
  [ ("default.nix", ThunkFileSpec_FileMatches $ T.strip loader)
  , ("github.json" , ThunkFileSpec_Ptr parseGitHubJsonBytes)
  , (attrCacheFileName, ThunkFileSpec_AttrCache)
  , (".git", ThunkFileSpec_CheckoutIndicator)
  ]
-- | The current github loader (built via 'mkThunkSpec'): fetches a tarball
-- for public, submodule-free repos, and falls back to @fetchFromGitHub@
-- otherwise.  The tarball URL in this literal had been corrupted (the
-- @https://github.com@ prefix was stripped); restored here.
gitHubThunkSpecV5 :: ThunkSpec
gitHubThunkSpecV5 = mkThunkSpec "github-v5" "github.json" parseGitHubJsonBytes [here|
# DO NOT HAND-EDIT THIS FILE
let fetch = { private ? false, fetchSubmodules ? false, owner, repo, rev, sha256, ... }:
  if !fetchSubmodules && !private then builtins.fetchTarball {
    url = "https://github.com/${owner}/${repo}/archive/${rev}.tar.gz"; inherit sha256;
  } else (import <nixpkgs> {}).fetchFromGitHub {
    inherit owner repo rev sha256 fetchSubmodules private;
  };
  json = builtins.fromJSON (builtins.readFile ./github.json);
in fetch json
|]
-- | Parse a @github.json@ pointer file; falls back to the plain-git field
-- set, since github thunk files may also carry a git source.
parseGitHubJsonBytes :: LBS.ByteString -> Either String ThunkPtr
parseGitHubJsonBytes = parseJsonObject $ parseThunkPtr $ \v ->
  ThunkSource_GitHub <$> parseGitHubSource v <|> ThunkSource_Git <$> parseGitSource v
-- | All recognized plain-git standalone loaders, ordered from newest to
-- oldest; only the newest is ever written.
gitThunkSpecs :: NonEmpty ThunkSpec
gitThunkSpecs =
  gitThunkSpecV5 :|
  [ gitThunkSpecV4
  , gitThunkSpecV3
  , gitThunkSpecV2
  , gitThunkSpecV1
  ]
gitThunkSpecV1 :: ThunkSpec
gitThunkSpecV1 = legacyGitThunkSpec "git-v1" $ T.unlines
[ "# DO NOT HAND-EDIT THIS FILE"
, "let fetchGit = {url, rev, ref ? null, branch ? null, sha256 ? null, fetchSubmodules ? null}:"
, " assert !fetchSubmodules; (import <nixpkgs> {}).fetchgit { inherit url rev sha256; };"
, "in import (fetchGit (builtins.fromJSON (builtins.readFile ./git.json)))"
]
gitThunkSpecV2 :: ThunkSpec
gitThunkSpecV2 = legacyGitThunkSpec "git-v2" $ T.unlines
[ "# DO NOT HAND-EDIT THIS FILE"
, "let fetchGit = {url, rev, ref ? null, branch ? null, sha256 ? null, fetchSubmodules ? null}:"
, " if builtins.hasAttr \"fetchGit\" builtins"
, " then builtins.fetchGit ({ inherit url rev; } // (if branch == null then {} else { ref = branch; }))"
, " else abort \"Plain Git repositories are only supported on nix 2.0 or higher.\";"
, "in import (fetchGit (builtins.fromJSON (builtins.readFile ./git.json)))"
]
-- This loader has a bug because @builtins.fetchGit@ is not given a @ref@
-- and will fail to find commits without this because it does shallow clones.
gitThunkSpecV3 :: ThunkSpec
gitThunkSpecV3 = legacyGitThunkSpec "git-v3" $ T.unlines
[ "# DO NOT HAND-EDIT THIS FILE"
, "let fetch = {url, rev, ref ? null, sha256 ? null, fetchSubmodules ? false, private ? false, ...}:"
, " let realUrl = let firstChar = builtins.substring 0 1 url; in"
, " if firstChar == \"/\" then /. + url"
, " else if firstChar == \".\" then ./. + url"
, " else url;"
, " in if !fetchSubmodules && private then builtins.fetchGit {"
, " url = realUrl; inherit rev;"
, " } else (import <nixpkgs> {}).fetchgit {"
, " url = realUrl; inherit rev sha256;"
, " };"
, "in import (fetch (builtins.fromJSON (builtins.readFile ./git.json)))"
]
gitThunkSpecV4 :: ThunkSpec
gitThunkSpecV4 = legacyGitThunkSpec "git-v4" $ T.unlines
[ "# DO NOT HAND-EDIT THIS FILE"
, "let fetch = {url, rev, branch ? null, sha256 ? null, fetchSubmodules ? false, private ? false, ...}:"
, " let realUrl = let firstChar = builtins.substring 0 1 url; in"
, " if firstChar == \"/\" then /. + url"
, " else if firstChar == \".\" then ./. + url"
, " else url;"
, " in if !fetchSubmodules && private then builtins.fetchGit {"
, " url = realUrl; inherit rev;"
, " ${if branch == null then null else \"ref\"} = branch;"
, " } else (import <nixpkgs> {}).fetchgit {"
, " url = realUrl; inherit rev sha256;"
, " };"
, "in import (fetch (builtins.fromJSON (builtins.readFile ./git.json)))"
]
-- | Build a pre-v5 plain-git 'ThunkSpec': a @default.nix@ that must match
-- the given loader text, plus the pointer file, attr cache, and checkout
-- marker.
legacyGitThunkSpec :: Text -> Text -> ThunkSpec
legacyGitThunkSpec name loader = ThunkSpec name $ Map.fromList
  [ ("default.nix", ThunkFileSpec_FileMatches $ T.strip loader)
  , ("git.json" , ThunkFileSpec_Ptr parseGitJsonBytes)
  , (attrCacheFileName, ThunkFileSpec_AttrCache)
  , (".git", ThunkFileSpec_CheckoutIndicator)
  ]
-- | The current plain-git loader (built via 'mkThunkSpec'): supports
-- absolute/relative paths and private repos via builtins.fetchGit.
gitThunkSpecV5 :: ThunkSpec
gitThunkSpecV5 = mkThunkSpec "git-v5" "git.json" parseGitJsonBytes [here|
# DO NOT HAND-EDIT THIS FILE
let fetch = {url, rev, branch ? null, sha256 ? null, fetchSubmodules ? false, private ? false, ...}:
  let realUrl = let firstChar = builtins.substring 0 1 url; in
    if firstChar == "/" then /. + url
    else if firstChar == "." then ./. + url
    else url;
  in if !fetchSubmodules && private then builtins.fetchGit {
    url = realUrl; inherit rev;
    ${if branch == null then null else "ref"} = branch;
  } else (import <nixpkgs> {}).fetchgit {
    url = realUrl; inherit rev sha256;
  };
  json = builtins.fromJSON (builtins.readFile ./git.json);
in fetch json
|]
-- | Parse a @git.json@ pointer file.
parseGitJsonBytes :: LBS.ByteString -> Either String ThunkPtr
parseGitJsonBytes = parseJsonObject $ parseThunkPtr $ fmap ThunkSource_Git . parseGitSource
-- | Build a current-generation 'ThunkSpec': a fixed @default.nix@ shim, the
-- given loader as @thunk.nix@, the pointer JSON file, the attr cache, and
-- the checkout indicator.
mkThunkSpec :: Text -> FilePath -> (LBS.ByteString -> Either String ThunkPtr) -> Text -> ThunkSpec
mkThunkSpec name jsonFileName parser srcNix = ThunkSpec name $ Map.fromList
  [ ("default.nix", ThunkFileSpec_FileMatches defaultNixViaSrc)
  , ("thunk.nix", ThunkFileSpec_FileMatches srcNix)
  , (jsonFileName, ThunkFileSpec_Ptr parser)
  , (attrCacheFileName, ThunkFileSpec_AttrCache)
  , (normalise $ unpackedDirName </> ".git", ThunkFileSpec_CheckoutIndicator)
  ]
  where
    defaultNixViaSrc = [here|
# DO NOT HAND-EDIT THIS FILE
import (import ./thunk.nix)
|]
-- | Decode bytes as a JSON value and run the given object parser on it.
parseJsonObject :: (Aeson.Object -> Aeson.Parser a) -> LBS.ByteString -> Either String a
parseJsonObject p bytes = Aeson.eitherDecode bytes >>= Aeson.parseEither p
-- | Checks a cache directory to see if there is a fresh symlink
-- to the result of building an attribute of a thunk.
-- If no cache hit is found, nix-build is called to build the attribute
-- and the result is symlinked into the cache.
-- Returns 'Nothing' when the spec declares no attr-cache directory.
nixBuildThunkAttrWithCache
  :: MonadObelisk m
  => ThunkSpec
  -> FilePath
  -- ^ Path to directory containing Thunk
  -> String
  -- ^ Attribute to build
  -> m (Maybe FilePath)
  -- ^ Symlink to cached or built nix output
-- WARNING: If the thunk uses an impure reference such as '<nixpkgs>'
-- the caching mechanism will fail as it merely measures the modification
-- time of the cache link and the expression to build.
nixBuildThunkAttrWithCache thunkSpec thunkDir attr = do
  -- Freshness is judged against the newest modification time of any of the
  -- thunk's spec files; missing files are ignored.
  latestChange <- liftIO $ do
    let
      getModificationTimeMaybe = fmap rightToMaybe . try @IOError . getModificationTime
      thunkFileNames = Map.keys $ _thunkSpec_files thunkSpec
    maximum . catMaybes <$> traverse (getModificationTimeMaybe . (thunkDir </>)) thunkFileNames
  let cachePaths' = nonEmpty $ Map.keys $ Map.filter (\case ThunkFileSpec_AttrCache -> True; _ -> False) $
        _thunkSpec_files thunkSpec
  for cachePaths' $ \cachePaths ->
    fmap NonEmpty.head $ for cachePaths $ \cacheDir -> do
      let
        cachePath = thunkDir </> cacheDir </> attr <.> "out"
        cacheErrHandler e
          | isDoesNotExistError e = pure Nothing -- expected from a cache miss
          | otherwise = Nothing <$ putLog Error (T.pack $ displayException e)
      -- A hit requires the cache link to be at least as new as the spec files.
      cacheHit <- handle cacheErrHandler $ do
        cacheTime <- liftIO $ posixSecondsToUTCTime . realToFrac . modificationTime <$> getSymbolicLinkStatus cachePath
        pure $ if latestChange <= cacheTime
          then Just cachePath
          else Nothing
      case cacheHit of
        Just c -> pure c
        Nothing -> do
          putLog Warning $ T.pack $ mconcat [thunkDir, ": ", attr, " not cached, building ..."]
          liftIO $ createDirectoryIfMissing True (takeDirectory cachePath)
          (cachePath <$) $ nixCmd $ NixCmd_Build $ def
            & nixBuildConfig_outLink .~ OutLink_IndirectRoot cachePath
            & nixCmdConfig_target .~ Target
              { _target_path = Just thunkDir
              , _target_attr = Just attr
              , _target_expr = Nothing
              }
-- | Build a nix attribute, and cache the result if possible
nixBuildAttrWithCache
  :: MonadObelisk m
  => FilePath
  -- ^ Path to directory containing Thunk
  -> String
  -- ^ Attribute to build
  -> m FilePath
  -- ^ Symlink to cached or built nix output
nixBuildAttrWithCache exprPath attr = readThunk exprPath >>= \case
  -- Only packed thunks are cached. In particular, checkouts are not.
  Right (ThunkData_Packed spec _) ->
    maybe build pure =<< nixBuildThunkAttrWithCache spec exprPath attr
  _ -> build
  where
    -- Uncached fallback: plain nix-build with no out-link.
    build = nixCmd $ NixCmd_Build $ def
      & nixBuildConfig_outLink .~ OutLink_None
      & nixCmdConfig_target .~ Target
        { _target_path = Just exprPath
        , _target_attr = Just attr
        , _target_expr = Nothing
        }
-- | Safely update thunk using a custom action
--
-- A temporary working space is used to do any update. When the custom
-- action successfully completes, the resulting (packed) thunk is copied
-- back to the original location.
updateThunk :: MonadObelisk m => FilePath -> (FilePath -> m a) -> m a
updateThunk p f = withSystemTempDirectory "obelisk-thunkptr-" $ \tmpDir -> do
  p' <- copyThunkToTmp tmpDir p
  unpackThunk' True p'
  result <- f p'
  updateThunkFromTmp p'
  return result
  where
    -- Only a packed thunk may be copied into the temp working space.
    copyThunkToTmp tmpDir thunkDir = readThunk thunkDir >>= \case
      Left err -> failWith $ "withThunkUnpacked: " <> T.pack (show err)
      Right ThunkData_Packed{} -> do
        let tmpThunk = tmpDir </> "thunk"
        callProcessAndLogOutput (Notice, Error) $
          proc cp ["-r", "-T", thunkDir, tmpThunk]
        return tmpThunk
      Right _ -> failWith "Thunk is not packed"
    -- Re-pack the temp copy and sync it back over the original location.
    updateThunkFromTmp p' = do
      _ <- packThunk' True (ThunkPackConfig False (ThunkConfig Nothing)) p'
      callProcessAndLogOutput (Notice, Error) $
        proc cp ["-r", "-T", p', p]
-- | Choose a spinner completion message: suppress it entirely when the
-- caller asked for no trailing output, otherwise use the given renderer.
finalMsg :: Bool -> (a -> Text) -> Maybe (a -> Text)
finalMsg noTrail render
  | noTrail = Nothing
  | otherwise = Just render
-- | Check that we are not somewhere inside the thunk directory
--
-- Fails when the current working directory is inside the (canonicalized)
-- thunk path, or when the given path is itself a thunk's unpacked source
-- directory.
checkThunkDirectory :: MonadObelisk m => FilePath -> m ()
checkThunkDirectory thunkDir = do
  currentDir <- liftIO getCurrentDirectory
  thunkDir' <- liftIO $ canonicalizePath thunkDir
  -- NOTE(review): this is a substring test on paths, not a prefix test;
  -- presumably good enough here, but it can match unrelated paths -- confirm.
  when (thunkDir' `L.isInfixOf` currentDir) $
    failWith [i|Can't perform thunk operations from within the thunk directory: ${thunkDir}|]

  -- Don't let thunk commands work when directly given an unpacked repo
  when (takeFileName thunkDir == unpackedDirName) $
    readThunk (takeDirectory thunkDir) >>= \case
      Right _ -> failWith [i|Refusing to perform thunk operation on ${thunkDir} because it is a thunk's unpacked source|]
      Left _ -> pure ()
-- | Unpack a packed thunk into a full checkout, with a trailing spinner
-- message enabled.
unpackThunk :: MonadObelisk m => FilePath -> m ()
unpackThunk thunkDir = unpackThunk' False thunkDir
-- Worker for 'unpackThunk'. The 'Bool' suppresses the trailing spinner
-- message when set. Clones into a sibling temp directory first, then
-- atomically swaps it into place.
unpackThunk' :: MonadObelisk m => Bool -> FilePath -> m ()
unpackThunk' noTrail thunkDir = checkThunkDirectory thunkDir *> readThunk thunkDir >>= \case
  Left err -> failWith [i|Invalid thunk at ${thunkDir}: ${err}|]
  --TODO: Overwrite option that rechecks out thunk; force option to do so even if working directory is dirty
  Right ThunkData_Checkout -> failWith [i|Thunk at ${thunkDir} is already unpacked|]
  Right (ThunkData_Packed _ tptr) -> do
    let (thunkParent, thunkName) = splitFileName thunkDir
    withTempDirectory thunkParent thunkName $ \tmpThunk -> do
      let
        gitSrc = thunkSourceToGitSource $ _thunkPtr_source tptr
        -- Re-emit the thunk metadata using the newest spec for this source type.
        newSpec = case _thunkPtr_source tptr of
          ThunkSource_GitHub _ -> NonEmpty.head gitHubThunkSpecs
          ThunkSource_Git _ -> NonEmpty.head gitThunkSpecs
      withSpinner' ("Fetching thunk " <> T.pack thunkName)
        (finalMsg noTrail $ const $ "Fetched thunk " <> T.pack thunkName) $ do
        let unpackedPath = tmpThunk </> unpackedDirName
        gitCloneForThunkUnpack gitSrc (_thunkRev_commit $ _thunkPtr_rev tptr) unpackedPath
        let normalizeMore = dropTrailingPathSeparator . normalise
        when (normalizeMore unpackedPath /= normalizeMore tmpThunk) $ -- Only write meta data if the checkout is not inplace
          createThunk tmpThunk $ Left newSpec
        liftIO $ do
          removePathForcibly thunkDir
          renameDirectory tmpThunk thunkDir
-- Clone the thunk's git source into the given directory, hard-reset to the
-- recorded commit, and initialize submodules when the source requests it.
gitCloneForThunkUnpack
  :: MonadObelisk m
  => GitSource -- ^ Git source to use
  -> Ref hash -- ^ Commit hash to reset to
  -> FilePath -- ^ Directory to clone into
  -> m ()
gitCloneForThunkUnpack gitSrc commit dir = do
  let git = callProcessAndLogOutput (Notice, Notice) . gitProc dir
  git $ [ "clone" ]
    ++ ["--recursive" | _gitSource_fetchSubmodules gitSrc]
    ++ [ T.unpack $ gitUriToText $ _gitSource_url gitSrc ]
    -- Pass --branch only when the source actually records a branch.
    ++ do branch <- maybeToList $ _gitSource_branch gitSrc
          [ "--branch", T.unpack $ untagName branch ]
  git ["reset", "--hard", Ref.toHexString commit]
  when (_gitSource_fetchSubmodules gitSrc) $
    git ["submodule", "update", "--recursive", "--init"]
--TODO: add a rollback mode to pack to the original thunk
-- | Pack a thunk checkout back into a packed thunk, with a trailing spinner
-- message enabled.
packThunk :: MonadObelisk m => ThunkPackConfig -> FilePath -> m ThunkPtr
packThunk config thunkDir = packThunk' False config thunkDir
-- Worker for 'packThunk'. The 'Bool' suppresses the trailing spinner message
-- when set. Computes the 'ThunkPtr' for the checkout (with cleanliness checks
-- unless @force@ is set), deletes the checkout, and writes the packed thunk
-- in its place.
packThunk' :: MonadObelisk m => Bool -> ThunkPackConfig -> FilePath -> m ThunkPtr
packThunk' noTrail (ThunkPackConfig force thunkConfig) thunkDir = checkThunkDirectory thunkDir *> readThunk thunkDir >>= \case
  -- Fixed duplicated word in the error message ("is is" -> "is").
  Right ThunkData_Packed{} -> failWith [i|Thunk at ${thunkDir} is already packed|]
  _ -> withSpinner'
    ("Packing thunk " <> T.pack thunkDir)
    (finalMsg noTrail $ const $ "Packed thunk " <> T.pack thunkDir) $
    do
      let checkClean = if force then CheckClean_NoCheck else CheckClean_FullCheck
      thunkPtr <- modifyThunkPtrByConfig thunkConfig <$> getThunkPtr checkClean thunkDir (_thunkConfig_private thunkConfig)
      liftIO $ removePathForcibly thunkDir
      createThunk thunkDir $ Right thunkPtr
      pure thunkPtr
-- | Apply a 'ThunkConfig' to a 'ThunkPtr'. Currently this only overrides the
-- source's privacy flag, and only when the config actually specifies one.
modifyThunkPtrByConfig :: ThunkConfig -> ThunkPtr -> ThunkPtr
modifyThunkPtrByConfig (ThunkConfig mMarkPrivate) ptr =
  maybe ptr setPrivacy mMarkPrivate
  where
    setPrivacy isPrivate = ptr
      { _thunkPtr_source = case _thunkPtr_source ptr of
          ThunkSource_Git s -> ThunkSource_Git s { _gitSource_private = isPrivate }
          ThunkSource_GitHub s -> ThunkSource_GitHub s { _gitHubSource_private = isPrivate }
      }
-- | How thoroughly to verify that a thunk checkout contains no unsaved work
-- before packing it.
data CheckClean
  = CheckClean_FullCheck
  -- ^ Check that the repo is clean, including .gitignored files
  | CheckClean_NotIgnored
  -- ^ Check that the repo is clean, not including .gitignored files
  | CheckClean_NoCheck
  -- ^ Don't check that the repo is clean
-- | Inspect an unpacked thunk checkout and compute the 'ThunkPtr' it should
-- pack to. Depending on 'CheckClean' this verifies that no work would be
-- lost: no dirty files, no stashes, no detached HEAD, no branches lacking an
-- upstream, and no commits that are not pushed upstream.
getThunkPtr :: forall m. MonadObelisk m => CheckClean -> FilePath -> Maybe Bool -> m ThunkPtr
getThunkPtr gitCheckClean dir mPrivate = do
  -- The checkout's .git may live directly in 'dir' or under the unpacked
  -- subdirectory; use the first that exists.
  let repoLocations = nubOrd $ map (first normalise)
        [(".git", "."), (unpackedDirName </> ".git", unpackedDirName)]
  repoLocation' <- liftIO $ flip findM repoLocations $ doesDirectoryExist . (dir </>) . fst
  thunkDir <- case repoLocation' of
    Nothing -> failWith [i|Can't find an unpacked thunk in ${dir}|]
    Just (_, path) -> pure $ normalise $ dir </> path

  let (checkClean, checkIgnored) = case gitCheckClean of
        CheckClean_FullCheck -> (True, True)
        CheckClean_NotIgnored -> (True, False)
        CheckClean_NoCheck -> (False, False)
  when checkClean $ ensureCleanGitRepo thunkDir checkIgnored
    "thunk pack: thunk checkout contains unsaved modifications"

  -- Check whether there are any stashes
  when checkClean $ do
    stashOutput <- readGitProcess thunkDir ["stash", "list"]
    unless (T.null stashOutput) $
      failWith $ T.unlines $
        [ "thunk pack: thunk checkout has stashes"
        , "git stash list:"
        ] ++ T.lines stashOutput

  -- Get current branch
  (mCurrentBranch, mCurrentCommit) <- do
    b <- listToMaybe . T.lines <$> readGitProcess thunkDir ["rev-parse", "--abbrev-ref", "HEAD"]
    c <- listToMaybe . T.lines <$> readGitProcess thunkDir ["rev-parse", "HEAD"]
    case b of
      -- rev-parse --abbrev-ref prints the literal string "HEAD" when detached.
      (Just "HEAD") -> failWith $ T.unlines
        [ "thunk pack: You are in 'detached HEAD' state."
        , "If you want to pack at the current ref \
          \then please create a new branch with 'git checkout -b <new-branch-name>' and push this upstream."
        ]
      _ -> return (b, c)

  -- Get information on all branches and their (optional) designated upstream
  -- correspondents
  headDump :: [Text] <- T.lines <$> readGitProcess thunkDir
    [ "for-each-ref"
    , "--format=%(refname:short) %(upstream:short) %(upstream:remotename)"
    , "refs/heads/"
    ]

  -- Each line is "<branch> [<upstream> <remote>]"; branches without an
  -- upstream produce just the branch name.
  (headInfo :: Map Text (Maybe (Text, Text)))
    <- fmap Map.fromList $ forM headDump $ \line -> do
      (branch : restOfLine) <- pure $ T.words line
      mUpstream <- case restOfLine of
        [] -> pure Nothing
        [u, r] -> pure $ Just (u, r)
        (_:_) -> failWith "git for-each-ref invalid output"
      pure (branch, mUpstream)

  putLog Debug $ "branches: " <> T.pack (show headInfo)

  -- Split branches into those without an upstream (errorMap) and those with
  -- one (headUpstream).
  let errorMap :: Map Text ()
      headUpstream :: Map Text (Text, Text)
      (errorMap, headUpstream) = flip Map.mapEither headInfo $ \case
        Nothing -> Left ()
        Just b -> Right b

  putLog Debug $ "branches with upstream branch set: " <> T.pack (show headUpstream)

  -- Check that every branch has a remote equivalent
  when checkClean $ do
    let untrackedBranches = Map.keys errorMap
    when (not $ L.null untrackedBranches) $ failWith $ T.unlines $
      [ "thunk pack: Certain branches in the thunk have no upstream branch \
        \set. This means we don't know to check whether all your work is \
        \saved. The offending branches are:"
      , ""
      , T.unwords untrackedBranches
      , ""
      , "To fix this, you probably want to do:"
      , ""
      ] ++
      ((\branch -> "git push -u origin " <> branch) <$> untrackedBranches) ++
      [ ""
      , "These will push the branches to the default remote under the same \
        \name, and (thanks to the `-u`) remember that choice so you don't \
        \get this error again."
      ]

  -- Loosely based on a Stack Overflow recipe for showing git ahead/behind
  -- info for all branches including remotes.
  stats <- ifor headUpstream $ \branch (upstream, _remote) -> do
    (stat :: [Text]) <- T.lines <$> readGitProcess thunkDir
      [ "rev-list", "--left-right"
      , T.unpack branch <> "..." <> T.unpack upstream
      ]
    -- rev-list --left-right prefixes commits only on the left side with '<'
    -- and only on the right side with '>'.
    let ahead = length $ [ () | Just ('<', _) <- T.uncons <$> stat ]
        behind = length $ [ () | Just ('>', _) <- T.uncons <$> stat ]
    pure (upstream, (ahead, behind))

  -- Those branches which have commits ahead of, i.e. not on, the upstream
  -- branch. Purely being behind is fine.
  let nonGood = Map.filter ((/= 0) . fst . snd) stats

  when (not $ Map.null nonGood) $ failWith $ T.unlines $
    [ "thunk pack: Certain branches in the thunk have commits not yet pushed upstream:"
    , ""
    ] ++
    flip map (Map.toList nonGood) (\(branch, (upstream, (ahead, behind))) -> mconcat
      [" ", branch, " ahead: ", T.pack (show ahead), " behind: ", T.pack (show behind), " remote branch ", upstream]) ++
    [ ""
    , "Please push these upstream and try again. (Or just fetch, if they are somehow \
      \pushed but this repo's remote tracking branches don't know it.)"
    ]

  when checkClean $ do
    -- We assume it's safe to pack the thunk at this point
    putLog Informational "All changes safe in git remotes. OK to pack thunk."

  -- Resolve the remote of the current branch (defaulting to "origin") and
  -- build the pointer from its configured URL.
  let remote = maybe "origin" snd $ flip Map.lookup headUpstream =<< mCurrentBranch

  [remoteUri'] <- fmap T.lines $ readGitProcess thunkDir
    [ "config"
    , "--get"
    , "remote." <> T.unpack remote <> ".url"
    ]

  remoteUri <- case parseGitUri remoteUri' of
    Nothing -> failWith $ "Could not identify git remote: " <> remoteUri'
    Just uri -> pure uri
  uriThunkPtr remoteUri mPrivate mCurrentBranch mCurrentCommit
-- | Get the latest revision available from the given source
--
-- Resolves the tip commit of the source's branch (or the remote's default
-- branch) and hashes it via the source-appropriate prefetch mechanism.
getLatestRev :: MonadObelisk m => ThunkSource -> m ThunkRev
getLatestRev os = do
  let gitS = thunkSourceToGitSource os
  (_, commit) <- gitGetCommitBranch (_gitSource_url gitS) (untagName <$> _gitSource_branch gitS)
  case os of
    ThunkSource_GitHub s -> githubThunkRev s commit
    ThunkSource_Git s -> gitThunkRev s commit
-- | Convert a URI to a thunk
--
-- If the URL is a github URL, we try to just download an archive for
-- performance. If that doesn't work (e.g. authentication issue), we fall back
-- on just doing things the normal way for git repos in general, and save it as
-- a regular git thunk.
uriThunkPtr :: MonadObelisk m => GitUri -> Maybe Bool -> Maybe Text -> Maybe Text -> m ThunkPtr
uriThunkPtr uri mPrivate mbranch mcommit = do
  -- Resolve the commit from the remote unless the caller already supplied one.
  commit <- case mcommit of
    Nothing -> gitGetCommitBranch uri mbranch >>= return . snd
    (Just c) -> return c
  (src, rev) <- uriToThunkSource uri mPrivate mbranch >>= \case
    ThunkSource_GitHub s -> do
      rev <- runExceptT $ githubThunkRev s commit
      case rev of
        Right r -> pure (ThunkSource_GitHub s, r)
        Left e -> do
          putLog Warning "\
            \Failed to fetch archive from GitHub. This is probably a private repo. \
            \Falling back on normal fetchgit. Original failure:"
          errorToWarning e
          -- Degrade to a plain git source over SSH so fetchgit can
          -- authenticate.
          let s' = forgetGithub True s
          (,) (ThunkSource_Git s') <$> gitThunkRev s' commit
    ThunkSource_Git s -> (,) (ThunkSource_Git s) <$> gitThunkRev s commit
  pure $ ThunkPtr
    { _thunkPtr_rev = rev
    , _thunkPtr_source = src
    }
-- | N.B. Cannot infer all fields.
--
-- If the thunk is a GitHub thunk and fails, we do *not* fall back like with
-- `uriThunkPtr`. Unlike a plain URL, a thunk src explicitly states which method
-- should be employed, and so we respect that.
uriToThunkSource :: MonadObelisk m => GitUri -> Maybe Bool -> Maybe Text -> m ThunkSource
uriToThunkSource (GitUri u) mPrivate
  -- Recognize github.com URLs (ssh as git@github.com, or git/https/http) with
  -- an owner/repo path, and specialize them to a GitHub source.
  | Right uriAuth <- URI.uriAuthority u
  , Just scheme <- URI.unRText <$> URI.uriScheme u
  , case scheme of
      "ssh" -> uriAuth == URI.Authority
        { URI.authUserInfo = Just $ URI.UserInfo (fromRight' $ URI.mkUsername "git") Nothing
        , URI.authHost = fromRight' $ URI.mkHost "github.com"
        , URI.authPort = Nothing
        }
      s -> s `L.elem` [ "git", "https", "http" ] -- "http:" just redirects to "https:"
        && URI.unRText (URI.authHost uriAuth) == "github.com"
  , Just (_, owner :| [repoish]) <- URI.uriPath u
  = \mbranch -> do
      isPrivate <- getIsPrivate
      pure $ ThunkSource_GitHub $ GitHubSource
        { _gitHubSource_owner = N $ URI.unRText owner
        , _gitHubSource_repo = N $ let
            -- Strip a trailing ".git" from the repo path segment, if present.
            repoish' = URI.unRText repoish
          in fromMaybe repoish' $ T.stripSuffix ".git" repoish'
        , _gitHubSource_branch = N <$> mbranch
        , _gitHubSource_private = isPrivate
        }
  | otherwise = \mbranch -> do
      isPrivate <- getIsPrivate
      pure $ ThunkSource_Git $ GitSource
        { _gitSource_url = GitUri u
        , _gitSource_branch = N <$> mbranch
        , _gitSource_fetchSubmodules = False -- TODO: How do we determine if this should be true?
        , _gitSource_private = isPrivate
        }
  where
    -- Use the caller-specified privacy when given; otherwise probe the remote.
    getIsPrivate = maybe (guessGitRepoIsPrivate $ GitUri u) pure mPrivate
-- | Guess whether a git repo is private by attempting an unauthenticated
-- @git ls-remote@ against candidate URIs. If none succeeds, assume private.
guessGitRepoIsPrivate :: MonadObelisk m => GitUri -> m Bool
guessGitRepoIsPrivate uri = flip fix urisToTry $ \loop -> \case
  -- Every candidate failed without credentials: treat as private.
  [] -> pure True
  uriAttempt:xs -> do
    result <- readCreateProcessWithExitCode $
      isolateGitProc $
        gitProcNoRepo
          [ "ls-remote"
          , "--quiet"
          , "--exit-code"
          , "--symref"
          , T.unpack $ gitUriToText uriAttempt
          ]
    case result of
      (ExitSuccess, _, _) -> pure False -- Must be a public repo
      _ -> loop xs
  where
    urisToTry = nubOrd $
      -- Include the original URI if it isn't using SSH, because SSH will certainly fail.
      [uri | fmap URI.unRText (URI.uriScheme (unGitUri uri)) /= Just "ssh"] <>
      [changeScheme "https" uri, changeScheme "http" uri]
    -- Rewrite the scheme and drop any user info so the probe is anonymous.
    changeScheme scheme (GitUri u) = GitUri $ u
      { URI.uriScheme = URI.mkScheme scheme
      , URI.uriAuthority = (\x -> x { URI.authUserInfo = Nothing }) <$> URI.uriAuthority u
      }
-- Funny signature indicates no effects depend on the optional branch name.
--
-- Hash a GitHub commit by prefetching its release archive tarball
-- (https://github.com/<owner>/<repo>/archive/<commit>.tar.gz).
githubThunkRev
  :: forall m
  .  MonadObelisk m
  => GitHubSource
  -> Text
  -> m ThunkRev
githubThunkRev s commit = do
  owner <- forcePP $ _gitHubSource_owner s
  repo <- forcePP $ _gitHubSource_repo s
  revTarball <- URI.mkPathPiece $ commit <> ".tar.gz"
  let archiveUri = GitUri $ URI.URI
        { URI.uriScheme = Just $ fromRight' $ URI.mkScheme "https"
        , URI.uriAuthority = Right $ URI.Authority
          { URI.authUserInfo = Nothing
          , URI.authHost = fromRight' $ URI.mkHost "github.com"
          , URI.authPort = Nothing
          }
        , URI.uriPath = Just ( False
                             , owner :| [ repo, fromRight' $ URI.mkPathPiece "archive", revTarball ]
                             )
        , URI.uriQuery = []
        , URI.uriFragment = Nothing
        }
  hash <- getNixSha256ForUriUnpacked archiveUri
  putLog Debug $ "Nix sha256 is " <> hash
  return $ ThunkRev
    { _thunkRev_commit = commitNameToRef $ N commit
    , _thunkRev_nixSha256 = hash
    }
  where
    -- Convert a tagged name into a URI path piece, failing in 'm' if invalid.
    forcePP :: Name entity -> m (URI.RText 'URI.PathPiece)
    forcePP = URI.mkPathPiece . untagName
-- Hash a plain git commit with nix-prefetch-git, after validating that the
-- remote uses a protocol we support.
gitThunkRev
  :: MonadObelisk m
  => GitSource
  -> Text
  -> m ThunkRev
gitThunkRev s commit = do
  let u = _gitSource_url s
      protocols = ["file", "https", "ssh", "git"]
      -- A missing scheme is treated as a local file path.
      scheme = maybe "file" URI.unRText $ URI.uriScheme $ (\(GitUri x) -> x) u
  unless (T.toLower scheme `elem` protocols) $
    failWith $ "obelisk currently only supports "
      <> T.intercalate ", " protocols <> " protocols for plain Git remotes"
  hash <- nixPrefetchGit u commit $ _gitSource_fetchSubmodules s
  putLog Informational $ "Nix sha256 is " <> hash
  pure $ ThunkRev
    { _thunkRev_commit = commitNameToRef (N commit)
    , _thunkRev_nixSha256 = hash
    }
-- | Given the URI to a git remote, and an optional branch name, return the name
-- of the branch along with the hash of the commit at tip of that branch.
--
-- If the branch name is passed in, it is returned exactly as-is. If it is not
-- passed in, the default branch of the repo is used instead.
gitGetCommitBranch
  :: MonadObelisk m => GitUri -> Maybe Text -> m (Text, CommitId)
gitGetCommitBranch uri mbranch = withExitFailMessage ("Failure for git remote " <> uriMsg) $ do
  (_, bothMaps) <- gitLsRemote
    (T.unpack $ gitUriToText uri)
    (GitRef_Branch <$> mbranch)
    Nothing
  branch <- case mbranch of
    Nothing -> withExitFailMessage "Failed to find default branch" $ do
      b <- rethrowE $ gitLookupDefaultBranch bothMaps
      putLog Debug $ "Default branch for remote repo " <> uriMsg <> " is " <> b
      pure b
    Just b -> pure b
  commit <- rethrowE $ gitLookupCommitForRef bothMaps (GitRef_Branch branch)
  putLog Informational $ "Latest commit in branch " <> branch
    <> " from remote repo " <> uriMsg
    <> " is " <> commit
  pure (branch, commit)
  where
    -- Promote pure lookup failures into CLI failures.
    rethrowE = either failWith pure
    uriMsg = gitUriToText uri
-- | Parse text into a 'GitUri', accepting absolute file paths, absolute URIs,
-- and git's scp-like @user\@host:path@ shorthand, tried in that order.
parseGitUri :: Text -> Maybe GitUri
parseGitUri raw = GitUri <$> candidates
  where
    candidates = parseFileURI raw <|> parseAbsoluteURI raw <|> parseSshShorthand raw
-- | Parse an absolute filesystem path by prefixing it with a @file://@
-- scheme; anything not starting with @/@ is rejected.
parseFileURI :: Text -> Maybe URI.URI
parseFileURI uri
  | "/" `T.isPrefixOf` uri = parseAbsoluteURI ("file://" <> uri)
  | otherwise = Nothing
-- | Parse a URI and keep it only when its path is absolute; 'mfilter' at
-- 'Maybe' collapses both a parse failure and a relative path to 'Nothing'.
parseAbsoluteURI :: Text -> Maybe URI.URI
parseAbsoluteURI = mfilter URI.isPathAbsolute . URI.mkURI
-- | Parse git's scp-like shorthand (@user\@host:path@) into a proper
-- @ssh://@ URI.
parseSshShorthand :: Text -> Maybe URI.URI
parseSshShorthand uri = do
  -- This is what git does to check that the remote
  -- is not a local file path when parsing shorthand.
  -- (See the shorthand parsing in git's connect.c.)
  let
    (authAndHostname, colonAndPath) = T.break (== ':') uri
    properUri = "ssh://" <> authAndHostname <> "/" <> T.drop 1 colonAndPath
  -- Shorthand is valid iff a colon is present and it occurs before the first slash
  -- This check is used to disambiguate a filepath containing a colon from shorthand
  guard $ isNothing (T.findIndex (=='/') authAndHostname)
       && not (T.null colonAndPath)
  URI.mkURI properUri
| null | https://raw.githubusercontent.com/obsidiansystems/obelisk/d779d5ab007d8ee5c3cd3400473453e1d106f5f1/lib/command/src/Obelisk/Command/Thunk.hs | haskell | # LANGUAGE MultiWayIf #
# LANGUAGE OverloadedStrings #
TODO: Support symlinked thunk data
^ Packed thunk
^ Checked out thunk that was unpacked from this pointer
| A reference to the exact data that a thunk should translate into
TODO: Use a smart constructor and make this actually verify itself
| A specific revision of data; it may be available from multiple sources
| A location from which a thunk's data can be retrieved
| A plain repo source
TODO: Use spinner here.
TODO: Pretty print these
| Specification for how a file in a thunk version works.
^ This file must match the given content exactly
^ Existence of this directory indicates that the thunk is unpacked
^ This directory is an attribute cache
| Specification for how a set of files in a thunk version work.
^ Path to directory
^ Set of file paths relative to the given directory
Interleave spec types so we try each one in a "fair" ordering
| Read a thunk and validate that it is exactly a packed thunk.
If additional data is present, fail.
Ensure that this directory is a valid thunk (i.e. so we aren't losing any data)
TODO: Is there a safer way to do this overwriting?
It's important that formatting be very consistent here, because
otherwise when people update thunks, their patches will be messy
check to see if thunk should be updated to a specific branch or just update it's current branch
| All recognized github standalone loaders, ordered from newest to oldest.
This tool will only ever produce the newest one when it writes a thunk.
and will fail to find commits without this because it does shallow clones.
| Checks a cache directory to see if there is a fresh symlink
to the result of building an attribute of a thunk.
If no cache hit is found, nix-build is called to build the attribute
and the result is symlinked into the cache.
^ Path to directory containing Thunk
^ Attribute to build
^ Symlink to cached or built nix output
WARNING: If the thunk uses an impure reference such as '<nixpkgs>'
the caching mechanism will fail as it merely measures the modification
time of the cache link and the expression to build.
expected from a cache miss
| Build a nix attribute, and cache the result if possible
^ Path to directory containing Thunk
^ Attribute to build
^ Symlink to cached or built nix output
Only packed thunks are cached. In particular, checkouts are not.
| Safely update thunk using a custom action
A temporary working space is used to do any update. When the custom
action successfully completes, the resulting (packed) thunk is copied
back to the original location.
| Check that we are not somewhere inside the thunk directory
Don't let thunk commands work when directly given an unpacked repo
TODO: Overwrite option that rechecks out thunk; force option to do so even if working directory is dirty
Only write meta data if the checkout is not inplace
^ Git source to use
^ Commit hash to reset to
^ Directory to clone into
TODO: add a rollback mode to pack to the original thunk
^ Check that the repo is clean, including .gitignored files
^ Check that the repo is clean, not including .gitignored files
^ Don't check that the repo is clean
Check whether there are any stashes
Get current branch
Get information on all branches and their (optional) designated upstream
correspondents
Check that every branch has a remote equivalent
loosely by -git-ahead-and-behind-info-for-all-branches-including-remotes
Those branches which have commits ahead of, i.e. not on, the upstream
branch. Purely being behind is fine.
We assume it's safe to pack the thunk at this point
| Get the latest revision available from the given source
| Convert a URI to a thunk
If the URL is a github URL, we try to just download an archive for
performance. If that doesn't work (e.g. authentication issue), we fall back
on just doing things the normal way for git repos in general, and save it as
a regular git thunk.
| N.B. Cannot infer all fields.
`uriThunkPtr`. Unlike a plain URL, a thunk src explicitly states which method
should be employed, and so we respect that.
"http:" just redirects to "https:"
TODO: How do we determine if this should be true?
Must be a public repo
Funny signature indicates no effects depend on the optional branch name.
of the branch along with the hash of the commit at tip of that branch.
If the branch name is passed in, it is returned exactly as-is. If it is not
passed it, the default branch of the repo is used instead.
This is what git does to check that the remote
is not a local file path when parsing shorthand.
Last referenced from here:
#L712
This check is used to disambiguate a filepath containing a colon from shorthand | # LANGUAGE DataKinds #
# LANGUAGE FlexibleContexts #
# LANGUAGE LambdaCase #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE PatternGuards #
# LANGUAGE QuasiQuotes #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeApplications #
module Obelisk.Command.Thunk where
import Control.Applicative
import Control.Exception (displayException, try)
import Control.Lens (ifor, ifor_, (.~))
import Control.Monad
import Control.Monad.Extra (findM)
import Control.Monad.Catch (MonadCatch, handle)
import Control.Monad.Except
import Data.Aeson ((.=))
import qualified Data.Aeson as Aeson
import Data.Aeson.Encode.Pretty
import qualified Data.Aeson.Types as Aeson
import Data.Bifunctor (first)
import qualified Data.ByteString.Lazy as LBS
import Data.Containers.ListUtils (nubOrd)
import Data.Default
import Data.Either.Combinators (fromRight', rightToMaybe)
import Data.Foldable (toList)
import Data.Function ((&))
import Data.Functor ((<&>))
import Data.Git.Ref (Ref)
import qualified Data.Git.Ref as Ref
import qualified Data.List as L
import Data.List.NonEmpty (NonEmpty (..), nonEmpty)
import qualified Data.List.NonEmpty as NonEmpty
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe
import Data.Set (Set)
import qualified Data.Set as Set
import Data.String.Here.Interpolated (i)
import Data.String.Here.Uninterpolated (here)
import Data.Text (Text)
import qualified Data.Text as T
import Data.Text.Encoding
import qualified Data.Text.IO as T
import Data.Time.Clock.POSIX (posixSecondsToUTCTime)
import Data.Traversable (for)
import Data.Yaml (parseMaybe)
import GitHub
import GitHub.Data.Name
import Obelisk.Command.Nix
import System.Directory
import System.Exit
import System.FilePath
import System.IO.Error
import System.IO.Temp
import System.PosixCompat.Files (getSymbolicLinkStatus, modificationTime)
import qualified Text.URI as URI
import Obelisk.App (MonadObelisk)
import Obelisk.CliApp
import Obelisk.Command.Utils
-- | The observed state of a thunk directory on disk.
data ThunkData
  = ThunkData_Packed ThunkSpec ThunkPtr
  -- ^ Packed thunk
  | ThunkData_Checkout
  -- ^ Checked out thunk that was unpacked from this pointer

-- | A reference to the exact data that a thunk should translate into
data ThunkPtr = ThunkPtr
  { _thunkPtr_rev :: ThunkRev
  , _thunkPtr_source :: ThunkSource
  }
  deriving (Show, Eq, Ord)

-- | A specific revision of data; it may be available from multiple sources
data ThunkRev = ThunkRev
  { _thunkRev_commit :: Ref Ref.SHA1
  , _thunkRev_nixSha256 :: NixSha256
  }
  deriving (Show, Eq, Ord)

-- | A location from which a thunk's data can be retrieved
data ThunkSource
  -- | A source specialized for GitHub
  = ThunkSource_GitHub GitHubSource
  -- | A plain repo source
  | ThunkSource_Git GitSource
  deriving (Show, Eq, Ord)

-- | Forget any GitHub specialization, yielding a plain git source.
thunkSourceToGitSource :: ThunkSource -> GitSource
thunkSourceToGitSource = \case
  ThunkSource_GitHub s -> forgetGithub False s
  ThunkSource_Git s -> s

data GitHubSource = GitHubSource
  { _gitHubSource_owner :: Name Owner
  , _gitHubSource_repo :: Name Repo
  , _gitHubSource_branch :: Maybe (Name Branch)
  , _gitHubSource_private :: Bool
  }
  deriving (Show, Eq, Ord)

-- | A git remote URI, wrapped so file URIs can be rendered specially.
newtype GitUri = GitUri { unGitUri :: URI.URI } deriving (Eq, Ord, Show)

-- | Render a 'GitUri' for use on a git command line. @file:@ URIs are
-- rendered as plain absolute paths; everything else via 'URI.render'.
gitUriToText :: GitUri -> Text
gitUriToText (GitUri uri)
  | (T.toLower . URI.unRText <$> URI.uriScheme uri) == Just "file"
  , Just (_, path) <- URI.uriPath uri
  = "/" <> T.intercalate "/" (map URI.unRText $ NonEmpty.toList path)
  | otherwise = URI.render uri

data GitSource = GitSource
  { _gitSource_url :: GitUri
  , _gitSource_branch :: Maybe (Name Branch)
  , _gitSource_fetchSubmodules :: Bool
  , _gitSource_private :: Bool
  }
  deriving (Show, Eq, Ord)

-- | Thunk options shared by several commands; currently only privacy.
newtype ThunkConfig = ThunkConfig
  { _thunkConfig_private :: Maybe Bool
  } deriving Show

-- | Options for the thunk update command.
data ThunkUpdateConfig = ThunkUpdateConfig
  { _thunkUpdateConfig_branch :: Maybe String
  , _thunkUpdateConfig_config :: ThunkConfig
  } deriving Show

-- | Options for the thunk pack command.
data ThunkPackConfig = ThunkPackConfig
  { _thunkPackConfig_force :: Bool
  , _thunkPackConfig_config :: ThunkConfig
  } deriving Show
-- | Convert a GitHub source to a regular Git source. Assumes no submodules.
--
-- When @useSsh@ is set the resulting URL is @ssh://git\@github.com/...@,
-- otherwise @https://github.com/...@; the repo path always ends in @.git@.
forgetGithub :: Bool -> GitHubSource -> GitSource
forgetGithub useSsh s = GitSource
  { _gitSource_url = GitUri $ URI.URI
    { URI.uriScheme = Just $ fromRight' $ URI.mkScheme $ if useSsh then "ssh" else "https"
    , URI.uriAuthority = Right $ URI.Authority
        -- The "git" user is only present for SSH URLs.
        { URI.authUserInfo = URI.UserInfo (fromRight' $ URI.mkUsername "git") Nothing
            <$ guard useSsh
        , URI.authHost = fromRight' $ URI.mkHost "github.com"
        , URI.authPort = Nothing
        }
    , URI.uriPath = Just ( False
                         , fromRight' . URI.mkPathPiece <$>
                             untagName (_gitHubSource_owner s)
                             :| [ untagName (_gitHubSource_repo s) <> ".git" ]
                         )
    , URI.uriQuery = []
    , URI.uriFragment = Nothing
    }
  , _gitSource_branch = _gitHubSource_branch s
  , _gitSource_fetchSubmodules = False
  , _gitSource_private = _gitHubSource_private s
  }
-- | Extract the branch name recorded in a thunk pointer's source, if any.
getThunkGitBranch :: ThunkPtr -> Maybe Text
getThunkGitBranch ptr = untagName <$> branchOf (_thunkPtr_source ptr)
  where
    branchOf = \case
      ThunkSource_GitHub s -> _gitHubSource_branch s
      ThunkSource_Git s -> _gitSource_branch s
-- | Interpret a commit name (a hex-encoded SHA-1) as a git 'Ref'.
commitNameToRef :: Name Commit -> Ref Ref.SHA1
commitNameToRef name = Ref.fromHex (encodeUtf8 (untagName name))
-- | Ask @nix-prefetch-url --unpack@ for the sha256 of the unpacked contents
-- of the given URL.
getNixSha256ForUriUnpacked
  :: MonadObelisk m
  => GitUri
  -> m NixSha256
getNixSha256ForUriUnpacked uri =
  withExitFailMessage ("nix-prefetch-url: Failed to determine sha256 hash of URL " <> gitUriToText uri) $ do
    -- nix-prefetch-url prints exactly one line (the hash) on success; the
    -- partial match fails loudly otherwise.
    [hash] <- fmap T.lines $ readProcessAndLogOutput (Debug, Debug) $
      proc nixPrefetchUrlPath ["--unpack", "--type", "sha256", T.unpack $ gitUriToText uri]
    pure hash
-- | Ask @nix-prefetch-git@ for the sha256 of the given repo at the given
-- revision, optionally fetching submodules. The hash is extracted from the
-- tool's JSON output.
nixPrefetchGit :: MonadObelisk m => GitUri -> Text -> Bool -> m NixSha256
nixPrefetchGit uri rev fetchSubmodules =
  withExitFailMessage ("nix-prefetch-git: Failed to determine sha256 hash of Git repo " <> gitUriToText uri <> " at " <> rev) $ do
    out <- readProcessAndLogStderr Debug $
      -- The 'filter' drops the empty placeholder when submodules are off.
      proc nixPrefetchGitPath $ filter (/="")
        [ "--url", T.unpack $ gitUriToText uri
        , "--rev", T.unpack rev
        , if fetchSubmodules then "--fetch-submodules" else ""
        , "--quiet"
        ]
    case parseMaybe (Aeson..: "sha256") =<< Aeson.decodeStrict (encodeUtf8 out) of
      Nothing -> failWith $ "nix-prefetch-git: unrecognized output " <> out
      Just x -> pure x
-- | Ways in which reading a thunk directory can fail.
data ReadThunkError
  = ReadThunkError_UnrecognizedThunk
  | ReadThunkError_UnrecognizedPaths (NonEmpty FilePath)
  | ReadThunkError_MissingPaths (NonEmpty FilePath)
  | ReadThunkError_UnparseablePtr FilePath String
  | ReadThunkError_FileError IOError
  | ReadThunkError_FileDoesNotMatch FilePath Text
  | ReadThunkError_UnrecognizedState String
  | ReadThunkError_AmbiguousPackedState ThunkPtr ThunkPtr
  deriving (Show)

-- Name of the directory (relative to the thunk) holding the unpacked source.
unpackedDirName :: FilePath
unpackedDirName = "."

-- Name of the attribute-cache directory inside a thunk.
attrCacheFileName :: FilePath
attrCacheFileName = ".attr-cache"
-- | Specification for how a file in a thunk version works.
--
-- NOTE(review): the constructor alternatives were missing here (apparently
-- lost during comment extraction); they are reconstructed from their uses in
-- 'matchThunkSpecToDir' -- confirm against the upstream source.
data ThunkFileSpec
  = ThunkFileSpec_Ptr (LBS.ByteString -> Either String ThunkPtr) -- ^ This file specifies 'ThunkPtr' data
  | ThunkFileSpec_FileMatches Text -- ^ This file must match the given content exactly
  | ThunkFileSpec_CheckoutIndicator -- ^ Existence of this directory indicates that the thunk is unpacked
  | ThunkFileSpec_AttrCache -- ^ This directory is an attribute cache

-- | Specification for how a set of files in a thunk version work.
data ThunkSpec = ThunkSpec
  { _thunkSpec_name :: !Text
  , _thunkSpec_files :: !(Map FilePath ThunkFileSpec)
  }

-- All recognized thunk spec families, each ordered from newest to oldest.
thunkSpecTypes :: NonEmpty (NonEmpty ThunkSpec)
thunkSpecTypes = gitThunkSpecs :| [gitHubThunkSpecs]
-- | Attempts to match a 'ThunkSpec' to a given directory.
--
-- NOTE(review): the parameter types in this signature were missing
-- (apparently lost during comment extraction); they are reconstructed from
-- the argument bindings below and the call in 'readThunkWith' -- confirm
-- against the upstream source.
matchThunkSpecToDir
  :: (MonadError ReadThunkError m, MonadIO m, MonadCatch m)
  => ThunkSpec -- ^ 'ThunkSpec' to match against the given files/directory
  -> FilePath -- ^ Path to directory
  -> Set FilePath -- ^ Set of file paths relative to the given directory
  -> m ThunkData
matchThunkSpecToDir thunkSpec dir dirFiles = do
  -- Reject directories containing files the spec doesn't know about, or
  -- missing files the spec requires.
  case nonEmpty (toList $ dirFiles `Set.difference` expectedPaths) of
    Just fs -> throwError $ ReadThunkError_UnrecognizedPaths $ (dir </>) <$> fs
    Nothing -> pure ()
  case nonEmpty (toList $ requiredPaths `Set.difference` dirFiles) of
    Just fs -> throwError $ ReadThunkError_MissingPaths $ (dir </>) <$> fs
    Nothing -> pure ()
  -- Evaluate each file spec; each may contribute a 'ThunkData' observation.
  datas <- fmap toList $ flip Map.traverseMaybeWithKey (_thunkSpec_files thunkSpec) $ \expectedPath -> \case
    ThunkFileSpec_AttrCache -> Nothing <$ dirMayExist expectedPath
    ThunkFileSpec_CheckoutIndicator -> liftIO (doesDirectoryExist (dir </> expectedPath)) <&> \case
      False -> Nothing
      True -> Just ThunkData_Checkout
    ThunkFileSpec_FileMatches expectedContents -> handle (\(e :: IOError) -> throwError $ ReadThunkError_FileError e) $ do
      actualContents <- liftIO (T.readFile $ dir </> expectedPath)
      case T.strip expectedContents == T.strip actualContents of
        True -> pure Nothing
        False -> throwError $ ReadThunkError_FileDoesNotMatch (dir </> expectedPath) expectedContents
    ThunkFileSpec_Ptr parser -> handle (\(e :: IOError) -> throwError $ ReadThunkError_FileError e) $ do
      let path = dir </> expectedPath
      liftIO (doesFileExist path) >>= \case
        False -> pure Nothing
        True -> do
          actualContents <- liftIO $ LBS.readFile path
          case parser actualContents of
            Right v -> pure $ Just (ThunkData_Packed thunkSpec v)
            Left e -> throwError $ ReadThunkError_UnparseablePtr (dir </> expectedPath) e

  -- All observations must agree on a single consistent state.
  case nonEmpty datas of
    Nothing -> throwError ReadThunkError_UnrecognizedThunk
    Just xs -> fold1WithM xs $ \a b -> either throwError pure (mergeThunkData a b)
  where
    rootPathsOnly = Set.fromList . mapMaybe takeRootDir . Map.keys
    takeRootDir = fmap NonEmpty.head . nonEmpty . splitPath
    expectedPaths = rootPathsOnly $ _thunkSpec_files thunkSpec
    -- Only exact-content files are mandatory for a spec to match.
    requiredPaths = rootPathsOnly $ Map.filter isRequiredFileSpec $ _thunkSpec_files thunkSpec
    isRequiredFileSpec = \case
      ThunkFileSpec_FileMatches _ -> True
      _ -> False
    -- The attr cache must be a directory, never a plain file.
    dirMayExist expectedPath = liftIO (doesFileExist (dir </> expectedPath)) >>= \case
      True -> throwError $ ReadThunkError_UnrecognizedPaths $ expectedPath :| []
      False -> pure ()
    -- Combine 'ThunkData' from different files; mixing packed data with a
    -- checkout, or two disagreeing pointers, is an error.
    mergeThunkData ThunkData_Checkout ThunkData_Checkout = Right ThunkData_Checkout
    mergeThunkData ThunkData_Checkout ThunkData_Packed{} = Left bothPackedAndUnpacked
    mergeThunkData ThunkData_Packed{} ThunkData_Checkout = Left bothPackedAndUnpacked
    mergeThunkData a@(ThunkData_Packed _ ptrA) (ThunkData_Packed _ ptrB) =
      if ptrA == ptrB then Right a else Left $ ReadThunkError_AmbiguousPackedState ptrA ptrB
    bothPackedAndUnpacked = ReadThunkError_UnrecognizedState "Both packed data and checkout present"
    fold1WithM (x :| xs) f = foldM f x xs
-- | Read a thunk directory by trying each candidate 'ThunkSpec' in turn,
-- returning the first that matches.
readThunkWith
  :: (MonadObelisk m)
  => NonEmpty (NonEmpty ThunkSpec) -> FilePath -> m (Either ReadThunkError ThunkData)
readThunkWith specTypes dir = do
  dirFiles <- Set.fromList <$> liftIO (listDirectory dir)
  -- NOTE(review): 'specs' is referenced below but its definition is not
  -- visible here; presumably it interleaves 'specTypes' so each spec family
  -- is tried in a fair order -- confirm against the upstream source.
  flip fix specs $ \loop -> \case
    [] -> pure $ Left ReadThunkError_UnrecognizedThunk
    spec:rest -> runExceptT (matchThunkSpecToDir spec dir dirFiles) >>= \case
      Left e -> putLog Debug [i|Thunk specification ${_thunkSpec_name spec} did not match ${dir}: ${e}|] *> loop rest
      x@(Right _) -> x <$ putLog Debug [i|Thunk specification ${_thunkSpec_name spec} matched ${dir}|]
-- | Read a thunk and validate that it is exactly a packed thunk or checkout,
-- trying every known thunk specification type.
readThunk :: (MonadObelisk m) => FilePath -> m (Either ReadThunkError ThunkData)
readThunk dir = readThunkWith thunkSpecTypes dir
-- | Parse a 'ThunkPtr' from a JSON object, given a parser for the
-- source-specific portion (e.g. 'parseGitHubSource' or 'parseGitSource').
parseThunkPtr :: (Aeson.Object -> Aeson.Parser ThunkSource) -> Aeson.Object -> Aeson.Parser ThunkPtr
parseThunkPtr parseSrc v = do
  rev <- v Aeson..: "rev"
  sha256 <- v Aeson..: "sha256"
  src <- parseSrc v
  pure $ ThunkPtr
    { _thunkPtr_rev = ThunkRev
      { _thunkRev_commit = Ref.fromHexString rev
      , _thunkRev_nixSha256 = sha256
      }
    , _thunkPtr_source = src
    }
-- | Parse the GitHub-specific fields of a packed thunk's JSON. The
-- @private@ field is optional and defaults to 'False'.
parseGitHubSource :: Aeson.Object -> Aeson.Parser GitHubSource
parseGitHubSource v = GitHubSource
  <$> v Aeson..: "owner"
  <*> v Aeson..: "repo"
  <*> v Aeson..:! "branch"
  <*> (fromMaybe False <$> v Aeson..:? "private")
-- | Parse the plain-git fields of a thunk's JSON. @branch@ and
-- @fetchSubmodules@ are optional; @private@ defaults to 'False'.
--
-- Fails with a descriptive message when @url@ is not a recognized git URI.
-- (Previously this failed via a @Just url <- ...@ pattern-match, which
-- produced an uninformative parser error.)
parseGitSource :: Aeson.Object -> Aeson.Parser GitSource
parseGitSource v = do
  urlText <- v Aeson..: "url"
  url <- maybe (fail $ "parseGitSource: unrecognized git URI: " <> T.unpack urlText) pure $
    parseGitUri urlText
  branch <- v Aeson..:! "branch"
  fetchSubmodules <- v Aeson..:! "fetchSubmodules"
  private <- v Aeson..:? "private"
  pure $ GitSource
    { _gitSource_url = url
    , _gitSource_branch = branch
    , _gitSource_fetchSubmodules = fromMaybe False fetchSubmodules
    , _gitSource_private = fromMaybe False private
    }
-- | Replace the thunk at @target@ with the packed representation of @thunk@.
-- Fails if @target@ is not currently a valid thunk, so we never clobber an
-- arbitrary directory.
overwriteThunk :: MonadObelisk m => FilePath -> ThunkPtr -> m ()
overwriteThunk target thunk = do
  -- Verify the target really is a thunk before deleting anything.
  readThunk target >>= \case
    Left e -> failWith [i|Invalid thunk at ${target}: ${e}|]
    Right _ -> pure ()
  liftIO $ removePathForcibly target
  createThunk target $ Right thunk
-- | Choose the newest spec for the pointer's source family; this is the
-- layout used when writing a thunk back to disk.
thunkPtrToSpec :: ThunkPtr -> ThunkSpec
thunkPtrToSpec thunk = case _thunkPtr_source thunk of
  ThunkSource_GitHub _ -> NonEmpty.head gitHubThunkSpecs
  ThunkSource_Git _ -> NonEmpty.head gitThunkSpecs
-- | Serialize a 'ThunkPtr' to the JSON stored in @github.json@/@git.json@.
--
-- Keys are emitted in a fixed, human-friendly order with a trailing newline
-- so packed thunks diff cleanly under version control.
encodeThunkPtrData :: ThunkPtr -> LBS.ByteString
encodeThunkPtrData (ThunkPtr rev src) = case src of
  ThunkSource_GitHub s -> encodePretty' githubCfg $ Aeson.object $ catMaybes
    [ Just $ "owner" .= _gitHubSource_owner s
    , Just $ "repo" .= _gitHubSource_repo s
    , ("branch" .=) <$> _gitHubSource_branch s -- omitted when no branch is pinned
    , Just $ "rev" .= Ref.toHexString (_thunkRev_commit rev)
    , Just $ "sha256" .= _thunkRev_nixSha256 rev
    , Just $ "private" .= _gitHubSource_private s
    ]
  ThunkSource_Git s -> encodePretty' plainGitCfg $ Aeson.object $ catMaybes
    [ Just $ "url" .= gitUriToText (_gitSource_url s)
    , Just $ "rev" .= Ref.toHexString (_thunkRev_commit rev)
    , ("branch" .=) <$> _gitSource_branch s -- omitted when no branch is pinned
    , Just $ "sha256" .= _thunkRev_nixSha256 rev
    , Just $ "fetchSubmodules" .= _gitSource_fetchSubmodules s
    , Just $ "private" .= _gitSource_private s
    ]
  where
    githubCfg = defConfig
      { confIndent = Spaces 2
      , confCompare = keyOrder
          [ "owner"
          , "repo"
          , "branch"
          , "private"
          , "rev"
          , "sha256"
          ] <> compare -- alphabetical fallback for any unexpected keys
      , confTrailingNewline = True
      }
    plainGitCfg = defConfig
      { confIndent = Spaces 2
      , confCompare = keyOrder
          [ "url"
          , "rev"
          , "sha256"
          , "private"
          , "fetchSubmodules"
          ] <> compare -- alphabetical fallback for any unexpected keys
      , confTrailingNewline = True
      }
-- | Write a thunk's files to @target@ according to its spec.
--
-- Given a 'Left' spec, only the static loader files are written; given a
-- 'Right' pointer, the pointer's JSON file is written as well.
createThunk :: MonadObelisk m => FilePath -> Either ThunkSpec ThunkPtr -> m ()
createThunk target ptrInfo =
  ifor_ (_thunkSpec_files spec) $ \path -> \case
    ThunkFileSpec_FileMatches content -> withReadyPath path $ \p -> liftIO $ T.writeFile p content
    ThunkFileSpec_Ptr _ -> case ptrInfo of
      -- NOTE(review): this alternative was missing in the file as reviewed,
      -- leaving the inner case non-exhaustive; restored.
      Left _ -> pure () -- We can't write the ptr without it
      Right ptr -> withReadyPath path $ \p -> liftIO $ LBS.writeFile p (encodeThunkPtrData ptr)
    _ -> pure () -- attr cache and checkout indicators are not materialized here
  where
    spec = either id thunkPtrToSpec ptrInfo
    -- Ensure the parent directory exists before handing the path to the writer.
    withReadyPath path f = do
      let fullPath = target </> path
      putLog Debug $ "Writing thunk file " <> T.pack fullPath
      liftIO $ createDirectoryIfMissing True $ takeDirectory fullPath
      f fullPath
-- | Create a thunk at @target@ pinned to the newest commit available from
-- the given source.
createThunkWithLatest :: MonadObelisk m => FilePath -> ThunkSource -> m ()
createThunkWithLatest target s =
  getLatestRev s >>= \rev ->
    createThunk target (Right ThunkPtr { _thunkPtr_source = s, _thunkPtr_rev = rev })
-- | Point the packed thunk at @target@ to the newest available commit.
--
-- Without a branch, the thunk's existing source (and its branch) is reused;
-- with a branch, the thunk is switched to that branch first. Unpacked thunks
-- are rejected.
updateThunkToLatest :: MonadObelisk m => ThunkUpdateConfig -> FilePath -> m ()
updateThunkToLatest (ThunkUpdateConfig mBranch thunkConfig) target = spinner $ do
  checkThunkDirectory target
  case mBranch of
    Nothing -> do
      (overwrite, ptr) <- readThunk target >>= \case
        Left err -> failWith [i|Thunk update: ${err}|]
        Right c -> case c of
          ThunkData_Packed _ t -> return (target, t)
          ThunkData_Checkout -> failWith "cannot update an unpacked thunk"
      let src = _thunkPtr_source ptr
      rev <- getLatestRev src
      overwriteThunk overwrite $ modifyThunkPtrByConfig thunkConfig $ ThunkPtr
        { _thunkPtr_source = src
        , _thunkPtr_rev = rev
        }
    Just branch -> readThunk target >>= \case
      Left err -> failWith [i|Thunk update: ${err}|]
      Right c -> case c of
        ThunkData_Packed _ t -> setThunk thunkConfig target (thunkSourceToGitSource $ _thunkPtr_source t) branch
        ThunkData_Checkout -> failWith [i|Thunk located at ${target} is unpacked. Use 'ob thunk pack' on the desired directory and then try 'ob thunk update' again.|]
  where
    spinner = withSpinner' ("Updating thunk " <> T.pack target <> " to latest") (pure $ const $ "Thunk " <> T.pack target <> " updated to latest")
-- | Re-point a thunk at the given branch of the given git source, then update
-- it to that branch's latest commit.
setThunk :: MonadObelisk m => ThunkConfig -> FilePath -> GitSource -> String -> m ()
setThunk thunkConfig target gs branch = do
  newThunkPtr <- uriThunkPtr (_gitSource_url gs) (_thunkConfig_private thunkConfig) (Just $ T.pack branch) Nothing
  overwriteThunk target newThunkPtr
  updateThunkToLatest (ThunkUpdateConfig Nothing thunkConfig) target
-- | All known GitHub thunk specifications, newest first. Matching and
-- writing both rely on the current version being at the head.
gitHubThunkSpecs :: NonEmpty ThunkSpec
gitHubThunkSpecs =
  gitHubThunkSpecV5 :|
  [ gitHubThunkSpecV4
  , gitHubThunkSpecV3
  , gitHubThunkSpecV2
  , gitHubThunkSpecV1
  ]
-- | The original GitHub thunk loader (public repositories only).
gitHubThunkSpecV1 :: ThunkSpec
gitHubThunkSpecV1 = legacyGitHubThunkSpec "github-v1"
  "import ((import <nixpkgs> {}).fetchFromGitHub (builtins.fromJSON (builtins.readFile ./github.json)))"
-- | Version 2 of the GitHub loader: adds an explicit @private@ flag
-- (defaulting to false) to the attributes passed to @fetchFromGitHub@.
gitHubThunkSpecV2 :: ThunkSpec
gitHubThunkSpecV2 = legacyGitHubThunkSpec "github-v2" $ T.unlines
  -- NOTE(review): the first list element was garbled in the file as reviewed
  -- (only the trailing TODO comment survived); restored.
  [ "# DO NOT HAND-EDIT THIS FILE" -- TODO: Add something about how to get more info on Obelisk, etc.
  , "import ((import <nixpkgs> {}).fetchFromGitHub ("
  , "  let json = builtins.fromJSON (builtins.readFile ./github.json);"
  , "  in { inherit (json) owner repo rev sha256;"
  , "       private = json.private or false;"
  , "     }"
  , "))"
  ]
-- | Version 3 of the GitHub loader: fetches private repos over SSH via
-- @builtins.fetchGit@ when available, public ones via @fetchFromGitHub@.
gitHubThunkSpecV3 :: ThunkSpec
gitHubThunkSpecV3 = legacyGitHubThunkSpec "github-v3" $ T.unlines
  [ "# DO NOT HAND-EDIT THIS FILE"
  , "let"
  , "  fetch = { private ? false, ... }@args: if private && builtins.hasAttr \"fetchGit\" builtins"
  , "    then fetchFromGitHubPrivate args"
  , "    else (import <nixpkgs> {}).fetchFromGitHub (builtins.removeAttrs args [\"branch\"]);"
  , "  fetchFromGitHubPrivate ="
  , "    { owner, repo, rev, branch ? null, name ? null, sha256 ? null, private ? false"
  , "    , fetchSubmodules ? false, githubBase ? \"github.com\", ..."
  , "    }: assert !fetchSubmodules;"
  , "      builtins.fetchGit ({"
  -- NOTE(review): this URL read "ssh@${githubBase}/..." in the file as
  -- reviewed, which is not a valid URL; restored to the standard
  -- ssh://git@ form.
  , "      url = \"ssh://git@${githubBase}/${owner}/${repo}.git\";"
  , "      inherit rev;"
  , "      }"
  , "      // (if branch == null then {} else { ref = branch; })"
  , "      // (if name == null then {} else { inherit name; }));"
  , "in import (fetch (builtins.fromJSON (builtins.readFile ./github.json)))"
  ]
-- | Version 4 of the GitHub loader: uses plain @builtins.fetchTarball@ for
-- public repos without submodules, falling back to @fetchFromGitHub@.
gitHubThunkSpecV4 :: ThunkSpec
gitHubThunkSpecV4 = legacyGitHubThunkSpec "github-v4" $ T.unlines
  [ "# DO NOT HAND-EDIT THIS FILE"
  , "let fetch = { private ? false, fetchSubmodules ? false, owner, repo, rev, sha256, ... }:"
  , "  if !fetchSubmodules && !private then builtins.fetchTarball {"
  -- NOTE(review): the archive URL's host was missing in the file as
  -- reviewed ("/${owner}/..."); restored to the canonical GitHub archive URL.
  , "    url = \"https://github.com/${owner}/${repo}/archive/${rev}.tar.gz\"; inherit sha256;"
  , "  } else (import <nixpkgs> {}).fetchFromGitHub {"
  , "    inherit owner repo rev sha256 fetchSubmodules private;"
  , "  };"
  , "in import (fetch (builtins.fromJSON (builtins.readFile ./github.json)))"
  ]
-- | Spec layout shared by all pre-v5 GitHub thunks: a @default.nix@ loader
-- that must match @loader@ exactly, plus the pointer in @github.json@.
legacyGitHubThunkSpec :: Text -> Text -> ThunkSpec
legacyGitHubThunkSpec name loader = ThunkSpec name $ Map.fromList
  [ ("default.nix", ThunkFileSpec_FileMatches $ T.strip loader)
  , ("github.json" , ThunkFileSpec_Ptr parseGitHubJsonBytes)
  , (attrCacheFileName, ThunkFileSpec_AttrCache)
  , (".git", ThunkFileSpec_CheckoutIndicator)
  ]
-- | Current (v5) GitHub thunk specification: fetch logic lives in
-- @thunk.nix@ with a trampoline @default.nix@ (see 'mkThunkSpec').
--
-- NOTE(review): the archive URL's host was missing in the file as reviewed
-- ("/${owner}/..."); restored to the canonical GitHub archive URL.
gitHubThunkSpecV5 :: ThunkSpec
gitHubThunkSpecV5 = mkThunkSpec "github-v5" "github.json" parseGitHubJsonBytes [here|
# DO NOT HAND-EDIT THIS FILE
let fetch = { private ? false, fetchSubmodules ? false, owner, repo, rev, sha256, ... }:
  if !fetchSubmodules && !private then builtins.fetchTarball {
    url = "https://github.com/${owner}/${repo}/archive/${rev}.tar.gz"; inherit sha256;
  } else (import <nixpkgs> {}).fetchFromGitHub {
    inherit owner repo rev sha256 fetchSubmodules private;
  };
  json = builtins.fromJSON (builtins.readFile ./github.json);
in fetch json
|]
-- | Parse @github.json@: prefer the GitHub representation, but fall back to
-- a plain git source (a GitHub thunk may have been degraded to plain git).
parseGitHubJsonBytes :: LBS.ByteString -> Either String ThunkPtr
parseGitHubJsonBytes = parseJsonObject $ parseThunkPtr $ \v ->
  ThunkSource_GitHub <$> parseGitHubSource v <|> ThunkSource_Git <$> parseGitSource v
-- | All known plain-git thunk specifications, newest first. Matching and
-- writing both rely on the current version being at the head.
gitThunkSpecs :: NonEmpty ThunkSpec
gitThunkSpecs =
  gitThunkSpecV5 :|
  [ gitThunkSpecV4
  , gitThunkSpecV3
  , gitThunkSpecV2
  , gitThunkSpecV1
  ]
-- | The original plain-git thunk loader, based on nixpkgs' @fetchgit@.
gitThunkSpecV1 :: ThunkSpec
gitThunkSpecV1 = legacyGitThunkSpec "git-v1" $ T.unlines
  [ "# DO NOT HAND-EDIT THIS FILE"
  , "let fetchGit = {url, rev, ref ? null, branch ? null, sha256 ? null, fetchSubmodules ? null}:"
  , "  assert !fetchSubmodules; (import <nixpkgs> {}).fetchgit { inherit url rev sha256; };"
  , "in import (fetchGit (builtins.fromJSON (builtins.readFile ./git.json)))"
  ]
-- | Version 2 of the plain-git loader: uses @builtins.fetchGit@ (nix >= 2.0)
-- so no sha256 is required.
gitThunkSpecV2 :: ThunkSpec
gitThunkSpecV2 = legacyGitThunkSpec "git-v2" $ T.unlines
  [ "# DO NOT HAND-EDIT THIS FILE"
  , "let fetchGit = {url, rev, ref ? null, branch ? null, sha256 ? null, fetchSubmodules ? null}:"
  , "  if builtins.hasAttr \"fetchGit\" builtins"
  , "  then builtins.fetchGit ({ inherit url rev; } // (if branch == null then {} else { ref = branch; }))"
  , "  else abort \"Plain Git repositories are only supported on nix 2.0 or higher.\";"
  , "in import (fetchGit (builtins.fromJSON (builtins.readFile ./git.json)))"
  ]
-- | NOTE: This loader has a bug because @builtins.fetchGit@ is not given a
-- @ref@, so fetching commits that are not reachable from the default branch
-- can fail. Fixed in 'gitThunkSpecV4'.
gitThunkSpecV3 :: ThunkSpec
gitThunkSpecV3 = legacyGitThunkSpec "git-v3" $ T.unlines
  [ "# DO NOT HAND-EDIT THIS FILE"
  , "let fetch = {url, rev, ref ? null, sha256 ? null, fetchSubmodules ? false, private ? false, ...}:"
  , "  let realUrl = let firstChar = builtins.substring 0 1 url; in"
  , "    if firstChar == \"/\" then /. + url"
  , "    else if firstChar == \".\" then ./. + url"
  , "    else url;"
  , "  in if !fetchSubmodules && private then builtins.fetchGit {"
  , "    url = realUrl; inherit rev;"
  , "  } else (import <nixpkgs> {}).fetchgit {"
  , "    url = realUrl; inherit rev sha256;"
  , "  };"
  , "in import (fetch (builtins.fromJSON (builtins.readFile ./git.json)))"
  ]
-- | Version 4 of the plain-git loader: like v3, but passes the branch as
-- @ref@ to @builtins.fetchGit@, fixing the v3 default-branch bug.
gitThunkSpecV4 :: ThunkSpec
gitThunkSpecV4 = legacyGitThunkSpec "git-v4" $ T.unlines
  [ "# DO NOT HAND-EDIT THIS FILE"
  , "let fetch = {url, rev, branch ? null, sha256 ? null, fetchSubmodules ? false, private ? false, ...}:"
  , "  let realUrl = let firstChar = builtins.substring 0 1 url; in"
  , "    if firstChar == \"/\" then /. + url"
  , "    else if firstChar == \".\" then ./. + url"
  , "    else url;"
  , "  in if !fetchSubmodules && private then builtins.fetchGit {"
  , "    url = realUrl; inherit rev;"
  , "    ${if branch == null then null else \"ref\"} = branch;"
  , "  } else (import <nixpkgs> {}).fetchgit {"
  , "    url = realUrl; inherit rev sha256;"
  , "  };"
  , "in import (fetch (builtins.fromJSON (builtins.readFile ./git.json)))"
  ]
-- | Spec layout shared by all pre-v5 plain-git thunks: a @default.nix@
-- loader that must match @loader@ exactly, plus the pointer in @git.json@.
legacyGitThunkSpec :: Text -> Text -> ThunkSpec
legacyGitThunkSpec name loader = ThunkSpec name $ Map.fromList
  [ ("default.nix", ThunkFileSpec_FileMatches $ T.strip loader)
  , ("git.json" , ThunkFileSpec_Ptr parseGitJsonBytes)
  , (attrCacheFileName, ThunkFileSpec_AttrCache)
  , (".git", ThunkFileSpec_CheckoutIndicator)
  ]
-- | Current (v5) plain-git thunk specification: fetch logic lives in
-- @thunk.nix@ (see 'mkThunkSpec') and supports local paths, private repos,
-- and branch refs.
gitThunkSpecV5 :: ThunkSpec
gitThunkSpecV5 = mkThunkSpec "git-v5" "git.json" parseGitJsonBytes [here|
# DO NOT HAND-EDIT THIS FILE
let fetch = {url, rev, branch ? null, sha256 ? null, fetchSubmodules ? false, private ? false, ...}:
  let realUrl = let firstChar = builtins.substring 0 1 url; in
    if firstChar == "/" then /. + url
    else if firstChar == "." then ./. + url
    else url;
  in if !fetchSubmodules && private then builtins.fetchGit {
    url = realUrl; inherit rev;
    ${if branch == null then null else "ref"} = branch;
  } else (import <nixpkgs> {}).fetchgit {
    url = realUrl; inherit rev sha256;
  };
  json = builtins.fromJSON (builtins.readFile ./git.json);
in fetch json
|]
-- | Parse @git.json@ into a thunk pointer with a plain-git source.
parseGitJsonBytes :: LBS.ByteString -> Either String ThunkPtr
parseGitJsonBytes = parseJsonObject $ parseThunkPtr $ fmap ThunkSource_Git . parseGitSource
-- | Construct a v5-style thunk spec: a tiny trampoline @default.nix@, the
-- actual fetch logic in @thunk.nix@, and the pointer in @jsonFileName@.
mkThunkSpec :: Text -> FilePath -> (LBS.ByteString -> Either String ThunkPtr) -> Text -> ThunkSpec
mkThunkSpec name jsonFileName parser srcNix = ThunkSpec name $ Map.fromList
  [ ("default.nix", ThunkFileSpec_FileMatches defaultNixViaSrc)
  , ("thunk.nix", ThunkFileSpec_FileMatches srcNix)
  , (jsonFileName, ThunkFileSpec_Ptr parser)
  , (attrCacheFileName, ThunkFileSpec_AttrCache)
  -- v5 thunks keep their checkout in a subdirectory rather than at the root.
  , (normalise $ unpackedDirName </> ".git", ThunkFileSpec_CheckoutIndicator)
  ]
  where
    defaultNixViaSrc = [here|
# DO NOT HAND-EDIT THIS FILE
import (import ./thunk.nix)
|]
-- | Decode a JSON document and then run an object parser over it, collapsing
-- both failure modes (bad JSON, bad shape) into one 'Either' 'String'.
parseJsonObject :: (Aeson.Object -> Aeson.Parser a) -> LBS.ByteString -> Either String a
parseJsonObject p bytes = either Left (Aeson.parseEither p) (Aeson.eitherDecode bytes)
-- | Build a single nix attribute of a packed thunk, memoizing the result as
-- an out-link inside the thunk's attribute-cache directory.
--
-- Returns 'Nothing' when the spec declares no attribute cache; otherwise the
-- path to the (possibly freshly built) cached output link.
nixBuildThunkAttrWithCache
  :: MonadObelisk m
  => ThunkSpec
  -> FilePath
  -- ^ Path to the thunk directory
  -> String
  -- ^ Attribute to build
  -> m (Maybe FilePath)
nixBuildThunkAttrWithCache thunkSpec thunkDir attr = do
  -- Newest modification time across the thunk's spec files; a cache entry is
  -- only valid if it is at least this new.
  latestChange <- liftIO $ do
    let
      getModificationTimeMaybe = fmap rightToMaybe . try @IOError . getModificationTime
      thunkFileNames = Map.keys $ _thunkSpec_files thunkSpec
    maximum . catMaybes <$> traverse (getModificationTimeMaybe . (thunkDir </>)) thunkFileNames
  let cachePaths' = nonEmpty $ Map.keys $ Map.filter (\case ThunkFileSpec_AttrCache -> True; _ -> False) $
        _thunkSpec_files thunkSpec
  for cachePaths' $ \cachePaths ->
    fmap NonEmpty.head $ for cachePaths $ \cacheDir -> do
      let
        cachePath = thunkDir </> cacheDir </> attr <.> "out"
        -- NOTE(review): this handler has only an 'otherwise' guard, so even
        -- an ordinary cache miss (missing symlink) is logged at Error level;
        -- it looks like an 'isDoesNotExistError' guard was lost -- confirm
        -- against the original before relying on the logging behavior.
        cacheErrHandler e
          | otherwise = Nothing <$ putLog Error (T.pack $ displayException e)
      cacheHit <- handle cacheErrHandler $ do
        -- Compare the cache link's mtime against the thunk files' mtime.
        cacheTime <- liftIO $ posixSecondsToUTCTime . realToFrac . modificationTime <$> getSymbolicLinkStatus cachePath
        pure $ if latestChange <= cacheTime
          then Just cachePath
          else Nothing
      case cacheHit of
        Just c -> pure c
        Nothing -> do
          putLog Warning $ T.pack $ mconcat [thunkDir, ": ", attr, " not cached, building ..."]
          liftIO $ createDirectoryIfMissing True (takeDirectory cachePath)
          -- Build with an indirect GC root at the cache path so nix keeps it.
          (cachePath <$) $ nixCmd $ NixCmd_Build $ def
            & nixBuildConfig_outLink .~ OutLink_IndirectRoot cachePath
            & nixCmdConfig_target .~ Target
              { _target_path = Just thunkDir
              , _target_attr = Just attr
              , _target_expr = Nothing
              }
-- | Build a nix attribute of an expression directory, using the thunk
-- attribute cache when the directory is a packed thunk; anything else is
-- built uncached without an out-link.
nixBuildAttrWithCache
  :: MonadObelisk m
  => FilePath
  -- ^ Path to the expression directory (possibly a packed thunk)
  -> String
  -- ^ Attribute to build
  -> m FilePath
nixBuildAttrWithCache exprPath attr = readThunk exprPath >>= \case
  -- Only packed thunks can be cached; fall back to a plain build if the
  -- spec has no cache directory.
  Right (ThunkData_Packed spec _) ->
    maybe build pure =<< nixBuildThunkAttrWithCache spec exprPath attr
  _ -> build
  where
    build = nixCmd $ NixCmd_Build $ def
      & nixBuildConfig_outLink .~ OutLink_None
      & nixCmdConfig_target .~ Target
        { _target_path = Just exprPath
        , _target_attr = Just attr
        , _target_expr = Nothing
        }
-- | Temporarily unpack a thunk, run the given action against the unpacked
-- checkout, then re-pack it and copy the result back.
--
-- All work happens on a copy in a system temp directory, so the original
-- thunk is only replaced after the action and re-packing succeed.
updateThunk :: MonadObelisk m => FilePath -> (FilePath -> m a) -> m a
updateThunk p f = withSystemTempDirectory "obelisk-thunkptr-" $ \tmpDir -> do
  p' <- copyThunkToTmp tmpDir p
  unpackThunk' True p'
  result <- f p'
  updateThunkFromTmp p'
  return result
  where
    copyThunkToTmp tmpDir thunkDir = readThunk thunkDir >>= \case
      Left err -> failWith $ "withThunkUnpacked: " <> T.pack (show err)
      Right ThunkData_Packed{} -> do
        let tmpThunk = tmpDir </> "thunk"
        -- cp -T: copy the directory itself onto the destination path.
        callProcessAndLogOutput (Notice, Error) $
          proc cp ["-r", "-T", thunkDir, tmpThunk]
        return tmpThunk
      Right _ -> failWith "Thunk is not packed"
    updateThunkFromTmp p' = do
      -- Re-pack quietly (no trailing spinner message), then copy back over
      -- the original location.
      _ <- packThunk' True (ThunkPackConfig False (ThunkConfig Nothing)) p'
      callProcessAndLogOutput (Notice, Error) $
        proc cp ["-r", "-T", p', p]
-- | Choose the trailing message for a spinner: suppressed entirely when the
-- caller asked for no trailing output.
finalMsg :: Bool -> (a -> Text) -> Maybe (a -> Text)
finalMsg noTrail s
  | noTrail = Nothing
  | otherwise = Just s
-- | Sanity checks before any destructive thunk operation on @thunkDir@:
-- refuse to run from inside the thunk itself, and refuse to operate directly
-- on a thunk's unpacked source directory.
checkThunkDirectory :: MonadObelisk m => FilePath -> m ()
checkThunkDirectory thunkDir = do
  currentDir <- liftIO getCurrentDirectory
  thunkDir' <- liftIO $ canonicalizePath thunkDir
  -- NOTE(review): 'isInfixOf' on raw path strings is an approximate
  -- containment check; it can over-match sibling paths sharing a prefix.
  when (thunkDir' `L.isInfixOf` currentDir) $
    failWith [i|Can't perform thunk operations from within the thunk directory: ${thunkDir}|]
  -- If the target is named like an unpacked dir and its parent is a thunk,
  -- the caller almost certainly meant to operate on the parent.
  when (takeFileName thunkDir == unpackedDirName) $
    readThunk (takeDirectory thunkDir) >>= \case
      Right _ -> failWith [i|Refusing to perform thunk operation on ${thunkDir} because it is a thunk's unpacked source|]
      Left _ -> pure ()
-- | Unpack the packed thunk at the given path into a full git checkout.
unpackThunk :: MonadObelisk m => FilePath -> m ()
unpackThunk = unpackThunk' False
-- | Core of 'unpackThunk'. When @noTrail@ is set the trailing spinner
-- message is suppressed (used when unpacking is a sub-step of another
-- operation, e.g. 'updateThunk').
unpackThunk' :: MonadObelisk m => Bool -> FilePath -> m ()
unpackThunk' noTrail thunkDir = checkThunkDirectory thunkDir *> readThunk thunkDir >>= \case
  Left err -> failWith [i|Invalid thunk at ${thunkDir}: ${err}|]
  Right ThunkData_Checkout -> failWith [i|Thunk at ${thunkDir} is already unpacked|]
  Right (ThunkData_Packed _ tptr) -> do
    let (thunkParent, thunkName) = splitFileName thunkDir
    -- Fetch into a temp dir next to the thunk, then swap it into place so a
    -- failed fetch never destroys the packed thunk.
    withTempDirectory thunkParent thunkName $ \tmpThunk -> do
      let
        gitSrc = thunkSourceToGitSource $ _thunkPtr_source tptr
        newSpec = case _thunkPtr_source tptr of
          ThunkSource_GitHub _ -> NonEmpty.head gitHubThunkSpecs
          ThunkSource_Git _ -> NonEmpty.head gitThunkSpecs
      withSpinner' ("Fetching thunk " <> T.pack thunkName)
        (finalMsg noTrail $ const $ "Fetched thunk " <> T.pack thunkName) $ do
          let unpackedPath = tmpThunk </> unpackedDirName
          gitCloneForThunkUnpack gitSrc (_thunkRev_commit $ _thunkPtr_rev tptr) unpackedPath
          -- NOTE(review): 'normalizeMore' is unused in the code as reviewed;
          -- the lines that used it appear to have been lost -- confirm
          -- against the original before removing it.
          let normalizeMore = dropTrailingPathSeparator . normalise
          createThunk tmpThunk $ Left newSpec
          liftIO $ do
            removePathForcibly thunkDir
            renameDirectory tmpThunk thunkDir
-- | Clone the given git source into @dir@ and hard-reset the checkout to the
-- given commit, initializing submodules when the source requests them.
--
-- NOTE(review): the original type signature was corrupted in the file as
-- reviewed (it read ':: MonadObelisk m -> m ()', which does not parse); it
-- has been dropped so the type is inferred. Restore it explicitly as
-- 'MonadObelisk m => GitSource -> Ref SHA1 -> FilePath -> m ()' once the
-- names for the commit-ref type are confirmed in scope.
gitCloneForThunkUnpack gitSrc commit dir = do
  let git = callProcessAndLogOutput (Notice, Notice) . gitProc dir
  git $ [ "clone" ]
    ++ ["--recursive" | _gitSource_fetchSubmodules gitSrc]
    ++ [ T.unpack $ gitUriToText $ _gitSource_url gitSrc ]
    ++ do branch <- maybeToList $ _gitSource_branch gitSrc
          [ "--branch", T.unpack $ untagName branch ]
  -- Detach to the exact pinned commit regardless of which branch was cloned.
  git ["reset", "--hard", Ref.toHexString commit]
  when (_gitSource_fetchSubmodules gitSrc) $
    git ["submodule", "update", "--recursive", "--init"]
-- | Pack the checkout at the given path back into a thunk, returning the
-- resulting pointer.
packThunk :: MonadObelisk m => ThunkPackConfig -> FilePath -> m ThunkPtr
packThunk = packThunk' False
-- | Core of 'packThunk'. When @noTrail@ is set the trailing spinner message
-- is suppressed (used when packing is a sub-step of another operation).
--
-- Fails when the target is already packed; otherwise computes the thunk
-- pointer (optionally skipping the cleanliness check when @force@ is set),
-- deletes the checkout, and writes the packed representation in its place.
packThunk' :: MonadObelisk m => Bool -> ThunkPackConfig -> FilePath -> m ThunkPtr
packThunk' noTrail (ThunkPackConfig force thunkConfig) thunkDir = checkThunkDirectory thunkDir *> readThunk thunkDir >>= \case
  -- Fixed doubled word in the error message ("is is" -> "is").
  Right ThunkData_Packed{} -> failWith [i|Thunk at ${thunkDir} is already packed|]
  _ -> withSpinner'
    ("Packing thunk " <> T.pack thunkDir)
    (finalMsg noTrail $ const $ "Packed thunk " <> T.pack thunkDir) $
    do
      let checkClean = if force then CheckClean_NoCheck else CheckClean_FullCheck
      thunkPtr <- modifyThunkPtrByConfig thunkConfig <$> getThunkPtr checkClean thunkDir (_thunkConfig_private thunkConfig)
      liftIO $ removePathForcibly thunkDir
      createThunk thunkDir $ Right thunkPtr
      pure thunkPtr
-- | Apply a 'ThunkConfig' to an existing pointer. Currently this only
-- overrides the source's privacy flag, and only when the config specifies
-- one; otherwise the pointer is returned untouched.
modifyThunkPtrByConfig :: ThunkConfig -> ThunkPtr -> ThunkPtr
modifyThunkPtrByConfig (ThunkConfig markPrivate') ptr =
  case markPrivate' of
    Nothing -> ptr
    Just markPrivate ->
      let newSource = case _thunkPtr_source ptr of
            ThunkSource_Git s -> ThunkSource_Git s { _gitSource_private = markPrivate }
            ThunkSource_GitHub s -> ThunkSource_GitHub s { _gitHubSource_private = markPrivate }
      in ptr { _thunkPtr_source = newSource }
-- | How thoroughly to verify that a thunk checkout is saved before packing.
data CheckClean
  = CheckClean_FullCheck
    -- ^ Run every cleanliness check, including ignored files
  | CheckClean_NotIgnored
    -- ^ Run the cleanliness checks, but tolerate ignored files
  | CheckClean_NoCheck
    -- ^ Skip the cleanliness checks entirely (used by @--force@)
-- | Compute the 'ThunkPtr' for an unpacked thunk checkout at @dir@.
--
-- Depending on the 'CheckClean' policy, verifies that the working tree is
-- clean, has no stashes, and that every local branch has an upstream with
-- all local commits pushed; then determines the current branch's remote URI
-- and delegates to 'uriThunkPtr'. @mPrivate@ optionally forces the privacy
-- flag of the resulting source.
getThunkPtr :: forall m. MonadObelisk m => CheckClean -> FilePath -> Maybe Bool -> m ThunkPtr
getThunkPtr gitCheckClean dir mPrivate = do
  -- The checkout may live either directly in 'dir' or in its unpacked
  -- subdirectory (v5-style thunks).
  let repoLocations = nubOrd $ map (first normalise)
        [(".git", "."), (unpackedDirName </> ".git", unpackedDirName)]
  repoLocation' <- liftIO $ flip findM repoLocations $ doesDirectoryExist . (dir </>) . fst
  thunkDir <- case repoLocation' of
    Nothing -> failWith [i|Can't find an unpacked thunk in ${dir}|]
    Just (_, path) -> pure $ normalise $ dir </> path
  let (checkClean, checkIgnored) = case gitCheckClean of
        CheckClean_FullCheck -> (True, True)
        CheckClean_NotIgnored -> (True, False)
        CheckClean_NoCheck -> (False, False)
  when checkClean $ ensureCleanGitRepo thunkDir checkIgnored
    "thunk pack: thunk checkout contains unsaved modifications"
  -- Stashes cannot be packed; refuse rather than silently dropping them.
  when checkClean $ do
    stashOutput <- readGitProcess thunkDir ["stash", "list"]
    unless (T.null stashOutput) $
      failWith $ T.unlines $
        [ "thunk pack: thunk checkout has stashes"
        , "git stash list:"
        ] ++ T.lines stashOutput
  -- Determine the current branch and commit; a detached HEAD cannot be packed.
  (mCurrentBranch, mCurrentCommit) <- do
    b <- listToMaybe . T.lines <$> readGitProcess thunkDir ["rev-parse", "--abbrev-ref", "HEAD"]
    c <- listToMaybe . T.lines <$> readGitProcess thunkDir ["rev-parse", "HEAD"]
    case b of
      (Just "HEAD") -> failWith $ T.unlines
        [ "thunk pack: You are in 'detached HEAD' state."
        , "If you want to pack at the current ref \
          \then please create a new branch with 'git checkout -b <new-branch-name>' and push this upstream."
        ]
      _ -> return (b, c)
  -- Map every local branch to its upstream (if any) via for-each-ref.
  headDump :: [Text] <- T.lines <$> readGitProcess thunkDir
    [ "for-each-ref"
    , "--format=%(refname:short) %(upstream:short) %(upstream:remotename)"
    , "refs/heads/"
    ]
  (headInfo :: Map Text (Maybe (Text, Text)))
    <- fmap Map.fromList $ forM headDump $ \line -> do
      (branch : restOfLine) <- pure $ T.words line
      mUpstream <- case restOfLine of
        [] -> pure Nothing
        [u, r] -> pure $ Just (u, r)
        (_:_) -> failWith "git for-each-ref invalid output"
      pure (branch, mUpstream)
  putLog Debug $ "branches: " <> T.pack (show headInfo)
  -- Split branches into those without an upstream (errors) and those with one.
  let errorMap :: Map Text ()
      headUpstream :: Map Text (Text, Text)
      (errorMap, headUpstream) = flip Map.mapEither headInfo $ \case
        Nothing -> Left ()
        Just b -> Right b
  putLog Debug $ "branches with upstream branch set: " <> T.pack (show headUpstream)
  -- Refuse to pack while any branch lacks an upstream: we cannot verify that
  -- its commits are saved anywhere else.
  when checkClean $ do
    let untrackedBranches = Map.keys errorMap
    when (not $ L.null untrackedBranches) $ failWith $ T.unlines $
      [ "thunk pack: Certain branches in the thunk have no upstream branch \
        \set. This means we don't know to check whether all your work is \
        \saved. The offending branches are:"
      , ""
      , T.unwords untrackedBranches
      , ""
      , "To fix this, you probably want to do:"
      , ""
      ] ++
      ((\branch -> "git push -u origin " <> branch) <$> untrackedBranches) ++
      [ ""
      , "These will push the branches to the default remote under the same \
        \name, and (thanks to the `-u`) remember that choice so you don't \
        \get this error again."
      ]
  -- For each branch, count commits ahead of / behind its upstream.
  stats <- ifor headUpstream $ \branch (upstream, _remote) -> do
    (stat :: [Text]) <- T.lines <$> readGitProcess thunkDir
      [ "rev-list", "--left-right"
      , T.unpack branch <> "..." <> T.unpack upstream
      ]
    -- '<' marks commits only on the local branch, '>' only on the upstream.
    let ahead = length $ [ () | Just ('<', _) <- T.uncons <$> stat ]
        behind = length $ [ () | Just ('>', _) <- T.uncons <$> stat ]
    pure (upstream, (ahead, behind))
  -- Fail if any branch has local commits its upstream doesn't have.
  let nonGood = Map.filter ((/= 0) . fst . snd) stats
  when (not $ Map.null nonGood) $ failWith $ T.unlines $
    [ "thunk pack: Certain branches in the thunk have commits not yet pushed upstream:"
    , ""
    ] ++
    flip map (Map.toList nonGood) (\(branch, (upstream, (ahead, behind))) -> mconcat
      [" ", branch, " ahead: ", T.pack (show ahead), " behind: ", T.pack (show behind), " remote branch ", upstream]) ++
    [ ""
    , "Please push these upstream and try again. (Or just fetch, if they are somehow \
      \pushed but this repo's remote tracking branches don't know it.)"
    ]
  when checkClean $ do
    putLog Informational "All changes safe in git remotes. OK to pack thunk."
  -- Resolve the current branch's remote (defaulting to origin) and its URL.
  let remote = maybe "origin" snd $ flip Map.lookup headUpstream =<< mCurrentBranch
  [remoteUri'] <- fmap T.lines $ readGitProcess thunkDir
    [ "config"
    , "--get"
    , "remote." <> T.unpack remote <> ".url"
    ]
  remoteUri <- case parseGitUri remoteUri' of
    Nothing -> failWith $ "Could not identify git remote: " <> remoteUri'
    Just uri -> pure uri
  uriThunkPtr remoteUri mPrivate mCurrentBranch mCurrentCommit
-- | Ask the remote for the latest commit on the source's branch (or its
-- default branch) and resolve it to a 'ThunkRev'.
getLatestRev :: MonadObelisk m => ThunkSource -> m ThunkRev
getLatestRev os = do
  let gitS = thunkSourceToGitSource os
  (_, commit) <- gitGetCommitBranch (_gitSource_url gitS) (untagName <$> _gitSource_branch gitS)
  case os of
    ThunkSource_GitHub s -> githubThunkRev s commit
    ThunkSource_Git s -> gitThunkRev s commit
-- | Resolve a remote URI (and optional branch/commit) to a complete
-- 'ThunkPtr'.
--
-- For GitHub-looking sources we first try the archive-tarball route; if that
-- fails (commonly a private repo) we downgrade to a plain git source and
-- prefetch with git instead.
uriThunkPtr :: MonadObelisk m => GitUri -> Maybe Bool -> Maybe Text -> Maybe Text -> m ThunkPtr
uriThunkPtr uri mPrivate mbranch mcommit = do
  commit <- case mcommit of
    Nothing -> gitGetCommitBranch uri mbranch >>= return . snd
    (Just c) -> return c
  (src, rev) <- uriToThunkSource uri mPrivate mbranch >>= \case
    ThunkSource_GitHub s -> do
      rev <- runExceptT $ githubThunkRev s commit
      case rev of
        Right r -> pure (ThunkSource_GitHub s, r)
        Left e -> do
          putLog Warning "\
            \Failed to fetch archive from GitHub. This is probably a private repo. \
            \Falling back on normal fetchgit. Original failure:"
          errorToWarning e
          -- Forget the GitHub-ness and treat it as a plain (private) git remote.
          let s' = forgetGithub True s
          (,) (ThunkSource_Git s') <$> gitThunkRev s' commit
    ThunkSource_Git s -> (,) (ThunkSource_Git s) <$> gitThunkRev s commit
  pure $ ThunkPtr
    { _thunkPtr_rev = rev
    , _thunkPtr_source = src
    }
-- | Classify a git URI as a GitHub source or a plain git source.
--
-- If the thunk is a GitHub thunk and fails, we do *not* fall back to plain
-- git here; that fallback happens in 'uriThunkPtr'. GitHub is recognized
-- either via the canonical SSH form (git@github.com) or via a
-- git/https/http URL whose host is github.com, in both cases with exactly
-- an owner and a repo path segment.
uriToThunkSource :: MonadObelisk m => GitUri -> Maybe Bool -> Maybe Text -> m ThunkSource
uriToThunkSource (GitUri u) mPrivate
  | Right uriAuth <- URI.uriAuthority u
  , Just scheme <- URI.unRText <$> URI.uriScheme u
  , case scheme of
      "ssh" -> uriAuth == URI.Authority
        { URI.authUserInfo = Just $ URI.UserInfo (fromRight' $ URI.mkUsername "git") Nothing
        , URI.authHost = fromRight' $ URI.mkHost "github.com"
        , URI.authPort = Nothing
        }
      -- NOTE(review): this alternative was missing in the file as reviewed,
      -- leaving the case non-exhaustive (a runtime pattern-match failure for
      -- any non-ssh scheme); restored.
      s -> s `L.elem` [ "git", "https", "http" ]
        && URI.unRText (URI.authHost uriAuth) == "github.com"
  , Just (_, owner :| [repoish]) <- URI.uriPath u
  = \mbranch -> do
      isPrivate <- getIsPrivate
      pure $ ThunkSource_GitHub $ GitHubSource
        { _gitHubSource_owner = N $ URI.unRText owner
        , _gitHubSource_repo = N $ let
            repoish' = URI.unRText repoish
            in fromMaybe repoish' $ T.stripSuffix ".git" repoish'
        , _gitHubSource_branch = N <$> mbranch
        , _gitHubSource_private = isPrivate
        }
  | otherwise = \mbranch -> do
      isPrivate <- getIsPrivate
      pure $ ThunkSource_Git $ GitSource
        { _gitSource_url = GitUri u
        , _gitSource_branch = N <$> mbranch
        -- NOTE(review): this field was missing in the file as reviewed,
        -- which would make the record construction fail at runtime; restored
        -- with the conservative default.
        , _gitSource_fetchSubmodules = False
        , _gitSource_private = isPrivate
        }
  where
    -- Use the caller-specified privacy flag when given; otherwise probe.
    getIsPrivate = maybe (guessGitRepoIsPrivate $ GitUri u) pure mPrivate
-- | Heuristically determine whether a git repository is private by
-- attempting non-interactive @git ls-remote@ against it.
--
-- Tries the original URI (unless it uses SSH, which would certainly fail
-- without credentials), then https and http variants with user info
-- stripped. The first attempt that succeeds proves the repo is public; if
-- every attempt fails, assume it is private.
guessGitRepoIsPrivate :: MonadObelisk m => GitUri -> m Bool
guessGitRepoIsPrivate uri = flip fix urisToTry $ \loop -> \case
  [] -> pure True
  uriAttempt:xs -> do
    result <- readCreateProcessWithExitCode $
      isolateGitProc $
        gitProcNoRepo
          [ "ls-remote"
          , "--quiet"
          , "--exit-code"
          , "--symref"
          , T.unpack $ gitUriToText uriAttempt
          ]
    case result of
      -- NOTE(review): this alternative was missing in the file as reviewed
      -- ('result' was unused, so the function always returned True);
      -- restored: a successful ls-remote means the repo is public.
      (ExitSuccess, _, _) -> pure False
      _ -> loop xs
  where
    urisToTry = nubOrd $
      -- Include the original URI only if it isn't using SSH, because SSH
      -- will certainly fail in the isolated (credential-free) environment.
      [uri | fmap URI.unRText (URI.uriScheme (unGitUri uri)) /= Just "ssh"] <>
      [changeScheme "https" uri, changeScheme "http" uri]
    changeScheme scheme (GitUri u) = GitUri $ u
      { URI.uriScheme = URI.mkScheme scheme
      , URI.uriAuthority = (\x -> x { URI.authUserInfo = Nothing }) <$> URI.uriAuthority u
      }
-- | Resolve a GitHub commit to a 'ThunkRev' by prefetching the archive
-- tarball for that commit and hashing its unpacked contents.
githubThunkRev
  :: forall m
  .  MonadObelisk m
  => GitHubSource
  -> Text
  -- ^ Commit hash (hex)
  -> m ThunkRev
githubThunkRev s commit = do
  owner <- forcePP $ _gitHubSource_owner s
  repo <- forcePP $ _gitHubSource_repo s
  revTarball <- URI.mkPathPiece $ commit <> ".tar.gz"
  -- https://github.com/<owner>/<repo>/archive/<rev>.tar.gz
  let archiveUri = GitUri $ URI.URI
        { URI.uriScheme = Just $ fromRight' $ URI.mkScheme "https"
        , URI.uriAuthority = Right $ URI.Authority
          { URI.authUserInfo = Nothing
          , URI.authHost = fromRight' $ URI.mkHost "github.com"
          , URI.authPort = Nothing
          }
        , URI.uriPath = Just ( False
          , owner :| [ repo, fromRight' $ URI.mkPathPiece "archive", revTarball ]
          )
        , URI.uriQuery = []
        , URI.uriFragment = Nothing
        }
  hash <- getNixSha256ForUriUnpacked archiveUri
  putLog Debug $ "Nix sha256 is " <> hash
  return $ ThunkRev
    { _thunkRev_commit = commitNameToRef $ N commit
    , _thunkRev_nixSha256 = hash
    }
  where
    -- Convert a tagged name into a URI path piece, failing in @m@ if invalid.
    forcePP :: Name entity -> m (URI.RText 'URI.PathPiece)
    forcePP = URI.mkPathPiece . untagName
-- | Resolve a plain-git commit to a 'ThunkRev' by prefetching the repository
-- with nix and hashing it. Only file/https/ssh/git remotes are supported.
gitThunkRev
  :: MonadObelisk m
  => GitSource
  -> Text
  -- ^ Commit hash (hex)
  -> m ThunkRev
gitThunkRev s commit = do
  let u = _gitSource_url s
      protocols = ["file", "https", "ssh", "git"]
      -- A URI with no scheme is treated as a local file path.
      scheme = maybe "file" URI.unRText $ URI.uriScheme $ (\(GitUri x) -> x) u
  unless (T.toLower scheme `elem` protocols) $
    failWith $ "obelisk currently only supports "
      <> T.intercalate ", " protocols <> " protocols for plain Git remotes"
  hash <- nixPrefetchGit u commit $ _gitSource_fetchSubmodules s
  putLog Informational $ "Nix sha256 is " <> hash
  pure $ ThunkRev
    { _thunkRev_commit = commitNameToRef (N commit)
    , _thunkRev_nixSha256 = hash
    }
| Given the URI to a git remote , and an optional branch name , return the name
gitGetCommitBranch
:: MonadObelisk m => GitUri -> Maybe Text -> m (Text, CommitId)
gitGetCommitBranch uri mbranch = withExitFailMessage ("Failure for git remote " <> uriMsg) $ do
(_, bothMaps) <- gitLsRemote
(T.unpack $ gitUriToText uri)
(GitRef_Branch <$> mbranch)
Nothing
branch <- case mbranch of
Nothing -> withExitFailMessage "Failed to find default branch" $ do
b <- rethrowE $ gitLookupDefaultBranch bothMaps
putLog Debug $ "Default branch for remote repo " <> uriMsg <> " is " <> b
pure b
Just b -> pure b
commit <- rethrowE $ gitLookupCommitForRef bothMaps (GitRef_Branch branch)
putLog Informational $ "Latest commit in branch " <> branch
<> " from remote repo " <> uriMsg
<> " is " <> commit
pure (branch, commit)
where
rethrowE = either failWith pure
uriMsg = gitUriToText uri
parseGitUri :: Text -> Maybe GitUri
parseGitUri x = GitUri <$> (parseFileURI x <|> parseAbsoluteURI x <|> parseSshShorthand x)
parseFileURI :: Text -> Maybe URI.URI
parseFileURI uri = if "/" `T.isPrefixOf` uri then parseAbsoluteURI ("file://" <> uri) else Nothing
parseAbsoluteURI :: Text -> Maybe URI.URI
parseAbsoluteURI uri = do
parsedUri <- URI.mkURI uri
guard $ URI.isPathAbsolute parsedUri
pure parsedUri
parseSshShorthand :: Text -> Maybe URI.URI
parseSshShorthand uri = do
let
(authAndHostname, colonAndPath) = T.break (== ':') uri
properUri = "ssh://" <> authAndHostname <> "/" <> T.drop 1 colonAndPath
Shorthand is valid iff a colon is present and it occurs before the first slash
guard $ isNothing (T.findIndex (=='/') authAndHostname)
&& not (T.null colonAndPath)
URI.mkURI properUri
|
f9013dd06d125bb7f929630c53dbb05657a660331dc1a36900366a12fab5c55c | donald-pinckney/WasmContinuations | continuation-barrier-removed.rkt | #lang racket
(require racket/control)
(define k1 'undefined)
(define k2 'undefined)
(define (rust-foo)
(wasm-$main))
(define (wasm-$main)
(control k (wasm-$h1 k)))
(define (wasm-$h1 k)
(set! k1 k)
(rust-bar))
(define (rust-bar)
(printf "Call to wasm-$bad from rust-bar.~n")
(wasm-$bad)
(printf "Return from wasm-$bad to rust-bar.~n"))
(define (wasm-$bad)
(control k (wasm-$h2 k)))
(define (wasm-$h2 k)
(set! k2 k)
; Note that this is the behavior that you allow in Wasm.
(k2 'done))
(rust-foo) | null | https://raw.githubusercontent.com/donald-pinckney/WasmContinuations/a9e524858c0481ed97021b64f97266e8b574bf3d/racket/arjuns/continuation-barrier-removed.rkt | racket | Note that this is the behavior that you allow in Wasm. | #lang racket
(require racket/control)
(define k1 'undefined)
(define k2 'undefined)
(define (rust-foo)
(wasm-$main))
(define (wasm-$main)
(control k (wasm-$h1 k)))
(define (wasm-$h1 k)
(set! k1 k)
(rust-bar))
(define (rust-bar)
(printf "Call to wasm-$bad from rust-bar.~n")
(wasm-$bad)
(printf "Return from wasm-$bad to rust-bar.~n"))
(define (wasm-$bad)
(control k (wasm-$h2 k)))
(define (wasm-$h2 k)
(set! k2 k)
(k2 'done))
(rust-foo) |
51b072d960866c68b388fcc25b39c565db3cc4a4f9592db1b8c75bb77684a731 | ntoronto/pict3d | merge-passes.rkt | #lang typed/racket/base
(require racket/unsafe/ops
racket/list
racket/vector
(except-in typed/opengl/ffi -> cast)
"../../gl.rkt"
"../../utils.rkt"
"../utils.rkt"
"types.rkt")
(provide merge-passes)
(: get-counts (-> (Vectorof shape-params)
Nonnegative-Fixnum
Nonnegative-Fixnum
(Values Nonnegative-Fixnum Nonnegative-Fixnum)))
(define (get-counts ps start end)
(for/fold ([vertex-count : Nonnegative-Fixnum 0]
[index-count : Nonnegative-Fixnum 0])
([i (in-range start end)])
(define v (shape-params-vertices (unsafe-vector-ref ps i)))
(values (unsafe-fx+ vertex-count (vertices-vertex-count v))
(unsafe-fx+ index-count (vector-length (vertices-indexes v))))))
(: merge-data (-> gl-program
(Vectorof shape-params)
Nonnegative-Fixnum
Nonnegative-Fixnum
Nonnegative-Fixnum
Nonnegative-Fixnum
vertices))
(define (merge-data program ps start end vertex-count index-count)
(define struct-size (vao-struct-size (gl-program-struct program)))
(define buffer-size (unsafe-fx* vertex-count struct-size))
(define all-vertex-data (make-bytes buffer-size))
(define all-vertex-data-ptr (u8vector->cpointer all-vertex-data))
(define all-indexes ((inst make-vector Index) index-count))
(: vertex-hash (HashTable Bytes Index))
(define vertex-hash (make-hash))
(define-values (vertex-num index-num)
(for/fold ([vertex-num : Nonnegative-Fixnum 0]
[index-num : Nonnegative-Fixnum 0])
([i (in-range start end)])
(define v (shape-params-vertices (unsafe-vector-ref ps i)))
(define old-vertex-count (vertices-vertex-count v))
(define vertex-data (vertices-vertex-data v))
(define indexes (vertices-indexes v))
(define index-count (vector-length indexes))
;; Mapping from old index to new index
(define vertex-indexes ((inst make-vector Index) old-vertex-count 0))
;; Copy the vertex data while merging vertexes
(define vertex-count
(for/fold ([vertex-count : Nonnegative-Fixnum 0])
([j (in-range old-vertex-count)])
(define bs-start (unsafe-fx* j struct-size))
(define bs (subbytes vertex-data bs-start (unsafe-fx+ bs-start struct-size)))
(define new-j (hash-ref vertex-hash bs #f))
(cond
[(not new-j)
(define new-j (assert (unsafe-fx+ vertex-num vertex-count) index?))
(hash-set! vertex-hash bs new-j)
(vector-set! vertex-indexes j new-j)
(memcpy all-vertex-data-ptr
(unsafe-fx* new-j struct-size)
(u8vector->cpointer vertex-data)
(unsafe-fx* j struct-size)
struct-size
_byte)
(unsafe-fx+ vertex-count 1)]
[else
(vector-set! vertex-indexes j new-j)
vertex-count])))
;; Copy the indexes
(for ([k (in-range index-count)])
(define j (unsafe-vector-ref indexes k))
(vector-set! all-indexes
(unsafe-fx+ index-num k)
(vector-ref vertex-indexes j)))
(values (unsafe-fx+ vertex-num vertex-count)
(unsafe-fx+ index-num index-count))))
(vertices (assert vertex-num index?)
(subbytes all-vertex-data 0 (* vertex-num struct-size))
all-indexes))
(: merge-vertices (-> gl-program
(List-Hash String (U Symbol Uniform))
Boolean
Integer
(Vectorof shape-params)
Nonnegative-Fixnum
Nonnegative-Fixnum
(Listof shape-params)))
(define (merge-vertices program uniforms two-sided? mode ps start end)
(define-values (vertex-count index-count) (get-counts ps start end))
(cond
[(> vertex-count max-shape-vertex-count)
(define mid (unsafe-fxquotient (unsafe-fx+ start end) 2))
(when (or (= start mid) (= end mid))
(error 'merge-vertices
"cannot merge a single shape with more than ~a vertices; given ~a vertices"
max-shape-vertex-count
vertex-count))
(append
(merge-vertices program uniforms two-sided? mode ps start mid)
(merge-vertices program uniforms two-sided? mode ps mid end))]
[(> vertex-count 0)
(define verts (merge-data program ps start end vertex-count index-count))
(list (shape-params (λ () program) uniforms two-sided? mode verts))]
[else
empty]))
(: merge-shape-params (-> (Vectorof shape-params) (Vectorof shape-params)))
(define (merge-shape-params ps)
(list->vector
(append*
(for*/list : (Listof (Listof shape-params))
([ks (in-list (group-by-key! ps 0 (vector-length ps) shape-params-program))]
[program (in-value ((car ks)))]
[s (in-value (cdr ks))]
[ks (in-list (group-by-key! ps (span-start s) (span-end s) shape-params-uniforms))]
[uniforms (in-value (car ks))]
[s (in-value (cdr ks))]
[ks (in-list (group-by-key! ps (span-start s) (span-end s) shape-params-two-sided?))]
[face (in-value (car ks))]
[s (in-value (cdr ks))]
[ks (in-list (group-by-key! ps (span-start s) (span-end s) shape-params-mode))]
[mode (in-value (car ks))]
[s (in-value (cdr ks))])
(merge-vertices program uniforms face mode ps (span-start s) (span-end s))))))
(: merge-passes (-> (Listof passes) passes))
(define (merge-passes ps)
(passes
(merge-shape-params (apply vector-append (map passes-light ps)))
(merge-shape-params (apply vector-append (map passes-opaque-material ps)))
(merge-shape-params (apply vector-append (map passes-opaque-color ps)))
(merge-shape-params (apply vector-append (map passes-transparent-material ps)))
(merge-shape-params (apply vector-append (map passes-transparent-color ps)))))
| null | https://raw.githubusercontent.com/ntoronto/pict3d/09283c9d930c63b6a6a3f2caa43e029222091bdb/pict3d/private/engine/draw/merge-passes.rkt | racket | Mapping from old index to new index
Copy the vertex data while merging vertexes
Copy the indexes | #lang typed/racket/base
(require racket/unsafe/ops
racket/list
racket/vector
(except-in typed/opengl/ffi -> cast)
"../../gl.rkt"
"../../utils.rkt"
"../utils.rkt"
"types.rkt")
(provide merge-passes)
(: get-counts (-> (Vectorof shape-params)
Nonnegative-Fixnum
Nonnegative-Fixnum
(Values Nonnegative-Fixnum Nonnegative-Fixnum)))
(define (get-counts ps start end)
(for/fold ([vertex-count : Nonnegative-Fixnum 0]
[index-count : Nonnegative-Fixnum 0])
([i (in-range start end)])
(define v (shape-params-vertices (unsafe-vector-ref ps i)))
(values (unsafe-fx+ vertex-count (vertices-vertex-count v))
(unsafe-fx+ index-count (vector-length (vertices-indexes v))))))
(: merge-data (-> gl-program
(Vectorof shape-params)
Nonnegative-Fixnum
Nonnegative-Fixnum
Nonnegative-Fixnum
Nonnegative-Fixnum
vertices))
(define (merge-data program ps start end vertex-count index-count)
(define struct-size (vao-struct-size (gl-program-struct program)))
(define buffer-size (unsafe-fx* vertex-count struct-size))
(define all-vertex-data (make-bytes buffer-size))
(define all-vertex-data-ptr (u8vector->cpointer all-vertex-data))
(define all-indexes ((inst make-vector Index) index-count))
(: vertex-hash (HashTable Bytes Index))
(define vertex-hash (make-hash))
(define-values (vertex-num index-num)
(for/fold ([vertex-num : Nonnegative-Fixnum 0]
[index-num : Nonnegative-Fixnum 0])
([i (in-range start end)])
(define v (shape-params-vertices (unsafe-vector-ref ps i)))
(define old-vertex-count (vertices-vertex-count v))
(define vertex-data (vertices-vertex-data v))
(define indexes (vertices-indexes v))
(define index-count (vector-length indexes))
(define vertex-indexes ((inst make-vector Index) old-vertex-count 0))
(define vertex-count
(for/fold ([vertex-count : Nonnegative-Fixnum 0])
([j (in-range old-vertex-count)])
(define bs-start (unsafe-fx* j struct-size))
(define bs (subbytes vertex-data bs-start (unsafe-fx+ bs-start struct-size)))
(define new-j (hash-ref vertex-hash bs #f))
(cond
[(not new-j)
(define new-j (assert (unsafe-fx+ vertex-num vertex-count) index?))
(hash-set! vertex-hash bs new-j)
(vector-set! vertex-indexes j new-j)
(memcpy all-vertex-data-ptr
(unsafe-fx* new-j struct-size)
(u8vector->cpointer vertex-data)
(unsafe-fx* j struct-size)
struct-size
_byte)
(unsafe-fx+ vertex-count 1)]
[else
(vector-set! vertex-indexes j new-j)
vertex-count])))
(for ([k (in-range index-count)])
(define j (unsafe-vector-ref indexes k))
(vector-set! all-indexes
(unsafe-fx+ index-num k)
(vector-ref vertex-indexes j)))
(values (unsafe-fx+ vertex-num vertex-count)
(unsafe-fx+ index-num index-count))))
(vertices (assert vertex-num index?)
(subbytes all-vertex-data 0 (* vertex-num struct-size))
all-indexes))
(: merge-vertices (-> gl-program
(List-Hash String (U Symbol Uniform))
Boolean
Integer
(Vectorof shape-params)
Nonnegative-Fixnum
Nonnegative-Fixnum
(Listof shape-params)))
(define (merge-vertices program uniforms two-sided? mode ps start end)
(define-values (vertex-count index-count) (get-counts ps start end))
(cond
[(> vertex-count max-shape-vertex-count)
(define mid (unsafe-fxquotient (unsafe-fx+ start end) 2))
(when (or (= start mid) (= end mid))
(error 'merge-vertices
"cannot merge a single shape with more than ~a vertices; given ~a vertices"
max-shape-vertex-count
vertex-count))
(append
(merge-vertices program uniforms two-sided? mode ps start mid)
(merge-vertices program uniforms two-sided? mode ps mid end))]
[(> vertex-count 0)
(define verts (merge-data program ps start end vertex-count index-count))
(list (shape-params (λ () program) uniforms two-sided? mode verts))]
[else
empty]))
(: merge-shape-params (-> (Vectorof shape-params) (Vectorof shape-params)))
(define (merge-shape-params ps)
(list->vector
(append*
(for*/list : (Listof (Listof shape-params))
([ks (in-list (group-by-key! ps 0 (vector-length ps) shape-params-program))]
[program (in-value ((car ks)))]
[s (in-value (cdr ks))]
[ks (in-list (group-by-key! ps (span-start s) (span-end s) shape-params-uniforms))]
[uniforms (in-value (car ks))]
[s (in-value (cdr ks))]
[ks (in-list (group-by-key! ps (span-start s) (span-end s) shape-params-two-sided?))]
[face (in-value (car ks))]
[s (in-value (cdr ks))]
[ks (in-list (group-by-key! ps (span-start s) (span-end s) shape-params-mode))]
[mode (in-value (car ks))]
[s (in-value (cdr ks))])
(merge-vertices program uniforms face mode ps (span-start s) (span-end s))))))
(: merge-passes (-> (Listof passes) passes))
(define (merge-passes ps)
(passes
(merge-shape-params (apply vector-append (map passes-light ps)))
(merge-shape-params (apply vector-append (map passes-opaque-material ps)))
(merge-shape-params (apply vector-append (map passes-opaque-color ps)))
(merge-shape-params (apply vector-append (map passes-transparent-material ps)))
(merge-shape-params (apply vector-append (map passes-transparent-color ps)))))
|
bf92e5935d4643607d732da0130fdfc5cb4d5b1916bf1c5980de1e2ad99cc246 | vaclavsvejcar/headroom | CSSSpec.hs | {-# LANGUAGE OverloadedStrings #-}
# LANGUAGE RecordWildCards #
# LANGUAGE NoImplicitPrelude #
module Headroom.FileSupport.CSSSpec
( spec
)
where
import Headroom.FileSupport.CSS
import Headroom.FileSupport.Types
( FileSupport (..)
, SyntaxAnalysis (..)
)
import RIO
import Test.Hspec
spec :: Spec
spec = do
describe "fsSyntaxAnalysis" $ do
it "correctly detects comment starts/ends" $ do
let samples =
[ ("non comment line", (False, False))
, ("/* block comment start", (True, False))
, ("block comment end */", (False, True))
, ("/* block comment start/end */", (True, True))
]
all checkSyntaxAnalysis samples `shouldBe` True
where
checkSyntaxAnalysis (l, (s, e)) =
let SyntaxAnalysis{..} = fsSyntaxAnalysis fileSupport
in saIsCommentStart l == s && saIsCommentEnd l == e
| null | https://raw.githubusercontent.com/vaclavsvejcar/headroom/3b20a89568248259d59f83f274f60f6e13d16f93/test/Headroom/FileSupport/CSSSpec.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE RecordWildCards #
# LANGUAGE NoImplicitPrelude #
module Headroom.FileSupport.CSSSpec
( spec
)
where
import Headroom.FileSupport.CSS
import Headroom.FileSupport.Types
( FileSupport (..)
, SyntaxAnalysis (..)
)
import RIO
import Test.Hspec
spec :: Spec
spec = do
describe "fsSyntaxAnalysis" $ do
it "correctly detects comment starts/ends" $ do
let samples =
[ ("non comment line", (False, False))
, ("/* block comment start", (True, False))
, ("block comment end */", (False, True))
, ("/* block comment start/end */", (True, True))
]
all checkSyntaxAnalysis samples `shouldBe` True
where
checkSyntaxAnalysis (l, (s, e)) =
let SyntaxAnalysis{..} = fsSyntaxAnalysis fileSupport
in saIsCommentStart l == s && saIsCommentEnd l == e
|
cf0c3717f57e531eb4ef592d241f7e185f105d95fa73777db1ffe45bdc629b19 | clingen-data-model/genegraph | datafy.clj | (ns genegraph.database.datafy
(:require [clojure.core.protocols :as p]
[clojure.datafy :as d]
[genegraph.database.names :as names :refer [property-uri->keyword class-uri->keyword]]
[genegraph.database.instance :refer [db]]
[genegraph.database.util :refer [tx]]
[mount.core :refer [defstate]]
[clojure.set :as set]
[genegraph.database.query :as q])
(:import [org.apache.jena.rdf.model Property Literal Resource ResourceFactory
Statement]))
(declare datafy-resource)
(defn- compose-object [o]
(cond (instance? Literal o) (.toString o)
(instance? Resource o)
(with-meta (-> o .toString symbol)
{::d/obj o
::d/class (class o)
`p/datafy #(-> % meta ::d/obj datafy-resource)})))
TODO compose non - class resource string into namespaced keyword
TODO construct multiple property targets as collection
(defn datafy-resource [this]
(tx
(let [model (.getUnionModel db)
out-attributes (-> model (.listStatements this nil nil) iterator-seq)
in-attributes (-> model (.listStatements nil nil this) iterator-seq)]
{:> (into {} (map
#(vector (-> % .getPredicate property-uri->keyword)
(-> % .getObject compose-object))
out-attributes))
:< (into {} (map
#(vector (-> % .getPredicate property-uri->keyword)
(-> % .getSubject compose-object))
in-attributes))})))
(extend-protocol p/Datafiable
Resource
(datafy [this] (datafy-resource this))
Property
(datafy [this] (property-uri->keyword this))
Literal
(datafy [this] (.getString this)))
| null | https://raw.githubusercontent.com/clingen-data-model/genegraph/8c217e4c3820b3bd0a0937a6e331a6e6a49b8c14/src/genegraph/database/datafy.clj | clojure | (ns genegraph.database.datafy
(:require [clojure.core.protocols :as p]
[clojure.datafy :as d]
[genegraph.database.names :as names :refer [property-uri->keyword class-uri->keyword]]
[genegraph.database.instance :refer [db]]
[genegraph.database.util :refer [tx]]
[mount.core :refer [defstate]]
[clojure.set :as set]
[genegraph.database.query :as q])
(:import [org.apache.jena.rdf.model Property Literal Resource ResourceFactory
Statement]))
(declare datafy-resource)
(defn- compose-object [o]
(cond (instance? Literal o) (.toString o)
(instance? Resource o)
(with-meta (-> o .toString symbol)
{::d/obj o
::d/class (class o)
`p/datafy #(-> % meta ::d/obj datafy-resource)})))
TODO compose non - class resource string into namespaced keyword
TODO construct multiple property targets as collection
(defn datafy-resource [this]
(tx
(let [model (.getUnionModel db)
out-attributes (-> model (.listStatements this nil nil) iterator-seq)
in-attributes (-> model (.listStatements nil nil this) iterator-seq)]
{:> (into {} (map
#(vector (-> % .getPredicate property-uri->keyword)
(-> % .getObject compose-object))
out-attributes))
:< (into {} (map
#(vector (-> % .getPredicate property-uri->keyword)
(-> % .getSubject compose-object))
in-attributes))})))
(extend-protocol p/Datafiable
Resource
(datafy [this] (datafy-resource this))
Property
(datafy [this] (property-uri->keyword this))
Literal
(datafy [this] (.getString this)))
| |
9f8901070a626a6791bd46a8b0d855d074d50780186b3696b608cd3501644510 | mortuosplango/frankentone | core_test.clj | (ns frankentone.core-test
(:use clojure.test
frankentone.core))
| null | https://raw.githubusercontent.com/mortuosplango/frankentone/6602e9623c23f3543b9f779fea7851a043ad7fca/test/frankentone/core_test.clj | clojure | (ns frankentone.core-test
(:use clojure.test
frankentone.core))
| |
dd8f3486cdd8cdfc2c2ae453203bfd5dfb43af7bef97221a25554669bd673bba | akrmn/generic-constructors | Constructors.hs | # LANGUAGE AllowAmbiguousTypes #
# LANGUAGE BlockArguments #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE InstanceSigs #
# LANGUAGE NoStarIsType #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
module Constructors
( constructors
, constructors'
) where
import Control.Applicative
import GHC.Generics
import Data.Proxy
import Data.Kind
class Constructors (f :: Type -> Type) where
type Q f x y :: Type
k :: forall x y. (f () -> x) -> (Q f x y -> y)
instance Constructors f => Constructors (D1 d f) where
type Q (D1 d f) x y = Q f x y
k :: forall x y. (D1 d f () -> x) -> (Q f x y -> y)
k inj = k (inj . M1)
instance Constructors V1 where
type Q V1 x y = y
k :: forall x y. (V1 () -> x) -> (y -> y)
k _ = id
instance (Constructors f, Constructors g) => Constructors (f :+: g) where
type Q (f :+: g) x y = Q f x (Q g x y)
k :: forall x y. ((f :+: g) () -> x) -> (Q f x (Q g x y) -> y)
k inj =
let
r0 = k @g @x @ y (inj . R1)
l0 = k @f @x @(Q g x y) (inj . L1)
in
r0 . l0
class Arguments (f :: Type -> Type) where
type Q0 f x :: Type
k0 :: forall x. (f () -> x) -> Q0 f x
instance Arguments f => Constructors (C1 c f) where
type Q (C1 c f) x y = Q0 f x -> y
k :: forall x y. (C1 c f () -> x) -> ((Q0 f x -> y) -> y)
k inj f = f (k0 (inj . M1))
instance (Arguments f, Arguments g) => Arguments (f :*: g) where
type Q0 (f :*: g) x = Q0 f (Q0 g x)
k0 :: forall x. ((f :*: g) () -> x) -> Q0 f (Q0 g x)
k0 inj = k0 (\f -> k0 (\g -> inj (f :*: g)))
instance Arguments f => Arguments (S1 s f) where
type Q0 (S1 s f) x = Q0 f x
k0 :: forall x. (S1 s f () -> x) -> Q0 f x
k0 inj = k0 (inj . M1)
instance Arguments U1 where
type Q0 U1 x = x
k0 :: forall x y. (U1 () -> x) -> x
k0 inj = inj U1
instance Arguments (K1 i t) where
type Q0 (K1 i t) x = t -> x
k0 :: forall x. (K1 i t () -> x) -> (t -> x)
k0 inj = inj . K1
constructors :: forall t y. (Generic t, Constructors (Rep t)) => Q (Rep t) t y -> y
constructors = k @(Rep t) @t to
constructors' :: forall t y. (Generic t, Constructors (Rep t)) => Proxy t -> Q (Rep t) t y -> y
constructors' _ = constructors @t
| null | https://raw.githubusercontent.com/akrmn/generic-constructors/156cfa56e45cc2c8f91b126df9ed7aafc23711c1/src/Constructors.hs | haskell | # LANGUAGE AllowAmbiguousTypes #
# LANGUAGE BlockArguments #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE InstanceSigs #
# LANGUAGE NoStarIsType #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
module Constructors
( constructors
, constructors'
) where
import Control.Applicative
import GHC.Generics
import Data.Proxy
import Data.Kind
class Constructors (f :: Type -> Type) where
type Q f x y :: Type
k :: forall x y. (f () -> x) -> (Q f x y -> y)
instance Constructors f => Constructors (D1 d f) where
type Q (D1 d f) x y = Q f x y
k :: forall x y. (D1 d f () -> x) -> (Q f x y -> y)
k inj = k (inj . M1)
instance Constructors V1 where
type Q V1 x y = y
k :: forall x y. (V1 () -> x) -> (y -> y)
k _ = id
instance (Constructors f, Constructors g) => Constructors (f :+: g) where
type Q (f :+: g) x y = Q f x (Q g x y)
k :: forall x y. ((f :+: g) () -> x) -> (Q f x (Q g x y) -> y)
k inj =
let
r0 = k @g @x @ y (inj . R1)
l0 = k @f @x @(Q g x y) (inj . L1)
in
r0 . l0
class Arguments (f :: Type -> Type) where
type Q0 f x :: Type
k0 :: forall x. (f () -> x) -> Q0 f x
instance Arguments f => Constructors (C1 c f) where
type Q (C1 c f) x y = Q0 f x -> y
k :: forall x y. (C1 c f () -> x) -> ((Q0 f x -> y) -> y)
k inj f = f (k0 (inj . M1))
instance (Arguments f, Arguments g) => Arguments (f :*: g) where
type Q0 (f :*: g) x = Q0 f (Q0 g x)
k0 :: forall x. ((f :*: g) () -> x) -> Q0 f (Q0 g x)
k0 inj = k0 (\f -> k0 (\g -> inj (f :*: g)))
instance Arguments f => Arguments (S1 s f) where
type Q0 (S1 s f) x = Q0 f x
k0 :: forall x. (S1 s f () -> x) -> Q0 f x
k0 inj = k0 (inj . M1)
instance Arguments U1 where
type Q0 U1 x = x
k0 :: forall x y. (U1 () -> x) -> x
k0 inj = inj U1
instance Arguments (K1 i t) where
type Q0 (K1 i t) x = t -> x
k0 :: forall x. (K1 i t () -> x) -> (t -> x)
k0 inj = inj . K1
constructors :: forall t y. (Generic t, Constructors (Rep t)) => Q (Rep t) t y -> y
constructors = k @(Rep t) @t to
constructors' :: forall t y. (Generic t, Constructors (Rep t)) => Proxy t -> Q (Rep t) t y -> y
constructors' _ = constructors @t
| |
e7dac3fb63eb93c8a188af40989dc89dd9570d7e2ec906e69b7e4d84501fcf72 | moby/vpnkit | vmnet.ml | open Lwt.Infix
let src =
let src = Logs.Src.create "vmnet" ~doc:"vmnet" in
Logs.Src.set_level src (Some Logs.Info);
src
module Log = (val Logs.src_log src : Logs.LOG)
no VLAN
module Init = struct
type t = {
magic: string;
version: int32;
commit: string;
}
let to_string t =
Fmt.str "{ magic = %s; version = %ld; commit = %s }"
t.magic t.version t.commit
let sizeof = 5 + 4 + 40
let default = {
magic = "VMN3T";
version = 22l;
commit = "0123456789012345678901234567890123456789";
}
let marshal t rest =
Cstruct.blit_from_string t.magic 0 rest 0 5;
Cstruct.LE.set_uint32 rest 5 t.version;
Cstruct.blit_from_string t.commit 0 rest 9 40;
Cstruct.shift rest sizeof
let unmarshal rest =
let magic = Cstruct.(to_string @@ sub rest 0 5) in
let version = Cstruct.LE.get_uint32 rest 5 in
let commit = Cstruct.(to_string @@ sub rest 9 40) in
let rest = Cstruct.shift rest sizeof in
{ magic; version; commit }, rest
end
module Command = struct
type t =
36 bytes
36 bytes
| Bind_ipv4 of Ipaddr.V4.t * int * bool
let to_string = function
| Ethernet x -> Fmt.str "Ethernet %a" Uuidm.pp x
| Preferred_ipv4 (uuid, ip) ->
Fmt.str "Preferred_ipv4 %a %a" Uuidm.pp uuid Ipaddr.V4.pp ip
| Bind_ipv4 (ip, port, tcp) ->
Fmt.str "Bind_ipv4 %a %d %b" Ipaddr.V4.pp ip port tcp
let sizeof = 1 + 36 + 4
let marshal t rest = match t with
| Ethernet uuid ->
Cstruct.set_uint8 rest 0 1;
let rest = Cstruct.shift rest 1 in
let uuid_str = Uuidm.to_string uuid in
Cstruct.blit_from_string uuid_str 0 rest 0 (String.length uuid_str);
Cstruct.shift rest (String.length uuid_str)
| Preferred_ipv4 (uuid, ip) ->
Cstruct.set_uint8 rest 0 8;
let rest = Cstruct.shift rest 1 in
let uuid_str = Uuidm.to_string uuid in
Cstruct.blit_from_string uuid_str 0 rest 0 (String.length uuid_str);
let rest = Cstruct.shift rest (String.length uuid_str) in
Cstruct.LE.set_uint32 rest 0 (Ipaddr.V4.to_int32 ip);
Cstruct.shift rest 4
| Bind_ipv4 (ip, port, stream) ->
Cstruct.set_uint8 rest 0 6;
let rest = Cstruct.shift rest 1 in
Cstruct.LE.set_uint32 rest 0 (Ipaddr.V4.to_int32 ip);
let rest = Cstruct.shift rest 4 in
Cstruct.LE.set_uint16 rest 0 port;
let rest = Cstruct.shift rest 2 in
Cstruct.set_uint8 rest 0 (if stream then 0 else 1);
Cstruct.shift rest 1
let unmarshal rest =
let process_uuid uuid_str =
if (String.compare (String.make 36 '\000') uuid_str) = 0 then
begin
let random_uuid = (Uuidm.v `V4) in
Log.info (fun f ->
f "Generated UUID on behalf of client: %a" Uuidm.pp random_uuid);
(* generate random uuid on behalf of client if client sent
array of \0 *)
Some random_uuid
end else
Uuidm.of_string uuid_str
in
match Cstruct.get_uint8 rest 0 with
| 1 -> (* ethernet *)
let uuid_str = Cstruct.(to_string (sub rest 1 36)) in
let rest = Cstruct.shift rest 37 in
(match process_uuid uuid_str with
| Some uuid -> Ok (Ethernet uuid, rest)
| None -> Error (`Msg (Printf.sprintf "Invalid UUID: %s" uuid_str)))
| 8 -> (* preferred_ipv4 *)
let uuid_str = Cstruct.(to_string (sub rest 1 36)) in
let rest = Cstruct.shift rest 37 in
let ip = Ipaddr.V4.of_int32 (Cstruct.LE.get_uint32 rest 0) in
let rest = Cstruct.shift rest 4 in
(match process_uuid uuid_str with
| Some uuid -> Ok (Preferred_ipv4 (uuid, ip), rest)
| None -> Error (`Msg (Printf.sprintf "Invalid UUID: %s" uuid_str)))
| n -> Error (`Msg (Printf.sprintf "Unknown command: %d" n))
end
module Vif = struct
type t = {
mtu: int;
max_packet_size: int;
client_macaddr: Macaddr.t;
}
let to_string t =
Fmt.str "{ mtu = %d; max_packet_size = %d; client_macaddr = %s }"
t.mtu t.max_packet_size (Macaddr.to_string t.client_macaddr)
let create client_macaddr mtu () =
let max_packet_size = mtu + 50 in
{ mtu; max_packet_size; client_macaddr }
let sizeof = 2 + 2 + 6
let marshal t rest =
Cstruct.LE.set_uint16 rest 0 t.mtu;
Cstruct.LE.set_uint16 rest 2 t.max_packet_size;
Cstruct.blit_from_string (Macaddr.to_octets t.client_macaddr) 0 rest 4 6;
Cstruct.shift rest sizeof
let unmarshal rest =
let mtu = Cstruct.LE.get_uint16 rest 0 in
let max_packet_size = Cstruct.LE.get_uint16 rest 2 in
let mac = Cstruct.(to_string @@ sub rest 4 6) in
try
let client_macaddr = Macaddr.of_octets_exn mac in
Ok ({ mtu; max_packet_size; client_macaddr }, Cstruct.shift rest sizeof)
with _ ->
Error (`Msg (Printf.sprintf "Failed to parse MAC: [%s]" mac))
end
module Response = struct
type t =
10 bytes
| Disconnect of string (* disconnect reason *)
let sizeof = 1+1+256 (* leave room for error message and length *)
let marshal t rest = match t with
| Vif vif ->
Cstruct.set_uint8 rest 0 1;
let rest = Cstruct.shift rest 1 in
Vif.marshal vif rest
| Disconnect reason ->
Cstruct.set_uint8 rest 0 2;
let rest = Cstruct.shift rest 1 in
Cstruct.set_uint8 rest 0 (String.length reason);
let rest = Cstruct.shift rest 1 in
Cstruct.blit_from_string reason 0 rest 0 (String.length reason);
Cstruct.shift rest (String.length reason)
let unmarshal rest =
match Cstruct.get_uint8 rest 0 with
| 1 -> (* vif *)
let rest = Cstruct.shift rest 1 in
let vif = Vif.unmarshal rest in
(match vif with
| Ok (vif, rest) -> Ok (Vif vif, rest)
| Error msg -> Error (msg))
| 2 -> (* disconnect *)
let rest = Cstruct.shift rest 1 in
let str_len = Cstruct.get_uint8 rest 0 in
let rest = Cstruct.shift rest 1 in
let reason_str = Cstruct.(to_string (sub rest 0 str_len)) in
let rest = Cstruct.shift rest str_len in
Ok (Disconnect reason_str, rest)
| n -> Error (`Msg (Printf.sprintf "Unknown response: %d" n))
end
module Packet = struct
let sizeof = 2
let marshal t rest =
Cstruct.LE.set_uint16 rest 0 t
let unmarshal rest =
let t = Cstruct.LE.get_uint16 rest 0 in
Ok (t, Cstruct.shift rest sizeof)
end
module Make(C: Sig.CONN) = struct
module Channel = Mirage_channel.Make(C)
type error = [Mirage_net.Net.error | `Channel of Channel.write_error]
let pp_error ppf = function
| #Mirage_net.Net.error as e -> Mirage_net.Net.pp_error ppf e
| `Channel e -> Channel.pp_write_error ppf e
let failf fmt = Fmt.kstr (fun e -> Lwt_result.fail (`Msg e)) fmt
type t = {
mutable fd: Channel.t option;
stats: Mirage_net.stats;
client_uuid: Uuidm.t;
client_macaddr: Macaddr.t;
server_macaddr: Macaddr.t;
mtu: int;
mutable write_header: Cstruct.t;
write_m: Lwt_mutex.t;
mutable pcap: Unix.file_descr option;
mutable pcap_size_limit: int64 option;
pcap_m: Lwt_mutex.t;
mutable listeners: (Cstruct.t -> unit Lwt.t) list;
mutable listening: bool;
after_disconnect: unit Lwt.t;
after_disconnect_u: unit Lwt.u;
NB : The Mirage DHCP client calls ` listen ` and then later the
Tcp_direct_direct will do the same . This behaviour seems to be
undefined , but common implementations adopt a last - caller - wins
semantic . This is the last caller wins callback
Tcp_direct_direct will do the same. This behaviour seems to be
undefined, but common implementations adopt a last-caller-wins
semantic. This is the last caller wins callback *)
mutable callback: (Cstruct.t -> unit Lwt.t);
log_prefix: string;
}
let get_client_uuid t =
t.client_uuid
let get_client_macaddr t =
t.client_macaddr
let err_eof = Lwt_result.fail (`Msg "EOF")
let err_read e = failf "while reading: %a" Channel.pp_error e
let err_flush e = failf "while flushing: %a" Channel.pp_write_error e
let with_read x f =
x >>= function
| Error e -> err_read e
| Ok `Eof -> err_eof
| Ok (`Data x) -> f x
let with_flush x f =
x >>= function
| Error e -> err_flush e
| Ok () -> f ()
let with_msg x f =
match x with
| Ok x -> f x
| Error _ as e -> Lwt.return e
let server_log_prefix = "Vmnet.Server"
let client_log_prefix = "Vmnet.Client"
let server_negotiate ~fd ~connect_client_fn ~mtu =
let assign_uuid_ip uuid ip =
connect_client_fn uuid ip >>= fun mac ->
match mac with
| Error (`Msg msg) ->
let buf = Cstruct.create Response.sizeof in
let (_: Cstruct.t) = Response.marshal (Disconnect msg) buf in
Log.err (fun f -> f "%s.negotiate: disconnecting client, reason: %s" server_log_prefix msg);
Channel.write_buffer fd buf;
with_flush (Channel.flush fd) @@ fun () ->
failf "%s.negotiate: disconnecting client, reason: %s " server_log_prefix msg
| Ok client_macaddr ->
let vif = Vif.create client_macaddr mtu () in
let buf = Cstruct.create Response.sizeof in
let (_: Cstruct.t) = Response.marshal (Vif vif) buf in
Log.info (fun f -> f "%s.negotiate: sending %s" server_log_prefix (Vif.to_string vif));
Channel.write_buffer fd buf;
with_flush (Channel.flush fd) @@ fun () ->
Lwt_result.return (uuid, client_macaddr)
in
with_read (Channel.read_exactly ~len:Init.sizeof fd) @@ fun bufs ->
let buf = Cstruct.concat bufs in
let init, _ = Init.unmarshal buf in
Log.info (fun f -> f "%s.negotiate: received %s" server_log_prefix (Init.to_string init));
match init.version with
| 22l -> begin
let (_: Cstruct.t) = Init.marshal Init.default buf in
Channel.write_buffer fd buf;
with_flush (Channel.flush fd) @@ fun () ->
with_read (Channel.read_exactly ~len:Command.sizeof fd) @@ fun bufs ->
let buf = Cstruct.concat bufs in
with_msg (Command.unmarshal buf) @@ fun (command, _) ->
Log.info (fun f ->
f "%s.negotiate: received %s" server_log_prefix (Command.to_string command));
match command with
| Command.Bind_ipv4 _ ->
let buf = Cstruct.create Response.sizeof in
let (_: Cstruct.t) = Response.marshal (Disconnect "Unsupported command Bind_ipv4") buf in
Channel.write_buffer fd buf;
with_flush (Channel.flush fd) @@ fun () ->
failf "%s.negotiate: unsupported command Bind_ipv4" server_log_prefix
| Command.Ethernet uuid -> assign_uuid_ip uuid None
| Command.Preferred_ipv4 (uuid, ip) -> assign_uuid_ip uuid (Some ip)
end
| x ->
let (_: Cstruct.t) = Init.marshal Init.default buf in (* write our version before disconnecting *)
Channel.write_buffer fd buf;
with_flush (Channel.flush fd) @@ fun () ->
Log.err (fun f -> f "%s: Client requested protocol version %s, server only supports version %s" server_log_prefix (Int32.to_string x) (Int32.to_string Init.default.version));
Lwt_result.fail (`Msg "Client requested unsupported protocol version")
let client_negotiate ~uuid ?preferred_ip ~fd () =
let buf = Cstruct.create Init.sizeof in
let (_: Cstruct.t) = Init.marshal Init.default buf in
Channel.write_buffer fd buf;
with_flush (Channel.flush fd) @@ fun () ->
with_read (Channel.read_exactly ~len:Init.sizeof fd) @@ fun bufs ->
let buf = Cstruct.concat bufs in
let init, _ = Init.unmarshal buf in
Log.info (fun f -> f "%s.negotiate: received %s" client_log_prefix (Init.to_string init));
match init.version with
| 22l ->
let buf = Cstruct.create Command.sizeof in
let (_: Cstruct.t) = match preferred_ip with
| None -> Command.marshal (Command.Ethernet uuid) buf
| Some ip -> Command.marshal (Command.Preferred_ipv4 (uuid, ip)) buf
in
Channel.write_buffer fd buf;
with_flush (Channel.flush fd) @@ fun () ->
with_read (Channel.read_exactly ~len:Response.sizeof fd) @@ fun bufs ->
let buf = Cstruct.concat bufs in
let open Lwt_result.Infix in
Lwt.return (Response.unmarshal buf) >>= fun (response, _) ->
(match response with
| Vif vif ->
Log.debug (fun f -> f "%s.negotiate: vif %s" client_log_prefix (Vif.to_string vif));
Lwt_result.return (vif)
| Disconnect reason ->
let msg = "Server disconnected with reason: " ^ reason in
Log.err (fun f -> f "%s.negotiate: %s" client_log_prefix msg);
Lwt_result.fail (`Msg msg))
| x ->
Log.err (fun f -> f "%s: Server requires protocol version %s, we have %s" client_log_prefix (Int32.to_string x) (Int32.to_string Init.default.version));
Lwt_result.fail (`Msg "Server does not support our version of the protocol")
(* Use blocking I/O here so we can avoid Using Lwt_unix or Uwt. Ideally we
would use a FLOW handle referencing a file/stream. *)
let really_write fd str =
let rec loop ofs =
if ofs = (Bytes.length str)
then ()
else
let n = Unix.write fd str ofs (Bytes.length str - ofs) in
loop (ofs + n)
in
loop 0
let start_capture t ?size_limit filename =
Lwt_mutex.with_lock t.pcap_m (fun () ->
(match t.pcap with Some fd -> Unix.close fd | None -> ());
let fd =
Unix.openfile filename [ Unix.O_WRONLY; Unix.O_TRUNC; Unix.O_CREAT ]
0o0644
in
let buf = Cstruct.create Pcap.LE.sizeof_pcap_header in
let open Pcap.LE in
set_pcap_header_magic_number buf Pcap.magic_number;
set_pcap_header_version_major buf Pcap.major_version;
set_pcap_header_version_minor buf Pcap.minor_version;
set_pcap_header_thiszone buf 0l;
set_pcap_header_sigfigs buf 4l;
set_pcap_header_snaplen buf 1500l;
set_pcap_header_network buf
(Pcap.Network.to_int32 Pcap.Network.Ethernet);
really_write fd (Cstruct.to_string buf |> Bytes.of_string);
t.pcap <- Some fd;
t.pcap_size_limit <- size_limit;
Lwt.return ()
)
let stop_capture_already_locked t = match t.pcap with
| None -> ()
| Some fd ->
Unix.close fd;
t.pcap <- None;
t.pcap_size_limit <- None
let stop_capture t =
Lwt_mutex.with_lock t.pcap_m (fun () ->
stop_capture_already_locked t;
Lwt.return_unit
)
let make ~client_macaddr ~server_macaddr ~mtu ~client_uuid ~log_prefix fd =
let fd = Some fd in
let stats = Mirage_net.Stats.create () in
let write_header = Cstruct.create (1024 * Packet.sizeof) in
let write_m = Lwt_mutex.create () in
let pcap = None in
let pcap_size_limit = None in
let pcap_m = Lwt_mutex.create () in
let listeners = [] in
let listening = false in
let after_disconnect, after_disconnect_u = Lwt.task () in
let callback _ = Lwt.return_unit in
{ fd; stats; client_macaddr; client_uuid; server_macaddr; mtu; write_header;
write_m; pcap; pcap_size_limit; pcap_m; listeners; listening;
after_disconnect; after_disconnect_u; callback; log_prefix }
type fd = C.flow
let of_fd ~connect_client_fn ~server_macaddr ~mtu flow =
let open Lwt_result.Infix in
let channel = Channel.create flow in
server_negotiate ~fd:channel ~connect_client_fn ~mtu
>>= fun (client_uuid, client_macaddr) ->
let t = make ~client_macaddr ~server_macaddr ~mtu ~client_uuid
~log_prefix:server_log_prefix channel in
Lwt_result.return t
let client_of_fd ~uuid ?preferred_ip ~server_macaddr flow =
let open Lwt_result.Infix in
let channel = Channel.create flow in
client_negotiate ~uuid ?preferred_ip ~fd:channel ()
>>= fun vif ->
let t =
make ~client_macaddr:server_macaddr
~server_macaddr:vif.Vif.client_macaddr ~mtu:vif.Vif.mtu ~client_uuid:uuid
~log_prefix:client_log_prefix
channel in
Lwt_result.return t
let disconnect t = match t.fd with
| None -> Lwt.return ()
| Some fd ->
Log.info (fun f -> f "%s.disconnect" t.log_prefix);
t.fd <- None;
Log.debug (fun f -> f "%s.disconnect flushing channel" t.log_prefix);
(Channel.flush fd >|= function
| Ok () -> ()
| Error e ->
Log.err (fun l ->
l "%s error while disconnecting the vmtnet connection: %a"
t.log_prefix Channel.pp_write_error e);
) >|= fun () ->
Lwt.wakeup_later t.after_disconnect_u ()
let after_disconnect t = t.after_disconnect
let capture t bufs =
match t.pcap with
| None -> Lwt.return ()
| Some pcap ->
Lwt_mutex.with_lock t.pcap_m (fun () ->
let len = List.(fold_left (+) 0 (map Cstruct.length bufs)) in
let time = Unix.gettimeofday () in
let secs = Int32.of_float time in
let usecs = Int32.of_float (1e6 *. (time -. (floor time))) in
let buf = Cstruct.create Pcap.sizeof_pcap_packet in
let open Pcap.LE in
set_pcap_packet_ts_sec buf secs;
set_pcap_packet_ts_usec buf usecs;
set_pcap_packet_incl_len buf @@ Int32.of_int len;
set_pcap_packet_orig_len buf @@ Int32.of_int len;
really_write pcap (Cstruct.to_string buf |> Bytes.of_string);
List.iter (fun buf -> really_write pcap (Cstruct.to_string buf |> Bytes.of_string)) bufs;
match t.pcap_size_limit with
| None -> Lwt.return () (* no limit *)
| Some limit ->
let limit = Int64.(sub limit (of_int len)) in
t.pcap_size_limit <- Some limit;
if limit < 0L then stop_capture_already_locked t;
Lwt.return_unit
)
let err_eof t =
Log.info (fun f -> f "%s.listen: read EOF so closing connection" t.log_prefix);
disconnect t >>= fun () ->
Lwt.return false
let err_unexpected t pp e =
Log.err (fun f ->
f "%s listen: caught unexpected %a: disconnecting" t.log_prefix pp e);
disconnect t >>= fun () ->
Lwt.return false
let with_fd t f = match t.fd with
| None -> Lwt.return false
| Some fd -> f fd
let with_read t x f =
x >>= function
| Error e -> err_unexpected t Channel.pp_error e
| Ok `Eof -> err_eof t
| Ok (`Data x) -> f x
let with_msg t x f =
match x with
| Error (`Msg e) -> err_unexpected t Fmt.string e
| Ok x -> f x
let listen_nocancel t new_callback =
Log.info (fun f -> f "%s.listen: rebinding the primary listen callback" t.log_prefix);
t.callback <- new_callback;
let last_error_log = ref 0. in
let rec loop () =
(with_fd t @@ fun fd ->
with_read t (Channel.read_exactly ~len:Packet.sizeof fd) @@ fun bufs ->
let read_header = Cstruct.concat bufs in
with_msg t (Packet.unmarshal read_header) @@ fun (len, _) ->
with_read t (Channel.read_exactly ~len fd) @@ fun bufs ->
capture t bufs >>= fun () ->
Log.debug (fun f ->
let b = Buffer.create 128 in
List.iter (Cstruct.hexdump_to_buffer b) bufs;
f "received%s" (Buffer.contents b)
);
let buf = Cstruct.concat bufs in
let callback buf =
Lwt.catch (fun () -> t.callback buf)
(function
| e ->
let now = Unix.gettimeofday () in
if (now -. !last_error_log) > 30. then begin
Log.err (fun f ->
f "%s.listen callback caught %a" t.log_prefix Fmt.exn e);
last_error_log := now;
end;
Lwt.return_unit
)
in
Lwt.async (fun () -> callback buf);
List.iter (fun callback ->
Lwt.async (fun () -> callback buf)
) t.listeners;
Lwt.return true
) >>= function
| true -> loop ()
| false -> Lwt.return ()
in
begin
if not t.listening then begin
t.listening <- true;
Log.info (fun f -> f "%s.listen: starting event loop" t.log_prefix);
loop ()
end else begin
Block forever without running a second loop ( )
Log.info (fun f -> f "%s.listen: blocking until disconnect" t.log_prefix);
t.after_disconnect
>>= fun () ->
Log.info (fun f -> f "%s.listen: disconnected" t.log_prefix);
Lwt.return_unit
end
end
>>= fun () ->
Log.info (fun f -> f "%s.listen returning Ok()" t.log_prefix);
Lwt.return (Ok ())
let listen t ~header_size:_ new_callback =
let task, u = Lwt.task () in
There is a clash over the Netif.listen callbacks between the DHCP client ( which
wants ethernet frames ) and the rest of the TCP / IP stack . It seems to work
usually by accident : first the DHCP client calls ` listen ` , performs a transaction
and then the main stack calls ` listen ` and this overrides the DHCP client listen .
Unfortunately the DHCP client calls ` cancel ` after 4s which can ripple through
and cancel the ethernet ` read ` . We work around that by ignoring ` cancel ` .
wants ethernet frames) and the rest of the TCP/IP stack. It seems to work
usually by accident: first the DHCP client calls `listen`, performs a transaction
and then the main stack calls `listen` and this overrides the DHCP client listen.
Unfortunately the DHCP client calls `cancel` after 4s which can ripple through
and cancel the ethernet `read`. We work around that by ignoring `cancel`. *)
Lwt.on_cancel task (fun () ->
Log.warn (fun f -> f "%s.listen: ignoring Lwt.cancel (called from the DHCP client)" t.log_prefix);
);
let _ =
listen_nocancel t new_callback
>>= fun x ->
Lwt.wakeup_later u x;
Lwt.return_unit
in
task
let write t ~size fill =
Lwt_mutex.with_lock t.write_m (fun () ->
let allocated = Cstruct.create (size + t.mtu) in
let len = fill allocated in
let buf = Cstruct.sub allocated 0 len in
capture t [ buf ] >>= fun () ->
if len > (t.mtu + ethernet_header_length) then begin
Log.err (fun f ->
f "%s Dropping over-large ethernet frame, length = %d, mtu = \
%d" t.log_prefix len t.mtu
);
Lwt.return (Ok ())
end else begin
if Cstruct.length t.write_header < Packet.sizeof then begin
t.write_header <- Cstruct.create (1024 * Packet.sizeof)
end;
Packet.marshal len t.write_header;
match t.fd with
| None -> Lwt.return (Error `Disconnected)
| Some fd ->
Channel.write_buffer fd
(Cstruct.sub t.write_header 0 Packet.sizeof);
t.write_header <- Cstruct.shift t.write_header Packet.sizeof;
Log.debug (fun f ->
let b = Buffer.create 128 in
Cstruct.hexdump_to_buffer b buf;
f "sending%s" (Buffer.contents b)
);
Channel.write_buffer fd buf;
Channel.flush fd >|= function
| Ok () -> Ok ()
| Error e -> Error (`Channel e)
end)
let add_listener t callback = t.listeners <- callback :: t.listeners
let mac t = t.server_macaddr
let mtu t = t.mtu
let get_stats_counters t = t.stats
let reset_stats_counters t = Mirage_net.Stats.reset t.stats
end
| null | https://raw.githubusercontent.com/moby/vpnkit/6039eac025e0740e530f2ff11f57d6d990d1c4a1/src/hostnet/vmnet.ml | ocaml | generate random uuid on behalf of client if client sent
array of \0
ethernet
preferred_ipv4
disconnect reason
leave room for error message and length
vif
disconnect
write our version before disconnecting
Use blocking I/O here so we can avoid Using Lwt_unix or Uwt. Ideally we
would use a FLOW handle referencing a file/stream.
no limit | open Lwt.Infix
let src =
let src = Logs.Src.create "vmnet" ~doc:"vmnet" in
Logs.Src.set_level src (Some Logs.Info);
src
module Log = (val Logs.src_log src : Logs.LOG)
no VLAN
module Init = struct
type t = {
magic: string;
version: int32;
commit: string;
}
let to_string t =
Fmt.str "{ magic = %s; version = %ld; commit = %s }"
t.magic t.version t.commit
let sizeof = 5 + 4 + 40
let default = {
magic = "VMN3T";
version = 22l;
commit = "0123456789012345678901234567890123456789";
}
let marshal t rest =
Cstruct.blit_from_string t.magic 0 rest 0 5;
Cstruct.LE.set_uint32 rest 5 t.version;
Cstruct.blit_from_string t.commit 0 rest 9 40;
Cstruct.shift rest sizeof
let unmarshal rest =
let magic = Cstruct.(to_string @@ sub rest 0 5) in
let version = Cstruct.LE.get_uint32 rest 5 in
let commit = Cstruct.(to_string @@ sub rest 9 40) in
let rest = Cstruct.shift rest sizeof in
{ magic; version; commit }, rest
end
module Command = struct
type t =
36 bytes
36 bytes
| Bind_ipv4 of Ipaddr.V4.t * int * bool
let to_string = function
| Ethernet x -> Fmt.str "Ethernet %a" Uuidm.pp x
| Preferred_ipv4 (uuid, ip) ->
Fmt.str "Preferred_ipv4 %a %a" Uuidm.pp uuid Ipaddr.V4.pp ip
| Bind_ipv4 (ip, port, tcp) ->
Fmt.str "Bind_ipv4 %a %d %b" Ipaddr.V4.pp ip port tcp
let sizeof = 1 + 36 + 4
let marshal t rest = match t with
| Ethernet uuid ->
Cstruct.set_uint8 rest 0 1;
let rest = Cstruct.shift rest 1 in
let uuid_str = Uuidm.to_string uuid in
Cstruct.blit_from_string uuid_str 0 rest 0 (String.length uuid_str);
Cstruct.shift rest (String.length uuid_str)
| Preferred_ipv4 (uuid, ip) ->
Cstruct.set_uint8 rest 0 8;
let rest = Cstruct.shift rest 1 in
let uuid_str = Uuidm.to_string uuid in
Cstruct.blit_from_string uuid_str 0 rest 0 (String.length uuid_str);
let rest = Cstruct.shift rest (String.length uuid_str) in
Cstruct.LE.set_uint32 rest 0 (Ipaddr.V4.to_int32 ip);
Cstruct.shift rest 4
| Bind_ipv4 (ip, port, stream) ->
Cstruct.set_uint8 rest 0 6;
let rest = Cstruct.shift rest 1 in
Cstruct.LE.set_uint32 rest 0 (Ipaddr.V4.to_int32 ip);
let rest = Cstruct.shift rest 4 in
Cstruct.LE.set_uint16 rest 0 port;
let rest = Cstruct.shift rest 2 in
Cstruct.set_uint8 rest 0 (if stream then 0 else 1);
Cstruct.shift rest 1
let unmarshal rest =
let process_uuid uuid_str =
if (String.compare (String.make 36 '\000') uuid_str) = 0 then
begin
let random_uuid = (Uuidm.v `V4) in
Log.info (fun f ->
f "Generated UUID on behalf of client: %a" Uuidm.pp random_uuid);
Some random_uuid
end else
Uuidm.of_string uuid_str
in
match Cstruct.get_uint8 rest 0 with
let uuid_str = Cstruct.(to_string (sub rest 1 36)) in
let rest = Cstruct.shift rest 37 in
(match process_uuid uuid_str with
| Some uuid -> Ok (Ethernet uuid, rest)
| None -> Error (`Msg (Printf.sprintf "Invalid UUID: %s" uuid_str)))
let uuid_str = Cstruct.(to_string (sub rest 1 36)) in
let rest = Cstruct.shift rest 37 in
let ip = Ipaddr.V4.of_int32 (Cstruct.LE.get_uint32 rest 0) in
let rest = Cstruct.shift rest 4 in
(match process_uuid uuid_str with
| Some uuid -> Ok (Preferred_ipv4 (uuid, ip), rest)
| None -> Error (`Msg (Printf.sprintf "Invalid UUID: %s" uuid_str)))
| n -> Error (`Msg (Printf.sprintf "Unknown command: %d" n))
end
module Vif = struct
type t = {
mtu: int;
max_packet_size: int;
client_macaddr: Macaddr.t;
}
let to_string t =
Fmt.str "{ mtu = %d; max_packet_size = %d; client_macaddr = %s }"
t.mtu t.max_packet_size (Macaddr.to_string t.client_macaddr)
let create client_macaddr mtu () =
let max_packet_size = mtu + 50 in
{ mtu; max_packet_size; client_macaddr }
let sizeof = 2 + 2 + 6
let marshal t rest =
Cstruct.LE.set_uint16 rest 0 t.mtu;
Cstruct.LE.set_uint16 rest 2 t.max_packet_size;
Cstruct.blit_from_string (Macaddr.to_octets t.client_macaddr) 0 rest 4 6;
Cstruct.shift rest sizeof
let unmarshal rest =
let mtu = Cstruct.LE.get_uint16 rest 0 in
let max_packet_size = Cstruct.LE.get_uint16 rest 2 in
let mac = Cstruct.(to_string @@ sub rest 4 6) in
try
let client_macaddr = Macaddr.of_octets_exn mac in
Ok ({ mtu; max_packet_size; client_macaddr }, Cstruct.shift rest sizeof)
with _ ->
Error (`Msg (Printf.sprintf "Failed to parse MAC: [%s]" mac))
end
module Response = struct
type t =
10 bytes
let marshal t rest = match t with
| Vif vif ->
Cstruct.set_uint8 rest 0 1;
let rest = Cstruct.shift rest 1 in
Vif.marshal vif rest
| Disconnect reason ->
Cstruct.set_uint8 rest 0 2;
let rest = Cstruct.shift rest 1 in
Cstruct.set_uint8 rest 0 (String.length reason);
let rest = Cstruct.shift rest 1 in
Cstruct.blit_from_string reason 0 rest 0 (String.length reason);
Cstruct.shift rest (String.length reason)
let unmarshal rest =
match Cstruct.get_uint8 rest 0 with
let rest = Cstruct.shift rest 1 in
let vif = Vif.unmarshal rest in
(match vif with
| Ok (vif, rest) -> Ok (Vif vif, rest)
| Error msg -> Error (msg))
let rest = Cstruct.shift rest 1 in
let str_len = Cstruct.get_uint8 rest 0 in
let rest = Cstruct.shift rest 1 in
let reason_str = Cstruct.(to_string (sub rest 0 str_len)) in
let rest = Cstruct.shift rest str_len in
Ok (Disconnect reason_str, rest)
| n -> Error (`Msg (Printf.sprintf "Unknown response: %d" n))
end
module Packet = struct
let sizeof = 2
let marshal t rest =
Cstruct.LE.set_uint16 rest 0 t
let unmarshal rest =
let t = Cstruct.LE.get_uint16 rest 0 in
Ok (t, Cstruct.shift rest sizeof)
end
module Make(C: Sig.CONN) = struct
module Channel = Mirage_channel.Make(C)
type error = [Mirage_net.Net.error | `Channel of Channel.write_error]
let pp_error ppf = function
| #Mirage_net.Net.error as e -> Mirage_net.Net.pp_error ppf e
| `Channel e -> Channel.pp_write_error ppf e
let failf fmt = Fmt.kstr (fun e -> Lwt_result.fail (`Msg e)) fmt
type t = {
mutable fd: Channel.t option;
stats: Mirage_net.stats;
client_uuid: Uuidm.t;
client_macaddr: Macaddr.t;
server_macaddr: Macaddr.t;
mtu: int;
mutable write_header: Cstruct.t;
write_m: Lwt_mutex.t;
mutable pcap: Unix.file_descr option;
mutable pcap_size_limit: int64 option;
pcap_m: Lwt_mutex.t;
mutable listeners: (Cstruct.t -> unit Lwt.t) list;
mutable listening: bool;
after_disconnect: unit Lwt.t;
after_disconnect_u: unit Lwt.u;
NB : The Mirage DHCP client calls ` listen ` and then later the
Tcp_direct_direct will do the same . This behaviour seems to be
undefined , but common implementations adopt a last - caller - wins
semantic . This is the last caller wins callback
Tcp_direct_direct will do the same. This behaviour seems to be
undefined, but common implementations adopt a last-caller-wins
semantic. This is the last caller wins callback *)
mutable callback: (Cstruct.t -> unit Lwt.t);
log_prefix: string;
}
let get_client_uuid t =
t.client_uuid
let get_client_macaddr t =
t.client_macaddr
let err_eof = Lwt_result.fail (`Msg "EOF")
let err_read e = failf "while reading: %a" Channel.pp_error e
let err_flush e = failf "while flushing: %a" Channel.pp_write_error e
let with_read x f =
x >>= function
| Error e -> err_read e
| Ok `Eof -> err_eof
| Ok (`Data x) -> f x
let with_flush x f =
x >>= function
| Error e -> err_flush e
| Ok () -> f ()
let with_msg x f =
match x with
| Ok x -> f x
| Error _ as e -> Lwt.return e
let server_log_prefix = "Vmnet.Server"
let client_log_prefix = "Vmnet.Client"
let server_negotiate ~fd ~connect_client_fn ~mtu =
let assign_uuid_ip uuid ip =
connect_client_fn uuid ip >>= fun mac ->
match mac with
| Error (`Msg msg) ->
let buf = Cstruct.create Response.sizeof in
let (_: Cstruct.t) = Response.marshal (Disconnect msg) buf in
Log.err (fun f -> f "%s.negotiate: disconnecting client, reason: %s" server_log_prefix msg);
Channel.write_buffer fd buf;
with_flush (Channel.flush fd) @@ fun () ->
failf "%s.negotiate: disconnecting client, reason: %s " server_log_prefix msg
| Ok client_macaddr ->
let vif = Vif.create client_macaddr mtu () in
let buf = Cstruct.create Response.sizeof in
let (_: Cstruct.t) = Response.marshal (Vif vif) buf in
Log.info (fun f -> f "%s.negotiate: sending %s" server_log_prefix (Vif.to_string vif));
Channel.write_buffer fd buf;
with_flush (Channel.flush fd) @@ fun () ->
Lwt_result.return (uuid, client_macaddr)
in
with_read (Channel.read_exactly ~len:Init.sizeof fd) @@ fun bufs ->
let buf = Cstruct.concat bufs in
let init, _ = Init.unmarshal buf in
Log.info (fun f -> f "%s.negotiate: received %s" server_log_prefix (Init.to_string init));
match init.version with
| 22l -> begin
let (_: Cstruct.t) = Init.marshal Init.default buf in
Channel.write_buffer fd buf;
with_flush (Channel.flush fd) @@ fun () ->
with_read (Channel.read_exactly ~len:Command.sizeof fd) @@ fun bufs ->
let buf = Cstruct.concat bufs in
with_msg (Command.unmarshal buf) @@ fun (command, _) ->
Log.info (fun f ->
f "%s.negotiate: received %s" server_log_prefix (Command.to_string command));
match command with
| Command.Bind_ipv4 _ ->
let buf = Cstruct.create Response.sizeof in
let (_: Cstruct.t) = Response.marshal (Disconnect "Unsupported command Bind_ipv4") buf in
Channel.write_buffer fd buf;
with_flush (Channel.flush fd) @@ fun () ->
failf "%s.negotiate: unsupported command Bind_ipv4" server_log_prefix
| Command.Ethernet uuid -> assign_uuid_ip uuid None
| Command.Preferred_ipv4 (uuid, ip) -> assign_uuid_ip uuid (Some ip)
end
| x ->
Channel.write_buffer fd buf;
with_flush (Channel.flush fd) @@ fun () ->
Log.err (fun f -> f "%s: Client requested protocol version %s, server only supports version %s" server_log_prefix (Int32.to_string x) (Int32.to_string Init.default.version));
Lwt_result.fail (`Msg "Client requested unsupported protocol version")
let client_negotiate ~uuid ?preferred_ip ~fd () =
let buf = Cstruct.create Init.sizeof in
let (_: Cstruct.t) = Init.marshal Init.default buf in
Channel.write_buffer fd buf;
with_flush (Channel.flush fd) @@ fun () ->
with_read (Channel.read_exactly ~len:Init.sizeof fd) @@ fun bufs ->
let buf = Cstruct.concat bufs in
let init, _ = Init.unmarshal buf in
Log.info (fun f -> f "%s.negotiate: received %s" client_log_prefix (Init.to_string init));
match init.version with
| 22l ->
let buf = Cstruct.create Command.sizeof in
let (_: Cstruct.t) = match preferred_ip with
| None -> Command.marshal (Command.Ethernet uuid) buf
| Some ip -> Command.marshal (Command.Preferred_ipv4 (uuid, ip)) buf
in
Channel.write_buffer fd buf;
with_flush (Channel.flush fd) @@ fun () ->
with_read (Channel.read_exactly ~len:Response.sizeof fd) @@ fun bufs ->
let buf = Cstruct.concat bufs in
let open Lwt_result.Infix in
Lwt.return (Response.unmarshal buf) >>= fun (response, _) ->
(match response with
| Vif vif ->
Log.debug (fun f -> f "%s.negotiate: vif %s" client_log_prefix (Vif.to_string vif));
Lwt_result.return (vif)
| Disconnect reason ->
let msg = "Server disconnected with reason: " ^ reason in
Log.err (fun f -> f "%s.negotiate: %s" client_log_prefix msg);
Lwt_result.fail (`Msg msg))
| x ->
Log.err (fun f -> f "%s: Server requires protocol version %s, we have %s" client_log_prefix (Int32.to_string x) (Int32.to_string Init.default.version));
Lwt_result.fail (`Msg "Server does not support our version of the protocol")
let really_write fd str =
let rec loop ofs =
if ofs = (Bytes.length str)
then ()
else
let n = Unix.write fd str ofs (Bytes.length str - ofs) in
loop (ofs + n)
in
loop 0
let start_capture t ?size_limit filename =
Lwt_mutex.with_lock t.pcap_m (fun () ->
(match t.pcap with Some fd -> Unix.close fd | None -> ());
let fd =
Unix.openfile filename [ Unix.O_WRONLY; Unix.O_TRUNC; Unix.O_CREAT ]
0o0644
in
let buf = Cstruct.create Pcap.LE.sizeof_pcap_header in
let open Pcap.LE in
set_pcap_header_magic_number buf Pcap.magic_number;
set_pcap_header_version_major buf Pcap.major_version;
set_pcap_header_version_minor buf Pcap.minor_version;
set_pcap_header_thiszone buf 0l;
set_pcap_header_sigfigs buf 4l;
set_pcap_header_snaplen buf 1500l;
set_pcap_header_network buf
(Pcap.Network.to_int32 Pcap.Network.Ethernet);
really_write fd (Cstruct.to_string buf |> Bytes.of_string);
t.pcap <- Some fd;
t.pcap_size_limit <- size_limit;
Lwt.return ()
)
let stop_capture_already_locked t = match t.pcap with
| None -> ()
| Some fd ->
Unix.close fd;
t.pcap <- None;
t.pcap_size_limit <- None
let stop_capture t =
Lwt_mutex.with_lock t.pcap_m (fun () ->
stop_capture_already_locked t;
Lwt.return_unit
)
let make ~client_macaddr ~server_macaddr ~mtu ~client_uuid ~log_prefix fd =
let fd = Some fd in
let stats = Mirage_net.Stats.create () in
let write_header = Cstruct.create (1024 * Packet.sizeof) in
let write_m = Lwt_mutex.create () in
let pcap = None in
let pcap_size_limit = None in
let pcap_m = Lwt_mutex.create () in
let listeners = [] in
let listening = false in
let after_disconnect, after_disconnect_u = Lwt.task () in
let callback _ = Lwt.return_unit in
{ fd; stats; client_macaddr; client_uuid; server_macaddr; mtu; write_header;
write_m; pcap; pcap_size_limit; pcap_m; listeners; listening;
after_disconnect; after_disconnect_u; callback; log_prefix }
type fd = C.flow
let of_fd ~connect_client_fn ~server_macaddr ~mtu flow =
let open Lwt_result.Infix in
let channel = Channel.create flow in
server_negotiate ~fd:channel ~connect_client_fn ~mtu
>>= fun (client_uuid, client_macaddr) ->
let t = make ~client_macaddr ~server_macaddr ~mtu ~client_uuid
~log_prefix:server_log_prefix channel in
Lwt_result.return t
let client_of_fd ~uuid ?preferred_ip ~server_macaddr flow =
let open Lwt_result.Infix in
let channel = Channel.create flow in
client_negotiate ~uuid ?preferred_ip ~fd:channel ()
>>= fun vif ->
let t =
make ~client_macaddr:server_macaddr
~server_macaddr:vif.Vif.client_macaddr ~mtu:vif.Vif.mtu ~client_uuid:uuid
~log_prefix:client_log_prefix
channel in
Lwt_result.return t
let disconnect t = match t.fd with
| None -> Lwt.return ()
| Some fd ->
Log.info (fun f -> f "%s.disconnect" t.log_prefix);
t.fd <- None;
Log.debug (fun f -> f "%s.disconnect flushing channel" t.log_prefix);
(Channel.flush fd >|= function
| Ok () -> ()
| Error e ->
Log.err (fun l ->
l "%s error while disconnecting the vmtnet connection: %a"
t.log_prefix Channel.pp_write_error e);
) >|= fun () ->
Lwt.wakeup_later t.after_disconnect_u ()
let after_disconnect t = t.after_disconnect
let capture t bufs =
match t.pcap with
| None -> Lwt.return ()
| Some pcap ->
Lwt_mutex.with_lock t.pcap_m (fun () ->
let len = List.(fold_left (+) 0 (map Cstruct.length bufs)) in
let time = Unix.gettimeofday () in
let secs = Int32.of_float time in
let usecs = Int32.of_float (1e6 *. (time -. (floor time))) in
let buf = Cstruct.create Pcap.sizeof_pcap_packet in
let open Pcap.LE in
set_pcap_packet_ts_sec buf secs;
set_pcap_packet_ts_usec buf usecs;
set_pcap_packet_incl_len buf @@ Int32.of_int len;
set_pcap_packet_orig_len buf @@ Int32.of_int len;
really_write pcap (Cstruct.to_string buf |> Bytes.of_string);
List.iter (fun buf -> really_write pcap (Cstruct.to_string buf |> Bytes.of_string)) bufs;
match t.pcap_size_limit with
| Some limit ->
let limit = Int64.(sub limit (of_int len)) in
t.pcap_size_limit <- Some limit;
if limit < 0L then stop_capture_already_locked t;
Lwt.return_unit
)
let err_eof t =
Log.info (fun f -> f "%s.listen: read EOF so closing connection" t.log_prefix);
disconnect t >>= fun () ->
Lwt.return false
let err_unexpected t pp e =
Log.err (fun f ->
f "%s listen: caught unexpected %a: disconnecting" t.log_prefix pp e);
disconnect t >>= fun () ->
Lwt.return false
let with_fd t f = match t.fd with
| None -> Lwt.return false
| Some fd -> f fd
let with_read t x f =
x >>= function
| Error e -> err_unexpected t Channel.pp_error e
| Ok `Eof -> err_eof t
| Ok (`Data x) -> f x
let with_msg t x f =
match x with
| Error (`Msg e) -> err_unexpected t Fmt.string e
| Ok x -> f x
let listen_nocancel t new_callback =
Log.info (fun f -> f "%s.listen: rebinding the primary listen callback" t.log_prefix);
t.callback <- new_callback;
let last_error_log = ref 0. in
let rec loop () =
(with_fd t @@ fun fd ->
with_read t (Channel.read_exactly ~len:Packet.sizeof fd) @@ fun bufs ->
let read_header = Cstruct.concat bufs in
with_msg t (Packet.unmarshal read_header) @@ fun (len, _) ->
with_read t (Channel.read_exactly ~len fd) @@ fun bufs ->
capture t bufs >>= fun () ->
Log.debug (fun f ->
let b = Buffer.create 128 in
List.iter (Cstruct.hexdump_to_buffer b) bufs;
f "received%s" (Buffer.contents b)
);
let buf = Cstruct.concat bufs in
let callback buf =
Lwt.catch (fun () -> t.callback buf)
(function
| e ->
let now = Unix.gettimeofday () in
if (now -. !last_error_log) > 30. then begin
Log.err (fun f ->
f "%s.listen callback caught %a" t.log_prefix Fmt.exn e);
last_error_log := now;
end;
Lwt.return_unit
)
in
Lwt.async (fun () -> callback buf);
List.iter (fun callback ->
Lwt.async (fun () -> callback buf)
) t.listeners;
Lwt.return true
) >>= function
| true -> loop ()
| false -> Lwt.return ()
in
begin
if not t.listening then begin
t.listening <- true;
Log.info (fun f -> f "%s.listen: starting event loop" t.log_prefix);
loop ()
end else begin
Block forever without running a second loop ( )
Log.info (fun f -> f "%s.listen: blocking until disconnect" t.log_prefix);
t.after_disconnect
>>= fun () ->
Log.info (fun f -> f "%s.listen: disconnected" t.log_prefix);
Lwt.return_unit
end
end
>>= fun () ->
Log.info (fun f -> f "%s.listen returning Ok()" t.log_prefix);
Lwt.return (Ok ())
let listen t ~header_size:_ new_callback =
let task, u = Lwt.task () in
There is a clash over the Netif.listen callbacks between the DHCP client ( which
wants ethernet frames ) and the rest of the TCP / IP stack . It seems to work
usually by accident : first the DHCP client calls ` listen ` , performs a transaction
and then the main stack calls ` listen ` and this overrides the DHCP client listen .
Unfortunately the DHCP client calls ` cancel ` after 4s which can ripple through
and cancel the ethernet ` read ` . We work around that by ignoring ` cancel ` .
wants ethernet frames) and the rest of the TCP/IP stack. It seems to work
usually by accident: first the DHCP client calls `listen`, performs a transaction
and then the main stack calls `listen` and this overrides the DHCP client listen.
Unfortunately the DHCP client calls `cancel` after 4s which can ripple through
and cancel the ethernet `read`. We work around that by ignoring `cancel`. *)
Lwt.on_cancel task (fun () ->
Log.warn (fun f -> f "%s.listen: ignoring Lwt.cancel (called from the DHCP client)" t.log_prefix);
);
let _ =
listen_nocancel t new_callback
>>= fun x ->
Lwt.wakeup_later u x;
Lwt.return_unit
in
task
let write t ~size fill =
Lwt_mutex.with_lock t.write_m (fun () ->
let allocated = Cstruct.create (size + t.mtu) in
let len = fill allocated in
let buf = Cstruct.sub allocated 0 len in
capture t [ buf ] >>= fun () ->
if len > (t.mtu + ethernet_header_length) then begin
Log.err (fun f ->
f "%s Dropping over-large ethernet frame, length = %d, mtu = \
%d" t.log_prefix len t.mtu
);
Lwt.return (Ok ())
end else begin
if Cstruct.length t.write_header < Packet.sizeof then begin
t.write_header <- Cstruct.create (1024 * Packet.sizeof)
end;
Packet.marshal len t.write_header;
match t.fd with
| None -> Lwt.return (Error `Disconnected)
| Some fd ->
Channel.write_buffer fd
(Cstruct.sub t.write_header 0 Packet.sizeof);
t.write_header <- Cstruct.shift t.write_header Packet.sizeof;
Log.debug (fun f ->
let b = Buffer.create 128 in
Cstruct.hexdump_to_buffer b buf;
f "sending%s" (Buffer.contents b)
);
Channel.write_buffer fd buf;
Channel.flush fd >|= function
| Ok () -> Ok ()
| Error e -> Error (`Channel e)
end)
let add_listener t callback = t.listeners <- callback :: t.listeners
let mac t = t.server_macaddr
let mtu t = t.mtu
let get_stats_counters t = t.stats
let reset_stats_counters t = Mirage_net.Stats.reset t.stats
end
|
b031ab10fdbbafc13bb2c9419ed6fceef440477f1389ba01582f621f0943c79c | amnh/poy5 | genNonAdd.mli | POY 5.1.1 . A phylogenetic analysis program using Dynamic Homologies .
Copyright ( C ) 2011 , , , Ward Wheeler
and the American Museum of Natural History .
(* *)
(* This program is free software; you can redistribute it and/or modify *)
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
(* (at your option) any later version. *)
(* *)
(* This program is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU General Public License for more details. *)
(* *)
You should have received a copy of the GNU General Public License
along with this program ; if not , write to the Free Software
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston ,
USA
* General module implements non - additive characters with sequence
characters so their alphabet size can be unbounded .
characters so their alphabet size can be unbounded. *)
(*cost tuple, like the one in seqCS.ml*)
type cost_tuple =
{
min : float;
max : float;
}
type gnonadd_sequence = {
seq : Sequence.s;
costs : cost_tuple;
weights : float array;
}
* [ init_gnonadd_t weights ] create a new gnoadd with input seq
val init_gnonadd_t : Sequence.s -> float array option -> gnonadd_sequence
(**[to_single alph cost_mat parent mine] return single assignment of mine based
on parent. return cost between parent and new single*)
val to_single :
Alphabet.a -> Cost_matrix.Two_D.m -> gnonadd_sequence -> gnonadd_sequence -> gnonadd_sequence * float
* [ distance cost_mat ] return distance between two sequence .
val distance :
gnonadd_sequence -> gnonadd_sequence -> Cost_matrix.Two_D.m -> float
* [ median cost_mat a b ] return median of two general nonaddictive sequence
val median :
Cost_matrix.Two_D.m -> gnonadd_sequence -> gnonadd_sequence -> gnonadd_sequence * float
* [ median_3_fake h parent mine ]
val median_3_fake : Alphabet.a -> Cost_matrix.Two_D.m -> gnonadd_sequence ->
gnonadd_sequence -> gnonadd_sequence -> gnonadd_sequence -> gnonadd_sequence * float
* [ median_3 h parent mine ] return a new median3 for mine
* with info from parent , child1 and child2
* with info from parent,child1 and child2 *)
val median_3 : Cost_matrix.Three_D.m -> Cost_matrix.Two_D.m -> gnonadd_sequence
-> gnonadd_sequence -> gnonadd_sequence -> gnonadd_sequence -> gnonadd_sequence
* [ compare ] compare the sequence of two input .
val compare : gnonadd_sequence -> gnonadd_sequence -> int
(** [get_max_cost cost_tuple] return the max of cost_tuple*)
val get_max_cost : cost_tuple -> float
| null | https://raw.githubusercontent.com/amnh/poy5/da563a2339d3fa9c0110ae86cc35fad576f728ab/src/genNonAdd.mli | ocaml |
This program is free software; you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
cost tuple, like the one in seqCS.ml
*[to_single alph cost_mat parent mine] return single assignment of mine based
on parent. return cost between parent and new single
* [get_max_cost cost_tuple] return the max of cost_tuple | POY 5.1.1 . A phylogenetic analysis program using Dynamic Homologies .
Copyright ( C ) 2011 , , , Ward Wheeler
and the American Museum of Natural History .
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
You should have received a copy of the GNU General Public License
along with this program ; if not , write to the Free Software
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston ,
USA
* General module implements non - additive characters with sequence
characters so their alphabet size can be unbounded .
characters so their alphabet size can be unbounded. *)
type cost_tuple =
{
min : float;
max : float;
}
type gnonadd_sequence = {
seq : Sequence.s;
costs : cost_tuple;
weights : float array;
}
* [ init_gnonadd_t weights ] create a new gnoadd with input seq
val init_gnonadd_t : Sequence.s -> float array option -> gnonadd_sequence
val to_single :
Alphabet.a -> Cost_matrix.Two_D.m -> gnonadd_sequence -> gnonadd_sequence -> gnonadd_sequence * float
* [ distance cost_mat ] return distance between two sequence .
val distance :
gnonadd_sequence -> gnonadd_sequence -> Cost_matrix.Two_D.m -> float
* [ median cost_mat a b ] return median of two general nonaddictive sequence
val median :
Cost_matrix.Two_D.m -> gnonadd_sequence -> gnonadd_sequence -> gnonadd_sequence * float
* [ median_3_fake h parent mine ]
val median_3_fake : Alphabet.a -> Cost_matrix.Two_D.m -> gnonadd_sequence ->
gnonadd_sequence -> gnonadd_sequence -> gnonadd_sequence -> gnonadd_sequence * float
* [ median_3 h parent mine ] return a new median3 for mine
* with info from parent , child1 and child2
* with info from parent,child1 and child2 *)
val median_3 : Cost_matrix.Three_D.m -> Cost_matrix.Two_D.m -> gnonadd_sequence
-> gnonadd_sequence -> gnonadd_sequence -> gnonadd_sequence -> gnonadd_sequence
* [ compare ] compare the sequence of two input .
val compare : gnonadd_sequence -> gnonadd_sequence -> int
val get_max_cost : cost_tuple -> float
|
0f9558fd780d8eab3fb76d29a5793e04c5fb107b14f0716f35b036c07f70ddde | pietervdvn/ALGT | Test.hs | module Gradualize.Test where
{- -}
import Utils.Utils
import Changer.Changes
import TypeSystem
import AbstractInterpreter.AbstractSet
import Gradualize.DynamicRuntime
import Gradualize.FunctionFixer
import Text.PrettyPrint.ANSI.Leijen
import Data.Map as M
import Data.List as L
import Utils.ToString
import AssetsHelper
import Lens.Micro hiding ((&))
t = t' >> pass
t' :: IO (TypeSystem, Changes)
t' = do (ts, dyn, ch) <- fixSyntax stfl "?" "type" & either error return
ts & toParsable' (24::Int) & putStrLn
let s = get tsSyntax ts
concretization : : - > TypeName - > Name - > String - > [ AbstractSet ] - > ParseTree - > Either String [ AbstractSet ]
let dynSet i = [generateAbstractSet (get tsSyntax ts) (show i) "type"]
let concrFunc = concretization (dyn, dynSet) :: ParseTree -> Arguments
let ptBool = MLiteral () ("typeL", 1) "Bool"
let testPT1 = PtSeq () ("type", -1) [ptBool, MLiteral () ("type",0) "->", ptBool]
let testPT2 = PtSeq () ("type", -1) [dyn, MLiteral () ("type",0) "->", ptBool]
let testPT3 = PtSeq () ("type", -1) [dyn, MLiteral () ("type",0) "->", dyn]
let testPT args0 args1
= do arg0 <- args0 & concrFunc
arg1 <- args1 & concrFunc
let ass
= possibleResults ts "equate" [arg0, arg1] & either error id :: [AbstractSet]
let showed
= inParens (toParsable arg0 ++ ", " ++ toParsable arg1) ++ " = " ++
if L.null ass then "ɛ" else toParsable' "\n\t" ass
return showed
testPT dyn dyn
|> (">>>"++) |+> putStrLn
testPT3 & concrFunc & toParsable ' " | " & putStrLn
return (ts, ch)
| null | https://raw.githubusercontent.com/pietervdvn/ALGT/43a2811931be6daf1362f37cb16f99375ca4999e/src/Gradualize/Test.hs | haskell | module Gradualize.Test where
import Utils.Utils
import Changer.Changes
import TypeSystem
import AbstractInterpreter.AbstractSet
import Gradualize.DynamicRuntime
import Gradualize.FunctionFixer
import Text.PrettyPrint.ANSI.Leijen
import Data.Map as M
import Data.List as L
import Utils.ToString
import AssetsHelper
import Lens.Micro hiding ((&))
t = t' >> pass
t' :: IO (TypeSystem, Changes)
t' = do (ts, dyn, ch) <- fixSyntax stfl "?" "type" & either error return
ts & toParsable' (24::Int) & putStrLn
let s = get tsSyntax ts
concretization : : - > TypeName - > Name - > String - > [ AbstractSet ] - > ParseTree - > Either String [ AbstractSet ]
let dynSet i = [generateAbstractSet (get tsSyntax ts) (show i) "type"]
let concrFunc = concretization (dyn, dynSet) :: ParseTree -> Arguments
let ptBool = MLiteral () ("typeL", 1) "Bool"
let testPT1 = PtSeq () ("type", -1) [ptBool, MLiteral () ("type",0) "->", ptBool]
let testPT2 = PtSeq () ("type", -1) [dyn, MLiteral () ("type",0) "->", ptBool]
let testPT3 = PtSeq () ("type", -1) [dyn, MLiteral () ("type",0) "->", dyn]
let testPT args0 args1
= do arg0 <- args0 & concrFunc
arg1 <- args1 & concrFunc
let ass
= possibleResults ts "equate" [arg0, arg1] & either error id :: [AbstractSet]
let showed
= inParens (toParsable arg0 ++ ", " ++ toParsable arg1) ++ " = " ++
if L.null ass then "ɛ" else toParsable' "\n\t" ass
return showed
testPT dyn dyn
|> (">>>"++) |+> putStrLn
testPT3 & concrFunc & toParsable ' " | " & putStrLn
return (ts, ch)
| |
ed5bcb09c6db33032c6709b8a0bb152cb4709ba9fe15cfbcca151071f5d349e0 | leandrosilva/cameron | misultin_ws.erl | % ==========================================================================================================
MISULTIN - Websocket Request
%
% >-|-|-(°>
%
Copyright ( C ) 2011 , < > .
% All rights reserved.
%
% BSD License
%
% Redistribution and use in source and binary forms, with or without modification, are permitted provided
% that the following conditions are met:
%
% * Redistributions of source code must retain the above copyright notice, this list of conditions and the
% following disclaimer.
% * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and
% the following disclaimer in the documentation and/or other materials provided with the distribution.
% * Neither the name of the authors nor the names of its contributors may be used to endorse or promote
% products derived from this software without specific prior written permission.
%
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " AND ANY EXPRESS OR
% WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT HOLDER OR FOR
ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED
TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS INTERRUPTION )
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
% NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
% POSSIBILITY OF SUCH DAMAGE.
% ==========================================================================================================
-module(misultin_ws).
-vsn("0.8").
% API
-export([raw/1, get/2, send/2]).
% includes
-include("../include/misultin.hrl").
% types
-type wst() :: {misultin_ws, #ws{}, SocketPid::pid()}.
= = = = = = = = = = = = = = = = = = = = = = = = = = = = \/ API = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
% Returns raw websocket content.
-spec raw(wst()) -> #ws{}.
raw({misultin_ws, Ws, _SocketPid}) ->
Ws.
% Get websocket info.
-spec get(WsInfo::atom(), wst()) -> term().
get(socket, {misultin_ws, Ws, _SocketPid}) ->
Ws#ws.socket;
get(socket_mode, {misultin_ws, Ws, _SocketPid}) ->
Ws#req.socket_mode;
get(peer_addr, {misultin_ws, #ws{headers = Headers} = Ws, _SocketPid}) ->
Host = case misultin_utility:get_key_value("X-Real-Ip", Headers) of
undefined ->
case misultin_utility:get_key_value("X-Forwarded-For", Headers) of
undefined -> undefined;
Hosts0 -> string:strip(lists:nth(1, string:tokens(Hosts0, ",")))
end;
Host0 -> Host0
end,
case Host of
undefined ->
Ws#ws.peer_addr;
_ ->
case inet_parse:address(Host) of
{error, _Reason} ->
Ws#ws.peer_addr;
{ok, IpTuple} ->
IpTuple
end
end;
get(peer_port, {misultin_ws, Ws, _SocketPid}) ->
Ws#ws.peer_port;
get(peer_cert, {misultin_ws, Ws, _SocketPid}) ->
Ws#ws.peer_cert;
get(vsn, {misultin_ws, Ws, _SocketPid}) ->
Ws#ws.vsn;
get(origin, {misultin_ws, Ws, _SocketPid}) ->
Ws#ws.origin;
get(host, {misultin_ws, Ws, _SocketPid}) ->
Ws#ws.host;
get(path, {misultin_ws, Ws, _SocketPid}) ->
Ws#ws.path;
get(headers, {misultin_ws, Ws, _SocketPid}) ->
Ws#ws.headers.
% send data
-spec send(Data::list() | binary() | iolist(), wst()) -> term().
send(Data, {misultin_ws, _Ws, SocketPid}) ->
SocketPid ! {send, Data}.
% ============================ /\ API ======================================================================
= = = = = = = = = = = = = = = = = = = = = = = = = = = = \/ INTERNAL FUNCTIONS = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
% ============================ /\ INTERNAL FUNCTIONS =======================================================
| null | https://raw.githubusercontent.com/leandrosilva/cameron/34051395b620d2c3cb2cb63c854e65234786a176/deps/misultin/src/misultin_ws.erl | erlang | ==========================================================================================================
>-|-|-(°>
All rights reserved.
BSD License
Redistribution and use in source and binary forms, with or without modification, are permitted provided
that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the
following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and
the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of the authors nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written permission.
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
==========================================================================================================
API
includes
types
Returns raw websocket content.
Get websocket info.
send data
============================ /\ API ======================================================================
============================ /\ INTERNAL FUNCTIONS ======================================================= | MISULTIN - Websocket Request
Copyright ( C ) 2011 , < > .
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " AND ANY EXPRESS OR
PARTICULAR PURPOSE ARE DISCLAIMED . IN NO EVENT SHALL THE COPYRIGHT HOLDER OR FOR
ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED
TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS INTERRUPTION )
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
-module(misultin_ws).
-vsn("0.8").
-export([raw/1, get/2, send/2]).
-include("../include/misultin.hrl").
-type wst() :: {misultin_ws, #ws{}, SocketPid::pid()}.
= = = = = = = = = = = = = = = = = = = = = = = = = = = = \/ API = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
-spec raw(wst()) -> #ws{}.
raw({misultin_ws, Ws, _SocketPid}) ->
Ws.
-spec get(WsInfo::atom(), wst()) -> term().
get(socket, {misultin_ws, Ws, _SocketPid}) ->
Ws#ws.socket;
get(socket_mode, {misultin_ws, Ws, _SocketPid}) ->
Ws#req.socket_mode;
get(peer_addr, {misultin_ws, #ws{headers = Headers} = Ws, _SocketPid}) ->
Host = case misultin_utility:get_key_value("X-Real-Ip", Headers) of
undefined ->
case misultin_utility:get_key_value("X-Forwarded-For", Headers) of
undefined -> undefined;
Hosts0 -> string:strip(lists:nth(1, string:tokens(Hosts0, ",")))
end;
Host0 -> Host0
end,
case Host of
undefined ->
Ws#ws.peer_addr;
_ ->
case inet_parse:address(Host) of
{error, _Reason} ->
Ws#ws.peer_addr;
{ok, IpTuple} ->
IpTuple
end
end;
get(peer_port, {misultin_ws, Ws, _SocketPid}) ->
Ws#ws.peer_port;
get(peer_cert, {misultin_ws, Ws, _SocketPid}) ->
Ws#ws.peer_cert;
get(vsn, {misultin_ws, Ws, _SocketPid}) ->
Ws#ws.vsn;
get(origin, {misultin_ws, Ws, _SocketPid}) ->
Ws#ws.origin;
get(host, {misultin_ws, Ws, _SocketPid}) ->
Ws#ws.host;
get(path, {misultin_ws, Ws, _SocketPid}) ->
Ws#ws.path;
get(headers, {misultin_ws, Ws, _SocketPid}) ->
Ws#ws.headers.
-spec send(Data::list() | binary() | iolist(), wst()) -> term().
send(Data, {misultin_ws, _Ws, SocketPid}) ->
SocketPid ! {send, Data}.
= = = = = = = = = = = = = = = = = = = = = = = = = = = = \/ INTERNAL FUNCTIONS = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
|
f7f212428a42d5d1223f2ede23ef84ba0a273457f72b6eafb4c3df9914a8bac6 | brawnski/git-annex | SetKey.hs | git - annex command
-
- Copyright 2010 < >
-
- Licensed under the GNU GPL version 3 or higher .
-
- Copyright 2010 Joey Hess <>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Command.SetKey where
import Control.Monad.State (liftIO)
import Command
import Utility
import LocationLog
import Content
import Messages
command :: [Command]
command = [repoCommand "setkey" paramPath seek
"sets annexed content for a key using a temp file"]
seek :: [CommandSeek]
seek = [withTempFile start]
{- Sets cached content for a key. -}
start :: CommandStartString
start file = do
showStart "setkey" file
next $ perform file
perform :: FilePath -> CommandPerform
perform file = do
key <- cmdlineKey
-- the file might be on a different filesystem, so mv is used
-- rather than simply calling moveToObjectDir; disk space is also
-- checked this way.
ok <- getViaTmp key $ \dest ->
if dest /= file
then liftIO $
boolSystem "mv" [File file, File dest]
else return True
if ok
then next cleanup
else error "mv failed!"
cleanup :: CommandCleanup
cleanup = do
key <- cmdlineKey
logStatus key InfoPresent
return True
| null | https://raw.githubusercontent.com/brawnski/git-annex/8b847517a810d384a79178124b9766141b89bc17/Command/SetKey.hs | haskell | Sets cached content for a key.
the file might be on a different filesystem, so mv is used
rather than simply calling moveToObjectDir; disk space is also
checked this way. | git - annex command
-
- Copyright 2010 < >
-
- Licensed under the GNU GPL version 3 or higher .
-
- Copyright 2010 Joey Hess <>
-
- Licensed under the GNU GPL version 3 or higher.
-}
module Command.SetKey where
import Control.Monad.State (liftIO)
import Command
import Utility
import LocationLog
import Content
import Messages
command :: [Command]
command = [repoCommand "setkey" paramPath seek
"sets annexed content for a key using a temp file"]
seek :: [CommandSeek]
seek = [withTempFile start]
start :: CommandStartString
start file = do
showStart "setkey" file
next $ perform file
perform :: FilePath -> CommandPerform
perform file = do
key <- cmdlineKey
ok <- getViaTmp key $ \dest ->
if dest /= file
then liftIO $
boolSystem "mv" [File file, File dest]
else return True
if ok
then next cleanup
else error "mv failed!"
cleanup :: CommandCleanup
cleanup = do
key <- cmdlineKey
logStatus key InfoPresent
return True
|
8c736f24b2d6524bb336ae88afd62fbb245f950ea2be2cbe79947bd2852b5879 | Deducteam/Logipedia | sttfadk.mli | * This module give an interface for Dedukti symbols use to represent the STTforall logic .
open Kernel
* Name of the file implementing the STTforall logic .
val sttfa_module : Basic.mident
val sttfa_arrow : Basic.ident
val sttfa_eps : Basic.ident
val sttfa_eta : Basic.ident
val sttfa_etap : Basic.ident
val sttfa_forall : Basic.ident
val sttfa_forall_kind_prop : Basic.ident
val sttfa_forall_kind_type : Basic.ident
val sttfa_impl : Basic.ident
val sttfa_leibniz : Basic.ident
val sttfa_p : Basic.ident
val sttfa_prop : Basic.ident
val sttfa_ptype : Basic.ident
val sttfa_type : Basic.ident
val is_sttfa_const : Basic.ident -> Term.term -> bool
val is_tyop : Term.term -> bool
val arity_of_tyop : Term.term -> int
| null | https://raw.githubusercontent.com/Deducteam/Logipedia/09797a35ae36ab671e40e615fcdc09a7bba69134/src/sttfa/sttfadk.mli | ocaml | * This module give an interface for Dedukti symbols use to represent the STTforall logic .
open Kernel
* Name of the file implementing the STTforall logic .
val sttfa_module : Basic.mident
val sttfa_arrow : Basic.ident
val sttfa_eps : Basic.ident
val sttfa_eta : Basic.ident
val sttfa_etap : Basic.ident
val sttfa_forall : Basic.ident
val sttfa_forall_kind_prop : Basic.ident
val sttfa_forall_kind_type : Basic.ident
val sttfa_impl : Basic.ident
val sttfa_leibniz : Basic.ident
val sttfa_p : Basic.ident
val sttfa_prop : Basic.ident
val sttfa_ptype : Basic.ident
val sttfa_type : Basic.ident
val is_sttfa_const : Basic.ident -> Term.term -> bool
val is_tyop : Term.term -> bool
val arity_of_tyop : Term.term -> int
| |
776850e928d22c964a6d79dbe1c80fa4432d2032dd7be90bb26a89c03c97b5e5 | ideas-edu/ideas | Derivation.hs | -----------------------------------------------------------------------------
Copyright 2019 , Ideas project team . This file is distributed under the
terms of the Apache License 2.0 . For more information , see the files
" LICENSE.txt " and " NOTICE.txt " , which are included in the distribution .
-----------------------------------------------------------------------------
-- |
-- Maintainer :
-- Stability : provisional
Portability : portable ( depends on ghc )
--
Datatype for representing a derivation ( parameterized both in the terms
-- and the steps)
--
-----------------------------------------------------------------------------
module Ideas.Common.Derivation
( -- * Data type
Derivation
-- * Constructing a derivation
, emptyDerivation, prepend, extend
, merge, mergeBy, mergeStep
-- * Conversion to/from list
, derivationToList, derivationFromList
-- * Querying a derivation
, isEmpty, derivationLength, terms, steps, triples
, firstTerm, lastTerm, lastStep, withoutLast
, updateSteps, derivationM, splitStep
) where
import Data.Maybe
import Data.Monoid
import Ideas.Common.Classes
import Ideas.Common.Rewriting
import qualified Data.Foldable as F
import qualified Data.Sequence as S
-----------------------------------------------------------------------------
-- Data type definition and instances
data Derivation s a = D a (S.Seq (s, a))
deriving Eq
instance (Show s, Show a) => Show (Derivation s a) where
show (D a xs) = unlines $
show a : concatMap (\(r, b) -> [" => " ++ show r, show b]) (F.toList xs)
instance Functor (Derivation s) where
fmap = mapSecond
instance BiFunctor Derivation where
biMap f g (D a xs) = D (g a) (fmap (biMap f g) xs)
instance (IsTerm s, IsTerm a) => IsTerm (Derivation s a) where
toTerm = TList . derivationToList toTerm toTerm
fromTerm (TList xs) = derivationFromList fromTerm fromTerm xs
fromTerm _ = fail "not a derivation"
-----------------------------------------------------------------------------
-- Constructing a derivation
emptyDerivation :: a -> Derivation s a
emptyDerivation a = D a S.empty
prepend :: (a, s) -> Derivation s a -> Derivation s a
prepend (a, s) (D b xs) = D a ((s, b) S.<| xs)
extend :: Derivation s a -> (s, a) -> Derivation s a
extend (D a xs) p = D a (xs S.|> p)
merge :: Eq a => Derivation s a -> Derivation s a -> Maybe (Derivation s a)
merge = mergeBy (==)
mergeBy :: (a -> a -> Bool) -> Derivation s a -> Derivation s a -> Maybe (Derivation s a)
mergeBy eq d@(D a xs) (D b ys)
| eq (lastTerm d) b = Just $ D a (xs <> ys)
| otherwise = Nothing
mergeStep :: Derivation s a -> s -> Derivation s a -> Derivation s a
mergeStep (D a xs) s (D b ys) = D a (xs <> ((s, b) S.<| ys))
-----------------------------------------------------------------------------
-- Conversion to/from list
derivationToList :: (s -> b) -> (a -> b) -> Derivation s a -> [b]
derivationToList f g d =
g (firstTerm d) : concat [ [f s, g a] | (_, s, a) <- triples d ]
derivationFromList :: Monad m => (b -> m s) -> (b -> m a) -> [b] -> m (Derivation s a)
derivationFromList f g = rec
where
rec [] = fail "derivationFromList"
rec [b] = emptyDerivation <$> g b
rec (b1:b2:bs) = curry prepend <$> g b1 <*> f b2 <*> rec bs
-----------------------------------------------------------------------------
-- Querying a derivation
-- | Tests whether the derivation is empty
isEmpty :: Derivation s a -> Bool
isEmpty (D _ xs) = S.null xs
-- | Returns the number of steps in a derivation
derivationLength :: Derivation s a -> Int
derivationLength (D _ xs) = S.length xs
-- | All terms in a derivation
terms :: Derivation s a -> [a]
terms (D a xs) = a:map snd (F.toList xs)
-- | All steps in a derivation
steps :: Derivation s a -> [s]
steps (D _ xs) = map fst (F.toList xs)
-- | The triples of a derivation, consisting of the before term, the
-- step, and the after term.
triples :: Derivation s a -> [(a, s, a)]
triples d = zip3 (terms d) (steps d) (tail (terms d))
firstTerm :: Derivation s a -> a
firstTerm = head . terms
lastTerm :: Derivation s a -> a
lastTerm = last . terms
lastStep:: Derivation s a -> Maybe s
lastStep = listToMaybe . reverse . steps
withoutLast :: Derivation s a -> Derivation s a
withoutLast d@(D a xs) =
case S.viewr xs of
S.EmptyR -> d
ys S.:> _ -> D a ys
updateSteps :: (a -> s -> a -> t) -> Derivation s a -> Derivation t a
updateSteps f d =
let ts = [ f a b c | (a, b, c) <- triples d ]
x:xs = terms d
in D x (S.fromList (zip ts xs))
-- | Apply a monadic function to each term, and to each step
derivationM :: Monad m => (s -> m ()) -> (a -> m ()) -> Derivation s a -> m ()
derivationM f g (D a xs) = g a >> mapM_ (\(s, b) -> f s >> g b) (F.toList xs)
splitStep :: (s -> Bool) -> Derivation s a -> Maybe (Derivation s a, s, Derivation s a)
splitStep p (D a xs) =
case S.viewl xs2 of
S.EmptyL -> Nothing
(s, b) S.:< ys -> Just (D a xs1, s, D b ys)
where
(xs1, xs2) = S.breakl (p . fst) xs | null | https://raw.githubusercontent.com/ideas-edu/ideas/f84907f92a8c407b7313f99e65a08d2646dc1565/src/Ideas/Common/Derivation.hs | haskell | ---------------------------------------------------------------------------
---------------------------------------------------------------------------
|
Maintainer :
Stability : provisional
and the steps)
---------------------------------------------------------------------------
* Data type
* Constructing a derivation
* Conversion to/from list
* Querying a derivation
---------------------------------------------------------------------------
Data type definition and instances
---------------------------------------------------------------------------
Constructing a derivation
---------------------------------------------------------------------------
Conversion to/from list
---------------------------------------------------------------------------
Querying a derivation
| Tests whether the derivation is empty
| Returns the number of steps in a derivation
| All terms in a derivation
| All steps in a derivation
| The triples of a derivation, consisting of the before term, the
step, and the after term.
| Apply a monadic function to each term, and to each step
| Copyright 2019 , Ideas project team . This file is distributed under the
terms of the Apache License 2.0 . For more information , see the files
" LICENSE.txt " and " NOTICE.txt " , which are included in the distribution .
Portability : portable ( depends on ghc )
Datatype for representing a derivation ( parameterized both in the terms
module Ideas.Common.Derivation
Derivation
, emptyDerivation, prepend, extend
, merge, mergeBy, mergeStep
, derivationToList, derivationFromList
, isEmpty, derivationLength, terms, steps, triples
, firstTerm, lastTerm, lastStep, withoutLast
, updateSteps, derivationM, splitStep
) where
import Data.Maybe
import Data.Monoid
import Ideas.Common.Classes
import Ideas.Common.Rewriting
import qualified Data.Foldable as F
import qualified Data.Sequence as S
data Derivation s a = D a (S.Seq (s, a))
deriving Eq
instance (Show s, Show a) => Show (Derivation s a) where
show (D a xs) = unlines $
show a : concatMap (\(r, b) -> [" => " ++ show r, show b]) (F.toList xs)
instance Functor (Derivation s) where
fmap = mapSecond
instance BiFunctor Derivation where
biMap f g (D a xs) = D (g a) (fmap (biMap f g) xs)
instance (IsTerm s, IsTerm a) => IsTerm (Derivation s a) where
toTerm = TList . derivationToList toTerm toTerm
fromTerm (TList xs) = derivationFromList fromTerm fromTerm xs
fromTerm _ = fail "not a derivation"
emptyDerivation :: a -> Derivation s a
emptyDerivation a = D a S.empty
prepend :: (a, s) -> Derivation s a -> Derivation s a
prepend (a, s) (D b xs) = D a ((s, b) S.<| xs)
extend :: Derivation s a -> (s, a) -> Derivation s a
extend (D a xs) p = D a (xs S.|> p)
merge :: Eq a => Derivation s a -> Derivation s a -> Maybe (Derivation s a)
merge = mergeBy (==)
mergeBy :: (a -> a -> Bool) -> Derivation s a -> Derivation s a -> Maybe (Derivation s a)
mergeBy eq d@(D a xs) (D b ys)
| eq (lastTerm d) b = Just $ D a (xs <> ys)
| otherwise = Nothing
mergeStep :: Derivation s a -> s -> Derivation s a -> Derivation s a
mergeStep (D a xs) s (D b ys) = D a (xs <> ((s, b) S.<| ys))
derivationToList :: (s -> b) -> (a -> b) -> Derivation s a -> [b]
derivationToList f g d =
g (firstTerm d) : concat [ [f s, g a] | (_, s, a) <- triples d ]
derivationFromList :: Monad m => (b -> m s) -> (b -> m a) -> [b] -> m (Derivation s a)
derivationFromList f g = rec
where
rec [] = fail "derivationFromList"
rec [b] = emptyDerivation <$> g b
rec (b1:b2:bs) = curry prepend <$> g b1 <*> f b2 <*> rec bs
isEmpty :: Derivation s a -> Bool
isEmpty (D _ xs) = S.null xs
derivationLength :: Derivation s a -> Int
derivationLength (D _ xs) = S.length xs
terms :: Derivation s a -> [a]
terms (D a xs) = a:map snd (F.toList xs)
steps :: Derivation s a -> [s]
steps (D _ xs) = map fst (F.toList xs)
triples :: Derivation s a -> [(a, s, a)]
triples d = zip3 (terms d) (steps d) (tail (terms d))
firstTerm :: Derivation s a -> a
firstTerm = head . terms
lastTerm :: Derivation s a -> a
lastTerm = last . terms
lastStep:: Derivation s a -> Maybe s
lastStep = listToMaybe . reverse . steps
withoutLast :: Derivation s a -> Derivation s a
withoutLast d@(D a xs) =
case S.viewr xs of
S.EmptyR -> d
ys S.:> _ -> D a ys
updateSteps :: (a -> s -> a -> t) -> Derivation s a -> Derivation t a
updateSteps f d =
let ts = [ f a b c | (a, b, c) <- triples d ]
x:xs = terms d
in D x (S.fromList (zip ts xs))
derivationM :: Monad m => (s -> m ()) -> (a -> m ()) -> Derivation s a -> m ()
derivationM f g (D a xs) = g a >> mapM_ (\(s, b) -> f s >> g b) (F.toList xs)
splitStep :: (s -> Bool) -> Derivation s a -> Maybe (Derivation s a, s, Derivation s a)
splitStep p (D a xs) =
case S.viewl xs2 of
S.EmptyL -> Nothing
(s, b) S.:< ys -> Just (D a xs1, s, D b ys)
where
(xs1, xs2) = S.breakl (p . fst) xs |
14857e83c414eb5984fbb4375e03ee78260b47ddf94c751acfd43a249ba5cb72 | krajj7/BotHack | handlers.clj | (ns bothack.handlers
(:require [clojure.tools.logging :as log]
[bothack.util :refer :all]
[bothack.dungeon :refer :all]
[bothack.delegator :refer :all]))
(defn register-handler
[bh & args]
(send (:delegator bh) #(apply register % args))
bh)
(defn deregister-handler
[bh handler]
(send (:delegator bh) deregister handler)
bh)
(defn replace-handler
[bh handler-old handler-new]
(send (:delegator bh) switch handler-old handler-new)
bh)
(defn update-before-action
"Before the next action is chosen call (apply swap! game f args). This
happens when game state is updated and player position is known."
[bh f & args]
{:pre [(:game bh)]}
(register-handler bh priority-top
(reify AboutToChooseActionHandler
(about-to-choose [this _]
(apply swap! (:game bh) f args)
(deregister-handler bh this)))))
(defn update-on-known-position
"When player position on map is known call (apply swap! game f args). Game
state may not be fully updated yet for the turn."
[bh f & args]
{:pre [(:game bh)]}
(register-handler bh priority-top
(reify
AboutToChooseActionHandler ; handler might have been registered too late to receive know-position this turn
(about-to-choose [this _]
(apply swap! (:game bh) f args)
(deregister-handler bh this))
KnowPositionHandler
(know-position [this _]
(apply swap! (:game bh) f args)
(deregister-handler bh this)))))
(defn update-at-player-when-known
"Update the tile at player's next known position by applying update-fn to its
current value and args"
[bh update-fn & args]
(update-on-known-position bh #(apply update-at-player % update-fn args)))
| null | https://raw.githubusercontent.com/krajj7/BotHack/70226b3c8ed12d29c64068aec0acc0ca71d57adf/src/bothack/handlers.clj | clojure | handler might have been registered too late to receive know-position this turn | (ns bothack.handlers
(:require [clojure.tools.logging :as log]
[bothack.util :refer :all]
[bothack.dungeon :refer :all]
[bothack.delegator :refer :all]))
(defn register-handler
[bh & args]
(send (:delegator bh) #(apply register % args))
bh)
(defn deregister-handler
[bh handler]
(send (:delegator bh) deregister handler)
bh)
(defn replace-handler
[bh handler-old handler-new]
(send (:delegator bh) switch handler-old handler-new)
bh)
(defn update-before-action
"Before the next action is chosen call (apply swap! game f args). This
happens when game state is updated and player position is known."
[bh f & args]
{:pre [(:game bh)]}
(register-handler bh priority-top
(reify AboutToChooseActionHandler
(about-to-choose [this _]
(apply swap! (:game bh) f args)
(deregister-handler bh this)))))
(defn update-on-known-position
"When player position on map is known call (apply swap! game f args). Game
state may not be fully updated yet for the turn."
[bh f & args]
{:pre [(:game bh)]}
(register-handler bh priority-top
(reify
(about-to-choose [this _]
(apply swap! (:game bh) f args)
(deregister-handler bh this))
KnowPositionHandler
(know-position [this _]
(apply swap! (:game bh) f args)
(deregister-handler bh this)))))
(defn update-at-player-when-known
"Update the tile at player's next known position by applying update-fn to its
current value and args"
[bh update-fn & args]
(update-on-known-position bh #(apply update-at-player % update-fn args)))
|
24dd317bdd4c971b2a4164db13d6216e9a38a448f8e6d7db06fbbf7ba5e9f5a8 | hrefhref/styx | styx_web_oauth2_consent.erl | -module(styx_web_oauth2_consent).
-behaviour(cowboy_handler).
-export([init/2]).
init(Req = #{method := <<"GET">>}, State) ->
init_(Req, State, styx_web:req_param(Req, <<"consent_challenge">>));
init(Req = #{method := <<"POST">>}, State) ->
init_(Req, State, styx_web:req_param(Req, <<"consent_challenge">>));
init(Req, _) ->
styx_web_error:init(Req, #{code => 404, status => <<"Not Found">>}).
init_(Req0, State, {ok, Challenge}) ->
Req = styx_web_oauth2_login:unset_cookie(Req0),
Cookie = cowboy_req:header(<<"cookie">>, Req),
authentication(Req, State, Challenge, ory_kratos:whoami(Cookie));
init_(Req, _, {error, {missing_param, _}}) ->
styx_web_error:init(Req, not_found).
authentication(Req0, State, Challenge, {ok, Session = #{<<"active">> := true}}) ->
do(Req0, State, Session, ory_hydra:consent_request(Challenge));
authentication(Req0, State, _Challenge, Error) ->
render_error(Req0, State, Error).
do(Req0 = #{method := <<"GET">>}, State, _Session, {ok, #{<<"challenge">> := Challenge, <<"skip">> := true, <<"requested_scope">> := Scopes}}) ->
ConsentData = #{<<"grant_scope">> => Scopes},
case ory_hydra:accept_consent_request(Challenge, ConsentData) of
{ok, #{<<"redirect_to">> := Url}} ->
Req = styx_web:temporary_redirect(Req0, Url),
{ok, Req, State};
Error ->
render_error(Req0, State, Error)
end;
do(Req0 = #{method := <<"GET">>}, State, _Session, {ok, Flow = #{<<"client">> := Client}}) ->
%% FIXME client_name can be blank, not just undefined.
logger:debug("oAuth request ~p", [Flow]),
AppName = maps:get(<<"client_name">>, Client, maps:get(<<"client_id">>, Client, <<"Unnamed App">>)),
Assigns = [{"page_title", ["Authorize ", AppName]}, {"flow", Flow}],
Html = styx_web:render(Req0, oauth2_consent_form_dtl, Assigns),
Req = styx_web:reply_html(Req0, 200, Html),
{ok, Req, State};
do(Req0 = #{method := <<"POST">>}, State, Session, {ok, Flow}) ->
{ok, Data, Req} = cowboy_req:read_urlencoded_body(Req0),
post(Req, State, Session, Flow, Data).
post(Req0, State, Session, Flow, Data) ->
Consent = case lists:keyfind(<<"consent">>, 1, Data) of
{_, <<"true">>} -> true;
_ -> false
end,
consent(Req0, State, Session, Flow, Data, Consent).
consent(Req0, State, _Session, #{<<"challenge">> := Challenge}, Data, true) ->
ScopesFun = fun
({<<"scope-", S/binary>>, <<"on">>}, Acc) -> [S | Acc];
(_, Acc) -> Acc
end,
Scopes = lists:foldl(ScopesFun, [], Data),
ConsentData = #{<<"grant_scope">> => Scopes},
case ory_hydra:accept_consent_request(Challenge, ConsentData) of
{ok, #{<<"redirect_to">> := Url}} ->
Req = styx_web:temporary_redirect(Req0, Url),
{ok, Req, State};
Error ->
render_error(Req0, State, Error)
end;
consent(Req0, State, _Session, #{<<"challenge">> := Challenge}, _Data, false) ->
Data = #{<<"error">> => <<"User denied access.">>, <<"status_code">> => 403},
case ory_hydra:reject_consent_request(Challenge, Data) of
{ok, #{<<"redirect_to">> := Url}} ->
Req = styx_web:temporary_redirect(Req0, Url),
{ok, Req, State};
Error ->
render_error(Req0, State, Error)
end.
render_error(Req, _State, {error, #{<<"code">> := Code, <<"status">> := Status, <<"message">> := Msg}}) ->
styx_web_error:init(Req, #{code => Code, status => Status, message => Msg}).
| null | https://raw.githubusercontent.com/hrefhref/styx/a808f89428daa2e90ceb7a9876a317e4e85fe5bf/apps/styx_web/src/styx_web_oauth2_consent.erl | erlang | FIXME client_name can be blank, not just undefined. | -module(styx_web_oauth2_consent).
-behaviour(cowboy_handler).
-export([init/2]).
init(Req = #{method := <<"GET">>}, State) ->
init_(Req, State, styx_web:req_param(Req, <<"consent_challenge">>));
init(Req = #{method := <<"POST">>}, State) ->
init_(Req, State, styx_web:req_param(Req, <<"consent_challenge">>));
init(Req, _) ->
styx_web_error:init(Req, #{code => 404, status => <<"Not Found">>}).
init_(Req0, State, {ok, Challenge}) ->
Req = styx_web_oauth2_login:unset_cookie(Req0),
Cookie = cowboy_req:header(<<"cookie">>, Req),
authentication(Req, State, Challenge, ory_kratos:whoami(Cookie));
init_(Req, _, {error, {missing_param, _}}) ->
styx_web_error:init(Req, not_found).
authentication(Req0, State, Challenge, {ok, Session = #{<<"active">> := true}}) ->
do(Req0, State, Session, ory_hydra:consent_request(Challenge));
authentication(Req0, State, _Challenge, Error) ->
render_error(Req0, State, Error).
do(Req0 = #{method := <<"GET">>}, State, _Session, {ok, #{<<"challenge">> := Challenge, <<"skip">> := true, <<"requested_scope">> := Scopes}}) ->
ConsentData = #{<<"grant_scope">> => Scopes},
case ory_hydra:accept_consent_request(Challenge, ConsentData) of
{ok, #{<<"redirect_to">> := Url}} ->
Req = styx_web:temporary_redirect(Req0, Url),
{ok, Req, State};
Error ->
render_error(Req0, State, Error)
end;
do(Req0 = #{method := <<"GET">>}, State, _Session, {ok, Flow = #{<<"client">> := Client}}) ->
logger:debug("oAuth request ~p", [Flow]),
AppName = maps:get(<<"client_name">>, Client, maps:get(<<"client_id">>, Client, <<"Unnamed App">>)),
Assigns = [{"page_title", ["Authorize ", AppName]}, {"flow", Flow}],
Html = styx_web:render(Req0, oauth2_consent_form_dtl, Assigns),
Req = styx_web:reply_html(Req0, 200, Html),
{ok, Req, State};
do(Req0 = #{method := <<"POST">>}, State, Session, {ok, Flow}) ->
{ok, Data, Req} = cowboy_req:read_urlencoded_body(Req0),
post(Req, State, Session, Flow, Data).
post(Req0, State, Session, Flow, Data) ->
Consent = case lists:keyfind(<<"consent">>, 1, Data) of
{_, <<"true">>} -> true;
_ -> false
end,
consent(Req0, State, Session, Flow, Data, Consent).
consent(Req0, State, _Session, #{<<"challenge">> := Challenge}, Data, true) ->
ScopesFun = fun
({<<"scope-", S/binary>>, <<"on">>}, Acc) -> [S | Acc];
(_, Acc) -> Acc
end,
Scopes = lists:foldl(ScopesFun, [], Data),
ConsentData = #{<<"grant_scope">> => Scopes},
case ory_hydra:accept_consent_request(Challenge, ConsentData) of
{ok, #{<<"redirect_to">> := Url}} ->
Req = styx_web:temporary_redirect(Req0, Url),
{ok, Req, State};
Error ->
render_error(Req0, State, Error)
end;
consent(Req0, State, _Session, #{<<"challenge">> := Challenge}, _Data, false) ->
Data = #{<<"error">> => <<"User denied access.">>, <<"status_code">> => 403},
case ory_hydra:reject_consent_request(Challenge, Data) of
{ok, #{<<"redirect_to">> := Url}} ->
Req = styx_web:temporary_redirect(Req0, Url),
{ok, Req, State};
Error ->
render_error(Req0, State, Error)
end.
render_error(Req, _State, {error, #{<<"code">> := Code, <<"status">> := Status, <<"message">> := Msg}}) ->
styx_web_error:init(Req, #{code => Code, status => Status, message => Msg}).
|
3267aae0c9a0d79d64ee974dc41f7d32dcf35a090166c09d9982faebf790fc1a | zelark/AoC-2020 | day_23.clj | (ns zelark.aoc-2020.day-23
(:require [clojure.java.io :as io]))
--- Day 23 : Crab Cups ---
;;
(def input (slurp (io/resource "input_23.txt")))
(defn parse-input [input]
(mapv #(Long/parseLong %) (re-seq #"\d" input)))
(defprotocol ICircleNode
(value [this])
(get-next [this])
(set-next [this node])
(insert [this node])
(insert-n [this start end])
(remove-n [this n]))
(deftype CircleNode [value ^:volatile-mutable next]
ICircleNode
(value [_] value)
(get-next [_] next)
(set-next [this node] (set! next node) this)
(insert [this node]
(set-next node next)
(set-next this node))
(insert-n [this start end]
(set-next end next)
(set-next this start))
(remove-n [this n]
(set-next this (first (drop (inc n) (iterate get-next this))))))
(defn circle-node [value]
(let [node (CircleNode. value nil)]
(set-next node node)))
(defn build-cups [labels]
(first (reduce (fn [[acc prev] label]
(let [node (circle-node label)]
(when prev (insert prev node))
[(assoc acc label node) node]))
[{} nil] labels)))
(defn next-dest [^long n ^long curr]
(let [dest (unchecked-dec curr)]
(if (< dest 1) n dest)))
(defn play-game [{:keys [labels ^long limit result-fn]}]
(let [cups (build-cups labels)
cnt (count labels)
next-dest (partial next-dest cnt)]
(loop [curr (first labels) i 1]
(when (<= i limit)
(let [ccup (cups curr)
a (get-next ccup)
b (get-next a)
c (get-next b)
abc (set (map value [a b c]))
dest (first (drop-while abc (rest (iterate next-dest curr))))
dcup (cups dest)
_ (remove-n ccup 3)
_ (insert-n dcup a c)]
(recur (value (get-next ccup)) (inc i)))))
(result-fn (get-next (cups 1)))))
part 1
(play-game {:labels (parse-input input)
:limit 100
:result-fn #(reduce (fn [acc cup] (+ (* acc 10) (value cup)))
0
49725386
part 2
(play-game {:labels (into (parse-input input) (range 10 1000001))
:limit 1e7
538935646702
| null | https://raw.githubusercontent.com/zelark/AoC-2020/5417c3514889eb02efc23f6be7d69e29fdfa0376/src/zelark/aoc_2020/day_23.clj | clojure | (ns zelark.aoc-2020.day-23
(:require [clojure.java.io :as io]))
--- Day 23 : Crab Cups ---
(def input (slurp (io/resource "input_23.txt")))
(defn parse-input [input]
(mapv #(Long/parseLong %) (re-seq #"\d" input)))
(defprotocol ICircleNode
(value [this])
(get-next [this])
(set-next [this node])
(insert [this node])
(insert-n [this start end])
(remove-n [this n]))
(deftype CircleNode [value ^:volatile-mutable next]
ICircleNode
(value [_] value)
(get-next [_] next)
(set-next [this node] (set! next node) this)
(insert [this node]
(set-next node next)
(set-next this node))
(insert-n [this start end]
(set-next end next)
(set-next this start))
(remove-n [this n]
(set-next this (first (drop (inc n) (iterate get-next this))))))
(defn circle-node [value]
(let [node (CircleNode. value nil)]
(set-next node node)))
(defn build-cups [labels]
(first (reduce (fn [[acc prev] label]
(let [node (circle-node label)]
(when prev (insert prev node))
[(assoc acc label node) node]))
[{} nil] labels)))
(defn next-dest [^long n ^long curr]
(let [dest (unchecked-dec curr)]
(if (< dest 1) n dest)))
(defn play-game [{:keys [labels ^long limit result-fn]}]
(let [cups (build-cups labels)
cnt (count labels)
next-dest (partial next-dest cnt)]
(loop [curr (first labels) i 1]
(when (<= i limit)
(let [ccup (cups curr)
a (get-next ccup)
b (get-next a)
c (get-next b)
abc (set (map value [a b c]))
dest (first (drop-while abc (rest (iterate next-dest curr))))
dcup (cups dest)
_ (remove-n ccup 3)
_ (insert-n dcup a c)]
(recur (value (get-next ccup)) (inc i)))))
(result-fn (get-next (cups 1)))))
part 1
(play-game {:labels (parse-input input)
:limit 100
:result-fn #(reduce (fn [acc cup] (+ (* acc 10) (value cup)))
0
49725386
part 2
(play-game {:labels (into (parse-input input) (range 10 1000001))
:limit 1e7
538935646702
| |
2947c84f4096862760f398946e1ded448f6112496a3adddfc1ae6692264704f8 | well-typed/large-records | Tutorial2.hs | {-# LANGUAGE ConstraintKinds #-}
# LANGUAGE DataKinds #
{-# LANGUAGE DeriveAnyClass #-}
# LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE ExistentialQuantification #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
{-# LANGUAGE KindSignatures #-}
# LANGUAGE MultiParamTypeClasses #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
# LANGUAGE StandaloneDeriving #
# LANGUAGE TypeApplications #
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -fplugin=Data.Record.Plugin.WithRDP #-}
# OPTIONS_GHC -Wno - missing - signatures -Wno - unused - top - binds #
module Test.Record.Beam.Tutorial2 (
tests
-- * Exported for the benefit of follow-up tutorials
, AddressT(..)
, Address
, PrimaryKey(..)
) where
import Data.Functor.Const
import Data.Int
import Data.Kind
import Data.Record.Plugin
import Data.Text (Text)
import Database.Beam
import Database.Beam.Schema.Tables
import Lens.Micro
import qualified Data.List.NonEmpty as NE
import qualified Data.Text as Text
import qualified Database.SQLite.Simple as SQLite
import qualified GHC.Generics as GHC
import Test.Tasty (TestTree, testGroup)
import Test.Tasty.HUnit (Assertion, testCase, assertEqual)
import Test.Record.Beam.Tutorial1 hiding (tests)
import Test.Record.Beam.Util.SQLite
{-------------------------------------------------------------------------------
New table: with a foreign key
-------------------------------------------------------------------------------}
# ANN type AddressT largeRecord #
data AddressT (f :: Type -> Type) = Address {
addressId :: C f Int32
, addressLine1 :: C f Text
, addressLine2 :: C f (Maybe Text)
, addressCity :: C f Text
, addressState :: C f Text
, addressZip :: C f Text
, addressForUser :: PrimaryKey UserT f
}
deriving (Show, Eq)
deriving anyclass (Beamable)
type Address = AddressT Identity
type AddressId = PrimaryKey AddressT Identity
instance Table AddressT where
data PrimaryKey AddressT f = AddressId (Columnar f Int32)
deriving stock (GHC.Generic)
deriving anyclass (Beamable)
primaryKey addr = AddressId $ addr.addressId
deriving instance Show (Columnar f Int32) => Show (PrimaryKey AddressT f)
deriving instance Eq (Columnar f Int32) => Eq (PrimaryKey AddressT f)
exampleAddress :: Address
exampleAddress = Address {
addressId = 1
, addressLine1 = "street"
, addressLine2 = Nothing
, addressCity = "city"
, addressState = "state"
, addressZip = "zip"
, addressForUser = UserId ""
}
------------------------------------------------------------------------------
Version 2 of the DB
------------------------------------------------------------------------------
Version 2 of the DB
-------------------------------------------------------------------------------}
{-# ANN type ShoppingCart2Db largeRecord #-}
data ShoppingCart2Db (f :: Type -> Type) = ShoppingCart2Db {
shoppingCart2Users :: f (TableEntity UserT)
, shoppingCart2UserAddresses :: f (TableEntity AddressT)
}
deriving (Show, Eq)
instance Database be ShoppingCart2Db
shoppingCart2Db :: forall be. DatabaseSettings be ShoppingCart2Db
shoppingCart2Db = defaultDbSettings `withDbModification`
dbModification{shoppingCart2UserAddresses =
setEntityName "addresses"
<> modifyTableFields
tableModification{addressLine1 = fieldNamed "address1"
,addressLine2 = fieldNamed "address2"
}
}
------------------------------------------------------------------------------
Derive lenses
TODO : Can we avoid the type signature on ' lensesAddressT ' and co ?
------------------------------------------------------------------------------
Derive lenses
TODO: Can we avoid the type signature on 'lensesAddressT' and co?
-------------------------------------------------------------------------------}
lensesAddressT :: AddressT (Lenses AddressT f)
lensesUserT :: UserT (Lenses UserT f)
lensesAddressT = tableLenses
lensesUserT = tableLenses
lensesShoppingCart2 :: ShoppingCart2Db (TableLens f ShoppingCart2Db)
lensesShoppingCart2 = dbLenses
xaddressId :: Lens' (AddressT f) (Columnar f Int32)
xaddressId = case lensesAddressT.addressId of LensFor x -> x
xaddressLine1 :: Lens' (AddressT f) (Columnar f Text)
xaddressLine1 = case lensesAddressT.addressLine1 of LensFor x -> x
xaddressLine2 :: Lens' (AddressT f) (Columnar f (Maybe Text))
xaddressLine2 = case lensesAddressT.addressLine2 of LensFor x -> x
xaddressCity :: Lens' (AddressT f) (Columnar f Text)
xaddressCity = case lensesAddressT.addressCity of LensFor x -> x
xaddressState :: Lens' (AddressT f) (Columnar f Text)
xaddressState = case lensesAddressT.addressState of LensFor x -> x
xaddressZip :: Lens' (AddressT f) (Columnar f Text)
xaddressZip = case lensesAddressT.addressZip of LensFor x -> x
xaddressForUserId :: Lens' (AddressT f) (Columnar f Text)
xaddressForUserId = case lensesAddressT.addressForUser of UserId (LensFor x) -> x
xuserEmail :: Lens' (UserT f) (Columnar f Text)
xuserEmail = case lensesUserT.userEmail of LensFor x -> x
xuserFirstName :: Lens' (UserT f) (Columnar f Text)
xuserFirstName = case lensesUserT.userFirstName of LensFor x -> x
xuserLastName :: Lens' (UserT f) (Columnar f Text)
xuserLastName = case lensesUserT.userLastName of LensFor x -> x
xuserPassword :: Lens' (UserT f) (Columnar f Text)
xuserPassword = case lensesUserT.userPassword of LensFor x -> x
xshoppingCart2Users :: Lens' (ShoppingCart2Db f) (f (TableEntity UserT))
xshoppingCart2Users = case lensesShoppingCart2.shoppingCart2Users of TableLens x -> x
xshoppingCart2UserAddresses :: Lens' (ShoppingCart2Db f) (f (TableEntity AddressT))
xshoppingCart2UserAddresses = case lensesShoppingCart2.shoppingCart2UserAddresses of TableLens x -> x
{-------------------------------------------------------------------------------
Tests proper
-------------------------------------------------------------------------------}
tests :: TestTree
tests = testGroup "Test.Record.Beam.Tutorial2" [
testCase "defaultDbSettings" test_tutorial2_defaultDbSettings
, testCase "tableLenses" test_tableLenses
, testCase "dbLenses" test_dbLenses
, testCase "SQL" test_SQL
]
test_tutorial2_defaultDbSettings :: Assertion
test_tutorial2_defaultDbSettings =
assertEqual "" expected shoppingCart2Db
where
expected :: DatabaseSettings be ShoppingCart2Db
expected = ShoppingCart2Db {
shoppingCart2Users = DatabaseEntity $ DatabaseTable {
dbTableSchema = Nothing
, dbTableOrigName = "shoppingCart2Users"
, dbTableCurrentName = "cart2_users"
, dbTableSettings = User {
userEmail = TableField {_fieldPath = NE.fromList ["userEmail"] , _fieldName = "email"}
, userFirstName = TableField {_fieldPath = NE.fromList ["userFirstName"] , _fieldName = "first_name"}
, userLastName = TableField {_fieldPath = NE.fromList ["userLastName"] , _fieldName = "last_name"}
, userPassword = TableField {_fieldPath = NE.fromList ["userPassword"] , _fieldName = "password"}
}
}
, shoppingCart2UserAddresses = DatabaseEntity $ DatabaseTable {
dbTableSchema = Nothing
, dbTableOrigName = "shoppingCart2UserAddresses"
, dbTableCurrentName = "addresses"
, dbTableSettings = Address {
addressId = TableField {_fieldPath = NE.fromList ["addressId"] , _fieldName = "id"}
, addressLine1 = TableField {_fieldPath = NE.fromList ["addressLine1"] , _fieldName = "address1"}
, addressLine2 = TableField {_fieldPath = NE.fromList ["addressLine2"] , _fieldName = "address2"}
, addressCity = TableField {_fieldPath = NE.fromList ["addressCity"] , _fieldName = "city"}
, addressState = TableField {_fieldPath = NE.fromList ["addressState"] , _fieldName = "state"}
, addressZip = TableField {_fieldPath = NE.fromList ["addressZip"] , _fieldName = "zip"}
, addressForUser = UserId $ TableField {
_fieldPath = NE.fromList ["addressForUser", "userEmail"]
, _fieldName = "for_user__email"
}
}
}
}
test_tableLenses :: Assertion
test_tableLenses = do
assertEqual "get" expectedGet $
exampleAddress ^. xaddressId
assertEqual "set" expectedSet $
exampleAddress & xaddressForUserId %~ Text.toUpper
where
expectedGet :: Int32
expectedGet = 1
expectedSet :: Address
expectedSet = exampleAddress{addressForUser = UserId ""}
test_dbLenses :: Assertion
test_dbLenses = do
assertEqual "get" expectedGet $
exampleDb ^. xshoppingCart2Users
assertEqual "set" expectedSet $
exampleDb & xshoppingCart2UserAddresses %~ (\(Const n) -> Const (n + 1))
where
expectedGet :: Const Int a
expectedGet = Const 1
exampleDb, expectedSet :: ShoppingCart2Db (Const Int)
exampleDb = ShoppingCart2Db {
shoppingCart2Users = Const 1
, shoppingCart2UserAddresses = Const 2
}
expectedSet = exampleDb{shoppingCart2UserAddresses = Const 3}
test_SQL :: Assertion
test_SQL = runInMemory $ \conn -> do
liftIO $ SQLite.execute_ conn $
"CREATE TABLE cart2_users (email VARCHAR NOT NULL, first_name VARCHAR NOT NULL, last_name VARCHAR NOT NULL, password VARCHAR NOT NULL, PRIMARY KEY( email ));"
liftIO $ SQLite.execute_ conn $
"CREATE TABLE addresses ( id INTEGER PRIMARY KEY, address1 VARCHAR NOT NULL, address2 VARCHAR, city VARCHAR NOT NULL, state VARCHAR NOT NULL, zip VARCHAR NOT NULL, for_user__email VARCHAR NOT NULL );"
runInsert $ insert shoppingCart2Db.shoppingCart2Users $
insertValues [ james, betty, sam ]
runInsert $ insert shoppingCart2Db.shoppingCart2UserAddresses $
insertExpressions addresses
-- Straight-forward SELECT
-- (Checks that primary keys have been assigned correctly)
addressesActual <-
runSelectReturningList $
select (all_ (shoppingCart2Db ^. xshoppingCart2UserAddresses))
liftIO $ assertEqual "addresses"
addressesExpected
addressesActual
-- Simple JOIN
usersAndRelatedAddressesActual <-
runSelectReturningList $ select $ do
user <- all_ (shoppingCart2Db ^. xshoppingCart2Users)
address <- all_ (shoppingCart2Db ^. xshoppingCart2UserAddresses)
guard_ (address ^. xaddressForUserId ==. user ^. xuserEmail)
return (user, address)
liftIO $ assertEqual "usersAndRelatedAddresses"
usersAndRelatedAddressesExpected
usersAndRelatedAddressesActual
-- Alternative way to write the same JOIN
usersAndRelatedAddressesUsingReferences <-
runSelectReturningList $ select $ do
user <- all_ (shoppingCart2Db ^. xshoppingCart2Users)
address <- all_ (shoppingCart2Db ^. xshoppingCart2UserAddresses)
guard_ (address.addressForUser `references_` user)
pure (user, address)
liftIO $ assertEqual "usersAndRelatedAddressesUsingReferences"
usersAndRelatedAddressesExpected
usersAndRelatedAddressesUsingReferences
-- Using ON
usersAndRelatedAddressesUsingRelated <-
runSelectReturningList $ select $ do
address <- all_ (shoppingCart2Db ^. xshoppingCart2UserAddresses)
user <- related_ (shoppingCart2Db ^. xshoppingCart2Users) address.addressForUser
pure (user, address)
liftIO $ assertEqual "usersAndRelatedAddressesUsingRelated"
usersAndRelatedAddressesExpected
usersAndRelatedAddressesUsingRelated
-- WHERE on a foreign key
bettysAddresses <-
runSelectReturningList $ select $ do
address <- all_ (shoppingCart2Db ^. xshoppingCart2UserAddresses)
guard_ (address.addressForUser ==. val_ bettyId)
pure address
liftIO $ assertEqual "bettysAddresses"
[addr2, addr3]
bettysAddresses
-- Simple UPDATE
runUpdate $ save (shoppingCart2Db ^. xshoppingCart2Users) $
james{userPassword = superSecure}
[james'] <- runSelectReturningList $
lookup_ (shoppingCart2Db ^. xshoppingCart2Users) jamesId
liftIO $ assertEqual "James' new password"
superSecure
(james' ^. xuserPassword)
-- More granular UPDATE
runUpdate $ update (shoppingCart2Db ^. xshoppingCart2UserAddresses)
(\address -> mconcat [
address ^. xaddressCity <-. val_ "Sugarville"
, address ^. xaddressZip <-. val_ "12345"
]
)
(\address ->
address ^. xaddressCity ==. val_ "Sugarland"
&&. address ^. xaddressState ==. val_ "TX"
)
updatedAddresses <- runSelectReturningList $
select $ all_ (shoppingCart2Db ^. xshoppingCart2UserAddresses)
liftIO $ assertEqual "updatedAddresses"
[addr1, addr2, addr3']
updatedAddresses
-- DELETE
runDelete $ delete (shoppingCart2Db ^. xshoppingCart2UserAddresses)
(\address ->
address ^. xaddressCity ==. "Houston"
&&. address.addressForUser `references_` val_ betty
)
afterDelete <- runSelectReturningList $
select $ all_ (shoppingCart2Db ^. xshoppingCart2UserAddresses)
liftIO $ assertEqual "afterDelete"
[addr1, addr3']
afterDelete
where
james, betty, sam :: User
james = User "" "James" "Smith" "b4cc344d25a2efe540adbf2678e2304c"
betty = User "" "Betty" "Jones" "82b054bd83ffad9b6cf8bdb98ce3cc2f"
sam = User "" "Sam" "Taylor" "332532dcfaa1cbf61e2a266bd723612c"
jamesId, bettyId :: UserId
jamesId = UserId ""
bettyId = UserId ""
The tutorial uses @pk@ directly , rather than @val . pk@.
-- This is possible if we make @james@ and co polymorphic
--
> : : UserT ( QExpr s )
--
We can do that ( because of a ' IsString ' instance for ' QExpr ' , but then we
get into trouble in @addr1@ and co.
addresses :: [AddressT (QExpr Sqlite s)]
addresses = [
Address default_ (val_ "123 Little Street") (val_ Nothing) (val_ "Boston") (val_ "MA") (val_ "12345") (val_ (pk james))
, Address default_ (val_ "222 Main Street") (val_ (Just "Ste 1")) (val_ "Houston") (val_ "TX") (val_ "8888") (val_ (pk betty))
, Address default_ (val_ "9999 Residence Ave") (val_ Nothing) (val_ "Sugarland") (val_ "TX") (val_ "8989") (val_ (pk betty))
]
addr1, addr2, addr3, addr3' :: Address
addr1 = Address 1 "123 Little Street" Nothing "Boston" "MA" "12345" (pk james)
addr2 = Address 2 "222 Main Street" (Just "Ste 1") "Houston" "TX" "8888" (pk betty)
addr3 = Address 3 "9999 Residence Ave" Nothing "Sugarland" "TX" "8989" (pk betty)
addr3' = Address 3 "9999 Residence Ave" Nothing "Sugarville" "TX" "12345" (pk betty)
addressesExpected :: [Address]
addressesExpected = [
addr1
, addr2
, addr3
]
usersAndRelatedAddressesExpected :: [(User, Address)]
usersAndRelatedAddressesExpected = [
(james, addr1)
, (betty, addr2)
, (betty, addr3)
]
superSecure :: Text
superSecure = "52a516ca6df436828d9c0d26e31ef704"
| null | https://raw.githubusercontent.com/well-typed/large-records/fb983aa136c2602499c2421323bd52b6a54b7c9a/beam-large-records/test/Test/Record/Beam/Tutorial2.hs | haskell | # LANGUAGE ConstraintKinds #
# LANGUAGE DeriveAnyClass #
# LANGUAGE KindSignatures #
# LANGUAGE OverloadedStrings #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
# OPTIONS_GHC -fplugin=Data.Record.Plugin.WithRDP #
* Exported for the benefit of follow-up tutorials
------------------------------------------------------------------------------
New table: with a foreign key
------------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
-----------------------------------------------------------------------------}
# ANN type ShoppingCart2Db largeRecord #
----------------------------------------------------------------------------
----------------------------------------------------------------------------
-----------------------------------------------------------------------------}
------------------------------------------------------------------------------
Tests proper
------------------------------------------------------------------------------
Straight-forward SELECT
(Checks that primary keys have been assigned correctly)
Simple JOIN
Alternative way to write the same JOIN
Using ON
WHERE on a foreign key
Simple UPDATE
More granular UPDATE
DELETE
This is possible if we make @james@ and co polymorphic
| # LANGUAGE DataKinds #
# LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE ExistentialQuantification #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE StandaloneDeriving #
# LANGUAGE TypeApplications #
# OPTIONS_GHC -Wno - missing - signatures -Wno - unused - top - binds #
module Test.Record.Beam.Tutorial2 (
tests
, AddressT(..)
, Address
, PrimaryKey(..)
) where
import Data.Functor.Const
import Data.Int
import Data.Kind
import Data.Record.Plugin
import Data.Text (Text)
import Database.Beam
import Database.Beam.Schema.Tables
import Lens.Micro
import qualified Data.List.NonEmpty as NE
import qualified Data.Text as Text
import qualified Database.SQLite.Simple as SQLite
import qualified GHC.Generics as GHC
import Test.Tasty (TestTree, testGroup)
import Test.Tasty.HUnit (Assertion, testCase, assertEqual)
import Test.Record.Beam.Tutorial1 hiding (tests)
import Test.Record.Beam.Util.SQLite
# ANN type AddressT largeRecord #
data AddressT (f :: Type -> Type) = Address {
addressId :: C f Int32
, addressLine1 :: C f Text
, addressLine2 :: C f (Maybe Text)
, addressCity :: C f Text
, addressState :: C f Text
, addressZip :: C f Text
, addressForUser :: PrimaryKey UserT f
}
deriving (Show, Eq)
deriving anyclass (Beamable)
-- | A fully materialised address row ('Identity' column functor).
type Address = AddressT Identity
-- | Primary key of a materialised address row.
type AddressId = PrimaryKey AddressT Identity
instance Table AddressT where
  -- The primary key is the integer id column.
  data PrimaryKey AddressT f = AddressId (Columnar f Int32)
    deriving stock (GHC.Generic)
    deriving anyclass (Beamable)
  primaryKey addr = AddressId $ addr.addressId
-- Standalone deriving because the instances need a @Columnar f Int32@ context.
deriving instance Show (Columnar f Int32) => Show (PrimaryKey AddressT f)
deriving instance Eq (Columnar f Int32) => Eq (PrimaryKey AddressT f)
-- | Fixture row used by the lens tests below.
-- (Record construction is order-independent, so fields are listed in an
-- arbitrary order here.)
exampleAddress :: Address
exampleAddress = Address {
      addressForUser = UserId ""
    , addressZip = "zip"
    , addressState = "state"
    , addressCity = "city"
    , addressLine2 = Nothing
    , addressLine1 = "street"
    , addressId = 1
    }
{-------------------------------------------------------------------------------
  Version 2 of the DB
-------------------------------------------------------------------------------}
-- | Version 2 of the shopping-cart database: the users table from Tutorial1
-- plus the new addresses table.
data ShoppingCart2Db (f :: Type -> Type) = ShoppingCart2Db {
      shoppingCart2Users :: f (TableEntity UserT)
    , shoppingCart2UserAddresses :: f (TableEntity AddressT)
    }
  deriving (Show, Eq)
instance Database be ShoppingCart2Db
-- | DB settings: names derived by 'defaultDbSettings', then overridden for
-- the addresses table (entity name plus the two address-line column names).
shoppingCart2Db :: forall be. DatabaseSettings be ShoppingCart2Db
shoppingCart2Db = defaultDbSettings `withDbModification`
    dbModification{shoppingCart2UserAddresses =
        setEntityName "addresses"
     <> modifyTableFields
          tableModification{addressLine1 = fieldNamed "address1"
                           ,addressLine2 = fieldNamed "address2"
                           }
      }
{-------------------------------------------------------------------------------
  Derive lenses

  TODO: Can we avoid the type signature on 'lensesAddressT' and co?
-------------------------------------------------------------------------------}
-- Table/database lens records; each field below is unwrapped from its
-- 'LensFor' / 'TableLens' wrapper to obtain a plain van-Laarhoven lens.
lensesAddressT :: AddressT (Lenses AddressT f)
lensesUserT :: UserT (Lenses UserT f)
lensesAddressT = tableLenses
lensesUserT = tableLenses
lensesShoppingCart2 :: ShoppingCart2Db (TableLens f ShoppingCart2Db)
lensesShoppingCart2 = dbLenses
-- Per-column lenses for the addresses table.
xaddressId :: Lens' (AddressT f) (Columnar f Int32)
xaddressId = case lensesAddressT.addressId of LensFor x -> x
xaddressLine1 :: Lens' (AddressT f) (Columnar f Text)
xaddressLine1 = case lensesAddressT.addressLine1 of LensFor x -> x
xaddressLine2 :: Lens' (AddressT f) (Columnar f (Maybe Text))
xaddressLine2 = case lensesAddressT.addressLine2 of LensFor x -> x
xaddressCity :: Lens' (AddressT f) (Columnar f Text)
xaddressCity = case lensesAddressT.addressCity of LensFor x -> x
xaddressState :: Lens' (AddressT f) (Columnar f Text)
xaddressState = case lensesAddressT.addressState of LensFor x -> x
xaddressZip :: Lens' (AddressT f) (Columnar f Text)
xaddressZip = case lensesAddressT.addressZip of LensFor x -> x
-- The foreign-key lens reaches through the 'UserId' wrapper to the e-mail.
xaddressForUserId :: Lens' (AddressT f) (Columnar f Text)
xaddressForUserId = case lensesAddressT.addressForUser of UserId (LensFor x) -> x
-- Per-column lenses for the users table.
xuserEmail :: Lens' (UserT f) (Columnar f Text)
xuserEmail = case lensesUserT.userEmail of LensFor x -> x
xuserFirstName :: Lens' (UserT f) (Columnar f Text)
xuserFirstName = case lensesUserT.userFirstName of LensFor x -> x
xuserLastName :: Lens' (UserT f) (Columnar f Text)
xuserLastName = case lensesUserT.userLastName of LensFor x -> x
xuserPassword :: Lens' (UserT f) (Columnar f Text)
xuserPassword = case lensesUserT.userPassword of LensFor x -> x
-- Per-table lenses for the database record.
xshoppingCart2Users :: Lens' (ShoppingCart2Db f) (f (TableEntity UserT))
xshoppingCart2Users = case lensesShoppingCart2.shoppingCart2Users of TableLens x -> x
xshoppingCart2UserAddresses :: Lens' (ShoppingCart2Db f) (f (TableEntity AddressT))
xshoppingCart2UserAddresses = case lensesShoppingCart2.shoppingCart2UserAddresses of TableLens x -> x
-- | All tests for this tutorial part.
tests :: TestTree
tests = testGroup "Test.Record.Beam.Tutorial2" [
      testCase "defaultDbSettings" test_tutorial2_defaultDbSettings
    , testCase "tableLenses" test_tableLenses
    , testCase "dbLenses" test_dbLenses
    , testCase "SQL" test_SQL
    ]
-- | Verify the names 'defaultDbSettings' derives (plus our overrides):
-- users keep the Tutorial1 names, addresses get the renamed table and
-- @address1@/@address2@ columns, and the FK column is @for_user__email@.
test_tutorial2_defaultDbSettings :: Assertion
test_tutorial2_defaultDbSettings =
    assertEqual "" expected shoppingCart2Db
  where
    expected :: DatabaseSettings be ShoppingCart2Db
    expected = ShoppingCart2Db {
          shoppingCart2Users = DatabaseEntity $ DatabaseTable {
              dbTableSchema = Nothing
            , dbTableOrigName = "shoppingCart2Users"
            , dbTableCurrentName = "cart2_users"
            , dbTableSettings = User {
                  userEmail = TableField {_fieldPath = NE.fromList ["userEmail"] , _fieldName = "email"}
                , userFirstName = TableField {_fieldPath = NE.fromList ["userFirstName"] , _fieldName = "first_name"}
                , userLastName = TableField {_fieldPath = NE.fromList ["userLastName"] , _fieldName = "last_name"}
                , userPassword = TableField {_fieldPath = NE.fromList ["userPassword"] , _fieldName = "password"}
                }
            }
        , shoppingCart2UserAddresses = DatabaseEntity $ DatabaseTable {
              dbTableSchema = Nothing
            , dbTableOrigName = "shoppingCart2UserAddresses"
            , dbTableCurrentName = "addresses"
            , dbTableSettings = Address {
                  addressId = TableField {_fieldPath = NE.fromList ["addressId"] , _fieldName = "id"}
                , addressLine1 = TableField {_fieldPath = NE.fromList ["addressLine1"] , _fieldName = "address1"}
                , addressLine2 = TableField {_fieldPath = NE.fromList ["addressLine2"] , _fieldName = "address2"}
                , addressCity = TableField {_fieldPath = NE.fromList ["addressCity"] , _fieldName = "city"}
                , addressState = TableField {_fieldPath = NE.fromList ["addressState"] , _fieldName = "state"}
                , addressZip = TableField {_fieldPath = NE.fromList ["addressZip"] , _fieldName = "zip"}
                , addressForUser = UserId $ TableField {
                      _fieldPath = NE.fromList ["addressForUser", "userEmail"]
                    , _fieldName = "for_user__email"
                    }
                }
            }
        }
-- | Table lenses support both read ('^.') and modify ('%~').
-- NOTE(review): the e-mail string literals in this copy of the file appear
-- stripped to @""@ (see 'exampleAddress'); with an empty string,
-- 'Text.toUpper' is a no-op, which is why 'expectedSet' equals the original
-- row -- TODO confirm against the upstream source.
test_tableLenses :: Assertion
test_tableLenses = do
    assertEqual "get" expectedGet $
      exampleAddress ^. xaddressId
    assertEqual "set" expectedSet $
      exampleAddress & xaddressForUserId %~ Text.toUpper
  where
    expectedGet :: Int32
    expectedGet = 1
    expectedSet :: Address
    expectedSet = exampleAddress{addressForUser = UserId ""}
-- | Database lenses work for an arbitrary entity functor ('Const' 'Int' here).
test_dbLenses :: Assertion
test_dbLenses = do
    assertEqual "get" expectedGet $
      exampleDb ^. xshoppingCart2Users
    assertEqual "set" expectedSet $
      exampleDb & xshoppingCart2UserAddresses %~ (\(Const n) -> Const (n + 1))
  where
    expectedGet :: Const Int a
    expectedGet = Const 1
    exampleDb, expectedSet :: ShoppingCart2Db (Const Int)
    exampleDb = ShoppingCart2Db {
          shoppingCart2Users = Const 1
        , shoppingCart2UserAddresses = Const 2
        }
    expectedSet = exampleDb{shoppingCart2UserAddresses = Const 3}
-- | End-to-end test against an in-memory SQLite database: INSERT, SELECT,
-- three styles of JOIN, WHERE on a foreign key, UPDATE (whole-row 'save' and
-- granular 'update') and DELETE.
test_SQL :: Assertion
test_SQL = runInMemory $ \conn -> do
    -- Create the schema by hand (no DDL generation here)
    liftIO $ SQLite.execute_ conn $
      "CREATE TABLE cart2_users (email VARCHAR NOT NULL, first_name VARCHAR NOT NULL, last_name VARCHAR NOT NULL, password VARCHAR NOT NULL, PRIMARY KEY( email ));"
    liftIO $ SQLite.execute_ conn $
      "CREATE TABLE addresses ( id INTEGER PRIMARY KEY, address1 VARCHAR NOT NULL, address2 VARCHAR, city VARCHAR NOT NULL, state VARCHAR NOT NULL, zip VARCHAR NOT NULL, for_user__email VARCHAR NOT NULL );"
    -- Populate both tables
    runInsert $ insert shoppingCart2Db.shoppingCart2Users $
      insertValues [ james, betty, sam ]
    runInsert $ insert shoppingCart2Db.shoppingCart2UserAddresses $
      insertExpressions addresses
    -- Straight-forward SELECT (checks that primary keys were assigned)
    addressesActual <-
      runSelectReturningList $
        select (all_ (shoppingCart2Db ^. xshoppingCart2UserAddresses))
    liftIO $ assertEqual "addresses"
      addressesExpected
      addressesActual
    -- Simple JOIN via guard_ on the two e-mail columns
    usersAndRelatedAddressesActual <-
      runSelectReturningList $ select $ do
        user <- all_ (shoppingCart2Db ^. xshoppingCart2Users)
        address <- all_ (shoppingCart2Db ^. xshoppingCart2UserAddresses)
        guard_ (address ^. xaddressForUserId ==. user ^. xuserEmail)
        return (user, address)
    liftIO $ assertEqual "usersAndRelatedAddresses"
      usersAndRelatedAddressesExpected
      usersAndRelatedAddressesActual
    -- Same JOIN written with references_
    usersAndRelatedAddressesUsingReferences <-
      runSelectReturningList $ select $ do
        user <- all_ (shoppingCart2Db ^. xshoppingCart2Users)
        address <- all_ (shoppingCart2Db ^. xshoppingCart2UserAddresses)
        guard_ (address.addressForUser `references_` user)
        pure (user, address)
    liftIO $ assertEqual "usersAndRelatedAddressesUsingReferences"
      usersAndRelatedAddressesExpected
      usersAndRelatedAddressesUsingReferences
    -- Same JOIN written with related_
    usersAndRelatedAddressesUsingRelated <-
      runSelectReturningList $ select $ do
        address <- all_ (shoppingCart2Db ^. xshoppingCart2UserAddresses)
        user <- related_ (shoppingCart2Db ^. xshoppingCart2Users) address.addressForUser
        pure (user, address)
    liftIO $ assertEqual "usersAndRelatedAddressesUsingRelated"
      usersAndRelatedAddressesExpected
      usersAndRelatedAddressesUsingRelated
    -- WHERE on a foreign key
    bettysAddresses <-
      runSelectReturningList $ select $ do
        address <- all_ (shoppingCart2Db ^. xshoppingCart2UserAddresses)
        guard_ (address.addressForUser ==. val_ bettyId)
        pure address
    liftIO $ assertEqual "bettysAddresses"
      [addr2, addr3]
      bettysAddresses
    -- Simple UPDATE: save replaces the whole row identified by its PK
    runUpdate $ save (shoppingCart2Db ^. xshoppingCart2Users) $
      james{userPassword = superSecure}
    [james'] <- runSelectReturningList $
      lookup_ (shoppingCart2Db ^. xshoppingCart2Users) jamesId
    liftIO $ assertEqual "James' new password"
      superSecure
      (james' ^. xuserPassword)
    -- More granular UPDATE: set two columns with a WHERE clause
    runUpdate $ update (shoppingCart2Db ^. xshoppingCart2UserAddresses)
                  (\address -> mconcat [
                      address ^. xaddressCity <-. val_ "Sugarville"
                    , address ^. xaddressZip <-. val_ "12345"
                    ]
                  )
                  (\address ->
                         address ^. xaddressCity ==. val_ "Sugarland"
                     &&. address ^. xaddressState ==. val_ "TX"
                  )
    updatedAddresses <- runSelectReturningList $
      select $ all_ (shoppingCart2Db ^. xshoppingCart2UserAddresses)
    liftIO $ assertEqual "updatedAddresses"
      [addr1, addr2, addr3']
      updatedAddresses
    -- DELETE
    runDelete $ delete (shoppingCart2Db ^. xshoppingCart2UserAddresses)
                  (\address ->
                         address ^. xaddressCity ==. "Houston"
                     &&. address.addressForUser `references_` val_ betty
                  )
    afterDelete <- runSelectReturningList $
      select $ all_ (shoppingCart2Db ^. xshoppingCart2UserAddresses)
    liftIO $ assertEqual "afterDelete"
      [addr1, addr3']
      afterDelete
  where
    james, betty, sam :: User
    -- NOTE(review): the e-mail literals below appear stripped to "" in this
    -- copy of the file -- TODO confirm against the upstream source.
    james = User "" "James" "Smith" "b4cc344d25a2efe540adbf2678e2304c"
    betty = User "" "Betty" "Jones" "82b054bd83ffad9b6cf8bdb98ce3cc2f"
    sam = User "" "Sam" "Taylor" "332532dcfaa1cbf61e2a266bd723612c"
    jamesId, bettyId :: UserId
    jamesId = UserId ""
    bettyId = UserId ""
    -- The tutorial uses @pk@ directly, rather than @val_ . pk@.
    --
    -- > :: UserT (QExpr s)
    --
    -- We can do that (because of a 'IsString' instance for 'QExpr'), but then we
    -- get into trouble in @addr1@ and co.
    addresses :: [AddressT (QExpr Sqlite s)]
    addresses = [
        Address default_ (val_ "123 Little Street") (val_ Nothing) (val_ "Boston") (val_ "MA") (val_ "12345") (val_ (pk james))
      , Address default_ (val_ "222 Main Street") (val_ (Just "Ste 1")) (val_ "Houston") (val_ "TX") (val_ "8888") (val_ (pk betty))
      , Address default_ (val_ "9999 Residence Ave") (val_ Nothing) (val_ "Sugarland") (val_ "TX") (val_ "8989") (val_ (pk betty))
      ]
    addr1, addr2, addr3, addr3' :: Address
    addr1 = Address 1 "123 Little Street" Nothing "Boston" "MA" "12345" (pk james)
    addr2 = Address 2 "222 Main Street" (Just "Ste 1") "Houston" "TX" "8888" (pk betty)
    addr3 = Address 3 "9999 Residence Ave" Nothing "Sugarland" "TX" "8989" (pk betty)
    addr3' = Address 3 "9999 Residence Ave" Nothing "Sugarville" "TX" "12345" (pk betty)
    addressesExpected :: [Address]
    addressesExpected = [
        addr1
      , addr2
      , addr3
      ]
    usersAndRelatedAddressesExpected :: [(User, Address)]
    usersAndRelatedAddressesExpected = [
        (james, addr1)
      , (betty, addr2)
      , (betty, addr3)
      ]
    superSecure :: Text
    superSecure = "52a516ca6df436828d9c0d26e31ef704"
|
4897cb20061d1692dbf2562c4a20a13e65a779639298ca8c3c06d6ecc65c4834 | melange-re/melange | js_dump.ml | compiler
* Copyright ( C ) 2015 - 2016 Bloomberg Finance L.P.
* /
* Copyright ( C ) 2010
* Laboratoire PPS - CNRS Université Paris Diderot
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , with linking exception ;
* either version 2.1 of the License , or ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
* Copyright (C) 2015-2016 Bloomberg Finance L.P.
* /
* Copyright (C) 2010 Jérôme Vouillon
* Laboratoire PPS - CNRS Université Paris Diderot
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, with linking exception;
* either version 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*)
(* Authors: Jérôme Vouillon, Hongbo Zhang
   http://stackoverflow.com/questions/2846283/what-are-the-rules-for-javascripts-automatic-semicolon-insertion-asi
   ASI catch up
   {[
     a=b
     ++c
     ---
     a=b ++c
     ====================
     a ++
     ---
     a
     ++
     ====================
     a --
     ---
     a
     --
     ====================
     (continue/break/return/throw) a
     ---
     (continue/break/return/throw)
     a
     ====================
   ]}
*)
(* Property key under which the constructor name is attached to a block when
   [Js_config.debug] is set (see the Blk_record_inlined case below) *)
let name_symbol = Js_op.Symbol_name
(* Short aliases used throughout this printer *)
module E = Js_exp_make
module S = Js_stmt_make
module L = Js_dump_lit
(* These modules are dynamically inserted in the last stage
   {Caml_curry}
   {Caml_option}

   They can appear anywhere, so even if you have a module
   {
     let module Caml_block = ...

     (* Later passes would insert the use of Caml_block here, which should
        point to the runtime module
     *)
   }
   There is no sane way to easily detect this ahead of time, so we should be
   conservative here.
   (Our call to [Js_fun_env.get_unbounded env] is not precise.)
*)
(* Printing context: the pretty-printer handle plus the ident scope used to
   choose printable names for OCaml idents. *)
type cxt = { scope : Ext_pp_scope.t; pp : Ext_pp.t }
let from_pp pp = { scope = Ext_pp_scope.empty; pp }
let from_buffer buf = from_pp (Ext_pp.from_buffer buf)
let update_scope cxt scope = { cxt with scope }
(* Print an ident; returns the context with the (possibly extended) scope *)
let ident cxt id = update_scope cxt (Ext_pp_scope.ident cxt.scope cxt.pp id)
(* Thin wrappers over Ext_pp, partially applied to the context's printer *)
let string cxt s = Ext_pp.string cxt.pp s
let group cxt = Ext_pp.group cxt.pp
let newline cxt = Ext_pp.newline cxt.pp
let paren_group cxt = Ext_pp.paren_group cxt.pp
let paren_vgroup cxt = Ext_pp.paren_vgroup cxt.pp
let vgroup cxt = Ext_pp.vgroup cxt.pp
let space cxt = Ext_pp.space cxt.pp
let cond_paren_group cxt = Ext_pp.cond_paren_group cxt.pp
let paren cxt = Ext_pp.paren cxt.pp
let brace_vgroup cxt = Ext_pp.brace_vgroup cxt.pp
let bracket_group cxt = Ext_pp.bracket_group cxt.pp
let bracket_vgroup cxt = Ext_pp.bracket_vgroup cxt.pp
(* Scope manipulation helpers threading the record through *)
let merge_scope cxt l =
  let scope = Ext_pp_scope.merge cxt.scope l in
  { cxt with scope }
let sub_scope cxt l = update_scope cxt (Ext_pp_scope.sub_scope cxt.scope l)
let str_of_ident cxt id =
  let str, scope = Ext_pp_scope.str_of_ident cxt.scope id in
  (str, update_scope cxt scope)
let at_least_two_lines cxt = Ext_pp.at_least_two_lines cxt.pp
let flush cxt () = Ext_pp.flush cxt.pp ()
(* Helpers that print references into the Curry runtime module,
   e.g. [Curry.__2], [Curry.app], [Curry._3]. *)
module Curry_gen = struct
  (* Prints the module prefix, i.e. ["Curry."] *)
  let pp_curry_dot cxt =
    string cxt Js_runtime_modules.curry;
    string cxt L.dot

  (* Prints [Curry.__<len>] *)
  let pp_optimize_curry cxt (len : int) =
    pp_curry_dot cxt;
    string cxt (Printf.sprintf "__%d" len)

  (* Prints [Curry.app] *)
  let pp_app_any cxt =
    pp_curry_dot cxt;
    string cxt "app"

  (* Prints [Curry._<len>] *)
  let pp_app cxt (len : int) =
    pp_curry_dot cxt;
    string cxt (Printf.sprintf "_%d" len)
end
(* Width (in indent units) occupied by the keywords, used to align
   continuations after [return] / [throw] *)
let return_indent = String.length L.return / Ext_pp.indent_length
let throw_indent = String.length L.throw / Ext_pp.indent_length
let semi cxt = string cxt L.semi
let comma cxt = string cxt L.comma
(* Render an exception block as a JS object literal.
   Field 0 is always [Literals.exception_id]; the remaining fields are named
   "_i" for [Blk_extension] or taken from the record labels for
   [Blk_record_ext]. When [stack] is true an extra
   ["Error": new Error()] property is appended. *)
let exn_block_as_obj ~(stack : bool) (el : J.expression list) (ext : J.tag_info)
    : J.expression_desc =
  let field_name =
    match ext with
    | Blk_extension -> (
        fun i ->
          match i with 0 -> Literals.exception_id | i -> "_" ^ string_of_int i)
    | Blk_record_ext ss -> (
        fun i -> match i with 0 -> Literals.exception_id | i -> ss.(i - 1))
    | _ -> assert false
  in
  Object
    (if stack then
       Ext_list.mapi_append el
         (fun i e -> (Js_op.Lit (field_name i), e))
         [ (Js_op.Lit "Error", E.new_ (E.js_global "Error") []) ]
     else Ext_list.mapi el (fun i e -> (Js_op.Lit (field_name i), e)))
(* Fold [element] over [ls], threading the context through, and print
   [inter] between consecutive elements (a separated-list printer).
   Note: [inter] receives the context from *before* the element was
   printed, matching the original behaviour. *)
let rec iter_lst cxt ls element inter =
  match ls with
  | [] -> cxt
  | [ last ] -> element cxt last
  | hd :: tl ->
      let next_cxt = element cxt hd in
      inter cxt;
      iter_lst next_cxt tl element inter
(* A raw JS snippet is "simple enough" to print without a protective pair of
   parentheses when it consists solely of identifier-ish characters and
   dots (e.g. [foo.bar_baz]). *)
let raw_snippet_exp_simple_enough (s : string) =
  Ext_string.for_all s (function
    | 'a' .. 'z' | 'A' .. 'Z' | '_' | '.' -> true
    | _ -> false)
(* Parentheses are required when the expression starts syntactically with
   "{" or "function".

   TODO: be more conservative, since Google Closure will handle the
   precedence correctly; we also need people to read the code.
   Here we force parens for some alien operators.

   If we move assign into a statement, will it be less?
   TODO: construct a test case that does need to be parenthesized as an
   expression. IIFE does not apply (will be inlined?).

   [e = function (x) { ... };] is good. *)
(* [exp_need_paren e] decides whether [e] must be wrapped in parentheses:
   true for expressions that would otherwise start with "{" or "function"
   (object literals, function expressions, record-like blocks, raw JS
   expressions) and for calls whose callee is such an expression.
   Note: a dangling [|] left behind by a stripped comment was removed here;
   it made the match syntactically invalid. *)
let exp_need_paren (e : J.expression) =
  match e.expression_desc with
  | Call ({ expression_desc = Fun _ | Raw_js_code _ }, _, _) -> true
  | Raw_js_code { code_info = Exp _ }
  | Fun _
  | Caml_block
      ( _,
        _,
        _,
        ( Blk_record _ | Blk_module _ | Blk_poly_var | Blk_extension
        | Blk_record_ext _ | Blk_record_inlined _ | Blk_constructor _ ) )
  | Object _ ->
      true
  | Raw_js_code { code_info = Stmt _ }
  | Length _ | Call _ | Caml_block_tag _ | Seq _ | Static_index _ | Cond _
  | Bin _ | Is_null_or_undefined _ | String_index _ | Array_index _
  | String_append _ | Char_of_int _ | Char_to_int _ | Var _ | Undefined | Null
  | Str _ | Unicode _ | Array _ | Optional_block _ | Caml_block _ | FlatCall _
  | Typeof _ | Number _ | Js_not _ | Bool _ | New _ ->
      false
(* Prints idents separated by commas, threading the context *)
let comma_idents (cxt : cxt) ls = iter_lst cxt ls ident comma
(* Prints [(id1,id2,...)]; the updated context is deliberately discarded *)
let pp_paren_params (cxt : cxt) (lexical : Ident.t list) : unit =
  string cxt L.lparen;
  let (_ : cxt) = comma_idents cxt lexical in
  string cxt L.rparen
(* Print as underscore for unused vars, may not be
needed in the future *)
(* let ipp_ident cxt id (un_used : bool) =
Ext_pp_scope.ident cxt (
if un_used then
Ext_ident.make_unused ()
else
id) *)
(* Prints [var <id> = ] and returns the context extended with [id] *)
let pp_var_assign cxt id =
  string cxt L.var;
  space cxt;
  let acxt = ident cxt id in
  space cxt;
  string cxt L.eq;
  space cxt;
  acxt
(* Prints [var <id> = this;] followed by a newline *)
let pp_var_assign_this cxt id =
  let cxt = pp_var_assign cxt id in
  string cxt L.this;
  space cxt;
  semi cxt;
  newline cxt;
  cxt
(* Prints [var <id>;] (declaration without an initialiser) *)
let pp_var_declare cxt id =
  string cxt L.var;
  space cxt;
  let acxt = ident cxt id in
  semi cxt;
  acxt
(* Step operator of a for-loop: [++] when counting up, [--] when down *)
let pp_direction cxt (direction : J.for_direction) =
  match direction with
  | Up | Upto -> string cxt L.plus_plus
  | Downto -> string cxt L.minus_minus
(* Prints [return ] (keyword plus a trailing space) *)
let return_sp cxt =
  string cxt L.return;
  space cxt
let bool cxt b = string cxt (if b then L.true_ else L.false_)
(* Comma followed by a space / by a newline *)
let comma_sp cxt =
  comma cxt;
  space cxt
let comma_nl cxt =
  comma cxt;
  newline cxt
(* let drop_comment (x : J.expression) =
if x.comment = None then x
else {x with comment = None} *)
(* Prints a [debugger;] statement on its own line *)
let debugger_nl cxt =
  newline cxt;
  string cxt L.debugger;
  semi cxt;
  newline cxt
(* Prints [break ;] followed by a newline *)
let break_nl cxt =
  string cxt L.break;
  space cxt;
  semi cxt;
  newline cxt
(* Prints [continue <label>;] *)
let continue cxt s =
  string cxt L.continue;
  space cxt;
  string cxt s;
  semi cxt
(* Prints a comma+space separated formal parameter list *)
let formal_parameter_list cxt l = iter_lst cxt l ident comma_sp
(* IdentMap *)
(* f/122 -->
     f/122 is in the map
       if in, use the old mapping
       else
         check f,
           if in last bumped id
           else
             use "f", register it

   check "f"
     if not, use "f", register stamp -> 0
     else
       check stamp
         if in use it
         else check last bumped id, increase it and register *)
(*
Turn [function f (x,y) { return a (x,y)} ] into [Curry.__2(a)],
The idea is that [Curry.__2] will guess the arity of [a], if it does
hit, then there is no cost when passed
*)
(* [is_var b a] is true iff expression [b] is exactly the variable [a] *)
let is_var (b : J.expression) a =
  match b.expression_desc with Var (Id i) -> Ident.same i a | _ -> false
(* How a function expression is being printed: as a return value, bound to a
   top-level or local name, or anonymous. *)
type fn_exp_state =
  | Is_return (* for sure no name *)
  | Name_top of Ident.t
  | Name_non_top of Ident.t
  | No_name of { single_arg : bool }
(* true means for sure, false -- not sure *)
let default_fn_exp_state = No_name { single_arg = false }
(* TODO: refactoring
Note that {!pp_function} could print both statement and expression when [No_name] is given
*)
let rec try_optimize_curry cxt len function_id =
Curry_gen.pp_optimize_curry cxt len;
paren_group cxt 1 (fun _ -> expression ~level:1 cxt function_id)
and pp_function ~return_unit ~is_method cxt ~fn_state (l : Ident.t list)
(b : J.block) (env : Js_fun_env.t) : cxt =
match b with
| [
{
statement_desc =
Return
{
expression_desc =
Call
( ({ expression_desc = Var v; _ } as function_id),
ls,
{
see # 234
(* TODO: need a case to justify it*)
call_info = Call_builtin_runtime | Call_ml;
} );
};
};
]
match such case :
{ [ function(x , y ) { return u(x , y ) } ] }
it can be optimized in to either [ u ] or [ Curry.__n(u ) ]
{[ function(x,y){ return u(x,y) } ]}
it can be optimized in to either [u] or [Curry.__n(u)]
*)
(not is_method)
&& Ext_list.for_all2_no_exn ls l is_var
&&
match v with
(* This check is needed to avoid some edge cases
{[function(x){return x(x)}]}
here the function is also called `x`
*)
| Id id -> not (Ext_list.exists l (fun x -> Ident.same x id))
| Qualified _ -> true -> (
let optimize len ~p cxt v =
if p then try_optimize_curry cxt len function_id else vident cxt v
in
let len = List.length l in
(* length *)
match fn_state with
| Name_top i | Name_non_top i ->
let cxt = pp_var_assign cxt i in
let cxt = optimize len ~p:(arity = NA && len <= 8) cxt v in
semi cxt;
cxt
| Is_return | No_name _ ->
if fn_state = Is_return then return_sp cxt;
optimize len ~p:(arity = NA && len <= 8) cxt v)
| _ ->
let set_env : Set_ident.t =
(* identifiers will be printed cxtollowing*)
match fn_state with
| Is_return | No_name _ -> Js_fun_env.get_unbounded env
| Name_top id | Name_non_top id ->
Set_ident.add (Js_fun_env.get_unbounded env) id
in
(* the context will be continued after this function *)
let outer_cxt = merge_scope cxt set_env in
(* the context used to be printed inside this function
when printing a function,
only the enclosed variables and function name matters,
if the function does not capture any variable, then the context is empty
*)
let inner_cxt = sub_scope outer_cxt set_env in
let param_body () : unit =
if is_method then (
match l with
| [] -> assert false
| this :: arguments ->
let cxt =
paren_group cxt 1 (fun _ ->
formal_parameter_list inner_cxt arguments)
in
space cxt;
brace_vgroup cxt 1 (fun () ->
let cxt =
if Js_fun_env.get_unused env 0 then cxt
else pp_var_assign_this cxt this
in
function_body ~return_unit cxt b))
else
let cxt =
paren_group cxt 1 (fun _ -> formal_parameter_list inner_cxt l)
in
space cxt;
brace_vgroup cxt 1 (fun _ -> function_body ~return_unit cxt b)
in
let lexical : Set_ident.t = Js_fun_env.get_lexical_scope env in
let enclose lexical =
let handle lexical =
if Set_ident.is_empty lexical then (
match fn_state with
| Is_return ->
return_sp cxt;
string cxt L.function_;
space cxt;
param_body ()
| No_name { single_arg } ->
see # 1692 , add a paren for annoymous function for safety
cond_paren_group cxt (not single_arg) 1 (fun _ ->
string cxt L.function_;
space cxt;
param_body ())
| Name_non_top x ->
ignore (pp_var_assign inner_cxt x : cxt);
string cxt L.function_;
space cxt;
param_body ();
semi cxt
| Name_top x ->
string cxt L.function_;
space cxt;
ignore (ident inner_cxt x : cxt);
param_body ())
else
(* print our closure as
{[(function(x,y){ return function(..){...}} (x,y))]}
Maybe changed to `let` in the future
*)
let lexical = Set_ident.elements lexical in
(match fn_state with
| Is_return -> return_sp cxt
| No_name _ -> ()
| Name_non_top name | Name_top name ->
ignore (pp_var_assign inner_cxt name : cxt));
string cxt L.lparen;
string cxt L.function_;
pp_paren_params inner_cxt lexical;
brace_vgroup cxt 0 (fun _ ->
return_sp cxt;
string cxt L.function_;
space cxt;
(match fn_state with
| Is_return | No_name _ -> ()
| Name_non_top x | Name_top x -> ignore (ident inner_cxt x));
param_body ());
pp_paren_params inner_cxt lexical;
string cxt L.rparen;
match fn_state with
| Is_return | No_name _ -> () (* expression *)
| _ -> semi cxt (* has binding, a statement *)
in
handle
(match fn_state with
| (Name_top name | Name_non_top name) when Set_ident.mem lexical name
->
(*TODO: when calculating lexical we should not include itself *)
Set_ident.remove lexical name
| _ -> lexical)
in
enclose lexical;
outer_cxt
(* Assume the cond would not change the context,
since it can be either [int] or [string]
*)
and pp_one_case_clause : 'a. _ -> (_ -> 'a -> unit) -> 'a * J.case_clause -> _ =
fun cxt pp_cond
(switch_case, ({ switch_body; should_break; comment } : J.case_clause)) ->
let cxt =
group cxt 1 (fun _ ->
group cxt 1 (fun _ ->
string cxt L.case;
space cxt;
pp_comment_option cxt comment;
pp_cond cxt switch_case;
(* could be integer or string *)
space cxt;
string cxt L.colon);
group cxt 1 (fun _ ->
let cxt =
match switch_body with
| [] -> cxt
| _ ->
newline cxt;
statements false cxt switch_body
in
if should_break then (
newline cxt;
string cxt L.break;
semi cxt);
cxt))
in
newline cxt;
cxt
and loop_case_clauses : 'a. _ -> (_ -> 'a -> unit) -> ('a * _) list -> _ =
fun cxt pp_cond cases ->
Ext_list.fold_left cases cxt (fun acc x -> pp_one_case_clause acc pp_cond x)
and vident cxt (v : J.vident) =
match v with
| Id v
| Qualified ({ id = v }, None)
| Qualified ({ id = v; kind = External { default = true } }, _) ->
ident cxt v
| Qualified ({ id; kind = Ml | Runtime }, Some name) ->
let cxt = ident cxt id in
string cxt L.dot;
string cxt (Ext_ident.convert name);
cxt
| Qualified ({ id; kind = External _ }, Some name) ->
let cxt = ident cxt id in
Js_dump_property.property_access cxt.pp name;
cxt
(* The higher the level, the more likely that inner has to add parens *)
and expression ~level:l cxt (exp : J.expression) : cxt =
pp_comment_option cxt exp.comment;
expression_desc cxt ~level:l exp.expression_desc
and expression_desc cxt ~(level : int) x : cxt =
match x with
| Null ->
string cxt L.null;
cxt
| Undefined ->
string cxt L.undefined;
cxt
| Var v -> vident cxt v
| Bool b ->
bool cxt b;
cxt
| Seq (e1, e2) ->
cond_paren_group cxt (level > 0) 1 (fun () ->
let cxt = expression ~level:0 cxt e1 in
comma_sp cxt;
expression ~level:0 cxt e2)
| Fun (is_method, l, b, env, return_unit) ->
(* TODO: dump for comments *)
pp_function ~return_unit ~is_method cxt ~fn_state:default_fn_exp_state l b
env
TODO :
when [ e ] is [ ] with arity
print it in a more precise way
It seems the optimizer already did work to make sure
{ [
Call ( Raw_js_code ( s , Exp i ) , el , { Full } )
when Ext_list.length_equal el i
] }
when [e] is [Js_raw_code] with arity
print it in a more precise way
It seems the optimizer already did work to make sure
{[
Call (Raw_js_code (s, Exp i), el, {Full})
when Ext_list.length_equal el i
]}
*)
| Call (e, el, info) ->
cond_paren_group cxt (level > 15) 1 (fun _ ->
group cxt 1 (fun _ ->
match (info, el) with
| { arity = Full }, _ | _, [] ->
let cxt = expression ~level:15 cxt e in
paren_group cxt 1 (fun _ ->
match el with
| [
{
expression_desc =
Fun (is_method, l, b, env, return_unit);
};
] ->
pp_function ~return_unit ~is_method cxt
~fn_state:(No_name { single_arg = true })
l b env
| _ -> arguments cxt el)
| _, _ ->
let len = List.length el in
if 1 <= len && len <= 8 then (
Curry_gen.pp_app cxt len;
paren_group cxt 1 (fun _ -> arguments cxt (e :: el)))
else (
Curry_gen.pp_app_any cxt;
paren_group cxt 1 (fun _ ->
arguments cxt [ e; E.array Mutable el ]))))
| FlatCall (e, el) ->
group cxt 1 (fun _ ->
let cxt = expression ~level:15 cxt e in
string cxt L.dot;
string cxt L.apply;
paren_group cxt 1 (fun _ ->
string cxt L.null;
comma_sp cxt;
expression ~level:1 cxt el))
| Char_to_int e -> (
match e.expression_desc with
| String_index (a, b) ->
group cxt 1 (fun _ ->
let cxt = expression ~level:15 cxt a in
string cxt L.dot;
string cxt L.char_code_at;
paren_group cxt 1 (fun _ -> expression ~level:0 cxt b))
| _ ->
group cxt 1 (fun _ ->
let cxt = expression ~level:15 cxt e in
string cxt L.dot;
string cxt L.char_code_at;
string cxt "(0)";
cxt))
| Char_of_int e ->
group cxt 1 (fun _ ->
string cxt L.string_cap;
string cxt L.dot;
string cxt L.fromCharcode;
paren_group cxt 1 (fun _ -> arguments cxt [ e ]))
| Unicode s ->
string cxt "\"";
string cxt s;
string cxt "\"";
cxt
| Str (_, s) ->
(*TODO --
when utf8-> it will not escape '\\' which is definitely not we want
*)
Js_dump_string.pp_string cxt.pp s;
cxt
| Raw_js_code { code = s; code_info = info } -> (
match info with
| Exp exp_info ->
let raw_paren =
not
(match exp_info with
| Js_literal _ -> true
| Js_function _ | Js_exp_unknown ->
false || raw_snippet_exp_simple_enough s)
in
if raw_paren then string cxt L.lparen;
string cxt s;
if raw_paren then string cxt L.rparen;
cxt
| Stmt stmt_info ->
if stmt_info = Js_stmt_comment then string cxt s
else (
newline cxt;
string cxt s;
newline cxt);
cxt)
| Number v ->
let s =
match v with
| Float { f } -> Js_number.caml_float_literal_to_js_string f
(* attach string here for float constant folding?*)
| Int { i; c = Some c } -> Format.asprintf "/* %C */%ld" c i
| Int { i; c = None } ->
Int32.to_string
i (* check , js convention with ocaml lexical convention *)
| Uint i -> Format.asprintf "%lu" i
in
let need_paren =
if s.[0] = '-' then
level > 13 (* Negative numbers may need to be parenthesized. *)
else
Parenthesize as well when followed by a dot .
Infinity
NaN
in
let action _ = string cxt s in
if need_paren then paren cxt action else action ();
cxt
| Is_null_or_undefined e ->
cond_paren_group cxt (level > 0) 1 (fun _ ->
let cxt = expression ~level:1 cxt e in
space cxt;
string cxt "==";
space cxt;
string cxt L.null;
cxt)
| Js_not e ->
cond_paren_group cxt (level > 13) 1 (fun _ ->
string cxt "!";
expression ~level:13 cxt e)
| Typeof e ->
string cxt "typeof";
space cxt;
expression ~level:13 cxt e
| Bin
( Minus,
{ expression_desc = Number (Int { i = 0l; _ } | Float { f = "0." }) },
e )
TODO :
Handle multiple cases like
{ [ 0 . - x ] }
{ [ 0.00 - x ] }
{ [ 0.000 - x ] }
Handle multiple cases like
{[ 0. - x ]}
{[ 0.00 - x ]}
{[ 0.000 - x ]}
*) ->
cond_paren_group cxt (level > 13) 1 (fun _ ->
string cxt "-";
expression ~level:13 cxt e)
| Bin (op, e1, e2) ->
let out, lft, rght = Js_op_util.op_prec op in
let need_paren =
level > out || match op with Lsl | Lsr | Asr -> true | _ -> false
in
(* We are more conservative here, to make the generated code more readable
to the user *)
cond_paren_group cxt need_paren 1 (fun _ ->
let cxt = expression ~level:lft cxt e1 in
space cxt;
string cxt (Js_op_util.op_str op);
space cxt;
expression ~level:rght cxt e2)
| String_append (e1, e2) ->
let op : Js_op.binop = Plus in
let out, lft, rght = Js_op_util.op_prec op in
let need_paren =
level > out || match op with Lsl | Lsr | Asr -> true | _ -> false
in
cond_paren_group cxt need_paren 1 (fun _ ->
let cxt = expression ~level:lft cxt e1 in
space cxt;
string cxt "+";
space cxt;
expression ~level:rght cxt e2)
| Array (el, _) -> (
(* TODO: simplify for singleton list *)
match el with
| [] | [ _ ] -> bracket_group cxt 1 (fun _ -> array_element_list cxt el)
| _ -> bracket_vgroup cxt 1 (fun _ -> array_element_list cxt el))
| Optional_block (e, identity) ->
expression ~level cxt
(if identity then e
else E.runtime_call Js_runtime_modules.option "some" [ e ])
| Caml_block (el, _, _, Blk_module fields) ->
expression_desc cxt ~level
(Object
(Ext_list.map_combine fields el (fun x ->
Js_op.Lit (Ext_ident.convert x))))
name convention of Record is slight different from modules
| Caml_block (el, mutable_flag, _, Blk_record fields) ->
if Ext_array.for_alli fields (fun i v -> string_of_int i = v) then
expression_desc cxt ~level (Array (el, mutable_flag))
else
expression_desc cxt ~level
(Object (Ext_list.combine_array fields el (fun i -> Js_op.Lit i)))
| Caml_block (el, _, _, Blk_poly_var) -> (
match el with
| [ { expression_desc = Str (_, name) }; value ] ->
expression_desc cxt ~level
(Object
[
(Js_op.Lit Literals.polyvar_hash, E.str name);
(Lit Literals.polyvar_value, value);
])
| _ -> assert false)
| Caml_block (el, _, _, ((Blk_extension | Blk_record_ext _) as ext)) ->
expression_desc cxt ~level (exn_block_as_obj ~stack:false el ext)
| Caml_block (el, _, tag, Blk_record_inlined p) ->
let objs =
let tails =
Ext_list.combine_array_append p.fields el
(if !Js_config.debug then [ (name_symbol, E.str p.name) ] else [])
(fun i -> Js_op.Lit i)
in
if p.num_nonconst = 1 then tails
else
( Js_op.Lit L.tag,
if !Js_config.debug then tag else { tag with comment = Some p.name }
)
:: tails
in
if p.num_nonconst = 1 && not !Js_config.debug then
pp_comment_option cxt (Some p.name);
expression_desc cxt ~level (Object objs)
| Caml_block (el, _, tag, Blk_constructor p) ->
let not_is_cons = p.name <> Literals.cons in
let objs =
let tails =
Ext_list.mapi_append el
(fun i e ->
( (match (not_is_cons, i) with
| false, 0 -> Js_op.Lit Literals.hd
| false, 1 -> Js_op.Lit Literals.tl
| _ -> Js_op.Lit ("_" ^ string_of_int i)),
e ))
(if !Js_config.debug && not_is_cons then
[ (name_symbol, E.str p.name) ]
else [])
in
if p.num_nonconst = 1 then tails
else
( Js_op.Lit L.tag,
if !Js_config.debug then tag else { tag with comment = Some p.name }
)
:: tails
in
if p.num_nonconst = 1 && (not !Js_config.debug) && not_is_cons then
pp_comment_option cxt (Some p.name);
expression_desc cxt ~level (Object objs)
| Caml_block (_, _, _, (Blk_module_export | Blk_na _)) -> assert false
| Caml_block (el, mutable_flag, _tag, (Blk_tuple | Blk_class | Blk_array)) ->
expression_desc cxt ~level (Array (el, mutable_flag))
| Caml_block_tag e ->
group cxt 1 (fun _ ->
let cxt = expression ~level:15 cxt e in
string cxt L.dot;
string cxt L.tag;
cxt)
| Array_index (e, p) | String_index (e, p) ->
cond_paren_group cxt (level > 15) 1 (fun _ ->
group cxt 1 (fun _ ->
let cxt = expression ~level:15 cxt e in
bracket_group cxt 1 (fun _ -> expression ~level:0 cxt p)))
| Static_index (e, s, _) ->
cond_paren_group cxt (level > 15) 1 (fun _ ->
let cxt = expression ~level:15 cxt e in
Js_dump_property.property_access cxt.pp s;
(* See [ .obj_of_exports]
maybe in the ast level we should have
refer and export
*)
cxt)
| Length (e, _) ->
(* Todo: check parens *)
cond_paren_group cxt (level > 15) 1 (fun _ ->
let cxt = expression ~level:15 cxt e in
string cxt L.dot;
string cxt L.length;
cxt)
| New (e, el) ->
cond_paren_group cxt (level > 15) 1 (fun _ ->
group cxt 1 (fun _ ->
string cxt L.new_;
space cxt;
let cxt = expression ~level:16 cxt e in
paren_group cxt 1 (fun _ ->
match el with Some el -> arguments cxt el | None -> cxt)))
| Cond (e, e1, e2) ->
let action () =
let cxt = expression ~level:3 cxt e in
space cxt;
string cxt L.question;
space cxt;
[ level 1 ] is correct , however
to make nice indentation , force nested conditional to be parenthesized
[level 1] is correct, however
to make nice indentation , force nested conditional to be parenthesized
*)
let cxt = group cxt 1 (fun _ -> expression ~level:3 cxt e1) in
space cxt;
string cxt L.colon_space;
(* idem *)
group cxt 1 (fun _ -> expression ~level:3 cxt e2)
in
if level > 2 then paren_vgroup cxt 1 action else action ()
| Object lst ->
# 1946 object literal is easy to be
interpreted as block statement
here we avoid parens in such case
{ [
var f = { x : 2 , y : 2 }
] }
interpreted as block statement
here we avoid parens in such case
{[
var f = { x : 2 , y : 2}
]}
*)
cond_paren_group cxt (level > 1) 1 (fun _ ->
if lst = [] then (
string cxt "{}";
cxt)
else
brace_vgroup cxt 1 (fun _ -> property_name_and_value_list cxt lst))
and property_name_and_value_list cxt (l : J.property_map) =
iter_lst cxt l
(fun cxt (pn, e) ->
match e.expression_desc with
| Var (Id v | Qualified ({ id = v; _ }, None)) ->
let key = Js_dump_property.property_key pn in
let str, cxt = str_of_ident cxt v in
let content =
(* if key = str then key
else *)
key ^ L.colon_space ^ str
in
string cxt content;
cxt
| _ ->
let key = Js_dump_property.property_key pn in
string cxt key;
string cxt L.colon_space;
expression ~level:1 cxt e)
comma_nl
and array_element_list cxt (el : E.t list) : cxt =
iter_lst cxt el (expression ~level:1) comma_nl
and arguments cxt (l : E.t list) : cxt =
iter_lst cxt l (expression ~level:1) comma_sp
and variable_declaration top cxt (variable : J.variable_declaration) : cxt =
(* TODO: print [const/var] for different backends *)
match variable with
| { ident = i; value = None; ident_info; _ } ->
if ident_info.used_stats = Dead_pure then cxt else pp_var_declare cxt i
| { ident = name; value = Some e; ident_info = { used_stats; _ } } -> (
match used_stats with
| Dead_pure -> cxt
| Dead_non_pure ->
(* Make sure parens are added correctly *)
statement_desc top cxt (J.Exp e)
| _ -> (
match e.expression_desc with
| Fun (is_method, params, b, env, return_unit) ->
pp_function ~return_unit ~is_method cxt
~fn_state:(if top then Name_top name else Name_non_top name)
params b env
| _ ->
let cxt = pp_var_assign cxt name in
let cxt = expression ~level:1 cxt e in
semi cxt;
cxt))
and ipp_comment : 'a. cxt -> 'a -> unit = fun _cxt _comment -> ()
do n't print a new line -- ASI
FIXME : this still does not work in some cases ...
{ [
return / * ... * /
[ ... ]
] }
FIXME: this still does not work in some cases...
{[
return /* ... */
[... ]
]}
*)
and pp_comment cxt comment =
if String.length comment > 0 then (
string cxt "/* ";
string cxt comment;
string cxt " */")
and pp_comment_option cxt comment =
match comment with None -> () | Some x -> pp_comment cxt x
and f loc =
and statement top cxt ({ statement_desc = s; comment; _ } : J.statement) : cxt =
pp_comment_option cxt comment;
statement_desc top cxt s
and statement_desc top cxt (s : J.statement_desc) : cxt =
match s with
| Block [] ->
ipp_comment cxt L.empty_block;
(* debugging*)
cxt
| Exp { expression_desc = Var _ } ->
(* Does it make sense to optimize here? *)
semi cxt ;
cxt
| Exp e -> (
match e.expression_desc with
| Raw_js_code { code; code_info = Stmt Js_stmt_comment } ->
string cxt code;
cxt
| Raw_js_code { code_info = Exp (Js_literal { comment }) } ->
(match comment with
(* The %raw is just a comment *)
| Some s -> string cxt s
| None -> ());
cxt
| Str _ -> cxt
| _ ->
let cxt =
(if exp_need_paren e then paren_group cxt 1 else group cxt 0)
(fun _ -> expression ~level:0 cxt e)
in
semi cxt;
cxt)
| Block b ->
(* No braces needed here *)
ipp_comment cxt L.start_block;
let cxt = statements top cxt b in
ipp_comment cxt L.end_block;
cxt
| Variable l -> variable_declaration top cxt l
| If (e, s1, s2) -> (
(* TODO: always brace those statements *)
string cxt L.if_;
space cxt;
let cxt = paren_group cxt 1 (fun _ -> expression ~level:0 cxt e) in
space cxt;
let cxt = brace_block cxt s1 in
match s2 with
| [] | [ { statement_desc = Block [] | Exp { expression_desc = Var _ } } ]
->
newline cxt;
cxt
| [ ({ statement_desc = If _ } as nest) ]
| [
{
statement_desc = Block [ ({ statement_desc = If _; _ } as nest) ];
_;
};
] ->
space cxt;
string cxt L.else_;
space cxt;
statement false cxt nest
| _ :: _ as s2 ->
space cxt;
string cxt L.else_;
space cxt;
brace_block cxt s2)
| While (label, e, s, _env) ->
FIXME : print scope as well
(match label with
| Some i ->
string cxt i;
string cxt L.colon;
newline cxt
| None -> ());
let cxt =
match e.expression_desc with
| Number (Int { i = 1l }) ->
string cxt L.while_;
string cxt L.lparen;
string cxt L.true_;
string cxt L.rparen;
space cxt;
cxt
| _ ->
string cxt L.while_;
let cxt = paren_group cxt 1 (fun _ -> expression ~level:0 cxt e) in
space cxt;
cxt
in
let cxt = brace_block cxt s in
semi cxt;
cxt
| ForRange (for_ident_expression, finish, id, direction, s, env) ->
let action cxt =
vgroup cxt 0 (fun _ ->
let cxt =
group cxt 0 (fun _ ->
(* The only place that [semi] may have semantics here *)
string cxt L.for_;
paren_group cxt 1 (fun _ ->
let cxt, new_id =
match
(for_ident_expression, finish.expression_desc)
with
| Some ident_expression, (Number _ | Var _) ->
let cxt = pp_var_assign cxt id in
(expression ~level:0 cxt ident_expression, None)
| Some ident_expression, _ ->
let cxt = pp_var_assign cxt id in
let cxt =
expression ~level:1 cxt ident_expression
in
space cxt;
comma cxt;
let id =
Ext_ident.create (Ident.name id ^ "_finish")
in
let cxt = ident cxt id in
space cxt;
string cxt L.eq;
space cxt;
(expression ~level:1 cxt finish, Some id)
| None, (Number _ | Var _) -> (cxt, None)
| None, _ ->
let id =
Ext_ident.create (Ident.name id ^ "_finish")
in
let cxt = pp_var_assign cxt id in
(expression ~level:15 cxt finish, Some id)
in
semi cxt;
space cxt;
let cxt = ident cxt id in
space cxt;
let right_prec =
match direction with
| Upto ->
let _, _, right = Js_op_util.op_prec Le in
string cxt L.le;
right
| Up ->
let _, _, right = Js_op_util.op_prec Lt in
string cxt L.lt;
right
| Downto ->
let _, _, right = Js_op_util.op_prec Ge in
string cxt L.ge;
right
in
space cxt;
let cxt =
expression ~level:right_prec cxt
(match new_id with
| Some i -> E.var i
| None -> finish)
in
semi cxt;
space cxt;
pp_direction cxt direction;
ident cxt id))
in
brace_block cxt s)
in
let lexical = Js_closure.get_lexical_scope env in
if Set_ident.is_empty lexical then action cxt
else
(* unlike function,
[print for loop] has side effect,
we should take it out
*)
let inner_cxt = merge_scope cxt lexical in
let lexical = Set_ident.elements lexical in
vgroup cxt 0 (fun _ ->
string cxt L.lparen;
string cxt L.function_;
pp_paren_params inner_cxt lexical;
let cxt = brace_vgroup cxt 0 (fun _ -> action inner_cxt) in
pp_paren_params inner_cxt lexical;
string cxt L.rparen;
semi cxt;
cxt)
| Continue s ->
continue cxt s;
newline cxt ; # 2642
| Debugger ->
debugger_nl cxt;
cxt
| Break ->
break_nl cxt;
cxt
| Return e -> (
match e.expression_desc with
| Fun (is_method, l, b, env, return_unit) ->
let cxt =
pp_function ~return_unit ~is_method cxt ~fn_state:Is_return l b env
in
semi cxt;
cxt
| Undefined ->
return_sp cxt;
semi cxt;
cxt
| _ ->
return_sp cxt;
string cxt " return " ; ( \ * ASI -- when there is a comment*\ )
group cxt return_indent (fun _ ->
let cxt = expression ~level:0 cxt e in
semi cxt;
cxt))
| Int_switch (e, cc, def) ->
string cxt L.switch;
space cxt;
let cxt = paren_group cxt 1 (fun _ -> expression ~level:0 cxt e) in
space cxt;
brace_vgroup cxt 1 (fun _ ->
let cxt =
loop_case_clauses cxt (fun cxt i -> string cxt (string_of_int i)) cc
in
match def with
| None -> cxt
| Some def ->
group cxt 1 (fun _ ->
string cxt L.default;
string cxt L.colon;
newline cxt;
statements false cxt def))
| String_switch (e, cc, def) ->
string cxt L.switch;
space cxt;
let cxt = paren_group cxt 1 (fun _ -> expression ~level:0 cxt e) in
space cxt;
brace_vgroup cxt 1 (fun _ ->
let cxt =
loop_case_clauses cxt
(fun cxt s -> Js_dump_string.pp_string cxt.pp s)
cc
in
match def with
| None -> cxt
| Some def ->
group cxt 1 (fun _ ->
string cxt L.default;
string cxt L.colon;
newline cxt;
statements false cxt def))
| Throw e ->
let e =
match e.expression_desc with
| Caml_block (el, _, _, ((Blk_extension | Blk_record_ext _) as ext)) ->
{ e with expression_desc = exn_block_as_obj ~stack:true el ext }
| _ -> e
in
string cxt L.throw;
space cxt;
group cxt throw_indent (fun _ ->
let cxt = expression ~level:0 cxt e in
semi cxt;
cxt)
(* There must be a space between the return and its
argument. A line return would not work *)
| Try (b, ctch, fin) ->
vgroup cxt 0 (fun _ ->
string cxt L.try_;
space cxt;
let cxt = brace_block cxt b in
let cxt =
match ctch with
| None -> cxt
| Some (i, b) ->
newline cxt;
string cxt "catch (";
let cxt = ident cxt i in
string cxt ")";
brace_block cxt b
in
match fin with
| None -> cxt
| Some b ->
group cxt 1 (fun _ ->
string cxt L.finally;
space cxt;
brace_block cxt b))
and function_body (cxt : cxt) ~return_unit (b : J.block) : unit =
match b with
| [] -> ()
| [ s ] -> (
match s.statement_desc with
| If
( bool,
then_,
[ { statement_desc = Return { expression_desc = Undefined } } ] ) ->
ignore
(statement false cxt
{ s with statement_desc = If (bool, then_, []) }
: cxt)
| Return { expression_desc = Undefined } -> ()
| Return exp when return_unit ->
ignore (statement false cxt (S.exp exp) : cxt)
| _ -> ignore (statement false cxt s : cxt))
| [ s; { statement_desc = Return { expression_desc = Undefined } } ] ->
ignore (statement false cxt s : cxt)
| s :: r ->
let cxt = statement false cxt s in
newline cxt;
function_body cxt r ~return_unit
and brace_block cxt b =
(* This one is for '{' *)
brace_vgroup cxt 1 (fun _ -> statements false cxt b)
(* main entry point *)
and statements top cxt b =
iter_lst cxt b
(fun cxt s -> statement top cxt s)
(if top then at_least_two_lines else newline)
let string_of_block (block : J.block) =
let buffer = Buffer.create 50 in
let cxt = from_buffer buffer in
let (_ : cxt) = statements true cxt block in
flush cxt ();
Buffer.contents buffer
let string_of_expression (e : J.expression) =
let buffer = Buffer.create 50 in
let cxt = from_buffer buffer in
let (_ : cxt) = expression ~level:0 cxt e in
flush cxt ();
Buffer.contents buffer
let statements top scope pp b = (statements top { scope; pp } b).scope
| null | https://raw.githubusercontent.com/melange-re/melange/24083b39f10d8df40a786d890d7530b514ba70a4/jscomp/core/js_dump.ml | ocaml | There modules are dynamically inserted in the last stage
{Caml_curry}
{Caml_option}
They can appear anywhere so even if you have a module
{
let module Caml_block = ...
(* Later would insert the use of Caml_block here which should
point tto the runtime module
Print as underscore for unused vars, may not be
needed in the future
let ipp_ident cxt id (un_used : bool) =
Ext_pp_scope.ident cxt (
if un_used then
Ext_ident.make_unused ()
else
id)
let drop_comment (x : J.expression) =
if x.comment = None then x
else {x with comment = None}
IdentMap
Turn [function f (x,y) { return a (x,y)} ] into [Curry.__2(a)],
The idea is that [Curry.__2] will guess the arity of [a], if it does
hit, then there is no cost when passed
for sure no name
true means for sure, false -- not sure
TODO: refactoring
Note that {!pp_function} could print both statement and expression when [No_name] is given
TODO: need a case to justify it
This check is needed to avoid some edge cases
{[function(x){return x(x)}]}
here the function is also called `x`
length
identifiers will be printed cxtollowing
the context will be continued after this function
the context used to be printed inside this function
when printing a function,
only the enclosed variables and function name matters,
if the function does not capture any variable, then the context is empty
print our closure as
{[(function(x,y){ return function(..){...}} (x,y))]}
Maybe changed to `let` in the future
expression
has binding, a statement
TODO: when calculating lexical we should not include itself
Assume the cond would not change the context,
since it can be either [int] or [string]
could be integer or string
The higher the level, the more likely that inner has to add parens
TODO: dump for comments
TODO --
when utf8-> it will not escape '\\' which is definitely not we want
attach string here for float constant folding?
check , js convention with ocaml lexical convention
Negative numbers may need to be parenthesized.
We are more conservative here, to make the generated code more readable
to the user
TODO: simplify for singleton list
See [ .obj_of_exports]
maybe in the ast level we should have
refer and export
Todo: check parens
idem
if key = str then key
else
TODO: print [const/var] for different backends
Make sure parens are added correctly
debugging
Does it make sense to optimize here?
The %raw is just a comment
No braces needed here
TODO: always brace those statements
The only place that [semi] may have semantics here
unlike function,
[print for loop] has side effect,
we should take it out
There must be a space between the return and its
argument. A line return would not work
This one is for '{'
main entry point | compiler
* Copyright ( C ) 2015 - 2016 Bloomberg Finance L.P.
* /
* Copyright ( C ) 2010
* Laboratoire PPS - CNRS Université Paris Diderot
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , with linking exception ;
* either version 2.1 of the License , or ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
* Copyright (C) 2015-2016 Bloomberg Finance L.P.
* /
* Copyright (C) 2010 Jérôme Vouillon
* Laboratoire PPS - CNRS Université Paris Diderot
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, with linking exception;
* either version 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*)
Authors : ,
-are-the-rules-for-javascripts-automatic-semicolon-insertion-asi
ASI catch up
{ [
a = b
+ + c
---
a = b + + c
= = = = = = = = = = = = = = = = = = = =
a + +
---
a
+ +
= = = = = = = = = = = = = = = = = = = =
a --
---
a
--
= = = = = = = = = = = = = = = = = = = =
( continue / break / return / throw ) a
---
( continue / break / return / throw )
a
= = = = = = = = = = = = = = = = = = = =
] }
-are-the-rules-for-javascripts-automatic-semicolon-insertion-asi
ASI catch up
{[
a=b
++c
---
a=b ++c
====================
a ++
---
a
++
====================
a --
---
a
--
====================
(continue/break/return/throw) a
---
(continue/break/return/throw)
a
====================
]}
*)
let name_symbol = Js_op.Symbol_name
module E = Js_exp_make
module S = Js_stmt_make
module L = Js_dump_lit
}
There are no sane way to easy detect it ahead of time, we should be
conservative here.
(our call Js_fun_env.get_unbounded env) is not precise
*)
type cxt = { scope : Ext_pp_scope.t; pp : Ext_pp.t }
let from_pp pp = { scope = Ext_pp_scope.empty; pp }
let from_buffer buf = from_pp (Ext_pp.from_buffer buf)
let update_scope cxt scope = { cxt with scope }
let ident cxt id = update_scope cxt (Ext_pp_scope.ident cxt.scope cxt.pp id)
let string cxt s = Ext_pp.string cxt.pp s
let group cxt = Ext_pp.group cxt.pp
let newline cxt = Ext_pp.newline cxt.pp
let paren_group cxt = Ext_pp.paren_group cxt.pp
let paren_vgroup cxt = Ext_pp.paren_vgroup cxt.pp
let vgroup cxt = Ext_pp.vgroup cxt.pp
let space cxt = Ext_pp.space cxt.pp
let cond_paren_group cxt = Ext_pp.cond_paren_group cxt.pp
let paren cxt = Ext_pp.paren cxt.pp
let brace_vgroup cxt = Ext_pp.brace_vgroup cxt.pp
let bracket_group cxt = Ext_pp.bracket_group cxt.pp
let bracket_vgroup cxt = Ext_pp.bracket_vgroup cxt.pp
let merge_scope cxt l =
let scope = Ext_pp_scope.merge cxt.scope l in
{ cxt with scope }
let sub_scope cxt l = update_scope cxt (Ext_pp_scope.sub_scope cxt.scope l)
let str_of_ident cxt id =
let str, scope = Ext_pp_scope.str_of_ident cxt.scope id in
(str, update_scope cxt scope)
let at_least_two_lines cxt = Ext_pp.at_least_two_lines cxt.pp
let flush cxt () = Ext_pp.flush cxt.pp ()
module Curry_gen = struct
let pp_curry_dot cxt =
string cxt Js_runtime_modules.curry;
string cxt L.dot
let pp_optimize_curry cxt (len : int) =
pp_curry_dot cxt;
string cxt "__";
string cxt (Printf.sprintf "%d" len)
let pp_app_any cxt =
pp_curry_dot cxt;
string cxt "app"
let pp_app cxt (len : int) =
pp_curry_dot cxt;
string cxt "_";
string cxt (Printf.sprintf "%d" len)
end
let return_indent = String.length L.return / Ext_pp.indent_length
let throw_indent = String.length L.throw / Ext_pp.indent_length
let semi cxt = string cxt L.semi
let comma cxt = string cxt L.comma
let exn_block_as_obj ~(stack : bool) (el : J.expression list) (ext : J.tag_info)
: J.expression_desc =
let field_name =
match ext with
| Blk_extension -> (
fun i ->
match i with 0 -> Literals.exception_id | i -> "_" ^ string_of_int i)
| Blk_record_ext ss -> (
fun i -> match i with 0 -> Literals.exception_id | i -> ss.(i - 1))
| _ -> assert false
in
Object
(if stack then
Ext_list.mapi_append el
(fun i e -> (Js_op.Lit (field_name i), e))
[ (Js_op.Lit "Error", E.new_ (E.js_global "Error") []) ]
else Ext_list.mapi el (fun i e -> (Js_op.Lit (field_name i), e)))
let rec iter_lst cxt ls element inter =
match ls with
| [] -> cxt
| [ e ] -> element cxt e
| e :: r ->
let acxt = element cxt e in
inter cxt;
iter_lst acxt r element inter
let raw_snippet_exp_simple_enough (s : string) =
Ext_string.for_all s (fun c ->
match c with 'a' .. 'z' | 'A' .. 'Z' | '_' | '.' -> true | _ -> false)
Parentheses are required when the expression
starts syntactically with " { " or " function "
TODO : be more conservative , since Google Closure will handle
the precedence correctly , we also need people read the code ..
Here we force parens for some alien operators
If we move assign into a statement , will be less ?
TODO : construct a test case that do need parenthesisze for expression
IIE does not apply ( will be inlined ? )
starts syntactically with "{" or "function"
TODO: be more conservative, since Google Closure will handle
the precedence correctly, we also need people read the code..
Here we force parens for some alien operators
If we move assign into a statement, will be less?
TODO: construct a test case that do need parenthesisze for expression
IIE does not apply (will be inlined?)
*)
e = function(x){ ... ) ; is good
*)
let exp_need_paren (e : J.expression) =
match e.expression_desc with
|
| Call ({ expression_desc = Fun _ | Raw_js_code _ }, _, _) -> true
| Raw_js_code { code_info = Exp _ }
| Fun _
| Caml_block
( _,
_,
_,
( Blk_record _ | Blk_module _ | Blk_poly_var | Blk_extension
| Blk_record_ext _ | Blk_record_inlined _ | Blk_constructor _ ) )
| Object _ ->
true
| Raw_js_code { code_info = Stmt _ }
| Length _ | Call _ | Caml_block_tag _ | Seq _ | Static_index _ | Cond _
| Bin _ | Is_null_or_undefined _ | String_index _ | Array_index _
| String_append _ | Char_of_int _ | Char_to_int _ | Var _ | Undefined | Null
| Str _ | Unicode _ | Array _ | Optional_block _ | Caml_block _ | FlatCall _
| Typeof _ | Number _ | Js_not _ | Bool _ | New _ ->
false
let comma_idents (cxt : cxt) ls = iter_lst cxt ls ident comma
let pp_paren_params (cxt : cxt) (lexical : Ident.t list) : unit =
string cxt L.lparen;
let (_ : cxt) = comma_idents cxt lexical in
string cxt L.rparen
let pp_var_assign cxt id =
string cxt L.var;
space cxt;
let acxt = ident cxt id in
space cxt;
string cxt L.eq;
space cxt;
acxt
let pp_var_assign_this cxt id =
let cxt = pp_var_assign cxt id in
string cxt L.this;
space cxt;
semi cxt;
newline cxt;
cxt
let pp_var_declare cxt id =
string cxt L.var;
space cxt;
let acxt = ident cxt id in
semi cxt;
acxt
let pp_direction cxt (direction : J.for_direction) =
match direction with
| Up | Upto -> string cxt L.plus_plus
| Downto -> string cxt L.minus_minus
let return_sp cxt =
string cxt L.return;
space cxt
let bool cxt b = string cxt (if b then L.true_ else L.false_)
let comma_sp cxt =
comma cxt;
space cxt
let comma_nl cxt =
comma cxt;
newline cxt
let debugger_nl cxt =
newline cxt;
string cxt L.debugger;
semi cxt;
newline cxt
let break_nl cxt =
string cxt L.break;
space cxt;
semi cxt;
newline cxt
let continue cxt s =
string cxt L.continue;
space cxt;
string cxt s;
semi cxt
let formal_parameter_list cxt l = iter_lst cxt l ident comma_sp
f/122 -- >
f/122 is in the map
if in , use the old mapping
else
check f ,
if in last bumped i d
else
use " f " , register it
check " f "
if not , use " f " , register stamp - > 0
else
check stamp
if in use it
else check last bumped i d , increase it and register
f/122 -->
f/122 is in the map
if in, use the old mapping
else
check f,
if in last bumped id
else
use "f", register it
check "f"
if not , use "f", register stamp -> 0
else
check stamp
if in use it
else check last bumped id, increase it and register
*)
let is_var (b : J.expression) a =
match b.expression_desc with Var (Id i) -> Ident.same i a | _ -> false
type fn_exp_state =
| Name_top of Ident.t
| Name_non_top of Ident.t
| No_name of { single_arg : bool }
let default_fn_exp_state = No_name { single_arg = false }
let rec try_optimize_curry cxt len function_id =
Curry_gen.pp_optimize_curry cxt len;
paren_group cxt 1 (fun _ -> expression ~level:1 cxt function_id)
and pp_function ~return_unit ~is_method cxt ~fn_state (l : Ident.t list)
(b : J.block) (env : Js_fun_env.t) : cxt =
match b with
| [
{
statement_desc =
Return
{
expression_desc =
Call
( ({ expression_desc = Var v; _ } as function_id),
ls,
{
see # 234
call_info = Call_builtin_runtime | Call_ml;
} );
};
};
]
match such case :
{ [ function(x , y ) { return u(x , y ) } ] }
it can be optimized in to either [ u ] or [ Curry.__n(u ) ]
{[ function(x,y){ return u(x,y) } ]}
it can be optimized in to either [u] or [Curry.__n(u)]
*)
(not is_method)
&& Ext_list.for_all2_no_exn ls l is_var
&&
match v with
| Id id -> not (Ext_list.exists l (fun x -> Ident.same x id))
| Qualified _ -> true -> (
let optimize len ~p cxt v =
if p then try_optimize_curry cxt len function_id else vident cxt v
in
let len = List.length l in
match fn_state with
| Name_top i | Name_non_top i ->
let cxt = pp_var_assign cxt i in
let cxt = optimize len ~p:(arity = NA && len <= 8) cxt v in
semi cxt;
cxt
| Is_return | No_name _ ->
if fn_state = Is_return then return_sp cxt;
optimize len ~p:(arity = NA && len <= 8) cxt v)
| _ ->
let set_env : Set_ident.t =
match fn_state with
| Is_return | No_name _ -> Js_fun_env.get_unbounded env
| Name_top id | Name_non_top id ->
Set_ident.add (Js_fun_env.get_unbounded env) id
in
let outer_cxt = merge_scope cxt set_env in
let inner_cxt = sub_scope outer_cxt set_env in
let param_body () : unit =
if is_method then (
match l with
| [] -> assert false
| this :: arguments ->
let cxt =
paren_group cxt 1 (fun _ ->
formal_parameter_list inner_cxt arguments)
in
space cxt;
brace_vgroup cxt 1 (fun () ->
let cxt =
if Js_fun_env.get_unused env 0 then cxt
else pp_var_assign_this cxt this
in
function_body ~return_unit cxt b))
else
let cxt =
paren_group cxt 1 (fun _ -> formal_parameter_list inner_cxt l)
in
space cxt;
brace_vgroup cxt 1 (fun _ -> function_body ~return_unit cxt b)
in
let lexical : Set_ident.t = Js_fun_env.get_lexical_scope env in
let enclose lexical =
let handle lexical =
if Set_ident.is_empty lexical then (
match fn_state with
| Is_return ->
return_sp cxt;
string cxt L.function_;
space cxt;
param_body ()
| No_name { single_arg } ->
see # 1692 , add a paren for annoymous function for safety
cond_paren_group cxt (not single_arg) 1 (fun _ ->
string cxt L.function_;
space cxt;
param_body ())
| Name_non_top x ->
ignore (pp_var_assign inner_cxt x : cxt);
string cxt L.function_;
space cxt;
param_body ();
semi cxt
| Name_top x ->
string cxt L.function_;
space cxt;
ignore (ident inner_cxt x : cxt);
param_body ())
else
let lexical = Set_ident.elements lexical in
(match fn_state with
| Is_return -> return_sp cxt
| No_name _ -> ()
| Name_non_top name | Name_top name ->
ignore (pp_var_assign inner_cxt name : cxt));
string cxt L.lparen;
string cxt L.function_;
pp_paren_params inner_cxt lexical;
brace_vgroup cxt 0 (fun _ ->
return_sp cxt;
string cxt L.function_;
space cxt;
(match fn_state with
| Is_return | No_name _ -> ()
| Name_non_top x | Name_top x -> ignore (ident inner_cxt x));
param_body ());
pp_paren_params inner_cxt lexical;
string cxt L.rparen;
match fn_state with
in
handle
(match fn_state with
| (Name_top name | Name_non_top name) when Set_ident.mem lexical name
->
Set_ident.remove lexical name
| _ -> lexical)
in
enclose lexical;
outer_cxt
and pp_one_case_clause : 'a. _ -> (_ -> 'a -> unit) -> 'a * J.case_clause -> _ =
fun cxt pp_cond
(switch_case, ({ switch_body; should_break; comment } : J.case_clause)) ->
let cxt =
group cxt 1 (fun _ ->
group cxt 1 (fun _ ->
string cxt L.case;
space cxt;
pp_comment_option cxt comment;
pp_cond cxt switch_case;
space cxt;
string cxt L.colon);
group cxt 1 (fun _ ->
let cxt =
match switch_body with
| [] -> cxt
| _ ->
newline cxt;
statements false cxt switch_body
in
if should_break then (
newline cxt;
string cxt L.break;
semi cxt);
cxt))
in
newline cxt;
cxt
and loop_case_clauses : 'a. _ -> (_ -> 'a -> unit) -> ('a * _) list -> _ =
fun cxt pp_cond cases ->
Ext_list.fold_left cases cxt (fun acc x -> pp_one_case_clause acc pp_cond x)
and vident cxt (v : J.vident) =
match v with
| Id v
| Qualified ({ id = v }, None)
| Qualified ({ id = v; kind = External { default = true } }, _) ->
ident cxt v
| Qualified ({ id; kind = Ml | Runtime }, Some name) ->
let cxt = ident cxt id in
string cxt L.dot;
string cxt (Ext_ident.convert name);
cxt
| Qualified ({ id; kind = External _ }, Some name) ->
let cxt = ident cxt id in
Js_dump_property.property_access cxt.pp name;
cxt
and expression ~level:l cxt (exp : J.expression) : cxt =
pp_comment_option cxt exp.comment;
expression_desc cxt ~level:l exp.expression_desc
and expression_desc cxt ~(level : int) x : cxt =
match x with
| Null ->
string cxt L.null;
cxt
| Undefined ->
string cxt L.undefined;
cxt
| Var v -> vident cxt v
| Bool b ->
bool cxt b;
cxt
| Seq (e1, e2) ->
cond_paren_group cxt (level > 0) 1 (fun () ->
let cxt = expression ~level:0 cxt e1 in
comma_sp cxt;
expression ~level:0 cxt e2)
| Fun (is_method, l, b, env, return_unit) ->
pp_function ~return_unit ~is_method cxt ~fn_state:default_fn_exp_state l b
env
TODO :
when [ e ] is [ ] with arity
print it in a more precise way
It seems the optimizer already did work to make sure
{ [
Call ( Raw_js_code ( s , Exp i ) , el , { Full } )
when Ext_list.length_equal el i
] }
when [e] is [Js_raw_code] with arity
print it in a more precise way
It seems the optimizer already did work to make sure
{[
Call (Raw_js_code (s, Exp i), el, {Full})
when Ext_list.length_equal el i
]}
*)
| Call (e, el, info) ->
cond_paren_group cxt (level > 15) 1 (fun _ ->
group cxt 1 (fun _ ->
match (info, el) with
| { arity = Full }, _ | _, [] ->
let cxt = expression ~level:15 cxt e in
paren_group cxt 1 (fun _ ->
match el with
| [
{
expression_desc =
Fun (is_method, l, b, env, return_unit);
};
] ->
pp_function ~return_unit ~is_method cxt
~fn_state:(No_name { single_arg = true })
l b env
| _ -> arguments cxt el)
| _, _ ->
let len = List.length el in
if 1 <= len && len <= 8 then (
Curry_gen.pp_app cxt len;
paren_group cxt 1 (fun _ -> arguments cxt (e :: el)))
else (
Curry_gen.pp_app_any cxt;
paren_group cxt 1 (fun _ ->
arguments cxt [ e; E.array Mutable el ]))))
| FlatCall (e, el) ->
group cxt 1 (fun _ ->
let cxt = expression ~level:15 cxt e in
string cxt L.dot;
string cxt L.apply;
paren_group cxt 1 (fun _ ->
string cxt L.null;
comma_sp cxt;
expression ~level:1 cxt el))
| Char_to_int e -> (
match e.expression_desc with
| String_index (a, b) ->
group cxt 1 (fun _ ->
let cxt = expression ~level:15 cxt a in
string cxt L.dot;
string cxt L.char_code_at;
paren_group cxt 1 (fun _ -> expression ~level:0 cxt b))
| _ ->
group cxt 1 (fun _ ->
let cxt = expression ~level:15 cxt e in
string cxt L.dot;
string cxt L.char_code_at;
string cxt "(0)";
cxt))
| Char_of_int e ->
group cxt 1 (fun _ ->
string cxt L.string_cap;
string cxt L.dot;
string cxt L.fromCharcode;
paren_group cxt 1 (fun _ -> arguments cxt [ e ]))
| Unicode s ->
string cxt "\"";
string cxt s;
string cxt "\"";
cxt
| Str (_, s) ->
Js_dump_string.pp_string cxt.pp s;
cxt
| Raw_js_code { code = s; code_info = info } -> (
match info with
| Exp exp_info ->
let raw_paren =
not
(match exp_info with
| Js_literal _ -> true
| Js_function _ | Js_exp_unknown ->
false || raw_snippet_exp_simple_enough s)
in
if raw_paren then string cxt L.lparen;
string cxt s;
if raw_paren then string cxt L.rparen;
cxt
| Stmt stmt_info ->
if stmt_info = Js_stmt_comment then string cxt s
else (
newline cxt;
string cxt s;
newline cxt);
cxt)
| Number v ->
let s =
match v with
| Float { f } -> Js_number.caml_float_literal_to_js_string f
| Int { i; c = Some c } -> Format.asprintf "/* %C */%ld" c i
| Int { i; c = None } ->
Int32.to_string
| Uint i -> Format.asprintf "%lu" i
in
let need_paren =
if s.[0] = '-' then
else
Parenthesize as well when followed by a dot .
Infinity
NaN
in
let action _ = string cxt s in
if need_paren then paren cxt action else action ();
cxt
| Is_null_or_undefined e ->
cond_paren_group cxt (level > 0) 1 (fun _ ->
let cxt = expression ~level:1 cxt e in
space cxt;
string cxt "==";
space cxt;
string cxt L.null;
cxt)
| Js_not e ->
cond_paren_group cxt (level > 13) 1 (fun _ ->
string cxt "!";
expression ~level:13 cxt e)
| Typeof e ->
string cxt "typeof";
space cxt;
expression ~level:13 cxt e
| Bin
( Minus,
{ expression_desc = Number (Int { i = 0l; _ } | Float { f = "0." }) },
e )
TODO :
Handle multiple cases like
{ [ 0 . - x ] }
{ [ 0.00 - x ] }
{ [ 0.000 - x ] }
Handle multiple cases like
{[ 0. - x ]}
{[ 0.00 - x ]}
{[ 0.000 - x ]}
*) ->
cond_paren_group cxt (level > 13) 1 (fun _ ->
string cxt "-";
expression ~level:13 cxt e)
| Bin (op, e1, e2) ->
let out, lft, rght = Js_op_util.op_prec op in
let need_paren =
level > out || match op with Lsl | Lsr | Asr -> true | _ -> false
in
cond_paren_group cxt need_paren 1 (fun _ ->
let cxt = expression ~level:lft cxt e1 in
space cxt;
string cxt (Js_op_util.op_str op);
space cxt;
expression ~level:rght cxt e2)
| String_append (e1, e2) ->
let op : Js_op.binop = Plus in
let out, lft, rght = Js_op_util.op_prec op in
let need_paren =
level > out || match op with Lsl | Lsr | Asr -> true | _ -> false
in
cond_paren_group cxt need_paren 1 (fun _ ->
let cxt = expression ~level:lft cxt e1 in
space cxt;
string cxt "+";
space cxt;
expression ~level:rght cxt e2)
| Array (el, _) -> (
match el with
| [] | [ _ ] -> bracket_group cxt 1 (fun _ -> array_element_list cxt el)
| _ -> bracket_vgroup cxt 1 (fun _ -> array_element_list cxt el))
| Optional_block (e, identity) ->
expression ~level cxt
(if identity then e
else E.runtime_call Js_runtime_modules.option "some" [ e ])
| Caml_block (el, _, _, Blk_module fields) ->
expression_desc cxt ~level
(Object
(Ext_list.map_combine fields el (fun x ->
Js_op.Lit (Ext_ident.convert x))))
name convention of Record is slight different from modules
| Caml_block (el, mutable_flag, _, Blk_record fields) ->
if Ext_array.for_alli fields (fun i v -> string_of_int i = v) then
expression_desc cxt ~level (Array (el, mutable_flag))
else
expression_desc cxt ~level
(Object (Ext_list.combine_array fields el (fun i -> Js_op.Lit i)))
| Caml_block (el, _, _, Blk_poly_var) -> (
match el with
| [ { expression_desc = Str (_, name) }; value ] ->
expression_desc cxt ~level
(Object
[
(Js_op.Lit Literals.polyvar_hash, E.str name);
(Lit Literals.polyvar_value, value);
])
| _ -> assert false)
| Caml_block (el, _, _, ((Blk_extension | Blk_record_ext _) as ext)) ->
expression_desc cxt ~level (exn_block_as_obj ~stack:false el ext)
| Caml_block (el, _, tag, Blk_record_inlined p) ->
let objs =
let tails =
Ext_list.combine_array_append p.fields el
(if !Js_config.debug then [ (name_symbol, E.str p.name) ] else [])
(fun i -> Js_op.Lit i)
in
if p.num_nonconst = 1 then tails
else
( Js_op.Lit L.tag,
if !Js_config.debug then tag else { tag with comment = Some p.name }
)
:: tails
in
if p.num_nonconst = 1 && not !Js_config.debug then
pp_comment_option cxt (Some p.name);
expression_desc cxt ~level (Object objs)
| Caml_block (el, _, tag, Blk_constructor p) ->
let not_is_cons = p.name <> Literals.cons in
let objs =
let tails =
Ext_list.mapi_append el
(fun i e ->
( (match (not_is_cons, i) with
| false, 0 -> Js_op.Lit Literals.hd
| false, 1 -> Js_op.Lit Literals.tl
| _ -> Js_op.Lit ("_" ^ string_of_int i)),
e ))
(if !Js_config.debug && not_is_cons then
[ (name_symbol, E.str p.name) ]
else [])
in
if p.num_nonconst = 1 then tails
else
( Js_op.Lit L.tag,
if !Js_config.debug then tag else { tag with comment = Some p.name }
)
:: tails
in
if p.num_nonconst = 1 && (not !Js_config.debug) && not_is_cons then
pp_comment_option cxt (Some p.name);
expression_desc cxt ~level (Object objs)
| Caml_block (_, _, _, (Blk_module_export | Blk_na _)) -> assert false
| Caml_block (el, mutable_flag, _tag, (Blk_tuple | Blk_class | Blk_array)) ->
expression_desc cxt ~level (Array (el, mutable_flag))
| Caml_block_tag e ->
group cxt 1 (fun _ ->
let cxt = expression ~level:15 cxt e in
string cxt L.dot;
string cxt L.tag;
cxt)
| Array_index (e, p) | String_index (e, p) ->
cond_paren_group cxt (level > 15) 1 (fun _ ->
group cxt 1 (fun _ ->
let cxt = expression ~level:15 cxt e in
bracket_group cxt 1 (fun _ -> expression ~level:0 cxt p)))
| Static_index (e, s, _) ->
cond_paren_group cxt (level > 15) 1 (fun _ ->
let cxt = expression ~level:15 cxt e in
Js_dump_property.property_access cxt.pp s;
cxt)
| Length (e, _) ->
cond_paren_group cxt (level > 15) 1 (fun _ ->
let cxt = expression ~level:15 cxt e in
string cxt L.dot;
string cxt L.length;
cxt)
| New (e, el) ->
cond_paren_group cxt (level > 15) 1 (fun _ ->
group cxt 1 (fun _ ->
string cxt L.new_;
space cxt;
let cxt = expression ~level:16 cxt e in
paren_group cxt 1 (fun _ ->
match el with Some el -> arguments cxt el | None -> cxt)))
| Cond (e, e1, e2) ->
let action () =
let cxt = expression ~level:3 cxt e in
space cxt;
string cxt L.question;
space cxt;
[ level 1 ] is correct , however
to make nice indentation , force nested conditional to be parenthesized
[level 1] is correct, however
to make nice indentation , force nested conditional to be parenthesized
*)
let cxt = group cxt 1 (fun _ -> expression ~level:3 cxt e1) in
space cxt;
string cxt L.colon_space;
group cxt 1 (fun _ -> expression ~level:3 cxt e2)
in
if level > 2 then paren_vgroup cxt 1 action else action ()
| Object lst ->
# 1946 object literal is easy to be
interpreted as block statement
here we avoid parens in such case
{ [
var f = { x : 2 , y : 2 }
] }
interpreted as block statement
here we avoid parens in such case
{[
var f = { x : 2 , y : 2}
]}
*)
cond_paren_group cxt (level > 1) 1 (fun _ ->
if lst = [] then (
string cxt "{}";
cxt)
else
brace_vgroup cxt 1 (fun _ -> property_name_and_value_list cxt lst))
and property_name_and_value_list cxt (l : J.property_map) =
iter_lst cxt l
(fun cxt (pn, e) ->
match e.expression_desc with
| Var (Id v | Qualified ({ id = v; _ }, None)) ->
let key = Js_dump_property.property_key pn in
let str, cxt = str_of_ident cxt v in
let content =
key ^ L.colon_space ^ str
in
string cxt content;
cxt
| _ ->
let key = Js_dump_property.property_key pn in
string cxt key;
string cxt L.colon_space;
expression ~level:1 cxt e)
comma_nl
and array_element_list cxt (el : E.t list) : cxt =
iter_lst cxt el (expression ~level:1) comma_nl
and arguments cxt (l : E.t list) : cxt =
iter_lst cxt l (expression ~level:1) comma_sp
and variable_declaration top cxt (variable : J.variable_declaration) : cxt =
match variable with
| { ident = i; value = None; ident_info; _ } ->
if ident_info.used_stats = Dead_pure then cxt else pp_var_declare cxt i
| { ident = name; value = Some e; ident_info = { used_stats; _ } } -> (
match used_stats with
| Dead_pure -> cxt
| Dead_non_pure ->
statement_desc top cxt (J.Exp e)
| _ -> (
match e.expression_desc with
| Fun (is_method, params, b, env, return_unit) ->
pp_function ~return_unit ~is_method cxt
~fn_state:(if top then Name_top name else Name_non_top name)
params b env
| _ ->
let cxt = pp_var_assign cxt name in
let cxt = expression ~level:1 cxt e in
semi cxt;
cxt))
and ipp_comment : 'a. cxt -> 'a -> unit = fun _cxt _comment -> ()
do n't print a new line -- ASI
FIXME : this still does not work in some cases ...
{ [
return / * ... * /
[ ... ]
] }
FIXME: this still does not work in some cases...
{[
return /* ... */
[... ]
]}
*)
and pp_comment cxt comment =
if String.length comment > 0 then (
string cxt "/* ";
string cxt comment;
string cxt " */")
and pp_comment_option cxt comment =
match comment with None -> () | Some x -> pp_comment cxt x
and f loc =
and statement top cxt ({ statement_desc = s; comment; _ } : J.statement) : cxt =
pp_comment_option cxt comment;
statement_desc top cxt s
and statement_desc top cxt (s : J.statement_desc) : cxt =
match s with
| Block [] ->
ipp_comment cxt L.empty_block;
cxt
| Exp { expression_desc = Var _ } ->
semi cxt ;
cxt
| Exp e -> (
match e.expression_desc with
| Raw_js_code { code; code_info = Stmt Js_stmt_comment } ->
string cxt code;
cxt
| Raw_js_code { code_info = Exp (Js_literal { comment }) } ->
(match comment with
| Some s -> string cxt s
| None -> ());
cxt
| Str _ -> cxt
| _ ->
let cxt =
(if exp_need_paren e then paren_group cxt 1 else group cxt 0)
(fun _ -> expression ~level:0 cxt e)
in
semi cxt;
cxt)
| Block b ->
ipp_comment cxt L.start_block;
let cxt = statements top cxt b in
ipp_comment cxt L.end_block;
cxt
| Variable l -> variable_declaration top cxt l
| If (e, s1, s2) -> (
string cxt L.if_;
space cxt;
let cxt = paren_group cxt 1 (fun _ -> expression ~level:0 cxt e) in
space cxt;
let cxt = brace_block cxt s1 in
match s2 with
| [] | [ { statement_desc = Block [] | Exp { expression_desc = Var _ } } ]
->
newline cxt;
cxt
| [ ({ statement_desc = If _ } as nest) ]
| [
{
statement_desc = Block [ ({ statement_desc = If _; _ } as nest) ];
_;
};
] ->
space cxt;
string cxt L.else_;
space cxt;
statement false cxt nest
| _ :: _ as s2 ->
space cxt;
string cxt L.else_;
space cxt;
brace_block cxt s2)
| While (label, e, s, _env) ->
FIXME : print scope as well
(match label with
| Some i ->
string cxt i;
string cxt L.colon;
newline cxt
| None -> ());
let cxt =
match e.expression_desc with
| Number (Int { i = 1l }) ->
string cxt L.while_;
string cxt L.lparen;
string cxt L.true_;
string cxt L.rparen;
space cxt;
cxt
| _ ->
string cxt L.while_;
let cxt = paren_group cxt 1 (fun _ -> expression ~level:0 cxt e) in
space cxt;
cxt
in
let cxt = brace_block cxt s in
semi cxt;
cxt
| ForRange (for_ident_expression, finish, id, direction, s, env) ->
let action cxt =
vgroup cxt 0 (fun _ ->
let cxt =
group cxt 0 (fun _ ->
string cxt L.for_;
paren_group cxt 1 (fun _ ->
let cxt, new_id =
match
(for_ident_expression, finish.expression_desc)
with
| Some ident_expression, (Number _ | Var _) ->
let cxt = pp_var_assign cxt id in
(expression ~level:0 cxt ident_expression, None)
| Some ident_expression, _ ->
let cxt = pp_var_assign cxt id in
let cxt =
expression ~level:1 cxt ident_expression
in
space cxt;
comma cxt;
let id =
Ext_ident.create (Ident.name id ^ "_finish")
in
let cxt = ident cxt id in
space cxt;
string cxt L.eq;
space cxt;
(expression ~level:1 cxt finish, Some id)
| None, (Number _ | Var _) -> (cxt, None)
| None, _ ->
let id =
Ext_ident.create (Ident.name id ^ "_finish")
in
let cxt = pp_var_assign cxt id in
(expression ~level:15 cxt finish, Some id)
in
semi cxt;
space cxt;
let cxt = ident cxt id in
space cxt;
let right_prec =
match direction with
| Upto ->
let _, _, right = Js_op_util.op_prec Le in
string cxt L.le;
right
| Up ->
let _, _, right = Js_op_util.op_prec Lt in
string cxt L.lt;
right
| Downto ->
let _, _, right = Js_op_util.op_prec Ge in
string cxt L.ge;
right
in
space cxt;
let cxt =
expression ~level:right_prec cxt
(match new_id with
| Some i -> E.var i
| None -> finish)
in
semi cxt;
space cxt;
pp_direction cxt direction;
ident cxt id))
in
brace_block cxt s)
in
let lexical = Js_closure.get_lexical_scope env in
if Set_ident.is_empty lexical then action cxt
else
let inner_cxt = merge_scope cxt lexical in
let lexical = Set_ident.elements lexical in
vgroup cxt 0 (fun _ ->
string cxt L.lparen;
string cxt L.function_;
pp_paren_params inner_cxt lexical;
let cxt = brace_vgroup cxt 0 (fun _ -> action inner_cxt) in
pp_paren_params inner_cxt lexical;
string cxt L.rparen;
semi cxt;
cxt)
| Continue s ->
continue cxt s;
newline cxt ; # 2642
| Debugger ->
debugger_nl cxt;
cxt
| Break ->
break_nl cxt;
cxt
| Return e -> (
match e.expression_desc with
| Fun (is_method, l, b, env, return_unit) ->
let cxt =
pp_function ~return_unit ~is_method cxt ~fn_state:Is_return l b env
in
semi cxt;
cxt
| Undefined ->
return_sp cxt;
semi cxt;
cxt
| _ ->
return_sp cxt;
string cxt " return " ; ( \ * ASI -- when there is a comment*\ )
group cxt return_indent (fun _ ->
let cxt = expression ~level:0 cxt e in
semi cxt;
cxt))
| Int_switch (e, cc, def) ->
string cxt L.switch;
space cxt;
let cxt = paren_group cxt 1 (fun _ -> expression ~level:0 cxt e) in
space cxt;
brace_vgroup cxt 1 (fun _ ->
let cxt =
loop_case_clauses cxt (fun cxt i -> string cxt (string_of_int i)) cc
in
match def with
| None -> cxt
| Some def ->
group cxt 1 (fun _ ->
string cxt L.default;
string cxt L.colon;
newline cxt;
statements false cxt def))
| String_switch (e, cc, def) ->
string cxt L.switch;
space cxt;
let cxt = paren_group cxt 1 (fun _ -> expression ~level:0 cxt e) in
space cxt;
brace_vgroup cxt 1 (fun _ ->
let cxt =
loop_case_clauses cxt
(fun cxt s -> Js_dump_string.pp_string cxt.pp s)
cc
in
match def with
| None -> cxt
| Some def ->
group cxt 1 (fun _ ->
string cxt L.default;
string cxt L.colon;
newline cxt;
statements false cxt def))
| Throw e ->
let e =
match e.expression_desc with
| Caml_block (el, _, _, ((Blk_extension | Blk_record_ext _) as ext)) ->
{ e with expression_desc = exn_block_as_obj ~stack:true el ext }
| _ -> e
in
string cxt L.throw;
space cxt;
group cxt throw_indent (fun _ ->
let cxt = expression ~level:0 cxt e in
semi cxt;
cxt)
| Try (b, ctch, fin) ->
vgroup cxt 0 (fun _ ->
string cxt L.try_;
space cxt;
let cxt = brace_block cxt b in
let cxt =
match ctch with
| None -> cxt
| Some (i, b) ->
newline cxt;
string cxt "catch (";
let cxt = ident cxt i in
string cxt ")";
brace_block cxt b
in
match fin with
| None -> cxt
| Some b ->
group cxt 1 (fun _ ->
string cxt L.finally;
space cxt;
brace_block cxt b))
and function_body (cxt : cxt) ~return_unit (b : J.block) : unit =
match b with
| [] -> ()
| [ s ] -> (
match s.statement_desc with
| If
( bool,
then_,
[ { statement_desc = Return { expression_desc = Undefined } } ] ) ->
ignore
(statement false cxt
{ s with statement_desc = If (bool, then_, []) }
: cxt)
| Return { expression_desc = Undefined } -> ()
| Return exp when return_unit ->
ignore (statement false cxt (S.exp exp) : cxt)
| _ -> ignore (statement false cxt s : cxt))
| [ s; { statement_desc = Return { expression_desc = Undefined } } ] ->
ignore (statement false cxt s : cxt)
| s :: r ->
let cxt = statement false cxt s in
newline cxt;
function_body cxt r ~return_unit
and brace_block cxt b =
brace_vgroup cxt 1 (fun _ -> statements false cxt b)
and statements top cxt b =
iter_lst cxt b
(fun cxt s -> statement top cxt s)
(if top then at_least_two_lines else newline)
let string_of_block (block : J.block) =
let buffer = Buffer.create 50 in
let cxt = from_buffer buffer in
let (_ : cxt) = statements true cxt block in
flush cxt ();
Buffer.contents buffer
let string_of_expression (e : J.expression) =
let buffer = Buffer.create 50 in
let cxt = from_buffer buffer in
let (_ : cxt) = expression ~level:0 cxt e in
flush cxt ();
Buffer.contents buffer
let statements top scope pp b = (statements top { scope; pp } b).scope
|
01d0786105980753c6c8dbc558bfa3a44dcd59597b590301e39913e2ddaff0f1 | ewilderj/coin-tosser | project.clj | (defproject tosser "1.0.0"
:description "coin tossing"
:main tosser.main
:dependencies
[[org.clojure/clojure "1.3.0"]]
:dev-dependencies [])
| null | https://raw.githubusercontent.com/ewilderj/coin-tosser/dbb78efed32eefb23f2ef00659c8187e642cad77/clojure/project.clj | clojure | (defproject tosser "1.0.0"
:description "coin tossing"
:main tosser.main
:dependencies
[[org.clojure/clojure "1.3.0"]]
:dev-dependencies [])
| |
2ec8f083e661a169681494a4918f89daa64a158626f6c99b9994e96061dc8342 | mdedwards/slippery-chicken | videos-example-21-all.lsp | ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; File: videos-example-21-all.lsp
;;;
Class Hierarchy : None
;;;
Version : 1.0
;;;
;;; Project: slippery chicken (algorithmic composition)
;;;
Purpose : Lisp example code to accompany video tutorial 21
;;;
Author :
;;;
Creation date : 22nd December 2012
;;;
;;; ****
Licence : Copyright ( c ) 2012
;;;
;;; This file is part of slippery-chicken
;;;
;;; slippery-chicken is free software; you can redistribute it
;;; and/or modify it under the terms of the GNU General
Public License as published by the Free Software
Foundation ; either version 3 of the License , or ( at your
;;; option) any later version.
;;;
;;; slippery-chicken is distributed in the hope that it will
;;; be useful, but WITHOUT ANY WARRANTY; without even the
;;; implied warranty of MERCHANTABILITY or FITNESS FOR A
;;; PARTICULAR PURPOSE. See the GNU General Public License
;;; for more details.
;;;
You should have received a copy of the GNU General Public
;;; License along with slippery-chicken; if not, write to the
Free Software Foundation , Inc. , 59 Temple Place , Suite
330 , Boston , MA 02111 - 1307 USA
;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; get-event
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(let* ((mini
(make-slippery-chicken
'+mini+
:ensemble '(((vn (violin :midi-channel 1))))
:set-palette '((1 ((c4 d4 e4 f4 g4 a4 b4 c5))))
:set-map '((1 (1)))
:rthm-seq-palette '((1 ((((2 4) (s) (s) e e e))
:pitch-seq-palette ((1 2 3)))))
:rthm-seq-map '((1 ((vn (1))))))))
(data (get-event mini 1 1 'vn)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; get-note
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(let* ((mini
(make-slippery-chicken
'+mini+
:ensemble '(((vn (violin :midi-channel 1))))
:set-palette '((1 ((c4 d4 e4 f4 g4 a4 b4 c5))))
:set-map '((1 (1)))
:rthm-seq-palette '((1 ((((2 4) (s) (s) e e e))
:pitch-seq-palette ((1 2 3)))))
:rthm-seq-map '((1 ((vn (1))))))))
(data (get-note mini 1 1 'vn)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; change-pitch
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(let* ((mini
(make-slippery-chicken
'+mini+
:ensemble '(((vn (violin :midi-channel 1))
(va (viola :midi-channel 2))
(vc (cello :midi-channel 3))))
:set-palette '((1 ((c3 d3 e3 f3 g3 a3 b3 c4 d4 e4 f4 g4 a4 b4 c5))))
:set-map '((1 (1 1 1)))
:rthm-seq-palette '((1 ((((4 4) - e (e) e e - (e) - e e e -))
:pitch-seq-palette ((1 2 3 4 5 6)))))
:rthm-seq-map '((1 ((vn (1 1 1))
(va (1 1 1))
(vc (1 1 1))))))))
(change-pitch mini 3 2 'va 'cs4)
(write-lp-data-for-all mini))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; add-mark-to-note
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(let* ((mini
(make-slippery-chicken
'+mini+
:ensemble '(((vn (violin :midi-channel 1))
(va (viola :midi-channel 2))
(vc (cello :midi-channel 3))))
:set-palette '((1 ((c3 d3 e3 f3 g3 a3 b3 c4 d4 e4 f4 g4 a4 b4 c5))))
:set-map '((1 (1 1 1)))
:rthm-seq-palette '((1 ((((4 4) - e (e) e e - (e) - e e e -))
:pitch-seq-palette ((1 2 3 4 5 6)))))
:rthm-seq-map '((1 ((vn (1 1 1))
(va (1 1 1))
(vc (1 1 1))))))))
(add-mark-to-note mini 3 2 'va 'ppp)
(write-lp-data-for-all mini))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; rm-marks-from-note
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(let* ((mini
(make-slippery-chicken
'+mini+
:ensemble '(((vn (violin :midi-channel 1))
(va (viola :midi-channel 2))
(vc (cello :midi-channel 3))))
:set-palette '((1 ((c3 d3 e3 f3 g3 a3 b3 c4 d4 e4 f4 g4 a4 b4 c5))))
:set-map '((1 (1 1 1)))
:rthm-seq-palette '((1 ((((4 4) - e (e) e e - (e) - e e e -))
:pitch-seq-palette ((1 2 3 4 5 6))
:marks (ppp 2))))
:rthm-seq-map '((1 ((vn (1 1 1))
(va (1 1 1))
(vc (1 1 1))))))))
(rm-marks-from-note mini 3 2 'va 'ppp)
(write-lp-data-for-all mini))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; tie
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(let ((mini
(make-slippery-chicken
'+mini+
:ensemble '(((vn (violin :midi-channel 1))))
:set-palette '((1 ((c4 d4 e4))))
:set-map '((1 (1 1 1 1)))
:rthm-seq-palette '((1 ((((2 4) q s s (s) s))
:pitch-seq-palette ((1 1 2 3)))))
:rthm-seq-map '((1 ((vn (1 1 1 1))))))))
(tie mini 2 1 'vn)
(tie mini 3 2 'vn)
(write-lp-data-for-all mini))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; move-events
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(let* ((mini
(make-slippery-chicken
'+mini+
:ensemble '(((vn (violin :midi-channel 1))
(va (viola :midi-channel 2))
(vc (cello :midi-channel 3))))
:set-palette '((1 ((c3 d3 e3 f3 g3 a3 b3 c4 d4 e4 f4 g4 a4 b4 c5))))
:set-map '((1 (1 1 1 1)))
:rthm-seq-palette '((1 ((((2 4) q (e) e))
:pitch-seq-palette ((1 2)))))
:rthm-seq-map '((1 ((vn (1 1 1 1))
(va (nil nil nil nil))
(vc (1 1 1 1))))))))
(move-events mini 'vn 'va 2 1 3 1 :consolidate-rests t :transposition -3)
(write-lp-data-for-all mini))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; double-events
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(let* ((mini
(make-slippery-chicken
'+mini+
:ensemble '(((vn (violin :midi-channel 1))
(va (viola :midi-channel 2))
(vc (cello :midi-channel 3))))
:set-palette '((1 ((c3 d3 e3 f3 g3 a3 b3 c4 d4 e4 f4 g4 a4 b4 c5))))
:set-map '((1 (1 1 1 1)))
:rthm-seq-palette '((1 ((((2 4) q (e) e))
:pitch-seq-palette ((1 2)))))
:rthm-seq-map '((1 ((vn (1 1 1 1))
(va (nil nil nil nil))
(vc (nil nil nil nil))))))))
(double-events mini 'vn '(va vc) 2 1 3 1
:consolidate-rests t :transposition -3)
(write-lp-data-for-all mini))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
EOF | null | https://raw.githubusercontent.com/mdedwards/slippery-chicken/c1c11fadcdb40cd869d5b29091ba5e53c5270e04/doc/examples/videos-example-21-all.lsp | lisp |
File: videos-example-21-all.lsp
Project: slippery chicken (algorithmic composition)
****
This file is part of slippery-chicken
slippery-chicken is free software; you can redistribute it
and/or modify it under the terms of the GNU General
either version 3 of the License , or ( at your
option) any later version.
slippery-chicken is distributed in the hope that it will
be useful, but WITHOUT ANY WARRANTY; without even the
implied warranty of MERCHANTABILITY or FITNESS FOR A
PARTICULAR PURPOSE. See the GNU General Public License
for more details.
License along with slippery-chicken; if not, write to the
get-event
get-note
change-pitch
add-mark-to-note
rm-marks-from-note
tie
move-events
double-events
| Class Hierarchy : None
Version : 1.0
Purpose : Lisp example code to accompany video tutorial 21
Author :
Creation date : 22nd December 2012
Licence : Copyright ( c ) 2012
Public License as published by the Free Software
You should have received a copy of the GNU General Public
Free Software Foundation , Inc. , 59 Temple Place , Suite
330 , Boston , MA 02111 - 1307 USA
(let* ((mini
(make-slippery-chicken
'+mini+
:ensemble '(((vn (violin :midi-channel 1))))
:set-palette '((1 ((c4 d4 e4 f4 g4 a4 b4 c5))))
:set-map '((1 (1)))
:rthm-seq-palette '((1 ((((2 4) (s) (s) e e e))
:pitch-seq-palette ((1 2 3)))))
:rthm-seq-map '((1 ((vn (1))))))))
(data (get-event mini 1 1 'vn)))
(let* ((mini
(make-slippery-chicken
'+mini+
:ensemble '(((vn (violin :midi-channel 1))))
:set-palette '((1 ((c4 d4 e4 f4 g4 a4 b4 c5))))
:set-map '((1 (1)))
:rthm-seq-palette '((1 ((((2 4) (s) (s) e e e))
:pitch-seq-palette ((1 2 3)))))
:rthm-seq-map '((1 ((vn (1))))))))
(data (get-note mini 1 1 'vn)))
(let* ((mini
(make-slippery-chicken
'+mini+
:ensemble '(((vn (violin :midi-channel 1))
(va (viola :midi-channel 2))
(vc (cello :midi-channel 3))))
:set-palette '((1 ((c3 d3 e3 f3 g3 a3 b3 c4 d4 e4 f4 g4 a4 b4 c5))))
:set-map '((1 (1 1 1)))
:rthm-seq-palette '((1 ((((4 4) - e (e) e e - (e) - e e e -))
:pitch-seq-palette ((1 2 3 4 5 6)))))
:rthm-seq-map '((1 ((vn (1 1 1))
(va (1 1 1))
(vc (1 1 1))))))))
(change-pitch mini 3 2 'va 'cs4)
(write-lp-data-for-all mini))
(let* ((mini
(make-slippery-chicken
'+mini+
:ensemble '(((vn (violin :midi-channel 1))
(va (viola :midi-channel 2))
(vc (cello :midi-channel 3))))
:set-palette '((1 ((c3 d3 e3 f3 g3 a3 b3 c4 d4 e4 f4 g4 a4 b4 c5))))
:set-map '((1 (1 1 1)))
:rthm-seq-palette '((1 ((((4 4) - e (e) e e - (e) - e e e -))
:pitch-seq-palette ((1 2 3 4 5 6)))))
:rthm-seq-map '((1 ((vn (1 1 1))
(va (1 1 1))
(vc (1 1 1))))))))
(add-mark-to-note mini 3 2 'va 'ppp)
(write-lp-data-for-all mini))
(let* ((mini
(make-slippery-chicken
'+mini+
:ensemble '(((vn (violin :midi-channel 1))
(va (viola :midi-channel 2))
(vc (cello :midi-channel 3))))
:set-palette '((1 ((c3 d3 e3 f3 g3 a3 b3 c4 d4 e4 f4 g4 a4 b4 c5))))
:set-map '((1 (1 1 1)))
:rthm-seq-palette '((1 ((((4 4) - e (e) e e - (e) - e e e -))
:pitch-seq-palette ((1 2 3 4 5 6))
:marks (ppp 2))))
:rthm-seq-map '((1 ((vn (1 1 1))
(va (1 1 1))
(vc (1 1 1))))))))
(rm-marks-from-note mini 3 2 'va 'ppp)
(write-lp-data-for-all mini))
(let ((mini
(make-slippery-chicken
'+mini+
:ensemble '(((vn (violin :midi-channel 1))))
:set-palette '((1 ((c4 d4 e4))))
:set-map '((1 (1 1 1 1)))
:rthm-seq-palette '((1 ((((2 4) q s s (s) s))
:pitch-seq-palette ((1 1 2 3)))))
:rthm-seq-map '((1 ((vn (1 1 1 1))))))))
(tie mini 2 1 'vn)
(tie mini 3 2 'vn)
(write-lp-data-for-all mini))
(let* ((mini
(make-slippery-chicken
'+mini+
:ensemble '(((vn (violin :midi-channel 1))
(va (viola :midi-channel 2))
(vc (cello :midi-channel 3))))
:set-palette '((1 ((c3 d3 e3 f3 g3 a3 b3 c4 d4 e4 f4 g4 a4 b4 c5))))
:set-map '((1 (1 1 1 1)))
:rthm-seq-palette '((1 ((((2 4) q (e) e))
:pitch-seq-palette ((1 2)))))
:rthm-seq-map '((1 ((vn (1 1 1 1))
(va (nil nil nil nil))
(vc (1 1 1 1))))))))
(move-events mini 'vn 'va 2 1 3 1 :consolidate-rests t :transposition -3)
(write-lp-data-for-all mini))
(let* ((mini
(make-slippery-chicken
'+mini+
:ensemble '(((vn (violin :midi-channel 1))
(va (viola :midi-channel 2))
(vc (cello :midi-channel 3))))
:set-palette '((1 ((c3 d3 e3 f3 g3 a3 b3 c4 d4 e4 f4 g4 a4 b4 c5))))
:set-map '((1 (1 1 1 1)))
:rthm-seq-palette '((1 ((((2 4) q (e) e))
:pitch-seq-palette ((1 2)))))
:rthm-seq-map '((1 ((vn (1 1 1 1))
(va (nil nil nil nil))
(vc (nil nil nil nil))))))))
(double-events mini 'vn '(va vc) 2 1 3 1
:consolidate-rests t :transposition -3)
(write-lp-data-for-all mini))
EOF |
96cbfd78b789e10c2340c04c3a644ceb1fe03987f4184e07c0da8657866398d7 | kelamg/HtDP2e-workthrough | ex335.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-intermediate-lambda-reader.ss" "lang")((modname ex335) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
(define-struct file [name size content])
; A File.v3 is a structure:
; (make-file String N String)
(define-struct dir [name dirs files])
; A Dir.v3 is a structure:
; (make-dir String Dir* File*)
A Dir * is one of :
; – '()
; – (cons Dir.v3 Dir*)
A File * is one of :
; – '()
; – (cons File.v3 File*)
figure 123
(define DIR
(make-dir
"TS"
(list
(make-dir "Text" '() (list (make-file "part1" 99 "")
(make-file "part2" 52 "")
(make-file "part3" 17 "")))
(make-dir "Libs"
(list (make-dir "Code" '() (list (make-file "hang" 8 "")
(make-file "draw" 2 "")))
(make-dir "Docs" '() (list (make-file "read!" 19 ""))))
'()))
(list (make-file "read!" 10 ""))))
| null | https://raw.githubusercontent.com/kelamg/HtDP2e-workthrough/ec05818d8b667a3c119bea8d1d22e31e72e0a958/HtDP/Intertwined-Data/ex335.rkt | racket | about the language level of this file in a form that our tools can easily process.
A File.v3 is a structure:
(make-file String N String)
A Dir.v3 is a structure:
(make-dir String Dir* File*)
– '()
– (cons Dir.v3 Dir*)
– '()
– (cons File.v3 File*) | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-intermediate-lambda-reader.ss" "lang")((modname ex335) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
(define-struct file [name size content])
(define-struct dir [name dirs files])
A Dir * is one of :
A File * is one of :
figure 123
(define DIR
(make-dir
"TS"
(list
(make-dir "Text" '() (list (make-file "part1" 99 "")
(make-file "part2" 52 "")
(make-file "part3" 17 "")))
(make-dir "Libs"
(list (make-dir "Code" '() (list (make-file "hang" 8 "")
(make-file "draw" 2 "")))
(make-dir "Docs" '() (list (make-file "read!" 19 ""))))
'()))
(list (make-file "read!" 10 ""))))
|
d44c9787ac929bc811d8b09d52ccb7dc5cec080a4d54a17ba2f04d36584b2751 | pirapira/coq2rust | globnames.ml | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2012
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
open Util
open Errors
open Names
open Term
open Mod_subst
open Libnames
(*s Global reference is a kernel side type for all references together *)
type global_reference =
| VarRef of variable
| ConstRef of constant
| IndRef of inductive
| ConstructRef of constructor
let isVarRef = function VarRef _ -> true | _ -> false
let isConstRef = function ConstRef _ -> true | _ -> false
let isIndRef = function IndRef _ -> true | _ -> false
let isConstructRef = function ConstructRef _ -> true | _ -> false
let eq_gr gr1 gr2 =
gr1 == gr2 || match gr1,gr2 with
| ConstRef con1, ConstRef con2 -> eq_constant con1 con2
| IndRef kn1, IndRef kn2 -> eq_ind kn1 kn2
| ConstructRef kn1, ConstructRef kn2 -> eq_constructor kn1 kn2
| VarRef v1, VarRef v2 -> Id.equal v1 v2
| _ -> false
let destVarRef = function VarRef ind -> ind | _ -> failwith "destVarRef"
let destConstRef = function ConstRef ind -> ind | _ -> failwith "destConstRef"
let destIndRef = function IndRef ind -> ind | _ -> failwith "destIndRef"
let destConstructRef = function ConstructRef ind -> ind | _ -> failwith "destConstructRef"
let subst_constructor subst (ind,j as ref) =
let ind' = subst_ind subst ind in
if ind==ind' then ref, mkConstruct ref
else (ind',j), mkConstruct (ind',j)
let subst_global_reference subst ref = match ref with
| VarRef var -> ref
| ConstRef kn ->
let kn' = subst_constant subst kn in
if kn==kn' then ref else ConstRef kn'
| IndRef ind ->
let ind' = subst_ind subst ind in
if ind==ind' then ref else IndRef ind'
| ConstructRef ((kn,i),j as c) ->
let c',t = subst_constructor subst c in
if c'==c then ref else ConstructRef c'
let subst_global subst ref = match ref with
| VarRef var -> ref, mkVar var
| ConstRef kn ->
let kn',t = subst_con_kn subst kn in
if kn==kn' then ref, mkConst kn else ConstRef kn', t
| IndRef ind ->
let ind' = subst_ind subst ind in
if ind==ind' then ref, mkInd ind else IndRef ind', mkInd ind'
| ConstructRef ((kn,i),j as c) ->
let c',t = subst_constructor subst c in
if c'==c then ref,t else ConstructRef c', t
let canonical_gr = function
| ConstRef con -> ConstRef(constant_of_kn(canonical_con con))
| IndRef (kn,i) -> IndRef(mind_of_kn(canonical_mind kn),i)
| ConstructRef ((kn,i),j )-> ConstructRef((mind_of_kn(canonical_mind kn),i),j)
| VarRef id -> VarRef id
let global_of_constr c = match kind_of_term c with
| Const (sp,u) -> ConstRef sp
| Ind (ind_sp,u) -> IndRef ind_sp
| Construct (cstr_cp,u) -> ConstructRef cstr_cp
| Var id -> VarRef id
| _ -> raise Not_found
let is_global c t =
match c, kind_of_term t with
| ConstRef c, Const (c', _) -> eq_constant c c'
| IndRef i, Ind (i', _) -> eq_ind i i'
| ConstructRef i, Construct (i', _) -> eq_constructor i i'
| VarRef id, Var id' -> id_eq id id'
| _ -> false
let printable_constr_of_global = function
| VarRef id -> mkVar id
| ConstRef sp -> mkConst sp
| ConstructRef sp -> mkConstruct sp
| IndRef sp -> mkInd sp
let reference_of_constr = global_of_constr
let global_eq_gen eq_cst eq_ind eq_cons x y =
x == y ||
match x, y with
| ConstRef cx, ConstRef cy -> eq_cst cx cy
| IndRef indx, IndRef indy -> eq_ind indx indy
| ConstructRef consx, ConstructRef consy -> eq_cons consx consy
| VarRef v1, VarRef v2 -> Id.equal v1 v2
| (VarRef _ | ConstRef _ | IndRef _ | ConstructRef _), _ -> false
let global_ord_gen ord_cst ord_ind ord_cons x y =
if x == y then 0
else match x, y with
| ConstRef cx, ConstRef cy -> ord_cst cx cy
| IndRef indx, IndRef indy -> ord_ind indx indy
| ConstructRef consx, ConstructRef consy -> ord_cons consx consy
| VarRef v1, VarRef v2 -> Id.compare v1 v2
| VarRef _, (ConstRef _ | IndRef _ | ConstructRef _) -> -1
| ConstRef _, VarRef _ -> 1
| ConstRef _, (IndRef _ | ConstructRef _) -> -1
| IndRef _, (VarRef _ | ConstRef _) -> 1
| IndRef _, ConstructRef _ -> -1
| ConstructRef _, (VarRef _ | ConstRef _ | IndRef _) -> 1
let global_hash_gen hash_cst hash_ind hash_cons gr =
let open Hashset.Combine in
match gr with
| ConstRef c -> combinesmall 1 (hash_cst c)
| IndRef i -> combinesmall 2 (hash_ind i)
| ConstructRef c -> combinesmall 3 (hash_cons c)
| VarRef id -> combinesmall 4 (Id.hash id)
(* By default, [global_reference] are ordered on their canonical part *)
module RefOrdered = struct
open Constant.CanOrd
type t = global_reference
let compare gr1 gr2 =
global_ord_gen compare ind_ord constructor_ord gr1 gr2
let equal gr1 gr2 = global_eq_gen equal eq_ind eq_constructor gr1 gr2
let hash gr = global_hash_gen hash ind_hash constructor_hash gr
end
module RefOrdered_env = struct
open Constant.UserOrd
type t = global_reference
let compare gr1 gr2 =
global_ord_gen compare ind_user_ord constructor_user_ord gr1 gr2
let equal gr1 gr2 =
global_eq_gen equal eq_user_ind eq_user_constructor gr1 gr2
let hash gr = global_hash_gen hash ind_user_hash constructor_user_hash gr
end
module Refmap = HMap.Make(RefOrdered)
module Refset = Refmap.Set
(* Alternative sets and maps indexed by the user part of the kernel names *)
module Refmap_env = HMap.Make(RefOrdered_env)
module Refset_env = Refmap_env.Set
(* Extended global references *)
type syndef_name = kernel_name
type extended_global_reference =
| TrueGlobal of global_reference
| SynDef of syndef_name
(* We order [extended_global_reference] via their user part
(cf. pretty printer) *)
module ExtRefOrdered = struct
type t = extended_global_reference
let equal x y =
x == y ||
match x, y with
| TrueGlobal rx, TrueGlobal ry -> RefOrdered_env.equal rx ry
| SynDef knx, SynDef kny -> KerName.equal knx kny
| (TrueGlobal _ | SynDef _), _ -> false
let compare x y =
if x == y then 0
else match x, y with
| TrueGlobal rx, TrueGlobal ry -> RefOrdered_env.compare rx ry
| SynDef knx, SynDef kny -> kn_ord knx kny
| TrueGlobal _, SynDef _ -> -1
| SynDef _, TrueGlobal _ -> 1
open Hashset.Combine
let hash = function
| TrueGlobal gr -> combinesmall 1 (RefOrdered_env.hash gr)
| SynDef kn -> combinesmall 2 (KerName.hash kn)
end
type global_reference_or_constr =
| IsGlobal of global_reference
| IsConstr of constr
* { 6 Temporary function to brutally form kernel names from section paths }
let encode_mind dir id = MutInd.make2 (MPfile dir) (Label.of_id id)
let encode_con dir id = Constant.make2 (MPfile dir) (Label.of_id id)
let check_empty_section dp =
if not (DirPath.is_empty dp) then
anomaly (Pp.str "Section part should be empty!")
let decode_mind kn =
let rec dir_of_mp = function
| MPfile dir -> DirPath.repr dir
| MPbound mbid ->
let _,_,dp = MBId.repr mbid in
let id = MBId.to_id mbid in
id::(DirPath.repr dp)
| MPdot(mp,l) -> (Label.to_id l)::(dir_of_mp mp)
in
let mp,sec_dir,l = repr_mind kn in
check_empty_section sec_dir;
(DirPath.make (dir_of_mp mp)),Label.to_id l
let decode_con kn =
let mp,sec_dir,l = repr_con kn in
check_empty_section sec_dir;
match mp with
| MPfile dir -> (dir,Label.to_id l)
| _ -> anomaly (Pp.str "MPfile expected!")
* Popping one level of section in global names .
These functions are meant to be used during discharge :
user and canonical kernel names must be equal .
These functions are meant to be used during discharge:
user and canonical kernel names must be equal. *)
let pop_con con =
let (mp,dir,l) = repr_con con in
Names.make_con mp (pop_dirpath dir) l
let pop_kn kn =
let (mp,dir,l) = repr_mind kn in
Names.make_mind mp (pop_dirpath dir) l
let pop_global_reference = function
| ConstRef con -> ConstRef (pop_con con)
| IndRef (kn,i) -> IndRef (pop_kn kn,i)
| ConstructRef ((kn,i),j) -> ConstructRef ((pop_kn kn,i),j)
| VarRef id -> anomaly (Pp.str "VarRef not poppable")
| null | https://raw.githubusercontent.com/pirapira/coq2rust/22e8aaefc723bfb324ca2001b2b8e51fcc923543/library/globnames.ml | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
s Global reference is a kernel side type for all references together
By default, [global_reference] are ordered on their canonical part
Alternative sets and maps indexed by the user part of the kernel names
Extended global references
We order [extended_global_reference] via their user part
(cf. pretty printer) | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2012
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
open Util
open Errors
open Names
open Term
open Mod_subst
open Libnames
type global_reference =
| VarRef of variable
| ConstRef of constant
| IndRef of inductive
| ConstructRef of constructor
let isVarRef = function VarRef _ -> true | _ -> false
let isConstRef = function ConstRef _ -> true | _ -> false
let isIndRef = function IndRef _ -> true | _ -> false
let isConstructRef = function ConstructRef _ -> true | _ -> false
let eq_gr gr1 gr2 =
gr1 == gr2 || match gr1,gr2 with
| ConstRef con1, ConstRef con2 -> eq_constant con1 con2
| IndRef kn1, IndRef kn2 -> eq_ind kn1 kn2
| ConstructRef kn1, ConstructRef kn2 -> eq_constructor kn1 kn2
| VarRef v1, VarRef v2 -> Id.equal v1 v2
| _ -> false
let destVarRef = function VarRef ind -> ind | _ -> failwith "destVarRef"
let destConstRef = function ConstRef ind -> ind | _ -> failwith "destConstRef"
let destIndRef = function IndRef ind -> ind | _ -> failwith "destIndRef"
let destConstructRef = function ConstructRef ind -> ind | _ -> failwith "destConstructRef"
let subst_constructor subst (ind,j as ref) =
let ind' = subst_ind subst ind in
if ind==ind' then ref, mkConstruct ref
else (ind',j), mkConstruct (ind',j)
let subst_global_reference subst ref = match ref with
| VarRef var -> ref
| ConstRef kn ->
let kn' = subst_constant subst kn in
if kn==kn' then ref else ConstRef kn'
| IndRef ind ->
let ind' = subst_ind subst ind in
if ind==ind' then ref else IndRef ind'
| ConstructRef ((kn,i),j as c) ->
let c',t = subst_constructor subst c in
if c'==c then ref else ConstructRef c'
let subst_global subst ref = match ref with
| VarRef var -> ref, mkVar var
| ConstRef kn ->
let kn',t = subst_con_kn subst kn in
if kn==kn' then ref, mkConst kn else ConstRef kn', t
| IndRef ind ->
let ind' = subst_ind subst ind in
if ind==ind' then ref, mkInd ind else IndRef ind', mkInd ind'
| ConstructRef ((kn,i),j as c) ->
let c',t = subst_constructor subst c in
if c'==c then ref,t else ConstructRef c', t
let canonical_gr = function
| ConstRef con -> ConstRef(constant_of_kn(canonical_con con))
| IndRef (kn,i) -> IndRef(mind_of_kn(canonical_mind kn),i)
| ConstructRef ((kn,i),j )-> ConstructRef((mind_of_kn(canonical_mind kn),i),j)
| VarRef id -> VarRef id
let global_of_constr c = match kind_of_term c with
| Const (sp,u) -> ConstRef sp
| Ind (ind_sp,u) -> IndRef ind_sp
| Construct (cstr_cp,u) -> ConstructRef cstr_cp
| Var id -> VarRef id
| _ -> raise Not_found
let is_global c t =
match c, kind_of_term t with
| ConstRef c, Const (c', _) -> eq_constant c c'
| IndRef i, Ind (i', _) -> eq_ind i i'
| ConstructRef i, Construct (i', _) -> eq_constructor i i'
| VarRef id, Var id' -> id_eq id id'
| _ -> false
let printable_constr_of_global = function
| VarRef id -> mkVar id
| ConstRef sp -> mkConst sp
| ConstructRef sp -> mkConstruct sp
| IndRef sp -> mkInd sp
let reference_of_constr = global_of_constr
let global_eq_gen eq_cst eq_ind eq_cons x y =
x == y ||
match x, y with
| ConstRef cx, ConstRef cy -> eq_cst cx cy
| IndRef indx, IndRef indy -> eq_ind indx indy
| ConstructRef consx, ConstructRef consy -> eq_cons consx consy
| VarRef v1, VarRef v2 -> Id.equal v1 v2
| (VarRef _ | ConstRef _ | IndRef _ | ConstructRef _), _ -> false
let global_ord_gen ord_cst ord_ind ord_cons x y =
if x == y then 0
else match x, y with
| ConstRef cx, ConstRef cy -> ord_cst cx cy
| IndRef indx, IndRef indy -> ord_ind indx indy
| ConstructRef consx, ConstructRef consy -> ord_cons consx consy
| VarRef v1, VarRef v2 -> Id.compare v1 v2
| VarRef _, (ConstRef _ | IndRef _ | ConstructRef _) -> -1
| ConstRef _, VarRef _ -> 1
| ConstRef _, (IndRef _ | ConstructRef _) -> -1
| IndRef _, (VarRef _ | ConstRef _) -> 1
| IndRef _, ConstructRef _ -> -1
| ConstructRef _, (VarRef _ | ConstRef _ | IndRef _) -> 1
let global_hash_gen hash_cst hash_ind hash_cons gr =
let open Hashset.Combine in
match gr with
| ConstRef c -> combinesmall 1 (hash_cst c)
| IndRef i -> combinesmall 2 (hash_ind i)
| ConstructRef c -> combinesmall 3 (hash_cons c)
| VarRef id -> combinesmall 4 (Id.hash id)
module RefOrdered = struct
open Constant.CanOrd
type t = global_reference
let compare gr1 gr2 =
global_ord_gen compare ind_ord constructor_ord gr1 gr2
let equal gr1 gr2 = global_eq_gen equal eq_ind eq_constructor gr1 gr2
let hash gr = global_hash_gen hash ind_hash constructor_hash gr
end
module RefOrdered_env = struct
open Constant.UserOrd
type t = global_reference
let compare gr1 gr2 =
global_ord_gen compare ind_user_ord constructor_user_ord gr1 gr2
let equal gr1 gr2 =
global_eq_gen equal eq_user_ind eq_user_constructor gr1 gr2
let hash gr = global_hash_gen hash ind_user_hash constructor_user_hash gr
end
module Refmap = HMap.Make(RefOrdered)
module Refset = Refmap.Set
module Refmap_env = HMap.Make(RefOrdered_env)
module Refset_env = Refmap_env.Set
type syndef_name = kernel_name
type extended_global_reference =
| TrueGlobal of global_reference
| SynDef of syndef_name
module ExtRefOrdered = struct
type t = extended_global_reference
let equal x y =
x == y ||
match x, y with
| TrueGlobal rx, TrueGlobal ry -> RefOrdered_env.equal rx ry
| SynDef knx, SynDef kny -> KerName.equal knx kny
| (TrueGlobal _ | SynDef _), _ -> false
let compare x y =
if x == y then 0
else match x, y with
| TrueGlobal rx, TrueGlobal ry -> RefOrdered_env.compare rx ry
| SynDef knx, SynDef kny -> kn_ord knx kny
| TrueGlobal _, SynDef _ -> -1
| SynDef _, TrueGlobal _ -> 1
open Hashset.Combine
let hash = function
| TrueGlobal gr -> combinesmall 1 (RefOrdered_env.hash gr)
| SynDef kn -> combinesmall 2 (KerName.hash kn)
end
type global_reference_or_constr =
| IsGlobal of global_reference
| IsConstr of constr
* { 6 Temporary function to brutally form kernel names from section paths }
let encode_mind dir id = MutInd.make2 (MPfile dir) (Label.of_id id)
let encode_con dir id = Constant.make2 (MPfile dir) (Label.of_id id)
let check_empty_section dp =
if not (DirPath.is_empty dp) then
anomaly (Pp.str "Section part should be empty!")
let decode_mind kn =
let rec dir_of_mp = function
| MPfile dir -> DirPath.repr dir
| MPbound mbid ->
let _,_,dp = MBId.repr mbid in
let id = MBId.to_id mbid in
id::(DirPath.repr dp)
| MPdot(mp,l) -> (Label.to_id l)::(dir_of_mp mp)
in
let mp,sec_dir,l = repr_mind kn in
check_empty_section sec_dir;
(DirPath.make (dir_of_mp mp)),Label.to_id l
let decode_con kn =
let mp,sec_dir,l = repr_con kn in
check_empty_section sec_dir;
match mp with
| MPfile dir -> (dir,Label.to_id l)
| _ -> anomaly (Pp.str "MPfile expected!")
* Popping one level of section in global names .
These functions are meant to be used during discharge :
user and canonical kernel names must be equal .
These functions are meant to be used during discharge:
user and canonical kernel names must be equal. *)
let pop_con con =
let (mp,dir,l) = repr_con con in
Names.make_con mp (pop_dirpath dir) l
let pop_kn kn =
let (mp,dir,l) = repr_mind kn in
Names.make_mind mp (pop_dirpath dir) l
let pop_global_reference = function
| ConstRef con -> ConstRef (pop_con con)
| IndRef (kn,i) -> IndRef (pop_kn kn,i)
| ConstructRef ((kn,i),j) -> ConstructRef ((pop_kn kn,i),j)
| VarRef id -> anomaly (Pp.str "VarRef not poppable")
|
b87a76345c2bba2ffdb72ee386296c494f87eb88516221ee0b7a261b6f995f84 | DHSProgram/DHS-Indicators-SPSS | RH_tables_BR.sps | * Encoding: windows-1252.
*************************************************************************************************************************************************************************************************************
Program: RH_tables_BR.sps
Purpose: produce tables for indicators
Author: Ivana Bjelic
Date last modified: July 17 2019 by Ivana Bjelic
*This do file will produce the following table in excel:
Tables_Deliv: Contains the tables for the delivery indicators
***********************************************************************************************************************************************************************************************************/
* When implementing a crosstabs command instead of ctables command please change:
ctables to *ctables.
*crosstabs to crosstabs.
compute wt=v005/1000000.
weight by wt.
* create denominator.
do if (age < period).
compute nbr=1.
end if.
variable labels nbr "Number of births".
* the total will show on the last row of each table.
* comment out the tables or indicator section you do not want.
****************************************************
* indicators from BR file.
****************************************************
* place of delivery.
ctables
/table v025 [c]
+ v024 [c]
+ v106 [c]
+ v190 [c] by
rh_del_place [c] [rowpct.validn '' f5.1] + nbr [s] [sum,'', f5.0]
/categories variables=all empty=exclude missing=exclude
/categories variables=all total=yes position=after label="Total"
/slabels visible=no
/titles title=
"Live births in past 5 yrs by place of delivery".
*crosstabs
/tables = v025 v024 v106 v190 by rh_del_place
/format = avalue tables
/cells = row
/count asis.
****************************************************
* type of health facilty.
ctables
/table v025 [c]
+ v024 [c]
+ v106 [c]
+ v190 [c] by
rh_del_pltype [c] [rowpct.validn '' f5.1] + nbr [s] [sum,'', f5.0]
/categories variables=all empty=exclude missing=exclude
/categories variables=all total=yes position=after label="Total"
/slabels visible=no
/titles title=
"Live births in past 5 yrs by place of delivery".
*crosstabs
/tables = v025 v024 v106 v190 by rh_del_pltype
/format = avalue tables
/cells = row
/count asis.
****************************************************
* type of provider.
ctables
/table v025 [c]
+ v024 [c]
+ v106 [c]
+ v190 [c] by
rh_del_pv [c] [rowpct.validn '' f5.1] + nbr [s] [sum,'', f5.0]
/categories variables=all empty=exclude missing=exclude
/categories variables=all total=yes position=after label="Total"
/slabels visible=no
/titles title=
"Person providing assistance during delivery ".
*crosstabs
/tables = v025 v024 v106 v190 by rh_del_pv
/format = avalue tables
/cells = row
/count asis.
****************************************************
* skilled provider.
ctables
/table v025 [c]
+ v024 [c]
+ v106 [c]
+ v190 [c] by
rh_del_pvskill [c] [rowpct.validn '' f5.1] + nbr [s] [sum,'', f5.0]
/categories variables=all empty=exclude missing=exclude
/categories variables=all total=yes position=after label="Total"
/slabels visible=no
/titles title=
"Skilled assistance during delivery".
*crosstabs
/tables = v025 v024 v106 v190 by rh_del_pvskill
/format = avalue tables
/cells = row
/count asis.
****************************************************
* C-section delivery.
ctables
/table v025 [c]
+ v024 [c]
+ v106 [c]
+ v190 [c] by
rh_del_ces [c] [rowpct.validn '' f5.1] + nbr [s] [sum,'', f5.0]
/categories variables=all empty=exclude missing=exclude
/categories variables=all total=yes position=after label="Total"
/slabels visible=no
/titles title=
"C-section delivery".
*crosstabs
/tables = v025 v024 v106 v190 by rh_del_ces
/format = avalue tables
/cells = row
/count asis.
****************************************************
* C-section delivery timing.
ctables
/table v025 [c]
+ v024 [c]
+ v106 [c]
+ v190 [c] by
rh_del_cestime [c] [rowpct.validn '' f5.1] + nbr [s] [sum,'', f5.0]
/categories variables=all empty=exclude missing=exclude
/categories variables=all total=yes position=after label="Total"
/slabels visible=no
/titles title=
"C-section delivery timing".
*crosstabs
/tables = v025 v024 v106 v190 by rh_del_cestime
/format = avalue tables
/cells = row
/count asis.
****************************************************
* Duration of stay after delivery.
* Vaginal births.
* C-section births.
ctables
/table rh_del_cestimeR [c] by
rh_del_stay [c] [rowpct.validn '' f5.1]+ rh_del_stay [s] [validn, 'Number of women', f5.0]
/categories variables=all empty=exclude missing=exclude
/categories variables=rh_del_stay total=yes position=after label="Total"
/slabels visible=no
/titles title=
"Duration of stay after delivery".
*crosstabs
/tables = rh_del_cestimeR by rh_del_stay
/format = avalue tables
/cells = row
/count asis.
****************************************************.
* Export Output.
output export
/contents export=visible layers=printsetting modelviews=printsetting
/xls documentfile="Tables_Deliv"
operation=createfile.
output close * .
new file.
| null | https://raw.githubusercontent.com/DHSProgram/DHS-Indicators-SPSS/578e6d40eff9edebda7cf0db0d9a0a52a537d98c/Chap09_RH/RH_tables_BR.sps | scheme | * Encoding: windows-1252.
*************************************************************************************************************************************************************************************************************
Program: RH_tables_BR.sps
Purpose: produce tables for indicators
Author: Ivana Bjelic
Date last modified: July 17 2019 by Ivana Bjelic
*This do file will produce the following table in excel:
Tables_Deliv: Contains the tables for the delivery indicators
***********************************************************************************************************************************************************************************************************/
* When implementing a crosstabs command instead of ctables command please change:
ctables to *ctables.
*crosstabs to crosstabs.
compute wt=v005/1000000.
weight by wt.
* create denominator.
do if (age < period).
compute nbr=1.
end if.
variable labels nbr "Number of births".
* the total will show on the last row of each table.
* comment out the tables or indicator section you do not want.
****************************************************
* indicators from BR file.
****************************************************
* place of delivery.
ctables
/table v025 [c]
+ v024 [c]
+ v106 [c]
+ v190 [c] by
rh_del_place [c] [rowpct.validn '' f5.1] + nbr [s] [sum,'', f5.0]
/categories variables=all empty=exclude missing=exclude
/categories variables=all total=yes position=after label="Total"
/slabels visible=no
/titles title=
"Live births in past 5 yrs by place of delivery".
*crosstabs
/tables = v025 v024 v106 v190 by rh_del_place
/format = avalue tables
/cells = row
/count asis.
****************************************************
* type of health facilty.
ctables
/table v025 [c]
+ v024 [c]
+ v106 [c]
+ v190 [c] by
rh_del_pltype [c] [rowpct.validn '' f5.1] + nbr [s] [sum,'', f5.0]
/categories variables=all empty=exclude missing=exclude
/categories variables=all total=yes position=after label="Total"
/slabels visible=no
/titles title=
"Live births in past 5 yrs by place of delivery".
*crosstabs
/tables = v025 v024 v106 v190 by rh_del_pltype
/format = avalue tables
/cells = row
/count asis.
****************************************************
* type of provider.
ctables
/table v025 [c]
+ v024 [c]
+ v106 [c]
+ v190 [c] by
rh_del_pv [c] [rowpct.validn '' f5.1] + nbr [s] [sum,'', f5.0]
/categories variables=all empty=exclude missing=exclude
/categories variables=all total=yes position=after label="Total"
/slabels visible=no
/titles title=
"Person providing assistance during delivery ".
*crosstabs
/tables = v025 v024 v106 v190 by rh_del_pv
/format = avalue tables
/cells = row
/count asis.
****************************************************
* skilled provider.
ctables
/table v025 [c]
+ v024 [c]
+ v106 [c]
+ v190 [c] by
rh_del_pvskill [c] [rowpct.validn '' f5.1] + nbr [s] [sum,'', f5.0]
/categories variables=all empty=exclude missing=exclude
/categories variables=all total=yes position=after label="Total"
/slabels visible=no
/titles title=
"Skilled assistance during delivery".
*crosstabs
/tables = v025 v024 v106 v190 by rh_del_pvskill
/format = avalue tables
/cells = row
/count asis.
****************************************************
* C-section delivery.
ctables
/table v025 [c]
+ v024 [c]
+ v106 [c]
+ v190 [c] by
rh_del_ces [c] [rowpct.validn '' f5.1] + nbr [s] [sum,'', f5.0]
/categories variables=all empty=exclude missing=exclude
/categories variables=all total=yes position=after label="Total"
/slabels visible=no
/titles title=
"C-section delivery".
*crosstabs
/tables = v025 v024 v106 v190 by rh_del_ces
/format = avalue tables
/cells = row
/count asis.
****************************************************
* C-section delivery timing.
ctables
/table v025 [c]
+ v024 [c]
+ v106 [c]
+ v190 [c] by
rh_del_cestime [c] [rowpct.validn '' f5.1] + nbr [s] [sum,'', f5.0]
/categories variables=all empty=exclude missing=exclude
/categories variables=all total=yes position=after label="Total"
/slabels visible=no
/titles title=
"C-section delivery timing".
*crosstabs
/tables = v025 v024 v106 v190 by rh_del_cestime
/format = avalue tables
/cells = row
/count asis.
****************************************************
* Duration of stay after delivery.
* Vaginal births.
* C-section births.
ctables
/table rh_del_cestimeR [c] by
rh_del_stay [c] [rowpct.validn '' f5.1]+ rh_del_stay [s] [validn, 'Number of women', f5.0]
/categories variables=all empty=exclude missing=exclude
/categories variables=rh_del_stay total=yes position=after label="Total"
/slabels visible=no
/titles title=
"Duration of stay after delivery".
*crosstabs
/tables = rh_del_cestimeR by rh_del_stay
/format = avalue tables
/cells = row
/count asis.
****************************************************.
* Export Output.
output export
/contents export=visible layers=printsetting modelviews=printsetting
/xls documentfile="Tables_Deliv"
operation=createfile.
output close * .
new file.
| |
ac0565dcc700e3dff02b7fbe51f49b907c1c40c47e443e3c1fc9be74328f31e7 | apache/couchdb-chttpd | chttpd_plugin.erl | Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% -2.0
%
% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(chttpd_plugin).
-export([
before_request/1,
after_request/2,
handle_error/1,
before_response/4,
before_serve_file/5
]).
-define(SERVICE_ID, chttpd).
-include_lib("couch/include/couch_db.hrl").
%% ------------------------------------------------------------------
%% API Function Definitions
%% ------------------------------------------------------------------
before_request(HttpReq) ->
[HttpReq1] = with_pipe(before_request, [HttpReq]),
{ok, HttpReq1}.
after_request(HttpReq, Result) ->
[_, Result1] = with_pipe(after_request, [HttpReq, Result]),
{ok, Result1}.
handle_error(Error) ->
[Error1] = with_pipe(handle_error, [Error]),
Error1.
before_response(HttpReq0, Code0, Headers0, Value0) ->
[HttpReq, Code, Headers, Value] =
with_pipe(before_response, [HttpReq0, Code0, Headers0, Value0]),
{ok, {HttpReq, Code, Headers, Value}}.
before_serve_file(Req0, Code0, Headers0, RelativePath0, DocumentRoot0) ->
[HttpReq, Code, Headers, RelativePath, DocumentRoot] =
with_pipe(before_serve_file, [
Req0, Code0, Headers0, RelativePath0, DocumentRoot0]),
{ok, {HttpReq, Code, Headers, RelativePath, DocumentRoot}}.
%% ------------------------------------------------------------------
%% Internal Function Definitions
%% ------------------------------------------------------------------
with_pipe(Func, Args) ->
do_apply(Func, Args, [pipe]).
do_apply(Func, Args, Opts) ->
Handle = couch_epi:get_handle(?SERVICE_ID),
couch_epi:apply(Handle, ?SERVICE_ID, Func, Args, Opts).
| null | https://raw.githubusercontent.com/apache/couchdb-chttpd/74002101513c03df74a4c25f3c892f5d003fa5da/src/chttpd_plugin.erl | erlang | use this file except in compliance with the License. You may obtain a copy of
the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License.
------------------------------------------------------------------
API Function Definitions
------------------------------------------------------------------
------------------------------------------------------------------
Internal Function Definitions
------------------------------------------------------------------ | Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
-module(chttpd_plugin).
-export([
before_request/1,
after_request/2,
handle_error/1,
before_response/4,
before_serve_file/5
]).
-define(SERVICE_ID, chttpd).
-include_lib("couch/include/couch_db.hrl").
before_request(HttpReq) ->
[HttpReq1] = with_pipe(before_request, [HttpReq]),
{ok, HttpReq1}.
after_request(HttpReq, Result) ->
[_, Result1] = with_pipe(after_request, [HttpReq, Result]),
{ok, Result1}.
handle_error(Error) ->
[Error1] = with_pipe(handle_error, [Error]),
Error1.
before_response(HttpReq0, Code0, Headers0, Value0) ->
[HttpReq, Code, Headers, Value] =
with_pipe(before_response, [HttpReq0, Code0, Headers0, Value0]),
{ok, {HttpReq, Code, Headers, Value}}.
before_serve_file(Req0, Code0, Headers0, RelativePath0, DocumentRoot0) ->
[HttpReq, Code, Headers, RelativePath, DocumentRoot] =
with_pipe(before_serve_file, [
Req0, Code0, Headers0, RelativePath0, DocumentRoot0]),
{ok, {HttpReq, Code, Headers, RelativePath, DocumentRoot}}.
with_pipe(Func, Args) ->
do_apply(Func, Args, [pipe]).
do_apply(Func, Args, Opts) ->
Handle = couch_epi:get_handle(?SERVICE_ID),
couch_epi:apply(Handle, ?SERVICE_ID, Func, Args, Opts).
|
5840488a30af0cdaf172000f8dc436e727776c1ca55a77febf303563aeb25c02 | oblivia-simplex/roper | arm-analysis.lisp | (in-package :arm-analysis)
(defvar +masks+
'((#b00001111111111111111111111010000
#b00000001001011111111111100010000
:Lay=BX) ;; Found a bug in the spec document?
(#b00001111110000000000000011110000
#b00000000000000000000000010010000
:Lay=MULT)
(#b00001111100000000000000011110000
#b00000000100000000000000010010000
:Lay=MULT_L)
(#b00001100000000000000000000000000
#b00000000000000000000000000000000
:Lay=DP)
(#b00001111101100000000111111110000
#b00000001000000000000000010010000
:Lay=SDS)
(#b00001110010000000000111110010000
#b00000000000000000000000010010000
:Lay=HDT_R)
(#b00001110010000000000000010010000
#b00000000010000000000000010010000
:Lay=HDT_I)
(#b00001100000000000000000000000000
#b00000100000000000000000000000000
:Lay=SDT)
(#b00001110000000000000000000010000
#b00000110000000000000000000010000
:Lay=UNDEF)
(#b00001110000000000000000000000000
#b00001000000000000000000000000000
:Lay=BDT)
(#b00001110000000000000000000000000
#b00001010000000000000000000000000
:Lay=BR)
(#b00001110000000000000000000000000
#b00001100000000000000000000000000
:Lay=CDT)
(#b00001111000000000000000000010000
#b00001110000000000000000000000000
:Lay=CDO)
(#b00001111000000000000000000010000
#b00001110000000000000000000010000
:Lay=CRT)
(#b00001111000000000000000000000000
#b00001111000000000000000000000000
:Lay=SWI)))
(defun what-layout (w)
(loop for (mask sig lay) in +masks+ do
(when (= (logand mask w) sig)
(return lay))))
(defun range (lo hi)
(loop for i from lo to (1- hi) collect i))
(defun bdt-rlist (w)
(remove-if-not (lambda (i) (< 0 (logand (ash 1 i) w)))
(range 0 16)))
(defun dp-opcode (w)
(ldb (byte 4 21) w))
(defun dp-immediate (w)
(= (ldb (byte 1 25) w) 1))
;;;;;;;;;;;;;;;;;;;;;;;
Generic Interface
;;;;;;;;;;;;;;;;;;;;;;;
(export 'jumpp)
(defun jumpp (w)
(member (what-layout w)
'(:Lay=BX :Lay=BR)))
(export 'syscallp)
(defun syscallp (w)
(eq (what-layout w) :Lay=SWI))
(export 'jump-reg)
(defun jump-reg (w)
(let ((layout (what-layout w)))
(cond ((eq layout :Lay=BX)
(ldb (byte 4 0) w))
((and (eq layout :Lay=DP)
(= (ldb (byte 1 25) w) 0) ;; immediate flag
(member +pc+ (arith-dst-regs w) :test #'=)
MOV
(= (ldb (byte 8 4) w) 0)) ;; no shift. simplification
(ldb (byte 4 0) w)))))
(export 'pop-regs)
(defun pop-regs (w)
(when (and (eq (what-layout w) :Lay=BDT)
(= (bdt-stack-dir w) +pop-dir+))
(bdt-rlist w)))
(export 'push-regs)
(defun push-regs (w)
(when (and (eq (what-layout w) :Lay=BDT)
(= (bdt-stack-dir w) +push-dir+))
(bdt-rlist w)))
(export 'foo)
(defun foo (w)
(format t "hello from foo! ~S~%" w))
(export 'retp)
(defun retp (w)
(when w
(and (eq (what-layout w) :Lay=BDT)
(eq (ldb (byte 4 16) w) +sp+)
(< 0 (logand (ash 1 +pc+) w)))))
;(defun stack-delta-rlist (w)
; (if (eq (what-layout w) :Lay=BDT)
; (let ((rlist (bdt-rlist w)))
; (list (* (bdt-stack-dir w) (length rlist)) rlist))
; (list 0 nil)))
(junk-drawer:def-bitcounter 16)
(defun stack-delta (w)
(if (and (eq (what-layout w) :Lay=BDT)
(eq (ldb (byte 4 16) w) +sp+) ;; is a push/pop
(eq (ldb (byte 1 21) w) 1)) ;; writeback
(* (bdt-stack-dir w)
(bitcounter-16 w))
0))
(export 'pop-offset)
(defun pop-offset (w r)
(position r (reverse (pop-regs w)) :test #'=))
(export 'arith-dst-reg)
(defun arith-dst-regs (w)
(when (eq (what-layout w) :Lay=DP)
(list (ldb (byte 4 12) w))))
(export 'arith-src-regs)
(defun arith-src-regs (w)
(when (eq (what-layout w) :Lay=DP)
(let ((lst (list (ldb (byte 4 16) w))))
(when (not (dp-immediate w))
(push (ldb (byte 4 0) w) lst)))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defvar +sp+ 13)
(defvar +lr+ 14)
(defvar +pc+ 15)
(defvar +special-registers+ (list +sp+ +lr+ +pc+))
(defun bdt-first-reg (w)
(ldb (byte 4 16) w))
(defun read-bit (w i)
(ldb (byte 1 i) w))
(defvar +pop-dir+ +1)
(defvar +push-dir+ -1)
( ( zerop ( read - bit w 21 ) ) 0 )
(defun bdt-stack-dir (w)
(if (zerop (read-bit w 23))
+push-dir+
+pop-dir+))
(defun tally-arith-reg (insts)
(let ((arith-insts (remove-if-not
(lambda (w) (eq (what-layout w) :Lay=DP))
insts))
(src-tally (make-list 16 :initial-element 0))
(dst-tally (make-list 16 :initial-element 0)))
(loop for inst in arith-insts do
(incf (elt src-tally (car (arith-src-regs inst))))
(incf (elt src-tally (cadr (arith-src-regs inst))))
(incf (elt dst-tally (arith-dst-reg inst))))
(list (list :src src-tally)
(list :dst dst-tally))))
(defun partial (f &rest args1)
(lambda (&rest args2)
(apply f (append args1 args2))))
( defun tally - pop - regs ( insts & key ( rets - only t ) )
; (apply #'mapcar #'+
; (append '((0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0))
(defun word->bits (w &key (width 32) (offset 0))
(loop for i from offset to (1- width) collect
(ldb (byte 1 i) w)))
(defun tally-pop-regs (insts &key (mode :arm) (rets-only t))
(apply #'mapcar #'+
(append (list (word->bits 0 :width 16))
(mapcar #'word->bits
(remove-if-not
(lambda (w)
(if rets-only
(retp w)
(eq (what-layout w) :Lay=BDT)))
insts)))))
(defun get-pop-rlists (insts &key (rets-only t))
(mapcar #'bdt-rlist
(remove-if-not
(lambda (w)
(if rets-only (retp w)
(eq (what-layout w) :Lay=BDT)))
insts)))
(defun extract-by-name (elf-obj name)
"Returns a named section as a vector of bytes, and the address at
which the text section begins, as a secondary value."
(let* ((secs (elf:sections elf-obj))
(addrs (mapcar #'elf:address (elf:section-table elf-obj)))
(named-idx (position name secs
:key #'elf:name :test #'equalp)))
(values (elf:data (elt secs named-idx))
(elt addrs named-idx))))
(defun inst-words-from-file (path &key (width 4))
(let* ((elf (elf:read-elf path))
(text (extract-by-name elf ".text")))
(bytes->dwords text)))
;;;;;;;;;;;;;;;;;;;;;;
;; CSV generation
;;;;;;;;;;;;;;;;;;;;;;;;
refactor this , and separate analytics from IO
(defun register-profile (path)
(let* ((insts (inst-words-from-file path))
(pops (list :pops (tally-pop-regs insts :rets-only nil)))
(rets (list :rets (tally-pop-regs insts :rets-only t)))
(dp (tally-arith-reg insts))
(srcs (assoc :src dp))
(dsts (assoc :dst dp)))
(list pops rets srcs dsts)))
(defun profile-crossrows (prof)
(let ((hdr (mapcar #'car prof))
(body (mapcar #'cadr prof)))
(cons hdr
(loop for i in (range 0 16)
collect (mapcar (lambda (x) (elt x i)) body)))))
(defun logscale-crossrows (xprof)
(cons (car xprof)
(mapcar (lambda (x)
(mapcar (lambda (y) (if (zerop y) y (log y 2))) x))
(cdr xprof))))
(defun reg-prof-csv (path &key (out) (logscale))
(let* ((prof (register-profile path))
(xprof (profile-crossrows prof))
(csv (mapcar #'list->csv
(if logscale
(logscale-crossrows xprof)
xprof))))
(if (not out) csv
(with-open-file (stream out :direction :output)
(loop for row in csv
for i from -1 to 15 do
(format stream "~A,~A~%" (if (< i 0) 'REG i) row))))))
| null | https://raw.githubusercontent.com/oblivia-simplex/roper/7714ccf677359126ca82446843030fac89c6655a/lisp/roper/arm-analysis.lisp | lisp | Found a bug in the spec document?
immediate flag
no shift. simplification
(defun stack-delta-rlist (w)
(if (eq (what-layout w) :Lay=BDT)
(let ((rlist (bdt-rlist w)))
(list (* (bdt-stack-dir w) (length rlist)) rlist))
(list 0 nil)))
is a push/pop
writeback
(apply #'mapcar #'+
(append '((0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0))
CSV generation
| (in-package :arm-analysis)
(defvar +masks+
'((#b00001111111111111111111111010000
#b00000001001011111111111100010000
(#b00001111110000000000000011110000
#b00000000000000000000000010010000
:Lay=MULT)
(#b00001111100000000000000011110000
#b00000000100000000000000010010000
:Lay=MULT_L)
(#b00001100000000000000000000000000
#b00000000000000000000000000000000
:Lay=DP)
(#b00001111101100000000111111110000
#b00000001000000000000000010010000
:Lay=SDS)
(#b00001110010000000000111110010000
#b00000000000000000000000010010000
:Lay=HDT_R)
(#b00001110010000000000000010010000
#b00000000010000000000000010010000
:Lay=HDT_I)
(#b00001100000000000000000000000000
#b00000100000000000000000000000000
:Lay=SDT)
(#b00001110000000000000000000010000
#b00000110000000000000000000010000
:Lay=UNDEF)
(#b00001110000000000000000000000000
#b00001000000000000000000000000000
:Lay=BDT)
(#b00001110000000000000000000000000
#b00001010000000000000000000000000
:Lay=BR)
(#b00001110000000000000000000000000
#b00001100000000000000000000000000
:Lay=CDT)
(#b00001111000000000000000000010000
#b00001110000000000000000000000000
:Lay=CDO)
(#b00001111000000000000000000010000
#b00001110000000000000000000010000
:Lay=CRT)
(#b00001111000000000000000000000000
#b00001111000000000000000000000000
:Lay=SWI)))
(defun what-layout (w)
(loop for (mask sig lay) in +masks+ do
(when (= (logand mask w) sig)
(return lay))))
(defun range (lo hi)
(loop for i from lo to (1- hi) collect i))
(defun bdt-rlist (w)
(remove-if-not (lambda (i) (< 0 (logand (ash 1 i) w)))
(range 0 16)))
(defun dp-opcode (w)
(ldb (byte 4 21) w))
(defun dp-immediate (w)
(= (ldb (byte 1 25) w) 1))
Generic Interface
(export 'jumpp)
(defun jumpp (w)
(member (what-layout w)
'(:Lay=BX :Lay=BR)))
(export 'syscallp)
(defun syscallp (w)
(eq (what-layout w) :Lay=SWI))
(export 'jump-reg)
(defun jump-reg (w)
(let ((layout (what-layout w)))
(cond ((eq layout :Lay=BX)
(ldb (byte 4 0) w))
((and (eq layout :Lay=DP)
(member +pc+ (arith-dst-regs w) :test #'=)
MOV
(ldb (byte 4 0) w)))))
(export 'pop-regs)
(defun pop-regs (w)
(when (and (eq (what-layout w) :Lay=BDT)
(= (bdt-stack-dir w) +pop-dir+))
(bdt-rlist w)))
(export 'push-regs)
(defun push-regs (w)
(when (and (eq (what-layout w) :Lay=BDT)
(= (bdt-stack-dir w) +push-dir+))
(bdt-rlist w)))
(export 'foo)
(defun foo (w)
(format t "hello from foo! ~S~%" w))
(export 'retp)
(defun retp (w)
(when w
(and (eq (what-layout w) :Lay=BDT)
(eq (ldb (byte 4 16) w) +sp+)
(< 0 (logand (ash 1 +pc+) w)))))
(junk-drawer:def-bitcounter 16)
(defun stack-delta (w)
(if (and (eq (what-layout w) :Lay=BDT)
(* (bdt-stack-dir w)
(bitcounter-16 w))
0))
(export 'pop-offset)
(defun pop-offset (w r)
(position r (reverse (pop-regs w)) :test #'=))
(export 'arith-dst-reg)
(defun arith-dst-regs (w)
(when (eq (what-layout w) :Lay=DP)
(list (ldb (byte 4 12) w))))
(export 'arith-src-regs)
(defun arith-src-regs (w)
(when (eq (what-layout w) :Lay=DP)
(let ((lst (list (ldb (byte 4 16) w))))
(when (not (dp-immediate w))
(push (ldb (byte 4 0) w) lst)))))
(defvar +sp+ 13)
(defvar +lr+ 14)
(defvar +pc+ 15)
(defvar +special-registers+ (list +sp+ +lr+ +pc+))
(defun bdt-first-reg (w)
(ldb (byte 4 16) w))
(defun read-bit (w i)
(ldb (byte 1 i) w))
(defvar +pop-dir+ +1)
(defvar +push-dir+ -1)
( ( zerop ( read - bit w 21 ) ) 0 )
(defun bdt-stack-dir (w)
(if (zerop (read-bit w 23))
+push-dir+
+pop-dir+))
(defun tally-arith-reg (insts)
(let ((arith-insts (remove-if-not
(lambda (w) (eq (what-layout w) :Lay=DP))
insts))
(src-tally (make-list 16 :initial-element 0))
(dst-tally (make-list 16 :initial-element 0)))
(loop for inst in arith-insts do
(incf (elt src-tally (car (arith-src-regs inst))))
(incf (elt src-tally (cadr (arith-src-regs inst))))
(incf (elt dst-tally (arith-dst-reg inst))))
(list (list :src src-tally)
(list :dst dst-tally))))
(defun partial (f &rest args1)
(lambda (&rest args2)
(apply f (append args1 args2))))
( defun tally - pop - regs ( insts & key ( rets - only t ) )
(defun word->bits (w &key (width 32) (offset 0))
(loop for i from offset to (1- width) collect
(ldb (byte 1 i) w)))
(defun tally-pop-regs (insts &key (mode :arm) (rets-only t))
(apply #'mapcar #'+
(append (list (word->bits 0 :width 16))
(mapcar #'word->bits
(remove-if-not
(lambda (w)
(if rets-only
(retp w)
(eq (what-layout w) :Lay=BDT)))
insts)))))
(defun get-pop-rlists (insts &key (rets-only t))
(mapcar #'bdt-rlist
(remove-if-not
(lambda (w)
(if rets-only (retp w)
(eq (what-layout w) :Lay=BDT)))
insts)))
(defun extract-by-name (elf-obj name)
"Returns a named section as a vector of bytes, and the address at
which the text section begins, as a secondary value."
(let* ((secs (elf:sections elf-obj))
(addrs (mapcar #'elf:address (elf:section-table elf-obj)))
(named-idx (position name secs
:key #'elf:name :test #'equalp)))
(values (elf:data (elt secs named-idx))
(elt addrs named-idx))))
(defun inst-words-from-file (path &key (width 4))
(let* ((elf (elf:read-elf path))
(text (extract-by-name elf ".text")))
(bytes->dwords text)))
refactor this , and separate analytics from IO
(defun register-profile (path)
(let* ((insts (inst-words-from-file path))
(pops (list :pops (tally-pop-regs insts :rets-only nil)))
(rets (list :rets (tally-pop-regs insts :rets-only t)))
(dp (tally-arith-reg insts))
(srcs (assoc :src dp))
(dsts (assoc :dst dp)))
(list pops rets srcs dsts)))
(defun profile-crossrows (prof)
(let ((hdr (mapcar #'car prof))
(body (mapcar #'cadr prof)))
(cons hdr
(loop for i in (range 0 16)
collect (mapcar (lambda (x) (elt x i)) body)))))
(defun logscale-crossrows (xprof)
(cons (car xprof)
(mapcar (lambda (x)
(mapcar (lambda (y) (if (zerop y) y (log y 2))) x))
(cdr xprof))))
(defun reg-prof-csv (path &key (out) (logscale))
(let* ((prof (register-profile path))
(xprof (profile-crossrows prof))
(csv (mapcar #'list->csv
(if logscale
(logscale-crossrows xprof)
xprof))))
(if (not out) csv
(with-open-file (stream out :direction :output)
(loop for row in csv
for i from -1 to 15 do
(format stream "~A,~A~%" (if (< i 0) 'REG i) row))))))
|
2aa41044df519326bcf50355255449540fd51af077b7cc1a365af9044547a44f | simingwang/emqx-plugin-kafkav5 | hex_filename.erl | @private
Excerpt from -20.0.1/lib/stdlib/src/filename.erl#L761-L788
% with modifications for changing local function calls to remote function calls
to the ` filename ` module , for the functions ` pathtype/1 ` , ` split/1 ` , and ` join/1 `
%
safe_relative_path/1 was not present in earlier OTP releases .
%%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 1997 - 2017 . All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%% %CopyrightEnd%
%%
-module(hex_filename).
-export([safe_relative_path/1]).
safe_relative_path(Path) ->
case filename:pathtype(Path) of
relative ->
Cs0 = filename:split(Path),
safe_relative_path_1(Cs0, []);
_ ->
unsafe
end.
safe_relative_path_1(["."|T], Acc) ->
safe_relative_path_1(T, Acc);
safe_relative_path_1([<<".">>|T], Acc) ->
safe_relative_path_1(T, Acc);
safe_relative_path_1([".."|T], Acc) ->
climb(T, Acc);
safe_relative_path_1([<<"..">>|T], Acc) ->
climb(T, Acc);
safe_relative_path_1([H|T], Acc) ->
safe_relative_path_1(T, [H|Acc]);
safe_relative_path_1([], []) ->
[];
safe_relative_path_1([], Acc) ->
filename:join(lists:reverse(Acc)).
climb(_, []) ->
unsafe;
climb(T, [_|Acc]) ->
safe_relative_path_1(T, Acc).
| null | https://raw.githubusercontent.com/simingwang/emqx-plugin-kafkav5/bbf919e56dbc8fd2d4c1c541084532f844a11cbc/_build/default/plugins/hex_core/src/hex_filename.erl | erlang | with modifications for changing local function calls to remote function calls
%CopyrightBegin%
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
%CopyrightEnd%
| @private
Excerpt from -20.0.1/lib/stdlib/src/filename.erl#L761-L788
to the ` filename ` module , for the functions ` pathtype/1 ` , ` split/1 ` , and ` join/1 `
safe_relative_path/1 was not present in earlier OTP releases .
Copyright Ericsson AB 1997 - 2017 . All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(hex_filename).
-export([safe_relative_path/1]).
safe_relative_path(Path) ->
case filename:pathtype(Path) of
relative ->
Cs0 = filename:split(Path),
safe_relative_path_1(Cs0, []);
_ ->
unsafe
end.
safe_relative_path_1(["."|T], Acc) ->
safe_relative_path_1(T, Acc);
safe_relative_path_1([<<".">>|T], Acc) ->
safe_relative_path_1(T, Acc);
safe_relative_path_1([".."|T], Acc) ->
climb(T, Acc);
safe_relative_path_1([<<"..">>|T], Acc) ->
climb(T, Acc);
safe_relative_path_1([H|T], Acc) ->
safe_relative_path_1(T, [H|Acc]);
safe_relative_path_1([], []) ->
[];
safe_relative_path_1([], Acc) ->
filename:join(lists:reverse(Acc)).
climb(_, []) ->
unsafe;
climb(T, [_|Acc]) ->
safe_relative_path_1(T, Acc).
|
cb279817851acad0f39676dd9419a4199b54dc741a052b1f6ffa40e46583230d | kongo2002/statser | statser_app.erl | Copyright 2017 - 2018
%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
%
% -2.0
%
% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
%%%-------------------------------------------------------------------
%% @doc statser public API
%% @end
%%%-------------------------------------------------------------------
-module(statser_app).
-behaviour(application).
%% Application callbacks
-export([start/2, stop/1, prep_stop/1]).
%%====================================================================
%% API
%%====================================================================
start(_StartType, _StartArgs) ->
% logging
lager:start(),
% yaml parsing
ok = application:ensure_started(yamerl),
statser_sup:start_link().
%%--------------------------------------------------------------------
prep_stop(_State) ->
lager:info("preparing statser application shutdown"),
ok.
%%--------------------------------------------------------------------
stop(_State) ->
lager:info("stopped statser application"),
ok.
%%====================================================================
Internal functions
%%====================================================================
| null | https://raw.githubusercontent.com/kongo2002/statser/1cb0498f56c97d8a010b979c5163dd2750064e98/src/statser_app.erl | erlang |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-------------------------------------------------------------------
@doc statser public API
@end
-------------------------------------------------------------------
Application callbacks
====================================================================
API
====================================================================
logging
yaml parsing
--------------------------------------------------------------------
--------------------------------------------------------------------
====================================================================
==================================================================== | Copyright 2017 - 2018
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(statser_app).
-behaviour(application).
-export([start/2, stop/1, prep_stop/1]).
start(_StartType, _StartArgs) ->
lager:start(),
ok = application:ensure_started(yamerl),
statser_sup:start_link().
prep_stop(_State) ->
lager:info("preparing statser application shutdown"),
ok.
stop(_State) ->
lager:info("stopped statser application"),
ok.
Internal functions
|
8de3a87948de5fcd0379da758cc57503dc973b12a78fb026706f0cc42fff9c34 | lambdaisland/souk | rsa_keys.clj | (ns repl-sessions.rsa-keys
(:require
[clojure.string :as str])
(:import
(java.security KeyPairGenerator Signature)
(java.security.spec X509EncodedKeySpec)
(java.util Base64)))
(def kpg (KeyPairGenerator/getInstance "RSA"))
(.initialize kpg 2048)
(def kp (.generateKeyPair kpg))
(.getEncoded (.getPublic kp))
(.getEncoded (.getPrivate kp))
(let [s (.encodeToString (Base64/getEncoder) (.getEncoded (.getPublic kp)))
parts (map (partial apply str) (partition-all 64 s))]
(str/join
(map #(str % "\r\n")
`["-----BEGIN PUBLIC KEY-----"
~@parts
"-----END PUBLIC KEY-----"])))
(def pem "-----BEGIN PUBLIC KEY-----\r\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAy3WsUuEyZLsy/2XxJ+ou\r\nnNr14R1x9laQh4EitjT4e1OPJwHHIBqEPUWk4MQzU13Jga4uua28Ecl3BxC9lSnf\r\nDp96Z0NAdkYjuCgC9xo9EjKaK8ijIbm58d4uifIl/XKZE6tYTGXXzmnx4nCfcWfF\r\n67tut/4k+/wVMjjHMLl9VhzHsBz3Wr+h7v+4SLFftq9NorMknWQuIh3IzQUNZBps\r\nCw8JRDUx8Of/I44mJMc2N12f41TLK65VCvkXF3K5qIS9jTEdhhOA8dsB92DEyaTu\r\ns+jhqXM4ivFfxDyOasQRZ0bEO+OEcJua7nnvNsFzGLkIb3/eJ1HlCQ+AKVSUGcBZ\r\nbwIDAQAB\r\n-----END PUBLIC KEY-----\r\n")
(X509EncodedKeySpec.
(.decode (Base64/getDecoder)
(str/replace pem #"(-+(BEGIN|END) PUBLIC KEY-+|\R)" ""))
)
;; sign
(def sign (Signature/getInstance "SHA256withRSA"))
(.initSign sign (.getPrivate kp))
(.update sign (.getBytes "hello"))
(def signature (.sign sign))
(.encodeToString (Base64/getEncoder) signature)
;; verify
(def sign (Signature/getInstance "SHA256withRSA"))
(.initVerify sign (.getPublic kp))
(.update sign (.getBytes "hello"))
(.verify sign signature)
| null | https://raw.githubusercontent.com/lambdaisland/souk/c0945cddb5329308bd167921b2573d05c5f72e59/repl_sessions/rsa_keys.clj | clojure | sign
verify | (ns repl-sessions.rsa-keys
(:require
[clojure.string :as str])
(:import
(java.security KeyPairGenerator Signature)
(java.security.spec X509EncodedKeySpec)
(java.util Base64)))
(def kpg (KeyPairGenerator/getInstance "RSA"))
(.initialize kpg 2048)
(def kp (.generateKeyPair kpg))
(.getEncoded (.getPublic kp))
(.getEncoded (.getPrivate kp))
(let [s (.encodeToString (Base64/getEncoder) (.getEncoded (.getPublic kp)))
parts (map (partial apply str) (partition-all 64 s))]
(str/join
(map #(str % "\r\n")
`["-----BEGIN PUBLIC KEY-----"
~@parts
"-----END PUBLIC KEY-----"])))
(def pem "-----BEGIN PUBLIC KEY-----\r\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAy3WsUuEyZLsy/2XxJ+ou\r\nnNr14R1x9laQh4EitjT4e1OPJwHHIBqEPUWk4MQzU13Jga4uua28Ecl3BxC9lSnf\r\nDp96Z0NAdkYjuCgC9xo9EjKaK8ijIbm58d4uifIl/XKZE6tYTGXXzmnx4nCfcWfF\r\n67tut/4k+/wVMjjHMLl9VhzHsBz3Wr+h7v+4SLFftq9NorMknWQuIh3IzQUNZBps\r\nCw8JRDUx8Of/I44mJMc2N12f41TLK65VCvkXF3K5qIS9jTEdhhOA8dsB92DEyaTu\r\ns+jhqXM4ivFfxDyOasQRZ0bEO+OEcJua7nnvNsFzGLkIb3/eJ1HlCQ+AKVSUGcBZ\r\nbwIDAQAB\r\n-----END PUBLIC KEY-----\r\n")
(X509EncodedKeySpec.
(.decode (Base64/getDecoder)
(str/replace pem #"(-+(BEGIN|END) PUBLIC KEY-+|\R)" ""))
)
(def sign (Signature/getInstance "SHA256withRSA"))
(.initSign sign (.getPrivate kp))
(.update sign (.getBytes "hello"))
(def signature (.sign sign))
(.encodeToString (Base64/getEncoder) signature)
(def sign (Signature/getInstance "SHA256withRSA"))
(.initVerify sign (.getPublic kp))
(.update sign (.getBytes "hello"))
(.verify sign signature)
|
0340053f208ab3a2b6b2bc18ea05b983036068ce230b336e988998f80b84f0ab | Ericson2314/lighthouse | POSIX.hs | {-# OPTIONS -Wall -Werror -cpp #-}
-- | POSIX time, if you need to deal with timestamps and the like.
-- Most people won't need this module.
module Data.Time.Clock.POSIX
(
posixDayLength,POSIXTime,posixSecondsToUTCTime,utcTimeToPOSIXSeconds,getPOSIXTime
) where
import Data.Time.Clock.UTC
import Data.Time.Calendar.Days
import Data.Fixed
import Control.Monad
#ifdef mingw32_HOST_OS
import Data.Word ( Word64)
import System.Win32.Time
#else
import Data.Time.Clock.CTimeval
#endif
| 86400 nominal seconds in every day
posixDayLength :: NominalDiffTime
posixDayLength = 86400
| POSIX time is the nominal time since 1970 - 01 - 01 00:00 UTC
--
To convert from a ' Foreign . C.CTime ' or ' System . . ' , use ' realToFrac ' .
--
type POSIXTime = NominalDiffTime
unixEpochDay :: Day
unixEpochDay = ModifiedJulianDay 40587
posixSecondsToUTCTime :: POSIXTime -> UTCTime
posixSecondsToUTCTime i = let
(d,t) = divMod' i posixDayLength
in UTCTime (addDays d unixEpochDay) (realToFrac t)
utcTimeToPOSIXSeconds :: UTCTime -> POSIXTime
utcTimeToPOSIXSeconds (UTCTime d t) =
(fromInteger (diffDays d unixEpochDay) * posixDayLength) + min posixDayLength (realToFrac t)
-- | Get the current POSIX time from the system clock.
getPOSIXTime :: IO POSIXTime
#ifdef mingw32_HOST_OS
On Windows , the equlvalent of POSIX time is " file time " , defined as
the number of 100 - nanosecond intervals that have elapsed since
12:00 A.M. January 1 , 1601 ( UTC ) . We can convert this into a POSIX
-- time by adjusting the offset to be relative to the POSIX epoch.
getPOSIXTime = do
FILETIME ft <- System.Win32.Time.getSystemTimeAsFileTime
return (fromIntegral (ft - win32_epoch_adjust) / 10000000)
win32_epoch_adjust :: Word64
win32_epoch_adjust = 116444736000000000
#else
-- Use POSIX time
ctimevalToPosixSeconds :: CTimeval -> POSIXTime
ctimevalToPosixSeconds (MkCTimeval s mus) = (fromIntegral s) + (fromIntegral mus) / 1000000
getPOSIXTime = liftM ctimevalToPosixSeconds getCTimeval
#endif
| null | https://raw.githubusercontent.com/Ericson2314/lighthouse/210078b846ebd6c43b89b5f0f735362a01a9af02/ghc-6.8.2/libraries/time/Data/Time/Clock/POSIX.hs | haskell | # OPTIONS -Wall -Werror -cpp #
| POSIX time, if you need to deal with timestamps and the like.
Most people won't need this module.
| Get the current POSIX time from the system clock.
time by adjusting the offset to be relative to the POSIX epoch.
Use POSIX time |
module Data.Time.Clock.POSIX
(
posixDayLength,POSIXTime,posixSecondsToUTCTime,utcTimeToPOSIXSeconds,getPOSIXTime
) where
import Data.Time.Clock.UTC
import Data.Time.Calendar.Days
import Data.Fixed
import Control.Monad
#ifdef mingw32_HOST_OS
import Data.Word ( Word64)
import System.Win32.Time
#else
import Data.Time.Clock.CTimeval
#endif
| 86400 nominal seconds in every day
posixDayLength :: NominalDiffTime
posixDayLength = 86400
| POSIX time is the nominal time since 1970 - 01 - 01 00:00 UTC
To convert from a ' Foreign . C.CTime ' or ' System . . ' , use ' realToFrac ' .
type POSIXTime = NominalDiffTime
unixEpochDay :: Day
unixEpochDay = ModifiedJulianDay 40587
posixSecondsToUTCTime :: POSIXTime -> UTCTime
posixSecondsToUTCTime i = let
(d,t) = divMod' i posixDayLength
in UTCTime (addDays d unixEpochDay) (realToFrac t)
utcTimeToPOSIXSeconds :: UTCTime -> POSIXTime
utcTimeToPOSIXSeconds (UTCTime d t) =
(fromInteger (diffDays d unixEpochDay) * posixDayLength) + min posixDayLength (realToFrac t)
getPOSIXTime :: IO POSIXTime
#ifdef mingw32_HOST_OS
On Windows , the equlvalent of POSIX time is " file time " , defined as
the number of 100 - nanosecond intervals that have elapsed since
12:00 A.M. January 1 , 1601 ( UTC ) . We can convert this into a POSIX
getPOSIXTime = do
FILETIME ft <- System.Win32.Time.getSystemTimeAsFileTime
return (fromIntegral (ft - win32_epoch_adjust) / 10000000)
win32_epoch_adjust :: Word64
win32_epoch_adjust = 116444736000000000
#else
ctimevalToPosixSeconds :: CTimeval -> POSIXTime
ctimevalToPosixSeconds (MkCTimeval s mus) = (fromIntegral s) + (fromIntegral mus) / 1000000
getPOSIXTime = liftM ctimevalToPosixSeconds getCTimeval
#endif
|
24e59ba8a9e3fe275b558ea08c148d29bcf5d69c551d3ce096373704e78423d3 | mightybyte/armor | Armor.hs | # LANGUAGE CPP #
{-# LANGUAGE FlexibleContexts #-}
# LANGUAGE ScopedTypeVariables #
module Armor
( Version(..)
, Armored(..)
, ArmorMode(..)
, ArmorConfig(..)
, defArmorConfig
, testArmor
, testArmorMany
, testSerialization
, GoldenTest(..)
, goldenFilePath
) where
------------------------------------------------------------------------------
import Control.Lens
import Control.Monad
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import Data.Char
import Data.Hashable
import Data.Map (Map)
import qualified Data.Map as M
import Data.Typeable
#if !MIN_VERSION_base(4,8,0)
import Data.Word
#endif
import Numeric
import System.Directory
import System.FilePath
import Test.HUnit.Base
import Text.Printf
------------------------------------------------------------------------------
------------------------------------------------------------------------------
-- | Version numbers are simple monotonically increasing positive integers.
newtype Version a = Version { unVersion :: Word }
deriving (Eq,Ord,Show,Read)
------------------------------------------------------------------------------
-- | Core type class for armoring types. Includes a version and all the
-- type's serializations that you want to armor.
class Armored a where
-- | Current version number for the data type.
version :: Version a
-- | Map of serializations keyed by a unique ID used to refer to each
-- serialization. A serialization is a tuple of @(a -> ByteString)@ and
-- @(ByteString -> Maybe a)@. Represented here as a prism.
serializations :: Map String (APrism' ByteString a)
------------------------------------------------------------------------------
-- | The mode of operation for armor test cases.
data ArmorMode
= SaveOnly
-- ^ Write test files for serializations that don't have them, but don't
-- do any tests to verify that existing files are deserialized properly.
| TestOnly
-- ^ Run tests to verify that existing files are deserialized properly,
-- but don't write any missing files.
| SaveAndTest
-- ^ Do both the save and test phases.
deriving (Eq,Ord,Show,Read,Enum,Bounded)
------------------------------------------------------------------------------
-- | Config data for armor tests.
data ArmorConfig = ArmorConfig
{ acArmorMode :: ArmorMode
, acStoreDir :: FilePath
-- ^ Directory where all the test serializations are stored.
, acNumVersions :: Maybe Word
-- ^ How many versions back to test for backwards compatibility. A value
-- of @Just 0@ means that it only tests that the current version satisfies
-- @parse . render == id@. @Just 1@ means that it will verify that the
previous version can still be parse . @Just 2@ the previous two
-- versions, etc. Nothing means that all versions will be tested.
}
------------------------------------------------------------------------------
| Default value for ArmorConfig .
defArmorConfig :: ArmorConfig
defArmorConfig = ArmorConfig SaveAndTest "test-data" Nothing
------------------------------------------------------------------------------
-- | Tests the serialization backwards compatibility of a data type by storing
-- serialized representations in .test files to be checked into your project's
-- version control.
--
First , this function checks the directory ' acStoreDir ' for the existence of
-- a file @foo-000.test@. If it doesn't exist, it creates it for each
-- serialization with the serialized representation of the val parameter.
--
Next , it checks that the serialized formats in the most recent
-- 'acNumVersions' of the stored @.test@ files are parsable by the current
-- version of the serialization.
testArmor
:: (Eq a, Show a, Typeable a, Armored a)
=> ArmorConfig
-> String
-> a
-> Test
testArmor ac valId val =
TestList [ testIt s | s <- M.toList serializations ]
where
testIt s = test (testSerialization ac goldenFilePath valId s val)
------------------------------------------------------------------------------
-- | Same as 'testArmor', but more convenient for testing several values of the
-- same type.
testArmorMany
:: (Eq a, Show a, Typeable a, Armored a)
=> ArmorConfig
-> Map String a
-> Test
testArmorMany ac valMap = TestList $ map doOne $ M.toList valMap
where
doOne (k,v) = TestLabel k $ testArmor ac k v
------------------------------------------------------------------------------
-- | Lower level assertion function that works for a wider array of test
-- frameworks.
--
This function can make two different assertions . It fails if the values fail
-- to parse, and it asserts that the values are equal to the expected value.
-- This latter assertion is only done for the most recent version because
-- changes that impact the structure of a data type can result in erroneous
-- failures due to changes in the order that the test cases are generated.
--
-- In other words, if you make an innocuous change like adding a constructor and
-- start getting "values didn't match" failures, all you need to do is bump the
-- data type's version. Armor will still guarantee that those serializations
-- parse properly but the incorrect value failures will be suppressed.
testSerialization
:: forall a. (Eq a, Show a, Typeable a, Armored a)
=> ArmorConfig
-> (GoldenTest a -> FilePath)
-- ^ Customizable location where the serializations will be stored. We
-- recommend 'goldenFilePath' as a standard out-of-the-box scheme.
-> String
-> (String, APrism' ByteString a)
-> a
-> Assertion
testSerialization ac makeFilePath valName (sname,p) val = do
ensureTestFileExists
when (acArmorMode ac /= SaveOnly) $
mapM_ (assertVersionParses . Version) vs
where
makeGT = GoldenTest val valName sname p
curVer :: Version a
curVer = version
vs = reverse [maybe 0 (unVersion curVer -) (acNumVersions ac) .. unVersion curVer]
ensureTestFileExists = do
let fp = acStoreDir ac </> makeFilePath (makeGT curVer)
d = dropFileName fp
when (acArmorMode ac /= TestOnly) $ do
createDirectoryIfMissing True d
fileExists <- doesFileExist fp
when (not fileExists) $
B.writeFile fp (review (clonePrism p) val)
assertVersionParses ver = do
let fp = acStoreDir ac </> makeFilePath (makeGT ver)
exists <- doesFileExist fp
if exists
then do bs <- B.readFile fp
case preview (clonePrism p) bs of
Nothing -> assertFailure $
printf "Not backwards compatible with version %d: %s"
(unVersion ver) fp
Just v -> when (ver == curVer) $
assertEqual ("File parsed but values didn't match: " ++ fp) val v
else putStrLn $ "\nSkipping missing file " ++ fp
------------------------------------------------------------------------------
-- | Data structure that holds all the values needed for a golden test
data GoldenTest a = GoldenTest
{ gtTestVal :: a
, gtValName :: String
, gtSerializationName :: String
, gtPrism :: APrism' ByteString a
, gtVersion :: Version a
}
------------------------------------------------------------------------------
| Constructs the FilePath where the serialization will be stored ( relative to
-- the base directory defined in ArmorConfig).
--
This function uses as a part of the directory hierarchy to
disambiguate tests for different data types . can contain single
-- quotes, spaces, and parenthesis in the case of type constructors that have
type variables so we only take the first alphanumeric characters so that the
paths will be meaningful to humans and then add four characters of the type 's
-- hash for disambiguation.
goldenFilePath :: Typeable a => GoldenTest a -> FilePath
goldenFilePath gt =
(concat [takeWhile isAlpha ty, "-", h]) </>
gtSerializationName gt </>
printf "%s-%03d.test" (gtValName gt) (unVersion $ gtVersion gt)
where
ty = show $ typeOf $ gtTestVal gt
h = take 4 $ showHex (abs $ hash ty) ""
| null | https://raw.githubusercontent.com/mightybyte/armor/3f0e58b3f3a2b23c242c8e27f056b274cfefb053/src/Armor.hs | haskell | # LANGUAGE FlexibleContexts #
----------------------------------------------------------------------------
----------------------------------------------------------------------------
----------------------------------------------------------------------------
| Version numbers are simple monotonically increasing positive integers.
----------------------------------------------------------------------------
| Core type class for armoring types. Includes a version and all the
type's serializations that you want to armor.
| Current version number for the data type.
| Map of serializations keyed by a unique ID used to refer to each
serialization. A serialization is a tuple of @(a -> ByteString)@ and
@(ByteString -> Maybe a)@. Represented here as a prism.
----------------------------------------------------------------------------
| The mode of operation for armor test cases.
^ Write test files for serializations that don't have them, but don't
do any tests to verify that existing files are deserialized properly.
^ Run tests to verify that existing files are deserialized properly,
but don't write any missing files.
^ Do both the save and test phases.
----------------------------------------------------------------------------
| Config data for armor tests.
^ Directory where all the test serializations are stored.
^ How many versions back to test for backwards compatibility. A value
of @Just 0@ means that it only tests that the current version satisfies
@parse . render == id@. @Just 1@ means that it will verify that the
versions, etc. Nothing means that all versions will be tested.
----------------------------------------------------------------------------
----------------------------------------------------------------------------
| Tests the serialization backwards compatibility of a data type by storing
serialized representations in .test files to be checked into your project's
version control.
a file @foo-000.test@. If it doesn't exist, it creates it for each
serialization with the serialized representation of the val parameter.
'acNumVersions' of the stored @.test@ files are parsable by the current
version of the serialization.
----------------------------------------------------------------------------
| Same as 'testArmor', but more convenient for testing several values of the
same type.
----------------------------------------------------------------------------
| Lower level assertion function that works for a wider array of test
frameworks.
to parse, and it asserts that the values are equal to the expected value.
This latter assertion is only done for the most recent version because
changes that impact the structure of a data type can result in erroneous
failures due to changes in the order that the test cases are generated.
In other words, if you make an innocuous change like adding a constructor and
start getting "values didn't match" failures, all you need to do is bump the
data type's version. Armor will still guarantee that those serializations
parse properly but the incorrect value failures will be suppressed.
^ Customizable location where the serializations will be stored. We
recommend 'goldenFilePath' as a standard out-of-the-box scheme.
----------------------------------------------------------------------------
| Data structure that holds all the values needed for a golden test
----------------------------------------------------------------------------
the base directory defined in ArmorConfig).
quotes, spaces, and parenthesis in the case of type constructors that have
hash for disambiguation. | # LANGUAGE CPP #
# LANGUAGE ScopedTypeVariables #
module Armor
( Version(..)
, Armored(..)
, ArmorMode(..)
, ArmorConfig(..)
, defArmorConfig
, testArmor
, testArmorMany
, testSerialization
, GoldenTest(..)
, goldenFilePath
) where
import Control.Lens
import Control.Monad
import Data.ByteString (ByteString)
import qualified Data.ByteString as B
import Data.Char
import Data.Hashable
import Data.Map (Map)
import qualified Data.Map as M
import Data.Typeable
#if !MIN_VERSION_base(4,8,0)
import Data.Word
#endif
import Numeric
import System.Directory
import System.FilePath
import Test.HUnit.Base
import Text.Printf
newtype Version a = Version { unVersion :: Word }
deriving (Eq,Ord,Show,Read)
class Armored a where
version :: Version a
serializations :: Map String (APrism' ByteString a)
data ArmorMode
= SaveOnly
| TestOnly
| SaveAndTest
deriving (Eq,Ord,Show,Read,Enum,Bounded)
data ArmorConfig = ArmorConfig
{ acArmorMode :: ArmorMode
, acStoreDir :: FilePath
, acNumVersions :: Maybe Word
previous version can still be parse . @Just 2@ the previous two
}
| Default value for ArmorConfig .
defArmorConfig :: ArmorConfig
defArmorConfig = ArmorConfig SaveAndTest "test-data" Nothing
First , this function checks the directory ' acStoreDir ' for the existence of
Next , it checks that the serialized formats in the most recent
testArmor
:: (Eq a, Show a, Typeable a, Armored a)
=> ArmorConfig
-> String
-> a
-> Test
testArmor ac valId val =
TestList [ testIt s | s <- M.toList serializations ]
where
testIt s = test (testSerialization ac goldenFilePath valId s val)
testArmorMany
:: (Eq a, Show a, Typeable a, Armored a)
=> ArmorConfig
-> Map String a
-> Test
testArmorMany ac valMap = TestList $ map doOne $ M.toList valMap
where
doOne (k,v) = TestLabel k $ testArmor ac k v
This function can make two different assertions . It fails if the values fail
testSerialization
:: forall a. (Eq a, Show a, Typeable a, Armored a)
=> ArmorConfig
-> (GoldenTest a -> FilePath)
-> String
-> (String, APrism' ByteString a)
-> a
-> Assertion
testSerialization ac makeFilePath valName (sname,p) val = do
ensureTestFileExists
when (acArmorMode ac /= SaveOnly) $
mapM_ (assertVersionParses . Version) vs
where
makeGT = GoldenTest val valName sname p
curVer :: Version a
curVer = version
vs = reverse [maybe 0 (unVersion curVer -) (acNumVersions ac) .. unVersion curVer]
ensureTestFileExists = do
let fp = acStoreDir ac </> makeFilePath (makeGT curVer)
d = dropFileName fp
when (acArmorMode ac /= TestOnly) $ do
createDirectoryIfMissing True d
fileExists <- doesFileExist fp
when (not fileExists) $
B.writeFile fp (review (clonePrism p) val)
assertVersionParses ver = do
let fp = acStoreDir ac </> makeFilePath (makeGT ver)
exists <- doesFileExist fp
if exists
then do bs <- B.readFile fp
case preview (clonePrism p) bs of
Nothing -> assertFailure $
printf "Not backwards compatible with version %d: %s"
(unVersion ver) fp
Just v -> when (ver == curVer) $
assertEqual ("File parsed but values didn't match: " ++ fp) val v
else putStrLn $ "\nSkipping missing file " ++ fp
data GoldenTest a = GoldenTest
{ gtTestVal :: a
, gtValName :: String
, gtSerializationName :: String
, gtPrism :: APrism' ByteString a
, gtVersion :: Version a
}
| Constructs the FilePath where the serialization will be stored ( relative to
This function uses as a part of the directory hierarchy to
disambiguate tests for different data types . can contain single
type variables so we only take the first alphanumeric characters so that the
paths will be meaningful to humans and then add four characters of the type 's
goldenFilePath :: Typeable a => GoldenTest a -> FilePath
goldenFilePath gt =
(concat [takeWhile isAlpha ty, "-", h]) </>
gtSerializationName gt </>
printf "%s-%03d.test" (gtValName gt) (unVersion $ gtVersion gt)
where
ty = show $ typeOf $ gtTestVal gt
h = take 4 $ showHex (abs $ hash ty) ""
|
a60e087baf1d530f7eba21625de9711c0b29c651d553b22ca5d00a1f35dbe878 | erlang-nix/rebar3_nix | rebar3_nix.erl | -module(rebar3_nix).
-export([init/1]).
-spec init(rebar_state:t()) -> {ok, rebar_state:t()}.
init(State) ->
{ok, State1} = rebar3_nix_bootstrap_prv:init(State),
{ok, State2} = rebar3_nix_lock_prv:init(State1),
{ok, State3} = rebar3_nix_init_prv:init(State2),
{ok, State3}.
| null | https://raw.githubusercontent.com/erlang-nix/rebar3_nix/fbd1b9d1ebc74c9b8a73e9019c18fb50deb42ea2/src/rebar3_nix.erl | erlang | -module(rebar3_nix).
-export([init/1]).
-spec init(rebar_state:t()) -> {ok, rebar_state:t()}.
init(State) ->
{ok, State1} = rebar3_nix_bootstrap_prv:init(State),
{ok, State2} = rebar3_nix_lock_prv:init(State1),
{ok, State3} = rebar3_nix_init_prv:init(State2),
{ok, State3}.
| |
9ee5a8b264b04375db4cd86575dc802456e889b12a7e2e3804dba374cacf3588 | flybot-sg/Pastoral | router.cljc | Copyright ( c ) 2018 Flybot Pte Ltd , Singapore .
;;
This file is distributed under the Eclipse Public License , the same as
Clojure .
;;
;; This file incorporates work covered by the following copyright and
;; permission notice:
;;
Copyright ( c ) 2015 - 2017
;;
;; Permission is hereby granted, free of charge, to any person obtaining
;; a copy of this software and associated documentation files (the
" Software " ) , to deal in the Software without restriction , including
;; without limitation the rights to use, copy, modify, merge, publish,
distribute , sublicense , and/or sell copies of the Software , and to
permit persons to whom the Software is furnished to do so , subject to
;; the following conditions:
;;
;; The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software .
;;
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS
OR , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT .
;; IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT ,
;; TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
;; SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
(ns pastoral.router
(:require
[pastoral.console :refer [console]]
[pastoral.events :refer [handle]]
[pastoral.tick :as tick]
[pastoral.trace :as trace]))
-- Router Loop ------------------------------------------------------------
;;
;; A call to "pastoral.core/dispatch" places an event on a queue for processing.
;; A short time later, the handler registered to handle this event will be run.
;; What follows is the implementation of this process.
;;
The task is to process queued events in a perpetual loop , one after
;; the other, FIFO, calling the registered event-handler for each, being idle when
;; there are no events, and firing up when one arrives.
;;
But Unity has only a single thread of control and we must be
;; careful to not hog the CPU. When processing events one after another, we
must regularly hand back control to Unity , so it can redraw etc . But not too
;; regularly! Each time we get back control, we have to process all queued
;; events, or else something producing a lot of events might overwhelm the queue.
;; So there's a balance.
;;
;; The processing/handling of an event happens "asynchronously" sometime after
;; that event was enqueued via "dispatch". The original implementation of this router loop
;; used `core.async`. As a result, it was fairly simple, and it mostly worked,
;; but it did not give enough control. So now we hand-roll our own,
;; finite-state-machine and all.
;;
;; In what follows, the strategy is this:
;; - maintain a FIFO queue of `dispatched` events.
;; - when a new event arrives, "schedule" processing of this queue using
goog.async.nextTick , which means it will happen " very soon " .
;; - when processing events, one after the other, do ALL the currently
queued events . Do n't stop . Do n't yield to Unity . Hog that CPU .
;; - but if any new events are dispatched during this cycle of processing,
do n't do them immediately . Leave them queued . Yield first to Unity ,
;; and do these new events in the next processing cycle. That way we drain
;; the queue up to a point, but we never hog the CPU forever. In
particular , we handle the case where handling one event will beget
;; another event. The freshly begotten event will be handled next cycle,
;; with yielding in-between.
;; - In some cases, an event should not be handled until after the GUI has been
;; updated, i.e., after the next Reagent animation frame. In such a case,
;; the event should be dispatched with :flush-dom metadata like this:
;; (dispatch ^:flush-dom [:event-id other params])
;; Such an event will temporarily block all further processing because
events are processed sequentially : we handle one event completely
;; before we handle the ones behind it.
;;
;; Implementation notes:
;; - queue processing can be in a number of states: scheduled, running, paused
;; etc. So it is modeled as a Finite State Machine.
;; See "-fsm-trigger" (below) for the states and transitions.
;; - the scheduling is done via "goog.async.nextTick" which is pretty quick
;; - when the event has :flush-dom metadata we schedule via
;; "reagent.core.after-render"
;; which will run event processing after the next Reagent animation frame.
;;
;; Events can have metadata which says to pause event processing.
;; event metadata -> "run later" functions
(def later-fns {})
; :yield next-tick}) ;; almost immediately
;; Event Queue Abstraction
(defprotocol IEventQueue
;; -- API
(push [this event])
(add-post-event-callback [this id callack])
(remove-post-event-callback [this f])
(purge [this])
;; -- Implementation via a Finite State Machine
(-fsm-trigger [this trigger arg])
;; -- Finite State Machine actions
(-add-event [this event])
(-process-1st-event-in-queue [this])
(-run-next-tick [this])
(-run-queue [this])
(-exception [this ex])
(-pause [this later-fn])
(-resume [this])
(-call-post-event-callbacks [this event]))
Concrete implementation of IEventQueue
(deftype EventQueue [^:volatile-mutable fsm-state
^:volatile-mutable queue
^:volatile-mutable post-event-callback-fns]
IEventQueue
;; -- API ------------------------------------------------------------------
(push [this event] ;; presumably called by dispatch
(-fsm-trigger this :add-event event))
;; register a callback function which will be called after each event is processed
(add-post-event-callback [_ id callback-fn]
(when (contains? post-event-callback-fns id)
(console :warn "pastoral: overwriting existing post event call back with id:" id))
(->> (assoc post-event-callback-fns id callback-fn)
(set! post-event-callback-fns)))
(remove-post-event-callback [_ id]
(if-not (contains? post-event-callback-fns id)
(console :warn "pastoral: could not remove post event call back with id:" id)
(->> (dissoc post-event-callback-fns id)
(set! post-event-callback-fns))))
(purge [_]
(set! queue clojure.lang.PersistentQueue/EMPTY))
-- FSM Implementation ---------------------------------------------------
(-fsm-trigger
[this trigger arg]
The following " case " implements the Finite State Machine .
Given a " trigger " , and the existing FSM state , it computes the
new FSM state and the transition action ( function ) .
(trace/with-trace {:op-type ::fsm-trigger}
(let [[new-fsm-state action-fn]
(case [fsm-state trigger]
;; You should read the following "case" as:
[ current - FSM - state trigger ] - > [ new - FSM - state action - fn ]
;;
;; So, for example, the next line should be interpreted as:
;; if you are in state ":idle" and a trigger ":add-event"
happens , then move the FSM to state " : scheduled " and execute
that two - part " do " function .
[:idle :add-event] [:scheduled #(do (-add-event this arg)
(-run-next-tick this))]
FSM guard
[:idle]
[:scheduled #(-run-next-tick this)])
State : : scheduled ( the queue is scheduled to run , soon )
[:scheduled :add-event] [:scheduled #(-add-event this arg)]
[:scheduled :run-queue] [:running #(-run-queue this)]
State : : running ( the queue is being processed one event after another )
[:running :add-event] [:running #(-add-event this arg)]
[:running :pause] [:paused #(-pause this arg)]
[:running :exception] [:idle #(-exception this arg)]
FSM guard
[:idle]
[:scheduled #(-run-next-tick this)])
State : : paused (: flush - dom metadata on an event has caused a temporary pause in processing )
[:paused :add-event] [:paused #(-add-event this arg)]
[:paused :resume] [:running #(-resume this)]
(throw (ex-info (str "pastoral: router state transition not found. " fsm-state " " trigger)
{:fsm-state fsm-state, :trigger trigger})))]
The " case " above computed both the new FSM state , and the action . Now , make it happen .
(trace/merge-trace! {:operation [fsm-state trigger]
:tags {:current-state fsm-state
:new-state new-fsm-state}})
(set! fsm-state new-fsm-state)
(when action-fn (action-fn)))))
(-add-event
[_ event]
(set! queue (conj queue event)))
(-process-1st-event-in-queue
[this]
(let [event-v (peek queue)]
(try
(handle event-v)
(set! queue (pop queue))
(-call-post-event-callbacks this event-v)
(catch Exception ex
(-fsm-trigger this :exception ex)))))
(-run-next-tick
[this]
(tick/next #(-fsm-trigger this :run-queue nil)))
;; Process all the events currently in the queue, but not any new ones.
;; Be aware that events might have metadata which will pause processing.
(-run-queue
[this]
(loop [n (count queue)]
(if (zero? n)
(-fsm-trigger this :finish-run nil)
(if-let [later-fn (some later-fns (-> queue peek meta keys))] ;; any metadata which causes pausing?
(-fsm-trigger this :pause later-fn)
(do (-process-1st-event-in-queue this)
(recur (dec n)))))))
(-exception
[this ex]
(purge this) ;; purge the queue
(UnityEngine.Debug/LogException
(or (.InnerException ex) ex)))
(-pause
[this later-fn]
(later-fn #(-fsm-trigger this :resume nil)))
(-call-post-event-callbacks
[_ event-v]
(doseq [callback (vals post-event-callback-fns)]
(callback event-v queue)))
(-resume
[this]
(-process-1st-event-in-queue this) ;; do the event which paused processing
(-run-queue this))) ;; do the rest of the queued events
;; ---------------------------------------------------------------------------
;; Event Queue
;; When "dispatch" is called, the event is added into this event queue. Later,
;; the queue will "run" and the event will be "handled" by the registered function.
;;
(def event-queue (->EventQueue :idle clojure.lang.PersistentQueue/EMPTY {}))
;; ---------------------------------------------------------------------------
;; Dispatching
;;
(defn dispatch
"Enqueue `event` for processing by event handling machinery.
`event` is a vector of length >= 1. The 1st element identifies the kind of event.
Note: the event handler is not run immediately - it is not run
synchronously. It will likely be run 'very soon', although it may be
added to the end of a FIFO queue which already contain events.
Usage:
(dispatch [:order-pizza {:supreme 2 :meatlovers 1 :veg 1})"
[event]
(if (nil? event)
(throw (ex-info "pastoral: you called \"dispatch\" without an event vector." {}))
(push event-queue event))
nil) ;; Ensure nil return. See -Returning-False
(defn dispatch-sync
"Synchronously (immediately) process `event`. Do not queue.
Generally, don't use this. Instead use `dispatch`. It is an error
to use `dispatch-sync` within an event handler.
Useful when any delay in processing is a problem:
1. the `:on-change` handler of a text field where we are expecting fast typing.
2 when initialising your app - see 'main' in todomvc examples
3. in a unit test where we don't want the action 'later'
Usage:
(dispatch-sync [:sing :falsetto 634])"
[event-v]
(handle event-v)
(-call-post-event-callbacks event-queue event-v) ;; slightly ugly hack. Run the registered post event callbacks.
nil) ;; Ensure nil return. See -Returning-False
| null | https://raw.githubusercontent.com/flybot-sg/Pastoral/c0bfb0e083e7fee22cb242d03768fc8a72598683/src/pastoral/router.cljc | clojure |
This file incorporates work covered by the following copyright and
permission notice:
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
without limitation the rights to use, copy, modify, merge, publish,
the following conditions:
The above copyright notice and this permission notice shall be included
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
A call to "pastoral.core/dispatch" places an event on a queue for processing.
A short time later, the handler registered to handle this event will be run.
What follows is the implementation of this process.
the other, FIFO, calling the registered event-handler for each, being idle when
there are no events, and firing up when one arrives.
careful to not hog the CPU. When processing events one after another, we
regularly! Each time we get back control, we have to process all queued
events, or else something producing a lot of events might overwhelm the queue.
So there's a balance.
The processing/handling of an event happens "asynchronously" sometime after
that event was enqueued via "dispatch". The original implementation of this router loop
used `core.async`. As a result, it was fairly simple, and it mostly worked,
but it did not give enough control. So now we hand-roll our own,
finite-state-machine and all.
In what follows, the strategy is this:
- maintain a FIFO queue of `dispatched` events.
- when a new event arrives, "schedule" processing of this queue using
- when processing events, one after the other, do ALL the currently
- but if any new events are dispatched during this cycle of processing,
and do these new events in the next processing cycle. That way we drain
the queue up to a point, but we never hog the CPU forever. In
another event. The freshly begotten event will be handled next cycle,
with yielding in-between.
- In some cases, an event should not be handled until after the GUI has been
updated, i.e., after the next Reagent animation frame. In such a case,
the event should be dispatched with :flush-dom metadata like this:
(dispatch ^:flush-dom [:event-id other params])
Such an event will temporarily block all further processing because
before we handle the ones behind it.
Implementation notes:
- queue processing can be in a number of states: scheduled, running, paused
etc. So it is modeled as a Finite State Machine.
See "-fsm-trigger" (below) for the states and transitions.
- the scheduling is done via "goog.async.nextTick" which is pretty quick
- when the event has :flush-dom metadata we schedule via
"reagent.core.after-render"
which will run event processing after the next Reagent animation frame.
Events can have metadata which says to pause event processing.
event metadata -> "run later" functions
:yield next-tick}) ;; almost immediately
Event Queue Abstraction
-- API
-- Implementation via a Finite State Machine
-- Finite State Machine actions
-- API ------------------------------------------------------------------
presumably called by dispatch
register a callback function which will be called after each event is processed
You should read the following "case" as:
So, for example, the next line should be interpreted as:
if you are in state ":idle" and a trigger ":add-event"
Process all the events currently in the queue, but not any new ones.
Be aware that events might have metadata which will pause processing.
any metadata which causes pausing?
purge the queue
do the event which paused processing
do the rest of the queued events
---------------------------------------------------------------------------
Event Queue
When "dispatch" is called, the event is added into this event queue. Later,
the queue will "run" and the event will be "handled" by the registered function.
---------------------------------------------------------------------------
Dispatching
Ensure nil return. See -Returning-False
slightly ugly hack. Run the registered post event callbacks.
Ensure nil return. See -Returning-False | Copyright ( c ) 2018 Flybot Pte Ltd , Singapore .
This file is distributed under the Eclipse Public License , the same as
Clojure .
Copyright ( c ) 2015 - 2017
" Software " ) , to deal in the Software without restriction , including
distribute , sublicense , and/or sell copies of the Software , and to
permit persons to whom the Software is furnished to do so , subject to
in all copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS
OR , INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY , FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT .
CLAIM , DAMAGES OR OTHER LIABILITY , WHETHER IN AN ACTION OF CONTRACT ,
(ns pastoral.router
(:require
[pastoral.console :refer [console]]
[pastoral.events :refer [handle]]
[pastoral.tick :as tick]
[pastoral.trace :as trace]))
-- Router Loop ------------------------------------------------------------
The task is to process queued events in a perpetual loop , one after
But Unity has only a single thread of control and we must be
must regularly hand back control to Unity , so it can redraw etc . But not too
goog.async.nextTick , which means it will happen " very soon " .
queued events . Do n't stop . Do n't yield to Unity . Hog that CPU .
do n't do them immediately . Leave them queued . Yield first to Unity ,
particular , we handle the case where handling one event will beget
events are processed sequentially : we handle one event completely
(def later-fns {})
(defprotocol IEventQueue
(push [this event])
(add-post-event-callback [this id callack])
(remove-post-event-callback [this f])
(purge [this])
(-fsm-trigger [this trigger arg])
(-add-event [this event])
(-process-1st-event-in-queue [this])
(-run-next-tick [this])
(-run-queue [this])
(-exception [this ex])
(-pause [this later-fn])
(-resume [this])
(-call-post-event-callbacks [this event]))
Concrete implementation of IEventQueue
(deftype EventQueue [^:volatile-mutable fsm-state
^:volatile-mutable queue
^:volatile-mutable post-event-callback-fns]
IEventQueue
(-fsm-trigger this :add-event event))
(add-post-event-callback [_ id callback-fn]
(when (contains? post-event-callback-fns id)
(console :warn "pastoral: overwriting existing post event call back with id:" id))
(->> (assoc post-event-callback-fns id callback-fn)
(set! post-event-callback-fns)))
(remove-post-event-callback [_ id]
(if-not (contains? post-event-callback-fns id)
(console :warn "pastoral: could not remove post event call back with id:" id)
(->> (dissoc post-event-callback-fns id)
(set! post-event-callback-fns))))
(purge [_]
(set! queue clojure.lang.PersistentQueue/EMPTY))
-- FSM Implementation ---------------------------------------------------
(-fsm-trigger
[this trigger arg]
The following " case " implements the Finite State Machine .
Given a " trigger " , and the existing FSM state , it computes the
new FSM state and the transition action ( function ) .
(trace/with-trace {:op-type ::fsm-trigger}
(let [[new-fsm-state action-fn]
(case [fsm-state trigger]
[ current - FSM - state trigger ] - > [ new - FSM - state action - fn ]
happens , then move the FSM to state " : scheduled " and execute
that two - part " do " function .
[:idle :add-event] [:scheduled #(do (-add-event this arg)
(-run-next-tick this))]
FSM guard
[:idle]
[:scheduled #(-run-next-tick this)])
State : : scheduled ( the queue is scheduled to run , soon )
[:scheduled :add-event] [:scheduled #(-add-event this arg)]
[:scheduled :run-queue] [:running #(-run-queue this)]
State : : running ( the queue is being processed one event after another )
[:running :add-event] [:running #(-add-event this arg)]
[:running :pause] [:paused #(-pause this arg)]
[:running :exception] [:idle #(-exception this arg)]
FSM guard
[:idle]
[:scheduled #(-run-next-tick this)])
State : : paused (: flush - dom metadata on an event has caused a temporary pause in processing )
[:paused :add-event] [:paused #(-add-event this arg)]
[:paused :resume] [:running #(-resume this)]
(throw (ex-info (str "pastoral: router state transition not found. " fsm-state " " trigger)
{:fsm-state fsm-state, :trigger trigger})))]
The " case " above computed both the new FSM state , and the action . Now , make it happen .
(trace/merge-trace! {:operation [fsm-state trigger]
:tags {:current-state fsm-state
:new-state new-fsm-state}})
(set! fsm-state new-fsm-state)
(when action-fn (action-fn)))))
(-add-event
[_ event]
(set! queue (conj queue event)))
(-process-1st-event-in-queue
[this]
(let [event-v (peek queue)]
(try
(handle event-v)
(set! queue (pop queue))
(-call-post-event-callbacks this event-v)
(catch Exception ex
(-fsm-trigger this :exception ex)))))
(-run-next-tick
[this]
(tick/next #(-fsm-trigger this :run-queue nil)))
(-run-queue
[this]
(loop [n (count queue)]
(if (zero? n)
(-fsm-trigger this :finish-run nil)
(-fsm-trigger this :pause later-fn)
(do (-process-1st-event-in-queue this)
(recur (dec n)))))))
(-exception
[this ex]
(UnityEngine.Debug/LogException
(or (.InnerException ex) ex)))
(-pause
[this later-fn]
(later-fn #(-fsm-trigger this :resume nil)))
(-call-post-event-callbacks
[_ event-v]
(doseq [callback (vals post-event-callback-fns)]
(callback event-v queue)))
(-resume
[this]
(def event-queue (->EventQueue :idle clojure.lang.PersistentQueue/EMPTY {}))
(defn dispatch
"Enqueue `event` for processing by event handling machinery.
`event` is a vector of length >= 1. The 1st element identifies the kind of event.
Note: the event handler is not run immediately - it is not run
synchronously. It will likely be run 'very soon', although it may be
added to the end of a FIFO queue which already contain events.
Usage:
(dispatch [:order-pizza {:supreme 2 :meatlovers 1 :veg 1})"
[event]
(if (nil? event)
(throw (ex-info "pastoral: you called \"dispatch\" without an event vector." {}))
(push event-queue event))
(defn dispatch-sync
"Synchronously (immediately) process `event`. Do not queue.
Generally, don't use this. Instead use `dispatch`. It is an error
to use `dispatch-sync` within an event handler.
Useful when any delay in processing is a problem:
1. the `:on-change` handler of a text field where we are expecting fast typing.
2 when initialising your app - see 'main' in todomvc examples
3. in a unit test where we don't want the action 'later'
Usage:
(dispatch-sync [:sing :falsetto 634])"
[event-v]
(handle event-v)
|
6cee4c48dbf964c4b1df914e0c83b835c04519ded5d4be0336e93166bc8ceb92 | chaoxu/mgccl-haskell | BinomialCoefficients.hs | import Data.List
import Control.Monad
main = do
n <- getInteger
go 0 n
go i n =
when (i < n) $
do z <- getIntegerList
print (test z)
go (i + 1) n
getInteger = do
l<-getIntegerList
return (head l)
getIntegerList = do
line <- getLine
let tmp = words line
return (map read tmp :: [Integer])
test [n,p] = 1 + n - product (map (+1) (digits p n))
digits p 0 = []
digits p n = digits p q ++[r]
where (q,r) = quotRem n p
| null | https://raw.githubusercontent.com/chaoxu/mgccl-haskell/bb03e39ae43f410bd2a673ac2b438929ab8ef7a1/interviewstreet/BinomialCoefficients.hs | haskell | import Data.List
import Control.Monad
main = do
n <- getInteger
go 0 n
go i n =
when (i < n) $
do z <- getIntegerList
print (test z)
go (i + 1) n
getInteger = do
l<-getIntegerList
return (head l)
getIntegerList = do
line <- getLine
let tmp = words line
return (map read tmp :: [Integer])
test [n,p] = 1 + n - product (map (+1) (digits p n))
digits p 0 = []
digits p n = digits p q ++[r]
where (q,r) = quotRem n p
| |
8ee750b3d21c1a829de557693f4ef5168b1b821a48599b13d64882bb81567663 | axelarge/advent-of-code | day22_test.clj | (ns advent-of-code.y2017.day22-test
(:require [clojure.test :refer :all]
[advent-of-code.y2017.day22 :refer :all]))
(deftest test-solve1
(is (= (:infected (solve1 test-input 7)) 5))
(is (= (:infected (solve1 test-input 70)) 41))
(is (= (:infected (solve1 test-input 10000)) 5587))
(is (= (:infected (solve1 input 10000)) 5223)))
(deftest test-solve2-sample
(is (= (:infected (solve2 test-input 100)) 26)))
(deftest ^:slow test-solve2
(is (= (:infected (solve2 test-input 10000000)) 2511944))
(is (= (:infected (solve2 input 10000000)) 2511456)))
| null | https://raw.githubusercontent.com/axelarge/advent-of-code/4c62a53ef71605780a22cf8219029453d8e1b977/test/advent_of_code/y2017/day22_test.clj | clojure | (ns advent-of-code.y2017.day22-test
(:require [clojure.test :refer :all]
[advent-of-code.y2017.day22 :refer :all]))
(deftest test-solve1
(is (= (:infected (solve1 test-input 7)) 5))
(is (= (:infected (solve1 test-input 70)) 41))
(is (= (:infected (solve1 test-input 10000)) 5587))
(is (= (:infected (solve1 input 10000)) 5223)))
(deftest test-solve2-sample
(is (= (:infected (solve2 test-input 100)) 26)))
(deftest ^:slow test-solve2
(is (= (:infected (solve2 test-input 10000000)) 2511944))
(is (= (:infected (solve2 input 10000000)) 2511456)))
| |
50945b1db46f4ab4c1c5328e6ccc082696f9272779dbb00e3549a67e064c092b | NorfairKing/smos | Streaming.hs | module Smos.Query.Streaming where
import Conduit
import Control.Monad.Reader
import Path
import Smos.Data
import Smos.Query.Env
import Smos.Report.Archive
import Smos.Report.Config
import Smos.Report.Streaming
streamSmosProjectsQ :: ConduitT i (Path Rel File, SmosFile) Q ()
streamSmosProjectsQ = do
dc <- lift $ asks envDirectoryConfig
streamSmosProjectsFiles dc .| streamParseSmosProjects
streamSmosFiles :: HideArchive -> ConduitT i (Path Rel File) Q ()
streamSmosFiles ha = do
dc <- lift $ asks envDirectoryConfig
streamSmosFilesFromWorkflowRel ha dc
streamAllSmosFiles :: ConduitT i (Path Rel File) Q ()
streamAllSmosFiles = do
dc <- lift $ asks envDirectoryConfig
streamSmosFilesFromWorkflowRel Don'tHideArchive dc
streamParseSmosProjects :: ConduitT (Path Rel File) (Path Rel File, SmosFile) Q ()
streamParseSmosProjects = do
dc <- lift $ asks envDirectoryConfig
pd <- liftIO $ resolveDirProjectsDir dc
parseSmosFilesRel pd .| shouldPrintC
streamParseSmosFiles :: ConduitT (Path Rel File) (Path Rel File, SmosFile) Q ()
streamParseSmosFiles = do
dc <- lift $ asks envDirectoryConfig
wd <- liftIO $ resolveDirWorkflowDir dc
parseSmosFilesRel wd .| shouldPrintC
shouldPrintC :: ConduitT (a, Either ParseSmosFileException b) (a, b) Q ()
shouldPrintC = do
sp <- lift getShouldPrint
printShouldPrint sp
| null | https://raw.githubusercontent.com/NorfairKing/smos/55442b20d13b75cd19d71a0fb6ef68e5c85be5b0/smos-query/src/Smos/Query/Streaming.hs | haskell | module Smos.Query.Streaming where
import Conduit
import Control.Monad.Reader
import Path
import Smos.Data
import Smos.Query.Env
import Smos.Report.Archive
import Smos.Report.Config
import Smos.Report.Streaming
streamSmosProjectsQ :: ConduitT i (Path Rel File, SmosFile) Q ()
streamSmosProjectsQ = do
dc <- lift $ asks envDirectoryConfig
streamSmosProjectsFiles dc .| streamParseSmosProjects
streamSmosFiles :: HideArchive -> ConduitT i (Path Rel File) Q ()
streamSmosFiles ha = do
dc <- lift $ asks envDirectoryConfig
streamSmosFilesFromWorkflowRel ha dc
streamAllSmosFiles :: ConduitT i (Path Rel File) Q ()
streamAllSmosFiles = do
dc <- lift $ asks envDirectoryConfig
streamSmosFilesFromWorkflowRel Don'tHideArchive dc
streamParseSmosProjects :: ConduitT (Path Rel File) (Path Rel File, SmosFile) Q ()
streamParseSmosProjects = do
dc <- lift $ asks envDirectoryConfig
pd <- liftIO $ resolveDirProjectsDir dc
parseSmosFilesRel pd .| shouldPrintC
streamParseSmosFiles :: ConduitT (Path Rel File) (Path Rel File, SmosFile) Q ()
streamParseSmosFiles = do
dc <- lift $ asks envDirectoryConfig
wd <- liftIO $ resolveDirWorkflowDir dc
parseSmosFilesRel wd .| shouldPrintC
shouldPrintC :: ConduitT (a, Either ParseSmosFileException b) (a, b) Q ()
shouldPrintC = do
sp <- lift getShouldPrint
printShouldPrint sp
| |
7af83ec73e7ef9773e7c4c25e306def80b2dcc757e5f979ceb3c81f30fb3b8e1 | den1k/vimsical | style.clj | (ns vimsical.frontend.views.style)
(def icons
[:.logo-and-type {:display :flex
:flex-shrink 0
:align-items :center
:cursor :pointer}
[:.vimsical-logo {:height :36px
:width :36px}]
[:.vimsical-type {:width :125px
:fill :#8F9096
:margin-left :15px}]])
(def views
[icons])
| null | https://raw.githubusercontent.com/den1k/vimsical/1e4a1f1297849b1121baf24bdb7a0c6ba3558954/src/frontend/vimsical/frontend/views/style.clj | clojure | (ns vimsical.frontend.views.style)
(def icons
[:.logo-and-type {:display :flex
:flex-shrink 0
:align-items :center
:cursor :pointer}
[:.vimsical-logo {:height :36px
:width :36px}]
[:.vimsical-type {:width :125px
:fill :#8F9096
:margin-left :15px}]])
(def views
[icons])
| |
2cba028da5975f61623f4b587dcdd66d9f2753af22562d829bbdcc6e52d20f8d | nikodemus/SBCL | info-functions.lisp | ;;;; miscellaneous functions which use INFO
;;;;
( In CMU CL , these were in globaldb.lisp . They 've been moved here
;;;; because references to INFO can't be compiled correctly until
globaldb initialization is complete , and the SBCL technique for
;;;; initializing the global database in the cross-compiler isn't
;;;; completed until load time.)
This software is part of the SBCL system . See the README file for
;;;; more information.
;;;;
This software is derived from the CMU CL system , which was
written at Carnegie Mellon University and released into the
;;;; public domain. The software is in the public domain and is
;;;; provided with absolutely no warranty. See the COPYING and CREDITS
;;;; files for more information.
(in-package "SB!C")
;;;; internal utilities defined in terms of INFO
;;; Check that NAME is a valid function name, returning the name if
;;; OK, and signalling an error if not. In addition to checking for
;;; basic well-formedness, we also check that symbol names are not NIL
;;; or the name of a special form.
(defun check-fun-name (name)
(typecase name
(list
(unless (legal-fun-name-p name)
(compiler-error "illegal function name: ~S" name)))
(symbol
(when (eq (info :function :kind name) :special-form)
(compiler-error "Special form is an illegal function name: ~S" name)))
(t
(compiler-error "illegal function name: ~S" name)))
(values))
;;; Record a new function definition, and check its legality.
(defun proclaim-as-fun-name (name)
;; legal name?
(check-fun-name name)
: This can happen when eg . compiling a NAMED - LAMBDA , and is n't
;; guarded against elsewhere -- so we want to assert package locks here. The
;; reason we do it only when stomping on existing stuff is because we want
;; to keep
( WITHOUT - PACKAGE - LOCKS ( DEFUN LOCKED : FOO ... ) )
;; viable, which requires no compile-time violations in the harmless cases.
(with-single-package-locked-error ()
(flet ((assert-it ()
(assert-symbol-home-package-unlocked name "proclaiming ~S as a function")))
(let ((kind (info :function :kind name)))
;; scrubbing old data I: possible collision with a macro
(when (and (fboundp name) (eq :macro kind))
(assert-it)
(compiler-style-warn "~S was previously defined as a macro." name)
(setf (info :function :where-from name) :assumed)
(clear-info :function :macro-function name))
(unless (eq :function kind)
(assert-it)
(setf (info :function :kind name) :function)))))
;; scrubbing old data II: dangling forward references
;;
;; (This could happen if someone executes PROCLAIM FTYPE at
;; macroexpansion time, which is bad style, or at compile time, e.g.
in EVAL - WHEN (: ) inside something like DEFSTRUCT , in which
;; case it's reasonable style. Either way, NAME is no longer a free
;; function.)
(when (boundp '*free-funs*) ; when compiling
(remhash name *free-funs*))
(note-if-setf-fun-and-macro name)
(values))
This is called to do something about SETF functions that overlap
with SETF macros . Perhaps we should interact with the user to see
;;; whether the macro should be blown away, but for now just give a
warning . Due to the weak semantics of the ( SETF FUNCTION ) name , we
ca n't assume that they are n't just naming a function ( SETF FOO )
;;; for the heck of it. NAME is already known to be well-formed.
(defun note-if-setf-fun-and-macro (name)
(when (consp name)
(when (or (info :setf :inverse name)
(info :setf :expander name))
(compiler-style-warn
"defining as a SETF function a name that already has a SETF macro:~
~% ~S"
name)))
(values))
;;; Make NAME no longer be a function name: clear everything back to
;;; the default.
(defun undefine-fun-name (name)
(when name
(macrolet ((frob (type &optional val)
`(unless (eq (info :function ,type name) ,val)
(setf (info :function ,type name) ,val))))
(frob :info)
(frob :type (specifier-type 'function))
(frob :where-from :assumed)
(frob :inlinep)
(frob :kind)
(frob :macro-function)
(frob :inline-expansion-designator)
(frob :source-transform)
(frob :structure-accessor)
(frob :assumed-type)))
(values))
part of what happens with DEFUN , also with some PCL stuff : Make
;;; NAME known to be a function definition.
(defun become-defined-fun-name (name)
(proclaim-as-fun-name name)
(when (eq (info :function :where-from name) :assumed)
(setf (info :function :where-from name) :defined)
(if (info :function :assumed-type name)
(setf (info :function :assumed-type name) nil))))
;;; Decode any raw (INFO :FUNCTION :INLINE-EXPANSION-DESIGNATOR FUN-NAME)
;;; value into a lambda expression, or return NIL if there is none.
(declaim (ftype (function ((or symbol cons)) list) fun-name-inline-expansion))
(defun fun-name-inline-expansion (fun-name)
(let ((info (info :function :inline-expansion-designator fun-name)))
(if (functionp info)
(funcall info)
info)))
;;;; ANSI Common Lisp functions which are defined in terms of the info
;;;; database
(defun sb!xc:macro-function (symbol &optional env)
#!+sb-doc
"If SYMBOL names a macro in ENV, returns the expansion function,
else returns NIL. If ENV is unspecified or NIL, use the global environment
only."
(declare (symbol symbol))
(let* ((fenv (when env (lexenv-funs env)))
(local-def (cdr (assoc symbol fenv))))
(if local-def
(if (and (consp local-def) (eq (car local-def) 'macro))
(cdr local-def)
nil)
(values (info :function :macro-function symbol)))))
(defun (setf sb!xc:macro-function) (function symbol &optional environment)
(declare (symbol symbol) (type function function))
(when environment
Note : Technically there could be an ENV optional argument to SETF
MACRO - FUNCTION , but since ANSI says that the consequences of
;; supplying a non-nil one are undefined, we don't allow it.
;; (Thus our implementation of this unspecified behavior is to
complain . SInce the behavior is unspecified , this is conforming.:- )
(error "Non-NIL environment argument in SETF of MACRO-FUNCTION ~S: ~S"
symbol environment))
(when (eq (info :function :kind symbol) :special-form)
(error "~S names a special form." symbol))
(with-single-package-locked-error (:symbol symbol "setting the macro-function of ~S")
(setf (info :function :kind symbol) :macro)
(setf (info :function :macro-function symbol) function)
This is a nice thing to have in the target SBCL , but in the
;; cross-compilation host it's not nice to mess with
( SYMBOL - FUNCTION FOO ) where FOO might be a symbol in the
;; cross-compilation host's COMMON-LISP package.
#-sb-xc-host
(setf (symbol-function symbol)
(lambda (&rest args)
(declare (ignore args))
;; (ANSI specification of FUNCALL says that this should be
an error of type UNDEFINED - FUNCTION , not just SIMPLE - ERROR . )
(error 'undefined-function :name symbol))))
function)
(defun fun-locally-defined-p (name env)
(and env
(let ((fun (cdr (assoc name (lexenv-funs env) :test #'equal))))
(and fun (not (global-var-p fun))))))
(defun sb!xc:compiler-macro-function (name &optional env)
#!+sb-doc
"If NAME names a compiler-macro in ENV, return the expansion function, else
return NIL. Can be set with SETF when ENV is NIL."
(legal-fun-name-or-type-error name)
;; CLHS 3.2.2.1: Creating a lexical binding for the function name
;; not only creates a new local function or macro definition, but
;; also shadows[2] the compiler macro.
(unless (fun-locally-defined-p name env)
Note : CMU CL used to return NIL here when a NOTINLINE
;; declaration was in force. That's fairly logical, given the
specified effect of NOTINLINE declarations on compiler - macro
;; expansion. However, (1) it doesn't seem to be consistent with
the ANSI spec for COMPILER - MACRO - FUNCTION , and ( 2 ) it would
give surprising behavior for ( SETF ( COMPILER - MACRO - FUNCTION
FOO ) ... ) in the presence of a ( PROCLAIM ' ( NOTINLINE FOO ) ) . So
;; we don't do it.
(values (info :function :compiler-macro-function name))))
(defun (setf sb!xc:compiler-macro-function) (function name &optional env)
(declare (type (or symbol list) name)
(type (or function null) function))
(when env
ANSI says this operation is undefined .
(error "can't SETF COMPILER-MACRO-FUNCTION when ENV is non-NIL"))
(when (eq (info :function :kind name) :special-form)
(error "~S names a special form." name))
(with-single-package-locked-error
(:symbol name "setting the compiler-macro-function of ~A")
(setf (info :function :compiler-macro-function name) function)
function))
;;;; a subset of DOCUMENTATION functionality for bootstrapping
;;; FDOCUMENTATION is like DOCUMENTATION, but with less functionality,
and implemented with DEFUN instead of so that it can
run before CLOS is set up . Supported DOC - TYPE values are
;;; FUNCTION
SETF
;;; STRUCTURE
;;; T
;;; TYPE
;;; VARIABLE
;;; FIXME: Other types end up in INFO :RANDOM-DOCUMENTATION :STUFF. I
;;; should add some code to monitor this and make sure that nothing is
;;; unintentionally being sent to never never land this way.
FIXME : Rename FDOCUMENTATION to BDOCUMENTATION , by analogy with
;;; DEF!STRUCT and DEF!MACRO and so forth. And consider simply saving
all the BDOCUMENTATION entries in a * BDOCUMENTATION * hash table
and slamming them into PCL once PCL gets going .
(defun fdocumentation (x doc-type)
(case doc-type
(variable
(typecase x
(symbol (values (info :variable :documentation x)))))
FUNCTION is not used at the momemnt , just here for symmetry .
(function
(cond ((functionp x)
(%fun-doc x))
((and (legal-fun-name-p x) (fboundp x))
(%fun-doc (or (and (symbolp x) (macro-function x))
(fdefinition x))))))
(structure
(typecase x
(symbol (cond
((eq (info :type :kind x) :instance)
(values (info :type :documentation x)))
((info :typed-structure :info x)
(values (info :typed-structure :documentation x)))))))
(type
(typecase x
(structure-class (values (info :type :documentation (class-name x))))
(t (and (typep x 'symbol) (values (info :type :documentation x))))))
(setf (values (info :setf :documentation x)))
((t)
(typecase x
(function (%fun-doc x))
(package (package-doc-string x))
(structure-class (values (info :type :documentation (class-name x))))
((or symbol cons)
(random-documentation x doc-type))))
(t
(when (typep x '(or symbol cons))
(random-documentation x doc-type)))))
(defun (setf fdocumentation) (string name doc-type)
(declare (type (or null string) string))
(case doc-type
(variable (setf (info :variable :documentation name) string))
(function
: is n't ready early enough during cold - init , so
;; special case for symbols.
(if (symbolp name)
(setf (%fun-doc (symbol-function name)) string)
(when (legal-fun-name-p name)
(setf (%fun-doc (fdefinition name)) string))))
(structure (cond
((eq (info :type :kind name) :instance)
(setf (info :type :documentation name) string))
((info :typed-structure :info name)
(setf (info :typed-structure :documentation name) string))))
(type (setf (info :type :documentation name) string))
(setf (setf (info :setf :documentation name) string))
(t
(when (typep name '(or symbol cons))
(setf (random-documentation name doc-type) string))))
string)
(defun random-documentation (name type)
(cdr (assoc type (info :random-documentation :stuff name))))
(defun (setf random-documentation) (new-value name type)
(let ((pair (assoc type (info :random-documentation :stuff name))))
(if pair
(setf (cdr pair) new-value)
(push (cons type new-value)
(info :random-documentation :stuff name))))
new-value)
| null | https://raw.githubusercontent.com/nikodemus/SBCL/3c11847d1e12db89b24a7887b18a137c45ed4661/src/compiler/info-functions.lisp | lisp | miscellaneous functions which use INFO
because references to INFO can't be compiled correctly until
initializing the global database in the cross-compiler isn't
completed until load time.)
more information.
public domain. The software is in the public domain and is
provided with absolutely no warranty. See the COPYING and CREDITS
files for more information.
internal utilities defined in terms of INFO
Check that NAME is a valid function name, returning the name if
OK, and signalling an error if not. In addition to checking for
basic well-formedness, we also check that symbol names are not NIL
or the name of a special form.
Record a new function definition, and check its legality.
legal name?
guarded against elsewhere -- so we want to assert package locks here. The
reason we do it only when stomping on existing stuff is because we want
to keep
viable, which requires no compile-time violations in the harmless cases.
scrubbing old data I: possible collision with a macro
scrubbing old data II: dangling forward references
(This could happen if someone executes PROCLAIM FTYPE at
macroexpansion time, which is bad style, or at compile time, e.g.
case it's reasonable style. Either way, NAME is no longer a free
function.)
when compiling
whether the macro should be blown away, but for now just give a
for the heck of it. NAME is already known to be well-formed.
Make NAME no longer be a function name: clear everything back to
the default.
NAME known to be a function definition.
Decode any raw (INFO :FUNCTION :INLINE-EXPANSION-DESIGNATOR FUN-NAME)
value into a lambda expression, or return NIL if there is none.
ANSI Common Lisp functions which are defined in terms of the info
database
supplying a non-nil one are undefined, we don't allow it.
(Thus our implementation of this unspecified behavior is to
cross-compilation host it's not nice to mess with
cross-compilation host's COMMON-LISP package.
(ANSI specification of FUNCALL says that this should be
CLHS 3.2.2.1: Creating a lexical binding for the function name
not only creates a new local function or macro definition, but
also shadows[2] the compiler macro.
declaration was in force. That's fairly logical, given the
expansion. However, (1) it doesn't seem to be consistent with
we don't do it.
a subset of DOCUMENTATION functionality for bootstrapping
FDOCUMENTATION is like DOCUMENTATION, but with less functionality,
FUNCTION
STRUCTURE
T
TYPE
VARIABLE
FIXME: Other types end up in INFO :RANDOM-DOCUMENTATION :STUFF. I
should add some code to monitor this and make sure that nothing is
unintentionally being sent to never never land this way.
DEF!STRUCT and DEF!MACRO and so forth. And consider simply saving
special case for symbols. | ( In CMU CL , these were in globaldb.lisp . They 've been moved here
globaldb initialization is complete , and the SBCL technique for
This software is part of the SBCL system . See the README file for
This software is derived from the CMU CL system , which was
written at Carnegie Mellon University and released into the
(in-package "SB!C")
(defun check-fun-name (name)
(typecase name
(list
(unless (legal-fun-name-p name)
(compiler-error "illegal function name: ~S" name)))
(symbol
(when (eq (info :function :kind name) :special-form)
(compiler-error "Special form is an illegal function name: ~S" name)))
(t
(compiler-error "illegal function name: ~S" name)))
(values))
(defun proclaim-as-fun-name (name)
(check-fun-name name)
: This can happen when eg . compiling a NAMED - LAMBDA , and is n't
( WITHOUT - PACKAGE - LOCKS ( DEFUN LOCKED : FOO ... ) )
(with-single-package-locked-error ()
(flet ((assert-it ()
(assert-symbol-home-package-unlocked name "proclaiming ~S as a function")))
(let ((kind (info :function :kind name)))
(when (and (fboundp name) (eq :macro kind))
(assert-it)
(compiler-style-warn "~S was previously defined as a macro." name)
(setf (info :function :where-from name) :assumed)
(clear-info :function :macro-function name))
(unless (eq :function kind)
(assert-it)
(setf (info :function :kind name) :function)))))
in EVAL - WHEN (: ) inside something like DEFSTRUCT , in which
(remhash name *free-funs*))
(note-if-setf-fun-and-macro name)
(values))
This is called to do something about SETF functions that overlap
with SETF macros . Perhaps we should interact with the user to see
warning . Due to the weak semantics of the ( SETF FUNCTION ) name , we
ca n't assume that they are n't just naming a function ( SETF FOO )
(defun note-if-setf-fun-and-macro (name)
(when (consp name)
(when (or (info :setf :inverse name)
(info :setf :expander name))
(compiler-style-warn
"defining as a SETF function a name that already has a SETF macro:~
~% ~S"
name)))
(values))
(defun undefine-fun-name (name)
(when name
(macrolet ((frob (type &optional val)
`(unless (eq (info :function ,type name) ,val)
(setf (info :function ,type name) ,val))))
(frob :info)
(frob :type (specifier-type 'function))
(frob :where-from :assumed)
(frob :inlinep)
(frob :kind)
(frob :macro-function)
(frob :inline-expansion-designator)
(frob :source-transform)
(frob :structure-accessor)
(frob :assumed-type)))
(values))
part of what happens with DEFUN , also with some PCL stuff : Make
(defun become-defined-fun-name (name)
(proclaim-as-fun-name name)
(when (eq (info :function :where-from name) :assumed)
(setf (info :function :where-from name) :defined)
(if (info :function :assumed-type name)
(setf (info :function :assumed-type name) nil))))
(declaim (ftype (function ((or symbol cons)) list) fun-name-inline-expansion))
(defun fun-name-inline-expansion (fun-name)
(let ((info (info :function :inline-expansion-designator fun-name)))
(if (functionp info)
(funcall info)
info)))
(defun sb!xc:macro-function (symbol &optional env)
#!+sb-doc
"If SYMBOL names a macro in ENV, returns the expansion function,
else returns NIL. If ENV is unspecified or NIL, use the global environment
only."
(declare (symbol symbol))
(let* ((fenv (when env (lexenv-funs env)))
(local-def (cdr (assoc symbol fenv))))
(if local-def
(if (and (consp local-def) (eq (car local-def) 'macro))
(cdr local-def)
nil)
(values (info :function :macro-function symbol)))))
(defun (setf sb!xc:macro-function) (function symbol &optional environment)
(declare (symbol symbol) (type function function))
(when environment
Note : Technically there could be an ENV optional argument to SETF
MACRO - FUNCTION , but since ANSI says that the consequences of
complain . SInce the behavior is unspecified , this is conforming.:- )
(error "Non-NIL environment argument in SETF of MACRO-FUNCTION ~S: ~S"
symbol environment))
(when (eq (info :function :kind symbol) :special-form)
(error "~S names a special form." symbol))
(with-single-package-locked-error (:symbol symbol "setting the macro-function of ~S")
(setf (info :function :kind symbol) :macro)
(setf (info :function :macro-function symbol) function)
This is a nice thing to have in the target SBCL , but in the
( SYMBOL - FUNCTION FOO ) where FOO might be a symbol in the
#-sb-xc-host
(setf (symbol-function symbol)
(lambda (&rest args)
(declare (ignore args))
an error of type UNDEFINED - FUNCTION , not just SIMPLE - ERROR . )
(error 'undefined-function :name symbol))))
function)
(defun fun-locally-defined-p (name env)
(and env
(let ((fun (cdr (assoc name (lexenv-funs env) :test #'equal))))
(and fun (not (global-var-p fun))))))
(defun sb!xc:compiler-macro-function (name &optional env)
#!+sb-doc
"If NAME names a compiler-macro in ENV, return the expansion function, else
return NIL. Can be set with SETF when ENV is NIL."
(legal-fun-name-or-type-error name)
(unless (fun-locally-defined-p name env)
Note : CMU CL used to return NIL here when a NOTINLINE
specified effect of NOTINLINE declarations on compiler - macro
the ANSI spec for COMPILER - MACRO - FUNCTION , and ( 2 ) it would
give surprising behavior for ( SETF ( COMPILER - MACRO - FUNCTION
FOO ) ... ) in the presence of a ( PROCLAIM ' ( NOTINLINE FOO ) ) . So
(values (info :function :compiler-macro-function name))))
(defun (setf sb!xc:compiler-macro-function) (function name &optional env)
(declare (type (or symbol list) name)
(type (or function null) function))
(when env
ANSI says this operation is undefined .
(error "can't SETF COMPILER-MACRO-FUNCTION when ENV is non-NIL"))
(when (eq (info :function :kind name) :special-form)
(error "~S names a special form." name))
(with-single-package-locked-error
(:symbol name "setting the compiler-macro-function of ~A")
(setf (info :function :compiler-macro-function name) function)
function))
and implemented with DEFUN instead of so that it can
run before CLOS is set up . Supported DOC - TYPE values are
SETF
FIXME : Rename FDOCUMENTATION to BDOCUMENTATION , by analogy with
all the BDOCUMENTATION entries in a * BDOCUMENTATION * hash table
and slamming them into PCL once PCL gets going .
(defun fdocumentation (x doc-type)
(case doc-type
(variable
(typecase x
(symbol (values (info :variable :documentation x)))))
FUNCTION is not used at the momemnt , just here for symmetry .
(function
(cond ((functionp x)
(%fun-doc x))
((and (legal-fun-name-p x) (fboundp x))
(%fun-doc (or (and (symbolp x) (macro-function x))
(fdefinition x))))))
(structure
(typecase x
(symbol (cond
((eq (info :type :kind x) :instance)
(values (info :type :documentation x)))
((info :typed-structure :info x)
(values (info :typed-structure :documentation x)))))))
(type
(typecase x
(structure-class (values (info :type :documentation (class-name x))))
(t (and (typep x 'symbol) (values (info :type :documentation x))))))
(setf (values (info :setf :documentation x)))
((t)
(typecase x
(function (%fun-doc x))
(package (package-doc-string x))
(structure-class (values (info :type :documentation (class-name x))))
((or symbol cons)
(random-documentation x doc-type))))
(t
(when (typep x '(or symbol cons))
(random-documentation x doc-type)))))
(defun (setf fdocumentation) (string name doc-type)
(declare (type (or null string) string))
(case doc-type
(variable (setf (info :variable :documentation name) string))
(function
: is n't ready early enough during cold - init , so
(if (symbolp name)
(setf (%fun-doc (symbol-function name)) string)
(when (legal-fun-name-p name)
(setf (%fun-doc (fdefinition name)) string))))
(structure (cond
((eq (info :type :kind name) :instance)
(setf (info :type :documentation name) string))
((info :typed-structure :info name)
(setf (info :typed-structure :documentation name) string))))
(type (setf (info :type :documentation name) string))
(setf (setf (info :setf :documentation name) string))
(t
(when (typep name '(or symbol cons))
(setf (random-documentation name doc-type) string))))
string)
(defun random-documentation (name type)
(cdr (assoc type (info :random-documentation :stuff name))))
(defun (setf random-documentation) (new-value name type)
(let ((pair (assoc type (info :random-documentation :stuff name))))
(if pair
(setf (cdr pair) new-value)
(push (cons type new-value)
(info :random-documentation :stuff name))))
new-value)
|
16c01568f9554800d0f9daf0b36f197daeff4e7e2fe44aeafe8d4f12cfcce480 | haskell-repa/repa | Base.hs |
module Data.Array.Repa.Base
( Source (..), (!), toList
, deepSeqArrays)
where
import Data.Array.Repa.Shape
-- Source -----------------------------------------------------------------------
-- | Class of array representations that we can read elements from.
class Source r e where
-- Arrays with a representation tag, shape, and element type.
Use one of the type tags like ` D ` , ` U ` and so on for @r@ ,
one of ` DIM1 ` , ` DIM2 ` ... for @sh@.
data Array r sh e
-- | O(1). Take the extent (size) of an array.
extent :: Shape sh => Array r sh e -> sh
-- | O(1). Shape polymorphic indexing.
index, unsafeIndex
:: Shape sh => Array r sh e -> sh -> e
# INLINE index #
index arr ix = arr `linearIndex` toIndex (extent arr) ix
# INLINE unsafeIndex #
unsafeIndex arr ix = arr `unsafeLinearIndex` toIndex (extent arr) ix
-- | O(1). Linear indexing into underlying, row-major, array representation.
linearIndex, unsafeLinearIndex
:: Shape sh => Array r sh e -> Int -> e
# INLINE unsafeLinearIndex #
unsafeLinearIndex = linearIndex
-- | Ensure an array's data structure is fully evaluated.
deepSeqArray
:: Shape sh =>Array r sh e -> b -> b
| O(1 ) . for ` index `
(!) :: Shape sh => Source r e => Array r sh e -> sh -> e
(!) = index
-- | O(n). Convert an array to a list.
toList :: Shape sh => Source r e
=> Array r sh e -> [e]
# INLINE toList #
toList arr
= go 0
where len = size (extent arr)
go ix
| ix == len = []
| otherwise = unsafeLinearIndex arr ix : go (ix + 1)
| Apply ` deepSeqArray ` to up to four arrays .
---
-- NOTE: this shouldn't be needed anymore, as we've made all the shape fields strict.
--
-- The implementation of this function has been hand-unwound to work for up to
four arrays . Putting more in the list yields ` error ` .
--
For functions that are /not/ marked as INLINE , you should apply ` deepSeqArrays `
-- to argument arrays before using them in a @compute@ or @copy@ expression.
-- For example:
--
-- @ processArrays
-- :: Monad m
-- => Array U DIM2 Int -> Array U DIM2 Int
-- -> m (Array U DIM2 Int)
-- processArrays arr1 arr2
-- = [arr1, arr2] \`deepSeqArrays\`
-- do arr3 <- computeP $ map f arr1
arr4 < - computeP $ zipWith g arr3 arr2
return arr4
-- @
--
Applying ` deepSeqArrays ` tells the GHC simplifier that it 's ok to unbox
-- size fields and the pointers to the underlying array data at the start
-- of the function. Without this, they may be unboxed repeatedly when
-- computing elements in the result arrays, which will make your program slow.
--
-- If you INLINE @processArrays@ into the function that computes @arr1@ and @arr2@,
-- then you don't need to apply `deepSeqArrays`. This is because a pointer
-- to the underlying data will be passed directly to the consumers and never boxed.
--
-- If you're not sure, then just follow the example code above.
--
deepSeqArrays
:: Shape sh => Source r e
=> [Array r sh e] -> b -> b
# INLINE deepSeqArrays #
deepSeqArrays arrs x
= case arrs of
[] -> x
[a1]
-> a1 `deepSeqArray` x
[a1, a2]
-> a1 `deepSeqArray` a2 `deepSeqArray` x
[a1, a2, a3]
-> a1 `deepSeqArray` a2 `deepSeqArray` a3 `deepSeqArray` x
[a1, a2, a3, a4]
-> a1 `deepSeqArray` a2 `deepSeqArray` a3 `deepSeqArray` a4 `deepSeqArray` x
_ -> error "deepSeqArrays: only works for up to four arrays"
| null | https://raw.githubusercontent.com/haskell-repa/repa/c867025e99fd008f094a5b18ce4dabd29bed00ba/repa/Data/Array/Repa/Base.hs | haskell | Source -----------------------------------------------------------------------
| Class of array representations that we can read elements from.
Arrays with a representation tag, shape, and element type.
| O(1). Take the extent (size) of an array.
| O(1). Shape polymorphic indexing.
| O(1). Linear indexing into underlying, row-major, array representation.
| Ensure an array's data structure is fully evaluated.
| O(n). Convert an array to a list.
-
NOTE: this shouldn't be needed anymore, as we've made all the shape fields strict.
The implementation of this function has been hand-unwound to work for up to
to argument arrays before using them in a @compute@ or @copy@ expression.
For example:
@ processArrays
:: Monad m
=> Array U DIM2 Int -> Array U DIM2 Int
-> m (Array U DIM2 Int)
processArrays arr1 arr2
= [arr1, arr2] \`deepSeqArrays\`
do arr3 <- computeP $ map f arr1
@
size fields and the pointers to the underlying array data at the start
of the function. Without this, they may be unboxed repeatedly when
computing elements in the result arrays, which will make your program slow.
If you INLINE @processArrays@ into the function that computes @arr1@ and @arr2@,
then you don't need to apply `deepSeqArrays`. This is because a pointer
to the underlying data will be passed directly to the consumers and never boxed.
If you're not sure, then just follow the example code above.
|
module Data.Array.Repa.Base
( Source (..), (!), toList
, deepSeqArrays)
where
import Data.Array.Repa.Shape
class Source r e where
Use one of the type tags like ` D ` , ` U ` and so on for @r@ ,
one of ` DIM1 ` , ` DIM2 ` ... for @sh@.
data Array r sh e
extent :: Shape sh => Array r sh e -> sh
index, unsafeIndex
:: Shape sh => Array r sh e -> sh -> e
# INLINE index #
index arr ix = arr `linearIndex` toIndex (extent arr) ix
# INLINE unsafeIndex #
unsafeIndex arr ix = arr `unsafeLinearIndex` toIndex (extent arr) ix
linearIndex, unsafeLinearIndex
:: Shape sh => Array r sh e -> Int -> e
# INLINE unsafeLinearIndex #
unsafeLinearIndex = linearIndex
deepSeqArray
:: Shape sh =>Array r sh e -> b -> b
| O(1 ) . for ` index `
(!) :: Shape sh => Source r e => Array r sh e -> sh -> e
(!) = index
toList :: Shape sh => Source r e
=> Array r sh e -> [e]
# INLINE toList #
toList arr
= go 0
where len = size (extent arr)
go ix
| ix == len = []
| otherwise = unsafeLinearIndex arr ix : go (ix + 1)
| Apply ` deepSeqArray ` to up to four arrays .
four arrays . Putting more in the list yields ` error ` .
For functions that are /not/ marked as INLINE , you should apply ` deepSeqArrays `
arr4 < - computeP $ zipWith g arr3 arr2
return arr4
Applying ` deepSeqArrays ` tells the GHC simplifier that it 's ok to unbox
deepSeqArrays
:: Shape sh => Source r e
=> [Array r sh e] -> b -> b
# INLINE deepSeqArrays #
deepSeqArrays arrs x
= case arrs of
[] -> x
[a1]
-> a1 `deepSeqArray` x
[a1, a2]
-> a1 `deepSeqArray` a2 `deepSeqArray` x
[a1, a2, a3]
-> a1 `deepSeqArray` a2 `deepSeqArray` a3 `deepSeqArray` x
[a1, a2, a3, a4]
-> a1 `deepSeqArray` a2 `deepSeqArray` a3 `deepSeqArray` a4 `deepSeqArray` x
_ -> error "deepSeqArrays: only works for up to four arrays"
|
b6e01ae9b636ed96eccc3274079cb6218d581b7f4d491f84e948381556fc1ba7 | juspay/atlas | Transporter.hs | |
Copyright 2022 Juspay Technologies Pvt Ltd
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS ,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
See the License for the specific language governing permissions and
limitations under the License .
Module : Types . API.Transporter
Copyright : ( C ) Juspay Technologies Pvt Ltd 2019 - 2022
License : Apache 2.0 ( see the file LICENSE )
Maintainer :
Stability : experimental
Portability : non - portable
Copyright 2022 Juspay Technologies Pvt Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Module : Types.API.Transporter
Copyright : (C) Juspay Technologies Pvt Ltd 2019-2022
License : Apache 2.0 (see the file LICENSE)
Maintainer :
Stability : experimental
Portability : non-portable
-}
module Types.API.Transporter where
import Beckn.Types.Predicate
import qualified Beckn.Utils.Predicates as P
import Beckn.Utils.Validation
import Data.OpenApi (ToSchema)
import qualified Domain.Types.Organization as SO
import EulerHS.Prelude hiding (id, state)
newtype TransporterRec = TransporterRec
{ organization :: SO.OrganizationAPIEntity
}
deriving (Generic, ToJSON, ToSchema)
data UpdateTransporterReq = UpdateTransporterReq
{ name :: Maybe Text,
description :: Maybe Text,
enabled :: Maybe Bool
}
deriving (Generic, Show, FromJSON, ToSchema)
type UpdateTransporterRes = SO.OrganizationAPIEntity
validateUpdateTransporterReq :: Validate UpdateTransporterReq
validateUpdateTransporterReq UpdateTransporterReq {..} =
sequenceA_
[ validateField "name" name $ InMaybe $ MinLength 3 `And` P.name,
validateField "description" description $ InMaybe $ MinLength 3 `And` P.name
]
| null | https://raw.githubusercontent.com/juspay/atlas/e64b227dc17887fb01c2554db21c08284d18a806/app/driver-offer-bpp/src/Types/API/Transporter.hs | haskell | |
Copyright 2022 Juspay Technologies Pvt Ltd
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS ,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
See the License for the specific language governing permissions and
limitations under the License .
Module : Types . API.Transporter
Copyright : ( C ) Juspay Technologies Pvt Ltd 2019 - 2022
License : Apache 2.0 ( see the file LICENSE )
Maintainer :
Stability : experimental
Portability : non - portable
Copyright 2022 Juspay Technologies Pvt Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Module : Types.API.Transporter
Copyright : (C) Juspay Technologies Pvt Ltd 2019-2022
License : Apache 2.0 (see the file LICENSE)
Maintainer :
Stability : experimental
Portability : non-portable
-}
module Types.API.Transporter where
import Beckn.Types.Predicate
import qualified Beckn.Utils.Predicates as P
import Beckn.Utils.Validation
import Data.OpenApi (ToSchema)
import qualified Domain.Types.Organization as SO
import EulerHS.Prelude hiding (id, state)
newtype TransporterRec = TransporterRec
{ organization :: SO.OrganizationAPIEntity
}
deriving (Generic, ToJSON, ToSchema)
data UpdateTransporterReq = UpdateTransporterReq
{ name :: Maybe Text,
description :: Maybe Text,
enabled :: Maybe Bool
}
deriving (Generic, Show, FromJSON, ToSchema)
type UpdateTransporterRes = SO.OrganizationAPIEntity
validateUpdateTransporterReq :: Validate UpdateTransporterReq
validateUpdateTransporterReq UpdateTransporterReq {..} =
sequenceA_
[ validateField "name" name $ InMaybe $ MinLength 3 `And` P.name,
validateField "description" description $ InMaybe $ MinLength 3 `And` P.name
]
| |
99597117f5a76dce26bb8af1f2bb055e977c2267db20376d158652885c1e7c7f | static-analysis-engineering/codehawk | jCHCopyPropagationNoArrays.mli | = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Java Analyzer
Author : ------------------------------------------------------------------------------
The MIT License ( MIT )
Copyright ( c ) 2005 - 2020 Kestrel Technology LLC
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Java Analyzer
Author: Arnaud Venet
------------------------------------------------------------------------------
The MIT License (MIT)
Copyright (c) 2005-2020 Kestrel Technology LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================================= *)
class copy_propagation_no_arrays_t :
CHLanguage.system_int ->
object
val iterator : CHIterator.iterator_t
method simplify_code : CHLanguage.code_int -> unit
end
| null | https://raw.githubusercontent.com/static-analysis-engineering/codehawk/98ced4d5e6d7989575092df232759afc2cb851f6/CodeHawk/CHJ/jchlib/jCHCopyPropagationNoArrays.mli | ocaml | = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Java Analyzer
Author : ------------------------------------------------------------------------------
The MIT License ( MIT )
Copyright ( c ) 2005 - 2020 Kestrel Technology LLC
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Java Analyzer
Author: Arnaud Venet
------------------------------------------------------------------------------
The MIT License (MIT)
Copyright (c) 2005-2020 Kestrel Technology LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================================= *)
class copy_propagation_no_arrays_t :
CHLanguage.system_int ->
object
val iterator : CHIterator.iterator_t
method simplify_code : CHLanguage.code_int -> unit
end
| |
257d341e41a90df55de62a36048a8e215161a75ad919dab91884eeb308b0ad40 | MLstate/opalang | w_OpenSumsDirective.mli |
Copyright © 2011 MLstate
This file is part of .
is free software : you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License , version 3 , as published by
the Free Software Foundation .
is distributed in the hope that it will be useful , but WITHOUT ANY
WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE . See the GNU Affero General Public License for
more details .
You should have received a copy of the GNU Affero General Public License
along with . If not , see < / > .
Copyright © 2011 MLstate
This file is part of Opa.
Opa is free software: you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License, version 3, as published by
the Free Software Foundation.
Opa is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
more details.
You should have received a copy of the GNU Affero General Public License
along with Opa. If not, see </>.
*)
exception Open_sum_failure of bool
val open_sum_simple_type:
W_TypingEnv.t -> W_Algebra.simple_type -> W_Algebra.simple_type
| null | https://raw.githubusercontent.com/MLstate/opalang/424b369160ce693406cece6ac033d75d85f5df4f/compiler/libqmlcompil/typer_w/w_OpenSumsDirective.mli | ocaml |
Copyright © 2011 MLstate
This file is part of .
is free software : you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License , version 3 , as published by
the Free Software Foundation .
is distributed in the hope that it will be useful , but WITHOUT ANY
WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE . See the GNU Affero General Public License for
more details .
You should have received a copy of the GNU Affero General Public License
along with . If not , see < / > .
Copyright © 2011 MLstate
This file is part of Opa.
Opa is free software: you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License, version 3, as published by
the Free Software Foundation.
Opa is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
more details.
You should have received a copy of the GNU Affero General Public License
along with Opa. If not, see </>.
*)
exception Open_sum_failure of bool
val open_sum_simple_type:
W_TypingEnv.t -> W_Algebra.simple_type -> W_Algebra.simple_type
| |
16d47a545a0eec6e1fba92390697668523e29864a208f690164d56d660e24583 | ndmitchell/hlint | Fixity.hs | # LANGUAGE ViewPatterns #
module Fixity(
FixityInfo, Associativity(..),
defaultFixities,
fromFixitySig, toFixitySig, toFixity,
) where
import GHC.Generics(Associativity(..))
import GHC.Hs.Binds
import GHC.Hs.Extension
import GHC.Types.Name.Occurrence
import GHC.Types.Name.Reader
import GHC.Types.Fixity
import GHC.Types.SourceText
import GHC.Parser.Annotation
import Language.Haskell.Syntax.Extension
import Language.Haskell.GhclibParserEx.GHC.Types.Name.Reader
import Language.Haskell.GhclibParserEx.Fixity
-- Lots of things define a fixity. None define it quite right, so let's have our own type.
-- | A Fixity definition, comprising the name the fixity applies to,
-- the direction and the precedence. As an example, a source file containing:
--
> infixr 3 ` foo `
--
would create @(\"foo\ " , RightAssociative , 3)@.
type FixityInfo = (String, Associativity, Int)
fromFixitySig :: FixitySig GhcPs -> [FixityInfo]
fromFixitySig (FixitySig _ names (Fixity _ i dir)) =
[(rdrNameStr name, f dir, i) | name <- names]
where
f InfixL = LeftAssociative
f InfixR = RightAssociative
f InfixN = NotAssociative
toFixity :: FixityInfo -> (String, Fixity)
toFixity (name, dir, i) = (name, Fixity NoSourceText i $ f dir)
where
f LeftAssociative = InfixL
f RightAssociative = InfixR
f NotAssociative = InfixN
fromFixity :: (String, Fixity) -> FixityInfo
fromFixity (name, Fixity _ i dir) = (name, assoc dir, i)
where
assoc dir = case dir of
InfixL -> LeftAssociative
InfixR -> RightAssociative
InfixN -> NotAssociative
toFixitySig :: FixityInfo -> FixitySig GhcPs
toFixitySig (toFixity -> (name, x)) = FixitySig noExtField [noLocA $ mkRdrUnqual (mkVarOcc name)] x
defaultFixities :: [FixityInfo]
defaultFixities = map fromFixity $ customFixities ++ baseFixities ++ lensFixities ++ otherFixities
List as provided at .
lensFixities :: [(String, Fixity)]
lensFixities = concat
[ infixr_ 4 ["%%@~","<%@~","%%~","<+~","<*~","<-~","<//~","<^~","<^^~","<**~"]
, infix_ 4 ["%%@=","<%@=","%%=","<+=","<*=","<-=","<//=","<^=","<^^=","<**="]
, infixr_ 2 ["<<~"]
, infixr_ 9 ["#."]
, infixl_ 8 [".#"]
, infixr_ 8 ["^!","^@!"]
, infixl_ 1 ["&","<&>","??"]
, infixl_ 8 ["^.","^@."]
, infixr_ 9 ["<.>","<.",".>"]
, infixr_ 4 ["%@~",".~","+~","*~","-~","//~","^~","^^~","**~","&&~","<>~","||~","%~"]
, infix_ 4 ["%@=",".=","+=","*=","-=","//=","^=","^^=","**=","&&=","<>=","||=","%="]
, infixr_ 2 ["<~"]
, infixr_ 2 ["`zoom`","`magnify`"]
, infixl_ 8 ["^..","^?","^?!","^@..","^@?","^@?!"]
, infixl_ 8 ["^#"]
, infixr_ 4 ["<#~","#~","#%~","<#%~","#%%~"]
, infix_ 4 ["<#=","#=","#%=","<#%=","#%%="]
, infixl_ 9 [":>"]
, infixr_ 4 ["</>~","<</>~","<.>~","<<.>~"]
, infix_ 4 ["</>=","<</>=","<.>=","<<.>="]
, infixr_ 4 [".|.~",".&.~","<.|.~","<.&.~"]
, infix_ 4 [".|.=",".&.=","<.|.=","<.&.="]
]
otherFixities :: [(String, Fixity)]
otherFixities = concat
hspec
[ infix_ 1 ["shouldBe","shouldSatisfy","shouldStartWith","shouldEndWith","shouldContain","shouldMatchList"
,"shouldReturn","shouldNotBe","shouldNotSatisfy","shouldNotContain","shouldNotReturn","shouldThrow"]
-- quickcheck
, infixr_ 0 ["==>"]
, infix_ 4 ["==="]
-- esqueleto
, infix_ 4 ["==."]
-- lattices
\/
, infixr_ 6 ["/\\"] -- /\
]
customFixities :: [(String, Fixity)]
customFixities =
infixl_ 1 ["`on`"]
-- See
otherwise GTK apps using ` on ` at a different fixity have
-- spurious warnings.
| null | https://raw.githubusercontent.com/ndmitchell/hlint/e6a6df4e58f9a9866d69187f6c626883d22bb306/src/Fixity.hs | haskell | Lots of things define a fixity. None define it quite right, so let's have our own type.
| A Fixity definition, comprising the name the fixity applies to,
the direction and the precedence. As an example, a source file containing:
quickcheck
esqueleto
lattices
/\
See
spurious warnings. | # LANGUAGE ViewPatterns #
module Fixity(
FixityInfo, Associativity(..),
defaultFixities,
fromFixitySig, toFixitySig, toFixity,
) where
import GHC.Generics(Associativity(..))
import GHC.Hs.Binds
import GHC.Hs.Extension
import GHC.Types.Name.Occurrence
import GHC.Types.Name.Reader
import GHC.Types.Fixity
import GHC.Types.SourceText
import GHC.Parser.Annotation
import Language.Haskell.Syntax.Extension
import Language.Haskell.GhclibParserEx.GHC.Types.Name.Reader
import Language.Haskell.GhclibParserEx.Fixity
> infixr 3 ` foo `
would create @(\"foo\ " , RightAssociative , 3)@.
type FixityInfo = (String, Associativity, Int)
fromFixitySig :: FixitySig GhcPs -> [FixityInfo]
fromFixitySig (FixitySig _ names (Fixity _ i dir)) =
[(rdrNameStr name, f dir, i) | name <- names]
where
f InfixL = LeftAssociative
f InfixR = RightAssociative
f InfixN = NotAssociative
toFixity :: FixityInfo -> (String, Fixity)
toFixity (name, dir, i) = (name, Fixity NoSourceText i $ f dir)
where
f LeftAssociative = InfixL
f RightAssociative = InfixR
f NotAssociative = InfixN
fromFixity :: (String, Fixity) -> FixityInfo
fromFixity (name, Fixity _ i dir) = (name, assoc dir, i)
where
assoc dir = case dir of
InfixL -> LeftAssociative
InfixR -> RightAssociative
InfixN -> NotAssociative
toFixitySig :: FixityInfo -> FixitySig GhcPs
toFixitySig (toFixity -> (name, x)) = FixitySig noExtField [noLocA $ mkRdrUnqual (mkVarOcc name)] x
defaultFixities :: [FixityInfo]
defaultFixities = map fromFixity $ customFixities ++ baseFixities ++ lensFixities ++ otherFixities
List as provided at .
lensFixities :: [(String, Fixity)]
lensFixities = concat
[ infixr_ 4 ["%%@~","<%@~","%%~","<+~","<*~","<-~","<//~","<^~","<^^~","<**~"]
, infix_ 4 ["%%@=","<%@=","%%=","<+=","<*=","<-=","<//=","<^=","<^^=","<**="]
, infixr_ 2 ["<<~"]
, infixr_ 9 ["#."]
, infixl_ 8 [".#"]
, infixr_ 8 ["^!","^@!"]
, infixl_ 1 ["&","<&>","??"]
, infixl_ 8 ["^.","^@."]
, infixr_ 9 ["<.>","<.",".>"]
, infixr_ 4 ["%@~",".~","+~","*~","-~","//~","^~","^^~","**~","&&~","<>~","||~","%~"]
, infix_ 4 ["%@=",".=","+=","*=","-=","//=","^=","^^=","**=","&&=","<>=","||=","%="]
, infixr_ 2 ["<~"]
, infixr_ 2 ["`zoom`","`magnify`"]
, infixl_ 8 ["^..","^?","^?!","^@..","^@?","^@?!"]
, infixl_ 8 ["^#"]
, infixr_ 4 ["<#~","#~","#%~","<#%~","#%%~"]
, infix_ 4 ["<#=","#=","#%=","<#%=","#%%="]
, infixl_ 9 [":>"]
, infixr_ 4 ["</>~","<</>~","<.>~","<<.>~"]
, infix_ 4 ["</>=","<</>=","<.>=","<<.>="]
, infixr_ 4 [".|.~",".&.~","<.|.~","<.&.~"]
, infix_ 4 [".|.=",".&.=","<.|.=","<.&.="]
]
otherFixities :: [(String, Fixity)]
otherFixities = concat
hspec
[ infix_ 1 ["shouldBe","shouldSatisfy","shouldStartWith","shouldEndWith","shouldContain","shouldMatchList"
,"shouldReturn","shouldNotBe","shouldNotSatisfy","shouldNotContain","shouldNotReturn","shouldThrow"]
, infixr_ 0 ["==>"]
, infix_ 4 ["==="]
, infix_ 4 ["==."]
\/
]
customFixities :: [(String, Fixity)]
customFixities =
infixl_ 1 ["`on`"]
otherwise GTK apps using ` on ` at a different fixity have
|
68a61731a20a17204c6ddabb0ba5d93e4ffb9436c1d1e4b7b20a1cfa2d3eccf9 | metosin/muuntaja | core_test.clj | (ns muuntaja.core-test
(:require [clojure.test :refer :all]
[muuntaja.core :as m]
[clojure.string :as str]
[muuntaja.format.core :as core]
[muuntaja.format.form :as form-format]
[muuntaja.format.cheshire :as cheshire-format]
[muuntaja.format.msgpack :as msgpack-format]
[muuntaja.format.yaml :as yaml-format]
[jsonista.core :as j]
[clojure.java.io :as io]
[muuntaja.protocols :as protocols]
[muuntaja.util :as util])
(:import (java.nio.charset Charset)
(java.io FileInputStream)
(java.nio.file Files)))
(defn set-jvm-default-charset!
  "Forces the JVM default charset to `charset` (e.g. \"UTF-16\").
  Sets the `file.encoding` system property, then uses reflection to null
  out the cached private `Charset/defaultCharset` field so that the next
  call to `(Charset/defaultCharset)` re-reads the property. Returns nil.
  Test-only hack: relies on JDK internals."
  [charset]
  (System/setProperty "file.encoding" charset)
  (let [cached-field (.getDeclaredField Charset "defaultCharset")]
    (.setAccessible cached-field true)
    (.set cached-field nil nil))
  nil)
(defmacro with-default-charset
  "Evaluates `body` with the JVM default charset temporarily switched to
  `charset`, restoring the original default afterwards even if `body`
  throws."
  [charset & body]
  `(let [original-charset# (str (Charset/defaultCharset))]
     (try
       (set-jvm-default-charset! ~charset)
       ~@body
       (finally
         (set-jvm-default-charset! original-charset#)))))
;; Muuntaja instance used throughout these tests: the default options
;; (edn, json, transit+json, transit+msgpack) extended with the
;; form-urlencoded, msgpack and yaml formats, plus a second cheshire-backed
;; JSON format registered under "application/json+cheshire".
(def m
  (m/create
    (-> m/default-options
        (m/install form-format/format)
        (m/install msgpack-format/format)
        (m/install yaml-format/format)
        (m/install cheshire-format/format "application/json+cheshire"))))
;; End-to-end coverage of the public Muuntaja API: format registration,
;; encode/decode round-trips, charset handling, empty-input behavior and
;; option overriding. Code is intentionally left untouched; only comments
;; are added.
(deftest core-test
  (testing "muuntaja?"
    (is (m/muuntaja? m)))
  ;; the default instance supports exactly these four formats, symmetrically
  (testing "default encodes & decodes"
    (is (= #{"application/edn"
             "application/json"
             "application/transit+json"
             "application/transit+msgpack"}
           (m/encodes m/instance)
           (m/decodes m/instance))))
  ;; the customized instance `m` adds cheshire-json, msgpack, form and yaml
  (testing "custom encodes & decodes"
    (is (= #{"application/edn"
             "application/json"
             "application/json+cheshire"
             "application/msgpack"
             "application/transit+json"
             "application/transit+msgpack"
             "application/x-www-form-urlencoded"
             "application/x-yaml"}
           (m/encodes m)
           (m/decodes m))))
  ;; encode/decode work both via the arity taking the default instance
  ;; implicitly and via an explicit Muuntaja instance
  (testing "encode & decode"
    (let [data {:kikka 42}]
      (testing "with default instance"
        (is (= "{\"kikka\":42}" (slurp (m/encode "application/json" data))))
        (is (= data (m/decode "application/json" (m/encode "application/json" data)))))
      (testing "with muuntaja instance"
        (is (= "{\"kikka\":42}" (slurp (m/encode m "application/json" data))))
        (is (= data (m/decode m "application/json" (m/encode m "application/json" data)))))))
  ;; every installed format must round-trip nested data unchanged
  (testing "symmetic encode + decode for all formats"
    (let [data {:kikka 42, :childs {:facts [1.2 true {:so "nested"}]}}]
      (are [format]
        (= data (m/decode m format (m/encode m format data)))
        "application/json"
        "application/json+cheshire"
        "application/edn"
        "application/x-yaml"
        "application/msgpack"
        "application/transit+json"
        "application/transit+msgpack")))
  ;; sanity checks for the JVM default charset and the reflection-based
  ;; override helper defined at the top of this namespace
  (testing "charsets"
    (testing "default is UTF-8"
      (is (= "UTF-8" (str (Charset/defaultCharset)))))
    (testing "default can be changed"
      (with-default-charset
        "UTF-16"
        (is (= "UTF-16" (str (Charset/defaultCharset)))))))
  ;; empty streams / nil input: by default decoding yields nil, but with
  ;; :allow-empty-input? false the decoder may throw instead
  (testing "on empty input"
    (let [empty (fn [] (util/byte-stream (byte-array 0)))
          m2 (m/create
               (-> m/default-options
                   (m/install msgpack-format/format)
                   (m/install yaml-format/format)
                   (m/install cheshire-format/format "application/json+cheshire")
                   (assoc :allow-empty-input? false)))]
      (testing "by default - nil is returned for empty stream"
        (is (nil? (m/decode m "application/transit+json" (empty)))))
      (testing "by default - nil input returns nil stream"
        (is (nil? (m/decode m "application/transit+json" nil))))
      (testing "optionally decoder can decide to throw"
        (is (thrown? Exception (m/decode m2 "application/transit+json" (empty))))
        (is (thrown? Exception (m/decode m2 "application/transit+json" nil))))
      (testing "all formats"
        (testing "with :allow-empty-input? false"
          ;; cheshire and yaml tolerate empty input even in strict mode
          (testing "cheshire json & yaml return nil"
            (are [format]
              (= nil (m/decode m2 format (empty)))
              "application/json+cheshire"
              "application/x-yaml"))
          (testing "others fail"
            (are [format]
              (thrown-with-msg? Exception #"Malformed" (m/decode m2 format (empty)))
              "application/edn"
              "application/json"
              "application/msgpack"
              "application/transit+json"
              "application/transit+msgpack")))
        (testing "with defaults"
          (testing "all formats return nil"
            (are [format]
              (= nil (m/decode m format (empty)))
              "application/json"
              "application/json+cheshire"
              "application/edn"
              "application/x-yaml"
              "application/msgpack"
              "application/transit+json"
              "application/transit+msgpack"))))))
  ;; NOTE: the garbled-looking literals below are intentional: they are the
  ;; expected bytes of non-ASCII text encoded as ISO-8859-1 (viewed through
  ;; this UTF-8 source file)
  (testing "non-binary-formats encoding with charsets"
    (let [data {:fée "böz"}
          iso-encoded #(slurp (m/encode m % data "ISO-8859-1"))]
      (testing "application/json & application/edn use the given charset"
        (is (= "{\"f�e\":\"b�z\"}" (iso-encoded "application/json")))
        (is (= "{\"f�e\":\"b�z\"}" (iso-encoded "application/json+cheshire")))
        (is (= "{:f�e \"b�z\"}" (iso-encoded "application/edn"))))
      ;; yaml and transit ignore the requested charset and use the platform
      ;; default instead, which breaks transit under a non-UTF-8 default
      (testing "application/x-yaml & application/transit+json use the platform charset"
        (testing "utf-8"
          (is (= "{fée: böz}\n" (iso-encoded "application/x-yaml")))
          (is (= "[\"^ \",\"~:fée\",\"böz\"]" (iso-encoded "application/transit+json"))))
        (testing "when default charset is ISO-8859-1"
          (with-default-charset
            "ISO-8859-1"
            (testing "application/x-yaml works"
              (is (= "{f�e: b�z}\n" (iso-encoded "application/x-yaml"))))
            (testing "application/transit IS BROKEN"
              (is (not= "[\"^ \",\"~:f�e\",\"b�z\"]" (iso-encoded "application/transit+json")))))))))
  ;; as long as the same charset is used for both directions, every format
  ;; round-trips non-ASCII data
  (testing "all formats handle different charsets symmetrically"
    (let [data {:fée "böz"}
          encode-decode #(as-> data $
                           (m/encode m % $ "ISO-8859-1")
                           (m/decode m % $ "ISO-8859-1"))]
      (are [format]
        (= data (encode-decode format))
        "application/json"
        "application/json+cheshire"
        "application/edn"
        ;; platform charset
        "application/x-yaml"
        ;; binary
        "application/msgpack"
        ;; platform charset
        "application/transit+json"
        ;; binary
        "application/transit+msgpack")))
  ;; encoder/decoder return reusable single-format functions; unknown
  ;; formats yield nil rather than throwing
  (testing "encoder & decoder"
    (let [m (m/create)
          data {:kikka 42}
          json-encoder (m/encoder m "application/json")
          json-decoder (m/decoder m "application/json")]
      (is (= "{\"kikka\":42}" (slurp (json-encoder data))))
      (is (= data (-> data json-encoder json-decoder)))
      (testing "invalid encoder /decoder returns nil"
        (is (nil? (m/encoder m "application/INVALID")))
        (is (nil? (m/decoder m "application/INVALID"))))
      (testing "decode exception"
        (is (thrown?
              Exception
              (json-decoder "{:invalid :syntax}"))))))
  ;; a custom format is a map of :name, :decoder and :encoder implementing
  ;; the muuntaja.format.core protocols
  (testing "adding new format"
    (let [name "application/upper"
          upper-case-format {:name name
                             :decoder (reify
                                        core/Decode
                                        (decode [_ data _]
                                          (str/lower-case (slurp data))))
                             :encoder (reify
                                        core/EncodeToBytes
                                        (encode-to-bytes [_ data _]
                                          (.getBytes (str/upper-case data))))}
          m (m/create (m/install m/default-options upper-case-format))
          encode (m/encoder m name)
          decode (m/decoder m name)
          data "olipa kerran avaruus"]
      (is (= "OLIPA KERRAN AVARUUS" (slurp (encode data))))
      (is (= data (decode (encode data))))))
  ;; a plain fn (not a protocol implementation) is rejected at create time
  (testing "invalid format fails fast"
    (let [upper-case-format {:name "application/upper"
                             :decoder (fn [_ data _]
                                        (str/lower-case (slurp data)))}]
      (is (thrown? Exception (m/create (m/install m/default-options upper-case-format))))))
  ;; declaring :return :output-stream while only implementing EncodeToBytes
  ;; is also rejected at create time
  (testing "implementing wrong protocol fails fast"
    (let [upper-case-format {:name "application/upper"
                             :return :output-stream
                             :encoder (reify
                                        core/EncodeToBytes
                                        (encode-to-bytes [_ data _]
                                          (.getBytes (str/upper-case data))))}]
      (is (thrown? Exception (m/create (m/install m/default-options upper-case-format))))))
  (testing "setting non-existing format as default throws exception"
    (is (thrown?
          Exception
          (m/create
            (-> m/default-options
                (assoc :default-format "kikka"))))))
  (testing "selecting non-existing format as default throws exception"
    (is (thrown?
          Exception
          (m/create
            (-> m/default-options
                (m/select-formats ["kikka"]))))))
  ;; adapter options can be given under :decoder-opts or the shared :opts
  ;; key, including a full pre-built jsonista object-mapper
  (testing "overriding adapter options"
    (let [decode-json-kw (m/decoder
                           (m/create)
                           "application/json")
          decode-json (m/decoder
                        (m/create
                          (assoc-in
                            m/default-options
                            [:formats "application/json" :decoder-opts]
                            {:decode-key-fn false}))
                        "application/json")
          decode-json2 (m/decoder
                         (m/create
                           (assoc-in
                             m/default-options
                             [:formats "application/json" :opts]
                             {:decode-key-fn false}))
                         "application/json")
          decode-json3 (m/decoder
                         (m/create
                           (assoc-in
                             m/default-options
                             [:formats "application/json" :opts]
                             {:mapper (j/object-mapper {:decode-key-fn false})}))
                         "application/json")]
      (is (= {:kikka true} (decode-json-kw "{\"kikka\":true}")))
      (is (= {"kikka" true} (decode-json "{\"kikka\":true}")))
      (is (= {"kikka" true} (decode-json2 "{\"kikka\":true}")))
      (is (= {"kikka" true} (decode-json3 "{\"kikka\":true}")))))
  ;; options for unknown formats fail at create time, not at decode time
  (testing "overriding invalid adapter options fails"
    (is (thrown?
          Exception
          (m/create
            (-> m/default-options
                (assoc-in
                  [:formats "application/jsonz" :encoder-opts]
                  {:keywords? false})))))
    (is (thrown?
          Exception
          (m/create
            (-> m/default-options
                (assoc-in
                  [:formats "application/jsonz" :decoder-opts]
                  {:keywords? false})))))))
(deftest form-data
  ;; application/x-www-form-urlencoded: encoding flattens maps (repeating
  ;; the key for sequential values), decoding url-unescapes and groups
  ;; repeated keys back into vectors.
  (let [form "application/x-www-form-urlencoded"]
    (testing "basic form encoding"
      (is (= "kikka=42&childs=not&childs=so&childs=nested"
             (slurp (m/encode m form {:kikka 42, :childs ['not "so" "nested"]})))))
    (testing "basic form encoding with string keywords"
      (is (= "kikka=42&childs=not&childs=so&childs=nested"
             (slurp (m/encode m form {"kikka" 42, "childs" ['not "so" "nested"]})))))
    (testing "basic form decoding"
      (is (= {:kikka "42", :childs ["not" "so" "nested=but messed up"]}
             (m/decode m form "kikka=42&childs=not&childs=so&childs=nested=but+messed+up"))))
    ;; keys stay as strings when :decode-key-fn is overridden to identity
    (testing "form decoding with different decode-key-fn"
      (is (= {"kikka" "42", "childs" ["not" "so" "nested=but messed up"]}
             (-> (m/options m)
                 (assoc-in [:formats "application/x-www-form-urlencoded" :decoder-opts :decode-key-fn] identity)
                 (m/create)
                 (m/decode form "kikka=42&childs=not&childs=so&childs=nested=but+messed+up")))))))
(deftest cheshire-json-options
(testing "pre 0.6.0 options fail at creation time"
(testing ":bigdecimals?"
(is (thrown-with-msg?
AssertionError
#"default JSON formatter has changed"
(m/create
(-> m/default-options
(assoc-in
[:formats "application/json" :decoder-opts]
{:bigdecimals? false}))))))
(testing ":key-fn"
(is (thrown-with-msg?
Error
#"default JSON formatter has changed"
(m/create
(-> m/default-options
(assoc-in
[:formats "application/json" :decoder-opts]
{:key-fn false}))))))))
(deftest slurp-test
(let [file (io/file "dev-resources/json10b.json")
expected (slurp file)]
(testing "bytes"
(is (= expected (m/slurp (Files/readAllBytes (.toPath file))))))
(testing "File"
(is (= expected (m/slurp file))))
(testing "InputStream"
(is (= expected (m/slurp (FileInputStream. file)))))
(testing "StreamableResponse"
(is (= expected (m/slurp (protocols/->StreamableResponse (partial io/copy file))))))
(testing "String"
(is (= expected (m/slurp expected))))
(testing "nil"
(is (= nil (m/slurp nil))))))
(deftest encode-to-byte-stream-test
(testing "symmetic encode + decode for all formats"
(let [m (m/create
(-> m/default-options
(assoc :return :output-stream)
(m/install form-format/format)
(m/install msgpack-format/format)
(m/install yaml-format/format)
(m/install cheshire-format/format "application/json+cheshire")))]
(let [data {:kikka 42, :childs {:facts [1.2 true {:so "nested"}]}}]
(are [format]
(= data (m/decode m format (m/encode m format data)))
"application/json"
"application/json+cheshire"
"application/edn"
"application/x-yaml"
"application/msgpack"
"application/transit+json"
"application/transit+msgpack")))))
| null | https://raw.githubusercontent.com/metosin/muuntaja/83da35f35518aaa02ef4d1ca6bbd007b781ebf84/test/muuntaja/core_test.clj | clojure | platform charset
binary
platform charset
binary | (ns muuntaja.core-test
(:require [clojure.test :refer :all]
[muuntaja.core :as m]
[clojure.string :as str]
[muuntaja.format.core :as core]
[muuntaja.format.form :as form-format]
[muuntaja.format.cheshire :as cheshire-format]
[muuntaja.format.msgpack :as msgpack-format]
[muuntaja.format.yaml :as yaml-format]
[jsonista.core :as j]
[clojure.java.io :as io]
[muuntaja.protocols :as protocols]
[muuntaja.util :as util])
(:import (java.nio.charset Charset)
(java.io FileInputStream)
(java.nio.file Files)))
(defn set-jvm-default-charset! [charset]
(System/setProperty "file.encoding" charset)
(doto
(.getDeclaredField Charset "defaultCharset")
(.setAccessible true)
(.set nil nil))
nil)
(defmacro with-default-charset [charset & body]
`(let [old-charset# (str (Charset/defaultCharset))]
(try
(set-jvm-default-charset! ~charset)
~@body
(finally
(set-jvm-default-charset! old-charset#)))))
(def m
(m/create
(-> m/default-options
(m/install form-format/format)
(m/install msgpack-format/format)
(m/install yaml-format/format)
(m/install cheshire-format/format "application/json+cheshire"))))
(deftest core-test
(testing "muuntaja?"
(is (m/muuntaja? m)))
(testing "default encodes & decodes"
(is (= #{"application/edn"
"application/json"
"application/transit+json"
"application/transit+msgpack"}
(m/encodes m/instance)
(m/decodes m/instance))))
(testing "custom encodes & decodes"
(is (= #{"application/edn"
"application/json"
"application/json+cheshire"
"application/msgpack"
"application/transit+json"
"application/transit+msgpack"
"application/x-www-form-urlencoded"
"application/x-yaml"}
(m/encodes m)
(m/decodes m))))
(testing "encode & decode"
(let [data {:kikka 42}]
(testing "with default instance"
(is (= "{\"kikka\":42}" (slurp (m/encode "application/json" data))))
(is (= data (m/decode "application/json" (m/encode "application/json" data)))))
(testing "with muuntaja instance"
(is (= "{\"kikka\":42}" (slurp (m/encode m "application/json" data))))
(is (= data (m/decode m "application/json" (m/encode m "application/json" data)))))))
(testing "symmetic encode + decode for all formats"
(let [data {:kikka 42, :childs {:facts [1.2 true {:so "nested"}]}}]
(are [format]
(= data (m/decode m format (m/encode m format data)))
"application/json"
"application/json+cheshire"
"application/edn"
"application/x-yaml"
"application/msgpack"
"application/transit+json"
"application/transit+msgpack")))
(testing "charsets"
(testing "default is UTF-8"
(is (= "UTF-8" (str (Charset/defaultCharset)))))
(testing "default can be changed"
(with-default-charset
"UTF-16"
(is (= "UTF-16" (str (Charset/defaultCharset)))))))
(testing "on empty input"
(let [empty (fn [] (util/byte-stream (byte-array 0)))
m2 (m/create
(-> m/default-options
(m/install msgpack-format/format)
(m/install yaml-format/format)
(m/install cheshire-format/format "application/json+cheshire")
(assoc :allow-empty-input? false)))]
(testing "by default - nil is returned for empty stream"
(is (nil? (m/decode m "application/transit+json" (empty)))))
(testing "by default - nil input returns nil stream"
(is (nil? (m/decode m "application/transit+json" nil))))
(testing "optionally decoder can decide to throw"
(is (thrown? Exception (m/decode m2 "application/transit+json" (empty))))
(is (thrown? Exception (m/decode m2 "application/transit+json" nil))))
(testing "all formats"
(testing "with :allow-empty-input? false"
(testing "cheshire json & yaml return nil"
(are [format]
(= nil (m/decode m2 format (empty)))
"application/json+cheshire"
"application/x-yaml"))
(testing "others fail"
(are [format]
(thrown-with-msg? Exception #"Malformed" (m/decode m2 format (empty)))
"application/edn"
"application/json"
"application/msgpack"
"application/transit+json"
"application/transit+msgpack")))
(testing "with defaults"
(testing "all formats return nil"
(are [format]
(= nil (m/decode m format (empty)))
"application/json"
"application/json+cheshire"
"application/edn"
"application/x-yaml"
"application/msgpack"
"application/transit+json"
"application/transit+msgpack"))))))
(testing "non-binary-formats encoding with charsets"
(let [data {:fée "böz"}
iso-encoded #(slurp (m/encode m % data "ISO-8859-1"))]
(testing "application/json & application/edn use the given charset"
(is (= "{\"f�e\":\"b�z\"}" (iso-encoded "application/json")))
(is (= "{\"f�e\":\"b�z\"}" (iso-encoded "application/json+cheshire")))
(is (= "{:f�e \"b�z\"}" (iso-encoded "application/edn"))))
(testing "application/x-yaml & application/transit+json use the platform charset"
(testing "utf-8"
(is (= "{fée: böz}\n" (iso-encoded "application/x-yaml")))
(is (= "[\"^ \",\"~:fée\",\"böz\"]" (iso-encoded "application/transit+json"))))
(testing "when default charset is ISO-8859-1"
(with-default-charset
"ISO-8859-1"
(testing "application/x-yaml works"
(is (= "{f�e: b�z}\n" (iso-encoded "application/x-yaml"))))
(testing "application/transit IS BROKEN"
(is (not= "[\"^ \",\"~:f�e\",\"b�z\"]" (iso-encoded "application/transit+json")))))))))
(testing "all formats handle different charsets symmetrically"
(let [data {:fée "böz"}
encode-decode #(as-> data $
(m/encode m % $ "ISO-8859-1")
(m/decode m % $ "ISO-8859-1"))]
(are [format]
(= data (encode-decode format))
"application/json"
"application/json+cheshire"
"application/edn"
"application/x-yaml"
"application/msgpack"
"application/transit+json"
"application/transit+msgpack")))
(testing "encoder & decoder"
(let [m (m/create)
data {:kikka 42}
json-encoder (m/encoder m "application/json")
json-decoder (m/decoder m "application/json")]
(is (= "{\"kikka\":42}" (slurp (json-encoder data))))
(is (= data (-> data json-encoder json-decoder)))
(testing "invalid encoder /decoder returns nil"
(is (nil? (m/encoder m "application/INVALID")))
(is (nil? (m/decoder m "application/INVALID"))))
(testing "decode exception"
(is (thrown?
Exception
(json-decoder "{:invalid :syntax}"))))))
(testing "adding new format"
(let [name "application/upper"
upper-case-format {:name name
:decoder (reify
core/Decode
(decode [_ data _]
(str/lower-case (slurp data))))
:encoder (reify
core/EncodeToBytes
(encode-to-bytes [_ data _]
(.getBytes (str/upper-case data))))}
m (m/create (m/install m/default-options upper-case-format))
encode (m/encoder m name)
decode (m/decoder m name)
data "olipa kerran avaruus"]
(is (= "OLIPA KERRAN AVARUUS" (slurp (encode data))))
(is (= data (decode (encode data))))))
(testing "invalid format fails fast"
(let [upper-case-format {:name "application/upper"
:decoder (fn [_ data _]
(str/lower-case (slurp data)))}]
(is (thrown? Exception (m/create (m/install m/default-options upper-case-format))))))
(testing "implementing wrong protocol fails fast"
(let [upper-case-format {:name "application/upper"
:return :output-stream
:encoder (reify
core/EncodeToBytes
(encode-to-bytes [_ data _]
(.getBytes (str/upper-case data))))}]
(is (thrown? Exception (m/create (m/install m/default-options upper-case-format))))))
(testing "setting non-existing format as default throws exception"
(is (thrown?
Exception
(m/create
(-> m/default-options
(assoc :default-format "kikka"))))))
(testing "selecting non-existing format as default throws exception"
(is (thrown?
Exception
(m/create
(-> m/default-options
(m/select-formats ["kikka"]))))))
(testing "overriding adapter options"
(let [decode-json-kw (m/decoder
(m/create)
"application/json")
decode-json (m/decoder
(m/create
(assoc-in
m/default-options
[:formats "application/json" :decoder-opts]
{:decode-key-fn false}))
"application/json")
decode-json2 (m/decoder
(m/create
(assoc-in
m/default-options
[:formats "application/json" :opts]
{:decode-key-fn false}))
"application/json")
decode-json3 (m/decoder
(m/create
(assoc-in
m/default-options
[:formats "application/json" :opts]
{:mapper (j/object-mapper {:decode-key-fn false})}))
"application/json")]
(is (= {:kikka true} (decode-json-kw "{\"kikka\":true}")))
(is (= {"kikka" true} (decode-json "{\"kikka\":true}")))
(is (= {"kikka" true} (decode-json2 "{\"kikka\":true}")))
(is (= {"kikka" true} (decode-json3 "{\"kikka\":true}")))))
(testing "overriding invalid adapter options fails"
(is (thrown?
Exception
(m/create
(-> m/default-options
(assoc-in
[:formats "application/jsonz" :encoder-opts]
{:keywords? false})))))
(is (thrown?
Exception
(m/create
(-> m/default-options
(assoc-in
[:formats "application/jsonz" :decoder-opts]
{:keywords? false})))))))
(deftest form-data
(testing "basic form encoding"
(let [data {:kikka 42, :childs ['not "so" "nested"]}
format "application/x-www-form-urlencoded"]
(is (= "kikka=42&childs=not&childs=so&childs=nested"
(slurp (m/encode m format data))))))
(testing "basic form encoding with string keywords"
(let [data {"kikka" 42, "childs" ['not "so" "nested"]}
format "application/x-www-form-urlencoded"]
(is (= "kikka=42&childs=not&childs=so&childs=nested"
(slurp (m/encode m format data))))))
(testing "basic form decoding"
(let [data "kikka=42&childs=not&childs=so&childs=nested=but+messed+up"
format "application/x-www-form-urlencoded"]
(is (= {:kikka "42", :childs ["not" "so" "nested=but messed up"]}
(m/decode m format data)))))
(testing "form decoding with different decode-key-fn"
(let [data "kikka=42&childs=not&childs=so&childs=nested=but+messed+up"
format "application/x-www-form-urlencoded"]
(is (= {"kikka" "42", "childs" ["not" "so" "nested=but messed up"]}
(-> (m/options m)
(assoc-in [:formats "application/x-www-form-urlencoded" :decoder-opts :decode-key-fn] identity)
(m/create)
(m/decode format data)))))))
(deftest cheshire-json-options
(testing "pre 0.6.0 options fail at creation time"
(testing ":bigdecimals?"
(is (thrown-with-msg?
AssertionError
#"default JSON formatter has changed"
(m/create
(-> m/default-options
(assoc-in
[:formats "application/json" :decoder-opts]
{:bigdecimals? false}))))))
(testing ":key-fn"
(is (thrown-with-msg?
Error
#"default JSON formatter has changed"
(m/create
(-> m/default-options
(assoc-in
[:formats "application/json" :decoder-opts]
{:key-fn false}))))))))
(deftest slurp-test
(let [file (io/file "dev-resources/json10b.json")
expected (slurp file)]
(testing "bytes"
(is (= expected (m/slurp (Files/readAllBytes (.toPath file))))))
(testing "File"
(is (= expected (m/slurp file))))
(testing "InputStream"
(is (= expected (m/slurp (FileInputStream. file)))))
(testing "StreamableResponse"
(is (= expected (m/slurp (protocols/->StreamableResponse (partial io/copy file))))))
(testing "String"
(is (= expected (m/slurp expected))))
(testing "nil"
(is (= nil (m/slurp nil))))))
(deftest encode-to-byte-stream-test
(testing "symmetic encode + decode for all formats"
(let [m (m/create
(-> m/default-options
(assoc :return :output-stream)
(m/install form-format/format)
(m/install msgpack-format/format)
(m/install yaml-format/format)
(m/install cheshire-format/format "application/json+cheshire")))]
(let [data {:kikka 42, :childs {:facts [1.2 true {:so "nested"}]}}]
(are [format]
(= data (m/decode m format (m/encode m format data)))
"application/json"
"application/json+cheshire"
"application/edn"
"application/x-yaml"
"application/msgpack"
"application/transit+json"
"application/transit+msgpack")))))
|
9f47de48cae444f0c161b475b7adb3765f16075d8d5b2aa5b018cd127ace221f | binghe/cl-net-snmp | message.lisp | ;;;; -*- Mode: Lisp -*-
$ Id$
(in-package :snmp)
(defclass message ()
((session :type session
:initarg :session
:accessor session-of)
(pdu :type pdu
:initarg :pdu
:accessor pdu-of)
(context :type string
:initarg :context
:accessor context-of))
(:documentation "SNMP message base class"))
;;; SNMPv1 and SNMPv2c
(defclass v1-message (message) ()
(:documentation "Community-based SNMP v1 Message"))
(defclass v2c-message (v1-message) ()
(:documentation "Community-based SNMP v2c Message"))
(defmethod ber-encode ((message v1-message))
(ber-encode (list (version-of (session-of message))
(community-of (session-of message))
(pdu-of message))))
(defgeneric decode-message (session stream))
(defmethod decode-message ((s session) (source t))
(error "Unknown message source: ~A" source))
(defmethod decode-message ((s session) (data sequence))
(let ((message-list (coerce (ber-decode data) 'list)))
(decode-message s message-list)))
(defmethod decode-message ((s session) (stream stream))
(let ((message-list (coerce (ber-decode stream) 'list)))
(decode-message s message-list)))
(defmethod decode-message ((s v1-session) (message-list list))
(destructuring-bind (version community pdu) message-list
(declare (ignore version community))
(make-instance 'v1-message :session s :pdu pdu)))
(defmethod decode-message ((s v2c-session) (message-list list))
(destructuring-bind (version community pdu) message-list
(declare (ignore version community))
(make-instance 'v2c-message :session s :pdu pdu)))
;;; SNMP v3
(defclass v3-message (message)
start msgID must be big , or net - snmp can not decode our message
((message-id-counter :type (unsigned-byte 32)
:initform 0
:allocation :class)
(message-id :type (unsigned-byte 32)
:initarg :id
:accessor message-id-of)
;; Report flag, for SNMP report use.
(report-flag :type boolean
:initform nil
:initarg :report
:accessor report-flag-of))
(:documentation "User-based SNMP v3 Message"))
(defmethod generate-message-id ((message v3-message))
(with-slots (message-id-counter) message
(portable-threads:atomic-incf message-id-counter)
(the (unsigned-byte 32)
(ldb (byte 32 0) message-id-counter))))
(defmethod initialize-instance :after ((message v3-message) &rest initargs)
(declare (ignore initargs))
(unless (slot-boundp message 'message-id)
(setf (message-id-of message) (generate-message-id message))))
(defun generate-global-data (id level)
(list id
;; msgMaxSize
+max-snmp-packet-size+
;; msgFlags: security-level + reportable flag
(make-string 1 :initial-element (code-char (logior #b100 level)))
msgSecurityModel : USM ( 3 )
+snmp-sec-model-usm+))
(defvar *default-context* "")
;;; SNMPv3 Message Encode
(defmethod ber-encode ((message v3-message))
(let* ((session (session-of message))
(global-data (generate-global-data (message-id-of message)
(if (report-flag-of message) 0
(security-level-of session))))
(message-data (list (engine-id-of session) ; contextEngineID
(or (context-of message)
*default-context*) ; contextName
PDU
(need-auth-p (and (not (report-flag-of message))
(auth-protocol-of session)))
(need-priv-p (and (not (report-flag-of message))
(priv-protocol-of session)))
RFC 2574 ( USM for SNMPv3 ) , 7.3.1 .
1 ) The msgAuthenticationParameters field is set to the
;; serialization, according to the rules in [RFC1906], of an OCTET
STRING containing 12 zero octets .
(message-authentication-parameters (if need-auth-p
(make-string 12 :initial-element (code-char 0))
""))
RFC 2574 ( USM for SNMPv3 ) , 8.1.1.1 . DES key and Initialization Vector
;; Now it's a list, not string, as we do this later.
(message-privacy-parameters (if need-priv-p
(generate-privacy-parameters message)
nil)))
;; Privacy support (we encrypt and replace message-data here)
(when need-priv-p
(setf message-data (encrypt-message message message-privacy-parameters message-data)))
;; Authentication support
(labels ((encode-v3-message (auth)
(ber-encode (list (version-of session)
global-data
(ber-encode->string (list (engine-id-of session)
(engine-boots-of session)
(engine-time-of session)
(if (report-flag-of message)
""
(security-name-of session))
auth
(map 'string #'code-char
message-privacy-parameters)))
message-data))))
(let ((unauth-data (encode-v3-message message-authentication-parameters)))
(if (not need-auth-p) unauth-data
;; authencate the encode-data and re-encode it
(encode-v3-message (authenticate-message
(coerce unauth-data 'octets)
(coerce (auth-local-key-of session) 'octets)
(auth-protocol-of session))))))))
need ironclad package for hmac / md5 and /
(defun authenticate-message (message key digest)
(declare (type octets message key)
(type (member :md5 :sha1) digest))
(let ((hmac (ironclad:make-hmac key digest)))
(ironclad:update-hmac hmac message)
TODO , use a raw - data instead , for efficiency
(map 'string #'code-char
(subseq (ironclad:hmac-digest hmac) 0 12))))
(defun need-report-p (session)
"return true if a SNMPv3 session has no engine infomation set"
(declare (type v3-session session))
(zerop (engine-time-of session)))
(defun update-session-from-report (session security-string)
(declare (type v3-session session)
(type string security-string))
(destructuring-bind (engine-id engine-boots engine-time user auth priv)
security - data : 3rd field of message list
(coerce (ber-decode<-string security-string) 'list)
(declare (ignore user auth priv))
(setf (engine-id-of session) engine-id
(engine-boots-of session) engine-boots
(engine-time-of session) engine-time)
(when (and (auth-protocol-of session) (slot-boundp session 'auth-key))
(setf (auth-local-key-of session)
(generate-kul (map 'octets #'char-code engine-id)
(auth-key-of session))))
(when (and (priv-protocol-of session) (slot-boundp session 'priv-key))
(setf (priv-local-key-of session)
(generate-kul (map 'octets #'char-code engine-id)
(priv-key-of session))))
session))
;;; SNMPv3 Message Decode
(defmethod decode-message ((s v3-session) (message-list list))
(destructuring-bind (version global-data security-string data) message-list
(declare (ignore version))
(let ((message-id (elt global-data 0))
(encrypt-flag (plusp (logand #b10
(char-code (elt (elt global-data 2) 0))))))
(when encrypt-flag
;;; decrypt message
(let ((salt (map 'octets #'char-code
(elt (ber-decode<-string security-string) 5)))
(des-key (subseq (priv-local-key-of s) 0 8))
(pre-iv (subseq (priv-local-key-of s) 8 16))
(data (map 'octets #'char-code data)))
(let* ((iv (map 'octets #'logxor pre-iv salt))
(cipher (ironclad:make-cipher :des ; (priv-protocol-of s)
:mode :cbc
:key des-key
:initialization-vector iv)))
(ironclad:decrypt-in-place cipher data)
(setf data (ber-decode data)))))
(let* ((context (elt data 1))
(pdu (elt data 2))
(report-p (typep pdu 'report-pdu))
(report-flag (and (not (need-report-p s)) report-p)))
(when report-p
(update-session-from-report s security-string))
(make-instance 'v3-message
:session s
:id message-id
:report report-flag
:context context
:pdu pdu)))))
RFC 2574 ( USM for SNMPv3 ) , 8.1.1.1 . DES key and Initialization Vector
(defun generate-privacy-parameters (message)
(declare (type v3-message message))
"generate a 8-bytes privacy-parameters string for use by message encrypt"
octets 0~3
octets 4~7 ( we just reuse msgID )
(let ((salt (logior (ash left 32) right))
(result nil))
(dotimes (i 8 result)
(push (logand salt #xff) result)
(setf salt (ash salt -8))))))
Encrypt msgData
(defun encrypt-message (message message-privacy-parameters message-data)
(declare (type v3-message message)
(type list message-privacy-parameters message-data))
(let ((salt (coerce message-privacy-parameters 'octets))
(pre-iv (subseq (priv-local-key-of (session-of message)) 8 16))
(des-key (subseq (priv-local-key-of (session-of message)) 0 8))
(data (coerce (ber-encode message-data) 'octets)))
(let ((iv (map 'octets #'logxor pre-iv salt))
(result-length (* (1+ (floor (length data) 8)) 8))) ;; extend length to (mod 8)
(let ((cipher (ironclad:make-cipher :des ; (priv-protocol-of (session-of message))
:key des-key
:mode :cbc
:initialization-vector iv))
(result-data (make-sequence 'octets result-length :initial-element 0)))
(replace result-data data)
(ironclad:encrypt-in-place cipher result-data)
(map 'string #'code-char result-data)))))
(defvar *session->message* (make-hash-table :test 'eq :size 3))
(eval-when (:load-toplevel :execute)
(setf (gethash 'v1-session *session->message*) 'v1-message
(gethash 'v2c-session *session->message*) 'v2c-message
(gethash 'v3-session *session->message*) 'v3-message))
| null | https://raw.githubusercontent.com/binghe/cl-net-snmp/3cf053bce75734097f0d7e2245a53fa0c45f5e05/message.lisp | lisp | -*- Mode: Lisp -*-
SNMPv1 and SNMPv2c
SNMP v3
Report flag, for SNMP report use.
msgMaxSize
msgFlags: security-level + reportable flag
SNMPv3 Message Encode
contextEngineID
contextName
serialization, according to the rules in [RFC1906], of an OCTET
Now it's a list, not string, as we do this later.
Privacy support (we encrypt and replace message-data here)
Authentication support
authencate the encode-data and re-encode it
SNMPv3 Message Decode
decrypt message
(priv-protocol-of s)
extend length to (mod 8)
(priv-protocol-of (session-of message)) | $ Id$
(in-package :snmp)
(defclass message ()
((session :type session
:initarg :session
:accessor session-of)
(pdu :type pdu
:initarg :pdu
:accessor pdu-of)
(context :type string
:initarg :context
:accessor context-of))
(:documentation "SNMP message base class"))
(defclass v1-message (message) ()
(:documentation "Community-based SNMP v1 Message"))
(defclass v2c-message (v1-message) ()
(:documentation "Community-based SNMP v2c Message"))
(defmethod ber-encode ((message v1-message))
(ber-encode (list (version-of (session-of message))
(community-of (session-of message))
(pdu-of message))))
(defgeneric decode-message (session stream))
(defmethod decode-message ((s session) (source t))
(error "Unknown message source: ~A" source))
(defmethod decode-message ((s session) (data sequence))
(let ((message-list (coerce (ber-decode data) 'list)))
(decode-message s message-list)))
(defmethod decode-message ((s session) (stream stream))
(let ((message-list (coerce (ber-decode stream) 'list)))
(decode-message s message-list)))
(defmethod decode-message ((s v1-session) (message-list list))
(destructuring-bind (version community pdu) message-list
(declare (ignore version community))
(make-instance 'v1-message :session s :pdu pdu)))
(defmethod decode-message ((s v2c-session) (message-list list))
(destructuring-bind (version community pdu) message-list
(declare (ignore version community))
(make-instance 'v2c-message :session s :pdu pdu)))
(defclass v3-message (message)
start msgID must be big , or net - snmp can not decode our message
((message-id-counter :type (unsigned-byte 32)
:initform 0
:allocation :class)
(message-id :type (unsigned-byte 32)
:initarg :id
:accessor message-id-of)
(report-flag :type boolean
:initform nil
:initarg :report
:accessor report-flag-of))
(:documentation "User-based SNMP v3 Message"))
(defmethod generate-message-id ((message v3-message))
(with-slots (message-id-counter) message
(portable-threads:atomic-incf message-id-counter)
(the (unsigned-byte 32)
(ldb (byte 32 0) message-id-counter))))
(defmethod initialize-instance :after ((message v3-message) &rest initargs)
(declare (ignore initargs))
(unless (slot-boundp message 'message-id)
(setf (message-id-of message) (generate-message-id message))))
(defun generate-global-data (id level)
(list id
+max-snmp-packet-size+
(make-string 1 :initial-element (code-char (logior #b100 level)))
msgSecurityModel : USM ( 3 )
+snmp-sec-model-usm+))
(defvar *default-context* "")
(defmethod ber-encode ((message v3-message))
(let* ((session (session-of message))
(global-data (generate-global-data (message-id-of message)
(if (report-flag-of message) 0
(security-level-of session))))
(or (context-of message)
PDU
(need-auth-p (and (not (report-flag-of message))
(auth-protocol-of session)))
(need-priv-p (and (not (report-flag-of message))
(priv-protocol-of session)))
RFC 2574 ( USM for SNMPv3 ) , 7.3.1 .
1 ) The msgAuthenticationParameters field is set to the
STRING containing 12 zero octets .
(message-authentication-parameters (if need-auth-p
(make-string 12 :initial-element (code-char 0))
""))
RFC 2574 ( USM for SNMPv3 ) , 8.1.1.1 . DES key and Initialization Vector
(message-privacy-parameters (if need-priv-p
(generate-privacy-parameters message)
nil)))
(when need-priv-p
(setf message-data (encrypt-message message message-privacy-parameters message-data)))
(labels ((encode-v3-message (auth)
(ber-encode (list (version-of session)
global-data
(ber-encode->string (list (engine-id-of session)
(engine-boots-of session)
(engine-time-of session)
(if (report-flag-of message)
""
(security-name-of session))
auth
(map 'string #'code-char
message-privacy-parameters)))
message-data))))
(let ((unauth-data (encode-v3-message message-authentication-parameters)))
(if (not need-auth-p) unauth-data
(encode-v3-message (authenticate-message
(coerce unauth-data 'octets)
(coerce (auth-local-key-of session) 'octets)
(auth-protocol-of session))))))))
need ironclad package for hmac / md5 and /
(defun authenticate-message (message key digest)
(declare (type octets message key)
(type (member :md5 :sha1) digest))
(let ((hmac (ironclad:make-hmac key digest)))
(ironclad:update-hmac hmac message)
TODO , use a raw - data instead , for efficiency
(map 'string #'code-char
(subseq (ironclad:hmac-digest hmac) 0 12))))
(defun need-report-p (session)
"return true if a SNMPv3 session has no engine infomation set"
(declare (type v3-session session))
(zerop (engine-time-of session)))
(defun update-session-from-report (session security-string)
(declare (type v3-session session)
(type string security-string))
(destructuring-bind (engine-id engine-boots engine-time user auth priv)
security - data : 3rd field of message list
(coerce (ber-decode<-string security-string) 'list)
(declare (ignore user auth priv))
(setf (engine-id-of session) engine-id
(engine-boots-of session) engine-boots
(engine-time-of session) engine-time)
(when (and (auth-protocol-of session) (slot-boundp session 'auth-key))
(setf (auth-local-key-of session)
(generate-kul (map 'octets #'char-code engine-id)
(auth-key-of session))))
(when (and (priv-protocol-of session) (slot-boundp session 'priv-key))
(setf (priv-local-key-of session)
(generate-kul (map 'octets #'char-code engine-id)
(priv-key-of session))))
session))
(defmethod decode-message ((s v3-session) (message-list list))
(destructuring-bind (version global-data security-string data) message-list
(declare (ignore version))
(let ((message-id (elt global-data 0))
(encrypt-flag (plusp (logand #b10
(char-code (elt (elt global-data 2) 0))))))
(when encrypt-flag
(let ((salt (map 'octets #'char-code
(elt (ber-decode<-string security-string) 5)))
(des-key (subseq (priv-local-key-of s) 0 8))
(pre-iv (subseq (priv-local-key-of s) 8 16))
(data (map 'octets #'char-code data)))
(let* ((iv (map 'octets #'logxor pre-iv salt))
:mode :cbc
:key des-key
:initialization-vector iv)))
(ironclad:decrypt-in-place cipher data)
(setf data (ber-decode data)))))
(let* ((context (elt data 1))
(pdu (elt data 2))
(report-p (typep pdu 'report-pdu))
(report-flag (and (not (need-report-p s)) report-p)))
(when report-p
(update-session-from-report s security-string))
(make-instance 'v3-message
:session s
:id message-id
:report report-flag
:context context
:pdu pdu)))))
RFC 2574 ( USM for SNMPv3 ) , 8.1.1.1 . DES key and Initialization Vector
(defun generate-privacy-parameters (message)
(declare (type v3-message message))
"generate a 8-bytes privacy-parameters string for use by message encrypt"
octets 0~3
octets 4~7 ( we just reuse msgID )
(let ((salt (logior (ash left 32) right))
(result nil))
(dotimes (i 8 result)
(push (logand salt #xff) result)
(setf salt (ash salt -8))))))
Encrypt msgData
(defun encrypt-message (message message-privacy-parameters message-data)
(declare (type v3-message message)
(type list message-privacy-parameters message-data))
(let ((salt (coerce message-privacy-parameters 'octets))
(pre-iv (subseq (priv-local-key-of (session-of message)) 8 16))
(des-key (subseq (priv-local-key-of (session-of message)) 0 8))
(data (coerce (ber-encode message-data) 'octets)))
(let ((iv (map 'octets #'logxor pre-iv salt))
:key des-key
:mode :cbc
:initialization-vector iv))
(result-data (make-sequence 'octets result-length :initial-element 0)))
(replace result-data data)
(ironclad:encrypt-in-place cipher result-data)
(map 'string #'code-char result-data)))))
(defvar *session->message* (make-hash-table :test 'eq :size 3))
(eval-when (:load-toplevel :execute)
(setf (gethash 'v1-session *session->message*) 'v1-message
(gethash 'v2c-session *session->message*) 'v2c-message
(gethash 'v3-session *session->message*) 'v3-message))
|
32eba37b26ee55686e3668a5139f0addf10be4208f033d12fab1ea38abdbaee5 | haskell-gi/haskell-gi | SeqStore.hs | # LANGUAGE CPP #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
# LANGUAGE DataKinds #
# LANGUAGE TypeApplications #
-- -*-haskell-*-
GIMP Toolkit ( GTK ) CustomStore TreeModel
--
Author : ,
--
Created : 11 Feburary 2006
--
Copyright ( C ) 2005 - 2016 , ,
--
-- This library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation ; either
version 2.1 of the License , or ( at your option ) any later version .
--
-- This library is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- Lesser General Public License for more details.
--
-- |
-- Stability : provisional
Portability : portable ( depends on GHC )
--
-- Standard model to store list data.
--
module Data.GI.Gtk.ModelView.SeqStore (
-- * Types
SeqStore(..),
-- * Constructors
seqStoreNew,
seqStoreNewDND,
-- * Implementation of Interfaces
seqStoreDefaultDragSourceIface,
seqStoreDefaultDragDestIface,
-- * Methods
seqStoreIterToIndex,
seqStoreGetValue,
seqStoreSafeGetValue,
seqStoreSetValue,
seqStoreToList,
seqStoreGetSize,
seqStoreInsert,
seqStoreInsertBefore,
seqStoreInsertAfter,
seqStorePrepend,
seqStoreAppend,
seqStoreRemove,
seqStoreClear,
) where
import Prelude ()
import Prelude.Compat
import Control.Monad (when)
import Control.Monad.Trans ( liftIO )
import Data.IORef
import Data.Ix (inRange)
import Foreign.ForeignPtr (ForeignPtr)
import qualified Data.Sequence as Seq
import Data.Sequence (Seq)
import qualified Data.Foldable as F
import Data.Int (Int32)
import Data.GI.Gtk.ModelView.Types
import Data.GI.Gtk.ModelView.CustomStore
(customStoreGetStamp, customStoreGetPrivate,
TreeModelIface(..), customStoreNew, DragDestIface(..),
DragSourceIface(..), CustomStore(..))
import Data.GI.Base.BasicTypes
(TypedObject(..), ManagedPtr(..), GObject)
import Data.GI.Base.ManagedPtr (withManagedPtr)
import GI.Gtk.Interfaces.TreeModel
(treeModelRowDeleted, treeModelRowInserted,
treeModelRowChanged, toTreeModel, TreeModel(..), IsTreeModel(..))
import GI.GObject.Objects.Object (Object(..))
import GI.Gtk.Functions (treeGetRowDragData, treeSetRowDragData)
import GI.Gtk.Flags (TreeModelFlags(..))
import Control.Monad.IO.Class (MonadIO)
import GI.Gtk.Structs.TreeIter
(setTreeIterUserData3, setTreeIterUserData2, setTreeIterStamp,
setTreeIterUserData, getTreeIterUserData, TreeIter(..))
import Data.GI.Base (get, new)
import Data.GI.Base.Overloading (HasParentTypes, ParentTypes)
import Data.Word (Word32)
import Unsafe.Coerce (unsafeCoerce)
import Foreign.Ptr (nullPtr)
seqStoreIterNew :: MonadIO m => Int32 -> Int32 -> m TreeIter
seqStoreIterNew s u1 = do
i <- new TreeIter []
setTreeIterStamp i s
setTreeIterUserData i $ unsafeCoerce u1
setTreeIterUserData2 i nullPtr
setTreeIterUserData3 i nullPtr
return i
newtype SeqStore a = SeqStore (ManagedPtr (CustomStore (IORef (Seq a)) a))
mkSeqStore :: CustomStore (IORef (Seq a)) a -> SeqStore a
mkSeqStore (CustomStore ptr) = SeqStore ptr
instance HasParentTypes (SeqStore a)
type instance ParentTypes (SeqStore a) = '[TreeModel]
instance TypedObject (SeqStore a) where
glibType = glibType @TreeModel
instance GObject (SeqStore a)
instance IsTypedTreeModel SeqStore
-- | Create a new 'TreeModel' that contains a list of elements.
seqStoreNew :: (Applicative m, MonadIO m) => [a] -> m (SeqStore a)
seqStoreNew xs = seqStoreNewDND xs (Just seqStoreDefaultDragSourceIface)
(Just seqStoreDefaultDragDestIface)
| Create a new ' TreeModel ' that contains a list of elements . In addition , specify two
-- interfaces for drag and drop.
--
seqStoreNewDND :: (Applicative m, MonadIO m)
=> [a] -- ^ the initial content of the model
-> Maybe (DragSourceIface SeqStore a) -- ^ an optional interface for drags
-> Maybe (DragDestIface SeqStore a) -- ^ an optional interface to handle drops
-> m (SeqStore a) -- ^ the new model
seqStoreNewDND xs mDSource mDDest = do
rows <- liftIO $ newIORef (Seq.fromList xs)
customStoreNew rows mkSeqStore TreeModelIface {
treeModelIfaceGetFlags = return [TreeModelFlagsListOnly],
treeModelIfaceGetIter = \path -> treePathGetIndices' path >>= \[n] -> readIORef rows >>= \rows ->
if inRange (0, Seq.length rows - 1) (fromIntegral n)
then Just <$> seqStoreIterNew 0 (fromIntegral n)
else return Nothing,
treeModelIfaceGetPath = \i -> do
n <- seqStoreIterToIndex i
treePathNewFromIndices' [fromIntegral n],
treeModelIfaceGetRow = \i -> do
n <- seqStoreIterToIndex i
readIORef rows >>= \rows ->
if inRange (0, Seq.length rows - 1) (fromIntegral n)
then return (rows `Seq.index` fromIntegral n)
else fail "SeqStore.getRow: iter does not refer to a valid entry",
treeModelIfaceIterNext = \i -> do
n <- seqStoreIterToIndex i
readIORef rows >>= \rows ->
if inRange (0, Seq.length rows - 1) (fromIntegral (n+1))
then Just <$> seqStoreIterNew 0 (n+1)
else return Nothing,
treeModelIfaceIterChildren = \index -> readIORef rows >>= \rows ->
case index of
Nothing | not (Seq.null rows) -> Just <$> seqStoreIterNew 0 0
_ -> return Nothing,
treeModelIfaceIterHasChild = \_ -> return False,
treeModelIfaceIterNChildren = \index -> readIORef rows >>= \rows ->
case index of
Nothing -> return $! Seq.length rows
_ -> return 0,
treeModelIfaceIterNthChild = \index n -> case index of
Nothing -> Just <$> seqStoreIterNew 0 (fromIntegral n)
_ -> return Nothing,
treeModelIfaceIterParent = \_ -> return Nothing,
treeModelIfaceRefNode = \_ -> return (),
treeModelIfaceUnrefNode = \_ -> return ()
} mDSource mDDest
-- | Convert a 'TreeIterRaw' to an an index into the 'SeqStore'. Note that this
function merely extracts the second element of the ' TreeIterRaw ' .
seqStoreIterToIndex :: (Applicative m, MonadIO m) => TreeIter -> m Int32
seqStoreIterToIndex i = unsafeCoerce <$> getTreeIterUserData i
| Default drag functions for ' Data . GI.Gtk . ModelView . ' . These
-- functions allow the rows of the model to serve as drag source. Any row is
-- allowed to be dragged and the data set in the 'SelectionDataM' object is
-- set with 'treeSetRowDragData', i.e. it contains the model and the
-- 'TreePath' to the row.
seqStoreDefaultDragSourceIface :: DragSourceIface SeqStore row
seqStoreDefaultDragSourceIface = DragSourceIface {
customDragSourceRowDraggable = \_ _-> return True,
customDragSourceDragDataGet = \model path sel -> treeSetRowDragData sel model path,
customDragSourceDragDataDelete = \model path -> treePathGetIndices' path >>= \(dest:_) -> do
liftIO $ seqStoreRemove model (fromIntegral dest)
return True
}
| Default drop functions for ' Data . GI.Gtk . ModelView . ' . These
-- functions accept a row and insert the row into the new location if it is
-- dragged into a tree view
-- that uses the same model.
seqStoreDefaultDragDestIface :: DragDestIface SeqStore row
seqStoreDefaultDragDestIface = DragDestIface {
customDragDestRowDropPossible = \model path sel -> do
dest <- treePathGetIndices' path
mModelPath <- treeGetRowDragData sel
case mModelPath of
(True, Just model', source) -> do
tm <- toTreeModel model
withManagedPtr tm $ \m ->
withManagedPtr model' $ \m' -> return (m==m')
_ -> return False,
customDragDestDragDataReceived = \model path sel -> do
(dest:_) <- treePathGetIndices' path
mModelPath <- treeGetRowDragData sel
case mModelPath of
(True, Just model', Just path) -> do
(source:_) <- treePathGetIndices' path
tm <- toTreeModel model
withManagedPtr tm $ \m ->
withManagedPtr model' $ \m' ->
if m/=m' then return False
else do
row <- seqStoreGetValue model source
seqStoreInsert model dest row
return True
_ -> return False
}
-- | Extract the value at the given index.
--
seqStoreGetValue :: (Applicative m, MonadIO m) => SeqStore a -> Int32 -> m a
seqStoreGetValue (SeqStore model) index =
(`Seq.index` fromIntegral index) <$> liftIO (readIORef (customStoreGetPrivate (CustomStore model)))
-- | Extract the value at the given index.
--
seqStoreSafeGetValue :: MonadIO m => SeqStore a -> Int32 -> m (Maybe a)
seqStoreSafeGetValue (SeqStore model) index' = do
let index = fromIntegral index'
seq <- liftIO $ readIORef (customStoreGetPrivate (CustomStore model))
return $ if index >=0 && index < Seq.length seq
then Just $ seq `Seq.index` index
else Nothing
-- | Update the value at the given index. The index must exist.
--
seqStoreSetValue :: MonadIO m => SeqStore a -> Int32 -> a -> m ()
seqStoreSetValue (SeqStore model) index value = do
liftIO $ modifyIORef (customStoreGetPrivate (CustomStore model)) (Seq.update (fromIntegral index) value)
stamp <- customStoreGetStamp (CustomStore model)
path <- treePathNewFromIndices' [index]
i <- seqStoreIterNew stamp (fromIntegral index)
treeModelRowChanged (CustomStore model) path i
-- | Extract all data from the store.
--
seqStoreToList :: (Applicative m, MonadIO m) => SeqStore a -> m [a]
seqStoreToList (SeqStore model) =
F.toList <$> liftIO (readIORef (customStoreGetPrivate (CustomStore model)))
-- | Query the number of elements in the store.
seqStoreGetSize :: (Applicative m, MonadIO m) => SeqStore a -> m Int32
seqStoreGetSize (SeqStore model) =
fromIntegral . Seq.length <$> liftIO (readIORef (customStoreGetPrivate (CustomStore model)))
-- | Insert an element in front of the given element. The element is appended
-- if the index is greater or equal to the size of the list.
seqStoreInsert :: MonadIO m => SeqStore a -> Int32 -> a -> m ()
seqStoreInsert (SeqStore model) index value = liftIO $ do
seq <- readIORef (customStoreGetPrivate (CustomStore model))
when (index >= 0) $ do
let index' | fromIntegral index > Seq.length seq = Seq.length seq
| otherwise = fromIntegral $ index
writeIORef (customStoreGetPrivate (CustomStore model)) (insert index' value seq)
stamp <- customStoreGetStamp (CustomStore model)
p <- treePathNewFromIndices' [fromIntegral index']
i <- seqStoreIterNew stamp (fromIntegral index')
treeModelRowInserted (CustomStore model) p i
where insert :: Int -> a -> Seq a -> Seq a
insert i x xs = front Seq.>< x Seq.<| back
where (front, back) = Seq.splitAt i xs
-- | Insert an element in front of the given element.
seqStoreInsertBefore :: (Applicative m, MonadIO m) => SeqStore a -> TreeIter -> a -> m ()
seqStoreInsertBefore store iter value = do
n <- seqStoreIterToIndex iter
seqStoreInsert store n value
-- | Insert an element after the given element.
seqStoreInsertAfter :: (Applicative m, MonadIO m) => SeqStore a -> TreeIter -> a -> m ()
seqStoreInsertAfter store iter value = do
n <- seqStoreIterToIndex iter
seqStoreInsert store (n + 1) value
-- | Prepend the element to the store.
seqStorePrepend :: (Applicative m, MonadIO m) => SeqStore a -> a -> m ()
seqStorePrepend (SeqStore model) value = do
liftIO $ modifyIORef (customStoreGetPrivate (CustomStore model))
(\seq -> value Seq.<| seq)
stamp <- customStoreGetStamp (CustomStore model)
p <- treePathNewFromIndices' [0]
i <- seqStoreIterNew stamp 0
treeModelRowInserted (CustomStore model) p i
---- | Prepend a list to the store. Not implemented yet.
seqStorePrependList : : MonadIO m = > SeqStore a - > [ a ] - > m ( )
--seqStorePrependList store list =
mapM _ ( seqStoreInsert store 0 ) ( reverse list )
-- | Append an element to the store. Returns the index of the inserted
-- element.
seqStoreAppend :: MonadIO m => SeqStore a -> a -> m Int32
seqStoreAppend (SeqStore model) value = do
index <- liftIO $ atomicModifyIORef (customStoreGetPrivate (CustomStore model))
(\seq -> (seq Seq.|> value, Seq.length seq))
stamp <- customStoreGetStamp (CustomStore model)
p <- treePathNewFromIndices' [fromIntegral index]
i <- seqStoreIterNew stamp (fromIntegral index)
treeModelRowInserted (CustomStore model) p i
return $ fromIntegral index
seqStoreAppendList : : MonadIO m = > SeqStore a - > [ a ] - > m ( )
seqStoreAppendList ( SeqStore model ) values = do
seq < - readIORef ( customStoreGetPrivate model )
let seq ' = Seq.fromList values
startIndex = Seq.length seq
endIndex = startIndex + Seq.length seq ' - 1
writeIORef ( customStoreGetPrivate model ) ( seq Seq . > < seq ' )
stamp < - customStoreGetStamp model
flip mapM [ startIndex .. endIndex ] $ \index - >
treeModelRowInserted model [ index ] ( TreeIterRaw stamp ( fromIntegral index ) 0 0 )
seqStoreAppendList :: MonadIO m => SeqStore a -> [a] -> m ()
seqStoreAppendList (SeqStore model) values = do
seq <- readIORef (customStoreGetPrivate model)
let seq' = Seq.fromList values
startIndex = Seq.length seq
endIndex = startIndex + Seq.length seq' - 1
writeIORef (customStoreGetPrivate model) (seq Seq.>< seq')
stamp <- customStoreGetStamp model
flip mapM [startIndex..endIndex] $ \index ->
treeModelRowInserted model [index] (TreeIterRaw stamp (fromIntegral index) 0 0)
-}
-- | Remove the element at the given index.
--
seqStoreRemove :: MonadIO m => SeqStore a -> Int32 -> m ()
seqStoreRemove (SeqStore model) index' = liftIO $ do
seq <- readIORef (customStoreGetPrivate (CustomStore model))
when (index >=0 && index < Seq.length seq) $ do
writeIORef (customStoreGetPrivate (CustomStore model)) (delete index seq)
p <- treePathNewFromIndices' [fromIntegral index]
treeModelRowDeleted (CustomStore model) p
where delete :: Int -> Seq a -> Seq a
delete i xs = front Seq.>< Seq.drop 1 back
where (front, back) = Seq.splitAt i xs
index = fromIntegral index'
-- | Empty the store.
seqStoreClear :: MonadIO m => SeqStore a -> m ()
seqStoreClear (SeqStore model) = liftIO $
-- Since deleting rows can cause callbacks (eg due to selection changes)
-- we have to make sure the model is consitent with the view at each
-- intermediate step of clearing the store. Otherwise at some intermediate
-- stage when the view has only been informed about some delections, the
-- user might query the model expecting to find the remaining rows are there
-- but find them deleted. That'd be bad.
--
let loop (-1) Seq.EmptyR = return ()
loop n (seq Seq.:> _) = do
writeIORef (customStoreGetPrivate (CustomStore model)) seq
p <- treePathNewFromIndices' [fromIntegral n]
treeModelRowDeleted (CustomStore model) p
loop (n-1) (Seq.viewr seq)
in do seq <- readIORef (customStoreGetPrivate (CustomStore model))
loop (Seq.length seq - 1) (Seq.viewr seq)
---- | Permute the rows of the store. Not yet implemented.
seqStoreReorder : : MonadIO m = > SeqStore a - > [ Int ] - > m ( )
--seqStoreReorder store = undefined
--
-- | Swap two rows of the store . Not yet implemented .
seqStoreSwap : : MonadIO m = > SeqStore a - > Int - > Int - > m ( )
--seqStoreSwap store = undefined
--
-- | Move the element at the first index in front of the element denoted by
-- the second index . Not yet implemented .
seqStoreMoveBefore : : MonadIO m = > SeqStore a - > Int - > Int - > m ( )
--seqStoreMoveBefore store = undefined
--
-- | Move the element at the first index past the element denoted by the
-- second index . Not yet implemented .
: : MonadIO m = > SeqStore a - > Int - > Int - > m ( )
--seqStoreMoveAfter store = undefined
| null | https://raw.githubusercontent.com/haskell-gi/haskell-gi/bff8f3b92bf2594ea3d6745c346a8de594fc3709/gi-gtk-hs/src/Data/GI/Gtk/ModelView/SeqStore.hs | haskell | -*-haskell-*-
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
|
Stability : provisional
Standard model to store list data.
* Types
* Constructors
* Implementation of Interfaces
* Methods
| Create a new 'TreeModel' that contains a list of elements.
interfaces for drag and drop.
^ the initial content of the model
^ an optional interface for drags
^ an optional interface to handle drops
^ the new model
| Convert a 'TreeIterRaw' to an an index into the 'SeqStore'. Note that this
functions allow the rows of the model to serve as drag source. Any row is
allowed to be dragged and the data set in the 'SelectionDataM' object is
set with 'treeSetRowDragData', i.e. it contains the model and the
'TreePath' to the row.
functions accept a row and insert the row into the new location if it is
dragged into a tree view
that uses the same model.
| Extract the value at the given index.
| Extract the value at the given index.
| Update the value at the given index. The index must exist.
| Extract all data from the store.
| Query the number of elements in the store.
| Insert an element in front of the given element. The element is appended
if the index is greater or equal to the size of the list.
| Insert an element in front of the given element.
| Insert an element after the given element.
| Prepend the element to the store.
-- | Prepend a list to the store. Not implemented yet.
seqStorePrependList store list =
| Append an element to the store. Returns the index of the inserted
element.
| Remove the element at the given index.
| Empty the store.
Since deleting rows can cause callbacks (eg due to selection changes)
we have to make sure the model is consitent with the view at each
intermediate step of clearing the store. Otherwise at some intermediate
stage when the view has only been informed about some delections, the
user might query the model expecting to find the remaining rows are there
but find them deleted. That'd be bad.
-- | Permute the rows of the store. Not yet implemented.
seqStoreReorder store = undefined
| Swap two rows of the store . Not yet implemented .
seqStoreSwap store = undefined
| Move the element at the first index in front of the element denoted by
the second index . Not yet implemented .
seqStoreMoveBefore store = undefined
| Move the element at the first index past the element denoted by the
second index . Not yet implemented .
seqStoreMoveAfter store = undefined | # LANGUAGE CPP #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
# LANGUAGE DataKinds #
# LANGUAGE TypeApplications #
GIMP Toolkit ( GTK ) CustomStore TreeModel
Author : ,
Created : 11 Feburary 2006
Copyright ( C ) 2005 - 2016 , ,
License as published by the Free Software Foundation ; either
version 2.1 of the License , or ( at your option ) any later version .
Portability : portable ( depends on GHC )
module Data.GI.Gtk.ModelView.SeqStore (
SeqStore(..),
seqStoreNew,
seqStoreNewDND,
seqStoreDefaultDragSourceIface,
seqStoreDefaultDragDestIface,
seqStoreIterToIndex,
seqStoreGetValue,
seqStoreSafeGetValue,
seqStoreSetValue,
seqStoreToList,
seqStoreGetSize,
seqStoreInsert,
seqStoreInsertBefore,
seqStoreInsertAfter,
seqStorePrepend,
seqStoreAppend,
seqStoreRemove,
seqStoreClear,
) where
import Prelude ()
import Prelude.Compat
import Control.Monad (when)
import Control.Monad.Trans ( liftIO )
import Data.IORef
import Data.Ix (inRange)
import Foreign.ForeignPtr (ForeignPtr)
import qualified Data.Sequence as Seq
import Data.Sequence (Seq)
import qualified Data.Foldable as F
import Data.Int (Int32)
import Data.GI.Gtk.ModelView.Types
import Data.GI.Gtk.ModelView.CustomStore
(customStoreGetStamp, customStoreGetPrivate,
TreeModelIface(..), customStoreNew, DragDestIface(..),
DragSourceIface(..), CustomStore(..))
import Data.GI.Base.BasicTypes
(TypedObject(..), ManagedPtr(..), GObject)
import Data.GI.Base.ManagedPtr (withManagedPtr)
import GI.Gtk.Interfaces.TreeModel
(treeModelRowDeleted, treeModelRowInserted,
treeModelRowChanged, toTreeModel, TreeModel(..), IsTreeModel(..))
import GI.GObject.Objects.Object (Object(..))
import GI.Gtk.Functions (treeGetRowDragData, treeSetRowDragData)
import GI.Gtk.Flags (TreeModelFlags(..))
import Control.Monad.IO.Class (MonadIO)
import GI.Gtk.Structs.TreeIter
(setTreeIterUserData3, setTreeIterUserData2, setTreeIterStamp,
setTreeIterUserData, getTreeIterUserData, TreeIter(..))
import Data.GI.Base (get, new)
import Data.GI.Base.Overloading (HasParentTypes, ParentTypes)
import Data.Word (Word32)
import Unsafe.Coerce (unsafeCoerce)
import Foreign.Ptr (nullPtr)
seqStoreIterNew :: MonadIO m => Int32 -> Int32 -> m TreeIter
seqStoreIterNew s u1 = do
i <- new TreeIter []
setTreeIterStamp i s
setTreeIterUserData i $ unsafeCoerce u1
setTreeIterUserData2 i nullPtr
setTreeIterUserData3 i nullPtr
return i
newtype SeqStore a = SeqStore (ManagedPtr (CustomStore (IORef (Seq a)) a))
mkSeqStore :: CustomStore (IORef (Seq a)) a -> SeqStore a
mkSeqStore (CustomStore ptr) = SeqStore ptr
instance HasParentTypes (SeqStore a)
type instance ParentTypes (SeqStore a) = '[TreeModel]
instance TypedObject (SeqStore a) where
glibType = glibType @TreeModel
instance GObject (SeqStore a)
instance IsTypedTreeModel SeqStore
seqStoreNew :: (Applicative m, MonadIO m) => [a] -> m (SeqStore a)
seqStoreNew xs = seqStoreNewDND xs (Just seqStoreDefaultDragSourceIface)
(Just seqStoreDefaultDragDestIface)
| Create a new ' TreeModel ' that contains a list of elements . In addition , specify two
seqStoreNewDND :: (Applicative m, MonadIO m)
seqStoreNewDND xs mDSource mDDest = do
rows <- liftIO $ newIORef (Seq.fromList xs)
customStoreNew rows mkSeqStore TreeModelIface {
treeModelIfaceGetFlags = return [TreeModelFlagsListOnly],
treeModelIfaceGetIter = \path -> treePathGetIndices' path >>= \[n] -> readIORef rows >>= \rows ->
if inRange (0, Seq.length rows - 1) (fromIntegral n)
then Just <$> seqStoreIterNew 0 (fromIntegral n)
else return Nothing,
treeModelIfaceGetPath = \i -> do
n <- seqStoreIterToIndex i
treePathNewFromIndices' [fromIntegral n],
treeModelIfaceGetRow = \i -> do
n <- seqStoreIterToIndex i
readIORef rows >>= \rows ->
if inRange (0, Seq.length rows - 1) (fromIntegral n)
then return (rows `Seq.index` fromIntegral n)
else fail "SeqStore.getRow: iter does not refer to a valid entry",
treeModelIfaceIterNext = \i -> do
n <- seqStoreIterToIndex i
readIORef rows >>= \rows ->
if inRange (0, Seq.length rows - 1) (fromIntegral (n+1))
then Just <$> seqStoreIterNew 0 (n+1)
else return Nothing,
treeModelIfaceIterChildren = \index -> readIORef rows >>= \rows ->
case index of
Nothing | not (Seq.null rows) -> Just <$> seqStoreIterNew 0 0
_ -> return Nothing,
treeModelIfaceIterHasChild = \_ -> return False,
treeModelIfaceIterNChildren = \index -> readIORef rows >>= \rows ->
case index of
Nothing -> return $! Seq.length rows
_ -> return 0,
treeModelIfaceIterNthChild = \index n -> case index of
Nothing -> Just <$> seqStoreIterNew 0 (fromIntegral n)
_ -> return Nothing,
treeModelIfaceIterParent = \_ -> return Nothing,
treeModelIfaceRefNode = \_ -> return (),
treeModelIfaceUnrefNode = \_ -> return ()
} mDSource mDDest
function merely extracts the second element of the ' TreeIterRaw ' .
seqStoreIterToIndex :: (Applicative m, MonadIO m) => TreeIter -> m Int32
seqStoreIterToIndex i = unsafeCoerce <$> getTreeIterUserData i
| Default drag functions for ' Data . GI.Gtk . ModelView . ' . These
seqStoreDefaultDragSourceIface :: DragSourceIface SeqStore row
seqStoreDefaultDragSourceIface = DragSourceIface {
customDragSourceRowDraggable = \_ _-> return True,
customDragSourceDragDataGet = \model path sel -> treeSetRowDragData sel model path,
customDragSourceDragDataDelete = \model path -> treePathGetIndices' path >>= \(dest:_) -> do
liftIO $ seqStoreRemove model (fromIntegral dest)
return True
}
| Default drop functions for ' Data . GI.Gtk . ModelView . ' . These
seqStoreDefaultDragDestIface :: DragDestIface SeqStore row
seqStoreDefaultDragDestIface = DragDestIface {
customDragDestRowDropPossible = \model path sel -> do
dest <- treePathGetIndices' path
mModelPath <- treeGetRowDragData sel
case mModelPath of
(True, Just model', source) -> do
tm <- toTreeModel model
withManagedPtr tm $ \m ->
withManagedPtr model' $ \m' -> return (m==m')
_ -> return False,
customDragDestDragDataReceived = \model path sel -> do
(dest:_) <- treePathGetIndices' path
mModelPath <- treeGetRowDragData sel
case mModelPath of
(True, Just model', Just path) -> do
(source:_) <- treePathGetIndices' path
tm <- toTreeModel model
withManagedPtr tm $ \m ->
withManagedPtr model' $ \m' ->
if m/=m' then return False
else do
row <- seqStoreGetValue model source
seqStoreInsert model dest row
return True
_ -> return False
}
seqStoreGetValue :: (Applicative m, MonadIO m) => SeqStore a -> Int32 -> m a
seqStoreGetValue (SeqStore model) index =
(`Seq.index` fromIntegral index) <$> liftIO (readIORef (customStoreGetPrivate (CustomStore model)))
seqStoreSafeGetValue :: MonadIO m => SeqStore a -> Int32 -> m (Maybe a)
seqStoreSafeGetValue (SeqStore model) index' = do
let index = fromIntegral index'
seq <- liftIO $ readIORef (customStoreGetPrivate (CustomStore model))
return $ if index >=0 && index < Seq.length seq
then Just $ seq `Seq.index` index
else Nothing
seqStoreSetValue :: MonadIO m => SeqStore a -> Int32 -> a -> m ()
seqStoreSetValue (SeqStore model) index value = do
liftIO $ modifyIORef (customStoreGetPrivate (CustomStore model)) (Seq.update (fromIntegral index) value)
stamp <- customStoreGetStamp (CustomStore model)
path <- treePathNewFromIndices' [index]
i <- seqStoreIterNew stamp (fromIntegral index)
treeModelRowChanged (CustomStore model) path i
seqStoreToList :: (Applicative m, MonadIO m) => SeqStore a -> m [a]
seqStoreToList (SeqStore model) =
F.toList <$> liftIO (readIORef (customStoreGetPrivate (CustomStore model)))
seqStoreGetSize :: (Applicative m, MonadIO m) => SeqStore a -> m Int32
seqStoreGetSize (SeqStore model) =
fromIntegral . Seq.length <$> liftIO (readIORef (customStoreGetPrivate (CustomStore model)))
seqStoreInsert :: MonadIO m => SeqStore a -> Int32 -> a -> m ()
seqStoreInsert (SeqStore model) index value = liftIO $ do
seq <- readIORef (customStoreGetPrivate (CustomStore model))
when (index >= 0) $ do
let index' | fromIntegral index > Seq.length seq = Seq.length seq
| otherwise = fromIntegral $ index
writeIORef (customStoreGetPrivate (CustomStore model)) (insert index' value seq)
stamp <- customStoreGetStamp (CustomStore model)
p <- treePathNewFromIndices' [fromIntegral index']
i <- seqStoreIterNew stamp (fromIntegral index')
treeModelRowInserted (CustomStore model) p i
where insert :: Int -> a -> Seq a -> Seq a
insert i x xs = front Seq.>< x Seq.<| back
where (front, back) = Seq.splitAt i xs
seqStoreInsertBefore :: (Applicative m, MonadIO m) => SeqStore a -> TreeIter -> a -> m ()
seqStoreInsertBefore store iter value = do
n <- seqStoreIterToIndex iter
seqStoreInsert store n value
seqStoreInsertAfter :: (Applicative m, MonadIO m) => SeqStore a -> TreeIter -> a -> m ()
seqStoreInsertAfter store iter value = do
n <- seqStoreIterToIndex iter
seqStoreInsert store (n + 1) value
seqStorePrepend :: (Applicative m, MonadIO m) => SeqStore a -> a -> m ()
seqStorePrepend (SeqStore model) value = do
liftIO $ modifyIORef (customStoreGetPrivate (CustomStore model))
(\seq -> value Seq.<| seq)
stamp <- customStoreGetStamp (CustomStore model)
p <- treePathNewFromIndices' [0]
i <- seqStoreIterNew stamp 0
treeModelRowInserted (CustomStore model) p i
seqStorePrependList : : MonadIO m = > SeqStore a - > [ a ] - > m ( )
mapM _ ( seqStoreInsert store 0 ) ( reverse list )
seqStoreAppend :: MonadIO m => SeqStore a -> a -> m Int32
seqStoreAppend (SeqStore model) value = do
index <- liftIO $ atomicModifyIORef (customStoreGetPrivate (CustomStore model))
(\seq -> (seq Seq.|> value, Seq.length seq))
stamp <- customStoreGetStamp (CustomStore model)
p <- treePathNewFromIndices' [fromIntegral index]
i <- seqStoreIterNew stamp (fromIntegral index)
treeModelRowInserted (CustomStore model) p i
return $ fromIntegral index
seqStoreAppendList : : MonadIO m = > SeqStore a - > [ a ] - > m ( )
seqStoreAppendList ( SeqStore model ) values = do
seq < - readIORef ( customStoreGetPrivate model )
let seq ' = Seq.fromList values
startIndex = Seq.length seq
endIndex = startIndex + Seq.length seq ' - 1
writeIORef ( customStoreGetPrivate model ) ( seq Seq . > < seq ' )
stamp < - customStoreGetStamp model
flip mapM [ startIndex .. endIndex ] $ \index - >
treeModelRowInserted model [ index ] ( TreeIterRaw stamp ( fromIntegral index ) 0 0 )
seqStoreAppendList :: MonadIO m => SeqStore a -> [a] -> m ()
seqStoreAppendList (SeqStore model) values = do
seq <- readIORef (customStoreGetPrivate model)
let seq' = Seq.fromList values
startIndex = Seq.length seq
endIndex = startIndex + Seq.length seq' - 1
writeIORef (customStoreGetPrivate model) (seq Seq.>< seq')
stamp <- customStoreGetStamp model
flip mapM [startIndex..endIndex] $ \index ->
treeModelRowInserted model [index] (TreeIterRaw stamp (fromIntegral index) 0 0)
-}
seqStoreRemove :: MonadIO m => SeqStore a -> Int32 -> m ()
seqStoreRemove (SeqStore model) index' = liftIO $ do
seq <- readIORef (customStoreGetPrivate (CustomStore model))
when (index >=0 && index < Seq.length seq) $ do
writeIORef (customStoreGetPrivate (CustomStore model)) (delete index seq)
p <- treePathNewFromIndices' [fromIntegral index]
treeModelRowDeleted (CustomStore model) p
where delete :: Int -> Seq a -> Seq a
delete i xs = front Seq.>< Seq.drop 1 back
where (front, back) = Seq.splitAt i xs
index = fromIntegral index'
-- | Delete every row, emitting one row-deleted signal per row, working from
-- the last row down to the first so that the indices already reported to
-- views stay valid while rows disappear.
seqStoreClear :: MonadIO m => SeqStore a -> m ()
seqStoreClear (SeqStore model) = liftIO $ do
    rows <- readIORef (customStoreGetPrivate (CustomStore model))
    dropAll (Seq.length rows - 1) (Seq.viewr rows)
  where
    dropAll (-1) Seq.EmptyR = return ()
    dropAll n (rest Seq.:> _) = do
      writeIORef (customStoreGetPrivate (CustomStore model)) rest
      path <- treePathNewFromIndices' [fromIntegral n]
      treeModelRowDeleted (CustomStore model) path
      dropAll (n - 1) (Seq.viewr rest)
seqStoreReorder : : MonadIO m = > SeqStore a - > [ Int ] - > m ( )
seqStoreSwap : : MonadIO m = > SeqStore a - > Int - > Int - > m ( )
seqStoreMoveBefore : : MonadIO m = > SeqStore a - > Int - > Int - > m ( )
: : MonadIO m = > SeqStore a - > Int - > Int - > m ( )
|
437e7699f4f6d7d3cd2cd539fc0499ec8f9eb6bf8525fafc4a74627b61cc8547 | KestrelInstitute/Specware | parse-node-utilities.lisp | -*- Mode : LISP ; Package : Parser ; Base : 10 ; Syntax : Common - Lisp -*-
(in-package :Parser4)
;; ======================================================================
;; Convenience macro for issuing a positioned parser warning.  Expands into a
;; call of WARN-POS-FN, anaphorically capturing the variable SESSION, which
;; must be bound at every call site (all callers take a parse-session arg
;; named SESSION).  LOCATION is a (line column [byte]) list; ARGS are FORMAT
;; control string and arguments.
(defmacro warn-pos (location &rest args)
  `(warn-pos-fn session ,location ,@args))
;; When true, parser warnings are collected as strings instead of signalled.
(defvar *suppress-warnings?* nil)
;; Accumulates formatted warning strings (most recent first) while
;; *SUPPRESS-WARNINGS?* is true.
(defvar *warnings* '())
(defun warn-pos-fn (session location &rest args)
  ;; Issue a parser warning anchored at LOCATION = (line column [byte]).
  ;; ARGS is a FORMAT control string plus its arguments.
  ;; If *SUPPRESS-WARNINGS?* is set, just push the formatted text onto
  ;; *WARNINGS*.  Otherwise, on the first warning of this session, move the
  ;; attached Emacs to the offending file position, then signal via WARN.
  (cond (*suppress-warnings?*
         (push (apply 'format nil args) *warnings*))
        (t
         (unless (parse-session-warning-issued? session)
           (Emacs::goto-file-position (namestring (parse-session-file session))
                                      (first location) (second location))
           (setf (parse-session-warning-issued? session) t))
         (apply 'warn args))))
(defun parser-attach-rules (session)
  ;; Main driver of the chart parser's bottom-up phase.  Walks every token
  ;; location of SESSION left to right and, for each token node, seeds the
  ;; chart with the applicable leaf rules (keyword/symbol/string/number/
  ;; character/pragma); ADD-PARTIAL-NODE then propagates completions upward.
  ;; Also reports syntax errors: a location with no pending partial rules and
  ;; no token means the parse wedged just after the previous token.
  ;;
  (when-debugging (delta-time))
  ;;
  (initialize-location-desired-bvs session)
  ;;
  ;;(add-toplevel-node session 0)
  ;;
  (let* ((locations (parse-session-locations session))
         (parser (parse-session-parser session))
         ;;
         (ht-string-to-keyword-rules (parser-ht-string-to-keyword-rule parser))
         (generic-symbol-rule (parser-symbol-rule parser))
         (generic-string-rule (parser-string-rule parser))
         (generic-number-rule (parser-number-rule parser))
         (generic-character-rule (parser-character-rule parser))
         (generic-pragma-rule (parser-pragma-rule parser))
         (keywords-are-keywords-only? (parser-keywords-are-keywords-only? parser))
         #+DEBUG-PARSER
         (number-of-tokens-to-process 0)
         (preceding-location-had-no-pending-rules? nil))
    ;;
    (dotimes (i (length locations))
      ;;
      ;; In debug builds, single-step interactively: read how many tokens to
      ;; process before pausing again.
      (when-debugging
       (when *verbose?*
         (unless (> (decf number-of-tokens-to-process) 0)
           (let ((str (read-line t nil))) ; t = stream, nil = eof-error-p
             (setq number-of-tokens-to-process (if (null str)
                                                   1
                                                   (or (parse-integer str :junk-allowed t)
                                                       1)))))
         (terpri)
         (format t "====================================================================================================")
         (terpri)))
      ;;
      (let ((location (svref locations i)))
        ;;
        ;; '(let ((token
        ;;         (dolist (forward-node (parser-location-post-nodes location))
        ;;           (when (eq (parser-node-rule forward-node) +token-rule+)
        ;;             (return (parser-node-semantics forward-node))))))
        ;;    (format t "~&[~3D : ~30D~% ~S]~%"
        ;;            i
        ;;            token
        ;;
        ;; Error detection: no pending partial rules here (and we're not at
        ;; the start or the end) means nothing could consume the last token.
        (cond ((or (eq i 0)
                   (not (null (parser-location-partial-node-data location)))
                   (null (parser-location-post-nodes location)) ; last location
                   )
               (setq preceding-location-had-no-pending-rules? nil))
              (preceding-location-had-no-pending-rules?
               ;; Suppress complaint if immediately preceding location also had the same problem.
               ;; This avoids showing millions (well, perhaps thousands) of annoying messages.
               nil)
              (t
               (setf (parse-session-error-reported? session) t)
               (if (null (parser-location-position location))
                   (warn "Syntax error at EOF")
                   (let* ((prior-location (svref locations (- i 1)))
                          (prior-position (parser-location-position prior-location)))
                     (let* ((prior-byte-pos (first prior-position))
                            (prior-line (second prior-position))
                            (prior-column (third prior-position))
                            (prior-token-node
                             (find-if #'(lambda (node) (parser-token-rule-p (parser-node-rule node)))
                                      (parser-location-post-nodes prior-location))))
                       (cond ((null prior-token-node)
                              (warn-pos (list prior-line prior-column prior-byte-pos)
                                        "At line ~3D:~2D Peculiar syntax error (no tokens seen)."
                                        prior-line prior-column prior-byte-pos))
                             ((eq (first (parser-node-semantics prior-token-node))
                                  :EXTENDED-COMMENT-ERROR)
                              (let ((comment-text
                                     ;; trim text of comment down to include at most 2 newlines
                                     (do ((text (second (parser-node-semantics prior-token-node))
                                                (subseq text 0 (1- (length text)))))
                                         ((< (count-if #'(lambda (char) (eq char #\newline))
                                                       text)
                                             3)
                                          text))))
                                ;; trim text of comment down to include at most 20 characters
                                (when (> (length comment-text) 20)
                                  (setq comment-text (format nil "~A ..." (subseq comment-text 0 16))))
                                (warn-pos (list prior-line prior-column prior-byte-pos)
                                          "At line ~3D:~2D EOF while scanning for close of extended comment starting with \"~A\""
                                          prior-line prior-column ;; prior-byte-pos
                                          comment-text
                                          )))
                             (t
                              (warn-pos (list prior-line prior-column prior-byte-pos)
                                        "At line ~3D:~2D Syntactic error with \"~A\""
                                        prior-line prior-column ; prior-byte-pos
                                        (second (parser-node-semantics prior-token-node))
                                        ))))))
               (setq preceding-location-had-no-pending-rules? t)
               ))
        (when-debugging
         (when *verbose?*
           (report-pending-rules session i location)))
        ;;
        ;; Seed leaf rules for the token (if any) that starts at this location.
        (dolist (forward-node (parser-location-post-nodes location))
          (when (eq (parser-node-rule forward-node) +token-rule+)
            (let* ((token (parser-node-semantics forward-node))
                   ;; (column (third (third token)))
                   )
              ;;
              ;; (when (and (zerop (mod i 1000)) (> i 0)) (comment "[~8D] At token ~6D ~4D ~4D ~4D ~S" (delta-time) i pos line column xxx))
              ;;
              (when (null (parser-location-partial-node-data location))
                ;; Maybe we finished one toplevel form and are about to parse another.
                ;; But if there were errors, we're probably still inside a buggy form,
                ;; so don't try to parse toplevel forms until we get back to column 1.
                (let ((column (third (third token))))
                  (unless (and (parse-session-error-reported? session) (> column 1))
                    (debugging-comment "Adding top-level node because of null partial-node-data at location before ~S" token)
                    (add-toplevel-node session i))))
              ;; Dispatch on the token class.  A token string may double as a
              ;; grammar keyword; unless the grammar says keywords are
              ;; exclusively keywords, we seed both interpretations.
              (let* ((tok2 (second token))
                     (specific-keyword-rule (if (stringp tok2)
                                                (gethash (second token) ht-string-to-keyword-rules)
                                                nil)))
                (case (first token)
                  (:SYMBOL ; (:WORD-SYMBOL :NON-WORD-SYMBOL)
                   (cond ((null specific-keyword-rule)
                          (add-partial-node session generic-symbol-rule forward-node 0))
                         (keywords-are-keywords-only?
                          (add-partial-node session specific-keyword-rule forward-node 0))
                         (t
                          (add-partial-node session specific-keyword-rule forward-node 0)
                          (add-partial-node session generic-symbol-rule forward-node 0))))
                  (:CHARACTER
                   ;; never add keyword rule
                   (add-partial-node session generic-character-rule forward-node 0))
                  (:STRING
                   ;; never add keyword rule
                   (add-partial-node session generic-string-rule forward-node 0))
                  (:NUMBER
                   (cond ((null specific-keyword-rule)
                          (add-partial-node session generic-number-rule forward-node 0))
                         (keywords-are-keywords-only?
                          (add-partial-node session specific-keyword-rule forward-node 0))
                         (t
                          (add-partial-node session generic-number-rule forward-node 0)
                          (add-partial-node session specific-keyword-rule forward-node 0))))
                  (:AD-HOC-KEYWORD-ONLY
                   (if (null specific-keyword-rule)
                       (warn "Token claimed to be a keyword is not in the grammar: ~S" token)
                       (add-partial-node session specific-keyword-rule forward-node 0)))
                  (:AD-HOC-SYMBOL-ONLY
                   ;; never add keyword rule
                   (unless (null specific-keyword-rule)
                     (warn "Token claimed to be only a symbol has appeared as a keyword in the grammar: ~S"
                           token))
                   (add-partial-node session generic-symbol-rule forward-node 0))
                  (:AD-HOC-NUMBER-ONLY
                   ;; never add keyword rule
                   (unless (null specific-keyword-rule)
                     (warn "Token claimed to be only a number has appeared as a keyword in the grammar: ~S"
                           token))
                   (add-partial-node session generic-number-rule forward-node 0))
                  (:AD-HOC-KEYWORD-AND-SYMBOL-ONLY
                   (if (null specific-keyword-rule)
                       (warn "Token claimed to be a keyword (and a symbol) is not in the grammar: ~S" token)
                       (add-partial-node session specific-keyword-rule forward-node 0))
                   (add-partial-node session generic-symbol-rule forward-node 0))
                  (:AD-HOC-KEYWORD-AND-NUMBER-ONLY
                   (if (null specific-keyword-rule)
                       (warn "Token claimed to be a keyword (and a number) is not in the grammar: ~S"
                             token)
                       (add-partial-node session specific-keyword-rule forward-node 0))
                   (add-partial-node session generic-number-rule forward-node 0))
                  (:AD-HOC-SYMBOL-AND-NUMBER-ONLY
                   ;; never add keyword rule
                   (unless (null specific-keyword-rule)
                     (warn "Token claimed to be only a symbol or a number has appeared as a keyword in the grammar: ~S"
                           token))
                   (add-partial-node session generic-symbol-rule forward-node 0)
                   (add-partial-node session generic-number-rule forward-node 0))
                  (:AD-HOC-KEYWORD-AND-SYMBOL-AND-NUMBER-ONLY
                   (if (null specific-keyword-rule)
                       (warn "Token claimed to be a keyword (and a symbol and a number) is not in the grammar: ~S"
                             token)
                       (add-partial-node session specific-keyword-rule forward-node 0))
                   (add-partial-node session generic-symbol-rule forward-node 0)
                   (add-partial-node session generic-number-rule forward-node 0))
                  (:PRAGMA
                   (add-partial-node session generic-pragma-rule forward-node 0))
                  )))))))))
(defun initialize-location-desired-bvs (session)
  ;; Give every location of SESSION a fresh all-zero bit vector sized to the
  ;; parser's rule bit-vector width; bits are turned on later as rules become
  ;; desirable at each location.
  (let ((locations (parse-session-locations session))
        (bv-size (parser-bv-size (parse-session-parser session))))
    (loop for i below (length locations)
          do (setf (parser-location-desired-bv (svref locations i))
                   (make-array bv-size :element-type 'bit :initial-element 0)))))
;; ======================================================================
(defun add-toplevel-node (session index)
  ;; Seed a fresh partial node for the grammar's start rule at location INDEX,
  ;; i.e. announce that a new toplevel form may begin here.  Registers the
  ;; partial node at the location and ors the start rule's possible-handles
  ;; bits into the location's desired bit vector.
  (let* ((locations (parse-session-locations session))
         (location (svref locations index))
         (start-rule (parse-session-start-rule session))
         (new-toplevel-node
          (create-parser-node :rule start-rule
                              :pre-index index
                              :children (make-array 1 :initial-element nil)))
         (handles-bv (parser-anyof-rule-possible-handles-bv start-rule)))
    (when-debugging
     (when *verbose?*
       (let ((position (parser-location-position location)))
         (comment "Adding top-level node for index ~D at line ~D, column ~D, byte ~D"
                  index
                  (second position)
                  (third position)
                  (first position)))))
    (augment-location-partial-node-data location new-toplevel-node 0)
    (augment-location-desired-bv location handles-bv)
    (when-debugging
     (when *verbose?*
       (report-pending-rules session index location)))
    ))
(defun report-pending-rules (session index location)
  ;; Debugging aid: print the partial (pending) parser nodes waiting at
  ;; LOCATION (chart position INDEX), and -- when *VERBOSE?* is :VERY -- the
  ;; rules currently marked desirable there.  A no-op in non-debug builds.
  ;;
  ;; Fix: the non-debug declaration used to ignore a nonexistent variable
  ;; PARSER while leaving the real parameter SESSION undeclared, which
  ;; provokes compiler diagnostics; ignore the actual parameters instead.
  #-DEBUG-PARSER
  (declare (ignore session index location))
  #+DEBUG-PARSER
  (let ((parser (parse-session-parser session)))
    (comment "Pending at ~4D: ~6D" index (length (parser-location-partial-node-data location)))
    (dolist (datum (parser-location-partial-node-data location))
      (let ((node (car datum))
            ;; iii refers to position of child node within parent node
            (iii (cdr datum)))
        (comment "Pending at location ~6D: ~6D ~20A ~D"
                 index
                 (parser-node-number node)
                 (parser-rule-name (parser-node-rule node))
                 iii)))
    (when (eq *verbose?* :very)
      (format t "======")
      (dolist (rule (rules-for-bv parser (parser-location-desired-bv location)))
        (comment "Desired rule: ~A" rule)))
    (format t "======")))
;; ======================================================================
(defun add-partial-node (session parent-rule child-node child-index)
  ;; Create a new (possibly already complete) parse node for PARENT-RULE with
  ;; CHILD-NODE as the child at CHILD-INDEX, dispatching on the exact rule
  ;; struct type.  Anyof/keyword/atomic/id rules are unit reductions; tuple,
  ;; pieces and repeat rules build partial nodes that await further children.
  (ecase (structure-type-of parent-rule) ; faster than etypecase since we are looking for exact type matches
    (parser-anyof-rule (add-unit-reduction session parent-rule child-node))
    ;;
    (parser-tuple-rule (add-partial-tuple-node session parent-rule child-node child-index))
    (parser-pieces-rule (add-partial-pieces-node session parent-rule child-node))
    (parser-repeat-rule (add-partial-repeat-node session parent-rule child-node))
    ;;
    (parser-keyword-rule (add-unit-reduction session parent-rule child-node))
    (parser-atomic-rule (add-unit-reduction session parent-rule child-node))
    (parser-id-rule (add-unit-reduction session parent-rule child-node)) ; this case shouldn't happen
    ))
(defun add-unit-reduction (session rule child-node)
  ;; Complete a unit reduction: wrap CHILD-NODE in a new node for RULE that
  ;; spans exactly the same chart region.  The new node's bit-vector index
  ;; comes either from the rule itself or, via the precedence->bvi map, from
  ;; the precedence inherited from the child's rule.  The finished node is
  ;; installed at its start location and propagated further up the chart.
  ;; "bvi" stands for "bit-vector-index"
  (when (null rule) (break "Missing rule in unit-reduction"))
  (let* ((pre-index (parser-node-pre-index child-node))
         (post-index-ptr (parser-node-post-index-ptr child-node))
         (inherited-precedence
          (parser-rule-precedence (parser-node-rule child-node)))
         (explicit-bvi (parser-rule-bvi rule))
         (p2bvi-alist (parser-rule-p2bvi-map rule))
         (implicit-bvi (cdr (assoc inherited-precedence p2bvi-alist)))
         (new-node (create-parser-node :rule rule
                                       :bvi (or explicit-bvi implicit-bvi)
                                       :semantics nil
                                       :pre-index pre-index
                                       :post-index-ptr post-index-ptr
                                       :parents nil
                                       :children (vector child-node)
                                       :precedence inherited-precedence))
         (locations (parse-session-locations session)))
    (push new-node (parser-node-parents child-node))
    (push new-node (parser-location-post-nodes (svref locations pre-index)))
    (when-debugging
     (when *verbose?*
       (show-node new-node "Completed")))
    (parser-propagate-from-node session new-node)
    new-node))
(defun augment-location-partial-node-data (location parent-node child-index)
  ;; Record at LOCATION that PARENT-NODE is waiting here for its
  ;; CHILD-INDEX'th child (child-index is the slot within the parent rule).
  (setf (parser-location-partial-node-data location)
        (cons (cons parent-node child-index)
              (parser-location-partial-node-data location))))
(defun augment-location-desired-bv (location additional-desired-bv)
  ;; Destructively OR ADDITIONAL-DESIRED-BV into LOCATION's desired bit
  ;; vector (third BIT-IOR argument makes it in-place).  A null bv is a no-op.
  (when additional-desired-bv
    (let ((desired (parser-location-desired-bv location)))
      (bit-ior desired additional-desired-bv desired))))
(defun add-partial-tuple-node (session rule child-node child-index)
  ;; Start a new node for tuple RULE with CHILD-NODE filling slot CHILD-INDEX.
  ;; If every other item of the tuple is optional the node is already complete
  ;; and gets installed; otherwise it is registered as pending at the child's
  ;; post location for each immediately-following (possibly optional) item.
  ;; child-index will normally be 0, but could be larger if we are skipping past leading optionals
  (unless (eq rule (parse-session-start-rule session))
    (let* ((child-pre-index (parser-node-pre-index child-node))
           (pattern (parser-rule-items rule))
           (pattern-size (length pattern))
           (next-child-index (1+ child-index))
           (children (make-array pattern-size :initial-element nil)))
      (declare (simple-vector children))
      (let ((new-node
             (create-parser-node :rule rule
                                 :bvi (parser-rule-bvi rule)
                                 :pre-index child-pre-index
                                 :children children
                                 )))
        (adopt-child new-node children child-index child-node)
        (let ((all-other-required-children-present?
               (dotimes (i (length children) t)
                 (when (and
                        ;; ignoring this child (which will be filled in),
                        (not (equal i child-index))
                        ;; if no child and not optional, return nil
                        (null (svref children i))
                        (not (parser-rule-item-optional? (svref pattern i))))
                   (return nil))))
              (this-is-not-last-child?
               (< next-child-index (length children)))
              (locations (parse-session-locations session)))
          (when-debugging
           (when *verbose?*
             (when (not all-other-required-children-present?)
               (show-node new-node "Created "))))
          (when all-other-required-children-present?
            (install-completed-node session new-node child-node))
          (when this-is-not-last-child?
            (let ((post-loc
                   (svref locations (parser-node-post-index child-node))))
              (let ((repeat? t))
                (loop while (and repeat? (< next-child-index pattern-size))
                      do
                      ;; This will normally get the bv for the SECOND item, since we normally
                      ;; have just matched on the first item and now looking for more items.
                      ;; But if the first item(s) is/are optional, and we got to this rule by matching
                      ;; on the second or a later item, then the bv might be for the third or later item.
                      ;; And if that item is optional, we may also get the bv for subsequent items.
                      ;; So we could get just the second, or second thru fourth, or just third, or
                      ;; third thru sixth, etc.  We could probably cache this for each way of getting
                      ;; here, but it's probably not worth the effort to avoid the occasional extra bit-ior
                      ;; when this item is optional.
                      (let* ((next-child-item (svref pattern next-child-index))
                             (optional? (parser-rule-item-optional? next-child-item))
                             (handles-bv (parser-rule-item-possible-handles-bv next-child-item)))
                        (augment-location-partial-node-data post-loc new-node next-child-index)
                        (augment-location-desired-bv post-loc handles-bv)
                        (setq repeat? optional?)
                        (incf next-child-index)
                        )))
              ))
          new-node)))))
(defun add-partial-pieces-node (session rule child-node)
  ;; Start a node for pieces RULE (unordered alternatives, any number may
  ;; appear) with CHILD-NODE in slot 0.  A pieces node is complete after any
  ;; one piece, so it is both installed as complete and left pending at the
  ;; child's post location for further pieces.
  (let* ((child-pre-index (parser-node-pre-index child-node))
         (child-post-index (parser-node-post-index child-node))
         (child-post-location (svref (parse-session-locations session)
                                     child-post-index))
         (children (make-array (length (parser-rule-items rule)) :initial-element nil))
         (new-node
          (create-parser-node :rule rule
                              :bvi (parser-rule-bvi rule)
                              :pre-index child-pre-index
                              :children children
                              ))
         ;; The desired-bv for a pieces rule is just the union of the desired-bvs for
         ;; each child, any of which can appear in any order, so the desired bv for
         ;; the next child is always the same.
         (handles-bv (parser-pieces-rule-possible-handles-bv rule)))
    (adopt-child new-node children 0 child-node)
    (augment-location-partial-node-data child-post-location new-node 1)
    (augment-location-desired-bv child-post-location handles-bv)
    (install-completed-node session new-node child-node)
    new-node))
(defun add-partial-repeat-node (session rule child-node)
  ;; Start a node for repeat RULE with CHILD-NODE as the first element (child
  ;; vector grows on demand; 6 is just the initial capacity).  The node is
  ;; complete after one element, and is also left pending at the child's post
  ;; location, awaiting the separator (if any) or the next element.
  (let* ((child-pre-index (parser-node-pre-index child-node))
         (child-post-index (parser-node-post-index child-node))
         (child-post-location (svref (parse-session-locations session)
                                     child-post-index))
         (children (make-array 6 :initial-element nil))
         (new-node (create-parser-node :rule rule
                                       :bvi (parser-rule-bvi rule)
                                       :pre-index child-pre-index
                                       :children children
                                       ))
         ;; Next thing to look for: separator when the rule has one,
         ;; otherwise another element.
         (desired-rule-item
          (or (parser-repeat-rule-separator rule)
              (parser-repeat-rule-element rule)))
         (handles-bv (parser-rule-item-possible-handles-bv desired-rule-item))
         )
    (adopt-child new-node children 0 child-node)
    (augment-location-partial-node-data child-post-location new-node 1)
    (augment-location-desired-bv child-post-location handles-bv)
    (install-completed-node session new-node child-node)
    new-node))
;; ======================================================================
(defun install-completed-node (session node last-child-node)
  ;; Mark NODE as complete: fix its precedence (explicit from the rule, or,
  ;; for anyof rules, inherited from the last child), share the last child's
  ;; post-index cell so the node's extent tracks the child's, record it among
  ;; the post-nodes of its start location, and propagate reductions upward.
  (let* ((pre-index (parser-node-pre-index node))
         (post-index-ptr (parser-node-post-index-ptr last-child-node))
         (rule (parser-node-rule node))
         (explicit-precedence (parser-rule-precedence rule)))
    (if (null explicit-precedence)
        (when (parser-anyof-rule-p rule)
          (setf (parser-node-precedence node)
                (parser-node-precedence last-child-node)))
        (setf (parser-node-precedence node) explicit-precedence))
    (setf (parser-node-post-index-ptr node) post-index-ptr)
    (when-debugging
     (when *verbose?*
       (show-node node "Completed")))
    (push node (parser-location-post-nodes
                (svref (parse-session-locations session)
                       pre-index)))
    (parser-propagate-from-node session node)
    node))
(defun parser-propagate-from-node (session this-node)
  ;; Propagate a (newly completed) node through the chart: try every unit/
  ;; partial reduction it can trigger, extend all partial nodes that were
  ;; waiting at its start location, and -- if it is a toplevel (start-rule)
  ;; node -- seed a fresh toplevel node right after it.
  (unless (or (null (parser-node-pre-index this-node))
              (null (parser-node-post-index this-node)))
    (attach-reductions session this-node)
    (extend-partial-nodes-reaching-this-node session this-node))
  (when (eq (parser-node-rule this-node)
            (parse-session-start-rule session))
    (add-toplevel-node session (parser-node-post-index this-node)))
  nil)
(defun attach-reductions (session this-node)
  ;; For every grammar reduction whose child rule is THIS-NODE's rule, build
  ;; the parent node -- but only when the parent's bit is set in the desired
  ;; bit vector of this node's start location (i.e. some pending rule could
  ;; actually use that parent here); otherwise the reduction is implausible
  ;; and skipped.
  (let* ((locations (parse-session-locations session))
         (pre-index (parser-node-pre-index this-node))
         (pre-loc (svref locations pre-index))
         (desired-bv (parser-location-desired-bv pre-loc))
         (this-rule (parser-node-rule this-node))
         (reductions (parser-rule-reductions this-rule)))
    (dolist (reduction reductions)
      (let* ((parent-rule (reduction-parent-rule reduction))
             ;; Parent's bit index: explicit on the rule, else looked up by
             ;; this node's precedence in the precedence->bvi map.
             (parent-bv-index
              (or (parser-rule-bvi parent-rule)
                  (cdr (assoc (parser-node-precedence this-node)
                              (parser-rule-p2bvi-map parent-rule))))))
        (if (and parent-bv-index (eq (sbit desired-bv parent-bv-index) 1))
            (let ((child-index (reduction-child-index reduction)))
              (add-partial-node session parent-rule this-node child-index))
            (progn
              (debugging-comment "Reduction from ~D not plausible : ~S ~S (bit ~D) at ~D"
                                 (parser-node-number this-node)
                                 (structure-type-of parent-rule)
                                 (parser-rule-name parent-rule)
                                 parent-bv-index
                                 (reduction-child-index reduction)
                                 )))))))
;; ======================================================================
(defun extend-partial-nodes-reaching-this-node (session this-node)
  ;; Offer THIS-NODE as the next child to every partial node recorded as
  ;; pending at this node's start location.  The pending list is captured
  ;; before iterating, so entries pushed during extension are not re-visited
  ;; in this pass.
  (let ((pending (parser-location-partial-node-data
                  (svref (parse-session-locations session)
                         (parser-node-pre-index this-node)))))
    (loop for (candidate-parent . child-index) in pending
          do (maybe-extend-partial-node session
                                        candidate-parent
                                        child-index
                                        this-node))))
(defun maybe-extend-partial-node (session parent-node child-index this-node)
  ;; Try to plug THIS-NODE into PARENT-NODE's slot CHILD-INDEX, dispatching
  ;; on the parent's rule type.  Anyof parents are never extended this way;
  ;; the only legitimate anyof here is the toplevel start rule, anything else
  ;; gets a debug-build warning.
  (let ((parent-rule (parser-node-rule parent-node)))
    (ecase (structure-type-of parent-rule) ; faster than etypecase since we are doing exact matches
      (parser-tuple-rule
       (maybe-extend-partial-tuple-node session parent-node parent-rule child-index this-node))
      (parser-pieces-rule
       (maybe-extend-partial-pieces-node session parent-node parent-rule child-index this-node))
      (parser-repeat-rule
       (maybe-extend-partial-repeat-node session parent-node parent-rule child-index this-node))
      (parser-anyof-rule
       (when-debugging
        (unless (eq (parser-node-rule parent-node)
                    (parse-session-start-rule session))
          (warn "Attempt to extend non-toplevel ANYOF node ~S at ~D using node ~S"
                parent-node
                child-index
                (parser-node-number this-node)))))
      )))
(defun maybe-extend-partial-tuple-node (session node rule child-index candidate-child)
  ;; If CANDIDATE-CHILD fits slot CHILD-INDEX of tuple NODE, extend the node.
  ;; When the slot (or a later one) is already filled -- i.e. this would fork
  ;; an ambiguity -- the partial parent is replicated first, EXCEPT in the
  ;; "cannibalizing" case, where precedence shows the candidate can extend
  ;; the existing node in place.  Completed nodes get installed; incomplete
  ;; ones are re-registered as pending for the following item(s).
  (let* ((pattern (parser-rule-items rule))
         (pattern-size (length pattern))
         (desired-item (svref pattern child-index)))
    (when (parser-rule-item-matches? desired-item candidate-child)
      (let ((children (parser-node-children node)))
        (declare (simple-vector children))
        (let* ((next-child-index (1+ child-index))
               (all-other-required-children-present?
                (dotimes (i (length children) t)
                  (when (and
                         ;; ignoring this child (which will be filled in),
                         (not (equal i child-index))
                         ;; if no child and not optional, return nil
                         (null (svref children i))
                         (not (parser-rule-item-optional? (svref pattern i)))
                         )
                    (return nil))))
               (this-or-following-child-is-already-present?
                (do ((i child-index (1+ i)))
                    ((>= i (length children))
                     nil)
                  (unless (null (svref children i))
                    (return t))))
               (this-is-not-last-child? (< next-child-index (length children)))
               (this-child-is-optional? (parser-rule-item-optional? desired-item))
               (locations (parse-session-locations session))
               ;;(parser (parse-session-parser session))
               (cannibalizing? nil))
          ;; if this slot is already full, we may replicate the partial parent
          (when (and (or this-or-following-child-is-already-present?
                         (and all-other-required-children-present?
                              (or this-is-not-last-child?
                                  this-child-is-optional?)))
                     (not
                      (setq cannibalizing?
                            (let ((desired-precedence (parser-rule-item-precedence desired-item)))
                              (and (not (null desired-precedence))
                                   (let ((candidate-precedence (parser-node-precedence candidate-child)))
                                     (and (not (null candidate-precedence))
                                          (<= candidate-precedence desired-precedence))))))))
            (setq node (replicate-parser-node node child-index))
            (setq children (parser-node-children node)))
          ;;
          (adopt-child node children child-index candidate-child)
          ;; number of children is fixed...
          (when cannibalizing?
            (debugging-comment "Cannibalized ~D. Last node now ~D"
                               (parser-node-number node)
                               (parser-node-number candidate-child))
            (revise-cannibalized-node node candidate-child))
          ;; whether or not we're cannibalizing, see if we're done
          ;; (revision 7/31/03 for forges parsers)
          (cond (all-other-required-children-present?
                 (install-completed-node session node candidate-child))
                (t
                 (when-debugging
                  (when *verbose?*
                    (show-node node (format nil "Extended ~D" child-index))))))
          (when this-is-not-last-child?
            (let ((post-loc (svref locations (parser-node-post-index candidate-child)))
                  (repeat? t))
              ;; Register as pending for the next item, and -- while items are
              ;; optional -- for each item after it as well.
              (loop while (and repeat? (< next-child-index pattern-size)) do
                (let* ((next-child-item (svref pattern next-child-index))
                       (optional? (parser-rule-item-optional? next-child-item))
                       (handles-bv (parser-rule-item-possible-handles-bv next-child-item)))
                  (augment-location-partial-node-data post-loc node next-child-index)
                  (augment-location-desired-bv post-loc handles-bv)
                  (setq repeat? optional?)
                  (incf next-child-index)
                  ))))
          )))))
(defun revise-cannibalized-node (node last-child)
  ;; After cannibalizing (extending NODE in place with LAST-CHILD instead of
  ;; replicating), make LAST-CHILD share NODE's post-index cell so both track
  ;; the same extent, then reset the shared cell to the child's post index.
  (when-debugging
   (when *verbose?*
     (show-node node "Revised ")))
  (let ((post-index (parser-node-post-index last-child)))
    ;; put ptr cell from parent into child as well, then mutate to use data from child
    (setf (parser-node-post-index-ptr last-child) (parser-node-post-index-ptr node))
    (setf (parser-node-post-index last-child) post-index)))
(defun maybe-extend-partial-pieces-node (session node rule child-index candidate-child)
  ;; If CANDIDATE-CHILD matches any alternative of pieces RULE, append it as
  ;; the CHILD-INDEX'th collected piece (replicating the partial parent when
  ;; that slot is taken), re-register the node as pending for yet another
  ;; piece, and install it as complete (pieces nodes are complete at every
  ;; length).
  (let ((alternatives (parser-rule-items rule))
        (locations (parse-session-locations session)))
    (dotimes (i (length alternatives))
      (let ((desired-item (svref alternatives i)))
        (when (parser-rule-item-matches? desired-item candidate-child)
          (let ((children (parser-node-children node)))
            ;; if this slot is already full, replicate partial parent, otherwise just use it
            (unless (null (svref children child-index))
              (setq node (replicate-parser-node node child-index))
              (setq children (parser-node-children node)))
            ;;
            (adopt-child node children child-index candidate-child)
            (let ((post-loc (svref locations (parser-node-post-index candidate-child)))
                  (handles-bv (parser-pieces-rule-possible-handles-bv rule)))
              (augment-location-partial-node-data post-loc node (1+ child-index))
              ;; The desired-bv for a pieces rule is just the union of the desired-bvs for
              ;; each child, any of which can appear in any order, so the desired bv for
              ;; the next child is always the same.
              (augment-location-desired-bv post-loc handles-bv))
            (install-completed-node session node candidate-child)))))))
(defun maybe-extend-partial-repeat-node (session node rule child-index candidate-child)
  ;; Extend repeat NODE with CANDIDATE-CHILD at slot CHILD-INDEX.  With a
  ;; separator, even slots hold elements and odd slots separators; without
  ;; one, every slot holds an element.  Element extensions always replicate
  ;; the partial parent (the shorter parse stays valid); the child vector is
  ;; doubled when full.  The node is complete after each element and is
  ;; always re-registered as pending for the next separator/element.
  (let* ((looking-for-element? (or (evenp child-index)
                                   (null (parser-repeat-rule-separator rule))))
         (desired-item (if looking-for-element?
                           (parser-repeat-rule-element rule)
                           (parser-repeat-rule-separator rule))))
    (when (parser-rule-item-matches? desired-item candidate-child)
      (let* ((children (parser-node-children node))
             (children-size (length children)))
        (declare (simple-vector children))
        ;; if this slot is already full, replicate partial parent, otherwise just use it
        (when (or looking-for-element?
                  (and
                   (< child-index children-size)
                   (not (null (svref children child-index)))))
          (setq node (replicate-parser-node node child-index))
          (setq children (parser-node-children node)))
        ;; number of children is indefinite, so we may need to extend vector
        (when (>= child-index children-size)
          (let ((new-children (make-array (* child-index 2) :initial-element nil)))
            (dotimes (i children-size)
              (setf (svref new-children i) (svref children i)))
            (setf (parser-node-children node) new-children)
            (setf children new-children)))
        ;;
        (adopt-child node children child-index candidate-child)
        (when looking-for-element?
          (install-completed-node session node candidate-child))
        (let* ((post-loc (svref (parse-session-locations session)
                                (parser-node-post-index candidate-child)))
               (next-item
                ;; next subrule is separator if this was element, and vice versa,
                ;; unless separator is null, in which case always use element
                (if (or (oddp child-index) (null (parser-repeat-rule-separator rule)))
                    (parser-repeat-rule-element rule)
                    (parser-repeat-rule-separator rule)))
               (handles-bv (parser-rule-item-possible-handles-bv next-item)))
          (augment-location-partial-node-data post-loc node (1+ child-index))
          (augment-location-desired-bv post-loc handles-bv)
          )))))
(defun parser-rule-item-matches? (parent-item child-node)
  ;; True when CHILD-NODE can fill PARENT-ITEM's slot: the child's rule must
  ;; be acceptable (by name when the node has no bit-vector index, otherwise
  ;; via the item's possible-children bit vector), and the child's precedence
  ;; must not exceed the item's maximum (a null precedence on either side
  ;; imposes no limit).
  (let ((child-bvi (parser-node-bvi child-node)))
    (and (if (null child-bvi)
             (eq (parser-rule-item-rule parent-item)
                 (parser-rule-name (parser-node-rule child-node)))
             (eq 1 (sbit (parser-rule-item-possible-children-bv parent-item)
                         child-bvi)))
         (let ((max-precedence-allowed (parser-rule-item-precedence parent-item)))
           (or (null max-precedence-allowed)
               (let ((child-precedence (parser-node-precedence child-node)))
                 (or (null child-precedence)
                     (<= child-precedence max-precedence-allowed))))))))
;; ======================================================================
(defun replicate-parser-node (old-node child-index)
  ;; Fork OLD-NODE for an ambiguous extension: shallow-copy the node and give
  ;; it a fresh child vector holding only the children BEFORE CHILD-INDEX
  ;; (slots from CHILD-INDEX on start empty, ready for the new alternative).
  (declare (fixnum child-index))
  (let ((new-node (copy-parser-node old-node)))
    ;;
    (when-debugging
     (setf (parser-node-number new-node) (incf *parser-node-number*)))
    ;;
    (let* ((old-children (parser-node-children old-node))
           (new-children (make-array (length old-children) :initial-element nil)))
      (declare (simple-vector old-children)
               (simple-vector new-children))
      (dotimes (i child-index)
        (setf (svref new-children i) (svref old-children i)))
      (setf (parser-node-children new-node) new-children))
    ;;
    (when-debugging
     (when *verbose?*
       (show-node new-node (format nil "~6D =>" (parser-node-number old-node))))
     (push new-node *all-nodes*))
    ;;
    new-node))
;; ======================================================================
(defun adopt-child (node children child-index child-node)
  ;; Install CHILD-NODE as the CHILD-INDEX'th child in CHILDREN (NODE's child
  ;; vector), record NODE among the child's parents, and clear every slot
  ;; after CHILD-INDEX (they may hold leftovers from a prior extension).
  (declare (simple-vector children) (fixnum child-index))
  (setf (svref children child-index) child-node)
  (push node (parser-node-parents child-node))
  (loop for i from (1+ child-index) below (length children)
        do (setf (svref children i) nil)))
| null | https://raw.githubusercontent.com/KestrelInstitute/Specware/2be6411c55f26432bf5c9e2f7778128898220c24/Library/Algorithms/Parsing/Chart/Handwritten/Lisp/parse-node-utilities.lisp | lisp | Package : Parser ; Base : 10 ; Syntax : Common - Lisp -*-
======================================================================
(add-toplevel-node session 0)
t = stream, nil = eof-error-p
'(let ((token
(when (eq (parser-node-rule forward-node) +token-rule+)
(return (parser-node-semantics forward-node))))))
(format t "~&[~3D : ~30D~% ~S]~%"
i
token
last location
Suppress complaint if immediately preceding location also had the same problem.
prior-byte-pos
prior-byte-pos
But if there were errors, we're probably still inside a buggy form,
(:WORD-SYMBOL :NON-WORD-SYMBOL)
never add keyword rule
never add keyword rule
never add keyword rule
never add keyword rule
never add keyword rule
======================================================================
iii refers to position of child node within parent node
======================================================================
faster than etypecase since we are looking for exact type matches
this case shouldn't happen
"bvi" stands for "bit-vector-index"
child-index refers to the position of the child rule within the parent rule
(when-debugging
(when *verbose?*
))
child-index will normally be 0, but could be larger if we are skipping past leading optionals
ignoring this child (which will be filled in),
if no child and not optional, return nil
And if that item is optional, we may also get the bv for subsequent items.
here, but it's probably not worth the effort to avoid the occasional extra bit-ior
when this item is optional.
The desired-bv for a pieces rule is just the union of the desired-bvs for
each child, any of which can appear in any order, so the desired bv for
the next child is always the same.
======================================================================
======================================================================
faster than etypecase since we are doing exact matches
ignoring this child (which will be filled in),
if no child and not optional, return nil
(parser (parse-session-parser session))
if this slot is already full, we may replicate the partial parent
number of children is fixed...
whether or not we're cannibalizing, see if we're done
(revision 7/31/03 for forges parsers)
if this slot is already full, replicate partial parent, otherwise just use it
The desired-bv for a pieces rule is just the union of the desired-bvs for
each child, any of which can appear in any order, so the desired bv for
the next child is always the same.
if this slot is already full, replicate partial parent, otherwise just use it
number of children is indefinite, so we may need to extend vector
next subrule is separator if this was element, and vice versa,
unless separator is null, in which case always use element
======================================================================
====================================================================== |
(in-package :Parser4)

;; WARN-POS is an anaphoric convenience macro: it expands into a call to
;; WARN-POS-FN, capturing the variable SESSION from the caller's lexical
;; scope.  It may therefore only be used inside functions that bind a
;; variable named SESSION (the current PARSE-SESSION).
(defmacro warn-pos (location &rest args)
  `(warn-pos-fn session ,location ,@args))

;; When true, warnings are collected into *WARNINGS* instead of being
;; signalled (and no Emacs repositioning happens).
(defvar *suppress-warnings?* nil)

;; Accumulates formatted warning strings while *SUPPRESS-WARNINGS?* is true.
(defvar *warnings* '())
(defun warn-pos-fn (session location &rest args)
  "Report a parser warning for SESSION at LOCATION (a list whose first
two elements are line and column).  ARGS are WARN-style format
arguments.  When *SUPPRESS-WARNINGS?* is true, the formatted message is
merely collected into *WARNINGS*; otherwise the first warning of the
session also moves the Emacs cursor to LOCATION in the file being
parsed, then the warning is signalled normally."
  (if *suppress-warnings?*
      (push (apply 'format nil args) *warnings*)
      (progn
        ;; On the first warning for this session, show the offending spot.
        (unless (parse-session-warning-issued? session)
          (Emacs::goto-file-position (namestring (parse-session-file session))
                                     (first location) (second location))
          (setf (parse-session-warning-issued? session) t))
        (apply 'warn args))))
(defun parser-attach-rules (session)
  "Second pass of the parser: walk every token location of SESSION in
order and attach grammar rules to the tokens found there, seeding the
bottom-up completion machinery (ADD-PARTIAL-NODE etc.).  Reports a
syntax error for any location that has tokens but no pending rules.
NOTE(review): three code fragments of this definition were reconstructed
because lines carrying trailing comments were lost upstream -- the
binding of STR in the interactive debugging block, the :SYMBOL case key,
and the line/column arguments of two warning messages.  Each is marked
below; confirm against the original source."
  (when-debugging (delta-time))
  (initialize-location-desired-bvs session)
  (let* ((locations (parse-session-locations session))
         (parser (parse-session-parser session))
         (ht-string-to-keyword-rules (parser-ht-string-to-keyword-rule parser))
         (generic-symbol-rule (parser-symbol-rule parser))
         (generic-string-rule (parser-string-rule parser))
         (generic-number-rule (parser-number-rule parser))
         (generic-character-rule (parser-character-rule parser))
         (generic-pragma-rule (parser-pragma-rule parser))
         (keywords-are-keywords-only? (parser-keywords-are-keywords-only? parser))
         #+DEBUG-PARSER
         (number-of-tokens-to-process 0)
         (preceding-location-had-no-pending-rules? nil))
    (dotimes (i (length locations))
      (when-debugging
       (when *verbose?*
         ;; Interactive single-stepping: each time the countdown expires,
         ;; ask how many further tokens to process before pausing again.
         ;; [Reconstructed: the line binding STR was lost.]
         (unless (> (decf number-of-tokens-to-process) 0)
           (let ((str (read-line)))
             (setq number-of-tokens-to-process (if (null str)
                                                   1
                                                 (or (parse-integer str :junk-allowed t)
                                                     1)))))
         (terpri)
         (format t "====================================================================================================")
         (terpri)))
      (let ((location (svref locations i)))
        ;; (dolist (forward-node (parser-location-post-nodes location))
        (cond ((or (eq i 0)
                   (not (null (parser-location-partial-node-data location)))
                   )
               (setq preceding-location-had-no-pending-rules? nil))
              (preceding-location-had-no-pending-rules?
               ;; This avoids showing millions (well, perhaps thousands) of annoying messages.
               nil)
              (t
               (setf (parse-session-error-reported? session) t)
               (if (null (parser-location-position location))
                   (warn "Syntax error at EOF")
                 (let* ((prior-location (svref locations (- i 1)))
                        (prior-position (parser-location-position prior-location)))
                   (let* ((prior-byte-pos (first prior-position))
                          (prior-line (second prior-position))
                          (prior-column (third prior-position))
                          (prior-token-node
                           (find-if #'(lambda (node) (parser-token-rule-p (parser-node-rule node)))
                                    (parser-location-post-nodes prior-location))))
                     (cond ((null prior-token-node)
                            (warn-pos (list prior-line prior-column prior-byte-pos)
                                      "At line ~3D:~2D Peculiar syntax error (no tokens seen)."
                                      prior-line prior-column prior-byte-pos))
                           ((eq (first (parser-node-semantics prior-token-node))
                                :EXTENDED-COMMENT-ERROR)
                            (let ((comment-text
                                   ;; trim text of comment down to include at most 2 newlines
                                   (do ((text (second (parser-node-semantics prior-token-node))
                                              (subseq text 0 (1- (length text)))))
                                       ((< (count-if #'(lambda (char) (eq char #\newline))
                                            text)
                                           3)
                                        text))))
                              ;; trim text of comment down to include at most 20 characters
                              (when (> (length comment-text) 20)
                                (setq comment-text (format nil "~A ..." (subseq comment-text 0 16))))
                              (warn-pos (list prior-line prior-column prior-byte-pos)
                                        "At line ~3D:~2D EOF while scanning for close of extended comment starting with \"~A\""
                                        prior-line prior-column ; [reconstructed: format string needs three args]
                                        comment-text
                                        )))
                           (t
                            (warn-pos (list prior-line prior-column prior-byte-pos)
                                      "At line ~3D:~2D Syntactic error with \"~A\""
                                      prior-line prior-column ; [reconstructed: format string needs three args]
                                      (second (parser-node-semantics prior-token-node))
                                      ))))))
               (setq preceding-location-had-no-pending-rules? t)
               ))
        (when-debugging
         (when *verbose?*
           (report-pending-rules session i location)))
        (dolist (forward-node (parser-location-post-nodes location))
          (when (eq (parser-node-rule forward-node) +token-rule+)
            (let* ((token (parser-node-semantics forward-node))
                   ;; (column (third (third token)))
                   )
              ;; (when (and (zerop (mod i 1000)) (> i 0)) (comment "[~8D] At token ~6D ~4D ~4D ~4D ~S" (delta-time) i pos line column xxx))
              (when (null (parser-location-partial-node-data location))
                ;; Maybe we finished one toplevel form and are about to parse another.
                ;; After an error, don't try to parse toplevel forms until we get
                ;; back to column 1.
                (let ((column (third (third token))))
                  (unless (and (parse-session-error-reported? session) (> column 1))
                    (debugging-comment "Adding top-level node because of null partial-node-data at location before ~S" token)
                    (add-toplevel-node session i))))
              (let* ((tok2 (second token))
                     (specific-keyword-rule (if (stringp tok2)
                                                (gethash (second token) ht-string-to-keyword-rules)
                                              nil)))
                (case (first token)
                  (:SYMBOL ; [reconstructed case key: clause adds symbol and/or keyword rules]
                   (cond ((null specific-keyword-rule)
                          (add-partial-node session generic-symbol-rule forward-node 0))
                         (keywords-are-keywords-only?
                          (add-partial-node session specific-keyword-rule forward-node 0))
                         (t
                          (add-partial-node session specific-keyword-rule forward-node 0)
                          (add-partial-node session generic-symbol-rule forward-node 0))))
                  (:CHARACTER
                   (add-partial-node session generic-character-rule forward-node 0))
                  (:STRING
                   (add-partial-node session generic-string-rule forward-node 0))
                  (:NUMBER
                   (cond ((null specific-keyword-rule)
                          (add-partial-node session generic-number-rule forward-node 0))
                         (keywords-are-keywords-only?
                          (add-partial-node session specific-keyword-rule forward-node 0))
                         (t
                          (add-partial-node session generic-number-rule forward-node 0)
                          (add-partial-node session specific-keyword-rule forward-node 0))))
                  (:AD-HOC-KEYWORD-ONLY
                   (if (null specific-keyword-rule)
                       (warn "Token claimed to be a keyword is not in the grammar: ~S" token)
                     (add-partial-node session specific-keyword-rule forward-node 0)))
                  (:AD-HOC-SYMBOL-ONLY
                   (unless (null specific-keyword-rule)
                     (warn "Token claimed to be only a symbol has appeared as a keyword in the grammar: ~S"
                           token))
                   (add-partial-node session generic-symbol-rule forward-node 0))
                  (:AD-HOC-NUMBER-ONLY
                   (unless (null specific-keyword-rule)
                     (warn "Token claimed to be only a number has appeared as a keyword in the grammar: ~S"
                           token))
                   (add-partial-node session generic-number-rule forward-node 0))
                  (:AD-HOC-KEYWORD-AND-SYMBOL-ONLY
                   (if (null specific-keyword-rule)
                       (warn "Token claimed to be a keyword (and a symbol) is not in the grammar: ~S" token)
                     (add-partial-node session specific-keyword-rule forward-node 0))
                   (add-partial-node session generic-symbol-rule forward-node 0))
                  (:AD-HOC-KEYWORD-AND-NUMBER-ONLY
                   (if (null specific-keyword-rule)
                       (warn "Token claimed to be a keyword (and a number) is not in the grammar: ~S"
                             token)
                     (add-partial-node session specific-keyword-rule forward-node 0))
                   (add-partial-node session generic-number-rule forward-node 0))
                  (:AD-HOC-SYMBOL-AND-NUMBER-ONLY
                   (unless (null specific-keyword-rule)
                     (warn "Token claimed to be only a symbol or a number has appeared as a keyword in the grammar: ~S"
                           token))
                   (add-partial-node session generic-symbol-rule forward-node 0)
                   (add-partial-node session generic-number-rule forward-node 0))
                  (:AD-HOC-KEYWORD-AND-SYMBOL-AND-NUMBER-ONLY
                   (if (null specific-keyword-rule)
                       (warn "Token claimed to be a keyword (and a symbol and a number) is not in the grammar: ~S"
                             token)
                     (add-partial-node session specific-keyword-rule forward-node 0))
                   (add-partial-node session generic-symbol-rule forward-node 0)
                   (add-partial-node session generic-number-rule forward-node 0))
                  (:PRAGMA
                   (add-partial-node session generic-pragma-rule forward-node 0))
                  )))))))))
(defun initialize-location-desired-bvs (session)
  "Reset every location of SESSION to a fresh all-zero desired-rules
bit vector, sized to the parser's rule bit-vector width."
  (let* ((locations (parse-session-locations session))
         (bv-size (parser-bv-size (parse-session-parser session))))
    (loop for location across locations
          do (setf (parser-location-desired-bv location)
                   (make-array bv-size
                               :element-type 'bit
                               :initial-element 0)))))
(defun add-toplevel-node (session index)
  ;; Seed a new, empty top-level parse node (an instance of the grammar's
  ;; start rule) at location INDEX, registering it both as a pending
  ;; partial node and in the location's desired-rule bit vector, so that
  ;; completed children starting at INDEX can attach to it.
  (let* ((locations (parse-session-locations session))
         (location (svref locations index))
         (start-rule (parse-session-start-rule session))
         (new-toplevel-node
          ;; The start rule is an anyof rule: exactly one child slot.
          (create-parser-node :rule start-rule
                              :pre-index index
                              :children (make-array 1 :initial-element nil)))
         (handles-bv (parser-anyof-rule-possible-handles-bv start-rule)))
    (when-debugging
     (when *verbose?*
       (let ((position (parser-location-position location)))
         (comment "Adding top-level node for index ~D at line ~D, column ~D, byte ~D"
                  index
                  (second position)
                  (third position)
                  (first position)))))
    (augment-location-partial-node-data location new-toplevel-node 0)
    (augment-location-desired-bv location handles-bv)
    (when-debugging
     (when *verbose?*
       (report-pending-rules session index location)))
    ))
(defun report-pending-rules (session index location)
  ;; Debugging aid: print the partial (pending) nodes waiting at LOCATION,
  ;; and, when *VERBOSE?* is :VERY, also the rules marked as desired there.
  ;; A no-op unless compiled with the DEBUG-PARSER feature.
  #-DEBUG-PARSER
  (declare (ignore session index location)) ; fixed: was (ignore parser ...), but PARSER is not a parameter
  #+DEBUG-PARSER
  (let ((parser (parse-session-parser session)))
    (comment "Pending at ~4D: ~6D" index (length (parser-location-partial-node-data location)))
    (dolist (datum (parser-location-partial-node-data location))
      (let ((node (car datum))
            (iii (cdr datum)))
        (comment "Pending at location ~6D: ~6D ~20A ~D"
                 index
                 (parser-node-number node)
                 (parser-rule-name (parser-node-rule node))
                 iii)))
    (when (eq *verbose?* :very)
      (format t "======")
      (dolist (rule (rules-for-bv parser (parser-location-desired-bv location)))
        (comment "Desired rule: ~A" rule)))
    (format t "======")))
(defun add-partial-node (session parent-rule child-node child-index)
  ;; Attach CHILD-NODE as the CHILD-INDEXth child of a new partial node
  ;; for PARENT-RULE, dispatching on the concrete rule structure type.
  ;; Dispatching via CASE on STRUCTURE-TYPE-OF is faster than ETYPECASE
  ;; because only exact type matches can occur here.
  ;; [Reconstructed: the CASE header line was lost upstream; the clause
  ;; shapes and the dangling close paren imply it.]
  (case (structure-type-of parent-rule)
    (parser-anyof-rule   (add-unit-reduction session parent-rule child-node))
    (parser-tuple-rule   (add-partial-tuple-node session parent-rule child-node child-index))
    (parser-pieces-rule  (add-partial-pieces-node session parent-rule child-node))
    (parser-repeat-rule  (add-partial-repeat-node session parent-rule child-node))
    (parser-keyword-rule (add-unit-reduction session parent-rule child-node))
    (parser-atomic-rule  (add-unit-reduction session parent-rule child-node))
    ))
(defun add-unit-reduction (session rule child-node)
  ;; Wrap CHILD-NODE in an immediately-complete node for RULE (a unit
  ;; reduction: RULE has exactly this one child).  The new node shares the
  ;; child's span (including its post-index pointer cell), inherits the
  ;; precedence of the child's rule, and gets its bit-vector index either
  ;; explicitly from the rule or via the rule's precedence->bvi map.
  (when (null rule) (break "Missing rule in unit-reduction"))
  (let* ((pre-index (parser-node-pre-index child-node))
         (post-index-ptr (parser-node-post-index-ptr child-node))
         (inherited-precedence
          (parser-rule-precedence (parser-node-rule child-node)))
         (explicit-bvi (parser-rule-bvi rule))
         (p2bvi-alist (parser-rule-p2bvi-map rule))
         (implicit-bvi (cdr (assoc inherited-precedence p2bvi-alist)))
         (new-node (create-parser-node :rule rule
                                       :bvi (or explicit-bvi implicit-bvi)
                                       :semantics nil
                                       :pre-index pre-index
                                       :post-index-ptr post-index-ptr
                                       :parents nil
                                       :children (vector child-node)
                                       :precedence inherited-precedence))
         (locations (parse-session-locations session)))
    (push new-node (parser-node-parents child-node))
    (push new-node (parser-location-post-nodes (svref locations pre-index)))
    (when-debugging
     (when *verbose?*
       (show-node new-node "Completed")))
    (parser-propagate-from-node session new-node)
    new-node))
(defun augment-location-partial-node-data (location parent-node child-index)
  "Record at LOCATION that PARENT-NODE is still waiting for its
CHILD-INDEXth child to begin here."
  (let ((entry (cons parent-node child-index)))
    (push entry (parser-location-partial-node-data location))))
(defun augment-location-desired-bv (location additional-desired-bv)
  ;; Merge ADDITIONAL-DESIRED-BV (a bit vector of desired rule indices,
  ;; or NIL) into LOCATION's desired bit vector, destructively.
  ;; [Fixed: the debugging line below had lost its comment marker and
  ;; would have executed as code.]
  ;; (comment "At loc ~6D, turn on bits ~S" (parser-location-index location) additional-desired-bv)
  (unless (null additional-desired-bv)
    (bit-ior (parser-location-desired-bv location)
             additional-desired-bv
             (parser-location-desired-bv location))))
(defun add-partial-tuple-node (session rule child-node child-index)
  ;; Create a new partial node for tuple RULE whose CHILD-INDEXth slot is
  ;; CHILD-NODE.  CHILD-INDEX is normally 0 but can be larger when leading
  ;; optional items were skipped.  If all other required children are
  ;; (vacuously) present the node is installed as completed; if more
  ;; children may follow, it is registered as pending at the child's post
  ;; location, together with the desired-rule bits for the next item and
  ;; for any further items while those are optional.
  ;; [Fixed: the large comment inside the loop below had lost its ";;"
  ;; markers and would have executed as code.]
  (unless (eq rule (parse-session-start-rule session))
    (let* ((child-pre-index (parser-node-pre-index child-node))
           (pattern (parser-rule-items rule))
           (pattern-size (length pattern))
           (next-child-index (1+ child-index))
           (children (make-array pattern-size :initial-element nil)))
      (declare (simple-vector children))
      (let ((new-node
             (create-parser-node :rule rule
                                 :bvi (parser-rule-bvi rule)
                                 :pre-index child-pre-index
                                 :children children
                                 )))
        (adopt-child new-node children child-index child-node)
        (let ((all-other-required-children-present?
               (dotimes (i (length children) t)
                 (when (and
                        (not (equal i child-index))
                        (null (svref children i))
                        (not (parser-rule-item-optional? (svref pattern i))))
                   (return nil))))
              (this-is-not-last-child?
               (< next-child-index (length children)))
              (locations (parse-session-locations session)))
          (when-debugging
           (when *verbose?*
             (when (not all-other-required-children-present?)
               (show-node new-node "Created "))))
          (when all-other-required-children-present?
            (install-completed-node session new-node child-node))
          (when this-is-not-last-child?
            (let ((post-loc
                   (svref locations (parser-node-post-index child-node))))
              (let ((repeat? t))
                (loop while (and repeat? (< next-child-index pattern-size))
                      do
                      ;; This will normally get the bv for the SECOND item, since we normally
                      ;; have just matched on the first item and now looking for more items.
                      ;; But if the first item(s) is/are optional, and we got to this rule by matching
                      ;; on the second or a later item, then the bv might be for the third or later item.
                      ;; So we could get just the second, or second thru fourth, or just third, or
                      ;; third thru sixth, etc.  We could probably cache this for each way of getting
                      ;; here, but it's probably not worth the effort to avoid the occasional extra bit-ior
                      ;; when this item is optional.
                      (let* ((next-child-item (svref pattern next-child-index))
                             (optional? (parser-rule-item-optional? next-child-item))
                             (handles-bv (parser-rule-item-possible-handles-bv next-child-item)))
                        (augment-location-partial-node-data post-loc new-node next-child-index)
                        (augment-location-desired-bv post-loc handles-bv)
                        (setq repeat? optional?)
                        (incf next-child-index)
                        )))
              ))
          new-node)))))
(defun add-partial-pieces-node (session rule child-node)
  ;; Create a partial node for pieces RULE with CHILD-NODE in slot 0.
  ;; Pieces may appear in any order, so the node is simultaneously
  ;; installed as (potentially) complete and registered as pending for a
  ;; further piece at the child's post location.  The desired bv for the
  ;; next child is the union over all pieces and is the same at every step.
  (let* ((child-pre-index (parser-node-pre-index child-node))
         (child-post-index (parser-node-post-index child-node))
         (child-post-location (svref (parse-session-locations session)
                                     child-post-index))
         (children (make-array (length (parser-rule-items rule)) :initial-element nil))
         (new-node
          (create-parser-node :rule rule
                              :bvi (parser-rule-bvi rule)
                              :pre-index child-pre-index
                              :children children
                              ))
         (handles-bv (parser-pieces-rule-possible-handles-bv rule)))
    (adopt-child new-node children 0 child-node)
    (augment-location-partial-node-data child-post-location new-node 1)
    (augment-location-desired-bv child-post-location handles-bv)
    (install-completed-node session new-node child-node)
    new-node))
(defun add-partial-repeat-node (session rule child-node)
  ;; Create a partial node for repeat RULE with CHILD-NODE as its first
  ;; element.  The children vector starts at a small arbitrary size (6)
  ;; and is grown on demand by MAYBE-EXTEND-PARTIAL-REPEAT-NODE.  The node
  ;; is registered as pending for the next subrule (the separator when the
  ;; rule has one, else another element), and -- since a single element
  ;; already forms a valid repeat -- also installed as completed.
  (let* ((child-pre-index (parser-node-pre-index child-node))
         (child-post-index (parser-node-post-index child-node))
         (child-post-location (svref (parse-session-locations session)
                                     child-post-index))
         (children (make-array 6 :initial-element nil))
         (new-node (create-parser-node :rule rule
                                       :bvi (parser-rule-bvi rule)
                                       :pre-index child-pre-index
                                       :children children
                                       ))
         (desired-rule-item
          (or (parser-repeat-rule-separator rule)
              (parser-repeat-rule-element rule)))
         (handles-bv (parser-rule-item-possible-handles-bv desired-rule-item))
         )
    (adopt-child new-node children 0 child-node)
    (augment-location-partial-node-data child-post-location new-node 1)
    (augment-location-desired-bv child-post-location handles-bv)
    (install-completed-node session new-node child-node)
    new-node))
(defun install-completed-node (session node last-child-node)
  ;; Finalize NODE now that LAST-CHILD-NODE fills its last required slot:
  ;; share the child's post-index pointer cell (so NODE ends where the
  ;; child ends, even if that end is later revised), settle the node's
  ;; precedence (an explicit rule precedence wins; anyof rules inherit the
  ;; child's precedence), record NODE among the post-nodes of its start
  ;; location, and propagate it upward.
  (let* ((pre-index (parser-node-pre-index node))
         (post-index-ptr (parser-node-post-index-ptr last-child-node))
         (rule (parser-node-rule node))
         (explicit-precedence (parser-rule-precedence rule)))
    (if (null explicit-precedence)
        (when (parser-anyof-rule-p rule)
          (setf (parser-node-precedence node)
                (parser-node-precedence last-child-node)))
      (setf (parser-node-precedence node) explicit-precedence))
    (setf (parser-node-post-index-ptr node) post-index-ptr)
    (when-debugging
     (when *verbose?*
       (show-node node "Completed")))
    (push node (parser-location-post-nodes
                (svref (parse-session-locations session)
                       pre-index)))
    (parser-propagate-from-node session node)
    node))
(defun parser-propagate-from-node (session this-node)
  "Propagate the completed THIS-NODE upward: attempt reductions into its
parent rules and try to extend partial nodes pending at its start
location.  If THIS-NODE is an instance of the start rule, also seed a
fresh top-level node immediately after it.  Returns NIL."
  (when (and (parser-node-pre-index this-node)
             (parser-node-post-index this-node))
    (attach-reductions session this-node)
    (extend-partial-nodes-reaching-this-node session this-node))
  (when (eq (parser-node-rule this-node)
            (parse-session-start-rule session))
    (add-toplevel-node session (parser-node-post-index this-node)))
  nil)
(defun attach-reductions (session this-node)
  ;; For each registered reduction of THIS-NODE's rule into a parent rule,
  ;; create the corresponding partial parent node -- but only when the
  ;; parent rule is plausible here, i.e. its bit (explicit bvi, or the one
  ;; selected by THIS-NODE's precedence) is set in the desired bit vector
  ;; of THIS-NODE's start location.
  (let* ((locations (parse-session-locations session))
         (pre-index (parser-node-pre-index this-node))
         (pre-loc (svref locations pre-index))
         (desired-bv (parser-location-desired-bv pre-loc))
         (this-rule (parser-node-rule this-node))
         (reductions (parser-rule-reductions this-rule)))
    (dolist (reduction reductions)
      (let* ((parent-rule (reduction-parent-rule reduction))
             (parent-bv-index
              (or (parser-rule-bvi parent-rule)
                  (cdr (assoc (parser-node-precedence this-node)
                              (parser-rule-p2bvi-map parent-rule))))))
        (if (and parent-bv-index (eq (sbit desired-bv parent-bv-index) 1))
            (let ((child-index (reduction-child-index reduction)))
              (add-partial-node session parent-rule this-node child-index))
          (progn
            (debugging-comment "Reduction from ~D not plausible : ~S ~S (bit ~D) at ~D"
                               (parser-node-number this-node)
                               (structure-type-of parent-rule)
                               (parser-rule-name parent-rule)
                               parent-bv-index
                               (reduction-child-index reduction)
                               )))))))
(defun extend-partial-nodes-reaching-this-node (session this-node)
  "For every partial node pending at THIS-NODE's start location, try to
use THIS-NODE as its next child."
  (let ((pre-loc (svref (parse-session-locations session)
                        (parser-node-pre-index this-node))))
    (dolist (datum (parser-location-partial-node-data pre-loc))
      ;; Each datum is (parent-node . child-index).
      (destructuring-bind (candidate-parent-node . child-index) datum
        (maybe-extend-partial-node session
                                   candidate-parent-node
                                   child-index
                                   this-node)))))
(defun maybe-extend-partial-node (session parent-node child-index this-node)
  ;; Try to extend the pending PARENT-NODE by using THIS-NODE as its
  ;; CHILD-INDEXth child, dispatching on the parent rule's structure type.
  ;; CASE on STRUCTURE-TYPE-OF is faster than ETYPECASE because only exact
  ;; type matches occur here.
  ;; [Reconstructed: the CASE header line was lost upstream; the clause
  ;; shapes and the dangling close paren imply it.]
  (let ((parent-rule (parser-node-rule parent-node)))
    (case (structure-type-of parent-rule)
      (parser-tuple-rule
       (maybe-extend-partial-tuple-node session parent-node parent-rule child-index this-node))
      (parser-pieces-rule
       (maybe-extend-partial-pieces-node session parent-node parent-rule child-index this-node))
      (parser-repeat-rule
       (maybe-extend-partial-repeat-node session parent-node parent-rule child-index this-node))
      (parser-anyof-rule
       ;; Only the toplevel start rule should appear here as an anyof
       ;; rule; anything else indicates an internal inconsistency.
       (when-debugging
        (unless (eq (parser-node-rule parent-node)
                    (parse-session-start-rule session))
          (warn "Attempt to extend non-toplevel ANYOF node ~S at ~D using node ~S"
                parent-node
                child-index
                (parser-node-number this-node)))))
      )))
(defun maybe-extend-partial-tuple-node (session node rule child-index candidate-child)
  ;; Try to use CANDIDATE-CHILD as the CHILD-INDEXth child of pending
  ;; tuple NODE.  If this or a following slot is already filled, the
  ;; partial parent is replicated first so the existing parse survives --
  ;; unless the precedence comparison allows "cannibalizing" the node
  ;; instead (the candidate then replaces the node's tail; see
  ;; REVISE-CANNIBALIZED-NODE).  After adoption the node is completed
  ;; and/or re-registered as pending for its next item (and any further
  ;; items while those are optional).
  (let* ((pattern (parser-rule-items rule))
         (pattern-size (length pattern))
         (desired-item (svref pattern child-index)))
    (when (parser-rule-item-matches? desired-item candidate-child)
      (let ((children (parser-node-children node)))
        (declare (simple-vector children))
        (let* ((next-child-index (1+ child-index))
               ;; True when every slot other than CHILD-INDEX is either
               ;; filled or optional -- adopting makes the node complete.
               (all-other-required-children-present?
                (dotimes (i (length children) t)
                  (when (and
                         (not (equal i child-index))
                         (null (svref children i))
                         (not (parser-rule-item-optional? (svref pattern i)))
                         )
                    (return nil))))
               (this-or-following-child-is-already-present?
                (do ((i child-index (1+ i)))
                    ((>= i (length children))
                     nil)
                  (unless (null (svref children i))
                    (return t))))
               (this-is-not-last-child? (< next-child-index (length children)))
               (this-child-is-optional? (parser-rule-item-optional? desired-item))
               (locations (parse-session-locations session))
               (cannibalizing? nil))
          ;; If this slot is already full (or the node was already usable
          ;; as-is), replicate the partial parent -- except when the
          ;; precedence test says we may cannibalize it in place.
          (when (and (or this-or-following-child-is-already-present?
                         (and all-other-required-children-present?
                              (or this-is-not-last-child?
                                  this-child-is-optional?)))
                     (not
                      (setq cannibalizing?
                            (let ((desired-precedence (parser-rule-item-precedence desired-item)))
                              (and (not (null desired-precedence))
                                   (let ((candidate-precedence (parser-node-precedence candidate-child)))
                                     (and (not (null candidate-precedence))
                                          (<= candidate-precedence desired-precedence))))))))
            (setq node (replicate-parser-node node child-index))
            (setq children (parser-node-children node)))
          (adopt-child node children child-index candidate-child)
          (when cannibalizing?
            (debugging-comment "Cannibalized ~D. Last node now ~D"
                               (parser-node-number node)
                               (parser-node-number candidate-child))
            (revise-cannibalized-node node candidate-child))
          ;; Whether or not we're cannibalizing, see if we're done.
          (cond (all-other-required-children-present?
                 (install-completed-node session node candidate-child))
                (t
                 (when-debugging
                  (when *verbose?*
                    (show-node node (format nil "Extended ~D" child-index))))))
          (when this-is-not-last-child?
            ;; Register NODE as pending for the next item, and -- while
            ;; items remain optional -- for the items after it as well.
            (let ((post-loc (svref locations (parser-node-post-index candidate-child)))
                  (repeat? t))
              (loop while (and repeat? (< next-child-index pattern-size)) do
                (let* ((next-child-item (svref pattern next-child-index))
                       (optional? (parser-rule-item-optional? next-child-item))
                       (handles-bv (parser-rule-item-possible-handles-bv next-child-item)))
                  (augment-location-partial-node-data post-loc node next-child-index)
                  (augment-location-desired-bv post-loc handles-bv)
                  (setq repeat? optional?)
                  (incf next-child-index)
                  ))))
          )))))
(defun revise-cannibalized-node (node last-child)
  ;; NODE has been "cannibalized" in place: LAST-CHILD now supplies its
  ;; tail, so the two must agree on where the parse ends.
  ;; [Fixed: the comment below had lost its ";;" marker and would have
  ;; executed as code.]
  (when-debugging
   (when *verbose?*
     (show-node node "Revised ")))
  (let ((post-index (parser-node-post-index last-child)))
    ;; put ptr cell from parent into child as well, then mutate to use data from child
    (setf (parser-node-post-index-ptr last-child) (parser-node-post-index-ptr node))
    (setf (parser-node-post-index last-child) post-index)))
(defun maybe-extend-partial-pieces-node (session node rule child-index candidate-child)
  ;; Try to use CANDIDATE-CHILD as the CHILD-INDEXth child of pending
  ;; pieces NODE.  Every alternative piece is checked, since pieces may
  ;; arrive in any order.  On a match the node is re-registered as pending
  ;; for a further piece and installed as (potentially) complete.
  (let ((alternatives (parser-rule-items rule))
        (locations (parse-session-locations session)))
    (dotimes (i (length alternatives))
      (let ((desired-item (svref alternatives i)))
        (when (parser-rule-item-matches? desired-item candidate-child)
          (let ((children (parser-node-children node)))
            ;; if this slot is already full, replicate partial parent, otherwise just use it
            (unless (null (svref children child-index))
              (setq node (replicate-parser-node node child-index))
              (setq children (parser-node-children node)))
            (adopt-child node children child-index candidate-child)
            (let ((post-loc (svref locations (parser-node-post-index candidate-child)))
                  (handles-bv (parser-pieces-rule-possible-handles-bv rule)))
              (augment-location-partial-node-data post-loc node (1+ child-index))
              (augment-location-desired-bv post-loc handles-bv))
            (install-completed-node session node candidate-child)))))))
(defun maybe-extend-partial-repeat-node (session node rule child-index candidate-child)
  ;; Try to use CANDIDATE-CHILD as the CHILD-INDEXth child of pending
  ;; repeat NODE.  Even slots hold elements and odd slots hold separators
  ;; (when the rule has a separator; otherwise every slot is an element).
  ;; The children vector has an indefinite logical length and is doubled
  ;; when it runs out.  Each adopted element makes the node complete again.
  (let* ((looking-for-element? (or (evenp child-index)
                                   (null (parser-repeat-rule-separator rule))))
         (desired-item (if looking-for-element?
                           (parser-repeat-rule-element rule)
                         (parser-repeat-rule-separator rule))))
    (when (parser-rule-item-matches? desired-item candidate-child)
      (let* ((children (parser-node-children node))
             (children-size (length children)))
        (declare (simple-vector children))
        ;; if this slot is already full, replicate partial parent, otherwise just use it
        (when (or looking-for-element?
                  (and
                   (< child-index children-size)
                   (not (null (svref children child-index)))))
          (setq node (replicate-parser-node node child-index))
          (setq children (parser-node-children node)))
        ;; number of children is indefinite, so we may need to extend vector
        (when (>= child-index children-size)
          (let ((new-children (make-array (* child-index 2) :initial-element nil)))
            (dotimes (i children-size)
              (setf (svref new-children i) (svref children i)))
            (setf (parser-node-children node) new-children)
            (setf children new-children)))
        (adopt-child node children child-index candidate-child)
        (when looking-for-element?
          (install-completed-node session node candidate-child))
        ;; next subrule is separator if this was element, and vice versa,
        ;; unless separator is null, in which case always use element
        (let* ((post-loc (svref (parse-session-locations session)
                                (parser-node-post-index candidate-child)))
               (next-item
                (if (or (oddp child-index) (null (parser-repeat-rule-separator rule)))
                    (parser-repeat-rule-element rule)
                  (parser-repeat-rule-separator rule)))
               (handles-bv (parser-rule-item-possible-handles-bv next-item)))
          (augment-location-partial-node-data post-loc node (1+ child-index))
          (augment-location-desired-bv post-loc handles-bv)
          )))))
(defun parser-rule-item-matches? (parent-item child-node)
  "True when CHILD-NODE is acceptable where PARENT-ITEM is expected:
the child's rule must be among the item's possible children (tested via
the child's bvi bit, or by rule name when the child has no bvi), and the
child's precedence must not exceed the item's maximum, whenever both
precedences are specified."
  (let ((child-bvi (parser-node-bvi child-node))
        (max-precedence-allowed (parser-rule-item-precedence parent-item)))
    (and (if (null child-bvi)
             (eq (parser-rule-item-rule parent-item)
                 (parser-rule-name (parser-node-rule child-node)))
           (eq (sbit (parser-rule-item-possible-children-bv parent-item) child-bvi)
               1))
         (or (null max-precedence-allowed)
             (let ((child-precedence (parser-node-precedence child-node)))
               (or (null child-precedence)
                   (<= child-precedence max-precedence-allowed)))))))
(defun replicate-parser-node (old-node child-index)
  ;; Make a copy of OLD-NODE sharing only its first CHILD-INDEX children
  ;; (the slots from CHILD-INDEX on are left empty), so that an
  ;; alternative parse can proceed without disturbing the original
  ;; partial node.
  (declare (fixnum child-index))
  (let ((new-node (copy-parser-node old-node)))
    (when-debugging
     (setf (parser-node-number new-node) (incf *parser-node-number*)))
    (let* ((old-children (parser-node-children old-node))
           (new-children (make-array (length old-children) :initial-element nil)))
      (declare (simple-vector old-children)
               (simple-vector new-children))
      (dotimes (i child-index)
        (setf (svref new-children i) (svref old-children i)))
      (setf (parser-node-children new-node) new-children))
    (when-debugging
     (when *verbose?*
       (show-node new-node (format nil "~6D =>" (parser-node-number old-node))))
     (push new-node *all-nodes*))
    new-node))
(defun adopt-child (node children child-index child-node)
  "Install CHILD-NODE as slot CHILD-INDEX of CHILDREN (NODE's child
vector), register NODE as a parent of CHILD-NODE, and clear every slot
after CHILD-INDEX (they may hold stale children from a replicated
node)."
  (declare (simple-vector children) (fixnum child-index))
  (setf (svref children child-index) child-node)
  (push node (parser-node-parents child-node))
  (loop for i of-type fixnum from (1+ child-index) below (length children)
        do (setf (svref children i) nil)))
|
bb769e3d8a4664b54b6b2ba880b4370fb9c587c26f606f4547ba55445bc7b9aa | chessai/semirings | Euclidean.hs | -- |
-- Module:      Data.Euclidean
-- Copyright:   (c) 2019 Andrew Lelechenko
-- Licence:     BSD3
-- Maintainer:  Andrew Lelechenko <andrew.lelechenko@gmail.com>
--

{-# LANGUAGE CPP                        #-}
{-# LANGUAGE DefaultSignatures          #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MagicHash                  #-}

#if MIN_VERSION_base(4,12,0)
{-# LANGUAGE DerivingVia                #-}
{-# LANGUAGE StandaloneDeriving         #-}
#else
{-# LANGUAGE TemplateHaskell            #-}
#endif
module Data.Euclidean
( Euclidean(..)
, Field
, GcdDomain(..)
, WrappedIntegral(..)
, WrappedFractional(..)
, gcdExt
) where
import Prelude hiding (quotRem, quot, rem, divMod, div, mod, gcd, lcm, negate, (*), Int, Word)
import qualified Prelude as P
import Control.Exception (throw, ArithException(..))
import Data.Bits (Bits)
import Data.Complex (Complex(..))
import Data.Int (Int, Int8, Int16, Int32, Int64)
import Data.Maybe (isJust)
import Data.Ratio (Ratio)
import Data.Semiring (Semiring(..), Ring(..), (*), minus, isZero, Mod2)
import Data.Word (Word, Word8, Word16, Word32, Word64)
import Foreign.C.Types (CFloat, CDouble)
#if !MIN_VERSION_base(4,12,0)
import Language.Haskell.TH.Syntax (Q, Dec, Type)
#endif
import Numeric.Natural
---------------------------------------------------------------------
-- Classes
---------------------------------------------------------------------
-- | 'GcdDomain' represents a
-- <https://en.wikipedia.org/wiki/GCD_domain GCD domain>.
-- This is a domain, where GCD can be defined,
-- but which does not necessarily allow a well-behaved
-- division with remainder (as in 'Euclidean' domains).
--
-- For example, there is no way to define 'rem' over
-- polynomials with integer coefficients such that
-- remainder is always "smaller" than divisor. However,
-- 'gcd' is still definable, just not by means of
-- Euclidean algorithm.
--
-- All methods of 'GcdDomain' have default implementations
-- in terms of 'Euclidean'. So most of the time
-- it is enough to write:
--
-- > instance GcdDomain Foo
-- > instance Euclidean Foo where
-- >   quotRem = ...
-- >   degree  = ...
class Semiring a => GcdDomain a where

  -- | Division without remainder.
  --
  -- prop> \x y -> (x * y) `divide` y == Just x
  -- prop> \x y -> maybe True (\z -> x == z * y) (x `divide` y)
  divide :: a -> a -> Maybe a
  default divide :: (Eq a, Euclidean a) => a -> a -> Maybe a
  divide x y = let (q, r) = quotRem x y in
    if isZero r then Just q else Nothing

  -- | Greatest common divisor. Must satisfy
  --
  -- prop> \x y -> isJust (x `divide` gcd x y) && isJust (y `divide` gcd x y)
  -- prop> \x y z -> isJust (gcd (x * z) (y * z) `divide` z)
  gcd :: a -> a -> a
  default gcd :: (Eq a, Euclidean a) => a -> a -> a
  gcd a b
    | isZero b = a
    | otherwise = gcd b (a `rem` b)

  -- | Lowest common multiple. Must satisfy
  --
  -- prop> \x y -> isJust (lcm x y `divide` x) && isJust (lcm x y `divide` y)
  -- prop> \x y z -> isNothing (z `divide` x) || isNothing (z `divide` y) || isJust (z `divide` lcm x y)
  lcm :: a -> a -> a
  default lcm :: Eq a => a -> a -> a
  lcm a b
    | isZero a || isZero b = zero
    | otherwise = case a `divide` gcd a b of
      Nothing -> error "lcm: violated gcd invariant"
      Just c -> c * b

  -- | Test whether two arguments are
  -- <https://en.wikipedia.org/wiki/Coprime_integers coprime>.
  -- Must match its default definition:
  --
  -- prop> \x y -> coprime x y == isJust (1 `divide` gcd x y)
  coprime :: a -> a -> Bool
  default coprime :: a -> a -> Bool
  coprime x y = isJust (one `divide` gcd x y)

infixl 7 `divide`
-- | Informally speaking, 'Euclidean' is a superclass of 'Integral',
-- lacking 'toInteger', which allows to define division with remainder
-- for a wider range of types, e. g., complex integers
-- and polynomials with rational coefficients.
--
-- 'Euclidean' represents a
-- <https://en.wikipedia.org/wiki/Euclidean_domain Euclidean domain>
-- endowed by a given Euclidean function 'degree'.
--
-- No particular rounding behaviour is expected of 'quotRem'. E. g.,
-- it is not guaranteed to truncate towards zero or towards negative
-- infinity (cf. 'P.divMod'), and remainders are not guaranteed to be non-negative.
-- For a faithful representation of residue classes one can use
-- <https://hackage.haskell.org/package/mod mod> package instead.
class GcdDomain a => Euclidean a where
  {-# MINIMAL (quotRem | quot, rem), degree #-}

  -- | Division with remainder.
  --
  -- prop> \x y -> y == 0 || let (q, r) = x `quotRem` y in x == q * y + r
  quotRem :: a -> a -> (a, a)
  quotRem x y = (quot x y, rem x y)

  -- | Division. Must match its default definition:
  --
  -- prop> \x y -> quot x y == fst (quotRem x y)
  quot :: a -> a -> a
  quot x y = fst (quotRem x y)

  -- | Remainder. Must match its default definition:
  --
  -- prop> \x y -> rem x y == snd (quotRem x y)
  rem :: a -> a -> a
  rem x y = snd (quotRem x y)

  -- | Euclidean (a. k. a. degree, valuation, gauge, norm)
  -- function on @a@. Usually @'fromIntegral' '.' 'abs'@.
  --
  -- 'degree' is rarely used by itself. Its purpose
  -- is to provide an evidence of soundness of 'quotRem'
  -- by testing the following property:
  --
  -- prop> \x y -> y == 0 || let (q, r) = x `quotRem` y in (r == 0 || degree r < degree y)
  degree :: a -> Natural

infixl 7 `quot`
infixl 7 `rem`
-- Shared implementation of 'coprime' for 'Integral' wrappers: two
-- integers are coprime iff they are not both even and their gcd is 1
-- (the parity test is a cheap early exit for the common even/even case).
coprimeIntegral :: Integral a => a -> a -> Bool
coprimeIntegral x y = not (even x && even y) && P.gcd x y == 1
-- | Execute the extended Euclidean algorithm.
-- For elements @a@ and @b@, compute their greatest common divisor @g@
-- and the coefficient @s@ satisfying @a*s + b*t = g@ for some @t@.
gcdExt :: (Eq a, Euclidean a, Ring a) => a -> a -> (a, a)
gcdExt = go one zero
  where
    -- Invariant: r = a*s + b*t for suitable t (t is not tracked).
    go s s' r r'
      | r' == zero = (r, s)
      | otherwise = case quotRem r r' of
          (q, r'') -> go s' (minus s (times q s')) r' r''
{-# INLINABLE gcdExt #-}
-- | 'Field' represents a
-- <https://en.wikipedia.org/wiki/Field_(mathematics) field>,
-- a ring with a multiplicative inverse for any non-zero element.
class (Euclidean a, Ring a) => Field a
---------------------------------------------------------------------
-- Instances
---------------------------------------------------------------------
instance GcdDomain () where
divide = const $ const (Just ())
gcd = const $ const ()
lcm = const $ const ()
coprime = const $ const True
instance Euclidean () where
degree = const 0
quotRem = const $ const ((), ())
quot = const $ const ()
rem = const $ const ()
instance Field ()
instance GcdDomain Mod2 where
instance Euclidean Mod2 where
degree = const 0
quotRem x y
| isZero y = throw DivideByZero
| otherwise = (x, zero)
instance Field Mod2
-- | Wrapper around 'Integral' with 'GcdDomain'
and ' ' instances .
newtype WrappedIntegral a = WrapIntegral { unwrapIntegral :: a }
deriving (Eq, Ord, Show, Num, Integral, Real, Enum, Bits)
instance Num a => Semiring (WrappedIntegral a) where
plus = (P.+)
zero = 0
times = (P.*)
one = 1
fromNatural = P.fromIntegral
instance Num a => Ring (WrappedIntegral a) where
negate = P.negate
instance Integral a => GcdDomain (WrappedIntegral a) where
divide x y = case x `P.quotRem` y of (q, 0) -> Just q; _ -> Nothing
gcd = P.gcd
lcm = P.lcm
coprime = coprimeIntegral
instance Integral a => Euclidean (WrappedIntegral a) where
degree = P.fromIntegral . abs . unwrapIntegral
quotRem = P.quotRem
quot = P.quot
rem = P.rem
-- | Wrapper around 'Fractional'
-- with trivial 'GcdDomain'
and ' ' instances .
newtype WrappedFractional a = WrapFractional { unwrapFractional :: a }
deriving (Eq, Ord, Show, Num, Fractional)
instance Num a => Semiring (WrappedFractional a) where
plus = (P.+)
zero = 0
times = (P.*)
one = 1
fromNatural = P.fromIntegral
instance Num a => Ring (WrappedFractional a) where
negate = P.negate
instance Fractional a => GcdDomain (WrappedFractional a) where
divide x y = Just (x / y)
gcd = const $ const 1
lcm = const $ const 1
coprime = const $ const True
instance Fractional a => Euclidean (WrappedFractional a) where
degree = const 0
quotRem x y = (x / y, 0)
quot = (/)
rem = const $ const 0
instance Fractional a => Field (WrappedFractional a)
instance Integral a => GcdDomain (Ratio a) where
divide x y = Just (x / y)
gcd = const $ const 1
lcm = const $ const 1
coprime = const $ const True
instance Integral a => Euclidean (Ratio a) where
degree = const 0
quotRem x y = (x / y, 0)
quot = (/)
rem = const $ const 0
instance Integral a => Field (Ratio a)
instance GcdDomain Float where
divide x y = Just (x / y)
gcd = const $ const 1
lcm = const $ const 1
coprime = const $ const True
instance Euclidean Float where
degree = const 0
quotRem x y = (x / y, 0)
quot = (/)
rem = const $ const 0
instance Field Float
instance GcdDomain Double where
divide x y = Just (x / y)
gcd = const $ const 1
lcm = const $ const 1
coprime = const $ const True
instance Euclidean Double where
degree = const 0
quotRem x y = (x / y, 0)
quot = (/)
rem = const $ const 0
instance Field Double
instance GcdDomain CFloat where
divide x y = Just (x / y)
gcd = const $ const 1
lcm = const $ const 1
coprime = const $ const True
instance Euclidean CFloat where
degree = const 0
quotRem x y = (x / y, 0)
quot = (/)
rem = const $ const 0
instance Field CFloat
instance GcdDomain CDouble where
divide x y = Just (x / y)
gcd = const $ const 1
lcm = const $ const 1
coprime = const $ const True
instance Euclidean CDouble where
degree = const 0
quotRem x y = (x / y, 0)
quot = (/)
rem = const $ const 0
instance Field CDouble
conjQuotAbs :: Field a => Complex a -> Complex a
conjQuotAbs (x :+ y) = x `quot` norm :+ (negate y) `quot` norm
where
norm = (x `times` x) `plus` (y `times` y)
instance Field a => GcdDomain (Complex a) where
divide x y = Just (x `times` conjQuotAbs y)
gcd = const $ const one
lcm = const $ const one
coprime = const $ const True
instance Field a => Euclidean (Complex a) where
degree = const 0
quotRem x y = (quot x y, zero)
quot x y = x `times` conjQuotAbs y
rem = const $ const zero
instance Field a => Field (Complex a)
#if MIN_VERSION_base(4,12,0)
deriving via (WrappedIntegral Int) instance GcdDomain Int
deriving via (WrappedIntegral Int8) instance GcdDomain Int8
deriving via (WrappedIntegral Int16) instance GcdDomain Int16
deriving via (WrappedIntegral Int32) instance GcdDomain Int32
deriving via (WrappedIntegral Int64) instance GcdDomain Int64
deriving via (WrappedIntegral Integer) instance GcdDomain Integer
deriving via (WrappedIntegral Word) instance GcdDomain Word
deriving via (WrappedIntegral Word8) instance GcdDomain Word8
deriving via (WrappedIntegral Word16) instance GcdDomain Word16
deriving via (WrappedIntegral Word32) instance GcdDomain Word32
deriving via (WrappedIntegral Word64) instance GcdDomain Word64
deriving via (WrappedIntegral Natural) instance GcdDomain Natural
#else
$(let
deriveGcdDomain :: Q Type -> Q [Dec]
deriveGcdDomain ty = [d|
instance GcdDomain $ty where
gcd = P.gcd
lcm = P.lcm
coprime = coprimeIntegral
|]
in P.concat P.<$> P.traverse deriveGcdDomain
[[t|Int|]
,[t|Int8|]
,[t|Int16|]
,[t|Int32|]
,[t|Int64|]
,[t|Integer|]
,[t|Word|]
,[t|Word8|]
,[t|Word16|]
,[t|Word32|]
,[t|Word64|]
,[t|Natural|]
])
#endif
#if MIN_VERSION_base(4,12,0)
deriving via (WrappedIntegral Int) instance Euclidean Int
deriving via (WrappedIntegral Int8) instance Euclidean Int8
deriving via (WrappedIntegral Int16) instance Euclidean Int16
deriving via (WrappedIntegral Int32) instance Euclidean Int32
deriving via (WrappedIntegral Int64) instance Euclidean Int64
deriving via (WrappedIntegral Integer) instance Euclidean Integer
deriving via (WrappedIntegral Word) instance Euclidean Word
deriving via (WrappedIntegral Word8) instance Euclidean Word8
deriving via (WrappedIntegral Word16) instance Euclidean Word16
deriving via (WrappedIntegral Word32) instance Euclidean Word32
deriving via (WrappedIntegral Word64) instance Euclidean Word64
deriving via (WrappedIntegral Natural) instance Euclidean Natural
#else
$(let
deriveEuclidean :: Q Type -> Q [Dec]
deriveEuclidean ty = [d|
instance Euclidean $ty where
degree = P.fromIntegral . abs
quotRem = P.quotRem
quot = P.quot
rem = P.rem
|]
in P.concat P.<$> P.traverse deriveEuclidean
[[t|Int|]
,[t|Int8|]
,[t|Int16|]
,[t|Int32|]
,[t|Int64|]
,[t|Integer|]
,[t|Word|]
,[t|Word8|]
,[t|Word16|]
,[t|Word32|]
,[t|Word64|]
,[t|Natural|]
])
#endif
| null | https://raw.githubusercontent.com/chessai/semirings/d494b0bb0c6652b0aa74de34235070434dd8bab7/Data/Euclidean.hs | haskell | |
# LANGUAGE DefaultSignatures #
# LANGUAGE MagicHash #
# LANGUAGE DerivingVia #
# LANGUAGE TemplateHaskell #
-------------------------------------------------------------------
Classes
-------------------------------------------------------------------
| 'GcdDomain' represents a
< GCD domain>.
but which does not necessarily allow a well-behaved
For example, there is no way to define 'rem' over
polynomials with integer coefficients such that
remainder is always "smaller" than divisor. However,
'gcd' is still definable, just not by means of
All methods of 'GcdDomain' have default implementations
it is enough to write:
> quotRem = ...
> degree = ...
| Division without remainder.
prop> \x y -> (x * y) `divide` y == Just x
prop> \x y -> maybe True (\z -> x == z * y) (x `divide` y)
| Greatest common divisor. Must satisfy
prop> \x y -> isJust (x `divide` gcd x y) && isJust (y `divide` gcd x y)
| Lowest common multiple. Must satisfy
< coprime>.
Must match its default definition:
prop> \x y -> coprime x y == isJust (1 `divide` gcd x y)
lacking 'toInteger', which allows to define division with remainder
and polynomials with rational coefficients.
< Euclidean domain>
infinity (cf. 'P.divMod'), and remainders are not guaranteed to be non-negative.
For a faithful representation of residue classes one can use
< mod> package instead.
| Division with remainder.
| Division. Must match its default definition:
prop> \x y -> quot x y == fst (quotRem x y)
| Remainder. Must match its default definition:
'degree' is rarely used by itself. Its purpose
is to provide an evidence of soundness of 'quotRem'
by testing the following property:
# INLINABLE gcdExt #
| 'Field' represents a
-------------------------------------------------------------------
Instances
-------------------------------------------------------------------
| Wrapper around 'Integral' with 'GcdDomain'
| Wrapper around 'Fractional'
with trivial 'GcdDomain' | Module : Data . Euclidean
Copyright : ( c ) 2019
Licence : BSD3
Maintainer : < >
# LANGUAGE CPP #
# LANGUAGE GeneralizedNewtypeDeriving #
#if MIN_VERSION_base(4,12,0)
# LANGUAGE StandaloneDeriving #
#else
#endif
module Data.Euclidean
( Euclidean(..)
, Field
, GcdDomain(..)
, WrappedIntegral(..)
, WrappedFractional(..)
, gcdExt
) where
import Prelude hiding (quotRem, quot, rem, divMod, div, mod, gcd, lcm, negate, (*), Int, Word)
import qualified Prelude as P
import Control.Exception (throw, ArithException(..))
import Data.Bits (Bits)
import Data.Complex (Complex(..))
import Data.Int (Int, Int8, Int16, Int32, Int64)
import Data.Maybe (isJust)
import Data.Ratio (Ratio)
import Data.Semiring (Semiring(..), Ring(..), (*), minus, isZero, Mod2)
import Data.Word (Word, Word8, Word16, Word32, Word64)
import Foreign.C.Types (CFloat, CDouble)
#if !MIN_VERSION_base(4,12,0)
import Language.Haskell.TH.Syntax (Q, Dec, Type)
#endif
import Numeric.Natural
This is a domain , where GCD can be defined ,
division with remainder ( as in ' Euclidean ' domains ) .
Euclidean algorithm .
in terms of ' ' . So most of the time
> instance
> instance where
class Semiring a => GcdDomain a where
divide :: a -> a -> Maybe a
default divide :: (Eq a, Euclidean a) => a -> a -> Maybe a
divide x y = let (q, r) = quotRem x y in
if isZero r then Just q else Nothing
prop > \x y z - > isJust ( gcd ( x * z ) ( y * z ) ` divide ` z )
gcd :: a -> a -> a
default gcd :: (Eq a, Euclidean a) => a -> a -> a
gcd a b
| isZero b = a
| otherwise = gcd b (a `rem` b)
prop > \x y - > isJust ( lcm x y ` divide ` x ) & & isJust ( lcm x y ` divide ` y )
prop > \x y z - > isNothing ( z ` divide ` x ) || isNothing ( z ` divide ` y ) || isJust ( z ` divide ` lcm x y )
lcm :: a -> a -> a
default lcm :: Eq a => a -> a -> a
lcm a b
| isZero a || isZero b = zero
| otherwise = case a `divide` gcd a b of
Nothing -> error "lcm: violated gcd invariant"
Just c -> c * b
| Test whether two arguments are
coprime :: a -> a -> Bool
default coprime :: a -> a -> Bool
coprime x y = isJust (one `divide` gcd x y)
infixl 7 `divide`
| Informally speaking , ' ' is a superclass of ' Integral ' ,
for a wider range of types , , complex integers
' ' represents a
endowed by a given Euclidean function ' degree ' .
No particular rounding behaviour is expected of ' quotRem ' . ,
it is not guaranteed to truncate towards zero or towards negative
class GcdDomain a => Euclidean a where
# MINIMAL ( quotRem | quot , rem ) , degree #
prop > \x y - > y = = 0 || let ( q , r ) = x ` quotRem ` y in x = = q * y + r
quotRem :: a -> a -> (a, a)
quotRem x y = (quot x y, rem x y)
quot :: a -> a -> a
quot x y = fst (quotRem x y)
prop > \x y - > rem x y = = snd ( quotRem x y )
rem :: a -> a -> a
rem x y = snd (quotRem x y)
| Euclidean ( aka degree , valuation , gauge , norm ) function on Usually @'fromIntegral ' ' . ' ' abs'@.
prop > \x y - > y = = 0 || let ( q , r ) = x ` quotRem ` y in ( r = = 0 || degree r < degree y )
degree :: a -> Natural
infixl 7 `quot`
infixl 7 `rem`
coprimeIntegral :: Integral a => a -> a -> Bool
coprimeIntegral x y = (odd x || odd y) && P.gcd x y == 1
| Execute the extended Euclidean algorithm .
For elements @a@ and , compute their greatest common divisor @g@
and the coefficient @s@ satisfying @as + bt = g@ for some
gcdExt :: (Eq a, Euclidean a, Ring a) => a -> a -> (a, a)
gcdExt = go one zero
where
go s s' r r'
| r' == zero = (r, s)
| otherwise = case quotRem r r' of
(q, r'') -> go s' (minus s (times q s')) r' r''
< (mathematics ) field > ,
a ring with a multiplicative inverse for any non - zero element .
class (Euclidean a, Ring a) => Field a
instance GcdDomain () where
divide = const $ const (Just ())
gcd = const $ const ()
lcm = const $ const ()
coprime = const $ const True
instance Euclidean () where
degree = const 0
quotRem = const $ const ((), ())
quot = const $ const ()
rem = const $ const ()
instance Field ()
instance GcdDomain Mod2 where
instance Euclidean Mod2 where
degree = const 0
quotRem x y
| isZero y = throw DivideByZero
| otherwise = (x, zero)
instance Field Mod2
and ' ' instances .
newtype WrappedIntegral a = WrapIntegral { unwrapIntegral :: a }
deriving (Eq, Ord, Show, Num, Integral, Real, Enum, Bits)
instance Num a => Semiring (WrappedIntegral a) where
plus = (P.+)
zero = 0
times = (P.*)
one = 1
fromNatural = P.fromIntegral
instance Num a => Ring (WrappedIntegral a) where
negate = P.negate
instance Integral a => GcdDomain (WrappedIntegral a) where
divide x y = case x `P.quotRem` y of (q, 0) -> Just q; _ -> Nothing
gcd = P.gcd
lcm = P.lcm
coprime = coprimeIntegral
instance Integral a => Euclidean (WrappedIntegral a) where
degree = P.fromIntegral . abs . unwrapIntegral
quotRem = P.quotRem
quot = P.quot
rem = P.rem
and ' ' instances .
newtype WrappedFractional a = WrapFractional { unwrapFractional :: a }
deriving (Eq, Ord, Show, Num, Fractional)
instance Num a => Semiring (WrappedFractional a) where
plus = (P.+)
zero = 0
times = (P.*)
one = 1
fromNatural = P.fromIntegral
instance Num a => Ring (WrappedFractional a) where
negate = P.negate
instance Fractional a => GcdDomain (WrappedFractional a) where
divide x y = Just (x / y)
gcd = const $ const 1
lcm = const $ const 1
coprime = const $ const True
instance Fractional a => Euclidean (WrappedFractional a) where
degree = const 0
quotRem x y = (x / y, 0)
quot = (/)
rem = const $ const 0
instance Fractional a => Field (WrappedFractional a)
instance Integral a => GcdDomain (Ratio a) where
divide x y = Just (x / y)
gcd = const $ const 1
lcm = const $ const 1
coprime = const $ const True
instance Integral a => Euclidean (Ratio a) where
degree = const 0
quotRem x y = (x / y, 0)
quot = (/)
rem = const $ const 0
instance Integral a => Field (Ratio a)
instance GcdDomain Float where
divide x y = Just (x / y)
gcd = const $ const 1
lcm = const $ const 1
coprime = const $ const True
instance Euclidean Float where
degree = const 0
quotRem x y = (x / y, 0)
quot = (/)
rem = const $ const 0
instance Field Float
instance GcdDomain Double where
divide x y = Just (x / y)
gcd = const $ const 1
lcm = const $ const 1
coprime = const $ const True
instance Euclidean Double where
degree = const 0
quotRem x y = (x / y, 0)
quot = (/)
rem = const $ const 0
instance Field Double
instance GcdDomain CFloat where
divide x y = Just (x / y)
gcd = const $ const 1
lcm = const $ const 1
coprime = const $ const True
instance Euclidean CFloat where
degree = const 0
quotRem x y = (x / y, 0)
quot = (/)
rem = const $ const 0
instance Field CFloat
instance GcdDomain CDouble where
divide x y = Just (x / y)
gcd = const $ const 1
lcm = const $ const 1
coprime = const $ const True
instance Euclidean CDouble where
degree = const 0
quotRem x y = (x / y, 0)
quot = (/)
rem = const $ const 0
instance Field CDouble
conjQuotAbs :: Field a => Complex a -> Complex a
conjQuotAbs (x :+ y) = x `quot` norm :+ (negate y) `quot` norm
where
norm = (x `times` x) `plus` (y `times` y)
instance Field a => GcdDomain (Complex a) where
divide x y = Just (x `times` conjQuotAbs y)
gcd = const $ const one
lcm = const $ const one
coprime = const $ const True
instance Field a => Euclidean (Complex a) where
degree = const 0
quotRem x y = (quot x y, zero)
quot x y = x `times` conjQuotAbs y
rem = const $ const zero
instance Field a => Field (Complex a)
#if MIN_VERSION_base(4,12,0)
deriving via (WrappedIntegral Int) instance GcdDomain Int
deriving via (WrappedIntegral Int8) instance GcdDomain Int8
deriving via (WrappedIntegral Int16) instance GcdDomain Int16
deriving via (WrappedIntegral Int32) instance GcdDomain Int32
deriving via (WrappedIntegral Int64) instance GcdDomain Int64
deriving via (WrappedIntegral Integer) instance GcdDomain Integer
deriving via (WrappedIntegral Word) instance GcdDomain Word
deriving via (WrappedIntegral Word8) instance GcdDomain Word8
deriving via (WrappedIntegral Word16) instance GcdDomain Word16
deriving via (WrappedIntegral Word32) instance GcdDomain Word32
deriving via (WrappedIntegral Word64) instance GcdDomain Word64
deriving via (WrappedIntegral Natural) instance GcdDomain Natural
#else
$(let
deriveGcdDomain :: Q Type -> Q [Dec]
deriveGcdDomain ty = [d|
instance GcdDomain $ty where
gcd = P.gcd
lcm = P.lcm
coprime = coprimeIntegral
|]
in P.concat P.<$> P.traverse deriveGcdDomain
[[t|Int|]
,[t|Int8|]
,[t|Int16|]
,[t|Int32|]
,[t|Int64|]
,[t|Integer|]
,[t|Word|]
,[t|Word8|]
,[t|Word16|]
,[t|Word32|]
,[t|Word64|]
,[t|Natural|]
])
#endif
#if MIN_VERSION_base(4,12,0)
deriving via (WrappedIntegral Int) instance Euclidean Int
deriving via (WrappedIntegral Int8) instance Euclidean Int8
deriving via (WrappedIntegral Int16) instance Euclidean Int16
deriving via (WrappedIntegral Int32) instance Euclidean Int32
deriving via (WrappedIntegral Int64) instance Euclidean Int64
deriving via (WrappedIntegral Integer) instance Euclidean Integer
deriving via (WrappedIntegral Word) instance Euclidean Word
deriving via (WrappedIntegral Word8) instance Euclidean Word8
deriving via (WrappedIntegral Word16) instance Euclidean Word16
deriving via (WrappedIntegral Word32) instance Euclidean Word32
deriving via (WrappedIntegral Word64) instance Euclidean Word64
deriving via (WrappedIntegral Natural) instance Euclidean Natural
#else
$(let
deriveEuclidean :: Q Type -> Q [Dec]
deriveEuclidean ty = [d|
instance Euclidean $ty where
degree = P.fromIntegral . abs
quotRem = P.quotRem
quot = P.quot
rem = P.rem
|]
in P.concat P.<$> P.traverse deriveEuclidean
[[t|Int|]
,[t|Int8|]
,[t|Int16|]
,[t|Int32|]
,[t|Int64|]
,[t|Integer|]
,[t|Word|]
,[t|Word8|]
,[t|Word16|]
,[t|Word32|]
,[t|Word64|]
,[t|Natural|]
])
#endif
|
f8730fa3c3e2bdf94ab34fdbf3183c31fa4383236420594fede4f747f2136d2a | RefactoringTools/HaRe | ToQC.hs | module ToQC(moduleToQC) where
-- haskell syntax
import HsName
import HasBaseStruct
--import PrettyPrint(pp)
import HsExp(EI,mapEI,seqEI)
import HsPat(PI,isPVar)
import HsDecl(DI,HsMatchI(..),mapDI,seqDI)
import HsGuards(HsRhs(..))
import HsFields(HsFieldI(..))
import HsLiteral(HsLiteral(..))
import SrcLoc(loc0)
import WorkModule(inscpRel)
import Relations(applyRel)
--import Ents(Ent(..))
import TypedIds(IdTy(..),idTy)
import SourceNames(fakeSN)
import QualNames(getQualified)
-- property syntax
import PropSyntax as Prop hiding (quant)
import Syntax as Hs
-- utils
import List(nub)
import Maybe(isJust,fromJust)
import MUtils (( # ))
import StateM
class Trans t e | t -> e where trans :: t -> e
-- names of combinators
qc = Qual (PlainModule "QC")
neg = qc "not"
conj = qc "/\\"
disj = qc "\\/"
impl = qc "==>"
equiv = qc "<==>"
false = qc "false"
true = qc "true"
forAll = qc "forAll"
exists = qc "exists"
arrow = qc "-=>"
nil = Qual mod_Prelude "nil"
lft = qc "!"
lfp = qc "lfp"
gfp = qc "gfp"
equal = qc "==="
formula_type = qc "F"
pneg = qc "pNot"
pconj = qc "^/\\"
pdisj = qc "^\\/"
pimpl = qc "^==>"
pequiv = qc "^<==>"
transOp Conj = HsVar conj
transOp Disj = HsVar disj
transOp Imp = HsVar impl
transOp Equiv = HsVar equiv
transPredOp Conj = HsVar pconj
transPredOp Disj = HsVar pdisj
transPredOp Imp = HsVar pimpl
transPredOp Equiv = HsVar pequiv
predName y
| isConOp y = mapHsName ("%" ++) y
| isTuple y = mapHsName (\s ->"pred_Tuple"++show (tupleArity s)) y
| otherwise = mapHsName ("pred_" ++) y
where
isTuple y = case hsUnQual y of
'(':_ -> True
_ -> False
tupleArity "()" = 0
tupleArity s = 1+length (filter (==',') s)
assertName = mapHsName ("assert_" ++)
string (UnQual x) = hsLit loc0 (HsString x)
string _ = error "Qualified names not allowd in quantifiers."
f $$ x = hsApp f x
pInfixApp e1 op e2 = hsInfixApp (hsParen e1) op (hsParen e2)
class SplitType t' c t | t' -> t c where
splitType :: t' -> (c,t)
instance SplitType (Q [c] t) [c] t where
splitType (c :=> t) = (c,t)
instance (Trans pa e, Trans pp e, Trans e' e, SplitType t' [c] t,
HasBaseStruct e (EI HsName e p [d] t [c]),
HasBaseStruct p (PI HsName p),
HasBaseStruct t (TI HsName t))
=> Trans (PA HsName e' t' pa pp) e where
trans (Quant All x t pa) = quant forAll x t pa
trans (Quant Exist x t pa) = quant exists x t pa
trans ( PropId i ) = ( assertName i )
trans (PropApp i ts es) = foldl ($$) (hsEVar (predName i)) (map (either trans trans) es)
trans (PropNeg pa) = hsEVar neg $$ trans pa
trans (PropOp op p1 p2) = pInfixApp (trans p1) (transOp op) (trans p2)
trans (PropEqual e1 e2) = pInfixApp (trans e1) (HsVar equal) (trans e2)
trans (PropHas e pp) = hsParen (trans pp) $$ trans e
trans (PropParen pa) = hsParen (trans pa)
quant name x t pa = hsEVar name $$ string x $$ body
where exp = hsLambda [hsPVar x] (trans pa)
body = case t of
Nothing -> exp
Just t -> hsExpTypeSig loc0 (hsParen exp) ctxt (hsTyFun ty (hsTyCon formula_type))
where (ctxt,ty) = splitType t
instance (Trans pa e, Trans pp e, Trans e' e,
HasBaseStruct e (EI HsName e p [d] t [c]),
HasBaseStruct p (PI HsName p),
GetBaseStruct p (PI HsName p),
HasBaseStruct d (DI HsName e p [d] t [c] tp))
=> Trans (PP HsName e' p t1 pa pp) e where
trans ( PredId i ) = ( predName i )
trans (PredApp i ts xs) = foldl ($$) p (map (either trans trans) xs)
where p = hsEVar (predName i)
trans (PredArrow p1 p2) = pInfixApp (trans p1) (HsVar arrow) (trans p2)
trans (PredInfixApp p1 i p2) = pInfixApp (trans p1) p (trans p2)
where p = HsVar (predName i)
trans (PredNeg oppt p) = hsEVar pneg $$ trans p
trans (PredOp op optt p1 p2) = pInfixApp (trans p1) (transPredOp op) (trans p2)
trans (PredLfp i _ pp) = hsEVar lfp $$ hsLambda [hsPVar (predName i)] (trans pp)
trans (PredGfp i _ pp) = hsEVar gfp $$ hsLambda [hsPVar (predName i)] (trans pp)
trans PredNil = hsEVar nil
trans (PredLifted e) = hsEVar lft $$ trans e
trans (PredStrong pp) = trans pp
trans (PredComp ps pa)
| allVars = hsLambda pats exp
| otherwise = hsLet [hsFunBind loc0 ms] (hsEVar x)
where allVars = all isJust (map isPVar pats)
(pats,tys) = unzip ps
exp = trans pa
ms = [ HsMatch loc0 x pats (HsBody exp) []
, HsMatch loc0 x (map und ps) (HsBody $ hsEVar false) []
]
und _ = hsPWildCard
x = UnQual "it"
trans (PredParen pp) = hsParen (trans pp)
instance (HasBaseStruct e (EI HsName e p [d] t [c]), Trans d' d, Trans e' e) => Trans (EI HsName e' p [d'] t [c]) e where
trans = base . mapEI id trans id (map trans) id id
instance (HasBaseStruct d (DI HsName e p [d] t [c] tp), Trans d' d, Trans e' e) => Trans (DI HsName e' p [d'] t [c] tp) d where
trans = base . mapDI id trans id (map trans) id id id
instance (Trans d' d, Trans e' e, Trans pa e, Trans pp e,
HasBaseStruct d (DI HsName e p [d] t [c] tp),
HasBaseStruct e (EI HsName e p [d] t [c]),
GetBaseStruct e (EI HsName e p [d] t [c]),
HasBaseStruct p (PI HsName p))
=> Trans (PropDI HsName e' p [d'] t [c] tp pa pp) d where
trans = prop trans transPD
-- PRE: all assertions are named
transPD :: (Trans pa e, Trans pp e,
HasBaseStruct d (DI HsName e p [d] t [c] tp),
HasBaseStruct e (EI HsName e p [d] t [c]),
GetBaseStruct e (EI HsName e p [d] t [c]),
HasBaseStruct p (PI HsName p))
=> PD HsName pa pp -> d
transPD (HsAssertion loc (Just i) pa) =
hsPatBind loc (hsPVar (assertName i)) (HsBody (trans pa)) []
transPD (HsPropDecl loc i xs pp) =
case basestruct body of
Just (HsLambda ps e) -> bind (map cvt xs ++ ps) e
_ -> if null xs then bind [hsPVar x]
(hsParen body $$ hsEVar x)
else bind (map cvt xs) body
where cvt (HsCon i) = hsPVar (predName i)
cvt (HsVar i) = hsPVar i
body = trans pp
bind ps body = hsFunBind loc [HsMatch loc (predName i) ps (HsBody body) []]
x = UnQual "x"
transPD (HsAssertion _ Nothing _) = error "unnamed assertion?"
--- recursive -----------------------------------------------------------------
instance Trans (AssertionI HsName) (Hs.HsExpI HsName) where trans = trans . struct
instance Trans (PredicateI HsName) (Hs.HsExpI HsName) where trans = trans . struct
instance Trans (Prop.HsExpI HsName) (Hs.HsExpI HsName) where trans = trans . struct
instance Trans (Prop.HsDeclI HsName) (Hs.HsDeclI HsName) where trans = trans . struct
--------------------------------------------------------------------------------
-- generating predicates for lifted constructors
--------------------------------------------------------------------------------
gen' compNum pat loc name = hsFunBind loc matches
where matches = [ HsMatch loc lftCon (predPats ++ [pat compNames]) (HsBody body) []
, HsMatch loc lftCon (predPats ++ [hsPWildCard]) (HsBody (hsEVar false)) []
]
lftCon = predName name
body = foldr app (hsEVar true) (zipWith ($$) (map hsEVar predNames) (map hsEVar compNames))
where app e1 e2 = pInfixApp e1 (HsVar conj) e2
names x = take compNum [ UnQual (x ++ show n) | n <- [1..] ]
predPats = map hsPVar predNames
predNames = names "p"
compNames = names "x"
gen :: (HasBaseStruct d (DI HsName e p [d] t c tp),
HasBaseStruct e (EI HsName e p [d] t c),
HasBaseStruct p (PI HsName p))
=> HsConDeclI HsName t c -> d
gen (HsConDecl loc _ _ name ts) = gen' (length ts) pat loc name
where pat compNames = hsPApp name (map hsPVar compNames)
gen (HsRecDecl loc _ _ name fs) = gen' (length fieldNames) pat loc name
where fieldNames = concatMap fst fs
pat compNames = hsPRec name $ zipWith HsField fieldNames (map hsPVar compNames)
genLiftedCon :: (HasBaseStruct d (DI HsName e p [d] t c tp), HasBaseStruct e (EI HsName e p [d] t c),
HasBaseStruct p (PI HsName p)) => DI HsName e p [d] t c tp -> [d]
genLiftedCon (HsDataDecl _ _ _ ds _) = map gen ds
genLiftedCon (HsNewTypeDecl _ _ _ d _) = [gen d]
genLiftedCon _ = []
genLiftedConRec :: Hs.HsDeclI HsName -> [Hs.HsDeclI HsName]
genLiftedConRec d = genLiftedCon (struct d)
-- translation of imports and exports ------------------------------------------
transImps exs = map (transImp exs)
transImp exs (Prop.HsImportDecl s mn q as optspecs) =
Hs.HsImportDecl s mn q as (fmap (transImpSpec ex) optspecs)
where ex = applyRel (fromJust (lookup mn exs)) . getQualified . fakeSN
transImpSpec ex (hiding,ents) = (hiding,concatMap (transEnt hiding ex) ents)
transExps ex = fmap (concatMap (transExp (applyRel ex.fakeSN)))
transExp ex exp =
case exp of
EntE e-> map EntE (transEnt False ex e)
_ -> [exp]
transEnt hiding rel ent =
case ent of
Abs i -> nub $ concatMap (absEnt hiding i) (rel i)
ListSubs i is -> ent:[Var (predName c)|HsCon c<-is]
_ -> [ent] -- no constrcutors/assertions/predicates on other cases
absEnt hiding i ent =
case idTy ent of
Assertion -> [Var (assertName i)]
Property -> [Var (predName i)]
ConstrOf{} | hiding -> [Abs i,Var (predName i)]
_ -> [Abs i]
--- translation of modules -----------------------------------------------------
moduleToQC ((wm,ex),HsModule loc name exps imps decls) =
HsModule loc name newExps newImps newDecls
where
newDecls = addLiftedCons . map trans . addNames $ decls
newExps = transExps (inscpRel wm) exps
newImps = imp "QC_combinators" True (as "QC") Nothing :
imp "QC_prelude" False (as "Prelude") Nothing :
transImps ex imps
addNames = withSt names . mapM nameAssert
where names = [UnQual ("unnamed_" ++ show n) | n <- [1..]]
addLiftedCons ds = concatMap genLiftedConRec ds ++ ds
imp = HsImportDecl loc . PlainModule -- !!! no import from Main
as = Just . PlainModule -- hmm
-- name all assertions
class NameAssert t i | t -> i where
nameAssert :: t -> StateM [i] t
instance NameAssert (PD i pa pp) i where
nameAssert (HsAssertion loc Nothing pa) = do n <- newName
return (HsAssertion loc (Just n) pa)
nameAssert x = return x
instance NameAssert (PropDecl i) i where
nameAssert = mapMProp nameAssert nameAssert
instance (NameAssert e i, NameAssert d i) => NameAssert (DI i e p [d] t c tp) i where
nameAssert = seqDI . mapDI return nameAssert return (mapM nameAssert) return return return
instance (NameAssert e i, NameAssert d i) => NameAssert (EI i e p [d] t c) i where
nameAssert = seqEI . mapEI return nameAssert return (mapM nameAssert) return return
instance NameAssert (Prop.HsDeclI i) i where nameAssert x = rec # nameAssert (struct x)
instance NameAssert (Prop.HsExpI i) i where nameAssert x = rec # nameAssert (struct x)
newName :: StateM [i] i
newName = head # updSt tail
| null | https://raw.githubusercontent.com/RefactoringTools/HaRe/ef5dee64c38fb104e6e5676095946279fbce381c/old/tools/property/transforms/ToQC.hs | haskell | haskell syntax
import PrettyPrint(pp)
import Ents(Ent(..))
property syntax
utils
names of combinators
PRE: all assertions are named
- recursive -----------------------------------------------------------------
------------------------------------------------------------------------------
generating predicates for lifted constructors
------------------------------------------------------------------------------
translation of imports and exports ------------------------------------------
no constrcutors/assertions/predicates on other cases
- translation of modules -----------------------------------------------------
!!! no import from Main
hmm
name all assertions | module ToQC(moduleToQC) where
import HsName
import HasBaseStruct
import HsExp(EI,mapEI,seqEI)
import HsPat(PI,isPVar)
import HsDecl(DI,HsMatchI(..),mapDI,seqDI)
import HsGuards(HsRhs(..))
import HsFields(HsFieldI(..))
import HsLiteral(HsLiteral(..))
import SrcLoc(loc0)
import WorkModule(inscpRel)
import Relations(applyRel)
import TypedIds(IdTy(..),idTy)
import SourceNames(fakeSN)
import QualNames(getQualified)
import PropSyntax as Prop hiding (quant)
import Syntax as Hs
import List(nub)
import Maybe(isJust,fromJust)
import MUtils (( # ))
import StateM
class Trans t e | t -> e where trans :: t -> e
qc = Qual (PlainModule "QC")
neg = qc "not"
conj = qc "/\\"
disj = qc "\\/"
impl = qc "==>"
equiv = qc "<==>"
false = qc "false"
true = qc "true"
forAll = qc "forAll"
exists = qc "exists"
arrow = qc "-=>"
nil = Qual mod_Prelude "nil"
lft = qc "!"
lfp = qc "lfp"
gfp = qc "gfp"
equal = qc "==="
formula_type = qc "F"
pneg = qc "pNot"
pconj = qc "^/\\"
pdisj = qc "^\\/"
pimpl = qc "^==>"
pequiv = qc "^<==>"
transOp Conj = HsVar conj
transOp Disj = HsVar disj
transOp Imp = HsVar impl
transOp Equiv = HsVar equiv
transPredOp Conj = HsVar pconj
transPredOp Disj = HsVar pdisj
transPredOp Imp = HsVar pimpl
transPredOp Equiv = HsVar pequiv
predName y
| isConOp y = mapHsName ("%" ++) y
| isTuple y = mapHsName (\s ->"pred_Tuple"++show (tupleArity s)) y
| otherwise = mapHsName ("pred_" ++) y
where
isTuple y = case hsUnQual y of
'(':_ -> True
_ -> False
tupleArity "()" = 0
tupleArity s = 1+length (filter (==',') s)
assertName = mapHsName ("assert_" ++)
string (UnQual x) = hsLit loc0 (HsString x)
string _ = error "Qualified names not allowd in quantifiers."
f $$ x = hsApp f x
pInfixApp e1 op e2 = hsInfixApp (hsParen e1) op (hsParen e2)
class SplitType t' c t | t' -> t c where
splitType :: t' -> (c,t)
instance SplitType (Q [c] t) [c] t where
splitType (c :=> t) = (c,t)
instance (Trans pa e, Trans pp e, Trans e' e, SplitType t' [c] t,
HasBaseStruct e (EI HsName e p [d] t [c]),
HasBaseStruct p (PI HsName p),
HasBaseStruct t (TI HsName t))
=> Trans (PA HsName e' t' pa pp) e where
trans (Quant All x t pa) = quant forAll x t pa
trans (Quant Exist x t pa) = quant exists x t pa
trans ( PropId i ) = ( assertName i )
trans (PropApp i ts es) = foldl ($$) (hsEVar (predName i)) (map (either trans trans) es)
trans (PropNeg pa) = hsEVar neg $$ trans pa
trans (PropOp op p1 p2) = pInfixApp (trans p1) (transOp op) (trans p2)
trans (PropEqual e1 e2) = pInfixApp (trans e1) (HsVar equal) (trans e2)
trans (PropHas e pp) = hsParen (trans pp) $$ trans e
trans (PropParen pa) = hsParen (trans pa)
quant name x t pa = hsEVar name $$ string x $$ body
where exp = hsLambda [hsPVar x] (trans pa)
body = case t of
Nothing -> exp
Just t -> hsExpTypeSig loc0 (hsParen exp) ctxt (hsTyFun ty (hsTyCon formula_type))
where (ctxt,ty) = splitType t
instance (Trans pa e, Trans pp e, Trans e' e,
HasBaseStruct e (EI HsName e p [d] t [c]),
HasBaseStruct p (PI HsName p),
GetBaseStruct p (PI HsName p),
HasBaseStruct d (DI HsName e p [d] t [c] tp))
=> Trans (PP HsName e' p t1 pa pp) e where
trans ( PredId i ) = ( predName i )
trans (PredApp i ts xs) = foldl ($$) p (map (either trans trans) xs)
where p = hsEVar (predName i)
trans (PredArrow p1 p2) = pInfixApp (trans p1) (HsVar arrow) (trans p2)
trans (PredInfixApp p1 i p2) = pInfixApp (trans p1) p (trans p2)
where p = HsVar (predName i)
trans (PredNeg oppt p) = hsEVar pneg $$ trans p
trans (PredOp op optt p1 p2) = pInfixApp (trans p1) (transPredOp op) (trans p2)
trans (PredLfp i _ pp) = hsEVar lfp $$ hsLambda [hsPVar (predName i)] (trans pp)
trans (PredGfp i _ pp) = hsEVar gfp $$ hsLambda [hsPVar (predName i)] (trans pp)
trans PredNil = hsEVar nil
trans (PredLifted e) = hsEVar lft $$ trans e
trans (PredStrong pp) = trans pp
trans (PredComp ps pa)
| allVars = hsLambda pats exp
| otherwise = hsLet [hsFunBind loc0 ms] (hsEVar x)
where allVars = all isJust (map isPVar pats)
(pats,tys) = unzip ps
exp = trans pa
ms = [ HsMatch loc0 x pats (HsBody exp) []
, HsMatch loc0 x (map und ps) (HsBody $ hsEVar false) []
]
und _ = hsPWildCard
x = UnQual "it"
trans (PredParen pp) = hsParen (trans pp)
instance (HasBaseStruct e (EI HsName e p [d] t [c]), Trans d' d, Trans e' e) => Trans (EI HsName e' p [d'] t [c]) e where
trans = base . mapEI id trans id (map trans) id id
instance (HasBaseStruct d (DI HsName e p [d] t [c] tp), Trans d' d, Trans e' e) => Trans (DI HsName e' p [d'] t [c] tp) d where
trans = base . mapDI id trans id (map trans) id id id
instance (Trans d' d, Trans e' e, Trans pa e, Trans pp e,
HasBaseStruct d (DI HsName e p [d] t [c] tp),
HasBaseStruct e (EI HsName e p [d] t [c]),
GetBaseStruct e (EI HsName e p [d] t [c]),
HasBaseStruct p (PI HsName p))
=> Trans (PropDI HsName e' p [d'] t [c] tp pa pp) d where
trans = prop trans transPD
transPD :: (Trans pa e, Trans pp e,
HasBaseStruct d (DI HsName e p [d] t [c] tp),
HasBaseStruct e (EI HsName e p [d] t [c]),
GetBaseStruct e (EI HsName e p [d] t [c]),
HasBaseStruct p (PI HsName p))
=> PD HsName pa pp -> d
transPD (HsAssertion loc (Just i) pa) =
hsPatBind loc (hsPVar (assertName i)) (HsBody (trans pa)) []
transPD (HsPropDecl loc i xs pp) =
case basestruct body of
Just (HsLambda ps e) -> bind (map cvt xs ++ ps) e
_ -> if null xs then bind [hsPVar x]
(hsParen body $$ hsEVar x)
else bind (map cvt xs) body
where cvt (HsCon i) = hsPVar (predName i)
cvt (HsVar i) = hsPVar i
body = trans pp
bind ps body = hsFunBind loc [HsMatch loc (predName i) ps (HsBody body) []]
x = UnQual "x"
transPD (HsAssertion _ Nothing _) = error "unnamed assertion?"
instance Trans (AssertionI HsName) (Hs.HsExpI HsName) where trans = trans . struct
instance Trans (PredicateI HsName) (Hs.HsExpI HsName) where trans = trans . struct
instance Trans (Prop.HsExpI HsName) (Hs.HsExpI HsName) where trans = trans . struct
instance Trans (Prop.HsDeclI HsName) (Hs.HsDeclI HsName) where trans = trans . struct
gen' compNum pat loc name = hsFunBind loc matches
where matches = [ HsMatch loc lftCon (predPats ++ [pat compNames]) (HsBody body) []
, HsMatch loc lftCon (predPats ++ [hsPWildCard]) (HsBody (hsEVar false)) []
]
lftCon = predName name
body = foldr app (hsEVar true) (zipWith ($$) (map hsEVar predNames) (map hsEVar compNames))
where app e1 e2 = pInfixApp e1 (HsVar conj) e2
names x = take compNum [ UnQual (x ++ show n) | n <- [1..] ]
predPats = map hsPVar predNames
predNames = names "p"
compNames = names "x"
gen :: (HasBaseStruct d (DI HsName e p [d] t c tp),
HasBaseStruct e (EI HsName e p [d] t c),
HasBaseStruct p (PI HsName p))
=> HsConDeclI HsName t c -> d
gen (HsConDecl loc _ _ name ts) = gen' (length ts) pat loc name
where pat compNames = hsPApp name (map hsPVar compNames)
gen (HsRecDecl loc _ _ name fs) = gen' (length fieldNames) pat loc name
where fieldNames = concatMap fst fs
pat compNames = hsPRec name $ zipWith HsField fieldNames (map hsPVar compNames)
genLiftedCon :: (HasBaseStruct d (DI HsName e p [d] t c tp), HasBaseStruct e (EI HsName e p [d] t c),
HasBaseStruct p (PI HsName p)) => DI HsName e p [d] t c tp -> [d]
genLiftedCon (HsDataDecl _ _ _ ds _) = map gen ds
genLiftedCon (HsNewTypeDecl _ _ _ d _) = [gen d]
genLiftedCon _ = []
genLiftedConRec :: Hs.HsDeclI HsName -> [Hs.HsDeclI HsName]
genLiftedConRec d = genLiftedCon (struct d)
transImps exs = map (transImp exs)
transImp exs (Prop.HsImportDecl s mn q as optspecs) =
Hs.HsImportDecl s mn q as (fmap (transImpSpec ex) optspecs)
where ex = applyRel (fromJust (lookup mn exs)) . getQualified . fakeSN
transImpSpec ex (hiding,ents) = (hiding,concatMap (transEnt hiding ex) ents)
transExps ex = fmap (concatMap (transExp (applyRel ex.fakeSN)))
transExp ex exp =
case exp of
EntE e-> map EntE (transEnt False ex e)
_ -> [exp]
transEnt hiding rel ent =
case ent of
Abs i -> nub $ concatMap (absEnt hiding i) (rel i)
ListSubs i is -> ent:[Var (predName c)|HsCon c<-is]
absEnt hiding i ent =
case idTy ent of
Assertion -> [Var (assertName i)]
Property -> [Var (predName i)]
ConstrOf{} | hiding -> [Abs i,Var (predName i)]
_ -> [Abs i]
moduleToQC ((wm,ex),HsModule loc name exps imps decls) =
HsModule loc name newExps newImps newDecls
where
newDecls = addLiftedCons . map trans . addNames $ decls
newExps = transExps (inscpRel wm) exps
newImps = imp "QC_combinators" True (as "QC") Nothing :
imp "QC_prelude" False (as "Prelude") Nothing :
transImps ex imps
addNames = withSt names . mapM nameAssert
where names = [UnQual ("unnamed_" ++ show n) | n <- [1..]]
addLiftedCons ds = concatMap genLiftedConRec ds ++ ds
class NameAssert t i | t -> i where
nameAssert :: t -> StateM [i] t
instance NameAssert (PD i pa pp) i where
nameAssert (HsAssertion loc Nothing pa) = do n <- newName
return (HsAssertion loc (Just n) pa)
nameAssert x = return x
instance NameAssert (PropDecl i) i where
nameAssert = mapMProp nameAssert nameAssert
instance (NameAssert e i, NameAssert d i) => NameAssert (DI i e p [d] t c tp) i where
nameAssert = seqDI . mapDI return nameAssert return (mapM nameAssert) return return return
instance (NameAssert e i, NameAssert d i) => NameAssert (EI i e p [d] t c) i where
nameAssert = seqEI . mapEI return nameAssert return (mapM nameAssert) return return
instance NameAssert (Prop.HsDeclI i) i where nameAssert x = rec # nameAssert (struct x)
instance NameAssert (Prop.HsExpI i) i where nameAssert x = rec # nameAssert (struct x)
newName :: StateM [i] i
newName = head # updSt tail
|
a7a4b90609b31199c7a3f0661feb5091d7217280df93ddeae5d4af46017019fa | xapi-project/xcp-rrdd | rrdd_stats.ml |
* Copyright ( C ) Citrix Systems Inc.
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation ; version 2.1 only . with the special
* exception on linking described in file LICENSE .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
* Copyright (C) Citrix Systems Inc.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation; version 2.1 only. with the special
* exception on linking described in file LICENSE.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*)
(* Monitor selected processes, periodically logging stats. @group Performance
Monitoring *)
module D = Debug.Make (struct let name = "rrdd_stats" end)
open D
(** Represents a subset of the data in /proc/meminfo *)
type meminfo = {
total: int
KiB
free: int
KiB
buffered: int
KiB
cached: int
KiB
swap_total: int
KiB
KiB
}
(** Represents a subset of the data in /proc/<pid>/status *)
type process_memory_info = {
peak: int
KiB
size: int
KiB
locked: int
KiB
hwm: int
KiB
rss: int
KiB
data: int
KiB
stack: int
KiB
exe: int
KiB
KiB
}
let null_process_memory_info =
{
peak= 0
; size= 0
; locked= 0
; hwm= 0
; rss= 0
; data= 0
; stack= 0
; exe= 0
; lib= 0
}
let plus_process_memory_info pmi1 pmi2 =
{
peak= pmi1.peak + pmi2.peak
; size= pmi1.size + pmi2.size
; locked= pmi1.locked + pmi2.locked
; hwm= pmi1.hwm + pmi2.hwm
; rss= pmi1.rss + pmi2.rss
; data= pmi1.data + pmi2.data
; stack= pmi1.stack + pmi2.stack
; exe= pmi1.exe + pmi2.exe
; lib= pmi1.lib + pmi2.lib
}
(* TODO: Move this function (and its clones) to xen-api-libs. *)
let split_colon line = Astring.String.fields ~empty:false line
let meminfo () =
let all = Xapi_stdext_unix.Unixext.string_of_file "/proc/meminfo" in
let total = ref (-1)
and free = ref (-1)
and buffered = ref (-1)
and cached = ref (-1)
and swap_total = ref (-1)
and swap_free = ref (-1) in
List.iter
(fun line ->
match split_colon line with
| ["MemTotal:"; x; "kB"] ->
total := int_of_string x
| ["MemFree:"; x; "kB"] ->
free := int_of_string x
| ["Buffers:"; x; "kB"] ->
buffered := int_of_string x
| ["Cached:"; x; "kB"] ->
cached := int_of_string x
| ["SwapTotal:"; x; "kB"] ->
swap_total := int_of_string x
| ["SwapFree:"; x; "kB"] ->
swap_free := int_of_string x
| _ ->
())
Astring.String.(cuts ~sep:"\n" all) ;
{
total= !total
; free= !free
; buffered= !buffered
; cached= !cached
; swap_total= !swap_total
; swap_free= !swap_free
}
let string_of_meminfo (x : meminfo) =
Printf.sprintf
"MemTotal: %d KiB; MemFree: %d KiB; Buffered: %d KiB; Cached: %d KiB; \
SwapTotal: %d KiB; SwapFree: %d KiB"
x.total x.free x.buffered x.cached x.swap_total x.swap_free
let process_memory_info_of_pid (pid : int) : process_memory_info =
let all =
Xapi_stdext_unix.Unixext.string_of_file
(Printf.sprintf "/proc/%d/status" pid)
in
let peak = ref (-1)
and size = ref (-1)
and locked = ref (-1)
and hwm = ref (-1)
and rss = ref (-1)
and data = ref (-1)
and stack = ref (-1)
and exe = ref (-1)
and lib = ref (-1) in
List.iter
(fun line ->
match split_colon line with
| ["VmPeak:"; x; "kB"] ->
peak := int_of_string x
| ["VmSize:"; x; "kB"] ->
size := int_of_string x
| ["VmLck:"; x; "kB"] ->
locked := int_of_string x
| ["VmHWM:"; x; "kB"] ->
hwm := int_of_string x
| ["VmRSS:"; x; "kB"] ->
rss := int_of_string x
| ["VmData:"; x; "kB"] ->
data := int_of_string x
| ["VmStk:"; x; "kB"] ->
stack := int_of_string x
| ["VmExe:"; x; "kB"] ->
exe := int_of_string x
| ["VmLib:"; x; "kB"] ->
lib := int_of_string x
| _ ->
())
Astring.String.(cuts ~sep:"\n" all) ;
{
peak= !peak
; size= !size
; locked= !locked
; hwm= !hwm
; rss= !rss
; data= !data
; stack= !stack
; exe= !exe
; lib= !lib
}
let string_of_process_memory_info (x : process_memory_info) =
Printf.sprintf "size: %d KiB; rss: %d KiB; data: %d KiB; stack: %d KiB" x.size
x.rss x.data x.stack
(* Log the initial offset between our monotonic clock and UTC *)
let initial_offset =
Unix.gettimeofday () -. (Int64.to_float (Mtime_clock.now_ns ()) /. 1e9)
let print_system_stats () =
let mi = string_of_meminfo (meminfo ()) in
debug "system stats: %s" mi ;
let current_offset =
Unix.gettimeofday () -. (Int64.to_float (Mtime_clock.now_ns ()) /. 1e9)
in
debug "Clock drift: %.0f" (current_offset -. initial_offset)
(* Obtains process IDs for the specified program. This should probably be moved
into xen-api-libs. *)
let pidof ?(pid_dir = "/var/run") program =
try
let out =
Xapi_stdext_unix.Unixext.string_of_file
(Printf.sprintf "%s/%s.pid" pid_dir program)
in
let words = Astring.String.fields ~empty:false out in
let maybe_parse_int acc i =
try int_of_string i :: acc with Failure _ -> acc
in
List.fold_left maybe_parse_int [] words
with
| Unix.Unix_error (Unix.ENOENT, _, _) | Unix.Unix_error (Unix.EACCES, _, _) ->
[]
let print_stats_for ~program =
let pids = pidof program in
let n = List.length pids in
let pmis = List.map process_memory_info_of_pid pids in
let pmi =
List.fold_left plus_process_memory_info null_process_memory_info pmis
in
debug "%s stats (n = %d): %s" program n (string_of_process_memory_info pmi)
let last_log = ref 0.
let log_interval = 60.
let programs_to_monitor = ["xcp-rrdd"; "xapi"; "xenopsd-xc"; "xenopsd-xenlight"]
let print_stats () =
print_system_stats () ;
List.iter (fun program -> print_stats_for ~program) programs_to_monitor
(** Called from the main monitoring loop. *)
let print_snapshot () =
try
Only run once every minute to avoid spamming the logs
let now = Unix.gettimeofday () in
if now -. !last_log > log_interval then (
last_log := now ;
print_stats ()
)
with e ->
debug "Caught: %s" (Printexc.to_string e) ;
log_backtrace ()
| null | https://raw.githubusercontent.com/xapi-project/xcp-rrdd/f810004ae88d308043b73365a959562302b1d4b5/bin/rrdd/rrdd_stats.ml | ocaml | Monitor selected processes, periodically logging stats. @group Performance
Monitoring
* Represents a subset of the data in /proc/meminfo
* Represents a subset of the data in /proc/<pid>/status
TODO: Move this function (and its clones) to xen-api-libs.
Log the initial offset between our monotonic clock and UTC
Obtains process IDs for the specified program. This should probably be moved
into xen-api-libs.
* Called from the main monitoring loop. |
* Copyright ( C ) Citrix Systems Inc.
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation ; version 2.1 only . with the special
* exception on linking described in file LICENSE .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
* Copyright (C) Citrix Systems Inc.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation; version 2.1 only. with the special
* exception on linking described in file LICENSE.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*)
module D = Debug.Make (struct let name = "rrdd_stats" end)
open D
type meminfo = {
total: int
KiB
free: int
KiB
buffered: int
KiB
cached: int
KiB
swap_total: int
KiB
KiB
}
type process_memory_info = {
peak: int
KiB
size: int
KiB
locked: int
KiB
hwm: int
KiB
rss: int
KiB
data: int
KiB
stack: int
KiB
exe: int
KiB
KiB
}
let null_process_memory_info =
{
peak= 0
; size= 0
; locked= 0
; hwm= 0
; rss= 0
; data= 0
; stack= 0
; exe= 0
; lib= 0
}
let plus_process_memory_info pmi1 pmi2 =
{
peak= pmi1.peak + pmi2.peak
; size= pmi1.size + pmi2.size
; locked= pmi1.locked + pmi2.locked
; hwm= pmi1.hwm + pmi2.hwm
; rss= pmi1.rss + pmi2.rss
; data= pmi1.data + pmi2.data
; stack= pmi1.stack + pmi2.stack
; exe= pmi1.exe + pmi2.exe
; lib= pmi1.lib + pmi2.lib
}
let split_colon line = Astring.String.fields ~empty:false line
let meminfo () =
let all = Xapi_stdext_unix.Unixext.string_of_file "/proc/meminfo" in
let total = ref (-1)
and free = ref (-1)
and buffered = ref (-1)
and cached = ref (-1)
and swap_total = ref (-1)
and swap_free = ref (-1) in
List.iter
(fun line ->
match split_colon line with
| ["MemTotal:"; x; "kB"] ->
total := int_of_string x
| ["MemFree:"; x; "kB"] ->
free := int_of_string x
| ["Buffers:"; x; "kB"] ->
buffered := int_of_string x
| ["Cached:"; x; "kB"] ->
cached := int_of_string x
| ["SwapTotal:"; x; "kB"] ->
swap_total := int_of_string x
| ["SwapFree:"; x; "kB"] ->
swap_free := int_of_string x
| _ ->
())
Astring.String.(cuts ~sep:"\n" all) ;
{
total= !total
; free= !free
; buffered= !buffered
; cached= !cached
; swap_total= !swap_total
; swap_free= !swap_free
}
let string_of_meminfo (x : meminfo) =
Printf.sprintf
"MemTotal: %d KiB; MemFree: %d KiB; Buffered: %d KiB; Cached: %d KiB; \
SwapTotal: %d KiB; SwapFree: %d KiB"
x.total x.free x.buffered x.cached x.swap_total x.swap_free
let process_memory_info_of_pid (pid : int) : process_memory_info =
let all =
Xapi_stdext_unix.Unixext.string_of_file
(Printf.sprintf "/proc/%d/status" pid)
in
let peak = ref (-1)
and size = ref (-1)
and locked = ref (-1)
and hwm = ref (-1)
and rss = ref (-1)
and data = ref (-1)
and stack = ref (-1)
and exe = ref (-1)
and lib = ref (-1) in
List.iter
(fun line ->
match split_colon line with
| ["VmPeak:"; x; "kB"] ->
peak := int_of_string x
| ["VmSize:"; x; "kB"] ->
size := int_of_string x
| ["VmLck:"; x; "kB"] ->
locked := int_of_string x
| ["VmHWM:"; x; "kB"] ->
hwm := int_of_string x
| ["VmRSS:"; x; "kB"] ->
rss := int_of_string x
| ["VmData:"; x; "kB"] ->
data := int_of_string x
| ["VmStk:"; x; "kB"] ->
stack := int_of_string x
| ["VmExe:"; x; "kB"] ->
exe := int_of_string x
| ["VmLib:"; x; "kB"] ->
lib := int_of_string x
| _ ->
())
Astring.String.(cuts ~sep:"\n" all) ;
{
peak= !peak
; size= !size
; locked= !locked
; hwm= !hwm
; rss= !rss
; data= !data
; stack= !stack
; exe= !exe
; lib= !lib
}
let string_of_process_memory_info (x : process_memory_info) =
Printf.sprintf "size: %d KiB; rss: %d KiB; data: %d KiB; stack: %d KiB" x.size
x.rss x.data x.stack
let initial_offset =
Unix.gettimeofday () -. (Int64.to_float (Mtime_clock.now_ns ()) /. 1e9)
let print_system_stats () =
let mi = string_of_meminfo (meminfo ()) in
debug "system stats: %s" mi ;
let current_offset =
Unix.gettimeofday () -. (Int64.to_float (Mtime_clock.now_ns ()) /. 1e9)
in
debug "Clock drift: %.0f" (current_offset -. initial_offset)
let pidof ?(pid_dir = "/var/run") program =
try
let out =
Xapi_stdext_unix.Unixext.string_of_file
(Printf.sprintf "%s/%s.pid" pid_dir program)
in
let words = Astring.String.fields ~empty:false out in
let maybe_parse_int acc i =
try int_of_string i :: acc with Failure _ -> acc
in
List.fold_left maybe_parse_int [] words
with
| Unix.Unix_error (Unix.ENOENT, _, _) | Unix.Unix_error (Unix.EACCES, _, _) ->
[]
let print_stats_for ~program =
let pids = pidof program in
let n = List.length pids in
let pmis = List.map process_memory_info_of_pid pids in
let pmi =
List.fold_left plus_process_memory_info null_process_memory_info pmis
in
debug "%s stats (n = %d): %s" program n (string_of_process_memory_info pmi)
let last_log = ref 0.
let log_interval = 60.
let programs_to_monitor = ["xcp-rrdd"; "xapi"; "xenopsd-xc"; "xenopsd-xenlight"]
let print_stats () =
print_system_stats () ;
List.iter (fun program -> print_stats_for ~program) programs_to_monitor
let print_snapshot () =
try
Only run once every minute to avoid spamming the logs
let now = Unix.gettimeofday () in
if now -. !last_log > log_interval then (
last_log := now ;
print_stats ()
)
with e ->
debug "Caught: %s" (Printexc.to_string e) ;
log_backtrace ()
|
5d3fa457608b06aa7717afd6af34b8b02ad68a0dc983914c6bd4340c24396629 | BinaryAnalysisPlatform/bap-plugins | trim.ml | open Bap.Std
open Core_kernel
open Cut
open Options
open Graphlib.Std
open Poly
let (^::) = Seq.cons
type trim = {
trim_sub : Sub.t;
cut_group : cut_group;
src_tid : tid;
sink_tid : tid
}
* mark the sink_blk with a no_return , otherwise we ca n't call
Sub.ssa on the blk . Failure can pass silently here , so it 's
not ideal . We should check that at least one blk was labeled
with no_return
Sub.ssa on the blk. Failure can pass silently here, so it's
not ideal. We should check that at least one blk was labeled
with no_return *)
let mark_with_no_return sub sink_blk_tid =
Term.map blk_t sub ~f:(fun blk ->
if Util.tid_of_blk blk = sink_blk_tid then
Term.map jmp_t blk ~f:(fun jmp ->
match Jmp.kind jmp with
| Call c ->
Jmp.create_call ~tid:(Term.tid jmp)
~cond:(Jmp.cond jmp)
(Call.with_noreturn c)
| _ -> jmp)
else blk)
(** test for structural equality: string of right hand side of
defs are all the same. This needs serious rethinking. *)
let cmp blk' blk =
let def_str b =
Term.enum def_t b |> Seq.fold ~init:"" ~f:(fun acc def ->
let s = Exp.pps () (Def.rhs def) in
acc^s) in
def_str blk' = def_str blk
let tid_from_blk_structure blk sub =
let matches =
Term.enum blk_t sub |> Seq.filter ~f:(fun blk' ->
(** find the blk' that matches blk, and get its tid *)
if cmp blk' blk then true else false) in
match Seq.length matches with
| 0 ->
Format.printf " Warning: No blk structure match!";
(1,Util.tid_of_blk blk)
| 1 -> let blk = Seq.hd_exn matches in
(0,Util.tid_of_blk blk)
This happens when there are two or more blocks in * the same *
subroutine that call the sink and are the same structurally .
Weird , but can happen . Unhandled case for now .
subroutine that call the sink and are the same structurally.
Weird, but can happen. Unhandled case for now. *)
| _ -> Format.printf "Warning: More than one blk match!";
(2,Seq.hd_exn matches |> Util.tid_of_blk)
let trim sub src sink =
let module Cfg = Graphs.Tid in
let cfg = Sub.to_graph sub in
let reachable_src =
Graphlib.fold_reachable (module Cfg)
~init:Cfg.Node.Set.empty ~f:Set.add
cfg src in
let reachable_sink =
Graphlib.fold_reachable (module Cfg)
~rev:true ~init:Cfg.Node.Set.empty ~f:Set.add
cfg sink in
let cut_set =
Set.inter reachable_src reachable_sink in
Term.filter blk_t sub ~f:(fun blk ->
Set.mem cut_set (Term.tid blk))
(* returns a sequence of subs that start at the source and go to
the sink. sub is lca in this case *)
let trims sub g highlight with_dots =
let blks_call_src = g.src_caller_blks in
let blks_call_sink = g.sink_caller_blks in
let pairs = Seq.cartesian_product blks_call_src blks_call_sink in
Seq.foldi ~init:Seq.empty pairs ~f:(fun j acc (src_blk, sink_blk) ->
let res_src,src_tid = tid_from_blk_structure src_blk sub in
let res_sink,sink_tid = tid_from_blk_structure sink_blk sub in
if res_src = 0 && res_sink = 0 then (* If the source and sink exist *)
( let sub' = trim sub src_tid sink_tid in
match Term.length blk_t sub' with
| 0 ->
if with_dots then
Output.cut_graph `Invalid src_tid sink_tid sub g.id j;
Format.printf "Sink is before source!\n%!"; acc
| _ ->
if with_dots then
Output.cut_graph `Valid src_tid sink_tid sub g.id j;
let sub' = mark_with_no_return sub' sink_tid in
let t = {trim_sub = sub';
src_tid;
sink_tid;
cut_group = g} in
t ^:: acc)
else
(if with_dots then
Output.cut_graph `Skipped src_tid sink_tid sub g.id j;
Format.printf "[x] Skipping pair %d->%d\n" res_src res_sink;
acc))
| null | https://raw.githubusercontent.com/BinaryAnalysisPlatform/bap-plugins/2e9aa5c7c24ef494d0e7db1b43c5ceedcb4196a8/minos/trim.ml | ocaml | * test for structural equality: string of right hand side of
defs are all the same. This needs serious rethinking.
* find the blk' that matches blk, and get its tid
returns a sequence of subs that start at the source and go to
the sink. sub is lca in this case
If the source and sink exist | open Bap.Std
open Core_kernel
open Cut
open Options
open Graphlib.Std
open Poly
let (^::) = Seq.cons
type trim = {
trim_sub : Sub.t;
cut_group : cut_group;
src_tid : tid;
sink_tid : tid
}
* mark the sink_blk with a no_return , otherwise we ca n't call
Sub.ssa on the blk . Failure can pass silently here , so it 's
not ideal . We should check that at least one blk was labeled
with no_return
Sub.ssa on the blk. Failure can pass silently here, so it's
not ideal. We should check that at least one blk was labeled
with no_return *)
let mark_with_no_return sub sink_blk_tid =
Term.map blk_t sub ~f:(fun blk ->
if Util.tid_of_blk blk = sink_blk_tid then
Term.map jmp_t blk ~f:(fun jmp ->
match Jmp.kind jmp with
| Call c ->
Jmp.create_call ~tid:(Term.tid jmp)
~cond:(Jmp.cond jmp)
(Call.with_noreturn c)
| _ -> jmp)
else blk)
let cmp blk' blk =
let def_str b =
Term.enum def_t b |> Seq.fold ~init:"" ~f:(fun acc def ->
let s = Exp.pps () (Def.rhs def) in
acc^s) in
def_str blk' = def_str blk
let tid_from_blk_structure blk sub =
let matches =
Term.enum blk_t sub |> Seq.filter ~f:(fun blk' ->
if cmp blk' blk then true else false) in
match Seq.length matches with
| 0 ->
Format.printf " Warning: No blk structure match!";
(1,Util.tid_of_blk blk)
| 1 -> let blk = Seq.hd_exn matches in
(0,Util.tid_of_blk blk)
This happens when there are two or more blocks in * the same *
subroutine that call the sink and are the same structurally .
Weird , but can happen . Unhandled case for now .
subroutine that call the sink and are the same structurally.
Weird, but can happen. Unhandled case for now. *)
| _ -> Format.printf "Warning: More than one blk match!";
(2,Seq.hd_exn matches |> Util.tid_of_blk)
let trim sub src sink =
let module Cfg = Graphs.Tid in
let cfg = Sub.to_graph sub in
let reachable_src =
Graphlib.fold_reachable (module Cfg)
~init:Cfg.Node.Set.empty ~f:Set.add
cfg src in
let reachable_sink =
Graphlib.fold_reachable (module Cfg)
~rev:true ~init:Cfg.Node.Set.empty ~f:Set.add
cfg sink in
let cut_set =
Set.inter reachable_src reachable_sink in
Term.filter blk_t sub ~f:(fun blk ->
Set.mem cut_set (Term.tid blk))
let trims sub g highlight with_dots =
let blks_call_src = g.src_caller_blks in
let blks_call_sink = g.sink_caller_blks in
let pairs = Seq.cartesian_product blks_call_src blks_call_sink in
Seq.foldi ~init:Seq.empty pairs ~f:(fun j acc (src_blk, sink_blk) ->
let res_src,src_tid = tid_from_blk_structure src_blk sub in
let res_sink,sink_tid = tid_from_blk_structure sink_blk sub in
( let sub' = trim sub src_tid sink_tid in
match Term.length blk_t sub' with
| 0 ->
if with_dots then
Output.cut_graph `Invalid src_tid sink_tid sub g.id j;
Format.printf "Sink is before source!\n%!"; acc
| _ ->
if with_dots then
Output.cut_graph `Valid src_tid sink_tid sub g.id j;
let sub' = mark_with_no_return sub' sink_tid in
let t = {trim_sub = sub';
src_tid;
sink_tid;
cut_group = g} in
t ^:: acc)
else
(if with_dots then
Output.cut_graph `Skipped src_tid sink_tid sub g.id j;
Format.printf "[x] Skipping pair %d->%d\n" res_src res_sink;
acc))
|
4c0e320ebc272dde2927ae42b7406f586986349b7c89b7b049d60bf53041fd31 | helvm/helms | MTInsertDef.hs | module HelVM.Common.Containers.MTInsertDef where
import Control.Type.Operator
import Data.Default
import Data.MonoTraversable
import Data.Sequence ((|>))
import Data.Sequences
import qualified Data.IntMap as IntMap
import qualified Data.Sequence as Seq
-- | Insert a new element
naturalInsertDef :: (InsertDef seq , Num $ Index seq) => Natural -> Element seq -> seq -> seq
naturalInsertDef = insertDef . fromIntegral
-- | Type Class
class InsertDef seq where
insertDef :: Index seq -> Element seq -> seq -> seq
instance Default a => InsertDef [a] where
insertDef 0 e [] = [e]
insertDef 0 e (_:xs) = e : xs
insertDef i e [] = def : insertDef (i-1) e []
insertDef i e (x:xs) = x : insertDef (i-1) e xs
instance Default a => InsertDef (Seq a) where
insertDef i e c = (check . Seq.length) c where
check l
| i < l = Seq.update i e c
| otherwise = c <> Seq.replicate (i - l) def |> e
instance Index (IntMap a) ~ Int => InsertDef (IntMap a) where
insertDef = IntMap.insert
| null | https://raw.githubusercontent.com/helvm/helms/df44190e9ddc64037a6e58548f4a14c7214df7a3/hs/src/HelVM/Common/Containers/MTInsertDef.hs | haskell | | Insert a new element
| Type Class | module HelVM.Common.Containers.MTInsertDef where
import Control.Type.Operator
import Data.Default
import Data.MonoTraversable
import Data.Sequence ((|>))
import Data.Sequences
import qualified Data.IntMap as IntMap
import qualified Data.Sequence as Seq
naturalInsertDef :: (InsertDef seq , Num $ Index seq) => Natural -> Element seq -> seq -> seq
naturalInsertDef = insertDef . fromIntegral
class InsertDef seq where
insertDef :: Index seq -> Element seq -> seq -> seq
instance Default a => InsertDef [a] where
insertDef 0 e [] = [e]
insertDef 0 e (_:xs) = e : xs
insertDef i e [] = def : insertDef (i-1) e []
insertDef i e (x:xs) = x : insertDef (i-1) e xs
instance Default a => InsertDef (Seq a) where
insertDef i e c = (check . Seq.length) c where
check l
| i < l = Seq.update i e c
| otherwise = c <> Seq.replicate (i - l) def |> e
instance Index (IntMap a) ~ Int => InsertDef (IntMap a) where
insertDef = IntMap.insert
|
b504874065ba6de118f7450e287d87bb9c140c32285376024eef107e1f759876 | OCamlPro/ocaml-solidity | solidity_checker_TYPES.ml | (**************************************************************************)
(* *)
Copyright ( c ) 2021 OCamlPro & Origin Labs
(* *)
(* All rights reserved. *)
(* This file is distributed under the terms of the GNU Lesser General *)
Public License version 2.1 , with the special exception on linking
(* described in the LICENSE.md file in the root directory. *)
(* *)
(* *)
(**************************************************************************)
open Solidity_common
type origin =
| Defined
| Imported
| Inherited
type env = {
upper_env : env option; (* i.e module/contract/fonction/block *)
mutable ident_map : ((ident_desc * origin) list) IdentMap.t;
mutable using_for : (env * (type_ list)) AbsLongIdentMap.t;
(* empty list = all types = * *)
}
and ident_desc =
| Alias of alias_desc (* In: modules (temporary) *)
| Module of module_desc (* In: modules *)
| Contract of contract_desc (* In: modules *)
| Type of type_desc (* In: modules, contracts*)
| Variable of variable_desc (* In: modules, contracts, functions *)
| Function of function_desc (* In: modules, contracts*)
| Modifier of modifier_desc (* In: contracts *)
| Event of event_desc (* In: contracts *)
Internal use , not in envs
Internal use , not in envs
and alias_desc = {
alias_abs_name : absolute LongIdent.t;
alias_pos : pos;
alias_target_id : Ident.t;
alias_target_file : string;
alias_target_env : env;
mutable alias_targets : (ident_desc * origin) list;
}
(* This is just a container for things imported using the import directive *)
(* Can be imported : types, contract, libraries AND MODULES (top-level stuff) *)
and module_desc = {
module_abs_name : absolute LongIdent.t;
module_pos : pos;
module_file : string;
module_env : env; (* this aliases a module env *)
}
and type_desc =
| TDEnum of enum_desc
| TDStruct of struct_desc
and enum_desc = {
enum_abs_name : absolute LongIdent.t;
enum_pos : pos;
enum_values : (Ident.t * int) list;
}
and constr_desc = {
constr_enum_desc : enum_desc;
constr_name : Ident.t;
constr_value : int;
constr_type : type_;
}
and struct_desc = {
struct_abs_name : absolute LongIdent.t;
mutable struct_fields : (Ident.t * type_) list; (* Note: order is important *)
mutable has_mapping : bool;
struct_def : Solidity_ast.struct_definition;
}
and field_desc = {
field_struct_desc : struct_desc;
field_name : Ident.t;
field_type : type_;
}
and contract_desc = {
contract_abs_name : absolute LongIdent.t;
contract_env : env;
mutable contract_hierarchy : (absolute LongIdent.t * contract_desc) list;
Note : the most derived first , including itself
contract_def : Solidity_ast.contract_definition;
}
and variable_desc = {
variable_abs_name : absolute LongIdent.t;
mutable variable_type : type_;
variable_visibility : Solidity_ast.visibility;
variable_mutability : Solidity_ast.var_mutability;
variable_local : bool;
mutable variable_override : absolute LongIdent.t list option;
mutable variable_getter : function_desc option; (* when the variable has a getter*)
variable_is_primitive : bool;
variable_def : Solidity_ast.state_variable_definition option; (* module/contract*)
mutable variable_ops : ( function_desc * variable_operation ) list ;
}
and function_desc = {
function_abs_name : absolute LongIdent.t;
mutable function_params : (type_ * Ident.t option) list;
mutable function_returns : (type_ * Ident.t option) list;
some primitives ( push / pop ) return lvalues
function_visibility : Solidity_ast.visibility;
function_mutability : Solidity_ast.fun_mutability;
mutable function_override : absolute LongIdent.t list option;
mutable function_selector : string option;
function_is_method : bool;
function_is_primitive : bool;
function_def : Solidity_ast.function_definition option; (* Primitives have no definition *)
mutable function_ops : ( variable_desc * variable_operation ) list ;
mutable function_purity : function_purity ;
}
and function_purity = (* whether it modifies its contract *)
| PurityUnknown
| PurityPure
| PurityView
| PurityMute
and variable_operation =
| OpAssign
| OpAccess
| OpCall of function_desc
and modifier_desc = {
modifier_abs_name : absolute LongIdent.t;
mutable modifier_params : (type_ * Ident.t option) list;
modifier_def : Solidity_ast.modifier_definition;
(* Note: Modifiers have no visibility nor mutability *)
}
and event_desc = {
event_abs_name : absolute LongIdent.t;
mutable event_params : (type_ * Ident.t option) list;
event_def : Solidity_ast.event_definition;
}
and fun_kind =
| KOther
| KNewContract
| KExtContractFun
| KReturn
and function_options = {
kind : fun_kind;
value : bool;
gas : bool;
salt : bool;
fields : StringSet.t ;
}
and location =
| LMemory
| LStorage of bool (* false = ref, true = pointer *)
| LCalldata (* is always a reference *)
and abstract_type =
| TvmCell
| TvmSlice
| TvmBuilder
| TvmCall
TvmCall with .extMsg
and type_ =
| TBool
| TInt of int
| TUint of int
| TFixed of int * int
| TUfixed of int * int
| TAddress of bool (* false = address, true = address payable *)
| TFixBytes of int
| TBytes of location
| TString of location
| TEnum of absolute LongIdent.t * enum_desc
| TStruct of absolute LongIdent.t * struct_desc * location
| TContract of absolute LongIdent.t * contract_desc * bool (* super *)
| TArray of type_ * Z.t option * location
| TMapping of type_ * type_ * location (* storage ref or storage pointer *)
| TFunction of function_desc * function_options
| TAbstract of abstract_type
| TOptional of type_
one argument , but anything
| TDots (* any number of arguments, and anything *)
Internal use only
| TModifier of modifier_desc
| TEvent of event_desc
| TTuple of type_ option list
| TArraySlice of type_ * location (* is never an lvalue *)
| TType of type_ (* a type is an expression of type 'type' *)
| TMagic of magic_type
| TModule of absolute LongIdent.t * module_desc
| TRationalConst of Q.t * int option (* Some _ = size in bytes (if hex) *)
| TLiteralString of string
and magic_type =
| TMetaType of type_ (* result of type(X) *)
| TBlock (* type of the 'block' object *)
| TMsg (* type of the 'msg' object *)
| TTx (* type of the 'tx' object *)
| TAbi (* type of the 'abi' object *)
| TTvm (* type of the 'tvm' object *)
| TStatic of ( Ident.t option * type_ ) list
| TMath
| TRnd
(* source_unit (Import) *)
type annot += AImport of Ident.t
(* expression, statement, ident/field (incl. contract) *)
type annot += AType of type_ (* Rename to exp_type *)
type annot += ATypeId of type_desc
(* source_unit (ContractDefinition), inheritance_specifier *)
type annot += AContract of contract_desc
(* contract_part (StateVariableDeclaration), ident/field (even getter) *)
type annot += AVariable of variable_desc * bool (* true = getter *)
contract_part ( FunctionDefinition ) , constructor invocation , ident / field ( functions only )
type annot += AFunction of function_desc * bool (* true = from a using for *)
contract_part ( ModifierDefinition ) , ident / field
type annot += AModifier of modifier_desc
(* contract_part (EventDefinition), ident/field *)
type annot += AEvent of event_desc
type annot += AField of field_desc
type annot += AConstr of constr_desc
type annot += AModule of module_desc
(* ident/field *)
type annot += APrimitive
type args =
| AList of type_ list
| ANamed of (Ident.t * type_) list
type options = {
allow_empty: bool; (* whether to allow empty elements in tuple *)
call_args: args option; (* could just have an in_lvalue flag *)
fun_returns : type_ list;
in_loop: bool;
in_function: function_desc option;
in_modifier: bool;
current_hierarchy: absolute LongIdent.t list;
current_contract: contract_desc option;
}
| null | https://raw.githubusercontent.com/OCamlPro/ocaml-solidity/f39153f0a0b5b559f5b7d94a34977cf327439355/src/solidity-typechecker/solidity_checker_TYPES.ml | ocaml | ************************************************************************
All rights reserved.
This file is distributed under the terms of the GNU Lesser General
described in the LICENSE.md file in the root directory.
************************************************************************
i.e module/contract/fonction/block
empty list = all types = *
In: modules (temporary)
In: modules
In: modules
In: modules, contracts
In: modules, contracts, functions
In: modules, contracts
In: contracts
In: contracts
This is just a container for things imported using the import directive
Can be imported : types, contract, libraries AND MODULES (top-level stuff)
this aliases a module env
Note: order is important
when the variable has a getter
module/contract
Primitives have no definition
whether it modifies its contract
Note: Modifiers have no visibility nor mutability
false = ref, true = pointer
is always a reference
false = address, true = address payable
super
storage ref or storage pointer
any number of arguments, and anything
is never an lvalue
a type is an expression of type 'type'
Some _ = size in bytes (if hex)
result of type(X)
type of the 'block' object
type of the 'msg' object
type of the 'tx' object
type of the 'abi' object
type of the 'tvm' object
source_unit (Import)
expression, statement, ident/field (incl. contract)
Rename to exp_type
source_unit (ContractDefinition), inheritance_specifier
contract_part (StateVariableDeclaration), ident/field (even getter)
true = getter
true = from a using for
contract_part (EventDefinition), ident/field
ident/field
whether to allow empty elements in tuple
could just have an in_lvalue flag | Copyright ( c ) 2021 OCamlPro & Origin Labs
Public License version 2.1 , with the special exception on linking
open Solidity_common
type origin =
| Defined
| Imported
| Inherited
type env = {
mutable ident_map : ((ident_desc * origin) list) IdentMap.t;
mutable using_for : (env * (type_ list)) AbsLongIdentMap.t;
}
and ident_desc =
Internal use , not in envs
Internal use , not in envs
and alias_desc = {
alias_abs_name : absolute LongIdent.t;
alias_pos : pos;
alias_target_id : Ident.t;
alias_target_file : string;
alias_target_env : env;
mutable alias_targets : (ident_desc * origin) list;
}
and module_desc = {
module_abs_name : absolute LongIdent.t;
module_pos : pos;
module_file : string;
}
and type_desc =
| TDEnum of enum_desc
| TDStruct of struct_desc
and enum_desc = {
enum_abs_name : absolute LongIdent.t;
enum_pos : pos;
enum_values : (Ident.t * int) list;
}
and constr_desc = {
constr_enum_desc : enum_desc;
constr_name : Ident.t;
constr_value : int;
constr_type : type_;
}
and struct_desc = {
struct_abs_name : absolute LongIdent.t;
mutable has_mapping : bool;
struct_def : Solidity_ast.struct_definition;
}
and field_desc = {
field_struct_desc : struct_desc;
field_name : Ident.t;
field_type : type_;
}
and contract_desc = {
contract_abs_name : absolute LongIdent.t;
contract_env : env;
mutable contract_hierarchy : (absolute LongIdent.t * contract_desc) list;
Note : the most derived first , including itself
contract_def : Solidity_ast.contract_definition;
}
and variable_desc = {
variable_abs_name : absolute LongIdent.t;
mutable variable_type : type_;
variable_visibility : Solidity_ast.visibility;
variable_mutability : Solidity_ast.var_mutability;
variable_local : bool;
mutable variable_override : absolute LongIdent.t list option;
variable_is_primitive : bool;
mutable variable_ops : ( function_desc * variable_operation ) list ;
}
and function_desc = {
function_abs_name : absolute LongIdent.t;
mutable function_params : (type_ * Ident.t option) list;
mutable function_returns : (type_ * Ident.t option) list;
some primitives ( push / pop ) return lvalues
function_visibility : Solidity_ast.visibility;
function_mutability : Solidity_ast.fun_mutability;
mutable function_override : absolute LongIdent.t list option;
mutable function_selector : string option;
function_is_method : bool;
function_is_primitive : bool;
mutable function_ops : ( variable_desc * variable_operation ) list ;
mutable function_purity : function_purity ;
}
| PurityUnknown
| PurityPure
| PurityView
| PurityMute
and variable_operation =
| OpAssign
| OpAccess
| OpCall of function_desc
and modifier_desc = {
modifier_abs_name : absolute LongIdent.t;
mutable modifier_params : (type_ * Ident.t option) list;
modifier_def : Solidity_ast.modifier_definition;
}
and event_desc = {
event_abs_name : absolute LongIdent.t;
mutable event_params : (type_ * Ident.t option) list;
event_def : Solidity_ast.event_definition;
}
and fun_kind =
| KOther
| KNewContract
| KExtContractFun
| KReturn
and function_options = {
kind : fun_kind;
value : bool;
gas : bool;
salt : bool;
fields : StringSet.t ;
}
and location =
| LMemory
and abstract_type =
| TvmCell
| TvmSlice
| TvmBuilder
| TvmCall
TvmCall with .extMsg
and type_ =
| TBool
| TInt of int
| TUint of int
| TFixed of int * int
| TUfixed of int * int
| TFixBytes of int
| TBytes of location
| TString of location
| TEnum of absolute LongIdent.t * enum_desc
| TStruct of absolute LongIdent.t * struct_desc * location
| TArray of type_ * Z.t option * location
| TFunction of function_desc * function_options
| TAbstract of abstract_type
| TOptional of type_
one argument , but anything
Internal use only
| TModifier of modifier_desc
| TEvent of event_desc
| TTuple of type_ option list
| TMagic of magic_type
| TModule of absolute LongIdent.t * module_desc
| TLiteralString of string
and magic_type =
| TStatic of ( Ident.t option * type_ ) list
| TMath
| TRnd
type annot += AImport of Ident.t
type annot += ATypeId of type_desc
type annot += AContract of contract_desc
contract_part ( FunctionDefinition ) , constructor invocation , ident / field ( functions only )
contract_part ( ModifierDefinition ) , ident / field
type annot += AModifier of modifier_desc
type annot += AEvent of event_desc
type annot += AField of field_desc
type annot += AConstr of constr_desc
type annot += AModule of module_desc
type annot += APrimitive
type args =
| AList of type_ list
| ANamed of (Ident.t * type_) list
type options = {
fun_returns : type_ list;
in_loop: bool;
in_function: function_desc option;
in_modifier: bool;
current_hierarchy: absolute LongIdent.t list;
current_contract: contract_desc option;
}
|
3e271098883ff1d71f52c880c35f7bf5c10ae1c4f77473dec9ac5e2bd1a8a26b | bennn/iPoe | numbers.rkt | #lang ipoe/haiku
#:comment "This-is-a-'certified'-haiku.-Because-numbers-are-converted-to-English."
1 2 3 4 5
12000102
860
| null | https://raw.githubusercontent.com/bennn/iPoe/4a988f6537fb738b4fe842c404f9d78f658ab76f/examples/haiku/numbers.rkt | racket | #lang ipoe/haiku
#:comment "This-is-a-'certified'-haiku.-Because-numbers-are-converted-to-English."
1 2 3 4 5
12000102
860
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.